prompt listlengths 1 1 | compression_prompt listlengths 1 1 | target stringlengths 1.03k 828k |
|---|---|---|
[
{
"content": "Reconstruct the code exactly:\n```python\n'''\nThe Salt Console, this system is used to display a curses console that posts\nevent and job data as it happens from within Salt\n'''\n\n# Import python libs\nimport curses\n\n# Import salt libs\nimport salt.utils.event\n\n\nclass SaltConsole(object):\... | [
{
"content": "Reconstruct the code exactly:\n<|memory_start|>```python\n'''\nThe Salt Console, this system is used to display a curses console that posts\nevent and job data as it happens from within Salt\n'''\n\n# Import python libs\nimport curses\n\n# Import salt libs\nimport salt.utils.event\n\n\nclass SaltC... | ```python
'''
The Salt Console, this system is used to display a curses console that posts
event and job data as it happens from within Salt
'''
# Import python libs
import curses
# Import salt libs
import salt.utils.event
class SaltConsole(object):
'''
'''
def __init__(self, opts):
self.opts = opts
self.scr = curses.initscr()
self.event = salt.utils.event()
self.__setup_screen()
def __setup_screen(self):
'''
'''
# Prep Curses
curses.noecho()
curses.cbreak()
curses.curs_set(0)
# Prep the screen
self.scr.keypad(1)
self.scr.box()
self.scr.addstr(1, 1, 'Salt Console')
self.scr.addstr(1, 2, '='*34)
# Turn it all on!
self.scr.refresh()
def term(self):
'''
'''
curses.curs_set(1)
curses.nocbreak()
self.scr.keypad(0)
curses.echo()
curses.endwin()
def run(self):
'''
'''
while True:
try:
pass
except Exception:
self.term()
if __name__ == '__main__':
console = SaltConsole()
console.run()
``` |
[
{
"content": "Here is some code:\n```python\nfrom merc import message\n\n\nclass BaseError(Exception, message.Reply):\n pass\n\n\nclass Error(Exception, message.Message):\n NAME = \"ERROR\"\n FORCE_TRAILING = True\n\n def __init__(self, reason):\n self.reason = reason\n\n def as_params(self, client):\n ... | [
{
"content": "Here is some code:\n<|memory_start|>```python\nfrom merc import message\n\n\nclass BaseError(Exception, message.Reply):\n pass\n\n\nclass Error(Exception, message.Message):\n NAME = \"ERROR\"\n FORCE_TRAILING = True\n\n def __init__(self, reason):\n self.reason = reason\n\n def as_params(s... | ```python
from merc import message
class BaseError(Exception, message.Reply):
pass
class Error(Exception, message.Message):
NAME = "ERROR"
FORCE_TRAILING = True
def __init__(self, reason):
self.reason = reason
def as_params(self, client):
return [self.reason]
class LinkError(Error):
NAME = "ERROR"
FORCE_TRAILING = True
def as_params(self, client):
host, *_ = client.protocol.transport.get_extra_info("peername")
return ["Closing link: {} ({})".format(host, self.reason)]
class SimpleError(BaseError):
def as_reply_params(self):
return [self.REASON]
class ParametrizedError(BaseError):
def __init__(self, *params):
self.params = params
def as_reply_params(self):
return list(self.params) + [self.REASON]
class NoSuchNick(ParametrizedError):
NAME = "401"
REASON = "No such nick/channel"
class NoSuchServer(ParametrizedError):
NAME = "402"
REASON = "No such server"
class NoSuchChannel(ParametrizedError):
NAME = "403"
REASON = "No such channel"
class CannotSendToChan(ParametrizedError):
NAME = "404"
REASON = "Cannot send to channel"
class ErroneousNickname(SimpleError):
NAME = "432"
REASON = "Erroneous nickname"
class NicknameInUse(ParametrizedError):
NAME = "433"
REASON = "Nickname in use"
class NotRegistered(SimpleError):
NAME = "451"
REASON = "You have not registered"
class NeedMoreParams(ParametrizedError):
NAME = "461"
REASON = "Not enough parameters"
class UnknownCommand(ParametrizedError):
NAME = "421"
REASON = "Unknown command"
class UnknownMode(ParametrizedError):
NAME = "472"
REASON = "is an unknown mode char to me"
class UmodeUnknownFlag(SimpleError):
NAME = "501"
REASON = "Unknown MODE flag"
class UsersDontMatch(SimpleError):
NAME = "502"
REASON = "Can't change mode for other users"
class ChanOpPrivsNeeded(ParametrizedError):
NAME = "482"
REASON = "You're not a channel operator"
class NotOnChannel(ParametrizedError):
NAME = "442"
REASON = "You're not on that channel"
class AlreadyOnChannel(ParametrizedError):
NAME = "443"
REASON = "is already on channel"
class PasswordMismatch(SimpleError):
NAME = "464"
REASON = "Password mismatch"
class NoPrivileges(SimpleError):
NAME = "481"
REASON = "You're not an IRC operator"
class BannedFromChannel(ParametrizedError):
NAME = "474"
REASON = "You are banned from the channel"
class AlreadyRegistered(SimpleError):
NAME = "462"
REASON = "You may not reregister"
``` |
[
{
"content": "Provide an exact copy of the source code:\n```python\nfrom setuptools import setup, find_packages\n\nimport taggit\n\n\nwith open('README.rst') as f:\n readme = f.read()\n\nsetup(\n name='django-taggit',\n version='.'.join(str(i) for i in taggit.VERSION[:3]) + ((\"+\" + taggit.VERSION[3])... | [
{
"content": "Provide an exact copy of the source code:\n<|memory_start|>```python\nfrom setuptools import setup, find_packages\n\nimport taggit\n\n\nwith open('README.rst') as f:\n readme = f.read()\n\nsetup(\n name='django-taggit',\n version='.'.join(str(i) for i in taggit.VERSION[:3]) + ((\"+\" + ta... | ```python
from setuptools import setup, find_packages
import taggit
with open('README.rst') as f:
readme = f.read()
setup(
name='django-taggit',
version='.'.join(str(i) for i in taggit.VERSION[:3]) + (("+" + taggit.VERSION[3]) if len(taggit.VERSION) > 3 else ""),
description='django-taggit is a reusable Django application for simple tagging.',
long_description=readme,
author='Alex Gaynor',
author_email='alex.gaynor@gmail.com',
url='http://github.com/alex/django-taggit/tree/master',
packages=find_packages(exclude=('tests*',)),
package_data = {
'taggit': [
'locale/*/LC_MESSAGES/*',
],
},
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
],
include_package_data=True,
zip_safe=False,
)
``` |
[
{
"content": "```python\nfrom django.contrib.sessions.middleware import SessionMiddleware\nfrom django.utils import six\nfrom rest_framework import status\n\nfrom djet import assertions, restframework\nimport djoser.social.views\nfrom social_core.exceptions import AuthException\n\nfrom ..common import create_us... | [
{
"content": "<|memory_start|>```python\nfrom django.contrib.sessions.middleware import SessionMiddleware\nfrom django.utils import six\nfrom rest_framework import status\n\nfrom djet import assertions, restframework\nimport djoser.social.views\nfrom social_core.exceptions import AuthException\n\nfrom ..common ... | ```python
from django.contrib.sessions.middleware import SessionMiddleware
from django.utils import six
from rest_framework import status
from djet import assertions, restframework
import djoser.social.views
from social_core.exceptions import AuthException
from ..common import create_user, mock
class ProviderAuthViewTestCase(restframework.APIViewTestCase,
assertions.StatusCodeAssertionsMixin):
view_class = djoser.social.views.ProviderAuthView
middleware = [SessionMiddleware]
def test_get_facebook_provider_fails_if_no_redirect_uri(self):
request = self.factory.get()
response = self.view(request, provider='facebook')
self.assert_status_equal(response, status.HTTP_400_BAD_REQUEST)
def test_get_facebook_provider_fails_if_wrong_redirect_uri(self):
request = self.factory.get(data={'redirect_uri': 'http://yolo.com/'})
response = self.view(request, provider='facebook')
self.assert_status_equal(response, status.HTTP_400_BAD_REQUEST)
def test_get_facebook_provider_provides_valid_authorization_url(self):
request = self.factory.get(data={
'redirect_uri': 'http://test.localhost/'
})
response = self.view(request, provider='facebook')
self.assert_status_equal(response, status.HTTP_200_OK)
self.assertIn('authorization_url', response.data)
def test_post_facebook_provider_success_returns_token(self):
data = {'code': 'XYZ', 'state': 'ABC'}
mock.patch(
'social_core.backends.facebook.FacebookOAuth2.auth_complete',
return_value=create_user()
).start()
mock.patch(
'social_core.backends.oauth.OAuthAuth.get_session_state',
return_value=data['state']
).start()
request = self.factory.post()
request.GET = {k: v for k, v in six.iteritems(data)}
response = self.view(request, provider='facebook')
self.assert_status_equal(response, status.HTTP_201_CREATED)
self.assertEqual(set(response.data.keys()), {'token', 'user'})
def test_post_facebook_provider_code_validation_fails(self):
data = {'code': 'XYZ', 'state': 'ABC'}
mock.patch(
'social_core.backends.facebook.FacebookOAuth2.auth_complete',
side_effect=AuthException(backend=None)
).start()
mock.patch(
'social_core.backends.oauth.OAuthAuth.get_session_state',
return_value=data['state']
).start()
request = self.factory.post()
request.GET = {k: v for k, v in six.iteritems(data)}
response = self.view(request, provider='facebook')
self.assert_status_equal(response, status.HTTP_400_BAD_REQUEST)
def test_post_facebook_provider_validation_fails_if_invalid_state(self):
data = {'code': 'XYZ', 'state': 'ABC'}
mock.patch(
'social_core.backends.oauth.OAuthAuth.get_session_state',
return_value=data['state'][::-1]
).start()
request = self.factory.post()
request.GET = {k: v for k, v in six.iteritems(data)}
response = self.view(request, provider='facebook')
self.assert_status_equal(response, status.HTTP_400_BAD_REQUEST)
``` |
[
{
"content": "Repeat the code exactly:\n```python\nfrom __future__ import absolute_import\n\nimport sys\nimport datetime as dt\nfrom collections import OrderedDict, defaultdict, Iterable\n\ntry:\n import itertools.izip as zip\nexcept ImportError:\n pass\n\nimport numpy as np\n\nfrom .dictionary import Dic... | [
{
"content": "Repeat the code exactly:\n<|memory_start|>```python\nfrom __future__ import absolute_import\n\nimport sys\nimport datetime as dt\nfrom collections import OrderedDict, defaultdict, Iterable\n\ntry:\n import itertools.izip as zip\nexcept ImportError:\n pass\n\nimport numpy as np\n\nfrom .dicti... | ```python
from __future__ import absolute_import
import sys
import datetime as dt
from collections import OrderedDict, defaultdict, Iterable
try:
import itertools.izip as zip
except ImportError:
pass
import numpy as np
from .dictionary import DictInterface
from .interface import Interface, DataError
from ..dimension import dimension_name
from ..element import Element
from ..dimension import OrderedDict as cyODict
from ..ndmapping import NdMapping, item_check, sorted_context
from .. import util
from .interface import is_dask, dask_array_module, get_array_types
class GridInterface(DictInterface):
"""
Interface for simple dictionary-based dataset format using a
compressed representation that uses the cartesian product between
key dimensions. As with DictInterface, the dictionary keys correspond
to the column (i.e dimension) names and the values are NumPy arrays
representing the values in that column.
To use this compressed format, the key dimensions must be orthogonal
to one another with each key dimension specifying an axis of the
multidimensional space occupied by the value dimension data. For
instance, given an temperature recordings sampled regularly across
the earth surface, a list of N unique latitudes and M unique
longitudes can specify the position of NxM temperature samples.
"""
types = (dict, OrderedDict, cyODict)
datatype = 'grid'
gridded = True
@classmethod
def init(cls, eltype, data, kdims, vdims):
if kdims is None:
kdims = eltype.kdims
if vdims is None:
vdims = eltype.vdims
if not vdims:
raise ValueError('GridInterface interface requires at least '
'one value dimension.')
ndims = len(kdims)
dimensions = [dimension_name(d) for d in kdims+vdims]
if isinstance(data, tuple):
data = {d: v for d, v in zip(dimensions, data)}
elif isinstance(data, list) and data == []:
data = OrderedDict([(d, []) for d in dimensions])
elif not any(isinstance(data, tuple(t for t in interface.types if t is not None))
for interface in cls.interfaces.values()):
data = {k: v for k, v in zip(dimensions, zip(*data))}
elif isinstance(data, np.ndarray):
if data.ndim == 1:
if eltype._auto_indexable_1d and len(kdims)+len(vdims)>1:
data = np.column_stack([np.arange(len(data)), data])
else:
data = np.atleast_2d(data).T
data = {k: data[:,i] for i,k in enumerate(dimensions)}
elif isinstance(data, list) and data == []:
data = {d: np.array([]) for d in dimensions[:ndims]}
data.update({d: np.empty((0,) * ndims) for d in dimensions[ndims:]})
elif not isinstance(data, dict):
raise TypeError('GridInterface must be instantiated as a '
'dictionary or tuple')
for dim in kdims+vdims:
name = dimension_name(dim)
if name not in data:
raise ValueError("Values for dimension %s not found" % dim)
if not isinstance(data[name], get_array_types()):
data[name] = np.array(data[name])
kdim_names = [dimension_name(d) for d in kdims]
vdim_names = [dimension_name(d) for d in vdims]
expected = tuple([len(data[kd]) for kd in kdim_names])
irregular_shape = data[kdim_names[0]].shape if kdim_names else ()
valid_shape = irregular_shape if len(irregular_shape) > 1 else expected[::-1]
shapes = tuple([data[kd].shape for kd in kdim_names])
for vdim in vdim_names:
shape = data[vdim].shape
error = DataError if len(shape) > 1 else ValueError
if (not expected and shape == (1,)) or (len(set((shape,)+shapes)) == 1 and len(shape) > 1):
# If empty or an irregular mesh
pass
elif len(shape) != len(expected):
raise error('The shape of the %s value array does not '
'match the expected dimensionality indicated '
'by the key dimensions. Expected %d-D array, '
'found %d-D array.' % (vdim, len(expected), len(shape)))
elif any((s!=e and (s+1)!=e) for s, e in zip(shape, valid_shape)):
raise error('Key dimension values and value array %s '
'shapes do not match. Expected shape %s, '
'actual shape: %s' % (vdim, valid_shape, shape), cls)
return data, {'kdims':kdims, 'vdims':vdims}, {}
@classmethod
def concat(cls, datasets, dimensions, vdims):
from . import Dataset
with sorted_context(False):
datasets = NdMapping(datasets, kdims=dimensions)
datasets = datasets.clone([(k, v.data if isinstance(v, Dataset) else v)
for k, v in datasets.data.items()])
if len(datasets.kdims) > 1:
items = datasets.groupby(datasets.kdims[:-1]).data.items()
return cls.concat([(k, cls.concat(v, v.kdims, vdims=vdims)) for k, v in items],
datasets.kdims[:-1], vdims)
return cls.concat_dim(datasets, datasets.kdims[0], vdims)
@classmethod
def concat_dim(cls, datasets, dim, vdims):
values, grids = zip(*datasets.items())
new_data = {k: v for k, v in grids[0].items() if k not in vdims}
new_data[dim.name] = np.array(values)
for vdim in vdims:
arrays = [grid[vdim.name] for grid in grids]
shapes = set(arr.shape for arr in arrays)
if len(shapes) > 1:
raise DataError('When concatenating gridded data the shape '
'of arrays must match. %s found that arrays '
'along the %s dimension do not match.' %
(cls.__name__, vdim.name))
stack = dask_array_module().stack if any(is_dask(arr) for arr in arrays) else np.stack
new_data[vdim.name] = stack(arrays, -1)
return new_data
@classmethod
def irregular(cls, dataset, dim):
return dataset.data[dimension_name(dim)].ndim > 1
@classmethod
def isscalar(cls, dataset, dim):
values = cls.values(dataset, dim, expanded=False)
return values.shape in ((), (1,)) or len(np.unique(values)) == 1
@classmethod
def validate(cls, dataset, vdims=True):
Interface.validate(dataset, vdims)
@classmethod
def dimension_type(cls, dataset, dim):
if dim in dataset.dimensions():
arr = cls.values(dataset, dim, False, False)
else:
return None
return arr.dtype.type
@classmethod
def shape(cls, dataset, gridded=False):
shape = dataset.data[dataset.vdims[0].name].shape
if gridded:
return shape
else:
return (np.product(shape, dtype=np.intp), len(dataset.dimensions()))
@classmethod
def length(cls, dataset):
return cls.shape(dataset)[0]
@classmethod
def _infer_interval_breaks(cls, coord, axis=0):
"""
>>> GridInterface._infer_interval_breaks(np.arange(5))
array([-0.5, 0.5, 1.5, 2.5, 3.5, 4.5])
>>> GridInterface._infer_interval_breaks([[0, 1], [3, 4]], axis=1)
array([[-0.5, 0.5, 1.5],
[ 2.5, 3.5, 4.5]])
"""
coord = np.asarray(coord)
if sys.version_info.major == 2 and len(coord) and isinstance(coord[0], (dt.datetime, dt.date)):
# np.diff does not work on datetimes in python 2
coord = coord.astype('datetime64')
deltas = 0.5 * np.diff(coord, axis=axis)
first = np.take(coord, [0], axis=axis) - np.take(deltas, [0], axis=axis)
last = np.take(coord, [-1], axis=axis) + np.take(deltas, [-1], axis=axis)
trim_last = tuple(slice(None, -1) if n == axis else slice(None)
for n in range(coord.ndim))
return np.concatenate([first, coord[trim_last] + deltas, last], axis=axis)
@classmethod
def coords(cls, dataset, dim, ordered=False, expanded=False, edges=False):
"""
Returns the coordinates along a dimension. Ordered ensures
coordinates are in ascending order and expanded creates
ND-array matching the dimensionality of the dataset.
"""
dim = dataset.get_dimension(dim, strict=True)
irregular = cls.irregular(dataset, dim)
if irregular or expanded:
if irregular:
data = dataset.data[dim.name]
else:
data = util.expand_grid_coords(dataset, dim)
if edges and data.shape == dataset.data[dataset.vdims[0].name].shape:
data = cls._infer_interval_breaks(data, axis=1)
data = cls._infer_interval_breaks(data, axis=0)
return data
data = dataset.data[dim.name]
if ordered and np.all(data[1:] < data[:-1]):
data = data[::-1]
shape = cls.shape(dataset, True)
if dim in dataset.kdims:
idx = dataset.get_dimension_index(dim)
isedges = (dim in dataset.kdims and len(shape) == dataset.ndims
and len(data) == (shape[dataset.ndims-idx-1]+1))
else:
isedges = False
if edges and not isedges:
data = cls._infer_interval_breaks(data)
elif not edges and isedges:
data = data[:-1] + np.diff(data)/2.
return data
@classmethod
def canonicalize(cls, dataset, data, data_coords=None, virtual_coords=[]):
"""
Canonicalize takes an array of values as input and reorients
and transposes it to match the canonical format expected by
plotting functions. In certain cases the dimensions defined
via the kdims of an Element may not match the dimensions of
the underlying data. A set of data_coords may be passed in to
define the dimensionality of the data, which can then be used
to np.squeeze the data to remove any constant dimensions. If
the data is also irregular, i.e. contains multi-dimensional
coordinates, a set of virtual_coords can be supplied, required
by some interfaces (e.g. xarray) to index irregular datasets
with a virtual integer index. This ensures these coordinates
are not simply dropped.
"""
if data_coords is None:
data_coords = dataset.dimensions('key', label='name')[::-1]
# Transpose data
dims = [name for name in data_coords
if isinstance(cls.coords(dataset, name), get_array_types())]
dropped = [dims.index(d) for d in dims
if d not in dataset.kdims+virtual_coords]
if dropped:
data = np.squeeze(data, axis=tuple(dropped))
if not any(cls.irregular(dataset, d) for d in dataset.kdims):
inds = [dims.index(kd.name) for kd in dataset.kdims]
inds = [i - sum([1 for d in dropped if i>=d]) for i in inds]
if inds:
data = data.transpose(inds[::-1])
# Reorient data
invert = False
slices = []
for d in dataset.kdims[::-1]:
coords = cls.coords(dataset, d)
if np.all(coords[1:] < coords[:-1]) and not coords.ndim > 1:
slices.append(slice(None, None, -1))
invert = True
else:
slices.append(slice(None))
data = data[tuple(slices)] if invert else data
# Allow lower dimensional views into data
if len(dataset.kdims) < 2:
data = data.flatten()
return data
@classmethod
def invert_index(cls, index, length):
if np.isscalar(index):
return length - index
elif isinstance(index, slice):
start, stop = index.start, index.stop
new_start, new_stop = None, None
if start is not None:
new_stop = length - start
if stop is not None:
new_start = length - stop
return slice(new_start-1, new_stop-1)
elif isinstance(index, Iterable):
new_index = []
for ind in index:
new_index.append(length-ind)
return new_index
@classmethod
def ndloc(cls, dataset, indices):
selected = {}
adjusted_inds = []
all_scalar = True
for i, (kd, ind) in enumerate(zip(dataset.kdims[::-1], indices)):
coords = cls.coords(dataset, kd.name, True)
if np.isscalar(ind):
ind = [ind]
else:
all_scalar = False
selected[kd.name] = coords[ind]
adjusted_inds.append(ind)
for kd in dataset.kdims:
if kd.name not in selected:
coords = cls.coords(dataset, kd.name)
selected[kd.name] = coords
all_scalar = False
for d in dataset.dimensions():
if d in dataset.kdims and not cls.irregular(dataset, d):
continue
arr = cls.values(dataset, d, flat=False, compute=False)
if all_scalar and len(dataset.vdims) == 1:
return arr[tuple(ind[0] for ind in adjusted_inds)]
selected[d.name] = arr[tuple(adjusted_inds)]
return tuple(selected[d.name] for d in dataset.dimensions())
@classmethod
def values(cls, dataset, dim, expanded=True, flat=True, compute=True):
dim = dataset.get_dimension(dim, strict=True)
if dim in dataset.vdims or dataset.data[dim.name].ndim > 1:
data = dataset.data[dim.name]
data = cls.canonicalize(dataset, data)
da = dask_array_module()
if compute and da and isinstance(data, da.Array):
data = data.compute()
return data.T.flatten() if flat else data
elif expanded:
data = cls.coords(dataset, dim.name, expanded=True)
return data.T.flatten() if flat else data
else:
return cls.coords(dataset, dim.name, ordered=True)
@classmethod
def groupby(cls, dataset, dim_names, container_type, group_type, **kwargs):
# Get dimensions information
dimensions = [dataset.get_dimension(d, strict=True) for d in dim_names]
if 'kdims' in kwargs:
kdims = kwargs['kdims']
else:
kdims = [kdim for kdim in dataset.kdims if kdim not in dimensions]
kwargs['kdims'] = kdims
invalid = [d for d in dimensions if dataset.data[d.name].ndim > 1]
if invalid:
if len(invalid) == 1: invalid = "'%s'" % invalid[0]
raise ValueError("Cannot groupby irregularly sampled dimension(s) %s."
% invalid)
# Update the kwargs appropriately for Element group types
group_kwargs = {}
group_type = dict if group_type == 'raw' else group_type
if issubclass(group_type, Element):
group_kwargs.update(util.get_param_values(dataset))
else:
kwargs.pop('kdims')
group_kwargs.update(kwargs)
drop_dim = any(d not in group_kwargs['kdims'] for d in kdims)
# Find all the keys along supplied dimensions
keys = [cls.coords(dataset, d.name) for d in dimensions]
transpose = [dataset.ndims-dataset.kdims.index(kd)-1 for kd in kdims]
transpose += [i for i in range(dataset.ndims) if i not in transpose]
# Iterate over the unique entries applying selection masks
grouped_data = []
for unique_key in zip(*util.cartesian_product(keys)):
select = dict(zip(dim_names, unique_key))
if drop_dim:
group_data = dataset.select(**select)
group_data = group_data if np.isscalar(group_data) else group_data.columns()
else:
group_data = cls.select(dataset, **select)
if np.isscalar(group_data) or (isinstance(group_data, get_array_types()) and group_data.shape == ()):
group_data = {dataset.vdims[0].name: np.atleast_1d(group_data)}
for dim, v in zip(dim_names, unique_key):
group_data[dim] = np.atleast_1d(v)
elif not drop_dim:
if isinstance(group_data, get_array_types()):
group_data = {dataset.vdims[0].name: group_data}
for vdim in dataset.vdims:
data = group_data[vdim.name]
data = data.transpose(transpose[::-1])
group_data[vdim.name] = np.squeeze(data)
group_data = group_type(group_data, **group_kwargs)
grouped_data.append((tuple(unique_key), group_data))
if issubclass(container_type, NdMapping):
with item_check(False):
return container_type(grouped_data, kdims=dimensions)
else:
return container_type(grouped_data)
@classmethod
def key_select_mask(cls, dataset, values, ind):
if isinstance(ind, tuple):
ind = slice(*ind)
if isinstance(ind, get_array_types()):
mask = ind
elif isinstance(ind, slice):
mask = True
if ind.start is not None:
mask &= ind.start <= values
if ind.stop is not None:
mask &= values < ind.stop
# Expand empty mask
if mask is True:
mask = np.ones(values.shape, dtype=np.bool)
elif isinstance(ind, (set, list)):
iter_slcs = []
for ik in ind:
iter_slcs.append(values == ik)
mask = np.logical_or.reduce(iter_slcs)
elif callable(ind):
mask = ind(values)
elif ind is None:
mask = None
else:
index_mask = values == ind
if (dataset.ndims == 1 or dataset._binned) and np.sum(index_mask) == 0:
data_index = np.argmin(np.abs(values - ind))
mask = np.zeros(len(values), dtype=np.bool)
mask[data_index] = True
else:
mask = index_mask
if mask is None:
mask = np.ones(values.shape, dtype=bool)
return mask
@classmethod
def select(cls, dataset, selection_mask=None, **selection):
dimensions = dataset.kdims
val_dims = [vdim for vdim in dataset.vdims if vdim in selection]
if val_dims:
raise IndexError('Cannot slice value dimensions in compressed format, '
'convert to expanded format before slicing.')
indexed = cls.indexed(dataset, selection)
full_selection = [(d, selection.get(d.name, selection.get(d.label)))
for d in dimensions]
data = {}
value_select = []
for i, (dim, ind) in enumerate(full_selection):
irregular = cls.irregular(dataset, dim)
values = cls.coords(dataset, dim, irregular)
mask = cls.key_select_mask(dataset, values, ind)
if irregular:
if np.isscalar(ind) or isinstance(ind, (set, list)):
raise IndexError("Indexing not supported for irregularly "
"sampled data. %s value along %s dimension."
"must be a slice or 2D boolean mask."
% (ind, dim))
mask = mask.max(axis=i)
elif dataset._binned:
edges = cls.coords(dataset, dim, False, edges=True)
inds = np.argwhere(mask)
if np.isscalar(ind):
emin, emax = edges.min(), edges.max()
if ind < emin:
raise IndexError("Index %s less than lower bound "
"of %s for %s dimension." % (ind, emin, dim))
elif ind >= emax:
raise IndexError("Index %s more than or equal to upper bound "
"of %s for %s dimension." % (ind, emax, dim))
idx = max([np.digitize([ind], edges)[0]-1, 0])
mask = np.zeros(len(values), dtype=np.bool)
mask[idx] = True
values = edges[idx:idx+2]
elif len(inds):
values = edges[inds.min(): inds.max()+2]
else:
values = edges[0:0]
else:
values = values[mask]
values, mask = np.asarray(values), np.asarray(mask)
value_select.append(mask)
data[dim.name] = np.array([values]) if np.isscalar(values) else values
int_inds = [np.argwhere(v) for v in value_select][::-1]
index = np.ix_(*[np.atleast_1d(np.squeeze(ind)) if ind.ndim > 1 else np.atleast_1d(ind)
for ind in int_inds])
for kdim in dataset.kdims:
if cls.irregular(dataset, dim):
da = dask_array_module()
if da and isinstance(dataset.data[kdim.name], da.Array):
data[kdim.name] = dataset.data[kdim.name].vindex[index]
else:
data[kdim.name] = np.asarray(data[kdim.name])[index]
for vdim in dataset.vdims:
da = dask_array_module()
if da and isinstance(dataset.data[vdim.name], da.Array):
data[vdim.name] = dataset.data[vdim.name].vindex[index]
else:
data[vdim.name] = np.asarray(dataset.data[vdim.name])[index]
if indexed:
if len(dataset.vdims) == 1:
da = dask_array_module()
arr = np.squeeze(data[dataset.vdims[0].name])
if da and isinstance(arr, da.Array):
arr = arr.compute()
return arr if np.isscalar(arr) else arr[()]
else:
return np.array([np.squeeze(data[vd.name])
for vd in dataset.vdims])
return data
@classmethod
def sample(cls, dataset, samples=[]):
"""
Samples the gridded data into dataset of samples.
"""
ndims = dataset.ndims
dimensions = dataset.dimensions(label='name')
arrays = [dataset.data[vdim.name] for vdim in dataset.vdims]
data = defaultdict(list)
for sample in samples:
if np.isscalar(sample): sample = [sample]
if len(sample) != ndims:
sample = [sample[i] if i < len(sample) else None
for i in range(ndims)]
sampled, int_inds = [], []
for d, ind in zip(dimensions, sample):
cdata = dataset.data[d]
mask = cls.key_select_mask(dataset, cdata, ind)
inds = np.arange(len(cdata)) if mask is None else np.argwhere(mask)
int_inds.append(inds)
sampled.append(cdata[mask])
for d, arr in zip(dimensions, np.meshgrid(*sampled)):
data[d].append(arr)
for vdim, array in zip(dataset.vdims, arrays):
da = dask_array_module()
flat_index = np.ravel_multi_index(tuple(int_inds)[::-1], array.shape)
if da and isinstance(array, da.Array):
data[vdim.name].append(array.flatten().vindex[tuple(flat_index)])
else:
data[vdim.name].append(array.flat[flat_index])
concatenated = {d: np.concatenate(arrays).flatten() for d, arrays in data.items()}
return concatenated
@classmethod
def aggregate(cls, dataset, kdims, function, **kwargs):
kdims = [dimension_name(kd) for kd in kdims]
data = {kdim: dataset.data[kdim] for kdim in kdims}
axes = tuple(dataset.ndims-dataset.get_dimension_index(kdim)-1
for kdim in dataset.kdims if kdim not in kdims)
da = dask_array_module()
dropped = []
for vdim in dataset.vdims:
values = dataset.data[vdim.name]
atleast_1d = da.atleast_1d if is_dask(values) else np.atleast_1d
try:
data[vdim.name] = atleast_1d(function(values, axis=axes, **kwargs))
except TypeError:
dropped.append(vdim)
return data, dropped
@classmethod
def reindex(cls, dataset, kdims, vdims):
dropped_kdims = [kd for kd in dataset.kdims if kd not in kdims]
dropped_vdims = ([vdim for vdim in dataset.vdims
if vdim not in vdims] if vdims else [])
constant = {}
for kd in dropped_kdims:
vals = cls.values(dataset, kd.name, expanded=False)
if len(vals) == 1:
constant[kd.name] = vals[0]
data = {k: values for k, values in dataset.data.items()
if k not in dropped_kdims+dropped_vdims}
if len(constant) == len(dropped_kdims):
joined_dims = kdims+dropped_kdims
axes = tuple(dataset.ndims-dataset.kdims.index(d)-1
for d in joined_dims)
dropped_axes = tuple(dataset.ndims-joined_dims.index(d)-1
for d in dropped_kdims)
for vdim in vdims:
vdata = data[vdim.name]
if len(axes) > 1:
vdata = vdata.transpose(axes[::-1])
if dropped_axes:
vdata = np.squeeze(vdata, axis=dropped_axes)
data[vdim.name] = vdata
return data
elif dropped_kdims:
return tuple(dataset.columns(kdims+vdims).values())
return data
@classmethod
def add_dimension(cls, dataset, dimension, dim_pos, values, vdim):
if not vdim:
raise Exception("Cannot add key dimension to a dense representation.")
dim = dimension_name(dimension)
return dict(dataset.data, **{dim: values})
@classmethod
def sort(cls, dataset, by=[], reverse=False):
if not by or by in [dataset.kdims, dataset.dimensions()]:
return dataset.data
else:
raise Exception('Compressed format cannot be sorted, either instantiate '
'in the desired order or use the expanded format.')
@classmethod
def iloc(cls, dataset, index):
rows, cols = index
scalar = False
if np.isscalar(cols):
scalar = np.isscalar(rows)
cols = [dataset.get_dimension(cols, strict=True)]
elif isinstance(cols, slice):
cols = dataset.dimensions()[cols]
else:
cols = [dataset.get_dimension(d, strict=True) for d in cols]
if np.isscalar(rows):
rows = [rows]
new_data = []
for d in cols:
new_data.append(cls.values(dataset, d, compute=False)[rows])
if scalar:
da = dask_array_module()
if new_data and (da and isinstance(new_data[0], da.Array)):
return new_data[0].compute()[0]
return new_data[0][0]
return tuple(new_data)
@classmethod
def range(cls, dataset, dimension):
if dataset._binned and dimension in dataset.kdims:
expanded = cls.irregular(dataset, dimension)
column = cls.coords(dataset, dimension, expanded=expanded, edges=True)
else:
column = cls.values(dataset, dimension, expanded=False, flat=False)
da = dask_array_module()
if column.dtype.kind == 'M':
dmin, dmax = column.min(), column.max()
if da and isinstance(column, da.Array):
return da.compute(dmin, dmax)
return dmin, dmax
elif len(column) == 0:
return np.NaN, np.NaN
else:
try:
dmin, dmax = (np.nanmin(column), np.nanmax(column))
if da and isinstance(column, da.Array):
return da.compute(dmin, dmax)
return dmin, dmax
except TypeError:
column.sort()
return column[0], column[-1]
Interface.register(GridInterface)
``` |
[
{
"content": "Here is the source code:\n```python\n#!/usr/bin/env python\n# -*- encoding: utf-8 -*-\n\n# copyright 2016 twitter. all rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a co... | [
{
"content": "Here is the source code:\n<|memory_start|>```python\n#!/usr/bin/env python\n# -*- encoding: utf-8 -*-\n\n# copyright 2016 twitter. all rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You... | ```python
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# copyright 2016 twitter. all rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=missing-docstring
from heronpy.api.stream import Grouping
from integration_test.src.python.integration_test.core import TestTopologyBuilder
from integration_test.src.python.integration_test.common.bolt import IdentityBolt
from integration_test.src.python.integration_test.common.spout import ABSpout
def one_spout_bolt_multi_tasks_builder(topology_name, http_server_url):
builder = TestTopologyBuilder(topology_name, http_server_url)
ab_spout = builder.add_spout("ab-spout", ABSpout, 3)
builder.add_bolt("identity-bolt", IdentityBolt,
inputs={ab_spout: Grouping.SHUFFLE},
par=3,
optional_outputs=['word'])
return builder.create_topology()
``` |
[
{
"content": "Here is some code:\n```python\n#!/usr/bin/env python\n\n#\n# $File: SelfMating.py $\n#\n# This file is part of simuPOP, a forward-time population genetics\n# simulation environment. Please visit http://simupop.sourceforge.net\n# for details.\n#\n# Copyright (C) 2004 - 2010 Bo Peng (bpeng@mdanderso... | [
{
"content": "Here is some code:\n<|memory_start|>```python\n#!/usr/bin/env python\n\n#\n# $File: SelfMating.py $\n#\n# This file is part of simuPOP, a forward-time population genetics\n# simulation environment. Please visit http://simupop.sourceforge.net\n# for details.\n#\n# Copyright (C) 2004 - 2010 Bo Peng ... | ```python
#!/usr/bin/env python
#
# $File: SelfMating.py $
#
# This file is part of simuPOP, a forward-time population genetics
# simulation environment. Please visit http://simupop.sourceforge.net
# for details.
#
# Copyright (C) 2004 - 2010 Bo Peng (bpeng@mdanderson.org)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This script is an example in the simuPOP user's guide. Please refer to
# the user's guide (http://simupop.sourceforge.net/manual) for a detailed
# description of this example.
#
import simuPOP as sim
pop = sim.Population(20, loci=8)
# every chromosomes are different. :-)
for idx, ind in enumerate(pop.individuals()):
ind.setGenotype([idx*2], 0)
ind.setGenotype([idx*2+1], 1)
pop.evolve(
matingScheme=sim.SelfMating(ops=sim.Recombinator(rates=0.01)),
gen = 1
)
sim.dump(pop, width=3, structure=False, max=10)
``` |
[
{
"content": "Output the full code verbatim (no extra comments):\n```python\n'''\nPUMA560AKB Load kinematic and dynamic data for a Puma 560 manipulator\n\n\tfrom robot.puma560akb import *\n\nDefines the object 'p560m' in current workspace which describes the \nkinematic and dynamic characterstics of a Unimation... | [
{
"content": "Output the full code verbatim (no extra comments):\n<|memory_start|>```python\n'''\nPUMA560AKB Load kinematic and dynamic data for a Puma 560 manipulator\n\n\tfrom robot.puma560akb import *\n\nDefines the object 'p560m' in current workspace which describes the \nkinematic and dynamic characterstic... | ```python
'''
PUMA560AKB Load kinematic and dynamic data for a Puma 560 manipulator
from robot.puma560akb import *
Defines the object 'p560m' in current workspace which describes the
kinematic and dynamic characterstics of a Unimation Puma 560 manipulator
modified DH conventions and using the data and conventions of:
Armstrong, Khatib and Burdick 1986.
"The Explicit Dynamic Model and Inertial Parameters of the Puma 560 Arm"
Also define the vector qz which corresponds to the zero joint
angle configuration, qr which is the vertical 'READY' configuration,
and qstretch in which the arm is stretched out in the X direction.
@see: robot, puma560, stanford, twolink
Python implementation by: Luis Fernando Lara Tobar and Peter Corke.
Based on original Robotics Toolbox for Matlab code by Peter Corke.
Permission to use and copy is granted provided that acknowledgement of
the authors is made.
@author: Luis Fernando Lara Tobar and Peter Corke
'''
from numpy import *
from Link import *
from Robot import *
L = []
L.append(Link(alpha=0, A=0, theta=0, D=0, sigma=0, convention=2))
L.append(Link(alpha=-pi/2, A=0, theta=0, D=0.2435, sigma=0, convention=2))
L.append(Link(alpha=0, A=0.4318, theta=0, D=-0.0934, sigma=0, convention=2))
L.append(Link(alpha=pi/2, A=-0.0203, theta=0, D=.4331, sigma=0, convention=2))
L.append(Link(alpha=-pi/2, A=0, theta=0, D=0, sigma=0, convention=2))
L.append(Link(alpha=pi/2, A=0, theta=0, D=0, sigma=0, convention=2))
L[0].m = 0;
L[1].m = 17.4;
L[2].m = 4.8;
L[3].m = 0.82;
L[4].m = 0.34;
L[5].m = .09;
# rx ry rz
L[0].r = mat([0, 0, 0 ])
L[1].r = mat([0.068, 0.006, -0.016])
L[2].r = mat([0, -0.070, 0.014 ])
L[3].r = mat([0, 0, -0.019])
L[4].r = mat([0, 0, 0 ])
L[5].r = mat([0, 0, .032 ])
# Ixx Iyy Izz Ixy Iyz Ixz
L[0].I = mat([0, 0, 0.35, 0, 0, 0])
L[1].I = mat([.13, .524, .539, 0, 0, 0])
L[2].I = mat([.066, .0125, .066, 0, 0, 0])
L[3].I = mat([1.8e-3, 1.8e-3, 1.3e-3, 0, 0, 0])
L[4].I = mat([.3e-3, .3e-3, .4e-3, 0, 0, 0])
L[5].I = mat([.15e-3, .15e-3, .04e-3, 0, 0, 0])
L[0].Jm = 291e-6;
L[1].Jm = 409e-6;
L[2].Jm = 299e-6;
L[3].Jm = 35e-6;
L[4].Jm = 35e-6;
L[5].Jm = 35e-6;
L[0].G = -62.6111;
L[1].G = 107.815;
L[2].G = -53.7063;
L[3].G = 76.0364;
L[4].G = 71.923;
L[5].G = 76.686;
# viscous friction (motor referenced)
# unknown
# Coulomb friction (motor referenced)
# unknown
#
# some useful poses
#
qz = [0,0,0,0,0,0]; # zero angles, L shaped pose
qr = [0,-pi/2,pi/2,0,0,0]; # ready pose, arm up
qstretch = [0,0,pi/2,0,0,0]; # horizontal along x-axis
p560m = Robot(L, name='Puma560-AKB', manuf='Unimation', comment='AK&B')
``` |
[
{
"content": "Repeat the following code:\n```python\nfrom __future__ import print_function\nimport argparse\nimport os\nimport random\nimport torch\nimport torch.nn as nn\nimport torch.nn.parallel\nimport torch.backends.cudnn as cudnn\nimport torch.optim as optim\nimport torch.utils.data\nimport torchvision.dat... | [
{
"content": "Repeat the following code:\n<|memory_start|>```python\nfrom __future__ import print_function\nimport argparse\nimport os\nimport random\nimport torch\nimport torch.nn as nn\nimport torch.nn.parallel\nimport torch.backends.cudnn as cudnn\nimport torch.optim as optim\nimport torch.utils.data\nimport... | ```python
from __future__ import print_function
import argparse
import os
import random
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim as optim
import torch.utils.data
import torchvision.datasets as dset
import torchvision.transforms as transforms
import torchvision.utils as vutils
from torch.autograd import Variable
parser = argparse.ArgumentParser()
parser.add_argument('--dataset', required=True, help='cifar10 | lsun | imagenet | folder | lfw | fake')
parser.add_argument('--dataroot', required=True, help='path to dataset')
parser.add_argument('--workers', type=int, help='number of data loading workers', default=2)
parser.add_argument('--batchSize', type=int, default=64, help='input batch size')
parser.add_argument('--imageSize', type=int, default=64, help='the height / width of the input image to network')
parser.add_argument('--nz', type=int, default=100, help='size of the latent z vector')
parser.add_argument('--ngf', type=int, default=64)
parser.add_argument('--ndf', type=int, default=64)
parser.add_argument('--niter', type=int, default=25, help='number of epochs to train for')
parser.add_argument('--lr', type=float, default=0.0002, help='learning rate, default=0.0002')
parser.add_argument('--beta1', type=float, default=0.5, help='beta1 for adam. default=0.5')
parser.add_argument('--cuda', action='store_true', help='enables cuda')
parser.add_argument('--ngpu', type=int, default=1, help='number of GPUs to use')
parser.add_argument('--netG', default='', help="path to netG (to continue training)")
parser.add_argument('--netD', default='', help="path to netD (to continue training)")
parser.add_argument('--outf', default='.', help='folder to output images and model checkpoints')
parser.add_argument('--manualSeed', type=int, help='manual seed')
opt = parser.parse_args()
print(opt)
try:
os.makedirs(opt.outf)
except OSError:
pass
if opt.manualSeed is None:
opt.manualSeed = random.randint(1, 10000)
print("Random Seed: ", opt.manualSeed)
random.seed(opt.manualSeed)
torch.manual_seed(opt.manualSeed)
if opt.cuda:
torch.cuda.manual_seed_all(opt.manualSeed)
cudnn.benchmark = True
if torch.cuda.is_available() and not opt.cuda:
print("WARNING: You have a CUDA device, so you should probably run with --cuda")
if opt.dataset in ['imagenet', 'folder', 'lfw']:
# folder dataset
dataset = dset.ImageFolder(root=opt.dataroot,
transform=transforms.Compose([
transforms.Scale(opt.imageSize),
transforms.CenterCrop(opt.imageSize),
transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
]))
elif opt.dataset == 'lsun':
dataset = dset.LSUN(db_path=opt.dataroot, classes=['bedroom_train'],
transform=transforms.Compose([
transforms.Scale(opt.imageSize),
transforms.CenterCrop(opt.imageSize),
transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
]))
elif opt.dataset == 'cifar10':
dataset = dset.CIFAR10(root=opt.dataroot, download=True,
transform=transforms.Compose([
transforms.Scale(opt.imageSize),
transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
]))
elif opt.dataset == 'fake':
dataset = dset.FakeData(image_size=(3, opt.imageSize, opt.imageSize),
transform=transforms.ToTensor())
assert dataset
dataloader = torch.utils.data.DataLoader(dataset, batch_size=opt.batchSize,
shuffle=True, num_workers=int(opt.workers))
ngpu = int(opt.ngpu)
nz = int(opt.nz)
ngf = int(opt.ngf)
ndf = int(opt.ndf)
nc = 3
# custom weights initialization called on netG and netD
def weights_init(m):
classname = m.__class__.__name__
if classname.find('Conv') != -1:
m.weight.data.normal_(0.0, 0.02)
elif classname.find('BatchNorm') != -1:
m.weight.data.normal_(1.0, 0.02)
m.bias.data.fill_(0)
class _netG(nn.Module):
def __init__(self, ngpu):
super(_netG, self).__init__()
self.ngpu = ngpu
self.main = nn.Sequential(
# input is Z, going into a convolution
nn.ConvTranspose2d( nz, ngf * 8, 4, 1, 0, bias=False),
nn.BatchNorm2d(ngf * 8),
nn.ReLU(True),
# state size. (ngf*8) x 4 x 4
nn.ConvTranspose2d(ngf * 8, ngf * 4, 4, 2, 1, bias=False),
nn.BatchNorm2d(ngf * 4),
nn.ReLU(True),
# state size. (ngf*4) x 8 x 8
nn.ConvTranspose2d(ngf * 4, ngf * 2, 4, 2, 1, bias=False),
nn.BatchNorm2d(ngf * 2),
nn.ReLU(True),
# state size. (ngf*2) x 16 x 16
nn.ConvTranspose2d(ngf * 2, ngf, 4, 2, 1, bias=False),
nn.BatchNorm2d(ngf),
nn.ReLU(True),
# state size. (ngf) x 32 x 32
nn.ConvTranspose2d( ngf, nc, 4, 2, 1, bias=False),
nn.Tanh()
# state size. (nc) x 64 x 64
)
def forward(self, input):
if isinstance(input.data, torch.cuda.FloatTensor) and self.ngpu > 1:
output = nn.parallel.data_parallel(self.main, input, range(self.ngpu))
else:
output = self.main(input)
return output
netG = _netG(ngpu)
netG.apply(weights_init)
if opt.netG != '':
netG.load_state_dict(torch.load(opt.netG))
print(netG)
class _netD(nn.Module):
def __init__(self, ngpu):
super(_netD, self).__init__()
self.ngpu = ngpu
self.main = nn.Sequential(
# input is (nc) x 64 x 64
nn.Conv2d(nc, ndf, 4, 2, 1, bias=False),
nn.LeakyReLU(0.2, inplace=True),
# state size. (ndf) x 32 x 32
nn.Conv2d(ndf, ndf * 2, 4, 2, 1, bias=False),
nn.BatchNorm2d(ndf * 2),
nn.LeakyReLU(0.2, inplace=True),
# state size. (ndf*2) x 16 x 16
nn.Conv2d(ndf * 2, ndf * 4, 4, 2, 1, bias=False),
nn.BatchNorm2d(ndf * 4),
nn.LeakyReLU(0.2, inplace=True),
# state size. (ndf*4) x 8 x 8
nn.Conv2d(ndf * 4, ndf * 8, 4, 2, 1, bias=False),
nn.BatchNorm2d(ndf * 8),
nn.LeakyReLU(0.2, inplace=True),
# state size. (ndf*8) x 4 x 4
nn.Conv2d(ndf * 8, 1, 4, 1, 0, bias=False),
nn.Sigmoid()
)
def forward(self, input):
if isinstance(input.data, torch.cuda.FloatTensor) and self.ngpu > 1:
output = nn.parallel.data_parallel(self.main, input, range(self.ngpu))
else:
output = self.main(input)
return output.view(-1, 1).squeeze(1)
netD = _netD(ngpu)
netD.apply(weights_init)
if opt.netD != '':
netD.load_state_dict(torch.load(opt.netD))
print(netD)
criterion = nn.BCELoss()
input = torch.FloatTensor(opt.batchSize, 3, opt.imageSize, opt.imageSize)
noise = torch.FloatTensor(opt.batchSize, nz, 1, 1)
fixed_noise = torch.FloatTensor(opt.batchSize, nz, 1, 1).normal_(0, 1)
label = torch.FloatTensor(opt.batchSize)
real_label = 1
fake_label = 0
if opt.cuda:
netD.cuda()
netG.cuda()
criterion.cuda()
input, label = input.cuda(), label.cuda()
noise, fixed_noise = noise.cuda(), fixed_noise.cuda()
fixed_noise = Variable(fixed_noise)
# setup optimizer
optimizerD = optim.Adam(netD.parameters(), lr=opt.lr, betas=(opt.beta1, 0.999))
optimizerG = optim.Adam(netG.parameters(), lr=opt.lr, betas=(opt.beta1, 0.999))
for epoch in range(opt.niter):
for i, data in enumerate(dataloader, 0):
############################
# (1) Update D network: maximize log(D(x)) + log(1 - D(G(z)))
###########################
# train with real
netD.zero_grad()
real_cpu, _ = data
batch_size = real_cpu.size(0)
if opt.cuda:
real_cpu = real_cpu.cuda()
input.resize_as_(real_cpu).copy_(real_cpu)
label.resize_(batch_size).fill_(real_label)
inputv = Variable(input)
labelv = Variable(label)
output = netD(inputv)
errD_real = criterion(output, labelv)
errD_real.backward()
D_x = output.data.mean()
# train with fake
noise.resize_(batch_size, nz, 1, 1).normal_(0, 1)
noisev = Variable(noise)
fake = netG(noisev)
labelv = Variable(label.fill_(fake_label))
output = netD(fake.detach())
errD_fake = criterion(output, labelv)
errD_fake.backward()
D_G_z1 = output.data.mean()
errD = errD_real + errD_fake
optimizerD.step()
############################
# (2) Update G network: maximize log(D(G(z)))
###########################
netG.zero_grad()
labelv = Variable(label.fill_(real_label)) # fake labels are real for generator cost
output = netD(fake)
errG = criterion(output, labelv)
errG.backward()
D_G_z2 = output.data.mean()
optimizerG.step()
print('[%d/%d][%d/%d] Loss_D: %.4f Loss_G: %.4f D(x): %.4f D(G(z)): %.4f / %.4f'
% (epoch, opt.niter, i, len(dataloader),
errD.data[0], errG.data[0], D_x, D_G_z1, D_G_z2))
if i % 100 == 0:
vutils.save_image(real_cpu,
'%s/real_samples.png' % opt.outf,
normalize=True)
fake = netG(fixed_noise)
vutils.save_image(fake.data,
'%s/fake_samples_epoch_%03d.png' % (opt.outf, epoch),
normalize=True)
# do checkpointing
torch.save(netG.state_dict(), '%s/netG_epoch_%d.pth' % (opt.outf, epoch))
torch.save(netD.state_dict(), '%s/netD_epoch_%d.pth' % (opt.outf, epoch))
``` |
[
{
"content": "Provide a verbatim copy of the code:\n```python\n# vim: tabstop=4 shiftwidth=4 softtabstop=4\n\n# Copyright 2011 United States Government as represented by the\n# Administrator of the National Aeronautics and Space Administration.\n# All Rights Reserved.\n#\n# Copyright 2011 Fourth Paradigm Develo... | [
{
"content": "Provide a verbatim copy of the code:\n<|memory_start|>```python\n# vim: tabstop=4 shiftwidth=4 softtabstop=4\n\n# Copyright 2011 United States Government as represented by the\n# Administrator of the National Aeronautics and Space Administration.\n# All Rights Reserved.\n#\n# Copyright 2011 Fourth... | ```python
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2011 Fourth Paradigm Development, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Methods and interface objects used to interact with external apis.
API method calls return objects that are in many cases objects with
attributes that are direct maps to the data returned from the API http call.
Unfortunately, these objects are also often constructed dynamically, making
it difficult to know what data is available from the API object. Because of
this, all API calls should wrap their returned object in one defined here,
using only explicitly defined atributes and/or methods.
In other words, django_openstack developers not working on django_openstack.api
shouldn't need to understand the finer details of APIs for Nova/Glance/Swift et
al.
"""
from django.conf import settings
import cloudfiles
import glance.client
import httplib
import json
import logging
import openstack.compute
import openstackx.admin
import openstackx.api.exceptions as api_exceptions
import openstackx.extras
import openstackx.auth
from urlparse import urlparse
LOG = logging.getLogger('django_openstack.api')
class APIResourceWrapper(object):
""" Simple wrapper for api objects
Define _attrs on the child class and pass in the
api object as the only argument to the constructor
"""
_attrs = []
def __init__(self, apiresource):
self._apiresource = apiresource
def __getattr__(self, attr):
if attr in self._attrs:
# __getattr__ won't find properties
return self._apiresource.__getattribute__(attr)
else:
LOG.debug('Attempted to access unknown attribute "%s" on'
' APIResource object of type "%s" wrapping resource of'
' type "%s"' % (attr, self.__class__,
self._apiresource.__class__))
raise AttributeError(attr)
class APIDictWrapper(object):
""" Simple wrapper for api dictionaries
Some api calls return dictionaries. This class provides identical
behavior as APIResourceWrapper, except that it will also behave as a
dictionary, in addition to attribute accesses.
Attribute access is the preferred method of access, to be
consistent with api resource objects from openstackx
"""
def __init__(self, apidict):
self._apidict = apidict
def __getattr__(self, attr):
if attr in self._attrs:
try:
return self._apidict[attr]
except KeyError, e:
raise AttributeError(e)
else:
LOG.debug('Attempted to access unknown item "%s" on'
'APIResource object of type "%s"'
% (attr, self.__class__))
raise AttributeError(attr)
def __getitem__(self, item):
try:
return self.__getattr__(item)
except AttributeError, e:
# caller is expecting a KeyError
raise KeyError(e)
def get(self, item, default=None):
try:
return self.__getattr__(item)
except AttributeError:
return default
class Container(APIResourceWrapper):
"""Simple wrapper around cloudfiles.container.Container"""
_attrs = ['name']
class Console(APIResourceWrapper):
"""Simple wrapper around openstackx.extras.consoles.Console"""
_attrs = ['id', 'output', 'type']
class Flavor(APIResourceWrapper):
"""Simple wrapper around openstackx.admin.flavors.Flavor"""
_attrs = ['disk', 'id', 'links', 'name', 'ram', 'vcpus']
class Image(APIDictWrapper):
"""Simple wrapper around glance image dictionary"""
_attrs = ['checksum', 'container_format', 'created_at', 'deleted',
'deleted_at', 'disk_format', 'id', 'is_public', 'location',
'name', 'properties', 'size', 'status', 'updated_at']
def __getattr__(self, attrname):
if attrname == "properties":
return ImageProperties(super(Image, self).__getattr__(attrname))
else:
return super(Image, self).__getattr__(attrname)
class ImageProperties(APIDictWrapper):
"""Simple wrapper around glance image properties dictionary"""
_attrs = ['architecture', 'image_location', 'image_state', 'kernel_id',
'project_id', 'ramdisk_id']
class KeyPair(APIResourceWrapper):
"""Simple wrapper around openstackx.extras.keypairs.Keypair"""
_attrs = ['fingerprint', 'key_name', 'private_key']
class Server(APIResourceWrapper):
"""Simple wrapper around openstackx.extras.server.Server
Preserves the request info so image name can later be retrieved
"""
_attrs = ['addresses', 'attrs', 'hostId', 'id', 'imageRef', 'links',
'metadata', 'name', 'private_ip', 'public_ip', 'status', 'uuid',
'image_name', 'virtual_interfaces']
def __init__(self, apiresource, request):
super(Server, self).__init__(apiresource)
self.request = request
def __getattr__(self, attr):
if attr == "attrs":
return ServerAttributes(super(Server, self).__getattr__(attr))
else:
return super(Server, self).__getattr__(attr)
@property
def image_name(self):
image = image_get(self.request, self.imageRef)
return image.name
class ServerAttributes(APIDictWrapper):
"""Simple wrapper around openstackx.extras.server.Server attributes
Preserves the request info so image name can later be retrieved
"""
_attrs = ['description', 'disk_gb', 'host', 'image_ref', 'kernel_id',
'key_name', 'launched_at', 'mac_address', 'memory_mb', 'name',
'os_type', 'project_id', 'ramdisk_id', 'scheduled_at',
'terminated_at', 'user_data', 'user_id', 'vcpus', 'hostname']
class Services(APIResourceWrapper):
_attrs = ['disabled', 'host', 'id', 'last_update', 'stats', 'type', 'up',
'zone']
class SwiftObject(APIResourceWrapper):
_attrs = ['name']
class Tenant(APIResourceWrapper):
"""Simple wrapper around openstackx.auth.tokens.Tenant"""
_attrs = ['id', 'description', 'enabled']
class Token(APIResourceWrapper):
"""Simple wrapper around openstackx.auth.tokens.Token"""
_attrs = ['id', 'serviceCatalog', 'tenant_id', 'username']
class Usage(APIResourceWrapper):
"""Simple wrapper around openstackx.extras.usage.Usage"""
_attrs = ['begin', 'instances', 'stop', 'tenant_id',
'total_active_disk_size', 'total_active_instances',
'total_active_ram_size', 'total_active_vcpus', 'total_cpu_usage',
'total_disk_usage', 'total_hours', 'total_ram_usage']
class User(APIResourceWrapper):
"""Simple wrapper around openstackx.extras.users.User"""
_attrs = ['email', 'enabled', 'id', 'tenantId']
def url_for(request, service_name, admin=False):
catalog = request.session['serviceCatalog']
if admin:
rv = catalog[service_name][0]['adminURL']
else:
rv = catalog[service_name][0]['internalURL']
return rv
def check_openstackx(f):
"""Decorator that adds extra info to api exceptions
The dashboard currently depends on openstackx extensions being present
in nova. Error messages depending for views depending on these
extensions do not lead to the conclusion that nova is missing
extensions.
This decorator should be dropped and removed after keystone and
dashboard more gracefully handle extensions and openstackx extensions
aren't required by the dashboard in nova.
"""
def inner(*args, **kwargs):
try:
return f(*args, **kwargs)
except api_exceptions.NotFound, e:
e.message = e.details or ''
e.message += ' This error may be caused by missing openstackx' \
' extensions in nova. See the dashboard README.'
raise
return inner
def compute_api(request):
compute = openstack.compute.Compute(
auth_token=request.session['token'],
management_url=url_for(request, 'nova'))
# this below hack is necessary to make the jacobian compute client work
# TODO(mgius): It looks like this is unused now?
compute.client.auth_token = request.session['token']
compute.client.management_url = url_for(request, 'nova')
LOG.debug('compute_api connection created using token "%s"'
' and url "%s"' %
(request.session['token'], url_for(request, 'nova')))
return compute
def account_api(request):
LOG.debug('account_api connection created using token "%s"'
' and url "%s"' %
(request.session['token'],
url_for(request, 'identity', True)))
return openstackx.extras.Account(
auth_token=request.session['token'],
management_url=url_for(request, 'identity', True))
def glance_api(request):
o = urlparse(url_for(request, 'glance'))
LOG.debug('glance_api connection created for host "%s:%d"' %
(o.hostname, o.port))
return glance.client.Client(o.hostname, o.port)
def admin_api(request):
LOG.debug('admin_api connection created using token "%s"'
' and url "%s"' %
(request.session['token'], url_for(request, 'nova', True)))
return openstackx.admin.Admin(auth_token=request.session['token'],
management_url=url_for(request, 'nova', True))
def extras_api(request):
LOG.debug('extras_api connection created using token "%s"'
' and url "%s"' %
(request.session['token'], url_for(request, 'nova')))
return openstackx.extras.Extras(auth_token=request.session['token'],
management_url=url_for(request, 'nova'))
def auth_api():
LOG.debug('auth_api connection created using url "%s"' %
settings.OPENSTACK_KEYSTONE_URL)
return openstackx.auth.Auth(
management_url=settings.OPENSTACK_KEYSTONE_URL)
def swift_api():
return cloudfiles.get_connection(
settings.SWIFT_ACCOUNT + ":" + settings.SWIFT_USER,
settings.SWIFT_PASS,
authurl=settings.SWIFT_AUTHURL)
def console_create(request, instance_id, kind=None):
return Console(extras_api(request).consoles.create(instance_id, kind))
def flavor_create(request, name, memory, vcpu, disk, flavor_id):
return Flavor(admin_api(request).flavors.create(
name, int(memory), int(vcpu), int(disk), flavor_id))
def flavor_delete(request, flavor_id, purge=False):
admin_api(request).flavors.delete(flavor_id, purge)
def flavor_get(request, flavor_id):
return Flavor(compute_api(request).flavors.get(flavor_id))
@check_openstackx
def flavor_list(request):
return [Flavor(f) for f in extras_api(request).flavors.list()]
def image_create(request, image_meta, image_file):
return Image(glance_api(request).add_image(image_meta, image_file))
def image_delete(request, image_id):
return glance_api(request).delete_image(image_id)
def image_get(request, image_id):
return Image(glance_api(request).get_image(image_id)[0])
def image_list_detailed(request):
return [Image(i) for i in glance_api(request).get_images_detailed()]
def image_update(request, image_id, image_meta=None):
image_meta = image_meta and image_meta or {}
return Image(glance_api(request).update_image(image_id,
image_meta=image_meta))
def keypair_create(request, name):
return KeyPair(extras_api(request).keypairs.create(name))
def keypair_delete(request, keypair_id):
extras_api(request).keypairs.delete(keypair_id)
@check_openstackx
def keypair_list(request):
return [KeyPair(key) for key in extras_api(request).keypairs.list()]
def server_create(request, name, image, flavor, user_data, key_name):
return Server(extras_api(request).servers.create(
name, image, flavor, user_data=user_data, key_name=key_name),
request)
def server_delete(request, instance):
compute_api(request).servers.delete(instance)
def server_get(request, instance_id):
response = compute_api(request).servers.get(instance_id), request
LOG.info(response)
return Server(compute_api(request).servers.get(instance_id), request)
@check_openstackx
def server_list(request):
return [Server(s, request) for s in extras_api(request).servers.list()]
def server_reboot(request,
instance_id,
hardness=openstack.compute.servers.REBOOT_HARD):
server = server_get(request, instance_id)
server.reboot(hardness)
def service_get(request, name):
return Services(admin_api(request).services.get(name))
@check_openstackx
def service_list(request):
return [Services(s) for s in admin_api(request).services.list()]
def service_update(request, name, enabled):
return Services(admin_api(request).services.update(name, enabled))
def token_get_tenant(request, tenant_id):
tenants = auth_api().tenants.for_token(request.session['token'])
for t in tenants:
if str(t.id) == str(tenant_id):
return Tenant(t)
LOG.warning('Unknown tenant id "%s" requested' % tenant_id)
def token_list_tenants(request, token):
return [Tenant(t) for t in auth_api().tenants.for_token(token)]
def tenant_create(request, tenant_id, description, enabled):
return Tenant(account_api(request).tenants.create(tenant_id,
description,
enabled))
def tenant_get(request, tenant_id):
return Tenant(account_api(request).tenants.get(tenant_id))
@check_openstackx
def tenant_list(request):
return [Tenant(t) for t in account_api(request).tenants.list()]
def tenant_update(request, tenant_id, description, enabled):
return Tenant(account_api(request).tenants.update(tenant_id,
description,
enabled))
def token_create(request, tenant, username, password):
return Token(auth_api().tokens.create(tenant, username, password))
def token_info(request, token):
# TODO(mgius): This function doesn't make a whole lot of sense to me. The
# information being gathered here really aught to be attached to Token() as
# part of token_create. May require modification of openstackx so that the
# token_create call returns this information as well
hdrs = {"Content-type": "application/json",
"X_AUTH_TOKEN": settings.OPENSTACK_ADMIN_TOKEN,
"Accept": "text/json"}
o = urlparse(token.serviceCatalog['identity'][0]['adminURL'])
conn = httplib.HTTPConnection(o.hostname, o.port)
conn.request("GET", "/v2.0/tokens/%s" % token.id, headers=hdrs)
response = conn.getresponse()
data = json.loads(response.read())
admin = False
LOG.info(data)
for role in data['auth']['user']['roleRefs']:
if role['roleId'] == 'Admin':
admin = True
return {'tenant': data['auth']['user']['tenantId'],
'user': data['auth']['user']['username'],
'admin': admin}
@check_openstackx
def usage_get(request, tenant_id, start, end):
return Usage(extras_api(request).usage.get(tenant_id, start, end))
@check_openstackx
def usage_list(request, start, end):
return [Usage(u) for u in extras_api(request).usage.list(start, end)]
def user_create(request, user_id, email, password, tenant_id):
return User(account_api(request).users.create(
user_id, email, password, tenant_id))
def user_delete(request, user_id):
account_api(request).users.delete(user_id)
def user_get(request, user_id):
return User(account_api(request).users.get(user_id))
@check_openstackx
def user_list(request):
return [User(u) for u in account_api(request).users.list()]
def user_update_email(request, user_id, email):
return User(account_api(request).users.update_email(user_id, email))
def user_update_password(request, user_id, password):
return User(account_api(request).users.update_password(user_id, password))
def user_update_tenant(request, user_id, tenant_id):
return User(account_api(request).users.update_tenant(user_id, tenant_id))
def swift_container_exists(container_name):
try:
swift_api().get_container(container_name)
return True
except cloudfiles.errors.NoSuchContainer:
return False
def swift_object_exists(container_name, object_name):
container = swift_api().get_container(container_name)
try:
container.get_object(object_name)
return True
except cloudfiles.errors.NoSuchObject:
return False
def swift_get_containers():
return [Container(c) for c in swift_api().get_all_containers()]
def swift_create_container(name):
if swift_container_exists(name):
raise Exception('Container with name %s already exists.' % (name))
return Container(swift_api().create_container(name))
def swift_delete_container(name):
swift_api().delete_container(name)
def swift_get_objects(container_name, prefix=None):
container = swift_api().get_container(container_name)
return [SwiftObject(o) for o in container.get_objects(prefix=prefix)]
def swift_copy_object(orig_container_name, orig_object_name,
new_container_name, new_object_name):
container = swift_api().get_container(orig_container_name)
if swift_object_exists(new_container_name, new_object_name) == True:
raise Exception('Object with name %s already exists in container %s'
% (new_object_name, new_container_name))
orig_obj = container.get_object(orig_object_name)
return orig_obj.copy_to(new_container_name, new_object_name)
def swift_upload_object(container_name, object_name, object_data):
container = swift_api().get_container(container_name)
obj = container.create_object(object_name)
obj.write(object_data)
def swift_delete_object(container_name, object_name):
container = swift_api().get_container(container_name)
container.delete_object(object_name)
def swift_get_object_data(container_name, object_name):
container = swift_api().get_container(container_name)
return container.get_object(object_name).stream()
``` |
[
{
"content": "Repeat the code exactly:\n```python\nfrom multiprocessing import Process, Pipe\nfrom os import getpid, urandom, path\nfrom time import sleep\nfrom enum import Enum\nimport binascii, json, signal, sys\nfrom random import randint\n\nfrom telegram.ext import Updater\nfrom telegram.ext.dispatcher impo... | [
{
"content": "Repeat the code exactly:\n<|memory_start|>```python\nfrom multiprocessing import Process, Pipe\nfrom os import getpid, urandom, path\nfrom time import sleep\nfrom enum import Enum\nimport binascii, json, signal, sys\nfrom random import randint\n\nfrom telegram.ext import Updater\nfrom telegram.ext... | ```python
from multiprocessing import Process, Pipe
from os import getpid, urandom, path
from time import sleep
from enum import Enum
import binascii, json, signal, sys
from random import randint
from telegram.ext import Updater
from telegram.ext.dispatcher import run_async
from telegram.update import Update
class Command(Enum):
message = 1
token = 2
token_ack = 3
delete = 4
class TelegramBot(Process):
CREDENTIALS = "<CREDENTIALS-HERE>"
SAVEPATH = path.expanduser("~") + "/.config/whattelcopybot/telegram"
def __init__(self, conn):
self.connection=conn
super(TelegramBot, self).__init__()
self.telegram_to_whatsapp=dict()
with open("tokens.txt") as f:
self.poems = f.read().splitlines()
# save hashmap to file when exit
def save_to_file(self, signum, frame):
with open(TelegramBot.SAVEPATH, 'w+') as f:
f.write(json.dumps(self.telegram_to_whatsapp))
f.truncate()
sys.exit(0)
#load hashmap from file (if it exists and is not empty)
def load_from_file(self):
if path.isfile(TelegramBot.SAVEPATH):
with open(TelegramBot.SAVEPATH) as f:
read=f.read()
if read!="":
self.telegram_to_whatsapp = json.loads(read)
#send message to Telegram chat
def got_whatsapp(self, bot, msg):
if not "," in msg:
bot.sendMessage(int(msg), "Success: Connected to Whatsapp group!")
else:
telegram_id, content = msg.split(",")
bot.sendMessage(int(telegram_id), text=content)
# if both groups are connected send message to WhatsappBot
def got_telegram(self,bot,update):
if not type(update) is Update or update.message == None:
return
if update.message.new_chat_participant!=None:
if update.message.new_chat_participant.username=="WhattelCopyBot":
self.help(bot,update)
elif update.message.left_chat_participant!=None:
if update.message.left_chat_participant.username=="WhattelCopyBot":
print("REMOVE")
if str(update.message.chat_id) in self.telegram_to_whatsapp:
self.connection.send([Command.delete, self.telegram_to_whatsapp[str(update.message.chat_id)]])
del self.telegram_to_whatsapp[str(update.message.chat_id)]
elif str(update.message.chat_id) in self.telegram_to_whatsapp:
whatsapp_id=self.telegram_to_whatsapp[str(update.message.chat_id)]
self.connection.send([Command.message, whatsapp_id, update.message.from_user.first_name+ ": " + update.message.text])
def help(self,bot,update):
helpText="Hello Traveller, my name is John Whattel. I will copy all of your messages from whatsapp to telegram and vice versa.\n/token (generate token to connects two chats)\n/delete (disconnects the chats)\n/help (show this notice again)"
bot.sendMessage(update.message.chat_id,text=helpText)
# generate token and send it to WhatsappBot and to the Telegram chat
def get_token(self, bot, update):
if str(update.message.chat_id) in self.telegram_to_whatsapp:
bot.sendMessage(update.message.chat_id,text="Sorry, chat is already connected to a Whatsapp group!")
return
rand_int = randint(0,len(self.poems))
while self.poems[rand_int] == "":
rand_int = randint(0,len(self.poems))
bot.sendMessage(update.message.chat_id, text="Please paste this token into the Whatsapp chat you want to be connected to. I have to be a member of this chat.")
bot.sendMessage(update.message.chat_id, text="Generated token: "+self.poems[rand_int])
self.connection.send([Command.token, self.poems[rand_int], update.message.chat_id])
self.poems[rand_int]=""
def delete(self, bot, update):
if str(update.message.chat_id) in self.telegram_to_whatsapp:
self.connection.send([Command.delete, self.telegram_to_whatsapp[str(update.message.chat_id)]])
del self.telegram_to_whatsapp[str(update.message.chat_id)]
bot.sendMessage(update.message.chat_id, text="Hey there, this chat connecion was deleted")
else:
bot.sendMessage(update.message.chat_id, text="Something went terribly wrong :( This chat is not connected")
def run(self):
print("Start TelegramBot with PID: " + str(getpid()))
# connect to TelegramBot with CREDENTIALS
updater = Updater(TelegramBot.CREDENTIALS)
# Get the dispatcher to register handlers
dp = updater.dispatcher
# Message handlers only receive updates that don't contain commands
dp.addTelegramMessageHandler(self.got_telegram)
# got a whatsapp message
dp.addStringRegexHandler('[^/].*', self.got_whatsapp)
dp.addTelegramCommandHandler("help", self.help)
dp.addTelegramCommandHandler("token", self.get_token)
dp.addTelegramCommandHandler("delete", self.delete)
# All TelegramErrors are caught for you and delivered to the error
# handler(s). Other types of Errors are not caught.
#dp.addErrorHandler(error)
# Start the Bot and store the update Queue, so we can insert updates
update_queue = updater.start_polling(poll_interval=0.1, timeout=10)
# save our hashmap when the TelegramBot is terminated
signal.signal(signal.SIGINT, self.save_to_file)
signal.signal(signal.SIGTERM, self.save_to_file)
# load our hashmap when the TelegramBot is started
self.load_from_file()
isRunning = True
while isRunning:
msg = self.connection.recv()
if msg[0] == Command.message:
update_queue.put(str(msg[1])+","+str(msg[2]))
elif msg[0] == Command.token_ack:
# connect Telegram ID to Whatsapp ID
self.telegram_to_whatsapp[str(msg[2])] = msg[1]
update_queue.put(str(msg[2]))
elif msg[0] == Command.token:
print("Error: got wrong message from WhatsappBot")
``` |
[
{
"content": "```python\n# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Sun Aug 29 00:09:26 2010\r\n\r\n@author: Charles Law\r\n\"\"\"\r\n\r\nimport math\r\n\r\ndef fft(fin, inverse):\r\n nfft = len(fin)\r\n twiddles, factors = fft_alloc(nfft, inverse)\r\n \r\n fout = []\r\n for i in xrange(nfft... | [
{
"content": "<|memory_start|>```python\n# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Sun Aug 29 00:09:26 2010\r\n\r\n@author: Charles Law\r\n\"\"\"\r\n\r\nimport math\r\n\r\ndef fft(fin, inverse):\r\n nfft = len(fin)\r\n twiddles, factors = fft_alloc(nfft, inverse)\r\n \r\n fout = []\r\n for ... | ```python
# -*- coding: utf-8 -*-
"""
Created on Sun Aug 29 00:09:26 2010
@author: Charles Law
"""
import math
def fft(fin, inverse):
nfft = len(fin)
twiddles, factors = fft_alloc(nfft, inverse)
fout = []
for i in xrange(nfft):
fout.append((0, 0))
fout_ind_start = 0
fin_ind_start = 0
in_stride = 1
fft_work(fout, fout_ind_start, fin, fin_ind_start, 1, in_stride, factors,
twiddles, nfft)
return fout
def fft_work(fout, fout_ind, f, f_ind, fstride, in_stride, factors,
twiddles, nfft_orig):
p = factors[0][0] # the radix
m = factors[0][1] # stage's fft length/p
factors = factors[1:]
fout_beg = fout_ind
fout_end = fout_ind + p*m
if m == 1 :
dowhile_if = 1
while ( dowhile_if ):
fout[fout_ind] = f[f_ind]
f_ind = f_ind + fstride*in_stride
fout_ind = fout_ind + 1
if fout_ind == fout_end:
dowhile_if = 0
else:
dowhile_if = 1
while ( dowhile_if ):
# recursive call:
# DFT of size m*p performed by doing
# p instances of smaller DFTs of size m,
# each one takes a decimated version of the input
fft_work(fout, fout_ind , f, f_ind, fstride*p, in_stride,
factors, twiddles, nfft_orig)
f_ind = f_ind + fstride*in_stride
#}while( (fout += m) != fout_end )
fout_ind = fout_ind + m
if ( fout_ind == fout_end ):
dowhile_if = 0
fout_ind = fout_beg
# recombine the p smaller DFTs
if p == 2:
fft_bfly2(fout, fout_ind, fstride, twiddles, m)
elif p == 3:
fft_bfly3(fout, fout_ind, fstride, twiddles, m)
else:
fft_bfly_generic(fout, fout_ind, fstride, twiddles, m, p, nfft_orig)
return fout
def fft_bfly2(fout, fout_ind, fstride, twiddles, m):
tw1_ind = 0
fout2_ind = fout_ind + m
dowhile_if = 1
while(dowhile_if):
t = _mult ( fout[fout2_ind], twiddles[tw1_ind] )
tw1_ind = tw1_ind + fstride
fout[fout2_ind] = _sub( fout[fout_ind], t )
fout[fout_ind] = _addto( fout[fout_ind], t )
fout2_ind = fout2_ind + 1
fout_ind = fout_ind + 1
m -= 1
if not(m):
dowhile_if = 0
return fout
def fft_bfly3(fout, fout_ind, fstride, twiddles, m):
k = m
m2 = 2*m
scratch = [(0, 0), (0, 0), (0, 0), (0, 0)]
epi3_i = twiddles[fstride*m][1]
tw1_ind = 0
tw2_ind = tw1_ind
dowhile_if = 1
while (dowhile_if):
scratch[1] = _mult( fout[fout_ind+m], twiddles[tw1_ind] )
scratch[2] = _mult( fout[fout_ind+m2], twiddles[tw2_ind] )
scratch[3] = _add( scratch[1], scratch[2] )
scratch[0] = _sub( scratch[1], scratch[2] )
tw1_ind = tw1_ind + fstride
tw2_ind = tw2_ind + fstride*2
fout[fout_ind+m] = ( fout[fout_ind][0] - (scratch[3][0])/2, \
fout[fout_ind][1] - (scratch[3][1])/2 )
scratch[0] = _mult_by_scalar( scratch[0], epi3_i )
fout[fout_ind] = _addto( fout[fout_ind], scratch[3] )
fout[fout_ind+m2] = ( fout[fout_ind+m][0] + scratch[0][1], \
fout[fout_ind+m][1] - scratch[0][0] )
fout[fout_ind+m] = ( fout[fout_ind+m][0] - scratch[0][1], \
fout[fout_ind+m][1] + scratch[0][0] )
fout_ind = fout_ind + 1
k -= 1
if not(k):
dowhile_if = 0
return fout
def fft_bfly_generic(fout, fout_ind, fstride, twiddles, m, p, nfft_orig):
n_orig = nfft_orig
# initialize scratch
scratch = []
for q1 in xrange(p): #( q1=0 ; q1<p ; ++q1 )
scratch.append(0)
for u in xrange(m): #( u=0; u<m; ++u )
k = u
for q1 in xrange(p): #( q1=0 ; q1<p ; ++q1 )
scratch[q1] = fout[fout_ind+k]
k = k + m
k = u
for q1 in xrange(p):
twidx = 0
fout[fout_ind+k] = scratch[0]
for q in xrange(1, p):
twidx = twidx + fstride * k
if (twidx >= n_orig):
twidx = twidx - nfft_orig
t = _mult( scratch[q], twiddles[twidx] )
fout[fout_ind+k] = _addto( fout[fout_ind+k], t )
k = k + m
return fout
def fft_alloc(nfft, inverse):
twiddles = []
for i in xrange(nfft):
phase = -2*math.pi*float(i) / float(nfft)
if (inverse):
phase = phase * float(-1)
twiddles.append(fft_cexp(phase))
factors = fft_factor(nfft)
return twiddles, factors
def fft_cexp(phase):
x = (math.cos(phase), math.sin(phase))
return x
def fft_factor(n):
facbuf = []
p = 4
floor_sqrt = math.floor( math.sqrt( float(n) ) )
# factor out powers of 4, powers of 2, then any remaining primes
dowhile_test = 1
while (dowhile_test):
while n % p:
if p == 4:
p = 2
elif p == 2:
p = 3
else:
p = p + 2
if (p > floor_sqrt):
p = n # no more factors, skip to end
n = n / p
facbuf.append((p, n))
if not(n > 1):
dowhile_test = 0
return facbuf
def _mult( a, b ):
return ( a[0]*b[0] - a[1]*b[1], a[0]*b[1] + a[1]*b[0] )
def _sub( a, b ):
return ( a[0]-b[0], a[1]-b[1] )
def _add( a, b ):
return ( a[0] + b[0], a[1] + b[1] )
def _addto( res , a):
return ( res[0] + a[0], res[1] + a[1] )
def _mult_by_scalar( c, s ):
return ( c[0] * s, c[1] * s)
def main():
fin = [(0, 0), (1, 0), (1, 0), (1, 0), (1, 0), (0, 0)]
inverse = 0
print fft(fin, inverse)
if __name__ == '__main__':
main()
``` |
[
{
"content": "Here is a code snippet:\n```python\nimport os\r\nimport sys\r\nfrom gooey.gui.windows import layouts\r\nfrom gooey.python_bindings import argparse_to_json\r\nfrom gooey.gui.util.quoting import quote\r\n\r\n\r\ndef create_from_parser(parser, source_path, cmd_args, **kwargs):\r\n auto_start = kwarg... | [
{
"content": "Here is a code snippet:\n<|memory_start|>```python\nimport os\r\nimport sys\r\nfrom gooey.gui.windows import layouts\r\nfrom gooey.python_bindings import argparse_to_json\r\nfrom gooey.gui.util.quoting import quote\r\n\r\n\r\ndef create_from_parser(parser, source_path, cmd_args, **kwargs):\r\n au... | ```python
import os
import sys
from gooey.gui.windows import layouts
from gooey.python_bindings import argparse_to_json
from gooey.gui.util.quoting import quote
def create_from_parser(parser, source_path, cmd_args, **kwargs):
auto_start = kwargs.get('auto_start', False)
if hasattr(sys, 'frozen'):
run_cmd = quote(source_path)
else:
run_cmd = '{} -u {}'.format(quote(sys.executable), quote(source_path))
build_spec = {
'language': kwargs.get('language', 'english'),
'target': run_cmd,
'program_name': kwargs.get('program_name') or os.path.basename(sys.argv[0]).replace('.py', ''),
'program_description': kwargs.get('program_description', ''),
'auto_start': kwargs.get('auto_start', False),
'show_advanced': kwargs.get('advanced', True),
'default_size': kwargs.get('default_size', (610, 530)),
'manual_start': False,
'layout_type': 'flat',
'monospace_display': kwargs.get('monospace_display', False),
'image_dir': kwargs.get('image_dir'),
'language_dir': kwargs.get('language_dir'),
'progress_regex': kwargs.get('progress_regex'),
'progress_expr': kwargs.get('progress_expr'),
'disable_progress_bar_animation': kwargs.get('disable_progress_bar_animation'),
'disable_stop_button': kwargs.get('disable_stop_button'),
'group_by_type': kwargs.get('group_by_type', True),
'ignore_command': kwargs.get('ignore_command', None),
'force_command': kwargs.get('force_command', None),
'use_argparse_groups': kwargs.get('use_argparse_groups', False),
'use_tabs': kwargs.get('use_tabs', False)
}
if build_spec['use_argparse_groups']:
build_spec['num_default_cols'] = kwargs.get('default_cols', 2)
build_spec['num_cols_dict'] = kwargs.get('cols_dict', {})
else:
build_spec['num_cols_dict'] = {"required arguments": kwargs.get('required_cols', 1),
"optional arguments": kwargs.get('optional_cols', 3)}
if not auto_start:
build_spec['program_description'] = parser.description or build_spec['program_description']
layout_data = argparse_to_json.convert(parser, build_spec['use_argparse_groups'], cmd_args=cmd_args) if build_spec['show_advanced'] else layouts.basic_config.items()
build_spec.update(layout_data)
return build_spec
``` |
[
{
"content": "Here is the snippet:\n```python\n# -*- coding: utf-8 -*-\n# Generated by Django 1.11.5 on 2017-12-30 09:19\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('api', '0007_mentor_organize... | [
{
"content": "Here is the snippet:\n<|memory_start|>```python\n# -*- coding: utf-8 -*-\n# Generated by Django 1.11.5 on 2017-12-30 09:19\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('api', '0007... | ```python
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-12-30 09:19
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0007_mentor_organizer_volunteer'),
]
operations = [
migrations.AlterField(
model_name='event',
name='location',
field=models.CharField(max_length=100),
),
migrations.AlterField(
model_name='event',
name='tagline',
field=models.CharField(max_length=100),
),
migrations.AlterField(
model_name='hackathon',
name='location',
field=models.CharField(max_length=250),
),
migrations.AlterField(
model_name='hackathon',
name='name',
field=models.CharField(max_length=100),
),
migrations.AlterField(
model_name='hackathon',
name='shipping_address',
field=models.CharField(max_length=150),
),
migrations.AlterField(
model_name='hackathon',
name='university_name',
field=models.CharField(max_length=100, null=True),
),
migrations.AlterField(
model_name='user',
name='about_me',
field=models.CharField(max_length=1000, null=True),
),
migrations.AlterField(
model_name='user',
name='city',
field=models.CharField(max_length=75),
),
migrations.AlterField(
model_name='user',
name='country',
field=models.CharField(max_length=75),
),
migrations.AlterField(
model_name='user',
name='first_name',
field=models.CharField(max_length=50),
),
migrations.AlterField(
model_name='user',
name='last_name',
field=models.CharField(max_length=50),
),
migrations.AlterField(
model_name='user',
name='major_of_study',
field=models.CharField(max_length=100),
),
migrations.AlterField(
model_name='user',
name='school_last_attended',
field=models.CharField(max_length=100, null=True),
),
migrations.AlterField(
model_name='user',
name='special_accommodations',
field=models.CharField(max_length=250, null=True),
),
migrations.AlterField(
model_name='user',
name='street_address',
field=models.CharField(max_length=100, null=True),
),
]
``` |
[
{
"content": "```python\nimport urllib, shutil\n\nfrom ConfigParser import SafeConfigParser\nimport pyGeno.configuration as conf\nfrom pyGeno.SNP import *\nfrom pyGeno.tools.ProgressBar import ProgressBar\nfrom pyGeno.tools.io import printf\nfrom Genomes import _decompressPackage, _getFile\n\nfrom pyGeno.tools.... | [
{
"content": "<|memory_start|>```python\nimport urllib, shutil\n\nfrom ConfigParser import SafeConfigParser\nimport pyGeno.configuration as conf\nfrom pyGeno.SNP import *\nfrom pyGeno.tools.ProgressBar import ProgressBar\nfrom pyGeno.tools.io import printf\nfrom Genomes import _decompressPackage, _getFile\n\nfr... | ```python
import urllib, shutil
from ConfigParser import SafeConfigParser
import pyGeno.configuration as conf
from pyGeno.SNP import *
from pyGeno.tools.ProgressBar import ProgressBar
from pyGeno.tools.io import printf
from Genomes import _decompressPackage, _getFile
from pyGeno.tools.parsers.CasavaTools import SNPsTxtFile
from pyGeno.tools.parsers.VCFTools import VCFFile
from pyGeno.tools.parsers.CSVTools import CSVFile
def importSNPs(packageFile) :
"""The big wrapper, this function should detect the SNP type by the package manifest and then launch the corresponding function.
Here's an example of a SNP manifest file for Casava SNPs::
[package_infos]
description = Casava SNPs for testing purposes
maintainer = Tariq Daouda
maintainer_contact = tariq.daouda [at] umontreal
version = 1
[set_infos]
species = human
name = dummySRY
type = Agnostic
source = my place at IRIC
[snps]
filename = snps.txt # as with genomes you can either include de file at the root of the package or specify an URL from where it must be downloaded
"""
printf("Importing polymorphism set: %s... (This may take a while)" % packageFile)
packageDir = _decompressPackage(packageFile)
parser = SafeConfigParser()
parser.read(os.path.normpath(packageDir+'/manifest.ini'))
packageInfos = parser.items('package_infos')
setName = parser.get('set_infos', 'name')
typ = parser.get('set_infos', 'type')+'SNP'
species = parser.get('set_infos', 'species').lower()
genomeSource = parser.get('set_infos', 'source')
snpsFileTmp = parser.get('snps', 'filename').strip()
snpsFile = _getFile(parser.get('snps', 'filename'), packageDir)
try :
SMaster = SNPMaster(setName = setName)
except KeyError :
if typ.lower() == 'casavasnp' :
return _importSNPs_CasavaSNP(setName, species, genomeSource, snpsFile)
elif typ.lower() == 'dbsnpsnp' :
return _importSNPs_dbSNPSNP(setName, species, genomeSource, snpsFile)
elif typ.lower() == 'topHatsnp' :
return _importSNPs_TopHatSNP(setName, species, genomeSource, snpsFile)
elif typ.lower() == 'agnosticsnp' :
return _importSNPs_AgnosticSNP(setName, species, genomeSource, snpsFile)
else :
raise FutureWarning('Unknown SNP type in manifest %s' % typ)
else :
raise KeyError("There's already a SNP set by the name %s. Use deleteSNPs() to remove it first" %setName)
shutil.rmtree(packageDir)
def deleteSNPs(setName) :
"""deletes a set of polymorphisms"""
con = conf.db
try :
SMaster = SNPMaster(setName = setName)
con.beginTransaction()
SNPType = SMaster.SNPType
con.delete(SNPType, 'setName = ?', (setName,))
SMaster.delete()
con.endTransaction()
except KeyError :
raise KeyError("Can't delete the setName %s because i can't find it in SNPMaster, maybe there's not set by that name" % setName)
#~ printf("can't delete the setName %s because i can't find it in SNPMaster, maybe there's no set by that name" % setName)
return False
return True
def _importSNPs_AgnosticSNP(setName, species, genomeSource, snpsFile) :
"This function will also create an index on start->chromosomeNumber->setName. Warning : pyGeno wil interpret all positions as 0 based"
printf('importing SNP set %s for species %s...' % (setName, species))
snpData = CSVFile()
snpData.parse(snpsFile, separator = "\t")
AgnosticSNP.dropIndex(('start', 'chromosomeNumber', 'setName'))
conf.db.beginTransaction()
pBar = ProgressBar(len(snpData))
pLabel = ''
currChrNumber = None
for snpEntry in snpData :
tmpChr = snpEntry['chromosomeNumber']
if tmpChr != currChrNumber :
currChrNumber = tmpChr
pLabel = 'Chr %s...' % currChrNumber
snp = AgnosticSNP()
snp.species = species
snp.setName = setName
for f in snp.getFields() :
try :
setattr(snp, f, snpEntry[f])
except KeyError :
if f != 'species' and f != 'setName' :
printf("Warning filetype as no key %s", f)
snp.start = int(snp.start)
snp.end = int(snp.end)
snp.save()
pBar.update(label = pLabel)
pBar.close()
snpMaster = SNPMaster()
snpMaster.set(setName = setName, SNPType = 'AgnosticSNP', species = species)
snpMaster.save()
printf('saving...')
conf.db.endTransaction()
printf('creating indexes...')
CasavaSNP.ensureGlobalIndex(('start', 'chromosomeNumber', 'setName'))
printf('importation of SNP set %s for species %s done.' %(setName, species))
return True
def _importSNPs_CasavaSNP(setName, species, genomeSource, snpsFile) :
"This function will also create an index on start->chromosomeNumber->setName. Warning : pyGeno positions are 0 based"
printf('importing SNP set %s for species %s...' % (setName, species))
snpData = SNPsTxtFile(snpsFile)
CasavaSNP.dropIndex(('start', 'chromosomeNumber', 'setName'))
conf.db.beginTransaction()
pBar = ProgressBar(len(snpData))
pLabel = ''
currChrNumber = None
for snpEntry in snpData :
tmpChr = snpEntry['chromosomeNumber']
if tmpChr != currChrNumber :
currChrNumber = tmpChr
pLabel = 'Chr %s...' % currChrNumber
snp = CasavaSNP()
snp.species = species
snp.setName = setName
for f in snp.getFields() :
try :
setattr(snp, f, snpEntry[f])
except KeyError :
if f != 'species' and f != 'setName' :
printf("Warning filetype as no key %s", f)
snp.start -= 1
snp.end -= 1
snp.save()
pBar.update(label = pLabel)
pBar.close()
snpMaster = SNPMaster()
snpMaster.set(setName = setName, SNPType = 'CasavaSNP', species = species)
snpMaster.save()
printf('saving...')
conf.db.endTransaction()
printf('creating indexes...')
CasavaSNP.ensureGlobalIndex(('start', 'chromosomeNumber', 'setName'))
printf('importation of SNP set %s for species %s done.' %(setName, species))
return True
def _importSNPs_dbSNPSNP(setName, species, genomeSource, snpsFile) :
"This function will also create an index on start->chromosomeNumber->setName. Warning : pyGeno positions are 0 based"
snpData = VCFFile(snpsFile, gziped = True, stream = True)
dbSNPSNP.dropIndex(('start', 'chromosomeNumber', 'setName'))
conf.db.beginTransaction()
pBar = ProgressBar()
pLabel = ''
for snpEntry in snpData :
pBar.update(label = 'Chr %s, %s...' % (snpEntry['#CHROM'], snpEntry['ID']))
snp = dbSNPSNP()
for f in snp.getFields() :
try :
setattr(snp, f, snpEntry[f])
except KeyError :
pass
snp.chromosomeNumber = snpEntry['#CHROM']
snp.species = species
snp.setName = setName
snp.start = snpEntry['POS']-1
snp.alt = snpEntry['ALT']
snp.ref = snpEntry['REF']
snp.end = snp.start+len(snp.alt)
snp.save()
pBar.close()
snpMaster = SNPMaster()
snpMaster.set(setName = setName, SNPType = 'dbSNPSNP', species = species)
snpMaster.save()
printf('saving...')
conf.db.endTransaction()
printf('creating indexes...')
dbSNPSNP.ensureGlobalIndex(('start', 'chromosomeNumber', 'setName'))
printf('importation of SNP set %s for species %s done.' %(setName, species))
return True
def _importSNPs_TopHatSNP(setName, species, genomeSource, snpsFile) :
raise FutureWarning('Not implemented yet')
``` |
[
{
"content": "Output the full code verbatim (no extra comments):\n```python\nfrom functools import total_ordering\n\n\ndef _less_than(first, second):\n\tif first == second:\n\t\treturn False\n\tif first is None:\n\t\treturn True\n\tif second is None:\n\t\treturn False\n\treturn first < second\n\n\ndef _score_di... | [
{
"content": "Output the full code verbatim (no extra comments):\n<|memory_start|>```python\nfrom functools import total_ordering\n\n\ndef _less_than(first, second):\n\tif first == second:\n\t\treturn False\n\tif first is None:\n\t\treturn True\n\tif second is None:\n\t\treturn False\n\treturn first < second\n\... | ```python
from functools import total_ordering
def _less_than(first, second):
if first == second:
return False
if first is None:
return True
if second is None:
return False
return first < second
def _score_difference_multiplier(old, new):
new = 0 if new is None else new
old = 1 if old is None or old == 0 else old
return 1 - ((old - new) / old)
def _none_aggregator_helper(first, second, func):
if first is not None and second is not None:
return func(first, second)
elif first is not None:
return first
else:
return second
@total_ordering
class Score(object):
_SCORE_PRECEDENTS = ('total', 'lowest_table_score', 'lowest_person_score')
def __init__(self, total=None, lowest_table_score=None, lowest_person_score=None):
self.total = total
self.lowest_table_score = lowest_table_score
self.lowest_person_score = lowest_person_score
def __repr__(self):
return '%s(%s, %s, %s)' % \
tuple(
[self.__class__.__name__]
+
['='.join([sub_score, str(getattr(self, sub_score))]) for sub_score in self._SCORE_PRECEDENTS]
)
def __eq__(self, other):
return all(getattr(self, sub_score) == getattr(other, sub_score) for sub_score in self._SCORE_PRECEDENTS)
def __add__(self, other):
return Score(
total=_none_aggregator_helper(self.total, other.total, lambda x, y: x + y),
lowest_table_score=_none_aggregator_helper(self.lowest_table_score, other.lowest_table_score, min),
lowest_person_score=_none_aggregator_helper(self.lowest_person_score, other.lowest_person_score, min)
)
def _scores_in_order(self, other):
for sub_score in self._SCORE_PRECEDENTS:
self_score = getattr(self, sub_score)
other_score = getattr(other, sub_score)
yield self_score, other_score
def __lt__(self, other):
for self_score, other_score in self._scores_in_order(other):
if _less_than(self_score, other_score):
return True
elif self_score != other_score:
return False
return False
def difference_multiplier(self, other):
"""
Method to give a measure of how close two scores are.
1: self == other
0: other == 0
>1: other > self
"""
for self_score, other_score in self._scores_in_order(other):
if self_score != other_score:
return _score_difference_multiplier(self_score, other_score)
return 1
``` |
[
{
"content": "Repeat the code exactly:\n```python\n#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport sys\nimport argparse\n\nfrom avoin.scraper.scraper import DefaultScraper, xpath_parser, ScraperMissingElementError\nfrom avoin.data.utils import read, write\n\ndef main(args):\n if args.command == 'xpa... | [
{
"content": "Repeat the code exactly:\n<|memory_start|>```python\n#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport sys\nimport argparse\n\nfrom avoin.scraper.scraper import DefaultScraper, xpath_parser, ScraperMissingElementError\nfrom avoin.data.utils import read, write\n\ndef main(args):\n if args... | ```python
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import argparse
from avoin.scraper.scraper import DefaultScraper, xpath_parser, ScraperMissingElementError
from avoin.data.utils import read, write
def main(args):
if args.command == 'xpath':
scraper = DefaultScraper()
xpath = args.xpath
url = args.url
format = args.format.lower()
try:
result = scraper.parse(url, parser=xpath_parser, format=format, xpath=xpath)
if args.outfile:
write(args.outfile, result)
else:
print result
except ScraperMissingElementError:
print "Element w/ XPath %s not found" % xpath
return 0
def run():
parser = argparse.ArgumentParser()
parser.add_argument('command')
parser.add_argument('--xpath', dest='xpath')
parser.add_argument('--url', dest='url')
parser.add_argument('--format', dest='format', default='text')
parser.add_argument('--outfile', dest='outfile')
parser.add_argument('--infile', dest='infile')
args = parser.parse_args()
sys.exit(main(args) or 0)
if __name__ == "__main__":
run()
``` |
[
{
"content": "```python\n# -*- encoding: utf-8 -*-\n\"\"\"Test class for Foreman Discovery Rules\n\n@Requirement: Discoveryrule\n\n@CaseAutomation: Automated\n\n@CaseLevel: Acceptance\n\n@CaseComponent: UI\n\n@TestType: Functional\n\n@CaseImportance: High\n\n@Upstream: No\n\"\"\"\nfrom fauxfactory import gen_in... | [
{
"content": "<|memory_start|>```python\n# -*- encoding: utf-8 -*-\n\"\"\"Test class for Foreman Discovery Rules\n\n@Requirement: Discoveryrule\n\n@CaseAutomation: Automated\n\n@CaseLevel: Acceptance\n\n@CaseComponent: UI\n\n@TestType: Functional\n\n@CaseImportance: High\n\n@Upstream: No\n\"\"\"\nfrom fauxfacto... | ```python
# -*- encoding: utf-8 -*-
"""Test class for Foreman Discovery Rules
@Requirement: Discoveryrule
@CaseAutomation: Automated
@CaseLevel: Acceptance
@CaseComponent: UI
@TestType: Functional
@CaseImportance: High
@Upstream: No
"""
from fauxfactory import gen_integer, gen_ipaddr, gen_string
from nailgun import entities
from robottelo.datafactory import (
filtered_datapoint,
invalid_values_list,
valid_data_list,
)
from robottelo.decorators import run_only_on, skip_if_bug_open, tier1
from robottelo.test import UITestCase
from robottelo.ui.factory import make_discoveryrule
from robottelo.ui.locators import common_locators
from robottelo.ui.session import Session
@filtered_datapoint
def valid_search_queries():
"""Generates a list of all the input strings, (excluding html)"""
return [
'cpu_count ^ 10',
'disk_count > 5',
'disks_size <= {0}'.format(gen_string('numeric', 8)),
'ip = {0}'.format(gen_ipaddr()),
'model = KVM',
u'organization ~ {0}'.format(entities.Organization().create().name),
u'subnet = {0}'.format(entities.Subnet().create().name),
]
class DiscoveryRuleTestCase(UITestCase):
"""Implements Foreman discovery Rules in UI."""
@classmethod
def setUpClass(cls):
"""Display all the discovery rules on the same page"""
super(DiscoveryRuleTestCase, cls).setUpClass()
cls.per_page = entities.Setting().search(
query={'search': 'name="entries_per_page"'})[0]
cls.saved_per_page = str(cls.per_page.value)
cls.per_page.value = '100000'
cls.per_page.update({'value'})
cls.host_group = entities.HostGroup().create()
@classmethod
def tearDownClass(cls):
"""Restore previous 'entries_per_page' value"""
cls.per_page.value = cls.saved_per_page
cls.per_page.update({'value'})
super(DiscoveryRuleTestCase, cls).tearDownClass()
@run_only_on('sat')
@tier1
def test_positive_create_with_name(self):
"""Create Discovery Rule using different names
@id: afdf7000-4bd0-41ec-9773-96ff68e27b8d
@Assert: Rule should be successfully created
"""
with Session(self.browser) as session:
for name in valid_data_list():
with self.subTest(name):
make_discoveryrule(
session, name=name, hostgroup=self.host_group.name)
self.assertIsNotNone(self.discoveryrules.search(name))
@run_only_on('sat')
@tier1
def test_positive_create_with_search(self):
"""Create Discovery Rule using different search queries
@id: 973ff6e5-572e-401c-bc8c-d614a583e883
@Assert: Rule should be successfully created and has expected search
field value
"""
with Session(self.browser) as session:
for query in valid_search_queries():
with self.subTest(query):
name = gen_string('alpha')
make_discoveryrule(
session,
name=name,
hostgroup=self.host_group.name,
search_rule=query,
)
self.assertIsNotNone(self.discoveryrules.search(name))
self.assertEqual(
self.discoveryrules.get_attribute_value(
name, 'search'),
query
)
@run_only_on('sat')
@tier1
def test_positive_create_with_hostname(self):
"""Create Discovery Rule using valid hostname value
@id: e6742ca5-1d41-4ba3-8f2c-2169db92485b
@Assert: Rule should be successfully created and has expected hostname
field value
"""
name = gen_string('alpha')
hostname = gen_string('alpha')
with Session(self.browser) as session:
make_discoveryrule(
session,
name=name,
hostgroup=self.host_group.name,
hostname=hostname,
)
self.assertIsNotNone(self.discoveryrules.search(name))
self.assertEqual(
self.discoveryrules.get_attribute_value(name, 'hostname'),
hostname
)
@run_only_on('sat')
@tier1
def test_positive_create_with_hosts_limit(self):
"""Create Discovery Rule providing any number from range 1..100 for
hosts limit field
@id: 64b90586-c1a9-4be4-8c44-4fa19ca998f8
@Assert: Rule should be successfully created and has expected hosts
limit field value
"""
name = gen_string('alpha')
limit = str(gen_integer(1, 100))
with Session(self.browser) as session:
make_discoveryrule(
session,
name=name,
hostgroup=self.host_group.name,
host_limit=limit,
)
self.assertIsNotNone(self.discoveryrules.search(name))
self.assertEqual(
self.discoveryrules.get_attribute_value(name, 'host_limit'),
limit
)
@run_only_on('sat')
@tier1
def test_positive_create_with_priority(self):
"""Create Discovery Rule providing any number from range 1..100 for
priority field
@id: de847288-257a-4f0e-9cb6-9a0dd0877d23
@Assert: Rule should be successfully created and has expected priority
field value
"""
name = gen_string('alpha')
priority = str(gen_integer(1, 100))
with Session(self.browser) as session:
make_discoveryrule(
session,
name=name,
hostgroup=self.host_group.name,
priority=priority,
)
self.assertIsNotNone(self.discoveryrules.search(name))
self.assertEqual(
self.discoveryrules.get_attribute_value(name, 'priority'),
priority
)
@run_only_on('sat')
@tier1
def test_positive_create_disabled(self):
"""Create Discovery Rule in disabled state
@id: 0b98d467-aabf-4efe-890f-50d6edcd99ff
@Assert: Disabled rule should be successfully created
"""
name = gen_string('alpha')
with Session(self.browser) as session:
make_discoveryrule(
session,
name=name,
hostgroup=self.host_group.name,
enabled=False,
)
self.assertIsNotNone(self.discoveryrules.search(name))
self.assertEqual(
self.discoveryrules.get_attribute_value(
name, 'enabled', element_type='checkbox'),
False
)
@run_only_on('sat')
@tier1
def test_negative_create_with_invalid_name(self):
"""Create Discovery Rule with invalid names
@id: 79d950dc-4ca1-407e-84ca-9092d1cba978
@Assert: Error should be raised and rule should not be created
"""
with Session(self.browser) as session:
for name in invalid_values_list(interface='ui'):
with self.subTest(name):
make_discoveryrule(
session, name=name, hostgroup=self.host_group.name)
self.assertIsNotNone(
self.discoveryrules.wait_until_element(
common_locators['name_haserror'])
)
self.assertIsNone(self.discoveryrules.search(name))
@run_only_on('sat')
@tier1
def test_negative_create_with_invalid_hostname(self):
"""Create Discovery Rule with invalid hostname
@id: a322c8ce-4f05-401a-88cb-a3d30b4ac446
@Assert: Error should be raised and rule should not be created
"""
name = gen_string('alpha')
with Session(self.browser) as session:
make_discoveryrule(
session,
name=name,
hostgroup=self.host_group.name,
hostname=gen_string('numeric'),
)
self.assertIsNotNone(self.discoveryrules.wait_until_element(
common_locators['haserror']
))
self.assertIsNone(self.discoveryrules.search(name))
@run_only_on('sat')
@tier1
def test_negative_create_with_limit(self):
"""Create Discovery Rule with invalid host limit
@id: 743d29f4-a901-400c-ad98-a3b8942f02b5
@Assert: Error should be raised and rule should not be created
"""
name = gen_string('alpha')
with Session(self.browser) as session:
for limit in '-1', gen_string('alpha'):
with self.subTest(limit):
make_discoveryrule(
session,
name=name,
host_limit=limit,
hostgroup=self.host_group.name,
)
self.assertIsNotNone(
self.discoveryrules.wait_until_element(
common_locators['haserror'])
)
self.assertIsNone(self.discoveryrules.search(name))
@run_only_on('sat')
@skip_if_bug_open('bugzilla', 1308831)
@tier1
def test_negative_create_with_too_long_limit(self):
"""Create Discovery Rule with too long host limit value
@id: 450b49d9-1058-4186-9b23-15cc615e5bd6
@Assert: Validation error should be raised and rule should not be
created
"""
name = gen_string('alpha')
with Session(self.browser) as session:
make_discoveryrule(
session,
name=name,
host_limit=gen_string('numeric', 50),
hostgroup=self.host_group.name,
)
self.assertIsNotNone(self.discoveryrules.wait_until_element(
common_locators['haserror']
))
self.assertIsNone(self.discoveryrules.search(name))
@run_only_on('sat')
@tier1
def test_negative_create_with_same_name(self):
"""Create Discovery Rule with name that already exists
@id: 5a914e76-de01-406d-9860-0e4e1521b074
@Assert: Error should be raised and rule should not be created
"""
name = gen_string('alpha')
with Session(self.browser) as session:
make_discoveryrule(
session, name=name, hostgroup=self.host_group.name)
self.assertIsNotNone(self.discoveryrules.search(name))
make_discoveryrule(
session, name=name, hostgroup=self.host_group.name)
self.assertIsNotNone(self.discoveryrules.wait_until_element(
common_locators['name_haserror']
))
@run_only_on('sat')
@tier1
def test_negative_create_with_invalid_priority(self):
"""Create Discovery Rule with invalid priority
@id: f8829cce-86c0-452c-b866-d5645174e9e1
@Assert: Error should be raised and rule should not be created
"""
name = gen_string('alpha')
with Session(self.browser) as session:
make_discoveryrule(
session,
name=name,
hostgroup=self.host_group.name,
priority=gen_string('alpha'),
)
self.assertIsNotNone(self.discoveryrules.wait_until_element(
common_locators['haserror']
))
self.assertIsNone(self.discoveryrules.search(name))
@run_only_on('sat')
@tier1
def test_positive_delete(self):
"""Delete existing Discovery Rule
@id: fc5b714c-e5bc-4b0f-bc94-88e080318704
@Assert: Rule should be successfully deleted
"""
with Session(self.browser) as session:
for name in valid_data_list():
with self.subTest(name):
make_discoveryrule(
session, name=name, hostgroup=self.host_group.name)
self.assertIsNotNone(self.discoveryrules.search(name))
self.discoveryrules.delete(name)
@run_only_on('sat')
@tier1
def test_positive_update_name(self):
"""Update discovery rule name
@id: 16a79449-7200-492e-9ddb-65fc034e510d
@Assert: Rule name is updated
"""
name = gen_string('alpha')
with Session(self.browser) as session:
make_discoveryrule(
session, name=name, hostgroup=self.host_group.name)
self.assertIsNotNone(self.discoveryrules.search(name))
for new_name in valid_data_list():
with self.subTest(new_name):
self.discoveryrules.update(name=name, new_name=new_name)
self.assertIsNotNone(self.discoveryrules.search(new_name))
name = new_name # for next iteration
@run_only_on('sat')
@tier1
def test_positive_update_query(self):
"""Update discovery rule search query
@id: bcf85a4c-0b27-47a5-8d5d-7ede0f6eea41
@Assert: Rule search field is updated
"""
name = gen_string('alpha')
with Session(self.browser) as session:
make_discoveryrule(
session, name=name, hostgroup=self.host_group.name)
self.assertIsNotNone(self.discoveryrules.search(name))
for new_query in valid_search_queries():
with self.subTest(new_query):
self.discoveryrules.update(
name=name, search_rule=new_query)
self.assertEqual(
self.discoveryrules.get_attribute_value(
name, 'search'),
new_query
)
@run_only_on('sat')
@tier1
def test_positive_update_hostgroup(self):
"""Update discovery rule host group
@id: e10274e9-bf1b-42cd-a809-f19e707e7f4c
@Assert: Rule host group is updated
"""
name = gen_string('alpha')
new_hostgroup_name = entities.HostGroup().create().name
with Session(self.browser) as session:
make_discoveryrule(
session, name=name, hostgroup=self.host_group.name)
self.assertIsNotNone(self.discoveryrules.search(name))
self.assertEqual(
self.discoveryrules.get_attribute_value(
name, 'hostgroup', element_type='select'),
self.host_group.name
)
self.discoveryrules.update(name=name, hostgroup=new_hostgroup_name)
self.assertEqual(
self.discoveryrules.get_attribute_value(
name, 'hostgroup', element_type='select'),
new_hostgroup_name
)
@run_only_on('sat')
@tier1
def test_positive_update_hostname(self):
"""Update discovery rule hostname value
@id: 753ff15b-da73-4fb3-87cd-14d504d8e882
@Assert: Rule host name is updated
"""
name = gen_string('alpha')
hostname = gen_string('alpha')
with Session(self.browser) as session:
make_discoveryrule(
session, name=name, hostgroup=self.host_group.name)
self.assertIsNotNone(self.discoveryrules.search(name))
self.discoveryrules.update(name=name, hostname=hostname)
self.assertEqual(
self.discoveryrules.get_attribute_value(name, 'hostname'),
hostname
)
@run_only_on('sat')
@tier1
def test_positive_update_limit(self):
"""Update discovery rule limit value
@id: 69d59c34-407b-47d0-a2b8-46decb95ef47
@Assert: Rule host limit field is updated
"""
name = gen_string('alpha')
limit = str(gen_integer(1, 100))
with Session(self.browser) as session:
make_discoveryrule(
session, name=name, hostgroup=self.host_group.name)
self.assertIsNotNone(self.discoveryrules.search(name))
self.discoveryrules.update(name=name, host_limit=limit)
self.assertEqual(
self.discoveryrules.get_attribute_value(name, 'host_limit'),
limit
)
@run_only_on('sat')
@tier1
def test_positive_update_priority(self):
"""Update discovery rule priority value
@id: be4de7a9-df8e-44ae-9910-7397341f6d07
@Assert: Rule priority is updated
"""
name = gen_string('alpha')
priority = str(gen_integer(1, 100))
with Session(self.browser) as session:
make_discoveryrule(
session, name=name, hostgroup=self.host_group.name)
self.assertIsNotNone(self.discoveryrules.search(name))
self.discoveryrules.update(name=name, priority=priority)
self.assertEqual(
self.discoveryrules.get_attribute_value(name, 'priority'),
priority
)
@run_only_on('sat')
@tier1
def test_positive_update_disable_enable(self):
"""Update discovery rule enabled state. (Disabled->Enabled)
@id: 60d619e4-a039-4f9e-a16c-b05f0598e8fa
@Assert: Rule enabled checkbox is updated
"""
name = gen_string('alpha')
with Session(self.browser) as session:
make_discoveryrule(
session,
name=name,
hostgroup=self.host_group.name,
enabled=False,
)
self.assertIsNotNone(self.discoveryrules.search(name))
self.discoveryrules.update(name=name, enabled=True)
self.assertEqual(
self.discoveryrules.get_attribute_value(
name, 'enabled', element_type='checkbox'),
True
)
@run_only_on('sat')
@tier1
def test_negative_update_name(self):
"""Update discovery rule name using invalid names only
@id: 65f32628-796a-4d7e-bf2c-c84c6b06f309
@Assert: Rule name is not updated
"""
name = gen_string('alpha')
with Session(self.browser) as session:
make_discoveryrule(
session, name=name, hostgroup=self.host_group.name)
self.assertIsNotNone(self.discoveryrules.search(name))
for new_name in invalid_values_list(interface='ui'):
with self.subTest(new_name):
self.discoveryrules.update(name=name, new_name=new_name)
self.assertIsNotNone(
self.discoveryrules.wait_until_element(
common_locators['name_haserror'])
)
self.assertIsNone(self.discoveryrules.search(new_name))
@run_only_on('sat')
@tier1
def test_negative_update_hostname(self):
"""Update discovery rule host name using number as a value
@id: 18713425-22fe-4eaa-a515-8e08aa07e116
@Assert: Rule host name is not updated
"""
name = gen_string('alpha')
hostname = gen_string('alpha')
with Session(self.browser) as session:
make_discoveryrule(
session,
name=name,
hostgroup=self.host_group.name,
hostname=hostname,
)
self.assertIsNotNone(self.discoveryrules.search(name))
self.discoveryrules.update(
name=name, hostname=gen_string('numeric'))
self.assertIsNotNone(self.discoveryrules.wait_until_element(
common_locators['haserror']
))
self.assertEqual(
self.discoveryrules.get_attribute_value(name, 'hostname'),
hostname
)
@run_only_on('sat')
@tier1
def test_negative_update_limit(self):
"""Update discovery rule host limit using invalid values
@id: 7e8b7218-3c8a-4b03-b0df-484e0d793ceb
@Assert: Rule host limit is not updated
"""
name = gen_string('alpha')
limit = str(gen_integer(1, 100))
with Session(self.browser) as session:
make_discoveryrule(
session,
name=name,
hostgroup=self.host_group.name,
host_limit=limit,
)
self.assertIsNotNone(self.discoveryrules.search(name))
for new_limit in '-1', gen_string('alpha'):
with self.subTest(new_limit):
self.discoveryrules.update(
name=name, host_limit=new_limit)
self.assertIsNotNone(
self.discoveryrules.wait_until_element(
common_locators['haserror'])
)
self.assertEqual(
self.discoveryrules.get_attribute_value(name, 'host_limit'),
limit
)
@run_only_on('sat')
@tier1
def test_negative_update_priority(self):
"""Update discovery rule priority using invalid values
@id: d44ad49c-5d95-442f-a1b3-cd82dd8ffabf
@Assert: Rule priority is not updated
"""
name = gen_string('alpha')
priority = str(gen_integer(1, 100))
with Session(self.browser) as session:
make_discoveryrule(
session,
name=name,
hostgroup=self.host_group.name,
priority=priority,
)
self.assertIsNotNone(self.discoveryrules.search(name))
for new_priority in '-1', gen_string('alpha'):
with self.subTest(new_priority):
self.discoveryrules.update(
name=name, priority=new_priority)
self.assertIsNotNone(
self.discoveryrules.wait_until_element(
common_locators['haserror'])
)
self.assertEqual(
self.discoveryrules.get_attribute_value(name, 'priority'),
priority
)
``` |
[
{
"content": "Write the code verbatim:\n```python\n# -*- coding: utf-8 -*-\nfrom __future__ import print_function, unicode_literals\n\nfrom pyNastran.gui.qt_version import qt_version\nif qt_version == 4:\n #from PyQt4 import QtCore, QtGui\n from PyQt4.QtGui import (\n QDialog, QLineEdit, QPushButto... | [
{
"content": "Write the code verbatim:\n<|memory_start|>```python\n# -*- coding: utf-8 -*-\nfrom __future__ import print_function, unicode_literals\n\nfrom pyNastran.gui.qt_version import qt_version\nif qt_version == 4:\n #from PyQt4 import QtCore, QtGui\n from PyQt4.QtGui import (\n QDialog, QLine... | ```python
# -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals
from pyNastran.gui.qt_version import qt_version
if qt_version == 4:
#from PyQt4 import QtCore, QtGui
from PyQt4.QtGui import (
QDialog, QLineEdit, QPushButton, QGridLayout, QVBoxLayout, QHBoxLayout, QApplication,
QColorDialog,
)
#QButtonGroup, QCheckBox
#from PyQt4.QtCore import QString
elif qt_version == 5:
#from PyQt5 import QtCore, QtGui
from PyQt5.QtWidgets import (
QDialog, QLineEdit, QPushButton, QGridLayout, QVBoxLayout, QHBoxLayout, QApplication,
QColorDialog,
)
#QButtonGroup, QCheckBox
from numpy import setdiff1d, unique, array, hstack
from pyNastran.bdf.utils import parse_patran_syntax, parse_patran_syntax_dict
from pyNastran.bdf.cards.base_card import collapse_colon_packs
class ChangeBCs(QtGui.QDialog):
"""
+--------------------------+
| Change BCs |
+--------------------------+
| |
| Property xxx Default |
| Element ID xxx Default |
| Angle Tol xxx Default |
| |
| Apply OK Cancel |
+--------------------------+
"""
def __init__(self, data, win_parent=None):
self.win_parent = win_parent
#Init the base class
self._default_property = data['pid']
self._default_elements = data['eid']
self._default_theta = data['theta']
self.elements_pound = data['elements_pound']
self.out_data = data
QDialog.__init__(self, win_parent)
#self.setupUi(self)
self.setWindowTitle('Groups: Modify')
self.create_widgets()
self.create_layout()
#self.set_connections()
#self.show()
def create_widgets(self):
# Name
self.pid = QLabel("New Property ID:")
self.pid_edit = QLineEdit(str(self._default_pid).strip())
self.pid_button = QPushButton("Default")
# Name
self.elements = QLabel("Element ID:")
self.elements_edit = QLineEdit(str(self._default_elements).strip())
self.elements_button = QPushButton("Default")
# elements
self.theta = QLabel("Theta Neighbor Max:")
self.theta_edit = QLineEdit(str(self._default_theta).strip())
self.theta_button = QPushButton("Default")
# applies a unique implicitly
self.eids = parse_patran_syntax(str(self._default_elements), pound=self.elements_pound)
self.cids = parse_patran_syntax(str(self._default_coords), pound=self.coords_pound)
# continuous / discrete
#self.checkbox_continuous = QCheckBox("Continuous")
#self.checkbox_discrete = QCheckBox("Discrete")
#self.checkbox_discrete.setChecked(self._default_is_discrete)
# put these in a group
#checkboxs2 = QButtonGroup(self)
#checkboxs2.addButton(self.checkbox_continuous)
#checkboxs2.addButton(self.checkbox_discrete)
# closing
self.apply_button = QPushButton("Apply")
self.ok_button = QPushButton("OK")
self.cancel_button = QPushButton("Cancel")
def create_layout(self):
grid = QGridLayout()
grid.addWidget(self.pid, 0, 0)
grid.addWidget(self.pid_edit, 0, 1)
grid.addWidget(self.pid_button, 0, 2)
grid.addWidget(self.elements, 2, 0)
grid.addWidget(self.elements_edit, 2, 1)
grid.addWidget(self.elements_button, 2, 2)
ok_cancel_box = QHBoxLayout()
ok_cancel_box.addWidget(self.apply_button)
ok_cancel_box.addWidget(self.ok_button)
ok_cancel_box.addWidget(self.cancel_button)
vbox = QVBoxLayout()
vbox.addLayout(grid)
vbox.addStretch()
vbox.addLayout(ok_cancel_box)
self.setLayout(vbox)
def set_connections(self):
self.connect(self.name_button, QtCore.SIGNAL('clicked()'), self.on_default_name)
self.connect(self.coords_button, QtCore.SIGNAL('clicked()'), self.on_default_coords)
self.connect(self.elements_button, QtCore.SIGNAL('clicked()'), self.on_default_elements)
self.connect(self.add_button, QtCore.SIGNAL('clicked()'), self.on_add)
self.connect(self.remove_button, QtCore.SIGNAL('clicked()'), self.on_remove)
self.connect(self.color_edit, QtCore.SIGNAL('clicked()'), self.on_edit_color)
#self.color_edit.clicked.connect(self.on_edit_color)
self.connect(self.color_button, QtCore.SIGNAL('clicked()'), self.on_default_color)
self.connect(self.apply_button, QtCore.SIGNAL('clicked()'), self.on_apply)
self.connect(self.ok_button, QtCore.SIGNAL('clicked()'), self.on_ok)
self.connect(self.cancel_button, QtCore.SIGNAL('clicked()'), self.on_cancel)
def closeEvent(self, event):
event.accept()
def _apply_cids_eids(self):
ctext = _get_collapsed_text(self.cids)
etext = _get_collapsed_text(self.eids)
self.coords_edit.setText(str(ctext.lstrip()))
self.elements_edit.setText(str(etext.lstrip()))
def on_remove(self):
adict, is_valid = self.check_patran_syntax_dict(self.remove_edit)
if not is_valid:
#self.remove_edit.setStyleSheet("QLineEdit{background: red;}")
return
self.eids = _remove(adict, ['e', 'elem', 'element'], self.eids)
self.cids = _remove(adict, ['c', 'cid', 'coord'], self.cids)
self._apply_cids_eids()
self.remove_edit.clear()
self.remove_edit.setStyleSheet("QLineEdit{background: white;}")
def on_default_name(self):
self.name_edit.setText(str(self._default_name))
self.name_edit.setStyleSheet("QLineEdit{background: white;}")
def on_default_coords(self):
self.coords_edit.setText(str(self._default_coords))
self.coords_edit.setStyleSheet("QLineEdit{background: white;}")
def on_default_elements(self):
self.elements_edit.setText(str(self._default_elements))
self.elements_edit.setStyleSheet("QLineEdit{background: white;}")
def on_edit_color(self):
c = [int(255 * i) for i in self.text_col]
#print('c =', c)
col = QColorDialog.getColor(QtGui.QColor(*c), self, "Choose a text color")
self.color.SetColor(col)
def on_default_color(self):
self.color_edit.setColor(self._default_color)
#self.elements_edit.setStyleSheet("QLineEdit{background: white;}")
def check_patran_syntax(self, cell, pound=None):
text = str(cell.text())
try:
value = parse_patran_syntax(text, pound=pound)
cell.setStyleSheet("QLineEdit{background: white;}")
return value, True
except ValueError as e:
cell.setStyleSheet("QLineEdit{background: red;}")
cell.setToolTip(str(e))
return None, False
def check_patran_syntax_dict(self, cell, pound=None):
text = str(cell.text())
try:
value = parse_patran_syntax_dict(text)
cell.setStyleSheet("QLineEdit{background: white;}")
cell.setToolTip('')
return value, True
except (ValueError, SyntaxError, KeyError) as e:
cell.setStyleSheet("QLineEdit{background: red;}")
cell.setToolTip(str(e))
return None, False
def check_float(self, cell):
text = cell.text()
try:
value = float(text)
cell.setStyleSheet("QLineEdit{background: white;}")
cell.setToolTip('')
return value, True
except ValueError:
cell.setStyleSheet("QLineEdit{background: red;}")
return None, False
def check_name(self, cell):
text = str(cell.text()).strip()
if len(text):
cell.setStyleSheet("QLineEdit{background: white;}")
return text, True
else:
cell.setStyleSheet("QLineEdit{background: red;}")
return None, False
def check_format(self, cell):
text = str(cell.text())
is_valid = True
if len(text) < 2:
is_valid = False
elif 's' in text.lower():
is_valid = False
elif '%' not in text[0]:
is_valid = False
elif text[-1].lower() not in ['g', 'f', 'i', 'e']:
is_valid = False
try:
text % 1
text % .2
text % 1e3
text % -5.
text % -5
except ValueError:
is_valid = False
try:
text % 's'
is_valid = False
except TypeError:
pass
if is_valid:
cell.setStyleSheet("QLineEdit{background: white;}")
return text, True
else:
cell.setStyleSheet("QLineEdit{background: red;}")
return None, False
def on_validate(self):
name_value, flag0 = self.check_name(self.name_edit)
coords_value, flag1 = self.check_patran_syntax(self.coords_edit,
pound=self.coords_pound)
elements_value, flag2 = self.check_patran_syntax(self.elements_edit,
pound=self.elements_pound)
#color = self.color
if flag0 and flag1 and flag2:
self.out_data['name'] = name_value
self.out_data['elements'] = elements_value
self.out_data['coords'] = coords_value
self.out_data['clicked_ok'] = True
#print("name = %r" % self.name_edit.text())
#print("min = %r" % self.min_edit.text())
#print("max = %r" % self.max_edit.text())
#print("format = %r" % self.format_edit.text())
return True
return False
def on_apply(self):
passed = self.on_validate()
if passed:
self.win_parent.on_modify_group(self.out_data)
def on_ok(self):
passed = self.on_validate()
if passed:
self.close()
#self.destroy()
def on_cancel(self):
self.close()
def _get_collapsed_text(values):
singles, doubles = collapse_colon_packs(values)
text = ' '.join([str(s) for s in singles]) + ' '
text += ' '.join([''.join([str(doublei) for doublei in double]) for double in doubles])
return text
def _add(adict, keys, values_to_add):
value_stack = []
for key in keys:
if key not in adict:
continue
values = adict[key]
value_stack.append(values)
if value_stack:
value_stack.append(values_to_add)
values_add = unique(hstack(value_stack))
return values_add
return values_to_add
def _remove(adict, keys, values_to_remove):
value_stack = []
for key in keys:
if key not in adict:
continue
value_stack.append(adict[key])
if value_stack:
values_remove = unique(hstack(value_stack))
return setdiff1d(values_to_remove, values_remove)
return values_to_remove
def main():
# kills the program when you hit Cntl+C from the command line
# doesn't save the current state as presumably there's been an error
import signal
signal.signal(signal.SIGINT, signal.SIG_DFL)
import sys
# Someone is launching this directly
# Create the QApplication
app = QApplication(sys.argv)
nice_blue = (0.1, 0.2, 0.4)
#The Main window
d = {
'name' : 'asdf',
'coords' : 0,
'coords_pound' : 4,
'elements_pound' : 103,
'elements' : '1:#',
'color' : nice_blue,
}
main_window = GroupsModify(d)
main_window.show()
# Enter the main loop
app.exec_()
if __name__ == "__main__":
main()
``` |
[
{
"content": "Recreate the original code text:\n```python\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless requ... | [
{
"content": "Recreate the original code text:\n<|memory_start|>```python\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n... | ```python
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import jinja2
from oslo_log import log as logging
from oslo_utils import encodeutils
from kube import base
from senlin.common import consts
from senlin.common import exception as exc
from senlin.common.i18n import _
from senlin.common import schema
LOG = logging.getLogger(__name__)
class ServerProfile(base.KubeBaseProfile):
"""Profile for an kubernetes master server."""
VERSIONS = {
'1.0': [
{'status': consts.EXPERIMENTAL, 'since': '2017.10'}
]
}
KEYS = (
CONTEXT, FLAVOR, IMAGE, KEY_NAME,
PUBLIC_NETWORK, BLOCK_DEVICE_MAPPING_V2,
) = (
'context', 'flavor', 'image', 'key_name',
'public_network', 'block_device_mapping_v2',
)
INTERNAL_KEYS = (
KUBEADM_TOKEN, KUBE_MASTER_IP, SECURITY_GROUP,
PRIVATE_NETWORK, PRIVATE_SUBNET, PRIVATE_ROUTER,
KUBE_MASTER_FLOATINGIP, KUBE_MASTER_FLOATINGIP_ID,
SCALE_OUT_RECV_ID, SCALE_OUT_URL,
) = (
'kubeadm_token', 'kube_master_ip', 'security_group',
'private_network', 'private_subnet', 'private_router',
'kube_master_floatingip', 'kube_master_floatingip_id',
'scale_out_recv_id', 'scale_out_url',
)
NETWORK_KEYS = (
PORT, FIXED_IP, NETWORK, PORT_SECURITY_GROUPS,
FLOATING_NETWORK, FLOATING_IP,
) = (
'port', 'fixed_ip', 'network', 'security_groups',
'floating_network', 'floating_ip',
)
BDM2_KEYS = (
BDM2_UUID, BDM2_SOURCE_TYPE, BDM2_DESTINATION_TYPE,
BDM2_DISK_BUS, BDM2_DEVICE_NAME, BDM2_VOLUME_SIZE,
BDM2_GUEST_FORMAT, BDM2_BOOT_INDEX, BDM2_DEVICE_TYPE,
BDM2_DELETE_ON_TERMINATION,
) = (
'uuid', 'source_type', 'destination_type', 'disk_bus',
'device_name', 'volume_size', 'guest_format', 'boot_index',
'device_type', 'delete_on_termination',
)
properties_schema = {
CONTEXT: schema.Map(
_('Customized security context for operating servers.'),
),
FLAVOR: schema.String(
_('ID of flavor used for the server.'),
required=True,
updatable=True,
),
IMAGE: schema.String(
# IMAGE is not required, because there could be BDM or BDMv2
# support and the corresponding settings effective
_('ID of image to be used for the new server.'),
updatable=True,
),
KEY_NAME: schema.String(
_('Name of Nova keypair to be injected to server.'),
),
PUBLIC_NETWORK: schema.String(
_('Public network for kubernetes.'),
required=True,
),
BLOCK_DEVICE_MAPPING_V2: schema.List(
_('A list specifying the properties of block devices to be used '
'for this server.'),
schema=schema.Map(
_('A map specifying the properties of a block device to be '
'used by the server.'),
schema={
BDM2_UUID: schema.String(
_('ID of the source image, snapshot or volume'),
),
BDM2_SOURCE_TYPE: schema.String(
_("Volume source type, must be one of 'image', "
"'snapshot', 'volume' or 'blank'"),
required=True,
),
BDM2_DESTINATION_TYPE: schema.String(
_("Volume destination type, must be 'volume' or "
"'local'"),
required=True,
),
BDM2_DISK_BUS: schema.String(
_('Bus of the device.'),
),
BDM2_DEVICE_NAME: schema.String(
_('Name of the device(e.g. vda, xda, ....).'),
),
BDM2_VOLUME_SIZE: schema.Integer(
_('Size of the block device in MB(for swap) and '
'in GB(for other formats)'),
required=True,
),
BDM2_GUEST_FORMAT: schema.String(
_('Specifies the disk file system format(e.g. swap, '
'ephemeral, ...).'),
),
BDM2_BOOT_INDEX: schema.Integer(
_('Define the boot order of the device'),
),
BDM2_DEVICE_TYPE: schema.String(
_('Type of the device(e.g. disk, cdrom, ...).'),
),
BDM2_DELETE_ON_TERMINATION: schema.Boolean(
_('Whether to delete the volume when the server '
'stops.'),
),
}
),
),
}
def __init__(self, type_name, name, **kwargs):
super(ServerProfile, self).__init__(type_name, name, **kwargs)
self.server_id = None
def do_cluster_create(self, obj):
self._generate_kubeadm_token(obj)
self._create_security_group(obj)
self._create_network(obj)
def do_cluster_delete(self, obj):
if obj.dependents and 'kube-node' in obj.dependents:
msg = ("Cluster %s delete failed, "
"Node clusters %s must be deleted first." %
(obj.id, obj.dependents['kube-node']))
raise exc.EResourceDeletion(type='kubernetes.master',
id=obj.id,
message=msg)
self._delete_network(obj)
self._delete_security_group(obj)
def do_create(self, obj):
"""Create a server for the node object.
:param obj: The node object for which a server will be created.
"""
kwargs = {}
for key in self.KEYS:
if self.properties[key] is not None:
kwargs[key] = self.properties[key]
image_ident = self.properties[self.IMAGE]
if image_ident is not None:
image = self._validate_image(obj, image_ident, 'create')
kwargs.pop(self.IMAGE)
kwargs['imageRef'] = image.id
flavor_ident = self.properties[self.FLAVOR]
flavor = self._validate_flavor(obj, flavor_ident, 'create')
kwargs.pop(self.FLAVOR)
kwargs['flavorRef'] = flavor.id
keypair_name = self.properties[self.KEY_NAME]
if keypair_name:
keypair = self._validate_keypair(obj, keypair_name, 'create')
kwargs['key_name'] = keypair.name
kwargs['name'] = obj.name
metadata = self._build_metadata(obj, {})
kwargs['metadata'] = metadata
jj_vars = {}
cluster_data = self._get_cluster_data(obj)
kwargs['networks'] = [{'uuid': cluster_data[self.PRIVATE_NETWORK]}]
# Get user_data parameters from metadata
jj_vars['KUBETOKEN'] = cluster_data[self.KUBEADM_TOKEN]
jj_vars['MASTER_FLOATINGIP'] = cluster_data[
self.KUBE_MASTER_FLOATINGIP]
block_device_mapping_v2 = self.properties[self.BLOCK_DEVICE_MAPPING_V2]
if block_device_mapping_v2 is not None:
kwargs['block_device_mapping_v2'] = self._resolve_bdm(
obj, block_device_mapping_v2, 'create')
# user_data = self.properties[self.USER_DATA]
user_data = base.loadScript('./scripts/master.sh')
if user_data is not None:
# Use jinja2 to replace variables defined in user_data
try:
jj_t = jinja2.Template(user_data)
user_data = jj_t.render(**jj_vars)
except (jinja2.exceptions.UndefinedError, ValueError) as ex:
# TODO(anyone) Handle jinja2 error
pass
ud = encodeutils.safe_encode(user_data)
kwargs['user_data'] = encodeutils.safe_decode(base64.b64encode(ud))
sgid = self._get_security_group(obj)
kwargs['security_groups'] = [{'name': sgid}]
server = None
resource_id = None
try:
server = self.compute(obj).server_create(**kwargs)
self.compute(obj).wait_for_server(server.id)
server = self.compute(obj).server_get(server.id)
self._update_master_ip(obj, server.addresses[''][0]['addr'])
self._associate_floatingip(obj, server)
LOG.info("Created master node: %s" % server.id)
return server.id
except exc.InternalError as ex:
if server and server.id:
resource_id = server.id
raise exc.EResourceCreation(type='server',
message=str(ex),
resource_id=resource_id)
def do_delete(self, obj, **params):
"""Delete the physical resource associated with the specified node.
:param obj: The node object to operate on.
:param kwargs params: Optional keyword arguments for the delete
operation.
:returns: This operation always return True unless exception is
caught.
:raises: `EResourceDeletion` if interaction with compute service fails.
"""
if not obj.physical_id:
return True
server_id = obj.physical_id
ignore_missing = params.get('ignore_missing', True)
internal_ports = obj.data.get('internal_ports', [])
force = params.get('force', False)
try:
self._disassociate_floatingip(obj, server_id)
driver = self.compute(obj)
if force:
driver.server_force_delete(server_id, ignore_missing)
else:
driver.server_delete(server_id, ignore_missing)
driver.wait_for_server_delete(server_id)
if internal_ports:
ex = self._delete_ports(obj, internal_ports)
if ex:
raise ex
return True
except exc.InternalError as ex:
raise exc.EResourceDeletion(type='server', id=server_id,
message=str(ex))
``` |
[
{
"content": "Here is the code block:\n```python\nfrom xml.etree.ElementTree import XMLParser\n\n\nclass GuestXmlParser:\n\n int_tags = [\"currentMemory\", \"memory\"]\n int_attribs = [\"index\", \"port\", \"startport\", \"vram\"]\n\n def __init__(self):\n self.json = {}\n self.stack = [s... | [
{
"content": "Here is the code block:\n<|memory_start|>```python\nfrom xml.etree.ElementTree import XMLParser\n\n\nclass GuestXmlParser:\n\n int_tags = [\"currentMemory\", \"memory\"]\n int_attribs = [\"index\", \"port\", \"startport\", \"vram\"]\n\n def __init__(self):\n self.json = {}\n ... | ```python
from xml.etree.ElementTree import XMLParser
class GuestXmlParser:
int_tags = ["currentMemory", "memory"]
int_attribs = ["index", "port", "startport", "vram"]
def __init__(self):
self.json = {}
self.stack = [self.json]
self.catogory = None
def start(self, tag, attrib):
self.tag = tag
for attr in self.int_attribs:
if attrib.get(attr):
attrib[attr] = int(attrib[attr])
if tag in ("devices", "clock"):
self.category = tag
self.stack[-1][tag] = []
self.stack.append(self.stack[-1][tag])
elif tag == "emulator":
self.stack[-2][tag] = attrib
self.stack.append(attrib)
elif isinstance(self.stack[-1], dict):
self.stack[-1][tag] = attrib
self.stack.append(attrib)
elif self.category == "devices":
device = {"family": tag}
device.update(attrib)
self.stack[-1].append(device)
self.stack.append(device)
elif self.category == "clock":
self.stack[-1].append(attrib)
self.stack.append(attrib)
def end(self, tag):
self.stack.pop()
def data(self, data):
if data and data.strip():
if self.tag in self.int_tags:
self.stack[-1]["value"] = int(data)
else:
self.stack[-1]["value"] = data
def close(self):
return self.json
def parse_domain_xml(xml):
target = GuestXmlParser()
parser = XMLParser(target=target)
parser.feed(xml)
return parser.close()
``` |
[
{
"content": "Repeat the code exactly:\n```python\n# -*- coding: utf-8 -*-\n# IDD3 - Propositional Idea Density from Dependency Trees\n# Copyright (C) 2014-2015 Andre Luiz Verucci da Cunha\n#\n# This program is free software: you can redistribute it and/or modify it\n# under the terms of the GNU General Public... | [
{
"content": "Repeat the code exactly:\n<|memory_start|>```python\n# -*- coding: utf-8 -*-\n# IDD3 - Propositional Idea Density from Dependency Trees\n# Copyright (C) 2014-2015 Andre Luiz Verucci da Cunha\n#\n# This program is free software: you can redistribute it and/or modify it\n# under the terms of the GN... | ```python
# -*- coding: utf-8 -*-
# IDD3 - Propositional Idea Density from Dependency Trees
# Copyright (C) 2014-2015 Andre Luiz Verucci da Cunha
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function, unicode_literals, division
from idd3 import Ruleset
class AtomicEmittingRuleset(Ruleset):
"""A base ruleset for atomic relations that just emits the associated word
as a proposition."""
def extract(self, relations, index, context, engine, info={}):
engine.emit((relations[index].word,))
class NegRuleset(AtomicEmittingRuleset):
"""A ruleset that processes the 'neg' relation."""
rel = 'neg'
def extract(self, relations, index, context, engine, info={}):
engine.emit((relations[index].word,), 'M')
class DiscourseRuleset(AtomicEmittingRuleset):
"""A ruleset that processes the 'discourse' relation."""
rel = 'discourse'
def extract(self, relations, index, context, engine, info={}):
engine.emit((relations[index].word,), 'M')
``` |
[
{
"content": "Reconstruct the code file line-for-line, unmodified:\n```python\nfrom django.contrib.auth.models import User\nfrom django.core.exceptions import ValidationError\nfrom django.db import models\nfrom django.db.models.signals import post_save\nfrom django.dispatch import receiver\n\n\n# CUSTOM FILE SI... | [
{
"content": "Reconstruct the code file line-for-line, unmodified:\n<|memory_start|>```python\nfrom django.contrib.auth.models import User\nfrom django.core.exceptions import ValidationError\nfrom django.db import models\nfrom django.db.models.signals import post_save\nfrom django.dispatch import receiver\n\n\n... | ```python
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
# CUSTOM FILE SIZE VALIDATOR
def validate_image(fieldfile_obj):
"""
Limit image size upload
"""
filesize = fieldfile_obj.file.size
megabyte_limit = 0.5
if filesize > megabyte_limit*1024*1024:
raise ValidationError("Max file size is %sMB" % str(megabyte_limit))
class Profile(models.Model):
"""
Author Model
"""
user = models.OneToOneField(
User,
on_delete=models.CASCADE
)
profile_picture = models.ImageField(
upload_to='images/%Y/%m/%d',
validators=[validate_image],
blank=True,
null=True
)
profile_name = models.CharField(
verbose_name='Name',
null=True,
blank=True,
max_length=50
)
profile_email = models.EmailField(
verbose_name='Email Address',
null=True,
blank=True
)
profile_location = models.CharField(
verbose_name='Origin/City',
null=True,
blank=True,
max_length=50
)
profile_github = models.URLField(
verbose_name='Github URL',
null=True,
blank=True
)
slug = models.SlugField()
is_created = models.DateTimeField(
null=True,
blank=True
)
is_moderator = models.BooleanField(
default=False,
)
def __str__(self):
return str(self.user)
def save(self, **kwargs):
if not self.slug:
from djangoid.utils import get_unique_slug
self.slug = get_unique_slug(instance=self, field='profile_name')
super(Profile, self).save(**kwargs)
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
"""
Automatically Create User when Login
"""
if created:
Profile.objects.create(user=instance)
@receiver(post_save, sender=User)
def save_user_profile(sender, instance, **kwargs):
"""
Automatically Create User when Login
"""
instance.profile.save()
``` |
[
{
"content": "Return the code unaltered:\n```python\n# -*- coding: utf-8 -*-\n\nimport sys\nimport os\nimport argparse\nsys.path.append(os.path.join(os.path.dirname(__file__), '../..'))\nfrom utils.argparsers.simulationargparser import SimulationArgumentParser\n\n\nclass MultileaveArgumentParser(SimulationArgum... | [
{
"content": "Return the code unaltered:\n<|memory_start|>```python\n# -*- coding: utf-8 -*-\n\nimport sys\nimport os\nimport argparse\nsys.path.append(os.path.join(os.path.dirname(__file__), '../..'))\nfrom utils.argparsers.simulationargparser import SimulationArgumentParser\n\n\nclass MultileaveArgumentParser... | ```python
# -*- coding: utf-8 -*-
import sys
import os
import argparse
sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
from utils.argparsers.simulationargparser import SimulationArgumentParser
class MultileaveArgumentParser(SimulationArgumentParser):
def __init__(self, description=None, set_arguments={}):
set_arguments['print_feature_count'] = False
super(MultileaveArgumentParser, self).__init__(description=description,
set_arguments=set_arguments)
# self.set_argument_namespace('MultileaveArgumentParser')
# self.add_argument('--bias', dest='bias_experiment', action='store_true', required=False,
# default=False, help='Flag for bias experiment.')
# self.add_argument('--k --n_results', dest='k', default=10, type=int,
# help='Number of results shown after each query.')
self.add_argument('--n_rankers', dest='n_rankers', required=True, type=int,
help='Number of rankers to use in simulation.')
# def get_multileave_args(self, args):
# return self.get_args(args, 'MultileaveArgumentParser')
# def parse_args_rec(self):
# output_str, args, sim_args = super(MultileaveArgumentParser, self).parse_args_rec()
# multileave_args = self.get_multileave_args(args)
# if not sim_args.no_run_details:
# output_str += '\nMultileave Arguments'
# output_str += '\n---------------------'
# for name, value in vars(multileave_args).items():
# output_str += '\n%s %s' % (name, value)
# output_str += '\n---------------------'
# return output_str, args, sim_args, multileave_args
``` |
[
{
"content": "Here is a code file:\n```python\n# -*- coding: utf-8 -*-\nfrom south.utils import datetime_utils as datetime\nfrom south.db import db\nfrom south.v2 import SchemaMigration\nfrom django.db import models\n\n\nclass Migration(SchemaMigration):\n\n def forwards(self, orm):\n\n # Changing fie... | [
{
"content": "Here is a code file:\n<|memory_start|>```python\n# -*- coding: utf-8 -*-\nfrom south.utils import datetime_utils as datetime\nfrom south.db import db\nfrom south.v2 import SchemaMigration\nfrom django.db import models\n\n\nclass Migration(SchemaMigration):\n\n def forwards(self, orm):\n\n ... | ```python
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Visit.service'
db.alter_column(u'clinics_visit', 'service_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['clinics.Service'], null=True))
def backwards(self, orm):
# Changing field 'Visit.service'
db.alter_column(u'clinics_visit', 'service_id', self.gf('django.db.models.fields.related.ForeignKey')(default=0, to=orm['clinics.Service']))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'clinics.clinic': {
'Meta': {'object_name': 'Clinic'},
'category': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'code': ('django.db.models.fields.PositiveIntegerField', [], {'unique': 'True'}),
'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['rapidsms.Contact']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_renovated': ('django.db.models.fields.CharField', [], {'max_length': '4', 'blank': 'True'}),
'lga': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'lga_rank': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'location': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'pbf_rank': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'town': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'ward': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'year_opened': ('django.db.models.fields.CharField', [], {'max_length': '4', 'blank': 'True'})
},
u'clinics.clinicstaff': {
'Meta': {'object_name': 'ClinicStaff'},
'clinic': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.Clinic']"}),
'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['rapidsms.Contact']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_manager': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'staff_type': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'year_started': ('django.db.models.fields.CharField', [], {'max_length': '4', 'blank': 'True'})
},
u'clinics.clinicstatistic': {
'Meta': {'unique_together': "[('clinic', 'statistic', 'month')]", 'object_name': 'ClinicStatistic'},
'clinic': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.Clinic']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'float_value': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'int_value': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'month': ('django.db.models.fields.DateField', [], {}),
'rank': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'statistic': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['statistics.Statistic']"}),
'text_value': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'clinics.patient': {
'Meta': {'object_name': 'Patient'},
'clinic': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.Clinic']"}),
'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['rapidsms.Contact']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mobile': ('django.db.models.fields.CharField', [], {'max_length': '11', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'serial': ('django.db.models.fields.PositiveIntegerField', [], {})
},
u'clinics.region': {
'Meta': {'unique_together': "(('external_id', 'type'),)", 'object_name': 'Region'},
'alternate_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'boundary': ('django.contrib.gis.db.models.fields.MultiPolygonField', [], {}),
'external_id': ('django.db.models.fields.IntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'lga'", 'max_length': '16'})
},
u'clinics.service': {
'Meta': {'object_name': 'Service'},
'code': ('django.db.models.fields.PositiveIntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'})
},
u'clinics.visit': {
'Meta': {'object_name': 'Visit'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'patient': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.Patient']"}),
'service': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.Service']", 'null': 'True', 'blank': 'True'}),
'staff': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['clinics.ClinicStaff']", 'null': 'True', 'blank': 'True'}),
'visit_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
u'clinics.visitregistrationerror': {
'Meta': {'object_name': 'VisitRegistrationError'},
'error_count': ('django.db.models.fields.PositiveIntegerField', [], {}),
'error_type': ('django.db.models.fields.PositiveIntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sender': ('django.db.models.fields.CharField', [], {'max_length': '20'})
},
u'clinics.visitregistrationerrorlog': {
'Meta': {'object_name': 'VisitRegistrationErrorLog'},
'error_type': ('django.db.models.fields.PositiveIntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '160'}),
'message_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'sender': ('django.db.models.fields.CharField', [], {'max_length': '20'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'rapidsms.contact': {
'Meta': {'object_name': 'Contact'},
'created_on': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
'modified_on': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'statistics.statistic': {
'Meta': {'object_name': 'Statistic'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['statistics.StatisticGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'statistic_type': ('django.db.models.fields.CharField', [], {'max_length': '32'})
},
u'statistics.statisticgroup': {
'Meta': {'object_name': 'StatisticGroup'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'})
}
}
complete_apps = ['clinics']
``` |
[
{
"content": "Repeat the code exactly as the original, including blank lines:\n```python\n# Copyright 2021 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a ... | [
{
"content": "Repeat the code exactly as the original, including blank lines:\n<|memory_start|>```python\n# Copyright 2021 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# Y... | ```python
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Forward pass test for Transformer model refactoring."""
import numpy as np
import tensorflow as tf
from official.nlp.modeling import models
from official.nlp.transformer import metrics
from official.nlp.transformer import model_params
from official.nlp.transformer import transformer
def _count_params(layer, trainable_only=True):
"""Returns the count of all model parameters, or just trainable ones."""
if not trainable_only:
return layer.count_params()
else:
return int(
np.sum([
tf.keras.backend.count_params(p) for p in layer.trainable_weights
]))
def _create_model(params, is_train):
"""Creates transformer model."""
encdec_kwargs = dict(
num_layers=params["num_hidden_layers"],
num_attention_heads=params["num_heads"],
intermediate_size=params["filter_size"],
activation="relu",
dropout_rate=params["relu_dropout"],
attention_dropout_rate=params["attention_dropout"],
use_bias=False,
norm_first=True,
norm_epsilon=1e-6,
intermediate_dropout=params["relu_dropout"])
encoder_layer = models.TransformerEncoder(**encdec_kwargs)
decoder_layer = models.TransformerDecoder(**encdec_kwargs)
model_kwargs = dict(
vocab_size=params["vocab_size"],
embedding_width=params["hidden_size"],
dropout_rate=params["layer_postprocess_dropout"],
padded_decode=params["padded_decode"],
decode_max_length=params["decode_max_length"],
dtype=params["dtype"],
extra_decode_length=params["extra_decode_length"],
beam_size=params["beam_size"],
alpha=params["alpha"],
encoder_layer=encoder_layer,
decoder_layer=decoder_layer,
name="transformer_v2")
if is_train:
inputs = tf.keras.layers.Input((None,), dtype="int64", name="inputs")
targets = tf.keras.layers.Input((None,), dtype="int64", name="targets")
internal_model = models.Seq2SeqTransformer(**model_kwargs)
logits = internal_model(
dict(inputs=inputs, targets=targets), training=is_train)
vocab_size = params["vocab_size"]
label_smoothing = params["label_smoothing"]
if params["enable_metrics_in_training"]:
logits = metrics.MetricLayer(vocab_size)([logits, targets])
logits = tf.keras.layers.Lambda(
lambda x: x, name="logits", dtype=tf.float32)(
logits)
model = tf.keras.Model([inputs, targets], logits)
loss = metrics.transformer_loss(logits, targets, label_smoothing,
vocab_size)
model.add_loss(loss)
return model
batch_size = params["decode_batch_size"] if params["padded_decode"] else None
inputs = tf.keras.layers.Input((None,),
batch_size=batch_size,
dtype="int64",
name="inputs")
internal_model = models.Seq2SeqTransformer(**model_kwargs)
ret = internal_model(dict(inputs=inputs), training=is_train)
outputs, scores = ret["outputs"], ret["scores"]
return tf.keras.Model(inputs, [outputs, scores])
class TransformerForwardTest(tf.test.TestCase):
def setUp(self):
super(TransformerForwardTest, self).setUp()
self.params = params = model_params.TINY_PARAMS
params["batch_size"] = params["default_batch_size"] = 16
params["hidden_size"] = 12
params["num_hidden_layers"] = 3
params["filter_size"] = 14
params["num_heads"] = 2
params["vocab_size"] = 41
params["extra_decode_length"] = 0
params["beam_size"] = 3
params["dtype"] = tf.float32
params["layer_postprocess_dropout"] = 0.0
params["attention_dropout"] = 0.0
params["relu_dropout"] = 0.0
def test_forward_pass_train(self):
# Set input_len different from target_len
inputs = np.asarray([[5, 2, 1], [7, 5, 0], [1, 4, 0], [7, 5, 11]])
targets = np.asarray([[4, 3, 4, 0], [13, 19, 17, 8], [20, 14, 1, 2],
[5, 7, 3, 0]])
# src_model is the original model before refactored.
src_model = transformer.create_model(self.params, True)
src_num_weights = _count_params(src_model)
src_weights = src_model.get_weights()
src_model_output = src_model([inputs, targets], training=True)
# dest_model is the refactored model.
dest_model = _create_model(self.params, True)
dest_num_weights = _count_params(dest_model)
self.assertEqual(src_num_weights, dest_num_weights)
dest_model.set_weights(src_weights)
dest_model_output = dest_model([inputs, targets], training=True)
self.assertAllEqual(src_model_output, dest_model_output)
def test_forward_pass_not_train(self):
inputs = np.asarray([[5, 2, 1], [7, 5, 0], [1, 4, 0], [7, 5, 11]])
# src_model is the original model before refactored.
src_model = transformer.create_model(self.params, False)
src_num_weights = _count_params(src_model)
src_weights = src_model.get_weights()
src_model_output = src_model([inputs], training=False)
# dest_model is the refactored model.
dest_model = _create_model(self.params, False)
dest_num_weights = _count_params(dest_model)
self.assertEqual(src_num_weights, dest_num_weights)
dest_model.set_weights(src_weights)
dest_model_output = dest_model([inputs], training=False)
self.assertAllEqual(src_model_output[0], dest_model_output[0])
self.assertAllEqual(src_model_output[1], dest_model_output[1])
if __name__ == "__main__":
tf.test.main()
``` |
[
{
"content": "Write the code verbatim:\n```python\nfrom __future__ import division, absolute_import, print_function\n\n__all__ = ['atleast_1d', 'atleast_2d', 'atleast_3d', 'block', 'hstack',\n 'stack', 'vstack']\n\nimport functools\nimport operator\nimport warnings\n\nfrom . import numeric as _nx\nfro... | [
{
"content": "Write the code verbatim:\n<|memory_start|>```python\nfrom __future__ import division, absolute_import, print_function\n\n__all__ = ['atleast_1d', 'atleast_2d', 'atleast_3d', 'block', 'hstack',\n 'stack', 'vstack']\n\nimport functools\nimport operator\nimport warnings\n\nfrom . import num... | ```python
from __future__ import division, absolute_import, print_function
__all__ = ['atleast_1d', 'atleast_2d', 'atleast_3d', 'block', 'hstack',
'stack', 'vstack']
import functools
import operator
import warnings
from . import numeric as _nx
from . import overrides
from ._asarray import array, asanyarray
from .multiarray import normalize_axis_index
from . import fromnumeric as _from_nx
array_function_dispatch = functools.partial(
overrides.array_function_dispatch, module='numpy')
def _atleast_1d_dispatcher(*arys):
return arys
@array_function_dispatch(_atleast_1d_dispatcher)
def atleast_1d(*arys):
"""
Convert inputs to arrays with at least one dimension.
Scalar inputs are converted to 1-dimensional arrays, whilst
higher-dimensional inputs are preserved.
Parameters
----------
arys1, arys2, ... : array_like
One or more input arrays.
Returns
-------
ret : ndarray
An array, or list of arrays, each with ``a.ndim >= 1``.
Copies are made only if necessary.
See Also
--------
atleast_2d, atleast_3d
Examples
--------
>>> np.atleast_1d(1.0)
array([1.])
>>> x = np.arange(9.0).reshape(3,3)
>>> np.atleast_1d(x)
array([[0., 1., 2.],
[3., 4., 5.],
[6., 7., 8.]])
>>> np.atleast_1d(x) is x
True
>>> np.atleast_1d(1, [3, 4])
[array([1]), array([3, 4])]
"""
res = []
for ary in arys:
ary = asanyarray(ary)
if ary.ndim == 0:
result = ary.reshape(1)
else:
result = ary
res.append(result)
if len(res) == 1:
return res[0]
else:
return res
def _atleast_2d_dispatcher(*arys):
return arys
@array_function_dispatch(_atleast_2d_dispatcher)
def atleast_2d(*arys):
"""
View inputs as arrays with at least two dimensions.
Parameters
----------
arys1, arys2, ... : array_like
One or more array-like sequences. Non-array inputs are converted
to arrays. Arrays that already have two or more dimensions are
preserved.
Returns
-------
res, res2, ... : ndarray
An array, or list of arrays, each with ``a.ndim >= 2``.
Copies are avoided where possible, and views with two or more
dimensions are returned.
See Also
--------
atleast_1d, atleast_3d
Examples
--------
>>> np.atleast_2d(3.0)
array([[3.]])
>>> x = np.arange(3.0)
>>> np.atleast_2d(x)
array([[0., 1., 2.]])
>>> np.atleast_2d(x).base is x
True
>>> np.atleast_2d(1, [1, 2], [[1, 2]])
[array([[1]]), array([[1, 2]]), array([[1, 2]])]
"""
res = []
for ary in arys:
ary = asanyarray(ary)
if ary.ndim == 0:
result = ary.reshape(1, 1)
elif ary.ndim == 1:
result = ary[_nx.newaxis, :]
else:
result = ary
res.append(result)
if len(res) == 1:
return res[0]
else:
return res
def _atleast_3d_dispatcher(*arys):
return arys
@array_function_dispatch(_atleast_3d_dispatcher)
def atleast_3d(*arys):
"""
View inputs as arrays with at least three dimensions.
Parameters
----------
arys1, arys2, ... : array_like
One or more array-like sequences. Non-array inputs are converted to
arrays. Arrays that already have three or more dimensions are
preserved.
Returns
-------
res1, res2, ... : ndarray
An array, or list of arrays, each with ``a.ndim >= 3``. Copies are
avoided where possible, and views with three or more dimensions are
returned. For example, a 1-D array of shape ``(N,)`` becomes a view
of shape ``(1, N, 1)``, and a 2-D array of shape ``(M, N)`` becomes a
view of shape ``(M, N, 1)``.
See Also
--------
atleast_1d, atleast_2d
Examples
--------
>>> np.atleast_3d(3.0)
array([[[3.]]])
>>> x = np.arange(3.0)
>>> np.atleast_3d(x).shape
(1, 3, 1)
>>> x = np.arange(12.0).reshape(4,3)
>>> np.atleast_3d(x).shape
(4, 3, 1)
>>> np.atleast_3d(x).base is x.base # x is a reshape, so not base itself
True
>>> for arr in np.atleast_3d([1, 2], [[1, 2]], [[[1, 2]]]):
... print(arr, arr.shape) # doctest: +SKIP
...
[[[1]
[2]]] (1, 2, 1)
[[[1]
[2]]] (1, 2, 1)
[[[1 2]]] (1, 1, 2)
"""
res = []
for ary in arys:
ary = asanyarray(ary)
if ary.ndim == 0:
result = ary.reshape(1, 1, 1)
elif ary.ndim == 1:
result = ary[_nx.newaxis, :, _nx.newaxis]
elif ary.ndim == 2:
result = ary[:, :, _nx.newaxis]
else:
result = ary
res.append(result)
if len(res) == 1:
return res[0]
else:
return res
def _arrays_for_stack_dispatcher(arrays, stacklevel=4):
if not hasattr(arrays, '__getitem__') and hasattr(arrays, '__iter__'):
warnings.warn('arrays to stack must be passed as a "sequence" type '
'such as list or tuple. Support for non-sequence '
'iterables such as generators is deprecated as of '
'NumPy 1.16 and will raise an error in the future.',
FutureWarning, stacklevel=stacklevel)
return ()
return arrays
def _vhstack_dispatcher(tup):
return _arrays_for_stack_dispatcher(tup)
@array_function_dispatch(_vhstack_dispatcher)
def vstack(tup):
"""
Stack arrays in sequence vertically (row wise).
This is equivalent to concatenation along the first axis after 1-D arrays
of shape `(N,)` have been reshaped to `(1,N)`. Rebuilds arrays divided by
`vsplit`.
This function makes most sense for arrays with up to 3 dimensions. For
instance, for pixel-data with a height (first axis), width (second axis),
and r/g/b channels (third axis). The functions `concatenate`, `stack` and
`block` provide more general stacking and concatenation operations.
Parameters
----------
tup : sequence of ndarrays
The arrays must have the same shape along all but the first axis.
1-D arrays must have the same length.
Returns
-------
stacked : ndarray
The array formed by stacking the given arrays, will be at least 2-D.
See Also
--------
stack : Join a sequence of arrays along a new axis.
hstack : Stack arrays in sequence horizontally (column wise).
dstack : Stack arrays in sequence depth wise (along third dimension).
concatenate : Join a sequence of arrays along an existing axis.
vsplit : Split array into a list of multiple sub-arrays vertically.
block : Assemble arrays from blocks.
Examples
--------
>>> a = np.array([1, 2, 3])
>>> b = np.array([2, 3, 4])
>>> np.vstack((a,b))
array([[1, 2, 3],
[2, 3, 4]])
>>> a = np.array([[1], [2], [3]])
>>> b = np.array([[2], [3], [4]])
>>> np.vstack((a,b))
array([[1],
[2],
[3],
[2],
[3],
[4]])
"""
if not overrides.ARRAY_FUNCTION_ENABLED:
# raise warning if necessary
_arrays_for_stack_dispatcher(tup, stacklevel=2)
arrs = atleast_2d(*tup)
if not isinstance(arrs, list):
arrs = [arrs]
return _nx.concatenate(arrs, 0)
@array_function_dispatch(_vhstack_dispatcher)
def hstack(tup):
"""
Stack arrays in sequence horizontally (column wise).
This is equivalent to concatenation along the second axis, except for 1-D
arrays where it concatenates along the first axis. Rebuilds arrays divided
by `hsplit`.
This function makes most sense for arrays with up to 3 dimensions. For
instance, for pixel-data with a height (first axis), width (second axis),
and r/g/b channels (third axis). The functions `concatenate`, `stack` and
`block` provide more general stacking and concatenation operations.
Parameters
----------
tup : sequence of ndarrays
The arrays must have the same shape along all but the second axis,
except 1-D arrays which can be any length.
Returns
-------
stacked : ndarray
The array formed by stacking the given arrays.
See Also
--------
stack : Join a sequence of arrays along a new axis.
vstack : Stack arrays in sequence vertically (row wise).
dstack : Stack arrays in sequence depth wise (along third axis).
concatenate : Join a sequence of arrays along an existing axis.
hsplit : Split array along second axis.
block : Assemble arrays from blocks.
Examples
--------
>>> a = np.array((1,2,3))
>>> b = np.array((2,3,4))
>>> np.hstack((a,b))
array([1, 2, 3, 2, 3, 4])
>>> a = np.array([[1],[2],[3]])
>>> b = np.array([[2],[3],[4]])
>>> np.hstack((a,b))
array([[1, 2],
[2, 3],
[3, 4]])
"""
if not overrides.ARRAY_FUNCTION_ENABLED:
# raise warning if necessary
_arrays_for_stack_dispatcher(tup, stacklevel=2)
arrs = atleast_1d(*tup)
if not isinstance(arrs, list):
arrs = [arrs]
# As a special case, dimension 0 of 1-dimensional arrays is "horizontal"
if arrs and arrs[0].ndim == 1:
return _nx.concatenate(arrs, 0)
else:
return _nx.concatenate(arrs, 1)
def _stack_dispatcher(arrays, axis=None, out=None):
arrays = _arrays_for_stack_dispatcher(arrays, stacklevel=6)
if out is not None:
# optimize for the typical case where only arrays is provided
arrays = list(arrays)
arrays.append(out)
return arrays
@array_function_dispatch(_stack_dispatcher)
def stack(arrays, axis=0, out=None):
"""
Join a sequence of arrays along a new axis.
The ``axis`` parameter specifies the index of the new axis in the
dimensions of the result. For example, if ``axis=0`` it will be the first
dimension and if ``axis=-1`` it will be the last dimension.
.. versionadded:: 1.10.0
Parameters
----------
arrays : sequence of array_like
Each array must have the same shape.
axis : int, optional
The axis in the result array along which the input arrays are stacked.
out : ndarray, optional
If provided, the destination to place the result. The shape must be
correct, matching that of what stack would have returned if no
out argument were specified.
Returns
-------
stacked : ndarray
The stacked array has one more dimension than the input arrays.
See Also
--------
concatenate : Join a sequence of arrays along an existing axis.
split : Split array into a list of multiple sub-arrays of equal size.
block : Assemble arrays from blocks.
Examples
--------
>>> arrays = [np.random.randn(3, 4) for _ in range(10)]
>>> np.stack(arrays, axis=0).shape
(10, 3, 4)
>>> np.stack(arrays, axis=1).shape
(3, 10, 4)
>>> np.stack(arrays, axis=2).shape
(3, 4, 10)
>>> a = np.array([1, 2, 3])
>>> b = np.array([2, 3, 4])
>>> np.stack((a, b))
array([[1, 2, 3],
[2, 3, 4]])
>>> np.stack((a, b), axis=-1)
array([[1, 2],
[2, 3],
[3, 4]])
"""
if not overrides.ARRAY_FUNCTION_ENABLED:
# raise warning if necessary
_arrays_for_stack_dispatcher(arrays, stacklevel=2)
arrays = [asanyarray(arr) for arr in arrays]
if not arrays:
raise ValueError('need at least one array to stack')
shapes = {arr.shape for arr in arrays}
if len(shapes) != 1:
raise ValueError('all input arrays must have the same shape')
result_ndim = arrays[0].ndim + 1
axis = normalize_axis_index(axis, result_ndim)
sl = (slice(None),) * axis + (_nx.newaxis,)
expanded_arrays = [arr[sl] for arr in arrays]
return _nx.concatenate(expanded_arrays, axis=axis, out=out)
# Internal functions to eliminate the overhead of repeated dispatch in one of
# the two possible paths inside np.block.
# Use getattr to protect against __array_function__ being disabled.
_size = getattr(_from_nx.size, '__wrapped__', _from_nx.size)
_ndim = getattr(_from_nx.ndim, '__wrapped__', _from_nx.ndim)
_concatenate = getattr(_from_nx.concatenate, '__wrapped__', _from_nx.concatenate)
def _block_format_index(index):
"""
Convert a list of indices ``[0, 1, 2]`` into ``"arrays[0][1][2]"``.
"""
idx_str = ''.join('[{}]'.format(i) for i in index if i is not None)
return 'arrays' + idx_str
def _block_check_depths_match(arrays, parent_index=[]):
"""
Recursive function checking that the depths of nested lists in `arrays`
all match. Mismatch raises a ValueError as described in the block
docstring below.
The entire index (rather than just the depth) needs to be calculated
for each innermost list, in case an error needs to be raised, so that
the index of the offending list can be printed as part of the error.
Parameters
----------
arrays : nested list of arrays
The arrays to check
parent_index : list of int
The full index of `arrays` within the nested lists passed to
`_block_check_depths_match` at the top of the recursion.
Returns
-------
first_index : list of int
The full index of an element from the bottom of the nesting in
`arrays`. If any element at the bottom is an empty list, this will
refer to it, and the last index along the empty axis will be None.
max_arr_ndim : int
The maximum of the ndims of the arrays nested in `arrays`.
final_size: int
The number of elements in the final array. This is used the motivate
the choice of algorithm used using benchmarking wisdom.
"""
if type(arrays) is tuple:
# not strictly necessary, but saves us from:
# - more than one way to do things - no point treating tuples like
# lists
# - horribly confusing behaviour that results when tuples are
# treated like ndarray
raise TypeError(
'{} is a tuple. '
'Only lists can be used to arrange blocks, and np.block does '
'not allow implicit conversion from tuple to ndarray.'.format(
_block_format_index(parent_index)
)
)
elif type(arrays) is list and len(arrays) > 0:
idxs_ndims = (_block_check_depths_match(arr, parent_index + [i])
for i, arr in enumerate(arrays))
first_index, max_arr_ndim, final_size = next(idxs_ndims)
for index, ndim, size in idxs_ndims:
final_size += size
if ndim > max_arr_ndim:
max_arr_ndim = ndim
if len(index) != len(first_index):
raise ValueError(
"List depths are mismatched. First element was at depth "
"{}, but there is an element at depth {} ({})".format(
len(first_index),
len(index),
_block_format_index(index)
)
)
# propagate our flag that indicates an empty list at the bottom
if index[-1] is None:
first_index = index
return first_index, max_arr_ndim, final_size
elif type(arrays) is list and len(arrays) == 0:
# We've 'bottomed out' on an empty list
return parent_index + [None], 0, 0
else:
# We've 'bottomed out' - arrays is either a scalar or an array
size = _size(arrays)
return parent_index, _ndim(arrays), size
def _atleast_nd(a, ndim):
# Ensures `a` has at least `ndim` dimensions by prepending
# ones to `a.shape` as necessary
return array(a, ndmin=ndim, copy=False, subok=True)
def _accumulate(values):
# Helper function because Python 2.7 doesn't have
# itertools.accumulate
value = 0
accumulated = []
for v in values:
value += v
accumulated.append(value)
return accumulated
def _concatenate_shapes(shapes, axis):
"""Given array shapes, return the resulting shape and slices prefixes.
These help in nested concatation.
Returns
-------
shape: tuple of int
This tuple satisfies:
```
shape, _ = _concatenate_shapes([arr.shape for shape in arrs], axis)
shape == concatenate(arrs, axis).shape
```
slice_prefixes: tuple of (slice(start, end), )
For a list of arrays being concatenated, this returns the slice
in the larger array at axis that needs to be sliced into.
For example, the following holds:
```
ret = concatenate([a, b, c], axis)
_, (sl_a, sl_b, sl_c) = concatenate_slices([a, b, c], axis)
ret[(slice(None),) * axis + sl_a] == a
ret[(slice(None),) * axis + sl_b] == b
ret[(slice(None),) * axis + sl_c] == c
```
These are called slice prefixes since they are used in the recursive
blocking algorithm to compute the left-most slices during the
recursion. Therefore, they must be prepended to rest of the slice
that was computed deeper in the recursion.
These are returned as tuples to ensure that they can quickly be added
to existing slice tuple without creating a new tuple every time.
"""
# Cache a result that will be reused.
shape_at_axis = [shape[axis] for shape in shapes]
# Take a shape, any shape
first_shape = shapes[0]
first_shape_pre = first_shape[:axis]
first_shape_post = first_shape[axis+1:]
if any(shape[:axis] != first_shape_pre or
shape[axis+1:] != first_shape_post for shape in shapes):
raise ValueError(
'Mismatched array shapes in block along axis {}.'.format(axis))
shape = (first_shape_pre + (sum(shape_at_axis),) + first_shape[axis+1:])
offsets_at_axis = _accumulate(shape_at_axis)
slice_prefixes = [(slice(start, end),)
for start, end in zip([0] + offsets_at_axis,
offsets_at_axis)]
return shape, slice_prefixes
def _block_info_recursion(arrays, max_depth, result_ndim, depth=0):
"""
Returns the shape of the final array, along with a list
of slices and a list of arrays that can be used for assignment inside the
new array
Parameters
----------
arrays : nested list of arrays
The arrays to check
max_depth : list of int
The number of nested lists
result_ndim: int
The number of dimensions in thefinal array.
Returns
-------
shape : tuple of int
The shape that the final array will take on.
slices: list of tuple of slices
The slices into the full array required for assignment. These are
required to be prepended with ``(Ellipsis, )`` to obtain to correct
final index.
arrays: list of ndarray
The data to assign to each slice of the full array
"""
if depth < max_depth:
shapes, slices, arrays = zip(
*[_block_info_recursion(arr, max_depth, result_ndim, depth+1)
for arr in arrays])
axis = result_ndim - max_depth + depth
shape, slice_prefixes = _concatenate_shapes(shapes, axis)
# Prepend the slice prefix and flatten the slices
slices = [slice_prefix + the_slice
for slice_prefix, inner_slices in zip(slice_prefixes, slices)
for the_slice in inner_slices]
# Flatten the array list
arrays = functools.reduce(operator.add, arrays)
return shape, slices, arrays
else:
# We've 'bottomed out' - arrays is either a scalar or an array
# type(arrays) is not list
# Return the slice and the array inside a list to be consistent with
# the recursive case.
arr = _atleast_nd(arrays, result_ndim)
return arr.shape, [()], [arr]
def _block(arrays, max_depth, result_ndim, depth=0):
"""
Internal implementation of block based on repeated concatenation.
`arrays` is the argument passed to
block. `max_depth` is the depth of nested lists within `arrays` and
`result_ndim` is the greatest of the dimensions of the arrays in
`arrays` and the depth of the lists in `arrays` (see block docstring
for details).
"""
if depth < max_depth:
arrs = [_block(arr, max_depth, result_ndim, depth+1)
for arr in arrays]
return _concatenate(arrs, axis=-(max_depth-depth))
else:
# We've 'bottomed out' - arrays is either a scalar or an array
# type(arrays) is not list
return _atleast_nd(arrays, result_ndim)
def _block_dispatcher(arrays):
# Use type(...) is list to match the behavior of np.block(), which special
# cases list specifically rather than allowing for generic iterables or
# tuple. Also, we know that list.__array_function__ will never exist.
if type(arrays) is list:
for subarrays in arrays:
for subarray in _block_dispatcher(subarrays):
yield subarray
else:
yield arrays
@array_function_dispatch(_block_dispatcher)
def block(arrays):
"""
Assemble an nd-array from nested lists of blocks.
Blocks in the innermost lists are concatenated (see `concatenate`) along
the last dimension (-1), then these are concatenated along the
second-last dimension (-2), and so on until the outermost list is reached.
Blocks can be of any dimension, but will not be broadcasted using the normal
rules. Instead, leading axes of size 1 are inserted, to make ``block.ndim``
the same for all blocks. This is primarily useful for working with scalars,
and means that code like ``np.block([v, 1])`` is valid, where
``v.ndim == 1``.
When the nested list is two levels deep, this allows block matrices to be
constructed from their components.
.. versionadded:: 1.13.0
Parameters
----------
arrays : nested list of array_like or scalars (but not tuples)
If passed a single ndarray or scalar (a nested list of depth 0), this
is returned unmodified (and not copied).
Elements shapes must match along the appropriate axes (without
broadcasting), but leading 1s will be prepended to the shape as
necessary to make the dimensions match.
Returns
-------
block_array : ndarray
The array assembled from the given blocks.
The dimensionality of the output is equal to the greatest of:
* the dimensionality of all the inputs
* the depth to which the input list is nested
Raises
------
ValueError
* If list depths are mismatched - for instance, ``[[a, b], c]`` is
illegal, and should be spelt ``[[a, b], [c]]``
* If lists are empty - for instance, ``[[a, b], []]``
See Also
--------
concatenate : Join a sequence of arrays together.
stack : Stack arrays in sequence along a new dimension.
hstack : Stack arrays in sequence horizontally (column wise).
vstack : Stack arrays in sequence vertically (row wise).
dstack : Stack arrays in sequence depth wise (along third dimension).
vsplit : Split array into a list of multiple sub-arrays vertically.
Notes
-----
When called with only scalars, ``np.block`` is equivalent to an ndarray
call. So ``np.block([[1, 2], [3, 4]])`` is equivalent to
``np.array([[1, 2], [3, 4]])``.
This function does not enforce that the blocks lie on a fixed grid.
``np.block([[a, b], [c, d]])`` is not restricted to arrays of the form::
AAAbb
AAAbb
cccDD
But is also allowed to produce, for some ``a, b, c, d``::
AAAbb
AAAbb
cDDDD
Since concatenation happens along the last axis first, `block` is _not_
capable of producing the following directly::
AAAbb
cccbb
cccDD
Matlab's "square bracket stacking", ``[A, B, ...; p, q, ...]``, is
equivalent to ``np.block([[A, B, ...], [p, q, ...]])``.
Examples
--------
The most common use of this function is to build a block matrix
>>> A = np.eye(2) * 2
>>> B = np.eye(3) * 3
>>> np.block([
... [A, np.zeros((2, 3))],
... [np.ones((3, 2)), B ]
... ])
array([[2., 0., 0., 0., 0.],
[0., 2., 0., 0., 0.],
[1., 1., 3., 0., 0.],
[1., 1., 0., 3., 0.],
[1., 1., 0., 0., 3.]])
With a list of depth 1, `block` can be used as `hstack`
>>> np.block([1, 2, 3]) # hstack([1, 2, 3])
array([1, 2, 3])
>>> a = np.array([1, 2, 3])
>>> b = np.array([2, 3, 4])
>>> np.block([a, b, 10]) # hstack([a, b, 10])
array([ 1, 2, 3, 2, 3, 4, 10])
>>> A = np.ones((2, 2), int)
>>> B = 2 * A
>>> np.block([A, B]) # hstack([A, B])
array([[1, 1, 2, 2],
[1, 1, 2, 2]])
With a list of depth 2, `block` can be used in place of `vstack`:
>>> a = np.array([1, 2, 3])
>>> b = np.array([2, 3, 4])
>>> np.block([[a], [b]]) # vstack([a, b])
array([[1, 2, 3],
[2, 3, 4]])
>>> A = np.ones((2, 2), int)
>>> B = 2 * A
>>> np.block([[A], [B]]) # vstack([A, B])
array([[1, 1],
[1, 1],
[2, 2],
[2, 2]])
It can also be used in places of `atleast_1d` and `atleast_2d`
>>> a = np.array(0)
>>> b = np.array([1])
>>> np.block([a]) # atleast_1d(a)
array([0])
>>> np.block([b]) # atleast_1d(b)
array([1])
>>> np.block([[a]]) # atleast_2d(a)
array([[0]])
>>> np.block([[b]]) # atleast_2d(b)
array([[1]])
"""
arrays, list_ndim, result_ndim, final_size = _block_setup(arrays)
# It was found through benchmarking that making an array of final size
# around 256x256 was faster by straight concatenation on a
# i7-7700HQ processor and dual channel ram 2400MHz.
# It didn't seem to matter heavily on the dtype used.
#
# A 2D array using repeated concatenation requires 2 copies of the array.
#
# The fastest algorithm will depend on the ratio of CPU power to memory
# speed.
# One can monitor the results of the benchmark
# https://pv.github.io/numpy-bench/#bench_shape_base.Block2D.time_block2d
# to tune this parameter until a C version of the `_block_info_recursion`
# algorithm is implemented which would likely be faster than the python
# version.
if list_ndim * final_size > (2 * 512 * 512):
return _block_slicing(arrays, list_ndim, result_ndim)
else:
return _block_concatenate(arrays, list_ndim, result_ndim)
# These helper functions are mostly used for testing.
# They allow us to write tests that directly call `_block_slicing`
# or `_block_concatenate` without blocking large arrays to force the wisdom
# to trigger the desired path.
def _block_setup(arrays):
"""
Returns
(`arrays`, list_ndim, result_ndim, final_size)
"""
bottom_index, arr_ndim, final_size = _block_check_depths_match(arrays)
list_ndim = len(bottom_index)
if bottom_index and bottom_index[-1] is None:
raise ValueError(
'List at {} cannot be empty'.format(
_block_format_index(bottom_index)
)
)
result_ndim = max(arr_ndim, list_ndim)
return arrays, list_ndim, result_ndim, final_size
def _block_slicing(arrays, list_ndim, result_ndim):
shape, slices, arrays = _block_info_recursion(
arrays, list_ndim, result_ndim)
dtype = _nx.result_type(*[arr.dtype for arr in arrays])
# Test preferring F only in the case that all input arrays are F
F_order = all(arr.flags['F_CONTIGUOUS'] for arr in arrays)
C_order = all(arr.flags['C_CONTIGUOUS'] for arr in arrays)
order = 'F' if F_order and not C_order else 'C'
result = _nx.empty(shape=shape, dtype=dtype, order=order)
# Note: In a c implementation, the function
# PyArray_CreateMultiSortedStridePerm could be used for more advanced
# guessing of the desired order.
for the_slice, arr in zip(slices, arrays):
result[(Ellipsis,) + the_slice] = arr
return result
def _block_concatenate(arrays, list_ndim, result_ndim):
result = _block(arrays, list_ndim, result_ndim)
if list_ndim == 0:
# Catch an edge case where _block returns a view because
# `arrays` is a single numpy array and not a list of numpy arrays.
# This might copy scalars or lists twice, but this isn't a likely
# usecase for those interested in performance
result = result.copy()
return result
``` |
[
{
"content": "Write the code verbatim:\n```python\nimport wx, os\nfrom functools import partial\n\nfrom fr0stlib.decorators import *\n\ndef LoadIcon(*path):\n # Check for an icons dir in app base path first for development\n filename = os.path.join(wx.GetApp().AppBaseDir, 'icons', *path) + '.png'\n\n i... | [
{
"content": "Write the code verbatim:\n<|memory_start|>```python\nimport wx, os\nfrom functools import partial\n\nfrom fr0stlib.decorators import *\n\ndef LoadIcon(*path):\n # Check for an icons dir in app base path first for development\n filename = os.path.join(wx.GetApp().AppBaseDir, 'icons', *path) +... | ```python
import wx, os
from functools import partial
from fr0stlib.decorators import *
def LoadIcon(*path):
# Check for an icons dir in app base path first for development
filename = os.path.join(wx.GetApp().AppBaseDir, 'icons', *path) + '.png'
if not os.path.exists(filename):
# Not there, check install path
filename = os.path.join(wx.GetApp().IconsDir, *path) + '.png'
img = wx.Image(filename, type=wx.BITMAP_TYPE_PNG)
img.Rescale(16,16)
return wx.BitmapFromImage(img)
def Box(self, name, *a, **k):
box = wx.StaticBoxSizer(wx.StaticBox(self, -1, name),
k.get('orient', wx.VERTICAL))
box.AddMany(a)
return box
def MakeTCs(self, *a, **k):
fgs = wx.FlexGridSizer(99, 2, 1, 1)
tcs = {}
for i, default in a:
tc = NumberTextCtrl(self, **k)
tc.SetFloat(default)
tcs[i] = tc
fgs.Add(wx.StaticText(self, -1, i.replace("_", " ").title()),
0, wx.ALIGN_CENTER_VERTICAL|wx.ALL, 5)
fgs.Add(tc, 0, wx.ALIGN_RIGHT, 5)
return fgs, tcs
class MyChoice(wx.Choice):
def __init__(self, parent, name, d, initial):
self.d = d
choices = sorted(d.iteritems())
wx.Choice.__init__(self, parent, -1, choices=[k for k,_ in choices])
self.SetSelection([v for _,v in choices].index(initial))
def GetFloat(self):
return self.d[self.GetStringSelection()]
class SizePanel(wx.Panel):
def __init__(self, parent, callback=lambda: None):
self.parent = parent
self.keepratio = True
self.callback = callback
wx.Panel.__init__(self, parent, -1)
fgs, tcs = MakeTCs(self, ("width", 512.), ("height", 384.), low=0,
callback=self.SizeCallback)
self.__dict__.update(tcs)
for i in (self.width, self.height):
i.MakeIntOnly()
i.low = 1
ratio = wx.CheckBox(self, -1, "Keep Ratio")
ratio.SetValue(True)
ratio.Bind(wx.EVT_CHECKBOX, self.OnRatio)
box = Box(self, "Size", fgs, ratio)
self.SetSizer(box)
box.Fit(self)
def GetInts(self):
return [int(tc.GetFloat()) for tc in (self.width, self.height)]
def UpdateSize(self, size):
width, height = (float(i) for i in size)
self.width.SetFloat(width)
self.height.SetFloat(height)
self.ratio = width / height
def OnRatio(self, e):
self.keepratio = e.GetInt()
def SizeCallback(self, tc, tempsave=None):
if self.keepratio:
v = tc.GetFloat()
tc.SetInt(v)
if tc == self.width:
w, h = v, v / self.ratio
self.height.SetInt(h)
else:
w, h = v * self.ratio, v
self.width.SetInt(w)
else:
self.ratio = float(self.width.GetFloat()) / self.height.GetFloat()
self.callback()
class NumberTextCtrl(wx.TextCtrl):
low = None
high = None
@BindEvents
def __init__(self, parent, low=None, high=None, callback=None):
self.parent = parent
# Size is set to ubuntu default (75,27), maybe make it 75x21 in win
wx.TextCtrl.__init__(self,parent,-1, size=(75,27))
if (low,high) != (None,None):
self.SetAllowedRange(low, high)
if callback:
self.callback = partial(callback, self)
else:
self.callback = lambda tempsave=None: None
self.HasChanged = False
self.SetFloat(0.0)
def GetFloat(self):
return float(self.GetValue() or "0")
def SetFloat(self, v):
v = self.Checkrange(float(v))
self._value = v
string = ("%.6f" %v).rstrip("0")
if string.endswith("."):
string += "0" # Avoid values like '0.' or '1.'
self.SetValue(string)
def GetInt(self):
return int(self.GetValue() or "0")
def SetInt(self, v):
v = self.Checkrange(int(v))
self._value = v
self.SetValue(str(v))
def MakeIntOnly(self):
self.SetInt(self.GetFloat())
self.SetFloat, self.GetFloat = self.SetInt, self.GetInt
def SetAllowedRange(self, low=None, high=None):
self.low = low
self.high = high
def Checkrange(self, v):
if self.low is not None and v < self.low:
return self.low
elif self.high is not None and v > self.high:
return self.high
return v
@Bind(wx.EVT_MOUSEWHEEL)
def OnMouseWheel(self, evt):
if self.SetFloat == self.SetInt:
return
if evt.CmdDown():
if evt.AltDown():
delta = 0.01
else:
delta = 0.1
elif evt.AltDown():
delta = 0.001
else:
evt.Skip()
return
self.SetFocus() # Makes sure OnKeyUp gets called.
v = self._value + delta * evt.GetWheelRotation() / evt.GetWheelDelta()
self.SetFloat(v)
self.callback(tempsave=False)
self.HasChanged = True
@Bind(wx.EVT_KEY_UP)
def OnKeyUp(self, e):
# TODO: This code is duplicated with the one found in xformeditor.
key = e.GetKeyCode()
if (key == wx.WXK_CONTROL and not e.AltDown()) or (
key == wx.WXK_ALT and not e.ControlDown()):
if self.HasChanged:
if hasattr(self.parent, 'parent') and hasattr(self.parent.parent, 'TreePanel'):
self.parent.parent.TreePanel.TempSave()
self.HasChanged = False
@Bind(wx.EVT_CHAR)
def OnChar(self, event):
key = event.GetKeyCode()
if key in [wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER]:
self.OnKillFocus(None)
elif key < wx.WXK_SPACE or key == wx.WXK_DELETE or key > 255 or key == wx.WXK_TAB:
event.Skip()
elif chr(key) in "0123456789.-":
event.Skip()
else:
# not calling Skip() eats the event
pass #wx.Bell()
@Bind(wx.EVT_KILL_FOCUS)
def OnKillFocus(self,event):
# cmp done with strings because equal floats can compare differently.
if str(self._value) != self.GetValue():
try:
v = self.GetFloat() # Can raise ValueError
except ValueError:
self.SetFloat(self._value)
return
self.SetFloat(v)
self.callback()
class MultiSliderMixin(object):
"""Class to dynamically create and control sliders."""
_new = None
_changed = False
def __init__(self, *a, **k):
super(MultiSliderMixin, self).__init__(*a, **k)
self.sliders = {}
self.Bind(wx.EVT_IDLE, self.OnIdle)
def MakeSlider(self, name, init, low, high, strictrange=True):
"""Programatically builds stuff."""
tc = NumberTextCtrl(self, callback=self.__callback)
if strictrange:
tc.SetAllowedRange(low, high)
slider = wx.Slider(self, -1, init*100, low*100, high*100,
style=wx.SL_HORIZONTAL
| wx.SL_SELRANGE
)
self.sliders[name] = slider, tc
slider.Bind(wx.EVT_SLIDER, partial(self.OnSlider, tc=tc))
## slider.Bind(wx.EVT_LEFT_DOWN, self.OnSliderDown)
slider.Bind(wx.EVT_LEFT_UP, self.OnSliderUp)
name = name.replace("_", " ").title()
return Box(self, name, tc, (slider, wx.EXPAND), orient=wx.HORIZONTAL)
def UpdateSlider(self, name, val):
slider, tc = self.sliders[name]
slider.SetValue(int(val*100))
tc.SetFloat(val)
def IterSliders(self):
for name, (_, tc) in self.sliders.iteritems():
yield name, tc.GetFloat()
def OnSlider(self, e, tc):
val = e.GetInt()/100.
# Make sure _new is only set when there are actual changes.
if val != tc._value:
self._new = True
tc.SetFloat(str(val))
e.Skip()
## def OnSliderDown(self, e):
## e.Skip()
def OnSliderUp(self, e):
if self._changed:
self.parent.TreePanel.TempSave()
self._changed = False
e.Skip()
def OnIdle(self, e):
if self._new is not None:
self.UpdateFlame()
self._new = None
self._changed = True
def __callback(self, tc, tempsave=True):
self.UpdateFlame()
if tempsave:
self.parent.TreePanel.TempSave()
def UpdateFlame(self):
Abstract
def UpdateView(self):
Abstract
``` |
[
{
"content": "Here is the script:\n```python\n# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models, migrations\nimport jsonfield.fields\nimport django_docker_processes.models\nfrom django.conf import settings\n\n\nclass Migration(migrations.Migration):\n\n dependen... | [
{
"content": "Here is the script:\n<|memory_start|>```python\n# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models, migrations\nimport jsonfield.fields\nimport django_docker_processes.models\nfrom django.conf import settings\n\n\nclass Migration(migrations.Migration):... | ```python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import jsonfield.fields
import django_docker_processes.models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='ContainerOverrides',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=256)),
('command', models.TextField(null=True, blank=True)),
('working_dir', models.CharField(max_length=65536, null=True, blank=True)),
('user', models.CharField(max_length=65536, null=True, blank=True)),
('entrypoint', models.CharField(max_length=65536, null=True, blank=True)),
('privileged', models.BooleanField(default=False)),
('lxc_conf', models.CharField(max_length=65536, null=True, blank=True)),
('memory_limit', models.IntegerField(default=0, help_text=b'megabytes')),
('cpu_shares', models.IntegerField(help_text=b'CPU Shares', null=True, blank=True)),
('dns', jsonfield.fields.JSONField(help_text=b'JSON list of alternate DNS servers', null=True, blank=True)),
('net', models.CharField(blank=True, max_length=8, null=True, help_text=b'Network settings - leave blank for default behavior', choices=[(b'bridge', b'bridge'), (b'none', b'none'), (b'host', b'host')])),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='DockerEnvVar',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=1024)),
('value', models.TextField()),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='DockerLink',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('link_name', models.CharField(max_length=256)),
('docker_overrides', models.ForeignKey(blank=True, to='django_docker_processes.ContainerOverrides', help_text=b'Overrides for the container to run', null=True)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='DockerPort',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('host', models.CharField(max_length=65536)),
('container', models.CharField(max_length=65536)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='DockerProcess',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('container_id', models.CharField(max_length=128, null=True, blank=True)),
('token', models.CharField(default=django_docker_processes.models.docker_process_token, unique=True, max_length=128, db_index=True)),
('logs', models.TextField(null=True, blank=True)),
('finished', models.BooleanField(default=False)),
('error', models.BooleanField(default=False)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='DockerProfile',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(unique=True, max_length=1024, db_index=True)),
('git_repository', models.CharField(max_length=16384)),
('git_use_submodules', models.BooleanField(default=False)),
('git_username', models.CharField(max_length=256, null=True, blank=True)),
('git_password', models.CharField(max_length=64, null=True, blank=True)),
('commit_id', models.CharField(max_length=64, null=True, blank=True)),
('branch', models.CharField(default=b'master', max_length=1024, null=True, blank=True)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='DockerVolume',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('host', models.CharField(max_length=65536, null=True, blank=True)),
('container', models.CharField(max_length=65536)),
('readonly', models.BooleanField(default=False)),
('docker_profile', models.ForeignKey(to='django_docker_processes.DockerProfile')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='OverrideEnvVar',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=1024)),
('value', models.TextField()),
('container_overrides', models.ForeignKey(to='django_docker_processes.ContainerOverrides')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='OverrideLink',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('link_name', models.CharField(max_length=256)),
('container_overrides', models.ForeignKey(to='django_docker_processes.ContainerOverrides')),
('docker_profile_from', models.ForeignKey(help_text=b'This container must be started and running for the target to run', to='django_docker_processes.DockerProfile')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='OverridePort',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('host', models.CharField(max_length=65536)),
('container', models.CharField(max_length=65536)),
('container_overrides', models.ForeignKey(to='django_docker_processes.ContainerOverrides')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='OverrideVolume',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('host', models.CharField(max_length=65536)),
('container', models.CharField(max_length=65536)),
('container_overrides', models.ForeignKey(to='django_docker_processes.ContainerOverrides')),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='dockerprocess',
name='profile',
field=models.ForeignKey(to='django_docker_processes.DockerProfile'),
preserve_default=True,
),
migrations.AddField(
model_name='dockerprocess',
name='user',
field=models.ForeignKey(blank=True, to=settings.AUTH_USER_MODEL, null=True),
preserve_default=True,
),
migrations.AddField(
model_name='dockerport',
name='docker_profile',
field=models.ForeignKey(to='django_docker_processes.DockerProfile'),
preserve_default=True,
),
migrations.AddField(
model_name='dockerlink',
name='docker_profile',
field=models.ForeignKey(help_text=b'This is the "target" container. It will receive information about\nthe "from" container as an environment var', to='django_docker_processes.DockerProfile'),
preserve_default=True,
),
migrations.AddField(
model_name='dockerlink',
name='docker_profile_from',
field=models.ForeignKey(related_name='profile_link_to', to='django_docker_processes.DockerProfile', help_text=b'This container must be started and running for the target to run'),
preserve_default=True,
),
migrations.AddField(
model_name='dockerenvvar',
name='docker_profile',
field=models.ForeignKey(to='django_docker_processes.DockerProfile'),
preserve_default=True,
),
migrations.AddField(
model_name='containeroverrides',
name='docker_profile',
field=models.ForeignKey(to='django_docker_processes.DockerProfile'),
preserve_default=True,
),
]
``` |
[
{
"content": "```python\n#!/usr/bin/env python\n\n# copy pasta from https://github.com/matthiasplappert/keras-rl/blob/master/examples/dqn_cartpole.py\n# with some extra arg parsing\n\nimport numpy as np\nimport gym\n\nfrom keras.models import Sequential\nfrom keras.layers import Dense, Activation, Flatten\nfrom... | [
{
"content": "<|memory_start|>```python\n#!/usr/bin/env python\n\n# copy pasta from https://github.com/matthiasplappert/keras-rl/blob/master/examples/dqn_cartpole.py\n# with some extra arg parsing\n\nimport numpy as np\nimport gym\n\nfrom keras.models import Sequential\nfrom keras.layers import Dense, Activatio... | ```python
#!/usr/bin/env python
# copy pasta from https://github.com/matthiasplappert/keras-rl/blob/master/examples/dqn_cartpole.py
# with some extra arg parsing
import numpy as np
import gym
from keras.models import Sequential
from keras.layers import Dense, Activation, Flatten
from keras.optimizers import Adam
from rl.agents.dqn import DQNAgent
from rl.policy import BoltzmannQPolicy
from rl.memory import SequentialMemory
import bullet_cartpole
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--num-train', type=int, default=100)
parser.add_argument('--num-eval', type=int, default=0)
parser.add_argument('--load-file', type=str, default=None)
parser.add_argument('--save-file', type=str, default=None)
bullet_cartpole.add_opts(parser)
opts = parser.parse_args()
print "OPTS", opts
ENV_NAME = 'BulletCartpole'
# Get the environment and extract the number of actions.
env = bullet_cartpole.BulletCartpole(opts=opts, discrete_actions=True)
nb_actions = env.action_space.n
# Next, we build a very simple model.
model = Sequential()
model.add(Flatten(input_shape=(1,) + env.observation_space.shape))
model.add(Dense(32))
model.add(Activation('tanh'))
#model.add(Dense(16))
#model.add(Activation('relu'))
#model.add(Dense(16))
#model.add(Activation('relu'))
model.add(Dense(nb_actions))
model.add(Activation('linear'))
print(model.summary())
memory = SequentialMemory(limit=50000)
policy = BoltzmannQPolicy()
dqn = DQNAgent(model=model, nb_actions=nb_actions, memory=memory, nb_steps_warmup=10,
target_model_update=1e-2, policy=policy)
dqn.compile(Adam(lr=1e-3), metrics=['mae'])
if opts.load_file is not None:
print "loading weights from from [%s]" % opts.load_file
dqn.load_weights(opts.load_file)
# Okay, now it's time to learn something! We visualize the training here for show, but this
# slows down training quite a lot. You can always safely abort the training prematurely using
# Ctrl + C.
dqn.fit(env, nb_steps=opts.num_train, visualize=True, verbose=2)
# After training is done, we save the final weights.
if opts.save_file is not None:
print "saving weights to [%s]" % opts.save_file
dqn.save_weights(opts.save_file, overwrite=True)
# Finally, evaluate our algorithm for 5 episodes.
dqn.test(env, nb_episodes=opts.num_eval, visualize=True)
``` |
[
{
"content": "Return the code unaltered:\n```python\n#!/usr/bin/env python\n# coding=utf8\n\n\"\"\"\nAdd suite options for overrides and control-suite to DB\n\n@contact: Debian FTP Master <ftpmaster@debian.org>\n@copyright: 2011 Mark Hymers <mhy@debian.org>\n@license: GNU General Public License version 2 or lat... | [
{
"content": "Return the code unaltered:\n<|memory_start|>```python\n#!/usr/bin/env python\n# coding=utf8\n\n\"\"\"\nAdd suite options for overrides and control-suite to DB\n\n@contact: Debian FTP Master <ftpmaster@debian.org>\n@copyright: 2011 Mark Hymers <mhy@debian.org>\n@license: GNU General Public License ... | ```python
#!/usr/bin/env python
# coding=utf8
"""
Add suite options for overrides and control-suite to DB
@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2011 Mark Hymers <mhy@debian.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
################################################################################
import psycopg2
from daklib.dak_exceptions import DBUpdateError
from daklib.config import Config
################################################################################
def do_update(self):
"""
Add suite options for overrides and control-suite to DB
"""
print __doc__
try:
cnf = Config()
c = self.db.cursor()
c.execute("ALTER TABLE suite ADD COLUMN overrideprocess BOOLEAN NOT NULL DEFAULT FALSE")
c.execute("COMMENT ON COLUMN suite.overrideprocess IS %s", ['If true, check-overrides will process the suite by default'])
c.execute("ALTER TABLE suite ADD COLUMN overrideorigin TEXT DEFAULT NULL")
c.execute("COMMENT ON COLUMN suite.overrideprocess IS %s", ['If NOT NULL, check-overrides will take missing overrides from the named suite'])
# Migrate config file values into database
if "Check-Overrides::OverrideSuites" in cnf:
for suitename in cnf.subtree("Check-Overrides::OverrideSuites").list():
if cnf.get("Check-Overrides::OverrideSuites::%s::Process" % suitename, "0") == "1":
print "Marking %s to have overrides processed automatically" % suitename.lower()
c.execute("UPDATE suite SET overrideprocess = TRUE WHERE suite_name = %s", [suitename.lower()])
originsuite = cnf.get("Check-Overrides::OverrideSuites::%s::OriginSuite" % suitename, '')
if originsuite != '':
print "Setting %s to use %s as origin for overrides" % (suitename.lower(), originsuite.lower())
c.execute("UPDATE suite SET overrideorigin = %s WHERE suite_name = %s", [originsuite.lower(), suitename.lower()])
c.execute("ALTER TABLE suite ADD COLUMN allowcsset BOOLEAN NOT NULL DEFAULT FALSE")
c.execute("COMMENT ON COLUMN suite.allowcsset IS %s", ['Allow control-suite to be used with the --set option without forcing'])
# Import historical hard-coded values
c.execute("UPDATE suite SET allowcsset = TRUE WHERE suite_name IN ('testing', 'squeeze-updates')")
c.execute("UPDATE config SET value = '70' WHERE name = 'db_revision'")
self.db.commit()
except psycopg2.ProgrammingError as msg:
self.db.rollback()
raise DBUpdateError('Unable to apply sick update 70, rollback issued. Error message : %s' % (str(msg)))
``` |
[
{
"content": "Repeat the code precisely as written (spacing intact):\n```python\n# coding:utf-8\r\nfrom importlib import import_module\r\nfrom django.http import HttpResponse\r\nfrom . import settings as USettings\r\nimport os\r\nimport json\r\nfrom django.views.decorators.csrf import csrf_exempt\r\nimport date... | [
{
"content": "Repeat the code precisely as written (spacing intact):\n<|memory_start|>```python\n# coding:utf-8\r\nfrom importlib import import_module\r\nfrom django.http import HttpResponse\r\nfrom . import settings as USettings\r\nimport os\r\nimport json\r\nfrom django.views.decorators.csrf import csrf_exemp... | ```python
# coding:utf-8
from importlib import import_module
from django.http import HttpResponse
from . import settings as USettings
import os
import json
from django.views.decorators.csrf import csrf_exempt
import datetime
import random
import urllib
from django.utils import six
if six.PY3:
long = int
def get_path_format_vars():
return {
"year": datetime.datetime.now().strftime("%Y"),
"month": datetime.datetime.now().strftime("%m"),
"day": datetime.datetime.now().strftime("%d"),
"date": datetime.datetime.now().strftime("%Y%m%d"),
"time": datetime.datetime.now().strftime("%H%M%S"),
"datetime": datetime.datetime.now().strftime("%Y%m%d%H%M%S"),
"rnd": random.randrange(100, 999)
}
# 保存上传的文件
def save_upload_file(PostFile, FilePath):
try:
f = open(FilePath, 'wb')
for chunk in PostFile.chunks():
f.write(chunk)
except Exception as E:
f.close()
return u"写入文件错误:" + E.message
f.close()
return u"SUCCESS"
@csrf_exempt
def get_ueditor_settings(request):
return HttpResponse(json.dumps(USettings.UEditorUploadSettings, ensure_ascii=False), content_type="application/javascript")
@csrf_exempt
def get_ueditor_controller(request):
"""获取ueditor的后端URL地址 """
action = request.GET.get("action", "")
reponseAction = {
"config": get_ueditor_settings,
"uploadimage": UploadFile,
"uploadscrawl": UploadFile,
"uploadvideo": UploadFile,
"uploadfile": UploadFile,
"catchimage": catcher_remote_image,
"listimage": list_files,
"listfile": list_files
}
return reponseAction[action](request)
@csrf_exempt
def list_files(request):
"""列出文件"""
if request.method != "GET":
return HttpResponse(json.dumps(u"{'state:'ERROR'}"), content_type="application/javascript")
# 取得动作
action = request.GET.get("action", "listimage")
allowFiles = {
"listfile": USettings.UEditorUploadSettings.get("fileManagerAllowFiles", []),
"listimage": USettings.UEditorUploadSettings.get("imageManagerAllowFiles", [])
}
listSize = {
"listfile": USettings.UEditorUploadSettings.get("fileManagerListSize", ""),
"listimage": USettings.UEditorUploadSettings.get("imageManagerListSize", "")
}
listpath = {
"listfile": USettings.UEditorUploadSettings.get("fileManagerListPath", ""),
"listimage": USettings.UEditorUploadSettings.get("imageManagerListPath", "")
}
# 取得参数
list_size = long(request.GET.get("size", listSize[action]))
list_start = long(request.GET.get("start", 0))
files = []
root_path = os.path.join(
USettings.gSettings.MEDIA_ROOT, listpath[action]).replace("\\", "/")
files = get_files(root_path, root_path, allowFiles[action])
if (len(files) == 0):
return_info = {
"state": u"未找到匹配文件!",
"list": [],
"start": list_start,
"total": 0
}
else:
return_info = {
"state": "SUCCESS",
"list": files[list_start:list_start + list_size],
"start": list_start,
"total": len(files)
}
return HttpResponse(json.dumps(return_info), content_type="application/javascript")
def get_files(root_path, cur_path, allow_types=[]):
files = []
items = os.listdir(cur_path)
for item in items:
item = unicode(item)
item_fullname = os.path.join(
root_path, cur_path, item).replace("\\", "/")
if os.path.isdir(item_fullname):
files.extend(get_files(root_path, item_fullname, allow_types))
else:
ext = os.path.splitext(item_fullname)[1]
is_allow_list = (len(allow_types) == 0) or (ext in allow_types)
if is_allow_list:
files.append({
"url": urllib.basejoin(USettings.gSettings.MEDIA_URL, os.path.join(os.path.relpath(cur_path, root_path), item).replace("\\", "/")),
"mtime": os.path.getmtime(item_fullname)
})
return files
@csrf_exempt
def UploadFile(request):
"""上传文件"""
if not request.method == "POST":
return HttpResponse(json.dumps(u"{'state:'ERROR'}"), content_type="application/javascript")
state = "SUCCESS"
action = request.GET.get("action")
# 上传文件
upload_field_name = {
"uploadfile": "fileFieldName", "uploadimage": "imageFieldName",
"uploadscrawl": "scrawlFieldName", "catchimage": "catcherFieldName",
"uploadvideo": "videoFieldName",
}
UploadFieldName = request.GET.get(
upload_field_name[action], USettings.UEditorUploadSettings.get(action, "upfile"))
# 上传涂鸦,涂鸦是采用base64编码上传的,需要单独处理
if action == "uploadscrawl":
upload_file_name = "scrawl.png"
upload_file_size = 0
else:
# 取得上传的文件
file = request.FILES.get(UploadFieldName, None)
if file is None:
return HttpResponse(json.dumps(u"{'state:'ERROR'}"), content_type="application/javascript")
upload_file_name = file.name
upload_file_size = file.size
# 取得上传的文件的原始名称
upload_original_name, upload_original_ext = os.path.splitext(
upload_file_name)
# 文件类型检验
upload_allow_type = {
"uploadfile": "fileAllowFiles",
"uploadimage": "imageAllowFiles",
"uploadvideo": "videoAllowFiles"
}
if action in upload_allow_type:
allow_type = list(request.GET.get(upload_allow_type[
action], USettings.UEditorUploadSettings.get(upload_allow_type[action], "")))
if not upload_original_ext in allow_type:
state = u"服务器不允许上传%s类型的文件。" % upload_original_ext
# 大小检验
upload_max_size = {
"uploadfile": "filwMaxSize",
"uploadimage": "imageMaxSize",
"uploadscrawl": "scrawlMaxSize",
"uploadvideo": "videoMaxSize"
}
max_size = long(request.GET.get(upload_max_size[
action], USettings.UEditorUploadSettings.get(upload_max_size[action], 0)))
if max_size != 0:
from .utils import FileSize
MF = FileSize(max_size)
if upload_file_size > MF.size:
state = u"上传文件大小不允许超过%s。" % MF.FriendValue
# 检测保存路径是否存在,如果不存在则需要创建
upload_path_format = {
"uploadfile": "filePathFormat",
"uploadimage": "imagePathFormat",
"uploadscrawl": "scrawlPathFormat",
"uploadvideo": "videoPathFormat"
}
path_format_var = get_path_format_vars()
path_format_var.update({
"basename": upload_original_name,
"extname": upload_original_ext[1:],
"filename": upload_file_name,
})
# 取得输出文件的路径
OutputPathFormat, OutputPath, OutputFile = get_output_path(
request, upload_path_format[action], path_format_var)
# 所有检测完成后写入文件
if state == "SUCCESS":
if action == "uploadscrawl":
state = save_scrawl_file(
request, os.path.join(OutputPath, OutputFile))
else:
# 保存到文件中,如果保存错误,需要返回ERROR
upload_module_name = USettings.UEditorUploadSettings.get(
"upload_module", None)
if upload_module_name:
mod = import_module(upload_module_name)
state = mod.upload(file, OutputPathFormat)
else:
state = save_upload_file(
file, os.path.join(OutputPath, OutputFile))
# 返回数据
return_info = {
# 保存后的文件名称
'url': urllib.basejoin(USettings.gSettings.MEDIA_URL, OutputPathFormat),
'original': upload_file_name, # 原始文件名
'type': upload_original_ext,
'state': state, # 上传状态,成功时返回SUCCESS,其他任何值将原样返回至图片上传框中
'size': upload_file_size
}
return HttpResponse(json.dumps(return_info, ensure_ascii=False), content_type="application/javascript")
@csrf_exempt
def catcher_remote_image(request):
"""远程抓图,当catchRemoteImageEnable:true时,
如果前端插入图片地址与当前web不在同一个域,则由本函数从远程下载图片到本地
"""
if not request.method == "POST":
return HttpResponse(json.dumps(u"{'state:'ERROR'}"), content_type="application/javascript")
state = "SUCCESS"
allow_type = list(request.GET.get(
"catcherAllowFiles", USettings.UEditorUploadSettings.get("catcherAllowFiles", "")))
max_size = long(request.GET.get(
"catcherMaxSize", USettings.UEditorUploadSettings.get("catcherMaxSize", 0)))
remote_urls = request.POST.getlist("source[]", [])
catcher_infos = []
path_format_var = get_path_format_vars()
for remote_url in remote_urls:
# 取得上传的文件的原始名称
remote_file_name = os.path.basename(remote_url)
remote_original_name, remote_original_ext = os.path.splitext(
remote_file_name)
# 文件类型检验
if remote_original_ext in allow_type:
path_format_var.update({
"basename": remote_original_name,
"extname": remote_original_ext[1:],
"filename": remote_original_name
})
# 计算保存的文件名
o_path_format, o_path, o_file = get_output_path(
request, "catcherPathFormat", path_format_var)
o_filename = os.path.join(o_path, o_file).replace("\\", "/")
# 读取远程图片文件
try:
remote_image = urllib.urlopen(remote_url)
# 将抓取到的文件写入文件
try:
f = open(o_filename, 'wb')
f.write(remote_image.read())
f.close()
state = "SUCCESS"
except Exception as E:
state = u"写入抓取图片文件错误:%s" % E.message
except Exception as E:
state = u"抓取图片错误:%s" % E.message
catcher_infos.append({
"state": state,
"url": urllib.basejoin(USettings.gSettings.MEDIA_URL, o_path_format),
"size": os.path.getsize(o_filename),
"title": os.path.basename(o_file),
"original": remote_file_name,
"source": remote_url
})
return_info = {
"state": "SUCCESS" if len(catcher_infos) > 0 else "ERROR",
"list": catcher_infos
}
return HttpResponse(json.dumps(return_info, ensure_ascii=False), content_type="application/javascript")
def get_output_path(request, path_format, path_format_var):
# 取得输出文件的路径
OutputPathFormat = (request.GET.get(path_format, USettings.UEditorSettings[
"defaultPathFormat"]) % path_format_var).replace("\\", "/")
# 分解OutputPathFormat
OutputPath, OutputFile = os.path.split(OutputPathFormat)
OutputPath = os.path.join(USettings.gSettings.MEDIA_ROOT, OutputPath)
# 如果OutputFile为空说明传入的OutputPathFormat没有包含文件名,因此需要用默认的文件名
if not OutputFile:
OutputFile = USettings.UEditorSettings[
"defaultPathFormat"] % path_format_var
OutputPathFormat = os.path.join(OutputPathFormat, OutputFile)
if not os.path.exists(OutputPath):
os.makedirs(OutputPath)
return (OutputPathFormat, OutputPath, OutputFile)
# 涂鸦功能上传处理
@csrf_exempt
def save_scrawl_file(request, filename):
import base64
try:
content = request.POST.get(
USettings.UEditorUploadSettings.get("scrawlFieldName", "upfile"))
f = open(filename, 'wb')
f.write(base64.decodestring(content))
f.close()
state = "SUCCESS"
except Exception as E:
state = "写入图片文件错误:%s" % E.message
return state
``` |
[
{
"content": "Return the code exactly, with no changes:\n```python\n#!/usr/bin/env python\nimport dns\nfrom dnsdisttests import DNSDistTest\n\nclass TestSpoofingSpoof(DNSDistTest):\n\n _config_template = \"\"\"\n addAction(makeRule(\"spoofaction.spoofing.tests.powerdns.com.\"), SpoofAction({\"192.0.2.1\",... | [
{
"content": "Return the code exactly, with no changes:\n<|memory_start|>```python\n#!/usr/bin/env python\nimport dns\nfrom dnsdisttests import DNSDistTest\n\nclass TestSpoofingSpoof(DNSDistTest):\n\n _config_template = \"\"\"\n addAction(makeRule(\"spoofaction.spoofing.tests.powerdns.com.\"), SpoofAction... | ```python
#!/usr/bin/env python
import dns
from dnsdisttests import DNSDistTest
class TestSpoofingSpoof(DNSDistTest):
_config_template = """
addAction(makeRule("spoofaction.spoofing.tests.powerdns.com."), SpoofAction({"192.0.2.1", "2001:DB8::1"}))
addAction(makeRule("spoofaction-aa.spoofing.tests.powerdns.com."), SpoofAction({"192.0.2.1", "2001:DB8::1"}, {aa=true}))
addAction(makeRule("spoofaction-ad.spoofing.tests.powerdns.com."), SpoofAction({"192.0.2.1", "2001:DB8::1"}, {ad=true}))
addAction(makeRule("spoofaction-ra.spoofing.tests.powerdns.com."), SpoofAction({"192.0.2.1", "2001:DB8::1"}, {ra=true}))
addAction(makeRule("spoofaction-nora.spoofing.tests.powerdns.com."), SpoofAction({"192.0.2.1", "2001:DB8::1"}, {ra=false}))
addAction(makeRule("spoofaction-ttl.spoofing.tests.powerdns.com."), SpoofAction({"192.0.2.1", "2001:DB8::1"}, {ttl=1500}))
addAction(makeRule("cnamespoofaction.spoofing.tests.powerdns.com."), SpoofCNAMEAction("cnameaction.spoofing.tests.powerdns.com."))
addAction("multispoof.spoofing.tests.powerdns.com", SpoofAction({"192.0.2.1", "192.0.2.2", "2001:DB8::1", "2001:DB8::2"}))
addAction(AndRule{makeRule("raw.spoofing.tests.powerdns.com"), QTypeRule(DNSQType.A)}, SpoofRawAction("\\192\\000\\002\\001"))
addAction(AndRule{makeRule("raw.spoofing.tests.powerdns.com"), QTypeRule(DNSQType.TXT)}, SpoofRawAction("\\003aaa\\004bbbb\\011ccccccccccc"))
addAction(AndRule{makeRule("raw.spoofing.tests.powerdns.com"), QTypeRule(DNSQType.SRV)}, SpoofRawAction("\\000\\000\\000\\000\\255\\255\\003srv\\008powerdns\\003com\\000", { aa=true, ttl=3600 }))
addAction(AndRule{makeRule("multiraw.spoofing.tests.powerdns.com"), QTypeRule(DNSQType.TXT)}, SpoofRawAction({"\\003aaa\\004bbbb", "\\011ccccccccccc"}))
addAction(AndRule{makeRule("multiraw.spoofing.tests.powerdns.com"), QTypeRule(DNSQType.A)}, SpoofRawAction({"\\192\\000\\002\\001", "\\192\\000\\002\\002"}))
newServer{address="127.0.0.1:%s"}
"""
def testSpoofActionA(self):
"""
Spoofing: Spoof A via Action
Send an A query to "spoofaction.spoofing.tests.powerdns.com.",
check that dnsdist sends a spoofed result.
"""
name = 'spoofaction.spoofing.tests.powerdns.com.'
query = dns.message.make_query(name, 'A', 'IN')
# dnsdist set RA = RD for spoofed responses
query.flags &= ~dns.flags.RD
expectedResponse = dns.message.make_response(query)
rrset = dns.rrset.from_text(name,
60,
dns.rdataclass.IN,
dns.rdatatype.A,
'192.0.2.1')
expectedResponse.answer.append(rrset)
for method in ("sendUDPQuery", "sendTCPQuery"):
sender = getattr(self, method)
(_, receivedResponse) = sender(query, response=None, useQueue=False)
self.assertTrue(receivedResponse)
self.assertEqual(expectedResponse, receivedResponse)
def testSpoofActionAAAA(self):
"""
Spoofing: Spoof AAAA via Action
Send an AAAA query to "spoofaction.spoofing.tests.powerdns.com.",
check that dnsdist sends a spoofed result.
"""
name = 'spoofaction.spoofing.tests.powerdns.com.'
query = dns.message.make_query(name, 'AAAA', 'IN')
# dnsdist set RA = RD for spoofed responses
query.flags &= ~dns.flags.RD
expectedResponse = dns.message.make_response(query)
rrset = dns.rrset.from_text(name,
60,
dns.rdataclass.IN,
dns.rdatatype.AAAA,
'2001:DB8::1')
expectedResponse.answer.append(rrset)
for method in ("sendUDPQuery", "sendTCPQuery"):
sender = getattr(self, method)
(_, receivedResponse) = sender(query, response=None, useQueue=False)
self.assertTrue(receivedResponse)
self.assertEqual(expectedResponse, receivedResponse)
def testSpoofActionCNAME(self):
"""
Spoofing: Spoof CNAME via Action
Send an A query for "cnamespoofaction.spoofing.tests.powerdns.com.",
check that dnsdist sends a spoofed result.
"""
name = 'cnamespoofaction.spoofing.tests.powerdns.com.'
query = dns.message.make_query(name, 'A', 'IN')
# dnsdist set RA = RD for spoofed responses
query.flags &= ~dns.flags.RD
expectedResponse = dns.message.make_response(query)
rrset = dns.rrset.from_text(name,
60,
dns.rdataclass.IN,
dns.rdatatype.CNAME,
'cnameaction.spoofing.tests.powerdns.com.')
expectedResponse.answer.append(rrset)
for method in ("sendUDPQuery", "sendTCPQuery"):
sender = getattr(self, method)
(_, receivedResponse) = sender(query, response=None, useQueue=False)
self.assertTrue(receivedResponse)
self.assertEqual(expectedResponse, receivedResponse)
def testSpoofActionMultiA(self):
"""
Spoofing: Spoof multiple IPv4 addresses via AddDomainSpoof
Send an A query for "multispoof.spoofing.tests.powerdns.com.",
check that dnsdist sends a spoofed result.
"""
name = 'multispoof.spoofing.tests.powerdns.com.'
query = dns.message.make_query(name, 'A', 'IN')
# dnsdist set RA = RD for spoofed responses
query.flags &= ~dns.flags.RD
expectedResponse = dns.message.make_response(query)
rrset = dns.rrset.from_text(name,
60,
dns.rdataclass.IN,
dns.rdatatype.A,
'192.0.2.2', '192.0.2.1')
expectedResponse.answer.append(rrset)
for method in ("sendUDPQuery", "sendTCPQuery"):
sender = getattr(self, method)
(_, receivedResponse) = sender(query, response=None, useQueue=False)
self.assertTrue(receivedResponse)
self.assertEqual(expectedResponse, receivedResponse)
def testSpoofActionMultiAAAA(self):
"""
Spoofing: Spoof multiple IPv6 addresses via AddDomainSpoof
Send an AAAA query for "multispoof.spoofing.tests.powerdns.com.",
check that dnsdist sends a spoofed result.
"""
name = 'multispoof.spoofing.tests.powerdns.com.'
query = dns.message.make_query(name, 'AAAA', 'IN')
# dnsdist set RA = RD for spoofed responses
query.flags &= ~dns.flags.RD
expectedResponse = dns.message.make_response(query)
rrset = dns.rrset.from_text(name,
60,
dns.rdataclass.IN,
dns.rdatatype.AAAA,
'2001:DB8::1', '2001:DB8::2')
expectedResponse.answer.append(rrset)
for method in ("sendUDPQuery", "sendTCPQuery"):
sender = getattr(self, method)
(_, receivedResponse) = sender(query, response=None, useQueue=False)
self.assertTrue(receivedResponse)
self.assertEqual(expectedResponse, receivedResponse)
def testSpoofActionMultiANY(self):
"""
Spoofing: Spoof multiple addresses via AddDomainSpoof
Send an ANY query for "multispoof.spoofing.tests.powerdns.com.",
check that dnsdist sends a spoofed result.
"""
name = 'multispoof.spoofing.tests.powerdns.com.'
query = dns.message.make_query(name, 'ANY', 'IN')
# dnsdist set RA = RD for spoofed responses
query.flags &= ~dns.flags.RD
expectedResponse = dns.message.make_response(query)
rrset = dns.rrset.from_text(name,
60,
dns.rdataclass.IN,
dns.rdatatype.A,
'192.0.2.2', '192.0.2.1')
expectedResponse.answer.append(rrset)
rrset = dns.rrset.from_text(name,
60,
dns.rdataclass.IN,
dns.rdatatype.AAAA,
'2001:DB8::1', '2001:DB8::2')
expectedResponse.answer.append(rrset)
for method in ("sendUDPQuery", "sendTCPQuery"):
sender = getattr(self, method)
(_, receivedResponse) = sender(query, response=None, useQueue=False)
self.assertTrue(receivedResponse)
self.assertEqual(expectedResponse, receivedResponse)
def testSpoofActionSetAA(self):
"""
Spoofing: Spoof via Action, setting AA=1
"""
name = 'spoofaction-aa.spoofing.tests.powerdns.com.'
query = dns.message.make_query(name, 'AAAA', 'IN')
# dnsdist set RA = RD for spoofed responses
query.flags &= ~dns.flags.RD
expectedResponse = dns.message.make_response(query)
expectedResponse.flags |= dns.flags.AA
rrset = dns.rrset.from_text(name,
60,
dns.rdataclass.IN,
dns.rdatatype.AAAA,
'2001:DB8::1')
expectedResponse.answer.append(rrset)
for method in ("sendUDPQuery", "sendTCPQuery"):
sender = getattr(self, method)
(_, receivedResponse) = sender(query, response=None, useQueue=False)
self.assertTrue(receivedResponse)
self.assertEqual(expectedResponse, receivedResponse)
self.assertEqual(receivedResponse.answer[0].ttl, 60)
def testSpoofActionSetAD(self):
"""
Spoofing: Spoof via Action, setting AD=1
"""
name = 'spoofaction-ad.spoofing.tests.powerdns.com.'
query = dns.message.make_query(name, 'AAAA', 'IN')
# dnsdist set RA = RD for spoofed responses
query.flags &= ~dns.flags.RD
expectedResponse = dns.message.make_response(query)
expectedResponse.flags |= dns.flags.AD
rrset = dns.rrset.from_text(name,
60,
dns.rdataclass.IN,
dns.rdatatype.AAAA,
'2001:DB8::1')
expectedResponse.answer.append(rrset)
for method in ("sendUDPQuery", "sendTCPQuery"):
sender = getattr(self, method)
(_, receivedResponse) = sender(query, response=None, useQueue=False)
self.assertTrue(receivedResponse)
self.assertEqual(expectedResponse, receivedResponse)
self.assertEqual(receivedResponse.answer[0].ttl, 60)
def testSpoofActionSetRA(self):
"""
Spoofing: Spoof via Action, setting RA=1
"""
name = 'spoofaction-ra.spoofing.tests.powerdns.com.'
query = dns.message.make_query(name, 'AAAA', 'IN')
# dnsdist set RA = RD for spoofed responses
query.flags &= ~dns.flags.RD
expectedResponse = dns.message.make_response(query)
expectedResponse.flags |= dns.flags.RA
rrset = dns.rrset.from_text(name,
60,
dns.rdataclass.IN,
dns.rdatatype.AAAA,
'2001:DB8::1')
expectedResponse.answer.append(rrset)
for method in ("sendUDPQuery", "sendTCPQuery"):
sender = getattr(self, method)
(_, receivedResponse) = sender(query, response=None, useQueue=False)
self.assertTrue(receivedResponse)
self.assertEqual(expectedResponse, receivedResponse)
self.assertEqual(receivedResponse.answer[0].ttl, 60)
def testSpoofActionSetNoRA(self):
"""
Spoofing: Spoof via Action, setting RA=0
"""
name = 'spoofaction-nora.spoofing.tests.powerdns.com.'
query = dns.message.make_query(name, 'AAAA', 'IN')
expectedResponse = dns.message.make_response(query)
expectedResponse.flags &= ~dns.flags.RA
rrset = dns.rrset.from_text(name,
60,
dns.rdataclass.IN,
dns.rdatatype.AAAA,
'2001:DB8::1')
expectedResponse.answer.append(rrset)
for method in ("sendUDPQuery", "sendTCPQuery"):
sender = getattr(self, method)
(_, receivedResponse) = sender(query, response=None, useQueue=False)
self.assertTrue(receivedResponse)
self.assertEqual(expectedResponse, receivedResponse)
self.assertEqual(receivedResponse.answer[0].ttl, 60)
def testSpoofActionSetTTL(self):
"""
Spoofing: Spoof via Action, setting the TTL to 1500
"""
name = 'spoofaction-ttl.spoofing.tests.powerdns.com.'
query = dns.message.make_query(name, 'AAAA', 'IN')
expectedResponse = dns.message.make_response(query)
expectedResponse.flags |= dns.flags.RA
rrset = dns.rrset.from_text(name,
60,
dns.rdataclass.IN,
dns.rdatatype.AAAA,
'2001:DB8::1')
expectedResponse.answer.append(rrset)
for method in ("sendUDPQuery", "sendTCPQuery"):
sender = getattr(self, method)
(_, receivedResponse) = sender(query, response=None, useQueue=False)
self.assertTrue(receivedResponse)
self.assertEqual(expectedResponse, receivedResponse)
self.assertEqual(receivedResponse.answer[0].ttl, 1500)
    def testSpoofRawAction(self):
        """
        Spoofing: Spoof a response from raw bytes

        Exercises SpoofRawAction for three record types: A (raw bytes
        decode to 192.0.2.1), TXT (three character-strings) and SRV
        (which is additionally configured with aa=true and ttl=3600).
        """
        name = 'raw.spoofing.tests.powerdns.com.'
        # A
        query = dns.message.make_query(name, 'A', 'IN')
        query.flags &= ~dns.flags.RD
        expectedResponse = dns.message.make_response(query)
        # no aa=true configured for this action, so AA must stay clear
        expectedResponse.flags &= ~dns.flags.AA
        rrset = dns.rrset.from_text(name,
                                    60,
                                    dns.rdataclass.IN,
                                    dns.rdatatype.A,
                                    '192.0.2.1')
        expectedResponse.answer.append(rrset)

        for method in ("sendUDPQuery", "sendTCPQuery"):
            sender = getattr(self, method)
            (_, receivedResponse) = sender(query, response=None, useQueue=False)
            self.assertTrue(receivedResponse)
            self.assertEqual(expectedResponse, receivedResponse)
            self.assertEqual(receivedResponse.answer[0].ttl, 60)

        # TXT
        query = dns.message.make_query(name, 'TXT', 'IN')
        query.flags &= ~dns.flags.RD
        expectedResponse = dns.message.make_response(query)
        expectedResponse.flags &= ~dns.flags.AA
        # matches the raw payload \003aaa\004bbbb\011ccccccccccc
        rrset = dns.rrset.from_text(name,
                                    60,
                                    dns.rdataclass.IN,
                                    dns.rdatatype.TXT,
                                    '"aaa" "bbbb" "ccccccccccc"')
        expectedResponse.answer.append(rrset)

        for method in ("sendUDPQuery", "sendTCPQuery"):
            sender = getattr(self, method)
            (_, receivedResponse) = sender(query, response=None, useQueue=False)
            self.assertTrue(receivedResponse)
            self.assertEqual(expectedResponse, receivedResponse)
            self.assertEqual(receivedResponse.answer[0].ttl, 60)

        # SRV
        query = dns.message.make_query(name, 'SRV', 'IN')
        query.flags &= ~dns.flags.RD
        expectedResponse = dns.message.make_response(query)
        # this one should have the AA flag set (aa=true in the config)
        expectedResponse.flags |= dns.flags.AA
        rrset = dns.rrset.from_text(name,
                                    3600,
                                    dns.rdataclass.IN,
                                    dns.rdatatype.SRV,
                                    '0 0 65535 srv.powerdns.com.')
        expectedResponse.answer.append(rrset)

        for method in ("sendUDPQuery", "sendTCPQuery"):
            sender = getattr(self, method)
            (_, receivedResponse) = sender(query, response=None, useQueue=False)
            self.assertTrue(receivedResponse)
            self.assertEqual(expectedResponse, receivedResponse)
            # ttl=3600 comes from the SpoofRawAction configuration
            self.assertEqual(receivedResponse.answer[0].ttl, 3600)
    def testSpoofRawActionMulti(self):
        """
        Spoofing: Spoof a response from several raw bytes

        Same as testSpoofRawAction but the action is configured with a
        list of raw payloads, so the answer contains several records.
        """
        name = 'multiraw.spoofing.tests.powerdns.com.'
        # A
        query = dns.message.make_query(name, 'A', 'IN')
        query.flags &= ~dns.flags.RD
        expectedResponse = dns.message.make_response(query)
        expectedResponse.flags &= ~dns.flags.AA
        rrset = dns.rrset.from_text(name,
                                    60,
                                    dns.rdataclass.IN,
                                    dns.rdatatype.A,
                                    '192.0.2.1', '192.0.2.2')
        expectedResponse.answer.append(rrset)

        for method in ("sendUDPQuery", "sendTCPQuery"):
            sender = getattr(self, method)
            (_, receivedResponse) = sender(query, response=None, useQueue=False)
            self.assertTrue(receivedResponse)
            self.assertEqual(expectedResponse, receivedResponse)
            self.assertEqual(receivedResponse.answer[0].ttl, 60)

        # TXT: two separate TXT records, the first one with two
        # character-strings and the second one with a single one
        query = dns.message.make_query(name, 'TXT', 'IN')
        query.flags &= ~dns.flags.RD
        expectedResponse = dns.message.make_response(query)
        expectedResponse.flags &= ~dns.flags.AA
        rrset = dns.rrset.from_text(name,
                                    60,
                                    dns.rdataclass.IN,
                                    dns.rdatatype.TXT,
                                    '"aaa" "bbbb"', '"ccccccccccc"')
        expectedResponse.answer.append(rrset)

        for method in ("sendUDPQuery", "sendTCPQuery"):
            sender = getattr(self, method)
            (_, receivedResponse) = sender(query, response=None, useQueue=False)
            self.assertTrue(receivedResponse)
            self.assertEqual(expectedResponse, receivedResponse)
            self.assertEqual(receivedResponse.answer[0].ttl, 60)
class TestSpoofingLuaSpoof(DNSDistTest):
    """
    Tests spoofing answers from Lua actions: DNSAction.Spoof with
    addresses / CNAMEs and DNSAction.SpoofRaw with raw wire-format
    payloads (note the doubled backslashes: they are Lua escapes).
    """

    _config_template = """
    function spoof1rule(dq)
        if(dq.qtype==1) -- A
        then
                return DNSAction.Spoof, "192.0.2.1,192.0.2.2"
        elseif(dq.qtype == 28) -- AAAA
        then
                return DNSAction.Spoof, "2001:DB8::1"
        else
                return DNSAction.None, ""
        end
    end
    function spoof2rule(dq)
        return DNSAction.Spoof, "spoofedcname.spoofing.tests.powerdns.com."
    end
    addAction(AndRule{makeRule("raw.spoofing.tests.powerdns.com"), QTypeRule(DNSQType.TXT)}, SpoofRawAction("\\003aaa\\004bbbb\\011ccccccccccc"))
    addAction(AndRule{makeRule("raw.spoofing.tests.powerdns.com"), QTypeRule(DNSQType.SRV)}, SpoofRawAction("\\000\\000\\000\\000\\255\\255\\003srv\\008powerdns\\003com\\000", { aa=true, ttl=3600 }))
    function spoofrawrule(dq)
        if dq.qtype == DNSQType.A then
            return DNSAction.SpoofRaw, "\\192\\000\\002\\001"
        elseif dq.qtype == DNSQType.TXT then
            return DNSAction.SpoofRaw, "\\003aaa\\004bbbb\\011ccccccccccc"
        elseif dq.qtype == DNSQType.SRV then
            dq.dh:setAA(true)
            return DNSAction.SpoofRaw, "\\000\\000\\000\\000\\255\\255\\003srv\\008powerdns\\003com\\000"
        end
        return DNSAction.None, ""
    end
    addAction("luaspoof1.spoofing.tests.powerdns.com.", LuaAction(spoof1rule))
    addAction("luaspoof2.spoofing.tests.powerdns.com.", LuaAction(spoof2rule))
    addAction("lua-raw.spoofing.tests.powerdns.com.", LuaAction(spoofrawrule))
    newServer{address="127.0.0.1:%s"}
    """

    def testLuaSpoofA(self):
        """
        Spoofing: Spoofing an A via Lua

        Send an A query to "luaspoof1.spoofing.tests.powerdns.com.",
        check that dnsdist sends a spoofed result.
        """
        name = 'luaspoof1.spoofing.tests.powerdns.com.'
        query = dns.message.make_query(name, 'A', 'IN')
        # dnsdist set RA = RD for spoofed responses
        query.flags &= ~dns.flags.RD
        expectedResponse = dns.message.make_response(query)
        rrset = dns.rrset.from_text(name,
                                    60,
                                    dns.rdataclass.IN,
                                    dns.rdatatype.A,
                                    '192.0.2.1', '192.0.2.2')
        expectedResponse.answer.append(rrset)

        for method in ("sendUDPQuery", "sendTCPQuery"):
            sender = getattr(self, method)
            (_, receivedResponse) = sender(query, response=None, useQueue=False)
            self.assertTrue(receivedResponse)
            self.assertEqual(expectedResponse, receivedResponse)

    def testLuaSpoofAAAA(self):
        """
        Spoofing: Spoofing an AAAA via Lua

        Send an AAAA query to "luaspoof1.spoofing.tests.powerdns.com.",
        check that dnsdist sends a spoofed result.
        """
        name = 'luaspoof1.spoofing.tests.powerdns.com.'
        query = dns.message.make_query(name, 'AAAA', 'IN')
        # dnsdist set RA = RD for spoofed responses
        query.flags &= ~dns.flags.RD
        expectedResponse = dns.message.make_response(query)
        rrset = dns.rrset.from_text(name,
                                    60,
                                    dns.rdataclass.IN,
                                    dns.rdatatype.AAAA,
                                    '2001:DB8::1')
        expectedResponse.answer.append(rrset)

        for method in ("sendUDPQuery", "sendTCPQuery"):
            sender = getattr(self, method)
            (_, receivedResponse) = sender(query, response=None, useQueue=False)
            self.assertTrue(receivedResponse)
            self.assertEqual(expectedResponse, receivedResponse)

    def testLuaSpoofAWithCNAME(self):
        """
        Spoofing: Spoofing an A with a CNAME via Lua

        Send an A query to "luaspoof2.spoofing.tests.powerdns.com.",
        check that dnsdist sends a spoofed result.
        """
        name = 'luaspoof2.spoofing.tests.powerdns.com.'
        query = dns.message.make_query(name, 'A', 'IN')
        # dnsdist set RA = RD for spoofed responses
        query.flags &= ~dns.flags.RD
        expectedResponse = dns.message.make_response(query)
        rrset = dns.rrset.from_text(name,
                                    60,
                                    dns.rdataclass.IN,
                                    dns.rdatatype.CNAME,
                                    'spoofedcname.spoofing.tests.powerdns.com.')
        expectedResponse.answer.append(rrset)

        for method in ("sendUDPQuery", "sendTCPQuery"):
            sender = getattr(self, method)
            (_, receivedResponse) = sender(query, response=None, useQueue=False)
            self.assertTrue(receivedResponse)
            self.assertEqual(expectedResponse, receivedResponse)

    def testLuaSpoofAAAAWithCNAME(self):
        """
        Spoofing: Spoofing an AAAA with a CNAME via Lua

        Send an AAAA query to "luaspoof2.spoofing.tests.powerdns.com.",
        check that dnsdist sends a spoofed result.
        """
        name = 'luaspoof2.spoofing.tests.powerdns.com.'
        query = dns.message.make_query(name, 'AAAA', 'IN')
        # dnsdist set RA = RD for spoofed responses
        query.flags &= ~dns.flags.RD
        expectedResponse = dns.message.make_response(query)
        rrset = dns.rrset.from_text(name,
                                    60,
                                    dns.rdataclass.IN,
                                    dns.rdatatype.CNAME,
                                    'spoofedcname.spoofing.tests.powerdns.com.')
        expectedResponse.answer.append(rrset)

        for method in ("sendUDPQuery", "sendTCPQuery"):
            sender = getattr(self, method)
            (_, receivedResponse) = sender(query, response=None, useQueue=False)
            self.assertTrue(receivedResponse)
            self.assertEqual(expectedResponse, receivedResponse)

    def testLuaSpoofRawAction(self):
        """
        Spoofing: Spoof a response from raw bytes via Lua
        """
        name = 'lua-raw.spoofing.tests.powerdns.com.'
        # A: raw payload \192\000\002\001 decodes to 192.0.2.1
        query = dns.message.make_query(name, 'A', 'IN')
        query.flags &= ~dns.flags.RD
        expectedResponse = dns.message.make_response(query)
        expectedResponse.flags &= ~dns.flags.AA
        rrset = dns.rrset.from_text(name,
                                    60,
                                    dns.rdataclass.IN,
                                    dns.rdatatype.A,
                                    '192.0.2.1')
        expectedResponse.answer.append(rrset)

        for method in ("sendUDPQuery", "sendTCPQuery"):
            sender = getattr(self, method)
            (_, receivedResponse) = sender(query, response=None, useQueue=False)
            self.assertTrue(receivedResponse)
            self.assertEqual(expectedResponse, receivedResponse)
            self.assertEqual(receivedResponse.answer[0].ttl, 60)

        # TXT: one record holding three character-strings
        query = dns.message.make_query(name, 'TXT', 'IN')
        query.flags &= ~dns.flags.RD
        expectedResponse = dns.message.make_response(query)
        expectedResponse.flags &= ~dns.flags.AA
        rrset = dns.rrset.from_text(name,
                                    60,
                                    dns.rdataclass.IN,
                                    dns.rdatatype.TXT,
                                    '"aaa" "bbbb" "ccccccccccc"')
        expectedResponse.answer.append(rrset)

        for method in ("sendUDPQuery", "sendTCPQuery"):
            sender = getattr(self, method)
            (_, receivedResponse) = sender(query, response=None, useQueue=False)
            self.assertTrue(receivedResponse)
            self.assertEqual(expectedResponse, receivedResponse)
            self.assertEqual(receivedResponse.answer[0].ttl, 60)

        # SRV: the Lua rule calls dq.dh:setAA(true) for this type
        query = dns.message.make_query(name, 'SRV', 'IN')
        query.flags &= ~dns.flags.RD
        expectedResponse = dns.message.make_response(query)
        # this one should have the AA flag set
        expectedResponse.flags |= dns.flags.AA
        rrset = dns.rrset.from_text(name,
                                    3600,
                                    dns.rdataclass.IN,
                                    dns.rdatatype.SRV,
                                    '0 0 65535 srv.powerdns.com.')
        expectedResponse.answer.append(rrset)

        for method in ("sendUDPQuery", "sendTCPQuery"):
            sender = getattr(self, method)
            (_, receivedResponse) = sender(query, response=None, useQueue=False)
            self.assertTrue(receivedResponse)
            self.assertEqual(expectedResponse, receivedResponse)
            # sorry, we can't set the TTL from the Lua API right now
            #self.assertEqual(receivedResponse.answer[0].ttl, 3600)
class TestSpoofingLuaSpoofMulti(DNSDistTest):
    """
    Tests the Lua dq:spoof() method, which accepts a list of ComboAddress
    objects or of raw wire-format strings and generates a multi-record
    spoofed answer (the rule then returns DNSAction.HeaderModify).
    """

    _config_template = """
    function spoof1multirule(dq)
        if(dq.qtype==1) -- A
        then
                dq:spoof({ newCA("192.0.2.1"), newCA("192.0.2.2") })
                return DNSAction.HeaderModify
        elseif(dq.qtype == 28) -- AAAA
        then
                dq:spoof({ newCA("2001:DB8::1"), newCA("2001:DB8::2") })
                return DNSAction.HeaderModify
        else
                return DNSAction.None, ""
        end
    end

    function spoofrawmultirule(dq)
        if dq.qtype == DNSQType.A then
            dq:spoof({ "\\192\\000\\002\\001", "\\192\\000\\002\\002" })
            return DNSAction.HeaderModify
        elseif dq.qtype == DNSQType.TXT then
            dq:spoof({ "\\003aaa\\004bbbb", "\\011ccccccccccc" })
            return DNSAction.HeaderModify
        elseif dq.qtype == DNSQType.SRV then
            dq.dh:setAA(true)
            dq:spoof({ "\\000\\000\\000\\000\\255\\255\\004srv1\\008powerdns\\003com\\000","\\000\\000\\000\\000\\255\\255\\004srv2\\008powerdns\\003com\\000" })
            return DNSAction.HeaderModify
        end
        return DNSAction.None, ""
    end

    addAction("luaspoof1multi.spoofing.tests.powerdns.com.", LuaAction(spoof1multirule))
    addAction("lua-raw-multi.spoofing.tests.powerdns.com.", LuaAction(spoofrawmultirule))
    newServer{address="127.0.0.1:%s"}
    """

    def testLuaSpoofMultiA(self):
        """
        Spoofing: Spoofing multiple A via Lua dq:spoof

        Send an A query to "luaspoof1multi.spoofing.tests.powerdns.com.",
        check that dnsdist sends a spoofed result.
        """
        name = 'luaspoof1multi.spoofing.tests.powerdns.com.'
        query = dns.message.make_query(name, 'A', 'IN')
        # dnsdist set RA = RD for spoofed responses
        query.flags &= ~dns.flags.RD
        expectedResponse = dns.message.make_response(query)
        rrset = dns.rrset.from_text(name,
                                    60,
                                    dns.rdataclass.IN,
                                    dns.rdatatype.A,
                                    '192.0.2.1', '192.0.2.2')
        expectedResponse.answer.append(rrset)

        for method in ("sendUDPQuery", "sendTCPQuery"):
            sender = getattr(self, method)
            (_, receivedResponse) = sender(query, response=None, useQueue=False)
            self.assertTrue(receivedResponse)
            self.assertEqual(expectedResponse, receivedResponse)

    def testLuaSpoofMultiAAAA(self):
        """
        Spoofing: Spoofing multiple AAAA via Lua dq:spoof

        Send an AAAA query to "luaspoof1.spoofing.tests.powerdns.com.",
        check that dnsdist sends a spoofed result.
        """
        name = 'luaspoof1multi.spoofing.tests.powerdns.com.'
        query = dns.message.make_query(name, 'AAAA', 'IN')
        # dnsdist set RA = RD for spoofed responses
        query.flags &= ~dns.flags.RD
        expectedResponse = dns.message.make_response(query)
        rrset = dns.rrset.from_text(name,
                                    60,
                                    dns.rdataclass.IN,
                                    dns.rdatatype.AAAA,
                                    '2001:DB8::1', '2001:DB8::2')
        expectedResponse.answer.append(rrset)

        for method in ("sendUDPQuery", "sendTCPQuery"):
            sender = getattr(self, method)
            (_, receivedResponse) = sender(query, response=None, useQueue=False)
            self.assertTrue(receivedResponse)
            self.assertEqual(expectedResponse, receivedResponse)

    def testLuaSpoofMultiRawAction(self):
        """
        Spoofing: Spoof responses from raw bytes via Lua dq:spoof
        """
        name = 'lua-raw-multi.spoofing.tests.powerdns.com.'
        # A: two raw payloads, decoding to 192.0.2.1 and 192.0.2.2
        query = dns.message.make_query(name, 'A', 'IN')
        query.flags &= ~dns.flags.RD
        expectedResponse = dns.message.make_response(query)
        expectedResponse.flags &= ~dns.flags.AA
        rrset = dns.rrset.from_text(name,
                                    60,
                                    dns.rdataclass.IN,
                                    dns.rdatatype.A,
                                    '192.0.2.1', '192.0.2.2')
        expectedResponse.answer.append(rrset)

        for method in ("sendUDPQuery", "sendTCPQuery"):
            sender = getattr(self, method)
            (_, receivedResponse) = sender(query, response=None, useQueue=False)
            self.assertTrue(receivedResponse)
            self.assertEqual(expectedResponse, receivedResponse)
            self.assertEqual(receivedResponse.answer[0].ttl, 60)

        # TXT: two records, the first with two character-strings
        query = dns.message.make_query(name, 'TXT', 'IN')
        query.flags &= ~dns.flags.RD
        expectedResponse = dns.message.make_response(query)
        expectedResponse.flags &= ~dns.flags.AA
        rrset = dns.rrset.from_text(name,
                                    60,
                                    dns.rdataclass.IN,
                                    dns.rdatatype.TXT,
                                    '"aaa" "bbbb"', '"ccccccccccc"')
        expectedResponse.answer.append(rrset)

        for method in ("sendUDPQuery", "sendTCPQuery"):
            sender = getattr(self, method)
            (_, receivedResponse) = sender(query, response=None, useQueue=False)
            self.assertTrue(receivedResponse)
            self.assertEqual(expectedResponse, receivedResponse)
            self.assertEqual(receivedResponse.answer[0].ttl, 60)

        # SRV: the Lua rule calls dq.dh:setAA(true) for this type
        query = dns.message.make_query(name, 'SRV', 'IN')
        query.flags &= ~dns.flags.RD
        expectedResponse = dns.message.make_response(query)
        # this one should have the AA flag set
        expectedResponse.flags |= dns.flags.AA
        rrset = dns.rrset.from_text(name,
                                    3600,
                                    dns.rdataclass.IN,
                                    dns.rdatatype.SRV,
                                    '0 0 65535 srv1.powerdns.com.', '0 0 65535 srv2.powerdns.com.')
        expectedResponse.answer.append(rrset)

        for method in ("sendUDPQuery", "sendTCPQuery"):
            sender = getattr(self, method)
            (_, receivedResponse) = sender(query, response=None, useQueue=False)
            self.assertTrue(receivedResponse)
            self.assertEqual(expectedResponse, receivedResponse)
            # sorry, we can't set the TTL from the Lua API right now
            #self.assertEqual(receivedResponse.answer[0].ttl, 3600)
class TestSpoofingLuaFFISpoofMulti(DNSDistTest):
    """
    Tests dnsdist_ffi_dnsquestion_spoof_raw() from the Lua FFI
    interface: raw wire-format payloads are passed as an array of
    dnsdist_ffi_raw_value_t (pointer + size) buffers.
    """

    _config_template = """
    local ffi = require("ffi")

    function spoofrawmultirule(dq)
        local qtype = ffi.C.dnsdist_ffi_dnsquestion_get_qtype(dq)
        if qtype == DNSQType.A then
            local records = ffi.new("dnsdist_ffi_raw_value_t [2]")
            local str = "\\192\\000\\002\\001"
            records[0].size = #str
            records[0].value = ffi.new("char[?]", #str)
            ffi.copy(records[0].value, str, #str)
            local str = "\\192\\000\\002\\255"
            records[1].value = ffi.new("char[?]", #str)
            ffi.copy(records[1].value, str, #str)
            records[1].size = #str
            ffi.C.dnsdist_ffi_dnsquestion_spoof_raw(dq, records, 2)
            return DNSAction.HeaderModify
        elseif qtype == DNSQType.TXT then
            local records = ffi.new("dnsdist_ffi_raw_value_t [2]")
            local str = "\\033this text has a comma at the end,"
            records[0].size = #str
            records[0].value = ffi.new("char[?]", #str)
            ffi.copy(records[0].value, str, #str)
            local str = "\\003aaa\\004bbbb"
            records[1].size = #str
            records[1].value = ffi.new("char[?]", #str)
            ffi.copy(records[1].value, str, #str)
            ffi.C.dnsdist_ffi_dnsquestion_spoof_raw(dq, records, 2)
            return DNSAction.HeaderModify
        end
        return DNSAction.None, ""
    end

    addAction("lua-raw-multi.ffi-spoofing.tests.powerdns.com.", LuaFFIAction(spoofrawmultirule))
    newServer{address="127.0.0.1:%s"}
    """
    # NOTE(review): verbose mode is enabled here, unlike the sibling test
    # classes — presumably left over from debugging; confirm it is wanted
    _verboseMode = True

    def testLuaSpoofMultiRawAction(self):
        """
        Spoofing via Lua FFI: Spoof responses from raw bytes via Lua FFI
        """
        name = 'lua-raw-multi.ffi-spoofing.tests.powerdns.com.'
        # A: two raw payloads, decoding to 192.0.2.1 and 192.0.2.255
        query = dns.message.make_query(name, 'A', 'IN')
        query.flags &= ~dns.flags.RD
        expectedResponse = dns.message.make_response(query)
        expectedResponse.flags &= ~dns.flags.AA
        rrset = dns.rrset.from_text(name,
                                    60,
                                    dns.rdataclass.IN,
                                    dns.rdatatype.A,
                                    '192.0.2.1', '192.0.2.255')
        expectedResponse.answer.append(rrset)

        for method in ("sendUDPQuery", "sendTCPQuery"):
            sender = getattr(self, method)
            (_, receivedResponse) = sender(query, response=None, useQueue=False)
            self.assertTrue(receivedResponse)
            self.assertEqual(expectedResponse, receivedResponse)
            self.assertEqual(receivedResponse.answer[0].ttl, 60)

        # TXT: a string containing a comma exercises the raw path, since
        # the comma would be a separator in the non-raw spoofing API
        query = dns.message.make_query(name, 'TXT', 'IN')
        query.flags &= ~dns.flags.RD
        expectedResponse = dns.message.make_response(query)
        expectedResponse.flags &= ~dns.flags.AA
        rrset = dns.rrset.from_text(name,
                                    60,
                                    dns.rdataclass.IN,
                                    dns.rdatatype.TXT,
                                    '"this text has a comma at the end,"', '"aaa" "bbbb"')
        expectedResponse.answer.append(rrset)

        for method in ("sendUDPQuery", "sendTCPQuery"):
            sender = getattr(self, method)
            (_, receivedResponse) = sender(query, response=None, useQueue=False)
            self.assertTrue(receivedResponse)
            self.assertEqual(expectedResponse, receivedResponse)
            self.assertEqual(receivedResponse.answer[0].ttl, 60)
class TestSpoofingLuaWithStatistics(DNSDistTest):
    """
    Tests a Lua action that picks the spoofed address based on the
    global 'queries' statistics counter, so the test below depends on
    being the first queries this dnsdist instance receives.
    """

    _config_template = """
    function spoof1rule(dq)
        queriesCount = getStatisticsCounters()['queries']
        if(queriesCount == 1) then
                return DNSAction.Spoof, "192.0.2.1"
        elseif(queriesCount == 2) then
                return DNSAction.Spoof, "192.0.2.2"
        else
                return DNSAction.Spoof, "192.0.2.0"
        end
    end
    addAction("luaspoofwithstats.spoofing.tests.powerdns.com.", LuaAction(spoof1rule))
    newServer{address="127.0.0.1:%s"}
    """

    def testLuaSpoofBasedOnStatistics(self):
        """
        Spoofing: Spoofing an A via Lua based on statistics counters
        """
        name = 'luaspoofwithstats.spoofing.tests.powerdns.com.'
        query = dns.message.make_query(name, 'A', 'IN')
        # dnsdist set RA = RD for spoofed responses
        query.flags &= ~dns.flags.RD
        # first query is answered with 192.0.2.1 ...
        expectedResponse1 = dns.message.make_response(query)
        rrset = dns.rrset.from_text(name,
                                    60,
                                    dns.rdataclass.IN,
                                    dns.rdatatype.A,
                                    '192.0.2.1')
        expectedResponse1.answer.append(rrset)
        # ... the second with 192.0.2.2 ...
        expectedResponse2 = dns.message.make_response(query)
        rrset = dns.rrset.from_text(name,
                                    60,
                                    dns.rdataclass.IN,
                                    dns.rdatatype.A,
                                    '192.0.2.2')
        expectedResponse2.answer.append(rrset)
        # ... and any later one with 192.0.2.0
        expectedResponseAfterwards = dns.message.make_response(query)
        rrset = dns.rrset.from_text(name,
                                    60,
                                    dns.rdataclass.IN,
                                    dns.rdatatype.A,
                                    '192.0.2.0')
        expectedResponseAfterwards.answer.append(rrset)

        (_, receivedResponse) = self.sendUDPQuery(query, response=None, useQueue=False)
        self.assertTrue(receivedResponse)
        self.assertEqual(expectedResponse1, receivedResponse)

        (_, receivedResponse) = self.sendUDPQuery(query, response=None, useQueue=False)
        self.assertTrue(receivedResponse)
        self.assertEqual(expectedResponse2, receivedResponse)

        for method in ("sendUDPQuery", "sendTCPQuery"):
            sender = getattr(self, method)
            (_, receivedResponse) = sender(query, response=None, useQueue=False)
            self.assertTrue(receivedResponse)
            self.assertEqual(expectedResponseAfterwards, receivedResponse)
``` |
[
{
"content": "```python\nfrom pypif import pif\nfrom pypif.obj.common.pio import Pio\nfrom pypif.util.serializable import Serializable\nfrom six import string_types\n\n\nclass PifSearchHit(Serializable):\n \"\"\"\n Class to store a single PIF search hit.\n \"\"\"\n\n def __init__(self, id=None, data... | [
{
"content": "<|memory_start|>```python\nfrom pypif import pif\nfrom pypif.obj.common.pio import Pio\nfrom pypif.util.serializable import Serializable\nfrom six import string_types\n\n\nclass PifSearchHit(Serializable):\n \"\"\"\n Class to store a single PIF search hit.\n \"\"\"\n\n def __init__(sel... | ```python
from pypif import pif
from pypif.obj.common.pio import Pio
from pypif.util.serializable import Serializable
from six import string_types
class PifSearchHit(Serializable):
    """
    Class to store a single PIF search hit.
    """

    def __init__(self, id=None, dataset=None, dataset_version=None, score=None, updated_at=None, system=None,
                 extracted=None, extracted_path=None, **kwargs):
        """
        Constructor.

        :param id: String with the ID of the record.
        :param dataset: Integer with the dataset of the record.
        :param dataset_version: Integer with the dataset version of the record.
        :param score: Score with the relevancy of the result.
        :param updated_at: String with the last time that the record was updated.
        :param system: Pif System object that matched.
        :param extracted: Dictionary with a map of extracted property names to values.
        :param extracted_path: Dictionary with a map of extracted property names to paths in a PIF.

        Any additional keyword arguments are accepted and silently ignored.
        """
        # Each value is routed through its property setter; only the
        # 'system' setter performs conversion/validation.
        self._id = None
        self.id = id
        self._dataset = None
        self.dataset = dataset
        self._dataset_version = None
        self.dataset_version = dataset_version
        self._score = None
        self.score = score
        self._updated_at = None
        self.updated_at = updated_at
        self._system = None
        self.system = system
        self._extracted = None
        self.extracted = extracted
        self._extracted_path = None
        self.extracted_path = extracted_path

    # Plain pass-through accessors: getter returns the stored value,
    # setter stores it as-is, deleter resets it to None.
    @property
    def id(self):
        return self._id

    @id.setter
    def id(self, id):
        self._id = id

    @id.deleter
    def id(self):
        self._id = None

    @property
    def dataset(self):
        return self._dataset

    @dataset.setter
    def dataset(self, dataset):
        self._dataset = dataset

    @dataset.deleter
    def dataset(self):
        self._dataset = None

    @property
    def dataset_version(self):
        return self._dataset_version

    @dataset_version.setter
    def dataset_version(self, dataset_version):
        self._dataset_version = dataset_version

    @dataset_version.deleter
    def dataset_version(self):
        self._dataset_version = None

    @property
    def score(self):
        return self._score

    @score.setter
    def score(self, score):
        self._score = score

    @score.deleter
    def score(self):
        self._score = None

    @property
    def updated_at(self):
        return self._updated_at

    @updated_at.setter
    def updated_at(self, updated_at):
        self._updated_at = updated_at

    @updated_at.deleter
    def updated_at(self):
        self._updated_at = None

    @property
    def system(self):
        return self._system

    @system.setter
    def system(self, system):
        # Accepts a JSON string, an already-parsed dict, or a Pio object,
        # normalizing all of them to a Pio instance.
        if system is None:
            self._system = None
        elif isinstance(system, string_types):
            self._system = pif.loads(system)
        elif isinstance(system, dict):
            self._system = pif.loado(system)
        elif isinstance(system, Pio):
            self._system = system
        else:
            raise TypeError('Not a valid system type: must be string, dict, or Pio, but got ' + str(type(system)))

    @system.deleter
    def system(self):
        self._system = None

    @property
    def extracted(self):
        return self._extracted

    @extracted.setter
    def extracted(self, extracted):
        self._extracted = extracted

    @extracted.deleter
    def extracted(self):
        self._extracted = None

    @property
    def extracted_path(self):
        return self._extracted_path

    @extracted_path.setter
    def extracted_path(self, extracted_path):
        self._extracted_path = extracted_path

    @extracted_path.deleter
    def extracted_path(self):
        self._extracted_path = None
``` |
[
{
"content": "Recreate the original code text:\n```python\nimport py\nfrom rpython.rtyper.lltypesystem import lltype, llmemory, rffi, rstr\nfrom rpython.jit.metainterp.history import ResOperation, TargetToken,\\\n JitCellToken\nfrom rpython.jit.metainterp.history import (ConstInt, ConstPtr, Const,\n ... | [
{
"content": "Recreate the original code text:\n<|memory_start|>```python\nimport py\nfrom rpython.rtyper.lltypesystem import lltype, llmemory, rffi, rstr\nfrom rpython.jit.metainterp.history import ResOperation, TargetToken,\\\n JitCellToken\nfrom rpython.jit.metainterp.history import (ConstInt, ConstPtr, ... | ```python
import py
from rpython.rtyper.lltypesystem import lltype, llmemory, rffi, rstr
from rpython.jit.metainterp.history import ResOperation, TargetToken,\
JitCellToken
from rpython.jit.metainterp.history import (ConstInt, ConstPtr, Const,
BasicFailDescr, BasicFinalDescr)
from rpython.jit.backend.detect_cpu import getcpuclass
from rpython.jit.backend.x86.arch import WORD
from rpython.jit.backend.x86.rx86 import fits_in_32bits
from rpython.jit.backend.llsupport import symbolic
from rpython.jit.metainterp.resoperation import rop, InputArgInt, InputArgRef
from rpython.jit.metainterp.executor import execute
from rpython.jit.backend.test.runner_test import LLtypeBackendTest
from rpython.jit.tool.oparser import parse
import ctypes
# Concrete backend CPU class detected for the host platform (x86/x86-64).
CPU = getcpuclass()

class FakeStats(object):
    """Minimal stand-in for the stats object the CPU constructor expects."""
    pass

# Shortcuts to the structure types defined by the shared backend tests.
U = LLtypeBackendTest.U
S = LLtypeBackendTest.S

# ____________________________________________________________
class TestX86(LLtypeBackendTest):

    # for the individual tests see
    # ====> ../../test/runner_test.py

    # Expected machine-code instruction patterns (semicolon-separated,
    # regex-ish) against which the generated loop/bridge code is matched;
    # they differ between 32-bit and 64-bit x86.
    if WORD == 4:
        add_loop_instructions = ('mov; '
                                 'lea; '    # a nop, for the label
                                 'add; test; je; jmp;')   # plus some padding
        bridge_loop_instructions = 'cmp; jge; mov; mov; call; jmp;'
    else:
        add_loop_instructions = ('mov; '
                                 'nop; '    # for the label
                                 'add; test; je; jmp;')   # plus some padding
        bridge_loop_instructions = (
            'cmp; jge; mov;( movabs;)? mov; mov(abs)?; call; mov(abs)?; jmp;')
def get_cpu(self):
cpu = CPU(rtyper=None, stats=FakeStats())
cpu.setup_once()
return cpu
    def test_execute_ptr_operation(self):
        # Writes a field of a GC instance through SETFIELD_GC, then reads
        # it back through GETFIELD_GC_I after mutating it directly, to
        # check both operations go through real memory.
        cpu = self.cpu
        u_box, _, _ = self.alloc_instance(U)
        u = u_box.getref(lltype.Ptr(U))
        ofs = cpu.fielddescrof(S, 'value')
        assert self.execute_operation(rop.SETFIELD_GC,
                                      [u_box, InputArgInt(3)],
                                      'void', ofs) == None
        assert u.parent.parent.value == 3
        u.parent.parent.value += 100
        assert (self.execute_operation(rop.GETFIELD_GC_I, [u_box], 'int', ofs)
                == 103)
    def test_unicode(self):
        # Fills a 13-char unicode buffer with 'a'..'m' and checks the
        # UNICODEGETITEM/UNICODESETITEM operations against the backing
        # chars array.
        ofs = symbolic.get_field_token(rstr.UNICODE, 'chars', False)[0]
        u = rstr.mallocunicode(13)
        for i in range(13):
            u.chars[i] = unichr(ord(u'a') + i)
        b = InputArgRef(lltype.cast_opaque_ptr(llmemory.GCREF, u))
        r = self.execute_operation(rop.UNICODEGETITEM, [b, ConstInt(2)], 'int')
        assert r == ord(u'a') + 2
        self.execute_operation(rop.UNICODESETITEM, [b, ConstInt(2),
                                                    ConstInt(ord(u'z'))],
                               'void')
        assert u.chars[2] == u'z'
        # the neighbouring character must be untouched by the write
        assert u.chars[3] == u'd'
@staticmethod
def _resbuf(res, item_tp=ctypes.c_long):
return ctypes.cast(res._obj.intval, ctypes.POINTER(item_tp))
    def test_allocations(self):
        # Disabled: everything below the skip is kept for reference only.
        py.test.skip("rewrite or kill")
        from rpython.rtyper.lltypesystem import rstr

        allocs = [None]
        all = []
        orig_new = self.cpu.gc_ll_descr.funcptr_for_new
        def f(size):
            # record every allocation size requested from the GC
            allocs.insert(0, size)
            return orig_new(size)

        self.cpu.assembler.setup_once()
        self.cpu.gc_ll_descr.funcptr_for_new = f
        ofs = symbolic.get_field_token(rstr.STR, 'chars', False)[0]

        res = self.execute_operation(rop.NEWSTR, [ConstInt(7)], 'ref')
        assert allocs[0] == 7 + ofs + WORD
        resbuf = self._resbuf(res)
        assert resbuf[ofs/WORD] == 7

        # ------------------------------------------------------------

        res = self.execute_operation(rop.NEWSTR, [InputArgInt(7)], 'ref')
        assert allocs[0] == 7 + ofs + WORD
        resbuf = self._resbuf(res)
        assert resbuf[ofs/WORD] == 7

        # ------------------------------------------------------------

        TP = lltype.GcArray(lltype.Signed)
        ofs = symbolic.get_field_token(TP, 'length', False)[0]
        descr = self.cpu.arraydescrof(TP)

        res = self.execute_operation(rop.NEW_ARRAY, [ConstInt(10)],
                                     'ref', descr)
        assert allocs[0] == 10*WORD + ofs + WORD
        resbuf = self._resbuf(res)
        assert resbuf[ofs/WORD] == 10

        # ------------------------------------------------------------

        res = self.execute_operation(rop.NEW_ARRAY, [InputArgInt(10)],
                                     'ref', descr)
        assert allocs[0] == 10*WORD + ofs + WORD
        resbuf = self._resbuf(res)
        assert resbuf[ofs/WORD] == 10
    def test_stringitems(self):
        # Checks STRSETITEM (with constant and variable index) and
        # STRGETITEM against the raw bytes of a freshly allocated string.
        from rpython.rtyper.lltypesystem.rstr import STR
        ofs = symbolic.get_field_token(STR, 'chars', False)[0]
        ofs_items = symbolic.get_field_token(STR.chars, 'items', False)[0]

        res = self.execute_operation(rop.NEWSTR, [ConstInt(10)], 'ref')
        self.execute_operation(rop.STRSETITEM, [InputArgRef(res), ConstInt(2), ConstInt(ord('d'))], 'void')
        resbuf = self._resbuf(res, ctypes.c_char)
        assert resbuf[ofs + ofs_items + 2] == 'd'
        self.execute_operation(rop.STRSETITEM, [InputArgRef(res), InputArgInt(2), ConstInt(ord('z'))], 'void')
        assert resbuf[ofs + ofs_items + 2] == 'z'
        r = self.execute_operation(rop.STRGETITEM, [InputArgRef(res), InputArgInt(2)], 'int')
        assert r == ord('z')
    def test_arrayitems(self):
        # Checks SETARRAYITEM_GC / GETARRAYITEM_GC_I on a GcArray(Signed),
        # covering every combination of constant/variable array reference
        # and constant/variable index.
        TP = lltype.GcArray(lltype.Signed)
        ofs = symbolic.get_field_token(TP, 'length', False)[0]
        itemsofs = symbolic.get_field_token(TP, 'items', False)[0]
        descr = self.cpu.arraydescrof(TP)
        res = self.execute_operation(rop.NEW_ARRAY, [ConstInt(10)],
                                     'ref', descr)
        resbuf = self._resbuf(res)
        # the length field of the fresh array must hold 10
        assert resbuf[ofs/WORD] == 10
        self.execute_operation(rop.SETARRAYITEM_GC, [InputArgRef(res),
                                                     ConstInt(2), InputArgInt(38)],
                               'void', descr)
        assert resbuf[itemsofs/WORD + 2] == 38

        self.execute_operation(rop.SETARRAYITEM_GC, [InputArgRef(res),
                                                     InputArgInt(3), InputArgInt(42)],
                               'void', descr)
        assert resbuf[itemsofs/WORD + 3] == 42

        r = self.execute_operation(rop.GETARRAYITEM_GC_I, [InputArgRef(res), ConstInt(2)],
                                   'int', descr)
        assert r == 38
        r = self.execute_operation(rop.GETARRAYITEM_GC_I, [ConstPtr(res),
                                                           InputArgInt(2)],
                                   'int', descr)
        assert r == 38
        r = self.execute_operation(rop.GETARRAYITEM_GC_I, [ConstPtr(res),
                                                           ConstInt(2)],
                                   'int', descr)
        assert r == 38
        r = self.execute_operation(rop.GETARRAYITEM_GC_I, [InputArgRef(res),
                                                           InputArgInt(2)],
                                   'int', descr)
        assert r == 38

        r = self.execute_operation(rop.GETARRAYITEM_GC_I, [InputArgRef(res), InputArgInt(3)],
                                   'int', descr)
        assert r == 42
def test_arrayitems_not_int(self):
TP = lltype.GcArray(lltype.Char)
ofs = symbolic.get_field_token(TP, 'length', False)[0]
itemsofs = symbolic.get_field_token(TP, 'items', False)[0]
descr = self.cpu.arraydescrof(TP)
res = self.execute_operation(rop.NEW_ARRAY, [ConstInt(10)],
'ref', descr)
resbuf = self._resbuf(res, ctypes.c_char)
res = InputArgRef(res)
assert resbuf[ofs] == chr(10)
for i in range(10):
self.execute_operation(rop.SETARRAYITEM_GC, [res,
ConstInt(i), InputArgInt(i)],
'void', descr)
for i in range(10):
assert resbuf[itemsofs + i] == chr(i)
for i in range(10):
r = self.execute_operation(rop.GETARRAYITEM_GC_I, [res,
ConstInt(i)],
'int', descr)
assert r == i
def test_getfield_setfield(self):
TP = lltype.GcStruct('x', ('s', lltype.Signed),
('i', rffi.INT),
('f', lltype.Float),
('u', rffi.USHORT),
('c1', lltype.Char),
('c2', lltype.Char),
('c3', lltype.Char))
res = InputArgRef(self.execute_operation(rop.NEW, [],
'ref', self.cpu.sizeof(TP)))
ofs_s = self.cpu.fielddescrof(TP, 's')
ofs_i = self.cpu.fielddescrof(TP, 'i')
#ofs_f = self.cpu.fielddescrof(TP, 'f')
ofs_u = self.cpu.fielddescrof(TP, 'u')
ofsc1 = self.cpu.fielddescrof(TP, 'c1')
ofsc2 = self.cpu.fielddescrof(TP, 'c2')
ofsc3 = self.cpu.fielddescrof(TP, 'c3')
self.execute_operation(rop.SETFIELD_GC, [res, ConstInt(3)], 'void',
ofs_s)
# XXX ConstFloat
#self.execute_operation(rop.SETFIELD_GC, [res, ofs_f, 1e100], 'void')
# XXX we don't support shorts (at all)
#self.execute_operation(rop.SETFIELD_GC, [res, ofs_u, ConstInt(5)], 'void')
s = self.execute_operation(rop.GETFIELD_GC_I, [res], 'int', ofs_s)
assert s == 3
self.execute_operation(rop.SETFIELD_GC, [res, InputArgInt(3)], 'void',
ofs_s)
s = self.execute_operation(rop.GETFIELD_GC_I, [res], 'int', ofs_s)
assert s == 3
self.execute_operation(rop.SETFIELD_GC, [res, InputArgInt(1234)], 'void', ofs_i)
i = self.execute_operation(rop.GETFIELD_GC_I, [res], 'int', ofs_i)
assert i == 1234
#u = self.execute_operation(rop.GETFIELD_GC, [res, ofs_u], 'int')
#assert u.value == 5
self.execute_operation(rop.SETFIELD_GC, [res, ConstInt(1)], 'void',
ofsc1)
self.execute_operation(rop.SETFIELD_GC, [res, ConstInt(3)], 'void',
ofsc3)
self.execute_operation(rop.SETFIELD_GC, [res, ConstInt(2)], 'void',
ofsc2)
c = self.execute_operation(rop.GETFIELD_GC_I, [res], 'int', ofsc1)
assert c == 1
c = self.execute_operation(rop.GETFIELD_GC_I, [res], 'int', ofsc2)
assert c == 2
c = self.execute_operation(rop.GETFIELD_GC_I, [res], 'int', ofsc3)
assert c == 3
def test_bug_setfield_64bit(self):
if WORD == 4:
py.test.skip("only for 64 bits")
TP = lltype.GcStruct('S', ('i', lltype.Signed))
ofsi = self.cpu.fielddescrof(TP, 'i')
for i in range(500):
p = lltype.malloc(TP)
addr = rffi.cast(lltype.Signed, p)
if fits_in_32bits(addr):
break # fitting in 32 bits, good
else:
py.test.skip("cannot get a 32-bit pointer")
res = ConstPtr(rffi.cast(llmemory.GCREF, addr))
self.execute_operation(rop.SETFIELD_RAW, [res, ConstInt(3**33)],
'void', ofsi)
assert p.i == 3**33
def test_and_mask_common_patterns(self):
cases = [8, 16, 24]
if WORD == 8:
cases.append(32)
for i in cases:
box = InputArgInt(0xAAAAAAAAAAAA)
res = self.execute_operation(rop.INT_AND,
[box, ConstInt(2 ** i - 1)],
'int')
assert res == 0xAAAAAAAAAAAA & (2 ** i - 1)
def test_nullity_with_guard(self):
allops = [rop.INT_IS_TRUE]
guards = [rop.GUARD_TRUE, rop.GUARD_FALSE]
p = lltype.cast_opaque_ptr(llmemory.GCREF,
lltype.malloc(lltype.GcStruct('x')))
nullptr = lltype.nullptr(llmemory.GCREF.TO)
f = InputArgInt()
for op in allops:
for guard in guards:
if op == rop.INT_IS_TRUE:
bp = InputArgInt(1)
n = InputArgInt(0)
else:
bp = InputArgRef(p)
n = InputArgRef(nullptr)
for b in (bp, n):
i1 = ResOperation(rop.SAME_AS_I, [ConstInt(1)])
f = ResOperation(op, [b])
ops = [
i1,
f,
ResOperation(guard, [f],
descr=BasicFailDescr()),
ResOperation(rop.FINISH, [ConstInt(0)],
descr=BasicFinalDescr()),
]
ops[-2].setfailargs([i1])
looptoken = JitCellToken()
self.cpu.compile_loop([b], ops, looptoken)
deadframe = self.cpu.execute_token(looptoken, b.getint())
result = self.cpu.get_int_value(deadframe, 0)
if guard == rop.GUARD_FALSE:
assert result == execute(self.cpu, None,
op, None, b)
else:
assert result != execute(self.cpu, None,
op, None, b)
def test_stuff_followed_by_guard(self):
boxes = [(InputArgInt(1), InputArgInt(0)),
(InputArgInt(0), InputArgInt(1)),
(InputArgInt(1), InputArgInt(1)),
(InputArgInt(-1), InputArgInt(1)),
(InputArgInt(1), InputArgInt(-1)),
(ConstInt(1), InputArgInt(0)),
(ConstInt(0), InputArgInt(1)),
(ConstInt(1), InputArgInt(1)),
(ConstInt(-1), InputArgInt(1)),
(ConstInt(1), InputArgInt(-1)),
(InputArgInt(1), ConstInt(0)),
(InputArgInt(0), ConstInt(1)),
(InputArgInt(1), ConstInt(1)),
(InputArgInt(-1), ConstInt(1)),
(InputArgInt(1), ConstInt(-1))]
guards = [rop.GUARD_FALSE, rop.GUARD_TRUE]
all = [rop.INT_EQ, rop.INT_NE, rop.INT_LE, rop.INT_LT, rop.INT_GT,
rop.INT_GE, rop.UINT_GT, rop.UINT_LT, rop.UINT_LE, rop.UINT_GE]
for a, b in boxes:
for guard in guards:
for op in all:
i1 = ResOperation(rop.SAME_AS_I, [ConstInt(1)])
res = ResOperation(op, [a, b])
ops = [
i1, res,
ResOperation(guard, [res],
descr=BasicFailDescr()),
ResOperation(rop.FINISH, [ConstInt(0)],
descr=BasicFinalDescr()),
]
ops[-2].setfailargs([i1])
inputargs = [i for i in (a, b) if not isinstance(i, Const)]
looptoken = JitCellToken()
self.cpu.compile_loop(inputargs, ops, looptoken)
inputvalues = [box.getint() for box in inputargs]
deadframe = self.cpu.execute_token(looptoken, *inputvalues)
result = self.cpu.get_int_value(deadframe, 0)
expected = execute(self.cpu, None, op, None, a, b)
if guard == rop.GUARD_FALSE:
assert result == expected
else:
assert result != expected
def test_compile_bridge_check_profile_info(self):
py.test.skip("does not work, reinvestigate")
class FakeProfileAgent(object):
def __init__(self):
self.functions = []
def native_code_written(self, name, address, size):
self.functions.append((name, address, size))
self.cpu.profile_agent = agent = FakeProfileAgent()
i0 = InputArgInt()
i1 = InputArgInt()
i2 = InputArgInt()
targettoken = TargetToken()
faildescr1 = BasicFailDescr(1)
faildescr2 = BasicFailDescr(2)
looptoken = JitCellToken()
looptoken.number = 17
class FakeString(object):
def __init__(self, val):
self.val = val
def _get_str(self):
return self.val
operations = [
ResOperation(rop.LABEL, [i0], None, descr=targettoken),
ResOperation(rop.DEBUG_MERGE_POINT, [FakeString("hello"), 0, 0], None),
ResOperation(rop.INT_ADD, [i0, ConstInt(1)], i1),
ResOperation(rop.INT_LE, [i1, ConstInt(9)], i2),
ResOperation(rop.GUARD_TRUE, [i2], None, descr=faildescr1),
ResOperation(rop.JUMP, [i1], None, descr=targettoken),
]
inputargs = [i0]
operations[-2].setfailargs([i1])
self.cpu.compile_loop(inputargs, operations, looptoken)
name, loopaddress, loopsize = agent.functions[0]
assert name == "Loop # 17: hello (loop counter 0)"
assert loopaddress <= looptoken._ll_loop_code
assert loopsize >= 40 # randomish number
i1b = InputArgInt()
i3 = InputArgInt()
bridge = [
ResOperation(rop.INT_LE, [i1b, ConstInt(19)], i3),
ResOperation(rop.GUARD_TRUE, [i3], None, descr=faildescr2),
ResOperation(rop.DEBUG_MERGE_POINT, [FakeString("bye"), 0, 0], None),
ResOperation(rop.JUMP, [i1b], None, descr=targettoken),
]
bridge[1].setfailargs([i1b])
self.cpu.compile_bridge(faildescr1, [i1b], bridge, looptoken)
name, address, size = agent.functions[1]
assert name == "Bridge # 0: bye (loop counter 1)"
# Would be exactly ==, but there are some guard failure recovery
# stubs in-between
assert address >= loopaddress + loopsize
assert size >= 10 # randomish number
deadframe = self.cpu.execute_token(looptoken, 2)
fail = self.cpu.get_latest_descr(deadframe)
assert fail.identifier == 2
res = self.cpu.get_int_value(deadframe, 0)
assert res == 20
def test_ops_offset(self):
from rpython.rlib import debug
looptoken = JitCellToken()
targettoken = TargetToken()
loop = parse("""
[i0]
label(i0, descr=targettoken)
i1 = int_add(i0, 1)
i2 = int_le(i1, 9)
jump(i1, descr=targettoken)
""", namespace=locals())
debug._log = dlog = debug.DebugLog()
info = self.cpu.compile_loop(loop.inputargs, loop.operations, looptoken)
ops_offset = info.ops_offset
debug._log = None
#
assert ops_offset is looptoken._x86_ops_offset
# 2*increment_debug_counter + ops + None
assert len(ops_offset) == 2 + len(loop.operations) + 1
assert (ops_offset[loop.operations[0]] <=
ops_offset[loop.operations[1]] <=
ops_offset[loop.operations[2]] <=
ops_offset[None])
def test_calling_convention(self, monkeypatch):
if WORD != 4:
py.test.skip("32-bit only test")
from rpython.jit.backend.x86.regloc import eax, edx
from rpython.jit.backend.x86 import codebuf, callbuilder
from rpython.jit.codewriter.effectinfo import EffectInfo
from rpython.rlib.libffi import types, clibffi
had_stdcall = hasattr(clibffi, 'FFI_STDCALL')
if not had_stdcall: # not running on Windows, but we can still test
monkeypatch.setattr(clibffi, 'FFI_STDCALL', 12345, raising=False)
monkeypatch.setattr(callbuilder, 'stdcall_or_cdecl', True)
else:
assert callbuilder.stdcall_or_cdecl
#
for real_ffi, reported_ffi in [
(clibffi.FFI_DEFAULT_ABI, clibffi.FFI_DEFAULT_ABI),
(clibffi.FFI_STDCALL, clibffi.FFI_DEFAULT_ABI),
(clibffi.FFI_STDCALL, clibffi.FFI_STDCALL)]:
cpu = self.cpu
mc = codebuf.MachineCodeBlockWrapper()
mc.MOV_rs(eax.value, 4) # argument 1
mc.MOV_rs(edx.value, 40) # argument 10
mc.SUB_rr(eax.value, edx.value) # return arg1 - arg10
if real_ffi == clibffi.FFI_DEFAULT_ABI:
mc.RET()
else:
mc.RET16_i(40)
rawstart = mc.materialize(cpu, [])
#
calldescr = cpu._calldescr_dynamic_for_tests([types.slong] * 10,
types.slong)
calldescr.get_call_conv = lambda: reported_ffi # <==== hack
# ^^^ we patch get_call_conv() so that the test also makes sense
# on Linux, because clibffi.get_call_conv() would always
# return FFI_DEFAULT_ABI on non-Windows platforms.
funcbox = ConstInt(rawstart)
i1 = InputArgInt()
i2 = InputArgInt()
c = ConstInt(-1)
faildescr = BasicFailDescr(1)
cz = ConstInt(0)
# we must call it repeatedly: if the stack pointer gets increased
# by 40 bytes by the STDCALL call, and if we don't expect it,
# then we are going to get our stack emptied unexpectedly by
# several repeated calls
ops = [
ResOperation(rop.CALL_RELEASE_GIL_I,
[cz, funcbox, i1, c, c, c, c, c, c, c, c, i2],
descr=calldescr),
ResOperation(rop.GUARD_NOT_FORCED, [], descr=faildescr),
ResOperation(rop.CALL_RELEASE_GIL_I,
[cz, funcbox, i1, c, c, c, c, c, c, c, c, i2],
descr=calldescr),
ResOperation(rop.GUARD_NOT_FORCED, [], descr=faildescr),
ResOperation(rop.CALL_RELEASE_GIL_I,
[cz, funcbox, i1, c, c, c, c, c, c, c, c, i2],
descr=calldescr),
ResOperation(rop.GUARD_NOT_FORCED, [], descr=faildescr),
ResOperation(rop.CALL_RELEASE_GIL_I,
[cz, funcbox, i1, c, c, c, c, c, c, c, c, i2],
descr=calldescr),
ResOperation(rop.GUARD_NOT_FORCED, [], descr=faildescr),
]
i3 = ops[0]
i4 = ops[2]
i5 = ops[4]
i6 = ops[6]
ops += [
ResOperation(rop.GUARD_FALSE, [i3],
descr=BasicFailDescr(0)),
ResOperation(rop.FINISH, [],
descr=BasicFinalDescr(1))
]
ops[-2].setfailargs([i3, i4, i5, i6])
ops[1].setfailargs([])
ops[3].setfailargs([])
ops[5].setfailargs([])
ops[7].setfailargs([])
looptoken = JitCellToken()
self.cpu.compile_loop([i1, i2], ops, looptoken)
deadframe = self.cpu.execute_token(looptoken, 123450, 123408)
fail = self.cpu.get_latest_descr(deadframe)
assert fail.identifier == 0
assert self.cpu.get_int_value(deadframe, 0) == 42
assert self.cpu.get_int_value(deadframe, 1) == 42
assert self.cpu.get_int_value(deadframe, 2) == 42
assert self.cpu.get_int_value(deadframe, 3) == 42
class TestDebuggingAssembler(object):
def setup_method(self, meth):
self.cpu = CPU(rtyper=None, stats=FakeStats())
self.cpu.setup_once()
def test_debugger_on(self):
from rpython.tool.logparser import parse_log_file, extract_category
from rpython.rlib import debug
targettoken, preambletoken = TargetToken(), TargetToken()
loop = """
[i0]
label(i0, descr=preambletoken)
debug_merge_point('xyz', 0, 0)
i1 = int_add(i0, 1)
i2 = int_ge(i1, 10)
guard_false(i2) []
label(i1, descr=targettoken)
debug_merge_point('xyz', 0, 0)
i11 = int_add(i1, 1)
i12 = int_ge(i11, 10)
guard_false(i12) []
jump(i11, descr=targettoken)
"""
ops = parse(loop, namespace={'targettoken': targettoken,
'preambletoken': preambletoken})
debug._log = dlog = debug.DebugLog()
try:
self.cpu.assembler.set_debug(True)
looptoken = JitCellToken()
self.cpu.compile_loop(ops.inputargs, ops.operations, looptoken)
self.cpu.execute_token(looptoken, 0)
# check debugging info
struct = self.cpu.assembler.loop_run_counters[0]
assert struct.i == 1
struct = self.cpu.assembler.loop_run_counters[1]
assert struct.i == 1
struct = self.cpu.assembler.loop_run_counters[2]
assert struct.i == 9
self.cpu.finish_once()
finally:
debug._log = None
l0 = ('debug_print', 'entry -1:1')
l1 = ('debug_print', preambletoken.repr_of_descr() + ':1')
l2 = ('debug_print', targettoken.repr_of_descr() + ':9')
assert ('jit-backend-counts', [l0, l1, l2]) in dlog
``` |
[
{
"content": "Repeat the code exactly:\n```python\n\"\"\"\nSynfirechain-like example\n\"\"\"\n#!/usr/bin/python\nimport os\nimport spynnaker.pyNN as p\nimport spynnaker_external_devices_plugin.pyNN as q\nimport numpy, pylab\n\np.setup(timestep=1.0, min_delay = 1.0, max_delay = 144.0)\nnNeurons = 3 # number of n... | [
{
"content": "Repeat the code exactly:\n<|memory_start|>```python\n\"\"\"\nSynfirechain-like example\n\"\"\"\n#!/usr/bin/python\nimport os\nimport spynnaker.pyNN as p\nimport spynnaker_external_devices_plugin.pyNN as q\nimport numpy, pylab\n\np.setup(timestep=1.0, min_delay = 1.0, max_delay = 144.0)\nnNeurons =... | ```python
"""
Synfirechain-like example
"""
#!/usr/bin/python
import os
import spynnaker.pyNN as p
import spynnaker_external_devices_plugin.pyNN as q
import numpy, pylab
p.setup(timestep=1.0, min_delay = 1.0, max_delay = 144.0)
nNeurons = 3 # number of neurons in each population
max_delay = 50
cell_params_lif = {'cm' : 0.25, # nF
'i_offset' : 0.0,
'tau_m' : 20.0,
'tau_refrac': 2.0,
'tau_syn_E' : 5.0,
'tau_syn_I' : 5.0,
'v_reset' : -70.0,
'v_rest' : -65.0,
'v_thresh' : -50.0
}
populations = list()
projections = list()
weight_to_spike = 2.0
delay = 3
delays = list()
loopConnections = list()
for i in range(0, nNeurons):
delays.append(float(delay))
singleConnection = (i, ((i + 1) % nNeurons), weight_to_spike, delay)
loopConnections.append(singleConnection)
injectionConnection = [(0, 0, weight_to_spike, 1)]
spikeArray = {'spike_times': [[0]]}
populations.append(p.Population(nNeurons, p.IF_curr_exp, cell_params_lif, label='spikes_out'))
populations.append(p.Population(1, p.SpikeSourceArray, spikeArray, label='inputSpikes_1'))
projections.append(p.Projection(populations[0], populations[0], p.FromListConnector(loopConnections)))
projections.append(p.Projection(populations[1], populations[0], p.FromListConnector(injectionConnection)))
populations[0].record()
q.activate_live_output_for(populations[0])
#populations[0].set_constraint(p.PlacerChipAndCoreConstraint(0,0,2))
#populations[1].set_constraint(p.PlacerChipAndCoreConstraint(0,0,3))
run_time = 10
print "Running for {} ms".format(run_time)
p.run(run_time)
v = None
gsyn = None
spikes = None
spikes = populations[0].getSpikes(compatible_output=True)
print "The number of spikes in pop 0 is", len(spikes)
if spikes is not None:
#print spikes
pylab.figure()
pylab.plot([i[1] for i in spikes], [i[0] for i in spikes], ".")
pylab.ylabel('neuron id')
pylab.xlabel('Time/ms')
pylab.xlim(0,run_time)
pylab.ylim(-1,2)
pylab.title('spikes')
pylab.show()
else:
print "No spikes received"
p.end()
``` |
[
{
"content": "Replicate the code snippet exactly, without paraphrasing or reformatting:\n```python\n# Copyright (c) 2014 by Ecreall under licence AGPL terms \n# available on http://www.gnu.org/licenses/agpl.html \n\n# licence: AGPL\n# author: Amen Souissi\n\nfrom pyramid.view import view_config\nfrom pyramid.ht... | [
{
"content": "Replicate the code snippet exactly, without paraphrasing or reformatting:\n<|memory_start|>```python\n# Copyright (c) 2014 by Ecreall under licence AGPL terms \n# available on http://www.gnu.org/licenses/agpl.html \n\n# licence: AGPL\n# author: Amen Souissi\n\nfrom pyramid.view import view_config\... | ```python
# Copyright (c) 2014 by Ecreall under licence AGPL terms
# available on http://www.gnu.org/licenses/agpl.html
# licence: AGPL
# author: Amen Souissi
from pyramid.view import view_config
from pyramid.httpexceptions import HTTPFound
from dace.util import getSite
from dace.processinstance.core import DEFAULTMAPPING_ACTIONS_VIEWS
from pontus.view import BasicView
from lac.content.processes.services_processes.behaviors import (
SeeModerationService, SeeModerationUnitService)
from lac.content.service import (
ModerationService, ModerationServiceUnit)
from lac.utilities.utils import (
ObjectRemovedException, generate_navbars)
@view_config(
name='seemoderationservice',
context=ModerationService,
renderer='pontus:templates/views_templates/grid.pt',
)
class SeeModerationServiceView(BasicView):
title = ''
name = 'seemoderationservice'
behaviors = [SeeModerationService]
template = 'lac:views/services_processes/moderation_service/templates/see_moderation_service.pt'
viewid = 'seemoderationservice'
def update(self):
self.execute(None)
result = {}
try:
navbars = generate_navbars(self, self.context, self.request)
except ObjectRemovedException:
return HTTPFound(self.request.resource_url(getSite(), ''))
values = {'object': self.context,
'navbar_body': navbars['navbar_body']}
body = self.content(args=values, template=self.template)['body']
item = self.adapt_item(body, self.viewid)
item['messages'] = navbars['messages']
item['isactive'] = navbars['isactive']
result.update(navbars['resources'])
result['coordinates'] = {self.coordinates: [item]}
return result
@view_config(
name='seemoderationserviceunit',
context=ModerationServiceUnit,
renderer='pontus:templates/views_templates/grid.pt',
)
class SeeModerationServiceUnitView(SeeModerationServiceView):
title = ''
name = 'seemoderationserviceunit'
behaviors = [SeeModerationUnitService]
template = 'lac:views/services_processes/moderation_service/templates/see_moderation_service.pt'
viewid = 'seemoderationserviceunit'
DEFAULTMAPPING_ACTIONS_VIEWS.update(
{SeeModerationService: SeeModerationServiceView})
DEFAULTMAPPING_ACTIONS_VIEWS.update(
{SeeModerationUnitService: SeeModerationServiceUnitView})
``` |
[
{
"content": "Here is the script:\n```python\n#!/usr/bin/env python3\n\n'''\n@file rpctest.py\n@author Gabriele Tozzi <gabriele@tozzi.eu>\n@package DoPhp\n@brief Simple RPC JSON client in python for tetsing methods\n'''\n\nimport sys\nimport argparse\nimport re\nimport hashlib\nimport http.client, urllib.parse\... | [
{
"content": "Here is the script:\n<|memory_start|>```python\n#!/usr/bin/env python3\n\n'''\n@file rpctest.py\n@author Gabriele Tozzi <gabriele@tozzi.eu>\n@package DoPhp\n@brief Simple RPC JSON client in python for tetsing methods\n'''\n\nimport sys\nimport argparse\nimport re\nimport hashlib\nimport http.clien... | ```python
#!/usr/bin/env python3
'''
@file rpctest.py
@author Gabriele Tozzi <gabriele@tozzi.eu>
@package DoPhp
@brief Simple RPC JSON client in python for tetsing methods
'''
import sys
import argparse
import re
import hashlib
import http.client, urllib.parse
import gzip, zlib
import json
import logging
class ParamAction(argparse.Action):
pre = re.compile(r'=')
''' Argumentparse action to process a parameter '''
def __call__(self, parser, namespace, values, option_string=None):
param = {}
for val in values:
try:
k, v = self.pre.split(val)
except ValueError:
print('Parameters must be in the form <name>=<value>', file=sys.stderr)
sys.exit(1)
param[k] = v
if not param:
param = None
setattr(namespace, self.dest, param)
class RpcTest:
SEP = '~'
COMPPRESSIONS = ('gzip', 'zlib')
def __init__(self, url, user=None, pwd=None, headers={}, auth='sign', gzip=False, deflate=False, compress=None):
'''
Init the RPC Client
@param url string: The base URL
@param user string: The username
@param pwd string: The password
@param headers dict: Custom headers
@param auth string: Authentication type [sign,plain]. Default: sign
@param gzip bool: If true, accepts gzip response
@param deflate bool: If true, accepts deflate response
@param compress string: Use given compression, if any ('gzip' or 'deflate')
'''
self.log = logging.getLogger(self.__class__.__name__)
# Parse the url
self.baseUrl = urllib.parse.urlparse(url)
if self.baseUrl.scheme == 'http':
self.conn = http.client.HTTPConnection
elif self.baseUrl.scheme == 'https':
self.conn = http.client.HTTPSConnection
else:
raise ValueError('Unknown scheme', self.baseUrl.scheme)
self.auth = auth
self.user = user
self.pwd = pwd
self.headers = headers
self.gzip = gzip
self.deflate = deflate
self.compress = compress
if self.compress and self.compress not in self.COMPPRESSIONS:
raise ValueError('Unknown compression', self.compress)
def run(self, method, **param):
# Connect
conn = self.conn(self.baseUrl.netloc)
# Build Accept-encoding header
accept = []
if self.gzip:
accept.append('gzip')
if self.deflate:
accept.append('deflate')
# Request the page
data = self.dump(**param)
encoding, body = self.encode(data)
headers = self.headers.copy()
if not 'Content-Type' in headers.keys():
headers['Content-Type'] = 'application/json'
if accept:
headers['Accept-Encoding'] = ', '.join(accept)
if encoding:
headers['Content-Encoding'] = encoding
url = self.baseUrl.path + '?do=' + method
if self.user or self.pwd:
# Build authentication
if self.auth == 'sign':
sign = hashlib.sha512()
sign.update(self.user.encode('utf-8'))
sign.update(self.SEP.encode('utf-8'))
sign.update(self.pwd.encode('utf-8'))
sign.update(self.SEP.encode('utf-8'))
sign.update(body)
headers['X-Auth-User'] = self.user
headers['X-Auth-Sign'] = sign.hexdigest()
elif self.auth == 'plain':
headers['X-Auth-User'] = self.user
headers['X-Auth-Pass'] = self.pwd
self.log.info("Sending request to %s://%s%s", self.baseUrl.scheme, self.baseUrl.netloc, url)
self.log.debug("HEADERS:\n%s", headers)
self.log.debug("BODY:\n%s", body)
conn.request('POST', url, body, headers)
# Return response
return conn.getresponse()
def encode(self, data):
''' Encode the data, return (content-encoding, encoded) '''
enc = None
data = data.encode('utf-8')
if self.compress == 'gzip':
enc = 'gzip'
data = gzip.compress(data)
elif self.compress == 'deflate':
enc = 'deflate'
data = zlib.compress(data)
return (enc, data)
def decode(self, res):
''' Decode the response, return raw data '''
data = res.read()
encoding = res.getheader('Content-Encoding')
self.log.info("Parsing response %s - %s, %d bytes of %s encoded data", res.status, res.reason, len(data), encoding)
self.log.debug("HEADERS:\n%s", res.getheaders())
if res.status != 200:
raise StatusError(res.status, data)
# Decode response
if not encoding:
decoded = data
elif encoding == 'gzip':
decoded = gzip.decompress(data)
elif encoding == 'deflate':
decoded = zlib.decompress(data)
else:
raise UnsupportedEncodingError(encoding)
return decoded
def dump(self, **param):
''' Creates a json dump for the data JSON '''
return json.dumps(param)
def parse(self, decoded):
try:
return json.loads(decoded.decode('utf-8'))
except ValueError:
raise ParseError(decoded)
class ParseError(RuntimeError):
''' Exception raise when failing to parse the data '''
def __init__(self, data):
self.data = data
super().__init__("Invalid response data:\n%s" % data)
class DecodeError(RuntimeError):
''' Exception raised by 'decode' method '''
pass
class StatusError(RuntimeError):
''' Raised when status is not 200 '''
def __init__(self, code, data):
self.code = code
self.data = data
super().__init__("Invalid response status %d" % self.code)
class UnsupportedEncodingError(RuntimeError):
''' Raised when encoding is not supported '''
def __init__(self, encoding):
self.encoding = encoding
super().__init__("Unsupported encoding %s" % encoding)
# -----------------------------------------------------------------------------
if __name__ == '__main__':
# Parse command line
parser = argparse.ArgumentParser(
description='Call an RPC method on server',
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser.add_argument('url', help='base server URL')
parser.add_argument('method', help='name of the method to call')
parser.add_argument('-a', '--auth', nargs=2, metavar=('USER','PASS'),
help='username and password for authentication')
parser.add_argument('-t', '--auth-type', choices=('sign', 'plain'), default='sign',
help='authentication type. WARNING: "plain" auth is NOT safe without SSL!')
parser.add_argument('-e', '--header', nargs='*', action=ParamAction,
help='adds an header <name>=<value>')
parser.add_argument('-g', '--gzip', action='store_true',
help='send accept gzip header')
parser.add_argument('-d', '--deflate', action='store_true',
help='send accept deflate header')
parser.add_argument('param', nargs='*', action=ParamAction,
help='adds a parameter <name>=<value> (use [] to specify a list)')
args = parser.parse_args()
params = {}
if args.param:
for k, v in args.param.items():
if v[0] == '[':
params[k] = json.loads(v)
else:
params[k] = v
headers = args.header if args.header else {}
logging.basicConfig(level=logging.DEBUG)
if args.auth:
rpc = RpcTest(args.url, args.auth[0], args.auth[1], headers=headers, auth=args.auth_type, gzip=args.gzip, deflate=args.deflate)
else:
rpc = RpcTest(args.url, headers=headers, gzip=args.gzip, deflate=args.deflate)
if params:
res = rpc.run(args.method, **params)
else:
res = rpc.run(args.method)
# Show result
print(rpc.parse(rpc.decode(res)))
sys.exit(0)
``` |
[
{
"content": "Here is the snippet:\n```python\nfrom genshi.builder import tag\n\nfrom trac.core import Component, implements\nfrom trac.ticket.api import ITicketActionController, TicketSystem\nfrom trac.perm import IPermissionRequestor\n\nrevision = \"$Rev: 6326 $\"\nurl = \"$URL: https://svn.edgewall.org/repos... | [
{
"content": "Here is the snippet:\n<|memory_start|>```python\nfrom genshi.builder import tag\n\nfrom trac.core import Component, implements\nfrom trac.ticket.api import ITicketActionController, TicketSystem\nfrom trac.perm import IPermissionRequestor\n\nrevision = \"$Rev: 6326 $\"\nurl = \"$URL: https://svn.ed... | ```python
from genshi.builder import tag
from trac.core import Component, implements
from trac.ticket.api import ITicketActionController, TicketSystem
from trac.perm import IPermissionRequestor
revision = "$Rev: 6326 $"
url = "$URL: https://svn.edgewall.org/repos/trac/tags/trac-0.12.2/sample-plugins/workflow/StatusFixer.py $"
class StatusFixerActionController(Component):
"""Provides the admin with a way to correct a ticket's status.
This plugin is especially useful when you made changes to your workflow,
and some ticket status are no longer valid. The tickets that are in those
status can then be set to some valid state.
Don't forget to add `StatusFixerActionController` to the workflow
option in [ticket].
If there is no workflow option, the line will look like this:
workflow = ConfigurableTicketWorkflow,StatusFixerActionController
"""
implements(ITicketActionController, IPermissionRequestor)
# IPermissionRequestor methods
def get_permission_actions(self):
return ['TICKET_STATUSFIX']
# ITicketActionController methods
def get_ticket_actions(self, req, ticket):
actions = []
if 'TICKET_STATUSFIX' in req.perm(ticket.resource):
actions.append((0, 'force_status'))
return actions
def get_all_status(self):
"""We return all the status that are used in the database so that the
user can query for used, but invalid, status."""
db = self.env.get_db_cnx()
cursor = db.cursor()
cursor.execute('SELECT DISTINCT status FROM ticket')
all_status = [row[0] for row in cursor]
cursor.close()
return all_status
def render_ticket_action_control(self, req, ticket, action):
# Need to use the list of all status so you can't manually set
# something to an invalid state.
selected_value = req.args.get('force_status_value', 'new')
all_status = TicketSystem(self.env).get_all_status()
render_control = tag.select(
[tag.option(x, selected=(x == selected_value and 'selected' or
None)) for x in all_status],
id='force_status_value', name='force_status_value')
return ("force status to:", render_control,
"The next status will be the selected one")
def get_ticket_changes(self, req, ticket, action):
return {'status': req.args.get('force_status_value')}
def apply_action_side_effects(self, req, ticket, action):
pass
``` |
[
{
"content": "```python\n#!/usr/bin/env python\n\nfrom nose.tools import *\nfrom utilities import Todo\nfrom utilities import execution_path\nimport tempfile\n\nimport os, sys, glob, mapnik2\n\ndef setup():\n # All of the paths used are relative, if we run the tests\n # from another directory we need to c... | [
{
"content": "<|memory_start|>```python\n#!/usr/bin/env python\n\nfrom nose.tools import *\nfrom utilities import Todo\nfrom utilities import execution_path\nimport tempfile\n\nimport os, sys, glob, mapnik2\n\ndef setup():\n # All of the paths used are relative, if we run the tests\n # from another direct... | ```python
#!/usr/bin/env python
from nose.tools import *
from utilities import Todo
from utilities import execution_path
import tempfile
import os, sys, glob, mapnik2
def setup():
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))
def test():
# TODO: Write a better test
# 1. Construct map in memory
# 2. Save map as XML
# 3. Load map to a second object
# 4. Compare both map objects
map = mapnik2.Map(256, 256)
raise Todo("map comparison is currently broken dou to lacking relative paths support (#324,#340")
def compare_map(in_map):
mapnik2.load_map(map, in_map)
(handle, test_map) = tempfile.mkstemp(suffix='.xml', prefix='mapnik-temp-map1-')
os.close(handle)
(handle, test_map2) = tempfile.mkstemp(suffix='.xml', prefix='mapnik-temp-map2-')
os.close(handle)
if os.path.exists(test_map):
os.remove(test_map)
mapnik2.save_map(map, test_map)
new_map = mapnik2.Map(256, 256)
mapnik2.load_map(new_map, test_map)
open(test_map2,'w').write(mapnik2.save_map_to_string(new_map))
diff = ' diff %s %s' % (os.path.abspath(test_map),os.path.abspath(test_map2))
try:
eq_(open(test_map).read(),open(test_map2).read())
except AssertionError, e:
raise AssertionError('serialized map "%s" not the same after being reloaded, \ncompare with command:\n\n$%s' % (in_map,diff))
if os.path.exists(test_map):
os.remove(test_map)
else:
# Fail, the map wasn't written
return False
for m in glob.glob("../data/good_maps/*.xml"):
compare_map(m)
``` |
[
{
"content": "Repeat the code precisely as written (spacing intact):\n```python\n# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file\n# for details. All rights reserved. Use of this source code is governed by a\n# BSD-style license that can be found in the LICENSE file.\n\nimport gfm\n\... | [
{
"content": "Repeat the code precisely as written (spacing intact):\n<|memory_start|>```python\n# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file\n# for details. All rights reserved. Use of this source code is governed by a\n# BSD-style license that can be found in the LICENSE file.\... | ```python
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import gfm
from test_case import TestCase
class TestMultilineLink(TestCase):
def setUp(self):
self.spaced_link = gfm.SpacedLinkExtension([])
def test_normal_link(self):
self.assert_renders("""
<p><a href="href">link</a></p>
""", """
[link](href)
""", [self.spaced_link])
def test_normal_reference(self):
self.assert_renders("""
<p><a href="href">link</a></p>
""", """
[link][id]
[id]: href
""", [self.spaced_link])
def test_normal_image_link(self):
self.assert_renders("""
<p><img alt="alt" src="href" /></p>
""", """

""", [self.spaced_link])
def test_normal_image_reference(self):
self.assert_renders("""
<p><img alt="alt" src="href" /></p>
""", """
![alt][id]
[id]: href
""", [self.spaced_link])
def test_spaced_link(self):
self.assert_renders("""
<p><a href="href">link</a></p>
""", """
[link] (href)
""", [self.spaced_link])
def test_spaced_reference(self):
self.assert_renders("""
<p><a href="href">link</a></p>
""", """
[link] [id]
[id]: href
""", [self.spaced_link])
def test_spaced_image_link(self):
self.assert_renders("""
<p><img alt="alt" src="href" /></p>
""", """
![alt] (href)
""", [self.spaced_link])
def test_spaced_image_reference(self):
self.assert_renders("""
<p><img alt="alt" src="href" /></p>
""", """
![alt] [id]
[id]: href
""", [self.spaced_link])
def test_multiline_link(self):
self.assert_renders("""
<p><a href="href">link</a></p>
""", """
[link]
(href)
""", [self.spaced_link])
def test_multiline_reference(self):
self.assert_renders("""
<p><a href="href">link</a></p>
""", """
[link]
[id]
[id]: href
""", [self.spaced_link])
def test_multiline_image_link(self):
self.assert_renders("""
<p><img alt="alt" src="href" /></p>
""", """
![alt]
(href)
""", [self.spaced_link])
def test_multiline_image_reference(self):
self.assert_renders("""
<p><img alt="alt" src="href" /></p>
""", """
![alt]
[id]
[id]: href
""", [self.spaced_link])
def test_multiline_and_spaced_link(self):
self.assert_renders("""
<p><a href="href">link</a></p>
""", """
[link]
(href)
""", [self.spaced_link])
def test_multiline_and_spaced_reference(self):
self.assert_renders("""
<p><a href="href">link</a></p>
""", """
[link]
[id]
[id]: href
""", [self.spaced_link])
def test_multiline_and_spaced_image_link(self):
self.assert_renders("""
<p><img alt="alt" src="href" /></p>
""", """
![alt]
(href)
""", [self.spaced_link])
def test_multiline_and_spaced_image_reference(self):
self.assert_renders("""
<p><img alt="alt" src="href" /></p>
""", """
![alt]
[id]
[id]: href
""", [self.spaced_link])
``` |
[
{
"content": "Here is some code:\n```python\n#!/usr/bin/python\n\nimport MySQLdb\nimport json\nfrom ConfigParser import ConfigParser\n\nLIMIT = 7\nuser_and_client_stat_columns = ('TOTAL_CONNECTIONS', 'CONCURRENT_CONNECTIONS', 'CONNECTED_TIME', 'BUSY_TIME', 'CPU_TIME', 'BYTES_RECEIVED', 'BYTES_SENT', 'BINLOG_BYT... | [
{
"content": "Here is some code:\n<|memory_start|>```python\n#!/usr/bin/python\n\nimport MySQLdb\nimport json\nfrom ConfigParser import ConfigParser\n\nLIMIT = 7\nuser_and_client_stat_columns = ('TOTAL_CONNECTIONS', 'CONCURRENT_CONNECTIONS', 'CONNECTED_TIME', 'BUSY_TIME', 'CPU_TIME', 'BYTES_RECEIVED', 'BYTES_SE... | ```python
#!/usr/bin/python
import MySQLdb
import json
from ConfigParser import ConfigParser
LIMIT = 7
user_and_client_stat_columns = ('TOTAL_CONNECTIONS', 'CONCURRENT_CONNECTIONS', 'CONNECTED_TIME', 'BUSY_TIME', 'CPU_TIME', 'BYTES_RECEIVED', 'BYTES_SENT', 'BINLOG_BYTES_WRITTEN', 'ROWS_READ', 'ROWS_SENT', 'ROWS_DELETED', 'ROWS_INSERTED', 'ROWS_UPDATED', 'SELECT_COMMANDS', 'UPDATE_COMMANDS', 'OTHER_COMMANDS', 'COMMIT_TRANSACTIONS', 'ROLLBACK_TRANSACTIONS', 'DENIED_CONNECTIONS', 'LOST_CONNECTIONS', 'ACCESS_DENIED', 'EMPTY_QUERIES')
# data holding dicts
data_user_stat = {}
data_client_stat = {}
data_index_stat = {}
data_table_stat = {}
try:
# Configuration parsers
cfg = ConfigParser()
cfg.read('/root/.my.cnf')
# Connect to mysql db and get cursor info
db = MySQLdb.connect(host = cfg.get(section='client',option='host'), db = 'INFORMATION_SCHEMA', user = cfg.get(section='client',option='user'), passwd = cfg.get(section='client',option ='password'))
cur = db.cursor()
#gather USER_STATISTICS and CLIENT_STATISTICS info
for col in user_and_client_stat_columns:
cur.execute("SELECT USER,%s FROM USER_STATISTICS ORDER BY %s DESC LIMIT %d" % (col, col, LIMIT))
data_user_stat[col] = cur.fetchall()
cur.execute("SELECT CLIENT,%s FROM CLIENT_STATISTICS ORDER BY %s DESC LIMIT %d" % (col, col, LIMIT))
data_client_stat[col] = cur.fetchall()
# gather INDEX_STATISTICS
cur.execute("select TABLE_SCHEMA, TABLE_NAME, INDEX_NAME, ROWS_READ from INDEX_STATISTICS order by ROWS_READ desc limit %d" % LIMIT)
data_index_stat['ROWS_READ'] = cur.fetchall()
# gather TABLE_STATISTICS
cur.execute("select TABLE_SCHEMA,TABLE_NAME,ROWS_CHANGED from TABLE_STATISTICS order by ROWS_CHANGED desc limit %d" % LIMIT)
data_table_stat['ROWS_CHANGED'] = cur.fetchall()
cur.execute("select TABLE_SCHEMA,TABLE_NAME,ROWS_READ from TABLE_STATISTICS order by ROWS_READ desc limit %d" % LIMIT)
data_table_stat['ROWS_READ'] = cur.fetchall()
cur.execute("select TABLE_SCHEMA,TABLE_NAME,ROWS_CHANGED_X_INDEXES from TABLE_STATISTICS order by ROWS_CHANGED_X_INDEXES desc limit %d" % LIMIT)
data_table_stat['ROWS_CHANGED_X_INDEXES'] = cur.fetchall()
cur.execute("select TABLE_SCHEMA,TABLE_NAME,ROWS_READ from TABLE_STATISTICS where TABLE_NAME like '%s' order by ROWS_READ desc limit %d" % ("%comments%",LIMIT))
data_table_stat['ROWS_READ_comments'] = cur.fetchall()
cur.execute("select TABLE_SCHEMA,TABLE_NAME,ROWS_CHANGED from TABLE_STATISTICS where TABLE_NAME REGEXP 'gast|guest|gasten|gjeste|gbook|gaest' order by ROWS_CHANGED desc limit %d" % LIMIT)
data_table_stat['ROWS_CHANGED_guestbook'] = cur.fetchall()
querystring = {'ROWS_CHANGED_comments':'%comments%' , 'ROWS_CHANGED_phpbbuser': 'phpbb%user%', 'ROWS_CHANGED_phpbbloginattempt':'phpbb%login%attempt%','ROWS_CHANGED_phpbbpost': 'phpbb%post%', 'ROWS_CHANGED_wpcomments': '%wp%comments%', 'ROWS_CHANGED_wpposts':'%wp%posts%', 'ROWS_CHANGED_wpusers': '%wp%users%','ROWS_CHANGED_users': 'users%', 'ROWS_CHANGED_session':'%session%', 'ROWS_CHANGED_friend': '%friend%' }
for key in querystring.keys():
cur.execute("select TABLE_SCHEMA,TABLE_NAME,ROWS_CHANGED from TABLE_STATISTICS where TABLE_NAME like '%s' order by ROWS_CHANGED desc limit %d" % (querystring[key], LIMIT))
data_table_stat[key] = cur.fetchall()
print json.dumps({'USER_STATISTICS': data_user_stat, 'CLIENT_STATISTICS': data_client_stat, 'INDEX_STATISTICS': data_index_stat ,'TABLE_STATISTICS': data_table_stat})
except Exception,e:
print e.message
finally:
#close db connection
cur.close()
db.close()
``` |
[
{
"content": "```python\nimport numpy as np\nfrom scipy.misc import imsave\n\nfrom libcore import Img\nfrom libcore import DistortionCorrection, DistortionCorrectionPoint\n\nimage_path = '../schraegbild_tempelhof.jpg'\n\n\ndef main():\n b1()\n\n\ndef b1():\n image = Img.load_image(image_path)\n\n targe... | [
{
"content": "<|memory_start|>```python\nimport numpy as np\nfrom scipy.misc import imsave\n\nfrom libcore import Img\nfrom libcore import DistortionCorrection, DistortionCorrectionPoint\n\nimage_path = '../schraegbild_tempelhof.jpg'\n\n\ndef main():\n b1()\n\n\ndef b1():\n image = Img.load_image(image_pa... | ```python
import numpy as np
from scipy.misc import imsave
from libcore import Img
from libcore import DistortionCorrection, DistortionCorrectionPoint
image_path = '../schraegbild_tempelhof.jpg'
def main():
b1()
def b1():
image = Img.load_image(image_path)
target_image_size_height = 900
target_image_size_witdh = 600
new_x_size = target_image_size_witdh*3
new_y_size = target_image_size_height*3
new_image = np.zeros((new_x_size, new_y_size, 3))
# links oben
pass_point_1_x = 344.0
pass_point_1_y = 334.0
target_point_1_x = 0.0
target_point_1_y = 0.0
# links unten
pass_point_2_x = 300.0
pass_point_2_y = 456.0
target_point_2_x = 0.0
target_point_2_y = target_image_size_height
pass_point_3_x = 694.0
pass_point_3_y = 432.0
#rechts unten
#pass_point_3_x = 690.0
#pass_point_3_y = 460.0
target_point_3_x = target_image_size_witdh
target_point_3_y = target_image_size_height
#recht oben
pass_point_4_x = 548.0
pass_point_4_y = 330.0
target_point_4_x = target_image_size_witdh
target_point_4_y = 0.0
translation_factor = 0
points = [DistortionCorrectionPoint(344.0, 344.0, 0.0+translation_factor, 0.0+translation_factor), # links oben
DistortionCorrectionPoint(300.0, 456.0, 0.0+translation_factor, target_image_size_height+translation_factor), # links unten
DistortionCorrectionPoint(694.0, 432.0, target_image_size_witdh+translation_factor, target_image_size_height+translation_factor),
DistortionCorrectionPoint(548.0, 330.0, target_image_size_witdh+translation_factor, 0.0+translation_factor)] # rechts unten
new_image = DistortionCorrection.distortion_correction(points, image, new_image)
imsave("../images/test.jpg", new_image)
if __name__ == "__main__": main()
``` |
[
{
"content": "Provide a verbatim copy of the code:\n```python\nimport base64\nimport hashlib\nimport hmac\nimport imghdr\nfrom wsgiref.util import FileWrapper\n\nfrom django.conf import settings\nfrom django.core.exceptions import ImproperlyConfigured, PermissionDenied\nfrom django.http import HttpResponse, Htt... | [
{
"content": "Provide a verbatim copy of the code:\n<|memory_start|>```python\nimport base64\nimport hashlib\nimport hmac\nimport imghdr\nfrom wsgiref.util import FileWrapper\n\nfrom django.conf import settings\nfrom django.core.exceptions import ImproperlyConfigured, PermissionDenied\nfrom django.http import H... | ```python
import base64
import hashlib
import hmac
import imghdr
from wsgiref.util import FileWrapper
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured, PermissionDenied
from django.http import HttpResponse, HttpResponsePermanentRedirect, StreamingHttpResponse
from django.shortcuts import get_object_or_404
from django.urls import reverse
from django.utils.decorators import classonlymethod
from django.utils.encoding import force_text
from django.views.generic import View
from wagtail.images import get_image_model
from wagtail.images.exceptions import InvalidFilterSpecError
from wagtail.images.models import SourceImageIOError
from wagtail.utils.sendfile import sendfile
def generate_signature(image_id, filter_spec, key=None):
if key is None:
key = settings.SECRET_KEY
# Key must be a bytes object
if isinstance(key, str):
key = key.encode()
# Based on libthumbor hmac generation
# https://github.com/thumbor/libthumbor/blob/b19dc58cf84787e08c8e397ab322e86268bb4345/libthumbor/crypto.py#L50
url = '{}/{}/'.format(image_id, filter_spec)
return force_text(base64.urlsafe_b64encode(hmac.new(key, url.encode(), hashlib.sha1).digest()))
def verify_signature(signature, image_id, filter_spec, key=None):
return force_text(signature) == generate_signature(image_id, filter_spec, key=key)
def generate_image_url(image, filter_spec, viewname='wagtailimages_serve', key=None):
signature = generate_signature(image.id, filter_spec, key)
url = reverse(viewname, args=(signature, image.id, filter_spec))
url += image.file.name[len('original_images/'):]
return url
class ServeView(View):
model = get_image_model()
action = 'serve'
key = None
@classonlymethod
def as_view(cls, **initkwargs):
if 'action' in initkwargs:
if initkwargs['action'] not in ['serve', 'redirect']:
raise ImproperlyConfigured("ServeView action must be either 'serve' or 'redirect'")
return super(ServeView, cls).as_view(**initkwargs)
def get(self, request, signature, image_id, filter_spec):
if not verify_signature(signature.encode(), image_id, filter_spec, key=self.key):
raise PermissionDenied
image = get_object_or_404(self.model, id=image_id)
# Get/generate the rendition
try:
rendition = image.get_rendition(filter_spec)
except SourceImageIOError:
return HttpResponse("Source image file not found", content_type='text/plain', status=410)
except InvalidFilterSpecError:
return HttpResponse("Invalid filter spec: " + filter_spec, content_type='text/plain', status=400)
return getattr(self, self.action)(rendition)
def serve(self, rendition):
# Open and serve the file
rendition.file.open('rb')
image_format = imghdr.what(rendition.file)
return StreamingHttpResponse(FileWrapper(rendition.file),
content_type='image/' + image_format)
def redirect(self, rendition):
# Redirect to the file's public location
return HttpResponsePermanentRedirect(rendition.url)
serve = ServeView.as_view()
class SendFileView(ServeView):
backend = None
def serve(self, rendition):
return sendfile(self.request, rendition.file.path, backend=self.backend)
``` |
[
{
"content": "Here is a code snippet:\n```python\n#!/usr/bin/env python\n\"\"\"This file contains a helper class for the flows.\n\nThis flow context class provides all the methods for handling flows (i.e.,\ncalling clients, changing state, ...).\n\nEach flow must have a flow runner before it can be executed. Th... | [
{
"content": "Here is a code snippet:\n<|memory_start|>```python\n#!/usr/bin/env python\n\"\"\"This file contains a helper class for the flows.\n\nThis flow context class provides all the methods for handling flows (i.e.,\ncalling clients, changing state, ...).\n\nEach flow must have a flow runner before it can... | ```python
#!/usr/bin/env python
"""This file contains a helper class for the flows.
This flow context class provides all the methods for handling flows (i.e.,
calling clients, changing state, ...).
Each flow must have a flow runner before it can be executed. The flow runner is
responsible for queuing messages and maintaining scheduling state (e.g. request
IDs, outstanding requests, quotas etc),
Runners form a tree structure: A top level runner has no parent, but child
runners have a parent. For example, when a flow calls CallFlow(), the runner
creates a new flow (with a child runner) and passes execution to the new
flow. The child flow's runner queues messages on its parent's message
queues. The top level flow runner ends up with all the messages for all its
children in its queues, and then flushes them all at once to the data
stores. The goal is to prevent child flows from sending messages to the data
store before their parent's messages since this will create a race condition
(for example a child's client requests may be answered before the parent). We
also need to ensure that client messages for child flows do not get queued until
the child flow itself has finished running and is stored into the data store.
The following is a summary of the CallFlow() sequence:
1. The top level flow runner has no parent_runner.
2. The flow calls self.CallFlow() which is delegated to the flow's runner's
CallFlow() method.
3. The flow runner calls StartFlow(). This creates a child flow and a new flow
runner. The new runner has as a parent the top level flow.
4. The child flow calls CallClient() which schedules some messages for the
client. Since its runner has a parent runner, the messages are queued on the
parent runner's message queues.
5. The child flow completes execution of its Start() method, and its state gets
stored in the data store.
6. Execution returns to the parent flow, which may also complete, and serialize
its state to the data store.
7. At this point the top level flow runner contains in its message queues all
messages from all child flows. It then syncs all its queues to the data store
at the same time. This guarantees that client messages from child flows are
scheduled after the child flow itself is serialized into the data store.
To manage the flow queues, we have a QueueManager object. The Queue manager
abstracts the accesses to the queue by maintaining internal queues of outgoing
messages and providing methods for retrieving requests and responses from the
queues. Each flow runner has a queue manager which is uses to manage the flow's
queues. Child flow runners all share their parent's queue manager.
"""
import threading
import traceback
import logging
from grr.lib import aff4
from grr.lib import config_lib
from grr.lib import data_store
from grr.lib import events
# Note: OutputPluginDescriptor is also needed implicitly by FlowRunnerArgs
from grr.lib import output_plugin as output_plugin_lib
from grr.lib import queue_manager
from grr.lib import rdfvalue
from grr.lib import stats
from grr.lib import utils
from grr.lib.aff4_objects import multi_type_collection
from grr.lib.aff4_objects import sequential_collection
from grr.lib.aff4_objects import users as aff4_users
from grr.lib.rdfvalues import client as rdf_client
from grr.lib.rdfvalues import flows as rdf_flows
from grr.lib.rdfvalues import protodict as rdf_protodict
class FlowRunnerError(Exception):
"""Raised when there is an error during state transitions."""
class FlowLogCollection(sequential_collection.IndexedSequentialCollection):
RDF_TYPE = rdf_flows.FlowLog
# TODO(user): Another pickling issue. Remove this asap, this will
# break displaying old flows though so we will have to keep this
# around for a while.
FlowRunnerArgs = rdf_flows.FlowRunnerArgs # pylint: disable=invalid-name
RESULTS_SUFFIX = "Results"
RESULTS_PER_TYPE_SUFFIX = "ResultsPerType"
OUTPUT_PLUGIN_BASE_SUFFIX = "PluginOutput"
class FlowRunner(object):
"""The flow context class for hunts.
This is essentially the same as a normal context but it processes
all the requests that arrive regardless of any order such that one client that
doesn't respond does not make the whole hunt wait.
"""
def __init__(self, flow_obj, parent_runner=None, runner_args=None,
token=None):
"""Constructor for the Flow Runner.
Args:
flow_obj: The flow object this runner will run states for.
parent_runner: The parent runner of this runner.
runner_args: A FlowRunnerArgs() instance containing initial values. If not
specified, we use the runner_args from the flow_obj.
token: An instance of access_control.ACLToken security token.
"""
self.token = token or flow_obj.token
self.parent_runner = parent_runner
# If we have a parent runner, we use its queue manager.
if parent_runner is not None:
self.queue_manager = parent_runner.queue_manager
else:
# Otherwise we use a new queue manager.
self.queue_manager = queue_manager.QueueManager(token=self.token)
self.queued_replies = []
self.outbound_lock = threading.Lock()
self.flow_obj = flow_obj
# Initialize from a new runner args proto.
if runner_args is not None:
self.runner_args = runner_args
self.session_id = self.GetNewSessionID()
self.flow_obj.urn = self.session_id
# Flow state does not have a valid context, we need to create one.
self.context = self.InitializeContext(runner_args)
self.flow_obj.context = self.context
self.context.session_id = self.session_id
else:
# Retrieve args from the flow object's context. The flow object is
# responsible for storing our context, although they do not generally
# access it directly.
self.context = self.flow_obj.context
self.runner_args = self.flow_obj.runner_args
# Populate the flow object's urn with the session id.
self.flow_obj.urn = self.session_id = self.context.session_id
# Sent replies are cached so that they can be processed by output plugins
# when the flow is saved.
self.sent_replies = []
# If we're running a child flow and send_replies=True, but
# write_intermediate_results=False, we don't want to create an output
# collection object. We also only want to create it if runner_args are
# passed as a parameter, so that the new context is initialized.
#
# We can't create the collection as part of InitializeContext, as flow's
# urn is not known when InitializeContext runs.
if runner_args is not None and self.IsWritingResults():
with data_store.DB.GetMutationPool(token=self.token) as mutation_pool:
self.CreateCollections(mutation_pool)
def CreateCollections(self, mutation_pool):
logs_collection_urn = self._GetLogsCollectionURN(
self.runner_args.logs_collection_urn)
for urn, collection_type in [
(self.output_urn, sequential_collection.GeneralIndexedCollection),
(self.multi_type_output_urn, multi_type_collection.MultiTypeCollection),
(logs_collection_urn, FlowLogCollection),
]:
with aff4.FACTORY.Create(
urn,
collection_type,
mode="w",
mutation_pool=mutation_pool,
token=self.token):
pass
def IsWritingResults(self):
return (not self.parent_runner or not self.runner_args.send_replies or
self.runner_args.write_intermediate_results)
@property
def multi_type_output_urn(self):
return self.flow_obj.urn.Add(RESULTS_PER_TYPE_SUFFIX)
@property
def output_urn(self):
return self.flow_obj.urn.Add(RESULTS_SUFFIX)
def _GetLogsCollectionURN(self, logs_collection_urn):
if self.parent_runner is not None and not logs_collection_urn:
# We are a child runner, we should have been passed a
# logs_collection_urn
raise RuntimeError("Flow: %s has a parent %s but no logs_collection_urn"
" set." % (self.flow_obj.urn, self.parent_runner))
# If we weren't passed a collection urn, create one in our namespace.
return logs_collection_urn or self.flow_obj.urn.Add("Logs")
def OpenLogsCollection(self, logs_collection_urn, mode="w"):
"""Open the parent-flow logs collection for writing or create a new one.
If we receive a logs_collection_urn here it is being passed from the parent
flow runner into the new runner created by the flow object.
For a regular flow the call sequence is:
flow_runner --StartFlow--> flow object --CreateRunner--> (new) flow_runner
For a hunt the call sequence is:
hunt_runner --CallFlow--> flow_runner --StartFlow--> flow object
--CreateRunner--> (new) flow_runner
Args:
logs_collection_urn: RDFURN pointing to parent logs collection
mode: Mode to use for opening, "r", "w", or "rw".
Returns:
FlowLogCollection open with mode.
Raises:
RuntimeError: on parent missing logs_collection.
"""
return aff4.FACTORY.Create(
self._GetLogsCollectionURN(logs_collection_urn),
FlowLogCollection,
mode=mode,
object_exists=True,
token=self.token)
def InitializeContext(self, args):
"""Initializes the context of this flow."""
if args is None:
args = rdf_flows.FlowRunnerArgs()
output_plugins_states = []
for plugin_descriptor in args.output_plugins:
if not args.client_id:
self.Log("Not initializing output plugin %s as flow does not run on "
"the client.", plugin_descriptor.plugin_name)
continue
output_base_urn = self.session_id.Add(OUTPUT_PLUGIN_BASE_SUFFIX)
plugin_class = plugin_descriptor.GetPluginClass()
plugin = plugin_class(
self.output_urn,
args=plugin_descriptor.plugin_args,
output_base_urn=output_base_urn,
token=self.token)
try:
plugin.InitializeState()
# TODO(user): Those do not need to be inside the state, they
# could be part of the plugin descriptor.
plugin.state["logs"] = []
plugin.state["errors"] = []
output_plugins_states.append(
rdf_flows.OutputPluginState(
plugin_state=plugin.state, plugin_descriptor=plugin_descriptor))
except Exception as e: # pylint: disable=broad-except
logging.info("Plugin %s failed to initialize (%s), ignoring it.",
plugin, e)
parent_creator = None
if self.parent_runner:
parent_creator = self.parent_runner.context.creator
context = rdf_flows.FlowContext(
create_time=rdfvalue.RDFDatetime.Now(),
creator=parent_creator or self.token.username,
current_state="Start",
output_plugins_states=output_plugins_states,
remaining_cpu_quota=args.cpu_limit,
state=rdf_flows.FlowContext.State.RUNNING,
# Have we sent a notification to the user.
user_notified=False,)
return context
def GetNewSessionID(self):
"""Returns a random session ID for this flow based on the runner args.
Returns:
A formatted session id URN.
"""
# Calculate a new session id based on the flow args. Note that our caller
# can specify the base path to the session id, but they can not influence
# the exact session id we pick. This ensures that callers can not engineer a
# session id clash forcing us to overwrite an existing flow.
base = self.runner_args.base_session_id
if base is None:
base = self.runner_args.client_id or aff4.ROOT_URN
base = base.Add("flows")
return rdfvalue.SessionID(base=base, queue=self.runner_args.queue)
def OutstandingRequests(self):
"""Returns the number of all outstanding requests.
This is used to determine if the flow needs to be destroyed yet.
Returns:
the number of all outstanding requests.
"""
return self.context.outstanding_requests
def CallState(self,
messages=None,
next_state="",
request_data=None,
start_time=None):
"""This method is used to schedule a new state on a different worker.
This is basically the same as CallFlow() except we are calling
ourselves. The state will be invoked in a later time and receive all the
messages we send.
Args:
messages: A list of rdfvalues to send. If the last one is not a
GrrStatus, we append an OK Status.
next_state: The state in this flow to be invoked with the responses.
request_data: Any dict provided here will be available in the
RequestState protobuf. The Responses object maintains a reference
to this protobuf for use in the execution of the state method. (so
you can access this data by responses.request).
start_time: Start the flow at this time. This Delays notification for
flow processing into the future. Note that the flow may still be
processed earlier if there are client responses waiting.
Raises:
FlowRunnerError: if the next state is not valid.
"""
if messages is None:
messages = []
# Check if the state is valid
if not getattr(self.flow_obj, next_state):
raise FlowRunnerError("Next state %s is invalid.")
# Queue the response message to the parent flow
request_state = rdf_flows.RequestState(
id=self.GetNextOutboundId(),
session_id=self.context.session_id,
client_id=self.runner_args.client_id,
next_state=next_state)
if request_data:
request_state.data = rdf_protodict.Dict().FromDict(request_data)
self.QueueRequest(request_state, timestamp=start_time)
# Add the status message if needed.
if not messages or not isinstance(messages[-1], rdf_flows.GrrStatus):
messages.append(rdf_flows.GrrStatus())
# Send all the messages
for i, payload in enumerate(messages):
if isinstance(payload, rdfvalue.RDFValue):
msg = rdf_flows.GrrMessage(
session_id=self.session_id,
request_id=request_state.id,
response_id=1 + i,
auth_state=rdf_flows.GrrMessage.AuthorizationState.AUTHENTICATED,
payload=payload,
type=rdf_flows.GrrMessage.Type.MESSAGE)
if isinstance(payload, rdf_flows.GrrStatus):
msg.type = rdf_flows.GrrMessage.Type.STATUS
else:
raise FlowRunnerError("Bad message %s of type %s." % (payload,
type(payload)))
self.QueueResponse(msg, start_time)
# Notify the worker about it.
self.QueueNotification(session_id=self.session_id, timestamp=start_time)
def ScheduleKillNotification(self):
"""Schedules a kill notification for this flow."""
# Create a notification for the flow in the future that
# indicates that this flow is in progess. We'll delete this
# notification when we're done with processing completed
# requests. If we're stuck for some reason, the notification
# will be delivered later and the stuck flow will get
# terminated.
stuck_flows_timeout = rdfvalue.Duration(config_lib.CONFIG[
"Worker.stuck_flows_timeout"])
kill_timestamp = (rdfvalue.RDFDatetime().Now() + stuck_flows_timeout)
with queue_manager.QueueManager(token=self.token) as manager:
manager.QueueNotification(
session_id=self.session_id,
in_progress=True,
timestamp=kill_timestamp)
# kill_timestamp may get updated via flow.HeartBeat() calls, so we
# have to store it in the context.
self.context.kill_timestamp = kill_timestamp
def HeartBeat(self):
# If kill timestamp is set (i.e. if the flow is currently being
# processed by the worker), delete the old "kill if stuck" notification
# and schedule a new one, further in the future.
if self.context.kill_timestamp:
with queue_manager.QueueManager(token=self.token) as manager:
manager.DeleteNotification(
self.session_id,
start=self.context.kill_timestamp,
end=self.context.kill_timestamp + rdfvalue.Duration("1s"))
stuck_flows_timeout = rdfvalue.Duration(config_lib.CONFIG[
"Worker.stuck_flows_timeout"])
self.context.kill_timestamp = (
rdfvalue.RDFDatetime().Now() + stuck_flows_timeout)
manager.QueueNotification(
session_id=self.session_id,
in_progress=True,
timestamp=self.context.kill_timestamp)
def FinalizeProcessCompletedRequests(self, notification):
# Delete kill notification as the flow got processed and is not
# stuck.
with queue_manager.QueueManager(token=self.token) as manager:
manager.DeleteNotification(
self.session_id,
start=self.context.kill_timestamp,
end=self.context.kill_timestamp)
self.context.kill_timestamp = None
# If a flow raises in one state, the remaining states will not
# be processed. This is indistinguishable from an incomplete
# state due to missing responses / status so we need to check
# here if the flow is still running before rescheduling.
if (self.IsRunning() and notification.last_status and
(self.context.next_processed_request <= notification.last_status)):
logging.debug("Had to reschedule a notification: %s", notification)
# We have received a notification for a specific request but
# could not process that request. This might be a race
# condition in the data store so we reschedule the
# notification in the future.
delay = config_lib.CONFIG["Worker.notification_retry_interval"]
notification.ttl -= 1
if notification.ttl:
manager.QueueNotification(
notification, timestamp=notification.timestamp + delay)
def ProcessCompletedRequests(self, notification, unused_thread_pool=None):
"""Go through the list of requests and process the completed ones.
We take a snapshot in time of all requests and responses for this flow. We
then process as many completed requests as possible. If responses are not
quite here we leave it for next time.
It is safe to call this function as many times as needed. NOTE: We assume
that the flow queue is locked so another worker is not processing these
messages while we are. It is safe to insert new messages to the flow:state
queue.
Args:
notification: The notification object that triggered this processing.
"""
self.ScheduleKillNotification()
try:
self._ProcessCompletedRequests(notification)
finally:
self.FinalizeProcessCompletedRequests(notification)
def _ProcessCompletedRequests(self, notification):
"""Does the actual processing of the completed requests."""
# First ensure that client messages are all removed. NOTE: We make a new
# queue manager here because we want only the client messages to be removed
# ASAP. This must happen before we actually run the flow to ensure the
# client requests are removed from the client queues.
with queue_manager.QueueManager(token=self.token) as manager:
for request, _ in manager.FetchCompletedRequests(
self.session_id, timestamp=(0, notification.timestamp)):
# Requests which are not destined to clients have no embedded request
# message.
if request.HasField("request"):
manager.DeQueueClientRequest(request.client_id,
request.request.task_id)
# The flow is dead - remove all outstanding requests and responses.
if not self.IsRunning():
self.queue_manager.DestroyFlowStates(self.session_id)
return
processing = []
while True:
try:
# Here we only care about completed requests - i.e. those requests with
# responses followed by a status message.
for request, responses in self.queue_manager.FetchCompletedResponses(
self.session_id, timestamp=(0, notification.timestamp)):
if request.id == 0:
continue
if not responses:
break
# We are missing a needed request - maybe its not completed yet.
if request.id > self.context.next_processed_request:
stats.STATS.IncrementCounter("grr_response_out_of_order")
break
# Not the request we are looking for - we have seen it before
# already.
if request.id < self.context.next_processed_request:
self.queue_manager.DeleteFlowRequestStates(self.session_id, request)
continue
if not responses:
continue
# Do we have all the responses here? This can happen if some of the
# responses were lost.
if len(responses) != responses[-1].response_id:
# If we can retransmit do so. Note, this is different from the
# automatic retransmission facilitated by the task scheduler (the
# Task.task_ttl field) which would happen regardless of these.
if request.transmission_count < 5:
stats.STATS.IncrementCounter("grr_request_retransmission_count")
request.transmission_count += 1
self.ReQueueRequest(request)
break
# If we get here its all good - run the flow.
if self.IsRunning():
self.flow_obj.HeartBeat()
self.RunStateMethod(request.next_state, request, responses)
# Quit early if we are no longer alive.
else:
break
# At this point we have processed this request - we can remove it and
# its responses from the queue.
self.queue_manager.DeleteFlowRequestStates(self.session_id, request)
self.context.next_processed_request += 1
self.DecrementOutstandingRequests()
# Are there any more outstanding requests?
if not self.OutstandingRequests():
# Allow the flow to cleanup
if self.IsRunning() and self.context.current_state != "End":
self.RunStateMethod("End")
# Rechecking the OutstandingRequests allows the End state (which was
# called above) to issue further client requests - hence postpone
# termination.
if not self.OutstandingRequests():
# TODO(user): Deprecate in favor of 'flow_completions' metric.
stats.STATS.IncrementCounter("grr_flow_completed_count")
stats.STATS.IncrementCounter(
"flow_completions", fields=[self.flow_obj.Name()])
logging.debug("Destroying session %s(%s) for client %s",
self.session_id,
self.flow_obj.Name(), self.runner_args.client_id)
self.flow_obj.Terminate()
# We are done here.
return
except queue_manager.MoreDataException:
# Join any threads.
for event in processing:
event.wait()
# We did not read all the requests/responses in this run in order to
# keep a low memory footprint and have to make another pass.
self.FlushMessages()
self.flow_obj.Flush()
continue
finally:
# Join any threads.
for event in processing:
event.wait()
def RunStateMethod(self,
                   method,
                   request=None,
                   responses=None,
                   event=None,
                   direct_response=None):
  """Completes the request by calling the state method.

  NOTE - we expect the state method to be suitably decorated with a
  StateHandler (otherwise this will raise because the prototypes
  are different)

  Args:
    method: The name of the state method to call.
    request: A RequestState protobuf.
    responses: A list of GrrMessages responding to the request.
    event: A threading.Event() instance to signal completion of this request.
    direct_response: A flow.Responses() object can be provided to avoid
      creation of one.
  """
  client_id = None
  try:
    self.context.current_state = method
    if request and responses:
      client_id = request.client_id or self.runner_args.client_id
      logging.debug("%s Running %s with %d responses from %s",
                    self.session_id, method, len(responses), client_id)
    else:
      logging.debug("%s Running state method %s", self.session_id, method)

    # Extend our lease if needed.
    self.flow_obj.HeartBeat()
    try:
      # Resolve the state name to the bound method on the flow object.
      method = getattr(self.flow_obj, method)
    except AttributeError:
      raise FlowRunnerError("Flow %s has no state method %s" %
                            (self.flow_obj.__class__.__name__, method))

    method(
        direct_response=direct_response, request=request, responses=responses)

    # Any replies buffered by the state method are handed to output plugins
    # once, then the buffer is reset.
    if self.sent_replies:
      self.ProcessRepliesWithOutputPlugins(self.sent_replies)
      self.sent_replies = []

  # We don't know here what exceptions can be thrown in the flow but we have
  # to continue. Thus, we catch everything.
  except Exception as e:  # pylint: disable=broad-except
    # This flow will terminate now

    # TODO(user): Deprecate in favor of 'flow_errors'.
    stats.STATS.IncrementCounter("grr_flow_errors")

    stats.STATS.IncrementCounter("flow_errors", fields=[self.flow_obj.Name()])
    logging.exception("Flow %s raised %s.", self.session_id, e)

    self.Error(traceback.format_exc(), client_id=client_id)

  finally:
    if event:
      # Unblock any thread waiting on this request's completion.
      event.set()
def GetNextOutboundId(self):
  """Atomically reserve and return the next outbound request id."""
  with self.outbound_lock:
    allocated = self.context.next_outbound_id
    self.context.next_outbound_id = allocated + 1
  return allocated
def CallClient(self,
               action_cls,
               request=None,
               next_state=None,
               client_id=None,
               request_data=None,
               start_time=None,
               **kwargs):
  """Calls the client asynchronously.

  This sends a message to the client to invoke an Action. The run
  action may send back many responses. These will be queued by the
  framework until a status message is sent by the client. The status
  message will cause the entire transaction to be committed to the
  specified state.

  Args:
    action_cls: The function to call on the client.
    request: The request to send to the client. If not specified (Or None) we
      create a new RDFValue using the kwargs.
    next_state: The state in this flow, that responses to this
      message should go to.
    client_id: rdf_client.ClientURN to send the request to.
    request_data: A dict which will be available in the RequestState
      protobuf. The Responses object maintains a reference to this
      protobuf for use in the execution of the state method. (so you can
      access this data by responses.request). Valid values are
      strings, unicode and protobufs.
    start_time: Call the client at this time. This Delays the client request
      for into the future.
    **kwargs: These args will be used to construct the client action semantic
      protobuf.

  Raises:
    FlowRunnerError: If next_state is not one of the allowed next states.
    RuntimeError: The request passed to the client does not have the correct
      type.
  """
  if client_id is None:
    client_id = self.runner_args.client_id

  if client_id is None:
    raise FlowRunnerError("CallClient() is used on a flow which was not "
                          "started with a client.")

  if not isinstance(client_id, rdf_client.ClientURN):
    # Try turning it into a ClientURN
    client_id = rdf_client.ClientURN(client_id)

  if action_cls.in_rdfvalue is None:
    if request:
      raise RuntimeError("Client action %s does not expect args." %
                         action_cls.__name__)
  else:
    if request is None:
      # Create a new rdf request.
      request = action_cls.in_rdfvalue(**kwargs)
    else:
      # Verify that the request type matches the client action requirements.
      if not isinstance(request, action_cls.in_rdfvalue):
        raise RuntimeError("Client action expected %s but got %s" %
                           (action_cls.in_rdfvalue, type(request)))

  outbound_id = self.GetNextOutboundId()

  # Create a new request state
  state = rdf_flows.RequestState(
      id=outbound_id,
      session_id=self.session_id,
      next_state=next_state,
      client_id=client_id)

  if request_data is not None:
    state.data = rdf_protodict.Dict(request_data)

  # Send the message with the request state
  msg = rdf_flows.GrrMessage(
      session_id=utils.SmartUnicode(self.session_id),
      name=action_cls.__name__,
      request_id=outbound_id,
      priority=self.runner_args.priority,
      require_fastpoll=self.runner_args.require_fastpoll,
      queue=client_id.Queue(),
      payload=request,
      generate_task_id=True)

  if self.context.remaining_cpu_quota:
    msg.cpu_limit = int(self.context.remaining_cpu_quota)

  cpu_usage = self.context.client_resources.cpu_usage
  if self.runner_args.cpu_limit:
    # Hand the client only the portion of the quota not yet consumed.
    msg.cpu_limit = max(self.runner_args.cpu_limit - cpu_usage.user_cpu_time -
                        cpu_usage.system_cpu_time, 0)

    if msg.cpu_limit == 0:
      raise FlowRunnerError("CPU limit exceeded.")

  if self.runner_args.network_bytes_limit:
    # Same for the network byte quota.
    msg.network_bytes_limit = max(self.runner_args.network_bytes_limit -
                                  self.context.network_bytes_sent, 0)
    if msg.network_bytes_limit == 0:
      raise FlowRunnerError("Network limit exceeded.")

  state.request = msg
  self.QueueRequest(state, timestamp=start_time)
def Publish(self, event_name, msg, delay=0):
  """Publish msg to all listeners registered for event_name."""
  publish = events.Events.PublishEvent
  publish(event_name, msg, delay=delay, token=self.token)
def CallFlow(self,
             flow_name=None,
             next_state=None,
             sync=True,
             request_data=None,
             client_id=None,
             base_session_id=None,
             **kwargs):
  """Creates a new flow and send its responses to a state.

  This creates a new flow. The flow may send back many responses which will be
  queued by the framework until the flow terminates. The final status message
  will cause the entire transaction to be committed to the specified state.

  Args:
    flow_name: The name of the flow to invoke.
    next_state: The state in this flow, that responses to this
      message should go to.
    sync: If True start the flow inline on the calling thread, else schedule
      a worker to actually start the child flow.
    request_data: Any dict provided here will be available in the
      RequestState protobuf. The Responses object maintains a reference
      to this protobuf for use in the execution of the state method. (so
      you can access this data by responses.request). There is no
      format mandated on this data but it may be a serialized protobuf.
    client_id: If given, the flow is started for this client.
    base_session_id: A URN which will be used to build a URN.
    **kwargs: Arguments for the child flow.

  Raises:
    FlowRunnerError: If next_state is not one of the allowed next states.

  Returns:
    The URN of the child flow which was created.
  """
  client_id = client_id or self.runner_args.client_id

  # This looks very much like CallClient() above - we prepare a request state,
  # and add it to our queue - any responses from the child flow will return to
  # the request state and the stated next_state. Note however, that there is
  # no client_id or actual request message here because we directly invoke the
  # child flow rather than queue anything for it.
  state = rdf_flows.RequestState(
      id=self.GetNextOutboundId(),
      session_id=utils.SmartUnicode(self.session_id),
      client_id=client_id,
      next_state=next_state,
      response_count=0)

  if request_data:
    state.data = rdf_protodict.Dict().FromDict(request_data)

  # If the urn is passed explicitly (e.g. from the hunt runner) use that,
  # otherwise use the urn from the flow_runner args. If both are None, create
  # a new collection and give the urn to the flow object.
  # NOTE: kwargs.pop also keeps this key out of the child flow's kwargs.
  logs_urn = self._GetLogsCollectionURN(
      kwargs.pop("logs_collection_urn", None) or
      self.runner_args.logs_collection_urn)

  # If we were called with write_intermediate_results, propagate down to
  # child flows. This allows write_intermediate_results to be set to True
  # either at the top level parent, or somewhere in the middle of
  # the call chain.
  write_intermediate = (kwargs.pop("write_intermediate_results", False) or
                        self.runner_args.write_intermediate_results)

  try:
    event_id = self.runner_args.event_id
  except AttributeError:
    event_id = None

  # Create the new child flow but do not notify the user about it.
  child_urn = self.flow_obj.StartFlow(
      client_id=client_id,
      flow_name=flow_name,
      base_session_id=base_session_id or self.session_id,
      event_id=event_id,
      request_state=state,
      token=self.token,
      notify_to_user=False,
      parent_flow=self.flow_obj,
      sync=sync,
      queue=self.runner_args.queue,
      write_intermediate_results=write_intermediate,
      logs_collection_urn=logs_urn,
      **kwargs)

  self.QueueRequest(state)

  return child_urn
def SendReply(self, response):
  """Allows this flow to send a message to its parent flow.

  If this flow does not have a parent, the message is ignored.

  Args:
    response: An RDFValue() instance to be sent to the parent.

  Raises:
    RuntimeError: If responses is not of the correct type.
  """
  if not isinstance(response, rdfvalue.RDFValue):
    raise RuntimeError("SendReply can only send a Semantic Value")

  # Only send the reply if we have a parent and if flow's send_replies
  # attribute is True. We have a parent only if we know our parent's request.
  if (self.runner_args.request_state.session_id and
      self.runner_args.send_replies):

    request_state = self.runner_args.request_state

    request_state.response_count += 1

    # Make a response message
    msg = rdf_flows.GrrMessage(
        session_id=request_state.session_id,
        request_id=request_state.id,
        response_id=request_state.response_count,
        auth_state=rdf_flows.GrrMessage.AuthorizationState.AUTHENTICATED,
        type=rdf_flows.GrrMessage.Type.MESSAGE,
        payload=response,
        args_rdf_name=response.__class__.__name__,
        args_age=int(response.age))

    # Queue the response now
    self.queue_manager.QueueResponse(request_state.session_id, msg)

    if self.runner_args.write_intermediate_results:
      # Child flows only record results when explicitly asked to.
      self.QueueReplyForResultsCollection(response)

  else:
    # Only write the reply to the collection if we are the parent flow.
    self.QueueReplyForResultsCollection(response)
def FlushMessages(self):
  """Flushes the messages that were queued."""
  # Only flush queues if we are the top level runner.
  if self.parent_runner is None:
    self.queue_manager.Flush()

  if self.queued_replies:
    # Write each buffered reply to both result collections using a single
    # mutation pool so all writes go out in one batch.
    with data_store.DB.GetMutationPool(token=self.token) as mutation_pool:
      for response in self.queued_replies:
        sequential_collection.GeneralIndexedCollection.StaticAdd(
            self.output_urn,
            self.token,
            response,
            mutation_pool=mutation_pool)
        multi_type_collection.MultiTypeCollection.StaticAdd(
            self.multi_type_output_urn,
            self.token,
            response,
            mutation_pool=mutation_pool)
    self.queued_replies = []
def Error(self, backtrace, client_id=None, status=None):
  """Kills this flow with an error.

  Args:
    backtrace: A string traceback describing the failure (may be falsy).
    client_id: The client the flow ran against; defaults to the runner's.
    status: Optional GrrStatus.ReturnedStatus value; defaults to
      GENERIC_ERROR.
  """
  client_id = client_id or self.runner_args.client_id
  if self.IsRunning():
    # Set an error status
    reply = rdf_flows.GrrStatus()
    if status is None:
      reply.status = rdf_flows.GrrStatus.ReturnedStatus.GENERIC_ERROR
    else:
      reply.status = status

    if backtrace:
      reply.error_message = backtrace

    self.flow_obj.Terminate(status=reply)

    self.context.state = rdf_flows.FlowContext.State.ERROR

    if backtrace:
      logging.error("Error in flow %s (%s). Trace: %s", self.session_id,
                    client_id, backtrace)
      self.context.backtrace = backtrace
    else:
      logging.error("Error in flow %s (%s).", self.session_id, client_id)

    self.Notify("FlowStatus", client_id,
                "Flow (%s) terminated due to error" % self.session_id)
def GetState(self):
  """Return the flow's current execution state."""
  context = self.context
  return context.state
def IsRunning(self):
  """True while this flow is in the RUNNING state."""
  running = rdf_flows.FlowContext.State.RUNNING
  return self.context.state == running
def ProcessRepliesWithOutputPlugins(self, replies):
  """Feeds the given replies through all configured output plugins.

  Each plugin's success or failure is appended to its persisted plugin
  state; a failing plugin never aborts the flow or the other plugins.

  Args:
    replies: A list of reply values to hand to each plugin.
  """
  if not self.runner_args.output_plugins or not replies:
    return
  for output_plugin_state in self.context.output_plugins_states:
    plugin_descriptor = output_plugin_state.plugin_descriptor
    plugin_state = output_plugin_state.plugin_state
    output_plugin = plugin_descriptor.GetPluginForState(plugin_state)

    # Extend our lease if needed.
    self.flow_obj.HeartBeat()
    try:
      output_plugin.ProcessResponses(replies)
      output_plugin.Flush()

      log_item = output_plugin_lib.OutputPluginBatchProcessingStatus(
          plugin_descriptor=plugin_descriptor,
          status="SUCCESS",
          batch_size=len(replies))
      # Cannot append to lists in AttributedDicts.
      plugin_state["logs"] += [log_item]

      self.Log("Plugin %s sucessfully processed %d flow replies.",
               plugin_descriptor, len(replies))
    except Exception as e:  # pylint: disable=broad-except
      error = output_plugin_lib.OutputPluginBatchProcessingStatus(
          plugin_descriptor=plugin_descriptor,
          status="ERROR",
          summary=utils.SmartStr(e),
          batch_size=len(replies))
      # Cannot append to lists in AttributedDicts.
      plugin_state["errors"] += [error]

      self.Log("Plugin %s failed to process %d replies due to: %s",
               plugin_descriptor, len(replies), e)
def Terminate(self, status=None):
  """Terminates this flow.

  Destroys queued state, reports final resource usage to the parent flow
  (if any) and marks the flow TERMINATED. Idempotent: a flow that is not
  RUNNING is left untouched.

  Args:
    status: Optional GrrStatus to send to the parent; a fresh one is
      created when omitted.
  """
  try:
    self.queue_manager.DestroyFlowStates(self.session_id)
  except queue_manager.MoreDataException:
    pass

  # This flow might already not be running.
  if self.context.state != rdf_flows.FlowContext.State.RUNNING:
    return

  if self.runner_args.request_state.session_id:
    # Make a response or use the existing one.
    response = status or rdf_flows.GrrStatus()

    client_resources = self.context.client_resources
    user_cpu = client_resources.cpu_usage.user_cpu_time
    sys_cpu = client_resources.cpu_usage.system_cpu_time
    response.cpu_time_used.user_cpu_time = user_cpu
    response.cpu_time_used.system_cpu_time = sys_cpu
    response.network_bytes_sent = self.context.network_bytes_sent
    response.child_session_id = self.session_id

    request_state = self.runner_args.request_state
    request_state.response_count += 1

    # Make a response message
    msg = rdf_flows.GrrMessage(
        session_id=request_state.session_id,
        request_id=request_state.id,
        response_id=request_state.response_count,
        auth_state=rdf_flows.GrrMessage.AuthorizationState.AUTHENTICATED,
        type=rdf_flows.GrrMessage.Type.STATUS,
        payload=response)

    try:
      # Queue the response now
      self.queue_manager.QueueResponse(request_state.session_id, msg)
    finally:
      # Always wake the parent, even if queuing the status failed.
      self.QueueNotification(session_id=request_state.session_id)

  # Mark as terminated.
  self.context.state = rdf_flows.FlowContext.State.TERMINATED
  self.flow_obj.Flush()
def UpdateProtoResources(self, status):
  """Accumulate client CPU and network usage; enforce configured quotas.

  Args:
    status: A status message carrying cpu_time_used and network_bytes_sent.

  Raises:
    FlowRunnerError: If the accumulated usage exceeds the cpu or network
      byte limit configured in runner_args.
  """
  usage = self.context.client_resources.cpu_usage
  usage.user_cpu_time += status.cpu_time_used.user_cpu_time
  usage.system_cpu_time += status.cpu_time_used.system_cpu_time
  self.context.network_bytes_sent += status.network_bytes_sent

  cpu_limit = self.runner_args.cpu_limit
  if cpu_limit and cpu_limit < usage.user_cpu_time + usage.system_cpu_time:
    # We have exceeded our limit, stop this flow.
    raise FlowRunnerError("CPU limit exceeded.")

  byte_limit = self.runner_args.network_bytes_limit
  if byte_limit and byte_limit < self.context.network_bytes_sent:
    # We have exceeded our byte limit, stop this flow.
    raise FlowRunnerError("Network bytes limit exceeded.")
def SaveResourceUsage(self, request, responses):
  """Tally client resource usage from the responses' status message.

  Called automatically from the StateHandler; request is unused.
  """
  del request  # Unused; kept for the StateHandler calling convention.
  status = responses.status
  if not status:
    return
  # Do this last since it may raise "CPU limit exceeded".
  self.UpdateProtoResources(status)
def _QueueRequest(self, request, timestamp=None):
if request.HasField("request") and request.request.name:
# This message contains a client request as well.
self.queue_manager.QueueClientMessage(
request.request, timestamp=timestamp)
self.queue_manager.QueueRequest(
self.session_id, request, timestamp=timestamp)
def IncrementOutstandingRequests(self):
  """Bump the outstanding request counter under the outbound lock."""
  with self.outbound_lock:
    current = self.context.outstanding_requests
    self.context.outstanding_requests = current + 1
def DecrementOutstandingRequests(self):
  """Drop the outstanding request counter under the outbound lock."""
  with self.outbound_lock:
    remaining = self.context.outstanding_requests - 1
    self.context.outstanding_requests = remaining
def QueueRequest(self, request, timestamp=None):
  """Queue a new request and account for it as outstanding."""
  # Remember the new request for later.
  enqueue = self._QueueRequest
  enqueue(request, timestamp=timestamp)
  self.IncrementOutstandingRequests()
def ReQueueRequest(self, request, timestamp=None):
  """Re-queue a request without changing the outstanding count."""
  self._QueueRequest(request, timestamp=timestamp)
def QueueResponse(self, response, timestamp=None):
  """Queue a response addressed to this flow's own session."""
  manager = self.queue_manager
  manager.QueueResponse(self.session_id, response, timestamp=timestamp)
def QueueNotification(self, *args, **kw):
  """Forward notification scheduling straight to the queue manager."""
  delegate = self.queue_manager.QueueNotification
  delegate(*args, **kw)
def QueueReplyForResultsCollection(self, response):
  """Buffer a reply for the results collection and the output plugins."""
  self.queued_replies.append(response)

  client_id = self.runner_args.client_id
  if not client_id:
    self.sent_replies.append(response)
    return

  # While wrapping the response in GrrMessage is not strictly necessary for
  # output plugins, GrrMessage.source may be used by these plugins to fetch
  # client's metadata and include it into the exported data.
  wrapped = rdf_flows.GrrMessage(payload=response, source=client_id)
  self.sent_replies.append(wrapped)
def SetStatus(self, status):
  """Record status as the flow's current user-visible status string."""
  context = self.context
  context.status = status
def Log(self, format_str, *args):
  """Logs the message using the flow's standard logging.

  Args:
    format_str: Format string
    *args: arguments to the format string

  Raises:
    RuntimeError: on parent missing logs_collection
  """
  format_str = utils.SmartUnicode(format_str)

  status = format_str
  if args:
    try:
      # The status message is always in unicode
      status = format_str % args
    except TypeError:
      # A bad format string is logged but never crashes the flow.
      logging.error("Tried to log a format string with the wrong number "
                    "of arguments: %s", format_str)

  logging.info("%s: %s", self.session_id, status)

  self.SetStatus(utils.SmartUnicode(status))

  # Also persist the message into the flow's logs collection.
  log_entry = rdf_flows.FlowLog(
      client_id=self.runner_args.client_id,
      urn=self.session_id,
      flow_name=self.flow_obj.__class__.__name__,
      log_message=status)
  logs_collection_urn = self._GetLogsCollectionURN(
      self.runner_args.logs_collection_urn)
  FlowLogCollection.StaticAdd(logs_collection_urn, self.token, log_entry)
def GetLog(self):
  """Open and return this flow's logs collection read-only."""
  logs_urn = self.runner_args.logs_collection_urn
  return self.OpenLogsCollection(logs_urn, mode="r")
def Status(self, format_str, *args):
  """Set a status message visible to users (delegates to Log)."""
  self.Log(format_str, *args)
def Notify(self, message_type, subject, msg):
  """Send a notification to the originating user.

  Args:
    message_type: The type of the message. This allows the UI to format
      a link to the original object e.g. "ViewObject" or "HostInformation"
    subject: The urn of the AFF4 object of interest in this link.
    msg: A free form textual message.
  """
  user = self.context.creator
  # Don't send notifications to system users.
  if (self.runner_args.notify_to_user and
      user not in aff4_users.GRRUser.SYSTEM_USERS):

    # Prefix the message with the hostname of the client we are running
    # against.
    if self.runner_args.client_id:
      client_fd = aff4.FACTORY.Open(
          self.runner_args.client_id, mode="rw", token=self.token)
      hostname = client_fd.Get(client_fd.Schema.HOSTNAME) or ""
      client_msg = "%s: %s" % (hostname, msg)
    else:
      client_msg = msg

    # Add notification to the User object.
    fd = aff4.FACTORY.Create(
        aff4.ROOT_URN.Add("users").Add(user),
        aff4_users.GRRUser,
        mode="rw",
        token=self.token)

    # Queue notifications to the user.
    fd.Notify(message_type, subject, client_msg, self.session_id)
    fd.Close()

    # Add notifications to the flow.
    notification = rdf_flows.Notification(
        type=message_type,
        subject=utils.SmartUnicode(subject),
        message=utils.SmartUnicode(msg),
        source=self.session_id,
        timestamp=rdfvalue.RDFDatetime.Now())

    data_store.DB.Set(self.session_id,
                      self.flow_obj.Schema.NOTIFICATION,
                      notification,
                      replace=False,
                      sync=False,
                      token=self.token)

    # Disable further notifications.
    self.context.user_notified = True

  # Allow the flow to either specify an event name or an event handler URN.
  notification_event = (self.runner_args.notification_event or
                        self.runner_args.notification_urn)
  if notification_event:
    if self.context.state == rdf_flows.FlowContext.State.ERROR:
      status = rdf_flows.FlowNotification.Status.ERROR
    else:
      status = rdf_flows.FlowNotification.Status.OK

    event = rdf_flows.FlowNotification(
        session_id=self.context.session_id,
        flow_name=self.runner_args.flow_name,
        client_id=self.runner_args.client_id,
        status=status)

    self.flow_obj.Publish(notification_event, message=event)
``` |
[
{
"content": "Here is the source code:\n```python\n# -*- coding: utf-8 -*-\n# <nbformat>3.0</nbformat>\n\n# <codecell>\n\nfrom __future__ import division\nimport os, os.path\nimport sys\nsys.path.append('/home/will/PySeqUtils/')\nfrom collections import Counter\nos.chdir('/home/will/HIVCpG/')\nfrom GeneralSeqTo... | [
{
"content": "Here is the source code:\n<|memory_start|>```python\n# -*- coding: utf-8 -*-\n# <nbformat>3.0</nbformat>\n\n# <codecell>\n\nfrom __future__ import division\nimport os, os.path\nimport sys\nsys.path.append('/home/will/PySeqUtils/')\nfrom collections import Counter\nos.chdir('/home/will/HIVCpG/')\nf... | ```python
# -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <codecell>
from __future__ import division
import os, os.path
import sys
sys.path.append('/home/will/PySeqUtils/')
from collections import Counter
os.chdir('/home/will/HIVCpG/')
from GeneralSeqTools import fasta_reader
# <codecell>
from itertools import izip, tee, imap, product
def yield_lets(infile):
with open(infile) as handle:
for name, seq in fasta_reader(handle):
for l in imap(lambda x: x.upper(), seq):
if l != '-':
yield l.upper()
def yield_pairs(seq_iter):
prev_iter, next_iter = tee(seq_iter, 2)
_ = next_iter.next()
for tup in izip(prev_iter, next_iter):
yield ''.join(tup)
base_iter = yield_lets('lanlgenomes/hiv-db.fasta')
base_counts = Counter(yield_pairs(base_iter))
npairs = sum(base_counts.values())
keys = [''.join(tup) for tup in product('ACGT', repeat=2)]
base_freqs = dict([(key, base_counts[key]/npairs) for key in keys])
print base_freqs
# <codecell>
import pandas as pd
store = pd.HDFStore('/home/will/HIVReportGen/Data/SplitRedcap/2013-01-16/EntireCohort.hdf')
seq_data = store['seq_data']
redcap_data = store['redcap']
store.close()
# <codecell>
def yield_bin(row):
for l, val in zip(row['LTR-bin-align'], row['LTR-seq-align']):
if val not in wlets:
yield np.nan
else:
yield l
tmp_seqs = seq_data[['LTR-seq-align', 'LTR-bin-align']].dropna()
nseqs = []
nvars = []
wlets = set('ACGT')
for key, row in tmp_seqs.iterrows():
seq = row['LTR-seq-align']
nlets = sum(l in wlets for l in seq)
if nlets > 200:
pat_info = [('Patient ID', key[0]), ('VisitNum', key[1])]
tups = list(enumerate(yield_pairs(iter(seq))))+pat_info
vtups = list(enumerate(yield_bin(row)))+pat_info
nseqs.append(dict(tups))
nvars.append(dict(vtups))
dinuc_seq_df = pd.DataFrame(nseqs).set_index(['Patient ID', 'VisitNum']).T
nuc_var_df = pd.DataFrame(nvars).set_index(['Patient ID', 'VisitNum']).T
# <codecell>
from pylru import lrudecorator
@lrudecorator(500)
def calc_fishers(intup):
ge_row = [base_counts['CG'], npairs - base_counts['CG']]
return fisher_exact([ge_row, list(intup)])[1]
# <codecell>
from scipy.stats import fisher_exact, chisquare
import numpy as np
def rolling_sig(inser):
win_pos = inser.sum()
win_neg = len(inser) - win_pos
return calc_fishers((win_pos, win_neg))
sig_vals = pd.rolling_apply(dinuc_seq_df == 'CG', 100, rolling_sig, center=True)
# <codecell>
import matplotlib.pyplot as plt
(-np.log10(sig_vals)).mean(axis=1).plot(legend=False)
plt.ylabel('p-value')
# <codecell>
has_seq = pd.DataFrame({'has_seq':pd.Series(True, index=dinuc_seq_df.columns)})
t = redcap_data['Event Name'].dropna().apply(lambda x: x.split(' - ')[0])
redcap_data['Patient visit number'] = redcap_data['Patient visit number'].combine_first(t)
redcap_data["Drugs used (choice='Other')"] = redcap_data["Drugs used (choice='Other')"].dropna() == 'Checked'
drug_cols = ['Amphetamines',
'Barbiturates',
'Benzodiazepines',
'Cannabinoid',
'Cocaine + metabolite',
'Opiates',
'Phencyclidine'
]
admit_cols = ["Drugs used (choice='Marijuana')",
"Drugs used (choice='Cocaine (crack, nasal, smoke, inject)')",
"Drugs used (choice='Heroin (nasal, inject)')",
"Drugs used (choice='Methamphetamine (smoke, nasal, inject)')",
"Drugs used (choice='Benzodiazapine (i.e. valium, ativan, xanax, klonipin, etc)')",
"Drugs used (choice='Narcotics')",
"Drugs used (choice='Ecstasy')",
"Drugs used (choice='PCP')",
"Drugs used (choice='Ritalin')",
"Drugs used (choice='Other')"]
wcols = ['Patient ID', 'Patient visit number'] + drug_cols + admit_cols
wanted_redcap = redcap_data[wcols].rename(columns = {
'Patient visit number':'VisitNum',
})
seq_redcap = pd.merge(wanted_redcap, has_seq,
left_on = ['Patient ID', 'VisitNum'],
right_index = True, how = 'outer')
seq_redcap
# <codecell>
PC = set()
PN = set()
for key, group in seq_redcap.groupby('Patient ID'):
nseqs = group['has_seq'].sum()
ever_drug = group[drug_cols].any(axis=0)
admit_drug = group[admit_cols].any()
always_coc = group['Cocaine + metabolite'].dropna().all()
if nseqs < 3:
pass
elif ~ever_drug.any() and ~admit_drug.any():
PN.add(key)
elif (ever_drug.sum() == 1) and always_coc:
PC.add(key)
print PN, PC
# <codecell>
pn_cols = [col for col in dinuc_seq_df.columns if col[0] in PN]
pc_cols = [col for col in dinuc_seq_df.columns if col[0] in PC]
all_vals = pd.rolling_apply(dinuc_seq_df == 'CG', 100, rolling_sig, center=True)
pc_vals = pd.rolling_apply(dinuc_seq_df[pc_cols] == 'CG', 100, rolling_sig, center=True)
pn_vals = pd.rolling_apply(dinuc_seq_df[pn_cols] == 'CG', 100, rolling_sig, center=True)
# <codecell>
fig, axs = plt.subplots(3, 1, sharex=True, figsize = (10,10))
groups = [('Conservation', 'Frequency', 1-nuc_var_df.dropna(thresh=100, axis = 0)),
('Non-Users', '-log(p-value)',-np.log10(pn_vals)),
('Pure-Cocaine', '-log(p-value)', -np.log10(pc_vals))]
for (name, label, group), ax in zip(groups, axs.flatten()):
group.mean(axis = 1).plot(ax = ax)
ax.set_ylabel(label)
ax.set_title(name)
ax.set_xlabel('LTR-Position')
ax.set_xlim([1, 630])
plt.savefig('cpg_islands_by_cocaine.png', dpi=300)
# <codecell>
base_freqs['CG']*100
# <codecell>
len(PN), len(PC)
# <codecell>
``` |
[
{
"content": "Replicate the code snippet exactly, without paraphrasing or reformatting:\n```python\n########\n# Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the L... | [
{
"content": "Replicate the code snippet exactly, without paraphrasing or reformatting:\n<|memory_start|>```python\n########\n# Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compl... | ```python
########
# Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
############
from .. import env
from ..cli import cfy
from ..table import print_data, print_single
from ..utils import handle_client_error
USER_COLUMNS = ['username', 'groups', 'role', 'group_system_roles', 'active',
'last_login_at', 'is_locked']
GET_DATA_COLUMNS = ['user_tenants', 'group_tenants']
NO_GET_DATA_COLUMNS = ['tenants']
USER_LABELS = {'role': 'system wide role',
'group_system_roles': 'system wide roles via groups'}
def _format_user(user):
user_tenants = dict(
(str(tenant), str(user.user_tenants[tenant]))
for tenant in user.user_tenants
)
group_tenants = dict(
(str(tenant),
dict(
(str(role),
[str(group) for group in user.group_tenants[tenant][role]])
for role in user.group_tenants[tenant]
))
for tenant in user.group_tenants
)
user['user_tenants'] = str(user_tenants)[1:-1]
user['group_tenants'] = str(group_tenants)[1:-1]
return user
def _format_group_system_roles(user):
group_system_roles = dict(
(str(role),
[str(user_group) for user_group in user['group_system_roles'][role]])
for role in user['group_system_roles']
)
user['group_system_roles'] = str(group_system_roles).strip('{}')
return user
@cfy.group(name='users')
@cfy.options.common_options
def users():
"""Handle Cloudify users
"""
if not env.is_initialized():
env.raise_uninitialized()
@users.command(name='list', short_help='List users [manager only]')
@cfy.options.sort_by('username')
@cfy.options.descending
@cfy.options.common_options
@cfy.options.get_data
@cfy.options.search
@cfy.options.pagination_offset
@cfy.options.pagination_size
@cfy.assert_manager_active()
@cfy.pass_client()
@cfy.pass_logger
def list(sort_by,
descending,
get_data,
search,
pagination_offset,
pagination_size,
logger,
client):
"""List all users
"""
logger.info('Listing all users...')
users_list = client.users.list(
sort=sort_by,
is_descending=descending,
_get_data=get_data,
_search=search,
_offset=pagination_offset,
_size=pagination_size
)
total = users_list.metadata.pagination.total
# copy list
columns = [] + USER_COLUMNS
users_list = [_format_group_system_roles(user) for user in users_list]
if get_data:
users_list = [_format_user(user) for user in users_list]
columns += GET_DATA_COLUMNS
else:
columns += NO_GET_DATA_COLUMNS
print_data(columns, users_list, 'Users:', labels=USER_LABELS)
logger.info('Showing {0} of {1} users'.format(len(users_list), total))
@users.command(name='create', short_help='Create a user [manager only]')
@cfy.argument('username', callback=cfy.validate_name)
@cfy.options.common_options
@cfy.options.security_role
@cfy.options.password
@cfy.options.tenant_name(required=False)
@cfy.options.user_tenant_role(required=False,
options_flags=['-l', '--user-tenant-role'])
@cfy.assert_manager_active()
@cfy.pass_client(use_tenant_in_header=False)
@cfy.pass_logger
def create(username,
security_role,
password,
tenant_name,
user_tenant_role,
logger,
client):
"""Create a new user on the manager
`USERNAME` is the username of the user
"""
client.users.create(username, password, security_role)
logger.info('User `{0}` created with `{1}` security role'.format(
username, security_role))
if tenant_name and user_tenant_role:
client.tenants.add_user(username, tenant_name, user_tenant_role)
logger.info(
'User `{0}` added successfully to tenant `{1}` with `{2}` role'
.format(username, tenant_name, user_tenant_role))
@users.command(name='set-password',
short_help='Set a new password for a user [manager only]')
@cfy.argument('username', callback=cfy.validate_name)
@cfy.options.password
@cfy.options.common_options
@cfy.assert_manager_active()
@cfy.pass_client()
@cfy.pass_logger
def set_password(username, password, logger, client):
"""Set a new password for a user
`USERNAME` is the username of the user
"""
logger.info('Setting new password for user {0}...'.format(username))
client.users.set_password(username, password)
logger.info('New password set')
@users.command(name='set-role',
short_help='Set a new role for a user [manager only]')
@cfy.argument('username', callback=cfy.validate_name)
@cfy.options.security_role
@cfy.options.common_options
@cfy.assert_manager_active()
@cfy.pass_client()
@cfy.pass_logger
def set_role(username, security_role, logger, client):
"""Set a new role for a user
`USERNAME` is the username of the user
"""
logger.info('Setting new role for user {0}...'.format(username))
client.users.set_role(username, security_role)
logger.info('New role `{0}` set'.format(security_role))
@users.command(name='get',
short_help='Get details for a single user [manager only]')
@cfy.argument(
'username', callback=cfy.validate_name, default=env.get_username())
@cfy.options.common_options
@cfy.options.get_data
@cfy.assert_manager_active()
@cfy.pass_client()
@cfy.pass_logger
def get(username, get_data, logger, client):
"""Get details for a single user
`USERNAME` is the username of the user. (default: current user)
"""
logger.info('Getting info for user `{0}`...'.format(username))
if username == env.get_username():
user_details = client.users.get_self(_get_data=get_data)
else:
user_details = client.users.get(username, _get_data=get_data)
# copy list
columns = [] + USER_COLUMNS
if get_data:
_format_user(user_details)
columns += GET_DATA_COLUMNS
else:
columns += NO_GET_DATA_COLUMNS
print_single(columns,
user_details,
'Requested user info:',
labels=USER_LABELS)
@users.command(name='delete',
short_help='Delete a user [manager only]')
@cfy.argument('username', callback=cfy.validate_name)
@cfy.options.common_options
@cfy.assert_manager_active()
@cfy.pass_client()
@cfy.pass_logger
def delete(username, logger, client):
"""Delete a user
`USERNAME` is the username of the user
"""
logger.info('Deleting user `{0}`...'.format(username))
client.users.delete(username)
logger.info('User removed')
@users.command(name='activate',
short_help='Make an inactive user active [manager only]')
@cfy.argument('username')
@cfy.options.common_options
@cfy.assert_manager_active()
@cfy.pass_client()
@cfy.pass_logger
def activate(username, logger, client):
"""Activate a user
`USERNAME` is the username of the user
"""
graceful_msg = 'User `{0}` is already active'.format(username)
logger.info('Activating user `{0}`...'.format(username))
with handle_client_error(409, graceful_msg, logger):
client.users.activate(username)
logger.info('User activated')
@users.command(name='deactivate',
short_help='Make an active user inactive [manager only]')
@cfy.argument('username')
@cfy.options.common_options
@cfy.assert_manager_active()
@cfy.pass_client()
@cfy.pass_logger
def deactivate(username, logger, client):
"""Deactivate a user
`USERNAME` is the username of the user
"""
graceful_msg = 'User `{0}` is already inactive'.format(username)
logger.info('Deactivating user `{0}`...'.format(username))
with handle_client_error(409, graceful_msg, logger):
client.users.deactivate(username)
logger.info('User deactivated')
@users.command(name='unlock',
short_help='Unlock a locked user [manager only]')
@cfy.argument('username')
@cfy.options.common_options
@cfy.assert_manager_active()
@cfy.pass_client()
@cfy.pass_logger
def unlock(username, logger, client):
"""Unlock a locked user
`USERNAME` is the username of the user
"""
graceful_msg = 'User `{0}` is already unlocked'.format(username)
logger.info('Unlocking user `{0}`...'.format(username))
with handle_client_error(409, graceful_msg, logger):
client.users.unlock(username)
logger.info('User unlocked')
``` |
[
{
"content": "```python\n\"\"\"proyecto_DAI URL Configuration\n\nThe `urlpatterns` list routes URLs to views. For more information please see:\n https://docs.djangoproject.com/en/1.8/topics/http/urls/\nExamples:\nFunction views\n 1. Add an import: from my_app import views\n 2. Add a URL to urlpatterns... | [
{
"content": "<|memory_start|>```python\n\"\"\"proyecto_DAI URL Configuration\n\nThe `urlpatterns` list routes URLs to views. For more information please see:\n https://docs.djangoproject.com/en/1.8/topics/http/urls/\nExamples:\nFunction views\n 1. Add an import: from my_app import views\n 2. Add a UR... | ```python
"""proyecto_DAI URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, patterns, url
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
# Create a new class that redirects the user to the index page, if successful at logging
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('app.urls')),
]
if not settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
urlpatterns += staticfiles_urlpatterns()
urlpatterns += patterns('',
(r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_ROOT}),
)
if settings.DEBUG:
urlpatterns += patterns(
'django.views.static',
(r'^media/(?P<path>.*)',
'serve',
{'document_root': settings.MEDIA_ROOT}), )
``` |
[
{
"content": "Here is the code block:\n```python\nfrom django.shortcuts import render\nfrom django.views.generic import ListView\nfrom django.views.decorators.csrf import csrf_exempt\nfrom django.http import HttpResponse, HttpResponseRedirect\n# IMPORT REST\nfrom rest_framework import status, generics, mixins, ... | [
{
"content": "Here is the code block:\n<|memory_start|>```python\nfrom django.shortcuts import render\nfrom django.views.generic import ListView\nfrom django.views.decorators.csrf import csrf_exempt\nfrom django.http import HttpResponse, HttpResponseRedirect\n# IMPORT REST\nfrom rest_framework import status, ge... | ```python
from django.shortcuts import render
from django.views.generic import ListView
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponse, HttpResponseRedirect
# IMPORT REST
from rest_framework import status, generics, mixins, viewsets
from rest_framework.decorators import api_view
from rest_framework.renderers import JSONRenderer
from rest_framework.parsers import JSONParser
from rest_framework import generics, filters
from rest_framework.response import Response
from rest_framework.authentication import SessionAuthentication, BasicAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework import permissions
from ceamon.permissions import IsOwnerOrReadOnly
from django.contrib.auth.models import User, Group
from ceamon.serializers import UserSerializer, GroupSerializer
# IMPORT
from ceamon.serializers import sapnodeSerializer, StatusSerializer
from ceamon.models import sapnode, StatusModel
def detail(request, question_id):
return HttpResponse("Estas viendo el server %s." % server_id)
class JSONResponse(HttpResponse):
def __init__(self, data, **kwargs):
content = JSONRenderer().render(data)
kwargs['content_type'] = 'application/json'
super(JSONResponse, self).__init__(content, **kwargs)
@api_view(['GET', 'POST'])
def sapnode_list(request, format=None):
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
content = {
'user': unicode(request.user), # `django.contrib.auth.User` instance.
'auth': unicode(request.auth), # None
}
if request.method == 'GET':
l_sapnode = sapnode.objects.all()
serializer = sapnodeSerializer(l_sapnode, many=True)
return Response(serializer.data)
elif request.method == 'POST':
serializer = sapnodeSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@api_view(['GET', 'PUT', 'DELETE', 'POST'])
def sapnode_detail(request, pk, format=None,):
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
try:
l_sapnode = sapnode.objects.get(pk=pk)
except sapnode.DoesNotExist:
return Response(status=status.HTTP_404_NOT_FOUND)
if request.method == 'GET':
serializer = sapnodeSerializer(l_sapnode)
return Response(serializer.data)
elif request.method == 'PUT':
serializer = sapnodeSerializer(l_sapnode, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
elif request.method == 'POST':
serializer = sapnodeSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
elif request.method == 'DELETE':
l_sapnode.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
class GroupViewSet(viewsets.ModelViewSet):
queryset = Group.objects.all()
serializer_class = GroupSerializer
@api_view(['GET', 'PUT' , 'POST'])
def StatusViewSet(request, format=None):
#l_StatusModel = StatusModel.objects.all(pk=pk)
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
content = {
'user': unicode(request.user), # `django.contrib.auth.User` instance.
'auth': unicode(request.auth), # None
}
if request.method == 'GET':
l_StatusModel = StatusModel.objects.all()
serializer = StatusSerializer(l_StatusModel, many=True)
return Response(serializer.data)
elif request.method == 'POST':
serializer = StatusSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
elif request.method == 'PUT':
serializer = StatusSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@api_view(['GET', 'PUT', 'DELETE', 'POST'])
def status_detail(request, pk, format=None):
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
try:
l_StatusModel = StatusModel.objects.get(pk=pk)
except StatusModel.DoesNotExist:
return Response(status=status.HTTP_404_NOT_FOUND)
if request.method == 'GET':
serializer = StatusSerializer(l_StatusModel)
return Response(serializer.data)
elif request.method == 'PUT':
serializer = StatusSerializer(l_StatusModel, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
elif request.method == 'POST':
serializer = StatusSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
elif request.method == 'DELETE':
StatusModel.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
``` |
[
{
"content": "Provide a verbatim copy of the code:\n```python\nfrom __future__ import division\nfrom math import pi\nimport numpy as np\nimport random\n# import matplotlib.pyplot as plt\n\nclass SineOsc:\n\n def __init__(self):\n self.sample_rate = 44100\n\n def wave(self, frequency, length, rate):... | [
{
"content": "Provide a verbatim copy of the code:\n<|memory_start|>```python\nfrom __future__ import division\nfrom math import pi\nimport numpy as np\nimport random\n# import matplotlib.pyplot as plt\n\nclass SineOsc:\n\n def __init__(self):\n self.sample_rate = 44100\n\n def wave(self, frequency... | ```python
from __future__ import division
from math import pi
import numpy as np
import random
# import matplotlib.pyplot as plt
class SineOsc:
def __init__(self):
self.sample_rate = 44100
def wave(self, frequency, length, rate):
"""produces sine across np array"""
length = int(length * rate)
factor = float(frequency) * (pi * 2) / rate
waveform = np.sin(np.arange(length) * factor)
rounded_waveform = np.round(waveform, 0)
rounded_waveform2 = np.round(waveform, 1)
waveform2 = np.power(waveform, 3)
waveform3 = np.power(rounded_waveform, 4)/4
# waveform4 = waveform
return np.add(rounded_waveform, rounded_waveform2)
def play_frequencies(self, stream, length, volume, attack, decay, *freqs):
"""Plays a group of frequencies"""
def _create_waveform(freq):
wave = [self.wave(freq, length, self.sample_rate)]
waveform = (np.concatenate(wave) * volume / 16)
fade_in = np.arange(0., 1., 1./attack)
fade_out = np.arange(1., 0., -1./decay)
waveform[:attack] = np.multiply(waveform[:attack], fade_in)
waveform[-decay:] = np.multiply(waveform[-decay:], fade_out)
return waveform
all_tones = map(_create_waveform, freqs)
all_tones = sum(all_tones)
# plt.plot(chunk[])
# plt.show()
return stream.write(all_tones.astype(np.float32).tostring())
``` |
[
{
"content": "Here is the script:\n```python\n# Copyright (c) James Percent, Byron Galbraith and Unlock contributors.\n# All rights reserved.\n# Redistribution and use in source and binary forms, with or without modification,\n# are permitted provided that the following conditions are met:\n#\n# 1. Redistrib... | [
{
"content": "Here is the script:\n<|memory_start|>```python\n# Copyright (c) James Percent, Byron Galbraith and Unlock contributors.\n# All rights reserved.\n# Redistribution and use in source and binary forms, with or without modification,\n# are permitted provided that the following conditions are met:\n#\n#... | ```python
# Copyright (c) James Percent, Byron Galbraith and Unlock contributors.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of Unlock nor the names of its contributors may be used
# to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.import socket
import numpy as np
import random
__author__ = 'jpercent'
class RMSSignalGenerator(object):
'''
Generates simulated device samples. Each invocation of the
generate method returns a table of samples. The generate method determines sample values
by consulting an unlock.state.SequenceState. The state returns a tuple of True/False values,
one foreach channel. A state channel value that is True results in sample value, for the
corresponding channel, that is above the threshold; a False value results in value above
the min, but below the threshold.
channels: number of channels
minmax: list of tuples denoting the min and max values of a channel
thresholds: list of channel thresholds
state: an unlock.state.SequenceState. provides a means to dynamically configure
which channels of a given set of samples are above/below threshold values
samples: default number of samples per request
'''
def __init__(self, channels, minmax, thresholds, state, samples, seed=31337):
assert channels == len(thresholds) and channels == len(minmax)
self.channels = channels
self.min = 0
self.max = 1
self.minmax = minmax
self.thresholds = thresholds
self.samples = samples
self.state = state
self.state.start()
self.generate_sample = self.simple_sample_gen
self.random = random.Random()
self.random.seed(seed)
def generate_samples(self, samples=None):
if samples == None:
samples = self.samples
ret = np.zeros((samples, self.channels))
for sample in range(samples):
ret[sample] = self.generate_sample(self.state.state())
self.state.step()
return ret
def simple_sample_gen(self, state_value):
assert self.channels == len(state_value)
sample = np.zeros(self.channels)
for i in range(self.channels):
if state_value[i] == True:
sample[i] = self.random.randint(self.thresholds[i], self.minmax[i][self.max])
elif state_value[i] == False:
sample[i] = self.random.randint(self.minmax[i][self.min], self.thresholds[i]-1)
else:
raise Exception('invalid state')
return sample
if __name__ == '__main__':
# example
from unlock.state import SequenceState
channels = 4
minmax = [(0,10), (-10, 10), (9,100), (0,7)]
thresholds = [ 8, 5, 80, 5]
samples = 12
seq = [(False, False, False, False), (True, False, False, False), (True, True, False, False),
(False, False, False, True), (False, True, False, False), (True, False, True, False),
(False, False, True, False), (False, False, False, True),
(True, False, False, True), (False, True, False, True), (True, True, True, False),
(True, True, True, True)]
state = SequenceState(seq)
print(state.sequence)
gen = RMSSignalGenerator(channels, minmax, thresholds, state, samples)
sample_values = gen.generate_samples()
for i in range(len(seq)):
print ("Sequence value = ", seq[i])
print("Normalized Sample = ", sample_values[i] - np.array(thresholds))
print('-'*80)
``` |
[
{
"content": "Here is the snippet:\n```python\nfrom .ut_utils import ForgeTestCase\nfrom forge import InvalidEntryPoint\n\nclass TestedClass(object):\n def entry_point(self):\n self.f()\n self.g(1, 2, 3)\n self.h(a=1, b=2)\n self.class_method()\n self.static_method()\n d... | [
{
"content": "Here is the snippet:\n<|memory_start|>```python\nfrom .ut_utils import ForgeTestCase\nfrom forge import InvalidEntryPoint\n\nclass TestedClass(object):\n def entry_point(self):\n self.f()\n self.g(1, 2, 3)\n self.h(a=1, b=2)\n self.class_method()\n self.static... | ```python
from .ut_utils import ForgeTestCase
from forge import InvalidEntryPoint
class TestedClass(object):
def entry_point(self):
self.f()
self.g(1, 2, 3)
self.h(a=1, b=2)
self.class_method()
self.static_method()
def set_value(self):
self.value = 2
def f(self):
raise NotImplementedError()
@classmethod
def class_method(self):
pass
@staticmethod
def static_method():
pass
def g(self, a, b, c):
raise NotImplementedError()
def h(self, a, b):
raise NotImplementedError()
@classmethod
def class_method_entry_point(cls):
return cls
@staticmethod
def static_method_entry_point(arg):
pass
class HybridMockTest(ForgeTestCase):
def tearDown(self):
self.forge.verify()
self.assertNoMoreCalls()
super(HybridMockTest, self).tearDown()
def test__hybrid_mock(self):
hm = self.forge.create_hybrid_mock(TestedClass)
# these will be stubbed
hm.f()
hm.g(1, 2, 3)
hm.h(1, 2)
hm.class_method()
hm.static_method()
self.forge.replay()
hm.entry_point()
self.forge.verify()
def test__can_call_class_methods(self):
hm = self.forge.create_hybrid_mock(TestedClass)
self.forge.replay()
rv = hm.class_method_entry_point()
# the 'cls' argument should be the class itself
self.assertIs(rv, TestedClass)
def test__can_call_class_methods_on_class_mocks(self):
hm = self.forge.create_hybrid_class_mock(TestedClass)
self.forge.replay()
rv = hm.class_method_entry_point()
# for class mocks, the 'cls' argument should be the mock!
self.assertIs(rv, hm)
def test__cannot_call_static_methods(self):
hm = self.forge.create_hybrid_mock(TestedClass)
self.forge.replay()
with self.assertRaises(InvalidEntryPoint):
hm.static_method_entry_point()
def test__hybrid_mocks_setting_values(self):
hm = self.forge.create_hybrid_mock(TestedClass)
hm.__forge__.enable_setattr_during_replay()
self.forge.replay()
hm.set_value()
self.assertEqual(hm.value, 2)
class ClassWithClassmethodConstructor(object):
def __init__(self, a, b, c):
pass
@classmethod
def constructor(cls, a, b, c):
return cls(a, b, c)
class HybridClassMockTest(ForgeTestCase):
def setUp(self):
super(HybridClassMockTest, self).setUp()
self.mock = self.forge.create_hybrid_class_mock(ClassWithClassmethodConstructor)
def test__expecting_construction(self):
expected = self.mock(1, 2, 3).and_return(self.forge.create_mock(ClassWithClassmethodConstructor))
self.forge.replay()
got = self.mock.constructor(1, 2, 3)
self.assertIs(expected, got)
``` |
[
{
"content": "Provide an exact copy of the source code:\n```python\n# Copyright 2014 Andreas Riegg - t-h-i-n-x.net\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# ... | [
{
"content": "Provide an exact copy of the source code:\n<|memory_start|>```python\n# Copyright 2014 Andreas Riegg - t-h-i-n-x.net\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the L... | ```python
# Copyright 2014 Andreas Riegg - t-h-i-n-x.net
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Changelog
#
# 1.0 2014-06-26 Initial release.
# 1.1 2014-08-28 Added bit access.
# 1.2 2014-08-31 Added NON-REST multiple read/write byte methods
# to speed up direct Python access.
# 1.3 2014-11-13 Changed parameter order for writeMemoryBytes and
# optimized it.
# 1.4 2014-12-08 Simplified multiple read/write byte methods and
# made start/stop bounds checking more strict.
# Added REST mapping for multiple bytes reading.
# Made addressing scheme uniform for all slot types.
#
# Usage remarks
#
# - The smallest possible memory unit is 1 byte (8 bits)
# - Addressed slots can be
# - bits ( 1 bit)
# - bytes ( 8 bits)
# - words (16 bits)
# - longs (32 bits)
# - All memory address slots are mapped strictly sequential in ascending
# order like channel numbers starting at 0 with MSB first for non
# single-bit slots. This results in the following address slot mapping:
# |<- bit 0 bit 31 ->|
# 01010101010101010101010101010101
# |byte 0| byte 1| byte 2| byte 3|
# | -- word 0 -- | -- word 1 -- |
# | ---------- long 0 ---------- |
# - Where applicable, start and stop have the same meaning as range and
# list slices in Python. Start is included, stop is excluded.
#
from webiopi.decorators.rest import request, response
from webiopi.utils.types import toint, M_JSON
class Memory():
def __init__(self, byteCount):
self._byteCount = byteCount
#---------- Abstraction framework contracts ----------
def __family__(self):
return "Memory"
#---------- Memory abstraction REST implementation ----------
@request("GET", "memory/bit/*")
@response(contentType=M_JSON)
def memoryBitWildcard(self):
values = {}
for i in range(self.byteCount()):
valbyte = self.readMemoryByte(i)
for j in range(8):
position = 7 - j
values[i*8 + j] = "%d" % ((valbyte & (1 << position)) >> position)
return values
# {
# "0": "0",
# "1": "0",
# "2": "0",
# "3": "1"
# "4": "0"
# "5": "0"
# "6": "1"
# "7": "0"
# ...
# }
@request("GET", "memory/byte/*")
@response(contentType=M_JSON)
def memoryByteWildcard(self):
values = {}
byteValues = self.readMemoryBytes()
for i in range(len(byteValues)):
values[i] = "0x%02X" % byteValues[i]
return values
# {
# "0": "0x12",
# "1": "0x34",
# "2": "0xDE",
# "3": "0xFF"
# }
@request("GET", "memory/bytes/%(bounds)s")
@response(contentType=M_JSON)
def memoryBytes(self, bounds):
(start, stop) = bounds.split(",")
start = toint(start)
stop = toint(stop)
values = {}
byteValues = self.readMemoryBytes(start, stop)
for i in range(start, stop):
values[i] = "0x%02X" % byteValues[i - start]
return values
# {
# "1": "0x34",
# "2": "0xDE",
# }
@request("GET", "memory/word/*")
@response(contentType=M_JSON)
def memoryWordWildcard(self):
values = {}
for i in range(self.wordCount()):
values[i] = "0x%04X" % self.readMemoryWord(i)
return values
# {
# "0": "0x1234",
# "1": "0xDEFF"
# }
@request("GET", "memory/long/*")
@response(contentType=M_JSON)
def memoryLongWildcard(self):
values = {}
for i in range(self.longCount()):
values[i] = "0x%08X" % self.readMemoryLong(i)
return values
# {
# "0": "0x1234DEFF"
# }
@request("GET", "memory/bit/count")
@response("%d")
def bitCount(self):
return self._byteCount * 8
@request("GET", "memory/byte/count")
@response("%d")
def byteCount(self):
return self._byteCount
@request("GET", "memory/word/count")
@response("%d")
def wordCount(self):
return self._byteCount >> 1
@request("GET", "memory/long/count")
@response("%d")
def longCount(self):
return self._byteCount >> 2
@request("GET", "memory/bit/%(address)s")
@response("%d")
def readMemoryBit(self, address):
address = toint(address)
self.checkBitAddress(address)
return self.__readMemoryBit__(address)
@request("POST", "memory/bit/%(address)s/%(value)s")
@response("%d")
def writeMemoryBit(self, address, value):
address = toint(address)
self.checkBitAddress(address)
value = toint(value)
self.checkBitValue(value)
self.__writeMemoryBit__(address, value)
return self.readMemoryBit(address)
@request("GET", "memory/byte/%(address)s")
@response("0x%02X")
def readMemoryByte(self, address):
address = toint(address)
self.checkByteAddress(address)
return self.__readMemoryByte__(address)
@request("POST", "memory/byte/%(address)s/%(value)s")
@response("0x%02X")
def writeMemoryByte(self, address, value):
address = toint(address)
self.checkByteAddress(address)
value = toint(value)
self.checkByteValue(value)
self.__writeMemoryByte__(address, value)
return self.readMemoryByte(address)
@request("GET", "memory/word/%(address)s")
@response("0x%04X")
def readMemoryWord(self, address):
address = toint(address)
self.checkWordAddress(address)
return self.__readMemoryWord__(address)
@request("POST", "memory/word/%(address)s/%(value)s")
@response("0x%04X")
def writeMemoryWord(self, address, value):
address = toint(address)
self.checkWordAddress(address)
value = toint(value)
self.checkWordValue(value)
self.__writeMemoryWord__(address, value)
return self.readMemoryWord(address)
@request("GET", "memory/long/%(address)s")
@response("0x%08X")
def readMemoryLong(self, address):
address = toint(address)
self.checkLongAddress(address)
return self.__readMemoryLong__(address)
@request("POST", "memory/long/%(address)s/%(value)s")
@response("0x%08X")
def writeMemoryLong(self, address, value):
address = toint(address)
self.checkLongAddress(address)
value = toint(value)
self.checkLongValue(value)
self.__writeMemoryLong__(address, value)
return self.readMemoryLong(address)
#---------- Memory abstraction NON-REST implementation ----------
def readMemoryBytes(self, start=0, stop=None):
maxCount = self.byteCount()
if stop is None:
stop = maxCount
self.checkByteAddress(start)
self.checkStopByteAddress(stop)
byteValues = []
if start > stop:
raise ValueError("Stop address must be >= start address")
for i in range(start, stop):
byteValues.append(self.readMemoryByte(i))
return byteValues
def writeMemoryBytes(self, start=0, byteValues=[]):
self.checkByteAddress(start)
stop = start + len(byteValues)
self.checkStopByteAddress(stop)
i = 0
for byte in byteValues: # do nothing if list is empty
position = i + start
self.writeMemoryByte(position, byte)
i += 1
#---------- Memory abstraction contracts ----------
def __readMemoryByte__(self, address):
raise NotImplementedError
def __writeMemoryByte__(self, address, value):
raise NotImplementedError
#---------- Memory abstraction contracts with default implementations ---------
def __readMemoryBit__(self, address):
byteAddress, rawPosition = divmod(address, 8)
bitPosition = 7 - rawPosition
return (self.__readMemoryByte__(byteAddress) & (1 << bitPosition)) >> bitPosition
def __writeMemoryBit__(self, address, value):
byteAddress, rawPosition = divmod(address, 8)
bitPosition = 7 - rawPosition
changeMask = 1 << bitPosition
byteValue = self.__readMemoryByte__(byteAddress)
if value:
byteValue |= changeMask
else:
byteValue &= ~changeMask
self.__writeMemoryByte__(byteAddress, byteValue)
def __readMemoryWord__(self, address):
byte0 = self.__readMemoryByte__(address * 2)
byte1 = self.__readMemoryByte__((address * 2) + 1)
return (byte0 << 8) + byte1
def __writeMemoryWord__(self, address, value):
byte0 = (value >> 8) & 0xFF
byte1 = value & 0xFF
self.__writeMemoryByte__(address * 2, byte0)
self.__writeMemoryByte__((address * 2) + 1, byte1)
def __readMemoryLong__(self, address):
byte0 = self.__readMemoryByte__(address * 4)
byte1 = self.__readMemoryByte__((address * 4) + 1)
byte2 = self.__readMemoryByte__((address * 4) + 2)
byte3 = self.__readMemoryByte__((address * 4) + 3)
return (byte0 << 24) + (byte1 << 16) + (byte2 << 8) + byte3
def __writeMemoryLong__(self, address, value):
byte0 = (value >> 24) & 0xFF
byte1 = (value >> 16) & 0xFF
byte2 = (value >> 8) & 0xFF
byte3 = value & 0xFF
self.__writeMemoryByte__(address * 4, byte0)
self.__writeMemoryByte__((address * 4) + 1, byte1)
self.__writeMemoryByte__((address * 4) + 2, byte2)
self.__writeMemoryByte__((address * 4) + 3, byte3)
#---------- Value checks ----------
def checkBitAddress(self, address):
if not 0 <= address < self.byteCount() * 8:
raise ValueError("Bit address [%d] out of range [%d..%d]" % (address, 0, (self.byteCount() * 8) - 1))
def checkBitValue(self, value):
if not value in range(2):
raise ValueError("Bit value [%d] out of range [0..1]" % value)
def checkByteAddress(self, address):
if not 0 <= address < self.byteCount():
raise ValueError("Byte address [%d] out of range [%d..%d]" % (address, 0, self.byteCount() - 1))
def checkStopByteAddress(self, address):
if not 0 <= address <= self.byteCount():
raise ValueError("Stop byte address [%d] out of range [%d..%d]" % (address, 0, self.byteCount()))
def checkByteValue(self, value):
if not value in range(0x00,0xFF + 1):
raise ValueError("Byte value [0x%02X] out of range [0x%02X..0x%02X]" % (value, 0x00,0xFF))
def checkWordAddress(self, address):
if not 0 <= address < self.wordCount():
raise ValueError("Word address [%d] out of range [%d..%d]" % (address, 0, (self.wordCount() - 1)))
def checkWordValue(self, value):
if not value in range(0x00,0xFFFF + 1):
raise ValueError("Word value [0x%04X] out of range [0x%04X..0x%04X]" % (value, 0x00,0xFFFF))
def checkLongAddress(self, address):
if not 0 <= address < self.longCount():
raise ValueError("Long address [%d] out of range [%d..%d]" % (address, 0, (self.longCount() - 1)))
def checkLongValue(self, value):
if not value in range(0x00,0xFFFFFFFF + 1):
raise ValueError("Long value [0x%08X] out of range [0x%08X..0x%08X]" % (value, 0x00,0xFFFFFFFF))
#---------- Driver lookup ----------
DRIVERS = {}
DRIVERS["filememory"] = ["PICKLEFILE"]
DRIVERS["at24"] = ["EE24BASIC", "EE24X32", "EE24X64", "EE24X128", "EE24X256", "EE24X512", "EE24X1024_2"]
``` |
[
{
"content": "```python\n# -*- coding: UTF-8 -*-\n\n# COPYRIGHT (c) 2016 Cristóbal Ganter\n#\n# GNU AFFERO GENERAL PUBLIC LICENSE\n# Version 3, 19 November 2007\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Affero General Public License as publish... | [
{
"content": "<|memory_start|>```python\n# -*- coding: UTF-8 -*-\n\n# COPYRIGHT (c) 2016 Cristóbal Ganter\n#\n# GNU AFFERO GENERAL PUBLIC LICENSE\n# Version 3, 19 November 2007\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Affero General Public Li... | ```python
# -*- coding: UTF-8 -*-
# COPYRIGHT (c) 2016 Cristóbal Ganter
#
# GNU AFFERO GENERAL PUBLIC LICENSE
# Version 3, 19 November 2007
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
class NotDictError(TypeError):
"""Raise when an object is not an instance of dict.
.. automethod:: __init__
"""
def __init__(self, name, *args):
"""Initialize a new NotDictError.
:param str name:
Name of the object that is not a dictionary.
"""
super().__init__(
'{} is not a dictionary.'.format(name),
*args
)
class NotStringError(TypeError):
"""Raise when an object is not an instance of str.
.. automethod:: __init__
"""
def __init__(self, name, *args):
"""Initialize a new NotStringError.
:param str name:
Name of the object that is not a string.
"""
super().__init__(
'{} is not a string.'.format(name),
*args
)
class MissingFieldError(ValueError):
"""Raise when a dict doesn't have a required field.
.. automethod:: __init__
"""
def __init__(self, dict_name, field_name, *args):
"""Initialize a new MissingFieldError.
:param str dict_name:
The name of the dictionary.
:param str field_name:
The name of the field that is missing in the
dictionary.
"""
super().__init__(
"{} should have a '{}' field.".format(
dict_name, field_name),
*args
)
``` |
[
{
"content": "Here is a code snippet:\n```python\nfrom helperFunctions.data_conversion import make_bytes\nfrom test.common_helper import TEST_FW, TEST_FW_2, TEST_TEXT_FILE\nfrom test.unit.web_interface.base import WebInterfaceTest\n\n\nclass TestAppShowAnalysis(WebInterfaceTest):\n\n def test_app_show_analys... | [
{
"content": "Here is a code snippet:\n<|memory_start|>```python\nfrom helperFunctions.data_conversion import make_bytes\nfrom test.common_helper import TEST_FW, TEST_FW_2, TEST_TEXT_FILE\nfrom test.unit.web_interface.base import WebInterfaceTest\n\n\nclass TestAppShowAnalysis(WebInterfaceTest):\n\n def test... | ```python
from helperFunctions.data_conversion import make_bytes
from test.common_helper import TEST_FW, TEST_FW_2, TEST_TEXT_FILE
from test.unit.web_interface.base import WebInterfaceTest
class TestAppShowAnalysis(WebInterfaceTest):
def test_app_show_analysis_get_valid_fw(self):
result = self.test_client.get('/analysis/{}'.format(TEST_FW.uid)).data
assert b'<strong>UID:</strong> ' + make_bytes(TEST_FW.uid) in result
assert b'data-toggle="tooltip" title="mandatory plugin description"' in result
assert b'data-toggle="tooltip" title="optional plugin description"' in result
# check release date not available
assert b'1970-01-01' not in result
assert b'unknown' in result
# check file preview
assert b'Preview' not in result
result = self.test_client.get('/analysis/{}'.format(TEST_FW_2.uid)).data
assert b'unknown' not in result
assert b'2000-01-01' in result
def test_app_show_analysis_file_with_preview(self):
result = self.test_client.get('/analysis/{}'.format(TEST_TEXT_FILE.uid)).data
assert b'<strong>UID:</strong> ' + make_bytes(TEST_TEXT_FILE.uid) in result
assert b'Preview' in result
assert b'test file:\ncontent:'
def test_app_single_file_analysis(self):
result = self.test_client.get('/analysis/{}'.format(TEST_FW.uid))
assert b'Add new analysis' in result.data
assert b'Update analysis' in result.data
assert not self.mocked_interface.tasks
post_new = self.test_client.post('/analysis/{}'.format(TEST_FW.uid), content_type='multipart/form-data', data={'analysis_systems': ['plugin_a', 'plugin_b']})
assert post_new.status_code == 302
assert self.mocked_interface.tasks
assert self.mocked_interface.tasks[0].scheduled_analysis == ['plugin_a', 'plugin_b']
def test_app_dependency_graph(self):
result = self.test_client.get('/dependency-graph/{}'.format('testgraph'))
assert b'<strong>UID:</strong> testgraph' in result.data
assert b'Error: Graph could not be rendered. The file chosen as root must contain a filesystem with binaries.' not in result.data
assert b'Warning: Elf analysis plugin result is missing for 1 files' in result.data
result_error = self.test_client.get('/dependency-graph/{}'.format('1234567'))
assert b'Error: Graph could not be rendered. The file chosen as root must contain a filesystem with binaries.' in result_error.data
``` |
[
{
"content": "Repeat the code precisely as written (spacing intact):\n```python\nfrom unittest import TestCase, main\nimport os\nimport time\nimport sys\nimport tempfile\nimport difflib\nimport svtools.sv_classifier\nimport gzip\n\nclass IntegrationTest_sv_classify(TestCase):\n\n def test_chromosome_prefix(s... | [
{
"content": "Repeat the code precisely as written (spacing intact):\n<|memory_start|>```python\nfrom unittest import TestCase, main\nimport os\nimport time\nimport sys\nimport tempfile\nimport difflib\nimport svtools.sv_classifier\nimport gzip\n\nclass IntegrationTest_sv_classify(TestCase):\n\n def test_chr... | ```python
from unittest import TestCase, main
import os
import time
import sys
import tempfile
import difflib
import svtools.sv_classifier
import gzip
class IntegrationTest_sv_classify(TestCase):
def test_chromosome_prefix(self):
self.assertEqual(svtools.sv_classifier.chromosome_prefix('chrBLAH'), 'BLAH')
self.assertEqual(svtools.sv_classifier.chromosome_prefix('BLAH'), 'chrBLAH')
def test_integration_nb(self):
test_directory = os.path.dirname(os.path.abspath(__file__))
test_data_dir = os.path.join(test_directory, 'test_data', 'sv_classifier')
input = os.path.join(test_data_dir, 'reclass.test.vcf.gz')
expected_result = os.path.join(test_data_dir, 'output.nb.vcf.gz')
annot=os.path.join(test_data_dir, 'repeatMasker.recent.lt200millidiv.LINE_SINE_SVA.b37.sorted.bed.gz')
sex_file=os.path.join(test_data_dir, 'ceph.sex.txt')
train=os.path.join(test_data_dir, 'training.vars.vcf.gz')
diags_handle, diags_file = tempfile.mkstemp(suffix='.txt')
temp_descriptor, temp_output_path = tempfile.mkstemp(suffix='.vcf')
sex=open(sex_file, 'r')
sex_chrom_names = set(('X', 'Y'))
with gzip.open(input, 'rb') as input_handle, os.fdopen(temp_descriptor, 'w') as output_handle:
svtools.sv_classifier.run_reclassifier(input_handle, output_handle, sex, sex_chrom_names, annot, 0.9, None, 1.0, 0.2, train, 'naive_bayes', diags_file)
expected_lines = gzip.open(expected_result, 'rb').readlines()
expected_lines[1] = '##fileDate=' + time.strftime('%Y%m%d') + '\n'
produced_lines = open(temp_output_path).readlines()
diff = difflib.unified_diff(produced_lines, expected_lines, fromfile=temp_output_path, tofile=expected_result)
os.remove(temp_output_path)
os.remove(diags_file)
result = ''.join(diff)
self.assertEqual(result, '')
def test_integration_ls(self):
test_directory = os.path.dirname(os.path.abspath(__file__))
test_data_dir = os.path.join(test_directory, 'test_data', 'sv_classifier')
input = os.path.join(test_data_dir, 'reclass.test.vcf.gz')
expected_result = os.path.join(test_data_dir, 'output.ls.vcf.gz')
annot=os.path.join(test_data_dir, 'repeatMasker.recent.lt200millidiv.LINE_SINE_SVA.b37.sorted.bed.gz')
sex_file=os.path.join(test_data_dir, 'ceph.sex.txt')
train=os.path.join(test_data_dir, 'training.vars.vcf.gz')
diags_handle, diags_file = tempfile.mkstemp(suffix='.txt')
temp_descriptor, temp_output_path = tempfile.mkstemp(suffix='.vcf')
sex=open(sex_file, 'r')
sex_chrom_names = set(('X', 'Y'))
with gzip.open(input, 'rb') as input_handle, os.fdopen(temp_descriptor, 'w') as output_handle:
svtools.sv_classifier.run_reclassifier(input_handle, output_handle, sex, sex_chrom_names, annot, 0.9, None, 1.0, 0.2, train, 'large_sample', diags_file)
expected_lines = gzip.open(expected_result, 'rb').readlines()
expected_lines[1] = '##fileDate=' + time.strftime('%Y%m%d') + '\n'
produced_lines = open(temp_output_path).readlines()
diff = difflib.unified_diff(produced_lines, expected_lines, fromfile=temp_output_path, tofile=expected_result)
os.remove(temp_output_path)
os.remove(diags_file)
result = ''.join(diff)
self.assertEqual(result, '')
def test_integration_hyb(self):
test_directory = os.path.dirname(os.path.abspath(__file__))
test_data_dir = os.path.join(test_directory, 'test_data', 'sv_classifier')
input = os.path.join(test_data_dir, 'reclass.test.vcf.gz')
expected_result = os.path.join(test_data_dir, 'output.hyb.vcf.gz')
annot=os.path.join(test_data_dir, 'repeatMasker.recent.lt200millidiv.LINE_SINE_SVA.b37.sorted.bed.gz')
sex_file=os.path.join(test_data_dir, 'ceph.sex.txt')
train=os.path.join(test_data_dir, 'training.vars.vcf.gz')
diags_handle, diags_file = tempfile.mkstemp(suffix='.txt')
temp_descriptor, temp_output_path = tempfile.mkstemp(suffix='.vcf')
sex=open(sex_file, 'r')
sex_chrom_names = set(('X', 'Y'))
with gzip.open(input, 'rb') as input_handle, os.fdopen(temp_descriptor, 'w') as output_handle:
svtools.sv_classifier.run_reclassifier(input_handle, output_handle, sex, sex_chrom_names, annot, 0.9, None, 1.0, 0.2, train, 'hybrid', diags_file)
expected_lines = gzip.open(expected_result, 'rb').readlines()
expected_lines[1] = '##fileDate=' + time.strftime('%Y%m%d') + '\n'
produced_lines = open(temp_output_path).readlines()
diff = difflib.unified_diff(produced_lines, expected_lines, fromfile=temp_output_path, tofile=expected_result)
os.remove(temp_output_path)
os.remove(diags_file)
result = ''.join(diff)
self.assertEqual(result, '')
if __name__ == "__main__":
main()
``` |
[
{
"content": "Here is a code file:\n```python\nfrom collections import OrderedDict\nfrom datetime import datetime\nfrom decimal import Decimal\nfrom urllib.request import urlopen\nfrom urllib.error import HTTPError\nfrom xml.sax import handler, make_parser\nimport xml.etree.ElementTree\nimport json\nimport re\n... | [
{
"content": "Here is a code file:\n<|memory_start|>```python\nfrom collections import OrderedDict\nfrom datetime import datetime\nfrom decimal import Decimal\nfrom urllib.request import urlopen\nfrom urllib.error import HTTPError\nfrom xml.sax import handler, make_parser\nimport xml.etree.ElementTree\nimport j... | ```python
from collections import OrderedDict
from datetime import datetime
from decimal import Decimal
from urllib.request import urlopen
from urllib.error import HTTPError
from xml.sax import handler, make_parser
import xml.etree.ElementTree
import json
import re
import time
from typing import Any, Callable, ClassVar, Dict, List, NoReturn, Optional, Tuple, Type, TypeVar, Union
from overpy import exception
# Ignore flake8 F401 warning for unused vars
from overpy.__about__ import ( # noqa: F401
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
ElementTypeVar = TypeVar("ElementTypeVar", bound="Element")
XML_PARSER_DOM = 1
XML_PARSER_SAX = 2
# Try to convert some common attributes
# http://wiki.openstreetmap.org/wiki/Elements#Common_attributes
GLOBAL_ATTRIBUTE_MODIFIERS: Dict[str, Callable] = {
"changeset": int,
"timestamp": lambda ts: datetime.strptime(ts, "%Y-%m-%dT%H:%M:%SZ"),
"uid": int,
"version": int,
"visible": lambda v: v.lower() == "true"
}
def is_valid_type(
element: Union["Area", "Node", "Relation", "Way"],
cls: Type[Union["Area", "Element", "Node", "Relation", "Way"]]) -> bool:
"""
Test if an element is of a given type.
:param element: The element instance to test
:param cls: The element class to test
:return: False or True
"""
return isinstance(element, cls) and element.id is not None
class Overpass:
"""
Class to access the Overpass API
:cvar default_max_retry_count: Global max number of retries (Default: 0)
:cvar default_read_chunk_size: Max size of each chunk read from the server response
:cvar default_retry_timeout: Global time to wait between tries (Default: 1.0s)
:cvar default_url: Default URL of the Overpass server
"""
default_max_retry_count: ClassVar[int] = 0
default_read_chunk_size: ClassVar[int] = 4096
default_retry_timeout: ClassVar[float] = 1.0
default_url: ClassVar[str] = "http://overpass-api.de/api/interpreter"
def __init__(
self,
read_chunk_size: Optional[int] = None,
url: Optional[str] = None,
xml_parser: int = XML_PARSER_SAX,
max_retry_count: int = None,
retry_timeout: float = None):
"""
:param read_chunk_size: Max size of each chunk read from the server response
:param url: Optional URL of the Overpass server. Defaults to http://overpass-api.de/api/interpreter
:param xml_parser: The xml parser to use
:param max_retry_count: Max number of retries (Default: default_max_retry_count)
:param retry_timeout: Time to wait between tries (Default: default_retry_timeout)
"""
self.url = self.default_url
if url is not None:
self.url = url
self._regex_extract_error_msg = re.compile(br"\<p\>(?P<msg>\<strong\s.*?)\</p\>")
self._regex_remove_tag = re.compile(b"<[^>]*?>")
if read_chunk_size is None:
read_chunk_size = self.default_read_chunk_size
self.read_chunk_size = read_chunk_size
if max_retry_count is None:
max_retry_count = self.default_max_retry_count
self.max_retry_count = max_retry_count
if retry_timeout is None:
retry_timeout = self.default_retry_timeout
self.retry_timeout = retry_timeout
self.xml_parser = xml_parser
@staticmethod
def _handle_remark_msg(msg: str) -> NoReturn:
"""
Try to parse the message provided with the remark tag or element.
:param msg: The message
:raises overpy.exception.OverpassRuntimeError: If message starts with 'runtime error:'
:raises overpy.exception.OverpassRuntimeRemark: If message starts with 'runtime remark:'
:raises overpy.exception.OverpassUnknownError: If we are unable to identify the error
"""
msg = msg.strip()
if msg.startswith("runtime error:"):
raise exception.OverpassRuntimeError(msg=msg)
elif msg.startswith("runtime remark:"):
raise exception.OverpassRuntimeRemark(msg=msg)
raise exception.OverpassUnknownError(msg=msg)
def query(self, query: Union[bytes, str]) -> "Result":
"""
Query the Overpass API
:param query: The query string in Overpass QL
:return: The parsed result
"""
if not isinstance(query, bytes):
query = query.encode("utf-8")
retry_num: int = 0
retry_exceptions: List[exception.OverPyException] = []
do_retry: bool = True if self.max_retry_count > 0 else False
while retry_num <= self.max_retry_count:
if retry_num > 0:
time.sleep(self.retry_timeout)
retry_num += 1
try:
f = urlopen(self.url, query)
except HTTPError as e:
f = e
response = f.read(self.read_chunk_size)
while True:
data = f.read(self.read_chunk_size)
if len(data) == 0:
break
response = response + data
f.close()
current_exception: exception.OverPyException
if f.code == 200:
content_type = f.getheader("Content-Type")
if content_type == "application/json":
return self.parse_json(response)
if content_type == "application/osm3s+xml":
return self.parse_xml(response)
current_exception = exception.OverpassUnknownContentType(content_type)
if not do_retry:
raise current_exception
retry_exceptions.append(current_exception)
continue
if f.code == 400:
msgs: List[str] = []
for msg_raw in self._regex_extract_error_msg.finditer(response):
msg_clean_bytes = self._regex_remove_tag.sub(b"", msg_raw.group("msg"))
try:
msg = msg_clean_bytes.decode("utf-8")
except UnicodeDecodeError:
msg = repr(msg_clean_bytes)
msgs.append(msg)
current_exception = exception.OverpassBadRequest(
query,
msgs=msgs
)
if not do_retry:
raise current_exception
retry_exceptions.append(current_exception)
continue
if f.code == 429:
current_exception = exception.OverpassTooManyRequests()
if not do_retry:
raise current_exception
retry_exceptions.append(current_exception)
continue
if f.code == 504:
current_exception = exception.OverpassGatewayTimeout()
if not do_retry:
raise current_exception
retry_exceptions.append(current_exception)
continue
current_exception = exception.OverpassUnknownHTTPStatusCode(f.code)
if not do_retry:
raise current_exception
retry_exceptions.append(current_exception)
continue
raise exception.MaxRetriesReached(retry_count=retry_num, exceptions=retry_exceptions)
def parse_json(self, data: Union[bytes, str], encoding: str = "utf-8") -> "Result":
"""
Parse raw response from Overpass service.
:param data: Raw JSON Data
:param encoding: Encoding to decode byte string
:return: Result object
"""
if isinstance(data, bytes):
data = data.decode(encoding)
data_parsed: dict = json.loads(data, parse_float=Decimal)
if "remark" in data_parsed:
self._handle_remark_msg(msg=data_parsed.get("remark"))
return Result.from_json(data_parsed, api=self)
def parse_xml(self, data: Union[bytes, str], encoding: str = "utf-8", parser: Optional[int] = None):
"""
:param data: Raw XML Data
:param encoding: Encoding to decode byte string
:param parser: The XML parser to use
:return: Result object
"""
if parser is None:
parser = self.xml_parser
if isinstance(data, bytes):
data = data.decode(encoding)
m = re.compile("<remark>(?P<msg>[^<>]*)</remark>").search(data)
if m:
self._handle_remark_msg(m.group("msg"))
return Result.from_xml(data, api=self, parser=parser)
class Result:
"""
Class to handle the result.
"""
def __init__(
self,
elements: Optional[List[Union["Area", "Node", "Relation", "Way"]]] = None,
api: Optional[Overpass] = None):
"""
:param elements: List of elements to initialize the result with
:param api: The API object to load additional resources and elements
"""
if elements is None:
elements = []
self._areas: Dict[int, Union["Area", "Node", "Relation", "Way"]] = OrderedDict(
(element.id, element) for element in elements if is_valid_type(element, Area)
)
self._nodes = OrderedDict(
(element.id, element) for element in elements if is_valid_type(element, Node)
)
self._ways = OrderedDict(
(element.id, element) for element in elements if is_valid_type(element, Way)
)
self._relations = OrderedDict(
(element.id, element) for element in elements if is_valid_type(element, Relation)
)
self._class_collection_map: Dict[Any, Any] = {
Node: self._nodes,
Way: self._ways,
Relation: self._relations,
Area: self._areas
}
self.api = api
def expand(self, other: "Result"):
"""
Add all elements from an other result to the list of elements of this result object.
It is used by the auto resolve feature.
:param other: Expand the result with the elements from this result.
:raises ValueError: If provided parameter is not instance of :class:`overpy.Result`
"""
if not isinstance(other, Result):
raise ValueError("Provided argument has to be instance of overpy:Result()")
other_collection_map: Dict[Type["Element"], List[Union["Area", "Node", "Relation", "Way"]]] = {
Area: other.areas,
Node: other.nodes,
Relation: other.relations,
Way: other.ways
}
for element_type, own_collection in self._class_collection_map.items():
for element in other_collection_map[element_type]:
if is_valid_type(element, element_type) and element.id not in own_collection:
own_collection[element.id] = element
def append(self, element: Union["Area", "Node", "Relation", "Way"]):
"""
Append a new element to the result.
:param element: The element to append
"""
if is_valid_type(element, Element):
self._class_collection_map[element.__class__].setdefault(element.id, element)
def get_elements(
self,
filter_cls: Type[ElementTypeVar],
elem_id: Optional[int] = None) -> List[ElementTypeVar]:
"""
Get a list of elements from the result and filter the element type by a class.
:param filter_cls:
:param elem_id: ID of the object
:return: List of available elements
"""
result: List[ElementTypeVar] = []
if elem_id is not None:
try:
result = [self._class_collection_map[filter_cls][elem_id]]
except KeyError:
result = []
else:
for e in self._class_collection_map[filter_cls].values():
result.append(e)
return result
def get_ids(
self,
filter_cls: Type[Union["Area", "Node", "Relation", "Way"]]) -> List[int]:
"""
Get all Element IDs
:param filter_cls: Only IDs of elements with this type
:return: List of IDs
"""
return list(self._class_collection_map[filter_cls].keys())
def get_node_ids(self) -> List[int]:
return self.get_ids(filter_cls=Node)
def get_way_ids(self) -> List[int]:
return self.get_ids(filter_cls=Way)
def get_relation_ids(self) -> List[int]:
return self.get_ids(filter_cls=Relation)
def get_area_ids(self) -> List[int]:
return self.get_ids(filter_cls=Area)
@classmethod
def from_json(cls, data: dict, api: Optional[Overpass] = None) -> "Result":
"""
Create a new instance and load data from json object.
:param data: JSON data returned by the Overpass API
:param api:
:return: New instance of Result object
"""
result = cls(api=api)
elem_cls: Type[Union["Area", "Node", "Relation", "Way"]]
for elem_cls in [Node, Way, Relation, Area]:
for element in data.get("elements", []):
e_type = element.get("type")
if hasattr(e_type, "lower") and e_type.lower() == elem_cls._type_value:
result.append(elem_cls.from_json(element, result=result))
return result
@classmethod
def from_xml(
cls,
data: Union[str, xml.etree.ElementTree.Element],
api: Optional[Overpass] = None,
parser: Optional[int] = None) -> "Result":
"""
Create a new instance and load data from xml data or object.
.. note::
If parser is set to None, the functions tries to find the best parse.
By default the SAX parser is chosen if a string is provided as data.
The parser is set to DOM if an xml.etree.ElementTree.Element is provided as data value.
:param data: Root element
:param api: The instance to query additional information if required.
:param parser: Specify the parser to use(DOM or SAX)(Default: None = autodetect, defaults to SAX)
:return: New instance of Result object
"""
if parser is None:
if isinstance(data, str):
parser = XML_PARSER_SAX
else:
parser = XML_PARSER_DOM
result = cls(api=api)
if parser == XML_PARSER_DOM:
import xml.etree.ElementTree as ET
if isinstance(data, str):
root = ET.fromstring(data)
elif isinstance(data, ET.Element):
root = data
else:
raise exception.OverPyException("Unable to detect data type.")
elem_cls: Type[Union["Area", "Node", "Relation", "Way"]]
for elem_cls in [Node, Way, Relation, Area]:
for child in root:
if child.tag.lower() == elem_cls._type_value:
result.append(elem_cls.from_xml(child, result=result))
elif parser == XML_PARSER_SAX:
from io import StringIO
if not isinstance(data, str):
raise ValueError("data must be of type str if using the SAX parser")
source = StringIO(data)
sax_handler = OSMSAXHandler(result)
sax_parser = make_parser()
sax_parser.setContentHandler(sax_handler)
sax_parser.parse(source)
else:
# ToDo: better exception
raise Exception("Unknown XML parser")
return result
def get_area(self, area_id: int, resolve_missing: bool = False) -> "Area":
"""
Get an area by its ID.
:param area_id: The area ID
:param resolve_missing: Query the Overpass API if the area is missing in the result set.
:return: The area
:raises overpy.exception.DataIncomplete: The requested way is not available in the result cache.
:raises overpy.exception.DataIncomplete: If resolve_missing is True and the area can't be resolved.
"""
areas = self.get_areas(area_id=area_id)
if len(areas) == 0:
if resolve_missing is False:
raise exception.DataIncomplete("Resolve missing area is disabled")
query = ("\n"
"[out:json];\n"
"area({area_id});\n"
"out body;\n"
)
query = query.format(
area_id=area_id
)
tmp_result = self.api.query(query)
self.expand(tmp_result)
areas = self.get_areas(area_id=area_id)
if len(areas) == 0:
raise exception.DataIncomplete("Unable to resolve requested areas")
return areas[0]
def get_areas(self, area_id: Optional[int] = None) -> List["Area"]:
"""
Alias for get_elements() but filter the result by Area
:param area_id: The Id of the area
:return: List of elements
"""
return self.get_elements(Area, elem_id=area_id)
def get_node(self, node_id: int, resolve_missing: bool = False) -> "Node":
"""
Get a node by its ID.
:param node_id: The node ID
:param resolve_missing: Query the Overpass API if the node is missing in the result set.
:return: The node
:raises overpy.exception.DataIncomplete: At least one referenced node is not available in the result cache.
:raises overpy.exception.DataIncomplete: If resolve_missing is True and at least one node can't be resolved.
"""
nodes = self.get_nodes(node_id=node_id)
if len(nodes) == 0:
if not resolve_missing:
raise exception.DataIncomplete("Resolve missing nodes is disabled")
query = ("\n"
"[out:json];\n"
"node({node_id});\n"
"out body;\n"
)
query = query.format(
node_id=node_id
)
tmp_result = self.api.query(query)
self.expand(tmp_result)
nodes = self.get_nodes(node_id=node_id)
if len(nodes) == 0:
raise exception.DataIncomplete("Unable to resolve all nodes")
return nodes[0]
def get_nodes(self, node_id: Optional[int] = None) -> List["Node"]:
"""
Alias for get_elements() but filter the result by Node()
:param node_id: The Id of the node
:type node_id: Integer
:return: List of elements
"""
return self.get_elements(Node, elem_id=node_id)
def get_relation(self, rel_id: int, resolve_missing: bool = False) -> "Relation":
"""
Get a relation by its ID.
:param rel_id: The relation ID
:param resolve_missing: Query the Overpass API if the relation is missing in the result set.
:return: The relation
:raises overpy.exception.DataIncomplete: The requested relation is not available in the result cache.
:raises overpy.exception.DataIncomplete: If resolve_missing is True and the relation can't be resolved.
"""
relations = self.get_relations(rel_id=rel_id)
if len(relations) == 0:
if resolve_missing is False:
raise exception.DataIncomplete("Resolve missing relations is disabled")
query = ("\n"
"[out:json];\n"
"relation({relation_id});\n"
"out body;\n"
)
query = query.format(
relation_id=rel_id
)
tmp_result = self.api.query(query)
self.expand(tmp_result)
relations = self.get_relations(rel_id=rel_id)
if len(relations) == 0:
raise exception.DataIncomplete("Unable to resolve requested reference")
return relations[0]
def get_relations(self, rel_id: int = None) -> List["Relation"]:
"""
Alias for get_elements() but filter the result by Relation
:param rel_id: Id of the relation
:return: List of elements
"""
return self.get_elements(Relation, elem_id=rel_id)
def get_way(self, way_id: int, resolve_missing: bool = False) -> "Way":
"""
Get a way by its ID.
:param way_id: The way ID
:param resolve_missing: Query the Overpass API if the way is missing in the result set.
:return: The way
:raises overpy.exception.DataIncomplete: The requested way is not available in the result cache.
:raises overpy.exception.DataIncomplete: If resolve_missing is True and the way can't be resolved.
"""
ways = self.get_ways(way_id=way_id)
if len(ways) == 0:
if resolve_missing is False:
raise exception.DataIncomplete("Resolve missing way is disabled")
query = ("\n"
"[out:json];\n"
"way({way_id});\n"
"out body;\n"
)
query = query.format(
way_id=way_id
)
tmp_result = self.api.query(query)
self.expand(tmp_result)
ways = self.get_ways(way_id=way_id)
if len(ways) == 0:
raise exception.DataIncomplete("Unable to resolve requested way")
return ways[0]
def get_ways(self, way_id: Optional[int] = None) -> List["Way"]:
"""
Alias for get_elements() but filter the result by Way
:param way_id: The Id of the way
:return: List of elements
"""
return self.get_elements(Way, elem_id=way_id)
area_ids = property(get_area_ids)
areas = property(get_areas)
node_ids = property(get_node_ids)
nodes = property(get_nodes)
relation_ids = property(get_relation_ids)
relations = property(get_relations)
way_ids = property(get_way_ids)
ways = property(get_ways)
class Element:
"""
Base element
"""
_type_value: str
def __init__(self, attributes: Optional[dict] = None, result: Optional[Result] = None, tags: Optional[Dict] = None):
"""
:param attributes: Additional attributes
:param result: The result object this element belongs to
:param tags: List of tags
"""
self._result = result
self.attributes = attributes
# ToDo: Add option to modify attribute modifiers
attribute_modifiers: Dict[str, Callable] = dict(GLOBAL_ATTRIBUTE_MODIFIERS.items())
for n, m in attribute_modifiers.items():
if n in self.attributes:
self.attributes[n] = m(self.attributes[n])
self.id: int
self.tags = tags
@classmethod
def get_center_from_json(cls, data: dict) -> Tuple[Decimal, Decimal]:
"""
Get center information from json data
:param data: json data
:return: tuple with two elements: lat and lon
"""
center_lat = None
center_lon = None
center = data.get("center")
if isinstance(center, dict):
center_lat = center.get("lat")
center_lon = center.get("lon")
if center_lat is None or center_lon is None:
raise ValueError("Unable to get lat or lon of way center.")
center_lat = Decimal(center_lat)
center_lon = Decimal(center_lon)
return center_lat, center_lon
@classmethod
def get_center_from_xml_dom(cls, sub_child: xml.etree.ElementTree.Element) -> Tuple[Decimal, Decimal]:
center_lat_str: str = sub_child.attrib.get("lat")
center_lon_str: str = sub_child.attrib.get("lon")
if center_lat_str is None or center_lon_str is None:
raise ValueError("Unable to get lat or lon of way center.")
center_lat = Decimal(center_lat_str)
center_lon = Decimal(center_lon_str)
return center_lat, center_lon
@classmethod
def from_json(cls: Type[ElementTypeVar], data: dict, result: Optional[Result] = None) -> ElementTypeVar:
"""
Create new Element() from json data
:param data:
:param result:
:return:
"""
raise NotImplementedError
@classmethod
def from_xml(
cls: Type[ElementTypeVar],
child: xml.etree.ElementTree.Element,
result: Optional[Result] = None) -> ElementTypeVar:
"""
Create new Element() element from XML data
"""
raise NotImplementedError
class Area(Element):
"""
Class to represent an element of type area
"""
_type_value = "area"
def __init__(self, area_id: Optional[int] = None, **kwargs):
"""
:param area_id: Id of the area element
:param kwargs: Additional arguments are passed directly to the parent class
"""
Element.__init__(self, **kwargs)
#: The id of the way
self.id = area_id
def __repr__(self) -> str:
return f"<overpy.Area id={self.id}>"
@classmethod
def from_json(cls, data: dict, result: Optional[Result] = None) -> "Area":
"""
Create new Area element from JSON data
:param data: Element data from JSON
:param result: The result this element belongs to
:return: New instance of Way
:raises overpy.exception.ElementDataWrongType: If type value of the passed JSON data does not match.
"""
if data.get("type") != cls._type_value:
raise exception.ElementDataWrongType(
type_expected=cls._type_value,
type_provided=data.get("type")
)
tags = data.get("tags", {})
area_id = data.get("id")
attributes = {}
ignore = ["id", "tags", "type"]
for n, v in data.items():
if n in ignore:
continue
attributes[n] = v
return cls(area_id=area_id, attributes=attributes, tags=tags, result=result)
@classmethod
def from_xml(cls, child: xml.etree.ElementTree.Element, result: Optional[Result] = None) -> "Area":
"""
Create new way element from XML data
:param child: XML node to be parsed
:param result: The result this node belongs to
:return: New Way oject
:raises overpy.exception.ElementDataWrongType: If name of the xml child node doesn't match
:raises ValueError: If the ref attribute of the xml node is not provided
:raises ValueError: If a tag doesn't have a name
"""
if child.tag.lower() != cls._type_value:
raise exception.ElementDataWrongType(
type_expected=cls._type_value,
type_provided=child.tag.lower()
)
tags = {}
for sub_child in child:
if sub_child.tag.lower() == "tag":
name = sub_child.attrib.get("k")
if name is None:
raise ValueError("Tag without name/key.")
value = sub_child.attrib.get("v")
tags[name] = value
area_id_str: Optional[str] = child.attrib.get("id")
area_id: Optional[int] = None
if area_id_str is not None:
area_id = int(area_id_str)
attributes = {}
ignore = ["id"]
for n, v in child.attrib.items():
if n in ignore:
continue
attributes[n] = v
return cls(area_id=area_id, attributes=attributes, tags=tags, result=result)
class Node(Element):
    """
    Class to represent an element of type node
    """

    # XML tag name / JSON "type" value identifying this element kind.
    _type_value = "node"

    def __init__(
            self,
            node_id: Optional[int] = None,
            lat: Optional[Union[Decimal, float]] = None,
            lon: Optional[Union[Decimal, float]] = None,
            **kwargs):
        """
        :param lat: Latitude
        :param lon: Longitude
        :param node_id: Id of the node element
        :param kwargs: Additional arguments are passed directly to the parent class
        """
        Element.__init__(self, **kwargs)
        #: Id of the node
        self.id = node_id
        #: Latitude of the node
        self.lat = lat
        #: Longitude of the node
        self.lon = lon

    def __repr__(self) -> str:
        return f"<overpy.Node id={self.id} lat={self.lat} lon={self.lon}>"

    @classmethod
    def from_json(cls, data: dict, result: Optional[Result] = None) -> "Node":
        """
        Create new Node element from JSON data

        :param data: Element data from JSON
        :param result: The result this element belongs to
        :return: New instance of Node
        :raises overpy.exception.ElementDataWrongType: If type value of the passed JSON data does not match.
        """
        if data.get("type") != cls._type_value:
            raise exception.ElementDataWrongType(
                type_expected=cls._type_value,
                type_provided=data.get("type")
            )

        tags = data.get("tags", {})

        node_id = data.get("id")
        lat = data.get("lat")
        lon = data.get("lon")

        # Any remaining JSON members are kept as generic attributes.
        attributes = {}
        ignore = ["type", "id", "lat", "lon", "tags"]
        for n, v in data.items():
            if n in ignore:
                continue
            attributes[n] = v

        return cls(node_id=node_id, lat=lat, lon=lon, tags=tags, attributes=attributes, result=result)

    @classmethod
    def from_xml(cls, child: xml.etree.ElementTree.Element, result: Optional[Result] = None) -> "Node":
        """
        Create new Node element from XML data

        :param child: XML node to be parsed
        :param result: The result this node belongs to
        :return: New Node object
        :raises overpy.exception.ElementDataWrongType: If name of the xml child node doesn't match
        :raises ValueError: If a tag doesn't have a name
        """
        if child.tag.lower() != cls._type_value:
            raise exception.ElementDataWrongType(
                type_expected=cls._type_value,
                type_provided=child.tag.lower()
            )

        # Collect <tag k="..." v="..."/> children.
        tags = {}

        for sub_child in child:
            if sub_child.tag.lower() == "tag":
                name = sub_child.attrib.get("k")
                if name is None:
                    raise ValueError("Tag without name/key.")
                value = sub_child.attrib.get("v")
                tags[name] = value

        node_id: Optional[int] = None
        node_id_str: Optional[str] = child.attrib.get("id")
        if node_id_str is not None:
            node_id = int(node_id_str)

        # Coordinates are parsed as Decimal to avoid float rounding issues.
        lat: Optional[Decimal] = None
        lat_str: Optional[str] = child.attrib.get("lat")
        if lat_str is not None:
            lat = Decimal(lat_str)

        lon: Optional[Decimal] = None
        lon_str: Optional[str] = child.attrib.get("lon")
        if lon_str is not None:
            lon = Decimal(lon_str)

        # Remaining XML attributes become generic attributes.
        attributes = {}
        ignore = ["id", "lat", "lon"]
        for n, v in child.attrib.items():
            if n in ignore:
                continue
            attributes[n] = v

        return cls(node_id=node_id, lat=lat, lon=lon, tags=tags, attributes=attributes, result=result)
class Way(Element):
    """
    Class to represent an element of type way
    """

    # XML tag name / JSON "type" value identifying this element kind.
    _type_value = "way"

    def __init__(
            self,
            way_id: Optional[int] = None,
            center_lat: Optional[Union[Decimal, float]] = None,
            center_lon: Optional[Union[Decimal, float]] = None,
            node_ids: Optional[Union[List[int], Tuple[int]]] = None,
            **kwargs):
        """
        :param node_ids: List of node IDs
        :param way_id: Id of the way element
        :param kwargs: Additional arguments are passed directly to the parent class
        """
        Element.__init__(self, **kwargs)
        #: The id of the way
        self.id = way_id

        #: List of Ids of the associated nodes
        self._node_ids = node_ids

        #: The lat/lon of the center of the way (optional depending on query)
        self.center_lat = center_lat
        self.center_lon = center_lon

    def __repr__(self):
        return f"<overpy.Way id={self.id} nodes={self._node_ids}>"

    @property
    def nodes(self) -> List[Node]:
        """
        List of nodes associated with the way.
        """
        return self.get_nodes()

    def get_nodes(self, resolve_missing: bool = False) -> List[Node]:
        """
        Get the nodes defining the geometry of the way

        :param resolve_missing: Try to resolve missing nodes.
        :return: List of nodes
        :raises overpy.exception.DataIncomplete: At least one referenced node is not available in the result cache.
        :raises overpy.exception.DataIncomplete: If resolve_missing is True and at least one node can't be resolved.
        """
        result = []
        resolved = False

        for node_id in self._node_ids:
            try:
                node = self._result.get_node(node_id)
            except exception.DataIncomplete:
                node = None

            if node is not None:
                result.append(node)
                continue

            if not resolve_missing:
                raise exception.DataIncomplete("Resolve missing nodes is disabled")

            # We tried to resolve the data but some nodes are still missing
            if resolved:
                raise exception.DataIncomplete("Unable to resolve all nodes")

            # Fetch all nodes of this way in one extra query and merge them
            # into the cached result; done at most once per get_nodes call.
            query = ("\n"
                "[out:json];\n"
                "way({way_id});\n"
                "node(w);\n"
                "out body;\n"
            )
            query = query.format(
                way_id=self.id
            )
            tmp_result = self._result.api.query(query)
            self._result.expand(tmp_result)
            resolved = True

            # Retry the lookup after the cache has been expanded.
            try:
                node = self._result.get_node(node_id)
            except exception.DataIncomplete:
                node = None

            if node is None:
                raise exception.DataIncomplete("Unable to resolve all nodes")

            result.append(node)

        return result

    @classmethod
    def from_json(cls, data: dict, result: Optional[Result] = None) -> "Way":
        """
        Create new Way element from JSON data

        :param data: Element data from JSON
        :param result: The result this element belongs to
        :return: New instance of Way
        :raises overpy.exception.ElementDataWrongType: If type value of the passed JSON data does not match.
        """
        if data.get("type") != cls._type_value:
            raise exception.ElementDataWrongType(
                type_expected=cls._type_value,
                type_provided=data.get("type")
            )

        tags = data.get("tags", {})

        way_id = data.get("id")
        node_ids = data.get("nodes")
        (center_lat, center_lon) = cls.get_center_from_json(data=data)

        # Any remaining JSON members are kept as generic attributes.
        attributes = {}
        ignore = ["center", "id", "nodes", "tags", "type"]
        for n, v in data.items():
            if n in ignore:
                continue
            attributes[n] = v

        return cls(
            attributes=attributes,
            center_lat=center_lat,
            center_lon=center_lon,
            node_ids=node_ids,
            tags=tags,
            result=result,
            way_id=way_id
        )

    @classmethod
    def from_xml(cls, child: xml.etree.ElementTree.Element, result: Optional[Result] = None) -> "Way":
        """
        Create new Way element from XML data

        :param child: XML node to be parsed
        :param result: The result this node belongs to
        :return: New Way object
        :raises overpy.exception.ElementDataWrongType: If name of the xml child node doesn't match
        :raises ValueError: If the ref attribute of the xml node is not provided
        :raises ValueError: If a tag doesn't have a name
        """
        if child.tag.lower() != cls._type_value:
            raise exception.ElementDataWrongType(
                type_expected=cls._type_value,
                type_provided=child.tag.lower()
            )

        tags = {}
        node_ids = []
        center_lat = None
        center_lon = None

        # A way may mix <tag>, <nd> and <center> children.
        for sub_child in child:
            if sub_child.tag.lower() == "tag":
                name = sub_child.attrib.get("k")
                if name is None:
                    raise ValueError("Tag without name/key.")
                value = sub_child.attrib.get("v")
                tags[name] = value
            if sub_child.tag.lower() == "nd":
                ref_id_str = sub_child.attrib.get("ref")
                if ref_id_str is None:
                    raise ValueError("Unable to find required ref value.")
                ref_id: int = int(ref_id_str)
                node_ids.append(ref_id)
            if sub_child.tag.lower() == "center":
                (center_lat, center_lon) = cls.get_center_from_xml_dom(sub_child=sub_child)

        way_id: Optional[int] = None
        way_id_str: Optional[str] = child.attrib.get("id")
        if way_id_str is not None:
            way_id = int(way_id_str)

        # Remaining XML attributes become generic attributes.
        attributes = {}
        ignore = ["id"]
        for n, v in child.attrib.items():
            if n in ignore:
                continue
            attributes[n] = v

        return cls(way_id=way_id, center_lat=center_lat, center_lon=center_lon,
            attributes=attributes, node_ids=node_ids, tags=tags, result=result)
class Relation(Element):
    """
    Class to represent an element of type relation
    """

    # XML tag name / JSON "type" value identifying this element kind.
    _type_value = "relation"

    def __init__(
            self,
            rel_id: Optional[int] = None,
            center_lat: Optional[Union[Decimal, float]] = None,
            center_lon: Optional[Union[Decimal, float]] = None,
            members: Optional[List["RelationMember"]] = None,
            **kwargs):
        """
        :param members: Members of this relation (RelationMember subclasses)
        :param rel_id: Id of the relation element
        :param kwargs: Additional arguments are passed directly to the parent class
        :return:
        """
        Element.__init__(self, **kwargs)
        #: Id of the relation
        self.id = rel_id
        #: List of relation members
        self.members = members

        #: The lat/lon of the center of the way (optional depending on query)
        self.center_lat = center_lat
        self.center_lon = center_lon

    def __repr__(self):
        return f"<overpy.Relation id={self.id}>"

    @classmethod
    def from_json(cls, data: dict, result: Optional[Result] = None) -> "Relation":
        """
        Create new Relation element from JSON data

        :param data: Element data from JSON
        :param result: The result this element belongs to
        :return: New instance of Relation
        :raises overpy.exception.ElementDataWrongType: If type value of the passed JSON data does not match.
        """
        if data.get("type") != cls._type_value:
            raise exception.ElementDataWrongType(
                type_expected=cls._type_value,
                type_provided=data.get("type")
            )

        tags = data.get("tags", {})

        rel_id = data.get("id")
        (center_lat, center_lon) = cls.get_center_from_json(data=data)

        # Dispatch each member dict to the matching RelationMember subclass
        # based on its "type" value; unknown types are silently dropped.
        members = []

        supported_members = [RelationNode, RelationWay, RelationRelation]
        for member in data.get("members", []):
            type_value = member.get("type")
            for member_cls in supported_members:
                if member_cls._type_value == type_value:
                    members.append(
                        member_cls.from_json(
                            member,
                            result=result
                        )
                    )

        # Any remaining JSON members are kept as generic attributes.
        attributes = {}
        ignore = ["id", "members", "tags", "type"]
        for n, v in data.items():
            if n in ignore:
                continue
            attributes[n] = v

        return cls(
            rel_id=rel_id,
            attributes=attributes,
            center_lat=center_lat,
            center_lon=center_lon,
            members=members,
            tags=tags,
            result=result
        )

    @classmethod
    def from_xml(cls, child: xml.etree.ElementTree.Element, result: Optional[Result] = None) -> "Relation":
        """
        Create new Relation element from XML data

        :param child: XML node to be parsed
        :param result: The result this node belongs to
        :return: New Relation object
        :raises overpy.exception.ElementDataWrongType: If name of the xml child node doesn't match
        :raises ValueError: If a tag doesn't have a name
        """
        if child.tag.lower() != cls._type_value:
            raise exception.ElementDataWrongType(
                type_expected=cls._type_value,
                type_provided=child.tag.lower()
            )

        tags = {}
        members = []
        center_lat = None
        center_lon = None

        # Note: unlike from_json, the XML path also supports area members.
        supported_members = [RelationNode, RelationWay, RelationRelation, RelationArea]
        for sub_child in child:
            if sub_child.tag.lower() == "tag":
                name = sub_child.attrib.get("k")
                if name is None:
                    raise ValueError("Tag without name/key.")
                value = sub_child.attrib.get("v")
                tags[name] = value
            if sub_child.tag.lower() == "member":
                type_value = sub_child.attrib.get("type")
                for member_cls in supported_members:
                    if member_cls._type_value == type_value:
                        members.append(
                            member_cls.from_xml(
                                sub_child,
                                result=result
                            )
                        )
            if sub_child.tag.lower() == "center":
                (center_lat, center_lon) = cls.get_center_from_xml_dom(sub_child=sub_child)

        rel_id: Optional[int] = None
        rel_id_str: Optional[str] = child.attrib.get("id")
        if rel_id_str is not None:
            rel_id = int(rel_id_str)

        # Remaining XML attributes become generic attributes.
        attributes = {}
        ignore = ["id"]
        for n, v in child.attrib.items():
            if n in ignore:
                continue
            attributes[n] = v

        return cls(
            rel_id=rel_id,
            attributes=attributes,
            center_lat=center_lat,
            center_lon=center_lon,
            members=members,
            tags=tags,
            result=result
        )
class RelationMember:
    """
    Base class to represent a member of a relation.
    """

    # Set by subclasses ("node", "way", "relation", "area").
    _type_value: Optional[str] = None

    def __init__(
            self,
            attributes: Optional[dict] = None,
            geometry: Optional[List["RelationWayGeometryValue"]] = None,
            ref: Optional[int] = None,
            role: Optional[str] = None,
            result: Optional[Result] = None):
        """
        :param attributes: Additional attributes of the member
        :param geometry: Optional list of geometry points (ways only)
        :param ref: Reference Id
        :type ref: Integer
        :param role: The role of the relation member
        :type role: String
        :param result: The result this member belongs to
        """
        self.ref = ref
        self._result = result
        self.role = role
        self.attributes = attributes
        self.geometry = geometry

    @classmethod
    def from_json(cls, data: dict, result: Optional[Result] = None) -> "RelationMember":
        """
        Create new RelationMember element from JSON data

        :param data: Element data from JSON
        :param result: The result this element belongs to
        :return: New instance of RelationMember
        :raises overpy.exception.ElementDataWrongType: If type value of the passed JSON data does not match.
        """
        if data.get("type") != cls._type_value:
            raise exception.ElementDataWrongType(
                type_expected=cls._type_value,
                type_provided=data.get("type")
            )

        ref = data.get("ref")
        role = data.get("role")

        # Any remaining JSON members are kept as generic attributes.
        attributes = {}
        ignore = ["geometry", "type", "ref", "role"]
        for n, v in data.items():
            if n in ignore:
                continue
            attributes[n] = v

        # "geometry" (a list of lat/lon dicts) is converted into
        # RelationWayGeometryValue objects; anything else becomes None.
        geometry = data.get("geometry")
        if isinstance(geometry, list):
            geometry_orig = geometry
            geometry = []
            for v in geometry_orig:
                geometry.append(
                    RelationWayGeometryValue(
                        lat=v.get("lat"),
                        lon=v.get("lon")
                    )
                )
        else:
            geometry = None

        return cls(
            attributes=attributes,
            geometry=geometry,
            ref=ref,
            role=role,
            result=result
        )

    @classmethod
    def from_xml(
            cls,
            child: xml.etree.ElementTree.Element,
            result: Optional[Result] = None) -> "RelationMember":
        """
        Create new RelationMember from XML data

        :param child: XML node to be parsed
        :param result: The result this element belongs to
        :return: New relation member object
        :raises overpy.exception.ElementDataWrongType: If the "type" attribute of the xml node doesn't match
        """
        # Member kind is carried in the "type" attribute, not the tag name.
        if child.attrib.get("type") != cls._type_value:
            raise exception.ElementDataWrongType(
                type_expected=cls._type_value,
                type_provided=child.tag.lower()
            )

        ref: Optional[int] = None
        ref_str: Optional[str] = child.attrib.get("ref")
        if ref_str is not None:
            ref = int(ref_str)
        role: Optional[str] = child.attrib.get("role")

        # Remaining XML attributes become generic attributes.
        attributes = {}
        ignore = ["geometry", "ref", "role", "type"]
        for n, v in child.attrib.items():
            if n in ignore:
                continue
            attributes[n] = v

        # <nd lat="..." lon="..."/> children carry the member geometry.
        geometry = None
        for sub_child in child:
            if sub_child.tag.lower() == "nd":
                if geometry is None:
                    geometry = []
                geometry.append(
                    RelationWayGeometryValue(
                        lat=Decimal(sub_child.attrib["lat"]),
                        lon=Decimal(sub_child.attrib["lon"])
                    )
                )

        return cls(
            attributes=attributes,
            geometry=geometry,
            ref=ref,
            role=role,
            result=result
        )
class RelationNode(RelationMember):
    """Relation member that references a node."""

    _type_value = "node"

    def resolve(self, resolve_missing: bool = False) -> Node:
        """Look up the referenced node in the parent result set."""
        return self._result.get_node(self.ref, resolve_missing=resolve_missing)

    def __repr__(self):
        return "<overpy.RelationNode ref={} role={}>".format(self.ref, self.role)
class RelationWay(RelationMember):
    """Relation member that references a way."""

    _type_value = "way"

    def resolve(self, resolve_missing: bool = False) -> Way:
        """Look up the referenced way in the parent result set."""
        return self._result.get_way(self.ref, resolve_missing=resolve_missing)

    def __repr__(self):
        return "<overpy.RelationWay ref={} role={}>".format(self.ref, self.role)
class RelationWayGeometryValue:
    """A single lat/lon point of a relation member's way geometry."""

    def __init__(self, lat: Union[Decimal, float], lon: Union[Decimal, float]):
        #: Latitude of the point
        self.lat = lat
        #: Longitude of the point
        self.lon = lon

    def __repr__(self):
        return "<overpy.RelationWayGeometryValue lat={} lon={}>".format(self.lat, self.lon)
class RelationRelation(RelationMember):
    """Relation member that references another relation."""

    _type_value = "relation"

    def resolve(self, resolve_missing: bool = False) -> Relation:
        """Look up the referenced relation in the parent result set."""
        return self._result.get_relation(self.ref, resolve_missing=resolve_missing)

    def __repr__(self):
        return "<overpy.RelationRelation ref={} role={}>".format(self.ref, self.role)
class RelationArea(RelationMember):
    """Relation member that references an area."""

    _type_value = "area"

    def resolve(self, resolve_missing: bool = False) -> Area:
        """Look up the referenced area in the parent result set."""
        return self._result.get_area(self.ref, resolve_missing=resolve_missing)

    def __repr__(self):
        return "<overpy.RelationArea ref={} role={}>".format(self.ref, self.role)
class OSMSAXHandler(handler.ContentHandler):
    """
    SAX parser for Overpass XML response.

    Elements are accumulated into ``self._curr`` while their children are
    parsed and flushed into ``self._result`` on the closing tag.
    """
    #: Tuple of opening elements to ignore
    ignore_start: ClassVar = ('osm', 'meta', 'note', 'bounds', 'remark')
    #: Tuple of closing elements to ignore
    ignore_end: ClassVar = ('osm', 'meta', 'note', 'bounds', 'remark', 'tag', 'nd', 'center')

    def __init__(self, result: Result):
        """
        :param result: Append results to this result set.
        """
        handler.ContentHandler.__init__(self)
        self._result = result
        # Accumulator for the element currently being parsed.
        self._curr: Dict[str, Any] = {}
        #: Current relation member object
        self.cur_relation_member: Optional[RelationMember] = None

    def startElement(self, name: str, attrs: dict):
        """
        Handle opening elements.

        :param name: Name of the element
        :param attrs: Attributes of the element
        """
        if name in self.ignore_start:
            return
        # Dispatch to _handle_start_<name>; unknown elements are an error.
        # NOTE(review): the local name "handler" shadows the imported sax
        # "handler" module inside this method -- presumably intentional.
        try:
            handler = getattr(self, '_handle_start_%s' % name)
        except AttributeError:
            raise KeyError("Unknown element start '%s'" % name)
        handler(attrs)

    def endElement(self, name: str):
        """
        Handle closing elements

        :param name: Name of the element
        """
        if name in self.ignore_end:
            return
        # Dispatch to _handle_end_<name>; unknown elements are an error.
        try:
            handler = getattr(self, '_handle_end_%s' % name)
        except AttributeError:
            raise KeyError("Unknown element end '%s'" % name)
        handler()

    def _handle_start_center(self, attrs: dict):
        """
        Handle opening center element

        :param attrs: Attributes of the element
        :type attrs: Dict
        """
        center_lat = attrs.get("lat")
        center_lon = attrs.get("lon")
        if center_lat is None or center_lon is None:
            raise ValueError("Unable to get lat or lon of way center.")
        self._curr["center_lat"] = Decimal(center_lat)
        self._curr["center_lon"] = Decimal(center_lon)

    def _handle_start_tag(self, attrs: dict):
        """
        Handle opening tag element

        :param attrs: Attributes of the element
        :raises ValueError: If the tag has no "k" attribute
        """
        try:
            tag_key = attrs['k']
        except KeyError:
            raise ValueError("Tag without name/key.")
        self._curr['tags'][tag_key] = attrs.get('v')

    def _handle_start_node(self, attrs: dict):
        """
        Handle opening node element

        :param attrs: Attributes of the element
        """
        # Seed the accumulator; id/lat/lon are promoted to typed fields and
        # removed from the generic attribute dict.
        self._curr = {
            'attributes': dict(attrs),
            'lat': None,
            'lon': None,
            'node_id': None,
            'tags': {}
        }
        if attrs.get('id', None) is not None:
            self._curr['node_id'] = int(attrs['id'])
            del self._curr['attributes']['id']
        if attrs.get('lat', None) is not None:
            self._curr['lat'] = Decimal(attrs['lat'])
            del self._curr['attributes']['lat']
        if attrs.get('lon', None) is not None:
            self._curr['lon'] = Decimal(attrs['lon'])
            del self._curr['attributes']['lon']

    def _handle_end_node(self):
        """
        Handle closing node element
        """
        self._result.append(Node(result=self._result, **self._curr))
        self._curr = {}

    def _handle_start_way(self, attrs: dict):
        """
        Handle opening way element

        :param attrs: Attributes of the element
        """
        self._curr = {
            'center_lat': None,
            'center_lon': None,
            'attributes': dict(attrs),
            'node_ids': [],
            'tags': {},
            'way_id': None
        }
        if attrs.get('id', None) is not None:
            self._curr['way_id'] = int(attrs['id'])
            del self._curr['attributes']['id']

    def _handle_end_way(self):
        """
        Handle closing way element
        """
        self._result.append(Way(result=self._result, **self._curr))
        self._curr = {}

    def _handle_start_area(self, attrs: dict):
        """
        Handle opening area element

        :param attrs: Attributes of the element
        """
        self._curr = {
            'attributes': dict(attrs),
            'tags': {},
            'area_id': None
        }
        if attrs.get('id', None) is not None:
            self._curr['area_id'] = int(attrs['id'])
            del self._curr['attributes']['id']

    def _handle_end_area(self):
        """
        Handle closing area element
        """
        self._result.append(Area(result=self._result, **self._curr))
        self._curr = {}

    def _handle_start_nd(self, attrs: dict):
        """
        Handle opening nd element

        A <nd> inside a relation member carries geometry (lat/lon); inside a
        way it carries a node reference (ref).

        :param attrs: Attributes of the element
        :raises ValueError: If a way reference has no "ref" attribute
        """
        if isinstance(self.cur_relation_member, RelationWay):
            if self.cur_relation_member.geometry is None:
                self.cur_relation_member.geometry = []
            self.cur_relation_member.geometry.append(
                RelationWayGeometryValue(
                    lat=Decimal(attrs["lat"]),
                    lon=Decimal(attrs["lon"])
                )
            )
        else:
            try:
                node_ref = attrs['ref']
            except KeyError:
                raise ValueError("Unable to find required ref value.")
            self._curr['node_ids'].append(int(node_ref))

    def _handle_start_relation(self, attrs: dict):
        """
        Handle opening relation element

        :param attrs: Attributes of the element
        """
        self._curr = {
            'attributes': dict(attrs),
            'members': [],
            'rel_id': None,
            'tags': {}
        }
        if attrs.get('id', None) is not None:
            self._curr['rel_id'] = int(attrs['id'])
            del self._curr['attributes']['id']

    def _handle_end_relation(self):
        """
        Handle closing relation element
        """
        self._result.append(Relation(result=self._result, **self._curr))
        self._curr = {}

    def _handle_start_member(self, attrs: dict):
        """
        Handle opening member element

        :param attrs: Attributes of the element
        :raises ValueError: If the member's "type" attribute is unknown
        """
        params: Dict[str, Any] = {
            # ToDo: Parse attributes
            'attributes': {},
            'ref': None,
            'result': self._result,
            'role': None
        }
        if attrs.get('ref', None):
            params['ref'] = int(attrs['ref'])
        if attrs.get('role', None):
            params['role'] = attrs['role']

        # Map the member type to the matching RelationMember subclass.
        cls_map = {
            "area": RelationArea,
            "node": RelationNode,
            "relation": RelationRelation,
            "way": RelationWay
        }
        cls: Type[RelationMember] = cls_map.get(attrs["type"])
        if cls is None:
            raise ValueError("Undefined type for member: '%s'" % attrs['type'])

        # Keep a handle so nested <nd> geometry can be attached to it.
        self.cur_relation_member = cls(**params)
        self._curr['members'].append(self.cur_relation_member)

    def _handle_end_member(self):
        # Member finished; nested <nd> elements now belong to the way again.
        self.cur_relation_member = None
``` |
[
{
"content": "Here is some code:\n```python\n#!/usr/bin/env python3\n\nimport math\nfrom numbers import Number\n\nimport torch\nfrom torch.distributions import constraints\nfrom torch.nn import Module as TModule\n\nfrom ..utils.cholesky import psd_safe_cholesky\nfrom .prior import Prior\n\n\nclass LKJPrior(Prio... | [
{
"content": "Here is some code:\n<|memory_start|>```python\n#!/usr/bin/env python3\n\nimport math\nfrom numbers import Number\n\nimport torch\nfrom torch.distributions import constraints\nfrom torch.nn import Module as TModule\n\nfrom ..utils.cholesky import psd_safe_cholesky\nfrom .prior import Prior\n\n\ncla... | ```python
#!/usr/bin/env python3
import math
from numbers import Number
import torch
from torch.distributions import constraints
from torch.nn import Module as TModule
from ..utils.cholesky import psd_safe_cholesky
from .prior import Prior
class LKJPrior(Prior):
    r"""LKJ prior over n x n (positive definite) correlation matrices

    .. math:

        \begin{equation*}
            pdf(\Sigma) ~ |\Sigma| ^ (\eta  - 1)
        \end{equation*}

    where :math:`\eta > 0` is a shape parameter.

    Reference: Bayesian Data Analysis, 3rd ed., Gelman et al., p. 576
    """

    arg_constraints = {"n": constraints.positive_integer, "eta": constraints.positive}
    # TODO: move correlation matrix validation upstream into pytorch
    support = constraints.positive_definite
    _validate_args = True

    def __init__(self, n, eta, validate_args=False):
        """
        :param n: Size of the correlation matrix (positive int).
        :param eta: Shape parameter (scalar number or tensor; tensors give a batched prior).
        :param validate_args: Passed to the torch Distribution initializer.
        :raises ValueError: If n is not a positive integer.
        """
        TModule.__init__(self)
        if not isinstance(n, int) or n < 1:
            raise ValueError("n must be a positive integer")
        if isinstance(eta, Number):
            eta = torch.tensor(float(eta))
        self.n = torch.tensor(n, dtype=torch.long, device=eta.device)
        batch_shape = eta.shape
        event_shape = torch.Size([n, n])
        # Normalization constant(s)
        i = torch.arange(n, dtype=eta.dtype, device=eta.device)
        C = (((2 * eta.view(-1, 1) - 2 + i) * i).sum(1) * math.log(2)).view_as(eta)
        C += n * torch.sum(2 * torch.lgamma(i / 2 + 1) - torch.lgamma(i + 2))
        # need to assign values before registering as buffers to make argument validation work
        self.eta = eta
        self.C = C
        super(LKJPrior, self).__init__(batch_shape, event_shape, validate_args=validate_args)
        # now need to delete to be able to register buffer
        del self.eta, self.C
        self.register_buffer("eta", eta)
        self.register_buffer("C", C)

    def log_prob(self, X):
        """
        Log density at the correlation matrix (or batch of matrices) ``X``.

        :raises ValueError: If X is not n x n or is not a valid correlation matrix.
        """
        if any(s != self.n for s in X.shape[-2:]):
            raise ValueError("Correlation matrix is not of size n={}".format(self.n.item()))
        if not _is_valid_correlation_matrix(X):
            raise ValueError("Input is not a valid correlation matrix")
        # log|X| = 2 * sum(log diag(chol(X))); psd_safe_cholesky guards near-singular inputs.
        log_diag_sum = psd_safe_cholesky(X, upper=True).diagonal(dim1=-2, dim2=-1).log().sum(-1)
        return self.C + (self.eta - 1) * 2 * log_diag_sum
class LKJCholeskyFactorPrior(LKJPrior):
    r"""LKJ prior over n x n (positive definite) Cholesky-decomposed
    correlation matrices

    .. math:

        \begin{equation*}
            pdf(\Sigma) ~ |\Sigma| ^ (\eta  - 1)
        \end{equation*}

    where :math:`\eta > 0` is a shape parameter and n is the dimension of the
    correlation matrix.

    LKJCholeskyFactorPrior is different from LKJPrior in that it accepts the
    Cholesky factor of the correlation matrix to compute probabilities.
    """

    support = constraints.lower_cholesky

    def log_prob(self, X):
        """Log density at the Cholesky factor (or batch of factors) ``X``."""
        rows, cols = X.shape[-2], X.shape[-1]
        if rows != self.n or cols != self.n:
            raise ValueError("Cholesky factor is not of size n={}".format(self.n.item()))
        if not _is_valid_correlation_matrix_cholesky_factor(X):
            raise ValueError("Input is not a Cholesky factor of a valid correlation matrix")
        # log|L L^T| = 2 * sum(log diag(L)) -- no decomposition needed here.
        diag_log_sum = X.diagonal(dim1=-2, dim2=-1).log().sum(-1)
        return self.C + 2 * (self.eta - 1) * diag_log_sum
class LKJCovariancePrior(LKJPrior):
    """LKJCovariancePrior combines an LKJ prior over the correlation matrix
    and a user-specified prior over marginal standard deviations to return a
    prior over the full covariance matrix.

    Usage: LKJCovariancePrior(n, eta, sd_prior), where
    n is a positive integer, the size of the covariance matrix,
    eta is a positive shape parameter for the LKJPrior over correlations, and
    sd_prior is a scalar Prior over nonnegative numbers, which is used for
    each of the n marginal standard deviations on the covariance matrix.
    """

    def __init__(self, n, eta, sd_prior, validate_args=False):
        """
        :param n: Size of the covariance matrix (int).
        :param eta: Shape parameter for the underlying LKJPrior.
        :param sd_prior: Prior over the marginal standard deviations.
        :param validate_args: Passed through to the LKJPrior.
        :raises ValueError: On invalid n, sd_prior type/shape, or batch-shape mismatch.
        """
        if not isinstance(sd_prior, Prior):
            raise ValueError("sd_prior must be an instance of Prior")
        if not isinstance(n, int):
            raise ValueError("n must be an integer")
        if sd_prior.event_shape not in {torch.Size([1]), torch.Size([n])}:
            raise ValueError("sd_prior must have event_shape 1 or n")
        correlation_prior = LKJPrior(n=n, eta=eta, validate_args=validate_args)
        if sd_prior.batch_shape != correlation_prior.batch_shape:
            raise ValueError("sd_prior must have same batch_shape as eta")

        TModule.__init__(self)
        # NOTE: super(LKJPrior, self) deliberately skips LKJPrior.__init__
        # and calls the grandparent initializer -- LKJPrior's buffer setup
        # lives inside correlation_prior instead.
        super(LKJPrior, self).__init__(
            correlation_prior.batch_shape, correlation_prior.event_shape, validate_args=False
        )
        self.correlation_prior = correlation_prior
        self.sd_prior = sd_prior

    def log_prob(self, X):
        """
        Log density of the covariance matrix (or batch) ``X``: LKJ term on
        the implied correlation matrix plus sd_prior on the marginal stds.

        :raises ValueError: If any diagonal (variance) entry is negative.
        """
        marginal_var = torch.diagonal(X, dim1=-2, dim2=-1)
        if not torch.all(marginal_var >= 0):
            raise ValueError("Variance(s) cannot be negative")
        marginal_sd = marginal_var.sqrt()
        # D^{-1} X D^{-1} rescales the covariance to a correlation matrix.
        sd_diag_mat = _batch_form_diag(1 / marginal_sd)
        correlations = torch.matmul(torch.matmul(sd_diag_mat, X), sd_diag_mat)
        log_prob_corr = self.correlation_prior.log_prob(correlations)
        log_prob_sd = self.sd_prior.log_prob(marginal_sd)
        return log_prob_corr + log_prob_sd
def _batch_form_diag(tsr):
"""Form diagonal matrices in batch mode."""
eye = torch.eye(tsr.shape[-1], dtype=tsr.dtype, device=tsr.device)
M = tsr.unsqueeze(-1).expand(tsr.shape + tsr.shape[-1:])
return eye * M
def _is_valid_correlation_matrix(Sigma, tol=1e-6):
"""Check if supplied matrix is a valid correlation matrix
A matrix is a valid correlation matrix if it is positive semidefinite, and
if all diagonal elements are equal to 1.
Args:
Sigma: A n x n correlation matrix, or a batch of b correlation matrices
with shape b x n x n
tol: The tolerance with which to check unit value of the diagonal elements
Returns:
True if Sigma is a valid correlation matrix, False otherwise (in batch
mode, all matrices in the batch need to be valid correlation matrices)
"""
evals, _ = torch.symeig(Sigma, eigenvectors=False)
if not torch.all(evals >= -tol):
return False
return all(torch.all(torch.abs(S.diag() - 1) < tol) for S in Sigma.view(-1, *Sigma.shape[-2:]))
def _is_valid_correlation_matrix_cholesky_factor(L, tol=1e-6):
"""Check if supplied matrix is a Cholesky factor of a valid correlation matrix
A matrix is a Cholesky fator of a valid correlation matrix if it is lower
triangular, has positive diagonal, and unit row-sum
Args:
L: A n x n lower-triangular matrix, or a batch of b lower-triangular
matrices with shape b x n x n
tol: The tolerance with which to check positivity of the diagonal and
unit-sum of the rows
Returns:
True if L is a Cholesky factor of a valid correlation matrix, False
otherwise (in batch mode, all matrices in the batch need to be
Cholesky factors of valid correlation matrices)
"""
unit_row_length = torch.all((torch.norm(L, dim=-1) - 1).abs() < tol)
return unit_row_length and torch.all(constraints.lower_cholesky.check(L))
``` |
[
{
"content": "Repeat the code precisely:\n```python\n#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport os\nimport sys\nimport glob\nimport time\nimport shutil\n\ntry:\n from PyQt4.QtGui import *\n from PyQt4.QtCore import *\nexcept ImportError, err:\n sys.stderr.write(\"Error: %s%s\" % (str(err)... | [
{
"content": "Repeat the code precisely:\n<|memory_start|>```python\n#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport os\nimport sys\nimport glob\nimport time\nimport shutil\n\ntry:\n from PyQt4.QtGui import *\n from PyQt4.QtCore import *\nexcept ImportError, err:\n sys.stderr.write(\"Error: %s... | ```python
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import glob
import time
import shutil
try:
from PyQt4.QtGui import *
from PyQt4.QtCore import *
except ImportError, err:
sys.stderr.write("Error: %s%s" % (str(err), os.linesep))
sys.exit(1)
try:
from comicutils.ui.cc_ui import Ui_MainWindow
from comicutils import utils
except ImportError, err:
sys.stderr.write("Error: %s%s" % (str(err), os.linesep))
sys.exit(1)
class ComicConvert(QMainWindow, Ui_MainWindow):
toggle_current = pyqtSignal()
show_message = pyqtSignal(str)
show_progress = pyqtSignal(bool)
item_status = pyqtSignal()
def __init__(self, parent = None):
    """Build the main window: fixed size, tree view model, signal wiring, worker thread."""
    QMainWindow.__init__(self, parent)
    self.setupUi(self)
    # Lock the window to its preferred size and drop the maximize button.
    self.setFixedSize(self.sizeHint())
    self.setWindowFlags((self.windowFlags() | Qt.CustomizeWindowHint) & ~Qt.WindowMaximizeButtonHint)
    self.progressBar.hide()
    self.set_opts()
    self.model = QStandardItemModel()
    self.treeView.setModel(self.model)
    self.treeView.setSortingEnabled(True)
    self.set_columns()
    self.treeView.show()
    self.connect_signals()
    # Background conversion worker (Worker defined elsewhere in this file).
    self.thread = Worker(self)
def connect_signals(self):
    """Wire widget and custom signals to their handlers (old-style SIGNAL connections)."""
    self.connect(self.opt_exclude,
        SIGNAL("stateChanged(int)"), self.on_opt_exclude_stateChanged)
    self.connect(self.opt_recursive,
        SIGNAL("stateChanged(int)"), self.on_opt_recursive_stateChanged)
    self.connect(self.opt_size,
        SIGNAL("valueChanged(int)"), self.on_opt_size_valueChanged)
    self.connect(self.select_lineEdit,
        SIGNAL("returnPressed()"), self.refresh_treeview)
    self.connect(self.progressBar,
        SIGNAL("valueChanged(int)"), self.on_progress_bar_changed)
    # Custom signals emitted from the worker thread back to the GUI thread.
    self.connect(self,
        SIGNAL("toggle_current(PyQt_PyObject)"), self.on_toggle_current)
    self.connect(self,
        SIGNAL("show_message(PyQt_PyObject)"), self.on_show_message)
    self.connect(self,
        SIGNAL("show_progress(PyQt_PyObject)"), self.on_show_progress)
    self.connect(self.treeView,
        SIGNAL("doubleClicked(QModelIndex)"), self.on_double_click)
    self.connect(self,
        SIGNAL("item_status(PyQt_PyObject, PyQt_PyObject)"), self.on_item_status)
def set_opts(self):
    """Initialize the conversion option dict with its defaults."""
    self.opts = {}
    self.opts['size'] = None
    self.opts['recursive'] = False
    self.opts['outdir'] = None
    # Output image formats (mutually exclusive flags, set from the UI later).
    self.opts['bmp-4'] = False
    self.opts['bmp-8'] = False
    self.opts['jpeg'] = False
    self.opts['png'] = False
    self.opts['scale'] = '100%'
    self.opts['quality'] = '85'
    self.opts['level'] = None
    self.opts['cover'] = False
    self.opts['nocover'] = False
    self.opts['norgb'] = False
    self.opts['exclude'] = []
    # Output archive formats.
    self.opts['rar'] = False
    self.opts['zip'] = False
    self.opts['suffix'] = ''
    self.opts['verbose'] = False
def set_columns(self):
    """Set tree view header labels and column widths."""
    self.model.setHorizontalHeaderLabels(
        ["Filename", "Filetype", "Filesize", "Status"])
    self.treeView.setColumnWidth(0, 460)
    self.treeView.setColumnWidth(1, 100)
    self.treeView.setColumnWidth(2, 100)
    self.treeView.setColumnWidth(3, 100)
def refresh_treeview(self):
    """Re-scan the selected path glob and rebuild the comic list in the tree view."""
    self.model.clear()
    args = glob.glob(str(self.select_lineEdit.text()))
    self.comics = utils.get_comics(args, self.opts, self.opts['size'])
    for comic in self.comics:
        # Each comic is a 9-tuple of file metadata.
        filename, basename, fileext, filedir, fullpath, filetype, filesize, filemtime, fileuri = comic
        row = [filename, filetype, utils.filesizeformat(filesize)]
        items = []
        sitems = [QStandardItem(item) for item in row]
        for item in sitems:
            # Stash the full comic tuple on every cell for later retrieval.
            item.setData(comic)
            item.setFlags(item.flags() & ~Qt.ItemIsEditable)
            items.append(item)
        self.model.appendRow(items)
    # model.clear() dropped the headers; restore them.
    self.set_columns()
    self.refresh_status()
def refresh_status(self):
    """Update the status label with the comic count and total size."""
    sizesum = 0
    rowcount = self.model.rowCount()
    for row in range(rowcount):
        index = self.model.index(row, 0)
        item = self.model.itemFromIndex(index)
        # Recover the comic tuple stored on the item in refresh_treeview.
        comic = item.data().toPyObject()
        filename, basename, fileext, filedir, fullpath, filetype, filesize, filemtime, fileuri = comic
        sizesum += filesize
    self.comicStatus.setText("comics: %d\t|\tsize: %s" % (
        rowcount, utils.filesizeformat(sizesum)))
def get_options(self):
    """Read the current widget values into the self.opts conversion dict."""
    self.opts['outdir'] = str(self.opt_outdir.text())
    self.opts['suffix'] = str(self.opt_suffix.text())
    self.opts['nocover'] = self.opt_exclude_cover.isChecked()
    self.opts['norgb'] = self.opt_exclude_rgb.isChecked()
    self.opts['scale'] = str(self.opt_scale.text())
    self.opts['level'] = str(self.opt_level.text())
    self.opts['quality'] = str(self.opt_quality.value())
    if self.opt_exclude.isChecked():
        self.opts['exclude'] = str(self.opt_exclude_list.text()).split(",")
    # Combo boxes carry the opts key ("jpeg", "png", ...) as item data.
    image_format = self.image_comboBox.itemData(
        self.image_comboBox.currentIndex(), 2).toPyObject()
    if image_format:
        self.opts[str(image_format)] = True
    archive_format = self.archive_comboBox.itemData(
        self.archive_comboBox.currentIndex(), 2).toPyObject()
    if archive_format:
        self.opts[str(archive_format)] = True
def set_enabled(self, enabled=True):
self.convert_pushButton.setEnabled(enabled)
self.cancel_pushButton.setEnabled(not enabled)
self.select_pushButton.setEnabled(enabled)
self.refresh_pushButton.setEnabled(enabled)
self.outdir_pushButton.setEnabled(enabled)
self.opt_recursive.setEnabled(enabled)
self.opt_size.setEnabled(enabled)
def on_toggle_current(self, index):
self.treeView.selectionModel().setCurrentIndex(
index, QItemSelectionModel.ToggleCurrent)
def on_show_message(self, message=None):
if not message:
self.statusBar.clearMessage()
else:
self.statusBar.showMessage(message)
def on_show_progress(self, progress=True):
if progress:
self.progressBar.show()
else:
self.progressBar.hide()
def on_double_click(self, index):
if self.thread.isRunning(): return
item = self.model.itemFromIndex(index)
self.model.removeRow(item.row())
self.refresh_status()
def on_item_status(self, item, status):
self.model.setItem(item.row(), 3, QStandardItem(status))
def on_opt_exclude_stateChanged(self, enabled):
self.opt_exclude_list.setEnabled(enabled)
self.opt_exclude_list.setFocus()
def on_opt_recursive_stateChanged(self, enabled):
self.opts['recursive'] = bool(enabled)
self.refresh_treeview()
def on_opt_size_valueChanged(self, value):
self.opts['size'] = int(value)
self.refresh_treeview()
@pyqtSignature("")
def on_convert_pushButton_clicked(self):
self.get_options()
self.set_enabled(False)
self.thread.start()
@pyqtSignature("")
def on_cancel_pushButton_clicked(self):
self.set_enabled(True)
self.thread.stop()
@pyqtSignature("")
def on_select_pushButton_clicked(self):
dialog = QFileDialog()
ext_filter = "Comic book archives (*.cbz *.cbr);;All files (*)"
dialog.setNameFilter(ext_filter)
dialog.setFileMode(QFileDialog.ExistingFiles)
self.select_lineEdit.setText(dialog.getOpenFileName(filter=ext_filter))
self.refresh_treeview()
@pyqtSignature("")
def on_outdir_pushButton_clicked(self):
dialog = QFileDialog()
dialog.setFileMode(QFileDialog.Directory)
dir_name = dialog.getExistingDirectory()
self.opt_outdir.setText(dir_name)
@pyqtSignature("")
def on_refresh_pushButton_clicked(self):
self.refresh_treeview()
@pyqtSignature("")
def on_close_pushButton_clicked(self):
self.close()
@pyqtSignature("")
def on_about_pushButton_clicked(self):
about = """
<p align="center">Comic Convert 0.5<br>
<a href="https://github.com/gen2brain/comic-utils">
<span style="text-decoration:underline;color:#0057ae;">https://github.com/gen2brain/comic-utils</span></a></p>
<p align="center">Author:<br>
Milan Nikolic (gen2brain@gmail.com)</p>
<p align="center">This program is released under the terms of the<br>
<a href="http://www.gnu.org/licenses/gpl-3.0.txt">
<span style="text-decoration:underline;color:#0057ae;">GNU General Public License version 3.</span></a></p>
<p align="center">Powered by:<br>
<a href="http://www.stripzona.com">
<span style="text-decoration:underline;color:#0057ae;">http://www.stripzona.com</span></a></p>"""
QMessageBox.about(self, "About", about)
@pyqtSignature("")
def on_progress_bar_changed(self, value):
self.progressBar.setValue(value)
class Worker(QThread):
def __init__(self, parent = None):
QThread.__init__(self, parent)
self.parent = parent
self.opts = parent.opts
self.tmpdir = None
self.exiting = False
self.item = None
def __del__(self):
self.exiting = True
self.wait()
def stop(self):
self.exiting = True
self.show_progress(False)
self.show_message(None)
if self.item:
self.item_status(self.item, "")
if self.tmpdir:
shutil.rmtree(self.tmpdir)
self.terminate()
def item_status(self, item, status):
self.parent.emit(SIGNAL("item_status(PyQt_PyObject, PyQt_PyObject)"), item, status)
def show_progress(self, progress):
self.parent.emit(SIGNAL("show_progress(PyQt_PyObject)"), progress)
def show_message(self, message, row=None):
if row is not None:
rowcount = self.parent.model.rowCount()
message = "File %d of %d - %s" % (row+1, rowcount, message)
self.parent.emit(SIGNAL("show_message(PyQt_PyObject)"), message)
def run(self):
self.exiting = False
rowcount = self.parent.model.rowCount()
while not self.exiting:
for row in range(rowcount):
index = self.parent.model.index(row, 0)
item = self.parent.model.itemFromIndex(index)
self.item = item
self.parent.emit(SIGNAL("toggle_current(PyQt_PyObject)"), index)
comic = item.data().toPyObject()
filename,basename,fileext,filedir,fullpath,filetype,filesize,filemtime,fileuri = comic
self.item_status(item, "CONVERTING...")
if self.opts['outdir']:
convdir = os.path.realpath(self.opts['outdir'])
else:
convdir = os.path.join(filedir, '_converted')
if os.path.isfile(os.path.join(convdir, filename)):
self.show_message('Skipping, file exists in %s' % convdir, row)
self.item_status(item, "SKIPPED")
time.sleep(1)
continue
self.show_message('Unpacking file %s (%sMB)...' % (
filename, filesize/(1024*1024)), row)
self.item_status(item, "UNPACKING...")
tempdir = utils.unpack_archive(fullpath, filetype, filename)
if tempdir is not None:
self.tmpdir = tempdir
self.show_message('File %s unpacked' % filename, row)
self.item_status(item, "CONVERTING...")
self.parent.emit(SIGNAL("show_progress(PyQt_PyObject)"), True)
if utils.convert_images(tempdir, self.opts, self.parent, row):
self.parent.progressBar.emit(SIGNAL("valueChanged(int)"), 0)
self.parent.emit(SIGNAL("show_progress(PyQt_PyObject)"), False)
self.show_message('File %s converted' % filename, row)
if not self.opts['outdir'] and not os.path.isdir(convdir):
os.mkdir(convdir)
if self.opts['suffix']:
basename = basename + self.opts['suffix']
filename = basename + fileext
if self.opts['rar']:
filetype = 'RAR'
filename = '%s.cbr' % basename
elif self.opts['zip']:
filetype = 'ZIP'
filename = '%s.cbz' % basename
self.show_message('Packing %s...' % filename, row)
self.item_status(item, "PACKING...")
filepath = os.path.join(convdir, filename)
if utils.pack_archive(tempdir, filetype, filepath):
self.show_message('File %s packed.' % filepath, row)
shutil.rmtree(tempdir)
self.item_status(item, "OK")
self.tmpdir = None
time.sleep(1)
self.show_message(None)
self.parent.set_enabled(True)
self.exiting = True
``` |
[
{
"content": "Repeat the code exactly as the original, including blank lines:\n```python\n# ----------------------------------------------------------------------------\n# pyglet\n# Copyright (c) 2006-2008 Alex Holkner\n# Copyright (c) 2008-2021 pyglet contributors\n# All rights reserved.\n#\n# Redistribution a... | [
{
"content": "Repeat the code exactly as the original, including blank lines:\n<|memory_start|>```python\n# ----------------------------------------------------------------------------\n# pyglet\n# Copyright (c) 2006-2008 Alex Holkner\n# Copyright (c) 2008-2021 pyglet contributors\n# All rights reserved.\n#\n# ... | ```python
# ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# Copyright (c) 2008-2021 pyglet contributors
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
import weakref
from pyglet.media.drivers.base import AbstractAudioDriver, AbstractAudioPlayer
from pyglet.media.events import MediaEvent
from pyglet.media.drivers.listener import AbstractListener
from pyglet.util import debug_print
from . import lib_pulseaudio as pa
from .interface import PulseAudioMainLoop
_debug = debug_print('debug_media')
class PulseAudioDriver(AbstractAudioDriver):
def __init__(self):
self.mainloop = PulseAudioMainLoop()
self.mainloop.start()
self.lock = self.mainloop
self.context = None
self._players = weakref.WeakSet()
self._listener = PulseAudioListener(self)
def __del__(self):
self.delete()
def create_audio_player(self, source, player):
assert self.context is not None
player = PulseAudioPlayer(source, player, self)
self._players.add(player)
return player
def connect(self, server=None):
"""Connect to pulseaudio server.
:Parameters:
`server` : str
Server to connect to, or ``None`` for the default local
server (which may be spawned as a daemon if no server is
found).
"""
# TODO disconnect from old
assert not self.context, 'Already connected'
self.context = self.mainloop.create_context()
self.context.connect(server)
def dump_debug_info(self):
print('Client version: ', pa.pa_get_library_version())
print('Server: ', self.context.server)
print('Protocol: ', self.context.protocol_version)
print('Server protocol:', self.context.server_protocol_version)
print('Local context: ', self.context.is_local and 'Yes' or 'No')
def delete(self):
"""Completely shut down pulseaudio client."""
if self.mainloop is not None:
with self.mainloop:
if self.context is not None:
self.context.delete()
self.context = None
if self.mainloop is not None:
self.mainloop.delete()
self.mainloop = None
self.lock = None
def get_listener(self):
return self._listener
class PulseAudioListener(AbstractListener):
def __init__(self, driver):
self.driver = weakref.proxy(driver)
def _set_volume(self, volume):
self._volume = volume
for player in self.driver._players:
player.set_volume(player._volume)
def _set_position(self, position):
self._position = position
def _set_forward_orientation(self, orientation):
self._forward_orientation = orientation
def _set_up_orientation(self, orientation):
self._up_orientation = orientation
class PulseAudioPlayer(AbstractAudioPlayer):
_volume = 1.0
def __init__(self, source, player, driver):
super(PulseAudioPlayer, self).__init__(source, player)
self.driver = weakref.ref(driver)
self._events = []
self._timestamps = [] # List of (ref_time, timestamp)
self._write_index = 0 # Current write index (tracked manually)
self._read_index_valid = False # True only if buffer has non-stale data
self._clear_write = False
self._buffered_audio_data = None
self._playing = False
self._current_audio_data = None
self._time_sync_operation = None
audio_format = source.audio_format
assert audio_format
with driver.mainloop:
self.stream = driver.context.create_stream(audio_format)
self.stream.push_handlers(self)
self.stream.connect_playback()
assert self.stream.is_ready
assert _debug('PulseAudioPlayer: __init__ finished')
def on_write_needed(self, nbytes, underflow):
if underflow:
self._handle_underflow()
else:
self._write_to_stream(nbytes)
# Asynchronously update time
if self._events:
if self._time_sync_operation is not None and self._time_sync_operation.is_done:
self._time_sync_operation.delete()
self._time_sync_operation = None
if self._time_sync_operation is None:
assert _debug('PulseAudioPlayer: trigger timing info update')
self._time_sync_operation = self.stream.update_timing_info(self._process_events)
def _get_audio_data(self, nbytes=None):
if self._current_audio_data is None and self.source is not None:
# Always try to buffer at least 1 second of audio data
min_bytes = 1 * self.source.audio_format.bytes_per_second
if nbytes is None:
nbytes = min_bytes
else:
nbytes = min(min_bytes, nbytes)
assert _debug('PulseAudioPlayer: Try to get {} bytes of audio data'.format(nbytes))
compensation_time = self.get_audio_time_diff()
self._current_audio_data = self.source.get_audio_data(nbytes, compensation_time)
self._schedule_events()
if self._current_audio_data is None:
assert _debug('PulseAudioPlayer: No audio data available')
else:
assert _debug('PulseAudioPlayer: Got {} bytes of audio data'.format(
self._current_audio_data.length))
return self._current_audio_data
def _has_audio_data(self):
return self._get_audio_data() is not None
def _consume_audio_data(self, nbytes):
if self._current_audio_data is not None:
if nbytes == self._current_audio_data.length:
self._current_audio_data = None
else:
self._current_audio_data.consume(nbytes, self.source.audio_format)
def _schedule_events(self):
if self._current_audio_data is not None:
for event in self._current_audio_data.events:
event_index = self._write_index + event.timestamp * \
self.source.audio_format.bytes_per_second
assert _debug('PulseAudioPlayer: Schedule event at index {}'.format(event_index))
self._events.append((event_index, event))
def _write_to_stream(self, nbytes=None):
if nbytes is None:
nbytes = self.stream.writable_size
assert _debug('PulseAudioPlayer: Requested to write %d bytes to stream' % nbytes)
seek_mode = pa.PA_SEEK_RELATIVE
if self._clear_write:
# When seeking, the stream.writable_size will be 0.
# So we force at least 4096 bytes to overwrite the Buffer
# starting at read index
nbytes = max(4096, nbytes)
seek_mode = pa.PA_SEEK_RELATIVE_ON_READ
self._clear_write = False
assert _debug('PulseAudioPlayer: Clear buffer')
while self._has_audio_data() and nbytes > 0:
audio_data = self._get_audio_data()
write_length = min(nbytes, audio_data.length)
consumption = self.stream.write(audio_data, write_length, seek_mode)
seek_mode = pa.PA_SEEK_RELATIVE
self._read_index_valid = True
self._timestamps.append((self._write_index, audio_data.timestamp))
self._write_index += consumption
assert _debug('PulseAudioPlayer: Actually wrote {} bytes '
'to stream'.format(consumption))
self._consume_audio_data(consumption)
nbytes -= consumption
if not self._has_audio_data():
# In case the source group wasn't long enough to prebuffer stream
# to PA's satisfaction, trigger immediate playback (has no effect
# if stream is already playing).
if self._playing:
op = self.stream.trigger()
op.delete() # Explicit delete to prevent locking
def _handle_underflow(self):
assert _debug('Player: underflow')
if self._has_audio_data():
self._write_to_stream()
else:
self._add_event_at_write_index('on_eos')
def _process_events(self):
assert _debug('PulseAudioPlayer: Process events')
if not self._events:
assert _debug('PulseAudioPlayer: No events')
return
# Assume this is called after time sync
timing_info = self.stream.get_timing_info()
if not timing_info:
assert _debug('PulseAudioPlayer: No timing info to process events')
return
read_index = timing_info.read_index
assert _debug('PulseAudioPlayer: Dispatch events at index {}'.format(read_index))
while self._events and self._events[0][0] <= read_index:
_, event = self._events.pop(0)
assert _debug('PulseAudioPlayer: Dispatch event', event)
event._sync_dispatch_to_player(self.player)
def _add_event_at_write_index(self, event_name):
assert _debug('PulseAudioPlayer: Add event at index {}'.format(self._write_index))
self._events.append((self._write_index, MediaEvent(0., event_name)))
def delete(self):
assert _debug('Delete PulseAudioPlayer')
self.stream.pop_handlers()
driver = self.driver()
if driver is None:
assert _debug('PulseAudioDriver has been garbage collected.')
self.stream = None
return
if driver.mainloop is None:
assert _debug('PulseAudioDriver already deleted. '
'PulseAudioPlayer could not clean up properly.')
return
if self._time_sync_operation is not None:
with self._time_sync_operation:
self._time_sync_operation.delete()
self._time_sync_operation = None
self.stream.delete()
self.stream = None
def clear(self):
assert _debug('PulseAudioPlayer.clear')
super(PulseAudioPlayer, self).clear()
self._clear_write = True
self._write_index = self._get_read_index()
self._timestamps = []
self._events = []
with self.stream:
self._read_index_valid = False
self.stream.prebuf().wait()
def play(self):
assert _debug('PulseAudioPlayer.play')
with self.stream:
if self.stream.is_corked:
self.stream.resume().wait().delete()
assert _debug('PulseAudioPlayer: Resumed playback')
if self.stream.underflow:
self._write_to_stream()
if not self._has_audio_data():
self.stream.trigger().wait().delete()
assert _debug('PulseAudioPlayer: Triggered stream for immediate playback')
assert not self.stream.is_corked
self._playing = True
def stop(self):
assert _debug('PulseAudioPlayer.stop')
with self.stream:
if not self.stream.is_corked:
self.stream.pause().wait().delete()
self._playing = False
def _get_read_index(self):
with self.stream:
self.stream.update_timing_info().wait().delete()
timing_info = self.stream.get_timing_info()
if timing_info:
read_index = timing_info.read_index
else:
read_index = 0
assert _debug('_get_read_index ->', read_index)
return read_index
def _get_write_index(self):
timing_info = self.stream.get_timing_info()
if timing_info:
write_index = timing_info.write_index
else:
write_index = 0
assert _debug('_get_write_index ->', write_index)
return write_index
def _get_timing_info(self):
with self.stream:
self.stream.update_timing_info().wait().delete()
timing_info = self.stream.get_timing_info()
return timing_info
def get_time(self):
if not self._read_index_valid:
assert _debug('get_time <_read_index_valid = False> -> 0')
return 0
t_info = self._get_timing_info()
read_index = t_info.read_index
transport_usec = t_info.transport_usec
sink_usec = t_info.sink_usec
write_index = 0
timestamp = 0.0
try:
write_index, timestamp = self._timestamps[0]
write_index, timestamp = self._timestamps[1]
while read_index >= write_index:
del self._timestamps[0]
write_index, timestamp = self._timestamps[1]
except IndexError:
pass
bytes_per_second = self.source.audio_format.bytes_per_second
dt = (read_index - write_index) / float(bytes_per_second) * 1000000
# We add 2x the transport time because we didn't take it into account
# when we wrote the write index the first time. See _write_to_stream
dt += t_info.transport_usec * 2
dt -= t_info.sink_usec
# We convert back to seconds
dt /= 1000000
time = timestamp + dt
assert _debug('get_time ->', time)
return time
def set_volume(self, volume):
self._volume = volume
if self.stream:
driver = self.driver()
volume *= driver._listener._volume
with driver.context:
driver.context.set_input_volume(self.stream, volume).wait()
def set_pitch(self, pitch):
sample_rate = self.stream.audio_format.rate
with self.stream:
self.stream.update_sample_rate(int(pitch * sample_rate)).wait()
def prefill_audio(self):
self._write_to_stream(nbytes=None)
``` |
[
{
"content": "Reconstruct the code file line-for-line, unmodified:\n```python\n#!/usr/bin/env python\nimport os\nimport unittest\nimport sys\n\nfrom test_common import TestCommon\n\nsys.path.append(os.path.join(os.path.dirname(__file__), '..', 'library'))\nfrom fastly_service import FastlyConfiguration\n\nclass... | [
{
"content": "Reconstruct the code file line-for-line, unmodified:\n<|memory_start|>```python\n#!/usr/bin/env python\nimport os\nimport unittest\nimport sys\n\nfrom test_common import TestCommon\n\nsys.path.append(os.path.join(os.path.dirname(__file__), '..', 'library'))\nfrom fastly_service import FastlyConfig... | ```python
#!/usr/bin/env python
import os
import unittest
import sys
from test_common import TestCommon
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'library'))
from fastly_service import FastlyConfiguration
class TestFastlyGzip(TestCommon):
GZIP_NAME = 'gzip-config-name'
@TestCommon.vcr.use_cassette()
def test_fastly_gzip(self):
gzip_configuration = self.minimal_configuration.copy()
gzip_configuration.update({
'gzips': [{
'name': self.GZIP_NAME,
'content_types': 'text/html text/css application/javascript',
'extensions': 'html css js'
}]
})
configuration = FastlyConfiguration(gzip_configuration)
service = self.enforcer.apply_configuration(self.FASTLY_TEST_SERVICE, configuration).service
self.assertEqual(service.active_version.configuration.gzips[0].name, self.GZIP_NAME)
self.assertEqual(service.active_version.configuration, configuration)
if __name__ == '__main__':
unittest.main()
``` |
[
{
"content": "```python\n#!/usr/bin/env python3\n\nimport os\n\nfrom gi.repository import Gtk\n\nfrom src.Constants import MAIN_FOLDER\nfrom src.Constants import UI_FOLDER\n\nfrom src.Common import createTag\nfrom src.Common import createCategory\n\nfrom src.Interface.Utils import BasicInterface\nfrom src.Inter... | [
{
"content": "<|memory_start|>```python\n#!/usr/bin/env python3\n\nimport os\n\nfrom gi.repository import Gtk\n\nfrom src.Constants import MAIN_FOLDER\nfrom src.Constants import UI_FOLDER\n\nfrom src.Common import createTag\nfrom src.Common import createCategory\n\nfrom src.Interface.Utils import BasicInterface... | ```python
#!/usr/bin/env python3
import os
from gi.repository import Gtk
from src.Constants import MAIN_FOLDER
from src.Constants import UI_FOLDER
from src.Common import createTag
from src.Common import createCategory
from src.Interface.Utils import BasicInterface
from src.Interface.Utils import acceptInterfaceSignals
from src.Interface.Utils import ignoreSignals
class SHandler():
def __init__(self, interface):
self.interface = interface
def editorAddCategory(self, *args):
self.interface.addCategory(*args)
def editorAddTag(self, *args):
self.interface.addTag(*args)
def editorEditCategory(self, *args):
self.interface.editCategory(*args)
def editorEditTag(self, *args):
self.interface.editTag(*args)
def editorDeleteCategory(self, *args):
self.interface.deleteCategory(*args)
def editorDeleteTag(self, *args):
self.interface.deleteTag(*args)
def editorClose(self, *args):
self.interface.close()
@acceptInterfaceSignals
def editorEditCategoryName(self, *args):
self.interface.setEditMetaData(*args)
@acceptInterfaceSignals
def editorEditTagName(self, *args):
self.interface.setEditTagData(*args)
################
## Tag Editor ##
################
def requireUpdateInterface(method):
def new(self, *args, **kwargs):
res = method(self, *args, **kwargs)
self.updateInterface()
return res
return new
class TagEditor(BasicInterface):
def __init__(self, tm):
super().__init__(tm)
self.browser = self.ts.browser
self._loadInterface()
def _loadInterface(self):
# Create builder
self.builder = Gtk.Builder()
ui_file = os.path.join(UI_FOLDER, 'TagEditor.glade')
self.builder.add_from_file(ui_file)
# Connect signals
self.shandler = SHandler(self)
self.builder.connect_signals(self.shandler)
# Setup main window
self.main_window = self.builder.get_object('TagEditor')
if self.browser is not None:
self.main_window.set_transient_for(self.browser.main_window)
def updateInterface(self):
self.initializeVariables()
self.updateAllSelectors()
def initializeVariables(self):
self.categories = self.db.getAllCategories()
self.tags = self.db.getAllTags()
# category which can be eliminated
self.deletable_categories = []
self.updateDeletableCategories()
def updateDeletableCategories(self):
tags_category = []
for tag in self.tags:
if not tag.getCategory() in tags_category:
tags_category.append(tag.getCategory())
self.deletable_categories.clear()
for category in self.categories:
if not category.getCode() in tags_category:
self.deletable_categories.append(category)
@ignoreSignals
def updateAllSelectors(self):
# -- Add section --
self.updateCategorySelector('AddTagCategory', set_active=True)
## -- Edit section --
self.updateCategorySelector('EditCategorySelect')
self.updateTagSelector('EditTagSelect')
self.updateCategorySelector('EditTagCategory')
## -- Delete section --
self.updateDeletableCategorySelector('DeleteCategorySelect')
self.updateTagSelector('DeleteTagSelect')
## Selector updaters
def updateCategorySelector(self, name, set_active=False):
category_selector = self.builder.get_object(name)
category_selector.remove_all()
for category in self.categories:
category_selector.append_text(category.getName())
if set_active:
category_selector.set_active(0)
return category_selector
def updateDeletableCategorySelector(self, name):
category_selector = self.builder.get_object(name)
category_selector.remove_all()
for category in self.deletable_categories:
category_selector.append_text(category.getName())
return category_selector
def updateTagSelector(self, name):
tag_selector = self.builder.get_object(name)
tag_selector.remove_all()
for tag in self.tags:
tag_selector.append_text(tag.getName())
return tag_selector
## -- Database operations --
# Add
@requireUpdateInterface
def addCategory(self, widget, data=None):
name_entry = self.builder.get_object('AddCategoryName')
magnitude_check = self.builder.get_object('AddCategoryMagnitude')
category_name = name_entry.get_text()
category_has_magnitude = magnitude_check.get_active()
new_category = createCategory(category_name, category_has_magnitude)
res = self.db.addCategory(new_category)
if res is None:
self.showErrorWindow("Category already in database")
else:
self.showInfoWindow("Category added")
@requireUpdateInterface
def addTag(self, widget, data=None):
name_entry = self.builder.get_object('AddTagName')
cat_selector = self.builder.get_object('AddTagCategory')
tag_name = name_entry.get_text()
category_id = cat_selector.get_active()
category = self.categories[category_id]
tag_category = category.getCode()
new_tag = createTag(tag_name, tag_category)
res = self.db.addTag(new_tag)
if res is None:
self.showErrorWindow("Tag already in database")
else:
self.showInfoWindow("Tag added")
# Edit
@requireUpdateInterface
def editCategory(self, widget, data = None):
category_selector = self.builder.get_object('EditCategorySelect')
name_entry = self.builder.get_object('EditCategoryName')
category_id = category_selector.get_active()
if category_id < 0:
return True
category = self.categories[category_id]
new_name = name_entry.get_text()
res = self.db.renameCategory(category, new_name)
if res is None:
self.showErrorWindow("Duplicate name")
else:
self.showInfoWindow("Category edited")
@requireUpdateInterface
def editTag(self, widget, data = None):
tag_selector = self.builder.get_object('EditTagSelect')
name_entry = self.builder.get_object('EditTagName')
cat_selector = self.builder.get_object('EditTagCategory')
tag_id = tag_selector.get_active()
if tag_id < 0:
return True
tag = self.tags[tag_id]
new_name = name_entry.get_text()
category_id = cat_selector.get_active()
if category_id < 0:
return True
new_category = self.categories[category_id]
res = self.db.renameTag(tag, new_name)
if res is None:
self.showErrorWindow("Duplicate name")
return None
else:
self.db.changeTagCategory(tag, new_category)
self.showInfoWindow("Tag edited")
def setEditMetaData(self, category_selector):
category_id = category_selector.get_active()
if category_id < 0:
return True
category = self.categories[category_id]
name_entry = self.builder.get_object('EditCategoryName')
name_entry.set_text(category.getName())
def setEditTagData(self, tag_selector):
tag_id = tag_selector.get_active()
if tag_id < 0:
return True
tag = self.tags[tag_id]
name_entry = self.builder.get_object('EditTagName')
name_entry.set_text(tag.getName())
category = None
for cat in self.categories:
if cat.getCode() == tag.getCategory():
category = cat
break
category_index = self.categories.index(category)
cat_selector = self.builder.get_object('EditTagCategory')
cat_selector.set_active(category_index)
# Delete
@requireUpdateInterface
def deleteCategory(self, widget, data=None):
# NOTE: to delete a Category there must be no associated Tags
cat_selector = self.builder.get_object('DeleteCategorySelect')
category_id = cat_selector.get_active()
if category_id < 0:
return True
category = self.deletable_categories[category_id]
self.db.deleteCategory(category)
self.showInfoWindow("Category deleted")
@requireUpdateInterface
def deleteTag(self, widget, data = None):
tag_selector = self.builder.get_object('DeleteTagSelect')
tag_id = tag_selector.get_active()
if tag_id < 0:
return True
tag = self.tags[tag_id]
self.db.deleteTag(tag)
self.showInfoWindow("Tag deleted")
## Start/Stop
def start(self):
self.updateInterface()
self.main_window.show()
def close(self):
self.main_window.hide()
self.ts.closeSecondary(refresh=True)
def open(*args, **kwargs):
editor = TagEditor(*args, **kwargs)
return editor
``` |
[
{
"content": "Here is the script:\n```python\n#!/usr/bin/python\n# -*- coding: utf-8 -*-\n##############################################################################\n#\n# Copyright (C) 2010-2012 Associazione OpenERP Italia\n# (<http://www.openerp-italia.org>).\n# Copyright(c)2008-2010 SIA \"KN dati\".... | [
{
"content": "Here is the script:\n<|memory_start|>```python\n#!/usr/bin/python\n# -*- coding: utf-8 -*-\n##############################################################################\n#\n# Copyright (C) 2010-2012 Associazione OpenERP Italia\n# (<http://www.openerp-italia.org>).\n# Copyright(c)2008-2010 ... | ```python
#!/usr/bin/python
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2010-2012 Associazione OpenERP Italia
# (<http://www.openerp-italia.org>).
# Copyright(c)2008-2010 SIA "KN dati".(http://kndati.lv) All Rights Reserved.
# General contacts <info@kndati.lv>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import os
import sys
import logging
import erppeek
import pickle
from datetime import datetime
from openerp.report import report_sxw
from openerp.report.report_sxw import rml_parse
from openerp.tools.translate import _
from openerp.tools import (DEFAULT_SERVER_DATE_FORMAT,
DEFAULT_SERVER_DATETIME_FORMAT,
DATETIME_FORMATS_MAP,
float_compare)
_logger = logging.getLogger(__name__)
class Parser(report_sxw.rml_parse):
    """Report parser for the dynamic BOM status report.

    Registers ``load_bom`` and ``get_filter`` in ``localcontext`` so the
    report template can call them directly.
    """

    def __init__(self, cr, uid, name, context):
        super(Parser, self).__init__(cr, uid, name, context)
        self.localcontext.update({
            'load_bom': self.load_bom,
            'get_filter': self.get_filter,
        })

    def get_filter(self, data):
        """Return a printable description of the filters held in ``data``."""
        if data is None:
            data = {}
        code_prefix = data.get('start_code', '')
        line_mode = data.get('only', 'all')
        open_order = data.get('from_order')

        description = _('Line: %s') % line_mode
        if code_prefix:
            description += _('Code start with: %s ') % code_prefix
        if open_order:
            description += _('From open order ')
        # NOTE(review): description always begins with the 'Line:' part, so
        # this fallback can never trigger; kept only for behavioural parity.
        if not description:
            description = _('All product ')
        return description

    def load_bom(self, data):
        """Collect products and their BOM lines according to the filters.

        Returns a list of ``(product, [(line, is_duplicated, is_placeholder)])``
        tuples, one entry per matching product.
        """
        if data is None:
            data = {}
        cr = self.cr
        uid = self.uid
        context = {}
        product_obj = self.pool.get('product.product')

        code_prefix = data.get('start_code', '')  # mandatory field
        line_mode = data.get('only', 'all')
        open_order = data.get('from_order')

        product_ids = product_obj.search(cr, uid, [
            ('default_code', '=ilike', '%s%%' % code_prefix),
        ], context=context)

        # Intersect with order for login AND operation
        if open_order:
            order_product_ids = self.pool.get(
                'res.company').mrp_domain_sale_order_line_ids(
                    cr, uid, context=context)
            product_ids = list(
                order_product_ids.intersection(set(product_ids)))

        result = []
        for product in product_obj.browse(
                cr, uid, product_ids, context=context):
            row = (product, [])
            seen_ids = []  # component product ids already met in this BOM
            for line in product.dynamic_bom_line_ids:
                is_ph = line.product_id.bom_placeholder
                prod_id = line.product_id.id
                is_dup = prod_id in seen_ids
                seen_ids.append(prod_id)

                clean = not is_ph and not is_dup
                # Consider ph error, double error and override if required:
                if line_mode == 'override' and clean and not line.dynamic_mask:
                    continue
                # If only error jump placeholder and double both false
                if line_mode == 'error' and clean:
                    continue
                row[1].append((line, is_dup, is_ph))
            result.append(row)
        return result
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
``` |
[
{
"content": "Repeat the code exactly as the original, including blank lines:\n```python\nfrom .util import TestCase, load_json_fixture\nfrom babbage.model import Model\n\n\nclass ModelTestCase(TestCase):\n\n def setUp(self):\n super(ModelTestCase, self).setUp()\n self.simple_model_data = load_... | [
{
"content": "Repeat the code exactly as the original, including blank lines:\n<|memory_start|>```python\nfrom .util import TestCase, load_json_fixture\nfrom babbage.model import Model\n\n\nclass ModelTestCase(TestCase):\n\n def setUp(self):\n super(ModelTestCase, self).setUp()\n self.simple_mo... | ```python
from .util import TestCase, load_json_fixture
from babbage.model import Model
class ModelTestCase(TestCase):
    """Exercise the babbage ``Model`` wrapper against the simple fixture."""

    def setUp(self):
        super(ModelTestCase, self).setUp()
        self.simple_model_data = load_json_fixture('models/simple_model.json')
        self.simple_model = Model(self.simple_model_data)

    def test_model_concepts(self):
        found = list(self.simple_model.concepts)
        assert len(found) == 7, len(found)

    def test_model_match(self):
        found = list(self.simple_model.match('foo'))
        assert len(found) == 1, len(found)

    def test_model_match_invalid(self):
        found = list(self.simple_model.match('fooxx'))
        assert len(found) == 0, len(found)

    def test_model_aggregates(self):
        found = list(self.simple_model.aggregates)
        assert len(found) == 2, found

    def test_model_fact_table(self):
        model = self.simple_model
        assert model.fact_table_name == 'simple'
        assert 'simple' in repr(model), repr(model)

    def test_deref(self):
        model = self.simple_model
        assert model['foo'].name == 'foo'
        assert model['foo.key'].name == 'key'
        assert model['amount'].name == 'amount'
        assert 'amount' in model
        assert 'amount.sum' in model
        assert '_count' in model
        assert 'yabba' not in model
        assert 'foo.key' in model

    def test_repr(self):
        model = self.simple_model
        assert 'amount' in repr(model['amount'])
        assert 'amount.sum' in repr(model['amount.sum'])
        assert 'foo.key' in repr(model['foo.key'])
        assert 'foo' in repr(model['foo'])
        assert 'foo' in unicode(model['foo'])
        assert model['foo'] == 'foo'

    def test_to_dict(self):
        dumped = self.simple_model.to_dict()
        assert 'measures' in dumped
        assert 'amount' in dumped['measures']
        assert 'amount.sum' in dumped['aggregates']
        assert 'ref' in dumped['measures']['amount']
        assert 'dimensions' in dumped
        assert 'foo' in dumped['dimensions']
``` |
[
{
"content": "```python\n##\n# Copyright 2009-2017 Ghent University\n#\n# This file is part of EasyBuild,\n# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),\n# with support of Ghent University (http://ugent.be/hpc),\n# the Flemish Supercomputer Centre (VSC) (https://www.vscentru... | [
{
"content": "<|memory_start|>```python\n##\n# Copyright 2009-2017 Ghent University\n#\n# This file is part of EasyBuild,\n# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),\n# with support of Ghent University (http://ugent.be/hpc),\n# the Flemish Supercomputer Centre (VSC) (http... | ```python
##
# Copyright 2009-2017 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
Special handling of R easyblocks, due to clash with easybuild.easyblocks.r namespace.
author: Kenneth Hoste (Ghent University)
"""
from easybuild.easyblocks.r.r import *
``` |
[
{
"content": "```python\nimport imp\nfrom io import StringIO\nfrom pluggdapps.plugin import Plugin, implements\nfrom tayra import BaseTTLPlugin\n\ndef __traceback_decorator__( frames ):\n from copy import deepcopy\n from os.path import basename\n\n def _map2... | [
{
"content": "<|memory_start|>```python\nimport imp\nfrom io import StringIO\nfrom pluggdapps.plugin import Plugin, implements\nfrom tayra import BaseTTLPlugin\n\ndef __traceback_decorator__( frames ):\n from copy import deepcopy\n from os.path import basename\... | ```python
import imp
from io import StringIO
from pluggdapps.plugin import Plugin, implements
from tayra import BaseTTLPlugin
def __traceback_decorator__( frames ):
    """Augment traceback frames of compiled ``.ttl.py`` template modules.

    For every frame that belongs to this compiled template, append an extra
    frame that points back at the originating ``.ttl`` source line, so
    tracebacks show template locations as well as generated-code locations.

    NOTE(review): relies on a module-global ``_ttlfile`` (path of the .ttl
    template) that is defined elsewhere in the generated module -- confirm.
    """
    from copy import deepcopy
    from os.path import basename

    def _map2ttl( frame ):
        # Scan the compiled .py file backwards from the failing line for the
        # nearest generated '# lineno:<n>' marker comment; the marker yields
        # the originating .ttl line number and its source text.
        filename = frame.filename
        lineno = frame.lineno
        lines = open(filename).readlines()[:lineno]
        lines.reverse()
        rc = {}
        for l in lines :
            if l.strip().startswith('# lineno') :
                _, ttl_lineno = l.split(':', 1)
                ttl_lineno = int( ttl_lineno )
                ttl_text = open( _ttlfile ).readlines()[ ttl_lineno-1 ]
                return ttl_lineno, ttl_text
        return None, None

    newframes = []
    for frame in frames :
        newframes.append( frame )
        frameadded = getattr( frame, '_ttlframeadded', False )
        basen = basename( frame.filename )
        if basen.endswith( '.ttl.py' ) and basen == (basename( _ttlfile ) + '.py') and frameadded == False :
            # Mark the original frame so the .ttl frame is added only once,
            # then clone it and retarget the clone at the .ttl source.
            newframe = deepcopy( frame )
            frame._ttlframeadded = True
            try :
                newframe.lineno, newframe.linetext = _map2ttl( newframe )
                if newframe.lineno :
                    newframe.filename = _ttlfile
                    newframes.append( newframe )
            except :
                raise
            continue
    return newframes
from tayra.interfaces import ITayraTestInterface
def body( *args, **kwargs ) :
    """Template entry point emitted by the Tayra compiler.

    Renders the page by pushing/popping text buffers on the module-global
    ``_m`` stack machine and returns the final HTML string.

    NOTE(review): machine-generated code -- the '# lineno:<n>' comments are
    load-bearing (used by __traceback_decorator__ to map tracebacks back to
    the .ttl source); do not edit or remove them.
    """
    _m.pushbuf()
    _m.extend( ['<!DOCTYPE html>\n'] )
    # lineno:4
    obj = _compiler.query_plugin( ITayraTestInterface, 'tayra.XYZTestInterface' )
    # lineno:6
    _m.pushbuf()
    _m.extend( ['<html>'] )
    _m.pushbuf()
    _m.extend( ['\n '] )
    # lineno:7
    _m.pushbuf()
    _m.extend( ['<head>'] )
    _m.pushbuf()
    _m.extend( ['\n '] )
    _m.handletag( _m.popbuftext(), _m.popbuftext(), **{'nl': '', 'oprune': False, 'indent': False, 'iprune': False} )
    # lineno:8
    _m.pushbuf()
    _m.extend( ['<body>'] )
    _m.pushbuf()
    _m.extend( ['\n '] )
    # lineno:9
    _m.extend( [''] )
    _m.append(_m.evalexprs( '', 'obj.render()', '', globals(), locals()) )
    _m.extend( ['\n'] )
    _m.handletag( _m.popbuftext(), _m.popbuftext(), **{'nl': '', 'oprune': False, 'indent': False, 'iprune': False} )
    _m.handletag( _m.popbuftext(), _m.popbuftext(), **{'nl': '', 'oprune': False, 'indent': False, 'iprune': False} )
    return _m.popbuftext()
# ---- Global Functions
# ---- Interface functions
# ---- Footer
``` |
[
{
"content": "Replicate the code snippet exactly, without paraphrasing or reformatting:\n```python\n# -*- coding: utf-8 -*-\n\"\"\"The CLI tools classes.\"\"\"\n\nimport abc\nimport datetime\nimport locale\nimport logging\nimport os\nimport sys\n\ntry:\n import win32api\n import win32console\nexcept ImportErr... | [
{
"content": "Replicate the code snippet exactly, without paraphrasing or reformatting:\n<|memory_start|>```python\n# -*- coding: utf-8 -*-\n\"\"\"The CLI tools classes.\"\"\"\n\nimport abc\nimport datetime\nimport locale\nimport logging\nimport os\nimport sys\n\ntry:\n import win32api\n import win32console\n... | ```python
# -*- coding: utf-8 -*-
"""The CLI tools classes."""
import abc
import datetime
import locale
import logging
import os
import sys
try:
import win32api
import win32console
except ImportError:
win32console = None
import plaso
from plaso.cli import views
from plaso.lib import errors
from plaso.lib import py2to3
import pytz # pylint: disable=wrong-import-order
class CLITool(object):
  """Class that implements a CLI tool.

  Attributes:
    list_timezones (bool): True if the time zones should be listed.
    preferred_encoding (str): preferred encoding of single-byte or multi-byte
        character strings, sometimes referred to as extended ASCII.
  """

  # The maximum number of characters of a line written to the output writer.
  _LINE_LENGTH = 80

  # The fall back preferred encoding.
  _PREFERRED_ENCODING = u'utf-8'

  NAME = u''

  def __init__(self, input_reader=None, output_writer=None):
    """Initializes the CLI tool object.

    Args:
      input_reader (Optional[InputReader]): input reader, where None indicates
          that the stdin input reader should be used.
      output_writer (Optional[OutputWriter]): output writer, where None
          indicates that the stdout output writer should be used.
    """
    super(CLITool, self).__init__()

    preferred_encoding = locale.getpreferredencoding()
    if not preferred_encoding:
      preferred_encoding = self._PREFERRED_ENCODING

    if not input_reader:
      input_reader = StdinInputReader(encoding=preferred_encoding)
    if not output_writer:
      output_writer = StdoutOutputWriter(encoding=preferred_encoding)

    self._data_location = None
    self._debug_mode = False
    self._encode_errors = u'strict'
    self._input_reader = input_reader
    self._log_file = None
    self._output_writer = output_writer
    self._quiet_mode = False
    self._timezone = u'UTC'
    self._views_format_type = views.ViewsFactory.FORMAT_TYPE_CLI

    self.list_timezones = False
    self.preferred_encoding = preferred_encoding

  def _ClearScreen(self):
    """Clears the terminal/console screen."""
    if not win32console:
      # ANSI escape sequence to clear screen.
      self._output_writer.Write(b'\033[2J')
      # ANSI escape sequence to move cursor to top left.
      self._output_writer.Write(b'\033[H')

    else:
      # Windows cmd.exe does not support ANSI escape codes, thus instead we
      # fill the console screen buffer with spaces.
      top_left_coordinate = win32console.PyCOORDType(0, 0)
      screen_buffer = win32console.GetStdHandle(win32api.STD_OUTPUT_HANDLE)
      screen_buffer_information = screen_buffer.GetConsoleScreenBufferInfo()

      screen_buffer_attributes = screen_buffer_information[u'Attributes']
      screen_buffer_size = screen_buffer_information[u'Size']
      console_size = screen_buffer_size.X * screen_buffer_size.Y

      screen_buffer.FillConsoleOutputCharacter(
          u' ', console_size, top_left_coordinate)
      screen_buffer.FillConsoleOutputAttribute(
          screen_buffer_attributes, console_size, top_left_coordinate)
      screen_buffer.SetConsoleCursorPosition(top_left_coordinate)

  def _ConfigureLogging(
      self, filename=None, format_string=None, log_level=None):
    """Configure the logger.

    If a filename is specified and the corresponding log file already exists,
    the file is truncated.

    Args:
      filename (Optional[str]): path to a filename to append logs to, where
          None means logs will not be redirected to a file.
      format_string (Optional[str]): format string for the logs, where None
          configures the logger to use a default format string.
      log_level (Optional[int]): integer representing the log level, for
          example logging.DEBUG, where None represents logging.INFO.
    """
    # Remove all possible log handlers.
    for handler in logging.root.handlers:
      logging.root.removeHandler(handler)

    if log_level is None:
      log_level = logging.INFO

    if not format_string:
      format_string = u'[%(levelname)s] %(message)s'

    if filename:
      # filemode 'w' truncates any pre-existing log file.
      logging.basicConfig(
          level=log_level, format=format_string, filename=filename,
          filemode=u'w')
    else:
      logging.basicConfig(level=log_level, format=format_string)

  def _EncodeString(self, string):
    """Encodes a string in the preferred encoding.

    Args:
      string (str): string to encode.

    Returns:
      bytes: encoded string.
    """
    try:
      # Note that encode() will first convert string into a Unicode string
      # if necessary.
      encoded_string = string.encode(
          self.preferred_encoding, errors=self._encode_errors)
    except UnicodeEncodeError:
      # Degrade to 'replace' once, then retry; subsequent calls keep the
      # tolerant error handler.
      if self._encode_errors == u'strict':
        logging.error(
            u'Unable to properly write output due to encoding error. '
            u'Switching to error tolerant encoding which can result in '
            u'non Basic Latin (C0) characters to be replaced with "?" or '
            u'"\\ufffd".')
        self._encode_errors = u'replace'

      encoded_string = string.encode(
          self.preferred_encoding, errors=self._encode_errors)

    return encoded_string

  def _ParseDataLocationOption(self, options):
    """Parses the data location option.

    Args:
      options (argparse.Namespace): command line arguments.
    """
    data_location = self.ParseStringOption(options, u'data_location')
    if not data_location:
      # Determine if we are running from the source directory.
      # This should get us the path to the "plaso/cli" directory.
      data_location = os.path.dirname(__file__)

      # In order to get to the main path of the egg file we need to traverse
      # two directories up.
      data_location = os.path.dirname(data_location)
      data_location = os.path.dirname(data_location)

      # There are multiple options to run a tool e.g. running from source or
      # from an egg file.
      data_location_egg = os.path.join(data_location, u'share', u'plaso')
      data_location_source = os.path.join(data_location, u'data')

      data_location_system = os.path.join(sys.prefix, u'share', u'plaso')
      data_location_system_local = os.path.join(
          sys.prefix, u'local', u'share', u'plaso')

      # First match wins: egg, source checkout, system, system-local.
      if os.path.exists(data_location_egg):
        data_location = data_location_egg
      elif os.path.exists(data_location_source):
        data_location = data_location_source
      elif os.path.exists(data_location_system):
        data_location = data_location_system
      elif os.path.exists(data_location_system_local):
        data_location = data_location_system_local
      else:
        data_location = None

    if not data_location:
      self._output_writer.Write(
          u'WARNING: unable to determine location of data files.\n')

    self._data_location = data_location

  def _ParseInformationalOptions(self, options):
    """Parses the informational options.

    Args:
      options (argparse.Namespace): command line arguments.
    """
    self._debug_mode = getattr(options, u'debug', False)
    self._quiet_mode = getattr(options, u'quiet', False)

    if self._debug_mode and self._quiet_mode:
      logging.warning(
          u'Cannot use debug and quiet mode at the same time, defaulting to '
          u'debug output.')

  def _ParseTimezoneOption(self, options):
    """Parses the timezone options.

    Args:
      options (argparse.Namespace): command line arguments.

    Raises:
      BadConfigOption: if the options are invalid.
    """
    timezone_string = self.ParseStringOption(options, u'timezone')
    if isinstance(timezone_string, py2to3.STRING_TYPES):
      # The special value "list" only sets the flag; no timezone is stored.
      if timezone_string.lower() == u'list':
        self.list_timezones = True

      elif timezone_string:
        try:
          pytz.timezone(timezone_string)
        except pytz.UnknownTimeZoneError:
          raise errors.BadConfigOption(
              u'Unknown timezone: {0:s}'.format(timezone_string))

        self._timezone = timezone_string

  def AddBasicOptions(self, argument_group):
    """Adds the basic options to the argument group.

    Args:
      argument_group (argparse._ArgumentGroup): argparse argument group.
    """
    version_string = u'plaso - {0:s} version {1:s}'.format(
        self.NAME, plaso.GetVersion())

    # We want a custom help message and not the default argparse one.
    argument_group.add_argument(
        u'-h', u'--help', action=u'help',
        help=u'show this help message and exit.')

    argument_group.add_argument(
        u'-V', u'--version', dest=u'version', action=u'version',
        version=version_string, help=u'show the version information.')

  def AddDataLocationOption(self, argument_group):
    """Adds the data location option to the argument group.

    Args:
      argument_group (argparse._ArgumentGroup): argparse argument group.
    """
    argument_group.add_argument(
        u'--data', action=u'store', dest=u'data_location', type=str,
        metavar=u'PATH', default=None, help=u'the location of the data files.')

  def AddInformationalOptions(self, argument_group):
    """Adds the informational options to the argument group.

    Args:
      argument_group (argparse._ArgumentGroup): argparse argument group.
    """
    argument_group.add_argument(
        '-d', '--debug', dest='debug', action='store_true', default=False,
        help=u'enable debug output.')

    argument_group.add_argument(
        '-q', '--quiet', dest='quiet', action='store_true', default=False,
        help=u'disable informational output.')

  def AddLogFileOptions(self, argument_group):
    """Adds the log file option to the argument group.

    Args:
      argument_group (argparse._ArgumentGroup): argparse argument group.
    """
    argument_group.add_argument(
        u'--logfile', u'--log_file', u'--log-file', action=u'store',
        metavar=u'FILENAME', dest=u'log_file', type=str, default=u'', help=(
            u'If defined all log messages will be redirected to this file '
            u'instead the default STDERR.'))

  def AddTimezoneOption(self, argument_group):
    """Adds the timezone option to the argument group.

    Args:
      argument_group (argparse._ArgumentGroup): argparse argument group.
    """
    argument_group.add_argument(
        u'-z', u'--zone', u'--timezone', dest=u'timezone', action=u'store',
        type=str, default=u'UTC', help=(
            u'explicitly define the timezone. Typically the timezone is '
            u'determined automatically where possible. Use "-z list" to '
            u'see a list of available timezones.'))

  def GetCommandLineArguments(self):
    """Retrieves the command line arguments.

    Returns:
      str: command line arguments.
    """
    command_line_arguments = sys.argv
    if not command_line_arguments:
      return u''

    # On Python 2 sys.argv holds byte strings that must be decoded first.
    if isinstance(command_line_arguments[0], py2to3.BYTES_TYPE):
      encoding = sys.stdin.encoding

      # Note that sys.stdin.encoding can be None.
      if not encoding:
        encoding = self.preferred_encoding

      try:
        command_line_arguments = [
            argument.decode(encoding) for argument in command_line_arguments]

      except UnicodeDecodeError:
        logging.error(
            u'Unable to properly read command line input due to encoding '
            u'error. Replacing non Basic Latin (C0) characters with "?" or '
            u'"\\ufffd".')

        command_line_arguments = [
            argument.decode(encoding, errors=u'replace')
            for argument in command_line_arguments]

    return u' '.join(command_line_arguments)

  def ListTimeZones(self):
    """Lists the timezones."""
    max_length = 0
    for timezone_name in pytz.all_timezones:
      if len(timezone_name) > max_length:
        max_length = len(timezone_name)

    utc_date_time = datetime.datetime.utcnow()

    table_view = views.ViewsFactory.GetTableView(
        self._views_format_type, column_names=[u'Timezone', u'UTC Offset'],
        title=u'Zones')
    for timezone_name in pytz.all_timezones:
      local_timezone = pytz.timezone(timezone_name)

      local_date_string = u'{0!s}'.format(
          local_timezone.localize(utc_date_time))
      # The UTC offset is the signed tail of the localized date string,
      # e.g. "... 12:00:00+02:00" -> "+02:00".
      if u'+' in local_date_string:
        _, _, diff = local_date_string.rpartition(u'+')
        diff_string = u'+{0:s}'.format(diff)
      else:
        _, _, diff = local_date_string.rpartition(u'-')
        diff_string = u'-{0:s}'.format(diff)

      table_view.AddRow([timezone_name, diff_string])

    table_view.Write(self._output_writer)

  def ParseOptions(self, options):
    """Parses tool specific options.

    Args:
      options (argparse.Namespace): command line arguments.
    """
    self._ParseInformationalOptions(options)

  def ParseLogFileOptions(self, options):
    """Parses the log file options.

    Args:
      options (argparse.Namespace): command line arguments.
    """
    self._log_file = self.ParseStringOption(options, u'log_file')

  def ParseStringOption(self, options, argument_name, default_value=None):
    """Parses a string command line argument.

    Args:
      options (argparse.Namespace): command line arguments.
      argument_name (str): name of the command line argument.
      default_value (Optional[object]): default value of the command line
          argument.

    Returns:
      object: command line argument value. If the command line argument is
          not set the default value will be returned.

    Raises:
      BadConfigOption: if the command line argument value cannot be converted
          to a Unicode string.
    """
    argument_value = getattr(options, argument_name, None)
    if not argument_value:
      return default_value

    if isinstance(argument_value, py2to3.BYTES_TYPE):
      encoding = sys.stdin.encoding

      # Note that sys.stdin.encoding can be None.
      if not encoding:
        encoding = self.preferred_encoding

      try:
        argument_value = argument_value.decode(encoding)
      except UnicodeDecodeError as exception:
        raise errors.BadConfigOption((
            u'Unable to convert option: {0:s} to Unicode with error: '
            u'{1:s}.').format(argument_name, exception))

    elif not isinstance(argument_value, py2to3.UNICODE_TYPE):
      raise errors.BadConfigOption(
          u'Unsupported option: {0:s} string type required.'.format(
              argument_name))

    return argument_value

  def PrintSeparatorLine(self):
    """Prints a separator line."""
    self._output_writer.Write(u'-' * self._LINE_LENGTH)
    self._output_writer.Write(u'\n')
class CLIInputReader(object):
  """Class that implements the CLI input reader interface."""

  # NOTE(review): Read is decorated with @abc.abstractmethod but this class
  # does not use abc.ABCMeta as its metaclass, so instantiating incomplete
  # subclasses is not actually prevented -- confirm whether intended.

  def __init__(self, encoding=u'utf-8'):
    """Initializes the input reader object.

    Args:
      encoding (Optional[str]): input encoding.
    """
    super(CLIInputReader, self).__init__()
    self._encoding = encoding

  @abc.abstractmethod
  def Read(self):
    """Reads a string from the input.

    Returns:
      str: input.
    """
class CLIOutputWriter(object):
  """Class that implements the CLI output writer interface."""

  # NOTE(review): Write is decorated with @abc.abstractmethod but this class
  # does not use abc.ABCMeta as its metaclass, so instantiating incomplete
  # subclasses is not actually prevented -- confirm whether intended.

  def __init__(self, encoding=u'utf-8'):
    """Initializes the output writer object.

    Args:
      encoding (Optional[str]): output encoding.
    """
    super(CLIOutputWriter, self).__init__()
    self._encoding = encoding

  @abc.abstractmethod
  def Write(self, string):
    """Writes a string to the output.

    Args:
      string (str): output.
    """
class FileObjectInputReader(CLIInputReader):
  """Input reader that decodes lines read from a file-like object.

  The wrapped object only needs to expose a readline method.
  """

  def __init__(self, file_object, encoding=u'utf-8'):
    """Initializes the input reader.

    Args:
      file_object (file): file-like object to read from.
      encoding (Optional[str]): input encoding.
    """
    super(FileObjectInputReader, self).__init__(encoding=encoding)
    self._errors = u'strict'
    self._file_object = file_object

  def Read(self):
    """Reads and decodes one line from the wrapped file-like object.

    On the first decoding failure the reader logs an error, switches from
    'strict' to 'replace' error handling and retries, so later reads keep
    working at the cost of replacement characters.

    Returns:
      str: input.
    """
    raw_line = self._file_object.readline()

    try:
      decoded_line = raw_line.decode(self._encoding, errors=self._errors)
    except UnicodeDecodeError:
      if self._errors == u'strict':
        logging.error(
            u'Unable to properly read input due to encoding error. '
            u'Switching to error tolerant encoding which can result in '
            u'non Basic Latin (C0) characters to be replaced with "?" or '
            u'"\\ufffd".')
        self._errors = u'replace'

      decoded_line = raw_line.decode(self._encoding, errors=self._errors)

    return decoded_line
class StdinInputReader(FileObjectInputReader):
  """Class that implements a stdin input reader."""

  def __init__(self, encoding=u'utf-8'):
    """Initializes the input reader object.

    Args:
      encoding (Optional[str]): input encoding.
    """
    # Binds the reader to the process-wide sys.stdin stream.
    super(StdinInputReader, self).__init__(sys.stdin, encoding=encoding)
class FileObjectOutputWriter(CLIOutputWriter):
  """Output writer that encodes text and writes it to a file-like object.

  The wrapped object only needs to expose a write method.
  """

  def __init__(self, file_object, encoding=u'utf-8'):
    """Initializes the output writer.

    Args:
      file_object (file): file-like object to read from.
      encoding (Optional[str]): output encoding.
    """
    super(FileObjectOutputWriter, self).__init__(encoding=encoding)
    self._errors = u'strict'
    self._file_object = file_object

  def Write(self, string):
    """Encodes *string* and writes the result to the file-like object.

    On the first encoding failure the writer logs an error, switches from
    'strict' to 'replace' error handling and retries, so later writes keep
    working at the cost of replacement characters.

    Args:
      string (str): output.
    """
    try:
      # encode() promotes byte strings to Unicode first when needed.
      byte_stream = string.encode(self._encoding, errors=self._errors)
    except UnicodeEncodeError:
      if self._errors == u'strict':
        logging.error(
            u'Unable to properly write output due to encoding error. '
            u'Switching to error tolerant encoding which can result in '
            u'non Basic Latin (C0) characters to be replaced with "?" or '
            u'"\\ufffd".')
        self._errors = u'replace'

      byte_stream = string.encode(self._encoding, errors=self._errors)

    self._file_object.write(byte_stream)
class StdoutOutputWriter(FileObjectOutputWriter):
  """Class that implements a stdout output writer."""

  def __init__(self, encoding=u'utf-8'):
    """Initializes the output writer object.

    Args:
      encoding (Optional[str]): output encoding.
    """
    # Binds the writer to the process-wide sys.stdout stream.
    super(StdoutOutputWriter, self).__init__(sys.stdout, encoding=encoding)
``` |
[
{
"content": "Here is the source code:\n```python\n# -*- encoding: utf-8 -*-\n##############################################################################\n# \n# Copyright (C) 2013-2016 Didotech SRL\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of... | [
{
"content": "Here is the source code:\n<|memory_start|>```python\n# -*- encoding: utf-8 -*-\n##############################################################################\n# \n# Copyright (C) 2013-2016 Didotech SRL\n#\n# This program is free software: you can redistribute it and/or modify\n# it un... | ```python
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2013-2016 Didotech SRL
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Project Extended',
'version': '3.17.28.32',
'category': 'Generic Modules/Projects & Services',
'description': """Tasks list on a dedicated tab on the project form
""",
'author': 'Didotech SRL',
'website': 'http://www.didotech.com',
'license': 'AGPL-3',
"depends": [
'account',
'project',
'sale',
'project_timesheet',
'task_time_control',
'res_users_helper_functions',
'core_extended',
'res_users_kanban',
'mrp',
'hr_timesheet_invoice',
],
"data": [
'security/security.xml',
'security/ir.model.access.csv',
'views/account_analytic_line.xml',
'views/project.xml',
'views/project_task.xml',
'views/project_view_menu.xml',
'views/res_partner.xml',
'views/project_task_work.xml',
'views/account_analytic_account.xml',
],
"active": False,
"installable": True
}
``` |
[
{
"content": "Repeat the code exactly as the original, including blank lines:\n```python\n# -*- coding: utf-8 -*-\n\nfrom datetime import datetime\nimport itertools\nimport logging\nimport math\nimport re\nimport uuid\nfrom werkzeug.exceptions import Forbidden\n\nfrom openerp import _\nfrom openerp import api, ... | [
{
"content": "Repeat the code exactly as the original, including blank lines:\n<|memory_start|>```python\n# -*- coding: utf-8 -*-\n\nfrom datetime import datetime\nimport itertools\nimport logging\nimport math\nimport re\nimport uuid\nfrom werkzeug.exceptions import Forbidden\n\nfrom openerp import _\nfrom open... | ```python
# -*- coding: utf-8 -*-
from datetime import datetime
import itertools
import logging
import math
import re
import uuid
from werkzeug.exceptions import Forbidden
from openerp import _
from openerp import api, fields, models
from openerp import http
from openerp import modules
from openerp import tools
from openerp import SUPERUSER_ID
from openerp.addons.website.models.website import slug
from openerp.exceptions import UserError
_logger = logging.getLogger(__name__)
class KarmaError(Forbidden):
    """ Karma-related error, used for forum and posts. """
    # Subclasses werkzeug's Forbidden so an unhandled instance surfaces as
    # an HTTP 403 response.
    pass
class Forum(models.Model):
_name = 'forum.forum'
_description = 'Forum'
_inherit = ['mail.thread', 'website.seo.metadata']
    def init(self, cr):
        """ Add forum uuid for user email validation.

        TDE TODO: move me somewhere else, auto_init ? """
        # Presumably invoked at module install/update (old-API hook) --
        # creates the 'website_forum.uuid' system parameter once if missing.
        forum_uuids = self.pool['ir.config_parameter'].search(cr, SUPERUSER_ID, [('key', '=', 'website_forum.uuid')])
        if not forum_uuids:
            self.pool['ir.config_parameter'].set_param(cr, SUPERUSER_ID, 'website_forum.uuid', str(uuid.uuid4()), ['base.group_system'])
@api.model
def _get_default_faq(self):
fname = modules.get_module_resource('website_forum', 'data', 'forum_default_faq.html')
with open(fname, 'r') as f:
return f.read()
return False
# description and use
name = fields.Char('Forum Name', required=True, translate=True)
faq = fields.Html('Guidelines', default=_get_default_faq, translate=True)
description = fields.Text(
'Description',
translate=True,
default='This community is for professionals and enthusiasts of our products and services. '
'Share and discuss the best content and new marketing ideas, '
'build your professional profile and become a better marketer together.')
welcome_message = fields.Html(
'Welcome Message',
default = """<section class="bg-info" style="height: 168px;"><div class="container">
<div class="row">
<div class="col-md-12">
<h1 class="text-center" style="text-align: left;">Welcome!</h1>
<p class="text-muted text-center" style="text-align: left;">This community is for professionals and enthusiasts of our products and services. Share and discuss the best content and new marketing ideas, build your professional profile and become a better marketer together.</p>
</div>
<div class="col-md-12">
<a href="#" class="js_close_intro">Hide Intro</a> <a class="btn btn-primary forum_register_url" href="/web/login">Register</a> </div>
</div>
</div>
</section>""")
default_order = fields.Selection([
('create_date desc', 'Newest'),
('write_date desc', 'Last Updated'),
('vote_count desc', 'Most Voted'),
('relevancy desc', 'Relevance'),
('child_count desc', 'Answered')],
string='Default Order', required=True, default='write_date desc')
relevancy_post_vote = fields.Float('First Relevance Parameter', default=0.8, help="This formula is used in order to sort by relevance. The variable 'votes' represents number of votes for a post, and 'days' is number of days since the post creation")
relevancy_time_decay = fields.Float('Second Relevance Parameter', default=1.8)
default_post_type = fields.Selection([
('question', 'Question'),
('discussion', 'Discussion'),
('link', 'Link')],
string='Default Post', required=True, default='question')
allow_question = fields.Boolean('Questions', help="Users can answer only once per question. Contributors can edit answers and mark the right ones.", default=True)
allow_discussion = fields.Boolean('Discussions', default=True)
allow_link = fields.Boolean('Links', help="When clicking on the post, it redirects to an external link", default=True)
allow_bump = fields.Boolean('Allow Bump', default=True,
help='Check this box to display a popup for posts older than 10 days '
'without any given answer. The popup will offer to share it on social '
'networks. When shared, a question is bumped at the top of the forum.')
allow_share = fields.Boolean('Sharing Options', default=True,
help='After posting the user will be proposed to share its question '
'or answer on social networks, enabling social network propagation '
'of the forum content.')
count_posts_waiting_validation = fields.Integer(string="Number of posts waiting for validation", compute='_compute_count_posts_waiting_validation')
count_flagged_posts = fields.Integer(string='Number of flagged posts', compute='_compute_count_flagged_posts')
# karma generation
karma_gen_question_new = fields.Integer(string='Asking a question', default=2)
karma_gen_question_upvote = fields.Integer(string='Question upvoted', default=5)
karma_gen_question_downvote = fields.Integer(string='Question downvoted', default=-2)
karma_gen_answer_upvote = fields.Integer(string='Answer upvoted', default=10)
karma_gen_answer_downvote = fields.Integer(string='Answer downvoted', default=-2)
karma_gen_answer_accept = fields.Integer(string='Accepting an answer', default=2)
karma_gen_answer_accepted = fields.Integer(string='Answer accepted', default=15)
karma_gen_answer_flagged = fields.Integer(string='Answer flagged', default=-100)
# karma-based actions
karma_ask = fields.Integer(string='Ask questions', default=3)
karma_answer = fields.Integer(string='Answer questions', default=3)
karma_edit_own = fields.Integer(string='Edit own posts', default=1)
karma_edit_all = fields.Integer(string='Edit all posts', default=300)
karma_close_own = fields.Integer(string='Close own posts', default=100)
karma_close_all = fields.Integer(string='Close all posts', default=500)
karma_unlink_own = fields.Integer(string='Delete own posts', default=500)
karma_unlink_all = fields.Integer(string='Delete all posts', default=1000)
karma_upvote = fields.Integer(string='Upvote', default=5)
karma_downvote = fields.Integer(string='Downvote', default=50)
karma_answer_accept_own = fields.Integer(string='Accept an answer on own questions', default=20)
karma_answer_accept_all = fields.Integer(string='Accept an answer to all questions', default=500)
karma_comment_own = fields.Integer(string='Comment own posts', default=1)
karma_comment_all = fields.Integer(string='Comment all posts', default=1)
karma_comment_convert_own = fields.Integer(string='Convert own answers to comments and vice versa', default=50)
karma_comment_convert_all = fields.Integer(string='Convert all answers to comments and vice versa', default=500)
karma_comment_unlink_own = fields.Integer(string='Unlink own comments', default=50)
karma_comment_unlink_all = fields.Integer(string='Unlink all comments', default=500)
karma_retag = fields.Integer(string='Change question tags', default=75)
karma_flag = fields.Integer(string='Flag a post as offensive', default=500)
karma_dofollow = fields.Integer(string='Nofollow links', help='If the author has not enough karma, a nofollow attribute is added to links', default=500)
karma_editor = fields.Integer(string='Editor Features: image and links',
default=30, oldname='karma_editor_link_files')
karma_user_bio = fields.Integer(string='Display detailed user biography', default=750)
karma_post = fields.Integer(string='Ask questions without validation', default=100)
karma_moderate = fields.Integer(string='Moderate posts', default=1000)
@api.one
@api.constrains('allow_question', 'allow_discussion', 'allow_link', 'default_post_type')
def _check_default_post_type(self):
if (self.default_post_type == 'question' and not self.allow_question) \
or (self.default_post_type == 'discussion' and not self.allow_discussion) \
or (self.default_post_type == 'link' and not self.allow_link):
raise UserError(_('You cannot choose %s as default post since the forum does not allow it.' % self.default_post_type))
@api.one
@api.constrains('allow_link', 'allow_question', 'allow_discussion', 'default_post_type')
def _check_default_post_type(self):
if self.default_post_type == 'link' and not self.allow_link or self.default_post_type == 'question' and not self.allow_question or self.default_post_type == 'discussion' and not self.allow_discussion:
raise Warning(_('Post type in "Default post" must be activated'))
@api.one
def _compute_count_posts_waiting_validation(self):
domain = [('forum_id', '=', self.id), ('state', '=', 'pending')]
self.count_posts_waiting_validation = self.env['forum.post'].search_count(domain)
@api.one
def _compute_count_flagged_posts(self):
domain = [('forum_id', '=', self.id), ('state', '=', 'flagged')]
self.count_flagged_posts = self.env['forum.post'].search_count(domain)
@api.model
def create(self, values):
return super(Forum, self.with_context(mail_create_nolog=True, mail_create_nosubscribe=True)).create(values)
@api.model
def _tag_to_write_vals(self, tags=''):
User = self.env['res.users']
Tag = self.env['forum.tag']
post_tags = []
existing_keep = []
for tag in filter(None, tags.split(',')):
if tag.startswith('_'): # it's a new tag
# check that not arleady created meanwhile or maybe excluded by the limit on the search
tag_ids = Tag.search([('name', '=', tag[1:])])
if tag_ids:
existing_keep.append(int(tag_ids[0]))
else:
# check if user have Karma needed to create need tag
user = User.sudo().browse(self._uid)
if user.exists() and user.karma >= self.karma_retag:
post_tags.append((0, 0, {'name': tag[1:], 'forum_id': self.id}))
else:
existing_keep.append(int(tag))
post_tags.insert(0, [6, 0, existing_keep])
return post_tags
def get_tags_first_char(self):
""" get set of first letter of forum tags """
tags = self.env['forum.tag'].search([('forum_id', '=', self.id), ('posts_count', '>', 0)])
return sorted(set([tag.name[0].upper() for tag in tags]))
class Post(models.Model):
_name = 'forum.post'
_description = 'Forum Post'
_inherit = ['mail.thread', 'website.seo.metadata']
_order = "is_correct DESC, vote_count DESC, write_date DESC"
name = fields.Char('Title')
forum_id = fields.Many2one('forum.forum', string='Forum', required=True)
content = fields.Html('Content', strip_style=True)
plain_content = fields.Text('Plain Content', compute='_get_plain_content', store=True)
content_link = fields.Char('URL', help="URL of Link Articles")
tag_ids = fields.Many2many('forum.tag', 'forum_tag_rel', 'forum_id', 'forum_tag_id', string='Tags')
state = fields.Selection([('active', 'Active'), ('pending', 'Waiting Validation'), ('close', 'Close'), ('offensive', 'Offensive'), ('flagged', 'Flagged')], string='Status', default='active')
views = fields.Integer('Number of Views', default=0)
active = fields.Boolean('Active', default=True)
post_type = fields.Selection([
('question', 'Question'),
('link', 'Article'),
('discussion', 'Discussion')],
string='Type', default='question', required=True)
website_message_ids = fields.One2many(
'mail.message', 'res_id',
domain=lambda self: ['&', ('model', '=', self._name), ('message_type', 'in', ['email', 'comment'])],
string='Post Messages', help="Comments on forum post",
)
# history
create_date = fields.Datetime('Asked on', select=True, readonly=True)
create_uid = fields.Many2one('res.users', string='Created by', select=True, readonly=True)
write_date = fields.Datetime('Update on', select=True, readonly=True)
bump_date = fields.Datetime('Bumped on', readonly=True,
help="Technical field allowing to bump a question. Writing on this field will trigger "
"a write on write_date and therefore bump the post. Directly writing on write_date "
"is currently not supported and this field is a workaround.")
write_uid = fields.Many2one('res.users', string='Updated by', select=True, readonly=True)
relevancy = fields.Float('Relevance', compute="_compute_relevancy", store=True)
# vote
vote_ids = fields.One2many('forum.post.vote', 'post_id', string='Votes')
user_vote = fields.Integer('My Vote', compute='_get_user_vote')
vote_count = fields.Integer('Votes', compute='_get_vote_count', store=True)
# favorite
favourite_ids = fields.Many2many('res.users', string='Favourite')
user_favourite = fields.Boolean('Is Favourite', compute='_get_user_favourite')
favourite_count = fields.Integer('Favorite Count', compute='_get_favorite_count', store=True)
# hierarchy
is_correct = fields.Boolean('Correct', help='Correct answer or answer accepted')
parent_id = fields.Many2one('forum.post', string='Question', ondelete='cascade')
self_reply = fields.Boolean('Reply to own question', compute='_is_self_reply', store=True)
child_ids = fields.One2many('forum.post', 'parent_id', string='Answers')
child_count = fields.Integer('Number of answers', compute='_get_child_count', store=True)
uid_has_answered = fields.Boolean('Has Answered', compute='_get_uid_has_answered')
has_validated_answer = fields.Boolean('Is answered', compute='_get_has_validated_answer', store=True)
# offensive moderation tools
flag_user_id = fields.Many2one('res.users', string='Flagged by')
moderator_id = fields.Many2one('res.users', string='Reviewed by', readonly=True)
# closing
closed_reason_id = fields.Many2one('forum.post.reason', string='Reason')
closed_uid = fields.Many2one('res.users', string='Closed by', select=1)
closed_date = fields.Datetime('Closed on', readonly=True)
# karma calculation and access
karma_accept = fields.Integer('Convert comment to answer', compute='_get_post_karma_rights')
karma_edit = fields.Integer('Karma to edit', compute='_get_post_karma_rights')
karma_close = fields.Integer('Karma to close', compute='_get_post_karma_rights')
karma_unlink = fields.Integer('Karma to unlink', compute='_get_post_karma_rights')
karma_comment = fields.Integer('Karma to comment', compute='_get_post_karma_rights')
karma_comment_convert = fields.Integer('Karma to convert comment to answer', compute='_get_post_karma_rights')
karma_flag = fields.Integer('Flag a post as offensive', compute='_get_post_karma_rights')
can_ask = fields.Boolean('Can Ask', compute='_get_post_karma_rights')
can_answer = fields.Boolean('Can Answer', compute='_get_post_karma_rights')
can_accept = fields.Boolean('Can Accept', compute='_get_post_karma_rights')
can_edit = fields.Boolean('Can Edit', compute='_get_post_karma_rights')
can_close = fields.Boolean('Can Close', compute='_get_post_karma_rights')
can_unlink = fields.Boolean('Can Unlink', compute='_get_post_karma_rights')
can_upvote = fields.Boolean('Can Upvote', compute='_get_post_karma_rights')
can_downvote = fields.Boolean('Can Downvote', compute='_get_post_karma_rights')
can_comment = fields.Boolean('Can Comment', compute='_get_post_karma_rights')
can_comment_convert = fields.Boolean('Can Convert to Comment', compute='_get_post_karma_rights')
can_view = fields.Boolean('Can View', compute='_get_post_karma_rights')
can_display_biography = fields.Boolean("Is the author's biography visible from his post", compute='_get_post_karma_rights')
can_post = fields.Boolean('Can Automatically be Validated', compute='_get_post_karma_rights')
can_flag = fields.Boolean('Can Flag', compute='_get_post_karma_rights')
can_moderate = fields.Boolean('Can Moderate', compute='_get_post_karma_rights')
@api.one
@api.depends('content')
def _get_plain_content(self):
self.plain_content = tools.html2plaintext(self.content)[0:500] if self.content else False
@api.one
@api.depends('vote_count', 'forum_id.relevancy_post_vote', 'forum_id.relevancy_time_decay')
def _compute_relevancy(self):
if self.create_date:
days = (datetime.today() - datetime.strptime(self.create_date, tools.DEFAULT_SERVER_DATETIME_FORMAT)).days
self.relevancy = math.copysign(1, self.vote_count) * (abs(self.vote_count - 1) ** self.forum_id.relevancy_post_vote / (days + 2) ** self.forum_id.relevancy_time_decay)
else:
self.relevancy = 0
@api.multi
def _get_user_vote(self):
votes = self.env['forum.post.vote'].search_read([('post_id', 'in', self._ids), ('user_id', '=', self._uid)], ['vote', 'post_id'])
mapped_vote = dict([(v['post_id'][0], v['vote']) for v in votes])
for vote in self:
vote.user_vote = mapped_vote.get(vote.id, 0)
@api.multi
@api.depends('vote_ids.vote')
def _get_vote_count(self):
read_group_res = self.env['forum.post.vote'].read_group([('post_id', 'in', self._ids)], ['post_id', 'vote'], ['post_id', 'vote'], lazy=False)
result = dict.fromkeys(self._ids, 0)
for data in read_group_res:
result[data['post_id'][0]] += data['__count'] * int(data['vote'])
for post in self:
post.vote_count = result[post.id]
@api.one
def _get_user_favourite(self):
self.user_favourite = self._uid in self.favourite_ids.ids
@api.one
@api.depends('favourite_ids')
def _get_favorite_count(self):
self.favourite_count = len(self.favourite_ids)
@api.one
@api.depends('create_uid', 'parent_id')
def _is_self_reply(self):
self.self_reply = self.parent_id.create_uid.id == self._uid
@api.one
@api.depends('child_ids.create_uid', 'website_message_ids')
def _get_child_count(self):
def process(node):
total = len(node.website_message_ids) + len(node.child_ids)
for child in node.child_ids:
total += process(child)
return total
self.child_count = process(self)
@api.one
def _get_uid_has_answered(self):
self.uid_has_answered = any(answer.create_uid.id == self._uid for answer in self.child_ids)
@api.one
@api.depends('child_ids.is_correct')
def _get_has_validated_answer(self):
self.has_validated_answer = any(answer.is_correct for answer in self.child_ids)
@api.multi
def _get_post_karma_rights(self):
user = self.env.user
is_admin = user.id == SUPERUSER_ID
# sudoed recordset instead of individual posts so values can be
# prefetched in bulk
for post, post_sudo in itertools.izip(self, self.sudo()):
is_creator = post.create_uid == user
post.karma_accept = post.forum_id.karma_answer_accept_own if post.parent_id.create_uid == user else post.forum_id.karma_answer_accept_all
post.karma_edit = post.forum_id.karma_edit_own if is_creator else post.forum_id.karma_edit_all
post.karma_close = post.forum_id.karma_close_own if is_creator else post.forum_id.karma_close_all
post.karma_unlink = post.forum_id.karma_unlink_own if is_creator else post.forum_id.karma_unlink_all
post.karma_comment = post.forum_id.karma_comment_own if is_creator else post.forum_id.karma_comment_all
post.karma_comment_convert = post.forum_id.karma_comment_convert_own if is_creator else post.forum_id.karma_comment_convert_all
post.can_ask = is_admin or user.karma >= post.forum_id.karma_ask
post.can_answer = is_admin or user.karma >= post.forum_id.karma_answer
post.can_accept = is_admin or user.karma >= post.karma_accept
post.can_edit = is_admin or user.karma >= post.karma_edit
post.can_close = is_admin or user.karma >= post.karma_close
post.can_unlink = is_admin or user.karma >= post.karma_unlink
post.can_upvote = is_admin or user.karma >= post.forum_id.karma_upvote
post.can_downvote = is_admin or user.karma >= post.forum_id.karma_downvote
post.can_comment = is_admin or user.karma >= post.karma_comment
post.can_comment_convert = is_admin or user.karma >= post.karma_comment_convert
post.can_view = is_admin or user.karma >= post.karma_close or post_sudo.create_uid.karma > 0
post.can_display_biography = is_admin or post_sudo.create_uid.karma >= post.forum_id.karma_user_bio
post.can_post = is_admin or user.karma >= post.forum_id.karma_post
post.can_flag = is_admin or user.karma >= post.forum_id.karma_flag
post.can_moderate = is_admin or user.karma >= post.forum_id.karma_moderate
@api.one
@api.constrains('post_type', 'forum_id')
def _check_post_type(self):
if (self.post_type == 'question' and not self.forum_id.allow_question) \
or (self.post_type == 'discussion' and not self.forum_id.allow_discussion) \
or (self.post_type == 'link' and not self.forum_id.allow_link):
raise UserError(_('This forum does not allow %s' % self.post_type))
def _update_content(self, content, forum_id):
forum = self.env['forum.forum'].browse(forum_id)
if content and self.env.user.karma < forum.karma_dofollow:
for match in re.findall(r'<a\s.*href=".*?">', content):
content = re.sub(match, match[:3] + 'rel="nofollow" ' + match[3:], content)
if self.env.user.karma <= forum.karma_editor:
filter_regexp = r'(<img.*?>)|(<a[^>]*?href[^>]*?>)|(<[a-z|A-Z]+[^>]*style\s*=\s*[\'"][^\'"]*\s*background[^:]*:[^url;]*url)'
content_match = re.search(filter_regexp, content, re.I)
if content_match:
raise KarmaError('User karma not sufficient to post an image or link.')
return content
@api.model
def create(self, vals):
if 'content' in vals and vals.get('forum_id'):
vals['content'] = self._update_content(vals['content'], vals['forum_id'])
post = super(Post, self.with_context(mail_create_nolog=True)).create(vals)
# deleted or closed questions
if post.parent_id and (post.parent_id.state == 'close' or post.parent_id.active is False):
raise UserError(_('Posting answer on a [Deleted] or [Closed] question is not possible'))
# karma-based access
if not post.parent_id and not post.can_ask:
raise KarmaError('Not enough karma to create a new question')
elif post.parent_id and not post.can_answer:
raise KarmaError('Not enough karma to answer to a question')
if not post.parent_id and not post.can_post:
post.state = 'pending'
# add karma for posting new questions
if not post.parent_id and post.state == 'active':
self.env.user.sudo().add_karma(post.forum_id.karma_gen_question_new)
post.post_notification()
return post
@api.model
def check_mail_message_access(self, res_ids, operation, model_name=None):
if operation in ('write', 'unlink') and (not model_name or model_name == 'forum.post'):
# Make sure only author or moderator can edit/delete messages
if any(not post.can_edit for post in self.browse(res_ids)):
raise KarmaError('Not enough karma to edit a post.')
return super(Post, self).check_mail_message_access(res_ids, operation, model_name=model_name)
@api.multi
@api.depends('name', 'post_type')
def name_get(self):
result = []
for post in self:
if post.post_type == 'discussion' and post.parent_id and not post.name:
result.append((post.id, '%s (%s)' % (post.parent_id.name, post.id)))
else:
result.append((post.id, '%s' % (post.name)))
return result
@api.multi
def write(self, vals):
if 'content' in vals:
vals['content'] = self._update_content(vals['content'], self.forum_id.id)
if 'state' in vals:
if vals['state'] in ['active', 'close'] and any(not post.can_close for post in self):
raise KarmaError('Not enough karma to close or reopen a post.')
if 'active' in vals:
if any(not post.can_unlink for post in self):
raise KarmaError('Not enough karma to delete or reactivate a post')
if 'is_correct' in vals:
if any(not post.can_accept for post in self):
raise KarmaError('Not enough karma to accept or refuse an answer')
# update karma except for self-acceptance
mult = 1 if vals['is_correct'] else -1
for post in self:
if vals['is_correct'] != post.is_correct and post.create_uid.id != self._uid:
post.create_uid.sudo().add_karma(post.forum_id.karma_gen_answer_accepted * mult)
self.env.user.sudo().add_karma(post.forum_id.karma_gen_answer_accept * mult)
if any(key not in ['state', 'active', 'is_correct', 'closed_uid', 'closed_date', 'closed_reason_id'] for key in vals.keys()) and any(not post.can_edit for post in self):
raise KarmaError('Not enough karma to edit a post.')
res = super(Post, self).write(vals)
# if post content modify, notify followers
if 'content' in vals or 'name' in vals:
for post in self:
if post.parent_id:
body, subtype = _('Answer Edited'), 'website_forum.mt_answer_edit'
obj_id = post.parent_id
else:
body, subtype = _('Question Edited'), 'website_forum.mt_question_edit'
obj_id = post
obj_id.message_post(body=body, subtype=subtype)
return res
@api.multi
def post_notification(self):
base_url = self.env['ir.config_parameter'].get_param('web.base.url')
for post in self:
if post.state == 'active' and post.parent_id:
body = _(
'<p>A new answer for <i>%s</i> has been posted. <a href="%s/forum/%s/question/%s">Click here to access the post.</a></p>' %
(post.parent_id.name, base_url, slug(post.parent_id.forum_id), slug(post.parent_id))
)
post.parent_id.message_post(subject=_('Re: %s') % post.parent_id.name, body=body, subtype='website_forum.mt_answer_new')
elif post.state == 'active' and not post.parent_id:
body = _(
'<p>A new question <i>%s</i> has been asked on %s. <a href="%s/forum/%s/question/%s">Click here to access the question.</a></p>' %
(post.name, post.forum_id.name, base_url, slug(post.forum_id), slug(post))
)
post.message_post(subject=post.name, body=body, subtype='website_forum.mt_question_new')
elif post.state == 'pending' and not post.parent_id:
# TDE FIXME: in master, you should probably use a subtype;
# however here we remove subtype but set partner_ids
partners = post.sudo().message_partner_ids.filtered(lambda partner: partner.user_ids and partner.user_ids.karma >= post.forum_id.karma_moderate)
note_subtype = self.sudo().env.ref('mail.mt_note')
body = _(
'<p>A new question <i>%s</i> has been asked on %s and require your validation. <a href="%s/forum/%s/question/%s">Click here to access the question.</a></p>' %
(post.name, post.forum_id.name, base_url, slug(post.forum_id), slug(post))
)
post.message_post(subject=post.name, body=body, subtype_id=note_subtype.id, partner_ids=partners.ids)
return True
@api.multi
def reopen(self):
if any(post.parent_id or post.state != 'close' for post in self):
return False
reason_offensive = self.env.ref('website_forum.reason_7')
reason_spam = self.env.ref('website_forum.reason_8')
for post in self:
if post.closed_reason_id in (reason_offensive, reason_spam):
_logger.info('Upvoting user <%s>, reopening spam/offensive question',
post.create_uid)
post.create_uid.sudo().add_karma(post.forum_id.karma_gen_answer_flagged * -1)
self.sudo().write({'state': 'active'})
@api.multi
def close(self, reason_id):
if any(post.parent_id for post in self):
return False
reason_offensive = self.env.ref('website_forum.reason_7').id
reason_spam = self.env.ref('website_forum.reason_8').id
if reason_id in (reason_offensive, reason_spam):
for post in self:
_logger.info('Downvoting user <%s> for posting spam/offensive contents',
post.create_uid)
post.create_uid.sudo().add_karma(post.forum_id.karma_gen_answer_flagged)
self.write({
'state': 'close',
'closed_uid': self._uid,
'closed_date': datetime.today().strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT),
'closed_reason_id': reason_id,
})
return True
@api.one
def validate(self):
if not self.can_moderate:
raise KarmaError('Not enough karma to validate a post')
# if state == pending, no karma previously added for the new question
if self.state == 'pending':
self.create_uid.sudo().add_karma(self.forum_id.karma_gen_question_new)
self.write({
'state': 'active',
'active': True,
'moderator_id': self.env.user.id,
})
self.post_notification()
return True
@api.one
def refuse(self):
if not self.can_moderate:
raise KarmaError('Not enough karma to refuse a post')
self.moderator_id = self.env.user
return True
@api.one
def flag(self):
if not self.can_flag:
raise KarmaError('Not enough karma to flag a post')
if(self.state == 'flagged'):
return {'error': 'post_already_flagged'}
elif(self.state == 'active'):
self.write({
'state': 'flagged',
'flag_user_id': self.env.user.id,
})
return self.can_moderate and {'success': 'post_flagged_moderator'} or {'success': 'post_flagged_non_moderator'}
else:
return {'error': 'post_non_flaggable'}
@api.one
def mark_as_offensive(self, reason_id):
if not self.can_moderate:
raise KarmaError('Not enough karma to mark a post as offensive')
# remove some karma
_logger.info('Downvoting user <%s> for posting spam/offensive contents', self.create_uid)
self.create_uid.sudo().add_karma(self.forum_id.karma_gen_answer_flagged)
self.write({
'state': 'offensive',
'moderator_id': self.env.user.id,
'closed_date': datetime.today().strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT),
'closed_reason_id': reason_id,
'active': False,
})
return True
@api.multi
def unlink(self):
if any(not post.can_unlink for post in self):
raise KarmaError('Not enough karma to unlink a post')
# if unlinking an answer with accepted answer: remove provided karma
for post in self:
if post.is_correct:
post.create_uid.sudo().add_karma(post.forum_id.karma_gen_answer_accepted * -1)
self.env.user.sudo().add_karma(post.forum_id.karma_gen_answer_accepted * -1)
return super(Post, self).unlink()
@api.multi
def bump(self):
""" Bump a question: trigger a write_date by writing on a dummy bump_date
field. One cannot bump a question more than once every 10 days. """
self.ensure_one()
if self.forum_id.allow_bump and not self.child_ids and (datetime.today() - datetime.strptime(self.write_date, tools.DEFAULT_SERVER_DATETIME_FORMAT)).days > 9:
# write through super to bypass karma; sudo to allow public user to bump any post
return self.sudo().write({'bump_date': fields.Datetime.now()})
return False
@api.multi
def vote(self, upvote=True):
Vote = self.env['forum.post.vote']
vote_ids = Vote.search([('post_id', 'in', self._ids), ('user_id', '=', self._uid)])
new_vote = '1' if upvote else '-1'
voted_forum_ids = set()
if vote_ids:
for vote in vote_ids:
if upvote:
new_vote = '0' if vote.vote == '-1' else '1'
else:
new_vote = '0' if vote.vote == '1' else '-1'
vote.vote = new_vote
voted_forum_ids.add(vote.post_id.id)
for post_id in set(self._ids) - voted_forum_ids:
for post_id in self._ids:
Vote.create({'post_id': post_id, 'vote': new_vote})
return {'vote_count': self.vote_count, 'user_vote': new_vote}
@api.one
def convert_answer_to_comment(self):
""" Tools to convert an answer (forum.post) to a comment (mail.message).
The original post is unlinked and a new comment is posted on the question
using the post create_uid as the comment's author. """
if not self.parent_id:
return False
# karma-based action check: use the post field that computed own/all value
if not self.can_comment_convert:
raise KarmaError('Not enough karma to convert an answer to a comment')
# post the message
question = self.parent_id
values = {
'author_id': self.sudo().create_uid.partner_id.id, # use sudo here because of access to res.users model
'body': tools.html_sanitize(self.content, strict=True, strip_style=True, strip_classes=True),
'message_type': 'comment',
'subtype': 'mail.mt_comment',
'date': self.create_date,
}
new_message = self.browse(question.id).with_context(mail_create_nosubscribe=True).message_post(**values)
# unlink the original answer, using SUPERUSER_ID to avoid karma issues
self.sudo().unlink()
return new_message
@api.model
def convert_comment_to_answer(self, message_id, default=None):
""" Tool to convert a comment (mail.message) into an answer (forum.post).
The original comment is unlinked and a new answer from the comment's author
is created. Nothing is done if the comment's author already answered the
question. """
comment = self.env['mail.message'].sudo().browse(message_id)
post = self.browse(comment.res_id)
if not comment.author_id or not comment.author_id.user_ids: # only comment posted by users can be converted
return False
# karma-based action check: must check the message's author to know if own / all
karma_convert = comment.author_id.id == self.env.user.partner_id.id and post.forum_id.karma_comment_convert_own or post.forum_id.karma_comment_convert_all
can_convert = self.env.user.karma >= karma_convert
if not can_convert:
raise KarmaError('Not enough karma to convert a comment to an answer')
# check the message's author has not already an answer
question = post.parent_id if post.parent_id else post
post_create_uid = comment.author_id.user_ids[0]
if any(answer.create_uid.id == post_create_uid.id for answer in question.child_ids):
return False
# create the new post
post_values = {
'forum_id': question.forum_id.id,
'content': comment.body,
'parent_id': question.id,
}
# done with the author user to have create_uid correctly set
new_post = self.sudo(post_create_uid.id).create(post_values)
# delete comment
comment.unlink()
return new_post
@api.one
def unlink_comment(self, message_id):
user = self.env.user
comment = self.env['mail.message'].sudo().browse(message_id)
if not comment.model == 'forum.post' or not comment.res_id == self.id:
return False
# karma-based action check: must check the message's author to know if own or all
karma_unlink = comment.author_id.id == user.partner_id.id and self.forum_id.karma_comment_unlink_own or self.forum_id.karma_comment_unlink_all
can_unlink = user.karma >= karma_unlink
if not can_unlink:
raise KarmaError('Not enough karma to unlink a comment')
return comment.unlink()
@api.multi
def set_viewed(self):
self._cr.execute("""UPDATE forum_post SET views = views+1 WHERE id IN %s""", (self._ids,))
return True
@api.multi
def get_access_action(self):
""" Override method that generated the link to access the document. Instead
of the classic form view, redirect to the post on the website directly """
self.ensure_one()
return {
'type': 'ir.actions.act_url',
'url': '/forum/%s/question/%s' % (self.forum_id.id, self.id),
'target': 'self',
'res_id': self.id,
}
@api.multi
def _notification_get_recipient_groups(self, message, recipients):
""" Override to set the access button: everyone can see an access button
on their notification email. It will lead on the website view of the
post. """
res = super(Post, self)._notification_get_recipient_groups(message, recipients)
access_action = self._notification_link_helper('view', model=message.model, res_id=message.res_id)
for category, data in res.iteritems():
res[category]['button_access'] = {'url': access_action, 'title': '%s %s' % (_('View'), self.post_type)}
return res
@api.cr_uid_ids_context
def message_post(self, cr, uid, thread_id, message_type='notification', subtype=None, context=None, **kwargs):
if thread_id and message_type == 'comment': # user comments have a restriction on karma
if isinstance(thread_id, (list, tuple)):
post_id = thread_id[0]
else:
post_id = thread_id
post = self.browse(cr, uid, post_id, context=context)
# TDE FIXME: trigger browse because otherwise the function field is not compted - check with RCO
tmp1, tmp2 = post.karma_comment, post.can_comment
user = self.pool['res.users'].browse(cr, uid, uid)
tmp3 = user.karma
# TDE END FIXME
if not post.can_comment:
raise KarmaError('Not enough karma to comment')
return super(Post, self).message_post(cr, uid, thread_id, message_type=message_type, subtype=subtype, context=context, **kwargs)
class PostReason(models.Model):
_name = "forum.post.reason"
_description = "Post Closing Reason"
_order = 'name'
name = fields.Char(string='Closing Reason', required=True, translate=True)
reason_type = fields.Char(string='Reason Type')
class Vote(models.Model):
_name = 'forum.post.vote'
_description = 'Vote'
post_id = fields.Many2one('forum.post', string='Post', ondelete='cascade', required=True)
user_id = fields.Many2one('res.users', string='User', required=True, default=lambda self: self._uid)
vote = fields.Selection([('1', '1'), ('-1', '-1'), ('0', '0')], string='Vote', required=True, default='1')
create_date = fields.Datetime('Create Date', select=True, readonly=True)
forum_id = fields.Many2one('forum.forum', string='Forum', related="post_id.forum_id", store=True)
recipient_id = fields.Many2one('res.users', string='To', related="post_id.create_uid", store=True)
def _get_karma_value(self, old_vote, new_vote, up_karma, down_karma):
_karma_upd = {
'-1': {'-1': 0, '0': -1 * down_karma, '1': -1 * down_karma + up_karma},
'0': {'-1': 1 * down_karma, '0': 0, '1': up_karma},
'1': {'-1': -1 * up_karma + down_karma, '0': -1 * up_karma, '1': 0}
}
return _karma_upd[old_vote][new_vote]
@api.model
def create(self, vals):
vote = super(Vote, self).create(vals)
# own post check
if vote.user_id.id == vote.post_id.create_uid.id:
raise UserError(_('Not allowed to vote for its own post'))
# karma check
if vote.vote == '1' and not vote.post_id.can_upvote:
raise KarmaError('Not enough karma to upvote.')
elif vote.vote == '-1' and not vote.post_id.can_downvote:
raise KarmaError('Not enough karma to downvote.')
if vote.post_id.parent_id:
karma_value = self._get_karma_value('0', vote.vote, vote.forum_id.karma_gen_answer_upvote, vote.forum_id.karma_gen_answer_downvote)
else:
karma_value = self._get_karma_value('0', vote.vote, vote.forum_id.karma_gen_question_upvote, vote.forum_id.karma_gen_question_downvote)
vote.recipient_id.sudo().add_karma(karma_value)
return vote
@api.multi
def write(self, values):
if 'vote' in values:
for vote in self:
# own post check
if vote.user_id.id == vote.post_id.create_uid.id:
raise UserError(_('Not allowed to vote for its own post'))
# karma check
if (values['vote'] == '1' or vote.vote == '-1' and values['vote'] == '0') and not vote.post_id.can_upvote:
raise KarmaError('Not enough karma to upvote.')
elif (values['vote'] == '-1' or vote.vote == '1' and values['vote'] == '0') and not vote.post_id.can_downvote:
raise KarmaError('Not enough karma to downvote.')
# karma update
if vote.post_id.parent_id:
karma_value = self._get_karma_value(vote.vote, values['vote'], vote.forum_id.karma_gen_answer_upvote, vote.forum_id.karma_gen_answer_downvote)
else:
karma_value = self._get_karma_value(vote.vote, values['vote'], vote.forum_id.karma_gen_question_upvote, vote.forum_id.karma_gen_question_downvote)
vote.recipient_id.sudo().add_karma(karma_value)
res = super(Vote, self).write(values)
return res
class Tags(models.Model):
_name = "forum.tag"
_description = "Forum Tag"
_inherit = ['website.seo.metadata']
name = fields.Char('Name', required=True)
create_uid = fields.Many2one('res.users', string='Created by', readonly=True)
forum_id = fields.Many2one('forum.forum', string='Forum', required=True)
post_ids = fields.Many2many('forum.post', 'forum_tag_rel', 'forum_tag_id', 'forum_id', string='Posts')
posts_count = fields.Integer('Number of Posts', compute='_get_posts_count', store=True)
_sql_constraints = [
('name_uniq', 'unique (name, forum_id)', "Tag name already exists !"),
]
@api.multi
@api.depends("post_ids.tag_ids")
def _get_posts_count(self):
for tag in self:
tag.posts_count = len(tag.post_ids)
``` |
[
{
"content": "Repeat the code precisely:\n```python\n#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n__author__ = 'Michal Szczepanski'\n\n\nfrom sqlalchemy.sql.schema import Column, ForeignKey\nfrom sqlalchemy.orm import relationship\nfrom sqlalchemy.ext.declarative import declarative_base\nfrom sqlalchemy impo... | [
{
"content": "Repeat the code precisely:\n<|memory_start|>```python\n#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n__author__ = 'Michal Szczepanski'\n\n\nfrom sqlalchemy.sql.schema import Column, ForeignKey\nfrom sqlalchemy.orm import relationship\nfrom sqlalchemy.ext.declarative import declarative_base\nfrom... | ```python
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Michal Szczepanski'
from sqlalchemy.sql.schema import Column, ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Integer, String, Boolean, Binary
Base = declarative_base()
class DatabaseConnection(Base):
__tablename__ = 'database_connection'
connection_name = Column(String, unique=True, primary_key=True)
name = Column(String)
host = Column(String)
port = Column(Integer)
username = Column(String)
password = Column(String)
database = Column(String)
'''
class SSHConnection(Base):
__tablename__ = 'ssh_connection'
id = Column(Integer, primary_key=True)
name = Column(String)
host = Column(String)
port = Column(String)
auth_id = Column(Integer, ForeignKey('ssh_connection_auth.id'))
class SSHConnectionAuth(Base):
__tablename__ = 'ssh_connection_auth'
id = Column(Integer, primary_key=True)
key = Column(Boolean, default=False)
key_data = Column(Binary)
username = Column(String)
password = Column(String)
connections = relationship('SSHConnection')
'''
``` |
[
{
"content": "Reconstruct the code exactly:\n```python\n# -*- coding: utf-8 -*-\n\n\"\"\"setup.py: setuptools control.\"\"\"\n\nimport re\nfrom setuptools import setup\n\n#import sys\n#if not sys.version_info[0] == 3:\n# print(\"\\n \\\n# sys.exit(\"\\n \\\n# *********************************... | [
{
"content": "Reconstruct the code exactly:\n<|memory_start|>```python\n# -*- coding: utf-8 -*-\n\n\"\"\"setup.py: setuptools control.\"\"\"\n\nimport re\nfrom setuptools import setup\n\n#import sys\n#if not sys.version_info[0] == 3:\n# print(\"\\n \\\n# sys.exit(\"\\n \\\n# *****************... | ```python
# -*- coding: utf-8 -*-
"""setup.py: setuptools control."""
import re
from setuptools import setup
#import sys
#if not sys.version_info[0] == 3:
# print("\n \
# sys.exit("\n \
# ****************************************************************\n \
# * The CLI has only been tested with Python 3+ at this time. *\n \
# * Report any issues with Python 2 by emailing help@pipeline.io *\n \
# ****************************************************************\n")
version = re.search(
'^__version__\s*=\s*"(.*)"',
open('pipeline_models/__init__.py').read(),
re.M
).group(1)
setup(
name = "pipeline-models",
packages = ["pipeline_models"],
version = version,
description = "PipelineAI Models",
long_description = "PipelineAI Models",
author = "Chris Fregly",
author_email = "chris@pipeline.io",
url = "https://github.com/fluxcapacitor/pipeline/lib/models",
install_requires=[
],
dependency_links=[
]
)
``` |
[
{
"content": "Write the code verbatim:\n```python\n# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-\n#\n# Copyright (C) 2016-2019 Canonical Ltd\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License version 3 as\n# published by ... | [
{
"content": "Write the code verbatim:\n<|memory_start|>```python\n# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-\n#\n# Copyright (C) 2016-2019 Canonical Ltd\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License version 3 as\... | ```python
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2016-2019 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from testtools.matchers import Equals, FileExists
from tests import integration
class ChannelClosingTestCase(integration.StoreTestCase):
def test_missing_permission(self):
self.addCleanup(self.logout)
self.login()
expected = (
"Make sure the logged in account has upload permissions on "
"'missing' in series '16'."
)
status = self.close("missing", "beta", expected=expected)
self.assertThat(status, Equals(2))
def test_close_channel(self):
self.addCleanup(self.logout)
self.login()
# Change to a random name and version when not on the fake store.
if not self.is_store_fake():
name = self.get_unique_name()
version = self.get_unique_version()
# If not, keep the name that is faked in our fake account.
else:
name = "basic"
version = "1.0"
self.copy_project_to_cwd("basic")
self.update_name_and_version(name, version)
self.run_snapcraft("snap")
# Register the snap
self.register(name)
# Upload the snap
snap_file_path = "{}_{}_{}.snap".format(name, version, "all")
self.assertThat(os.path.join(snap_file_path), FileExists())
self.assertThat(self.push(snap_file_path, release="edge,beta"), Equals(0))
expected = "The beta channel is now closed."
status = self.close(name, "beta", expected=expected)
self.assertThat(status, Equals(0))
``` |
[
{
"content": "```python\n# -*- coding: utf-8 -*-\n\"\"\" crypto.passwords.passwordfactory\n\n Python classes to create and recover passwords. Currently contains\n simple password generation. <need to merge the dictionary based pws>\n\n Copyright © (c) 2002 by Paul A. Lambert\n Read LICENSE.txt for... | [
{
"content": "<|memory_start|>```python\n# -*- coding: utf-8 -*-\n\"\"\" crypto.passwords.passwordfactory\n\n Python classes to create and recover passwords. Currently contains\n simple password generation. <need to merge the dictionary based pws>\n\n Copyright © (c) 2002 by Paul A. Lambert\n Read... | ```python
# -*- coding: utf-8 -*-
""" crypto.passwords.passwordfactory
Python classes to create and recover passwords. Currently contains
simple password generation. <need to merge the dictionary based pws>
Copyright © (c) 2002 by Paul A. Lambert
Read LICENSE.txt for license information.
August 14, 2002
"""
from random import Random
from sha import sha # the SHA1 algorithm for cryptographic hashing
from math import log, ceil
#from binascii_plus import b2a_p
class PasswordFactory:
""" Make passwords using pseudo random seeds.
Also used to recover passwords by using same pwSeed.
If the seed is not saved, the password can not be recovered!!
"""
def __init__(self, pwFactorySeed, minSize=10, maxSize=10 ):
""" An abstract class to create passwords """
self._factorySeed = pwFactorySeed
self.minSize = minSize
self.maxSize = maxSize
self.rand = Random( self._factorySeed )
def getPassword(self, pwSeed):
raise "MUST be overloaded"
def __call__(self, pwSeed):
""" Create a new password as a 'call' """
return self.getPassword(pwSeed)
def entropy(self):
""" Calculate the security of the password generation as a power of 2 """
total = 0
for pwSize in range(self.minSize, self.maxSize+1):
total = total + self.passwordsForSize(pwSize)
return powof2( total )
def powof2(x):
""" Convert x to a power of 2 """
return log(x)/log(2)
class PasswordFactorySimple(PasswordFactory):
""" This class implements a very secure but simple selection of numbers and letters.
Some characters have been removed to prevent confusion between similar shapes
The removed characters are: (O,0,o), (l,1,I) , (u,v),(U,V)
"""
def __init__(self, pwFactorySeed, minSize=10, maxSize=10 ):
""" Initialize password generation """
PasswordFactory.__init__(self, pwFactorySeed, minSize, maxSize )
self.lettersReduced = 'abcdefghijkmnpqrstwxyzABCDEFGHJKLMNPQRSTWXYZ'
self.digitsReduced = '23456789'
self.specialCharacters = '#%*+$'
def getPassword(self, pwSeed):
""" Create a new password from pwSeed. """
self.rand.seed( pwSeed + 'getPassword' + self._factorySeed ) # reset prf sequence
self.passwordSize = self.rand.randrange(self.minSize, self.maxSize+1)
password = ''
for i in range(self.passwordSize):
password = password + self.rand.choice(self.lettersReduced+self.digitsReduced)
return password
def passwordsForSize(self,pwSize):
return (len(self.lettersReduced)+len(self.digitsReduced))**pwSize
consonants_01 = 'bcdfghjklmnpqrstvwxz'
vowels_01 = 'aeiouy'
class PasswordFactoryReadable_01(PasswordFactory):
""" Readable passwords created by alternating consonate/vowel/consonate ... etc.
"""
def getPassword(self, pwSeed):
""" Create a new password. Also used to recover passwords by using same pwSeed """
#self.rand.seed( 'getPassword'+self.__factorySeed+pwSeed ) # reset prf sequence
self.passwordSize = self.rand.randrange(self.minSize, self.maxSize+1)
password = ''
for i in range(self.passwordSize):
if i == 0 :
password = password + self.rand.choice(consonants_01)
else:
if password[-1] in consonants_01 :
password = password + self.rand.choice(vowels_01)
else:
password = password + self.rand.choice(consonants_01)
return password
def passwordsForSize(self,pwSize):
return (len(vowels_01)**(pwSize/2))*(len(consonants_01)**ceil(pwSize/2))
``` |
[
{
"content": "Here is a code file:\n```python\n# Copyright 2021 The FedLearner Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the 'License');\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.a... | [
{
"content": "Here is a code file:\n<|memory_start|>```python\n# Copyright 2021 The FedLearner Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the 'License');\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# ... | ```python
# Copyright 2021 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import logging
class _MiddlewareRegistry(object):
def __init__(self):
self.middlewares = []
def register(self, middleware):
self.middlewares.append(middleware)
_middleware_registry = _MiddlewareRegistry()
register = _middleware_registry.register
def init_app(app):
logging.info('Initializing app with middlewares')
# Wraps app with middlewares
for middleware in _middleware_registry.middlewares:
app = middleware(app)
return app
``` |
[
{
"content": "```python\n'''\nA MLP network for MNIST digits classification\n\nProject: https://github.com/roatienza/dl-keras\nUsage: python3 <this file>\n'''\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\n# numpy package\nimport numpy as np\... | [
{
"content": "<|memory_start|>```python\n'''\nA MLP network for MNIST digits classification\n\nProject: https://github.com/roatienza/dl-keras\nUsage: python3 <this file>\n'''\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\n# numpy package\nimp... | ```python
'''
A MLP network for MNIST digits classification
Project: https://github.com/roatienza/dl-keras
Usage: python3 <this file>
'''
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# numpy package
import numpy as np
from keras.models import Sequential
from keras.layers import Dense, Activation, Dropout
from keras.datasets import mnist
from keras.utils import to_categorical
# load mnist dataset
(x_train, y_train), (x_test, y_test) = mnist.load_data()
# compute the number of labels
num_labels = np.amax(y_train) + 1
# convert to one-hot vector
y_train = to_categorical(y_train)
y_test = to_categorical(y_test)
# image dimensions (assumed square)
image_size = x_train.shape[1]
input_size = image_size * image_size
# for mlp, the input dim is a vector, so we reshape
x_train = np.reshape(x_train, [-1, input_size])
# we train our network using float data
x_train = x_train.astype('float32') / 255
x_test = np.reshape(x_test, [-1, input_size])
x_test = x_test.astype('float32') / 255
# network parameters
batch_size = 128
hidden_units = 256
dropout = 0.45
# this is 3-layer MLP with ReLU. No regularizer
model = Sequential()
model.add(Dense(hidden_units,
input_dim=input_size))
model.add(Activation('relu'))
model.add(Dense(hidden_units,
))
model.add(Activation('relu'))
model.add(Dense(num_labels))
# this is the output for one-hot vector
model.add(Activation('softmax'))
model.summary()
# loss function for one-hot vector
# use of adam optimizer
# accuracy is good metric for classification tasks
model.compile(loss='categorical_crossentropy',
optimizer='adam',
metrics=['accuracy'])
# train the network
model.fit(x_train, y_train, epochs=20, batch_size=batch_size)
# validate the model on test dataset to determine generalization
score = model.evaluate(x_test, y_test, batch_size=batch_size)
print("\nTest accuracy: %.1f%%" % (100.0 * score[1]))
``` |
[
{
"content": "Repeat the code precisely:\n```python\n__author__ = 'yueli'\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport matplotlib as mpl\nfrom config.config import *\n\n# Import the targeted raw CSV file\nrawCSV_file1 = os.path.join(\n CSV_FILE_DESTDIR,\n 'For_different_5_VP',\n 'Delete... | [
{
"content": "Repeat the code precisely:\n<|memory_start|>```python\n__author__ = 'yueli'\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport matplotlib as mpl\nfrom config.config import *\n\n# Import the targeted raw CSV file\nrawCSV_file1 = os.path.join(\n CSV_FILE_DESTDIR,\n 'For_different_5_V... | ```python
__author__ = 'yueli'
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
from config.config import *
# Import the targeted raw CSV file
rawCSV_file1 = os.path.join(
CSV_FILE_DESTDIR,
'For_different_5_VP',
'Deleted_database',
'EID-153.16.47.16-MR-198.6.255.37',
"liege-EID-153.16.47.16-MR-198.6.255.37.log.csv"
)
rawCSV_file2 = os.path.join(
CSV_FILE_DESTDIR,
'For_different_5_VP',
'Deleted_database',
'EID-153.16.47.16-MR-198.6.255.37',
"temple-EID-153.16.47.16-MR-198.6.255.37.log.csv"
)
rawCSV_file3 = os.path.join(
CSV_FILE_DESTDIR,
'For_different_5_VP',
'Deleted_database',
'EID-153.16.47.16-MR-198.6.255.37',
"ucl-EID-153.16.47.16-MR-198.6.255.37.log.csv"
)
rawCSV_file4 = os.path.join(
CSV_FILE_DESTDIR,
'For_different_5_VP',
'Deleted_database',
'EID-153.16.47.16-MR-198.6.255.37',
"umass-EID-153.16.47.16-MR-198.6.255.37.log.csv"
)
rawCSV_file5 = os.path.join(
CSV_FILE_DESTDIR,
'For_different_5_VP',
'Deleted_database',
'EID-153.16.47.16-MR-198.6.255.37',
"wiilab-EID-153.16.47.16-MR-198.6.255.37.log.csv"
)
# Define a function to get the experiment number list from the CSV file
def getTime(rawCSV_file):
i = -1
for line in open(rawCSV_file):
i = i + 1
lines = line.split(";")
if lines[0] == "Round Type":
continue
else:
time.append(i)
return time
def getRlocSet(rawCSV_file):
i = -1
responseList = []
for line in open(rawCSV_file):
print line
i = i + 1
lines = line.split(";")
if lines[0] == "Round Type":
print "Round Type"
continue
else:
if lines[0] == "NegativeReply":
print "Done"
responseList.append(-1)
elif lines[0] == "RoundNoReply":
responseList.append(0)
elif lines[0] == "RoundNormal":
if int(lines[9]) == 1:
if lines[14].split(",")[1] == "195.59.156.123":
responseList.append(1)
elif lines[14].split(",")[1] == "195.59.156.124":
responseList.append(2)
else:
responseList.append(3)
else:
print "There are more than 2 RLOCs together"
else:
print "Unknown type exists"
return responseList
time = []
time = getTime(rawCSV_file1)
print "time", time
rlocSet1 = getRlocSet(rawCSV_file1)
print "rlocSet1:", rlocSet1.__len__()
rlocSet2= getRlocSet(rawCSV_file2)
print "rlocSet2:", rlocSet2.__len__()
rlocSet3 = getRlocSet(rawCSV_file3)
print "rlocSet3:", rlocSet3.__len__()
rlocSet4 = getRlocSet(rawCSV_file4)
print "rlocSet4:", rlocSet4.__len__()
rlocSet5 = getRlocSet(rawCSV_file5)
print "rlocSet5:", rlocSet5.__len__()
# Modify the size and dpi of picture, default size is (8,6), default dpi is 80
plt.gcf().set_size_inches(32, 17)
# Define font
font_label = {
'fontname' : 'Times New Roman',
'color' : 'black',
'fontsize' : 70
}
plt.scatter(time, rlocSet1, color='purple', marker="o", label="VP1", s=700)
plt.scatter(time, rlocSet2, color='green', marker='>', label="VP2", s=700)
plt.scatter(time, rlocSet3, color='red', marker=(5,0), label = "VP3", s=700)
plt.scatter(time, rlocSet4, color='orange', marker='*', label = "VP4", s=700)
plt.scatter(time, rlocSet5, color='blue', marker='+', label = "VP5", s=700)
response = np.linspace(-1, 2, 4)
plt.xlabel("experiment numbers", font_label)
plt.ylabel("different Map-Replies", font_label)
# plt.title("Map Replies over time for EID-153.16.47.16 from MR-198.6.255.37 in 5 VPs", fontsize=20)
plt.xlim(0,798)
# plt.xlim(550, 600)
plt.ylim(-2, 3)
plt.xticks(fontsize=45, fontname='Times New Roman')
plt.yticks(response, ('Negative\nMap-Reply', 'No Map-\nReply', 'RLOC 1', 'RLOC 2'), fontsize=45, fontname='Times New Roman')
# loc=1 makes legend locating at right-up;
# loc=2 makes legend locating at left-up;
# loc=3 makes legend locating at left-down
# loc=4 makes legend locating at right-down
# Just have one point in legend
mpl.rc('legend', scatterpoints=1)
mpl.rc('legend', fontsize=45)
mpl.rc('legend', markerscale=1.5)
plt.legend(loc=4)
plt.savefig(
os.path.join(PLOT_DIR, 'Plot_newSize', 'Plot_variable_VP_different_RLOCs.eps'), dpi=300, transparent=True)
# plt.show()
``` |
[
{
"content": "```python\n# -*- coding: utf-8 -*-\n#\n# This file is part of INSPIRE.\n# Copyright (C) 2014-2017 CERN.\n#\n# INSPIRE is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of... | [
{
"content": "<|memory_start|>```python\n# -*- coding: utf-8 -*-\n#\n# This file is part of INSPIRE.\n# Copyright (C) 2014-2017 CERN.\n#\n# INSPIRE is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, eit... | ```python
# -*- coding: utf-8 -*-
#
# This file is part of INSPIRE.
# Copyright (C) 2014-2017 CERN.
#
# INSPIRE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# INSPIRE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with INSPIRE. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this license, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""DoJSON rules for MARC fields in 0xx."""
from __future__ import absolute_import, division, print_function
import re
from collections import defaultdict
import pycountry
from dojson import utils
from idutils import is_arxiv_post_2007, is_doi, is_handle, normalize_doi
from inspire_schemas.api import load_schema
from inspire_schemas.utils import normalize_arxiv_category
from inspire_utils.helpers import force_list
from ..model import hep, hep2marc
from ...utils import force_single_element, normalize_isbn
RE_LANGUAGE = re.compile('\/| or | and |,|=|\s+')
@hep.over('isbns', '^020..')
@utils.for_each_value
def isbns(self, key, value):
"""Populate the ``isbns`` key."""
def _get_medium(value):
def _normalize(medium):
schema = load_schema('hep')
valid_media = schema['properties']['isbns']['items']['properties']['medium']['enum']
medium = medium.lower().replace('-', '').replace(' ', '')
if medium in valid_media:
return medium
elif medium == 'ebook':
return 'online'
elif medium == 'paperback':
return 'softcover'
return ''
medium = force_single_element(value.get('b', ''))
normalized_medium = _normalize(medium)
return normalized_medium
def _get_isbn(value):
a_value = force_single_element(value.get('a', ''))
normalized_a_value = a_value.replace('.', '')
if normalized_a_value:
return normalize_isbn(normalized_a_value)
return {
'medium': _get_medium(value),
'value': _get_isbn(value),
}
@hep2marc.over('020', 'isbns')
@utils.for_each_value
def isbns2marc(self, key, value):
"""Populate the ``020`` MARC field."""
return {
'a': value.get('value'),
'b': value.get('medium'),
}
@hep.over('dois', '^0247.')
def dois(self, key, value):
"""Populate the ``dois`` key.
Also populates the ``persistent_identifiers`` key through side effects.
"""
def _get_first_non_curator_source(sources):
sources_without_curator = [el for el in sources if el.upper() != 'CURATOR']
return force_single_element(sources_without_curator)
def _get_material(value):
MATERIAL_MAP = {
'ebook': 'publication',
}
q_value = force_single_element(value.get('q', ''))
normalized_q_value = q_value.lower()
return MATERIAL_MAP.get(normalized_q_value, normalized_q_value)
def _is_doi(id_, type_):
return (not type_ or type_.upper() == 'DOI') and is_doi(id_)
def _is_handle(id_, type_):
return (not type_ or type_.upper() == 'HDL') and is_handle(id_)
dois = self.get('dois', [])
persistent_identifiers = self.get('persistent_identifiers', [])
values = force_list(value)
for value in values:
id_ = force_single_element(value.get('a', ''))
material = _get_material(value)
schema = force_single_element(value.get('2', ''))
sources = force_list(value.get('9'))
source = _get_first_non_curator_source(sources)
if _is_doi(id_, schema):
dois.append({
'material': material,
'source': source,
'value': normalize_doi(id_),
})
else:
schema = 'HDL' if _is_handle(id_, schema) else schema
persistent_identifiers.append({
'material': material,
'schema': schema,
'source': source,
'value': id_,
})
self['persistent_identifiers'] = persistent_identifiers
return dois
@hep2marc.over('0247', '^dois$')
@utils.for_each_value
def dois2marc(self, key, value):
"""Populate the ``0247`` MARC field."""
return {
'2': 'DOI',
'9': value.get('source'),
'a': value.get('value'),
'q': value.get('material'),
}
@hep2marc.over('0247', '^persistent_identifiers$')
@utils.for_each_value
def persistent_identifiers2marc(self, key, value):
"""Populate the ``0247`` MARC field."""
return {
'2': value.get('schema'),
'9': value.get('source'),
'a': value.get('value'),
'q': value.get('material'),
}
@hep.over('texkeys', '^035..')
def texkeys(self, key, value):
"""Populate the ``texkeys`` key.
Also populates the ``external_system_identifiers`` and ``_desy_bookkeeping`` keys through side effects.
"""
def _is_oai(id_, schema):
return id_.startswith('oai:')
def _is_desy(id_, schema):
return id_ and schema in ('DESY',)
def _is_texkey(id_, schema):
return id_ and schema in ('INSPIRETeX', 'SPIRESTeX')
texkeys = self.get('texkeys', [])
external_system_identifiers = self.get('external_system_identifiers', [])
_desy_bookkeeping = self.get('_desy_bookkeeping', [])
values = force_list(value)
for value in values:
ids = force_list(value.get('a', ''))
other_ids = force_list(value.get('z', ''))
schema = force_single_element(value.get('9', ''))
for id_ in ids:
id_ = id_.strip()
if not id_:
continue
if _is_texkey(id_, schema):
texkeys.insert(0, id_)
elif _is_oai(id_, schema):
continue # XXX: ignored.
elif _is_desy(id_, schema):
_desy_bookkeeping.append({'identifier': id_})
else:
external_system_identifiers.insert(0, {
'schema': schema,
'value': id_,
})
for id_ in other_ids:
id_ = id_.strip()
if not id_:
continue
if _is_texkey(id_, schema):
texkeys.append(id_)
elif _is_oai(id_, schema):
continue # XXX: ignored.
elif _is_desy(id_, schema):
_desy_bookkeeping.append({'identifier': id_})
else:
external_system_identifiers.append({
'schema': schema,
'value': id_,
})
self['external_system_identifiers'] = external_system_identifiers
self['_desy_bookkeeping'] = _desy_bookkeeping
return texkeys
@hep2marc.over('035', '^texkeys$')
def texkeys2marc(self, key, value):
"""Populate the ``035`` MARC field."""
result = []
values = force_list(value)
if values:
value = values[0]
result.append({
'9': 'INSPIRETeX',
'a': value,
})
for value in values[1:]:
result.append({
'9': 'INSPIRETeX',
'z': value,
})
return result
@hep2marc.over('035', '^external_system_identifiers$')
def external_system_identifiers2marc(self, key, value):
"""Populate the ``035`` MARC field.
Also populates the ``970`` MARC field through side effects and an extra
``id_dict`` dictionary that holds potentially duplicate IDs that are
post-processed in a filter.
"""
def _is_scheme_cernkey(id_, schema):
return schema == 'CERNKEY'
def _is_scheme_spires(id_, schema):
return schema == 'SPIRES'
result_035 = self.get('035', [])
id_dict = self.get('id_dict', defaultdict(list))
result_970 = self.get('970', [])
values = force_list(value)
for value in values:
id_ = value.get('value')
schema = value.get('schema')
if _is_scheme_spires(id_, schema):
result_970.append({
'a': id_,
})
elif _is_scheme_cernkey(id_, schema):
result_035.append({
'9': 'CERNKEY',
'z': id_,
})
else:
id_dict[schema].append(id_)
self['970'] = result_970
self['id_dict'] = id_dict
return result_035
@hep.over('arxiv_eprints', '^037..')
def arxiv_eprints(self, key, value):
"""Populate the ``arxiv_eprints`` key.
Also populates the ``report_numbers`` key through side effects.
"""
def _get_clean_arxiv_eprint(id_):
return id_.split(':')[-1]
def _is_arxiv_eprint(id_, source):
return source == 'arXiv'
def _is_hidden_report_number(other_id, source):
return other_id
def _get_clean_source(source):
if source == 'arXiv:reportnumber':
return 'arXiv'
return source
arxiv_eprints = self.get('arxiv_eprints', [])
report_numbers = self.get('report_numbers', [])
values = force_list(value)
for value in values:
id_ = force_single_element(value.get('a', ''))
other_id = force_single_element(value.get('z', ''))
categories = [normalize_arxiv_category(category) for category
in force_list(value.get('c'))]
source = force_single_element(value.get('9', ''))
if _is_arxiv_eprint(id_, source):
arxiv_eprints.append({
'categories': categories,
'value': _get_clean_arxiv_eprint(id_),
})
elif _is_hidden_report_number(other_id, source):
report_numbers.append({
'hidden': True,
'source': _get_clean_source(source),
'value': other_id,
})
else:
report_numbers.append({
'source': _get_clean_source(source),
'value': id_,
})
self['report_numbers'] = report_numbers
return arxiv_eprints
@hep2marc.over('037', '^arxiv_eprints$')
def arxiv_eprints2marc(self, key, values):
"""Populate the ``037`` MARC field.
Also populates the ``035`` and the ``65017`` MARC fields through side effects.
"""
result_037 = self.get('037', [])
result_035 = self.get('035', [])
result_65017 = self.get('65017', [])
for value in values:
arxiv_id = value.get('value')
arxiv_id = 'arXiv:' + arxiv_id if is_arxiv_post_2007(arxiv_id) else arxiv_id
result_037.append({
'9': 'arXiv',
'a': arxiv_id,
'c': force_single_element(value.get('categories')),
})
result_035.append({
'9': 'arXiv',
'a': 'oai:arXiv.org:' + value.get('value'),
})
categories = force_list(value.get('categories'))
for category in categories:
result_65017.append({
'2': 'arXiv',
'a': category,
})
self['65017'] = result_65017
self['035'] = result_035
return result_037
@hep2marc.over('037', '^report_numbers$')
@utils.for_each_value
def report_numbers2marc(self, key, value):
"""Populate the ``037`` MARC field."""
def _get_mangled_source(source):
if source == 'arXiv':
return 'arXiv:reportnumber'
return source
source = _get_mangled_source(value.get('source'))
if value.get('hidden'):
return {
'9': source,
'z': value.get('value'),
}
return {
'9': source,
'a': value.get('value'),
}
@hep.over('languages', '^041..')
def languages(self, key, value):
"""Populate the ``languages`` key."""
languages = self.get('languages', [])
values = force_list(value.get('a'))
for value in values:
for language in RE_LANGUAGE.split(value):
try:
name = language.strip().capitalize()
languages.append(pycountry.languages.get(name=name).alpha_2)
except KeyError:
pass
return languages
@hep2marc.over('041', '^languages$')
@utils.for_each_value
def languages2marc(self, key, value):
"""Populate the ``041`` MARC field."""
return {'a': pycountry.languages.get(alpha_2=value).name.lower()}
``` |
[
{
"content": "Write the code verbatim:\n```python\n#!/usr/bin/env python\n\n\"\"\"\nSimple app for converting between float, double, and IEEE754 binary\nrepresentations.\n\"\"\"\n\n__author__ = \"Zachary Sturgeon <zws258@email.vccs.edu>\"\n\nimport struct\nfrom kivy.app import App\nfrom kivy.uix.widget import W... | [
{
"content": "Write the code verbatim:\n<|memory_start|>```python\n#!/usr/bin/env python\n\n\"\"\"\nSimple app for converting between float, double, and IEEE754 binary\nrepresentations.\n\"\"\"\n\n__author__ = \"Zachary Sturgeon <zws258@email.vccs.edu>\"\n\nimport struct\nfrom kivy.app import App\nfrom kivy.uix... | ```python
#!/usr/bin/env python
"""
Simple app for converting between float, double, and IEEE754 binary
representations.
"""
__author__ = "Zachary Sturgeon <zws258@email.vccs.edu>"
import struct
from kivy.app import App
from kivy.uix.widget import Widget
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.checkbox import CheckBox
from kivy.uix.button import Button
from kivy.uix.label import Label
from kivy.uix.stacklayout import StackLayout
from kivy.uix.widget import Canvas
from kivy.graphics import Color, Rectangle
class ToggleBar(BoxLayout):
"""
A widget to generate a large switch bank for manipulating binary values.
:param n: Length of the switchbank to create.
:type n: int
"""
def __init__(self, **kwargs):
barsize = kwargs.pop('n', 1)
self.value = "0" * barsize
self.orientation = 'vertical'
self.color = kwargs.pop('color', (0.2, 0.2, 0.2, 0.5))
self.callback = kwargs.pop('callback', lambda: None)
self.height = 70
self.padding = 10
self.spacing = 10
self.size_hint = (1, None)
super(ToggleBar, self).__init__(**kwargs)
self.checkboxes = []
box = BoxLayout(orientation='horizontal')
box.size_hint = (1, 0.6)
for n in range(barsize):
checkbox = CheckBox(size_hint=(1.0/barsize, 0.70))
checkbox.bind(active=self.checkbox_toggle)
box.add_widget(checkbox)
self.checkboxes.append(checkbox)
if 'label' in kwargs:
self.label = Label(text=kwargs['label'], markup=True, size_hint=(1, 0.3))
self.add_widget(self.label)
self.add_widget(box)
self.value_label = Label(text="0"*barsize)
self.value_label.size_hint = (1, 0.3)
self.add_widget(self.value_label)
def set_value(self, binstring):
#Truncate to beginning of string
if len(binstring) > len(self.checkboxes):
binstring = binstring[0:len(self.checkboxes)]
for index, bit in enumerate(binstring):
if bit == '1':
self.checkboxes[index].active = True
else:
self.checkboxes[index].active = False
self.value_label.text = binstring
self.value = binstring
def checkbox_toggle(self, checkbox, value):
binstring = ""
for checkbox in self.checkboxes:
if checkbox.active:
binstring += "1"
else:
binstring += "0"
#Update the label:
self.value_label.text = binstring
self.value = binstring
self.callback()
class ToggleBarBlock(ToggleBar):
"""
Same as ToggleBar, but arranged in a grid for better presentation of
particularly long binary strings. Takes n and breakpoint arguments.
:param n: Length of the switchbank to generate.
:type n: int
:param breakpoint: A new row is created after this point.
:type breakpoint: int
"""
def __init__(self, **kwargs):
barsize = kwargs.pop('n', 1)
self.value = "0" * barsize
self.orientation = 'vertical'
self.color = kwargs.pop('color', (0.2, 0.2, 0.2, 0.5))
self.callback = kwargs.pop('callback', lambda: None)
self.height = 70
self.padding = 10
self.spacing = 10
self.size_hint = (1, None)
super(ToggleBar, self).__init__(**kwargs)
self.checkboxes = []
master_box = BoxLayout(orientation='vertical')
box = BoxLayout(orientation='horizontal')
box.size_hint = (1, 0.6)
for n in range(barsize):
checkbox = CheckBox(size_hint=(1.0/barsize, 0.70))
checkbox.bind(active=self.checkbox_toggle)
self.checkboxes.append(checkbox)
#If bit position is divisible by the breaking point, add a new row:
if ((n + 1) % kwargs['breakpoint']) == 0:
box.add_widget(checkbox)
master_box.add_widget(box)
box = BoxLayout(orientation='horizontal')
box.size_hint = (1, 0.6)
else:
box.add_widget(checkbox)
if 'label' in kwargs:
self.label = Label(text=kwargs['label'], markup=True, size_hint=(1, 0.3))
self.add_widget(self.label)
self.add_widget(master_box)
self.value_label = Label(text="0"*barsize)
self.value_label.size_hint = (1, 0.3)
self.add_widget(self.value_label)
class RootWidget(BoxLayout):
"""
Root frame for the application. This contains callback bindings for the
button actions and is accessible from the named context inside the kv layout.
"""
def __init__(self, **kwargs):
super(RootWidget, self).__init__(**kwargs)
def convert_float(self, textbox, hex_textbox):
if textbox.text == "":
return
#Convert float to binary string and set checkboxes:
try:
binary_string = float_to_binary(float(textbox.text))
except ValueError:
return
self.sign.set_value(binary_string[0])
self.exponent.set_value(binary_string[1:9])
self.mantissa.set_value(binary_string[9:])
#Coerce the input value:
self.bin_to_float()
#Convert float to hex and display in text input:
hex_textbox.text = hex(int(binary_string, 2))
def convert_hex_float(self):
if self.float_hex == "":
return
try:
binary_string = hex_to_bin(self.float_hex.text)
except ValueError:
return
self.sign.set_value(binary_string[0])
self.exponent.set_value(binary_string[1:9])
self.mantissa.set_value(binary_string[9:32])
#Convert to float:
self.bin_to_float()
def convert_hex_double(self):
if self.double_hex == "":
return
try:
binary_string = hex_to_bin64(self.double_hex.text)
except ValueError:
return
self.sign_double.set_value(binary_string[0])
self.exponent_double.set_value(binary_string[1:12])
self.mantissa_double.set_value(binary_string[12:64])
#Convert to float:
#~ self.float_decimal.text = str(float.fromhex(self.float_hex.text))
#~ self.bin_to_double()
def convert_double(self, textbox, hex_textbox):
if textbox.text == "":
return
try:
binary_string = double_to_binary(float(textbox.text))
except ValueError:
return
self.sign_double.set_value(binary_string[0])
self.exponent_double.set_value(binary_string[1:12])
self.mantissa_double.set_value(binary_string[12:])
hex_textbox.text = hex(int(binary_string, 2))
#Coerce the input value
self.bin_to_double()
def bin_to_float(self):
self.binary_string = "{}{}{}".format(self.sign.value,
self.exponent.value, self.mantissa.value)
bfloat = binary_to_float(self.binary_string)
self.float_decimal.text = bfloat
self.float_hex.text = hex(int(self.binary_string, 2))
def bin_to_double(self):
self.binary_string = "{}{}{}".format(self.sign_double.value,
self.exponent_double.value, self.mantissa_double.value)
bdouble = binary_to_double(self.binary_string)
#~ import pdb; pdb.set_trace()
self.double_hex.text = hex(int(self.binary_string, 2))
self.double_decimal.text = bdouble
class IEEEApp(App):
"""
Application build logic. Builds the accordion frames and switch banks.
"""
def build(self):
tw = RootWidget(orientation='vertical')
# Set up 32-bit accordion tab:
tw.sign = ToggleBar(n=1, label="[b]Sign[/b]", callback=tw.bin_to_float, \
color=(0.8, 0.2, 0.2, 0.5))
tw.exponent = ToggleBar(n=8, label="[b]Exponent[/b]", \
callback=tw.bin_to_float, \
color=(0.2, 0.8, 0.2, 0.5))
tw.mantissa = ToggleBar(n=23, label="[b]Mantissa[/b]", \
callback=tw.bin_to_float)
tw.sign.size_hint = (0.1, None)
tw.exponent.size_hint = (0.4, None)
tw.mantissa.size_hint = (1, None)
box32 = BoxLayout(orientation='horizontal')
box32.add_widget(tw.sign)
box32.add_widget(tw.exponent)
box32.add_widget(tw.mantissa)
# Set up 64-bit accordion tab:
tw.sign_double = ToggleBar(n=1, color=(0.8, 0.2, 0.2, 0.5),
label="[b]Sign[/b]", callback=tw.bin_to_double)
tw.exponent_double = ToggleBar(n=11, color=(0.2, 0.8, 0.2, 0.5),
label="[b]Exponent[/b]",
callback=tw.bin_to_double)
tw.mantissa_double = ToggleBarBlock(n=52, breakpoint=26,
label="[b]Mantissa[/b]",
callback=tw.bin_to_double)
box64 = BoxLayout(orientation='horizontal', size_hint=(1, 0.4))
box64.add_widget(tw.sign_double)
box64.add_widget(tw.exponent_double)
tw.mantissa_double.size_hint = (1, 0.5)
tw.toggle32.add_widget(box32)
tw.toggle64.add_widget(box64)
tw.toggle64.add_widget(tw.mantissa_double)
#tw.toggle64.add_widget(ToggleBar(n=64))
return tw
#Functions for converting between IEEE754 binary 32/64-bit representations:
def float_to_binary(num):
"""
Converts a python float to a 32-bit single precision IEEE754 binary string.
"""
try:
return ''.join(bin(ord(c)).replace('0b', '').rjust(8, '0') for c in struct.pack('!f', num))
except OverflowError:
if str(num)[0] == '-':
return float_to_binary(float('-inf'))
else:
return float_to_binary(float('inf'))
def binary_to_float(binstring):
"""
Converts a 32-bit single precision binary string to a float.
Raises a ValueError if the input is not 32 characters long.
"""
if len(binstring) != 32:
raise ValueError("Binary number must be 32 bits long")
chars = "".join(chr(int(binstring[i:i+8], 2)) for i in xrange(0, len(binstring), 8))
return str(struct.unpack('!f', chars)[0])
def double_to_binary(num):
"""
Converts a python float to a 64-bit double precision IEEE754 binary string.
"""
return bin(struct.unpack('!Q', struct.pack('!d', num))[0])[2:].zfill(64)
def binary_to_double(binstring):
"""
Converts a 64-bit double precision binary string to a float.
Raises a ValueError if the input is not 64 characters long.
"""
if len(binstring) != 64:
raise ValueError("Binary number must be 64 bits long")
chars = "".join(chr(int(binstring[i:i+8], 2)) for i in xrange(0, len(binstring), 8))
return str(struct.unpack('!d', chars)[0])
def hex_to_bin(hexstring):
return bin(int(hexstring, 16))[2:].zfill(32)
def hex_to_bin64(hexstring):
return bin(int(hexstring, 16))[2:].zfill(64)
if __name__ == '__main__':
IEEEApp().run()
``` |
[
{
"content": "Repeat the code precisely as written (spacing intact):\n```python\nimport re\nimport collections\n\nfrom enum import Enum\n\nfrom ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum\nfrom ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict\nfro... | [
{
"content": "Repeat the code precisely as written (spacing intact):\n<|memory_start|>```python\nimport re\nimport collections\n\nfrom enum import Enum\n\nfrom ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum\nfrom ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, Fi... | ```python
import re
import collections
from enum import Enum
from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk._core._dm_meta_info import ATTRIBUTE, REFERENCE_CLASS, REFERENCE_LIST, REFERENCE_LEAFLIST, REFERENCE_IDENTITY_CLASS, REFERENCE_ENUM_CLASS, REFERENCE_BITS, REFERENCE_UNION
from ydk.errors import YPYError, YPYModelError
from ydk.providers._importer import _yang_ns
_meta_table = {
'OpticsFormFactorEnum' : _MetaInfoEnum('OpticsFormFactorEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper',
{
'not-set':'NOT_SET',
'invalid':'INVALID',
'cpak':'CPAK',
'cxp':'CXP',
'sfp-plus':'SFP_PLUS',
'qsfp':'QSFP',
'qsfp-plus':'QSFP_PLUS',
'qsfp28':'QSFP28',
}, 'Cisco-IOS-XR-controller-optics-oper', _yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper']),
'OpticsControllerStateEnum' : _MetaInfoEnum('OpticsControllerStateEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper',
{
'optics-state-up':'OPTICS_STATE_UP',
'optics-state-down':'OPTICS_STATE_DOWN',
'optics-state-admin-down':'OPTICS_STATE_ADMIN_DOWN',
}, 'Cisco-IOS-XR-controller-optics-oper', _yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper']),
'OpticsLaserStateEnum' : _MetaInfoEnum('OpticsLaserStateEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper',
{
'on':'ON',
'off':'OFF',
'unknown':'UNKNOWN',
}, 'Cisco-IOS-XR-controller-optics-oper', _yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper']),
'OpticsPhyEnum' : _MetaInfoEnum('OpticsPhyEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper',
{
'not-set':'NOT_SET',
'invalid':'INVALID',
'long-reach-four-lanes':'LONG_REACH_FOUR_LANES',
'short-reach-ten-lanes':'SHORT_REACH_TEN_LANES',
'short-reach-one-lane':'SHORT_REACH_ONE_LANE',
'long-reach-one-lane':'LONG_REACH_ONE_LANE',
'short-reach-four-lanes':'SHORT_REACH_FOUR_LANES',
'copper-four-lanes':'COPPER_FOUR_LANES',
'active-optical-cable':'ACTIVE_OPTICAL_CABLE',
'fourty-gig-e-long-reach-four-lanes':'FOURTY_GIG_E_LONG_REACH_FOUR_LANES',
'fourty-gig-e-short-reach-four-lanes':'FOURTY_GIG_E_SHORT_REACH_FOUR_LANES',
'cwdm-four-lanes':'CWDM_FOUR_LANES',
}, 'Cisco-IOS-XR-controller-optics-oper', _yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper']),
'OpticsTasEnum' : _MetaInfoEnum('OpticsTasEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper',
{
'tas-ui-oos':'TAS_UI_OOS',
'tas-ui-main':'TAS_UI_MAIN',
'tas-ui-is':'TAS_UI_IS',
'tas-ui-ains':'TAS_UI_AINS',
}, 'Cisco-IOS-XR-controller-optics-oper', _yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper']),
'OpticsEnum' : _MetaInfoEnum('OpticsEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper',
{
'optics-unknown':'OPTICS_UNKNOWN',
'optics-grey':'OPTICS_GREY',
'optics-dwdm':'OPTICS_DWDM',
'optics-cwdm':'OPTICS_CWDM',
}, 'Cisco-IOS-XR-controller-optics-oper', _yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper']),
'OpticsLedStateEnum' : _MetaInfoEnum('OpticsLedStateEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper',
{
'off':'OFF',
'green-on':'GREEN_ON',
'green-flashing':'GREEN_FLASHING',
'yellow-on':'YELLOW_ON',
'yellow-flashing':'YELLOW_FLASHING',
'red-on':'RED_ON',
'red-flashing':'RED_FLASHING',
}, 'Cisco-IOS-XR-controller-optics-oper', _yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper']),
'OpticsOper.OpticsPorts.OpticsPort.OpticsDwdmCarrrierChannelMap.DwdmCarrierMapInfo' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsDwdmCarrrierChannelMap.DwdmCarrierMapInfo',
False,
[
_MetaInfoClassMember('frequency', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Frequency
''',
'frequency',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('g694-chan-num', ATTRIBUTE, 'int' , None, None,
[(-2147483648, 2147483647)], [],
''' G694 channel number
''',
'g694_chan_num',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('itu-chan-num', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' ITU channel number
''',
'itu_chan_num',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('wavelength', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' Wavelength
''',
'wavelength',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'dwdm-carrier-map-info',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsDwdmCarrrierChannelMap' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsDwdmCarrrierChannelMap',
False,
[
_MetaInfoClassMember('dwdm-carrier-band', ATTRIBUTE, 'str' , None, None,
[(0, 32)], [],
''' DWDM carrier band
''',
'dwdm_carrier_band',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('dwdm-carrier-map-info', REFERENCE_LIST, 'DwdmCarrierMapInfo' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsDwdmCarrrierChannelMap.DwdmCarrierMapInfo',
[], [],
''' DWDM carrier mapping info
''',
'dwdm_carrier_map_info',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('dwdm-carrier-max', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Highest DWDM carrier supported
''',
'dwdm_carrier_max',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('dwdm-carrier-min', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Lowest DWDM carrier supported
''',
'dwdm_carrier_min',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'optics-dwdm-carrrier-channel-map',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.NetworkSrlgInfo' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.NetworkSrlgInfo',
False,
[
_MetaInfoClassMember('network-srlg', REFERENCE_LEAFLIST, 'int' , None, None,
[(0, 4294967295)], [],
''' Network Srlg
''',
'network_srlg',
'Cisco-IOS-XR-controller-optics-oper', False, max_elements=102),
],
'Cisco-IOS-XR-controller-optics-oper',
'network-srlg-info',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighRxPower' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighRxPower',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'high-rx-power',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowRxPower' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowRxPower',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'low-rx-power',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTxPower' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTxPower',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'high-tx-power',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTxPower' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTxPower',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'low-tx-power',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighLbc' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighLbc',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'high-lbc',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighRx1Power' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighRx1Power',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'high-rx1-power',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighRx2Power' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighRx2Power',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'high-rx2-power',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighRx3Power' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighRx3Power',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'high-rx3-power',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighRx4Power' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighRx4Power',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'high-rx4-power',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowRx1Power' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowRx1Power',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'low-rx1-power',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowRx2Power' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowRx2Power',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'low-rx2-power',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowRx3Power' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowRx3Power',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'low-rx3-power',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowRx4Power' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowRx4Power',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'low-rx4-power',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx1Power' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx1Power',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'high-tx1-power',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx2Power' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx2Power',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'high-tx2-power',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx3Power' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx3Power',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'high-tx3-power',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx4Power' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx4Power',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'high-tx4-power',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx1Power' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx1Power',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'low-tx1-power',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx2Power' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx2Power',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'low-tx2-power',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx3Power' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx3Power',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'low-tx3-power',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx4Power' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx4Power',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'low-tx4-power',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx1Lbc' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx1Lbc',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'high-tx1lbc',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx2Lbc' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx2Lbc',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'high-tx2lbc',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx3Lbc' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx3Lbc',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'high-tx3lbc',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx4Lbc' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx4Lbc',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'high-tx4lbc',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx1Lbc' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx1Lbc',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'low-tx1lbc',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx2Lbc' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx2Lbc',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'low-tx2lbc',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx3Lbc' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx3Lbc',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'low-tx3lbc',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx4Lbc' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx4Lbc',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'low-tx4lbc',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.RxLos' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.RxLos',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'rx-los',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.TxLos' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.TxLos',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'tx-los',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.RxLol' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.RxLol',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'rx-lol',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.TxLol' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.TxLol',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'tx-lol',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.TxFault' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.TxFault',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'tx-fault',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.Hidgd' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.Hidgd',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'hidgd',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.Oorcd' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.Oorcd',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'oorcd',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.Osnr' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.Osnr',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'osnr',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.Wvlool' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.Wvlool',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'wvlool',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.Mea' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.Mea',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'mea',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.ImpRemoval' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.ImpRemoval',
False,
[
_MetaInfoClassMember('counter', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Alarm counter
''',
'counter',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('is-detected', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is defect detected?
''',
'is_detected',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'imp-removal',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo',
False,
[
_MetaInfoClassMember('hidgd', REFERENCE_CLASS, 'Hidgd' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.Hidgd',
[], [],
''' HI DGD
''',
'hidgd',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('high-lbc', REFERENCE_CLASS, 'HighLbc' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighLbc',
[], [],
''' High laser bias current
''',
'high_lbc',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('high-rx1-power', REFERENCE_CLASS, 'HighRx1Power' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighRx1Power',
[], [],
''' High Rx1 Power
''',
'high_rx1_power',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('high-rx2-power', REFERENCE_CLASS, 'HighRx2Power' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighRx2Power',
[], [],
''' High Rx2 Power
''',
'high_rx2_power',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('high-rx3-power', REFERENCE_CLASS, 'HighRx3Power' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighRx3Power',
[], [],
''' High Rx3 Power
''',
'high_rx3_power',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('high-rx4-power', REFERENCE_CLASS, 'HighRx4Power' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighRx4Power',
[], [],
''' High Rx4 Power
''',
'high_rx4_power',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('high-rx-power', REFERENCE_CLASS, 'HighRxPower' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighRxPower',
[], [],
''' High Rx Power
''',
'high_rx_power',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('high-tx1-power', REFERENCE_CLASS, 'HighTx1Power' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx1Power',
[], [],
''' High Tx1 Power
''',
'high_tx1_power',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('high-tx1lbc', REFERENCE_CLASS, 'HighTx1Lbc' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx1Lbc',
[], [],
''' High Tx1 laser bias current
''',
'high_tx1lbc',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('high-tx2-power', REFERENCE_CLASS, 'HighTx2Power' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx2Power',
[], [],
''' High Tx2 Power
''',
'high_tx2_power',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('high-tx2lbc', REFERENCE_CLASS, 'HighTx2Lbc' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx2Lbc',
[], [],
''' High Tx2 laser bias current
''',
'high_tx2lbc',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('high-tx3-power', REFERENCE_CLASS, 'HighTx3Power' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx3Power',
[], [],
''' High Tx3 Power
''',
'high_tx3_power',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('high-tx3lbc', REFERENCE_CLASS, 'HighTx3Lbc' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx3Lbc',
[], [],
''' High Tx3 laser bias current
''',
'high_tx3lbc',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('high-tx4-power', REFERENCE_CLASS, 'HighTx4Power' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx4Power',
[], [],
''' High Tx4 Power
''',
'high_tx4_power',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('high-tx4lbc', REFERENCE_CLASS, 'HighTx4Lbc' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx4Lbc',
[], [],
''' High Tx4 laser bias current
''',
'high_tx4lbc',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('high-tx-power', REFERENCE_CLASS, 'HighTxPower' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTxPower',
[], [],
''' High Tx Power
''',
'high_tx_power',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('imp-removal', REFERENCE_CLASS, 'ImpRemoval' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.ImpRemoval',
[], [],
''' IMPROPER REM
''',
'imp_removal',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('low-rx1-power', REFERENCE_CLASS, 'LowRx1Power' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowRx1Power',
[], [],
''' Low Rx1 Power
''',
'low_rx1_power',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('low-rx2-power', REFERENCE_CLASS, 'LowRx2Power' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowRx2Power',
[], [],
''' Low Rx2 Power
''',
'low_rx2_power',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('low-rx3-power', REFERENCE_CLASS, 'LowRx3Power' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowRx3Power',
[], [],
''' Low Rx3 Power
''',
'low_rx3_power',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('low-rx4-power', REFERENCE_CLASS, 'LowRx4Power' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowRx4Power',
[], [],
''' Low Rx4 Power
''',
'low_rx4_power',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('low-rx-power', REFERENCE_CLASS, 'LowRxPower' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowRxPower',
[], [],
''' Low Rx Power
''',
'low_rx_power',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('low-tx1-power', REFERENCE_CLASS, 'LowTx1Power' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx1Power',
[], [],
''' Low Tx1 Power
''',
'low_tx1_power',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('low-tx1lbc', REFERENCE_CLASS, 'LowTx1Lbc' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx1Lbc',
[], [],
''' Low Tx1 laser bias current
''',
'low_tx1lbc',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('low-tx2-power', REFERENCE_CLASS, 'LowTx2Power' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx2Power',
[], [],
''' Low Tx2 Power
''',
'low_tx2_power',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('low-tx2lbc', REFERENCE_CLASS, 'LowTx2Lbc' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx2Lbc',
[], [],
''' Low Tx2 laser bias current
''',
'low_tx2lbc',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('low-tx3-power', REFERENCE_CLASS, 'LowTx3Power' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx3Power',
[], [],
''' Low Tx3 Power
''',
'low_tx3_power',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('low-tx3lbc', REFERENCE_CLASS, 'LowTx3Lbc' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx3Lbc',
[], [],
''' Low Tx3 laser bias current
''',
'low_tx3lbc',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('low-tx4-power', REFERENCE_CLASS, 'LowTx4Power' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx4Power',
[], [],
''' Low Tx4 Power
''',
'low_tx4_power',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('low-tx4lbc', REFERENCE_CLASS, 'LowTx4Lbc' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx4Lbc',
[], [],
''' Low Tx4 laser bias current
''',
'low_tx4lbc',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('low-tx-power', REFERENCE_CLASS, 'LowTxPower' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTxPower',
[], [],
''' Low Tx Power
''',
'low_tx_power',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('mea', REFERENCE_CLASS, 'Mea' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.Mea',
[], [],
''' MEA
''',
'mea',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('oorcd', REFERENCE_CLASS, 'Oorcd' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.Oorcd',
[], [],
''' OOR CD
''',
'oorcd',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('osnr', REFERENCE_CLASS, 'Osnr' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.Osnr',
[], [],
''' OSNR
''',
'osnr',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('rx-lol', REFERENCE_CLASS, 'RxLol' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.RxLol',
[], [],
''' RX LOL
''',
'rx_lol',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('rx-los', REFERENCE_CLASS, 'RxLos' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.RxLos',
[], [],
''' RX LOS
''',
'rx_los',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('tx-fault', REFERENCE_CLASS, 'TxFault' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.TxFault',
[], [],
''' TX Fault
''',
'tx_fault',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('tx-lol', REFERENCE_CLASS, 'TxLol' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.TxLol',
[], [],
''' TX LOL
''',
'tx_lol',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('tx-los', REFERENCE_CLASS, 'TxLos' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.TxLos',
[], [],
''' TX LOS
''',
'tx_los',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('wvlool', REFERENCE_CLASS, 'Wvlool' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.Wvlool',
[], [],
''' WVL OOL
''',
'wvlool',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'optics-alarm-info',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.LaneData' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.LaneData',
False,
[
_MetaInfoClassMember('lane-index', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' The index number of the lane
''',
'lane_index',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('laser-bias-current-milli-amps', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Laser Bias Current in units of 0.01mA
''',
'laser_bias_current_milli_amps',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('laser-bias-current-percent', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Laser Bias Current in units of 0.01%
''',
'laser_bias_current_percent',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('receive-power', ATTRIBUTE, 'int' , None, None,
[(-2147483648, 2147483647)], [],
''' Transponder receive power in the unit of 0.01dBm
''',
'receive_power',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('transmit-power', ATTRIBUTE, 'int' , None, None,
[(-2147483648, 2147483647)], [],
''' Transmit power in the unit of 0.01dBm
''',
'transmit_power',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'lane-data',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort.OpticsInfo',
False,
[
_MetaInfoClassMember('cd', ATTRIBUTE, 'int' , None, None,
[(-2147483648, 2147483647)], [],
''' Chromatic Dispersion ps/nm
''',
'cd',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('cd-high-threshold', ATTRIBUTE, 'int' , None, None,
[(-2147483648, 2147483647)], [],
''' Chromatic Dispersion high threshold ps/nm
''',
'cd_high_threshold',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('cd-low-threshold', ATTRIBUTE, 'int' , None, None,
[(-2147483648, 2147483647)], [],
''' Chromatic Dispersion low threshold ps/nm
''',
'cd_low_threshold',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('cd-max', ATTRIBUTE, 'int' , None, None,
[(-2147483648, 2147483647)], [],
''' Chromatic Dispersion Max ps/nm
''',
'cd_max',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('cd-min', ATTRIBUTE, 'int' , None, None,
[(-2147483648, 2147483647)], [],
''' Chromatic Dispersion Min ps/nm
''',
'cd_min',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('cfg-tx-power', ATTRIBUTE, 'int' , None, None,
[(-2147483648, 2147483647)], [],
''' Configured Tx power value
''',
'cfg_tx_power',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('controller-state', REFERENCE_ENUM_CLASS, 'OpticsControllerStateEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsControllerStateEnum',
[], [],
''' Optics controller state: Up, Down or
Administratively Down
''',
'controller_state',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('dgd-high-threshold', ATTRIBUTE, 'str' , None, None,
[], [],
''' DGD high threshold in 0.1 ps
''',
'dgd_high_threshold',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('differential-group-delay', ATTRIBUTE, 'str' , None, None,
[], [],
''' Differential Group Delay ps
''',
'differential_group_delay',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('display-volt-temp', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Display Volt/Temp ?
''',
'display_volt_temp',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('dwdm-carrier-band', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' DWDM Carrier Band information
''',
'dwdm_carrier_band',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('dwdm-carrier-channel', ATTRIBUTE, 'str' , None, None,
[], [],
''' Current ITU DWDM Carrier channel number
''',
'dwdm_carrier_channel',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('dwdm-carrier-frequency', ATTRIBUTE, 'str' , None, None,
[], [],
''' DWDM Carrier frequency read from hw in the unit
0.01THz
''',
'dwdm_carrier_frequency',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('dwdm-carrier-wavelength', ATTRIBUTE, 'str' , None, None,
[], [],
''' Wavelength of color optics 0.001nm
''',
'dwdm_carrier_wavelength',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('form-factor', REFERENCE_ENUM_CLASS, 'OpticsFormFactorEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsFormFactorEnum',
[], [],
''' Optics form factor
''',
'form_factor',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('grey-wavelength', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Wavelength of grey optics 0.01nm
''',
'grey_wavelength',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('lane-data', REFERENCE_LIST, 'LaneData' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.LaneData',
[], [],
''' Lane information
''',
'lane_data',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('laser-state', REFERENCE_ENUM_CLASS, 'OpticsLaserStateEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsLaserStateEnum',
[], [],
''' Showing laser state.Either ON or OFF or unknown
''',
'laser_state',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('lbc-high-threshold', ATTRIBUTE, 'int' , None, None,
[(-2147483648, 2147483647)], [],
''' LBC High threshold value
''',
'lbc_high_threshold',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('led-state', REFERENCE_ENUM_CLASS, 'OpticsLedStateEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsLedStateEnum',
[], [],
''' Showing Current Colour of led state
''',
'led_state',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('network-srlg-info', REFERENCE_CLASS, 'NetworkSrlgInfo' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.NetworkSrlgInfo',
[], [],
''' Network SRLG information
''',
'network_srlg_info',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('optical-signal-to-noise-ratio', ATTRIBUTE, 'str' , None, None,
[], [],
''' Optical Signal to Noise Ratio dB
''',
'optical_signal_to_noise_ratio',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('optics-alarm-info', REFERENCE_CLASS, 'OpticsAlarmInfo' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo',
[], [],
''' Optics Alarm Information
''',
'optics_alarm_info',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('optics-module', ATTRIBUTE, 'str' , None, None,
[], [],
''' Optics module name
''',
'optics_module',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('optics-present', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Is Optics Present?
''',
'optics_present',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('optics-type', REFERENCE_ENUM_CLASS, 'OpticsEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsEnum',
[], [],
''' Optics type name
''',
'optics_type',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('osnr-low-threshold', ATTRIBUTE, 'str' , None, None,
[], [],
''' OSNR low threshold in 0.01 dB
''',
'osnr_low_threshold',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('phase-noise', ATTRIBUTE, 'str' , None, None,
[], [],
''' Phase Noise dB
''',
'phase_noise',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('phy-type', REFERENCE_ENUM_CLASS, 'OpticsPhyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsPhyEnum',
[], [],
''' Optics physical type
''',
'phy_type',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('pm-enable', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' PmEable or Disable
''',
'pm_enable',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('polarization-change-rate', ATTRIBUTE, 'str' , None, None,
[], [],
''' Polarization Change Rate rad/s
''',
'polarization_change_rate',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('polarization-dependent-loss', ATTRIBUTE, 'str' , None, None,
[], [],
''' Polarization Dependent Loss dB
''',
'polarization_dependent_loss',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('polarization-mode-dispersion', ATTRIBUTE, 'str' , None, None,
[], [],
''' Polarization Mode Dispersion 0.1ps
''',
'polarization_mode_dispersion',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('rx-high-threshold', ATTRIBUTE, 'int' , None, None,
[(-2147483648, 2147483647)], [],
''' Rx High threshold value
''',
'rx_high_threshold',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('rx-low-threshold', ATTRIBUTE, 'int' , None, None,
[(-2147483648, 2147483647)], [],
''' Rx Low threshold value
''',
'rx_low_threshold',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('second-order-polarization-mode-dispersion', ATTRIBUTE, 'str' , None, None,
[], [],
''' Second Order Polarization Mode Dispersion 0
.1ps^2
''',
'second_order_polarization_mode_dispersion',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('temp-high-threshold', ATTRIBUTE, 'int' , None, None,
[(-2147483648, 2147483647)], [],
''' Temp High threshold value
''',
'temp_high_threshold',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('temp-low-threshold', ATTRIBUTE, 'int' , None, None,
[(-2147483648, 2147483647)], [],
''' Temp Low threshold value
''',
'temp_low_threshold',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('transport-admin-state', REFERENCE_ENUM_CLASS, 'OpticsTasEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsTasEnum',
[], [],
''' Transport Admin State
''',
'transport_admin_state',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('tx-high-threshold', ATTRIBUTE, 'int' , None, None,
[(-2147483648, 2147483647)], [],
''' Tx High threshold value
''',
'tx_high_threshold',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('tx-low-threshold', ATTRIBUTE, 'int' , None, None,
[(-2147483648, 2147483647)], [],
''' Tx Low threshold value
''',
'tx_low_threshold',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('vendor-info', ATTRIBUTE, 'str' , None, None,
[], [],
''' Vendor Information
''',
'vendor_info',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('volt-high-threshold', ATTRIBUTE, 'int' , None, None,
[(-2147483648, 2147483647)], [],
''' Volt High threshold value
''',
'volt_high_threshold',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('volt-low-threshold', ATTRIBUTE, 'int' , None, None,
[(-2147483648, 2147483647)], [],
''' Volt Low threshold value
''',
'volt_low_threshold',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'optics-info',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts.OpticsPort' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts.OpticsPort',
False,
[
_MetaInfoClassMember('name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3}\\d+)|(([a-zA-Z0-9_]*\\d+/){4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Port name
''',
'name',
'Cisco-IOS-XR-controller-optics-oper', True),
_MetaInfoClassMember('optics-dwdm-carrrier-channel-map', REFERENCE_CLASS, 'OpticsDwdmCarrrierChannelMap' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsDwdmCarrrierChannelMap',
[], [],
''' Optics operational data
''',
'optics_dwdm_carrrier_channel_map',
'Cisco-IOS-XR-controller-optics-oper', False),
_MetaInfoClassMember('optics-info', REFERENCE_CLASS, 'OpticsInfo' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort.OpticsInfo',
[], [],
''' Optics operational data
''',
'optics_info',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'optics-port',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper.OpticsPorts' : {
'meta_info' : _MetaInfoClass('OpticsOper.OpticsPorts',
False,
[
_MetaInfoClassMember('optics-port', REFERENCE_LIST, 'OpticsPort' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts.OpticsPort',
[], [],
''' Optics operational data
''',
'optics_port',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'optics-ports',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
'OpticsOper' : {
'meta_info' : _MetaInfoClass('OpticsOper',
False,
[
_MetaInfoClassMember('optics-ports', REFERENCE_CLASS, 'OpticsPorts' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper', 'OpticsOper.OpticsPorts',
[], [],
''' All Optics Port operational data
''',
'optics_ports',
'Cisco-IOS-XR-controller-optics-oper', False),
],
'Cisco-IOS-XR-controller-optics-oper',
'optics-oper',
_yang_ns._namespaces['Cisco-IOS-XR-controller-optics-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_optics_oper'
),
},
}
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsDwdmCarrrierChannelMap.DwdmCarrierMapInfo']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsDwdmCarrrierChannelMap']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighRxPower']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowRxPower']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTxPower']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTxPower']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighLbc']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighRx1Power']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighRx2Power']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighRx3Power']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighRx4Power']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowRx1Power']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowRx2Power']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowRx3Power']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowRx4Power']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx1Power']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx2Power']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx3Power']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx4Power']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx1Power']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx2Power']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx3Power']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx4Power']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx1Lbc']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx2Lbc']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx3Lbc']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.HighTx4Lbc']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx1Lbc']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx2Lbc']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx3Lbc']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.LowTx4Lbc']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.RxLos']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.TxLos']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.RxLol']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.TxLol']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.TxFault']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.Hidgd']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.Oorcd']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.Osnr']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.Wvlool']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.Mea']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo.ImpRemoval']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.NetworkSrlgInfo']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.OpticsAlarmInfo']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo.LaneData']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsDwdmCarrrierChannelMap']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort.OpticsInfo']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts.OpticsPort']['meta_info']
_meta_table['OpticsOper.OpticsPorts.OpticsPort']['meta_info'].parent =_meta_table['OpticsOper.OpticsPorts']['meta_info']
_meta_table['OpticsOper.OpticsPorts']['meta_info'].parent =_meta_table['OpticsOper']['meta_info']
``` |
[
{
"content": "```python\n# Copyright 2011 Nicholas Bray\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless require... | [
{
"content": "<|memory_start|>```python\n# Copyright 2011 Nicholas Bray\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n... | ```python
# Copyright 2011 Nicholas Bray
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from language.glsl import ast as glsl
from . slotstruct import SlotStruct
import re
invalidNameChar = re.compile('[^\w\d_]')
# HACK does not ensure the first character is not a digit.
def ensureValidName(name):
return re.sub(invalidNameChar, '_', name)
# A pool contains O objects and F fields.
class PoolImplementation(object):
def __init__(self, poolinfo, basename):
self.poolinfo = poolinfo
# HACK
poolinfo.impl = self
self.basename = basename
self.stores = {}
self.types = {}
self.struct = SlotStruct(poolinfo)
#self.struct.dump()
#print
def _getFieldRef(self, field, slotinfo):
key = field, slotinfo
if not key in self.stores:
fieldimpl = slotinfo.getPoolInfo().impl
t = fieldimpl.struct.ast
name = "%s_%s_%s" % (self.basename, field.type, field.name.pyobj)
name = ensureValidName(name)
lcl = glsl.Local(t, name)
self.stores[key] = lcl
else:
lcl = self.stores[key]
return lcl
def _deref(self, ref, index):
if self.poolinfo.isSingleUnique():
return ref
else:
return glsl.GetSubscript(ref, index)
def getType(self, index):
assert self.poolinfo.typeTaken
#assert False, "hack"
return glsl.Load(index, 'type')
def getField(self, index, field, slotinfo):
assert slotinfo.isSlotInfo(), slotinfo
ref = self._getFieldRef(field, slotinfo)
return self._deref(ref, index)
def getValue(self, index, type):
assert self.struct.inlined
return index
def allocate(self, translator, slot, g):
if self.poolinfo.isSingleUnique():
return []
else:
src = translator.slotRef(translator.makeConstant(0), slot)
return translator.assignmentTransfer(src, g)
``` |
[
{
"content": "```python\nfrom collections import (\n Iterable,\n Mapping,\n MutableMapping,\n Sequence,\n MutableSequence\n)\nimport sys\nimport imp\nimport functools\nimport itertools\nfrom numbers import Number\nimport operator\nimport re\n\nfrom pyrsistent import (\n v, pvector, m, pmap, s,... | [
{
"content": "<|memory_start|>```python\nfrom collections import (\n Iterable,\n Mapping,\n MutableMapping,\n Sequence,\n MutableSequence\n)\nimport sys\nimport imp\nimport functools\nimport itertools\nfrom numbers import Number\nimport operator\nimport re\n\nfrom pyrsistent import (\n v, pvec... | ```python
from collections import (
Iterable,
Mapping,
MutableMapping,
Sequence,
MutableSequence
)
import sys
import imp
import functools
import itertools
from numbers import Number
import operator
import re
from pyrsistent import (
v, pvector, m, pmap, s, pset, b, pbag, dq, pdeque, l,
plist, immutable, freeze, thaw, CheckedPVector, PVector,
PMap, PSet, PList, PBag
)
from mochi import IS_PYPY, GE_PYTHON_33
from mochi.actor import actor
from mochi.parser import Symbol, Keyword, get_temp_name
if not IS_PYPY:
from annotation.typed import union, options, optional, only, predicate
class AttrDict(dict):
def __getattr__(self, attr):
if attr in self.keys():
return self[attr]
else:
return self['__builtins__'][attr]
def __setattr__(self, attr, value):
self[attr] = value
def make_default_env():
env = AttrDict()
if isinstance(__builtins__, dict):
env.update(__builtins__)
else:
env.update(__builtins__.__dict__)
env['Symbol'] = Symbol
env['Keyword'] = Keyword
env.update(__builtins__.__dict__) if hasattr(__builtins__, '__dict__') else env.update(__builtins__)
del env['exec']
# del env['globals']
# del env['locals']
env.update(functools.__dict__)
env.update(itertools.__dict__)
env.update(operator.__dict__)
env['pow'] = pow
if GE_PYTHON_33:
env['__spec__'] = sys.modules[__name__].__spec__
env[Iterable.__name__] = Iterable
env[Sequence.__name__] = Sequence
env[Mapping.__name__] = Mapping
env['v'] = v
env['pvector'] = pvector
env['CheckedPVector'] = CheckedPVector
env['m'] = m
env['pmap'] = pmap
env['s'] = s
env['pset'] = pset
env['l'] = l
env['plist'] = plist
env['b'] = b
env['pbag'] = pbag
env['dq'] = dq
env['pdeque'] = pdeque
env['thaw'] = thaw
env['freeze'] = freeze
env['immutable'] = immutable
env['PVector'] = PVector
env['PMap'] = PMap
env['PSet'] = PSet
env['PList'] = PList
env['PBag'] = PBag
if not IS_PYPY:
env['union'] = union
env['options'] = options
env['optional'] = optional
env['only'] = only
env['predicate'] = predicate
env[Number.__name__] = Number
env['append'] = MutableSequence.append
# env['clear'] = MutableSequence.clear # not supported (pypy)
env['seq_count'] = MutableSequence.count
env['extend'] = MutableSequence.extend
env['insert'] = MutableSequence.insert
env['pop'] = MutableSequence.pop
env['remove'] = MutableSequence.remove
env['reverse'] = MutableSequence.reverse
env['mapping_get'] = MutableMapping.get
env['items'] = MutableMapping.items
env['values'] = MutableMapping.values
env['keys'] = MutableMapping.keys
env['mapping_pop'] = MutableMapping.pop
env['popitem'] = MutableMapping.popitem
env['setdefault'] = MutableMapping.setdefault
env['update'] = MutableMapping.update
env['values'] = MutableMapping.values
env['doall'] = pvector
env['nth'] = operator.getitem
env['+'] = operator.add
env['-'] = operator.sub
env['/'] = operator.truediv
env['*'] = operator.mul
env['%'] = operator.mod
env['**'] = operator.pow
env['<<'] = operator.lshift
env['>>'] = operator.rshift
env['//'] = operator.floordiv
env['=='] = operator.eq
env['!='] = operator.ne
env['>'] = operator.gt
env['>='] = operator.ge
env['<'] = operator.lt
env['<='] = operator.le
env['not'] = operator.not_
env['and'] = operator.and_
env['or'] = operator.or_
env['is'] = operator.is_
env['isnt'] = operator.is_not
env['re'] = re
env['True'] = True
env['False'] = False
env['None'] = None
env['gensym'] = get_temp_name
env['uniq'] = get_temp_name
env['Record'] = immutable((), 'Record')
env['spawn'] = actor.spawn
env['spawn_with_mailbox'] = actor.spawn_with_mailbox
env['send'] = actor.send
env['recv'] = actor.recv
env['ack_last_msg'] = actor.ack_last_msg
env['ack'] = actor.ack
env['link'] = actor.link
env['unlink'] = actor.unlink
env['kill'] = actor.kill
env['cancel'] = actor.cancel
env['self'] = actor.self
env['sleep'] = actor.sleep
env['wait_all'] = actor.wait_all
env['wait'] = actor.wait
try:
env['__loader__'] = __loader__
except:
pass
env['__package__'] = __package__
env['__doc__'] = __doc__
if IS_PYPY:
from _continuation import continulet
env['continulet'] = continulet
return env
global_env = make_default_env()
global_env['__name__'] = '__main__'
global_env['__package__'] = None
global_env['__spec__'] = None
global_env['__loader__'] = None
global_mod = imp.new_module('__main__')
global_mod.__name__ = '__main__'
global_mod.__package__ = None
global_mod.__spec__ = None
global_mod.__loader__ = None
global_mod.__builtins__ = global_env
``` |
[
{
"content": "Reconstruct the code file line-for-line, unmodified:\n```python\n#!/usr/bin/env python2\n# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai\nfrom __future__ import print_function\n\n__license__ = 'GPL v3'\n__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'\n__docformat__ = 'restructuredt... | [
{
"content": "Reconstruct the code file line-for-line, unmodified:\n<|memory_start|>```python\n#!/usr/bin/env python2\n# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai\nfrom __future__ import print_function\n\n__license__ = 'GPL v3'\n__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'\n__docformat__ ... | ```python
#!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
from __future__ import print_function
__license__ = 'GPL v3'
__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
import os, errno
from threading import Thread, Event
from PyQt5.Qt import QDialog, QTimer, Qt, pyqtSignal
from calibre.gui2.dialogs.choose_library_ui import Ui_Dialog
from calibre.gui2.dialogs.progress import ProgressDialog as PD
from calibre.gui2 import error_dialog, choose_dir
from calibre.constants import (filesystem_encoding, iswindows,
get_portable_base)
from calibre import isbytestring, patheq, force_unicode
class ProgressDialog(PD):
on_progress_update = pyqtSignal(object, object, object)
finished_moving = pyqtSignal()
def __init__(self, *args, **kwargs):
PD.__init__(self, *args, **kwargs)
self.on_progress_update.connect(self.progressed, type=Qt.QueuedConnection)
self.finished_moving.connect(self.accept, type=Qt.QueuedConnection)
def reject(self):
return
def progressed(self, item_name, count, total):
self.max = total
self.value = count
self.set_msg(item_name)
def show_new_progress(self, *args):
self.on_progress_update.emit(*args)
class ChooseLibrary(QDialog, Ui_Dialog):
def __init__(self, db, callback, parent):
QDialog.__init__(self, parent)
self.setupUi(self)
self.db = db
self.new_db = None
self.callback = callback
self.location.initialize('choose_library_dialog')
lp = db.library_path
if isbytestring(lp):
lp = lp.decode(filesystem_encoding)
loc = unicode(self.old_location.text()).format(lp)
self.old_location.setText(loc)
self.browse_button.clicked.connect(self.choose_loc)
self.empty_library.toggled.connect(self.empty_library_toggled)
self.copy_structure.setEnabled(False)
def empty_library_toggled(self, to_what):
self.copy_structure.setEnabled(to_what)
def choose_loc(self, *args):
base = get_portable_base()
if base is None:
loc = choose_dir(self, 'choose library location',
_('Choose location for calibre library'))
else:
name = force_unicode('choose library loc at' + base,
filesystem_encoding)
loc = choose_dir(self, name,
_('Choose location for calibre library'), default_dir=base,
no_save_dir=True)
if loc is not None:
self.location.setText(loc)
def check_action(self, ac, loc):
exists = self.db.exists_at(loc)
base = get_portable_base()
if patheq(loc, self.db.library_path):
error_dialog(self, _('Same as current'),
_('The location %s contains the current calibre'
' library')%loc, show=True)
return False
if base is not None and ac in ('new', 'move'):
abase = os.path.normcase(os.path.abspath(base))
cal = os.path.normcase(os.path.abspath(os.path.join(abase,
'Calibre')))
aloc = os.path.normcase(os.path.abspath(loc))
if (aloc.startswith(cal+os.sep) or aloc == cal):
error_dialog(self, _('Bad location'),
_('You should not create a library inside the calibre'
' folder as this folder is automatically deleted during upgrades.'),
show=True)
return False
if aloc.startswith(abase) and os.path.dirname(aloc) != abase:
error_dialog(self, _('Bad location'),
_('You can only create libraries inside %s at the top '
'level, not in sub-folders')%base, show=True)
return False
empty = not os.listdir(loc)
if ac == 'existing' and not exists:
error_dialog(self, _('No existing library found'),
_('There is no existing calibre library at %s')%loc,
show=True)
return False
if ac in ('new', 'move'):
from calibre.db.legacy import LibraryDatabase
if not empty:
error_dialog(self, _('Not empty'),
_('The folder %s is not empty. Please choose an empty'
' folder')%loc,
show=True)
return False
if (iswindows and len(loc) >
LibraryDatabase.WINDOWS_LIBRARY_PATH_LIMIT):
error_dialog(self, _('Too long'),
_('Path to library too long. Must be less than'
' %d characters.')%LibraryDatabase.WINDOWS_LIBRARY_PATH_LIMIT,
show=True)
return False
return True
def perform_action(self, ac, loc):
if ac in ('new', 'existing'):
self.callback(loc, copy_structure=self.copy_structure.isChecked())
else:
# move library
self.db.prefs.disable_setting = True
abort_move = Event()
pd = ProgressDialog(_('Moving library, please wait...'), _('Scanning...'), max=0, min=0, icon='lt.png', parent=self)
pd.canceled_signal.connect(abort_move.set)
self.parent().library_view.model().stop_metadata_backup()
move_error = []
def do_move():
try:
self.db.new_api.move_library_to(loc, abort=abort_move, progress=pd.show_new_progress)
except Exception:
import traceback
move_error.append(traceback.format_exc())
finally:
pd.finished_moving.emit()
t = Thread(name='MoveLibrary', target=do_move)
QTimer.singleShot(0, t.start)
pd.exec_()
if abort_move.is_set():
self.callback(self.db.library_path)
return
if move_error:
error_dialog(self.parent(), _('Failed to move library'), _(
'There was an error while moving the library. The operation has been aborted. Click'
' "Show details" for details.'), det_msg=move_error[0], show=True)
self.callback(self.db.library_path)
return
self.callback(loc, library_renamed=True)
def accept(self):
action = 'move'
if self.existing_library.isChecked():
action = 'existing'
elif self.empty_library.isChecked():
action = 'new'
text = unicode(self.location.text()).strip()
if not text:
return error_dialog(self, _('No location'), _('No location selected'),
show=True)
loc = os.path.abspath(text)
if action == 'move':
try:
os.makedirs(loc)
except EnvironmentError as e:
if e.errno != errno.EEXIST:
raise
if not loc or not os.path.exists(loc) or not os.path.isdir(loc):
return error_dialog(self, _('Bad location'),
_('%s is not an existing folder')%loc, show=True)
if not self.check_action(action, loc):
return
self.location.save_history()
self.perform_action(action, loc)
QDialog.accept(self) # Must be after perform action otherwise the progress dialog is not updated on windows
``` |
[
{
"content": "Reconstruct the code file line-for-line, unmodified:\n```python\n\"\"\"Test for registration view.\"\"\"\n\nfrom bs4 import BeautifulSoup\nfrom django.contrib.auth.models import User\nfrom django.core import mail\nfrom django.test import Client\nfrom django.test import RequestFactory\nfrom django.... | [
{
"content": "Reconstruct the code file line-for-line, unmodified:\n<|memory_start|>```python\n\"\"\"Test for registration view.\"\"\"\n\nfrom bs4 import BeautifulSoup\nfrom django.contrib.auth.models import User\nfrom django.core import mail\nfrom django.test import Client\nfrom django.test import RequestFacto... | ```python
"""Test for registration view."""
from bs4 import BeautifulSoup
from django.contrib.auth.models import User
from django.core import mail
from django.test import Client
from django.test import RequestFactory
from django.test import TestCase
from django.urls import reverse
from imagersite.views import home_view
from django.urls import reverse_lazy
from imager_images.models import Photo
import factory
from django.core.files.uploadedfile import SimpleUploadedFile
import os
class ViewTest(TestCase):
"""Test Home View"""
def setUp(self):
"""Setup home fixture"""
self.client = Client()
self.ger_request = RequestFactory().get('/')
def test_home_route_returns_status_200(self):
"""Home route returns 200."""
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
def test_home_view_has_some_heading(self):
"""Has heading"""
response = home_view(self.ger_request)
self.assertTrue(b'h1' in response.content)
class RegistrationTests(TestCase):
"""Test Registration."""
def setUp(self):
"""Make Reg"""
self.client = Client()
def test_registration_page_uses_proper_template(self):
"""Registration is returned."""
response = self.client.get(reverse('registration_register'))
self.assertIn(
'registration/registration_form.html',
response.template_name
)
def test_registration_creates_new_inactive_user(self):
"""Register adds user."""
self.assertTrue(User.objects.count() == 0)
response = self.client.get(reverse('registration_register'))
html = BeautifulSoup(response.rendered_content, "html.parser")
token = html.find(
'input', {'name': "csrfmiddlewaretoken"}
).attrs['value']
info = {
'csrfmiddlewaretoken': token,
'username': 'test',
'email': 'test@test.com',
'password1': 'testtest123',
'password2': 'testtest123'
}
self.client.post(
reverse('registration_register'),
info
)
self.assertFalse(User.objects.first().is_active)
self.assertTrue(len(mail.outbox) == 1)
def test_registration_success_redirects_to_reg_complete_html(self):
"""Test that the registration complete page shows after registering."""
self.assertTrue(User.objects.count() == 0)
response = self.client.get(reverse('registration_register'))
html = BeautifulSoup(response.rendered_content, "html.parser")
token = html.find(
'input', {'name': "csrfmiddlewaretoken"}
).attrs['value']
info = {
'csrfmiddlewaretoken': token,
'username': 'test',
'email': 'test@test.com',
'password1': 'testtest123',
'password2': 'testtest123'
}
response = self.client.post(
reverse('registration_register'),
info,
follow=True
)
self.assertIn(
'Registration complete',
response.rendered_content
)
def test_activation_key_activates_user(self):
"""Test that that the activation key activates the user."""
self.assertTrue(User.objects.count() == 0)
response = self.client.get(reverse('registration_register'))
html = BeautifulSoup(response.rendered_content, "html.parser")
token = html.find(
'input', {'name': "csrfmiddlewaretoken"}
).attrs['value']
info = {
'csrfmiddlewaretoken': token,
'username': 'test',
'email': 'test@test.com',
'password1': 'testtest123',
'password2': 'testtest123'
}
response = self.client.post(
reverse('registration_register'),
info
)
key = response.context['activation_key']
response = self.client.get(
"/accounts/activate/" + key + "/",
follow=True
)
self.assertIn('Activated!!', response.rendered_content)
# ========================= Tests from class July 13 ========================
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
class PhotoFactory(factory.django.DjangoModelFactory):
"""Create Photos for tests."""
class Meta:
"""Model for Photo Factory."""
model = Photo
title = factory.Sequence(lambda n: "photo{}".format(n))
image = SimpleUploadedFile(
name='somephoto.jpg',
content=open(os.path.join(BASE_DIR, 'imagersite/static/imagersite/testing.png'), 'rb').read(),
content_type='image/jpeg'
)
class HomePageTests(TestCase):
"""Test the home page."""
def setUp(self):
"""Set up for home page tests."""
self.client = Client()
self.user = User(username='carl', email='carl@carl.carl')
self.user.save()
def add_photos(self):
"""Build photos to test."""
photos = [PhotoFactory.build() for _ in range(1)]
for photo in photos:
photo.profile = self.user.profile
photo.save()
def test_when_no_images_placeholder_appears(self):
"""Test that before any images are uploaded a placeholder appears."""
response = self.client.get(reverse_lazy('home'))
html = BeautifulSoup(response.content, 'html.parser')
# import pdb; pdb.set_trace()
self.assertTrue(html.find('img', {'src': '/static/imagersite/testing.png'}))
def test_when_images_exist_one_of_them_is_on_the_page(self):
"""Test that if image exists, it displays."""
self.add_photos()
response = self.client.get(reverse_lazy('home'))
html = BeautifulSoup(response.content, 'html.parser')
img_tag = html.find_all('img')
self.assertTrue(img_tag[0].attrs['src'] == Photo.objects.first().image.url)
class ProfilePageTests(TestCase):
"""Test suite for the profile page."""
def setUp(self):
"""Set up for Profile page tests."""
self.client = Client()
self.user = User(username='carl', email='carl@carl.carl')
self.user.set_password('bobloblaw')
self.user.save()
def test_user_profile_info_on_profile_page(self):
"""Test that a user's profile info displays on page."""
self.client.force_login(self.user)
response = self.client.get(reverse_lazy('profile', kwargs={'username': 'carl'}))
self.assertTrue(b'<p>Username: carl</p>' in response.content)
def test_user_profile_page_has_link_to_library_page(self):
"""Test that profile page has a link to the library page."""
self.client.force_login(self.user)
response = self.client.get(reverse_lazy('profile', kwargs={'username': 'carl'}))
html = BeautifulSoup(response.content, 'html.parser')
self.assertTrue(html.find('a', {'href': '/images/library/1/1'}))
def test_when_user_logs_in_redirect_to_profile_page(self):
"""Test log in redirects to profile page."""
response = self.client.post(reverse_lazy('login'), {
'username': self.user.username, 'password': 'bobloblaw'
}, follow=True)
# import pdb; pdb.set_trace()
self.assertTrue(b'<p>Username: carl</p>' in response.content)
``` |
[
{
"content": "Here is the script:\n```python\n#!/usr/bin/env python\n#\n#\tx12c.c\n#\n#\tBar chart demo.\n\nimport sys\nimport os\n\nmodule_dir = \"@MODULE_DIR@\"\n\nif module_dir[0] == '@':\n\tmodule_dir = os.getcwd ()\n\nsys.path.insert (0, module_dir)\n\n# main\n#\n# Does a simple bar chart, using color fill... | [
{
"content": "Here is the script:\n<|memory_start|>```python\n#!/usr/bin/env python\n#\n#\tx12c.c\n#\n#\tBar chart demo.\n\nimport sys\nimport os\n\nmodule_dir = \"@MODULE_DIR@\"\n\nif module_dir[0] == '@':\n\tmodule_dir = os.getcwd ()\n\nsys.path.insert (0, module_dir)\n\n# main\n#\n# Does a simple bar chart, ... | ```python
#!/usr/bin/env python
#
# x12c.c
#
# Bar chart demo.
import sys
import os
module_dir = "@MODULE_DIR@"
if module_dir[0] == '@':
module_dir = os.getcwd ()
sys.path.insert (0, module_dir)
# main
#
# Does a simple bar chart, using color fill. If color fill is
# unavailable, pattern fill is used instead (automatic).
def main(w):
## # Parse and process command line arguments
##
## pl.ParseOpts(sys.argv, pl.PARSE_FULL)
##
## # Initialize plplot
##
## pl.init()
w.pladv(0)
w.plvsta()
w.plwind(1980.0, 1990.0, 0.0, 35.0)
w.plbox("bc", 1.0, 0, "bcnv", 10.0, 0)
w.plcol0(2)
w.pllab("Year", "Widget Sales (millions)", "#frPLplot Example 12")
y0 = [5, 15, 12, 24, 28, 30, 20, 8, 12, 3]
for i in range(10):
w.plcol0(i + 1)
w.plpsty(0)
fbox( w, (1980. + i), y0[i] )
string = `y0[i]`
w.plptex((1980. + i + .5), (y0[i] + 1.), 1.0, 0.0, .5, string)
string = `1980 + i`
w.plmtex("b", 1.0, ((i + 1) * .1 - .05), 0.5, string)
# Don't forget to call PLEND to finish off!
w.pleop()
def fbox( w, x0, y0 ):
x = [x0, x0, x0 + 1., x0 + 1.]
y = [0., y0, y0, 0.]
w.plfill(x, y)
w.plcol0(1)
w.pllsty(1)
w.plline(x, y)
##main()
``` |
[
{
"content": "Repeat the full code snippet:\n```python\nfrom Screens.Screen import Screen\nfrom Components.ConfigList import ConfigListScreen\nfrom Components.config import config, configfile, getConfigListEntry\nfrom Components.Sources.StaticText import StaticText\nfrom Components.SystemInfo import SystemInfo\... | [
{
"content": "Repeat the full code snippet:\n<|memory_start|>```python\nfrom Screens.Screen import Screen\nfrom Components.ConfigList import ConfigListScreen\nfrom Components.config import config, configfile, getConfigListEntry\nfrom Components.Sources.StaticText import StaticText\nfrom Components.SystemInfo im... | ```python
from Screens.Screen import Screen
from Components.ConfigList import ConfigListScreen
from Components.config import config, configfile, getConfigListEntry
from Components.Sources.StaticText import StaticText
from Components.SystemInfo import SystemInfo
from Tools.Directories import fileExists
from os import path
if path.exists("/dev/hdmi_cec") or path.exists("/dev/misc/hdmi_cec0"):
import Components.HdmiCec
class HdmiCECSetupScreen(Screen, ConfigListScreen):
skin = """
<screen position="c-300,c-250" size="600,500" title="HDMI CEC setup">
<widget name="config" position="25,25" size="550,350" />
<widget source="current_address" render="Label" position="25,375" size="550,30" zPosition="10" font="Regular;21" halign="left" valign="center" />
<widget source="fixed_address" render="Label" position="25,405" size="550,30" zPosition="10" font="Regular;21" halign="left" valign="center" />
<ePixmap pixmap="buttons/red.png" position="20,e-45" size="140,40" alphatest="on" />
<ePixmap pixmap="buttons/green.png" position="160,e-45" size="140,40" alphatest="on" />
<ePixmap pixmap="buttons/yellow.png" position="300,e-45" size="140,40" alphatest="on" />
<ePixmap pixmap="buttons/blue.png" position="440,e-45" size="140,40" alphatest="on" />
<widget source="key_red" render="Label" position="20,e-45" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
<widget source="key_green" render="Label" position="160,e-45" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />
<widget source="key_yellow" render="Label" position="300,e-45" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#a08500" transparent="1" />
<widget source="key_blue" render="Label" position="440,e-45" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#18188b" transparent="1" />
</screen>"""
def __init__(self, session):
Screen.__init__(self, session)
Screen.setTitle(self, _("HDMI CEC Setup"))
from Components.ActionMap import ActionMap
from Components.Button import Button
self["key_red"] = StaticText(_("Cancel"))
self["key_green"] = StaticText(_("OK"))
self["key_yellow"] = StaticText(_("Set fixed"))
self["key_blue"] = StaticText(_("Clear fixed"))
self["current_address"] = StaticText()
self["fixed_address"] = StaticText()
self["actions"] = ActionMap(["SetupActions", "ColorActions", "MenuActions"],
{
"ok": self.keyGo,
"save": self.keyGo,
"cancel": self.keyCancel,
"green": self.keyGo,
"red": self.keyCancel,
"yellow": self.setFixedAddress,
"blue": self.clearFixedAddress,
"menu": self.closeRecursive,
}, -2)
self.onChangedEntry = [ ]
self.list = []
ConfigListScreen.__init__(self, self.list, session = self.session, on_change = self.changedEntry)
self.createSetup()
def createSetup(self):
self.list = []
self.list.append(getConfigListEntry(_("Enabled"), config.hdmicec.enabled))
if config.hdmicec.enabled.getValue():
self.list.append(getConfigListEntry(_("Put TV in standby"), config.hdmicec.control_tv_standby))
self.list.append(getConfigListEntry(_("Wakeup TV from standby"), config.hdmicec.control_tv_wakeup))
self.list.append(getConfigListEntry(_("Regard deep standby as standby"), config.hdmicec.handle_deepstandby_events))
self.list.append(getConfigListEntry(_("Switch TV to correct input"), config.hdmicec.report_active_source))
self.list.append(getConfigListEntry(_("Use TV remote control"), config.hdmicec.report_active_menu))
self.list.append(getConfigListEntry(_("Handle standby from TV"), config.hdmicec.handle_tv_standby))
self.list.append(getConfigListEntry(_("Handle wakeup from TV"), config.hdmicec.handle_tv_wakeup))
self.list.append(getConfigListEntry(_("Wakeup signal from TV"), config.hdmicec.tv_wakeup_detection))
self.list.append(getConfigListEntry(_("Forward volume keys"), config.hdmicec.volume_forwarding))
self.list.append(getConfigListEntry(_("Put your STB_BOX in standby"), config.hdmicec.control_receiver_standby))
self.list.append(getConfigListEntry(_("Wakeup your STB_BOX from standby"), config.hdmicec.control_receiver_wakeup))
self.list.append(getConfigListEntry(_("Minimum send interval"), config.hdmicec.minimum_send_interval))
if fileExists("/proc/stb/hdmi/preemphasis"):
self.list.append(getConfigListEntry(_("Use HDMI-preemphasis"), config.hdmicec.preemphasis))
self["config"].list = self.list
self["config"].l.setList(self.list)
self.updateAddress()
# for summary:
def changedEntry(self):
if self["config"].getCurrent()[0] == _("Enabled"):
self.createSetup()
for x in self.onChangedEntry:
x()
def keyLeft(self):
ConfigListScreen.keyLeft(self)
def keyRight(self):
ConfigListScreen.keyRight(self)
def keyGo(self):
for x in self["config"].list:
x[1].save()
configfile.save()
self.close()
def keyCancel(self):
for x in self["config"].list:
x[1].cancel()
self.close()
def setFixedAddress(self):
Components.HdmiCec.hdmi_cec.setFixedPhysicalAddress(Components.HdmiCec.hdmi_cec.getPhysicalAddress())
self.updateAddress()
def clearFixedAddress(self):
Components.HdmiCec.hdmi_cec.setFixedPhysicalAddress("0.0.0.0")
self.updateAddress()
def updateAddress(self):
self["current_address"].setText(_("Current CEC address") + ": " + Components.HdmiCec.hdmi_cec.getPhysicalAddress())
if config.hdmicec.fixed_physical_address.getValue() == "0.0.0.0":
fixedaddresslabel = ""
else:
fixedaddresslabel = _("Using fixed address") + ": " + config.hdmicec.fixed_physical_address.getValue()
self["fixed_address"].setText(fixedaddresslabel)
def Plugins(**kwargs):
return []
``` |
[
{
"content": "Replicate the code snippet exactly, without paraphrasing or reformatting:\n```python\nfrom src.todoMelvin import settings\nfrom datetime import datetime\nfrom subprocess import check_output\n\nlogSender = None\n\nclass WarningLevels:\n Debug = {'level' : 0, 'tag' : 'DEBUG'} \n Info = ... | [
{
"content": "Replicate the code snippet exactly, without paraphrasing or reformatting:\n<|memory_start|>```python\nfrom src.todoMelvin import settings\nfrom datetime import datetime\nfrom subprocess import check_output\n\nlogSender = None\n\nclass WarningLevels:\n Debug = {'level' : 0, 'tag' : 'DEBUG'} ... | ```python
from src.todoMelvin import settings
from datetime import datetime
from subprocess import check_output
logSender = None
class WarningLevels:
Debug = {'level' : 0, 'tag' : 'DEBUG'}
Info = {'level' : 1, 'tag' : 'INFO'}
Warn = {'level' : 2, 'tag' : 'WARNING'}
Fatal = {'level' : 3, 'tag' : 'FATAL'}
def callWithLogging(callData):
dateTime = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
messageTag = "%s [%s] [CALL]"%(dateTime, logSender)
try:
with open(settings.logFile, "a") as myfile:
msg = "%s %s"%(messageTag, (' ').join(callData))
myfile.write(msg + "\n")
if settings.logPrintCalls.lower() == 'true':
print msg
output = check_output(callData)
for line in output.split('\n'):
if len(line) > 0:
msg = "%s %s"%(messageTag, line)
myfile.write(msg+ "\n")
if settings.logPrintCalls.lower() == 'true':
print msg
myfile.close()
except:
print "Unable to open logfile for subprocess call \'%s\'"%(' '.join(callData))
return
def log(warningLevel, message):
dateTime = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
finalMessage = "%s [%s] [%s] %s"%(dateTime, logSender, warningLevel['tag'], message)
if int(settings.logStdoutWLevel) <= warningLevel['level']:
print finalMessage
if int(settings.logFileWLevel) <= warningLevel['level']:
try:
with open(settings.logFile, "a") as myfile:
myfile.write(finalMessage + "\n")
myfile.close()
except:
print "Unable to open logfile."
return
``` |
[
{
"content": "```python\n########################################################################\n# $HeadURL$\n# File : JobCleaningAgent.py\n# Author : A.T.\n########################################################################\n\"\"\"\nThe Job Cleaning Agent controls removing jobs from the WMS in the e... | [
{
"content": "<|memory_start|>```python\n########################################################################\n# $HeadURL$\n# File : JobCleaningAgent.py\n# Author : A.T.\n########################################################################\n\"\"\"\nThe Job Cleaning Agent controls removing jobs from ... | ```python
########################################################################
# $HeadURL$
# File : JobCleaningAgent.py
# Author : A.T.
########################################################################
"""
The Job Cleaning Agent controls removing jobs from the WMS in the end of their life cycle.
"""
from DIRAC import S_OK, gLogger
from DIRAC.Core.Base.AgentModule import AgentModule
from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
from DIRAC.WorkloadManagementSystem.DB.JobDB import JobDB
from DIRAC.WorkloadManagementSystem.DB.TaskQueueDB import TaskQueueDB
from DIRAC.WorkloadManagementSystem.DB.JobLoggingDB import JobLoggingDB
from DIRAC.WorkloadManagementSystem.Client.SandboxStoreClient import SandboxStoreClient
from DIRAC.RequestManagementSystem.Client.Request import Request
from DIRAC.RequestManagementSystem.Client.Operation import Operation
from DIRAC.RequestManagementSystem.Client.File import File
from DIRAC.RequestManagementSystem.Client.ReqClient import ReqClient
import DIRAC.Core.Utilities.Time as Time
import time
import os
REMOVE_STATUS_DELAY = { 'Done':7,
'Killed':1,
'Failed':7 }
class JobCleaningAgent( AgentModule ):
"""
The specific agents must provide the following methods:
- initialize() for initial settings
- beginExecution()
- execute() - the main method called in the agent cycle
- endExecution()
- finalize() - the graceful exit of the method, this one is usually used
for the agent restart
"""
#############################################################################
def initialize( self ):
"""Sets defaults
"""
self.am_setOption( "PollingTime", 60 )
self.jobDB = JobDB()
self.taskQueueDB = TaskQueueDB()
self.jobLoggingDB = JobLoggingDB()
# self.sandboxDB = SandboxDB( 'SandboxDB' )
agentTSTypes = self.am_getOption('ProductionTypes', [])
if agentTSTypes:
self.prod_types = agentTSTypes
else:
self.prod_types = Operations().getValue( 'Transformations/DataProcessing', ['MCSimulation', 'Merge'] )
gLogger.info('Will exclude the following Production types from cleaning %s' % ( ', '.join(self.prod_types) ) )
self.maxJobsAtOnce = self.am_getOption('MaxJobsAtOnce', 100)
self.jobByJob = self.am_getOption('JobByJob', True)
self.throttlingPeriod = self.am_getOption('ThrottlingPeriod', 0.)
return S_OK()
def __getAllowedJobTypes( self ):
#Get valid jobTypes
result = self.jobDB.getDistinctJobAttributes( 'JobType' )
if not result[ 'OK' ]:
return result
cleanJobTypes = []
for jobType in result[ 'Value' ]:
if jobType not in self.prod_types:
cleanJobTypes.append( jobType )
self.log.notice( "JobTypes to clean %s" % cleanJobTypes )
return S_OK( cleanJobTypes )
#############################################################################
def execute( self ):
"""The PilotAgent execution method.
"""
#Delete jobs in "Deleted" state
result = self.removeJobsByStatus( { 'Status' : 'Deleted' } )
if not result[ 'OK' ]:
return result
#Get all the Job types that can be cleaned
result = self.__getAllowedJobTypes()
if not result[ 'OK' ]:
return result
baseCond = { 'JobType' : result[ 'Value' ] }
# Remove jobs with final status
for status in REMOVE_STATUS_DELAY:
delay = REMOVE_STATUS_DELAY[ status ]
condDict = dict( baseCond )
condDict[ 'Status' ] = status
delTime = str( Time.dateTime() - delay * Time.day )
result = self.removeJobsByStatus( condDict, delTime )
if not result['OK']:
gLogger.warn( 'Failed to remove jobs in status %s' % status )
return S_OK()
def removeJobsByStatus( self, condDict, delay = False ):
""" Remove deleted jobs
"""
if delay:
gLogger.verbose( "Removing jobs with %s and older than %s" % ( condDict, delay ) )
result = self.jobDB.selectJobs( condDict, older = delay, limit = self.maxJobsAtOnce )
else:
gLogger.verbose( "Removing jobs with %s " % condDict )
result = self.jobDB.selectJobs( condDict, limit = self.maxJobsAtOnce )
if not result['OK']:
return result
jobList = result['Value']
if len(jobList) > self.maxJobsAtOnce:
jobList = jobList[:self.maxJobsAtOnce]
if not jobList:
return S_OK()
self.log.notice( "Deleting %s jobs for %s" % ( len( jobList ), condDict ) )
count = 0
error_count = 0
result = SandboxStoreClient( useCertificates = True ).unassignJobs( jobList )
if not result[ 'OK' ]:
gLogger.warn( "Cannot unassign jobs to sandboxes", result[ 'Message' ] )
result = self.deleteJobOversizedSandbox( jobList )
if not result[ 'OK' ]:
gLogger.warn( "Cannot schedle removal of oversized sandboxes", result[ 'Message' ] )
return result
failedJobs = result['Value']['Failed']
for job in failedJobs:
jobList.pop( jobList.index( job ) )
if self.jobByJob:
for jobID in jobList:
resultJobDB = self.jobDB.removeJobFromDB( jobID )
resultTQ = self.taskQueueDB.deleteJob( jobID )
resultLogDB = self.jobLoggingDB.deleteJob( jobID )
errorFlag = False
if not resultJobDB['OK']:
gLogger.warn( 'Failed to remove job %d from JobDB' % jobID, result['Message'] )
errorFlag = True
if not resultTQ['OK']:
gLogger.warn( 'Failed to remove job %d from TaskQueueDB' % jobID, result['Message'] )
errorFlag = True
if not resultLogDB['OK']:
gLogger.warn( 'Failed to remove job %d from JobLoggingDB' % jobID, result['Message'] )
errorFlag = True
if errorFlag:
error_count += 1
else:
count += 1
if self.throttlingPeriod:
time.sleep(self.throttlingPeriod)
else:
result = self.jobDB.removeJobFromDB( jobList )
if not result['OK']:
gLogger.error('Failed to delete %d jobs from JobDB' % len(jobList) )
else:
gLogger.info('Deleted %d jobs from JobDB' % len(jobList) )
for jobID in jobList:
resultTQ = self.taskQueueDB.deleteJob( jobID )
if not resultTQ['OK']:
gLogger.warn( 'Failed to remove job %d from TaskQueueDB' % jobID, resultTQ['Message'] )
error_count += 1
else:
count += 1
result = self.jobLoggingDB.deleteJob( jobList )
if not result['OK']:
gLogger.error('Failed to delete %d jobs from JobLoggingDB' % len(jobList) )
else:
gLogger.info('Deleted %d jobs from JobLoggingDB' % len(jobList) )
if count > 0 or error_count > 0 :
gLogger.info( 'Deleted %d jobs from JobDB, %d errors' % ( count, error_count ) )
return S_OK()
def deleteJobOversizedSandbox( self, jobIDList ):
""" Delete the job oversized sandbox files from storage elements
"""
failed = {}
successful = {}
lfnDict = {}
for jobID in jobIDList:
result = self.jobDB.getJobParameter( jobID, 'OutputSandboxLFN' )
if result['OK']:
lfn = result['Value']
if lfn:
lfnDict[lfn] = jobID
else:
successful[jobID] = 'No oversized sandbox found'
else:
gLogger.warn( 'Error interrogting JobDB: %s' % result['Message'] )
if not lfnDict:
return S_OK( {'Successful':successful, 'Failed':failed} )
# Schedule removal of the LFNs now
for lfn, jobID in lfnDict.items():
result = self.jobDB.getJobAttributes( jobID, ['OwnerDN', 'OwnerGroup'] )
if not result['OK']:
failed[jobID] = lfn
continue
if not result['Value']:
failed[jobID] = lfn
continue
ownerDN = result['Value']['OwnerDN']
ownerGroup = result['Value']['OwnerGroup']
result = self.__setRemovalRequest( lfn, ownerDN, ownerGroup )
if not result['OK']:
failed[jobID] = lfn
else:
successful[jobID] = lfn
result = {'Successful':successful, 'Failed':failed}
return S_OK( result )
def __setRemovalRequest( self, lfn, ownerDN, ownerGroup ):
""" Set removal request with the given credentials
"""
oRequest = Request()
oRequest.OwnerDN = ownerDN
oRequest.OwnerGroup = ownerGroup
oRequest.RequestName = os.path.basename( lfn ).strip() + '_removal_request.xml'
oRequest.SourceComponent = 'JobCleaningAgent'
removeFile = Operation()
removeFile.Type = 'RemoveFile'
removedFile = File()
removedFile.LFN = lfn
removeFile.addFile( removedFile )
oRequest.addOperation( removeFile )
return ReqClient().putRequest( oRequest )
``` |
[
{
"content": "```python\nfrom django.conf.urls import include, url, patterns\nfrom django.contrib import admin\nfrom HealthApp import views\n\n\"\"\"\nThe urlpatterns is how we map the site urls to specific views in the views.py. The first part is\na regular expression to describe the url pattern, followed by t... | [
{
"content": "<|memory_start|>```python\nfrom django.conf.urls import include, url, patterns\nfrom django.contrib import admin\nfrom HealthApp import views\n\n\"\"\"\nThe urlpatterns is how we map the site urls to specific views in the views.py. The first part is\na regular expression to describe the url patter... | ```python
from django.conf.urls import include, url, patterns
from django.contrib import admin
from HealthApp import views
"""
The urlpatterns is how we map the site urls to specific views in the views.py. The first part is
a regular expression to describe the url pattern, followed by the view that should be called.
Lastly, a name is given to each pattern so that they can be referenced from elsewhere in the code.
For example, when an HTTPResponseRedirect(reverse('login')) is returned in one of the views, it
is doing a reverse lookup of the url pattern named 'login' and returning the view (and subsequently
the html page) associated with the view.
There are a couple patterns that are a bit unique. The first is the url for the admin page which
links to the built in url network already created by django. The other unique urls are the ones
that deal with patient information since the urls are specific to the patient, and the username in
the url needs to be passed into the view as a parameter. The format of (?P<username>\w+) is used
to first identify that information is being captured, and to identify what parameter it is being passed
in as (in this case, the username parameter).
Note: the first url is used to redirect users to the login page when at the 'root' url of the site.
"""
urlpatterns = [
url(r'^$', views.userLogin, name='login'),
url(r'^login/$', views.userLogin, name='login'),
url(r'^register/$', views.register, name='register'),
url(r'^(?P<username>\w+)/profile/$', views.profile, name='profile'),
url(r'^(?P<username>\w+)/staffProfile/$', views.staffProfile, name='staffProfile'),
url(r'^(?P<username>\w+)/staffProfile/(?P<patient>\w+)$', views.updateUser, name='updateUser'),
url(r'^logout/$', views.userLogout, name='logout'),
url(r'^admin/', include(admin.site.urls)),
url(r'^profileEdit/$', views.profileEdit, name='profileEdit'),
url(r'^createAppForm/', views.createApp, name='createAppForm'),
url(r'^deleteAppForm/(\d+)$', views.deleteApp, name='deleteAppForm'),
url(r'^export/$', views.export, name='export')
]
``` |
[
{
"content": "Here is the snippet:\n```python\n#-----------------------------------------------------------------------------\n# Copyright (c) 2012 - 2019, Anaconda, Inc., and Bokeh Contributors.\n# All rights reserved.\n#\n# The full license is in the file LICENSE.txt, distributed with this software.\n#-------... | [
{
"content": "Here is the snippet:\n<|memory_start|>```python\n#-----------------------------------------------------------------------------\n# Copyright (c) 2012 - 2019, Anaconda, Inc., and Bokeh Contributors.\n# All rights reserved.\n#\n# The full license is in the file LICENSE.txt, distributed with this sof... | ```python
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2019, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import pytest ; pytest
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
import os
import io
import json
from mock import patch
# External imports
# Bokeh imports
# Module under test
import bokeh.util.compiler as buc
#-----------------------------------------------------------------------------
# Setup
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
def test_nodejs_compile_coffeescript():
assert buc.nodejs_compile("""(a, b) -> a + b""", "coffeescript", "some.coffee") == \
dict(code="""\
(function (a, b) {
return a + b;
});
""", deps=[])
assert buc.nodejs_compile("""some = require 'some/module'""", "coffeescript", "some.coffee") == \
dict(code="""\
var some;
some = require('some/module');
""", deps=["some/module"])
assert buc.nodejs_compile("""(a, b) -> a + b +""", "coffeescript", "some.coffee") == \
dict(error=dict(
message="unexpected end of input",
text="some.coffee:unexpected end of input"))
assert buc.nodejs_compile("""some = require some/module'""", "coffeescript", "some.coffee") == \
dict(error=dict(
line=1,
column=27,
message="missing '",
text="some.coffee:1:27:missing '",
extract="some = require some/module'",
annotated="some.coffee:1:27:missing '\n some = require some/module'\n ^"))
assert buc.nodejs_compile("""(a, b) -> a + b +""", "coffeescript", "some.coffee") == \
dict(error=dict(
message="unexpected end of input",
text="some.coffee:unexpected end of input"))
assert buc.nodejs_compile("""some = require some/module'""", "coffeescript", "some.coffee") == \
dict(error=dict(
line=1,
column=27,
message="missing '",
text="some.coffee:1:27:missing '",
extract="some = require some/module'",
annotated="some.coffee:1:27:missing '\n some = require some/module'\n ^"))
def test_nodejs_compile_javascript():
assert buc.nodejs_compile("""function f(a, b) { return a + b; };""", "javascript", "some.js") == \
dict(code="""\
function f(a, b) { return a + b; }
;
""", deps=[])
assert buc.nodejs_compile("""var some = require('some/module');""", "javascript", "some.js") == \
dict(code="""\
var some = require('some/module');
""", deps=["some/module"])
assert buc.nodejs_compile("""function f(a, b) { eturn a + b; };""", "javascript", "some.js") == \
dict(error=
'\x1b[96msome.js\x1b[0m:\x1b[93m1\x1b[0m:\x1b[93m26\x1b[0m - '
"\x1b[91merror\x1b[0m\x1b[90m TS1005: \x1b[0m';' expected.\n"
'\n'
'\x1b[7m1\x1b[0m function f(a, b) { eturn a + b; };\n'
'\x1b[7m \x1b[0m \x1b[91m ~\x1b[0m\n')
def test_nodejs_compile_less():
assert buc.nodejs_compile(""".bk-some-style { color: mix(#ff0000, #0000ff, 50%); }""", "less", "some.less") == \
dict(code=""".bk-some-style{color:#800080}""")
assert buc.nodejs_compile(""".bk-some-style color: green; }""", "less", "some.less") == \
dict(error=dict(
line=1,
column=21,
message="Unrecognised input",
text="some.less:1:21:Unrecognised input",
extract=".bk-some-style color: green; }",
annotated="some.less:1:21:Unrecognised input\n .bk-some-style color: green; }"))
def test_Implementation():
obj = buc.Implementation()
assert obj.file == None
def test_Inline():
obj = buc.Inline("code")
assert obj.code == "code"
assert obj.file == None
obj = buc.Inline("code", "file")
assert obj.code == "code"
assert obj.file == "file"
def test_CoffeeScript():
obj = buc.CoffeeScript("code")
assert isinstance(obj, buc.Inline)
assert obj.code == "code"
assert obj.file == None
assert obj.lang == "coffeescript"
def test_TypeScript():
obj = buc.TypeScript("code")
assert isinstance(obj, buc.Inline)
assert obj.code == "code"
assert obj.file == None
assert obj.lang == "typescript"
def test_JavaScript():
obj = buc.JavaScript("code")
assert isinstance(obj, buc.Inline)
assert obj.code == "code"
assert obj.file == None
assert obj.lang == "javascript"
def test_Less():
obj = buc.Less("code")
assert isinstance(obj, buc.Inline)
assert obj.code == "code"
assert obj.file == None
assert obj.lang == "less"
@patch('io.open')
def test_FromFile(mock_open):
obj = buc.FromFile("path.coffee")
assert obj.lang == "coffeescript"
obj = buc.FromFile("path.ts")
assert obj.lang == "typescript"
obj = buc.FromFile("path.js")
assert obj.lang == "javascript"
obj = buc.FromFile("path.css")
assert obj.lang == "less"
obj = buc.FromFile("path.less")
assert obj.lang == "less"
def test_exts():
assert buc.exts == (".coffee", ".ts", ".js", ".css", ".less")
def test_jsons():
for file in os.listdir(os.path.join(buc.bokehjs_dir, "js")):
if file.endswith('.json'):
with io.open(os.path.join(buc.bokehjs_dir, "js", file), encoding="utf-8") as f:
assert all(['\\' not in mod for mod in json.loads(f.read())])
def test_inline_extension():
from bokeh.io import save
from bokeh.models import TickFormatter
from bokeh.plotting import figure
from bokeh.util.compiler import TypeScript
TS_CODE = """
import {TickFormatter} from "models/formatters/tick_formatter"
export class TestFormatter extends TickFormatter {
doFormat(ticks: number[]): string[] {
if (ticks.length == 0)
return[]
else {
const formatted = [`${ticks[0]}`]
for (let i = 1; i < ticks.length; i++) {
const difference = (ticks[i] - ticks[0]).toPrecision(2)
formatted.push(`+${difference}}`)
}
return formatted
}
}
}
"""
class TestFormatter(TickFormatter):
__implementation__ = TypeScript(TS_CODE)
class TestFormatter2(TickFormatter):
__implementation__ = TypeScript("^") # invalid syntax on purpose
p = figure()
p.circle([1, 2, 3, 4, 6], [5, 7, 3, 2, 4])
p.xaxis.formatter = TestFormatter()
save(p)
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
``` |
[
{
"content": "```python\nfrom __future__ import print_function\nfrom array import array\nfrom itertools import islice\n\ndef lcs_cut2(s1, s2, lcs_low_bound=0, bg=None, debug=False):\n \"\"\"Compule the length of the LCS 2 sequences s1 and s2.\n \n lcs_low_bound : (int), hint of lower bound for the leng... | [
{
"content": "<|memory_start|>```python\nfrom __future__ import print_function\nfrom array import array\nfrom itertools import islice\n\ndef lcs_cut2(s1, s2, lcs_low_bound=0, bg=None, debug=False):\n \"\"\"Compule the length of the LCS 2 sequences s1 and s2.\n \n lcs_low_bound : (int), hint of lower bo... | ```python
from __future__ import print_function
from array import array
from itertools import islice
def lcs_cut2(s1, s2, lcs_low_bound=0, bg=None, debug=False):
    """Compute the length of the LCS of the two sequences s1 and s2.

    lcs_low_bound : (int) hint of a lower bound for the length of the
        LCS to search for.  Defaults to 0.
    bg : debugging aid for visualising the algorithm with ipythonblocks;
        unused unless the corresponding debug code is re-enabled.
    debug : when True, drop into pdb before the main loop.

    Algorithmic description:

        This is a derivation of Hirschberg's algorithm which includes
        some optimisations for specific cases.  It uses O(n) memory
        (n = len(s1)) and has a worst-case time complexity of O(n**2).
        In the best case (l ~ n) the time complexity is closer to
        O(n*l), where l is the length of the longest common
        subsequence.  Implementation details of s1/s2 slicing will
        affect the effective performance.
    """
    m = len(s1)
    n = len(s2)
    if n == 0 or m == 0:
        return 0
    # "rng" is for row ("rang" in French); "c" is current, "p" previous.
    # The arrays are n+1 long so that the last element stays 0: j-1 then
    # wraps around to this sentinel, which avoids special-casing j == 0.
    rngc = array('i', [0] * (n + 1))  # current row
    rngp = array('i', [0] * (n + 1))  # previous row
    # Best LCS value reached so far during the search.
    currentmax = lcs_low_bound
    # Mirrors rngc[j-1]; kept in a local to avoid an extra array lookup
    # in the inner loop.
    rngcjm = None
    # Lower bound for the iteration on s1/i, and lower bound on s2/j,
    # both discovered through the look-ahead optimisation below.
    mini, minj = 0, 0
    if debug:
        import pdb
        pdb.set_trace()
    for i, c1 in enumerate(s1):
        # The current row becomes the previous one; the old "previous"
        # storage is recycled instead of allocating a new array.
        rngc, rngp = rngp, rngc
        # Band of columns worth computing for this row, derived from the
        # best value found so far.
        limm, limp = max(i - m + currentmax, 0, minj - 1), \
            min(i + n - currentmax + 1, n)
        rngcjm = rngc[limm - 1]
        if i < mini:
            # The look-ahead proved this row cannot improve the result.
            continue
        for j, c2 in zip(range(limm, limp), islice(s2, limm, limp)):
            if c1 == c2:
                if i == 0 or j == 0:
                    newval = 1
                else:
                    newval = rngp[j - 1] + 1
                # Peek ahead along the diagonal as far as possible while
                # the two sequences keep matching; for highly similar
                # strings this tells us which parts of the LCS matrix we
                # do not need to explore.  Done only once, at the start
                # of a matching streak.
                if s1[i - 1] != s2[j - 1] or i == 0 or j == 0:
                    lookahead = -1
                    k = min(m - i, n - j)
                    for cx, cy in zip(s1[i:i + k], s2[j:j + k]):
                        if cx == cy:
                            lookahead += 1
                        else:
                            break
                    tmp = rngc[j] + lookahead
                    # With a guaranteed value `tmp` reachable from (i, j)
                    # it is useless to process columns whose j value is
                    # lower than tmp-j, or rows whose i value is lower
                    # than tmp-i.
                    lminj = tmp - j
                    lmini = tmp - i
                    if lmini > mini:
                        mini = lmini
                    if lminj > minj:
                        minj = lminj
                        for xx in range(0, minj):
                            rngp[xx] = tmp - 1
                            rngc[xx] = tmp - 1
                    if j >= limp + 1:
                        break
                    if tmp > currentmax:
                        currentmax = tmp
                        limp = min(i + n - currentmax + 1, n)
                if newval > currentmax:
                    currentmax = newval
            else:
                b = rngp[j]
                newval = rngcjm if rngcjm > b else b
            rngc[j] = rngcjm = newval
    return rngc[-2]
``` |
[
{
"content": "Replicate the source code:\n```python\n# -*- coding: utf8 -*-\n## Copyright (c) 2013 Stefan Thesing\n##\n##This file is part of Podstatty.\n##\n##Podstatty is free software: you can redistribute it and/or modify\n##it under the terms of the GNU General Public License as published by\n##the Free So... | [
{
"content": "Replicate the source code:\n<|memory_start|>```python\n# -*- coding: utf8 -*-\n## Copyright (c) 2013 Stefan Thesing\n##\n##This file is part of Podstatty.\n##\n##Podstatty is free software: you can redistribute it and/or modify\n##it under the terms of the GNU General Public License as published b... | ```python
# -*- coding: utf8 -*-
## Copyright (c) 2013 Stefan Thesing
##
##This file is part of Podstatty.
##
##Podstatty is free software: you can redistribute it and/or modify
##it under the terms of the GNU General Public License as published by
##the Free Software Foundation, either version 3 of the License, or
##(at your option) any later version.
##
##Podstatty is distributed in the hope that it will be useful,
##but WITHOUT ANY WARRANTY; without even the implied warranty of
##MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
##GNU General Public License for more details.
##
##You should have received a copy of the GNU General Public License
##along with Podstatty. If not, see http://www.gnu.org/licenses/.
from storm.locals import Storm, Int, Unicode, ReferenceSet
import requests
class Db:
    """
    Convenience wrapper around the Storm store holding podcast
    statistics.

    NOTE: this module targets Python 2 (it relies on the ``unicode``
    builtin).
    """

    def __init__(self, store, base_url):
        # store: an open storm.locals.Store
        # base_url: prefix prepended to logged URLs for HEAD requests
        self.store = store
        self.base_url = base_url

    def add_file(self, filename, exclude_strings):
        """
        Process a prepared logfile and store the aggregated traffic per
        URL into the database.

        `filename` is expected to look like
        ``...access_log_<date>_filtered.txt``; the date part is used to
        detect logfiles that were already processed.
        `exclude_strings` is a list of substrings; any log line
        containing one of them is skipped.
        """
        # Derive the date key from the filename.
        date = filename.split("access_log_")[1]
        date = date.replace("_filtered.txt", "")
        if self.store.find(Stats, Stats.date_time_string == unicode(date)).count():
            print("A logfile for this date has already been processed.")
            return None
        # Aggregate traffic per URL in a single pass.  (The previous
        # implementation was quadratic: one full scan of all parsed
        # lines per distinct URL.)
        totals = {}
        order = []  # distinct URLs in first-seen order
        # Use a context manager so the file handle is always closed
        # (it used to leak).
        with open(filename) as log:
            for line in log:
                # Skip lines matching any user-configured filter string.
                if any(string in line for string in exclude_strings):
                    continue
                split_line = line.split()
                if len(split_line) < 2:
                    # Ignore blank/malformed lines instead of crashing.
                    continue
                url = unicode(split_line[0])
                if url not in totals:
                    totals[url] = 0
                    order.append(url)
                totals[url] += int(split_line[1])
        for url in order:
            self.store.add(Stats(url, totals[url], unicode(date)))
            # Check if this URL is already in table "filesizes"; if not,
            # fetch the filesize and store it there.
            self.check_url(url)
        self.store.flush()
        self.store.commit()

    def check_url(self, url):
        """
        Ensure the filesize of the file behind `url` is stored in the
        database.

        If unknown, issue an HTTP HEAD request against
        ``base_url + url`` and store the reported Content-Length.
        """
        # Only query the server when the URL is not yet known.
        if not self.store.find(Filesizes, Filesizes.url == url).count():
            # TODO: implement an error routine (timeouts, DNS failures).
            r = requests.head(self.base_url + url)
            # Files no longer present on the server are skipped, for now.
            # TODO: maybe ask the user what to do with files that are
            # gone but should stay in the statistics?
            if r.status_code != 404:
                size = int(r.headers['Content-Length'])
                # Write the URL and its filesize to the database.
                self.store.add(Filesizes(url, size))
class Stats(Storm):
    """
    Storm mapping for the ``stats`` table, which records the actual
    numbers (transferred bytes per URL and per day):

    'CREATE TABLE stats (id INTEGER PRIMARY KEY, url VARCHAR,
    traffic INTEGER, date_time_string VARCHAR)'
    """
    __storm_table__ = "stats"

    id = Int(primary=True)
    url = Unicode()
    traffic = Int()
    date_time_string = Unicode()

    def __init__(self, url, traffic, date_time_string):
        self.date_time_string = date_time_string
        self.traffic = traffic
        self.url = url
class Filesizes(Storm):
    """
    Storm mapping for the ``filesizes`` table, which caches the size in
    bytes of the file served at each URL:

    'CREATE TABLE filesizes (url VARCHAR PRIMARY KEY, filesize INTEGER)'
    """
    __storm_table__ = "filesizes"

    url = Unicode(primary=True)
    filesize = Int()

    def __init__(self, url, filesize):
        self.filesize = filesize
        self.url = url
``` |
[
{
"content": "Repeat the code exactly:\n```python\n# -*- coding: UTF-8 -*-\n# Copyright 2011-2018 Rumma & Ko Ltd\n# License: BSD (see file COPYING for details)\n\n\n# This is a masterpiece of untransparent code, difficult to understand\n# and maintain. But I didn't find a better solution. Maybe an XSLT\n# exp... | [
{
"content": "Repeat the code exactly:\n<|memory_start|>```python\n# -*- coding: UTF-8 -*-\n# Copyright 2011-2018 Rumma & Ko Ltd\n# License: BSD (see file COPYING for details)\n\n\n# This is a masterpiece of untransparent code, difficult to understand\n# and maintain. But I didn't find a better solution. Mayb... | ```python
# -*- coding: UTF-8 -*-
# Copyright 2011-2018 Rumma & Ko Ltd
# License: BSD (see file COPYING for details)
# This is a masterpiece of untransparent code, difficult to understand
# and maintain. But I didn't find a better solution. Maybe an XSLT
# expert might help us to rewrite this from scratch. The purpose is very
# simple: transform the content of a Tx25 response into a printable
# document. A Tx25 response is a rather complex data structure with
# lots and lots of elements. It contains a handler for every element
# type
# In case you need to understand, consult the source code of
# :class:`RowFactory`.
from __future__ import unicode_literals
from builtins import str
from django.db import models
from django.utils.translation import ugettext as _
from django.utils.encoding import force_text
from lino.api import dd
from lino.utils import AttrDict, IncompleteDate
from etgen import html as xghtml
E = xghtml.E
from .utils import cbss2gender
from .models import reply_has_result
from .ui import ConfidentialResultsTable
def rn2date(rd):
    # Convert a National-Register date node (string fields Century,
    # Year, Month, Day) into an IncompleteDate.
    return IncompleteDate(
        int(rd.Century + rd.Year),
        int(rd.Month),
        int(rd.Day))


def deldate(n):
    # Render the optional DelDate ("until ...") of a node as a list of
    # text chunks; empty list when the node has no DelDate.
    if hasattr(n, 'DelDate'):
        return [' (' + str(_('until ')) +
                dd.dtos(rn2date(n.DelDate)) + ')']
    return []


# def simpleattr(n,name):
#     v = getattr(n,name,None)
#     if v:
#         return [ ', '+name+' ' + unicode(v)]
#     return []

def simpletype(v):
    # NOTE(review): byte-identical to boldstring() below; presumably it
    # was meant to render *without* bold — confirm against upstream.
    return Info(xghtml.E.b(str(v)))


def boldstring(v):
    # Wrap the string form of v in a bold element.
    return Info(xghtml.E.b(str(v)))


def validate_element(c):
    # Guard used by Info(): only None is rejected.
    if c is None:
        raise Exception("Invalid element %r" % c)
class Info(object):
    # A growing list of text/HTML chunks which is later rendered with
    # E.p(*info.chunks).  Builder methods mutate in place; addfrom() and
    # add_codelabel() return self so calls can be chained.

    def __init__(self, *chunks):
        for c in chunks:
            validate_element(c)
        self.chunks = list(chunks)

    def addfrom(self, node, name, prefix=None, fmt=boldstring, suffix=''):
        # Append ``prefix + fmt(value) + suffix`` for attribute `name`
        # of `node`, if that attribute exists and is truthy.  `fmt` must
        # return an Info.  Returns self (chainable).
        v = getattr(node, name, None)
        if not v:
            return self
        if prefix is None:
            prefix = '%s ' % name
        else:
            prefix = force_text(prefix)
        # Add ": " unless the prefix already ends in a separator, and
        # ", " before it unless it explicitly starts with whitespace or
        # this is the first chunk.
        if prefix and prefix[-1] not in ' :(':
            prefix += ': '
        if len(self.chunks):
            if not prefix.startswith(' '):
                prefix = ', ' + prefix
        self.chunks += [prefix] + fmt(v).chunks
        if suffix:
            self.chunks.append(force_text(suffix))
        return self

    def add_deldate(self, n):
        # Append the optional deletion date of node `n`.
        self.chunks += deldate(n)

    def add_codelabel(self, n):
        # Append the code/label rendering of node `n`; returns self.
        self.chunks += code_label(n).chunks
        # if hasattr(n,'Label'):
        #     self.addfrom(n,'Label')
        #     self.addfrom(n,'Code','(',simpletype,')')
        # else:
        #     self.addfrom(n,'Code','[',boldstring,']')
        return self
def code_label(n):
    # Render a node with optional Label and Code attributes as
    # "<b>Label</b> (Code)".
    chunks = []
    if hasattr(n, 'Label') and n.Label:
        chunks.append(xghtml.E.b(n.Label))
    if hasattr(n, 'Code') and n.Code:
        chunks += [' (', n.Code, ')']
    return Info(*chunks)

# CodeLabel = code_label
# def CodeLabel(n):
#     info = Info()
#     return info


def NameType(n):
    # Render a name node: bold last name(s), then ", " and the first
    # name(s) when present.
    info = Info()
    s = ' '.join([ln.Label for ln in n.LastName])
    info.chunks.append(xghtml.E.b(s))
    if hasattr(n, 'FirstName'):
        info.chunks.append(', ')
        s = ' '.join([fn.Label for fn in n.FirstName])
        info.chunks.append(s)
    return info

# def addinfo(node,name,prefix=None,fmt=simpletype,suffix=''):
#     v = getattr(node,name,None)
#     if not v: return []
#     if prefix is None:
#         prefix = ', %s ' % name
#     info = [force_text(prefix)] + fmt(v)
#     if suffix:
#         info.append(force_text(suffix))
#     return info
# Per-element formatters.  Each ``XxxType(n)`` maps one Tx25 response
# node to an Info (a list of renderable chunks).

def DateType(n):
    return Info(dd.dtos(rn2date(n)))


def ForfeitureDateType(n):
    info = Info(dd.dtos(rn2date(n)))
    info.addfrom(n, 'Graphic', ' (', simpletype, ')')
    return info


def ExpiryDateType(n):
    info = Info(dd.dtos(rn2date(n)))
    info.addfrom(n, 'Graphic', ' (', simpletype, ')')
    return info


def TribunalType(n):
    return code_label(n)


def PlaceType(n):
    return code_label(n)


def SituationType111(n):
    return code_label(n)


def JustificationType(n):
    return code_label(n)


def GraphicPlaceType(n):
    # Country plus free-text ("graphic") place name.
    info = CountryType(n.Country)
    info.addfrom(n, 'Graphic', '')
    # if hasattr(n,'Graphic'):
    #     info.append(', graphic:'+n.Graphic)
    return info


def ForeignJudgementType(n):
    return GraphicPlaceType(n.Place)


def BelgianJudgementType(n):
    info = Info()
    info.addfrom(n, 'Tribunal', None, TribunalType)
    info.addfrom(n, 'Date', None, DateType)
    info.addfrom(n, 'Place', None, PlaceType)
    # info += TribunalType(n.Tribunal)
    # info += DateType(n.Date)
    # info += PlaceType(n.Place)
    return info


def CountryType(n):
    return code_label(n)


def LieuType(n):
    # A place given as exactly one of three alternatives (xs:choice):
    # a coded place, a graphic place, or a judgement (Belgian/foreign).
    info = Info()
    if hasattr(n, 'Place1'):
        # info += code_label(n.Place1)
        info.addfrom(n, 'Place1', None, code_label)
    elif hasattr(n, 'Place2'):
        info.addfrom(n, 'Place2', None, GraphicPlaceType)
    else:
        place = n.Place3
        # info += GraphicPlaceType(place)
        info.addfrom(place, 'BelgianJudgement', '', BelgianJudgementType)
        info.addfrom(place, 'ForeignJudgement', '', ForeignJudgementType)
        # if hasattr(place,'BelgianJudgement'):
        #     info += BelgianJudgementType(place.BelgianJudgement)
        # else:
        #     info += ForeignJudgementType(place.ForeignJudgement)
    return info


def DiplomaticPostType(n):
    return code_label(n)


def TerritoryType(n):
    return code_label(n)


def ProvinceType(n):
    return code_label(n)


def IssuerType(n):
    # Issuer of a document: place, province or diplomatic post.
    # Prefixes can be empty since this is a xs:choice.
    info = Info().addfrom(n, 'Place', '', PlaceType)
    info.addfrom(n, 'Province', '', ProvinceType, ' (%s)' %
                 str(_("Province")))
    info.addfrom(n, 'PosteDiplomatique', '', DiplomaticPostType, ' (%s)' %
                 str(_("Diplomatic post")))
    return info


def ResidenceType(n):
    return code_label(n)


def NationalNumberType(n):
    info = Info().addfrom(n, 'NationalNumber', '')
    return info  # [n.NationalNumber]


def PartnerType(n):
    info = Info().addfrom(n, 'NationalNumber', '', NationalNumberType)
    # info.addfrom(n,'Name','',NameType)
    info.addfrom(n, 'Name', ' ', NameType)
    return info


def NotaryType(n):
    info = Info().addfrom(n, 'NameNotary')
    info.addfrom(n, 'Place', ' in ', PlaceType)
    info.addfrom(n, 'Country', ', ', CountryType)
    return info


def NotificationType(n):
    info = Info().addfrom(n, 'NotificationDate', None, DateType)
    info.addfrom(n, 'Place', ' in ', PlaceType)
    return info


def ReasonType(n):
    return code_label(n)


def CessationType(n):
    return code_label(n)


def DeclarationType(n):
    return code_label(n)


def Residence(n):
    info = Info().addfrom(n, 'Residence', '', ResidenceType)
    info.addfrom(n, 'Fusion', _("Fusion"))
    info.addfrom(n, 'Language', _("Language"))
    info.add_deldate(n)
    return info
# Handlers for individual "IT" (information type) records of the
# Belgian National Register.

def IT003(n):  # AscertainedLegalMainAddresses (determination of residence)
    # raise Exception(str(n))
    def InvestigationResultType(n):
        return code_label(n)
    info = Info().addfrom(n, 'InvestigationResult',
                          '', InvestigationResultType)
    info.addfrom(n, 'Graphic1', '')
    info.addfrom(n, 'Graphic2', '')
    info.add_deldate(n)
    return info


def IT005(n):  # AddressChangeIntention
    # raise Exception(str(n))
    info = Info().addfrom(n, 'OriginPlace', _('Move from '), PlaceType)
    info.addfrom(n, 'DestinationPlace', _('Move to '), PlaceType)
    info.add_deldate(n)
    return info


def IT006(n):
    info = Info()
    info.addfrom(n, 'Country', '', CountryType)
    info.addfrom(n, 'Graphic', ' ')
    info.add_deldate(n)
    return info


def IT008(n):  # ReturnPermissions
    info = Info()
    info.addfrom(n, 'Date', _("Date"), DateType)
    info.addfrom(n, 'ExpiryDate', _("expires "), DateType)
    info.add_deldate(n)
    return info


def IT011(n):  # Pseudonyms
    info = Info()
    info.addfrom(n, 'Name', '', NameType)
    info.add_deldate(n)
    return info


def IT013(n):
    info = Info()
    info.addfrom(n, 'ModificationType', '', ModificationTypeType)
    info.addfrom(n, 'Graphic', '')
    info.add_deldate(n)
    return info


def IT018(n):
    info = Info()
    info.addfrom(n, 'Address', '', AddressType)
    info.add_deldate(n)
    return info


def IT024(n):
    # Only the deletion date is shown for address references.
    info = Info()
    info.add_deldate(n)
    return info


def TypeOfBurialType(n):
    return code_label(n)


def LegalRepresentativeType(n):
    info = Info()
    info.addfrom(n, 'NationalNumber', " ", NationalNumberType)
    info.addfrom(n, 'Graphic', " ")
    return info


def IT152(n):  # BurialModes (mode of burial)
    info = Info()
    info.addfrom(n, 'Date', _("Date"), DateType)
    info.addfrom(n, 'TypeOfBurial', "", TypeOfBurialType)
    info.addfrom(n, 'LegalRepresentative', "", LegalRepresentativeType)
    info.add_deldate(n)
    return info


def IT023(n):  # PostalAddressAbroad (postal address abroad)
    info = Info()
    info.addfrom(n, 'Date', _("Date"), DateType)
    info.addfrom(n, 'Address', "", AddressType)
    info.add_deldate(n)
    return info


def TypeOfAbsenceType(n):
    # Only the code is shown, in bold.
    return Info(E.b(n.Code))


def IT026(n):  # TemporaryAbsences
    info = Info()
    info.addfrom(n, 'Date', _("Date"), DateType)
    info.addfrom(n, 'TypeOfAbsence', "", TypeOfAbsenceType)
    info.addfrom(n, 'Graphic1', " ")
    info.addfrom(n, 'Graphic2', " ")
    info.add_deldate(n)
    return info


def IT028(n):
    info = Info()
    info.addfrom(n, 'LegalFact', '', code_label)
    info.addfrom(n, 'Graphic', '')
    info.addfrom(n, 'ExpiryDate', _("expires "), DateType)
    info.add_deldate(n)
    return info


def IT208(n):
    info = Info()
    # info.addfrom(n,'Date','',DateType)
    info.addfrom(n, 'PseudoNationalNumber', '')
    info.add_deldate(n)
    return info


def IT073(n):
    info = Info()
    info.addfrom(n, 'Category', '', CategoryType)
    info.addfrom(n, 'CertificateNumber', _("no."))
    info.add_deldate(n)
    return info


def IT074(n):
    info = Info()
    info.addfrom(n, 'SerialNumber')
    info.addfrom(n, 'IdentificationNumber')
    info.add_deldate(n)
    return info
def FiliationType(n):
    return code_label(n)


def ParentType(n):
    info = Info()
    info.addfrom(n, 'Name', '', NameType)
    info.addfrom(n, 'NationalNumber', ' (', NationalNumberType, ')')
    return info


def StreetType(n):
    # we don't print the code of streets
    info = Info()
    info.addfrom(n, 'Label', '')
    # info.addfrom(n,'NationalNumber',' (',NationalNumberType,')')
    return info
    # return code_label(n)


def IT020(n):
    # Legal main address; the nested formatter is specific to IT020.
    def AddressType020(n):
        info = Info()
        info.addfrom(n, 'ZipCode', '')
        info.addfrom(n, 'Street', '', StreetType)
        info.addfrom(n, 'HouseNumber', _('no. '))
        info.addfrom(n, 'Box', ' ')
        return info
    info = Info()
    info.addfrom(n, "Address", '', AddressType020)
    return info


def IT110(n):
    # Ascending filiation.
    info = Info()
    info.addfrom(n, 'FiliationType', '', FiliationType)
    info.addfrom(n, 'Parent1', _('of '), ParentType)
    info.addfrom(n, 'Parent2', _('and '), ParentType)
    info.addfrom(n, 'ActNumber', _("Act no. "))
    info.addfrom(n, 'Place', _("in "), PlaceType)
    info.addfrom(n, 'Graphic', " ")
    info.add_deldate(n)
    return info


def IT111(n):
    # Status of the represented or assisted person.
    info = Info()
    info.addfrom(n, 'Date', _("Date"), DateType)
    info.addfrom(n, 'Justification', '', JustificationType)
    info.addfrom(n, 'Situation', '', SituationType111)
    info.addfrom(n, 'Graphic', " ")
    info.add_deldate(n)
    return info


def IT113(n):  # Guardian (person who represents or assists)
    info = Info()
    info.addfrom(n, 'Date', _("Date"), DateType)
    info.addfrom(n, 'Status', _("Status"), code_label)
    info.addfrom(n, 'Justification', _("Justification"), code_label)
    info.addfrom(n, 'Place', _("in "), PlaceType)
    info.addfrom(n, 'Graphic', " ")
    info.addfrom(n, 'Country', " ", CountryType)
    info.add_deldate(n)
    return info


def IT140(n):
    # Household member, seen from the head of the family.
    info = Info().addfrom(n, 'Name', ' ', NameType)
    info.addfrom(n, 'NationalNumber', ' (', NationalNumberType, ')')
    # info += _(' as ')
    info.addfrom(n, 'FamilyRole', _('as '), code_label)
    info.addfrom(n, 'Housing', None, HousingType)
    info.add_deldate(n)
    return info


def IT141(n):
    # Household membership, seen from the member.
    info = Info()
    info.addfrom(n, 'Housing', None, HousingType)
    info.addfrom(n, 'FamilyRole', '', code_label)
    info.addfrom(n, 'Name', _('in family headed by '), NameType)
    info.addfrom(n, 'NationalNumber', ' (', NationalNumberType, ')')
    info.add_deldate(n)
    return info


def NationalityType(n):
    return code_label(n)


def IT213(n):  # Alias
    info = Info()
    info.addfrom(n, 'Name', '', NameType)
    info.addfrom(n, 'Nationality', None, NationalityType)
    info.addfrom(n, 'BirthDate', _(' born '), DateType)
    info.addfrom(n, 'BirthPlace', _(' in '))
    info.add_deldate(n)
    return info


def TypeOfLicenseType(n):
    return code_label(n)


def TypeOfLicenseType194(n):
    return code_label(n)


def DeliveryType206(n):
    # Place if given, otherwise the country (xs:choice).
    v = getattr(n, 'Place', None)
    if v:
        return PlaceType(v)
    return CountryType(n.Country)


def DeliveryType194(n):
    info = Info().addfrom(n, 'Place', _('in '), PlaceType)
    info.addfrom(n, 'Label', '')
    info.addfrom(n, 'Code', ' (', simpletype, ')')
    # info.add_codelabel(n)
    # info += code_label(n)
    return info


def CategoryType(n):
    return code_label(n)


def GearBoxType(n):
    return code_label(n)


def MedicalType(n):
    return code_label(n)


def LicenseCategoriesType(n):
    # Join the labels of all licence categories with "/".
    info = Info()
    # raise Exception(str(n))
    # for cat in n.Category:
    #     info.addfrom(cat,'Category',' ',CategoryType)
    info.chunks.append('/'.join([cat.Label for cat in n.Category]))
    # info += code_label(n)
    return info


def ForfeitureReasonType(n):
    return code_label(n)
def IT191(n):
    # Driving licence.
    # info = code_label(n.TypeOfLicense)
    info = Info().addfrom(n, 'TypeOfLicense', '', TypeOfLicenseType)
    info.addfrom(n, 'LicenseNumber', _('no. '))
    info.addfrom(n, 'Place', _('delivered in '), PlaceType)
    info.addfrom(n, 'DeliveryCountry', ' (', CountryType, ')')
    info.addfrom(n, 'ForfeitureReason', None, ForfeitureReasonType)
    info.addfrom(n, 'ForfeitureDate', None, ForfeitureDateType)
    # info.append()
    # info.append(E.b(n.LicenseNumber))
    # info.append(', categories '
    #             + ' '.join([cat.Label for cat in n.Categories.Category]))
    # info.append(_(' delivered in '))
    # info += code_label(n.Delivery.Place)
    info.add_deldate(n)
    return info


def IT194(n):
    # Driving licence (new model).
    info = Info().addfrom(n, 'TypeOfLicense', '', TypeOfLicenseType194)
    info.addfrom(n, 'Categories', _('categories '), LicenseCategoriesType)
    info.addfrom(n, 'LicenseNumber', _('no. '))
    info.addfrom(n, 'Delivery', _('delivered '), DeliveryType194)
    info.addfrom(n, 'GearBox', None, GearBoxType)
    info.addfrom(n, 'Medical', None, MedicalType)
    info.addfrom(n, 'ExpiryDate', _('expires '), ExpiryDateType)
    info.add_deldate(n)
    return info


def IT198(n):
    # Work permit.
    info = Info().addfrom(n, 'PermitNumber', _('no. '))
    info.addfrom(n, 'Categories', _('categories '), LicenseCategoriesType)
    info.addfrom(n, 'LicenseNumber', _('no. '))
    info.addfrom(n, 'Delivery', _('delivered '), DeliveryType194)
    info.addfrom(n, 'GearBox', None, GearBoxType)
    info.addfrom(n, 'Medical', None, MedicalType)
    info.addfrom(n, 'ExpiryDate', _('expires '), ExpiryDateType)
    info.add_deldate(n)
    return info


def TypeOfPassportType(n):
    return code_label(n)


def PassportIdentType(n):
    info = Info()
    info.addfrom(n, 'PassportType', _('type '), TypeOfPassportType)
    info.addfrom(n, 'PassportNumber', _('no. '))
    return info


def IT199(n):
    # Passport.
    info = Info()
    # info.chunks.append('Number ')
    # info.chunks.append(E.b(n.PassportIdent.PassportNumber))
    # info.append(', status ')
    info.addfrom(n, 'Status', _("status"), code_label)
    info.addfrom(n, 'PassportIdent', '', PassportIdentType)
    info.addfrom(n, 'Issuer', _('issued by '), IssuerType)
    info.addfrom(n, 'RenewalNumber', _('renewal no. '), boldstring)
    info.addfrom(n, 'SerialNumber', _('serial no. '), boldstring)
    info.addfrom(n, 'SecondNumber', _('second no. '), boldstring)
    info.addfrom(n, 'ReplacementOf', _('replacement of '), boldstring)
    info.addfrom(n, 'AdditionTo', _('addition to '), boldstring)
    info.addfrom(n, 'ProductionDate', _('produced '), DateType)
    info.addfrom(n, 'ExpiryDate', _('expires '), DateType)
    # info.append(', type ')
    # info += code_label(n.PassportIdent.PassportType)
    # info.append(', expires ')
    # info.append(E.b(dd.dtos(rn2date(n.ExpiryDate))))
    # info.append(', delivered by ')
    # info += code_label(n.Issuer.PosteDiplomatique)
    # info.append(_(' renewal no. '))
    # info.append(E.b(n.RenewalNumber))
    info.add_deldate(n)
    return info
def HousingType(n):
    return code_label(n)


def ModificationTypeType(n):
    return code_label(n)


def AddressType(n):
    # Country plus up to three free-text address lines.
    info = Info()
    # pd = n.Address.Address
    info.addfrom(n, 'Country', '', CountryType)
    # info.append(', ')
    info.addfrom(n, 'Graphic1', '')
    info.addfrom(n, 'Graphic2', '')
    info.addfrom(n, 'Graphic3', '')
    # info.append(E.b(pd.Graphic1))
    # info.append(', ')
    # info.append(E.b(pd.Graphic2))
    # info.append(', ')
    # info.append(E.b(pd.Graphic3))
    # info.addfrom(pd,'Graphic3')
    return info


def CertificateType(n):
    return code_label(n)


def IT200(n):
    # Public security number.
    info = Info().addfrom(n, 'PublicSecurityNumber', _('no. '))
    info.add_deldate(n)
    return info


def IT202(n):
    # Special info (Limosa).
    info = Info()
    info.addfrom(n, 'Graphic1', '')
    info.addfrom(n, 'Graphic2', '')
    info.addfrom(n, 'Limosa', '', LimosaType)
    info.add_deldate(n)
    return info


def LimosaType(n):
    info = Info()
    info.addfrom(n, 'Reason1', '', LimosaReasonType)
    info.addfrom(n, 'Reason2', '', LimosaReasonType)
    info.addfrom(n, 'NationalNumber', _('SSIN '), NationalNumberType)
    return info


def LimosaReasonType(n):
    return code_label(n)


def IT205(n):
    # Refugee type.
    info = code_label(n)
    info.add_deldate(n)
    return info
def OrganizationType(n):
    return code_label(n)


def GeneralInfoType(n):
    info = code_label(n)
    info.addfrom(n, 'Organization', _("Organization"), OrganizationType)
    return info


def OrigineType(n):
    return Info().add_codelabel(n)


def AppealType(n):
    return code_label(n)


def StatusAppealType(n):
    return code_label(n)


def ProcedureType(n):
    info = Info()
    info.addfrom(n, 'Origine', None, OrigineType)
    info.addfrom(n, 'Reference')
    info.addfrom(n, 'Appeal', None, AppealType)
    info.addfrom(n, 'OpenClose', None, StatusAppealType)
    info.addfrom(n, 'NationalNumber', _('SSIN '), NationalNumberType)
    return info


def DecisionCancelledType(n):
    info = Info()
    info.addfrom(n, 'Date', None, DateType)
    info.addfrom(n, 'Reference')
    return info


def DelayLeaveGrantedType(n):
    info = Info()
    info.addfrom(n, 'Date', None, DateType)
    return info


def StrikingOutType(n):
    info = Info()
    info.addfrom(n, 'Reference')
    info.addfrom(n, 'OpenClose', None, OpenCloseType)
    info.addfrom(n, 'Status', None, StrikingStatusType)
    return info


def StrikingStatusType(n):
    return code_label(n)


def TerritoryLeftType(n):
    return code_label(n)


def OpenCloseType(n):
    return code_label(n)


def ProtectionType(n):
    info = code_label(n)
    info.addfrom(n, 'Reference')
    info.addfrom(n, 'Term')
    return info


def AdviceFromCGVSType(n):
    info = code_label(n)
    info.addfrom(n, 'Reference')
    return info


def ApplicationFiledType(n):
    info = code_label(n)
    info.addfrom(n, 'Place', _("in "), PlaceType)
    return info


def DecisionType206(n):
    # print 20150513, unicode(n).encode("ascii", errors="replace")
    info = code_label(n)
    info.addfrom(n, 'Reference', _("Reference"))
    info.addfrom(n, 'OpenClose', _("Open/Close"), OpenCloseType)
    info.addfrom(n, 'Comments')
    info.addfrom(n, 'Term')
    return info


def NotificationByDVZType(n):
    info = Info()
    info.addfrom(n, 'Place', _("in "), PlaceType)
    info.addfrom(n, 'Reference')
    return info


def NotificationByOrgType(n):
    info = Info()
    info.addfrom(n, 'Reference')
    return info


def AppealLodgedType(n):
    info = Info()
    info.addfrom(n, 'Reference')
    return info


def IT206(n):
    # Status of refugee; the node is a big xs:choice, so most prefixes
    # below only apply when the corresponding element is present.
    def Status(n):
        info = Info()
        info.addfrom(n, 'Status')
        return info
    info = Info()
    info.addfrom(n, 'GeneralInfo', '', GeneralInfoType)
    info.addfrom(n, 'Procedure', _("Procedure"), ProcedureType)
    info.addfrom(n, 'StrikingOut', None, StrikingOutType)
    info.addfrom(n, 'DecisionCancelled',
                 _("Decision cancelled"), DecisionCancelledType)
    info.addfrom(n, 'Protection', _("Protection"), ProtectionType)
    info.addfrom(n, 'DelayLeaveGranted', None, DelayLeaveGrantedType)
    info.addfrom(n, 'Escape', _("Escape"), Status)
    info.addfrom(n, 'UnrestrictedStay', None, Status)
    info.addfrom(n, 'ApplicationRenounced', _("Application renounced"), Status)
    info.addfrom(n, 'TerritoryLeft', _("Territory left"), TerritoryLeftType)
    info.addfrom(n, 'AdviceFromCGVS', None, AdviceFromCGVSType)
    info.addfrom(n, 'Decision', _("Decision"), DecisionType206)
    info.addfrom(n, 'ApplicationFiled',
                 _("Application filed"), ApplicationFiledType)
    info.addfrom(n, 'NotificationByDVZ', None, NotificationByDVZType)
    info.addfrom(n, 'NotificationByOrg', None, NotificationByOrgType)
    info.addfrom(n, 'AppealLodged', None, AppealLodgedType)
    info.add_deldate(n)
    return info
def InitiativeType(n):
    return code_label(n)


def SocialWelfareType(n):
    info = Info()
    info.addfrom(n, 'Place', _("in "), PlaceType)
    info.addfrom(n, 'Initiative', None, InitiativeType)
    info.add_deldate(n)
    return info


def RefugeeCentreType(n):
    return code_label(n)


def IT207(n):
    # Organization in charge (welfare centre or refugee centre).
    info = Info()
    info.addfrom(n, 'SocialWelfare',
                 _("Social Welfare Centre"), SocialWelfareType)
    info.addfrom(n, 'RefugeeCentre', _("Refugee Centre"), RefugeeCentreType)
    info.add_deldate(n)
    return info


def RegistrationRegisterType(n):
    return code_label(n)


def IT210(n):
    info = Info()
    info.addfrom(n, 'RegistrationRegister',
                 _("Registration register"), RegistrationRegisterType)
    info.add_deldate(n)
    return info


def IdentificationType(n):
    return code_label(n)


def IT211(n):
    info = Info()
    info.addfrom(n, 'TypeOfDocument', '', IdentificationType)
    info.add_deldate(n)
    return info


def ChoosenResidenceType(n):
    return code_label(n)


def IT212(n):
    info = Info().addfrom(n, 'Residence', None, ChoosenResidenceType)
    info.addfrom(n, 'Graphic', '')
    info.add_deldate(n)
    return info


def IT251(n):
    # Residence update date: only the deletion date is rendered.
    info = Info()
    info.add_deldate(n)
    return info


def IT192(n):
    # Organ donation declaration.
    info = Info().addfrom(n, 'Declaration', '', DeclarationType)
    info.addfrom(n, 'Place', _('in '), PlaceType)
    info.add_deldate(n)
    return info
# Registry mapping a group element name to a (label, child element
# name, handler function name) triple.  RowFactory.get_it_handler()
# looks entries up here and resolves the handler from module globals.
HANDLERS = dict()


def register_it_handler(name, label, subname, itname):
    # Register the handler named `itname` for response elements called
    # `name`, whose repeated child elements are called `subname`.
    HANDLERS[name] = (label, subname, itname)


register_it_handler('WorkPermits', _("Work Permits"), 'WorkPermit', 'IT198')
register_it_handler(
    'PublicSecurityNumbers',
    _("Public Security Numbers"), 'PublicSecurityNumber', 'IT200')
register_it_handler('SpecialInfos', _("Special Infos"), 'SpecialInfo', 'IT202')
register_it_handler('RefugeeTypes', _("Refugee Types"), 'RefugeeType', 'IT205')
register_it_handler('StatusOfRefugee', _("Status of refugee"),
                    'StatusOfRefugee', 'IT206')
register_it_handler('Passports', _("Passports"), 'Passport', 'IT199')
register_it_handler(
    'OrganizationsInCharge',
    _("Organizations in charge"), 'OrganizationInCharge', 'IT207')
register_it_handler(
    'RegistrationRegisters',
    _("Registration registers"), 'RegistrationRegister', 'IT210')
register_it_handler('ChoosenResidences',
                    _("Choosen residences"), 'ChoosenResidence', 'IT212')
register_it_handler('OrganDonations', _("Organ Donations"),
                    'OrganDonation', 'IT192')
register_it_handler('ResidenceUpdateDates',
                    _("Residence Update Dates"), 'ResidenceUpdateDate',
                    'IT251')
register_it_handler('DocumentTypes', _("Document Types"),
                    'DocumentType', 'IT211')
register_it_handler('NameModifications',
                    _("Name Modifications"), 'NameModification', 'IT013')
register_it_handler('CountriesOfOrigin',
                    _("Countries Of Origin"), 'CountryOfOrigin', 'IT006')
register_it_handler('ReturnPermissions',
                    _("Return permissions"), 'ReturnPermission', 'IT008')
register_it_handler('AddressDeclarationAbroad',
                    _("Address Declaration Abroad"), 'Address', 'IT018')
register_it_handler('TemporaryRegistrations',
                    _("Inscriptions Temporaires"),
                    'TemporaryRegistration', 'IT028')
register_it_handler('SpecialRetirementCertificates',
                    _("Special Retirement Certificates"),
                    'SpecialRetirementCertificate',
                    'IT074')
register_it_handler('RetirementCertificates',
                    _("Retirement Certificates"), 'RetirementCertificate',
                    'IT073')
register_it_handler('Guardians',
                    _("Guardians"), 'Guardian', 'IT113')
register_it_handler('PseudoNationalNumbers',
                    _("Pseudo National Numbers"), 'PseudoNationalNumber',
                    'IT208')
register_it_handler('TemporaryAbsences',
                    _("Temporary absences"), 'TemporaryAbsence', 'IT026')
register_it_handler('BurialModes',
                    _("Burial modes"), 'BurialMode', 'IT152')
register_it_handler('PostalAddressAbroad',
                    _("Postal address abroad"), 'PostalAddressAbroad', 'IT023')
register_it_handler('ParentalAuthorities',
                    _("Parental authorities"), 'ParentalAuthority', 'IT111')
class RowFactory(object):
# The result of a Tx25 consist of data rows, each of which has a
# given type. Consult the source code of this class to see how it
# works.
    def start_group(self, group):
        # Remember the current group label and reset the per-group
        # row counter.
        self.current_group = group
        self.counter = 0

    def datarow(self, node, since, info):
        # Build one result row (an AttrDict) from a response node.
        # `since` is the raw date node; `info` is the Info built by the
        # per-IT handler.
        group = self.current_group
        self.counter += 1
        if node.__class__.__name__.startswith('IT'):
            itnum = node.__class__.__name__[2:]
        else:
            itnum = ''
        if hasattr(node, 'Type'):
            group += " " + node.Type
        # if hasattr(node,'Status'):
        #     group += " " + unicode(node.Status)
        if hasattr(node, 'Structure'):
            group += " " + node.Structure
        return AttrDict(group=group,
                        counter=self.counter,
                        type=itnum,
                        since=rn2date(since),
                        info=E.p(*info.chunks))

    def get_it_handler(self, itnode):
        # Return a generator function for `itnode` when its class name
        # is registered in HANDLERS; otherwise None.
        t = HANDLERS.get(itnode.__class__.__name__, None)
        if t is None:
            return t
        g, subname, itname = t
        it = globals().get(itname)

        def f(node, name):
            self.start_group(g)
            for n in getattr(node, subname):
                info = it(n)
                yield self.datarow(n, n.Date, info)
        return f
    def IT000(self, n, name):
        # National number plus gender.
        self.start_group(_("National Number"))
        n = n.NationalNumber
        info = Info(
            E.b(n.NationalNumber),
            ' (' + str(cbss2gender(n.Sex)) + ')')
        yield self.datarow(n, n.Date, info)

    def IT019(self, n, name):
        self.start_group(_("Address Change Declaration"))
        info = Info()

        # Formatter local to IT019 (free-text address only).
        def AddressType(n):
            info = Info()
            info.addfrom(n, 'Graphic', '')
            return info
        info.addfrom(n, 'Address', '', AddressType)
        info.add_deldate(n)
        yield self.datarow(n, n.Date, info)

    def FileOwner(self, fo, name):
        self.start_group(_("Residences"))
        for n in fo.Residences:
            info = Residence(n)
            yield self.datarow(n, n.Date, info)

    def AscertainedLegalMainAddresses(self, fo, name):
        # Determination of residence.
        self.start_group(_("Ascertained Legal Main Addresses"))
        # raise Exception(str(fo))
        # raise Exception(repr([n for n in fo]))
        for n in fo.AscertainedLegalMainAddress:
            info = IT003(n)
            yield self.datarow(n, n.Date, info)

    def Pseudonyms(self, fo, name):
        self.start_group(_("Pseudonyms"))
        for n in fo.Pseudonym:
            info = IT011(n)
            yield self.datarow(n, n.Date, info)

    def Aliases(self, fo, name):
        self.start_group(_("Aliases"))
        for n in fo.Alias:
            info = IT213(n)
            yield self.datarow(n, n.Date, info)

    def AddressChangeIntention(self, fo, name):
        # Intention to change the address.
        self.start_group(
            _("Address Change Intention"))
        for n in fo.Address:
            info = IT005(n)
            yield self.datarow(n, n.Date, info)

    def AddressReferences(self, fo, name):
        # Reference address.
        self.start_group(_("Address References"))
        for n in fo.AddressReference:
            info = IT024(n)
            yield self.datarow(n, n.Date, info)
def Names(self, node, name):
self.start_group(_("Names"))
# group = name
for n in node.Name:
info = Info().addfrom(n, 'Name', '', NameType)
yield self.datarow(n, n.Date, info)
def LegalMainAddresses(self, node, name):
self.start_group(_("Legal Main Addresses"))
for n in node.LegalMainAddress:
yield self.datarow(n, n.Date, IT020(n))
def ResidenceAbroad(self, node, name): # IT022
def ResidenceAbroadAddressType(n):
info = Info('Address')
info.addfrom(n, 'PosteDiplomatique', None, DiplomaticPostType)
info.addfrom(n, 'Territory', ' ', TerritoryType)
info.addfrom(n, 'Address', ' ', AddressType)
return info
self.start_group(_("Residence Abroad"))
for n in node.ResidenceAbroad:
info = Info()
info.addfrom(n, 'Address', '', ResidenceAbroadAddressType)
# info += code_label(n.Address.PosteDiplomatique)
# info.append(', ')
# info += code_label(n.Address.Territory)
# info.append(', ')
info.add_deldate(n)
yield self.datarow(n, n.Date, info)
def Nationalities(self, node, name):
self.start_group(_("Nationalities"))
for n in node.Nationality:
info = code_label(n.Nationality)
yield self.datarow(n, n.Date, info)
def Occupations(self, node, name):
self.start_group(_("Occupations"))
for n in node.Occupation:
info = code_label(n.Occupation)
info.addfrom(n, 'SocialCategory', ' (SC ', code_label, ')')
yield self.datarow(n, n.Date, info)
def IT100(self, n, name):
self.start_group(_("Birth Place"))
info = Info()
info.addfrom(n, 'Place1', _('in '), PlaceType)
info.addfrom(n, 'Place2', _('in '), GraphicPlaceType)
info.addfrom(n, 'ActNumber', _("Act no. "))
info.addfrom(n, 'SuppletoryRegister')
yield self.datarow(n, n.Date, info)
def IT101(self, n, name):
self.start_group(
_("Declared Birth Date")) # Date de naissance déclarée
info = Info()
info.addfrom(n, 'DeclaredBirthDate', '', DateType)
info.addfrom(n, 'Certificate', '', CertificateType)
info.add_deldate(n)
yield self.datarow(n, n.Date, info)
def Filiations(self, node, name):
self.start_group(_("Filiations"))
for n in node.Filiation:
info = IT110(n)
yield self.datarow(n, n.Date, info)
def CivilStates(self, node, name):
self.start_group(_("Civil States")) # IT120
for n in node.CivilState:
info = code_label(n.CivilState)
if hasattr(n, 'Spouse'):
# info.append(' with ')
# info += name2info(n.Spouse.Name)
info.addfrom(n.Spouse, 'Name', _('with '), NameType)
info.chunks.append(' (')
info.chunks.append(n.Spouse.NationalNumber.NationalNumber)
info.chunks.append(')')
info.addfrom(n, 'Lieu', _('in '), LieuType)
# info += LieuType(n.Lieu)
info.addfrom(n, 'ActNumber', _("Act no. "))
# info.addfrom(n,'ActNumber')
info.addfrom(n, 'SuppletoryRegister')
info.add_deldate(n)
yield self.datarow(n, n.Date, info)
def HeadOfFamily(self, node, name):
self.start_group(_("Head Of Family"))
for n in node.HeadOfFamily:
info = IT140(n)
yield self.datarow(n, n.Date, info)
def FamilyMembers(self, node, name):
self.start_group(_("Family Members"))
for n in node.FamilyMember:
info = IT141(n)
yield self.datarow(n, n.Date, info)
def DrivingLicensesOldModel(self, node, name):
self.start_group(_("Driving Licenses Old Model"))
for n in node.DrivingLicense:
info = IT194(n)
yield self.datarow(n, n.Date, info)
def DrivingLicenses(self, node, name):
self.start_group(_("Driving Licenses"))
for n in node.DrivingLicense:
info = IT191(n)
yield self.datarow(n, n.Date, info)
def IdentityCards(self, node, name):
self.start_group(_("Identity Cards"))
for n in node.IdentityCard:
info = code_label(n.TypeOfCard)
info.chunks.append(' ')
info.chunks.append(_('no. '))
info.chunks.append(E.b(n.CardNumber))
info.addfrom(n, 'ExpiryDate', _('expires '), DateType)
# info.chunks.append(E.b(dd.dtos(rn2date(n.ExpiryDate))))
info.addfrom(n, 'Delivery', _('delivered in '), DeliveryType206)
# info.chunks.append(', delivered in ')
# info += code_label(n.Delivery.Place)
yield self.datarow(n, n.Date, info)
def LegalCohabitations(self, node, name):
def CessationType(n):
info = Info()
info.addfrom(n, 'Reason', _("Reason"), ReasonType)
info.addfrom(n, 'Place', _('in '), PlaceType)
info.addfrom(n, 'Notification', _('in '), NotificationType)
return info
def DeclarationType(n):
info = Info()
info.addfrom(n, 'RegistrationDate', '', DateType)
info.addfrom(n, 'Partner', _('with '), PartnerType)
info.addfrom(n, 'Place', _('in '), PlaceType)
info.addfrom(n, 'Notary', _('in '), NotaryType)
return info
self.start_group(_("Legal cohabitations"))
for n in node.LegalCohabitation:
info = Info()
info.addfrom(n, 'Declaration', _("Declaration"), DeclarationType)
info.addfrom(n, 'Cessation', _("Cessation"), CessationType)
info.add_deldate(n)
yield self.datarow(n, n.Date, info)
def IT253(self, node, name):
self.start_group(_("Creation Date"))
n = node # res.CreationDate
info = Info()
yield self.datarow(n, n.Date, info)
def IT254(self, node, name):
self.start_group(_("Last Update"))
n = node # res.LastUpdateDate
info = Info()
yield self.datarow(n, n.Date, info)
class RetrieveTIGroupsResult(ConfidentialResultsTable):
master = 'cbss.RetrieveTIGroupsRequest'
master_key = None
column_names = 'group:18 type:5 since:14 info:50'
@dd.displayfield(_("Group"))
def group(self, obj, ar):
if obj.counter == 1:
return obj.group
return ''
@dd.displayfield(_("TI"))
def type(self, obj, ar):
if obj.counter == 1:
return obj.type
return ''
@dd.virtualfield(models.DateField(_("Since")))
def since(self, obj, ar):
return obj.since
@dd.displayfield(_("Info"))
def info(self, obj, ar):
return obj.info
@classmethod
def get_data_rows(self, ar):
rti = ar.master_instance
if rti is None:
# print "20130425 rti is None"
return
self.check_permission(rti, ar)
# if not ipr.status in (RequestStates.ok,RequestStates.fictive):
# if not rti.status in (RequestStates.ok,RequestStates.warnings):
# return
reply = rti.get_service_reply()
if reply is None:
# print "20130425 reply is None"
return
# print "20130425 ok"
reply_has_result(reply)
res = reply.rrn_it_implicit
rf = RowFactory()
for name, node in res:
# print 20130425, name, node.__class__
m = getattr(rf, node.__class__.__name__, None)
if m is None:
m = rf.get_it_handler(node)
if m is None:
raise Exception("No handler for %s (%s)"
% (name, node.__class__.__name__))
for row in m(node, name):
yield row
``` |
[
{
"content": "Here is a code file:\n```python\n# coding: utf-8\n# Copyright (c) Pymatgen Development Team.\n# Distributed under the terms of the MIT License.\n\n\nimport unittest\n\nimport numpy as np\nfrom monty.os.path import which\n\nfrom pymatgen.core.lattice import Lattice\nfrom pymatgen.core.structure imp... | [
{
"content": "Here is a code file:\n<|memory_start|>```python\n# coding: utf-8\n# Copyright (c) Pymatgen Development Team.\n# Distributed under the terms of the MIT License.\n\n\nimport unittest\n\nimport numpy as np\nfrom monty.os.path import which\n\nfrom pymatgen.core.lattice import Lattice\nfrom pymatgen.co... | ```python
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import unittest
import numpy as np
from monty.os.path import which
from pymatgen.core.lattice import Lattice
from pymatgen.core.structure import Structure, Molecule
from pymatgen.transformations.site_transformations import (
AddSitePropertyTransformation,
InsertSitesTransformation,
PartialRemoveSitesTransformation,
RemoveSitesTransformation,
ReplaceSiteSpeciesTransformation,
TranslateSitesTransformation,
RadialSiteDistortionTransformation,
)
from pymatgen.util.testing import PymatgenTest
enum_cmd = which("enum.x") or which("multienum.x")
makestr_cmd = which("makestr.x") or which("makeStr.x") or which("makeStr.py")
enumlib_present = enum_cmd and makestr_cmd
class TranslateSitesTransformationTest(PymatgenTest):
def setUp(self):
coords = list()
coords.append([0, 0, 0])
coords.append([0.375, 0.375, 0.375])
coords.append([0.5, 0.5, 0.5])
coords.append([0.875, 0.875, 0.875])
coords.append([0.125, 0.125, 0.125])
coords.append([0.25, 0.25, 0.25])
coords.append([0.625, 0.625, 0.625])
coords.append([0.75, 0.75, 0.75])
lattice = Lattice(
[
[3.8401979337, 0.00, 0.00],
[1.9200989668, 3.3257101909, 0.00],
[0.00, -2.2171384943, 3.1355090603],
]
)
self.struct = Structure(lattice, ["Li+", "Li+", "Li+", "Li+", "O2-", "O2-", "O2-", "O2-"], coords)
def test_apply_transformation(self):
t = TranslateSitesTransformation([0, 1], [0.1, 0.2, 0.3])
s = t.apply_transformation(self.struct)
self.assertTrue(np.allclose(s[0].frac_coords, [0.1, 0.2, 0.3]))
self.assertTrue(np.allclose(s[1].frac_coords, [0.475, 0.575, 0.675]))
inv_t = t.inverse
s = inv_t.apply_transformation(s)
self.assertAlmostEqual(s[0].distance_and_image_from_frac_coords([0, 0, 0])[0], 0)
self.assertTrue(np.allclose(s[1].frac_coords, [0.375, 0.375, 0.375]))
def test_apply_transformation_site_by_site(self):
t = TranslateSitesTransformation([0, 1], [[0.1, 0.2, 0.3], [-0.075, -0.075, -0.075]])
s = t.apply_transformation(self.struct)
self.assertTrue(np.allclose(s[0].frac_coords, [0.1, 0.2, 0.3]))
self.assertTrue(np.allclose(s[1].frac_coords, [0.3, 0.3, 0.3]))
inv_t = t.inverse
s = inv_t.apply_transformation(s)
self.assertAlmostEqual(s[0].distance_and_image_from_frac_coords([0, 0, 0])[0], 0)
self.assertArrayAlmostEqual(s[1].frac_coords, [0.375, 0.375, 0.375])
def test_to_from_dict(self):
d1 = TranslateSitesTransformation([0], [0.1, 0.2, 0.3]).as_dict()
d2 = TranslateSitesTransformation([0, 1], [[0.1, 0.2, 0.3], [-0.075, -0.075, -0.075]]).as_dict()
t1 = TranslateSitesTransformation.from_dict(d1)
t2 = TranslateSitesTransformation.from_dict(d2)
s1 = t1.apply_transformation(self.struct)
s2 = t2.apply_transformation(self.struct)
self.assertTrue(np.allclose(s1[0].frac_coords, [0.1, 0.2, 0.3]))
self.assertTrue(np.allclose(s2[0].frac_coords, [0.1, 0.2, 0.3]))
self.assertTrue(np.allclose(s2[1].frac_coords, [0.3, 0.3, 0.3]))
str(t1)
str(t2)
class ReplaceSiteSpeciesTransformationTest(unittest.TestCase):
def setUp(self):
coords = list()
coords.append([0, 0, 0])
coords.append([0.375, 0.375, 0.375])
coords.append([0.5, 0.5, 0.5])
coords.append([0.875, 0.875, 0.875])
coords.append([0.125, 0.125, 0.125])
coords.append([0.25, 0.25, 0.25])
coords.append([0.625, 0.625, 0.625])
coords.append([0.75, 0.75, 0.75])
lattice = Lattice(
[
[3.8401979337, 0.00, 0.00],
[1.9200989668, 3.3257101909, 0.00],
[0.00, -2.2171384943, 3.1355090603],
]
)
self.struct = Structure(lattice, ["Li+", "Li+", "Li+", "Li+", "O2-", "O2-", "O2-", "O2-"], coords)
def test_apply_transformation(self):
t = ReplaceSiteSpeciesTransformation({0: "Na"})
s = t.apply_transformation(self.struct)
self.assertEqual(s.formula, "Na1 Li3 O4")
def test_to_from_dict(self):
d = ReplaceSiteSpeciesTransformation({0: "Na"}).as_dict()
t = ReplaceSiteSpeciesTransformation.from_dict(d)
s = t.apply_transformation(self.struct)
self.assertEqual(s.formula, "Na1 Li3 O4")
class RemoveSitesTransformationTest(unittest.TestCase):
def setUp(self):
coords = list()
coords.append([0, 0, 0])
coords.append([0.375, 0.375, 0.375])
coords.append([0.5, 0.5, 0.5])
coords.append([0.875, 0.875, 0.875])
coords.append([0.125, 0.125, 0.125])
coords.append([0.25, 0.25, 0.25])
coords.append([0.625, 0.625, 0.625])
coords.append([0.75, 0.75, 0.75])
lattice = Lattice(
[
[3.8401979337, 0.00, 0.00],
[1.9200989668, 3.3257101909, 0.00],
[0.00, -2.2171384943, 3.1355090603],
]
)
self.struct = Structure(lattice, ["Li+", "Li+", "Li+", "Li+", "O2-", "O2-", "O2-", "O2-"], coords)
def test_apply_transformation(self):
t = RemoveSitesTransformation(range(2))
s = t.apply_transformation(self.struct)
self.assertEqual(s.formula, "Li2 O4")
def test_to_from_dict(self):
d = RemoveSitesTransformation(range(2)).as_dict()
t = RemoveSitesTransformation.from_dict(d)
s = t.apply_transformation(self.struct)
self.assertEqual(s.formula, "Li2 O4")
class InsertSitesTransformationTest(unittest.TestCase):
def setUp(self):
coords = list()
coords.append([0, 0, 0])
coords.append([0.375, 0.375, 0.375])
coords.append([0.5, 0.5, 0.5])
coords.append([0.875, 0.875, 0.875])
coords.append([0.125, 0.125, 0.125])
coords.append([0.25, 0.25, 0.25])
coords.append([0.625, 0.625, 0.625])
coords.append([0.75, 0.75, 0.75])
lattice = Lattice(
[
[3.8401979337, 0.00, 0.00],
[1.9200989668, 3.3257101909, 0.00],
[0.00, -2.2171384943, 3.1355090603],
]
)
self.struct = Structure(lattice, ["Li+", "Li+", "Li+", "Li+", "O2-", "O2-", "O2-", "O2-"], coords)
def test_apply_transformation(self):
t = InsertSitesTransformation(["Fe", "Mn"], [[0.0, 0.5, 0], [0.5, 0.2, 0.2]])
s = t.apply_transformation(self.struct)
self.assertEqual(s.formula, "Li4 Mn1 Fe1 O4")
t = InsertSitesTransformation(["Fe", "Mn"], [[0.001, 0, 0], [0.1, 0.2, 0.2]])
# Test validate proximity
self.assertRaises(ValueError, t.apply_transformation, self.struct)
def test_to_from_dict(self):
d = InsertSitesTransformation(["Fe", "Mn"], [[0.5, 0, 0], [0.1, 0.5, 0.2]]).as_dict()
t = InsertSitesTransformation.from_dict(d)
s = t.apply_transformation(self.struct)
self.assertEqual(s.formula, "Li4 Mn1 Fe1 O4")
class PartialRemoveSitesTransformationTest(unittest.TestCase):
def setUp(self):
coords = list()
coords.append([0, 0, 0])
coords.append([0.375, 0.375, 0.375])
coords.append([0.5, 0.5, 0.5])
coords.append([0.875, 0.875, 0.875])
coords.append([0.125, 0.125, 0.125])
coords.append([0.25, 0.25, 0.25])
coords.append([0.625, 0.625, 0.625])
coords.append([0.75, 0.75, 0.75])
lattice = Lattice(
[
[3.8401979337, 0.00, 0.00],
[1.9200989668, 3.3257101909, 0.00],
[0.00, -2.2171384943, 3.1355090603],
]
)
self.struct = Structure(lattice, ["Li+", "Li+", "Li+", "Li+", "O2-", "O2-", "O2-", "O2-"], coords)
def test_apply_transformation_complete(self):
t = PartialRemoveSitesTransformation(
[tuple(range(4)), tuple(range(4, 8))],
[0.5, 0.5],
PartialRemoveSitesTransformation.ALGO_COMPLETE,
)
s = t.apply_transformation(self.struct)
self.assertEqual(s.formula, "Li2 O2")
s = t.apply_transformation(self.struct, 12)
self.assertEqual(len(s), 12)
@unittest.skipIf(not enumlib_present, "enum_lib not present.")
def test_apply_transformation_enumerate(self):
t = PartialRemoveSitesTransformation(
[tuple(range(4)), tuple(range(4, 8))],
[0.5, 0.5],
PartialRemoveSitesTransformation.ALGO_ENUMERATE,
)
s = t.apply_transformation(self.struct)
self.assertEqual(s.formula, "Li2 O2")
s = t.apply_transformation(self.struct, 12)
self.assertEqual(len(s), 12)
def test_apply_transformation_best_first(self):
t = PartialRemoveSitesTransformation(
[tuple(range(4)), tuple(range(4, 8))],
[0.5, 0.5],
PartialRemoveSitesTransformation.ALGO_BEST_FIRST,
)
s = t.apply_transformation(self.struct)
self.assertEqual(s.formula, "Li2 O2")
def test_apply_transformation_fast(self):
t = PartialRemoveSitesTransformation(
[tuple(range(4)), tuple(range(4, 8))],
[0.5, 0.5],
PartialRemoveSitesTransformation.ALGO_FAST,
)
s = t.apply_transformation(self.struct)
self.assertEqual(s.formula, "Li2 O2")
t = PartialRemoveSitesTransformation([tuple(range(8))], [0.5], PartialRemoveSitesTransformation.ALGO_FAST)
s = t.apply_transformation(self.struct)
self.assertEqual(s.formula, "Li2 O2")
def test_to_from_dict(self):
d = PartialRemoveSitesTransformation([tuple(range(4))], [0.5]).as_dict()
t = PartialRemoveSitesTransformation.from_dict(d)
s = t.apply_transformation(self.struct)
self.assertEqual(s.formula, "Li2 O4")
def test_str(self):
d = PartialRemoveSitesTransformation([tuple(range(4))], [0.5]).as_dict()
self.assertIsNotNone(str(d))
class AddSitePropertyTransformationTest(PymatgenTest):
def test_apply_transformation(self):
s = self.get_structure("Li2O2")
sd = [[True, True, True] for site in s.sites]
bader = np.random.random(s.num_sites).tolist()
site_props = {"selective_dynamics": sd, "bader": bader}
trans = AddSitePropertyTransformation(site_props)
manually_set = s.copy()
for prop, value in site_props.items():
manually_set.add_site_property(prop, value)
trans_set = trans.apply_transformation(s)
for prop in site_props:
self.assertArrayAlmostEqual(trans_set.site_properties[prop], manually_set.site_properties[prop])
class RadialSiteDistortionTransformationTest(PymatgenTest):
def setUp(self):
self.molecule = Molecule(
species=["C", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H"],
coords=[
[0, 0, 0],
[1, 0, 0],
[0, 1, 0],
[0, 0, 1],
[-1, 0, 0],
[0, -1, 0],
[0, 0, -1],
[3, 0, 0],
[0, 3, 0],
[0, 0, 3],
[-3, 0, 0],
[0, -3, 0],
[0, 0, -3],
],
)
self.structure = Structure(
species=["C", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H"],
coords=[
[0, 0, 0],
[1, 0, 0],
[0, 1, 0],
[0, 0, 1],
[-1, 0, 0],
[0, -1, 0],
[0, 0, -1],
[3, 0, 0],
[0, 3, 0],
[0, 0, 3],
[-3, 0, 0],
[0, -3, 0],
[0, 0, -3],
],
lattice=[[10, 0, 0], [0, 10, 0], [0, 0, 10]],
coords_are_cartesian=True,
)
def test(self):
t = RadialSiteDistortionTransformation(0, 1, nn_only=True)
s = t.apply_transformation(self.molecule)
self.assertTrue(np.array_equal(s[0].coords, [0, 0, 0]))
self.assertTrue(np.array_equal(s[1].coords, [2, 0, 0]))
self.assertTrue(np.array_equal(s[2].coords, [0, 2, 0]))
self.assertTrue(np.array_equal(s[3].coords, [0, 0, 2]))
self.assertTrue(np.array_equal(s[4].coords, [-2, 0, 0]))
self.assertTrue(np.array_equal(s[5].coords, [0, -2, 0]))
self.assertTrue(np.array_equal(s[6].coords, [0, 0, -2]))
t = RadialSiteDistortionTransformation(0, 1, nn_only=True)
s = t.apply_transformation(self.structure)
for c1, c2 in zip(self.structure[1:7], s[1:7]):
self.assertTrue(c1.distance(c2) == 1.0)
self.assertTrue(np.array_equal(s[0].coords, [0, 0, 0]))
self.assertTrue(np.array_equal(s[1].coords, [2, 0, 0]))
self.assertTrue(np.array_equal(s[2].coords, [0, 2, 0]))
self.assertTrue(np.array_equal(s[3].coords, [0, 0, 2]))
self.assertTrue(np.array_equal(s[4].coords, [8, 0, 0]))
self.assertTrue(np.array_equal(s[5].coords, [0, 8, 0]))
self.assertTrue(np.array_equal(s[6].coords, [0, 0, 8]))
def test_second_nn(self):
t = RadialSiteDistortionTransformation(0, 1, nn_only=False)
s = t.apply_transformation(self.molecule)
for c1, c2 in zip(self.molecule[7:], s[7:]):
self.assertEqual(abs(round(sum(c2.coords - c1.coords), 2)), 0.33)
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.testName']
unittest.main()
``` |
[
{
"content": "Here is the source code:\n```python\nfrom filterDb import *\r\n#from queriesDb import *\r\nclass Filter():\r\n\t\"\"\"\r\n\tpermette di definire una serie di condizioni che il pv deve soddisfare, il metodo test applicato\r\n\tsul singolo pv verifica che questo le rispetti, in particolare la logic... | [
{
"content": "Here is the source code:\n<|memory_start|>```python\nfrom filterDb import *\r\n#from queriesDb import *\r\nclass Filter():\r\n\t\"\"\"\r\n\tpermette di definire una serie di condizioni che il pv deve soddisfare, il metodo test applicato\r\n\tsul singolo pv verifica che questo le rispetti, in part... | ```python
from filterDb import *
#from queriesDb import *
class Filter():
"""
permette di definire una serie di condizioni che il pv deve soddisfare, il metodo test applicato
sul singolo pv verifica che questo le rispetti, in particolare la logica dei filtri prevede che
piu' parametri della stessa classe siano in OR mentre parametri di classi differenti devono
essere in AND
"""
def __repr__(self):
return "tab:{0},parametri:{1}".format(self.tab,self.parameters)
def __init__(self,parametri, tab,db,user,activeDb):
"""
@param parametri: [{int:[int]}]::[{class_id:[parameter_id]}] setting del filtro
@param tab: string chiave del Filtro
@param db:queriesDb, stessa istanza di QueriesDb usata da interrogazioni savequery e loadQuery
@param USER:utente attivo
@param Db:Db attivo
"""
self.user=user
self.activeDb=activeDb
self.db=filterDb(user,activeDb)
self.Dbquery=db
self.parameters=parametri
self.tab=tab
def unpackParameter(self,d):
"""ritorna una versione dei parametri del filtro "scompattata", cioe' nel formato[(int,int)]::[(category_id,parameter_id)],
perche' sia compatibile con le funzioni di queriesDb
@return: [(int,int)]::[(category_id,parameter_id)]
"""
l=[]
for k in d.iterkeys():
for i in d[k]:
l.append((k,i))
return l
def save(self,Id):
"""
salva i parametri del filtro sul db usando come chiave esterna l'id della query
@param Id:int:: query_id
"""
#instanzio il dict per la selezione della tabella su cui operare in funzione del tab
insertFunctions={}
insertFunctions['marchi']=self.Dbquery.insertMarchi
insertFunctions['potenziali']=self.Dbquery.insertPotenziale
insertFunctions['parametri']=self.Dbquery.insertParametri
#registro i parametri sul db
insertFunctions[self.tab](Id,self.unpackParameter(self.parameters))
def test(self,Id):
"""
esegue il test definito dai parametri del filtro sul pv con pv_id Id
@param Id::int pv_id
@return boolean
"""
return self.checkTab(Id,self.tab,self.parameters)
def checkTab(self,Id,tab,d):
"""
esegue i filtri appartenenti ad un tab
@param Id::int pv_id
@param tab:string specifica il tab di cui eseguire i filtri 'marchi',parametri','potenziali'
@param d:{int:[int]}::{class_id:[parameter_id]}
@return boolean: il valore ritornato e' l'operazione di AND tra i check per ogni classe
"""
# True e' il valore neutro per l'operazione di AND
b= True
# definisco un dict dei metodi da usare per il check delle classi
chekers={}
chekers['marchi']=self.db.checkClassBrand
chekers['parametri']=self.db.checkClassParameter
chekers['potenziali']=self.db.checkClassPotential
for k in d.iterkeys():
b=b and chekers[tab](Id,k,d[k])
return b
``` |
[
{
"content": "Reproduce the code exactly as provided (keep formatting):\n```python\n# Copyright 2014-2016 The ODL development group\n#\n# This file is part of ODL.\n#\n# ODL is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Fr... | [
{
"content": "Reproduce the code exactly as provided (keep formatting):\n<|memory_start|>```python\n# Copyright 2014-2016 The ODL development group\n#\n# This file is part of ODL.\n#\n# ODL is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as publis... | ```python
# Copyright 2014-2016 The ODL development group
#
# This file is part of ODL.
#
# ODL is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ODL is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ODL. If not, see <http://www.gnu.org/licenses/>.
"""Basic abstract and concrete sets."""
# Imports for common Python 2/3 codebase
from __future__ import print_function, division, absolute_import
from builtins import int, object, str, zip
from future import standard_library
from past.builtins import basestring
standard_library.install_aliases()
from abc import ABCMeta, abstractmethod
from numbers import Integral, Real, Complex
import numpy as np
from odl.util import is_int_dtype, is_real_dtype, is_scalar_dtype
from odl.util.utility import with_metaclass
__all__ = ('Set', 'EmptySet', 'UniversalSet', 'Field', 'Integers',
'RealNumbers', 'ComplexNumbers', 'Strings', 'CartesianProduct')
class Set(with_metaclass(ABCMeta, object)):
"""An abstract set.
**Abstract Methods**
Each subclass of `Set` must implement two methods: one to
check if an object is contained in the set and one to test if two
sets are equal.
**Membership test:** ``__contains__(self, other)``
Test if ``other`` is a member of this set. This function provides
the operator overload for ``in``.
**Parameters:**
other :
Object to be tested for membership
**Returns:**
contains : bool
``True`` if ``other`` is a member of this set, ``False``
otherwise.
**Equality test:** ``__eq__(self, other)``
Test if ``other`` is the same set as this set, i.e. both sets are
of the same type and contain the same elements. This function
provides the operator overload for ``==``.
**Parameters:**
other :
Object to be tested for equality.
**Returns:**
equals : bool
``True`` if both sets are of the same type and contain the
same elements, ``False`` otherwise.
A default implementation of the operator overload for ``!=`` via
``__ne__(self, other)`` is provided as ``not self.__eq__(other)``.
**Element creation (optional)**: ``element(self, inp=None)``
Create an element of this set, either from scratch or from an
input parameter.
**Parameters:**
inp : optional
Object from which to create the new element
**Returns:**
element : member of this set
If ``inp`` is None, return an arbitrary element.
Otherwise, return the element created from ``inp``.
"""
@abstractmethod
def __contains__(self, other):
"""Return ``other in self``."""
def contains_set(self, other):
"""Test if ``other`` is a subset of this set.
This is a default implementation that simply tests for equality.
It should be overridden by subclasses.
Returns
-------
set_contained : bool
``True`` if ``other`` is contained in this set, ``False``
otherwise.
"""
return self == other
def contains_all(self, other):
"""Test if all elements in ``other`` are contained in this set.
This is a default implementation that assumes ``other`` to be
a sequence and tests each elment of ``other`` sequentially.
This method should be overridden by subclasses.
Returns
-------
all_contained : bool
``True`` if all elements of ``other`` are contained in this
set, ``False`` otherwise
"""
return all(x in self for x in other)
@abstractmethod
def __eq__(self, other):
"""Return ``self == other``."""
def __ne__(self, other):
"""Return ``self != other``."""
return not self.__eq__(other)
def __cmp__(self, other):
"""Comparsion not implemented."""
# Stops python 2 from allowing comparsion of arbitrary objects
raise TypeError('unorderable types: {}, {}'
''.format(self.__class__.__name__, type(other)))
def element(self, inp=None):
"""Return an element from ``inp`` or from scratch.
This method should be overridden by subclasses.
"""
raise NotImplementedError('`element` method not implemented')
@property
def examples(self):
"""Generator creating name-value pairs of set elements.
This method is mainly intended for diagnostics and yields elements,
either a finite number of times or indefinitely.
This default implementation returns
``('element()', self.element())`` and should be overridden by
subclasses.
"""
yield ('element()', self.element())
def __repr__(self):
"""Return ``repr(self)``."""
return '{}()'.format(self.__class__.__name__)
def __str__(self):
"""Return ``str(self)``."""
return '{}'.format(self.__class__.__name__)
class EmptySet(Set):
"""Set with no member elements (except ``None``).
``None`` is considered as "no element", i.e. ``None in EmptySet()``
is the only test that evaluates to ``True``.
"""
def __contains__(self, other):
"""Return ``other in self``, always ``False`` except for ``None``."""
return other is None
def contains_set(self, other):
"""Return ``True`` for the empty set, ``False`` otherwise."""
return isinstance(other, EmptySet)
def __eq__(self, other):
"""Return ``self == other``."""
return isinstance(other, EmptySet)
def __hash__(self):
"""Return ``hash(self)``."""
return hash(type(self))
def element(self, inp=None):
"""Return None."""
return None
class UniversalSet(Set):
"""Set of all objects.
Forget about set theory for a moment :-).
"""
def __contains__(self, other):
"""Return ``other in self``, always ``True``."""
return True
def contains_set(self, other):
"""Return ``True`` for any set."""
return isinstance(other, Set)
def __eq__(self, other):
"""Return ``self == other``."""
return isinstance(other, UniversalSet)
def __hash__(self):
"""Return ``hash(self)``."""
return hash(type(self))
def element(self, inp=None):
"""Return ``inp`` in any case."""
return inp
class Strings(Set):
"""Set of fixed-length (unicode) strings."""
def __init__(self, length):
"""Initialize a new instance.
Parameters
----------
length : positive int
Fixed length of the strings in this set.
"""
length, length_in = int(length), length
if length <= 0:
raise ValueError('`length` must be positive, got {}'
''.format(length_in))
self.__length = length
@property
def length(self):
"""Fixed length of the strings in this set."""
return self.__length
def __contains__(self, other):
"""Return ``other in self``.
Returns
-------
contained : bool
``True`` if ``other`` is a string of exactly `length`
characters, ``False`` otherwise.
"""
return isinstance(other, basestring) and len(other) == self.length
def contains_all(self, other):
"""Return ``True`` if all strings in ``other`` have size `length`."""
dtype = getattr(other, 'dtype', None)
if dtype is None:
dtype = np.result_type(*other)
dtype_str = np.dtype('S{}'.format(self.length))
dtype_uni = np.dtype('<U{}'.format(self.length))
return dtype in (dtype_str, dtype_uni)
def __eq__(self, other):
"""Return ``self == other``."""
return isinstance(other, Strings) and other.length == self.length
def __hash__(self):
"""Return ``hash(self)``."""
return hash((type(self), self.length))
def element(self, inp=None):
"""Return an element from ``inp`` or from scratch."""
if inp is not None:
s = str(inp)[:self.length]
s += ' ' * (self.length - len(s))
return s
else:
return ' ' * self.length
@property
def examples(self):
"""Return example strings 'hello', 'world' (size adapted)."""
hello_str = 'hello'[:self.length]
hello_str += ' ' * (self.length - len(hello_str))
world_str = 'world'[:self.length]
world_str += ' ' * (self.length - len(world_str))
return [('hello', hello_str), ('world', world_str)]
def __repr__(self):
"""Return ``repr(self)``."""
return 'Strings({})'.format(self.length)
class Field(Set):
"""A set that satisfies the field axioms.
Examples: `RealNumbers`, `ComplexNumbers` or
the finite field :math:`F_2`.
See `the Wikipedia entry on fields
<https://en.wikipedia.org/wiki/Field_%28mathematics%29>`_ for
further information.
"""
@property
def field(self):
"""Field of scalars for a field is itself.
Notes
-----
This is a hack to make fields to work via duck-typing with
`LinearSpace`'s.
"""
return self
class ComplexNumbers(Field):

    """The set of all complex numbers."""

    def __contains__(self, other):
        """Return ``other in self``."""
        return isinstance(other, Complex)

    def contains_set(self, other):
        """Return ``True`` if ``other`` is a subset of the complex numbers.

        Returns
        -------
        contained : bool
            ``True`` if ``other`` is an instance of `ComplexNumbers`,
            `RealNumbers` or `Integers`, ``False`` otherwise.

        Examples
        --------
        >>> complex_numbers = ComplexNumbers()
        >>> complex_numbers.contains_set(RealNumbers())
        True
        """
        # Identity shortcut first, then the subset hierarchy C >= R >= Z.
        return (other is self or
                isinstance(other, (ComplexNumbers, RealNumbers, Integers)))

    def contains_all(self, other):
        """Return ``True`` if ``other`` is a sequence of complex numbers."""
        dt = getattr(other, 'dtype', None)
        if dt is None:
            # Plain sequences carry no dtype; infer one from the values.
            dt = np.result_type(*other)
        return is_scalar_dtype(dt)

    def __eq__(self, other):
        """Return ``self == other``."""
        return other is self or isinstance(other, ComplexNumbers)

    def __hash__(self):
        """Return ``hash(self)``."""
        return hash(type(self))

    def element(self, inp=None):
        """Return a complex number from ``inp`` or from scratch."""
        return complex(inp) if inp is not None else complex(0.0, 0.0)

    @property
    def examples(self):
        """Return examples of complex numbers."""
        sample_values = (-1.0, 0.5, 0.0 + 2.0j, 0.0, 0.01, 1.0 + 1.0j,
                         1.0j, 1.0)
        return [(str(value), value) for value in sample_values]
class RealNumbers(Field):

    """The set of all real numbers."""

    def __contains__(self, other):
        """Return ``other in self``."""
        return isinstance(other, Real)

    def contains_set(self, other):
        """Return ``True`` if ``other`` is a subset of the real numbers.

        Returns
        -------
        contained : bool
            ``True`` if other is an instance of `RealNumbers` or
            `Integers` False otherwise.

        Examples
        --------
        >>> real_numbers = RealNumbers()
        >>> real_numbers.contains_set(RealNumbers())
        True
        """
        # Identity shortcut first, then the subset hierarchy R >= Z.
        return (other is self or
                isinstance(other, (RealNumbers, Integers)))

    def contains_all(self, array):
        """Test if `array` is an array of real numbers."""
        dt = getattr(array, 'dtype', None)
        if dt is None:
            # Plain sequences carry no dtype; infer one from the values.
            dt = np.result_type(*array)
        return is_real_dtype(dt)

    def __eq__(self, other):
        """Return ``self == other``."""
        return other is self or isinstance(other, RealNumbers)

    def __hash__(self):
        """Return ``hash(self)``."""
        return hash(type(self))

    def element(self, inp=None):
        """Return a real number from ``inp`` or from scratch."""
        return float(inp) if inp is not None else 0.0

    @property
    def examples(self):
        """Return examples of real numbers."""
        sample_values = (-1.0, 0.5, 0.0, 0.01, 1.0)
        return [(str(value), value) for value in sample_values]
class Integers(Set):

    """The set of all integers."""

    def __contains__(self, other):
        """Return ``other in self``."""
        return isinstance(other, Integral)

    def contains_set(self, other):
        """Test if ``other`` is a subset of the integers.

        Returns
        -------
        contained : bool
            ``True`` if other is an instance of `Integers`,
            ``False`` otherwise.

        Examples
        --------
        >>> integers = Integers()
        >>> integers.contains_set(RealNumbers())
        False
        """
        return other is self or isinstance(other, Integers)

    def contains_all(self, other):
        """Return ``True`` if ``other`` is a sequence of integers."""
        dt = getattr(other, 'dtype', None)
        if dt is None:
            # Plain sequences carry no dtype; infer one from the values.
            dt = np.result_type(*other)
        return is_int_dtype(dt)

    def __eq__(self, other):
        """Return ``self == other``."""
        return other is self or isinstance(other, Integers)

    def __hash__(self):
        """Return ``hash(self)``."""
        return hash(type(self))

    def element(self, inp=None):
        """Return an integer from ``inp`` or from scratch."""
        return int(inp) if inp is not None else 0

    @property
    def examples(self):
        """Return examples of integers."""
        sample_values = (-1, 0, 1)
        return [(str(value), value) for value in sample_values]
class CartesianProduct(Set):

    """Cartesian product of a finite number of sets.

    The elements of this set are tuples where the i-th entry
    is an element of the i-th set.
    """

    def __init__(self, *sets):
        """Initialize a new instance.

        Parameters
        ----------
        *sets : `Set`
            The factors of the product; each must be a `Set` instance.

        Raises
        ------
        TypeError
            If any factor is not a `Set`.
        """
        for set_ in sets:
            if not isinstance(set_, Set):
                raise TypeError('{!r} is not a Set instance.'.format(set_))
        self.__sets = tuple(sets)

    @property
    def sets(self):
        """Factors (sets) as a tuple."""
        return self.__sets

    def __contains__(self, other):
        """Return ``other in self``.

        Parameters
        ----------
        other :
            Object to be tested for membership

        Returns
        -------
        contains : bool
            ``True`` if ``other`` is a sequence with same length as this
            Cartesian product, and each entry is contained in the set with
            corresponding index, ``False`` otherwise.
        """
        try:
            len(other)
        except TypeError:
            # Not a sized sequence, hence it cannot be a product element.
            return False
        return (len(other) == len(self) and
                all(p in set_ for set_, p in zip(self.sets, other)))

    def __eq__(self, other):
        """Return ``self == other``.

        Returns
        -------
        equals : bool
            ``True`` if ``other`` is a `CartesianProduct` instance,
            has the same length as this Cartesian product and all sets
            with the same index are equal, ``False`` otherwise.
        """
        return (isinstance(other, CartesianProduct) and
                len(other) == len(self) and
                all(so == ss for so, ss in zip(other.sets, self.sets)))

    def __hash__(self):
        """Return ``hash(self)``."""
        return hash((type(self), self.sets))

    def element(self, inp=None):
        """Create a `CartesianProduct` element.

        Parameters
        ----------
        inp : iterable, optional
            Collection of input values for the
            `LinearSpace.element` methods
            of all sets in the Cartesian product.

        Returns
        -------
        element : tuple
            A tuple of the given input
        """
        if inp is None:
            tpl = tuple(set_.element() for set_ in self.sets)
        else:
            # zip() truncates at the shorter argument, so a too-short
            # ``inp`` is detected by the length check below.
            tpl = tuple(set_.element(inpt)
                        for inpt, set_ in zip(inp, self.sets))
            if len(tpl) != len(self):
                raise ValueError('input provides only {} values, needed '
                                 'are {}'.format(len(tpl), len(self)))
        return tpl

    def __len__(self):
        """Return ``len(self)``, the number of factors."""
        return len(self.sets)

    def __getitem__(self, indices):
        """Return ``self[indices]``.

        Examples
        --------
        >>> emp, univ = EmptySet(), UniversalSet()
        >>> prod = CartesianProduct(emp, univ, univ, emp, emp)
        >>> prod[2]
        UniversalSet()
        >>> prod[2:4]
        CartesianProduct(UniversalSet(), EmptySet())
        """
        if isinstance(indices, slice):
            # A slice yields a new product of the selected factors.
            return CartesianProduct(*self.sets[indices])
        else:
            return self.sets[indices]

    def __str__(self):
        """Return ``str(self)``."""
        return ' x '.join(str(set_) for set_ in self.sets)

    def __repr__(self):
        """Return ``repr(self)``."""
        sets_str = ', '.join(repr(set_) for set_ in self.sets)
        return '{}({})'.format(self.__class__.__name__, sets_str)
if __name__ == '__main__':
    # Run this module's doctest examples when executed as a script.
    # pylint: disable=wrong-import-position
    from odl.util.testutils import run_doctests
    run_doctests()
``` |
[
{
"content": "Write the code verbatim:\n```python\n# -*- coding: utf-8 -*-\n\nfrom gettext import gettext as _\n\nNAME = _('Bolivia')\n\nSTATES = [\n (_('Pando'), 254, 136, 124, 0),\n (_('Beni'), 253, 283, 292, 0),\n (_('La Paz'), 252, 124, 431, 0),\n (_('Oruro'), 251, 146, 609, 0),\n (_('Potosí'... | [
{
"content": "Write the code verbatim:\n<|memory_start|>```python\n# -*- coding: utf-8 -*-\n\nfrom gettext import gettext as _\n\nNAME = _('Bolivia')\n\nSTATES = [\n (_('Pando'), 254, 136, 124, 0),\n (_('Beni'), 253, 283, 292, 0),\n (_('La Paz'), 252, 124, 431, 0),\n (_('Oruro'), 251, 146, 609, 0),\... | ```python
# -*- coding: utf-8 -*-

# Static map data for Bolivia.
# NOTE(review): the numeric fields after each name look like positional
# layout data (label id / x / y / rotation or offsets) -- confirm against
# the rendering code before relying on this interpretation.

from gettext import gettext as _

# Country name (translatable).
NAME = _('Bolivia')

# Departments plus neighbouring countries drawn on the map.
STATES = [
    (_('Pando'), 254, 136, 124, 0),
    (_('Beni'), 253, 283, 292, 0),
    (_('La Paz'), 252, 124, 431, 0),
    (_('Oruro'), 251, 146, 609, 0),
    (_('Potosí'), 250, 204, 745, 0),
    (_('Cochabamba'), 249, 279, 503, 0),
    (_('Chuquisaca'), 248, 377, 730, 0),
    (_('Tarija'), 247, 374, 788, 0),
    (_('Santa Cruz'), 246, 529, 513, 0),
    (_('Brazil'), 245, 552, 109, 0),
    (_('Perú'), 244, 23, 271, 90),
    (_('Chile'), 243, 50, 763, 90),
    (_('Paraguay'), 242, 616, 800, 0),
    (_('Argentina'), 241, 331, 884, 0)
]

# Department capitals and the seat of government.
CAPITALS = [
    (_('La Paz'), 118, 464, 0, 0, -14),
    (_('Cobija'), 66, 104, 1, 0, 14),
    (_('Trinidad'), 318, 352, 1, 0, -14),
    (_('Cochabamba'), 244, 519, 1, 0, -14),
    (_('Oruro'), 193, 557, 1, -10, 14),
    (_('Potosí'), 272, 662, 1, 0, 14),
    (_('Sucre'), 305, 626, 1, 0, -14),
    (_('Tarija'), 342, 789, 1, 0, 14),
    (_('Santa Cruz'), 430, 544, 1, 0, 14)
]

# Other cities and towns.
CITIES = [
    (_('Apolo'), 86, 356, 2, 0, -14),
    (_('Reyes'), 159, 323, 2, 10, -14),
    (_('Santa Ana'), 277, 280, 2, 0, -14),
    (_('San Borja'), 202, 355, 2, 0, -14),
    (_('Puerto Heath'), 79, 204, 2, 0, 14),
    (_('Asunción'), 124, 160, 2, 0, -14),
    (_('Riberalta'), 238, 104, 2, -15, -14),
    (_('Magdalena'), 366, 255, 2, 0, -14),
    (_('Loreto'), 330, 376, 2, 0, 14),
    (_('Puerto Acosta'), 48, 403, 2, 30, -14),
    (_('Caranavi'), 155, 419, 2, 0, 14),
    (_('Guaqui'), 77, 475, 2, -15, 14),
    (_('Ascención'), 435, 405, 2, 0, -14),
    (_('Concepción'), 500, 434, 2, 0, -14),
    (_('San Ignacio'), 563, 443, 2, 0, 14),
    (_('Tarabuco'), 324, 634, 2, 0, 14),
    (_('Aiquile'), 307, 569, 2, 0, -14),
    (_('Villazón'), 289, 816, 2, 15, 14),
    (_('Uyuni'), 209, 723, 2, 0, -14),
    (_('Yucuiba'), 407, 809, 2, 0, 14),
    (_('Villa Montes'), 421, 762, 2, 0, -14),
    (_('Camiri'), 409, 694, 2, 20, -14),
    (_('Santa Rosa del Sara'), 402, 497, 2, 15, -14),
    (_('Montero'), 425, 513, 2, 0, 14),
    (_('Las Petas'), 680, 449, 2, 0, 14),
    (_('San José de Chiquitos'), 583, 544, 2, 0, -14),
    (_('Roboré'), 643, 576, 2, 0, -14),
    (_('Puerto Suárez'), 758, 614, 2, -30, -14)
]

# Rivers and lakes (name, label id, x, y, label angle).
RIVERS = [
    (_('Pilcomayo River'), 254, 462, 796, -45),
    (_('Parapetí River'), 253, 444, 690, 30),
    (_('Sécure River'), 252, 260, 407, 30),
    (_('Ichoa River'), 251, 296, 434, 40),
    (_('Piray River'), 250, 406, 520, 90),
    (_('Ichilo River'), 249, 311, 470, 90),
    (_('Grande River'), 248, 461, 526, -80),
    (_('Yacuma River'), 247, 204, 302, 30),
    (_('Madre de Dios River'), 246, 133, 158, 40),
    (_('Desaguadero River'), 245, 96, 538, -40),
    (_('Grande de Lípez River'), 244, 171, 773, 90),
    (_('San Miguel River'), 243, 400, 392, -45),
    (_('San Martín River'), 242, 505, 332, -45),
    (_('Abuná River'), 241, 176, 41, 30),
    (_('Orton River'), 240, 188, 88, 20),
    (_('Madeira River'), 239, 209, 54, 30),
    (_('Madidi River'), 238, 123, 238, 30),
    (_('Tequeje River'), 237, 118, 275, 20),
    (_('Beni River'), 236, 166, 299, 60),
    (_('Viata River'), 235, 207, 213, 70),
    (_('Apere River'), 234, 260, 338, 30),
    (_('Mamoré River'), 233, 338, 346, -80),
    (_('Blanco River'), 232, 474, 366, -50),
    (_('Paraguá River'), 231, 575, 351, -70),
    (_('Guaporé River'), 230, 524, 244, -25),
    (_('Tucavaca River'), 229, 682, 563, -40),
    (_('Lateriquique River'), 228, 613, 610, -40),
    (_('Lake Titicaca River'), 227, 47, 424, -45),
    (_('Lake Poopo River'), 226, 180, 610, 0)
]

# No route data for this map.
ROUTES = []

# Country fact sheet shown alongside the map.
STATS = [
    (_('Capital:'), _('Sucre') + _("(19º2' S - 65º15' W)")),
    (_('Language:'), _('Spanish') + ' , ' + _('Quechua') + ' , ' + _('Guarani')),
    (_('Government:'), _('Presidential republic')),
    (_('President:'), _('Evo Morales Ayma')),
    (_('Vice President:'), _('Álvaro García Linera')),
    (_('Independence:'), _('from Spain')),
    ('', _('declared: %s') % _('August 6, 1825')),
    ('', _('recognized: %s') % _('July 21, 1847')),
    (_('Area:'), '%(sup)s %(u)s (%(p)s)' % {'sup': _('1.098.581'), 'u': _('km²'), 'p': _('27th')}),
    (_('Population:'), '%(v)s (%(p)s)' % {'v': _('11.410.651'), 'p': _('83rd')}),
    (_('GDP:'), '%(c)s %(v)s %(u)s (%(p)s)' % {'c': _('USD'), 'v': _('33.537'), 'u': _('billion'), 'p': _('96th')}),
    (_('HDI:'), '%(l)s - %(v)s (%(p)s)' % {'l': _('Medium'), 'v': _('0,662'), 'p': _('119th')}),
    (_('Currency:'), _('Boliviano')),
    (_('Updated:'), _('April 5, 2016'))
]
``` |
[
{
"content": "Here is a code snippet:\n```python\n# TODO: fix and check allcommand-line arguments\n\nfrom util import debug_print\nfrom pygame import image, Rect, Surface\nfrom pygame.locals import KEYDOWN, KEYUP, USEREVENT\nimport pygame\nimport sys\nimport random\nimport json\nimport os\nfrom collections impo... | [
{
"content": "Here is a code snippet:\n<|memory_start|>```python\n# TODO: fix and check allcommand-line arguments\n\nfrom util import debug_print\nfrom pygame import image, Rect, Surface\nfrom pygame.locals import KEYDOWN, KEYUP, USEREVENT\nimport pygame\nimport sys\nimport random\nimport json\nimport os\nfrom ... | ```python
# TODO: fix and check allcommand-line arguments
from util import debug_print
from pygame import image, Rect, Surface
from pygame.locals import KEYDOWN, KEYUP, USEREVENT
import pygame
import sys
import random
import json
import os
from collections import namedtuple
from functools import partial
import argparse
import logging
#logging.basicConfig(filename='random_levels.log', level=logging.INFO)
log = logging.getLogger('moves')
log.addHandler(logging.FileHandler('moves.log', mode='w'))
log.setLevel(logging.INFO)
eventlog = logging.getLogger('events')
eventlog.addHandler(logging.StreamHandler(sys.stderr))
#fmt='%(asctime)s %(message)s'
#eventlog.addFormatter(logging.Formatter(fmt), datefmt='%m/%d/%Y %I:%M:%S %p')
eventlog.setLevel(logging.INFO)
# ------------ CONSTANTS ----------------
# Directory containing this module; may be '' when run from the same dir.
CONFIG_PATH = os.path.split(__file__)[0]
# FIX: os.path.join handles both the '' and the 'some/dir' case correctly;
# the previous plain concatenation produced e.g. 'some/dirtiles.json'.
TILE_POSITION_FILE = os.path.join(CONFIG_PATH, 'tiles.json')
TILE_IMAGE_FILE = os.path.join(CONFIG_PATH, '..', 'images', 'tiles.xpm')
LEVEL_FILE = 'level.txt'
SIZE = 32    # edge length of a square tile in pixels
SPEED = 4    # animation step in pixels per update

Position = namedtuple("Position", ["x", "y"])

# Unit movement vectors on the tile grid.
LEFT = Position(-1, 0)
RIGHT = Position(1, 0)
UP = Position(0, -1)
DOWN = Position(0, 1)

# Maps pygame arrow-key codes to movement vectors.
DIRECTIONS = {
    276: LEFT, 275: RIGHT,
    273: UP, 274: DOWN
}

# Custom pygame event ids and their timer intervals (milliseconds).
KEY_REPEAT_TIME = 250
KEY_REPEATED = USEREVENT + 1
DRAW_REPEAT_TIME = 100
DRAW = USEREVENT + 2
UPDATE = USEREVENT + 3
UPDATE_REPEAT_TIME = 20
MOVE_GHOST = USEREVENT + 4
MOVE_GHOST_TIME = 500
EXIT = USEREVENT + 5
# ------------- LOADING TILES -----------

def get_tile_rect(pos):
    """Converts tile indices to a pygame.Rect covering that tile in pixels."""
    return Rect(pos.x*SIZE, pos.y*SIZE, SIZE, SIZE)
def load_tiles(json_fn):
    """Load tile positions from a JSON file name.

    The file's values are dicts with "abbrev", "x" and "y" keys; the
    result maps each abbrev character to its pygame.Rect.
    """
    # FIX: ``with`` guarantees the file handle is closed (the original
    # left it open); json.load reads straight from the file object.
    with open(json_fn) as json_file:
        jd = json.load(json_file)
    tiles = {}
    for tile in jd.values():
        abbrev = tile["abbrev"]
        pos = Position(tile["x"], tile["y"])
        tiles[abbrev] = get_tile_rect(pos)
    return tiles
# ------------- GENERATING MAZES ------------

class MazeGenerator:
    """Generates two-dimensional mazes consisting of walls and dots."""

    @staticmethod
    def create_grid_string(dots, xsize, ysize):
        """Return an xsize * ysize character grid as a string.

        Cells whose Position is in ``dots`` become '.', all others '#';
        every row (including the last) ends with a newline.
        """
        # str.join avoids the quadratic cost of repeated string +=.
        return ''.join(
            ''.join('.' if Position(x, y) in dots else '#'
                    for x in range(xsize)) + '\n'
            for y in range(ysize))

    @staticmethod
    def get_all_dot_positions(xsize, ysize):
        """Return all interior positions; the 1-tile border stays wall."""
        return [Position(x, y)
                for x in range(1, xsize-1) for y in range(1, ysize-1)]

    @staticmethod
    def get_neighbors(pos):
        """Return the 8 orthogonal and diagonal neighbors of ``pos``."""
        return [
            Position(pos.x, pos.y-1), Position(pos.x, pos.y+1),
            Position(pos.x-1, pos.y), Position(pos.x+1, pos.y),
            Position(pos.x-1, pos.y-1), Position(pos.x+1, pos.y-1),
            Position(pos.x-1, pos.y+1), Position(pos.x+1, pos.y+1)
        ]

    @staticmethod
    def generate_dot_positions(xsize, ysize):
        """Randomly carve dots into the interior.

        A candidate becomes a dot only while fewer than 5 of its
        neighbors are already dots, which keeps wall structure.
        """
        positions = MazeGenerator.get_all_dot_positions(xsize, ysize)
        dots = set()
        while positions:
            pos = random.choice(positions)
            neighbors = MazeGenerator.get_neighbors(pos)
            free = [nb in dots for nb in neighbors]
            if free.count(True) < 5:
                dots.add(pos)
            positions.remove(pos)
        return dots

    @staticmethod
    def create_maze(size):
        """Returns a size.x * size.y maze as a string"""
        dots = MazeGenerator.generate_dot_positions(size.x, size.y)
        maze = MazeGenerator.create_grid_string(dots, size.x, size.y)
        return maze
# ------------- DRAWING GRIDS --------------

class TileGrid:
    """A 2D grid of single-character tiles, indexable by Position."""

    def __init__(self, data):
        # ``data`` is the string representation produced by MazeGenerator
        # or read from a level file.
        self._grid = self.parse_grid(data)

    def __repr__(self):
        return "\n".join(["".join(row) for row in self._grid])

    def parse_grid(self, data):
        """Parses the string representation into a nested list"""
        return [list(row) for row in data.strip().split("\n")]

    @property
    def rows(self):
        # The raw nested list; mutable through __setitem__.
        return self._grid

    @property
    def xsize(self):
        # Width taken from the first row; assumes a rectangular grid.
        return len(self.rows[0])

    @property
    def ysize(self):
        return len(self.rows)

    def __getitem__(self, pos):
        # Index with a Position: note the (y, x) order of the nested list.
        return self._grid[pos.y][pos.x]

    def __setitem__(self, pos, value):
        self._grid[pos.y][pos.x] = value

    def __iter__(self):
        """Iterate over all grid tiles as (Position, char) pairs"""
        for y, row in enumerate(self.rows):
            for x, char in enumerate(row):
                pos = Position(x, y)
                yield pos, char

    def find_tile(self, query='*'):
        """Returns a Position tuple for the given char on the level"""
        # Returns None implicitly when the char does not occur.
        for pos, char in self:
            if char == query:
                return pos

    def draw_grid(self, tile_img, tiles):
        """Returns an image of a tile-based grid"""
        #debug_print("drawing level", data)
        img = Surface((self.xsize * SIZE, self.ysize * SIZE))
        for pos, char in self:
            rect = get_tile_rect(pos)
            img.blit(tile_img, rect, tiles[char])
        return img
# ------------- SPRITES --------------

# A sprite is a tile character plus its grid position.
Sprite = namedtuple("Sprite", ['tile', 'pos'])
# An in-flight move: direction vector, current pixel offset, and a
# callback fired when the sprite arrives on its destination tile.
Animation = namedtuple("Animation", ['direction', 'offset', 'callback'])

sprites = {}     # actor name -> Sprite
animations = {}  # actor name -> Animation (present only while moving)


def is_moving(actor):
    # An actor is moving exactly while it has a pending animation.
    return actor in animations
def move(level, direction, actor="player"):
    """Handles moves on the level.

    Ignored while ``actor`` is already animating.  Moves are only
    allowed onto floor (' '), dot ('.') or exit ('x') tiles.
    """
    if is_moving(actor):
        return
    log.info('{} moves in direction {}/{}'.format(actor, direction.x, direction.y))
    old = sprites[actor].pos
    # avoids problem with finding: if '*' on map it might not be there
    new = Position(old.x + direction.x, old.y + direction.y)
    if level[new] in [" ", ".", "x"]:
        sprites[actor] = Sprite(sprites[actor].tile, new)
        # Start the pixel animation one full tile behind the new position.
        start_ofs = Position(-direction.x * SIZE, -direction.y * SIZE)
        check_collision()
        if actor == 'player':
            # this if should appear only once!
            # anyway, it is an indication that a class would be better.
            callback = partial(player_arrives_on_new_tile, level)
        else:
            callback = ghost_arrives_on_new_tile
        animations[actor] = Animation(direction, start_ofs, callback)
def ghost_arrives_on_new_tile():
    # Intentionally a no-op: the ghost has no tile-arrival behavior yet.
    pass


def player_arrives_on_new_tile(level):
    """Handle the tile the player just landed on (eat dot / reach exit)."""
    pos = sprites['player'].pos
    tile = level[pos]
    if tile == '.':
        level[pos] = ' '  # eats dot
    elif tile == 'x':
        exit_game()
def animate_sprites():
    """Advance every running animation by one SPEED step.

    When an animation's pixel offset reaches (0, 0) the actor has
    arrived: its callback fires and the animation is removed.
    """
    # Iterate over a copy of the keys: entries may be deleted while looping.
    for actor in list(animations.keys()):
        ani = animations[actor]
        ofs_x = ani.offset.x + ani.direction.x * SPEED
        ofs_y = ani.offset.y + ani.direction.y * SPEED
        new_offset = Position(ofs_x, ofs_y)
        if ofs_x == 0 and ofs_y == 0:
            ani.callback()
            del animations[actor]
        else:
            animations[actor] = Animation(ani.direction, new_offset, ani.callback)
def draw_sprites(img, tile_img, tiles):
    """Blit every sprite onto ``img``, shifted by its animation offset."""
    for actor in sprites:
        sprite = sprites[actor]
        rect = get_tile_rect(sprite.pos)
        if actor in animations:
            # Mid-move sprites are drawn between their old and new tiles.
            offset = animations[actor].offset
            rect = Rect((rect.x + offset.x, rect.y + offset.y, rect.w, rect.h))
        img.blit(tile_img, rect, tiles[sprite.tile])
# ------------- EVENT LOOP --------------

def event_loop(handle_key, delay=10, repeat=KEY_REPEAT_TIME):
    """Processes events and updates callbacks.

    Runs until an EXIT event is received.  Held keys are re-delivered via
    the KEY_REPEATED timer event every KEY_REPEAT_TIME milliseconds.
    """
    repeat_key = None
    running = True
    while running:
        pygame.event.pump()
        event = pygame.event.poll()
        if event.type == KEYDOWN:
            handle_key(event.key)
            # Arm the repeat timer for the key just pressed.
            repeat_key = event.key
            pygame.time.set_timer(KEY_REPEATED, KEY_REPEAT_TIME)
        elif event.type == KEYUP:
            if event.key == repeat_key:
                # Releasing the repeated key disarms the timer.
                repeat_key = None
                pygame.time.set_timer(KEY_REPEATED, 0)
        elif event.type == KEY_REPEATED:
            handle_key(repeat_key)
        elif event.type == DRAW:
            draw()
        elif event.type == UPDATE:
            update()
        elif event.type == MOVE_GHOST:
            move_ghost()
        elif event.type == EXIT:
            running = False
            eventlog.critical('exit event received: ' + str(event))
        else:
            eventlog.info('unhandled event: ' + str(event))
        pygame.time.delay(delay)
# ------------- GAME MECHANICS --------------

def move_ghost():
    """Move the ghost one tile in a uniformly random direction."""
    direction = random.choice([LEFT, RIGHT, UP, DOWN])
    move(maze, direction, "ghost")


def check_collision():
    """End the game when the player and the ghost share a tile."""
    if sprites['player'].pos == sprites['ghost'].pos:
        # much better than: if level[new] in ["*", "g"]:
        exit_game()


def exit_game():
    """Post an EXIT event; the event loop terminates on receiving it."""
    eve = pygame.event.Event(EXIT)
    pygame.event.post(eve)


def update():
    """Manages recurring checks in the game"""
    #check_collision() # redundant at the moment
    animate_sprites()
# ------------- MAIN GAME --------------

def load_level(fn):
    """Read the level file ``fn`` and return it as a TileGrid."""
    # FIX: ``with`` closes the file handle; the original leaked it.
    with open(fn) as level_file:
        data = level_file.read()
    return TileGrid(data)
def draw():
    """Redraw the maze and all sprites onto the global display."""
    img = maze.draw_grid(tile_img, tiles)
    draw_sprites(img, tile_img, tiles)
    rect = Rect((0, 0, maze.xsize*SIZE, maze.ysize*SIZE))
    display.blit(img, rect, rect)
    pygame.display.update()
def game(key):
    """Handles key events in the game: arrow keys move the player."""
    direction = DIRECTIONS.get(key)
    if direction:
        move(maze, direction, "player")  # more explicit than '*'

# design flaw: uses global variables 'display', tile_img', 'tiles'
def create_random_maze(size):
    """Create a random maze with the exit tile in its bottom-right corner."""
    maze_data = MazeGenerator.create_maze(size)
    maze = TileGrid(maze_data)
    # Place the exit just inside the bottom-right wall.
    maze[Position(size.x-2, size.y-2)] = 'x'
    return maze


def create_sprites(size):
    """Place the player in the top-left and the ghost in the top-right."""
    sprites = {
        'player': Sprite('*', Position(1, 1)),
        'ghost': Sprite('g', Position(size.x-2, 1))
    }
    return sprites


def create_display():
    """Initialize pygame and return the main display surface."""
    pygame.init()
    pygame.display.set_mode((800, 600))
    display = pygame.display.get_surface()
    return display
if __name__ == '__main__':
    # NOTE(review): --ghost, --load and --replay are parsed below but never
    # applied to the game state -- confirm whether that is intended.
    parser = argparse.ArgumentParser(description='Start the MazeRun game.')
    parser.add_argument('--x', type=int, default=12,
                        help='x size of random maze')
    parser.add_argument('--y', type=int, default=7,
                        help='y size of random maze')
    parser.add_argument('--ghost',
                        #dest="MOVE_GHOST_TIME", action="store_const",
                        type=int, default=500,
                        help='ghost speed (moves every .. milliseconds)')
    parser.add_argument('--load', type=str, default=None,
                        help='load maze from text file')
    parser.add_argument('--replay', type=str, default=None,
                        help='log file to replay from')
    parser.add_argument('-v', '--verbose', action="store_true",
                        help='print debugging information')
    #parser.add_argument('words', type=str, nargs='+',
    #                    help='the word for which characters are counted')
    #parser.add_argument("-v", "--verbosity", type=int, choices=[0, 1, 2],
    # positional arguments: without dashes
    # optional: with --
    # g = parser.add_mutually_exclusive_group()
    # g.add_argument(...)
    # g.add_argument(...)
    # -d delay=50 game speed
    # -g ghost speed
    # -x, -y size of the grid
    # -r replay from logfile
    # -l load level from file
    # optional arguments
    # --verbose
    # --help info
    args = parser.parse_args()

    # Build the game world ...
    size = Position(args.x, args.y)
    display = create_display()
    maze = create_random_maze(size)
    log.info("random level created\n" + str(maze))
    log.info("----------------\n")
    #maze = load_level(LEVEL_FILE)
    sprites = create_sprites(size)
    tile_img = image.load(TILE_IMAGE_FILE)
    tiles = load_tiles(TILE_POSITION_FILE)

    # ... arm the recurring timers and hand control to the event loop.
    pygame.time.set_timer(DRAW, DRAW_REPEAT_TIME)
    pygame.time.set_timer(UPDATE, UPDATE_REPEAT_TIME)
    pygame.time.set_timer(MOVE_GHOST, MOVE_GHOST_TIME)
    event_loop(game)
``` |
[
{
"content": "Recreate the original code text:\n```python\n# Copyright 2017 Yahoo Inc.\n# Licensed under the terms of the Apache 2.0 license.\n# Please see LICENSE file in the project root for terms.\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import nested_scopes... | [
{
"content": "Recreate the original code text:\n<|memory_start|>```python\n# Copyright 2017 Yahoo Inc.\n# Licensed under the terms of the Apache 2.0 license.\n# Please see LICENSE file in the project root for terms.\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ impo... | ```python
# Copyright 2017 Yahoo Inc.
# Licensed under the terms of the Apache 2.0 license.
# Please see LICENSE file in the project root for terms.
from __future__ import absolute_import
from __future__ import division
from __future__ import nested_scopes
from __future__ import print_function
from multiprocessing.managers import BaseManager
from multiprocessing import JoinableQueue
class TFManager(BaseManager):
    """Python multiprocessing.Manager for distributed, multi-process communication.

    The proxy methods ('get', 'set', 'get_queue') are registered
    dynamically in :func:`start` and :func:`connect` below.
    """
    pass
# global to each Spark executor's python worker
mgr = None    # cached TFManager instance for this python worker
qdict = {}    # dictionary of queues, keyed by queue name
kdict = {}    # dictionary of key-value pairs


def _get(key):
    """Look up ``key`` in the shared key-value store."""
    return kdict[key]


def _set(key, value):
    """Store ``value`` under ``key`` in the shared key-value store."""
    kdict[key] = value


def _get_queue(qname):
    """Return the queue registered under ``qname``, or None if unknown."""
    return qdict.get(qname)
def start(authkey, queues, mode='local'):
    """Create a new multiprocess.Manager (or return existing one).

    Args:
      :authkey: string authorization key
      :queues: *INTERNAL_USE*
      :mode: 'local' indicates that the manager will only be accessible from the same host, otherwise remotely accessible.

    Returns:
      A TFManager instance, which is also cached in local memory of the Python worker process.
    """
    global mgr, qdict, kdict
    # Reset any state left over from a previous manager in this worker.
    qdict.clear()
    kdict.clear()
    for q in queues:
        qdict[q] = JoinableQueue()
    # Expose the module-level helpers through manager proxies.
    TFManager.register('get_queue', callable=lambda qname: _get_queue(qname))
    TFManager.register('get', callable=lambda key: _get(key))
    TFManager.register('set', callable=lambda key, value: _set(key, value))
    if mode == 'remote':
        # Port 0 lets the OS pick a free port; '' binds all interfaces.
        mgr = TFManager(address=('', 0), authkey=authkey)
    else:
        mgr = TFManager(authkey=authkey)
    mgr.start()
    return mgr
def connect(address, authkey):
    """Connect to a multiprocess.Manager.

    Args:
      :address: unique address to the TFManager, either a unique connection string for 'local', or a (host, port) tuple for remote.
      :authkey: string authorization key

    Returns:
      A TFManager instance referencing the remote TFManager at the supplied address.
    """
    # Register the same proxy names as start(), without local callables.
    TFManager.register('get_queue')
    TFManager.register('get')
    TFManager.register('set')
    m = TFManager(address, authkey=authkey)
    m.connect()
    return m
``` |
[
{
"content": "Here is the code block:\n```python\n#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"\n\nDIRAC and GridPP: perform a query on the CERN@school frames.\n\n\"\"\"\n\n#...for the operating system stuff.\nimport os\n\n#...for parsing the arguments.\nimport argparse\n\n#...for the logging.\nimpor... | [
{
"content": "Here is the code block:\n<|memory_start|>```python\n#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"\n\nDIRAC and GridPP: perform a query on the CERN@school frames.\n\n\"\"\"\n\n#...for the operating system stuff.\nimport os\n\n#...for parsing the arguments.\nimport argparse\n\n#...for the... | ```python
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
DIRAC and GridPP: perform a query on the CERN@school frames.
"""
#...for the operating system stuff.
import os
#...for parsing the arguments.
import argparse
#...for the logging.
import logging as lg
# Import the JSON library.
import json
# The DIRAC import statements.
from DIRAC.Core.Base import Script
Script.parseCommandLine()
from DIRAC.Interfaces.API.Dirac import Dirac
#...for the DIRAC File Catalog client interface.
from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
if __name__ == "__main__":

    print("")
    print("########################################################")
    print("* GridPP and DIRAC: CERN@school frame metadata queries *")
    print("########################################################")
    print("")

    # Get the datafile path from the command line.
    parser = argparse.ArgumentParser()
    parser.add_argument("queryJson", help="Path to the query JSON.")
    parser.add_argument("outputPath", help="The path for the output files.")
    parser.add_argument("dfcBaseDir", help="The name of the base directory on the DFC.")
    parser.add_argument("-v", "--verbose", help="Increase output verbosity", action="store_true")
    args = parser.parse_args()

    ## The path to the data file.
    datapath = args.queryJson

    ## The output path.
    outputpath = args.outputPath

    # Check if the output directory exists. If it doesn't, quit.
    if not os.path.isdir(outputpath):
        raise IOError("* ERROR: '%s' output directory does not exist!" % (outputpath))

    ## Base directory for the file uploads.
    dfc_base = args.dfcBaseDir

    # Set the logging level.
    if args.verbose:
        level = lg.DEBUG
    else:
        level = lg.INFO

    # Configure the logging.
    lg.basicConfig(filename='log_perform_frame_query.log', filemode='w', level=level)

    print("*")
    print("* Input JSON : '%s'" % (datapath))
    print("* Output path : '%s'" % (outputpath))
    print("* DFC base directory : '%s'" % (dfc_base))

    ## The DFC client.
    fc = FileCatalogClient()

    ## The frame query JSON file - FIXME: check it exists...
    # FIX: ``with`` closes the file even if the JSON is malformed; the
    # original used an explicit open/close pair.
    with open(datapath, "r") as qf:
        qd = json.load(qf)

    meta_dict = {
        "start_time" : { ">=" : int(qd[0]["start_time"]) },
        "end_time" : { "<=" : int(qd[0]["end_time" ]) }
        # #"lat" : { ">" : 60.0 }
        # #"n_pixel" : { ">" : 700 }
        # #"n_kluster" : { ">" : 40}
        }

    ## The query result.
    result = fc.findFilesByMetadata(meta_dict, path=dfc_base)

    print("*")
    # FIX: this line was a Python 2 print *statement* (a SyntaxError under
    # Python 3, and inconsistent with the print() calls everywhere else).
    print("* Metadata query:", meta_dict)
    print("*")
    print("* Number of frames found : %d" % (len(result["Value"])))
    print("*")

    # Get the cluster file names from the metadata query.

    # ## Kluster file names.
    # kluster_file_names = []

    for fn in sorted(result["Value"]):
        #print("* Found: '%s'." % (fn))
        filemetadata = fc.getFileUserMetadata(fn)
        frameid = str(filemetadata['Value']['frameid'])
        n_kluster = int(filemetadata['Value']['n_kluster'])
        print("*--> Frame ID : '%s'" % (frameid))
        print("*--> Number of clusters = %d" % (n_kluster))
        #print("*")
        # for i in range(n_kluster):
        #     kn = "%s_k%05d.png" % (frameid, i)
        #     kluster_file_names.append(kn)
        # print("*")

    # #lg.info(" * Clusters to be downloaded:")
    # #for kn in kluster_names:
    # #    lg.info(" *--> '%s'" % (kn))
    #
    # print("* Number of clusters found : %d" % (len(kluster_file_names)))
``` |
[
{
"content": "Repeat the code precisely as written (spacing intact):\n```python\n# -*- coding: utf-8 -*-\n\n__author__ = 'Patrick Michl'\n__email__ = 'patrick.michl@gmail.com'\n__license__ = 'GPLv3'\n\nimport nemoa\nimport qdeep.objects.common\nfrom PySide import QtGui, QtCore\n\nclass Editor(qdeep.objects.c... | [
{
"content": "Repeat the code precisely as written (spacing intact):\n<|memory_start|>```python\n# -*- coding: utf-8 -*-\n\n__author__ = 'Patrick Michl'\n__email__ = 'patrick.michl@gmail.com'\n__license__ = 'GPLv3'\n\nimport nemoa\nimport qdeep.objects.common\nfrom PySide import QtGui, QtCore\n\nclass Editor... | ```python
# -*- coding: utf-8 -*-
__author__ = 'Patrick Michl'
__email__ = 'patrick.michl@gmail.com'
__license__ = 'GPLv3'
import nemoa
import qdeep.objects.common
from PySide import QtGui, QtCore
class Editor(qdeep.objects.common.Editor):
    """Text editor window for nemoa 'script' objects."""

    # Object type handled by this editor.
    objType = 'script'

    def createCentralWidget(self):
        """Create the central text area with code font and highlighting."""
        self.textArea = QtGui.QTextEdit()
        self.textArea.setHorizontalScrollBarPolicy(
            QtCore.Qt.ScrollBarAsNeeded)
        self.textArea.setVerticalScrollBarPolicy(
            QtCore.Qt.ScrollBarAsNeeded)
        # Track edits so the modified flag / window title stay current.
        self.textArea.document().contentsChanged.connect(
            self.documentWasModified)

        # Fixed-pitch font suitable for source code.
        font = QtGui.QFont()
        font.setFamily('Courier')
        font.setFixedPitch(True)
        font.setPointSize(10)
        self.textArea.setFont(font)
        self.textArea.setAcceptDrops(True)

        self.highlighter = Highlighter(self.textArea.document())
        self.setCentralWidget(self.textArea)

    def createActions(self):
        """Create the 'Run Script' action (shortcut F5)."""
        self.actRunScript = QtGui.QAction(
            qdeep.common.getIcon('actions', 'debug-run.png'),
            "Run Script", self,
            shortcut = "F5",
            statusTip = "Run python script",
            triggered = self.runScript)

    def createToolBars(self):
        """Add the script toolbar carrying the run action."""
        self.scriptToolBar = self.addToolBar("Script")
        self.scriptToolBar.addAction(self.actRunScript)

    def getModified(self):
        """Return True if the document has unsaved changes."""
        return self.textArea.document().isModified()

    def setModified(self, value = True):
        """Set the document's modified flag."""
        self.textArea.document().setModified(value)

    def loadFile(self, fileName):
        """Load ``fileName`` into the text area; return False on error."""
        file = QtCore.QFile(fileName)
        if not file.open(QtCore.QFile.ReadOnly | QtCore.QFile.Text):
            QtGui.QMessageBox.warning(self, "MDI",
                "Cannot read file %s:\n%s." % (
                fileName, file.errorString()))
            return False
        instr = QtCore.QTextStream(file)
        self.textArea.setPlainText(instr.readAll())
        # NOTE(review): this connect duplicates the one made in
        # createCentralWidget -- confirm whether it is required.
        self.textArea.document().contentsChanged.connect(
            self.documentWasModified)
        return True

    def saveFile(self, fileName):
        """Write the text area content to ``fileName``; return False on error."""
        file = QtCore.QFile(fileName)
        if not file.open(QtCore.QFile.WriteOnly | QtCore.QFile.Text):
            QtGui.QMessageBox.warning(self, "MDI",
                "Cannot write file %s:\n%s." % (fileName,
                file.errorString()))
            return False
        outstr = QtCore.QTextStream(file)
        # Show a wait cursor while the stream is flushed to disk.
        QtGui.QApplication.setOverrideCursor(QtCore.Qt.WaitCursor)
        outstr << self.textArea.toPlainText()
        QtGui.QApplication.restoreOverrideCursor()
        self.setModified(False)
        self.updateWindowTitle()
        return True

    def runScript(self):
        """Execute the script through nemoa, showing a wait cursor."""
        QtGui.QApplication.setOverrideCursor(QtCore.Qt.WaitCursor)
        nemoa.run(self.getName())
        QtGui.QApplication.restoreOverrideCursor()
class Highlighter(QtGui.QSyntaxHighlighter):
    """Regex-based syntax highlighter for the script editor."""

    def __init__(self, parent=None):
        """Build the (pattern, format) rule table used by highlightBlock."""
        super(Highlighter, self).__init__(parent)

        # Language keywords: bold, dark blue.
        keywordFormat = QtGui.QTextCharFormat()
        keywordFormat.setForeground(QtCore.Qt.darkBlue)
        keywordFormat.setFontWeight(QtGui.QFont.Bold)
        keywordPatterns = ["\\bchar\\b", "\\bclass\\b", "\\bconst\\b",
            "\\bdouble\\b", "\\benum\\b", "\\bexplicit\\b", "\\bfriend\\b",
            "\\binline\\b", "\\bint\\b", "\\blong\\b", "\\bnamespace\\b",
            "\\boperator\\b", "\\bprivate\\b", "\\bprotected\\b",
            "\\bpublic\\b", "\\bshort\\b", "\\bsignals\\b", "\\bsigned\\b",
            "\\bslots\\b", "\\bstatic\\b", "\\bstruct\\b",
            "\\btemplate\\b", "\\btypedef\\b", "\\btypename\\b",
            "\\bunion\\b", "\\bunsigned\\b", "\\bvirtual\\b", "\\bvoid\\b",
            "\\bvolatile\\b", "\\bimport\\b", "\\bdef\\b",
            "\\bTrue\\b", "\\bFalse\\b", "\\breturn\\b"]
        self.highlightingRules = [(QtCore.QRegExp(pattern), keywordFormat)
                for pattern in keywordPatterns]

        # Qt class names (Q...): bold, dark magenta.
        classFormat = QtGui.QTextCharFormat()
        classFormat.setFontWeight(QtGui.QFont.Bold)
        classFormat.setForeground(QtCore.Qt.darkMagenta)
        self.highlightingRules.append((QtCore.QRegExp("\\bQ[A-Za-z]+\\b"),
                classFormat))

        # Single-line comments (//...): red.
        singleLineCommentFormat = QtGui.QTextCharFormat()
        singleLineCommentFormat.setForeground(QtCore.Qt.red)
        self.highlightingRules.append((QtCore.QRegExp("//[^\n]*"),
                singleLineCommentFormat))

        # Multi-line comments (/* ... */) are handled separately in
        # highlightBlock because they can span several text blocks.
        self.multiLineCommentFormat = QtGui.QTextCharFormat()
        self.multiLineCommentFormat.setForeground(QtCore.Qt.red)

        # String literals (double- or single-quoted): dark green.
        quotationFormat = QtGui.QTextCharFormat()
        quotationFormat.setForeground(QtCore.Qt.darkGreen)
        self.highlightingRules.append((QtCore.QRegExp("\".*\""),
                quotationFormat))
        self.highlightingRules.append((QtCore.QRegExp("'.*'"),
                quotationFormat))

        # Function names (identifier followed by '('): italic blue.
        functionFormat = QtGui.QTextCharFormat()
        functionFormat.setFontItalic(True)
        functionFormat.setForeground(QtCore.Qt.blue)
        self.highlightingRules.append((QtCore.QRegExp("\\b[A-Za-z0-9_]+(?=\\()"),
                functionFormat))

        self.commentStartExpression = QtCore.QRegExp("/\\*")
        self.commentEndExpression = QtCore.QRegExp("\\*/")

    def highlightBlock(self, text):
        """Apply single-line rules, then the multi-line comment state.

        Block state 1 means "inside an unterminated /* comment"; Qt feeds
        that state into the next block via previousBlockState().
        """
        for pattern, format in self.highlightingRules:
            expression = QtCore.QRegExp(pattern)
            index = expression.indexIn(text)
            while index >= 0:
                length = expression.matchedLength()
                self.setFormat(index, length, format)
                index = expression.indexIn(text, index + length)

        self.setCurrentBlockState(0)

        startIndex = 0
        if self.previousBlockState() != 1:
            startIndex = self.commentStartExpression.indexIn(text)

        while startIndex >= 0:
            endIndex = self.commentEndExpression.indexIn(text, startIndex)
            if endIndex == -1:
                # No terminator on this line: comment continues next block.
                self.setCurrentBlockState(1)
                commentLength = len(text) - startIndex
            else:
                commentLength = endIndex - startIndex + self.commentEndExpression.matchedLength()
            self.setFormat(startIndex, commentLength,
                    self.multiLineCommentFormat)
            startIndex = self.commentStartExpression.indexIn(text,
                    startIndex + commentLength)
``` |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.