repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
codesparkle/youtube-dl | youtube_dl/extractor/hornbunny.py | 4 | 1526 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
int_or_none,
parse_duration,
)
class HornBunnyIE(InfoExtractor):
    """Extractor for hornbunny.com video pages.

    Matches URLs of the form
    ``http(s)://(www.)hornbunny.com/videos/<title-slug>-<id>.html`` and
    builds the info dict from the page's HTML5 media entry plus scraped
    runtime/view-count metadata.
    """
    # FIX: was r'http?://' which matches only 'htt' or 'http' (the '?' binds
    # to the final 'p'); 'https?' is required to also accept https links.
    _VALID_URL = r'https?://(?:www\.)?hornbunny\.com/videos/(?P<title_dash>[a-z-]+)-(?P<id>\d+)\.html'
    _TEST = {
        'url': 'http://hornbunny.com/videos/panty-slut-jerk-off-instruction-5227.html',
        'md5': 'e20fd862d1894b67564c96f180f43924',
        'info_dict': {
            'id': '5227',
            'ext': 'mp4',
            'title': 'panty slut jerk off instruction',
            'duration': 550,
            'age_limit': 18,
            'view_count': int,
            # raw string: '\.' is an invalid escape in a plain string literal
            'thumbnail': r're:^https?://.*\.jpg$',
        }
    }

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        title = self._og_search_title(webpage)
        # The page carries a standard HTML5 <video> element; let the common
        # helper build formats/thumbnail, then overlay scraped metadata.
        info_dict = self._parse_html5_media_entries(url, webpage, video_id)[0]
        duration = parse_duration(self._search_regex(
            r'<strong>Runtime:</strong>\s*([0-9:]+)</div>',
            webpage, 'duration', fatal=False))
        view_count = int_or_none(self._search_regex(
            r'<strong>Views:</strong>\s*(\d+)</div>',
            webpage, 'view count', fatal=False))
        info_dict.update({
            'id': video_id,
            'title': title,
            'duration': duration,
            'view_count': view_count,
            'age_limit': 18,
        })
        return info_dict
| unlicense |
asimshankar/tensorflow | tensorflow/python/estimator/canned/optimizers.py | 41 | 1298 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""optimizers python module.
Importing from tensorflow.python.estimator is unsupported
and will soon break!
"""
# pylint: disable=unused-import,g-bad-import-order,g-import-not-at-top,wildcard-import
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_estimator.python.estimator.canned import optimizers
# Include attrs that start with single underscore.
_HAS_DYNAMIC_ATTRIBUTES = True
# Re-export every name (public and single-underscore) from the relocated
# tensorflow_estimator module so the legacy tensorflow.python.estimator
# import path keeps working during the migration period.
optimizers.__all__ = [s for s in dir(optimizers) if not s.startswith('__')]
from tensorflow_estimator.python.estimator.canned.optimizers import *
| apache-2.0 |
lukas-bednar/jenkins-job-builder | tests/cmd/subcommands/test_update.py | 1 | 6946 |
# Joint copyright:
# - Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import six
from jenkins_jobs import builder
from jenkins_jobs import cmd
from tests.base import mock
from tests.cmd.test_cmd import CmdTestsBase
@mock.patch('jenkins_jobs.builder.Jenkins.get_plugins_info', mock.MagicMock)
class UpdateTests(CmdTestsBase):
    """Tests for the ``update`` subcommand of jenkins-job-builder.

    ``Jenkins.get_plugins_info`` is patched out at class level so no test
    ever contacts a live Jenkins instance.
    """
    @mock.patch('jenkins_jobs.cmd.Builder.update_jobs')
    def test_update_jobs(self, update_jobs_mock):
        """
        Test update_job is called
        """
        # don't care about the value returned here
        update_jobs_mock.return_value = ([], 0)
        path = os.path.join(self.fixtures_path, 'cmd-002.yaml')
        args = self.parser.parse_args(['update', path])
        cmd.execute(args, self.config)
        update_jobs_mock.assert_called_with([path], [], n_workers=mock.ANY)
    @mock.patch('jenkins_jobs.builder.Jenkins.is_job', return_value=True)
    @mock.patch('jenkins_jobs.builder.Jenkins.get_jobs')
    @mock.patch('jenkins_jobs.builder.Jenkins.get_job_md5')
    @mock.patch('jenkins_jobs.builder.Jenkins.update_job')
    def test_update_jobs_decode_job_output(self, update_job_mock,
                                           get_job_md5_mock, get_jobs_mock,
                                           is_job_mock):
        """
        Test that job xml output has been decoded before attempting to update
        """
        # don't care about the value returned here
        update_job_mock.return_value = ([], 0)
        path = os.path.join(self.fixtures_path, 'cmd-002.yaml')
        args = self.parser.parse_args(['update', path])
        cmd.execute(args, self.config)
        # the XML payload handed to update_job must already be text, not bytes
        self.assertTrue(isinstance(update_job_mock.call_args[0][1],
                                   six.text_type))
    @mock.patch('jenkins_jobs.builder.Jenkins.is_job', return_value=True)
    @mock.patch('jenkins_jobs.builder.Jenkins.get_jobs')
    @mock.patch('jenkins_jobs.builder.Builder.delete_job')
    @mock.patch('jenkins_jobs.cmd.Builder')
    def test_update_jobs_and_delete_old(self, builder_mock, delete_job_mock,
                                        get_jobs_mock, is_job_mock):
        """
        Test update behaviour with --delete-old option
        Test update of jobs with the --delete-old option enabled, where only
        some jobs result in has_changed() to limit the number of times
        update_job is called, and have the get_jobs() method return additional
        jobs not in the input yaml to test that the code in cmd will call
        delete_job() after update_job() when '--delete-old' is set but only
        for the extra jobs.
        """
        # set up some test data
        jobs = ['old_job001', 'old_job002']
        extra_jobs = [{'name': name} for name in jobs]
        builder_obj = builder.Builder('http://jenkins.example.com',
                                      'doesnot', 'matter',
                                      plugins_list={})
        # get the instance created by mock and redirect some of the method
        # mocks to call real methods on a the above test object.
        b_inst = builder_mock.return_value
        b_inst.plugins_list = builder_obj.plugins_list
        b_inst.update_jobs.side_effect = builder_obj.update_jobs
        b_inst.delete_old_managed.side_effect = builder_obj.delete_old_managed
        def _get_jobs():
            # jobs parsed from the yaml plus the two "stale" extras above
            return builder_obj.parser.jobs + extra_jobs
        get_jobs_mock.side_effect = _get_jobs
        # override cache to ensure Jenkins.update_job called a limited number
        # of times
        self.cache_mock.return_value.has_changed.side_effect = (
            [True] * 2 + [False] * 2)
        path = os.path.join(self.fixtures_path, 'cmd-002.yaml')
        args = self.parser.parse_args(['update', '--delete-old', path])
        with mock.patch('jenkins_jobs.builder.Jenkins.update_job') as update:
            with mock.patch('jenkins_jobs.builder.Jenkins.is_managed',
                            return_value=True):
                cmd.execute(args, self.config)
            self.assertEqual(2, update.call_count,
                             "Expected Jenkins.update_job to be called '%d' "
                             "times, got '%d' calls instead.\n"
                             "Called with: %s" % (2, update.call_count,
                                                  update.mock_calls))
        calls = [mock.call(name) for name in jobs]
        self.assertEqual(2, delete_job_mock.call_count,
                         "Expected Jenkins.delete_job to be called '%d' "
                         "times got '%d' calls instead.\n"
                         "Called with: %s" % (2, delete_job_mock.call_count,
                                              delete_job_mock.mock_calls))
        delete_job_mock.assert_has_calls(calls, any_order=True)
    @mock.patch('jenkins_jobs.builder.jenkins.Jenkins')
    def test_update_timeout_not_set(self, jenkins_mock):
        """Check that timeout is left unset
        Test that the Jenkins object has the timeout set on it only when
        provided via the config option.
        """
        path = os.path.join(self.fixtures_path, 'cmd-002.yaml')
        args = self.parser.parse_args(['update', path])
        with mock.patch('jenkins_jobs.cmd.Builder.update_job') as update_mock:
            update_mock.return_value = ([], 0)
            cmd.execute(args, self.config)
        # unless the timeout is set, should only call with 3 arguments
        # (url, user, password)
        self.assertEqual(len(jenkins_mock.call_args[0]), 3)
    @mock.patch('jenkins_jobs.builder.jenkins.Jenkins')
    def test_update_timeout_set(self, jenkins_mock):
        """Check that timeout is set correctly
        Test that the Jenkins object has the timeout set on it only when
        provided via the config option.
        """
        path = os.path.join(self.fixtures_path, 'cmd-002.yaml')
        args = self.parser.parse_args(['update', path])
        self.config.set('jenkins', 'timeout', '0.2')
        with mock.patch('jenkins_jobs.cmd.Builder.update_job') as update_mock:
            update_mock.return_value = ([], 0)
            cmd.execute(args, self.config)
        # when timeout is set, the fourth argument to the Jenkins api init
        # should be the value specified from the config
        self.assertEqual(jenkins_mock.call_args[0][3], 0.2)
| apache-2.0 |
eldabbagh/gae-boilerplate | bp_includes/external/wtforms/fields/core.py | 50 | 28554 | from __future__ import unicode_literals
import datetime
import decimal
import itertools
import time
from wtforms import widgets
from wtforms.compat import text_type, izip
from wtforms.validators import StopValidation
# Public API of this module.
__all__ = (
    'BooleanField', 'DecimalField', 'DateField', 'DateTimeField', 'FieldList',
    'FloatField', 'FormField', 'IntegerField', 'RadioField', 'SelectField',
    'SelectMultipleField', 'StringField',
)
# Sentinel meaning "no data was supplied" — needed because None is a
# perfectly legitimate field value.
_unset_value = object()
class DummyTranslations(object):
    """No-op translation backend used when a form supplies no real one.

    Implements the minimal subset of the gettext API that fields call.
    """
    def gettext(self, string):
        """Return *string* unchanged (no translation performed)."""
        return string

    def ngettext(self, singular, plural, n):
        """Return the singular form when ``n == 1``, otherwise the plural."""
        return singular if n == 1 else plural
class Field(object):
    """
    Field base class
    """
    # Class-level defaults; instances overwrite these during __init__/process.
    errors = tuple()
    process_errors = tuple()
    raw_data = None
    validators = tuple()
    widget = None
    _formfield = True
    _translations = DummyTranslations()
    def __new__(cls, *args, **kwargs):
        # Fields declared on a form class body are constructed without
        # _form/_name, so they become UnboundField placeholders; the form
        # metaclass later calls bind() to build the real instance.
        if '_form' in kwargs and '_name' in kwargs:
            return super(Field, cls).__new__(cls)
        else:
            return UnboundField(cls, *args, **kwargs)
    def __init__(self, label=None, validators=None, filters=tuple(),
                 description='', id=None, default=None, widget=None,
                 _form=None, _name=None, _prefix='', _translations=None):
        """
        Construct a new field.
        :param label:
            The label of the field.
        :param validators:
            A sequence of validators to call when `validate` is called.
        :param filters:
            A sequence of filters which are run on input data by `process`.
        :param description:
            A description for the field, typically used for help text.
        :param id:
            An id to use for the field. A reasonable default is set by the form,
            and you shouldn't need to set this manually.
        :param default:
            The default value to assign to the field, if no form or object
            input is provided. May be a callable.
        :param widget:
            If provided, overrides the widget used to render the field.
        :param _form:
            The form holding this field. It is passed by the form itself during
            construction. You should never pass this value yourself.
        :param _name:
            The name of this field, passed by the enclosing form during its
            construction. You should never pass this value yourself.
        :param _prefix:
            The prefix to prepend to the form name of this field, passed by
            the enclosing form during construction.
        If `_form` and `_name` isn't provided, an :class:`UnboundField` will be
        returned instead. Call its :func:`bind` method with a form instance and
        a name to construct the field.
        """
        if _translations is not None:
            self._translations = _translations
        self.default = default
        self.description = description
        self.filters = filters
        self.flags = Flags()
        self.name = _prefix + _name
        self.short_name = _name
        self.type = type(self).__name__
        self.validators = validators or list(self.validators)
        self.id = id or self.name
        # Default label: the field name title-cased ('first_name' -> 'First Name').
        self.label = Label(self.id, label if label is not None else self.gettext(_name.replace('_', ' ').title()))
        if widget is not None:
            self.widget = widget
        # Copy any validator-declared field_flags (e.g. 'required') onto this
        # field's flags so widgets and templates can inspect them.
        for v in self.validators:
            flags = getattr(v, 'field_flags', ())
            for f in flags:
                setattr(self.flags, f, True)
    def __unicode__(self):
        """
        Returns a HTML representation of the field. For more powerful rendering,
        see the `__call__` method.
        """
        return self()
    def __str__(self):
        """
        Returns a HTML representation of the field. For more powerful rendering,
        see the `__call__` method.
        """
        return self()
    def __html__(self):
        """
        Returns a HTML representation of the field. For more powerful rendering,
        see the `__call__` method.
        """
        return self()
    def __call__(self, **kwargs):
        """
        Render this field as HTML, using keyword args as additional attributes.
        Any HTML attribute passed to the method will be added to the tag
        and entity-escaped properly.
        """
        return self.widget(self, **kwargs)
    def gettext(self, string):
        # Delegate to the configured translations backend (DummyTranslations
        # by default).
        return self._translations.gettext(string)
    def ngettext(self, singular, plural, n):
        return self._translations.ngettext(singular, plural, n)
    def validate(self, form, extra_validators=tuple()):
        """
        Validates the field and returns True or False. `self.errors` will
        contain any errors raised during validation. This is usually only
        called by `Form.validate`.
        Subfields shouldn't override this, but rather override either
        `pre_validate`, `post_validate` or both, depending on needs.
        :param form: The form the field belongs to.
        :param extra_validators: A list of extra validators to run.
        """
        # Start from errors accumulated during process(); never mutate the
        # shared class-level tuple.
        self.errors = list(self.process_errors)
        stop_validation = False
        # Call pre_validate
        try:
            self.pre_validate(form)
        except StopValidation as e:
            if e.args and e.args[0]:
                self.errors.append(e.args[0])
            stop_validation = True
        except ValueError as e:
            self.errors.append(e.args[0])
        # Run validators
        if not stop_validation:
            for validator in itertools.chain(self.validators, extra_validators):
                try:
                    validator(form, self)
                except StopValidation as e:
                    # StopValidation with a message records it; either way no
                    # further validators run for this field.
                    if e.args and e.args[0]:
                        self.errors.append(e.args[0])
                    stop_validation = True
                    break
                except ValueError as e:
                    self.errors.append(e.args[0])
        # Call post_validate
        try:
            self.post_validate(form, stop_validation)
        except ValueError as e:
            self.errors.append(e.args[0])
        return len(self.errors) == 0
    def pre_validate(self, form):
        """
        Override if you need field-level validation. Runs before any other
        validators.
        :param form: The form the field belongs to.
        """
        pass
    def post_validate(self, form, validation_stopped):
        """
        Override if you need to run any field-level validation tasks after
        normal validation. This shouldn't be needed in most cases.
        :param form: The form the field belongs to.
        :param validation_stopped:
            `True` if any validator raised StopValidation.
        """
        pass
    def process(self, formdata, data=_unset_value):
        """
        Process incoming data, calling process_data, process_formdata as needed,
        and run filters.
        If `data` is not provided, process_data will be called on the field's
        default.
        Field subclasses usually won't override this, instead overriding the
        process_formdata and process_data methods. Only override this for
        special advanced processing, such as when a field encapsulates many
        inputs.
        """
        self.process_errors = []
        if data is _unset_value:
            # default may be a plain value or a callable producing one.
            try:
                data = self.default()
            except TypeError:
                data = self.default
        self.object_data = data
        try:
            self.process_data(data)
        except ValueError as e:
            self.process_errors.append(e.args[0])
        if formdata:
            try:
                if self.name in formdata:
                    # formdata is expected to be a multidict-like object
                    # (supports getlist) — TODO confirm against callers.
                    self.raw_data = formdata.getlist(self.name)
                else:
                    self.raw_data = []
                self.process_formdata(self.raw_data)
            except ValueError as e:
                self.process_errors.append(e.args[0])
        # Filters run last, over whatever data survived the steps above.
        for filter in self.filters:
            try:
                self.data = filter(self.data)
            except ValueError as e:
                self.process_errors.append(e.args[0])
    def process_data(self, value):
        """
        Process the Python data applied to this field and store the result.
        This will be called during form construction by the form's `kwargs` or
        `obj` argument.
        :param value: The python object containing the value to process.
        """
        self.data = value
    def process_formdata(self, valuelist):
        """
        Process data received over the wire from a form.
        This will be called during form construction with data supplied
        through the `formdata` argument.
        :param valuelist: A list of strings to process.
        """
        if valuelist:
            self.data = valuelist[0]
    def populate_obj(self, obj, name):
        """
        Populates `obj.<name>` with the field's data.
        :note: This is a destructive operation. If `obj.<name>` already exists,
        it will be overridden. Use with caution.
        """
        setattr(obj, name, self.data)
class UnboundField(object):
    """Placeholder created when a Field is declared outside a live form.

    Remembers the field class plus its constructor arguments so the real
    field can be built later via :meth:`bind`.  ``creation_counter``
    preserves the order in which fields were declared on a form class.
    """
    _formfield = True
    creation_counter = 0

    def __init__(self, field_class, *args, **kwargs):
        UnboundField.creation_counter += 1
        self.creation_counter = UnboundField.creation_counter
        self.field_class = field_class
        self.args = args
        self.kwargs = kwargs

    def bind(self, form, name, prefix='', translations=None, **kwargs):
        """Instantiate the stored field class, attached to *form* as *name*."""
        merged_kwargs = dict(self.kwargs, **kwargs)
        return self.field_class(
            *self.args,
            _form=form, _prefix=prefix, _name=name,
            _translations=translations, **merged_kwargs)

    def __repr__(self):
        return '<UnboundField(%s, %r, %r)>' % (
            self.field_class.__name__, self.args, self.kwargs)
class Flags(object):
    """
    Holds a set of boolean flags as attributes.
    Accessing a non-existing attribute returns False for its value.
    """
    def __getattr__(self, name):
        if name.startswith('_'):
            # Underscore names (including dunder protocol probes made by the
            # interpreter) must behave like normal missing attributes.
            # FIX: the previous `super(Flags, self).__getattr__(name)` call
            # itself raised "'super' object has no attribute '__getattr__'"
            # because object defines no __getattr__; raise cleanly instead.
            raise AttributeError(name)
        return False
    def __contains__(self, name):
        # `'flag' in flags` is truthy only when the flag is set.
        return getattr(self, name)
    def __repr__(self):
        flags = (name for name in dir(self) if not name.startswith('_'))
        return '<wtforms.fields.Flags: {%s}>' % ', '.join(flags)
class Label(object):
    """Renderable HTML ``<label>`` tag bound to a single form field."""
    def __init__(self, field_id, text):
        self.field_id = field_id  # id of the input this label points at
        self.text = text          # default label text
    def __str__(self):
        return self()
    def __unicode__(self):
        return self()
    def __html__(self):
        return self()
    def __call__(self, text=None, **kwargs):
        """Render the label; *text* overrides the stored label text."""
        kwargs['for'] = self.field_id
        markup = '<label %s>%s</label>' % (
            widgets.html_params(**kwargs), text or self.text)
        return widgets.HTMLString(markup)
    def __repr__(self):
        return 'Label(%r, %r)' % (self.field_id, self.text)
class SelectFieldBase(Field):
    """
    Base class for fields which can be iterated to produce options.
    This isn't a field, but an abstract base class for fields which want to
    provide this functionality.
    """
    # FIX: the docstring above previously appeared *after* this assignment,
    # making it a dead string expression instead of the class docstring.
    option_widget = widgets.Option()
    def __init__(self, label=None, validators=None, option_widget=None, **kwargs):
        super(SelectFieldBase, self).__init__(label, validators, **kwargs)
        if option_widget is not None:
            self.option_widget = option_widget
    def iter_choices(self):
        """
        Provides data for choice widget rendering. Must return a sequence or
        iterable of (value, label, selected) tuples.
        """
        raise NotImplementedError()
    def __iter__(self):
        # Yield one renderable _Option pseudo-field per choice so templates
        # can render each option individually.
        opts = dict(widget=self.option_widget, _name=self.name, _form=None)
        for i, (value, label, checked) in enumerate(self.iter_choices()):
            opt = self._Option(label=label, id='%s-%d' % (self.id, i), **opts)
            opt.process(None, value)
            opt.checked = checked
            yield opt
    class _Option(Field):
        # Lightweight field representing a single option of the enclosing
        # select; `checked` is set by SelectFieldBase.__iter__.
        checked = False
        def _value(self):
            return self.data
class SelectField(SelectFieldBase):
    """Select field with a fixed list of ``(value, label)`` choices.

    Submitted/stored values are passed through *coerce* (text type by
    default) before comparison and storage.
    """
    widget = widgets.Select()
    def __init__(self, label=None, validators=None, coerce=text_type, choices=None, **kwargs):
        super(SelectField, self).__init__(label, validators, **kwargs)
        self.coerce = coerce
        self.choices = choices
    def iter_choices(self):
        # selected == True for the (single) choice matching current data.
        for value, label in self.choices:
            yield (value, label, self.coerce(value) == self.data)
    def process_data(self, value):
        try:
            self.data = self.coerce(value)
        except (ValueError, TypeError):
            # Uncoercible object data simply means "nothing selected".
            self.data = None
    def process_formdata(self, valuelist):
        if valuelist:
            try:
                self.data = self.coerce(valuelist[0])
            except ValueError:
                raise ValueError(self.gettext('Invalid Choice: could not coerce'))
    def pre_validate(self, form):
        # for/else: the else branch runs only when no choice matched data.
        for v, _ in self.choices:
            if self.data == v:
                break
        else:
            raise ValueError(self.gettext('Not a valid choice'))
class SelectMultipleField(SelectField):
    """
    No different from a normal select field, except this one can take (and
    validate) multiple choices. You'll need to specify the HTML `rows`
    attribute to the select field when rendering.
    """
    widget = widgets.Select(multiple=True)
    def iter_choices(self):
        for value, label in self.choices:
            # A choice is selected when its coerced value appears in data.
            selected = self.data is not None and self.coerce(value) in self.data
            yield (value, label, selected)
    def process_data(self, value):
        try:
            self.data = list(self.coerce(v) for v in value)
        except (ValueError, TypeError):
            # Non-iterable or uncoercible object data -> nothing selected.
            self.data = None
    def process_formdata(self, valuelist):
        try:
            self.data = list(self.coerce(x) for x in valuelist)
        except ValueError:
            raise ValueError(self.gettext('Invalid choice(s): one or more data inputs could not be coerced'))
    def pre_validate(self, form):
        # Every submitted value must be one of the configured choice values.
        if self.data:
            values = list(c[0] for c in self.choices)
            for d in self.data:
                if d not in values:
                    raise ValueError(self.gettext("'%(value)s' is not a valid choice for this field") % dict(value=d))
class RadioField(SelectField):
    """
    Like a SelectField, except displays a list of radio buttons.
    Iterating the field will produce subfields (each containing a label as
    well) in order to allow custom rendering of the individual radio fields.
    """
    # Render as a list of <input type="radio"> elements instead of <select>.
    widget = widgets.ListWidget(prefix_label=False)
    option_widget = widgets.RadioInput()
class StringField(Field):
    """Plain text input field (``<input type="text">``).

    Serves as the base for most of the more specialised field types.
    """
    widget = widgets.TextInput()

    def process_formdata(self, valuelist):
        # First submitted value wins; absent input becomes the empty string.
        self.data = valuelist[0] if valuelist else ''

    def _value(self):
        if self.data is None:
            return ''
        return text_type(self.data)
class IntegerField(Field):
    """Text field whose input is coerced to ``int``.

    Erroneous input is ignored and will not be accepted as a value.
    """
    widget = widgets.TextInput()

    def __init__(self, label=None, validators=None, **kwargs):
        super(IntegerField, self).__init__(label, validators, **kwargs)

    def _value(self):
        # Prefer echoing back exactly what the user typed.
        if self.raw_data:
            return self.raw_data[0]
        if self.data is not None:
            return text_type(self.data)
        return ''

    def process_formdata(self, valuelist):
        if not valuelist:
            return
        try:
            self.data = int(valuelist[0])
        except ValueError:
            self.data = None
            raise ValueError(self.gettext('Not a valid integer value'))
class DecimalField(Field):
    """
    A text field which displays and coerces data of the `decimal.Decimal` type.
    :param places:
        How many decimal places to quantize the value to for display on form.
        If None, does not quantize value.
    :param rounding:
        How to round the value during quantize, for example
        `decimal.ROUND_UP`. If unset, uses the rounding value from the
        current thread's context.
    """
    widget = widgets.TextInput()
    def __init__(self, label=None, validators=None, places=2, rounding=None, **kwargs):
        super(DecimalField, self).__init__(label, validators, **kwargs)
        self.places = places
        self.rounding = rounding
    def _value(self):
        # Echo the raw submitted text back verbatim when available.
        if self.raw_data:
            return self.raw_data[0]
        elif self.data is not None:
            if self.places is not None:
                if hasattr(self.data, 'quantize'):
                    # exp is 10**-places, e.g. Decimal('0.01') for places=2.
                    exp = decimal.Decimal('.1') ** self.places
                    quantized = self.data.quantize(exp, rounding=self.rounding)
                    return text_type(quantized)
                else:
                    # If for some reason, data is a float or int, then format
                    # as we would for floats using string formatting.
                    format = '%%0.%df' % self.places
                    return format % self.data
            else:
                return text_type(self.data)
        else:
            return ''
    def process_formdata(self, valuelist):
        if valuelist:
            try:
                self.data = decimal.Decimal(valuelist[0])
            except (decimal.InvalidOperation, ValueError):
                self.data = None
                raise ValueError(self.gettext('Not a valid decimal value'))
class FloatField(Field):
    """Text field whose input is coerced to ``float``.

    Erroneous input is ignored and will not be accepted as a value.
    """
    widget = widgets.TextInput()

    def __init__(self, label=None, validators=None, **kwargs):
        super(FloatField, self).__init__(label, validators, **kwargs)

    def _value(self):
        # Echo raw user input when present; otherwise render stored data.
        if self.raw_data:
            return self.raw_data[0]
        if self.data is not None:
            return text_type(self.data)
        return ''

    def process_formdata(self, valuelist):
        if not valuelist:
            return
        try:
            self.data = float(valuelist[0])
        except ValueError:
            self.data = None
            raise ValueError(self.gettext('Not a valid float value'))
class BooleanField(Field):
    """Checkbox field (``<input type="checkbox">``)."""
    widget = widgets.CheckboxInput()

    def __init__(self, label=None, validators=None, **kwargs):
        super(BooleanField, self).__init__(label, validators, **kwargs)

    def process_data(self, value):
        self.data = bool(value)

    def process_formdata(self, valuelist):
        # Browsers omit unchecked checkboxes from the submission entirely,
        # so the mere presence of a value — not its content — decides.
        self.data = bool(valuelist)

    def _value(self):
        if self.raw_data:
            return text_type(self.raw_data[0])
        return 'y'
class DateTimeField(Field):
    """Text field storing a ``datetime.datetime`` parsed with *format*."""
    widget = widgets.TextInput()

    def __init__(self, label=None, validators=None, format='%Y-%m-%d %H:%M:%S', **kwargs):
        super(DateTimeField, self).__init__(label, validators, **kwargs)
        self.format = format

    def _value(self):
        # Multiple inputs may feed one field; join them with single spaces.
        if self.raw_data:
            return ' '.join(self.raw_data)
        if self.data:
            return self.data.strftime(self.format)
        return ''

    def process_formdata(self, valuelist):
        if not valuelist:
            return
        date_str = ' '.join(valuelist)
        try:
            self.data = datetime.datetime.strptime(date_str, self.format)
        except ValueError:
            self.data = None
            raise ValueError(self.gettext('Not a valid datetime value'))
class DateField(DateTimeField):
    """Same as DateTimeField, except stores a ``datetime.date``."""

    def __init__(self, label=None, validators=None, format='%Y-%m-%d', **kwargs):
        super(DateField, self).__init__(label, validators, format, **kwargs)

    def process_formdata(self, valuelist):
        if not valuelist:
            return
        date_str = ' '.join(valuelist)
        try:
            # Parse as datetime, then keep only the date component.
            self.data = datetime.datetime.strptime(date_str, self.format).date()
        except ValueError:
            self.data = None
            raise ValueError(self.gettext('Not a valid date value'))
class FormField(Field):
    """
    Encapsulate a form as a field in another form.
    :param form_class:
        A subclass of Form that will be encapsulated.
    :param separator:
        A string which will be suffixed to this field's name to create the
        prefix to enclosed fields. The default is fine for most uses.
    """
    widget = widgets.TableWidget()
    def __init__(self, form_class, label=None, validators=None, separator='-', **kwargs):
        super(FormField, self).__init__(label, validators, **kwargs)
        self.form_class = form_class
        self.separator = separator
        self._obj = None
        if self.filters:
            raise TypeError('FormField cannot take filters, as the encapsulated data is not mutable.')
        if validators:
            raise TypeError('FormField does not accept any validators. Instead, define them on the enclosed form.')
    def process(self, formdata, data=_unset_value):
        # Build the enclosed form instance from defaults/obj data, namespaced
        # under this field's name.
        if data is _unset_value:
            try:
                data = self.default()
            except TypeError:
                data = self.default
            self._obj = data
        self.object_data = data
        prefix = self.name + self.separator
        if isinstance(data, dict):
            self.form = self.form_class(formdata=formdata, prefix=prefix, **data)
        else:
            self.form = self.form_class(formdata=formdata, obj=data, prefix=prefix)
    def validate(self, form, extra_validators=tuple()):
        # Validation is delegated entirely to the enclosed form.
        if extra_validators:
            raise TypeError('FormField does not accept in-line validators, as it gets errors from the enclosed form.')
        return self.form.validate()
    def populate_obj(self, obj, name):
        candidate = getattr(obj, name, None)
        if candidate is None:
            if self._obj is None:
                raise TypeError('populate_obj: cannot find a value to populate from the provided obj or input data/defaults')
            # Fall back to the default object captured during process().
            candidate = self._obj
            setattr(obj, name, candidate)
        self.form.populate_obj(candidate)
    def __iter__(self):
        return iter(self.form)
    def __getitem__(self, name):
        return self.form[name]
    def __getattr__(self, name):
        # Unknown attributes fall through to the enclosed form.
        return getattr(self.form, name)
    @property
    def data(self):
        return self.form.data
    @property
    def errors(self):
        return self.form.errors
class FieldList(Field):
    """
    Encapsulate an ordered list of multiple instances of the same field type,
    keeping data as a list.
    >>> authors = FieldList(TextField('Name', [validators.required()]))
    :param unbound_field:
        A partially-instantiated field definition, just like that would be
        defined on a form directly.
    :param min_entries:
        if provided, always have at least this many entries on the field,
        creating blank ones if the provided input does not specify a sufficient
        amount.
    :param max_entries:
        accept no more than this many entries as input, even if more exist in
        formdata.
    """
    widget = widgets.ListWidget()
    def __init__(self, unbound_field, label=None, validators=None, min_entries=0,
                 max_entries=None, default=tuple(), **kwargs):
        super(FieldList, self).__init__(label, validators, default=default, **kwargs)
        if self.filters:
            raise TypeError('FieldList does not accept any filters. Instead, define them on the enclosed field.')
        if validators:
            raise TypeError('FieldList does not accept any validators. Instead, define them on the enclosed field.')
        assert isinstance(unbound_field, UnboundField), 'Field must be unbound, not a field class'
        self.unbound_field = unbound_field
        self.min_entries = min_entries
        self.max_entries = max_entries
        # Highest entry index used so far; -1 means no entries yet.
        self.last_index = -1
        self._prefix = kwargs.get('_prefix', '')
    def process(self, formdata, data=_unset_value):
        self.entries = []
        if data is _unset_value or not data:
            try:
                data = self.default()
            except TypeError:
                data = self.default
        self.object_data = data
        if formdata:
            # Pair each submitted index with the next piece of object data;
            # entries beyond the object data get the unset sentinel.
            indices = sorted(set(self._extract_indices(self.name, formdata)))
            if self.max_entries:
                indices = indices[:self.max_entries]
            idata = iter(data)
            for index in indices:
                try:
                    obj_data = next(idata)
                except StopIteration:
                    obj_data = _unset_value
                self._add_entry(formdata, obj_data, index=index)
        else:
            for obj_data in data:
                self._add_entry(formdata, obj_data)
        # Pad with blank entries up to min_entries.
        while len(self.entries) < self.min_entries:
            self._add_entry(formdata)
    def _extract_indices(self, prefix, formdata):
        """
        Yield indices of any keys with given prefix.
        formdata must be an object which will produce keys when iterated. For
        example, if field 'foo' contains keys 'foo-0-bar', 'foo-1-baz', then
        the numbers 0 and 1 will be yielded, but not neccesarily in order.
        """
        offset = len(prefix) + 1
        for k in formdata:
            if k.startswith(prefix):
                k = k[offset:].split('-', 1)[0]
                if k.isdigit():
                    yield int(k)
    def validate(self, form, extra_validators=tuple()):
        # Validate each subfield; collect per-entry error lists.
        self.errors = []
        success = True
        for subfield in self.entries:
            if not subfield.validate(form):
                success = False
                self.errors.append(subfield.errors)
        return success
    def populate_obj(self, obj, name):
        values = getattr(obj, name, None)
        try:
            ivalues = iter(values)
        except TypeError:
            ivalues = iter([])
        candidates = itertools.chain(ivalues, itertools.repeat(None))
        # Each subfield populates a throwaway holder object; the holders'
        # data values are then collected into the output list.
        _fake = type(str('_fake'), (object, ), {})
        output = []
        for field, data in izip(self.entries, candidates):
            fake_obj = _fake()
            fake_obj.data = data
            field.populate_obj(fake_obj, 'data')
            output.append(fake_obj.data)
        setattr(obj, name, output)
    def _add_entry(self, formdata=None, data=_unset_value, index=None):
        assert not self.max_entries or len(self.entries) < self.max_entries, \
            'You cannot have more than max_entries entries in this FieldList'
        # FIX: was `index or (self.last_index + 1)`, which treated an
        # explicit index of 0 as "no index given"; test against None instead.
        if index is None:
            index = self.last_index + 1
        self.last_index = new_index = index
        name = '%s-%d' % (self.short_name, new_index)
        id = '%s-%d' % (self.id, new_index)
        field = self.unbound_field.bind(form=None, name=name, prefix=self._prefix, id=id)
        field.process(formdata, data)
        self.entries.append(field)
        return field
    def append_entry(self, data=_unset_value):
        """
        Create a new entry with optional default data.
        Entries added in this way will *not* receive formdata however, and can
        only receive object data.
        """
        return self._add_entry(data=data)
    def pop_entry(self):
        """ Removes the last entry from the list and returns it. """
        entry = self.entries.pop()
        self.last_index -= 1
        return entry
    def __iter__(self):
        return iter(self.entries)
    def __len__(self):
        return len(self.entries)
    def __getitem__(self, index):
        return self.entries[index]
    @property
    def data(self):
        return [f.data for f in self.entries]
| lgpl-3.0 |
Spoken-tutorial/spoken-website | impersonate/decorators.py | 1 | 1214 | # -*- coding: utf-8 -*-
from functools import wraps

import django
from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.shortcuts import redirect, resolve_url
from django.utils.encoding import force_str
from django.utils.http import urlquote

from .helpers import get_redir_path, check_allow_impersonate, is_authenticated
def get_login_url():
    """Return the project's configured login URL, resolved to a str."""
    login_url = resolve_url(settings.LOGIN_URL)
    return force_str(login_url)
def allowed_user_required(view_func):
    """Decorator restricting a view to users allowed to impersonate.

    - Anonymous users are redirected to the login page (with ``next`` set).
    - Sessions that are *already* impersonating are bounced back to the
      configured redirect path so they cannot use the impersonate views.
    - Only users passing ``check_allow_impersonate`` reach *view_func*.
    """
    # FIX: wraps() preserves view_func.__name__/__doc__/__module__ on the
    # wrapper; without it introspection and debugging report '_checkuser'.
    @wraps(view_func)
    def _checkuser(request, *args, **kwargs):
        if not is_authenticated(request.user):
            return redirect(u'{0}?{1}={2}'.format(
                get_login_url(),
                REDIRECT_FIELD_NAME,
                urlquote(request.get_full_path()),
            ))
        if getattr(request.user, 'is_impersonate', False):
            # Do not allow an impersonated session to use the
            # impersonate views.
            return redirect(get_redir_path())
        if check_allow_impersonate(request):
            # user is allowed to impersonate
            return view_func(request, *args, **kwargs)
        else:
            # user not allowed impersonate at all
            return redirect(get_redir_path())
    return _checkuser
| gpl-3.0 |
piyush82/icclab-rcb | common/db_client.py | 2 | 2743 | # -*- coding: ascii -*-
#--------------------------------------------------------------
#Created on Dec 2, 2013
#
#@author: Piyush Harsh
#@contact: piyush.harsh@zhaw.ch
#@organization: ICCLab, Zurich University of Applied Sciences
#@summary: module for database operations, supported DB - sqlite3, planned: mangodb, mysql
# Note:
#@var
#@requires: python 2.7, sqlite3
#--------------------------------------------------------------
import sqlite3 as db
def query(path, field_list, table, condition, fetchOne):
    """Run ``SELECT <field_list> FROM <table> WHERE <condition>``.

    :param path: filesystem path of the sqlite3 database.
    :param fetchOne: when true return a single row, otherwise all rows.
    :return: ``(True, rows)`` on success, ``(False, None)`` on any error.

    WARNING: the SQL is assembled by string concatenation, so callers must
    never pass untrusted input in any argument.
    """
    try:
        con = db.connect(path)
    except db.Error as e:
        print('Error %s:' % e.args[0])
        return False, None
    try:
        cur = con.cursor()
        statement = "SELECT " + field_list + " from " + table + " WHERE " + condition
        cur.execute(statement)
        if fetchOne:
            data = cur.fetchone()
        else:
            data = cur.fetchall()
    except db.Error as e:
        print('Error %s:' % e.args[0])
        return False, None
    finally:
        # Close the connection on error paths too (previously it leaked).
        con.close()
    return True, data
def add(path, table, values):
    """Run ``INSERT INTO <table> VALUES(<values>)`` and commit.

    :return: True on success, False on any error.

    WARNING: the SQL is assembled by string concatenation; do not pass
    untrusted input.
    """
    try:
        con = db.connect(path)
    except db.Error as e:
        print('Error %s:' % e.args[0])
        return False
    try:
        cur = con.cursor()
        statement = "INSERT into " + table + " VALUES(" + values + ")"
        cur.execute(statement)
        con.commit()
    except db.Error as e:
        print('Error %s:' % e.args[0])
        return False
    finally:
        # Close the connection on error paths too (previously it leaked).
        con.close()
    return True
def getMax(path, field, table):
    """Return ``(True, max(field))`` over *table*, or ``(True, -1)`` when the
    table is empty; ``(False, None)`` on any error."""
    max_value = -1   # renamed from `max`, which shadowed the builtin
    try:
        con = db.connect(path)
    except db.Error as e:
        print('Error %s:' % e.args[0])
        return False, None
    try:
        cur = con.cursor()
        cur.execute("SELECT max(" + field + ") from " + table)
        row = cur.fetchone()
        if row[0] is not None:
            max_value = row[0]
    except db.Error as e:
        print('Error %s:' % e.args[0])
        return False, None
    finally:
        # Close the connection on error paths too (previously it leaked).
        con.close()
    return True, max_value
def count(path, field, table, condition):
    """Return ``(True, n)`` where *n* is ``COUNT(field)`` over the rows of
    *table* matching *condition*; ``(False, None)`` on any error."""
    row_count = 0   # renamed from `count`, which shadowed this function's name
    try:
        con = db.connect(path)
    except db.Error as e:
        print('Error %s:' % e.args[0])
        return False, None
    try:
        cur = con.cursor()
        statement = "SELECT count(" + field + ") from " + table + " WHERE " + condition
        cur.execute(statement)
        row = cur.fetchone()
        if row[0] is not None:
            row_count = row[0]
    except db.Error as e:
        print('Error %s:' % e.args[0])
        return False, None
    finally:
        # Close the connection on error paths too (previously it leaked).
        con.close()
    return True, row_count
slarosa/QGIS | python/plugins/sextante/algs/MeanAndStdDevPlot.py | 3 | 3304 | # -*- coding: utf-8 -*-
"""
***************************************************************************
MeanAndStdDevPlot.py
---------------------
Date : January 2013
Copyright : (C) 2013 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'January 2013'
__copyright__ = '(C) 2013, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import matplotlib.pyplot as plt
import matplotlib.pylab as lab
import numpy as np
from PyQt4.QtCore import *
from qgis.core import *
from sextante.parameters.ParameterTable import ParameterTable
from sextante.parameters.ParameterTableField import ParameterTableField
from sextante.core.GeoAlgorithm import GeoAlgorithm
from sextante.outputs.OutputHTML import OutputHTML
from sextante.tools import *
from sextante.core.QGisLayers import QGisLayers
class MeanAndStdDevPlot(GeoAlgorithm):
    """SEXTANTE algorithm: bar plot of per-category means with
    standard-deviation error bars, saved as a PNG wrapped in an HTML report."""

    # Parameter / output identifiers used by the SEXTANTE framework.
    INPUT = "INPUT"
    OUTPUT = "OUTPUT"
    NAME_FIELD = "NAME_FIELD"
    MEAN_FIELD = "MEAN_FIELD"
    STDDEV_FIELD = "STDDEV_FIELD"

    def processAlgorithm(self, progress):
        """Read the three table fields, draw the bar chart with matplotlib
        and write ``<output>.png`` plus an HTML file embedding it."""
        uri = self.getParameterValue(self.INPUT)
        layer = QGisLayers.getObjectFromUri(uri)
        namefieldname = self.getParameterValue(self.NAME_FIELD)
        meanfieldname = self.getParameterValue(self.MEAN_FIELD)
        stddevfieldname = self.getParameterValue(self.STDDEV_FIELD)
        output = self.getOutputValue(self.OUTPUT)
        values = vector.getAttributeValues(layer, namefieldname, meanfieldname, stddevfieldname)
        plt.close()  # discard any figure left over from a previous run
        ind = np.arange(len(values[namefieldname]))
        width = 0.8
        plt.bar(ind, values[meanfieldname], width,
                color='r',
                yerr=values[stddevfieldname],
                error_kw=dict(ecolor='yellow'))
        plt.xticks(ind, values[namefieldname], rotation = 45)
        plotFilename = output +".png"
        lab.savefig(plotFilename)
        f = open(output, "w")
        f.write("<img src=\"" + plotFilename + "\"/>")
        f.close()

    def defineCharacteristics(self):
        """Declare the algorithm's name, group, parameters and output."""
        self.name = "Mean and standard deviation plot"
        self.group = "Graphics"
        self.addParameter(ParameterTable(self.INPUT, "Input table"))
        self.addParameter(ParameterTableField(self.NAME_FIELD, "Category name field", self.INPUT,ParameterTableField.DATA_TYPE_ANY))
        self.addParameter(ParameterTableField(self.MEAN_FIELD, "Mean field", self.INPUT))
        self.addParameter(ParameterTableField(self.STDDEV_FIELD, "StdDev field", self.INPUT))
        self.addOutput(OutputHTML(self.OUTPUT, "Output"))
| gpl-2.0 |
PinMeTo/big-zelda-kegerator | measuring/measur.py | 1 | 5542 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
# Author: Andreas Älveborn
# URL: https://github.com/aelveborn/Wii-Scale
#
# This file is part of Wii-Scale
# Copyright (C) 2015 Andreas Älveborn
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import wiiboard
import time
import sys
import getopt
from bluetooth import *
from socketIO_client import SocketIO, LoggingNamespace
# Global configuration / shared state (overridable via options()).
board = None           # wiiboard.Wiiboard instance, (re)created in main()
sleep = True           # when True the main loop idles instead of measuring
sensitivity = 0.1 #kg  -- minimum totalWeight treated as "load on the board"
calibrate = 0 #kg      -- offset added to every averaged reading
config_address = None  # balance-board MAC address from -a/--address, if given
port = 3001            # socket.io server port (-p/--port)
host = "localhost"     # socket.io server host (-h/--host)
class CalculateWeight:
    """Helpers for turning raw balance-board samples into a display weight."""

    def formatWeight(self, weight):
        """Round a weight to two decimal places."""
        return round(weight, 2)

    def weight(self, data):
        """Average the samples in *data*, apply the global calibration offset
        and return the rounded result.

        *data* must be non-empty (an empty sequence raises
        ZeroDivisionError, as in the original implementation).
        """
        global calibrate
        # sum() replaces the manual accumulation loop; float() guards against
        # Python 2 integer division should the samples happen to be ints.
        total = sum(data) / float(len(data))
        total = total + calibrate
        return self.formatWeight(total)
class WebSocketIO:
    """Wrapper around the socket.io connection to the Wii-Scale web frontend.

    The connection is opened eagerly in __init__; host and port come from
    the module-level configuration globals.
    """

    def __init__(self):
        global host
        global port
        self.socketIO = SocketIO(host, port, LoggingNamespace)
        # Frontend-driven commands that wake up / put to sleep the main loop.
        self.socketIO.on('wiiscale-connect', self.receive_connect)
        self.socketIO.on('wiiscale-disconnect', self.receive_disconnect)

    def wait(self):
        # Pump the socket.io event loop for up to one second.
        print "wait"
        self.socketIO.wait(seconds = 1)

    def send_status(self, status):
        # Push a status string (e.g. "CONNECTED", "MEASURING") to the frontend.
        print status
        self.socketIO.emit('wiiscale-status', {'status': status})

    def send_weight(self, keg1, keg2, keg3):
        # Push one reading per keg (individual board corner sensors; see main()).
        print "keg1 %.2f, keg2 %.2f, keg3 %.2f" % (keg1, keg2, keg3)
        self.socketIO.emit('wiiscale-weight', { 'keg1': keg1,
                                                'keg2':keg2,
                                                'keg3':keg3})

    # def send_connection_status(self, status):
    #     self.socketIO.emit('wiiscale-connection', {'status': status})
    # Accepts True or False as argument

    def receive_connect(self):
        # Frontend asked us to start measuring again.
        global sleep
        sleep = False

    def receive_disconnect(self):
        # Frontend asked us to stop: sleep and drop the board connection.
        global board
        global sleep
        sleep = True
        board.disconnect()
def options(argv):
    """Parse command-line arguments into the module-level config globals.

    Recognized options: -h/--host, -p/--port, -c/--calibrate, -a/--address.
    Malformed numeric values are silently ignored (original behaviour);
    an unknown option prints usage and exits with status 2.
    """
    try:
        opts, args = getopt.getopt(argv, "h:p:c:a:", ["host=", "port=", "calibrate=", "address="])
    except getopt.GetoptError:
        print("wii-scale.py -h <host> -p <port> -c <calibration kg> -a <mac-addres>")
        sys.exit(2)

    for opt, arg in opts:
        if opt in ("-h", "--host"):
            global host
            if arg:
                host = arg.strip()
        elif opt in ("-p", "--port"):
            global port
            try:
                port = int(arg)
            except ValueError:
                # Narrowed from a bare except: only a malformed number should
                # be ignored, not SystemExit/KeyboardInterrupt.
                pass
        elif opt in ("-c", "--calibrate"):
            global calibrate
            try:
                calibrate = int(arg)
            except ValueError:
                pass
        elif opt in ("-a", "--address"):
            global config_address
            if arg:
                config_address = arg.strip()
def main(argv):
    """Run the kegerator scale service: connect to the Wii balance board,
    wait for weight on the board and stream per-sensor readings to the web
    frontend until the load drops below the sensitivity threshold.

    NOTE(review): the indentation of this copy of the file was lost and has
    been reconstructed from the statement order; verify the block nesting
    against the upstream Wii-Scale sources.
    """
    options(argv)
    print "Wii-Scale started"

    global sleep
    global port
    global config_address
    global calibrate
    global board

    ready = False
    sleep = False
    connected = False

    calculate = CalculateWeight()
    socket = WebSocketIO()
    board = wiiboard.Wiiboard()
    # time.sleep(10)

    # Scale
    while(True):

        # Check if connection status changed
        if connected is not board.isConnected():
            connected = board.isConnected()
            if connected:
                socket.send_status("CONNECTED")
            else:
                socket.send_status("DISCONNECTED")

                #Turn off lights
                time.sleep(0.1) # This is needed for wiiboard.py
                board.setLight(False)

        # Waiting for disconnect/sleep command
        # socket.wait()

        if sleep:
            time.sleep(10)
            #continue

        # Reset per-measurement state
        done = False
        keg1 = []
        keg2 = []
        keg3 = []
        firstStep = True
        skipReadings = 5

        # Connect to balance board
        if not board.isConnected():
            # Re initialize each run due to bug in wiiboard
            # Note: Seems to be working though :/
            board = wiiboard.Wiiboard()

            socket.send_status("SYNC")
            if not config_address:
                address = board.discover()
            else:
                address = config_address

            if not address:
                sleep = True
                socket.send_status("NO DEVICE FOUND")
                continue

            socket.send_status("CONNECTING")
            board.connect(address)
            if board.isConnected():
                connected = True
                socket.send_status("CONNECTED")

        #Board is connected and ready
        if board.isConnected():
            # Post ready status once
            if not ready:
                ready = True
                time.sleep(0.1) # This is needed for wiiboard.py
                board.setLight(True)
                socket.send_status("READY")

            #Measure weight
            if board.mass.totalWeight > sensitivity:
                while(not done):
                    time.sleep(0.1)

                    if firstStep:
                        firstStep = False
                        socket.send_status("MEASURING")

                    # Skips the first readings when the user steps on the balance board
                    skipReadings -= 1
                    if(skipReadings < 0):
                        keg1.append(board.mass.topLeft)
                        keg2.append(board.mass.bottomRight)
                        keg3.append(board.mass.bottomLeft)
                        #socket.send_weight(board.mass.topLeft)
                        #socket.send_weight(calculate.weight(keg1), calculate.weight(keg2), calculate.weight(keg3))
                        socket.send_weight(board.mass.topLeft, board.mass.bottomLeft, board.mass.bottomRight)

                    if board.mass.totalWeight <= sensitivity and not firstStep:
                        done = True
                        socket.send_status("DONE")
                        ready = False

if __name__ == "__main__":
    main(sys.argv[1:])
| mit |
shujaatak/UAV_MissionPlanner | Lib/site-packages/scipy/misc/tests/test_doccer.py | 61 | 2281 | ''' Some tests for the documenting decorator and support functions '''
import numpy as np
from numpy.testing import assert_equal, assert_raises
from nose.tools import assert_true
from scipy.misc import doccer
# Template docstring into which the three parameter docs are substituted.
docstring = \
"""Docstring
%(strtest1)s
%(strtest2)s
%(strtest3)s
"""

# NOTE(review): the leading indentation inside the literals below appears to
# have been lost in this copy of the file; the unindent tests rely on it, so
# verify these strings against the upstream scipy sources.
param_doc1 = \
"""Another test
with some indent"""

# Single-line parameter doc.
param_doc2 = \
"""Another test, one line"""

# Uniformly indented variant of param_doc1; unindenting it should reproduce
# param_doc1.
param_doc3 = \
""" Another test
with some indent"""

# Substitution dict used by the docformat/decorator tests.
doc_dict = {'strtest1':param_doc1,
            'strtest2':param_doc2,
            'strtest3':param_doc3}

# Expected result of substituting the unindented docs into `docstring`.
filled_docstring = \
"""Docstring
Another test
with some indent
Another test, one line
Another test
with some indent
"""
def test_unindent():
    """Each raw parameter doc must unindent to its canonical form."""
    cases = (
        (param_doc1, param_doc1),
        (param_doc2, param_doc2),
        (param_doc3, param_doc1),
    )
    for raw, expected in cases:
        yield assert_equal, doccer.unindent_string(raw), expected
def test_unindent_dict():
    """unindent_dict must unindent every value of the doc dict."""
    unindented = doccer.unindent_dict(doc_dict)
    for key, source_key in (('strtest1', 'strtest1'),
                            ('strtest2', 'strtest2'),
                            ('strtest3', 'strtest1')):
        yield assert_equal, unindented[key], doc_dict[source_key]
def test_docformat():
    """docformat() must fill the template from the substitution dict."""
    udd = doccer.unindent_dict(doc_dict)
    formatted = doccer.docformat(docstring, udd)
    yield assert_equal, formatted, filled_docstring
    single_doc = 'Single line doc %(strtest1)s'
    formatted = doccer.docformat(single_doc, doc_dict)
    # Note - initial indent of format string does not
    # affect subsequent indent of inserted parameter
    yield assert_equal, formatted, """Single line doc Another test
with some indent"""
def test_decorator():
    """filldoc() must substitute into decorated functions' docstrings."""
    # NOTE(review): the indentation inside the docstring literals below was
    # lost in this copy; the with/without-unindent cases should differ
    # upstream — verify against the original scipy sources.
    # with unindentation of parameters
    decorator = doccer.filldoc(doc_dict, True)

    @decorator
    def func():
        """ Docstring
%(strtest3)s
"""
    yield assert_equal, func.__doc__, """ Docstring
Another test
with some indent
"""

    # without unindentation of parameters
    decorator = doccer.filldoc(doc_dict, False)

    @decorator
    def func():
        """ Docstring
%(strtest3)s
"""
    yield assert_equal, func.__doc__, """ Docstring
Another test
with some indent
"""
| gpl-2.0 |
rgayon/plaso | tests/parsers/syslog.py | 1 | 8396 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the syslog parser."""
from __future__ import unicode_literals
import unittest
from plaso.parsers import syslog
from tests.parsers import test_lib
class SyslogParserTest(test_lib.ParserTestCase):
    """Tests for the syslog parser.

    Each test parses one fixture file from the shared test-data directory;
    the 'year' knowledge-base value supplies the year that syslog timestamps
    themselves lack.
    """

    def testParseRsyslog(self):
        """Tests the Parse function on a rsyslog file."""
        parser = syslog.SyslogParser()
        knowledge_base_values = {'year': 2020}
        storage_writer = self._ParseFile(
            ['syslog_rsyslog'], parser,
            knowledge_base_values=knowledge_base_values)

        self.assertEqual(storage_writer.number_of_warnings, 0)
        self.assertEqual(storage_writer.number_of_events, 5)

    def testParseRsyslogTraditional(self):
        """Tests the Parse function on a traditional rsyslog file."""
        parser = syslog.SyslogParser()
        knowledge_base_values = {'year': 2016}
        storage_writer = self._ParseFile(
            ['syslog_rsyslog_traditional'], parser,
            knowledge_base_values=knowledge_base_values)

        self.assertEqual(storage_writer.number_of_warnings, 0)
        self.assertEqual(storage_writer.number_of_events, 8)

        events = list(storage_writer.GetSortedEvents())

        event = events[0]

        self.CheckTimestamp(event.timestamp, '2016-01-22 07:54:32.000000')

        event_data = self._GetEventDataOfEvent(storage_writer, event)
        self.assertEqual(event_data.data_type, 'syslog:line')
        self.assertEqual(event_data.hostname, 'myhostname.myhost.com')
        self.assertEqual(event_data.reporter, 'Job')
        self.assertIsNone(event_data.severity)

    def testParseDarwin(self):
        """Tests the Parse function on an Darwin-style syslog file."""
        parser = syslog.SyslogParser()
        knowledge_base_values = {'year': 2016}
        storage_writer = self._ParseFile(
            ['syslog_osx'], parser,
            knowledge_base_values=knowledge_base_values)

        self.assertEqual(storage_writer.number_of_warnings, 0)
        self.assertEqual(storage_writer.number_of_events, 2)

    def testParseChromeOS(self):
        """Tests the Parse function."""
        parser = syslog.SyslogParser()
        knowledge_base_values = {'year': 2016}
        storage_writer = self._ParseFile(
            ['syslog_chromeos'], parser,
            knowledge_base_values=knowledge_base_values)

        self.assertEqual(storage_writer.number_of_warnings, 0)
        self.assertEqual(storage_writer.number_of_events, 8)

        events = list(storage_writer.GetSortedEvents())

        event = events[0]

        self.CheckTimestamp(event.timestamp, '2016-10-25 19:37:23.297265')

        event_data = self._GetEventDataOfEvent(storage_writer, event)
        self.assertEqual(event_data.reporter, 'periodic_scheduler')
        self.assertEqual(event_data.severity, 'INFO')

        expected_message = (
            'INFO [periodic_scheduler, pid: 13707] cleanup_logs: job completed')
        self._TestGetMessageStrings(
            event_data, expected_message, expected_message)

        event = events[2]

        self.CheckTimestamp(event.timestamp, '2016-10-25 19:37:24.987014')

        event_data = self._GetEventDataOfEvent(storage_writer, event)
        self.assertEqual(event_data.reporter, 'kernel')
        self.assertEqual(event_data.severity, 'DEBUG')

        # Testing year increment.
        event = events[4]

        self.CheckTimestamp(event.timestamp, '2016-10-25 19:37:24.993079')

        event_data = self._GetEventDataOfEvent(storage_writer, event)
        self.assertEqual(event_data.reporter, 'kernel')
        self.assertEqual(event_data.severity, 'DEBUG')

        event = events[6]

        event_data = self._GetEventDataOfEvent(storage_writer, event)
        self.assertEqual(event_data.reporter, 'kernel')
        self.assertEqual(event_data.severity, 'ERR')

        event = events[7]

        event_data = self._GetEventDataOfEvent(storage_writer, event)
        self.assertEqual(event_data.reporter, 'aprocess')
        self.assertEqual(event_data.severity, 'INFO')

        # Multi-line messages must be folded into a single message string.
        expected_message = (
            'INFO [aprocess] [ 316.587330] cfg80211: This is a multi-line\t'
            'message that screws up many syslog parsers.')
        expected_short_message = (
            'INFO [aprocess] [ 316.587330] cfg80211: This is a multi-line\t'
            'message that sc...')
        self._TestGetMessageStrings(
            event_data, expected_message, expected_short_message)

    def testParse(self):
        """Tests the Parse function."""
        parser = syslog.SyslogParser()
        knowledge_base_values = {'year': 2012}
        storage_writer = self._ParseFile(
            ['syslog'], parser, knowledge_base_values=knowledge_base_values)

        self.assertEqual(storage_writer.number_of_warnings, 1)
        self.assertEqual(storage_writer.number_of_events, 16)

        events = list(storage_writer.GetSortedEvents())

        event = events[0]

        self.CheckTimestamp(event.timestamp, '2012-01-22 07:52:33.000000')

        event_data = self._GetEventDataOfEvent(storage_writer, event)
        self.assertEqual(event_data.data_type, 'syslog:line')
        self.assertEqual(event_data.hostname, 'myhostname.myhost.com')
        self.assertEqual(event_data.reporter, 'client')
        self.assertIsNone(event_data.severity)

        expected_message = (
            '[client, pid: 30840] INFO No new content in ímynd.dd.')
        self._TestGetMessageStrings(
            event_data, expected_message, expected_message)

        event = events[6]

        self.CheckTimestamp(event.timestamp, '2012-02-29 01:15:43.000000')

        event_data = self._GetEventDataOfEvent(storage_writer, event)
        self.assertEqual(event_data.reporter, '---')
        self.assertIsNone(event_data.severity)

        # Testing year increment.
        event = events[9]

        self.CheckTimestamp(event.timestamp, '2013-03-23 23:01:18.000000')

        event_data = self._GetEventDataOfEvent(storage_writer, event)
        self.assertEqual(
            event_data.body,
            'This syslog message has a fractional value for seconds.')
        self.assertEqual(event_data.reporter, 'somrandomexe')
        self.assertIsNone(event_data.severity)

        event = events[11]

        event_data = self._GetEventDataOfEvent(storage_writer, event)
        self.assertEqual(event_data.reporter, '/sbin/anacron')
        self.assertIsNone(event_data.severity)

        event = events[10]

        event_data = self._GetEventDataOfEvent(storage_writer, event)
        self.assertEqual(event_data.reporter, 'aprocess')
        self.assertIsNone(event_data.severity)

        expected_message = (
            '[aprocess, pid: 10100] This is a multi-line message that screws up'
            '\tmany syslog parsers.')
        expected_short_message = (
            '[aprocess, pid: 10100] This is a multi-line message that screws up'
            '\tmany syslo...')
        self._TestGetMessageStrings(
            event_data, expected_message, expected_short_message)

        event = events[14]

        event_data = self._GetEventDataOfEvent(storage_writer, event)
        self.assertIsNone(event_data.hostname)
        self.assertEqual(event_data.reporter, 'kernel')
        self.assertIsNone(event_data.severity)

        expected_message = (
            '[kernel] [997.390602] sda2: rw=0, want=65, limit=2')
        expected_short_message = (
            '[kernel] [997.390602] sda2: rw=0, want=65, limit=2')
        self._TestGetMessageStrings(
            event_data, expected_message, expected_short_message)

        # Testing non-leap year.
        parser = syslog.SyslogParser()
        knowledge_base_values = {'year': 2013}
        storage_writer = self._ParseFile(
            ['syslog'], parser,
            knowledge_base_values=knowledge_base_values)

        self.assertEqual(storage_writer.number_of_warnings, 2)
        self.assertEqual(storage_writer.number_of_events, 15)

    def testParseWithTimeZone(self):
        """Tests the Parse function with a time zone."""
        parser = syslog.SyslogParser()
        knowledge_base_values = {'year': 2016}
        storage_writer = self._ParseFile(
            ['syslog_rsyslog_traditional'], parser,
            knowledge_base_values=knowledge_base_values, timezone='CET')

        self.assertEqual(storage_writer.number_of_warnings, 0)
        self.assertEqual(storage_writer.number_of_events, 8)

        events = list(storage_writer.GetSortedEvents())

        event = events[0]

        # Same fixture as testParseRsyslogTraditional, shifted from CET to UTC.
        self.CheckTimestamp(event.timestamp, '2016-01-22 06:54:32.000000')

        event_data = self._GetEventDataOfEvent(storage_writer, event)
        self.assertEqual(event_data.data_type, 'syslog:line')
        self.assertEqual(event_data.hostname, 'myhostname.myhost.com')
        self.assertEqual(event_data.reporter, 'Job')
        self.assertIsNone(event_data.severity)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| apache-2.0 |
Alwnikrotikz/secure-gappproxy | fetchserver/nonce.py | 2 | 3071 | #! /usr/bin/env python
# coding=utf-8
#======================================================================
# SecureGAppProxy is a security-strengthened version of GAppProxy.
# http://secure-gappproxy.googlecode.com
# This file is a part of SecureGAppProxy.
# Copyright (C) 2011 nleven <www.nleven.com i@nleven.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# ACKNOWLEDGEMENT
# SecureGAppProxy is a based on the work of GAppProxy
# <http://gappproxy.googlecode.com> by Du XiaoGang <dugang@188.com>
#======================================================================
from google.appengine.ext import db
from google.appengine.api import memcache
import os
import struct
import datetime, time
import base64
NONCE_LENGTH = 16  # total bytes: 8 random + 8 byte big-endian timestamp
NONCE_EXPIRE = 900  # seconds a nonce stays valid (and is remembered for replay checks)
class Nonce(db.Model):
    """Datastore record of an issued nonce (creation time + random part)."""
    timestamp = db.IntegerProperty(required=True)
    random = db.StringProperty(required=True)
def __ser_int(num, length=None):
    """Serialize non-negative *num* to a big-endian byte string.

    :param length: if given, left-pad the result with NUL bytes up to
        *length* bytes.
    """
    serialized = ""
    while num != 0:
        serialized += chr(num % 256)
        # Floor division: the previous `num /= 256` relies on Python 2 integer
        # division and would produce a float (never reaching 0 cleanly) under
        # true-division semantics; `//=` is identical for Python 2 ints.
        num //= 256
    if length:
        serialized += "\x00" * (length - len(serialized))
    return serialized[::-1]
def __deser_int(serialized):
    """Inverse of __ser_int: read a big-endian byte string as an integer."""
    value = 0
    for ch in serialized:
        value = (value << 8) | ord(ch)
    return int(value)
def __utc_time():
    """Current Unix time, truncated to whole seconds."""
    return int(time.time())
def GenerateNonce():
    """Build a 16-byte nonce: 8 random bytes + 8-byte big-endian timestamp."""
    return os.urandom(8) + __ser_int(__utc_time(), 8)
def CheckNonce(n):
    """Validate a nonce created by GenerateNonce.

    Rejects nonces with the wrong length, nonces older than NONCE_EXPIRE
    seconds, and nonces already seen (replay protection via memcache).
    A valid nonce is recorded so it cannot be used a second time.
    """
    if len(n) != NONCE_LENGTH:
        return False
    rand = base64.b64encode(n[:8])
    timestamp = __deser_int(n[8:])
    time_expire = __utc_time() - NONCE_EXPIRE
    if timestamp <= time_expire:
        return False
    # NOTE(review): get-then-set is not atomic, so two concurrent requests
    # could both pass the replay check; consider memcache's atomic add().
    if memcache.get('nonce_' + rand):
        return False
    memcache.set('nonce_' + rand, '', NONCE_EXPIRE)
    return True
def CleanExpiredNonce():
    """Currently a no-op; expired nonce entries are left to memcache expiry."""
    pass
if __name__ == "__main__":
    # Ad-hoc smoke test: a fresh nonce should validate exactly once; the
    # second check must fail as a replay.
    nonce = GenerateNonce()
    print nonce
    print CheckNonce(nonce)
    print CheckNonce(nonce)
| gpl-3.0 |
webmasterraj/GaSiProMo | flask/lib/python2.7/site-packages/boto/s3/acl.py | 13 | 5434 | # Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from boto.s3.user import User
# Names of the predefined ("canned") ACL policies S3 accepts.
CannedACLStrings = ['private', 'public-read',
                    'public-read-write', 'authenticated-read',
                    'bucket-owner-read', 'bucket-owner-full-control',
                    'log-delivery-write']
class Policy(object):
    """An S3 access control policy: an owner plus an access control list.

    Populated either programmatically or via the startElement/endElement
    SAX-style callbacks while parsing an AccessControlPolicy document.
    """

    def __init__(self, parent=None):
        self.parent = parent
        self.acl = None

    def __repr__(self):
        descriptions = []
        for grant in self.acl.grants:
            if grant.id == self.owner.id:
                descriptions.append("%s (owner) = %s" % (grant.display_name, grant.permission))
                continue
            if grant.type == 'CanonicalUser':
                who = grant.display_name
            elif grant.type == 'Group':
                who = grant.uri
            else:
                who = grant.email_address
            descriptions.append("%s = %s" % (who, grant.permission))
        return "<Policy: %s>" % ", ".join(descriptions)

    def startElement(self, name, attrs, connection):
        # Hand child parsing off to a fresh User/ACL object as appropriate.
        if name == 'Owner':
            self.owner = User(self)
            return self.owner
        if name == 'AccessControlList':
            self.acl = ACL(self)
            return self.acl
        return None

    def endElement(self, name, value, connection):
        # Container elements were handled by their own objects; anything
        # else becomes a plain attribute.
        if name not in ('Owner', 'AccessControlList'):
            setattr(self, name, value)

    def to_xml(self):
        parts = ['<AccessControlPolicy>',
                 self.owner.to_xml(),
                 self.acl.to_xml(),
                 '</AccessControlPolicy>']
        return ''.join(parts)
class ACL(object):
    """An S3 access control list: an ordered collection of grants."""

    def __init__(self, policy=None):
        self.policy = policy
        self.grants = []

    def add_grant(self, grant):
        """Append an already-constructed Grant."""
        self.grants.append(grant)

    def add_email_grant(self, permission, email_address):
        """Grant *permission* to the AWS customer with *email_address*."""
        new_grant = Grant(permission=permission, type='AmazonCustomerByEmail',
                          email_address=email_address)
        self.grants.append(new_grant)

    def add_user_grant(self, permission, user_id, display_name=None):
        """Grant *permission* to the canonical user *user_id*."""
        new_grant = Grant(permission=permission, type='CanonicalUser',
                          id=user_id, display_name=display_name)
        self.grants.append(new_grant)

    def startElement(self, name, attrs, connection):
        if name != 'Grant':
            return None
        new_grant = Grant(self)
        self.grants.append(new_grant)
        return new_grant

    def endElement(self, name, value, connection):
        if name != 'Grant':
            setattr(self, name, value)

    def to_xml(self):
        inner = ''.join(grant.to_xml() for grant in self.grants)
        return '<AccessControlList>%s</AccessControlList>' % inner
class Grant(object):
    """A single S3 grant: a grantee (user, group or e-mail) plus a permission."""

    # xsi namespace declaration emitted on every <Grantee> element.
    NameSpace = 'xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"'

    def __init__(self, permission=None, type=None, id=None,
                 display_name=None, uri=None, email_address=None):
        self.type = type
        self.permission = permission
        self.id = id
        self.display_name = display_name
        self.uri = uri
        self.email_address = email_address

    def startElement(self, name, attrs, connection):
        if name == 'Grantee':
            # The grantee kind comes from the element's xsi:type attribute.
            self.type = attrs['xsi:type']
        return None

    def endElement(self, name, value, connection):
        if name == 'Grantee':
            # Container element; its children carry the actual data.
            return
        attr_name = {'ID': 'id',
                     'DisplayName': 'display_name',
                     'URI': 'uri',
                     'EmailAddress': 'email_address',
                     'Permission': 'permission'}.get(name, name)
        setattr(self, attr_name, value)

    def to_xml(self):
        parts = ['<Grant>',
                 '<Grantee %s xsi:type="%s">' % (self.NameSpace, self.type)]
        if self.type == 'CanonicalUser':
            parts.append('<ID>%s</ID>' % self.id)
            parts.append('<DisplayName>%s</DisplayName>' % self.display_name)
        elif self.type == 'Group':
            parts.append('<URI>%s</URI>' % self.uri)
        else:
            parts.append('<EmailAddress>%s</EmailAddress>' % self.email_address)
        parts.append('</Grantee>')
        parts.append('<Permission>%s</Permission>' % self.permission)
        parts.append('</Grant>')
        return ''.join(parts)
| gpl-2.0 |
0x0all/ROOT | tutorials/pyroot/zdemo.py | 28 | 8280 | # This macro is an example of graphs in log scales with annotations.
#
# The begin_html <a href="gif/zdemo.gif" >presented results</a> end_html
# are predictions of invariant cross-section of Direct Photons produced
# at RHIC energies, based on the universality of scaling function H(z).
#
# Authors: Michael Tokarev and Elena Potrebenikova (JINR Dubna)
#
# These Figures were published in JINR preprint E2-98-64, Dubna,
# 1998 and submitted to CPC.
#
# Note that the way greek symbols, super/subscripts are obtained
# illustrate the current limitations of Root in this area.
#
import ROOT
from array import array
from math import *
NMAX = 20                          # maximum number of points per curve
Z = array( 'f', [0.]*NMAX )        # scaling variable z for each point
HZ = array( 'f', [0.]*NMAX )       # scaling function H(z)
PT = array( 'f', [0.]*NMAX )       # transverse momentum grid
INVSIG = array( 'f', [0.]*NMAX )   # invariant cross-section
NLOOP = 0                          # number of points actually filled by hz_calc()

saves = {}                         # keeps ROOT objects referenced so they are not garbage collected
#_______________________________________________________________________________
def zdemo():
    """Draw the demo canvas: two log-log pads, the left with the invariant
    cross-sections vs. q_T for 63/200/500 GeV, the right with the scaling
    function H(z) vs. z.

    Every created ROOT object is stashed in the module-level `saves` dict
    so it stays alive while displayed.
    """
    global NLOOP
    global Z, HZ, PT, INVSIG
    global saves

    # Create a new canvas.
    c1 = ROOT.TCanvas( 'zdemo', 'Monte Carlo Study of Z scaling', 10, 40, 800, 600 )
    c1.Range( 0, 0, 25, 18 )
    c1.SetFillColor( 40 )
    saves[ 'c1' ] = c1 # prevent deletion at end of zdemo

    pl = ROOT.TPaveLabel( 1, 16.3, 24, 17.5,
        'Z-scaling of Direct Photon Productions in pp Collisions at RHIC Energies', 'br' )
    pl.SetFillColor(18)
    pl.SetTextFont(32)
    pl.SetTextColor(49)
    pl.Draw()
    saves[ 'pl' ] = pl

    t = ROOT.TLatex()
    t.SetTextFont(32)
    t.SetTextColor(1)
    t.SetTextSize(0.03)
    t.SetTextAlign(12)
    t.DrawLatex( 3.1, 15.5, 'M.Tokarev, E.Potrebenikova ')
    t.DrawLatex( 14., 15.5, 'JINR preprint E2-98-64, Dubna, 1998 ')
    saves[ 't' ] = t

    pad1 = ROOT.TPad( 'pad1', 'This is pad1', 0.02, 0.02, 0.48, 0.83, 33 )
    pad2 = ROOT.TPad( 'pad2', 'This is pad2', 0.52, 0.02, 0.98, 0.83, 33 )
    pad1.Draw()
    pad2.Draw()
    saves[ 'pad1' ] = pad1; saves[ 'pad2' ] = pad2

    #
    # Cross-section of direct photon production in pp collisions at 63 GeV vs Pt
    #
    energ = 63
    dens = 1.766
    tgrad = 90.
    ptmin = 4.
    ptmax = 24.
    delp = 2.
    hz_calc( energ, dens, tgrad, ptmin, ptmax, delp )
    pad1.cd()
    pad1.Range( -0.255174, -19.25, 2.29657, -6.75 )
    pad1.SetLogx()
    pad1.SetLogy()

    # create a 2-d histogram to define the range
    pad1.DrawFrame( 1, 1e-18, 110, 1e-8 )
    pad1.GetFrame().SetFillColor( 19 )
    t = ROOT.TLatex()
    t.SetNDC()
    t.SetTextFont( 62 )
    t.SetTextColor( 36 )
    t.SetTextSize( 0.08 )
    t.SetTextAlign( 12 )
    t.DrawLatex( 0.6, 0.85, 'p - p' )

    t.SetTextSize( 0.05 )
    t.DrawLatex( 0.6, 0.79, 'Direct #gamma' )
    t.DrawLatex( 0.6, 0.75, '#theta = 90^{o}' )

    t.DrawLatex( 0.20, 0.45, 'Ed^{3}#sigma/dq^{3}' )
    t.DrawLatex( 0.18, 0.40, '(barn/Gev^{2})' )

    t.SetTextSize( 0.045 )
    t.SetTextColor( ROOT.kBlue )
    t.DrawLatex( 0.22, 0.260, '#sqrt{s} = 63(GeV)' )
    t.SetTextColor( ROOT.kRed )
    t.DrawLatex( 0.22, 0.205,'#sqrt{s} = 200(GeV)' )
    t.SetTextColor( 6 )
    t.DrawLatex( 0.22, 0.15, '#sqrt{s} = 500(GeV)' )

    t.SetTextSize( 0.05 )
    t.SetTextColor( 1 )
    t.DrawLatex( 0.6, 0.06, 'q_{T} (Gev/c)' )
    saves[ 't2' ] = t # note the label that is used!

    gr1 = ROOT.TGraph( NLOOP, PT, INVSIG )
    gr1.SetLineColor( 38 )
    gr1.SetMarkerColor( ROOT.kBlue )
    gr1.SetMarkerStyle( 21 )
    gr1.SetMarkerSize( 1.1 )
    gr1.Draw( 'LP' )
    saves[ 'gr1' ] = gr1

    #
    # Cross-section of direct photon production in pp collisions at 200 GeV vs Pt
    #
    energ = 200
    dens = 2.25
    tgrad = 90.
    ptmin = 4.
    ptmax = 64.
    delp = 6.
    hz_calc( energ, dens, tgrad, ptmin, ptmax, delp )
    gr2 = ROOT.TGraph( NLOOP, PT, INVSIG )
    gr2.SetLineColor( 38 )
    gr2.SetMarkerColor( ROOT.kRed )
    gr2.SetMarkerStyle( 29 )
    gr2.SetMarkerSize( 1.5 )
    gr2.Draw( 'LP' )
    saves[ 'gr2' ] = gr2

    #
    # Cross-section of direct photon production in pp collisions at 500 GeV vs Pt
    #
    energ = 500
    dens = 2.73
    tgrad = 90.
    ptmin = 4.
    ptmax = 104.
    delp = 10.
    hz_calc( energ, dens, tgrad, ptmin, ptmax, delp )
    gr3 = ROOT.TGraph( NLOOP, PT, INVSIG )
    gr3.SetLineColor( 38 )
    gr3.SetMarkerColor( 6 )
    gr3.SetMarkerStyle( 8 )
    gr3.SetMarkerSize( 1.1 )
    gr3.Draw( 'LP' )
    saves[ 'gr3' ] = gr3

    # Three single-point graphs, apparently used as legend markers next to
    # the #sqrt{s} labels drawn above.
    dum = array( 'f', [0.] )
    graph = ROOT.TGraph( 1, dum, dum )
    graph.SetMarkerColor( ROOT.kBlue )
    graph.SetMarkerStyle( 21 )
    graph.SetMarkerSize( 1.1 )
    graph.SetPoint( 0, 1.7, 1.e-16 )
    graph.Draw( 'LP' )
    saves[ 'graph' ] = graph

    graph = ROOT.TGraph( 1, dum, dum )
    graph.SetMarkerColor( ROOT.kRed )
    graph.SetMarkerStyle( 29 )
    graph.SetMarkerSize( 1.5 )
    graph.SetPoint( 0, 1.7, 2.e-17 )
    graph.Draw( 'LP' )
    saves[ 'graph2' ] = graph # note the label that is used!

    graph = ROOT.TGraph( 1, dum, dum )
    graph.SetMarkerColor( 6 )
    graph.SetMarkerStyle( 8 )
    graph.SetMarkerSize( 1.1 )
    graph.SetPoint( 0, 1.7, 4.e-18)
    graph.Draw( 'LP' )
    saves[ 'graph3' ] = graph # note the label that is used!

    pad2.cd()
    pad2.Range( -0.43642, -23.75, 3.92778, -6.25 )
    pad2.SetLogx()
    pad2.SetLogy()

    pad2.DrawFrame( 1, 1e-22, 3100, 1e-8 )
    pad2.GetFrame().SetFillColor( 19 )

    gr = ROOT.TGraph( NLOOP, Z, HZ )
    gr.SetTitle( 'HZ vs Z' )
    gr.SetFillColor( 19 )
    gr.SetLineColor( 9 )
    gr.SetMarkerColor( 50 )
    gr.SetMarkerStyle( 29 )
    gr.SetMarkerSize( 1.5 )
    gr.Draw( 'LP' )
    saves[ 'gr' ] = gr

    t = ROOT.TLatex()
    t.SetNDC()
    t.SetTextFont( 62 )
    t.SetTextColor( 36 )
    t.SetTextSize( 0.08 )
    t.SetTextAlign( 12 )
    t.DrawLatex( 0.6, 0.85, 'p - p' )

    t.SetTextSize( 0.05 )
    t.DrawLatex( 0.6, 0.79, 'Direct #gamma' )
    t.DrawLatex( 0.6, 0.75, '#theta = 90^{o}' )

    t.DrawLatex( 0.70, 0.55, 'H(z)' )
    t.DrawLatex( 0.68, 0.50, '(barn)' )

    t.SetTextSize( 0.045 )
    t.SetTextColor( 46 )
    t.DrawLatex( 0.20, 0.30, '#sqrt{s}, GeV' )
    t.DrawLatex( 0.22, 0.26, '63' )
    t.DrawLatex( 0.22, 0.22, '200' )
    t.DrawLatex( 0.22, 0.18, '500' )

    t.SetTextSize( 0.05 )
    t.SetTextColor( 1 )
    t.DrawLatex( 0.88, 0.06, 'z' )
    saves[ 't3' ] = t # note the label that is used!

    c1.Modified()
    c1.Update()
#_______________________________________________________________________________
def hz_calc(ENERG, DENS, TGRAD, PTMIN, PTMAX, DELP):
    """Fill the global PT/Z/HZ/INVSIG arrays for the direct-photon H(z) plot.

    ENERG -- collision energy sqrt(s) in GeV
    DENS  -- charged-particle density dN/deta used for the z scaling
    TGRAD -- photon emission angle in degrees
    PTMIN, PTMAX, DELP -- transverse-momentum range and step in GeV

    Side effects: sets the global NLOOP and writes NLOOP entries into the
    pre-allocated module-level arrays PT, Z, HZ and INVSIG.
    """
    global NLOOP
    global Z, HZ, PT, INVSIG

    # Model constants.
    GM1 = 0.00001        # regulator masses for the photon leg
    GM2 = 0.00001
    A1 = 1.              # beam mass numbers (p-p)
    A2 = 1.
    ALX = 2.             # exponent of the (1-y1)(1-y2) suppression factor
    BETA = 1.            # exponent of the dN/deta scaling in z
    KF1 = 8.E-7          # H(z) parametrisation: H(z) = KF1 / z**KF2
    KF2 = 5.215
    MN = 0.9383          # nucleon mass in GeV
    DEGRAD = 0.01745329  # degrees -> radians

    DNDETA = DENS
    MB1 = MN * A1
    MB2 = MN * A2
    EB1 = ENERG / 2. * A1
    EB2 = ENERG / 2. * A2
    M1 = GM1
    THET = TGRAD * DEGRAD
    NLOOP = int((PTMAX - PTMIN) / DELP)

    # Loop-invariant beam kinematics, hoisted out of the pT loop
    # (the originals recomputed these identically on every iteration).
    PB1 = sqrt(EB1 * EB1 - MB1 * MB1)
    PB2 = sqrt(EB2 * EB2 - MB2 * MB2)
    P1P2 = EB2 * EB1 + PB2 * PB1
    S = (MB1 * MB1) + 2. * P1P2 + (MB2 * MB2)

    for I in range(NLOOP):
        PT[I] = PTMIN + I * DELP
        PTOT = PT[I] / sin(THET)
        ETOT = sqrt(M1 * M1 + PTOT * PTOT)
        # Four-vector products between the beams (1, 2) and the photon (3).
        P2P3 = EB2 * ETOT + PB2 * PTOT * cos(THET)
        P1P3 = EB1 * ETOT - PB1 * PTOT * cos(THET)
        X1 = P2P3 / P1P2
        X2 = P1P3 / P1P2
        Y1 = X1 + sqrt(X1 * X2 * (1. - X1) / (1. - X2))
        Y2 = X2 + sqrt(X1 * X2 * (1. - X2) / (1. - X1))
        SMIN = 4. * ((MB1 * MB1) * (X1 * X1) + 2. * X1 * X2 * P1P2 + (MB2 * MB2) * (X2 * X2))
        SX1 = 4. * (2 * (MB1 * MB1) * X1 + 2 * X2 * P1P2)
        SX2 = 4. * (2 * (MB2 * MB2) * X2 + 2 * X1 * P1P2)
        DELM = pow((1. - Y1) * (1. - Y2), ALX)
        Z[I] = sqrt(SMIN) / DELM / pow(DNDETA, BETA)
        # Derivative-like terms (Y1X1 ~ dY1/dX1 judging by naming -- TODO confirm).
        Y1X1 = 1. + X2 * (1 - 2. * X1) / (2. * (Y1 - X1) * (1. - X2))
        Y1X2 = X1 * (1 - X1) / (2. * (Y1 - X1) * (1. - X2) * (1. - X2))
        Y2X1 = X2 * (1 - X2) / (2. * (Y2 - X2) * (1. - X1) * (1. - X1))
        Y2X2 = 1. + X1 * (1 - 2. * X2) / (2. * (Y2 - X2) * (1. - X1))
        KX1 = -DELM * (Y1X1 * ALX / (1. - Y1) + Y2X1 * ALX / (1. - Y2))
        KX2 = -DELM * (Y2X2 * ALX / (1. - Y2) + Y1X2 * ALX / (1. - Y1))
        ZX1 = Z[I] * (SX1 / (2. * SMIN) - KX1 / DELM)
        ZX2 = Z[I] * (SX2 / (2. * SMIN) - KX2 / DELM)
        H1 = ZX1 * ZX2
        HZ[I] = KF1 / pow(Z[I], KF2)
        INVSIG[I] = (HZ[I] * H1 * 16.) / S
# Run the demo when this file is executed as a script (not on import).
if __name__ == '__main__':
    zdemo()
| lgpl-2.1 |
jmachuca77/ardupilot | libraries/SITL/examples/Morse/rover_follow.py | 35 | 1477 | '''
This is an example builder script that sets up a a set of rovers to
be driven by ArduPilot for demonstrating follow mode
The rover has the basic set of sensors that ArduPilot needs
To start the simulation use this:
morse run rover_follow.py
'''
from morse.builder import *
num_vehicles = 3

for i in range(num_vehicles):
    # One ATRV rover per vehicle, labelled so it can be identified in the sim.
    vehicle = ATRV('Vehicle%u' % i)
    vehicle.properties(Object=True, Graspable=False, Label="Vehicle")
    # Park the rovers 3 m apart along y so they do not overlap at spawn.
    vehicle.translate(x=0.0, y=3 * i, z=0.0)

    # Sensors ArduPilot needs: pose, IMU, UTM-referenced GPS and velocity.
    pose = Pose()
    vehicle.append(pose)

    imu = IMU()
    vehicle.append(imu)

    gps = GPS()
    gps.alter('UTM')
    vehicle.append(gps)

    velocity = Velocity()
    vehicle.append(velocity)

    # Bundle the individual sensors and stream them together over one socket.
    all_sensors = CompoundSensor([imu, gps, velocity, pose])
    all_sensors.add_stream('socket')
    vehicle.append(all_sensors)

    # Accept speed / angular-velocity commands over a socket.
    motion = MotionVW()
    vehicle.append(motion)
    motion.add_stream('socket')

# Environment
env = Environment('land-1/trees', fastmode=False)
env.set_camera_location([10.0, -10.0, 10.0])
env.set_camera_rotation([1.0470, 0, 0.7854])
env.set_camera_clip(clip_end=1000)

# startup at CMAC. A location is needed for the magnetometer
env.properties(longitude=149.165230, latitude=-35.363261, altitude=584.0)
| gpl-3.0 |
gallandarakhneorg/autolatex | plugins/gedit3/autolatexeditor/utils/gedit_runner.py | 1 | 4936 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# autolatex/utils/gedit_runner.py
# Copyright (C) 2013-14 Stephane Galland <galland@arakhne.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
from gi.repository import GObject, Gtk, Gedit
from autolatex.utils import runner as autolatex_runner
# List of all the runners
_all_runners = []
def kill_all_runners():
    """Delegate to the shared runner module's kill_all_runners()."""
    autolatex_runner.kill_all_runners()
class Runner(autolatex_runner.Listener):
    """Drive one AutoLaTeX build and mirror its progress in the Gedit UI.

    The actual work happens in a background autolatex_runner.Runner thread;
    this listener receives its callbacks and shows, updates and removes a
    Gedit.ProgressInfoBar on the active tab accordingly.  All UI mutations
    are funnelled through GObject.idle_add so they run on the Gtk main loop.
    """

    def __init__(self, caller, label, show_progress, directory, directive, params):
        autolatex_runner.Listener.__init__(self)
        self._caller = caller
        self._info_bar_label = label
        self._show_progress = bool(show_progress)
        self._gedit_tab = None
        self._info_bar = None
        self._sig_info_bar_response = 0
        self._sig_info_bar_remove = 0
        self._automatic_bar_creation = False
        self._last_fraction = 0
        self._last_comment = None
        self._thread = autolatex_runner.Runner(self, directory, directive, params)

    def start(self):
        """Launch the background build thread, if one exists."""
        if self._thread:
            self._thread.start()

    def cancel(self):
        """Cancel the build and grey out the info bar's Cancel button."""
        if self._thread:
            self._thread.cancel()
            self._thread = None
        if self._info_bar:
            self._info_bar.set_response_sensitive(
                Gtk.ResponseType.CANCEL,
                False)

    def get_runner_progress(self):
        """Tell the build thread whether progress callbacks are wanted."""
        return self._show_progress and self._info_bar_label is not None

    def on_runner_add_ui(self):
        """Remember the active tab and schedule creation of the info bar."""
        self._gedit_tab = self._caller.window.get_active_tab()
        GObject.idle_add(self._add_info_bar)

    def on_runner_remove_ui(self):
        """Schedule removal of the info bar on the Gtk main loop."""
        GObject.idle_add(self._hide_info_bar)

    def on_runner_progress(self, amount, comment):
        """Forward a progress update to the Gtk main loop."""
        GObject.idle_add(self._update_info_bar, amount, comment)

    def on_runner_finalize_execution(self, retcode, output, latex_warnings):
        """Report the final build output and warnings back to the plugin."""
        self._automatic_bar_creation = False
        GObject.idle_add(self._caller._update_action_validity,
                         True, output, latex_warnings)

    def _add_info_bar(self):
        """Create and show the progress info bar on the remembered tab."""
        if self._gedit_tab:
            self._info_bar = Gedit.ProgressInfoBar()
            self._info_bar.set_stock_image(Gtk.STOCK_EXECUTE)
            self._info_bar.set_text(self._info_bar_label)
            self._sig_info_bar_response = self._info_bar.connect(
                "response",
                self._on_cancel_action)
            self._sig_info_bar_remove = self._info_bar.connect(
                "parent-set",
                self._on_parent_remove_action)
            self._gedit_tab.set_info_bar(self._info_bar)
            self._info_bar.show()
            self._gedit_tab.grab_focus()

    def _hide_info_bar(self):
        """Tear down the info bar and give focus back to the tab."""
        if self._info_bar:
            self._info_bar.hide()
            self._info_bar.disconnect(self._sig_info_bar_response)
            self._info_bar.disconnect(self._sig_info_bar_remove)
            self._info_bar.destroy()
            self._info_bar = None
            self._gedit_tab.grab_focus()

    def _on_cancel_action(self, widget, response, data=None):
        """Handle a click on the info bar's Cancel button."""
        if response == Gtk.ResponseType.CANCEL:
            self.cancel()

    def _on_parent_remove_action(self, widget, oldParent=None, data=None):
        """Re-create the bar later if another info bar displaced ours."""
        # The progress bar was removed by an other info bar
        bar = self._info_bar
        if bar and bar.get_parent() is None:
            self._hide_info_bar()
            self._automatic_bar_creation = True
            GObject.idle_add(self._update_info_bar,
                             self._last_fraction, self._last_comment)

    def __has_info_child(self):
        """Return True unless we can prove the tab has no info bar child."""
        if self._gedit_tab:
            for child in self._gedit_tab.get_children():
                if isinstance(child, Gtk.InfoBar):
                    return True  # Search says: has info bar
            return False  # Search says: no info bar
        return True  # Assume that the panel is inside

    def _update_info_bar(self, progress_value, comment):
        """Record the latest progress and push it to the (possibly re-created) bar."""
        self._last_fraction = progress_value
        self._last_comment = comment
        if self._automatic_bar_creation and not self._info_bar and not self.__has_info_child():
            self._automatic_bar_creation = False
            GObject.idle_add(self._add_info_bar)
        GObject.idle_add(self.__set_info_bar_data, progress_value, comment)

    def __set_info_bar_data(self, progress_value, comment):
        """Apply a fraction/comment pair to the visible info bar."""
        if self._info_bar:
            self._info_bar.set_fraction(progress_value)
            if comment:
                self._info_bar.set_text(comment)
            self._info_bar.show()
| gpl-2.0 |
Q-Leap-Networks/pyside-segfault | wizard.py | 1 | 3656 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
import sys
import argparse
from Qt import QtGui, QtCore, loadUi
from QtCore import Slot
from HWWizard import HWWizard
class Host:
    """A named host together with its autodetected hardware feature map."""

    def __init__(self, name, autodetect):
        # autodetect maps feature labels (e.g. "# CPU cores") to string values
        self.name = name
        self.autodetect = autodetect
class Name:
    """A record holding a single display name."""

    def __init__(self, name):
        self.name = name
class PropSet:
    """A link between a property-set name id and one of its feature ids."""

    def __init__(self, name_id, feature_id):
        self.name_id = name_id
        self.feature_id = feature_id
class Prop:
    """A hardware property: a name reference plus its string value."""

    def __init__(self, name_id, val):
        self.name_id = name_id
        self.val = val
class MainWindow(QtGui.QMainWindow):
    """Demo main window pre-populated with fake host / hardware-property data."""

    def __init__(self, parent=None):
        super().__init__(parent)
        loadUi("MainWindow.ui", self)

        # Five demo hosts: beo-01..03 report 1 GB of RAM, beo-04/05 report 2 GB.
        base_features = {
            "# CPU cores": "1",
            "# CPU sockets": "1",
            "Size of RAM (GB)": "1",
            "IB Adapter": "True",
            "IPMI Adapter": "False",
        }
        self.hosts = {}
        for host_id in range(1, 6):
            features = dict(base_features)  # fresh dict per host
            if host_id >= 4:
                features["Size of RAM (GB)"] = "2"
            self.hosts[host_id] = Host("beo-%02d" % host_id, features)

        self.hw_prop_set_names = {
            i: Name(label)
            for i, label in enumerate(("Existing", "Similar", "Bad"), start=1)
        }

        self.hw_prop_names = {
            i: Name(label)
            for i, label in enumerate((
                "# CPU cores",
                "# CPU sockets",
                "Size of RAM (GB)",
                "IB Adapter",
                "IPMI Adapter",
                "Other",
            ), start=1)
        }

        # (name_id, value) pairs, keyed 1..7 in order.
        prop_rows = [
            (1, "1"), (2, "1"), (3, "1"), (4, "True"),
            (5, "False"), (6, "something"), (1, "2"),
        ]
        self.hw_props = {
            i: Prop(name_id, val)
            for i, (name_id, val) in enumerate(prop_rows, start=1)
        }

        # (prop-set name_id, hw_props feature id) pairs, keyed 1..16 in order.
        set_rows = [
            (1, 1), (1, 2), (1, 3), (1, 4), (1, 5),
            (2, 1), (2, 2), (2, 3), (2, 4), (2, 5), (2, 6),
            (3, 7), (3, 2), (3, 3), (3, 4), (3, 5),
        ]
        self.hw_prop_sets = {
            i: PropSet(name_id, feature_id)
            for i, (name_id, feature_id) in enumerate(set_rows, start=1)
        }

    @Slot()
    def on_wizardButton_clicked(self):
        """Open the hardware wizard dialog modally."""
        wiz = HWWizard(self)
        wiz.exec_()
def main():
    """Create the Qt application, show the main window and run the event loop."""
    app = QtGui.QApplication(sys.argv)
    app.setOrganizationName("test")
    app.setOrganizationDomain("test")
    app.setApplicationName("test")

    window = MainWindow()
    window.show()

    sys.exit(app.exec_())


if __name__ == '__main__':
    main()
| gpl-2.0 |
Tatsh-ansible/ansible | lib/ansible/plugins/callback/profile_tasks.py | 44 | 4664 | # (C) 2016, Joel, http://github.com/jjshoe
# (C) 2015, Tom Paine, <github@aioue.net>
# (C) 2014, Jharrod LaFon, @JharrodLaFon
# (C) 2012-2013, Michael DeHaan, <michael.dehaan@gmail.com>
#
# This file is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# File is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# See <http://www.gnu.org/licenses/> for a copy of the
# GNU General Public License
# Provides per-task timing, ongoing playbook elapsed time and
# ordered list of top 20 longest running tasks at end
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import collections
import os
import time
from ansible.module_utils.six.moves import reduce
from ansible.plugins.callback import CallbackBase
# define start time
t0 = tn = time.time()
def secondsToStr(t):
    """Format a duration of *t* seconds as 'H:MM:SS.mmm'."""
    # Split t (seconds) into hours / minutes / seconds / milliseconds.
    seconds, millis = divmod(t * 1000, 1000)
    minutes, seconds = divmod(seconds, 60)
    hours, minutes = divmod(minutes, 60)
    return "%d:%02d:%02d.%03d" % (hours, minutes, seconds, millis)
def filled(msg, fchar="*"):
    """Right-pad *msg* with *fchar* into a 79-column banner line.

    An empty message yields 79 fill characters; a non-empty message gets a
    trailing space and at least 3 fill characters, even if it is very long.
    """
    if not msg:
        pad = 79
    else:
        msg = "%s " % msg
        pad = max(79 - len(msg), 3)
    return "%s%s " % (msg, fchar * pad)
def timestamp(self):
    """Convert the running task's recorded start time into an elapsed duration.

    The stats entry for ``self.current`` initially holds the task's start
    time; this replaces it, in place, with (now - start).  No-op when no
    task is currently running.
    """
    if self.current is None:
        return
    entry = self.stats[self.current]
    entry['time'] = time.time() - entry['time']
def tasktime():
    """Return a banner with the wall-clock time, the time elapsed since the
    previous call and the total time elapsed since module import.

    Updates the module-level ``tn`` marker as a side effect.
    """
    global tn
    time_current = time.strftime('%A %d %B %Y %H:%M:%S %z')
    time_elapsed = secondsToStr(time.time() - tn)
    time_total_elapsed = secondsToStr(time.time() - t0)
    tn = time.time()
    return filled('%s (%s)%s%s' % (time_current, time_elapsed, ' ' * 7, time_total_elapsed))
class CallbackModule(CallbackBase):
    """
    This callback module provides per-task timing, ongoing playbook elapsed time
    and ordered list of top 20 longest running tasks at end.
    """
    CALLBACK_VERSION = 2.0
    CALLBACK_TYPE = 'aggregate'
    CALLBACK_NAME = 'profile_tasks'
    CALLBACK_NEEDS_WHITELIST = True

    def __init__(self):
        self.stats = collections.OrderedDict()
        self.current = None

        # NOTE(review): os.getenv returns a *string* when the variable is set,
        # so sort_order can be True (default), False ('ascending') or a truthy
        # string such as 'descending' / 'none'; the comparisons below rely on
        # that -- confirm before tightening the types.
        self.sort_order = os.getenv('PROFILE_TASKS_SORT_ORDER', True)
        self.task_output_limit = os.getenv('PROFILE_TASKS_TASK_OUTPUT_LIMIT', 20)

        if self.sort_order == 'ascending':
            self.sort_order = False

        if self.task_output_limit == 'all':
            self.task_output_limit = None
        else:
            self.task_output_limit = int(self.task_output_limit)

        super(CallbackModule, self).__init__()

    def _record_task(self, task):
        """
        Logs the start of each task
        """
        self._display.display(tasktime())
        timestamp(self)

        # Record the start time of the current task
        self.current = task._uuid
        self.stats[self.current] = {'time': time.time(), 'name': task.get_name()}
        if self._display.verbosity >= 2:
            self.stats[self.current]['path'] = task.get_path()

    def v2_playbook_on_task_start(self, task, is_conditional):
        self._record_task(task)

    def v2_playbook_on_handler_task_start(self, task):
        self._record_task(task)

    def playbook_on_setup(self):
        self._display.display(tasktime())

    def playbook_on_stats(self, stats):
        """Print the per-task timing summary at the end of the playbook run."""
        self._display.display(tasktime())
        self._display.display(filled("", fchar="="))

        # Close out the timing of the last task that ran.
        timestamp(self)

        results = self.stats.items()

        # Sort the tasks by the specified sort
        if self.sort_order != 'none':
            results = sorted(
                self.stats.items(),
                key=lambda item: item[1]['time'],
                reverse=self.sort_order,
            )

        # Display the number of tasks specified or the default of 20
        results = results[:self.task_output_limit]

        # Print the timings
        for uuid, result in results:
            msg = u"{0:-<{2}}{1:->9}".format(result['name'] + u' ', u' {0:.02f}s'.format(result['time']), self._display.columns - 9)
            if 'path' in result:
                msg += u"\n{0:-<{1}}".format(result['path'] + u' ', self._display.columns)
            self._display.display(msg)
| gpl-3.0 |
gymnasium/edx-platform | common/lib/xmodule/xmodule/modulestore/tests/factories.py | 10 | 26160 | """
Factories for use in tests of XBlocks.
"""
import datetime
import functools
import pymongo.message
import pytz
import threading
import traceback
from collections import defaultdict
from contextlib import contextmanager
from uuid import uuid4
from factory import Factory, Sequence, lazy_attribute_sequence, lazy_attribute
from factory.errors import CyclicDefinitionError
from mock import patch
from nose.tools import assert_less_equal, assert_greater_equal
import dogstats_wrapper as dog_stats_api
from opaque_keys.edx.locator import BlockUsageLocator
from opaque_keys.edx.keys import UsageKey
from xblock.core import XBlock
from xmodule.modulestore import prefer_xmodules, ModuleStoreEnum
from xmodule.modulestore.tests.sample_courses import default_block_info_tree, TOY_BLOCK_INFO_TREE
from xmodule.tabs import CourseTab
from xmodule.x_module import DEPRECATION_VSCOMPAT_EVENT
from xmodule.course_module import Textbook
class Dummy(object):
    """Placeholder model for XModuleFactory: factory_boy requires *some* model
    class, but the class actually created is chosen from the factory's
    ``category`` at runtime."""
class XModuleFactoryLock(threading.local):
    """
    This class exists to store whether XModuleFactory can be accessed in a safe
    way (meaning, in a context where the data it creates will be cleaned up).

    Users of XModuleFactory (or its subclasses) should only call
    XModuleFactoryLock.enable after ensuring that a) the modulestore will be
    cleaned up, and b) that XModuleFactoryLock.disable will be called.
    """

    def __init__(self):
        super(XModuleFactoryLock, self).__init__()
        # Thread-local flag: factories are disabled until explicitly enabled.
        self._enabled = False

    def enable(self):
        """
        Enable XModuleFactories. This should only be turned in a context
        where the modulestore will be reset at the end of the test (such
        as inside ModuleStoreTestCase).
        """
        self._enabled = True

    def disable(self):
        """
        Disable XModuleFactories. This should be called once the data
        from the factory has been cleaned up.
        """
        self._enabled = False

    def is_enabled(self):
        """
        Return whether XModuleFactories are enabled.
        """
        return self._enabled
XMODULE_FACTORY_LOCK = XModuleFactoryLock()
class XModuleFactory(Factory):
    """
    Factory for XModules
    """

    # We have to give a model for Factory; however, the class that is created
    # is actually determined by the category specified in the factory.
    class Meta(object):
        model = Dummy

    @lazy_attribute
    def modulestore(self):
        # Fail loudly if a test uses factories without the cleanup guarantees
        # provided by ModuleStoreTestCase.
        msg = "XMODULE_FACTORY_LOCK not enabled. Please use ModuleStoreTestCase as your test baseclass."
        assert XMODULE_FACTORY_LOCK.is_enabled(), msg

        # Imported here to avoid requiring Django settings at module import time.
        from xmodule.modulestore.django import modulestore
        return modulestore()
last_course = threading.local()
class CourseFactory(XModuleFactory):
    """
    Factory for XModule courses.
    """
    org = Sequence('org.{}'.format)
    number = Sequence('course_{}'.format)
    display_name = Sequence('Run {}'.format)

    # pylint: disable=unused-argument
    @classmethod
    def _create(cls, target_class, **kwargs):
        """
        Create and return a new course. For performance reasons, we do not emit
        signals during this process, but if you need signals to run, you can
        pass `emit_signals=True` to this method.
        """
        # All class attributes (from this class and base classes) arrive via
        # **kwargs; the ones below are control arguments, not field values,
        # so pop them off for separate use.
        org = kwargs.pop('org', None)
        # because the factory provides a default 'number' arg, prefer the non-defaulted 'course' arg if any
        number = kwargs.pop('course', kwargs.pop('number', None))
        store = kwargs.pop('modulestore')
        name = kwargs.get('name', kwargs.get('run', BlockUsageLocator.clean(kwargs.get('display_name'))))
        run = kwargs.pop('run', name)
        user_id = kwargs.pop('user_id', ModuleStoreEnum.UserID.test)
        emit_signals = kwargs.pop('emit_signals', False)

        # Pass the metadata just as field=value pairs
        kwargs.update(kwargs.pop('metadata', {}))
        default_store_override = kwargs.pop('default_store', None)

        with store.branch_setting(ModuleStoreEnum.Branch.draft_preferred):
            course_key = store.make_course_key(org, number, run)
            with store.bulk_operations(course_key, emit_signals=emit_signals):
                if default_store_override is not None:
                    with store.default_store(default_store_override):
                        new_course = store.create_course(org, number, run, user_id, fields=kwargs)
                else:
                    new_course = store.create_course(org, number, run, user_id, fields=kwargs)

                # Remember the newest course so ItemFactory can default to it.
                last_course.loc = new_course.location
                return new_course
class SampleCourseFactory(CourseFactory):
    """
    Factory for sample courses using block_info_tree definitions.
    """

    @classmethod
    def _create(cls, target_class, **kwargs):
        """
        Create and return a new sample course. See CourseFactory for customization kwargs.
        """
        block_info_tree = kwargs.pop('block_info_tree', default_block_info_tree)
        store = kwargs.get('modulestore')
        user_id = kwargs.get('user_id', ModuleStoreEnum.UserID.test)

        with store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, None):
            course = super(SampleCourseFactory, cls)._create(target_class, **kwargs)

            def create_sub_tree(parent_loc, block_info):
                """Recursively creates a sub_tree on this parent_loc with this block."""
                block = store.create_child(
                    user_id,
                    parent_loc,
                    block_info.category,
                    block_id=block_info.block_id,
                    fields=block_info.fields,
                )
                for tree in block_info.sub_tree:
                    create_sub_tree(block.location, tree)

            for tree in block_info_tree:
                create_sub_tree(course.location, tree)

            # Make the freshly created tree visible on the published branch.
            store.publish(course.location, user_id)
        return course
class ToyCourseFactory(SampleCourseFactory):
    """
    Factory for sample course that is equivalent to the toy xml course.
    """
    org = 'edX'
    course = 'toy'
    run = '2012_Fall'
    display_name = 'Toy Course'

    @classmethod
    def _create(cls, target_class, **kwargs):
        """
        Create and return a new toy course instance. See SampleCourseFactory for customization kwargs.
        """
        store = kwargs.get('modulestore')
        user_id = kwargs.get('user_id', ModuleStoreEnum.UserID.test)

        # Field values that make the generated course match the XML toy course.
        fields = {
            'block_info_tree': TOY_BLOCK_INFO_TREE,
            'textbooks': [Textbook("Textbook", "path/to/a/text_book")],
            'wiki_slug': "toy",
            'graded': True,
            'discussion_topics': {"General": {"id": "i4x-edX-toy-course-2012_Fall"}},
            'graceperiod': datetime.timedelta(days=2, seconds=21599),
            'start': datetime.datetime(2015, 7, 17, 12, tzinfo=pytz.utc),
            'xml_attributes': {"filename": ["course/2012_Fall.xml", "course/2012_Fall.xml"]},
            'pdf_textbooks': [
                {
                    "tab_title": "Sample Multi Chapter Textbook",
                    "id": "MyTextbook",
                    "chapters": [
                        {"url": "/static/Chapter1.pdf", "title": "Chapter 1"},
                        {"url": "/static/Chapter2.pdf", "title": "Chapter 2"}
                    ]
                }
            ],
            'course_image': "just_a_test.jpg",
        }
        fields.update(kwargs)

        toy_course = super(ToyCourseFactory, cls)._create(target_class, **fields)

        # Add the 'about' pages, handouts and static tabs the toy course ships with.
        with store.bulk_operations(toy_course.id, emit_signals=False):
            with store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, toy_course.id):
                store.create_item(
                    user_id, toy_course.id, "about", block_id="short_description",
                    fields={"data": "A course about toys."}
                )
                store.create_item(
                    user_id, toy_course.id, "about", block_id="effort",
                    fields={"data": "6 hours"}
                )
                store.create_item(
                    user_id, toy_course.id, "about", block_id="end_date",
                    fields={"data": "TBD"}
                )
                store.create_item(
                    user_id, toy_course.id, "course_info", "handouts",
                    fields={"data": "<a href='/static/handouts/sample_handout.txt'>Sample</a>"}
                )
                store.create_item(
                    user_id, toy_course.id, "static_tab", "resources",
                    fields={"display_name": "Resources"},
                )
                store.create_item(
                    user_id, toy_course.id, "static_tab", "syllabus",
                    fields={"display_name": "Syllabus"},
                )
        return toy_course
class LibraryFactory(XModuleFactory):
    """
    Factory for creating a content library
    """
    org = Sequence('org{}'.format)
    library = Sequence('lib{}'.format)
    display_name = Sequence('Test Library {}'.format)

    # pylint: disable=unused-argument
    @classmethod
    def _create(cls, target_class, **kwargs):
        """
        Create a library with a unique name and key.

        All class attributes (from this class and base classes) are automagically
        passed in via **kwargs.
        """
        # Some of the kwargs are control arguments rather than field values,
        # so pop those off for use separately:
        org = kwargs.pop('org')
        library = kwargs.pop('library')
        store = kwargs.pop('modulestore')
        user_id = kwargs.pop('user_id', ModuleStoreEnum.UserID.test)

        # Pass the metadata just as field=value pairs
        kwargs.update(kwargs.pop('metadata', {}))

        default_store_override = kwargs.pop('default_store', ModuleStoreEnum.Type.split)
        with store.default_store(default_store_override):
            return store.create_library(org, library, user_id, fields=kwargs)
class ItemFactory(XModuleFactory):
    """
    Factory for XModule items.
    """

    category = 'chapter'
    parent = None

    @lazy_attribute_sequence
    def display_name(self, n):
        return "{} {}".format(self.category, n)

    @lazy_attribute
    def location(self):
        # Derive the usage key from the display name (or a random id).
        if self.display_name is None:
            dest_name = uuid4().hex
        else:
            dest_name = self.display_name.replace(" ", "_")
        return self.parent_location.course_key.make_usage_key(
            self.category,
            dest_name
        )

    @lazy_attribute
    def parent_location(self):
        default_location = getattr(last_course, 'loc', None)
        try:
            parent = self.parent
        except CyclicDefinitionError:
            # Raised when the caller provided neither parent nor
            # parent_location; fall back to the most recent course.
            return default_location
        if parent is None:
            return default_location
        return parent.location

    @classmethod
    def _create(cls, target_class, **kwargs):
        """
        Uses ``**kwargs``:

        :parent_location: (required): the location of the parent module
            (e.g. the parent course or section)

        :category: the category of the resulting item.

        :data: (optional): the data for the item
            (e.g. XML problem definition for a problem item)

        :display_name: (optional): the display name of the item

        :metadata: (optional): dictionary of metadata attributes

        :boilerplate: (optional) the boilerplate for overriding field values

        :publish_item: (optional) whether or not to publish the item (default is True)

        :target_class: is ignored
        """
        # All class attributes (from this class and base classes) arrive via
        # **kwargs; the ones below are control arguments, not field values.

        # catch any old style users before they get into trouble
        assert 'template' not in kwargs

        parent_location = kwargs.pop('parent_location', None)
        data = kwargs.pop('data', None)
        category = kwargs.pop('category', None)
        display_name = kwargs.pop('display_name', None)
        metadata = kwargs.pop('metadata', {})
        location = kwargs.pop('location')
        user_id = kwargs.pop('user_id', ModuleStoreEnum.UserID.test)
        publish_item = kwargs.pop('publish_item', True)

        assert isinstance(location, UsageKey)
        assert location != parent_location

        store = kwargs.pop('modulestore')

        # This code was based off that in cms/djangoapps/contentstore/views.py
        parent = kwargs.pop('parent', None) or store.get_item(parent_location)

        with store.branch_setting(ModuleStoreEnum.Branch.draft_preferred):
            if 'boilerplate' in kwargs:
                template_id = kwargs.pop('boilerplate')
                clz = XBlock.load_class(category, select=prefer_xmodules)
                template = clz.get_template(template_id)
                assert template is not None
                metadata.update(template.get('metadata', {}))
                if not isinstance(data, basestring):
                    data.update(template.get('data'))

            # replace the display name with an optional parameter passed in from the caller
            if display_name is not None:
                metadata['display_name'] = display_name

            module = store.create_child(
                user_id,
                parent.location,
                location.block_type,
                block_id=location.block_id,
                metadata=metadata,
                definition_data=data,
                runtime=parent.runtime,
                fields=kwargs,
            )

            # VS[compat] cdodge: This is a hack because static_tabs also have references from the course module, so
            # if we add one then we need to also add it to the policy information (i.e. metadata)
            # we should remove this once we can break this reference from the course to static tabs
            if category == 'static_tab':
                dog_stats_api.increment(
                    DEPRECATION_VSCOMPAT_EVENT,
                    tags=(
                        "location:itemfactory_create_static_tab",
                        u"block:{}".format(location.block_type),
                    )
                )
                course = store.get_course(location.course_key)
                course.tabs.append(
                    CourseTab.load('static_tab', name='Static Tab', url_slug=location.block_id)
                )
                store.update_item(course, user_id)

            # parent and publish the item, so it can be accessed
            if 'detached' not in module._class_tags:
                parent.children.append(location)
                store.update_item(parent, user_id)
                if publish_item:
                    published_parent = store.publish(parent.location, user_id)
                    # module is last child of parent
                    return published_parent.get_children()[-1]
                else:
                    return store.get_item(location)
            elif publish_item:
                return store.publish(location, user_id)
            else:
                return module
@contextmanager
def check_exact_number_of_calls(object_with_method, method_name, num_calls):
    """
    Instruments the given method on the given object to verify the number of calls to the
    method is exactly equal to 'num_calls'.
    """
    # An exact count is just identical lower and upper bounds.
    with check_number_of_calls(object_with_method, method_name, num_calls, num_calls):
        yield
def check_number_of_calls(object_with_method, method_name, maximum_calls, minimum_calls=1):
    """
    Instruments the given method on the given object to verify the number of calls to the
    method falls between ``minimum_calls`` and ``maximum_calls`` (both inclusive).
    """
    return check_sum_of_calls(object_with_method, [method_name], maximum_calls, minimum_calls)
class StackTraceCounter(object):
"""
A class that counts unique stack traces underneath a particular stack frame.
"""
def __init__(self, stack_depth, include_arguments=True):
"""
Arguments:
stack_depth (int): The number of stack frames above this constructor to capture.
include_arguments (bool): Whether to store the arguments that are passed
when capturing a stack trace.
"""
self.include_arguments = include_arguments
self._top_of_stack = traceback.extract_stack(limit=stack_depth)[0]
if self.include_arguments:
self._stacks = defaultdict(lambda: defaultdict(int))
else:
self._stacks = defaultdict(int)
def capture_stack(self, args, kwargs):
"""
Record the stack frames starting at the caller of this method, and
ending at the top of the stack as defined by the ``stack_depth``.
Arguments:
args: The positional arguments to capture at this stack frame
kwargs: The keyword arguments to capture at this stack frame
"""
# pylint: disable=broad-except
stack = traceback.extract_stack()[:-2]
if self._top_of_stack in stack:
stack = stack[stack.index(self._top_of_stack):]
if self.include_arguments:
safe_args = []
for arg in args:
try:
safe_args.append(repr(arg))
except Exception as exc:
safe_args.append('<un-repr-able value: {}'.format(exc))
safe_kwargs = {}
for key, kwarg in kwargs.items():
try:
safe_kwargs[key] = repr(kwarg)
except Exception as exc:
safe_kwargs[key] = '<un-repr-able value: {}'.format(exc)
self._stacks[tuple(stack)][tuple(safe_args), tuple(safe_kwargs.items())] += 1
else:
self._stacks[tuple(stack)] += 1
@property
def total_calls(self):
"""
Return the total number of stacks recorded.
"""
return sum(self.stack_calls(stack) for stack in self._stacks)
def stack_calls(self, stack):
"""
Return the number of calls to the supplied ``stack``.
"""
if self.include_arguments:
return sum(self._stacks[stack].values())
else:
return self._stacks[stack]
def __iter__(self):
"""
Iterate over all unique captured stacks.
"""
return iter(sorted(self._stacks.keys(), key=lambda stack: (self.stack_calls(stack), stack), reverse=True))
def __getitem__(self, stack):
"""
Return the set of captured calls with the supplied stack.
"""
return self._stacks[stack]
@classmethod
def capture_call(cls, func, stack_depth, include_arguments=True):
"""
A decorator that wraps ``func``, and captures each call to ``func``,
recording the stack trace, and optionally the arguments that the function
is called with.
Arguments:
func: the function to wrap
stack_depth: how far up the stack to truncate the stored stack traces (
this is counted from the call to ``capture_call``, rather than calls
to the captured function).
"""
stacks = StackTraceCounter(stack_depth, include_arguments)
# pylint: disable=missing-docstring
@functools.wraps(func)
def capture(*args, **kwargs):
stacks.capture_stack(args, kwargs)
return func(*args, **kwargs)
capture.stack_counter = stacks
return capture
@contextmanager
def check_sum_of_calls(object_, methods, maximum_calls, minimum_calls=1, include_arguments=True):
    """
    Instruments the given methods on the given object to verify that the total sum of
    calls made to the methods falls between ``minimum_calls`` and ``maximum_calls``.

    On failure, a per-stack-trace call report is printed to stdout before the
    assertion fires (assertion errors don't render multi-line values well).
    """
    # Wrap every requested method so each call records its stack trace.
    mocks = {
        method: StackTraceCounter.capture_call(
            getattr(object_, method),
            stack_depth=7,
            include_arguments=include_arguments
        )
        for method in methods
    }

    with patch.multiple(object_, **mocks):
        yield

    call_count = sum(capture_fn.stack_counter.total_calls for capture_fn in mocks.values())

    # Assertion errors don't handle multi-line values, so pretty-print to std-out instead
    if not minimum_calls <= call_count <= maximum_calls:
        messages = ["Expected between {} and {} calls, {} were made.\n\n".format(
            minimum_calls,
            maximum_calls,
            call_count,
        )]
        for method_name, capture_fn in mocks.items():
            stack_counter = capture_fn.stack_counter
            messages.append("{!r} was called {} times:\n".format(
                method_name,
                stack_counter.total_calls
            ))
            for stack in stack_counter:
                messages.append(" called {} times:\n\n".format(stack_counter.stack_calls(stack)))
                messages.append(" " + " ".join(traceback.format_list(stack)))
                messages.append("\n\n")
                if include_arguments:
                    for (args, kwargs), count in stack_counter[stack].items():
                        messages.append(" called {} times with:\n".format(count))
                        messages.append(" args: {}\n".format(args))
                        messages.append(" kwargs: {}\n\n".format(dict(kwargs)))

        # BUGFIX: the original used the Python-2-only `print expr` statement,
        # which is a syntax error on Python 3; the call form below prints the
        # same single string on both versions.
        print("".join(messages))

    # verify the counter actually worked by ensuring we have counted greater than (or equal to) the minimum calls
    assert_greater_equal(call_count, minimum_calls)

    # now verify the number of actual calls is less than (or equal to) the expected maximum
    assert_less_equal(call_count, maximum_calls)
def mongo_uses_error_check(store):
    """
    Does mongo use the error check as a separate message?

    Returns True when ``store`` (or, for mixed stores, any of its
    sub-modulestores) reports a Mongo wire version <= 1; False otherwise,
    including for stores that expose neither attribute.
    """
    if hasattr(store, 'mongo_wire_version'):
        return store.mongo_wire_version() <= 1
    if hasattr(store, 'modulestores'):
        # Recurse into mixed modulestores; a generator avoids materializing
        # an intermediate list just to feed any().
        return any(mongo_uses_error_check(substore) for substore in store.modulestores)
    return False
@contextmanager
def check_mongo_calls_range(max_finds=float("inf"), min_finds=0, max_sends=None, min_sends=None):
    """
    Count pymongo 'find' traffic (query / get_more) and, optionally, 'send'
    traffic (insert / update / remove) issued inside the ``with`` block, then
    assert each count lies within the requested bounds.

    :param max_finds: the maximum number of find calls expected
    :param min_finds: the minimum number of find calls expected
    :param max_sends: If non-none, make sure number of send calls are <=max_sends
    :param min_sends: If non-none, make sure number of send calls are >=min_sends
    """
    count_sends = not (max_sends is None and min_sends is None)
    with check_sum_of_calls(
        pymongo.message,
        ['query', 'get_more'],
        max_finds,
        min_finds,
    ):
        if not count_sends:
            yield
        else:
            # mongo < 2.6 uses insert, update, delete and _do_batched_insert. >= 2.6 _do_batched_write
            send_methods = ['insert', 'update', 'delete', '_do_batched_write_command', '_do_batched_insert', ]
            send_upper = float("inf") if max_sends is None else max_sends
            send_lower = 0 if min_sends is None else min_sends
            with check_sum_of_calls(pymongo.message, send_methods, send_upper, send_lower):
                yield
@contextmanager
def check_mongo_calls(num_finds=0, num_sends=None):
    """
    Assert that exactly ``num_finds`` find calls (incl. find_one) — and, when
    ``num_sends`` is not None, exactly ``num_sends`` send calls (insert,
    update, remove) — are issued to Mongo inside the ``with`` block.

    :param num_finds: the exact number of find calls expected
    :param num_sends: If none, don't instrument the send calls. If non-none,
        count and compare to the given int value.
    """
    # An exact count is simply a range whose bounds coincide.
    with check_mongo_calls_range(max_finds=num_finds, min_finds=num_finds,
                                 max_sends=num_sends, min_sends=num_sends):
        yield
# This dict represents the attribute keys for a course's 'about' info.
# It is consumed by CourseAboutFactory below to seed 'about' xblocks.
# Note: The 'video' attribute is intentionally excluded as it must be
# handled separately; its value maps to an alternate key name.
# Reference : from openedx.core.djangoapps.models.course_details.py
ABOUT_ATTRIBUTES = {
    'effort': "Testing effort",
}
class CourseAboutFactory(XModuleFactory):
    """
    Factory for XModule course about.
    """

    @classmethod
    def _create(cls, target_class, **kwargs):  # pylint: disable=unused-argument
        """
        Create the 'about' xblocks for a course.

        Uses **kwargs:
            effort: effort information
            video: video link
        """
        user_id = kwargs.pop('user_id', None)
        course_id = kwargs.pop("course_id")
        course_runtime = kwargs.pop("course_runtime")
        store = kwargs.pop('modulestore')

        def _write_about_block(block_key, data):
            """Create one 'about' xblock and persist its data."""
            block = store.create_xblock(course_runtime, course_id, 'about', block_key)
            block.data = data
            store.update_item(block, user_id, allow_not_found=True)

        for about_key, about_value in ABOUT_ATTRIBUTES.items():
            _write_about_block(about_key, about_value)
        # 'video' is deliberately not part of ABOUT_ATTRIBUTES and is
        # written with its own hard-coded test value.
        _write_about_block('video', "www.youtube.com/embed/testing-video-link")
| agpl-3.0 |
Desarrollo-CeSPI/meran | dev-plugins/node64/lib/node_modules/npm/node_modules/node-gyp/legacy/tools/gyp/pylib/gyp/generator/ninja.py | 38 | 49879 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import copy
import gyp
import gyp.common
import gyp.system_test
import gyp.xcode_emulation
import os.path
import re
import subprocess
import sys
import gyp.ninja_syntax as ninja_syntax
# Variables gyp substitutes into target definitions before this generator
# sees them.  NOTE(review): 'SHARED_LIB_SUFFIX' is absent here — presumably
# filled in per-OS by CalculateVariables; confirm before relying on it.
generator_default_variables = {
  'EXECUTABLE_PREFIX': '',
  'EXECUTABLE_SUFFIX': '',
  'STATIC_LIB_PREFIX': 'lib',
  'STATIC_LIB_SUFFIX': '.a',
  'SHARED_LIB_PREFIX': 'lib',

  # Gyp expects the following variables to be expandable by the build
  # system to the appropriate locations.  Ninja prefers paths to be
  # known at gyp time.  To resolve this, introduce special
  # variables starting with $! (which begin with a $ so gyp knows it
  # should be treated as a path, but is otherwise an invalid
  # ninja/shell variable) that are passed to gyp here but expanded
  # before writing out into the target .ninja files; see
  # ExpandSpecial.
  'INTERMEDIATE_DIR': '$!INTERMEDIATE_DIR',
  'SHARED_INTERMEDIATE_DIR': '$!PRODUCT_DIR/gen',
  'PRODUCT_DIR': '$!PRODUCT_DIR',

  # Special variables that may be used by gyp 'rule' targets.
  # We generate definitions for these variables on the fly when processing a
  # rule.
  'RULE_INPUT_ROOT': '${root}',
  'RULE_INPUT_DIRNAME': '${dirname}',
  'RULE_INPUT_PATH': '${source}',
  'RULE_INPUT_EXT': '${ext}',
  'RULE_INPUT_NAME': '${name}',
}

# TODO: enable cross compiling once we figure out:
# - how to not build extra host objects in the non-cross-compile case.
# - how to decide what the host compiler is (should not just be $cc).
# - need ld_host as well.
generator_supports_multiple_toolsets = False
def StripPrefix(arg, prefix):
  """Return |arg| with a leading |prefix| removed; |arg| unchanged otherwise."""
  return arg[len(prefix):] if arg.startswith(prefix) else arg
def QuoteShellArgument(arg):
  """Quote a string such that the shell reads it as a single argument.

  Rather than enumerating every character the shell treats specially, a
  small whitelist of known-safe characters passes through untouched;
  anything else is single-quoted, with embedded single quotes escaped via
  the standard '"'"' trick."""
  if re.match(r'^[a-zA-Z0-9_=-]+$', arg):
    return arg  # Entirely safe characters; no quoting needed.
  return "'%s'" % arg.replace("'", "'\"'\"'")
def InvertRelativePath(path):
  """Given a relative path like foo/bar, return the inverse relative path:
  the path from the relative path back to the origin dir.

  E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
  should always produce the empty string."""
  if not path:
    return path

  # Only simple descending paths are supported for now.
  assert '..' not in path, path
  components = path.split(os.path.sep)
  return os.path.sep.join('..' for _ in components)
class Target:
  """Paths produced while building a single gyp target.

  Building a target is conceptually a pipeline:

    1) actions/rules/copies generate sources/resources,
    2) compiles generate .o files,
    3) link generates a binary,
    4) bundle merges everything into a mac bundle,

  and any stage may be absent.  Dependents usually only need the last
  output, but some (e.g. a link against a static library) must reach into
  the middle of the pipeline.  Members hold concrete single-file paths;
  methods derive values such as "the last output of this target"."""

  def __init__(self, type):
    # Gyp type ("static_library", etc.) of this target.
    self.type = type
    # Stamp file: input deps required before dependent *actions* may run.
    self.preaction_stamp = None
    # Stamp file: input deps required before dependent *compiles* may run.
    self.precompile_stamp = None
    # Marks completion of this target's actions/rules/copies, if any.
    self.actions_stamp = None
    # Output of the link step, if any.
    self.binary = None
    # Marks completion of the mac bundle step, if any.
    self.bundle = None

  def Linkable(self):
    """True if other targets can link against this one."""
    return self.type in ('shared_library', 'static_library')

  def PreActionInput(self):
    """Path a dependent action step should depend on, or None."""
    return self.FinalOutput() or self.preaction_stamp

  def PreCompileInput(self):
    """Path a dependent compile step should depend on, or None."""
    return self.actions_stamp or self.precompile_stamp

  def FinalOutput(self):
    """The last output of this target, reflecting all prior stages."""
    return self.bundle or self.binary or self.actions_stamp
# A small discourse on paths as used within the Ninja build:
# All files we produce (both at gyp and at build time) appear in the
# build directory (e.g. out/Debug).
#
# Paths within a given .gyp file are always relative to the directory
# containing the .gyp file. Call these "gyp paths". This includes
# sources as well as the starting directory a given gyp rule/action
# expects to be run from. We call the path from the source root to
# the gyp file the "base directory" within the per-.gyp-file
# NinjaWriter code.
#
# All paths as written into the .ninja files are relative to the build
# directory. Call these paths "ninja paths".
#
# We translate between these two notions of paths with two helper
# functions:
#
# - GypPathToNinja translates a gyp path (i.e. relative to the .gyp file)
# into the equivalent ninja path.
#
# - GypPathToUniqueOutput translates a gyp path into a ninja path to write
# an output file; the result can be namespaced such that it is unique
# to the input file name as well as the output target name.
class NinjaWriter:
  """Writes the .ninja build statements for a single gyp target."""

  def __init__(self, target_outputs, base_dir, build_dir, output_file, flavor,
               abs_build_dir=None):
    """
    target_outputs: map from qualified target name to its Target object,
                    for dependencies already processed
    base_dir: path from source root to directory containing this gyp file,
              by gyp semantics, all input paths are relative to this
    build_dir: path from source root to build output
    output_file: file-like object the ninja syntax is written into
    flavor: OS flavor string; 'mac' and 'win' get special handling below
    abs_build_dir: absolute path to the build directory
    """
    self.target_outputs = target_outputs
    self.base_dir = base_dir
    self.build_dir = build_dir
    self.ninja = ninja_syntax.Writer(output_file)
    self.flavor = flavor
    self.abs_build_dir = abs_build_dir
    # Object-file extension differs on Windows.
    self.obj_ext = '.obj' if flavor == 'win' else '.o'

    # Relative path from build output dir to base dir.
    self.build_to_base = os.path.join(InvertRelativePath(build_dir), base_dir)
    # Relative path from base dir to build dir.
    self.base_to_build = os.path.join(InvertRelativePath(base_dir), build_dir)
def ExpandSpecial(self, path, product_dir=None):
  """Expand specials like $!PRODUCT_DIR in |path|.

  If |product_dir| is None, assumes the cwd is already the product
  dir.  Otherwise, |product_dir| is the relative path to the product
  dir.
  """
  PRODUCT_DIR = '$!PRODUCT_DIR'
  if PRODUCT_DIR in path:
    if product_dir:
      path = path.replace(PRODUCT_DIR, product_dir)
    else:
      # cwd is the product dir: drop the variable together with the path
      # separator that follows it ('/' or '\'), else collapse it to '.'.
      # Order matters — the separator forms must be tried first.
      path = path.replace(PRODUCT_DIR + '/', '')
      path = path.replace(PRODUCT_DIR + '\\', '')
      path = path.replace(PRODUCT_DIR, '.')

  INTERMEDIATE_DIR = '$!INTERMEDIATE_DIR'
  if INTERMEDIATE_DIR in path:
    int_dir = self.GypPathToUniqueOutput('gen')
    # GypPathToUniqueOutput generates a path relative to the product dir,
    # so insert product_dir in front if it is provided.
    path = path.replace(INTERMEDIATE_DIR,
                        os.path.join(product_dir or '', int_dir))

  return os.path.normpath(path)
def ExpandRuleVariables(self, path, root, dirname, source, ext, name):
  """Substitute the RULE_INPUT_* placeholders in |path| with the concrete
  per-source values supplied by the caller."""
  # Table-driven form; substitution order matches the original code.
  substitutions = (
      ('RULE_INPUT_ROOT', root),
      ('RULE_INPUT_DIRNAME', dirname),
      ('RULE_INPUT_PATH', source),
      ('RULE_INPUT_EXT', ext),
      ('RULE_INPUT_NAME', name),
  )
  for variable, value in substitutions:
    path = path.replace(generator_default_variables[variable], value)
  return path
def GypPathToNinja(self, path, env=None):
  """Translate a gyp path to a ninja path, optionally expanding environment
  variable references in |path| with |env|.

  See the above discourse on path conversions."""
  if env:
    path = gyp.xcode_emulation.ExpandEnvVars(path, env)
  if path.startswith('$!'):
    # Special gyp-time variable; resolved by ExpandSpecial, not ninja.
    return self.ExpandSpecial(path)
  # Any remaining '$' would leak an unexpanded variable into ninja.
  assert '$' not in path, path
  return os.path.normpath(os.path.join(self.build_to_base, path))
def GypPathToUniqueOutput(self, path, qualified=True):
  """Translate a gyp path to a ninja path for writing output.

  If qualified is True, qualify the resulting filename with the name
  of the target.  This is necessary when e.g. compiling the same
  path twice for two separate output targets.

  See the above discourse on path conversions."""
  path = self.ExpandSpecial(path)
  assert not path.startswith('$'), path

  # Translate the path following this scheme:
  #   Input: foo/bar.gyp, target targ, references baz/out.o
  #   Output: obj/foo/baz/targ.out.o (if qualified)
  #           obj/foo/baz/out.o (otherwise)
  #     (and obj.host instead of obj for cross-compiles)
  #
  # Why this scheme and not some other one?
  # 1) for a given input, you can compute all derived outputs by matching
  #    its path, even if the input is brought via a gyp file with '..'.
  # 2) simple files like libraries and stamps have a simple filename.

  obj = 'obj'
  if self.toolset != 'target':
    obj += '.' + self.toolset

  path_dir, path_basename = os.path.split(path)
  if qualified:
    path_basename = self.name + '.' + path_basename
  return os.path.normpath(os.path.join(obj, self.base_dir, path_dir,
                                       path_basename))
def WriteCollapsedDependencies(self, name, targets):
  """Given a list of targets, return a path for a single file
  representing the result of building all the targets or None.

  Uses a stamp file if necessary."""
  # Callers must pre-filter Nones; the assert makes that contract explicit.
  assert targets == filter(None, targets), targets
  if len(targets) == 0:
    return None
  if len(targets) > 1:
    # Multiple outputs are collapsed behind a single stamp edge so
    # dependents need only one path.
    stamp = self.GypPathToUniqueOutput(name + '.stamp')
    targets = self.ninja.build(stamp, 'stamp', targets)
    self.ninja.newline()
  return targets[0]
def WriteSpec(self, spec, config_name):
  """The main entry point for NinjaWriter: write the build rules for a spec.

  Returns a Target object, which represents the output paths for this spec.
  Returns None if there are no outputs (e.g. a settings-only 'none' type
  target)."""

  self.config_name = config_name
  self.name = spec['target_name']
  self.toolset = spec['toolset']
  config = spec['configurations'][config_name]
  self.target = Target(spec['type'])

  self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
  if self.flavor == 'mac':
    self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
  else:
    self.xcode_settings = None

  # Compute predepends for all rules.
  # actions_depends is the dependencies this target depends on before running
  # any of its action/rule/copy steps.
  # compile_depends is the dependencies this target depends on before running
  # any of its compile steps.
  actions_depends = []
  compile_depends = []
  # TODO(evan): it is rather confusing which things are lists and which
  # are strings. Fix these.
  if 'dependencies' in spec:
    for dep in spec['dependencies']:
      if dep in self.target_outputs:
        target = self.target_outputs[dep]
        actions_depends.append(target.PreActionInput())
        compile_depends.append(target.PreCompileInput())
    # Drop Nones (deps with no relevant outputs) before collapsing.
    actions_depends = filter(None, actions_depends)
    compile_depends = filter(None, compile_depends)
    actions_depends = self.WriteCollapsedDependencies('actions_depends',
                                                      actions_depends)
    compile_depends = self.WriteCollapsedDependencies('compile_depends',
                                                      compile_depends)
    self.target.preaction_stamp = actions_depends
    self.target.precompile_stamp = compile_depends

  # Write out actions, rules, and copies.  These must happen before we
  # compile any sources, so compute a list of predependencies for sources
  # while we do it.
  extra_sources = []
  mac_bundle_depends = []
  self.target.actions_stamp = self.WriteActionsRulesCopies(
      spec, extra_sources, actions_depends, mac_bundle_depends)

  # If we have actions/rules/copies, we depend directly on those, but
  # otherwise we depend on dependent target's actions/rules/copies etc.
  # We never need to explicitly depend on previous target's link steps,
  # because no compile ever depends on them.
  compile_depends_stamp = (self.target.actions_stamp or compile_depends)

  # Write out the compilation steps, if any.
  link_deps = []
  sources = spec.get('sources', []) + extra_sources
  if sources:
    link_deps = self.WriteSources(
        config_name, config, sources, compile_depends_stamp,
        gyp.xcode_emulation.MacPrefixHeader(
            self.xcode_settings, self.GypPathToNinja,
            lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang)))
    # Some actions/rules output 'sources' that are already object files.
    link_deps += [self.GypPathToNinja(f)
                  for f in sources if f.endswith(self.obj_ext)]

  # Write out a link step, if needed.
  output = None
  if link_deps or self.target.actions_stamp or actions_depends:
    output = self.WriteTarget(spec, config_name, config, link_deps,
                              self.target.actions_stamp or actions_depends)
    if self.is_mac_bundle:
      mac_bundle_depends.append(output)

  # Bundle all of the above together, if needed.
  if self.is_mac_bundle:
    output = self.WriteMacBundle(spec, mac_bundle_depends)

  if not output:
    return None

  if self.name != output and self.toolset == 'target':
    # Write a short name to build this target.  This benefits both the
    # "build chrome" case as well as the gyp tests, which expect to be
    # able to run actions and build libraries by their short name.
    self.ninja.build(self.name, 'phony', output)

  assert self.target.FinalOutput(), output
  return self.target
def WriteActionsRulesCopies(self, spec, extra_sources, prebuild,
                            mac_bundle_depends):
  """Write out the Actions, Rules, and Copies steps.  Return a path
  representing the outputs of these steps (a collapsed stamp, or None
  when the spec has no such steps)."""
  outputs = []
  extra_mac_bundle_resources = []

  if 'actions' in spec:
    outputs += self.WriteActions(spec['actions'], extra_sources, prebuild,
                                 extra_mac_bundle_resources)
  if 'rules' in spec:
    outputs += self.WriteRules(spec['rules'], extra_sources, prebuild,
                               extra_mac_bundle_resources)
  if 'copies' in spec:
    outputs += self.WriteCopies(spec['copies'], prebuild)

  stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs)

  if self.is_mac_bundle:
    mac_bundle_resources = spec.get('mac_bundle_resources', []) + \
                           extra_mac_bundle_resources
    self.WriteMacBundleResources(mac_bundle_resources, mac_bundle_depends)
    self.WriteMacInfoPlist(mac_bundle_depends)

  return stamp
def GenerateDescription(self, verb, message, fallback):
  """Return a human-readable description for a build step.

  |verb| is the short summary, e.g. ACTION or RULE.
  |message| is a hand-written description, or None if not available.
  |fallback| is the gyp-level name of the step, usable as a fallback.
  """
  if self.toolset != 'target':
    verb = '%s(%s)' % (verb, self.toolset)
  if message:
    return '%s %s' % (verb, self.ExpandSpecial(message))
  return '%s %s: %s' % (verb, self.name, fallback)
def WriteActions(self, actions, extra_sources, prebuild,
                 extra_mac_bundle_resources):
  """Write one ninja rule + build edge per gyp action; return all outputs."""
  # Actions cd into the base directory.
  env = self.GetXcodeEnv()
  all_outputs = []
  for action in actions:
    # First write out a rule for the action.
    # Characters that are invalid in a ninja rule name get squashed to '_'.
    name = re.sub(r'[ {}$]', '_', action['action_name'])
    description = self.GenerateDescription('ACTION',
                                           action.get('message', None),
                                           name)
    rule_name = self.WriteNewNinjaRule(name, action['action'], description,
                                       env=env)

    inputs = [self.GypPathToNinja(i, env) for i in action['inputs']]
    if int(action.get('process_outputs_as_sources', False)):
      extra_sources += action['outputs']
    if int(action.get('process_outputs_as_mac_bundle_resources', False)):
      extra_mac_bundle_resources += action['outputs']
    outputs = [self.GypPathToNinja(o, env) for o in action['outputs']]

    # Then write out an edge using the rule.
    self.ninja.build(outputs, rule_name, inputs,
                     order_only=prebuild)
    all_outputs += outputs

    self.ninja.newline()

  return all_outputs
def WriteRules(self, rules, extra_sources, prebuild,
               extra_mac_bundle_resources):
  """Write one ninja rule per gyp rule, plus one build edge per rule source;
  return all generated outputs."""
  all_outputs = []
  for rule in rules:
    # First write out a rule for the rule action.
    name = rule['rule_name']
    args = rule['action']
    description = self.GenerateDescription(
        'RULE',
        rule.get('message', None),
        ('%s ' + generator_default_variables['RULE_INPUT_PATH']) % name)
    rule_name = self.WriteNewNinjaRule(name, args, description)

    # TODO: if the command references the outputs directly, we should
    # simplify it to just use $out.

    # Rules can potentially make use of some special variables which
    # must vary per source file.
    # Compute the list of variables we'll need to provide.
    special_locals = ('source', 'root', 'dirname', 'ext', 'name')
    needed_variables = set(['source'])
    for argument in args:
      for var in special_locals:
        if ('${%s}' % var) in argument:
          needed_variables.add(var)

    # For each source file, write an edge that generates all the outputs.
    for source in rule.get('rule_sources', []):
      dirname, basename = os.path.split(source)
      root, ext = os.path.splitext(basename)

      # Gather the list of outputs, expanding $vars if possible.
      outputs = []
      for output in rule['outputs']:
        outputs.append(self.ExpandRuleVariables(output, root, dirname,
                                                source, ext, basename))

      if int(rule.get('process_outputs_as_sources', False)):
        extra_sources += outputs
      if int(rule.get('process_outputs_as_mac_bundle_resources', False)):
        extra_mac_bundle_resources += outputs

      extra_bindings = []
      for var in needed_variables:
        if var == 'root':
          extra_bindings.append(('root', root))
        elif var == 'dirname':
          extra_bindings.append(('dirname', dirname))
        elif var == 'source':
          # '$source' is a parameter to the rule action, which means
          # it shouldn't be converted to a Ninja path.  But we don't
          # want $!PRODUCT_DIR in there either.
          source_expanded = self.ExpandSpecial(source, self.base_to_build)
          extra_bindings.append(('source', source_expanded))
        elif var == 'ext':
          extra_bindings.append(('ext', ext))
        elif var == 'name':
          extra_bindings.append(('name', basename))
        else:
          # needed_variables only ever contains members of special_locals,
          # all of which are handled above — this branch looks unreachable
          # and exists to fail loudly on an unexpected variable name.
          assert var == None, repr(var)

      inputs = map(self.GypPathToNinja, rule.get('inputs', []))
      outputs = map(self.GypPathToNinja, outputs)
      self.ninja.build(outputs, rule_name, self.GypPathToNinja(source),
                       implicit=inputs,
                       order_only=prebuild,
                       variables=extra_bindings)

      all_outputs.extend(outputs)

  return all_outputs
def WriteCopies(self, copies, prebuild):
  """Write a 'copy' edge per file in each gyp 'copies' entry; return the
  destination paths produced."""
  outputs = []
  env = self.GetXcodeEnv()
  for copy in copies:
    for path in copy['files']:
      # Normalize the path so trailing slashes don't confuse us.
      path = os.path.normpath(path)
      basename = os.path.split(path)[1]
      src = self.GypPathToNinja(path, env)
      dst = self.GypPathToNinja(os.path.join(copy['destination'], basename),
                                env)
      outputs += self.ninja.build(dst, 'copy', src, order_only=prebuild)

  return outputs
def WriteMacBundleResources(self, resources, bundle_depends):
  """Writes ninja edges for 'mac_bundle_resources'."""
  for output, res in gyp.xcode_emulation.GetMacBundleResources(
      self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']),
      self.xcode_settings, map(self.GypPathToNinja, resources)):
    # Each resource is copied into the bundle via the mac_tool helper.
    self.ninja.build(output, 'mac_tool', res,
                     variables=[('mactool_cmd', 'copy-bundle-resource')])
    bundle_depends.append(output)
def WriteMacInfoPlist(self, bundle_depends):
  """Write build rules for bundle Info.plist files."""
  info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
      self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']),
      self.xcode_settings, self.GypPathToNinja)
  if not info_plist:
    return
  if defines:
    # Create an intermediate file to store preprocessed results.
    intermediate_plist = self.GypPathToUniqueOutput(
        os.path.basename(info_plist))
    defines = ' '.join(
        [QuoteShellArgument(ninja_syntax.escape('-D' + d)) for d in defines])
    info_plist = self.ninja.build(intermediate_plist, 'infoplist', info_plist,
                                  variables=[('defines',defines)])
  # Pass the Xcode-style environment through to the copy-info-plist tool so
  # it can substitute variables inside the plist.
  env = self.GetXcodeEnv(additional_settings=extra_env)
  env = self.ComputeExportEnvString(env)
  self.ninja.build(out, 'mac_tool', info_plist,
                   variables=[('mactool_cmd', 'copy-info-plist'),
                              ('env', env)])
  bundle_depends.append(out)
def WriteSources(self, config_name, config, sources, predepends,
                 precompiled_header):
  """Write build rules to compile all of |sources|.

  Returns the list of object-file outputs (the link inputs)."""
  if self.toolset == 'host':
    # Host-toolset compiles use the host compiler variables.
    self.ninja.variable('cc', '$cc_host')
    self.ninja.variable('cxx', '$cxx_host')

  if self.flavor == 'mac':
    cflags = self.xcode_settings.GetCflags(config_name)
    cflags_c = self.xcode_settings.GetCflagsC(config_name)
    cflags_cc = self.xcode_settings.GetCflagsCC(config_name)
    # ObjC/ObjC++ flags extend the plain C/C++ flags.
    cflags_objc = ['$cflags_c'] + \
                  self.xcode_settings.GetCflagsObjC(config_name)
    cflags_objcc = ['$cflags_cc'] + \
                   self.xcode_settings.GetCflagsObjCC(config_name)
  else:
    cflags = config.get('cflags', [])
    cflags_c = config.get('cflags_c', [])
    cflags_cc = config.get('cflags_cc', [])

  self.WriteVariableList('defines',
      [QuoteShellArgument(ninja_syntax.escape('-D' + d))
       for d in config.get('defines', [])])
  self.WriteVariableList('includes',
                         ['-I' + self.GypPathToNinja(i)
                          for i in config.get('include_dirs', [])])

  pch_commands = precompiled_header.GetGchBuildCommands()
  if self.flavor == 'mac':
    self.WriteVariableList('cflags_pch_c',
                           [precompiled_header.GetInclude('c')])
    self.WriteVariableList('cflags_pch_cc',
                           [precompiled_header.GetInclude('cc')])
    self.WriteVariableList('cflags_pch_objc',
                           [precompiled_header.GetInclude('m')])
    self.WriteVariableList('cflags_pch_objcc',
                           [precompiled_header.GetInclude('mm')])

  self.WriteVariableList('cflags', map(self.ExpandSpecial, cflags))
  self.WriteVariableList('cflags_c', map(self.ExpandSpecial, cflags_c))
  self.WriteVariableList('cflags_cc', map(self.ExpandSpecial, cflags_cc))
  if self.flavor == 'mac':
    self.WriteVariableList('cflags_objc', map(self.ExpandSpecial,
                                              cflags_objc))
    self.WriteVariableList('cflags_objcc', map(self.ExpandSpecial,
                                               cflags_objcc))
  self.ninja.newline()
  outputs = []
  for source in sources:
    filename, ext = os.path.splitext(source)
    ext = ext[1:]
    # Pick the compile rule from the file extension; unknown extensions
    # (headers, resources, ...) are silently skipped.
    if ext in ('cc', 'cpp', 'cxx'):
      command = 'cxx'
    elif ext in ('c', 's', 'S'):
      command = 'cc'
    elif self.flavor == 'mac' and ext == 'm':
      command = 'objc'
    elif self.flavor == 'mac' and ext == 'mm':
      command = 'objcxx'
    else:
      # TODO: should we assert here on unexpected extensions?
      continue
    input = self.GypPathToNinja(source)
    output = self.GypPathToUniqueOutput(filename + self.obj_ext)
    implicit = precompiled_header.GetObjDependencies([input], [output])
    self.ninja.build(output, command, input,
                     implicit=[gch for _, _, gch in implicit],
                     order_only=predepends)
    outputs.append(output)

  self.WritePchTargets(pch_commands)

  self.ninja.newline()
  return outputs
def WritePchTargets(self, pch_commands):
  """Writes ninja rules to compile prefix headers."""
  if not pch_commands:
    return

  for gch, lang_flag, lang, input in pch_commands:
    # Pick the per-language pch flag variable name...
    var_name = {
      'c': 'cflags_pch_c',
      'cc': 'cflags_pch_cc',
      'm': 'cflags_pch_objc',
      'mm': 'cflags_pch_objcc',
    }[lang]

    # ...and the matching compile rule for the language.
    cmd = { 'c': 'cc', 'cc': 'cxx', 'm': 'objc', 'mm': 'objcxx', }.get(lang)
    self.ninja.build(gch, cmd, input, variables=[(var_name, lang_flag)])
def WriteLink(self, spec, config_name, config, link_deps):
  """Write out a link step.  Returns the path to the output."""

  command = {
    'executable':      'link',
    'loadable_module': 'solink_module',
    'shared_library':  'solink',
  }[spec['type']]

  implicit_deps = set()

  if 'dependencies' in spec:
    # Two kinds of dependencies:
    # - Linkable dependencies (like a .a or a .so): add them to the link line.
    # - Non-linkable dependencies (like a rule that generates a file
    #   and writes a stamp file): add them to implicit_deps
    extra_link_deps = set()
    for dep in spec['dependencies']:
      target = self.target_outputs.get(dep)
      if not target:
        continue
      linkable = target.Linkable()
      if linkable:
        extra_link_deps.add(target.binary)

      final_output = target.FinalOutput()
      if not linkable or final_output != target.binary:
        implicit_deps.add(final_output)

    link_deps.extend(list(extra_link_deps))

  extra_bindings = []
  if self.is_mac_bundle:
    output = self.ComputeMacBundleBinaryOutput()
  else:
    output = self.ComputeOutput(spec)
    extra_bindings.append(('postbuilds',
                           self.GetPostbuildCommand(spec, output, output)))

  if self.flavor == 'mac':
    ldflags = self.xcode_settings.GetLdflags(config_name,
        self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']),
        self.GypPathToNinja)
  else:
    ldflags = config.get('ldflags', [])
  self.WriteVariableList('ldflags',
                         gyp.common.uniquer(map(self.ExpandSpecial,
                                                ldflags)))

  libraries = gyp.common.uniquer(map(self.ExpandSpecial,
                                     spec.get('libraries', [])))
  if self.flavor == 'mac':
    libraries = self.xcode_settings.AdjustLibraries(libraries)
  self.WriteVariableList('libs', libraries)

  if command in ('solink', 'solink_module'):
    # Shared objects embed their own basename as the soname.
    extra_bindings.append(('soname', os.path.split(output)[1]))

  self.ninja.build(output, command, link_deps,
                   implicit=list(implicit_deps),
                   variables=extra_bindings)
  return output
def WriteTarget(self, spec, config_name, config, link_deps, compile_deps):
  """Produce the target's final binary (or stamp) and record it on
  self.target; returns that path."""
  if spec['type'] == 'none':
    # TODO(evan): don't call this function for 'none' target types, as
    # it doesn't do anything, and we fake out a 'binary' with a stamp file.
    self.target.binary = compile_deps
  elif spec['type'] == 'static_library':
    self.target.binary = self.ComputeOutput(spec)
    self.ninja.build(self.target.binary, 'alink', link_deps,
                     order_only=compile_deps,
                     variables=[('postbuilds', self.GetPostbuildCommand(
                         spec, self.target.binary, self.target.binary))])
  else:
    # Executables and shared objects go through the full link step.
    self.target.binary = self.WriteLink(spec, config_name, config, link_deps)
  return self.target.binary
def WriteMacBundle(self, spec, mac_bundle_depends):
  """Assemble the mac bundle from its parts; returns the bundle path."""
  assert self.is_mac_bundle
  # Frameworks/loadable modules need the versioned directory layout.
  package_framework = spec['type'] in ('shared_library', 'loadable_module')
  output = self.ComputeMacBundleOutput()
  postbuild = self.GetPostbuildCommand(spec, output, self.target.binary,
                                       is_command_start=not package_framework)
  variables = []
  if postbuild:
    variables.append(('postbuilds', postbuild))
  if package_framework:
    variables.append(('version', self.xcode_settings.GetFrameworkVersion()))
    self.ninja.build(output, 'package_framework', mac_bundle_depends,
                     variables=variables)
  else:
    # Plain bundles just need a stamp collapsing their contents.
    self.ninja.build(output, 'stamp', mac_bundle_depends,
                     variables=variables)
  self.target.bundle = output
  return output
def GetXcodeEnv(self, additional_settings=None):
  """Returns the variables Xcode would set for build steps."""
  # Requires an absolute build dir so the emulation layer can compute
  # absolute SRCROOT/BUILT_PRODUCTS_DIR style values.
  assert self.abs_build_dir
  abs_build_dir = self.abs_build_dir
  return gyp.xcode_emulation.GetXcodeEnv(
      self.xcode_settings, abs_build_dir,
      os.path.join(abs_build_dir, self.build_to_base), self.config_name,
      additional_settings)
def GetXcodePostbuildEnv(self):
  """Returns the variables Xcode would set for postbuild steps."""
  postbuild_settings = {}
  # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
  # TODO(thakis): It would be nice to have some general mechanism instead.
  strip_save_file = self.xcode_settings.GetPerTargetSetting(
      'CHROMIUM_STRIP_SAVE_FILE')
  if strip_save_file:
    postbuild_settings['CHROMIUM_STRIP_SAVE_FILE'] = self.GypPathToNinja(
        strip_save_file)
  return self.GetXcodeEnv(additional_settings=postbuild_settings)
def GetPostbuildCommand(self, spec, output, output_binary,
                        is_command_start=False):
  """Returns a shell command that runs all the postbuilds, and removes
  |output| if any of them fails.  If |is_command_start| is False, then the
  returned string will start with ' && '."""
  if not self.xcode_settings or spec['type'] == 'none' or not output:
    return ''
  output = QuoteShellArgument(output)
  target_postbuilds = self.xcode_settings.GetTargetPostbuilds(
      self.config_name, output, QuoteShellArgument(output_binary), quiet=True)
  postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(
      spec, self.GypPathToNinja, quiet=True)
  postbuilds = target_postbuilds + postbuilds
  if not postbuilds:
    return ''
  env = self.ComputeExportEnvString(self.GetXcodePostbuildEnv())
  # Every postbuild runs even after a failure; F records the first nonzero
  # exit code so the overall command can fail (and remove |output|) at the
  # end.  ($$ is ninja's escape for a literal $.)
  commands = env + ' F=0; ' + \
      ' '.join([ninja_syntax.escape(command) + ' || F=$$?;'
                for command in postbuilds])
  # NOTE(review): |env| is already the prefix of |commands| above, so it is
  # concatenated twice here.  Re-exporting looks harmless but redundant —
  # confirm intent before changing.
  command_string = env + commands + ' ((exit $$F) || rm -rf %s) ' % output + \
      '&& exit $$F)'
  if is_command_start:
    return '(' + command_string + ' && '
  else:
    return '$ && (' + command_string
def ComputeExportEnvString(self, env):
  """Given an environment, return a shell snippet like

    'export FOO=foo; export BAR="${FOO} bar";'

  that exports |env|, with keys emitted in dependency (topological) order."""
  parts = []
  for key in gyp.xcode_emulation.TopologicallySortedEnvVarKeys(env):
    value = ninja_syntax.escape(gyp.common.EncodePOSIXShellArgument(env[key]))
    parts.append('export %s=%s;' % (key, value))
  return ' '.join(parts)
def ComputeMacBundleOutput(self):
  """Return the 'output' (full output path) to a bundle output directory."""
  assert self.is_mac_bundle
  path = self.ExpandSpecial(generator_default_variables['PRODUCT_DIR'])
  # e.g. <product dir>/Foo.app
  return os.path.join(path, self.xcode_settings.GetWrapperName())
def ComputeMacBundleBinaryOutput(self):
  """Return the 'output' (full output path) to the binary in a bundle."""
  assert self.is_mac_bundle
  path = self.ExpandSpecial(generator_default_variables['PRODUCT_DIR'])
  # e.g. <product dir>/Foo.app/Contents/MacOS/Foo
  return os.path.join(path, self.xcode_settings.GetExecutablePath())
def ComputeOutputFileName(self, spec, type=None):
"""Compute the filename of the final output for the current target."""
if not type:
type = spec['type']
default_variables = copy.copy(generator_default_variables)
CalculateVariables(default_variables, {'flavor': self.flavor})
# Compute filename prefix: the product prefix, or a default for
# the product type.
DEFAULT_PREFIX = {
'loadable_module': default_variables['SHARED_LIB_PREFIX'],
'shared_library': default_variables['SHARED_LIB_PREFIX'],
'static_library': default_variables['STATIC_LIB_PREFIX'],
'executable': default_variables['EXECUTABLE_PREFIX'],
}
prefix = spec.get('product_prefix', DEFAULT_PREFIX.get(type, ''))
# Compute filename extension: the product extension, or a default
# for the product type.
DEFAULT_EXTENSION = {
'loadable_module': default_variables['SHARED_LIB_SUFFIX'],
'shared_library': default_variables['SHARED_LIB_SUFFIX'],
'static_library': default_variables['STATIC_LIB_SUFFIX'],
'executable': default_variables['EXECUTABLE_SUFFIX'],
}
extension = spec.get('product_extension')
if extension:
extension = '.' + extension
else:
extension = DEFAULT_EXTENSION.get(type, '')
if 'product_name' in spec:
# If we were given an explicit name, use that.
target = spec['product_name']
else:
# Otherwise, derive a name from the target name.
target = spec['target_name']
if prefix == 'lib':
# Snip out an extra 'lib' from libs if appropriate.
target = StripPrefix(target, 'lib')
if type in ('static_library', 'loadable_module', 'shared_library',
'executable'):
return '%s%s%s' % (prefix, target, extension)
elif type == 'none':
return '%s.stamp' % target
else:
raise 'Unhandled output type', type
def ComputeOutput(self, spec, type=None):
"""Compute the path for the final output of the spec."""
assert not self.is_mac_bundle or type
if not type:
type = spec['type']
if self.flavor == 'mac' and type in (
'static_library', 'executable', 'shared_library', 'loadable_module'):
filename = self.xcode_settings.GetExecutablePath()
else:
filename = self.ComputeOutputFileName(spec, type)
if 'product_dir' in spec:
path = os.path.join(spec['product_dir'], filename)
return self.ExpandSpecial(path)
# Some products go into the output root, libraries go into shared library
# dir, and everything else goes into the normal place.
type_in_output_root = ['executable', 'loadable_module']
if self.flavor == 'mac' and self.toolset == 'target':
type_in_output_root += ['shared_library', 'static_library']
if type in type_in_output_root:
return filename
elif type == 'shared_library':
libdir = 'lib'
if self.toolset != 'target':
libdir = os.path.join('lib', '%s' % self.toolset)
return os.path.join(libdir, filename)
else:
return self.GypPathToUniqueOutput(filename, qualified=False)
def WriteVariableList(self, var, values):
if values is None:
values = []
self.ninja.variable(var, ' '.join(values))
def WriteNewNinjaRule(self, name, args, description, env={}):
"""Write out a new ninja "rule" statement for a given command.
Returns the name of the new rule."""
# TODO: we shouldn't need to qualify names; we do it because
# currently the ninja rule namespace is global, but it really
# should be scoped to the subninja.
rule_name = self.name
if self.toolset == 'target':
rule_name += '.' + self.toolset
rule_name += '.' + name
rule_name = rule_name.replace(' ', '_')
args = args[:]
# gyp dictates that commands are run from the base directory.
# cd into the directory before running, and adjust paths in
# the arguments to point to the proper locations.
if self.flavor == 'win':
cd = 'cmd /s /c "cd %s && ' % self.build_to_base
else:
cd = 'cd %s; ' % self.build_to_base
args = [self.ExpandSpecial(arg, self.base_to_build) for arg in args]
env = self.ComputeExportEnvString(env)
if self.flavor == 'win':
# TODO(scottmg): Really don't want encourage cygwin, but I'm not sure
# how much sh is depended upon. For now, double quote args to make most
# things work.
command = args[0] + ' "' + '" "'.join(args[1:]) + '""'
else:
command = gyp.common.EncodePOSIXShellList(args)
if env:
# If an environment is passed in, variables in the command should be
# read from it, instead of from ninja's internal variables.
command = ninja_syntax.escape(command)
command = cd + env + command
# GYP rules/actions express being no-ops by not touching their outputs.
# Avoid executing downstream dependencies in this case by specifying
# restat=1 to ninja.
self.ninja.rule(rule_name, command, description, restat=True)
self.ninja.newline()
return rule_name
def CalculateVariables(default_variables, params):
  """Calculate additional variables for use in the build (called by gyp).

  Mutates |default_variables| in place with flavor-specific defaults; on mac
  it also pulls shared generator configuration from the Xcode generator into
  this module's globals.
  """
  # (An unused 'CC.target' environ lookup was removed here; the CC/CXX
  # toolchain is resolved later, in GenerateOutputForConfig.)
  flavor = gyp.common.GetFlavor(params)
  if flavor == 'mac':
    default_variables.setdefault('OS', 'mac')
    default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib')
    default_variables.setdefault('SHARED_LIB_DIR',
                                 generator_default_variables['PRODUCT_DIR'])
    default_variables.setdefault('LIB_DIR',
                                 generator_default_variables['PRODUCT_DIR'])
    # Copy additional generator configuration data from Xcode, which is shared
    # by the Mac Ninja generator.
    import gyp.generator.xcode as xcode_generator
    global generator_additional_non_configuration_keys
    generator_additional_non_configuration_keys = getattr(xcode_generator,
        'generator_additional_non_configuration_keys', [])
    global generator_additional_path_sections
    generator_additional_path_sections = getattr(xcode_generator,
        'generator_additional_path_sections', [])
    global generator_extra_sources_for_rules
    generator_extra_sources_for_rules = getattr(xcode_generator,
        'generator_extra_sources_for_rules', [])
  elif flavor == 'win':
    default_variables['OS'] = 'win'
    default_variables['EXECUTABLE_SUFFIX'] = '.exe'
    default_variables['STATIC_LIB_PREFIX'] = ''
    default_variables['STATIC_LIB_SUFFIX'] = '.lib'
    default_variables['SHARED_LIB_PREFIX'] = ''
    default_variables['SHARED_LIB_SUFFIX'] = '.dll'
  else:
    operating_system = flavor
    if flavor == 'android':
      operating_system = 'linux'  # Keep this legacy behavior for now.
    default_variables.setdefault('OS', operating_system)
    default_variables.setdefault('SHARED_LIB_SUFFIX', '.so')
    default_variables.setdefault('SHARED_LIB_DIR',
                                 os.path.join('$!PRODUCT_DIR', 'lib'))
    default_variables.setdefault('LIB_DIR', '')
def OpenOutput(path):
  """Open |path| for writing, creating parent directories if necessary."""
  parent = os.path.dirname(path)
  try:
    os.makedirs(parent)
  except OSError:
    # Most commonly the directory already exists; any real problem will
    # surface from open() below.
    pass
  return open(path, 'w')
def GenerateOutputForConfig(target_list, target_dicts, data, params,
                            config_name):
  """Write build.ninja (plus one sub-.ninja per target) for one configuration.

  Args:
    target_list: qualified target names, in dependency order.
    target_dicts: map of qualified target name -> gyp target dict.
    data: map of build file path -> parsed build file contents.
    params: generator parameters (options, generator_flags, build_files).
    config_name: the configuration (e.g. 'Debug') to generate for.
  """
  options = params['options']
  flavor = gyp.common.GetFlavor(params)
  generator_flags = params.get('generator_flags', {})
  # build_dir: relative path from source root to our output files.
  # e.g. "out/Debug"
  build_dir = os.path.join(generator_flags.get('output_dir', 'out'),
                           config_name)
  master_ninja = ninja_syntax.Writer(
      OpenOutput(os.path.join(options.toplevel_dir, build_dir, 'build.ninja')),
      width=120)
  # Put build-time support tools in out/{config_name}.
  gyp.common.CopyTool(flavor, os.path.join(options.toplevel_dir, build_dir))
  # Grab make settings for CC/CXX.
  if flavor == 'win':
    cc = cxx = 'cl'
  else:
    cc, cxx = 'gcc', 'g++'
  # make_global_settings may override the toolchain; paths there are
  # relative to the source root, so rebase them onto the build dir.
  build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
  make_global_settings = data[build_file].get('make_global_settings', [])
  build_to_root = InvertRelativePath(build_dir)
  for key, value in make_global_settings:
    if key == 'CC': cc = os.path.join(build_to_root, value)
    if key == 'CXX': cxx = os.path.join(build_to_root, value)
  # Serialize links through flock (a shell wrapper on mac) to limit
  # concurrent linker memory use; environment CC/CXX still win over
  # make_global_settings.
  flock = 'flock'
  if flavor == 'mac':
    flock = './gyp-mac-tool flock'
  master_ninja.variable('cc', os.environ.get('CC', cc))
  master_ninja.variable('cxx', os.environ.get('CXX', cxx))
  if flavor == 'win':
    master_ninja.variable('ld', 'link')
  else:
    master_ninja.variable('ld', flock + ' linker.lock $cxx')
  master_ninja.variable('cc_host', '$cc')
  master_ninja.variable('cxx_host', '$cxx')
  if flavor == 'mac':
    master_ninja.variable('mac_tool', os.path.join('.', 'gyp-mac-tool'))
  master_ninja.newline()
  # Core compile rules. gcc-style toolchains use -MMD depfiles; MSVC
  # parses /showIncludes output through a helper instead.
  if flavor != 'win':
    master_ninja.rule(
      'cc',
      description='CC $out',
      command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c '
              '$cflags_pch_c -c $in -o $out'),
      depfile='$out.d')
    master_ninja.rule(
      'cxx',
      description='CXX $out',
      command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc '
              '$cflags_pch_cc -c $in -o $out'),
      depfile='$out.d')
  else:
    # TODO(scottmg): Requires deplist branch of ninja for now (for
    # /showIncludes handling).
    master_ninja.rule(
      'cc',
      description='CC $out',
      command=('cmd /c $cc /nologo /showIncludes '
               '$defines $includes $cflags $cflags_c '
               '$cflags_pch_c /c $in /Fo$out '
               '| ninja-deplist-helper -f cl -o $out.dl'),
      deplist='$out.dl')
    master_ninja.rule(
      'cxx',
      description='CXX $out',
      command=('cmd /c $cxx /nologo /showIncludes '
               '$defines $includes $cflags $cflags_cc '
               '$cflags_pch_cc /c $in /Fo$out '
               '| ninja-deplist-helper -f cl -o $out.dl'),
      deplist='$out.dl')
  # Archive/link rules, per flavor: generic POSIX, then win, then mac
  # (which also gets ObjC compile and bundle-packaging rules).
  if flavor != 'mac' and flavor != 'win':
    master_ninja.rule(
      'alink',
      description='AR $out',
      command='rm -f $out && ar rcsT $out $in')
    master_ninja.rule(
      'solink',
      description='SOLINK $out',
      command=('$ld -shared $ldflags -o $out -Wl,-soname=$soname '
               '-Wl,--whole-archive $in -Wl,--no-whole-archive $libs'))
    master_ninja.rule(
      'solink_module',
      description='SOLINK(module) $out',
      command=('$ld -shared $ldflags -o $out -Wl,-soname=$soname '
               '-Wl,--start-group $in -Wl,--end-group $libs'))
    master_ninja.rule(
      'link',
      description='LINK $out',
      command=('$ld $ldflags -o $out -Wl,-rpath=\$$ORIGIN/lib '
               '-Wl,--start-group $in -Wl,--end-group $libs'))
  elif flavor == 'win':
    master_ninja.rule(
      'alink',
      description='AR $out',
      command='lib /nologo /OUT:$out $in')
    master_ninja.rule(
      'solink',
      description='SOLINK $out',
      command=('$ld /nologo /DLL $ldflags /OUT:$out $in $libs'))
    master_ninja.rule(
      'solink_module',
      description='SOLINK(module) $out',
      command=('$ld /nologo /DLL $ldflags /OUT:$out $in $libs'))
    master_ninja.rule(
      'link',
      description='LINK $out',
      command=('$ld /nologo $ldflags /OUT:$out $in $libs'))
  else:
    master_ninja.rule(
      'objc',
      description='OBJC $out',
      command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc '
               '$cflags_pch_objc -c $in -o $out'),
      depfile='$out.d')
    master_ninja.rule(
      'objcxx',
      description='OBJCXX $out',
      command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc '
               '$cflags_pch_objcc -c $in -o $out'),
      depfile='$out.d')
    master_ninja.rule(
      'alink',
      description='LIBTOOL-STATIC $out, POSTBUILDS',
      command='rm -f $out && '
              './gyp-mac-tool filter-libtool libtool -static -o $out $in'
              '$postbuilds')
    # TODO(thakis): The solink_module rule is likely wrong. Xcode seems to pass
    # -bundle -single_module here (for osmesa.so).
    master_ninja.rule(
      'solink',
      description='SOLINK $out, POSTBUILDS',
      command=('$ld -shared $ldflags -o $out '
               '$in $libs$postbuilds'))
    master_ninja.rule(
      'solink_module',
      description='SOLINK(module) $out, POSTBUILDS',
      command=('$ld -shared $ldflags -o $out '
               '$in $libs$postbuilds'))
    master_ninja.rule(
      'link',
      description='LINK $out, POSTBUILDS',
      command=('$ld $ldflags -o $out '
               '$in $libs$postbuilds'))
    master_ninja.rule(
      'infoplist',
      description='INFOPLIST $out',
      command=('$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && '
               'plutil -convert xml1 $out $out'))
    master_ninja.rule(
      'mac_tool',
      description='MACTOOL $mactool_cmd $in',
      command='$env $mac_tool $mactool_cmd $in $out')
    master_ninja.rule(
      'package_framework',
      description='PACKAGE FRAMEWORK $out, POSTBUILDS',
      command='$mac_tool package-framework $out $version$postbuilds '
              '&& touch $out')
  master_ninja.rule(
    'stamp',
    description='STAMP $out',
    command='${postbuilds}touch $out')
  if flavor == 'win':
    # TODO(scottmg): Copy fallback?
    master_ninja.rule(
      'copy',
      description='COPY $in $out',
      command='cmd /c mklink /h $out $in >nul || mklink /h /j $out $in >nul')
  else:
    # Hard-link when possible; fall back to a real copy.
    master_ninja.rule(
      'copy',
      description='COPY $in $out',
      command='ln -f $in $out 2>/dev/null || (rm -rf $out && cp -af $in $out)')
  master_ninja.newline()
  # Targets requested directly by the build files get wired into the
  # top-level 'all' phony target below.
  all_targets = set()
  for build_file in params['build_files']:
    for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
      all_targets.add(target)
  all_outputs = set()
  # target_outputs is a map from qualified target name to a Target object.
  target_outputs = {}
  # Emit one sub-.ninja per target, referenced from build.ninja via subninja.
  for qualified_target in target_list:
    # qualified_target is like: third_party/icu/icu.gyp:icui18n#target
    build_file, name, toolset = \
        gyp.common.ParseQualifiedTarget(qualified_target)
    this_make_global_settings = data[build_file].get('make_global_settings', [])
    assert make_global_settings == this_make_global_settings, (
        "make_global_settings needs to be the same for all targets.")
    spec = target_dicts[qualified_target]
    if flavor == 'mac':
      gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec)
    build_file = gyp.common.RelativePath(build_file, options.toplevel_dir)
    base_path = os.path.dirname(build_file)
    obj = 'obj'
    if toolset != 'target':
      obj += '.' + toolset
    output_file = os.path.join(obj, base_path, name + '.ninja')
    abs_build_dir=os.path.abspath(os.path.join(options.toplevel_dir, build_dir))
    writer = NinjaWriter(target_outputs, base_path, build_dir,
                         OpenOutput(os.path.join(options.toplevel_dir,
                                                 build_dir,
                                                 output_file)),
                         flavor, abs_build_dir=abs_build_dir)
    master_ninja.subninja(output_file)
    target = writer.WriteSpec(spec, config_name)
    if target:
      target_outputs[qualified_target] = target
      if qualified_target in all_targets:
        all_outputs.add(target.FinalOutput())
  if all_outputs:
    master_ninja.build('all', 'phony', list(all_outputs))
def GenerateOutput(target_list, target_dicts, data, params):
  """Entry point called by gyp: generate ninja files for each configuration.

  Raises:
    NotImplementedError: if --generator_output was requested; the ninja
      generator does not support it.
  """
  if params['options'].generator_output:
    # "raise E, msg" is Python 2-only syntax (a syntax error on Python 3);
    # use the parenthesized call form, which works on both.
    raise NotImplementedError("--generator_output not implemented for ninja")
  user_config = params.get('generator_flags', {}).get('config', None)
  if user_config:
    GenerateOutputForConfig(target_list, target_dicts, data, params,
                            user_config)
  else:
    # No explicit config requested: generate every configuration declared
    # by the first target.
    config_names = target_dicts[target_list[0]]['configurations'].keys()
    for config_name in config_names:
      GenerateOutputForConfig(target_list, target_dicts, data, params,
                              config_name)
| gpl-3.0 |
luoxufeiyan/python | zentst/0023/mysite/settings.py | 38 | 5492 | # Django settings for mysite project.
# NOTE(review): DEBUG and TEMPLATE_DEBUG are enabled here; confirm they are
# turned off for production deployments.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
    # ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'D:/workspace/mysite/sqlite3.db', # Or path to database file if using sqlite3.
        # The following settings are not used with sqlite3:
        'USER': '',
        'PASSWORD': '',
        'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
        'PORT': '', # Set to empty string for default.
    }
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
# NOTE(review): 'GMT+8' is not a tz database identifier; a zoneinfo name such
# as 'Asia/Shanghai' (UTC+8) is presumably intended -- confirm before
# enabling time-zone-dependent features.
TIME_ZONE = 'GMT+8'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
#    'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
# NOTE(review): a hard-coded SECRET_KEY committed to version control should be
# rotated and loaded from the environment instead.
SECRET_KEY = 'wm-!t8*l0+5yq-##j=(%&^2ns0bw=h@r=5!b9%3(63by^7-pg&'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
#     'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    # Uncomment the next line for simple clickjacking protection:
    # 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'mysite.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'mysite.wsgi.application'
TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    'D:/workspace/mysite/templates',
)
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Uncomment the next line to enable the admin:
    'django.contrib.admin',
    # Uncomment the next line to enable admin documentation:
    # 'django.contrib.admindocs',
    'messageboard',
)
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer'
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
| mit |
AmrThabet/CouchPotatoServer | libs/suds/sax/parser.py | 180 | 4461 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
"""
The sax module contains a collection of classes that provide a
(D)ocument (O)bject (M)odel representation of an XML document.
The goal is to provide an easy, intuative interface for managing XML
documents. Although, the term, DOM, is used above, this model is
B{far} better.
XML namespaces in suds are represented using a (2) element tuple
containing the prefix and the URI. Eg: I{('tns', 'http://myns')}
"""
from logging import getLogger
import suds.metrics
from suds import *
from suds.sax import *
from suds.sax.document import Document
from suds.sax.element import Element
from suds.sax.text import Text
from suds.sax.attribute import Attribute
from xml.sax import make_parser, InputSource, ContentHandler
from xml.sax.handler import feature_external_ges
from cStringIO import StringIO
log = getLogger(__name__)
class Handler(ContentHandler):
    """SAX content handler that builds a suds Document tree from events.

    Maintains a stack of open nodes; index 0 is the Document root that is
    handed back to the caller when parsing completes.
    """
    def __init__(self):
        # Node stack: bottom is the Document, top is the element currently
        # being populated.
        self.nodes = [Document()]
    def startElement(self, name, attrs):
        # Create the new element under the current top of the stack;
        # namespace declarations are absorbed by mapPrefix() rather than
        # stored as ordinary attributes.
        top = self.top()
        node = Element(unicode(name), parent=top)
        for a in attrs.getNames():
            n = unicode(a)
            v = unicode(attrs.getValue(a))
            attribute = Attribute(n,v)
            if self.mapPrefix(node, attribute):
                continue
            node.append(attribute)
        # Character data arrives in chunks; buffer it until endElement.
        node.charbuffer = []
        top.append(node)
        self.push(node)
    def mapPrefix(self, node, attribute):
        """Absorb xmlns / xmlns:* attributes into the node's namespace data.

        Returns True when the attribute was a namespace declaration and must
        not be appended as a regular attribute.
        """
        skip = False
        if attribute.name == 'xmlns':
            # Default namespace; an empty value leaves expns unset.
            if len(attribute.value):
                node.expns = unicode(attribute.value)
            skip = True
        elif attribute.prefix == 'xmlns':
            # Prefixed declaration: xmlns:<prefix>="uri".
            prefix = attribute.name
            node.nsprefixes[prefix] = unicode(attribute.value)
            skip = True
        return skip
    def endElement(self, name):
        # Flush buffered character data into the node's text, trim children,
        # and pop the completed element off the stack.
        name = unicode(name)
        current = self.top()
        if len(current.charbuffer):
            current.text = Text(u''.join(current.charbuffer))
        del current.charbuffer
        if len(current):
            current.trim()
        currentqname = current.qname()
        # The closing tag must match the element on top of the stack.
        if name == currentqname:
            self.pop()
        else:
            raise Exception('malformed document')
    def characters(self, content):
        # May be called several times per text node; accumulate chunks.
        text = unicode(content)
        node = self.top()
        node.charbuffer.append(text)
    def push(self, node):
        """Push |node| and return it."""
        self.nodes.append(node)
        return node
    def pop(self):
        """Pop and return the most recently opened node."""
        return self.nodes.pop()
    def top(self):
        """Return the current innermost open node without removing it."""
        return self.nodes[len(self.nodes)-1]
class Parser:
    """SAX-based XML parser producing a suds Document tree."""
    @classmethod
    def saxparser(cls):
        # Build an expat-backed SAX parser with external general entities
        # disabled (prevents fetching external DTD/entity resources).
        p = make_parser()
        p.setFeature(feature_external_ges, 0)
        h = Handler()
        p.setContentHandler(h)
        return (p, h)
    def parse(self, file=None, string=None):
        """
        SAX parse XML text.
        @param file: Parse a python I{file-like} object.
        @type file: I{file-like} object.
        @param string: Parse string XML.
        @type string: str
        """
        timer = metrics.Timer()
        timer.start()
        sax, handler = self.saxparser()
        if file is not None:
            sax.parse(file)
            timer.stop()
            metrics.log.debug('sax (%s) duration: %s', file, timer)
            # handler.nodes[0] is the Document root built by the Handler.
            return handler.nodes[0]
        if string is not None:
            source = InputSource(None)
            source.setByteStream(StringIO(string))
            sax.parse(source)
            timer.stop()
            metrics.log.debug('%s\nsax duration: %s', string, timer)
            # NOTE(review): the trailing "| gpl-3.0 |" on the next line is
            # dataset residue fused onto this file, not part of the source.
            return handler.nodes[0] | gpl-3.0 |
guoxf/linux | tools/perf/scripts/python/futex-contention.py | 1997 | 1508 | # futex contention
# (c) 2010, Arnaldo Carvalho de Melo <acme@redhat.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Translation of:
#
# http://sourceware.org/systemtap/wiki/WSFutexContention
#
# to perf python scripting.
#
# Measures futex contention
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Util import *
# Per-thread transient state while a FUTEX_WAIT is in flight.
thread_thislock = {}   # tid -> futex uaddr currently being waited on
thread_blocktime = {}  # tid -> timestamp (ns) when the wait started
lock_waits = {} # long-lived stats on (tid,lock) blockage elapsed time
process_names = {} # long-lived pid-to-execname mapping
# (A duplicate initial 'process_names = {}' assignment was removed; the dict
# was initialized twice with the second assignment shadowing the first.)
def syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm, callchain,
                              nr, uaddr, op, val, utime, uaddr2, val3):
    """Record the start of a FUTEX_WAIT so the matching exit can be timed."""
    if (op & FUTEX_CMD_MASK) != FUTEX_WAIT:
        # Not a wait; originators of WAKE events are not interesting here.
        return
    process_names[tid] = comm
    thread_thislock[tid] = uaddr
    thread_blocktime[tid] = nsecs(s, ns)
def syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm, callchain,
                             nr, ret):
    """On return from a futex syscall, accumulate how long the thread blocked.

    Only threads previously recorded by the sys_enter handler (i.e. real
    FUTEX_WAIT callers) contribute to the stats.
    """
    # dict.has_key() was removed in Python 3 (deprecated in 2.x); the 'in'
    # operator is equivalent and works on both.
    if tid in thread_blocktime:
        elapsed = nsecs(s, ns) - thread_blocktime[tid]
        add_stats(lock_waits, (tid, thread_thislock[tid]), elapsed)
        del thread_blocktime[tid]
        del thread_thislock[tid]
def trace_begin():
    """Called by perf before event processing starts."""
    # print(...) with a single argument behaves identically on Python 2
    # (parenthesized expression) and Python 3 (function call).
    print("Press control+C to stop and show the summary")
def trace_end():
    """Called by perf at shutdown: dump per-(thread, lock) contention stats."""
    for (tid, lock) in lock_waits:
        # Renamed from min/max/avg/count: 'min' and 'max' shadowed builtins.
        wait_min, wait_max, wait_avg, wait_count = lock_waits[tid, lock]
        # Parenthesized single-argument print works on Python 2 and 3.
        print("%s[%d] lock %x contended %d times, %d avg ns" %
              (process_names[tid], tid, lock, wait_count, wait_avg))
| gpl-2.0 |
annarev/tensorflow | tensorflow/python/framework/subscribe_test.py | 14 | 13361 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tf.subscribe."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import subscribe
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import script_ops
from tensorflow.python.ops import sparse_ops
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import googletest
class SubscribeTest(test_util.TensorFlowTestCase):
def _ExpectSubscribedIdentities(self, container):
"""Convenience function to test a container of subscribed identities."""
self.assertTrue(
all(subscribe._is_subscribed_identity(x) for x in container))
  @test_util.run_deprecated_v1
  def testSideEffect(self):
    """Subscribing moves control deps and fires the callback on every eval."""
    a = constant_op.constant(1)
    b = constant_op.constant(1)
    c = math_ops.add(a, b)
    # d has a control dependency on c before the subscription is attached.
    with ops.control_dependencies([c]):
      d = constant_op.constant(42)
    n = math_ops.negative(c)
    # Values observed by the side-effect callback, in order.
    shared = []
    def sub(t):
      shared.append(t)
      return t
    c0 = c
    self.assertTrue(c0.op in d.op.control_inputs)
    c = subscribe.subscribe(c,
                            lambda t: script_ops.py_func(sub, [t], [t.dtype]))
    # Verify that control dependencies are correctly moved to the subscription.
    self.assertFalse(c0.op in d.op.control_inputs)
    self.assertTrue(c.op in d.op.control_inputs)
    with self.cached_session() as sess:
      c_out = self.evaluate([c])
      n_out = self.evaluate([n])
      d_out = self.evaluate([d])
    self.assertEqual(n_out, [-2])
    self.assertEqual(c_out, [2])
    self.assertEqual(d_out, [42])
    # Evaluating c, n and d each triggered the subscription once, so the
    # callback saw c's value (2) three times.
    self.assertEqual(shared, [2, 2, 2])
  @test_util.run_deprecated_v1
  def testSupportedTypes(self):
    """Confirm that supported types are correctly detected and handled."""
    a = constant_op.constant(1)
    b = constant_op.constant(1)
    c = math_ops.add(a, b)
    def sub(t):
      return t
    # Each supported container type should come back as the same container
    # type, with every element replaced by a subscribed identity.
    # Tuples.
    subscribed = subscribe.subscribe(
        (a, b), lambda t: script_ops.py_func(sub, [t], [t.dtype]))
    self.assertIsInstance(subscribed, tuple)
    self._ExpectSubscribedIdentities(subscribed)
    # Lists.
    subscribed = subscribe.subscribe(
        [a, b], lambda t: script_ops.py_func(sub, [t], [t.dtype]))
    self.assertIsInstance(subscribed, list)
    self._ExpectSubscribedIdentities(subscribed)
    # Dictionaries.
    subscribed = subscribe.subscribe({
        'first': a,
        'second': b
    }, lambda t: script_ops.py_func(sub, [t], [t.dtype]))
    self.assertIsInstance(subscribed, dict)
    self._ExpectSubscribedIdentities(subscribed.values())
    # Namedtuples.
    # pylint: disable=invalid-name
    TensorPair = collections.namedtuple('TensorPair', ['first', 'second'])
    # pylint: enable=invalid-name
    pair = TensorPair(a, b)
    subscribed = subscribe.subscribe(
        pair, lambda t: script_ops.py_func(sub, [t], [t.dtype]))
    self.assertIsInstance(subscribed, TensorPair)
    self._ExpectSubscribedIdentities(subscribed)
    # Expect an exception to be raised for unsupported types.
    with self.assertRaisesRegex(TypeError, 'has invalid type'):
      subscribe.subscribe(c.name,
                          lambda t: script_ops.py_func(sub, [t], [t.dtype]))
  @test_util.run_deprecated_v1
  def testCaching(self):
    """Confirm caching of control output is recalculated between calls."""
    a = constant_op.constant(1)
    b = constant_op.constant(2)
    # c gains a control dependency on a before a is subscribed.
    with ops.control_dependencies([a]):
      c = constant_op.constant(42)
    # Maps observed tensor value -> number of times the callback saw it.
    shared = {}
    def sub(t):
      shared[t] = shared.get(t, 0) + 1
      return t
    a = subscribe.subscribe(a,
                            lambda t: script_ops.py_func(sub, [t], [t.dtype]))
    # d's control dependency on b is created *after* a's subscription, so a
    # stale control-output cache would miss it.
    with ops.control_dependencies([b]):
      d = constant_op.constant(11)
    # If it was using outdated cached control_outputs then
    # evaling would not trigger the new subscription.
    b = subscribe.subscribe(b,
                            lambda t: script_ops.py_func(sub, [t], [t.dtype]))
    with self.cached_session() as sess:
      c_out = self.evaluate([c])
      d_out = self.evaluate([d])
    self.assertEqual(c_out, [42])
    self.assertEqual(d_out, [11])
    # Each subscribed tensor (values 1 and 2) fired exactly once.
    self.assertEqual(shared, {2: 1, 1: 1})
@test_util.run_deprecated_v1
def testIsSubscribedIdentity(self):
"""Confirm subscribed identity ops are correctly detected."""
a = constant_op.constant(1)
b = constant_op.constant(2)
c = math_ops.add(a, b)
idop = array_ops.identity(c)
c_sub = subscribe.subscribe(c, [])
self.assertFalse(subscribe._is_subscribed_identity(a))
self.assertFalse(subscribe._is_subscribed_identity(c))
self.assertFalse(subscribe._is_subscribed_identity(idop))
self.assertTrue(subscribe._is_subscribed_identity(c_sub))
  @test_util.run_deprecated_v1
  def testSubscribeExtend(self):
    """Confirm side effects are correctly added for different input types."""
    a = constant_op.constant(1)
    b = constant_op.constant(2)
    c = math_ops.add(a, b)
    # Maps side-effect graph name -> number of times it fired.
    shared = {}
    def sub(t, name):
      shared[name] = shared.get(name, 0) + 1
      return t
    # Subscribe with a first side effect graph, passing an unsubscribed tensor.
    sub_graph1 = lambda t: sub(t, 'graph1')
    c_sub = subscribe.subscribe(
        c, lambda t: script_ops.py_func(sub_graph1, [t], [t.dtype]))
    # Add a second side effect graph, passing the tensor returned by the
    # previous call to subscribe().
    sub_graph2 = lambda t: sub(t, 'graph2')
    c_sub2 = subscribe.subscribe(
        c_sub, lambda t: script_ops.py_func(sub_graph2, [t], [t.dtype]))
    # Add a third side effect graph, passing the original tensor.
    sub_graph3 = lambda t: sub(t, 'graph3')
    c_sub3 = subscribe.subscribe(
        c, lambda t: script_ops.py_func(sub_graph3, [t], [t.dtype]))
    # Make sure there's only one identity op matching the source tensor's name.
    graph_ops = ops.get_default_graph().get_operations()
    name_prefix = c.op.name + '/subscription/Identity'
    identity_ops = [op for op in graph_ops if op.name.startswith(name_prefix)]
    self.assertEqual(1, len(identity_ops))
    # Expect the objects returned by subscribe() to reference the same tensor.
    self.assertIs(c_sub, c_sub2)
    self.assertIs(c_sub, c_sub3)
    # Expect the three side effect graphs to have been evaluated.
    with self.cached_session() as sess:
      self.evaluate([c_sub])
    self.assertIn('graph1', shared)
    self.assertIn('graph2', shared)
    self.assertIn('graph3', shared)
  @test_util.run_v1_only('b/120545219')
  def testSubscribeVariable(self):
    """Confirm that variables can be subscribed."""
    v1 = variables.VariableV1(0.0)
    v2 = variables.VariableV1(4.0)
    add = math_ops.add(v1, v2)
    assign_v1 = v1.assign(3.0)
    # Values the side-effect callback observed, in order.
    shared = []
    def sub(t):
      shared.append(t)
      return t
    v1_sub = subscribe.subscribe(
        v1, lambda t: script_ops.py_func(sub, [t], [t.dtype]))
    self.assertTrue(subscribe._is_subscribed_identity(v1_sub))
    with self.cached_session() as sess:
      # Initialize the variables first.
      self.evaluate([v1.initializer])
      self.evaluate([v2.initializer])
      # Expect the side effects to be triggered when evaluating the add op as
      # it will read the value of the variable.
      self.evaluate([add])
      self.assertEqual(1, len(shared))
      # Expect the side effect not to be triggered when evaluating the assign
      # op as it will not access the 'read' output of the variable.
      self.evaluate([assign_v1])
      self.assertEqual(1, len(shared))
      self.evaluate([add])
      self.assertEqual(2, len(shared))
      # Make sure the values read from the variable match the expected ones.
      self.assertEqual([0.0, 3.0], shared)
  @test_util.run_v1_only('b/120545219')
  def testResourceType(self):
    """Confirm that subscribe correctly handles tensors with 'resource' type."""
    tensor_array = tensor_array_ops.TensorArray(
        dtype=dtypes.float32,
        tensor_array_name='test',
        size=3,
        infer_shape=False)
    writer = tensor_array.write(0, [[4.0, 5.0]])
    reader = writer.read(0)

    # Collects any tensor handed to the side-effect function.
    shared = []

    def sub(t):
      shared.append(t)
      return t

    # TensorArray's handle output tensor has a 'resource' type and cannot be
    # subscribed as it's not 'numpy compatible' (see dtypes.py).
    # Expect that the original tensor is returned when subscribing to it.
    tensor_array_sub = subscribe.subscribe(
        tensor_array.handle, lambda t: script_ops.py_func(sub, [t], [t.dtype]))
    self.assertIs(tensor_array_sub, tensor_array.handle)
    self.assertFalse(subscribe._is_subscribed_identity(tensor_array.handle))

    with self.cached_session() as sess:
      self.evaluate([reader])
    # The side-effect function must never have run for the resource tensor.
    self.assertEqual(0, len(shared))
  @test_util.run_deprecated_v1
  def testMultipleOutputs(self):
    """Handle subscriptions to multiple outputs from the same op."""
    sparse_tensor_1 = sparse_tensor.SparseTensor(
        indices=[[0, 0], [1, 2]], values=[1, 2], dense_shape=[3, 4])
    sparse_tensor_2 = sparse_tensor.SparseTensor(
        indices=[[0, 0], [1, 2]], values=[2, 3], dense_shape=[3, 4])

    # This op has three outputs.
    sparse_add = sparse_ops.sparse_add(sparse_tensor_1, sparse_tensor_2)
    self.assertEqual(3, len(sparse_add.op.outputs))

    c1 = constant_op.constant(1)

    with ops.control_dependencies(sparse_add.op.outputs):
      # This op depends on all the three outputs.
      neg = -c1

    # Collects each tensor routed through the side-effect function.
    shared = []

    def sub(t):
      shared.append(t)
      return t

    # Subscribe the three outputs at once.
    subscribe.subscribe(sparse_add.op.outputs,
                        lambda t: script_ops.py_func(sub, [t], [t.dtype]))

    with self.cached_session() as sess:
      self.evaluate([neg])

    # All three ops have been processed.
    self.assertEqual(3, len(shared))
  @test_util.run_deprecated_v1
  def test_subscribe_tensors_on_different_devices(self):
    """Side effect ops are added with the same device of the subscribed op."""
    c1 = constant_op.constant(10)
    c2 = constant_op.constant(20)

    # Place the two subscribed ops on two different (virtual) devices.
    with ops.device('cpu:0'):
      add = math_ops.add(c1, c2)

    with ops.device('cpu:1'):
      mul = math_ops.multiply(c1, c2)

    def sub(t):
      return t

    add_sub = subscribe.subscribe(
        add, lambda t: script_ops.py_func(sub, [t], [t.dtype]))

    mul_sub = subscribe.subscribe(
        mul, lambda t: script_ops.py_func(sub, [t], [t.dtype]))

    # Expect the identity tensors injected by subscribe to have been created
    # on the same device as their original tensors.
    self.assertNotEqual(add_sub.device, mul_sub.device)
    self.assertEqual(add.device, add_sub.device)
    self.assertEqual(mul.device, mul_sub.device)
  @test_util.run_v1_only('b/120545219')
  def test_subscribe_tensors_within_control_flow_context(self):
    """Side effect ops are added with the same control flow context."""
    c1 = constant_op.constant(10)
    c2 = constant_op.constant(20)
    x1 = math_ops.add(c1, c2)
    x2 = math_ops.multiply(c1, c2)

    cond = control_flow_ops.cond(
        x1 < x2,
        lambda: math_ops.add(c1, c2, name='then'),
        lambda: math_ops.subtract(c1, c2, name='else'),
        name='cond')

    # Tensor created inside the 'then' branch of the cond, so it lives in a
    # different control flow context than x1/x2.
    branch = ops.get_default_graph().get_tensor_by_name('cond/then:0')

    def context(tensor):
      return tensor.op._get_control_flow_context()

    self.assertIs(context(x1), context(x2))
    self.assertIsNot(context(x1), context(branch))

    results = []

    def sub(tensor):
      results.append(tensor)
      return tensor

    tensors = [x1, branch, x2]

    subscriptions = subscribe.subscribe(
        tensors, lambda t: script_ops.py_func(sub, [t], [t.dtype]))

    # Each injected side-effect op must share its source tensor's context.
    for tensor, subscription in zip(tensors, subscriptions):
      self.assertIs(context(tensor), context(subscription))

    # Verify that sub(x1) and sub(x2) are in the same context.
    self.assertIs(context(subscriptions[0]), context(subscriptions[2]))

    # Verify that sub(x1) and sub(branch) are not.
    self.assertIsNot(context(subscriptions[0]), context(subscriptions[1]))

    with self.cached_session() as sess:
      self.evaluate(cond)

    self.assertEqual(3, len(results))
# Run the subscribe test suite when executed as a script.
if __name__ == '__main__':
  googletest.main()
| apache-2.0 |
python-ivi/python-ivi | ivi/agilent/agilentMSOX4054A.py | 2 | 1695 | """
Python Interchangeable Virtual Instrument Library
Copyright (c) 2014-2017 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .agilent4000A import *
class agilentMSOX4054A(agilent4000A):
    """Agilent InfiniiVision MSOX4054A IVI oscilloscope driver."""

    def __init__(self, *args, **kwargs):
        # Only set the instrument id if a subclass has not already done so.
        self.__dict__.setdefault('_instrument_id', 'MSO-X 4054A')

        super(agilentMSOX4054A, self).__init__(*args, **kwargs)

        # Model-specific capabilities: 4 analog + 16 digital channels, 500 MHz.
        self._analog_channel_count = 4
        self._digital_channel_count = 16
        self._channel_count = (self._analog_channel_count +
                               self._digital_channel_count)
        self._bandwidth = 500e6

        self._init_channels()
| mit |
apache-spark-on-k8s/spark | python/pyspark/mllib/recommendation.py | 50 | 11714 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import array
from collections import namedtuple
from pyspark import SparkContext, since
from pyspark.rdd import RDD
from pyspark.mllib.common import JavaModelWrapper, callMLlibFunc, inherit_doc
from pyspark.mllib.util import JavaLoader, JavaSaveable
from pyspark.sql import DataFrame
__all__ = ['MatrixFactorizationModel', 'ALS', 'Rating']
class Rating(namedtuple("Rating", ["user", "product", "rating"])):

    """
    Represents a (user, product, rating) tuple.

    >>> r = Rating(1, 2, 5.0)
    >>> (r.user, r.product, r.rating)
    (1, 2, 5.0)
    >>> (r[0], r[1], r[2])
    (1, 2, 5.0)

    .. versionadded:: 1.2.0
    """

    def __reduce__(self):
        # Normalize field types when pickling: ids become ints, the rating
        # becomes a float, matching what the JVM side expects.
        user, product, rating = self
        return Rating, (int(user), int(product), float(rating))
@inherit_doc
class MatrixFactorizationModel(JavaModelWrapper, JavaSaveable, JavaLoader):

    """A matrix factorisation model trained by regularized alternating
    least-squares.

    >>> r1 = (1, 1, 1.0)
    >>> r2 = (1, 2, 2.0)
    >>> r3 = (2, 1, 2.0)
    >>> ratings = sc.parallelize([r1, r2, r3])
    >>> model = ALS.trainImplicit(ratings, 1, seed=10)
    >>> model.predict(2, 2)
    0.4...

    >>> testset = sc.parallelize([(1, 2), (1, 1)])
    >>> model = ALS.train(ratings, 2, seed=0)
    >>> model.predictAll(testset).collect()
    [Rating(user=1, product=1, rating=1.0...), Rating(user=1, product=2, rating=1.9...)]

    >>> model = ALS.train(ratings, 4, seed=10)
    >>> model.userFeatures().collect()
    [(1, array('d', [...])), (2, array('d', [...]))]

    >>> model.recommendUsers(1, 2)
    [Rating(user=2, product=1, rating=1.9...), Rating(user=1, product=1, rating=1.0...)]
    >>> model.recommendProducts(1, 2)
    [Rating(user=1, product=2, rating=1.9...), Rating(user=1, product=1, rating=1.0...)]
    >>> model.rank
    4

    >>> first_user = model.userFeatures().take(1)[0]
    >>> latents = first_user[1]
    >>> len(latents)
    4

    >>> model.productFeatures().collect()
    [(1, array('d', [...])), (2, array('d', [...]))]

    >>> first_product = model.productFeatures().take(1)[0]
    >>> latents = first_product[1]
    >>> len(latents)
    4

    >>> products_for_users = model.recommendProductsForUsers(1).collect()
    >>> len(products_for_users)
    2
    >>> products_for_users[0]
    (1, (Rating(user=1, product=2, rating=...),))

    >>> users_for_products = model.recommendUsersForProducts(1).collect()
    >>> len(users_for_products)
    2
    >>> users_for_products[0]
    (1, (Rating(user=2, product=1, rating=...),))

    >>> model = ALS.train(ratings, 1, nonnegative=True, seed=10)
    >>> model.predict(2, 2)
    3.73...

    >>> df = sqlContext.createDataFrame([Rating(1, 1, 1.0), Rating(1, 2, 2.0), Rating(2, 1, 2.0)])
    >>> model = ALS.train(df, 1, nonnegative=True, seed=10)
    >>> model.predict(2, 2)
    3.73...

    >>> model = ALS.trainImplicit(ratings, 1, nonnegative=True, seed=10)
    >>> model.predict(2, 2)
    0.4...

    >>> import os, tempfile
    >>> path = tempfile.mkdtemp()
    >>> model.save(sc, path)
    >>> sameModel = MatrixFactorizationModel.load(sc, path)
    >>> sameModel.predict(2, 2)
    0.4...
    >>> sameModel.predictAll(testset).collect()
    [Rating(...

    >>> from shutil import rmtree
    >>> try:
    ...     rmtree(path)
    ... except OSError:
    ...     pass

    .. versionadded:: 0.9.0
    """

    @since("0.9.0")
    def predict(self, user, product):
        """
        Predicts rating for the given user and product.
        """
        # Delegate straight to the wrapped JVM model; ids are coerced to int.
        return self._java_model.predict(int(user), int(product))

    @since("0.9.0")
    def predictAll(self, user_product):
        """
        Returns a list of predicted ratings for input user and product
        pairs.
        """
        assert isinstance(user_product, RDD), "user_product should be RDD of (user, product)"
        first = user_product.first()
        assert len(first) == 2, "user_product should be RDD of (user, product)"
        # Coerce the ids to plain ints before handing the pairs to the JVM.
        user_product = user_product.map(lambda u_p: (int(u_p[0]), int(u_p[1])))
        return self.call("predict", user_product)

    @since("1.2.0")
    def userFeatures(self):
        """
        Returns a paired RDD, where the first element is the user and the
        second is an array of features corresponding to that user.
        """
        return self.call("getUserFeatures").mapValues(lambda v: array.array('d', v))

    @since("1.2.0")
    def productFeatures(self):
        """
        Returns a paired RDD, where the first element is the product and the
        second is an array of features corresponding to that product.
        """
        return self.call("getProductFeatures").mapValues(lambda v: array.array('d', v))

    @since("1.4.0")
    def recommendUsers(self, product, num):
        """
        Recommends the top "num" number of users for a given product and
        returns a list of Rating objects sorted by the predicted rating in
        descending order.
        """
        return list(self.call("recommendUsers", product, num))

    @since("1.4.0")
    def recommendProducts(self, user, num):
        """
        Recommends the top "num" number of products for a given user and
        returns a list of Rating objects sorted by the predicted rating in
        descending order.
        """
        return list(self.call("recommendProducts", user, num))

    def recommendProductsForUsers(self, num):
        """
        Recommends the top "num" number of products for all users. The
        number of recommendations returned per user may be less than "num".
        """
        return self.call("wrappedRecommendProductsForUsers", num)

    def recommendUsersForProducts(self, num):
        """
        Recommends the top "num" number of users for all products. The
        number of recommendations returned per product may be less than
        "num".
        """
        return self.call("wrappedRecommendUsersForProducts", num)

    @property
    @since("1.4.0")
    def rank(self):
        """Rank for the features in this model"""
        return self.call("rank")

    @classmethod
    @since("1.3.1")
    def load(cls, sc, path):
        """Load a model from the given path"""
        model = cls._load_java(sc, path)
        # Re-wrap the raw JVM model so the Python-friendly helpers above work.
        wrapper = sc._jvm.org.apache.spark.mllib.api.python.MatrixFactorizationModelWrapper(model)
        return MatrixFactorizationModel(wrapper)
class ALS(object):

    """Alternating Least Squares matrix factorization

    .. versionadded:: 0.9.0
    """

    @classmethod
    def _prepare(cls, ratings):
        # Normalize the input to an RDD of Rating objects: accept an RDD or a
        # DataFrame containing Rating objects or (user, product, rating)
        # tuples/lists.
        if isinstance(ratings, RDD):
            pass
        elif isinstance(ratings, DataFrame):
            ratings = ratings.rdd
        else:
            raise TypeError("Ratings should be represented by either an RDD or a DataFrame, "
                            "but got %s." % type(ratings))
        first = ratings.first()
        if isinstance(first, Rating):
            pass
        elif isinstance(first, (tuple, list)):
            ratings = ratings.map(lambda x: Rating(*x))
        else:
            raise TypeError("Expect a Rating or a tuple/list, but got %s." % type(first))
        return ratings

    @classmethod
    @since("0.9.0")
    def train(cls, ratings, rank, iterations=5, lambda_=0.01, blocks=-1, nonnegative=False,
              seed=None):
        """
        Train a matrix factorization model given an RDD of ratings by users
        for a subset of products. The ratings matrix is approximated as the
        product of two lower-rank matrices of a given rank (number of
        features). To solve for these features, ALS is run iteratively with
        a configurable level of parallelism.

        :param ratings:
          RDD of `Rating` or (userID, productID, rating) tuple.
        :param rank:
          Number of features to use (also referred to as the number of latent factors).
        :param iterations:
          Number of iterations of ALS.
          (default: 5)
        :param lambda_:
          Regularization parameter.
          (default: 0.01)
        :param blocks:
          Number of blocks used to parallelize the computation. A value
          of -1 will use an auto-configured number of blocks.
          (default: -1)
        :param nonnegative:
          A value of True will solve least-squares with nonnegativity
          constraints.
          (default: False)
        :param seed:
          Random seed for initial matrix factorization model. A value
          of None will use system time as the seed.
          (default: None)
        """
        model = callMLlibFunc("trainALSModel", cls._prepare(ratings), rank, iterations,
                              lambda_, blocks, nonnegative, seed)
        return MatrixFactorizationModel(model)

    @classmethod
    @since("0.9.0")
    def trainImplicit(cls, ratings, rank, iterations=5, lambda_=0.01, blocks=-1, alpha=0.01,
                      nonnegative=False, seed=None):
        """
        Train a matrix factorization model given an RDD of 'implicit
        preferences' of users for a subset of products. The ratings matrix
        is approximated as the product of two lower-rank matrices of a
        given rank (number of features). To solve for these features, ALS
        is run iteratively with a configurable level of parallelism.

        :param ratings:
          RDD of `Rating` or (userID, productID, rating) tuple.
        :param rank:
          Number of features to use (also referred to as the number of latent factors).
        :param iterations:
          Number of iterations of ALS.
          (default: 5)
        :param lambda_:
          Regularization parameter.
          (default: 0.01)
        :param blocks:
          Number of blocks used to parallelize the computation. A value
          of -1 will use an auto-configured number of blocks.
          (default: -1)
        :param alpha:
          A constant used in computing confidence.
          (default: 0.01)
        :param nonnegative:
          A value of True will solve least-squares with nonnegativity
          constraints.
          (default: False)
        :param seed:
          Random seed for initial matrix factorization model. A value
          of None will use system time as the seed.
          (default: None)
        """
        model = callMLlibFunc("trainImplicitALSModel", cls._prepare(ratings), rank,
                              iterations, lambda_, blocks, alpha, nonnegative, seed)
        return MatrixFactorizationModel(model)
def _test():
    """Run this module's doctests against a local four-core SparkContext."""
    import doctest
    import pyspark.mllib.recommendation
    from pyspark.sql import SQLContext
    # The doctests expect a live `sc` and `sqlContext` in their namespace.
    globs = pyspark.mllib.recommendation.__dict__.copy()
    spark_context = SparkContext('local[4]', 'PythonTest')
    globs['sc'] = spark_context
    globs['sqlContext'] = SQLContext(spark_context)
    failure_count, test_count = doctest.testmod(
        globs=globs, optionflags=doctest.ELLIPSIS)
    globs['sc'].stop()
    if failure_count:
        exit(-1)
# Execute the doctest-based test suite when run as a script.
if __name__ == "__main__":
    _test()
| apache-2.0 |
pvtodorov/indra | indra/sources/cwms/api.py | 2 | 2665 | from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import logging
from indra.sources.cwms.processor import CWMSProcessor
from indra.sources.cwms.rdf_processor import CWMSRDFProcessor
from indra.sources.trips import client
logger = logging.getLogger(__name__)
def process_text(text, save_xml='cwms_output.xml'):
    """Processes text using the CWMS web service.

    Parameters
    ----------
    text : str
        Text to process
    save_xml : str
        Path to which the extracted EKB XML is saved. If None or empty,
        the EKB is not saved. Default: 'cwms_output.xml'

    Returns
    -------
    cp : indra.sources.cwms.CWMSProcessor
        A CWMSProcessor, which contains a list of INDRA statements in its
        statements attribute.

    Raises
    ------
    ValueError
        If the CWMS response does not contain the expected two EKB
        documents.
    """
    xml = client.send_query(text, 'cwmsreader')

    # There are actually two EKBs in the xml document. Extract the second.
    first_end = xml.find('</ekb>')  # End of first EKB
    second_start = xml.find('<ekb', first_end)  # Start of second EKB
    second_end = xml.find('</ekb>', second_start)  # End of second EKB
    # str.find returns -1 on failure; without this check a malformed
    # response would be sliced into garbage and processed silently.
    if first_end == -1 or second_start == -1 or second_end == -1:
        raise ValueError('Expected two EKB documents in the CWMS response '
                         'but could not find them.')
    second_ekb = xml[second_start:second_end+len('</ekb>')]  # second EKB
    if save_xml:
        with open(save_xml, 'wb') as fh:
            fh.write(second_ekb.encode('utf-8'))
    return process_ekb(second_ekb)
def process_ekb_file(fname):
    """Processes an EKB file produced by CWMS.

    Parameters
    ----------
    fname : str
        Path to the EKB file to process.

    Returns
    -------
    cp : indra.sources.cwms.CWMSProcessor
        A CWMSProcessor, which contains a list of INDRA statements in its
        statements attribute.
    """
    # Read the raw EKB XML and delegate statement extraction to process_ekb.
    with open(fname, 'rb') as fh:
        return process_ekb(fh.read().decode('utf-8'))
def process_ekb(ekb_str):
    """Processes an EKB string produced by CWMS.

    Parameters
    ----------
    ekb_str : str
        EKB string to process

    Returns
    -------
    cp : indra.sources.cwms.CWMSProcessor
        A CWMSProcessor, which contains a list of INDRA statements in its
        statements attribute.
    """
    # The processor extracts INDRA Statements from the EKB XML during
    # construction, so it can be returned directly.
    return CWMSProcessor(ekb_str)
def process_rdf_file(text, rdf_filename):
    """Process CWMS's RDF output for the given statement and returns a
    processor populated with INDRA statements.

    Parameters
    ----------
    text : str
        Sentence to process
    rdf_filename : str
        The RDF filename to process

    Returns
    -------
    cp : indra.sources.cwms.CWMSRDFProcessor
        A CWMSRDFProcessor instance, which contains a list of INDRA
        Statements as its statements attribute.
    """
    # Statement extraction happens inside the processor's constructor.
    return CWMSRDFProcessor(text, rdf_filename)
| bsd-2-clause |
bestwpw/letsencrypt | letsencrypt/tests/display/util_test.py | 34 | 11787 | """Test :mod:`letsencrypt.display.util`."""
import os
import unittest
import mock
from letsencrypt.display import util as display_util
# Shared menu/checklist fixtures used by the display test cases below.
CHOICES = [("First", "Description1"), ("Second", "Description2")]
TAGS = ["tag1", "tag2", "tag3"]
TAGS_CHOICES = [("1", "tag1"), ("2", "tag2"), ("3", "tag3")]
class NcursesDisplayTest(unittest.TestCase):
    """Test ncurses display.

    Since this is mostly a wrapper, it might be more helpful to test the
    actual dialog boxes. The test file located in ./tests/display.py
    (relative to the root of the repository) will actually display the
    various boxes but requires the user to do the verification. If
    something seems amiss please use that test script to debug it, the
    automatic tests rely on too much mocking.

    """
    def setUp(self):
        super(NcursesDisplayTest, self).setUp()
        self.displayer = display_util.NcursesDisplay()

        # Keyword arguments NcursesDisplay.menu is expected to forward to
        # dialog.Dialog.menu by default.
        self.default_menu_options = {
            "choices": CHOICES,
            "ok_label": "OK",
            "cancel_label": "Cancel",
            "help_button": False,
            "help_label": "",
            "width": display_util.WIDTH,
            "height": display_util.HEIGHT,
            "menu_height": display_util.HEIGHT - 6,
        }

    @mock.patch("letsencrypt.display.util.dialog.Dialog.msgbox")
    def test_notification(self, mock_msgbox):
        """Kind of worthless... one liner."""
        self.displayer.notification("message")
        self.assertEqual(mock_msgbox.call_count, 1)

    @mock.patch("letsencrypt.display.util.dialog.Dialog.menu")
    def test_menu_tag_and_desc(self, mock_menu):
        mock_menu.return_value = (display_util.OK, "First")
        ret = self.displayer.menu("Message", CHOICES)

        mock_menu.assert_called_with("Message", **self.default_menu_options)
        # The returned index is the position of the selected tag.
        self.assertEqual(ret, (display_util.OK, 0))

    @mock.patch("letsencrypt.display.util.dialog.Dialog.menu")
    def test_menu_tag_and_desc_cancel(self, mock_menu):
        mock_menu.return_value = (display_util.CANCEL, "")
        ret = self.displayer.menu("Message", CHOICES)

        mock_menu.assert_called_with("Message", **self.default_menu_options)
        # Cancel yields index -1.
        self.assertEqual(ret, (display_util.CANCEL, -1))

    @mock.patch("letsencrypt.display.util.dialog.Dialog.menu")
    def test_menu_desc_only(self, mock_menu):
        mock_menu.return_value = (display_util.OK, "1")
        ret = self.displayer.menu("Message", TAGS, help_label="More Info")

        # Tags-only input is converted to numbered (tag, desc) choices.
        self.default_menu_options.update(
            choices=TAGS_CHOICES, help_button=True, help_label="More Info")
        mock_menu.assert_called_with("Message", **self.default_menu_options)
        self.assertEqual(ret, (display_util.OK, 0))

    @mock.patch("letsencrypt.display.util.dialog.Dialog.menu")
    def test_menu_desc_only_help(self, mock_menu):
        mock_menu.return_value = (display_util.HELP, "2")
        ret = self.displayer.menu("Message", TAGS, help_label="More Info")

        self.assertEqual(ret, (display_util.HELP, 1))

    @mock.patch("letsencrypt.display.util.dialog.Dialog.menu")
    def test_menu_desc_only_cancel(self, mock_menu):
        mock_menu.return_value = (display_util.CANCEL, "")
        ret = self.displayer.menu("Message", TAGS, help_label="More Info")

        self.assertEqual(ret, (display_util.CANCEL, -1))

    @mock.patch("letsencrypt.display.util."
                "dialog.Dialog.inputbox")
    def test_input(self, mock_input):
        self.displayer.input("message")
        self.assertEqual(mock_input.call_count, 1)

    @mock.patch("letsencrypt.display.util.dialog.Dialog.yesno")
    def test_yesno(self, mock_yesno):
        mock_yesno.return_value = display_util.OK

        self.assertTrue(self.displayer.yesno("message"))
        mock_yesno.assert_called_with(
            "message", display_util.HEIGHT, display_util.WIDTH,
            yes_label="Yes", no_label="No")

    @mock.patch("letsencrypt.display.util."
                "dialog.Dialog.checklist")
    def test_checklist(self, mock_checklist):
        self.displayer.checklist("message", TAGS)

        # All tags default to checked.
        choices = [
            (TAGS[0], "", True),
            (TAGS[1], "", True),
            (TAGS[2], "", True),
        ]
        mock_checklist.assert_called_with(
            "message", width=display_util.WIDTH, height=display_util.HEIGHT,
            choices=choices)
class FileOutputDisplayTest(unittest.TestCase):
    """Test stdout display.

    Most of this class has to deal with visual output. In order to test how the
    functions look to a user, uncomment the test_visual function.

    """
    def setUp(self):
        super(FileOutputDisplayTest, self).setUp()
        self.mock_stdout = mock.MagicMock()
        self.displayer = display_util.FileDisplay(self.mock_stdout)

    def test_notification_no_pause(self):
        self.displayer.notification("message", 10, False)
        string = self.mock_stdout.write.call_args[0][0]

        self.assertTrue("message" in string)

    def test_notification_pause(self):
        with mock.patch("__builtin__.raw_input", return_value="enter"):
            self.displayer.notification("message")

        self.assertTrue("message" in self.mock_stdout.write.call_args[0][0])

    @mock.patch("letsencrypt.display.util."
                "FileDisplay._get_valid_int_ans")
    def test_menu(self, mock_ans):
        mock_ans.return_value = (display_util.OK, 1)
        ret = self.displayer.menu("message", CHOICES)
        self.assertEqual(ret, (display_util.OK, 0))

    def test_input_cancel(self):
        with mock.patch("__builtin__.raw_input", return_value="c"):
            code, _ = self.displayer.input("message")

        # Fixed: this previously used assertTrue(code, display_util.CANCEL),
        # which treats the second argument as the failure *message* and
        # passes for any truthy code; assertEqual checks the actual value.
        self.assertEqual(code, display_util.CANCEL)

    def test_input_normal(self):
        with mock.patch("__builtin__.raw_input", return_value="domain.com"):
            code, input_ = self.displayer.input("message")

        self.assertEqual(code, display_util.OK)
        self.assertEqual(input_, "domain.com")

    def test_yesno(self):
        with mock.patch("__builtin__.raw_input", return_value="Yes"):
            self.assertTrue(self.displayer.yesno("message"))
        with mock.patch("__builtin__.raw_input", return_value="y"):
            self.assertTrue(self.displayer.yesno("message"))
        # Invalid answers are re-prompted until a valid one is given.
        with mock.patch("__builtin__.raw_input", side_effect=["maybe", "y"]):
            self.assertTrue(self.displayer.yesno("message"))
        with mock.patch("__builtin__.raw_input", return_value="No"):
            self.assertFalse(self.displayer.yesno("message"))
        with mock.patch("__builtin__.raw_input", side_effect=["cancel", "n"]):
            self.assertFalse(self.displayer.yesno("message"))

        # The first letter of a custom yes_label is accepted as "yes".
        with mock.patch("__builtin__.raw_input", return_value="a"):
            self.assertTrue(self.displayer.yesno("msg", yes_label="Agree"))

    @mock.patch("letsencrypt.display.util.FileDisplay.input")
    def test_checklist_valid(self, mock_input):
        mock_input.return_value = (display_util.OK, "2 1")
        code, tag_list = self.displayer.checklist("msg", TAGS)
        self.assertEqual(
            (code, set(tag_list)), (display_util.OK, set(["tag1", "tag2"])))

    @mock.patch("letsencrypt.display.util.FileDisplay.input")
    def test_checklist_miss_valid(self, mock_input):
        # Invalid selections are re-prompted until a valid one is entered.
        mock_input.side_effect = [
            (display_util.OK, "10"),
            (display_util.OK, "tag1 please"),
            (display_util.OK, "1")
        ]

        ret = self.displayer.checklist("msg", TAGS)
        self.assertEqual(ret, (display_util.OK, ["tag1"]))

    @mock.patch("letsencrypt.display.util.FileDisplay.input")
    def test_checklist_miss_quit(self, mock_input):
        # Cancelling during re-prompt returns CANCEL with an empty selection.
        mock_input.side_effect = [
            (display_util.OK, "10"),
            (display_util.CANCEL, "1")
        ]

        ret = self.displayer.checklist("msg", TAGS)
        self.assertEqual(ret, (display_util.CANCEL, []))

    def test_scrub_checklist_input_valid(self):
        # pylint: disable=protected-access
        indices = [
            ["1"],
            ["1", "2", "1"],
            ["2", "3"],
        ]
        exp = [
            set(["tag1"]),
            set(["tag1", "tag2"]),
            set(["tag2", "tag3"]),
        ]

        for i, list_ in enumerate(indices):
            set_tags = set(
                self.displayer._scrub_checklist_input(list_, TAGS))
            self.assertEqual(set_tags, exp[i])

    def test_scrub_checklist_input_invalid(self):
        # pylint: disable=protected-access
        # Out-of-range indices or non-numeric entries yield an empty list.
        indices = [
            ["0"],
            ["4"],
            ["tag1"],
            ["1", "tag1"],
            ["2", "o"]
        ]
        for list_ in indices:
            self.assertEqual(
                self.displayer._scrub_checklist_input(list_, TAGS), [])

    def test_print_menu(self):
        # pylint: disable=protected-access
        # This is purely cosmetic... just make sure there aren't any exceptions
        self.displayer._print_menu("msg", CHOICES)
        self.displayer._print_menu("msg", TAGS)

    def test_wrap_lines(self):
        # pylint: disable=protected-access
        msg = ("This is just a weak test{0}"
               "This function is only meant to be for easy viewing{0}"
               "Test a really really really really really really really really "
               "really really really really long line...".format(os.linesep))

        text = self.displayer._wrap_lines(msg)

        # The long third line should be wrapped exactly once.
        self.assertEqual(text.count(os.linesep), 3)

    def test_get_valid_int_ans_valid(self):
        # pylint: disable=protected-access
        with mock.patch("__builtin__.raw_input", return_value="1"):
            self.assertEqual(
                self.displayer._get_valid_int_ans(1), (display_util.OK, 1))
        ans = "2"
        with mock.patch("__builtin__.raw_input", return_value=ans):
            self.assertEqual(
                self.displayer._get_valid_int_ans(3),
                (display_util.OK, int(ans)))

    def test_get_valid_int_ans_invalid(self):
        # pylint: disable=protected-access
        # Each sequence ends with a cancel ("c"/"C") after invalid attempts.
        answers = [
            ["0", "c"],
            ["4", "one", "C"],
            ["c"],
        ]

        for ans in answers:
            with mock.patch("__builtin__.raw_input", side_effect=ans):
                self.assertEqual(
                    self.displayer._get_valid_int_ans(3),
                    (display_util.CANCEL, -1))
class SeparateListInputTest(unittest.TestCase):
    """Tests for letsencrypt.display.util.separate_list_input."""

    def setUp(self):
        # Every accepted input form should parse to this token list.
        self.exp = ["a", "b", "c", "test"]

    @classmethod
    def _call(cls, input_):
        from letsencrypt.display.util import separate_list_input
        return separate_list_input(input_)

    def test_commas(self):
        self.assertEqual(self._call("a,b,c,test"), self.exp)

    def test_spaces(self):
        self.assertEqual(self._call("a b c test"), self.exp)

    def test_both(self):
        self.assertEqual(self._call("a, b, c, test"), self.exp)

    def test_mess(self):
        # Redundant separators and mixed whitespace are all tolerated.
        messy_inputs = (
            " a , b c \t test",
            ",a, ,, , b c test ",
            ",,,,, , a b,,, , c,test",
        )
        for messy in messy_inputs:
            self.assertEqual(self._call(messy), self.exp)
class PlaceParensTest(unittest.TestCase):
    """Tests for letsencrypt.display.util._parens_around_char."""

    @classmethod
    def _call(cls, label):  # pylint: disable=protected-access
        from letsencrypt.display.util import _parens_around_char
        return _parens_around_char(label)

    def test_single_letter(self):
        self.assertEqual("(a)", self._call("a"))

    def test_multiple(self):
        # Only the first character gets parenthesized.
        cases = (
            ("Label", "(L)abel"),
            ("yes please", "(y)es please"),
        )
        for label, expected in cases:
            self.assertEqual(expected, self._call(label))
# Run the display tests when executed directly.
if __name__ == "__main__":
    unittest.main()  # pragma: no cover
| apache-2.0 |
fle-internal/content-curation | contentcuration/contentcuration/utils/celery_signals.py | 1 | 2657 | import ast
import logging
import os
import traceback
from celery.signals import after_task_publish, task_failure, task_success
from celery.utils.log import get_task_logger
from django.core.exceptions import ObjectDoesNotExist
from contentcuration.models import Task, User
# because Celery connects signals upon import, we don't want to put signals into other modules that may be
# imported multiple times. Instead, we follow the advice here and use AppConfig.init to import the module:
# https://stackoverflow.com/questions/7115097/the-right-place-to-keep-my-signals-py-file-in-a-django-project/21612050#21612050
logger = get_task_logger(__name__)
@after_task_publish.connect
def before_start(sender, headers, body, **kwargs):
    """
    Create a Task object before the task actually started,
    set the task object status to be PENDING, with the signal
    after_task_publish to indicate that the task has been
    sent to the broker.
    """
    task_id = headers["id"]
    # kwargsrepr is the repr() of the task's kwargs; parse it back into a
    # dict to inspect the options the task was published with.
    # NOTE(review): ast.literal_eval raises if any kwarg is not a Python
    # literal -- presumably create_async_task only passes literals; verify.
    options = ast.literal_eval(headers["kwargsrepr"])

    # We use the existence of the task_type kwarg to know if it's an async task.
    if "task_type" not in options:
        return

    Task.objects.filter(task_id=task_id).update(status="PENDING")
    logger.info("Task object {} updated with status PENDING.".format(task_id))
@task_failure.connect
def on_failure(sender, **kwargs):
    """Mark the matching Task record FAILURE and store the exception data."""
    try:
        task = Task.objects.get(task_id=sender.request.id)
    except ObjectDoesNotExist:
        # Task was likely created outside of create_async_task; nothing to do.
        return
    task.status = "FAILURE"
    task.metadata['error'] = {
        'task_args': kwargs['args'],
        'task_kwargs': kwargs['kwargs'],
        'traceback': traceback.format_tb(kwargs['traceback']),
    }
    task.save()
@task_success.connect
def on_success(sender, result, **kwargs):
    """Record a successful task's result and mark its Task record SUCCESS."""
    logger.info("on_success called, process is {}".format(os.getpid()))
    task_id = sender.request.id
    try:
        task = Task.objects.get(task_id=task_id)
    except ObjectDoesNotExist:
        # Task was likely created outside of create_async_task; nothing to do.
        return
    task.status = "SUCCESS"
    task.metadata['result'] = result
    # We're finished, so go ahead and record 100% progress so that getters
    # expecting it get a value even though there is no longer a Celery task
    # to query.
    if task.is_progress_tracking:
        task.metadata['progress'] = 100
    task.save()
    logger.info("Task with ID {} succeeded".format(task_id))
| mit |
vFense/vFenseAgent-nix | agent/deps/mac/Python-2.7.5/lib/python2.7/bsddb/test/test_db.py | 72 | 5796 | import unittest
import os, glob
from test_all import db, test_support, get_new_environment_path, \
get_new_database_path
#----------------------------------------------------------------------
class DB(unittest.TestCase):
    """Base fixture: a fresh DB handle and database path for every test."""
    def setUp(self):
        # Each test gets its own on-disk database path and DB handle.
        self.path = get_new_database_path()
        self.db = db.DB()

    def tearDown(self):
        self.db.close()
        del self.db
        # Remove the database file created during the test.
        test_support.unlink(self.path)
class DB_general(DB) :
    """Generic DB getter/setter tests that apply to any access method."""
    def test_get_open_flags(self) :
        self.db.open(self.path, dbtype=db.DB_HASH, flags = db.DB_CREATE)
        self.assertEqual(db.DB_CREATE, self.db.get_open_flags())

    def test_get_open_flags2(self) :
        self.db.open(self.path, dbtype=db.DB_HASH, flags = db.DB_CREATE |
                db.DB_THREAD)
        self.assertEqual(db.DB_CREATE | db.DB_THREAD, self.db.get_open_flags())

    def test_get_dbname_filename(self) :
        self.db.open(self.path, dbtype=db.DB_HASH, flags = db.DB_CREATE)
        self.assertEqual((self.path, None), self.db.get_dbname())

    def test_get_dbname_filename_database(self) :
        name = "jcea-random-name"
        self.db.open(self.path, dbname=name, dbtype=db.DB_HASH,
                flags = db.DB_CREATE)
        self.assertEqual((self.path, name), self.db.get_dbname())

    def test_bt_minkey(self) :
        for i in [17, 108, 1030] :
            self.db.set_bt_minkey(i)
            self.assertEqual(i, self.db.get_bt_minkey())

    def test_lorder(self) :
        # Only 1234 (little-endian) and 4321 (big-endian) are valid lorders.
        self.db.set_lorder(1234)
        self.assertEqual(1234, self.db.get_lorder())
        self.db.set_lorder(4321)
        self.assertEqual(4321, self.db.get_lorder())
        self.assertRaises(db.DBInvalidArgError, self.db.set_lorder, 9182)

    # Priority getters/setters were added in Berkeley DB 4.6.
    if db.version() >= (4, 6) :
        def test_priority(self) :
            flags = [db.DB_PRIORITY_VERY_LOW, db.DB_PRIORITY_LOW,
                    db.DB_PRIORITY_DEFAULT, db.DB_PRIORITY_HIGH,
                    db.DB_PRIORITY_VERY_HIGH]
            for flag in flags :
                self.db.set_priority(flag)
                self.assertEqual(flag, self.db.get_priority())

    def test_get_transactional(self) :
        # A DB opened without a transactional environment is never
        # transactional, before or after open().
        self.assertFalse(self.db.get_transactional())
        self.db.open(self.path, dbtype=db.DB_HASH, flags = db.DB_CREATE)
        self.assertFalse(self.db.get_transactional())
class DB_hash(DB):
    # Round-trip tests for hash-specific knobs plus the page-size limits.
    def test_h_ffactor(self):
        for fill in (4, 16, 256):
            self.db.set_h_ffactor(fill)
            self.assertEqual(fill, self.db.get_h_ffactor())

    def test_h_nelem(self):
        for millions in (1, 2, 4):
            count = millions * 1024 * 1024  # scale up to millions of elements
            self.db.set_h_nelem(count)
            self.assertEqual(count, self.db.get_h_nelem())

    def test_pagesize(self):
        # Valid page sizes run from 512 (2**9) up to 65536 (2**16).
        for exponent in xrange(9, 17):
            size = 1 << exponent
            self.db.set_pagesize(size)
            self.assertEqual(size, self.db.get_pagesize())
        # Out-of-range sizes must be rejected: 131072 bytes...
        self.assertRaises(db.DBInvalidArgError, self.db.set_pagesize, 1 << 17)
        # ...and 256 bytes.
        self.assertRaises(db.DBInvalidArgError, self.db.set_pagesize, 1 << 8)
class DB_txn(DB) :
    # Fixture override: these tests need a transactional DBEnv, so build one
    # (memory pool + logging + transactions) and attach the DB handle to it.
    def setUp(self) :
        self.homeDir = get_new_environment_path()
        self.env = db.DBEnv()
        self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL |
                db.DB_INIT_LOG | db.DB_INIT_TXN)
        self.db = db.DB(self.env)
    def tearDown(self) :
        # Close the DB before the environment, then wipe the env directory.
        self.db.close()
        del self.db
        self.env.close()
        del self.env
        test_support.rmtree(self.homeDir)
    def test_flags(self) :
        self.db.set_flags(db.DB_CHKSUM)
        self.assertEqual(db.DB_CHKSUM, self.db.get_flags())
        # set_flags() accumulates: each new flag is OR-ed with those already set.
        self.db.set_flags(db.DB_TXN_NOT_DURABLE)
        self.assertEqual(db.DB_TXN_NOT_DURABLE | db.DB_CHKSUM,
                self.db.get_flags())
    def test_get_transactional(self) :
        # Not transactional before open(); opening inside the transactional
        # environment with implicit transactions turns it on.
        self.assertFalse(self.db.get_transactional())
        # DB_AUTO_COMMIT = Implicit transaction
        self.db.open("XXX", dbtype=db.DB_HASH,
                flags = db.DB_CREATE | db.DB_AUTO_COMMIT)
        self.assertTrue(self.db.get_transactional())
class DB_recno(DB):
    # Round-trip tests for recno-specific settings (pad, delimiter, source).
    def _roundtrip_char_setting(self, setter, getter):
        # Pad/delim setters accept both single characters and integer codes;
        # the matching getter always reports the integer code.
        for ch in (' ', '*'):  # Check chars
            setter(ch)
            self.assertEqual(ord(ch), getter())
        for code in (97, 65):  # Check integers
            setter(code)
            self.assertEqual(code, getter())

    def test_re_pad(self):
        self._roundtrip_char_setting(self.db.set_re_pad, self.db.get_re_pad)

    def test_re_delim(self):
        self._roundtrip_char_setting(self.db.set_re_delim, self.db.get_re_delim)

    def test_re_source(self):
        for source in ("test", "test2", "test3"):
            self.db.set_re_source(source)
            self.assertEqual(source, self.db.get_re_source())
class DB_queue(DB):
    # Round-trip tests for queue-specific settings.
    def test_re_len(self):
        for record_len in (33, 65, 300, 2000):
            self.db.set_re_len(record_len)
            self.assertEqual(record_len, self.db.get_re_len())

    def test_q_extentsize(self):
        for extent in (1, 60, 100):
            self.db.set_q_extentsize(extent)
            self.assertEqual(extent, self.db.get_q_extentsize())
def test_suite():
    """Assemble every DB configuration test case into a single suite."""
    suite = unittest.TestSuite()
    for case in (DB_general, DB_txn, DB_hash, DB_recno, DB_queue):
        suite.addTest(unittest.makeSuite(case))
    return suite


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
| lgpl-3.0 |
nkhuyu/SFrame | oss_src/python_packages/predictive_service_client/graphlab_service_client/test/test.py | 10 | 4624 | '''
Copyright (C) 2015 Dato, Inc.
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
'''
import os
from ConfigParser import ConfigParser
import tempfile
from unittest import TestCase
import httpretty
from predictive_client import PredictiveServiceClient
import json
import re
import urllib
API_KEY = '123'
class PredictiveServiceClientTest(TestCase):
def setUp(self):
self.configfile = self._create_temp_config()
def tearDown(self):
os.remove(self.configfile)
def _create_temp_config(self):
fd, filename = tempfile.mkstemp()
config = ConfigParser()
section_name = 'Service Info'
config.add_section(section_name)
config.set(section_name, "endpoint", 'http://abc.com')
config.set(section_name, 'api key', API_KEY)
with open(filename, 'w') as f:
config.write(f)
return filename
def _register_fake_endpoints(self):
def query_callback(request, uri, headers):
headers['content_type'] = 'text/json'
print uri
body = json.loads(request.body)
if ('api key' not in body) or (body['api key'] != API_KEY):
raise AssertionError('API Key Error')
predict_response = {'type': 'QuerySuccessful', 'response':{'something':1}}
wrong_method_response = {'type':'QueryFailed', 'error': 'unknown query method'}
unknonuri_response = {'type':'UnknownURI'}
paths = re.match("http://abc.com/data/(.*)", uri)
print paths.groups()
model_name = paths.groups()[0]
print "model_name: %s" % model_name
if model_name == 'a' or model_name == urllib.quote('name with space'):
request_data = json.loads(request.body)
method = request_data['data']['method']
if method != 'predict' and method != 'recommend':
return (200, headers, json.dumps(wrong_method_response))
else:
return (200, headers, json.dumps(predict_response))
else:
return (404, headers, json.dumps(unknonuri_response))
def feedback_callback(request, uri, headers):
headers['content_type'] = 'text/json'
print request
print uri
body = json.loads(request.body)
if ('api key' not in body) or (body['api key'] != API_KEY):
raise AssertionError('API Key Error')
feedback_response = {'success': 'true'}
return (200, headers, json.dumps(feedback_response))
httpretty.register_uri(httpretty.GET, "http://abc.com",
body='I am here',
status=200)
httpretty.register_uri(httpretty.POST, "http://abc.com/control/list_objects",
body='{"a":1, "name with space": 2}',
status=200,
content_type='text/json')
httpretty.register_uri(httpretty.POST, re.compile("http://abc.com/data/(\w+)"),
body=query_callback)
httpretty.register_uri(httpretty.POST, re.compile("http://abc.com/feedback"),
body=feedback_callback)
@httpretty.activate
def test_read_config(self):
self._register_fake_endpoints()
t = PredictiveServiceClient(config_file =self.configfile)
self.assertEquals(t.endpoint, 'http://abc.com')
self.assertEquals(t.api_key, API_KEY)
@httpretty.activate
def test_query(self):
self._register_fake_endpoints()
t = PredictiveServiceClient(config_file = self.configfile)
data = {"dataset":{"user_id":175343, "movie_id":1011}}
result = t.query('a', method='predict', data= data})
result = t.query('a', method='recommend', data= data})
result = t.query('name with space', method='recommend', data= data})
# unknown model
with self.assertRaises(RuntimeError):
t.query('nonexist', data)
# wrong data type
with self.assertRaises(TypeError):
t.query('a', 'str')
# wrong method
with self.assertRaises(RuntimeError):
t.query('a', {"method":"wrong method", "data":data})
@httpretty.activate
def test_feedback(self):
self._register_fake_endpoints()
t = PredictiveServiceClient(config_file =self.configfile)
t.feedback('some', {'a':1})
t.feedback('some more', {'a':1})
| bsd-3-clause |
koomik/CouchPotatoServer | libs/subliminal/language.py | 107 | 54658 | # -*- coding: utf-8 -*-
# Copyright 2011-2012 Antoine Bertin <diaoulael@gmail.com>
#
# This file is part of subliminal.
#
# subliminal is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# subliminal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with subliminal. If not, see <http://www.gnu.org/licenses/>.
from .utils import to_unicode
import re
import logging
logger = logging.getLogger(__name__)
COUNTRIES = [('AF', 'AFG', '004', u'Afghanistan'),
('AX', 'ALA', '248', u'Åland Islands'),
('AL', 'ALB', '008', u'Albania'),
('DZ', 'DZA', '012', u'Algeria'),
('AS', 'ASM', '016', u'American Samoa'),
('AD', 'AND', '020', u'Andorra'),
('AO', 'AGO', '024', u'Angola'),
('AI', 'AIA', '660', u'Anguilla'),
('AQ', 'ATA', '010', u'Antarctica'),
('AG', 'ATG', '028', u'Antigua and Barbuda'),
('AR', 'ARG', '032', u'Argentina'),
('AM', 'ARM', '051', u'Armenia'),
('AW', 'ABW', '533', u'Aruba'),
('AU', 'AUS', '036', u'Australia'),
('AT', 'AUT', '040', u'Austria'),
('AZ', 'AZE', '031', u'Azerbaijan'),
('BS', 'BHS', '044', u'Bahamas'),
('BH', 'BHR', '048', u'Bahrain'),
('BD', 'BGD', '050', u'Bangladesh'),
('BB', 'BRB', '052', u'Barbados'),
('BY', 'BLR', '112', u'Belarus'),
('BE', 'BEL', '056', u'Belgium'),
('BZ', 'BLZ', '084', u'Belize'),
('BJ', 'BEN', '204', u'Benin'),
('BM', 'BMU', '060', u'Bermuda'),
('BT', 'BTN', '064', u'Bhutan'),
('BO', 'BOL', '068', u'Bolivia, Plurinational State of'),
('BQ', 'BES', '535', u'Bonaire, Sint Eustatius and Saba'),
('BA', 'BIH', '070', u'Bosnia and Herzegovina'),
('BW', 'BWA', '072', u'Botswana'),
('BV', 'BVT', '074', u'Bouvet Island'),
('BR', 'BRA', '076', u'Brazil'),
('IO', 'IOT', '086', u'British Indian Ocean Territory'),
('BN', 'BRN', '096', u'Brunei Darussalam'),
('BG', 'BGR', '100', u'Bulgaria'),
('BF', 'BFA', '854', u'Burkina Faso'),
('BI', 'BDI', '108', u'Burundi'),
('KH', 'KHM', '116', u'Cambodia'),
('CM', 'CMR', '120', u'Cameroon'),
('CA', 'CAN', '124', u'Canada'),
('CV', 'CPV', '132', u'Cape Verde'),
('KY', 'CYM', '136', u'Cayman Islands'),
('CF', 'CAF', '140', u'Central African Republic'),
('TD', 'TCD', '148', u'Chad'),
('CL', 'CHL', '152', u'Chile'),
('CN', 'CHN', '156', u'China'),
('CX', 'CXR', '162', u'Christmas Island'),
('CC', 'CCK', '166', u'Cocos (Keeling) Islands'),
('CO', 'COL', '170', u'Colombia'),
('KM', 'COM', '174', u'Comoros'),
('CG', 'COG', '178', u'Congo'),
('CD', 'COD', '180', u'Congo, The Democratic Republic of the'),
('CK', 'COK', '184', u'Cook Islands'),
('CR', 'CRI', '188', u'Costa Rica'),
('CI', 'CIV', '384', u'Côte d\'Ivoire'),
('HR', 'HRV', '191', u'Croatia'),
('CU', 'CUB', '192', u'Cuba'),
('CW', 'CUW', '531', u'Curaçao'),
('CY', 'CYP', '196', u'Cyprus'),
('CZ', 'CZE', '203', u'Czech Republic'),
('DK', 'DNK', '208', u'Denmark'),
('DJ', 'DJI', '262', u'Djibouti'),
('DM', 'DMA', '212', u'Dominica'),
('DO', 'DOM', '214', u'Dominican Republic'),
('EC', 'ECU', '218', u'Ecuador'),
('EG', 'EGY', '818', u'Egypt'),
('SV', 'SLV', '222', u'El Salvador'),
('GQ', 'GNQ', '226', u'Equatorial Guinea'),
('ER', 'ERI', '232', u'Eritrea'),
('EE', 'EST', '233', u'Estonia'),
('ET', 'ETH', '231', u'Ethiopia'),
('FK', 'FLK', '238', u'Falkland Islands (Malvinas)'),
('FO', 'FRO', '234', u'Faroe Islands'),
('FJ', 'FJI', '242', u'Fiji'),
('FI', 'FIN', '246', u'Finland'),
('FR', 'FRA', '250', u'France'),
('GF', 'GUF', '254', u'French Guiana'),
('PF', 'PYF', '258', u'French Polynesia'),
('TF', 'ATF', '260', u'French Southern Territories'),
('GA', 'GAB', '266', u'Gabon'),
('GM', 'GMB', '270', u'Gambia'),
('GE', 'GEO', '268', u'Georgia'),
('DE', 'DEU', '276', u'Germany'),
('GH', 'GHA', '288', u'Ghana'),
('GI', 'GIB', '292', u'Gibraltar'),
('GR', 'GRC', '300', u'Greece'),
('GL', 'GRL', '304', u'Greenland'),
('GD', 'GRD', '308', u'Grenada'),
('GP', 'GLP', '312', u'Guadeloupe'),
('GU', 'GUM', '316', u'Guam'),
('GT', 'GTM', '320', u'Guatemala'),
('GG', 'GGY', '831', u'Guernsey'),
('GN', 'GIN', '324', u'Guinea'),
('GW', 'GNB', '624', u'Guinea-Bissau'),
('GY', 'GUY', '328', u'Guyana'),
('HT', 'HTI', '332', u'Haiti'),
('HM', 'HMD', '334', u'Heard Island and McDonald Islands'),
('VA', 'VAT', '336', u'Holy See (Vatican City State)'),
('HN', 'HND', '340', u'Honduras'),
('HK', 'HKG', '344', u'Hong Kong'),
('HU', 'HUN', '348', u'Hungary'),
('IS', 'ISL', '352', u'Iceland'),
('IN', 'IND', '356', u'India'),
('ID', 'IDN', '360', u'Indonesia'),
('IR', 'IRN', '364', u'Iran, Islamic Republic of'),
('IQ', 'IRQ', '368', u'Iraq'),
('IE', 'IRL', '372', u'Ireland'),
('IM', 'IMN', '833', u'Isle of Man'),
('IL', 'ISR', '376', u'Israel'),
('IT', 'ITA', '380', u'Italy'),
('JM', 'JAM', '388', u'Jamaica'),
('JP', 'JPN', '392', u'Japan'),
('JE', 'JEY', '832', u'Jersey'),
('JO', 'JOR', '400', u'Jordan'),
('KZ', 'KAZ', '398', u'Kazakhstan'),
('KE', 'KEN', '404', u'Kenya'),
('KI', 'KIR', '296', u'Kiribati'),
('KP', 'PRK', '408', u'Korea, Democratic People\'s Republic of'),
('KR', 'KOR', '410', u'Korea, Republic of'),
('KW', 'KWT', '414', u'Kuwait'),
('KG', 'KGZ', '417', u'Kyrgyzstan'),
('LA', 'LAO', '418', u'Lao People\'s Democratic Republic'),
('LV', 'LVA', '428', u'Latvia'),
('LB', 'LBN', '422', u'Lebanon'),
('LS', 'LSO', '426', u'Lesotho'),
('LR', 'LBR', '430', u'Liberia'),
('LY', 'LBY', '434', u'Libya'),
('LI', 'LIE', '438', u'Liechtenstein'),
('LT', 'LTU', '440', u'Lithuania'),
('LU', 'LUX', '442', u'Luxembourg'),
('MO', 'MAC', '446', u'Macao'),
('MK', 'MKD', '807', u'Macedonia, Republic of'),
('MG', 'MDG', '450', u'Madagascar'),
('MW', 'MWI', '454', u'Malawi'),
('MY', 'MYS', '458', u'Malaysia'),
('MV', 'MDV', '462', u'Maldives'),
('ML', 'MLI', '466', u'Mali'),
('MT', 'MLT', '470', u'Malta'),
('MH', 'MHL', '584', u'Marshall Islands'),
('MQ', 'MTQ', '474', u'Martinique'),
('MR', 'MRT', '478', u'Mauritania'),
('MU', 'MUS', '480', u'Mauritius'),
('YT', 'MYT', '175', u'Mayotte'),
('MX', 'MEX', '484', u'Mexico'),
('FM', 'FSM', '583', u'Micronesia, Federated States of'),
('MD', 'MDA', '498', u'Moldova, Republic of'),
('MC', 'MCO', '492', u'Monaco'),
('MN', 'MNG', '496', u'Mongolia'),
('ME', 'MNE', '499', u'Montenegro'),
('MS', 'MSR', '500', u'Montserrat'),
('MA', 'MAR', '504', u'Morocco'),
('MZ', 'MOZ', '508', u'Mozambique'),
('MM', 'MMR', '104', u'Myanmar'),
('NA', 'NAM', '516', u'Namibia'),
('NR', 'NRU', '520', u'Nauru'),
('NP', 'NPL', '524', u'Nepal'),
('NL', 'NLD', '528', u'Netherlands'),
('NC', 'NCL', '540', u'New Caledonia'),
('NZ', 'NZL', '554', u'New Zealand'),
('NI', 'NIC', '558', u'Nicaragua'),
('NE', 'NER', '562', u'Niger'),
('NG', 'NGA', '566', u'Nigeria'),
('NU', 'NIU', '570', u'Niue'),
('NF', 'NFK', '574', u'Norfolk Island'),
('MP', 'MNP', '580', u'Northern Mariana Islands'),
('NO', 'NOR', '578', u'Norway'),
('OM', 'OMN', '512', u'Oman'),
('PK', 'PAK', '586', u'Pakistan'),
('PW', 'PLW', '585', u'Palau'),
('PS', 'PSE', '275', u'Palestinian Territory, Occupied'),
('PA', 'PAN', '591', u'Panama'),
('PG', 'PNG', '598', u'Papua New Guinea'),
('PY', 'PRY', '600', u'Paraguay'),
('PE', 'PER', '604', u'Peru'),
('PH', 'PHL', '608', u'Philippines'),
('PN', 'PCN', '612', u'Pitcairn'),
('PL', 'POL', '616', u'Poland'),
('PT', 'PRT', '620', u'Portugal'),
('PR', 'PRI', '630', u'Puerto Rico'),
('QA', 'QAT', '634', u'Qatar'),
('RE', 'REU', '638', u'Réunion'),
('RO', 'ROU', '642', u'Romania'),
('RU', 'RUS', '643', u'Russian Federation'),
('RW', 'RWA', '646', u'Rwanda'),
('BL', 'BLM', '652', u'Saint Barthélemy'),
('SH', 'SHN', '654', u'Saint Helena, Ascension and Tristan da Cunha'),
('KN', 'KNA', '659', u'Saint Kitts and Nevis'),
('LC', 'LCA', '662', u'Saint Lucia'),
('MF', 'MAF', '663', u'Saint Martin (French part)'),
('PM', 'SPM', '666', u'Saint Pierre and Miquelon'),
('VC', 'VCT', '670', u'Saint Vincent and the Grenadines'),
('WS', 'WSM', '882', u'Samoa'),
('SM', 'SMR', '674', u'San Marino'),
('ST', 'STP', '678', u'Sao Tome and Principe'),
('SA', 'SAU', '682', u'Saudi Arabia'),
('SN', 'SEN', '686', u'Senegal'),
('RS', 'SRB', '688', u'Serbia'),
('SC', 'SYC', '690', u'Seychelles'),
('SL', 'SLE', '694', u'Sierra Leone'),
('SG', 'SGP', '702', u'Singapore'),
('SX', 'SXM', '534', u'Sint Maarten (Dutch part)'),
('SK', 'SVK', '703', u'Slovakia'),
('SI', 'SVN', '705', u'Slovenia'),
('SB', 'SLB', '090', u'Solomon Islands'),
('SO', 'SOM', '706', u'Somalia'),
('ZA', 'ZAF', '710', u'South Africa'),
('GS', 'SGS', '239', u'South Georgia and the South Sandwich Islands'),
('ES', 'ESP', '724', u'Spain'),
('LK', 'LKA', '144', u'Sri Lanka'),
('SD', 'SDN', '729', u'Sudan'),
('SR', 'SUR', '740', u'Suriname'),
('SS', 'SSD', '728', u'South Sudan'),
('SJ', 'SJM', '744', u'Svalbard and Jan Mayen'),
('SZ', 'SWZ', '748', u'Swaziland'),
('SE', 'SWE', '752', u'Sweden'),
('CH', 'CHE', '756', u'Switzerland'),
('SY', 'SYR', '760', u'Syrian Arab Republic'),
('TW', 'TWN', '158', u'Taiwan, Province of China'),
('TJ', 'TJK', '762', u'Tajikistan'),
('TZ', 'TZA', '834', u'Tanzania, United Republic of'),
('TH', 'THA', '764', u'Thailand'),
('TL', 'TLS', '626', u'Timor-Leste'),
('TG', 'TGO', '768', u'Togo'),
('TK', 'TKL', '772', u'Tokelau'),
('TO', 'TON', '776', u'Tonga'),
('TT', 'TTO', '780', u'Trinidad and Tobago'),
('TN', 'TUN', '788', u'Tunisia'),
('TR', 'TUR', '792', u'Turkey'),
('TM', 'TKM', '795', u'Turkmenistan'),
('TC', 'TCA', '796', u'Turks and Caicos Islands'),
('TV', 'TUV', '798', u'Tuvalu'),
('UG', 'UGA', '800', u'Uganda'),
('UA', 'UKR', '804', u'Ukraine'),
('AE', 'ARE', '784', u'United Arab Emirates'),
('GB', 'GBR', '826', u'United Kingdom'),
('US', 'USA', '840', u'United States'),
('UM', 'UMI', '581', u'United States Minor Outlying Islands'),
('UY', 'URY', '858', u'Uruguay'),
('UZ', 'UZB', '860', u'Uzbekistan'),
('VU', 'VUT', '548', u'Vanuatu'),
('VE', 'VEN', '862', u'Venezuela, Bolivarian Republic of'),
('VN', 'VNM', '704', u'Viet Nam'),
('VG', 'VGB', '092', u'Virgin Islands, British'),
('VI', 'VIR', '850', u'Virgin Islands, U.S.'),
('WF', 'WLF', '876', u'Wallis and Futuna'),
('EH', 'ESH', '732', u'Western Sahara'),
('YE', 'YEM', '887', u'Yemen'),
('ZM', 'ZMB', '894', u'Zambia'),
('ZW', 'ZWE', '716', u'Zimbabwe')]
LANGUAGES = [('aar', '', 'aa', u'Afar', u'afar'),
('abk', '', 'ab', u'Abkhazian', u'abkhaze'),
('ace', '', '', u'Achinese', u'aceh'),
('ach', '', '', u'Acoli', u'acoli'),
('ada', '', '', u'Adangme', u'adangme'),
('ady', '', '', u'Adyghe; Adygei', u'adyghé'),
('afa', '', '', u'Afro-Asiatic languages', u'afro-asiatiques, langues'),
('afh', '', '', u'Afrihili', u'afrihili'),
('afr', '', 'af', u'Afrikaans', u'afrikaans'),
('ain', '', '', u'Ainu', u'aïnou'),
('aka', '', 'ak', u'Akan', u'akan'),
('akk', '', '', u'Akkadian', u'akkadien'),
('alb', 'sqi', 'sq', u'Albanian', u'albanais'),
('ale', '', '', u'Aleut', u'aléoute'),
('alg', '', '', u'Algonquian languages', u'algonquines, langues'),
('alt', '', '', u'Southern Altai', u'altai du Sud'),
('amh', '', 'am', u'Amharic', u'amharique'),
('ang', '', '', u'English, Old (ca.450-1100)', u'anglo-saxon (ca.450-1100)'),
('anp', '', '', u'Angika', u'angika'),
('apa', '', '', u'Apache languages', u'apaches, langues'),
('ara', '', 'ar', u'Arabic', u'arabe'),
('arc', '', '', u'Official Aramaic (700-300 BCE); Imperial Aramaic (700-300 BCE)', u'araméen d\'empire (700-300 BCE)'),
('arg', '', 'an', u'Aragonese', u'aragonais'),
('arm', 'hye', 'hy', u'Armenian', u'arménien'),
('arn', '', '', u'Mapudungun; Mapuche', u'mapudungun; mapuche; mapuce'),
('arp', '', '', u'Arapaho', u'arapaho'),
('art', '', '', u'Artificial languages', u'artificielles, langues'),
('arw', '', '', u'Arawak', u'arawak'),
('asm', '', 'as', u'Assamese', u'assamais'),
('ast', '', '', u'Asturian; Bable; Leonese; Asturleonese', u'asturien; bable; léonais; asturoléonais'),
('ath', '', '', u'Athapascan languages', u'athapascanes, langues'),
('aus', '', '', u'Australian languages', u'australiennes, langues'),
('ava', '', 'av', u'Avaric', u'avar'),
('ave', '', 'ae', u'Avestan', u'avestique'),
('awa', '', '', u'Awadhi', u'awadhi'),
('aym', '', 'ay', u'Aymara', u'aymara'),
('aze', '', 'az', u'Azerbaijani', u'azéri'),
('bad', '', '', u'Banda languages', u'banda, langues'),
('bai', '', '', u'Bamileke languages', u'bamiléké, langues'),
('bak', '', 'ba', u'Bashkir', u'bachkir'),
('bal', '', '', u'Baluchi', u'baloutchi'),
('bam', '', 'bm', u'Bambara', u'bambara'),
('ban', '', '', u'Balinese', u'balinais'),
('baq', 'eus', 'eu', u'Basque', u'basque'),
('bas', '', '', u'Basa', u'basa'),
('bat', '', '', u'Baltic languages', u'baltes, langues'),
('bej', '', '', u'Beja; Bedawiyet', u'bedja'),
('bel', '', 'be', u'Belarusian', u'biélorusse'),
('bem', '', '', u'Bemba', u'bemba'),
('ben', '', 'bn', u'Bengali', u'bengali'),
('ber', '', '', u'Berber languages', u'berbères, langues'),
('bho', '', '', u'Bhojpuri', u'bhojpuri'),
('bih', '', 'bh', u'Bihari languages', u'langues biharis'),
('bik', '', '', u'Bikol', u'bikol'),
('bin', '', '', u'Bini; Edo', u'bini; edo'),
('bis', '', 'bi', u'Bislama', u'bichlamar'),
('bla', '', '', u'Siksika', u'blackfoot'),
('bnt', '', '', u'Bantu (Other)', u'bantoues, autres langues'),
('bos', '', 'bs', u'Bosnian', u'bosniaque'),
('bra', '', '', u'Braj', u'braj'),
('bre', '', 'br', u'Breton', u'breton'),
('btk', '', '', u'Batak languages', u'batak, langues'),
('bua', '', '', u'Buriat', u'bouriate'),
('bug', '', '', u'Buginese', u'bugi'),
('bul', '', 'bg', u'Bulgarian', u'bulgare'),
('bur', 'mya', 'my', u'Burmese', u'birman'),
('byn', '', '', u'Blin; Bilin', u'blin; bilen'),
('cad', '', '', u'Caddo', u'caddo'),
('cai', '', '', u'Central American Indian languages', u'amérindiennes de L\'Amérique centrale, langues'),
('car', '', '', u'Galibi Carib', u'karib; galibi; carib'),
('cat', '', 'ca', u'Catalan; Valencian', u'catalan; valencien'),
('cau', '', '', u'Caucasian languages', u'caucasiennes, langues'),
('ceb', '', '', u'Cebuano', u'cebuano'),
('cel', '', '', u'Celtic languages', u'celtiques, langues; celtes, langues'),
('cha', '', 'ch', u'Chamorro', u'chamorro'),
('chb', '', '', u'Chibcha', u'chibcha'),
('che', '', 'ce', u'Chechen', u'tchétchène'),
('chg', '', '', u'Chagatai', u'djaghataï'),
('chi', 'zho', 'zh', u'Chinese', u'chinois'),
('chk', '', '', u'Chuukese', u'chuuk'),
('chm', '', '', u'Mari', u'mari'),
('chn', '', '', u'Chinook jargon', u'chinook, jargon'),
('cho', '', '', u'Choctaw', u'choctaw'),
('chp', '', '', u'Chipewyan; Dene Suline', u'chipewyan'),
('chr', '', '', u'Cherokee', u'cherokee'),
('chu', '', 'cu', u'Church Slavic; Old Slavonic; Church Slavonic; Old Bulgarian; Old Church Slavonic', u'slavon d\'église; vieux slave; slavon liturgique; vieux bulgare'),
('chv', '', 'cv', u'Chuvash', u'tchouvache'),
('chy', '', '', u'Cheyenne', u'cheyenne'),
('cmc', '', '', u'Chamic languages', u'chames, langues'),
('cop', '', '', u'Coptic', u'copte'),
('cor', '', 'kw', u'Cornish', u'cornique'),
('cos', '', 'co', u'Corsican', u'corse'),
('cpe', '', '', u'Creoles and pidgins, English based', u'créoles et pidgins basés sur l\'anglais'),
('cpf', '', '', u'Creoles and pidgins, French-based ', u'créoles et pidgins basés sur le français'),
('cpp', '', '', u'Creoles and pidgins, Portuguese-based ', u'créoles et pidgins basés sur le portugais'),
('cre', '', 'cr', u'Cree', u'cree'),
('crh', '', '', u'Crimean Tatar; Crimean Turkish', u'tatar de Crimé'),
('crp', '', '', u'Creoles and pidgins ', u'créoles et pidgins'),
('csb', '', '', u'Kashubian', u'kachoube'),
('cus', '', '', u'Cushitic languages', u'couchitiques, langues'),
('cze', 'ces', 'cs', u'Czech', u'tchèque'),
('dak', '', '', u'Dakota', u'dakota'),
('dan', '', 'da', u'Danish', u'danois'),
('dar', '', '', u'Dargwa', u'dargwa'),
('day', '', '', u'Land Dayak languages', u'dayak, langues'),
('del', '', '', u'Delaware', u'delaware'),
('den', '', '', u'Slave (Athapascan)', u'esclave (athapascan)'),
('dgr', '', '', u'Dogrib', u'dogrib'),
('din', '', '', u'Dinka', u'dinka'),
('div', '', 'dv', u'Divehi; Dhivehi; Maldivian', u'maldivien'),
('doi', '', '', u'Dogri', u'dogri'),
('dra', '', '', u'Dravidian languages', u'dravidiennes, langues'),
('dsb', '', '', u'Lower Sorbian', u'bas-sorabe'),
('dua', '', '', u'Duala', u'douala'),
('dum', '', '', u'Dutch, Middle (ca.1050-1350)', u'néerlandais moyen (ca. 1050-1350)'),
('dut', 'nld', 'nl', u'Dutch; Flemish', u'néerlandais; flamand'),
('dyu', '', '', u'Dyula', u'dioula'),
('dzo', '', 'dz', u'Dzongkha', u'dzongkha'),
('efi', '', '', u'Efik', u'efik'),
('egy', '', '', u'Egyptian (Ancient)', u'égyptien'),
('eka', '', '', u'Ekajuk', u'ekajuk'),
('elx', '', '', u'Elamite', u'élamite'),
('eng', '', 'en', u'English', u'anglais'),
('enm', '', '', u'English, Middle (1100-1500)', u'anglais moyen (1100-1500)'),
('epo', '', 'eo', u'Esperanto', u'espéranto'),
('est', '', 'et', u'Estonian', u'estonien'),
('ewe', '', 'ee', u'Ewe', u'éwé'),
('ewo', '', '', u'Ewondo', u'éwondo'),
('fan', '', '', u'Fang', u'fang'),
('fao', '', 'fo', u'Faroese', u'féroïen'),
('fat', '', '', u'Fanti', u'fanti'),
('fij', '', 'fj', u'Fijian', u'fidjien'),
('fil', '', '', u'Filipino; Pilipino', u'filipino; pilipino'),
('fin', '', 'fi', u'Finnish', u'finnois'),
('fiu', '', '', u'Finno-Ugrian languages', u'finno-ougriennes, langues'),
('fon', '', '', u'Fon', u'fon'),
('fre', 'fra', 'fr', u'French', u'français'),
('frm', '', '', u'French, Middle (ca.1400-1600)', u'français moyen (1400-1600)'),
('fro', '', '', u'French, Old (842-ca.1400)', u'français ancien (842-ca.1400)'),
('frr', '', '', u'Northern Frisian', u'frison septentrional'),
('frs', '', '', u'Eastern Frisian', u'frison oriental'),
('fry', '', 'fy', u'Western Frisian', u'frison occidental'),
('ful', '', 'ff', u'Fulah', u'peul'),
('fur', '', '', u'Friulian', u'frioulan'),
('gaa', '', '', u'Ga', u'ga'),
('gay', '', '', u'Gayo', u'gayo'),
('gba', '', '', u'Gbaya', u'gbaya'),
('gem', '', '', u'Germanic languages', u'germaniques, langues'),
('geo', 'kat', 'ka', u'Georgian', u'géorgien'),
('ger', 'deu', 'de', u'German', u'allemand'),
('gez', '', '', u'Geez', u'guèze'),
('gil', '', '', u'Gilbertese', u'kiribati'),
('gla', '', 'gd', u'Gaelic; Scottish Gaelic', u'gaélique; gaélique écossais'),
('gle', '', 'ga', u'Irish', u'irlandais'),
('glg', '', 'gl', u'Galician', u'galicien'),
('glv', '', 'gv', u'Manx', u'manx; mannois'),
('gmh', '', '', u'German, Middle High (ca.1050-1500)', u'allemand, moyen haut (ca. 1050-1500)'),
('goh', '', '', u'German, Old High (ca.750-1050)', u'allemand, vieux haut (ca. 750-1050)'),
('gon', '', '', u'Gondi', u'gond'),
('gor', '', '', u'Gorontalo', u'gorontalo'),
('got', '', '', u'Gothic', u'gothique'),
('grb', '', '', u'Grebo', u'grebo'),
('grc', '', '', u'Greek, Ancient (to 1453)', u'grec ancien (jusqu\'à 1453)'),
('gre', 'ell', 'el', u'Greek, Modern (1453-)', u'grec moderne (après 1453)'),
('grn', '', 'gn', u'Guarani', u'guarani'),
('gsw', '', '', u'Swiss German; Alemannic; Alsatian', u'suisse alémanique; alémanique; alsacien'),
('guj', '', 'gu', u'Gujarati', u'goudjrati'),
('gwi', '', '', u'Gwich\'in', u'gwich\'in'),
('hai', '', '', u'Haida', u'haida'),
('hat', '', 'ht', u'Haitian; Haitian Creole', u'haïtien; créole haïtien'),
('hau', '', 'ha', u'Hausa', u'haoussa'),
('haw', '', '', u'Hawaiian', u'hawaïen'),
('heb', '', 'he', u'Hebrew', u'hébreu'),
('her', '', 'hz', u'Herero', u'herero'),
('hil', '', '', u'Hiligaynon', u'hiligaynon'),
('him', '', '', u'Himachali languages; Western Pahari languages', u'langues himachalis; langues paharis occidentales'),
('hin', '', 'hi', u'Hindi', u'hindi'),
('hit', '', '', u'Hittite', u'hittite'),
('hmn', '', '', u'Hmong; Mong', u'hmong'),
('hmo', '', 'ho', u'Hiri Motu', u'hiri motu'),
('hrv', '', 'hr', u'Croatian', u'croate'),
('hsb', '', '', u'Upper Sorbian', u'haut-sorabe'),
('hun', '', 'hu', u'Hungarian', u'hongrois'),
('hup', '', '', u'Hupa', u'hupa'),
('iba', '', '', u'Iban', u'iban'),
('ibo', '', 'ig', u'Igbo', u'igbo'),
('ice', 'isl', 'is', u'Icelandic', u'islandais'),
('ido', '', 'io', u'Ido', u'ido'),
('iii', '', 'ii', u'Sichuan Yi; Nuosu', u'yi de Sichuan'),
('ijo', '', '', u'Ijo languages', u'ijo, langues'),
('iku', '', 'iu', u'Inuktitut', u'inuktitut'),
('ile', '', 'ie', u'Interlingue; Occidental', u'interlingue'),
('ilo', '', '', u'Iloko', u'ilocano'),
('ina', '', 'ia', u'Interlingua (International Auxiliary Language Association)', u'interlingua (langue auxiliaire internationale)'),
('inc', '', '', u'Indic languages', u'indo-aryennes, langues'),
('ind', '', 'id', u'Indonesian', u'indonésien'),
('ine', '', '', u'Indo-European languages', u'indo-européennes, langues'),
('inh', '', '', u'Ingush', u'ingouche'),
('ipk', '', 'ik', u'Inupiaq', u'inupiaq'),
('ira', '', '', u'Iranian languages', u'iraniennes, langues'),
('iro', '', '', u'Iroquoian languages', u'iroquoises, langues'),
('ita', '', 'it', u'Italian', u'italien'),
('jav', '', 'jv', u'Javanese', u'javanais'),
('jbo', '', '', u'Lojban', u'lojban'),
('jpn', '', 'ja', u'Japanese', u'japonais'),
('jpr', '', '', u'Judeo-Persian', u'judéo-persan'),
('jrb', '', '', u'Judeo-Arabic', u'judéo-arabe'),
('kaa', '', '', u'Kara-Kalpak', u'karakalpak'),
('kab', '', '', u'Kabyle', u'kabyle'),
('kac', '', '', u'Kachin; Jingpho', u'kachin; jingpho'),
('kal', '', 'kl', u'Kalaallisut; Greenlandic', u'groenlandais'),
('kam', '', '', u'Kamba', u'kamba'),
('kan', '', 'kn', u'Kannada', u'kannada'),
('kar', '', '', u'Karen languages', u'karen, langues'),
('kas', '', 'ks', u'Kashmiri', u'kashmiri'),
('kau', '', 'kr', u'Kanuri', u'kanouri'),
('kaw', '', '', u'Kawi', u'kawi'),
('kaz', '', 'kk', u'Kazakh', u'kazakh'),
('kbd', '', '', u'Kabardian', u'kabardien'),
('kha', '', '', u'Khasi', u'khasi'),
('khi', '', '', u'Khoisan languages', u'khoïsan, langues'),
('khm', '', 'km', u'Central Khmer', u'khmer central'),
('kho', '', '', u'Khotanese; Sakan', u'khotanais; sakan'),
('kik', '', 'ki', u'Kikuyu; Gikuyu', u'kikuyu'),
('kin', '', 'rw', u'Kinyarwanda', u'rwanda'),
('kir', '', 'ky', u'Kirghiz; Kyrgyz', u'kirghiz'),
('kmb', '', '', u'Kimbundu', u'kimbundu'),
('kok', '', '', u'Konkani', u'konkani'),
('kom', '', 'kv', u'Komi', u'kom'),
('kon', '', 'kg', u'Kongo', u'kongo'),
('kor', '', 'ko', u'Korean', u'coréen'),
('kos', '', '', u'Kosraean', u'kosrae'),
('kpe', '', '', u'Kpelle', u'kpellé'),
('krc', '', '', u'Karachay-Balkar', u'karatchai balkar'),
('krl', '', '', u'Karelian', u'carélien'),
('kro', '', '', u'Kru languages', u'krou, langues'),
('kru', '', '', u'Kurukh', u'kurukh'),
('kua', '', 'kj', u'Kuanyama; Kwanyama', u'kuanyama; kwanyama'),
('kum', '', '', u'Kumyk', u'koumyk'),
('kur', '', 'ku', u'Kurdish', u'kurde'),
('kut', '', '', u'Kutenai', u'kutenai'),
('lad', '', '', u'Ladino', u'judéo-espagnol'),
('lah', '', '', u'Lahnda', u'lahnda'),
('lam', '', '', u'Lamba', u'lamba'),
('lao', '', 'lo', u'Lao', u'lao'),
('lat', '', 'la', u'Latin', u'latin'),
('lav', '', 'lv', u'Latvian', u'letton'),
('lez', '', '', u'Lezghian', u'lezghien'),
('lim', '', 'li', u'Limburgan; Limburger; Limburgish', u'limbourgeois'),
('lin', '', 'ln', u'Lingala', u'lingala'),
('lit', '', 'lt', u'Lithuanian', u'lituanien'),
('lol', '', '', u'Mongo', u'mongo'),
('loz', '', '', u'Lozi', u'lozi'),
('ltz', '', 'lb', u'Luxembourgish; Letzeburgesch', u'luxembourgeois'),
('lua', '', '', u'Luba-Lulua', u'luba-lulua'),
('lub', '', 'lu', u'Luba-Katanga', u'luba-katanga'),
('lug', '', 'lg', u'Ganda', u'ganda'),
('lui', '', '', u'Luiseno', u'luiseno'),
('lun', '', '', u'Lunda', u'lunda'),
('luo', '', '', u'Luo (Kenya and Tanzania)', u'luo (Kenya et Tanzanie)'),
('lus', '', '', u'Lushai', u'lushai'),
('mac', 'mkd', 'mk', u'Macedonian', u'macédonien'),
('mad', '', '', u'Madurese', u'madourais'),
('mag', '', '', u'Magahi', u'magahi'),
('mah', '', 'mh', u'Marshallese', u'marshall'),
('mai', '', '', u'Maithili', u'maithili'),
('mak', '', '', u'Makasar', u'makassar'),
('mal', '', 'ml', u'Malayalam', u'malayalam'),
('man', '', '', u'Mandingo', u'mandingue'),
('mao', 'mri', 'mi', u'Maori', u'maori'),
('map', '', '', u'Austronesian languages', u'austronésiennes, langues'),
('mar', '', 'mr', u'Marathi', u'marathe'),
('mas', '', '', u'Masai', u'massaï'),
('may', 'msa', 'ms', u'Malay', u'malais'),
('mdf', '', '', u'Moksha', u'moksa'),
('mdr', '', '', u'Mandar', u'mandar'),
('men', '', '', u'Mende', u'mendé'),
('mga', '', '', u'Irish, Middle (900-1200)', u'irlandais moyen (900-1200)'),
('mic', '', '', u'Mi\'kmaq; Micmac', u'mi\'kmaq; micmac'),
('min', '', '', u'Minangkabau', u'minangkabau'),
('mkh', '', '', u'Mon-Khmer languages', u'môn-khmer, langues'),
('mlg', '', 'mg', u'Malagasy', u'malgache'),
('mlt', '', 'mt', u'Maltese', u'maltais'),
('mnc', '', '', u'Manchu', u'mandchou'),
('mni', '', '', u'Manipuri', u'manipuri'),
('mno', '', '', u'Manobo languages', u'manobo, langues'),
('moh', '', '', u'Mohawk', u'mohawk'),
('mon', '', 'mn', u'Mongolian', u'mongol'),
('mos', '', '', u'Mossi', u'moré'),
('mun', '', '', u'Munda languages', u'mounda, langues'),
('mus', '', '', u'Creek', u'muskogee'),
('mwl', '', '', u'Mirandese', u'mirandais'),
('mwr', '', '', u'Marwari', u'marvari'),
('myn', '', '', u'Mayan languages', u'maya, langues'),
('myv', '', '', u'Erzya', u'erza'),
('nah', '', '', u'Nahuatl languages', u'nahuatl, langues'),
('nai', '', '', u'North American Indian languages', u'nord-amérindiennes, langues'),
('nap', '', '', u'Neapolitan', u'napolitain'),
('nau', '', 'na', u'Nauru', u'nauruan'),
('nav', '', 'nv', u'Navajo; Navaho', u'navaho'),
('nbl', '', 'nr', u'Ndebele, South; South Ndebele', u'ndébélé du Sud'),
('nde', '', 'nd', u'Ndebele, North; North Ndebele', u'ndébélé du Nord'),
('ndo', '', 'ng', u'Ndonga', u'ndonga'),
('nds', '', '', u'Low German; Low Saxon; German, Low; Saxon, Low', u'bas allemand; bas saxon; allemand, bas; saxon, bas'),
('nep', '', 'ne', u'Nepali', u'népalais'),
('new', '', '', u'Nepal Bhasa; Newari', u'nepal bhasa; newari'),
('nia', '', '', u'Nias', u'nias'),
('nic', '', '', u'Niger-Kordofanian languages', u'nigéro-kordofaniennes, langues'),
('niu', '', '', u'Niuean', u'niué'),
('nno', '', 'nn', u'Norwegian Nynorsk; Nynorsk, Norwegian', u'norvégien nynorsk; nynorsk, norvégien'),
('nob', '', 'nb', u'Bokmål, Norwegian; Norwegian Bokmål', u'norvégien bokmål'),
('nog', '', '', u'Nogai', u'nogaï; nogay'),
('non', '', '', u'Norse, Old', u'norrois, vieux'),
('nor', '', 'no', u'Norwegian', u'norvégien'),
('nqo', '', '', u'N\'Ko', u'n\'ko'),
('nso', '', '', u'Pedi; Sepedi; Northern Sotho', u'pedi; sepedi; sotho du Nord'),
('nub', '', '', u'Nubian languages', u'nubiennes, langues'),
('nwc', '', '', u'Classical Newari; Old Newari; Classical Nepal Bhasa', u'newari classique'),
('nya', '', 'ny', u'Chichewa; Chewa; Nyanja', u'chichewa; chewa; nyanja'),
('nym', '', '', u'Nyamwezi', u'nyamwezi'),
('nyn', '', '', u'Nyankole', u'nyankolé'),
('nyo', '', '', u'Nyoro', u'nyoro'),
('nzi', '', '', u'Nzima', u'nzema'),
('oci', '', 'oc', u'Occitan (post 1500); Provençal', u'occitan (après 1500); provençal'),
('oji', '', 'oj', u'Ojibwa', u'ojibwa'),
('ori', '', 'or', u'Oriya', u'oriya'),
('orm', '', 'om', u'Oromo', u'galla'),
('osa', '', '', u'Osage', u'osage'),
('oss', '', 'os', u'Ossetian; Ossetic', u'ossète'),
('ota', '', '', u'Turkish, Ottoman (1500-1928)', u'turc ottoman (1500-1928)'),
('oto', '', '', u'Otomian languages', u'otomi, langues'),
('paa', '', '', u'Papuan languages', u'papoues, langues'),
('pag', '', '', u'Pangasinan', u'pangasinan'),
('pal', '', '', u'Pahlavi', u'pahlavi'),
('pam', '', '', u'Pampanga; Kapampangan', u'pampangan'),
('pan', '', 'pa', u'Panjabi; Punjabi', u'pendjabi'),
('pap', '', '', u'Papiamento', u'papiamento'),
('pau', '', '', u'Palauan', u'palau'),
('peo', '', '', u'Persian, Old (ca.600-400 B.C.)', u'perse, vieux (ca. 600-400 av. J.-C.)'),
('per', 'fas', 'fa', u'Persian', u'persan'),
('phi', '', '', u'Philippine languages', u'philippines, langues'),
('phn', '', '', u'Phoenician', u'phénicien'),
('pli', '', 'pi', u'Pali', u'pali'),
('pol', '', 'pl', u'Polish', u'polonais'),
('pon', '', '', u'Pohnpeian', u'pohnpei'),
('por', '', 'pt', u'Portuguese', u'portugais'),
('pra', '', '', u'Prakrit languages', u'prâkrit, langues'),
('pro', '', '', u'Provençal, Old (to 1500)', u'provençal ancien (jusqu\'à 1500)'),
('pus', '', 'ps', u'Pushto; Pashto', u'pachto'),
('que', '', 'qu', u'Quechua', u'quechua'),
('raj', '', '', u'Rajasthani', u'rajasthani'),
('rap', '', '', u'Rapanui', u'rapanui'),
('rar', '', '', u'Rarotongan; Cook Islands Maori', u'rarotonga; maori des îles Cook'),
('roa', '', '', u'Romance languages', u'romanes, langues'),
('roh', '', 'rm', u'Romansh', u'romanche'),
('rom', '', '', u'Romany', u'tsigane'),
('rum', 'ron', 'ro', u'Romanian; Moldavian; Moldovan', u'roumain; moldave'),
('run', '', 'rn', u'Rundi', u'rundi'),
('rup', '', '', u'Aromanian; Arumanian; Macedo-Romanian', u'aroumain; macédo-roumain'),
('rus', '', 'ru', u'Russian', u'russe'),
('sad', '', '', u'Sandawe', u'sandawe'),
('sag', '', 'sg', u'Sango', u'sango'),
('sah', '', '', u'Yakut', u'iakoute'),
('sai', '', '', u'South American Indian (Other)', u'indiennes d\'Amérique du Sud, autres langues'),
('sal', '', '', u'Salishan languages', u'salishennes, langues'),
('sam', '', '', u'Samaritan Aramaic', u'samaritain'),
('san', '', 'sa', u'Sanskrit', u'sanskrit'),
('sas', '', '', u'Sasak', u'sasak'),
('sat', '', '', u'Santali', u'santal'),
('scn', '', '', u'Sicilian', u'sicilien'),
('sco', '', '', u'Scots', u'écossais'),
('sel', '', '', u'Selkup', u'selkoupe'),
('sem', '', '', u'Semitic languages', u'sémitiques, langues'),
('sga', '', '', u'Irish, Old (to 900)', u'irlandais ancien (jusqu\'à 900)'),
('sgn', '', '', u'Sign Languages', u'langues des signes'),
('shn', '', '', u'Shan', u'chan'),
('sid', '', '', u'Sidamo', u'sidamo'),
('sin', '', 'si', u'Sinhala; Sinhalese', u'singhalais'),
('sio', '', '', u'Siouan languages', u'sioux, langues'),
('sit', '', '', u'Sino-Tibetan languages', u'sino-tibétaines, langues'),
('sla', '', '', u'Slavic languages', u'slaves, langues'),
('slo', 'slk', 'sk', u'Slovak', u'slovaque'),
('slv', '', 'sl', u'Slovenian', u'slovène'),
('sma', '', '', u'Southern Sami', u'sami du Sud'),
('sme', '', 'se', u'Northern Sami', u'sami du Nord'),
('smi', '', '', u'Sami languages', u'sames, langues'),
('smj', '', '', u'Lule Sami', u'sami de Lule'),
('smn', '', '', u'Inari Sami', u'sami d\'Inari'),
('smo', '', 'sm', u'Samoan', u'samoan'),
('sms', '', '', u'Skolt Sami', u'sami skolt'),
('sna', '', 'sn', u'Shona', u'shona'),
('snd', '', 'sd', u'Sindhi', u'sindhi'),
('snk', '', '', u'Soninke', u'soninké'),
('sog', '', '', u'Sogdian', u'sogdien'),
('som', '', 'so', u'Somali', u'somali'),
('son', '', '', u'Songhai languages', u'songhai, langues'),
('sot', '', 'st', u'Sotho, Southern', u'sotho du Sud'),
('spa', '', 'es', u'Spanish; Castilian', u'espagnol; castillan'),
('srd', '', 'sc', u'Sardinian', u'sarde'),
('srn', '', '', u'Sranan Tongo', u'sranan tongo'),
('srp', '', 'sr', u'Serbian', u'serbe'),
('srr', '', '', u'Serer', u'sérère'),
('ssa', '', '', u'Nilo-Saharan languages', u'nilo-sahariennes, langues'),
('ssw', '', 'ss', u'Swati', u'swati'),
('suk', '', '', u'Sukuma', u'sukuma'),
('sun', '', 'su', u'Sundanese', u'soundanais'),
('sus', '', '', u'Susu', u'soussou'),
('sux', '', '', u'Sumerian', u'sumérien'),
('swa', '', 'sw', u'Swahili', u'swahili'),
('swe', '', 'sv', u'Swedish', u'suédois'),
('syc', '', '', u'Classical Syriac', u'syriaque classique'),
('syr', '', '', u'Syriac', u'syriaque'),
('tah', '', 'ty', u'Tahitian', u'tahitien'),
('tai', '', '', u'Tai languages', u'tai, langues'),
('tam', '', 'ta', u'Tamil', u'tamoul'),
('tat', '', 'tt', u'Tatar', u'tatar'),
('tel', '', 'te', u'Telugu', u'télougou'),
('tem', '', '', u'Timne', u'temne'),
('ter', '', '', u'Tereno', u'tereno'),
('tet', '', '', u'Tetum', u'tetum'),
('tgk', '', 'tg', u'Tajik', u'tadjik'),
('tgl', '', 'tl', u'Tagalog', u'tagalog'),
('tha', '', 'th', u'Thai', u'thaï'),
('tib', 'bod', 'bo', u'Tibetan', u'tibétain'),
('tig', '', '', u'Tigre', u'tigré'),
('tir', '', 'ti', u'Tigrinya', u'tigrigna'),
('tiv', '', '', u'Tiv', u'tiv'),
('tkl', '', '', u'Tokelau', u'tokelau'),
('tlh', '', '', u'Klingon; tlhIngan-Hol', u'klingon'),
('tli', '', '', u'Tlingit', u'tlingit'),
('tmh', '', '', u'Tamashek', u'tamacheq'),
('tog', '', '', u'Tonga (Nyasa)', u'tonga (Nyasa)'),
('ton', '', 'to', u'Tonga (Tonga Islands)', u'tongan (Îles Tonga)'),
('tpi', '', '', u'Tok Pisin', u'tok pisin'),
('tsi', '', '', u'Tsimshian', u'tsimshian'),
('tsn', '', 'tn', u'Tswana', u'tswana'),
('tso', '', 'ts', u'Tsonga', u'tsonga'),
('tuk', '', 'tk', u'Turkmen', u'turkmène'),
('tum', '', '', u'Tumbuka', u'tumbuka'),
('tup', '', '', u'Tupi languages', u'tupi, langues'),
('tur', '', 'tr', u'Turkish', u'turc'),
('tut', '', '', u'Altaic languages', u'altaïques, langues'),
('tvl', '', '', u'Tuvalu', u'tuvalu'),
('twi', '', 'tw', u'Twi', u'twi'),
('tyv', '', '', u'Tuvinian', u'touva'),
('udm', '', '', u'Udmurt', u'oudmourte'),
('uga', '', '', u'Ugaritic', u'ougaritique'),
('uig', '', 'ug', u'Uighur; Uyghur', u'ouïgour'),
('ukr', '', 'uk', u'Ukrainian', u'ukrainien'),
('umb', '', '', u'Umbundu', u'umbundu'),
('und', '', '', u'Undetermined', u'indéterminée'),
('urd', '', 'ur', u'Urdu', u'ourdou'),
('uzb', '', 'uz', u'Uzbek', u'ouszbek'),
('vai', '', '', u'Vai', u'vaï'),
('ven', '', 've', u'Venda', u'venda'),
('vie', '', 'vi', u'Vietnamese', u'vietnamien'),
('vol', '', 'vo', u'Volapük', u'volapük'),
('vot', '', '', u'Votic', u'vote'),
('wak', '', '', u'Wakashan languages', u'wakashanes, langues'),
('wal', '', '', u'Walamo', u'walamo'),
('war', '', '', u'Waray', u'waray'),
('was', '', '', u'Washo', u'washo'),
('wel', 'cym', 'cy', u'Welsh', u'gallois'),
('wen', '', '', u'Sorbian languages', u'sorabes, langues'),
('wln', '', 'wa', u'Walloon', u'wallon'),
('wol', '', 'wo', u'Wolof', u'wolof'),
('xal', '', '', u'Kalmyk; Oirat', u'kalmouk; oïrat'),
('xho', '', 'xh', u'Xhosa', u'xhosa'),
('yao', '', '', u'Yao', u'yao'),
('yap', '', '', u'Yapese', u'yapois'),
('yid', '', 'yi', u'Yiddish', u'yiddish'),
('yor', '', 'yo', u'Yoruba', u'yoruba'),
('ypk', '', '', u'Yupik languages', u'yupik, langues'),
('zap', '', '', u'Zapotec', u'zapotèque'),
('zbl', '', '', u'Blissymbols; Blissymbolics; Bliss', u'symboles Bliss; Bliss'),
('zen', '', '', u'Zenaga', u'zenaga'),
('zha', '', 'za', u'Zhuang; Chuang', u'zhuang; chuang'),
('znd', '', '', u'Zande languages', u'zandé, langues'),
('zul', '', 'zu', u'Zulu', u'zoulou'),
('zun', '', '', u'Zuni', u'zuni'),
('zza', '', '', u'Zaza; Dimili; Dimli; Kirdki; Kirmanjki; Zazaki', u'zaza; dimili; dimli; kirdki; kirmanjki; zazaki')]
class Country(object):
    """Country according to ISO-3166

    :param string country: country name, alpha2 code, alpha3 code or numeric code
    :param list countries: all countries
    :type countries: see :data:`~subliminal.language.COUNTRIES`

    """
    def __init__(self, country, countries=None):
        countries = countries or COUNTRIES
        country = to_unicode(country.strip().lower())
        # Choose which tuple column to match on from the shape of the input:
        # 2 chars -> alpha2, 3 digits -> numeric code, 3 letters -> alpha3
        column = None
        if len(country) == 2:
            column = 0
        elif len(country) == 3:
            column = 2 if country.isdigit() else 1
        country_tuple = None
        if column is not None:
            country_tuple = dict((c[column].lower(), c) for c in countries).get(country)
        # Fall back to matching on the full country name
        if country_tuple is None:
            country_tuple = dict((c[3].lower(), c) for c in countries).get(country)
        # Nothing matched at all
        if country_tuple is None:
            raise ValueError('Country %s does not exist' % country)
        # Expose the standard ISO-3166 attributes
        self.alpha2 = country_tuple[0]
        self.alpha3 = country_tuple[1]
        self.numeric = country_tuple[2]
        self.name = country_tuple[3]

    def __hash__(self):
        return hash(self.alpha3)

    def __eq__(self, other):
        # Equality is defined on the alpha3 code only
        return isinstance(other, Country) and self.alpha3 == other.alpha3

    def __ne__(self, other):
        return not self == other

    def __unicode__(self):
        return self.name

    def __str__(self):
        return unicode(self).encode('utf-8')

    def __repr__(self):
        return 'Country(%s)' % self
class Language(object):
    """Language according to ISO-639

    :param string language: language name (english or french), alpha2 code, alpha3 code, terminologic code or numeric code, eventually with a country
    :param country: country of the language
    :type country: :class:`Country` or string
    :param languages: all languages
    :type languages: see :data:`~subliminal.language.LANGUAGES`
    :param countries: all countries
    :type countries: see :data:`~subliminal.language.COUNTRIES`
    :param bool strict: whether to raise a ValueError on unknown language or not

    :class:`Language` implements the inclusion test, with the ``in`` keyword::

        >>> Language('pt-BR') in Language('pt')  # Portuguese (Brazil) is included in Portuguese
        True
        >>> Language('pt') in Language('pt-BR')  # Portuguese is not included in Portuguese (Brazil)
        False

    """
    #: Regexps matching "language (country)" and "language-country"/"language_country"
    with_country_regexps = [re.compile(r'(.*)\((.*)\)'), re.compile(r'(.*)[-_](.*)')]

    def __init__(self, language, country=None, languages=None, countries=None, strict=True):
        languages = languages or LANGUAGES
        countries = countries or COUNTRIES
        # Get the country
        self.country = None
        if isinstance(country, Country):
            self.country = country
        elif isinstance(country, basestring):
            try:
                self.country = Country(country, countries)
            except ValueError:
                logger.warning(u'Country %s could not be identified' % country)
                if strict:
                    raise
        # Language + Country format
        #TODO: Improve this part
        if country is None:
            for regexp in [r.match(language) for r in self.with_country_regexps]:
                if regexp:
                    language = regexp.group(1)
                    try:
                        self.country = Country(regexp.group(2), countries)
                    except ValueError:
                        # Log the country string we actually tried to parse;
                        # ``country`` is always None in this branch
                        logger.warning(u'Country %s could not be identified' % regexp.group(2))
                        if strict:
                            raise
                    break
        # Try to find the language: alpha2 for 2-char input, alpha3 then
        # terminologic code for 3-char input, then english/french names
        language = to_unicode(language.strip().lower())
        language_tuple = None
        if len(language) == 2:
            language_tuple = dict((l[2].lower(), l) for l in languages).get(language)
        elif len(language) == 3:
            language_tuple = dict((l[0].lower(), l) for l in languages).get(language)
            if language_tuple is None:
                language_tuple = dict((l[1].lower(), l) for l in languages).get(language)
        if language_tuple is None:
            language_tuple = dict((l[3].split('; ')[0].lower(), l) for l in languages).get(language)
        if language_tuple is None:
            language_tuple = dict((l[4].split('; ')[0].lower(), l) for l in languages).get(language)
        # Raise ValueError if strict or continue with Undetermined
        if language_tuple is None:
            if strict:
                raise ValueError('Language %s does not exist' % language)
            language_tuple = dict((l[0].lower(), l) for l in languages).get('und')
        # Set the standard ISO-639 attributes
        self.alpha2 = language_tuple[2]
        self.alpha3 = language_tuple[0]
        self.terminologic = language_tuple[1]
        self.name = language_tuple[3]
        self.french_name = language_tuple[4]

    def __hash__(self):
        # A country-specific language hashes differently from the bare language
        if self.country is None:
            return hash(self.alpha3)
        return hash(self.alpha3 + self.country.alpha3)

    def __eq__(self, other):
        if isinstance(other, Language):
            return self.alpha3 == other.alpha3 and self.country == other.country
        return False

    def __contains__(self, item):
        # Inclusion test: a country-less language includes all its
        # country-specific variants (see class docstring)
        if isinstance(item, Language):
            if self == item:
                return True
            if self.country is None:
                return self.alpha3 == item.alpha3
        return False

    def __ne__(self, other):
        return not self == other

    def __nonzero__(self):
        # A Language is falsy only when it is Undetermined ('und')
        return self.alpha3 != 'und'

    def __unicode__(self):
        if self.country is None:
            return self.name
        return '%s (%s)' % (self.name, self.country)

    def __str__(self):
        return unicode(self).encode('utf-8')

    def __repr__(self):
        if self.country is None:
            return 'Language(%s)' % self.name.encode('utf-8')
        return 'Language(%s, country=%s)' % (self.name.encode('utf-8'), self.country)
class language_set(set):
"""Set of :class:`Language` with some specificities.
:param iterable: where to take elements from
:type iterable: iterable of :class:`Languages <Language>` or string
:param languages: all languages
:type languages: see :data:`~subliminal.language.LANGUAGES`
:param bool strict: whether to raise a ValueError on invalid language or not
The following redefinitions are meant to reflect the inclusion logic in :class:`Language`
* Inclusion test, with the ``in`` keyword
* Intersection
* Substraction
Here is an illustration of the previous points::
>>> Language('en') in language_set(['en-US', 'en-CA'])
False
>>> Language('en-US') in language_set(['en', 'fr'])
True
>>> language_set(['en']) & language_set(['en-US', 'en-CA'])
language_set([Language(English, country=Canada), Language(English, country=United States)])
>>> language_set(['en-US', 'en-CA', 'fr']) - language_set(['en'])
language_set([Language(French)])
"""
def __init__(self, iterable=None, languages=None, strict=True):
iterable = iterable or []
languages = languages or LANGUAGES
items = []
for i in iterable:
if isinstance(i, Language):
items.append(i)
continue
if isinstance(i, tuple):
items.append(Language(i[0], languages=languages, strict=strict))
continue
items.append(Language(i, languages=languages, strict=strict))
super(language_set, self).__init__(items)
def __contains__(self, item):
for i in self:
if item in i:
return True
return super(language_set, self).__contains__(item)
def __and__(self, other):
results = language_set()
for i in self:
for j in other:
if i in j:
results.add(i)
for i in other:
for j in self:
if i in j:
results.add(i)
return results
def __sub__(self, other):
results = language_set()
for i in self:
if i not in other:
results.add(i)
return results
class language_list(list):
"""List of :class:`Language` with some specificities.
:param iterable: where to take elements from
:type iterable: iterable of :class:`Languages <Language>` or string
:param languages: all languages
:type languages: see :data:`~subliminal.language.LANGUAGES`
:param bool strict: whether to raise a ValueError on invalid language or not
The following redefinitions are meant to reflect the inclusion logic in :class:`Language`
* Inclusion test, with the ``in`` keyword
* Index
Here is an illustration of the previous points::
>>> Language('en') in language_list(['en-US', 'en-CA'])
False
>>> Language('en-US') in language_list(['en', 'fr-BE'])
True
>>> language_list(['en', 'fr-BE']).index(Language('en-US'))
0
"""
def __init__(self, iterable=None, languages=None, strict=True):
iterable = iterable or []
languages = languages or LANGUAGES
items = []
for i in iterable:
if isinstance(i, Language):
items.append(i)
continue
if isinstance(i, tuple):
items.append(Language(i[0], languages=languages, strict=strict))
continue
items.append(Language(i, languages=languages, strict=strict))
super(language_list, self).__init__(items)
def __contains__(self, item):
for i in self:
if item in i:
return True
return super(language_list, self).__contains__(item)
def index(self, x, strict=False):
if not strict:
for i in range(len(self)):
if x in self[i]:
return i
return super(language_list, self).index(x)
| gpl-3.0 |
kbussell/django-docusign | setup.py | 1 | 2412 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Python packaging."""
import os
import sys
from setuptools import setup
#: Absolute path to directory containing setup.py file.
here = os.path.abspath(os.path.dirname(__file__))
#: Boolean, ``True`` if environment is running Python version 2.
IS_PYTHON2 = sys.version_info[0] == 2

# Distribution metadata consumed by the setup() call below.
NAME = 'django-docusign'
DESCRIPTION = 'Django application for DocuSign signature SAAS platform.'
README = open(os.path.join(here, 'README.rst')).read()
VERSION = open(os.path.join(here, 'VERSION')).read().strip()
AUTHOR = u'Benoît Bryon'
EMAIL = u'novafloss@people-doc.com'
LICENSE = 'BSD'
URL = 'https://{name}.readthedocs.io/'.format(name=NAME)
CLASSIFIERS = [
    'Development Status :: 5 - Production/Stable',
    'License :: OSI Approved :: BSD License',
    'Programming Language :: Python :: 2.7',
    'Framework :: Django',
]
KEYWORDS = [
    'docusign',
    'signature',
    'backend',
    'pydocusign',
    'django-anysign',
]
# Importable package name: distribution name with dashes replaced.
PACKAGES = [NAME.replace('-', '_')]
# Runtime dependencies.
REQUIREMENTS = [
    'Django>=1.8,<1.10',
    'django-anysign>=1.0',
    'pydocusign>=0.13.1,<1.0',
    'setuptools',
]
if IS_PYTHON2:
    # ``mock`` only became part of the standard library in Python 3.3.
    REQUIREMENTS.append('mock')
ENTRY_POINTS = {}
TEST_REQUIREMENTS = []
CMDCLASS = {}
SETUP_REQUIREMENTS = [
    'setuptools'
]
# Tox integration.
from setuptools.command.test import test as TestCommand
class Tox(TestCommand):
    """Setuptools ``test`` command that delegates to tox.

    Registered in ``CMDCLASS`` so ``python setup.py test`` runs the
    whole tox matrix instead of plain unittest discovery.
    """
    def finalize_options(self):
        TestCommand.finalize_options(self)
        # No arguments are forwarded; tox reads its own configuration.
        self.test_args = []
        self.test_suite = True
    def run_tests(self):
        # Imported here because setup-time eggs aren't loaded at module import.
        import tox
        errno = tox.cmdline(self.test_args)
        # Propagate tox's exit status (non-zero when any environment fails).
        sys.exit(errno)
# Wire the tox runner into ``python setup.py test``.
TEST_REQUIREMENTS.append('tox')
CMDCLASS['test'] = Tox
if __name__ == '__main__':  # Do not run setup() when we import this module.
    # All metadata is defined above so it can be reused without side effects.
    setup(
        name=NAME,
        version=VERSION,
        description=DESCRIPTION,
        long_description=README,
        classifiers=CLASSIFIERS,
        keywords=' '.join(KEYWORDS),
        author=AUTHOR,
        author_email=EMAIL,
        url=URL,
        license=LICENSE,
        packages=PACKAGES,
        include_package_data=True,
        zip_safe=False,
        install_requires=REQUIREMENTS,
        entry_points=ENTRY_POINTS,
        tests_require=TEST_REQUIREMENTS,
        cmdclass=CMDCLASS,
        setup_requires=SETUP_REQUIREMENTS,
    )
| bsd-3-clause |
katsikas/gnuradio | gr-uhd/examples/python/usrp_wxapt_rcv.py | 8 | 10755 | #!/usr/bin/env python
#
# Copyright 2005-2007,2011 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, audio, blks2, uhd
from gnuradio.eng_option import eng_option
from gnuradio.wxgui import slider, powermate
from gnuradio.wxgui import stdgui2, fftsink2, form
from optparse import OptionParser
import sys
import wx
class wxapt_rx_block (stdgui2.std_top_block):
def __init__(self,frame,panel,vbox,argv):
stdgui2.std_top_block.__init__ (self,frame,panel,vbox,argv)
parser=OptionParser(option_class=eng_option)
parser.add_option("-a", "--args", type="string", default="",
help="UHD device address args, [default=%default]")
parser.add_option("", "--spec", type="string", default=None,
help="Subdevice of UHD device where appropriate")
parser.add_option("-A", "--antenna", type="string", default=None,
help="select Rx Antenna where appropriate")
parser.add_option("-f", "--freq", type="eng_float", default=137.5e6,
help="set frequency to FREQ", metavar="FREQ")
parser.add_option("-g", "--gain", type="eng_float", default=None,
help="set gain in dB (default is midpoint)")
parser.add_option("-V", "--volume", type="eng_float", default=None,
help="set volume (default is midpoint)")
parser.add_option("-O", "--audio-output", type="string", default="",
help="pcm device name. E.g., hw:0,0 or surround51 or /dev/dsp")
parser.add_option("", "--freq-min", type="eng_float", default=137e6,
help="Set a minimum frequency [default=%default]")
parser.add_option("", "--freq-max", type="eng_float", default=138e6,
help="Set a maximum frequency [default=%default]")
(options, args) = parser.parse_args()
if len(args) != 0:
parser.print_help()
sys.exit(1)
self.frame = frame
self.panel = panel
self.vol = 0
self.state = "FREQ"
self.freq = 0
self.freq_min = options.freq_min
self.freq_max = options.freq_max
# build graph
self.u = uhd.usrp_source(device_addr=options.args, stream_args=uhd.stream_args('fc32'))
# Set the subdevice spec
if(options.spec):
self.u.set_subdev_spec(options.spec, 0)
# Set the antenna
if(options.antenna):
self.u.set_antenna(options.antenna, 0)
usrp_rate = 320e3
demod_rate = 320e3
audio_rate = 32e3
audio_decim = int(demod_rate / audio_rate)
self.u.set_samp_rate(usrp_rate)
dev_rate = self.u.get_samp_rate()
nfilts = 32
chan_coeffs = gr.firdes.low_pass_2 (nfilts, # gain
nfilts*usrp_rate, # sampling rate
40e3, # passband cutoff
20e3, # transition bw
60) # stopband attenuation
rrate = usrp_rate / dev_rate
self.chan_filt = blks2.pfb_arb_resampler_ccf(rrate, chan_coeffs, nfilts)
self.guts = blks2.wfm_rcv (demod_rate, audio_decim)
self.volume_control = gr.multiply_const_ff(self.vol)
# sound card as final sink
self.audio_sink = audio.sink (int (audio_rate), options.audio_output)
# now wire it all together
self.connect (self.u, self.chan_filt, self.guts,
self.volume_control, self.audio_sink)
self._build_gui(vbox, usrp_rate, demod_rate, audio_rate)
if options.gain is None:
# if no gain was specified, use the mid-point in dB
g = self.u.get_gain_range()
options.gain = float(g.start()+g.stop())/2.0
if options.volume is None:
g = self.volume_range()
options.volume = float(g[0]+g[1])/2
frange = self.u.get_freq_range()
if(frange.start() > self.freq_max or frange.stop() < self.freq_min):
sys.stderr.write("Radio does not support required frequency range.\n")
sys.exit(1)
if(options.freq < self.freq_min or options.freq > self.freq_max):
sys.stderr.write("Requested frequency is outside of required frequency range.\n")
sys.exit(1)
# set initial values
self.set_gain(options.gain)
self.set_vol(options.volume)
if not(self.set_freq(options.freq)):
self._set_status_msg("Failed to set initial frequency")
def _set_status_msg(self, msg, which=0):
self.frame.GetStatusBar().SetStatusText(msg, which)
def _build_gui(self, vbox, usrp_rate, demod_rate, audio_rate):
def _form_set_freq(kv):
return self.set_freq(kv['freq'])
if 1:
self.src_fft = fftsink2.fft_sink_c (self.panel, title="Data from USRP",
fft_size=512, sample_rate=usrp_rate,
ref_scale=32768.0, ref_level=0, y_divs=12)
self.connect (self.u, self.src_fft)
vbox.Add (self.src_fft.win, 4, wx.EXPAND)
if 1:
post_deemph_fft = fftsink2.fft_sink_f (self.panel, title="Post Deemph",
fft_size=512, sample_rate=demod_rate,
y_per_div=10, ref_level=-20)
self.connect (self.guts.deemph, post_deemph_fft)
vbox.Add (post_deemph_fft.win, 4, wx.EXPAND)
if 1:
post_filt_fft = fftsink2.fft_sink_f (self.panel, title="Post Filter",
fft_size=512, sample_rate=audio_rate,
y_per_div=10, ref_level=0)
self.connect (self.guts.audio_filter, post_filt_fft)
vbox.Add (post_filt_fft.win, 4, wx.EXPAND)
# control area form at bottom
self.myform = myform = form.form()
hbox = wx.BoxSizer(wx.HORIZONTAL)
hbox.Add((5,0), 0)
myform['freq'] = form.float_field(
parent=self.panel, sizer=hbox, label="Freq", weight=1,
callback=myform.check_input_and_call(_form_set_freq, self._set_status_msg))
hbox.Add((5,0), 0)
myform['freq_slider'] = \
form.quantized_slider_field(parent=self.panel, sizer=hbox, weight=3,
range=(self.freq_min, self.freq_max, 0.0005e6),
callback=self.set_freq)
hbox.Add((5,0), 0)
vbox.Add(hbox, 0, wx.EXPAND)
hbox = wx.BoxSizer(wx.HORIZONTAL)
hbox.Add((5,0), 0)
myform['volume'] = \
form.quantized_slider_field(parent=self.panel, sizer=hbox, label="Volume",
weight=3, range=self.volume_range(),
callback=self.set_vol)
hbox.Add((5,0), 1)
g = self.u.get_gain_range()
myform['gain'] = \
form.quantized_slider_field(parent=self.panel, sizer=hbox, label="Gain",
weight=3, range=(g.start(), g.start(), g.step()),
callback=self.set_gain)
hbox.Add((5,0), 0)
vbox.Add(hbox, 0, wx.EXPAND)
try:
self.knob = powermate.powermate(self.frame)
self.rot = 0
powermate.EVT_POWERMATE_ROTATE (self.frame, self.on_rotate)
powermate.EVT_POWERMATE_BUTTON (self.frame, self.on_button)
except:
print "FYI: No Powermate or Contour Knob found"
def on_rotate (self, event):
self.rot += event.delta
if (self.state == "FREQ"):
if self.rot >= 3:
self.set_freq(self.freq + .1e6)
self.rot -= 3
elif self.rot <=-3:
self.set_freq(self.freq - .1e6)
self.rot += 3
else:
step = self.volume_range()[2]
if self.rot >= 3:
self.set_vol(self.vol + step)
self.rot -= 3
elif self.rot <=-3:
self.set_vol(self.vol - step)
self.rot += 3
def on_button (self, event):
if event.value == 0: # button up
return
self.rot = 0
if self.state == "FREQ":
self.state = "VOL"
else:
self.state = "FREQ"
self.update_status_bar ()
def set_vol (self, vol):
g = self.volume_range()
self.vol = max(g[0], min(g[1], vol))
self.volume_control.set_k(10**(self.vol/10))
self.myform['volume'].set_value(self.vol)
self.update_status_bar ()
def set_freq(self, target_freq):
"""
Set the center frequency we're interested in.
@param target_freq: frequency in Hz
@rypte: bool
"""
r = self.u.set_center_freq(target_freq)
if r:
self.freq = target_freq
self.myform['freq'].set_value(target_freq) # update displayed value
self.myform['freq_slider'].set_value(target_freq) # update displayed value
self.update_status_bar()
self._set_status_msg("OK", 0)
return True
self._set_status_msg("Failed", 0)
return False
def set_gain(self, gain):
self.myform['gain'].set_value(gain) # update displayed value
self.u.set_gain(gain)
def update_status_bar (self):
msg = "Volume:%r Setting:%s" % (self.vol, self.state)
self._set_status_msg(msg, 1)
self.src_fft.set_baseband_freq(self.freq)
def volume_range(self):
return (-20.0, 0.0, 0.5)
if __name__ == '__main__':
    # Build the top block inside the WX GUI wrapper and block in its event loop.
    app = stdgui2.stdapp (wxapt_rx_block, "USRP WXAPT RX")
    app.MainLoop ()
| gpl-3.0 |
pastebt/yeast | testcase/test_acore.py | 1 | 4379 | import sys
import unittest
from StringIO import StringIO
sys.path.append('../yeast')
import ahttp
import acore
class FAKE_USER(acore.Acore):
    """Minimal Acore user that just forwards reads to the ARW under test."""
    def read_all(self, arw, size=0, seps=()):
        # Delegate to ARW.aread and re-yield everything it produces
        for chunk in arw.aread(size, seps):
            yield chunk
class FAKE(acore.ARW):
    """ARW test double fed from an in-memory sequence of chunks."""
    def __init__(self, data):
        acore.ARW.__init__(self, user=FAKE_USER())
        self.fileno = 4
        self.src = iter(data)

    def _read(self):
        # Serve the next canned chunk; on exhaustion signal end-of-read
        # the same way a closed connection would.
        try:
            chunk = next(self.src)
        except StopIteration:
            self.sep_hit = acore.EOR()
            chunk = ''
        return chunk
#class FAKE(ahttp.AHTTP):
# def __init__(self, data):
# self.src = iter(data)
# self.rdata, self.sep = '', ''
# self.sock = FakeSock()
#
# def read(self, sock):
# return next(self.src)
#
# def get_http(self, sock, body_size_limit=0):
# for y in self.get_http_head(sock):
# yield y
# for y in self.get_http_body(sock, body_size_limit):
# yield y
class TestARW(unittest.TestCase):
    """Exercise ARW.aread (via FAKE/FAKE_USER) with various separators,
    pre-buffered leftovers and size limits.

    Conventions used in the assertions:
      * ``f.request_data`` -- what the read consumed, including the separator
      * ``f._data_buf``    -- what remains buffered for the next read
      * ``f.sep_hit``      -- the separator that actually terminated the read
    """
    def test_read_all1(self):
        # Separator found inside a single incoming chunk; rest stays buffered.
        f = FAKE(("1234\r\n56",))
        ret = [y for y in f.read_all(seps=('\n',))]
        #print ret
        self.assertEqual(f.request_data, '1234\r\n')
        self.assertEqual(f._data_buf, '56')

    def test_read_all2(self):
        # A pre-buffered byte is prepended to the incoming data.
        f = FAKE(("1234\r\n56",))
        f._data_buf = '0'
        ret = [y for y in f.read_all(seps=('\n',))]
        self.assertEqual(f.request_data, '01234\r\n')
        self.assertEqual(f._data_buf, '56')

    def test_read_all3(self):
        # Separator already present in the buffer; incoming data untouched.
        f = FAKE(("1234\r\n56",))
        f._data_buf = 'abc\r\naa'
        ret = [y for y in f.read_all(seps=('\n',))]
        self.assertEqual(f.request_data, 'abc\r\n')
        self.assertEqual(f._data_buf, 'aa')

    def test_read_all4(self):
        # Separator split across buffer ('\r') and incoming chunk ('\n').
        f = FAKE(("\n1234\r\n56",))
        f._data_buf = 'abc\r'
        ret = [y for y in f.read_all(seps=('\r\n',))]
        self.assertEqual(f.request_data, 'abc\r\n')
        self.assertEqual(f._data_buf, '1234\r\n56')

    def test_read_all5(self):
        # No partial separator in the buffer: read runs to the CRLF
        # that only appears later in the incoming chunk.
        f = FAKE(("\n1234\r\n56",))
        f._data_buf = 'abc'
        ret = [y for y in f.read_all(seps=('\r\n',))]
        self.assertEqual(f.request_data, 'abc\n1234\r\n')
        self.assertEqual(f._data_buf, '56')

    def test_read_all6(self):
        # CRLF located entirely at the start of the incoming chunk.
        f = FAKE(("\r\n1234\r\n56",))
        f._data_buf = 'abc'
        ret = [y for y in f.read_all(seps=('\r\n',))]
        self.assertEqual(f.request_data, 'abc\r\n')
        self.assertEqual(f._data_buf, '1234\r\n56')

    def test_read_all7(self):
        # Winner's index is smaller
        f = FAKE(("\n1234\r\n56",))
        f._data_buf = 'abc'
        ret = [y for y in f.read_all(seps=('\r\n', '\n'))]
        self.assertEqual(f.request_data, 'abc\n')
        self.assertEqual(f._data_buf, '1234\r\n56')

    def test_read_all8(self):
        # Size limit reached before any separator.
        f = FAKE(("\n1234\r\n56",))
        f._data_buf = 'abc'
        ret = [y for y in f.read_all(size=2, seps=('\r\n', '\n'))]
        self.assertEqual(f.request_data, 'ab')
        self.assertEqual(f._data_buf, 'c')

    def test_read_all9(self):
        # Separator sits exactly at the size boundary: separator wins.
        f = FAKE(("\n1234\r\n56",))
        f._data_buf = 'abc'
        ret = [y for y in f.read_all(size=4, seps=('\r\n', '\n'))]
        self.assertEqual(f.sep_hit, '\n')
        self.assertEqual(f.request_data, 'abc\n')
        self.assertEqual(f._data_buf, '1234\r\n56')

    def test_read_all10(self):
        # Separator encountered before the size limit: separator wins.
        f = FAKE(("\n1234\r\n56",))
        f._data_buf = 'abc'
        ret = [y for y in f.read_all(size=5, seps=('\r\n', '\n'))]
        self.assertEqual(f.sep_hit, '\n')
        self.assertEqual(f.request_data, 'abc\n')
        self.assertEqual(f._data_buf, '1234\r\n56')

    def test_read_all11(self):
        # Only CRLF accepted and size cuts between '\r' and '\n':
        # no separator is hit (sep_hit stays empty).
        f = FAKE(("\r\n1234\r\n56",))
        f._data_buf = 'abc'
        ret = [y for y in f.read_all(size=4, seps=('\r\n',))]
        self.assertEqual(f.sep_hit, '')
        self.assertEqual(f.request_data, 'abc\r')
        self.assertEqual(f._data_buf, '\n1234\r\n56')

    def test_read_all12(self):
        # Two consecutive reads: first stops at CRLF, second drains to EOF.
        f = FAKE(("1234\r\n56",))
        ret = [y for y in f.read_all(seps=('\r\n',))]
        self.assertEqual(f.sep_hit, '\r\n')
        self.assertEqual(f.request_data, '1234\r\n')
        ret = [y for y in f.read_all()]
        self.assertEqual(f.request_data, '56')
if __name__ == '__main__':
    # Run the full suite. The previous version pinned
    # defaultTest='TestARW.test_read_all12' (with the plain unittest.main()
    # commented out), which looks like a debugging leftover that silently
    # skipped every other test case.
    unittest.main()
| gpl-2.0 |
StrellaGroup/frappe | frappe/integrations/doctype/webhook/webhook.py | 15 | 2261 | # -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
import json, requests
from frappe import _
from frappe.model.document import Document
from six.moves.urllib.parse import urlparse
from time import sleep
class Webhook(Document):
	"""Configuration document for an outgoing webhook.

	A Webhook pairs a DocType with a document event and a request URL;
	`enqueue_webhook` (module level) performs the actual HTTP delivery.
	"""

	def autoname(self):
		# Name is the doctype/event pair, e.g. "Note-on_update".
		self.name = self.webhook_doctype + "-" + self.webhook_docevent

	def validate(self):
		"""Run all field-level validations before save."""
		self.validate_docevent()
		self.validate_request_url()
		self.validate_repeating_fields()

	def on_update(self):
		# Invalidate the cached webhook list so changes take effect
		# immediately.
		frappe.cache().delete_value('webhooks')

	def validate_docevent(self):
		"""Reject submit/cancel events on doctypes that are not submittable."""
		if self.webhook_doctype:
			is_submittable = frappe.get_value("DocType", self.webhook_doctype, "is_submittable")
			if not is_submittable and self.webhook_docevent in ["on_submit", "on_cancel", "on_update_after_submit"]:
				frappe.throw(_("DocType must be Submittable for the selected Doc Event"))

	def validate_request_url(self):
		"""Ensure the request URL parses to a non-empty network location."""
		try:
			request_url = urlparse(self.request_url).netloc
			if not request_url:
				raise frappe.ValidationError
		except Exception as e:
			frappe.throw(_("Check Request URL"), exc=e)

	def validate_repeating_fields(self):
		"""Error when Same Field is entered multiple times in webhook_data"""
		webhook_data = []
		for entry in self.webhook_data:
			webhook_data.append(entry.fieldname)
		# Duplicates exist iff the set is smaller than the list.
		if len(webhook_data)!= len(set(webhook_data)):
			frappe.throw(_("Same Field is entered more than once"))
def enqueue_webhook(doc, webhook):
	"""POST the configured fields of *doc* to the webhook's request URL.

	Retries up to 3 times with an increasing back-off (1s, 4s) between
	attempts and re-raises the last error if every attempt fails.

	:param doc: the document that triggered the webhook
	:param webhook: dict-like with at least a "name" key identifying the
		Webhook document to load
	"""
	webhook = frappe.get_doc("Webhook", webhook.get("name"))

	headers = {}
	if webhook.webhook_headers:
		for h in webhook.webhook_headers:
			if h.get("key") and h.get("value"):
				headers[h.get("key")] = h.get("value")

	data = {}
	if webhook.webhook_data:
		# Snapshot the document once and look fields up by name instead of
		# scanning every doc field per configured entry (was O(n*m)).
		doc_dict = doc.as_dict()
		for w in webhook.webhook_data:
			if w.fieldname in doc_dict:
				data[w.key] = doc_dict[w.fieldname]

	for i in range(3):
		try:
			r = requests.post(webhook.request_url, data=json.dumps(data), headers=headers, timeout=5)
			r.raise_for_status()
			frappe.logger().debug({"webhook_success": r.text})
			break
		except Exception as e:
			frappe.logger().debug({"webhook_error": e, "try": i + 1})
			if i == 2:
				# Out of retries: propagate the last error with its
				# original traceback (the old code slept 7s first and
				# used `raise e`).
				raise
			sleep(3 * i + 1)
| mit |
tilde-lab/tilde | tests/apps/perovskite_tilting/tilt_data.py | 1 | 4199 | #!/usr/bin/env python
# Euler tilting angles extraction test
# Author: Evgeny Blokhin
"""
Data for this test are published in:
[1] Surf.Sci.602, 3674 (2008), http://dx.doi.org/10.1016/j.susc.2008.10.002
[2] Evgeny Blokhin's MSc. thesis (in Russian), http://dx.doi.org/10.13140/RG.2.1.4276.2727
[3] PRB83, 134108 (2011), http://dx.doi.org/10.1103/PhysRevB.83.134108
[4] PRB88, 241407 (2013), http://dx.doi.org/10.1103/PhysRevB.88.241407
"""
from __future__ import print_function
import os
import unittest
import six
import set_path
from tilde.core.api import API
data_dir = os.path.realpath(os.path.dirname(__file__) + '/outputs')
# NB: in Euler notation delta is gamma, delta plus/minus phi is alpha
# or in another terminology: phi is gamma, phi plus/minus psi is alpha
# Map of CRYSTAL output filename -> expected Euler tilting angles.
# Each entry carries the literature reference ('comment') and, under 'data',
# a mapping of octahedron corner atom number -> [delta, phi, psi] angles in
# degrees as published.
test_data = {
    'check_last_point.cryst.out': {
        'comment': '[1], Table 1, calculated, Euler notation',
        'data': {
            5: [0.04, 12.26, 7.93], # the 1st test: corner number can be 5 or 7 FIXME
        }
    },
    'y4h4srhfo3_62_pbe0_9hf_cis_go.cryst.out': {
        'comment': '[2], Table 10, HfO2-terminated, dissociative water adsorption, monolayer coverage, Euler notation (bare slab reference data: delta=1.9, phi=9.729, psi=1.867)',
        'data': {
            17: [1.56, 15.07, 8.91],
        }
    },
    'srhfo3_62_pbe0_110_9sr_go.cryst.out': {
        'comment': '[1], Table 5, SrO termination, 110 surface, relaxed, Euler notation',
        'data': {
            13: [14.73, 12.03, 5.24],
            15: [1.54, 8.74, 12.48],
        }
    },
    'sto140afd_f3.cryst.out': {
        'comment': '[3], Table 6, LCAO-PBE0 optimized basis set',
        'data': {
            3: [0.0, 0.0, 0.85],
        }
    },
    '5ti_d_x2_scanned_freqs.cryst.out': {
        'comment': '[4], page 241407-2, at the left, second paragraph',
        'data': {
            9: [0.0, 0.0, 0.36],
        }
    }
}
# Fail fast if any of the reference output files is missing from outputs/.
for k, v in six.iteritems(test_data):
    if not os.path.exists(data_dir + os.sep + k):
        raise RuntimeError(k + ': missed file for test!')

# Shared tilde API instance used by both the unittest class and __main__.
work = API()
class Data_Perovskite_Tilting_Test(unittest.TestCase):
    """Compare extracted perovskite tilting angles against published data."""

    @classmethod
    def setUpClass(cls):
        # Parse, classify and post-process every reference output once and
        # store [expected, obtained] angle dicts per file in cls.results.
        cls.results = {}
        for k, v in six.iteritems(test_data):
            cls.results[k] = {}
            for calc, error in work.parse(data_dir + os.sep + k):
                if error:
                    raise RuntimeError(k + ': ' + error)
                calc, error = work.classify(calc)
                if error:
                    raise RuntimeError(k + ': ' + error)
                calc = work.postprocess(calc)
                target_category_num = 4 # perovskite category, pre-defined in init-data.sql
                assert target_category_num in calc.info['tags']
                cls.results[k] = [ v['data'], calc.apps['perovskite_tilting']['data'] ]

    def test_all(self):
        # v[0] holds the published angles, v[1] the ones computed by the
        # perovskite_tilting app; they must match per octahedron corner.
        for k, v in six.iteritems(self.results):
            centers = v[0].keys()
            for center in centers:
                self.assertEqual(v[0][center], v[1][center]) # the 1st test: corner number can be 5 or 7 FIXME
if __name__ == "__main__":
    # Standalone mode: run the same pipeline as the unittest but print a
    # human-readable expected/got comparison for every reference file.
    # Use six.iteritems for Python 2/3 compatibility; the previous bare
    # dict.iteritems() call is Python-2-only and was inconsistent with the
    # rest of this module.
    for k, v in six.iteritems(test_data):
        for calc, error in work.parse(data_dir + os.sep + k):
            if error:
                raise RuntimeError(k + ': ' + error)
            calc, error = work.classify(calc)
            if error:
                raise RuntimeError(k + ': ' + error)
            calc = work.postprocess(calc)
            target_category_num = 4 # perovskite category, pre-defined in init-data.sql
            assert target_category_num in calc.info['tags']
            if not 'perovskite_tilting' in calc.apps:
                raise RuntimeError(k + ': invalid result!')
            print("\nSource", v['comment'], "(" + k + ")")
            for center in v['data'].keys():
                if not center in calc.apps['perovskite_tilting']['data']:
                    raise RuntimeError(k + ': invalid result!')
                print('Octahedron N', center)
                print('expected:', v['data'][center])
                print('got :', calc.apps['perovskite_tilting']['data'][center])
    print(__doc__)
| mit |
iandev/HarvestMood | Requests/docs/_themes/flask_theme_support.py | 2228 | 4875 | # flasky extensions. flasky pygments style based on tango style
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
class FlaskyStyle(Style):
    """Pygments style for the Flask documentation theme.

    Based on the "tango" style; maps Pygments token types to
    color/format strings (see pygments.style.Style for the syntax).
    """
    background_color = "#f8f8f8"
    default_style = ""

    styles = {
        # No corresponding class for the following:
        #Text:                     "", # class:  ''
        Whitespace:                "underline #f8f8f8",      # class: 'w'
        Error:                     "#a40000 border:#ef2929", # class: 'err'
        Other:                     "#000000",                # class 'x'

        Comment:                   "italic #8f5902",         # class: 'c'
        Comment.Preproc:           "noitalic",               # class: 'cp'

        Keyword:                   "bold #004461",           # class: 'k'
        Keyword.Constant:          "bold #004461",           # class: 'kc'
        Keyword.Declaration:       "bold #004461",           # class: 'kd'
        Keyword.Namespace:         "bold #004461",           # class: 'kn'
        Keyword.Pseudo:            "bold #004461",           # class: 'kp'
        Keyword.Reserved:          "bold #004461",           # class: 'kr'
        Keyword.Type:              "bold #004461",           # class: 'kt'

        Operator:                  "#582800",                # class: 'o'
        Operator.Word:             "bold #004461",           # class: 'ow' - like keywords

        Punctuation:               "bold #000000",           # class: 'p'

        # because special names such as Name.Class, Name.Function, etc.
        # are not recognized as such later in the parsing, we choose them
        # to look the same as ordinary variables.
        Name:                      "#000000",                # class: 'n'
        Name.Attribute:            "#c4a000",                # class: 'na' - to be revised
        Name.Builtin:              "#004461",                # class: 'nb'
        Name.Builtin.Pseudo:       "#3465a4",                # class: 'bp'
        Name.Class:                "#000000",                # class: 'nc' - to be revised
        Name.Constant:             "#000000",                # class: 'no' - to be revised
        Name.Decorator:            "#888",                   # class: 'nd' - to be revised
        Name.Entity:               "#ce5c00",                # class: 'ni'
        Name.Exception:            "bold #cc0000",           # class: 'ne'
        Name.Function:             "#000000",                # class: 'nf'
        Name.Property:             "#000000",                # class: 'py'
        Name.Label:                "#f57900",                # class: 'nl'
        Name.Namespace:            "#000000",                # class: 'nn' - to be revised
        Name.Other:                "#000000",                # class: 'nx'
        Name.Tag:                  "bold #004461",           # class: 'nt' - like a keyword
        Name.Variable:             "#000000",                # class: 'nv' - to be revised
        Name.Variable.Class:       "#000000",                # class: 'vc' - to be revised
        Name.Variable.Global:      "#000000",                # class: 'vg' - to be revised
        Name.Variable.Instance:    "#000000",                # class: 'vi' - to be revised

        Number:                    "#990000",                # class: 'm'

        Literal:                   "#000000",                # class: 'l'
        Literal.Date:              "#000000",                # class: 'ld'

        String:                    "#4e9a06",                # class: 's'
        String.Backtick:           "#4e9a06",                # class: 'sb'
        String.Char:               "#4e9a06",                # class: 'sc'
        String.Doc:                "italic #8f5902",         # class: 'sd' - like a comment
        String.Double:             "#4e9a06",                # class: 's2'
        String.Escape:             "#4e9a06",                # class: 'se'
        String.Heredoc:            "#4e9a06",                # class: 'sh'
        String.Interpol:           "#4e9a06",                # class: 'si'
        String.Other:              "#4e9a06",                # class: 'sx'
        String.Regex:              "#4e9a06",                # class: 'sr'
        String.Single:             "#4e9a06",                # class: 's1'
        String.Symbol:             "#4e9a06",                # class: 'ss'

        Generic:                   "#000000",                # class: 'g'
        Generic.Deleted:           "#a40000",                # class: 'gd'
        Generic.Emph:              "italic #000000",         # class: 'ge'
        Generic.Error:             "#ef2929",                # class: 'gr'
        Generic.Heading:           "bold #000080",           # class: 'gh'
        Generic.Inserted:          "#00A000",                # class: 'gi'
        Generic.Output:            "#888",                   # class: 'go'
        Generic.Prompt:            "#745334",                # class: 'gp'
        Generic.Strong:            "bold #000000",           # class: 'gs'
        Generic.Subheading:        "bold #800080",           # class: 'gu'
        Generic.Traceback:         "bold #a40000",           # class: 'gt'
    }
| mit |
xHeliotrope/injustice_dropper | env/lib/python3.4/site-packages/phonenumbers/data/region_UZ.py | 11 | 3203 | """Auto-generated file, do not edit by hand. UZ metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
# Numbering-plan metadata for Uzbekistan (UZ, country code +998).
# This structure is machine-generated from libphonenumber's XML data;
# regenerate it rather than hand-editing the patterns below.
PHONE_METADATA_UZ = PhoneMetadata(id='UZ', country_code=998, international_prefix='810',
    general_desc=PhoneNumberDesc(national_number_pattern='[679]\\d{8}', possible_number_pattern='\\d{7,9}'),
    fixed_line=PhoneNumberDesc(national_number_pattern='(?:6(?:1(?:22|3[124]|4[1-4]|5[123578]|64)|2(?:22|3[0-57-9]|41)|5(?:22|3[3-7]|5[024-8])|6\\d{2}|7(?:[23]\\d|7[69])|9(?:22|4[1-8]|6[135]))|7(?:0(?:5[4-9]|6[0146]|7[12456]|9[135-8])|1[12]\\d|2(?:22|3[1345789]|4[123579]|5[14])|3(?:2\\d|3[1578]|4[1-35-7]|5[1-57]|61)|4(?:2\\d|3[1-579]|7[1-79])|5(?:22|5[1-9]|6[1457])|6(?:22|3[12457]|4[13-8])|9(?:22|5[1-9])))\\d{5}', possible_number_pattern='\\d{7,9}', example_number='662345678'),
    mobile=PhoneNumberDesc(national_number_pattern='6(?:1(?:2(?:98|2[01])|35[0-4]|50\\d|61[23]|7(?:[01][017]|4\\d|55|9[5-9]))|2(?:11\\d|2(?:[12]1|9[01379])|5(?:[126]\\d|3[0-4])|7\\d{2})|5(?:19[01]|2(?:27|9[26])|30\\d|59\\d|7\\d{2})|6(?:2(?:1[5-9]|2[0367]|38|41|52|60)|3[79]\\d|4(?:56|83)|7(?:[07]\\d|1[017]|3[07]|4[047]|5[057]|67|8[0178]|9[79])|9[0-3]\\d)|7(?:2(?:24|3[237]|4[5-9]|7[15-8])|5(?:7[12]|8[0589])|7(?:0\\d|[39][07])|9(?:0\\d|7[079]))|9(?:2(?:1[1267]|5\\d|3[01]|7[0-4])|5[67]\\d|6(?:2[0-26]|8\\d)|7\\d{2}))\\d{4}|7(?:0\\d{3}|1(?:13[01]|6(?:0[47]|1[67]|66)|71[3-69]|98\\d)|2(?:2(?:2[79]|95)|3(?:2[5-9]|6[0-6])|57\\d|7(?:0\\d|1[17]|2[27]|3[37]|44|5[057]|66|88))|3(?:2(?:1[0-6]|21|3[469]|7[159])|33\\d|5(?:0[0-4]|5[579]|9\\d)|7(?:[0-3579]\\d|4[0467]|6[67]|8[078])|9[4-6]\\d)|4(?:2(?:29|5[0257]|6[0-7]|7[1-57])|5(?:1[0-4]|8\\d|9[5-9])|7(?:0\\d|1[024589]|2[0127]|3[0137]|[46][07]|5[01]|7[5-9]|9[079])|9(?:7[015-9]|[89]\\d))|5(?:112|2(?:0\\d|2[29]|[49]4)|3[1568]\\d|52[6-9]|7(?:0[01578]|1[017]|[23]7|4[047]|[5-7]\\d|8[78]|9[079]))|6(?:2(?:2[1245]|4[2-4])|39\\d|41[179]|5(?:[349]\\d|5[0-2])|7(?:0[017]|[13]\\d|22|44|55|67|88))|9(?:22[128]|3(?:2[0-4]|7\\d)|57[05629]|7(?:2[05-9]|3[37]|4\\d|60|7[2579]|87|9[07])))\\d{4}|9[0-57-9]\\d{7}', possible_number_pattern='\\d{7,9}', example_number='912345678'),
    toll_free=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    premium_rate=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    shared_cost=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    personal_number=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    voip=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    pager=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    uan=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    voicemail=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    no_international_dialling=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    preferred_international_prefix='8~10',
    national_prefix='8',
    national_prefix_for_parsing='8',
    number_format=[NumberFormat(pattern='([679]\\d)(\\d{3})(\\d{2})(\\d{2})', format='\\1 \\2 \\3 \\4', national_prefix_formatting_rule='8 \\1')])
| mit |
hidext/oemedical | oemedical_socioeconomics/oemedical_socioeconomics/oemedical_socioeconomics.py | 1 | 6182 | # -*- coding: utf-8 -*-
##############################################################################
#
# Tech-Receptives Solutions Pvt. Ltd.
# Copyright (C) 2004-TODAY Tech-Receptives(<http://www.techreceptives.com>)
# Special Credit and Thanks to Thymbra Latinoamericana S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, orm
class OeMedicalSocioeconomics(orm.Model):
    """Extend oemedical.patient with socioeconomic assessment fields.

    Adds living-condition, family-risk and Family APGAR questionnaire
    columns to the patient record (OpenERP 7 old-API model).
    """
    _inherit = 'oemedical.patient'

    _columns = {
        # --- Socioeconomic status and housing -----------------------------
        'ses': fields.selection(
            [(None, ''),
             ('0', 'Lower'),
             ('1', 'Lower-middle'),
             ('2', 'Middle'),
             ('3', 'Middle-upper'),
             ('4', 'Higher')],
            'Socioeconomics', help="SES - Socioeconomic Status", sort=False),
        'housing': fields.selection(
            [(None, ''),
             ('0', 'Shanty, deficient sanitary conditions'),
             ('1', 'Small, crowded but with good sanitary conditions'),
             ('2', 'Comfortable and good sanitary conditions'),
             ('3', 'Roomy and excellent sanitary conditions'),
             ('4', 'Luxury and excellent sanitary conditions')],
            'Housing conditions',
            help="Housing and sanitary living conditions", sort=False),
        'hostile_area': fields.boolean(
            'Hostile Area',
            help="Check if patient lives in a zone of high hostility (eg, war)"
        ),
        # --- Utilities and services available at home ---------------------
        'sewers': fields.boolean('Sanitary Sewers'),
        'water': fields.boolean('Running Water'),
        'trash': fields.boolean('Trash recollection'),
        'electricity': fields.boolean('Electrical supply'),
        'gas': fields.boolean('Gas supply'),
        'telephone': fields.boolean('Telephone'),
        'television': fields.boolean('Television'),
        'internet': fields.boolean('Internet'),
        # --- Family and social risk factors -------------------------------
        'single_parent': fields.boolean('Single parent family'),
        'domestic_violence': fields.boolean('Domestic violence'),
        'working_children': fields.boolean('Working children'),
        'teenage_pregnancy': fields.boolean('Teenage pregnancy'),
        'sexual_abuse': fields.boolean('Sexual abuse'),
        'drug_addiction': fields.boolean('Drug addiction'),
        'school_withdrawal': fields.boolean('School withdrawal'),
        'prison_past': fields.boolean('Has been in prison'),
        'prison_current': fields.boolean('Is currently in prison'),
        'relative_in_prison': fields.boolean(
            'Relative in prison',
            help="Check if someone from the nuclear family - parents sibblings"
            " is or has been in prison"),
        'ses_notes': fields.text('Extra info'),
        # --- Family APGAR questionnaire (five items + total score) --------
        'fam_apgar_help': fields.selection(
            [(None, ''),
             ('0', 'None'),
             ('1', 'Moderately'),
             ('2', 'Very much')],
            'Help from family',
            help="Is the patient satisfied with the level of help coming from"
            " the family when there is a problem?", sort=False),
        'fam_apgar_discussion': fields.selection(
            [(None, ''),
             ('0', 'None'),
             ('1', 'Moderately'),
             ('2', 'Very much')],
            'Problems discussion',
            help="Is the patient satisfied with the level talking over the"
            " problems as family?", sort=False),
        'fam_apgar_decisions': fields.selection(
            [(None, ''),
             ('0', 'None'),
             ('1', 'Moderately'),
             ('2', 'Very much')],
            'Decision making',
            help="Is the patient satisfied with the level of making important"
            " decisions as a group ?", sort=False),
        'fam_apgar_timesharing': fields.selection(
            [(None, ''),
             ('0', 'None'),
             ('1', 'Moderately'),
             ('2', 'Very much')],
            'Time sharing',
            help="Is the patient satisfied with the level of time that they"
            " spend together?", sort=False),
        'fam_apgar_affection': fields.selection(
            [(None, ''),
             ('0', 'None'),
             ('1', 'Moderately'),
             ('2', 'Very much')],
            'Family affection',
            help="Is the patient satisfied with the level of affection coming"
            " from the family ?", sort=False),
        'fam_apgar_score': fields.integer(
            'Score',
            help="Total Family APGAR 7 - 10 : Functional Family 4 - 6 :"
            "Some level of disfunction\n"
            "0 - 3 : Severe disfunctional family\n"),
        # --- Income, education and work pattern ---------------------------
        'income': fields.selection(
            [(None, ''),
             ('h', 'High'),
             ('m', 'Medium / Average'),
             ('l', 'Low')],
            'Income', sort=False),
        'education': fields.selection(
            [(None, ''),
             ('0', 'None'),
             ('1', 'Incomplete Primary School'),
             ('2', 'Primary School'),
             ('3', 'Incomplete Secondary School'),
             ('4', 'Secondary School'),
             ('5', 'University')],
            'Education Level',
            help="Education Level", sort=False),
        'works_at_home': fields.boolean(
            'Works at home',
            help="Check if the patient works at his / her house"),
        'hours_outside': fields.integer(
            'Hours outside home',
            help="Number of hours a day the patient spend outside the house"),
    }
| agpl-3.0 |
jkonecki/autorest | AutoRest/Generators/Python/Python.Tests/Expected/AcceptanceTests/Validation/autorestvalidationtest/models/child_product.py | 2 | 1073 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ChildProduct(Model):
    """
    The product documentation.

    :param const_property: Constant string. Default value: "constant" .
    :type const_property: str
    :param count: Count
    :type count: int
    """

    # msrest validation rules: const_property must always be present.
    _validation = {
        'const_property': {'required': True},
    }

    # Maps Python attribute names to wire (JSON) keys and msrest types.
    _attribute_map = {
        'const_property': {'key': 'constProperty', 'type': 'str'},
        'count': {'key': 'count', 'type': 'int'},
    }

    def __init__(self, count=None, **kwargs):
        # NOTE(review): **kwargs is accepted but not forwarded to
        # Model.__init__ -- other autorest-generated models do forward it;
        # confirm against the code generator's output.
        self.const_property = "constant"
        self.count = count
| mit |
nikolas/edx-platform | cms/djangoapps/contentstore/views/tests/test_preview.py | 101 | 5240 | """
Tests for contentstore.views.preview.py
"""
import re
import ddt
from mock import Mock
from xblock.core import XBlock
from django.test.client import RequestFactory
from xblock.core import XBlockAside
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from contentstore.views.preview import get_preview_fragment, _preview_module_system
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.test_asides import AsideTestType
from cms.djangoapps.xblock_config.models import StudioConfig
from xmodule.modulestore.django import modulestore
class GetPreviewHtmlTestCase(ModuleStoreTestCase):
    """
    Tests for get_preview_fragment.

    Note that there are other existing test cases in test_contentstore that indirectly execute
    get_preview_fragment via the xblock RESTful API.
    """

    @XBlockAside.register_temp_plugin(AsideTestType, 'test_aside')
    def test_preview_fragment(self):
        """
        Test for calling get_preview_html. Ensures data-usage-id is correctly set and
        asides are correctly included.
        """
        course = CourseFactory.create(default_store=ModuleStoreEnum.Type.split)
        html = ItemFactory.create(
            parent_location=course.location,
            category="html",
            data={'data': "<html>foobar</html>"}
        )

        # Enable asides in Studio for the duration of this test.
        config = StudioConfig.current()
        config.enabled = True
        config.save()

        request = RequestFactory().get('/dummy-url')
        request.user = UserFactory()
        request.session = {}

        # Call get_preview_fragment directly.
        context = {
            'reorderable_items': set(),
            'read_only': True
        }
        html = get_preview_fragment(request, html, context).content

        # Verify student view html is returned, and the usage ID is as expected.
        html_pattern = re.escape(unicode(course.id.make_usage_key('html', 'replaceme'))).replace('replaceme', r'html_[0-9]*')
        self.assertRegexpMatches(
            html,
            'data-usage-id="{}"'.format(html_pattern)
        )
        self.assertRegexpMatches(html, '<html>foobar</html>')
        self.assertRegexpMatches(html, r"data-block-type=[\"\']test_aside[\"\']")
        self.assertRegexpMatches(html, "Aside rendered")
        # Now ensure the acid_aside is not in the result
        self.assertNotRegexpMatches(html, r"data-block-type=[\"\']acid_aside[\"\']")

        # Ensure about pages don't have asides
        about = modulestore().get_item(course.id.make_usage_key('about', 'overview'))
        html = get_preview_fragment(request, about, context).content
        self.assertNotRegexpMatches(html, r"data-block-type=[\"\']test_aside[\"\']")
        self.assertNotRegexpMatches(html, "Aside rendered")

    @XBlockAside.register_temp_plugin(AsideTestType, 'test_aside')
    def test_preview_no_asides(self):
        """
        Test for calling get_preview_html. Ensures data-usage-id is correctly set and
        asides are correctly excluded because they are not enabled.
        """
        course = CourseFactory.create(default_store=ModuleStoreEnum.Type.split)
        html = ItemFactory.create(
            parent_location=course.location,
            category="html",
            data={'data': "<html>foobar</html>"}
        )

        # Asides disabled in Studio: none should appear in the rendered
        # fragment even though the aside plugin itself is registered.
        config = StudioConfig.current()
        config.enabled = False
        config.save()

        request = RequestFactory().get('/dummy-url')
        request.user = UserFactory()
        request.session = {}

        # Call get_preview_fragment directly.
        context = {
            'reorderable_items': set(),
            'read_only': True
        }
        html = get_preview_fragment(request, html, context).content

        self.assertNotRegexpMatches(html, r"data-block-type=[\"\']test_aside[\"\']")
        self.assertNotRegexpMatches(html, "Aside rendered")
# Declare the runtime services this XBlock requires; the binding test below
# verifies that the Studio runtime actually provides them.
@XBlock.needs("field-data")
@XBlock.needs("i18n")
@XBlock.needs("user")
class PureXBlock(XBlock):
    """
    Pure XBlock to use in tests.
    """
    pass
@ddt.ddt
class StudioXBlockServiceBindingTest(ModuleStoreTestCase):
    """
    Tests that the Studio Module System (XBlock Runtime) provides an expected set of services.
    """
    def setUp(self):
        """
        Set up the user and request that will be used.
        """
        super(StudioXBlockServiceBindingTest, self).setUp()
        self.user = UserFactory()
        self.course = CourseFactory.create()
        self.request = Mock()
        self.field_data = Mock()

    @XBlock.register_temp_plugin(PureXBlock, identifier='pure')
    @ddt.data("user", "i18n", "field-data")
    def test_expected_services_exist(self, expected_service):
        """
        Tests that the 'user' and 'i18n' services are provided by the Studio runtime.
        """
        # Build a runtime around a freshly created 'pure' block and ask it
        # for each ddt-supplied service name in turn.
        descriptor = ItemFactory(category="pure", parent=self.course)
        runtime = _preview_module_system(
            self.request,
            descriptor,
            self.field_data,
        )
        service = runtime.service(descriptor, expected_service)
        self.assertIsNotNone(service)
| agpl-3.0 |
flychen50/trunk | third_party/googletest-r108/test/gtest_xml_outfiles_test.py | 15 | 5255 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for the gtest_xml_output module."""
__author__ = "keith.ray@gmail.com (Keith Ray)"
import gtest_test_utils
import os
import sys
import tempfile
import unittest
from xml.dom import minidom, Node
import gtest_xml_test_utils
GTEST_OUTPUT_1_TEST = "gtest_xml_outfile1_test_"
GTEST_OUTPUT_2_TEST = "gtest_xml_outfile2_test_"
EXPECTED_XML_1 = """<?xml version="1.0" encoding="UTF-8"?>
<testsuite tests="1" failures="0" disabled="0" errors="0" time="*" name="AllTests">
<testsuite name="PropertyOne" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="TestSomeProperties" status="run" time="*" classname="PropertyOne" SetUpProp="1" TestSomeProperty="1" TearDownProp="1" />
</testsuite>
</testsuite>
"""
EXPECTED_XML_2 = """<?xml version="1.0" encoding="UTF-8"?>
<testsuite tests="1" failures="0" disabled="0" errors="0" time="*" name="AllTests">
<testsuite name="PropertyTwo" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="TestSomeProperties" status="run" time="*" classname="PropertyTwo" SetUpProp="2" TestSomeProperty="2" TearDownProp="2" />
</testsuite>
</testsuite>
"""
class GTestXMLOutFilesTest(gtest_xml_test_utils.GTestXMLTestCase):
  """Unit test for Google Test's XML output functionality."""

  def setUp(self):
    # We want the trailing '/' that the last "" provides in os.path.join, for
    # telling Google Test to create an output directory instead of a single file
    # for xml output.
    self.output_dir_ = os.path.join(tempfile.mkdtemp(), "")
    self.DeleteFilesAndDir()

  def tearDown(self):
    self.DeleteFilesAndDir()

  def DeleteFilesAndDir(self):
    # Best-effort cleanup: each removal is wrapped so a missing file/dir
    # does not abort the others.
    try:
      os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_1_TEST + ".xml"))
    except os.error:
      pass
    try:
      os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_2_TEST + ".xml"))
    except os.error:
      pass
    try:
      os.removedirs(self.output_dir_)
    except os.error:
      pass

  def testOutfile1(self):
    self._TestOutFile(GTEST_OUTPUT_1_TEST, EXPECTED_XML_1)

  def testOutfile2(self):
    self._TestOutFile(GTEST_OUTPUT_2_TEST, EXPECTED_XML_2)

  def _TestOutFile(self, test_name, expected_xml):
    """Run *test_name* with --gtest_output and diff its XML against
    *expected_xml* (after normalization)."""
    gtest_prog_path = os.path.join(gtest_test_utils.GetBuildDir(),
                                   test_name)
    # NOTE(review): '&>' is a bash-ism and os.system may use /bin/sh;
    # confirm the redirect behaves as intended on non-bash systems.
    command = "cd %s && %s --gtest_output=xml:%s &> /dev/null" % (
        tempfile.mkdtemp(), gtest_prog_path, self.output_dir_)
    status = os.system(command)
    self.assertEquals(0, gtest_test_utils.GetExitStatus(status))

    # TODO(wan@google.com): libtool causes the built test binary to be
    #   named lt-gtest_xml_outfiles_test_ instead of
    #   gtest_xml_outfiles_test_.  To account for this possibillity, we
    #   allow both names in the following code.  We should remove this
    #   hack when Chandler Carruth's libtool replacement tool is ready.
    output_file_name1 = test_name + ".xml"
    output_file1 = os.path.join(self.output_dir_, output_file_name1)
    output_file_name2 = 'lt-' + output_file_name1
    output_file2 = os.path.join(self.output_dir_, output_file_name2)
    self.assert_(os.path.isfile(output_file1) or os.path.isfile(output_file2),
                 output_file1)

    expected = minidom.parseString(expected_xml)
    if os.path.isfile(output_file1):
      actual = minidom.parse(output_file1)
    else:
      actual = minidom.parse(output_file2)
    self.NormalizeXml(actual.documentElement)
    self.AssertEquivalentNodes(expected.documentElement,
                               actual.documentElement)
    expected.unlink()
    actual.unlink()
if __name__ == "__main__":
  # Disable stack traces in gtest output so the XML is deterministic.
  os.environ["GTEST_STACK_TRACE_DEPTH"] = "0"
  gtest_test_utils.Main()
| bsd-3-clause |
dyrock/trafficserver | tests/gold_tests/basic/basic-manager.test.py | 8 | 1209 | '''
'''
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Test.Summary = '''
Test that Trafficserver starts with default configurations.
'''

# Start ATS under traffic_manager with automatically selected ports.
ts = Test.MakeATSProcess("ts", command="traffic_manager", select_ports=True)

t = Test.AddTestRun("Test traffic server started properly")
# The server must still be alive after the curl probe completes.
t.StillRunningAfter = Test.Processes.ts

p = t.Processes.Default
# Probe the proxy port; any HTTP response (curl exit 0) proves startup.
p.Command = "curl http://127.0.0.1:{0}".format(ts.Variables.port)
p.ReturnCode = 0
p.StartBefore(Test.Processes.ts)
| apache-2.0 |
luckpizza/n8000-kernel-aufs | tools/perf/scripts/python/syscall-counts-by-pid.py | 11180 | 1927 | # system call counts, by pid
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
usage = "perf script -s syscall-counts-by-pid.py [comm]\n";

# Optional filter: a single argument is taken as a pid if numeric,
# otherwise as a command name.
for_comm = None
for_pid = None

if len(sys.argv) > 2:
	sys.exit(usage)

if len(sys.argv) > 1:
	try:
		for_pid = int(sys.argv[1])
	except:
		for_comm = sys.argv[1]

# Nested auto-vivifying dict: syscalls[comm][pid][syscall_id] -> count.
syscalls = autodict()
def trace_begin():
	# Called by perf once before event processing starts.
	print "Press control+C to stop and show the summary"
def trace_end():
	# Called by perf after the last event; emit the accumulated summary.
	print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	id, args):
	# Per-event handler invoked by perf for raw_syscalls:sys_enter.
	# Skip events filtered out by the optional [comm]/pid argument.
	if (for_comm and common_comm != for_comm) or \
	   (for_pid and common_pid != for_pid ):
		return
	try:
		syscalls[common_comm][common_pid][id] += 1
	except TypeError:
		# autodict creates missing levels lazily; the first increment on a
		# fresh leaf raises TypeError, which seeds the counter at 1.
		syscalls[common_comm][common_pid][id] = 1
def print_syscall_totals():
	# Print per-comm/per-pid syscall counts, most frequent first.
	if for_comm is not None:
		print "\nsyscall events for %s:\n\n" % (for_comm),
	else:
		print "\nsyscall events by comm/pid:\n\n",

	print "%-40s  %10s\n" % ("comm [pid]/syscalls", "count"),
	print "%-40s  %10s\n" % ("----------------------------------------", \
				 "----------"),

	comm_keys = syscalls.keys()
	for comm in comm_keys:
		pid_keys = syscalls[comm].keys()
		for pid in pid_keys:
			print "\n%s [%d]\n" % (comm, pid),
			id_keys = syscalls[comm][pid].keys()
			# Sort syscall ids by descending count (Python 2 tuple
			#-unpacking lambda syntax).
			for id, val in sorted(syscalls[comm][pid].iteritems(), \
				key = lambda(k, v): (v, k),  reverse = True):
				print "  %-38s  %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
heke123/chromium-crosswalk | tools/grit/grit/format/c_format.py | 25 | 2777 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Formats as a .C file for compilation.
"""
import os
import re
import types
from grit import util
def _FormatHeader(root, output_dir):
"""Returns the required preamble for C files."""
# Find the location of the resource header file, so that we can include
# it.
resource_header = 'resource.h' # fall back to this
for output in root.GetOutputFiles():
if output.attrs['type'] == 'rc_header':
resource_header = os.path.abspath(output.GetOutputFilename())
resource_header = util.MakeRelativePath(output_dir, resource_header)
return """// This file is automatically generated by GRIT. Do not edit.
#include "%s"
// All strings are UTF-8
""" % (resource_header)
# end _FormatHeader() function
def Format(root, lang='en', output_dir='.'):
  """Outputs a C switch statement representing the string table.

  Yields the file preamble, then one case clause per translatable
  message, then the closing default clause.
  """
  from grit.node import message
  # types.StringTypes is Python 2 only (matches both str and unicode).
  assert isinstance(lang, types.StringTypes)
  yield _FormatHeader(root, output_dir)
  yield 'const char* GetString(int id) {\n switch (id) {'
  for item in root.ActiveDescendants():
    # Nodes are context managers; entering marks the node active while
    # it is being emitted.
    with item:
      if isinstance(item, message.MessageNode):
        yield _FormatMessage(item, lang)
  yield '\n default:\n return 0;\n }\n}\n'
def _HexToOct(match):
"Return the octal form of the hex numbers"
hex = match.group("hex")
result = ""
while len(hex):
next_num = int(hex[2:4], 16)
result += "\\" + '%03d' % int(oct(next_num), 10)
hex = hex[4:]
return match.group("escaped_backslashes") + result
def _FormatMessage(item, lang):
  """Format a single <message> element as a C case clause."""
  message = item.ws_at_start + item.Translate(lang) + item.ws_at_end
  # output message with non-ascii chars escaped as octal numbers
  # C's grammar allows escaped hexadecimal numbers to be infinite,
  # but octal is always of the form \OOO
  # NOTE: the 'string_escape' codec is Python 2 only; this module does
  # not run unmodified on Python 3.
  message = message.encode('utf-8').encode('string_escape')
  # an escaped char is (\xHH)+ but only if the initial
  # backslash is not escaped.
  not_a_backslash = r"(^|[^\\])" # beginning of line or a non-backslash char
  escaped_backslashes = not_a_backslash + r"(\\\\)*"
  hex_digits = r"((\\x)[0-9a-f]{2})+"
  two_digit_hex_num = re.compile(
      r"(?P<escaped_backslashes>%s)(?P<hex>%s)"
      % (escaped_backslashes, hex_digits))
  # Rewrite every \xHH run as octal escapes via _HexToOct.
  message = two_digit_hex_num.sub(_HexToOct, message)
  # unescape \ (convert \\ back to \)
  message = message.replace('\\\\', '\\')
  message = message.replace('"', '\\"')
  message = util.LINEBREAKS.sub(r'\\n', message)
  # The first textual id is the generated resource-constant name.
  name_attr = item.GetTextualIds()[0]
  return '\n case %s:\n return "%s";' % (name_attr, message)
| bsd-3-clause |
polypmer/freestuff-bot | freestuffs/docs/conf.py | 2 | 9734 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# freestuffs documentation build configuration file, created by
# sphinx-quickstart on Sat Jun 11 15:48:51 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys

# Make the package importable for autodoc.  Resolve the path relative to
# this conf.py (Sphinx executes conf.py with the docs directory as the
# working directory), so the documentation builds on any machine instead
# of depending on one developer's hard-coded home-directory path.
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'freestuffs'
copyright = '2016, Fenimore Love MIT'
author = 'Fenimore Love'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# # nature alabaster default sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = 'freestuffs v0.1'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'freestuffsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'freestuffs.tex', 'freestuffs Documentation',
'Fenimore Love', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'freestuffs', 'freestuffs Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'freestuffs', 'freestuffs Documentation',
author, 'freestuffs', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
| mit |
dann/python-hookable | docs/conf.py | 1 | 9257 | # -*- coding: utf-8 -*-
#
# hookable documentation build configuration file, created by
# sphinx-quickstart on Wed Jan 4 21:12:53 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# One import per line, sorted, per PEP 8.
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.pngmath', 'sphinx.ext.mathjax', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'hookable'
copyright = u'2012, dann'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.01'
# The full version, including alpha/beta/rc tags.
release = '0.01'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'hookabledoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'hookable.tex', u'hookable Documentation',
u'dann', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'hookable', u'hookable Documentation',
[u'dann'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'hookable', u'hookable Documentation',
u'dann', 'hookable', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'hookable'
epub_author = u'dann'
epub_publisher = u'dann'
epub_copyright = u'2012, dann'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| bsd-3-clause |
houzhenggang/hiwifi-openwrt-HC5661-HC5761 | staging_dir/host/lib/python2.7/test/test_mailbox.py | 19 | 82822 | import os
import sys
import time
import stat
import socket
import email
import email.message
import re
import StringIO
from test import test_support
import unittest
import mailbox
import glob
try:
import fcntl
except ImportError:
pass
# Silence Py3k warning
rfc822 = test_support.import_module('rfc822', deprecated=True)
class TestBase(unittest.TestCase):
    """Helpers shared by all mailbox test cases."""

    def _check_sample(self, msg):
        """Inspect a mailbox.Message representation of the sample message."""
        self.assertIsInstance(msg, email.message.Message)
        self.assertIsInstance(msg, mailbox.Message)
        # Every sample header value must appear among the message's values
        # for that header name.
        for key, value in _sample_headers.iteritems():
            self.assertIn(value, msg.get_all(key))
        self.assertTrue(msg.is_multipart())
        self.assertEqual(len(msg.get_payload()), len(_sample_payloads))
        # Sub-parts come back as plain email Messages, not mailbox ones.
        for i, payload in enumerate(_sample_payloads):
            part = msg.get_payload(i)
            self.assertIsInstance(part, email.message.Message)
            self.assertNotIsInstance(part, mailbox.Message)
            self.assertEqual(part.get_payload(), payload)

    def _delete_recursively(self, target):
        """Delete a file, or delete a directory tree recursively."""
        if os.path.isdir(target):
            # topdown=False yields leaves first so directories are empty
            # by the time they are removed.
            for path, dirs, files in os.walk(target, topdown=False):
                for name in files:
                    os.remove(os.path.join(path, name))
                for name in dirs:
                    os.rmdir(os.path.join(path, name))
            os.rmdir(target)
        elif os.path.exists(target):
            os.remove(target)
class TestMailbox(TestBase):
_factory = None # Overridden by subclasses to reuse tests
_template = 'From: foo\n\n%s'
def setUp(self):
self._path = test_support.TESTFN
self._delete_recursively(self._path)
self._box = self._factory(self._path)
def tearDown(self):
self._box.close()
self._delete_recursively(self._path)
    def test_add(self):
        """Add copies of a sample message in every supported input form.

        add() must accept a plain string, a mailbox.Message, an
        email.message.Message and a file-like object, returning a
        usable key for each.
        """
        keys = []
        keys.append(self._box.add(self._template % 0))
        self.assertEqual(len(self._box), 1)
        keys.append(self._box.add(mailbox.Message(_sample_message)))
        self.assertEqual(len(self._box), 2)
        keys.append(self._box.add(email.message_from_string(_sample_message)))
        self.assertEqual(len(self._box), 3)
        keys.append(self._box.add(StringIO.StringIO(_sample_message)))
        self.assertEqual(len(self._box), 4)
        keys.append(self._box.add(_sample_message))
        self.assertEqual(len(self._box), 5)
        self.assertEqual(self._box.get_string(keys[0]), self._template % 0)
        # keys[1:] all hold copies of the full sample message.
        for i in (1, 2, 3, 4):
            self._check_sample(self._box[keys[i]])
def test_remove(self):
# Remove messages using remove()
self._test_remove_or_delitem(self._box.remove)
def test_delitem(self):
# Remove messages using __delitem__()
self._test_remove_or_delitem(self._box.__delitem__)
def _test_remove_or_delitem(self, method):
# (Used by test_remove() and test_delitem().)
key0 = self._box.add(self._template % 0)
key1 = self._box.add(self._template % 1)
self.assertEqual(len(self._box), 2)
method(key0)
l = len(self._box)
self.assertEqual(l, 1)
self.assertRaises(KeyError, lambda: self._box[key0])
self.assertRaises(KeyError, lambda: method(key0))
self.assertEqual(self._box.get_string(key1), self._template % 1)
key2 = self._box.add(self._template % 2)
self.assertEqual(len(self._box), 2)
method(key2)
l = len(self._box)
self.assertEqual(l, 1)
self.assertRaises(KeyError, lambda: self._box[key2])
self.assertRaises(KeyError, lambda: method(key2))
self.assertEqual(self._box.get_string(key1), self._template % 1)
method(key1)
self.assertEqual(len(self._box), 0)
self.assertRaises(KeyError, lambda: self._box[key1])
self.assertRaises(KeyError, lambda: method(key1))
    def test_discard(self, repetitions=10):
        """Discard messages; a second discard of the same key is a no-op.

        NOTE(review): the `repetitions` parameter (and `key1`) are never
        used in the body -- possibly leftovers from a copied test; confirm
        before removing, since the default argument is part of the
        signature.
        """
        key0 = self._box.add(self._template % 0)
        key1 = self._box.add(self._template % 1)
        self.assertEqual(len(self._box), 2)
        self._box.discard(key0)
        self.assertEqual(len(self._box), 1)
        self.assertRaises(KeyError, lambda: self._box[key0])
        # Unlike remove(), discarding an already-absent key must not raise.
        self._box.discard(key0)
        self.assertEqual(len(self._box), 1)
        self.assertRaises(KeyError, lambda: self._box[key0])
def test_get(self):
# Retrieve messages using get()
key0 = self._box.add(self._template % 0)
msg = self._box.get(key0)
self.assertEqual(msg['from'], 'foo')
self.assertEqual(msg.get_payload(), '0')
self.assertIs(self._box.get('foo'), None)
self.assertFalse(self._box.get('foo', False))
self._box.close()
self._box = self._factory(self._path, factory=rfc822.Message)
key1 = self._box.add(self._template % 1)
msg = self._box.get(key1)
self.assertEqual(msg['from'], 'foo')
self.assertEqual(msg.fp.read(), '1')
def test_getitem(self):
# Retrieve message using __getitem__()
key0 = self._box.add(self._template % 0)
msg = self._box[key0]
self.assertEqual(msg['from'], 'foo')
self.assertEqual(msg.get_payload(), '0')
self.assertRaises(KeyError, lambda: self._box['foo'])
self._box.discard(key0)
self.assertRaises(KeyError, lambda: self._box[key0])
def test_get_message(self):
# Get Message representations of messages
key0 = self._box.add(self._template % 0)
key1 = self._box.add(_sample_message)
msg0 = self._box.get_message(key0)
self.assertIsInstance(msg0, mailbox.Message)
self.assertEqual(msg0['from'], 'foo')
self.assertEqual(msg0.get_payload(), '0')
self._check_sample(self._box.get_message(key1))
def test_get_string(self):
# Get string representations of messages
key0 = self._box.add(self._template % 0)
key1 = self._box.add(_sample_message)
self.assertEqual(self._box.get_string(key0), self._template % 0)
self.assertEqual(self._box.get_string(key1), _sample_message)
def test_get_file(self):
# Get file representations of messages
key0 = self._box.add(self._template % 0)
key1 = self._box.add(_sample_message)
self.assertEqual(self._box.get_file(key0).read().replace(os.linesep, '\n'),
self._template % 0)
self.assertEqual(self._box.get_file(key1).read().replace(os.linesep, '\n'),
_sample_message)
def test_get_file_can_be_closed_twice(self):
# Issue 11700
key = self._box.add(_sample_message)
f = self._box.get_file(key)
f.close()
f.close()
def test_iterkeys(self):
# Get keys using iterkeys()
self._check_iteration(self._box.iterkeys, do_keys=True, do_values=False)
def test_keys(self):
# Get keys using keys()
self._check_iteration(self._box.keys, do_keys=True, do_values=False)
def test_itervalues(self):
# Get values using itervalues()
self._check_iteration(self._box.itervalues, do_keys=False,
do_values=True)
def test_iter(self):
# Get values using __iter__()
self._check_iteration(self._box.__iter__, do_keys=False,
do_values=True)
def test_values(self):
# Get values using values()
self._check_iteration(self._box.values, do_keys=False, do_values=True)
def test_iteritems(self):
# Get keys and values using iteritems()
self._check_iteration(self._box.iteritems, do_keys=True,
do_values=True)
def test_items(self):
# Get keys and values using items()
self._check_iteration(self._box.items, do_keys=True, do_values=True)
    def _check_iteration(self, method, do_keys, do_values, repetitions=10):
        """Common driver for the key/value iteration tests.

        `method` is the mailbox accessor under test; `do_keys` and
        `do_values` say whether it yields keys, values, or (key, value)
        pairs.
        """
        # A freshly created mailbox must iterate as empty.
        for value in method():
            self.fail("Not empty")
        keys, values = [], []
        for i in xrange(repetitions):
            keys.append(self._box.add(self._template % i))
            values.append(self._template % i)
        if do_keys and not do_values:
            returned_keys = list(method())
        elif do_values and not do_keys:
            returned_values = list(method())
        else:
            # Pair-yielding accessors (items/iteritems): split the pairs.
            returned_keys, returned_values = [], []
            for key, value in method():
                returned_keys.append(key)
                returned_values.append(value)
        if do_keys:
            self.assertEqual(len(keys), len(returned_keys))
            self.assertEqual(set(keys), set(returned_keys))
        if do_values:
            # Message order is not guaranteed, so only check that each
            # returned payload is one of the added messages.
            count = 0
            for value in returned_values:
                self.assertEqual(value['from'], 'foo')
                self.assertTrue(int(value.get_payload()) < repetitions,
                                (value.get_payload(), repetitions))
                count += 1
            self.assertEqual(len(values), count)
def test_has_key(self):
# Check existence of keys using has_key()
self._test_has_key_or_contains(self._box.has_key)
def test_contains(self):
# Check existence of keys using __contains__()
self._test_has_key_or_contains(self._box.__contains__)
def _test_has_key_or_contains(self, method):
# (Used by test_has_key() and test_contains().)
self.assertFalse(method('foo'))
key0 = self._box.add(self._template % 0)
self.assertTrue(method(key0))
self.assertFalse(method('foo'))
key1 = self._box.add(self._template % 1)
self.assertTrue(method(key1))
self.assertTrue(method(key0))
self.assertFalse(method('foo'))
self._box.remove(key0)
self.assertFalse(method(key0))
self.assertTrue(method(key1))
self.assertFalse(method('foo'))
self._box.remove(key1)
self.assertFalse(method(key1))
self.assertFalse(method(key0))
self.assertFalse(method('foo'))
def test_len(self, repetitions=10):
# Get message count
keys = []
for i in xrange(repetitions):
self.assertEqual(len(self._box), i)
keys.append(self._box.add(self._template % i))
self.assertEqual(len(self._box), i + 1)
for i in xrange(repetitions):
self.assertEqual(len(self._box), repetitions - i)
self._box.remove(keys[i])
self.assertEqual(len(self._box), repetitions - i - 1)
def test_set_item(self):
# Modify messages using __setitem__()
key0 = self._box.add(self._template % 'original 0')
self.assertEqual(self._box.get_string(key0),
self._template % 'original 0')
key1 = self._box.add(self._template % 'original 1')
self.assertEqual(self._box.get_string(key1),
self._template % 'original 1')
self._box[key0] = self._template % 'changed 0'
self.assertEqual(self._box.get_string(key0),
self._template % 'changed 0')
self._box[key1] = self._template % 'changed 1'
self.assertEqual(self._box.get_string(key1),
self._template % 'changed 1')
self._box[key0] = _sample_message
self._check_sample(self._box[key0])
self._box[key1] = self._box[key0]
self._check_sample(self._box[key1])
self._box[key0] = self._template % 'original 0'
self.assertEqual(self._box.get_string(key0),
self._template % 'original 0')
self._check_sample(self._box[key1])
self.assertRaises(KeyError,
lambda: self._box.__setitem__('foo', 'bar'))
self.assertRaises(KeyError, lambda: self._box['foo'])
self.assertEqual(len(self._box), 2)
def test_clear(self, iterations=10):
# Remove all messages using clear()
keys = []
for i in xrange(iterations):
self._box.add(self._template % i)
for i, key in enumerate(keys):
self.assertEqual(self._box.get_string(key), self._template % i)
self._box.clear()
self.assertEqual(len(self._box), 0)
for i, key in enumerate(keys):
self.assertRaises(KeyError, lambda: self._box.get_string(key))
def test_pop(self):
# Get and remove a message using pop()
key0 = self._box.add(self._template % 0)
self.assertIn(key0, self._box)
key1 = self._box.add(self._template % 1)
self.assertIn(key1, self._box)
self.assertEqual(self._box.pop(key0).get_payload(), '0')
self.assertNotIn(key0, self._box)
self.assertIn(key1, self._box)
key2 = self._box.add(self._template % 2)
self.assertIn(key2, self._box)
self.assertEqual(self._box.pop(key2).get_payload(), '2')
self.assertNotIn(key2, self._box)
self.assertIn(key1, self._box)
self.assertEqual(self._box.pop(key1).get_payload(), '1')
self.assertNotIn(key1, self._box)
self.assertEqual(len(self._box), 0)
def test_popitem(self, iterations=10):
    """Get and remove arbitrary (key, message) pairs using popitem().

    Each of the ``iterations`` keys must be returned exactly once with
    the payload that was stored under it, and the box must be empty
    afterwards.
    """
    keys = []
    # Honor the ``iterations`` parameter; the original hard-coded
    # xrange(10) in both loops, silently ignoring the argument.
    for i in xrange(iterations):
        keys.append(self._box.add(self._template % i))
    seen = []
    for i in xrange(iterations):
        key, msg = self._box.popitem()
        self.assertIn(key, keys)
        self.assertNotIn(key, seen)   # no key may be returned twice
        seen.append(key)
        self.assertEqual(int(msg.get_payload()), keys.index(key))
    self.assertEqual(len(self._box), 0)
    for key in keys:
        self.assertRaises(KeyError, lambda: self._box[key])
def test_update(self):
    # Modify multiple messages using update()
    key0 = self._box.add(self._template % 'original 0')
    key1 = self._box.add(self._template % 'original 1')
    key2 = self._box.add(self._template % 'original 2')
    # Mapping form: only the listed keys change; key1 stays untouched.
    self._box.update({key0: self._template % 'changed 0',
                      key2: _sample_message})
    self.assertEqual(len(self._box), 3)
    self.assertEqual(self._box.get_string(key0),
                     self._template % 'changed 0')
    self.assertEqual(self._box.get_string(key1),
                     self._template % 'original 1')
    self._check_sample(self._box[key2])
    # Sequence-of-pairs form.
    self._box.update([(key2, self._template % 'changed 2'),
                      (key1, self._template % 'changed 1'),
                      (key0, self._template % 'original 0')])
    self.assertEqual(len(self._box), 3)
    self.assertEqual(self._box.get_string(key0),
                     self._template % 'original 0')
    self.assertEqual(self._box.get_string(key1),
                     self._template % 'changed 1')
    self.assertEqual(self._box.get_string(key2),
                     self._template % 'changed 2')
    # An unknown key raises KeyError; the asserts below show that the
    # valid key0 entry was still applied despite the error.
    self.assertRaises(KeyError,
                      lambda: self._box.update({'foo': 'bar',
                                                key0: self._template % "changed 0"}))
    self.assertEqual(len(self._box), 3)
    self.assertEqual(self._box.get_string(key0),
                     self._template % "changed 0")
    self.assertEqual(self._box.get_string(key1),
                     self._template % "changed 1")
    self.assertEqual(self._box.get_string(key2),
                     self._template % "changed 2")
def test_flush(self):
    """flush() must write pending changes while leaving the box open."""
    self._test_flush_or_close(self._box.flush, True)
def test_lock_unlock(self):
    """lock() creates the dot-lock file and unlock() removes it."""
    lock_path = self._get_lock_path()
    self.assertFalse(os.path.exists(lock_path))
    self._box.lock()
    self.assertTrue(os.path.exists(lock_path))
    self._box.unlock()
    self.assertFalse(os.path.exists(lock_path))
def test_close(self):
    """close() must flush any pending changes to disk."""
    self._test_flush_or_close(self._box.close, False)
def _test_flush_or_close(self, method, should_call_close):
    """Add three messages, invoke *method*, reopen, verify contents.

    ``should_call_close`` is True when *method* leaves the mailbox open
    (e.g. flush), so the box must still be closed before reopening.
    """
    contents = [self._template % i for i in xrange(3)]
    for text in contents:
        self._box.add(text)
    method()
    if should_call_close:
        self._box.close()
    # Reopen and check all three messages survived.
    self._box = self._factory(self._path)
    keys = self._box.keys()
    self.assertEqual(len(keys), 3)
    for key in keys:
        self.assertIn(self._box.get_string(key), contents)
def test_dump_message(self):
    """_dump_message() accepts Message, str and file-like sources."""
    expected = _sample_message.replace('\n', os.linesep)
    sources = (email.message_from_string(_sample_message),
               _sample_message,
               StringIO.StringIO(_sample_message))
    # ``source`` instead of the original ``input`` avoids shadowing the
    # builtin of that name.
    for source in sources:
        sink = StringIO.StringIO()
        self._box._dump_message(source, sink)
        self.assertEqual(sink.getvalue(), expected)
    # Any other source type is rejected with TypeError.
    sink = StringIO.StringIO()
    self.assertRaises(TypeError,
                      lambda: self._box._dump_message(None, sink))
def _get_lock_path(self):
    """Return the path of the dot lock file. May be overridden."""
    return '%s.lock' % self._path
class TestMailboxSuperclass(TestBase):
    """Checks for the abstract mailbox.Mailbox base class."""

    def test_notimplemented(self):
        # Test that all Mailbox methods raise NotImplementedException.
        box = mailbox.Mailbox('path')
        self.assertRaises(NotImplementedError, lambda: box.add(''))
        self.assertRaises(NotImplementedError, lambda: box.remove(''))
        self.assertRaises(NotImplementedError, lambda: box.__delitem__(''))
        self.assertRaises(NotImplementedError, lambda: box.discard(''))
        self.assertRaises(NotImplementedError, lambda: box.__setitem__('', ''))
        self.assertRaises(NotImplementedError, lambda: box.iterkeys())
        self.assertRaises(NotImplementedError, lambda: box.keys())
        # The iterator-returning methods only raise once .next() is called.
        self.assertRaises(NotImplementedError, lambda: box.itervalues().next())
        self.assertRaises(NotImplementedError, lambda: box.__iter__().next())
        self.assertRaises(NotImplementedError, lambda: box.values())
        self.assertRaises(NotImplementedError, lambda: box.iteritems().next())
        self.assertRaises(NotImplementedError, lambda: box.items())
        self.assertRaises(NotImplementedError, lambda: box.get(''))
        self.assertRaises(NotImplementedError, lambda: box.__getitem__(''))
        self.assertRaises(NotImplementedError, lambda: box.get_message(''))
        self.assertRaises(NotImplementedError, lambda: box.get_string(''))
        self.assertRaises(NotImplementedError, lambda: box.get_file(''))
        self.assertRaises(NotImplementedError, lambda: box.has_key(''))
        self.assertRaises(NotImplementedError, lambda: box.__contains__(''))
        self.assertRaises(NotImplementedError, lambda: box.__len__())
        self.assertRaises(NotImplementedError, lambda: box.clear())
        self.assertRaises(NotImplementedError, lambda: box.pop(''))
        self.assertRaises(NotImplementedError, lambda: box.popitem())
        self.assertRaises(NotImplementedError, lambda: box.update((('', ''),)))
        self.assertRaises(NotImplementedError, lambda: box.flush())
        self.assertRaises(NotImplementedError, lambda: box.lock())
        self.assertRaises(NotImplementedError, lambda: box.unlock())
        self.assertRaises(NotImplementedError, lambda: box.close())
class TestMaildir(TestMailbox):
    """Maildir-specific tests, plus the shared TestMailbox suite."""

    _factory = lambda self, path, factory=None: mailbox.Maildir(path, factory)

    def setUp(self):
        TestMailbox.setUp(self)
        # ':' is not valid in file names on these platforms.
        if os.name in ('nt', 'os2') or sys.platform == 'cygwin':
            self._box.colon = '!'

    def test_add_MM(self):
        # Add a MaildirMessage instance
        msg = mailbox.MaildirMessage(self._template % 0)
        msg.set_subdir('cur')
        msg.set_info('foo')
        key = self._box.add(msg)
        # The file name must embed the info string after the colon.
        self.assertTrue(os.path.exists(os.path.join(self._path, 'cur', '%s%sfoo' %
                                                    (key, self._box.colon))))

    def test_get_MM(self):
        # Get a MaildirMessage instance
        msg = mailbox.MaildirMessage(self._template % 0)
        msg.set_subdir('cur')
        msg.set_flags('RF')
        key = self._box.add(msg)
        msg_returned = self._box.get_message(key)
        self.assertIsInstance(msg_returned, mailbox.MaildirMessage)
        self.assertEqual(msg_returned.get_subdir(), 'cur')
        # Flags are returned sorted, hence 'FR' not 'RF'.
        self.assertEqual(msg_returned.get_flags(), 'FR')

    def test_set_MM(self):
        # Set with a MaildirMessage instance
        msg0 = mailbox.MaildirMessage(self._template % 0)
        msg0.set_flags('TP')
        key = self._box.add(msg0)
        msg_returned = self._box.get_message(key)
        self.assertEqual(msg_returned.get_subdir(), 'new')
        self.assertEqual(msg_returned.get_flags(), 'PT')
        msg1 = mailbox.MaildirMessage(self._template % 1)
        # Replacing with a flagless message clears the flags.
        self._box[key] = msg1
        msg_returned = self._box.get_message(key)
        self.assertEqual(msg_returned.get_subdir(), 'new')
        self.assertEqual(msg_returned.get_flags(), '')
        self.assertEqual(msg_returned.get_payload(), '1')
        msg2 = mailbox.MaildirMessage(self._template % 2)
        msg2.set_info('2,S')
        self._box[key] = msg2
        # Setting a plain string must preserve the existing info/flags.
        self._box[key] = self._template % 3
        msg_returned = self._box.get_message(key)
        self.assertEqual(msg_returned.get_subdir(), 'new')
        self.assertEqual(msg_returned.get_flags(), 'S')
        self.assertEqual(msg_returned.get_payload(), '3')

    def test_consistent_factory(self):
        # Add a message.
        msg = mailbox.MaildirMessage(self._template % 0)
        msg.set_subdir('cur')
        msg.set_flags('RF')
        key = self._box.add(msg)

        # Create new mailbox with
        class FakeMessage(mailbox.MaildirMessage):
            pass
        box = mailbox.Maildir(self._path, factory=FakeMessage)
        box.colon = self._box.colon
        msg2 = box.get_message(key)
        self.assertIsInstance(msg2, FakeMessage)

    def test_initialize_new(self):
        # Initialize a non-existent mailbox
        self.tearDown()
        self._box = mailbox.Maildir(self._path)
        self._check_basics(factory=rfc822.Message)
        self._delete_recursively(self._path)
        self._box = self._factory(self._path, factory=None)
        self._check_basics()

    def test_initialize_existing(self):
        # Initialize an existing mailbox
        self.tearDown()
        for subdir in '', 'tmp', 'new', 'cur':
            os.mkdir(os.path.normpath(os.path.join(self._path, subdir)))
        self._box = mailbox.Maildir(self._path)
        self._check_basics(factory=rfc822.Message)
        self._box = mailbox.Maildir(self._path, factory=None)
        self._check_basics()

    def _check_basics(self, factory=None):
        # (Used by test_open_new() and test_open_existing().)
        self.assertEqual(self._box._path, os.path.abspath(self._path))
        self.assertEqual(self._box._factory, factory)
        for subdir in '', 'tmp', 'new', 'cur':
            path = os.path.join(self._path, subdir)
            mode = os.stat(path)[stat.ST_MODE]
            self.assertTrue(stat.S_ISDIR(mode), "Not a directory: '%s'" % path)

    def test_list_folders(self):
        # List folders
        self._box.add_folder('one')
        self._box.add_folder('two')
        self._box.add_folder('three')
        self.assertEqual(len(self._box.list_folders()), 3)
        self.assertEqual(set(self._box.list_folders()),
                         set(('one', 'two', 'three')))

    def test_get_folder(self):
        # Open folders
        self._box.add_folder('foo.bar')
        folder0 = self._box.get_folder('foo.bar')
        folder0.add(self._template % 'bar')
        # Maildir folders are dot-prefixed subdirectories.
        self.assertTrue(os.path.isdir(os.path.join(self._path, '.foo.bar')))
        folder1 = self._box.get_folder('foo.bar')
        self.assertEqual(folder1.get_string(folder1.keys()[0]),
                         self._template % 'bar')

    def test_add_and_remove_folders(self):
        # Delete folders
        self._box.add_folder('one')
        self._box.add_folder('two')
        self.assertEqual(len(self._box.list_folders()), 2)
        self.assertEqual(set(self._box.list_folders()), set(('one', 'two')))
        self._box.remove_folder('one')
        self.assertEqual(len(self._box.list_folders()), 1)
        self.assertEqual(set(self._box.list_folders()), set(('two',)))
        self._box.add_folder('three')
        self.assertEqual(len(self._box.list_folders()), 2)
        self.assertEqual(set(self._box.list_folders()), set(('two', 'three')))
        self._box.remove_folder('three')
        self.assertEqual(len(self._box.list_folders()), 1)
        self.assertEqual(set(self._box.list_folders()), set(('two',)))
        self._box.remove_folder('two')
        self.assertEqual(len(self._box.list_folders()), 0)
        self.assertEqual(self._box.list_folders(), [])

    def test_clean(self):
        # Remove old files from 'tmp'
        foo_path = os.path.join(self._path, 'tmp', 'foo')
        bar_path = os.path.join(self._path, 'tmp', 'bar')
        with open(foo_path, 'w') as f:
            f.write("@")
        with open(bar_path, 'w') as f:
            f.write("@")
        self._box.clean()
        # Fresh files must survive a clean().
        self.assertTrue(os.path.exists(foo_path))
        self.assertTrue(os.path.exists(bar_path))
        foo_stat = os.stat(foo_path)
        # Age foo's access time past the 36-hour (129600 s) threshold.
        os.utime(foo_path, (time.time() - 129600 - 2,
                            foo_stat.st_mtime))
        self._box.clean()
        self.assertFalse(os.path.exists(foo_path))
        self.assertTrue(os.path.exists(bar_path))

    def test_create_tmp(self, repetitions=10):
        # Create files in tmp directory
        hostname = socket.gethostname()
        # Per the maildir spec, '/' and ':' in host names are encoded.
        if '/' in hostname:
            hostname = hostname.replace('/', r'\057')
        if ':' in hostname:
            hostname = hostname.replace(':', r'\072')
        pid = os.getpid()
        pattern = re.compile(r"(?P<time>\d+)\.M(?P<M>\d{1,6})P(?P<P>\d+)"
                             r"Q(?P<Q>\d+)\.(?P<host>[^:/]+)")
        previous_groups = None
        for x in xrange(repetitions):
            tmp_file = self._box._create_tmp()
            head, tail = os.path.split(tmp_file.name)
            self.assertEqual(head, os.path.abspath(os.path.join(self._path,
                                                                "tmp")),
                             "File in wrong location: '%s'" % head)
            match = pattern.match(tail)
            self.assertTrue(match is not None, "Invalid file name: '%s'" % tail)
            groups = match.groups()
            if previous_groups is not None:
                # NOTE(review): int() wraps the *result* of a lexicographic
                # string comparison here, so numeric strings of different
                # lengths may compare incorrectly; int(groups[0]) >=
                # int(previous_groups[0]) was presumably intended — confirm.
                self.assertTrue(int(groups[0] >= previous_groups[0]),
                                "Non-monotonic seconds: '%s' before '%s'" %
                                (previous_groups[0], groups[0]))
                # NOTE(review): same int()-of-comparison pattern; also
                # ``groups[0] != groups[1]`` compares seconds against
                # milliseconds of the *same* file name — looks like it should
                # compare against previous_groups[0]; confirm intent.
                self.assertTrue(int(groups[1] >= previous_groups[1]) or
                                groups[0] != groups[1],
                                "Non-monotonic milliseconds: '%s' before '%s'" %
                                (previous_groups[1], groups[1]))
                self.assertTrue(int(groups[2]) == pid,
                                "Process ID mismatch: '%s' should be '%s'" %
                                (groups[2], pid))
                self.assertTrue(int(groups[3]) == int(previous_groups[3]) + 1,
                                "Non-sequential counter: '%s' before '%s'" %
                                (previous_groups[3], groups[3]))
                self.assertTrue(groups[4] == hostname,
                                "Host name mismatch: '%s' should be '%s'" %
                                (groups[4], hostname))
            previous_groups = groups
            tmp_file.write(_sample_message)
            tmp_file.seek(0)
            self.assertTrue(tmp_file.read() == _sample_message)
            tmp_file.close()
        file_count = len(os.listdir(os.path.join(self._path, "tmp")))
        self.assertTrue(file_count == repetitions,
                        "Wrong file count: '%s' should be '%s'" %
                        (file_count, repetitions))

    def test_refresh(self):
        # Update the table of contents
        self.assertEqual(self._box._toc, {})
        key0 = self._box.add(self._template % 0)
        key1 = self._box.add(self._template % 1)
        # add() alone does not populate the TOC; _refresh() does.
        self.assertEqual(self._box._toc, {})
        self._box._refresh()
        self.assertEqual(self._box._toc, {key0: os.path.join('new', key0),
                                          key1: os.path.join('new', key1)})
        key2 = self._box.add(self._template % 2)
        self.assertEqual(self._box._toc, {key0: os.path.join('new', key0),
                                          key1: os.path.join('new', key1)})
        self._box._refresh()
        self.assertEqual(self._box._toc, {key0: os.path.join('new', key0),
                                          key1: os.path.join('new', key1),
                                          key2: os.path.join('new', key2)})

    def test_refresh_after_safety_period(self):
        # Issue #13254: Call _refresh after the "file system safety
        # period" of 2 seconds has passed; _toc should still be
        # updated because this is the first call to _refresh.
        key0 = self._box.add(self._template % 0)
        key1 = self._box.add(self._template % 1)

        self._box = self._factory(self._path)
        self.assertEqual(self._box._toc, {})

        # Emulate sleeping. Instead of sleeping for 2 seconds, use the
        # skew factor to make _refresh think that the filesystem
        # safety period has passed and re-reading the _toc is only
        # required if mtimes differ.
        self._box._skewfactor = -3

        self._box._refresh()
        self.assertEqual(sorted(self._box._toc.keys()), sorted([key0, key1]))

    def test_lookup(self):
        # Look up message subpaths in the TOC
        self.assertRaises(KeyError, lambda: self._box._lookup('foo'))
        key0 = self._box.add(self._template % 0)
        self.assertEqual(self._box._lookup(key0), os.path.join('new', key0))
        os.remove(os.path.join(self._path, 'new', key0))
        self.assertEqual(self._box._toc, {key0: os.path.join('new', key0)})
        # Be sure that the TOC is read back from disk (see issue #6896
        # about bad mtime behaviour on some systems).
        self._box.flush()
        self.assertRaises(KeyError, lambda: self._box._lookup(key0))
        self.assertEqual(self._box._toc, {})

    def test_lock_unlock(self):
        # Lock and unlock the mailbox. For Maildir, this does nothing.
        self._box.lock()
        self._box.unlock()

    def test_folder (self):
        # Test for bug #1569790: verify that folders returned by .get_folder()
        # use the same factory function.
        def dummy_factory (s):
            return None
        box = self._factory(self._path, factory=dummy_factory)
        folder = box.add_folder('folder1')
        self.assertIs(folder._factory, dummy_factory)

        folder1_alias = box.get_folder('folder1')
        self.assertIs(folder1_alias._factory, dummy_factory)

    def test_directory_in_folder (self):
        # Test that mailboxes still work if there's a stray extra directory
        # in a folder.
        for i in range(10):
            self._box.add(mailbox.Message(_sample_message))

        # Create a stray directory
        os.mkdir(os.path.join(self._path, 'cur', 'stray-dir'))

        # Check that looping still works with the directory present.
        for msg in self._box:
            pass

    def test_file_permissions(self):
        # Verify that message files are created without execute permissions
        if not hasattr(os, "stat") or not hasattr(os, "umask"):
            return
        msg = mailbox.MaildirMessage(self._template % 0)
        orig_umask = os.umask(0)
        try:
            key = self._box.add(msg)
        finally:
            # Always restore the process umask.
            os.umask(orig_umask)
        path = os.path.join(self._path, self._box._lookup(key))
        mode = os.stat(path).st_mode
        self.assertEqual(mode & 0111, 0)

    def test_folder_file_perms(self):
        # From bug #3228, we want to verify that the file created inside a Maildir
        # subfolder isn't marked as executable.
        if not hasattr(os, "stat") or not hasattr(os, "umask"):
            return

        orig_umask = os.umask(0)
        try:
            subfolder = self._box.add_folder('subfolder')
        finally:
            os.umask(orig_umask)
        path = os.path.join(subfolder._path, 'maildirfolder')
        st = os.stat(path)
        perms = st.st_mode
        self.assertFalse((perms & 0111)) # Execute bits should all be off.

    def test_reread(self):
        # Do an initial unconditional refresh
        self._box._refresh()

        # Put the last modified times more than two seconds into the past
        # (because mtime may have only a two second granularity).
        for subdir in ('cur', 'new'):
            os.utime(os.path.join(self._box._path, subdir),
                     (time.time()-5,)*2)

        # Because mtime has a two second granularity in worst case (FAT), a
        # refresh is done unconditionally if called for within
        # two-second-plus-a-bit of the last one, just in case the mbox has
        # changed; so now we have to wait for that interval to expire.
        #
        # Because this is a test, emulate sleeping. Instead of
        # sleeping for 2 seconds, use the skew factor to make _refresh
        # think that 2 seconds have passed and re-reading the _toc is
        # only required if mtimes differ.
        self._box._skewfactor = -3

        # Re-reading causes the ._toc attribute to be assigned a new dictionary
        # object, so we'll check that the ._toc attribute isn't a different
        # object.
        orig_toc = self._box._toc
        def refreshed():
            return self._box._toc is not orig_toc

        self._box._refresh()
        self.assertFalse(refreshed())

        # Now, write something into cur and remove it. This changes
        # the mtime and should cause a re-read. Note that "sleep
        # emulation" is still in effect, as skewfactor is -3.
        filename = os.path.join(self._path, 'cur', 'stray-file')
        f = open(filename, 'w')
        f.close()
        os.unlink(filename)
        self._box._refresh()
        self.assertTrue(refreshed())
class _TestMboxMMDF(TestMailbox):
    """Shared tests for the single-file mbox and MMDF formats."""

    def tearDown(self):
        self._box.close()
        self._delete_recursively(self._path)
        # Also remove any leftover lock files.
        for lock_remnant in glob.glob(self._path + '.*'):
            test_support.unlink(lock_remnant)

    def test_add_from_string(self):
        # Add a string starting with 'From ' to the mailbox
        key = self._box.add('From foo@bar blah\nFrom: foo\n\n0')
        self.assertEqual(self._box[key].get_from(), 'foo@bar blah')
        self.assertEqual(self._box[key].get_payload(), '0')

    def test_add_mbox_or_mmdf_message(self):
        # Add an mboxMessage or MMDFMessage
        # (smoke test: add() must accept either message type)
        for class_ in (mailbox.mboxMessage, mailbox.MMDFMessage):
            msg = class_('From foo@bar blah\nFrom: foo\n\n0')
            key = self._box.add(msg)

    def test_open_close_open(self):
        # Open and inspect previously-created mailbox
        values = [self._template % i for i in xrange(3)]
        for value in values:
            self._box.add(value)
        self._box.close()
        mtime = os.path.getmtime(self._path)
        self._box = self._factory(self._path)
        self.assertEqual(len(self._box), 3)
        for key in self._box.iterkeys():
            self.assertIn(self._box.get_string(key), values)
        self._box.close()
        # A read-only reopen must not rewrite the file.
        self.assertEqual(mtime, os.path.getmtime(self._path))

    def test_add_and_close(self):
        # Verifying that closing a mailbox doesn't change added items
        self._box.add(_sample_message)
        for i in xrange(3):
            self._box.add(self._template % i)
        self._box.add(_sample_message)
        self._box._file.flush()
        self._box._file.seek(0)
        contents = self._box._file.read()
        self._box.close()
        with open(self._path, 'rb') as f:
            self.assertEqual(contents, f.read())
        self._box = self._factory(self._path)

    @unittest.skipUnless(hasattr(os, 'fork'), "Test needs fork().")
    @unittest.skipUnless(hasattr(socket, 'socketpair'), "Test needs socketpair().")
    def test_lock_conflict(self):
        # Fork off a child process that will lock the mailbox temporarily,
        # unlock it and exit.
        c, p = socket.socketpair()
        self.addCleanup(c.close)
        self.addCleanup(p.close)

        pid = os.fork()
        if pid == 0:
            # child
            try:
                # lock the mailbox, and signal the parent it can proceed
                self._box.lock()
                c.send(b'c')

                # wait until the parent is done, and unlock the mailbox
                c.recv(1)
                self._box.unlock()
            finally:
                os._exit(0)

        # In the parent, wait until the child signals it locked the mailbox.
        p.recv(1)
        try:
            self.assertRaises(mailbox.ExternalClashError,
                              self._box.lock)
        finally:
            # Signal the child it can now release the lock and exit.
            p.send(b'p')
            # Wait for child to exit.  Locking should now succeed.
            exited_pid, status = os.waitpid(pid, 0)

        self._box.lock()
        self._box.unlock()

    def test_relock(self):
        # Test case for bug #1575506: the mailbox class was locking the
        # wrong file object in its flush() method.
        msg = "Subject: sub\n\nbody\n"
        key1 = self._box.add(msg)
        self._box.flush()
        self._box.close()

        self._box = self._factory(self._path)
        self._box.lock()
        key2 = self._box.add(msg)
        self._box.flush()
        # The box must still be locked after flushing.
        self.assertTrue(self._box._locked)
        self._box.close()
class TestMbox(_TestMboxMMDF):
    """Run the shared mbox/MMDF tests against mailbox.mbox."""

    _factory = lambda self, path, factory=None: mailbox.mbox(path, factory)

    def test_file_perms(self):
        # From bug #3228, we want to verify that the mailbox file isn't executable,
        # even if the umask is set to something that would leave executable bits set.
        # We only run this test on platforms that support umask.
        if hasattr(os, 'umask') and hasattr(os, 'stat'):
            try:
                old_umask = os.umask(0077)
                self._box.close()
                os.unlink(self._path)
                self._box = mailbox.mbox(self._path, create=True)
                self._box.add('')
                self._box.close()
            finally:
                # Restore the original umask no matter what happened.
                os.umask(old_umask)

            st = os.stat(self._path)
            perms = st.st_mode
            self.assertFalse((perms & 0111)) # Execute bits should all be off.
class TestMMDF(_TestMboxMMDF):
    """Run the shared mbox/MMDF tests against mailbox.MMDF."""

    _factory = lambda self, path, factory=None: mailbox.MMDF(path, factory)
class TestMH(TestMailbox):
    """MH-format-specific tests, plus the shared TestMailbox suite."""

    _factory = lambda self, path, factory=None: mailbox.MH(path, factory)

    def test_list_folders(self):
        # List folders
        self._box.add_folder('one')
        self._box.add_folder('two')
        self._box.add_folder('three')
        self.assertEqual(len(self._box.list_folders()), 3)
        self.assertEqual(set(self._box.list_folders()),
                         set(('one', 'two', 'three')))

    def test_get_folder(self):
        # Open folders
        def dummy_factory (s):
            return None
        self._box = self._factory(self._path, dummy_factory)

        new_folder = self._box.add_folder('foo.bar')
        folder0 = self._box.get_folder('foo.bar')
        folder0.add(self._template % 'bar')
        # MH folders are plain (non-dot-prefixed) subdirectories.
        self.assertTrue(os.path.isdir(os.path.join(self._path, 'foo.bar')))
        folder1 = self._box.get_folder('foo.bar')
        self.assertEqual(folder1.get_string(folder1.keys()[0]),
                         self._template % 'bar')

        # Test for bug #1569790: verify that folders returned by .get_folder()
        # use the same factory function.
        self.assertIs(new_folder._factory, self._box._factory)
        self.assertIs(folder0._factory, self._box._factory)

    def test_add_and_remove_folders(self):
        # Delete folders
        self._box.add_folder('one')
        self._box.add_folder('two')
        self.assertEqual(len(self._box.list_folders()), 2)
        self.assertEqual(set(self._box.list_folders()), set(('one', 'two')))
        self._box.remove_folder('one')
        self.assertEqual(len(self._box.list_folders()), 1)
        self.assertEqual(set(self._box.list_folders()), set(('two', )))
        self._box.add_folder('three')
        self.assertEqual(len(self._box.list_folders()), 2)
        self.assertEqual(set(self._box.list_folders()), set(('two', 'three')))
        self._box.remove_folder('three')
        self.assertEqual(len(self._box.list_folders()), 1)
        self.assertEqual(set(self._box.list_folders()), set(('two', )))
        self._box.remove_folder('two')
        self.assertEqual(len(self._box.list_folders()), 0)
        self.assertEqual(self._box.list_folders(), [])

    def test_sequences(self):
        # Get and set sequences
        self.assertEqual(self._box.get_sequences(), {})
        msg0 = mailbox.MHMessage(self._template % 0)
        msg0.add_sequence('foo')
        key0 = self._box.add(msg0)
        self.assertEqual(self._box.get_sequences(), {'foo':[key0]})
        msg1 = mailbox.MHMessage(self._template % 1)
        msg1.set_sequences(['bar', 'replied', 'foo'])
        key1 = self._box.add(msg1)
        self.assertEqual(self._box.get_sequences(),
                         {'foo':[key0, key1], 'bar':[key1], 'replied':[key1]})
        # Replacing a message replaces its sequence memberships too.
        msg0.set_sequences(['flagged'])
        self._box[key0] = msg0
        self.assertEqual(self._box.get_sequences(),
                         {'foo':[key1], 'bar':[key1], 'replied':[key1],
                          'flagged':[key0]})
        self._box.remove(key1)
        self.assertEqual(self._box.get_sequences(), {'flagged':[key0]})

    def test_issue2625(self):
        # Regression smoke test: get_message() after add() must not fail.
        msg0 = mailbox.MHMessage(self._template % 0)
        msg0.add_sequence('foo')
        key0 = self._box.add(msg0)
        refmsg0 = self._box.get_message(key0)

    def test_issue7627(self):
        # Regression smoke test: remove() while locked must not fail.
        msg0 = mailbox.MHMessage(self._template % 0)
        key0 = self._box.add(msg0)
        self._box.lock()
        self._box.remove(key0)
        self._box.unlock()

    def test_pack(self):
        # Pack the contents of the mailbox
        msg0 = mailbox.MHMessage(self._template % 0)
        msg1 = mailbox.MHMessage(self._template % 1)
        msg2 = mailbox.MHMessage(self._template % 2)
        msg3 = mailbox.MHMessage(self._template % 3)
        msg0.set_sequences(['foo', 'unseen'])
        msg1.set_sequences(['foo'])
        msg2.set_sequences(['foo', 'flagged'])
        msg3.set_sequences(['foo', 'bar', 'replied'])
        key0 = self._box.add(msg0)
        key1 = self._box.add(msg1)
        key2 = self._box.add(msg2)
        key3 = self._box.add(msg3)
        self.assertEqual(self._box.get_sequences(),
                         {'foo':[key0,key1,key2,key3], 'unseen':[key0],
                          'flagged':[key2], 'bar':[key3], 'replied':[key3]})
        self._box.remove(key2)
        self.assertEqual(self._box.get_sequences(),
                         {'foo':[key0,key1,key3], 'unseen':[key0], 'bar':[key3],
                          'replied':[key3]})
        self._box.pack()
        # pack() renumbers the remaining messages 1..n.
        self.assertEqual(self._box.keys(), [1, 2, 3])
        # NOTE(review): the next three assignments are dead code — the
        # values are never read before being reassigned below; presumably
        # they once mapped old keys to the packed numbering. Confirm before
        # removing.
        key0 = key0
        key1 = key0 + 1
        key2 = key1 + 1
        self.assertEqual(self._box.get_sequences(),
                         {'foo':[1, 2, 3], 'unseen':[1], 'bar':[3], 'replied':[3]})

        # Test case for packing while holding the mailbox locked.
        key0 = self._box.add(msg1)
        key1 = self._box.add(msg1)
        key2 = self._box.add(msg1)
        key3 = self._box.add(msg1)

        self._box.remove(key0)
        self._box.remove(key2)
        self._box.lock()
        self._box.pack()
        self._box.unlock()
        self.assertEqual(self._box.get_sequences(),
                         {'foo':[1, 2, 3, 4, 5],
                          'unseen':[1], 'bar':[3], 'replied':[3]})

    def _get_lock_path(self):
        # MH locks the sequences file, not the mailbox directory itself.
        return os.path.join(self._path, '.mh_sequences.lock')
class TestBabyl(TestMailbox):
    """Babyl-format-specific tests, plus the shared TestMailbox suite."""

    _factory = lambda self, path, factory=None: mailbox.Babyl(path, factory)

    def tearDown(self):
        self._box.close()
        self._delete_recursively(self._path)
        # Also remove any leftover lock files.
        for lock_remnant in glob.glob(self._path + '.*'):
            test_support.unlink(lock_remnant)

    def test_labels(self):
        # Get labels from the mailbox
        self.assertEqual(self._box.get_labels(), [])
        msg0 = mailbox.BabylMessage(self._template % 0)
        msg0.add_label('foo')
        key0 = self._box.add(msg0)
        self.assertEqual(self._box.get_labels(), ['foo'])
        msg1 = mailbox.BabylMessage(self._template % 1)
        msg1.set_labels(['bar', 'answered', 'foo'])
        key1 = self._box.add(msg1)
        # 'answered' is one of Babyl's standard labels, so it is not
        # reported among the user-defined labels here.
        self.assertEqual(set(self._box.get_labels()), set(['foo', 'bar']))
        msg0.set_labels(['blah', 'filed'])
        self._box[key0] = msg0
        self.assertEqual(set(self._box.get_labels()),
                         set(['foo', 'bar', 'blah']))
        self._box.remove(key1)
        self.assertEqual(set(self._box.get_labels()), set(['blah']))
class TestMessage(TestBase):
    """Tests for mailbox.Message construction and conversion."""

    _factory = mailbox.Message      # Overridden by subclasses to reuse tests

    def setUp(self):
        self._path = test_support.TESTFN

    def tearDown(self):
        self._delete_recursively(self._path)

    def test_initialize_with_eMM(self):
        # Initialize based on email.message.Message instance
        eMM = email.message_from_string(_sample_message)
        msg = self._factory(eMM)
        self._post_initialize_hook(msg)
        self._check_sample(msg)

    def test_initialize_with_string(self):
        # Initialize based on string
        msg = self._factory(_sample_message)
        self._post_initialize_hook(msg)
        self._check_sample(msg)

    def test_initialize_with_file(self):
        # Initialize based on contents of file
        with open(self._path, 'w+') as f:
            f.write(_sample_message)
            f.seek(0)
            msg = self._factory(f)
        self._post_initialize_hook(msg)
        self._check_sample(msg)

    def test_initialize_with_nothing(self):
        # Initialize without arguments
        msg = self._factory()
        self._post_initialize_hook(msg)
        self.assertIsInstance(msg, email.message.Message)
        self.assertIsInstance(msg, mailbox.Message)
        self.assertIsInstance(msg, self._factory)
        self.assertEqual(msg.keys(), [])
        self.assertFalse(msg.is_multipart())
        self.assertEqual(msg.get_payload(), None)

    def test_initialize_incorrectly(self):
        # Initialize with invalid argument
        self.assertRaises(TypeError, lambda: self._factory(object()))

    def test_become_message(self):
        # Take on the state of another message
        eMM = email.message_from_string(_sample_message)
        msg = self._factory()
        msg._become_message(eMM)
        self._check_sample(msg)

    def test_explain_to(self):
        # Copy self's format-specific data to other message formats.
        # This test is superficial; better ones are in TestMessageConversion.
        msg = self._factory()
        for class_ in (mailbox.Message, mailbox.MaildirMessage,
                       mailbox.mboxMessage, mailbox.MHMessage,
                       mailbox.BabylMessage, mailbox.MMDFMessage):
            other_msg = class_()
            msg._explain_to(other_msg)
        # Plain email.message.Message instances are not a valid target.
        other_msg = email.message.Message()
        self.assertRaises(TypeError, lambda: msg._explain_to(other_msg))

    def _post_initialize_hook(self, msg):
        # Overridden by subclasses to check extra things after initialization
        pass
class TestMaildirMessage(TestMessage):
    """Tests for MaildirMessage subdir/flag/date/info handling."""

    _factory = mailbox.MaildirMessage

    def _post_initialize_hook(self, msg):
        # New MaildirMessages start in 'new' with an empty info string.
        self.assertEqual(msg._subdir, 'new')
        self.assertEqual(msg._info,'')

    def test_subdir(self):
        # Use get_subdir() and set_subdir()
        msg = mailbox.MaildirMessage(_sample_message)
        self.assertEqual(msg.get_subdir(), 'new')
        msg.set_subdir('cur')
        self.assertEqual(msg.get_subdir(), 'cur')
        msg.set_subdir('new')
        self.assertEqual(msg.get_subdir(), 'new')
        # Only 'new' and 'cur' are valid subdirectories.
        self.assertRaises(ValueError, lambda: msg.set_subdir('tmp'))
        self.assertEqual(msg.get_subdir(), 'new')
        msg.set_subdir('new')
        self.assertEqual(msg.get_subdir(), 'new')
        self._check_sample(msg)

    def test_flags(self):
        # Use get_flags(), set_flags(), add_flag(), remove_flag()
        msg = mailbox.MaildirMessage(_sample_message)
        self.assertEqual(msg.get_flags(), '')
        self.assertEqual(msg.get_subdir(), 'new')
        msg.set_flags('F')
        self.assertEqual(msg.get_subdir(), 'new')
        self.assertEqual(msg.get_flags(), 'F')
        # Flags are always reported sorted.
        msg.set_flags('SDTP')
        self.assertEqual(msg.get_flags(), 'DPST')
        msg.add_flag('FT')
        self.assertEqual(msg.get_flags(), 'DFPST')
        # Removing flags that are not set is a no-op for those flags.
        msg.remove_flag('TDRP')
        self.assertEqual(msg.get_flags(), 'FS')
        self.assertEqual(msg.get_subdir(), 'new')
        self._check_sample(msg)

    def test_date(self):
        # Use get_date() and set_date()
        msg = mailbox.MaildirMessage(_sample_message)
        # The default date is "now", within a minute of tolerance.
        diff = msg.get_date() - time.time()
        self.assertTrue(abs(diff) < 60, diff)
        msg.set_date(0.0)
        self.assertEqual(msg.get_date(), 0.0)

    def test_info(self):
        # Use get_info() and set_info()
        msg = mailbox.MaildirMessage(_sample_message)
        self.assertEqual(msg.get_info(), '')
        msg.set_info('1,foo=bar')
        self.assertEqual(msg.get_info(), '1,foo=bar')
        self.assertRaises(TypeError, lambda: msg.set_info(None))
        self._check_sample(msg)

    def test_info_and_flags(self):
        # Test interaction of info and flag methods
        msg = mailbox.MaildirMessage(_sample_message)
        self.assertEqual(msg.get_info(), '')
        # Setting flags produces a '2,'-prefixed info string.
        msg.set_flags('SF')
        self.assertEqual(msg.get_flags(), 'FS')
        self.assertEqual(msg.get_info(), '2,FS')
        # A non-'2,' info string means no flags are reported.
        msg.set_info('1,')
        self.assertEqual(msg.get_flags(), '')
        self.assertEqual(msg.get_info(), '1,')
        msg.remove_flag('RPT')
        self.assertEqual(msg.get_flags(), '')
        self.assertEqual(msg.get_info(), '1,')
        msg.add_flag('D')
        self.assertEqual(msg.get_flags(), 'D')
        self.assertEqual(msg.get_info(), '2,D')
        self._check_sample(msg)
class _TestMboxMMDFMessage(TestMessage):
    """Shared tests for mboxMessage and MMDFMessage ("From " line, flags)."""

    _factory = mailbox._mboxMMDFMessage

    def _post_initialize_hook(self, msg):
        self._check_from(msg)

    def test_initialize_with_unixfrom(self):
        # Initialize with a message that already has a _unixfrom attribute
        msg = mailbox.Message(_sample_message)
        msg.set_unixfrom('From foo@bar blah')
        msg = mailbox.mboxMessage(msg)
        # The 'From ' prefix is stripped by get_from().
        self.assertEqual(msg.get_from(), 'foo@bar blah')

    def test_from(self):
        # Get and set "From " line
        msg = mailbox.mboxMessage(_sample_message)
        self._check_from(msg)
        msg.set_from('foo bar')
        self.assertEqual(msg.get_from(), 'foo bar')
        # A True second argument appends the current date;
        # a time tuple appends that specific date.
        msg.set_from('foo@bar', True)
        self._check_from(msg, 'foo@bar')
        msg.set_from('blah@temp', time.localtime())
        self._check_from(msg, 'blah@temp')

    def test_flags(self):
        # Use get_flags(), set_flags(), add_flag(), remove_flag()
        msg = mailbox.mboxMessage(_sample_message)
        self.assertEqual(msg.get_flags(), '')
        msg.set_flags('F')
        self.assertEqual(msg.get_flags(), 'F')
        # Flags are reported in the canonical mbox order, not sorted.
        msg.set_flags('XODR')
        self.assertEqual(msg.get_flags(), 'RODX')
        msg.add_flag('FA')
        self.assertEqual(msg.get_flags(), 'RODFAX')
        msg.remove_flag('FDXA')
        self.assertEqual(msg.get_flags(), 'RO')
        self._check_sample(msg)

    def _check_from(self, msg, sender=None):
        # Check contents of "From " line
        if sender is None:
            sender = "MAILER-DAEMON"
        # Expect "<sender> <asctime-style date>".
        self.assertTrue(re.match(sender + r" \w{3} \w{3} [\d ]\d [\d ]\d:\d{2}:"
                                 r"\d{2} \d{4}", msg.get_from()))
class TestMboxMessage(_TestMboxMMDFMessage):
    """Run the shared mbox/MMDF message tests against mboxMessage."""

    _factory = mailbox.mboxMessage
class TestMHMessage(TestMessage):
    """Tests for MHMessage sequence handling."""

    _factory = mailbox.MHMessage

    def _post_initialize_hook(self, msg):
        # New MHMessages belong to no sequences.
        self.assertEqual(msg._sequences, [])

    def test_sequences(self):
        # Get, set, join, and leave sequences
        msg = mailbox.MHMessage(_sample_message)
        self.assertEqual(msg.get_sequences(), [])
        msg.set_sequences(['foobar'])
        self.assertEqual(msg.get_sequences(), ['foobar'])
        msg.set_sequences([])
        self.assertEqual(msg.get_sequences(), [])
        msg.add_sequence('unseen')
        self.assertEqual(msg.get_sequences(), ['unseen'])
        msg.add_sequence('flagged')
        self.assertEqual(msg.get_sequences(), ['unseen', 'flagged'])
        # Adding an already-present sequence is a no-op.
        msg.add_sequence('flagged')
        self.assertEqual(msg.get_sequences(), ['unseen', 'flagged'])
        msg.remove_sequence('unseen')
        self.assertEqual(msg.get_sequences(), ['flagged'])
        msg.add_sequence('foobar')
        self.assertEqual(msg.get_sequences(), ['flagged', 'foobar'])
        # Removing an absent sequence is a no-op.
        msg.remove_sequence('replied')
        self.assertEqual(msg.get_sequences(), ['flagged', 'foobar'])
        msg.set_sequences(['foobar', 'replied'])
        self.assertEqual(msg.get_sequences(), ['foobar', 'replied'])
class TestBabylMessage(TestMessage):
    # Babyl-format messages: state is a list of labels plus an optional
    # set of "visible" headers shown by the mail reader.
    _factory = mailbox.BabylMessage

    def _post_initialize_hook(self, msg):
        # A freshly created Babyl message carries no labels.
        self.assertEqual(msg._labels, [])

    def test_labels(self):
        # Get, set, join, and leave labels
        msg = mailbox.BabylMessage(_sample_message)
        self.assertEqual(msg.get_labels(), [])
        msg.set_labels(['foobar'])
        self.assertEqual(msg.get_labels(), ['foobar'])
        msg.set_labels([])
        self.assertEqual(msg.get_labels(), [])
        msg.add_label('filed')
        self.assertEqual(msg.get_labels(), ['filed'])
        msg.add_label('resent')
        self.assertEqual(msg.get_labels(), ['filed', 'resent'])
        # Adding an existing label is a no-op...
        msg.add_label('resent')
        self.assertEqual(msg.get_labels(), ['filed', 'resent'])
        msg.remove_label('filed')
        self.assertEqual(msg.get_labels(), ['resent'])
        msg.add_label('foobar')
        self.assertEqual(msg.get_labels(), ['resent', 'foobar'])
        # ...as is removing a label the message does not have.
        msg.remove_label('unseen')
        self.assertEqual(msg.get_labels(), ['resent', 'foobar'])
        msg.set_labels(['foobar', 'answered'])
        self.assertEqual(msg.get_labels(), ['foobar', 'answered'])

    def test_visible(self):
        # Get, set, and update visible headers
        msg = mailbox.BabylMessage(_sample_message)
        visible = msg.get_visible()
        self.assertEqual(visible.keys(), [])
        self.assertIs(visible.get_payload(), None)
        visible['User-Agent'] = 'FooBar 1.0'
        visible['X-Whatever'] = 'Blah'
        # get_visible() returns a copy, so mutating it must not affect msg...
        self.assertEqual(msg.get_visible().keys(), [])
        msg.set_visible(visible)
        visible = msg.get_visible()
        self.assertEqual(visible.keys(), ['User-Agent', 'X-Whatever'])
        self.assertEqual(visible['User-Agent'], 'FooBar 1.0')
        self.assertEqual(visible['X-Whatever'], 'Blah')
        self.assertIs(visible.get_payload(), None)
        # ...and update_visible() must not mutate previously returned copies.
        msg.update_visible()
        self.assertEqual(visible.keys(), ['User-Agent', 'X-Whatever'])
        self.assertIs(visible.get_payload(), None)
        # A fresh copy reflects the headers merged in from the message.
        visible = msg.get_visible()
        self.assertEqual(visible.keys(), ['User-Agent', 'Date', 'From', 'To',
                                          'Subject'])
        for header in ('User-Agent', 'Date', 'From', 'To', 'Subject'):
            self.assertEqual(visible[header], msg[header])
class TestMMDFMessage(_TestMboxMMDFMessage):
    # MMDF-format messages: inherit all "From "-line and flag tests unchanged.
    _factory = mailbox.MMDFMessage
class TestMessageConversion(TestBase):
    # Conversions between the format-specific Message subclasses must carry
    # each format's status metadata across as faithfully as possible:
    # maildir flags <-> mbox/MMDF flags <-> MH sequences <-> Babyl labels.
    # The ``pairs`` tables below are (input setting, expected result) rows.

    def test_plain_to_x(self):
        # Convert Message to all formats
        for class_ in (mailbox.Message, mailbox.MaildirMessage,
                       mailbox.mboxMessage, mailbox.MHMessage,
                       mailbox.BabylMessage, mailbox.MMDFMessage):
            msg_plain = mailbox.Message(_sample_message)
            msg = class_(msg_plain)
            self._check_sample(msg)

    def test_x_to_plain(self):
        # Convert all formats to Message
        for class_ in (mailbox.Message, mailbox.MaildirMessage,
                       mailbox.mboxMessage, mailbox.MHMessage,
                       mailbox.BabylMessage, mailbox.MMDFMessage):
            msg = class_(_sample_message)
            msg_plain = mailbox.Message(msg)
            self._check_sample(msg_plain)

    def test_x_to_invalid(self):
        # Convert all formats to an invalid format
        for class_ in (mailbox.Message, mailbox.MaildirMessage,
                       mailbox.mboxMessage, mailbox.MHMessage,
                       mailbox.BabylMessage, mailbox.MMDFMessage):
            self.assertRaises(TypeError, lambda: class_(False))

    def test_maildir_to_maildir(self):
        # Convert MaildirMessage to MaildirMessage
        msg_maildir = mailbox.MaildirMessage(_sample_message)
        msg_maildir.set_flags('DFPRST')
        msg_maildir.set_subdir('cur')
        date = msg_maildir.get_date()
        msg = mailbox.MaildirMessage(msg_maildir)
        self._check_sample(msg)
        # Same-format conversion must preserve flags, subdir and date.
        self.assertEqual(msg.get_flags(), 'DFPRST')
        self.assertEqual(msg.get_subdir(), 'cur')
        self.assertEqual(msg.get_date(), date)

    def test_maildir_to_mboxmmdf(self):
        # Convert MaildirMessage to mboxmessage and MMDFMessage
        pairs = (('D', ''), ('F', 'F'), ('P', ''), ('R', 'A'), ('S', 'R'),
                 ('T', 'D'), ('DFPRST', 'RDFA'))
        for class_ in (mailbox.mboxMessage, mailbox.MMDFMessage):
            msg_maildir = mailbox.MaildirMessage(_sample_message)
            msg_maildir.set_date(0.0)
            for setting, result in pairs:
                msg_maildir.set_flags(setting)
                msg = class_(msg_maildir)
                self.assertEqual(msg.get_flags(), result)
                # The maildir date becomes the "From " line timestamp.
                self.assertEqual(msg.get_from(), 'MAILER-DAEMON %s' %
                                 time.asctime(time.gmtime(0.0)))
            # A message in 'cur' additionally gains the R and O flags.
            msg_maildir.set_subdir('cur')
            self.assertEqual(class_(msg_maildir).get_flags(), 'RODFA')

    def test_maildir_to_mh(self):
        # Convert MaildirMessage to MHMessage
        msg_maildir = mailbox.MaildirMessage(_sample_message)
        pairs = (('D', ['unseen']), ('F', ['unseen', 'flagged']),
                 ('P', ['unseen']), ('R', ['unseen', 'replied']), ('S', []),
                 ('T', ['unseen']), ('DFPRST', ['replied', 'flagged']))
        for setting, result in pairs:
            msg_maildir.set_flags(setting)
            self.assertEqual(mailbox.MHMessage(msg_maildir).get_sequences(),
                             result)

    def test_maildir_to_babyl(self):
        # Convert MaildirMessage to Babyl
        msg_maildir = mailbox.MaildirMessage(_sample_message)
        pairs = (('D', ['unseen']), ('F', ['unseen']),
                 ('P', ['unseen', 'forwarded']), ('R', ['unseen', 'answered']),
                 ('S', []), ('T', ['unseen', 'deleted']),
                 ('DFPRST', ['deleted', 'answered', 'forwarded']))
        for setting, result in pairs:
            msg_maildir.set_flags(setting)
            self.assertEqual(mailbox.BabylMessage(msg_maildir).get_labels(),
                             result)

    def test_mboxmmdf_to_maildir(self):
        # Convert mboxMessage and MMDFMessage to MaildirMessage
        for class_ in (mailbox.mboxMessage, mailbox.MMDFMessage):
            msg_mboxMMDF = class_(_sample_message)
            msg_mboxMMDF.set_from('foo@bar', time.gmtime(0.0))
            pairs = (('R', 'S'), ('O', ''), ('D', 'T'), ('F', 'F'), ('A', 'R'),
                     ('RODFA', 'FRST'))
            for setting, result in pairs:
                msg_mboxMMDF.set_flags(setting)
                msg = mailbox.MaildirMessage(msg_mboxMMDF)
                self.assertEqual(msg.get_flags(), result)
                # The "From " line timestamp becomes the maildir date.
                self.assertEqual(msg.get_date(), 0.0)
            # The 'O' (old/seen) flag maps to delivery into 'cur'.
            msg_mboxMMDF.set_flags('O')
            self.assertEqual(mailbox.MaildirMessage(msg_mboxMMDF).get_subdir(),
                             'cur')

    def test_mboxmmdf_to_mboxmmdf(self):
        # Convert mboxMessage and MMDFMessage to mboxMessage and MMDFMessage
        for class_ in (mailbox.mboxMessage, mailbox.MMDFMessage):
            msg_mboxMMDF = class_(_sample_message)
            msg_mboxMMDF.set_flags('RODFA')
            msg_mboxMMDF.set_from('foo@bar')
            for class2_ in (mailbox.mboxMessage, mailbox.MMDFMessage):
                msg2 = class2_(msg_mboxMMDF)
                self.assertEqual(msg2.get_flags(), 'RODFA')
                self.assertEqual(msg2.get_from(), 'foo@bar')

    def test_mboxmmdf_to_mh(self):
        # Convert mboxMessage and MMDFMessage to MHMessage
        for class_ in (mailbox.mboxMessage, mailbox.MMDFMessage):
            msg_mboxMMDF = class_(_sample_message)
            pairs = (('R', []), ('O', ['unseen']), ('D', ['unseen']),
                     ('F', ['unseen', 'flagged']),
                     ('A', ['unseen', 'replied']),
                     ('RODFA', ['replied', 'flagged']))
            for setting, result in pairs:
                msg_mboxMMDF.set_flags(setting)
                self.assertEqual(mailbox.MHMessage(msg_mboxMMDF).get_sequences(),
                                 result)

    def test_mboxmmdf_to_babyl(self):
        # Convert mboxMessage and MMDFMessage to BabylMessage
        for class_ in (mailbox.mboxMessage, mailbox.MMDFMessage):
            msg = class_(_sample_message)
            pairs = (('R', []), ('O', ['unseen']),
                     ('D', ['unseen', 'deleted']), ('F', ['unseen']),
                     ('A', ['unseen', 'answered']),
                     ('RODFA', ['deleted', 'answered']))
            for setting, result in pairs:
                msg.set_flags(setting)
                self.assertEqual(mailbox.BabylMessage(msg).get_labels(), result)

    def test_mh_to_maildir(self):
        # Convert MHMessage to MaildirMessage
        pairs = (('unseen', ''), ('replied', 'RS'), ('flagged', 'FS'))
        for setting, result in pairs:
            msg = mailbox.MHMessage(_sample_message)
            msg.add_sequence(setting)
            self.assertEqual(mailbox.MaildirMessage(msg).get_flags(), result)
            self.assertEqual(mailbox.MaildirMessage(msg).get_subdir(), 'cur')
        msg = mailbox.MHMessage(_sample_message)
        msg.add_sequence('unseen')
        msg.add_sequence('replied')
        msg.add_sequence('flagged')
        # 'unseen' suppresses the S flag that would otherwise be added.
        self.assertEqual(mailbox.MaildirMessage(msg).get_flags(), 'FR')
        self.assertEqual(mailbox.MaildirMessage(msg).get_subdir(), 'cur')

    def test_mh_to_mboxmmdf(self):
        # Convert MHMessage to mboxMessage and MMDFMessage
        pairs = (('unseen', 'O'), ('replied', 'ROA'), ('flagged', 'ROF'))
        for setting, result in pairs:
            msg = mailbox.MHMessage(_sample_message)
            msg.add_sequence(setting)
            for class_ in (mailbox.mboxMessage, mailbox.MMDFMessage):
                self.assertEqual(class_(msg).get_flags(), result)
        msg = mailbox.MHMessage(_sample_message)
        msg.add_sequence('unseen')
        msg.add_sequence('replied')
        msg.add_sequence('flagged')
        for class_ in (mailbox.mboxMessage, mailbox.MMDFMessage):
            self.assertEqual(class_(msg).get_flags(), 'OFA')

    def test_mh_to_mh(self):
        # Convert MHMessage to MHMessage
        msg = mailbox.MHMessage(_sample_message)
        msg.add_sequence('unseen')
        msg.add_sequence('replied')
        msg.add_sequence('flagged')
        self.assertEqual(mailbox.MHMessage(msg).get_sequences(),
                         ['unseen', 'replied', 'flagged'])

    def test_mh_to_babyl(self):
        # Convert MHMessage to BabylMessage
        pairs = (('unseen', ['unseen']), ('replied', ['answered']),
                 ('flagged', []))
        for setting, result in pairs:
            msg = mailbox.MHMessage(_sample_message)
            msg.add_sequence(setting)
            self.assertEqual(mailbox.BabylMessage(msg).get_labels(), result)
        msg = mailbox.MHMessage(_sample_message)
        msg.add_sequence('unseen')
        msg.add_sequence('replied')
        msg.add_sequence('flagged')
        self.assertEqual(mailbox.BabylMessage(msg).get_labels(),
                         ['unseen', 'answered'])

    def test_babyl_to_maildir(self):
        # Convert BabylMessage to MaildirMessage
        pairs = (('unseen', ''), ('deleted', 'ST'), ('filed', 'S'),
                 ('answered', 'RS'), ('forwarded', 'PS'), ('edited', 'S'),
                 ('resent', 'PS'))
        for setting, result in pairs:
            msg = mailbox.BabylMessage(_sample_message)
            msg.add_label(setting)
            self.assertEqual(mailbox.MaildirMessage(msg).get_flags(), result)
            self.assertEqual(mailbox.MaildirMessage(msg).get_subdir(), 'cur')
        msg = mailbox.BabylMessage(_sample_message)
        for label in ('unseen', 'deleted', 'filed', 'answered', 'forwarded',
                      'edited', 'resent'):
            msg.add_label(label)
        self.assertEqual(mailbox.MaildirMessage(msg).get_flags(), 'PRT')
        self.assertEqual(mailbox.MaildirMessage(msg).get_subdir(), 'cur')

    def test_babyl_to_mboxmmdf(self):
        # Convert BabylMessage to mboxMessage and MMDFMessage
        pairs = (('unseen', 'O'), ('deleted', 'ROD'), ('filed', 'RO'),
                 ('answered', 'ROA'), ('forwarded', 'RO'), ('edited', 'RO'),
                 ('resent', 'RO'))
        for setting, result in pairs:
            for class_ in (mailbox.mboxMessage, mailbox.MMDFMessage):
                msg = mailbox.BabylMessage(_sample_message)
                msg.add_label(setting)
                self.assertEqual(class_(msg).get_flags(), result)
        msg = mailbox.BabylMessage(_sample_message)
        for label in ('unseen', 'deleted', 'filed', 'answered', 'forwarded',
                      'edited', 'resent'):
            msg.add_label(label)
        for class_ in (mailbox.mboxMessage, mailbox.MMDFMessage):
            self.assertEqual(class_(msg).get_flags(), 'ODA')

    def test_babyl_to_mh(self):
        # Convert BabylMessage to MHMessage
        pairs = (('unseen', ['unseen']), ('deleted', []), ('filed', []),
                 ('answered', ['replied']), ('forwarded', []), ('edited', []),
                 ('resent', []))
        for setting, result in pairs:
            msg = mailbox.BabylMessage(_sample_message)
            msg.add_label(setting)
            self.assertEqual(mailbox.MHMessage(msg).get_sequences(), result)
        msg = mailbox.BabylMessage(_sample_message)
        for label in ('unseen', 'deleted', 'filed', 'answered', 'forwarded',
                      'edited', 'resent'):
            msg.add_label(label)
        self.assertEqual(mailbox.MHMessage(msg).get_sequences(),
                         ['unseen', 'replied'])

    def test_babyl_to_babyl(self):
        # Convert BabylMessage to BabylMessage
        msg = mailbox.BabylMessage(_sample_message)
        msg.update_visible()
        for label in ('unseen', 'deleted', 'filed', 'answered', 'forwarded',
                      'edited', 'resent'):
            msg.add_label(label)
        msg2 = mailbox.BabylMessage(msg)
        # Labels and the visible-header snapshot must both survive.
        self.assertEqual(msg2.get_labels(), ['unseen', 'deleted', 'filed',
                                             'answered', 'forwarded', 'edited',
                                             'resent'])
        self.assertEqual(msg.get_visible().keys(), msg2.get_visible().keys())
        for key in msg.get_visible().keys():
            self.assertEqual(msg.get_visible()[key], msg2.get_visible()[key])
class TestProxyFileBase(TestBase):
    # Common read/readline/seek/close checks shared by the _ProxyFile and
    # _PartialFile tests.  Subclasses pass in a proxy positioned over the
    # sample text 'foo', 'bar', 'fred', 'bob' (one word per line), except
    # _test_read which sees just 'bar'.

    def _test_read(self, proxy):
        # Read by byte
        proxy.seek(0)
        self.assertEqual(proxy.read(), 'bar')
        proxy.seek(1)
        self.assertEqual(proxy.read(), 'ar')
        proxy.seek(0)
        self.assertEqual(proxy.read(2), 'ba')
        proxy.seek(1)
        # A negative size reads to EOF, like file.read().
        self.assertEqual(proxy.read(-1), 'ar')
        proxy.seek(2)
        # A size beyond EOF just reads whatever is left.
        self.assertEqual(proxy.read(1000), 'r')

    def _test_readline(self, proxy):
        # Read by line
        proxy.seek(0)
        self.assertEqual(proxy.readline(), 'foo' + os.linesep)
        self.assertEqual(proxy.readline(), 'bar' + os.linesep)
        self.assertEqual(proxy.readline(), 'fred' + os.linesep)
        self.assertEqual(proxy.readline(), 'bob')
        proxy.seek(2)
        self.assertEqual(proxy.readline(), 'o' + os.linesep)
        proxy.seek(6 + 2 * len(os.linesep))
        self.assertEqual(proxy.readline(), 'fred' + os.linesep)
        proxy.seek(6 + 2 * len(os.linesep))
        # A positive limit truncates the line...
        self.assertEqual(proxy.readline(2), 'fr')
        # ...while a negative limit means "no limit".
        self.assertEqual(proxy.readline(-10), 'ed' + os.linesep)

    def _test_readlines(self, proxy):
        # Read multiple lines
        proxy.seek(0)
        self.assertEqual(proxy.readlines(), ['foo' + os.linesep,
                                             'bar' + os.linesep,
                                             'fred' + os.linesep, 'bob'])
        proxy.seek(0)
        # The sizehint stops reading after the line that crosses it.
        self.assertEqual(proxy.readlines(2), ['foo' + os.linesep])
        proxy.seek(3 + len(os.linesep))
        self.assertEqual(proxy.readlines(4 + len(os.linesep)),
                         ['bar' + os.linesep, 'fred' + os.linesep])
        proxy.seek(3)
        self.assertEqual(proxy.readlines(1000), [os.linesep, 'bar' + os.linesep,
                                                 'fred' + os.linesep, 'bob'])

    def _test_iteration(self, proxy):
        # Iterate by line
        proxy.seek(0)
        iterator = iter(proxy)
        self.assertEqual(list(iterator),
            ['foo' + os.linesep, 'bar' + os.linesep, 'fred' + os.linesep, 'bob'])

    def _test_seek_and_tell(self, proxy):
        # Seek and use tell to check position
        proxy.seek(3)
        self.assertEqual(proxy.tell(), 3)
        self.assertEqual(proxy.read(len(os.linesep)), os.linesep)
        proxy.seek(2, 1)                        # whence=1: relative to current
        self.assertEqual(proxy.read(1 + len(os.linesep)), 'r' + os.linesep)
        proxy.seek(-3 - len(os.linesep), 2)     # whence=2: relative to EOF
        self.assertEqual(proxy.read(3), 'bar')
        proxy.seek(2, 0)                        # whence=0: absolute
        self.assertEqual(proxy.read(), 'o' + os.linesep + 'bar' + os.linesep)
        # Seeking past EOF leaves nothing to read.
        proxy.seek(100)
        self.assertEqual(proxy.read(), '')

    def _test_close(self, proxy):
        # Close a file
        proxy.close()
        # Issue 11700 subsequent closes should be a no-op, not an error.
        proxy.close()
class TestProxyFile(TestProxyFileBase):
    # Exercise mailbox._ProxyFile over a real on-disk file, reusing the
    # checks from TestProxyFileBase.

    # The four-word sample text expected by the base-class helpers.
    _SAMPLE = 'foo%sbar%sfred%sbob' % (os.linesep, os.linesep, os.linesep)

    def setUp(self):
        self._path = test_support.TESTFN
        self._file = open(self._path, 'wb+')

    def tearDown(self):
        self._file.close()
        self._delete_recursively(self._path)

    def _make_proxy(self, data):
        # Write *data* to the backing file and wrap the file in a proxy.
        self._file.write(data)
        return mailbox._ProxyFile(self._file)

    def test_initialize(self):
        # Wrapping an already-positioned file preserves its position, while
        # an explicit offset moves only the proxy, not the file.
        self._file.write('foo')
        pos = self._file.tell()
        proxy0 = mailbox._ProxyFile(self._file)
        self.assertEqual(proxy0.tell(), pos)
        self.assertEqual(self._file.tell(), pos)
        proxy1 = mailbox._ProxyFile(self._file, 0)
        self.assertEqual(proxy1.tell(), 0)
        self.assertEqual(self._file.tell(), pos)

    def test_read(self):
        self._test_read(self._make_proxy('bar'))

    def test_readline(self):
        self._test_readline(self._make_proxy(self._SAMPLE))

    def test_readlines(self):
        self._test_readlines(self._make_proxy(self._SAMPLE))

    def test_iteration(self):
        self._test_iteration(self._make_proxy(self._SAMPLE))

    def test_seek_and_tell(self):
        self._test_seek_and_tell(
            self._make_proxy('foo%sbar%s' % (os.linesep, os.linesep)))

    def test_close(self):
        self._test_close(
            self._make_proxy('foo%sbar%s' % (os.linesep, os.linesep)))
class TestPartialFile(TestProxyFileBase):
    # Same battery as TestProxyFile, but through mailbox._PartialFile, which
    # exposes only the [start, stop) window of the underlying file.  The
    # junk padding written around each sample must stay invisible.

    def setUp(self):
        self._path = test_support.TESTFN
        self._file = open(self._path, 'wb+')

    def tearDown(self):
        self._file.close()
        self._delete_recursively(self._path)

    def test_initialize(self):
        # Initialize and check position
        self._file.write('foo' + os.linesep + 'bar')
        pos = self._file.tell()
        proxy = mailbox._PartialFile(self._file, 2, 5)
        # The proxy position is relative to the window start; the
        # underlying file position is untouched.
        self.assertEqual(proxy.tell(), 0)
        self.assertEqual(self._file.tell(), pos)

    def test_read(self):
        self._file.write('***bar***')
        self._test_read(mailbox._PartialFile(self._file, 3, 6))

    def test_readline(self):
        self._file.write('!!!!!foo%sbar%sfred%sbob!!!!!' %
                         (os.linesep, os.linesep, os.linesep))
        self._test_readline(mailbox._PartialFile(self._file, 5,
                                                 18 + 3 * len(os.linesep)))

    def test_readlines(self):
        self._file.write('foo%sbar%sfred%sbob?????' %
                         (os.linesep, os.linesep, os.linesep))
        self._test_readlines(mailbox._PartialFile(self._file, 0,
                                                  13 + 3 * len(os.linesep)))

    def test_iteration(self):
        self._file.write('____foo%sbar%sfred%sbob####' %
                         (os.linesep, os.linesep, os.linesep))
        self._test_iteration(mailbox._PartialFile(self._file, 4,
                                                  17 + 3 * len(os.linesep)))

    def test_seek_and_tell(self):
        self._file.write('(((foo%sbar%s$$$' % (os.linesep, os.linesep))
        self._test_seek_and_tell(mailbox._PartialFile(self._file, 3,
                                                      9 + 2 * len(os.linesep)))

    def test_close(self):
        self._file.write('&foo%sbar%s^' % (os.linesep, os.linesep))
        self._test_close(mailbox._PartialFile(self._file, 1,
                                              6 + 3 * len(os.linesep)))
## Start: tests from the original module (for backward compatibility).
FROM_ = "From some.body@dummy.domain Sat Jul 24 13:43:35 2004\n"
DUMMY_MESSAGE = """\
From: some.body@dummy.domain
To: me@my.domain
Subject: Simple Test
This is a dummy message.
"""
class MaildirTestCase(unittest.TestCase):
    """Legacy tests kept from the pre-2.5 ``mailbox`` test module.

    Exercises ``mailbox.Maildir`` iteration via the old ``next()`` API and
    the deprecated ``PortableUnixMailbox`` interface.
    """

    def setUp(self):
        # create a new maildir mailbox to work with:
        self._dir = test_support.TESTFN
        os.mkdir(self._dir)
        os.mkdir(os.path.join(self._dir, "cur"))
        os.mkdir(os.path.join(self._dir, "tmp"))
        os.mkdir(os.path.join(self._dir, "new"))
        self._counter = 1
        self._msgfiles = []

    def tearDown(self):
        # Explicit loop instead of map(): map() was being evaluated purely
        # for its side effects, which is unidiomatic and a silent no-op
        # under Python 3's lazy map.
        for msgfile in self._msgfiles:
            os.unlink(msgfile)
        os.rmdir(os.path.join(self._dir, "cur"))
        os.rmdir(os.path.join(self._dir, "tmp"))
        os.rmdir(os.path.join(self._dir, "new"))
        os.rmdir(self._dir)

    def createMessage(self, dir, mbox=False):
        """Create one message file under *dir* ('cur' or 'new').

        Writes the message into tmp/ and hard-links it into *dir* (the
        maildir delivery protocol); if *mbox* is true the tmp/ copy is
        prefixed with a Unix "From " line.  Returns the tmp/ path.
        """
        t = int(time.time() % 1000000)
        pid = self._counter
        self._counter += 1
        # Maildir-style unique name: <time>.<counter>.<host>.<domain>
        filename = os.extsep.join((str(t), str(pid), "myhostname", "mydomain"))
        tmpname = os.path.join(self._dir, "tmp", filename)
        newname = os.path.join(self._dir, dir, filename)
        with open(tmpname, "w") as fp:
            self._msgfiles.append(tmpname)
            if mbox:
                fp.write(FROM_)
            fp.write(DUMMY_MESSAGE)
        if hasattr(os, "link"):
            os.link(tmpname, newname)
        else:
            # Platforms without os.link: fall back to writing a plain copy.
            with open(newname, "w") as fp:
                fp.write(DUMMY_MESSAGE)
        self._msgfiles.append(newname)
        return tmpname

    def test_empty_maildir(self):
        """Test an empty maildir mailbox"""
        # Test for regression on bug #117490:
        # Make sure the boxes attribute actually gets set.
        self.mbox = mailbox.Maildir(test_support.TESTFN)
        #self.assertTrue(hasattr(self.mbox, "boxes"))
        #self.assertTrue(len(self.mbox.boxes) == 0)
        self.assertIs(self.mbox.next(), None)
        self.assertIs(self.mbox.next(), None)

    def test_nonempty_maildir_cur(self):
        self.createMessage("cur")
        self.mbox = mailbox.Maildir(test_support.TESTFN)
        #self.assertTrue(len(self.mbox.boxes) == 1)
        self.assertIsNot(self.mbox.next(), None)
        self.assertIs(self.mbox.next(), None)
        self.assertIs(self.mbox.next(), None)

    def test_nonempty_maildir_new(self):
        self.createMessage("new")
        self.mbox = mailbox.Maildir(test_support.TESTFN)
        #self.assertTrue(len(self.mbox.boxes) == 1)
        self.assertIsNot(self.mbox.next(), None)
        self.assertIs(self.mbox.next(), None)
        self.assertIs(self.mbox.next(), None)

    def test_nonempty_maildir_both(self):
        self.createMessage("cur")
        self.createMessage("new")
        self.mbox = mailbox.Maildir(test_support.TESTFN)
        #self.assertTrue(len(self.mbox.boxes) == 2)
        self.assertIsNot(self.mbox.next(), None)
        self.assertIsNot(self.mbox.next(), None)
        self.assertIs(self.mbox.next(), None)
        self.assertIs(self.mbox.next(), None)

    def test_unix_mbox(self):
        ### should be better!
        import email.parser
        fname = self.createMessage("cur", True)
        n = 0
        fp = open(fname)
        try:
            for msg in mailbox.PortableUnixMailbox(fp,
                                                   email.parser.Parser().parse):
                n += 1
                self.assertEqual(msg["subject"], "Simple Test")
                self.assertEqual(len(str(msg)), len(FROM_) + len(DUMMY_MESSAGE))
        finally:
            # The original leaked this file object; close it explicitly.
            fp.close()
        self.assertEqual(n, 1)
## End: classes from the original module (for backward compatibility).
_sample_message = """\
Return-Path: <gkj@gregorykjohnson.com>
X-Original-To: gkj+person@localhost
Delivered-To: gkj+person@localhost
Received: from localhost (localhost [127.0.0.1])
by andy.gregorykjohnson.com (Postfix) with ESMTP id 356ED9DD17
for <gkj+person@localhost>; Wed, 13 Jul 2005 17:23:16 -0400 (EDT)
Delivered-To: gkj@sundance.gregorykjohnson.com
Received: from localhost [127.0.0.1]
by localhost with POP3 (fetchmail-6.2.5)
for gkj+person@localhost (single-drop); Wed, 13 Jul 2005 17:23:16 -0400 (EDT)
Received: from andy.gregorykjohnson.com (andy.gregorykjohnson.com [64.32.235.228])
by sundance.gregorykjohnson.com (Postfix) with ESMTP id 5B056316746
for <gkj@gregorykjohnson.com>; Wed, 13 Jul 2005 17:23:11 -0400 (EDT)
Received: by andy.gregorykjohnson.com (Postfix, from userid 1000)
id 490CD9DD17; Wed, 13 Jul 2005 17:23:11 -0400 (EDT)
Date: Wed, 13 Jul 2005 17:23:11 -0400
From: "Gregory K. Johnson" <gkj@gregorykjohnson.com>
To: gkj@gregorykjohnson.com
Subject: Sample message
Message-ID: <20050713212311.GC4701@andy.gregorykjohnson.com>
Mime-Version: 1.0
Content-Type: multipart/mixed; boundary="NMuMz9nt05w80d4+"
Content-Disposition: inline
User-Agent: Mutt/1.5.9i
--NMuMz9nt05w80d4+
Content-Type: text/plain; charset=us-ascii
Content-Disposition: inline
This is a sample message.
--
Gregory K. Johnson
--NMuMz9nt05w80d4+
Content-Type: application/octet-stream
Content-Disposition: attachment; filename="text.gz"
Content-Transfer-Encoding: base64
H4sICM2D1UIAA3RleHQAC8nILFYAokSFktSKEoW0zJxUPa7wzJIMhZLyfIWczLzUYj0uAHTs
3FYlAAAA
--NMuMz9nt05w80d4+--
"""
_sample_headers = {
"Return-Path":"<gkj@gregorykjohnson.com>",
"X-Original-To":"gkj+person@localhost",
"Delivered-To":"gkj+person@localhost",
"Received":"""from localhost (localhost [127.0.0.1])
by andy.gregorykjohnson.com (Postfix) with ESMTP id 356ED9DD17
for <gkj+person@localhost>; Wed, 13 Jul 2005 17:23:16 -0400 (EDT)""",
"Delivered-To":"gkj@sundance.gregorykjohnson.com",
"Received":"""from localhost [127.0.0.1]
by localhost with POP3 (fetchmail-6.2.5)
for gkj+person@localhost (single-drop); Wed, 13 Jul 2005 17:23:16 -0400 (EDT)""",
"Received":"""from andy.gregorykjohnson.com (andy.gregorykjohnson.com [64.32.235.228])
by sundance.gregorykjohnson.com (Postfix) with ESMTP id 5B056316746
for <gkj@gregorykjohnson.com>; Wed, 13 Jul 2005 17:23:11 -0400 (EDT)""",
"Received":"""by andy.gregorykjohnson.com (Postfix, from userid 1000)
id 490CD9DD17; Wed, 13 Jul 2005 17:23:11 -0400 (EDT)""",
"Date":"Wed, 13 Jul 2005 17:23:11 -0400",
"From":""""Gregory K. Johnson" <gkj@gregorykjohnson.com>""",
"To":"gkj@gregorykjohnson.com",
"Subject":"Sample message",
"Mime-Version":"1.0",
"Content-Type":"""multipart/mixed; boundary="NMuMz9nt05w80d4+\"""",
"Content-Disposition":"inline",
"User-Agent": "Mutt/1.5.9i" }
_sample_payloads = ("""This is a sample message.
--
Gregory K. Johnson
""",
"""H4sICM2D1UIAA3RleHQAC8nILFYAokSFktSKEoW0zJxUPa7wzJIMhZLyfIWczLzUYj0uAHTs
3FYlAAAA
""")
def test_main():
    # Run every test class in this module, then reap any child processes
    # the tests may have spawned.
    test_classes = (
        TestMailboxSuperclass, TestMaildir, TestMbox, TestMMDF, TestMH,
        TestBabyl, TestMessage, TestMaildirMessage, TestMboxMessage,
        TestMHMessage, TestBabylMessage, TestMMDFMessage,
        TestMessageConversion, TestProxyFile, TestPartialFile,
        MaildirTestCase,
    )
    test_support.run_unittest(*test_classes)
    test_support.reap_children()


if __name__ == '__main__':
    test_main()
| gpl-2.0 |
joopert/nad_receiver | nad_receiver/__init__.py | 1 | 11483 | """
NAD has an RS232 interface to control the receiver.
Not all receivers have all functions.
Functions can be found on the NAD website: http://nadelectronics.com/software
"""
import codecs
import socket
from time import sleep
from typing import Any, Dict, Iterable, Optional, Union
from nad_receiver.nad_commands import CMDS
from nad_receiver.nad_transport import (NadTransport, SerialPortTransport, TelnetTransportWrapper,
DEFAULT_TIMEOUT)
import logging
logging.basicConfig()
_LOGGER = logging.getLogger("nad_receiver")
# Uncomment this line to see all communication with the device:
# _LOGGER.setLevel(logging.DEBUG)
class NADReceiver:
    """NAD receiver controlled over an RS232 serial connection.

    Each ``main_*``/``tuner_*`` helper wraps a single command from
    ``nad_commands.CMDS``.  ``operator`` must be one of the operators the
    command declares as supported (typically '?', '=', '+', '-') and
    ``value`` is only meaningful together with '='.
    """

    transport: NadTransport

    def __init__(self, serial_port: str) -> None:
        """Create RS232 connection."""
        self.transport = SerialPortTransport(serial_port)

    def exec_command(self, domain: str, function: str, operator: str,
                     value: Optional[str] = None) -> Optional[str]:
        """Write a command to the receiver and read the value it returns.

        The receiver always answers -- also when setting a value -- with
        ``<command>=<value>``; the part after '=' is returned, or None
        when the reply could not be parsed.

        Raises ValueError for an unsupported operator or when '=' is used
        without a value.
        """
        if operator not in CMDS[domain][function]['supported_operators']:
            raise ValueError('Invalid operator provided %s' % operator)
        if operator == '=' and value is None:
            raise ValueError('No value provided')

        cmd = ''.join([CMDS[domain][function]['cmd'], operator])  # type: ignore
        assert isinstance(cmd, str)
        # Only append a real value; appending ``str(None)`` would send the
        # literal text "None" to the receiver.
        if value is not None:
            cmd = cmd + value

        try:
            msg = self.transport.communicate(cmd)
            _LOGGER.debug(f"sent: '{cmd}' reply: '{msg}'")
            return msg.split('=')[1]
        except IndexError:
            # Reply did not contain '='; treat it as "no value".
            pass
        return None

    def main_dimmer(self, operator: str, value: Optional[str] = None) -> Optional[str]:
        """Execute Main.Dimmer."""
        return self.exec_command('main', 'dimmer', operator, value)

    def main_mute(self, operator: str, value: Optional[str] = None) -> Optional[str]:
        """Execute Main.Mute."""
        return self.exec_command('main', 'mute', operator, value)

    def main_power(self, operator: str, value: Optional[str] = None) -> Optional[str]:
        """Execute Main.Power."""
        return self.exec_command('main', 'power', operator, value)

    def main_volume(self, operator: str, value: Optional[str] = None) -> Optional[float]:
        """
        Execute Main.Volume.

        Returns float, or None when the receiver's reply is not numeric.
        """
        # Bug fix: the previous code passed ``str(value)`` unconditionally,
        # so query commands ('?', '+', '-') sent the literal text "None".
        volume = self.exec_command('main', 'volume', operator,
                                   None if value is None else str(value))
        if volume is None:
            return None
        try:
            return float(volume)
        except ValueError:
            return None

    def main_ir(self, operator: str, value: Optional[str] = None) -> Optional[str]:
        """Execute Main.IR."""
        return self.exec_command('main', 'ir', operator, value)

    def main_listeningmode(self, operator: str, value: Optional[str] = None) -> Optional[str]:
        """Execute Main.ListeningMode."""
        return self.exec_command('main', 'listeningmode', operator, value)

    def main_sleep(self, operator: str, value: Optional[str] = None) -> Optional[str]:
        """Execute Main.Sleep."""
        return self.exec_command('main', 'sleep', operator, value)

    def main_tape_monitor(self, operator: str, value: Optional[str] = None) -> Optional[str]:
        """Execute Main.Tape1."""
        return self.exec_command('main', 'tape_monitor', operator, value)

    def main_speaker_a(self, operator: str, value: Optional[str] = None) -> Optional[str]:
        """Execute Main.SpeakerA."""
        return self.exec_command('main', 'speaker_a', operator, value)

    def main_speaker_b(self, operator: str, value: Optional[str] = None) -> Optional[str]:
        """Execute Main.SpeakerB."""
        return self.exec_command('main', 'speaker_b', operator, value)

    def main_source(self, operator: str, value: Optional[str] = None) -> Optional[Union[int, str]]:
        """
        Execute Main.Source.

        Returns int when the receiver reports a numeric source, else the
        raw string.
        """
        # Same ``str(None)`` fix as in main_volume.
        source = self.exec_command('main', 'source', operator,
                                   None if value is None else str(value))
        if source is None:
            return None
        try:
            # try to return as integer, some receivers return numbers
            return int(source)
        except ValueError:
            # return source as string
            return source

    def main_version(self, operator: str, value: Optional[str] = None) -> Optional[str]:
        """Execute Main.Version."""
        return self.exec_command('main', 'version', operator, value)

    def main_model(self, operator: str, value: Optional[str] = None) -> Optional[str]:
        """Execute Main.Model."""
        return self.exec_command('main', 'model', operator, value)

    def tuner_am_frequency(self, operator: str, value: Optional[str] = None) -> Optional[str]:
        """Execute Tuner.AM.Frequence."""
        return self.exec_command('tuner', 'am_frequency', operator, value)

    def tuner_am_preset(self, operator: str, value: Optional[str] = None) -> Optional[str]:
        """Execute Tuner.AM.Preset."""
        return self.exec_command('tuner', 'am_preset', operator, value)

    def tuner_band(self, operator: str, value: Optional[str] = None) -> Optional[str]:
        """Execute Tuner.Band."""
        return self.exec_command('tuner', 'band', operator, value)

    def tuner_fm_frequency(self, operator: str, value: Optional[str] = None) -> Optional[str]:
        """Execute Tuner.FM.Frequence."""
        return self.exec_command('tuner', 'fm_frequency', operator, value)

    def tuner_fm_mute(self, operator: str, value: Optional[str] = None) -> Optional[str]:
        """Execute Tuner.FM.Mute."""
        return self.exec_command('tuner', 'fm_mute', operator, value)

    def tuner_fm_preset(self, operator: str, value: Optional[str] = None) -> Optional[str]:
        """Execute Tuner.FM.Preset."""
        return self.exec_command('tuner', 'fm_preset', operator, value)
class NADReceiverTelnet(NADReceiver):
    """
    Support NAD amplifiers that use telnet for communication.

    Supports all commands from the RS232 base class
    Known supported model: Nad T787.
    """

    def __init__(self, host: str, port: int = 23, timeout: int = DEFAULT_TIMEOUT):
        """Create NADTelnet."""
        # Deliberately does not call super().__init__(): only the transport
        # differs from the base class, which would open a serial port.
        self.transport = TelnetTransportWrapper(host, port, timeout)
class NADReceiverTCP:
"""
Support NAD amplifiers that use tcp for communication.
Known supported model: Nad D 7050.
"""
POLL_VOLUME = "0001020204"
POLL_POWER = "0001020209"
POLL_MUTED = "000102020a"
POLL_SOURCE = "0001020203"
CMD_POWERSAVE = "00010207000001020207"
CMD_OFF = "0001020900"
CMD_ON = "0001020901"
CMD_VOLUME = "00010204"
CMD_MUTE = "0001020a01"
CMD_UNMUTE = "0001020a00"
CMD_SOURCE = "00010203"
SOURCES = {'Coaxial 1': '00', 'Coaxial 2': '01', 'Optical 1': '02',
'Optical 2': '03', 'Computer': '04', 'Airplay': '05',
'Dock': '06', 'Bluetooth': '07'}
SOURCES_REVERSED = {value: key for key, value in
SOURCES.items()}
PORT = 50001
BUFFERSIZE = 1024
def __init__(self, host: str) -> None:
"""Setup globals."""
self._host = host
def _send(self, message: str, read_reply: bool =False) -> Optional[str]:
"""Send a command string to the amplifier."""
sock: socket.socket
for tries in range(0, 3):
try:
sock = socket.create_connection((self._host, self.PORT),
timeout=5)
break
except socket.timeout:
print("Socket connection timed out.")
return None
except (ConnectionError, BrokenPipeError):
if tries == 2:
print("socket connect failed.")
return None
sleep(0.1)
if not sock:
return None
with sock:
sock.send(codecs.decode(message.encode(), encoding='hex_codec'))
if read_reply:
sleep(0.1)
reply = ''
tries = 0
max_tries = 20
while len(reply) < len(message) and tries < max_tries:
try:
reply += codecs.encode(sock.recv(self.BUFFERSIZE), 'hex')\
.decode("utf-8")
return reply
except (ConnectionError, BrokenPipeError):
pass
tries += 1
return None
def status(self) -> Optional[Dict[str, Any]]:
"""
Return the status of the device.
Returns a dictionary with keys 'volume' (int 0-200) , 'power' (bool),
'muted' (bool) and 'source' (str).
"""
nad_reply = self._send(self.POLL_VOLUME +
self.POLL_POWER +
self.POLL_MUTED +
self.POLL_SOURCE, read_reply=True)
if nad_reply is None:
return None
# split reply into parts of 10 characters
num_chars = 10
nad_status = [nad_reply[i:i + num_chars]
for i in range(0, len(nad_reply), num_chars)]
return {'volume': int(nad_status[0][-2:], 16),
'power': nad_status[1][-2:] == '01',
'muted': nad_status[2][-2:] == '01',
'source': self.SOURCES_REVERSED[nad_status[3][-2:]]}
def power_off(self) -> None:
"""Power the device off."""
status = self.status()
if not status:
return None
if status['power']:
# Setting power off when it is already off can cause hangs
self._send(self.CMD_POWERSAVE + self.CMD_OFF)
def power_on(self) -> None:
"""Power the device on."""
status = self.status()
if not status:
return None
if not status['power']:
self._send(self.CMD_ON, read_reply=True)
sleep(0.5) # Give NAD7050 some time before next command
def set_volume(self, volume: int) -> None:
"""Set volume level of the device. Accepts integer values 0-200."""
if 0 <= volume <= 200:
volume_hex = format(volume, "02x") # Convert to hex
self._send(self.CMD_VOLUME + volume_hex)
    def mute(self) -> None:
        """Mute the device."""
        # NOTE(review): mute reads the echoed reply while unmute does not —
        # presumably to swallow the echo before the next poll; confirm.
        self._send(self.CMD_MUTE, read_reply=True)
    def unmute(self) -> None:
        """Unmute the device."""
        # Fire-and-forget: the reply (if any) is left unread.
        self._send(self.CMD_UNMUTE)
def select_source(self, source: str) -> None:
"""Select a source from the list of sources."""
status = self.status()
if not status:
return None
if status['power']: # Changing source when off may hang NAD7050
# Setting the source to the current source will hang the NAD7050
if status['source'] != source:
if source in self.SOURCES:
self._send(self.CMD_SOURCE + self.SOURCES[source],
read_reply=True)
def available_sources(self) -> Iterable[str]:
"""Return a list of available sources."""
return list(self.SOURCES.keys())
| mit |
kelle/astropy | astropy/vo/validator/tstquery.py | 2 | 3390 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Temporary solution until `astropy.vo.validator.Conf.conesearch_master_list`
includes ``<testQuery>`` fields.
In case USVO service is unstable, it does the following:
#. Try USVO production server.
#. If fails, try USVO test server (has latest bug fix, but does not
contain all registered services).
#. If SR > 0.1, force SR to be 0.1.
#. If fails, use RA=0 DEC=0 SR=0.1.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
# STDLIB
import warnings
from xml.dom import minidom
from collections import OrderedDict
# LOCAL
from ...utils.data import get_readable_fileobj
from ...utils.decorators import deprecated
from ...utils.exceptions import AstropyUserWarning
@deprecated(
    '2.0', alternative='astroquery.vo_conesearch.validator.tstquery.parse_cs')
def parse_cs(id):
    """Return ``<testQuery>`` pars as dict for given Resource ID.

    The returned ``OrderedDict`` has string values for the keys ``'RA'``,
    ``'DEC'`` and ``'SR'``.  Falls back to ``RA=0 DEC=0 SR=0.1`` when
    neither the production nor the test registry server yields a usable
    ``<testQuery>`` element.
    """
    if isinstance(id, bytes):  # pragma: py3
        id = id.decode('ascii')

    # Production server.
    url = ('http://vao.stsci.edu/directory/getRecord.aspx?'
           'id={0}&format=xml'.format(id))

    # Test server (in case production server fails).
    backup_url = ('http://vaotest.stsci.edu/directory/getRecord.aspx?'
                  'id={0}&format=xml'.format(id))

    tqp = ['ra', 'dec', 'sr']
    d = OrderedDict()
    urls_failed = False
    urls_errmsg = ''

    try:
        with get_readable_fileobj(url, encoding='binary',
                                  show_progress=False) as fd:
            dom = minidom.parse(fd)
    except Exception as e:  # pragma: no cover
        try:
            warnings.warn('{0} raised {1}, trying {2}'.format(
                url, str(e), backup_url), AstropyUserWarning)
            with get_readable_fileobj(backup_url, encoding='binary',
                                      show_progress=False) as fd:
                dom = minidom.parse(fd)
        except Exception as e:
            urls_failed = True
            urls_errmsg = '{0} raised {1}, using default'.format(
                backup_url, str(e))

    if not urls_failed:
        tq = dom.getElementsByTagName('testQuery')
        if tq:
            for key in tqp:
                try:
                    d[key.upper()] = tq[0].getElementsByTagName(
                        key)[0].firstChild.nodeValue.strip()
                except Exception:  # pragma: no cover
                    # Incomplete <testQuery>; abandon it and fall back to
                    # the default query parameters below.
                    urls_failed = True
                    urls_errmsg = ('Incomplete testQuery for {0}, '
                                   'using default'.format(id))
                    break
        else:  # pragma: no cover
            urls_failed = True
            urls_errmsg = 'No testQuery found for {0}, using default'.format(id)

    # Handle big SR returning too big a table for some queries, causing
    # tests to fail due to timeout.
    default_sr = '0.1'

    # If no testQuery found, use default.  Build the OrderedDict from an
    # item sequence so key order is guaranteed on all Python versions.
    if urls_failed:  # pragma: no cover
        d = OrderedDict([('RA', '0'), ('DEC', '0'), ('SR', default_sr)])
        warnings.warn(urls_errmsg, AstropyUserWarning)
    # Force SR to be reasonably small.  Compare numerically: the values
    # are strings, and lexicographic comparison mis-orders inputs such as
    # '.5' (would compare below '0.1') or '1e-3' (would compare above it).
    elif float(d['SR']) > float(default_sr):
        warnings.warn('SR={0} is too large, using SR={1} for {2}'.format(
            d['SR'], default_sr, id), AstropyUserWarning)
        d['SR'] = default_sr

    return d
| bsd-3-clause |
Tesora/tesora-python-troveclient | troveclient/tests/osc/v1/test_database_flavors.py | 1 | 1636 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from troveclient.osc.v1 import database_flavors
from troveclient.tests.osc.v1 import fakes
class TestFlavors(fakes.TestDatabasev1):
    """Base fixture for database flavor command tests."""

    # Shared fake flavor data used by the concrete test cases below.
    fake_flavors = fakes.FakeFlavors()

    def setUp(self):
        """Expose the mocked database client and its flavors manager."""
        super(TestFlavors, self).setUp()
        self.mock_client = self.app.client_manager.database
        self.flavor_client = self.app.client_manager.database.flavors
class TestFlavorList(TestFlavors):
    """Tests for the ``database flavor list`` command."""

    # Expected output header and the row matching the fake flavor fixture.
    columns = database_flavors.ListDatabaseFlavors.columns
    values = (1, 'm1.tiny', 512, '', '', '')

    def setUp(self):
        super(TestFlavorList, self).setUp()
        self.cmd = database_flavors.ListDatabaseFlavors(self.app, None)
        flavors = [self.fake_flavors.get_flavors_1()]
        self.data = flavors
        self.flavor_client.list.return_value = flavors

    def test_flavor_list_defaults(self):
        parsed_args = self.check_parser(self.cmd, [], [])
        actual_columns, actual_values = self.cmd.take_action(parsed_args)
        self.flavor_client.list.assert_called_once_with()
        self.assertEqual(self.columns, actual_columns)
        self.assertEqual([self.values], actual_values)
| apache-2.0 |
alanjw/GreenOpenERP-Win-X86 | python/Lib/site-packages/unidecode/x098.py | 252 | 4645 | data = (
'Hu ', # 0x00
'Ye ', # 0x01
'Ding ', # 0x02
'Qing ', # 0x03
'Pan ', # 0x04
'Xiang ', # 0x05
'Shun ', # 0x06
'Han ', # 0x07
'Xu ', # 0x08
'Yi ', # 0x09
'Xu ', # 0x0a
'Gu ', # 0x0b
'Song ', # 0x0c
'Kui ', # 0x0d
'Qi ', # 0x0e
'Hang ', # 0x0f
'Yu ', # 0x10
'Wan ', # 0x11
'Ban ', # 0x12
'Dun ', # 0x13
'Di ', # 0x14
'Dan ', # 0x15
'Pan ', # 0x16
'Po ', # 0x17
'Ling ', # 0x18
'Ce ', # 0x19
'Jing ', # 0x1a
'Lei ', # 0x1b
'He ', # 0x1c
'Qiao ', # 0x1d
'E ', # 0x1e
'E ', # 0x1f
'Wei ', # 0x20
'Jie ', # 0x21
'Gua ', # 0x22
'Shen ', # 0x23
'Yi ', # 0x24
'Shen ', # 0x25
'Hai ', # 0x26
'Dui ', # 0x27
'Pian ', # 0x28
'Ping ', # 0x29
'Lei ', # 0x2a
'Fu ', # 0x2b
'Jia ', # 0x2c
'Tou ', # 0x2d
'Hui ', # 0x2e
'Kui ', # 0x2f
'Jia ', # 0x30
'Le ', # 0x31
'Tian ', # 0x32
'Cheng ', # 0x33
'Ying ', # 0x34
'Jun ', # 0x35
'Hu ', # 0x36
'Han ', # 0x37
'Jing ', # 0x38
'Tui ', # 0x39
'Tui ', # 0x3a
'Pin ', # 0x3b
'Lai ', # 0x3c
'Tui ', # 0x3d
'Zi ', # 0x3e
'Zi ', # 0x3f
'Chui ', # 0x40
'Ding ', # 0x41
'Lai ', # 0x42
'Yan ', # 0x43
'Han ', # 0x44
'Jian ', # 0x45
'Ke ', # 0x46
'Cui ', # 0x47
'Jiong ', # 0x48
'Qin ', # 0x49
'Yi ', # 0x4a
'Sai ', # 0x4b
'Ti ', # 0x4c
'E ', # 0x4d
'E ', # 0x4e
'Yan ', # 0x4f
'Hun ', # 0x50
'Kan ', # 0x51
'Yong ', # 0x52
'Zhuan ', # 0x53
'Yan ', # 0x54
'Xian ', # 0x55
'Xin ', # 0x56
'Yi ', # 0x57
'Yuan ', # 0x58
'Sang ', # 0x59
'Dian ', # 0x5a
'Dian ', # 0x5b
'Jiang ', # 0x5c
'Ku ', # 0x5d
'Lei ', # 0x5e
'Liao ', # 0x5f
'Piao ', # 0x60
'Yi ', # 0x61
'Man ', # 0x62
'Qi ', # 0x63
'Rao ', # 0x64
'Hao ', # 0x65
'Qiao ', # 0x66
'Gu ', # 0x67
'Xun ', # 0x68
'Qian ', # 0x69
'Hui ', # 0x6a
'Zhan ', # 0x6b
'Ru ', # 0x6c
'Hong ', # 0x6d
'Bin ', # 0x6e
'Xian ', # 0x6f
'Pin ', # 0x70
'Lu ', # 0x71
'Lan ', # 0x72
'Nie ', # 0x73
'Quan ', # 0x74
'Ye ', # 0x75
'Ding ', # 0x76
'Qing ', # 0x77
'Han ', # 0x78
'Xiang ', # 0x79
'Shun ', # 0x7a
'Xu ', # 0x7b
'Xu ', # 0x7c
'Wan ', # 0x7d
'Gu ', # 0x7e
'Dun ', # 0x7f
'Qi ', # 0x80
'Ban ', # 0x81
'Song ', # 0x82
'Hang ', # 0x83
'Yu ', # 0x84
'Lu ', # 0x85
'Ling ', # 0x86
'Po ', # 0x87
'Jing ', # 0x88
'Jie ', # 0x89
'Jia ', # 0x8a
'Tian ', # 0x8b
'Han ', # 0x8c
'Ying ', # 0x8d
'Jiong ', # 0x8e
'Hai ', # 0x8f
'Yi ', # 0x90
'Pin ', # 0x91
'Hui ', # 0x92
'Tui ', # 0x93
'Han ', # 0x94
'Ying ', # 0x95
'Ying ', # 0x96
'Ke ', # 0x97
'Ti ', # 0x98
'Yong ', # 0x99
'E ', # 0x9a
'Zhuan ', # 0x9b
'Yan ', # 0x9c
'E ', # 0x9d
'Nie ', # 0x9e
'Man ', # 0x9f
'Dian ', # 0xa0
'Sang ', # 0xa1
'Hao ', # 0xa2
'Lei ', # 0xa3
'Zhan ', # 0xa4
'Ru ', # 0xa5
'Pin ', # 0xa6
'Quan ', # 0xa7
'Feng ', # 0xa8
'Biao ', # 0xa9
'Oroshi ', # 0xaa
'Fu ', # 0xab
'Xia ', # 0xac
'Zhan ', # 0xad
'Biao ', # 0xae
'Sa ', # 0xaf
'Ba ', # 0xb0
'Tai ', # 0xb1
'Lie ', # 0xb2
'Gua ', # 0xb3
'Xuan ', # 0xb4
'Shao ', # 0xb5
'Ju ', # 0xb6
'Bi ', # 0xb7
'Si ', # 0xb8
'Wei ', # 0xb9
'Yang ', # 0xba
'Yao ', # 0xbb
'Sou ', # 0xbc
'Kai ', # 0xbd
'Sao ', # 0xbe
'Fan ', # 0xbf
'Liu ', # 0xc0
'Xi ', # 0xc1
'Liao ', # 0xc2
'Piao ', # 0xc3
'Piao ', # 0xc4
'Liu ', # 0xc5
'Biao ', # 0xc6
'Biao ', # 0xc7
'Biao ', # 0xc8
'Liao ', # 0xc9
'[?] ', # 0xca
'Se ', # 0xcb
'Feng ', # 0xcc
'Biao ', # 0xcd
'Feng ', # 0xce
'Yang ', # 0xcf
'Zhan ', # 0xd0
'Biao ', # 0xd1
'Sa ', # 0xd2
'Ju ', # 0xd3
'Si ', # 0xd4
'Sou ', # 0xd5
'Yao ', # 0xd6
'Liu ', # 0xd7
'Piao ', # 0xd8
'Biao ', # 0xd9
'Biao ', # 0xda
'Fei ', # 0xdb
'Fan ', # 0xdc
'Fei ', # 0xdd
'Fei ', # 0xde
'Shi ', # 0xdf
'Shi ', # 0xe0
'Can ', # 0xe1
'Ji ', # 0xe2
'Ding ', # 0xe3
'Si ', # 0xe4
'Tuo ', # 0xe5
'Zhan ', # 0xe6
'Sun ', # 0xe7
'Xiang ', # 0xe8
'Tun ', # 0xe9
'Ren ', # 0xea
'Yu ', # 0xeb
'Juan ', # 0xec
'Chi ', # 0xed
'Yin ', # 0xee
'Fan ', # 0xef
'Fan ', # 0xf0
'Sun ', # 0xf1
'Yin ', # 0xf2
'Zhu ', # 0xf3
'Yi ', # 0xf4
'Zhai ', # 0xf5
'Bi ', # 0xf6
'Jie ', # 0xf7
'Tao ', # 0xf8
'Liu ', # 0xf9
'Ci ', # 0xfa
'Tie ', # 0xfb
'Si ', # 0xfc
'Bao ', # 0xfd
'Shi ', # 0xfe
'Duo ', # 0xff
)
| agpl-3.0 |
beiko-lab/gengis | bin/Lib/site-packages/scipy/sparse/sparsetools/csc.py | 6 | 48084 | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 2.0.7
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
# SWIG bootstrap: on Python >= 2.6, locate the compiled ``_csc`` extension
# next to this module via ``imp`` so the wrapper works when imported from a
# package; older interpreters fall back to a plain import.
if version_info >= (2,6,0):
    def swig_import_helper():
        from os.path import dirname
        import imp
        fp = None
        try:
            # Restrict the search to this package's directory so a same-named
            # module elsewhere on sys.path cannot shadow the extension.
            fp, pathname, description = imp.find_module('_csc', [dirname(__file__)])
        except ImportError:
            import _csc
            return _csc
        if fp is not None:
            try:
                _mod = imp.load_module('_csc', fp, pathname, description)
            finally:
                # find_module opened the file; always close it.
                fp.close()
            return _mod
    _csc = swig_import_helper()
    del swig_import_helper
else:
    import _csc
del version_info
# Expose ``property`` under a private alias for the generated shadow classes.
try:
    _swig_property = property
except NameError:
    pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
    """Generated SWIG attribute setter.

    Writes are routed through the class's ``__swig_setmethods__`` table;
    when *static* is true, names without a registered setter raise
    ``AttributeError`` instead of being added to the instance dict.
    """
    if (name == "thisown"): return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name,None)
    if method: return method(self,value)
    if (not static):
        self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
    """Non-static variant: unknown attribute names are allowed on instances."""
    return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
    """Generated SWIG attribute getter: resolve *name* through the class's
    ``__swig_getmethods__`` table ('thisown' maps to the ownership flag).
    """
    if (name == "thisown"): return self.this.own()
    method = class_type.__swig_getmethods__.get(name,None)
    if method: return method(self)
    raise AttributeError(name)
def _swig_repr(self):
    """Best-effort repr including the underlying SwigPyObject's repr."""
    try: strthis = "proxy of " + self.this.__repr__()
    except: strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# Compatibility shim for pre-2.2 interpreters without new-style classes;
# ``_newclass`` tells generated code whether ``property`` descriptors work.
try:
    _object = object
    _newclass = 1
except AttributeError:
    class _object : pass
    _newclass = 0
def csc_matmat_pass1(*args):
    """
    csc_matmat_pass1(int const n_row, int const n_col, int const [] Ap, int const [] Ai, int const [] Bp,
        int const [] Bi, int [] Cp)

    First pass of CSC matrix-matrix multiplication: judging from the
    signature, the sole output is the column-pointer array ``Cp``, which
    lets the caller size ``Ci``/``Cx`` before running pass 2.
    """
    return _csc.csc_matmat_pass1(*args)
def csc_diagonal(*args):
"""
csc_diagonal(int const n_row, int const n_col, int const [] Ap, int const [] Aj, npy_bool_wrapper const [] Ax,
npy_bool_wrapper [] Yx)
csc_diagonal(int const n_row, int const n_col, int const [] Ap, int const [] Aj, signed char const [] Ax,
signed char [] Yx)
csc_diagonal(int const n_row, int const n_col, int const [] Ap, int const [] Aj, unsigned char const [] Ax,
unsigned char [] Yx)
csc_diagonal(int const n_row, int const n_col, int const [] Ap, int const [] Aj, short const [] Ax,
short [] Yx)
csc_diagonal(int const n_row, int const n_col, int const [] Ap, int const [] Aj, unsigned short const [] Ax,
unsigned short [] Yx)
csc_diagonal(int const n_row, int const n_col, int const [] Ap, int const [] Aj, int const [] Ax,
int [] Yx)
csc_diagonal(int const n_row, int const n_col, int const [] Ap, int const [] Aj, unsigned int const [] Ax,
unsigned int [] Yx)
csc_diagonal(int const n_row, int const n_col, int const [] Ap, int const [] Aj, long long const [] Ax,
long long [] Yx)
csc_diagonal(int const n_row, int const n_col, int const [] Ap, int const [] Aj, unsigned long long const [] Ax,
unsigned long long [] Yx)
csc_diagonal(int const n_row, int const n_col, int const [] Ap, int const [] Aj, float const [] Ax,
float [] Yx)
csc_diagonal(int const n_row, int const n_col, int const [] Ap, int const [] Aj, double const [] Ax,
double [] Yx)
csc_diagonal(int const n_row, int const n_col, int const [] Ap, int const [] Aj, long double const [] Ax,
long double [] Yx)
csc_diagonal(int const n_row, int const n_col, int const [] Ap, int const [] Aj, npy_cfloat_wrapper const [] Ax,
npy_cfloat_wrapper [] Yx)
csc_diagonal(int const n_row, int const n_col, int const [] Ap, int const [] Aj, npy_cdouble_wrapper const [] Ax,
npy_cdouble_wrapper [] Yx)
csc_diagonal(int const n_row, int const n_col, int const [] Ap, int const [] Aj, npy_clongdouble_wrapper const [] Ax,
npy_clongdouble_wrapper [] Yx)
"""
return _csc.csc_diagonal(*args)
def csc_tocsr(*args):
"""
csc_tocsr(int const n_row, int const n_col, int const [] Ap, int const [] Ai, npy_bool_wrapper const [] Ax,
int [] Bp, int [] Bj, npy_bool_wrapper [] Bx)
csc_tocsr(int const n_row, int const n_col, int const [] Ap, int const [] Ai, signed char const [] Ax,
int [] Bp, int [] Bj, signed char [] Bx)
csc_tocsr(int const n_row, int const n_col, int const [] Ap, int const [] Ai, unsigned char const [] Ax,
int [] Bp, int [] Bj, unsigned char [] Bx)
csc_tocsr(int const n_row, int const n_col, int const [] Ap, int const [] Ai, short const [] Ax,
int [] Bp, int [] Bj, short [] Bx)
csc_tocsr(int const n_row, int const n_col, int const [] Ap, int const [] Ai, unsigned short const [] Ax,
int [] Bp, int [] Bj, unsigned short [] Bx)
csc_tocsr(int const n_row, int const n_col, int const [] Ap, int const [] Ai, int const [] Ax,
int [] Bp, int [] Bj, int [] Bx)
csc_tocsr(int const n_row, int const n_col, int const [] Ap, int const [] Ai, unsigned int const [] Ax,
int [] Bp, int [] Bj, unsigned int [] Bx)
csc_tocsr(int const n_row, int const n_col, int const [] Ap, int const [] Ai, long long const [] Ax,
int [] Bp, int [] Bj, long long [] Bx)
csc_tocsr(int const n_row, int const n_col, int const [] Ap, int const [] Ai, unsigned long long const [] Ax,
int [] Bp, int [] Bj, unsigned long long [] Bx)
csc_tocsr(int const n_row, int const n_col, int const [] Ap, int const [] Ai, float const [] Ax,
int [] Bp, int [] Bj, float [] Bx)
csc_tocsr(int const n_row, int const n_col, int const [] Ap, int const [] Ai, double const [] Ax,
int [] Bp, int [] Bj, double [] Bx)
csc_tocsr(int const n_row, int const n_col, int const [] Ap, int const [] Ai, long double const [] Ax,
int [] Bp, int [] Bj, long double [] Bx)
csc_tocsr(int const n_row, int const n_col, int const [] Ap, int const [] Ai, npy_cfloat_wrapper const [] Ax,
int [] Bp, int [] Bj, npy_cfloat_wrapper [] Bx)
csc_tocsr(int const n_row, int const n_col, int const [] Ap, int const [] Ai, npy_cdouble_wrapper const [] Ax,
int [] Bp, int [] Bj, npy_cdouble_wrapper [] Bx)
csc_tocsr(int const n_row, int const n_col, int const [] Ap, int const [] Ai, npy_clongdouble_wrapper const [] Ax,
int [] Bp, int [] Bj, npy_clongdouble_wrapper [] Bx)
"""
return _csc.csc_tocsr(*args)
def csc_matmat_pass2(*args):
"""
csc_matmat_pass2(int const n_row, int const n_col, int const [] Ap, int const [] Ai, npy_bool_wrapper const [] Ax,
int const [] Bp, int const [] Bi, npy_bool_wrapper const [] Bx,
int [] Cp, int [] Ci, npy_bool_wrapper [] Cx)
csc_matmat_pass2(int const n_row, int const n_col, int const [] Ap, int const [] Ai, signed char const [] Ax,
int const [] Bp, int const [] Bi, signed char const [] Bx, int [] Cp,
int [] Ci, signed char [] Cx)
csc_matmat_pass2(int const n_row, int const n_col, int const [] Ap, int const [] Ai, unsigned char const [] Ax,
int const [] Bp, int const [] Bi, unsigned char const [] Bx,
int [] Cp, int [] Ci, unsigned char [] Cx)
csc_matmat_pass2(int const n_row, int const n_col, int const [] Ap, int const [] Ai, short const [] Ax,
int const [] Bp, int const [] Bi, short const [] Bx, int [] Cp, int [] Ci,
short [] Cx)
csc_matmat_pass2(int const n_row, int const n_col, int const [] Ap, int const [] Ai, unsigned short const [] Ax,
int const [] Bp, int const [] Bi, unsigned short const [] Bx,
int [] Cp, int [] Ci, unsigned short [] Cx)
csc_matmat_pass2(int const n_row, int const n_col, int const [] Ap, int const [] Ai, int const [] Ax,
int const [] Bp, int const [] Bi, int const [] Bx, int [] Cp, int [] Ci,
int [] Cx)
csc_matmat_pass2(int const n_row, int const n_col, int const [] Ap, int const [] Ai, unsigned int const [] Ax,
int const [] Bp, int const [] Bi, unsigned int const [] Bx, int [] Cp,
int [] Ci, unsigned int [] Cx)
csc_matmat_pass2(int const n_row, int const n_col, int const [] Ap, int const [] Ai, long long const [] Ax,
int const [] Bp, int const [] Bi, long long const [] Bx, int [] Cp,
int [] Ci, long long [] Cx)
csc_matmat_pass2(int const n_row, int const n_col, int const [] Ap, int const [] Ai, unsigned long long const [] Ax,
int const [] Bp, int const [] Bi, unsigned long long const [] Bx,
int [] Cp, int [] Ci, unsigned long long [] Cx)
csc_matmat_pass2(int const n_row, int const n_col, int const [] Ap, int const [] Ai, float const [] Ax,
int const [] Bp, int const [] Bi, float const [] Bx, int [] Cp, int [] Ci,
float [] Cx)
csc_matmat_pass2(int const n_row, int const n_col, int const [] Ap, int const [] Ai, double const [] Ax,
int const [] Bp, int const [] Bi, double const [] Bx, int [] Cp, int [] Ci,
double [] Cx)
csc_matmat_pass2(int const n_row, int const n_col, int const [] Ap, int const [] Ai, long double const [] Ax,
int const [] Bp, int const [] Bi, long double const [] Bx, int [] Cp,
int [] Ci, long double [] Cx)
csc_matmat_pass2(int const n_row, int const n_col, int const [] Ap, int const [] Ai, npy_cfloat_wrapper const [] Ax,
int const [] Bp, int const [] Bi, npy_cfloat_wrapper const [] Bx,
int [] Cp, int [] Ci, npy_cfloat_wrapper [] Cx)
csc_matmat_pass2(int const n_row, int const n_col, int const [] Ap, int const [] Ai, npy_cdouble_wrapper const [] Ax,
int const [] Bp, int const [] Bi, npy_cdouble_wrapper const [] Bx,
int [] Cp, int [] Ci, npy_cdouble_wrapper [] Cx)
csc_matmat_pass2(int const n_row, int const n_col, int const [] Ap, int const [] Ai, npy_clongdouble_wrapper const [] Ax,
int const [] Bp, int const [] Bi, npy_clongdouble_wrapper const [] Bx,
int [] Cp, int [] Ci, npy_clongdouble_wrapper [] Cx)
"""
return _csc.csc_matmat_pass2(*args)
def csc_matvec(*args):
"""
csc_matvec(int const n_row, int const n_col, int const [] Ap, int const [] Ai, npy_bool_wrapper const [] Ax,
npy_bool_wrapper const [] Xx, npy_bool_wrapper [] Yx)
csc_matvec(int const n_row, int const n_col, int const [] Ap, int const [] Ai, signed char const [] Ax,
signed char const [] Xx, signed char [] Yx)
csc_matvec(int const n_row, int const n_col, int const [] Ap, int const [] Ai, unsigned char const [] Ax,
unsigned char const [] Xx, unsigned char [] Yx)
csc_matvec(int const n_row, int const n_col, int const [] Ap, int const [] Ai, short const [] Ax,
short const [] Xx, short [] Yx)
csc_matvec(int const n_row, int const n_col, int const [] Ap, int const [] Ai, unsigned short const [] Ax,
unsigned short const [] Xx, unsigned short [] Yx)
csc_matvec(int const n_row, int const n_col, int const [] Ap, int const [] Ai, int const [] Ax,
int const [] Xx, int [] Yx)
csc_matvec(int const n_row, int const n_col, int const [] Ap, int const [] Ai, unsigned int const [] Ax,
unsigned int const [] Xx, unsigned int [] Yx)
csc_matvec(int const n_row, int const n_col, int const [] Ap, int const [] Ai, long long const [] Ax,
long long const [] Xx, long long [] Yx)
csc_matvec(int const n_row, int const n_col, int const [] Ap, int const [] Ai, unsigned long long const [] Ax,
unsigned long long const [] Xx, unsigned long long [] Yx)
csc_matvec(int const n_row, int const n_col, int const [] Ap, int const [] Ai, float const [] Ax,
float const [] Xx, float [] Yx)
csc_matvec(int const n_row, int const n_col, int const [] Ap, int const [] Ai, double const [] Ax,
double const [] Xx, double [] Yx)
csc_matvec(int const n_row, int const n_col, int const [] Ap, int const [] Ai, long double const [] Ax,
long double const [] Xx, long double [] Yx)
csc_matvec(int const n_row, int const n_col, int const [] Ap, int const [] Ai, npy_cfloat_wrapper const [] Ax,
npy_cfloat_wrapper const [] Xx, npy_cfloat_wrapper [] Yx)
csc_matvec(int const n_row, int const n_col, int const [] Ap, int const [] Ai, npy_cdouble_wrapper const [] Ax,
npy_cdouble_wrapper const [] Xx, npy_cdouble_wrapper [] Yx)
csc_matvec(int const n_row, int const n_col, int const [] Ap, int const [] Ai, npy_clongdouble_wrapper const [] Ax,
npy_clongdouble_wrapper const [] Xx, npy_clongdouble_wrapper [] Yx)
"""
return _csc.csc_matvec(*args)
def csc_matvecs(*args):
"""
csc_matvecs(int const n_row, int const n_col, int const n_vecs, int const [] Ap, int const [] Ai,
npy_bool_wrapper const [] Ax, npy_bool_wrapper const [] Xx, npy_bool_wrapper [] Yx)
csc_matvecs(int const n_row, int const n_col, int const n_vecs, int const [] Ap, int const [] Ai,
signed char const [] Ax, signed char const [] Xx, signed char [] Yx)
csc_matvecs(int const n_row, int const n_col, int const n_vecs, int const [] Ap, int const [] Ai,
unsigned char const [] Ax, unsigned char const [] Xx, unsigned char [] Yx)
csc_matvecs(int const n_row, int const n_col, int const n_vecs, int const [] Ap, int const [] Ai,
short const [] Ax, short const [] Xx, short [] Yx)
csc_matvecs(int const n_row, int const n_col, int const n_vecs, int const [] Ap, int const [] Ai,
unsigned short const [] Ax, unsigned short const [] Xx, unsigned short [] Yx)
csc_matvecs(int const n_row, int const n_col, int const n_vecs, int const [] Ap, int const [] Ai,
int const [] Ax, int const [] Xx, int [] Yx)
csc_matvecs(int const n_row, int const n_col, int const n_vecs, int const [] Ap, int const [] Ai,
unsigned int const [] Ax, unsigned int const [] Xx, unsigned int [] Yx)
csc_matvecs(int const n_row, int const n_col, int const n_vecs, int const [] Ap, int const [] Ai,
long long const [] Ax, long long const [] Xx, long long [] Yx)
csc_matvecs(int const n_row, int const n_col, int const n_vecs, int const [] Ap, int const [] Ai,
unsigned long long const [] Ax, unsigned long long const [] Xx, unsigned long long [] Yx)
csc_matvecs(int const n_row, int const n_col, int const n_vecs, int const [] Ap, int const [] Ai,
float const [] Ax, float const [] Xx, float [] Yx)
csc_matvecs(int const n_row, int const n_col, int const n_vecs, int const [] Ap, int const [] Ai,
double const [] Ax, double const [] Xx, double [] Yx)
csc_matvecs(int const n_row, int const n_col, int const n_vecs, int const [] Ap, int const [] Ai,
long double const [] Ax, long double const [] Xx, long double [] Yx)
csc_matvecs(int const n_row, int const n_col, int const n_vecs, int const [] Ap, int const [] Ai,
npy_cfloat_wrapper const [] Ax, npy_cfloat_wrapper const [] Xx, npy_cfloat_wrapper [] Yx)
csc_matvecs(int const n_row, int const n_col, int const n_vecs, int const [] Ap, int const [] Ai,
npy_cdouble_wrapper const [] Ax, npy_cdouble_wrapper const [] Xx, npy_cdouble_wrapper [] Yx)
csc_matvecs(int const n_row, int const n_col, int const n_vecs, int const [] Ap, int const [] Ai,
npy_clongdouble_wrapper const [] Ax, npy_clongdouble_wrapper const [] Xx,
npy_clongdouble_wrapper [] Yx)
"""
return _csc.csc_matvecs(*args)
def csc_elmul_csc(*args):
"""
csc_elmul_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, npy_bool_wrapper const [] Ax,
int const [] Bp, int const [] Bi, npy_bool_wrapper const [] Bx,
int [] Cp, int [] Ci, npy_bool_wrapper [] Cx)
csc_elmul_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, signed char const [] Ax,
int const [] Bp, int const [] Bi, signed char const [] Bx, int [] Cp,
int [] Ci, signed char [] Cx)
csc_elmul_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, unsigned char const [] Ax,
int const [] Bp, int const [] Bi, unsigned char const [] Bx,
int [] Cp, int [] Ci, unsigned char [] Cx)
csc_elmul_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, short const [] Ax,
int const [] Bp, int const [] Bi, short const [] Bx, int [] Cp, int [] Ci,
short [] Cx)
csc_elmul_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, unsigned short const [] Ax,
int const [] Bp, int const [] Bi, unsigned short const [] Bx,
int [] Cp, int [] Ci, unsigned short [] Cx)
csc_elmul_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, int const [] Ax,
int const [] Bp, int const [] Bi, int const [] Bx, int [] Cp, int [] Ci,
int [] Cx)
csc_elmul_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, unsigned int const [] Ax,
int const [] Bp, int const [] Bi, unsigned int const [] Bx, int [] Cp,
int [] Ci, unsigned int [] Cx)
csc_elmul_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, long long const [] Ax,
int const [] Bp, int const [] Bi, long long const [] Bx, int [] Cp,
int [] Ci, long long [] Cx)
csc_elmul_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, unsigned long long const [] Ax,
int const [] Bp, int const [] Bi, unsigned long long const [] Bx,
int [] Cp, int [] Ci, unsigned long long [] Cx)
csc_elmul_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, float const [] Ax,
int const [] Bp, int const [] Bi, float const [] Bx, int [] Cp, int [] Ci,
float [] Cx)
csc_elmul_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, double const [] Ax,
int const [] Bp, int const [] Bi, double const [] Bx, int [] Cp, int [] Ci,
double [] Cx)
csc_elmul_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, long double const [] Ax,
int const [] Bp, int const [] Bi, long double const [] Bx, int [] Cp,
int [] Ci, long double [] Cx)
csc_elmul_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, npy_cfloat_wrapper const [] Ax,
int const [] Bp, int const [] Bi, npy_cfloat_wrapper const [] Bx,
int [] Cp, int [] Ci, npy_cfloat_wrapper [] Cx)
csc_elmul_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, npy_cdouble_wrapper const [] Ax,
int const [] Bp, int const [] Bi, npy_cdouble_wrapper const [] Bx,
int [] Cp, int [] Ci, npy_cdouble_wrapper [] Cx)
csc_elmul_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, npy_clongdouble_wrapper const [] Ax,
int const [] Bp, int const [] Bi, npy_clongdouble_wrapper const [] Bx,
int [] Cp, int [] Ci, npy_clongdouble_wrapper [] Cx)
"""
return _csc.csc_elmul_csc(*args)
def csc_eldiv_csc(*args):
"""
csc_eldiv_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, npy_bool_wrapper const [] Ax,
int const [] Bp, int const [] Bi, npy_bool_wrapper const [] Bx,
int [] Cp, int [] Ci, npy_bool_wrapper [] Cx)
csc_eldiv_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, signed char const [] Ax,
int const [] Bp, int const [] Bi, signed char const [] Bx, int [] Cp,
int [] Ci, signed char [] Cx)
csc_eldiv_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, unsigned char const [] Ax,
int const [] Bp, int const [] Bi, unsigned char const [] Bx,
int [] Cp, int [] Ci, unsigned char [] Cx)
csc_eldiv_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, short const [] Ax,
int const [] Bp, int const [] Bi, short const [] Bx, int [] Cp, int [] Ci,
short [] Cx)
csc_eldiv_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, unsigned short const [] Ax,
int const [] Bp, int const [] Bi, unsigned short const [] Bx,
int [] Cp, int [] Ci, unsigned short [] Cx)
csc_eldiv_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, int const [] Ax,
int const [] Bp, int const [] Bi, int const [] Bx, int [] Cp, int [] Ci,
int [] Cx)
csc_eldiv_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, unsigned int const [] Ax,
int const [] Bp, int const [] Bi, unsigned int const [] Bx, int [] Cp,
int [] Ci, unsigned int [] Cx)
csc_eldiv_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, long long const [] Ax,
int const [] Bp, int const [] Bi, long long const [] Bx, int [] Cp,
int [] Ci, long long [] Cx)
csc_eldiv_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, unsigned long long const [] Ax,
int const [] Bp, int const [] Bi, unsigned long long const [] Bx,
int [] Cp, int [] Ci, unsigned long long [] Cx)
csc_eldiv_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, float const [] Ax,
int const [] Bp, int const [] Bi, float const [] Bx, int [] Cp, int [] Ci,
float [] Cx)
csc_eldiv_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, double const [] Ax,
int const [] Bp, int const [] Bi, double const [] Bx, int [] Cp, int [] Ci,
double [] Cx)
csc_eldiv_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, long double const [] Ax,
int const [] Bp, int const [] Bi, long double const [] Bx, int [] Cp,
int [] Ci, long double [] Cx)
csc_eldiv_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, npy_cfloat_wrapper const [] Ax,
int const [] Bp, int const [] Bi, npy_cfloat_wrapper const [] Bx,
int [] Cp, int [] Ci, npy_cfloat_wrapper [] Cx)
csc_eldiv_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, npy_cdouble_wrapper const [] Ax,
int const [] Bp, int const [] Bi, npy_cdouble_wrapper const [] Bx,
int [] Cp, int [] Ci, npy_cdouble_wrapper [] Cx)
csc_eldiv_csc(int const n_row, int const n_col, int const [] Ap, int const [] Ai, npy_clongdouble_wrapper const [] Ax,
int const [] Bp, int const [] Bi, npy_clongdouble_wrapper const [] Bx,
int [] Cp, int [] Ci, npy_clongdouble_wrapper [] Cx)
"""
return _csc.csc_eldiv_csc(*args)
def csc_plus_csc(*args):
    """Element-wise sum of two CSC matrices: C = A + B.

    SWIG-generated dispatcher, overloaded over every supported value
    type (bool, signed/unsigned integers of all widths, float, double,
    long double and the corresponding complex wrappers).

    Positional arguments (identical in every overload):
        n_row, n_col -- matrix dimensions
        Ap, Ai, Ax   -- CSC column pointers, row indices, values of A
        Bp, Bi, Bx   -- same three arrays for B
        Cp, Ci, Cx   -- output arrays, filled in place
    """
    return _csc.csc_plus_csc(*args)
def csc_minus_csc(*args):
    """Element-wise difference of two CSC matrices: C = A - B.

    SWIG-generated dispatcher, overloaded over every supported value
    type (bool, signed/unsigned integers of all widths, float, double,
    long double and the corresponding complex wrappers).

    Positional arguments (identical in every overload):
        n_row, n_col -- matrix dimensions
        Ap, Ai, Ax   -- CSC column pointers, row indices, values of A
        Bp, Bi, Bx   -- same three arrays for B
        Cp, Ci, Cx   -- output arrays, filled in place
    """
    return _csc.csc_minus_csc(*args)
def csc_ne_csc(*args):
    """Element-wise inequality test of two CSC matrices: C = (A != B).

    SWIG-generated dispatcher, overloaded over every supported value
    type; the output value array Cx is always boolean
    (npy_bool_wrapper) regardless of the input type.

    Positional arguments (identical in every overload):
        n_row, n_col -- matrix dimensions
        Ap, Ai, Ax   -- CSC column pointers, row indices, values of A
        Bp, Bi, Bx   -- same three arrays for B
        Cp, Ci, Cx   -- output arrays, filled in place
    """
    return _csc.csc_ne_csc(*args)
def csc_lt_csc(*args):
    """Element-wise less-than test of two CSC matrices: C = (A < B).

    SWIG-generated dispatcher, overloaded over every supported value
    type; the output value array Cx is always boolean
    (npy_bool_wrapper) regardless of the input type.

    Positional arguments (identical in every overload):
        n_row, n_col -- matrix dimensions
        Ap, Ai, Ax   -- CSC column pointers, row indices, values of A
        Bp, Bi, Bx   -- same three arrays for B
        Cp, Ci, Cx   -- output arrays, filled in place
    """
    return _csc.csc_lt_csc(*args)
def csc_gt_csc(*args):
    """Element-wise greater-than test of two CSC matrices: C = (A > B).

    SWIG-generated dispatcher, overloaded over every supported value
    type; the output value array Cx is always boolean
    (npy_bool_wrapper) regardless of the input type.

    Positional arguments (identical in every overload):
        n_row, n_col -- matrix dimensions
        Ap, Ai, Ax   -- CSC column pointers, row indices, values of A
        Bp, Bi, Bx   -- same three arrays for B
        Cp, Ci, Cx   -- output arrays, filled in place
    """
    return _csc.csc_gt_csc(*args)
def csc_le_csc(*args):
    """Element-wise less-or-equal test of two CSC matrices: C = (A <= B).

    SWIG-generated dispatcher, overloaded over every supported value
    type; the output value array Cx is always boolean
    (npy_bool_wrapper) regardless of the input type.

    Positional arguments (identical in every overload):
        n_row, n_col -- matrix dimensions
        Ap, Ai, Ax   -- CSC column pointers, row indices, values of A
        Bp, Bi, Bx   -- same three arrays for B
        Cp, Ci, Cx   -- output arrays, filled in place
    """
    return _csc.csc_le_csc(*args)
def csc_ge_csc(*args):
    """Element-wise greater-or-equal test of two CSC matrices: C = (A >= B).

    SWIG-generated dispatcher, overloaded over every supported value
    type; the output value array Cx is always boolean
    (npy_bool_wrapper) regardless of the input type.

    Positional arguments (identical in every overload):
        n_row, n_col -- matrix dimensions
        Ap, Ai, Ax   -- CSC column pointers, row indices, values of A
        Bp, Bi, Bx   -- same three arrays for B
        Cp, Ci, Cx   -- output arrays, filled in place
    """
    return _csc.csc_ge_csc(*args)
# This file is compatible with both classic and new-style classes.
| gpl-3.0 |
saurabh6790/omnisys-lib | webnotes/model/doc.py | 11 | 20444 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
"""
Contains the Document class representing an object / record
"""
_toc = ["webnotes.model.doc.Document"]
import webnotes
import webnotes.model.meta
from webnotes.utils import *
class Document:
"""
The wn(meta-data)framework equivalent of a Database Record.
Stores, retrieves, and updates the record in the corresponding table.
Runs the triggers required.
The `Document` class represents the basic Object-Relational Mapper (ORM). The object type is defined by
`DocType` and the object ID is represented by `name`::
Please note the anomaly in the Web Notes Framework that `ID` is always called `name`
If both `doctype` and `name` are specified in the constructor, then the object is loaded from the database.
If only `doctype` is given, then the object is not loaded
If `fielddata` is specified, then the object is created from the given dictionary.
**Note 1:**
The getter and setter of the object are overloaded to map to the fields of the object that
are loaded when it is instantiated.
For example: doc.name will be the `name` field and doc.owner will be the `owner` field
**Note 2 - Standard Fields:**
* `name`: ID / primary key
* `owner`: creator of the record
* `creation`: datetime of creation
* `modified`: datetime of last modification
* `modified_by` : last updating user
* `docstatus` : Status 0 - Saved, 1 - Submitted, 2- Cancelled
* `parent` : if child (table) record, this represents the parent record
* `parenttype` : type of parent record (if any)
* `parentfield` : table fieldname of parent record (if any)
* `idx` : Index (sequence) of the child record
"""
def __init__(self, doctype = None, name = None, fielddata = None):
    """Create a Document.

    doctype   -- DocType name; may also be a dict of field data, in
                 which case it is treated as `fielddata`.
    name      -- record ID; when both doctype and name are given, the
                 record is loaded from the database immediately.
    fielddata -- dict used to populate self.fields instead of loading.
    """
    # These writes run before self.__initialized exists, so
    # __setattr__ stores them in self.__dict__, not in self.fields.
    self._roles = []
    self._perms = []
    self._user_defaults = {}
    self._new_name_set = False
    self._meta = None

    # Document({...}) shorthand: a dict first argument is field data.
    if isinstance(doctype, dict):
        fielddata = doctype
        doctype = None

    if fielddata:
        self.fields = webnotes._dict(fielddata)
    else:
        self.fields = webnotes._dict()

    if not self.fields.has_key('name'):
        self.fields['name']='' # required on save
    if not self.fields.has_key('doctype'):
        self.fields['doctype']='' # required on save
    if not self.fields.has_key('owner'):
        self.fields['owner']='' # required on save

    if doctype:
        self.fields['doctype'] = doctype
    if name:
        self.fields['name'] = name

    # From this point on, writes to unknown attributes go into
    # self.fields instead of self.__dict__ (see __setattr__).
    self.__initialized = 1

    if (doctype and name):
        self._loadfromdb(doctype, name)
    else:
        if not fielddata:
            # fresh, unsaved record
            self.fields['__islocal'] = 1

    if not self.fields.docstatus:
        self.fields.docstatus = 0
def __nonzero__(self):
    """A Document instance is always truthy (Python 2 truth protocol)."""
    return True

def __str__(self):
    """str() of a Document is the str() of its fields dict."""
    return str(self.fields)

def __repr__(self):
    """repr() of a Document is the repr() of its fields dict."""
    return repr(self.fields)

def __unicode__(self):
    """unicode() of a Document is the unicode() of its fields dict."""
    return unicode(self.fields)
def __eq__(self, other):
    """Two Documents are equal iff both are Documents and their
    fields dicts compare equal; any non-Document compares unequal."""
    return isinstance(other, Document) and self.fields == other.fields
def __getstate__(self):
    """Pickle support: only self.fields is serialized."""
    return self.fields

def __setstate__(self, d):
    """Pickle support: restore by reattaching the fields dict.

    __initialized is not restored, so this assignment (and later
    attribute writes) land in __dict__ -- see __setattr__.
    """
    self.fields = d

def encode(self, encoding='utf-8'):
    """convert all unicode values to utf-8

    NOTE(review): the `encoding` argument is accepted but never passed
    to encode_dict, so only the utf-8 default is honoured -- confirm
    against webnotes.utils.encode_dict.
    """
    from webnotes.utils import encode_dict
    encode_dict(self.fields)
def _loadfromdb(self, doctype = None, name = None):
if name: self.name = name
if doctype: self.doctype = doctype
is_single = False
try:
is_single = webnotes.model.meta.is_single(self.doctype)
except Exception, e:
pass
if is_single:
self._loadsingle()
else:
try:
dataset = webnotes.conn.sql('select * from `tab%s` where name="%s"' % (self.doctype, self.name.replace('"', '\"')))
except webnotes.SQLError, e:
if e.args[0]==1146:
dataset = None
else:
raise
if not dataset:
raise webnotes.DoesNotExistError, '[WNF] %s %s does not exist' % (self.doctype, self.name)
self._load_values(dataset[0], webnotes.conn.get_description())
def _load_values(self, data, description):
    """Fill self.fields from a DB result row.

    `data` is one row and `description` the cursor description;
    column names come from description[i][0]. The __islocal marker is
    cleared because the record now exists in the database.
    """
    self.fields.pop('__islocal', None)
    for i, column in enumerate(description):
        self.fields[column[0]] = webnotes.conn.convert_to_simple_type(data[i])
def _merge_values(self, data, description):
    """Overlay a DB result row onto self.fields, keeping the existing
    value wherever the incoming one is falsy."""
    for i, column in enumerate(description):
        incoming = data[i]
        if incoming: # falsy values never overwrite
            self.fields[column[0]] = webnotes.conn.convert_to_simple_type(incoming)
def _loadsingle(self):
    """Load a Single doctype: its name equals the doctype and its
    values come from the tabSingles key/value store."""
    self.name = self.doctype
    self.fields.update(getsingle(self.doctype))
def __setattr__(self, name, value):
    """Route attribute writes.

    Until __init__ sets self.__initialized (stored mangled as
    '_Document__initialized'), and for names already present in
    __dict__, write to the instance dict; any other write becomes a
    document field (self.fields[name] = value).
    """
    # normal attribute
    if not self.__dict__.has_key('_Document__initialized'):
        self.__dict__[name] = value
    elif self.__dict__.has_key(name):
        self.__dict__[name] = value
    else:
        # field attribute
        f = self.__dict__['fields']
        f[name] = value
def __getattr__(self, name):
    """Fall back to self.fields for unknown attribute reads.

    Returns '' when the name is in neither __dict__ nor fields; this
    method never raises AttributeError, so hasattr() is always True
    for a Document and missing fields silently read as ''.
    """
    if self.__dict__.has_key(name):
        return self.__dict__[name]
    elif self.fields.has_key(name):
        return self.fields[name]
    else:
        return ''
def get(self, name, value=None):
    """Return field `name` from self.fields, or `value` if missing."""
    return self.fields.get(name, value)

def update(self, d):
    """Merge dict `d` into self.fields and return self (chainable)."""
    self.fields.update(d)
    return self

def insert(self):
    """Force an INSERT: mark the record local, save it, return self."""
    self.fields['__islocal'] = 1
    self.save()
    return self
def save(self, new=0, check_links=1, ignore_fields=0, make_autoname=1,
        keep_timestamps=False):
    """Persist the document.

    new             -- force an INSERT by marking the record local
    check_links     -- validate Link-field values against their doctypes
    ignore_fields   -- write only columns that exist in the table
    make_autoname   -- generate a new name for local records
    keep_timestamps -- do not touch owner/creation/modified fields
    """
    self.get_meta()

    if new:
        self.fields["__islocal"] = 1

    # add missing parentinfo (if reqd)
    if self.parent and not (self.parenttype and self.parentfield):
        self.update_parentinfo()

    if self.parent and not self.idx:
        self.set_idx()

    # if required, make new
    if not self._meta.issingle:
        if self.fields.get('__islocal'):
            r = self._insert(make_autoname=make_autoname, keep_timestamps = keep_timestamps)
            if r:
                return r
        else:
            # fixed typo in the user-facing message: "non-exiting"
            if not webnotes.conn.exists(self.doctype, self.name):
                webnotes.msgprint(webnotes._("Cannot update a non-existing record, try inserting.") + ": " + self.doctype + " / " + self.name,
                    raise_exception=1)

    # save the values
    self._update_values(self._meta.issingle,
        check_links and self.make_link_list() or {}, ignore_fields=ignore_fields,
        keep_timestamps=keep_timestamps)
    self._clear_temp_fields()
def _get_amended_name(self):
    """Set self.name for an amendment of a cancelled document.

    Names are '<original>-<n>': when the source document is itself an
    amendment (its own amended_from is set), the numeric suffix is
    incremented; otherwise the suffix starts at 1.
    """
    am_id = 1
    am_prefix = self.amended_from

    # Bind the name as a query parameter instead of %-formatting it
    # into the SQL string (the original broke on names containing
    # quotes); same pattern as the other parameterized queries here.
    if webnotes.conn.sql('select amended_from from `tab%s` where name = %s'
            % (self.doctype, '%s'), self.amended_from)[0][0] or '':
        am_id = cint(self.amended_from.split('-')[-1]) + 1
        am_prefix = '-'.join(self.amended_from.split('-')[:-1]) # drop the last segment

    self.name = am_prefix + '-' + str(am_id)
def set_new_name(self, controller=None):
    """Decide self.name for a new record from the DocType's autoname
    rule. Precedence: amendment naming, controller.autoname(),
    'field:<fieldname>', 'naming_series:', an explicit autoname
    pattern, a user-supplied __newname, table-row series, then a
    global '#########' fallback series. Runs at most once per
    document (guarded by _new_name_set).
    """
    if self._new_name_set:
        # already set by bean
        return

    self._new_name_set = True

    self.get_meta()
    autoname = self._meta.autoname

    # remember the pre-rename (local) name
    self.localname = self.name

    # amendments
    if self.amended_from:
        return self._get_amended_name()

    # by method
    else:
        # get my object
        if not controller:
            controller = webnotes.get_obj([self])
        # a controller-defined autoname() overrides everything below
        if hasattr(controller, 'autoname'):
            return controller.autoname()

    # based on a field
    if autoname and autoname.startswith('field:'):
        n = self.fields[autoname[6:]]
        if not n:
            raise Exception, 'Name is required'
        self.name = n.strip()

    elif autoname and autoname.startswith("naming_series:"):
        self.set_naming_series()
        if not self.naming_series:
            webnotes.msgprint(webnotes._("Naming Series mandatory"), raise_exception=True)
        self.name = make_autoname(self.naming_series+'.#####')

    # call the method!
    elif autoname and autoname!='Prompt':
        self.name = make_autoname(autoname, self.doctype)

    # given
    elif self.fields.get('__newname',''):
        self.name = self.fields['__newname']

    # default name for table
    elif self._meta.istable:
        self.name = make_autoname('#########', self.doctype)

    # unable to determine a name, use global series
    if not self.name:
        self.name = make_autoname('#########', self.doctype)
def set_naming_series(self):
    """Fill self.naming_series with the doctype's default if unset."""
    if not self.naming_series:
        # pick default naming series
        self.naming_series = get_default_naming_series(self.doctype)
def _insert(self, make_autoname=True, keep_timestamps=False):
    """INSERT a new row containing only the standard columns (name,
    owner, creation, modified, modified_by); the remaining field
    values are written afterwards by _update_values (via save)."""
    # set name
    if make_autoname:
        self.set_new_name()

    # validate name
    self.name = validate_name(self.doctype, self.name, self._meta.name_case)

    # insert!
    if not keep_timestamps:
        if not self.owner:
            self.owner = webnotes.session['user']
        self.modified_by = webnotes.session['user']
        if not self.creation:
            # first save: creation and modified coincide
            self.creation = self.modified = now()
        else:
            self.modified = now()

    webnotes.conn.sql("insert into `tab%(doctype)s`" % self.fields \
        + """ (name, owner, creation, modified, modified_by)
        values (%(name)s, %(owner)s, %(creation)s, %(modified)s,
            %(modified_by)s)""", self.fields)
def _update_single(self, link_list):
    """Persist a Single doctype: delete its rows from tabSingles and
    re-insert one (doctype, field, value) row per field.

    Standard columns and '__'-prefixed temp fields are skipped; link
    fields are validated (and possibly blanked) first. None values
    are written as SQL NULL.

    NOTE(review): if no savable field remains, update_str is empty
    and the generated INSERT has no value tuples -- confirm this case
    cannot occur in practice.
    """
    self.modified = now()
    update_str, values = [], []

    webnotes.conn.sql("delete from tabSingles where doctype='%s'" % self.doctype)
    for f in self.fields.keys():
        if not (f in ('modified', 'doctype', 'name', 'perm', 'localname', 'creation'))\
            and (not f.startswith('__')): # fields not saved

            # validate links
            if link_list and link_list.get(f):
                self.fields[f] = self._validate_link(link_list, f)

            if self.fields[f]==None:
                update_str.append("(%s,%s,NULL)")
                values.append(self.doctype)
                values.append(f)
            else:
                update_str.append("(%s,%s,%s)")
                values.append(self.doctype)
                values.append(f)
                values.append(self.fields[f])
    webnotes.conn.sql("insert into tabSingles(doctype, field, value) values %s" % (', '.join(update_str)), values)
def validate_links(self, link_list):
    """Validate every link field against link_list, blanking broken
    links in place, and return a list of 'Label: value' strings for
    each link that was cleared."""
    broken = []
    for fieldname in self.fields.keys():
        # validate links
        previous = self.fields[fieldname]
        if link_list and link_list.get(fieldname):
            self.fields[fieldname] = self._validate_link(link_list, fieldname)

            if previous and not self.fields[fieldname]:
                broken.append("{}: {}".format(link_list[fieldname][1], previous))

    return broken
def make_link_list(self):
    """Return {fieldname: (options, label)} for every Link field of
    this doctype, as reported by the model metadata."""
    rows = webnotes.model.meta.get_link_fields(self.doctype)
    return dict((row[0], (row[1], row[2])) for row in rows)
def _validate_link(self, link_list, f):
    """Return the value of link field `f` if it points at an existing
    record of the target doctype, else ''.

    Empty values pass through (None / unchanged); '[Select]' options
    skip validation; 'link:DocType' prefixes and multi-line options
    (first line wins) are normalized before the lookup.
    """
    dt = link_list[f][0]
    dn = self.fields.get(f)

    if not dt:
        webnotes.throw("Options not set for link field: " + f)

    # NOTE(review): presumably dead -- webnotes.throw above appears to
    # raise for falsy dt, which would make this duplicate guard
    # unreachable; confirm throw's behavior.
    if not dt: return dn
    if not dn: return None
    if dt=="[Select]": return dn
    if dt.lower().startswith('link:'):
        dt = dt[5:]
    if '\n' in dt:
        dt = dt.split('\n')[0]

    tmp = webnotes.conn.sql("""SELECT name FROM `tab%s`
        WHERE name = %s""" % (dt, '%s'), dn)
    return tmp and tmp[0][0] or ''# match case
def _update_values(self, issingle, link_list, ignore_fields=0, keep_timestamps=False):
    """Write self.fields to the database: tabSingles rewrite for
    singles, UPDATE for normal doctypes.

    link_list comes from make_link_list(); when ignore_fields is set,
    only columns that actually exist in the table are written. Empty
    values ('' or None) are written as SQL NULL. Note the UPDATE is
    skipped entirely when every field is empty (values stays []).
    """
    if issingle:
        self._update_single(link_list)
    else:
        update_str, values = [], []

        # set modified timestamp
        if self.modified and not keep_timestamps:
            self.modified = now()
            self.modified_by = webnotes.session['user']

        fields_list = ignore_fields and self.get_valid_fields() or self.fields.keys()

        for f in fields_list:
            if (not (f in ('doctype', 'name', 'perm', 'localname',
                    'creation','_user_tags', "file_list", "_comments"))) and (not f.startswith('__')):
                # fields not saved

                # validate links
                if link_list and link_list.get(f):
                    self.fields[f] = self._validate_link(link_list, f)

                if self.fields.get(f) is None or self.fields.get(f)=='':
                    update_str.append("`%s`=NULL" % f)
                else:
                    values.append(self.fields.get(f))
                    update_str.append("`%s`=%s" % (f, '%s'))
        if values:
            values.append(self.name)
            r = webnotes.conn.sql("update `tab%s` set %s where name=%s" % \
                (self.doctype, ', '.join(update_str), "%s"), values)
def get_valid_fields(self):
    """Return the fieldnames that can be persisted for this doctype,
    cached per doctype on webnotes.local.valid_fields_map.

    Singles use all DocFields with value-bearing fieldtypes; table
    doctypes use the actual database columns.
    """
    import webnotes.model.doctype
    if getattr(webnotes.local, "valid_fields_map", None) is None:
        webnotes.local.valid_fields_map = {}

    self.get_meta()

    valid_fields_map = webnotes.local.valid_fields_map
    if not valid_fields_map.get(self.doctype):
        if cint( self._meta.issingle):
            doctypelist = webnotes.model.doctype.get(self.doctype)
            valid_fields_map[self.doctype] = doctypelist.get_fieldnames({
                "fieldtype": ["not in", webnotes.model.no_value_fields]})
        else:
            valid_fields_map[self.doctype] = \
                webnotes.conn.get_table_columns(self.doctype)

    return valid_fields_map.get(self.doctype)
def get_meta(self):
    """Return (and lazily load + cache) the DocType meta attributes
    needed for naming and saving (autoname, issingle, istable, name_case)."""
    if not self._meta:
        meta = webnotes.conn.get_value("DocType", self.doctype,
            ["autoname", "issingle", "istable", "name_case"], as_dict=True)
        # fall back to an empty dict-like object when the DocType is unknown
        self._meta = meta or webnotes._dict()
    return self._meta
def update_parentinfo(self):
    """update parent type and parent field, if not explicitly specified"""
    # find every (parent doctype, table fieldname) pair that embeds this
    # child doctype; there must be exactly one for an unambiguous answer
    rows = webnotes.conn.sql("""select parent, fieldname from tabDocField
        where fieldtype='Table' and options=%s""", self.doctype)
    if not rows:
        raise Exception('Incomplete parent info in child table (%s, %s)'
            % (self.doctype, self.fields.get('name', '[new]')))
    if len(rows) > 1:
        raise Exception('Ambiguous parent info (%s, %s)'
            % (self.doctype, self.fields.get('name', '[new]')))
    self.parenttype = rows[0][0]
    self.parentfield = rows[0][1]
def set_idx(self):
    """Give this child row the next idx under its parent/parentfield."""
    current_max = webnotes.conn.sql("""select max(idx) from `tab%s`
        where parent=%s and parentfield=%s""" % (self.doctype, '%s', '%s'),
        (self.parent, self.parentfield))[0][0]
    self.idx = (current_max or 0) + 1
def _clear_temp_fields(self):
# clear temp stuff
keys = self.fields.keys()
for f in keys:
if f.startswith('__'):
del self.fields[f]
def clear_table(self, doclist, tablefield, save=0):
    """
    Clears the child records from the given `doclist` for a particular `tablefield`.

    `save` is accepted for backward compatibility and is not used here.
    Returns the filtered doclist wrapped via `webnotes.doclist`.
    """
    from webnotes.model.utils import getlist
    children = getlist(doclist, tablefield)
    names_to_delete = [child.name for child in children]
    if names_to_delete:
        # drop the child rows from the in-memory doclist
        doclist = filter(lambda d: d.name not in names_to_delete, doclist)
        # and delete them from the database
        webnotes.conn.sql("""\
            delete from `tab%s`
            where parent=%s and parenttype=%s"""
            % (children[0].doctype, '%s', '%s'),
            (self.name, self.doctype))
        # mark the parent as having unsaved changes
        self.fields['__unsaved'] = 1
    return webnotes.doclist(doclist)
def addchild(self, fieldname, childtype='', doclist=None):
    """
    Returns a new (unsaved) child record of the given `childtype`,
    linked to this doc through `fieldname`.

    * the child is marked ``__islocal`` so clients can tell it is unsaved
    * if `doclist` is passed, the new record is appended to it
    """
    from webnotes.model.doc import Document
    child = Document()
    child.parent = self.name
    child.parenttype = self.doctype
    child.parentfield = fieldname
    child.doctype = childtype
    child.docstatus = 0
    child.name = ''
    child.owner = webnotes.session['user']
    child.fields['__islocal'] = 1  # for Client to identify unsaved doc
    if doclist is not None:
        doclist.append(child)
    return child
def get_values(self):
    """get non-null fields dict without standard fields"""
    from webnotes.model import default_fields
    return dict((key, value) for key, value in self.fields.items()
        if key not in default_fields and value)
def addchild(parent, fieldname, childtype='', doclist=None):
    """
    Create a child record under `parent` -- a thin module-level wrapper
    around ``Document.addchild``.

    Example::

        c = Document('Contact', 'ABC')
        d = addchild(c, 'contact_updates', 'Contact Update')
        d.last_updated = 'Phone call'
        d.save(1)
    """
    return parent.addchild(fieldname, childtype, doclist)
def make_autoname(key, doctype=''):
    """
    Creates an autoname from the given key:

    **Autoname rules:**
        * The key is separated by '.'
        * '####' represents a series; the string built so far becomes the
          prefix.  Example: ABC.#### creates a series ABC0001, ABC0002 etc
        * 'MM' represents the current month, 'DD' the current day
        * 'YY' and 'YYYY' represent the current year

    *Example:*
        * DE/./.YY./.MM./.##### will create a series like
          DE/09/01/0001 where 09 is the year, 01 is the month and 0001
          is the series
    """
    if '#' not in key:
        # default to a 5-digit series when none is given
        key = key + '.#####'
    today = now_datetime()
    name = ''
    series_done = False
    for part in key.split('.'):
        if part.startswith('#'):
            # only the first '#'-run is treated as the series;
            # the number of '#'s defines the zero padding
            if not series_done:
                name += getseries(name, len(part), doctype)
                series_done = True
        elif part == 'YY':
            name += today.strftime('%y')
        elif part == 'MM':
            name += today.strftime('%m')
        elif part == 'DD':
            name += today.strftime('%d')
        elif part == 'YYYY':
            name += today.strftime('%Y')
        else:
            # any other token is a literal prefix/separator
            name += part
    return name
def getseries(key, digits, doctype=''):
    """Return the next value of the series `key`, zero-padded to `digits`."""
    # lock the row while we read it (select ... for update)
    row = webnotes.conn.sql("select `current` from `tabSeries` where name=%s for update", key)
    if row and row[0][0] is not None:
        # series exists: bump the counter
        webnotes.conn.sql("update tabSeries set current = current+1 where name=%s", key)
        current = cint(row[0][0]) + 1
    else:
        # series does not exist yet: create it starting at 1
        webnotes.conn.sql("insert into tabSeries (name, current) values (%s, 1)", key)
        current = 1
    return ('%0' + str(digits) + 'd') % current
def getchildren(name, childtype, field='', parenttype='', from_doctype=0):
    """Load all child records of doctype `childtype` whose parent is `name`.

    `field` and `parenttype` optionally narrow the match; `from_doctype`
    is accepted for backward compatibility and is not used.  Returns a
    DocList of Document objects ordered by idx.
    """
    import webnotes
    from webnotes.model.doclist import DocList

    conditions = ""
    values = []
    if field:
        conditions += ' and parentfield=%s '
        values.append(field)
    if parenttype:
        conditions += ' and parenttype=%s '
        values.append(parenttype)

    rows = webnotes.conn.sql("""select * from `tab%s` where parent=%s %s order by idx""" \
        % (childtype, "%s", conditions), tuple([name] + values))
    # column descriptions of the last query, used to map values to fields
    columns = webnotes.conn.get_description()

    result = DocList()
    for row in rows:
        child = Document()
        child.doctype = childtype
        child._load_values(row, columns)
        result.append(child)
    return result
def check_page_perm(doc):
    """Raise PermissionError unless the Page `doc` may be read by Guest."""
    # the login page and published pages are always readable
    if doc.name == 'Login Page':
        return
    if doc.publish:
        return
    if not webnotes.conn.sql("select name from `tabPage Role` where parent=%s and role='Guest'", doc.name):
        webnotes.response['403'] = 1
        raise webnotes.PermissionError('[WNF] No read permission for %s %s' % ('Page', doc.name))
def get(dt, dn='', with_children=1, from_controller=0):
    """
    Returns a doclist containing the main record and all child records
    """
    import webnotes
    import webnotes.model
    from webnotes.model.doclist import DocList

    dn = dn or dt

    # load the main doc
    doc = Document(dt, dn)

    # guests may only read pages they have permission for
    if dt == 'Page' and webnotes.session['user'] == 'Guest':
        check_page_perm(doc)

    doclist = DocList([doc, ])
    if not with_children:
        # done
        return doclist

    # pull in the rows of every child table of this doctype
    for t in webnotes.model.meta.get_table_fields(dt):
        doclist += getchildren(doc.name, t[0], t[1], dt)
    return doclist
def getsingle(doctype):
    """get single doc as a {field: value} dict"""
    rows = webnotes.conn.sql("select field, value from tabSingles where doctype=%s", doctype)
    return dict(rows)
def copy_common_fields(from_doc, to_doc):
    """Copy fields shared by both doctypes from `from_doc` onto `to_doc`.

    Standard fields are skipped; a field is copied only when the target
    doctype declares it and the values actually differ.
    """
    from webnotes.model import default_fields
    to_meta = webnotes.get_doctype(to_doc.doctype)

    for fieldname, value in from_doc.fields.items():
        if fieldname in default_fields:
            continue
        if to_meta.get_field(fieldname) and to_doc.fields[fieldname] != value:
            to_doc.fields[fieldname] = value
def validate_name(doctype, name, case=None, merge=False):
    """Validate (and normalise) a record name before it is saved.

    Checks uniqueness (unless merging), rejects the 'New <DocType>'
    placeholder, applies the doctype's name case, strips surrounding
    blanks and rejects forbidden characters.  Returns the normalised name.
    """
    if not merge:
        if webnotes.conn.sql('select name from `tab%s` where name=%s' % (doctype, '%s'), name):
            raise NameError('Name %s already exists' % name)

    # no name
    if not name:
        return 'No Name Specified for %s' % doctype

    # still carrying the placeholder name used for unsaved docs?
    if name.startswith('New ' + doctype):
        raise NameError('There were some errors setting the name, please contact the administrator')

    if case == 'Title Case':
        name = name.title()
    if case == 'UPPER CASE':
        name = name.upper()

    name = name.strip()  # no leading and trailing blanks

    for ch in ('%', "'", '"', '#', '*', '?', '`'):
        if ch in name:
            webnotes.msgprint('%s not allowed in ID (name)' % ch, raise_exception=1)

    return name
def get_default_naming_series(doctype):
    """get default value for `naming_series` property"""
    from webnotes.model.doctype import get_property
    options = get_property(doctype, "options", "naming_series")
    if not options:
        return None
    choices = options.split("\n")
    # the first option is the default; fall back to the second when the
    # first line is empty
    return choices[0] or choices[1]
| mit |
erigones/esdc-ce | ans/roles/cluster/library/pcs_resource.py | 1 | 15877 | #!/usr/bin/python
# Copyright: (c) 2018, Ondrej Famera <ondrej-xa2iel8u@famera.cz>
# GNU General Public License v3.0+ (see LICENSE-GPLv3.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0 (see LICENSE-APACHE2.txt or http://www.apache.org/licenses/LICENSE-2.0)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
author: "Ondrej Famera (@OndrejHome)"
module: pcs_resource
short_description: "wrapper module for 'pcs resource' "
description:
- "Module for creating, deleting and updating clusters resources using 'pcs' utility."
- "This module should be executed for same resorce only on one of the nodes in cluster at a time."
version_added: "2.4"
options:
state:
description:
- "'present' - ensure that cluster resource exists"
- "'absent' - ensure cluster resource doesn't exist"
required: false
default: present
choices: ['present', 'absent']
name:
description:
- "name of cluster resource - cluster resource identifier"
required: true
resource_class:
description:
- class of cluster resource
required: true
default: 'ocf'
choices: ['ocf', 'systemd', 'stonith']
resource_type:
description:
- cluster resource type
required: false
options:
description:
- "additional options passed to 'pcs' command"
required: false
force_resource_update:
description:
- "skip checking for cluster changes when updating existing resource configuration
- use 'scope=resources' when pushing the change to the cluster. Useful in busy clusters,
dangerous when there are concurrent updates as they can be lost."
required: false
default: no
type: bool
cib_file:
description:
- "Apply changes to specified file containing cluster CIB instead of running cluster."
- "This module requires the file to already contain cluster configuration."
required: false
notes:
- tested on CentOS 6.8, 7.3
- module can create and delete clones, groups and master resources indirectly -
resource can specify --clone, --group, --master option which will cause them to create
or become part of clone/group/master
'''
EXAMPLES = '''
- name: ensure Dummy('ocf:pacemaker:Dummy') resource with name 'test' is present
pcs_resource:
name: 'test'
resource_type: 'ocf:pacemaker:Dummy'
- name: ensure that resource with name 'vip' is not present
pcs_resource:
name: 'vip'
state: 'absent'
- name: ensure resource 'test2' of IPaddr2('ocf:heartbeat:IPaddr2') type exists an has 5 second monitor interval
pcs_resource:
name: 'test2'
resource_type: 'ocf:heartbeat:IPaddr2'
options: 'ip=192.168.1.2 op monitor interval=5'
- name: create resource in group 'testgrp'
pcs_resource:
name: 'test3'
resource_type: 'ocf:pacemaker:Dummy'
options: '--group testgrp'
- name: create complex Master/Slave resource 'test-master' of 'ocf:pacemaker:Dummy' type
pcs_resource:
name: 'test'
resource_type: 'ocf:pacemaker:Dummy'
options: >
fake=some_value --master meta master-max=1 master-node-max=1 clone-max=2 clone-node-max=1 notify=true
op monitor interval=60s meta resource-stickiness=100
'''
# TODO if group exists and is not part of group, then specifying group won't put it into group
# same problem is with clone and master - it might be better to make this functionality into separate module
import sys
import os.path
import xml.etree.ElementTree as ET
import tempfile
from distutils.spawn import find_executable
from ansible.module_utils.basic import AnsibleModule
# determine if we have 'to_native' function that we can use for 'ansible --diff' output
# Feature flag: 'to_native' (available since roughly ansible-2.2) is needed to
# produce sensible '--diff' output; fall back gracefully on older ansible.
to_native_support = False
try:
from ansible.module_utils._text import to_native
to_native_support = True
except ImportError:
pass
def replace_element(elem, replacement):
    """Overwrite `elem` in place so it becomes a copy of `replacement`.

    Used to swap a resource definition inside a larger CIB tree while
    keeping the parent's reference to the element object valid.
    """
    elem.clear()
    elem.tag = replacement.tag
    elem.text = replacement.text
    elem.tail = replacement.tail
    elem.attrib = replacement.attrib
    elem[:] = replacement[:]
def compare_resources(module, res1, res2):
# Compare two resource Elements by dumping each to XML, normalizing with
# xmllint and diffing the results.  Returns (rc, diff): rc == 0 means the
# resources are identical; diff is a dict usable for ansible '--diff'
# output when 'to_native' is available (empty string otherwise).
# we now have 2 nodes that we can compare, so lets dump them into files for comparring
n1_file_fd, n1_tmp_path = tempfile.mkstemp()
n2_file_fd, n2_tmp_path = tempfile.mkstemp()
n1_file = open(n1_tmp_path, 'w')
n2_file = open(n2_tmp_path, 'w')
# dump the XML resource definitions into temporary files
# (ET.dump writes to stdout, hence the temporary stdout redirection)
sys.stdout = n1_file
ET.dump(res1)
sys.stdout = n2_file
ET.dump(res2)
sys.stdout = sys.__stdout__
# close files
n1_file.close()
n2_file.close()
# normalize the files and store results in new files - this also removes some unimportant spaces and stuff
n3_file_fd, n3_tmp_path = tempfile.mkstemp()
n4_file_fd, n4_tmp_path = tempfile.mkstemp()
rc, out, err = module.run_command('xmllint --format --output ' + n3_tmp_path + ' ' + n1_tmp_path)
rc, out, err = module.run_command('xmllint --format --output ' + n4_tmp_path + ' ' + n2_tmp_path)
# add files that should be cleaned up
module.add_cleanup_file(n1_tmp_path)
module.add_cleanup_file(n2_tmp_path)
module.add_cleanup_file(n3_tmp_path)
module.add_cleanup_file(n4_tmp_path)
# now compare files
diff = ''
rc, out, err = module.run_command('diff ' + n3_tmp_path + ' ' + n4_tmp_path)
if rc != 0:
# if there was difference then show the diff
# NOTE(review): n3_file/n4_file are never closed, and b''.join over text
# readlines only works on Python 2 -- worth confirming for py3 ansible.
n3_file = open(n3_tmp_path, 'r+')
n4_file = open(n4_tmp_path, 'r+')
if to_native_support:
# produce diff only where we have to_native function which give sensible output
# without 'to_native' whole text is wrapped as single line and not diffed
# seems that to_native was added in ansible-2.2 (commit 57701d7)
diff = {
'before_header': '',
'before': to_native(b''.join(n3_file.readlines())),
'after_header': '',
'after': to_native(b''.join(n4_file.readlines())),
}
return rc, diff
def find_resource(cib, resource_id):
    """Depth-first search of `cib` for the element with id `resource_id`.

    Only descends into container tags that can hold resources
    (group/clone/master/primitive).  Returns the Element or None.
    """
    containers = ('group', 'clone', 'master', 'primitive')
    for child in list(cib):
        if child.attrib.get('id') == resource_id:
            return child
        if child.tag in containers:
            found = find_resource(child, resource_id)
            if found is not None:
                return found
    return None
def run_module():
# Core logic of the pcs_resource module: load the cluster CIB (from a file
# or the running cluster), locate the named resource and converge it to the
# requested state ('present'/'absent'), honoring check mode.
module = AnsibleModule(
argument_spec=dict(
state=dict(default="present", choices=['present', 'absent']),
name=dict(required=True),
resource_class=dict(default="ocf", choices=['ocf', 'systemd', 'stonith', 'master']),
resource_type=dict(required=False),
options=dict(default="", required=False),
force_resource_update=dict(default=False, type='bool', required=False),
cib_file=dict(required=False),
),
supports_check_mode=True
)
state = module.params['state']
resource_name = module.params['name']
resource_class = module.params['resource_class']
cib_file = module.params['cib_file']
if state == 'present' and (not module.params['resource_type']):
module.fail_json(msg='When creating cluster resource you must specify the resource_type')
result = {}
if find_executable('pcs') is None:
module.fail_json(msg="'pcs' executable not found. Install 'pcs'.")
# 'cib_file_param' is interpolated into the pcs command lines below as
# '-f <file>' when operating on a CIB file instead of the live cluster
module.params['cib_file_param'] = ''
if cib_file is not None:
# use cib_file if specified
if os.path.isfile(cib_file):
try:
current_cib = ET.parse(cib_file)
except Exception as e:
module.fail_json(msg="Error encountered parsing the cib_file - %s" % (e))
current_cib_root = current_cib.getroot()
module.params['cib_file_param'] = '-f ' + cib_file
else:
module.fail_json(msg="%(cib_file)s is not a file or doesn't exists" % module.params)
else:
# get running cluster configuration
rc, out, err = module.run_command('pcs cluster cib')
if rc == 0:
current_cib_root = ET.fromstring(out)
else:
module.fail_json(msg='Failed to load cluster configuration', out=out, error=err)
# try to find the resource that we seek
resource = None
cib_resources = current_cib_root.find('./configuration/resources')
resource = find_resource(cib_resources, resource_name)
if state == 'present' and resource is None:
# resource should be present, but we don't see it in configuration - lets create it
result['changed'] = True
if not module.check_mode:
if resource_class == 'stonith':
cmd = 'pcs %(cib_file_param)s stonith create %(name)s %(resource_type)s %(options)s' % module.params
elif resource_class == 'master':
cmd = 'pcs %(cib_file_param)s resource master %(name)s %(resource_type)s %(options)s' % module.params
else:
cmd = 'pcs %(cib_file_param)s resource create %(name)s %(resource_type)s %(options)s' % module.params
rc, out, err = module.run_command(cmd)
if rc != 0 and "Call cib_replace failed (-62): Timer expired" in err:
# EL6: special retry when we failed to create resource because of timer waiting on cib expired
rc, out, err = module.run_command(cmd)
if rc == 0:
module.exit_json(changed=True)
else:
module.fail_json(msg="Failed to create resource using command '" + cmd + "'", output=out, error=err)
elif state == 'present' and resource is not None and resource_class == 'master':
# modify the master resource params directly
# NOTE(review): this branch updates meta attributes unconditionally and
# always reports changed=True, even in check mode -- confirm intended.
cmd = 'pcs resource meta %(name)s %(options)s' % module.params
rc, out, err = module.run_command(cmd)
if rc == 0:
module.exit_json(changed=True)
else:
module.fail_json(msg="Failed to modify resource using command '" + cmd + "'", output=out, error=err)
elif state == 'present' and resource is not None:
# resource should be present and we have find resource with such ID - lets compare it with definition if it needs a change
# lets simulate how the resource would look like if it was created using command we have
clean_cib_fd, clean_cib_path = tempfile.mkstemp()
module.add_cleanup_file(clean_cib_path)
module.do_cleanup_files()
# we must be sure that clean_cib_path is empty
if resource_class == 'stonith':
cmd = 'pcs -f ' + clean_cib_path + ' stonith create %(name)s %(resource_type)s %(options)s' % module.params
else:
cmd = 'pcs -f ' + clean_cib_path + ' resource create %(name)s %(resource_type)s %(options)s' % module.params
rc, out, err = module.run_command(cmd)
if rc == 0:
# we have a comparable resource created in clean cluster, so lets select it and compare it
clean_cib = ET.parse(clean_cib_path)
clean_cib_root = clean_cib.getroot()
clean_resource = None
cib_clean_resources = clean_cib_root.find('./configuration/resources')
clean_resource = find_resource(cib_clean_resources, resource_name)
if clean_resource is not None:
# remove the meta_attribute element from original cluster cib when empty to make comparison clean - Issue #10
for elem in list(resource):
if elem.tag == 'meta_attributes' and len(list(elem)) == 0:
resource.remove(elem)
rc, diff = compare_resources(module, resource, clean_resource)
if rc == 0:
# if no differnces were find there is no need to update the resource
module.exit_json(changed=False)
else:
# otherwise lets replace the resource with new one
result['changed'] = True
result['diff'] = diff
if not module.check_mode:
replace_element(resource, clean_resource)
# when we use cib_file then we can dump the changed CIB directly into file
if cib_file is not None:
try:
current_cib.write(cib_file) # FIXME add try/catch for writing into file
except Exception as e:
module.fail_json(msg="Error encountered writing result to cib_file - %s" % (e))
module.exit_json(changed=True)
# when not using cib_file then we continue preparing changes for cib-push into running cluster
new_cib = ET.ElementTree(current_cib_root)
new_cib_fd, new_cib_path = tempfile.mkstemp()
module.add_cleanup_file(new_cib_path)
new_cib.write(new_cib_path)
push_scope = 'scope=resources' if module.params['force_resource_update'] else ''
push_cmd = 'pcs cluster cib-push ' + push_scope + ' ' + new_cib_path
rc, out, err = module.run_command(push_cmd)
if rc == 0:
module.exit_json(changed=True)
else:
module.fail_json(msg="Failed to push updated configuration to cluster using command '" + push_cmd + "'", output=out, error=err)
else:
module.fail_json(msg="Unable to find simulated resource, This is most probably a bug.")
else:
module.fail_json(msg="Unable to simulate resource with given definition using command '" + cmd + "'", output=out, error=err)
elif state == 'absent' and resource is not None:
# resource should not be present but we have found something - lets remove that
result['changed'] = True
if not module.check_mode:
if resource_class == 'stonith':
cmd = 'pcs %(cib_file_param)s stonith delete %(name)s' % module.params
else:
cmd = 'pcs %(cib_file_param)s resource delete %(name)s' % module.params
rc, out, err = module.run_command(cmd)
if rc == 0:
module.exit_json(changed=True)
else:
module.fail_json(msg="Failed to delete resource using command '" + cmd + "'", output=out, error=err)
else:
# resource should not be present and is nto there, nothing to do
result['changed'] = False
# END of module
module.exit_json(**result)
def main():
    """Module entry point -- kept separate so it can be imported in tests."""
    run_module()


if __name__ == '__main__':
    main()
| apache-2.0 |
jolid/script.module.donnie | lib/donnie/hgtv.py | 1 | 3410 | import urllib2, urllib, sys, os, re, random, copy
import urlresolver
from BeautifulSoup import BeautifulSoup, Tag, NavigableString
import xbmc,xbmcplugin,xbmcgui,xbmcaddon
from t0mm0.common.net import Net
from t0mm0.common.addon import Addon
from scrapers import CommonScraper
net = Net()
''' ###########################################################
Usage and helper functions
############################################################'''
class HGTVServiceSracper(CommonScraper):
# Scraper for hgtv.com full episodes.  Most methods beyond __init__ and
# _getShows are unimplemented stubs ("Do work here").
# NOTE(review): the class name ("Sracper") is a typo kept for backward
# compatibility -- external code may reference it.
def __init__(self, settingsid, DB=None):
# DB: optional shared database handle; settingsid keys this scraper's settings
if DB:
self.DB=DB
self.service='hgtv'
self.name = 'HGTV'
self.raiseError = False
self.referrer = 'http://www.hgtv.com/'
self.base_url = 'http://www.hgtv.com/'
self.user_agent = 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3'
self.provides = []
self.settingsid = settingsid
self._loadsettings()
def _getShows(self, silent=False):
# Scrape the full show index; when the cache is still fresh, only
# refresh recent shows instead.
if self.isFresh('tvshows'):
self._getRecentShows(silent=silent)
return
print "Getting All shows for " + self.service
url = self.base_url + '/full-episodes/package/index.html'
print "Scrapping: " + url
pDialog = xbmcgui.DialogProgress()
if not silent:
pDialog.create('Downloading shows from ' + self.service)
pagedata = self.getURL(url, append_base_url=False)
if pagedata=='':
return False
soup = BeautifulSoup(pagedata)
shows = soup.findAll('a', {'class' : 'banner'})
for show in shows:
# progress percentage for the Kodi dialog
percent = int((100 * shows.index(show))/len(shows))
img = show.find('img')
name = img['alt']
year = img['src']
# the year is embedded in the thumbnail path ('.../HGTV/<year>/...')
year = re.search('HGTV/(.+?)/', year).group(1)
href = show['href']
print [name, href, year]
if not silent:
pDialog.update(percent, url, name)
#self.addShowToDB(name, href, character, year)
print 'Dowload complete!'
def _getRecentShows(self, silent=False):
# stub: refresh only recently-changed shows
print "Getting recent shows for: " + self.service
''' Do work here
'''
print 'Dowload complete!'
def _getEpisodes(self, showid, show, url, pDialog, percent, silent):
# stub: scrape all episodes of one show
print "getting episodes for " + show
''' Do work here
'''
return True
def _getMovies(self, silent=False):
# stub: scrape the movie index (mirrors _getShows)
if self.isFresh('movies'):
self._getRecentMovies(silent=silent)
return
print "Getting All movies for " + self.service
''' Do work here
'''
print 'Dowload complete!'
def _getRecentMovies(self, silent):
# stub: refresh only recently-changed movies
print "Getting recent movies for: " + self.service
''' Do work here
'''
print 'Dowload complete!'
def _getStreams(self, episodeid=None, movieid=None):
# stub: return the list of stream URLs for an episode or movie
streams = []
''' Do work here
'''
return streams
def _resolveStream(self, stream):
# stub: turn a 'hgtv://...' stream token into a playable URL
raw_url = stream.replace(self.service + '://', '')
resolved_url = ''
''' Do work here
Try to resolve with urlresolver otherwise insert call to local resolver here
'''
return resolved_url
def _resolveIMDB(self, uri): #Often needed if a sites movie index does not include imdb links but the movie page does
imdb = ''
print uri
pagedata = self.getURL(uri, append_base_url=True)
if pagedata=='':
return
imdb = re.search('http://www.imdb.com/title/(.+?)/', pagedata).group(1)
return imdb
def whichHost(self, host): #Sometimes needed
# Map a display host name to its domain; 'Unknown' when unmapped.
table = { 'Watch Blah' : 'blah.com',
'Watch Blah2' : 'blah2.com',
}
try:
host_url = table[host]
return host_url
except:
return 'Unknown'
| gpl-2.0 |
factorybuild/stbgui | lib/python/Screens/InfoBarGenerics.py | 1 | 119933 | from ChannelSelection import ChannelSelection, BouquetSelector, SilentBouquetSelector
from Components.ActionMap import ActionMap, HelpableActionMap
from Components.ActionMap import NumberActionMap
from Components.Harddisk import harddiskmanager
from Components.Input import Input
from Components.Label import Label
from Components.MovieList import AUDIO_EXTENSIONS, MOVIE_EXTENSIONS, DVD_EXTENSIONS
from Components.PluginComponent import plugins
from Components.ServiceEventTracker import ServiceEventTracker
from Components.Sources.Boolean import Boolean
from Components.config import config, ConfigBoolean, ConfigClock, ConfigText
from Components.SystemInfo import SystemInfo
from Components.UsageConfig import preferredInstantRecordPath, defaultMoviePath, ConfigSelection
from Components.VolumeControl import VolumeControl
from Components.Sources.StaticText import StaticText
from EpgSelection import EPGSelection
from Plugins.Plugin import PluginDescriptor
from Screen import Screen
from Screens import ScreenSaver
from Screens import Standby
from Screens.ChoiceBox import ChoiceBox
from Screens.Dish import Dish
from Screens.EventView import EventViewEPGSelect, EventViewSimple
from Screens.InputBox import InputBox
from Screens.MessageBox import MessageBox
from Screens.MinuteInput import MinuteInput
from Screens.TimerSelection import TimerSelection
from Screens.PictureInPicture import PictureInPicture
import Screens.Standby
from Screens.SubtitleDisplay import SubtitleDisplay
from Screens.RdsDisplay import RdsInfoDisplay, RassInteractive
from Screens.TimeDateInput import TimeDateInput
from Screens.UnhandledKey import UnhandledKey
from ServiceReference import ServiceReference, isPlayableForCur
from Tools import Notifications, ASCIItranslit
from Tools.Directories import fileExists, getRecordingFilename, moveFiles
from enigma import eTimer, eServiceCenter, eDVBServicePMTHandler, iServiceInformation, iPlayableService, eServiceReference, eEPGCache, eActionMap, getDesktop, eDVBDB
from time import time, localtime, strftime
import os
from bisect import insort
from sys import maxint
from RecordTimer import RecordTimerEntry, RecordTimer, findSafeRecordPath
# hack alert!
from Menu import MainMenu, mdom
def isStandardInfoBar(self):
    """Return True only for the plain InfoBar screen itself (subclasses excluded)."""
    screen_name = self.__class__.__name__
    return screen_name == "InfoBar"
def setResumePoint(session):
# Store the current play position of the running service in the global
# resume-point cache (entries are [lru_timestamp, position, length]).
global resumePointCache, resumePointCacheLast
service = session.nav.getCurrentService()
ref = session.nav.getCurrentlyPlayingServiceOrGroup()
if (service is not None) and (ref is not None): # and (ref.type != 1):
# ref type 1 has its own memory...
seek = service.seek()
if seek:
pos = seek.getPlayPosition()
if not pos[0]:
# pos[0] is the error flag; pos[1] the actual position
key = ref.toString()
lru = int(time())
l = seek.getLength()
if l:
l = l[1]
else:
l = None
resumePointCache[key] = [lru, pos[1], l]
# keep the cache bounded to 50 entries
if len(resumePointCache) > 50:
# NOTE(review): this picks the *last-seen* entry older than
# 'lru', not strictly the oldest one -- confirm intended.
candidate = key
for k,v in resumePointCache.items():
if v[0] < lru:
candidate = k
del resumePointCache[candidate]
# persist to disk at most once per hour
if lru - resumePointCacheLast > 3600:
saveResumePoints()
def delResumePoint(ref):
    """Forget the stored resume position for service `ref` (if any)."""
    global resumePointCache, resumePointCacheLast
    # pop with default is equivalent to try/del/except KeyError
    resumePointCache.pop(ref.toString(), None)
    # persist to disk at most once per hour
    if int(time()) - resumePointCacheLast > 3600:
        saveResumePoints()
def getResumePoint(session):
    """Return the stored resume position for the current service, or None.

    Looking an entry up refreshes its LRU timestamp.  Type-1 references
    are skipped (they keep their own resume memory).
    """
    global resumePointCache
    ref = session.nav.getCurrentlyPlayingServiceOrGroup()
    if ref is None or ref.type == 1:
        return None
    try:
        entry = resumePointCache[ref.toString()]
    except KeyError:
        return None
    entry[0] = int(time())  # update LRU timestamp
    return entry[1]
def saveResumePoints():
global resumePointCache, resumePointCacheLast
import cPickle
try:
f = open('/home/root/resumepoints.pkl', 'wb')
cPickle.dump(resumePointCache, f, cPickle.HIGHEST_PROTOCOL)
except Exception, ex:
print "[InfoBar] Failed to write resumepoints:", ex
resumePointCacheLast = int(time())
def loadResumePoints():
import cPickle
try:
return cPickle.load(open('/home/root/resumepoints.pkl', 'rb'))
except Exception, ex:
print "[InfoBar] Failed to load resumepoints:", ex
return {}
# Warm the resume-point cache at import time and remember when it was last
# persisted (used to throttle saves to at most once per hour).
resumePointCache = loadResumePoints()
resumePointCacheLast = int(time())
class InfoBarDish:
# Mixin that instantiates the Dish dialog (shown while a rotor turns).
def __init__(self):
self.dishDialog = self.session.instantiateDialog(Dish)
class InfoBarUnhandledKey:
# Mixin that shows a small on-screen symbol when a key press was not
# handled by any action map.  It hooks itself in at both ends of the
# action chain: actionA (highest priority) sees every event, actionB
# (lowest priority) only events nothing else consumed.  'flags'/'uflags'
# collect the event phases (bit = 1 << flag) of the current key press;
# if they end up equal, no phase was handled anywhere.
def __init__(self):
self.unhandledKeyDialog = self.session.instantiateDialog(UnhandledKey)
self.hideUnhandledKeySymbolTimer = eTimer()
self.hideUnhandledKeySymbolTimer.callback.append(self.unhandledKeyDialog.hide)
self.checkUnusedTimer = eTimer()
self.checkUnusedTimer.callback.append(self.checkUnused)
self.onLayoutFinish.append(self.unhandledKeyDialog.hide)
eActionMap.getInstance().bindAction('', -maxint -1, self.actionA) #highest prio
eActionMap.getInstance().bindAction('', maxint, self.actionB) #lowest prio
self.flags = (1<<1)
self.uflags = 0
#this function is called on every keypress!
def actionA(self, key, flag):
self.unhandledKeyDialog.hide()
if flag != 4:
# a completed press (bit 1 set) starts a fresh tracking cycle
if self.flags & (1<<1):
self.flags = self.uflags = 0
self.flags |= (1<<flag)
if flag == 1: # break
# evaluate after the event queue has drained
self.checkUnusedTimer.start(0, True)
return 0
#this function is only called when no other action has handled this key
def actionB(self, key, flag):
if flag != 4:
self.uflags |= (1<<flag)
def checkUnused(self):
# all phases unhandled -> show the symbol for 2 seconds
if self.flags == self.uflags:
self.unhandledKeyDialog.show()
self.hideUnhandledKeySymbolTimer.start(2000, True)
class InfoBarScreenSaver:
# Mixin that shows the screensaver after a configured idle time, but only
# while audio-like content is playing and no PiP is shown.
def __init__(self):
self.onExecBegin.append(self.__onExecBegin)
self.onExecEnd.append(self.__onExecEnd)
self.screenSaverTimer = eTimer()
self.screenSaverTimer.callback.append(self.screensaverTimeout)
self.screensaver = self.session.instantiateDialog(ScreenSaver.Screensaver)
self.onLayoutFinish.append(self.__layoutFinished)
def __layoutFinished(self):
self.screensaver.hide()
def __onExecBegin(self):
self.ScreenSaverTimerStart()
def __onExecEnd(self):
# leaving the screen: tear the saver down and stop the idle timer
if self.screensaver.shown:
self.screensaver.hide()
eActionMap.getInstance().unbindAction('', self.keypressScreenSaver)
self.screenSaverTimer.stop()
def ScreenSaverTimerStart(self):
# (re)arm the idle timer; 0 in config.usage.screen_saver disables it
time = int(config.usage.screen_saver.value)
flag = self.seekstate[0]
if not flag:
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
if ref and not (hasattr(self.session, "pipshown") and self.session.pipshown):
# service type "2" (radio) or an audio file extension
ref = ref.toString().split(":")
flag = ref[2] == "2" or os.path.splitext(ref[10])[1].lower() in AUDIO_EXTENSIONS
if time and flag:
self.screenSaverTimer.startLongTimer(time)
else:
self.screenSaverTimer.stop()
def screensaverTimeout(self):
# idle timer fired: show the saver unless we are in/entering standby,
# and grab all key presses at highest priority to dismiss it
if self.execing and not Standby.inStandby and not Standby.inTryQuitMainloop:
self.hide()
if hasattr(self, "pvrStateDialog"):
self.pvrStateDialog.hide()
self.screensaver.show()
eActionMap.getInstance().bindAction('', -maxint - 1, self.keypressScreenSaver)
def keypressScreenSaver(self, key, flag):
# any key dismisses the saver and restarts the idle timer
if flag:
self.screensaver.hide()
self.show()
self.ScreenSaverTimerStart()
eActionMap.getInstance().unbindAction('', self.keypressScreenSaver)
class HideVBILine(Screen):
# Borderless full-width screen a few pixels high at the very top of the
# display; used to cover the VBI line of services flagged FLAG_HIDE_VBI.
# Height scales with the desktop resolution (height/360 + 1 pixels).
def __init__(self, session):
self.skin = """<screen position="0,0" size="%s,%s" flags="wfNoBorder" zPosition="1"/>""" % (getDesktop(0).size().width(), getDesktop(0).size().height() / 360 + 1)
Screen.__init__(self, session)
class SecondInfoBar(Screen):
# Generic second infobar screen; the actual layout is selected purely by
# the skin name passed in ("SecondInfoBar" or "SecondInfoBarSimple").
def __init__(self, session, skinName):
Screen.__init__(self, session)
self.skinName = skinName
class InfoBarShowHide(InfoBarScreenSaver):
	""" InfoBar show/hide control, accepts toggleShow and hide actions, might start
	fancy animations. """
	# visibility state machine values
	STATE_HIDDEN = 0
	STATE_HIDING = 1
	STATE_SHOWING = 2
	STATE_SHOWN = 3
	# service flag marking services whose VBI line must be covered
	FLAG_HIDE_VBI = 512

	def __init__(self):
		self["ShowHideActions"] = ActionMap( ["InfobarShowHideActions"] ,
			{
				"toggleShow": self.okButtonCheck,
				"hide": self.keyHide,
				"toggleShowLong" : self.toggleShowLong,
				"hideLong" : self.hideLong,
			}, 1) # lower prio to make it possible to override ok and cancel..
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evStart: self.serviceStarted,
			})
		InfoBarScreenSaver.__init__(self)
		self.__state = self.STATE_SHOWN
		# nesting counter for lockShow()/unlockShow()
		self.__locked = 0
		self.hideTimer = eTimer()
		self.hideTimer.callback.append(self.doTimerHide)
		self.hideTimer.start(5000, True)
		self.onShow.append(self.__onShow)
		self.onHide.append(self.__onHide)
		self.onShowHideNotifiers = []
		self.actualSecondInfoBarScreen = None
		if isStandardInfoBar(self):
			# both second-infobar variants are instantiated up front; the
			# "actual" one is picked based on the configured mode and whether
			# the simple skin actually exists (skinAttributes set)
			self.secondInfoBarScreen = self.session.instantiateDialog(SecondInfoBar, "SecondInfoBar")
			self.secondInfoBarScreen.show()
			self.secondInfoBarScreenSimple = self.session.instantiateDialog(SecondInfoBar, "SecondInfoBarSimple")
			self.secondInfoBarScreenSimple.show()
			self.actualSecondInfoBarScreen = config.usage.show_simple_second_infobar.value and self.secondInfoBarScreenSimple.skinAttributes and self.secondInfoBarScreenSimple or self.secondInfoBarScreen
		self.hideVBILineScreen = self.session.instantiateDialog(HideVBILine)
		self.hideVBILineScreen.show()
		self.onLayoutFinish.append(self.__layoutFinished)
		self.onExecBegin.append(self.__onExecBegin)

	def __onExecBegin(self):
		self.clearScreenPath()
		self.showHideVBI()

	def __layoutFinished(self):
		# dialogs were shown once to force skin evaluation; hide them again
		if self.actualSecondInfoBarScreen:
			self.secondInfoBarScreen.hide()
			self.secondInfoBarScreenSimple.hide()
		self.hideVBILineScreen.hide()

	def __onShow(self):
		self.__state = self.STATE_SHOWN
		for x in self.onShowHideNotifiers:
			x(True)
		self.startHideTimer()

	def __onHide(self):
		self.__state = self.STATE_HIDDEN
		if self.actualSecondInfoBarScreen:
			self.actualSecondInfoBarScreen.hide()
		for x in self.onShowHideNotifiers:
			x(False)

	def toggleShowLong(self):
		if not config.usage.ok_is_channelselection.value:
			self.toggleSecondInfoBar()

	def hideLong(self):
		if config.usage.ok_is_channelselection.value:
			self.toggleSecondInfoBar()

	def toggleSecondInfoBar(self):
		# switch between the simple and the full second infobar (only when
		# neither infobar is currently shown and both skins exist)
		if self.actualSecondInfoBarScreen and not self.shown and not self.actualSecondInfoBarScreen.shown and self.secondInfoBarScreenSimple.skinAttributes and self.secondInfoBarScreen.skinAttributes:
			self.actualSecondInfoBarScreen.hide()
			config.usage.show_simple_second_infobar.value = not config.usage.show_simple_second_infobar.value
			config.usage.show_simple_second_infobar.save()
			self.actualSecondInfoBarScreen = config.usage.show_simple_second_infobar.value and self.secondInfoBarScreenSimple or self.secondInfoBarScreen
			self.showSecondInfoBar()

	def keyHide(self):
		if self.__state == self.STATE_HIDDEN and self.session.pipshown and "popup" in config.usage.pip_hideOnExit.value:
			# infobar already hidden: EXIT may close picture-in-picture
			if config.usage.pip_hideOnExit.value == "popup":
				self.session.openWithCallback(self.hidePipOnExitCallback, MessageBox, _("Disable Picture in Picture"), simple=True)
			else:
				self.hidePipOnExitCallback(True)
		elif config.usage.ok_is_channelselection.value and hasattr(self, "openServiceList"):
			self.toggleShow()
		elif self.__state == self.STATE_SHOWN:
			self.hide()

	def hidePipOnExitCallback(self, answer):
		if answer == True:
			self.showPiP()

	def connectShowHideNotifier(self, fnc):
		if not fnc in self.onShowHideNotifiers:
			self.onShowHideNotifiers.append(fnc)

	def disconnectShowHideNotifier(self, fnc):
		if fnc in self.onShowHideNotifiers:
			self.onShowHideNotifiers.remove(fnc)

	def serviceStarted(self):
		if self.execing:
			if config.usage.show_infobar_on_zap.value:
				self.doShow()
		self.showHideVBI()

	def startHideTimer(self):
		if self.__state == self.STATE_SHOWN and not self.__locked:
			self.hideTimer.stop()
			# timeout index doubles as a duration in seconds; 0 disables auto-hide
			if self.actualSecondInfoBarScreen and self.actualSecondInfoBarScreen.shown:
				idx = config.usage.show_second_infobar.index - 1
			else:
				idx = config.usage.infobar_timeout.index
			if idx:
				self.hideTimer.startLongTimer(idx)

	def doShow(self):
		self.show()
		self.startHideTimer()

	def doTimerHide(self):
		self.hideTimer.stop()
		if self.__state == self.STATE_SHOWN:
			self.hide()

	def okButtonCheck(self):
		if config.usage.ok_is_channelselection.value and hasattr(self, "openServiceList"):
			# OK opens the service list, except while paused in timeshift
			if isinstance(self, InfoBarTimeshift) and self.timeshiftEnabled() and isinstance(self, InfoBarSeek) and self.seekstate == self.SEEK_STATE_PAUSE:
				return
			self.openServiceList()
		else:
			self.toggleShow()

	def toggleShow(self):
		if self.__state == self.STATE_HIDDEN:
			self.showFirstInfoBar()
		else:
			self.showSecondInfoBar()

	def showSecondInfoBar(self):
		if isStandardInfoBar(self) and config.usage.show_second_infobar.value == "EPG":
			# configured to show the EPG instead of a second infobar
			if not(hasattr(self, "hotkeyGlobal") and self.hotkeyGlobal("info") != 0):
				self.showDefaultEPG()
		elif self.actualSecondInfoBarScreen and config.usage.show_second_infobar.value and not self.actualSecondInfoBarScreen.shown:
			self.show()
			self.actualSecondInfoBarScreen.show()
			self.startHideTimer()
		else:
			self.hide()
			self.hideTimer.stop()

	def showFirstInfoBar(self):
		if self.__state == self.STATE_HIDDEN or self.actualSecondInfoBarScreen and self.actualSecondInfoBarScreen.shown:
			self.actualSecondInfoBarScreen and self.actualSecondInfoBarScreen.hide()
			self.show()
		else:
			self.hide()
			self.hideTimer.stop()

	def lockShow(self):
		self.__locked = self.__locked + 1
		if self.execing:
			self.show()
			self.hideTimer.stop()

	def unlockShow(self):
		self.__locked = self.__locked - 1
		if self.execing:
			self.startHideTimer()

	def checkHideVBI(self):
		# Returns a truthy value when the VBI line of the current service
		# should be covered (flagged service, ".hidvbi." in a recording path,
		# or the service itself reports sHideVBI).
		service = self.session.nav.getCurrentlyPlayingServiceReference()
		servicepath = service and service.getPath()
		if servicepath and servicepath.startswith("/"):
			if service.toString().startswith("1:"):
				info = eServiceCenter.getInstance().info(service)
				service = info and info.getInfoString(service, iServiceInformation.sServiceref)
				return service and eDVBDB.getInstance().getFlag(eServiceReference(service)) & self.FLAG_HIDE_VBI and True
			else:
				return ".hidvbi." in servicepath.lower()
		service = self.session.nav.getCurrentService()
		info = service and service.info()
		return info and info.getInfo(iServiceInformation.sHideVBI)

	def showHideVBI(self):
		if self.checkHideVBI():
			self.hideVBILineScreen.show()
		else:
			self.hideVBILineScreen.hide()

	def ToggleHideVBI(self):
		# Toggle the persistent FLAG_HIDE_VBI on the current (DVB) service.
		service = self.session.nav.getCurrentlyPlayingServiceReference()
		servicepath = service and service.getPath()
		if not servicepath:
			if eDVBDB.getInstance().getFlag(service) & self.FLAG_HIDE_VBI:
				eDVBDB.getInstance().removeFlag(service, self.FLAG_HIDE_VBI)
			else:
				eDVBDB.getInstance().addFlag(service, self.FLAG_HIDE_VBI)
			eDVBDB.getInstance().reloadBouquets()
			self.showHideVBI()
class BufferIndicator(Screen):
	"""Small overlay showing gstreamer buffering progress in percent."""
	def __init__(self, session):
		Screen.__init__(self, session)
		self["status"] = Label()
		# only display buffering progress between service start and actual playback
		self.mayShow = False
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evBuffering: self.bufferChanged,
				iPlayableService.evStart: self.__evStart,
				iPlayableService.evGstreamerPlayStarted: self.__evGstreamerPlayStarted,
			})

	def bufferChanged(self):
		# update the percentage label; pop up only while buffering is incomplete
		if self.mayShow:
			service = self.session.nav.getCurrentService()
			info = service and service.info()
			if info:
				value = info.getInfo(iServiceInformation.sBuffer)
				if value and value != 100:
					self["status"].setText(_("Buffering %d%%") % value)
					if not self.shown:
						self.show()

	def __evStart(self):
		self.mayShow = True
		self.hide()

	def __evGstreamerPlayStarted(self):
		# playback has started, no further buffering display wanted
		self.mayShow = False
		self.hide()
class InfoBarBuffer():
	"""Mixin that instantiates the (initially hidden) buffering indicator."""
	def __init__(self):
		buffer_dialog = self.session.instantiateDialog(BufferIndicator)
		buffer_dialog.hide()
		self.bufferScreen = buffer_dialog
class NumberZap(Screen):
	"""Dialog that collects a typed channel number and zaps to it.

	Digits arrive via keyNumberGlobal; an eTimer auto-confirms after a short
	idle period.  Closes with (service, bouquet) on success, or with no
	arguments on cancel.
	"""
	def quit(self):
		# cancelled: close without a result
		self.Timer.stop()
		self.close()

	def keyOK(self):
		self.Timer.stop()
		self.close(self.service, self.bouquet)

	def handleServiceName(self):
		# resolve the currently typed number to a service and show its name
		if self.searchNumber:
			self.service, self.bouquet = self.searchNumber(int(self["number"].getText()))
			self["servicename"].text = self["servicename_summary"].text = ServiceReference(self.service).getServiceName()
			if not self.startBouquet:
				self.startBouquet = self.bouquet

	def keyBlue(self):
		# blue toggles between "first bouquet only" and global search
		self.startTimer()
		if self.searchNumber:
			if self.startBouquet == self.bouquet:
				self.service, self.bouquet = self.searchNumber(int(self["number"].getText()), firstBouquetOnly = True)
			else:
				self.service, self.bouquet = self.searchNumber(int(self["number"].getText()))
			self["servicename"].text = self["servicename_summary"].text = ServiceReference(self.service).getServiceName()

	def keyNumberGlobal(self, number):
		self.startTimer(repeat=True)
		self.numberString = self.numberString + str(number)
		self["number"].text = self["number_summary"].text = self.numberString
		self.handleServiceName()
		if len(self.numberString) >= 5:
			# channel numbers never exceed 5 digits: confirm immediately
			self.keyOK()

	def __init__(self, session, number, searchNumberFunction = None):
		Screen.__init__(self, session)
		self.numberString = str(number)
		self.searchNumber = searchNumberFunction
		self.startBouquet = None
		self["channel"] = Label(_("Channel:"))
		self["number"] = Label(self.numberString)
		self["servicename"] = Label()
		self["channel_summary"] = StaticText(_("Channel:"))
		self["number_summary"] = StaticText(self.numberString)
		self["servicename_summary"] = StaticText()
		self.handleServiceName()
		self["actions"] = NumberActionMap( [ "SetupActions", "ShortcutActions" ],
			{
				"cancel": self.quit,
				"ok": self.keyOK,
				"blue": self.keyBlue,
				"1": self.keyNumberGlobal,
				"2": self.keyNumberGlobal,
				"3": self.keyNumberGlobal,
				"4": self.keyNumberGlobal,
				"5": self.keyNumberGlobal,
				"6": self.keyNumberGlobal,
				"7": self.keyNumberGlobal,
				"8": self.keyNumberGlobal,
				"9": self.keyNumberGlobal,
				"0": self.keyNumberGlobal
			})
		self.Timer = eTimer()
		self.Timer.callback.append(self.endTimer)
		# 250 ms tick; endTimer counts ticks until timer_target is exceeded
		self.Timer.start(250)
		self.startTimer()

	def startTimer(self, repeat=False):
		# (Re)arm the auto-confirm deadline, measured in 250 ms ticks.  On
		# repeated key presses the deadline shortens with each typed digit.
		self.timer_target = repeat and self.timer_counter < 6 and [4,4,4,5,8,10][self.timer_counter] or 12
		self.timer_counter = 0

	def endTimer(self):
		self.timer_counter += 1
		if self.timer_counter > self.timer_target:
			self.keyOK()
class InfoBarNumberZap:
	""" Handles an initial number for NumberZapping """
	def __init__(self):
		self["NumberActions"] = NumberActionMap( [ "NumberActions"],
			{
				"1": self.keyNumberGlobal,
				"2": self.keyNumberGlobal,
				"3": self.keyNumberGlobal,
				"4": self.keyNumberGlobal,
				"5": self.keyNumberGlobal,
				"6": self.keyNumberGlobal,
				"7": self.keyNumberGlobal,
				"8": self.keyNumberGlobal,
				"9": self.keyNumberGlobal,
				"0": self.keyNumberGlobal,
			})

	def keyNumberGlobal(self, number):
		"""Handle a digit key: 0 recalls the previous service (or drives
		PiP), any other digit opens the NumberZap dialog."""
		if number == 0:
			if isinstance(self, InfoBarPiP) and self.pipHandles0Action():
				self.pipDoHandle0Action()
			elif len(self.servicelist.history) > 1:
				self.checkTimeshiftRunning(self.recallPrevService)
		else:
			# dict.has_key() is deprecated; Screen derives from dict, so use "in"
			if "TimeshiftActions" in self and self.timeshiftEnabled():
				ts = self.getTimeshift()
				if ts and ts.isTimeshiftActive():
					# no number zap while timeshift is active
					return
			self.session.openWithCallback(self.numberEntered, NumberZap, number, self.searchNumber)

	def recallPrevService(self, reply):
		if reply:
			self.servicelist.recallPrevService()

	def numberEntered(self, service = None, bouquet = None):
		# callback from NumberZap; nothing to do when the dialog was cancelled
		if service:
			self.selectAndStartService(service, bouquet)

	def searchNumberHelper(self, serviceHandler, num, bouquet):
		"""Linear scan of *bouquet* for the service with channel number *num*.

		Returns the matching service reference or None."""
		servicelist = serviceHandler.list(bouquet)
		if servicelist:
			serviceIterator = servicelist.getNext()
			while serviceIterator.valid():
				if num == serviceIterator.getChannelNum():
					return serviceIterator
				serviceIterator = servicelist.getNext()
		return None

	def searchNumber(self, number, firstBouquetOnly=False, bouquet=None):
		"""Locate channel *number*, optionally restricted to the first
		bouquet; returns (service-or-None, bouquet searched last)."""
		bouquet = bouquet or self.servicelist.getRoot()
		service = None
		serviceHandler = eServiceCenter.getInstance()
		if not firstBouquetOnly:
			service = self.searchNumberHelper(serviceHandler, number, bouquet)
		if config.usage.multibouquet.value and not service:
			# walk all bouquets below the bouquet root
			bouquet = self.servicelist.bouquet_root
			bouquetlist = serviceHandler.list(bouquet)
			if bouquetlist:
				bouquet = bouquetlist.getNext()
				while bouquet.valid():
					if bouquet.flags & eServiceReference.isDirectory and not bouquet.flags & eServiceReference.isInvisible:
						service = self.searchNumberHelper(serviceHandler, number, bouquet)
						if service:
							# markers/directories are not zappable (numbered markers are)
							playable = not (service.flags & (eServiceReference.isMarker|eServiceReference.isDirectory)) or (service.flags & eServiceReference.isNumberedMarker)
							if not playable:
								service = None
							break
						if config.usage.alternative_number_mode.value or firstBouquetOnly:
							break
					bouquet = bouquetlist.getNext()
		return service, bouquet

	def selectAndStartService(self, service, bouquet):
		if service and not service.flags & eServiceReference.isMarker:
			if self.servicelist.getRoot() != bouquet: #already in correct bouquet?
				self.servicelist.clearPath()
				if self.servicelist.bouquet_root != bouquet:
					self.servicelist.enterPath(self.servicelist.bouquet_root)
				self.servicelist.enterPath(bouquet)
			self.servicelist.setCurrentSelection(service) #select the service in servicelist
			self.servicelist.zap(enable_pipzap = True)
			self.servicelist.correctChannelNumber()
			self.servicelist.startRoot = None

	def zapToNumber(self, number):
		service, bouquet = self.searchNumber(number)
		self.selectAndStartService(service, bouquet)
# True until the very first channel selection has been shown once (first boot).
config.misc.initialchannelselection = ConfigBoolean(default = True)
class InfoBarChannelSelection:
	""" ChannelSelection - handles the channelSelection dialog and the initial
	channelChange actions which open the channelSelection dialog """
	def __init__(self):
		#instantiate forever
		self.servicelist = self.session.instantiateDialog(ChannelSelection)
		if config.misc.initialchannelselection.value:
			self.onShown.append(self.firstRun)
		self["ChannelSelectActions"] = HelpableActionMap(self, "InfobarChannelSelection",
			{
				"keyUp": (self.keyUpCheck, self.getKeyUpHelptext),
				"keyDown": (self.keyDownCheck, self.getKeyDownHelpText),
				"keyLeft": (self.keyLeftCheck, self.getKeyLeftHelptext),
				"keyRight": (self.keyRightCheck, self.getKeyRightHelptext),
				"historyBack": (self.historyBack, _("Switch to previous channel in history")),
				"historyNext": (self.historyNext, _("Switch to next channel in history")),
				"keyChannelUp": (self.keyChannelUpCheck, self.getKeyChannelUpHelptext),
				"keyChannelDown": (self.keyChannelDownCheck, self.getKeyChannelDownHelptext),
				"openServiceList": (self.openServiceList, _("Open service list")),
				"openSatellites": (self.openSatellites, _("Open satellites list")),
			})

	def showTvChannelList(self, zap=False):
		self.servicelist.setModeTv()
		if zap:
			self.servicelist.zap()

	def showRadioChannelList(self, zap=False):
		self.servicelist.setModeRadio()
		if zap:
			self.servicelist.zap()

	def firstRun(self):
		# one-shot: open the channel list on the very first boot
		self.onShown.remove(self.firstRun)
		config.misc.initialchannelselection.value = False
		config.misc.initialchannelselection.save()
		self.switchChannelDown()

	def historyBack(self):
		self.checkTimeshiftRunning(self.historyBackCheckTimeshiftCallback)

	def historyBackCheckTimeshiftCallback(self, answer):
		if answer:
			self.servicelist.historyBack()

	def historyNext(self):
		self.checkTimeshiftRunning(self.historyNextCheckTimeshiftCallback)

	def historyNextCheckTimeshiftCallback(self, answer):
		if answer:
			self.servicelist.historyNext()

	# The key*Check methods dispatch the arrow keys depending on the
	# "old style zap controls" and "volume instead of channel selection"
	# user settings; the matching getKey*Helptext methods produce the
	# corresponding help texts.
	def keyUpCheck(self):
		if config.usage.oldstyle_zap_controls.value:
			self.zapDown()
		elif config.usage.volume_instead_of_channelselection.value:
			VolumeControl.instance and VolumeControl.instance.volUp()
		else:
			self.switchChannelUp()

	def keyDownCheck(self):
		if config.usage.oldstyle_zap_controls.value:
			self.zapUp()
		elif config.usage.volume_instead_of_channelselection.value:
			VolumeControl.instance and VolumeControl.instance.volDown()
		else:
			self.switchChannelDown()

	def keyLeftCheck(self):
		if config.usage.oldstyle_zap_controls.value:
			if config.usage.volume_instead_of_channelselection.value:
				VolumeControl.instance and VolumeControl.instance.volDown()
			else:
				self.switchChannelUp()
		else:
			self.zapUp()

	def keyRightCheck(self):
		if config.usage.oldstyle_zap_controls.value:
			if config.usage.volume_instead_of_channelselection.value:
				VolumeControl.instance and VolumeControl.instance.volUp()
			else:
				self.switchChannelDown()
		else:
			self.zapDown()

	def keyChannelUpCheck(self):
		if config.usage.zap_with_ch_buttons.value:
			self.zapDown()
		else:
			self.openServiceList()

	def keyChannelDownCheck(self):
		if config.usage.zap_with_ch_buttons.value:
			self.zapUp()
		else:
			self.openServiceList()

	def getKeyUpHelptext(self):
		if config.usage.oldstyle_zap_controls.value:
			value = _("Switch to next channel")
		else:
			if config.usage.volume_instead_of_channelselection.value:
				value = _("Volume up")
			else:
				value = _("Open service list")
				if not "keep" in config.usage.servicelist_cursor_behavior.value:
					value += " " + _("and select previous channel")
		return value

	def getKeyDownHelpText(self):
		if config.usage.oldstyle_zap_controls.value:
			value = _("Switch to previous channel")
		else:
			if config.usage.volume_instead_of_channelselection.value:
				value = _("Volume down")
			else:
				value = _("Open service list")
				if not "keep" in config.usage.servicelist_cursor_behavior.value:
					value += " " + _("and select next channel")
		return value

	def getKeyLeftHelptext(self):
		if config.usage.oldstyle_zap_controls.value:
			if config.usage.volume_instead_of_channelselection.value:
				value = _("Volume down")
			else:
				value = _("Open service list")
				if not "keep" in config.usage.servicelist_cursor_behavior.value:
					value += " " + _("and select previous channel")
		else:
			value = _("Switch to previous channel")
		return value

	def getKeyRightHelptext(self):
		if config.usage.oldstyle_zap_controls.value:
			if config.usage.volume_instead_of_channelselection.value:
				value = _("Volume up")
			else:
				value = _("Open service list")
				if not "keep" in config.usage.servicelist_cursor_behavior.value:
					value += " " + _("and select next channel")
		else:
			value = _("Switch to next channel")
		return value

	def getKeyChannelUpHelptext(self):
		return config.usage.zap_with_ch_buttons.value and _("Switch to next channel") or _("Open service list")

	def getKeyChannelDownHelptext(self):
		return config.usage.zap_with_ch_buttons.value and _("Switch to previous channel") or _("Open service list")

	def switchChannelUp(self):
		if "keep" not in config.usage.servicelist_cursor_behavior.value:
			self.servicelist.moveUp()
		self.session.execDialog(self.servicelist)

	def switchChannelDown(self):
		if "keep" not in config.usage.servicelist_cursor_behavior.value:
			self.servicelist.moveDown()
		self.session.execDialog(self.servicelist)

	def zapUp(self):
		# Zap to the previous playable service; skips unplayable entries and
		# stops once a playable one (or the starting service) is reached.
		if self.servicelist.inBouquet():
			prev = self.servicelist.getCurrentSelection()
			if prev:
				prev = prev.toString()
				while True:
					if config.usage.quickzap_bouquet_change.value:
						if self.servicelist.atBegin():
							self.servicelist.prevBouquet()
					self.servicelist.moveUp()
					cur = self.servicelist.getCurrentSelection()
					if cur:
						if self.servicelist.dopipzap:
							isPlayable = self.session.pip.isPlayableForPipService(cur)
						else:
							isPlayable = isPlayableForCur(cur)
					if cur and (cur.toString() == prev or isPlayable):
						break
		else:
			self.servicelist.moveUp()
		self.servicelist.zap(enable_pipzap = True)

	def zapDown(self):
		# Zap to the next playable service (see zapUp for the loop logic).
		if self.servicelist.inBouquet():
			prev = self.servicelist.getCurrentSelection()
			if prev:
				prev = prev.toString()
				while True:
					if config.usage.quickzap_bouquet_change.value and self.servicelist.atEnd():
						self.servicelist.nextBouquet()
					else:
						self.servicelist.moveDown()
					cur = self.servicelist.getCurrentSelection()
					if cur:
						if self.servicelist.dopipzap:
							isPlayable = self.session.pip.isPlayableForPipService(cur)
						else:
							isPlayable = isPlayableForCur(cur)
					if cur and (cur.toString() == prev or isPlayable):
						break
		else:
			self.servicelist.moveDown()
	
	self.servicelist.zap(enable_pipzap = True)

	def openSatellites(self):
		self.servicelist.showSatellites()
		self.session.execDialog(self.servicelist)

	def openFavouritesList(self):
		self.servicelist.showFavourites()
		self.openServiceList()

	def openServiceList(self):
		self.session.execDialog(self.servicelist)
class InfoBarMenu:
""" Handles a menu action, to open the (main) menu """
def __init__(self):
self["MenuActions"] = HelpableActionMap(self, "InfobarMenuActions",
{
"mainMenu": (self.mainMenu, _("Enter main menu...")),
})
self.session.infobar = None
def mainMenu(self):
print "loading mainmenu XML..."
menu = mdom.getroot()
assert menu.tag == "menu", "root element in menu must be 'menu'!"
self.session.infobar = self
# so we can access the currently active infobar from screens opened from within the mainmenu
# at the moment used from the SubserviceSelection
self.session.openWithCallback(self.mainMenuClosed, MainMenu, menu)
def mainMenuClosed(self, *val):
self.session.infobar = None
class InfoBarSimpleEventView:
	""" Opens the Eventview for now/next """
	def __init__(self):
		self["EPGActions"] = HelpableActionMap(self, "InfobarEPGActions",
			{
				"showEventInfo": (self.openEventView, _("Show event details")),
				"showEventInfoSingleEPG": (self.openEventView, _("Show event details")),
				"showInfobarOrEpgWhenInfobarAlreadyVisible": self.showEventInfoWhenNotVisible,
			})

	def showEventInfoWhenNotVisible(self):
		# INFO while the infobar is visible opens details, otherwise shows the infobar
		if self.shown:
			self.openEventView()
		else:
			self.toggleShow()
			return 1

	def openEventView(self):
		# collect the now (index 0) and next (index 1) events for the
		# current service and open a simple event viewer
		epglist = [ ]
		self.epglist = epglist
		service = self.session.nav.getCurrentService()
		ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		info = service.info()
		ptr=info.getEvent(0)
		if ptr:
			epglist.append(ptr)
		ptr=info.getEvent(1)
		if ptr:
			epglist.append(ptr)
		if epglist:
			self.session.open(EventViewSimple, epglist[0], ServiceReference(ref), self.eventViewCallback)

	def eventViewCallback(self, setEvent, setService, val): #used for now/next displaying
		# swap now/next so repeated presses toggle between the two events
		epglist = self.epglist
		if len(epglist) > 1:
			tmp = epglist[0]
			epglist[0] = epglist[1]
			epglist[1] = tmp
			setEvent(epglist[0])
class SimpleServicelist:
	"""Minimal cursor over a flat list of services, wrapping at both ends."""
	def __init__(self, services):
		self.setServices(services)

	def setServices(self, services):
		"""Replace the backing list and reset the cursor to the first entry."""
		self.services = services
		self.length = len(services)
		self.current = 0

	def selectService(self, service):
		"""Move the cursor onto *service*; return False if not found."""
		if not self.length:
			self.current = -1
			return False
		self.current = 0
		while self.services[self.current].ref != service:
			self.current += 1
			if self.current >= self.length:
				return False
		return True

	def nextService(self):
		"""Advance the cursor by one entry, wrapping to the start."""
		if not self.length:
			return
		candidate = self.current + 1
		self.current = candidate if candidate < self.length else 0

	def prevService(self):
		"""Move the cursor back one entry, wrapping to the end."""
		if not self.length:
			return
		candidate = self.current - 1
		self.current = candidate if candidate > -1 else self.length - 1

	def currentService(self):
		"""Return the service under the cursor, or None when out of range."""
		if self.length and self.current < self.length:
			return self.services[self.current]
		return None
class InfoBarEPG:
	""" EPG - Opens an EPG list when the showEPGList action fires """
	def __init__(self):
		# is_now_next: whether the open event view shows service now/next
		# events (True) or EPG-cache lookups (False)
		self.is_now_next = False
		# stack of EPG-related dialogs opened from the infobar
		self.dlg_stack = [ ]
		self.bouquetSel = None
		self.eventView = None
		self.epglist = []
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evUpdatedEventInfo: self.__evEventInfoChanged,
			})
		self["EPGActions"] = HelpableActionMap(self, "InfobarEPGActions",
			{
				"showEventInfo": (self.showDefaultEPG, _("Show EPG...")),
				"showEventInfoSingleEPG": (self.showSingleEPG, _("Show single service EPG")),
				"showEventInfoMultiEPG": (self.showMultiEPG, _("Show multi channel EPG")),
				"showInfobarOrEpgWhenInfobarAlreadyVisible": self.showEventInfoWhenNotVisible,
			})

	def getEPGPluginList(self, getAll=False):
		# event-info plugins that do not take a 'selectedevent' argument,
		# plus the built-in EPG entries
		pluginlist = [(p.name, boundFunction(self.runPlugin, p), p.path) for p in plugins.getPlugins(where = PluginDescriptor.WHERE_EVENTINFO) \
				if 'selectedevent' not in p.__call__.func_code.co_varnames] or []
		from Components.ServiceEventTracker import InfoBarCount
		if getAll or InfoBarCount == 1:
			pluginlist.append((_("Show EPG for current channel..."), self.openSingleServiceEPG, "current_channel"))
		pluginlist.append((_("Multi EPG"), self.openMultiServiceEPG, "multi_epg"))
		pluginlist.append((_("Current event EPG"), self.openEventView, "event_epg"))
		return pluginlist

	def showEventInfoWhenNotVisible(self):
		if self.shown:
			self.openEventView()
		else:
			self.toggleShow()
			return 1

	def zapToService(self, service, preview = False, zapback = False):
		# Zap (or preview-zap) to a service selected from an EPG screen.
		if self.servicelist.startServiceRef is None:
			self.servicelist.startServiceRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		if service is not None:
			if self.servicelist.getRoot() != self.epg_bouquet: #already in correct bouquet?
				self.servicelist.clearPath()
				if self.servicelist.bouquet_root != self.epg_bouquet:
					self.servicelist.enterPath(self.servicelist.bouquet_root)
				self.servicelist.enterPath(self.epg_bouquet)
			self.servicelist.setCurrentSelection(service) #select the service in servicelist
		if not zapback or preview:
			self.servicelist.zap(enable_pipzap = True)
		if (self.servicelist.dopipzap or zapback) and not preview:
			self.servicelist.zapBack()
		if not preview:
			self.servicelist.startServiceRef = None
			self.servicelist.startRoot = None

	def getBouquetServices(self, bouquet):
		# return all playable services of *bouquet* as ServiceReferences
		services = [ ]
		servicelist = eServiceCenter.getInstance().list(bouquet)
		if not servicelist is None:
			while True:
				service = servicelist.getNext()
				if not service.valid(): #check if end of list
					break
				if service.flags & (eServiceReference.isDirectory | eServiceReference.isMarker): #ignore non playable services
					continue
				services.append(ServiceReference(service))
		return services

	def openBouquetEPG(self, bouquet, withCallback=True):
		services = self.getBouquetServices(bouquet)
		if services:
			self.epg_bouquet = bouquet
			if withCallback:
				self.dlg_stack.append(self.session.openWithCallback(self.closed, EPGSelection, services, self.zapToService, None, self.changeBouquetCB))
			else:
				self.session.open(EPGSelection, services, self.zapToService, None, self.changeBouquetCB)

	def changeBouquetCB(self, direction, epg):
		# bouquet +/- pressed inside an open EPG screen
		if self.bouquetSel:
			if direction > 0:
				self.bouquetSel.down()
			else:
				self.bouquetSel.up()
			bouquet = self.bouquetSel.getCurrent()
			services = self.getBouquetServices(bouquet)
			if services:
				self.epg_bouquet = bouquet
				epg.setServices(services)

	def selectBouquet(self, bouquetref, epg):
		services = self.getBouquetServices(bouquetref)
		if services:
			self.epg_bouquet = bouquetref
			self.serviceSel.setServices(services)
			epg.setServices(services)

	def setService(self, service):
		if service:
			self.serviceSel.selectService(service)

	def closed(self, ret=False):
		# a dialog from dlg_stack was closed; propagate the close upward
		# when the screen requested it (ret True)
		closedScreen = self.dlg_stack.pop()
		if self.bouquetSel and closedScreen == self.bouquetSel:
			self.bouquetSel = None
		elif self.eventView and closedScreen == self.eventView:
			self.eventView = None
		if ret:
			dlgs=len(self.dlg_stack)
			if dlgs > 0:
				self.dlg_stack[dlgs-1].close(dlgs > 1)

	def openMultiServiceEPG(self, withCallback=True):
		bouquets = self.servicelist.getBouquetList()
		if bouquets is None:
			cnt = 0
		else:
			cnt = len(bouquets)
		if config.usage.multiepg_ask_bouquet.value:
			self.openMultiServiceEPGAskBouquet(bouquets, cnt, withCallback)
		else:
			self.openMultiServiceEPGSilent(bouquets, cnt, withCallback)

	def openMultiServiceEPGAskBouquet(self, bouquets, cnt, withCallback):
		if cnt > 1: # show bouquet list
			if withCallback:
				self.bouquetSel = self.session.openWithCallback(self.closed, BouquetSelector, bouquets, self.openBouquetEPG, enableWrapAround=True)
				self.dlg_stack.append(self.bouquetSel)
			else:
				self.bouquetSel = self.session.open(BouquetSelector, bouquets, self.openBouquetEPG, enableWrapAround=True)
		elif cnt == 1:
			self.openBouquetEPG(bouquets[0][1], withCallback)

	def openMultiServiceEPGSilent(self, bouquets, cnt, withCallback):
		# open the multi EPG for the current bouquet without asking
		root = self.servicelist.getRoot()
		rootstr = root.toCompareString()
		current = 0
		for bouquet in bouquets:
			if bouquet[1].toCompareString() == rootstr:
				break
			current += 1
		if current >= cnt:
			current = 0
		if cnt > 1: # create bouquet list for bouq+/-
			self.bouquetSel = SilentBouquetSelector(bouquets, True, self.servicelist.getBouquetNumOffset(root))
		if cnt >= 1:
			self.openBouquetEPG(root, withCallback)

	def changeServiceCB(self, direction, epg):
		if self.serviceSel:
			if direction > 0:
				self.serviceSel.nextService()
			else:
				self.serviceSel.prevService()
			epg.setService(self.serviceSel.currentService())

	def SingleServiceEPGClosed(self, ret=False):
		self.serviceSel = None

	def openSingleServiceEPG(self):
		ref = self.servicelist.getCurrentSelection()
		if ref:
			if self.servicelist.getMutableList(): # bouquet in channellist
				current_path = self.servicelist.getRoot()
				services = self.getBouquetServices(current_path)
				self.serviceSel = SimpleServicelist(services)
				if self.serviceSel.selectService(ref):
					self.epg_bouquet = current_path
					self.session.openWithCallback(self.SingleServiceEPGClosed, EPGSelection, ref, self.zapToService, serviceChangeCB=self.changeServiceCB, parent=self)
				else:
					self.session.openWithCallback(self.SingleServiceEPGClosed, EPGSelection, ref)
			else:
				self.session.open(EPGSelection, ref)

	def runPlugin(self, plugin):
		plugin(session = self.session, servicelist = self.servicelist)

	def showEventInfoPlugins(self):
		pluginlist = self.getEPGPluginList()
		if pluginlist:
			self.session.openWithCallback(self.EventInfoPluginChosen, ChoiceBox, title=_("Please choose an extension..."), list=pluginlist, skin_name="EPGExtensionsList", reorderConfig="eventinfo_order", windowTitle=_("Events info menu"))
		else:
			self.openSingleServiceEPG()

	def EventInfoPluginChosen(self, answer):
		if answer is not None:
			answer[1]()

	def openSimilarList(self, eventid, refstr):
		self.session.open(EPGSelection, refstr, None, eventid)

	def getNowNext(self):
		# fill self.epglist with the now/next events of the current service
		epglist = [ ]
		service = self.session.nav.getCurrentService()
		info = service and service.info()
		ptr = info and info.getEvent(0)
		if ptr and ptr.getEventName() != "":
			epglist.append(ptr)
		ptr = info and info.getEvent(1)
		if ptr and ptr.getEventName() != "":
			epglist.append(ptr)
		self.epglist = epglist

	def __evEventInfoChanged(self):
		# keep an open now/next event view in sync with service event updates
		if self.is_now_next and len(self.dlg_stack) == 1:
			self.getNowNext()
			if self.eventView and self.epglist:
				self.eventView.setEvent(self.epglist[0])

	def showDefaultEPG(self):
		self.openEventView()

	def showSingleEPG(self):
		self.openSingleServiceEPG()

	def showMultiEPG(self):
		self.openMultiServiceEPG()

	def openEventView(self):
		from Components.ServiceEventTracker import InfoBarCount
		if InfoBarCount > 1:
			# secondary infobar (e.g. movie player): plain now/next view
			epglist = [ ]
			self.epglist = epglist
			service = self.session.nav.getCurrentService()
			ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
			info = service.info()
			ptr=info.getEvent(0)
			if ptr:
				epglist.append(ptr)
			ptr=info.getEvent(1)
			if ptr:
				epglist.append(ptr)
			if epglist:
				self.session.open(EventViewEPGSelect, epglist[0], ServiceReference(ref), self.eventViewCallback, self.openSingleServiceEPG, self.openMultiServiceEPG, self.openSimilarList)
		else:
			ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
			self.getNowNext()
			epglist = self.epglist
			if not epglist:
				# no events from the service itself: fall back to the EPG cache
				self.is_now_next = False
				epg = eEPGCache.getInstance()
				ptr = ref and ref.valid() and epg.lookupEventTime(ref, -1)
				if ptr:
					epglist.append(ptr)
					ptr = epg.lookupEventTime(ref, ptr.getBeginTime(), +1)
					if ptr:
						epglist.append(ptr)
			else:
				self.is_now_next = True
			if epglist:
				self.eventView = self.session.openWithCallback(self.closed, EventViewEPGSelect, epglist[0], ServiceReference(ref), self.eventViewCallback, self.openSingleServiceEPG, self.openMultiServiceEPG, self.openSimilarList)
				self.dlg_stack.append(self.eventView)
			if not epglist:
				print "no epg for the service avail.. so we show multiepg instead of eventinfo"
				self.openMultiServiceEPG(False)

	def eventViewCallback(self, setEvent, setService, val): #used for now/next displaying
		# swap now/next so repeated presses toggle between the two events
		epglist = self.epglist
		if len(epglist) > 1:
			tmp = epglist[0]
			epglist[0]=epglist[1]
			epglist[1]=tmp
			setEvent(epglist[0])
class InfoBarRdsDecoder:
	"""provides RDS and Rass support/display"""
	def __init__(self):
		self.rds_display = self.session.instantiateDialog(RdsInfoDisplay)
		self.session.instantiateSummaryDialog(self.rds_display)
		self.rass_interactive = None
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evEnd: self.__serviceStopped,
				iPlayableService.evUpdatedRassSlidePic: self.RassSlidePicChanged
			})
		self["RdsActions"] = ActionMap(["InfobarRdsActions"],
			{
				"startRassInteractive": self.startRassInteractive
			},-1)
		# Rass interactive is only enabled once the decoder reports it is available
		self["RdsActions"].setEnabled(False)
		self.onLayoutFinish.append(self.rds_display.show)
		self.rds_display.onRassInteractivePossibilityChanged.append(self.RassInteractivePossibilityChanged)

	def RassInteractivePossibilityChanged(self, state):
		self["RdsActions"].setEnabled(state)

	def RassSlidePicChanged(self):
		# a new Rass slide arrived; show it unless the interactive screen handles it
		if not self.rass_interactive:
			service = self.session.nav.getCurrentService()
			decoder = service and service.rdsDecoder()
			if decoder:
				decoder.showRassSlidePicture()

	def __serviceStopped(self):
		if self.rass_interactive is not None:
			rass_interactive = self.rass_interactive
			self.rass_interactive = None
			rass_interactive.close()

	def startRassInteractive(self):
		self.rds_display.hide()
		self.rass_interactive = self.session.openWithCallback(self.RassInteractiveClosed, RassInteractive)

	def RassInteractiveClosed(self, *val):
		if self.rass_interactive is not None:
			self.rass_interactive = None
			self.RassSlidePicChanged()
		self.rds_display.show()
class InfoBarSeek:
	"""handles actions like seeking, pause"""
	# seek-state tuples: (paused, play-speed, slow-motion divisor, OSD label)
	SEEK_STATE_PLAY = (0, 0, 0, ">")
	SEEK_STATE_PAUSE = (1, 0, 0, "||")
	SEEK_STATE_EOF = (1, 0, 0, "END")
def __init__(self, actionmap = "InfobarSeekActions"):
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evSeekableStatusChanged: self.__seekableStatusChanged,
iPlayableService.evStart: self.__serviceStarted,
iPlayableService.evEOF: self.__evEOF,
iPlayableService.evSOF: self.__evSOF,
})
self.fast_winding_hint_message_showed = False
class InfoBarSeekActionMap(HelpableActionMap):
def __init__(self, screen, *args, **kwargs):
HelpableActionMap.__init__(self, screen, *args, **kwargs)
self.screen = screen
def action(self, contexts, action):
print "action:", action
if action[:5] == "seek:":
time = int(action[5:])
self.screen.doSeekRelative(time * 90000)
return 1
elif action[:8] == "seekdef:":
key = int(action[8:])
time = (-config.seek.selfdefined_13.value, False, config.seek.selfdefined_13.value,
-config.seek.selfdefined_46.value, False, config.seek.selfdefined_46.value,
-config.seek.selfdefined_79.value, False, config.seek.selfdefined_79.value)[key-1]
self.screen.doSeekRelative(time * 90000)
return 1
else:
return HelpableActionMap.action(self, contexts, action)
self["SeekActions"] = InfoBarSeekActionMap(self, actionmap,
{
"playpauseService": (self.playpauseService, _("Pauze/Continue playback")),
"pauseService": (self.pauseService, _("Pause playback")),
"unPauseService": (self.unPauseService, _("Continue playback")),
"okButton": (self.okButton, _("Continue playback")),
"seekFwd": (self.seekFwd, _("Seek forward")),
"seekFwdManual": (self.seekFwdManual, _("Seek forward (enter time)")),
"seekBack": (self.seekBack, _("Seek backward")),
"seekBackManual": (self.seekBackManual, _("Seek backward (enter time)")),
"jumpPreviousMark": (self.seekPreviousMark, _("Jump to previous marked position")),
"jumpNextMark": (self.seekNextMark, _("Jump to next marked position")),
}, prio=-1)
# give them a little more priority to win over color buttons
self["SeekActions"].setEnabled(False)
self.seekstate = self.SEEK_STATE_PLAY
self.lastseekstate = self.SEEK_STATE_PLAY
self.onPlayStateChanged = [ ]
self.lockedBecauseOfSkipping = False
self.__seekableStatusChanged()
def makeStateForward(self, n):
return (0, n, 0, ">> %dx" % n)
def makeStateBackward(self, n):
return (0, -n, 0, "<< %dx" % n)
def makeStateSlowMotion(self, n):
return (0, 0, n, "/%d" % n)
def isStateForward(self, state):
return state[1] > 1
def isStateBackward(self, state):
return state[1] < 0
def isStateSlowMotion(self, state):
return state[1] == 0 and state[2] > 1
def getHigher(self, n, lst):
for x in lst:
if x > n:
return x
return False
def getLower(self, n, lst):
lst = lst[:]
lst.reverse()
for x in lst:
if x < n:
return x
return False
def showAfterSeek(self):
if isinstance(self, InfoBarShowHide):
if isStandardInfoBar(self) and self.timeshiftEnabled():
for c in self.onPlayStateChanged:
c(self.seekstate)
else:
self.doShow()
def up(self):
pass
def down(self):
pass
def getSeek(self):
service = self.session.nav.getCurrentService()
if service is None:
return None
seek = service.seek()
if seek is None or not seek.isCurrentlySeekable():
return None
return seek
def isSeekable(self):
if self.getSeek() is None or (isStandardInfoBar(self) and not self.timeshiftEnabled()):
return False
return True
def __seekableStatusChanged(self):
# print "seekable status changed!"
if not self.isSeekable():
self["SeekActions"].setEnabled(False)
# print "not seekable, return to play"
self.setSeekState(self.SEEK_STATE_PLAY)
else:
self["SeekActions"].setEnabled(True)
# print "seekable"
def __serviceStarted(self):
self.fast_winding_hint_message_showed = False
self.setSeekState(self.SEEK_STATE_PLAY)
self.__seekableStatusChanged()
def setSeekState(self, state):
service = self.session.nav.getCurrentService()
if service is None:
return False
if not self.isSeekable():
if state not in (self.SEEK_STATE_PLAY, self.SEEK_STATE_PAUSE):
state = self.SEEK_STATE_PLAY
pauseable = service.pause()
if pauseable is None:
print "not pauseable."
state = self.SEEK_STATE_PLAY
self.seekstate = state
if pauseable is not None:
if self.seekstate[0]:
print "resolved to PAUSE"
pauseable.pause()
elif self.seekstate[1]:
if not pauseable.setFastForward(self.seekstate[1]):
print "resolved to FAST FORWARD"
else:
self.seekstate = self.SEEK_STATE_PLAY
print "FAST FORWARD not possible: resolved to PLAY"
elif self.seekstate[2]:
if not pauseable.setSlowMotion(self.seekstate[2]):
print "resolved to SLOW MOTION"
else:
self.seekstate = self.SEEK_STATE_PAUSE
print "SLOW MOTION not possible: resolved to PAUSE"
else:
print "resolved to PLAY"
pauseable.unpause()
for c in self.onPlayStateChanged:
c(self.seekstate)
self.checkSkipShowHideLock()
if hasattr(self, "ScreenSaverTimerStart"):
self.ScreenSaverTimerStart()
return True
def playpauseService(self):
if self.seekstate != self.SEEK_STATE_PLAY:
self.unPauseService()
else:
self.pauseService()
def okButton(self):
if self.seekstate == self.SEEK_STATE_PLAY:
return 0
elif self.seekstate == self.SEEK_STATE_PAUSE:
self.pauseService()
else:
self.unPauseService()
def pauseService(self):
if self.seekstate == self.SEEK_STATE_PAUSE:
if config.seek.on_pause.value == "play":
self.unPauseService()
elif config.seek.on_pause.value == "step":
self.doSeekRelative(1)
elif config.seek.on_pause.value == "last":
self.setSeekState(self.lastseekstate)
self.lastseekstate = self.SEEK_STATE_PLAY
else:
if self.seekstate != self.SEEK_STATE_EOF:
self.lastseekstate = self.seekstate
self.setSeekState(self.SEEK_STATE_PAUSE)
def unPauseService(self):
print "unpause"
if self.seekstate == self.SEEK_STATE_PLAY:
return 0
self.setSeekState(self.SEEK_STATE_PLAY)
def doSeek(self, pts):
seekable = self.getSeek()
if seekable is None:
return
seekable.seekTo(pts)
def doSeekRelative(self, pts):
seekable = self.getSeek()
if seekable is None:
return
prevstate = self.seekstate
if self.seekstate == self.SEEK_STATE_EOF:
if prevstate == self.SEEK_STATE_PAUSE:
self.setSeekState(self.SEEK_STATE_PAUSE)
else:
self.setSeekState(self.SEEK_STATE_PLAY)
seekable.seekRelative(pts<0 and -1 or 1, abs(pts))
if abs(pts) > 100 and config.usage.show_infobar_on_skip.value:
self.showAfterSeek()
def seekFwd(self):
seek = self.getSeek()
if seek and not (seek.isCurrentlySeekable() & 2):
if not self.fast_winding_hint_message_showed and (seek.isCurrentlySeekable() & 1):
self.session.open(MessageBox, _("No fast winding possible yet.. but you can use the number buttons to skip forward/backward!"), MessageBox.TYPE_INFO, timeout=10)
self.fast_winding_hint_message_showed = True
return
return 0 # trade as unhandled action
if self.seekstate == self.SEEK_STATE_PLAY:
self.setSeekState(self.makeStateForward(int(config.seek.enter_forward.value)))
elif self.seekstate == self.SEEK_STATE_PAUSE:
if len(config.seek.speeds_slowmotion.value):
self.setSeekState(self.makeStateSlowMotion(config.seek.speeds_slowmotion.value[-1]))
else:
self.setSeekState(self.makeStateForward(int(config.seek.enter_forward.value)))
elif self.seekstate == self.SEEK_STATE_EOF:
pass
elif self.isStateForward(self.seekstate):
speed = self.seekstate[1]
if self.seekstate[2]:
speed /= self.seekstate[2]
speed = self.getHigher(speed, config.seek.speeds_forward.value) or config.seek.speeds_forward.value[-1]
self.setSeekState(self.makeStateForward(speed))
elif self.isStateBackward(self.seekstate):
speed = -self.seekstate[1]
if self.seekstate[2]:
speed /= self.seekstate[2]
speed = self.getLower(speed, config.seek.speeds_backward.value)
if speed:
self.setSeekState(self.makeStateBackward(speed))
else:
self.setSeekState(self.SEEK_STATE_PLAY)
elif self.isStateSlowMotion(self.seekstate):
speed = self.getLower(self.seekstate[2], config.seek.speeds_slowmotion.value) or config.seek.speeds_slowmotion.value[0]
self.setSeekState(self.makeStateSlowMotion(speed))
def seekBack(self):
seek = self.getSeek()
if seek and not (seek.isCurrentlySeekable() & 2):
if not self.fast_winding_hint_message_showed and (seek.isCurrentlySeekable() & 1):
self.session.open(MessageBox, _("No fast winding possible yet.. but you can use the number buttons to skip forward/backward!"), MessageBox.TYPE_INFO, timeout=10)
self.fast_winding_hint_message_showed = True
return
return 0 # trade as unhandled action
seekstate = self.seekstate
if seekstate == self.SEEK_STATE_PLAY:
self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value)))
elif seekstate == self.SEEK_STATE_EOF:
self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value)))
self.doSeekRelative(-6)
elif seekstate == self.SEEK_STATE_PAUSE:
self.doSeekRelative(-1)
elif self.isStateForward(seekstate):
speed = seekstate[1]
if seekstate[2]:
speed /= seekstate[2]
speed = self.getLower(speed, config.seek.speeds_forward.value)
if speed:
self.setSeekState(self.makeStateForward(speed))
else:
self.setSeekState(self.SEEK_STATE_PLAY)
elif self.isStateBackward(seekstate):
speed = -seekstate[1]
if seekstate[2]:
speed /= seekstate[2]
speed = self.getHigher(speed, config.seek.speeds_backward.value) or config.seek.speeds_backward.value[-1]
self.setSeekState(self.makeStateBackward(speed))
elif self.isStateSlowMotion(seekstate):
speed = self.getHigher(seekstate[2], config.seek.speeds_slowmotion.value)
if speed:
self.setSeekState(self.makeStateSlowMotion(speed))
else:
self.setSeekState(self.SEEK_STATE_PAUSE)
def seekFwdManual(self):
self.session.openWithCallback(self.fwdSeekTo, MinuteInput)
def fwdSeekTo(self, minutes):
print "Seek", minutes, "minutes forward"
self.doSeekRelative(minutes * 60 * 90000)
def seekBackManual(self):
self.session.openWithCallback(self.rwdSeekTo, MinuteInput)
def rwdSeekTo(self, minutes):
print "rwdSeekTo"
self.doSeekRelative(-minutes * 60 * 90000)
def checkSkipShowHideLock(self):
wantlock = self.seekstate != self.SEEK_STATE_PLAY
if config.usage.show_infobar_on_skip.value:
if self.lockedBecauseOfSkipping and not wantlock:
self.unlockShow()
self.lockedBecauseOfSkipping = False
if wantlock and not self.lockedBecauseOfSkipping:
self.lockShow()
self.lockedBecauseOfSkipping = True
def calcRemainingTime(self):
seekable = self.getSeek()
if seekable is not None:
len = seekable.getLength()
try:
tmp = self.cueGetEndCutPosition()
if tmp:
len = (False, tmp)
except:
pass
pos = seekable.getPlayPosition()
speednom = self.seekstate[1] or 1
speedden = self.seekstate[2] or 1
if not len[0] and not pos[0]:
if len[1] <= pos[1]:
return 0
time = (len[1] - pos[1])*speedden/(90*speednom)
return time
return False
def __evEOF(self):
if self.seekstate == self.SEEK_STATE_EOF:
return
# if we are seeking forward, we try to end up ~1s before the end, and pause there.
seekstate = self.seekstate
if self.seekstate != self.SEEK_STATE_PAUSE:
self.setSeekState(self.SEEK_STATE_EOF)
if seekstate not in (self.SEEK_STATE_PLAY, self.SEEK_STATE_PAUSE): # if we are seeking
seekable = self.getSeek()
if seekable is not None:
seekable.seekTo(-1)
if seekstate == self.SEEK_STATE_PLAY: # regular EOF
self.doEofInternal(True)
else:
self.doEofInternal(False)
def doEofInternal(self, playing):
pass # Defined in subclasses
def __evSOF(self):
self.setSeekState(self.SEEK_STATE_PLAY)
self.doSeek(0)
# This is needed, because some Mediaplayer use InfoBarSeek but not InfoBarCueSheetSupport
def seekPreviousMark(self):
if isinstance(self, InfoBarCueSheetSupport):
self.jumpPreviousMark()
def seekNextMark(self):
if isinstance(self, InfoBarCueSheetSupport):
self.jumpNextMark()
from Screens.PVRState import PVRState, TimeshiftState
class InfoBarPVRState:
	"""Mixin that maintains a small PVR state dialog showing the current
	play/pause/trickmode symbol."""

	def __init__(self, screen=PVRState, force_show = False):
		self.onPlayStateChanged.append(self.__playStateChanged)
		self.pvrStateDialog = self.session.instantiateDialog(screen)
		self.onShow.append(self._mayShow)
		self.onHide.append(self.pvrStateDialog.hide)
		self.force_show = force_show

	def _mayShow(self):
		# Only display the dialog while the infobar is visible and we are
		# not in plain PLAY state.
		if self.shown and self.seekstate != self.SEEK_STATE_PLAY:
			self.pvrStateDialog.show()

	def __playStateChanged(self, state):
		self.pvrStateDialog["state"].setText(state[3])
		# if we return into "PLAY" state, ensure that the dialog gets hidden if there will be no infobar displayed
		hide_again = (not config.usage.show_infobar_on_skip.value
			and self.seekstate == self.SEEK_STATE_PLAY
			and not self.force_show)
		if hide_again:
			self.pvrStateDialog.hide()
		else:
			self._mayShow()
class TimeshiftLive(Screen):
	# Minimal marker screen shown while timeshift is enabled but live TV plays;
	# its content comes entirely from the skin.
	def __init__(self, session):
		Screen.__init__(self, session)
class InfoBarTimeshiftState(InfoBarPVRState):
	"""PVR-state variant used during timeshift: shows either the trickmode
	dialog or a "timeshift live" marker, with an auto-hide timer."""
	def __init__(self):
		InfoBarPVRState.__init__(self, screen=TimeshiftState, force_show=True)
		self.timeshiftLiveScreen = self.session.instantiateDialog(TimeshiftLive)
		self.onHide.append(self.timeshiftLiveScreen.hide)
		if isStandardInfoBar(self):
			# Hide the live marker whenever a second infobar pops up.
			self.secondInfoBarScreen and self.secondInfoBarScreen.onShow.append(self.timeshiftLiveScreen.hide)
			self.secondInfoBarScreenSimple and self.secondInfoBarScreenSimple.onShow.append(self.timeshiftLiveScreen.hide)
		self.timeshiftLiveScreen.hide()
		self.__hideTimer = eTimer()
		self.__hideTimer.callback.append(self.__hideTimeshiftState)
		self.onFirstExecBegin.append(self.pvrStateDialog.show)

	def _mayShow(self):
		if self.timeshiftEnabled():
			if isStandardInfoBar(self):
				if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
					self.secondInfoBarScreen.hide()
				if self.secondInfoBarScreenSimple and self.secondInfoBarScreenSimple.shown:
					self.secondInfoBarScreenSimple.hide()
			if self.timeshiftActivated():
				self.pvrStateDialog.show()
				self.timeshiftLiveScreen.hide()
			elif self.showTimeshiftState:
				self.pvrStateDialog.hide()
				self.timeshiftLiveScreen.show()
				self.showTimeshiftState = False
			# NOTE(review): startLongTimer is fed the config *index*, not a
			# seconds value — presumably intentional here; verify against
			# config.usage.infobar_timeout's definition.
			if self.seekstate == self.SEEK_STATE_PLAY and config.usage.infobar_timeout.index and (self.pvrStateDialog.shown or self.timeshiftLiveScreen.shown):
				self.__hideTimer.startLongTimer(config.usage.infobar_timeout.index)
		else:
			self.__hideTimeshiftState()

	def __hideTimeshiftState(self):
		self.pvrStateDialog.hide()
		self.timeshiftLiveScreen.hide()
class InfoBarShowMovies:
	"""Binds movie-list keys; the actual handlers are provided elsewhere."""
	# i don't really like this class.
	# it calls a not further specified "movie list" on up/down/movieList,
	# so this is not more than an action map
	def __init__(self):
		self["MovieListActions"] = HelpableActionMap(self, "InfobarMovieListActions",
			{
				"movieList": (self.showMovies, _("Open the movie list")),
				"up": (self.up, _("Open the movie list")),
				"down": (self.down, _("Open the movie list"))
			})
# InfoBarTimeshift requires InfoBarSeek, instantiated BEFORE!
# Hrmf.
#
# Timeshift works the following way:
# demux0 demux1 "TimeshiftActions" "TimeshiftActivateActions" "SeekActions"
# - normal playback TUNER unused PLAY enable disable disable
# - user presses "yellow" button. FILE record PAUSE enable disable enable
# - user presses pause again         FILE        record        PLAY               enable                  disable              enable
# - user fast forwards FILE record FF enable disable enable
# - end of timeshift buffer reached TUNER record PLAY enable enable disable
# - user backwards FILE record BACK # !! enable disable enable
#
# in other words:
# - when a service is playing, pressing the "timeshiftStart" button ("yellow") enables recording ("enables timeshift"),
# freezes the picture (to indicate timeshift), sets timeshiftMode ("activates timeshift")
# now, the service becomes seekable, so "SeekActions" are enabled, "TimeshiftEnableActions" are disabled.
# - the user can now PVR around
# - if it hits the end, the service goes into live mode ("deactivates timeshift", it's of course still "enabled")
#   the service loses its "seekable" state. It can still be paused, but just to activate timeshift right
# after!
# the seek actions will be disabled, but the timeshiftActivateActions will be enabled
# - if the user rewinds, or press pause, timeshift will be activated again
# note that a timeshift can be enabled ("recording") and
# activated (currently time-shifting).
class InfoBarTimeshift:
	"""Timeshift lifecycle: enable/disable the timeshift recording, activate
	playback from the buffer, and optionally save the buffer as a movie."""
	def __init__(self):
		self["TimeshiftActions"] = HelpableActionMap(self, "InfobarTimeshiftActions",
			{
				"timeshiftStart": (self.startTimeshift, _("Start timeshift")),  # the "yellow key"
				"timeshiftStop": (self.stopTimeshift, _("Stop timeshift"))      # currently undefined :), probably 'TV'
			}, prio=1)
		self["TimeshiftActivateActions"] = ActionMap(["InfobarTimeshiftActivateActions"],
			{
				"timeshiftActivateEnd": self.activateTimeshiftEnd, # something like "rewind key"
				"timeshiftActivateEndAndPause": self.activateTimeshiftEndAndPause # something like "pause key"
			}, prio=-1) # priority over record
		self["TimeshiftActivateActions"].setEnabled(False)
		self.ts_rewind_timer = eTimer()
		self.ts_rewind_timer.callback.append(self.rewindService)
		self.ts_start_delay_timer = eTimer()
		self.ts_start_delay_timer.callback.append(self.startTimeshiftWithoutPause)
		self.ts_current_event_timer = eTimer()
		self.ts_current_event_timer.callback.append(self.saveTimeshiftFileForEvent)
		self.save_timeshift_file = False
		self.timeshift_was_activated = False
		self.showTimeshiftState = False
		self.save_timeshift_only_current_event = False

		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evStart: self.__serviceStarted,
				iPlayableService.evSeekableStatusChanged: self.__seekableStatusChanged,
				iPlayableService.evEnd: self.__serviceEnd
			})

	def getTimeshift(self):
		# Returns the service's timeshift interface, or None.
		service = self.session.nav.getCurrentService()
		return service and service.timeshift()

	def timeshiftEnabled(self):
		ts = self.getTimeshift()
		return ts and ts.isTimeshiftEnabled()

	def timeshiftActivated(self):
		ts = self.getTimeshift()
		return ts and ts.isTimeshiftActive()

	def startTimeshift(self, pauseService = True):
		"""Enable timeshift recording; with pauseService also freeze playback."""
		print "enable timeshift"
		ts = self.getTimeshift()
		if ts is None:
			if not pauseService and not int(config.usage.timeshift_start_delay.value):
				self.session.open(MessageBox, _("Timeshift not possible!"), MessageBox.TYPE_ERROR, simple = True)
			print "no ts interface"
			return 0

		if ts.isTimeshiftEnabled():
			print "hu, timeshift already enabled?"
		else:
			# startTimeshift() returns 0 on success (non-zero means failure,
			# see the else branch below).
			if not ts.startTimeshift():
				# we remove the "relative time" for now.
				#self.pvrStateDialog["timeshift"].setRelative(time.time())

				if pauseService:
					# PAUSE.
					#self.setSeekState(self.SEEK_STATE_PAUSE)
					self.activateTimeshiftEnd(False)
					self.showTimeshiftState = True
				else:
					self.showTimeshiftState = False

				# enable the "TimeshiftEnableActions", which will override
				# the startTimeshift actions
				self.__seekableStatusChanged()

				# get current timeshift filename and calculate new
				self.save_timeshift_file = False
				self.save_timeshift_in_movie_dir = False
				self.setCurrentEventTimer()
				self.current_timeshift_filename = ts.getTimeshiftFilename()
				self.new_timeshift_filename = self.generateNewTimeshiftFileName()
			else:
				print "timeshift failed"

	def startTimeshiftWithoutPause(self):
		self.startTimeshift(False)

	def stopTimeshift(self):
		ts = self.getTimeshift()
		if ts and ts.isTimeshiftEnabled():
			if int(config.usage.timeshift_start_delay.value):
				ts.switchToLive()
			else:
				self.checkTimeshiftRunning(self.stopTimeshiftcheckTimeshiftRunningCallback)
		else:
			return 0

	def stopTimeshiftcheckTimeshiftRunningCallback(self, answer):
		ts = self.getTimeshift()
		if answer and ts:
			ts.stopTimeshift()
			self.pvrStateDialog.hide()
			self.setCurrentEventTimer()

			# disable actions
			self.__seekableStatusChanged()

	# activates timeshift, and seeks to (almost) the end
	def activateTimeshiftEnd(self, back = True):
		self.showTimeshiftState = True
		ts = self.getTimeshift()
		print "activateTimeshiftEnd"

		if ts is None:
			return

		if ts.isTimeshiftActive():
			print "!! activate timeshift called - but shouldn't this be a normal pause?"
			self.pauseService()
		else:
			print "play, ..."
			ts.activateTimeshift() # activate timeshift will automatically pause
			self.setSeekState(self.SEEK_STATE_PAUSE)
			seekable = self.getSeek()
			if seekable is not None:
				seekable.seekTo(-90000) # seek approx. 1 sec before end
			self.timeshift_was_activated = True
		if back:
			self.ts_rewind_timer.start(200, 1)

	def rewindService(self):
		self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value)))

	# generates only filename without path
	def generateNewTimeshiftFileName(self):
		name = "timeshift record"
		info = { }
		self.getProgramInfoAndEvent(info, name)

		serviceref = info["serviceref"]

		service_name = ""
		if isinstance(serviceref, eServiceReference):
			service_name = ServiceReference(serviceref).getServiceName()
		begin_date = strftime("%Y%m%d %H%M", localtime(time()))
		filename = begin_date + " - " + service_name

		# Apply the user's recording filename composition scheme.
		if config.recording.filename_composition.value == "short":
			filename = strftime("%Y%m%d", localtime(time())) + " - " + info["name"]
		elif config.recording.filename_composition.value == "long":
			filename += " - " + info["name"] + " - " + info["description"]
		else:
			filename += " - " + info["name"] # standard

		if config.recording.ascii_filenames.value:
			filename = ASCIItranslit.legacyEncode(filename)

		print "New timeshift filename: ", filename
		return filename

	# same as activateTimeshiftEnd, but pauses afterwards.
	def activateTimeshiftEndAndPause(self):
		print "activateTimeshiftEndAndPause"
		#state = self.seekstate
		self.activateTimeshiftEnd(False)

	def callServiceStarted(self):
		self.__serviceStarted()

	def __seekableStatusChanged(self):
		self["TimeshiftActivateActions"].setEnabled(not self.isSeekable() and self.timeshiftEnabled())
		state = self.getSeek() is not None and self.timeshiftEnabled()
		self["SeekActions"].setEnabled(state)
		if not state:
			self.setSeekState(self.SEEK_STATE_PLAY)
		self.restartSubtitle()

	def __serviceStarted(self):
		self.pvrStateDialog.hide()
		self.__seekableStatusChanged()
		if self.ts_start_delay_timer.isActive():
			self.ts_start_delay_timer.stop()
		if int(config.usage.timeshift_start_delay.value):
			# Auto-start timeshift shortly after a zap, if configured.
			self.ts_start_delay_timer.start(int(config.usage.timeshift_start_delay.value) * 1000, True)

	def checkTimeshiftRunning(self, returnFunction):
		"""Ask the user whether to stop (and optionally save) a running
		timeshift; calls returnFunction(bool) with the decision."""
		if self.timeshiftEnabled() and config.usage.check_timeshift.value and self.timeshift_was_activated:
			message = _("Stop timeshift?")
			if not self.save_timeshift_file:
				choice = [(_("Yes"), "stop"), (_("No"), "continue"), (_("Yes and save"), "save"), (_("Yes and save in movie dir"), "save_movie")]
			else:
				choice = [(_("Yes"), "stop"), (_("No"), "continue")]
				message += "\n" + _("Reminder, you have chosen to save timeshift file.")
				if self.save_timeshift_only_current_event:
					remaining = self.currentEventTime()
					if remaining > 0:
						message += "\n" + _("The %d min remaining before the end of the event.") % abs(remaining / 60)
			self.session.openWithCallback(boundFunction(self.checkTimeshiftRunningCallback, returnFunction), MessageBox, message, simple = True, list = choice)
		else:
			returnFunction(True)

	def checkTimeshiftRunningCallback(self, returnFunction, answer):
		if answer:
			if "movie" in answer:
				self.save_timeshift_in_movie_dir = True
			if "save" in answer:
				self.save_timeshift_file = True
				ts = self.getTimeshift()
				if ts:
					ts.saveTimeshiftFile()
					del ts
			if "continue" not in answer:
				self.saveTimeshiftFiles()
		returnFunction(answer and answer != "continue")

	# renames/moves timeshift files if requested
	def __serviceEnd(self):
		self.saveTimeshiftFiles()
		self.setCurrentEventTimer()
		self.timeshift_was_activated = False

	def saveTimeshiftFiles(self):
		# Move the timeshift buffer (and its .sc/.cuts companions) into the
		# movie directory under the generated recording name.
		if self.save_timeshift_file and self.current_timeshift_filename and self.new_timeshift_filename:
			if config.usage.timeshift_path.value and not self.save_timeshift_in_movie_dir:
				dirname = config.usage.timeshift_path.value
			else:
				dirname = defaultMoviePath()
			filename = getRecordingFilename(self.new_timeshift_filename, dirname) + ".ts"

			fileList = []
			fileList.append((self.current_timeshift_filename, filename))
			if fileExists(self.current_timeshift_filename + ".sc"):
				fileList.append((self.current_timeshift_filename + ".sc", filename + ".sc"))
			if fileExists(self.current_timeshift_filename + ".cuts"):
				fileList.append((self.current_timeshift_filename + ".cuts", filename + ".cuts"))

			moveFiles(fileList)
			self.save_timeshift_file = False
			self.setCurrentEventTimer()

	def currentEventTime(self):
		# Seconds remaining in the currently playing EPG event (0 if unknown).
		remaining = 0
		ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		if ref:
			epg = eEPGCache.getInstance()
			event = epg.lookupEventTime(ref, -1, 0)
			if event:
				now = int(time())
				start = event.getBeginTime()
				duration = event.getDuration()
				end = start + duration
				remaining = end - now
		return remaining

	def saveTimeshiftFileForEvent(self):
		if self.timeshiftEnabled() and self.save_timeshift_only_current_event and self.timeshift_was_activated and self.save_timeshift_file:
			message = _("Current event is over.\nSelect an option to save the timeshift file.")
			choice = [(_("Save and stop timeshift"), "save"), (_("Save and restart timeshift"), "restart"), (_("Don't save and stop timeshift"), "stop"), (_("Do nothing"), "continue")]
			self.session.openWithCallback(self.saveTimeshiftFileForEventCallback, MessageBox, message, simple = True, list = choice, timeout=15)

	def saveTimeshiftFileForEventCallback(self, answer):
		self.save_timeshift_only_current_event = False
		if answer:
			ts = self.getTimeshift()
			if ts and answer in ("save", "restart", "stop"):
				self.stopTimeshiftcheckTimeshiftRunningCallback(True)
				if answer in ("save", "restart"):
					ts.saveTimeshiftFile()
					del ts
					self.saveTimeshiftFiles()
				if answer == "restart":
					self.ts_start_delay_timer.start(1000, True)
				self.save_timeshift_file = False
				self.save_timeshift_in_movie_dir = False

	def setCurrentEventTimer(self, duration=0):
		# duration > 0 arms "save only up to the end of the current event".
		self.ts_current_event_timer.stop()
		self.save_timeshift_only_current_event = False
		if duration > 0:
			self.save_timeshift_only_current_event = True
			self.ts_current_event_timer.startLongTimer(duration)
from Screens.PiPSetup import PiPSetup
class InfoBarExtensions:
	"""Collects "extension" entries (name/fn/availability callables) from other
	mixins and presents them in a ChoiceBox, assigning shortcut keys."""
	EXTENSION_SINGLE = 0
	EXTENSION_LIST = 1

	def __init__(self):
		self.list = []

		self["InstantExtensionsActions"] = HelpableActionMap(self, "InfobarExtensions",
			{
				"extensions": (self.showExtensionSelection, _("Show extensions...")),
			}, 1) # lower priority

	def addExtension(self, extension, key = None, type = EXTENSION_SINGLE):
		"""Register one extension (or, with EXTENSION_LIST, a callable yielding many)."""
		self.list.append((type, extension, key))

	def updateExtension(self, extension, key = None):
		"""Add one extension to the working list, assigning the requested key or
		the first free one."""
		self.extensionsList.append(extension)
		# A requested key that is already taken falls back to auto-assignment.
		# (was dict.has_key(), removed in Python 3 — "in" works in both.)
		if key is not None and key in self.extensionKeys:
			key = None
		if key is None:
			for candidate in self.availableKeys:
				if candidate not in self.extensionKeys:
					key = candidate
					break
		if key is not None:
			self.extensionKeys[key] = len(self.extensionsList) - 1

	def updateExtensions(self):
		"""Rebuild extensionsList/extensionKeys from the registered entries."""
		self.extensionsList = []
		self.availableKeys = [ "1", "2", "3", "4", "5", "6", "7", "8", "9", "0", "red", "green", "yellow", "blue" ]
		self.extensionKeys = {}
		for type_, extension, key in self.list:
			if type_ == self.EXTENSION_SINGLE:
				self.updateExtension(extension, key)
			else:
				# EXTENSION_LIST: the callable returns (extension, key) pairs.
				for entry in extension():
					self.updateExtension(entry[0], entry[1])

	def showExtensionSelection(self):
		self.updateExtensions()
		extensionsList = self.extensionsList[:]
		keys = []
		entries = []  # renamed from "list" — avoid shadowing the builtin
		for x in self.availableKeys:
			if x in self.extensionKeys:
				extension = self.extensionsList[self.extensionKeys[x]]
				if extension[2]():  # availability predicate
					entries.append((extension[0](), extension))
					keys.append(x)
				# Keyed entries never appear a second time in the keyless tail.
				extensionsList.remove(extension)
		entries.extend([(x[0](), x) for x in extensionsList])
		keys += [""] * len(extensionsList)
		self.session.openWithCallback(self.extensionCallback, ChoiceBox, title=_("Please choose an extension..."), list=entries, keys=keys, skin_name="ExtensionsList", reorderConfig="extension_order", windowTitle=_("Extensions menu"))

	def extensionCallback(self, answer):
		if answer is not None:
			answer[1][1]()
from Tools.BoundFunction import boundFunction
import inspect
# depends on InfoBarExtensions
class InfoBarPlugins:
	"""Feeds extension-menu entries for plugins registered for the
	extensions menu (depends on InfoBarExtensions)."""

	def __init__(self):
		self.addExtension(extension = self.getPluginList, type = InfoBarExtensions.EXTENSION_LIST)

	def getPluginName(self, name):
		return name

	def getPluginList(self):
		entries = []
		for p in plugins.getPlugins(where = PluginDescriptor.WHERE_EXTENSIONSMENU):
			argnames = inspect.getargspec(p.__call__)[0]
			# Plugins taking (session) always fit; (session, servicelist) only
			# when we actually have a channel selection.
			accepts = len(argnames) == 1 or (len(argnames) == 2 and isinstance(self, InfoBarChannelSelection))
			if accepts:
				entries.append(((boundFunction(self.getPluginName, p.name), boundFunction(self.runPlugin, p), lambda: True), None, p.name))
		entries.sort(key = lambda e: e[2]) # sort by name
		return entries

	def runPlugin(self, plugin):
		kwargs = {"session": self.session}
		if isinstance(self, InfoBarChannelSelection):
			kwargs["servicelist"] = self.servicelist
		plugin(**kwargs)
from Components.Task import job_manager
class InfoBarJobman:
	"""Feeds extension-menu entries for pending background jobs from the
	task manager (depends on InfoBarExtensions)."""

	def __init__(self):
		self.addExtension(extension = self.getJobList, type = InfoBarExtensions.EXTENSION_LIST)

	def getJobList(self):
		entries = []
		for job in job_manager.getPendingJobs():
			entries.append(((boundFunction(self.getJobName, job), boundFunction(self.showJobView, job), lambda: True), None))
		return entries

	def getJobName(self, job):
		percent = int(100 * job.progress / float(job.end))
		return "%s: %s (%d%%)" % (job.getStatustext(), job.name, percent)

	def showJobView(self, job):
		from Screens.TaskView import JobView
		# Bring the job to the foreground while its view is open.
		job_manager.in_background = False
		self.session.openWithCallback(self.JobViewCB, JobView, job)

	def JobViewCB(self, in_background):
		job_manager.in_background = in_background
# depends on InfoBarExtensions
class InfoBarPiP:
def __init__(self):
try:
self.session.pipshown
except:
self.session.pipshown = False
self.lastPiPService = None
if SystemInfo["PIPAvailable"]:
self["PiPActions"] = HelpableActionMap(self, "InfobarPiPActions",
{
"activatePiP": (self.activePiP, self.activePiPName),
})
if (self.allowPiP):
self.addExtension((self.getShowHideName, self.showPiP, lambda: True), "blue")
self.addExtension((self.getMoveName, self.movePiP, self.pipShown), "green")
self.addExtension((self.getSwapName, self.swapPiP, self.pipShown), "yellow")
self.addExtension((self.getTogglePipzapName, self.togglePipzap, lambda: True), "red")
else:
self.addExtension((self.getShowHideName, self.showPiP, self.pipShown), "blue")
self.addExtension((self.getMoveName, self.movePiP, self.pipShown), "green")
self.lastPiPServiceTimeoutTimer = eTimer()
self.lastPiPServiceTimeoutTimer.callback.append(self.clearLastPiPService)
def pipShown(self):
return self.session.pipshown
def pipHandles0Action(self):
return self.pipShown() and config.usage.pip_zero_button.value != "standard"
def getShowHideName(self):
if self.session.pipshown:
return _("Disable Picture in Picture")
else:
return _("Activate Picture in Picture")
def getSwapName(self):
return _("Swap services")
def getMoveName(self):
return _("Move Picture in Picture")
def getTogglePipzapName(self):
slist = self.servicelist
if slist and slist.dopipzap:
return _("Zap focus to main screen")
return _("Zap focus to Picture in Picture")
def togglePipzap(self):
if not self.session.pipshown:
self.showPiP()
slist = self.servicelist
if slist and self.session.pipshown:
slist.togglePipzap()
if slist.dopipzap:
currentServicePath = slist.getCurrentServicePath()
slist.setCurrentServicePath(self.session.pip.servicePath, doZap=False)
self.session.pip.servicePath = currentServicePath
def showPiP(self):
self.lastPiPServiceTimeoutTimer.stop()
slist = self.servicelist
if self.session.pipshown:
if slist and slist.dopipzap:
self.togglePipzap()
if self.session.pipshown:
lastPiPServiceTimeout = int(config.usage.pip_last_service_timeout.value)
if lastPiPServiceTimeout >= 0:
self.lastPiPService = self.session.pip.getCurrentService()
if lastPiPServiceTimeout:
self.lastPiPServiceTimeoutTimer.startLongTimer(lastPiPServiceTimeout)
del self.session.pip
self.session.pipshown = False
if hasattr(self, "ScreenSaverTimerStart"):
self.ScreenSaverTimerStart()
else:
self.session.pip = self.session.instantiateDialog(PictureInPicture)
self.session.pip.show()
newservice = self.lastPiPService or self.session.nav.getCurrentlyPlayingServiceOrGroup() or (slist and slist.servicelist.getCurrent())
if self.session.pip.playService(newservice):
self.session.pipshown = True
self.session.pip.servicePath = slist and slist.getCurrentServicePath()
else:
newservice = self.session.nav.getCurrentlyPlayingServiceOrGroup() or (slist and slist.servicelist.getCurrent())
if self.session.pip.playService(newservice):
self.session.pipshown = True
self.session.pip.servicePath = slist and slist.getCurrentServicePath()
else:
self.session.pipshown = False
del self.session.pip
if self.session.pipshown and hasattr(self, "screenSaverTimer"):
self.screenSaverTimer.stop()
self.lastPiPService = None
def clearLastPiPService(self):
self.lastPiPService = None
def activePiP(self):
if self.servicelist and self.servicelist.dopipzap or not self.session.pipshown:
self.showPiP()
else:
self.togglePipzap()
def activePiPName(self):
if self.servicelist and self.servicelist.dopipzap:
return _("Disable Picture in Picture")
if self.session.pipshown:
return _("Zap focus to Picture in Picture")
else:
return _("Activate Picture in Picture")
def swapPiP(self):
	"""Exchange the main picture and the PiP picture, keeping both service paths consistent."""
	if self.pipShown():
		swapservice = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		pipref = self.session.pip.getCurrentService()
		# Only swap when both services exist and actually differ.
		if swapservice and pipref and pipref.toString() != swapservice.toString():
			slist = self.servicelist
			if slist:
				# Swap the stored navigation paths together with the services.
				currentServicePath = slist.getCurrentServicePath()
				currentBouquet = slist.getRoot()
				slist.setCurrentServicePath(self.session.pip.servicePath, doZap=False)
			self.session.pip.playService(swapservice)
			self.session.nav.playService(pipref, checkParentalControl=False, adjust=False)
			if slist:
				self.session.pip.servicePath = currentServicePath
				self.session.pip.servicePath[1] = currentBouquet
			if slist and slist.dopipzap:
				# Keep the channel-list selection in sync with the new PiP service.
				slist.setCurrentSelection(self.session.pip.getCurrentService())
def movePiP(self):
	"""Open the PiP position/size setup screen if a PiP is active."""
	if self.pipShown():
		self.session.open(PiPSetup, pip = self.session.pip)
def pipDoHandle0Action(self):
	"""Perform the configured "0" button PiP action (swap, swap+stop, or stop)."""
	action = config.usage.pip_zero_button.value
	if action == "swap":
		self.swapPiP()
	elif action == "swapstop":
		self.swapPiP()
		self.showPiP()
	elif action == "stop":
		self.showPiP()
from RecordTimer import parseEvent, RecordTimerEntry
class InfoBarInstantRecord:
	"""Instant Record - handles the instantRecord action in order to
	start/stop instant records"""
	def __init__(self):
		self["InstantRecordActions"] = HelpableActionMap(self, "InfobarInstantRecord",
			{
				"instantRecord": (self.instantRecord, _("Instant recording...")),
			})
		# Optional service to record instead of the currently playing one.
		self.SelectedInstantServiceRef = None
		if isStandardInfoBar(self):
			self.recording = []
		else:
			# Non-standard infobars (e.g. movie player) share the main InfoBar's recording list.
			from Screens.InfoBar import InfoBar
			InfoBarInstance = InfoBar.instance
			if InfoBarInstance:
				self.recording = InfoBarInstance.recording
	def moveToTrash(self, entry):
		"""Move the recording files of timer *entry* into the trash folder."""
		print "instantRecord stop and delete recording: ", entry.name
		import Tools.Trashcan
		trash = Tools.Trashcan.createTrashFolder(entry.Filename)
		from MovieSelection import moveServiceFiles
		moveServiceFiles(entry.Filename, trash, entry.name, allowCopy=False)
	def stopCurrentRecording(self, entry = -1):
		"""Ask for confirmation, then stop (and optionally trash) the instant
		recording at index *entry* of self.recording."""
		def confirm(answer=False):
			if answer:
				self.session.nav.RecordTimer.removeEntry(self.recording[entry])
				if self.deleteRecording:
					self.moveToTrash(self.recording[entry])
				self.recording.remove(self.recording[entry])
		if entry is not None and entry != -1:
			msg = _("Stop recording:")
			if self.deleteRecording:
				msg = _("Stop and delete recording:")
			msg += "\n"
			msg += " - " + self.recording[entry].name + "\n"
			self.session.openWithCallback(confirm, MessageBox, msg, MessageBox.TYPE_YESNO)
	def stopAllCurrentRecordings(self, list):
		"""Ask for confirmation, then stop (and optionally trash) every instant
		recording in *list* (a list of (timer, selected) tuples)."""
		def confirm(answer=False):
			if answer:
				for entry in list:
					self.session.nav.RecordTimer.removeEntry(entry[0])
					self.recording.remove(entry[0])
					if self.deleteRecording:
						self.moveToTrash(entry[0])
		msg = _("Stop recordings:")
		if self.deleteRecording:
			msg = _("Stop and delete recordings:")
		msg += "\n"
		for entry in list:
			msg += " - " + entry[0].name + "\n"
		self.session.openWithCallback(confirm, MessageBox, msg, MessageBox.TYPE_YESNO)
	def getProgramInfoAndEvent(self, info, name):
		"""Fill dict *info* with the service reference and, if found, the current
		event plus its name/description/eventid/end time."""
		info["serviceref"] = hasattr(self, "SelectedInstantServiceRef") and self.SelectedInstantServiceRef or self.session.nav.getCurrentlyPlayingServiceOrGroup()
		# try to get event info
		event = None
		try:
			epg = eEPGCache.getInstance()
			event = epg.lookupEventTime(info["serviceref"], -1, 0)
			if event is None:
				if hasattr(self, "SelectedInstantServiceRef") and self.SelectedInstantServiceRef:
					service_info = eServiceCenter.getInstance().info(self.SelectedInstantServiceRef)
					event = service_info and service_info.getEvent(self.SelectedInstantServiceRef)
				else:
					service = self.session.nav.getCurrentService()
					event = service and service.info().getEvent(0)
		except:
			pass
		info["event"] = event
		info["name"] = name
		info["description"] = ""
		info["eventid"] = None
		if event is not None:
			curEvent = parseEvent(event)
			info["name"] = curEvent[2]
			info["description"] = curEvent[3]
			info["eventid"] = curEvent[4]
			info["end"] = curEvent[1]
	def startInstantRecording(self, limitEvent = False):
		"""Create and register a RecordTimerEntry for the current service.

		When *limitEvent* is True the recording ends with the current event;
		otherwise it runs with autoincrease (effectively open-ended).
		"""
		begin = int(time())
		end = begin + 3600 # dummy
		name = "instant record"
		info = { }
		self.getProgramInfoAndEvent(info, name)
		serviceref = info["serviceref"]
		event = info["event"]
		if event is not None:
			if limitEvent:
				end = info["end"]
		else:
			if limitEvent:
				self.session.open(MessageBox, _("No event info found, recording indefinitely."), MessageBox.TYPE_INFO)
		if isinstance(serviceref, eServiceReference):
			serviceref = ServiceReference(serviceref)
		recording = RecordTimerEntry(serviceref, begin, end, info["name"], info["description"], info["eventid"], dirname = preferredInstantRecordPath())
		recording.dontSave = True
		if event is None or limitEvent == False:
			recording.autoincrease = True
			recording.setAutoincreaseEnd()
		simulTimerList = self.session.nav.RecordTimer.record(recording)
		if simulTimerList is None: # no conflict
			recording.autoincrease = False
			self.recording.append(recording)
		else:
			if len(simulTimerList) > 1: # with other recording
				name = simulTimerList[1].name
				name_date = ' '.join((name, strftime('%F %T', localtime(simulTimerList[1].begin))))
				print "[TIMER] conflicts with", name_date
				recording.autoincrease = True # start with max available length, then increment
				if recording.setAutoincreaseEnd():
					self.session.nav.RecordTimer.record(recording)
					self.recording.append(recording)
					self.session.open(MessageBox, _("Record time limited due to conflicting timer %s") % name_date, MessageBox.TYPE_INFO)
				else:
					self.session.open(MessageBox, _("Could not record due to conflicting timer %s") % name, MessageBox.TYPE_INFO)
			else:
				self.session.open(MessageBox, _("Could not record due to invalid service %s") % serviceref, MessageBox.TYPE_INFO)
			recording.autoincrease = False
	def isInstantRecordRunning(self):
		"""Return True if any tracked instant recording is currently running."""
		print "self.recording:", self.recording
		if self.recording:
			for x in self.recording:
				if x.isRunning():
					return True
		return False
	def recordQuestionCallback(self, answer):
		"""Handle the choice made in the instantRecord ChoiceBox.

		*answer* is a (text, key) tuple or None when the box was cancelled.
		"""
		print "pre:\n", self.recording
		if answer is None or answer[1] == "no":
			return
		list = []
		recording = self.recording[:]
		for x in recording:
			# Prune entries no longer known to the RecordTimer; collect running instant records.
			if not x in self.session.nav.RecordTimer.timer_list:
				self.recording.remove(x)
			elif x.dontSave and x.isRunning():
				list.append((x, False))
		self.deleteRecording = False
		if answer[1] == "changeduration":
			if len(self.recording) == 1:
				self.changeDuration(0)
			else:
				self.session.openWithCallback(self.changeDuration, TimerSelection, list)
		elif answer[1] == "addrecordingtime":
			if len(self.recording) == 1:
				self.addRecordingTime(0)
			else:
				self.session.openWithCallback(self.addRecordingTime, TimerSelection, list)
		elif answer[1] == "changeendtime":
			if len(self.recording) == 1:
				self.setEndtime(0)
			else:
				self.session.openWithCallback(self.setEndtime, TimerSelection, list)
		elif answer[1] == "timer":
			import TimerEdit
			self.session.open(TimerEdit.TimerEditList)
		elif answer[1] == "stop":
			if len(self.recording) == 1:
				self.stopCurrentRecording(0)
			else:
				self.session.openWithCallback(self.stopCurrentRecording, TimerSelection, list)
		elif answer[1] == "stopdelete":
			self.deleteRecording = True
			if len(self.recording) == 1:
				self.stopCurrentRecording(0)
			else:
				self.session.openWithCallback(self.stopCurrentRecording, TimerSelection, list)
		elif answer[1] == "stopall":
			self.stopAllCurrentRecordings(list)
		elif answer[1] == "stopdeleteall":
			self.deleteRecording = True
			self.stopAllCurrentRecordings(list)
		elif answer[1] in ( "indefinitely" , "manualduration", "manualendtime", "event"):
			self.startInstantRecording(limitEvent = answer[1] in ("event", "manualendtime") or False)
			if answer[1] == "manualduration":
				self.changeDuration(len(self.recording)-1)
			elif answer[1] == "manualendtime":
				self.setEndtime(len(self.recording)-1)
		elif "timeshift" in answer[1]:
			ts = self.getTimeshift()
			if ts:
				ts.saveTimeshiftFile()
				self.save_timeshift_file = True
				if "movie" in answer[1]:
					self.save_timeshift_in_movie_dir = True
				if "event" in answer[1]:
					remaining = self.currentEventTime()
					if remaining > 0:
						self.setCurrentEventTimer(remaining-15)
		print "after:\n", self.recording
	def setEndtime(self, entry):
		"""Open a date/time dialog to change the end time of recording index *entry*."""
		if entry is not None and entry >= 0:
			self.selectedEntry = entry
			self.endtime=ConfigClock(default = self.recording[self.selectedEntry].end)
			dlg = self.session.openWithCallback(self.TimeDateInputClosed, TimeDateInput, self.endtime)
			dlg.setTitle(_("Please change recording endtime"))
	def TimeDateInputClosed(self, ret):
		"""Apply the end time chosen in the TimeDateInput dialog (*ret* is (ok, time))."""
		if len(ret) > 1:
			if ret[0]:
				print "stopping recording at", strftime("%F %T", localtime(ret[1]))
				if self.recording[self.selectedEntry].end != ret[1]:
					self.recording[self.selectedEntry].autoincrease = False
				self.recording[self.selectedEntry].end = ret[1]
				self.session.nav.RecordTimer.timeChanged(self.recording[self.selectedEntry])
	def changeDuration(self, entry):
		"""Ask for a total recording duration in minutes for recording index *entry*."""
		if entry is not None and entry >= 0:
			self.selectedEntry = entry
			self.session.openWithCallback(self.inputCallback, InputBox, title=_("How many minutes do you want to record?"), text="5 ", maxSize=True, type=Input.NUMBER)
	def addRecordingTime(self, entry):
		"""Ask how many minutes to add to recording index *entry*."""
		if entry is not None and entry >= 0:
			self.selectedEntry = entry
			self.session.openWithCallback(self.inputAddRecordingTime, InputBox, title=_("How many minutes do you want add to record?"), text="5 ", maxSize=True, type=Input.NUMBER)
	def inputAddRecordingTime(self, value):
		"""InputBox callback: extend the selected recording by *value* minutes."""
		if value:
			print "added", int(value), "minutes for recording."
			entry = self.recording[self.selectedEntry]
			if int(value) != 0:
				entry.autoincrease = False
				entry.end += 60 * int(value)
				self.session.nav.RecordTimer.timeChanged(entry)
	def inputCallback(self, value):
		"""InputBox callback: stop the selected recording *value* minutes from now."""
		if value:
			print "stopping recording after", int(value), "minutes."
			entry = self.recording[self.selectedEntry]
			if int(value) != 0:
				entry.autoincrease = False
				entry.end = int(time()) + 60 * int(value)
				self.session.nav.RecordTimer.timeChanged(entry)
	def isTimerRecordRunning(self):
		"""Return True if a regular timer recording (not one of our instant
		recordings) is currently running."""
		identical = timers = 0
		for timer in self.session.nav.RecordTimer.timer_list:
			if timer.isRunning() and not timer.justplay:
				timers += 1
				if self.recording:
					for x in self.recording:
						if x.isRunning() and x == timer:
							identical += 1
		return timers > identical
	def instantRecord(self, serviceRef=None):
		"""Entry point for the instantRecord key: verify a record path exists,
		then build and open the action ChoiceBox."""
		self.SelectedInstantServiceRef = serviceRef
		pirr = preferredInstantRecordPath()
		if not findSafeRecordPath(pirr) and not findSafeRecordPath(defaultMoviePath()):
			if not pirr:
				pirr = ""
			self.session.open(MessageBox, _("Missing ") + "\n" + pirr +
				"\n" + _("No HDD found or HDD not initialized!"), MessageBox.TYPE_ERROR)
			return
		if isStandardInfoBar(self):
			common = ((_("Add recording (stop after current event)"), "event"),
				(_("Add recording (indefinitely)"), "indefinitely"),
				(_("Add recording (enter recording duration)"), "manualduration"),
				(_("Add recording (enter recording endtime)"), "manualendtime"),)
		else:
			common = ()
		if self.isInstantRecordRunning():
			title =_("A recording is currently running.\nWhat do you want to do?")
			list = common + \
				((_("Change recording (duration)"), "changeduration"),
				(_("Change recording (add time)"), "addrecordingtime"),
				(_("Change recording (endtime)"), "changeendtime"),)
			list += ((_("Stop recording"), "stop"),)
			if config.usage.movielist_trashcan.value:
				list += ((_("Stop and delete recording"), "stopdelete"),)
			if len(self.recording) > 1:
				list += ((_("Stop all current recordings"), "stopall"),)
				if config.usage.movielist_trashcan.value:
					list += ((_("Stop and delete all current recordings"), "stopdeleteall"),)
			if self.isTimerRecordRunning():
				list += ((_("Stop timer recording"), "timer"),)
			list += ((_("Do nothing"), "no"),)
		else:
			title=_("Start recording?")
			list = common
			if self.isTimerRecordRunning():
				list += ((_("Stop timer recording"), "timer"),)
			if isStandardInfoBar(self):
				list += ((_("Do not record"), "no"),)
		if isStandardInfoBar(self) and self.timeshiftEnabled():
			list = list + ((_("Save timeshift file"), "timeshift"),
				(_("Save timeshift file in movie directory"), "timeshift_movie"))
			if self.currentEventTime() > 0:
				list += ((_("Save timeshift only for current event"), "timeshift_event"),)
		if list:
			self.session.openWithCallback(self.recordQuestionCallback, ChoiceBox, title=title, list=list)
		else:
			return 0
from Tools.ISO639 import LanguageCodes
class InfoBarAudioSelection:
	"""Adds the audio track / audio options selection dialog to the infobar."""
	def __init__(self):
		self["AudioSelectionAction"] = HelpableActionMap(self, "InfobarAudioSelectionActions",
			{
				"audioSelection": (self.audioSelection, _("Audio options...")),
			})
	def audioSelection(self):
		"""Open the audio selection screen for the running service."""
		from Screens.AudioSelection import AudioSelection
		self.session.openWithCallback(self.audioSelected, AudioSelection, infobar=self)
	def audioSelected(self, ret=None):
		"""Callback when the audio selection screen closes; only logs the result."""
		print "[infobar::audioSelected]", ret
class InfoBarSubserviceSelection:
	"""Subservice handling: selection list, quick zap between subservices and
	adding a subservice to a bouquet."""
	def __init__(self):
		self["SubserviceSelectionAction"] = HelpableActionMap(self, "InfobarSubserviceSelectionActions",
			{
				"subserviceSelection": (self.subserviceSelection, _("Subservice list...")),
			})
		self["SubserviceQuickzapAction"] = HelpableActionMap(self, "InfobarSubserviceQuickzapActions",
			{
				"nextSubservice": (self.nextSubservice, _("Switch to next sub service")),
				"prevSubservice": (self.prevSubservice, _("Switch to previous sub service"))
			}, -1)
		# Quickzap stays disabled until a subservice was actually selected.
		self["SubserviceQuickzapAction"].setEnabled(False)
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evUpdatedEventInfo: self.checkSubservicesAvail
			})
		self.onClose.append(self.__removeNotifications)
		self.bsel = None
	def __removeNotifications(self):
		"""Detach the event-info callback when the screen closes."""
		self.session.nav.event.remove(self.checkSubservicesAvail)
	def checkSubservicesAvail(self):
		"""Disable quickzap when the current service offers no subservices."""
		service = self.session.nav.getCurrentService()
		subservices = service and service.subServices()
		if not subservices or subservices.getNumberOfSubservices() == 0:
			self["SubserviceQuickzapAction"].setEnabled(False)
	def nextSubservice(self):
		"""Zap to the next subservice."""
		self.changeSubservice(+1)
	def prevSubservice(self):
		"""Zap to the previous subservice."""
		self.changeSubservice(-1)
	def playSubservice(self, ref):
		"""Play subservice *ref*; the name is cleared when data[6] is unset."""
		if ref.getUnsignedData(6) == 0:
			ref.setName("")
		self.session.nav.playService(ref, checkParentalControl=False, adjust=False)
	def changeSubservice(self, direction):
		"""Zap *direction* (+1/-1) steps from the current subservice, wrapping around."""
		service = self.session.nav.getCurrentService()
		subservices = service and service.subServices()
		n = subservices and subservices.getNumberOfSubservices()
		if n and n > 0:
			selection = -1
			ref = self.session.nav.getCurrentlyPlayingServiceReference()
			idx = 0
			# Locate the currently playing subservice in the list.
			while idx < n:
				if subservices.getSubservice(idx).toString() == ref.toString():
					selection = idx
					break
				idx += 1
			if selection != -1:
				selection += direction
				if selection >= n:
					selection = 0
				elif selection < 0:
					selection = n - 1
				newservice = subservices.getSubservice(selection)
				if newservice.valid():
					del subservices
					del service
					self.playSubservice(newservice)
	def subserviceSelection(self):
		"""Open a ChoiceBox with all subservices plus quickzap / add-to-bouquet entries."""
		service = self.session.nav.getCurrentService()
		subservices = service and service.subServices()
		self.bouquets = self.servicelist.getBouquetList()
		n = subservices and subservices.getNumberOfSubservices()
		selection = 0
		if n and n > 0:
			ref = self.session.nav.getCurrentlyPlayingServiceReference()
			tlist = []
			idx = 0
			cnt_parent = 0
			while idx < n:
				i = subservices.getSubservice(idx)
				if i.toString() == ref.toString():
					selection = idx
				tlist.append((i.getName(), i))
				if i.getUnsignedData(6):
					cnt_parent += 1
				idx += 1
			if cnt_parent and self.bouquets and len(self.bouquets):
				keys = ["red", "blue", "", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" ] + [""] * n
				if config.usage.multibouquet.value:
					tlist = [(_("Quick zap"), "quickzap", service.subServices()), (_("Add to bouquet"), "CALLFUNC", self.addSubserviceToBouquetCallback), ("--", "")] + tlist
				else:
					tlist = [(_("Quick zap"), "quickzap", service.subServices()), (_("Add to favourites"), "CALLFUNC", self.addSubserviceToBouquetCallback), ("--", "")] + tlist
				# Offset the preselected index past the extra menu entries.
				selection += 3
			else:
				tlist = [(_("Quick zap"), "quickzap", service.subServices()), ("--", "")] + tlist
				keys = ["red", "", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" ] + [""] * n
				selection += 2
			self.session.openWithCallback(self.subserviceSelected, ChoiceBox, title=_("Please select a sub service..."), list = tlist, selection = selection, keys = keys, skin_name = "SubserviceSelection")
	def subserviceSelected(self, service):
		"""Handle the ChoiceBox result: open quickzap or play the chosen subservice."""
		del self.bouquets
		if not service is None:
			if isinstance(service[1], str):
				if service[1] == "quickzap":
					from Screens.SubservicesQuickzap import SubservicesQuickzap
					self.session.open(SubservicesQuickzap, service[2])
			else:
				self["SubserviceQuickzapAction"].setEnabled(True)
				self.playSubservice(service[1])
	def addSubserviceToBouquetCallback(self, service):
		"""CALLFUNC handler: ask for a target bouquet (or use the only one) for *service*."""
		if service and len(service) > 1 and isinstance(service[1], eServiceReference):
			self.selectedSubservice = service
			if self.bouquets is None:
				cnt = 0
			else:
				cnt = len(self.bouquets)
			if cnt > 1: # show bouquet list
				self.bsel = self.session.openWithCallback(self.bouquetSelClosed, BouquetSelector, self.bouquets, self.addSubserviceToBouquet)
			elif cnt == 1: # add to only one existing bouquet
				self.addSubserviceToBouquet(self.bouquets[0][1])
				self.session.open(MessageBox, _("Service has been added to the favourites."), MessageBox.TYPE_INFO, timeout=5)
	def bouquetSelClosed(self, confirmed):
		"""Callback when the bouquet selector closes; confirm when a bouquet was chosen."""
		self.bsel = None
		del self.selectedSubservice
		if confirmed:
			self.session.open(MessageBox, _("Service has been added to the selected bouquet."), MessageBox.TYPE_INFO, timeout=5)
	def addSubserviceToBouquet(self, dest):
		"""Add the previously selected subservice to bouquet *dest*."""
		self.servicelist.addServiceToBouquet(dest, self.selectedSubservice[1])
		if self.bsel:
			self.bsel.close(True)
		else:
			del self.selectedSubservice
class InfoBarRedButton:
	"""Handles the red button, currently dispatching to HbbTV activation hooks."""
	def __init__(self):
		self["RedButtonActions"] = HelpableActionMap(self, "InfobarRedButtonActions",
			{
				"activateRedButton": (self.activateRedButton, _("Red button...")),
			})
		# Callback lists that other components may append handlers to.
		self.onHBBTVActivation = [ ]
		self.onRedButtonActivation = [ ]
	def activateRedButton(self):
		"""Fire the HbbTV callbacks when the current service carries an HbbTV URL."""
		service = self.session.nav.getCurrentService()
		info = service and service.info()
		if info and info.getInfoString(iServiceInformation.sHBBTVUrl) != "":
			for x in self.onHBBTVActivation:
				x()
		elif False: # TODO: other red button services
			for x in self.onRedButtonActivation:
				x()
class InfoBarTimerButton:
	"""Opens the timer list from the infobar."""
	def __init__(self):
		self["TimerButtonActions"] = HelpableActionMap(self, "InfobarTimerButtonActions",
			{
				"timerSelection": (self.timerSelection, _("Timer selection...")),
			})
	def timerSelection(self):
		"""Open the timer edit list screen."""
		from Screens.TimerEdit import TimerEditList
		self.session.open(TimerEditList)
class InfoBarVmodeButton:
	"""Opens the letterbox-zoom (aspect policy) cycler from the infobar."""
	def __init__(self):
		self["VmodeButtonActions"] = HelpableActionMap(self, "InfobarVmodeButtonActions",
			{
				"vmodeSelection": (self.vmodeSelection, _("Letterbox zoom")),
			})
	def vmodeSelection(self):
		"""Open the VideoMode overlay screen."""
		self.session.open(VideoMode)
class VideoMode(Screen):
	"""Overlay screen that cycles the aspect-ratio policy (4:3 or 16:9 set,
	depending on the running service) and closes itself after one second."""
	def __init__(self, session):
		Screen.__init__(self, session)
		self["videomode"] = Label()
		self["actions"] = NumberActionMap( [ "InfobarVmodeButtonActions" ],
			{
				"vmodeSelection": self.selectVMode
			})
		self.Timer = eTimer()
		self.Timer.callback.append(self.quit)
		self.selectVMode()
	def selectVMode(self):
		"""Advance to the next aspect policy choice and (re)start the auto-close timer."""
		policy = config.av.policy_43
		if self.isWideScreen():
			policy = config.av.policy_169
		idx = policy.choices.index(policy.value)
		idx = (idx + 1) % len(policy.choices)
		policy.value = policy.choices[idx]
		self["videomode"].setText(policy.value)
		self.Timer.start(1000, True)
	def isWideScreen(self):
		"""Return True if the running service reports a widescreen aspect value."""
		from Components.Converter.ServiceInfo import WIDESCREEN
		service = self.session.nav.getCurrentService()
		info = service and service.info()
		# Guard against no running service: 'info' can be None here, which
		# previously raised an AttributeError on getInfo().
		return bool(info) and info.getInfo(iServiceInformation.sAspect) in WIDESCREEN
	def quit(self):
		"""Auto-close callback: stop the timer and close the screen."""
		self.Timer.stop()
		self.close()
class InfoBarAdditionalInfo:
	"""Exposes fixed boolean sources used by skins (recording/timeshift capability etc.)."""
	def __init__(self):
		# Recording/timeshift requires at least one hard disk.
		self["RecordingPossible"] = Boolean(fixed=harddiskmanager.HDDCount() > 0)
		self["TimeshiftPossible"] = self["RecordingPossible"]
		self["ExtensionsAvailable"] = Boolean(fixed=1)
		# TODO: these properties should be queried from the input device keymap
		self["ShowTimeshiftOnYellow"] = Boolean(fixed=0)
		self["ShowAudioOnYellow"] = Boolean(fixed=0)
		self["ShowRecordOnRed"] = Boolean(fixed=0)
class InfoBarNotifications:
	"""Displays queued Notifications (popups/dialogs) whenever this screen is executing."""
	def __init__(self):
		self.onExecBegin.append(self.checkNotifications)
		Notifications.notificationAdded.append(self.checkNotificationsIfExecing)
		self.onClose.append(self.__removeNotification)
	def __removeNotification(self):
		"""Detach from the notification queue when the screen closes."""
		Notifications.notificationAdded.remove(self.checkNotificationsIfExecing)
	def checkNotificationsIfExecing(self):
		"""Process pending notifications, but only while this screen is executing."""
		if self.execing:
			self.checkNotifications()
	def checkNotifications(self):
		"""Pop the next queued notification and show it.

		Each queue entry n is (callback, screen_class, args, kwargs, id).
		"""
		notifications = Notifications.notifications
		if notifications:
			n = notifications[0]
			del notifications[0]
			cb = n[0]
			# 'has_key' is deprecated; 'in' is equivalent on Python 2 dicts.
			if "onSessionOpenCallback" in n[3]:
				n[3]["onSessionOpenCallback"]()
				del n[3]["onSessionOpenCallback"]
			if cb:
				dlg = self.session.openWithCallback(cb, n[1], *n[2], **n[3])
			elif not Notifications.current_notifications and n[4] == "ZapError":
				# Zap errors are shown as a passive dialog dismissed by any key event.
				if "timeout" in n[3]:
					del n[3]["timeout"]
				n[3]["enable_input"] = False
				dlg = self.session.instantiateDialog(n[1], *n[2], **n[3])
				self.hide()
				dlg.show()
				self.notificationDialog = dlg
				eActionMap.getInstance().bindAction('', -maxint - 1, self.keypressNotification)
			else:
				dlg = self.session.open(n[1], *n[2], **n[3])
			# remember that this notification is currently active
			d = (n[4], dlg)
			Notifications.current_notifications.append(d)
			dlg.onClose.append(boundFunction(self.__notificationClosed, d))
	def closeNotificationInstantiateDialog(self):
		"""Tear down a passive ZapError dialog created by checkNotifications, if any."""
		if hasattr(self, "notificationDialog"):
			self.session.deleteDialog(self.notificationDialog)
			del self.notificationDialog
			eActionMap.getInstance().unbindAction('', self.keypressNotification)
	def keypressNotification(self, key, flag):
		"""Global key hook: any key event with a non-zero flag closes the passive dialog."""
		if flag:
			self.closeNotificationInstantiateDialog()
	def __notificationClosed(self, d):
		"""Remove *d* from the list of currently shown notifications."""
		Notifications.current_notifications.remove(d)
class InfoBarServiceNotifications:
def __init__(self):
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evEnd: self.serviceHasEnded
})
def serviceHasEnded(self):
print "service end!"
try:
self.setSeekState(self.SEEK_STATE_PLAY)
except:
pass
class InfoBarCueSheetSupport:
	"""Cut list (cue sheet) support: jumping between marks, toggling marks and
	resuming playback from the stored last position."""
	# Cut list entry types (second element of each (pts, what) tuple).
	CUT_TYPE_IN = 0
	CUT_TYPE_OUT = 1
	CUT_TYPE_MARK = 2
	CUT_TYPE_LAST = 3
	# Subclasses set this True to offer resume-from-last-position on service start.
	ENABLE_RESUME_SUPPORT = False
	def __init__(self, actionmap = "InfobarCueSheetActions"):
		self["CueSheetActions"] = HelpableActionMap(self, actionmap,
			{
				"jumpPreviousMark": (self.jumpPreviousMark, _("Jump to previous marked position")),
				"jumpNextMark": (self.jumpNextMark, _("Jump to next marked position")),
				"toggleMark": (self.toggleMark, _("Toggle a cut mark at the current position"))
			}, prio=1)
		self.cut_list = [ ]
		self.is_closing = False
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evStart: self.__serviceStarted,
				iPlayableService.evCuesheetChanged: self.downloadCuesheet,
			})
	def __serviceStarted(self):
		"""On service start, download the cut list and optionally resume playback."""
		if self.is_closing:
			return
		print "new service started! trying to download cuts!"
		self.downloadCuesheet()
		if self.ENABLE_RESUME_SUPPORT:
			# Prefer an explicit LAST mark from the cut list, else the stored resume point.
			for (pts, what) in self.cut_list:
				if what == self.CUT_TYPE_LAST:
					last = pts
					break
			else:
				last = getResumePoint(self.session)
			if last is None:
				return
			# only resume if at least 10 seconds ahead, or <10 seconds before the end.
			seekable = self.__getSeekable()
			if seekable is None:
				return # Should not happen?
			length = seekable.getLength() or (None,0)
			print "seekable.getLength() returns:", length
			# Hmm, this implies we don't resume if the length is unknown...
			if (last > 900000) and (not length[1] or (last < length[1] - 900000)):
				self.resume_point = last
				l = last / 90000
				if "ask" in config.usage.on_movie_start.value or not length[1]:
					Notifications.AddNotificationWithCallback(self.playLastCB, MessageBox, _("Do you want to resume this playback?") + "\n" + (_("Resume position at %s") % ("%d:%02d:%02d" % (l/3600, l%3600/60, l%60))), timeout=10, default="yes" in config.usage.on_movie_start.value)
				elif config.usage.on_movie_start.value == "resume":
					# TRANSLATORS: The string "Resuming playback" flashes for a moment
					# TRANSLATORS: at the start of a movie, when the user has selected
					# TRANSLATORS: "Resume from last position" as start behavior.
					# TRANSLATORS: The purpose is to notify the user that the movie starts
					# TRANSLATORS: in the middle somewhere and not from the beginning.
					# TRANSLATORS: (Some translators seem to have interpreted it as a
					# TRANSLATORS: question or a choice, but it is a statement.)
					Notifications.AddNotificationWithCallback(self.playLastCB, MessageBox, _("Resuming playback"), timeout=2, type=MessageBox.TYPE_INFO)
	def playLastCB(self, answer):
		"""Resume-question callback: seek to the stored resume point on confirmation."""
		if answer == True:
			self.doSeek(self.resume_point)
		self.hideAfterResume()
	def hideAfterResume(self):
		"""Hide the infobar after resuming, if this screen supports show/hide."""
		if isinstance(self, InfoBarShowHide):
			self.hide()
	def __getSeekable(self):
		"""Return the seek interface of the running service, or None."""
		service = self.session.nav.getCurrentService()
		if service is None:
			return None
		return service.seek()
	def cueGetCurrentPosition(self):
		"""Return the current play position in PTS units, or None if not seekable."""
		seek = self.__getSeekable()
		if seek is None:
			return None
		r = seek.getPlayPosition()
		if r[0]:
			return None
		return long(r[1])
	def cueGetEndCutPosition(self):
		"""Return the PTS of the final cut-out point, or False when there is none."""
		ret = False
		isin = True
		for cp in self.cut_list:
			if cp[1] == self.CUT_TYPE_OUT:
				if isin:
					isin = False
					ret = cp[0]
			elif cp[1] == self.CUT_TYPE_IN:
				isin = True
		return ret
	def jumpPreviousNextMark(self, cmp, start=False):
		"""Seek to the nearest cut point chosen by comparator *cmp*; return True on success."""
		current_pos = self.cueGetCurrentPosition()
		if current_pos is None:
			return False
		mark = self.getNearestCutPoint(current_pos, cmp=cmp, start=start)
		if mark is not None:
			pts = mark[0]
		else:
			return False
		self.doSeek(pts)
		return True
	def jumpPreviousMark(self):
		"""Jump to the previous mark (with a 5 second grace period)."""
		# we add 5 seconds, so if the play position is <5s after
		# the mark, the mark before will be used
		self.jumpPreviousNextMark(lambda x: -x-5*90000, start=True)
	def jumpNextMark(self):
		"""Jump to the next mark, or to the end when there is none."""
		if not self.jumpPreviousNextMark(lambda x: x-90000):
			self.doSeek(-1)
	def getNearestCutPoint(self, pts, cmp=abs, start=False):
		"""Return the cut-list entry nearest to *pts* according to comparator *cmp*,
		or None. With *start* True the beginning of the movie is a candidate too."""
		# can be optimized
		beforecut = True
		nearest = None
		bestdiff = -1
		instate = True
		if start:
			bestdiff = cmp(0 - pts)
			if bestdiff >= 0:
				nearest = [0, False]
		for cp in self.cut_list:
			if beforecut and cp[1] in (self.CUT_TYPE_IN, self.CUT_TYPE_OUT):
				beforecut = False
				if cp[1] == self.CUT_TYPE_IN: # Start is here, disregard previous marks
					diff = cmp(cp[0] - pts)
					if start and diff >= 0:
						nearest = cp
						bestdiff = diff
					else:
						nearest = None
						bestdiff = -1
			if cp[1] == self.CUT_TYPE_IN:
				instate = True
			elif cp[1] == self.CUT_TYPE_OUT:
				instate = False
			elif cp[1] in (self.CUT_TYPE_MARK, self.CUT_TYPE_LAST):
				diff = cmp(cp[0] - pts)
				if instate and diff >= 0 and (nearest is None or bestdiff > diff):
					nearest = cp
					bestdiff = diff
		return nearest
	def toggleMark(self, onlyremove=False, onlyadd=False, tolerance=5*90000, onlyreturn=False):
		"""Add or remove a MARK at the current position (within *tolerance* PTS)."""
		current_pos = self.cueGetCurrentPosition()
		if current_pos is None:
			print "not seekable"
			return
		nearest_cutpoint = self.getNearestCutPoint(current_pos)
		if nearest_cutpoint is not None and abs(nearest_cutpoint[0] - current_pos) < tolerance:
			if onlyreturn:
				return nearest_cutpoint
			if not onlyadd:
				self.removeMark(nearest_cutpoint)
		elif not onlyremove and not onlyreturn:
			self.addMark((current_pos, self.CUT_TYPE_MARK))
		if onlyreturn:
			return None
	def addMark(self, point):
		"""Insert *point* into the sorted cut list and push it to the service."""
		insort(self.cut_list, point)
		self.uploadCuesheet()
		self.showAfterCuesheetOperation()
	def removeMark(self, point):
		"""Remove *point* from the cut list and push the change to the service."""
		self.cut_list.remove(point)
		self.uploadCuesheet()
		self.showAfterCuesheetOperation()
	def showAfterCuesheetOperation(self):
		"""Briefly show the infobar after a cut-list change, if supported."""
		if isinstance(self, InfoBarShowHide):
			self.doShow()
	def __getCuesheet(self):
		"""Return the cue sheet interface of the running service, or None."""
		service = self.session.nav.getCurrentService()
		if service is None:
			return None
		return service.cueSheet()
	def uploadCuesheet(self):
		"""Write self.cut_list to the service's cue sheet."""
		cue = self.__getCuesheet()
		if cue is None:
			print "upload failed, no cuesheet interface"
			return
		cue.setCutList(self.cut_list)
	def downloadCuesheet(self):
		"""Read the service's cue sheet into self.cut_list (empty when unavailable)."""
		cue = self.__getCuesheet()
		if cue is None:
			print "download failed, no cuesheet interface"
			self.cut_list = [ ]
		else:
			self.cut_list = cue.getCutList()
class InfoBarSummary(Screen):
	"""Front display (LCD) summary for the InfoBar: service name, clock and event progress."""
	skin = """
	<screen position="0,0" size="132,64">
		<widget source="global.CurrentTime" render="Label" position="62,46" size="82,18" font="Regular;16" >
			<convert type="ClockToText">WithSeconds</convert>
		</widget>
		<widget source="session.RecordState" render="FixedLabel" text=" " position="62,46" size="82,18" zPosition="1" >
			<convert type="ConfigEntryTest">config.usage.blinking_display_clock_during_recording,True,CheckSourceBoolean</convert>
			<convert type="ConditionalShowHide">Blink</convert>
		</widget>
		<widget source="session.CurrentService" render="Label" position="6,4" size="120,42" font="Regular;18" >
			<convert type="ServiceName">Name</convert>
		</widget>
		<widget source="session.Event_Now" render="Progress" position="6,46" size="46,18" borderWidth="1" >
			<convert type="EventTime">Progress</convert>
		</widget>
	</screen>"""
# for picon: (path="piconlcd" will use LCD picons)
#		<widget source="session.CurrentService" render="Picon" position="6,0" size="120,64" path="piconlcd" >
#			<convert type="ServiceName">Reference</convert>
#		</widget>
class InfoBarSummarySupport:
	"""Provides the summary (front display) screen class for the InfoBar."""
	def __init__(self):
		pass
	def createSummary(self):
		"""Return the Screen class used for the front display summary."""
		return InfoBarSummary
class InfoBarMoviePlayerSummary(Screen):
	"""Front display (LCD) summary for the movie player: service name, clock and play position."""
	skin = """
	<screen position="0,0" size="132,64">
		<widget source="global.CurrentTime" render="Label" position="62,46" size="64,18" font="Regular;16" halign="right" >
			<convert type="ClockToText">WithSeconds</convert>
		</widget>
		<widget source="session.RecordState" render="FixedLabel" text=" " position="62,46" size="64,18" zPosition="1" >
			<convert type="ConfigEntryTest">config.usage.blinking_display_clock_during_recording,True,CheckSourceBoolean</convert>
			<convert type="ConditionalShowHide">Blink</convert>
		</widget>
		<widget source="session.CurrentService" render="Label" position="6,4" size="120,42" font="Regular;18" >
			<convert type="ServiceName">Name</convert>
		</widget>
		<widget source="session.CurrentService" render="Progress" position="6,46" size="56,18" borderWidth="1" >
			<convert type="ServicePosition">Position</convert>
		</widget>
	</screen>"""
class InfoBarMoviePlayerSummarySupport:
	"""Provides the summary (front display) screen class for the movie player."""
	def __init__(self):
		pass
	def createSummary(self):
		"""Return the Screen class used for the front display summary."""
		return InfoBarMoviePlayerSummary
class InfoBarTeletextPlugin:
	"""Hooks an installed teletext plugin onto the teletext key."""
	def __init__(self):
		self.teletext_plugin = None
		# NOTE: if several WHERE_TELETEXT plugins are installed, the last one wins.
		for p in plugins.getPlugins(PluginDescriptor.WHERE_TELETEXT):
			self.teletext_plugin = p
		if self.teletext_plugin is not None:
			self["TeletextActions"] = HelpableActionMap(self, "InfobarTeletextActions",
				{
					"startTeletext": (self.startTeletext, _("View teletext..."))
				})
		else:
			print "no teletext plugin found!"
	def startTeletext(self):
		"""Invoke the teletext plugin with the running service."""
		self.teletext_plugin and self.teletext_plugin(session=self.session, service=self.session.nav.getCurrentService())
class InfoBarSubtitleSupport(object):
	"""Subtitle support: selection dialog, display window and automatic
	re-enabling of cached subtitle tracks."""
	def __init__(self):
		object.__init__(self)
		self["SubtitleSelectionAction"] = HelpableActionMap(self, "InfobarSubtitleSelectionActions",
			{
				"subtitleSelection": (self.subtitleSelection, _("Subtitle selection...")),
			})
		self.selected_subtitle = None
		if isStandardInfoBar(self):
			self.subtitle_window = self.session.instantiateDialog(SubtitleDisplay)
		else:
			# Secondary infobars reuse the main InfoBar's subtitle window.
			from Screens.InfoBar import InfoBar
			self.subtitle_window = InfoBar.instance.subtitle_window
		self.subtitle_window.hide()
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evStart: self.__serviceChanged,
				iPlayableService.evEnd: self.__serviceChanged,
				iPlayableService.evUpdatedInfo: self.__updatedInfo
			})
	def getCurrentServiceSubtitle(self):
		"""Return the subtitle interface of the running service, or None."""
		service = self.session.nav.getCurrentService()
		return service and service.subtitle()
	def subtitleSelection(self):
		"""Open the subtitle selection screen if subtitles are available or active."""
		subtitle = self.getCurrentServiceSubtitle()
		subtitlelist = subtitle and subtitle.getSubtitleList()
		if self.selected_subtitle or subtitlelist and len(subtitlelist)>0:
			from Screens.AudioSelection import SubtitleSelection
			self.session.open(SubtitleSelection, self)
		else:
			return 0
	def __serviceChanged(self):
		"""Reset subtitle state on service start/end."""
		if self.selected_subtitle:
			self.selected_subtitle = None
			self.subtitle_window.hide()
	def __updatedInfo(self):
		"""Re-enable a cached subtitle track once service info is updated."""
		if not self.selected_subtitle:
			subtitle = self.getCurrentServiceSubtitle()
			cachedsubtitle = subtitle and subtitle.getCachedSubtitle()
			if cachedsubtitle:
				self.enableSubtitle(cachedsubtitle)
	def enableSubtitle(self, selectedSubtitle):
		"""Enable *selectedSubtitle* on the service, or disable subtitles when falsy."""
		subtitle = self.getCurrentServiceSubtitle()
		self.selected_subtitle = selectedSubtitle
		if subtitle and self.selected_subtitle:
			subtitle.enableSubtitles(self.subtitle_window.instance, self.selected_subtitle)
			self.subtitle_window.show()
		else:
			if subtitle:
				subtitle.disableSubtitles(self.subtitle_window.instance)
			self.subtitle_window.hide()
	def restartSubtitle(self):
		"""Re-apply the currently selected subtitle track."""
		if self.selected_subtitle:
			self.enableSubtitle(self.selected_subtitle)
class InfoBarServiceErrorPopupSupport:
	"""Shows (and clears) popup notifications for tuning/zap errors."""
	def __init__(self):
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evTuneFailed: self.__tuneFailed,
				iPlayableService.evTunedIn: self.__serviceStarted,
				iPlayableService.evStart: self.__serviceStarted
			})
		self.__serviceStarted()
	def __serviceStarted(self):
		"""Clear any pending zap-error popup and reset error tracking."""
		self.closeNotificationInstantiateDialog()
		self.last_error = None
		Notifications.RemovePopup(id = "ZapError")
	def __tuneFailed(self):
		"""Map the DVB state of the failed service to a user-visible error popup."""
		if not config.usage.hide_zap_errors.value or not config.usage.remote_fallback_enabled.value:
			service = self.session.nav.getCurrentService()
			info = service and service.info()
			error = info and info.getInfo(iServiceInformation.sDVBState)
			if not config.usage.remote_fallback_enabled.value and (error == eDVBServicePMTHandler.eventMisconfiguration or error == eDVBServicePMTHandler.eventNoResources):
				self.session.nav.currentlyPlayingServiceReference = None
				self.session.nav.currentlyPlayingServiceOrGroup = None
			# Suppress repeated popups for the same error code.
			if error == self.last_error:
				error = None
			else:
				self.last_error = error
			error = {
				eDVBServicePMTHandler.eventNoResources: _("No free tuner!"),
				eDVBServicePMTHandler.eventTuneFailed: _("Tune failed!"),
				eDVBServicePMTHandler.eventNoPAT: _("No data on transponder!\n(Timeout reading PAT)"),
				eDVBServicePMTHandler.eventNoPATEntry: _("Service not found!\n(SID not found in PAT)"),
				eDVBServicePMTHandler.eventNoPMT: _("Service invalid!\n(Timeout reading PMT)"),
				eDVBServicePMTHandler.eventNewProgramInfo: None,
				eDVBServicePMTHandler.eventTuned: None,
				eDVBServicePMTHandler.eventSOF: None,
				eDVBServicePMTHandler.eventEOF: None,
				eDVBServicePMTHandler.eventMisconfiguration: _("Service unavailable!\nCheck tuner configuration!"),
			}.get(error) # .get() returns None when the error code is not in the dict
			if error and not config.usage.hide_zap_errors.value:
				self.closeNotificationInstantiateDialog()
				if hasattr(self, "dishDialog") and not self.dishDialog.dishState():
					Notifications.AddPopup(text = error, type = MessageBox.TYPE_ERROR, timeout = 5, id = "ZapError")
class InfoBarPowersaver:
    """Mixin implementing the inactivity timer and the sleep timer.

    Both put the receiver into standby: the inactivity timer after a
    configurable period without key presses (optionally deferred during
    configured daily/weekday "block time" windows), the sleep timer after a
    fixed countdown set by the user.
    """

    def __init__(self):
        self.inactivityTimer = eTimer()
        self.inactivityTimer.callback.append(self.inactivityTimeout)
        self.restartInactiveTimer()
        self.sleepTimer = eTimer()
        self.sleepStartTime = 0  # absolute epoch time at which the sleep timer fires
        self.sleepTimer.callback.append(self.sleepTimerTimeout)
        # Lowest possible priority: observe every key press without
        # consuming it, just to restart the inactivity timer.
        eActionMap.getInstance().bindAction('', -maxint - 1, self.keypress)

    def keypress(self, key, flag):
        # Any keypress activity (non-zero flag) means the user is present.
        if flag:
            self.restartInactiveTimer()

    def restartInactiveTimer(self):
        # A configured value of 0 disables the inactivity timer.
        time = abs(int(config.usage.inactivity_timer.value))
        if time:
            self.inactivityTimer.startLongTimer(time)
        else:
            self.inactivityTimer.stop()

    def inactivityTimeout(self):
        # If a "block time" window is configured and we are inside it,
        # postpone the timeout until the window ends instead of prompting
        # for standby.
        if config.usage.inactivity_timer_blocktime.value:
            curtime = localtime(time())
            if curtime.tm_year > 1970: #check if the current time is valid
                duration = blocktime = extra_time = False
                if config.usage.inactivity_timer_blocktime_by_weekdays.value:
                    # Per-weekday block windows.
                    weekday = curtime.tm_wday
                    if config.usage.inactivity_timer_blocktime_day[weekday].value:
                        blocktime = True
                        begintime = tuple(config.usage.inactivity_timer_blocktime_begin_day[weekday].value)
                        endtime = tuple(config.usage.inactivity_timer_blocktime_end_day[weekday].value)
                        extra_time = config.usage.inactivity_timer_blocktime_extra_day[weekday].value
                        begintime_extra = tuple(config.usage.inactivity_timer_blocktime_extra_begin_day[weekday].value)
                        endtime_extra = tuple(config.usage.inactivity_timer_blocktime_extra_end_day[weekday].value)
                else:
                    # One block window shared by every day.
                    blocktime = True
                    begintime = tuple(config.usage.inactivity_timer_blocktime_begin.value)
                    endtime = tuple(config.usage.inactivity_timer_blocktime_end.value)
                    extra_time = config.usage.inactivity_timer_blocktime_extra.value
                    begintime_extra = tuple(config.usage.inactivity_timer_blocktime_extra_begin.value)
                    endtime_extra = tuple(config.usage.inactivity_timer_blocktime_extra_end.value)
                # From here on curtime is an (hour, minute, second) tuple so
                # it compares lexicographically with the window boundaries.
                curtime = (curtime.tm_hour, curtime.tm_min, curtime.tm_sec)
                # Each window may wrap around midnight (begin > end).
                if blocktime and (begintime <= endtime and (curtime >= begintime and curtime < endtime) or begintime > endtime and (curtime >= begintime or curtime < endtime)):
                    duration = (endtime[0]*3600 + endtime[1]*60) - (curtime[0]*3600 + curtime[1]*60 + curtime[2])
                elif extra_time and (begintime_extra <= endtime_extra and (curtime >= begintime_extra and curtime < endtime_extra) or begintime_extra > endtime_extra and (curtime >= begintime_extra or curtime < endtime_extra)):
                    duration = (endtime_extra[0]*3600 + endtime_extra[1]*60) - (curtime[0]*3600 + curtime[1]*60 + curtime[2])
                if duration:
                    if duration < 0:
                        # Window end is tomorrow (wrapped past midnight).
                        duration += 24*3600
                    self.inactivityTimer.startLongTimer(duration)
                    return
        if Screens.Standby.inStandby:
            # Already in standby: no need to ask the user.
            self.inactivityTimeoutCallback(True)
        else:
            # NOTE(review): "will got to standby" is a grammar slip in the
            # user-facing text; left unchanged (translations key on it).
            message = _("Your receiver will got to standby due to inactivity.") + "\n" + _("Do you want this?")
            # Defaults to standby when the user does not answer within 60s.
            self.session.openWithCallback(self.inactivityTimeoutCallback, MessageBox, message, timeout=60, simple=True, default=False, timeout_default=True)

    def inactivityTimeoutCallback(self, answer):
        if answer:
            self.goStandby()
        else:
            print "[InfoBarPowersaver] abort"

    def sleepTimerState(self):
        # Remaining sleep time in minutes; 0 when the timer is inactive.
        if self.sleepTimer.isActive():
            return (self.sleepStartTime - time()) / 60
        return 0

    def setSleepTimer(self, sleepTime):
        # sleepTime is in seconds; a falsy value disables the timer.
        print "[InfoBarPowersaver] set sleeptimer", sleepTime
        if sleepTime:
            m = abs(sleepTime / 60)
            message = _("The sleep timer has been activated.") + "\n" + _("And will put your receiver in standby over ") + ngettext("%d minute", "%d minutes", m) % m
            self.sleepTimer.startLongTimer(sleepTime)
            self.sleepStartTime = time() + sleepTime
        else:
            message = _("The sleep timer has been disabled.")
            self.sleepTimer.stop()
        Notifications.AddPopup(message, type = MessageBox.TYPE_INFO, timeout = 5)

    def sleepTimerTimeout(self):
        # If not already in standby, confirm with the user; the prompt
        # defaults to standby after 60 seconds without an answer.
        if not Screens.Standby.inStandby:
            list = [ (_("Yes"), True), (_("Extend sleeptimer 15 minutes"), "extend"), (_("No"), False) ]
            # NOTE(review): "will got to stand by" is a grammar slip in the
            # user-facing text; left unchanged (translations key on it).
            message = _("Your receiver will got to stand by due to the sleeptimer.")
            message += "\n" + _("Do you want this?")
            self.session.openWithCallback(self.sleepTimerTimeoutCallback, MessageBox, message, timeout=60, simple=True, list=list, default=False, timeout_default=True)

    def sleepTimerTimeoutCallback(self, answer):
        if answer == "extend":
            print "[InfoBarPowersaver] extend sleeptimer"
            self.setSleepTimer(900)
        elif answer:
            self.goStandby()
        else:
            print "[InfoBarPowersaver] abort"
            self.setSleepTimer(0)

    def goStandby(self):
        if not Screens.Standby.inStandby:
            print "[InfoBarPowersaver] goto standby"
            self.session.open(Screens.Standby.Standby)
class InfoBarHDMI:
    """Mixin toggling the main or PiP decoder between HDMI-in and the
    currently selected channel.

    Service type 8192 with reference '8192:0:1:0:...' is the HDMI input;
    invoking HDMIIn while it is active switches back to the channel list
    selection.
    """

    def HDMIIn(self):
        slist = self.servicelist
        if slist.dopipzap:
            # PiP zap mode: toggle on the picture-in-picture decoder.
            curref = self.session.pip.getCurrentService()
            if curref and curref.type != 8192:
                self.session.pip.playService(eServiceReference('8192:0:1:0:0:0:0:0:0:0:'))
            else:
                self.session.pip.playService(slist.servicelist.getCurrent())
        else:
            curref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
            if curref and curref.type != 8192:
                # When leaving a media file for HDMI-in, remember the
                # resume position first.
                if curref and curref.type != -1 and os.path.splitext(curref.toString().split(":")[10])[1].lower() in AUDIO_EXTENSIONS.union(MOVIE_EXTENSIONS, DVD_EXTENSIONS):
                    setResumePoint(self.session)
                self.session.nav.playService(eServiceReference('8192:0:1:0:0:0:0:0:0:0:'))
            elif isStandardInfoBar(self):
                self.session.nav.playService(slist.servicelist.getCurrent())
            else:
                # Non-standard infobar (e.g. movie player): return to the
                # service it was started with.
                self.session.nav.playService(self.cur_service)
| gpl-2.0 |
fargalaxy1/geonode-wagtail | geonode/services/migrations/0002_auto_20160821_1919.py | 5 | 2311 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Widen ``ServiceProfileRole.role`` to the full responsible-party role
    vocabulary (author, processor, publisher, ...).

    NOTE(review): several multi-line choice labels below are joined by
    implicit string concatenation WITHOUT a separating space (e.g.
    "...in a manner" + "such that..." renders as "mannersuch").  Left
    unchanged here on purpose: editing a historical migration would
    desynchronise it from the recorded migration state.
    """

    dependencies = [
        ('services', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='serviceprofilerole',
            name='role',
            field=models.CharField(help_text='function performed by the responsible party',
                                   max_length=255,
                                   choices=[(b'author',
                                             'party who authored the resource'),
                                            (b'processor',
                                             'party who has processed the data in a manner'
                                             'such that the resource has been modified'),
                                            (b'publisher', 'party who published the resource'),
                                            (b'custodian',
                                             'party that accepts accountability and responsibility'
                                             'for the data and ensures appropriate care and'
                                             'maintenance of the resource'),
                                            (b'pointOfContact',
                                             'party who can be contacted for acquiring knowledge'
                                             'about or acquisition of the resource'),
                                            (b'distributor',
                                             'party who distributes the resource'),
                                            (b'user', 'party who uses the resource'),
                                            (b'resourceProvider', 'party that supplies the resource'),
                                            (b'originator', 'party who created the resource'),
                                            (b'owner', 'party that owns the resource'),
                                            (b'principalInvestigator',
                                             'key party responsible for gathering information'
                                             'and conducting research')]),
        ),
    ]
| gpl-3.0 |
sangwook236/sangwook-library | python/src/swl/machine_learning/imgaug_batch_manager.py | 2 | 7654 | import os
import numpy as np
import imgaug as ia
from swl.machine_learning.batch_manager import BatchManager, FileBatchManager
#--------------------------------------------------------------------
# ImgaugBatchManager.
# Generates and augments batches using imgaug library in background processes.
class ImgaugBatchManager(BatchManager):
    """Generates and augments batches using the imgaug library in background processes.

    An ``ia.BatchLoader`` produces raw batches in a worker process and an
    ``ia.BackgroundAugmenter`` applies the augmenter off the main process.
    """

    def __init__(self, augmenter, images, labels, batch_size, shuffle=True, is_label_augmented=False, is_time_major=False):
        super().__init__()

        self._augmenter = augmenter      # imgaug augmenter (sequence)
        self._images = images            # full image array; batch axis depends on is_time_major
        self._labels = labels            # labels aligned with images on the batch axis
        self._batch_size = batch_size
        self._shuffle = shuffle
        self._is_label_augmented = is_label_augmented  # True: labels get the same transforms as images

        # Axis 1 is the batch axis for time-major data, axis 0 otherwise.
        batch_axis = 1 if is_time_major else 0
        self._num_examples, self._num_steps = 0, 0
        if self._images is not None:
            self._num_examples = self._images.shape[batch_axis]
            # Ceiling division: the final batch may be smaller than batch_size.
            self._num_steps = ((self._num_examples - 1) // batch_size + 1) if self._num_examples > 0 else 0
        #if self._images is None:
        if self._num_examples <= 0:
            raise ValueError('Invalid argument')

    def getBatches(self, *args, **kwargs):
        """Yield (augmented images, labels) pairs, one per batch step."""
        if self._is_label_augmented:
            # FIXME [fix] >> Do not check.
            batch_loader = ia.BatchLoader(self._loadBatchPairs)
            # Deterministic mode: images and their labels (carried in the
            # keypoints slot) receive identical random transforms.
            augmenter_det = self._augmenter.to_deterministic()
            bg_augmenter = ia.BackgroundAugmenter(batch_loader, augmenter_det)

            while True:
                batch = bg_augmenter.get_batch()
                if batch is None:
                    break
                yield batch.images_aug, batch.keypoints_aug

            batch_loader.terminate()
            bg_augmenter.terminate()
        else:
            batch_loader = ia.BatchLoader(self._loadBatches)
            bg_augmenter = ia.BackgroundAugmenter(batch_loader, self._augmenter)

            while True:
                batch = bg_augmenter.get_batch()
                if batch is None:
                    break
                #images = batch.images
                #images_aug = batch.images_aug
                #keypoints = batch.keypoints
                #keypoints_aug = batch.keypoints_aug
                #data = batch.data
                # batch.data carries the example indices, used here to look
                # up the (un-augmented) labels.
                yield batch.images_aug, self._labels[batch.data]

            batch_loader.terminate()
            bg_augmenter.terminate()

    # A generator that loads batches from a numpy array.
    def _loadBatches(self):
        indices = np.arange(self._num_examples)
        if self._shuffle:
            np.random.shuffle(indices)

        for step in range(self._num_steps):
            start = step * self._batch_size
            end = start + self._batch_size
            batch_indices = indices[start:end]
            if batch_indices.size > 0:  # If batch_indices is non-empty.
                batch_images = self._images[batch_indices]
                if batch_images.size > 0:  # If batch_images is non-empty.
                    # Create the batch object to send to the background processes.
                    yield ia.Batch(images=batch_images, data=batch_indices)

    # A generator that loads batches from a numpy array.
    def _loadBatchPairs(self):
        indices = np.arange(self._num_examples)
        if self._shuffle:
            np.random.shuffle(indices)

        for step in range(self._num_steps):
            start = step * self._batch_size
            end = start + self._batch_size
            batch_indices = indices[start:end]
            if batch_indices.size > 0:  # If batch_indices is non-empty.
                batch_images = self._images[batch_indices]
                batch_labels = self._labels[batch_indices]
                if batch_images.size > 0 and batch_labels.size > 0:  # If batch_images and batch_labels are non-empty.
                    # Create the batch object to send to the background processes.
                    #yield ia.Batch(images=batch_images, data=batch_labels)
                    yield ia.Batch(images=batch_images, keypoints=batch_labels)
#--------------------------------------------------------------------
# ImgaugBatchManagerWithFileInput.
# Loads dataset from multiple npy files.
# Generates and augment batches using imgaug library in background processes.
class ImgaugBatchManagerWithFileInput(ImgaugBatchManager):
    """Batch manager whose dataset is loaded from pairs of .npy files.

    Each (image_filepath, label_filepath) pair is loaded and stacked along
    the first axis before delegating to ImgaugBatchManager.
    """

    def __init__(self, augmenter, npy_filepath_pairs, batch_size, shuffle=True, is_label_augmented=False, is_time_major=False):
        image_chunks, label_chunks = [], []
        for image_path, label_path in npy_filepath_pairs:
            image_chunks.append(np.load(image_path))
            label_chunks.append(np.load(label_path))
        images = np.concatenate(image_chunks, axis=0) if image_chunks else None
        labels = np.concatenate(label_chunks, axis=0) if label_chunks else None
        super().__init__(augmenter, images, labels, batch_size, shuffle, is_label_augmented, is_time_major)
#--------------------------------------------------------------------
# ImgaugFileBatchManager.
# Generates, augments, saves, and loads batches through npy files using imgaug library.
class ImgaugFileBatchManager(FileBatchManager):
    """Generates, augments, saves, and loads batches through .npy files using imgaug.

    ``putBatches`` shuffles, augments, and writes each batch to a directory;
    ``getBatches`` streams them back in the same step order.
    """

    def __init__(self, augmenter, images, labels, batch_size, shuffle=True, is_label_augmented=False, is_time_major=False, image_file_format=None, label_file_format=None):
        super().__init__()

        self._augmenter = augmenter
        self._images = images
        self._labels = labels
        self._batch_size = batch_size
        self._shuffle = shuffle
        self._is_label_augmented = is_label_augmented
        # {} in the format strings is filled with the batch step index.
        self._image_file_format = 'batch_images_{}.npy' if image_file_format is None else image_file_format
        self._label_file_format = 'batch_labels_{}.npy' if label_file_format is None else label_file_format

        # Axis 1 is the batch axis for time-major data, axis 0 otherwise.
        batch_axis = 1 if is_time_major else 0
        self._num_examples, self._num_steps = 0, 0
        if self._images is not None:
            self._num_examples = self._images.shape[batch_axis]
            # Ceiling division: the final batch may be smaller than batch_size.
            self._num_steps = ((self._num_examples - 1) // self._batch_size + 1) if self._num_examples > 0 else 0
        #if self._images is None:
        if self._num_examples <= 0:
            raise ValueError('Invalid argument')

    def getBatches(self, dir_path, *args, **kwargs):
        """Yield (images, labels) batches previously written by putBatches()."""
        for step in range(self._num_steps):
            batch_images = np.load(os.path.join(dir_path, self._image_file_format.format(step)))
            batch_labels = np.load(os.path.join(dir_path, self._label_file_format.format(step)))
            yield batch_images, batch_labels

    def putBatches(self, dir_path, *args, **kwargs):
        """Augment every batch and save it into ``dir_path`` as .npy files."""
        indices = np.arange(self._num_examples)
        if self._shuffle:
            np.random.shuffle(indices)

        for step in range(self._num_steps):
            start = step * self._batch_size
            end = start + self._batch_size
            batch_indices = indices[start:end]
            if batch_indices.size > 0:  # If batch_indices is non-empty.
                batch_images = self._images[batch_indices]
                batch_labels = self._labels[batch_indices]
                if batch_images.size > 0 and batch_labels.size > 0:  # If batch_images and batch_labels are non-empty.
                    if self._is_label_augmented:
                        # A deterministic augmenter applies the same random
                        # transforms to the images and their label maps.
                        augseq_det = self._augmenter.to_deterministic()  # Call this for each batch again, NOT only once at the start.
                        batch_images = augseq_det.augment_images(batch_images)
                        batch_labels = augseq_det.augment_images(batch_labels)
                    else:
                        batch_images = self._augmenter.augment_images(batch_images)
                    np.save(os.path.join(dir_path, self._image_file_format.format(step)), batch_images)
                    np.save(os.path.join(dir_path, self._label_file_format.format(step)), batch_labels)
#--------------------------------------------------------------------
# ImgaugFileBatchManagerWithFileInput.
# Loads dataset from multiple npy files.
# Generates, augments, saves, and loads batches through npy files using imgaug library.
class ImgaugFileBatchManagerWithFileInput(ImgaugFileBatchManager):
    """File-backed batch manager whose dataset is read from pairs of .npy files.

    Each (image_filepath, label_filepath) pair is loaded and stacked along
    the first axis before delegating to ImgaugFileBatchManager.
    """

    def __init__(self, augmenter, npy_filepath_pairs, batch_size, shuffle=True, is_label_augmented=False, is_time_major=False):
        image_chunks, label_chunks = [], []
        for image_path, label_path in npy_filepath_pairs:
            image_chunks.append(np.load(image_path))
            label_chunks.append(np.load(label_path))
        images = np.concatenate(image_chunks, axis=0) if image_chunks else None
        labels = np.concatenate(label_chunks, axis=0) if label_chunks else None
        super().__init__(augmenter, images, labels, batch_size, shuffle, is_label_augmented, is_time_major)
| gpl-2.0 |
hpcugent/easybuild-framework | easybuild/toolchains/pgi.py | 1 | 1780 | ##
# Copyright 2015 Bart Oldeman
#
# This file is triple-licensed under GPLv2 (see below), MIT, and
# BSD three-clause licenses.
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for PGI compiler toolchain.
:author: Bart Oldeman (McGill University, Calcul Quebec, Compute Canada)
"""
from easybuild.toolchains.compiler.pgi import Pgi
from easybuild.toolchains.gcccore import GCCcore
from easybuild.tools.toolchain import DUMMY_TOOLCHAIN_NAME
class PgiToolchain(Pgi):
    """Simple toolchain with just the PGI compilers."""
    NAME = 'PGI'
    # use GCCcore as subtoolchain rather than GCC, since two 'real' compiler-only toolchains don't mix well,
    # in particular in a hierarchical module naming scheme
    SUBTOOLCHAIN = [GCCcore.NAME, DUMMY_TOOLCHAIN_NAME]
    # NOTE(review): OPTIONAL semantics come from EasyBuild's toolchain
    # framework (not visible here) — presumably marks this as a required,
    # non-skippable toolchain level; confirm against the base class.
    OPTIONAL = False
| gpl-2.0 |
mollstam/UnrealPy | UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/Python-2.7.10/Tools/i18n/msgfmt.py | 32 | 6488 | #! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
# Written by Martin v. Löwis <loewis@informatik.hu-berlin.de>
"""Generate binary message catalog from textual translation description.
This program converts a textual Uniforum-style message catalog (.po file) into
a binary GNU catalog (.mo file). This is essentially the same function as the
GNU msgfmt program, however, it is a simpler implementation.
Usage: msgfmt.py [OPTIONS] filename.po
Options:
-o file
--output-file=file
Specify the output file to write to. If omitted, output will go to a
file named filename.mo (based off the input file name).
-h
--help
Print this message and exit.
-V
--version
Display version information and exit.
"""
import os
import sys
import ast
import getopt
import struct
import array
__version__ = "1.1"

# Global catalog of accepted translations, msgid -> msgstr (filled by add()).
MESSAGES = {}
def usage(code, msg=''):
    """Print the module docstring (and an optional message) to stderr,
    then exit with the given status code."""
    print >> sys.stderr, __doc__
    if msg:
        print >> sys.stderr, msg
    sys.exit(code)
def add(id, str, fuzzy):
    "Add a non-fuzzy translation to the dictionary."
    global MESSAGES
    # Fuzzy or empty translations are dropped, matching GNU msgfmt's default.
    if not fuzzy and str:
        MESSAGES[id] = str
def generate():
"Return the generated output."
global MESSAGES
keys = MESSAGES.keys()
# the keys are sorted in the .mo file
keys.sort()
offsets = []
ids = strs = ''
for id in keys:
# For each string, we need size and file offset. Each string is NUL
# terminated; the NUL does not count into the size.
offsets.append((len(ids), len(id), len(strs), len(MESSAGES[id])))
ids += id + '\0'
strs += MESSAGES[id] + '\0'
output = ''
# The header is 7 32-bit unsigned integers. We don't use hash tables, so
# the keys start right after the index tables.
# translated string.
keystart = 7*4+16*len(keys)
# and the values start after the keys
valuestart = keystart + len(ids)
koffsets = []
voffsets = []
# The string table first has the list of keys, then the list of values.
# Each entry has first the size of the string, then the file offset.
for o1, l1, o2, l2 in offsets:
koffsets += [l1, o1+keystart]
voffsets += [l2, o2+valuestart]
offsets = koffsets + voffsets
output = struct.pack("Iiiiiii",
0x950412deL, # Magic
0, # Version
len(keys), # # of entries
7*4, # start of key index
7*4+len(keys)*8, # start of value index
0, 0) # size and offset of hash table
output += array.array("i", offsets).tostring()
output += ids
output += strs
return output
def make(filename, outfile):
    """Parse the .po file *filename* and write the binary .mo catalog.

    *outfile* defaults to the input name with a .mo extension.  The process
    exits with status 1 on I/O or syntax errors.
    """
    # Parser states: currently collecting a msgid or a msgstr.
    ID = 1
    STR = 2

    # Compute .mo name from .po name and arguments
    if filename.endswith('.po'):
        infile = filename
    else:
        infile = filename + '.po'
    if outfile is None:
        outfile = os.path.splitext(infile)[0] + '.mo'

    try:
        lines = open(infile).readlines()
    except IOError, msg:
        print >> sys.stderr, msg
        sys.exit(1)

    section = None
    fuzzy = 0

    # Parse the catalog
    lno = 0
    for l in lines:
        lno += 1
        # If we get a comment line after a msgstr, this is a new entry
        if l[0] == '#' and section == STR:
            add(msgid, msgstr, fuzzy)
            section = None
            fuzzy = 0
        # Record a fuzzy mark
        if l[:2] == '#,' and 'fuzzy' in l:
            fuzzy = 1
        # Skip comments
        if l[0] == '#':
            continue
        # Now we are in a msgid section, output previous section
        if l.startswith('msgid') and not l.startswith('msgid_plural'):
            if section == STR:
                add(msgid, msgstr, fuzzy)
            section = ID
            l = l[5:]
            msgid = msgstr = ''
            is_plural = False
        # This is a message with plural forms
        elif l.startswith('msgid_plural'):
            if section != ID:
                print >> sys.stderr, 'msgid_plural not preceded by msgid on %s:%d' %\
                    (infile, lno)
                sys.exit(1)
            l = l[12:]
            msgid += '\0' # separator of singular and plural
            is_plural = True
        # Now we are in a msgstr section
        elif l.startswith('msgstr'):
            section = STR
            if l.startswith('msgstr['):
                if not is_plural:
                    print >> sys.stderr, 'plural without msgid_plural on %s:%d' %\
                        (infile, lno)
                    sys.exit(1)
                l = l.split(']', 1)[1]
                if msgstr:
                    msgstr += '\0' # Separator of the various plural forms
            else:
                if is_plural:
                    print >> sys.stderr, 'indexed msgstr required for plural on %s:%d' %\
                        (infile, lno)
                    sys.exit(1)
                l = l[6:]
        # Skip empty lines
        l = l.strip()
        if not l:
            continue
        # literal_eval decodes the quoted, escaped .po string safely.
        l = ast.literal_eval(l)
        if section == ID:
            msgid += l
        elif section == STR:
            msgstr += l
        else:
            print >> sys.stderr, 'Syntax error on %s:%d' % (infile, lno), \
                'before:'
            print >> sys.stderr, l
            sys.exit(1)
    # Add last entry
    if section == STR:
        add(msgid, msgstr, fuzzy)

    # Compute output
    output = generate()

    try:
        open(outfile,"wb").write(output)
    except IOError,msg:
        print >> sys.stderr, msg
def main():
    """Command-line entry point: parse the options and convert each input file."""
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'hVo:',
                                   ['help', 'version', 'output-file='])
    except getopt.error, msg:
        usage(1, msg)

    outfile = None
    # parse options
    for opt, arg in opts:
        if opt in ('-h', '--help'):
            usage(0)
        elif opt in ('-V', '--version'):
            print >> sys.stderr, "msgfmt.py", __version__
            sys.exit(0)
        elif opt in ('-o', '--output-file'):
            outfile = arg
    # do it
    if not args:
        print >> sys.stderr, 'No input file given'
        print >> sys.stderr, "Try `msgfmt --help' for more information."
        return

    # Note: with -o, every input is written to the same output file, the
    # last one winning (matches the historical behaviour of this script).
    for filename in args:
        make(filename, outfile)
# Allow running this module directly as a script.
if __name__ == '__main__':
    main()
| mit |
ShinyROM/android_external_chromium_org | native_client_sdk/src/build_tools/update_sdktools.py | 70 | 4004 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Script that reads omahaproxy and gsutil to determine a version of the
sdk_tools bundle to use.
Please note the differences between this script and update_nacl_manifest.py:
update_sdktools.py is run by a SDK-team developer to assist in updating to a
new sdk_tools bundle. A file on the developer's hard drive is modified, and
must be checked in for the new sdk_tools bundle to be used.
update_nacl_manifest.py is customarily run by a cron job, and does not check in
any changes. Instead it modifies the manifest file in cloud storage."""
import collections
import difflib
import json
import optparse
import re
import sys
import urllib2
from manifest_util import DownloadAndComputeHash, DictToJSON
from update_nacl_manifest import RealDelegate
# Description template for the sdk_tools bundle; %d is the trunk revision.
SDK_TOOLS_DESCRIPTION_FORMAT = 'Native Client SDK Tools, revision %d'
# Cloud Storage bucket holding the SDK, addressed via gsutil and via HTTPS.
BUCKET_PATH = 'nativeclient-mirror/nacl/nacl_sdk/'
GS_BUCKET_PATH = 'gs://' + BUCKET_PATH
HTTPS_BUCKET_PATH = 'https://storage.googleapis.com/' + BUCKET_PATH
def GetSdkToolsUrl(revision):
    """Return the HTTPS download URL of sdk_tools.tgz for a trunk revision."""
    return '%strunk.%d/sdk_tools.tgz' % (HTTPS_BUCKET_PATH, revision)
def GetTrunkRevisions(delegate):
    """Return (revision, gs_url) pairs for every trunk.<rev> folder in the
    bucket, sorted ascending by revision."""
    trunk_re = re.compile(GS_BUCKET_PATH + 'trunk\.(\d+)')
    found = []
    for url in delegate.GsUtil_ls(GS_BUCKET_PATH):
        match = trunk_re.match(url)
        if match:
            found.append((int(match.group(1)), url))
    return sorted(found)
def FindMostRecentSdkTools(delegate):
    """Return (revision, sdk_tools.tgz URL) for the newest trunk revision
    that actually contains an sdk_tools.tgz, or None when none does."""
    for revision, url in reversed(GetTrunkRevisions(delegate)):
        sdktools_url = url + 'sdk_tools.tgz'
        # An empty/falsy listing means the archive is absent at this revision.
        if delegate.GsUtil_ls(sdktools_url):
            return revision, sdktools_url
    return None
def JsonLoadFromString(json_string):
    """Parse JSON, preserving object key order where the runtime allows it.

    Python > 2.7 supports json's object_pairs_hook, so objects are decoded
    into OrderedDicts there; older runtimes fall back to plain dicts.
    """
    if sys.version_info <= (2, 7):
        return json.loads(json_string)
    return json.loads(json_string, object_pairs_hook=collections.OrderedDict)
def GetBundleByName(bundles, name):
    """Return the first bundle dict whose 'name' equals *name*, else None."""
    return next((bundle for bundle in bundles if bundle['name'] == name), None)
def UpdateSdkToolsBundle(sdk_tools_bundle, revision, url, sha1, size):
    """Point the sdk_tools bundle (and every per-OS archive) at *revision*.

    Mutates *sdk_tools_bundle* in place: refreshes its description and
    revision, and sets url/sha1/size on each archive entry.
    """
    sdk_tools_bundle['description'] = SDK_TOOLS_DESCRIPTION_FORMAT % revision
    sdk_tools_bundle['revision'] = revision
    # Every OS entry points at the same archive.
    for archive in sdk_tools_bundle['archives']:
        archive.update(url=url, size=size)
        archive['checksum']['sha1'] = sha1
def UpdateManifest(manifest, revision):
    """Point the manifest's sdk_tools bundle at *revision* (in place).

    Downloads the sdk_tools.tgz for that revision to compute its SHA-1
    and size.
    """
    sdk_tools_bundle = GetBundleByName(manifest['bundles'], 'sdk_tools')
    url = GetSdkToolsUrl(revision)
    sha1, size = DownloadAndComputeHash(urllib2.urlopen(url))
    UpdateSdkToolsBundle(sdk_tools_bundle, revision, url, sha1, size)
def UpdateManifestFileToRevision(filename, revision):
    """Rewrite *filename* in place with its sdk_tools bundle at *revision*,
    printing a unified diff of the change to stdout."""
    with open(filename) as stream:
        manifest_string = stream.read()

    manifest = JsonLoadFromString(manifest_string)
    UpdateManifest(manifest, revision)
    new_manifest_string = DictToJSON(manifest)

    # splitlines(1) keeps the line endings, as unified_diff expects.
    diff_string = ''.join(difflib.unified_diff(manifest_string.splitlines(1),
                                               new_manifest_string.splitlines(1)))

    print 'diff %s' % filename
    print diff_string
    print

    with open(filename, 'w') as stream:
        stream.write(new_manifest_string)
def main(args):
    """Entry point: update both manifest files to the chosen (or, by
    default, the newest available) sdk_tools revision."""
    parser = optparse.OptionParser(description=__doc__)
    parser.add_option('-r', '--revision',
        help='set revision manually, rather than using the latest version')
    options, args = parser.parse_args(args[1:])
    if len(args) != 0:
        parser.error('Unexpected args: %s' % ', '.join(args))

    # TODO(binji): http://crbug.com/169047. Rename RealDelegate to something else.
    delegate = RealDelegate()
    if not options.revision:
        revision, _ = FindMostRecentSdkTools(delegate)
    else:
        revision = int(options.revision)

    UpdateManifestFileToRevision('json/naclsdk_manifest0.json', revision)
    UpdateManifestFileToRevision('json/naclsdk_manifest2.json', revision)
# Run as a script: exit with main()'s return code.
if __name__ == '__main__':
    sys.exit(main(sys.argv))
| bsd-3-clause |
Jaiz909/youtube-dl | youtube_dl/downloader/rtsp.py | 119 | 1528 | from __future__ import unicode_literals
import os
import subprocess
from .common import FileDownloader
from ..utils import (
check_executable,
encodeFilename,
)
class RtspFD(FileDownloader):
    """Downloader for RTSP/MMS streams via an external player binary.

    Prefers mplayer's stream dumper, falls back to mpv, and errors out
    when neither executable is available.
    """

    def real_download(self, filename, info_dict):
        url = info_dict['url']
        self.report_destination(filename)
        # Dump into a temp file first; renamed on success.
        tmpfilename = self.temp_name(filename)

        if check_executable('mplayer', ['-h']):
            args = [
                'mplayer', '-really-quiet', '-vo', 'null', '-vc', 'dummy',
                '-dumpstream', '-dumpfile', tmpfilename, url]
        elif check_executable('mpv', ['-h']):
            args = [
                'mpv', '-really-quiet', '--vo=null', '--stream-dump=' + tmpfilename, url]
        else:
            self.report_error('MMS or RTSP download detected but neither "mplayer" nor "mpv" could be run. Please install any.')
            return False

        retval = subprocess.call(args)
        if retval == 0:
            fsize = os.path.getsize(encodeFilename(tmpfilename))
            self.to_screen('\r[%s] %s bytes' % (args[0], fsize))
            self.try_rename(tmpfilename, filename)
            # The player reports no progress, so total == downloaded here.
            self._hook_progress({
                'downloaded_bytes': fsize,
                'total_bytes': fsize,
                'filename': filename,
                'status': 'finished',
            })
            return True
        else:
            self.to_stderr('\n')
            self.report_error('%s exited with code %d' % (args[0], retval))
            return False
| unlicense |
webounty/mezzanine | mezzanine/template/__init__.py | 40 | 4855 | from __future__ import unicode_literals
from functools import wraps
from django import template
from django.template.context import Context
from django.template.loader import get_template, select_template
from mezzanine.utils.device import templates_for_device
class Library(template.Library):
    """
    Extends ``django.template.Library`` providing several shortcuts
    that attempt to take the leg-work out of creating different types
    of template tags.
    """

    def as_tag(self, tag_func):
        """
        Creates a tag expecting the format:
        ``{% tag_name as var_name %}``
        The decorated func returns the value that is given to
        ``var_name`` in the template.
        """
        @wraps(tag_func)
        def tag_wrapper(parser, token):
            class AsTagNode(template.Node):
                def render(self, context):
                    parts = token.split_contents()

                    # Resolve variables if their names are given.
                    def resolve(arg):
                        try:
                            return template.Variable(arg).resolve(context)
                        except template.VariableDoesNotExist:
                            return arg
                    args, kwargs = [], {}
                    # parts[1:-2] skips the tag name at the front and the
                    # trailing "as var_name" pair at the end.
                    for arg in parts[1:-2]:
                        if "=" in arg:
                            name, val = arg.split("=", 1)
                            # Only treat it as a keyword if the decorated
                            # function declares that name; otherwise fall
                            # through and pass it positionally.
                            if name in tag_func.__code__.co_varnames:
                                kwargs[name] = resolve(val)
                                continue
                        args.append(resolve(arg))
                    # Bind the tag's result to the "as" target variable.
                    context[parts[-1]] = tag_func(*args, **kwargs)
                    return ""
            return AsTagNode()
        return self.tag(tag_wrapper)

    def render_tag(self, tag_func):
        """
        Creates a tag using the decorated func as the render function
        for the template tag node. The render function takes two
        arguments - the template context and the tag token.
        """
        @wraps(tag_func)
        def tag_wrapper(parser, token):
            class RenderTagNode(template.Node):
                def render(self, context):
                    return tag_func(context, token)
            return RenderTagNode()
        return self.tag(tag_wrapper)

    def to_end_tag(self, tag_func):
        """
        Creates a tag that parses until it finds the corresponding end
        tag, eg: for a tag named ``mytag`` it will parse until
        ``endmytag``. The decorated func's return value is used to
        render the parsed content and takes three arguments - the
        parsed content between the start and end tags, the template
        context and the tag token.
        """
        @wraps(tag_func)
        def tag_wrapper(parser, token):
            class ToEndTagNode(template.Node):
                def __init__(self):
                    end_name = "end%s" % tag_func.__name__
                    self.nodelist = parser.parse((end_name,))
                    parser.delete_first_token()

                def render(self, context):
                    # Pass only as many of (content, context, token) as the
                    # decorated function accepts (1, 2 or 3 arguments).
                    args = (self.nodelist.render(context), context, token)
                    return tag_func(*args[:tag_func.__code__.co_argcount])
            return ToEndTagNode()
        return self.tag(tag_wrapper)

    def inclusion_tag(self, name, context_class=Context, takes_context=False):
        """
        Replacement for Django's ``inclusion_tag`` which looks up device
        specific templates at render time.
        """
        def tag_decorator(tag_func):
            @wraps(tag_func)
            def tag_wrapper(parser, token):
                class InclusionTagNode(template.Node):
                    def render(self, context):
                        # Resolve the template at render time so the request
                        # (and thus the device) is available.
                        # NOTE(review): the guard checks "nodelist" but only
                        # "template" is ever assigned, so this branch runs on
                        # every render — confirm whether per-render lookup is
                        # intended or the attribute name is a slip.
                        if not getattr(self, "nodelist", False):
                            try:
                                request = context["request"]
                            except KeyError:
                                # No request in context: use the plain,
                                # device-agnostic template.
                                t = get_template(name)
                            else:
                                ts = templates_for_device(request, name)
                                t = select_template(ts)
                            self.template = t
                        parts = [template.Variable(part).resolve(context)
                                 for part in token.split_contents()[1:]]
                        if takes_context:
                            parts.insert(0, context)
                        result = tag_func(*parts)
                        # Render with the dict returned by the tag func as the
                        # context, preserving the current autoescape setting.
                        autoescape = context.autoescape
                        context = context_class(result, autoescape=autoescape)
                        return self.template.render(context)
                return InclusionTagNode()
            return self.tag(tag_wrapper)
        return tag_decorator
| bsd-2-clause |
agconti/njode | env/lib/python2.7/site-packages/django/db/backends/postgresql_psycopg2/version.py | 71 | 1517 | """
Extracts the version of the PostgreSQL server.
"""
import re
# This reg-exp is intentionally fairly flexible here.
# Needs to be able to handle stuff like:
# PostgreSQL 8.3.6
# EnterpriseDB 8.3
# PostgreSQL 8.3 beta4
# PostgreSQL 8.4beta1
VERSION_RE = re.compile(r'\S+ (\d+)\.(\d+)\.?(\d+)?')
def _parse_version(text):
"Internal parsing method. Factored out for testing purposes."
major, major2, minor = VERSION_RE.search(text).groups()
try:
return int(major) * 10000 + int(major2) * 100 + int(minor)
except (ValueError, TypeError):
return int(major) * 10000 + int(major2) * 100
def get_version(connection):
    """
    Return the PostgreSQL server version as a single integer.

    The format is the one used by libpq's PQserverVersion() and psycopg2's
    ``server_version`` connection attribute: major * 10000 + minor * 100 +
    micro, e.g. 80304 for 8.3.4. Betas and prereleases report 00 for the
    last two digits (90100 for "PostgreSQL 9.1beta2").

    ``server_version`` is preferred because it requires no query; older
    psycopg2 versions that lack it fall back to ``SELECT version()``.
    """
    if not hasattr(connection, 'server_version'):
        with connection.cursor() as cursor:
            cursor.execute("SELECT version()")
            return _parse_version(cursor.fetchone()[0])
    return connection.server_version
| bsd-3-clause |
dreamapplehappy/myblog | node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/pygments/__init__.py | 269 | 2974 | # -*- coding: utf-8 -*-
"""
Pygments
~~~~~~~~
Pygments is a syntax highlighting package written in Python.
It is a generic syntax highlighter for general use in all kinds of software
such as forum systems, wikis or other applications that need to prettify
source code. Highlights are:
* a wide range of common languages and markup formats is supported
* special attention is paid to details, increasing quality by a fair amount
* support for new languages and formats are added easily
* a number of output formats, presently HTML, LaTeX, RTF, SVG, all image
formats that PIL supports, and ANSI sequences
* it is usable as a command-line tool and as a library
* ... and it highlights even Brainfuck!
The `Pygments tip`_ is installable with ``easy_install Pygments==dev``.
.. _Pygments tip:
http://bitbucket.org/birkenfeld/pygments-main/get/tip.zip#egg=Pygments-dev
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
__version__ = '1.6'
__docformat__ = 'restructuredtext'
__all__ = ['lex', 'format', 'highlight']
import sys
from pygments.util import StringIO, BytesIO
def lex(code, lexer):
    """
    Lex ``code`` with ``lexer`` and return an iterable of tokens.
    """
    try:
        return lexer.get_tokens(code)
    except TypeError, err:
        # A common caller mistake is passing the lexer *class* rather than
        # an instance; on Python 2 that surfaces as an "unbound method
        # get_tokens" TypeError, which we re-raise with a clearer message.
        # Any other TypeError is propagated unchanged.
        if isinstance(err.args[0], str) and \
           'unbound method get_tokens' in err.args[0]:
            raise TypeError('lex() argument must be a lexer instance, '
                            'not a class')
        raise
def format(tokens, formatter, outfile=None):
    """
    Format a tokenlist ``tokens`` with the formatter ``formatter``.
    If ``outfile`` is given and a valid file object (an object
    with a ``write`` method), the result will be written to it, otherwise
    it is returned as a string.
    """
    try:
        if not outfile:
            #print formatter, 'using', formatter.encoding
            # Formatters that declare an encoding emit bytes, the rest emit
            # text, so pick the matching in-memory buffer.
            realoutfile = formatter.encoding and BytesIO() or StringIO()
            formatter.format(tokens, realoutfile)
            return realoutfile.getvalue()
        else:
            formatter.format(tokens, outfile)
    except TypeError, err:
        # Passing the formatter *class* instead of an instance raises an
        # "unbound method format" TypeError on Python 2; re-raise it with a
        # clearer message. Any other TypeError is propagated unchanged.
        if isinstance(err.args[0], str) and \
           'unbound method format' in err.args[0]:
            raise TypeError('format() argument must be a formatter instance, '
                            'not a class')
        raise
def highlight(code, lexer, formatter, outfile=None):
    """
    Shorthand for ``format(lex(code, lexer), formatter, outfile)``.
    If ``outfile`` is given and a valid file object (an object
    with a ``write`` method), the result will be written to it, otherwise
    it is returned as a string.
    """
    tokens = lex(code, lexer)
    return format(tokens, formatter, outfile)
if __name__ == '__main__':
    # Allow running this package module directly as the pygmentize
    # command-line tool; exit with the CLI's return code.
    from pygments.cmdline import main
    sys.exit(main(sys.argv))
| mit |
ruriwo/ErgoThumb072_firmware | tmk_core/tool/mbed/mbed-sdk/workspace_tools/libraries.py | 40 | 3646 | """
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from workspace_tools.paths import *
from workspace_tools.data.support import *
from workspace_tools.tests import TEST_MBED_LIB
# Each entry describes one optional mbed library build:
#   "id"           -- short name; also the key in LIBRARY_MAP below
#   "source_dir"   -- directory (or list of directories) holding the sources
#   "build_dir"    -- output directory for the built library
#   "dependencies" -- build dirs of libraries this one needs at build time
# Optional keys ("inc_dirs", "inc_dirs_ext", "macros", "supported") default
# to the values in Library.DEFAULTS.
LIBRARIES = [
    # RTOS libraries
    {
        "id": "rtx",
        "source_dir": MBED_RTX,
        "build_dir": RTOS_LIBRARIES,
        "dependencies": [MBED_LIBRARIES],
    },
    {
        "id": "rtos",
        "source_dir": RTOS_ABSTRACTION,
        "build_dir": RTOS_LIBRARIES,
        "dependencies": [MBED_LIBRARIES, MBED_RTX],
    },
    # USB Device libraries
    {
        "id": "usb",
        "source_dir": USB,
        "build_dir": USB_LIBRARIES,
        "dependencies": [MBED_LIBRARIES],
    },
    # USB Host libraries
    {
        "id": "usb_host",
        "source_dir": USB_HOST,
        "build_dir": USB_HOST_LIBRARIES,
        "dependencies": [MBED_LIBRARIES, FAT_FS, MBED_RTX, RTOS_ABSTRACTION],
    },
    # DSP libraries
    {
        "id": "cmsis_dsp",
        "source_dir": DSP_CMSIS,
        "build_dir": DSP_LIBRARIES,
        "dependencies": [MBED_LIBRARIES],
    },
    {
        "id": "dsp",
        "source_dir": DSP_ABSTRACTION,
        "build_dir": DSP_LIBRARIES,
        "dependencies": [MBED_LIBRARIES, DSP_CMSIS],
    },
    # File system libraries
    {
        "id": "fat",
        "source_dir": [FAT_FS, SD_FS],
        "build_dir": FS_LIBRARY,
        "dependencies": [MBED_LIBRARIES]
    },
    # Network libraries
    {
        "id": "eth",
        "source_dir": [ETH_SOURCES, LWIP_SOURCES],
        "build_dir": ETH_LIBRARY,
        "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES]
    },
    {
        "id": "ublox",
        "source_dir": [UBLOX_SOURCES, CELLULAR_SOURCES, CELLULAR_USB_SOURCES, LWIP_SOURCES],
        "build_dir": UBLOX_LIBRARY,
        "dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, USB_HOST_LIBRARIES],
    },
    # Unit Testing library
    {
        "id": "cpputest",
        "source_dir": [CPPUTEST_SRC, CPPUTEST_PLATFORM_SRC, CPPUTEST_TESTRUNNER_SCR],
        "build_dir": CPPUTEST_LIBRARY,
        "dependencies": [MBED_LIBRARIES],
        'inc_dirs': [CPPUTEST_INC, CPPUTEST_PLATFORM_INC, CPPUTEST_TESTRUNNER_INC, TEST_MBED_LIB],
        'inc_dirs_ext': [CPPUTEST_INC_EXT],
        'macros': ["CPPUTEST_USE_MEM_LEAK_DETECTION=0", "CPPUTEST_USE_STD_CPP_LIB=0", "CPPUTEST=1"],
    },
]
# Lookup table from library id to its configuration dict.
LIBRARY_MAP = dict([(library['id'], library) for library in LIBRARIES])
class Library:
    """Configuration for one optional library, looked up by id."""
    # Settings applied when a LIBRARIES entry does not override them.
    DEFAULTS = {
        "supported": DEFAULT_SUPPORT,
        'dependencies': None,
        'inc_dirs': None,       # Include dirs required by library build
        'inc_dirs_ext': None,   # Include dirs required by others to use with this library
        'macros': None,         # Additional macros you want to define when building library
    }
    def __init__(self, lib_id):
        # Start from the defaults, then overlay the entry-specific values.
        self.__dict__.update(Library.DEFAULTS)
        self.__dict__.update(LIBRARY_MAP[lib_id])
    def is_supported(self, target, toolchain):
        """True if this library builds for ``target`` with ``toolchain``."""
        if not hasattr(self, 'supported'):
            # No restriction recorded: assume universal support.
            return True
        if target.name not in self.supported:
            return False
        return toolchain in self.supported[target.name]
| gpl-2.0 |
ity/pants | tests/python/pants_test/backend/jvm/tasks/test_binary_create_integration.py | 12 | 6126 | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import subprocess
from pants.base.build_environment import get_buildroot
from pants.util.contextutil import open_zip
from pants.util.dirutil import safe_delete
from pants_test.pants_run_integration_test import PantsRunIntegrationTest
class BinaryCreateIntegrationTest(PantsRunIntegrationTest):
  """Integration tests for the `binary`/`bundle` goals: build a jar with
  pants, run it with `java`, and check the program output and exit code.
  """
  def test_autovalue_classfiles(self):
    # AutoValue-generated classes must end up in the runnable binary.
    self.build_and_run(
      pants_args=['binary', 'examples/src/java/org/pantsbuild/example/autovalue'],
      rel_out_path='dist',
      java_args=['-jar', 'autovalue.jar'],
      expected_output='Hello Autovalue!'
    )
  def test_manifest_entries(self):
    # Manifest entries from a target *with* source should be visible at runtime.
    self.build_and_run(
      pants_args=['binary',
                  'testprojects/src/java/org/pantsbuild/testproject/manifest:manifest-with-source'],
      rel_out_path='dist',
      java_args=['-cp', 'manifest-with-source.jar', 'org.pantsbuild.testproject.manifest.Manifest'],
      expected_output='Hello World! Version: 1.2.3'
    )
  def test_manifest_entries_no_source(self):
    # Manifest entries must work for a jar-only target with no sources too.
    self.build_and_run(
      pants_args=['binary',
                  'testprojects/src/java/org/pantsbuild/testproject/manifest:manifest-no-source'],
      rel_out_path='dist',
      java_args=['-cp', 'manifest-no-source.jar', 'org.pantsbuild.testproject.manifest.Manifest'],
      expected_output='Hello World! Version: 4.5.6',
    )
  def test_manifest_entries_bundle(self):
    # package level manifest entry, in this case, `Implementation-Version`, no longer work
    # because package files are not included in the bundle jar, instead they are referenced
    # through its manifest's Class-Path.
    self.build_and_run(
      pants_args=['bundle',
                  'testprojects/src/java/org/pantsbuild/testproject/manifest:manifest-app'],
      rel_out_path=os.path.join('dist', ('testprojects.src.java.org.pantsbuild.testproject'
                                         '.manifest.manifest-app-bundle')),
      java_args=['-cp', 'manifest-no-source.jar', 'org.pantsbuild.testproject.manifest.Manifest'],
      expected_output='Hello World! Version: null',
    )
    # If we still want to get package level manifest entries, we need to include packages files
    # in the bundle jar through `--deployjar`. However use that with caution because the monolithic
    # jar may have multiple packages.
    self.build_and_run(
      pants_args=['bundle',
                  'testprojects/src/java/org/pantsbuild/testproject/manifest:manifest-app',
                  '--bundle-jvm-deployjar'],
      rel_out_path=os.path.join('dist', ('testprojects.src.java.org.pantsbuild.testproject'
                                         '.manifest.manifest-app-bundle')),
      java_args=['-cp', 'manifest-no-source.jar', 'org.pantsbuild.testproject.manifest.Manifest'],
      expected_output='Hello World! Version: 4.5.6',
    )
  def test_deploy_excludes(self):
    # deploy_excludes should keep guava out of the binary jar entirely.
    jar_filename = os.path.join('dist', 'deployexcludes.jar')
    safe_delete(jar_filename)
    command = [
      '--no-compile-zinc-capture-classpath',
      'binary',
      'testprojects/src/java/org/pantsbuild/testproject/deployexcludes',
    ]
    with self.pants_results(command) as pants_run:
      self.assert_success(pants_run)
      # The resulting binary should not contain any guava classes
      with open_zip(jar_filename) as jar_file:
        self.assertEquals({'META-INF/',
                           'META-INF/MANIFEST.MF',
                           'org/',
                           'org/pantsbuild/',
                           'org/pantsbuild/testproject/',
                           'org/pantsbuild/testproject/deployexcludes/',
                           'org/pantsbuild/testproject/deployexcludes/DeployExcludesMain.class'},
                          set(jar_file.namelist()))
      # This jar should not run by itself, missing symbols
      self.run_java(java_args=['-jar', jar_filename],
                    expected_returncode=1,
                    expected_output='java.lang.NoClassDefFoundError: '
                                    'com/google/common/collect/ImmutableSortedSet')
      # But adding back the deploy_excluded symbols should result in a clean run.
      classpath = [jar_filename,
                   os.path.join(pants_run.workdir,
                                'ivy/jars/com.google.guava/guava/jars/guava-18.0.jar')]
      self.run_java(java_args=['-cp', os.pathsep.join(classpath),
                               'org.pantsbuild.testproject.deployexcludes.DeployExcludesMain'],
                    expected_output='DeployExcludes Hello World')
  def build_and_run(self, pants_args, rel_out_path, java_args, expected_output):
    """Run pants from a clean state, then run java on the built artifact."""
    self.assert_success(self.run_pants(['clean-all']))
    with self.pants_results(pants_args, {}) as pants_run:
      self.assert_success(pants_run)
    out_path = os.path.join(get_buildroot(), rel_out_path)
    self.run_java(java_args=java_args, expected_output=expected_output, cwd=out_path)
  def run_java(self, java_args, expected_returncode=0, expected_output=None, cwd=None):
    """Run `java` and assert on its exit code and its stdout (or stderr,
    when a failure exit code is expected).
    """
    command = ['java'] + java_args
    process = subprocess.Popen(command,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               cwd=cwd)
    stdout, stderr = process.communicate()
    self.assertEquals(expected_returncode, process.returncode,
                      ('Expected exit code {} from command `{}` but got {}:\n'
                       'stdout:\n{}\n'
                       'stderr:\n{}'
                       .format(expected_returncode,
                               ' '.join(command),
                               process.returncode,
                               stdout,
                               stderr)))
    self.assertIn(expected_output, stdout if expected_returncode == 0 else stderr)
gannetson/sportschooldeopenlucht | env/lib/python2.7/site-packages/django/core/exceptions.py | 118 | 2799 | """
Global Django exception and warning classes.
"""
from functools import reduce
class DjangoRuntimeWarning(RuntimeWarning):
    """Base runtime warning category for Django."""
class ObjectDoesNotExist(Exception):
    """The requested object does not exist."""
    # Tell the template engine to fail silently if this is raised while
    # resolving a template variable.
    silent_variable_failure = True
class MultipleObjectsReturned(Exception):
    """The query returned multiple objects when only one was expected."""
class SuspiciousOperation(Exception):
    """The user did something suspicious."""
class PermissionDenied(Exception):
    """The user did not have permission to do that."""
class ViewDoesNotExist(Exception):
    """The requested view does not exist."""
class MiddlewareNotUsed(Exception):
    """This middleware is not used in this server configuration."""
class ImproperlyConfigured(Exception):
    """Django is somehow improperly configured."""
class FieldError(Exception):
    """Some kind of problem with a model field."""
# Key under which non-field-specific messages are stored in an error dict.
NON_FIELD_ERRORS = '__all__'
class ValidationError(Exception):
    """An error while validating data.

    May be constructed from any printable object (usually a string), a list
    of such objects, or a dict mapping field names to lists of messages.
    """
    def __init__(self, message, code=None, params=None):
        import operator
        from django.utils.encoding import force_text
        if isinstance(message, dict):
            self.message_dict = message
            # Flatten the per-field message lists into one combined list.
            message = reduce(operator.add, message.values())
        if isinstance(message, list):
            self.messages = [force_text(msg) for msg in message]
        else:
            self.code = code
            self.params = params
            self.messages = [force_text(message)]
    def __str__(self):
        # Needed because Exception.__init__ is never called here, so the
        # default __str__ would choke on the missing ``args`` attribute.
        payload = self.message_dict if hasattr(self, 'message_dict') else self.messages
        return repr(payload)
    def __repr__(self):
        payload = self.message_dict if hasattr(self, 'message_dict') else self.messages
        return 'ValidationError(%s)' % repr(payload)
    def update_error_dict(self, error_dict):
        """Merge this error's messages into ``error_dict`` and return it."""
        if not hasattr(self, 'message_dict'):
            # Non-field errors all go under the NON_FIELD_ERRORS key.
            error_dict[NON_FIELD_ERRORS] = self.messages
            return error_dict
        if not error_dict:
            return self.message_dict
        for field, msgs in self.message_dict.items():
            error_dict.setdefault(field, []).extend(msgs)
        return error_dict
| bsd-3-clause |
GbalsaC/bitnamiP | venv/lib/python2.7/site-packages/numpy/f2py/rules.py | 32 | 54543 | #!/usr/bin/env python
"""
Rules for building C/API module with f2py2e.
Here is a skeleton of a new wrapper function (13Dec2001):
wrapper_function(args)
declarations
get_python_arguments, say, `a' and `b'
get_a_from_python
if (successful) {
get_b_from_python
if (successful) {
callfortran
if (succesful) {
put_a_to_python
if (succesful) {
put_b_to_python
if (succesful) {
buildvalue = ...
}
}
}
}
cleanup_b
}
cleanup_a
return buildvalue
"""
"""
Copyright 1999,2000 Pearu Peterson all rights reserved,
Pearu Peterson <pearu@ioc.ee>
Permission to use, modify, and distribute this software is given under the
terms of the NumPy License.
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
$Date: 2005/08/30 08:58:42 $
Pearu Peterson
"""
__version__ = "$Revision: 1.129 $"[10:-1]
import __version__
f2py_version = __version__.version
import pprint
import sys
import time
import types
import copy
errmess=sys.stderr.write
outmess=sys.stdout.write
show=pprint.pprint
from auxfuncs import *
import capi_maps
from capi_maps import *
import cfuncs
import common_rules
import use_rules
import f90mod_rules
import func2subr
options={}
# Maps rule-dictionary keys to the separator used when the generated code
# fragments for that key are joined together; keys listed below are joined
# with newlines, everything else uses the default separator.
sepdict={}
#for k in ['need_cfuncs']: sepdict[k]=','
for k in ['decl',
          'frompyobj',
          'cleanupfrompyobj',
          'topyarr','method',
          'pyobjfrom','closepyobjfrom',
          'freemem',
          'userincludes',
          'includes0','includes','typedefs','typedefs_generated',
          'cppmacros','cfuncs','callbacks',
          'latexdoc',
          'restdoc',
          'routine_defs','externroutines',
          'initf2pywraphooks',
          'commonhooks','initcommonhooks',
          'f90modhooks','initf90modhooks']:
    sepdict[k]='\n'
#################### Rules for C/API module #################
module_rules={
'modulebody':"""\
/* File: #modulename#module.c
* This file is auto-generated with f2py (version:#f2py_version#).
* f2py is a Fortran to Python Interface Generator (FPIG), Second Edition,
* written by Pearu Peterson <pearu@cens.ioc.ee>.
* See http://cens.ioc.ee/projects/f2py2e/
* Generation date: """+time.asctime(time.localtime(time.time()))+"""
* $R"""+"""evision:$
* $D"""+"""ate:$
* Do not edit this file directly unless you know what you are doing!!!
*/
#ifdef __cplusplus
extern \"C\" {
#endif
"""+gentitle("See f2py2e/cfuncs.py: includes")+"""
#includes#
#includes0#
"""+gentitle("See f2py2e/rules.py: mod_rules['modulebody']")+"""
static PyObject *#modulename#_error;
static PyObject *#modulename#_module;
"""+gentitle("See f2py2e/cfuncs.py: typedefs")+"""
#typedefs#
"""+gentitle("See f2py2e/cfuncs.py: typedefs_generated")+"""
#typedefs_generated#
"""+gentitle("See f2py2e/cfuncs.py: cppmacros")+"""
#cppmacros#
"""+gentitle("See f2py2e/cfuncs.py: cfuncs")+"""
#cfuncs#
"""+gentitle("See f2py2e/cfuncs.py: userincludes")+"""
#userincludes#
"""+gentitle("See f2py2e/capi_rules.py: usercode")+"""
#usercode#
/* See f2py2e/rules.py */
#externroutines#
"""+gentitle("See f2py2e/capi_rules.py: usercode1")+"""
#usercode1#
"""+gentitle("See f2py2e/cb_rules.py: buildcallback")+"""
#callbacks#
"""+gentitle("See f2py2e/rules.py: buildapi")+"""
#body#
"""+gentitle("See f2py2e/f90mod_rules.py: buildhooks")+"""
#f90modhooks#
"""+gentitle("See f2py2e/rules.py: module_rules['modulebody']")+"""
"""+gentitle("See f2py2e/common_rules.py: buildhooks")+"""
#commonhooks#
"""+gentitle("See f2py2e/rules.py")+"""
static FortranDataDef f2py_routine_defs[] = {
#routine_defs#
\t{NULL}
};
static PyMethodDef f2py_module_methods[] = {
#pymethoddef#
\t{NULL,NULL}
};
#if PY_VERSION_HEX >= 0x03000000
static struct PyModuleDef moduledef = {
\tPyModuleDef_HEAD_INIT,
\t"#modulename#",
\tNULL,
\t-1,
\tf2py_module_methods,
\tNULL,
\tNULL,
\tNULL,
\tNULL
};
#endif
#if PY_VERSION_HEX >= 0x03000000
#define RETVAL m
PyObject *PyInit_#modulename#(void) {
#else
#define RETVAL
PyMODINIT_FUNC init#modulename#(void) {
#endif
\tint i;
\tPyObject *m,*d, *s;
#if PY_VERSION_HEX >= 0x03000000
\tm = #modulename#_module = PyModule_Create(&moduledef);
#else
\tm = #modulename#_module = Py_InitModule(\"#modulename#\", f2py_module_methods);
#endif
\tPy_TYPE(&PyFortran_Type) = &PyType_Type;
\timport_array();
\tif (PyErr_Occurred())
\t\t{PyErr_SetString(PyExc_ImportError, \"can't initialize module #modulename# (failed to import numpy)\"); return RETVAL;}
\td = PyModule_GetDict(m);
\ts = PyString_FromString(\"$R"""+"""evision: $\");
\tPyDict_SetItemString(d, \"__version__\", s);
#if PY_VERSION_HEX >= 0x03000000
\ts = PyUnicode_FromString(
#else
\ts = PyString_FromString(
#endif
\t\t\"This module '#modulename#' is auto-generated with f2py (version:#f2py_version#).\\nFunctions:\\n\"\n#docs#\".\");
\tPyDict_SetItemString(d, \"__doc__\", s);
\t#modulename#_error = PyErr_NewException (\"#modulename#.error\", NULL, NULL);
\tPy_DECREF(s);
\tfor(i=0;f2py_routine_defs[i].name!=NULL;i++)
\t\tPyDict_SetItemString(d, f2py_routine_defs[i].name,PyFortranObject_NewAsAttr(&f2py_routine_defs[i]));
#initf2pywraphooks#
#initf90modhooks#
#initcommonhooks#
#interface_usercode#
#ifdef F2PY_REPORT_ATEXIT
\tif (! PyErr_Occurred())
\t\ton_exit(f2py_report_on_exit,(void*)\"#modulename#\");
#endif
\treturn RETVAL;
}
#ifdef __cplusplus
}
#endif
""",
'separatorsfor':{'latexdoc':'\n\n',
'restdoc':'\n\n'},
'latexdoc':['\\section{Module \\texttt{#texmodulename#}}\n',
'#modnote#\n',
'#latexdoc#'],
'restdoc':['Module #modulename#\n'+'='*80,
'\n#restdoc#']
}
# Default (fallback) rules applied to every generated module: mostly
# end-of-section marker comments that terminate the accumulated snippet
# lists for each rule key.
defmod_rules=[
    {'body':'/*eof body*/',
     'method':'/*eof method*/',
     'externroutines':'/*eof externroutines*/',
     'routine_defs':'/*eof routine_defs*/',
     'initf90modhooks':'/*eof initf90modhooks*/',
     'initf2pywraphooks':'/*eof initf2pywraphooks*/',
     'initcommonhooks':'/*eof initcommonhooks*/',
     'latexdoc':'',
     'restdoc':'',
     # Module note is emitted only when the parsed Fortran carries a note.
     'modnote':{hasnote:'#note#',l_not(hasnote):''},
     }
    ]
routine_rules={
'separatorsfor':sepdict,
'body':"""
#begintitle#
static char doc_#apiname#[] = \"\\\nFunction signature:\\n\\\n\t#docreturn##name#(#docsignatureshort#)\\n\\\n#docstrsigns#\";
/* #declfortranroutine# */
static PyObject *#apiname#(const PyObject *capi_self,
PyObject *capi_args,
PyObject *capi_keywds,
#functype# (*f2py_func)(#callprotoargument#)) {
\tPyObject * volatile capi_buildvalue = NULL;
\tvolatile int f2py_success = 1;
#decl#
\tstatic char *capi_kwlist[] = {#kwlist##kwlistopt##kwlistxa#NULL};
#usercode#
#routdebugenter#
#ifdef F2PY_REPORT_ATEXIT
f2py_start_clock();
#endif
\tif (!PyArg_ParseTupleAndKeywords(capi_args,capi_keywds,\\
\t\t\"#argformat##keyformat##xaformat#:#pyname#\",\\
\t\tcapi_kwlist#args_capi##keys_capi##keys_xa#))\n\t\treturn NULL;
#frompyobj#
/*end of frompyobj*/
#ifdef F2PY_REPORT_ATEXIT
f2py_start_call_clock();
#endif
#callfortranroutine#
if (PyErr_Occurred())
f2py_success = 0;
#ifdef F2PY_REPORT_ATEXIT
f2py_stop_call_clock();
#endif
/*end of callfortranroutine*/
\t\tif (f2py_success) {
#pyobjfrom#
/*end of pyobjfrom*/
\t\tCFUNCSMESS(\"Building return value.\\n\");
\t\tcapi_buildvalue = Py_BuildValue(\"#returnformat#\"#return#);
/*closepyobjfrom*/
#closepyobjfrom#
\t\t} /*if (f2py_success) after callfortranroutine*/
/*cleanupfrompyobj*/
#cleanupfrompyobj#
\tif (capi_buildvalue == NULL) {
#routdebugfailure#
\t} else {
#routdebugleave#
\t}
\tCFUNCSMESS(\"Freeing memory.\\n\");
#freemem#
#ifdef F2PY_REPORT_ATEXIT
f2py_stop_clock();
#endif
\treturn capi_buildvalue;
}
#endtitle#
""",
'routine_defs':'#routine_def#',
'initf2pywraphooks':'#initf2pywraphook#',
'externroutines':'#declfortranroutine#',
'doc':'#docreturn##name#(#docsignature#)',
'docshort':'#docreturn##name#(#docsignatureshort#)',
'docs':'"\t#docreturn##name#(#docsignature#)\\n"\n',
'need':['arrayobject.h','CFUNCSMESS','MINMAX'],
'cppmacros':{debugcapi:'#define DEBUGCFUNCS'},
'latexdoc':['\\subsection{Wrapper function \\texttt{#texname#}}\n',
"""
\\noindent{{}\\verb@#docreturn##name#@{}}\\texttt{(#latexdocsignatureshort#)}
#routnote#
#latexdocstrsigns#
"""],
'restdoc':['Wrapped function ``#name#``\n'+'-'*80,
]
}
################## Rules for C/API function ##############
rout_rules=[
{ # Init
'separatorsfor': {'callfortranroutine':'\n','routdebugenter':'\n','decl':'\n',
'routdebugleave':'\n','routdebugfailure':'\n',
'setjmpbuf':' || ',
'docstrreq':'\n','docstropt':'\n','docstrout':'\n',
'docstrcbs':'\n','docstrsigns':'\\n"\n"',
'latexdocstrsigns':'\n',
'latexdocstrreq':'\n','latexdocstropt':'\n',
'latexdocstrout':'\n','latexdocstrcbs':'\n',
},
'kwlist':'','kwlistopt':'','callfortran':'','callfortranappend':'',
'docsign':'','docsignopt':'','decl':'/*decl*/',
'freemem':'/*freemem*/',
'docsignshort':'','docsignoptshort':'',
'docstrsigns':'','latexdocstrsigns':'',
'docstrreq':'Required arguments:',
'docstropt':'Optional arguments:',
'docstrout':'Return objects:',
'docstrcbs':'Call-back functions:',
'latexdocstrreq':'\\noindent Required arguments:',
'latexdocstropt':'\\noindent Optional arguments:',
'latexdocstrout':'\\noindent Return objects:',
'latexdocstrcbs':'\\noindent Call-back functions:',
'args_capi':'','keys_capi':'','functype':'',
'frompyobj':'/*frompyobj*/',
'cleanupfrompyobj':['/*end of cleanupfrompyobj*/'], #this list will be reversed
'pyobjfrom':'/*pyobjfrom*/',
'closepyobjfrom':['/*end of closepyobjfrom*/'], #this list will be reversed
'topyarr':'/*topyarr*/','routdebugleave':'/*routdebugleave*/',
'routdebugenter':'/*routdebugenter*/',
'routdebugfailure':'/*routdebugfailure*/',
'callfortranroutine':'/*callfortranroutine*/',
'argformat':'','keyformat':'','need_cfuncs':'',
'docreturn':'','return':'','returnformat':'','rformat':'',
'kwlistxa':'','keys_xa':'','xaformat':'','docsignxa':'','docsignxashort':'',
'initf2pywraphook':'',
'routnote':{hasnote:'--- #note#',l_not(hasnote):''},
},{
'apiname':'f2py_rout_#modulename#_#name#',
'pyname':'#modulename#.#name#',
'decl':'',
'_check':l_not(ismoduleroutine)
},{
'apiname':'f2py_rout_#modulename#_#f90modulename#_#name#',
'pyname':'#modulename#.#f90modulename#.#name#',
'decl':'',
'_check':ismoduleroutine
},{ # Subroutine
'functype':'void',
'declfortranroutine':{l_and(l_not(l_or(ismoduleroutine,isintent_c)),l_not(isdummyroutine)):'extern void #F_FUNC#(#fortranname#,#FORTRANNAME#)(#callprotoargument#);',
l_and(l_not(ismoduleroutine),isintent_c,l_not(isdummyroutine)):'extern void #fortranname#(#callprotoargument#);',
ismoduleroutine:'',
isdummyroutine:''
},
'routine_def':{l_not(l_or(ismoduleroutine,isintent_c,isdummyroutine)):'\t{\"#name#\",-1,{{-1}},0,(char *)#F_FUNC#(#fortranname#,#FORTRANNAME#),(f2py_init_func)#apiname#,doc_#apiname#},',
l_and(l_not(ismoduleroutine),isintent_c,l_not(isdummyroutine)):'\t{\"#name#\",-1,{{-1}},0,(char *)#fortranname#,(f2py_init_func)#apiname#,doc_#apiname#},',
l_and(l_not(ismoduleroutine),isdummyroutine):'\t{\"#name#\",-1,{{-1}},0,NULL,(f2py_init_func)#apiname#,doc_#apiname#},',
},
'need':{l_and(l_not(l_or(ismoduleroutine,isintent_c)),l_not(isdummyroutine)):'F_FUNC'},
'callfortranroutine':[
{debugcapi:["""\tfprintf(stderr,\"debug-capi:Fortran subroutine `#fortranname#(#callfortran#)\'\\n\");"""]},
{hasexternals:"""\
\t\tif (#setjmpbuf#) {
\t\t\tf2py_success = 0;
\t\t} else {"""},
{isthreadsafe:'\t\t\tPy_BEGIN_ALLOW_THREADS'},
{hascallstatement:'''\t\t\t\t#callstatement#;
\t\t\t\t/*(*f2py_func)(#callfortran#);*/'''},
{l_not(l_or(hascallstatement,isdummyroutine)):'\t\t\t\t(*f2py_func)(#callfortran#);'},
{isthreadsafe:'\t\t\tPy_END_ALLOW_THREADS'},
{hasexternals:"""\t\t}"""}
],
'_check':l_and(issubroutine,l_not(issubroutine_wrap)),
},{ # Wrapped function
'functype':'void',
'declfortranroutine':{l_not(l_or(ismoduleroutine,isdummyroutine)):'extern void #F_WRAPPEDFUNC#(#name_lower#,#NAME#)(#callprotoargument#);',
isdummyroutine:'',
},
'routine_def':{l_not(l_or(ismoduleroutine,isdummyroutine)):'\t{\"#name#\",-1,{{-1}},0,(char *)#F_WRAPPEDFUNC#(#name_lower#,#NAME#),(f2py_init_func)#apiname#,doc_#apiname#},',
isdummyroutine:'\t{\"#name#\",-1,{{-1}},0,NULL,(f2py_init_func)#apiname#,doc_#apiname#},',
},
'initf2pywraphook':{l_not(l_or(ismoduleroutine,isdummyroutine)):'''
{
extern #ctype# #F_FUNC#(#name_lower#,#NAME#)(void);
PyObject* o = PyDict_GetItemString(d,"#name#");
PyObject_SetAttrString(o,"_cpointer", F2PyCapsule_FromVoidPtr((void*)#F_FUNC#(#name_lower#,#NAME#),NULL));
#if PY_VERSION_HEX >= 0x03000000
PyObject_SetAttrString(o,"__name__", PyUnicode_FromString("#name#"));
#else
PyObject_SetAttrString(o,"__name__", PyString_FromString("#name#"));
#endif
}
'''},
'need':{l_not(l_or(ismoduleroutine,isdummyroutine)):['F_WRAPPEDFUNC','F_FUNC']},
'callfortranroutine':[
{debugcapi:["""\tfprintf(stderr,\"debug-capi:Fortran subroutine `f2pywrap#name_lower#(#callfortran#)\'\\n\");"""]},
{hasexternals:"""\
\tif (#setjmpbuf#) {
\t\tf2py_success = 0;
\t} else {"""},
{isthreadsafe:'\tPy_BEGIN_ALLOW_THREADS'},
{l_not(l_or(hascallstatement,isdummyroutine)):'\t(*f2py_func)(#callfortran#);'},
{hascallstatement:'\t#callstatement#;\n\t/*(*f2py_func)(#callfortran#);*/'},
{isthreadsafe:'\tPy_END_ALLOW_THREADS'},
{hasexternals:'\t}'}
],
'_check':isfunction_wrap,
},{ # Wrapped subroutine
'functype':'void',
'declfortranroutine':{l_not(l_or(ismoduleroutine,isdummyroutine)):'extern void #F_WRAPPEDFUNC#(#name_lower#,#NAME#)(#callprotoargument#);',
isdummyroutine:'',
},
'routine_def':{l_not(l_or(ismoduleroutine,isdummyroutine)):'\t{\"#name#\",-1,{{-1}},0,(char *)#F_WRAPPEDFUNC#(#name_lower#,#NAME#),(f2py_init_func)#apiname#,doc_#apiname#},',
isdummyroutine:'\t{\"#name#\",-1,{{-1}},0,NULL,(f2py_init_func)#apiname#,doc_#apiname#},',
},
'initf2pywraphook':{l_not(l_or(ismoduleroutine,isdummyroutine)):'''
{
extern void #F_FUNC#(#name_lower#,#NAME#)(void);
PyObject* o = PyDict_GetItemString(d,"#name#");
PyObject_SetAttrString(o,"_cpointer", F2PyCapsule_FromVoidPtr((void*)#F_FUNC#(#name_lower#,#NAME#),NULL));
#if PY_VERSION_HEX >= 0x03000000
PyObject_SetAttrString(o,"__name__", PyUnicode_FromString("#name#"));
#else
PyObject_SetAttrString(o,"__name__", PyString_FromString("#name#"));
#endif
}
'''},
'need':{l_not(l_or(ismoduleroutine,isdummyroutine)):['F_WRAPPEDFUNC','F_FUNC']},
'callfortranroutine':[
{debugcapi:["""\tfprintf(stderr,\"debug-capi:Fortran subroutine `f2pywrap#name_lower#(#callfortran#)\'\\n\");"""]},
{hasexternals:"""\
\tif (#setjmpbuf#) {
\t\tf2py_success = 0;
\t} else {"""},
{isthreadsafe:'\tPy_BEGIN_ALLOW_THREADS'},
{l_not(l_or(hascallstatement,isdummyroutine)):'\t(*f2py_func)(#callfortran#);'},
{hascallstatement:'\t#callstatement#;\n\t/*(*f2py_func)(#callfortran#);*/'},
{isthreadsafe:'\tPy_END_ALLOW_THREADS'},
{hasexternals:'\t}'}
],
'_check':issubroutine_wrap,
},{ # Function
'functype':'#ctype#',
'docreturn':{l_not(isintent_hide):'#rname#,'},
'docstrout':'\t#pydocsignout#',
'latexdocstrout':['\\item[]{{}\\verb@#pydocsignout#@{}}',
{hasresultnote:'--- #resultnote#'}],
'callfortranroutine':[{l_and(debugcapi,isstringfunction):"""\
#ifdef USESCOMPAQFORTRAN
\tfprintf(stderr,\"debug-capi:Fortran function #ctype# #fortranname#(#callcompaqfortran#)\\n\");
#else
\tfprintf(stderr,\"debug-capi:Fortran function #ctype# #fortranname#(#callfortran#)\\n\");
#endif
"""},
{l_and(debugcapi,l_not(isstringfunction)):"""\
\tfprintf(stderr,\"debug-capi:Fortran function #ctype# #fortranname#(#callfortran#)\\n\");
"""}
],
'_check':l_and(isfunction,l_not(isfunction_wrap))
},{ # Scalar function
'declfortranroutine':{l_and(l_not(l_or(ismoduleroutine,isintent_c)),l_not(isdummyroutine)):'extern #ctype# #F_FUNC#(#fortranname#,#FORTRANNAME#)(#callprotoargument#);',
l_and(l_not(ismoduleroutine),isintent_c,l_not(isdummyroutine)):'extern #ctype# #fortranname#(#callprotoargument#);',
isdummyroutine:''
},
'routine_def':{l_and(l_not(l_or(ismoduleroutine,isintent_c)),l_not(isdummyroutine)):'\t{\"#name#\",-1,{{-1}},0,(char *)#F_FUNC#(#fortranname#,#FORTRANNAME#),(f2py_init_func)#apiname#,doc_#apiname#},',
l_and(l_not(ismoduleroutine),isintent_c,l_not(isdummyroutine)):'\t{\"#name#\",-1,{{-1}},0,(char *)#fortranname#,(f2py_init_func)#apiname#,doc_#apiname#},',
isdummyroutine:'\t{\"#name#\",-1,{{-1}},0,NULL,(f2py_init_func)#apiname#,doc_#apiname#},',
},
'decl':[{iscomplexfunction_warn:'\t#ctype# #name#_return_value={0,0};',
l_not(iscomplexfunction):'\t#ctype# #name#_return_value=0;'},
{iscomplexfunction:'\tPyObject *#name#_return_value_capi = Py_None;'}
],
'callfortranroutine':[
{hasexternals:"""\
\tif (#setjmpbuf#) {
\t\tf2py_success = 0;
\t} else {"""},
{isthreadsafe:'\tPy_BEGIN_ALLOW_THREADS'},
{hascallstatement:'''\t#callstatement#;
/*\t#name#_return_value = (*f2py_func)(#callfortran#);*/
'''},
{l_not(l_or(hascallstatement,isdummyroutine)):'\t#name#_return_value = (*f2py_func)(#callfortran#);'},
{isthreadsafe:'\tPy_END_ALLOW_THREADS'},
{hasexternals:'\t}'},
{l_and(debugcapi,iscomplexfunction):'\tfprintf(stderr,"#routdebugshowvalue#\\n",#name#_return_value.r,#name#_return_value.i);'},
{l_and(debugcapi,l_not(iscomplexfunction)):'\tfprintf(stderr,"#routdebugshowvalue#\\n",#name#_return_value);'}],
'pyobjfrom':{iscomplexfunction:'\t#name#_return_value_capi = pyobj_from_#ctype#1(#name#_return_value);'},
'need':[{l_not(isdummyroutine):'F_FUNC'},
{iscomplexfunction:'pyobj_from_#ctype#1'},
{islong_longfunction:'long_long'},
{islong_doublefunction:'long_double'}],
'returnformat':{l_not(isintent_hide):'#rformat#'},
'return':{iscomplexfunction:',#name#_return_value_capi',
l_not(l_or(iscomplexfunction,isintent_hide)):',#name#_return_value'},
'_check':l_and(isfunction,l_not(isstringfunction),l_not(isfunction_wrap))
},{ # String function # in use for --no-wrap
'declfortranroutine':'extern void #F_FUNC#(#fortranname#,#FORTRANNAME#)(#callprotoargument#);',
'routine_def':{l_not(l_or(ismoduleroutine,isintent_c)):
# '\t{\"#name#\",-1,{{-1}},0,(char *)F_FUNC(#fortranname#,#FORTRANNAME#),(void *)#apiname#,doc_#apiname#},',
'\t{\"#name#\",-1,{{-1}},0,(char *)#F_FUNC#(#fortranname#,#FORTRANNAME#),(f2py_init_func)#apiname#,doc_#apiname#},',
l_and(l_not(ismoduleroutine),isintent_c):
# '\t{\"#name#\",-1,{{-1}},0,(char *)#fortranname#,(void *)#apiname#,doc_#apiname#},'
'\t{\"#name#\",-1,{{-1}},0,(char *)#fortranname#,(f2py_init_func)#apiname#,doc_#apiname#},'
},
'decl':['\t#ctype# #name#_return_value = NULL;',
'\tint #name#_return_value_len = 0;'],
'callfortran':'#name#_return_value,#name#_return_value_len,',
'callfortranroutine':['\t#name#_return_value_len = #rlength#;',
'\tif ((#name#_return_value = (string)malloc(sizeof(char)*(#name#_return_value_len+1))) == NULL) {',
'\t\tPyErr_SetString(PyExc_MemoryError, \"out of memory\");',
'\t\tf2py_success = 0;',
'\t} else {',
"\t\t(#name#_return_value)[#name#_return_value_len] = '\\0';",
'\t}',
'\tif (f2py_success) {',
{hasexternals:"""\
\t\tif (#setjmpbuf#) {
\t\t\tf2py_success = 0;
\t\t} else {"""},
{isthreadsafe:'\t\tPy_BEGIN_ALLOW_THREADS'},
"""\
#ifdef USESCOMPAQFORTRAN
\t\t(*f2py_func)(#callcompaqfortran#);
#else
\t\t(*f2py_func)(#callfortran#);
#endif
""",
{isthreadsafe:'\t\tPy_END_ALLOW_THREADS'},
{hasexternals:'\t\t}'},
{debugcapi:'\t\tfprintf(stderr,"#routdebugshowvalue#\\n",#name#_return_value_len,#name#_return_value);'},
'\t} /* if (f2py_success) after (string)malloc */',
],
'returnformat':'#rformat#',
'return':',#name#_return_value',
'freemem':'\tSTRINGFREE(#name#_return_value);',
'need':['F_FUNC','#ctype#','STRINGFREE'],
'_check':l_and(isstringfunction,l_not(isfunction_wrap)) # ???obsolete
},
{ # Debugging
'routdebugenter':'\tfprintf(stderr,"debug-capi:Python C/API function #modulename#.#name#(#docsignature#)\\n");',
'routdebugleave':'\tfprintf(stderr,"debug-capi:Python C/API function #modulename#.#name#: successful.\\n");',
'routdebugfailure':'\tfprintf(stderr,"debug-capi:Python C/API function #modulename#.#name#: failure.\\n");',
'_check':debugcapi
}
]
################ Rules for arguments ##################
# Map attribute-test predicate -> `need' key naming the C typedef that the
# generated extension source must define when a variable matches the
# predicate (e.g. any long-long-valued variable pulls in the `long_long'
# typedef from cfuncs).  Shared by aux_rules and arg_rules below.
typedef_need_dict = {islong_long:'long_long',
                     islong_double:'long_double',
                     islong_complex:'complex_long_double',
                     isunsigned_char:'unsigned_char',
                     isunsigned_short:'unsigned_short',
                     isunsigned:'unsigned',
                     isunsigned_long_long:'unsigned_long_long',
                     isunsigned_chararray:'unsigned_char',
                     isunsigned_shortarray:'unsigned_short',
                     isunsigned_long_longarray:'unsigned_long_long',
                     issigned_long_longarray:'long_long',
                     }
# Rules applied to intent(aux) variables: local auxiliary variables of the
# generated wrapper that never appear in the Python-level signature.  Each
# dictionary contributes C-code fragments (selected by its `_check'
# predicate) that applyrules() merges into the wrapper template.
aux_rules=[
    {
        'separatorsfor':sepdict
    },
    { # Common
        'frompyobj':['\t/* Processing auxiliary variable #varname# */',
                     {debugcapi:'\tfprintf(stderr,"#vardebuginfo#\\n");'},],
        'cleanupfrompyobj':'\t/* End of cleaning variable #varname# */',
        'need':typedef_need_dict,
    },
    # Scalars (not complex)
    { # Common
        'decl':'\t#ctype# #varname# = 0;',
        'need':{hasinitvalue:'math.h'},
        'frompyobj':{hasinitvalue:'\t#varname# = #init#;'},
        '_check':l_and(isscalar,l_not(iscomplex)),
    },
    {
        'return':',#varname#',
        'docstrout':'\t#pydocsignout#',
        'docreturn':'#outvarname#,',
        'returnformat':'#varrformat#',
        '_check':l_and(isscalar,l_not(iscomplex),isintent_out),
    },
    # Complex scalars
    { # Common
        'decl':'\t#ctype# #varname#;',
        'frompyobj': {hasinitvalue:'\t#varname#.r = #init.r#, #varname#.i = #init.i#;'},
        '_check':iscomplex
    },
    # String
    { # Common
        'decl':['\t#ctype# #varname# = NULL;',
                '\tint slen(#varname#);',
                ],
        'need':['len..'],
        '_check':isstring
    },
    # Array
    { # Common
        'decl':['\t#ctype# *#varname# = NULL;',
                '\tnpy_intp #varname#_Dims[#rank#] = {#rank*[-1]#};',
                '\tconst int #varname#_Rank = #rank#;',
                ],
        'need':['len..',{hasinitvalue:'forcomb'},{hasinitvalue:'CFUNCSMESS'}],
        '_check':isarray
    },
    # Scalararray
    { # Common
        '_check':l_and(isarray,l_not(iscomplexarray))
    },{ # Not hidden
        '_check':l_and(isarray,l_not(iscomplexarray),isintent_nothide)
    },
    # Integer*1 array
    {'need':'#ctype#',
     '_check':isint1array,
     '_depend':''
     },
    # Integer*-1 array
    {'need':'#ctype#',
     '_check':isunsigned_chararray,
     '_depend':''
     },
    # Integer*-2 array
    {'need':'#ctype#',
     '_check':isunsigned_shortarray,
     '_depend':''
     },
    # Integer*-8 array
    {'need':'#ctype#',
     '_check':isunsigned_long_longarray,
     '_depend':''
     },
    # Complexarray
    {'need':'#ctype#',
     '_check':iscomplexarray,
     '_depend':''
     },
    # Stringarray
    {
        'callfortranappend':{isarrayofstrings:'flen(#varname#),'},
        'need':'string',
        '_check':isstringarray
    }
]
arg_rules=[
{
'separatorsfor':sepdict
},
{ # Common
'frompyobj':['\t/* Processing variable #varname# */',
{debugcapi:'\tfprintf(stderr,"#vardebuginfo#\\n");'},],
'cleanupfrompyobj':'\t/* End of cleaning variable #varname# */',
'_depend':'',
'need':typedef_need_dict,
},
# Doc signatures
{
'docstropt':{l_and(isoptional,isintent_nothide):'\t#pydocsign#'},
'docstrreq':{l_and(isrequired,isintent_nothide):'\t#pydocsign#'},
'docstrout':{isintent_out:'\t#pydocsignout#'},
'latexdocstropt':{l_and(isoptional,isintent_nothide):['\\item[]{{}\\verb@#pydocsign#@{}}',
{hasnote:'--- #note#'}]},
'latexdocstrreq':{l_and(isrequired,isintent_nothide):['\\item[]{{}\\verb@#pydocsign#@{}}',
{hasnote:'--- #note#'}]},
'latexdocstrout':{isintent_out:['\\item[]{{}\\verb@#pydocsignout#@{}}',
{l_and(hasnote,isintent_hide):'--- #note#',
l_and(hasnote,isintent_nothide):'--- See above.'}]},
'depend':''
},
# Required/Optional arguments
{
'kwlist':'"#varname#",',
'docsign':'#varname#,',
'_check':l_and(isintent_nothide,l_not(isoptional))
},
{
'kwlistopt':'"#varname#",',
'docsignopt':'#varname#=#showinit#,',
'docsignoptshort':'#varname#,',
'_check':l_and(isintent_nothide,isoptional)
},
# Docstring/BuildValue
{
'docreturn':'#outvarname#,',
'returnformat':'#varrformat#',
'_check':isintent_out
},
# Externals (call-back functions)
{ # Common
'docsignxa':{isintent_nothide:'#varname#_extra_args=(),'},
'docsignxashort':{isintent_nothide:'#varname#_extra_args,'},
'docstropt':{isintent_nothide:'\t#varname#_extra_args := () input tuple'},
'docstrcbs':'#cbdocstr#',
'latexdocstrcbs':'\\item[] #cblatexdocstr#',
'latexdocstropt':{isintent_nothide:'\\item[]{{}\\verb@#varname#_extra_args := () input tuple@{}} --- Extra arguments for call-back function {{}\\verb@#varname#@{}}.'},
'decl':['\tPyObject *#varname#_capi = Py_None;',
'\tPyTupleObject *#varname#_xa_capi = NULL;',
'\tPyTupleObject *#varname#_args_capi = NULL;',
'\tint #varname#_nofargs_capi = 0;',
{l_not(isintent_callback):'\t#cbname#_typedef #varname#_cptr;'}
],
'kwlistxa':{isintent_nothide:'"#varname#_extra_args",'},
'argformat':{isrequired:'O'},
'keyformat':{isoptional:'O'},
'xaformat':{isintent_nothide:'O!'},
'args_capi':{isrequired:',&#varname#_capi'},
'keys_capi':{isoptional:',&#varname#_capi'},
'keys_xa':',&PyTuple_Type,&#varname#_xa_capi',
'setjmpbuf':'(setjmp(#cbname#_jmpbuf))',
'callfortran':{l_not(isintent_callback):'#varname#_cptr,'},
'need':['#cbname#','setjmp.h'],
'_check':isexternal
},
{
'frompyobj':[{l_not(isintent_callback):"""\
if(F2PyCapsule_Check(#varname#_capi)) {
#varname#_cptr = F2PyCapsule_AsVoidPtr(#varname#_capi);
} else {
#varname#_cptr = #cbname#;
}
"""},{isintent_callback:"""\
if (#varname#_capi==Py_None) {
#varname#_capi = PyObject_GetAttrString(#modulename#_module,\"#varname#\");
if (#varname#_capi) {
if (#varname#_xa_capi==NULL) {
if (PyObject_HasAttrString(#modulename#_module,\"#varname#_extra_args\")) {
PyObject* capi_tmp = PyObject_GetAttrString(#modulename#_module,\"#varname#_extra_args\");
if (capi_tmp)
#varname#_xa_capi = (PyTupleObject *)PySequence_Tuple(capi_tmp);
else
#varname#_xa_capi = (PyTupleObject *)Py_BuildValue(\"()\");
if (#varname#_xa_capi==NULL) {
PyErr_SetString(#modulename#_error,\"Failed to convert #modulename#.#varname#_extra_args to tuple.\\n\");
return NULL;
}
}
}
}
if (#varname#_capi==NULL) {
PyErr_SetString(#modulename#_error,\"Callback #varname# not defined (as an argument or module #modulename# attribute).\\n\");
return NULL;
}
}
"""},
## {l_not(isintent_callback):"""\
## if (#varname#_capi==Py_None) {
## printf(\"hoi\\n\");
## }
## """},
"""\
\t#varname#_nofargs_capi = #cbname#_nofargs;
\tif (create_cb_arglist(#varname#_capi,#varname#_xa_capi,#maxnofargs#,#nofoptargs#,&#cbname#_nofargs,&#varname#_args_capi,\"failed in processing argument list for call-back #varname#.\")) {
\t\tjmp_buf #varname#_jmpbuf;""",
{debugcapi:["""\
\t\tfprintf(stderr,\"debug-capi:Assuming %d arguments; at most #maxnofargs#(-#nofoptargs#) is expected.\\n\",#cbname#_nofargs);
\t\tCFUNCSMESSPY(\"for #varname#=\",#cbname#_capi);""",
{l_not(isintent_callback):"""\t\tfprintf(stderr,\"#vardebugshowvalue# (call-back in C).\\n\",#cbname#);"""}]},
"""\
\t\tCFUNCSMESS(\"Saving jmpbuf for `#varname#`.\\n\");
\t\tSWAP(#varname#_capi,#cbname#_capi,PyObject);
\t\tSWAP(#varname#_args_capi,#cbname#_args_capi,PyTupleObject);
\t\tmemcpy(&#varname#_jmpbuf,&#cbname#_jmpbuf,sizeof(jmp_buf));""",
],
'cleanupfrompyobj':
"""\
\t\tCFUNCSMESS(\"Restoring jmpbuf for `#varname#`.\\n\");
\t\t#cbname#_capi = #varname#_capi;
\t\tPy_DECREF(#cbname#_args_capi);
\t\t#cbname#_args_capi = #varname#_args_capi;
\t\t#cbname#_nofargs = #varname#_nofargs_capi;
\t\tmemcpy(&#cbname#_jmpbuf,&#varname#_jmpbuf,sizeof(jmp_buf));
\t}""",
'need':['SWAP','create_cb_arglist'],
'_check':isexternal,
'_depend':''
},
# Scalars (not complex)
{ # Common
'decl':'\t#ctype# #varname# = 0;',
'pyobjfrom':{debugcapi:'\tfprintf(stderr,"#vardebugshowvalue#\\n",#varname#);'},
'callfortran':{isintent_c:'#varname#,',l_not(isintent_c):'&#varname#,'},
'return':{isintent_out:',#varname#'},
'_check':l_and(isscalar,l_not(iscomplex))
},{
'need':{hasinitvalue:'math.h'},
'_check':l_and(isscalar,l_not(iscomplex)),
#'_depend':''
},{ # Not hidden
'decl':'\tPyObject *#varname#_capi = Py_None;',
'argformat':{isrequired:'O'},
'keyformat':{isoptional:'O'},
'args_capi':{isrequired:',&#varname#_capi'},
'keys_capi':{isoptional:',&#varname#_capi'},
'pyobjfrom':{isintent_inout:"""\
\tf2py_success = try_pyarr_from_#ctype#(#varname#_capi,&#varname#);
\tif (f2py_success) {"""},
'closepyobjfrom':{isintent_inout:"\t} /*if (f2py_success) of #varname# pyobjfrom*/"},
'need':{isintent_inout:'try_pyarr_from_#ctype#'},
'_check':l_and(isscalar,l_not(iscomplex),isintent_nothide)
},{
'frompyobj':[
# hasinitvalue...
# if pyobj is None:
# varname = init
# else
# from_pyobj(varname)
#
# isoptional and noinitvalue...
# if pyobj is not None:
# from_pyobj(varname)
# else:
# varname is uninitialized
#
# ...
# from_pyobj(varname)
#
{hasinitvalue:'\tif (#varname#_capi == Py_None) #varname# = #init#; else',
'_depend':''},
{l_and(isoptional,l_not(hasinitvalue)):'\tif (#varname#_capi != Py_None)',
'_depend':''},
{l_not(islogical):'''\
\t\tf2py_success = #ctype#_from_pyobj(&#varname#,#varname#_capi,"#pyname#() #nth# (#varname#) can\'t be converted to #ctype#");
\tif (f2py_success) {'''},
{islogical:'''\
\t\t#varname# = (#ctype#)PyObject_IsTrue(#varname#_capi);
\t\tf2py_success = 1;
\tif (f2py_success) {'''},
],
'cleanupfrompyobj':'\t} /*if (f2py_success) of #varname#*/',
'need':{l_not(islogical):'#ctype#_from_pyobj'},
'_check':l_and(isscalar,l_not(iscomplex),isintent_nothide),
'_depend':''
# },{ # Hidden
# '_check':l_and(isscalar,l_not(iscomplex),isintent_hide)
},{ # Hidden
'frompyobj':{hasinitvalue:'\t#varname# = #init#;'},
'need':typedef_need_dict,
'_check':l_and(isscalar,l_not(iscomplex),isintent_hide),
'_depend':''
},{ # Common
'frompyobj':{debugcapi:'\tfprintf(stderr,"#vardebugshowvalue#\\n",#varname#);'},
'_check':l_and(isscalar,l_not(iscomplex)),
'_depend':''
},
# Complex scalars
{ # Common
'decl':'\t#ctype# #varname#;',
'callfortran':{isintent_c:'#varname#,',l_not(isintent_c):'&#varname#,'},
'pyobjfrom':{debugcapi:'\tfprintf(stderr,"#vardebugshowvalue#\\n",#varname#.r,#varname#.i);'},
'return':{isintent_out:',#varname#_capi'},
'_check':iscomplex
},{ # Not hidden
'decl':'\tPyObject *#varname#_capi = Py_None;',
'argformat':{isrequired:'O'},
'keyformat':{isoptional:'O'},
'args_capi':{isrequired:',&#varname#_capi'},
'keys_capi':{isoptional:',&#varname#_capi'},
'need':{isintent_inout:'try_pyarr_from_#ctype#'},
'pyobjfrom':{isintent_inout:"""\
\t\tf2py_success = try_pyarr_from_#ctype#(#varname#_capi,&#varname#);
\t\tif (f2py_success) {"""},
'closepyobjfrom':{isintent_inout:"\t\t} /*if (f2py_success) of #varname# pyobjfrom*/"},
'_check':l_and(iscomplex,isintent_nothide)
},{
'frompyobj':[{hasinitvalue:'\tif (#varname#_capi==Py_None) {#varname#.r = #init.r#, #varname#.i = #init.i#;} else'},
{l_and(isoptional,l_not(hasinitvalue)):'\tif (#varname#_capi != Py_None)'},
# '\t\tf2py_success = #ctype#_from_pyobj(&#varname#,#varname#_capi,"#ctype#_from_pyobj failed in converting #nth# `#varname#\' of #pyname# to C #ctype#\\n");'
'\t\tf2py_success = #ctype#_from_pyobj(&#varname#,#varname#_capi,"#pyname#() #nth# (#varname#) can\'t be converted to #ctype#");'
'\n\tif (f2py_success) {'],
'cleanupfrompyobj':'\t} /*if (f2py_success) of #varname# frompyobj*/',
'need':['#ctype#_from_pyobj'],
'_check':l_and(iscomplex,isintent_nothide),
'_depend':''
},{ # Hidden
'decl':{isintent_out:'\tPyObject *#varname#_capi = Py_None;'},
'_check':l_and(iscomplex,isintent_hide)
},{
'frompyobj': {hasinitvalue:'\t#varname#.r = #init.r#, #varname#.i = #init.i#;'},
'_check':l_and(iscomplex,isintent_hide),
'_depend':''
},{ # Common
'pyobjfrom':{isintent_out:'\t#varname#_capi = pyobj_from_#ctype#1(#varname#);'},
'need':['pyobj_from_#ctype#1'],
'_check':iscomplex
},{
'frompyobj':{debugcapi:'\tfprintf(stderr,"#vardebugshowvalue#\\n",#varname#.r,#varname#.i);'},
'_check':iscomplex,
'_depend':''
},
# String
{ # Common
'decl':['\t#ctype# #varname# = NULL;',
'\tint slen(#varname#);',
'\tPyObject *#varname#_capi = Py_None;'],
'callfortran':'#varname#,',
'callfortranappend':'slen(#varname#),',
'pyobjfrom':{debugcapi:'\tfprintf(stderr,"#vardebugshowvalue#\\n",slen(#varname#),#varname#);'},
# 'freemem':'\tSTRINGFREE(#varname#);',
'return':{isintent_out:',#varname#'},
'need':['len..'],#'STRINGFREE'],
'_check':isstring
},{ # Common
'frompyobj':"""\
\tslen(#varname#) = #length#;
\tf2py_success = #ctype#_from_pyobj(&#varname#,&slen(#varname#),#init#,#varname#_capi,\"#ctype#_from_pyobj failed in converting #nth# `#varname#\' of #pyname# to C #ctype#\");
\tif (f2py_success) {""",
'cleanupfrompyobj':"""\
\t\tSTRINGFREE(#varname#);
\t} /*if (f2py_success) of #varname#*/""",
'need':['#ctype#_from_pyobj','len..','STRINGFREE'],
'_check':isstring,
'_depend':''
},{ # Not hidden
'argformat':{isrequired:'O'},
'keyformat':{isoptional:'O'},
'args_capi':{isrequired:',&#varname#_capi'},
'keys_capi':{isoptional:',&#varname#_capi'},
'pyobjfrom':{isintent_inout:'''\
\tf2py_success = try_pyarr_from_#ctype#(#varname#_capi,#varname#);
\tif (f2py_success) {'''},
'closepyobjfrom':{isintent_inout:'\t} /*if (f2py_success) of #varname# pyobjfrom*/'},
'need':{isintent_inout:'try_pyarr_from_#ctype#'},
'_check':l_and(isstring,isintent_nothide)
},{ # Hidden
'_check':l_and(isstring,isintent_hide)
},{
'frompyobj':{debugcapi:'\tfprintf(stderr,"#vardebugshowvalue#\\n",slen(#varname#),#varname#);'},
'_check':isstring,
'_depend':''
},
# Array
{ # Common
'decl':['\t#ctype# *#varname# = NULL;',
'\tnpy_intp #varname#_Dims[#rank#] = {#rank*[-1]#};',
'\tconst int #varname#_Rank = #rank#;',
'\tPyArrayObject *capi_#varname#_tmp = NULL;',
'\tint capi_#varname#_intent = 0;',
],
'callfortran':'#varname#,',
'return':{isintent_out:',capi_#varname#_tmp'},
'need':'len..',
'_check':isarray
},{ # intent(overwrite) array
'decl':'\tint capi_overwrite_#varname# = 1;',
'kwlistxa':'"overwrite_#varname#",',
'xaformat':'i',
'keys_xa':',&capi_overwrite_#varname#',
'docsignxa':'overwrite_#varname#=1,',
'docsignxashort':'overwrite_#varname#,',
'docstropt':'\toverwrite_#varname# := 1 input int',
'_check':l_and(isarray,isintent_overwrite),
},{
'frompyobj':'\tcapi_#varname#_intent |= (capi_overwrite_#varname#?0:F2PY_INTENT_COPY);',
'_check':l_and(isarray,isintent_overwrite),
'_depend':'',
},
{ # intent(copy) array
'decl':'\tint capi_overwrite_#varname# = 0;',
'kwlistxa':'"overwrite_#varname#",',
'xaformat':'i',
'keys_xa':',&capi_overwrite_#varname#',
'docsignxa':'overwrite_#varname#=0,',
'docsignxashort':'overwrite_#varname#,',
'docstropt':'\toverwrite_#varname# := 0 input int',
'_check':l_and(isarray,isintent_copy),
},{
'frompyobj':'\tcapi_#varname#_intent |= (capi_overwrite_#varname#?0:F2PY_INTENT_COPY);',
'_check':l_and(isarray,isintent_copy),
'_depend':'',
},{
'need':[{hasinitvalue:'forcomb'},{hasinitvalue:'CFUNCSMESS'}],
'_check':isarray,
'_depend':''
},{ # Not hidden
'decl':'\tPyObject *#varname#_capi = Py_None;',
'argformat':{isrequired:'O'},
'keyformat':{isoptional:'O'},
'args_capi':{isrequired:',&#varname#_capi'},
'keys_capi':{isoptional:',&#varname#_capi'},
# 'pyobjfrom':{isintent_inout:"""\
# /* Partly because of the following hack, intent(inout) is depreciated,
# Use intent(in,out) instead.
# \tif ((#varname#_capi != Py_None) && PyArray_Check(#varname#_capi) \\
# \t\t&& (#varname#_capi != (PyObject *)capi_#varname#_tmp)) {
# \t\tif (((PyArrayObject *)#varname#_capi)->nd != capi_#varname#_tmp->nd) {
# \t\t\tif (#varname#_capi != capi_#varname#_tmp->base)
# \t\t\t\tcopy_ND_array((PyArrayObject *)capi_#varname#_tmp->base,(PyArrayObject *)#varname#_capi);
# \t\t} else
# \t\t\tcopy_ND_array(capi_#varname#_tmp,(PyArrayObject *)#varname#_capi);
# \t}
# */
# """},
# 'need':{isintent_inout:'copy_ND_array'},
'_check':l_and(isarray,isintent_nothide)
},{
'frompyobj':['\t#setdims#;',
'\tcapi_#varname#_intent |= #intent#;',
{isintent_hide:'\tcapi_#varname#_tmp = array_from_pyobj(#atype#,#varname#_Dims,#varname#_Rank,capi_#varname#_intent,Py_None);'},
{isintent_nothide:'\tcapi_#varname#_tmp = array_from_pyobj(#atype#,#varname#_Dims,#varname#_Rank,capi_#varname#_intent,#varname#_capi);'},
"""\
\tif (capi_#varname#_tmp == NULL) {
\t\tif (!PyErr_Occurred())
\t\t\tPyErr_SetString(#modulename#_error,\"failed in converting #nth# `#varname#\' of #pyname# to C/Fortran array\" );
\t} else {
\t\t#varname# = (#ctype# *)(capi_#varname#_tmp->data);
""",
{hasinitvalue:[
{isintent_nothide:'\tif (#varname#_capi == Py_None) {'},
{isintent_hide:'\t{'},
{iscomplexarray:'\t\t#ctype# capi_c;'},
"""\
\t\tint *_i,capi_i=0;
\t\tCFUNCSMESS(\"#name#: Initializing #varname#=#init#\\n\");
\t\tif (initforcomb(capi_#varname#_tmp->dimensions,capi_#varname#_tmp->nd,1)) {
\t\t\twhile ((_i = nextforcomb()))
\t\t\t\t#varname#[capi_i++] = #init#; /* fortran way */
\t\t} else {
\t\t\tif (!PyErr_Occurred())
\t\t\t\tPyErr_SetString(#modulename#_error,\"Initialization of #nth# #varname# failed (initforcomb).\");
\t\t\tf2py_success = 0;
\t\t}
\t}
\tif (f2py_success) {"""]},
],
'cleanupfrompyobj':[ # note that this list will be reversed
'\t} /*if (capi_#varname#_tmp == NULL) ... else of #varname#*/',
{l_not(l_or(isintent_out,isintent_hide)):"""\
\tif((PyObject *)capi_#varname#_tmp!=#varname#_capi) {
\t\tPy_XDECREF(capi_#varname#_tmp); }"""},
{l_and(isintent_hide,l_not(isintent_out)):"""\t\tPy_XDECREF(capi_#varname#_tmp);"""},
{hasinitvalue:'\t} /*if (f2py_success) of #varname# init*/'},
],
'_check':isarray,
'_depend':''
},
# { # Hidden
# 'freemem':{l_not(isintent_out):'\tPy_XDECREF(capi_#varname#_tmp);'},
# '_check':l_and(isarray,isintent_hide)
# },
# Scalararray
{ # Common
'_check':l_and(isarray,l_not(iscomplexarray))
},{ # Not hidden
'_check':l_and(isarray,l_not(iscomplexarray),isintent_nothide)
},
# Integer*1 array
{'need':'#ctype#',
'_check':isint1array,
'_depend':''
},
# Integer*-1 array
{'need':'#ctype#',
'_check':isunsigned_chararray,
'_depend':''
},
# Integer*-2 array
{'need':'#ctype#',
'_check':isunsigned_shortarray,
'_depend':''
},
# Integer*-8 array
{'need':'#ctype#',
'_check':isunsigned_long_longarray,
'_depend':''
},
# Complexarray
{'need':'#ctype#',
'_check':iscomplexarray,
'_depend':''
},
# Stringarray
{
'callfortranappend':{isarrayofstrings:'flen(#varname#),'},
'need':'string',
'_check':isstringarray
}
]
################# Rules for checking ###############
# Rules for user-supplied check() expressions attached to variables: each
# emits a CHECK* macro guard around the argument-processing code; `_break'
# stops after the first matching rule.
check_rules=[
    {
        'frompyobj':{debugcapi:'\tfprintf(stderr,\"debug-capi:Checking `#check#\'\\n\");'},
        'need':'len..'
    },{
        'frompyobj':'\tCHECKSCALAR(#check#,\"#check#\",\"#nth# #varname#\",\"#varshowvalue#\",#varname#) {',
        'cleanupfrompyobj':'\t} /*CHECKSCALAR(#check#)*/',
        'need':'CHECKSCALAR',
        '_check':l_and(isscalar,l_not(iscomplex)),
        '_break':''
    },{
        'frompyobj':'\tCHECKSTRING(#check#,\"#check#\",\"#nth# #varname#\",\"#varshowvalue#\",#varname#) {',
        'cleanupfrompyobj':'\t} /*CHECKSTRING(#check#)*/',
        'need':'CHECKSTRING',
        '_check':isstring,
        '_break':''
    },{
        'need':'CHECKARRAY',
        'frompyobj':'\tCHECKARRAY(#check#,\"#check#\",\"#nth# #varname#\") {',
        'cleanupfrompyobj':'\t} /*CHECKARRAY(#check#)*/',
        '_check':isarray,
        '_break':''
    },{
        'need':'CHECKGENERIC',
        'frompyobj':'\tCHECKGENERIC(#check#,\"#check#\",\"#nth# #varname#\") {',
        'cleanupfrompyobj':'\t} /*CHECKGENERIC(#check#)*/',
    }
]
########## Applying the rules. No need to modify what follows #############
#################### Build C/API module #######################
def buildmodule(m, um):
    """Generate the C/API extension module source for module block `m`.

    Applies the module/routine/argument rule tables to every interfaced
    routine, writes the C source, optional ReST/LaTeX documentation and
    any required Fortran 77/90 wrapper files into options['buildpath'].

    Parameters:
      m  -- module block dictionary (as produced by crackfortran).
      um -- list of use-d module blocks whose variables are exported via
            use_rules.

    Returns a dict with key 'csrc' (C source file name) and, when
    produced, 'fsrc' (Fortran wrapper file) and 'ltx' (LaTeX doc file).
    """
    global f2py_version, options
    outmess('\tBuilding module "%s"...\n' % (m['name']))
    ret = {}
    mod_rules = defmod_rules[:]
    vrd = modsign2map(m)
    rd = dictappend({'f2py_version': f2py_version}, vrd)
    funcwrappers = []
    funcwrappers2 = []  # F90 codes
    # Locate the interface body of every interfaced routine and apply the
    # routine-level rules to it (including any ENTRY-generated clones).
    for n in m['interfaced']:
        nb = None
        for bi in m['body']:
            if not bi['block'] == 'interface':
                errmess('buildmodule: Expected interface block. Skipping.\n')
                continue
            for b in bi['body']:
                if b['name'] == n:
                    nb = b
                    break
        if not nb:
            # NOTE: message wording fixed ("found" -> "find").
            errmess('buildmodule: Could not find the body of interfaced routine "%s". Skipping.\n' % (n))
            continue
        nb_list = [nb]
        if 'entry' in nb:
            # Each ENTRY statement becomes a separate wrapper with its own
            # name and argument list.
            for k, a in nb['entry'].items():
                nb1 = copy.deepcopy(nb)
                del nb1['entry']
                nb1['name'] = k
                nb1['args'] = a
                nb_list.append(nb1)
        for nb in nb_list:
            api, wrap = buildapi(nb)
            if wrap:
                if ismoduleroutine(nb):
                    funcwrappers2.append(wrap)
                else:
                    funcwrappers.append(wrap)
            ar = applyrules(api, vrd)
            rd = dictappend(rd, ar)
    # Construct COMMON block support
    cr, wrap = common_rules.buildhooks(m)
    if wrap:
        funcwrappers.append(wrap)
    ar = applyrules(cr, vrd)
    rd = dictappend(rd, ar)
    # Construct F90 module support
    mr, wrap = f90mod_rules.buildhooks(m)
    if wrap:
        funcwrappers2.append(wrap)
    ar = applyrules(mr, vrd)
    rd = dictappend(rd, ar)
    for u in um:
        ar = use_rules.buildusevars(u, m['use'][u['name']])
        rd = dictappend(rd, ar)
    # Resolve every collected `need' key against the cfuncs registries in
    # priority order (includes before typedefs before macros before code).
    needs = cfuncs.get_needs()
    code = {}
    for n in needs.keys():
        code[n] = []
        for k in needs[n]:
            c = ''
            if k in cfuncs.includes0:
                c = cfuncs.includes0[k]
            elif k in cfuncs.includes:
                c = cfuncs.includes[k]
            elif k in cfuncs.userincludes:
                c = cfuncs.userincludes[k]
            elif k in cfuncs.typedefs:
                c = cfuncs.typedefs[k]
            elif k in cfuncs.typedefs_generated:
                c = cfuncs.typedefs_generated[k]
            elif k in cfuncs.cppmacros:
                c = cfuncs.cppmacros[k]
            elif k in cfuncs.cfuncs:
                c = cfuncs.cfuncs[k]
            elif k in cfuncs.callbacks:
                c = cfuncs.callbacks[k]
            elif k in cfuncs.f90modhooks:
                c = cfuncs.f90modhooks[k]
            elif k in cfuncs.commonhooks:
                c = cfuncs.commonhooks[k]
            else:
                # repr() replaces the Python 2-only backquote syntax.
                errmess('buildmodule: unknown need %s.\n' % (repr(k)))
                continue
            code[n].append(c)
    mod_rules.append(code)
    for r in mod_rules:
        if ('_check' in r and r['_check'](m)) or ('_check' not in r):
            ar = applyrules(r, vrd, m)
            rd = dictappend(rd, ar)
    ar = applyrules(module_rules, rd)
    fn = os.path.join(options['buildpath'], vrd['coutput'])
    ret['csrc'] = fn
    # `with' guarantees the handle is closed even if writing fails.
    with open(fn, 'w') as f:
        f.write(ar['modulebody'].replace('\t', 2 * ' '))
    outmess('\tWrote C/API module "%s" to file "%s"\n' % (m['name'], fn))
    if options['dorestdoc']:
        fn = os.path.join(options['buildpath'], vrd['modulename'] + 'module.rest')
        with open(fn, 'w') as f:
            f.write('.. -*- rest -*-\n')
            f.write('\n'.join(ar['restdoc']))
        outmess('\tReST Documentation is saved to file "%s/%smodule.rest"\n' % (options['buildpath'], vrd['modulename']))
    if options['dolatexdoc']:
        fn = os.path.join(options['buildpath'], vrd['modulename'] + 'module.tex')
        ret['ltx'] = fn
        with open(fn, 'w') as f:
            f.write('%% This file is auto-generated with f2py (version:%s)\n' % (f2py_version))
            if 'shortlatex' not in options:
                f.write('\\documentclass{article}\n\\usepackage{a4wide}\n\\begin{document}\n\\tableofcontents\n\n')
            f.write('\n'.join(ar['latexdoc']))
            if 'shortlatex' not in options:
                f.write('\\end{document}')
        outmess('\tDocumentation is saved to file "%s/%smodule.tex"\n' % (options['buildpath'], vrd['modulename']))
    if funcwrappers:
        wn = os.path.join(options['buildpath'], vrd['f2py_wrapper_output'])
        ret['fsrc'] = wn
        with open(wn, 'w') as f:
            f.write('C -*- fortran -*-\n')
            f.write('C This file is autogenerated with f2py (version:%s)\n' % (f2py_version))
            f.write('C It contains Fortran 77 wrappers to fortran functions.\n')
            lines = []
            # Fold long fixed-form lines using continuation markers.
            for l in ('\n\n'.join(funcwrappers) + '\n').split('\n'):
                if l and l[0] == ' ':
                    while len(l) >= 66:
                        lines.append(l[:66] + '\n &')
                        l = l[66:]
                    lines.append(l + '\n')
                else:
                    lines.append(l + '\n')
            lines = ''.join(lines).replace('\n &\n', '\n')
            f.write(lines)
        outmess('\tFortran 77 wrappers are saved to "%s"\n' % (wn))
    if funcwrappers2:
        wn = os.path.join(options['buildpath'], '%s-f2pywrappers2.f90' % (vrd['modulename']))
        ret['fsrc'] = wn
        with open(wn, 'w') as f:
            f.write('! -*- f90 -*-\n')
            f.write('! This file is autogenerated with f2py (version:%s)\n' % (f2py_version))
            f.write('! It contains Fortran 90 wrappers to fortran functions.\n')
            lines = []
            # Free-form sources are folded at column 72 then 66.
            for l in ('\n\n'.join(funcwrappers2) + '\n').split('\n'):
                if len(l) > 72 and l[0] == ' ':
                    lines.append(l[:72] + '&\n &')
                    l = l[72:]
                    while len(l) > 66:
                        lines.append(l[:66] + '&\n &')
                        l = l[66:]
                    lines.append(l + '\n')
                else:
                    lines.append(l + '\n')
            lines = ''.join(lines).replace('\n &\n', '\n')
            f.write(lines)
        outmess('\tFortran 90 wrappers are saved to "%s"\n' % (wn))
    return ret
################## Build C/API function #############
# English ordinal suffixes keyed by the last decimal digit of the argument
# position (1st, 2nd, 3rd, 4th, ...); used to build "#nth#" messages.
stnd={1:'st',2:'nd',3:'rd',4:'th',5:'th',6:'th',7:'th',8:'th',9:'th',0:'th'}
def buildapi(rout):
    """Build the C/API wrapper dictionary for a single routine block.

    Applies rout_rules, then arg_rules/aux_rules per argument (required
    and optional arguments first, then dependent arguments), then any
    check_rules, and finally applies routine_rules to the accumulated
    replacement dictionary.

    Returns (ar, wrap) where `ar` is the applied-rules dictionary for the
    routine and `wrap` is Fortran wrapper source text ('' when no wrapper
    is needed).
    """
    rout, wrap = func2subr.assubr(rout)
    args, depargs = getargs2(rout)
    capi_maps.depargs = depargs
    var = rout['vars']
    auxvars = [a for a in var.keys() if isintent_aux(var[a])]
    if ismoduleroutine(rout):
        outmess('\t\t\tConstructing wrapper function "%s.%s"...\n' % (rout['modulename'], rout['name']))
    else:
        outmess('\t\tConstructing wrapper function "%s"...\n' % (rout['name']))
    # Routine
    vrd = routsign2map(rout)
    rd = dictappend({}, vrd)
    for r in rout_rules:
        if ('_check' in r and r['_check'](rout)) or ('_check' not in r):
            ar = applyrules(r, vrd, rout)
            rd = dictappend(rd, ar)
    # Args: first pass applies all non-`_depend' rules.
    nth, nthk = 0, 0
    savevrd = {}
    for a in args:
        vrd = sign2map(a, var[a])
        if isintent_aux(var[a]):
            _rules = aux_rules
        else:
            _rules = arg_rules
        if not isintent_hide(var[a]):
            if not isoptional(var[a]):
                nth = nth + 1
                # repr() replaces the Python 2-only backquote syntax.
                vrd['nth'] = repr(nth) + stnd[nth % 10] + ' argument'
            else:
                nthk = nthk + 1
                vrd['nth'] = repr(nthk) + stnd[nthk % 10] + ' keyword'
        else:
            vrd['nth'] = 'hidden'
        savevrd[a] = vrd
        for r in _rules:
            if '_depend' in r:
                continue
            if ('_check' in r and r['_check'](var[a])) or ('_check' not in r):
                ar = applyrules(r, vrd, var[a])
                rd = dictappend(rd, ar)
                if '_break' in r:
                    break
    # Second pass: dependent arguments get the `_depend' rules and checks.
    for a in depargs:
        if isintent_aux(var[a]):
            _rules = aux_rules
        else:
            _rules = arg_rules
        vrd = savevrd[a]
        for r in _rules:
            if '_depend' not in r:
                continue
            if ('_check' in r and r['_check'](var[a])) or ('_check' not in r):
                ar = applyrules(r, vrd, var[a])
                rd = dictappend(rd, ar)
                if '_break' in r:
                    break
        if 'check' in var[a]:
            for c in var[a]['check']:
                vrd['check'] = c
                ar = applyrules(check_rules, vrd, var[a])
                rd = dictappend(rd, ar)
    # Cleanup code must run in reverse order of construction.
    # isinstance(..., list) replaces the Python 2-only types.ListType check.
    if isinstance(rd['cleanupfrompyobj'], list):
        rd['cleanupfrompyobj'].reverse()
    if isinstance(rd['closepyobjfrom'], list):
        rd['closepyobjfrom'].reverse()
    rd['docsignature'] = stripcomma(replace('#docsign##docsignopt##docsignxa#',
                                            {'docsign': rd['docsign'],
                                             'docsignopt': rd['docsignopt'],
                                             'docsignxa': rd['docsignxa']}))
    optargs = stripcomma(replace('#docsignopt##docsignxa#',
                                 {'docsignxa': rd['docsignxashort'],
                                  'docsignopt': rd['docsignoptshort']}
                                 ))
    if optargs == '':
        rd['docsignatureshort'] = stripcomma(replace('#docsign#', {'docsign': rd['docsign']}))
    else:
        rd['docsignatureshort'] = replace('#docsign#[#docsignopt#]',
                                          {'docsign': rd['docsign'],
                                           'docsignopt': optargs,
                                           })
    rd['latexdocsignatureshort'] = rd['docsignatureshort'].replace('_', '\\_')
    rd['latexdocsignatureshort'] = rd['latexdocsignatureshort'].replace(',', ', ')
    cfs = stripcomma(replace('#callfortran##callfortranappend#', {'callfortran': rd['callfortran'], 'callfortranappend': rd['callfortranappend']}))
    if len(rd['callfortranappend']) > 1:
        rd['callcompaqfortran'] = stripcomma(replace('#callfortran# 0,#callfortranappend#', {'callfortran': rd['callfortran'], 'callfortranappend': rd['callfortranappend']}))
    else:
        rd['callcompaqfortran'] = cfs
    rd['callfortran'] = cfs
    if isinstance(rd['docreturn'], list):
        rd['docreturn'] = stripcomma(replace('#docreturn#', {'docreturn': rd['docreturn']})) + ' = '
    rd['docstrsigns'] = []
    rd['latexdocstrsigns'] = []
    for k in ['docstrreq', 'docstropt', 'docstrout', 'docstrcbs']:
        if k in rd and isinstance(rd[k], list):
            rd['docstrsigns'] = rd['docstrsigns'] + rd[k]
        k = 'latex' + k
        if k in rd and isinstance(rd[k], list):
            rd['latexdocstrsigns'] = rd['latexdocstrsigns'] + rd[k][0:1] + \
                ['\\begin{description}'] + rd[k][1:] + \
                ['\\end{description}']
    # Workaround for Python 2.6, 2.6.1 bug: http://bugs.python.org/issue4720
    if rd['keyformat'] or rd['xaformat']:
        argformat = rd['argformat']
        if isinstance(argformat, list):
            argformat.append('|')
        else:
            assert isinstance(argformat, str), repr((argformat, type(argformat)))
            rd['argformat'] += '|'
    ar = applyrules(routine_rules, rd)
    if ismoduleroutine(rout):
        outmess('\t\t\t %s\n' % (ar['docshort']))
    else:
        outmess('\t\t %s\n' % (ar['docshort']))
    return ar, wrap
#################### EOF rules.py #######################
| agpl-3.0 |
abhiQmar/servo | tests/wpt/css-tests/tools/wptserve/wptserve/handlers.py | 87 | 13046 | import cgi
import json
import os
import traceback
import urllib
import urlparse
from .constants import content_types
from .pipes import Pipeline, template
from .ranges import RangeParser
from .request import Authentication
from .response import MultipartContent
from .utils import HTTPException
__all__ = ["file_handler", "python_script_handler",
"FunctionHandler", "handler", "json_handler",
"as_is_handler", "ErrorHandler", "BasicAuthHandler"]
def guess_content_type(path):
    """Guess the MIME type of *path* from its file extension.

    Falls back to "application/octet-stream" when the extension is not
    present in the content_types table.
    """
    ext = os.path.splitext(path)[1].lstrip(".")
    # Single dict lookup via .get instead of membership test + indexing.
    return content_types.get(ext, "application/octet-stream")
def filesystem_path(base_path, request, url_base="/"):
    """Map the request URL onto a filesystem path under *base_path*.

    *base_path* defaults to the request's doc_root.  Raises
    HTTPException(404) for traversal attempts (".." anywhere in the
    decoded path) and for results that escape the root.
    """
    root = request.doc_root if base_path is None else base_path

    rel = urllib.unquote(request.url_parts.path)
    if rel.startswith(url_base):
        rel = rel[len(url_base):]

    # Reject anything that smells of directory traversal.
    if ".." in rel:
        raise HTTPException(404)

    resolved = os.path.join(root, rel)

    # Otherwise setting path to / allows access outside the root directory
    if not resolved.startswith(root):
        raise HTTPException(404)

    return resolved
class DirectoryHandler(object):
    """Handler that renders an HTML index listing for a directory URL.

    Only URLs ending in "/" are accepted; anything else raises
    HTTPException(404).
    """
    def __init__(self, base_path=None, url_base="/"):
        # base_path: filesystem root (defaults to the request's doc_root
        # at call time, via filesystem_path); url_base: URL prefix that is
        # stripped before mapping onto the filesystem.
        self.base_path = base_path
        self.url_base = url_base
    def __repr__(self):
        return "<%s base_path:%s url_base:%s>" % (self.__class__.__name__, self.base_path, self.url_base)
    def __call__(self, request, response):
        """Fill *response* with an HTML listing of the requested directory."""
        url_path = request.url_parts.path
        if not url_path.endswith("/"):
            raise HTTPException(404)
        path = filesystem_path(self.base_path, request, self.url_base)
        assert os.path.isdir(path)
        response.headers = [("Content-Type", "text/html")]
        response.content = """<!doctype html>
<meta name="viewport" content="width=device-width">
<title>Directory listing for %(path)s</title>
<h1>Directory listing for %(path)s</h1>
<ul>
%(items)s
</ul>
""" % {"path": cgi.escape(url_path),
       "items": "\n".join(self.list_items(url_path, path))} # flake8: noqa
    def list_items(self, base_path, path):
        """Yield one <li> entry per directory member (plus a ".." link)."""
        assert base_path.endswith("/")
        # TODO: this won't actually list all routes, only the
        # ones that correspond to a real filesystem path. It's
        # not possible to list every route that will match
        # something, but it should be possible to at least list the
        # statically defined ones
        if base_path != "/":
            link = urlparse.urljoin(base_path, "..")
            yield ("""<li class="dir"><a href="%(link)s">%(name)s</a></li>""" %
                   {"link": link, "name": ".."})
        for item in sorted(os.listdir(path)):
            link = cgi.escape(urllib.quote(item))
            if os.path.isdir(os.path.join(path, item)):
                link += "/"
                class_ = "dir"
            else:
                class_ = "file"
            yield ("""<li class="%(class)s"><a href="%(link)s">%(name)s</a></li>""" %
                   {"link": link, "name": cgi.escape(item), "class": class_})
class FileHandler(object):
    """Serve files from disk underneath a base path.

    Supports per-file ".headers"/".sub.headers" metadata files, HTTP
    byte-range requests (single and multipart/byteranges), and response
    post-processing via "pipes" (a ?pipe= query parameter or a *.sub.*
    filename triggering template substitution).  Directory URLs are
    delegated to a DirectoryHandler.
    """
    def __init__(self, base_path=None, url_base="/"):
        # base_path defaults to the request's doc_root at call time.
        self.base_path = base_path
        self.url_base = url_base
        self.directory_handler = DirectoryHandler(self.base_path, self.url_base)
    def __repr__(self):
        return "<%s base_path:%s url_base:%s>" % (self.__class__.__name__, self.base_path, self.url_base)
    def __call__(self, request, response):
        """Populate *response* for *request*; raise HTTPException(404) on I/O errors."""
        path = filesystem_path(self.base_path, request, self.url_base)
        if os.path.isdir(path):
            return self.directory_handler(request, response)
        try:
            #This is probably racy with some other process trying to change the file
            file_size = os.stat(path).st_size
            response.headers.update(self.get_headers(request, path))
            if "Range" in request.headers:
                try:
                    byte_ranges = RangeParser()(request.headers['Range'], file_size)
                except HTTPException as e:
                    if e.code == 416:
                        # Unsatisfiable range: advertise the real size.
                        response.headers.set("Content-Range", "bytes */%i" % file_size)
                    raise
            else:
                byte_ranges = None
            data = self.get_data(response, path, byte_ranges)
            response.content = data
            query = urlparse.parse_qs(request.url_parts.query)
            pipeline = None
            if "pipe" in query:
                # Explicit pipe specification takes precedence.
                pipeline = Pipeline(query["pipe"][-1])
            elif os.path.splitext(path)[0].endswith(".sub"):
                # *.sub.* files get automatic template substitution,
                # HTML-escaped for markup-bearing extensions.
                ml_extensions = {".html", ".htm", ".xht", ".xhtml", ".xml", ".svg"}
                escape_type = "html" if os.path.splitext(path)[1] in ml_extensions else "none"
                pipeline = Pipeline("sub(%s)" % escape_type)
            if pipeline is not None:
                response = pipeline(request, response)
            return response
        except (OSError, IOError):
            raise HTTPException(404)
    def get_headers(self, request, path):
        """Collect headers from the directory-wide __dir__ file and the
        per-file headers file, ensuring a Content-Type is present."""
        rv = (self.load_headers(request, os.path.join(os.path.split(path)[0], "__dir__")) +
              self.load_headers(request, path))
        if not any(key.lower() == "content-type" for (key, _) in rv):
            rv.insert(0, ("Content-Type", guess_content_type(path)))
        return rv
    def load_headers(self, request, path):
        """Parse "<path>.sub.headers" (with template substitution) or
        "<path>.headers" into a list of (name, value) tuples; missing
        files yield []."""
        headers_path = path + ".sub.headers"
        if os.path.exists(headers_path):
            use_sub = True
        else:
            headers_path = path + ".headers"
            use_sub = False
        try:
            with open(headers_path) as headers_file:
                data = headers_file.read()
        except IOError:
            return []
        else:
            if use_sub:
                data = template(request, data, escape_type="none")
            return [tuple(item.strip() for item in line.split(":", 1))
                    for line in data.splitlines() if line]
    def get_data(self, response, path, byte_ranges):
        """Return either the handle to a file, or a string containing
        the content of a chunk of the file, if we have a range request."""
        if byte_ranges is None:
            return open(path, 'rb')
        else:
            with open(path, 'rb') as f:
                response.status = 206
                if len(byte_ranges) > 1:
                    parts_content_type, content = self.set_response_multipart(response,
                                                                              byte_ranges,
                                                                              f)
                    for byte_range in byte_ranges:
                        content.append_part(self.get_range_data(f, byte_range),
                                            parts_content_type,
                                            [("Content-Range", byte_range.header_value())])
                    return content
                else:
                    response.headers.set("Content-Range", byte_ranges[0].header_value())
                    return self.get_range_data(f, byte_ranges[0])
    def set_response_multipart(self, response, ranges, f):
        """Switch *response* to multipart/byteranges and return
        (per-part content type, MultipartContent body)."""
        parts_content_type = response.headers.get("Content-Type")
        if parts_content_type:
            parts_content_type = parts_content_type[-1]
        else:
            parts_content_type = None
        content = MultipartContent()
        response.headers.set("Content-Type", "multipart/byteranges; boundary=%s" % content.boundary)
        return parts_content_type, content
    def get_range_data(self, f, byte_range):
        """Read and return the bytes of *f* covered by *byte_range*."""
        f.seek(byte_range.lower)
        return f.read(byte_range.upper - byte_range.lower)
file_handler = FileHandler()
class PythonScriptHandler(object):
    """Execute a Python file found under ``base_path`` and dispatch to its
    ``main(request, response)`` function, if one is defined."""
    def __init__(self, base_path=None, url_base="/"):
        self.base_path = base_path
        self.url_base = url_base
    def __repr__(self):
        return "<%s base_path:%s url_base:%s>" % (self.__class__.__name__, self.base_path, self.url_base)
    def __call__(self, request, response):
        path = filesystem_path(self.base_path, request, self.url_base)
        try:
            # Run the script with a single dict serving as both globals and
            # locals, exposing __file__ as the script path.  (Python 2 only:
            # uses the execfile builtin.)
            environ = {"__file__": path}
            execfile(path, environ, environ)
            if "main" in environ:
                handler = FunctionHandler(environ["main"])
                handler(request, response)
            else:
                raise HTTPException(500, "No main function in script %s" % path)
        except IOError:
            raise HTTPException(404)
# Module-level singleton used by the default route table.
python_script_handler = PythonScriptHandler()
class FunctionHandler(object):
    """Adapt a plain ``func(request, response)`` callable into a handler.

    The wrapped function may mutate ``response`` directly and return None,
    or it may return ``content``, ``(headers, content)`` or
    ``(status, headers, content)``; returned values are copied onto the
    response object.  Any exception it raises is converted into a 500 with
    the formatted traceback as the message.
    """
    def __init__(self, func):
        self.func = func
    def __call__(self, request, response):
        try:
            result = self.func(request, response)
        except Exception:
            # Surface the traceback to the client as a 500.
            raise HTTPException(500, message=traceback.format_exc())
        if result is None:
            # The function operated on ``response`` directly; nothing to copy.
            return
        if isinstance(result, tuple):
            if len(result) == 3:
                status, headers, content = result
                response.status = status
            elif len(result) == 2:
                headers, content = result
            else:
                raise HTTPException(500)
            response.headers.update(headers)
        else:
            content = result
        response.content = content
#The generic name here is so that this can be used as a decorator
def handler(func):
    """Decorator turning a bare function into a FunctionHandler."""
    return FunctionHandler(func)
class JsonHandler(object):
    """Handler wrapper that JSON-encodes the wrapped function's result.

    If the function returns a tuple, only its last element (the body) is
    JSON-encoded; any leading status/headers elements are passed through
    unchanged.  Content-Type and Content-Length headers are set on the
    response.
    """
    def __init__(self, func):
        self.func = func
    def __call__(self, request, response):
        return FunctionHandler(self.handle_request)(request, response)
    def handle_request(self, request, response):
        raw = self.func(request, response)
        response.headers.set("Content-Type", "application/json")
        if isinstance(raw, tuple):
            parts = list(raw)
            body = json.dumps(parts[-1])
            result = tuple(parts[:-1] + [body])
        else:
            body = json.dumps(raw)
            result = body
        response.headers.set("Content-Length", len(body))
        return result
def json_handler(func):
    """Decorator turning a bare function into a JsonHandler."""
    return JsonHandler(func)
class AsIsHandler(object):
    """Send a file's contents verbatim, bypassing header/status processing.

    Used for resources where the file itself contains the complete HTTP
    response; the connection is closed after writing.
    """
    def __init__(self, base_path=None, url_base="/"):
        self.base_path = base_path
        self.url_base = url_base
    def __call__(self, request, response):
        path = filesystem_path(self.base_path, request, self.url_base)
        try:
            with open(path) as f:
                raw = f.read()
            response.writer.write_content(raw)
            response.close_connection = True
        except IOError:
            raise HTTPException(404)
# Module-level singleton used by the default route table.
as_is_handler = AsIsHandler()
class BasicAuthHandler(object):
    def __init__(self, handler, user, password):
        """
        A Basic Auth handler
        :Args:
        - handler: handler invoked once authentication succeeds
          (e.g. file_handler)
        - user: the required user name, or None to accept any credentials
        - password: the required password
        """
        self.user = user
        self.password = password
        self.handler = handler
    def __call__(self, request, response):
        if "authorization" not in request.headers:
            # No credentials supplied: challenge the client.
            response.status = 401
            response.headers.set("WWW-Authenticate", "Basic")
            return response
        auth = Authentication(request.headers)
        valid = self.user is None or (self.user == auth.username and
                                      self.password == auth.password)
        if not valid:
            response.set_error(403, "Invalid username or password")
            return response
        return self.handler(request, response)
# Default instance: accepts any credentials (user=None) in front of the
# standard file handler.
basic_auth_handler = BasicAuthHandler(file_handler, None, None)
class ErrorHandler(object):
    """Handler that unconditionally responds with a fixed HTTP error status."""
    def __init__(self, status):
        self.status = status
    def __call__(self, request, response):
        # Delegate the actual error rendering to the response object.
        response.set_error(self.status)
class StaticHandler(object):
    def __init__(self, path, format_args, content_type, **headers):
        """Handler that reads a file from a path and substitutes some fixed data

        :param path: Path to the template file to use
        :param format_args: Dictionary of values to substitute into the template file
        :param content_type: Content type header to serve the response with
        :param headers: Extra headers to send with responses (keyword names
                        use ``_`` in place of ``-``, e.g. ``Cache_Control``)"""
        with open(path) as f:
            self.data = f.read() % format_args
        self.resp_headers = [("Content-Type", content_type)]
        # Bug fix: this previously appended to an undefined local name
        # ``resp_headers``, raising NameError whenever extra headers were
        # supplied.  Also use items() (works on Python 2 and 3) instead of
        # the Python-2-only iteritems().
        for k, v in headers.items():
            self.resp_headers.append((k.replace("_", "-"), v))
        self.handler = handler(self.handle_request)
    def handle_request(self, request, response):
        # Headers plus the pre-rendered body, in the (headers, content)
        # form understood by FunctionHandler.
        return self.resp_headers, self.data
    def __call__(self, request, response):
        return self.handler(request, response)
| mpl-2.0 |
Kazade/NeHe-Website | google_appengine/lib/django-1.5/django/contrib/gis/geos/prototypes/io.py | 103 | 8497 | import threading
from ctypes import byref, c_char_p, c_int, c_char, c_size_t, Structure, POINTER
from django.contrib.gis import memoryview
from django.contrib.gis.geos.base import GEOSBase
from django.contrib.gis.geos.libgeos import GEOM_PTR
from django.contrib.gis.geos.prototypes.errcheck import check_geom, check_string, check_sized_string
from django.contrib.gis.geos.prototypes.geom import c_uchar_p, geos_char_p
from django.contrib.gis.geos.prototypes.threadsafe import GEOSFunc
from django.utils import six
from django.utils.encoding import force_bytes
### The WKB/WKT Reader/Writer structures and pointers ###
# Opaque ctypes Structure stand-ins for the GEOS reader/writer handles.
class WKTReader_st(Structure): pass
class WKTWriter_st(Structure): pass
class WKBReader_st(Structure): pass
class WKBWriter_st(Structure): pass
WKT_READ_PTR = POINTER(WKTReader_st)
WKT_WRITE_PTR = POINTER(WKTWriter_st)
WKB_READ_PTR = POINTER(WKBReader_st)
# Bug fix: previously POINTER(WKBReader_st).  Harmless at the FFI level
# (the structs are opaque), but the writer pointer type was wrong.
WKB_WRITE_PTR = POINTER(WKBWriter_st)
### WKTReader routines ###
wkt_reader_create = GEOSFunc('GEOSWKTReader_create')
wkt_reader_create.restype = WKT_READ_PTR
wkt_reader_destroy = GEOSFunc('GEOSWKTReader_destroy')
wkt_reader_destroy.argtypes = [WKT_READ_PTR]
wkt_reader_read = GEOSFunc('GEOSWKTReader_read')
wkt_reader_read.argtypes = [WKT_READ_PTR, c_char_p]
wkt_reader_read.restype = GEOM_PTR
wkt_reader_read.errcheck = check_geom
### WKTWriter routines ###
wkt_writer_create = GEOSFunc('GEOSWKTWriter_create')
wkt_writer_create.restype = WKT_WRITE_PTR
wkt_writer_destroy = GEOSFunc('GEOSWKTWriter_destroy')
wkt_writer_destroy.argtypes = [WKT_WRITE_PTR]
wkt_writer_write = GEOSFunc('GEOSWKTWriter_write')
wkt_writer_write.argtypes = [WKT_WRITE_PTR, GEOM_PTR]
wkt_writer_write.restype = geos_char_p
wkt_writer_write.errcheck = check_string
### WKBReader routines ###
wkb_reader_create = GEOSFunc('GEOSWKBReader_create')
wkb_reader_create.restype = WKB_READ_PTR
wkb_reader_destroy = GEOSFunc('GEOSWKBReader_destroy')
wkb_reader_destroy.argtypes = [WKB_READ_PTR]
def wkb_read_func(func):
    # Although the function definitions take `const unsigned char *`
    # as their parameter, we use c_char_p here so the function may
    # take Python strings directly as parameters.  Inside Python there
    # is not a difference between signed and unsigned characters, so
    # it is not a problem.
    func.argtypes = [WKB_READ_PTR, c_char_p, c_size_t]
    func.restype = GEOM_PTR
    func.errcheck = check_geom
    return func
wkb_reader_read = wkb_read_func(GEOSFunc('GEOSWKBReader_read'))
wkb_reader_read_hex = wkb_read_func(GEOSFunc('GEOSWKBReader_readHEX'))
### WKBWriter routines ###
wkb_writer_create = GEOSFunc('GEOSWKBWriter_create')
wkb_writer_create.restype = WKB_WRITE_PTR
wkb_writer_destroy = GEOSFunc('GEOSWKBWriter_destroy')
wkb_writer_destroy.argtypes = [WKB_WRITE_PTR]
# WKB Writing prototypes.
def wkb_write_func(func):
    # Writer functions return an allocated buffer plus its size via the
    # c_size_t out-parameter; check_sized_string converts the pair into a
    # Python byte string.
    func.argtypes = [WKB_WRITE_PTR, GEOM_PTR, POINTER(c_size_t)]
    func.restype = c_uchar_p
    func.errcheck = check_sized_string
    return func
wkb_writer_write = wkb_write_func(GEOSFunc('GEOSWKBWriter_write'))
wkb_writer_write_hex = wkb_write_func(GEOSFunc('GEOSWKBWriter_writeHEX'))
# WKBWriter property getter/setter prototypes.
def wkb_writer_get(func, restype=c_int):
    # Prototype helper for zero-argument getters on the writer handle.
    func.argtypes = [WKB_WRITE_PTR]
    func.restype = restype
    return func
def wkb_writer_set(func, argtype=c_int):
    # Prototype helper for single-argument setters on the writer handle.
    func.argtypes = [WKB_WRITE_PTR, argtype]
    return func
wkb_writer_get_byteorder = wkb_writer_get(GEOSFunc('GEOSWKBWriter_getByteOrder'))
wkb_writer_set_byteorder = wkb_writer_set(GEOSFunc('GEOSWKBWriter_setByteOrder'))
wkb_writer_get_outdim = wkb_writer_get(GEOSFunc('GEOSWKBWriter_getOutputDimension'))
wkb_writer_set_outdim = wkb_writer_set(GEOSFunc('GEOSWKBWriter_setOutputDimension'))
wkb_writer_get_include_srid = wkb_writer_get(GEOSFunc('GEOSWKBWriter_getIncludeSRID'), restype=c_char)
wkb_writer_set_include_srid = wkb_writer_set(GEOSFunc('GEOSWKBWriter_setIncludeSRID'), argtype=c_char)
### Base I/O Class ###
class IOBase(GEOSBase):
    "Base class for GEOS I/O objects."
    def __init__(self):
        # Getting the pointer with the constructor.
        self.ptr = self._constructor()
    def __del__(self):
        # Cleaning up with the appropriate destructor.  The ``_ptr`` guard
        # makes destruction a no-op if __init__ failed or the pointer was
        # already released.
        if self._ptr: self._destructor(self._ptr)
### Base WKB/WKT Reading and Writing objects ###
# Non-public WKB/WKT reader classes for internal use because
# their `read` methods return _pointers_ instead of GEOSGeometry
# objects.
class _WKTReader(IOBase):
    "Internal reader producing raw GEOS geometry pointers from WKT input."
    _constructor = wkt_reader_create
    _destructor = wkt_reader_destroy
    ptr_type = WKT_READ_PTR
    def read(self, wkt):
        # Only textual input is accepted; anything else is a TypeError.
        if isinstance(wkt, (bytes, six.string_types)):
            return wkt_reader_read(self.ptr, force_bytes(wkt))
        raise TypeError
class _WKBReader(IOBase):
    "Internal reader producing raw GEOS geometry pointers from WKB input."
    _constructor = wkb_reader_create
    _destructor = wkb_reader_destroy
    ptr_type = WKB_READ_PTR
    def read(self, wkb):
        "Returns a _pointer_ to C GEOS Geometry object from the given WKB."
        # memoryviews carry raw binary WKB; str/bytes carry hex-encoded WKB.
        if isinstance(wkb, memoryview):
            raw = bytes(wkb)
            return wkb_reader_read(self.ptr, raw, len(raw))
        if isinstance(wkb, (bytes, six.string_types)):
            return wkb_reader_read_hex(self.ptr, wkb, len(wkb))
        raise TypeError
### WKB/WKT Writer Classes ###
class WKTWriter(IOBase):
    "Converts GEOS geometry pointers to their WKT text representation."
    _constructor = wkt_writer_create
    _destructor = wkt_writer_destroy
    ptr_type = WKT_WRITE_PTR
    def write(self, geom):
        "Returns the WKT representation of the given geometry."
        wkt = wkt_writer_write(self.ptr, geom.ptr)
        return wkt
class WKBWriter(IOBase):
    "Converts GEOS geometry pointers to (HEX)WKB, with configurable output."
    _constructor = wkb_writer_create
    _destructor = wkb_writer_destroy
    ptr_type = WKB_WRITE_PTR
    def write(self, geom):
        "Returns the WKB representation of the given geometry."
        raw = wkb_writer_write(self.ptr, geom.ptr, byref(c_size_t()))
        return memoryview(raw)
    def write_hex(self, geom):
        "Returns the HEXEWKB representation of the given geometry."
        return wkb_writer_write_hex(self.ptr, geom.ptr, byref(c_size_t()))
    ### WKBWriter Properties ###
    # Byte order of the output: 0 = big endian, 1 = little endian.
    @property
    def byteorder(self):
        return wkb_writer_get_byteorder(self.ptr)
    @byteorder.setter
    def byteorder(self, order):
        if order not in (0, 1):
            raise ValueError('Byte order parameter must be 0 (Big Endian) or 1 (Little Endian).')
        wkb_writer_set_byteorder(self.ptr, order)
    # Output coordinate dimension (2D or 3D).
    @property
    def outdim(self):
        return wkb_writer_get_outdim(self.ptr)
    @outdim.setter
    def outdim(self, new_dim):
        if new_dim not in (2, 3):
            raise ValueError('WKB output dimension must be 2 or 3')
        wkb_writer_set_outdim(self.ptr, new_dim)
    # Whether the SRID is embedded in the output (EWKB); the C API takes a
    # single char flag, hence the byte conversion.
    @property
    def srid(self):
        return bool(ord(wkb_writer_get_include_srid(self.ptr)))
    @srid.setter
    def srid(self, include):
        wkb_writer_set_include_srid(self.ptr, b'\x01' if include else b'\x00')
# `ThreadLocalIO` object holds instances of the WKT and WKB reader/writer
# objects that are local to the thread. The `GEOSGeometry` internals
# access these instances by calling the module-level functions, defined
# below.
class ThreadLocalIO(threading.local):
    # Lazily-created per-thread reader/writer instances; populated on first
    # use by the accessor functions below.
    wkt_r = None
    wkt_w = None
    wkb_r = None
    wkb_w = None
    ewkb_w = None
thread_context = ThreadLocalIO()
# These module-level routines return the I/O object that is local to the
# thread. If the I/O object does not exist yet it will be initialized.
def wkt_r():
    "Return this thread's lazily-created WKT reader."
    reader = thread_context.wkt_r
    if not reader:
        reader = _WKTReader()
        thread_context.wkt_r = reader
    return reader
def wkt_w():
    "Return this thread's lazily-created WKT writer."
    writer = thread_context.wkt_w
    if not writer:
        writer = WKTWriter()
        thread_context.wkt_w = writer
    return writer
def wkb_r():
    "Return this thread's lazily-created WKB reader."
    reader = thread_context.wkb_r
    if not reader:
        reader = _WKBReader()
        thread_context.wkb_r = reader
    return reader
def wkb_w(dim=2):
    """Return this thread's WKB writer, configured for ``dim`` output
    dimensions.

    Bug fix: the output dimension is now (re)applied on every call.
    Previously it was only set when the writer was first created, so
    requesting e.g. a 3D writer after a 2D one had been cached silently
    returned a writer that truncated Z coordinates.
    """
    if not thread_context.wkb_w:
        thread_context.wkb_w = WKBWriter()
    if thread_context.wkb_w.outdim != dim:
        thread_context.wkb_w.outdim = dim
    return thread_context.wkb_w
def ewkb_w(dim=2):
    """Return this thread's EWKB writer (WKB with embedded SRID), configured
    for ``dim`` output dimensions.

    Bug fix: the output dimension is now (re)applied on every call rather
    than only at first creation (see ``wkb_w``), so alternating 2D/3D
    requests get the dimension they asked for.
    """
    if not thread_context.ewkb_w:
        thread_context.ewkb_w = WKBWriter()
        thread_context.ewkb_w.srid = True
    if thread_context.ewkb_w.outdim != dim:
        thread_context.ewkb_w.outdim = dim
    return thread_context.ewkb_w
| bsd-3-clause |
wbond/crlbuilder | dev/tests.py | 7 | 2294 | # coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import unittest
import re
import sys
import warnings
from . import requires_oscrypto
from ._import import _preload
from tests import test_classes
if sys.version_info < (3,):
range = xrange # noqa
from cStringIO import StringIO
else:
from io import StringIO
# Command-line argument specs consumed by the CLI driver: each entry maps a
# command-line option ('name', optionally 'cast') onto a keyword argument
# ('kwarg') of run() below.
run_args = [
    {
        'name': 'regex',
        'kwarg': 'matcher',
    },
    {
        'name': 'repeat_count',
        'kwarg': 'repeat',
        'cast': 'int',
    },
]
def run(matcher=None, repeat=1, ci=False):
    """
    Runs the tests

    :param matcher:
        A unicode string containing a regular expression to use to filter test
        names by. A value of None will cause no filtering.

    :param repeat:
        An integer - the number of times to run the tests

    :param ci:
        A bool, indicating if the tests are being run as part of CI

    :return:
        A bool - if the tests succeeded
    """
    _preload(requires_oscrypto, not ci)
    warnings.filterwarnings("error")
    loader = unittest.TestLoader()
    # We have to manually track the list of applicable tests because for
    # some reason with Python 3.4 on Windows, the tests in a suite are
    # replaced with None after being executed. This breaks the repeat
    # functionality.
    selected = []
    for test_class in test_classes():
        if not matcher:
            selected.append(loader.loadTestsFromTestCase(test_class))
            continue
        for case_name in loader.getTestCaseNames(test_class):
            if re.search(matcher, case_name):
                selected.append(test_class(case_name))
    # Verbose output only for a filtered single run; repeated runs buffer
    # their output and only print it on failure.
    stream = sys.stdout
    verbosity = 1
    if matcher and repeat == 1:
        verbosity = 2
    elif repeat > 1:
        stream = StringIO()
    for _ in range(repeat):
        suite = unittest.TestSuite()
        for case in selected:
            suite.addTest(case)
        outcome = unittest.TextTestRunner(stream=stream, verbosity=verbosity).run(suite)
        if outcome.errors or outcome.failures:
            if repeat > 1:
                print(stream.getvalue())
            return False
        if repeat > 1:
            stream.truncate(0)
    return True
| mit |
JeNeSuisPasDave/hyde | hyde/tests/test_simple_copy.py | 7 | 4366 | # -*- coding: utf-8 -*-
"""
Tests the simple copy feature.
In order to mark some files to simply be copied to the
destination without any processing what so ever add this
to the config (site.yaml for example):
simple_copy:
- media/css/*.css
- media/js/*.js
- **/*.js
Matching is done with `fnmatch` module. So any `glob` that fnmatch
can process is a valid pattern.
Use nose
`$ pip install nose`
`$ nosetests`
"""
import yaml
from hyde.model import Config
from hyde.site import Site
from hyde.generator import Generator
from fswrap import File
from nose.tools import nottest
TEST_SITE_ROOT = File(__file__).parent.child_folder('sites/test_jinja')
class TestSimpleCopy(object):
    """Integration tests for the ``simple_copy`` site option: resources
    matched by the configured fnmatch globs must be flagged ``simple_copy``
    and deployed byte-for-byte, with no template processing."""
    @classmethod
    def setup_class(cls):
        # Work on a throwaway copy of the fixture site so the tests can
        # rewrite its config and content.
        cls.SITE_PATH = File(__file__).parent.child_folder(
            'sites/test_jinja_with_config')
        cls.SITE_PATH.make()
        TEST_SITE_ROOT.copy_contents_to(cls.SITE_PATH)
    @classmethod
    def teardown_class(cls):
        cls.SITE_PATH.delete()
    @nottest
    def setup_config(self, passthru):
        # Inject the given ``simple_copy`` pattern list into site.yaml.
        self.config_file = File(self.SITE_PATH.child('site.yaml'))
        with open(self.config_file.path) as config:
            conf = yaml.load(config)
            conf['simple_copy'] = passthru
            self.config = Config(sitepath=self.SITE_PATH, config_dict=conf)
    def test_simple_copy_basic(self):
        # An exact filename pattern marks just that resource.
        self.setup_config([
            'about.html'
        ])
        s = Site(self.SITE_PATH, config=self.config)
        s.load()
        res = s.content.resource_from_relative_path('about.html')
        assert res
        assert res.simple_copy
    def test_simple_copy_directory(self):
        # A recursive glob matches nested files but not top-level ones.
        self.setup_config([
            '**/*.html'
        ])
        s = Site(self.SITE_PATH, config=self.config)
        s.load()
        res = s.content.resource_from_relative_path('about.html')
        assert res
        assert not res.simple_copy
        res = s.content.resource_from_relative_path(
            'blog/2010/december/merry-christmas.html')
        assert res
        assert res.simple_copy
    def test_simple_copy_multiple(self):
        # Multiple patterns combine; each resource is matched independently.
        self.setup_config([
            '**/*.html',
            'media/css/*.css'
        ])
        s = Site(self.SITE_PATH, config=self.config)
        s.load()
        res = s.content.resource_from_relative_path('about.html')
        assert res
        assert not res.simple_copy
        res = s.content.resource_from_relative_path(
            'blog/2010/december/merry-christmas.html')
        assert res
        assert res.simple_copy
        res = s.content.resource_from_relative_path('media/css/site.css')
        assert res
        assert res.simple_copy
    def test_generator(self):
        # End-to-end: generated output of a simple-copied file must equal
        # its source byte-for-byte.
        self.setup_config([
            '**/*.html',
            'media/css/*.css'
        ])
        s = Site(self.SITE_PATH, self.config)
        g = Generator(s)
        g.generate_all()
        source = s.content.resource_from_relative_path(
            'blog/2010/december/merry-christmas.html')
        target = File(
            s.config.deploy_root_path.child(source.relative_deploy_path))
        left = source.source_file.read_all()
        right = target.read_all()
        assert left == right
    def test_plugins(self):
        # Even with the meta plugin active, a simple-copied template must
        # not be processed (template syntax and meta block stay verbatim).
        text = """
        ---
        title: Hey
        author: Me
        twitter: @me
        ---
        {%% extends "base.html" %%}
        {%% block main %%}
        Hi!
        I am a test template to make sure jinja2 generation works well with hyde.
        <span class="title">{{resource.meta.title}}</span>
        <span class="author">{{resource.meta.author}}</span>
        <span class="twitter">{{resource.meta.twitter}}</span>
        {%% endblock %%}
        """
        index = File(self.SITE_PATH.child('content/blog/index.html'))
        index.write(text)
        self.setup_config([
            '**/*.html',
            'media/css/*.css'
        ])
        conf = {'plugins': ['hyde.ext.plugins.meta.MetaPlugin']}
        conf.update(self.config.to_dict())
        s = Site(self.SITE_PATH, Config(
            sitepath=self.SITE_PATH, config_dict=conf))
        g = Generator(s)
        g.generate_all()
        source = s.content.resource_from_relative_path('blog/index.html')
        target = File(
            s.config.deploy_root_path.child(source.relative_deploy_path))
        left = source.source_file.read_all()
        right = target.read_all()
        assert left == right
| mit |
nvoron23/socialite | jython/Lib/test/test_timeout.py | 22 | 6675 | """Unit tests for socket timeout feature."""
import unittest
from test import test_support
# This requires the 'network' resource as given on the regrtest command line.
skip_expected = not test_support.is_resource_enabled('network')
import time
import socket
class CreationTestCase(unittest.TestCase):
    """Test case for socket.gettimeout() and socket.settimeout()"""
    def setUp(self):
        # Fresh TCP socket per test; closed again in tearDown.
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    def tearDown(self):
        self.sock.close()
    def testObjectCreation(self):
        # Test Socket creation
        self.assertEqual(self.sock.gettimeout(), None,
                         "timeout not disabled by default")
    def testFloatReturnValue(self):
        # Test return value of gettimeout()
        self.sock.settimeout(7.345)
        self.assertEqual(self.sock.gettimeout(), 7.345)
        self.sock.settimeout(3)
        self.assertEqual(self.sock.gettimeout(), 3)
        self.sock.settimeout(None)
        self.assertEqual(self.sock.gettimeout(), None)
    def testReturnType(self):
        # Test return type of gettimeout(): always a float, even when an
        # int was passed to settimeout().
        self.sock.settimeout(1)
        self.assertEqual(type(self.sock.gettimeout()), type(1.0))
        self.sock.settimeout(3.9)
        self.assertEqual(type(self.sock.gettimeout()), type(1.0))
    def testTypeCheck(self):
        # Test type checking by settimeout(): numbers and None are accepted,
        # everything else must raise TypeError.
        self.sock.settimeout(0)
        self.sock.settimeout(0L)
        self.sock.settimeout(0.0)
        self.sock.settimeout(None)
        self.assertRaises(TypeError, self.sock.settimeout, "")
        self.assertRaises(TypeError, self.sock.settimeout, u"")
        self.assertRaises(TypeError, self.sock.settimeout, ())
        self.assertRaises(TypeError, self.sock.settimeout, [])
        self.assertRaises(TypeError, self.sock.settimeout, {})
        self.assertRaises(TypeError, self.sock.settimeout, 0j)
    def testRangeCheck(self):
        # Test range checking by settimeout(): negative timeouts are invalid.
        self.assertRaises(ValueError, self.sock.settimeout, -1)
        self.assertRaises(ValueError, self.sock.settimeout, -1L)
        self.assertRaises(ValueError, self.sock.settimeout, -1.0)
    def testTimeoutThenBlocking(self):
        # Test settimeout() followed by setblocking(): setblocking(1)
        # clears the timeout, setblocking(0) sets it to 0.0.
        self.sock.settimeout(10)
        self.sock.setblocking(1)
        self.assertEqual(self.sock.gettimeout(), None)
        self.sock.setblocking(0)
        self.assertEqual(self.sock.gettimeout(), 0.0)
        self.sock.settimeout(10)
        self.sock.setblocking(0)
        self.assertEqual(self.sock.gettimeout(), 0.0)
        self.sock.setblocking(1)
        self.assertEqual(self.sock.gettimeout(), None)
    def testBlockingThenTimeout(self):
        # Test setblocking() followed by settimeout(): the timeout wins.
        self.sock.setblocking(0)
        self.sock.settimeout(1)
        self.assertEqual(self.sock.gettimeout(), 1)
        self.sock.setblocking(1)
        self.sock.settimeout(1)
        self.assertEqual(self.sock.gettimeout(), 1)
class TimeoutTestCase(unittest.TestCase):
    """Test case for socket.socket() timeout functions"""
    # There are a number of tests here trying to make sure that an operation
    # doesn't take too much longer than expected. But competing machine
    # activity makes it inevitable that such tests will fail at times.
    # When fuzz was at 1.0, I (tim) routinely saw bogus failures on Win2K
    # and Win98SE. Boosting it to 2.0 helped a lot, but isn't a real
    # solution.
    fuzz = 2.0
    def setUp(self):
        # One remote address (for connect/recv) and one local address
        # (for accept/recvfrom) per test run.
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.addr_remote = ('www.python.org.', 80)
        self.addr_local = ('127.0.0.1', 25339)
    def tearDown(self):
        self.sock.close()
    def testConnectTimeout(self):
        # Test connect() timeout
        _timeout = 0.001
        self.sock.settimeout(_timeout)
        # If we are too close to www.python.org, this test will fail.
        # Pick a host that should be farther away.
        if (socket.getfqdn().split('.')[-2:] == ['python', 'org'] or
            socket.getfqdn().split('.')[-2:-1] == ['xs4all']):
            self.addr_remote = ('tut.fi', 80)
        _t1 = time.time()
        self.failUnlessRaises(socket.error, self.sock.connect,
                              self.addr_remote)
        _t2 = time.time()
        # The elapsed time must stay within `fuzz` seconds of the timeout.
        _delta = abs(_t1 - _t2)
        self.assert_(_delta < _timeout + self.fuzz,
                     "timeout (%g) is more than %g seconds more than expected (%g)"
                     %(_delta, self.fuzz, _timeout))
    def testRecvTimeout(self):
        # Test recv() timeout
        _timeout = 0.02
        self.sock.connect(self.addr_remote)
        self.sock.settimeout(_timeout)
        _t1 = time.time()
        self.failUnlessRaises(socket.error, self.sock.recv, 1024)
        _t2 = time.time()
        _delta = abs(_t1 - _t2)
        self.assert_(_delta < _timeout + self.fuzz,
                     "timeout (%g) is %g seconds more than expected (%g)"
                     %(_delta, self.fuzz, _timeout))
    def testAcceptTimeout(self):
        # Test accept() timeout
        _timeout = 2
        self.sock.settimeout(_timeout)
        self.sock.bind(self.addr_local)
        self.sock.listen(5)
        _t1 = time.time()
        self.failUnlessRaises(socket.error, self.sock.accept)
        _t2 = time.time()
        _delta = abs(_t1 - _t2)
        self.assert_(_delta < _timeout + self.fuzz,
                     "timeout (%g) is %g seconds more than expected (%g)"
                     %(_delta, self.fuzz, _timeout))
    def testRecvfromTimeout(self):
        # Test recvfrom() timeout -- uses a UDP socket instead.
        _timeout = 2
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.sock.settimeout(_timeout)
        self.sock.bind(self.addr_local)
        _t1 = time.time()
        self.failUnlessRaises(socket.error, self.sock.recvfrom, 8192)
        _t2 = time.time()
        _delta = abs(_t1 - _t2)
        self.assert_(_delta < _timeout + self.fuzz,
                     "timeout (%g) is %g seconds more than expected (%g)"
                     %(_delta, self.fuzz, _timeout))
    def testSend(self):
        # Test send() timeout
        # couldn't figure out how to test it
        pass
    def testSendto(self):
        # Test sendto() timeout
        # couldn't figure out how to test it
        pass
    def testSendall(self):
        # Test sendall() timeout
        # couldn't figure out how to test it
        pass
def test_main():
    # Network access is required; test_support.requires skips the run when
    # the 'network' resource has not been enabled on the command line.
    test_support.requires('network')
    test_support.run_unittest(CreationTestCase, TimeoutTestCase)
if __name__ == "__main__":
    test_main()
| apache-2.0 |
luvit/gyp | test/win/gyptest-link-restat-importlib.py | 115 | 1375 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Make sure we don't cause unnecessary builds due to import libs appearing
to be out of date.
"""
import TestGyp
import os
import sys
import time
# NOTE: Python 2 script (print statement below); the test only runs on
# Windows, where import libraries exist.
if sys.platform == 'win32':
  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
  if not os.environ.get('ProgramFiles(x86)'):
    # TODO(scottmg)
    print 'Skipping test on x86, http://crbug.com/365833'
    test.pass_test()
  CHDIR = 'importlib'
  test.run_gyp('importlib.gyp', chdir=CHDIR)
  test.build('importlib.gyp', test.ALL, chdir=CHDIR)
  # Delay briefly so that there's time for this touch not to have the
  # timestamp as the previous run.
  test.sleep()
  # Touch the .cc file; the .dll will rebuild, but the import libs timestamp
  # won't be updated.
  test.touch('importlib/has-exports.cc')
  test.build('importlib.gyp', 'test_importlib', chdir=CHDIR)
  # This is the important part. The .dll above will relink and have an updated
  # timestamp, however the import .libs timestamp won't be updated. So, we
  # have to handle restating inputs in ninja so the final binary doesn't
  # continually relink (due to thinking the .lib isn't up to date).
  test.up_to_date('importlib.gyp', test.ALL, chdir=CHDIR)
  test.pass_test()
| bsd-3-clause |
thebonzitree/django-guardian | guardian/admin.py | 37 | 15993 | from __future__ import unicode_literals
from django import forms
from django.conf import settings
from guardian.compat import url, patterns
from django.contrib import admin
from django.contrib import messages
from django.contrib.admin.widgets import FilteredSelectMultiple
from django.core.urlresolvers import reverse
from django.shortcuts import render_to_response, get_object_or_404, redirect
from django.template import RequestContext
from django.utils.datastructures import SortedDict
from django.utils.translation import ugettext, ugettext_lazy as _
from guardian.compat import get_user_model
from guardian.forms import UserObjectPermissionsForm
from guardian.forms import GroupObjectPermissionsForm
from guardian.shortcuts import get_perms
from guardian.shortcuts import get_users_with_perms
from guardian.shortcuts import get_groups_with_perms
from guardian.shortcuts import get_perms_for_model
from guardian.models import Group
class AdminUserObjectPermissionsForm(UserObjectPermissionsForm):
    """
    Admin variant of :form:`UserObjectPermissionsForm`: renders the object
    permissions field with Django admin's
    ``django.contrib.admin.widgets.FilteredSelectMultiple`` widget.
    """
    def get_obj_perms_field_widget(self):
        widget = FilteredSelectMultiple(_("Permissions"), False)
        return widget
class AdminGroupObjectPermissionsForm(GroupObjectPermissionsForm):
    """
    Admin variant of :form:`GroupObjectPermissionsForm`: renders the object
    permissions field with Django admin's
    ``django.contrib.admin.widgets.FilteredSelectMultiple`` widget.
    """
    def get_obj_perms_field_widget(self):
        widget = FilteredSelectMultiple(_("Permissions"), False)
        return widget
class GuardedModelAdmin(admin.ModelAdmin):
"""
Extends ``django.contrib.admin.ModelAdmin`` class. Provides some extra
views for object permissions management at admin panel. It also changes
default ``change_form_template`` option to
``'admin/guardian/model/change_form.html'`` which is required for proper
url (object permissions related) being shown at the model pages.
**Extra options**
``GuardedModelAdmin.obj_perms_manage_template``
*Default*: ``admin/guardian/model/obj_perms_manage.html``
``GuardedModelAdmin.obj_perms_manage_user_template``
*Default*: ``admin/guardian/model/obj_perms_manage_user.html``
``GuardedModelAdmin.obj_perms_manage_group_template``
*Default*: ``admin/guardian/model/obj_perms_manage_group.html``
``GuardedModelAdmin.user_can_access_owned_objects_only``
*Default*: ``False``
If this would be set to ``True``, ``request.user`` would be used to
filter out objects he or she doesn't own (checking ``user`` field
of used model - field name may be overridden by
``user_owned_objects_field`` option).
.. note::
Please remember that this will **NOT** affect superusers!
Admins would still see all items.
``GuardedModelAdmin.user_can_access_owned_by_group_objects_only``
*Default*: ``False``
If this would be set to ``True``, ``request.user`` would be used to
filter out objects her or his group doesn't own (checking if any group
user belongs to is set as ``group`` field of the object; name of the
field can be changed by overriding ``group_owned_objects_field``).
.. note::
Please remember that this will **NOT** affect superusers!
Admins would still see all items.
``GuardedModelAdmin.group_owned_objects_field``
*Default*: ``group``
**Usage example**
Just use :admin:`GuardedModelAdmin` instead of
``django.contrib.admin.ModelAdmin``.
.. code-block:: python
from django.contrib import admin
from guardian.admin import GuardedModelAdmin
from myapp.models import Author
class AuthorAdmin(GuardedModelAdmin):
pass
admin.site.register(Author, AuthorAdmin)
"""
change_form_template = \
'admin/guardian/model/change_form.html'
obj_perms_manage_template = \
'admin/guardian/model/obj_perms_manage.html'
obj_perms_manage_user_template = \
'admin/guardian/model/obj_perms_manage_user.html'
obj_perms_manage_group_template = \
'admin/guardian/model/obj_perms_manage_group.html'
user_can_access_owned_objects_only = False
user_owned_objects_field = 'user'
user_can_access_owned_by_group_objects_only = False
group_owned_objects_field = 'group'
def queryset(self, request):
qs = super(GuardedModelAdmin, self).queryset(request)
if request.user.is_superuser:
return qs
if self.user_can_access_owned_objects_only:
filters = {self.user_owned_objects_field: request.user}
qs = qs.filter(**filters)
if self.user_can_access_owned_by_group_objects_only:
User = get_user_model()
user_rel_name = User.groups.field.related_query_name()
qs_key = '%s__%s' % (self.group_owned_objects_field, user_rel_name)
filters = {qs_key: request.user}
qs = qs.filter(**filters)
return qs
def get_urls(self):
"""
Extends standard admin model urls with the following:
- ``.../permissions/`` under ``app_mdodel_permissions`` url name (params: object_pk)
- ``.../permissions/user-manage/<user_id>/`` under ``app_model_permissions_manage_user`` url name (params: object_pk, user_pk)
- ``.../permissions/group-manage/<group_id>/`` under ``app_model_permissions_manage_group`` url name (params: object_pk, group_pk)
.. note::
``...`` above are standard, instance detail url (i.e.
``/admin/flatpages/1/``)
"""
urls = super(GuardedModelAdmin, self).get_urls()
info = self.model._meta.app_label, self.model._meta.module_name
myurls = patterns('',
url(r'^(?P<object_pk>.+)/permissions/$',
view=self.admin_site.admin_view(self.obj_perms_manage_view),
name='%s_%s_permissions' % info),
url(r'^(?P<object_pk>.+)/permissions/user-manage/(?P<user_id>\-?\d+)/$',
view=self.admin_site.admin_view(
self.obj_perms_manage_user_view),
name='%s_%s_permissions_manage_user' % info),
url(r'^(?P<object_pk>.+)/permissions/group-manage/(?P<group_id>\-?\d+)/$',
view=self.admin_site.admin_view(
self.obj_perms_manage_group_view),
name='%s_%s_permissions_manage_group' % info),
)
return myurls + urls
def get_obj_perms_base_context(self, request, obj):
    """
    Returns context dictionary with common admin and object permissions
    related content.

    The ``original`` entry is the display label for ``obj``, preferring
    its ``__unicode__`` representation when one is defined.
    """
    # Bug fix: the previous ``hasattr(...) and obj.__unicode__() or
    # str(obj)`` idiom silently fell back to ``str(obj)`` whenever
    # ``__unicode__()`` returned a falsy value (e.g. an empty string).
    if hasattr(obj, '__unicode__'):
        original = obj.__unicode__()
    else:
        original = str(obj)
    context = {
        'adminform': {'model_admin': self},
        'media': self.media,
        'object': obj,
        'app_label': self.model._meta.app_label,
        'opts': self.model._meta,
        'original': original,
        'has_change_permission': self.has_change_permission(request, obj),
        'model_perms': get_perms_for_model(obj),
        'title': _("Object permissions"),
    }
    return context
def obj_perms_manage_view(self, request, object_pk):
    """
    Main object permissions view. Presents all users and groups with any
    object permissions for the current model *instance*. Users or groups
    without object permissions for related *instance* would **not** be
    shown. In order to add or manage user or group one should use links or
    forms presented within the page.
    """
    obj = get_object_or_404(self.queryset(request), pk=object_pk)
    # Collect per-user and per-group permissions, sorted for stable display.
    users_perms = SortedDict(
        get_users_with_perms(obj, attach_perms=True,
            with_group_users=False))
    users_perms.keyOrder.sort(key=lambda user: user.username)
    groups_perms = SortedDict(
        get_groups_with_perms(obj, attach_perms=True))
    groups_perms.keyOrder.sort(key=lambda group: group.name)

    if request.method == 'POST' and 'submit_manage_user' in request.POST:
        # "Manage user" form submitted: validate and redirect to the
        # per-user management page.
        user_form = UserManage(request.POST)
        group_form = GroupManage()
        info = (
            self.admin_site.name,
            self.model._meta.app_label,
            self.model._meta.module_name
        )
        if user_form.is_valid():
            user_id = user_form.cleaned_data['user'].id
            url = reverse(
                '%s:%s_%s_permissions_manage_user' % info,
                args=[obj.pk, user_id]
            )
            return redirect(url)
    elif request.method == 'POST' and 'submit_manage_group' in request.POST:
        # "Manage group" form submitted: validate and redirect likewise.
        user_form = UserManage()
        group_form = GroupManage(request.POST)
        info = (
            self.admin_site.name,
            self.model._meta.app_label,
            self.model._meta.module_name
        )
        if group_form.is_valid():
            group_id = group_form.cleaned_data['group'].id
            url = reverse(
                '%s:%s_%s_permissions_manage_group' % info,
                args=[obj.pk, group_id]
            )
            return redirect(url)
    else:
        # Plain GET (or a POST from an invalid/unknown form): empty forms.
        user_form = UserManage()
        group_form = GroupManage()

    context = self.get_obj_perms_base_context(request, obj)
    context['users_perms'] = users_perms
    context['groups_perms'] = groups_perms
    context['user_form'] = user_form
    context['group_form'] = group_form

    return render_to_response(self.get_obj_perms_manage_template(),
        context, RequestContext(request, current_app=self.admin_site.name))
def get_obj_perms_manage_template(self):
    """
    Template name for the main object permissions page.

    Subclasses may override this to pick the template dynamically.

    .. note::
       When ``grappelli`` is in ``INSTALLED_APPS`` the grappelli-specific
       template is returned instead of ``obj_perms_manage_template``.
    """
    if 'grappelli' not in settings.INSTALLED_APPS:
        return self.obj_perms_manage_template
    return 'admin/guardian/contrib/grappelli/obj_perms_manage.html'
def obj_perms_manage_user_view(self, request, object_pk, user_id):
    """
    Manages selected users' permissions for current object.
    """
    user = get_object_or_404(get_user_model(), id=user_id)
    obj = get_object_or_404(self.queryset(request), pk=object_pk)
    form_class = self.get_obj_perms_manage_user_form()
    form = form_class(user, obj, request.POST or None)

    if request.method == 'POST' and form.is_valid():
        form.save_obj_perms()
        msg = ugettext("Permissions saved.")
        messages.success(request, msg)
        info = (
            self.admin_site.name,
            self.model._meta.app_label,
            self.model._meta.module_name
        )
        # POST/redirect/GET: reload the same management page.
        url = reverse(
            '%s:%s_%s_permissions_manage_user' % info,
            args=[obj.pk, user.id]
        )
        return redirect(url)

    context = self.get_obj_perms_base_context(request, obj)
    context['user_obj'] = user
    context['user_perms'] = get_perms(user, obj)
    context['form'] = form

    return render_to_response(self.get_obj_perms_manage_user_template(),
        context, RequestContext(request, current_app=self.admin_site.name))
def get_obj_perms_manage_user_template(self):
    """
    Template name for the per-user object permissions page.

    Subclasses may override this to pick the template dynamically.

    .. note::
       When ``grappelli`` is in ``INSTALLED_APPS`` the grappelli-specific
       template is returned instead of ``obj_perms_manage_user_template``.
    """
    if 'grappelli' not in settings.INSTALLED_APPS:
        return self.obj_perms_manage_user_template
    return 'admin/guardian/contrib/grappelli/obj_perms_manage_user.html'
def get_obj_perms_manage_user_form(self):
    """
    Returns form class for user object permissions management. By default
    :form:`AdminUserObjectPermissionsForm` is returned.
    """
    # Hook point: override to plug in a custom per-user permissions form.
    return AdminUserObjectPermissionsForm
def obj_perms_manage_group_view(self, request, object_pk, group_id):
    """
    Manages selected groups' permissions for current object.
    """
    group = get_object_or_404(Group, id=group_id)
    obj = get_object_or_404(self.queryset(request), pk=object_pk)
    form_class = self.get_obj_perms_manage_group_form()
    form = form_class(group, obj, request.POST or None)

    if request.method == 'POST' and form.is_valid():
        form.save_obj_perms()
        msg = ugettext("Permissions saved.")
        messages.success(request, msg)
        info = (
            self.admin_site.name,
            self.model._meta.app_label,
            self.model._meta.module_name
        )
        # POST/redirect/GET: reload the same management page.
        url = reverse(
            '%s:%s_%s_permissions_manage_group' % info,
            args=[obj.pk, group.id]
        )
        return redirect(url)

    context = self.get_obj_perms_base_context(request, obj)
    context['group_obj'] = group
    context['group_perms'] = get_perms(group, obj)
    context['form'] = form

    return render_to_response(self.get_obj_perms_manage_group_template(),
        context, RequestContext(request, current_app=self.admin_site.name))
def get_obj_perms_manage_group_template(self):
    """
    Template name for the per-group object permissions page.

    Subclasses may override this to pick the template dynamically.

    .. note::
       When ``grappelli`` is in ``INSTALLED_APPS`` the grappelli-specific
       template is returned instead of ``obj_perms_manage_group_template``.
    """
    if 'grappelli' not in settings.INSTALLED_APPS:
        return self.obj_perms_manage_group_template
    return 'admin/guardian/contrib/grappelli/obj_perms_manage_group.html'
def get_obj_perms_manage_group_form(self):
    """
    Returns form class for group object permissions management. By default
    :form:`AdminGroupObjectPermissionsForm` is returned.
    """
    # Hook point: override to plug in a custom per-group permissions form.
    return AdminGroupObjectPermissionsForm
class UserManage(forms.Form):
    """Picks an existing user on the object permissions page."""
    user = forms.RegexField(label=_("Username"), max_length=30,
                            regex=r'^[\w.@+-]+$',
                            error_messages = {
                                'invalid': _("This value may contain only letters, numbers and "
                                             "@/./+/-/_ characters."),
                                'does_not_exist': _("This user does not exist")})

    def clean_user(self):
        """
        Returns ``User`` instance based on the given username.

        Raises ``ValidationError`` when no such user exists.
        """
        username = self.cleaned_data['user']
        try:
            user = get_user_model().objects.get(username=username)
            return user
        except get_user_model().DoesNotExist:
            raise forms.ValidationError(
                self.fields['user'].error_messages['does_not_exist'])
class GroupManage(forms.Form):
    """Picks an existing group on the object permissions page."""
    group = forms.CharField(max_length=80, error_messages={'does_not_exist':
                                _("This group does not exist")})

    def clean_group(self):
        """
        Returns ``Group`` instance based on the given group name.

        Raises ``ValidationError`` when no such group exists.
        """
        name = self.cleaned_data['group']
        try:
            group = Group.objects.get(name=name)
            return group
        except Group.DoesNotExist:
            raise forms.ValidationError(
                self.fields['group'].error_messages['does_not_exist'])
| bsd-2-clause |
def module_exists(module_name):
    """Return True when *module_name* can be imported, False otherwise."""
    try:
        __import__(module_name)
    except ImportError:
        return False
    return True
if module_exists('browser') and module_exists('javascript'):
    # Running under Brython in a browser: use PIXI.js for graphics and
    # buzz.js for sound, reached through the javascript bridge.
    from browser import window, document
    from javascript import JSObject, JSConstructor

    GFX = JSObject(window.PIXI)
    GFX_Rectangle = JSConstructor(GFX.Rectangle)
    GFX_Texture = JSConstructor(GFX.Texture)
    GFX_Texture_fromImage = JSConstructor(GFX.Texture.fromImage)
    GFX_Sprite = JSConstructor(GFX.Sprite)
    GFX_Graphics = JSConstructor(GFX.Graphics)()
    GFX_Text = JSConstructor(GFX.Text)
    GFX_DetectRenderer = GFX.autoDetectRenderer
    SND = JSObject(window.buzz)
    SND_Sound = JSConstructor(SND.sound)

    class GFX_Window(object):
        """Browser window hosting a PIXI stage/renderer pair."""

        def __init__(self, width, height, onclose):
            self._w = window.open("", "")
            self._stage = JSConstructor(GFX.Container)()
            # Zero width/height means "use 90% of the browser window".
            self.width = width if width != 0 else int(window.innerWidth * 0.9)
            self.height = height if height != 0 else int(window.innerHeight * 0.9)
            self._renderer = GFX.autoDetectRenderer(self.width, self.height, {'transparent':True})
            self._w.document.body.appendChild(self._renderer.view)
            self._w.onunload = onclose

        def bind(self, evtspec, callback):
            # Attach a DOM event handler to the window body.
            self._w.document.body.bind(evtspec, callback)

        def add(self, obj):
            self._stage.addChild(obj)

        def remove(self, obj):
            self._stage.removeChild(obj)

        def animate(self, stepcallback):
            # Render one frame, then let the browser schedule the next.
            self._renderer.render(self._stage)
            self._w.requestAnimationFrame(stepcallback)

        def destroy(self):
            SND.all().stop()
            self._stage.destroy()

elif module_exists('pygame'):
    # Desktop environment: pygame-backed implementations.
    # NOTE(review): the bare except would also hide errors raised *inside*
    # pygamedeps; ImportError is likely what was intended — verify.
    try:
        from ggame.pygamedeps import *
    except:
        from pygamedeps import *
else:
    # Headless (e.g. CI/test) environment: stub implementations.
    try:
        from ggame.headlessdeps import *
    except:
        from headlessdeps import *
| agpl-3.0 |
mcrowson/django | tests/auth_tests/test_mixins.py | 274 | 8335 | from django.contrib.auth import models
from django.contrib.auth.mixins import (
LoginRequiredMixin, PermissionRequiredMixin, UserPassesTestMixin,
)
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import PermissionDenied
from django.http import HttpResponse
from django.test import RequestFactory, TestCase
from django.views.generic import View
class AlwaysTrueMixin(UserPassesTestMixin):
    # Test double: access test that always grants access.
    def test_func(self):
        return True
class AlwaysFalseMixin(UserPassesTestMixin):
    # Test double: access test that always denies access.
    def test_func(self):
        return False
class EmptyResponseView(View):
    # Minimal view returning an empty 200 response; base for test views.
    def get(self, request, *args, **kwargs):
        return HttpResponse()
class AlwaysTrueView(AlwaysTrueMixin, EmptyResponseView):
    # View whose access test always passes.
    pass
class AlwaysFalseView(AlwaysFalseMixin, EmptyResponseView):
    # View whose access test always fails.
    pass
class StackedMixinsView1(LoginRequiredMixin, PermissionRequiredMixin, EmptyResponseView):
    # LoginRequired checked before PermissionRequired (MRO order).
    permission_required = ['auth.add_customuser', 'auth.change_customuser']
    raise_exception = True
class StackedMixinsView2(PermissionRequiredMixin, LoginRequiredMixin, EmptyResponseView):
    # Same as StackedMixinsView1 but with the mixin order reversed.
    permission_required = ['auth.add_customuser', 'auth.change_customuser']
    raise_exception = True
class AccessMixinTests(TestCase):
    """Stacking LoginRequiredMixin with PermissionRequiredMixin."""
    factory = RequestFactory()

    def test_stacked_mixins_success(self):
        # User holds both required permissions -> 200 for either mixin order.
        user = models.User.objects.create(username='joe', password='qwerty')
        perms = models.Permission.objects.filter(codename__in=('add_customuser', 'change_customuser'))
        user.user_permissions.add(*perms)
        request = self.factory.get('/rand')
        request.user = user

        view = StackedMixinsView1.as_view()
        response = view(request)
        self.assertEqual(response.status_code, 200)

        view = StackedMixinsView2.as_view()
        response = view(request)
        self.assertEqual(response.status_code, 200)

    def test_stacked_mixins_missing_permission(self):
        # Only one of the two required permissions -> PermissionDenied.
        user = models.User.objects.create(username='joe', password='qwerty')
        perms = models.Permission.objects.filter(codename__in=('add_customuser',))
        user.user_permissions.add(*perms)
        request = self.factory.get('/rand')
        request.user = user

        view = StackedMixinsView1.as_view()
        with self.assertRaises(PermissionDenied):
            view(request)

        view = StackedMixinsView2.as_view()
        with self.assertRaises(PermissionDenied):
            view(request)

    def test_stacked_mixins_not_logged_in(self):
        # Permissions granted but the user is not authenticated
        # -> PermissionDenied (raise_exception=True on both views).
        user = models.User.objects.create(username='joe', password='qwerty')
        user.is_authenticated = lambda: False
        perms = models.Permission.objects.filter(codename__in=('add_customuser', 'change_customuser'))
        user.user_permissions.add(*perms)
        request = self.factory.get('/rand')
        request.user = user

        view = StackedMixinsView1.as_view()
        with self.assertRaises(PermissionDenied):
            view(request)

        view = StackedMixinsView2.as_view()
        with self.assertRaises(PermissionDenied):
            view(request)
class UserPassesTestTests(TestCase):
    """Behavior of UserPassesTestMixin: redirects, exceptions, messages."""
    factory = RequestFactory()

    def _test_redirect(self, view=None, url='/accounts/login/?next=/rand'):
        # Helper: an anonymous GET against *view* must 302 to *url*.
        if not view:
            view = AlwaysFalseView.as_view()
        request = self.factory.get('/rand')
        request.user = AnonymousUser()
        response = view(request)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, url)

    def test_default(self):
        self._test_redirect()

    def test_custom_redirect_url(self):
        class AView(AlwaysFalseView):
            login_url = '/login/'

        self._test_redirect(AView.as_view(), '/login/?next=/rand')

    def test_custom_redirect_parameter(self):
        class AView(AlwaysFalseView):
            redirect_field_name = 'goto'

        self._test_redirect(AView.as_view(), '/accounts/login/?goto=/rand')

    def test_no_redirect_parameter(self):
        # redirect_field_name=None suppresses the ?next= query string.
        class AView(AlwaysFalseView):
            redirect_field_name = None

        self._test_redirect(AView.as_view(), '/accounts/login/')

    def test_raise_exception(self):
        class AView(AlwaysFalseView):
            raise_exception = True

        request = self.factory.get('/rand')
        request.user = AnonymousUser()
        self.assertRaises(PermissionDenied, AView.as_view(), request)

    def test_raise_exception_custom_message(self):
        # Message supplied as a class attribute.
        msg = "You don't have access here"

        class AView(AlwaysFalseView):
            raise_exception = True
            permission_denied_message = msg

        request = self.factory.get('/rand')
        request.user = AnonymousUser()
        view = AView.as_view()
        with self.assertRaises(PermissionDenied) as cm:
            view(request)
        self.assertEqual(cm.exception.args[0], msg)

    def test_raise_exception_custom_message_function(self):
        # Message supplied by overriding get_permission_denied_message().
        msg = "You don't have access here"

        class AView(AlwaysFalseView):
            raise_exception = True

            def get_permission_denied_message(self):
                return msg

        request = self.factory.get('/rand')
        request.user = AnonymousUser()
        view = AView.as_view()
        with self.assertRaises(PermissionDenied) as cm:
            view(request)
        self.assertEqual(cm.exception.args[0], msg)

    def test_user_passes(self):
        view = AlwaysTrueView.as_view()
        request = self.factory.get('/rand')
        request.user = AnonymousUser()
        response = view(request)
        self.assertEqual(response.status_code, 200)
class LoginRequiredMixinTests(TestCase):
    """Behavior of LoginRequiredMixin on a plain view."""
    factory = RequestFactory()

    @classmethod
    def setUpTestData(cls):
        cls.user = models.User.objects.create(username='joe', password='qwerty')

    def test_login_required(self):
        """
        Check that login_required works on a simple view wrapped in a
        login_required decorator.
        """
        class AView(LoginRequiredMixin, EmptyResponseView):
            pass

        view = AView.as_view()
        request = self.factory.get('/rand')
        request.user = AnonymousUser()
        # Anonymous -> redirect to login with ?next= back-reference.
        response = view(request)
        self.assertEqual(response.status_code, 302)
        self.assertEqual('/accounts/login/?next=/rand', response.url)
        # Authenticated -> the view renders normally.
        request = self.factory.get('/rand')
        request.user = self.user
        response = view(request)
        self.assertEqual(response.status_code, 200)
class PermissionsRequiredMixinTests(TestCase):
    """Behavior of PermissionRequiredMixin with one or many permissions."""
    factory = RequestFactory()

    @classmethod
    def setUpTestData(cls):
        cls.user = models.User.objects.create(username='joe', password='qwerty')
        # Grant both custom-user permissions used by the tests below.
        perms = models.Permission.objects.filter(codename__in=('add_customuser', 'change_customuser'))
        cls.user.user_permissions.add(*perms)

    def test_many_permissions_pass(self):
        class AView(PermissionRequiredMixin, EmptyResponseView):
            permission_required = ['auth.add_customuser', 'auth.change_customuser']

        request = self.factory.get('/rand')
        request.user = self.user
        resp = AView.as_view()(request)
        self.assertEqual(resp.status_code, 200)

    def test_single_permission_pass(self):
        # permission_required also accepts a single string.
        class AView(PermissionRequiredMixin, EmptyResponseView):
            permission_required = 'auth.add_customuser'

        request = self.factory.get('/rand')
        request.user = self.user
        resp = AView.as_view()(request)
        self.assertEqual(resp.status_code, 200)

    def test_permissioned_denied_redirect(self):
        # An unknown permission fails the check -> redirect to login.
        class AView(PermissionRequiredMixin, EmptyResponseView):
            permission_required = ['auth.add_customuser', 'auth.change_customuser', 'non-existent-permission']

        request = self.factory.get('/rand')
        request.user = self.user
        resp = AView.as_view()(request)
        self.assertEqual(resp.status_code, 302)

    def test_permissioned_denied_exception_raised(self):
        # Same as above but raise_exception=True -> PermissionDenied.
        class AView(PermissionRequiredMixin, EmptyResponseView):
            permission_required = ['auth.add_customuser', 'auth.change_customuser', 'non-existent-permission']
            raise_exception = True

        request = self.factory.get('/rand')
        request.user = self.user
        self.assertRaises(PermissionDenied, AView.as_view(), request)
| bsd-3-clause |
zzicewind/nova | nova/tests/unit/compute/test_claims.py | 8 | 14665 | # Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for resource tracker claims."""
import uuid
import mock
from oslo_serialization import jsonutils
from nova.compute import claims
from nova import context
from nova import db
from nova import exception
from nova import objects
from nova.pci import manager as pci_manager
from nova import test
from nova.tests.unit.pci import fakes as pci_fakes
class FakeResourceHandler(object):
    """Stand-in for the resource tracker's extension resource handler.

    Records whether ``test_resources`` was called and whether the usage
    dict looked like the fake flavor (``name == 'fakeitype'``).
    """
    test_called = False
    usage_is_instance = False
    # Bug fix: define a class-level default so reading ``usage_is_itype``
    # before the first ``test_resources`` call no longer raises
    # AttributeError (previously only set inside the method).
    usage_is_itype = False

    def test_resources(self, usage, limits):
        """Record the call; flag whether *usage* names the fake flavor."""
        self.test_called = True
        self.usage_is_itype = usage.get('name') == 'fakeitype'
        return []
class DummyTracker(object):
    """Minimal stand-in for the compute resource tracker."""
    icalled = False
    rcalled = False
    ext_resources_handler = FakeResourceHandler()

    def __init__(self):
        self.new_pci_tracker()

    def abort_instance_claim(self, *args, **kwargs):
        # Flag that the instance claim was aborted.
        self.icalled = True

    def drop_move_claim(self, *args, **kwargs):
        # Flag that the move claim was dropped.
        self.rcalled = True

    def new_pci_tracker(self):
        # Fresh PCI device tracker bound to a throwaway request context.
        ctxt = context.RequestContext('testuser', 'testproject')
        self.pci_tracker = pci_manager.PciDevTracker(ctxt)
@mock.patch('nova.objects.InstancePCIRequests.get_by_instance_uuid',
            return_value=objects.InstancePCIRequests(requests=[]))
class ClaimTestCase(test.NoDBTestCase):
    """Acceptance/rejection of claims.Claim against fake host resources.

    The class-level mock makes every instance appear to have no PCI
    requests unless a test overrides ``mock_get.return_value``.
    """

    def setUp(self):
        super(ClaimTestCase, self).setUp()
        self.resources = self._fake_resources()
        self.tracker = DummyTracker()

    def _claim(self, limits=None, overhead=None, **kwargs):
        """Build a claims.Claim for a fake instance; kwargs override fields.

        A ``numa_topology`` kwarg (popped) is serialized into the fake
        instance_extra DB row that the claim code reads.
        """
        numa_topology = kwargs.pop('numa_topology', None)
        instance = self._fake_instance(**kwargs)
        if numa_topology:
            db_numa_topology = {
                'id': 1, 'created_at': None, 'updated_at': None,
                'deleted_at': None, 'deleted': None,
                'instance_uuid': instance['uuid'],
                'numa_topology': numa_topology._to_json()
            }
        else:
            db_numa_topology = None
        if overhead is None:
            overhead = {'memory_mb': 0}
        with mock.patch.object(
                db, 'instance_extra_get_by_instance_uuid',
                return_value=db_numa_topology):
            return claims.Claim('context', instance, self.tracker,
                                self.resources, overhead=overhead,
                                limits=limits)

    def _fake_instance(self, **kwargs):
        # Plain dict standing in for an instance record.
        instance = {
            'uuid': str(uuid.uuid1()),
            'memory_mb': 1024,
            'root_gb': 10,
            'ephemeral_gb': 5,
            'vcpus': 1,
            'system_metadata': {},
            'numa_topology': None
        }
        instance.update(**kwargs)
        return instance

    def _fake_instance_type(self, **kwargs):
        # Flavor named 'fakeitype' — matched by FakeResourceHandler.
        instance_type = {
            'id': 1,
            'name': 'fakeitype',
            'memory_mb': 1,
            'vcpus': 1,
            'root_gb': 1,
            'ephemeral_gb': 2
        }
        instance_type.update(**kwargs)
        return objects.Flavor(**instance_type)

    def _fake_resources(self, values=None):
        # Host with 2G RAM, 20G disk, 2 vcpus and two 512M NUMA cells.
        resources = {
            'memory_mb': 2048,
            'memory_mb_used': 0,
            'free_ram_mb': 2048,
            'local_gb': 20,
            'local_gb_used': 0,
            'free_disk_gb': 20,
            'vcpus': 2,
            'vcpus_used': 0,
            'numa_topology': objects.NUMATopology(
                cells=[objects.NUMACell(id=1, cpuset=set([1, 2]), memory=512,
                                        memory_usage=0, cpu_usage=0,
                                        mempages=[], siblings=[],
                                        pinned_cpus=set([])),
                       objects.NUMACell(id=2, cpuset=set([3, 4]), memory=512,
                                        memory_usage=0, cpu_usage=0,
                                        mempages=[], siblings=[],
                                        pinned_cpus=set([]))]
            )._to_json()
        }
        if values:
            resources.update(values)
        return resources

    def test_memory_unlimited(self, mock_get):
        # Without limits, oversized claims succeed (oversubscription).
        self._claim(memory_mb=99999999)

    def test_disk_unlimited_root(self, mock_get):
        self._claim(root_gb=999999)

    def test_disk_unlimited_ephemeral(self, mock_get):
        self._claim(ephemeral_gb=999999)

    def test_memory_with_overhead(self, mock_get):
        # 2040 + 8 overhead == the 2048 limit exactly.
        overhead = {'memory_mb': 8}
        limits = {'memory_mb': 2048}
        self._claim(memory_mb=2040, limits=limits,
                    overhead=overhead)

    def test_memory_with_overhead_insufficient(self, mock_get):
        # 2040 + 9 overhead exceeds the 2048 limit.
        overhead = {'memory_mb': 9}
        limits = {'memory_mb': 2048}
        self.assertRaises(exception.ComputeResourcesUnavailable,
                          self._claim, limits=limits, overhead=overhead,
                          memory_mb=2040)

    def test_memory_oversubscription(self, mock_get):
        self._claim(memory_mb=4096)

    def test_memory_insufficient(self, mock_get):
        limits = {'memory_mb': 8192}
        self.assertRaises(exception.ComputeResourcesUnavailable,
                          self._claim, limits=limits, memory_mb=16384)

    def test_disk_oversubscription(self, mock_get):
        limits = {'disk_gb': 60}
        self._claim(root_gb=10, ephemeral_gb=40,
                    limits=limits)

    def test_disk_insufficient(self, mock_get):
        limits = {'disk_gb': 45}
        self.assertRaisesRegex(
            exception.ComputeResourcesUnavailable,
            "disk",
            self._claim, limits=limits, root_gb=10, ephemeral_gb=40)

    def test_disk_and_memory_insufficient(self, mock_get):
        # Both shortages must be mentioned in the error message.
        limits = {'disk_gb': 45, 'memory_mb': 8192}
        self.assertRaisesRegex(
            exception.ComputeResourcesUnavailable,
            "memory.*disk",
            self._claim, limits=limits, root_gb=10, ephemeral_gb=40,
            memory_mb=16384)

    @pci_fakes.patch_pci_whitelist
    def test_pci_pass(self, mock_get):
        dev_dict = {
            'compute_node_id': 1,
            'address': 'a',
            'product_id': 'p',
            'vendor_id': 'v',
            'numa_node': 0,
            'status': 'available'}
        self.tracker.new_pci_tracker()
        self.tracker.pci_tracker._set_hvdevs([dev_dict])
        claim = self._claim()
        request = objects.InstancePCIRequest(count=1,
            spec=[{'vendor_id': 'v', 'product_id': 'p'}])
        mock_get.return_value = objects.InstancePCIRequests(
            requests=[request])
        self.assertIsNone(claim._test_pci())

    @pci_fakes.patch_pci_whitelist
    def test_pci_fail(self, mock_get):
        # Host only exposes vendor 'v1'; the request asks for 'v'.
        dev_dict = {
            'compute_node_id': 1,
            'address': 'a',
            'product_id': 'p',
            'vendor_id': 'v1',
            'numa_node': 1,
            'status': 'available'}
        self.tracker.new_pci_tracker()
        self.tracker.pci_tracker._set_hvdevs([dev_dict])
        claim = self._claim()
        request = objects.InstancePCIRequest(count=1,
            spec=[{'vendor_id': 'v', 'product_id': 'p'}])
        mock_get.return_value = objects.InstancePCIRequests(
            requests=[request])
        claim._test_pci()

    @pci_fakes.patch_pci_whitelist
    def test_pci_pass_no_requests(self, mock_get):
        dev_dict = {
            'compute_node_id': 1,
            'address': 'a',
            'product_id': 'p',
            'vendor_id': 'v',
            'numa_node': 0,
            'status': 'available'}
        self.tracker.new_pci_tracker()
        self.tracker.pci_tracker._set_hvdevs([dev_dict])
        claim = self._claim()
        self.assertIsNone(claim._test_pci())

    def test_ext_resources(self, mock_get):
        # Claim (not MoveClaim) passes instance usage, not flavor usage.
        self._claim()
        self.assertTrue(self.tracker.ext_resources_handler.test_called)
        self.assertFalse(self.tracker.ext_resources_handler.usage_is_itype)

    def test_numa_topology_no_limit(self, mock_get):
        huge_instance = objects.InstanceNUMATopology(
            cells=[objects.InstanceNUMACell(
                id=1, cpuset=set([1, 2]), memory=512)])
        self._claim(numa_topology=huge_instance)

    def test_numa_topology_fails(self, mock_get):
        huge_instance = objects.InstanceNUMATopology(
            cells=[objects.InstanceNUMACell(
                id=1, cpuset=set([1, 2, 3, 4, 5]), memory=2048)])
        limit_topo = objects.NUMATopologyLimits(
            cpu_allocation_ratio=1, ram_allocation_ratio=1)
        self.assertRaises(exception.ComputeResourcesUnavailable,
                          self._claim,
                          limits={'numa_topology': limit_topo},
                          numa_topology=huge_instance)

    def test_numa_topology_passes(self, mock_get):
        huge_instance = objects.InstanceNUMATopology(
            cells=[objects.InstanceNUMACell(
                id=1, cpuset=set([1, 2]), memory=512)])
        limit_topo = objects.NUMATopologyLimits(
            cpu_allocation_ratio=1, ram_allocation_ratio=1)
        self._claim(limits={'numa_topology': limit_topo},
                    numa_topology=huge_instance)

    @pci_fakes.patch_pci_whitelist
    def test_numa_topology_with_pci(self, mock_get):
        dev_dict = {
            'compute_node_id': 1,
            'address': 'a',
            'product_id': 'p',
            'vendor_id': 'v',
            'numa_node': 1,
            'status': 'available'}
        self.tracker.new_pci_tracker()
        self.tracker.pci_tracker._set_hvdevs([dev_dict])
        request = objects.InstancePCIRequest(count=1,
            spec=[{'vendor_id': 'v', 'product_id': 'p'}])
        mock_get.return_value = objects.InstancePCIRequests(
            requests=[request])
        huge_instance = objects.InstanceNUMATopology(
            cells=[objects.InstanceNUMACell(
                id=1, cpuset=set([1, 2]), memory=512)])
        self._claim(numa_topology= huge_instance)

    @pci_fakes.patch_pci_whitelist
    def test_numa_topology_with_pci_fail(self, mock_get):
        # Two devices on different NUMA nodes cannot satisfy a 2-device
        # request for an instance pinned to a single cell.
        dev_dict = {
            'compute_node_id': 1,
            'address': 'a',
            'product_id': 'p',
            'vendor_id': 'v',
            'numa_node': 1,
            'status': 'available'}
        dev_dict2 = {
            'compute_node_id': 1,
            'address': 'a',
            'product_id': 'p',
            'vendor_id': 'v',
            'numa_node': 2,
            'status': 'available'}
        self.tracker.new_pci_tracker()
        self.tracker.pci_tracker._set_hvdevs([dev_dict, dev_dict2])
        request = objects.InstancePCIRequest(count=2,
            spec=[{'vendor_id': 'v', 'product_id': 'p'}])
        mock_get.return_value = objects.InstancePCIRequests(
            requests=[request])
        huge_instance = objects.InstanceNUMATopology(
            cells=[objects.InstanceNUMACell(
                id=1, cpuset=set([1, 2]), memory=512)])
        self.assertRaises(exception.ComputeResourcesUnavailable,
                          self._claim,
                          numa_topology=huge_instance)

    @pci_fakes.patch_pci_whitelist
    def test_numa_topology_with_pci_no_numa_info(self, mock_get):
        # Device without NUMA affinity should still satisfy the request.
        dev_dict = {
            'compute_node_id': 1,
            'address': 'a',
            'product_id': 'p',
            'vendor_id': 'v',
            'numa_node': None,
            'status': 'available'}
        self.tracker.new_pci_tracker()
        self.tracker.pci_tracker._set_hvdevs([dev_dict])
        request = objects.InstancePCIRequest(count=1,
            spec=[{'vendor_id': 'v', 'product_id': 'p'}])
        mock_get.return_value = objects.InstancePCIRequests(
            requests=[request])
        huge_instance = objects.InstanceNUMATopology(
            cells=[objects.InstanceNUMACell(
                id=1, cpuset=set([1, 2]), memory=512)])
        self._claim(numa_topology= huge_instance)

    def test_abort(self, mock_get):
        claim = self._abort()
        self.assertTrue(claim.tracker.icalled)

    def _abort(self):
        # Enter the claim context and raise inside it to trigger abort().
        claim = None
        try:
            with self._claim(memory_mb=4096) as claim:
                raise test.TestingException("abort")
        except test.TestingException:
            pass
        return claim
class MoveClaimTestCase(ClaimTestCase):
    """Runs the inherited checks against claims.MoveClaim (resize path)."""

    def setUp(self):
        super(MoveClaimTestCase, self).setUp()
        self.instance = self._fake_instance()
        self.get_numa_constraint_patch = None

    def _claim(self, limits=None, overhead=None, **kwargs):
        # Overridden: builds a MoveClaim from a fake flavor.
        # NOTE(review): kwargs still contains 'numa_topology' when passed
        # to _fake_instance_type below — the pop happens afterwards;
        # verify whether Flavor tolerates that extra field.
        instance_type = self._fake_instance_type(**kwargs)
        numa_constraint = kwargs.pop('numa_topology', None)
        if overhead is None:
            overhead = {'memory_mb': 0}
        with mock.patch(
                'nova.virt.hardware.numa_get_constraints',
                return_value=numa_constraint):
            return claims.MoveClaim('context', self.instance, instance_type,
                                    {}, self.tracker, self.resources,
                                    overhead=overhead, limits=limits)

    def _set_pci_request(self, claim):
        # Stash a PCI request in system_metadata the way resize does.
        request = [{'count': 1,
                    'spec': [{'vendor_id': 'v', 'product_id': 'p'}],
                    }]
        claim.instance.update(
            system_metadata={'new_pci_requests': jsonutils.dumps(request)})

    @mock.patch('nova.objects.InstancePCIRequests.get_by_instance_uuid',
                return_value=objects.InstancePCIRequests(requests=[]))
    def test_ext_resources(self, mock_get):
        # MoveClaim passes flavor usage, so usage_is_itype must be True.
        self._claim()
        self.assertTrue(self.tracker.ext_resources_handler.test_called)
        self.assertTrue(self.tracker.ext_resources_handler.usage_is_itype)

    @mock.patch('nova.objects.InstancePCIRequests.get_by_instance_uuid',
                return_value=objects.InstancePCIRequests(requests=[]))
    def test_abort(self, mock_get):
        # Aborting a move claim goes through drop_move_claim().
        claim = self._abort()
        self.assertTrue(claim.tracker.rcalled)
| apache-2.0 |
caphrim007/ansible | lib/ansible/modules/cloud/azure/azure_rm_publicipaddress_facts.py | 14 | 5576 | #!/usr/bin/python
#
# Copyright (c) 2016 Matt Davis, <mdavis@ansible.com>
# Chris Houseknecht, <house@redhat.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
# Ensure new-style classes under Python 2.
__metaclass__ = type


# Ansible metadata: module maturity and support level.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'certified'}
DOCUMENTATION = '''
---
module: azure_rm_publicipaddress_facts
version_added: "2.1"
short_description: Get public IP facts.
description:
- Get facts for a specific public IP or all public IPs within a resource group.
options:
name:
description:
- Only show results for a specific Public IP.
resource_group:
description:
- Limit results by resource group. Required when using name parameter.
tags:
description:
- Limit results by providing a list of tags. Format tags as 'key' or 'key:value'.
extends_documentation_fragment:
- azure
author:
- "Chris Houseknecht (@chouseknecht)"
- "Matt Davis (@nitzmahone)"
'''
EXAMPLES = '''
- name: Get facts for one Public IP
azure_rm_publicipaddress_facts:
resource_group: Testing
name: publicip001
- name: Get facts for all Public IPs within a resource groups
azure_rm_publicipaddress_facts:
resource_group: Testing
'''
RETURN = '''
azure_publicipaddresses:
description: List of public IP address dicts.
returned: always
type: list
example: [{
"etag": 'W/"a31a6d7d-cb18-40a5-b16d-9f4a36c1b18a"',
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Network/publicIPAddresses/pip2001",
"location": "eastus2",
"name": "pip2001",
"properties": {
"idleTimeoutInMinutes": 4,
"provisioningState": "Succeeded",
"publicIPAllocationMethod": "Dynamic",
"resourceGuid": "29de82f4-a7da-440e-bd3d-9cabb79af95a"
},
"type": "Microsoft.Network/publicIPAddresses"
}]
'''
# Azure SDK imports are optional at import time; absence is reported to
# the user by AzureRMModuleBase (azure_rm_common) with a proper message.
try:
    from msrestazure.azure_exceptions import CloudError
    from azure.common import AzureMissingResourceHttpError, AzureHttpError
# Bug fix: narrowed from a bare ``except:`` which also swallowed
# SystemExit/KeyboardInterrupt; only a missing library should be ignored.
except ImportError:
    # This is handled in azure_rm_common
    pass
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
# SDK model class name used by serialize_obj() when flattening results.
AZURE_OBJECT_CLASS = 'PublicIp'
class AzureRMPublicIPFacts(AzureRMModuleBase):
    """Gather facts for Azure public IP addresses.

    Results land under ``ansible_facts.azure_publicipaddresses`` and can
    be scoped to a single IP (name + resource_group), one resource group,
    or the whole subscription, optionally filtered by tags.
    """

    def __init__(self):

        self.module_arg_spec = dict(
            name=dict(type='str'),
            resource_group=dict(type='str'),
            tags=dict(type='list')
        )

        self.results = dict(
            changed=False,
            ansible_facts=dict(azure_publicipaddresses=[])
        )

        self.name = None
        self.resource_group = None
        self.tags = None

        super(AzureRMPublicIPFacts, self).__init__(self.module_arg_spec,
                                                   supports_tags=False,
                                                   facts_module=True)

    def exec_module(self, **kwargs):
        """Entry point: dispatch to the narrowest listing the params allow."""
        for key in self.module_arg_spec:
            setattr(self, key, kwargs[key])

        if self.name and not self.resource_group:
            self.fail("Parameter error: resource group required when filtering by name.")

        if self.name:
            self.results['ansible_facts']['azure_publicipaddresses'] = self.get_item()
        elif self.resource_group:
            self.results['ansible_facts']['azure_publicipaddresses'] = self.list_resource_group()
        else:
            self.results['ansible_facts']['azure_publicipaddresses'] = self.list_all()

        return self.results

    def _serialize_pip(self, item):
        # Refactor: this serialize/name/type triplet was duplicated in
        # get_item(), list_resource_group() and list_all().
        pip = self.serialize_obj(item, AZURE_OBJECT_CLASS)
        pip['name'] = item.name
        pip['type'] = item.type
        return pip

    def get_item(self):
        """Return one public IP by name, or [] when absent or filtered out."""
        self.log('Get properties for {0}'.format(self.name))
        item = None
        result = []

        try:
            item = self.network_client.public_ip_addresses.get(self.resource_group, self.name)
        except CloudError:
            # A missing resource is not an error for a facts module.
            pass

        if item and self.has_tags(item.tags, self.tags):
            result = [self._serialize_pip(item)]

        return result

    def list_resource_group(self):
        """Return all public IPs in ``resource_group`` matching the tag filter."""
        self.log('List items in resource groups')
        try:
            response = self.network_client.public_ip_addresses.list(self.resource_group)
        except AzureHttpError as exc:
            self.fail("Error listing items in resource groups {0} - {1}".format(self.resource_group, str(exc)))

        return [self._serialize_pip(item) for item in response
                if self.has_tags(item.tags, self.tags)]

    def list_all(self):
        """Return all public IPs in the subscription matching the tag filter."""
        self.log('List all items')
        try:
            response = self.network_client.public_ip_addresses.list_all()
        except AzureHttpError as exc:
            self.fail("Error listing all items - {0}".format(str(exc)))

        return [self._serialize_pip(item) for item in response
                if self.has_tags(item.tags, self.tags)]
def main():
    # Instantiating the module runs it: AzureRMModuleBase.__init__
    # drives argument parsing and exec_module().
    AzureRMPublicIPFacts()


if __name__ == '__main__':
    main()
| gpl-3.0 |
ybroze/trading-with-python | cookbook/connectToTWS.py | 77 | 2837 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from ib.ext.Contract import Contract
from ib.ext.ExecutionFilter import ExecutionFilter
from ib.opt import ibConnection, message
from time import sleep
# print all messages from TWS
def watcher(msg):
    # Catch-all TWS message handler: dumps every message to stdout
    # with a '[watcher]' prefix (Python 2 print statement).
    print '[watcher]',msg
def dummyHandler(msg):
    """No-op handler used to deliberately swallow unwanted TWS messages."""
    return None
# show Bid and Ask quotes
def my_BidAsk(msg):
    # Tick-price handler: IB tick field 1 is the bid, field 2 is the ask.
    # NOTE(review): relies on a global 'contractTuple', which is only defined
    # locally inside testMarketData() -- verify this handler is actually wired up.
    print 'bid_ask'
    print msg
    if msg.field == 1:
        print '%s: bid: %s' % (contractTuple[0], msg.price)
    elif msg.field == 2:
        print '%s: ask: %s' % (contractTuple[0], msg.price)
def my_BidAsk2(msg):
    # Alternate debug handler: labels itself and dumps the raw message.
    print 'Handler 2'
    print msg
def portfolioHandler(msg):
    # UpdatePortfolio handler: dump the message, then the contract's symbol.
    print msg
    print msg.contract.m_symbol
def makeStkContract(contractTuple):
    # Build an IB Contract from a (symbol, secType, exchange, currency) tuple.
    newContract = Contract()
    newContract.m_symbol = contractTuple[0]
    newContract.m_secType = contractTuple[1]
    newContract.m_exchange = contractTuple[2]
    newContract.m_currency = contractTuple[3]
    print 'Contract Values:%s,%s,%s,%s:' % contractTuple
    return newContract
def testMarketData():
    # Subscribe to SPY market data for ~3 seconds, then cancel.
    # Uses the module-global 'con' connection created in the __main__ block.
    tickId = 1
    # Note: Option quotes will give an error if they aren't shown in TWS
    contractTuple = ('SPY', 'STK', 'SMART', 'USD')
    stkContract = makeStkContract(contractTuple)
    print '* * * * REQUESTING MARKET DATA * * * *'
    con.reqMktData(tickId, stkContract, '', False)
    sleep(3)
    print '* * * * CANCELING MARKET DATA * * * *'
    con.cancelMktData(tickId)
def testExecutions():
    # Request execution reports for SPY stock trades since the given time.
    # Uses the module-global 'con' connection; results arrive via handlers.
    print 'testing executions'
    f = ExecutionFilter()
    #f.m_clientId = 101
    f.m_time = '20110901-00:00:00'
    f.m_symbol = 'SPY'
    f.m_secType = 'STK'
    f.m_exchange = 'SMART'
    #f.m_side = 'BUY'
    con.reqExecutions(f)
    sleep(2)
def testAccountUpdates():
    # Start streaming account/portfolio updates ('' = default account).
    con.reqAccountUpdates(True,'')
def testHistoricData(con):
    # Request 1800s of 1-second TRADES bars for SPY ending at the given time.
    # Last args: useRTH=1, formatDate=2 (epoch timestamps).
    print 'Testing historic data'
    contractTuple = ('SPY', 'STK', 'SMART', 'USD')
    contract = makeStkContract(contractTuple)
    con.reqHistoricalData(1,contract,'20120803 22:00:00','1800 S','1 secs','TRADES',1,2)
    sleep(2)
def showMessageTypes():
    # show available messages
    # Python 2: dict.keys() returns a list, so in-place .sort() works here.
    m = message.registry.keys()
    m.sort()
    print 'Available message types\n-------------------------'
    for msgType in m:
        print msgType
if __name__ == '__main__':
    # Demo driver: connect to a running TWS instance, pull historic data,
    # then disconnect. Other tests are left commented out for manual use.
    showMessageTypes()
    con = ibConnection()
    con.registerAll(watcher) # show all messages
    con.register(portfolioHandler,message.UpdatePortfolio)
    #con.register(watcher,(message.tickPrice,))
    con.connect()
    testHistoricData(con)
    sleep(1)
    #testMarketData()
    #testExecutions()
    #testAccountUpdates()
    con.disconnect()
    sleep(2)
    print 'All done!'
| bsd-3-clause |
AndrewGrossman/django | django/middleware/http.py | 247 | 2129 | from django.utils.http import http_date, parse_http_date_safe
class ConditionalGetMiddleware(object):
    """
    Handles conditional GET operations. If the response has an ETag or
    Last-Modified header, and the request has If-None-Match or
    If-Modified-Since, the response is replaced by an HttpNotModified.
    Also sets the Date and Content-Length response-headers.
    """
    def process_response(self, request, response):
        response['Date'] = http_date()
        if not response.streaming and not response.has_header('Content-Length'):
            response['Content-Length'] = str(len(response.content))
        # Setting the status is enough here. The response handling path
        # automatically removes content for a 304 (http.conditional_content_removal()).
        if self._etag_matches(request, response) or self._not_modified_since(request, response):
            response.status_code = 304
        return response

    def _etag_matches(self, request, response):
        # If-None-Match must be ignored unless the original result would be a
        # 2XX; 304 would result in no change anyway (RFC 2616 sec 14.26).
        if not (200 <= response.status_code < 300 and response.has_header('ETag')):
            return False
        return request.META.get('HTTP_IF_NONE_MATCH') == response['ETag']

    def _not_modified_since(self, request, response):
        # If-Modified-Since must be ignored if the original result was not a
        # 200 (RFC 2616 sec 14.25).
        if response.status_code != 200 or not response.has_header('Last-Modified'):
            return False
        header = request.META.get('HTTP_IF_MODIFIED_SINCE')
        if header is None:
            return False
        since = parse_http_date_safe(header)
        if since is None:
            return False
        last_modified = parse_http_date_safe(response['Last-Modified'])
        return last_modified is not None and last_modified <= since
| bsd-3-clause |
mitya57/django | django/contrib/gis/gdal/envelope.py | 62 | 6973 | """
The GDAL/OGR library uses an Envelope structure to hold the bounding
box information for a geometry. The envelope (bounding box) contains
two pairs of coordinates, one for the lower left coordinate and one
for the upper right coordinate:
+----------o Upper right; (max_x, max_y)
| |
| |
| |
Lower left (min_x, min_y) o----------+
"""
from ctypes import Structure, c_double
from django.contrib.gis.gdal.error import GDALException
# The OGR definition of an Envelope is a C structure containing four doubles.
# See the 'ogr_core.h' source file for more information:
# http://www.gdal.org/ogr__core_8h_source.html
class OGREnvelope(Structure):
    "Represent the OGREnvelope C Structure."
    # Field order must match the C struct layout in GDAL's ogr_core.h
    # (MinX, MaxX, MinY, MaxY as doubles) -- do not reorder.
    _fields_ = [("MinX", c_double),
                ("MaxX", c_double),
                ("MinY", c_double),
                ("MaxY", c_double),
                ]
class Envelope:
    """
    The Envelope object is a C structure that contains the minimum and
    maximum X, Y coordinates for a rectangle bounding box. The naming
    of the variables is compatible with the OGR Envelope structure.
    """
    def __init__(self, *args):
        """
        The initialization function may take an OGREnvelope structure, 4-element
        tuple or list, or 4 individual arguments.

        Raises GDALException for a malformed argument list or an inverted
        (min > max) envelope, and TypeError for an unsupported argument type.
        """
        if len(args) == 1:
            if isinstance(args[0], OGREnvelope):
                # OGREnvelope (a ctypes Structure) was passed in.
                self._envelope = args[0]
            elif isinstance(args[0], (tuple, list)):
                # A tuple was passed in.
                if len(args[0]) != 4:
                    raise GDALException('Incorrect number of tuple elements (%d).' % len(args[0]))
                else:
                    self._from_sequence(args[0])
            else:
                raise TypeError('Incorrect type of argument: %s' % type(args[0]))
        elif len(args) == 4:
            # Individual parameters passed in.
            # Thanks to ww for the help
            self._from_sequence([float(a) for a in args])
        else:
            # Fixed: previously reported len(args[0]), which raised TypeError
            # (scalar first arg) or IndexError (no args) instead of the intended
            # GDALException message. len(args) matches the check above.
            raise GDALException('Incorrect number (%d) of arguments.' % len(args))
        # Checking the x,y coordinates
        if self.min_x > self.max_x:
            raise GDALException('Envelope minimum X > maximum X.')
        if self.min_y > self.max_y:
            raise GDALException('Envelope minimum Y > maximum Y.')
    def __eq__(self, other):
        """
        Return True if the envelopes are equivalent; can compare against
        other Envelopes and 4-tuples.
        """
        if isinstance(other, Envelope):
            return (self.min_x == other.min_x) and (self.min_y == other.min_y) and \
                   (self.max_x == other.max_x) and (self.max_y == other.max_y)
        elif isinstance(other, tuple) and len(other) == 4:
            return (self.min_x == other[0]) and (self.min_y == other[1]) and \
                   (self.max_x == other[2]) and (self.max_y == other[3])
        else:
            raise GDALException('Equivalence testing only works with other Envelopes.')
    def __str__(self):
        "Return a string representation of the tuple."
        return str(self.tuple)
    def _from_sequence(self, seq):
        "Initialize the C OGR Envelope structure from the given sequence."
        self._envelope = OGREnvelope()
        self._envelope.MinX = seq[0]
        self._envelope.MinY = seq[1]
        self._envelope.MaxX = seq[2]
        self._envelope.MaxY = seq[3]
    def expand_to_include(self, *args):
        """
        Modify the envelope to expand to include the boundaries of
        the passed-in 2-tuple (a point), 4-tuple (an extent) or
        envelope.
        """
        # We provide a number of different signatures for this method,
        # and the logic here is all about converting them into a
        # 4-tuple single parameter which does the actual work of
        # expanding the envelope.
        if len(args) == 1:
            if isinstance(args[0], Envelope):
                return self.expand_to_include(args[0].tuple)
            elif hasattr(args[0], 'x') and hasattr(args[0], 'y'):
                return self.expand_to_include(args[0].x, args[0].y, args[0].x, args[0].y)
            elif isinstance(args[0], (tuple, list)):
                # A tuple was passed in.
                if len(args[0]) == 2:
                    return self.expand_to_include((args[0][0], args[0][1], args[0][0], args[0][1]))
                elif len(args[0]) == 4:
                    (minx, miny, maxx, maxy) = args[0]
                    if minx < self._envelope.MinX:
                        self._envelope.MinX = minx
                    if miny < self._envelope.MinY:
                        self._envelope.MinY = miny
                    if maxx > self._envelope.MaxX:
                        self._envelope.MaxX = maxx
                    if maxy > self._envelope.MaxY:
                        self._envelope.MaxY = maxy
                else:
                    raise GDALException('Incorrect number of tuple elements (%d).' % len(args[0]))
            else:
                raise TypeError('Incorrect type of argument: %s' % type(args[0]))
        elif len(args) == 2:
            # An x and an y parameter were passed in
            return self.expand_to_include((args[0], args[1], args[0], args[1]))
        elif len(args) == 4:
            # Individual parameters passed in.
            return self.expand_to_include(args)
        else:
            # Fixed: was len(args[0]), which crashed with IndexError for zero
            # arguments (and mirrors the corrected message in __init__).
            raise GDALException('Incorrect number (%d) of arguments.' % len(args))
    @property
    def min_x(self):
        "Return the value of the minimum X coordinate."
        return self._envelope.MinX
    @property
    def min_y(self):
        "Return the value of the minimum Y coordinate."
        return self._envelope.MinY
    @property
    def max_x(self):
        "Return the value of the maximum X coordinate."
        return self._envelope.MaxX
    @property
    def max_y(self):
        "Return the value of the maximum Y coordinate."
        return self._envelope.MaxY
    @property
    def ur(self):
        "Return the upper-right coordinate."
        return (self.max_x, self.max_y)
    @property
    def ll(self):
        "Return the lower-left coordinate."
        return (self.min_x, self.min_y)
    @property
    def tuple(self):
        "Return a tuple representing the envelope."
        return (self.min_x, self.min_y, self.max_x, self.max_y)
    @property
    def wkt(self):
        "Return WKT representing a Polygon for this envelope."
        # TODO: Fix significant figures.
        return 'POLYGON((%s %s,%s %s,%s %s,%s %s,%s %s))' % \
               (self.min_x, self.min_y, self.min_x, self.max_y,
                self.max_x, self.max_y, self.max_x, self.min_y,
                self.min_x, self.min_y)
| bsd-3-clause |
Milad-Rakhsha/chrono | src/demos/python/irrlicht/demo_IRR_callbackNSC.py | 3 | 6089 | # =============================================================================
# PROJECT CHRONO - http:#projectchrono.org
#
# Copyright (c) 2014 projectchrono.org
# All rights reserved.
#
# Use of this source code is governed by a BSD-style license that can be found
# in the LICENSE file at the top level of the distribution and at
# http:#projectchrono.org/license-chrono.txt.
#
# =============================================================================
# Authors: Simone Benatti
# =============================================================================
#
# Chrono demonstration of using contact callbacks for non-smooth contacts
# (complementarity-based).
#
# The global reference frame has Y up.
#
# =============================================================================
import pychrono as chrono
import pychrono.irrlicht as chronoirr
# -----------------------------------------------------------------------------
# Callback class for contact reporting
# -----------------------------------------------------------------------------
class ContactReporter(chrono.ReportContactCallback):
    """Contact-reporting callback: prints the contact point that lies on the
    tracked box (m_box), whichever side of the pair it appears on."""

    def __init__(self, box):
        self.m_box = box
        super().__init__()

    def OnReportContact(self, pA, pB, plane_coord, distance, eff_radius,
                        cforce, ctorque, modA, modB):
        bodyA = chrono.CastToChBody(modA)
        bodyB = chrono.CastToChBody(modB)
        # Report the point belonging to the tracked box only.
        if bodyA == self.m_box:
            print(" ", pA.x, pA.y, pA.z)
        elif bodyB == self.m_box:
            print(" ", pB.x, pB.y, pB.z)
        return True
# -----------------------------------------------------------------------------
# Callback class for modifying composite material
# -----------------------------------------------------------------------------
class ContactMaterial(chrono.AddContactCallback):
    """Composite-material callback: assigns friction based on which half of
    the floor (by contact-point z sign) the contact occurs in."""

    def __init__(self):
        super().__init__()

    def OnAddContact(self, contactinfo, material):
        # Downcast to the NSC composite material type before mutating it.
        mat = chrono.CastToChMaterialCompositeNSC(material)
        # Left half (z > 0) is slick; right half is grippy.
        friction = 0.3 if contactinfo.vpA.z > 0 else 0.8
        mat.static_friction = friction
        mat.sliding_friction = friction
print( "Copyright (c) 2017 projectchrono.org")
# ----------------
# Parameters
# ----------------
friction = 0.6
collision_envelope = .001
# -----------------
# Create the system
# -----------------
system = chrono.ChSystemNSC()
system.Set_G_acc(chrono.ChVectorD(0, -10, 0))
# Set solver settings
system.SetSolverMaxIterations(100)
system.SetMaxPenetrationRecoverySpeed(1e8)
system.SetSolverForceTolerance(0)
# --------------------------------------------------
# Create a contact material, shared among all bodies
# --------------------------------------------------
material = chrono.ChMaterialSurfaceNSC()
material.SetFriction(friction)
# ----------
# Add bodies
# ----------
# Fixed floor box (4 x 0.5 x 4 half-dimensions, top face at y = 0).
container = chrono.ChBody()
system.Add(container)
container.SetPos(chrono.ChVectorD(0, 0, 0))
container.SetBodyFixed(True)
container.SetIdentifier(-1)
container.SetCollide(True)
container.GetCollisionModel().SetEnvelope(collision_envelope)
container.GetCollisionModel().ClearModel()
chrono.AddBoxGeometry(container, material, chrono.ChVectorD(4, 0.5, 4), chrono.ChVectorD(0, -0.5, 0))
container.GetCollisionModel().BuildModel()
container.AddAsset(chrono.ChColorAsset(chrono.ChColor(0.4, 0.4, 0.4)))
# Two sliding boxes launched along +x, one on each half of the floor.
# NOTE(review): box1 uses chrono.ChBody() while box2 uses system.NewBody() --
# presumably equivalent here; confirm against the pychrono API.
box1 = chrono.ChBody()
box1.SetMass(10)
box1.SetInertiaXX(chrono.ChVectorD(1, 1, 1))
box1.SetPos(chrono.ChVectorD(-1, 0.21, -1))
box1.SetPos_dt(chrono.ChVectorD(5, 0, 0))
box1.SetCollide(True)
box1.GetCollisionModel().SetEnvelope(collision_envelope)
box1.GetCollisionModel().ClearModel()
chrono.AddBoxGeometry(box1, material, chrono.ChVectorD(0.4, 0.2, 0.1))
box1.GetCollisionModel().BuildModel()
box1.AddAsset(chrono.ChColorAsset(chrono.ChColor(0.1, 0.1, 0.4)))
system.AddBody(box1)
box2 = chrono.ChBody(system.NewBody())
box2.SetMass(10)
box2.SetInertiaXX(chrono.ChVectorD(1, 1, 1))
box2.SetPos(chrono.ChVectorD(-1, 0.21, +1))
box2.SetPos_dt(chrono.ChVectorD(5, 0, 0))
box2.SetCollide(True)
box2.GetCollisionModel().SetEnvelope(collision_envelope)
box2.GetCollisionModel().ClearModel()
chrono.AddBoxGeometry(box2, material, chrono.ChVectorD(0.4, 0.2, 0.1))
box2.GetCollisionModel().BuildModel()
box2.AddAsset(chrono.ChColorAsset(chrono.ChColor(0.4, 0.1, 0.1)))
system.AddBody(box2)
# -------------------------------
# Create the visualization window
# -------------------------------
application = chronoirr.ChIrrApp(system, "NSC callbacks", chronoirr.dimension2du(800, 600))
application.AddTypicalLogo(chrono.GetChronoDataFile('logo_pychrono_alpha.png'))
application.AddTypicalSky()
application.AddTypicalLights()
application.AddTypicalCamera(chronoirr.vector3df(4, 4, -6))
application.AssetBindAll()
application.AssetUpdateAll()
# ---------------
# Simulate system
# ---------------
# cmaterial is invoked automatically on each new contact; creporter is applied
# manually once per frame via ReportAllContacts below.
creporter = ContactReporter(box1)
cmaterial = ContactMaterial()
system.GetContactContainer().RegisterAddContactCallback(cmaterial)
application.SetTimestep(1e-3)
while (application.GetDevice().run()) :
    application.BeginScene(True, True, chronoirr.SColor(255, 140, 161, 192))
    application.DrawAll()
    chronoirr.drawGrid(application.GetVideoDriver(), 0.5, 0.5, 12, 12,
                       chrono.ChCoordsysD(chrono.ChVectorD(0, 0, 0), chrono.Q_from_AngX(chrono.CH_C_PI_2)))
    application.DoStep()
    application.EndScene()
    # Process contacts
    print(str(system.GetChTime() ) + " " + str(system.GetNcontacts()) )
    system.GetContactContainer().ReportAllContacts(creporter)
| bsd-3-clause |
40223249-1/2015cd_midterm2 | static/Brython3.1.1-20150328-091302/Lib/collections/__init__.py | 625 | 25849 | #__all__ = ['deque', 'defaultdict', 'Counter']
from _collections import deque, defaultdict
#from itertools import repeat as _repeat, chain as _chain, starmap as _starmap
__all__ = ['deque', 'defaultdict', 'namedtuple', 'UserDict', 'UserList',
'UserString', 'Counter', 'OrderedDict']
# For bootstrapping reasons, the collection ABCs are defined in _abcoll.py.
# They should however be considered an integral part of collections.py.
# fixme brython.. there is an issue with _abcoll
#from _abcoll import *
#from _abcoll import Set
from _abcoll import MutableMapping
#import _abcoll
#__all__ += _abcoll.__all__
from collections.abc import *
import collections.abc
__all__ += collections.abc.__all__
from _collections import deque, defaultdict, namedtuple
from operator import itemgetter as _itemgetter
from keyword import iskeyword as _iskeyword
import sys as _sys
import heapq as _heapq
#fixme brython
#from weakref import proxy as _proxy
from itertools import repeat as _repeat, chain as _chain, starmap as _starmap
from reprlib import recursive_repr as _recursive_repr
class Set(set):
    # Placeholder: stands in for the ABC 'Set' because the _abcoll import is
    # disabled above (Brython limitation). Behaves exactly like builtin set.
    pass
class Sequence(list):
    # Placeholder: stands in for the ABC 'Sequence' (see note on Set above is
    # not assumed -- same _abcoll workaround). Behaves exactly like builtin list.
    pass
def _proxy(obj):
    # Identity stand-in for weakref.proxy: Brython lacks weakref support, so
    # OrderedDict keeps hard references where CPython would use proxies.
    return obj
################################################################################
### OrderedDict
################################################################################
class _Link(object):
    # Doubly-linked-list node used by OrderedDict; the __weakref__ slot keeps
    # the node weak-referenceable (for the _proxy/weakref scheme).
    __slots__ = 'prev', 'next', 'key', '__weakref__'
class OrderedDict(dict):
    'Dictionary that remembers insertion order'
    # An inherited dict maps keys to values.
    # The inherited dict provides __getitem__, __len__, __contains__, and get.
    # The remaining methods are order-aware.
    # Big-O running times for all methods are the same as regular dictionaries.
    # The internal self.__map dict maps keys to links in a doubly linked list.
    # The circular doubly linked list starts and ends with a sentinel element.
    # The sentinel element never gets deleted (this simplifies the algorithm).
    # The sentinel is in self.__hardroot with a weakref proxy in self.__root.
    # The prev links are weakref proxies (to prevent circular references).
    # Individual links are kept alive by the hard reference in self.__map.
    # Those hard references disappear when a key is deleted from an OrderedDict.
    # NOTE(review): in this Brython port _proxy is the identity function, so
    # the "weakref" links above are actually hard references -- confirm this
    # is acceptable for the target runtime (no cycle collection concerns).
    def __init__(self, *args, **kwds):
        '''Initialize an ordered dictionary. The signature is the same as
        regular dictionaries, but keyword arguments are not recommended because
        their insertion order is arbitrary.
        '''
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        try:
            # Re-initialization of an existing instance keeps the current root.
            self.__root
        except AttributeError:
            self.__hardroot = _Link()
            self.__root = root = _proxy(self.__hardroot)
            root.prev = root.next = root
            self.__map = {}
        self.__update(*args, **kwds)
    def __setitem__(self, key, value,
                    dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link):
        'od.__setitem__(i, y) <==> od[i]=y'
        # Setting a new item creates a new link at the end of the linked list,
        # and the inherited dictionary is updated with the new key/value pair.
        if key not in self:
            self.__map[key] = link = Link()
            root = self.__root
            last = root.prev
            link.prev, link.next, link.key = last, root, key
            last.next = link
            root.prev = proxy(link)
        dict_setitem(self, key, value)
    def __delitem__(self, key, dict_delitem=dict.__delitem__):
        'od.__delitem__(y) <==> del od[y]'
        # Deleting an existing item uses self.__map to find the link which gets
        # removed by updating the links in the predecessor and successor nodes.
        dict_delitem(self, key)
        link = self.__map.pop(key)
        link_prev = link.prev
        link_next = link.next
        link_prev.next = link_next
        link_next.prev = link_prev
    def __iter__(self):
        'od.__iter__() <==> iter(od)'
        # Traverse the linked list in order.
        root = self.__root
        curr = root.next
        while curr is not root:
            yield curr.key
            curr = curr.next
    def __reversed__(self):
        'od.__reversed__() <==> reversed(od)'
        # Traverse the linked list in reverse order.
        root = self.__root
        curr = root.prev
        while curr is not root:
            yield curr.key
            curr = curr.prev
    def clear(self):
        'od.clear() -> None. Remove all items from od.'
        root = self.__root
        root.prev = root.next = root
        self.__map.clear()
        dict.clear(self)
    def popitem(self, last=True):
        '''od.popitem() -> (k, v), return and remove a (key, value) pair.
        Pairs are returned in LIFO order if last is true or FIFO order if false.
        '''
        if not self:
            raise KeyError('dictionary is empty')
        root = self.__root
        if last:
            # Unlink the tail node.
            link = root.prev
            link_prev = link.prev
            link_prev.next = root
            root.prev = link_prev
        else:
            # Unlink the head node.
            link = root.next
            link_next = link.next
            root.next = link_next
            link_next.prev = root
        key = link.key
        del self.__map[key]
        value = dict.pop(self, key)
        return key, value
    def move_to_end(self, key, last=True):
        '''Move an existing element to the end (or beginning if last==False).
        Raises KeyError if the element does not exist.
        When last=True, acts like a fast version of self[key]=self.pop(key).
        '''
        link = self.__map[key]
        link_prev = link.prev
        link_next = link.next
        # Unlink the node, then re-splice it at the chosen end.
        link_prev.next = link_next
        link_next.prev = link_prev
        root = self.__root
        if last:
            last = root.prev
            link.prev = last
            link.next = root
            last.next = root.prev = link
        else:
            first = root.next
            link.prev = root
            link.next = first
            root.next = first.prev = link
    def __sizeof__(self):
        # Approximate total memory: instance dict + key map + link/proxy nodes.
        sizeof = _sys.getsizeof
        n = len(self) + 1                       # number of links including root
        size = sizeof(self.__dict__)            # instance dictionary
        size += sizeof(self.__map) * 2          # internal dict and inherited dict
        size += sizeof(self.__hardroot) * n     # link objects
        size += sizeof(self.__root) * n         # proxy objects
        return size
    #fixme brython.. Issue with _abcoll, which contains MutableMapping
    # Mixin methods borrowed from MutableMapping; __update is the name-mangled
    # alias that __init__ calls above.
    update = __update = MutableMapping.update
    keys = MutableMapping.keys
    values = MutableMapping.values
    items = MutableMapping.items
    __ne__ = MutableMapping.__ne__
    __marker = object()
    def pop(self, key, default=__marker):
        '''od.pop(k[,d]) -> v, remove specified key and return the corresponding
        value. If key is not found, d is returned if given, otherwise KeyError
        is raised.
        '''
        if key in self:
            result = self[key]
            del self[key]
            return result
        if default is self.__marker:
            raise KeyError(key)
        return default
    def setdefault(self, key, default=None):
        'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
        if key in self:
            return self[key]
        self[key] = default
        return default
    #fixme, brython issue
    #@_recursive_repr()
    def __repr__(self):
        'od.__repr__() <==> repr(od)'
        if not self:
            return '%s()' % (self.__class__.__name__,)
        return '%s(%r)' % (self.__class__.__name__, list(self.items()))
    def __reduce__(self):
        'Return state information for pickling'
        items = [[k, self[k]] for k in self]
        inst_dict = vars(self).copy()
        # Strip the bookkeeping attributes so only user state is pickled.
        for k in vars(OrderedDict()):
            inst_dict.pop(k, None)
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)
    def copy(self):
        'od.copy() -> a shallow copy of od'
        return self.__class__(self)
    @classmethod
    def fromkeys(cls, iterable, value=None):
        '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S.
        If not specified, the value defaults to None.
        '''
        self = cls()
        for key in iterable:
            self[key] = value
        return self
    def __eq__(self, other):
        '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
        while comparison to a regular mapping is order-insensitive.
        '''
        if isinstance(other, OrderedDict):
            return len(self)==len(other) and \
                   all(p==q for p, q in zip(self.items(), other.items()))
        return dict.__eq__(self, other)
########################################################################
### Counter
########################################################################
def _count_elements(mapping, iterable):
'Tally elements from the iterable.'
mapping_get = mapping.get
for elem in iterable:
mapping[elem] = mapping_get(elem, 0) + 1
#try: # Load C helper function if available
# from _collections import _count_elements
#except ImportError:
# pass
class Counter(dict):
    '''Dict subclass for counting hashable items. Sometimes called a bag
    or multiset. Elements are stored as dictionary keys and their counts
    are stored as dictionary values.
    >>> c = Counter('abcdeabcdabcaba') # count elements from a string
    >>> c.most_common(3) # three most common elements
    [('a', 5), ('b', 4), ('c', 3)]
    >>> sorted(c) # list all unique elements
    ['a', 'b', 'c', 'd', 'e']
    >>> ''.join(sorted(c.elements())) # list elements with repetitions
    'aaaaabbbbcccdde'
    >>> sum(c.values()) # total of all counts
    15
    >>> c['a'] # count of letter 'a'
    5
    >>> for elem in 'shazam': # update counts from an iterable
    ... c[elem] += 1 # by adding 1 to each element's count
    >>> c['a'] # now there are seven 'a'
    7
    >>> del c['b'] # remove all 'b'
    >>> c['b'] # now there are zero 'b'
    0
    >>> d = Counter('simsalabim') # make another counter
    >>> c.update(d) # add in the second counter
    >>> c['a'] # now there are nine 'a'
    9
    >>> c.clear() # empty the counter
    >>> c
    Counter()
    Note: If a count is set to zero or reduced to zero, it will remain
    in the counter until the entry is deleted or the counter is cleared:
    >>> c = Counter('aaabbc')
    >>> c['b'] -= 2 # reduce the count of 'b' by two
    >>> c.most_common() # 'b' is still in, but its count is zero
    [('a', 3), ('c', 1), ('b', 0)]
    '''
    # References:
    # http://en.wikipedia.org/wiki/Multiset
    # http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html
    # http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm
    # http://code.activestate.com/recipes/259174/
    # Knuth, TAOCP Vol. II section 4.6.3
    def __init__(self, iterable=None, **kwds):
        '''Create a new, empty Counter object. And if given, count elements
        from an input iterable. Or, initialize the count from another mapping
        of elements to their counts.
        >>> c = Counter() # a new, empty counter
        >>> c = Counter('gallahad') # a new counter from an iterable
        >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping
        >>> c = Counter(a=4, b=2) # a new counter from keyword args
        '''
        #super().__init__() #BE modified since super not supported
        dict.__init__(self)
        self.update(iterable, **kwds)
    def __missing__(self, key):
        'The count of elements not in the Counter is zero.'
        # Needed so that self[missing_item] does not raise KeyError
        return 0
    def most_common(self, n=None):
        '''List the n most common elements and their counts from the most
        common to the least. If n is None, then list all element counts.
        >>> Counter('abcdeabcdabcaba').most_common(3)
        [('a', 5), ('b', 4), ('c', 3)]
        '''
        # Emulate Bag.sortedByCount from Smalltalk
        if n is None:
            return sorted(self.items(), key=_itemgetter(1), reverse=True)
        return _heapq.nlargest(n, self.items(), key=_itemgetter(1))
    def elements(self):
        '''Iterator over elements repeating each as many times as its count.
        >>> c = Counter('ABCABC')
        >>> sorted(c.elements())
        ['A', 'A', 'B', 'B', 'C', 'C']
        # Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1
        >>> prime_factors = Counter({2: 2, 3: 3, 17: 1})
        >>> product = 1
        >>> for factor in prime_factors.elements(): # loop over factors
        ... product *= factor # and multiply them
        >>> product
        1836
        Note, if an element's count has been set to zero or is a negative
        number, elements() will ignore it.
        '''
        # Emulate Bag.do from Smalltalk and Multiset.begin from C++.
        return _chain.from_iterable(_starmap(_repeat, self.items()))
    # Override dict methods where necessary
    @classmethod
    def fromkeys(cls, iterable, v=None):
        # There is no equivalent method for counters because setting v=1
        # means that no element can have a count greater than one.
        raise NotImplementedError(
            'Counter.fromkeys() is undefined. Use Counter(iterable) instead.')
    def update(self, iterable=None, **kwds):
        '''Like dict.update() but add counts instead of replacing them.
        Source can be an iterable, a dictionary, or another Counter instance.
        >>> c = Counter('which')
        >>> c.update('witch') # add elements from another iterable
        >>> d = Counter('watch')
        >>> c.update(d) # add elements from another counter
        >>> c['h'] # four 'h' in which, witch, and watch
        4
        '''
        # The regular dict.update() operation makes no sense here because the
        # replace behavior results in the some of original untouched counts
        # being mixed-in with all of the other counts for a mismash that
        # doesn't have a straight-forward interpretation in most counting
        # contexts. Instead, we implement straight-addition. Both the inputs
        # and outputs are allowed to contain zero and negative counts.
        if iterable is not None:
            if isinstance(iterable, Mapping):
                if self:
                    self_get = self.get
                    for elem, count in iterable.items():
                        self[elem] = count + self_get(elem, 0)
                else:
                    super().update(iterable) # fast path when counter is empty
            else:
                # Non-mapping iterables go through the module-level tally helper.
                _count_elements(self, iterable)
        if kwds:
            self.update(kwds)
    def subtract(self, iterable=None, **kwds):
        '''Like dict.update() but subtracts counts instead of replacing them.
        Counts can be reduced below zero. Both the inputs and outputs are
        allowed to contain zero and negative counts.
        Source can be an iterable, a dictionary, or another Counter instance.
        >>> c = Counter('which')
        >>> c.subtract('witch') # subtract elements from another iterable
        >>> c.subtract(Counter('watch')) # subtract elements from another counter
        >>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch
        0
        >>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch
        -1
        '''
        if iterable is not None:
            self_get = self.get
            if isinstance(iterable, Mapping):
                for elem, count in iterable.items():
                    self[elem] = self_get(elem, 0) - count
            else:
                for elem in iterable:
                    self[elem] = self_get(elem, 0) - 1
        if kwds:
            self.subtract(kwds)
    def copy(self):
        'Return a shallow copy.'
        return self.__class__(self)
    def __reduce__(self):
        # Pickle support: rebuild from a plain dict of counts.
        return self.__class__, (dict(self),)
    def __delitem__(self, elem):
        'Like dict.__delitem__() but does not raise KeyError for missing values.'
        if elem in self:
            super().__delitem__(elem)
    def __repr__(self):
        if not self:
            return '%s()' % self.__class__.__name__
        try:
            items = ', '.join(map('%r: %r'.__mod__, self.most_common()))
            return '%s({%s})' % (self.__class__.__name__, items)
        except TypeError:
            # handle case where values are not orderable
            return '{0}({1!r})'.format(self.__class__.__name__, dict(self))
    # Multiset-style mathematical operations discussed in:
    # Knuth TAOCP Volume II section 4.6.3 exercise 19
    # and at http://en.wikipedia.org/wiki/Multiset
    #
    # Outputs guaranteed to only include positive counts.
    #
    # To strip negative and zero counts, add-in an empty counter:
    # c += Counter()
    def __add__(self, other):
        '''Add counts from two counters.
        >>> Counter('abbb') + Counter('bcc')
        Counter({'b': 4, 'c': 2, 'a': 1})
        '''
        if not isinstance(other, Counter):
            return NotImplemented
        result = Counter()
        for elem, count in self.items():
            newcount = count + other[elem]
            if newcount > 0:
                result[elem] = newcount
        # Include elements that appear only in 'other'.
        for elem, count in other.items():
            if elem not in self and count > 0:
                result[elem] = count
        return result
    def __sub__(self, other):
        ''' Subtract count, but keep only results with positive counts.
        >>> Counter('abbbc') - Counter('bccd')
        Counter({'b': 2, 'a': 1})
        '''
        if not isinstance(other, Counter):
            return NotImplemented
        result = Counter()
        for elem, count in self.items():
            newcount = count - other[elem]
            if newcount > 0:
                result[elem] = newcount
        # A negative count only in 'other' becomes positive in the result.
        for elem, count in other.items():
            if elem not in self and count < 0:
                result[elem] = 0 - count
        return result
    def __or__(self, other):
        '''Union is the maximum of value in either of the input counters.
        >>> Counter('abbb') | Counter('bcc')
        Counter({'b': 3, 'c': 2, 'a': 1})
        '''
        if not isinstance(other, Counter):
            return NotImplemented
        result = Counter()
        for elem, count in self.items():
            other_count = other[elem]
            newcount = other_count if count < other_count else count
            if newcount > 0:
                result[elem] = newcount
        for elem, count in other.items():
            if elem not in self and count > 0:
                result[elem] = count
        return result
    def __and__(self, other):
        ''' Intersection is the minimum of corresponding counts.
        >>> Counter('abbb') & Counter('bcc')
        Counter({'b': 1})
        '''
        if not isinstance(other, Counter):
            return NotImplemented
        result = Counter()
        for elem, count in self.items():
            other_count = other[elem]
            newcount = count if count < other_count else other_count
            if newcount > 0:
                result[elem] = newcount
        return result
########################################################################
### ChainMap (helper for configparser)
########################################################################
class ChainMap(MutableMapping):
    ''' A ChainMap groups multiple dicts (or other mappings) together
    to create a single, updateable view.
    The underlying mappings are stored in a list. That list is public and can
    accessed or updated using the *maps* attribute. There is no other state.
    Lookups search the underlying mappings successively until a key is found.
    In contrast, writes, updates, and deletions only operate on the first
    mapping.
    '''
    def __init__(self, *maps):
        '''Initialize a ChainMap by setting *maps* to the given mappings.
        If no mappings are provided, a single empty dictionary is used.
        '''
        self.maps = list(maps) or [{}]          # always at least one map
    def __missing__(self, key):
        # Hook for subclasses, mirroring dict.__missing__ semantics.
        raise KeyError(key)
    def __getitem__(self, key):
        for mapping in self.maps:
            try:
                return mapping[key]             # can't use 'key in mapping' with defaultdict
            except KeyError:
                pass
        return self.__missing__(key)            # support subclasses that define __missing__
    def get(self, key, default=None):
        return self[key] if key in self else default
    def __len__(self):
        return len(set().union(*self.maps))     # reuses stored hash values if possible
    def __iter__(self):
        return iter(set().union(*self.maps))
    def __contains__(self, key):
        return any(key in m for m in self.maps)
    def __bool__(self):
        return any(self.maps)
    def __repr__(self):
        # Fixed: the original file defined __repr__ twice; the first
        # (stdlib-style) definition was dead code, silently shadowed by this
        # one. The duplicate was removed; this keeps the effective behavior:
        # a comma-joined str() of each underlying map.
        return ','.join(str(_map) for _map in self.maps)
    @classmethod
    def fromkeys(cls, iterable, *args):
        'Create a ChainMap with a single dict created from the iterable.'
        return cls(dict.fromkeys(iterable, *args))
    def copy(self):
        'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
        return self.__class__(self.maps[0].copy(), *self.maps[1:])
    __copy__ = copy
    def new_child(self):                        # like Django's Context.push()
        'New ChainMap with a new dict followed by all previous maps.'
        return self.__class__({}, *self.maps)
    @property
    def parents(self):                          # like Django's Context.pop()
        'New ChainMap from maps[1:].'
        return self.__class__(*self.maps[1:])
    def __setitem__(self, key, value):
        self.maps[0][key] = value
    def __delitem__(self, key):
        try:
            del self.maps[0][key]
        except KeyError:
            raise KeyError('Key not found in the first mapping: {!r}'.format(key))
    def popitem(self):
        'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.'
        try:
            return self.maps[0].popitem()
        except KeyError:
            raise KeyError('No keys found in the first mapping.')
    def pop(self, key, *args):
        'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
        try:
            return self.maps[0].pop(key, *args)
        except KeyError:
            raise KeyError('Key not found in the first mapping: %s' % key)
    def clear(self):
        'Clear maps[0], leaving maps[1:] intact.'
        self.maps[0].clear()
################################################################################
### UserDict
################################################################################
class UserDict(MutableMapping):
    """A mapping that wraps a real dict, stored in the ``data`` attribute.

    Subclass this instead of ``dict`` when overriding mapping behaviour;
    all access goes through the Python-level methods below.
    """

    def __init__(self, dict=None, **kwargs):
        # ``dict`` deliberately shadows the builtin: the historical
        # UserDict signature is part of the public interface.
        self.data = {}
        if dict is not None:
            self.update(dict)
        if len(kwargs):
            self.update(kwargs)

    def __len__(self):
        return len(self.data)

    def __getitem__(self, key):
        try:
            return self.data[key]
        except KeyError:
            pass
        # Defer to a subclass-provided __missing__ hook, dict-style.
        missing = getattr(self.__class__, "__missing__", None)
        if missing is not None:
            return missing(self, key)
        raise KeyError(key)

    def __setitem__(self, key, item):
        self.data[key] = item

    def __delitem__(self, key):
        del self.data[key]

    def __iter__(self):
        return iter(self.data)

    def __contains__(self, key):
        # Query the backing dict directly so __missing__ never fires here.
        return key in self.data

    def __repr__(self):
        return repr(self.data)

    def copy(self):
        if self.__class__ is UserDict:
            # Fast path: a plain UserDict just clones its backing dict.
            return UserDict(self.data.copy())
        import copy
        saved = self.data
        try:
            # Shallow-copy the instance with an empty ``data`` so the copy
            # does not alias the original backing dict; restore afterwards.
            self.data = {}
            duplicate = copy.copy(self)
        finally:
            self.data = saved
        duplicate.update(self)
        return duplicate

    @classmethod
    def fromkeys(cls, iterable, value=None):
        fresh = cls()
        for key in iterable:
            fresh[key] = value
        return fresh
################################################################################
### UserList
################################################################################
################################################################################
### UserString
################################################################################
| agpl-3.0 |
jbremer/monitor | src/capstone/bindings/python/test_xcore.py | 3 | 2325 | #!/usr/bin/env python
# Capstone Python bindings, by Nguyen Anh Quynnh <aquynh@gmail.com>
from __future__ import print_function
from capstone import *
from capstone.xcore import *
from xprint import to_x, to_hex, to_x_32
XCORE_CODE = b"\xfe\x0f\xfe\x17\x13\x17\xc6\xfe\xec\x17\x97\xf8\xec\x4f\x1f\xfd\xec\x37\x07\xf2\x45\x5b\xf9\xfa\x02\x06\x1b\x10\x09\xfd\xec\xa7"
all_tests = (
(CS_ARCH_XCORE, 0, XCORE_CODE, "XCore"),
)
def print_insn_detail(insn):
    """Print one disassembled XCore instruction and its per-operand detail."""
    # print address, mnemonic and operands
    print("0x%x:\t%s\t%s" % (insn.address, insn.mnemonic, insn.op_str))

    # "data" instruction generated by SKIPDATA option has no detail
    if insn.id == 0:
        return

    if len(insn.operands) > 0:
        print("\top_count: %u" % len(insn.operands))
        c = 0  # running operand index
        for i in insn.operands:
            if i.type == XCORE_OP_REG:
                print("\t\toperands[%u].type: REG = %s" % (c, insn.reg_name(i.reg)))
            if i.type == XCORE_OP_IMM:
                print("\t\toperands[%u].type: IMM = 0x%s" % (c, to_x(i.imm)))
            if i.type == XCORE_OP_MEM:
                print("\t\toperands[%u].type: MEM" % c)
                if i.mem.base != 0:
                    print("\t\t\toperands[%u].mem.base: REG = %s" \
                        % (c, insn.reg_name(i.mem.base)))
                if i.mem.index != 0:
                    print("\t\t\toperands[%u].mem.index: REG = %s" \
                        % (c, insn.reg_name(i.mem.index)))
                if i.mem.disp != 0:
                    print("\t\t\toperands[%u].mem.disp: 0x%s" \
                        % (c, to_x(i.mem.disp)))
                if i.mem.direct != 1:
                    # BUG FIX: the original printed the literal text "%u"
                    # because the format string was never interpolated.
                    print("\t\t\toperands[%u].mem.direct: -1" % c)
            c += 1
# ## Test class Cs
def test_class():
    """Disassemble every (arch, mode, code, comment) entry in all_tests,
    printing per-instruction detail followed by the end address."""
    for (arch, mode, code, comment) in all_tests:
        print("*" * 16)
        print("Platform: %s" %comment)
        print("Code: %s" % to_hex(code))
        print("Disasm:")

        try:
            md = Cs(arch, mode)
            md.detail = True  # enable detail so insn.operands is populated
            for insn in md.disasm(code, 0x1000):
                print_insn_detail(insn)
                print ()
            # Address just past the last instruction.
            # NOTE(review): assumes disasm yielded at least one instruction;
            # 'insn' would be unbound otherwise -- confirm test data guarantees it.
            print("0x%x:\n" % (insn.address + insn.size))
        except CsError as e:
            print("ERROR: %s" %e)
| gpl-3.0 |
chewable/django | django/contrib/gis/sitemaps/views.py | 15 | 4114 | from django.http import HttpResponse, Http404
from django.template import loader
from django.contrib.gis.db.backend import SpatialBackend
from django.contrib.sites.models import Site
from django.core import urlresolvers
from django.core.paginator import EmptyPage, PageNotAnInteger
from django.db.models import get_model
from django.contrib.gis.db.models.fields import GeometryField
from django.utils.encoding import smart_str
from django.contrib.gis.shortcuts import render_to_kml, render_to_kmz
def index(request, sitemaps):
    """
    This view generates a sitemap index that uses the proper view
    for resolving geographic section sitemap URLs.

    ``sitemaps`` maps a section name to a Sitemap instance (or class;
    classes are instantiated on demand below).
    """
    current_site = Site.objects.get_current()
    sites = []
    # IDIOM FIX: conditional expression replaces the fragile
    # 'cond and X or Y' trick (same result, clearer intent).
    protocol = 'https' if request.is_secure() else 'http'
    for section, site in sitemaps.items():
        if callable(site):
            pages = site().paginator.num_pages
        else:
            pages = site.paginator.num_pages
        sitemap_url = urlresolvers.reverse('django.contrib.gis.sitemaps.views.sitemap', kwargs={'section': section})
        sites.append('%s://%s%s' % (protocol, current_site.domain, sitemap_url))
        # Page 1 is the bare URL above; advertise remaining pages via ?p=N.
        if pages > 1:
            for page in range(2, pages+1):
                sites.append('%s://%s%s?p=%s' % (protocol, current_site.domain, sitemap_url, page))
    xml = loader.render_to_string('sitemap_index.xml', {'sitemaps': sites})
    return HttpResponse(xml, mimetype='application/xml')
def sitemap(request, sitemaps, section=None):
    """
    This view generates a sitemap with additional geographic
    elements defined by Google.

    Raises Http404 for an unknown section or an invalid/empty page.
    """
    maps, urls = [], []
    if section is not None:
        # Restrict output to the single requested section.
        if section not in sitemaps:
            raise Http404("No sitemap available for section: %r" % section)
        maps.append(sitemaps[section])
    else:
        maps = sitemaps.values()

    # Page number comes from the ?p= query parameter; defaults to page 1.
    page = request.GET.get("p", 1)
    for site in maps:
        try:
            # A sitemap entry may be a class (instantiate) or an instance.
            if callable(site):
                urls.extend(site().get_urls(page))
            else:
                urls.extend(site.get_urls(page))
        except EmptyPage:
            raise Http404("Page %s empty" % page)
        except PageNotAnInteger:
            raise Http404("No page '%s'" % page)
    xml = smart_str(loader.render_to_string('gis/sitemaps/geo_sitemap.xml', {'urlset': urls}))
    return HttpResponse(xml, mimetype='application/xml')
def kml(request, label, model, field_name=None, compress=False):
    """
    This view generates KML for the given app label, model, and field name.

    The model's default manager must be GeoManager, and the field name
    must be that of a geographic field.

    Raises Http404 for an unknown model or a non-geographic field.
    """
    placemarks = []
    klass = get_model(label, model)
    if not klass:
        # get_model returns None for an unknown app/model pair.
        raise Http404('You must supply a valid app label and module name. Got "%s.%s"' % (label, model))

    if field_name:
        try:
            info = klass._meta.get_field_by_name(field_name)
            if not isinstance(info[0], GeometryField):
                raise TypeError('Not a geographic field.')
        except Exception:
            # IDIOM FIX: narrowed from a bare 'except:' so that
            # KeyboardInterrupt/SystemExit are no longer swallowed.
            # Any lookup or type failure maps to a 404.
            raise Http404('Invalid geometry field.')

    if SpatialBackend.postgis:
        # PostGIS will take care of transformation.
        placemarks = klass._default_manager.kml(field_name=field_name)
    else:
        # There's no KML method on Oracle or MySQL, so we use the `kml`
        # attribute of the lazy geometry instead.
        placemarks = []
        if SpatialBackend.oracle:
            qs = klass._default_manager.transform(4326, field_name=field_name)
        else:
            qs = klass._default_manager.all()
        for mod in qs:
            setattr(mod, 'kml', getattr(mod, field_name).kml)
            placemarks.append(mod)

    # Getting the render function and rendering to the correct.
    if compress:
        render = render_to_kmz
    else:
        render = render_to_kml
    return render('gis/kml/placemarks.kml', {'places' : placemarks})
def kmz(request, label, model, field_name=None):
    """Return compressed (KMZ) output for the given app label, model,
    and geographic field name; thin wrapper over kml()."""
    return kml(request, label, model, field_name, compress=True)
| bsd-3-clause |
felixma/nova | nova/cells/weights/ram_by_instance_type.py | 63 | 1971 | # Copyright (c) 2012-2013 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Weigh cells by memory needed in a way that spreads instances.
"""
from oslo_config import cfg
from nova.cells import weights
ram_weigher_opts = [
cfg.FloatOpt('ram_weight_multiplier',
default=10.0,
help='Multiplier used for weighing ram. Negative '
'numbers mean to stack vs spread.'),
]
CONF = cfg.CONF
CONF.register_opts(ram_weigher_opts, group='cells')
class RamByInstanceTypeWeigher(weights.BaseCellWeigher):
    """Weigh cells by instance_type requested."""

    def weight_multiplier(self):
        # Positive multiplier spreads instances across cells;
        # negative stacks them (see the config option help text above).
        return CONF.cells.ram_weight_multiplier

    def _weigh_object(self, cell, weight_properties):
        """Use the 'ram_free' for a particular instance_type advertised from a
        child cell's capacity to compute a weight.  We want to direct the
        build to a cell with a higher capacity.  Since higher weights win,
        we just return the number of units available for the instance_type.
        """
        request_spec = weight_properties['request_spec']
        instance_type = request_spec['instance_type']
        memory_needed = instance_type['memory_mb']
        # Missing capacity data defaults to 0 units, making the cell
        # least attractive rather than raising.
        ram_free = cell.capacities.get('ram_free', {})
        units_by_mb = ram_free.get('units_by_mb', {})
        # units_by_mb is keyed by the stringified MB value.
        return units_by_mb.get(str(memory_needed), 0)
| apache-2.0 |
MacHu-GWU/pyknackhq-project | setup.py | 1 | 4034 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Frequent used classifiers List = [
"Development Status :: 1 - Planning",
"Development Status :: 2 - Pre-Alpha",
"Development Status :: 3 - Alpha",
"Development Status :: 4 - Beta",
"Development Status :: 5 - Production/Stable",
"Development Status :: 6 - Mature",
"Development Status :: 7 - Inactive",
"Intended Audience :: Customer Service",
"Intended Audience :: Developers",
"Intended Audience :: Education",
"Intended Audience :: End Users/Desktop",
"Intended Audience :: Financial and Insurance Industry",
"Intended Audience :: Healthcare Industry",
"Intended Audience :: Information Technology",
"Intended Audience :: Legal Industry",
"Intended Audience :: Manufacturing",
"Intended Audience :: Other Audience",
"Intended Audience :: Religion",
"Intended Audience :: Science/Research",
"Intended Audience :: System Administrators",
"Intended Audience :: Telecommunications Industry",
"License :: OSI Approved :: BSD License",
"License :: OSI Approved :: MIT License",
"License :: OSI Approved :: Apache Software License",
"License :: OSI Approved :: GNU General Public License (GPL)",
"License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)",
"Natural Language :: English",
"Natural Language :: Chinese (Simplified)",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS",
"Operating System :: Unix",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 2 :: Only",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3 :: Only",
]
"""
from setuptools import setup, find_packages
from datetime import datetime
import os
GITHUB_ACCOUNT = "MacHu-GWU" # your GitHub account name
RELEASE_TAG = "2015-11-20" # the GitHub release tag
NAME = "pyknackhq" # name your package
VERSION = __import__(NAME).__version__
PACKAGES = [NAME] + ["%s.%s" % (NAME, i) for i in find_packages(NAME)]
PACKAGE_DATA = {
}
SHORT_DESCRIPTION = __import__(NAME).__short_description__ # GitHub Short Description
AUTHOR = "Sanhe Hu"
AUTHOR_EMAIL = "husanhe@gmail.com"
MAINTAINER = AUTHOR
MAINTAINER_EMAIL = AUTHOR_EMAIL
PROJECT_NAME = os.path.basename(os.getcwd()) # the project dir is the project name
URL = "https://github.com/{0}/{1}".format(GITHUB_ACCOUNT, PROJECT_NAME)
DOWNLOAD_URL = "https://github.com/{0}/{1}/tarball/{2}".format(
GITHUB_ACCOUNT, PROJECT_NAME, RELEASE_TAG)
with open("readme.rst", "rb") as f:
LONG_DESCRIPTION = f.read().decode("utf-8")
LICENSE = "MIT"
PLATFORMS = ["Windows", "MacOS", "Unix"]
CLASSIFIERS = [
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS",
"Operating System :: Unix",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
]
with open("requirements.txt", "rb") as f:
REQUIRES = [i.strip() for i in f.read().decode("utf-8").split("\n")]
setup(
name = NAME,
packages = PACKAGES,
include_package_data = True,
package_data = PACKAGE_DATA,
version = VERSION,
author = AUTHOR,
author_email = AUTHOR_EMAIL,
maintainer = MAINTAINER,
maintainer_email = MAINTAINER_EMAIL,
url = URL,
description = SHORT_DESCRIPTION,
long_description = LONG_DESCRIPTION,
download_url = DOWNLOAD_URL,
classifiers = CLASSIFIERS,
platforms = PLATFORMS,
license = LICENSE,
install_requires = REQUIRES,
) | mit |
JesseLivezey/pylearn2 | pylearn2/dataset_get/helper-scripts/make-archive.py | 44 | 3085 | #!/usr/bin/env python
# -*- coding: utf-8
__authors__ = "Steven Pigeon"
__copyright__ = "(c) 2012 Université de Montréal"
__contact__ = "Steven Pigeon: pigeon@iro.umontreal.ca"
__version__ = "make-archive 0.1"
__licence__ = "BSD 3-Clause http://www.opensource.org/licenses/BSD-3-Clause "
import logging
import os, sys, tarfile
from theano.compat.six.moves import input
logger = logging.getLogger(__name__)
########################################
def checks(path):
    """
    Checks if pretty much everything is
    there, aborts if mandatory elements
    are missing, warns if strongly
    suggested are not found.

    :param path: path to the root of the dataset
    :returns: True, if the archive passed the test, False otherwise.
        (True means no mandatory entry was missing and at least one
        expected entry exists.)
    """
    # path,
    # m for mandatory,
    # o for optional
    # s for strongly suggested,
    #
    check_for=[ ("data/",'m'),
                ("docs/",'m'),
                ("docs/license.txt",'m'),
                ("scripts/",'m'),
                ("scripts/getscript",'o'),
                ("scripts/postinst",'o'),
                ("scripts/prerm",'o'),
                ("scripts/postrm",'o'),
                ("readme.1rst",'s')
                ]

    found=0
    for (filename,mode) in check_for:
        this_check=os.path.join(path,filename)
        if os.path.exists(this_check):
            # Entry exists; an empty directory only warrants a warning.
            if os.path.isdir(this_check):
                if len(os.listdir(this_check))==0:
                    logger.warning("directory '{0}' "
                                   "is empty.".format(this_check))
            found+=1;
        else:
            if mode=='m':
                # fatal: a mandatory entry is missing
                logger.error("'{0}' not found "
                             "but mandatory".format(this_check))
                return False
            elif mode=='s':
                # benign: strongly suggested, warn only
                logger.warning("no '{0}' found".format(this_check))
            else:
                # whatever: optional entries are silently skipped
                pass

    return (found>0)
########################################
def create_archive( source, archive_name ):
    """Pack every file under *source* into the bz2-compressed tar
    *archive_name*, prompting before overwriting an existing archive.

    Errors opening the archive are logged and abort the function.
    """
    if os.path.exists(archive_name):
        r = input("'%s' exists, overwrite? [yes/N] " % archive_name)
        if (r!="y") and (r!="yes"):
            logger.info("taking '{0}' for no, so there.".format(r))
            #bail out
            return

    try:
        tar=tarfile.open(archive_name,mode="w:bz2")
    except Exception as e:
        logger.exception(e)
        return

    # BUG FIX: the tar handle was previously closed only on full success;
    # an error while walking/adding files leaked (and truncated) the
    # archive.  try/finally guarantees the handle is closed.
    try:
        for root, dirs, files in os.walk(source):
            for filename in files:
                this_file = os.path.join(root,filename)
                logger.info("adding '{0}'".format(this_file))
                tar.add(this_file)
    finally:
        tar.close()
if __name__=="__main__":
filename=sys.argv[1]
if checks(filename):
basename=os.path.basename(filename)
ext=".tar.bz2"
archive_name=basename+ext
logger.info("Creating Archive '{0}'".format(archive_name))
create_archive(filename,archive_name)
else:
logger.info("nothing found, aborting.")
| bsd-3-clause |
svn2github/audacity | lib-src/lv2/lv2/waflib/Options.py | 330 | 5458 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import os,tempfile,optparse,sys,re
from waflib import Logs,Utils,Context
cmds='distclean configure build install clean uninstall check dist distcheck'.split()
options={}
commands=[]
lockfile=os.environ.get('WAFLOCK','.lock-waf_%s_build'%sys.platform)
try:cache_global=os.path.abspath(os.environ['WAFCACHE'])
except KeyError:cache_global=''
platform=Utils.unversioned_sys_platform()
class opt_parser(optparse.OptionParser):
    """Command-line parser for waf, pre-populated with the standard
    options; *ctx* is the OptionsContext, queried for the default job
    count and for building the usage text."""
    def __init__(self,ctx):
        optparse.OptionParser.__init__(self,conflict_handler="resolve",version='waf %s (%s)'%(Context.WAFVERSION,Context.WAFREVISION))
        # Wrap help output to the current terminal width.
        self.formatter.width=Logs.get_term_cols()
        p=self.add_option  # local alias: every option below goes through it
        self.ctx=ctx
        jobs=ctx.jobs()
        p('-j','--jobs',dest='jobs',default=jobs,type='int',help='amount of parallel jobs (%r)'%jobs)
        p('-k','--keep',dest='keep',default=0,action='count',help='keep running happily even if errors are found')
        p('-v','--verbose',dest='verbose',default=0,action='count',help='verbosity level -v -vv or -vvv [default: 0]')
        p('--nocache',dest='nocache',default=False,action='store_true',help='ignore the WAFCACHE (if set)')
        p('--zones',dest='zones',default='',action='store',help='debugging zones (task_gen, deps, tasks, etc)')
        # --- configure options -------------------------------------------
        gr=optparse.OptionGroup(self,'configure options')
        self.add_option_group(gr)
        gr.add_option('-o','--out',action='store',default='',help='build dir for the project',dest='out')
        gr.add_option('-t','--top',action='store',default='',help='src dir for the project',dest='top')
        # Default install prefix: $PREFIX, else a per-platform fallback.
        default_prefix=os.environ.get('PREFIX')
        if not default_prefix:
            if platform=='win32':
                d=tempfile.gettempdir()
                # Normalize the drive letter to upper case.
                default_prefix=d[0].upper()+d[1:]
            else:
                default_prefix='/usr/local/'
        gr.add_option('--prefix',dest='prefix',default=default_prefix,help='installation prefix [default: %r]'%default_prefix)
        gr.add_option('--download',dest='download',default=False,action='store_true',help='try to download the tools if missing')
        # --- build and install options -----------------------------------
        gr=optparse.OptionGroup(self,'build and install options')
        self.add_option_group(gr)
        gr.add_option('-p','--progress',dest='progress_bar',default=0,action='count',help='-p: progress bar; -pp: ide output')
        gr.add_option('--targets',dest='targets',default='',action='store',help='task generators, e.g. "target1,target2"')
        # --- step options ------------------------------------------------
        gr=optparse.OptionGroup(self,'step options')
        self.add_option_group(gr)
        gr.add_option('--files',dest='files',default='',action='store',help='files to process, by regexp, e.g. "*/main.c,*/test/main.o"')
        # --- install/uninstall options -----------------------------------
        default_destdir=os.environ.get('DESTDIR','')
        gr=optparse.OptionGroup(self,'install/uninstall options')
        self.add_option_group(gr)
        gr.add_option('--destdir',help='installation root [default: %r]'%default_destdir,default=default_destdir,dest='destdir')
        gr.add_option('-f','--force',dest='force',default=False,action='store_true',help='force file installation')
        gr.add_option('--distcheck-args',help='arguments to pass to distcheck',default=None,action='store')
    def get_usage(self):
        """Build the usage text from the registered Context command
        classes plus the commands defined in the user's wscript module."""
        cmds_str={}
        for cls in Context.classes:
            if not cls.cmd or cls.cmd=='options':
                continue
            s=cls.__doc__ or''
            cmds_str[cls.cmd]=s
        if Context.g_module:
            # Top-level wscript functions double as commands; skip the
            # reserved names and private helpers.
            for(k,v)in Context.g_module.__dict__.items():
                if k in['options','init','shutdown']:
                    continue
                if type(v)is type(Context.create_context):
                    if v.__doc__ and not k.startswith('_'):
                        cmds_str[k]=v.__doc__
        # Align the command names in one column.
        just=0
        for k in cmds_str:
            just=max(just,len(k))
        lst=[' %s: %s'%(k.ljust(just),v)for(k,v)in cmds_str.items()]
        lst.sort()
        ret='\n'.join(lst)
        return'''waf [commands] [options]
Main commands (example: ./waf build -j4)
%s
'''%ret
class OptionsContext(Context.Context):
    """Context for the 'options' command: owns the opt_parser instance
    and publishes the parsed results in the module-level *options* and
    *commands* globals."""
    cmd='options'
    fun='options'
    def __init__(self,**kw):
        super(OptionsContext,self).__init__(**kw)
        self.parser=opt_parser(self)
        self.option_groups={}
    def jobs(self):
        """Guess a sensible default job count: $JOBS, then the various
        per-platform CPU-count sources, clamped to the range 1..1024."""
        count=int(os.environ.get('JOBS',0))
        if count<1:
            if'NUMBER_OF_PROCESSORS'in os.environ:
                # Windows
                count=int(os.environ.get('NUMBER_OF_PROCESSORS',1))
            else:
                # POSIX sysconf, when available
                if hasattr(os,'sysconf_names'):
                    if'SC_NPROCESSORS_ONLN'in os.sysconf_names:
                        count=int(os.sysconf('SC_NPROCESSORS_ONLN'))
                    elif'SC_NPROCESSORS_CONF'in os.sysconf_names:
                        count=int(os.sysconf('SC_NPROCESSORS_CONF'))
                if not count and os.name not in('nt','java'):
                    # Last resort on BSD-likes: ask sysctl.
                    try:
                        tmp=self.cmd_and_log(['sysctl','-n','hw.ncpu'],quiet=0)
                    except Exception:
                        pass
                    else:
                        if re.match('^[0-9]+$',tmp):
                            count=int(tmp)
        if count<1:
            count=1
        elif count>1024:
            count=1024
        return count
    def add_option(self,*k,**kw):
        # Delegate straight to the optparse parser.
        return self.parser.add_option(*k,**kw)
    def add_option_group(self,*k,**kw):
        # Reuse an existing group of the same name rather than duplicating it.
        try:
            gr=self.option_groups[k[0]]
        except KeyError:
            gr=self.parser.add_option_group(*k,**kw)
        self.option_groups[k[0]]=gr
        return gr
    def get_option_group(self,opt_str):
        # Look up by our registry first, then by the group title.
        try:
            return self.option_groups[opt_str]
        except KeyError:
            for group in self.parser.option_groups:
                if group.title==opt_str:
                    return group
            return None
    def parse_args(self,_args=None):
        """Parse the command line, publishing results into the
        module-level *options*/*commands* globals."""
        global options,commands
        (options,leftover_args)=self.parser.parse_args(args=_args)
        commands=leftover_args
        if options.destdir:
            options.destdir=os.path.abspath(os.path.expanduser(options.destdir))
        if options.verbose>=1:
            # Extra consistency checks when running verbosely.
            self.load('errcheck')
    def execute(self):
        super(OptionsContext,self).execute()
        self.parse_args()
| gpl-2.0 |
linktlh/Toontown-journey | toontown/speedchat/TTSCToontaskMenu.py | 5 | 1507 | from otp.speedchat.SCMenu import SCMenu
from TTSCToontaskTerminal import TTSCToontaskTerminal
from otp.speedchat.SCStaticTextTerminal import SCStaticTextTerminal
from toontown.quest import Quests
class TTSCToontaskMenu(SCMenu):
    """SpeedChat submenu listing phrases for the local toon's current
    toontasks, rebuilt whenever the quest list changes."""

    def __init__(self):
        SCMenu.__init__(self)
        # Rebuild the menu whenever the local avatar's quest list changes.
        self.accept('questsChanged', self.__tasksChanged)
        self.__tasksChanged()

    def destroy(self):
        SCMenu.destroy(self)

    def __tasksChanged(self):
        self.clearMenu()
        try:
            lt = base.localAvatar
        except Exception:
            # IDIOM FIX: narrowed from a bare 'except:' (which also
            # swallowed SystemExit/KeyboardInterrupt).  The local avatar
            # may not exist yet, in which case there is nothing to build.
            return
        phrases = []

        def addTerminal(terminal, self = self, phrases = phrases):
            # De-duplicate by display text so identical phrases from
            # different tasks appear only once.
            displayText = terminal.getDisplayText()
            if displayText not in phrases:
                self.append(terminal)
                phrases.append(displayText)

        for task in lt.quests:
            taskId, fromNpcId, toNpcId, rewardId, toonProgress = task
            q = Quests.getQuest(taskId)
            if q is None:
                continue
            msgs = q.getSCStrings(toNpcId, toonProgress)
            # getSCStrings may return a single string; normalize to a list.
            if type(msgs) != type([]):
                msgs = [msgs]
            for i in xrange(len(msgs)):
                addTerminal(TTSCToontaskTerminal(msgs[i], taskId, toNpcId, toonProgress, i))

        # Offer the "need a toontask" phrase (id 1299) only while there is
        # room to carry another task.
        needToontask = 1
        if hasattr(lt, 'questCarryLimit'):
            needToontask = len(lt.quests) != lt.questCarryLimit
        if needToontask:
            addTerminal(SCStaticTextTerminal(1299))
        return
| apache-2.0 |
mne-tools/mne-tools.github.io | 0.14/_downloads/plot_topo_compare_conditions.py | 3 | 2175 | """
=================================================
Compare evoked responses for different conditions
=================================================
In this example, an Epochs object for visual and
auditory responses is created. Both conditions
are then accessed by their respective names to
create a sensor layout plot of the related
evoked responses.
"""
# Authors: Denis Engemann <denis.engemann@gmail.com>
# Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
# License: BSD (3-clause)
import matplotlib.pyplot as plt
import mne
from mne.viz import plot_evoked_topo
from mne.datasets import sample
print(__doc__)
data_path = sample.data_path()
###############################################################################
# Set parameters
raw_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw.fif'
event_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw-eve.fif'
event_id = 1
tmin = -0.2
tmax = 0.5
# Setup for reading the raw data
raw = mne.io.read_raw_fif(raw_fname)
events = mne.read_events(event_fname)
# Set up pick list: MEG + STI 014 - bad channels (modify to your needs)
include = [] # or stim channels ['STI 014']
# bad channels in raw.info['bads'] will be automatically excluded
# Set up amplitude-peak rejection values for MEG channels
reject = dict(grad=4000e-13, mag=4e-12)
# pick MEG channels
picks = mne.pick_types(raw.info, meg=True, eeg=False, stim=False, eog=True,
include=include, exclude='bads')
# Create epochs including different events
event_id = {'audio/left': 1, 'audio/right': 2,
'visual/left': 3, 'visual/right': 4}
epochs = mne.Epochs(raw, events, event_id, tmin, tmax,
picks=picks, baseline=(None, 0), reject=reject)
# Generate list of evoked objects from conditions names
evokeds = [epochs[name].average() for name in ('left', 'right')]
###############################################################################
# Show topography for two different conditions
colors = 'yellow', 'green'
title = 'MNE sample data - left vs right (A/V combined)'
plot_evoked_topo(evokeds, color=colors, title=title)
plt.show()
| bsd-3-clause |
INFN-Catania/FedManager | fednodes/abstract_classes.py | 1 | 2405 | """
Copyright 2015 INFN (Italy)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__author__ = 'maurizio'
from abc import ABCMeta, abstractmethod
from fednodes.messaging import MessageScheduler
class iConsumer(object):
    """Abstract base class for message consumers.

    Stores the shared MessageScheduler and node configuration, then
    delegates transport/topic setup to the subclass's configure().
    """
    __metaclass__ = ABCMeta  # Python 2 style abstract-class declaration

    def __init__(self, messageScheduler, configuration):
        self._ms = messageScheduler
        self._conf = configuration
        self.configure()

    @abstractmethod
    def configure(self):
        # Subclasses perform their consumer-side setup here.
        pass
class iProducer():
    """Abstract base class for message producers.

    Stores the node configuration, delegates setup to configure(), and
    requires subclasses to implement sendMessage().
    """
    __metaclass__ = ABCMeta  # Python 2 style abstract-class declaration

    def __init__(self, configuration):
        self._conf = configuration
        self.configure()

    @abstractmethod
    def configure(self):
        # Subclasses perform their producer-side setup here.
        pass

    @abstractmethod
    def sendMessage(self, fedMessageAsString, topic_target):
        # Deliver the serialized federation message to *topic_target*.
        pass
# TODO: add 'add_actor' method
class Fednode():
    """Base federation node: wires a producer and a consumer together
    through a MessageScheduler built from the given classes."""

    def __init__(self, configuration, message_class, consumer_class, producer_class):
        self._configuration = configuration
        # The scheduler owns the producer; the consumer only needs a
        # reference to the scheduler and the configuration.
        self._ms = MessageScheduler(message_class,
                                    producer_class(configuration),
                                    configuration)
        consumer_class(self._ms, configuration)

    def get_configuration(self):
        return self._configuration

    def get_ms(self):
        return self._ms
class iFedMessage():
    """Abstract interface for federation messages.

    Concrete messages carry an id, a source, a target, and a body, and
    must be (de)serializable to/from a string.
    """
    __metaclass__ = ABCMeta  # Python 2 style abstract-class declaration

    # --- mutators -----------------------------------------------------
    @abstractmethod
    def setSource(self, source):
        pass

    @abstractmethod
    def setId(self, id):
        pass

    # --- accessors ----------------------------------------------------
    @abstractmethod
    def getId(self):
        pass

    @abstractmethod
    def getSource(self):
        pass

    @abstractmethod
    def getTarget(self):
        pass

    @abstractmethod
    def getBody(self):
        pass

    @abstractmethod
    def getBodyUriType(self):
        pass

    # --- (de)serialization --------------------------------------------
    @abstractmethod
    def toString(self):
        pass

    @classmethod
    def createMessageFromString(cls, msg):
        # Alternate constructor; concrete classes must override it
        # (declared as a plain classmethod because abstract classmethods
        # are awkward in Python 2).
        raise NotImplementedError()
| apache-2.0 |
KJin99/zulip | zerver/management/commands/set_default_streams.py | 113 | 1657 | from __future__ import absolute_import
from django.core.management.base import BaseCommand
from zerver.models import Realm
from zerver.lib.actions import set_default_streams
from optparse import make_option
import sys
class Command(BaseCommand):
    help = """Set default streams for a realm
Users created under this realm will start out with these streams. This
command is not additive: if you re-run it on a domain with a different
set of default streams, those will be the new complete set of default
streams.
For example:
python manage.py set_default_streams --domain=foo.com --streams=foo,bar,baz
python manage.py set_default_streams --domain=foo.com --streams="foo,bar,baz with space"
python manage.py set_default_streams --domain=foo.com --streams=
"""

    # Both options are required; handle() enforces this explicitly below.
    option_list = BaseCommand.option_list + (
        make_option('-d', '--domain',
                    dest='domain',
                    type='str',
                    help='The name of the existing realm to which to attach default streams.'),
        make_option('-s', '--streams',
                    dest='streams',
                    type='str',
                    help='A comma-separated list of stream names.'),
        )

    def handle(self, **options):
        # Note: Python 2 print-statement syntax ('print >>sys.stderr').
        if options["domain"] is None or options["streams"] is None:
            print >>sys.stderr, "Please provide both a domain name and a default \
set of streams (which can be empty, with `--streams=`)."
            exit(1)

        # An empty --streams= yields [''] -> effectively no default streams.
        stream_names = [stream.strip() for stream in options["streams"].split(",")]
        realm = Realm.objects.get(domain=options["domain"])
        set_default_streams(realm, stream_names)
| apache-2.0 |
borosnborea/SwordGO_app | example/kivymap/.buildozer/applibs/requests/packages/chardet/mbcsgroupprober.py | 2769 | 1967 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
# Proofpoint, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetgroupprober import CharSetGroupProber
from .utf8prober import UTF8Prober
from .sjisprober import SJISProber
from .eucjpprober import EUCJPProber
from .gb2312prober import GB2312Prober
from .euckrprober import EUCKRProber
from .cp949prober import CP949Prober
from .big5prober import Big5Prober
from .euctwprober import EUCTWProber
class MBCSGroupProber(CharSetGroupProber):
    """Group prober bundling one prober per supported multi-byte
    encoding (UTF-8 plus the CJK encodings)."""

    def __init__(self):
        CharSetGroupProber.__init__(self)
        # Instantiate the probers in a fixed order; the group prober
        # consults them in sequence.
        self._mProbers = [prober() for prober in (
            UTF8Prober,
            SJISProber,
            EUCJPProber,
            GB2312Prober,
            EUCKRProber,
            CP949Prober,
            Big5Prober,
            EUCTWProber,
        )]
        self.reset()
| gpl-3.0 |
Nindaleth/ansible-modules-core | network/basics/slurp.py | 15 | 2145 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: slurp
version_added: historical
short_description: Slurps a file from remote nodes
description:
- This module works like M(fetch). It is used for fetching a base64-
encoded blob containing the data in a remote file.
options:
src:
description:
- The file on the remote system to fetch. This I(must) be a file, not a
directory.
required: true
default: null
aliases: []
notes:
- "See also: M(fetch)"
requirements: []
author:
- "Ansible Core Team"
- "Michael DeHaan"
'''
EXAMPLES = '''
ansible host -m slurp -a 'src=/tmp/xx'
host | success >> {
"content": "aGVsbG8gQW5zaWJsZSB3b3JsZAo=",
"encoding": "base64"
}
'''
import base64
def main():
    """Entry point for the slurp module.

    Reads the file named by the ``src`` parameter on the remote host and
    returns its contents base64-encoded via ``exit_json`` (keys: ``content``,
    ``source``, ``encoding``). Fails with ``fail_json`` when the path is
    missing or unreadable.
    """
    module = AnsibleModule(
        argument_spec = dict(
            src = dict(required=True, aliases=['path'], type='path'),
        ),
        supports_check_mode=True
    )
    source = module.params['src']

    if not os.path.exists(source):
        module.fail_json(msg="file not found: %s" % source)
    if not os.access(source, os.R_OK):
        module.fail_json(msg="file is not readable: %s" % source)

    # Use a context manager so the handle is closed even if read() raises;
    # the original left the file object open until garbage collection.
    with open(source, 'rb') as source_fh:
        data = base64.b64encode(source_fh.read())

    module.exit_json(content=data, source=source, encoding='base64')
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| gpl-3.0 |
karllessard/tensorflow | tensorflow/python/keras/layers/preprocessing/benchmarks/category_encoding_benchmark.py | 4 | 3162 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Benchmark for Keras category_encoding preprocessing layer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
from absl import flags
import numpy as np
from tensorflow.python import keras
from tensorflow.python.compat import v2_compat
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import dtypes
from tensorflow.python.keras.layers.preprocessing import category_encoding
from tensorflow.python.ops import random_ops
from tensorflow.python.platform import benchmark
from tensorflow.python.platform import test
FLAGS = flags.FLAGS  # absl command-line flag container (populated at app start).
v2_compat.enable_v2_behavior()  # Run the benchmark under TF2 (eager) semantics.
class BenchmarkLayer(benchmark.Benchmark):
  """Benchmark the forward pass of the CategoryEncoding layer."""

  def run_dataset_implementation(self, output_mode, batch_size, sequence_length,
                                 max_tokens):
    """Time the layer over random integer batches and report the wall time."""
    # Build the layer once, outside the timed region.
    symbolic_input = keras.Input(shape=(sequence_length,), dtype=dtypes.int32)
    layer = category_encoding.CategoryEncoding(
        max_tokens=max_tokens, output_mode=output_mode)
    _ = layer(symbolic_input)

    num_repeats = 5
    start_times = []
    end_times = []
    for _ in range(num_repeats):
      # Fresh random data for each repeat; tokens lie in [0, max_tokens - 1).
      random_tokens = random_ops.random_uniform(
          [batch_size * 10, sequence_length],
          minval=0,
          maxval=max_tokens - 1,
          dtype=dtypes.int32)
      ds = dataset_ops.Dataset.from_tensor_slices(random_tokens)
      ds = ds.shuffle(batch_size * 100).batch(batch_size)
      num_batches = 5
      ds = ds.take(num_batches).prefetch(num_batches)
      start_times.append(time.time())
      # Benchmarked code begins here.
      for batch in ds:
        _ = layer(batch)
      # Benchmarked code ends here.
      end_times.append(time.time())

    # Average per-batch wall time across repeats.
    avg_time = np.mean(np.array(end_times) - np.array(start_times)) / num_batches
    name = "category_encoding|batch_%s|seq_length_%s|%s_max_tokens" % (
        batch_size, sequence_length, max_tokens)
    self.report_benchmark(iters=num_repeats, wall_time=avg_time, name=name)

  def benchmark_vocab_size_by_batch(self):
    """Sweep batch size, sequence length and vocabulary size in 'count' mode."""
    for batch in [32, 256, 2048]:
      for sequence_length in [10, 1000]:
        for num_tokens in [100, 1000, 20000]:
          self.run_dataset_implementation(
              output_mode="count",
              batch_size=batch,
              sequence_length=sequence_length,
              max_tokens=num_tokens)
if __name__ == "__main__":
test.main()
| apache-2.0 |
mayankcu/Django-social | venv/Lib/encodings/iso8859_4.py | 593 | 13632 | """ Python Character Mapping Codec iso8859_4 generated from 'MAPPINGS/ISO8859/8859-4.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless iso8859-4 codec backed by the module's charmap tables."""

    def encode(self, input, errors='strict'):
        """Return ``(encoded_bytes, length_consumed)`` for *input*."""
        return codecs.charmap_encode(input, errors, encoding_table)

    def decode(self, input, errors='strict'):
        """Return ``(decoded_text, length_consumed)`` for *input*."""
        return codecs.charmap_decode(input, errors, decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental encoder for iso8859-4; charmap encoding keeps no state."""

    def encode(self, input, final=False):
        # *final* is irrelevant for a charmap codec: every char maps alone.
        encoded, _consumed = codecs.charmap_encode(input, self.errors,
                                                   encoding_table)
        return encoded
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental decoder for iso8859-4; charmap decoding keeps no state."""

    def decode(self, input, final=False):
        # Single-byte charset: each byte decodes independently, *final* unused.
        decoded, _consumed = codecs.charmap_decode(input, self.errors,
                                                   decoding_table)
        return decoded
class StreamWriter(Codec,codecs.StreamWriter):
    """Stream writer for iso8859-4; inherits encode from Codec."""
    pass
class StreamReader(Codec,codecs.StreamReader):
    """Stream reader for iso8859-4; inherits decode from Codec."""
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo used to register this module as 'iso8859-4'."""
    # A single Codec instance suffices: encode/decode are stateless.
    stateless = Codec()
    return codecs.CodecInfo(
        name='iso8859-4',
        encode=stateless.encode,
        decode=stateless.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\x80' # 0x80 -> <control>
u'\x81' # 0x81 -> <control>
u'\x82' # 0x82 -> <control>
u'\x83' # 0x83 -> <control>
u'\x84' # 0x84 -> <control>
u'\x85' # 0x85 -> <control>
u'\x86' # 0x86 -> <control>
u'\x87' # 0x87 -> <control>
u'\x88' # 0x88 -> <control>
u'\x89' # 0x89 -> <control>
u'\x8a' # 0x8A -> <control>
u'\x8b' # 0x8B -> <control>
u'\x8c' # 0x8C -> <control>
u'\x8d' # 0x8D -> <control>
u'\x8e' # 0x8E -> <control>
u'\x8f' # 0x8F -> <control>
u'\x90' # 0x90 -> <control>
u'\x91' # 0x91 -> <control>
u'\x92' # 0x92 -> <control>
u'\x93' # 0x93 -> <control>
u'\x94' # 0x94 -> <control>
u'\x95' # 0x95 -> <control>
u'\x96' # 0x96 -> <control>
u'\x97' # 0x97 -> <control>
u'\x98' # 0x98 -> <control>
u'\x99' # 0x99 -> <control>
u'\x9a' # 0x9A -> <control>
u'\x9b' # 0x9B -> <control>
u'\x9c' # 0x9C -> <control>
u'\x9d' # 0x9D -> <control>
u'\x9e' # 0x9E -> <control>
u'\x9f' # 0x9F -> <control>
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\u0104' # 0xA1 -> LATIN CAPITAL LETTER A WITH OGONEK
u'\u0138' # 0xA2 -> LATIN SMALL LETTER KRA
u'\u0156' # 0xA3 -> LATIN CAPITAL LETTER R WITH CEDILLA
u'\xa4' # 0xA4 -> CURRENCY SIGN
u'\u0128' # 0xA5 -> LATIN CAPITAL LETTER I WITH TILDE
u'\u013b' # 0xA6 -> LATIN CAPITAL LETTER L WITH CEDILLA
u'\xa7' # 0xA7 -> SECTION SIGN
u'\xa8' # 0xA8 -> DIAERESIS
u'\u0160' # 0xA9 -> LATIN CAPITAL LETTER S WITH CARON
u'\u0112' # 0xAA -> LATIN CAPITAL LETTER E WITH MACRON
u'\u0122' # 0xAB -> LATIN CAPITAL LETTER G WITH CEDILLA
u'\u0166' # 0xAC -> LATIN CAPITAL LETTER T WITH STROKE
u'\xad' # 0xAD -> SOFT HYPHEN
u'\u017d' # 0xAE -> LATIN CAPITAL LETTER Z WITH CARON
u'\xaf' # 0xAF -> MACRON
u'\xb0' # 0xB0 -> DEGREE SIGN
u'\u0105' # 0xB1 -> LATIN SMALL LETTER A WITH OGONEK
u'\u02db' # 0xB2 -> OGONEK
u'\u0157' # 0xB3 -> LATIN SMALL LETTER R WITH CEDILLA
u'\xb4' # 0xB4 -> ACUTE ACCENT
u'\u0129' # 0xB5 -> LATIN SMALL LETTER I WITH TILDE
u'\u013c' # 0xB6 -> LATIN SMALL LETTER L WITH CEDILLA
u'\u02c7' # 0xB7 -> CARON
u'\xb8' # 0xB8 -> CEDILLA
u'\u0161' # 0xB9 -> LATIN SMALL LETTER S WITH CARON
u'\u0113' # 0xBA -> LATIN SMALL LETTER E WITH MACRON
u'\u0123' # 0xBB -> LATIN SMALL LETTER G WITH CEDILLA
u'\u0167' # 0xBC -> LATIN SMALL LETTER T WITH STROKE
u'\u014a' # 0xBD -> LATIN CAPITAL LETTER ENG
u'\u017e' # 0xBE -> LATIN SMALL LETTER Z WITH CARON
u'\u014b' # 0xBF -> LATIN SMALL LETTER ENG
u'\u0100' # 0xC0 -> LATIN CAPITAL LETTER A WITH MACRON
u'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\xc3' # 0xC3 -> LATIN CAPITAL LETTER A WITH TILDE
u'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc6' # 0xC6 -> LATIN CAPITAL LETTER AE
u'\u012e' # 0xC7 -> LATIN CAPITAL LETTER I WITH OGONEK
u'\u010c' # 0xC8 -> LATIN CAPITAL LETTER C WITH CARON
u'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\u0118' # 0xCA -> LATIN CAPITAL LETTER E WITH OGONEK
u'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\u0116' # 0xCC -> LATIN CAPITAL LETTER E WITH DOT ABOVE
u'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\u012a' # 0xCF -> LATIN CAPITAL LETTER I WITH MACRON
u'\u0110' # 0xD0 -> LATIN CAPITAL LETTER D WITH STROKE
u'\u0145' # 0xD1 -> LATIN CAPITAL LETTER N WITH CEDILLA
u'\u014c' # 0xD2 -> LATIN CAPITAL LETTER O WITH MACRON
u'\u0136' # 0xD3 -> LATIN CAPITAL LETTER K WITH CEDILLA
u'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\xd5' # 0xD5 -> LATIN CAPITAL LETTER O WITH TILDE
u'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xd7' # 0xD7 -> MULTIPLICATION SIGN
u'\xd8' # 0xD8 -> LATIN CAPITAL LETTER O WITH STROKE
u'\u0172' # 0xD9 -> LATIN CAPITAL LETTER U WITH OGONEK
u'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\u0168' # 0xDD -> LATIN CAPITAL LETTER U WITH TILDE
u'\u016a' # 0xDE -> LATIN CAPITAL LETTER U WITH MACRON
u'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S
u'\u0101' # 0xE0 -> LATIN SMALL LETTER A WITH MACRON
u'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe3' # 0xE3 -> LATIN SMALL LETTER A WITH TILDE
u'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe6' # 0xE6 -> LATIN SMALL LETTER AE
u'\u012f' # 0xE7 -> LATIN SMALL LETTER I WITH OGONEK
u'\u010d' # 0xE8 -> LATIN SMALL LETTER C WITH CARON
u'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
u'\u0119' # 0xEA -> LATIN SMALL LETTER E WITH OGONEK
u'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
u'\u0117' # 0xEC -> LATIN SMALL LETTER E WITH DOT ABOVE
u'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
u'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\u012b' # 0xEF -> LATIN SMALL LETTER I WITH MACRON
u'\u0111' # 0xF0 -> LATIN SMALL LETTER D WITH STROKE
u'\u0146' # 0xF1 -> LATIN SMALL LETTER N WITH CEDILLA
u'\u014d' # 0xF2 -> LATIN SMALL LETTER O WITH MACRON
u'\u0137' # 0xF3 -> LATIN SMALL LETTER K WITH CEDILLA
u'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf5' # 0xF5 -> LATIN SMALL LETTER O WITH TILDE
u'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf7' # 0xF7 -> DIVISION SIGN
u'\xf8' # 0xF8 -> LATIN SMALL LETTER O WITH STROKE
u'\u0173' # 0xF9 -> LATIN SMALL LETTER U WITH OGONEK
u'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
u'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
u'\u0169' # 0xFD -> LATIN SMALL LETTER U WITH TILDE
u'\u016b' # 0xFE -> LATIN SMALL LETTER U WITH MACRON
u'\u02d9' # 0xFF -> DOT ABOVE
)
### Encoding table
# Inverse of decoding_table: maps each Unicode character back to its
# iso8859-4 byte value, built by the C-level codecs helper.
encoding_table=codecs.charmap_build(decoding_table)
| bsd-3-clause |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.