repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
FrankBian/kuma | vendor/packages/sqlalchemy/lib/sqlalchemy/test/schema.py | 8 | 2739 | """Enhanced versions of schema.Table and schema.Column which establish
desired state for different backends.
"""
from sqlalchemy.test import testing
from sqlalchemy import schema
__all__ = 'Table', 'Column',
table_options = {}
def Table(*args, **kw):
    """A schema.Table wrapper/hook for dialect-specific tweaks."""
    # Pull out test-only options ('test_*' keywords); they drive behaviour
    # below and must not be forwarded to schema.Table itself.
    test_opts = dict([(k,kw.pop(k)) for k in kw.keys()
                      if k.startswith('test_')])

    kw.update(table_options)

    if testing.against('mysql'):
        # Tests that need FK enforcement or transactions require InnoDB;
        # don't override an engine/type the caller chose explicitly.
        if 'mysql_engine' not in kw and 'mysql_type' not in kw:
            if 'test_needs_fk' in test_opts or 'test_needs_acid' in test_opts:
                kw['mysql_engine'] = 'InnoDB'

    # Apply some default cascading rules for self-referential foreign keys.
    # MySQL InnoDB has some issues around selecting self-refs too.
    if testing.against('firebird'):
        table_name = args[0]
        unpack = (testing.config.db.dialect.
                  identifier_preparer.unformat_identifiers)

        # Only going after ForeignKeys in Columns.  May need to
        # expand to ForeignKeyConstraint too.
        fks = [fk
               for col in args if isinstance(col, schema.Column)
               for fk in col.foreign_keys]

        for fk in fks:
            # root around in raw spec
            ref = fk._colspec
            if isinstance(ref, schema.Column):
                name = ref.table.name
            else:
                # take just the table name: on FB there cannot be
                # a schema, so the first element is always the
                # table name, possibly followed by the field name
                name = unpack(ref)[0]
            if name == table_name:
                # Self-referential FK: default to CASCADE where the test
                # didn't specify an action.
                if fk.ondelete is None:
                    fk.ondelete = 'CASCADE'
                if fk.onupdate is None:
                    fk.onupdate = 'CASCADE'

    return schema.Table(*args, **kw)
def Column(*args, **kw):
    """A schema.Column wrapper/hook for dialect-specific tweaks."""
    # Strip test-only options before handing the rest to schema.Column.
    test_opts = dict([(k,kw.pop(k)) for k in kw.keys()
                      if k.startswith('test_')])

    col = schema.Column(*args, **kw)
    if 'test_needs_autoincrement' in test_opts and \
            kw.get('primary_key', False) and \
            testing.against('firebird', 'oracle'):
        # Firebird/Oracle have no native autoincrement; emulate it by
        # attaching an optional Sequence once the column joins a table.
        def add_seq(tbl, c):
            c._init_items(
                schema.Sequence(_truncate_name(testing.db.dialect, tbl.name + '_' + c.name + '_seq'), optional=True)
            )
        col._on_table_attach(add_seq)
    return col
def _truncate_name(dialect, name):
if len(name) > dialect.max_identifier_length:
return name[0:max(dialect.max_identifier_length - 6, 0)] + "_" + hex(hash(name) % 64)[2:]
else:
return name
| mpl-2.0 |
batra-mlp-lab/DIGITS | digits/model/forms.py | 1 | 11983 | # Copyright (c) 2014-2015, NVIDIA CORPORATION. All rights reserved.
import os
import flask
from flask.ext.wtf import Form
import wtforms
from wtforms import validators
from digits.config import config_value
from digits.device_query import get_device, get_nvml_info
from digits import utils
from digits.utils import sizeof_fmt
from digits.utils.forms import validate_required_iff
from digits import frameworks
class ModelForm(Form):
    """Form describing a new model training job: dataset, solver options,
    learning-rate policy, network definition and GPU selection.

    Choices for several fields are filled in by the view because they are
    dynamic (available datasets, standard networks, GPUs, ...).
    """

    ### Methods

    def selection_exists_in_choices(form, field):
        # Guard against stale choices: the referenced job may have been
        # deleted by another user between page load and form submit.
        found = False
        for choice in field.choices:
            if choice[0] == field.data:
                found = True
        if not found:
            raise validators.ValidationError("Selected job doesn't exist. Maybe it was deleted by another user.")

    def validate_NetParameter(form, field):
        # Delegate network validation to the selected framework.
        fw = frameworks.get_framework_by_id(form['framework'].data)
        try:
            # below function raises a BadNetworkException in case of validation error
            fw.validate_network(field.data)
        except frameworks.errors.BadNetworkError as e:
            raise validators.ValidationError('Bad network: %s' % e.message)

    def validate_file_exists(form, field):
        # Server-side paths are only checked when the client-side upload
        # option is not in use; an empty value is accepted.
        from_client = bool(form.python_layer_from_client.data)

        filename = ''
        if not from_client and field.type == 'StringField':
            filename = field.data

        if filename == '': return

        if not os.path.isfile(filename):
            raise validators.ValidationError('Server side file, %s, does not exist.' % filename)

    def validate_py_ext(form, field):
        # Accept only .py/.pyc files, whether uploaded or server-side.
        from_client = bool(form.python_layer_from_client.data)

        filename = ''
        if from_client and field.type == 'FileField':
            filename = flask.request.files[field.name].filename
        elif not from_client and field.type == 'StringField':
            filename = field.data

        if filename == '': return

        (root, ext) = os.path.splitext(filename)
        if ext != '.py' and ext != '.pyc':
            raise validators.ValidationError('Python file, %s, needs .py or .pyc extension.' % filename)

    ### Fields

    # The options for this get set in the view (since they are dynamic)
    dataset = utils.forms.SelectField('Select Dataset', choices=[],
            tooltip = "Choose the dataset to use for this model."
            )

    python_layer_from_client = wtforms.BooleanField(u'Use client side file',
            default=False)

    python_layer_client_file = utils.forms.FileField(
            u'Python Layer File (client side)',
            validators=[
                validate_py_ext
                ],
            tooltip = "Choose the python file on the client containing layer functions."
            )

    python_layer_server_file = utils.forms.StringField(
            u'Python Layer File (server side)',
            validators=[
                validate_file_exists,
                validate_py_ext
                ],
            tooltip = "Choose the python file on the server containing layer functions."
            )

    train_epochs = utils.forms.IntegerField('Training epochs',
            validators = [
                validators.NumberRange(min=1)
                ],
            default=30,
            tooltip = "How many passes through the training data?"
            )

    snapshot_interval = utils.forms.FloatField('Snapshot interval (in epochs)',
            default = 1,
            validators = [
                validators.NumberRange(min=0),
                ],
            tooltip = "How many epochs of training between taking a snapshot?"
            )

    val_interval = utils.forms.FloatField('Validation interval (in epochs)',
            default = 1,
            validators = [
                validators.NumberRange(min=0)
                ],
            tooltip = "How many epochs of training between running through one pass of the validation data?"
            )

    random_seed = utils.forms.IntegerField('Random seed',
            validators = [
                validators.NumberRange(min=0),
                validators.Optional(),
                ],
            tooltip = "If you provide a random seed, then back-to-back runs with the same model and dataset should give identical results."
            )

    batch_size = utils.forms.IntegerField('Batch size',
            validators = [
                validators.NumberRange(min=1),
                validators.Optional(),
                ],
            tooltip = "How many images to process at once. If blank, values are used from the network definition."
            )

    ### Solver types

    solver_type = utils.forms.SelectField('Solver type',
            choices = [
                ('SGD', 'Stochastic gradient descent (SGD)'),
                ('ADAGRAD', 'Adaptive gradient (AdaGrad)'),
                ('NESTEROV', "Nesterov's accelerated gradient (NAG)"),
                ],
            default = 'SGD',
            tooltip = "What type of solver will be used??"
            )

    ### Learning rate

    learning_rate = utils.forms.FloatField('Base Learning Rate',
            default = 0.01,
            validators = [
                validators.NumberRange(min=0),
                ],
            tooltip = "Affects how quickly the network learns. If you are getting NaN for your loss, you probably need to lower this value."
            )

    lr_policy = wtforms.SelectField('Policy',
            choices = [
                ('fixed', 'Fixed'),
                ('step', 'Step Down'),
                ('multistep', 'Step Down (arbitrary steps)'),
                ('exp', 'Exponential Decay'),
                ('inv', 'Inverse Decay'),
                ('poly', 'Polynomial Decay'),
                ('sigmoid', 'Sigmoid Decay'),
                ],
            default = 'step'
            )

    lr_step_size = wtforms.FloatField('Step Size',
            default = 33
            )
    lr_step_gamma = wtforms.FloatField('Gamma',
            default = 0.1
            )
    lr_multistep_values = wtforms.StringField('Step Values',
            default = "50,85"
            )

    def validate_lr_multistep_values(form, field):
        # Only meaningful for the 'multistep' policy: every comma-separated
        # entry must parse as a float.
        if form.lr_policy.data == 'multistep':
            for value in field.data.split(','):
                try:
                    float(value)
                except ValueError:
                    raise validators.ValidationError('invalid value')

    lr_multistep_gamma = wtforms.FloatField('Gamma',
            default = 0.5
            )
    lr_exp_gamma = wtforms.FloatField('Gamma',
            default = 0.95
            )
    lr_inv_gamma = wtforms.FloatField('Gamma',
            default = 0.1
            )
    lr_inv_power = wtforms.FloatField('Power',
            default = 0.5
            )
    lr_poly_power = wtforms.FloatField('Power',
            default = 3
            )
    lr_sigmoid_step = wtforms.FloatField('Step',
            default = 50
            )
    lr_sigmoid_gamma = wtforms.FloatField('Gamma',
            default = 0.1
            )

    ### Network

    # Use a SelectField instead of a HiddenField so that the default value
    # is used when nothing is provided (through the REST API)
    method = wtforms.SelectField(u'Network type',
            choices = [
                ('standard', 'Standard network'),
                ('previous', 'Previous network'),
                ('custom', 'Custom network'),
                ],
            default='standard',
            )

    ## framework - hidden field, set by Javascript to the selected framework ID
    framework = wtforms.HiddenField('framework',
            validators = [
                validators.AnyOf(
                    [fw.get_id() for fw in frameworks.get_frameworks()],
                    message='The framework you choose is not currently supported.'
                    )
                ],
            default = frameworks.get_frameworks()[0].get_id()
            )

    # The options for this get set in the view (since they are dependent on the data type)
    standard_networks = wtforms.RadioField('Standard Networks',
            validators = [
                validate_required_iff(method='standard'),
                ],
            )

    previous_networks = wtforms.RadioField('Previous Networks',
            choices = [],
            validators = [
                validate_required_iff(method='previous'),
                selection_exists_in_choices,
                ],
            )

    custom_network = utils.forms.TextAreaField('Custom Network',
            validators = [
                validate_required_iff(method='custom'),
                validate_NetParameter,
                ],
            )

    custom_network_snapshot = utils.forms.TextField('Pretrained model',
            tooltip = "Path to pretrained model file. Only edit this field if you understand how fine-tuning works in caffe"
            )

    def validate_custom_network_snapshot(form, field):
        # The pretrained-model path only applies to custom networks and
        # must point at an existing file when given.
        if form.method.data == 'custom':
            snapshot = field.data.strip()
            if snapshot:
                if not os.path.exists(snapshot):
                    raise validators.ValidationError('File does not exist')

    # Select one of several GPUs
    select_gpu = wtforms.RadioField('Select which GPU you would like to use',
            choices = [('next', 'Next available')] + [(
                index,
                '#%s - %s%s' % (
                    index,
                    get_device(index).name,
                    ' (%s memory)' % sizeof_fmt(get_nvml_info(index)['memory']['total'])
                    if get_nvml_info(index) and 'memory' in get_nvml_info(index) else '',
                    ),
                ) for index in config_value('gpu_list').split(',') if index],
            default = 'next',
            )

    # Select N of several GPUs
    select_gpus = utils.forms.SelectMultipleField('Select which GPU[s] you would like to use',
            choices = [(
                index,
                '#%s - %s%s' % (
                    index,
                    get_device(index).name,
                    ' (%s memory)' % sizeof_fmt(get_nvml_info(index)['memory']['total'])
                    if get_nvml_info(index) and 'memory' in get_nvml_info(index) else '',
                    ),
                ) for index in config_value('gpu_list').split(',') if index],
            tooltip = "The job won't start until all of the chosen GPUs are available."
            )

    # XXX For testing
    # The Flask test framework can't handle SelectMultipleFields correctly
    select_gpus_list = wtforms.StringField('Select which GPU[s] you would like to use (comma separated)')

    def validate_select_gpus(form, field):
        # Testing shim: allow the multi-select to be driven from the
        # comma-separated string field above.
        if form.select_gpus_list.data:
            field.data = form.select_gpus_list.data.split(',')

    # Use next available N GPUs
    select_gpu_count = wtforms.IntegerField('Use this many GPUs (next available)',
            validators = [
                validators.NumberRange(min=1, max=len(config_value('gpu_list').split(',')))
                ],
            default = 1,
            )

    def validate_select_gpu_count(form, field):
        if field.data is None:
            if form.select_gpus.data:
                # Make this field optional
                field.errors[:] = []
                raise validators.StopValidation()

    model_name = utils.forms.StringField('Model Name',
            validators = [
                validators.DataRequired()
                ],
            tooltip = "An identifier, later used to refer to this model in the Application."
            )

    # allows shuffling data during training (for frameworks that support this, as indicated by
    # their Framework.can_shuffle_data() method)
    shuffle = utils.forms.BooleanField('Shuffle Train Data',
            default = True,
            tooltip = 'For every epoch, shuffle the data before training.'
            )
| bsd-3-clause |
xavfernandez/pip | src/pip/_vendor/html5lib/_trie/py.py | 1323 | 1775 | from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import text_type
from bisect import bisect_left
from ._base import Trie as ABCTrie
class Trie(ABCTrie):
    """Trie over a sorted key list, with a one-entry prefix-range cache
    (``_cachestr``/``_cachepoints``) that narrows the bisect window for
    repeated lookups with related prefixes.

    ``data`` must map text-type (unicode) keys to values.
    """

    def __init__(self, data):
        if not all(isinstance(x, text_type) for x in data.keys()):
            raise TypeError("All keys must be strings")

        self._data = data
        self._keys = sorted(data.keys())
        self._cachestr = ""
        # (lo, hi): index window into self._keys covering the keys that
        # start with self._cachestr
        self._cachepoints = (0, len(data))

    def __contains__(self, key):
        return key in self._data

    def __len__(self):
        return len(self._data)

    def __iter__(self):
        return iter(self._data)

    def __getitem__(self, key):
        return self._data[key]

    def keys(self, prefix=None):
        """Return the set of keys starting with `prefix` (all keys when
        prefix is None or empty)."""
        if prefix is None or prefix == "" or not self._keys:
            return set(self._keys)

        if prefix.startswith(self._cachestr):
            # The matching range is contained in the cached window.
            lo, hi = self._cachepoints
            start = i = bisect_left(self._keys, prefix, lo, hi)
        else:
            start = i = bisect_left(self._keys, prefix)

        keys = set()
        if start == len(self._keys):
            return keys

        # Bug fix: bound the scan by len(self._keys).  The original
        # unconditionally indexed self._keys[i] and raised IndexError when
        # every key from `start` onward matched the prefix.
        while i < len(self._keys) and self._keys[i].startswith(prefix):
            keys.add(self._keys[i])
            i += 1

        self._cachestr = prefix
        self._cachepoints = (start, i)

        return keys

    def has_keys_with_prefix(self, prefix):
        if prefix in self._data:
            return True

        if prefix.startswith(self._cachestr):
            lo, hi = self._cachepoints
            i = bisect_left(self._keys, prefix, lo, hi)
        else:
            i = bisect_left(self._keys, prefix)

        if i == len(self._keys):
            return False

        return self._keys[i].startswith(prefix)
| mit |
Cloudino/Cloudino-Arduino-IDE | arduino-core/src/processing/app/i18n/python/requests/packages/charade/sjisprober.py | 1182 | 3734 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import sys
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import SJISDistributionAnalysis
from .jpcntx import SJISContextAnalysis
from .mbcssm import SJISSMModel
from . import constants
class SJISProber(MultiByteCharSetProber):
    """Shift_JIS charset prober combining a coding state machine with
    byte-distribution and context analyses."""

    def __init__(self):
        MultiByteCharSetProber.__init__(self)
        self._mCodingSM = CodingStateMachine(SJISSMModel)
        self._mDistributionAnalyzer = SJISDistributionAnalysis()
        self._mContextAnalyzer = SJISContextAnalysis()
        self.reset()

    def reset(self):
        MultiByteCharSetProber.reset(self)
        self._mContextAnalyzer.reset()

    def get_charset_name(self):
        return "SHIFT_JIS"

    def feed(self, aBuf):
        """Feed a chunk of bytes to the prober; returns the prober state."""
        aLen = len(aBuf)
        for i in range(0, aLen):
            codingState = self._mCodingSM.next_state(aBuf[i])
            if codingState == constants.eError:
                if constants._debug:
                    sys.stderr.write(self.get_charset_name()
                                     + ' prober hit error at byte ' + str(i)
                                     + '\n')
                self._mState = constants.eNotMe
                break
            elif codingState == constants.eItsMe:
                self._mState = constants.eFoundIt
                break
            elif codingState == constants.eStart:
                charLen = self._mCodingSM.get_current_charlen()
                if i == 0:
                    # First byte of this chunk completes a character begun
                    # in the previous chunk (carried in self._mLastChar).
                    self._mLastChar[1] = aBuf[0]
                    self._mContextAnalyzer.feed(self._mLastChar[2 - charLen:],
                                                charLen)
                    self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
                else:
                    self._mContextAnalyzer.feed(aBuf[i + 1 - charLen:i + 3
                                                     - charLen], charLen)
                    self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
                                                     charLen)

        # Remember the trailing byte for cross-chunk character handling.
        self._mLastChar[0] = aBuf[aLen - 1]

        if self.get_state() == constants.eDetecting:
            # Shortcut: declare success early once confidence is high enough.
            if (self._mContextAnalyzer.got_enough_data() and
                    (self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
                self._mState = constants.eFoundIt

        return self.get_state()

    def get_confidence(self):
        contxtCf = self._mContextAnalyzer.get_confidence()
        distribCf = self._mDistributionAnalyzer.get_confidence()
        return max(contxtCf, distribCf)
| lgpl-2.1 |
vivekanand1101/statscache | fedmsg.d/endpoints.py | 6 | 1857 | # This file is part of fedmsg.
# Copyright (C) 2012 Red Hat, Inc.
#
# fedmsg is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# fedmsg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with fedmsg; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: Ralph Bean <rbean@redhat.com>
#
import socket
# Short hostname (everything before the first dot).
hostname = socket.gethostname().partition('.')[0]

config = dict(
    # This is a dict of possible addresses from which fedmsg can send
    # messages.  fedmsg.init(...) requires that a 'name' argument be passed
    # to it which corresponds with one of the keys in this dict.
    endpoints={
        # These are here so your local box can listen to the upstream
        # infrastructure's bus.  Cool, right? :)
        "fedora-infrastructure": [
            "tcp://hub.fedoraproject.org:9940",
            #"tcp://stg.fedoraproject.org:9940",
        ],

        # For other, more 'normal' services, fedmsg will try to guess the
        # name of it's calling module to determine which endpoint definition
        # to use.  This can be overridden by explicitly providing the name in
        # the initial call to fedmsg.init(...).
        #"bodhi.%s" % hostname: ["tcp://127.0.0.1:3001"],
        #"fas.%s" % hostname: ["tcp://127.0.0.1:3002"],
        #"fedoratagger.%s" % hostname: ["tcp://127.0.0.1:3003"],
    },
)
| gpl-2.0 |
jotes/boto | tests/unit/beanstalk/test_exception.py | 114 | 2085 | # Copyright (c) 2014 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from boto.beanstalk.exception import simple
from tests.compat import unittest
class FakeError(object):
    """Lightweight stand-in for a boto error response, carrying just the
    attributes that exception-building code inspects."""

    def __init__(self, code, status, reason, body):
        # Store everything verbatim; no validation is wanted here.
        self.code, self.status, self.reason, self.body = (
            code, status, reason, body)
class TestExceptions(unittest.TestCase):
    def test_exception_class_names(self):
        """simple() should name the exception class after the error code,
        stripping any trailing 'Exception' suffix."""
        # Create exception from class name
        error = FakeError('TooManyApplications', 400, 'foo', 'bar')
        exception = simple(error)
        self.assertEqual(exception.__class__.__name__, 'TooManyApplications')

        # Create exception from class name + 'Exception' as seen from the
        # live service today
        error = FakeError('TooManyApplicationsException', 400, 'foo', 'bar')
        exception = simple(error)
        self.assertEqual(exception.__class__.__name__, 'TooManyApplications')

        # Make sure message body is present
        self.assertEqual(exception.message, 'bar')
| mit |
ulule/django-linguist | linguist/tests/test_signals.py | 1 | 3254 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from exam import around, fixture
from django.db.models.signals import pre_save, post_save
from ..models import Translation
from .base import BaseTestCase
from .models import BarModel
class PostDeleteSignalTest(BaseTestCase):
    """
    Tests Linguist post_delete Signals.
    """

    def test_post_delete(self):
        # Give the default instance one English and one French translation.
        self.instance.activate_language("en")
        self.instance.title = "Hello"
        self.instance.save()
        self.instance.activate_language("fr")
        self.instance.title = "Bonjour"
        self.instance.save()
        self.assertEqual(Translation.objects.count(), 2)

        # A second model contributes one more translation row.
        bar_instance = BarModel()
        bar_instance.activate_language("fr")
        bar_instance.title = "Bonjour"
        bar_instance.save()
        self.assertEqual(Translation.objects.count(), 3)

        # Deleting an instance must remove only its own translations.
        self.instance.delete()
        self.assertEqual(Translation.objects.count(), 1)
        bar_instance.delete()
        self.assertEqual(Translation.objects.count(), 0)
class PrePostSaveSignalsTest(BaseTestCase):
    """
    Tests Linguist with pre_save/post_save Signals.
    """

    @fixture
    def bar(self):
        # A BarModel saved with both an English and a French translation.
        bar_instance = BarModel()
        bar_instance.activate_language("en")
        bar_instance.title = "Hello"
        bar_instance.save()
        bar_instance.activate_language("fr")
        bar_instance.title = "Bonjour"
        bar_instance.save()
        return bar_instance

    @around
    def pre_post_save_wrapper(self):
        # Connect signal handlers before each test and disconnect after,
        # so handlers don't leak into other tests.
        self.pre_save_called = False
        self.post_save_called = False
        self.bar

        def pre_save_handler(sender, instance, **kwargs):
            # Assert that a pre_save handler gets the proper information on which fields have changed
            # (hence the translations have not yet been saved to the db).
            self.pre_save_called = True
            field_fr = instance._linguist_translations["title"].get("fr", None)
            field_en = instance._linguist_translations["title"].get("en", None)
            assert field_fr.has_changed
            assert not field_en.has_changed

        pre_save.connect(pre_save_handler, sender=BarModel)

        def post_save_handler(sender, instance, **kwargs):
            # Assert that a post_save handler that refreshes the model gets the saved translations
            # (hence the translations have been saved to the db).
            self.post_save_called = True
            title_fr = instance.title_fr
            title_en = instance.title_en
            instance = BarModel.objects.get(pk=instance.pk)
            assert instance.title_fr == title_fr
            assert instance.title_en == title_en

        post_save.connect(post_save_handler, sender=BarModel)

        yield

        pre_save.disconnect(pre_save_handler, sender=BarModel)
        post_save.disconnect(post_save_handler, sender=BarModel)

    def test_pre_post_save(self):
        assert self.pre_save_called is False
        assert self.post_save_called is False

        # Saving a changed French title must fire both handlers.
        self.bar.activate_language("fr")
        self.bar.title = "Bonjour signal"
        self.bar.save()

        assert self.pre_save_called is True
        assert self.post_save_called is True
| mit |
schleichdi2/OPENNFR-6.1-CORE | opennfr-openembedded-core/meta/lib/oeqa/core/runner.py | 1 | 2466 | # Copyright (C) 2016 Intel Corporation
# Released under the MIT license (see COPYING.MIT)
import os
import time
import unittest
import logging
# Prefer the unittest-xml-reporting package when it is installed; fall
# back to the plain-text unittest runner/result classes otherwise.
# `xmlEnabled` records which variant is in effect.
xmlEnabled = False
try:
    import xmlrunner
    from xmlrunner.result import _XMLTestResult as _TestResult
    from xmlrunner.runner import XMLTestRunner as _TestRunner
    xmlEnabled = True
except ImportError:
    # use the base runner instead
    from unittest import TextTestResult as _TestResult
    from unittest import TextTestRunner as _TestRunner
class OEStreamLogger(object):
    """File-like adapter that forwards writes to a logging.Logger.

    Text is accumulated until a flush-triggering write arrives (a single
    character, or a chunk beginning with a newline); the buffered line is
    then emitted at INFO level with its trailing newlines stripped.
    """

    def __init__(self, logger):
        self.logger = logger
        self.buffer = ""

    def write(self, msg):
        should_buffer = len(msg) > 1 and msg[0] != '\n'
        if not should_buffer:
            # Emit what we have collected so far and start over.
            self.logger.log(logging.INFO, self.buffer.rstrip("\n"))
            self.buffer = ""
        else:
            self.buffer += msg

    def flush(self):
        # Delegate flushing to the logger's handlers.
        for handler in self.logger.handlers:
            handler.flush()
class OETestResult(_TestResult):
    """Test result that mirrors its failure/error/skip/xfail lists into
    the owning test context's `_results` dict, so callers can inspect
    them after the run."""

    def __init__(self, tc, *args, **kwargs):
        super(OETestResult, self).__init__(*args, **kwargs)

        self.tc = tc

        # These are the live lists maintained by the base result class;
        # sharing them means _results stays up to date during the run.
        self.tc._results['failures'] = self.failures
        self.tc._results['errors'] = self.errors
        self.tc._results['skipped'] = self.skipped
        self.tc._results['expectedFailures'] = self.expectedFailures

    def startTest(self, test):
        super(OETestResult, self).startTest(test)
class OETestRunner(_TestRunner):
    """Test runner that threads the test context through to OETestResult.

    When xmlrunner is available, also defaults the XML output directory
    to a timestamped folder under the current working directory.
    """

    def __init__(self, tc, *args, **kwargs):
        if xmlEnabled:
            if not kwargs.get('output'):
                kwargs['output'] = os.path.join(os.getcwd(),
                        'TestResults_%s' % time.strftime("%Y%m%d%H%M%S"))

        super(OETestRunner, self).__init__(*args, **kwargs)
        self.tc = tc
        self.resultclass = OETestResult

    # XXX: The unittest-xml-reporting package defines _make_result method instead
    # of _makeResult standard on unittest.
    if xmlEnabled:
        def _make_result(self):
            """
            Creates a TestResult object which will be used to store
            information about the executed tests.
            """
            # override in subclasses if necessary.
            return self.resultclass(self.tc,
                    self.stream, self.descriptions, self.verbosity, self.elapsed_times
            )
    else:
        def _makeResult(self):
            return self.resultclass(self.tc, self.stream, self.descriptions,
                    self.verbosity)
| gpl-2.0 |
jiangzhuo/kbengine | kbe/res/scripts/common/Lib/test/test_binop.py | 139 | 12658 | """Tests for binary operators on subtypes of built-in types."""
import unittest
from test import support
from operator import eq, ne, lt, gt, le, ge
def gcd(a, b):
    """Greatest common divisor using Euclid's algorithm.

    Follows this module's historical sign convention (the result can be
    negative, tracking the sign of the surviving operand), unlike
    math.gcd.
    """
    if a == 0:
        return b
    return gcd(b % a, a)
def isint(x):
    """Test whether an object is an instance of int."""
    # Note: bool is a subclass of int, so booleans count as ints here.
    return isinstance(x, int)
def isnum(x):
    """Test whether an object is an instance of a built-in numeric type.

    Returns the ints 1/0 (not True/False), mirroring the original API.
    """
    return int(isinstance(x, (int, float, complex)))
def isRat(x):
    """Test whether an object is an instance of the Rat class."""
    return isinstance(x, Rat)
class Rat(object):
    """Rational number implemented as a normalized pair of ints."""

    __slots__ = ['_Rat__num', '_Rat__den']

    def __init__(self, num=0, den=1):
        """Constructor: Rat([num[, den]]).

        The arguments must be ints, and default to (0, 1)."""
        if not isint(num):
            raise TypeError("Rat numerator must be int (%r)" % num)
        if not isint(den):
            raise TypeError("Rat denominator must be int (%r)" % den)
        # But the zero is always on
        if den == 0:
            raise ZeroDivisionError("zero denominator")
        # gcd's sign convention ensures the stored denominator is positive.
        g = gcd(den, num)
        self.__num = int(num//g)
        self.__den = int(den//g)

    def _get_num(self):
        """Accessor function for read-only 'num' attribute of Rat."""
        return self.__num
    num = property(_get_num, None)

    def _get_den(self):
        """Accessor function for read-only 'den' attribute of Rat."""
        return self.__den
    den = property(_get_den, None)

    def __repr__(self):
        """Convert a Rat to an string resembling a Rat constructor call."""
        return "Rat(%d, %d)" % (self.__num, self.__den)

    def __str__(self):
        """Convert a Rat to a string resembling a decimal numeric value."""
        return str(float(self))

    def __float__(self):
        """Convert a Rat to a float."""
        return self.__num*1.0/self.__den

    def __int__(self):
        """Convert a Rat to an int; self.den must be 1."""
        if self.__den == 1:
            try:
                return int(self.__num)
            except OverflowError:
                raise OverflowError("%s too large to convert to int" %
                                    repr(self))
        raise ValueError("can't convert %s to int" % repr(self))

    def __add__(self, other):
        """Add two Rats, or a Rat and a number."""
        if isint(other):
            other = Rat(other)
        if isRat(other):
            return Rat(self.__num*other.__den + other.__num*self.__den,
                       self.__den*other.__den)
        if isnum(other):
            return float(self) + other
        # Defer to the other operand's reflected method.
        return NotImplemented

    __radd__ = __add__

    def __sub__(self, other):
        """Subtract two Rats, or a Rat and a number."""
        if isint(other):
            other = Rat(other)
        if isRat(other):
            return Rat(self.__num*other.__den - other.__num*self.__den,
                       self.__den*other.__den)
        if isnum(other):
            return float(self) - other
        return NotImplemented

    def __rsub__(self, other):
        """Subtract two Rats, or a Rat and a number (reversed args)."""
        if isint(other):
            other = Rat(other)
        if isRat(other):
            return Rat(other.__num*self.__den - self.__num*other.__den,
                       self.__den*other.__den)
        if isnum(other):
            return other - float(self)
        return NotImplemented

    def __mul__(self, other):
        """Multiply two Rats, or a Rat and a number."""
        if isRat(other):
            return Rat(self.__num*other.__num, self.__den*other.__den)
        if isint(other):
            return Rat(self.__num*other, self.__den)
        if isnum(other):
            return float(self)*other
        return NotImplemented

    __rmul__ = __mul__

    def __truediv__(self, other):
        """Divide two Rats, or a Rat and a number."""
        if isRat(other):
            return Rat(self.__num*other.__den, self.__den*other.__num)
        if isint(other):
            return Rat(self.__num, self.__den*other)
        if isnum(other):
            return float(self) / other
        return NotImplemented

    def __rtruediv__(self, other):
        """Divide two Rats, or a Rat and a number (reversed args)."""
        if isRat(other):
            return Rat(other.__num*self.__den, other.__den*self.__num)
        if isint(other):
            return Rat(other*self.__den, self.__num)
        if isnum(other):
            return other / float(self)
        return NotImplemented

    def __floordiv__(self, other):
        """Divide two Rats, returning the floored result."""
        if isint(other):
            other = Rat(other)
        elif not isRat(other):
            return NotImplemented
        x = self/other
        return x.__num // x.__den

    def __rfloordiv__(self, other):
        """Divide two Rats, returning the floored result (reversed args)."""
        x = other/self
        return x.__num // x.__den

    def __divmod__(self, other):
        """Divide two Rats, returning quotient and remainder."""
        if isint(other):
            other = Rat(other)
        elif not isRat(other):
            return NotImplemented
        x = self//other
        return (x, self - other * x)

    def __rdivmod__(self, other):
        """Divide two Rats, returning quotient and remainder (reversed args)."""
        if isint(other):
            other = Rat(other)
        elif not isRat(other):
            return NotImplemented
        return divmod(other, self)

    def __mod__(self, other):
        """Take one Rat modulo another."""
        return divmod(self, other)[1]

    def __rmod__(self, other):
        """Take one Rat modulo another (reversed args)."""
        return divmod(other, self)[1]

    def __eq__(self, other):
        """Compare two Rats for equality."""
        if isint(other):
            # Normalized form makes integer comparison a simple check.
            return self.__den == 1 and self.__num == other
        if isRat(other):
            return self.__num == other.__num and self.__den == other.__den
        if isnum(other):
            return float(self) == other
        return NotImplemented

    def __ne__(self, other):
        """Compare two Rats for inequality."""
        return not self == other
class RatTestCase(unittest.TestCase):
"""Unit tests for Rat class and its support utilities."""
def test_gcd(self):
self.assertEqual(gcd(10, 12), 2)
self.assertEqual(gcd(10, 15), 5)
self.assertEqual(gcd(10, 11), 1)
self.assertEqual(gcd(100, 15), 5)
self.assertEqual(gcd(-10, 2), -2)
self.assertEqual(gcd(10, -2), 2)
self.assertEqual(gcd(-10, -2), -2)
for i in range(1, 20):
for j in range(1, 20):
self.assertTrue(gcd(i, j) > 0)
self.assertTrue(gcd(-i, j) < 0)
self.assertTrue(gcd(i, -j) > 0)
self.assertTrue(gcd(-i, -j) < 0)
def test_constructor(self):
a = Rat(10, 15)
self.assertEqual(a.num, 2)
self.assertEqual(a.den, 3)
a = Rat(10, -15)
self.assertEqual(a.num, -2)
self.assertEqual(a.den, 3)
a = Rat(-10, 15)
self.assertEqual(a.num, -2)
self.assertEqual(a.den, 3)
a = Rat(-10, -15)
self.assertEqual(a.num, 2)
self.assertEqual(a.den, 3)
a = Rat(7)
self.assertEqual(a.num, 7)
self.assertEqual(a.den, 1)
try:
a = Rat(1, 0)
except ZeroDivisionError:
pass
else:
self.fail("Rat(1, 0) didn't raise ZeroDivisionError")
for bad in "0", 0.0, 0j, (), [], {}, None, Rat, unittest:
try:
a = Rat(bad)
except TypeError:
pass
else:
self.fail("Rat(%r) didn't raise TypeError" % bad)
try:
a = Rat(1, bad)
except TypeError:
pass
else:
self.fail("Rat(1, %r) didn't raise TypeError" % bad)
def test_add(self):
self.assertEqual(Rat(2, 3) + Rat(1, 3), 1)
self.assertEqual(Rat(2, 3) + 1, Rat(5, 3))
self.assertEqual(1 + Rat(2, 3), Rat(5, 3))
self.assertEqual(1.0 + Rat(1, 2), 1.5)
self.assertEqual(Rat(1, 2) + 1.0, 1.5)
def test_sub(self):
self.assertEqual(Rat(7, 2) - Rat(7, 5), Rat(21, 10))
self.assertEqual(Rat(7, 5) - 1, Rat(2, 5))
self.assertEqual(1 - Rat(3, 5), Rat(2, 5))
self.assertEqual(Rat(3, 2) - 1.0, 0.5)
self.assertEqual(1.0 - Rat(1, 2), 0.5)
def test_mul(self):
self.assertEqual(Rat(2, 3) * Rat(5, 7), Rat(10, 21))
self.assertEqual(Rat(10, 3) * 3, 10)
self.assertEqual(3 * Rat(10, 3), 10)
self.assertEqual(Rat(10, 5) * 0.5, 1.0)
self.assertEqual(0.5 * Rat(10, 5), 1.0)
def test_div(self):
self.assertEqual(Rat(10, 3) / Rat(5, 7), Rat(14, 3))
self.assertEqual(Rat(10, 3) / 3, Rat(10, 9))
self.assertEqual(2 / Rat(5), Rat(2, 5))
self.assertEqual(3.0 * Rat(1, 2), 1.5)
self.assertEqual(Rat(1, 2) * 3.0, 1.5)
def test_floordiv(self):
    """Floor division between Rats and between Rats and ints."""
    cases = [
        (Rat(10) // Rat(4), 2),
        (Rat(10, 3) // Rat(4, 3), 2),
        (Rat(10) // 4, 2),
        (10 // Rat(4), 2),
    ]
    for actual, expected in cases:
        self.assertEqual(actual, expected)
def test_eq(self):
    """Equality across Rat, int and float representations of a value."""
    pairs = [
        (Rat(10), Rat(20, 2)),
        (Rat(10), 10),
        (10, Rat(10)),
        (Rat(10), 10.0),
        (10.0, Rat(10)),
    ]
    for lhs, rhs in pairs:
        self.assertEqual(lhs, rhs)
def test_true_div(self):
    """True division for Rats, plus a check that 1/2 evaluates to 0.5."""
    cases = [
        (Rat(10, 3) / Rat(5, 7), Rat(14, 3)),
        (Rat(10, 3) / 3, Rat(10, 9)),
        (2 / Rat(5), Rat(2, 5)),
        (3.0 * Rat(1, 2), 1.5),
        (Rat(1, 2) * 3.0, 1.5),
    ]
    for actual, expected in cases:
        self.assertEqual(actual, expected)
    # Compiled at runtime so the literal division semantics are exercised.
    self.assertEqual(eval('1/2'), 0.5)
    # XXX Ran out of steam; TO DO: divmod, div, future division
class OperationLogger:
    """Mixin that records operations through a caller-supplied callable."""

    def __init__(self, logger):
        # ``logger`` is any callable, e.g. a list's ``append`` method.
        self.logger = logger

    def log_operation(self, *args):
        """Forward ``*args`` to the configured logger callable."""
        self.logger(*args)
def op_sequence(op, *classes):
    """Return the sequence of operations that results from applying
    the operation `op` to instances of the given classes."""
    log = []
    instances = [cls(log.append) for cls in classes]
    try:
        op(*instances)
    except TypeError:
        # An unsupported operation still leaves a record of which
        # methods were consulted before Python gave up.
        pass
    return log
class A(OperationLogger):
    """Comparison operand that logs every rich-comparison call and always
    returns NotImplemented, so Python falls through to the other operand."""
    def __eq__(self, other):
        self.log_operation('A.__eq__')
        return NotImplemented
    def __le__(self, other):
        self.log_operation('A.__le__')
        return NotImplemented
    def __ge__(self, other):
        self.log_operation('A.__ge__')
        return NotImplemented
class B(OperationLogger):
    """Second logging operand, unrelated to A, used to observe the
    dispatch order between two distinct classes."""
    def __eq__(self, other):
        self.log_operation('B.__eq__')
        return NotImplemented
    def __le__(self, other):
        self.log_operation('B.__le__')
        return NotImplemented
    def __ge__(self, other):
        self.log_operation('B.__ge__')
        return NotImplemented
class C(B):
    """Subclass of B: lets tests observe that the subclass's comparison
    methods are consulted before the base class's."""
    def __eq__(self, other):
        self.log_operation('C.__eq__')
        return NotImplemented
    def __le__(self, other):
        self.log_operation('C.__le__')
        return NotImplemented
    def __ge__(self, other):
        self.log_operation('C.__ge__')
        return NotImplemented
class OperationOrderTests(unittest.TestCase):
    """Verify which operand's rich-comparison method Python consults
    first for ``==`` and ``<=``."""

    def test_comparison_orders(self):
        eq_table = [
            ((A, A), ['A.__eq__', 'A.__eq__']),
            ((A, B), ['A.__eq__', 'B.__eq__']),
            ((B, A), ['B.__eq__', 'A.__eq__']),
            # C is a subclass of B, so C's method is tried first on
            # either side of the operator.
            ((B, C), ['C.__eq__', 'B.__eq__']),
            ((C, B), ['C.__eq__', 'B.__eq__']),
        ]
        for (lhs, rhs), expected in eq_table:
            self.assertEqual(op_sequence(eq, lhs, rhs), expected)
        le_table = [
            ((A, A), ['A.__le__', 'A.__ge__']),
            ((A, B), ['A.__le__', 'B.__ge__']),
            ((B, A), ['B.__le__', 'A.__ge__']),
            ((B, C), ['C.__ge__', 'B.__le__']),
            ((C, B), ['C.__le__', 'B.__ge__']),
        ]
        for (lhs, rhs), expected in le_table:
            self.assertEqual(op_sequence(le, lhs, rhs), expected)
def test_main():
    """Entry point used by Python's regression-test driver."""
    support.run_unittest(RatTestCase, OperationOrderTests)

if __name__ == "__main__":
    test_main()
| lgpl-3.0 |
trishnaguha/pywikibot-core | pywikibot/tools/djvu.py | 6 | 3504 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""Wrapper around djvulibre to access djvu files properties and content."""
#
# (C) Pywikibot team, 2015
#
# Distributed under the terms of the MIT license.
#
from __future__ import unicode_literals
__version__ = '$Id$'
import os.path
import subprocess
import pywikibot
class DjVuFile(object):

    """Wrapper around djvulibre to access djvu files properties and content.

    Perform file existance checks.

    Control characters in djvu text-layer are converted for convenience
    (see http://djvu.sourceforge.net/doc/man/djvused.html for control chars
    details).
    """

    def __init__(self, file_djvu):
        """
        Constructor.

        @param file_djvu: filename (including path) to djvu file
        @type file_djvu: string/unicode
        """
        file_djvu = os.path.expanduser(file_djvu)
        # Check file exists and has read permissions.
        with open(file_djvu):
            self.file_djvu = file_djvu

    def _run_command(self, args):
        """Run a djvulibre tool and return its raw stdout bytes.

        Shared by number_of_images(), has_text() and get_page(), which
        previously triplicated this Popen/communicate/error pattern.
        A non-zero exit status is logged but, as before, not raised.

        @param args: argv list for the external tool
        @return: stdout bytes of the subprocess
        """
        dp = subprocess.Popen(args,
                              stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        (stdoutdata, stderrdata) = dp.communicate()
        if dp.returncode != 0:
            pywikibot.error('djvulibre library error!\n%s' % stderrdata)
        return stdoutdata

    def number_of_images(self):
        """Return the (cached) number of images in the djvu file."""
        if not hasattr(self, '_image_count'):
            stdoutdata = self._run_command(
                ['djvused', '-e', 'n', self.file_djvu])
            self._image_count = int(stdoutdata)
        return self._image_count

    def has_text(self):
        """Test if the djvu file has a text-layer."""
        if not hasattr(self, '_has_text'):
            stdoutdata = self._run_command(['djvudump', self.file_djvu])
            txt = stdoutdata.decode('utf-8')
            # 'TXTz' chunks in the dump mark a compressed text layer.
            self._has_text = 'TXTz' in txt
        return self._has_text

    def _remove_control_chars(self, data):
        """Remove djvu format control characters.

        See http://djvu.sourceforge.net/doc/man/djvused.html for control chars.

        @param data: utf-8 encoded bytes from djvutxt
        @return: cleaned text (unicode)
        """
        txt = data.decode('utf-8')
        # vertical tab (\013=\x0b): remove
        txt = txt.replace('\x0b', '')
        # group (\035=\x1d) separator: replace with \n
        txt = txt.replace('\x1d', '\n')
        # unit separator (\037=\x1f): replace with \n
        txt = txt.replace('\x1f', '\n')
        # feed char (\f=\x0c), \n and trailing spaces: strip
        txt = txt.strip('\x0c\n ')
        return txt

    def get_page(self, n):
        """Get page n for djvu file.

        @raise ValueError: no text layer, or n outside the page range
        """
        if not self.has_text():
            raise ValueError('Djvu file %s has no text layer.' % self.file_djvu)
        if not (1 <= n <= self.number_of_images()):
            raise ValueError('Requested page number %d is not in file %s'
                             ' page range [%d-%d]'
                             % (n, self.file_djvu, 1, self.number_of_images()))
        stdoutdata = self._run_command(
            ['djvutxt', '--page=%d' % n, self.file_djvu])
        return self._remove_control_chars(stdoutdata)
| mit |
Lunatixz/script.skin.helper.service | resources/lib/musicbrainzngs/caa.py | 1 | 6079 | # This file is part of the musicbrainzngs library
# Copyright (C) Alastair Porter, Wieland Hoffmann, and others
# This file is distributed under a BSD-2-Clause type license.
# See the COPYING file for more information.
__all__ = [
'set_caa_hostname', 'get_image_list', 'get_release_group_image_list',
'get_release_group_image_front', 'get_image_front', 'get_image_back',
'get_image'
]
import json
import compat
import musicbrainz
hostname = "coverartarchive.org"
def set_caa_hostname(new_hostname):
    """Set the base hostname for Cover Art Archive requests.

    Defaults to 'coverartarchive.org'."""
    # Module-level state: every subsequent _caa_request uses this host.
    global hostname
    hostname = new_hostname
def _caa_request(mbid, imageid=None, size=None, entitytype="release"):
    """ Make a CAA request.

    :param imageid: ``front``, ``back`` or a number from the listing obtained
                    with :meth:`get_image_list`.
    :type imageid: str

    :param size: 250, 500
    :type size: str or None

    :param entitytype: ``release`` or ``release-group``
    :type entitytype: str
    """
    # Request path: /<entitytype>/<mbid>[/<imageid>[-<size>]].
    # ``size`` alone (without an imageid) is ignored, as before.
    segments = [entitytype, mbid]
    if imageid and size:
        segments.append("%s-%s" % (imageid, size))
    elif imageid:
        segments.append(imageid)
    url = compat.urlunparse((
        'http',
        hostname,
        '/%s' % '/'.join(segments),
        '',
        '',
        ''
    ))
    musicbrainz._log.debug("GET request for %s" % (url, ))

    # Plain HTTP opener; the CAA needs no authentication handlers.
    opener = compat.build_opener(compat.HTTPHandler(debuglevel=0))

    req = musicbrainz._MusicbrainzHttpRequest("GET", url, None)
    # Useragent isn't needed for CAA, but we'll add it if it exists
    if musicbrainz._useragent != "":
        req.add_header('User-Agent', musicbrainz._useragent)
        musicbrainz._log.debug("requesting with UA %s" % musicbrainz._useragent)

    resp = musicbrainz._safe_read(opener, req, None)

    # TODO: The content type declared by the CAA for JSON files is
    # 'applicaiton/octet-stream'. This is not useful to detect whether the
    # content is JSON, so default to decoding JSON if no imageid was supplied.
    # http://tickets.musicbrainz.org/browse/CAA-75
    if imageid:
        # An image was requested: hand the binary payload back untouched.
        return resp
    # A listing was requested: the payload is JSON.
    return json.loads(resp)
def get_image_list(releaseid):
    """Get the list of cover art associated with a release.

    The return value is the deserialized response of the `JSON listing
    <http://musicbrainz.org/doc/Cover_Art_Archive/API#.2Frelease.2F.7Bmbid.7D.2F>`_
    returned by the Cover Art Archive API.

    If an error occurs then a :class:`~musicbrainzngs.ResponseError` will
    be raised with one of the following HTTP codes:

    * 400: `Releaseid` is not a valid UUID
    * 404: No release exists with an MBID of `releaseid`
    * 503: Ratelimit exceeded
    """
    # No imageid argument, so _caa_request returns the parsed JSON listing.
    return _caa_request(releaseid)
def get_release_group_image_list(releasegroupid):
    """Get the list of cover art associated with a release group.

    The return value is the deserialized response of the `JSON listing
    <http://musicbrainz.org/doc/Cover_Art_Archive/API#.2Frelease-group.2F.7Bmbid.7D.2F>`_
    returned by the Cover Art Archive API.

    If an error occurs then a :class:`~musicbrainzngs.ResponseError` will
    be raised with one of the following HTTP codes:

    * 400: `Releaseid` is not a valid UUID
    * 404: No release exists with an MBID of `releaseid`
    * 503: Ratelimit exceeded
    """
    # Same as get_image_list, but against the release-group endpoint.
    return _caa_request(releasegroupid, entitytype="release-group")
def get_release_group_image_front(releasegroupid, size=None):
    """Download the front cover art for a release group.
    The `size` argument and the possible error conditions are the same as for
    :meth:`get_image`.
    """
    return get_image(releasegroupid, "front", size=size,
                     entitytype="release-group")
def get_image_front(releaseid, size=None):
    """Download the front cover art for a release.
    The `size` argument and the possible error conditions are the same as for
    :meth:`get_image`.
    """
    return get_image(releaseid, "front", size=size)
def get_image_back(releaseid, size=None):
    """Download the back cover art for a release.
    The `size` argument and the possible error conditions are the same as for
    :meth:`get_image`.
    """
    return get_image(releaseid, "back", size=size)
def get_image(mbid, coverid, size=None, entitytype="release"):
    """Download cover art for a release. The coverart file to download
    is specified by the `coverid` argument.

    If `size` is not specified, download the largest copy present, which can be
    very large.

    If an error occurs then a :class:`~musicbrainzngs.ResponseError`
    will be raised with one of the following HTTP codes:

    * 400: `Releaseid` is not a valid UUID or `coverid` is invalid
    * 404: No release exists with an MBID of `releaseid`
    * 503: Ratelimit exceeded

    :param coverid: ``front``, ``back`` or a number from the listing obtained with
                    :meth:`get_image_list`
    :type coverid: int or str

    :param size: 250, 500 or None. If it is None, the largest available picture
                 will be downloaded. If the image originally uploaded to the
                 Cover Art Archive was smaller than the requested size, only
                 the original image will be returned.
    :type size: str or None

    :param entitytype: The type of entity for which to download the cover art.
                       This is either ``release`` or ``release-group``.
    :type entitytype: str
    :return: The binary image data
    :type: str
    """
    # Normalize integer arguments to the string form the API expects.
    coverid = "%d" % (coverid, ) if isinstance(coverid, int) else coverid
    size = "%d" % (size, ) if isinstance(size, int) else size
    return _caa_request(mbid, coverid, size=size, entitytype=entitytype)
| gpl-2.0 |
rdkls/django-audit-mongodb | tests/fixtures/sampledjango/manage.py | 1 | 1321 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010, 2degrees Limited <egoddard@tech.2degreesnetwork.com>.
# All Rights Reserved.
#
# This file is part of djangoaudit <https://launchpad.net/django-audit/>,
# which is subject to the provisions of the BSD at
# <http://dev.2degreesnetwork.com/p/2degrees-license.html>. A copy of the
# license should accompany this distribution. THIS SOFTWARE IS PROVIDED "AS IS"
# AND ANY AND ALL EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT
# NOT LIMITED TO, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST
# INFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE.
#
##############################################################################
from django.core.management import execute_manager
# Import the project's settings module and hand it to Django's legacy
# management entry point; abort with guidance if settings.py is missing.
try:
    import settings  # Assumed to be in the same directory.
except ImportError:
    import sys
    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
    sys.exit(1)

if __name__ == "__main__":
    execute_manager(settings)
| bsd-3-clause |
vipul-sharma20/oh-mainline | vendor/packages/whoosh/src/whoosh/support/testing.py | 17 | 4771 | # Copyright 2007 Matt Chaput. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY MATT CHAPUT ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL MATT CHAPUT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are
# those of the authors and should not be interpreted as representing official
# policies, either expressed or implied, of Matt Chaput.
import os.path, shutil, sys, random
from functools import wraps
from whoosh.filedb.filestore import FileStorage
class TempStorage(object):
    """Context manager that yields a FileStorage backed by a throwaway
    directory, removed again on exit unless ``keepdir`` is set."""

    def __init__(self, basename=None, parentdir=".", suppress=frozenset(),
                 keepdir=False):
        if not basename:
            # Random hex name when the caller doesn't supply one.
            basename = hex(random.randint(0, 1000000000))[2:]
        self.basename = basename
        self.parentdir = parentdir
        self.suppress = suppress  # exception types to swallow on exit
        self.keepdir = keepdir
        self.dir = None

    def _mkdir(self):
        # Storage lives under <parentdir>/tmp/<basename>.tmpix
        self.dir = os.path.join(self.parentdir, "tmp",
                                self.basename + ".tmpix")
        if not os.path.exists(self.dir):
            os.makedirs(self.dir)

    def __enter__(self):
        self._mkdir()
        return FileStorage(self.dir)

    def __exit__(self, exc_type, exc_val, exc_tb):
        if not self.keepdir:
            try:
                shutil.rmtree(self.dir)
            except OSError:
                # sys.exc_info keeps this py2/py3-compatible.
                e = sys.exc_info()[1]
                print("Can't remove temp dir: " + str(e))
        if exc_type is not None:
            if self.keepdir:
                print("Temp dir=", self.dir)
            if exc_type not in self.suppress:
                # Propagate the exception; suppressed types fall through
                # and are swallowed by the with-statement machinery.
                return False
class TempIndex(TempStorage):
    """TempStorage variant whose ``with`` block yields a freshly created
    index built from ``schema`` instead of the bare storage."""

    def __init__(self, schema, ixname='', **kwargs):
        TempStorage.__init__(self, basename=ixname, **kwargs)
        self.schema = schema

    def __enter__(self):
        storage = TempStorage.__enter__(self)
        return storage.create_index(self.schema, indexname=self.basename)
def skip_if(cond):
    """A Nose test decorator that skips the decorated test if the given
    function returns True at runtime.
    """

    def decorating_function(testfn):
        @wraps(testfn)
        def wrapper(*args, **kwargs):
            if not cond():
                return testfn(*args, **kwargs)
            # Imported lazily so nose is only required when a skip fires.
            from nose.plugins.skip import SkipTest
            raise SkipTest
        return wrapper

    return decorating_function
def skip_if_unavailable(modulename):
    """A Nose test decorator that only runs the decorated test if a module
    can be imported::

        @skip_if_unavailable("multiprocessing")
        def test_mp():

    Raises ``SkipTest`` if the module cannot be imported.
    """

    def _module_missing():
        # True (-> skip) exactly when the import fails.
        try:
            __import__(modulename)
        except ImportError:
            return True
        return False

    return skip_if(_module_missing)
def is_abstract_method(attr):
    """Returns True if the given object has __isabstractmethod__ == True.
    """
    # getattr with a False default collapses the hasattr/getattr pair.
    return getattr(attr, "__isabstractmethod__", False)
def check_abstract_methods(base, subclass):
    """Raises AssertionError if ``subclass`` does not override a method on
    ``base`` that is marked as an abstract method.
    """
    for attrname in dir(base):
        # Private / dunder attributes are never required overrides.
        if attrname.startswith("_"):
            continue
        attr = getattr(base, attrname)
        if is_abstract_method(attr):
            oattr = getattr(subclass, attrname)
            if is_abstract_method(oattr):
                # Raise AssertionError as the docstring promises (the code
                # previously raised a bare Exception, contradicting it).
                raise AssertionError("%s.%s not overridden"
                                     % (subclass.__name__, attrname))
| agpl-3.0 |
Panos-Bletsos/spark-cost-model-optimizer | python/pyspark/sql/session.py | 11 | 24874 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
import sys
import warnings
from functools import reduce
from threading import RLock
if sys.version >= '3':
basestring = unicode = str
else:
from itertools import imap as map
from pyspark import since
from pyspark.rdd import RDD, ignore_unicode_prefix
from pyspark.sql.catalog import Catalog
from pyspark.sql.conf import RuntimeConfig
from pyspark.sql.dataframe import DataFrame
from pyspark.sql.readwriter import DataFrameReader
from pyspark.sql.streaming import DataStreamReader
from pyspark.sql.types import Row, DataType, StringType, StructType, _verify_type, \
_infer_schema, _has_nulltype, _merge_type, _create_converter, _parse_datatype_string
from pyspark.sql.utils import install_exception_handler
__all__ = ["SparkSession"]
def _monkey_patch_RDD(sparkSession):
    # Attach a ``toDF`` convenience method to the RDD class, bound to the
    # given session via closure; a later session re-patches it.
    def toDF(self, schema=None, sampleRatio=None):
        """
        Converts current :class:`RDD` into a :class:`DataFrame`

        This is a shorthand for ``spark.createDataFrame(rdd, schema, sampleRatio)``

        :param schema: a :class:`pyspark.sql.types.StructType` or list of names of columns
        :param samplingRatio: the sample ratio of rows used for inferring
        :return: a DataFrame

        >>> rdd.toDF().collect()
        [Row(name=u'Alice', age=1)]
        """
        return sparkSession.createDataFrame(self, schema, sampleRatio)

    RDD.toDF = toDF
class SparkSession(object):
"""The entry point to programming Spark with the Dataset and DataFrame API.
A SparkSession can be used create :class:`DataFrame`, register :class:`DataFrame` as
tables, execute SQL over tables, cache tables, and read parquet files.
To create a SparkSession, use the following builder pattern:
>>> spark = SparkSession.builder \\
... .master("local") \\
... .appName("Word Count") \\
... .config("spark.some.config.option", "some-value") \\
... .getOrCreate()
"""
class Builder(object):
    """Builder for :class:`SparkSession`.
    """

    # Class-level state: shared by every Builder reached through
    # ``SparkSession.builder``; the lock guards ``_options`` mutation.
    _lock = RLock()
    _options = {}

    @since(2.0)
    def config(self, key=None, value=None, conf=None):
        """Sets a config option. Options set using this method are automatically propagated to
        both :class:`SparkConf` and :class:`SparkSession`'s own configuration.

        For an existing SparkConf, use `conf` parameter.

        >>> from pyspark.conf import SparkConf
        >>> SparkSession.builder.config(conf=SparkConf())
        <pyspark.sql.session...

        For a (key, value) pair, you can omit parameter names.

        >>> SparkSession.builder.config("spark.some.config.option", "some-value")
        <pyspark.sql.session...

        :param key: a key name string for configuration property
        :param value: a value for configuration property
        :param conf: an instance of :class:`SparkConf`
        """
        with self._lock:
            if conf is None:
                self._options[key] = str(value)
            else:
                # Copy every entry of the supplied SparkConf.
                for (k, v) in conf.getAll():
                    self._options[k] = v
            return self

    @since(2.0)
    def master(self, master):
        """Sets the Spark master URL to connect to, such as "local" to run locally, "local[4]"
        to run locally with 4 cores, or "spark://master:7077" to run on a Spark standalone
        cluster.

        :param master: a url for spark master
        """
        return self.config("spark.master", master)

    @since(2.0)
    def appName(self, name):
        """Sets a name for the application, which will be shown in the Spark web UI.

        If no application name is set, a randomly generated name will be used.

        :param name: an application name
        """
        return self.config("spark.app.name", name)

    @since(2.0)
    def enableHiveSupport(self):
        """Enables Hive support, including connectivity to a persistent Hive metastore, support
        for Hive serdes, and Hive user-defined functions.
        """
        return self.config("spark.sql.catalogImplementation", "hive")

    @since(2.0)
    def getOrCreate(self):
        """Gets an existing :class:`SparkSession` or, if there is no existing one, creates a
        new one based on the options set in this builder.

        This method first checks whether there is a valid global default SparkSession, and if
        yes, return that one. If no valid global default SparkSession exists, the method
        creates a new SparkSession and assigns the newly created SparkSession as the global
        default.

        >>> s1 = SparkSession.builder.config("k1", "v1").getOrCreate()
        >>> s1.conf.get("k1") == s1.sparkContext.getConf().get("k1") == "v1"
        True

        In case an existing SparkSession is returned, the config options specified
        in this builder will be applied to the existing SparkSession.

        >>> s2 = SparkSession.builder.config("k2", "v2").getOrCreate()
        >>> s1.conf.get("k1") == s2.conf.get("k1")
        True
        >>> s1.conf.get("k2") == s2.conf.get("k2")
        True
        """
        with self._lock:
            from pyspark.context import SparkContext
            from pyspark.conf import SparkConf
            session = SparkSession._instantiatedContext
            if session is None:
                # No default session yet: build a SparkConf from the
                # accumulated options and create (or reuse) a SparkContext.
                sparkConf = SparkConf()
                for key, value in self._options.items():
                    sparkConf.set(key, value)
                sc = SparkContext.getOrCreate(sparkConf)
                # This SparkContext may be an existing one.
                for key, value in self._options.items():
                    # we need to propagate the confs
                    # before we create the SparkSession. Otherwise, confs like
                    # warehouse path and metastore url will not be set correctly (
                    # these confs cannot be changed once the SparkSession is created).
                    sc._conf.set(key, value)
                session = SparkSession(sc)
            # Apply the builder options to the (new or existing) session,
            # both on the JVM side and on the Python SparkConf.
            for key, value in self._options.items():
                session._jsparkSession.sessionState().conf().setConfString(key, value)
            for key, value in self._options.items():
                session.sparkContext._conf.set(key, value)
            return session
# Shared Builder instance exposed as ``SparkSession.builder``.
builder = Builder()

# Process-wide default session; set by the first SparkSession constructed.
_instantiatedContext = None
@ignore_unicode_prefix
def __init__(self, sparkContext, jsparkSession=None):
    """Creates a new SparkSession.

    :param sparkContext: the :class:`SparkContext` backing this session.
    :param jsparkSession: an optional existing JVM SparkSession to wrap;
        a new one is created when omitted.

    >>> from datetime import datetime
    >>> spark = SparkSession(sc)
    >>> allTypes = sc.parallelize([Row(i=1, s="string", d=1.0, l=1,
    ...     b=True, list=[1, 2, 3], dict={"s": 0}, row=Row(a=1),
    ...     time=datetime(2014, 8, 1, 14, 1, 5))])
    >>> df = allTypes.toDF()
    >>> df.createOrReplaceTempView("allTypes")
    >>> spark.sql('select i+1, d+1, not b, list[1], dict["s"], time, row.a '
    ...            'from allTypes where b and i > 0').collect()
    [Row((i + CAST(1 AS BIGINT))=2, (d + CAST(1 AS DOUBLE))=2.0, (NOT b)=False, list[1]=2, \
dict[s]=0, time=datetime.datetime(2014, 8, 1, 14, 1, 5), a=1)]
    >>> df.rdd.map(lambda x: (x.i, x.s, x.d, x.l, x.b, x.time, x.row.a, x.list)).collect()
    [(1, u'string', 1.0, 1, True, datetime.datetime(2014, 8, 1, 14, 1, 5), 1, [1, 2, 3])]
    """
    from pyspark.sql.context import SQLContext
    self._sc = sparkContext
    self._jsc = self._sc._jsc
    self._jvm = self._sc._jvm
    if jsparkSession is None:
        jsparkSession = self._jvm.SparkSession(self._jsc.sc())
    self._jsparkSession = jsparkSession
    # _jwrapped is the JVM SQLContext; _wrapped its Python counterpart.
    self._jwrapped = self._jsparkSession.sqlContext()
    self._wrapped = SQLContext(self._sc, self, self._jwrapped)
    _monkey_patch_RDD(self)
    install_exception_handler()
    # The first session constructed becomes the process-wide default.
    if SparkSession._instantiatedContext is None:
        SparkSession._instantiatedContext = self
@since(2.0)
def newSession(self):
    """
    Returns a new SparkSession as new session, that has separate SQLConf,
    registered temporary views and UDFs, but shared SparkContext and
    table cache.
    """
    # Same Python SparkContext, fresh JVM session state.
    return self.__class__(self._sc, self._jsparkSession.newSession())
@property
@since(2.0)
def sparkContext(self):
    """Returns the underlying :class:`SparkContext` backing this session."""
    return self._sc
@property
@since(2.0)
def version(self):
    """The version of Spark on which this application is running."""
    # Delegates to the JVM session so the reported version is Spark's own.
    return self._jsparkSession.version()
@property
@since(2.0)
def conf(self):
    """Runtime configuration interface for Spark.

    This is the interface through which the user can get and set all Spark and Hadoop
    configurations that are relevant to Spark SQL. When getting the value of a config,
    this defaults to the value set in the underlying :class:`SparkContext`, if any.
    """
    # Lazily build and cache the wrapper on first access.
    try:
        return self._conf
    except AttributeError:
        self._conf = RuntimeConfig(self._jsparkSession.conf())
        return self._conf
@property
@since(2.0)
def catalog(self):
    """Interface through which the user may create, drop, alter or query underlying
    databases, tables, functions etc.
    """
    # Lazily build and cache the Catalog wrapper on first access.
    try:
        return self._catalog
    except AttributeError:
        self._catalog = Catalog(self)
        return self._catalog
@property
@since(2.0)
def udf(self):
    """Returns a :class:`UDFRegistration` for UDF registration.

    :return: :class:`UDFRegistration`
    """
    # Local import; presumably avoids a circular import with
    # pyspark.sql.context — confirm before moving to module level.
    from pyspark.sql.context import UDFRegistration
    return UDFRegistration(self._wrapped)
@since(2.0)
def range(self, start, end=None, step=1, numPartitions=None):
    """
    Create a :class:`DataFrame` with single :class:`pyspark.sql.types.LongType` column named
    ``id``, containing elements in a range from ``start`` to ``end`` (exclusive) with
    step value ``step``.

    :param start: the start value
    :param end: the end value (exclusive)
    :param step: the incremental step (default: 1)
    :param numPartitions: the number of partitions of the DataFrame
    :return: :class:`DataFrame`

    >>> spark.range(1, 7, 2).collect()
    [Row(id=1), Row(id=3), Row(id=5)]

    If only one argument is specified, it will be used as the end value.

    >>> spark.range(3).collect()
    [Row(id=0), Row(id=1), Row(id=2)]
    """
    if numPartitions is None:
        numPartitions = self._sc.defaultParallelism
    if end is None:
        # Single-argument form: range(n) means range(0, n).
        start, end = 0, start
    jdf = self._jsparkSession.range(int(start), int(end), int(step),
                                    int(numPartitions))
    return DataFrame(jdf, self._wrapped)
def _inferSchemaFromList(self, data):
    """
    Infer schema from list of Row or tuple.

    :param data: list of Row or tuple
    :return: :class:`pyspark.sql.types.StructType`
    """
    if not data:
        raise ValueError("can not infer schema from empty dataset")
    if type(data[0]) is dict:
        warnings.warn("inferring schema from dict is deprecated,"
                      "please use pyspark.sql.Row instead")
    # Merge the per-row schemas pairwise across the whole dataset.
    schema = _infer_schema(data[0])
    for row in data[1:]:
        schema = _merge_type(schema, _infer_schema(row))
    if _has_nulltype(schema):
        raise ValueError("Some of types cannot be determined after inferring")
    return schema
def _inferSchema(self, rdd, samplingRatio=None):
    """
    Infer schema from an RDD of Row or tuple.

    :param rdd: an RDD of Row or tuple
    :param samplingRatio: sampling ratio, or no sampling (default)
    :return: :class:`pyspark.sql.types.StructType`
    :raises ValueError: if the first row is empty, or types stay undetermined
    """
    first = rdd.first()
    if not first:
        raise ValueError("The first row in RDD is empty, "
                         "can not infer schema")
    if type(first) is dict:
        warnings.warn("Using RDD of dict to inferSchema is deprecated. "
                      "Use pyspark.sql.Row instead")

    if samplingRatio is None:
        # No sampling: infer from the first row, then scan up to 100 rows
        # to resolve any columns still typed as NullType.
        schema = _infer_schema(first)
        if _has_nulltype(schema):
            for row in rdd.take(100)[1:]:
                schema = _merge_type(schema, _infer_schema(row))
                if not _has_nulltype(schema):
                    break
            else:
                raise ValueError("Some of types cannot be determined by the "
                                 "first 100 rows, please try again with sampling")
    else:
        # Sampling path: ratios >= 0.99 just use the whole RDD.
        if samplingRatio < 0.99:
            rdd = rdd.sample(False, float(samplingRatio))
        schema = rdd.map(_infer_schema).reduce(_merge_type)
    return schema
def _createFromRDD(self, rdd, schema, samplingRatio):
    """
    Create an RDD for DataFrame from an existing RDD, returns the RDD and schema.

    :param rdd: source RDD of rows
    :param schema: None, a list/tuple of column names, or a StructType
    :param samplingRatio: forwarded to :meth:`_inferSchema` when inferring
    """
    if schema is None or isinstance(schema, (list, tuple)):
        # Infer a StructType; a list/tuple schema only renames columns.
        struct = self._inferSchema(rdd, samplingRatio)
        converter = _create_converter(struct)
        rdd = rdd.map(converter)
        if isinstance(schema, (list, tuple)):
            for i, name in enumerate(schema):
                struct.fields[i].name = name
                struct.names[i] = name
        schema = struct

    elif not isinstance(schema, StructType):
        raise TypeError("schema should be StructType or list or None, but got: %s" % schema)

    # convert python objects to sql data
    rdd = rdd.map(schema.toInternal)
    return rdd, schema
def _createFromLocal(self, data, schema):
    """
    Create an RDD for DataFrame from a list or pandas.DataFrame, returns
    the RDD and schema.

    :param data: local rows (any iterable; materialized to a list)
    :param schema: None, a list/tuple of column names, or a StructType
    """
    # make sure data could consumed multiple times
    if not isinstance(data, list):
        data = list(data)

    if schema is None or isinstance(schema, (list, tuple)):
        # Infer a StructType; a list/tuple schema only renames columns.
        struct = self._inferSchemaFromList(data)
        converter = _create_converter(struct)
        data = map(converter, data)
        if isinstance(schema, (list, tuple)):
            for i, name in enumerate(schema):
                struct.fields[i].name = name
                struct.names[i] = name
        schema = struct

    elif not isinstance(schema, StructType):
        raise TypeError("schema should be StructType or list or None, but got: %s" % schema)

    # convert python objects to sql data
    data = [schema.toInternal(row) for row in data]
    return self._sc.parallelize(data), schema
@since(2.0)
@ignore_unicode_prefix
def createDataFrame(self, data, schema=None, samplingRatio=None, verifySchema=True):
    """
    Creates a :class:`DataFrame` from an :class:`RDD`, a list or a :class:`pandas.DataFrame`.

    When ``schema`` is a list of column names, the type of each column
    will be inferred from ``data``.

    When ``schema`` is ``None``, it will try to infer the schema (column names and types)
    from ``data``, which should be an RDD of :class:`Row`,
    or :class:`namedtuple`, or :class:`dict`.

    When ``schema`` is :class:`pyspark.sql.types.DataType` or a datatype string, it must match
    the real data, or an exception will be thrown at runtime. If the given schema is not
    :class:`pyspark.sql.types.StructType`, it will be wrapped into a
    :class:`pyspark.sql.types.StructType` as its only field, and the field name will be "value",
    each record will also be wrapped into a tuple, which can be converted to row later.

    If schema inference is needed, ``samplingRatio`` is used to determined the ratio of
    rows used for schema inference. The first row will be used if ``samplingRatio`` is ``None``.

    :param data: an RDD of any kind of SQL data representation(e.g. row, tuple, int, boolean,
        etc.), or :class:`list`, or :class:`pandas.DataFrame`.
    :param schema: a :class:`pyspark.sql.types.DataType` or a datatype string or a list of
        column names, default is ``None``.  The data type string format equals to
        :class:`pyspark.sql.types.DataType.simpleString`, except that top level struct type can
        omit the ``struct<>`` and atomic types use ``typeName()`` as their format, e.g. use
        ``byte`` instead of ``tinyint`` for :class:`pyspark.sql.types.ByteType`. We can also use
        ``int`` as a short name for ``IntegerType``.
    :param samplingRatio: the sample ratio of rows used for inferring
    :param verifySchema: verify data types of every row against schema.
    :return: :class:`DataFrame`

    .. versionchanged:: 2.1
       Added verifySchema.

    >>> l = [('Alice', 1)]
    >>> spark.createDataFrame(l).collect()
    [Row(_1=u'Alice', _2=1)]
    >>> spark.createDataFrame(l, ['name', 'age']).collect()
    [Row(name=u'Alice', age=1)]

    >>> d = [{'name': 'Alice', 'age': 1}]
    >>> spark.createDataFrame(d).collect()
    [Row(age=1, name=u'Alice')]

    >>> rdd = sc.parallelize(l)
    >>> spark.createDataFrame(rdd).collect()
    [Row(_1=u'Alice', _2=1)]
    >>> df = spark.createDataFrame(rdd, ['name', 'age'])
    >>> df.collect()
    [Row(name=u'Alice', age=1)]

    >>> from pyspark.sql import Row
    >>> Person = Row('name', 'age')
    >>> person = rdd.map(lambda r: Person(*r))
    >>> df2 = spark.createDataFrame(person)
    >>> df2.collect()
    [Row(name=u'Alice', age=1)]

    >>> from pyspark.sql.types import *
    >>> schema = StructType([
    ...    StructField("name", StringType(), True),
    ...    StructField("age", IntegerType(), True)])
    >>> df3 = spark.createDataFrame(rdd, schema)
    >>> df3.collect()
    [Row(name=u'Alice', age=1)]

    >>> spark.createDataFrame(df.toPandas()).collect()  # doctest: +SKIP
    [Row(name=u'Alice', age=1)]
    >>> spark.createDataFrame(pandas.DataFrame([[1, 2]])).collect()  # doctest: +SKIP
    [Row(0=1, 1=2)]

    >>> spark.createDataFrame(rdd, "a: string, b: int").collect()
    [Row(a=u'Alice', b=1)]
    >>> rdd = rdd.map(lambda row: row[1])
    >>> spark.createDataFrame(rdd, "int").collect()
    [Row(value=1)]
    >>> spark.createDataFrame(rdd, "boolean").collect() # doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
        ...
    Py4JJavaError: ...
    """
    if isinstance(data, DataFrame):
        raise TypeError("data is already a DataFrame")

    # A datatype string (e.g. "a: string, b: int") is parsed into a DataType.
    if isinstance(schema, basestring):
        schema = _parse_datatype_string(schema)

    try:
        import pandas
        has_pandas = True
    except Exception:
        has_pandas = False
    if has_pandas and isinstance(data, pandas.DataFrame):
        # pandas input: column labels become names, rows become lists.
        if schema is None:
            schema = [str(x) for x in data.columns]
        data = [r.tolist() for r in data.to_records(index=False)]

    # With verifySchema=False, the per-row type check becomes a no-op.
    verify_func = _verify_type if verifySchema else lambda _, t: True
    if isinstance(schema, StructType):
        def prepare(obj):
            verify_func(obj, schema)
            return obj
    elif isinstance(schema, DataType):
        # Atomic type: wrap it into a single-field struct named "value",
        # and each record into a one-element tuple.
        dataType = schema
        schema = StructType().add("value", schema)

        def prepare(obj):
            verify_func(obj, dataType)
            return obj,
    else:
        if isinstance(schema, list):
            schema = [x.encode('utf-8') if not isinstance(x, str) else x for x in schema]
        prepare = lambda obj: obj

    if isinstance(data, RDD):
        rdd, schema = self._createFromRDD(data.map(prepare), schema, samplingRatio)
    else:
        rdd, schema = self._createFromLocal(map(prepare, data), schema)
    # Ship the rows to the JVM and apply the schema there.
    jrdd = self._jvm.SerDeUtil.toJavaArray(rdd._to_java_object_rdd())
    jdf = self._jsparkSession.applySchemaToPythonRDD(jrdd.rdd(), schema.json())
    df = DataFrame(jdf, self._wrapped)
    df._schema = schema
    return df
@ignore_unicode_prefix
@since(2.0)
def sql(self, sqlQuery):
    """Run a SQL query and return its result as a :class:`DataFrame`.

    :return: :class:`DataFrame`

    >>> df.createOrReplaceTempView("table1")
    >>> df2 = spark.sql("SELECT field1 AS f1, field2 as f2 from table1")
    >>> df2.collect()
    [Row(f1=1, f2=u'row1'), Row(f1=2, f2=u'row2'), Row(f1=3, f2=u'row3')]
    """
    # Parsing and execution happen on the JVM side; wrap the Java DataFrame
    # for use from Python.
    jdf = self._jsparkSession.sql(sqlQuery)
    return DataFrame(jdf, self._wrapped)
@since(2.0)
def table(self, tableName):
    """Look up *tableName* in the catalog and return it as a :class:`DataFrame`.

    :return: :class:`DataFrame`

    >>> df.createOrReplaceTempView("table1")
    >>> df2 = spark.table("table1")
    >>> sorted(df.collect()) == sorted(df2.collect())
    True
    """
    # Resolution is delegated to the JVM session; wrap the result.
    jdf = self._jsparkSession.table(tableName)
    return DataFrame(jdf, self._wrapped)
@property
@since(2.0)
def read(self):
    """
    Returns a :class:`DataFrameReader` that can be used to read data
    in as a :class:`DataFrame`.

    :return: :class:`DataFrameReader`
    """
    # A new reader bound to this session's wrapped SQLContext is built on
    # every property access.
    reader = DataFrameReader(self._wrapped)
    return reader
@property
@since(2.0)
def readStream(self):
    """
    Returns a :class:`DataStreamReader` that can be used to read data streams
    as a streaming :class:`DataFrame`.

    .. note:: Experimental.

    :return: :class:`DataStreamReader`
    """
    # Built fresh on each access, mirroring the `read` property above.
    stream_reader = DataStreamReader(self._wrapped)
    return stream_reader
@property
@since(2.0)
def streams(self):
    """Returns a :class:`StreamingQueryManager` that allows managing all the
    :class:`StreamingQuery` StreamingQueries active on `this` context.

    .. note:: Experimental.

    :return: :class:`StreamingQueryManager`
    """
    # Imported locally so the streaming module is only loaded when used.
    from pyspark.sql.streaming import StreamingQueryManager
    manager = StreamingQueryManager(self._jsparkSession.streams())
    return manager
@since(2.0)
def stop(self):
    """Stop the underlying :class:`SparkContext`.
    """
    self._sc.stop()
    # Clear the class-level cached context so the singleton is forgotten.
    SparkSession._instantiatedContext = None
@since(2.0)
def __enter__(self):
    """
    Enable 'with SparkSession.builder.(...).getOrCreate() as session: app' syntax.
    """
    # The session itself is the context-managed resource.
    return self
@since(2.0)
def __exit__(self, exc_type, exc_val, exc_tb):
    """
    Enable 'with SparkSession.builder.(...).getOrCreate() as session: app' syntax.

    Specifically stop the SparkSession on exit of the with block.
    """
    # Returning None (implicitly) lets any in-flight exception propagate.
    self.stop()
def _test():
    """Run this module's doctests against a local 4-core SparkContext."""
    import os
    import doctest
    from pyspark.context import SparkContext
    from pyspark.sql import Row
    import pyspark.sql.session

    # Doctests reference data files relative to the Spark install root.
    os.chdir(os.environ["SPARK_HOME"])

    globs = pyspark.sql.session.__dict__.copy()
    sc = SparkContext('local[4]', 'PythonTest')
    globs['sc'] = sc
    globs['spark'] = SparkSession(sc)
    rows = [Row(field1=1, field2="row1"),
            Row(field1=2, field2="row2"),
            Row(field1=3, field2="row3")]
    globs['rdd'] = rdd = sc.parallelize(rows)
    globs['df'] = rdd.toDF()
    failure_count, test_count = doctest.testmod(
        pyspark.sql.session, globs=globs,
        optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
    globs['sc'].stop()
    if failure_count:
        exit(-1)


if __name__ == "__main__":
    _test()
| apache-2.0 |
willthames/ansible | lib/ansible/modules/cloud/rackspace/rax_clb_nodes.py | 4 | 8779 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# This is a DOCUMENTATION stub specific to this module, it extends
# a documentation fragment located in ansible.utils.module_docs_fragments
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: rax_clb_nodes
short_description: add, modify and remove nodes from a Rackspace Cloud Load Balancer
description:
- Adds, modifies and removes nodes from a Rackspace Cloud Load Balancer
version_added: "1.4"
options:
address:
required: false
description:
- IP address or domain name of the node
condition:
required: false
choices:
- enabled
- disabled
- draining
description:
- Condition for the node, which determines its role within the load
balancer
load_balancer_id:
required: true
description:
- Load balancer id
node_id:
required: false
description:
- Node id
port:
required: false
description:
- Port number of the load balanced service on the node
state:
required: false
default: "present"
choices:
- present
- absent
description:
- Indicate desired state of the node
type:
required: false
choices:
- primary
- secondary
description:
- Type of node
wait:
required: false
default: "no"
choices:
- "yes"
- "no"
description:
- Wait for the load balancer to become active before returning
wait_timeout:
required: false
default: 30
description:
- How long to wait before giving up and returning an error
weight:
required: false
description:
- Weight of node
author: "Lukasz Kawczynski (@neuroid)"
extends_documentation_fragment: rackspace
'''
EXAMPLES = '''
# Add a new node to the load balancer
- local_action:
module: rax_clb_nodes
load_balancer_id: 71
address: 10.2.2.3
port: 80
condition: enabled
type: primary
wait: yes
credentials: /path/to/credentials
# Drain connections from a node
- local_action:
module: rax_clb_nodes
load_balancer_id: 71
node_id: 410
condition: draining
wait: yes
credentials: /path/to/credentials
# Remove a node from the load balancer
- local_action:
module: rax_clb_nodes
load_balancer_id: 71
node_id: 410
state: absent
wait: yes
credentials: /path/to/credentials
'''
try:
import pyrax
HAS_PYRAX = True
except ImportError:
HAS_PYRAX = False
def _activate_virtualenv(path):
path = os.path.expanduser(path)
activate_this = os.path.join(path, 'bin', 'activate_this.py')
with open(activate_this) as f:
code = compile(f.read(), activate_this, 'exec')
exec(code)
def _get_node(lb, node_id=None, address=None, port=None):
"""Return a matching node"""
for node in getattr(lb, 'nodes', []):
match_list = []
if node_id is not None:
match_list.append(getattr(node, 'id', None) == node_id)
if address is not None:
match_list.append(getattr(node, 'address', None) == address)
if port is not None:
match_list.append(getattr(node, 'port', None) == port)
if match_list and all(match_list):
return node
return None
def main():
    """Module entry point: converge the requested load-balancer node to the
    desired state (present/absent) and report via exit_json/fail_json."""
    argument_spec = rax_argument_spec()
    argument_spec.update(
        dict(
            address=dict(),
            condition=dict(choices=['enabled', 'disabled', 'draining']),
            load_balancer_id=dict(required=True, type='int'),
            node_id=dict(type='int'),
            port=dict(type='int'),
            state=dict(default='present', choices=['present', 'absent']),
            type=dict(choices=['primary', 'secondary']),
            virtualenv=dict(),
            wait=dict(default=False, type='bool'),
            wait_timeout=dict(default=30, type='int'),
            weight=dict(type='int'),
        )
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        required_together=rax_required_together(),
    )

    if not HAS_PYRAX:
        module.fail_json(msg='pyrax is required for this module')

    address = module.params['address']
    # The Rackspace API expects upper-case condition/type values.
    condition = (module.params['condition'] and
                 module.params['condition'].upper())
    load_balancer_id = module.params['load_balancer_id']
    node_id = module.params['node_id']
    port = module.params['port']
    state = module.params['state']
    typ = module.params['type'] and module.params['type'].upper()
    virtualenv = module.params['virtualenv']
    wait = module.params['wait']
    wait_timeout = module.params['wait_timeout'] or 1
    weight = module.params['weight']

    if virtualenv:
        try:
            _activate_virtualenv(virtualenv)
        except IOError as e:
            module.fail_json(msg='Failed to activate virtualenv %s (%s)' % (
                virtualenv, e))

    setup_rax_module(module, pyrax)

    if not pyrax.cloud_loadbalancers:
        module.fail_json(msg='Failed to instantiate client. This '
                             'typically indicates an invalid region or an '
                             'incorrectly capitalized region name.')

    try:
        lb = pyrax.cloud_loadbalancers.get(load_balancer_id)
    except pyrax.exc.PyraxException as e:
        module.fail_json(msg='%s' % e.message)

    node = _get_node(lb, node_id, address, port)

    result = rax_clb_node_to_dict(node)

    if state == 'absent':
        if not node:  # Removing a non-existent node
            module.exit_json(changed=False, state=state)
        try:
            lb.delete_node(node)
            result = {}
        except pyrax.exc.NotFound:
            module.exit_json(changed=False, state=state)
        except pyrax.exc.PyraxException as e:
            module.fail_json(msg='%s' % e.message)
    else:  # present
        if not node:
            if node_id:  # Updating a non-existent node
                msg = 'Node %d not found' % node_id
                if lb.nodes:
                    msg += (' (available nodes: %s)' %
                            ', '.join([str(x.id) for x in lb.nodes]))
                module.fail_json(msg=msg)
            else:  # Creating a new node
                try:
                    node = pyrax.cloudloadbalancers.Node(
                        address=address, port=port, condition=condition,
                        weight=weight, type=typ)
                    resp, body = lb.add_nodes([node])
                    result.update(body['nodes'][0])
                except pyrax.exc.PyraxException as e:
                    module.fail_json(msg='%s' % e.message)
        else:  # Updating an existing node
            mutable = {
                'condition': condition,
                'type': typ,
                'weight': weight,
            }
            # Keep only the attributes that were supplied AND differ from
            # the node's current values.  Building a new dict avoids popping
            # from `mutable` while iterating it, which raises RuntimeError
            # on Python 3.
            mutable = dict(
                (name, value) for name, value in mutable.items()
                if value is not None and value != getattr(node, name))

            if not mutable:
                module.exit_json(changed=False, state=state, node=result)

            try:
                # The diff has to be set explicitly to update node's weight and
                # type; this should probably be fixed in pyrax
                lb.update_node(node, diff=mutable)
                result.update(mutable)
            except pyrax.exc.PyraxException as e:
                module.fail_json(msg='%s' % e.message)

    if wait:
        pyrax.utils.wait_until(lb, "status", "ACTIVE", interval=1,
                               attempts=wait_timeout)
        if lb.status != 'ACTIVE':
            module.fail_json(
                msg='Load balancer not active after %ds (current status: %s)' %
                    (wait_timeout, lb.status.lower()))

    kwargs = {'node': result} if result else {}
    module.exit_json(changed=True, state=state, **kwargs)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.rax import *
# invoke the module
if __name__ == '__main__':
main()
| gpl-3.0 |
vhosouza/invesalius3 | invesalius/data/viewer_volume.py | 5 | 69761 | # -*- coding: utf-8 -*-
#--------------------------------------------------------------------------
# Software: InVesalius - Software de Reconstrucao 3D de Imagens Medicas
# Copyright: (C) 2001 Centro de Pesquisas Renato Archer
# Homepage: http://www.softwarepublico.gov.br
# Contact: invesalius@cti.gov.br
# License: GNU - GPL 2 (LICENSE.txt/LICENCA.txt)
#--------------------------------------------------------------------------
# Este programa e software livre; voce pode redistribui-lo e/ou
# modifica-lo sob os termos da Licenca Publica Geral GNU, conforme
# publicada pela Free Software Foundation; de acordo com a versao 2
# da Licenca.
#
# Este programa eh distribuido na expectativa de ser util, mas SEM
# QUALQUER GARANTIA; sem mesmo a garantia implicita de
# COMERCIALIZACAO ou de ADEQUACAO A QUALQUER PROPOSITO EM
# PARTICULAR. Consulte a Licenca Publica Geral GNU para obter mais
# detalhes.
#--------------------------------------------------------------------------
# from math import cos, sin
import os
import sys
import time
import numpy as np
from numpy.core.umath_tests import inner1d
import wx
import vtk
from vtk.wx.wxVTKRenderWindowInteractor import wxVTKRenderWindowInteractor
from pubsub import pub as Publisher
import random
from scipy.spatial import distance
from imageio import imsave
import invesalius.constants as const
import invesalius.data.slice_ as sl
import invesalius.data.styles_3d as styles
import invesalius.data.transformations as tr
import invesalius.data.vtk_utils as vtku
import invesalius.project as prj
import invesalius.style as st
import invesalius.utils as utils
from invesalius import inv_paths
if sys.platform == 'win32':
try:
import win32api
_has_win32api = True
except ImportError:
_has_win32api = False
else:
_has_win32api = False
PROP_MEASURE = 0.8
# from invesalius.gui.widgets.canvas_renderer import CanvasRendererCTX, Polygon
class Viewer(wx.Panel):
def __init__(self, parent):
    """Build the 3-D volume viewer panel.

    Creates the wx/VTK render window, the two-layer renderer stack
    (scene + canvas overlay), pickers, the text overlay, and initializes
    all state used by raycasting, measures and neuronavigation.
    """
    wx.Panel.__init__(self, parent, size=wx.Size(320, 320))
    self.SetBackgroundColour(wx.Colour(0, 0, 0))

    self.interaction_style = st.StyleStateManager()

    self.initial_focus = None

    # Sphere actors for navigation markers, indexed by ball_id.
    self.staticballs = []

    self.style = None

    interactor = wxVTKRenderWindowInteractor(self, -1, size = self.GetSize())
    self.interactor = interactor
    self.interactor.SetRenderWhenDisabled(True)

    self.enable_style(const.STATE_DEFAULT)

    sizer = wx.BoxSizer(wx.VERTICAL)
    sizer.Add(interactor, 1, wx.EXPAND)
    self.sizer = sizer
    self.SetSizer(sizer)
    self.Layout()

    # It would be more correct (API-wise) to call interactor.Initialize() and
    # interactor.Start() here, but Initialize() calls RenderWindow.Render().
    # That Render() call will get through before we can setup the
    # RenderWindow() to render via the wxWidgets-created context; this
    # causes flashing on some platforms and downright breaks things on
    # other platforms.  Instead, we call widget.Enable().  This means
    # that the RWI::Initialized ivar is not set, but in THIS SPECIFIC CASE,
    # that doesn't matter.
    interactor.Enable(1)

    # Layer 0: the main scene renderer.
    ren = vtk.vtkRenderer()
    self.ren = ren

    # Layer 1: a non-interactive overlay renderer for canvas drawings.
    canvas_renderer = vtk.vtkRenderer()
    canvas_renderer.SetLayer(1)
    canvas_renderer.SetInteractive(0)
    canvas_renderer.PreserveDepthBufferOn()
    self.canvas_renderer = canvas_renderer

    interactor.GetRenderWindow().SetNumberOfLayers(2)
    interactor.GetRenderWindow().AddRenderer(ren)
    interactor.GetRenderWindow().AddRenderer(canvas_renderer)

    self.raycasting_volume = False

    self.onclick = False

    # Window/level text overlay shown in the upper-left corner.
    self.text = vtku.TextZero()
    self.text.SetValue("")
    self.text.SetPosition(const.TEXT_POS_LEFT_UP)
    self.ren.AddActor(self.text.actor)

    # self.polygon = Polygon(None, is_3d=False)

    # self.canvas = CanvasRendererCTX(self, self.ren, self.canvas_renderer, 'AXIAL')
    # self.canvas.draw_list.append(self.text)
    # self.canvas.draw_list.append(self.polygon)

    # axes = vtk.vtkAxesActor()
    # axes.SetXAxisLabelText('x')
    # axes.SetYAxisLabelText('y')
    # axes.SetZAxisLabelText('z')
    # axes.SetTotalLength(50, 50, 50)
    #
    # self.ren.AddActor(axes)

    self.slice_plane = None

    self.view_angle = None

    self.__bind_events()
    self.__bind_events_wx()

    self.mouse_pressed = 0
    self.on_wl = False

    self.picker = vtk.vtkPointPicker()
    interactor.SetPicker(self.picker)
    self.seed_points = []

    # Actors created through AddPointReference().
    self.points_reference = []

    self.measure_picker = vtk.vtkPropPicker()
    #self.measure_picker.SetTolerance(0.005)
    self.measures = []

    self.repositioned_axial_plan = 0
    self.repositioned_sagital_plan = 0
    self.repositioned_coronal_plan = 0
    self.added_actor = 0

    self.camera_state = const.CAM_MODE

    # --- Neuronavigation state ---
    self.nav_status = False

    self.ball_actor = None
    self.obj_actor = None
    self.obj_axes = None
    self.obj_name = False
    self.obj_state = None
    self.obj_actor_list = None
    self.arrow_actor_list = None
    #self.pTarget = [0., 0., 0.]

    # self.obj_axes = None
    self.x_actor = None
    self.y_actor = None
    self.z_actor = None
    self.mark_actor = None
    self._mode_cross = False
    self._to_show_ball = 0
    self._ball_ref_visibility = False

    # Sensor status indicators (set up lazily in CreateSensorID).
    self.probe = False
    self.ref = False
    self.obj = False

    # Marker blinking (BlinkMarker/OnBlinkMarker).
    self.timer = False
    self.index = False

    self.target_coord = None
    self.aim_actor = None
    self.dummy_coil_actor = None
    self.target_mode = False
    self.polydata = None
    self.anglethreshold = const.COIL_ANGLES_THRESHOLD
    self.distthreshold = const.COIL_COORD_THRESHOLD

    self.actor_tracts = None
    self.actor_peel = None
    self.seed_offset = const.SEED_OFFSET

    # initialize Trekker parameters
    slic = sl.Slice()
    affine = slic.affine
    self.affine_vtk = vtku.numpy_to_vtkMatrix4x4(affine)
def __bind_events(self):
    """Subscribe every Publisher topic this viewer reacts to.

    Grouped by feature: surface/volume management, raycasting, styles and
    text overlays, navigation markers, object tracking and tractography.
    The handler names mirror the topic strings.
    """
    Publisher.subscribe(self.LoadActor,
                        'Load surface actor into viewer')
    Publisher.subscribe(self.RemoveActor,
                        'Remove surface actor from viewer')
    # Publisher.subscribe(self.OnShowSurface, 'Show surface')
    Publisher.subscribe(self.UpdateRender,
                        'Render volume viewer')
    Publisher.subscribe(self.ChangeBackgroundColour,
                        'Change volume viewer background colour')

    # Raycating - related
    Publisher.subscribe(self.LoadVolume,
                        'Load volume into viewer')
    Publisher.subscribe(self.UnloadVolume,
                        'Unload volume')
    Publisher.subscribe(self.OnSetWindowLevelText,
                        'Set volume window and level text')
    Publisher.subscribe(self.OnHideRaycasting,
                        'Hide raycasting volume')
    Publisher.subscribe(self.OnShowRaycasting,
                        'Update raycasting preset')
    ###
    Publisher.subscribe(self.AppendActor,'AppendActor')
    Publisher.subscribe(self.SetWidgetInteractor,
                        'Set Widget Interactor')
    Publisher.subscribe(self.OnSetViewAngle,
                        'Set volume view angle')
    Publisher.subscribe(self.OnDisableBrightContrast,
                        'Set interaction mode '+
                        str(const.MODE_SLICE_EDITOR))
    Publisher.subscribe(self.OnExportSurface, 'Export surface to file')
    Publisher.subscribe(self.LoadSlicePlane, 'Load slice plane')
    Publisher.subscribe(self.ResetCamClippingRange, 'Reset cam clipping range')
    # Publisher.subscribe(self.SetVolumeCamera, 'Update cross position')
    # Publisher.subscribe(self.SetVolumeCamera, 'Co-registered points')
    # Publisher.subscribe(self.SetVolumeCamera, 'Set camera in volume')
    Publisher.subscribe(self.SetVolumeCameraState, 'Update volume camera state')
    Publisher.subscribe(self.enable_style, 'Enable style')
    Publisher.subscribe(self.OnDisableStyle, 'Disable style')
    Publisher.subscribe(self.OnHideText,
                        'Hide text actors on viewers')
    Publisher.subscribe(self.AddActors, 'Add actors ' + str(const.SURFACE))
    Publisher.subscribe(self.RemoveActors, 'Remove actors ' + str(const.SURFACE))
    Publisher.subscribe(self.OnShowText,
                        'Show text actors on viewers')
    Publisher.subscribe(self.OnCloseProject, 'Close project data')
    Publisher.subscribe(self.RemoveAllActor, 'Remove all volume actors')
    Publisher.subscribe(self.OnExportPicture,'Export picture to file')
    Publisher.subscribe(self.OnStartSeed,'Create surface by seeding - start')
    Publisher.subscribe(self.OnEndSeed,'Create surface by seeding - end')
    Publisher.subscribe(self.SetStereoMode, 'Set stereo mode')
    Publisher.subscribe(self.Reposition3DPlane, 'Reposition 3D Plane')
    Publisher.subscribe(self.RemoveVolume, 'Remove Volume')
    # Publisher.subscribe(self.UpdateCameraBallPosition,
    #                     'Update cross position')
    Publisher.subscribe(self.UpdateCameraBallPosition, 'Set cross focal point')
    Publisher.subscribe(self._check_ball_reference, 'Enable style')
    Publisher.subscribe(self._uncheck_ball_reference, 'Disable style')
    Publisher.subscribe(self.OnSensors, 'Sensors ID')
    Publisher.subscribe(self.OnRemoveSensorsID, 'Remove sensors ID')

    # Related to marker creation in navigation tools
    Publisher.subscribe(self.AddMarker, 'Add marker')
    Publisher.subscribe(self.HideAllMarkers, 'Hide all markers')
    Publisher.subscribe(self.ShowAllMarkers, 'Show all markers')
    Publisher.subscribe(self.RemoveAllMarkers, 'Remove all markers')
    Publisher.subscribe(self.RemoveMarker, 'Remove marker')
    Publisher.subscribe(self.BlinkMarker, 'Blink Marker')
    Publisher.subscribe(self.StopBlinkMarker, 'Stop Blink Marker')
    Publisher.subscribe(self.SetNewColor, 'Set new color')

    # Related to object tracking during neuronavigation
    Publisher.subscribe(self.OnNavigationStatus, 'Navigation status')
    Publisher.subscribe(self.UpdateObjectOrientation, 'Update object matrix')
    Publisher.subscribe(self.UpdateTrackObjectState, 'Update track object state')
    Publisher.subscribe(self.UpdateShowObjectState, 'Update show object state')
    Publisher.subscribe(self.ActivateTargetMode, 'Target navigation mode')
    Publisher.subscribe(self.OnUpdateObjectTargetGuide, 'Update object matrix')
    Publisher.subscribe(self.OnUpdateTargetCoordinates, 'Update target')
    Publisher.subscribe(self.OnRemoveTarget, 'Disable or enable coil tracker')
    Publisher.subscribe(self.OnTargetMarkerTransparency, 'Set target transparency')
    Publisher.subscribe(self.OnUpdateAngleThreshold, 'Update angle threshold')
    Publisher.subscribe(self.OnUpdateDistThreshold, 'Update dist threshold')
    Publisher.subscribe(self.OnUpdateTracts, 'Update tracts')
    Publisher.subscribe(self.OnRemoveTracts, 'Remove tracts')
    Publisher.subscribe(self.UpdateSeedOffset, 'Update seed offset')
    Publisher.subscribe(self.UpdateMarkerOffsetState, 'Update marker offset state')
    Publisher.subscribe(self.UpdateMarkerOffsetPosition, 'Update marker offset')
    Publisher.subscribe(self.AddPeeledSurface, 'Update peel')
    Publisher.subscribe(self.load_mask_preview, 'Load mask preview')
    Publisher.subscribe(self.remove_mask_preview, 'Remove mask preview')
def SetStereoMode(self, mode):
    """Switch the render window's stereo mode.

    ``const.STEREO_OFF`` disables stereo rendering; any other known mode
    selects the matching VTK stereo type and turns stereo rendering on.
    """
    ren_win = self.interactor.GetRenderWindow()

    if mode == const.STEREO_OFF:
        ren_win.StereoRenderOff()
    else:
        # Map each mode constant to the corresponding VTK setter.
        setters = {
            const.STEREO_RED_BLUE: ren_win.SetStereoTypeToRedBlue,
            const.STEREO_CRISTAL: ren_win.SetStereoTypeToCrystalEyes,
            const.STEREO_INTERLACED: ren_win.SetStereoTypeToInterlaced,
            const.STEREO_LEFT: ren_win.SetStereoTypeToLeft,
            const.STEREO_RIGHT: ren_win.SetStereoTypeToRight,
            const.STEREO_DRESDEN: ren_win.SetStereoTypeToDresden,
            const.STEREO_CHECKBOARD: ren_win.SetStereoTypeToCheckerboard,
            const.STEREO_ANAGLYPH: ren_win.SetStereoTypeToAnaglyph,
        }
        setter = setters.get(mode)
        if setter is not None:
            setter()
        ren_win.StereoRenderOn()

    self.interactor.Render()
def _check_ball_reference(self, style):
    """When the cross-slice style is enabled, show the reference ball."""
    if style != const.SLICE_STATE_CROSS:
        return
    self._mode_cross = True
    self._ball_ref_visibility = True
    # Create the ball actor only once.
    if not self.ball_actor:
        self.CreateBallReference()
    self.interactor.Render()
def _uncheck_ball_reference(self, style):
    """When the cross-slice style is disabled, remove the reference ball."""
    if style != const.SLICE_STATE_CROSS:
        return
    self._mode_cross = False
    self._ball_ref_visibility = False
    if self.ball_actor:
        self.ren.RemoveActor(self.ball_actor)
        self.ball_actor = None
    self.interactor.Render()
def OnSensors(self, probe_id, ref_id, obj_id=0):
    """Colour the P/R/O sensor indicators: green when the sensor reports,
    red otherwise.  Creates the indicators on first use."""
    if not self.probe:
        self.CreateSensorID()

    green = (0, 1, 0)
    red = (1, 0, 0)
    self.probe.SetColour(green if probe_id else red)
    self.ref.SetColour(green if ref_id else red)
    self.obj.SetColour(green if obj_id else red)
    self.Refresh()
def CreateSensorID(self):
    """Create the "P"/"R"/"O" text actors that show tracker sensor status.

    The three labels are laid out left-to-right starting at (const.X,
    const.Y) and stored on self.probe, self.ref and self.obj.
    """
    layout = (('probe', "P", 0.0),
              ('ref', "R", 0.04),
              ('obj', "O", 0.08))
    for attr, label, x_offset in layout:
        indicator = vtku.Text()
        indicator.SetSize(const.TEXT_SIZE_LARGE)
        indicator.SetPosition((const.X + x_offset, const.Y))
        indicator.ShadowOff()
        indicator.SetValue(label)
        setattr(self, attr, indicator)
        self.ren.AddActor(indicator.actor)

    self.interactor.Render()
def OnRemoveSensorsID(self):
    """Remove the sensor status indicators, if they were created."""
    if not self.probe:
        return
    for indicator in (self.probe, self.ref, self.obj):
        self.ren.RemoveActor(indicator.actor)
    self.probe = self.ref = self.obj = False
    self.interactor.Render()
# def OnShowSurface(self, index, visibility):
# if visibility:
# self._to_show_ball += 1
# else:
# self._to_show_ball -= 1
# self._check_and_set_ball_visibility()
def OnStartSeed(self):
    """Reset the seed list at the start of seed-based surface creation."""
    self.seed_points = []
def OnEndSeed(self):
    """Hand the collected seed points over for surface creation."""
    Publisher.sendMessage("Create surface from seeds",
                          seeds=self.seed_points)
def OnExportPicture(self, orientation, filename, filetype):
    """Export the current 3-D view to *filename*.

    Only acts when *orientation* is ``const.VOLUME``.  On Windows the
    file is touched first so a short (8.3) path can be derived for the
    exporter.
    """
    if orientation != const.VOLUME:
        return
    Publisher.sendMessage('Begin busy cursor')
    if _has_win32api:
        utils.touch(filename)
        target = win32api.GetShortPathName(filename)
    else:
        target = filename
    self._export_picture(orientation, target, filetype)
    Publisher.sendMessage('End busy cursor')
def _export_picture(self, id, filename, filetype):
    """Render the current scene to *filename* in the requested *filetype*.

    POV-Ray exports go through vtkPOVExporter; every image format renders
    a large image of the scene and hands it to the matching VTK writer.
    Shows an error dialog if the file was not written.
    """
    if filetype == const.FILETYPE_POV:
        renwin = self.interactor.GetRenderWindow()
        image = vtk.vtkWindowToImageFilter()
        image.SetInput(renwin)
        writer = vtk.vtkPOVExporter()
        writer.SetFileName(filename.encode(const.FS_ENCODE))
        writer.SetRenderWindow(renwin)
        writer.Write()
    else:
        #Use tiling to generate a large rendering.
        image = vtk.vtkRenderLargeImage()
        image.SetInput(self.ren)
        image.SetMagnification(1)
        image.Update()

        image = image.GetOutput()

        # write image file
        if (filetype == const.FILETYPE_BMP):
            writer = vtk.vtkBMPWriter()
        elif (filetype == const.FILETYPE_JPG):
            writer = vtk.vtkJPEGWriter()
        elif (filetype == const.FILETYPE_PNG):
            writer = vtk.vtkPNGWriter()
        elif (filetype == const.FILETYPE_PS):
            writer = vtk.vtkPostScriptWriter()
        elif (filetype == const.FILETYPE_TIF):
            writer = vtk.vtkTIFFWriter()
            # Ensure a single ".tif" suffix.  The previous code used
            # filename.strip(".tif"), which strips *characters* from both
            # ends (e.g. "fit.tif" -> "") instead of removing the suffix.
            if filename.lower().endswith(".tif"):
                filename = filename[:-4]
            filename = u"%s.tif" % filename

        writer.SetInputData(image)
        writer.SetFileName(filename.encode(const.FS_ENCODE))
        writer.Write()

    if not os.path.exists(filename):
        wx.MessageBox(_("InVesalius was not able to export this picture"), _("Export picture error"))
def OnCloseProject(self):
    """Tear down viewer state when the current project is closed."""
    if self.raycasting_volume:
        self.raycasting_volume = False

    if self.slice_plane:
        self.slice_plane.Disable()
        self.slice_plane.DeletePlanes()
        del self.slice_plane
        Publisher.sendMessage('Uncheck image plane menu')
        self.mouse_pressed = 0
        self.on_wl = False
        self.slice_plane = 0  # NOTE(review): reset to 0 here but None in __init__ — confirm intentional

    self.interaction_style.Reset()
    self.SetInteractorStyle(const.STATE_DEFAULT)
def OnHideText(self):
    """Hide the text overlay and re-render."""
    self.text.Hide()
    self.interactor.Render()
def OnShowText(self):
    """Show the text overlay, but only while window/level text is active."""
    if self.on_wl:
        self.text.Show()
        self.interactor.Render()
def AddActors(self, actors):
    """Insert every actor from *actors* into the scene renderer."""
    for new_actor in actors:
        self.ren.AddActor(new_actor)
def RemoveVolume(self):
    """Remove the most recently added volume from the scene, if any."""
    volumes = self.ren.GetVolumes()
    if volumes.GetNumberOfItems() > 0:
        self.ren.RemoveVolume(volumes.GetLastProp())
        self.interactor.Render()
def RemoveActors(self, actors):
    """Remove every actor from *actors* from the scene renderer."""
    for old_actor in actors:
        self.ren.RemoveActor(old_actor)
def AddPointReference(self, position, radius=1, colour=(1, 0, 0)):
    """
    Add a point representation in the given x,y,z position with a optional
    radius and colour.  The actor is kept in self.points_reference so it
    can later be removed by index.
    """
    point = vtk.vtkSphereSource()
    point.SetCenter(position)
    point.SetRadius(radius)

    mapper = vtk.vtkPolyDataMapper()
    # vtkMapper.SetInput() was removed in VTK 6; connect through the
    # source's output port instead (matches AddMarker/add_marker below).
    mapper.SetInputConnection(point.GetOutputPort())

    p = vtk.vtkProperty()
    p.SetColor(colour)

    actor = vtk.vtkActor()
    actor.SetMapper(mapper)
    actor.SetProperty(p)
    actor.PickableOff()

    self.ren.AddActor(actor)
    self.points_reference.append(actor)
def RemoveAllPointsReference(self):
    """Remove every reference-point actor and clear the bookkeeping list."""
    for reference_actor in list(self.points_reference):
        self.ren.RemoveActor(reference_actor)
    self.points_reference = []
def RemovePointReference(self, point):
    """
    Remove the point reference. The point argument is the position that is
    added.
    """
    removed = self.points_reference.pop(point)
    self.ren.RemoveActor(removed)
def AddMarker(self, ball_id, size, colour, coord):
    """
    Markers created by navigation tools and rendered in volume viewer.
    """
    self.ball_id = ball_id

    # The viewer's world space is y-flipped with respect to the tracker.
    center = list(coord)
    center[1] = -center[1]

    source = vtk.vtkSphereSource()
    source.SetRadius(size)
    source.SetCenter(center)

    mapper = vtk.vtkPolyDataMapper()
    mapper.SetInputConnection(source.GetOutputPort())

    prop = vtk.vtkProperty()
    prop.SetColor(colour[0:3])

    # Register a new actor, then configure the one at index ball_id.
    self.staticballs.append(vtk.vtkActor())
    marker = self.staticballs[self.ball_id]
    marker.SetMapper(mapper)
    marker.SetProperty(prop)
    self.ren.AddActor(marker)

    self.ball_id = self.ball_id + 1
    #self.UpdateRender()
    self.Refresh()
def add_marker(self, coord, color):
    """Simplified version for creating a spherical marker in the 3D scene

    :param coord: (x, y, z) center of the sphere
    :param color: RGB colour passed to the actor property
    :return: vtkActor
    """
    sphere = vtk.vtkSphereSource()
    sphere.SetRadius(2)
    sphere.SetCenter(coord)

    mapper = vtk.vtkPolyDataMapper()
    mapper.SetInputConnection(sphere.GetOutputPort())

    prop = vtk.vtkProperty()
    prop.SetColor(color)

    actor = vtk.vtkActor()
    actor.SetMapper(mapper)
    actor.SetProperty(prop)
    actor.GetProperty().SetOpacity(0.5)

    # The caller is responsible for adding the actor to a renderer.
    return actor
def HideAllMarkers(self, indexes):
    """Hide the first *indexes* marker actors and re-render."""
    for marker in self.staticballs[:indexes]:
        marker.SetVisibility(0)
    self.UpdateRender()
def ShowAllMarkers(self, indexes):
    """Show the first *indexes* marker actors and re-render."""
    for marker in self.staticballs[:indexes]:
        marker.SetVisibility(1)
    self.UpdateRender()
def RemoveAllMarkers(self, indexes):
    """Remove the first *indexes* marker actors from the scene and drop
    the whole marker list."""
    for marker in self.staticballs[:indexes]:
        self.ren.RemoveActor(marker)
    self.staticballs = []
    self.UpdateRender()
def RemoveMarker(self, index):
    """Remove the markers at the positions listed in *index*.

    Iterates in reverse so earlier deletions do not shift later indices.
    """
    for i in reversed(index):
        self.ren.RemoveActor(self.staticballs.pop(i))
        self.ball_id -= 1
    self.UpdateRender()
def BlinkMarker(self, index):
    """Start blinking the marker at *index* using a 500 ms wx.Timer."""
    if self.timer:
        # A marker is already blinking: stop it and restore visibility.
        self.timer.Stop()
        self.staticballs[self.index].SetVisibility(1)

    self.index = index
    self.timer = wx.Timer(self)
    self.Bind(wx.EVT_TIMER, self.OnBlinkMarker, self.timer)
    self.timer.Start(500)
    self.timer_count = 0
def OnBlinkMarker(self, evt):
    """Timer callback: toggle the blinking marker's visibility each tick."""
    visible = self.timer_count % 2
    self.staticballs[self.index].SetVisibility(visible)
    self.Refresh()
    self.timer_count += 1
def StopBlinkMarker(self, index=None):
    """Stop any running blink; with no *index*, also restore visibility."""
    if not self.timer:
        return
    self.timer.Stop()
    if index is None:
        self.staticballs[self.index].SetVisibility(1)
        self.Refresh()
    self.index = False
def SetNewColor(self, index, color):
    """Change the colour of the marker at *index* and repaint."""
    self.staticballs[index].GetProperty().SetColor(color)
    self.Refresh()
def OnTargetMarkerTransparency(self, status, index):
    """Set the opacity of the target marker at *index*.

    NOTE(review): both branches currently set full opacity; the
    commented-out 0.4 suggests translucency was disabled — confirm
    whether this is intentional.
    """
    if status:
        self.staticballs[index].GetProperty().SetOpacity(1)
        # self.staticballs[index].GetProperty().SetOpacity(0.4)
    else:
        self.staticballs[index].GetProperty().SetOpacity(1)
def OnUpdateAngleThreshold(self, angle):
    """Store the new coil angle threshold."""
    self.anglethreshold = angle
def OnUpdateDistThreshold(self, dist_threshold):
    """Store the new coil distance threshold."""
    self.distthreshold = dist_threshold
    def ActivateTargetMode(self, evt=None, target_mode=None):
        """Enter or leave coil target-guidance mode.

        When a target is set and *target_mode* is truthy, shrinks the main
        renderer to 75% width and builds a side renderer (ren2) showing three
        coil views (roll/yaw/pitch) with fixed guidance arrows; otherwise
        tears the guidance UI down via DisableCoilTracker.
        """
        # NOTE(review): vtk_colors is assigned but never used in this method.
        vtk_colors = vtk.vtkNamedColors()
        self.target_mode = target_mode
        if self.target_coord and self.target_mode:
            self.CreateTargetAim()
            # Create a line
            self.ren.SetViewport(0, 0, 0.75, 1)
            self.ren2 = vtk.vtkRenderer()
            self.interactor.GetRenderWindow().AddRenderer(self.ren2)
            self.ren2.SetViewport(0.75, 0, 1, 1)
            self.CreateTextDistance()
            obj_polydata = self.CreateObjectPolyData(self.obj_name)
            normals = vtk.vtkPolyDataNormals()
            normals.SetInputData(obj_polydata)
            normals.SetFeatureAngle(80)
            normals.AutoOrientNormalsOn()
            normals.Update()
            # A single mapper is shared by the three coil-view actors.
            mapper = vtk.vtkPolyDataMapper()
            mapper.SetInputData(normals.GetOutput())
            mapper.ScalarVisibilityOff()
            #mapper.ImmediateModeRenderingOn()  # improve performance
            # Roll view of the coil (top of the side panel).
            obj_roll = vtk.vtkActor()
            obj_roll.SetMapper(mapper)
            obj_roll.GetProperty().SetColor(1, 1, 1)
            # obj_roll.GetProperty().SetDiffuseColor(vtk_colors.GetColor3d('GhostWhite'))
            # obj_roll.GetProperty().SetSpecular(30)
            # obj_roll.GetProperty().SetSpecularPower(80)
            obj_roll.SetPosition(0, 25, -30)
            obj_roll.RotateX(-60)
            obj_roll.RotateZ(180)
            # Yaw view (middle).
            obj_yaw = vtk.vtkActor()
            obj_yaw.SetMapper(mapper)
            obj_yaw.GetProperty().SetColor(1, 1, 1)
            # obj_yaw.GetProperty().SetDiffuseColor(vtk_colors.GetColor3d('GhostWhite'))
            # obj_yaw.GetProperty().SetSpecular(30)
            # obj_yaw.GetProperty().SetSpecularPower(80)
            obj_yaw.SetPosition(0, -115, 5)
            obj_yaw.RotateZ(180)
            # Pitch view (bottom).
            obj_pitch = vtk.vtkActor()
            obj_pitch.SetMapper(mapper)
            obj_pitch.GetProperty().SetColor(1, 1, 1)
            # obj_pitch.GetProperty().SetDiffuseColor(vtk_colors.GetColor3d('GhostWhite'))
            # obj_pitch.GetProperty().SetSpecular(30)
            # obj_pitch.GetProperty().SetSpecularPower(80)
            obj_pitch.SetPosition(5, -265, 5)
            obj_pitch.RotateY(90)
            obj_pitch.RotateZ(180)
            # Static direction arrows for each of the three views.
            arrow_roll_z1 = self.CreateArrowActor([-50, -35, 12], [-50, -35, 50])
            arrow_roll_z1.GetProperty().SetColor(1, 1, 0)
            arrow_roll_z1.RotateX(-60)
            arrow_roll_z1.RotateZ(180)
            arrow_roll_z2 = self.CreateArrowActor([50, -35, 0], [50, -35, -50])
            arrow_roll_z2.GetProperty().SetColor(1, 1, 0)
            arrow_roll_z2.RotateX(-60)
            arrow_roll_z2.RotateZ(180)
            arrow_yaw_y1 = self.CreateArrowActor([-50, -35, 0], [-50, 5, 0])
            arrow_yaw_y1.GetProperty().SetColor(0, 1, 0)
            arrow_yaw_y1.SetPosition(0, -150, 0)
            arrow_yaw_y1.RotateZ(180)
            arrow_yaw_y2 = self.CreateArrowActor([50, -35, 0], [50, -75, 0])
            arrow_yaw_y2.GetProperty().SetColor(0, 1, 0)
            arrow_yaw_y2.SetPosition(0, -150, 0)
            arrow_yaw_y2.RotateZ(180)
            arrow_pitch_x1 = self.CreateArrowActor([0, 65, 38], [0, 65, 68])
            arrow_pitch_x1.GetProperty().SetColor(1, 0, 0)
            arrow_pitch_x1.SetPosition(0, -300, 0)
            arrow_pitch_x1.RotateY(90)
            arrow_pitch_x1.RotateZ(180)
            arrow_pitch_x2 = self.CreateArrowActor([0, -55, 5], [0, -55, -30])
            arrow_pitch_x2.GetProperty().SetColor(1, 0, 0)
            arrow_pitch_x2.SetPosition(0, -300, 0)
            arrow_pitch_x2.RotateY(90)
            arrow_pitch_x2.RotateZ(180)
            self.obj_actor_list = obj_roll, obj_yaw, obj_pitch
            self.arrow_actor_list = arrow_roll_z1, arrow_roll_z2, arrow_yaw_y1, arrow_yaw_y2,\
                                    arrow_pitch_x1, arrow_pitch_x2
            for ind in self.obj_actor_list:
                self.ren2.AddActor(ind)
            for ind in self.arrow_actor_list:
                self.ren2.AddActor(ind)
            self.ren.ResetCamera()
            self.SetCameraTarget()
            #self.ren.GetActiveCamera().Zoom(4)
            self.ren2.ResetCamera()
            self.ren2.GetActiveCamera().Zoom(2)
            self.ren2.InteractiveOff()
            self.interactor.Render()
        else:
            self.DisableCoilTracker()
    def OnUpdateObjectTargetGuide(self, m_img, coord):
        """Refresh the target-guidance display for the current coil pose.

        Updates the distance readout and camera zoom from the Euclidean
        distance between *coord* and the target, colors the aim and the three
        side-view coil actors green/white by threshold, and rebuilds the six
        dynamic correction arrows whose lengths are the (clamped, scaled)
        angular deviations in coord[3:6].
        """
        vtk_colors = vtk.vtkNamedColors()
        if self.target_coord and self.target_mode:
            # Target Y is stored flipped relative to tracker coordinates.
            target_dist = distance.euclidean(coord[0:3],
                                             (self.target_coord[0], -self.target_coord[1], self.target_coord[2]))
            # self.txt.SetCoilDistanceValue(target_dist)
            self.tdist.SetValue('Distance: ' + str("{:06.2f}".format(target_dist)) + ' mm')
            self.ren.ResetCamera()
            self.SetCameraTarget()
            if target_dist > 100:
                target_dist = 100
            # ((-0.0404*dst) + 5.0404) is the linear equation to normalize the zoom between 1 and 5 times with
            # the distance between 1 and 100 mm
            self.ren.GetActiveCamera().Zoom((-0.0404 * target_dist) + 5.0404)
            if target_dist <= self.distthreshold:
                thrdist = True
                self.aim_actor.GetProperty().SetDiffuseColor(vtk_colors.GetColor3d('Green'))
            else:
                thrdist = False
                self.aim_actor.GetProperty().SetDiffuseColor(vtk_colors.GetColor3d('Yellow'))
            # Angular deviations, clamped to +/- ARROW_UPPER_LIMIT then scaled.
            coordx = self.target_coord[3] - coord[3]
            if coordx > const.ARROW_UPPER_LIMIT:
                coordx = const.ARROW_UPPER_LIMIT
            elif coordx < -const.ARROW_UPPER_LIMIT:
                coordx = -const.ARROW_UPPER_LIMIT
            coordx = const.ARROW_SCALE * coordx
            coordy = self.target_coord[4] - coord[4]
            if coordy > const.ARROW_UPPER_LIMIT:
                coordy = const.ARROW_UPPER_LIMIT
            elif coordy < -const.ARROW_UPPER_LIMIT:
                coordy = -const.ARROW_UPPER_LIMIT
            coordy = const.ARROW_SCALE * coordy
            coordz = self.target_coord[5] - coord[5]
            if coordz > const.ARROW_UPPER_LIMIT:
                coordz = const.ARROW_UPPER_LIMIT
            elif coordz < -const.ARROW_UPPER_LIMIT:
                coordz = -const.ARROW_UPPER_LIMIT
            coordz = const.ARROW_SCALE * coordz
            # Drop the previous frame's arrows before building new ones.
            for ind in self.arrow_actor_list:
                self.ren2.RemoveActor(ind)
            if self.anglethreshold * const.ARROW_SCALE > coordx > -self.anglethreshold * const.ARROW_SCALE:
                thrcoordx = True
                # self.obj_actor_list[0].GetProperty().SetDiffuseColor(vtk_colors.GetColor3d('Green'))
                self.obj_actor_list[0].GetProperty().SetColor(0, 1, 0)
            else:
                thrcoordx = False
                # self.obj_actor_list[0].GetProperty().SetDiffuseColor(vtk_colors.GetColor3d('GhostWhite'))
                self.obj_actor_list[0].GetProperty().SetColor(1, 1, 1)
            offset = 5
            arrow_roll_x1 = self.CreateArrowActor([-55, -35, offset], [-55, -35, offset - coordx])
            arrow_roll_x1.RotateX(-60)
            arrow_roll_x1.RotateZ(180)
            arrow_roll_x1.GetProperty().SetColor(1, 1, 0)
            arrow_roll_x2 = self.CreateArrowActor([55, -35, offset], [55, -35, offset + coordx])
            arrow_roll_x2.RotateX(-60)
            arrow_roll_x2.RotateZ(180)
            arrow_roll_x2.GetProperty().SetColor(1, 1, 0)
            if self.anglethreshold * const.ARROW_SCALE > coordz > -self.anglethreshold * const.ARROW_SCALE:
                thrcoordz = True
                # self.obj_actor_list[1].GetProperty().SetDiffuseColor(vtk_colors.GetColor3d('Green'))
                self.obj_actor_list[1].GetProperty().SetColor(0, 1, 0)
            else:
                thrcoordz = False
                # self.obj_actor_list[1].GetProperty().SetDiffuseColor(vtk_colors.GetColor3d('GhostWhite'))
                self.obj_actor_list[1].GetProperty().SetColor(1, 1, 1)
            offset = -35
            arrow_yaw_z1 = self.CreateArrowActor([-55, offset, 0], [-55, offset - coordz, 0])
            arrow_yaw_z1.SetPosition(0, -150, 0)
            arrow_yaw_z1.RotateZ(180)
            arrow_yaw_z1.GetProperty().SetColor(0, 1, 0)
            arrow_yaw_z2 = self.CreateArrowActor([55, offset, 0], [55, offset + coordz, 0])
            arrow_yaw_z2.SetPosition(0, -150, 0)
            arrow_yaw_z2.RotateZ(180)
            arrow_yaw_z2.GetProperty().SetColor(0, 1, 0)
            if self.anglethreshold * const.ARROW_SCALE > coordy > -self.anglethreshold * const.ARROW_SCALE:
                thrcoordy = True
                #self.obj_actor_list[2].GetProperty().SetDiffuseColor(vtk_colors.GetColor3d('Green'))
                self.obj_actor_list[2].GetProperty().SetColor(0, 1, 0)
            else:
                thrcoordy = False
                #self.obj_actor_list[2].GetProperty().SetDiffuseColor(vtk_colors.GetColor3d('GhostWhite'))
                self.obj_actor_list[2].GetProperty().SetColor(1, 1, 1)
            offset = 38
            arrow_pitch_y1 = self.CreateArrowActor([0, 65, offset], [0, 65, offset + coordy])
            arrow_pitch_y1.SetPosition(0, -300, 0)
            arrow_pitch_y1.RotateY(90)
            arrow_pitch_y1.RotateZ(180)
            arrow_pitch_y1.GetProperty().SetColor(1, 0, 0)
            offset = 5
            arrow_pitch_y2 = self.CreateArrowActor([0, -55, offset], [0, -55, offset - coordy])
            arrow_pitch_y2.SetPosition(0, -300, 0)
            arrow_pitch_y2.RotateY(90)
            arrow_pitch_y2.RotateZ(180)
            arrow_pitch_y2.GetProperty().SetColor(1, 0, 0)
            # The dummy coil goes green only when all four thresholds are met.
            if thrdist and thrcoordx and thrcoordy and thrcoordz:
                self.dummy_coil_actor.GetProperty().SetDiffuseColor(vtk_colors.GetColor3d('Green'))
            else:
                self.dummy_coil_actor.GetProperty().SetDiffuseColor(vtk_colors.GetColor3d('DarkOrange'))
            self.arrow_actor_list = arrow_roll_x1, arrow_roll_x2, arrow_yaw_z1, arrow_yaw_z2, \
                                    arrow_pitch_y1, arrow_pitch_y2
            for ind in self.arrow_actor_list:
                self.ren2.AddActor(ind)
            self.Refresh()
    def OnUpdateTargetCoordinates(self, coord):
        """Store the new target coordinate (Y flipped) and rebuild the aim actor.

        NOTE(review): the Y component is negated in place, so the caller's
        *coord* list is mutated by this call — confirm callers expect that.
        """
        self.target_coord = coord
        self.target_coord[1] = -self.target_coord[1]
        self.CreateTargetAim()
def OnRemoveTarget(self, status):
if not status:
self.target_mode = None
self.target_coord = None
self.RemoveTargetAim()
self.DisableCoilTracker()
    def CreateTargetAim(self):
        """Create the aim (crosshair) actor and the translucent dummy coil at
        the current target pose, replacing any previous aim.

        Builds a homogeneous matrix from the target's Euler angles (sxyz) and
        translation, transforms the aim.stl geometry with it, and positions a
        semi-transparent coil model via SetUserMatrix.
        """
        if self.aim_actor:
            self.RemoveTargetAim()
            self.aim_actor = None
        vtk_colors = vtk.vtkNamedColors()
        # Target pose: rotation from Euler angles (degrees -> radians, sxyz)
        # composed with the translation part.
        a, b, g = np.radians(self.target_coord[3:])
        r_ref = tr.euler_matrix(a, b, g, 'sxyz')
        t_ref = tr.translation_matrix(self.target_coord[:3])
        m_img = np.asmatrix(tr.concatenate_matrices(t_ref, r_ref))
        m_img_vtk = vtk.vtkMatrix4x4()
        for row in range(0, 4):
            for col in range(0, 4):
                m_img_vtk.SetElement(row, col, m_img[row, col])
        self.m_img_vtk = m_img_vtk
        filename = os.path.join(inv_paths.OBJ_DIR, "aim.stl")
        reader = vtk.vtkSTLReader()
        reader.SetFileName(filename)
        mapper = vtk.vtkPolyDataMapper()
        mapper.SetInputConnection(reader.GetOutputPort())
        # Transform the polydata
        transform = vtk.vtkTransform()
        transform.SetMatrix(m_img_vtk)
        transformPD = vtk.vtkTransformPolyDataFilter()
        transformPD.SetTransform(transform)
        transformPD.SetInputConnection(reader.GetOutputPort())
        transformPD.Update()
        # mapper transform
        mapper.SetInputConnection(transformPD.GetOutputPort())
        aim_actor = vtk.vtkActor()
        aim_actor.SetMapper(mapper)
        aim_actor.GetProperty().SetDiffuseColor(vtk_colors.GetColor3d('Yellow'))
        aim_actor.GetProperty().SetSpecular(.2)
        aim_actor.GetProperty().SetSpecularPower(100)
        aim_actor.GetProperty().SetOpacity(1.)
        self.aim_actor = aim_actor
        self.ren.AddActor(aim_actor)
        # Use the user-selected coil polydata when available, otherwise the
        # bundled default figure-8 coil model.
        if self.polydata:
            obj_polydata = self.polydata
        else:
            obj_polydata = self.CreateObjectPolyData(os.path.join(inv_paths.OBJ_DIR, "magstim_fig8_coil_no_handle.stl"))
        transform = vtk.vtkTransform()
        transform.RotateZ(90)
        transform_filt = vtk.vtkTransformPolyDataFilter()
        transform_filt.SetTransform(transform)
        transform_filt.SetInputData(obj_polydata)
        transform_filt.Update()
        normals = vtk.vtkPolyDataNormals()
        normals.SetInputData(transform_filt.GetOutput())
        normals.SetFeatureAngle(80)
        normals.AutoOrientNormalsOn()
        normals.Update()
        obj_mapper = vtk.vtkPolyDataMapper()
        obj_mapper.SetInputData(normals.GetOutput())
        obj_mapper.ScalarVisibilityOff()
        #obj_mapper.ImmediateModeRenderingOn()  # improve performance
        self.dummy_coil_actor = vtk.vtkActor()
        self.dummy_coil_actor.SetMapper(obj_mapper)
        self.dummy_coil_actor.GetProperty().SetDiffuseColor(vtk_colors.GetColor3d('DarkOrange'))
        self.dummy_coil_actor.GetProperty().SetSpecular(0.5)
        self.dummy_coil_actor.GetProperty().SetSpecularPower(10)
        self.dummy_coil_actor.GetProperty().SetOpacity(.3)
        self.dummy_coil_actor.SetVisibility(1)
        self.dummy_coil_actor.SetUserMatrix(m_img_vtk)
        self.ren.AddActor(self.dummy_coil_actor)
        self.Refresh()
def RemoveTargetAim(self):
self.ren.RemoveActor(self.aim_actor)
self.ren.RemoveActor(self.dummy_coil_actor)
self.Refresh()
def CreateTextDistance(self):
tdist = vtku.Text()
tdist.SetSize(const.TEXT_SIZE_DIST_NAV)
tdist.SetPosition((const.X, 1.-const.Y))
tdist.SetVerticalJustificationToBottom()
tdist.BoldOn()
self.ren.AddActor(tdist.actor)
self.tdist = tdist
def DisableCoilTracker(self):
try:
self.ren.SetViewport(0, 0, 1, 1)
self.interactor.GetRenderWindow().RemoveRenderer(self.ren2)
self.SetViewAngle(const.VOL_FRONT)
self.ren.RemoveActor(self.tdist.actor)
self.CreateTargetAim()
self.interactor.Render()
except:
None
    def CreateArrowActor(self, startPoint, endPoint):
        """Return a vtkActor rendering an arrow from *startPoint* to *endPoint*.

        Builds an orthonormal basis whose X axis points from start to end
        (Z is a random vector crossed with X, so only the arrow's roll about
        its own axis is random), then scales and places a unit vtkArrowSource
        with that frame.
        """
        # Compute a basis
        normalizedX = [0 for i in range(3)]
        normalizedY = [0 for i in range(3)]
        normalizedZ = [0 for i in range(3)]
        # The X axis is a vector from start to end
        math = vtk.vtkMath()
        math.Subtract(endPoint, startPoint, normalizedX)
        length = math.Norm(normalizedX)
        math.Normalize(normalizedX)
        # The Z axis is an arbitrary vector cross X
        arbitrary = [0 for i in range(3)]
        arbitrary[0] = random.uniform(-10, 10)
        arbitrary[1] = random.uniform(-10, 10)
        arbitrary[2] = random.uniform(-10, 10)
        math.Cross(normalizedX, arbitrary, normalizedZ)
        math.Normalize(normalizedZ)
        # The Y axis is Z cross X
        math.Cross(normalizedZ, normalizedX, normalizedY)
        matrix = vtk.vtkMatrix4x4()
        # Create the direction cosine matrix
        matrix.Identity()
        for i in range(3):
            matrix.SetElement(i, 0, normalizedX[i])
            matrix.SetElement(i, 1, normalizedY[i])
            matrix.SetElement(i, 2, normalizedZ[i])
        # Apply the transforms arrow 1
        transform_1 = vtk.vtkTransform()
        transform_1.Translate(startPoint)
        transform_1.Concatenate(matrix)
        transform_1.Scale(length, length, length)
        # source
        arrowSource1 = vtk.vtkArrowSource()
        arrowSource1.SetTipResolution(50)
        # Create a mapper and actor
        mapper = vtk.vtkPolyDataMapper()
        mapper.SetInputConnection(arrowSource1.GetOutputPort())
        # Transform the polydata
        transformPD = vtk.vtkTransformPolyDataFilter()
        transformPD.SetTransform(transform_1)
        transformPD.SetInputConnection(arrowSource1.GetOutputPort())
        # mapper transform
        mapper.SetInputConnection(transformPD.GetOutputPort())
        # actor
        actor_arrow = vtk.vtkActor()
        actor_arrow.SetMapper(mapper)
        return actor_arrow
def CenterOfMass(self):
proj = prj.Project()
surface = proj.surface_dict[0].polydata
barycenter = [0.0, 0.0, 0.0]
n = surface.GetNumberOfPoints()
for i in range(n):
point = surface.GetPoint(i)
barycenter[0] += point[0]
barycenter[1] += point[1]
barycenter[2] += point[2]
barycenter[0] /= n
barycenter[1] /= n
barycenter[2] /= n
return barycenter
def Plane(self, x0, pTarget):
v3 = np.array(pTarget) - x0 # normal to the plane
v3 = v3 / np.linalg.norm(v3) # unit vector
d = np.dot(v3, x0)
# prevents division by zero.
if v3[0] == 0.0:
v3[0] = 1e-09
x1 = np.array([(d - v3[1] - v3[2]) / v3[0], 1, 1])
v2 = x1 - x0
v2 = v2 / np.linalg.norm(v2) # unit vector
v1 = np.cross(v3, v2)
v1 = v1 / np.linalg.norm(v1) # unit vector
x2 = x0 + v1
# calculates the matrix for the change of coordinate systems (from canonical to the plane's).
# remember that, in np.dot(M,p), even though p is a line vector (e.g.,np.array([1,2,3])), it is treated as a column for the dot multiplication.
M_plane_inv = np.array([[v1[0], v2[0], v3[0], x0[0]],
[v1[1], v2[1], v3[1], x0[1]],
[v1[2], v2[2], v3[2], x0[2]],
[0, 0, 0, 1]])
return v3, M_plane_inv
    def SetCameraTarget(self):
        """Orient the main camera towards the target coordinate.

        Applies the target pose matrix (self.m_img_vtk) on top of the current
        view transform, rolls the camera 90 degrees, then moves the camera so
        its focal point is the target while keeping the original
        focus-to-position distance.
        """
        cam_focus = self.target_coord[0:3]
        cam = self.ren.GetActiveCamera()
        oldcamVTK = vtk.vtkMatrix4x4()
        oldcamVTK.DeepCopy(cam.GetViewTransformMatrix())
        newvtk = vtk.vtkMatrix4x4()
        newvtk.Multiply4x4(self.m_img_vtk, oldcamVTK, newvtk)
        transform = vtk.vtkTransform()
        transform.SetMatrix(newvtk)
        transform.Update()
        cam.ApplyTransform(transform)
        cam.Roll(90)
        cam_pos0 = np.array(cam.GetPosition())
        cam_focus0 = np.array(cam.GetFocalPoint())
        v0 = cam_pos0 - cam_focus0
        v0n = np.sqrt(inner1d(v0, v0))  # current focus-to-camera distance
        v1 = (cam_focus[0] - cam_focus0[0], cam_focus[1] - cam_focus0[1], cam_focus[2] - cam_focus0[2])
        v1n = np.sqrt(inner1d(v1, v1))
        if not v1n:
            # focus unchanged: avoid dividing by zero below
            v1n = 1.0
        cam_pos = (v1 / v1n) * v0n + cam_focus
        cam.SetFocalPoint(cam_focus)
        cam.SetPosition(cam_pos)
def CreateBallReference(self):
"""
Red sphere on volume visualization to reference center of
cross in slice planes.
The sphere's radius will be scale times bigger than the average of
image spacing values.
"""
scale = 2.0
proj = prj.Project()
s = proj.spacing
r = (s[0] + s[1] + s[2]) / 3.0 * scale
ball_source = vtk.vtkSphereSource()
ball_source.SetRadius(r)
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(ball_source.GetOutputPort())
self.ball_actor = vtk.vtkActor()
self.ball_actor.SetMapper(mapper)
self.ball_actor.GetProperty().SetColor(1, 0, 0)
self.ren.AddActor(self.ball_actor)
# def SetCrossFocalPoint(self, position):
# self.UpdateCameraBallPosition(None, position)
def UpdateCameraBallPosition(self, position):
coord_flip = list(position[:3])
coord_flip[1] = -coord_flip[1]
self.ball_actor.SetPosition(coord_flip)
self.SetVolumeCamera(coord_flip)
    def CreateObjectPolyData(self, filename):
        """
        Coil for navigation rendered in volume viewer.

        Load *filename* (STL/PLY/OBJ/VTP) and return its polydata, or None on
        failure. An empty filename falls back to the bundled default coil.
        """
        filename = utils.decode(filename, const.FS_ENCODE)
        if filename:
            if filename.lower().endswith('.stl'):
                reader = vtk.vtkSTLReader()
            elif filename.lower().endswith('.ply'):
                reader = vtk.vtkPLYReader()
            elif filename.lower().endswith('.obj'):
                reader = vtk.vtkOBJReader()
            elif filename.lower().endswith('.vtp'):
                reader = vtk.vtkXMLPolyDataReader()
            else:
                wx.MessageBox(_("File format not reconized by InVesalius"), _("Import surface error"))
                return
        else:
            # no filename given: use the bundled default figure-8 coil model
            filename = os.path.join(inv_paths.OBJ_DIR, "magstim_fig8_coil.stl")
            reader = vtk.vtkSTLReader()
        # Short path avoids non-ASCII path issues in VTK readers on Windows.
        if _has_win32api:
            obj_name = win32api.GetShortPathName(filename).encode(const.FS_ENCODE)
        else:
            obj_name = filename.encode(const.FS_ENCODE)
        reader.SetFileName(obj_name)
        reader.Update()
        obj_polydata = reader.GetOutput()
        if obj_polydata.GetNumberOfPoints() == 0:
            wx.MessageBox(_("InVesalius was not able to import this surface"), _("Import surface error"))
            obj_polydata = None
        return obj_polydata
    def AddObjectActor(self, obj_name):
        """
        Coil for navigation rendered in volume viewer.

        Build the tracked-coil actor (rotated 90 degrees about Z, translucent,
        initially hidden) plus three colored axis lines, and add them to the
        main renderer.
        """
        vtk_colors = vtk.vtkNamedColors()
        obj_polydata = self.CreateObjectPolyData(obj_name)
        transform = vtk.vtkTransform()
        transform.RotateZ(90)
        transform_filt = vtk.vtkTransformPolyDataFilter()
        transform_filt.SetTransform(transform)
        transform_filt.SetInputData(obj_polydata)
        transform_filt.Update()
        normals = vtk.vtkPolyDataNormals()
        normals.SetInputData(transform_filt.GetOutput())
        normals.SetFeatureAngle(80)
        normals.AutoOrientNormalsOn()
        normals.Update()
        obj_mapper = vtk.vtkPolyDataMapper()
        obj_mapper.SetInputData(normals.GetOutput())
        obj_mapper.ScalarVisibilityOff()
        #obj_mapper.ImmediateModeRenderingOn()  # improve performance
        self.obj_actor = vtk.vtkActor()
        self.obj_actor.SetMapper(obj_mapper)
        self.obj_actor.GetProperty().SetAmbientColor(vtk_colors.GetColor3d('GhostWhite'))
        self.obj_actor.GetProperty().SetSpecular(30)
        self.obj_actor.GetProperty().SetSpecularPower(80)
        self.obj_actor.GetProperty().SetOpacity(.4)
        self.obj_actor.SetVisibility(0)
        # Unit axis lines; their pose follows the coil via SetUserMatrix in
        # UpdateObjectOrientation.
        self.x_actor = self.add_line([0., 0., 0.], [1., 0., 0.], color=[.0, .0, 1.0])
        self.y_actor = self.add_line([0., 0., 0.], [0., 1., 0.], color=[.0, 1.0, .0])
        self.z_actor = self.add_line([0., 0., 0.], [0., 0., 1.], color=[1.0, .0, .0])
        self.ren.AddActor(self.obj_actor)
        self.ren.AddActor(self.x_actor)
        self.ren.AddActor(self.y_actor)
        self.ren.AddActor(self.z_actor)
        # self.obj_axes = vtk.vtkAxesActor()
        # self.obj_axes.SetShaftTypeToCylinder()
        # self.obj_axes.SetXAxisLabelText("x")
        # self.obj_axes.SetYAxisLabelText("y")
        # self.obj_axes.SetZAxisLabelText("z")
        # self.obj_axes.SetTotalLength(50.0, 50.0, 50.0)
        # self.ren.AddActor(self.obj_axes)
def add_line(self, p1, p2, color=[0.0, 0.0, 1.0]):
line = vtk.vtkLineSource()
line.SetPoint1(p1)
line.SetPoint2(p2)
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(line.GetOutputPort())
actor = vtk.vtkActor()
actor.SetMapper(mapper)
actor.GetProperty().SetColor(color)
return actor
def AddPeeledSurface(self, flag, actor):
if self.actor_peel:
self.ren.RemoveActor(self.actor_peel)
self.actor_peel = None
if flag and actor:
self.ren.AddActor(actor)
self.actor_peel = actor
self.Refresh()
    def OnNavigationStatus(self, nav_status, vis_status):
        """React to navigation start/stop: cache status flags, recompute the
        surface barycenter, and show the coil actors when navigating."""
        self.nav_status = nav_status
        self.tracts_status = vis_status[1]
        self.pTarget = self.CenterOfMass()
        if self.nav_status:
            if self.obj_actor:
                self.obj_actor.SetVisibility(self.obj_state)
                self.x_actor.SetVisibility(self.obj_state)
                self.y_actor.SetVisibility(self.obj_state)
                self.z_actor.SetVisibility(self.obj_state)
        self.Refresh()
    def UpdateSeedOffset(self, data):
        """Store the tractography seed offset."""
        self.seed_offset = data
def UpdateMarkerOffsetState(self, create=False):
if create:
if not self.mark_actor:
self.mark_actor = self.add_marker([0., 0., 0.], color=[0., 1., 1.])
self.ren.AddActor(self.mark_actor)
else:
if self.mark_actor:
self.ren.RemoveActor(self.mark_actor)
self.mark_actor = None
self.Refresh()
    def CreateMarkerOffset(self):
        """Create the cyan seed-offset marker at the origin and display it."""
        self.mark_actor = self.add_marker([0., 0., 0.], color=[0., 1., 1.])
        self.ren.AddActor(self.mark_actor)
        self.Refresh()
    def UpdateMarkerOffsetPosition(self, coord_offset):
        """Move the seed-offset marker to *coord_offset*."""
        self.mark_actor.SetPosition(coord_offset)
        self.Refresh()
    def UpdateObjectOrientation(self, m_img, coord):
        """Apply the coregistered coil pose *m_img* (with Y translation flipped)
        to the coil actor and its axis lines."""
        # print("Update object orientation")
        m_img_flip = m_img.copy()
        m_img_flip[1, -1] = -m_img_flip[1, -1]
        # translate coregistered coordinate to display a marker where Trekker seed is computed
        # coord_offset = m_img_flip[:3, -1] - self.seed_offset * m_img_flip[:3, 2]
        # print("m_img copy in viewer_vol: {}".format(m_img_copy))
        # m_img[:3, 0] is from posterior to anterior direction of the coil
        # m_img[:3, 1] is from left to right direction of the coil
        # m_img[:3, 2] is from bottom to up direction of the coil
        m_img_vtk = vtku.numpy_to_vtkMatrix4x4(m_img_flip)
        self.obj_actor.SetUserMatrix(m_img_vtk)
        # self.obj_axes.SetUserMatrix(m_rot_vtk)
        self.x_actor.SetUserMatrix(m_img_vtk)
        self.y_actor.SetUserMatrix(m_img_vtk)
        self.z_actor.SetUserMatrix(m_img_vtk)
        self.Refresh()
def UpdateTrackObjectState(self, evt=None, flag=None, obj_name=None, polydata=None):
if flag:
self.obj_name = obj_name
self.polydata = polydata
if not self.obj_actor:
self.AddObjectActor(self.obj_name)
else:
if self.obj_actor:
self.ren.RemoveActor(self.obj_actor)
self.ren.RemoveActor(self.x_actor)
self.ren.RemoveActor(self.y_actor)
self.ren.RemoveActor(self.z_actor)
self.ren.RemoveActor(self.mark_actor)
self.obj_actor = None
self.x_actor = None
self.y_actor = None
self.z_actor = None
self.mark_actor = None
self.Refresh()
def UpdateShowObjectState(self, state):
self.obj_state = state
if self.obj_actor and not self.obj_state:
self.obj_actor.SetVisibility(self.obj_state)
self.x_actor.SetVisibility(self.obj_state)
self.y_actor.SetVisibility(self.obj_state)
self.z_actor.SetVisibility(self.obj_state)
self.Refresh()
    def OnUpdateTracts(self, root=None, affine_vtk=None, coord_offset=None):
        """Display the tract bundle *root* transformed by *affine_vtk*, and move
        the seed marker to *coord_offset* when present."""
        mapper = vtk.vtkCompositePolyDataMapper2()
        mapper.SetInputDataObject(root)
        self.actor_tracts = vtk.vtkActor()
        self.actor_tracts.SetMapper(mapper)
        self.actor_tracts.SetUserMatrix(affine_vtk)
        self.ren.AddActor(self.actor_tracts)
        if self.mark_actor:
            self.mark_actor.SetPosition(coord_offset)
        self.Refresh()
def OnRemoveTracts(self):
if self.actor_tracts:
self.ren.RemoveActor(self.actor_tracts)
self.actor_tracts = None
self.Refresh()
    def __bind_events_wx(self):
        """Placeholder for wx event bindings (currently none are active)."""
        #self.Bind(wx.EVT_SIZE, self.OnSize)
        # self.canvas.subscribe_event('LeftButtonPressEvent', self.on_insert_point)
        pass
    def on_insert_point(self, evt):
        """Debug handler: append the clicked position to the polygon and dump
        the canvas drawing to /tmp/polygon.png.

        NOTE(review): `imsave` is presumably imported at the top of the file —
        confirm; also the hard-coded /tmp path is debug-only.
        """
        pos = evt.position
        self.polygon.append_point(pos)
        self.canvas.Refresh()
        arr = self.canvas.draw_element_to_array([self.polygon,])
        imsave('/tmp/polygon.png', arr)
    def SetInteractorStyle(self, state):
        """Switch the interactor style to the one registered for *state*.

        Calls CleanUp on the outgoing style and SetUp on the incoming one when
        those hooks exist.
        """
        cleanup = getattr(self.style, 'CleanUp', None)
        if cleanup:
            self.style.CleanUp()
        del self.style
        style = styles.Styles.get_style(state)(self)
        setup = getattr(style, 'SetUp', None)
        if setup:
            style.SetUp()
        self.style = style
        self.interactor.SetInteractorStyle(style)
        self.interactor.Render()
        self.state = state
def enable_style(self, style):
if styles.Styles.has_style(style):
new_state = self.interaction_style.AddState(style)
self.SetInteractorStyle(new_state)
else:
new_state = self.interaction_style.RemoveState(style)
self.SetInteractorStyle(new_state)
    def OnDisableStyle(self, style):
        """Remove *style* from the interaction state and apply the result."""
        new_state = self.interaction_style.RemoveState(style)
        self.SetInteractorStyle(new_state)
    def ResetCamClippingRange(self):
        """Reset the camera and its clipping range to fit the scene."""
        self.ren.ResetCamera()
        self.ren.ResetCameraClippingRange()
    def SetVolumeCameraState(self, camera_state):
        """Enable/disable automatic camera follow in SetVolumeCamera."""
        self.camera_state = camera_state
# def SetVolumeCamera(self, arg, position):
    def SetVolumeCamera(self, cam_focus):
        """Follow *cam_focus* with the camera, keeping the current
        focus-to-camera distance; only active when camera_state is set.

        The approach direction is taken from the target point when the coil is
        shown, otherwise from the initial focal point.
        """
        if self.camera_state:
            # TODO: exclude dependency on initial focus
            # cam_focus = np.array(bases.flip_x(position[:3]))
            # cam_focus = np.array(bases.flip_x(position))
            cam = self.ren.GetActiveCamera()
            if self.initial_focus is None:
                self.initial_focus = np.array(cam.GetFocalPoint())
            cam_pos0 = np.array(cam.GetPosition())
            cam_focus0 = np.array(cam.GetFocalPoint())
            v0 = cam_pos0 - cam_focus0
            v0n = np.sqrt(inner1d(v0, v0))  # preserved viewing distance
            if self.obj_state:
                v1 = (cam_focus[0] - self.pTarget[0], cam_focus[1] - self.pTarget[1], cam_focus[2] - self.pTarget[2])
            else:
                v1 = (cam_focus - self.initial_focus)
            v1n = np.sqrt(inner1d(v1, v1))
            if not v1n:
                # direction vector is zero: avoid division by zero
                v1n = 1.0
            cam_pos = (v1/v1n)*v0n + cam_focus
            cam.SetFocalPoint(cam_focus)
            cam.SetPosition(cam_pos)
        # It works without doing the reset. Check with trackers if there is any difference.
        # Need to be outside condition for sphere marker position update
        # self.ren.ResetCameraClippingRange()
        # self.ren.ResetCamera()
        #self.interactor.Render()
        self.Refresh()
    def OnExportSurface(self, filename, filetype):
        """Export the rendered scene for scene-level formats.

        NOTE(review): the guard skips the surface formats (STL/VTP/PLY/
        STL-ASCII) — presumably those are exported elsewhere and only the
        scene exporters (RIB/VRML/OBJ/...) go through _export_surface here;
        confirm against the caller. On Windows the file is pre-created so
        GetShortPathName can resolve a short (ASCII-safe) path.
        """
        if filetype not in (const.FILETYPE_STL,
                            const.FILETYPE_VTP,
                            const.FILETYPE_PLY,
                            const.FILETYPE_STL_ASCII):
            if _has_win32api:
                utils.touch(filename)
                win_filename = win32api.GetShortPathName(filename)
                self._export_surface(win_filename, filetype)
            else:
                self._export_surface(filename, filetype)
def _export_surface(self, filename, filetype):
fileprefix = filename.split(".")[-2]
renwin = self.interactor.GetRenderWindow()
if filetype == const.FILETYPE_RIB:
writer = vtk.vtkRIBExporter()
writer.SetFilePrefix(fileprefix)
writer.SetTexturePrefix(fileprefix)
writer.SetInput(renwin)
writer.Write()
elif filetype == const.FILETYPE_VRML:
writer = vtk.vtkVRMLExporter()
writer.SetFileName(filename)
writer.SetInput(renwin)
writer.Write()
elif filetype == const.FILETYPE_X3D:
writer = vtk.vtkX3DExporter()
writer.SetInput(renwin)
writer.SetFileName(filename)
writer.Update()
writer.Write()
elif filetype == const.FILETYPE_OBJ:
writer = vtk.vtkOBJExporter()
writer.SetFilePrefix(fileprefix)
writer.SetInput(renwin)
writer.Write()
elif filetype == const.FILETYPE_IV:
writer = vtk.vtkIVExporter()
writer.SetFileName(filename)
writer.SetInput(renwin)
writer.Write()
    def OnEnableBrightContrast(self):
        """Attach mouse observers to the current style for window/level dragging."""
        style = self.style
        style.AddObserver("MouseMoveEvent", self.OnMove)
        style.AddObserver("LeftButtonPressEvent", self.OnClick)
        style.AddObserver("LeftButtonReleaseEvent", self.OnRelease)
def OnDisableBrightContrast(self):
style = vtk.vtkInteractorStyleTrackballCamera()
self.interactor.SetInteractorStyle(style)
self.style = style
    def OnSetWindowLevelText(self, ww, wl):
        """Update the WL/WW text overlay when a raycasting volume is shown."""
        if self.raycasting_volume:
            self.text.SetValue("WL: %d  WW: %d"%(wl, ww))
            # self.canvas.modified = True
def OnShowRaycasting(self):
if not self.raycasting_volume:
self.raycasting_volume = True
# self._to_show_ball += 1
# self._check_and_set_ball_visibility()
if self.on_wl:
self.text.Show()
    def OnHideRaycasting(self):
        """Mark the raycasting volume as hidden and hide the WL/WW text."""
        self.raycasting_volume = False
        self.text.Hide()
        # self._to_show_ball -= 1
        # self._check_and_set_ball_visibility()
    def OnSize(self, evt):
        """wx size handler: re-render and let the event propagate."""
        self.UpdateRender()
        self.Refresh()
        self.interactor.UpdateWindowUI()
        self.interactor.Update()
        evt.Skip()
def ChangeBackgroundColour(self, colour):
self.ren.SetBackground(colour[:3])
self.UpdateRender()
    def LoadActor(self, actor):
        """Add a surface actor to the scene; on first load switch to the front
        view, afterwards just refit the camera."""
        self.added_actor = 1
        ren = self.ren
        ren.AddActor(actor)
        if not (self.view_angle):
            self.SetViewAngle(const.VOL_FRONT)
            self.view_angle = 1
        else:
            ren.ResetCamera()
            ren.ResetCameraClippingRange()
        #self.ShowOrientationCube()
        self.interactor.Render()
        # self._to_show_ball += 1
        # self._check_and_set_ball_visibility()
    def RemoveActor(self, actor):
        """Remove *actor* from the scene and re-render."""
        utils.debug("RemoveActor")
        ren = self.ren
        ren.RemoveActor(actor)
        self.interactor.Render()
        # self._to_show_ball -= 1
        # self._check_and_set_ball_visibility()
    def RemoveAllActor(self):
        """Clear every prop from the renderer and request a re-render."""
        utils.debug("RemoveAllActor")
        self.ren.RemoveAllProps()
        Publisher.sendMessage('Render volume viewer')
    def LoadSlicePlane(self):
        """Instantiate the three orthogonal slice-plane widgets."""
        self.slice_plane = SlicePlane()
    def LoadVolume(self, volume, colour, ww, wl):
        """Add a raycasting *volume* to the scene with the given background
        colour and window-level text; fit the camera."""
        self.raycasting_volume = True
        # self._to_show_ball += 1
        # self._check_and_set_ball_visibility()
        self.light = self.ren.GetLights().GetNextItem()
        self.ren.AddVolume(volume)
        self.text.SetValue("WL: %d  WW: %d"%(wl, ww))
        if self.on_wl:
            self.text.Show()
        else:
            self.text.Hide()
        self.ren.SetBackground(colour)
        if not (self.view_angle):
            self.SetViewAngle(const.VOL_FRONT)
        else:
            self.ren.ResetCamera()
            self.ren.ResetCameraClippingRange()
        self.UpdateRender()
    def UnloadVolume(self, volume):
        """Remove the raycasting *volume* from the scene and clear the flag."""
        self.ren.RemoveVolume(volume)
        del volume
        self.raycasting_volume = False
        # self._to_show_ball -= 1
        # self._check_and_set_ball_visibility()
def load_mask_preview(self, mask_3d_actor, flag=True):
if flag:
self.ren.AddVolume(mask_3d_actor)
else:
self.ren.RemoveVolume(mask_3d_actor)
if self.ren.GetActors().GetNumberOfItems() == 0 and self.ren.GetVolumes().GetNumberOfItems() == 1:
self.ren.ResetCamera()
self.ren.ResetCameraClippingRange()
    def remove_mask_preview(self, mask_3d_actor):
        """Remove the mask-preview volume from the scene."""
        self.ren.RemoveVolume(mask_3d_actor)
    def OnSetViewAngle(self, view):
        """Pubsub handler delegating to SetViewAngle."""
        self.SetViewAngle(view)
    def SetViewAngle(self, view):
        """Place the camera at the preset position/up-vector for *view* and
        refit it to the scene.

        NOTE(review): `proj`/`orig_orien` are computed but never used here —
        likely leftovers; confirm Project() has no needed side effect before
        removing them.
        """
        cam = self.ren.GetActiveCamera()
        cam.SetFocalPoint(0,0,0)
        proj = prj.Project()
        orig_orien = proj.original_orientation
        xv,yv,zv = const.VOLUME_POSITION[const.AXIAL][0][view]
        xp,yp,zp = const.VOLUME_POSITION[const.AXIAL][1][view]
        cam.SetViewUp(xv,yv,zv)
        cam.SetPosition(xp,yp,zp)
        self.ren.ResetCameraClippingRange()
        self.ren.ResetCamera()
        self.interactor.Render()
    def ShowOrientationCube(self):
        """Display an annotated orientation cube (A/P, L/R, S/I) in the corner.

        NOTE(review): `orientation_widget` (and `axes`) are local variables;
        vtkOrientationMarkerWidget is usually kept as an attribute so it is
        not garbage-collected — confirm this works if the method is re-enabled
        (it is currently commented out at its call site).
        """
        cube = vtk.vtkAnnotatedCubeActor()
        cube.GetXMinusFaceProperty().SetColor(1,0,0)
        cube.GetXPlusFaceProperty().SetColor(1,0,0)
        cube.GetYMinusFaceProperty().SetColor(0,1,0)
        cube.GetYPlusFaceProperty().SetColor(0,1,0)
        cube.GetZMinusFaceProperty().SetColor(0,0,1)
        cube.GetZPlusFaceProperty().SetColor(0,0,1)
        cube.GetTextEdgesProperty().SetColor(0,0,0)
        # anatomic labelling
        cube.SetXPlusFaceText ("A")
        cube.SetXMinusFaceText("P")
        cube.SetYPlusFaceText ("L")
        cube.SetYMinusFaceText("R")
        cube.SetZPlusFaceText ("S")
        cube.SetZMinusFaceText("I")
        axes = vtk.vtkAxesActor()
        axes.SetShaftTypeToCylinder()
        axes.SetTipTypeToCone()
        axes.SetXAxisLabelText("X")
        axes.SetYAxisLabelText("Y")
        axes.SetZAxisLabelText("Z")
        #axes.SetNormalizedLabelPosition(.5, .5, .5)
        orientation_widget = vtk.vtkOrientationMarkerWidget()
        orientation_widget.SetOrientationMarker(cube)
        orientation_widget.SetViewport(0.85,0.85,1.0,1.0)
        #orientation_widget.SetOrientationMarker(axes)
        orientation_widget.SetInteractor(self.interactor)
        orientation_widget.SetEnabled(1)
        orientation_widget.On()
        orientation_widget.InteractiveOff()
    def UpdateRender(self):
        """Trigger an immediate re-render of the interactor."""
        self.interactor.Render()
    def SetWidgetInteractor(self, widget=None):
        """Bind a VTK widget to this viewer's (wrapped) interactor."""
        widget.SetInteractor(self.interactor._Iren)
    def AppendActor(self, actor):
        """Add *actor* to the main renderer (no re-render)."""
        self.ren.AddActor(actor)
def Reposition3DPlane(self, plane_label):
if not(self.added_actor) and not(self.raycasting_volume):
if not(self.repositioned_axial_plan) and (plane_label == 'Axial'):
self.SetViewAngle(const.VOL_ISO)
self.repositioned_axial_plan = 1
elif not(self.repositioned_sagital_plan) and (plane_label == 'Sagital'):
self.SetViewAngle(const.VOL_ISO)
self.repositioned_sagital_plan = 1
elif not(self.repositioned_coronal_plan) and (plane_label == 'Coronal'):
self.SetViewAngle(const.VOL_ISO)
self.repositioned_coronal_plan = 1
# def _check_and_set_ball_visibility(self):
# #TODO: When creating Raycasting volume and cross is pressed, it is not
# # automatically creating the ball reference.
# # print("mode_cross, show_ball, ball_vis ", self._mode_cross, self._to_show_ball, self._ball_ref_visibility)
# if self._mode_cross:
# if self._to_show_ball > 0 and not self._ball_ref_visibility:
# self.ActivateBallReference()
# self.interactor.Render()
# elif not self._to_show_ball and self._ball_ref_visibility:
# self.RemoveBallReference()
# self.interactor.Render()
class SlicePlane:
    def __init__(self):
        """Create the three slice-plane widgets and subscribe to pubsub events."""
        project = prj.Project()
        self.original_orientation = project.original_orientation
        self.Create()
        self.enabled = False
        self.__bind_evt()
    def __bind_evt(self):
        """Subscribe enable/disable/slice-change handlers to pubsub topics."""
        Publisher.subscribe(self.Enable, 'Enable plane')
        Publisher.subscribe(self.Disable, 'Disable plane')
        Publisher.subscribe(self.ChangeSlice, 'Change slice from slice plane')
        Publisher.subscribe(self.UpdateAllSlice, 'Update all slice')
    def Create(self):
        """Build the three vtkImagePlaneWidget instances.

        plane_x = sagittal (blue), plane_y = coronal (green), plane_z = axial
        (red). Mouse buttons are disabled and cursors hidden; each widget is
        bound to the volume viewer's interactor via pubsub.
        """
        plane_x = self.plane_x = vtk.vtkImagePlaneWidget()
        plane_x.InteractionOff()
        #Publisher.sendMessage('Input Image in the widget',
                                                #(plane_x, 'SAGITAL'))
        plane_x.SetPlaneOrientationToXAxes()
        plane_x.TextureVisibilityOn()
        plane_x.SetLeftButtonAction(0)
        plane_x.SetRightButtonAction(0)
        plane_x.SetMiddleButtonAction(0)
        cursor_property = plane_x.GetCursorProperty()
        cursor_property.SetOpacity(0)
        plane_y = self.plane_y = vtk.vtkImagePlaneWidget()
        plane_y.DisplayTextOff()
        #Publisher.sendMessage('Input Image in the widget',
                                                #(plane_y, 'CORONAL'))
        plane_y.SetPlaneOrientationToYAxes()
        plane_y.TextureVisibilityOn()
        plane_y.SetLeftButtonAction(0)
        plane_y.SetRightButtonAction(0)
        plane_y.SetMiddleButtonAction(0)
        # NOTE(review): this prop1 is unused and shadowed by the plane_x
        # property fetched further down.
        prop1 = plane_y.GetPlaneProperty()
        cursor_property = plane_y.GetCursorProperty()
        cursor_property.SetOpacity(0)
        plane_z = self.plane_z = vtk.vtkImagePlaneWidget()
        plane_z.InteractionOff()
        #Publisher.sendMessage('Input Image in the widget',
                                                #(plane_z, 'AXIAL'))
        plane_z.SetPlaneOrientationToZAxes()
        plane_z.TextureVisibilityOn()
        plane_z.SetLeftButtonAction(0)
        plane_z.SetRightButtonAction(0)
        plane_z.SetMiddleButtonAction(0)
        cursor_property = plane_z.GetCursorProperty()
        cursor_property.SetOpacity(0)
        prop3 = plane_z.GetPlaneProperty()
        prop3.SetColor(1, 0, 0)
        selected_prop3 = plane_z.GetSelectedPlaneProperty()
        selected_prop3.SetColor(1,0,0)
        prop1 = plane_x.GetPlaneProperty()
        prop1.SetColor(0, 0, 1)
        selected_prop1 = plane_x.GetSelectedPlaneProperty()
        selected_prop1.SetColor(0, 0, 1)
        prop2 = plane_y.GetPlaneProperty()
        prop2.SetColor(0, 1, 0)
        selected_prop2 = plane_y.GetSelectedPlaneProperty()
        selected_prop2.SetColor(0, 1, 0)
        Publisher.sendMessage('Set Widget Interactor', widget=plane_x)
        Publisher.sendMessage('Set Widget Interactor', widget=plane_y)
        Publisher.sendMessage('Set Widget Interactor', widget=plane_z)
        self.Render()
def Enable(self, plane_label=None):
if plane_label:
if(plane_label == "Axial"):
self.plane_z.On()
elif(plane_label == "Coronal"):
self.plane_y.On()
elif(plane_label == "Sagital"):
self.plane_x.On()
Publisher.sendMessage('Reposition 3D Plane', plane_label=plane_label)
else:
self.plane_z.On()
self.plane_x.On()
self.plane_y.On()
Publisher.sendMessage('Set volume view angle',
view=const.VOL_ISO)
self.Render()
def Disable(self, plane_label=None):
if plane_label:
if(plane_label == "Axial"):
self.plane_z.Off()
elif(plane_label == "Coronal"):
self.plane_y.Off()
elif(plane_label == "Sagital"):
self.plane_x.Off()
else:
self.plane_z.Off()
self.plane_x.Off()
self.plane_y.Off()
self.Render()
def Render(self):
Publisher.sendMessage('Render volume viewer')
def ChangeSlice(self, orientation, index):
if orientation == "CORONAL" and self.plane_y.GetEnabled():
Publisher.sendMessage('Update slice 3D',
widget=self.plane_y,
orientation=orientation)
self.Render()
elif orientation == "SAGITAL" and self.plane_x.GetEnabled():
Publisher.sendMessage('Update slice 3D',
widget=self.plane_x,
orientation=orientation)
self.Render()
elif orientation == 'AXIAL' and self.plane_z.GetEnabled() :
Publisher.sendMessage('Update slice 3D',
widget=self.plane_z,
orientation=orientation)
self.Render()
def UpdateAllSlice(self):
Publisher.sendMessage('Update slice 3D',
widget=self.plane_y,
orientation="CORONAL")
Publisher.sendMessage('Update slice 3D',
widget=self.plane_x,
orientation="SAGITAL")
Publisher.sendMessage('Update slice 3D',
widget=self.plane_z,
orientation="AXIAL")
def DeletePlanes(self):
del self.plane_x
del self.plane_y
del self.plane_z
| gpl-2.0 |
alrusdi/lettuce | tests/integration/lib/Django-1.2.5/tests/regressiontests/forms/tests/models.py | 39 | 9207 | # -*- coding: utf-8 -*-
import datetime
from django.core.files.uploadedfile import SimpleUploadedFile
from django.conf import settings
from django.db import connection
from django.forms import Form, ModelForm, FileField, ModelChoiceField
from django.test import TestCase
from regressiontests.forms.models import ChoiceModel, ChoiceOptionModel, ChoiceFieldModel, FileModel, Group, BoundaryModel, Defaults
class ChoiceFieldForm(ModelForm):
    """ModelForm built automatically from every field of ChoiceFieldModel."""
    class Meta:
        model = ChoiceFieldModel
class FileForm(Form):
    """Minimal form with a single required file-upload field."""
    file1 = FileField()
class TestTicket12510(TestCase):
    ''' It is not necessary to generate choices for ModelChoiceField (regression test for #12510). '''
    def setUp(self):
        self.groups = [Group.objects.create(name=name) for name in 'abc']
        self.old_debug = settings.DEBUG
        # turn debug on to get access to connection.queries
        settings.DEBUG = True
    def tearDown(self):
        # restore the DEBUG flag flipped on in setUp
        settings.DEBUG = self.old_debug
    def test_choices_not_fetched_when_not_rendering(self):
        # Cleaning a single value must not evaluate the whole queryset;
        # only rendering the widget needs the complete choice list.
        initial_queries = len(connection.queries)
        field = ModelChoiceField(Group.objects.order_by('-name'))
        self.assertEqual('a', field.clean(self.groups[0].pk).name)
        # only one query is required to pull the model from DB
        self.assertEqual(initial_queries+1, len(connection.queries))
class ModelFormCallableModelDefault(TestCase):
    """Regression tests for ForeignKey defaults/initials on ModelForms."""
    def test_no_empty_option(self):
        "If a model's ForeignKey has blank=False and a default, no empty option is created (Refs #10792)."
        option = ChoiceOptionModel.objects.create(name='default')
        choices = list(ChoiceFieldForm().fields['choice'].choices)
        self.assertEquals(len(choices), 1)
        self.assertEquals(choices[0], (option.pk, unicode(option)))
    def test_callable_initial_value(self):
        "The initial value for a callable default returning a queryset is the pk (refs #13769)"
        obj1 = ChoiceOptionModel.objects.create(id=1, name='default')
        obj2 = ChoiceOptionModel.objects.create(id=2, name='option 2')
        obj3 = ChoiceOptionModel.objects.create(id=3, name='option 3')
        # The first option must be pre-selected and echoed into the
        # hidden "initial-*" inputs used for change detection.
        self.assertEquals(ChoiceFieldForm().as_p(), """<p><label for="id_choice">Choice:</label> <select name="choice" id="id_choice">
<option value="1" selected="selected">ChoiceOption 1</option>
<option value="2">ChoiceOption 2</option>
<option value="3">ChoiceOption 3</option>
</select><input type="hidden" name="initial-choice" value="1" id="initial-id_choice" /></p>
<p><label for="id_choice_int">Choice int:</label> <select name="choice_int" id="id_choice_int">
<option value="1" selected="selected">ChoiceOption 1</option>
<option value="2">ChoiceOption 2</option>
<option value="3">ChoiceOption 3</option>
</select><input type="hidden" name="initial-choice_int" value="1" id="initial-id_choice_int" /></p>
<p><label for="id_multi_choice">Multi choice:</label> <select multiple="multiple" name="multi_choice" id="id_multi_choice">
<option value="1" selected="selected">ChoiceOption 1</option>
<option value="2">ChoiceOption 2</option>
<option value="3">ChoiceOption 3</option>
</select><input type="hidden" name="initial-multi_choice" value="1" id="initial-id_multi_choice_0" /> Hold down "Control", or "Command" on a Mac, to select more than one.</p>
<p><label for="id_multi_choice_int">Multi choice int:</label> <select multiple="multiple" name="multi_choice_int" id="id_multi_choice_int">
<option value="1" selected="selected">ChoiceOption 1</option>
<option value="2">ChoiceOption 2</option>
<option value="3">ChoiceOption 3</option>
</select><input type="hidden" name="initial-multi_choice_int" value="1" id="initial-id_multi_choice_int_0" /> Hold down "Control", or "Command" on a Mac, to select more than one.</p>""")
    def test_initial_instance_value(self):
        "Initial instances for model fields may also be instances (refs #7287)"
        obj1 = ChoiceOptionModel.objects.create(id=1, name='default')
        obj2 = ChoiceOptionModel.objects.create(id=2, name='option 2')
        obj3 = ChoiceOptionModel.objects.create(id=3, name='option 3')
        # Model instances and querysets given as `initial` must resolve
        # to the corresponding pks when the form renders.
        self.assertEquals(ChoiceFieldForm(initial={
                'choice': obj2,
                'choice_int': obj2,
                'multi_choice': [obj2,obj3],
                'multi_choice_int': ChoiceOptionModel.objects.exclude(name="default"),
            }).as_p(), """<p><label for="id_choice">Choice:</label> <select name="choice" id="id_choice">
<option value="1">ChoiceOption 1</option>
<option value="2" selected="selected">ChoiceOption 2</option>
<option value="3">ChoiceOption 3</option>
</select><input type="hidden" name="initial-choice" value="2" id="initial-id_choice" /></p>
<p><label for="id_choice_int">Choice int:</label> <select name="choice_int" id="id_choice_int">
<option value="1">ChoiceOption 1</option>
<option value="2" selected="selected">ChoiceOption 2</option>
<option value="3">ChoiceOption 3</option>
</select><input type="hidden" name="initial-choice_int" value="2" id="initial-id_choice_int" /></p>
<p><label for="id_multi_choice">Multi choice:</label> <select multiple="multiple" name="multi_choice" id="id_multi_choice">
<option value="1">ChoiceOption 1</option>
<option value="2" selected="selected">ChoiceOption 2</option>
<option value="3" selected="selected">ChoiceOption 3</option>
</select><input type="hidden" name="initial-multi_choice" value="2" id="initial-id_multi_choice_0" />
<input type="hidden" name="initial-multi_choice" value="3" id="initial-id_multi_choice_1" /> Hold down "Control", or "Command" on a Mac, to select more than one.</p>
<p><label for="id_multi_choice_int">Multi choice int:</label> <select multiple="multiple" name="multi_choice_int" id="id_multi_choice_int">
<option value="1">ChoiceOption 1</option>
<option value="2" selected="selected">ChoiceOption 2</option>
<option value="3" selected="selected">ChoiceOption 3</option>
</select><input type="hidden" name="initial-multi_choice_int" value="2" id="initial-id_multi_choice_int_0" />
<input type="hidden" name="initial-multi_choice_int" value="3" id="initial-id_multi_choice_int_1" /> Hold down "Control", or "Command" on a Mac, to select more than one.</p>""")
class FormsModelTestCase(TestCase):
    """Assorted model/form interaction regression tests."""
    def test_unicode_filename(self):
        # FileModel with unicode filename and data #########################
        f = FileForm(data={}, files={'file1': SimpleUploadedFile('我隻氣墊船裝滿晒鱔.txt', 'मेरी मँडराने वाली नाव सर्पमीनों से भरी ह')}, auto_id=False)
        self.assertTrue(f.is_valid())
        self.assertTrue('file1' in f.cleaned_data)
        m = FileModel.objects.create(file=f.cleaned_data['file1'])
        # the storage backend stores under tests/ and keeps the unicode name
        self.assertEqual(m.file.name, u'tests/\u6211\u96bb\u6c23\u588a\u8239\u88dd\u6eff\u6652\u9c54.txt')
        m.delete()
    def test_boundary_conditions(self):
        # Boundary conditions on a PostitiveIntegerField #########################
        class BoundaryForm(ModelForm):
            class Meta:
                model = BoundaryModel
        f = BoundaryForm({'positive_integer': 100})
        self.assertTrue(f.is_valid())
        # zero is allowed, negative values are not
        f = BoundaryForm({'positive_integer': 0})
        self.assertTrue(f.is_valid())
        f = BoundaryForm({'positive_integer': -100})
        self.assertFalse(f.is_valid())
    def test_formfield_initial(self):
        # Formfield initial values ########
        # If the model has default values for some fields, they are used as the formfield
        # initial values.
        class DefaultsForm(ModelForm):
            class Meta:
                model = Defaults
        self.assertEqual(DefaultsForm().fields['name'].initial, u'class default value')
        self.assertEqual(DefaultsForm().fields['def_date'].initial, datetime.date(1980, 1, 1))
        self.assertEqual(DefaultsForm().fields['value'].initial, 42)
        # a callable default is re-evaluated for each form instance
        r1 = DefaultsForm()['callable_default'].as_widget()
        r2 = DefaultsForm()['callable_default'].as_widget()
        self.assertNotEqual(r1, r2)
        # In a ModelForm that is passed an instance, the initial values come from the
        # instance's values, not the model's defaults.
        foo_instance = Defaults(name=u'instance value', def_date=datetime.date(1969, 4, 4), value=12)
        instance_form = DefaultsForm(instance=foo_instance)
        self.assertEqual(instance_form.initial['name'], u'instance value')
        self.assertEqual(instance_form.initial['def_date'], datetime.date(1969, 4, 4))
        self.assertEqual(instance_form.initial['value'], 12)
        from django.forms import CharField
        class ExcludingForm(ModelForm):
            name = CharField(max_length=255)
            class Meta:
                model = Defaults
                exclude = ['name', 'callable_default']
        # An excluded model field is ignored on save even when a form
        # field with the same name is declared: the model default wins.
        f = ExcludingForm({'name': u'Hello', 'value': 99, 'def_date': datetime.date(1999, 3, 2)})
        self.assertTrue(f.is_valid())
        self.assertEqual(f.cleaned_data['name'], u'Hello')
        obj = f.save()
        self.assertEqual(obj.name, u'class default value')
        self.assertEqual(obj.value, 99)
        self.assertEqual(obj.def_date, datetime.date(1999, 3, 2))
| gpl-3.0 |
szilveszter/django | django/contrib/redirects/middleware.py | 109 | 1861 | from __future__ import unicode_literals
from django.apps import apps
from django.conf import settings
from django.contrib.redirects.models import Redirect
from django.contrib.sites.shortcuts import get_current_site
from django.core.exceptions import ImproperlyConfigured
from django import http
class RedirectFallbackMiddleware(object):
    """Rescue 404 responses by consulting the redirects table for the
    current site, optionally retrying with an appended slash when
    settings.APPEND_SLASH is enabled.

    A redirect with an empty new_path yields 410 Gone; otherwise a
    permanent redirect to new_path is returned.
    """

    # Defined as class-level attributes to be subclassing-friendly.
    response_gone_class = http.HttpResponseGone
    response_redirect_class = http.HttpResponsePermanentRedirect

    def __init__(self):
        if not apps.is_installed('django.contrib.sites'):
            raise ImproperlyConfigured(
                "You cannot use RedirectFallbackMiddleware when "
                "django.contrib.sites is not installed."
            )

    def _get_redirect(self, site, path):
        """Return the Redirect for (site, path), or None when absent."""
        try:
            return Redirect.objects.get(site=site, old_path=path)
        except Redirect.DoesNotExist:
            return None

    def process_response(self, request, response):
        # No need to check for a redirect for non-404 responses.
        if response.status_code != 404:
            return response

        full_path = request.get_full_path()
        current_site = get_current_site(request)

        r = self._get_redirect(current_site, full_path)
        # Only fall back to the slash-appended path when no exact match
        # exists; the original code always ran this second query and let
        # its result shadow an exact match (fixed upstream in Django).
        if r is None and settings.APPEND_SLASH and not request.path.endswith('/'):
            # Try appending a trailing slash.
            path_len = len(request.path)
            full_path = full_path[:path_len] + '/' + full_path[path_len:]
            r = self._get_redirect(current_site, full_path)

        if r is not None:
            if r.new_path == '':
                return self.response_gone_class()
            return self.response_redirect_class(r.new_path)

        # No redirect was found. Return the response.
        return response
| bsd-3-clause |
ryandougherty/mwa-capstone | MWA_Tools/build/matplotlib/doc/mpl_examples/widgets/menu.py | 3 | 4882 | import numpy as np
import matplotlib
import matplotlib.colors as colors
import matplotlib.patches as patches
import matplotlib.mathtext as mathtext
import matplotlib.pyplot as plt
import matplotlib.artist as artist
import matplotlib.image as image
class ItemProperties:
    """Visual style for a menu item: font size, label/background colours
    (kept both as names and as pre-resolved RGB triples) and alpha."""
    def __init__(self, fontsize=14, labelcolor='black', bgcolor='yellow',
                 alpha=1.0):
        self.fontsize = fontsize
        self.labelcolor = labelcolor
        self.bgcolor = bgcolor
        self.alpha = alpha

        # resolve colour names to RGB once so per-pixel writes are cheap
        self.labelcolor_rgb = colors.colorConverter.to_rgb(labelcolor)
        self.bgcolor_rgb = colors.colorConverter.to_rgb(bgcolor)
class MenuItem(artist.Artist):
    """One clickable menu entry: a rasterized mathtext label drawn over a
    rectangle, with separate normal and hover colour properties.

    Fires *on_select(self)* when a mouse button is released over it.
    """
    parser = mathtext.MathTextParser("Bitmap")
    padx = 5
    pady = 5
    def __init__(self, fig, labelstr, props=None, hoverprops=None,
                 on_select=None):
        artist.Artist.__init__(self)

        self.set_figure(fig)
        self.labelstr = labelstr

        if props is None:
            props = ItemProperties()
        if hoverprops is None:
            hoverprops = ItemProperties()

        self.props = props
        self.hoverprops = hoverprops
        self.on_select = on_select

        # rasterize the label once; x is the alpha mask, depth the baseline
        x, self.depth = self.parser.to_mask(
            labelstr, fontsize=props.fontsize, dpi=fig.dpi)

        if props.fontsize != hoverprops.fontsize:
            raise NotImplementedError(
                'support for different font sizes not implemented')

        self.labelwidth = x.shape[1]
        self.labelheight = x.shape[0]

        # RGBA image: alpha from the mask, RGB filled in by set_hover_props
        self.labelArray = np.zeros((x.shape[0], x.shape[1], 4))
        self.labelArray[:, :, -1] = x/255.

        self.label = image.FigureImage(fig, origin='upper')
        self.label.set_array(self.labelArray)

        # we'll update these later
        self.rect = patches.Rectangle((0,0), 1,1)

        self.set_hover_props(False)

        fig.canvas.mpl_connect('button_release_event', self.check_select)

    def check_select(self, event):
        """Invoke on_select when the release happened inside our rect."""
        over, junk = self.rect.contains(event)
        if not over:
            return

        if self.on_select is not None:
            self.on_select(self)

    def set_extent(self, x, y, w, h):
        """Place the rectangle at (x, y) with size (w, h) and align the
        label inside it; resets the hover state."""
        # (removed a leftover Python-2 debug `print x, y, w, h` here)
        self.rect.set_x(x)
        self.rect.set_y(y)
        self.rect.set_width(w)
        self.rect.set_height(h)

        self.label.ox = x+self.padx
        self.label.oy = y-self.depth+self.pady/2.

        self.rect._update_patch_transform()
        self.hover = False

    def draw(self, renderer):
        self.rect.draw(renderer)
        self.label.draw(renderer)

    def set_hover_props(self, b):
        """Recolour label and rectangle for hover (b=True) or normal state."""
        if b:
            props = self.hoverprops
        else:
            props = self.props

        r, g, b = props.labelcolor_rgb
        self.labelArray[:, :, 0] = r
        self.labelArray[:, :, 1] = g
        self.labelArray[:, :, 2] = b
        self.label.set_array(self.labelArray)
        self.rect.set(facecolor=props.bgcolor, alpha=props.alpha)

    def set_hover(self, event):
        'check the hover status of event and return true if status is changed'
        b,junk = self.rect.contains(event)

        changed = (b != self.hover)

        if changed:
            self.set_hover_props(b)

        self.hover = b
        return changed
class Menu:
    """Vertical stack of MenuItems anchored at a fixed figure position;
    routes motion events to the items for hover highlighting."""
    def __init__(self, fig, menuitems):

        self.figure = fig
        fig.suppressComposite = True

        self.menuitems = menuitems
        self.numitems = len(menuitems)

        # size every entry to the widest/tallest label
        maxw = max([item.labelwidth for item in menuitems])
        maxh = max([item.labelheight for item in menuitems])

        # NOTE(review): totalh is computed but never used
        totalh = self.numitems*maxh + (self.numitems+1)*2*MenuItem.pady

        x0 = 100
        y0 = 400

        width = maxw + 2*MenuItem.padx
        height = maxh+MenuItem.pady

        # lay the items out top-to-bottom starting at (x0, y0)
        for item in menuitems:
            left = x0
            bottom = y0-maxh-MenuItem.pady

            item.set_extent(left, bottom, width, height)

            fig.artists.append(item)
            y0 -= maxh + MenuItem.pady

        fig.canvas.mpl_connect('motion_notify_event', self.on_move)

    def on_move(self, event):
        """Redraw the canvas as soon as one item's hover state flips."""
        draw = False
        for item in self.menuitems:
            draw = item.set_hover(event)
            if draw:
                self.figure.canvas.draw()
                break
# --- demo: build a five-entry hover menu inside a figure -----------------
fig = plt.figure()
fig.subplots_adjust(left=0.3)
props = ItemProperties(labelcolor='black', bgcolor='yellow',
                       fontsize=15, alpha=0.2)
hoverprops = ItemProperties(labelcolor='white', bgcolor='blue',
                            fontsize=15, alpha=0.2)

menuitems = []
for label in ('open', 'close', 'save', 'save as', 'quit'):
    def on_select(item):
        # Python 2 print statement; item.labelstr is the clicked label
        print 'you selected', item.labelstr
    item = MenuItem(fig, label, props=props, hoverprops=hoverprops,
                    on_select=on_select)
    menuitems.append(item)

menu = Menu(fig, menuitems)
plt.show()
| gpl-2.0 |
mglukhikh/intellij-community | plugins/hg4idea/testData/bin/mercurial/mdiff.py | 92 | 11609 | # mdiff.py - diff and patch routines for mercurial
#
# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from i18n import _
import bdiff, mpatch, util
import re, struct, base85, zlib
def splitnewlines(text):
    '''like str.splitlines, but only split on newlines.'''
    parts = text.split('\n')
    # every part except the last was followed by a newline in the input
    out = [part + '\n' for part in parts[:-1]]
    # the final part is a line only if the text did not end with '\n'
    if parts[-1]:
        out.append(parts[-1])
    return out
class diffopts(object):
    '''context is the number of context lines
    text treats all files as text
    showfunc enables diff -p output
    git enables the git extended patch format
    nodates removes dates from diff headers
    ignorews ignores all whitespace changes in the diff
    ignorewsamount ignores changes in the amount of whitespace
    ignoreblanklines ignores changes whose lines are all blank
    upgrade generates git diffs to avoid data loss
    '''

    defaults = {
        'context': 3,
        'text': False,
        'showfunc': False,
        'git': False,
        'nodates': False,
        'ignorews': False,
        'ignorewsamount': False,
        'ignoreblanklines': False,
        'upgrade': False,
        }

    # one slot per known option (Python 2: dict.keys() returns a list)
    __slots__ = defaults.keys()

    def __init__(self, **opts):
        # Unknown keyword arguments are silently ignored; options that
        # are missing or None fall back to the class-level defaults.
        for k in self.__slots__:
            v = opts.get(k)
            if v is None:
                v = self.defaults[k]
            setattr(self, k, v)

        try:
            self.context = int(self.context)
        except ValueError:
            raise util.Abort(_('diff context lines count must be '
                               'an integer, not %r') % self.context)

    def copy(self, **kwargs):
        # Return a new diffopts with the given options overridden.
        opts = dict((k, getattr(self, k)) for k in self.defaults)
        opts.update(kwargs)
        return diffopts(**opts)
defaultopts = diffopts()
def wsclean(opts, text, blank=True):
    """Normalize whitespace in *text* according to the diff options.

    ignorews drops all whitespace, ignorewsamount collapses runs of
    whitespace to one; when *blank* is true and blank lines are
    ignored, consecutive newlines collapse too.
    """
    if opts.ignorews:
        text = bdiff.fixws(text, 1)
    elif opts.ignorewsamount:
        text = bdiff.fixws(text, 0)
    if blank and opts.ignoreblanklines:
        text = re.sub('\n+', '\n', text).strip('\n')
    return text
def splitblock(base1, lines1, base2, lines2, opts):
    """Yield ([start1, end1, start2, end2], type) blocks for two line
    ranges that match except for interwoven blank lines.

    type is '=' for runs of matching (non-blank) lines and '~' for runs
    of ignored blank lines; base1/base2 are the absolute offsets of the
    ranges within their files.
    """
    # The input lines matches except for interwoven blank lines. We
    # transform it into a sequence of matching blocks and blank blocks.
    # 1 marks a line with content after whitespace cleaning, 0 a blank.
    lines1 = [(wsclean(opts, l) and 1 or 0) for l in lines1]
    lines2 = [(wsclean(opts, l) and 1 or 0) for l in lines2]
    s1, e1 = 0, len(lines1)
    s2, e2 = 0, len(lines2)
    while s1 < e1 or s2 < e2:
        i1, i2, btype = s1, s2, '='
        if (i1 >= e1 or lines1[i1] == 0
            or i2 >= e2 or lines2[i2] == 0):
            # Consume the block of blank lines
            btype = '~'
            while i1 < e1 and lines1[i1] == 0:
                i1 += 1
            while i2 < e2 and lines2[i2] == 0:
                i2 += 1
        else:
            # Consume the matching lines
            # NOTE(review): the loop does not re-check i2 < e2; presumably
            # safe because both sides carry the same number of non-blank
            # lines here -- confirm against callers.
            while i1 < e1 and lines1[i1] == 1 and lines2[i2] == 1:
                i1 += 1
                i2 += 1
        yield [base1 + s1, base1 + i1, base2 + s2, base2 + i2], btype
        s1 = i1
        s2 = i2
def allblocks(text1, text2, opts=None, lines1=None, lines2=None, refine=False):
    """Return (block, type) tuples, where block is an mdiff.blocks
    line entry. type is '=' for blocks matching exactly one another
    (bdiff blocks), '!' for non-matching blocks and '~' for blocks
    matching only after having filtered blank lines. If refine is True,
    then '~' blocks are refined and are only made of blank lines.
    line1 and line2 are text1 and text2 split with splitnewlines() if
    they are already available.
    """
    if opts is None:
        opts = defaultopts
    if opts.ignorews or opts.ignorewsamount:
        text1 = wsclean(opts, text1, False)
        text2 = wsclean(opts, text2, False)
    diff = bdiff.blocks(text1, text2)
    for i, s1 in enumerate(diff):
        # The first match is special.
        # we've either found a match starting at line 0 or a match later
        # in the file.  If it starts later, old and new below will both be
        # empty and we'll continue to the next match.
        if i > 0:
            s = diff[i - 1]
        else:
            s = [0, 0, 0, 0]
        # the gap between the previous match and this one
        s = [s[1], s1[0], s[3], s1[2]]

        # bdiff sometimes gives huge matches past eof, this check eats them,
        # and deals with the special first match case described above
        if s[0] != s[1] or s[2] != s[3]:
            type = '!'
            if opts.ignoreblanklines:
                if lines1 is None:
                    lines1 = splitnewlines(text1)
                if lines2 is None:
                    lines2 = splitnewlines(text2)
                # gaps whose sides are equal once cleaned are only blanks
                old = wsclean(opts, "".join(lines1[s[0]:s[1]]))
                new = wsclean(opts, "".join(lines2[s[2]:s[3]]))
                if old == new:
                    type = '~'
            yield s, type
        yield s1, '='
def unidiff(a, ad, b, bd, fn1, fn2, opts=defaultopts):
    """Return a unified diff between texts *a* and *b* as one string.

    ad/bd are the date tags and fn1/fn2 the file names used for the
    '---'/'+++' header lines.  Returns '' when there is nothing to show.
    """
    def datetag(date, fn=None):
        # git diffs and nodates omit the date; names containing spaces
        # still get a bare tab so the header stays parseable
        if not opts.git and not opts.nodates:
            return '\t%s\n' % date
        if fn and ' ' in fn:
            return '\t\n'
        return '\n'

    if not a and not b:
        return ""
    epoch = util.datestr((0, 0))

    fn1 = util.pconvert(fn1)
    fn2 = util.pconvert(fn2)

    if not opts.text and (util.binary(a) or util.binary(b)):
        if a and b and len(a) == len(b) and a == b:
            return ""
        l = ['Binary file %s has changed\n' % fn1]
    elif not a:
        # file added: every line of b is an insertion
        b = splitnewlines(b)
        if a is None:
            l1 = '--- /dev/null%s' % datetag(epoch)
        else:
            l1 = "--- %s%s" % ("a/" + fn1, datetag(ad, fn1))
        l2 = "+++ %s%s" % ("b/" + fn2, datetag(bd, fn2))
        l3 = "@@ -0,0 +1,%d @@\n" % len(b)
        l = [l1, l2, l3] + ["+" + e for e in b]
    elif not b:
        # file removed: every line of a is a deletion
        a = splitnewlines(a)
        l1 = "--- %s%s" % ("a/" + fn1, datetag(ad, fn1))
        if b is None:
            l2 = '+++ /dev/null%s' % datetag(epoch)
        else:
            l2 = "+++ %s%s" % ("b/" + fn2, datetag(bd, fn2))
        l3 = "@@ -1,%d +0,0 @@\n" % len(a)
        l = [l1, l2, l3] + ["-" + e for e in a]
    else:
        al = splitnewlines(a)
        bl = splitnewlines(b)
        l = list(_unidiff(a, b, al, bl, opts=opts))
        if not l:
            return ""

        l.insert(0, "--- a/%s%s" % (fn1, datetag(ad, fn1)))
        l.insert(1, "+++ b/%s%s" % (fn2, datetag(bd, fn2)))

    # annotate lines lacking a trailing newline, diff(1)-style
    for ln in xrange(len(l)):
        if l[ln][-1] != '\n':
            l[ln] += "\n\ No newline at end of file\n"

    return "".join(l)
# creates a headerless unified diff
# t1 and t2 are the text to be diffed
# l1 and l2 are the text broken up into lines
def _unidiff(t1, t2, l1, l2, opts=defaultopts):
    """Yield the hunk lines of a headerless unified diff of t1 vs t2.

    l1 and l2 are the splitnewlines() line lists of t1 and t2.
    """
    def contextend(l, len):
        # clamp line index + context to the end of the file
        ret = l + opts.context
        if ret > len:
            ret = len
        return ret

    def contextstart(l):
        # clamp line index - context to the start of the file
        ret = l - opts.context
        if ret < 0:
            return 0
        return ret

    # mutable [last scan position, last function name] shared by hunks
    lastfunc = [0, '']
    def yieldhunk(hunk):
        (astart, a2, bstart, b2, delta) = hunk
        aend = contextend(a2, len(l1))
        alen = aend - astart
        blen = b2 - bstart + aend - a2

        func = ""
        if opts.showfunc:
            lastpos, func = lastfunc
            # walk backwards from the start of the context up to the start of
            # the previous hunk context until we find a line starting with an
            # alphanumeric char.
            for i in xrange(astart - 1, lastpos - 1, -1):
                if l1[i][0].isalnum():
                    func = ' ' + l1[i].rstrip()[:40]
                    lastfunc[1] = func
                    break
            # by recording this hunk's starting point as the next place to
            # start looking for function lines, we avoid reading any line in
            # the file more than once.
            lastfunc[0] = astart

        # zero-length hunk ranges report their start line as one less
        if alen:
            astart += 1
        if blen:
            bstart += 1

        yield "@@ -%d,%d +%d,%d @@%s\n" % (astart, alen,
                                           bstart, blen, func)
        for x in delta:
            yield x
        for x in xrange(a2, aend):
            yield ' ' + l1[x]

    # bdiff.blocks gives us the matching sequences in the files.  The loop
    # below finds the spaces between those matching sequences and translates
    # them into diff output.
    #
    hunk = None
    ignoredlines = 0
    for s, stype in allblocks(t1, t2, opts, l1, l2):
        a1, a2, b1, b2 = s
        if stype != '!':
            if stype == '~':
                # The diff context lines are based on t1 content. When
                # blank lines are ignored, the new lines offsets must
                # be adjusted as if equivalent blocks ('~') had the
                # same sizes on both sides.
                ignoredlines += (b2 - b1) - (a2 - a1)
            continue
        delta = []
        old = l1[a1:a2]
        new = l2[b1:b2]

        b1 -= ignoredlines
        b2 -= ignoredlines

        astart = contextstart(a1)
        bstart = contextstart(b1)
        prev = None
        if hunk:
            # join with the previous hunk if it falls inside the context
            if astart < hunk[1] + opts.context + 1:
                prev = hunk
                astart = hunk[1]
                bstart = hunk[3]
            else:
                for x in yieldhunk(hunk):
                    yield x
        if prev:
            # we've joined the previous hunk, record the new ending points.
            hunk[1] = a2
            hunk[3] = b2
            delta = hunk[4]
        else:
            # create a new hunk
            hunk = [astart, a2, bstart, b2, delta]

        # context before the change, then removals, then additions
        delta[len(delta):] = [' ' + x for x in l1[astart:a1]]
        delta[len(delta):] = ['-' + x for x in old]
        delta[len(delta):] = ['+' + x for x in new]

    if hunk:
        for x in yieldhunk(hunk):
            yield x
def b85diff(to, tn):
    '''print base85-encoded binary diff'''
    def fmtline(line):
        # prefix each base85 line with its raw length encoded as one
        # char: 1-26 -> 'A'-'Z', 27-52 -> 'a'-'z' (git binary patch format)
        l = len(line)
        if l <= 26:
            l = chr(ord('A') + l - 1)
        else:
            l = chr(l - 26 + ord('a') - 1)
        return '%c%s\n' % (l, base85.b85encode(line, True))

    def chunk(text, csize=52):
        # yield successive csize-byte slices of text
        l = len(text)
        i = 0
        while i < l:
            yield text[i:i + csize]
            i += csize

    if to is None:
        to = ''
    if tn is None:
        tn = ''

    if to == tn:
        return ''

    # TODO: deltas
    ret = []
    ret.append('GIT binary patch\n')
    ret.append('literal %s\n' % len(tn))
    # emit the zlib-compressed new contents, 52 raw bytes per line
    for l in chunk(zlib.compress(tn)):
        ret.append(fmtline(l))
    ret.append('\n')

    return ''.join(ret)
def patchtext(bin):
    """Concatenate the replacement data carried by binary patch *bin*.

    The patch is a sequence of fragments, each a 12-byte big-endian
    (start, end, length) header followed by ``length`` bytes of
    replacement text; the offsets are ignored here.
    """
    pos = 0
    t = []
    while pos < len(bin):
        p1, p2, l = struct.unpack(">lll", bin[pos:pos + 12])
        pos += 12
        t.append(bin[pos:pos + l])
        pos += l
    return "".join(t)
def patch(a, bin):
    """Apply binary patch *bin* to text *a* and return the result."""
    if len(a) == 0:
        # skip over trivial delta header
        return util.buffer(bin, 12)
    return mpatch.patches(a, [bin])
# similar to difflib.SequenceMatcher.get_matching_blocks
def get_matching_blocks(a, b):
    """Return (pos_a, pos_b, length) triples of matching spans of a and b."""
    return [(d[0], d[2], d[1] - d[0]) for d in bdiff.blocks(a, b)]
def trivialdiffheader(length):
    """Header for a trivial delta: replace the empty span (0, 0) with
    ``length`` bytes of data, as three big-endian 32-bit integers."""
    header = struct.pack(">lll", 0, 0, length)
    return header
patches = mpatch.patches
patchedsize = mpatch.patchedsize
textdiff = bdiff.bdiff
| apache-2.0 |
pvagner/orca | src/orca/scripts/apps/soffice/spellcheck.py | 1 | 3428 | # Orca
#
# Copyright 2015 Igalia, S.L.
#
# Author: Joanmarie Diggs <jdiggs@igalia.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., Franklin Street, Fifth Floor,
# Boston MA 02110-1301 USA.
"""Customized support for spellcheck in LibreOffice."""
__id__ = "$Id$"
__version__ = "$Revision$"
__date__ = "$Date$"
__copyright__ = "Copyright (c) 2015 Igalia, S.L."
__license__ = "LGPL"
import pyatspi
from orca import debug
from orca import messages
from orca import settings
from orca import spellcheck
class SpellCheck(spellcheck.SpellCheck):
    """LibreOffice-specific specialization of Orca's spellcheck
    presenter: locates the spellcheck window, its error widget and its
    suggestions list in LibreOffice's accessibility hierarchy."""

    def __init__(self, script):
        super().__init__(script, hasChangeToEntry=False)

    def _isCandidateWindow(self, window):
        """Return True if *window* looks like the spellcheck frame: a
        frame whose first child is a dialog without a page-tab list."""
        if window and window.childCount and window.getRole() == pyatspi.ROLE_FRAME:
            child = window[0]
            if child.getRole() == pyatspi.ROLE_DIALOG:
                isPageTabList = lambda x: x and x.getRole() == pyatspi.ROLE_PAGE_TAB_LIST
                if not pyatspi.findDescendant(child, isPageTabList):
                    return True

        return False

    def _findErrorWidget(self, root):
        """Find the named text widget showing the misspelled sentence."""
        isError = lambda x: x and x.getRole() == pyatspi.ROLE_TEXT and x.name \
                  and x.parent.getRole() != pyatspi.ROLE_COMBO_BOX
        return pyatspi.findDescendant(root, isError)

    def _findSuggestionsList(self, root):
        """Find the selectable list holding replacement suggestions."""
        isList = lambda x: x and x.getRole() == pyatspi.ROLE_LIST and x.name \
                 and 'Selection' in x.get_interfaces() \
                 and x.parent.getRole() != pyatspi.ROLE_COMBO_BOX
        return pyatspi.findDescendant(root, isList)

    def _getSuggestionIndexAndPosition(self, suggestion):
        """Return the (1-based index, total) of *suggestion* in its list."""
        index, total = self._script.utilities.getPositionAndSetSize(suggestion)
        return index + 1, total

    def getMisspelledWord(self):
        """Return the misspelled word: the attributed run inside the
        error widget, or the widget's whole text when no run exists."""
        try:
            text = self._errorWidget.queryText()
        except Exception:
            # was a bare except; narrowed so ^C and SystemExit propagate
            return ""

        for i in range(text.characterCount):
            attributes, start, end = text.getAttributeRun(i, False)
            if attributes and start != end:
                string = text.getText(start, end)
                break
        else:
            msg = "INFO: No text attributes for word in %s." % self._errorWidget
            debug.println(debug.LEVEL_INFO, msg)
            string = text.getText(0, -1)

        return string

    def presentContext(self):
        """Speak the sentence containing the error; return success."""
        if not self.isActive():
            return False

        try:
            text = self._errorWidget.queryText()
        except Exception:
            # was a bare except; narrowed so ^C and SystemExit propagate
            return False

        string = text.getText(0, -1)
        if not string:
            return False

        voice = self._script.voices.get(settings.DEFAULT_VOICE)
        self._script.speakMessage(messages.MISSPELLED_WORD_CONTEXT % string, voice=voice)
        return True
| lgpl-2.1 |
3dfxsoftware/cbss-addons | openerp_print/pyPdf/utils.py | 1 | 3722 | # vim: sw=4:expandtab:foldmethod=marker
#
# Copyright (c) 2006, Mathieu Fenniak
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
Utility functions for PDF library.
"""
__author__ = "Mathieu Fenniak"
__author_email__ = "biziqe@mathieu.fenniak.net"
# ENABLE_PSYCO = False
# if ENABLE_PSYCO:
# try:
# import psyco
# except ImportError:
# ENABLE_PSYCO = False
#
# if not ENABLE_PSYCO:
# class psyco:
# def proxy(func):
# return func
# proxy = staticmethod(proxy)
def readUntilWhitespace(stream, maxchars=None):
    """Read characters from *stream* up to (but not including) the first
    whitespace character or EOF, stopping once *maxchars* characters
    have been collected."""
    pieces = []
    while True:
        ch = stream.read(1)
        if not ch or ch.isspace():
            break
        pieces.append(ch)
        if len(pieces) == maxchars:
            break
    return "".join(pieces)
def readNonWhitespace(stream):
    """Skip spaces, tabs, CRs and LFs; return the first other character
    read from *stream* (the empty string at EOF)."""
    tok = stream.read(1)
    while tok in ('\n', '\r', ' ', '\t'):
        tok = stream.read(1)
    return tok
class ConvertFunctionsToVirtualList(object):
    """Read-only sequence facade over two callables: one reporting the
    length, one fetching the item at a given non-negative index."""

    def __init__(self, lengthFunction, getFunction):
        self.lengthFunction = lengthFunction
        self.getFunction = getFunction

    def __len__(self):
        return self.lengthFunction()

    def __getitem__(self, index):
        if not isinstance(index, int):
            raise TypeError("sequence indices must be integers")
        size = len(self)
        if index < 0:
            # support negative indexes
            index += size
        if not 0 <= index < size:
            raise IndexError("sequence index out of range")
        return self.getFunction(index)
def RC4_encrypt(key, plaintext):
    """RC4-encrypt (or, symmetrically, decrypt) *plaintext* with *key*.

    Both arguments are strings of byte-valued characters; the result is
    a string of the same length as *plaintext*.
    """
    # key-scheduling algorithm: permute the 0..255 state array
    state = list(range(256))
    j = 0
    for i in range(256):
        j = (j + state[i] + ord(key[i % len(key)])) % 256
        state[i], state[j] = state[j], state[i]
    # pseudo-random generation: XOR each character with the keystream
    i = j = 0
    out = []
    for ch in plaintext:
        i = (i + 1) % 256
        j = (j + state[i]) % 256
        state[i], state[j] = state[j], state[i]
        k = state[(state[i] + state[j]) % 256]
        out.append(chr(ord(ch) ^ k))
    return "".join(out)
class PdfReadError(Exception):
    """Raised when a PDF stream is malformed or cannot be parsed."""
if __name__ == "__main__":
    # Round-trip smoke test: RC4 is symmetric, so encrypting the
    # ciphertext again with the same key must recover the plaintext.
    # Fix: the bare `print repr(out)` statement is Python-2-only syntax;
    # the parenthesized single-argument form prints identically under
    # Python 2 and also parses under Python 3.
    out = RC4_encrypt("Key", "Plaintext")
    print(repr(out))
    pt = RC4_encrypt("Key", out)
    print(repr(pt))
| gpl-2.0 |
ionrock/designate | designate/i18n.py | 7 | 1131 | # Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Author: Kiall Mac Innes <kiall@hp.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import oslo_i18n as i18n
# oslo.i18n setup: one TranslatorFactory per gettext domain; the
# "designate" domain selects this project's message catalogs.
_translators = i18n.TranslatorFactory(domain='designate')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info      # info-level log messages
_LW = _translators.log_warning   # warning-level log messages
_LE = _translators.log_error     # error-level log messages
_LC = _translators.log_critical  # critical-level log messages
| apache-2.0 |
haveal/googleads-python-lib | examples/dfp/v201508/suggested_ad_unit_service/get_all_suggested_ad_units.py | 4 | 1809 | #!/usr/bin/python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example gets all suggested ad units.
To approve suggested ad units, run approve_suggested_ad_units.py. This feature
is only available to DFP premium solution networks.
"""
# Import appropriate modules from the client library.
from googleads import dfp
def main(client):
  """Page through and print every suggested ad unit for the network."""
  # Initialize appropriate service.
  suggested_ad_unit_service = client.GetService(
      'SuggestedAdUnitService', version='v201508')
  # Create a filter statement.
  statement = dfp.FilterStatement()
  # Get suggested ad units by statement.
  while True:
    response = suggested_ad_unit_service.getSuggestedAdUnitsByStatement(
        statement.ToStatement())
    if 'results' in response:
      # Display results.
      for suggested_ad_unit in response['results']:
        print ('Ad unit with id \'%s\' and number of requests \'%s\' was found.'
               % (suggested_ad_unit['id'], suggested_ad_unit['numRequests']))
      # Advance the paging offset before requesting the next page.
      statement.offset += dfp.SUGGESTED_PAGE_LIMIT
    else:
      break
  # NOTE(review): Python 2 print statement; `response` is always bound
  # here because the loop body runs at least once before breaking.
  print '\nNumber of results found: %s' % response['totalResultSetSize']
if __name__ == '__main__':
  # Initialize client object.
  # NOTE(review): LoadFromStorage presumably reads credentials from the
  # default googleads.yaml location -- verify for your environment.
  dfp_client = dfp.DfpClient.LoadFromStorage()
  main(dfp_client)
| apache-2.0 |
anandology/pyjamas | examples/funnysortedgridthing/SortedGridThing.py | 9 | 2659 | import pyjd # this is dummy in pyjs.
from pyjamas.ui.RootPanel import RootPanel
from pyjamas.ui.Button import Button
from pyjamas.ui.HTML import HTML
from pyjamas.ui.HorizontalPanel import HorizontalPanel
from pyjamas.ui.VerticalPanel import VerticalPanel
from pyjamas.ui.DockPanel import DockPanel
from pyjamas.ui.ScrollPanel import ScrollPanel
from pyjamas.ui.Grid import Grid
from pyjamas.ui.DisclosurePanel import DisclosurePanel
class OddGridWidget(DockPanel):
    """A scrollable data grid with a fixed header row of sort buttons.

    Each column header is a Button; clicking one makes that column the
    sort key and redraws the whole grid.
    """
    def __init__(self, **kwargs):
        DockPanel.__init__(self, **kwargs)
        # Body grid (scrollable) and a separate one-row header grid.
        self.grid = Grid(StyleName="datagrid")
        self.sp = ScrollPanel(self.grid, Width="100%", Height="100%")
        self.header = Grid(Height="50px")
        self.add(self.header, DockPanel.NORTH)
        self.add(self.sp, DockPanel.CENTER)
        # NOTE(review): the `cf` bindings below are never used afterwards;
        # they look like leftovers from copied code.
        cf = self.setCellHeight(self.header, "50px")
        cf = self.setCellHeight(self.sp, "100%")
        # Index of the column currently used as the sort key.
        self.sortcol = 0
    def setData(self, data):
        # Store the rows (list of lists) and render immediately.
        self.data = data
        self.redraw()
    def sortfn(self, row1, row2):
        # Python 2 cmp-style comparator keyed on the selected column.
        return cmp(row1[self.sortcol], row2[self.sortcol])
    def redraw(self):
        """Re-sort self.data on the current column and rebuild both grids."""
        # NOTE(review): Python 2 list.sort(cmp) call signature.
        self.data.sort(self.sortfn)
        rows = len(self.data)
        cols = 0
        if rows > 0:
            cols = len(self.data[0])
        self.grid.resize(rows, cols)
        self.header.resize(1, cols)
        cf = self.grid.getCellFormatter()
        for (nrow, row) in enumerate(self.data):
            for (ncol, item) in enumerate(row):
                self.grid.setHTML(nrow, ncol, str(item))
                cf.setWidth(nrow, ncol, "200px")
        cf = self.header.getCellFormatter()
        # One sort button per column; this widget handles their clicks.
        self.sortbuttons = []
        for ncol in range(cols):
            sb = Button("sort col %d" % ncol)
            sb.addClickListener(self)
            self.header.setWidget(0, ncol, sb)
            cf.setWidth(0, ncol, "200px")
            self.sortbuttons.append(sb)
    def onClick(self, sender):
        # Shared click handler for all header buttons: find which button
        # fired, make its column the sort key, then redraw.
        for (ncol, b) in enumerate(self.sortbuttons):
            if sender == b:
                self.sortcol = ncol
        self.redraw()
# Demo rows for the grid: [greeting, name, number] per row.
data = [["hello", "fred", 52],
        ["bye", "joe", 98],
        ["greetings", "alien", 0],
        ["sayonara", "jun", 1],
        ["gutentaag", "volker", 2],
        ["bonjour", "francois", 5],
        ["au reservoir", "fabrice", 8],
        ["go away", "mary", 73]
        ]
if __name__ == '__main__':
    # Attach the Pyjamas runtime to the host HTML page (no-op in browser).
    pyjd.setup("public/SortedGridThing.html")
    ogw = OddGridWidget(Width="600px", Height="200px", StyleName="ogw")
    ogw.setData(data)
    # Wrap the grid in a click-to-expand disclosure panel.
    dp = DisclosurePanel("Click to disclose / hide", True, Width="602px")
    dp.add(ogw)
    RootPanel().add(dp)
    pyjd.run()
prospwro/odoo | addons/project/__openerp__.py | 259 | 2562 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Odoo addon manifest for the Project Management module.
# Pure declarative metadata: no executable logic lives in this file.
{
    'name': 'Project Management',
    'version': '1.1',
    'author': 'OpenERP SA',
    'website': 'https://www.odoo.com/page/project-management',
    'category': 'Project Management',
    'sequence': 8,
    'summary': 'Projects, Tasks',
    # Modules that must be installed before this one.
    'depends': [
        'base_setup',
        'product',
        'analytic',
        'board',
        'mail',
        'resource',
        'web_kanban'
    ],
    'description': """
Track multi-level projects, tasks, work done on tasks
=====================================================
This application allows an operational project management system to organize your activities into tasks and plan the work you need to get the tasks completed.
Gantt diagrams will give you a graphical representation of your project plans, as well as resources availability and workload.
Dashboard / Reports for Project Management will include:
--------------------------------------------------------
* My Tasks
* Open Tasks
* Tasks Analysis
* Cumulative Flow
""",
    # XML/CSV data files loaded at install/upgrade time, in order.
    'data': [
        'security/project_security.xml',
        'wizard/project_task_delegate_view.xml',
        'security/ir.model.access.csv',
        'project_data.xml',
        'project_view.xml',
        'res_partner_view.xml',
        'report/project_report_view.xml',
        'report/project_cumulative.xml',
        'res_config_view.xml',
        'views/project.xml',
    ],
    'demo': ['project_demo.xml'],
    'test': [
    ],
    'installable': True,
    'auto_install': False,
    'application': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
rahul67/hue | desktop/core/ext-py/Django-1.6.10/tests/dispatch/tests/test_saferef.py | 108 | 1898 | from django.dispatch.saferef import safeRef
from django.utils.six.moves import xrange
from django.utils import unittest
class Test1(object):
    """Carrier class whose bound method ``x`` serves as a referent."""
    def x(self):
        """Do nothing; exists only so tests can take a reference to it."""
def test2(obj):
    """Module-level no-op function used as a plain-callable referent."""
    return None
class Test2(object):
    """Callable-instance referent: calling an instance does nothing."""
    def __call__(self, obj):
        """Accept a single argument and return None."""
class SaferefTests(unittest.TestCase):
    """Exercises safeRef with bound methods, plain functions and callables."""
    def setUp(self):
        # Build a large population of referents (ts) and their safe
        # references (ss): 5000 bound methods, one plain function and
        # 30 callable instances, all sharing the same deletion closure.
        ts = []
        ss = []
        for x in xrange(5000):
            t = Test1()
            ts.append(t)
            s = safeRef(t.x, self._closure)
            ss.append(s)
        ts.append(test2)
        ss.append(safeRef(test2, self._closure))
        for x in xrange(30):
            t = Test2()
            ts.append(t)
            s = safeRef(t, self._closure)
            ss.append(s)
        self.ts = ts
        self.ss = ss
        # Counts how many referents have been garbage collected.
        self.closureCount = 0
    def tearDown(self):
        # Drop the strong references so the weakrefs can die.
        del self.ts
        del self.ss
    def testIn(self):
        """Test the "in" operator for safe references (cmp)"""
        for t in self.ts[:50]:
            self.assertTrue(safeRef(t.x) in self.ss)
    def testValid(self):
        """Test that the references are valid (return instance methods)"""
        for s in self.ss:
            self.assertTrue(s())
    def testShortCircuit(self):
        """Test that creation short-circuits to reuse existing references"""
        sd = {}
        for s in self.ss:
            sd[s] = 1
        for t in self.ts:
            if hasattr(t, 'x'):
                self.assertTrue(safeRef(t.x) in sd)
            else:
                self.assertTrue(safeRef(t) in sd)
    def testRepresentation(self):
        """Test that the reference object's representation works

        XXX Doesn't currently check the results, just that no error
        is raised
        """
        repr(self.ss[-1])
    def _closure(self, ref):
        """Dumb utility mechanism to increment deletion counter"""
        self.closureCount +=1
scality/manila | manila/tests/network/test_standalone_network_plugin.py | 1 | 18570 | # Copyright 2015 Mirantis, Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt
import mock
import netaddr
from oslo_config import cfg
import six
from manila.common import constants
from manila import context
from manila import exception
from manila.network import standalone_network_plugin as plugin
from manila import test
from manila.tests import utils as test_utils
CONF = cfg.CONF
# Shared fixtures: a non-admin request context plus minimal share-server
# and share-network stubs (the tests below only ever read their 'id' key).
fake_context = context.RequestContext(
    user_id='fake user', project_id='fake project', is_admin=False)
fake_share_server = dict(id='fake_share_server_id')
fake_share_network = dict(id='fake_share_network_id')
@ddt.ddt
class StandaloneNetworkPluginTest(test.TestCase):
    """Unit tests for StandaloneNetworkPlugin.

    Covers configuration parsing (IPv4/IPv6, network types, allowed IP
    ranges, invalid combinations) and IP allocation/deallocation against
    a mocked database layer.
    """

    # --- constructor / configuration parsing ---

    @ddt.data('custom_config_group_name', 'DEFAULT')
    def test_init_only_with_required_data_v4(self, group_name):
        data = {
            group_name: {
                'standalone_network_plugin_gateway': '10.0.0.1',
                'standalone_network_plugin_mask': '24',
            },
        }
        with test_utils.create_temp_config_with_opts(data):
            instance = plugin.StandaloneNetworkPlugin(
                config_group_name=group_name)
        self.assertEqual('10.0.0.1', instance.gateway)
        self.assertEqual('24', instance.mask)
        self.assertIsNone(instance.segmentation_id)
        self.assertIsNone(instance.allowed_ip_ranges)
        self.assertEqual(4, instance.ip_version)
        self.assertEqual(netaddr.IPNetwork('10.0.0.1/24'), instance.net)
        self.assertEqual(['10.0.0.1/24'], instance.allowed_cidrs)
        # Network, gateway and broadcast addresses must never be handed out.
        self.assertEqual(
            ('10.0.0.0', '10.0.0.1', '10.0.0.255'),
            instance.reserved_addresses)

    @ddt.data('custom_config_group_name', 'DEFAULT')
    def test_init_with_all_data_v4(self, group_name):
        data = {
            group_name: {
                'standalone_network_plugin_gateway': '10.0.0.1',
                'standalone_network_plugin_mask': '255.255.0.0',
                'standalone_network_plugin_network_type': 'vlan',
                'standalone_network_plugin_segmentation_id': '1001',
                'standalone_network_plugin_allowed_ip_ranges': (
                    '10.0.0.3-10.0.0.7,10.0.0.69-10.0.0.157,10.0.0.213'),
                'standalone_network_plugin_ip_version': 4,
            },
        }
        # Expected CIDR decomposition of the three allowed ranges above.
        allowed_cidrs = [
            '10.0.0.3/32', '10.0.0.4/30', '10.0.0.69/32', '10.0.0.70/31',
            '10.0.0.72/29', '10.0.0.80/28', '10.0.0.96/27', '10.0.0.128/28',
            '10.0.0.144/29', '10.0.0.152/30', '10.0.0.156/31', '10.0.0.213/32',
        ]
        with test_utils.create_temp_config_with_opts(data):
            instance = plugin.StandaloneNetworkPlugin(
                config_group_name=group_name)
        self.assertEqual(4, instance.ip_version)
        self.assertEqual('10.0.0.1', instance.gateway)
        self.assertEqual('255.255.0.0', instance.mask)
        self.assertEqual('vlan', instance.network_type)
        self.assertEqual('1001', instance.segmentation_id)
        self.assertEqual(allowed_cidrs, instance.allowed_cidrs)
        self.assertEqual(
            ['10.0.0.3-10.0.0.7', '10.0.0.69-10.0.0.157', '10.0.0.213'],
            instance.allowed_ip_ranges)
        self.assertEqual(
            netaddr.IPNetwork('10.0.0.1/255.255.0.0'), instance.net)
        self.assertEqual(
            ('10.0.0.0', '10.0.0.1', '10.0.255.255'),
            instance.reserved_addresses)

    @ddt.data('custom_config_group_name', 'DEFAULT')
    def test_init_only_with_required_data_v6(self, group_name):
        data = {
            group_name: {
                'standalone_network_plugin_gateway': (
                    '2001:cdba::3257:9652'),
                'standalone_network_plugin_mask': '48',
                'standalone_network_plugin_ip_version': 6,
            },
        }
        with test_utils.create_temp_config_with_opts(data):
            instance = plugin.StandaloneNetworkPlugin(
                config_group_name=group_name)
        self.assertEqual(
            '2001:cdba::3257:9652', instance.gateway)
        self.assertEqual('48', instance.mask)
        self.assertIsNone(instance.segmentation_id)
        self.assertIsNone(instance.allowed_ip_ranges)
        self.assertEqual(6, instance.ip_version)
        self.assertEqual(
            netaddr.IPNetwork('2001:cdba::3257:9652/48'),
            instance.net)
        self.assertEqual(
            ['2001:cdba::3257:9652/48'], instance.allowed_cidrs)
        self.assertEqual(
            ('2001:cdba::', '2001:cdba::3257:9652',
             '2001:cdba:0:ffff:ffff:ffff:ffff:ffff'),
            instance.reserved_addresses)

    @ddt.data('custom_config_group_name', 'DEFAULT')
    def test_init_with_all_data_v6(self, group_name):
        data = {
            group_name: {
                'standalone_network_plugin_gateway': '2001:db8::0001',
                'standalone_network_plugin_mask': '88',
                'standalone_network_plugin_network_type': 'vlan',
                'standalone_network_plugin_segmentation_id': '3999',
                'standalone_network_plugin_allowed_ip_ranges': (
                    '2001:db8::-2001:db8:0000:0000:0000:007f:ffff:ffff'),
                'standalone_network_plugin_ip_version': 6,
            },
        }
        with test_utils.create_temp_config_with_opts(data):
            instance = plugin.StandaloneNetworkPlugin(
                config_group_name=group_name)
        self.assertEqual(6, instance.ip_version)
        self.assertEqual('2001:db8::0001', instance.gateway)
        self.assertEqual('88', instance.mask)
        self.assertEqual('vlan', instance.network_type)
        self.assertEqual('3999', instance.segmentation_id)
        self.assertEqual(['2001:db8::/89'], instance.allowed_cidrs)
        self.assertEqual(
            ['2001:db8::-2001:db8:0000:0000:0000:007f:ffff:ffff'],
            instance.allowed_ip_ranges)
        self.assertEqual(
            netaddr.IPNetwork('2001:db8::0001/88'), instance.net)
        self.assertEqual(
            ('2001:db8::', '2001:db8::0001', '2001:db8::ff:ffff:ffff'),
            instance.reserved_addresses)

    @ddt.data('flat', 'vlan', 'vxlan', 'gre')
    def test_init_with_valid_network_types_v4(self, network_type):
        data = {
            'DEFAULT': {
                'standalone_network_plugin_gateway': '10.0.0.1',
                'standalone_network_plugin_mask': '255.255.0.0',
                'standalone_network_plugin_network_type': network_type,
                'standalone_network_plugin_segmentation_id': '1001',
                'standalone_network_plugin_ip_version': 4,
            },
        }
        with test_utils.create_temp_config_with_opts(data):
            instance = plugin.StandaloneNetworkPlugin(
                config_group_name='DEFAULT')
            self.assertEqual(instance.network_type, network_type)

    @ddt.data(
        'foo', 'foovlan', 'vlanfoo', 'foovlanbar', 'None', 'Vlan', 'vlaN')
    def test_init_with_fake_network_types_v4(self, fake_network_type):
        # Network type values are case-sensitive and strictly validated.
        data = {
            'DEFAULT': {
                'standalone_network_plugin_gateway': '10.0.0.1',
                'standalone_network_plugin_mask': '255.255.0.0',
                'standalone_network_plugin_network_type': fake_network_type,
                'standalone_network_plugin_segmentation_id': '1001',
                'standalone_network_plugin_ip_version': 4,
            },
        }
        with test_utils.create_temp_config_with_opts(data):
            self.assertRaises(
                cfg.ConfigFileValueError,
                plugin.StandaloneNetworkPlugin,
                config_group_name='DEFAULT',
            )

    @ddt.data('custom_config_group_name', 'DEFAULT')
    def test_invalid_init_without_any_config_definitions(self, group_name):
        self.assertRaises(
            exception.NetworkBadConfigurationException,
            plugin.StandaloneNetworkPlugin,
            config_group_name=group_name)

    @ddt.data(
        {},
        {'gateway': '20.0.0.1'},
        {'mask': '8'},
        {'gateway': '20.0.0.1', 'mask': '33'},
        {'gateway': '20.0.0.256', 'mask': '16'})
    def test_invalid_init_required_data_improper(self, data):
        # Missing gateway/mask, out-of-range mask, or malformed gateway
        # must all be rejected at construction time.
        group_name = 'custom_group_name'
        if 'gateway' in data:
            data['standalone_network_plugin_gateway'] = data.pop('gateway')
        if 'mask' in data:
            data['standalone_network_plugin_mask'] = data.pop('mask')
        data = {group_name: data}
        with test_utils.create_temp_config_with_opts(data):
            self.assertRaises(
                exception.NetworkBadConfigurationException,
                plugin.StandaloneNetworkPlugin,
                config_group_name=group_name)

    @ddt.data(
        'fake',
        '11.0.0.0-11.0.0.5-11.0.0.11',
        '11.0.0.0-11.0.0.5',
        '10.0.10.0-10.0.10.5',
        '10.0.0.0-10.0.0.5,fake',
        '10.0.10.0-10.0.10.5,10.0.0.0-10.0.0.5',
        '10.0.10.0-10.0.10.5,10.0.0.10-10.0.10.5',
        '10.0.0.0-10.0.0.5,10.0.10.0-10.0.10.5')
    def test_invalid_init_incorrect_allowed_ip_ranges_v4(self, ip_range):
        # Ranges must be well-formed and contained in the plugin's network.
        group_name = 'DEFAULT'
        data = {
            group_name: {
                'standalone_network_plugin_gateway': '10.0.0.1',
                'standalone_network_plugin_mask': '255.255.255.0',
                'standalone_network_plugin_allowed_ip_ranges': ip_range,
            },
        }
        with test_utils.create_temp_config_with_opts(data):
            self.assertRaises(
                exception.NetworkBadConfigurationException,
                plugin.StandaloneNetworkPlugin,
                config_group_name=group_name)

    @ddt.data(
        {'gateway': '2001:db8::0001', 'vers': 4},
        {'gateway': '10.0.0.1', 'vers': 6})
    @ddt.unpack
    def test_invalid_init_mismatch_of_versions(self, gateway, vers):
        # The configured ip_version must match the gateway's IP family.
        group_name = 'DEFAULT'
        data = {
            group_name: {
                'standalone_network_plugin_gateway': gateway,
                'standalone_network_plugin_ip_version': vers,
                'standalone_network_plugin_mask': '25',
            },
        }
        with test_utils.create_temp_config_with_opts(data):
            self.assertRaises(
                exception.NetworkBadConfigurationException,
                plugin.StandaloneNetworkPlugin,
                config_group_name=group_name)

    # --- allocation / deallocation against a mocked db layer ---

    def test_deallocate_network(self):
        share_server_id = 'fake_share_server_id'
        data = {
            'DEFAULT': {
                'standalone_network_plugin_gateway': '10.0.0.1',
                'standalone_network_plugin_mask': '24',
            },
        }
        fake_allocations = [{'id': 'fake1'}, {'id': 'fake2'}]
        with test_utils.create_temp_config_with_opts(data):
            instance = plugin.StandaloneNetworkPlugin()
        self.mock_object(
            instance.db, 'network_allocations_get_for_share_server',
            mock.Mock(return_value=fake_allocations))
        self.mock_object(instance.db, 'network_allocation_delete')
        instance.deallocate_network(fake_context, share_server_id)
        # Every allocation found for the server must be deleted.
        instance.db.network_allocations_get_for_share_server.\
            assert_called_once_with(fake_context, share_server_id)
        instance.db.network_allocation_delete.\
            assert_has_calls([
                mock.call(fake_context, 'fake1'),
                mock.call(fake_context, 'fake2'),
            ])

    def test_allocate_network_zero_addresses_ipv4(self):
        data = {
            'DEFAULT': {
                'standalone_network_plugin_gateway': '10.0.0.1',
                'standalone_network_plugin_mask': '24',
            },
        }
        with test_utils.create_temp_config_with_opts(data):
            instance = plugin.StandaloneNetworkPlugin()
        self.mock_object(instance.db, 'share_network_update')
        allocations = instance.allocate_network(
            fake_context, fake_share_server, fake_share_network, count=0)
        # count=0 still updates the share network metadata.
        self.assertEqual([], allocations)
        instance.db.share_network_update.assert_called_once_with(
            fake_context, fake_share_network['id'],
            dict(network_type=None, segmentation_id=None,
                 cidr=six.text_type(instance.net.cidr), ip_version=4))

    def test_allocate_network_zero_addresses_ipv6(self):
        data = {
            'DEFAULT': {
                'standalone_network_plugin_gateway': '2001:db8::0001',
                'standalone_network_plugin_mask': '64',
                'standalone_network_plugin_ip_version': 6,
            },
        }
        with test_utils.create_temp_config_with_opts(data):
            instance = plugin.StandaloneNetworkPlugin()
        self.mock_object(instance.db, 'share_network_update')
        allocations = instance.allocate_network(
            fake_context, fake_share_server, fake_share_network, count=0)
        self.assertEqual([], allocations)
        instance.db.share_network_update.assert_called_once_with(
            fake_context, fake_share_network['id'],
            dict(network_type=None, segmentation_id=None,
                 cidr=six.text_type(instance.net.cidr), ip_version=6))

    def test_allocate_network_one_ip_address_ipv4_no_usages_exist(self):
        data = {
            'DEFAULT': {
                'standalone_network_plugin_network_type': 'vlan',
                'standalone_network_plugin_segmentation_id': '1003',
                'standalone_network_plugin_gateway': '10.0.0.1',
                'standalone_network_plugin_mask': '24',
            },
        }
        with test_utils.create_temp_config_with_opts(data):
            instance = plugin.StandaloneNetworkPlugin()
        self.mock_object(instance.db, 'share_network_update')
        self.mock_object(instance.db, 'network_allocation_create')
        # No address is in use, so the first candidate (.2) is taken.
        self.mock_object(
            instance.db, 'network_allocations_get_by_ip_address',
            mock.Mock(return_value=[]))
        allocations = instance.allocate_network(
            fake_context, fake_share_server, fake_share_network)
        self.assertEqual(1, len(allocations))
        instance.db.share_network_update.assert_called_once_with(
            fake_context, fake_share_network['id'],
            dict(network_type='vlan', segmentation_id='1003',
                 cidr=six.text_type(instance.net.cidr), ip_version=4))
        instance.db.network_allocations_get_by_ip_address.assert_has_calls(
            [mock.call(fake_context, '10.0.0.2')])
        instance.db.network_allocation_create.assert_called_once_with(
            fake_context,
            dict(share_server_id=fake_share_server['id'],
                 ip_address='10.0.0.2', status=constants.STATUS_ACTIVE))

    def test_allocate_network_two_ip_addresses_ipv4_two_usages_exist(self):
        # Fake context doubles as the "already used" address registry.
        ctxt = type('FakeCtxt', (object,), {'fake': ['10.0.0.2', '10.0.0.4']})

        def fake_get_allocations_by_ip_address(context, ip_address):
            # First sighting of an address marks it used and reports it
            # free; addresses seeded in context.fake are reported busy.
            if ip_address not in context.fake:
                context.fake.append(ip_address)
                return []
            else:
                return context.fake

        data = {
            'DEFAULT': {
                'standalone_network_plugin_gateway': '10.0.0.1',
                'standalone_network_plugin_mask': '24',
            },
        }
        with test_utils.create_temp_config_with_opts(data):
            instance = plugin.StandaloneNetworkPlugin()
        self.mock_object(instance.db, 'share_network_update')
        self.mock_object(instance.db, 'network_allocation_create')
        self.mock_object(
            instance.db, 'network_allocations_get_by_ip_address',
            mock.Mock(side_effect=fake_get_allocations_by_ip_address))
        allocations = instance.allocate_network(
            ctxt, fake_share_server, fake_share_network, count=2)
        # .2 and .4 were busy, so .3 and .5 must be picked.
        self.assertEqual(2, len(allocations))
        instance.db.share_network_update.assert_called_once_with(
            ctxt, fake_share_network['id'],
            dict(network_type=None, segmentation_id=None,
                 cidr=six.text_type(instance.net.cidr), ip_version=4))
        instance.db.network_allocations_get_by_ip_address.assert_has_calls(
            [mock.call(ctxt, '10.0.0.2'), mock.call(ctxt, '10.0.0.3'),
             mock.call(ctxt, '10.0.0.4'), mock.call(ctxt, '10.0.0.5')])
        instance.db.network_allocation_create.assert_has_calls([
            mock.call(
                ctxt,
                dict(share_server_id=fake_share_server['id'],
                     ip_address='10.0.0.3', status=constants.STATUS_ACTIVE)),
            mock.call(
                ctxt,
                dict(share_server_id=fake_share_server['id'],
                     ip_address='10.0.0.5', status=constants.STATUS_ACTIVE)),
        ])

    def test_allocate_network_no_available_ipv4_addresses(self):
        # A /30 has a single usable address (.2); marking it busy must
        # exhaust the pool and raise.
        data = {
            'DEFAULT': {
                'standalone_network_plugin_gateway': '10.0.0.1',
                'standalone_network_plugin_mask': '30',
            },
        }
        with test_utils.create_temp_config_with_opts(data):
            instance = plugin.StandaloneNetworkPlugin()
        self.mock_object(instance.db, 'share_network_update')
        self.mock_object(instance.db, 'network_allocation_create')
        self.mock_object(
            instance.db, 'network_allocations_get_by_ip_address',
            mock.Mock(return_value=['not empty list']))
        self.assertRaises(
            exception.NetworkBadConfigurationException,
            instance.allocate_network,
            fake_context, fake_share_server, fake_share_network)
        instance.db.share_network_update.assert_called_once_with(
            fake_context, fake_share_network['id'],
            dict(network_type=None, segmentation_id=None,
                 cidr=six.text_type(instance.net.cidr), ip_version=4))
        instance.db.network_allocations_get_by_ip_address.assert_has_calls(
            [mock.call(fake_context, '10.0.0.2')])
| apache-2.0 |
foarsitter/decide-exchange-model | decide/model/observers/sqliteobserver.py | 1 | 8383 | import datetime
from collections import defaultdict
from typing import List
from decide import results
from decide.data import database as db
from decide.data.database import connection
from decide.model import calculations
from decide.model.base import AbstractExchange, Actor, Issue, AbstractExchangeActor
from decide.model.observers.observer import Observer, Observable
class SQLiteObserver(Observer):
    """
    Observer to store all the data in a sqlite database
    """
    def __init__(self, observable: "Observable", output_directory: str):
        super().__init__(observable)
        # Per-run bookkeeping; all of these are reset again in
        # before_repetitions().
        self.repetitions = {}
        self.iterations = defaultdict(lambda: {})
        self.actors = {}
        self.issues = {}
        self.model_run_ids = []
        self.data_set = None
        self.model_run = None
        # Treat anything that is not a .db path or ":memory:" as a
        # directory and place a default database file inside it.
        if not output_directory.endswith(".db") and output_directory != ":memory:":
            output_directory += "/decide-data.sqlite.db"
        self.log("logging to database {}".format(output_directory))
        if not output_directory.startswith('sqlite:///'):
            output_directory = "sqlite:///" + output_directory
        self.database_path = output_directory
        # NOTE(review): after_repetitions()/after_model() read
        # ``self.output_directory`` which is never assigned here --
        # presumably provided by the Observer base class; verify.
    def before_model(self):
        # initialize the database
        manager = db.Manager(self.database_path)
        manager.init_database()
        manager.create_tables()
        with db.connection.atomic():
            data_set, created = db.DataSet.get_or_create(
                name=self.model_ref.data_set_name
            )
            self.data_set = data_set
            for actor in self.model_ref.actors.values():  # type: Actor
                actor, created = db.Actor.get_or_create(
                    name=actor.name, key=actor.actor_id, data_set=self.data_set
                )
                # NOTE(review): ``actor`` was rebound to the DB row above,
                # so this maps row -> row instead of model actor -> row;
                # compare with the id-keyed lookups in _write_actor_issues().
                self.actors[actor] = actor
            for issue in self.model_ref.issues.values():  # type: Issue
                issue, created = db.Issue.get_or_create(
                    name=issue.name,
                    key=issue.issue_id,
                    lower=issue.lower,
                    upper=issue.upper,
                    data_set=self.data_set,
                )
                # NOTE(review): same rebinding pattern as for actors above.
                self.issues[issue] = issue
    def before_repetitions(self, repetitions, iterations, randomized_value=None):
        """
        Create a new data set when needed and add all the actors
        """
        # setup
        self.repetitions = {}
        self.iterations = defaultdict(lambda: {})
        # create a data set row or find existing one
        # add the Issues and Actors when they are not present
        with db.connection.atomic():
            self.model_run = db.ModelRun.create(
                p=randomized_value or self.model_ref.randomized_value,
                iterations=iterations,
                repetitions=repetitions,
                data_set=self.data_set,
            )
    def before_iterations(self, repetition):
        with db.connection.atomic():
            repetition = db.Repetition.create(
                pointer=repetition,
                model_run=self.model_run,
                p=self.model_ref.randomized_value,
            )
            # NOTE(review): keyed by the freshly created row (the int
            # parameter was rebound above), while _write_actor_issues()
            # indexes self.repetitions with the int pointer -- verify.
            self.repetitions[repetition] = repetition
    def before_loop(self, iteration: int, repetition: int):
        # Snapshot actor/issue state at the start of the iteration.
        with db.connection.atomic():
            self._write_actor_issues(iteration, repetition)
    def after_loop(
        self, realized: List[AbstractExchange], iteration: int, repetition: int
    ):
        # Persist every realized exchange of this iteration.
        iteration = self.iterations[repetition][iteration]
        with db.connection.atomic():
            for exchange in realized:
                db_exchange = db.Exchange()
                db_exchange.i = self._create_exchange_actor(exchange.i)
                db_exchange.j = self._create_exchange_actor(exchange.j)
                # NOTE(review): the two branches below do nothing; they
                # look like leftover debugger breakpoints.
                if db_exchange.i.eu < 1e-10:
                    pass
                if db_exchange.j.eu < 1e-10:
                    pass
                db_exchange.iteration = iteration
                db_exchange.save()
                self._write_externalities(exchange, db_exchange)
    def end_loop(self, iteration: int, repetition: int):
        # Snapshot actor/issue state again after the iteration completed.
        with db.connection.atomic():
            self._write_actor_issues(iteration, repetition, "after")
    def after_repetitions(self):
        # Stamp the run as finished and write the covariance result file.
        self.model_run.finished_at = datetime.datetime.now()
        self.model_run.save()
        self.model_run_ids.append(self.model_run.id)
        results.covariance.write_result(connection, self.model_run.iterations - 1, self.model_run.id, self.output_directory)
    def after_model(self):
        # Produce all summary output files for the completed runs.
        # NOTE(review): the broad except + print swallows any failure in
        # the summary writers; consider logging instead.
        try:
            results.externalities.write_summary_result(db.connection, self.model_run_ids, self.output_directory)
            results.descriptives.write_summary_result(db.connection, self.model_run_ids, self.output_directory)
            results.issuecomparison.write_summary_result(db.connection, self.model_run_ids, self.output_directory)
            results.nashbargainingsolution.write_summary_result(db.connection, self.model_run_ids, self.output_directory)
            results.nashbargainingsolution.write_summary_result(db.connection, self.model_run_ids, self.output_directory, "after")
        except Exception as e:
            print(e)
    def _write_externalities(
        self, exchange: AbstractExchange, db_exchange: db.Exchange
    ):
        # Record, for every actor, the externality this exchange imposes,
        # split by inner/outer group membership and sign.
        issue_set_key = self.model_ref.create_existing_issue_set_key(
            exchange.p, exchange.q
        )
        inner = exchange.get_inner_groups()
        for actor in self.actors:
            externality = db.Externality()
            externality.actor = actor
            externality.exchange = db_exchange
            externality.supply = db_exchange.i.supply_issue
            externality.demand = db_exchange.i.demand_issue
            externality.iteration = db_exchange.iteration
            externality_size = calculations.actor_externalities(
                actor, self.model_ref, exchange
            )
            is_inner = self.model_ref.is_inner_group_member(
                str(actor.key), inner, issue_set_key
            )
            if actor.key == exchange.i.actor.actor_id:
                externality.own = exchange.i.eu
            elif actor.key == exchange.j.actor.actor_id:
                # NOTE(review): assigns ``exchange.own`` while the branch
                # above assigns ``externality.own`` -- this looks like a
                # bug (j's own utility is never stored on the row).
                exchange.own = exchange.j.eu
            else:
                if externality_size < 0:
                    if is_inner:
                        externality.inner_negative = externality_size
                    else:
                        externality.outer_negative = externality_size
                else:
                    if is_inner:
                        externality.inner_positive = externality_size
                    else:
                        externality.outer_positive = externality_size
            externality.save()
    def _write_actor_issues(self, iteration: int, repetition: int, _type="before"):
        # Persist the position/salience/power of every actor on every
        # issue for this iteration, tagged "before" or "after".
        with db.connection.atomic():
            repetition = self.repetitions[repetition]
            iteration, _ = db.Iteration.get_or_create(
                pointer=iteration, repetition=repetition
            )
            self.iterations[repetition][iteration] = iteration
            for (
                issue_obj,
                actors,
            ) in self.model_ref.actor_issues.items():
                for actor_obj, actor_issue in actors.items():
                    # NOTE(review): these lookups index self.issues /
                    # self.actors by id keys, whereas before_model()
                    # stored them keyed by DB rows -- verify consistency.
                    db.ActorIssue.create(
                        issue=self.issues[issue_obj.issue_id],
                        actor=self.actors[actor_obj.actor_id],
                        power=actor_issue.power,
                        salience=actor_issue.salience,
                        position=actor_issue.position,
                        iteration=iteration,
                        type=_type,
                    )
    def _create_exchange_actor(self, i: AbstractExchangeActor):
        # Map one side of a model exchange onto a persisted ExchangeActor.
        exchange_actor = db.ExchangeActor()
        exchange_actor.actor = self.actors[i.actor]
        exchange_actor.supply_issue = self.issues[i.supply.issue]
        exchange_actor.demand_issue = self.issues[i.demand.issue]
        exchange_actor.eu = i.eu
        exchange_actor.x = i.supply.position
        exchange_actor.y = i.y
        exchange_actor.demand_position = i.opposite_actor.demand.position
        exchange_actor.save()
        return exchange_actor
| gpl-3.0 |
lilmuck/lilmuck | plugin.video.streamoase_ll/mechanize/_form.py | 131 | 120863 | """HTML form handling for web clients.
HTML form handling for web clients: useful for parsing HTML forms, filling them
in and returning the completed forms to the server. This code developed from a
port of Gisle Aas' Perl module HTML::Form, from the libwww-perl library, but
the interface is not the same.
The most useful docstring is the one for HTMLForm.
RFC 1866: HTML 2.0
RFC 1867: Form-based File Upload in HTML
RFC 2388: Returning Values from Forms: multipart/form-data
HTML 3.2 Specification, W3C Recommendation 14 January 1997 (for ISINDEX)
HTML 4.01 Specification, W3C Recommendation 24 December 1999
Copyright 2002-2007 John J. Lee <jjl@pobox.com>
Copyright 2005 Gary Poster
Copyright 2005 Zope Corporation
Copyright 1998-2000 Gisle Aas.
This code is free software; you can redistribute it and/or modify it
under the terms of the BSD or ZPL 2.1 licenses (see the file
COPYING.txt included with the distribution).
"""
# TODO:
# Clean up post the merge into mechanize
# * Remove code that was duplicated in ClientForm and mechanize
# * Remove weird import stuff
# * Remove pre-Python 2.4 compatibility cruft
# * Clean up tests
# * Later release: Remove the ClientForm 0.1 backwards-compatibility switch
# Remove parser testing hack
# Clean action URI
# Switch to unicode throughout
# See Wichert Akkerman's 2004-01-22 message to c.l.py.
# Apply recommendations from google code project CURLIES
# Apply recommendations from HTML 5 spec
# Add charset parameter to Content-type headers? How to find value??
# Functional tests to add:
# Single and multiple file upload
# File upload with missing name (check standards)
# mailto: submission & enctype text/plain??
# Replace by_label etc. with moniker / selector concept. Allows, e.g., a
# choice between selection by value / id / label / element contents. Or
# choice between matching labels exactly or by substring. etc.
# Explicit public API for ``from mechanize._form import *``.
__all__ = ['AmbiguityError', 'CheckboxControl', 'Control',
           'ControlNotFoundError', 'FileControl', 'FormParser', 'HTMLForm',
           'HiddenControl', 'IgnoreControl', 'ImageControl', 'IsindexControl',
           'Item', 'ItemCountError', 'ItemNotFoundError', 'Label',
           'ListControl', 'LocateError', 'Missing', 'ParseError', 'ParseFile',
           'ParseFileEx', 'ParseResponse', 'ParseResponseEx','PasswordControl',
           'RadioControl', 'ScalarControl', 'SelectControl',
           'SubmitButtonControl', 'SubmitControl', 'TextControl',
           'TextareaControl', 'XHTMLCompatibleFormParser']
import HTMLParser
from cStringIO import StringIO
import inspect
import logging
import random
import re
import sys
import urllib
import urlparse
import warnings
import _beautifulsoup
import _request
# from Python itself, for backwards compatibility of raised exceptions
import sgmllib
# bundled copy of sgmllib
import _sgmllib_copy
VERSION = "0.2.11"
CHUNK = 1024 # size of chunks fed to parser, in bytes
DEFAULT_ENCODING = "latin-1"
_logger = logging.getLogger("mechanize.forms")
OPTIMIZATION_HACK = True
def debug(msg, *args, **kwds):
    """Log a debug message prefixed with the calling function's name.

    Short-circuits while OPTIMIZATION_HACK is set, because inspect.stack()
    is expensive to call on every parser event.
    """
    if OPTIMIZATION_HACK:
        return

    caller = inspect.stack()[1][3]
    # prepend a "%s" slot for the caller name to the caller's format string
    _logger.debug('%%s %s' % msg, *((caller,) + args), **kwds)
def _show_debug_messages():
    """Switch on this module's debug logging, echoed to stdout."""
    global OPTIMIZATION_HACK
    OPTIMIZATION_HACK = False

    _logger.setLevel(logging.DEBUG)
    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.setLevel(logging.DEBUG)
    _logger.addHandler(stdout_handler)
def deprecation(message, stack_offset=0):
    """Emit a DeprecationWarning attributed to our caller's caller.

    stack_offset pushes the attribution further up the stack, for use by
    intermediate wrapper frames.
    """
    warnings.warn(message, DeprecationWarning, stacklevel=3 + stack_offset)
class Missing: pass
_compress_re = re.compile(r"\s+")


def compress_text(text):
    """Strip text and collapse every internal whitespace run to one space."""
    return _compress_re.sub(" ", text.strip())
def normalize_line_endings(text):
    """Convert lone CR or lone LF characters into CRLF pairs."""
    # matches a "\n" not preceded by "\r", or a "\r" not followed by "\n"
    return re.sub(r"(?:(?<!\r)\n)|(?:\r(?!\n))", "\r\n", text)
def unescape(data, entities, encoding=DEFAULT_ENCODING):
    """Replace entity and numeric character references found in data.

    entities maps full entity references (e.g. "&amp;") to replacement text.
    Returns data unchanged when it is None or contains no ampersand.
    """
    if data is None or "&" not in data:
        return data

    def replace_entities(match, entities=entities, encoding=encoding):
        ref = match.group()
        if ref[1] == "#":
            # numeric character reference, e.g. "&#38;" or "&#x26;"
            return unescape_charref(ref[2:-1], encoding)

        replacement = entities.get(ref)
        if replacement is None:
            # unknown entity: leave the reference text untouched
            return ref
        if type(replacement) != type(""):
            # unicode replacement: try to squeeze it into a byte string
            try:
                replacement = replacement.encode(encoding)
            except UnicodeError:
                replacement = ref
        return replacement

    return re.sub(r"&#?[A-Za-z0-9]+?;", replace_entities, data)
def unescape_charref(data, encoding):
    """Decode the body of a numeric character reference ("38" or "x26").

    Returns the character encoded with `encoding` (or unicode when encoding
    is None); falls back to the original "&#...;" text when the character
    cannot be represented in that encoding.
    """
    name, base = data, 10
    if name.startswith("x"):
        # hexadecimal form
        name, base = name[1:], 16
    uc = unichr(int(name, base))
    if encoding is None:
        return uc
    try:
        return uc.encode(encoding)
    except UnicodeError:
        # not representable: keep the reference as literal text
        return "&#%s;" % data
def get_entitydefs():
    """Return a mapping of HTML entity references to unicode characters.

    Keys include the surrounding ampersand and semicolon, e.g. "&amp;".
    """
    import htmlentitydefs
    from codecs import latin_1_decode

    entitydefs = {}
    try:
        htmlentitydefs.name2codepoint
    except AttributeError:
        # very old Python: only entitydefs is available, whose values are
        # latin-1 characters or "&#nnn;" references
        for name, char in htmlentitydefs.entitydefs.items():
            uc = latin_1_decode(char)[0]
            if uc.startswith("&#") and uc.endswith(";"):
                uc = unescape_charref(uc[2:-1], None)
            entitydefs["&%s;" % name] = uc
    else:
        for name, codepoint in htmlentitydefs.name2codepoint.items():
            entitydefs["&%s;" % name] = unichr(codepoint)
    return entitydefs
def issequence(x):
    """Duck-type test: does x support indexing from zero?

    Mappings (KeyError) and non-subscriptables (TypeError) do not count;
    an empty sequence (IndexError) still does.
    """
    try:
        x[0]
    except IndexError:
        return True
    except (TypeError, KeyError):
        return False
    return True
def isstringlike(x):
    """Return True if x supports concatenation with a str (duck typing)."""
    try:
        x + ""
    # narrowed from a bare "except:" so that KeyboardInterrupt/SystemExit
    # are no longer silently swallowed
    except Exception:
        return False
    return True
def choose_boundary():
    """Return a string usable as a multipart boundary."""
    # follow IE and firefox: 27 dashes followed by a long random digit run
    digits = [str(random.randint(0, sys.maxint - 1)) for unused in range(3)]
    return "-" * 27 + "".join(digits)
# This cut-n-pasted MimeWriter from standard library is here so can add
# to HTTP headers rather than message body when appropriate. It also uses
# \r\n in place of \n. This is a bit nasty.
# This cut-n-pasted MimeWriter from standard library is here so can add
# to HTTP headers rather than message body when appropriate.  It also uses
# \r\n in place of \n.  This is a bit nasty.
class MimeWriter:
    """Generic MIME writer.

    Writes MIME documents to an output file, buffering headers (so their
    order can be rearranged) but streaming body data.  Parts must therefore
    be written in the order they should appear in the output.

    Typical use:

        w = MimeWriter(f)
        w.addheader(key, value)     # zero or more times
        f = w.startbody(content_type)
        f.write(body_data)

    or, for multipart documents:

        w.startmultipartbody(subtype)
        for each part:
            subwriter = w.nextpart()
            ...use the subwriter's methods to create the subpart...
        w.lastpart()                # do not forget this!

    Each subwriter is itself a MimeWriter, so nesting works naturally.

    Notes:

    - startbody() currently returns the file passed to the constructor,
      but callers should not rely on that.
    - flushheaders() writes out (and forgets) the headers accumulated so
      far; useful for header-only parts such as message/rfc822 subparts.
    - The 'prefix' argument to addheader()/start*body() selects where a
      header is inserted: 1 means at the start, 0 at the end.  addheader()
      defaults to append, the start*body() methods default to insert,
      which is how the Content-Type header ends up first.
    - When constructed with an http_hdrs list, headers added with
      add_to_http_hdrs true are appended to that list (as (key, value)
      tuples) instead of being written into the MIME body.
    """

    def __init__(self, fp, http_hdrs=None):
        self._http_hdrs = http_hdrs
        self._fp = fp
        self._headers = []
        self._boundary = []
        self._first_part = True

    def addheader(self, key, value, prefix=0,
                  add_to_http_hdrs=0):
        """Buffer one header line (or divert it to the HTTP header list).

        prefix is ignored if add_to_http_hdrs is true.
        """
        lines = value.split("\r\n")
        # drop empty leading/trailing lines
        while lines and not lines[-1]:
            del lines[-1]
        while lines and not lines[0]:
            del lines[0]
        if add_to_http_hdrs:
            # 2.2 urllib2 doesn't normalize header case
            self._http_hdrs.append((key.capitalize(), "".join(lines)))
        else:
            # fold continuation lines with a single leading space
            folded = lines[:1]
            for continuation in lines[1:]:
                folded.append(" " + continuation.strip())
            header_value = "\r\n".join(folded) + "\r\n"
            header_line = key.title() + ": " + header_value
            if prefix:
                self._headers.insert(0, header_line)
            else:
                self._headers.append(header_line)

    def flushheaders(self):
        """Write out all buffered headers and forget them."""
        self._fp.writelines(self._headers)
        self._headers = []

    def startbody(self, ctype=None, plist=[], prefix=1,
                  add_to_http_hdrs=0, content_type=1):
        """Emit headers and return the file object to write body data to.

        prefix is ignored if add_to_http_hdrs is true.
        """
        if content_type and ctype:
            for param_name, param_value in plist:
                ctype = ctype + ';\r\n %s=%s' % (param_name, param_value)
            self.addheader("Content-Type", ctype, prefix=prefix,
                           add_to_http_hdrs=add_to_http_hdrs)
        self.flushheaders()
        if not add_to_http_hdrs:
            # blank line separating headers from body
            self._fp.write("\r\n")
        self._first_part = True
        return self._fp

    def startmultipartbody(self, subtype, boundary=None, plist=[], prefix=1,
                           add_to_http_hdrs=0, content_type=1):
        """Begin a multipart/<subtype> body; choose a boundary if needed."""
        boundary = boundary or choose_boundary()
        self._boundary.append(boundary)
        return self.startbody("multipart/" + subtype,
                              [("boundary", boundary)] + plist,
                              prefix=prefix,
                              add_to_http_hdrs=add_to_http_hdrs,
                              content_type=content_type)

    def nextpart(self):
        """Write a boundary line and return a fresh writer for the part."""
        current_boundary = self._boundary[-1]
        if self._first_part:
            self._first_part = False
        else:
            self._fp.write("\r\n")
        self._fp.write("--" + current_boundary + "\r\n")
        return self.__class__(self._fp)

    def lastpart(self):
        """Write the closing boundary of the current multipart body."""
        if self._first_part:
            self.nextpart()
        closing_boundary = self._boundary.pop()
        self._fp.write("\r\n--" + closing_boundary + "--\r\n")
# Exception hierarchy for locating forms, controls and items.
class LocateError(ValueError): pass
# more than one match found where exactly one was required
class AmbiguityError(LocateError): pass
class ControlNotFoundError(LocateError): pass
class ItemNotFoundError(LocateError): pass
# wrong number of list items found or selected
class ItemCountError(ValueError): pass
# for backwards compatibility, ParseError derives from exceptions that were
# raised by versions of ClientForm <= 0.2.5
# TODO: move to _html
class ParseError(sgmllib.SGMLParseError,
                 HTMLParser.HTMLParseError):
    # Derives from both parser libraries' exception classes so that code
    # catching either keeps working; runtime behaviour is plain Exception's.

    def __init__(self, *args, **kwds):
        Exception.__init__(self, *args, **kwds)

    def __str__(self):
        return Exception.__str__(self)
class _AbstractFormParser:
    """forms attribute contains HTMLForm instances on completion."""
    # thanks to Moshe Zadka for an example of sgmllib/htmllib usage

    def __init__(self, entitydefs=None, encoding=DEFAULT_ENCODING):
        # entitydefs: maps entity references (e.g. "&amp;") to replacement
        #  text; defaults to the full HTML entity set
        # encoding: used when encoding numeric character references
        if entitydefs is None:
            entitydefs = get_entitydefs()
        self._entitydefs = entitydefs
        self._encoding = encoding

        self.base = None  # URI from a BASE element, if one is seen
        self.forms = []
        self.labels = []
        self._current_label = None
        self._current_form = None
        self._select = None
        self._optgroup = None
        self._option = None
        self._textarea = None

        # forms[0] will contain all controls that are outside of any form
        # self._global_form is an alias for self.forms[0]
        self._global_form = None
        self.start_form([])
        self.end_form()
        self._current_form = self._global_form = self.forms[0]

    def do_base(self, attrs):
        # record the BASE element's href; it overrides the document URI
        debug("%s", attrs)
        for key, value in attrs:
            if key == "href":
                self.base = self.unescape_attr_if_required(value)

    def end_body(self):
        debug("")
        # close any LABEL or FORM still open at end of document
        if self._current_label is not None:
            self.end_label()
        if self._current_form is not self._global_form:
            self.end_form()

    def start_form(self, attrs):
        debug("%s", attrs)
        if self._current_form is not self._global_form:
            raise ParseError("nested FORMs")
        name = None
        action = None
        enctype = "application/x-www-form-urlencoded"
        method = "GET"
        d = {}
        for key, value in attrs:
            if key == "name":
                name = self.unescape_attr_if_required(value)
            elif key == "action":
                action = self.unescape_attr_if_required(value)
            elif key == "method":
                method = self.unescape_attr_if_required(value.upper())
            elif key == "enctype":
                enctype = self.unescape_attr_if_required(value.lower())
            d[key] = self.unescape_attr_if_required(value)
        controls = []
        # forms are accumulated as ((name, action, method, enctype),
        # attrs-dict, controls-list) triples; _ParseFileEx later turns them
        # into HTMLForm objects
        self._current_form = (name, action, method, enctype), d, controls

    def end_form(self):
        debug("")
        if self._current_label is not None:
            self.end_label()
        if self._current_form is self._global_form:
            raise ParseError("end of FORM before start")
        self.forms.append(self._current_form)
        self._current_form = self._global_form

    def start_select(self, attrs):
        debug("%s", attrs)
        if self._select is not None:
            raise ParseError("nested SELECTs")
        if self._textarea is not None:
            raise ParseError("SELECT inside TEXTAREA")
        d = {}
        for key, val in attrs:
            d[key] = self.unescape_attr_if_required(val)

        self._select = d
        self._add_label(d)

        # record the SELECT itself so that empty SELECTs are not lost
        self._append_select_control({"__select": d})

    def end_select(self):
        debug("")
        if self._select is None:
            raise ParseError("end of SELECT before start")

        if self._option is not None:
            self._end_option()

        self._select = None

    def start_optgroup(self, attrs):
        debug("%s", attrs)
        if self._select is None:
            raise ParseError("OPTGROUP outside of SELECT")
        d = {}
        for key, val in attrs:
            d[key] = self.unescape_attr_if_required(val)

        self._optgroup = d

    def end_optgroup(self):
        debug("")
        if self._optgroup is None:
            raise ParseError("end of OPTGROUP before start")
        self._optgroup = None

    def _start_option(self, attrs):
        # shared OPTION handler; subclasses route start_option/do_option here
        debug("%s", attrs)
        if self._select is None:
            raise ParseError("OPTION outside of SELECT")
        if self._option is not None:
            self._end_option()

        d = {}
        for key, val in attrs:
            d[key] = self.unescape_attr_if_required(val)

        self._option = {}
        self._option.update(d)
        # an OPTION inside a disabled OPTGROUP is itself disabled
        if (self._optgroup and self._optgroup.has_key("disabled") and
            not self._option.has_key("disabled")):
            self._option["disabled"] = None

    def _end_option(self):
        debug("")
        if self._option is None:
            raise ParseError("end of OPTION before start")

        contents = self._option.get("contents", "").strip()
        self._option["contents"] = contents
        # value and label both default to the element's text content
        if not self._option.has_key("value"):
            self._option["value"] = contents
        if not self._option.has_key("label"):
            self._option["label"] = contents
        # stuff dict of SELECT HTML attrs into a special private key
        # (gets deleted again later)
        self._option["__select"] = self._select
        self._append_select_control(self._option)
        self._option = None

    def _append_select_control(self, attrs):
        debug("%s", attrs)
        controls = self._current_form[2]
        name = self._select.get("name")
        controls.append(("select", name, attrs))

    def start_textarea(self, attrs):
        debug("%s", attrs)
        if self._textarea is not None:
            raise ParseError("nested TEXTAREAs")
        if self._select is not None:
            raise ParseError("TEXTAREA inside SELECT")
        d = {}
        for key, val in attrs:
            d[key] = self.unescape_attr_if_required(val)
        self._add_label(d)

        self._textarea = d

    def end_textarea(self):
        debug("")
        if self._textarea is None:
            raise ParseError("end of TEXTAREA before start")
        controls = self._current_form[2]
        name = self._textarea.get("name")
        controls.append(("textarea", name, self._textarea))
        self._textarea = None

    def start_label(self, attrs):
        debug("%s", attrs)
        if self._current_label:
            self.end_label()
        d = {}
        for key, val in attrs:
            d[key] = self.unescape_attr_if_required(val)
        taken = bool(d.get("for"))  # empty id is invalid
        d["__text"] = ""
        d["__taken"] = taken
        if taken:
            self.labels.append(d)
        self._current_label = d

    def end_label(self):
        debug("")
        label = self._current_label
        if label is None:
            # something is ugly in the HTML, but we're ignoring it
            return
        self._current_label = None
        # if it is staying around, it is True in all cases
        del label["__taken"]

    def _add_label(self, d):
        #debug("%s", d)
        # associate the currently-open, not-yet-used LABEL with control d
        if self._current_label is not None:
            if not self._current_label["__taken"]:
                self._current_label["__taken"] = True
                d["__label"] = self._current_label

    def handle_data(self, data):
        debug("%s", data)

        if self._option is not None:
            # self._option is a dictionary of the OPTION element's HTML
            # attributes, but it has two special keys, one of which is the
            # special "contents" key contains text between OPTION tags (the
            # other is the "__select" key: see the end_option method)
            map = self._option
            key = "contents"
        elif self._textarea is not None:
            map = self._textarea
            key = "value"
            data = normalize_line_endings(data)
        # not if within option or textarea
        elif self._current_label is not None:
            map = self._current_label
            key = "__text"
        else:
            return

        if data and not map.has_key(key):
            # according to
            # http://www.w3.org/TR/html4/appendix/notes.html#h-B.3.1 line break
            # immediately after start tags or immediately before end tags must
            # be ignored, but real browsers only ignore a line break after a
            # start tag, so we'll do that.
            if data[0:2] == "\r\n":
                data = data[2:]
            elif data[0:1] in ["\n", "\r"]:
                data = data[1:]
            map[key] = data
        else:
            map[key] = map[key] + data

    def do_button(self, attrs):
        debug("%s", attrs)
        d = {}
        d["type"] = "submit"  # default
        for key, val in attrs:
            d[key] = self.unescape_attr_if_required(val)
        controls = self._current_form[2]

        type = d["type"]
        name = d.get("name")
        # we don't want to lose information, so use a type string that
        # doesn't clash with INPUT TYPE={SUBMIT,RESET,BUTTON}
        # e.g. type for BUTTON/RESET is "resetbutton"
        # (type for INPUT/RESET is "reset")
        type = type+"button"
        self._add_label(d)
        controls.append((type, name, d))

    def do_input(self, attrs):
        debug("%s", attrs)
        d = {}
        d["type"] = "text"  # default
        for key, val in attrs:
            d[key] = self.unescape_attr_if_required(val)
        controls = self._current_form[2]

        type = d["type"]
        name = d.get("name")
        self._add_label(d)
        controls.append((type, name, d))

    def do_isindex(self, attrs):
        debug("%s", attrs)
        d = {}
        for key, val in attrs:
            d[key] = self.unescape_attr_if_required(val)
        controls = self._current_form[2]

        self._add_label(d)
        # isindex doesn't have type or name HTML attributes
        controls.append(("isindex", None, d))

    def handle_entityref(self, name):
        #debug("%s", name)
        self.handle_data(unescape(
            '&%s;' % name, self._entitydefs, self._encoding))

    def handle_charref(self, name):
        #debug("%s", name)
        self.handle_data(unescape_charref(name, self._encoding))

    def unescape_attr(self, name):
        #debug("%s", name)
        return unescape(name, self._entitydefs, self._encoding)

    def unescape_attrs(self, attrs):
        #debug("%s", attrs)
        # recursively unescape an attribute dict; values may themselves be
        # attribute dicts (e.g. under the "__select" key)
        escaped_attrs = {}
        for key, val in attrs.items():
            try:
                val.items
            except AttributeError:
                escaped_attrs[key] = self.unescape_attr(val)
            else:
                # e.g. "__select" -- yuck!
                escaped_attrs[key] = self.unescape_attrs(val)
        return escaped_attrs

    def unknown_entityref(self, ref): self.handle_data("&%s;" % ref)
    def unknown_charref(self, ref): self.handle_data("&#%s;" % ref)
class XHTMLCompatibleFormParser(_AbstractFormParser, HTMLParser.HTMLParser):
"""Good for XHTML, bad for tolerance of incorrect HTML."""
# thanks to Michael Howitz for this!
def __init__(self, entitydefs=None, encoding=DEFAULT_ENCODING):
HTMLParser.HTMLParser.__init__(self)
_AbstractFormParser.__init__(self, entitydefs, encoding)
def feed(self, data):
try:
HTMLParser.HTMLParser.feed(self, data)
except HTMLParser.HTMLParseError, exc:
raise ParseError(exc)
def start_option(self, attrs):
_AbstractFormParser._start_option(self, attrs)
def end_option(self):
_AbstractFormParser._end_option(self)
def handle_starttag(self, tag, attrs):
try:
method = getattr(self, "start_" + tag)
except AttributeError:
try:
method = getattr(self, "do_" + tag)
except AttributeError:
pass # unknown tag
else:
method(attrs)
else:
method(attrs)
def handle_endtag(self, tag):
try:
method = getattr(self, "end_" + tag)
except AttributeError:
pass # unknown tag
else:
method()
def unescape(self, name):
# Use the entitydefs passed into constructor, not
# HTMLParser.HTMLParser's entitydefs.
return self.unescape_attr(name)
def unescape_attr_if_required(self, name):
return name # HTMLParser.HTMLParser already did it
def unescape_attrs_if_required(self, attrs):
return attrs # ditto
def close(self):
HTMLParser.HTMLParser.close(self)
self.end_body()
class _AbstractSgmllibParser(_AbstractFormParser):
    # sgmllib-based parsers report OPTION via a "do_" (empty-element)
    # handler; route it to the shared _start_option machinery
    def do_option(self, attrs):
        _AbstractFormParser._start_option(self, attrs)

    # we override this attr to decode hex charrefs
    entity_or_charref = re.compile(
        '&(?:([a-zA-Z][-.a-zA-Z0-9]*)|#(x?[0-9a-fA-F]+))(;?)')
    def convert_entityref(self, name):
        # called by sgmllib for "&name;" references in character data
        return unescape("&%s;" % name, self._entitydefs, self._encoding)
    def convert_charref(self, name):
        # called by sgmllib for "&#nnn;" / "&#xhh;" references
        return unescape_charref("%s" % name, self._encoding)
    def unescape_attr_if_required(self, name):
        return name  # sgmllib already did it
    def unescape_attrs_if_required(self, attrs):
        return attrs  # ditto
class FormParser(_AbstractSgmllibParser, _sgmllib_copy.SGMLParser):
    """Good for tolerance of incorrect HTML, bad for XHTML."""
    def __init__(self, entitydefs=None, encoding=DEFAULT_ENCODING):
        _sgmllib_copy.SGMLParser.__init__(self)
        _AbstractFormParser.__init__(self, entitydefs, encoding)
    def feed(self, data):
        # re-raise parse failures as this module's ParseError
        try:
            _sgmllib_copy.SGMLParser.feed(self, data)
        except _sgmllib_copy.SGMLParseError, exc:
            raise ParseError(exc)
    def close(self):
        _sgmllib_copy.SGMLParser.close(self)
        # signal end-of-document to the form builder
        self.end_body()
class _AbstractBSFormParser(_AbstractSgmllibParser):
    # BeautifulSoup class to co-operate with; set by concrete subclasses
    bs_base_class = None
    def __init__(self, entitydefs=None, encoding=DEFAULT_ENCODING):
        _AbstractFormParser.__init__(self, entitydefs, encoding)
        self.bs_base_class.__init__(self)
    def handle_data(self, data):
        # feed character data to both the form builder and BeautifulSoup
        _AbstractFormParser.handle_data(self, data)
        self.bs_base_class.handle_data(self, data)
    def feed(self, data):
        # re-raise parse failures as this module's ParseError
        try:
            self.bs_base_class.feed(self, data)
        except _sgmllib_copy.SGMLParseError, exc:
            raise ParseError(exc)
    def close(self):
        self.bs_base_class.close(self)
        self.end_body()
class RobustFormParser(_AbstractBSFormParser, _beautifulsoup.BeautifulSoup):
    """Tries to be highly tolerant of incorrect HTML."""
    # co-operate with the standard BeautifulSoup nesting heuristics
    bs_base_class = _beautifulsoup.BeautifulSoup
class NestingRobustFormParser(_AbstractBSFormParser,
                              _beautifulsoup.ICantBelieveItsBeautifulSoup):
    """Tries to be highly tolerant of incorrect HTML.

    Different from RobustFormParser in that it more often guesses nesting
    above missing end tags (see BeautifulSoup docs).
    """
    bs_base_class = _beautifulsoup.ICantBelieveItsBeautifulSoup
#FormParser = XHTMLCompatibleFormParser # testing hack
#FormParser = RobustFormParser # testing hack
def ParseResponseEx(response,
                    select_default=False,
                    form_parser_class=FormParser,
                    request_class=_request.Request,
                    entitydefs=None,
                    encoding=DEFAULT_ENCODING,
                    # private
                    _urljoin=urlparse.urljoin,
                    _urlparse=urlparse.urlparse,
                    _urlunparse=urlparse.urlunparse,
                    ):
    """Identical to ParseResponse, except that:

    1. The returned list has one extra item: its first form holds all
    controls that appear outside of any FORM element.

    2. There are no ignore_errors / backwards_compat arguments.

    3. Backwards-compatibility mode (backwards_compat=True) is unavailable.
    """
    # keyword arguments make the fixed ignore_errors/backwards_compat
    # values explicit
    return _ParseFileEx(response, response.geturl(),
                        select_default=select_default,
                        ignore_errors=False,
                        form_parser_class=form_parser_class,
                        request_class=request_class,
                        entitydefs=entitydefs,
                        backwards_compat=False,
                        encoding=encoding,
                        _urljoin=_urljoin,
                        _urlparse=_urlparse,
                        _urlunparse=_urlunparse,
                        )
def ParseFileEx(file, base_uri,
                select_default=False,
                form_parser_class=FormParser,
                request_class=_request.Request,
                entitydefs=None,
                encoding=DEFAULT_ENCODING,
                # private
                _urljoin=urlparse.urljoin,
                _urlparse=urlparse.urlparse,
                _urlunparse=urlparse.urlunparse,
                ):
    """Identical to ParseFile, except that:

    1. The returned list has one extra item: its first form holds all
    controls that appear outside of any FORM element.

    2. There are no ignore_errors / backwards_compat arguments.

    3. Backwards-compatibility mode (backwards_compat=True) is unavailable.
    """
    # keyword arguments make the fixed ignore_errors/backwards_compat
    # values explicit
    return _ParseFileEx(file, base_uri,
                        select_default=select_default,
                        ignore_errors=False,
                        form_parser_class=form_parser_class,
                        request_class=request_class,
                        entitydefs=entitydefs,
                        backwards_compat=False,
                        encoding=encoding,
                        _urljoin=_urljoin,
                        _urlparse=_urlparse,
                        _urlunparse=_urlunparse,
                        )
def ParseString(text, base_uri, *args, **kwds):
    """Parse HTML held in a string; otherwise identical to ParseFileEx."""
    return ParseFileEx(StringIO(text), base_uri, *args, **kwds)
def ParseResponse(response, *args, **kwds):
    """Parse HTTP response and return a list of HTMLForm instances.

    The return value of mechanize.urlopen can be conveniently passed to this
    function as the response parameter.

    mechanize.ParseError is raised on parse errors.

    response: file-like object (supporting read() method) with a method
     geturl(), returning the URI of the HTTP response
    select_default: for multiple-selection SELECT controls and RADIO controls,
     pick the first item as the default if none are selected in the HTML
    form_parser_class: class to instantiate and use to parse the HTML
     (FormParser by default; see also XHTMLCompatibleFormParser)
    request_class: class to return from .click() method (default is
     mechanize.Request)
    entitydefs: mapping like {"&amp;": "&", ...} containing HTML entity
     definitions (a sensible default is used)
    encoding: character encoding used for encoding numeric character references
     when matching link text.  This function does not attempt to find the
     encoding in a META HTTP-EQUIV attribute in the document itself (the
     mechanize Browser class, for example, does do that and will pass the
     correct value on using this parameter).

    backwards_compat: boolean that determines whether the returned HTMLForm
     objects are backwards-compatible with old code.  If backwards_compat is
     true:

    - ClientForm 0.1 code will continue to work as before.

    - Label searches that do not specify a nr (number or count) will always
      get the first match, even if other controls match.  If
      backwards_compat is False, label searches that have ambiguous results
      will raise an AmbiguityError.

    - Item label matching is done by strict string comparison rather than
      substring matching.

    - De-selecting individual list items is allowed even if the Item is
      disabled.

    The backwards_compat argument will be removed in a future release.

    Pass a true value for select_default if you want the behaviour specified
    by RFC 1866 (the HTML 2.0 standard), which is to select the first item in
    a RADIO or multiple-selection SELECT control if none were selected in the
    HTML.  Most browsers (including Microsoft Internet Explorer (IE) and
    Netscape Navigator) instead leave all items unselected in these cases.
    The W3C HTML 4.0 standard leaves this behaviour undefined in the case of
    multiple-selection SELECT controls, but insists that at least one RADIO
    button should be checked at all times, in contradiction to browser
    behaviour.

    There is a choice of parsers.  mechanize.XHTMLCompatibleFormParser (uses
    HTMLParser.HTMLParser) works best for XHTML, mechanize.FormParser (uses
    bundled copy of sgmllib.SGMLParser) (the default) works better for
    ordinary grubby HTML.  Note that HTMLParser is only available in Python
    2.2 and later.  You can pass your own class in here as a hack to work
    around bad HTML, but at your own risk: there is no well-defined interface.

    """
    # [1:] drops forms[0], the "global form" holding controls that appear
    # outside any FORM element
    return _ParseFileEx(response, response.geturl(), *args, **kwds)[1:]
def ParseFile(file, base_uri, *args, **kwds):
    """Parse HTML and return a list of HTMLForm instances.

    mechanize.ParseError is raised on parse errors.

    file: file-like object (supporting read() method) containing HTML with
     zero or more forms to be parsed
    base_uri: the URI of the document (note that the base URI used to submit
     the form will be that given in the BASE element if present, not that of
     the document)

    For the other arguments and further details, see ParseResponse.__doc__.

    """
    # [1:] drops forms[0], the "global form" holding controls that appear
    # outside any FORM element
    return _ParseFileEx(file, base_uri, *args, **kwds)[1:]
def _ParseFileEx(file, base_uri,
                 select_default=False,
                 ignore_errors=False,
                 form_parser_class=FormParser,
                 request_class=_request.Request,
                 entitydefs=None,
                 backwards_compat=True,
                 encoding=DEFAULT_ENCODING,
                 _urljoin=urlparse.urljoin,
                 _urlparse=urlparse.urlparse,
                 _urlunparse=urlparse.urlunparse,
                 ):
    """Backend shared by the Parse* functions.

    Returns [global_form] + forms: the first item holds controls found
    outside any FORM element.  See ParseResponse.__doc__ for arguments.
    """
    if backwards_compat:
        deprecation("operating in backwards-compatibility mode", 1)
    fp = form_parser_class(entitydefs, encoding)
    # feed the parser in fixed-size chunks; a short read signals EOF
    while 1:
        data = file.read(CHUNK)
        try:
            fp.feed(data)
        except ParseError, e:
            # annotate the error with the document URI before re-raising
            e.base_uri = base_uri
            raise
        if len(data) != CHUNK: break
    fp.close()
    if fp.base is not None:
        # HTML BASE element takes precedence over document URI
        base_uri = fp.base
    labels = []  # Label(label) for label in fp.labels]
    id_to_labels = {}
    # wrap the parser's raw label dicts and group them by their "for" id
    for l in fp.labels:
        label = Label(l)
        labels.append(label)
        for_id = l["for"]
        coll = id_to_labels.get(for_id)
        if coll is None:
            id_to_labels[for_id] = [label]
        else:
            coll.append(label)
    forms = []
    for (name, action, method, enctype), attrs, controls in fp.forms:
        if action is None:
            action = base_uri
        else:
            # resolve the form action relative to the (possibly BASE-set) URI
            action = _urljoin(base_uri, action)
        # would be nice to make HTMLForm class (form builder) pluggable
        form = HTMLForm(
            action, method, enctype, name, attrs, request_class,
            forms, labels, id_to_labels, backwards_compat)
        form._urlparse = _urlparse
        form._urlunparse = _urlunparse
        for ii in range(len(controls)):
            type, name, attrs = controls[ii]
            # index=ii*10 allows ImageControl to return multiple ordered pairs
            form.new_control(
                type, name, attrs, select_default=select_default, index=ii*10)
        forms.append(form)
    # fixup only after every control of every form has been added
    for form in forms:
        form.fixup()
    return forms
class Label:
    # Wraps a parsed HTML LABEL element; linked to a control through the
    # element's "for" attribute (exposed as .id).
    def __init__(self, attrs):
        # attrs: dict of the LABEL element's HTML attributes plus the
        # parser-added "__text" key holding the element's text content
        self.id = attrs.get("for")
        self._text = attrs.get("__text").strip()
        self._ctext = compress_text(self._text)
        self.attrs = attrs
        self._backwards_compat = False  # maintained by HTMLForm
    def __getattr__(self, name):
        # "text" is a computed attribute: raw text in backwards-compatibility
        # mode, whitespace-compressed text otherwise
        if name == "text":
            if self._backwards_compat:
                return self._text
            else:
                return self._ctext
        return getattr(Label, name)
    def __setattr__(self, name, value):
        if name == "text":
            # don't see any need for this, so make it read-only
            raise AttributeError("text attribute is read-only")
        self.__dict__[name] = value
    def __str__(self):
        return "<Label(id=%r, text=%r)>" % (self.id, self.text)
def _get_label(attrs):
text = attrs.get("__label")
if text is not None:
return Label(text)
else:
return None
class Control:
    """An HTML form control.

    An HTMLForm contains a sequence of Controls, accessed through the
    HTMLForm.find_control method or the HTMLForm.controls attribute.

    Control instances are normally built by the ParseFile / ParseResponse
    functions, in which case the rest of this paragraph can be ignored.  A
    Control is only properly initialised after its fixup method has been
    called.  Strictly this matters only for ListControl instances: they are
    assembled from single-item ListControls, so their initial value(s) can
    only be known once the whole sequence is complete.

    The types and values acceptable for assignment to the value attribute
    are defined by subclasses.

    A true disabled attribute corresponds to the state browsers typically
    show by 'greying out' a control.  While disabled, assigning to value
    raises AttributeError, and the control is never 'successful' in the W3C
    HTML 4 sense -- it contributes no data to the return value of the
    HTMLForm.click* methods.  Set disabled to a false value to re-enable.

    A true readonly attribute likewise makes assignment to value raise
    AttributeError; set it to a false value to make the control writable.

    Every control carries both disabled and readonly attributes, not only
    the controls whose HTML elements can carry attributes of those names.

    Assigning to the value attribute may raise: TypeError, AttributeError
    (when assignment is forbidden, e.g. for a disabled control) and
    ValueError.

    A control is not successful if its name or value is None, if its value
    is an empty list, or if it is disabled.

    Public attributes:

    type: string describing type of control (see the keys of the
     HTMLForm.type2class dictionary for the allowable values) (readonly)
    name: name of control (readonly)
    value: current value of control (subclasses may allow a single value, a
     sequence of values, or either)
    disabled: disabled state
    readonly: readonly state
    id: value of id HTML attribute
    """

    def __init__(self, type, name, attrs, index=None):
        """
        type: string describing type of control (see the keys of the
         HTMLForm.type2class dictionary for the allowable values)
        name: control name
        attrs: HTML attributes of control's HTML element
        """
        raise NotImplementedError()

    def add_to_form(self, form):
        # back-reference used e.g. by get_labels
        self._form = form
        form.controls.append(self)

    def fixup(self):
        # hook invoked once the whole form has been parsed; no-op by default
        pass

    def is_of_kind(self, kind):
        raise NotImplementedError()

    def clear(self):
        raise NotImplementedError()

    def __getattr__(self, name):
        raise NotImplementedError()

    def __setattr__(self, name, value):
        raise NotImplementedError()

    def pairs(self):
        """Return list of (key, value) pairs suitable for passing to urlencode.
        """
        return [(key, value)
                for (index, key, value) in self._totally_ordered_pairs()]

    def _totally_ordered_pairs(self):
        """Return list of (key, value, index) tuples.

        Like pairs, but allows preserving correct ordering even where several
        controls are involved.
        """
        raise NotImplementedError()

    def _write_mime_data(self, mw, name, value):
        """Write data for a subitem of this control to a MimeWriter."""
        # called by HTMLForm
        part = mw.nextpart()
        part.addheader("Content-Disposition",
                       'form-data; name="%s"' % name, 1)
        body = part.startbody(prefix=0)
        body.write(value)

    def __str__(self):
        raise NotImplementedError()

    def get_labels(self):
        """Return all labels (Label instances) for this control.

        If the control was surrounded by a <label> tag, that is the first
        label; all other labels, connected by 'for' and 'id', follow in the
        order they appear in the HTML.
        """
        labels = []
        if self._label:
            labels.append(self._label)
        if self.id:
            labels.extend(self._form._id_to_labels.get(self.id, ()))
        return labels
#---------------------------------------------------
class ScalarControl(Control):
    """Control whose value is not restricted to one of a prescribed set.

    Some ScalarControls don't accept any value attribute.  Otherwise, takes
    a single value, which must be string-like.

    Additional read-only public attribute:

    attrs: dictionary mapping the names of original HTML attributes of the
     control to their values
    """

    def __init__(self, type, name, attrs, index=None):
        self._index = index
        self._label = _get_label(attrs)
        # type and name are read-only: store directly in __dict__ to bypass
        # the guards in __setattr__
        self.__dict__["type"] = type.lower()
        self.__dict__["name"] = name
        self._value = attrs.get("value")
        self.disabled = attrs.has_key("disabled")
        self.readonly = attrs.has_key("readonly")
        self.id = attrs.get("id")

        self.attrs = attrs.copy()

        self._clicked = False

        self._urlparse = urlparse.urlparse
        self._urlunparse = urlparse.urlunparse

    def __getattr__(self, name):
        # expose the stored value as a virtual "value" attribute
        if name == "value":
            return self.__dict__["_value"]
        raise AttributeError("%s instance has no attribute '%s'" %
                             (self.__class__.__name__, name))

    def __setattr__(self, name, value):
        if name == "value":
            if not isstringlike(value):
                raise TypeError("must assign a string")
            elif self.readonly:
                raise AttributeError("control '%s' is readonly" % self.name)
            elif self.disabled:
                raise AttributeError("control '%s' is disabled" % self.name)
            self.__dict__["_value"] = value
        elif name in ("name", "type"):
            raise AttributeError("%s attribute is readonly" % name)
        else:
            self.__dict__[name] = value

    def _totally_ordered_pairs(self):
        # unnamed, valueless or disabled controls are not 'successful'
        if self.name is None or self.value is None or self.disabled:
            return []
        return [(self._index, self.name, self.value)]

    def clear(self):
        if self.readonly:
            raise AttributeError("control '%s' is readonly" % self.name)
        self.__dict__["_value"] = None

    def __str__(self):
        name = self.name
        value = self.value
        if name is None:
            name = "<None>"
        if value is None:
            value = "<None>"

        flags = []
        if self.disabled:
            flags.append("disabled")
        if self.readonly:
            flags.append("readonly")
        info = ", ".join(flags)
        if info:
            info = " (%s)" % info

        return "<%s(%s=%s)%s>" % (self.__class__.__name__, name, value, info)
#---------------------------------------------------
class TextControl(ScalarControl):
    """Textual input control.

    Covers:

    INPUT/TEXT
    INPUT/PASSWORD
    INPUT/HIDDEN
    TEXTAREA

    """
    def __init__(self, type, name, attrs, index=None):
        ScalarControl.__init__(self, type, name, attrs, index)
        # hidden fields can't be edited by the user, so mark them read-only;
        # a missing value defaults to the empty string
        if self.type == "hidden":
            self.readonly = True
        if self._value is None:
            self._value = ""
    def is_of_kind(self, kind):
        return kind == "text"
#---------------------------------------------------
class FileControl(ScalarControl):
    """File upload with INPUT TYPE=FILE.

    The value attribute of a FileControl is always None: files to upload are
    queued with add_file instead.

    Additional public method: add_file

    """
    def __init__(self, type, name, attrs, index=None):
        ScalarControl.__init__(self, type, name, attrs, index)
        self._value = None
        # list of (file_object, content_type, filename) tuples queued by
        # add_file, in the order they were added
        self._upload_data = []
    def is_of_kind(self, kind): return kind == "file"
    def clear(self):
        # Clearing discards the queued files rather than touching _value
        # (which is always None for this control).
        if self.readonly:
            raise AttributeError("control '%s' is readonly" % self.name)
        self._upload_data = []
    def __setattr__(self, name, value):
        # value is read-only here (unlike ScalarControl); use add_file
        if name in ("value", "name", "type"):
            raise AttributeError("%s attribute is readonly" % name)
        else:
            self.__dict__[name] = value
    def add_file(self, file_object, content_type=None, filename=None):
        # Queue a file-like object for upload.  content_type defaults to
        # application/octet-stream; filename may be None (sent as "").
        if not hasattr(file_object, "read"):
            raise TypeError("file-like object must have read method")
        if content_type is not None and not isstringlike(content_type):
            raise TypeError("content type must be None or string-like")
        if filename is not None and not isstringlike(filename):
            raise TypeError("filename must be None or string-like")
        if content_type is None:
            content_type = "application/octet-stream"
        self._upload_data.append((file_object, content_type, filename))
    def _totally_ordered_pairs(self):
        # XXX should it be successful even if unnamed?
        # Value is always the empty string here: the actual file data is
        # written by _write_mime_data for multipart submissions.
        if self.name is None or self.disabled:
            return []
        return [(self._index, self.name, "")]
    # If enctype is application/x-www-form-urlencoded and there's a FILE
    # control present, what should be sent?  Strictly, it should be 'name=data'
    # (see HTML 4.01 spec., section 17.13.2), but code sends "name=" ATM.  What
    # about multiple file upload?
    def _write_mime_data(self, mw, _name, _value):
        # called by HTMLForm
        # assert _name == self.name and _value == ''
        # mw is a MIME multipart writer; a single file (or no file) becomes
        # one form-data part, multiple files become a nested multipart/mixed
        # part per HTML 4.01 section 17.13.4.
        if len(self._upload_data) < 2:
            if len(self._upload_data) == 0:
                # no file added: send an empty octet-stream body
                file_object = StringIO()
                content_type = "application/octet-stream"
                filename = ""
            else:
                file_object, content_type, filename = self._upload_data[0]
                if filename is None:
                    filename = ""
            mw2 = mw.nextpart()
            fn_part = '; filename="%s"' % filename
            disp = 'form-data; name="%s"%s' % (self.name, fn_part)
            mw2.addheader("Content-Disposition", disp, prefix=1)
            fh = mw2.startbody(content_type, prefix=0)
            fh.write(file_object.read())
        else:
            # multiple files
            mw2 = mw.nextpart()
            disp = 'form-data; name="%s"' % self.name
            mw2.addheader("Content-Disposition", disp, prefix=1)
            fh = mw2.startmultipartbody("mixed", prefix=0)
            for file_object, content_type, filename in self._upload_data:
                mw3 = mw2.nextpart()
                if filename is None:
                    filename = ""
                fn_part = '; filename="%s"' % filename
                disp = "file%s" % fn_part
                mw3.addheader("Content-Disposition", disp, prefix=1)
                fh2 = mw3.startbody(content_type, prefix=0)
                fh2.write(file_object.read())
            mw2.lastpart()
    def __str__(self):
        name = self.name
        if name is None: name = "<None>"
        if not self._upload_data:
            value = "<No files added>"
        else:
            value = []
            for file, ctype, filename in self._upload_data:
                if filename is None:
                    value.append("<Unnamed file>")
                else:
                    value.append(filename)
            value = ", ".join(value)
        info = []
        if self.disabled: info.append("disabled")
        if self.readonly: info.append("readonly")
        info = ", ".join(info)
        if info: info = " (%s)" % info
        return "<%s(%s=%s)%s>" % (self.__class__.__name__, name, value, info)
#---------------------------------------------------
class IsindexControl(ScalarControl):
    """ISINDEX control.

    ISINDEX is the odd-one-out of HTML form controls.  In fact, it isn't really
    part of regular HTML forms at all, and predates it.  You're only allowed
    one ISINDEX per HTML document.  ISINDEX and regular form submission are
    mutually exclusive -- either submit a form, or the ISINDEX.

    Having said this, since ISINDEX controls may appear in forms (which is
    probably bad HTML), ParseFile / ParseResponse will include them in the
    HTMLForm instances it returns.  You can set the ISINDEX's value, as with
    any other control (but note that ISINDEX controls have no name, so you'll
    need to use the type argument of set_value!).  When you submit the form,
    the ISINDEX will not be successful (ie., no data will get returned to the
    server as a result of its presence), unless you click on the ISINDEX
    control, in which case the ISINDEX gets submitted instead of the form:

    form.set_value("my isindex value", type="isindex")
    mechanize.urlopen(form.click(type="isindex"))

    ISINDEX elements outside of FORMs are ignored.  If you want to submit one
    by hand, do it like so:

    url = urlparse.urljoin(page_uri, "?"+urllib.quote_plus("my isindex value"))
    result = mechanize.urlopen(url)

    """
    def __init__(self, type, name, attrs, index=None):
        ScalarControl.__init__(self, type, name, attrs, index)
        # missing value defaults to the empty string
        if self._value is None:
            self._value = ""
    def is_of_kind(self, kind):
        return kind in ("text", "clickable")
    def _totally_ordered_pairs(self):
        # ISINDEX never contributes name=value data; see _click
        return []
    def _click(self, form, coord, return_type, request_class=_request.Request):
        # Relative URL for ISINDEX submission: instead of "foo=bar+baz",
        # want "bar+baz".
        # This doesn't seem to be specified in HTML 4.01 spec. (ISINDEX is
        # deprecated in 4.01, but it should still say how to submit it).
        # Submission of ISINDEX is explained in the HTML 3.2 spec, though.
        parts = self._urlparse(form.action)
        # replace the query with the quoted value, and drop the fragment
        url = self._urlunparse(
            parts[:-2] + (urllib.quote_plus(self.value), None))
        if return_type == "pairs":
            return []
        if return_type == "request_data":
            return url, None, []
        return request_class(url)
    def __str__(self):
        value = self.value
        if value is None:
            value = "<None>"
        flags = [flag for flag in ("disabled", "readonly")
                 if getattr(self, flag)]
        info = ", ".join(flags)
        if info:
            info = " (%s)" % info
        return "<%s(%s)%s>" % (self.__class__.__name__, value, info)
#---------------------------------------------------
class IgnoreControl(ScalarControl):
    """Control that we're not interested in.

    Covers:

    INPUT/RESET
    BUTTON/RESET
    INPUT/BUTTON
    BUTTON/BUTTON

    These controls are always unsuccessful, in the terminology of HTML 4 (ie.
    they never require any information to be returned to the server).

    BUTTON/BUTTON is used to generate events for script embedded in HTML.

    The value attribute of IgnoreControl is always None.

    """
    def __init__(self, type, name, attrs, index=None):
        ScalarControl.__init__(self, type, name, attrs, index)
        self._value = None
    def is_of_kind(self, kind):
        # ignored controls belong to no kind at all
        return False
    def __setattr__(self, name, value):
        if name == "value":
            raise AttributeError(
                "control '%s' is ignored, hence read-only" % self.name)
        if name in ("name", "type"):
            raise AttributeError("%s attribute is readonly" % name)
        self.__dict__[name] = value
#---------------------------------------------------
# ListControls
# helpers and subsidiary classes
class Item:
    """A single item (e.g. an OPTION, radio button or checkbox) belonging to
    a ListControl."""
    def __init__(self, control, attrs, index=None):
        label = _get_label(attrs)
        # __setattr__ only permits "selected" and "disabled", so populate the
        # instance dict directly.
        self.__dict__.update({
            "name": attrs["value"],
            "_labels": [label] if label else [],
            "attrs": attrs,
            "_control": control,
            "disabled": "disabled" in attrs,
            "_selected": False,
            "id": attrs.get("id"),
            "_index": index,
            })
        control.items.append(self)
    def get_labels(self):
        """Return all labels (Label instances) for this item.

        For items that represent radio buttons or checkboxes, if the item was
        surrounded by a <label> tag, that will be the first label; all other
        labels, connected by 'for' and 'id', are in the order that appear in
        the HTML.

        For items that represent select options, if the option had a label
        attribute, that will be the first label.  If the option has contents
        (text within the option tags) and it is not the same as the label
        attribute (if any), that will be a label.  There is nothing in the
        spec to my knowledge that makes an option with an id unable to be the
        target of a label's for attribute, so those are included, if any, for
        the sake of consistency and completeness.

        """
        labels = list(self._labels)
        if self.id:
            labels.extend(self._control._form._id_to_labels.get(self.id, ()))
        return labels
    def __getattr__(self, name):
        # "selected" is the only computed attribute
        if name == "selected":
            return self._selected
        raise AttributeError(name)
    def __setattr__(self, name, value):
        if name == "selected":
            # selection state is managed by the owning control
            self._control._set_selected_state(self, value)
        elif name == "disabled":
            self.__dict__["disabled"] = bool(value)
        else:
            raise AttributeError(name)
    def __str__(self):
        text = self.name
        if self.selected:
            text = "*" + text
        if self.disabled:
            text = "(%s)" % text
        return text
    def __repr__(self):
        # XXX appending the attrs without distinguishing them from name and id
        # is silly
        pairs = [("name", self.name), ("id", self.id)] + self.attrs.items()
        details = " ".join(["%s=%r" % (k, v) for k, v in pairs])
        return "<%s %s>" % (self.__class__.__name__, details)
def disambiguate(items, nr, **kwds):
    """Pick one item from a candidate list, using nr as a 0-based index.

    Raises ItemNotFoundError if there is no matching item, and AmbiguityError
    if nr is None while more than one candidate matches.  The keyword
    arguments only describe the original query, for error messages.
    """
    msg = " ".join(["%s=%r" % (key, value) for key, value in kwds.items()])
    if not items:
        raise ItemNotFoundError(msg)
    if nr is None:
        if len(items) > 1:
            raise AmbiguityError(msg)
        nr = 0
    if len(items) <= nr:
        raise ItemNotFoundError(msg)
    return items[nr]
class ListControl(Control):
    """Control representing a sequence of items.

    The value attribute of a ListControl represents the successful list items
    in the control.  The successful list items are those that are selected and
    not disabled.

    ListControl implements both list controls that take a length-1 value
    (single-selection) and those that take length >1 values
    (multiple-selection).

    ListControls accept sequence values only.  Some controls only accept
    sequences of length 0 or 1 (RADIO, and single-selection SELECT).
    In those cases, ItemCountError is raised if len(sequence) > 1.  CHECKBOXes
    and multiple-selection SELECTs (those having the "multiple" HTML attribute)
    accept sequences of any length.

    Note the following mistake:

    control.value = some_value
    assert control.value == some_value    # not necessarily true

    The reason for this is that the value attribute always gives the list items
    in the order they were listed in the HTML.

    ListControl items can also be referred to by their labels instead of names.
    Use the label argument to .get(), and the .set_value_by_label(),
    .get_value_by_label() methods.

    Note that, rather confusingly, though SELECT controls are represented in
    HTML by SELECT elements (which contain OPTION elements, representing
    individual list items), CHECKBOXes and RADIOs are not represented by *any*
    element.  Instead, those controls are represented by a collection of INPUT
    elements.  For example, this is a SELECT control, named "control1":

    <select name="control1">
     <option>foo</option>
     <option value="1">bar</option>
    </select>

    and this is a CHECKBOX control, named "control2":

    <input type="checkbox" name="control2" value="foo" id="cbe1">
    <input type="checkbox" name="control2" value="bar" id="cbe2">

    The id attribute of a CHECKBOX or RADIO ListControl is always that of its
    first element (for example, "cbe1" above).

    Additional read-only public attribute: multiple.

    """
    # ListControls are built up by the parser from their component items by
    # creating one ListControl per item, consolidating them into a single
    # master ListControl held by the HTMLForm:
    # -User calls form.new_control(...)
    # -Form creates Control, and calls control.add_to_form(self).
    # -Control looks for a Control with the same name and type in the form,
    #  and if it finds one, merges itself with that control by calling
    #  control.merge_control(self).  The first Control added to the form, of
    #  a particular name and type, is the only one that survives in the
    #  form.
    # -Form calls control.fixup for all its controls.  ListControls in the
    #  form know they can now safely pick their default values.
    # To create a ListControl without an HTMLForm, use:
    # control.merge_control(new_control)
    # (actually, it's much easier just to use ParseFile)
    _label = None
    def __init__(self, type, name, attrs={}, select_default=False,
                 called_as_base_class=False, index=None):
        """
        select_default: for RADIO and multiple-selection SELECT controls, pick
        the first item as the default if no 'selected' HTML attribute is
        present

        """
        if not called_as_base_class:
            raise NotImplementedError()
        self.__dict__["type"] = type.lower()
        self.__dict__["name"] = name
        self._value = attrs.get("value")
        self.disabled = False
        self.readonly = False
        self.id = attrs.get("id")
        self._closed = False
        # As Controls are merged in with .merge_control(), self.attrs will
        # refer to each Control in turn -- always the most recently merged
        # control.  Each merged-in Control instance corresponds to a single
        # list item: see ListControl.__doc__.
        self.items = []
        self._form = None
        self._select_default = select_default
        self._clicked = False
    def clear(self):
        """Deselect every item in the control."""
        self.value = []
    def is_of_kind(self, kind):
        if kind == "list":
            return True
        elif kind == "multilist":
            return bool(self.multiple)
        elif kind == "singlelist":
            return not self.multiple
        else:
            return False
    def get_items(self, name=None, label=None, id=None,
                  exclude_disabled=False):
        """Return matching items by name or label.

        For argument docs, see the docstring for .get()

        """
        if name is not None and not isstringlike(name):
            raise TypeError("item name must be string-like")
        if label is not None and not isstringlike(label):
            raise TypeError("item label must be string-like")
        if id is not None and not isstringlike(id):
            raise TypeError("item id must be string-like")
        items = []  # order is important
        compat = self._form.backwards_compat
        for o in self.items:
            if exclude_disabled and o.disabled:
                continue
            if name is not None and o.name != name:
                continue
            if label is not None:
                for l in o.get_labels():
                    # backwards-compat mode requires an exact label match;
                    # otherwise substring matching is used
                    if ((compat and l.text == label) or
                        (not compat and l.text.find(label) > -1)):
                        break
                else:
                    continue
            if id is not None and o.id != id:
                continue
            items.append(o)
        return items
    def get(self, name=None, label=None, id=None, nr=None,
            exclude_disabled=False):
        """Return item by name or label, disambiguating if necessary with nr.

        All arguments must be passed by name, with the exception of 'name',
        which may be used as a positional argument.

        If name is specified, then the item must have the indicated name.

        If label is specified, then the item must have a label whose
        whitespace-compressed, stripped, text substring-matches the indicated
        label string (e.g. label="please choose" will match
        "  Do  please  choose an item ").

        If id is specified, then the item must have the indicated id.

        nr is an optional 0-based index of the items matching the query.

        If nr is the default None value and more than item is found, raises
        AmbiguityError (unless the HTMLForm instance's backwards_compat
        attribute is true).

        If no item is found, or if items are found but nr is specified and not
        found, raises ItemNotFoundError.

        Optionally excludes disabled items.

        """
        if nr is None and self._form.backwards_compat:
            nr = 0  # :-/
        items = self.get_items(name, label, id, exclude_disabled)
        return disambiguate(items, nr, name=name, label=label, id=id)
    def _get(self, name, by_label=False, nr=None, exclude_disabled=False):
        # strictly for use by deprecated methods
        if by_label:
            name, label = None, name
        else:
            name, label = name, None
        # pass nr and exclude_disabled by keyword: positionally, they would
        # land in get()'s id and nr parameter slots
        return self.get(name, label, nr=nr, exclude_disabled=exclude_disabled)
    def toggle(self, name, by_label=False, nr=None):
        """Deprecated: given a name or label and optional disambiguating index
        nr, toggle the matching item's selection.

        Selecting items follows the behavior described in the docstring of the
        'get' method.

        if the item is disabled, or this control is disabled or readonly,
        raise AttributeError.

        """
        deprecation(
            "item = control.get(...); item.selected = not item.selected")
        o = self._get(name, by_label, nr)
        self._set_selected_state(o, not o.selected)
    def set(self, selected, name, by_label=False, nr=None):
        """Deprecated: given a name or label and optional disambiguating index
        nr, set the matching item's selection to the bool value of selected.

        Selecting items follows the behavior described in the docstring of the
        'get' method.

        if the item is disabled, or this control is disabled or readonly,
        raise AttributeError.

        """
        deprecation(
            "control.get(...).selected = <boolean>")
        self._set_selected_state(self._get(name, by_label, nr), selected)
    def _set_selected_state(self, item, action):
        # action:
        # bool False: off
        # bool True: on
        if self.disabled:
            raise AttributeError("control '%s' is disabled" % self.name)
        if self.readonly:
            raise AttributeError("control '%s' is readonly" % self.name)
        # normalize action to a bool (this was previously "==", a no-op
        # comparison, so non-bool truthy values leaked into _selected)
        action = bool(action)
        compat = self._form.backwards_compat
        if not compat and item.disabled:
            raise AttributeError("item is disabled")
        else:
            # backwards-compat mode only forbids *selecting* a disabled item
            if compat and item.disabled and action:
                raise AttributeError("item is disabled")
            if self.multiple:
                item.__dict__["_selected"] = action
            else:
                if not action:
                    item.__dict__["_selected"] = False
                else:
                    # single-selection: deselect everything else first
                    for o in self.items:
                        o.__dict__["_selected"] = False
                    item.__dict__["_selected"] = True
    def toggle_single(self, by_label=None):
        """Deprecated: toggle the selection of the single item in this control.

        Raises ItemCountError if the control does not contain only one item.

        by_label argument is ignored, and included only for backwards
        compatibility.

        """
        deprecation(
            "control.items[0].selected = not control.items[0].selected")
        if len(self.items) != 1:
            raise ItemCountError(
                "'%s' is not a single-item control" % self.name)
        item = self.items[0]
        self._set_selected_state(item, not item.selected)
    def set_single(self, selected, by_label=None):
        """Deprecated: set the selection of the single item in this control.

        Raises ItemCountError if the control does not contain only one item.

        by_label argument is ignored, and included only for backwards
        compatibility.

        """
        deprecation(
            "control.items[0].selected = <boolean>")
        if len(self.items) != 1:
            raise ItemCountError(
                "'%s' is not a single-item control" % self.name)
        self._set_selected_state(self.items[0], selected)
    def get_item_disabled(self, name, by_label=False, nr=None):
        """Get disabled state of named list item in a ListControl."""
        deprecation(
            "control.get(...).disabled")
        return self._get(name, by_label, nr).disabled
    def set_item_disabled(self, disabled, name, by_label=False, nr=None):
        """Set disabled state of named list item in a ListControl.

        disabled: boolean disabled state

        """
        deprecation(
            "control.get(...).disabled = <boolean>")
        self._get(name, by_label, nr).disabled = disabled
    def set_all_items_disabled(self, disabled):
        """Set disabled state of all list items in a ListControl.

        disabled: boolean disabled state

        """
        for o in self.items:
            o.disabled = disabled
    def get_item_attrs(self, name, by_label=False, nr=None):
        """Return dictionary of HTML attributes for a single ListControl item.

        The HTML element types that describe list items are: OPTION for SELECT
        controls, INPUT for the rest.  These elements have HTML attributes that
        you may occasionally want to know about -- for example, the "alt" HTML
        attribute gives a text string describing the item (graphical browsers
        usually display this as a tooltip).

        The returned dictionary maps HTML attribute names to values.  The names
        and values are taken from the original HTML.

        """
        deprecation(
            "control.get(...).attrs")
        return self._get(name, by_label, nr).attrs
    def close_control(self):
        # mark this control complete; later same-named controls become
        # separate controls rather than being merged in (see add_to_form)
        self._closed = True
    def add_to_form(self, form):
        assert self._form is None or form == self._form, (
            "can't add control to more than one form")
        self._form = form
        if self.name is None:
            # always count nameless elements as separate controls
            Control.add_to_form(self, form)
        else:
            # merge with the most recent open control of the same name/type,
            # if any; otherwise add as a new control
            for ii in range(len(form.controls)-1, -1, -1):
                control = form.controls[ii]
                if control.name == self.name and control.type == self.type:
                    if control._closed:
                        Control.add_to_form(self, form)
                    else:
                        control.merge_control(self)
                    break
            else:
                Control.add_to_form(self, form)
    def merge_control(self, control):
        assert bool(control.multiple) == bool(self.multiple)
        # usually, isinstance(control, self.__class__)
        self.items.extend(control.items)
    def fixup(self):
        """
        ListControls are built up from component list items (which are also
        ListControls) during parsing.  This method should be called after all
        items have been added.  See ListControl.__doc__ for the reason this is
        required.

        """
        # Need to set default selection where no item was indicated as being
        # selected by the HTML:
        # CHECKBOX:
        #  Nothing should be selected.
        # SELECT/single, SELECT/multiple and RADIO:
        #  RFC 1866 (HTML 2.0): says first item should be selected.
        #  W3C HTML 4.01 Specification: says that client behaviour is
        #   undefined in this case.  For RADIO, exactly one must be selected,
        #   though which one is undefined.
        #  Both Netscape and Microsoft Internet Explorer (IE) choose first
        #   item for SELECT/single.  However, both IE5 and Mozilla (both 1.0
        #   and Firebird 0.6) leave all items unselected for RADIO and
        #   SELECT/multiple.
        # Since both Netscape and IE all choose the first item for
        # SELECT/single, we do the same.  OTOH, both Netscape and IE
        # leave SELECT/multiple with nothing selected, in violation of RFC 1866
        # (but not in violation of the W3C HTML 4 standard); the same is true
        # of RADIO (which *is* in violation of the HTML 4 standard).  We follow
        # RFC 1866 if the _select_default attribute is set, and Netscape and IE
        # otherwise.  RFC 1866 and HTML 4 are always violated insofar as you
        # can deselect all items in a RadioControl.
        for o in self.items:
            # set items' controls to self, now that we've merged
            o.__dict__["_control"] = self
    def __getattr__(self, name):
        if name == "value":
            compat = self._form.backwards_compat
            if self.name is None:
                return []
            return [o.name for o in self.items if o.selected and
                    (not o.disabled or compat)]
        else:
            raise AttributeError("%s instance has no attribute '%s'" %
                                 (self.__class__.__name__, name))
    def __setattr__(self, name, value):
        if name == "value":
            if self.disabled:
                raise AttributeError("control '%s' is disabled" % self.name)
            if self.readonly:
                raise AttributeError("control '%s' is readonly" % self.name)
            self._set_value(value)
        elif name in ("name", "type", "multiple"):
            raise AttributeError("%s attribute is readonly" % name)
        else:
            self.__dict__[name] = value
    def _set_value(self, value):
        if value is None or isstringlike(value):
            raise TypeError("ListControl, must set a sequence")
        if not value:
            # empty sequence: deselect everything we can
            compat = self._form.backwards_compat
            for o in self.items:
                if not o.disabled or compat:
                    o.selected = False
        elif self.multiple:
            self._multiple_set_value(value)
        elif len(value) > 1:
            raise ItemCountError(
                "single selection list, must set sequence of "
                "length 0 or 1")
        else:
            self._single_set_value(value)
    def _get_items(self, name, target=1):
        # Return (selected, unselected) non-disabled items with the given
        # name, requiring at least `target` of them to exist.
        all_items = self.get_items(name)
        items = [o for o in all_items if not o.disabled]
        if len(items) < target:
            if len(all_items) < target:
                raise ItemNotFoundError(
                    "insufficient items with name %r" % name)
            else:
                raise AttributeError(
                    "insufficient non-disabled items with name %s" % name)
        on = []
        off = []
        for o in items:
            if o.selected:
                on.append(o)
            else:
                off.append(o)
        return on, off
    def _single_set_value(self, value):
        assert len(value) == 1
        on, off = self._get_items(value[0])
        assert len(on) <= 1
        if not on:
            off[0].selected = True
    def _multiple_set_value(self, value):
        compat = self._form.backwards_compat
        turn_on = []  # transactional-ish
        turn_off = [item for item in self.items if
                    item.selected and (not item.disabled or compat)]
        names = {}
        for nn in value:
            if nn in names.keys():
                names[nn] += 1
            else:
                names[nn] = 1
        for name, count in names.items():
            on, off = self._get_items(name, count)
            for i in range(count):
                if on:
                    # already selected: keep it that way
                    item = on[0]
                    del on[0]
                    del turn_off[turn_off.index(item)]
                else:
                    item = off[0]
                    del off[0]
                    turn_on.append(item)
        for item in turn_off:
            item.selected = False
        for item in turn_on:
            item.selected = True
    def set_value_by_label(self, value):
        """Set the value of control by item labels.

        value is expected to be an iterable of strings that are substrings of
        the item labels that should be selected.  Before substring matching is
        performed, the original label text is whitespace-compressed
        (consecutive whitespace characters are converted to a single space
        character) and leading and trailing whitespace is stripped.  Ambiguous
        labels are accepted without complaint if the form's backwards_compat is
        True; otherwise, it will not complain as long as all ambiguous labels
        share the same item name (e.g. OPTION value).

        """
        if isstringlike(value):
            raise TypeError(value)
        if not self.multiple and len(value) > 1:
            raise ItemCountError(
                "single selection list, must set sequence of "
                "length 0 or 1")
        items = []
        for nn in value:
            found = self.get_items(label=nn)
            if len(found) > 1:
                if not self._form.backwards_compat:
                    # ambiguous labels are fine as long as item names (e.g.
                    # OPTION values) are same
                    opt_name = found[0].name
                    if [o for o in found[1:] if o.name != opt_name]:
                        raise AmbiguityError(nn)
                else:
                    # OK, we'll guess :-(  Assume first available item.
                    found = found[:1]
            for o in found:
                # For the multiple-item case, we could try to be smarter,
                # saving them up and trying to resolve, but that's too much.
                if self._form.backwards_compat or o not in items:
                    items.append(o)
                    break
            else:  # all of them are used
                raise ItemNotFoundError(nn)
        # now we have all the items that should be on
        # let's just turn everything off and then back on.
        self.value = []
        for o in items:
            o.selected = True
    def get_value_by_label(self):
        """Return the value of the control as given by normalized labels."""
        res = []
        compat = self._form.backwards_compat
        for o in self.items:
            if (not o.disabled or compat) and o.selected:
                for l in o.get_labels():
                    if l.text:
                        res.append(l.text)
                        break
                else:
                    res.append(None)
        return res
    def possible_items(self, by_label=False):
        """Deprecated: return the names or labels of all possible items.

        Includes disabled items, which may be misleading for some use cases.

        """
        deprecation(
            "[item.name for item in self.items]")
        if by_label:
            res = []
            for o in self.items:
                for l in o.get_labels():
                    if l.text:
                        res.append(l.text)
                        break
                else:
                    res.append(None)
            return res
        return [o.name for o in self.items]
    def _totally_ordered_pairs(self):
        # one (index, name, item-name) triple per selected, enabled item
        if self.disabled or self.name is None:
            return []
        else:
            return [(o._index, self.name, o.name) for o in self.items
                    if o.selected and not o.disabled]
    def __str__(self):
        name = self.name
        if name is None: name = "<None>"
        display = [str(o) for o in self.items]
        infos = []
        if self.disabled: infos.append("disabled")
        if self.readonly: infos.append("readonly")
        info = ", ".join(infos)
        if info: info = " (%s)" % info
        return "<%s(%s=[%s])%s>" % (self.__class__.__name__,
                                    name, ", ".join(display), info)
class RadioControl(ListControl):
    """
    Covers:

    INPUT/RADIO

    """
    def __init__(self, type, name, attrs, select_default=False, index=None):
        # HTML default for a radio button with no explicit value attribute
        attrs.setdefault("value", "on")
        ListControl.__init__(self, type, name, attrs, select_default,
                             called_as_base_class=True, index=index)
        self.__dict__["multiple"] = False
        item = Item(self, attrs, index)
        item.__dict__["_selected"] = "checked" in attrs
    def fixup(self):
        ListControl.fixup(self)
        chosen = [o for o in self.items if o.selected and not o.disabled]
        if chosen:
            # Ensure only one item selected.  Choose the last one,
            # following IE and Firefox.
            for o in chosen[:-1]:
                o.selected = False
        elif self._select_default:
            # RFC 1866 behaviour: default to the first enabled item
            for o in self.items:
                if not o.disabled:
                    o.selected = True
                    break
    def get_labels(self):
        # individual items carry the labels, not the control as a whole
        return []
class CheckboxControl(ListControl):
    """
    Covers:

    INPUT/CHECKBOX

    """
    def __init__(self, type, name, attrs, select_default=False, index=None):
        # HTML default for a checkbox with no explicit value attribute
        attrs.setdefault("value", "on")
        ListControl.__init__(self, type, name, attrs, select_default,
                             called_as_base_class=True, index=index)
        self.__dict__["multiple"] = True
        item = Item(self, attrs, index)
        item.__dict__["_selected"] = "checked" in attrs
    def get_labels(self):
        # individual items carry the labels, not the control as a whole
        return []
class SelectControl(ListControl):
    """
    Covers:

    SELECT (and OPTION)

    OPTION 'values', in HTML parlance, are Item 'names' in mechanize parlance.

    SELECT control values and labels are subject to some messy defaulting
    rules.  For example, if the HTML representation of the control is:

    <SELECT name=year>
      <OPTION value=0 label="2002">current year</OPTION>
      <OPTION value=1>2001</OPTION>
      <OPTION>2000</OPTION>
    </SELECT>

    The items, in order, have labels "2002", "2001" and "2000", whereas their
    names (the OPTION values) are "0", "1" and "2000" respectively.  Note that
    the value of the last OPTION in this example defaults to its contents, as
    specified by RFC 1866, as do the labels of the second and third OPTIONs.

    The OPTION labels are sometimes more meaningful than the OPTION values,
    which can make for more maintainable code.

    Additional read-only public attribute: attrs

    The attrs attribute is a dictionary of the original HTML attributes of the
    SELECT element.  Other ListControls do not have this attribute, because in
    other cases the control as a whole does not correspond to any single HTML
    element.  control.get(...).attrs may be used as usual to get at the HTML
    attributes of the HTML elements corresponding to individual list items (for
    SELECT controls, these are OPTION elements).

    Another special case is that the Item.attrs dictionaries have a special key
    "contents" which does not correspond to any real HTML attribute, but rather
    contains the contents of the OPTION element:

    <OPTION>this bit</OPTION>

    """
    # HTML attributes here are treated slightly differently from other list
    # controls:
    # -The SELECT HTML attributes dictionary is stuffed into the OPTION
    #  HTML attributes dictionary under the "__select" key.
    # -The content of each OPTION element is stored under the special
    #  "contents" key of the dictionary.
    # After all this, the dictionary is passed to the SelectControl constructor
    # as the attrs argument, as usual.  However:
    # -The first SelectControl constructed when building up a SELECT control
    #  has a constructor attrs argument containing only the __select key -- so
    #  this SelectControl represents an empty SELECT control.
    # -Subsequent SelectControls have both OPTION HTML-attribute in attrs and
    #  the __select dictionary containing the SELECT HTML-attributes.
    def __init__(self, type, name, attrs, select_default=False, index=None):
        # fish out the SELECT HTML attributes from the OPTION HTML attributes
        # dictionary
        self.attrs = attrs["__select"].copy()
        self.__dict__["_label"] = _get_label(self.attrs)
        self.__dict__["id"] = self.attrs.get("id")
        self.__dict__["multiple"] = self.attrs.has_key("multiple")
        # the majority of the contents, label, and value dance already happened
        contents = attrs.get("contents")
        attrs = attrs.copy()
        del attrs["__select"]
        ListControl.__init__(self, type, name, self.attrs, select_default,
                             called_as_base_class=True, index=index)
        self.disabled = self.attrs.has_key("disabled")
        self.readonly = self.attrs.has_key("readonly")
        if attrs.has_key("value"):
            # otherwise it is a marker 'select started' token
            o = Item(self, attrs, index)
            o.__dict__["_selected"] = attrs.has_key("selected")
            # add 'label' label and contents label, if different.  If both are
            # provided, the 'label' label is used for display in HTML
            # 4.0-compliant browsers (and any lower spec? not sure) while the
            # contents are used for display in older or less-compliant
            # browsers.  We make label objects for both, if the values are
            # different.
            label = attrs.get("label")
            if label:
                o._labels.append(Label({"__text": label}))
                if contents and contents != label:
                    o._labels.append(Label({"__text": contents}))
            elif contents:
                o._labels.append(Label({"__text": contents}))
    def fixup(self):
        ListControl.fixup(self)
        # Firefox doesn't exclude disabled items from those considered here
        # (i.e. from 'found', for both branches of the if below).  Note that
        # IE6 doesn't support the disabled attribute on OPTIONs at all.
        found = [o for o in self.items if o.selected]
        if not found:
            if not self.multiple or self._select_default:
                for o in self.items:
                    if not o.disabled:
                        # Temporarily clear the control-level disabled flag so
                        # the default selection can be applied (item.selected
                        # raises on a disabled control), then restore it on
                        # the *control* -- not on the item, which is known to
                        # be enabled here.
                        was_disabled = self.disabled
                        self.disabled = False
                        try:
                            o.selected = True
                        finally:
                            self.disabled = was_disabled
                        break
        elif not self.multiple:
            # Ensure only one item selected.  Choose the last one,
            # following IE and Firefox.
            for o in found[:-1]:
                o.selected = False
#---------------------------------------------------
class SubmitControl(ScalarControl):
    """
    Covers:

    INPUT/SUBMIT
    BUTTON/SUBMIT

    """
    def __init__(self, type, name, attrs, index=None):
        ScalarControl.__init__(self, type, name, attrs, index)
        # Browsers disagree on the default SUBMIT value (IE5: "Submit Query";
        # Konqueror 3.1: "Submit"; Firebird 0.6: blank) and the HTML spec is
        # silent, so fall back to the empty string.
        if self.value is None:
            self.value = ""
        self.readonly = True
    def get_labels(self):
        # the button's own value doubles as a label, ahead of any HTML labels
        labels = []
        if self.value:
            labels.append(Label({"__text": self.value}))
        labels.extend(ScalarControl.get_labels(self))
        return labels
    def is_of_kind(self, kind):
        return kind == "clickable"
    def _click(self, form, coord, return_type, request_class=_request.Request):
        # mark this button as the one clicked for the duration of the submit
        self._clicked = coord
        result = form._switch_click(return_type, request_class)
        self._clicked = False
        return result
    def _totally_ordered_pairs(self):
        # only the clicked submit button contributes data pairs
        if self._clicked:
            return ScalarControl._totally_ordered_pairs(self)
        return []
#---------------------------------------------------
class ImageControl(SubmitControl):
    """
    Covers:

    INPUT/IMAGE

    Click coordinates are specified using one of the HTMLForm.click* methods.

    """
    def __init__(self, type, name, attrs, index=None):
        SubmitControl.__init__(self, type, name, attrs, index)
        self.readonly = False
    def _totally_ordered_pairs(self):
        clicked = self._clicked
        if self.disabled or not clicked:
            return []
        name = self.name
        if name is None:
            return []
        # an image submit contributes name.x / name.y coordinate pairs ...
        x, y = clicked[0], clicked[1]
        pairs = [(self._index, "%s.x" % name, str(x)),
                 (self._index + 1, "%s.y" % name, str(y))]
        # ... plus its plain value pair, when it has one
        value = self._value
        if value:
            pairs.append((self._index + 2, name, value))
        return pairs
    # images use the plain ScalarControl labels, not SubmitControl's
    # value-as-label behaviour
    get_labels = ScalarControl.get_labels
# aliases, just to make str(control) and str(form) clearer
class PasswordControl(TextControl):
    """Covers INPUT/PASSWORD (behaviour identical to TextControl)."""
class HiddenControl(TextControl):
    """Covers INPUT/HIDDEN (behaviour identical to TextControl)."""
class TextareaControl(TextControl):
    """Covers TEXTAREA (behaviour identical to TextControl)."""
class SubmitButtonControl(SubmitControl):
    """Covers BUTTON/SUBMIT (behaviour identical to SubmitControl)."""
def is_listcontrol(control):
    """Predicate: true iff `control` reports itself as a "list" kind."""
    return control.is_of_kind("list")
class HTMLForm:
"""Represents a single HTML <form> ... </form> element.
A form consists of a sequence of controls that usually have names, and
which can take on various values. The values of the various types of
controls represent variously: text, zero-or-one-of-many or many-of-many
choices, and files to be uploaded. Some controls can be clicked on to
submit the form, and clickable controls' values sometimes include the
coordinates of the click.
Forms can be filled in with data to be returned to the server, and then
submitted, using the click method to generate a request object suitable for
passing to mechanize.urlopen (or the click_request_data or click_pairs
methods for integration with third-party code).
import mechanize
forms = mechanize.ParseFile(html, base_uri)
form = forms[0]
form["query"] = "Python"
form.find_control("nr_results").get("lots").selected = True
response = mechanize.urlopen(form.click())
Usually, HTMLForm instances are not created directly. Instead, the
ParseFile or ParseResponse factory functions are used. If you do construct
HTMLForm objects yourself, however, note that an HTMLForm instance is only
properly initialised after the fixup method has been called (ParseFile and
ParseResponse do this for you). See ListControl.__doc__ for the reason
this is required.
Indexing a form (form["control_name"]) returns the named Control's value
attribute. Assignment to a form index (form["control_name"] = something)
is equivalent to assignment to the named Control's value attribute. If you
need to be more specific than just supplying the control's name, use the
set_value and get_value methods.
ListControl values are lists of item names (specifically, the names of the
items that are selected and not disabled, and hence are "successful" -- ie.
cause data to be returned to the server). The list item's name is the
    value of the corresponding HTML element's "value" attribute.
Example:
<INPUT type="CHECKBOX" name="cheeses" value="leicester"></INPUT>
<INPUT type="CHECKBOX" name="cheeses" value="cheddar"></INPUT>
defines a CHECKBOX control with name "cheeses" which has two items, named
"leicester" and "cheddar".
Another example:
<SELECT name="more_cheeses">
<OPTION>1</OPTION>
<OPTION value="2" label="CHEDDAR">cheddar</OPTION>
</SELECT>
defines a SELECT control with name "more_cheeses" which has two items,
named "1" and "2" (because the OPTION element's value HTML attribute
defaults to the element contents -- see SelectControl.__doc__ for more on
these defaulting rules).
To select, deselect or otherwise manipulate individual list items, use the
HTMLForm.find_control() and ListControl.get() methods. To set the whole
value, do as for any other control: use indexing or the set_/get_value
methods.
Example:
# select *only* the item named "cheddar"
form["cheeses"] = ["cheddar"]
# select "cheddar", leave other items unaffected
form.find_control("cheeses").get("cheddar").selected = True
Some controls (RADIO and SELECT without the multiple attribute) can only
have zero or one items selected at a time. Some controls (CHECKBOX and
SELECT with the multiple attribute) can have multiple items selected at a
time. To set the whole value of a ListControl, assign a sequence to a form
index:
form["cheeses"] = ["cheddar", "leicester"]
If the ListControl is not multiple-selection, the assigned list must be of
length one.
To check if a control has an item, if an item is selected, or if an item is
successful (selected and not disabled), respectively:
"cheddar" in [item.name for item in form.find_control("cheeses").items]
"cheddar" in [item.name for item in form.find_control("cheeses").items and
item.selected]
"cheddar" in form["cheeses"] # (or "cheddar" in form.get_value("cheeses"))
Note that some list items may be disabled (see below).
Note the following mistake:
form[control_name] = control_value
assert form[control_name] == control_value # not necessarily true
The reason for this is that form[control_name] always gives the list items
in the order they were listed in the HTML.
List items (hence list values, too) can be referred to in terms of list
item labels rather than list item names using the appropriate label
arguments. Note that each item may have several labels.
The question of default values of OPTION contents, labels and values is
somewhat complicated: see SelectControl.__doc__ and
ListControl.get_item_attrs.__doc__ if you think you need to know.
Controls can be disabled or readonly. In either case, the control's value
cannot be changed until you clear those flags (see example below).
Disabled is the state typically represented by browsers by 'greying out' a
control. Disabled controls are not 'successful' -- they don't cause data
to get returned to the server. Readonly controls usually appear in
browsers as read-only text boxes. Readonly controls are successful. List
items can also be disabled. Attempts to select or deselect disabled items
fail with AttributeError.
If a lot of controls are readonly, it can be useful to do this:
form.set_all_readonly(False)
To clear a control's value attribute, so that it is not successful (until a
value is subsequently set):
form.clear("cheeses")
More examples:
control = form.find_control("cheeses")
control.disabled = False
control.readonly = False
control.get("gruyere").disabled = True
control.items[0].selected = True
See the various Control classes for further documentation. Many methods
take name, type, kind, id, label and nr arguments to specify the control to
be operated on: see HTMLForm.find_control.__doc__.
ControlNotFoundError (subclass of ValueError) is raised if the specified
control can't be found. This includes occasions where a non-ListControl
is found, but the method (set, for example) requires a ListControl.
ItemNotFoundError (subclass of ValueError) is raised if a list item can't
be found. ItemCountError (subclass of ValueError) is raised if an attempt
is made to select more than one item and the control doesn't allow that, or
set/get_single are called and the control contains more than one item.
AttributeError is raised if a control or item is readonly or disabled and
an attempt is made to alter its value.
Security note: Remember that any passwords you store in HTMLForm instances
will be saved to disk in the clear if you pickle them (directly or
indirectly). The simplest solution to this is to avoid pickling HTMLForm
objects. You could also pickle before filling in any password, or just set
the password to "" before pickling.
Public attributes:
action: full (absolute URI) form action
method: "GET" or "POST"
enctype: form transfer encoding MIME type
name: name of form (None if no name was specified)
attrs: dictionary mapping original HTML form attributes to their values
controls: list of Control instances; do not alter this list
(instead, call form.new_control to make a Control and add it to the
form, or control.add_to_form if you already have a Control instance)
Methods for form filling:
-------------------------
Most of the these methods have very similar arguments. See
HTMLForm.find_control.__doc__ for details of the name, type, kind, label
and nr arguments.
def find_control(self,
name=None, type=None, kind=None, id=None, predicate=None,
nr=None, label=None)
get_value(name=None, type=None, kind=None, id=None, nr=None,
by_label=False, # by_label is deprecated
label=None)
set_value(value,
name=None, type=None, kind=None, id=None, nr=None,
by_label=False, # by_label is deprecated
label=None)
clear_all()
clear(name=None, type=None, kind=None, id=None, nr=None, label=None)
set_all_readonly(readonly)
Method applying only to FileControls:
add_file(file_object,
content_type="application/octet-stream", filename=None,
name=None, id=None, nr=None, label=None)
Methods applying only to clickable controls:
click(name=None, type=None, id=None, nr=0, coord=(1,1), label=None)
click_request_data(name=None, type=None, id=None, nr=0, coord=(1,1),
label=None)
click_pairs(name=None, type=None, id=None, nr=0, coord=(1,1), label=None)
"""
type2class = {
"text": TextControl,
"password": PasswordControl,
"hidden": HiddenControl,
"textarea": TextareaControl,
"isindex": IsindexControl,
"file": FileControl,
"button": IgnoreControl,
"buttonbutton": IgnoreControl,
"reset": IgnoreControl,
"resetbutton": IgnoreControl,
"submit": SubmitControl,
"submitbutton": SubmitButtonControl,
"image": ImageControl,
"radio": RadioControl,
"checkbox": CheckboxControl,
"select": SelectControl,
}
#---------------------------------------------------
# Initialisation. Use ParseResponse / ParseFile instead.
def __init__(self, action, method="GET",
enctype="application/x-www-form-urlencoded",
name=None, attrs=None,
request_class=_request.Request,
forms=None, labels=None, id_to_labels=None,
backwards_compat=True):
"""
In the usual case, use ParseResponse (or ParseFile) to create new
HTMLForm objects.
action: full (absolute URI) form action
method: "GET" or "POST"
enctype: form transfer encoding MIME type
name: name of form
attrs: dictionary mapping original HTML form attributes to their values
"""
self.action = action
self.method = method
self.enctype = enctype
self.name = name
if attrs is not None:
self.attrs = attrs.copy()
else:
self.attrs = {}
self.controls = []
self._request_class = request_class
# these attributes are used by zope.testbrowser
self._forms = forms # this is a semi-public API!
self._labels = labels # this is a semi-public API!
self._id_to_labels = id_to_labels # this is a semi-public API!
self.backwards_compat = backwards_compat # note __setattr__
self._urlunparse = urlparse.urlunparse
self._urlparse = urlparse.urlparse
def __getattr__(self, name):
if name == "backwards_compat":
return self._backwards_compat
return getattr(HTMLForm, name)
def __setattr__(self, name, value):
# yuck
if name == "backwards_compat":
name = "_backwards_compat"
value = bool(value)
for cc in self.controls:
try:
items = cc.items
except AttributeError:
continue
else:
for ii in items:
for ll in ii.get_labels():
ll._backwards_compat = value
self.__dict__[name] = value
    def new_control(self, type, name, attrs,
                    ignore_unknown=False, select_default=False, index=None):
        """Adds a new control to the form.

        This is usually called by ParseFile and ParseResponse.  Don't call it
        yourself unless you're building your own Control instances.

        Note that controls representing lists of items are built up from
        controls holding only a single list item.  See ListControl.__doc__ for
        further information.

        type: type of control (see Control.__doc__ for a list)
        attrs: HTML attributes of control
        ignore_unknown: if true, use a dummy Control instance for controls of
         unknown type; otherwise, use a TextControl
        select_default: for RADIO and multiple-selection SELECT controls, pick
         the first item as the default if no 'selected' HTML attribute is
         present (this defaulting happens when the HTMLForm.fixup method is
         called)
        index: index of corresponding element in HTML (see
         MoreFormTests.test_interspersed_controls for motivation)
        """
        type = type.lower()
        klass = self.type2class.get(type)
        if klass is None:
            # unknown control type: treat as text, or ignore it on request
            if ignore_unknown:
                klass = IgnoreControl
            else:
                klass = TextControl
        a = attrs.copy()
        if issubclass(klass, ListControl):
            control = klass(type, name, a, select_default, index)
        else:
            control = klass(type, name, a, index)
        if type == "select" and len(attrs) == 1:
            # attrs holds only the "__select" key, i.e. this is the marker
            # token for the start of a new SELECT element: close off the most
            # recently opened SELECT control, if any.
            for ii in range(len(self.controls)-1, -1, -1):
                ctl = self.controls[ii]
                if ctl.type == "select":
                    ctl.close_control()
                    break
        control.add_to_form(self)
        control._urlparse = self._urlparse
        control._urlunparse = self._urlunparse
    def fixup(self):
        """Normalise form after all controls have been added.

        This is usually called by ParseFile and ParseResponse.  Don't call it
        yourself unless you're building your own Control instances.

        This method should only be called once, after all controls have been
        added to the form.
        """
        for control in self.controls:
            control.fixup()
        # re-assign through __setattr__ so the flag is propagated to any
        # labels created while the controls were being fixed up
        self.backwards_compat = self._backwards_compat
#---------------------------------------------------
def __str__(self):
header = "%s%s %s %s" % (
(self.name and self.name+" " or ""),
self.method, self.action, self.enctype)
rep = [header]
for control in self.controls:
rep.append(" %s" % str(control))
return "<%s>" % "\n".join(rep)
#---------------------------------------------------
# Form-filling methods.
def __getitem__(self, name):
return self.find_control(name).value
def __contains__(self, name):
return bool(self.find_control(name))
def __setitem__(self, name, value):
control = self.find_control(name)
try:
control.value = value
except AttributeError, e:
raise ValueError(str(e))
def get_value(self,
name=None, type=None, kind=None, id=None, nr=None,
by_label=False, # by_label is deprecated
label=None):
"""Return value of control.
If only name and value arguments are supplied, equivalent to
form[name]
"""
if by_label:
deprecation("form.get_value_by_label(...)")
c = self.find_control(name, type, kind, id, label=label, nr=nr)
if by_label:
try:
meth = c.get_value_by_label
except AttributeError:
raise NotImplementedError(
"control '%s' does not yet support by_label" % c.name)
else:
return meth()
else:
return c.value
def set_value(self, value,
name=None, type=None, kind=None, id=None, nr=None,
by_label=False, # by_label is deprecated
label=None):
"""Set value of control.
If only name and value arguments are supplied, equivalent to
form[name] = value
"""
if by_label:
deprecation("form.get_value_by_label(...)")
c = self.find_control(name, type, kind, id, label=label, nr=nr)
if by_label:
try:
meth = c.set_value_by_label
except AttributeError:
raise NotImplementedError(
"control '%s' does not yet support by_label" % c.name)
else:
meth(value)
else:
c.value = value
def get_value_by_label(
self, name=None, type=None, kind=None, id=None, label=None, nr=None):
"""
All arguments should be passed by name.
"""
c = self.find_control(name, type, kind, id, label=label, nr=nr)
return c.get_value_by_label()
def set_value_by_label(
self, value,
name=None, type=None, kind=None, id=None, label=None, nr=None):
"""
All arguments should be passed by name.
"""
c = self.find_control(name, type, kind, id, label=label, nr=nr)
c.set_value_by_label(value)
def set_all_readonly(self, readonly):
for control in self.controls:
control.readonly = bool(readonly)
def clear_all(self):
"""Clear the value attributes of all controls in the form.
See HTMLForm.clear.__doc__.
"""
for control in self.controls:
control.clear()
def clear(self,
name=None, type=None, kind=None, id=None, nr=None, label=None):
"""Clear the value attribute of a control.
As a result, the affected control will not be successful until a value
is subsequently set. AttributeError is raised on readonly controls.
"""
c = self.find_control(name, type, kind, id, label=label, nr=nr)
c.clear()
#---------------------------------------------------
# Form-filling methods applying only to ListControls.
    # All five methods below are deprecated delegations to the matching
    # ListControl methods; prefer the Item-based API (find_control + get).
    def possible_items(self, # deprecated
                       name=None, type=None, kind=None, id=None,
                       nr=None, by_label=False, label=None):
        """Return a list of all values that the specified control can take."""
        c = self._find_list_control(name, type, kind, id, label, nr)
        return c.possible_items(by_label)
    def set(self, selected, item_name, # deprecated
            name=None, type=None, kind=None, id=None, nr=None,
            by_label=False, label=None):
        """Select / deselect named list item.

        selected: boolean selected state

        """
        self._find_list_control(name, type, kind, id, label, nr).set(
            selected, item_name, by_label)
    def toggle(self, item_name, # deprecated
               name=None, type=None, kind=None, id=None, nr=None,
               by_label=False, label=None):
        """Toggle selected state of named list item."""
        self._find_list_control(name, type, kind, id, label, nr).toggle(
            item_name, by_label)
    def set_single(self, selected, # deprecated
                   name=None, type=None, kind=None, id=None,
                   nr=None, by_label=None, label=None):
        """Select / deselect list item in a control having only one item.

        If the control has multiple list items, ItemCountError is raised.

        This is just a convenience method, so you don't need to know the item's
        name -- the item name in these single-item controls is usually
        something meaningless like "1" or "on".

        For example, if a checkbox has a single item named "on", the following
        two calls are equivalent:

        control.toggle("on")
        control.toggle_single()

        """ # by_label ignored and deprecated
        self._find_list_control(
            name, type, kind, id, label, nr).set_single(selected)
    def toggle_single(self, name=None, type=None, kind=None, id=None,
                      nr=None, by_label=None, label=None): # deprecated
        """Toggle selected state of list item in control having only one item.

        The rest is as for HTMLForm.set_single.__doc__.

        """ # by_label ignored and deprecated
        self._find_list_control(name, type, kind, id, label, nr).toggle_single()
#---------------------------------------------------
# Form-filling method applying only to FileControls.
    def add_file(self, file_object, content_type=None, filename=None,
                 name=None, id=None, nr=None, label=None):
        """Add a file to be uploaded.

        file_object: file-like object (with read method) from which to read
         data to upload
        content_type: MIME content type of data to upload
        filename: filename to pass to server

        If filename is None, no filename is sent to the server.

        If content_type is None, the content type is guessed based on the
        filename and the data read from the file object.

        XXX
        At the moment, guessed content type is always application/octet-stream.
        Use sndhdr, imghdr modules.  Should also try to guess HTML, XML, and
        plain text.

        Note the following useful HTML attributes of file upload controls (see
        HTML 4.01 spec, section 17):

        accept: comma-separated list of content types that the server will
         handle correctly; you can use this to filter out non-conforming files
        size: XXX IIRC, this is indicative of whether form wants multiple or
         single files
        maxlength: XXX hint of max content length in bytes?

        """
        # delegate to the named FileControl; raises ControlNotFoundError if
        # there is no matching control of type "file"
        self.find_control(name, "file", id=id, label=label, nr=nr).add_file(
            file_object, content_type, filename)
#---------------------------------------------------
# Form submission methods, applying only to clickable controls.
    def click(self, name=None, type=None, id=None, nr=0, coord=(1,1),
              request_class=_request.Request,
              label=None):
        """Return request that would result from clicking on a control.

        The request object is a mechanize.Request instance, which you can pass
        to mechanize.urlopen.

        Only some control types (INPUT/SUBMIT & BUTTON/SUBMIT buttons and
        IMAGEs) can be clicked.

        Will click on the first clickable control, subject to the name, type
        and nr arguments (as for find_control).  If no name, type, id or number
        is specified and there are no clickable controls, a request will be
        returned for the form in its current, un-clicked, state.

        IndexError is raised if any of name, type, id or nr is specified but no
        matching control is found.  ValueError is raised if the HTMLForm has an
        enctype attribute that is not recognised.

        You can optionally specify a coordinate to click at, which only makes a
        difference if you clicked on an image.
        """
        # NOTE(review): the request_class parameter is ignored here in favour
        # of self._request_class (set at construction) -- confirm intended.
        return self._click(name, type, id, label, nr, coord, "request",
                           self._request_class)
    def click_request_data(self,
                           name=None, type=None, id=None,
                           nr=0, coord=(1,1),
                           request_class=_request.Request,
                           label=None):
        """As for click method, but return a tuple (url, data, headers).

        You can use this data to send a request to the server.  This is useful
        if you're using httplib or urllib rather than mechanize.  Otherwise,
        use the click method.

        # Untested.  Have to subclass to add headers, I think -- so use
        # mechanize instead!
        import urllib
        url, data, hdrs = form.click_request_data()
        r = urllib.urlopen(url, data)

        # Untested.  I don't know of any reason to use httplib -- you can get
        # just as much control with mechanize.
        import httplib, urlparse
        url, data, hdrs = form.click_request_data()
        tup = urlparse(url)
        host, path = tup[1], urlparse.urlunparse((None, None)+tup[2:])
        conn = httplib.HTTPConnection(host)
        if data:
            httplib.request("POST", path, data, hdrs)
        else:
            httplib.request("GET", path, headers=hdrs)
        r = conn.getresponse()
        """
        # see note in click(): request_class argument is ignored
        return self._click(name, type, id, label, nr, coord, "request_data",
                           self._request_class)
    def click_pairs(self, name=None, type=None, id=None,
                    nr=0, coord=(1,1),
                    label=None):
        """As for click_request_data, but returns a list of (key, value) pairs.

        You can use this list as an argument to urllib.urlencode.  This is
        usually only useful if you're using httplib or urllib rather than
        mechanize.  It may also be useful if you want to manually tweak the
        keys and/or values, but this should not be necessary.  Otherwise, use
        the click method.

        Note that this method is only useful for forms of MIME type
        x-www-form-urlencoded.  In particular, it does not return the
        information required for file upload.  If you need file upload and are
        not using mechanize, use click_request_data.
        """
        return self._click(name, type, id, label, nr, coord, "pairs",
                           self._request_class)
#---------------------------------------------------
    def find_control(self,
                     name=None, type=None, kind=None, id=None,
                     predicate=None, nr=None,
                     label=None):
        """Locate and return some specific control within the form.

        At least one of the name, type, kind, predicate and nr arguments must
        be supplied.  If no matching control is found, ControlNotFoundError is
        raised.

        If name is specified, then the control must have the indicated name.

        If type is specified then the control must have the specified type (in
        addition to the types possible for <input> HTML tags: "text",
        "password", "hidden", "submit", "image", "button", "radio", "checkbox",
        "file" we also have "reset", "buttonbutton", "submitbutton",
        "resetbutton", "textarea", "select" and "isindex").

        If kind is specified, then the control must fall into the specified
        group, each of which satisfies a particular interface.  The types are
        "text", "list", "multilist", "singlelist", "clickable" and "file".

        If id is specified, then the control must have the indicated id.

        If predicate is specified, then the control must match that function.
        The predicate function is passed the control as its single argument,
        and should return a boolean value indicating whether the control
        matched.

        nr, if supplied, is the sequence number of the control (where 0 is the
        first).  Note that control 0 is the first control matching all the
        other arguments (if supplied); it is not necessarily the first control
        in the form.  If no nr is supplied, AmbiguityError is raised if
        multiple controls match the other arguments (unless the
        .backwards-compat attribute is true).

        If label is specified, then the control must have this label.  Note
        that radio controls and checkboxes never have labels: their items do.
        """
        # refuse an entirely unconstrained lookup
        if ((name is None) and (type is None) and (kind is None) and
            (id is None) and (label is None) and (predicate is None) and
            (nr is None)):
            raise ValueError(
                "at least one argument must be supplied to specify control")
        return self._find_control(name, type, kind, id, label, predicate, nr)
#---------------------------------------------------
# Private methods.
def _find_list_control(self,
name=None, type=None, kind=None, id=None,
label=None, nr=None):
if ((name is None) and (type is None) and (kind is None) and
(id is None) and (label is None) and (nr is None)):
raise ValueError(
"at least one argument must be supplied to specify control")
return self._find_control(name, type, kind, id, label,
is_listcontrol, nr)
    def _find_control(self, name, type, kind, id, label, predicate, nr):
        """Shared lookup behind find_control / _find_list_control.

        Returns the matching control, or raises AmbiguityError /
        ControlNotFoundError / TypeError / ValueError.
        """
        # validate argument types up front
        if ((name is not None) and (name is not Missing) and
            not isstringlike(name)):
            raise TypeError("control name must be string-like")
        if (type is not None) and not isstringlike(type):
            raise TypeError("control type must be string-like")
        if (kind is not None) and not isstringlike(kind):
            raise TypeError("control kind must be string-like")
        if (id is not None) and not isstringlike(id):
            raise TypeError("control id must be string-like")
        if (label is not None) and not isstringlike(label):
            raise TypeError("control label must be string-like")
        if (predicate is not None) and not callable(predicate):
            raise TypeError("control predicate must be callable")
        # (nr == 0 is accepted; "positive" here really means non-negative)
        if (nr is not None) and nr < 0:
            raise ValueError("control number must be a positive integer")
        orig_nr = nr
        found = None
        ambiguous = False
        # in backwards-compatible mode an unspecified nr means "first match"
        if nr is None and self.backwards_compat:
            nr = 0
        for control in self.controls:
            # name matches if equal, or if Missing was requested and the
            # control is nameless
            if ((name is not None and name != control.name) and
                (name is not Missing or control.name is not None)):
                continue
            if type is not None and type != control.type:
                continue
            if kind is not None and not control.is_of_kind(kind):
                continue
            if id is not None and id != control.id:
                continue
            if predicate and not predicate(control):
                continue
            # label matches on substring of any of the control's labels
            if label:
                for l in control.get_labels():
                    if l.text.find(label) > -1:
                        break
                else:
                    continue
            if nr is not None:
                if nr == 0:
                    return control  # early exit: unambiguous due to nr
                nr -= 1
                continue
            # nr unspecified (strict mode): a second match means ambiguity
            if found:
                ambiguous = True
                break
            found = control
        if found and not ambiguous:
            return found
        # build a human-readable description of the failed query
        description = []
        if name is not None: description.append("name %s" % repr(name))
        if type is not None: description.append("type '%s'" % type)
        if kind is not None: description.append("kind '%s'" % kind)
        if id is not None: description.append("id '%s'" % id)
        if label is not None: description.append("label '%s'" % label)
        if predicate is not None:
            description.append("predicate %s" % predicate)
        if orig_nr: description.append("nr %d" % orig_nr)
        description = ", ".join(description)
        if ambiguous:
            raise AmbiguityError("more than one control matching "+description)
        elif not found:
            raise ControlNotFoundError("no control matching "+description)
        assert False
def _click(self, name, type, id, label, nr, coord, return_type,
request_class=_request.Request):
try:
control = self._find_control(
name, type, "clickable", id, label, None, nr)
except ControlNotFoundError:
if ((name is not None) or (type is not None) or (id is not None) or
(label is not None) or (nr != 0)):
raise
# no clickable controls, but no control was explicitly requested,
# so return state without clicking any control
return self._switch_click(return_type, request_class)
else:
return control._click(self, coord, return_type, request_class)
def _pairs(self):
"""Return sequence of (key, value) pairs suitable for urlencoding."""
return [(k, v) for (i, k, v, c_i) in self._pairs_and_controls()]
def _pairs_and_controls(self):
"""Return sequence of (index, key, value, control_index)
of totally ordered pairs suitable for urlencoding.
control_index is the index of the control in self.controls
"""
pairs = []
for control_index in range(len(self.controls)):
control = self.controls[control_index]
for ii, key, val in control._totally_ordered_pairs():
pairs.append((ii, key, val, control_index))
# stable sort by ONLY first item in tuple
pairs.sort()
return pairs
    def _request_data(self):
        """Return a tuple (url, data, headers)."""
        method = self.method.upper()
        #scheme, netloc, path, parameters, query, frag = urlparse.urlparse(self.action)
        parts = self._urlparse(self.action)
        rest, (query, frag) = parts[:-2], parts[-2:]
        if method == "GET":
            # GET only supports urlencoding; the form data replaces the
            # action URI's query string, and the fragment is dropped
            if self.enctype != "application/x-www-form-urlencoded":
                raise ValueError(
                    "unknown GET form encoding type '%s'" % self.enctype)
            parts = rest + (urllib.urlencode(self._pairs()), None)
            uri = self._urlunparse(parts)
            return uri, None, []
        elif method == "POST":
            # keep the action's query string; data travels in the body
            parts = rest + (query, None)
            uri = self._urlunparse(parts)
            if self.enctype == "application/x-www-form-urlencoded":
                return (uri, urllib.urlencode(self._pairs()),
                        [("Content-Type", self.enctype)])
            elif self.enctype == "multipart/form-data":
                # build a multipart body; each control writes its own part
                data = StringIO()
                http_hdrs = []
                mw = MimeWriter(data, http_hdrs)
                mw.startmultipartbody("form-data", add_to_http_hdrs=True,
                                      prefix=0)
                for ii, k, v, control_index in self._pairs_and_controls():
                    self.controls[control_index]._write_mime_data(mw, k, v)
                mw.lastpart()
                return uri, data.getvalue(), http_hdrs
            else:
                raise ValueError(
                    "unknown POST form encoding type '%s'" % self.enctype)
        else:
            raise ValueError("Unknown method '%s'" % method)
def _switch_click(self, return_type, request_class=_request.Request):
# This is called by HTMLForm and clickable Controls to hide switching
# on return_type.
if return_type == "pairs":
return self._pairs()
elif return_type == "request_data":
return self._request_data()
else:
req_data = self._request_data()
req = request_class(req_data[0], req_data[1])
for key, val in req_data[2]:
add_hdr = req.add_header
if key.lower() == "content-type":
try:
add_hdr = req.add_unredirected_header
except AttributeError:
# pre-2.4 and not using ClientCookie
pass
add_hdr(key, val)
return req
| gpl-2.0 |
jaskaye17/nomadpad | node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/build-3.3/pygments/styles/manni.py | 364 | 2374 | # -*- coding: utf-8 -*-
"""
pygments.styles.manni
~~~~~~~~~~~~~~~~~~~~~
A colorful style, inspired by the terminal highlighting style.
This is a port of the style used in the `php port`_ of pygments
by Manni. The style is called 'default' there.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace
class ManniStyle(Style):
    """
    A colorful style, inspired by the terminal highlighting style.
    """

    background_color = '#f0f3f3'

    styles = {
        Whitespace: '#bbbbbb',

        # Comments
        Comment: 'italic #0099FF',
        Comment.Preproc: 'noitalic #009999',
        Comment.Special: 'bold',

        # Keywords and operators
        Keyword: 'bold #006699',
        Keyword.Pseudo: 'nobold',
        Keyword.Type: '#007788',
        Operator: '#555555',
        Operator.Word: 'bold #000000',

        # Names
        Name.Builtin: '#336666',
        Name.Function: '#CC00FF',
        Name.Class: 'bold #00AA88',
        Name.Namespace: 'bold #00CCFF',
        Name.Exception: 'bold #CC0000',
        Name.Variable: '#003333',
        Name.Constant: '#336600',
        Name.Label: '#9999FF',
        Name.Entity: 'bold #999999',
        Name.Attribute: '#330099',
        Name.Tag: 'bold #330099',
        Name.Decorator: '#9999FF',

        # Literals
        String: '#CC3300',
        String.Doc: 'italic',
        String.Interpol: '#AA0000',
        String.Escape: 'bold #CC3300',
        String.Regex: '#33AAAA',
        String.Symbol: '#FFCC33',
        String.Other: '#CC3300',
        Number: '#FF6600',

        # Generic output: diffs, prompts, tracebacks
        Generic.Heading: 'bold #003300',
        Generic.Subheading: 'bold #003300',
        Generic.Deleted: 'border:#CC0000 bg:#FFCCCC',
        Generic.Inserted: 'border:#00CC00 bg:#CCFFCC',
        Generic.Error: '#FF0000',
        Generic.Emph: 'italic',
        Generic.Strong: 'bold',
        Generic.Prompt: 'bold #000099',
        Generic.Output: '#AAAAAA',
        Generic.Traceback: '#99CC66',

        Error: 'bg:#FFAAAA #AA0000'
    }
| mit |
odpi/bigtop | bigtop-packages/src/common/ambari/ODPi/1.0/services/HIVE/package/alerts/alert_hive_metastore.py | 6 | 9637 | #!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import socket
import time
import traceback
import logging
from resource_management.core import global_lock
from resource_management.libraries.functions import format
from resource_management.libraries.functions import get_kinit_path
from resource_management.core.resources import Execute
from ambari_commons.os_check import OSConst
from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
# Result label templates: OK_MESSAGE is .format()-ed with the elapsed time,
# CRITICAL_MESSAGE with (host_name, traceback text).
OK_MESSAGE = "Metastore OK - Hive command took {0:.3f}s"
CRITICAL_MESSAGE = "Metastore on {0} failed ({1})"

# {{site/property}} tokens; get_tokens() below returns these so the caller
# can resolve them into the `configurations` dict passed to execute().
SECURITY_ENABLED_KEY = '{{cluster-env/security_enabled}}'
SMOKEUSER_KEYTAB_KEY = '{{cluster-env/smokeuser_keytab}}'
SMOKEUSER_PRINCIPAL_KEY = '{{cluster-env/smokeuser_principal_name}}'
SMOKEUSER_KEY = '{{cluster-env/smokeuser}}'
HIVE_METASTORE_URIS_KEY = '{{hive-site/hive.metastore.uris}}'
# The configured Kerberos executable search paths, if any
KERBEROS_EXECUTABLE_SEARCH_PATHS_KEY = '{{kerberos-env/executable_search_paths}}'
# default keytab location
SMOKEUSER_KEYTAB_SCRIPT_PARAM_KEY = 'default.smoke.keytab'
SMOKEUSER_KEYTAB_DEFAULT = '/etc/security/keytabs/smokeuser.headless.keytab'
# default smoke principal
SMOKEUSER_PRINCIPAL_SCRIPT_PARAM_KEY = 'default.smoke.principal'
SMOKEUSER_PRINCIPAL_DEFAULT = 'ambari-qa@EXAMPLE.COM'
# default smoke user
SMOKEUSER_SCRIPT_PARAM_KEY = 'default.smoke.user'
SMOKEUSER_DEFAULT = 'ambari-qa'
STACK_ROOT = '{{cluster-env/stack_root}}'
# Fallback conf/bin locations used when the stack-rooted dirs do not exist.
HIVE_CONF_DIR_LEGACY = '/etc/hive/conf.server'
HIVE_BIN_DIR_LEGACY = '/usr/lib/hive/bin'
# Script-parameter override for the hive-query timeout (seconds).
CHECK_COMMAND_TIMEOUT_KEY = 'check.command.timeout'
CHECK_COMMAND_TIMEOUT_DEFAULT = 60.0
HADOOPUSER_KEY = '{{cluster-env/hadoop.user.name}}'
HADOOPUSER_DEFAULT = 'hadoop'
logger = logging.getLogger('ambari_alerts')
@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
def get_tokens():
  """
  Returns a tuple of tokens in the format {{site/property}} that will be
  used to build the dictionary passed into execute.
  """
  required_props = (
    SECURITY_ENABLED_KEY,
    SMOKEUSER_KEYTAB_KEY,
    SMOKEUSER_PRINCIPAL_KEY,
    HIVE_METASTORE_URIS_KEY,
    SMOKEUSER_KEY,
    KERBEROS_EXECUTABLE_SEARCH_PATHS_KEY,
    STACK_ROOT,
  )
  return required_props
@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
def get_tokens():
  """
  Returns a tuple of tokens in the format {{site/property}} that will be
  used to build the dictionary passed into execute.
  """
  required_props = (HIVE_METASTORE_URIS_KEY, HADOOPUSER_KEY)
  return required_props
@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
def execute(configurations={}, parameters={}, host_name=None):
  """
  Runs ``hive ... -e 'show databases;'`` against the local metastore URI
  and reports ('OK'|'CRITICAL'|'UNKNOWN', [label]).

  Returns a tuple containing the result code and a pre-formatted result label

  Keyword arguments:
  configurations (dictionary): a mapping of configuration key to value
  parameters (dictionary): a mapping of script parameter key to value
  host_name (string): the name of this host where the alert is running
  """
  if configurations is None:
    return (('UNKNOWN', ['There were no configurations supplied to the script.']))
  if not HIVE_METASTORE_URIS_KEY in configurations:
    return (('UNKNOWN', ['Hive metastore uris were not supplied to the script.']))
  # hive.metastore.uris may contain several comma-separated thrift URIs.
  metastore_uris = configurations[HIVE_METASTORE_URIS_KEY].split(',')
  security_enabled = False
  if SECURITY_ENABLED_KEY in configurations:
    security_enabled = str(configurations[SECURITY_ENABLED_KEY]).upper() == 'TRUE'
  check_command_timeout = CHECK_COMMAND_TIMEOUT_DEFAULT
  if CHECK_COMMAND_TIMEOUT_KEY in parameters:
    check_command_timeout = float(parameters[CHECK_COMMAND_TIMEOUT_KEY])
  # defaults
  smokeuser_keytab = SMOKEUSER_KEYTAB_DEFAULT
  smokeuser_principal = SMOKEUSER_PRINCIPAL_DEFAULT
  smokeuser = SMOKEUSER_DEFAULT
  # check script params
  if SMOKEUSER_PRINCIPAL_SCRIPT_PARAM_KEY in parameters:
    smokeuser_principal = parameters[SMOKEUSER_PRINCIPAL_SCRIPT_PARAM_KEY]
  if SMOKEUSER_SCRIPT_PARAM_KEY in parameters:
    smokeuser = parameters[SMOKEUSER_SCRIPT_PARAM_KEY]
  if SMOKEUSER_KEYTAB_SCRIPT_PARAM_KEY in parameters:
    smokeuser_keytab = parameters[SMOKEUSER_KEYTAB_SCRIPT_PARAM_KEY]
  # check configurations last as they should always take precedence
  if SMOKEUSER_PRINCIPAL_KEY in configurations:
    smokeuser_principal = configurations[SMOKEUSER_PRINCIPAL_KEY]
  if SMOKEUSER_KEY in configurations:
    smokeuser = configurations[SMOKEUSER_KEY]
  result_code = None
  try:
    if security_enabled:
      if SMOKEUSER_KEYTAB_KEY in configurations:
        smokeuser_keytab = configurations[SMOKEUSER_KEYTAB_KEY]
      # Get the configured Kerberos executable search paths, if any
      if KERBEROS_EXECUTABLE_SEARCH_PATHS_KEY in configurations:
        kerberos_executable_search_paths = configurations[KERBEROS_EXECUTABLE_SEARCH_PATHS_KEY]
      else:
        kerberos_executable_search_paths = None
      kinit_path_local = get_kinit_path(kerberos_executable_search_paths)
      # NOTE: format() is resource_management's helper, which substitutes
      # the {names} below from the caller's local variables.
      kinitcmd=format("{kinit_path_local} -kt {smokeuser_keytab} {smokeuser_principal}; ")
      # prevent concurrent kinit
      kinit_lock = global_lock.get_lock(global_lock.LOCK_TYPE_KERBEROS)
      kinit_lock.acquire()
      try:
        Execute(kinitcmd, user=smokeuser,
          path=["/bin/", "/usr/bin/", "/usr/lib/hive/bin/", "/usr/sbin/"],
          timeout=10)
      finally:
        kinit_lock.release()
    if host_name is None:
      host_name = socket.getfqdn()
    # Pick the metastore URI that mentions this host.
    # NOTE(review): if no URI contains host_name, metastore_uri is never
    # bound and the resulting NameError is reported as UNKNOWN by the
    # outer except below -- confirm that is the intended behavior.
    for uri in metastore_uris:
      if host_name in uri:
        metastore_uri = uri
    conf_dir = HIVE_CONF_DIR_LEGACY
    bin_dir = HIVE_BIN_DIR_LEGACY
    # Prefer the stack-rooted conf/bin dirs when they exist on disk.
    if STACK_ROOT in configurations:
      hive_conf_dir = configurations[STACK_ROOT] + format("/current/hive-metastore/conf/conf.server")
      hive_bin_dir = configurations[STACK_ROOT] + format("/current/hive-metastore/bin")
      if os.path.exists(hive_conf_dir):
        conf_dir = hive_conf_dir
        bin_dir = hive_bin_dir
    # The trailing backslashes continue the *string literal*, so the next
    # line's leading whitespace becomes part of the shell command (the
    # shell treats the extra spaces as argument separators).
    cmd = format("export HIVE_CONF_DIR='{conf_dir}' ; "
                 "hive --hiveconf hive.metastore.uris={metastore_uri}\
                       --hiveconf hive.metastore.client.connect.retry.delay=1\
                       --hiveconf hive.metastore.failure.retries=1\
                       --hiveconf hive.metastore.connect.retries=1\
                       --hiveconf hive.metastore.client.socket.timeout=14\
                       --hiveconf hive.execution.engine=mr -e 'show databases;'")
    start_time = time.time()
    try:
      Execute(cmd, user=smokeuser,
        path=["/bin/", "/usr/bin/", "/usr/sbin/", bin_dir],
        timeout=int(check_command_timeout) )
      total_time = time.time() - start_time
      result_code = 'OK'
      label = OK_MESSAGE.format(total_time)
    except:
      # Broad catch: any failure of the hive query itself is CRITICAL.
      result_code = 'CRITICAL'
      label = CRITICAL_MESSAGE.format(host_name, traceback.format_exc())
  except:
    # Broad catch: failures outside the query (kinit, unbound names, ...)
    # are reported as UNKNOWN with the raw traceback as the label.
    label = traceback.format_exc()
    result_code = 'UNKNOWN'
  return ((result_code, [label]))
@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
def execute(configurations={}, parameters={}, host_name=None):
  """
  Windows variant: runs ``hive.cmd ... -e 'show databases;'`` against the
  local metastore URI and reports ('OK'|'CRITICAL'|'UNKNOWN', [label]).

  Returns a tuple containing the result code and a pre-formatted result label

  Keyword arguments:
  configurations (dictionary): a mapping of configuration key to value
  parameters (dictionary): a mapping of script parameter key to value
  host_name (string): the name of this host where the alert is running
  """
  from resource_management.libraries.functions import reload_windows_env
  reload_windows_env()
  # Assumes HIVE_HOME is set in the (reloaded) environment; a missing key
  # raises KeyError here, outside the try below.
  hive_home = os.environ['HIVE_HOME']
  if configurations is None:
    return (('UNKNOWN', ['There were no configurations supplied to the script.']))
  if not HIVE_METASTORE_URIS_KEY in configurations:
    return (('UNKNOWN', ['Hive metastore uris were not supplied to the script.']))
  # hive.metastore.uris may contain several comma-separated thrift URIs.
  metastore_uris = configurations[HIVE_METASTORE_URIS_KEY].split(',')
  # defaults
  hiveuser = HADOOPUSER_DEFAULT
  if HADOOPUSER_KEY in configurations:
    hiveuser = configurations[HADOOPUSER_KEY]
  result_code = None
  try:
    if host_name is None:
      host_name = socket.getfqdn()
    # Pick the metastore URI that mentions this host.
    # NOTE(review): if no URI contains host_name, metastore_uri is never
    # bound and the NameError is reported as UNKNOWN by the outer except.
    for uri in metastore_uris:
      if host_name in uri:
        metastore_uri = uri
    hive_cmd = os.path.join(hive_home, "bin", "hive.cmd")
    # format() is resource_management's helper (interpolates local names);
    # the trailing backslashes continue the string literal itself.
    cmd = format("cmd /c {hive_cmd} --hiveconf hive.metastore.uris={metastore_uri}\
                --hiveconf hive.metastore.client.connect.retry.delay=1\
                --hiveconf hive.metastore.failure.retries=1\
                --hiveconf hive.metastore.connect.retries=1\
                --hiveconf hive.metastore.client.socket.timeout=14\
                --hiveconf hive.execution.engine=mr -e 'show databases;'")
    start_time = time.time()
    try:
      Execute(cmd, user=hiveuser, timeout=30)
      total_time = time.time() - start_time
      result_code = 'OK'
      label = OK_MESSAGE.format(total_time)
    except:
      # Broad catch: any failure of the hive query itself is CRITICAL.
      result_code = 'CRITICAL'
      label = CRITICAL_MESSAGE.format(host_name, traceback.format_exc())
  except:
    # Broad catch: anything else is UNKNOWN with the traceback as label.
    label = traceback.format_exc()
    result_code = 'UNKNOWN'
  return ((result_code, [label]))
| apache-2.0 |
weiawe/django | django/db/backends/postgresql_psycopg2/features.py | 11 | 1062 | from django.db.backends.base.features import BaseDatabaseFeatures
from django.db.utils import InterfaceError
class DatabaseFeatures(BaseDatabaseFeatures):
    """Feature flags for the PostgreSQL (psycopg2) backend."""

    # Query generation
    allows_group_by_selected_pks = True
    can_distinct_on_fields = True
    nulls_order_largest = True
    has_case_insensitive_like = False
    needs_datetime_string_cast = False

    # Data types
    has_real_datatype = True
    has_native_uuid_field = True
    has_native_duration_field = True
    driver_supports_timedelta_args = True

    # Inserts and row locking
    can_return_id_from_insert = True
    has_bulk_insert = True
    has_select_for_update = True
    has_select_for_update_nowait = True

    # Transactions, savepoints and DDL
    supports_transactions = True
    uses_savepoints = True
    can_release_savepoints = True
    can_defer_constraint_checks = True
    can_rollback_ddl = True
    supports_combined_alters = True
    supports_tablespaces = True

    # Introspection
    can_introspect_autofield = True
    can_introspect_ip_address_field = True
    can_introspect_small_integer_field = True

    # Driver behaviour
    closed_cursor_error_class = InterfaceError
    requires_sqlparse_for_splitting = False
| bsd-3-clause |
umars/npyscreen | build/lib/npyscreen/fmEditFormNew.py | 6 | 2599 | from . import fmForm
# TOTALLY BROKEN AND UNFINISHED
# DON'T USE
class ActionFormNew(fmForm.FormBaseNew):
    """A FormBaseNew subclass that manages OK / Cancel action buttons.

    Upstream marks this class as broken and unfinished; this revision fixes
    two plain NameError bugs (a super() call naming the undefined class
    ``ActionForm``, and bare ``tmp_rely``/``tmp_relx`` reads in
    post_edit_loop) without changing any interface.
    """

    # (rows from bottom, cols from right + button-text length) offset used
    # to position the Cancel button; OK_BUTTON_BR_OFFSET and OKBUTTON_TYPE
    # come from the base class.
    CANCEL_BUTTON_BR_OFFSET = (2, 12)
    OK_BUTTON_TEXT          = "OK"
    CANCEL_BUTTON_TEXT      = "Cancel"

    def __init__(self, *args, **keywords):
        super(ActionFormNew, self).__init__(*args, **keywords)
        # Buttons created through _add_button, keyed by caller-supplied name.
        self._added_buttons = {}

    def set_up_exit_condition_handlers(self):
        # BUG FIX: the original called super(ActionForm, self) -- ActionForm
        # is undefined in this module, which raised NameError.
        super(ActionFormNew, self).set_up_exit_condition_handlers()
        # NOTE(review): ``widget`` is not among this module's visible
        # imports -- confirm a ``from . import widget`` exists at file top.
        self.how_exited_handers.update({
            widget.EXITED_ESCAPE:    self.find_cancel_button
        })

    def find_cancel_button(self):
        # Move the edit focus to the Cancel button (second-to-last widget).
        self.editw = len(self._widgets__)-2

    def on_cancel(self):
        """Hook invoked on cancel; override in subclasses."""
        pass

    def on_ok(self):
        """Hook invoked on OK; override in subclasses."""
        pass

    def _add_button(self, button_name, button_type, button_text, button_rely, button_relx, button_function):
        # Create a button widget without disturbing the form's layout cursor.
        # NOTE(review): button_function is accepted but never wired to the
        # widget -- part of the unfinished state of this class.
        tmp_rely, tmp_relx = self.nextrely, self.nextrelx
        self._added_buttons[button_name] = self.add_widget(button_type, name=button_text, rely=button_rely, relx=button_relx, use_max_space=True)
        self.nextrely, self.nextrelx = tmp_rely, tmp_relx

    def move_ok_button(self):
        super(ActionFormNew, self).move_ok_button()
        if hasattr(self, 'c_button'):
            # Anchor the Cancel button relative to the bottom-right corner.
            c_button_text = self.CANCEL_BUTTON_TEXT
            cmy, cmx = self.curses_pad.getmaxyx()
            cmy -= self.__class__.CANCEL_BUTTON_BR_OFFSET[0]
            cmx -= len(c_button_text)+self.__class__.CANCEL_BUTTON_BR_OFFSET[1]
            self.c_button.rely = cmy
            self.c_button.relx = cmx

    def _add_ok_button(self):
        # Place the OK button at the bottom-right, preserving the layout
        # cursor for any subsequent widgets.
        tmp_rely, tmp_relx = self.nextrely, self.nextrelx
        my, mx = self.curses_pad.getmaxyx()
        ok_button_text = self.__class__.OK_BUTTON_TEXT
        my -= self.__class__.OK_BUTTON_BR_OFFSET[0]
        mx -= len(ok_button_text)+self.__class__.OK_BUTTON_BR_OFFSET[1]
        self.ok_button = self.add_widget(self.__class__.OKBUTTON_TYPE, name=ok_button_text, rely=my, relx=mx, use_max_space=True)
        # Attribute name 'ok_button_postion' (sic) kept as-is; it is read
        # by _destroy_ok_button below.
        self.ok_button_postion = len(self._widgets__)-1
        self.ok_button.update()

    def _destroy_ok_button(self):
        self.ok_button.destroy()
        del self._widgets__[self.ok_button_postion]
        del self.ok_button
        self.display()

    def pre_edit_loop(self):
        # Remember the layout cursor so post_edit_loop can restore it.
        self.tmp_rely, self.tmp_relx = self.nextrely, self.nextrelx

    def post_edit_loop(self):
        # BUG FIX: the original read the bare names tmp_rely/tmp_relx
        # (NameError); pre_edit_loop stores them on self.
        self.nextrely, self.nextrelx = self.tmp_rely, self.tmp_relx

    def _during_edit_loop(self):
        pass
| bsd-2-clause |
ofer43211/unisubs | apps/teams/migrations/0098_add_priority_indexes.py | 5 | 28968 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
db.create_index('teams_task', ['team_id', 'priority', 'created'])
db.create_index('teams_task', ['team_id', 'priority', 'expiration_date'])
def backwards(self, orm):
db.delete_index('teams_task', ['team_id', 'priority', 'created'])
db.delete_index('teams_task', ['team_id', 'priority', 'expiration_date'])
models = {
'accountlinker.thirdpartyaccount': {
'Meta': {'unique_together': "(('type', 'username'),)", 'object_name': 'ThirdPartyAccount'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'oauth_access_token': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'oauth_refresh_token': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
},
'auth.customuser': {
'Meta': {'object_name': 'CustomUser', '_ormbases': ['auth.User']},
'autoplay_preferences': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'award_points': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'biography': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '63', 'blank': 'True'}),
'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'is_partner': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'notify_by_email': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'notify_by_message': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'partner': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '32', 'null': 'True', 'blank': 'True'}),
'picture': ('utils.amazon.fields.S3EnabledImageField', [], {'thumb_options': "{'upscale': True, 'crop': 'smart'}", 'max_length': '100', 'blank': 'True'}),
'preferred_language': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'primary_key': 'True'}),
'valid_email': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'videos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['videos.Video']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'teams.application': {
'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'Application'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'note': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'applications'", 'to': "orm['teams.Team']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'team_applications'", 'to': "orm['auth.CustomUser']"})
},
'teams.invite': {
'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'Invite'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'note': ('django.db.models.fields.TextField', [], {'max_length': '200', 'blank': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'default': "'contributor'", 'max_length': '16'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'invitations'", 'to': "orm['teams.Team']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'team_invitations'", 'to': "orm['auth.CustomUser']"})
},
'teams.membershipnarrowing': {
'Meta': {'object_name': 'MembershipNarrowing'},
'added_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'narrowing_includer'", 'null': 'True', 'to': "orm['teams.TeamMember']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '24', 'blank': 'True'}),
'member': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'narrowings'", 'to': "orm['teams.TeamMember']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Project']", 'null': 'True', 'blank': 'True'})
},
'teams.project': {
'Meta': {'unique_together': "(('team', 'name'), ('team', 'slug'))", 'object_name': 'Project'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
'guidelines': ('django.db.models.fields.TextField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '50', 'blank': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Team']"}),
'workflow_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'teams.setting': {
'Meta': {'unique_together': "(('key', 'team'),)", 'object_name': 'Setting'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.PositiveIntegerField', [], {}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'settings'", 'to': "orm['teams.Team']"})
},
'teams.task': {
'Meta': {'object_name': 'Task'},
'approved': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'assignee': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True', 'blank': 'True'}),
'body': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'completed': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'expiration_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '16', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'priority': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True', 'blank': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'review_base_version': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'tasks_based_on'", 'null': 'True', 'to': "orm['videos.SubtitleVersion']"}),
'subtitle_version': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.SubtitleVersion']", 'null': 'True', 'blank': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Team']"}),
'team_video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.TeamVideo']"}),
'type': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'teams.team': {
'Meta': {'object_name': 'Team'},
'applicants': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'applicated_teams'", 'symmetrical': 'False', 'through': "orm['teams.Application']", 'to': "orm['auth.CustomUser']"}),
'application_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'auth_provider_code': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '24', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'header_html_text': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'highlight': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_moderated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_visible': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'last_notification_time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'logo': ('utils.amazon.fields.S3EnabledImageField', [], {'thumb_options': "{'upscale': True, 'autocrop': True}", 'max_length': '100', 'blank': 'True'}),
'max_tasks_per_member': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'membership_policy': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '250'}),
'page_content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'points': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'projects_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'subtitle_policy': ('django.db.models.fields.IntegerField', [], {'default': '10'}),
'task_assign_policy': ('django.db.models.fields.IntegerField', [], {'default': '10'}),
'task_expiration': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'third_party_accounts': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'tseams'", 'symmetrical': 'False', 'to': "orm['accountlinker.ThirdPartyAccount']"}),
'translate_policy': ('django.db.models.fields.IntegerField', [], {'default': '10'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'teams'", 'symmetrical': 'False', 'through': "orm['teams.TeamMember']", 'to': "orm['auth.CustomUser']"}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'intro_for_teams'", 'null': 'True', 'to': "orm['videos.Video']"}),
'video_policy': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'videos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['videos.Video']", 'through': "orm['teams.TeamVideo']", 'symmetrical': 'False'}),
'workflow_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'teams.teamlanguagepreference': {
'Meta': {'unique_together': "(('team', 'language_code'),)", 'object_name': 'TeamLanguagePreference'},
'allow_reads': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'allow_writes': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language_code': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'preferred': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'lang_preferences'", 'to': "orm['teams.Team']"})
},
'teams.teammember': {
'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'TeamMember'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'default': "'contributor'", 'max_length': '16', 'db_index': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'members'", 'to': "orm['teams.Team']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'team_members'", 'to': "orm['auth.CustomUser']"})
},
'teams.teamnotificationsetting': {
'Meta': {'object_name': 'TeamNotificationSetting'},
'basic_auth_password': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'basic_auth_username': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notification_class': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'request_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'team': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'notification_settings'", 'unique': 'True', 'to': "orm['teams.Team']"})
},
'teams.teamvideo': {
'Meta': {'unique_together': "(('team', 'video'),)", 'object_name': 'TeamVideo'},
'added_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']"}),
'all_languages': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'completed_languages': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['videos.SubtitleLanguage']", 'symmetrical': 'False', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Project']"}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Team']"}),
'thumbnail': ('utils.amazon.fields.S3EnabledImageField', [], {'max_length': '100', 'thumb_options': "{'upscale': True, 'crop': 'smart'}", 'null': 'True', 'thumb_sizes': '((290, 165), (120, 90))', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
'video': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['videos.Video']", 'unique': 'True'})
},
'teams.workflow': {
'Meta': {'unique_together': "(('team', 'project', 'team_video'),)", 'object_name': 'Workflow'},
'approve_allowed': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'autocreate_subtitle': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'autocreate_translate': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Project']", 'null': 'True', 'blank': 'True'}),
'review_allowed': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Team']"}),
'team_video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.TeamVideo']", 'null': 'True', 'blank': 'True'})
},
'videos.subtitlelanguage': {
'Meta': {'unique_together': "(('video', 'language', 'standard_language'),)", 'object_name': 'SubtitleLanguage'},
'created': ('django.db.models.fields.DateTimeField', [], {}),
'followers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'followed_languages'", 'blank': 'True', 'to': "orm['auth.CustomUser']"}),
'had_version': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'has_version': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_forked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_original': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'percent_done': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'standard_language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.SubtitleLanguage']", 'null': 'True', 'blank': 'True'}),
'subtitle_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'subtitles_fetched_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Video']"}),
'writelock_owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True', 'blank': 'True'}),
'writelock_session_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'writelock_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
},
'videos.subtitleversion': {
'Meta': {'unique_together': "(('language', 'version_no'),)", 'object_name': 'SubtitleVersion'},
'datetime_started': ('django.db.models.fields.DateTimeField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'forked_from': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.SubtitleVersion']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_forked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.SubtitleLanguage']"}),
'moderation_status': ('django.db.models.fields.CharField', [], {'default': "'not__under_moderation'", 'max_length': '32', 'db_index': 'True'}),
'note': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'notification_sent': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'result_of_rollback': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'text_change': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'time_change': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']"}),
'version_no': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'videos.video': {
'Meta': {'object_name': 'Video'},
'allow_community_edits': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'allow_video_urls_edit': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'complete_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'duration': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'edited': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'featured': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'followers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'followed_videos'", 'blank': 'True', 'to': "orm['auth.CustomUser']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_subtitled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'languages_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'moderated_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'moderating'", 'null': 'True', 'to': "orm['teams.Team']"}),
's3_thumbnail': ('utils.amazon.fields.S3EnabledImageField', [], {'thumb_options': "{'upscale': True, 'crop': 'smart'}", 'max_length': '100', 'thumb_sizes': '((290, 165), (120, 90))', 'blank': 'True'}),
'small_thumbnail': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'subtitles_fetched_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'thumbnail': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True', 'blank': 'True'}),
'video_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'was_subtitled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}),
'widget_views_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'writelock_owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'writelock_owners'", 'null': 'True', 'to': "orm['auth.CustomUser']"}),
'writelock_session_key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'writelock_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
}
}
complete_apps = ['teams']
| agpl-3.0 |
hackerbot/DjangoDev | django/contrib/auth/handlers/modwsgi.py | 537 | 1344 | from django import db
from django.contrib import auth
from django.utils.encoding import force_bytes
def check_password(environ, username, password):
    """
    Authenticate ``username``/``password`` against Django's auth database.

    Per the mod_wsgi auth-provider contract, returns ``None`` when the user
    is unknown (or inactive), otherwise ``True``/``False`` depending on
    whether the password matches.
    """
    UserModel = auth.get_user_model()
    # Manage connection state like the WSGI handler does: mod_wsgi may call
    # this outside of a normal request/response cycle.
    db.reset_queries()
    try:
        try:
            account = UserModel._default_manager.get_by_natural_key(username)
        except UserModel.DoesNotExist:
            return None
        return account.check_password(password) if account.is_active else None
    finally:
        db.close_old_connections()
def groups_for_user(environ, username):
    """
    Return the names of the groups the user belongs to, as bytes.

    Used by mod_wsgi for group-based authorization; an unknown or inactive
    user yields an empty list.
    """
    UserModel = auth.get_user_model()
    db.reset_queries()
    try:
        try:
            account = UserModel._default_manager.get_by_natural_key(username)
        except UserModel.DoesNotExist:
            return []
        if not account.is_active:
            return []
        return [force_bytes(group.name) for group in account.groups.all()]
    finally:
        db.close_old_connections()
| bsd-3-clause |
blademainer/intellij-community | python/helpers/docutils/parsers/rst/__init__.py | 42 | 14143 | # $Id: __init__.py 6314 2010-04-26 10:04:17Z milde $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
"""
This is ``docutils.parsers.rst`` package. It exports a single class, `Parser`,
the reStructuredText parser.
Usage
=====
1. Create a parser::
parser = docutils.parsers.rst.Parser()
Several optional arguments may be passed to modify the parser's behavior.
Please see `Customizing the Parser`_ below for details.
2. Gather input (a multi-line string), by reading a file or the standard
input::
input = sys.stdin.read()
3. Create a new empty `docutils.nodes.document` tree::
document = docutils.utils.new_document(source, settings)
See `docutils.utils.new_document()` for parameter details.
4. Run the parser, populating the document tree::
parser.parse(input, document)
Parser Overview
===============
The reStructuredText parser is implemented as a state machine, examining its
input one line at a time. To understand how the parser works, please first
become familiar with the `docutils.statemachine` module, then see the
`states` module.
Customizing the Parser
----------------------
Anything that isn't already customizable is that way simply because that type
of customizability hasn't been implemented yet. Patches welcome!
When instantiating an object of the `Parser` class, two parameters may be
passed: ``rfc2822`` and ``inliner``. Pass ``rfc2822=1`` to enable an initial
RFC-2822 style header block, parsed as a "field_list" element (with "class"
attribute set to "rfc2822"). Currently this is the only body-level element
which is customizable without subclassing. (Tip: subclass `Parser` and change
its "state_classes" and "initial_state" attributes to refer to new classes.
Contact the author if you need more details.)
The ``inliner`` parameter takes an instance of `states.Inliner` or a subclass.
It handles inline markup recognition. A common extension is the addition of
further implicit hyperlinks, like "RFC 2822". This can be done by subclassing
`states.Inliner`, adding a new method for the implicit markup, and adding a
``(pattern, method)`` pair to the "implicit_dispatch" attribute of the
subclass. See `states.Inliner.implicit_inline()` for details. Explicit
inline markup can be customized in a `states.Inliner` subclass via the
``patterns.initial`` and ``dispatch`` attributes (and new methods as
appropriate).
"""
__docformat__ = 'reStructuredText'
import docutils.parsers
import docutils.statemachine
from docutils.parsers.rst import states
from docutils import frontend, nodes
class Parser(docutils.parsers.Parser):

    """The reStructuredText parser."""

    supported = ('restructuredtext', 'rst', 'rest', 'restx', 'rtxt', 'rstx')
    """Aliases this parser supports."""

    # Command-line/config options contributed by this parser, in the
    # (title, description, option_spec) form consumed by docutils.frontend.
    settings_spec = (
        'reStructuredText Parser Options',
        None,
        (('Recognize and link to standalone PEP references (like "PEP 258").',
          ['--pep-references'],
          {'action': 'store_true', 'validator': frontend.validate_boolean}),
         ('Base URL for PEP references '
          '(default "http://www.python.org/dev/peps/").',
          ['--pep-base-url'],
          {'metavar': '<URL>', 'default': 'http://www.python.org/dev/peps/',
           'validator': frontend.validate_url_trailing_slash}),
         ('Template for PEP file part of URL. (default "pep-%04d")',
          ['--pep-file-url-template'],
          {'metavar': '<URL>', 'default': 'pep-%04d'}),
         ('Recognize and link to standalone RFC references (like "RFC 822").',
          ['--rfc-references'],
          {'action': 'store_true', 'validator': frontend.validate_boolean}),
         ('Base URL for RFC references (default "http://www.faqs.org/rfcs/").',
          ['--rfc-base-url'],
          {'metavar': '<URL>', 'default': 'http://www.faqs.org/rfcs/',
           'validator': frontend.validate_url_trailing_slash}),
         ('Set number of spaces for tab expansion (default 8).',
          ['--tab-width'],
          {'metavar': '<width>', 'type': 'int', 'default': 8,
           'validator': frontend.validate_nonnegative_int}),
         ('Remove spaces before footnote references.',
          ['--trim-footnote-reference-space'],
          {'action': 'store_true', 'validator': frontend.validate_boolean}),
         ('Leave spaces before footnote references.',
          ['--leave-footnote-reference-space'],
          {'action': 'store_false', 'dest': 'trim_footnote_reference_space'}),
         ('Disable directives that insert the contents of external file '
          '("include" & "raw"); replaced with a "warning" system message.',
          ['--no-file-insertion'],
          {'action': 'store_false', 'default': 1,
           'dest': 'file_insertion_enabled',
           'validator': frontend.validate_boolean}),
         ('Enable directives that insert the contents of external file '
          '("include" & "raw"). Enabled by default.',
          ['--file-insertion-enabled'],
          {'action': 'store_true'}),
         ('Disable the "raw" directives; replaced with a "warning" '
          'system message.',
          ['--no-raw'],
          {'action': 'store_false', 'default': 1, 'dest': 'raw_enabled',
           'validator': frontend.validate_boolean}),
         ('Enable the "raw" directive. Enabled by default.',
          ['--raw-enabled'],
          {'action': 'store_true'}),))

    config_section = 'restructuredtext parser'
    config_section_dependencies = ('parsers',)

    def __init__(self, rfc2822=None, inliner=None):
        # Start in the RFC-2822 header state only when requested; see the
        # module docstring ("Customizing the Parser") for details.
        if rfc2822:
            self.initial_state = 'RFC2822Body'
        else:
            self.initial_state = 'Body'
        self.state_classes = states.state_classes
        # Optional custom inline-markup recognizer (a states.Inliner
        # instance or subclass), or None for the default.
        self.inliner = inliner

    def parse(self, inputstring, document):
        """Parse `inputstring` and populate `document`, a document tree."""
        self.setup_parse(inputstring, document)
        # The parser is a line-oriented state machine over the input lines.
        self.statemachine = states.RSTStateMachine(
              state_classes=self.state_classes,
              initial_state=self.initial_state,
              debug=document.reporter.debug_flag)
        # Expand tabs and normalize whitespace while splitting the input
        # string into a list of lines.
        inputlines = docutils.statemachine.string2lines(
            inputstring, tab_width=document.settings.tab_width,
            convert_whitespace=1)
        self.statemachine.run(inputlines, document, inliner=self.inliner)
        self.finish_parse()
class DirectiveError(Exception):

    """
    Exception carrying a message together with a system-message level.

    Thrown from inside directive code.  Do not instantiate directly --
    obtain instances via `Directive.directive_error()` instead!
    """

    def __init__(self, level, message):
        """Store the error `level` and `message`."""
        Exception.__init__(self)
        self.level, self.msg = level, message
class Directive(object):

    """
    Base class for reStructuredText directives.

    The following attributes may be set by subclasses.  They are
    interpreted by the directive parser (which runs the directive
    class):

    - `required_arguments`: The number of required arguments (default:
      0).

    - `optional_arguments`: The number of optional arguments (default:
      0).

    - `final_argument_whitespace`: A boolean, indicating if the final
      argument may contain whitespace (default: False).

    - `option_spec`: A dictionary, mapping known option names to
      conversion functions such as `int` or `float` (default: {}, no
      options).  Several conversion functions are defined in the
      directives/__init__.py module.

      Option conversion functions take a single parameter, the option
      argument (a string or ``None``), validate it and/or convert it
      to the appropriate form.  Conversion functions may raise
      `ValueError` and `TypeError` exceptions.

    - `has_content`: A boolean; True if content is allowed.  Client
      code must handle the case where content is required but not
      supplied (an empty content list will be supplied).

    Arguments are normally single whitespace-separated words.  The
    final argument may contain whitespace and/or newlines if
    `final_argument_whitespace` is True.

    If the form of the arguments is more complex, specify only one
    argument (either required or optional) and set
    `final_argument_whitespace` to True; the client code must do any
    context-sensitive parsing.

    When a directive implementation is being run, the directive class
    is instantiated, and the `run()` method is executed.  During
    instantiation, the following instance variables are set:

    - ``name`` is the directive type or name (string).

    - ``arguments`` is the list of positional arguments (strings).

    - ``options`` is a dictionary mapping option names (strings) to
      values (type depends on option conversion functions; see
      `option_spec` above).

    - ``content`` is a list of strings, the directive content line by line.

    - ``lineno`` is the absolute line number of the first line
      of the directive.

    - ``src`` is the name (or path) of the rst source of the directive.

    - ``srcline`` is the line number of the first line of the directive
      in its source.  It may differ from ``lineno``, if the main source
      includes other sources with the ``.. include::`` directive.

    - ``content_offset`` is the line offset of the first line of the
      content from the beginning of the current input.  Used when
      initiating a nested parse.

    - ``block_text`` is a string containing the entire directive.

    - ``state`` is the state which called the directive function.

    - ``state_machine`` is the state machine which controls the state
      which called the directive function.

    Directive functions return a list of nodes which will be inserted
    into the document tree at the point where the directive was
    encountered.  This can be an empty list if there is nothing to
    insert.

    For ordinary directives, the list must contain body elements or
    structural elements.  Some directives are intended specifically
    for substitution definitions, and must return a list of `Text`
    nodes and/or inline elements (suitable for inline insertion, in
    place of the substitution reference).  Such directives must verify
    substitution definition context, typically using code like this::

        if not isinstance(state, states.SubstitutionDef):
            error = state_machine.reporter.error(
                'Invalid context: the "%s" directive can only be used '
                'within a substitution definition.' % (name),
                nodes.literal_block(block_text, block_text), line=lineno)
            return [error]
    """

    # There is a "Creating reStructuredText Directives" how-to at
    # <http://docutils.sf.net/docs/howto/rst-directives.html>.  If you
    # update this docstring, please update the how-to as well.

    required_arguments = 0
    """Number of required directive arguments."""

    optional_arguments = 0
    """Number of optional arguments after the required arguments."""

    final_argument_whitespace = False
    """May the final argument contain whitespace?"""

    option_spec = None
    """Mapping of option names to validator functions."""

    has_content = False
    """May the directive have content?"""

    def __init__(self, name, arguments, options, content, lineno,
                 content_offset, block_text, state, state_machine):
        self.name = name
        self.arguments = arguments
        self.options = options
        self.content = content
        self.lineno = lineno
        self.content_offset = content_offset
        self.block_text = block_text
        self.state = state
        self.state_machine = state_machine
        # Bug fix: this previously assigned to ``self.scrline`` (a typo),
        # which left the documented ``srcline`` attribute unset.
        self.src, self.srcline = state_machine.get_source_and_line(lineno)

    def run(self):
        # (Message typo fixed: "is subclass" -> "in subclass".)
        raise NotImplementedError('Must override run() in subclass.')

    # Directive errors:

    def directive_error(self, level, message):
        """
        Return a DirectiveError suitable for being thrown as an exception.

        Call "raise self.directive_error(level, message)" from within
        a directive implementation to return one single system message
        at level `level`, which automatically gets the directive block
        and the line number added.

        You'd often use self.error(message) instead, which will
        generate an ERROR-level directive error.
        """
        return DirectiveError(level, message)

    def debug(self, message):
        return self.directive_error(0, message)

    def info(self, message):
        return self.directive_error(1, message)

    def warning(self, message):
        return self.directive_error(2, message)

    def error(self, message):
        return self.directive_error(3, message)

    def severe(self, message):
        return self.directive_error(4, message)

    # Convenience methods:

    def assert_has_content(self):
        """
        Throw an ERROR-level DirectiveError if the directive doesn't
        have contents.
        """
        if not self.content:
            raise self.error('Content block expected for the "%s" directive; '
                             'none found.' % self.name)
def convert_directive_function(directive_fn):
    """
    Define & return a directive class generated from `directive_fn`.

    `directive_fn` uses the old-style, functional interface.
    """
    class FunctionalDirective(Directive):

        # Pull the old-style metadata off the function, with the
        # historical defaults when an attribute is absent.
        option_spec = getattr(directive_fn, 'options', None)
        has_content = getattr(directive_fn, 'content', False)
        _argument_spec = getattr(directive_fn, 'arguments', (0, 0, False))
        (required_arguments,
         optional_arguments,
         final_argument_whitespace) = _argument_spec

        def run(self):
            # Forward the instance state as the old positional arguments.
            return directive_fn(
                self.name, self.arguments, self.options, self.content,
                self.lineno, self.content_offset, self.block_text,
                self.state, self.state_machine)

    # Return new-style directive.
    return FunctionalDirective
| apache-2.0 |
sserrot/champion_relationships | venv/Lib/site-packages/ipykernel/parentpoller.py | 4 | 4175 | # Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
try:
import ctypes
except:
ctypes = None
import os
import platform
import signal
import time
try:
from _thread import interrupt_main # Py 3
except ImportError:
from thread import interrupt_main # Py 2
from threading import Thread
from traitlets.log import get_logger
import warnings
class ParentPollerUnix(Thread):
    """ A Unix-specific daemon thread that terminates the program immediately
    when the parent process no longer exists.
    """

    def __init__(self):
        super(ParentPollerUnix, self).__init__()
        self.daemon = True

    def run(self):
        # os.waitpid only works for child processes, so poll the parent pid
        # instead: once we are re-parented to init (pid 1), the parent died.
        from errno import EINTR
        while True:
            try:
                if os.getppid() == 1:
                    get_logger().warning("Parent appears to have exited, shutting down.")
                    os._exit(1)
                time.sleep(1.0)
            except OSError as e:
                # A signal interrupted the sleep; simply poll again.
                if e.errno != EINTR:
                    raise
class ParentPollerWindows(Thread):
    """ A Windows-specific daemon thread that listens for a special event that
    signals an interrupt and, optionally, terminates the program immediately
    when the parent process no longer exists.
    """

    def __init__(self, interrupt_handle=None, parent_handle=None):
        """ Create the poller. At least one of the optional parameters must be
        provided.

        Parameters
        ----------
        interrupt_handle : HANDLE (int), optional
            If provided, the program will generate a Ctrl+C event when this
            handle is signaled.
        parent_handle : HANDLE (int), optional
            If provided, the program will terminate immediately when this
            handle is signaled.
        """
        assert(interrupt_handle or parent_handle)
        super(ParentPollerWindows, self).__init__()
        if ctypes is None:
            raise ImportError("ParentPollerWindows requires ctypes")
        self.daemon = True
        self.interrupt_handle = interrupt_handle
        self.parent_handle = parent_handle

    def run(self):
        """ Run the poll loop. This method never returns.
        """
        try:
            from _winapi import WAIT_OBJECT_0, INFINITE
        except ImportError:
            from _subprocess import WAIT_OBJECT_0, INFINITE

        # Wait on whichever handles were supplied (interrupt first).
        handles = [h for h in (self.interrupt_handle, self.parent_handle) if h]

        # WaitForMultipleObjects takes an array of native-width handles.
        arch = platform.architecture()[0]
        c_int = ctypes.c_int64 if arch.startswith('64') else ctypes.c_int

        # Listen forever.
        while True:
            result = ctypes.windll.kernel32.WaitForMultipleObjects(
                len(handles),                      # nCount
                (c_int * len(handles))(*handles),  # lpHandles
                False,                             # bWaitAll
                INFINITE)                          # dwMilliseconds

            if WAIT_OBJECT_0 <= result < len(handles):
                signaled = handles[result - WAIT_OBJECT_0]
                if signaled == self.interrupt_handle:
                    # check if signal handler is callable
                    # to avoid 'int not callable' error (Python issue #23395)
                    if callable(signal.getsignal(signal.SIGINT)):
                        interrupt_main()
                elif signaled == self.parent_handle:
                    get_logger().warning("Parent appears to have exited, shutting down.")
                    os._exit(1)
            elif result < 0:
                # wait failed, just give up and stop polling.
                warnings.warn("""Parent poll failed. If the frontend dies,
                the kernel may be left running. Please let us know
                about your system (bitness, Python, etc.) at
                ipython-dev@scipy.org""")
                return
| mit |
rodrigob/fuel | fuel/datasets/mnist.py | 10 | 1401 | # -*- coding: utf-8 -*-
from fuel.datasets import H5PYDataset
from fuel.transformers.defaults import uint8_pixels_to_floatX
from fuel.utils import find_in_data_path
class MNIST(H5PYDataset):
    u"""The MNIST database of handwritten digits.

    MNIST (Mixed National Institute of Standards and Technology) [LBBH]
    is one of the most famous machine-learning datasets: grayscale
    28 x 28 pixel images of handwritten digits, split into a training
    set and a test set.  It is accessible through Yann LeCun's
    website [LECUN].

    .. [LBBH] Yann LeCun, Léon Bottou, Yoshua Bengio, and Patrick Haffner,
       *Gradient-based learning applied to document recognition*,
       Proceedings of the IEEE, November 1998, 86(11):2278-2324.

    .. [LECUN] http://yann.lecun.com/exdb/mnist/

    Parameters
    ----------
    which_sets : tuple of str
        Which split(s) to load. Valid values are 'train' and 'test'.

    """
    filename = 'mnist.hdf5'
    default_transformers = uint8_pixels_to_floatX(('features',))

    def __init__(self, which_sets, **kwargs):
        # Small enough to keep in memory unless the caller says otherwise.
        if 'load_in_memory' not in kwargs:
            kwargs['load_in_memory'] = True
        data_path = find_in_data_path(self.filename)
        super(MNIST, self).__init__(file_or_path=data_path,
                                    which_sets=which_sets, **kwargs)
| mit |
akarol/cfme_tests | sprout/appliances/migrations/0042_openshift_appliances_support.py | 6 | 1175 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-02-19 12:23
from __future__ import unicode_literals
from django.db import migrations, models
import json_field.fields
class Migration(migrations.Migration):
    # Adds OpenShift-appliance support: records the OpenShift project name
    # and external IP on Appliance, and free-form JSON deployment data on
    # Template.

    dependencies = [
        ('appliances', '0041_auto_20180216_1634'),
    ]

    operations = [
        # Nullable because only OpenShift-provisioned appliances have these.
        migrations.AddField(
            model_name='appliance',
            name='openshift_ext_ip',
            field=models.CharField(help_text=b"Openshift's project external ip", max_length=45,
                                   null=True),
        ),
        migrations.AddField(
            model_name='appliance',
            name='openshift_project',
            field=models.CharField(help_text=b"Openshift's project name", max_length=45, null=True),
        ),
        # Arbitrary per-template deployment data, stored as JSON.
        migrations.AddField(
            model_name='template',
            name='custom_data',
            field=json_field.fields.JSONField(default={}, help_text=b'Some Templates '
                                                                    b'require additional data '
                                                                    b'for deployment'),
        ),
    ]
| gpl-2.0 |
ulkaio/ulka | docs/conf.py | 1 | 8316 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# ulka documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 9 22:26:36 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Get the project root dir, which is the parent dir of this
cwd = os.getcwd()
project_root = os.path.dirname(cwd)
# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)
import ulka
# -- General configuration ---------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'ulka'
copyright = u"2016, Ulka"
# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = ulka.__version__
# The full version, including alpha/beta/rc tags.
release = ulka.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to
# some non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built
# documents.
#keep_warnings = False
# -- Options for HTML output -------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a
# -- Options for HTML output -------------------------------------------

# Theme customization; each theme documents its own set of options.
#html_theme_options = {}

# Paths (relative to this directory) that contain custom themes.
#html_theme_path = []

# Document set name; defaults to "<project> v<release> documentation".
#html_title = None

# Shorter title for the navigation bar; defaults to html_title.
#html_short_title = None

# Image file (relative to this directory) placed at the top of the sidebar.
#html_logo = None

# Favicon for the docs: a Windows .ico file (16x16 or 32x32 pixels)
# located within the static path.
#html_favicon = None

# Custom static files (such as style sheets), relative to this directory.
# They are copied after the builtin static files, so a file named
# "default.css" here overrides the builtin "default.css".
html_static_path = ['_static']

# strftime format for the 'Last updated on:' timestamp at page bottom;
# '' disables it.
#html_last_updated_fmt = '%b %d, %Y'

# Convert quotes and dashes to typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates: maps document names to template names.
#html_sidebars = {}

# Additional templates rendered to pages: maps page names to templates.
#html_additional_pages = {}

# Index generation switches.
#html_domain_indices = True
#html_use_index = True
#html_split_index = False

# Footer / source-link switches (all default to True).
#html_show_sourcelink = True
#html_show_sphinx = True
#html_show_copyright = True

# If non-empty, an OpenSearch description file is output and every page
# links to it; the value must be the base URL the finished HTML is
# served from.
#html_use_opensearch = ''

# File name suffix for generated HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for the HTML help builder.
htmlhelp_basename = 'ulkadoc'


# -- Options for LaTeX output ------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
}

# Grouping of the document tree into LaTeX files: list of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
    ('index', 'ulka.tex',
     u'ulka Documentation',
     u'Ulka', 'manual'),
]

# Image file placed at the top of the LaTeX title page.
#latex_logo = None

# For "manual" documents: make toplevel headings parts, not chapters.
#latex_use_parts = False

# Show page references after internal links / URLs after external links.
#latex_show_pagerefs = False
#latex_show_urls = False

# Documents appended as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True


# -- Options for manual page output ------------------------------------

# One entry per manual page: (source start file, name, description,
# authors, manual section).
man_pages = [
    ('index', 'ulka',
     u'ulka Documentation',
     [u'Ulka'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False


# -- Options for Texinfo output ----------------------------------------

# Grouping of the document tree into Texinfo files: (source start file,
# target name, title, author, dir menu entry, description, category).
texinfo_documents = [
    ('index', 'ulka',
     u'ulka Documentation',
     u'Ulka',
     'ulka',
     'One line description of project.',
     'Miscellaneous'),
]

# Documents appended as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| mit |
ZhangXinNan/tensorflow | tensorflow/contrib/factorization/python/kernel_tests/wals_solver_ops_test.py | 8 | 4705 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
# ==============================================================================
"""Tests for wals_solver_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.factorization.python.ops import gen_factorization_ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.platform import test
def SparseBlock3x3():
  """Return a fixed 4x3 SparseTensor fixture with six nonzero entries."""
  indices = np.array(
      [[0, 0], [0, 2], [1, 1], [2, 0], [2, 1], [3, 2]]).astype(np.int64)
  values = np.array([0.1, 0.2, 1.1, 2.0, 2.1, 3.2]).astype(np.float32)
  dense_shape = np.array([4, 3]).astype(np.int64)
  return sparse_tensor.SparseTensor(indices, values, dense_shape)
class WalsSolverOpsTest(test.TestCase):
  """Golden-value tests for wals_compute_partial_lhs_and_rhs.

  Runs the op on the fixed 4x3 sparse block and checks the per-row
  normal-equation terms (one 3x3 LHS matrix per row plus the RHS
  matrix) against precomputed constants.
  """

  def setUp(self):
    # Fixed factor matrices and weights shared by both tests below.
    self._column_factors = np.array([
        [0.1, 0.2, 0.3],
        [0.4, 0.5, 0.6],
        [0.7, 0.8, 0.9],
    ]).astype(np.float32)
    self._row_factors = np.array([[0.1, 0.2, 0.3], [0.4, 0.5, 0.6],
                                  [0.7, 0.8, 0.9],
                                  [1.1, 1.2, 1.3]]).astype(np.float32)
    self._column_weights = np.array([0.1, 0.2, 0.3]).astype(np.float32)
    self._row_weights = np.array([0.1, 0.2, 0.3, 0.4]).astype(np.float32)
    self._unobserved_weights = 0.1

  def testWalsSolverLhs(self):
    """Row/column weight vectors supplied, no per-entry weights."""
    sparse_block = SparseBlock3x3()
    with self.test_session():
      [lhs_tensor,
       rhs_matrix] = gen_factorization_ops.wals_compute_partial_lhs_and_rhs(
           self._column_factors, self._column_weights, self._unobserved_weights,
           self._row_weights, sparse_block.indices, sparse_block.values,
           [],
           input_block_size=sparse_block.dense_shape[0],
           input_is_transpose=False)
      # One symmetric 3x3 LHS block per input row.
      self.assertAllClose(lhs_tensor.eval(), [[
          [0.014800, 0.017000, 0.019200],
          [0.017000, 0.019600, 0.022200],
          [0.019200, 0.022200, 0.025200],
      ], [
          [0.0064000, 0.0080000, 0.0096000],
          [0.0080000, 0.0100000, 0.0120000],
          [0.0096000, 0.0120000, 0.0144000],
      ], [
          [0.0099000, 0.0126000, 0.0153000],
          [0.0126000, 0.0162000, 0.0198000],
          [0.0153000, 0.0198000, 0.0243000],
      ], [
          [0.058800, 0.067200, 0.075600],
          [0.067200, 0.076800, 0.086400],
          [0.075600, 0.086400, 0.097200],
      ]])
      self.assertAllClose(rhs_matrix.eval(), [[0.019300, 0.023000, 0.026700],
                                              [0.061600, 0.077000, 0.092400],
                                              [0.160400, 0.220000, 0.279600],
                                              [0.492800, 0.563200, 0.633600]])

  def testWalsSolverLhsEntryWeights(self):
    """Per-entry weights supplied instead of row/column weight vectors.

    The entry weights are chosen to reproduce the same effective weights
    as testWalsSolverLhs, so the expected values are identical.
    """
    sparse_block = SparseBlock3x3()
    with self.test_session():
      [lhs_tensor,
       rhs_matrix] = gen_factorization_ops.wals_compute_partial_lhs_and_rhs(
           self._column_factors, [], self._unobserved_weights,
           [], sparse_block.indices, sparse_block.values,
           [0.01, 0.03, 0.04, 0.03, 0.06, 0.12],
           input_block_size=sparse_block.dense_shape[0],
           input_is_transpose=False)
      self.assertAllClose(lhs_tensor.eval(), [[
          [0.014800, 0.017000, 0.019200],
          [0.017000, 0.019600, 0.022200],
          [0.019200, 0.022200, 0.025200],
      ], [
          [0.0064000, 0.0080000, 0.0096000],
          [0.0080000, 0.0100000, 0.0120000],
          [0.0096000, 0.0120000, 0.0144000],
      ], [
          [0.0099000, 0.0126000, 0.0153000],
          [0.0126000, 0.0162000, 0.0198000],
          [0.0153000, 0.0198000, 0.0243000],
      ], [
          [0.058800, 0.067200, 0.075600],
          [0.067200, 0.076800, 0.086400],
          [0.075600, 0.086400, 0.097200],
      ]])
      self.assertAllClose(rhs_matrix.eval(), [[0.019300, 0.023000, 0.026700],
                                              [0.061600, 0.077000, 0.092400],
                                              [0.160400, 0.220000, 0.279600],
                                              [0.492800, 0.563200, 0.633600]])
# Standard entry point: run the tests via TensorFlow's test runner.
if __name__ == "__main__":
  test.main()
| apache-2.0 |
ntt-sic/cinder | cinder/tests/test_storwize_svc.py | 2 | 95411 | # Copyright 2013 IBM Corp.
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Authors:
# Ronen Kat <ronenkat@il.ibm.com>
# Avishay Traeger <avishay@il.ibm.com>
"""
Tests for the IBM Storwize family and SVC volume driver.
"""
import random
import re
import socket
from cinder import context
from cinder import exception
from cinder.openstack.common import excutils
from cinder.openstack.common import log as logging
from cinder.openstack.common import processutils
from cinder import test
from cinder import units
from cinder import utils
from cinder.volume import configuration as conf
from cinder.volume.drivers import storwize_svc
from cinder.volume import volume_types
from eventlet import greenthread
LOG = logging.getLogger(__name__)
class StorwizeSVCFakeDB:
    """Minimal stand-in for the cinder volume DB API.

    Remembers a single volume record; volume_get() returns it regardless
    of the context and volume id it is asked for.
    """

    def __init__(self):
        # The one remembered volume (None until volume_set is called).
        self.volume = None

    def volume_get(self, context, vol_id):
        """Return the remembered volume; context and vol_id are ignored."""
        return self.volume

    def volume_set(self, vol):
        """Remember *vol* for subsequent volume_get() calls."""
        self.volume = vol
class StorwizeSVCManagementSimulator:
    def __init__(self, pool_name):
        """Initialize the in-memory state of the simulated SVC cluster.

        *pool_name* becomes the primary storage pool name; every object
        table (volumes, hosts, host mappings, FlashCopy mappings) starts
        empty.
        """
        self._flags = {'storwize_svc_volpool_name': pool_name}
        self._volumes_list = {}
        self._hosts_list = {}
        self._mappings_list = {}
        self._fcmappings_list = {}
        self._other_pools = {'openstack2': {}, 'openstack3': {}}

        # Error injected into the NEXT invocation of each command
        # ('' means no injected error).
        self._next_cmd_error = {
            'lsportip': '',
            'lsfabric': '',
            'lsiscsiauth': '',
            'lsnodecanister': '',
            'mkvdisk': '',
            'lsvdisk': '',
            'lsfcmap': '',
            'prestartfcmap': '',
            'startfcmap': '',
            'rmfcmap': '',
            'lslicense': '',
        }

        # Canned (stdout, stderr) pairs mimicking real SVC CLI errors.
        self._errors = {
            'CMMVC5701E': ('', 'CMMVC5701E No object ID was specified.'),
            'CMMVC6035E': ('', 'CMMVC6035E The action failed as the '
                               'object already exists.'),
            'CMMVC5753E': ('', 'CMMVC5753E The specified object does not '
                               'exist or is not a suitable candidate.'),
            'CMMVC5707E': ('', 'CMMVC5707E Required parameters are missing.'),
            'CMMVC6581E': ('', 'CMMVC6581E The command has failed because '
                               'the maximum number of allowed iSCSI '
                               'qualified names (IQNs) has been reached, '
                               'or the IQN is already assigned or is not '
                               'valid.'),
            'CMMVC5754E': ('', 'CMMVC5754E The specified object does not '
                               'exist, or the name supplied does not meet '
                               'the naming rules.'),
            'CMMVC6071E': ('', 'CMMVC6071E The VDisk-to-host mapping was '
                               'not created because the VDisk is already '
                               'mapped to a host.'),
            'CMMVC5879E': ('', 'CMMVC5879E The VDisk-to-host mapping was '
                               'not created because a VDisk is already '
                               'mapped to this host with this SCSI LUN.'),
            'CMMVC5840E': ('', 'CMMVC5840E The virtual disk (VDisk) was '
                               'not deleted because it is mapped to a '
                               'host or because it is part of a FlashCopy '
                               'or Remote Copy mapping, or is involved in '
                               'an image mode migrate.'),
            'CMMVC6527E': ('', 'CMMVC6527E The name that you have entered '
                               'is not valid. The name can contain letters, '
                               'numbers, spaces, periods, dashes, and '
                               'underscores. The name must begin with a '
                               'letter or an underscore. The name must not '
                               'begin or end with a space.'),
            'CMMVC5871E': ('', 'CMMVC5871E The action failed because one or '
                               'more of the configured port names is in a '
                               'mapping.'),
            'CMMVC5924E': ('', 'CMMVC5924E The FlashCopy mapping was not '
                               'created because the source and target '
                               'virtual disks (VDisks) are different sizes.'),
            'CMMVC6303E': ('', 'CMMVC6303E The create failed because the '
                               'source and target VDisks are the same.'),
            'CMMVC7050E': ('', 'CMMVC7050E The command failed because at '
                               'least one node in the I/O group does not '
                               'support compressed VDisks.'),
            'CMMVC6430E': ('', 'CMMVC6430E The command failed because the '
                               'target and source managed disk groups must '
                               'be different.'),
            'CMMVC6353E': ('', 'CMMVC6353E The command failed because the '
                               'copy specified does not exist.'),
            'CMMVC6446E': ('', 'The command failed because the managed disk '
                               'groups have different extent sizes.'),
            # Catch-all for invalid state transitions:
            'CMMVC5903E': ('', 'CMMVC5903E The FlashCopy mapping was not '
                               'changed because the mapping or consistency '
                               'group is another state.'),
        }

        # FlashCopy mapping state machine: state -> {action: next state}.
        # Used by _state_transition(); 'end' is terminal.
        self._transitions = {'begin': {'make': 'idle_or_copied'},
                             'idle_or_copied': {'prepare': 'preparing',
                                                'delete': 'end',
                                                'delete_force': 'end'},
                             'preparing': {'flush_failed': 'stopped',
                                           'wait': 'prepared'},
                             'end': None,
                             'stopped': {'prepare': 'preparing',
                                         'delete_force': 'end'},
                             'prepared': {'stop': 'stopped',
                                          'start': 'copying'},
                             'copying': {'wait': 'idle_or_copied',
                                         'stop': 'stopping'},
                             # Assume the worst case where stopping->stopped
                             # rather than stopping idle_or_copied
                             'stopping': {'wait': 'stopped'},
                             }
def _state_transition(self, function, fcmap):
if (function == 'wait' and
'wait' not in self._transitions[fcmap['status']]):
return ('', '')
if fcmap['status'] == 'copying' and function == 'wait':
if fcmap['copyrate'] != '0':
if fcmap['progress'] == '0':
fcmap['progress'] = '50'
else:
fcmap['progress'] = '100'
fcmap['status'] = 'idle_or_copied'
return ('', '')
else:
try:
curr_state = fcmap['status']
fcmap['status'] = self._transitions[curr_state][function]
return ('', '')
except Exception:
return self._errors['CMMVC5903E']
# Find an unused ID
def _find_unused_id(self, d):
ids = []
for k, v in d.iteritems():
ids.append(int(v['id']))
ids.sort()
for index, n in enumerate(ids):
if n > index:
return str(index)
return str(len(ids))
# Check if name is valid
def _is_invalid_name(self, name):
if re.match("^[a-zA-Z_][\w ._-]*$", name):
return False
return True
# Convert argument string to dictionary
    def _cmd_to_dict(self, arg_list):
        """Parse an SVC CLI argument list into a dict.

        Returns a dict with the command under 'cmd', each '-flag' either
        as a boolean (flags in no_param_args) or paired with its
        following value (flags in one_param_args), and any bare
        positional argument under 'obj'.  Raises
        exception.InvalidInput for an unrecognized '-flag'.
        """
        # Flags that take no value.
        no_param_args = [
            'autodelete',
            'autoexpand',
            'bytes',
            'compressed',
            'force',
            'nohdr',
        ]
        # Flags that consume the next argument as their value.
        one_param_args = [
            'chapsecret',
            'cleanrate',
            'copy',
            'copyrate',
            'delim',
            'easytier',
            'filtervalue',
            'grainsize',
            'hbawwpn',
            'host',
            'iogrp',
            'iscsiname',
            'mdiskgrp',
            'name',
            'rsize',
            'scsi',
            'size',
            'source',
            'target',
            'unit',
            'vdisk',
            'warning',
            'wwpn',
        ]

        # Handle the special case of lsnode which is a two-word command
        # Use the one word version of the command internally
        if arg_list[0] in ('svcinfo', 'svctask'):
            if arg_list[1] == 'lsnode':
                if len(arg_list) > 4:  # e.g. svcinfo lsnode -delim ! <node id>
                    ret = {'cmd': 'lsnode', 'node_id': arg_list[-1]}
                else:
                    ret = {'cmd': 'lsnodecanister'}
            else:
                ret = {'cmd': arg_list[1]}
            # Drop the 'svcinfo'/'svctask' prefix before flag parsing.
            arg_list.pop(0)
        else:
            ret = {'cmd': arg_list[0]}

        # Walk the remaining args; 'skip' jumps over a value that was
        # already consumed by its preceding one-param flag.
        skip = False
        for i in range(1, len(arg_list)):
            if skip:
                skip = False
                continue
            if arg_list[i][0] == '-':
                if arg_list[i][1:] in no_param_args:
                    ret[arg_list[i][1:]] = True
                elif arg_list[i][1:] in one_param_args:
                    ret[arg_list[i][1:]] = arg_list[i + 1]
                    skip = True
                else:
                    raise exception.InvalidInput(
                        reason=_('unrecognized argument %s') % arg_list[i])
            else:
                # Bare (non-flag) argument is the target object name/id.
                ret['obj'] = arg_list[i]
        return ret
def _print_info_cmd(self, rows, delim=' ', nohdr=False, **kwargs):
"""Generic function for printing information."""
if nohdr:
del rows[0]
for index in range(len(rows)):
rows[index] = delim.join(rows[index])
return ('%s' % '\n'.join(rows), '')
def _print_info_obj_cmd(self, header, row, delim=' ', nohdr=False):
"""Generic function for printing information for a specific object."""
objrows = []
for idx, val in enumerate(header):
objrows.append([val, row[idx]])
if nohdr:
for index in range(len(objrows)):
objrows[index] = ' '.join(objrows[index][1:])
for index in range(len(objrows)):
objrows[index] = delim.join(objrows[index])
return ('%s' % '\n'.join(objrows), '')
def _convert_bytes_units(self, bytestr):
num = int(bytestr)
unit_array = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']
unit_index = 0
while num > 1024:
num = num / 1024
unit_index += 1
return '%d%s' % (num, unit_array[unit_index])
def _convert_units_bytes(self, num, unit):
unit_array = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']
unit_index = 0
while unit.lower() != unit_array[unit_index].lower():
num = num * 1024
unit_index += 1
return str(num)
def _cmd_lslicense(self, **kwargs):
rows = [None] * 3
rows[0] = ['used_compression_capacity', '0.08']
rows[1] = ['license_compression_capacity', '0']
if self._next_cmd_error['lslicense'] == 'no_compression':
self._next_cmd_error['lslicense'] = ''
rows[2] = ['license_compression_enclosures', '0']
else:
rows[2] = ['license_compression_enclosures', '1']
return self._print_info_cmd(rows=rows, **kwargs)
# Print mostly made-up stuff in the correct syntax
def _cmd_lssystem(self, **kwargs):
rows = [None] * 2
rows[0] = ['id', '0123456789ABCDEF']
rows[1] = ['name', 'storwize-svc-sim']
return self._print_info_cmd(rows=rows, **kwargs)
# Print mostly made-up stuff in the correct syntax, assume -bytes passed
    def _cmd_lsmdiskgrp(self, **kwargs):
        """Fake 'lsmdiskgrp': list the three fixed pools, or print one
        pool's attributes when an object name is given.

        Capacities are made-up constants; only the primary pool's
        vdisk_count tracks the simulator state.  Output assumes the
        caller passed -bytes (values are raw byte counts).
        """
        rows = [None] * 4
        rows[0] = ['id', 'name', 'status', 'mdisk_count',
                   'vdisk_count', 'capacity', 'extent_size',
                   'free_capacity', 'virtual_capacity', 'used_capacity',
                   'real_capacity', 'overallocation', 'warning',
                   'easy_tier', 'easy_tier_status']
        rows[1] = ['1', self._flags['storwize_svc_volpool_name'], 'online',
                   '1', str(len(self._volumes_list)), '3573412790272',
                   '256', '3529926246400', '1693247906775', '277841182',
                   '38203734097', '47', '80', 'auto', 'inactive']
        rows[2] = ['2', 'openstack2', 'online',
                   '1', '0', '3573412790272', '256',
                   '3529432325160', '1693247906775', '277841182',
                   '38203734097', '47', '80', 'auto', 'inactive']
        rows[3] = ['3', 'openstack3', 'online',
                   '1', '0', '3573412790272', '128',
                   '3529432325160', '1693247906775', '277841182',
                   '38203734097', '47', '80', 'auto', 'inactive']
        if 'obj' not in kwargs:
            return self._print_info_cmd(rows=rows, **kwargs)
        else:
            # Single-object form: pick the matching row or fail.
            if kwargs['obj'] == self._flags['storwize_svc_volpool_name']:
                row = rows[1]
            elif kwargs['obj'] == 'openstack2':
                row = rows[2]
            elif kwargs['obj'] == 'openstack3':
                row = rows[3]
            else:
                return self._errors['CMMVC5754E']

            # Transpose header/row into attribute-value pairs.
            objrows = []
            for idx, val in enumerate(rows[0]):
                objrows.append([val, row[idx]])

            if 'nohdr' in kwargs:
                for index in range(len(objrows)):
                    objrows[index] = ' '.join(objrows[index][1:])
            if 'delim' in kwargs:
                for index in range(len(objrows)):
                    objrows[index] = kwargs['delim'].join(objrows[index])

            return ('%s' % '\n'.join(objrows), '')
# Print mostly made-up stuff in the correct syntax
    def _cmd_lsnodecanister(self, **kwargs):
        """Fake 'lsnodecanister': list the two fixed simulated nodes.

        The injected 'header_mismatch' / 'remove_field' errors corrupt
        the output table exactly once (used to test the driver's
        output-parsing error handling).
        """
        rows = [None] * 3
        rows[0] = ['id', 'name', 'UPS_serial_number', 'WWNN', 'status',
                   'IO_group_id', 'IO_group_name', 'config_node',
                   'UPS_unique_id', 'hardware', 'iscsi_name', 'iscsi_alias',
                   'panel_name', 'enclosure_id', 'canister_id',
                   'enclosure_serial_number']
        rows[1] = ['1', 'node1', '', '123456789ABCDEF0', 'online', '0',
                   'io_grp0',
                   'yes', '123456789ABCDEF0', '100',
                   'iqn.1982-01.com.ibm:1234.sim.node1', '', '01-1', '1', '1',
                   '0123ABC']
        rows[2] = ['2', 'node2', '', '123456789ABCDEF1', 'online', '0',
                   'io_grp0',
                   'no', '123456789ABCDEF1', '100',
                   'iqn.1982-01.com.ibm:1234.sim.node2', '', '01-2', '1', '2',
                   '0123ABC']

        if self._next_cmd_error['lsnodecanister'] == 'header_mismatch':
            # Drop one header column so header and data disagree.
            rows[0].pop(2)
            self._next_cmd_error['lsnodecanister'] = ''
        if self._next_cmd_error['lsnodecanister'] == 'remove_field':
            # Drop the first column from every row.
            for row in rows:
                row.pop(0)
            self._next_cmd_error['lsnodecanister'] = ''

        return self._print_info_cmd(rows=rows, **kwargs)
# Print information of every single node of SVC
    def _cmd_lsnode(self, **kwargs):
        """Fake 'lsnode <node id>': return canned '!'-delimited detail
        output for node '1' or '2'; unknown ids produce empty stdout.
        """
        # Raw strings are intentionally left-aligned: they are the exact
        # CLI output the driver parses line by line.
        node_infos = dict()
        node_infos['1'] = r'''id!1
name!node1
port_id!500507680210C744
port_status!active
port_speed!8Gb
port_id!500507680220C744
port_status!active
port_speed!8Gb
'''
        node_infos['2'] = r'''id!2
name!node2
port_id!500507680220C745
port_status!active
port_speed!8Gb
port_id!500507680230C745
port_status!inactive
port_speed!N/A
'''
        node_id = kwargs.get('node_id', None)
        stdout = node_infos.get(node_id, '')
        return stdout, ''
# Print mostly made-up stuff in the correct syntax
    def _cmd_lsportip(self, **kwargs):
        """Fake 'lsportip': list iSCSI Ethernet port state for both nodes.

        Injected errors (each fires once):
          'ip_no_config'    - blank out the configured IP addresses;
          'header_mismatch' - drop a header column;
          'remove_field'    - drop a data column from every row.
        """
        if self._next_cmd_error['lsportip'] == 'ip_no_config':
            self._next_cmd_error['lsportip'] = ''
            ip_addr1 = ''
            ip_addr2 = ''
            gw = ''
        else:
            ip_addr1 = '1.234.56.78'
            ip_addr2 = '1.234.56.79'
            gw = '1.234.56.1'

        # Two rows (failover no/yes) per port, four ports per node.
        rows = [None] * 17
        rows[0] = ['id', 'node_id', 'node_name', 'IP_address', 'mask',
                   'gateway', 'IP_address_6', 'prefix_6', 'gateway_6', 'MAC',
                   'duplex', 'state', 'speed', 'failover']
        rows[1] = ['1', '1', 'node1', ip_addr1, '255.255.255.0',
                   gw, '', '', '', '01:23:45:67:89:00', 'Full',
                   'online', '1Gb/s', 'no']
        rows[2] = ['1', '1', 'node1', '', '', '', '', '', '',
                   '01:23:45:67:89:00', 'Full', 'online', '1Gb/s', 'yes']
        rows[3] = ['2', '1', 'node1', '', '', '', '', '', '',
                   '01:23:45:67:89:01', 'Full', 'unconfigured', '1Gb/s', 'no']
        rows[4] = ['2', '1', 'node1', '', '', '', '', '', '',
                   '01:23:45:67:89:01', 'Full', 'unconfigured', '1Gb/s', 'yes']
        rows[5] = ['3', '1', 'node1', '', '', '', '', '', '', '', '',
                   'unconfigured', '', 'no']
        rows[6] = ['3', '1', 'node1', '', '', '', '', '', '', '', '',
                   'unconfigured', '', 'yes']
        rows[7] = ['4', '1', 'node1', '', '', '', '', '', '', '', '',
                   'unconfigured', '', 'no']
        rows[8] = ['4', '1', 'node1', '', '', '', '', '', '', '', '',
                   'unconfigured', '', 'yes']
        rows[9] = ['1', '2', 'node2', ip_addr2, '255.255.255.0',
                   gw, '', '', '', '01:23:45:67:89:02', 'Full',
                   'online', '1Gb/s', 'no']
        rows[10] = ['1', '2', 'node2', '', '', '', '', '', '',
                    '01:23:45:67:89:02', 'Full', 'online', '1Gb/s', 'yes']
        rows[11] = ['2', '2', 'node2', '', '', '', '', '', '',
                    '01:23:45:67:89:03', 'Full', 'unconfigured', '1Gb/s', 'no']
        rows[12] = ['2', '2', 'node2', '', '', '', '', '', '',
                    '01:23:45:67:89:03', 'Full', 'unconfigured', '1Gb/s',
                    'yes']
        rows[13] = ['3', '2', 'node2', '', '', '', '', '', '', '', '',
                    'unconfigured', '', 'no']
        rows[14] = ['3', '2', 'node2', '', '', '', '', '', '', '', '',
                    'unconfigured', '', 'yes']
        rows[15] = ['4', '2', 'node2', '', '', '', '', '', '', '', '',
                    'unconfigured', '', 'no']
        rows[16] = ['4', '2', 'node2', '', '', '', '', '', '', '', '',
                    'unconfigured', '', 'yes']

        if self._next_cmd_error['lsportip'] == 'header_mismatch':
            rows[0].pop(2)
            self._next_cmd_error['lsportip'] = ''
        if self._next_cmd_error['lsportip'] == 'remove_field':
            for row in rows:
                row.pop(1)
            self._next_cmd_error['lsportip'] = ''

        return self._print_info_cmd(rows=rows, **kwargs)
    def _cmd_lsfabric(self, **kwargs):
        """Fake 'lsfabric': list FC logins for mapped hosts.

        Optionally filters by host-name prefix ('host') and/or by a
        WWPN ('wwpn').  Only hosts that appear in at least one
        vdisk-to-host mapping are reported; each of the host's WWPNs
        produces one row.  NOTE: uses iteritems(), so this runs under
        Python 2 only.
        """
        host_name = kwargs['host'] if 'host' in kwargs else None
        target_wwpn = kwargs['wwpn'] if 'wwpn' in kwargs else None
        host_infos = []

        for hk, hv in self._hosts_list.iteritems():
            if not host_name or hv['host_name'].startswith(host_name):
                for mk, mv in self._mappings_list.iteritems():
                    if mv['host'] == hv['host_name']:
                        # Host is mapped; include it if the WWPN matches.
                        if not target_wwpn or target_wwpn in hv['wwpns']:
                            host_infos.append(hv)
                        break

        if not len(host_infos):
            return ('', '')

        rows = []
        rows.append(['remote_wwpn', 'remote_nportid', 'id', 'node_name',
                     'local_wwpn', 'local_port', 'local_nportid', 'state',
                     'name', 'cluster_name', 'type'])
        for host_info in host_infos:
            for wwpn in host_info['wwpns']:
                rows.append([wwpn, '123456', host_info['id'], 'nodeN',
                             'AABBCCDDEEFF0011', '1', '0123ABC', 'active',
                             host_info['host_name'], '', 'host'])

        # One-shot injected table corruption for parser error tests.
        if self._next_cmd_error['lsfabric'] == 'header_mismatch':
            rows[0].pop(0)
            self._next_cmd_error['lsfabric'] = ''
        if self._next_cmd_error['lsfabric'] == 'remove_field':
            for row in rows:
                row.pop(0)
            self._next_cmd_error['lsfabric'] = ''
        return self._print_info_cmd(rows=rows, **kwargs)
# Create a vdisk
    def _cmd_mkvdisk(self, **kwargs):
        """Fake 'mkvdisk': create a vdisk record in the simulator.

        Only id/uid, name and size are derived from the arguments; the
        thin-provisioning (rsize) capacity figures are fixed fakes.
        Fails with CMMVC6035E when the name already exists.
        """
        # We only save the id/uid, name, and size - all else will be made up
        volume_info = {}
        volume_info['id'] = self._find_unused_id(self._volumes_list)
        volume_info['uid'] = ('ABCDEF' * 3) + ('0' * 14) + volume_info['id']

        if 'name' in kwargs:
            volume_info['name'] = kwargs['name'].strip('\'\'')
        else:
            volume_info['name'] = 'vdisk' + volume_info['id']

        # Assume size and unit are given, store it in bytes
        capacity = int(kwargs['size'])
        unit = kwargs['unit']
        volume_info['capacity'] = self._convert_units_bytes(capacity, unit)
        volume_info['IO_group_id'] = kwargs['iogrp']
        volume_info['IO_group_name'] = 'io_grp%s' % kwargs['iogrp']

        if 'easytier' in kwargs:
            if kwargs['easytier'] == 'on':
                volume_info['easy_tier'] = 'on'
            else:
                volume_info['easy_tier'] = 'off'

        if 'rsize' in kwargs:
            # Thin-provisioned copy: fake capacity accounting numbers.
            # Fake numbers
            volume_info['used_capacity'] = '786432'
            volume_info['real_capacity'] = '21474816'
            volume_info['free_capacity'] = '38219264'
            if 'warning' in kwargs:
                volume_info['warning'] = kwargs['warning'].rstrip('%')
            else:
                volume_info['warning'] = '80'
            if 'autoexpand' in kwargs:
                volume_info['autoexpand'] = 'on'
            else:
                volume_info['autoexpand'] = 'off'
            if 'grainsize' in kwargs:
                volume_info['grainsize'] = kwargs['grainsize']
            else:
                volume_info['grainsize'] = '32'
            if 'compressed' in kwargs:
                volume_info['compressed_copy'] = 'yes'
            else:
                volume_info['compressed_copy'] = 'no'
        else:
            # Fully-allocated copy: used == real == virtual capacity.
            volume_info['used_capacity'] = volume_info['capacity']
            volume_info['real_capacity'] = volume_info['capacity']
            volume_info['free_capacity'] = '0'
            volume_info['warning'] = ''
            volume_info['autoexpand'] = ''
            volume_info['grainsize'] = ''
            volume_info['compressed_copy'] = 'no'

        # Every vdisk starts with a single primary copy in the main pool.
        vol_cp = {'id': '0',
                  'status': 'online',
                  'sync': 'yes',
                  'primary': 'yes',
                  'mdisk_grp_id': '1',
                  'mdisk_grp_name': self._flags['storwize_svc_volpool_name'],
                  'easy_tier': volume_info['easy_tier'],
                  'compressed_copy': volume_info['compressed_copy']}
        volume_info['copies'] = {'0': vol_cp}

        if volume_info['name'] in self._volumes_list:
            return self._errors['CMMVC6035E']
        else:
            self._volumes_list[volume_info['name']] = volume_info
            return ('Virtual Disk, id [%s], successfully created' %
                    (volume_info['id']), '')
# Delete a vdisk
def _cmd_rmvdisk(self, **kwargs):
force = True if 'force' in kwargs else False
if 'obj' not in kwargs:
return self._errors['CMMVC5701E']
vol_name = kwargs['obj'].strip('\'\'')
if vol_name not in self._volumes_list:
return self._errors['CMMVC5753E']
if not force:
for k, mapping in self._mappings_list.iteritems():
if mapping['vol'] == vol_name:
return self._errors['CMMVC5840E']
for k, fcmap in self._fcmappings_list.iteritems():
if ((fcmap['source'] == vol_name) or
(fcmap['target'] == vol_name)):
return self._errors['CMMVC5840E']
del self._volumes_list[vol_name]
return ('', '')
def _cmd_expandvdisksize(self, **kwargs):
if 'obj' not in kwargs:
return self._errors['CMMVC5701E']
vol_name = kwargs['obj'].strip('\'\'')
# Assume unit is gb
if 'size' not in kwargs:
return self._errors['CMMVC5707E']
size = int(kwargs['size'])
if vol_name not in self._volumes_list:
return self._errors['CMMVC5753E']
curr_size = int(self._volumes_list[vol_name]['capacity'])
addition = size * units.GiB
self._volumes_list[vol_name]['capacity'] = str(curr_size + addition)
return ('', '')
def _get_fcmap_info(self, vol_name):
ret_vals = {
'fc_id': '',
'fc_name': '',
'fc_map_count': '0',
}
for k, fcmap in self._fcmappings_list.iteritems():
if ((fcmap['source'] == vol_name) or
(fcmap['target'] == vol_name)):
ret_vals['fc_id'] = fcmap['id']
ret_vals['fc_name'] = fcmap['name']
ret_vals['fc_map_count'] = '1'
return ret_vals
# List information about vdisks
def _cmd_lsvdisk(self, **kwargs):
rows = []
rows.append(['id', 'name', 'IO_group_id', 'IO_group_name',
'status', 'mdisk_grp_id', 'mdisk_grp_name',
'capacity', 'type', 'FC_id', 'FC_name', 'RC_id',
'RC_name', 'vdisk_UID', 'fc_map_count', 'copy_count',
'fast_write_state', 'se_copy_count', 'RC_change'])
for k, vol in self._volumes_list.iteritems():
if (('filtervalue' not in kwargs) or
(kwargs['filtervalue'] == 'name=' + vol['name'])):
fcmap_info = self._get_fcmap_info(vol['name'])
if 'bytes' in kwargs:
cap = self._convert_bytes_units(vol['capacity'])
else:
cap = vol['capacity']
rows.append([str(vol['id']), vol['name'], vol['IO_group_id'],
vol['IO_group_name'], 'online', '0',
self._flags['storwize_svc_volpool_name'],
cap, 'striped',
fcmap_info['fc_id'], fcmap_info['fc_name'],
'', '', vol['uid'],
fcmap_info['fc_map_count'], '1', 'empty',
'1', 'no'])
if 'obj' not in kwargs:
return self._print_info_cmd(rows=rows, **kwargs)
else:
if kwargs['obj'] not in self._volumes_list:
return self._errors['CMMVC5754E']
vol = self._volumes_list[kwargs['obj']]
fcmap_info = self._get_fcmap_info(vol['name'])
cap = vol['capacity']
cap_u = vol['used_capacity']
cap_r = vol['real_capacity']
cap_f = vol['free_capacity']
if 'bytes' not in kwargs:
for item in [cap, cap_u, cap_r, cap_f]:
item = self._convert_bytes_units(item)
rows = []
rows.append(['id', str(vol['id'])])
rows.append(['name', vol['name']])
rows.append(['IO_group_id', vol['IO_group_id']])
rows.append(['IO_group_name', vol['IO_group_name']])
rows.append(['status', 'online'])
rows.append(['mdisk_grp_id', '0'])
rows.append([
'mdisk_grp_name',
self._flags['storwize_svc_volpool_name']])
rows.append(['capacity', cap])
rows.append(['type', 'striped'])
rows.append(['formatted', 'no'])
rows.append(['mdisk_id', ''])
rows.append(['mdisk_name', ''])
rows.append(['FC_id', fcmap_info['fc_id']])
rows.append(['FC_name', fcmap_info['fc_name']])
rows.append(['RC_id', ''])
rows.append(['RC_name', ''])
rows.append(['vdisk_UID', vol['uid']])
rows.append(['throttling', '0'])
if self._next_cmd_error['lsvdisk'] == 'blank_pref_node':
rows.append(['preferred_node_id', ''])
self._next_cmd_error['lsvdisk'] = ''
elif self._next_cmd_error['lsvdisk'] == 'no_pref_node':
self._next_cmd_error['lsvdisk'] = ''
else:
rows.append(['preferred_node_id', '1'])
rows.append(['fast_write_state', 'empty'])
rows.append(['cache', 'readwrite'])
rows.append(['udid', ''])
rows.append(['fc_map_count', fcmap_info['fc_map_count']])
rows.append(['sync_rate', '50'])
rows.append(['copy_count', '1'])
rows.append(['se_copy_count', '0'])
rows.append(['mirror_write_priority', 'latency'])
rows.append(['RC_change', 'no'])
rows.append(['used_capacity', cap_u])
rows.append(['real_capacity', cap_r])
rows.append(['free_capacity', cap_f])
rows.append(['autoexpand', vol['autoexpand']])
rows.append(['warning', vol['warning']])
rows.append(['grainsize', vol['grainsize']])
rows.append(['easy_tier', vol['easy_tier']])
rows.append(['compressed_copy', vol['compressed_copy']])
if 'nohdr' in kwargs:
for index in range(len(rows)):
rows[index] = ' '.join(rows[index][1:])
if 'delim' in kwargs:
for index in range(len(rows)):
rows[index] = kwargs['delim'].join(rows[index])
return ('%s' % '\n'.join(rows), '')
def _cmd_lsiogrp(self, **kwargs):
rows = [None] * 6
rows[0] = ['id', 'name', 'node_count', 'vdisk_count', 'host_count']
rows[1] = ['0', 'io_grp0', '2', '0', '4']
rows[2] = ['1', 'io_grp1', '2', '0', '4']
rows[3] = ['2', 'io_grp2', '0', '0', '4']
rows[4] = ['3', 'io_grp3', '0', '0', '4']
rows[5] = ['4', 'recovery_io_grp', '0', '0', '0']
return self._print_info_cmd(rows=rows, **kwargs)
def _add_port_to_host(self, host_info, **kwargs):
if 'iscsiname' in kwargs:
added_key = 'iscsi_names'
added_val = kwargs['iscsiname'].strip('\'\"')
elif 'hbawwpn' in kwargs:
added_key = 'wwpns'
added_val = kwargs['hbawwpn'].strip('\'\"')
else:
return self._errors['CMMVC5707E']
host_info[added_key].append(added_val)
for k, v in self._hosts_list.iteritems():
if v['id'] == host_info['id']:
continue
for port in v[added_key]:
if port == added_val:
return self._errors['CMMVC6581E']
return ('', '')
# Make a host
def _cmd_mkhost(self, **kwargs):
host_info = {}
host_info['id'] = self._find_unused_id(self._hosts_list)
if 'name' in kwargs:
host_name = kwargs['name'].strip('\'\"')
else:
host_name = 'host' + str(host_info['id'])
if self._is_invalid_name(host_name):
return self._errors['CMMVC6527E']
if host_name in self._hosts_list:
return self._errors['CMMVC6035E']
host_info['host_name'] = host_name
host_info['iscsi_names'] = []
host_info['wwpns'] = []
out, err = self._add_port_to_host(host_info, **kwargs)
if not len(err):
self._hosts_list[host_name] = host_info
return ('Host, id [%s], successfully created' %
(host_info['id']), '')
else:
return (out, err)
# Add ports to an existing host
def _cmd_addhostport(self, **kwargs):
if 'obj' not in kwargs:
return self._errors['CMMVC5701E']
host_name = kwargs['obj'].strip('\'\'')
if host_name not in self._hosts_list:
return self._errors['CMMVC5753E']
host_info = self._hosts_list[host_name]
return self._add_port_to_host(host_info, **kwargs)
# Change host properties
def _cmd_chhost(self, **kwargs):
if 'chapsecret' not in kwargs:
return self._errors['CMMVC5707E']
secret = kwargs['obj'].strip('\'\'')
if 'obj' not in kwargs:
return self._errors['CMMVC5701E']
host_name = kwargs['obj'].strip('\'\'')
if host_name not in self._hosts_list:
return self._errors['CMMVC5753E']
self._hosts_list[host_name]['chapsecret'] = secret
return ('', '')
# Remove a host
def _cmd_rmhost(self, **kwargs):
if 'obj' not in kwargs:
return self._errors['CMMVC5701E']
host_name = kwargs['obj'].strip('\'\'')
if host_name not in self._hosts_list:
return self._errors['CMMVC5753E']
for k, v in self._mappings_list.iteritems():
if (v['host'] == host_name):
return self._errors['CMMVC5871E']
del self._hosts_list[host_name]
return ('', '')
# List information about hosts
    def _cmd_lshost(self, **kwargs):
        """Simulate lshost.

        With no 'obj' argument, emit a summary row per host (optionally
        narrowed by a 'filtervalue' of the form name=<host_name>).  With
        'obj', emit a detailed attribute listing for that single host.
        """
        if 'obj' not in kwargs:
            # Summary listing: one row per (matching) host.
            rows = []
            rows.append(['id', 'name', 'port_count', 'iogrp_count', 'status'])
            found = False
            for k, host in self._hosts_list.iteritems():
                filterstr = 'name=' + host['host_name']
                if (('filtervalue' not in kwargs) or
                        (kwargs['filtervalue'] == filterstr)):
                    rows.append([host['id'], host['host_name'], '1', '4',
                                 'offline'])
                    found = True
            if found:
                return self._print_info_cmd(rows=rows, **kwargs)
            else:
                return ('', '')
        else:
            if kwargs['obj'] not in self._hosts_list:
                return self._errors['CMMVC5754E']
            host = self._hosts_list[kwargs['obj']]
            rows = []
            rows.append(['id', host['id']])
            rows.append(['name', host['host_name']])
            rows.append(['port_count', '1'])
            rows.append(['type', 'generic'])
            rows.append(['mask', '1111'])
            rows.append(['iogrp_count', '4'])
            rows.append(['status', 'online'])
            # Each iSCSI port contributes three detail rows.
            for port in host['iscsi_names']:
                rows.append(['iscsi_name', port])
                rows.append(['node_logged_in_count', '0'])
                rows.append(['state', 'offline'])
            # Each FC port likewise contributes three detail rows.
            for port in host['wwpns']:
                rows.append(['WWPN', port])
                rows.append(['node_logged_in_count', '0'])
                rows.append(['state', 'active'])
            if 'nohdr' in kwargs:
                # Drop the attribute-name column, keeping only values.
                for index in range(len(rows)):
                    rows[index] = ' '.join(rows[index][1:])
            if 'delim' in kwargs:
                for index in range(len(rows)):
                    rows[index] = kwargs['delim'].join(rows[index])
            return ('%s' % '\n'.join(rows), '')
# List iSCSI authorization information about hosts
def _cmd_lsiscsiauth(self, **kwargs):
if self._next_cmd_error['lsiscsiauth'] == 'no_info':
self._next_cmd_error['lsiscsiauth'] = ''
return ('', '')
rows = []
rows.append(['type', 'id', 'name', 'iscsi_auth_method',
'iscsi_chap_secret'])
for k, host in self._hosts_list.iteritems():
method = 'none'
secret = ''
if 'chapsecret' in host:
method = 'chap'
secret = host['chapsecret']
rows.append(['host', host['id'], host['host_name'], method,
secret])
return self._print_info_cmd(rows=rows, **kwargs)
# Create a vdisk-host mapping
    def _cmd_mkvdiskhostmap(self, **kwargs):
        """Simulate mkvdiskhostmap: map a vdisk to a host at a SCSI LUN.

        Requires 'host', 'scsi' (LUN) and 'obj' (volume) arguments.
        Rejects duplicate host/LUN pairs; reusing a LUN on any host
        requires the -force flag.
        """
        mapping_info = {}
        mapping_info['id'] = self._find_unused_id(self._mappings_list)
        if 'host' not in kwargs:
            return self._errors['CMMVC5707E']
        mapping_info['host'] = kwargs['host'].strip('\'\'')
        if 'scsi' not in kwargs:
            return self._errors['CMMVC5707E']
        mapping_info['lun'] = kwargs['scsi'].strip('\'\'')
        if 'obj' not in kwargs:
            return self._errors['CMMVC5707E']
        mapping_info['vol'] = kwargs['obj'].strip('\'\'')
        if mapping_info['vol'] not in self._volumes_list:
            return self._errors['CMMVC5753E']
        if mapping_info['host'] not in self._hosts_list:
            return self._errors['CMMVC5754E']
        # NOTE(review): _mappings_list is keyed by mapping id (see the
        # store below), so this membership test against a volume name
        # looks vestigial and is likely always False -- confirm.
        if mapping_info['vol'] in self._mappings_list:
            return self._errors['CMMVC6071E']
        # A host cannot expose two volumes on the same LUN.
        for k, v in self._mappings_list.iteritems():
            if ((v['host'] == mapping_info['host']) and
                    (v['lun'] == mapping_info['lun'])):
                return self._errors['CMMVC5879E']
        # Reusing a LUN on any other host requires -force.
        for k, v in self._mappings_list.iteritems():
            if (v['lun'] == mapping_info['lun']) and ('force' not in kwargs):
                return self._errors['CMMVC6071E']
        self._mappings_list[mapping_info['id']] = mapping_info
        return ('Virtual Disk to Host map, id [%s], successfully created'
                % (mapping_info['id']), '')
# Delete a vdisk-host mapping
def _cmd_rmvdiskhostmap(self, **kwargs):
if 'host' not in kwargs:
return self._errors['CMMVC5707E']
host = kwargs['host'].strip('\'\'')
if 'obj' not in kwargs:
return self._errors['CMMVC5701E']
vol = kwargs['obj'].strip('\'\'')
mapping_ids = []
for k, v in self._mappings_list.iteritems():
if v['vol'] == vol:
mapping_ids.append(v['id'])
if not mapping_ids:
return self._errors['CMMVC5753E']
this_mapping = None
for mapping_id in mapping_ids:
if self._mappings_list[mapping_id]['host'] == host:
this_mapping = mapping_id
if this_mapping is None:
return self._errors['CMMVC5753E']
del self._mappings_list[this_mapping]
return ('', '')
# List information about vdisk-host mappings
def _cmd_lshostvdiskmap(self, **kwargs):
index = 1
no_hdr = 0
delimeter = ''
host_name = kwargs['obj']
if host_name not in self._hosts_list:
return self._errors['CMMVC5754E']
rows = []
rows.append(['id', 'name', 'SCSI_id', 'vdisk_id', 'vdisk_name',
'vdisk_UID'])
for k, mapping in self._mappings_list.iteritems():
if (host_name == '') or (mapping['host'] == host_name):
volume = self._volumes_list[mapping['vol']]
rows.append([mapping['id'], mapping['host'],
mapping['lun'], volume['id'],
volume['name'], volume['uid']])
return self._print_info_cmd(rows=rows, **kwargs)
# Create a FlashCopy mapping
def _cmd_mkfcmap(self, **kwargs):
source = ''
target = ''
copyrate = kwargs['copyrate'] if 'copyrate' in kwargs else '50'
if 'source' not in kwargs:
return self._errors['CMMVC5707E']
source = kwargs['source'].strip('\'\'')
if source not in self._volumes_list:
return self._errors['CMMVC5754E']
if 'target' not in kwargs:
return self._errors['CMMVC5707E']
target = kwargs['target'].strip('\'\'')
if target not in self._volumes_list:
return self._errors['CMMVC5754E']
if source == target:
return self._errors['CMMVC6303E']
if (self._volumes_list[source]['capacity'] !=
self._volumes_list[target]['capacity']):
return self._errors['CMMVC5924E']
fcmap_info = {}
fcmap_info['source'] = source
fcmap_info['target'] = target
fcmap_info['id'] = self._find_unused_id(self._fcmappings_list)
fcmap_info['name'] = 'fcmap' + fcmap_info['id']
fcmap_info['copyrate'] = copyrate
fcmap_info['progress'] = '0'
fcmap_info['autodelete'] = True if 'autodelete' in kwargs else False
fcmap_info['status'] = 'idle_or_copied'
self._fcmappings_list[fcmap_info['id']] = fcmap_info
return('FlashCopy Mapping, id [' + fcmap_info['id'] +
'], successfully created', '')
def _cmd_gen_prestartfcmap(self, **kwargs):
if 'obj' not in kwargs:
return self._errors['CMMVC5701E']
id_num = kwargs['obj']
if self._next_cmd_error['prestartfcmap'] == 'bad_id':
id_num = -1
self._next_cmd_error['prestartfcmap'] = ''
try:
fcmap = self._fcmappings_list[id_num]
except KeyError:
return self._errors['CMMVC5753E']
return self._state_transition('prepare', fcmap)
def _cmd_gen_startfcmap(self, **kwargs):
if 'obj' not in kwargs:
return self._errors['CMMVC5701E']
id_num = kwargs['obj']
if self._next_cmd_error['startfcmap'] == 'bad_id':
id_num = -1
self._next_cmd_error['startfcmap'] = ''
try:
fcmap = self._fcmappings_list[id_num]
except KeyError:
return self._errors['CMMVC5753E']
return self._state_transition('start', fcmap)
def _cmd_stopfcmap(self, **kwargs):
if 'obj' not in kwargs:
return self._errors['CMMVC5701E']
id_num = kwargs['obj']
try:
fcmap = self._fcmappings_list[id_num]
except KeyError:
return self._errors['CMMVC5753E']
return self._state_transition('stop', fcmap)
def _cmd_rmfcmap(self, **kwargs):
if 'obj' not in kwargs:
return self._errors['CMMVC5701E']
id_num = kwargs['obj']
force = True if 'force' in kwargs else False
if self._next_cmd_error['rmfcmap'] == 'bad_id':
id_num = -1
self._next_cmd_error['rmfcmap'] = ''
try:
fcmap = self._fcmappings_list[id_num]
except KeyError:
return self._errors['CMMVC5753E']
function = 'delete_force' if force else 'delete'
ret = self._state_transition(function, fcmap)
if fcmap['status'] == 'end':
del self._fcmappings_list[id_num]
return ret
def _cmd_lsvdiskfcmappings(self, **kwargs):
if 'obj' not in kwargs:
return self._errors['CMMVC5707E']
vdisk = kwargs['obj']
rows = []
rows.append(['id', 'name'])
for k, v in self._fcmappings_list.iteritems():
if v['source'] == vdisk or v['target'] == vdisk:
rows.append([v['id'], v['name']])
return self._print_info_cmd(rows=rows, **kwargs)
def _cmd_chfcmap(self, **kwargs):
if 'obj' not in kwargs:
return self._errors['CMMVC5707E']
id_num = kwargs['obj']
try:
fcmap = self._fcmappings_list[id_num]
except KeyError:
return self._errors['CMMVC5753E']
for key in ['name', 'copyrate', 'autodelete']:
if key in kwargs:
fcmap[key] = kwargs[key]
return ('', '')
    def _cmd_lsfcmap(self, **kwargs):
        """Simulate lsfcmap: list FlashCopy mappings matching a filter.

        Each call also advances every matching mapping's simulated state
        machine one step (or, with the 'speed_up' error injection, runs
        it to a steady state), and garbage-collects mappings that have
        completed.
        """
        rows = []
        rows.append(['id', 'name', 'source_vdisk_id', 'source_vdisk_name',
                     'target_vdisk_id', 'target_vdisk_name', 'group_id',
                     'group_name', 'status', 'progress', 'copy_rate',
                     'clean_progress', 'incremental', 'partner_FC_id',
                     'partner_FC_name', 'restoring', 'start_time',
                     'rc_controlled'])
        # Assume we always get a filtervalue argument
        filter_key = kwargs['filtervalue'].split('=')[0]
        filter_value = kwargs['filtervalue'].split('=')[1]
        to_delete = []
        for k, v in self._fcmappings_list.iteritems():
            if str(v[filter_key]) == filter_value:
                source = self._volumes_list[v['source']]
                target = self._volumes_list[v['target']]
                # Advance the simulated FlashCopy state machine one step.
                self._state_transition('wait', v)
                if self._next_cmd_error['lsfcmap'] == 'speed_up':
                    self._next_cmd_error['lsfcmap'] = ''
                    curr_state = v['status']
                    # Keep stepping until the state stops changing.
                    while self._state_transition('wait', v) == ("", ""):
                        if curr_state == v['status']:
                            break
                        curr_state = v['status']
                if ((v['status'] == 'idle_or_copied' and v['autodelete'] and
                     v['progress'] == '100') or (v['status'] == 'end')):
                    # Finished auto-delete/ended maps disappear from view.
                    to_delete.append(k)
                else:
                    rows.append([v['id'], v['name'], source['id'],
                                 source['name'], target['id'], target['name'],
                                 '', '', v['status'], v['progress'],
                                 v['copyrate'], '100', 'off', '', '', 'no', '',
                                 'no'])
        for d in to_delete:
            del self._fcmappings_list[d]
        return self._print_info_cmd(rows=rows, **kwargs)
    def _cmd_migratevdisk(self, **kwargs):
        """Simulate migratevdisk: move a vdisk between storage pools.

        The primary pool is self._volumes_list; 'openstack2' and
        'openstack3' live in self._other_pools.
        """
        if 'mdiskgrp' not in kwargs or 'vdisk' not in kwargs:
            return self._errors['CMMVC5707E']
        mdiskgrp = kwargs['mdiskgrp'].strip('\'\'')
        vdisk = kwargs['vdisk'].strip('\'\'')
        # Locate the pool currently holding the vdisk.
        if vdisk in self._volumes_list:
            curr_mdiskgrp = self._volumes_list
        else:
            # NOTE(review): if _other_pools is a dict this iterates pool
            # *names*, making 'vdisk in pool' a substring test rather
            # than a pool-membership test -- confirm intended behavior.
            for pool in self._other_pools:
                if vdisk in pool:
                    curr_mdiskgrp = pool
                    break
            else:
                # Volume not found in any pool.
                return self._errors['CMMVC5754E']
        # Resolve the destination pool by name.
        if mdiskgrp == self._flags['storwize_svc_volpool_name']:
            tgt_mdiskgrp = self._volumes_list
        elif mdiskgrp == 'openstack2':
            tgt_mdiskgrp = self._other_pools['openstack2']
        elif mdiskgrp == 'openstack3':
            tgt_mdiskgrp = self._other_pools['openstack3']
        else:
            return self._errors['CMMVC5754E']
        if curr_mdiskgrp == tgt_mdiskgrp:
            return self._errors['CMMVC6430E']
        # Move the volume record between the pool dictionaries.
        vol = curr_mdiskgrp[vdisk]
        tgt_mdiskgrp[vdisk] = vol
        del curr_mdiskgrp[vdisk]
        return ('', '')
    def _cmd_addvdiskcopy(self, **kwargs):
        """Simulate addvdiskcopy: add a copy of a vdisk in a pool.

        Builds a copy record and stores it under the volume's 'copies'
        dict, keyed by a newly allocated copy id.
        """
        if 'obj' not in kwargs:
            return self._errors['CMMVC5701E']
        vol_name = kwargs['obj'].strip('\'\'')
        if vol_name not in self._volumes_list:
            return self._errors['CMMVC5753E']
        vol = self._volumes_list[vol_name]
        if 'mdiskgrp' not in kwargs:
            return self._errors['CMMVC5707E']
        mdiskgrp = kwargs['mdiskgrp'].strip('\'\'')
        copy_info = {}
        copy_info['id'] = self._find_unused_id(vol['copies'])
        copy_info['status'] = 'online'
        copy_info['sync'] = 'no'
        copy_info['primary'] = 'no'
        copy_info['mdisk_grp_name'] = mdiskgrp
        # NOTE(review): an unrecognized mdiskgrp leaves 'mdisk_grp_id'
        # unset, which would make a later lsvdiskcopy raise KeyError --
        # presumably callers only pass known pool names; confirm.
        if mdiskgrp == self._flags['storwize_svc_volpool_name']:
            copy_info['mdisk_grp_id'] = '1'
        elif mdiskgrp == 'openstack2':
            copy_info['mdisk_grp_id'] = '2'
        elif mdiskgrp == 'openstack3':
            copy_info['mdisk_grp_id'] = '3'
        # NOTE(review): 'easy_tier' is only set when the easytier kwarg
        # is present, yet lsvdiskcopy reads it unconditionally --
        # confirm the driver always supplies -easytier.
        if 'easytier' in kwargs:
            if kwargs['easytier'] == 'on':
                copy_info['easy_tier'] = 'on'
            else:
                copy_info['easy_tier'] = 'off'
        # compressed_copy only applies to space-efficient (rsize) copies.
        if 'rsize' in kwargs:
            if 'compressed' in kwargs:
                copy_info['compressed_copy'] = 'yes'
            else:
                copy_info['compressed_copy'] = 'no'
        vol['copies'][copy_info['id']] = copy_info
        return ('Vdisk [%(vid)s] copy [%(cid)s] successfully created' %
                {'vid': vol['id'], 'cid': copy_info['id']}, '')
    def _cmd_lsvdiskcopy(self, **kwargs):
        """Simulate lsvdiskcopy.

        Without 'copy', list all copies of the vdisk in tabular form;
        with 'copy', print the detailed attributes of that one copy.
        """
        if 'obj' not in kwargs:
            return self._errors['CMMVC5804E']
        name = kwargs['obj']
        vol = self._volumes_list[name]
        rows = []
        rows.append(['vdisk_id', 'vdisk_name', 'copy_id', 'status', 'sync',
                     'primary', 'mdisk_grp_id', 'mdisk_grp_name', 'capacity',
                     'type', 'se_copy', 'easy_tier', 'easy_tier_status',
                     'compressed_copy'])
        for k, copy in vol['copies'].iteritems():
            rows.append([vol['id'], vol['name'], copy['id'],
                         copy['status'], copy['sync'], copy['primary'],
                         copy['mdisk_grp_id'], copy['mdisk_grp_name'],
                         vol['capacity'], 'striped', 'yes', copy['easy_tier'],
                         'inactive', copy['compressed_copy']])
        if 'copy' not in kwargs:
            return self._print_info_cmd(rows=rows, **kwargs)
        else:
            copy_id = kwargs['copy'].strip('\'\'')
            if copy_id not in vol['copies']:
                return self._errors['CMMVC6353E']
            copy = vol['copies'][copy_id]
            rows = []
            rows.append(['vdisk_id', vol['id']])
            rows.append(['vdisk_name', vol['name']])
            rows.append(['capacity', vol['capacity']])
            rows.append(['copy_id', copy['id']])
            rows.append(['status', copy['status']])
            rows.append(['sync', copy['sync']])
            # Side effect: querying a copy marks it as synchronized, so
            # a subsequent listing reports sync completion.
            copy['sync'] = 'yes'
            rows.append(['primary', copy['primary']])
            rows.append(['mdisk_grp_id', copy['mdisk_grp_id']])
            rows.append(['mdisk_grp_name', copy['mdisk_grp_name']])
            rows.append(['easy_tier', copy['easy_tier']])
            rows.append(['easy_tier_status', 'inactive'])
            rows.append(['compressed_copy', copy['compressed_copy']])
            if 'delim' in kwargs:
                for index in range(len(rows)):
                    rows[index] = kwargs['delim'].join(rows[index])
            return ('%s' % '\n'.join(rows), '')
def _cmd_rmvdiskcopy(self, **kwargs):
if 'obj' not in kwargs:
return self._errors['CMMVC5701E']
vol_name = kwargs['obj'].strip('\'\'')
if 'copy' not in kwargs:
return self._errors['CMMVC5707E']
copy_id = kwargs['copy'].strip('\'\'')
if vol_name not in self._volumes_list:
return self._errors['CMMVC5753E']
vol = self._volumes_list[vol_name]
if copy_id not in vol['copies']:
return self._errors['CMMVC6353E']
del vol['copies'][copy_id]
return ('', '')
def _add_host_to_list(self, connector):
host_info = {}
host_info['id'] = self._find_unused_id(self._hosts_list)
host_info['host_name'] = connector['host']
host_info['iscsi_names'] = []
host_info['wwpns'] = []
if 'initiator' in connector:
host_info['iscsi_names'].append(connector['initiator'])
if 'wwpns' in connector:
host_info['wwpns'] = host_info['wwpns'] + connector['wwpns']
self._hosts_list[connector['host']] = host_info
def _host_in_list(self, host_name):
for k, v in self._hosts_list.iteritems():
if k.startswith(host_name):
return k
return None
# The main function to run commands on the management simulator
def execute_command(self, cmd, check_exit_code=True):
try:
kwargs = self._cmd_to_dict(cmd)
except IndexError:
return self._errors['CMMVC5707E']
command = kwargs['cmd']
del kwargs['cmd']
if command == 'lsmdiskgrp':
out, err = self._cmd_lsmdiskgrp(**kwargs)
elif command == 'lslicense':
out, err = self._cmd_lslicense(**kwargs)
elif command == 'lssystem':
out, err = self._cmd_lssystem(**kwargs)
elif command == 'lsnodecanister':
out, err = self._cmd_lsnodecanister(**kwargs)
elif command == 'lsnode':
out, err = self._cmd_lsnode(**kwargs)
elif command == 'lsportip':
out, err = self._cmd_lsportip(**kwargs)
elif command == 'lsfabric':
out, err = self._cmd_lsfabric(**kwargs)
elif command == 'mkvdisk':
out, err = self._cmd_mkvdisk(**kwargs)
elif command == 'rmvdisk':
out, err = self._cmd_rmvdisk(**kwargs)
elif command == 'expandvdisksize':
out, err = self._cmd_expandvdisksize(**kwargs)
elif command == 'lsvdisk':
out, err = self._cmd_lsvdisk(**kwargs)
elif command == 'lsiogrp':
out, err = self._cmd_lsiogrp(**kwargs)
elif command == 'mkhost':
out, err = self._cmd_mkhost(**kwargs)
elif command == 'addhostport':
out, err = self._cmd_addhostport(**kwargs)
elif command == 'chhost':
out, err = self._cmd_chhost(**kwargs)
elif command == 'rmhost':
out, err = self._cmd_rmhost(**kwargs)
elif command == 'lshost':
out, err = self._cmd_lshost(**kwargs)
elif command == 'lsiscsiauth':
out, err = self._cmd_lsiscsiauth(**kwargs)
elif command == 'mkvdiskhostmap':
out, err = self._cmd_mkvdiskhostmap(**kwargs)
elif command == 'rmvdiskhostmap':
out, err = self._cmd_rmvdiskhostmap(**kwargs)
elif command == 'lshostvdiskmap':
out, err = self._cmd_lshostvdiskmap(**kwargs)
elif command == 'mkfcmap':
out, err = self._cmd_mkfcmap(**kwargs)
elif command == 'prestartfcmap':
out, err = self._cmd_gen_prestartfcmap(**kwargs)
elif command == 'startfcmap':
out, err = self._cmd_gen_startfcmap(**kwargs)
elif command == 'stopfcmap':
out, err = self._cmd_stopfcmap(**kwargs)
elif command == 'rmfcmap':
out, err = self._cmd_rmfcmap(**kwargs)
elif command == 'chfcmap':
out, err = self._cmd_chfcmap(**kwargs)
elif command == 'lsfcmap':
out, err = self._cmd_lsfcmap(**kwargs)
elif command == 'lsvdiskfcmappings':
out, err = self._cmd_lsvdiskfcmappings(**kwargs)
elif command == 'migratevdisk':
out, err = self._cmd_migratevdisk(**kwargs)
elif command == 'addvdiskcopy':
out, err = self._cmd_addvdiskcopy(**kwargs)
elif command == 'lsvdiskcopy':
out, err = self._cmd_lsvdiskcopy(**kwargs)
elif command == 'rmvdiskcopy':
out, err = self._cmd_rmvdiskcopy(**kwargs)
else:
out, err = ('', 'ERROR: Unsupported command')
if (check_exit_code) and (len(err) != 0):
raise processutils.ProcessExecutionError(exit_code=1,
stdout=out,
stderr=err,
cmd=' '.join(cmd))
return (out, err)
# After calling this function, the next call to the specified command will
# result in in the error specified
def error_injection(self, cmd, error):
self._next_cmd_error[cmd] = error
class StorwizeSVCFakeDriver(storwize_svc.StorwizeSVCDriver):
    """Storwize driver wired to the in-memory management simulator.

    Overrides _run_ssh so every CLI command is serviced by the
    simulator installed via set_fake_storage instead of a real array.
    """
    def __init__(self, *args, **kwargs):
        super(StorwizeSVCFakeDriver, self).__init__(*args, **kwargs)
    def set_fake_storage(self, fake):
        # Install the simulator instance that will service CLI commands.
        self.fake_storage = fake
    def _run_ssh(self, cmd, check_exit_code=True):
        """Divert the driver's SSH CLI execution to the simulator.

        Logs the command and its output like the real _run_ssh, and
        re-raises ProcessExecutionError after logging its output.
        """
        try:
            LOG.debug(_('Run CLI command: %s') % cmd)
            ret = self.fake_storage.execute_command(cmd, check_exit_code)
            (stdout, stderr) = ret
            LOG.debug(_('CLI output:\n stdout: %(stdout)s\n stderr: '
                        '%(stderr)s') % {'stdout': stdout, 'stderr': stderr})
        except processutils.ProcessExecutionError as e:
            with excutils.save_and_reraise_exception():
                LOG.debug(_('CLI Exception output:\n stdout: %(out)s\n '
                            'stderr: %(err)s') % {'out': e.stdout,
                                                  'err': e.stderr})
        return ret
class StorwizeSVCFakeSock:
    """Minimal socket stand-in; timeouts are simply ignored."""
    def settimeout(self, time):
        """Accept and discard the timeout value."""
        return None
class StorwizeSVCDriverTestCase(test.TestCase):
    def setUp(self):
        """Prepare either a simulator-backed or a live-array test driver.

        With self.USESIM set, the driver under test is a
        StorwizeSVCFakeDriver talking to StorwizeSVCManagementSimulator;
        otherwise a real StorwizeSVCDriver is configured against the
        array described by the default flags.
        """
        super(StorwizeSVCDriverTestCase, self).setUp()
        self.USESIM = True
        if self.USESIM:
            self.driver = StorwizeSVCFakeDriver(
                configuration=conf.Configuration(None))
            self._def_flags = {'san_ip': 'hostname',
                               'san_login': 'user',
                               'san_password': 'pass',
                               'storwize_svc_volpool_name': 'openstack',
                               'storwize_svc_flashcopy_timeout': 20,
                               # Test ignore capitalization
                               'storwize_svc_connection_protocol': 'iScSi',
                               'storwize_svc_multipath_enabled': False}
            # Randomized WWPNs/initiator so reruns do not collide.
            wwpns = [str(random.randint(0, 9999999999999999)).zfill(16),
                     str(random.randint(0, 9999999999999999)).zfill(16)]
            initiator = 'test.initiator.%s' % str(random.randint(10000, 99999))
            self._connector = {'ip': '1.234.56.78',
                               'host': 'storwize-svc-test',
                               'wwpns': wwpns,
                               'initiator': initiator}
            self.sim = StorwizeSVCManagementSimulator('openstack')
            self.driver.set_fake_storage(self.sim)
        else:
            self.driver = storwize_svc.StorwizeSVCDriver(
                configuration=conf.Configuration(None))
            self._def_flags = {'san_ip': '1.111.11.11',
                               'san_login': 'user',
                               'san_password': 'password',
                               'storwize_svc_volpool_name': 'openstack',
                               # Test ignore capitalization
                               'storwize_svc_connection_protocol': 'iScSi',
                               'storwize_svc_multipath_enabled': False,
                               'ssh_conn_timeout': 0}
            config_group = self.driver.configuration.config_group
            self.driver.configuration.set_override('rootwrap_config',
                                                   '/etc/cinder/rootwrap.conf',
                                                   config_group)
            self._connector = utils.brick_get_connector_properties()
        self._reset_flags()
        self.driver.db = StorwizeSVCFakeDB()
        self.driver.do_setup(None)
        self.driver.check_for_setup_error()
        # Stub out sleeps and poll intervals so the tests run fast.
        self.stubs.Set(storwize_svc.time, 'sleep', lambda s: None)
        self.stubs.Set(greenthread, 'sleep', lambda *x, **y: None)
        self.stubs.Set(storwize_svc, 'CHECK_FCMAPPING_INTERVAL', 0)
def _set_flag(self, flag, value):
group = self.driver.configuration.config_group
self.driver.configuration.set_override(flag, value, group)
def _reset_flags(self):
self.driver.configuration.local_conf.reset()
for k, v in self._def_flags.iteritems():
self._set_flag(k, v)
def _assert_vol_exists(self, name, exists):
is_vol_defined = self.driver._is_vdisk_defined(name)
self.assertEqual(is_vol_defined, exists)
    def test_storwize_svc_connectivity(self):
        """Exercise do_setup/check_for_setup_error validation paths."""
        # Make sure we detect if the pool doesn't exist
        no_exist_pool = 'i-dont-exist-%s' % random.randint(10000, 99999)
        self._set_flag('storwize_svc_volpool_name', no_exist_pool)
        self.assertRaises(exception.InvalidInput,
                          self.driver.do_setup, None)
        self._reset_flags()
        # Check the case where the user didn't configure IP addresses
        # as well as receiving unexpected results from the storage
        if self.USESIM:
            self.sim.error_injection('lsnodecanister', 'header_mismatch')
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.driver.do_setup, None)
            self.sim.error_injection('lsnodecanister', 'remove_field')
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.driver.do_setup, None)
            self.sim.error_injection('lsportip', 'header_mismatch')
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.driver.do_setup, None)
            self.sim.error_injection('lsportip', 'remove_field')
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.driver.do_setup, None)
        # Check with bad parameters
        self._set_flag('san_ip', '')
        self.assertRaises(exception.InvalidInput,
                          self.driver.check_for_setup_error)
        self._reset_flags()
        # Neither a password nor a private key configured.
        self._set_flag('san_password', None)
        self._set_flag('san_private_key', None)
        self.assertRaises(exception.InvalidInput,
                          self.driver.check_for_setup_error)
        self._reset_flags()
        self._set_flag('storwize_svc_vol_rsize', 101)
        self.assertRaises(exception.InvalidInput,
                          self.driver.check_for_setup_error)
        self._reset_flags()
        self._set_flag('storwize_svc_vol_warning', 101)
        self.assertRaises(exception.InvalidInput,
                          self.driver.check_for_setup_error)
        self._reset_flags()
        self._set_flag('storwize_svc_vol_grainsize', 42)
        self.assertRaises(exception.InvalidInput,
                          self.driver.check_for_setup_error)
        self._reset_flags()
        self._set_flag('storwize_svc_flashcopy_timeout', 601)
        self.assertRaises(exception.InvalidInput,
                          self.driver.check_for_setup_error)
        self._reset_flags()
        # Compression requires a thin-provisioned (rsize != -1) volume.
        self._set_flag('storwize_svc_vol_compression', True)
        self._set_flag('storwize_svc_vol_rsize', -1)
        self.assertRaises(exception.InvalidInput,
                          self.driver.check_for_setup_error)
        self._reset_flags()
        self._set_flag('storwize_svc_connection_protocol', 'foo')
        self.assertRaises(exception.InvalidInput,
                          self.driver.check_for_setup_error)
        self._reset_flags()
        self._set_flag('storwize_svc_vol_iogrp', 5)
        self.assertRaises(exception.InvalidInput,
                          self.driver.check_for_setup_error)
        self._reset_flags()
        if self.USESIM:
            # Compression requested but not licensed on the array.
            self.sim.error_injection('lslicense', 'no_compression')
            self._set_flag('storwize_svc_vol_compression', True)
            self.driver.do_setup(None)
            self.assertRaises(exception.InvalidInput,
                              self.driver.check_for_setup_error)
            self._reset_flags()
        # Finally, check with good parameters
        self.driver.do_setup(None)
def _generate_vol_info(self, vol_name, vol_id):
rand_id = str(random.randint(10000, 99999))
if vol_name:
return {'name': 'snap_volume%s' % rand_id,
'volume_name': vol_name,
'id': rand_id,
'volume_id': vol_id,
'volume_size': 10}
else:
return {'name': 'test_volume%s' % rand_id,
'size': 10,
'id': '%s' % rand_id,
'volume_type_id': None}
    def _create_test_vol(self, opts):
        """Create a typed volume, capture its vdisk attributes, clean up.

        Makes a temporary volume type from opts, creates a volume of
        that type, reads back the vdisk attributes, then deletes both
        the volume and the type.  Returns the attributes dict.
        """
        ctxt = context.get_admin_context()
        type_ref = volume_types.create(ctxt, 'testtype', opts)
        volume = self._generate_vol_info(None, None)
        volume['volume_type_id'] = type_ref['id']
        self.driver.create_volume(volume)
        attrs = self.driver._get_vdisk_attributes(volume['name'])
        self.driver.delete_volume(volume)
        volume_types.destroy(ctxt, type_ref['id'])
        return attrs
    def _fail_prepare_fc_map(self, fc_map_id, source, target):
        # Stand-in for _call_prepare_fc_map that always fails; used to
        # exercise the driver's FlashCopy error-cleanup paths.
        raise processutils.ProcessExecutionError(exit_code=1,
                                                 stdout='',
                                                 stderr='unit-test-fail',
                                                 cmd='prestartfcmap id')
    def test_storwize_svc_snapshots(self):
        """Snapshot creation: timeout, CLI failures, success, cleanup."""
        vol1 = self._generate_vol_info(None, None)
        self.driver.create_volume(vol1)
        self.driver.db.volume_set(vol1)
        snap1 = self._generate_vol_info(vol1['name'], vol1['id'])
        # Test timeout and volume cleanup
        self._set_flag('storwize_svc_flashcopy_timeout', 1)
        self.assertRaises(exception.InvalidSnapshot,
                          self.driver.create_snapshot, snap1)
        self._assert_vol_exists(snap1['name'], False)
        self._reset_flags()
        # Test prestartfcmap, startfcmap, and rmfcmap failing
        orig = self.driver._call_prepare_fc_map
        self.driver._call_prepare_fc_map = self._fail_prepare_fc_map
        self.assertRaises(processutils.ProcessExecutionError,
                          self.driver.create_snapshot, snap1)
        self.driver._call_prepare_fc_map = orig
        if self.USESIM:
            # Failed snapshots must not leave a vdisk behind.
            self.sim.error_injection('lsfcmap', 'speed_up')
            self.sim.error_injection('startfcmap', 'bad_id')
            self.assertRaises(processutils.ProcessExecutionError,
                              self.driver.create_snapshot, snap1)
            self._assert_vol_exists(snap1['name'], False)
            self.sim.error_injection('prestartfcmap', 'bad_id')
            self.assertRaises(processutils.ProcessExecutionError,
                              self.driver.create_snapshot, snap1)
            self._assert_vol_exists(snap1['name'], False)
        # Test successful snapshot
        self.driver.create_snapshot(snap1)
        self._assert_vol_exists(snap1['name'], True)
        # Try to create a snapshot from an non-existing volume - should fail
        snap_novol = self._generate_vol_info('undefined-vol', '12345')
        self.assertRaises(exception.VolumeNotFound,
                          self.driver.create_snapshot,
                          snap_novol)
        # We support deleting a volume that has snapshots, so delete the volume
        # first
        self.driver.delete_volume(vol1)
        self.driver.delete_snapshot(snap1)
    def test_storwize_svc_create_volfromsnap_clone(self):
        """Volume-from-snapshot and clone paths, with error cleanup."""
        vol1 = self._generate_vol_info(None, None)
        self.driver.create_volume(vol1)
        self.driver.db.volume_set(vol1)
        snap1 = self._generate_vol_info(vol1['name'], vol1['id'])
        self.driver.create_snapshot(snap1)
        vol2 = self._generate_vol_info(None, None)
        vol3 = self._generate_vol_info(None, None)
        # Try to create a volume from a non-existing snapshot
        snap_novol = self._generate_vol_info('undefined-vol', '12345')
        vol_novol = self._generate_vol_info(None, None)
        self.assertRaises(exception.SnapshotNotFound,
                          self.driver.create_volume_from_snapshot,
                          vol_novol,
                          snap_novol)
        # Fail the snapshot
        orig = self.driver._call_prepare_fc_map
        self.driver._call_prepare_fc_map = self._fail_prepare_fc_map
        self.assertRaises(processutils.ProcessExecutionError,
                          self.driver.create_volume_from_snapshot,
                          vol2, snap1)
        self.driver._call_prepare_fc_map = orig
        # Failure must not leave the target vdisk behind.
        self._assert_vol_exists(vol2['name'], False)
        # Try to create where source size != target size
        vol2['size'] += 1
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.create_volume_from_snapshot,
                          vol2, snap1)
        self._assert_vol_exists(vol2['name'], False)
        vol2['size'] -= 1
        # Succeed
        if self.USESIM:
            self.sim.error_injection('lsfcmap', 'speed_up')
        self.driver.create_volume_from_snapshot(vol2, snap1)
        self._assert_vol_exists(vol2['name'], True)
        # Try to clone where source size != target size
        vol3['size'] += 1
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.create_cloned_volume,
                          vol3, vol2)
        self._assert_vol_exists(vol3['name'], False)
        vol3['size'] -= 1
        if self.USESIM:
            self.sim.error_injection('lsfcmap', 'speed_up')
        self.driver.create_cloned_volume(vol3, vol2)
        self._assert_vol_exists(vol3['name'], True)
        # Delete in the 'opposite' order to make sure it works
        self.driver.delete_volume(vol3)
        self._assert_vol_exists(vol3['name'], False)
        self.driver.delete_volume(vol2)
        self._assert_vol_exists(vol2['name'], False)
        self.driver.delete_snapshot(snap1)
        self._assert_vol_exists(snap1['name'], False)
        self.driver.delete_volume(vol1)
        self._assert_vol_exists(vol1['name'], False)
    def test_storwize_svc_volumes(self):
        """Basic volume lifecycle: create, export, attributes, delete."""
        # Create a first volume
        volume = self._generate_vol_info(None, None)
        self.driver.create_volume(volume)
        self.driver.ensure_export(None, volume)
        # Do nothing
        self.driver.create_export(None, volume)
        self.driver.remove_export(None, volume)
        # Make sure volume attributes are as they should be
        attributes = self.driver._get_vdisk_attributes(volume['name'])
        attr_size = float(attributes['capacity']) / (1024 ** 3)  # bytes to GB
        self.assertEqual(attr_size, float(volume['size']))
        pool = self.driver.configuration.local_conf.storwize_svc_volpool_name
        self.assertEqual(attributes['mdisk_grp_name'], pool)
        # Try to create the volume again (should fail)
        self.assertRaises(processutils.ProcessExecutionError,
                          self.driver.create_volume,
                          volume)
        # Try to delete a volume that doesn't exist (should not fail)
        vol_no_exist = {'name': 'i_dont_exist'}
        self.driver.delete_volume(vol_no_exist)
        # Ensure export for volume that doesn't exist (should not fail)
        self.driver.ensure_export(None, vol_no_exist)
        # Delete the volume
        self.driver.delete_volume(volume)
    def test_storwize_svc_volume_params(self):
        """Volume-type options are reflected in vdisk attributes."""
        # Option test matrix
        # Option Value Covered by test #
        # rsize -1 1
        # rsize 2 2,3
        # warning 0 2
        # warning 80 3
        # autoexpand True 2
        # autoexpand False 3
        # grainsize 32 2
        # grainsize 256 3
        # compression True 4
        # compression False 2,3
        # easytier True 1,3
        # easytier False 2
        # iogrp 0 1
        # iogrp 1 2
        opts_list = []
        chck_list = []
        opts_list.append({'rsize': -1, 'easytier': True, 'iogrp': 0})
        chck_list.append({'free_capacity': '0', 'easy_tier': 'on',
                          'IO_group_id': '0'})
        test_iogrp = 1 if self.USESIM else 0
        opts_list.append({'rsize': 2, 'compression': False, 'warning': 0,
                          'autoexpand': True, 'grainsize': 32,
                          'easytier': False, 'iogrp': test_iogrp})
        chck_list.append({'-free_capacity': '0', 'compressed_copy': 'no',
                          'warning': '0', 'autoexpand': 'on',
                          'grainsize': '32', 'easy_tier': 'off',
                          'IO_group_id': str(test_iogrp)})
        opts_list.append({'rsize': 2, 'compression': False, 'warning': 80,
                          'autoexpand': False, 'grainsize': 256,
                          'easytier': True})
        chck_list.append({'-free_capacity': '0', 'compressed_copy': 'no',
                          'warning': '80', 'autoexpand': 'off',
                          'grainsize': '256', 'easy_tier': 'on'})
        opts_list.append({'rsize': 2, 'compression': True})
        chck_list.append({'-free_capacity': '0',
                          'compressed_copy': 'yes'})
        for idx in range(len(opts_list)):
            attrs = self._create_test_vol(opts_list[idx])
            for k, v in chck_list[idx].iteritems():
                try:
                    # A key prefixed with '-' means the attribute must
                    # NOT equal the given value.
                    if k[0] == '-':
                        k = k[1:]
                        self.assertNotEqual(attrs[k], v)
                    else:
                        self.assertEqual(attrs[k], v)
                except processutils.ProcessExecutionError as e:
                    # CMMVC7050E: attribute does not apply to this kind
                    # of vdisk copy; anything else is a real failure.
                    if 'CMMVC7050E' not in e.stderr:
                        raise
    def test_storwize_svc_unicode_host_and_volume_names(self):
        """Hosts/volumes with unicode and punctuation in names work."""
        # We'll check with iSCSI only - nothing protocol-dependent here
        self._set_flag('storwize_svc_connection_protocol', 'iSCSI')
        self.driver.do_setup(None)
        rand_id = random.randint(10000, 99999)
        volume1 = {'name': u'unicode1_volume%s' % rand_id,
                   'size': 2,
                   'id': 1,
                   'volume_type_id': None}
        self.driver.create_volume(volume1)
        self._assert_vol_exists(volume1['name'], True)
        # A non-string host cannot be turned into a host-name prefix.
        self.assertRaises(exception.NoValidHost,
                          self.driver._connector_to_hostname_prefix,
                          {'host': 12345})
        # Add a host first to make life interesting (this host and
        # conn['host'] should be translated to the same prefix, and the
        # initiator should differentiate)
        tmpconn1 = {'initiator': u'unicode:initiator1.%s' % rand_id,
                    'ip': '10.10.10.10',
                    'host': u'unicode.foo}.bar{.baz-%s' % rand_id}
        self.driver._create_host(tmpconn1)
        # Add a host with a different prefix
        tmpconn2 = {'initiator': u'unicode:initiator2.%s' % rand_id,
                    'ip': '10.10.10.11',
                    'host': u'unicode.hello.world-%s' % rand_id}
        self.driver._create_host(tmpconn2)
        conn = {'initiator': u'unicode:initiator3.%s' % rand_id,
                'ip': '10.10.10.12',
                'host': u'unicode.foo}.bar}.baz-%s' % rand_id}
        self.driver.initialize_connection(volume1, conn)
        host_name = self.driver._get_host_from_connector(conn)
        self.assertIsNotNone(host_name)
        self.driver.terminate_connection(volume1, conn)
        # Terminating the last connection removes the host entirely.
        host_name = self.driver._get_host_from_connector(conn)
        self.assertIsNone(host_name)
        self.driver.delete_volume(volume1)
        # Clean up temporary hosts
        for tmpconn in [tmpconn1, tmpconn2]:
            host_name = self.driver._get_host_from_connector(tmpconn)
            self.assertIsNotNone(host_name)
            self.driver._delete_host(host_name)
def test_storwize_svc_validate_connector(self):
conn_neither = {'host': 'host'}
conn_iscsi = {'host': 'host', 'initiator': 'foo'}
conn_fc = {'host': 'host', 'wwpns': 'bar'}
conn_both = {'host': 'host', 'initiator': 'foo', 'wwpns': 'bar'}
self.driver._enabled_protocols = set(['iSCSI'])
self.driver.validate_connector(conn_iscsi)
self.driver.validate_connector(conn_both)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.validate_connector, conn_fc)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.validate_connector, conn_neither)
self.driver._enabled_protocols = set(['FC'])
self.driver.validate_connector(conn_fc)
self.driver.validate_connector(conn_both)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.validate_connector, conn_iscsi)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.validate_connector, conn_neither)
self.driver._enabled_protocols = set(['iSCSI', 'FC'])
self.driver.validate_connector(conn_iscsi)
self.driver.validate_connector(conn_fc)
self.driver.validate_connector(conn_both)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.validate_connector, conn_neither)
    def test_storwize_svc_host_maps(self):
        """Exercise host/volume mapping for both protocols, CHAP handling,
        preferred-node error injection, and automatic host cleanup."""
        # Create two volumes to be used in mappings
        ctxt = context.get_admin_context()

        volume1 = self._generate_vol_info(None, None)
        self.driver.create_volume(volume1)
        volume2 = self._generate_vol_info(None, None)
        self.driver.create_volume(volume2)

        # Create volume types that we created
        types = {}
        for protocol in ['FC', 'iSCSI']:
            opts = {'storage_protocol': '<in> ' + protocol}
            types[protocol] = volume_types.create(ctxt, protocol, opts)

        for protocol in ['FC', 'iSCSI']:
            volume1['volume_type_id'] = types[protocol]['id']
            volume2['volume_type_id'] = types[protocol]['id']

            # Check case where no hosts exist
            if self.USESIM:
                ret = self.driver._get_host_from_connector(self._connector)
                self.assertIsNone(ret)

            # Make sure that the volumes have been created
            self._assert_vol_exists(volume1['name'], True)
            self._assert_vol_exists(volume2['name'], True)

            # Initialize connection from the first volume to a host
            self.driver.initialize_connection(volume1, self._connector)

            # Initialize again, should notice it and do nothing
            self.driver.initialize_connection(volume1, self._connector)

            # Try to delete the 1st volume (should fail because it is mapped)
            self.assertRaises(processutils.ProcessExecutionError,
                              self.driver.delete_volume,
                              volume1)

            # Check bad output from lsfabric for the 2nd volume
            if protocol == 'FC' and self.USESIM:
                for error in ['remove_field', 'header_mismatch']:
                    self.sim.error_injection('lsfabric', error)
                    self.assertRaises(exception.VolumeBackendAPIException,
                                      self.driver.initialize_connection,
                                      volume2, self._connector)

            self.driver.terminate_connection(volume1, self._connector)
            if self.USESIM:
                # host should be gone once its last mapping is removed
                ret = self.driver._get_host_from_connector(self._connector)
                self.assertIsNone(ret)

        # Check cases with no auth set for host
        if self.USESIM:
            for auth_enabled in [True, False]:
                for host_exists in ['yes-auth', 'yes-noauth', 'no']:
                    self._set_flag('storwize_svc_iscsi_chap_enabled',
                                   auth_enabled)
                    case = 'en' + str(auth_enabled) + 'ex' + str(host_exists)
                    conn_na = {'initiator': 'test:init:%s' %
                                            random.randint(10000, 99999),
                               'ip': '11.11.11.11',
                               'host': 'host-%s' % case}
                    if host_exists.startswith('yes'):
                        self.sim._add_host_to_list(conn_na)
                        if host_exists == 'yes-auth':
                            kwargs = {'chapsecret': 'foo',
                                      'obj': conn_na['host']}
                            self.sim._cmd_chhost(**kwargs)
                    volume1['volume_type_id'] = types['iSCSI']['id']

                    init_ret = self.driver.initialize_connection(volume1,
                                                                 conn_na)
                    host_name = self.sim._host_in_list(conn_na['host'])
                    chap_ret = self.driver._get_chap_secret_for_host(host_name)
                    # CHAP data must be present when enabled by flag or when
                    # the pre-existing host already had a secret
                    if auth_enabled or host_exists == 'yes-auth':
                        self.assertIn('auth_password', init_ret['data'])
                        self.assertIsNotNone(chap_ret)
                    else:
                        self.assertNotIn('auth_password', init_ret['data'])
                        self.assertIsNone(chap_ret)
                    self.driver.terminate_connection(volume1, conn_na)
        self._set_flag('storwize_svc_iscsi_chap_enabled', True)

        # Test no preferred node
        if self.USESIM:
            self.sim.error_injection('lsvdisk', 'no_pref_node')
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.driver.initialize_connection,
                              volume1, self._connector)

        # Initialize connection from the second volume to the host with no
        # preferred node set if in simulation mode, otherwise, just
        # another initialize connection.
        if self.USESIM:
            self.sim.error_injection('lsvdisk', 'blank_pref_node')
        self.driver.initialize_connection(volume2, self._connector)

        # Try to remove connection from host that doesn't exist (should fail)
        conn_no_exist = self._connector.copy()
        conn_no_exist['initiator'] = 'i_dont_exist'
        conn_no_exist['wwpns'] = ['0000000000000000']
        self.assertRaises(exception.VolumeBackendAPIException,
                          self.driver.terminate_connection,
                          volume1,
                          conn_no_exist)

        # Try to remove connection from volume that isn't mapped (should print
        # message but NOT fail)
        vol_no_exist = {'name': 'i_dont_exist'}
        self.driver.terminate_connection(vol_no_exist, self._connector)

        # Remove the mapping from the 1st volume and delete it
        self.driver.terminate_connection(volume1, self._connector)
        self.driver.delete_volume(volume1)
        self._assert_vol_exists(volume1['name'], False)

        # Make sure our host still exists
        host_name = self.driver._get_host_from_connector(self._connector)
        self.assertIsNotNone(host_name)

        # Remove the mapping from the 2nd volume and delete it. The host should
        # be automatically removed because there are no more mappings.
        self.driver.terminate_connection(volume2, self._connector)
        self.driver.delete_volume(volume2)
        self._assert_vol_exists(volume2['name'], False)

        # Delete volume types that we created
        for protocol in ['FC', 'iSCSI']:
            volume_types.destroy(ctxt, types[protocol]['id'])

        # Check if our host still exists (it should not)
        if self.USESIM:
            ret = self.driver._get_host_from_connector(self._connector)
            self.assertIsNone(ret)
    def test_storwize_svc_multi_host_maps(self):
        """Mapping one volume to two hosts is allowed only when the
        multihostmap flag is enabled."""
        # We can't test connecting to multiple hosts from a single host when
        # using real storage
        if not self.USESIM:
            return

        # Create a volume to be used in mappings
        ctxt = context.get_admin_context()
        volume = self._generate_vol_info(None, None)
        self.driver.create_volume(volume)

        # Create volume types for protocols
        types = {}
        for protocol in ['FC', 'iSCSI']:
            opts = {'storage_protocol': '<in> ' + protocol}
            types[protocol] = volume_types.create(ctxt, protocol, opts)

        # Create a connector for the second 'host'
        wwpns = [str(random.randint(0, 9999999999999999)).zfill(16),
                 str(random.randint(0, 9999999999999999)).zfill(16)]
        initiator = 'test.initiator.%s' % str(random.randint(10000, 99999))
        conn2 = {'ip': '1.234.56.79',
                 'host': 'storwize-svc-test2',
                 'wwpns': wwpns,
                 'initiator': initiator}

        for protocol in ['FC', 'iSCSI']:
            volume['volume_type_id'] = types[protocol]['id']

            # Make sure that the volume has been created
            self._assert_vol_exists(volume['name'], True)

            self.driver.initialize_connection(volume, self._connector)

            # second mapping must fail while multihostmap is disabled ...
            self._set_flag('storwize_svc_multihostmap_enabled', False)
            self.assertRaises(exception.CinderException,
                              self.driver.initialize_connection, volume, conn2)

            # ... and succeed once it is enabled
            self._set_flag('storwize_svc_multihostmap_enabled', True)
            self.driver.initialize_connection(volume, conn2)

            self.driver.terminate_connection(volume, conn2)
            self.driver.terminate_connection(volume, self._connector)
    def test_storwize_svc_delete_volume_snapshots(self):
        """Deletion of volumes/snapshots, including forced cleanup after
        failed FlashCopy (startfcmap) operations."""
        # Create a volume with two snapshots
        master = self._generate_vol_info(None, None)
        self.driver.create_volume(master)
        self.driver.db.volume_set(master)

        # Fail creating a snapshot - will force delete the snapshot
        # NOTE(review): 'and False' disables this branch entirely (dead
        # code); presumably parked deliberately -- confirm before removing.
        if self.USESIM and False:
            snap = self._generate_vol_info(master['name'], master['id'])
            self.sim.error_injection('startfcmap', 'bad_id')
            self.assertRaises(processutils.ProcessExecutionError,
                              self.driver.create_snapshot, snap)
            self._assert_vol_exists(snap['name'], False)

        # Delete a snapshot
        snap = self._generate_vol_info(master['name'], master['id'])
        self.driver.create_snapshot(snap)
        self._assert_vol_exists(snap['name'], True)
        self.driver.delete_snapshot(snap)
        self._assert_vol_exists(snap['name'], False)

        # Delete a volume with snapshots (regular)
        snap = self._generate_vol_info(master['name'], master['id'])
        self.driver.create_snapshot(snap)
        self._assert_vol_exists(snap['name'], True)
        self.driver.delete_volume(master)
        self._assert_vol_exists(master['name'], False)

        # Fail create volume from snapshot - will force delete the volume
        if self.USESIM:
            volfs = self._generate_vol_info(None, None)
            self.sim.error_injection('startfcmap', 'bad_id')
            self.sim.error_injection('lsfcmap', 'speed_up')
            self.assertRaises(processutils.ProcessExecutionError,
                              self.driver.create_volume_from_snapshot,
                              volfs, snap)
            self._assert_vol_exists(volfs['name'], False)

        # Create volume from snapshot and delete it
        volfs = self._generate_vol_info(None, None)
        if self.USESIM:
            # make the simulated FlashCopy finish quickly
            self.sim.error_injection('lsfcmap', 'speed_up')
        self.driver.create_volume_from_snapshot(volfs, snap)
        self._assert_vol_exists(volfs['name'], True)
        self.driver.delete_volume(volfs)
        self._assert_vol_exists(volfs['name'], False)

        # Create volume from snapshot and delete the snapshot
        volfs = self._generate_vol_info(None, None)
        if self.USESIM:
            self.sim.error_injection('lsfcmap', 'speed_up')
        self.driver.create_volume_from_snapshot(volfs, snap)
        self.driver.delete_snapshot(snap)
        self._assert_vol_exists(snap['name'], False)

        # Fail create clone - will force delete the target volume
        if self.USESIM:
            clone = self._generate_vol_info(None, None)
            self.sim.error_injection('startfcmap', 'bad_id')
            self.sim.error_injection('lsfcmap', 'speed_up')
            self.assertRaises(processutils.ProcessExecutionError,
                              self.driver.create_cloned_volume,
                              clone, volfs)
            self._assert_vol_exists(clone['name'], False)

        # Create the clone, delete the source and target
        clone = self._generate_vol_info(None, None)
        if self.USESIM:
            self.sim.error_injection('lsfcmap', 'speed_up')
        self.driver.create_cloned_volume(clone, volfs)
        self._assert_vol_exists(clone['name'], True)
        self.driver.delete_volume(volfs)
        self._assert_vol_exists(volfs['name'], False)
        self.driver.delete_volume(clone)
        self._assert_vol_exists(clone['name'], False)
# Note defined in python 2.6, so define here...
def assertLessEqual(self, a, b, msg=None):
if not a <= b:
self.fail('%s not less than or equal to %s' % (repr(a), repr(b)))
    def test_storwize_svc_get_volume_stats(self):
        """Reported stats honour reserved_percentage and capacity bounds."""
        self._set_flag('reserved_percentage', 25)
        stats = self.driver.get_volume_stats()
        # free space can never exceed total space
        self.assertLessEqual(stats['free_capacity_gb'],
                             stats['total_capacity_gb'])
        self.assertEqual(stats['reserved_percentage'], 25)
        pool = self.driver.configuration.local_conf.storwize_svc_volpool_name
        if self.USESIM:
            expected = 'storwize-svc-sim_' + pool
            self.assertEqual(stats['volume_backend_name'], expected)
            # fixed capacity values defined by the simulator's pool
            self.assertAlmostEqual(stats['total_capacity_gb'], 3328.0)
            self.assertAlmostEqual(stats['free_capacity_gb'], 3287.5)
def test_storwize_svc_extend_volume(self):
volume = self._generate_vol_info(None, None)
self.driver.db.volume_set(volume)
self.driver.create_volume(volume)
stats = self.driver.extend_volume(volume, '13')
attrs = self.driver._get_vdisk_attributes(volume['name'])
vol_size = int(attrs['capacity']) / units.GiB
self.assertAlmostEqual(vol_size, 13)
snap = self._generate_vol_info(volume['name'], volume['id'])
self.driver.create_snapshot(snap)
self._assert_vol_exists(snap['name'], True)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.extend_volume, volume, '16')
self.driver.delete_snapshot(snap)
self.driver.delete_volume(volume)
def _check_loc_info(self, capabilities, expected):
host = {'host': 'foo', 'capabilities': capabilities}
vol = {'name': 'test', 'id': 1, 'size': 1}
ctxt = context.get_admin_context()
moved, model_update = self.driver.migrate_volume(ctxt, vol, host)
self.assertEqual(moved, expected['moved'])
self.assertEqual(model_update, expected['model_update'])
def test_storwize_svc_migrate_bad_loc_info(self):
self._check_loc_info({}, {'moved': False, 'model_update': None})
cap = {'location_info': 'foo'}
self._check_loc_info(cap, {'moved': False, 'model_update': None})
cap = {'location_info': 'FooDriver:foo:bar'}
self._check_loc_info(cap, {'moved': False, 'model_update': None})
cap = {'location_info': 'StorwizeSVCDriver:foo:bar'}
self._check_loc_info(cap, {'moved': False, 'model_update': None})
    def test_storwize_svc_migrate_same_extent_size(self):
        """Same-extent-size migration must not fall back to the copy path."""
        def _copy_info_exc(self, name):
            raise Exception('should not be called')
        # Fail loudly if the driver consults copy info: with equal extent
        # sizes the cheaper data-motion path should be taken instead.
        self.stubs.Set(self.driver, '_get_vdisk_copy_info', _copy_info_exc)
        self.driver.do_setup(None)
        loc = 'StorwizeSVCDriver:' + self.driver._system_id + ':openstack2'
        cap = {'location_info': loc, 'extent_size': '256'}
        host = {'host': 'foo', 'capabilities': cap}
        ctxt = context.get_admin_context()
        volume = self._generate_vol_info(None, None)
        volume['volume_type_id'] = None
        self.driver.create_volume(volume)
        self.driver.migrate_volume(ctxt, volume, host)
        self.driver.delete_volume(volume)
def test_storwize_svc_migrate_diff_extent_size(self):
self.driver.do_setup(None)
loc = 'StorwizeSVCDriver:' + self.driver._system_id + ':openstack3'
cap = {'location_info': loc, 'extent_size': '128'}
host = {'host': 'foo', 'capabilities': cap}
ctxt = context.get_admin_context()
volume = self._generate_vol_info(None, None)
volume['volume_type_id'] = None
self.driver.create_volume(volume)
self.assertNotEquals(cap['extent_size'], self.driver._extent_size)
self.driver.migrate_volume(ctxt, volume, host)
self.driver.delete_volume(volume)
class CLIResponseTestCase(test.TestCase):
    """Unit tests for the storwize_svc CLIResponse '!'-delimited parser."""

    def test_empty(self):
        # both plain strings and (stdout, stderr) tuples are accepted
        self.assertEqual(0, len(storwize_svc.CLIResponse('')))
        self.assertEqual(0, len(storwize_svc.CLIResponse(('', 'stderr'))))

    def test_header(self):
        """Output with a header row yields one dict per data row."""
        raw = r'''id!name
1!node1
2!node2
'''
        resp = storwize_svc.CLIResponse(raw, with_header=True)
        self.assertEqual(2, len(resp))
        self.assertEqual('1', resp[0]['id'])
        self.assertEqual('2', resp[1]['id'])

    def test_select(self):
        """select() pairs repeated attributes positionally per record."""
        raw = r'''id!123
name!Bill
name!Bill2
age!30
home address!s1
home address!s2
id! 7
name!John
name!John2
age!40
home address!s3
home address!s4
'''
        resp = storwize_svc.CLIResponse(raw, with_header=False)
        self.assertEqual(list(resp.select('home address', 'name',
                                          'home address')),
                         [('s1', 'Bill', 's1'), ('s2', 'Bill2', 's2'),
                          ('s3', 'John', 's3'), ('s4', 'John2', 's4')])

    def test_lsnode_all(self):
        """Default parsing handles lsnode-style tabular output."""
        raw = r'''id!name!UPS_serial_number!WWNN!status
1!node1!!500507680200C744!online
2!node2!!500507680200C745!online
'''
        resp = storwize_svc.CLIResponse(raw)
        self.assertEqual(2, len(resp))
        self.assertEqual('1', resp[0]['id'])
        self.assertEqual('500507680200C744', resp[0]['WWNN'])
        self.assertEqual('2', resp[1]['id'])
        self.assertEqual('500507680200C745', resp[1]['WWNN'])

    def test_lsnode_single(self):
        """Headerless key!value output groups repeated port attributes."""
        raw = r'''id!1
port_id!500507680210C744
port_status!active
port_speed!8Gb
port_id!500507680240C744
port_status!inactive
port_speed!8Gb
'''
        resp = storwize_svc.CLIResponse(raw, with_header=False)
        self.assertEqual(1, len(resp))
        self.assertEqual('1', resp[0]['id'])
        self.assertEqual(list(resp.select('port_id', 'port_status')),
                         [('500507680210C744', 'active'),
                          ('500507680240C744', 'inactive')])
| apache-2.0 |
comepradz/pybrain | pybrain/datasets/dataset.py | 25 | 13323 |
__author__ = 'Thomas Rueckstiess, ruecksti@in.tum.de'
import random
import pickle
from itertools import chain
from scipy import zeros, resize, ravel, asarray
import scipy
from pybrain.utilities import Serializable
class OutOfSyncError(Exception):
    """Raised when an operation would make linked fields differ in length."""
    pass


class VectorFormatError(Exception):
    """Raised when an unsupported vector format is requested."""
    pass


class NoLinkedFieldsError(Exception):
    """Raised when an operation requires linked fields but none are set."""
    pass
class DataSet(Serializable):
"""DataSet is a general base class for other data set classes
(e.g. SupervisedDataSet, SequentialDataSet, ...). It consists of several
fields. A field is a NumPy array with a label (a string) attached to it.
Fields can be linked together which means they must have the same length."""
    def __init__(self):
        # field label -> 2d data array (over-allocated; see endmarker)
        self.data = {}
        # field label -> index one past the last valid row of that array
        self.endmarker = {}
        # labels of the fields that are kept at equal length
        self.link = []
        # marker for sequential access via getLinked()/__iter__
        self.index = 0

        # row vectors returned by getLinked can have different formats:
        # '1d' example: array([1, 2, 3])
        # '2d' example: array([[1, 2, 3]])
        # 'list' example: [1, 2, 3]
        self.vectorformat = 'none'
    def __str__(self):
        """Return a string representation of a dataset."""
        s = ""
        for key in self.data:
            # only the first endmarker[key] rows are valid data; the rest of
            # the array is growth padding
            s = s + key + ": dim" + str(self.data[key].shape) + "\n" + str(self.data[key][:self.endmarker[key]]) + "\n\n"
        return s

    def __getitem__(self, field):
        """Return the given field."""
        return self.getField(field)

    def __iter__(self):
        """Iterate over all linked rows, starting from the first line."""
        self.reset()
        while not self.endOfData():
            yield self.getLinked()
    def getVectorFormat(self):
        """Returns the current vector format."""
        return self.__vectorformat

    def setVectorFormat(self, vf):
        """Determine which format to use for returning vectors. Use the
        property vectorformat.

        :key type: possible types are '1d', '2d', 'list'
            '1d' - example: array([1,2,3])
            '2d' - example: array([[1,2,3]])
            'list' - example: [1,2,3]
            'none' - no conversion
        """
        # map each format name to its conversion callable
        switch = {
            '1d': self._convertArray1d,
            '2d': self._convertArray2d,
            'list': self._convertList,
            'none': lambda x: x
        }
        try:
            self._convert = switch[vf]
            self.__vectorformat = vf
        except KeyError:
            raise VectorFormatError("vector format must be one of '1d', '2d', 'list'. given: %s" % vf)

    vectorformat = property(getVectorFormat, setVectorFormat, None, "vectorformat can be '1d', '2d' or 'list'")
    def _convertList(self, vector):
        """Converts the incoming vector to a python list."""
        return ravel(vector).tolist()

    def _convertArray1d(self, vector):
        """Converts the incoming vector to a 1d vector with shape (x,) where x
        is the number of elements."""
        return ravel(vector)

    def _convertArray2d(self, vector, column=False):
        """Converts the incoming `vector` to a 2d vector with shape (1,x), or
        (x,1) if `column` is set, where x is the number of elements."""
        a = asarray(vector)
        sh = a.shape
        # also reshape scalar values to 2d-index
        if len(sh) == 0:
            sh = (1,)
        if len(sh) == 1:
            # use reshape to add extra dimension
            if column:
                return a.reshape((sh[0], 1))
            else:
                return a.reshape((1, sh[0]))
        else:
            # vector is not 1d, return a without change
            return a
    def addField(self, label, dim):
        """Add a field to the dataset.

        A field consists of a string `label` and a numpy ndarray of dimension
        `dim`."""
        # start with zero rows; _resize() grows the buffer on demand
        self.data[label] = zeros((0, dim), float)
        self.endmarker[label] = 0

    def setField(self, label, arr):
        """Set the given array `arr` as the new array of field `label`."""
        as_arr = asarray(arr)
        self.data[label] = as_arr
        # the whole supplied array counts as valid data (no padding)
        self.endmarker[label] = as_arr.shape[0]
    def linkFields(self, linklist):
        """Link the length of several fields given by the list of strings
        `linklist`.

        :raises OutOfSyncError: if the named fields do not currently have
            equal length."""
        length = self[linklist[0]].shape[0]
        for l in linklist:
            if self[l].shape[0] != length:
                raise OutOfSyncError
        self.link = linklist

    def unlinkFields(self, unlinklist=None):
        """Remove fields from the link list, or clear the link entirely when
        no list is given.

        This method has no effect if fields are not linked."""
        # NOTE: 'link' aliases self.link, so remove() mutates it in place
        link = self.link
        if unlinklist is not None:
            for l in unlinklist:
                if l in self.link:
                    link.remove(l)
            self.link = link
        else:
            self.link = []
    def getDimension(self, label):
        """Return the dimension/number of columns for the field given by
        `label`.

        :raises KeyError: if the field does not exist."""
        try:
            dim = self.data[label].shape[1]
        except KeyError:
            raise KeyError('dataset field %s not found.' % label)
        return dim

    def __len__(self):
        """Return the length of the linked data fields. If no linked fields
        exist, return the length of the longest field."""
        return self.getLength()
def getLength(self):
"""Return the length of the linked data fields. If no linked fields exist,
return the length of the longest field."""
if self.link == []:
try:
length = self.endmarker[max(self.endmarker)]
except ValueError:
return 0
return length
else:
# all linked fields have equal length. return the length of the first.
l = self.link[0]
return self.endmarker[l]
    def _resize(self, label=None):
        # grow one specific field, else the linked fields, else every field
        if label:
            label = [label]
        elif self.link:
            label = self.link
        else:
            label = self.data
        for l in label:
            self.data[l] = self._resizeArray(self.data[l])

    def _resizeArray(self, a):
        """Increase the buffer size. It should always be one longer than the
        current sequence length and double on every growth step."""
        shape = list(a.shape)
        shape[0] = (shape[0] + 1) * 2
        return resize(a, shape)

    def _appendUnlinked(self, label, row):
        """Append `row` to the field array with the given `label`.

        Do not call this function from outside, use append() instead.
        Automatically casts vector to a 2d (or higher) shape."""
        if self.data[label].shape[0] <= self.endmarker[label]:
            # buffer exhausted -- grow it before writing
            self._resize(label)
        self.data[label][self.endmarker[label], :] = row
        self.endmarker[label] += 1
    def append(self, label, row):
        """Append `row` to the array given by `label`.

        If the field is linked with others, the function throws an
        `OutOfSyncError` because all linked fields always have to have the
        same length. If you want to add a row to all linked fields, use
        appendLinked instead."""
        if label in self.link:
            raise OutOfSyncError
        self._appendUnlinked(label, row)

    def appendLinked(self, *args):
        """Add rows to all linked fields at once; one argument per linked
        field, in link order."""
        assert len(args) == len(self.link)
        for i, l in enumerate(self.link):
            self._appendUnlinked(l, args[i])
def getLinked(self, index=None):
"""Access the dataset randomly or sequential.
If called with `index`, the appropriate line consisting of all linked
fields is returned and the internal marker is set to the next line.
Otherwise the marked line is returned and the marker is moved to the
next line."""
if self.link == []:
raise NoLinkedFieldsError('The dataset does not have any linked fields.')
if index == None:
# no index given, return the currently marked line and step marker one line forward
index = self.index
self.index += 1
else:
# return the indexed line and move marker to next line
self.index = index + 1
if index >= self.getLength():
raise IndexError('index out of bounds of the dataset.')
return [self._convert(self.data[l][index]) for l in self.link]
    def getField(self, label):
        """Return the entire field given by `label` as an array or list,
        depending on user settings."""
        # Note: label_data should always be a np.array, so this will never
        # actually clone a list (performances are O(1)).
        label_data = self.data[label][:self.endmarker[label]]
        # Convert to list if requested.
        if self.vectorformat == 'list':
            label_data = label_data.tolist()
        return label_data

    def hasField(self, label):
        """Tell whether the field given by `label` exists."""
        return label in self.data

    def getFieldNames(self):
        """Return the names of the currently defined fields."""
        return list(self.data.keys())
    def convertField(self, label, newtype):
        """Convert the given field to a different data type.

        :raises KeyError: if the field does not exist."""
        try:
            self.setField(label, self.data[label].astype(newtype))
        except KeyError:
            raise KeyError('convertField: dataset field %s not found.' % label)

    def endOfData(self):
        """Tell if the end of the data set is reached."""
        return self.index == self.getLength()

    def reset(self):
        """Reset the marker to the first line."""
        self.index = 0
    def clear(self, unlinked=False):
        """Clear the dataset.

        If linked fields exist, only the linked fields will be deleted unless
        `unlinked` is set to True. If no fields are linked, all data will be
        deleted."""
        self.reset()
        keys = self.link
        if keys == [] or unlinked:
            # iterate over all fields instead
            keys = self.data
        for k in keys:
            shape = list(self.data[k].shape)
            # set to zero rows, keeping the column dimension(s)
            shape[0] = 0
            self.data[k] = zeros(shape)
            self.endmarker[k] = 0
@classmethod
def reconstruct(cls, filename):
"""Read an incomplete data set (option arraysonly) into the given one. """
# FIXME: Obsolete! Kept here because of some old files...
obj = cls(1, 1)
for key, val in pickle.load(file(filename)).items():
obj.setField(key, val)
return obj
    def save_pickle(self, flo, protocol=0, compact=False):
        """Save data set as pickle, removing empty space if desired."""
        if compact:
            # remove padding of zeros for each field
            # NOTE(review): the slice end 'endmarker + 1' keeps one padding
            # row beyond the last valid one -- looks like an off-by-one;
            # confirm intent before changing.
            for field in self.getFieldNames():
                temp = self[field][0:self.endmarker[field] + 1, :]
                self.setField(field, temp)
        Serializable.save_pickle(self, flo, protocol)
    def __reduce__(self):
        # Custom pickling support: re-create an empty dataset with the same
        # vectorformat, then restore data/link/endmarker via the state dict.
        def creator():
            obj = self.__class__()
            obj.vectorformat = self.vectorformat
            return obj
        args = tuple()
        state = {
            'data': self.data,
            'link': self.link,
            'endmarker': self.endmarker,
        }
        return creator, args, state, iter([]), iter({})

    def copy(self):
        """Return a deep copy."""
        import copy
        return copy.deepcopy(self)
    def batches(self, label, n, permutation=None):
        """Yield batches of size `n` from the field `label`.

        A single batch is an array with dim columns and n rows. The last
        batch is possibly smaller.

        If `permutation` is given (a permutation of the *batch* indices),
        batches are yielded in the corresponding order."""
        # First calculate how many batches we will have
        full_batches, rest = divmod(len(self), n)
        number_of_batches = full_batches if rest == 0 else full_batches + 1

        # We make one iterator for the startindexes ...
        startindexes = (i * n for i in range(number_of_batches))
        # ... and one for the stop indexes
        stopindexes = (((i + 1) * n) for i in range(number_of_batches - 1))
        # The last stop index is the last element of the list (last batch
        # might not be filled completely)
        stopindexes = chain(stopindexes, [len(self)])

        # Now combine them
        indexes = list(zip(startindexes, stopindexes))

        # Shuffle them according to the permutation if one is given
        if permutation is not None:
            indexes = [indexes[i] for i in permutation]

        for start, stop in indexes:
            yield self.data[label][start:stop]
def randomBatches(self, label, n):
"""Like .batches(), but the order is random."""
permutation = random.shuffle(list(range(len(self))))
return self.batches(label, n, permutation)
def replaceNansByMeans(self):
"""Replace all not-a-number entries in the dataset by the means of the
corresponding column."""
for d in self.data.values():
means = scipy.nansum(d[:self.getLength()], axis=0) / self.getLength()
for i in range(self.getLength()):
for j in range(d.dim):
if not scipy.isfinite(d[i, j]):
d[i, j] = means[j]
| bsd-3-clause |
djeo94/CouchPotatoServer | couchpotato/core/media/movie/providers/automation/imdb.py | 15 | 11228 | import traceback
import re
from bs4 import BeautifulSoup
from couchpotato import fireEvent
from couchpotato.core.helpers.encoding import ss
from couchpotato.core.helpers.rss import RSS
from couchpotato.core.helpers.variable import getImdb, splitString, tryInt
from couchpotato.core.logger import CPLog
from couchpotato.core.media._base.providers.base import MultiProvider
from couchpotato.core.media.movie.providers.automation.base import Automation
log = CPLog(__name__)
autoload = 'IMDB'
class IMDB(MultiProvider):
    """Umbrella provider exposing the three IMDB integration types."""

    def getTypes(self):
        """Return the concrete IMDB provider classes handled by this one."""
        return [IMDBWatchlist, IMDBAutomation, IMDBCharts]
class IMDBBase(Automation, RSS):
    """Shared behaviour for the IMDB providers: chart definitions plus
    scraping of IMDB ids from IMDB list pages."""

    interval = 1800

    # chart key -> display metadata and source URL
    charts = {
        'theater': {
            'order': 1,
            'name': 'IMDB - Movies in Theaters',
            'url': 'http://www.imdb.com/movies-in-theaters/',
        },
        'boxoffice': {
            'order': 2,
            'name': 'IMDB - Box Office',
            'url': 'http://www.imdb.com/boxoffice/',
        },
        'rentals': {
            'order': 3,
            'name': 'IMDB - Top DVD rentals',
            'url': 'http://www.imdb.com/boxoffice/rentals',
            'type': 'json',
        },
        'top250': {
            'order': 4,
            'name': 'IMDB - Top 250 Movies',
            'url': 'http://www.imdb.com/chart/top',
        },
    }

    def getInfo(self, imdb_id):
        """Fetch merged movie info for the given IMDB id."""
        return fireEvent('movie.info', identifier = imdb_id, extended = False, merge = True)

    def getFromURL(self, url):
        """Download an IMDB list page and return the IMDB ids found on it."""
        log.debug('Getting IMDBs from: %s', url)
        html = self.getHTMLData(url)

        try:
            # Fast path: compact list layout.
            split = splitString(html, split_on = "<div class=\"list compact\">")[1]
            html = splitString(split, split_on = "<div class=\"pages\">")[0]
        except Exception:
            # Was a bare 'except:', which would also swallow
            # SystemExit/KeyboardInterrupt; narrowed to Exception.
            try:
                split = splitString(html, split_on = "<div id=\"main\">")

                if len(split) < 2:
                    log.error('Failed parsing IMDB page "%s", unexpected html.', url)
                    return []

                html = BeautifulSoup(split[1])
                for x in ['list compact', 'lister', 'list detail sub-list']:
                    html2 = html.find('div', attrs = {
                        'class': x
                    })

                    if html2:
                        html = html2.contents
                        html = ''.join([str(x) for x in html])
                        break
            except Exception:
                log.error('Failed parsing IMDB page "%s": %s', (url, traceback.format_exc()))

        html = ss(html)
        imdbs = getImdb(html, multiple = True) if html else []

        return imdbs
class IMDBWatchlist(IMDBBase):
    """Import movies from public IMDB watchlists and lists."""

    enabled_option = 'automation_enabled'

    def getIMDBids(self):
        """Return the IMDB ids of all movies on the configured watchlists."""
        movies = []

        watchlist_enablers = [tryInt(x) for x in splitString(self.conf('automation_urls_use'))]
        watchlist_urls = splitString(self.conf('automation_urls'))

        # NOTE(review): assumes both settings contain the same number of
        # entries; a mismatch raises IndexError below -- confirm upstream
        # validation guarantees this.
        index = -1
        for watchlist_url in watchlist_urls:

            try:
                # Get list ID
                ids = re.findall('(?:list/|list_id=)([a-zA-Z0-9\-_]{11})', watchlist_url)
                if len(ids) == 1:
                    watchlist_url = 'http://www.imdb.com/list/%s/?view=compact&sort=created:asc' % ids[0]
                # Try find user id with watchlist
                else:
                    userids = re.findall('(ur\d{7,9})', watchlist_url)
                    if len(userids) == 1:
                        watchlist_url = 'http://www.imdb.com/user/%s/watchlist?view=compact&sort=created:asc' % userids[0]
            except:
                log.error('Failed getting id from watchlist: %s', traceback.format_exc())

            index += 1
            if not watchlist_enablers[index]:
                continue

            start = 0
            while True:
                try:
                    w_url = '%s&start=%s' % (watchlist_url, start)
                    imdbs = self.getFromURL(w_url)

                    for imdb in imdbs:
                        if imdb not in movies:
                            movies.append(imdb)

                        if self.shuttingDown():
                            break

                    log.debug('Found %s movies on %s', (len(imdbs), w_url))

                    # fewer than a full page of results means the last page
                    # was reached (pages presumably hold ~250 entries --
                    # TODO confirm the 225 threshold)
                    if len(imdbs) < 225:
                        break

                    start = len(movies)

                except:
                    log.error('Failed loading IMDB watchlist: %s %s', (watchlist_url, traceback.format_exc()))
                    break

        return movies
class IMDBAutomation(IMDBBase):
    """Automatically add movies from the enabled IMDB charts."""

    enabled_option = 'automation_providers_enabled'

    def getIMDBids(self):
        """Return ids of chart movies that pass the minimal-movie filter."""
        movies = []

        for name in self.charts:
            chart = self.charts[name]
            url = chart.get('url')

            if self.conf('automation_charts_%s' % name):
                imdb_ids = self.getFromURL(url)

                try:
                    for imdb_id in imdb_ids:
                        info = self.getInfo(imdb_id)
                        # only add movies meeting the configured minimums
                        if info and self.isMinimalMovie(info):
                            movies.append(imdb_id)

                        if self.shuttingDown():
                            break
                except:
                    log.error('Failed loading IMDB chart results from %s: %s', (url, traceback.format_exc()))

        return movies
class IMDBCharts(IMDBBase):
    """Provide chart data (with full movie info) for display purposes."""

    def getChartList(self):
        # Nearly identical to 'getIMDBids', but we don't care about minimalMovie and return all movie data (not just id)
        movie_lists = []
        max_items = 10

        for name in self.charts:
            chart = self.charts[name].copy()
            cache_key = 'imdb.chart_display_%s' % name

            if self.conf('chart_display_%s' % name):

                # serve a previously built list if still cached
                cached = self.getCache(cache_key)
                if cached:
                    chart['list'] = cached
                    movie_lists.append(chart)
                    continue

                url = chart.get('url')

                chart['list'] = []
                imdb_ids = self.getFromURL(url)

                try:
                    for imdb_id in imdb_ids[0:max_items]:
                        is_movie = fireEvent('movie.is_movie', identifier = imdb_id, single = True)
                        if not is_movie:
                            continue

                        info = self.getInfo(imdb_id)
                        chart['list'].append(info)

                        if self.shuttingDown():
                            break
                except:
                    log.error('Failed loading IMDB chart results from %s: %s', (url, traceback.format_exc()))

                # cache the built list for three days (259200 s)
                self.setCache(cache_key, chart['list'], timeout = 259200)

                if chart['list']:
                    movie_lists.append(chart)

        return movie_lists
config = [{
'name': 'imdb',
'groups': [
{
'tab': 'automation',
'list': 'watchlist_providers',
'name': 'imdb_automation_watchlist',
'label': 'IMDB',
'description': 'From any <strong>public</strong> IMDB watchlists.',
'options': [
{
'name': 'automation_enabled',
'default': False,
'type': 'enabler',
},
{
'name': 'automation_urls_use',
'label': 'Use',
},
{
'name': 'automation_urls',
'label': 'url',
'type': 'combined',
'combine': ['automation_urls_use', 'automation_urls'],
},
],
},
{
'tab': 'automation',
'list': 'automation_providers',
'name': 'imdb_automation_charts',
'label': 'IMDB',
'description': 'Import movies from IMDB Charts',
'options': [
{
'name': 'automation_providers_enabled',
'default': False,
'type': 'enabler',
},
{
'name': 'automation_charts_theater',
'type': 'bool',
'label': 'In Theaters',
'description': 'New Movies <a href="http://www.imdb.com/movies-in-theaters/">In-Theaters</a> chart',
'default': True,
},
{
'name': 'automation_charts_rentals',
'type': 'bool',
'label': 'DVD Rentals',
'description': 'Top DVD <a href="http://www.imdb.com/boxoffice/rentals">rentals</a> chart',
'default': True,
},
{
'name': 'automation_charts_top250',
'type': 'bool',
'label': 'TOP 250',
'description': 'IMDB <a href="http://www.imdb.com/chart/top/">TOP 250</a> chart',
'default': False,
},
{
'name': 'automation_charts_boxoffice',
'type': 'bool',
'label': 'Box office TOP 10',
'description': 'IMDB Box office <a href="http://www.imdb.com/chart/">TOP 10</a> chart',
'default': True,
},
],
},
{
'tab': 'display',
'list': 'charts_providers',
'name': 'imdb_charts_display',
'label': 'IMDB',
'description': 'Display movies from IMDB Charts',
'options': [
{
'name': 'chart_display_enabled',
'default': True,
'type': 'enabler',
},
{
'name': 'chart_display_theater',
'type': 'bool',
'label': 'In Theaters',
'description': 'New Movies <a href="http://www.imdb.com/movies-in-theaters/">In-Theaters</a> chart',
'default': False,
},
{
'name': 'chart_display_top250',
'type': 'bool',
'label': 'TOP 250',
'description': 'IMDB <a href="http://www.imdb.com/chart/top/">TOP 250</a> chart',
'default': False,
},
{
'name': 'chart_display_rentals',
'type': 'bool',
'label': 'DVD Rentals',
'description': 'Top DVD <a href="http://www.imdb.com/boxoffice/rentals">rentals</a> chart',
'default': True,
},
{
'name': 'chart_display_boxoffice',
'type': 'bool',
'label': 'Box office TOP 10',
'description': 'IMDB Box office <a href="http://www.imdb.com/chart/">TOP 10</a> chart',
'default': True,
},
],
},
],
}]
| gpl-3.0 |
krtkmj/zulip | zerver/views/webhooks/pivotal.py | 9 | 8028 | """Webhooks for external integrations."""
from __future__ import absolute_import
from django.http import HttpRequest, HttpResponse
from django.utils.translation import ugettext as _
from zerver.lib.actions import check_send_message
from zerver.lib.response import json_success, json_error
from zerver.decorator import api_key_only_webhook_view, REQ, has_request_variables
from zerver.models import UserProfile, Client
from defusedxml.ElementTree import fromstring as xml_fromstring
import logging
import re
import ujson
from six import text_type
from typing import List, Optional, Tuple
def api_pivotal_webhook_v3(request, user_profile, stream):
    # type: (HttpRequest, UserProfile, text_type) -> Tuple[text_type, text_type]
    """Parse a Pivotal Tracker v3 (XML) webhook payload.

    Returns a (subject, content) pair for the Zulip message.  Missing XML
    elements raise AttributeError, which the caller uses as the signal to
    retry the payload with the v5 JSON parser.
    """
    payload = xml_fromstring(request.body)

    def get_text(path):
        # type: (List[str]) -> str
        # Descend through nested elements; if any element along the path
        # is absent, return "" instead of raising.
        node = payload
        try:
            for tag in path:
                node = node.find(tag)
            return node.text
        except AttributeError:
            return ""

    event_type = payload.find('event_type').text
    description = payload.find('description').text
    project_id = payload.find('project_id').text
    story_id = get_text(['stories', 'story', 'id'])
    # The URL embedded in the XML payload is not clickable for the user,
    # so construct one that actually works.
    url = "https://www.pivotaltracker.com/s/projects/%s/stories/%s" % (project_id, story_id)
    # Pivotal doesn't send the story name directly; it is usually the
    # first quoted string inside the description.
    match = re.match(r'[^"]+"([^"]+)".*', description)
    if match and len(match.groups()):
        name = match.group(1)
    else:
        name = "Story changed"  # Failed for an unknown reason, show something
    more_info = " [(view)](%s)" % (url,)

    if event_type == 'story_update':
        subject = name
        content = description + more_info
    elif event_type == 'note_create':
        subject = "Comment added"
        content = description + more_info
    elif event_type == 'story_create':
        issue_desc = get_text(['stories', 'story', 'description'])
        issue_type = get_text(['stories', 'story', 'story_type'])
        issue_status = get_text(['stories', 'story', 'current_state'])
        estimate = get_text(['stories', 'story', 'estimate'])
        if estimate != '':
            estimate = " worth %s story points" % (estimate,)
        subject = name
        content = "%s (%s %s%s):\n\n~~~ quote\n%s\n~~~\n\n%s" % (description,
                                                                 issue_status,
                                                                 issue_type,
                                                                 estimate,
                                                                 issue_desc,
                                                                 more_info)
    return subject, content
def api_pivotal_webhook_v5(request, user_profile, stream):
    # type: (HttpRequest, UserProfile, text_type) -> Tuple[text_type, text_type]
    """Build a (subject, content) pair from a Pivotal v5 JSON payload.

    The v5 API sends a JSON body whose "kind" field names the activity
    type and whose "changes" list describes what was modified.  Known but
    unsupported event kinds yield empty content; unknown kinds are logged.
    Missing required keys raise (KeyError/AttributeError), which the
    caller reports as a parse failure.
    """
    payload = ujson.loads(request.body)

    event_type = payload["kind"]

    project_name = payload["project"]["name"]
    project_id = payload["project"]["id"]

    # The first primary resource is the story the activity concerns.
    primary_resources = payload["primary_resources"][0]
    story_url = primary_resources["url"]
    story_type = primary_resources["story_type"]
    story_id = primary_resources["id"]
    story_name = primary_resources["name"]

    performed_by = payload.get("performed_by", {}).get("name", "")

    # Markdown link pair: "[project](project url): [story](story url)".
    story_info = "[%s](https://www.pivotaltracker.com/s/projects/%s): [%s](%s)" % (
        project_name, project_id, story_name, story_url)

    changes = payload.get("changes", [])

    content = ""
    subject = "#%s: %s" % (story_id, story_name)

    def extract_comment(change):
        # type: (Dict[str, Dict]) -> Optional[text_type]
        # Only changes of kind "comment" carry comment text.
        if change.get("kind") == "comment":
            return change.get("new_values", {}).get("text", None)
        return None

    if event_type == "story_update_activity":
        # Find the changed values and build a message
        content += "%s updated %s:\n" % (performed_by, story_info)
        for change in changes:
            old_values = change.get("original_values", {})
            new_values = change["new_values"]

            if "current_state" in old_values and "current_state" in new_values:
                content += "* state changed from **%s** to **%s**\n" % (
                    old_values["current_state"], new_values["current_state"])
            if "estimate" in old_values and "estimate" in new_values:
                # An estimate may be newly added (old value None) or changed.
                old_estimate = old_values.get("estimate", None)
                if old_estimate is None:
                    estimate = "is now"
                else:
                    estimate = "changed from %s to" % (old_estimate,)
                new_estimate = new_values["estimate"] if new_values["estimate"] is not None else "0"
                content += "* estimate %s **%s points**\n" % (estimate, new_estimate)
            if "story_type" in old_values and "story_type" in new_values:
                content += "* type changed from **%s** to **%s**\n" % (
                    old_values["story_type"], new_values["story_type"])

            comment = extract_comment(change)
            if comment is not None:
                content += "* Comment added:\n~~~quote\n%s\n~~~\n" % (comment,)

    elif event_type == "comment_create_activity":
        for change in changes:
            comment = extract_comment(change)
            if comment is not None:
                content += "%s added a comment to %s:\n~~~quote\n%s\n~~~" % (performed_by, story_info, comment)
    elif event_type == "story_create_activity":
        content += "%s created %s: %s\n" % (performed_by, story_type, story_info)
        for change in changes:
            new_values = change.get("new_values", {})
            if "current_state" in new_values:
                content += "* State is **%s**\n" % (new_values["current_state"],)
            if "description" in new_values:
                content += "* Description is\n\n> %s" % (new_values["description"],)
    elif event_type == "story_move_activity":
        # NOTE: this assigns (not appends to) content, discarding anything
        # accumulated above; preserved as-is since content is "" here anyway.
        content = "%s moved %s" % (performed_by, story_info)
        for change in changes:
            old_values = change.get("original_values", {})
            new_values = change["new_values"]
            if "current_state" in old_values and "current_state" in new_values:
                content += " from **%s** to **%s**" % (old_values["current_state"], new_values["current_state"])
    elif event_type in ["task_create_activity", "comment_delete_activity",
                        "task_delete_activity", "task_update_activity",
                        "story_move_from_project_activity", "story_delete_activity",
                        "story_move_into_project_activity"]:
        # Known but unsupported Pivotal event types
        pass
    else:
        logging.warning("Unknown Pivotal event type: %s" % (event_type,))

    return subject, content
@api_key_only_webhook_view("Pivotal")
@has_request_variables
def api_pivotal_webhook(request, user_profile, client, stream=REQ()):
    # type: (HttpRequest, UserProfile, Client, text_type) -> HttpResponse
    """Entry point for Pivotal Tracker webhooks.

    Pivotal has two payload formats: the legacy v3 XML format and the v5
    JSON format.  Try the XML parser first; if the body is not XML at all
    (any non-AttributeError failure), fall back to the v5 JSON parser.
    """
    subject = content = None
    try:
        subject, content = api_pivotal_webhook_v3(request, user_profile, stream)
    except AttributeError:
        return json_error(_("Failed to extract data from Pivotal XML response"))
    except Exception:
        # Fix: was a bare `except:`, which would also swallow
        # KeyboardInterrupt/SystemExit.  Any other failure here means the
        # body is not v3 XML; attempt to parse it as a v5 JSON payload.
        try:
            subject, content = api_pivotal_webhook_v5(request, user_profile, stream)
        except AttributeError:
            return json_error(_("Failed to extract data from Pivotal V5 JSON response"))

    if subject is None or content is None:
        return json_error(_("Unable to handle Pivotal payload"))

    check_send_message(user_profile, client, "stream",
                       [stream], subject, content)
    return json_success()
| apache-2.0 |
GyrosOfWar/servo | components/script/dom/bindings/codegen/parser/WebIDL.py | 28 | 249009 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
""" A WebIDL parser. """
from ply import lex, yacc
import re
import os
import traceback
import math
from collections import defaultdict
# Machinery
def parseInt(literal):
    """Parse a WebIDL integer literal.

    Supports decimal, hexadecimal (0x/0X prefix), and octal (leading 0)
    forms, with an optional leading minus sign.
    """
    text = literal
    negative = text[0] == '-'
    if negative:
        text = text[1:]

    if text[0] == '0' and len(text) > 1:
        if text[1] in ('x', 'X'):
            radix = 16
            text = text[2:]
        else:
            # A leading zero (not followed by x/X) marks an octal literal.
            radix = 8
            text = text[1:]
    else:
        radix = 10

    magnitude = int(text, radix)
    return -magnitude if negative else magnitude
# Magic for creating enums
def M_add_class_attribs(attribs, start):
    """Return a metaclass-style factory that numbers `attribs`.

    Each name in `attribs` becomes a class attribute holding consecutive
    integers beginning at `start`; a `length` attribute records
    start + len(attribs).
    """
    def make_class(name, bases, dict_):
        for offset, attrib in enumerate(attribs):
            dict_[attrib] = start + offset
        assert 'length' not in dict_
        dict_['length'] = start + len(attribs)
        return type(name, bases, dict_)
    return make_class
def enum(*names, **kw):
    # Build an anonymous, read-only enum-like object whose attributes are
    # consecutive integers.  Passing base=<existing enum instance> continues
    # numbering from that enum's `length`, allowing enums to be chained.
    if len(kw) == 1:
        base = kw['base'].__class__
        start = base.length
    else:
        assert len(kw) == 0
        base = object
        start = 0
    class Foo(base):
        # NOTE(review): Python 2-style metaclass hook.  Under Python 3 the
        # __metaclass__ attribute is ignored, so the numbered attributes
        # would not be attached — this module targets Python 2.
        __metaclass__ = M_add_class_attribs(names, start)
        def __setattr__(self, name, value):  # this makes it read-only
            raise NotImplementedError
    return Foo()
class WebIDLError(Exception):
    """An error (or warning) produced while processing WebIDL.

    Carries a message and the source locations involved; str() renders a
    human-readable diagnostic.
    """
    def __init__(self, message, locations, warning=False):
        self.message = message
        self.locations = [str(location) for location in locations]
        self.warning = warning

    def __str__(self):
        severity = 'warning' if self.warning else 'error'
        separator = ", " if self.locations else ""
        return "%s: %s%s%s" % (severity, self.message, separator,
                               "\n".join(self.locations))
class Location(object):
    """A position in a lexed source file, lazily resolved to line/column."""

    def __init__(self, lexer, lineno, lexpos, filename):
        self._line = None
        self._lineno = lineno
        self._lexpos = lexpos
        self._lexdata = lexer.lexdata
        self._file = filename if filename else "<unknown>"

    def __eq__(self, other):
        # Two locations are equal when they name the same offset in the
        # same file; line/column are derived data.
        return self._lexpos == other._lexpos and self._file == other._file

    def filename(self):
        return self._file

    def resolve(self):
        # Truthiness-based cache: once _line is a non-empty string we are
        # done (an empty line simply recomputes, which is harmless).
        if self._line:
            return

        line_start = self._lexdata.rfind('\n', 0, self._lexpos) + 1
        # Only look ahead up to 80 characters for the end of the line.
        line_end = self._lexdata.find('\n', self._lexpos, self._lexpos + 80)
        if line_end == -1:
            self._line = self._lexdata[line_start:]
        else:
            self._line = self._lexdata[line_start:line_end]
        self._colno = self._lexpos - line_start

        # The stored line number counts from the start of lexdata, so add
        # the newlines that precede this position.
        self._lineno += self._lexdata.count('\n', 0, line_start)

    def get(self):
        self.resolve()
        return "%s line %s:%s" % (self._file, self._lineno, self._colno)

    def _pointerline(self):
        # A caret aligned under the offending column.
        return " " * self._colno + "^"

    def __str__(self):
        self.resolve()
        return "%s line %s:%s\n%s\n%s" % (self._file, self._lineno, self._colno,
                                          self._line, self._pointerline())
class BuiltinLocation(object):
    """Placeholder location for objects defined by the parser itself."""

    def __init__(self, text):
        self.msg = text + "\n"

    def __eq__(self, other):
        return isinstance(other, BuiltinLocation) and self.msg == other.msg

    def filename(self):
        return '<builtin>'

    def resolve(self):
        # Nothing to compute for a synthetic location.
        pass

    def get(self):
        return self.msg

    def __str__(self):
        return self.get()
# Data Model
class IDLObject(object):
    """Base class for every node in the parsed IDL tree.

    Provides a location, an arbitrary user-data dict, type-predicate
    defaults (all False), and dependency-file tracking via getDeps().
    """

    def __init__(self, location):
        self.location = location
        self.userData = dict()

    def filename(self):
        return self.location.filename()

    # Type predicates: subclasses override the one that applies to them.
    def isInterface(self):
        return False

    def isEnum(self):
        return False

    def isCallback(self):
        return False

    def isType(self):
        return False

    def isDictionary(self):
        return False

    def isUnion(self):
        return False

    def isTypedef(self):
        return False

    def getUserData(self, key, default):
        return self.userData.get(key, default)

    def setUserData(self, key, value):
        self.userData[key] = value

    def addExtendedAttributes(self, attrs):
        assert False  # Override me!

    def handleExtendedAttribute(self, attr):
        assert False  # Override me!

    def _getDependentObjects(self):
        assert False  # Override me!

    def getDeps(self, visited=None):
        """Return the set of files this object depends on.

        If any of these files change, the parser must be rerun to
        regenerate a new IDLObject.  `visited` is the set of objects
        already seen; it prevents infinite recursion over cyclic
        dependency graphs.
        """
        # A default of visited=set() would be evaluated once at function
        # definition time and shared across every call (the mutable
        # default-argument trap), so create a fresh set per call instead.
        if visited is None:
            visited = set()

        if self in visited:
            return set()
        visited.add(self)

        deps = set()
        if self.filename() != "<builtin>":
            deps.add(self.filename())

        for dependency in self._getDependentObjects():
            deps.update(dependency.getDeps(visited))

        return deps
class IDLScope(IDLObject):
    """A naming scope: maps identifier names to the IDL objects they denote.

    Scopes nest via parentScope; each scope also tracks global-name
    bookkeeping used for [Exposed]/[Global] processing.
    """

    def __init__(self, location, parentScope, identifier):
        IDLObject.__init__(self, location)

        self.parentScope = parentScope
        if identifier:
            assert isinstance(identifier, IDLIdentifier)
            self._name = identifier
        else:
            self._name = None

        # name -> IDLObjectWithIdentifier registered in this scope.
        self._dict = {}
        self.globalNames = set()
        # A mapping from global name to the set of global interfaces
        # that have that global name.
        self.globalNameMapping = defaultdict(set)
        self.primaryGlobalAttr = None
        self.primaryGlobalName = None

    def __str__(self):
        return self.QName()

    def QName(self):
        # Qualified scope prefix, e.g. "::Foo::"; the root scope is "::".
        if self._name:
            return self._name.QName() + "::"
        return "::"

    def ensureUnique(self, identifier, object):
        """
        Ensure that there is at most one 'identifier' in scope ('self').
        Note that object can be None.  This occurs if we end up here for an
        interface type we haven't seen yet.
        """
        assert isinstance(identifier, IDLUnresolvedIdentifier)
        assert not object or isinstance(object, IDLObjectWithIdentifier)
        assert not object or object.identifier == identifier

        if identifier.name in self._dict:
            if not object:
                return

            # ensureUnique twice with the same object is not allowed
            assert id(object) != id(self._dict[identifier.name])

            # A conflict may be resolvable (e.g. method overloads); let
            # resolveIdentifierConflict decide or raise.
            replacement = self.resolveIdentifierConflict(self, identifier,
                                                         self._dict[identifier.name],
                                                         object)
            self._dict[identifier.name] = replacement
            return

        assert object

        self._dict[identifier.name] = object

    def resolveIdentifierConflict(self, scope, identifier, originalObject, newObject):
        # Duplicate declarations of the same external interface collapse
        # into a single object.
        if (isinstance(originalObject, IDLExternalInterface) and
            isinstance(newObject, IDLExternalInterface) and
            originalObject.identifier.name == newObject.identifier.name):
            return originalObject

        if (isinstance(originalObject, IDLExternalInterface) or
            isinstance(newObject, IDLExternalInterface)):
            raise WebIDLError(
                "Name collision between "
                "interface declarations for identifier '%s' at '%s' and '%s'"
                % (identifier.name,
                   originalObject.location, newObject.location), [])

        if (isinstance(originalObject, IDLDictionary) or
            isinstance(newObject, IDLDictionary)):
            raise WebIDLError(
                "Name collision between dictionary declarations for "
                "identifier '%s'.\n%s\n%s"
                % (identifier.name,
                   originalObject.location, newObject.location), [])

        # We do the merging of overloads here as opposed to in IDLInterface
        # because we need to merge overloads of NamedConstructors and we need to
        # detect conflicts in those across interfaces. See also the comment in
        # IDLInterface.addExtendedAttributes for "NamedConstructor".
        if (originalObject.tag == IDLInterfaceMember.Tags.Method and
            newObject.tag == IDLInterfaceMember.Tags.Method):
            return originalObject.addOverload(newObject)

        # Default to throwing, derived classes can override.
        conflictdesc = "\n\t%s at %s\n\t%s at %s" % (originalObject,
                                                     originalObject.location,
                                                     newObject,
                                                     newObject.location)

        # NOTE(review): the message below appears to be missing a closing
        # quote after the scope name; preserved byte-for-byte since it is
        # a runtime string.
        raise WebIDLError(
            "Multiple unresolvable definitions of identifier '%s' in scope '%s%s"
            % (identifier.name, str(self), conflictdesc), [])

    def _lookupIdentifier(self, identifier):
        # Raises KeyError for names not registered in this scope.
        return self._dict[identifier.name]

    def lookupIdentifier(self, identifier):
        assert isinstance(identifier, IDLIdentifier)
        assert identifier.scope == self
        return self._lookupIdentifier(identifier)
class IDLIdentifier(IDLObject):
    """An identifier that has been resolved into a concrete scope."""

    def __init__(self, location, scope, name):
        IDLObject.__init__(self, location)

        self.name = name
        assert isinstance(scope, IDLScope)
        self.scope = scope

    def QName(self):
        # Fully-qualified name, e.g. "::Foo::bar".
        return self.scope.QName() + self.name

    def __str__(self):
        return self.QName()

    def __hash__(self):
        return hash(self.QName())

    def __eq__(self, other):
        # Identifiers compare by fully-qualified name.
        return self.QName() == other.QName()

    def object(self):
        """Return the IDL object this identifier denotes in its scope."""
        return self.scope.lookupIdentifier(self)
class IDLUnresolvedIdentifier(IDLObject):
    """An identifier that has not yet been bound to a scope.

    Construction validates reserved names and strips the single leading
    underscore WebIDL uses to escape keywords; resolve() binds the name
    in a scope and yields the corresponding IDLIdentifier.
    """

    def __init__(self, location, name, allowDoubleUnderscore=False,
                 allowForbidden=False):
        IDLObject.__init__(self, location)

        assert len(name) > 0

        if name == "__noSuchMethod__":
            raise WebIDLError("__noSuchMethod__ is deprecated", [location])

        if name[:2] == "__" and name != "__content" and not allowDoubleUnderscore:
            raise WebIDLError("Identifiers beginning with __ are reserved",
                              [location])
        if name[0] == '_' and not allowDoubleUnderscore:
            # A single leading underscore escapes a reserved word; drop it.
            name = name[1:]
        # TODO: Bug 872377, Restore "toJSON" to below list.
        # We sometimes need custom serialization, so allow toJSON for now.
        if name in ["constructor", "toString"] and not allowForbidden:
            raise WebIDLError("Cannot use reserved identifier '%s'" % (name),
                              [location])

        self.name = name

    def __str__(self):
        return self.QName()

    def QName(self):
        return "<unresolved scope>::" + self.name

    def resolve(self, scope, object):
        """Bind this name in `scope`; return the resolved IDLIdentifier
        and attach it to `object` when one is supplied."""
        assert isinstance(scope, IDLScope)
        assert not object or isinstance(object, IDLObjectWithIdentifier)
        assert not object or object.identifier == self

        scope.ensureUnique(self, object)

        identifier = IDLIdentifier(self.location, scope, self.name)
        if object:
            object.identifier = identifier
        return identifier

    def finish(self):
        assert False  # Should replace with a resolved identifier first.
class IDLObjectWithIdentifier(IDLObject):
    """An IDL object that carries an identifier and may live in a scope."""

    def __init__(self, location, parentScope, identifier):
        IDLObject.__init__(self, location)

        assert isinstance(identifier, IDLUnresolvedIdentifier)

        self.identifier = identifier
        if parentScope:
            self.resolve(parentScope)

        # Default null-handling behavior; possibly overridden by
        # [TreatNullAs] below.
        self.treatNullAs = "Default"

    def resolve(self, parentScope):
        assert isinstance(parentScope, IDLScope)
        assert isinstance(self.identifier, IDLUnresolvedIdentifier)
        self.identifier.resolve(parentScope, self)

    def checkForStringHandlingExtendedAttributes(self, attrs,
                                                 isDictionaryMember=False,
                                                 isOptional=False):
        """
        Consume [TreatNullAs] from `attrs`, validating it; return the
        list of attributes that were not handled here.
        """
        assert isinstance(self, IDLArgument) or isinstance(self, IDLAttribute)
        remaining = list()
        for attr in attrs:
            if not attr.hasValue():
                remaining.append(attr)
                continue

            identifier = attr.identifier()
            value = attr.value()
            if identifier != "TreatNullAs":
                remaining.append(attr)
                continue

            # [TreatNullAs] applies only to non-nullable DOMString.
            if not self.type.isDOMString() or self.type.nullable():
                raise WebIDLError("[TreatNullAs] is only allowed on "
                                  "arguments or attributes whose type is "
                                  "DOMString",
                                  [self.location])
            if isDictionaryMember:
                raise WebIDLError("[TreatNullAs] is not allowed for "
                                  "dictionary members", [self.location])
            if value != 'EmptyString':
                raise WebIDLError("[TreatNullAs] must take the identifier "
                                  "'EmptyString', not '%s'" % value,
                                  [self.location])
            self.treatNullAs = value
        return remaining
class IDLObjectWithScope(IDLObjectWithIdentifier, IDLScope):
    """An identified IDL object that is itself a naming scope."""

    def __init__(self, location, parentScope, identifier):
        assert isinstance(identifier, IDLUnresolvedIdentifier)

        # Resolve our own identifier first, then initialize the scope
        # machinery with the now-resolved identifier.
        IDLObjectWithIdentifier.__init__(self, location, parentScope, identifier)
        IDLScope.__init__(self, location, parentScope, self.identifier)
class IDLIdentifierPlaceholder(IDLObjectWithIdentifier):
    """A forward reference to an identifier, resolved later in finish()."""

    def __init__(self, location, identifier):
        assert isinstance(identifier, IDLUnresolvedIdentifier)
        IDLObjectWithIdentifier.__init__(self, location, None, identifier)

    def finish(self, scope):
        """Resolve the placeholder in `scope`; raise WebIDLError if the
        named type was never defined."""
        try:
            scope._lookupIdentifier(self.identifier)
        except Exception:
            # Fix: was a bare `except:`, which would also trap
            # KeyboardInterrupt/SystemExit.  _lookupIdentifier raises
            # KeyError (dict lookup) for unknown names.
            raise WebIDLError("Unresolved type '%s'." % self.identifier,
                              [self.location])

        obj = self.identifier.resolve(scope, None)
        return scope.lookupIdentifier(obj)
class IDLExposureMixins():
    """Mixin tracking in which globals an IDL construct is exposed."""

    def __init__(self, location):
        # _exposureGlobalNames are the global names listed in our [Exposed]
        # extended attribute; exposureSet is the exposure set as defined in
        # the Web IDL spec and contains interface names.
        self._exposureGlobalNames = set()
        self.exposureSet = set()
        self._location = location
        self._globalScope = None

    def finish(self, scope):
        assert scope.parentScope is None
        self._globalScope = scope

        # Verify that our [Exposed] value, if any, makes sense.
        for globalName in self._exposureGlobalNames:
            if globalName not in scope.globalNames:
                raise WebIDLError("Unknown [Exposed] value %s" % globalName,
                                  [self._location])

        # No [Exposed] at all means "exposed in the primary global".
        if len(self._exposureGlobalNames) == 0:
            self._exposureGlobalNames.add(scope.primaryGlobalName)

        globalNameSetToExposureSet(scope, self._exposureGlobalNames,
                                   self.exposureSet)

    def isExposedInWindow(self):
        return 'Window' in self.exposureSet

    def isExposedInAnyWorker(self):
        return len(self.getWorkerExposureSet()) > 0

    def isExposedInSystemGlobals(self):
        return 'BackstagePass' in self.exposureSet

    def isExposedInSomeButNotAllWorkers(self):
        """
        True when the [Exposed] set covers some worker globals but not all
        of them; exposure in Window or System globals does not affect the
        result.
        """
        if not self.isExposedInAnyWorker():
            return False
        # NOTE(review): this reads self.parentScope (provided by a sibling
        # base class in the combined classes), unlike getWorkerExposureSet
        # which uses self._globalScope — confirm this asymmetry is intended.
        workerScopes = self.parentScope.globalNameMapping["Worker"]
        return len(workerScopes.difference(self.exposureSet)) > 0

    def getWorkerExposureSet(self):
        workerScopes = self._globalScope.globalNameMapping["Worker"]
        return workerScopes.intersection(self.exposureSet)
class IDLExternalInterface(IDLObjectWithIdentifier, IDLExposureMixins):
    """External interfaces are unsupported in Servo; construction always
    raises WebIDLError."""

    def __init__(self, location, parentScope, identifier):
        # Fix: no base-class __init__ ever runs here, so self.location
        # does not exist; referencing it raised AttributeError instead of
        # the intended WebIDLError.  Use the `location` parameter directly.
        raise WebIDLError("Servo does not support external interfaces.",
                          [location])
class IDLPartialInterface(IDLObject):
    """A `partial interface` declaration.

    Registers itself with its non-partial interface, which merges in the
    partial's members and propagated extended attributes during finish().
    """

    def __init__(self, location, name, members, nonPartialInterface):
        assert isinstance(name, IDLUnresolvedIdentifier)

        IDLObject.__init__(self, location)
        self.identifier = name
        self.members = members
        # propagatedExtendedAttrs are the ones that should get
        # propagated to our non-partial interface.
        self.propagatedExtendedAttrs = []
        self._nonPartialInterface = nonPartialInterface
        self._finished = False
        nonPartialInterface.addPartialInterface(self)

    def addExtendedAttributes(self, attrs):
        for attr in attrs:
            name = attr.identifier()

            if name in ("Constructor", "NamedConstructor"):
                # Constructors are forwarded to the non-partial interface.
                self.propagatedExtendedAttrs.append(attr)
                continue
            if name == "Exposed":
                # [Exposed] just gets propagated to all our members.
                for member in self.members:
                    if len(member._exposureGlobalNames) != 0:
                        raise WebIDLError("[Exposed] specified on both a "
                                          "partial interface member and on the "
                                          "partial interface itself",
                                          [member.location, attr.location])
                    member.addExtendedAttributes([attr])
                continue
            raise WebIDLError("Unknown extended attribute %s on partial "
                              "interface" % name,
                              [attr.location])

    def finish(self, scope):
        if self._finished:
            return
        self._finished = True
        # Need to make sure our non-partial interface gets finished so it can
        # report cases when we only have partial interfaces.
        self._nonPartialInterface.finish(scope)

    def validate(self):
        pass
def convertExposedAttrToGlobalNameSet(exposedAttr, targetSet):
    """Fill `targetSet` with the global names from an [Exposed] attribute.

    [Exposed=Window] contributes a single value; [Exposed=(A,B)]
    contributes each listed argument.  `targetSet` must start out empty.
    """
    assert len(targetSet) == 0
    if not exposedAttr.hasValue():
        assert exposedAttr.hasArgs()
        targetSet.update(exposedAttr.args())
    else:
        targetSet.add(exposedAttr.value())
def globalNameSetToExposureSet(globalScope, nameSet, exposureSet):
    """Expand global *names* into the set of interfaces exposing them.

    Each name in `nameSet` is looked up in the scope's globalNameMapping
    and the resulting interface names are accumulated into `exposureSet`.
    """
    for globalName in nameSet:
        exposureSet.update(globalScope.globalNameMapping[globalName])
class IDLInterface(IDLObjectWithScope, IDLExposureMixins):
def __init__(self, location, parentScope, name, parent, members,
isKnownNonPartial):
assert isinstance(parentScope, IDLScope)
assert isinstance(name, IDLUnresolvedIdentifier)
assert isKnownNonPartial or not parent
assert isKnownNonPartial or len(members) == 0
self.parent = None
self._callback = False
self._finished = False
self.members = []
self.maplikeOrSetlike = None
self._partialInterfaces = []
self._extendedAttrDict = {}
# namedConstructors needs deterministic ordering because bindings code
# outputs the constructs in the order that namedConstructors enumerates
# them.
self.namedConstructors = list()
self.implementedInterfaces = set()
self._consequential = False
self._isKnownNonPartial = False
# self.interfacesBasedOnSelf is the set of interfaces that inherit from
# self or have self as a consequential interface, including self itself.
# Used for distinguishability checking.
self.interfacesBasedOnSelf = set([self])
# self.interfacesImplementingSelf is the set of interfaces that directly
# have self as a consequential interface
self.interfacesImplementingSelf = set()
self._hasChildInterfaces = False
self._isOnGlobalProtoChain = False
# Tracking of the number of reserved slots we need for our
# members and those of ancestor interfaces.
self.totalMembersInSlots = 0
# Tracking of the number of own own members we have in slots
self._ownMembersInSlots = 0
IDLObjectWithScope.__init__(self, location, parentScope, name)
IDLExposureMixins.__init__(self, location)
if isKnownNonPartial:
self.setNonPartial(location, parent, members)
def __str__(self):
return "Interface '%s'" % self.identifier.name
def ctor(self):
identifier = IDLUnresolvedIdentifier(self.location, "constructor",
allowForbidden=True)
try:
return self._lookupIdentifier(identifier)
except:
return None
def resolveIdentifierConflict(self, scope, identifier, originalObject, newObject):
assert isinstance(scope, IDLScope)
assert isinstance(originalObject, IDLInterfaceMember)
assert isinstance(newObject, IDLInterfaceMember)
retval = IDLScope.resolveIdentifierConflict(self, scope, identifier,
originalObject, newObject)
# Might be a ctor, which isn't in self.members
if newObject in self.members:
self.members.remove(newObject)
return retval
def finish(self, scope):
if self._finished:
return
self._finished = True
if not self._isKnownNonPartial:
raise WebIDLError("Interface %s does not have a non-partial "
"declaration" % self.identifier.name,
[self.location])
IDLExposureMixins.finish(self, scope)
# Now go ahead and merge in our partial interfaces.
for partial in self._partialInterfaces:
partial.finish(scope)
self.addExtendedAttributes(partial.propagatedExtendedAttrs)
self.members.extend(partial.members)
# Generate maplike/setlike interface members. Since generated members
# need to be treated like regular interface members, do this before
# things like exposure setting.
for member in self.members:
if member.isMaplikeOrSetlike():
# Check that we only have one interface declaration (currently
# there can only be one maplike/setlike declaration per
# interface)
if self.maplikeOrSetlike:
raise WebIDLError("%s declaration used on "
"interface that already has %s "
"declaration" %
(member.maplikeOrSetlikeType,
self.maplikeOrSetlike.maplikeOrSetlikeType),
[self.maplikeOrSetlike.location,
member.location])
self.maplikeOrSetlike = member
# If we've got a maplike or setlike declaration, we'll be building all of
# our required methods in Codegen. Generate members now.
self.maplikeOrSetlike.expand(self.members, self.isJSImplemented())
# Now that we've merged in our partial interfaces, set the
# _exposureGlobalNames on any members that don't have it set yet. Note
# that any partial interfaces that had [Exposed] set have already set up
# _exposureGlobalNames on all the members coming from them, so this is
# just implementing the "members default to interface that defined them"
# and "partial interfaces default to interface they're a partial for"
# rules from the spec.
for m in self.members:
# If m, or the partial interface m came from, had [Exposed]
# specified, it already has a nonempty exposure global names set.
if len(m._exposureGlobalNames) == 0:
m._exposureGlobalNames.update(self._exposureGlobalNames)
assert not self.parent or isinstance(self.parent, IDLIdentifierPlaceholder)
parent = self.parent.finish(scope) if self.parent else None
if parent and isinstance(parent, IDLExternalInterface):
raise WebIDLError("%s inherits from %s which does not have "
"a definition" %
(self.identifier.name,
self.parent.identifier.name),
[self.location])
assert not parent or isinstance(parent, IDLInterface)
self.parent = parent
assert iter(self.members)
if self.parent:
self.parent.finish(scope)
self.parent._hasChildInterfaces = True
self.totalMembersInSlots = self.parent.totalMembersInSlots
# Interfaces with [Global] or [PrimaryGlobal] must not
# have anything inherit from them
if (self.parent.getExtendedAttribute("Global") or
self.parent.getExtendedAttribute("PrimaryGlobal")):
# Note: This is not a self.parent.isOnGlobalProtoChain() check
# because ancestors of a [Global] interface can have other
# descendants.
raise WebIDLError("[Global] interface has another interface "
"inheriting from it",
[self.location, self.parent.location])
# Make sure that we're not exposed in places where our parent is not
if not self.exposureSet.issubset(self.parent.exposureSet):
raise WebIDLError("Interface %s is exposed in globals where its "
"parent interface %s is not exposed." %
(self.identifier.name,
self.parent.identifier.name),
[self.location, self.parent.location])
# Callbacks must not inherit from non-callbacks or inherit from
# anything that has consequential interfaces.
# XXXbz Can non-callbacks inherit from callbacks? Spec issue pending.
# XXXbz Can callbacks have consequential interfaces? Spec issue pending
if self.isCallback():
if not self.parent.isCallback():
raise WebIDLError("Callback interface %s inheriting from "
"non-callback interface %s" %
(self.identifier.name,
self.parent.identifier.name),
[self.location, self.parent.location])
elif self.parent.isCallback():
raise WebIDLError("Non-callback interface %s inheriting from "
"callback interface %s" %
(self.identifier.name,
self.parent.identifier.name),
[self.location, self.parent.location])
# Interfaces which have interface objects can't inherit
# from [NoInterfaceObject] interfaces.
if (self.parent.getExtendedAttribute("NoInterfaceObject") and
not self.getExtendedAttribute("NoInterfaceObject")):
raise WebIDLError("Interface %s does not have "
"[NoInterfaceObject] but inherits from "
"interface %s which does" %
(self.identifier.name,
self.parent.identifier.name),
[self.location, self.parent.location])
for iface in self.implementedInterfaces:
iface.finish(scope)
cycleInGraph = self.findInterfaceLoopPoint(self)
if cycleInGraph:
raise WebIDLError("Interface %s has itself as ancestor or "
"implemented interface" % self.identifier.name,
[self.location, cycleInGraph.location])
if self.isCallback():
# "implements" should have made sure we have no
# consequential interfaces.
assert len(self.getConsequentialInterfaces()) == 0
# And that we're not consequential.
assert not self.isConsequential()
# Now resolve() and finish() our members before importing the
# ones from our implemented interfaces.
# resolve() will modify self.members, so we need to iterate
# over a copy of the member list here.
for member in list(self.members):
member.resolve(self)
for member in self.members:
member.finish(scope)
# Now that we've finished our members, which has updated their exposure
# sets, make sure they aren't exposed in places where we are not.
for member in self.members:
if not member.exposureSet.issubset(self.exposureSet):
raise WebIDLError("Interface member has larger exposure set "
"than the interface itself",
[member.location, self.location])
ctor = self.ctor()
if ctor is not None:
assert len(ctor._exposureGlobalNames) == 0
ctor._exposureGlobalNames.update(self._exposureGlobalNames)
ctor.finish(scope)
for ctor in self.namedConstructors:
assert len(ctor._exposureGlobalNames) == 0
ctor._exposureGlobalNames.update(self._exposureGlobalNames)
ctor.finish(scope)
# Make a copy of our member list, so things that implement us
# can get those without all the stuff we implement ourselves
# admixed.
self.originalMembers = list(self.members)
# Import everything from our consequential interfaces into
# self.members. Sort our consequential interfaces by name
# just so we have a consistent order.
for iface in sorted(self.getConsequentialInterfaces(),
cmp=cmp,
key=lambda x: x.identifier.name):
# Flag the interface as being someone's consequential interface
iface.setIsConsequentialInterfaceOf(self)
# Verify that we're not exposed somewhere where iface is not exposed
if not self.exposureSet.issubset(iface.exposureSet):
raise WebIDLError("Interface %s is exposed in globals where its "
"consequential interface %s is not exposed." %
(self.identifier.name, iface.identifier.name),
[self.location, iface.location])
# If we have a maplike or setlike, and the consequential interface
# also does, throw an error.
if iface.maplikeOrSetlike and self.maplikeOrSetlike:
raise WebIDLError("Maplike/setlike interface %s cannot have "
"maplike/setlike interface %s as a "
"consequential interface" %
(self.identifier.name,
iface.identifier.name),
[self.maplikeOrSetlike.location,
iface.maplikeOrSetlike.location])
additionalMembers = iface.originalMembers
for additionalMember in additionalMembers:
for member in self.members:
if additionalMember.identifier.name == member.identifier.name:
raise WebIDLError(
"Multiple definitions of %s on %s coming from 'implements' statements" %
(member.identifier.name, self),
[additionalMember.location, member.location])
self.members.extend(additionalMembers)
iface.interfacesImplementingSelf.add(self)
for ancestor in self.getInheritedInterfaces():
ancestor.interfacesBasedOnSelf.add(self)
if (ancestor.maplikeOrSetlike is not None and
self.maplikeOrSetlike is not None):
raise WebIDLError("Cannot have maplike/setlike on %s that "
"inherits %s, which is already "
"maplike/setlike" %
(self.identifier.name,
ancestor.identifier.name),
[self.maplikeOrSetlike.location,
ancestor.maplikeOrSetlike.location])
for ancestorConsequential in ancestor.getConsequentialInterfaces():
ancestorConsequential.interfacesBasedOnSelf.add(self)
# Deal with interfaces marked [Unforgeable], now that we have our full
# member list, except unforgeables pulled in from parents. We want to
# do this before we set "originatingInterface" on our unforgeable
# members.
if self.getExtendedAttribute("Unforgeable"):
# Check that the interface already has all the things the
# spec would otherwise require us to synthesize and is
# missing the ones we plan to synthesize.
if not any(m.isMethod() and m.isStringifier() for m in self.members):
raise WebIDLError("Unforgeable interface %s does not have a "
"stringifier" % self.identifier.name,
[self.location])
for m in self.members:
if ((m.isMethod() and m.isJsonifier()) or
m.identifier.name == "toJSON"):
raise WebIDLError("Unforgeable interface %s has a "
"jsonifier so we won't be able to add "
"one ourselves" % self.identifier.name,
[self.location, m.location])
if m.identifier.name == "valueOf" and not m.isStatic():
raise WebIDLError("Unforgeable interface %s has a valueOf "
"member so we won't be able to add one "
"ourselves" % self.identifier.name,
[self.location, m.location])
for member in self.members:
if ((member.isAttr() or member.isMethod()) and
member.isUnforgeable() and
not hasattr(member, "originatingInterface")):
member.originatingInterface = self
# Compute slot indices for our members before we pull in unforgeable
# members from our parent. Also, maplike/setlike declarations get a
# slot to hold their backing object.
for member in self.members:
if ((member.isAttr() and
(member.getExtendedAttribute("StoreInSlot") or
member.getExtendedAttribute("Cached"))) or
member.isMaplikeOrSetlike()):
member.slotIndex = self.totalMembersInSlots
self.totalMembersInSlots += 1
if member.getExtendedAttribute("StoreInSlot"):
self._ownMembersInSlots += 1
if self.parent:
# Make sure we don't shadow any of the [Unforgeable] attributes on
# our ancestor interfaces. We don't have to worry about
# consequential interfaces here, because those have already been
# imported into the relevant .members lists. And we don't have to
# worry about anything other than our parent, because it has already
# imported its ancestors unforgeable attributes into its member
# list.
for unforgeableMember in (member for member in self.parent.members if
(member.isAttr() or member.isMethod()) and
member.isUnforgeable()):
shadows = [m for m in self.members if
(m.isAttr() or m.isMethod()) and
not m.isStatic() and
m.identifier.name == unforgeableMember.identifier.name]
if len(shadows) != 0:
locs = [unforgeableMember.location] + [s.location for s
in shadows]
raise WebIDLError("Interface %s shadows [Unforgeable] "
"members of %s" %
(self.identifier.name,
ancestor.identifier.name),
locs)
# And now just stick it in our members, since we won't be
# inheriting this down the proto chain. If we really cared we
# could try to do something where we set up the unforgeable
# attributes/methods of ancestor interfaces, with their
# corresponding getters, on our interface, but that gets pretty
# complicated and seems unnecessary.
self.members.append(unforgeableMember)
# At this point, we have all of our members. If the current interface
# uses maplike/setlike, check for collisions anywhere in the current
# interface or higher in the inheritance chain.
if self.maplikeOrSetlike:
testInterface = self
isAncestor = False
while testInterface:
self.maplikeOrSetlike.checkCollisions(testInterface.members,
isAncestor)
isAncestor = True
testInterface = testInterface.parent
# Ensure that there's at most one of each {named,indexed}
# {getter,setter,creator,deleter}, at most one stringifier,
# and at most one legacycaller. Note that this last is not
# quite per spec, but in practice no one overloads
# legacycallers.
specialMembersSeen = {}
for member in self.members:
if not member.isMethod():
continue
if member.isGetter():
memberType = "getters"
elif member.isSetter():
memberType = "setters"
elif member.isCreator():
memberType = "creators"
elif member.isDeleter():
memberType = "deleters"
elif member.isStringifier():
memberType = "stringifiers"
elif member.isJsonifier():
memberType = "jsonifiers"
elif member.isLegacycaller():
memberType = "legacycallers"
else:
continue
if (memberType != "stringifiers" and memberType != "legacycallers" and
memberType != "jsonifiers"):
if member.isNamed():
memberType = "named " + memberType
else:
assert member.isIndexed()
memberType = "indexed " + memberType
if memberType in specialMembersSeen:
raise WebIDLError("Multiple " + memberType + " on %s" % (self),
[self.location,
specialMembersSeen[memberType].location,
member.location])
specialMembersSeen[memberType] = member
if self._isOnGlobalProtoChain:
# Make sure we have no named setters, creators, or deleters
for memberType in ["setter", "creator", "deleter"]:
memberId = "named " + memberType + "s"
if memberId in specialMembersSeen:
raise WebIDLError("Interface with [Global] has a named %s" %
memberType,
[self.location,
specialMembersSeen[memberId].location])
# Make sure we're not [OverrideBuiltins]
if self.getExtendedAttribute("OverrideBuiltins"):
raise WebIDLError("Interface with [Global] also has "
"[OverrideBuiltins]",
[self.location])
# Mark all of our ancestors as being on the global's proto chain too
parent = self.parent
while parent:
# Must not inherit from an interface with [OverrideBuiltins]
if parent.getExtendedAttribute("OverrideBuiltins"):
raise WebIDLError("Interface with [Global] inherits from "
"interface with [OverrideBuiltins]",
[self.location, parent.location])
parent._isOnGlobalProtoChain = True
parent = parent.parent
    def validate(self):
        """Post-finish consistency checks for this interface.

        Validates [Unforgeable] usage, per-member restrictions
        ([Replaceable], [PutForwards], [Alias]) and the conditional-exposure
        attributes ([Pref], [CheckAnyPermissions], [CheckAllPermissions]).
        Raises WebIDLError on any violation.
        """
        # We don't support consequential unforgeable interfaces. Need to check
        # this here, because in finish() an interface might not know yet that
        # it's consequential.
        if self.getExtendedAttribute("Unforgeable") and self.isConsequential():
            raise WebIDLError(
                "%s is an unforgeable consequential interface" %
                self.identifier.name,
                [self.location] +
                list(i.location for i in
                     (self.interfacesBasedOnSelf - {self})))
        # We also don't support inheriting from unforgeable interfaces.
        if self.getExtendedAttribute("Unforgeable") and self.hasChildInterfaces():
            locations = ([self.location] +
                         list(i.location for i in
                              self.interfacesBasedOnSelf if i.parent == self))
            raise WebIDLError("%s is an unforgeable ancestor interface" %
                              self.identifier.name,
                              locations)
        for member in self.members:
            member.validate()
            # [Replaceable] makes no sense without a prototype object.
            if self.isCallback() and member.getExtendedAttribute("Replaceable"):
                raise WebIDLError("[Replaceable] used on an attribute on "
                                  "interface %s which is a callback interface" %
                                  self.identifier.name,
                                  [self.location, member.location])
            # Check that PutForwards refers to another attribute and that no
            # cycles exist in forwarded assignments.
            if member.isAttr():
                iface = self
                attr = member
                putForwards = attr.getExtendedAttribute("PutForwards")
                if putForwards and self.isCallback():
                    raise WebIDLError("[PutForwards] used on an attribute "
                                      "on interface %s which is a callback "
                                      "interface" % self.identifier.name,
                                      [self.location, member.location])
                # Follow the PutForwards chain until it ends or revisits the
                # starting member (a cycle).
                # NOTE(review): "fowardAttr" below is a long-standing local
                # variable typo for "forwardAttr"; kept as-is here.
                while putForwards is not None:
                    forwardIface = attr.type.unroll().inner
                    fowardAttr = None
                    for forwardedMember in forwardIface.members:
                        if (not forwardedMember.isAttr() or
                            forwardedMember.identifier.name != putForwards[0]):
                            continue
                        if forwardedMember == member:
                            raise WebIDLError("Cycle detected in forwarded "
                                              "assignments for attribute %s on "
                                              "%s" %
                                              (member.identifier.name, self),
                                              [member.location])
                        fowardAttr = forwardedMember
                        break
                    if fowardAttr is None:
                        raise WebIDLError("Attribute %s on %s forwards to "
                                          "missing attribute %s" %
                                          (attr.identifier.name, iface, putForwards),
                                          [attr.location])
                    iface = forwardIface
                    attr = fowardAttr
                    putForwards = attr.getExtendedAttribute("PutForwards")
            # Check that the name of an [Alias] doesn't conflict with an
            # interface member.
            if member.isMethod():
                for alias in member.aliases:
                    if self.isOnGlobalProtoChain():
                        raise WebIDLError("[Alias] must not be used on a "
                                          "[Global] interface operation",
                                          [member.location])
                    if (member.getExtendedAttribute("Exposed") or
                        member.getExtendedAttribute("ChromeOnly") or
                        member.getExtendedAttribute("Pref") or
                        member.getExtendedAttribute("Func") or
                        member.getExtendedAttribute("AvailableIn") or
                        member.getExtendedAttribute("CheckAnyPermissions") or
                        member.getExtendedAttribute("CheckAllPermissions")):
                        raise WebIDLError("[Alias] must not be used on a "
                                          "conditionally exposed operation",
                                          [member.location])
                    if member.isStatic():
                        raise WebIDLError("[Alias] must not be used on a "
                                          "static operation",
                                          [member.location])
                    if member.isIdentifierLess():
                        raise WebIDLError("[Alias] must not be used on an "
                                          "identifierless operation",
                                          [member.location])
                    if member.isUnforgeable():
                        raise WebIDLError("[Alias] must not be used on an "
                                          "[Unforgeable] operation",
                                          [member.location])
                    for m in self.members:
                        if m.identifier.name == alias:
                            raise WebIDLError("[Alias=%s] has same name as "
                                              "interface member" % alias,
                                              [member.location, m.location])
                        if m.isMethod() and m != member and alias in m.aliases:
                            raise WebIDLError("duplicate [Alias=%s] definitions" %
                                              alias,
                                              [member.location, m.location])
        # [Pref]/[Check*Permissions] only make sense for primary-global-only
        # interfaces.
        if (self.getExtendedAttribute("Pref") and
            self._exposureGlobalNames != set([self.parentScope.primaryGlobalName])):
            raise WebIDLError("[Pref] used on an interface that is not %s-only" %
                              self.parentScope.primaryGlobalName,
                              [self.location])
        for attribute in ["CheckAnyPermissions", "CheckAllPermissions"]:
            if (self.getExtendedAttribute(attribute) and
                self._exposureGlobalNames != set([self.parentScope.primaryGlobalName])):
                raise WebIDLError("[%s] used on an interface that is "
                                  "not %s-only" %
                                  (attribute, self.parentScope.primaryGlobalName),
                                  [self.location])
        # Conditional exposure makes no sense for interfaces with no
        # interface object, unless they're navigator properties.
        if (self.isExposedConditionally() and
            not self.hasInterfaceObject() and
            not self.getNavigatorProperty()):
            raise WebIDLError("Interface with no interface object is "
                              "exposed conditionally",
                              [self.location])
def isInterface(self):
return True
def isExternal(self):
return False
def setIsConsequentialInterfaceOf(self, other):
self._consequential = True
self.interfacesBasedOnSelf.add(other)
def isConsequential(self):
return self._consequential
def setCallback(self, value):
self._callback = value
def isCallback(self):
return self._callback
def isSingleOperationInterface(self):
assert self.isCallback() or self.isJSImplemented()
return (
# JS-implemented things should never need the
# this-handling weirdness of single-operation interfaces.
not self.isJSImplemented() and
# Not inheriting from another interface
not self.parent and
# No consequential interfaces
len(self.getConsequentialInterfaces()) == 0 and
# No attributes of any kinds
not any(m.isAttr() for m in self.members) and
# There is at least one regular operation, and all regular
# operations have the same identifier
len(set(m.identifier.name for m in self.members if
m.isMethod() and not m.isStatic())) == 1)
def inheritanceDepth(self):
depth = 0
parent = self.parent
while parent:
depth = depth + 1
parent = parent.parent
return depth
def hasConstants(self):
return any(m.isConst() for m in self.members)
def hasInterfaceObject(self):
if self.isCallback():
return self.hasConstants()
return not hasattr(self, "_noInterfaceObject")
def hasInterfacePrototypeObject(self):
return not self.isCallback() and self.getUserData('hasConcreteDescendant', False)
    def addExtendedAttributes(self, attrs):
        """
        Process and record this interface's extended attributes.

        Handles the specially-treated attributes (constructor forms,
        [NoInterfaceObject], [Global]/[PrimaryGlobal], [ArrayClass],
        [ExceptionClass], [Exposed]), validates argument arity for the
        known no-argument and string-valued attributes, and stores every
        attribute in self._extendedAttrDict.  Raises WebIDLError on misuse
        or on an unknown attribute.
        """
        for attr in attrs:
            identifier = attr.identifier()
            # Special cased attrs
            # NOTE: the first two checks are both plain `if`s, but each one
            # raises, so the following elif-chain is still reached only for
            # other identifiers.
            if identifier == "TreatNonCallableAsNull":
                raise WebIDLError("TreatNonCallableAsNull cannot be specified on interfaces",
                                  [attr.location, self.location])
            if identifier == "TreatNonObjectAsNull":
                raise WebIDLError("TreatNonObjectAsNull cannot be specified on interfaces",
                                  [attr.location, self.location])
            elif identifier == "NoInterfaceObject":
                if not attr.noArguments():
                    raise WebIDLError("[NoInterfaceObject] must take no arguments",
                                      [attr.location])
                if self.ctor():
                    raise WebIDLError("Constructor and NoInterfaceObject are incompatible",
                                      [self.location])
                self._noInterfaceObject = True
            elif identifier == "Constructor" or identifier == "NamedConstructor" or identifier == "ChromeConstructor":
                if identifier == "Constructor" and not self.hasInterfaceObject():
                    raise WebIDLError(str(identifier) + " and NoInterfaceObject are incompatible",
                                      [self.location])
                if identifier == "NamedConstructor" and not attr.hasValue():
                    raise WebIDLError("NamedConstructor must either take an identifier or take a named argument list",
                                      [attr.location])
                if identifier == "ChromeConstructor" and not self.hasInterfaceObject():
                    raise WebIDLError(str(identifier) + " and NoInterfaceObject are incompatible",
                                      [self.location])
                args = attr.args() if attr.hasArgs() else []
                # Promise's constructor returns Promise<any>.
                if self.identifier.name == "Promise":
                    promiseType = BuiltinTypes[IDLBuiltinType.Types.any]
                else:
                    promiseType = None
                retType = IDLWrapperType(self.location, self, promiseType)
                if identifier == "Constructor" or identifier == "ChromeConstructor":
                    name = "constructor"
                    allowForbidden = True
                else:
                    name = attr.value()
                    allowForbidden = False
                methodIdentifier = IDLUnresolvedIdentifier(self.location, name,
                                                           allowForbidden=allowForbidden)
                # Constructors are modeled as static methods returning us.
                method = IDLMethod(self.location, methodIdentifier, retType,
                                   args, static=True)
                # Constructors are always NewObject and are always
                # assumed to be able to throw (since there's no way to
                # indicate otherwise) and never have any other
                # extended attributes.
                method.addExtendedAttributes(
                    [IDLExtendedAttribute(self.location, ("NewObject",)),
                     IDLExtendedAttribute(self.location, ("Throws",))])
                if identifier == "ChromeConstructor":
                    method.addExtendedAttributes(
                        [IDLExtendedAttribute(self.location, ("ChromeOnly",))])
                if identifier == "Constructor" or identifier == "ChromeConstructor":
                    method.resolve(self)
                else:
                    # We need to detect conflicts for NamedConstructors across
                    # interfaces. We first call resolve on the parentScope,
                    # which will merge all NamedConstructors with the same
                    # identifier across interfaces as overloads.
                    method.resolve(self.parentScope)
                    # Then we look up the identifier on the parentScope. If the
                    # result is the same as the method we're adding then it
                    # hasn't been added as an overload and it's the first time
                    # we've encountered a NamedConstructor with that identifier.
                    # If the result is not the same as the method we're adding
                    # then it has been added as an overload and we need to check
                    # whether the result is actually one of our existing
                    # NamedConstructors.
                    newMethod = self.parentScope.lookupIdentifier(method.identifier)
                    if newMethod == method:
                        self.namedConstructors.append(method)
                    elif newMethod not in self.namedConstructors:
                        raise WebIDLError("NamedConstructor conflicts with a NamedConstructor of a different interface",
                                          [method.location, newMethod.location])
            elif (identifier == "ArrayClass"):
                if not attr.noArguments():
                    raise WebIDLError("[ArrayClass] must take no arguments",
                                      [attr.location])
                if self.parent:
                    raise WebIDLError("[ArrayClass] must not be specified on "
                                      "an interface with inherited interfaces",
                                      [attr.location, self.location])
            elif (identifier == "ExceptionClass"):
                if not attr.noArguments():
                    raise WebIDLError("[ExceptionClass] must take no arguments",
                                      [attr.location])
                if self.parent:
                    raise WebIDLError("[ExceptionClass] must not be specified on "
                                      "an interface with inherited interfaces",
                                      [attr.location, self.location])
            elif identifier == "Global":
                # [Global] / [Global=name] / [Global=(name1,name2)]; with no
                # value the interface's own name is used.
                if attr.hasValue():
                    self.globalNames = [attr.value()]
                elif attr.hasArgs():
                    self.globalNames = attr.args()
                else:
                    self.globalNames = [self.identifier.name]
                self.parentScope.globalNames.update(self.globalNames)
                for globalName in self.globalNames:
                    self.parentScope.globalNameMapping[globalName].add(self.identifier.name)
                self._isOnGlobalProtoChain = True
            elif identifier == "PrimaryGlobal":
                if not attr.noArguments():
                    raise WebIDLError("[PrimaryGlobal] must take no arguments",
                                      [attr.location])
                # Only one interface per scope may be the primary global.
                if self.parentScope.primaryGlobalAttr is not None:
                    raise WebIDLError(
                        "[PrimaryGlobal] specified twice",
                        [attr.location,
                         self.parentScope.primaryGlobalAttr.location])
                self.parentScope.primaryGlobalAttr = attr
                self.parentScope.primaryGlobalName = self.identifier.name
                self.parentScope.globalNames.add(self.identifier.name)
                self.parentScope.globalNameMapping[self.identifier.name].add(self.identifier.name)
                self._isOnGlobalProtoChain = True
            elif (identifier == "NeedResolve" or
                  identifier == "OverrideBuiltins" or
                  identifier == "ChromeOnly" or
                  identifier == "Unforgeable" or
                  identifier == "UnsafeInPrerendering" or
                  identifier == "LegacyEventInit" or
                  identifier == "Abstract"):
                # Known extended attributes that do not take values
                if not attr.noArguments():
                    raise WebIDLError("[%s] must take no arguments" % identifier,
                                      [attr.location])
            elif identifier == "Exposed":
                convertExposedAttrToGlobalNameSet(attr,
                                                  self._exposureGlobalNames)
            elif (identifier == "Pref" or
                  identifier == "JSImplementation" or
                  identifier == "HeaderFile" or
                  identifier == "NavigatorProperty" or
                  identifier == "AvailableIn" or
                  identifier == "Func" or
                  identifier == "CheckAnyPermissions" or
                  identifier == "CheckAllPermissions"):
                # Known extended attributes that take a string value
                if not attr.hasValue():
                    raise WebIDLError("[%s] must have a value" % identifier,
                                      [attr.location])
            else:
                raise WebIDLError("Unknown extended attribute %s on interface" % identifier,
                                  [attr.location])
            # Record the attribute: its list value if non-empty, else True as
            # a plain presence marker.
            attrlist = attr.listValue()
            self._extendedAttrDict[identifier] = attrlist if len(attrlist) else True
def addImplementedInterface(self, implementedInterface):
assert(isinstance(implementedInterface, IDLInterface))
self.implementedInterfaces.add(implementedInterface)
def getInheritedInterfaces(self):
"""
Returns a list of the interfaces this interface inherits from
(not including this interface itself). The list is in order
from most derived to least derived.
"""
assert(self._finished)
if not self.parent:
return []
parentInterfaces = self.parent.getInheritedInterfaces()
parentInterfaces.insert(0, self.parent)
return parentInterfaces
def getConsequentialInterfaces(self):
assert(self._finished)
# The interfaces we implement directly
consequentialInterfaces = set(self.implementedInterfaces)
# And their inherited interfaces
for iface in self.implementedInterfaces:
consequentialInterfaces |= set(iface.getInheritedInterfaces())
# And now collect up the consequential interfaces of all of those
temp = set()
for iface in consequentialInterfaces:
temp |= iface.getConsequentialInterfaces()
return consequentialInterfaces | temp
def findInterfaceLoopPoint(self, otherInterface):
"""
Finds an interface, amongst our ancestors and consequential interfaces,
that inherits from otherInterface or implements otherInterface
directly. If there is no such interface, returns None.
"""
if self.parent:
if self.parent == otherInterface:
return self
loopPoint = self.parent.findInterfaceLoopPoint(otherInterface)
if loopPoint:
return loopPoint
if otherInterface in self.implementedInterfaces:
return self
for iface in self.implementedInterfaces:
loopPoint = iface.findInterfaceLoopPoint(otherInterface)
if loopPoint:
return loopPoint
return None
def getExtendedAttribute(self, name):
return self._extendedAttrDict.get(name, None)
def setNonPartial(self, location, parent, members):
assert not parent or isinstance(parent, IDLIdentifierPlaceholder)
if self._isKnownNonPartial:
raise WebIDLError("Two non-partial definitions for the "
"same interface",
[location, self.location])
self._isKnownNonPartial = True
# Now make it look like we were parsed at this new location, since
# that's the place where the interface is "really" defined
self.location = location
assert not self.parent
self.parent = parent
# Put the new members at the beginning
self.members = members + self.members
def addPartialInterface(self, partial):
assert self.identifier.name == partial.identifier.name
self._partialInterfaces.append(partial)
def getJSImplementation(self):
classId = self.getExtendedAttribute("JSImplementation")
if not classId:
return classId
assert isinstance(classId, list)
assert len(classId) == 1
return classId[0]
def isJSImplemented(self):
return bool(self.getJSImplementation())
def getNavigatorProperty(self):
naviProp = self.getExtendedAttribute("NavigatorProperty")
if not naviProp:
return None
assert len(naviProp) == 1
assert isinstance(naviProp, list)
assert len(naviProp[0]) != 0
return naviProp[0]
def hasChildInterfaces(self):
return self._hasChildInterfaces
def isOnGlobalProtoChain(self):
return self._isOnGlobalProtoChain
def _getDependentObjects(self):
deps = set(self.members)
deps.update(self.implementedInterfaces)
if self.parent:
deps.add(self.parent)
return deps
def hasMembersInSlots(self):
return self._ownMembersInSlots != 0
def isExposedConditionally(self):
return (self.getExtendedAttribute("Pref") or
self.getExtendedAttribute("ChromeOnly") or
self.getExtendedAttribute("Func") or
self.getExtendedAttribute("AvailableIn") or
self.getExtendedAttribute("CheckAnyPermissions") or
self.getExtendedAttribute("CheckAllPermissions"))
class IDLDictionary(IDLObjectWithScope):
    """
    A WebIDL dictionary definition: an ordered set of members, possibly
    inheriting from a parent dictionary.  finish() resolves the parent and
    member types; validate() rejects self-containing dictionaries.
    """
    def __init__(self, location, parentScope, name, parent, members):
        assert isinstance(parentScope, IDLScope)
        assert isinstance(name, IDLUnresolvedIdentifier)
        assert not parent or isinstance(parent, IDLIdentifierPlaceholder)
        # `parent` stays an IDLIdentifierPlaceholder until finish() runs.
        self.parent = parent
        self._finished = False
        self.members = list(members)
        IDLObjectWithScope.__init__(self, location, parentScope, name)
    def __str__(self):
        return "Dictionary '%s'" % self.identifier.name
    def isDictionary(self):
        return True
    def canBeEmpty(self):
        """
        Returns true if this dictionary can be empty (that is, it has no
        required members and neither do any of its ancestors).
        """
        return (all(member.optional for member in self.members) and
                (not self.parent or self.parent.canBeEmpty()))
    def finish(self, scope):
        # Resolve the parent placeholder and the member types, then check
        # for name collisions with inherited members.  Idempotent.
        if self._finished:
            return
        self._finished = True
        if self.parent:
            assert isinstance(self.parent, IDLIdentifierPlaceholder)
            oldParent = self.parent
            self.parent = self.parent.finish(scope)
            if not isinstance(self.parent, IDLDictionary):
                raise WebIDLError("Dictionary %s has parent that is not a dictionary" %
                                  self.identifier.name,
                                  [oldParent.location, self.parent.location])
            # Make sure the parent resolves all its members before we start
            # looking at them.
            self.parent.finish(scope)
        for member in self.members:
            member.resolve(self)
            if not member.isComplete():
                member.complete(scope)
                assert member.type.isComplete()
        # Members of a dictionary are sorted in lexicographic order
        self.members.sort(cmp=cmp, key=lambda x: x.identifier.name)
        # Collect all inherited members, guarding against inheritance cycles.
        inheritedMembers = []
        ancestor = self.parent
        while ancestor:
            if ancestor == self:
                raise WebIDLError("Dictionary %s has itself as an ancestor" %
                                  self.identifier.name,
                                  [self.identifier.location])
            inheritedMembers.extend(ancestor.members)
            ancestor = ancestor.parent
        # Catch name duplication
        for inheritedMember in inheritedMembers:
            for member in self.members:
                if member.identifier.name == inheritedMember.identifier.name:
                    raise WebIDLError("Dictionary %s has two members with name %s" %
                                      (self.identifier.name, member.identifier.name),
                                      [member.location, inheritedMember.location])
    def validate(self):
        # Reject dictionaries that (transitively) contain themselves as a
        # member type, which would be impossible to represent.
        def typeContainsDictionary(memberType, dictionary):
            """
            Returns a tuple whose:
                - First element is a Boolean value indicating whether
                  memberType contains dictionary.
                - Second element is:
                    A list of locations that leads from the type that was passed in
                    the memberType argument, to the dictionary being validated,
                    if the boolean value in the first element is True.
                    None, if the boolean value in the first element is False.
            """
            # Unwrap nullable/array/sequence/MozMap wrappers first.
            if (memberType.nullable() or
                memberType.isArray() or
                memberType.isSequence() or
                memberType.isMozMap()):
                return typeContainsDictionary(memberType.inner, dictionary)
            if memberType.isDictionary():
                if memberType.inner == dictionary:
                    return (True, [memberType.location])
                (contains, locations) = dictionaryContainsDictionary(memberType.inner,
                                                                     dictionary)
                if contains:
                    return (True, [memberType.location] + locations)
            if memberType.isUnion():
                for member in memberType.flatMemberTypes:
                    (contains, locations) = typeContainsDictionary(member, dictionary)
                    if contains:
                        return (True, locations)
            return (False, None)
        def dictionaryContainsDictionary(dictMember, dictionary):
            # Check dictMember's own members, then walk up its parent chain.
            for member in dictMember.members:
                (contains, locations) = typeContainsDictionary(member.type, dictionary)
                if contains:
                    return (True, [member.location] + locations)
            if dictMember.parent:
                if dictMember.parent == dictionary:
                    return (True, [dictMember.location])
                else:
                    (contains, locations) = dictionaryContainsDictionary(dictMember.parent, dictionary)
                    if contains:
                        return (True, [dictMember.location] + locations)
            return (False, None)
        for member in self.members:
            if member.type.isDictionary() and member.type.nullable():
                raise WebIDLError("Dictionary %s has member with nullable "
                                  "dictionary type" % self.identifier.name,
                                  [member.location])
            (contains, locations) = typeContainsDictionary(member.type, self)
            if contains:
                raise WebIDLError("Dictionary %s has member with itself as type." %
                                  self.identifier.name,
                                  [member.location] + locations)
    def module(self):
        # Binding module name, derived from the defining .webidl filename.
        return self.location.filename().split('/')[-1].split('.webidl')[0] + 'Binding'
    def addExtendedAttributes(self, attrs):
        # Dictionaries accept no extended attributes.
        assert len(attrs) == 0
    def _getDependentObjects(self):
        # Our members, plus our parent dictionary if we have one.
        deps = set(self.members)
        if (self.parent):
            deps.add(self.parent)
        return deps
class IDLEnum(IDLObjectWithIdentifier):
    """A WebIDL enum: a named, fixed set of distinct string values."""
    def __init__(self, location, parentScope, name, values):
        assert isinstance(parentScope, IDLScope)
        assert isinstance(name, IDLUnresolvedIdentifier)
        # Reject duplicate enumeration strings up front.
        if len(set(values)) != len(values):
            raise WebIDLError("Enum %s has multiple identical strings" % name.name,
                              [location])
        IDLObjectWithIdentifier.__init__(self, location, parentScope, name)
        self._values = values
    def values(self):
        """Return the enum's string values, in declaration order."""
        return self._values
    def finish(self, scope):
        # Enums have nothing to resolve.
        pass
    def validate(self):
        pass
    def isEnum(self):
        return True
    def addExtendedAttributes(self, attrs):
        # Enums accept no extended attributes.
        assert len(attrs) == 0
    def _getDependentObjects(self):
        return set()
class IDLType(IDLObject):
    """
    Base class for all WebIDL types.  Exposes the full is*() predicate
    surface (subclasses override the predicates that apply to them) and
    the Tags enumeration used to switch over concrete types.
    """
    Tags = enum(
        # The integer types
        'int8',
        'uint8',
        'int16',
        'uint16',
        'int32',
        'uint32',
        'int64',
        'uint64',
        # Additional primitive types
        'bool',
        'unrestricted_float',
        'float',
        'unrestricted_double',
        # "double" last primitive type to match IDLBuiltinType
        'double',
        # Other types
        'any',
        'domstring',
        'bytestring',
        'usvstring',
        'object',
        'date',
        'void',
        # Funny stuff
        'interface',
        'dictionary',
        'enum',
        'callback',
        'union',
        'sequence',
        'mozmap',
        'array'
    )
    def __init__(self, location, name):
        IDLObject.__init__(self, location)
        self.name = name
        self.builtin = False
    def __eq__(self, other):
        # NOTE: when `other` is falsy this returns `other` itself (a falsy
        # non-bool), which only matters in boolean contexts.
        return other and self.builtin == other.builtin and self.name == other.name
    def __ne__(self, other):
        return not self == other
    def __str__(self):
        return str(self.name)
    def isType(self):
        return True
    def nullable(self):
        return False
    def isPrimitive(self):
        return False
    def isBoolean(self):
        return False
    def isNumeric(self):
        return False
    def isString(self):
        return False
    def isByteString(self):
        return False
    def isDOMString(self):
        return False
    def isUSVString(self):
        return False
    def isVoid(self):
        # The base class keys this off the type name rather than a tag.
        return self.name == "Void"
    def isSequence(self):
        return False
    def isMozMap(self):
        return False
    def isArray(self):
        return False
    def isArrayBuffer(self):
        return False
    def isArrayBufferView(self):
        return False
    def isSharedArrayBuffer(self):
        return False
    def isSharedArrayBufferView(self):
        return False
    def isTypedArray(self):
        return False
    def isSharedTypedArray(self):
        return False
    def isCallbackInterface(self):
        return False
    def isNonCallbackInterface(self):
        return False
    def isGeckoInterface(self):
        """ Returns a boolean indicating whether this type is an 'interface'
            type that is implemented in Gecko. At the moment, this returns
            true for all interface types that are not types from the TypedArray
            spec."""
        return self.isInterface() and not self.isSpiderMonkeyInterface()
    def isSpiderMonkeyInterface(self):
        """ Returns a boolean indicating whether this type is an 'interface'
            type that is implemented in Spidermonkey.  At the moment, this
            only returns true for the types from the TypedArray spec. """
        return self.isInterface() and (self.isArrayBuffer() or
                                       self.isArrayBufferView() or
                                       self.isSharedArrayBuffer() or
                                       self.isSharedArrayBufferView() or
                                       self.isTypedArray() or
                                       self.isSharedTypedArray())
    def isDictionary(self):
        return False
    def isInterface(self):
        return False
    def isAny(self):
        return self.tag() == IDLType.Tags.any
    def isDate(self):
        return self.tag() == IDLType.Tags.date
    def isObject(self):
        return self.tag() == IDLType.Tags.object
    def isPromise(self):
        return False
    def isComplete(self):
        return True
    def includesRestrictedFloat(self):
        return False
    def isFloat(self):
        return False
    def isUnrestricted(self):
        # Should only call this on float types
        # NOTE: the base implementation implicitly returns None; float
        # subclasses are expected to override with a real answer.
        assert self.isFloat()
    def isSerializable(self):
        return False
    def tag(self):
        assert False  # Override me!
    def treatNonCallableAsNull(self):
        # Only meaningful for (nullable) callback types.
        assert self.tag() == IDLType.Tags.callback
        return self.nullable() and self.inner.callback._treatNonCallableAsNull
    def treatNonObjectAsNull(self):
        assert self.tag() == IDLType.Tags.callback
        return self.nullable() and self.inner.callback._treatNonObjectAsNull
    def addExtendedAttributes(self, attrs):
        # Plain types accept no extended attributes.
        assert len(attrs) == 0
    def resolveType(self, parentScope):
        # No-op for already-resolved types; wrapper types override.
        pass
    def unroll(self):
        # Strip wrappers (nullable/sequence/...); the base type is itself.
        return self
    def isDistinguishableFrom(self, other):
        raise TypeError("Can't tell whether a generic type is or is not "
                        "distinguishable from other things")
    def isExposedInAllOf(self, exposureSet):
        return True
class IDLUnresolvedType(IDLType):
    """
    Unresolved types are interface types whose identifier has not yet been
    looked up in a scope; complete() resolves the name to the real type.
    """
    def __init__(self, location, name, promiseInnerType=None):
        IDLType.__init__(self, location, name)
        # Inner type for Promise<T>; completed lazily in complete().
        self._promiseInnerType = promiseInnerType
    def isComplete(self):
        return False
    def complete(self, scope):
        """
        Resolve self.name in `scope` and return the completed type: a
        typedef's completed inner type, an IDLCallbackType for a callback,
        or an IDLWrapperType for anything else.

        Raises WebIDLError when the name cannot be resolved.
        """
        obj = None
        try:
            obj = scope._lookupIdentifier(self.name)
        except Exception:
            # Was a bare `except:`; narrowed so KeyboardInterrupt/SystemExit
            # are not swallowed.  A lookup failure just means the name is
            # unknown in this scope.
            raise WebIDLError("Unresolved type '%s'." % self.name,
                              [self.location])
        assert obj
        if obj.isType():
            # Debug aid before the assertion below fires.
            # (Was a Python-2-only `print` statement; this form works on
            # both Python 2 and 3.)
            print(obj)
        assert not obj.isType()
        if obj.isTypedef():
            assert self.name.name == obj.identifier.name
            typedefType = IDLTypedefType(self.location, obj.innerType,
                                         obj.identifier)
            assert not typedefType.isComplete()
            return typedefType.complete(scope)
        elif obj.isCallback() and not obj.isInterface():
            assert self.name.name == obj.identifier.name
            return IDLCallbackType(self.location, obj)
        # Complete the Promise inner type, if any, before wrapping.
        if self._promiseInnerType and not self._promiseInnerType.isComplete():
            self._promiseInnerType = self._promiseInnerType.complete(scope)
        name = self.name.resolve(scope, None)
        return IDLWrapperType(self.location, obj, self._promiseInnerType)
    def isDistinguishableFrom(self, other):
        raise TypeError("Can't tell whether an unresolved type is or is not "
                        "distinguishable from other things")
class IDLNullableType(IDLType):
def __init__(self, location, innerType):
assert not innerType.isVoid()
assert not innerType == BuiltinTypes[IDLBuiltinType.Types.any]
name = innerType.name
if innerType.isComplete():
name += "OrNull"
IDLType.__init__(self, location, name)
self.inner = innerType
self.builtin = False
    def __eq__(self, other):
        # Nullable types compare equal iff their inner types compare equal.
        return isinstance(other, IDLNullableType) and self.inner == other.inner
def __str__(self):
return self.inner.__str__() + "OrNull"
def nullable(self):
return True
    def isCallback(self):
        # Forwarded to the wrapped (inner) type.
        return self.inner.isCallback()
    def isPrimitive(self):
        # Forwarded to the wrapped (inner) type.
        return self.inner.isPrimitive()
    def isBoolean(self):
        # Forwarded to the wrapped (inner) type.
        return self.inner.isBoolean()
    def isNumeric(self):
        # Forwarded to the wrapped (inner) type.
        return self.inner.isNumeric()
    def isString(self):
        # Forwarded to the wrapped (inner) type.
        return self.inner.isString()
    def isByteString(self):
        # Forwarded to the wrapped (inner) type.
        return self.inner.isByteString()
    def isDOMString(self):
        # Forwarded to the wrapped (inner) type.
        return self.inner.isDOMString()
    def isUSVString(self):
        # Forwarded to the wrapped (inner) type.
        return self.inner.isUSVString()
    def isFloat(self):
        # Forwarded to the wrapped (inner) type.
        return self.inner.isFloat()
    def isUnrestricted(self):
        # Forwarded to the wrapped (inner) type (only valid on float types).
        return self.inner.isUnrestricted()
    def includesRestrictedFloat(self):
        # Forwarded to the wrapped (inner) type.
        return self.inner.includesRestrictedFloat()
    def isInteger(self):
        # Forwarded to the wrapped (inner) type.
        return self.inner.isInteger()
def isVoid(self):
return False
def isSequence(self):
return self.inner.isSequence()
def isMozMap(self):
return self.inner.isMozMap()
def isArray(self):
return self.inner.isArray()
def isArrayBuffer(self):
return self.inner.isArrayBuffer()
def isArrayBufferView(self):
return self.inner.isArrayBufferView()
def isSharedArrayBuffer(self):
return self.inner.isSharedArrayBuffer()
def isSharedArrayBufferView(self):
return self.inner.isSharedArrayBufferView()
def isTypedArray(self):
return self.inner.isTypedArray()
def isSharedTypedArray(self):
return self.inner.isSharedTypedArray()
def isDictionary(self):
return self.inner.isDictionary()
def isInterface(self):
return self.inner.isInterface()
def isPromise(self):
return self.inner.isPromise()
def isCallbackInterface(self):
return self.inner.isCallbackInterface()
def isNonCallbackInterface(self):
return self.inner.isNonCallbackInterface()
def isEnum(self):
return self.inner.isEnum()
def isUnion(self):
return self.inner.isUnion()
def isSerializable(self):
return self.inner.isSerializable()
def tag(self):
return self.inner.tag()
def resolveType(self, parentScope):
assert isinstance(parentScope, IDLScope)
self.inner.resolveType(parentScope)
def isComplete(self):
return self.inner.isComplete()
def complete(self, scope):
self.inner = self.inner.complete(scope)
if self.inner.nullable():
raise WebIDLError("The inner type of a nullable type must not be "
"a nullable type",
[self.location, self.inner.location])
if self.inner.isUnion():
if self.inner.hasNullableType:
raise WebIDLError("The inner type of a nullable type must not "
"be a union type that itself has a nullable "
"type as a member type", [self.location])
self.name = self.inner.name + "OrNull"
return self
def unroll(self):
return self.inner.unroll()
def isDistinguishableFrom(self, other):
if (other.nullable() or (other.isUnion() and other.hasNullableType) or
other.isDictionary()):
# Can't tell which type null should become
return False
return self.inner.isDistinguishableFrom(other)
def _getDependentObjects(self):
return self.inner._getDependentObjects()
class IDLSequenceType(IDLType):
    """WebIDL sequence type "sequence<T>": a container over an element
    type held in self.inner."""

    def __init__(self, location, parameterType):
        assert not parameterType.isVoid()

        IDLType.__init__(self, location, parameterType.name)
        self.inner = parameterType
        self.builtin = False
        # Need to set self.name up front if our inner type is already complete,
        # since in that case our .complete() won't be called.
        if self.inner.isComplete():
            self.name = self.inner.name + "Sequence"

    def __eq__(self, other):
        return isinstance(other, IDLSequenceType) and self.inner == other.inner

    def __str__(self):
        return self.inner.__str__() + "Sequence"

    # Unlike nullable types, a sequence answers these predicates about
    # itself (a sequence is never a primitive/string/etc., regardless of
    # its element type).
    def nullable(self):
        return False

    def isPrimitive(self):
        return False

    def isString(self):
        return False

    def isByteString(self):
        return False

    def isDOMString(self):
        return False

    def isUSVString(self):
        return False

    def isVoid(self):
        return False

    def isSequence(self):
        return True

    def isArray(self):
        return False

    def isDictionary(self):
        return False

    def isInterface(self):
        return False

    def isEnum(self):
        return False

    def isSerializable(self):
        # A sequence is serializable iff its element type is.
        return self.inner.isSerializable()

    def includesRestrictedFloat(self):
        return self.inner.includesRestrictedFloat()

    def tag(self):
        return IDLType.Tags.sequence

    def resolveType(self, parentScope):
        assert isinstance(parentScope, IDLScope)
        self.inner.resolveType(parentScope)

    def isComplete(self):
        return self.inner.isComplete()

    def complete(self, scope):
        self.inner = self.inner.complete(scope)
        self.name = self.inner.name + "Sequence"
        return self

    def unroll(self):
        return self.inner.unroll()

    def isDistinguishableFrom(self, other):
        if other.isPromise():
            return False
        if other.isUnion():
            # Just forward to the union; it'll deal
            return other.isDistinguishableFrom(self)
        return (other.isPrimitive() or other.isString() or other.isEnum() or
                other.isDate() or other.isInterface() or
                other.isDictionary() or
                other.isCallback() or other.isMozMap())

    def _getDependentObjects(self):
        return self.inner._getDependentObjects()
class IDLMozMapType(IDLType):
    """MozMap<T>: a string-keyed map over a value type held in self.inner.

    Structurally very close to IDLSequenceType (and somewhat to
    IDLNullableType); see Bug 1015318 about a possible shared superclass
    for "type containing another type".
    """

    def __init__(self, location, parameterType):
        assert not parameterType.isVoid()

        IDLType.__init__(self, location, parameterType.name)
        self.inner = parameterType
        self.builtin = False
        # If the value type is already complete, our complete() will never
        # run, so the suffixed name has to be computed right away.
        if self.inner.isComplete():
            self.name = "%sMozMap" % self.inner.name

    def __eq__(self, other):
        if not isinstance(other, IDLMozMapType):
            return False
        return self.inner == other.inner

    def __str__(self):
        return "%sMozMap" % self.inner

    def isMozMap(self):
        return True

    def includesRestrictedFloat(self):
        return self.inner.includesRestrictedFloat()

    def tag(self):
        return IDLType.Tags.mozmap

    def resolveType(self, parentScope):
        assert isinstance(parentScope, IDLScope)
        self.inner.resolveType(parentScope)

    def isComplete(self):
        return self.inner.isComplete()

    def complete(self, scope):
        self.inner = self.inner.complete(scope)
        self.name = "%sMozMap" % self.inner.name
        return self

    def unroll(self):
        # We do not unroll our inner.  Just stop at ourselves.  That
        # lets us add headers for both ourselves and our inner as
        # needed.
        return self

    def isDistinguishableFrom(self, other):
        if other.isPromise():
            return False
        if other.isUnion():
            # Just forward to the union; it'll deal
            return other.isDistinguishableFrom(self)
        # Same short-circuit order as a chained "or" over these predicates.
        checks = (other.isPrimitive, other.isString, other.isEnum,
                  other.isDate, other.isNonCallbackInterface, other.isSequence)
        return any(check() for check in checks)

    def isExposedInAllOf(self, exposureSet):
        return self.inner.unroll().isExposedInAllOf(exposureSet)

    def _getDependentObjects(self):
        return self.inner._getDependentObjects()
class IDLUnionType(IDLType):
    """WebIDL union type "(A or B or ...)".

    memberTypes holds the declared members; flatMemberTypes (built by
    complete()) additionally flattens nested unions and strips nullable
    wrappers, recording whether any member was nullable in
    hasNullableType.
    """

    def __init__(self, location, memberTypes):
        IDLType.__init__(self, location, "")
        self.memberTypes = memberTypes
        self.hasNullableType = False
        self._dictionaryType = None
        # None until complete() runs; isComplete() keys off this.
        self.flatMemberTypes = None
        self.builtin = False

    def __eq__(self, other):
        return isinstance(other, IDLUnionType) and self.memberTypes == other.memberTypes

    def __hash__(self):
        # self.name is only meaningful after complete() has computed it.
        assert self.isComplete()
        return self.name.__hash__()

    def isVoid(self):
        return False

    def isUnion(self):
        return True

    def isSerializable(self):
        return all(m.isSerializable() for m in self.memberTypes)

    def includesRestrictedFloat(self):
        return any(t.includesRestrictedFloat() for t in self.memberTypes)

    def tag(self):
        return IDLType.Tags.union

    def resolveType(self, parentScope):
        assert isinstance(parentScope, IDLScope)
        for t in self.memberTypes:
            t.resolveType(parentScope)

    def isComplete(self):
        return self.flatMemberTypes is not None

    def complete(self, scope):
        """Complete all member types, compute our "AOrBOr..." name, build
        the flattened member list, and enforce WebIDL's union
        restrictions (at most one nullable member, no nullable member
        together with a dictionary member, pairwise-distinguishable flat
        members)."""
        def typeName(type):
            if isinstance(type, IDLNullableType):
                return typeName(type.inner) + "OrNull"
            if isinstance(type, IDLWrapperType):
                return typeName(type._identifier.object())
            if isinstance(type, IDLObjectWithIdentifier):
                return typeName(type.identifier)
            return type.name

        for (i, type) in enumerate(self.memberTypes):
            if not type.isComplete():
                self.memberTypes[i] = type.complete(scope)

        self.name = "Or".join(typeName(type) for type in self.memberTypes)

        # Flatten in place: nullable members are unwrapped (and remembered
        # via hasNullableType / nullableType), nested unions are spliced
        # in; `continue` re-examines the replacement at the same index.
        self.flatMemberTypes = list(self.memberTypes)
        i = 0
        while i < len(self.flatMemberTypes):
            if self.flatMemberTypes[i].nullable():
                if self.hasNullableType:
                    raise WebIDLError("Can't have more than one nullable types in a union",
                                      [nullableType.location, self.flatMemberTypes[i].location])
                if self.hasDictionaryType():
                    raise WebIDLError("Can't have a nullable type and a "
                                      "dictionary type in a union",
                                      [self._dictionaryType.location,
                                       self.flatMemberTypes[i].location])
                self.hasNullableType = True
                nullableType = self.flatMemberTypes[i]
                self.flatMemberTypes[i] = self.flatMemberTypes[i].inner
                continue
            if self.flatMemberTypes[i].isDictionary():
                if self.hasNullableType:
                    raise WebIDLError("Can't have a nullable type and a "
                                      "dictionary type in a union",
                                      [nullableType.location,
                                       self.flatMemberTypes[i].location])
                self._dictionaryType = self.flatMemberTypes[i]
            elif self.flatMemberTypes[i].isUnion():
                self.flatMemberTypes[i:i + 1] = self.flatMemberTypes[i].memberTypes
                continue
            i += 1

        # Every pair of flat members must be mutually distinguishable.
        for (i, t) in enumerate(self.flatMemberTypes[:-1]):
            for u in self.flatMemberTypes[i + 1:]:
                if not t.isDistinguishableFrom(u):
                    raise WebIDLError("Flat member types of a union should be "
                                      "distinguishable, " + str(t) + " is not "
                                      "distinguishable from " + str(u),
                                      [self.location, t.location, u.location])

        return self

    def isDistinguishableFrom(self, other):
        if self.hasNullableType and other.nullable():
            # Can't tell which type null should become
            return False
        if other.isUnion():
            otherTypes = other.unroll().memberTypes
        else:
            otherTypes = [other]
        # For every type in otherTypes, check that it's distinguishable from
        # every type in our types
        for u in otherTypes:
            if any(not t.isDistinguishableFrom(u) for t in self.memberTypes):
                return False
        return True

    def isExposedInAllOf(self, exposureSet):
        # We could have different member types in different globals.
        # Just make sure that each thing in exposureSet has one of our
        # member types exposed in it.
        for globalName in exposureSet:
            if not any(t.unroll().isExposedInAllOf(set([globalName])) for t
                       in self.flatMemberTypes):
                return False
        return True

    def hasDictionaryType(self):
        return self._dictionaryType is not None

    def hasPossiblyEmptyDictionaryType(self):
        return (self._dictionaryType is not None and
                self._dictionaryType.inner.canBeEmpty())

    def _getDependentObjects(self):
        return set(self.memberTypes)
class IDLArrayType(IDLType):
    """WebIDL array type "T[]": a container over an element type held in
    self.inner.  Sequences, MozMaps and dictionaries are rejected as
    element types."""

    def __init__(self, location, parameterType):
        assert not parameterType.isVoid()
        if parameterType.isSequence():
            raise WebIDLError("Array type cannot parameterize over a sequence type",
                              [location])
        if parameterType.isMozMap():
            raise WebIDLError("Array type cannot parameterize over a MozMap type",
                              [location])
        if parameterType.isDictionary():
            raise WebIDLError("Array type cannot parameterize over a dictionary type",
                              [location])

        IDLType.__init__(self, location, parameterType.name)
        self.inner = parameterType
        self.builtin = False

    def __eq__(self, other):
        return isinstance(other, IDLArrayType) and self.inner == other.inner

    def __str__(self):
        return self.inner.__str__() + "Array"

    # An array answers these predicates about itself, not its element type.
    def nullable(self):
        return False

    def isPrimitive(self):
        return False

    def isString(self):
        return False

    def isByteString(self):
        return False

    def isDOMString(self):
        return False

    def isUSVString(self):
        return False

    def isVoid(self):
        return False

    def isSequence(self):
        # Sequence element types are rejected in __init__.
        assert not self.inner.isSequence()
        return False

    def isArray(self):
        return True

    def isDictionary(self):
        # Dictionary element types are rejected in __init__ and complete().
        assert not self.inner.isDictionary()
        return False

    def isInterface(self):
        return False

    def isEnum(self):
        return False

    def tag(self):
        return IDLType.Tags.array

    def resolveType(self, parentScope):
        assert isinstance(parentScope, IDLScope)
        self.inner.resolveType(parentScope)

    def isComplete(self):
        return self.inner.isComplete()

    def complete(self, scope):
        self.inner = self.inner.complete(scope)
        # NOTE(review): unlike IDLSequenceType/IDLMozMapType, no "Array"
        # suffix is appended here; this matches __init__, but confirm it
        # is intended.
        self.name = self.inner.name
        if self.inner.isDictionary():
            # The element type may only be known to be a dictionary once
            # it has been completed.
            raise WebIDLError("Array type must not contain "
                              "dictionary as element type.",
                              [self.inner.location])

        assert not self.inner.isSequence()

        return self

    def unroll(self):
        return self.inner.unroll()

    def isDistinguishableFrom(self, other):
        if other.isPromise():
            return False
        if other.isUnion():
            # Just forward to the union; it'll deal
            return other.isDistinguishableFrom(self)
        return (other.isPrimitive() or other.isString() or other.isEnum() or
                other.isDate() or other.isNonCallbackInterface())

    def _getDependentObjects(self):
        return self.inner._getDependentObjects()
class IDLTypedefType(IDLType):
    """A use of a typedef name as a type.

    This is a transparent alias: predicates delegate to the aliased
    type, isComplete() is always False, and complete() returns the
    (completed) inner type itself, so instances never survive
    completion.
    """

    def __init__(self, location, innerType, name):
        IDLType.__init__(self, location, name)
        self.inner = innerType
        self.builtin = False

    def __eq__(self, other):
        # Equality is by aliased type only; the typedef's own name is
        # not compared.
        return isinstance(other, IDLTypedefType) and self.inner == other.inner

    def __str__(self):
        return self.name

    def nullable(self):
        return self.inner.nullable()

    def isPrimitive(self):
        return self.inner.isPrimitive()

    def isBoolean(self):
        return self.inner.isBoolean()

    def isNumeric(self):
        return self.inner.isNumeric()

    def isString(self):
        return self.inner.isString()

    def isByteString(self):
        return self.inner.isByteString()

    def isDOMString(self):
        return self.inner.isDOMString()

    def isUSVString(self):
        return self.inner.isUSVString()

    def isVoid(self):
        return self.inner.isVoid()

    def isSequence(self):
        return self.inner.isSequence()

    def isMozMap(self):
        return self.inner.isMozMap()

    def isArray(self):
        return self.inner.isArray()

    def isDictionary(self):
        return self.inner.isDictionary()

    def isArrayBuffer(self):
        return self.inner.isArrayBuffer()

    def isArrayBufferView(self):
        return self.inner.isArrayBufferView()

    def isSharedArrayBuffer(self):
        return self.inner.isSharedArrayBuffer()

    def isSharedArrayBufferView(self):
        return self.inner.isSharedArrayBufferView()

    def isTypedArray(self):
        return self.inner.isTypedArray()

    def isSharedTypedArray(self):
        return self.inner.isSharedTypedArray()

    def isInterface(self):
        return self.inner.isInterface()

    def isCallbackInterface(self):
        return self.inner.isCallbackInterface()

    def isNonCallbackInterface(self):
        return self.inner.isNonCallbackInterface()

    def isComplete(self):
        # Always False so that complete() runs and replaces us with the
        # aliased type.
        return False

    def complete(self, parentScope):
        if not self.inner.isComplete():
            self.inner = self.inner.complete(parentScope)
        assert self.inner.isComplete()
        # Dissolve the alias: callers end up holding the real type.
        return self.inner

    # Do we need a resolveType impl?  I don't think it's particularly useful....

    def tag(self):
        return self.inner.tag()

    def unroll(self):
        return self.inner.unroll()

    def isDistinguishableFrom(self, other):
        return self.inner.isDistinguishableFrom(other)

    def _getDependentObjects(self):
        return self.inner._getDependentObjects()
class IDLTypedef(IDLObjectWithIdentifier):
    """The typedef declaration itself (as opposed to IDLTypedefType,
    which is a *use* of the typedef name as a type)."""

    def __init__(self, location, parentScope, innerType, name):
        unresolvedName = IDLUnresolvedIdentifier(location, name)
        IDLObjectWithIdentifier.__init__(self, location, parentScope,
                                         unresolvedName)
        self.innerType = innerType

    def __str__(self):
        return "Typedef %s %s" % (self.identifier.name, self.innerType)

    def finish(self, parentScope):
        if self.innerType.isComplete():
            return
        self.innerType = self.innerType.complete(parentScope)

    def validate(self):
        # Nothing to check beyond what finish() already did.
        pass

    def isTypedef(self):
        return True

    def addExtendedAttributes(self, attrs):
        # Extended attributes are not supported on typedefs.
        assert len(attrs) == 0

    def _getDependentObjects(self):
        return self.innerType._getDependentObjects()
class IDLWrapperType(IDLType):
    """A type backed by a user-defined IDL object: an interface, external
    interface, dictionary, or enum (held in self.inner).

    For Promise, promiseInnerType carries the resolved value's type.
    """

    def __init__(self, location, inner, promiseInnerType=None):
        IDLType.__init__(self, location, inner.identifier.name)
        self.inner = inner
        self._identifier = inner.identifier
        self.builtin = False
        assert not promiseInnerType or inner.identifier.name == "Promise"
        self._promiseInnerType = promiseInnerType

    def __eq__(self, other):
        return (isinstance(other, IDLWrapperType) and
                self._identifier == other._identifier and
                self.builtin == other.builtin)

    def __str__(self):
        return str(self.name) + " (Wrapper)"

    def nullable(self):
        return False

    def isPrimitive(self):
        return False

    def isString(self):
        return False

    def isByteString(self):
        return False

    def isDOMString(self):
        return False

    def isUSVString(self):
        return False

    def isVoid(self):
        return False

    def isSequence(self):
        return False

    def isArray(self):
        return False

    def isDictionary(self):
        return isinstance(self.inner, IDLDictionary)

    def isInterface(self):
        return (isinstance(self.inner, IDLInterface) or
                isinstance(self.inner, IDLExternalInterface))

    def isCallbackInterface(self):
        return self.isInterface() and self.inner.isCallback()

    def isNonCallbackInterface(self):
        return self.isInterface() and not self.inner.isCallback()

    def isEnum(self):
        return isinstance(self.inner, IDLEnum)

    def isPromise(self):
        # Promise is recognized purely by interface name here.
        return (isinstance(self.inner, IDLInterface) and
                self.inner.identifier.name == "Promise")

    def promiseInnerType(self):
        assert self.isPromise()
        return self._promiseInnerType

    def isSerializable(self):
        if self.isInterface():
            if self.inner.isExternal():
                return False
            # A non-external interface is serializable iff it has a
            # jsonifier method.
            return any(m.isMethod() and m.isJsonifier() for m in self.inner.members)
        elif self.isEnum():
            return True
        elif self.isDictionary():
            return all(m.type.isSerializable() for m in self.inner.members)
        else:
            raise WebIDLError("IDLWrapperType wraps type %s that we don't know if "
                              "is serializable" % type(self.inner), [self.location])

    def resolveType(self, parentScope):
        assert isinstance(parentScope, IDLScope)
        self.inner.resolve(parentScope)

    def isComplete(self):
        return True

    def tag(self):
        if self.isInterface():
            return IDLType.Tags.interface
        elif self.isEnum():
            return IDLType.Tags.enum
        elif self.isDictionary():
            return IDLType.Tags.dictionary
        else:
            assert False

    def isDistinguishableFrom(self, other):
        """Implements the WebIDL distinguishability rules for
        enum/dictionary/interface types."""
        if self.isPromise():
            return False
        if other.isPromise():
            return False
        if other.isUnion():
            # Just forward to the union; it'll deal
            return other.isDistinguishableFrom(self)
        assert self.isInterface() or self.isEnum() or self.isDictionary()
        if self.isEnum():
            return (other.isPrimitive() or other.isInterface() or other.isObject() or
                    other.isCallback() or other.isDictionary() or
                    other.isSequence() or other.isMozMap() or other.isArray() or
                    other.isDate())
        if self.isDictionary() and other.nullable():
            return False
        if (other.isPrimitive() or other.isString() or other.isEnum() or
            other.isDate() or other.isSequence()):
            return True
        if self.isDictionary():
            return other.isNonCallbackInterface()

        assert self.isInterface()
        if other.isInterface():
            if other.isSpiderMonkeyInterface():
                # Just let |other| handle things
                return other.isDistinguishableFrom(self)
            assert self.isGeckoInterface() and other.isGeckoInterface()
            if self.inner.isExternal() or other.unroll().inner.isExternal():
                return self != other
            # Two Gecko interfaces are distinguishable if no interface is
            # based on both, and at least one of them is non-callback.
            return (len(self.inner.interfacesBasedOnSelf &
                        other.unroll().inner.interfacesBasedOnSelf) == 0 and
                    (self.isNonCallbackInterface() or
                     other.isNonCallbackInterface()))
        if (other.isDictionary() or other.isCallback() or
            other.isMozMap() or other.isArray()):
            return self.isNonCallbackInterface()

        # Not much else |other| can be
        assert other.isObject()
        return False

    def isExposedInAllOf(self, exposureSet):
        if not self.isInterface():
            return True
        iface = self.inner
        if iface.isExternal():
            # Let's say true, though ideally we'd only do this when
            # exposureSet contains the primary global's name.
            return True
        if (self.isPromise() and
            # Check the internal type
            not self.promiseInnerType().unroll().isExposedInAllOf(exposureSet)):
            return False
        return iface.exposureSet.issuperset(exposureSet)

    def _getDependentObjects(self):
        # NB: The codegen for an interface type depends on
        #  a) That the identifier is in fact an interface (as opposed to
        #     a dictionary or something else).
        #  b) The native type of the interface.
        #  If we depend on the interface object we will also depend on
        #  anything the interface depends on which is undesirable.  We
        #  considered implementing a dependency just on the interface type
        #  file, but then every modification to an interface would cause this
        #  to be regenerated which is still undesirable.  We decided not to
        #  depend on anything, reasoning that:
        #  1) Changing the concrete type of the interface requires modifying
        #     Bindings.conf, which is still a global dependency.
        #  2) Changing an interface to a dictionary (or vice versa) with the
        #     same identifier should be incredibly rare.
        #
        #  On the other hand, if our type is a dictionary, we should
        #  depend on it, because the member types of a dictionary
        #  affect whether a method taking the dictionary as an argument
        #  takes a JSContext* argument or not.
        if self.isDictionary():
            return set([self.inner])
        return set()
class IDLBuiltinType(IDLType):
    """A builtin WebIDL type (primitives, strings, any/object/date/void,
    and the ArrayBuffer/typed-array family).

    The Types enum is order-sensitive: several predicates below
    (isPrimitive, isInteger, isTypedArray, ...) are implemented as range
    checks over the enum values.
    """

    Types = enum(
        # The integer types
        'byte',
        'octet',
        'short',
        'unsigned_short',
        'long',
        'unsigned_long',
        'long_long',
        'unsigned_long_long',
        # Additional primitive types
        'boolean',
        'unrestricted_float',
        'float',
        'unrestricted_double',
        # IMPORTANT: "double" must be the last primitive type listed
        'double',
        # Other types
        'any',
        'domstring',
        'bytestring',
        'usvstring',
        'object',
        'date',
        'void',
        # Funny stuff
        'ArrayBuffer',
        'ArrayBufferView',
        'SharedArrayBuffer',
        'SharedArrayBufferView',
        'Int8Array',
        'Uint8Array',
        'Uint8ClampedArray',
        'Int16Array',
        'Uint16Array',
        'Int32Array',
        'Uint32Array',
        'Float32Array',
        'Float64Array',
        'SharedInt8Array',
        'SharedUint8Array',
        'SharedUint8ClampedArray',
        'SharedInt16Array',
        'SharedUint16Array',
        'SharedInt32Array',
        'SharedUint32Array',
        'SharedFloat32Array',
        'SharedFloat64Array'
        )

    # Maps each builtin to its IDLType.Tags value; the buffer/typed-array
    # builtins all tag as interfaces.
    TagLookup = {
        Types.byte: IDLType.Tags.int8,
        Types.octet: IDLType.Tags.uint8,
        Types.short: IDLType.Tags.int16,
        Types.unsigned_short: IDLType.Tags.uint16,
        Types.long: IDLType.Tags.int32,
        Types.unsigned_long: IDLType.Tags.uint32,
        Types.long_long: IDLType.Tags.int64,
        Types.unsigned_long_long: IDLType.Tags.uint64,
        Types.boolean: IDLType.Tags.bool,
        Types.unrestricted_float: IDLType.Tags.unrestricted_float,
        Types.float: IDLType.Tags.float,
        Types.unrestricted_double: IDLType.Tags.unrestricted_double,
        Types.double: IDLType.Tags.double,
        Types.any: IDLType.Tags.any,
        Types.domstring: IDLType.Tags.domstring,
        Types.bytestring: IDLType.Tags.bytestring,
        Types.usvstring: IDLType.Tags.usvstring,
        Types.object: IDLType.Tags.object,
        Types.date: IDLType.Tags.date,
        Types.void: IDLType.Tags.void,
        Types.ArrayBuffer: IDLType.Tags.interface,
        Types.ArrayBufferView: IDLType.Tags.interface,
        Types.SharedArrayBuffer: IDLType.Tags.interface,
        Types.SharedArrayBufferView: IDLType.Tags.interface,
        Types.Int8Array: IDLType.Tags.interface,
        Types.Uint8Array: IDLType.Tags.interface,
        Types.Uint8ClampedArray: IDLType.Tags.interface,
        Types.Int16Array: IDLType.Tags.interface,
        Types.Uint16Array: IDLType.Tags.interface,
        Types.Int32Array: IDLType.Tags.interface,
        Types.Uint32Array: IDLType.Tags.interface,
        Types.Float32Array: IDLType.Tags.interface,
        Types.Float64Array: IDLType.Tags.interface,
        Types.SharedInt8Array: IDLType.Tags.interface,
        Types.SharedUint8Array: IDLType.Tags.interface,
        Types.SharedUint8ClampedArray: IDLType.Tags.interface,
        Types.SharedInt16Array: IDLType.Tags.interface,
        Types.SharedUint16Array: IDLType.Tags.interface,
        Types.SharedInt32Array: IDLType.Tags.interface,
        Types.SharedUint32Array: IDLType.Tags.interface,
        Types.SharedFloat32Array: IDLType.Tags.interface,
        Types.SharedFloat64Array: IDLType.Tags.interface
    }

    def __init__(self, location, name, type):
        # `type` is one of the Types enum values above.
        IDLType.__init__(self, location, name)
        self.builtin = True
        self._typeTag = type

    def isPrimitive(self):
        # Relies on "double" being the last primitive in the Types enum.
        return self._typeTag <= IDLBuiltinType.Types.double

    def isBoolean(self):
        return self._typeTag == IDLBuiltinType.Types.boolean

    def isNumeric(self):
        return self.isPrimitive() and not self.isBoolean()

    def isString(self):
        return (self._typeTag == IDLBuiltinType.Types.domstring or
                self._typeTag == IDLBuiltinType.Types.bytestring or
                self._typeTag == IDLBuiltinType.Types.usvstring)

    def isByteString(self):
        return self._typeTag == IDLBuiltinType.Types.bytestring

    def isDOMString(self):
        return self._typeTag == IDLBuiltinType.Types.domstring

    def isUSVString(self):
        return self._typeTag == IDLBuiltinType.Types.usvstring

    def isInteger(self):
        # Relies on the integer types being listed first in the Types enum.
        return self._typeTag <= IDLBuiltinType.Types.unsigned_long_long

    def isArrayBuffer(self):
        return self._typeTag == IDLBuiltinType.Types.ArrayBuffer

    def isArrayBufferView(self):
        return self._typeTag == IDLBuiltinType.Types.ArrayBufferView

    def isSharedArrayBuffer(self):
        return self._typeTag == IDLBuiltinType.Types.SharedArrayBuffer

    def isSharedArrayBufferView(self):
        return self._typeTag == IDLBuiltinType.Types.SharedArrayBufferView

    def isTypedArray(self):
        # Range check over the contiguous Int8Array..Float64Array run.
        return (self._typeTag >= IDLBuiltinType.Types.Int8Array and
                self._typeTag <= IDLBuiltinType.Types.Float64Array)

    def isSharedTypedArray(self):
        return (self._typeTag >= IDLBuiltinType.Types.SharedInt8Array and
                self._typeTag <= IDLBuiltinType.Types.SharedFloat64Array)

    def isInterface(self):
        # TypedArray things are interface types per the TypedArray spec,
        # but we handle them as builtins because SpiderMonkey implements
        # all of it internally.
        return (self.isArrayBuffer() or
                self.isArrayBufferView() or
                self.isSharedArrayBuffer() or
                self.isSharedArrayBufferView() or
                self.isTypedArray() or
                self.isSharedTypedArray())

    def isNonCallbackInterface(self):
        # All the interfaces we can be are non-callback
        return self.isInterface()

    def isFloat(self):
        return (self._typeTag == IDLBuiltinType.Types.float or
                self._typeTag == IDLBuiltinType.Types.double or
                self._typeTag == IDLBuiltinType.Types.unrestricted_float or
                self._typeTag == IDLBuiltinType.Types.unrestricted_double)

    def isUnrestricted(self):
        assert self.isFloat()
        return (self._typeTag == IDLBuiltinType.Types.unrestricted_float or
                self._typeTag == IDLBuiltinType.Types.unrestricted_double)

    def isSerializable(self):
        return self.isPrimitive() or self.isString() or self.isDate()

    def includesRestrictedFloat(self):
        return self.isFloat() and not self.isUnrestricted()

    def tag(self):
        return IDLBuiltinType.TagLookup[self._typeTag]

    def isDistinguishableFrom(self, other):
        """Implements the WebIDL distinguishability rules for builtin
        types; the buffer/typed-array cases handle pairwise
        distinguishability within the typed-array family."""
        if other.isPromise():
            return False
        if other.isUnion():
            # Just forward to the union; it'll deal
            return other.isDistinguishableFrom(self)
        if self.isBoolean():
            return (other.isNumeric() or other.isString() or other.isEnum() or
                    other.isInterface() or other.isObject() or
                    other.isCallback() or other.isDictionary() or
                    other.isSequence() or other.isMozMap() or other.isArray() or
                    other.isDate())
        if self.isNumeric():
            return (other.isBoolean() or other.isString() or other.isEnum() or
                    other.isInterface() or other.isObject() or
                    other.isCallback() or other.isDictionary() or
                    other.isSequence() or other.isMozMap() or other.isArray() or
                    other.isDate())
        if self.isString():
            return (other.isPrimitive() or other.isInterface() or
                    other.isObject() or
                    other.isCallback() or other.isDictionary() or
                    other.isSequence() or other.isMozMap() or other.isArray() or
                    other.isDate())
        if self.isAny():
            # Can't tell "any" apart from anything
            return False
        if self.isObject():
            return other.isPrimitive() or other.isString() or other.isEnum()
        if self.isDate():
            return (other.isPrimitive() or other.isString() or other.isEnum() or
                    other.isInterface() or other.isCallback() or
                    other.isDictionary() or other.isSequence() or
                    other.isMozMap() or other.isArray())
        if self.isVoid():
            return not other.isVoid()
        # Not much else we could be!
        assert self.isSpiderMonkeyInterface()
        # Like interfaces, but we know we're not a callback
        return (other.isPrimitive() or other.isString() or other.isEnum() or
                other.isCallback() or other.isDictionary() or
                other.isSequence() or other.isMozMap() or other.isArray() or
                other.isDate() or
                (other.isInterface() and (
                 # ArrayBuffer is distinguishable from everything
                 # that's not an ArrayBuffer or a callback interface
                 (self.isArrayBuffer() and not other.isArrayBuffer()) or
                 (self.isSharedArrayBuffer() and not other.isSharedArrayBuffer()) or
                 # ArrayBufferView is distinguishable from everything
                 # that's not an ArrayBufferView or typed array.
                 (self.isArrayBufferView() and not other.isArrayBufferView() and
                  not other.isTypedArray()) or
                 (self.isSharedArrayBufferView() and not other.isSharedArrayBufferView() and
                  not other.isSharedTypedArray()) or
                 # Typed arrays are distinguishable from everything
                 # except ArrayBufferView and the same type of typed
                 # array
                 (self.isTypedArray() and not other.isArrayBufferView() and not
                  (other.isTypedArray() and other.name == self.name)) or
                 (self.isSharedTypedArray() and not other.isSharedArrayBufferView() and not
                  (other.isSharedTypedArray() and other.name == self.name)))))

    def _getDependentObjects(self):
        return set()
# Singleton IDLBuiltinType instance for every builtin type tag, keyed by
# the tag.  Built from (tag, pretty name) pairs; for the typed-array
# family the pretty name matches the enum attribute name.
BuiltinTypes = dict(
    (typeTag, IDLBuiltinType(BuiltinLocation("<builtin type>"), prettyName,
                             typeTag))
    for (typeTag, prettyName) in [
        (IDLBuiltinType.Types.byte, "Byte"),
        (IDLBuiltinType.Types.octet, "Octet"),
        (IDLBuiltinType.Types.short, "Short"),
        (IDLBuiltinType.Types.unsigned_short, "UnsignedShort"),
        (IDLBuiltinType.Types.long, "Long"),
        (IDLBuiltinType.Types.unsigned_long, "UnsignedLong"),
        (IDLBuiltinType.Types.long_long, "LongLong"),
        (IDLBuiltinType.Types.unsigned_long_long, "UnsignedLongLong"),
        (IDLBuiltinType.Types.boolean, "Boolean"),
        (IDLBuiltinType.Types.float, "Float"),
        (IDLBuiltinType.Types.unrestricted_float, "UnrestrictedFloat"),
        (IDLBuiltinType.Types.double, "Double"),
        (IDLBuiltinType.Types.unrestricted_double, "UnrestrictedDouble"),
        (IDLBuiltinType.Types.any, "Any"),
        (IDLBuiltinType.Types.domstring, "String"),
        (IDLBuiltinType.Types.bytestring, "ByteString"),
        (IDLBuiltinType.Types.usvstring, "USVString"),
        (IDLBuiltinType.Types.object, "Object"),
        (IDLBuiltinType.Types.date, "Date"),
        (IDLBuiltinType.Types.void, "Void"),
        (IDLBuiltinType.Types.ArrayBuffer, "ArrayBuffer"),
        (IDLBuiltinType.Types.ArrayBufferView, "ArrayBufferView"),
        (IDLBuiltinType.Types.SharedArrayBuffer, "SharedArrayBuffer"),
        (IDLBuiltinType.Types.SharedArrayBufferView, "SharedArrayBufferView"),
        (IDLBuiltinType.Types.Int8Array, "Int8Array"),
        (IDLBuiltinType.Types.Uint8Array, "Uint8Array"),
        (IDLBuiltinType.Types.Uint8ClampedArray, "Uint8ClampedArray"),
        (IDLBuiltinType.Types.Int16Array, "Int16Array"),
        (IDLBuiltinType.Types.Uint16Array, "Uint16Array"),
        (IDLBuiltinType.Types.Int32Array, "Int32Array"),
        (IDLBuiltinType.Types.Uint32Array, "Uint32Array"),
        (IDLBuiltinType.Types.Float32Array, "Float32Array"),
        (IDLBuiltinType.Types.Float64Array, "Float64Array"),
        (IDLBuiltinType.Types.SharedInt8Array, "SharedInt8Array"),
        (IDLBuiltinType.Types.SharedUint8Array, "SharedUint8Array"),
        (IDLBuiltinType.Types.SharedUint8ClampedArray, "SharedUint8ClampedArray"),
        (IDLBuiltinType.Types.SharedInt16Array, "SharedInt16Array"),
        (IDLBuiltinType.Types.SharedUint16Array, "SharedUint16Array"),
        (IDLBuiltinType.Types.SharedInt32Array, "SharedInt32Array"),
        (IDLBuiltinType.Types.SharedUint32Array, "SharedUint32Array"),
        (IDLBuiltinType.Types.SharedFloat32Array, "SharedFloat32Array"),
        (IDLBuiltinType.Types.SharedFloat64Array, "SharedFloat64Array"),
    ])
# Inclusive (min, max) value range for each builtin integer type, used
# for literal range checks and integer-literal type matching.
integerTypeSizes = {
    IDLBuiltinType.Types.byte: (-128, 127),
    IDLBuiltinType.Types.octet: (0, 255),
    IDLBuiltinType.Types.short: (-32768, 32767),
    IDLBuiltinType.Types.unsigned_short: (0, 65535),
    IDLBuiltinType.Types.long: (-2147483648, 2147483647),
    IDLBuiltinType.Types.unsigned_long: (0, 4294967295),
    IDLBuiltinType.Types.long_long: (-9223372036854775808, 9223372036854775807),
    IDLBuiltinType.Types.unsigned_long_long: (0, 18446744073709551615)
}
def matchIntegerValueToType(value):
    """Return a builtin integer IDLBuiltinType whose range contains value,
    or None if no integer type can hold it."""
    for typeTag, (lowBound, highBound) in integerTypeSizes.items():
        if lowBound <= value <= highBound:
            return BuiltinTypes[typeTag]
    return None
class IDLValue(IDLObject):
    """A literal constant value paired with its IDL type.

    Produced by the parser for constants and default values.  coerceToType()
    implements the conversions Web IDL allows when such a literal initializes
    something of a (possibly different) type.
    """

    def __init__(self, location, type, value):
        IDLObject.__init__(self, location)
        self.type = type
        assert isinstance(type, IDLType)

        self.value = value

    def coerceToType(self, type, location):
        """Return an IDLValue equivalent to self but of the given type.

        Raises WebIDLError (reported at 'location') when the value cannot
        legally be coerced to 'type'.
        """
        if type == self.type:
            return self  # Nothing to do

        # We first check for unions to ensure that even if the union is nullable
        # we end up with the right flat member type, not the union's type.
        if type.isUnion():
            # We use the flat member types here, because if we have a nullable
            # member type, or a nested union, we want the type the value
            # actually coerces to, not the nullable or nested union type.
            for subtype in type.unroll().flatMemberTypes:
                try:
                    coercedValue = self.coerceToType(subtype, location)
                    # Create a new IDLValue to make sure that we have the
                    # correct float/double type. This is necessary because we
                    # use the value's type when it is a default value of a
                    # union, and the union cares about the exact float type.
                    return IDLValue(self.location, subtype, coercedValue.value)
                except Exception:
                    # This member type did not accept the value; try the next
                    # one.  (Narrowed from a bare 'except:' so that
                    # KeyboardInterrupt/SystemExit still propagate.)
                    pass
        # If the type allows null, rerun this matching on the inner type, except
        # nullable enums. We handle those specially, because we want our
        # default string values to stay strings even when assigned to a nullable
        # enum.
        elif type.nullable() and not type.isEnum():
            innerValue = self.coerceToType(type.inner, location)
            return IDLValue(self.location, type, innerValue.value)
        elif self.type.isInteger() and type.isInteger():
            # We're both integer types. See if we fit.
            # (Renamed from 'min'/'max' to avoid shadowing the builtins.)
            (minVal, maxVal) = integerTypeSizes[type._typeTag]

            if minVal <= self.value <= maxVal:
                # Promote
                return IDLValue(self.location, type, self.value)
            else:
                raise WebIDLError("Value %s is out of range for type %s." %
                                  (self.value, type), [location])
        elif self.type.isInteger() and type.isFloat():
            # Convert an integer literal into float; reject integers outside
            # +/- 2**24, which the error message treats as losing precision.
            if -2**24 <= self.value <= 2**24:
                return IDLValue(self.location, type, float(self.value))
            else:
                raise WebIDLError("Converting value %s to %s will lose precision." %
                                  (self.value, type), [location])
        elif self.type.isString() and type.isEnum():
            # Just keep our string, but make sure it's a valid value for this enum
            enum = type.unroll().inner
            if self.value not in enum.values():
                raise WebIDLError("'%s' is not a valid default value for enum %s"
                                  % (self.value, enum.identifier.name),
                                  [location, enum.location])
            return self
        elif self.type.isFloat() and type.isFloat():
            # Infinities and NaN only survive coercion to unrestricted floats.
            if (not type.isUnrestricted() and
                (self.value == float("inf") or self.value == float("-inf") or
                 math.isnan(self.value))):
                raise WebIDLError("Trying to convert unrestricted value %s to non-unrestricted"
                                  % self.value, [location])
            return IDLValue(self.location, type, self.value)
        elif self.type.isString() and type.isUSVString():
            # Allow USVStrings to use default value just like
            # DOMString. No coercion is required in this case as Codegen.py
            # treats USVString just like DOMString, but with an
            # extra normalization step.
            assert self.type.isDOMString()
            return self
        # No legal coercion found (including a union none of whose member
        # types accepted the value).
        raise WebIDLError("Cannot coerce type %s to type %s." %
                          (self.type, type), [location])

    def _getDependentObjects(self):
        return set()
class IDLNullValue(IDLObject):
    """The 'null' literal, usable as a default for nullable-ish types."""

    def __init__(self, location):
        IDLObject.__init__(self, location)
        self.type = None
        self.value = None

    def coerceToType(self, type, location):
        """Return an IDLNullValue typed appropriately for 'type', or raise
        WebIDLError when null is not a legal value for it."""
        # null is only legal for nullable types, dictionaries, 'any', and
        # unions containing one of those.
        nullAllowed = (isinstance(type, IDLNullableType) or
                       (type.isUnion() and type.hasNullableType) or
                       (type.isUnion() and type.hasDictionaryType()) or
                       type.isDictionary() or
                       type.isAny())
        if not nullAllowed:
            raise WebIDLError("Cannot coerce null value to type %s." % type,
                              [location])

        coerced = IDLNullValue(self.location)
        if type.isUnion() and not type.nullable() and type.hasDictionaryType():
            # We're actually a default value for the union's dictionary member.
            # Use its type.
            for memberType in type.flatMemberTypes:
                if memberType.isDictionary():
                    coerced.type = memberType
                    return coerced
        coerced.type = type
        return coerced

    def _getDependentObjects(self):
        return set()
class IDLEmptySequenceValue(IDLObject):
    """The '[]' literal: an empty sequence used as a default value."""

    def __init__(self, location):
        IDLObject.__init__(self, location)
        self.type = None
        self.value = None

    def coerceToType(self, type, location):
        """Return an IDLEmptySequenceValue typed for 'type' (or for a
        sequence member of a union 'type'); raise WebIDLError otherwise."""
        if type.isUnion():
            # We use the flat member types here, because if we have a nullable
            # member type, or a nested union, we want the type the value
            # actually coerces to, not the nullable or nested union type.
            for subtype in type.unroll().flatMemberTypes:
                try:
                    return self.coerceToType(subtype, location)
                except Exception:
                    # This member type can't hold an empty sequence; try the
                    # next one.  (Narrowed from a bare 'except:' so that
                    # KeyboardInterrupt/SystemExit still propagate.)
                    pass

        if not type.isSequence():
            raise WebIDLError("Cannot coerce empty sequence value to type %s." % type,
                              [location])

        emptySequenceValue = IDLEmptySequenceValue(self.location)
        emptySequenceValue.type = type
        return emptySequenceValue

    def _getDependentObjects(self):
        return set()
class IDLUndefinedValue(IDLObject):
    """The 'undefined' literal; only coercible to the 'any' type."""

    def __init__(self, location):
        IDLObject.__init__(self, location)
        self.type = None
        self.value = None

    def coerceToType(self, type, location):
        """Return an IDLUndefinedValue typed as 'type' if it is 'any';
        raise WebIDLError for every other type."""
        if type.isAny():
            coerced = IDLUndefinedValue(self.location)
            coerced.type = type
            return coerced
        raise WebIDLError("Cannot coerce undefined value to type %s." % type,
                          [location])

    def _getDependentObjects(self):
        return set()
class IDLInterfaceMember(IDLObjectWithIdentifier, IDLExposureMixins):
Tags = enum(
'Const',
'Attr',
'Method',
'MaplikeOrSetlike'
)
Special = enum(
'Static',
'Stringifier'
)
AffectsValues = ("Nothing", "Everything")
DependsOnValues = ("Nothing", "DOMState", "DeviceState", "Everything")
def __init__(self, location, identifier, tag):
IDLObjectWithIdentifier.__init__(self, location, None, identifier)
IDLExposureMixins.__init__(self, location)
self.tag = tag
self._extendedAttrDict = {}
def isMethod(self):
return self.tag == IDLInterfaceMember.Tags.Method
def isAttr(self):
return self.tag == IDLInterfaceMember.Tags.Attr
def isConst(self):
return self.tag == IDLInterfaceMember.Tags.Const
def isMaplikeOrSetlike(self):
return self.tag == IDLInterfaceMember.Tags.MaplikeOrSetlike
def addExtendedAttributes(self, attrs):
for attr in attrs:
self.handleExtendedAttribute(attr)
attrlist = attr.listValue()
self._extendedAttrDict[attr.identifier()] = attrlist if len(attrlist) else True
def handleExtendedAttribute(self, attr):
pass
def getExtendedAttribute(self, name):
return self._extendedAttrDict.get(name, None)
def finish(self, scope):
# We better be exposed _somewhere_.
if (len(self._exposureGlobalNames) == 0):
print self.identifier.name
assert len(self._exposureGlobalNames) != 0
IDLExposureMixins.finish(self, scope)
def validate(self):
if (self.getExtendedAttribute("Pref") and
self.exposureSet != set([self._globalScope.primaryGlobalName])):
raise WebIDLError("[Pref] used on an interface member that is not "
"%s-only" % self._globalScope.primaryGlobalName,
[self.location])
for attribute in ["CheckAnyPermissions", "CheckAllPermissions"]:
if (self.getExtendedAttribute(attribute) and
self.exposureSet != set([self._globalScope.primaryGlobalName])):
raise WebIDLError("[%s] used on an interface member that is "
"not %s-only" %
(attribute, self.parentScope.primaryGlobalName),
[self.location])
if self.isAttr() or self.isMethod():
if self.affects == "Everything" and self.dependsOn != "Everything":
raise WebIDLError("Interface member is flagged as affecting "
"everything but not depending on everything. "
"That seems rather unlikely.",
[self.location])
if self.getExtendedAttribute("NewObject"):
if self.dependsOn == "Nothing" or self.dependsOn == "DOMState":
raise WebIDLError("A [NewObject] method is not idempotent, "
"so it has to depend on something other than DOM state.",
[self.location])
def _setDependsOn(self, dependsOn):
if self.dependsOn != "Everything":
raise WebIDLError("Trying to specify multiple different DependsOn, "
"Pure, or Constant extended attributes for "
"attribute", [self.location])
if dependsOn not in IDLInterfaceMember.DependsOnValues:
raise WebIDLError("Invalid [DependsOn=%s] on attribute" % dependsOn,
[self.location])
self.dependsOn = dependsOn
def _setAffects(self, affects):
if self.affects != "Everything":
raise WebIDLError("Trying to specify multiple different Affects, "
"Pure, or Constant extended attributes for "
"attribute", [self.location])
if affects not in IDLInterfaceMember.AffectsValues:
raise WebIDLError("Invalid [Affects=%s] on attribute" % dependsOn,
[self.location])
self.affects = affects
def _addAlias(self, alias):
if alias in self.aliases:
raise WebIDLError("Duplicate [Alias=%s] on attribute" % alias,
[self.location])
self.aliases.append(alias)
# MaplikeOrSetlike adds a trait to an interface, like map or iteration
# functions. To handle them while still getting all of the generated binding
# code taken care of, we treat them as macros that are expanded into members
# based on parsed values.
class IDLMaplikeOrSetlike(IDLInterfaceMember):
    """Represents a maplike/setlike declaration on an interface.

    Acts as a macro: expand() synthesizes the methods and the 'size'
    attribute the declaration requires and appends them to the interface's
    member list, so the regular binding codegen machinery handles them.
    """

    MaplikeOrSetlikeTypes = enum(
        'maplike',
        'setlike'
    )

    def __init__(self, location, identifier, maplikeOrSetlikeType,
                 readonly, keyType, valueType):
        IDLInterfaceMember.__init__(self, location, identifier,
                                    IDLInterfaceMember.Tags.MaplikeOrSetlike)

        assert isinstance(keyType, IDLType)
        assert isinstance(valueType, IDLType)
        self.maplikeOrSetlikeType = maplikeOrSetlikeType
        self.readonly = readonly
        self.keyType = keyType
        self.valueType = valueType
        self.slotIndex = None
        # Names reserved by expand(); checkCollisions() rejects interface
        # members that clash with them.
        self.disallowedMemberNames = []
        self.disallowedNonMethodNames = []
        # When generating JSAPI access code, we need to know the backing object
        # type prefix to create the correct function. Generate here for reuse.
        if self.isMaplike():
            self.prefix = 'Map'
        elif self.isSetlike():
            self.prefix = 'Set'

    def __str__(self):
        return "declared '%s' with key '%s'" % (self.maplikeOrSetlikeType, self.keyType)

    def isMaplike(self):
        return self.maplikeOrSetlikeType == "maplike"

    def isSetlike(self):
        return self.maplikeOrSetlikeType == "setlike"

    def checkCollisions(self, members, isAncestor):
        """Raise WebIDLError when any member in 'members' collides with a
        name reserved by this maplike/setlike declaration."""
        for member in members:
            # Check that there are no disallowed members
            if (member.identifier.name in self.disallowedMemberNames and
                not ((member.isMethod() and member.isMaplikeOrSetlikeMethod()) or
                     (member.isAttr() and member.isMaplikeOrSetlikeAttr()))):
                raise WebIDLError("Member '%s' conflicts "
                                  "with reserved %s name." %
                                  (member.identifier.name,
                                   self.maplikeOrSetlikeType),
                                  [self.location, member.location])
            # Check that there are no disallowed non-method members
            # NOTE(review): Python precedence makes this parse as
            # 'isAncestor or ((attr-or-const) and name-in-disallowed)', so
            # isAncestor alone triggers the error for every member; confirm
            # whether '(isAncestor or attr-or-const) and name-in-disallowed'
            # was intended.
            if (isAncestor or (member.isAttr() or member.isConst()) and
                member.identifier.name in self.disallowedNonMethodNames):
                raise WebIDLError("Member '%s' conflicts "
                                  "with reserved %s method." %
                                  (member.identifier.name,
                                   self.maplikeOrSetlikeType),
                                  [self.location, member.location])

    def expand(self, members, isJSImplemented):
        """
        In order to take advantage of all of the method machinery in Codegen,
        we generate our functions as if they were part of the interface
        specification during parsing.
        """
        def addMethod(name, allowExistingOperations, returnType, args=[],
                      chromeOnly=False, isPure=False, affectsNothing=False):
            """
            Create an IDLMethod based on the parameters passed in and append
            it to 'members'.

            chromeOnly is only True for read-only js implemented classes, to
            implement underscore-prefixed convenience functions that would
            otherwise not be available, unlike the case of C++ bindings.
            isPure is only True for idempotent functions, so it is not valid
            for things like keys, values, etc. that return a new object every
            time.
            """
            # NOTE(review): the mutable default 'args=[]' is shared across
            # calls; this is only safe if IDLMethod never mutates it — verify.
            # Only add name to lists for collision checks if it's not chrome
            # only.
            if chromeOnly:
                name = "__" + name
            else:
                if not allowExistingOperations:
                    self.disallowedMemberNames.append(name)
                else:
                    self.disallowedNonMethodNames.append(name)
            # If allowExistingOperations is True, and another operation exists
            # with the same name as the one we're trying to add, don't add the
            # maplike/setlike operation. However, if the operation is static,
            # then fail by way of creating the function, which will cause a
            # naming conflict, per the spec.
            if allowExistingOperations:
                for m in members:
                    if m.identifier.name == name and m.isMethod() and not m.isStatic():
                        return
            method = IDLMethod(self.location,
                               IDLUnresolvedIdentifier(self.location, name, allowDoubleUnderscore=chromeOnly),
                               returnType, args, maplikeOrSetlike=self)
            # We need to be able to throw from declaration methods
            method.addExtendedAttributes(
                [IDLExtendedAttribute(self.location, ("Throws",))])
            if chromeOnly:
                method.addExtendedAttributes(
                    [IDLExtendedAttribute(self.location, ("ChromeOnly",))])
            if isPure:
                method.addExtendedAttributes(
                    [IDLExtendedAttribute(self.location, ("Pure",))])
            # Following attributes are used for keys/values/entries. Can't mark
            # them pure, since they return a new object each time they are run.
            if affectsNothing:
                method.addExtendedAttributes(
                    [IDLExtendedAttribute(self.location, ("DependsOn", "Everything")),
                     IDLExtendedAttribute(self.location, ("Affects", "Nothing"))])
            members.append(method)

        # Both maplike and setlike have a size attribute
        members.append(IDLAttribute(self.location,
                                    IDLUnresolvedIdentifier(BuiltinLocation("<auto-generated-identifier>"), "size"),
                                    BuiltinTypes[IDLBuiltinType.Types.unsigned_long],
                                    True,
                                    maplikeOrSetlike=self))
        self.reserved_ro_names = ["size"]

        # object entries()
        addMethod("entries", False, BuiltinTypes[IDLBuiltinType.Types.object],
                  affectsNothing=True)
        # object keys()
        addMethod("keys", False, BuiltinTypes[IDLBuiltinType.Types.object],
                  affectsNothing=True)
        # object values()
        addMethod("values", False, BuiltinTypes[IDLBuiltinType.Types.object],
                  affectsNothing=True)

        # void forEach(callback(valueType, keyType), thisVal)
        foreachArguments = [IDLArgument(self.location,
                                        IDLUnresolvedIdentifier(BuiltinLocation("<auto-generated-identifier>"),
                                                                "callback"),
                                        BuiltinTypes[IDLBuiltinType.Types.object]),
                            IDLArgument(self.location,
                                        IDLUnresolvedIdentifier(BuiltinLocation("<auto-generated-identifier>"),
                                                                "thisArg"),
                                        BuiltinTypes[IDLBuiltinType.Types.any],
                                        optional=True)]
        addMethod("forEach", False, BuiltinTypes[IDLBuiltinType.Types.void],
                  foreachArguments)

        def getKeyArg():
            # Fresh IDLArgument each call: arguments can't be shared between
            # the several methods that take a key.
            return IDLArgument(self.location,
                               IDLUnresolvedIdentifier(self.location, "key"),
                               self.keyType)

        # boolean has(keyType key)
        addMethod("has", False, BuiltinTypes[IDLBuiltinType.Types.boolean],
                  [getKeyArg()], isPure=True)

        if not self.readonly:
            # void clear()
            addMethod("clear", True, BuiltinTypes[IDLBuiltinType.Types.void],
                      [])
            # boolean delete(keyType key)
            addMethod("delete", True,
                      BuiltinTypes[IDLBuiltinType.Types.boolean], [getKeyArg()])
        # Always generate underscored functions (e.g. __add, __clear) for js
        # implemented interfaces as convenience functions.
        if isJSImplemented:
            # void clear()
            addMethod("clear", True, BuiltinTypes[IDLBuiltinType.Types.void],
                      [], chromeOnly=True)
            # boolean delete(keyType key)
            addMethod("delete", True,
                      BuiltinTypes[IDLBuiltinType.Types.boolean], [getKeyArg()],
                      chromeOnly=True)

        if self.isSetlike():
            if not self.readonly:
                # Add returns the set object it just added to.
                # object add(keyType key)
                addMethod("add", True,
                          BuiltinTypes[IDLBuiltinType.Types.object], [getKeyArg()])
            if isJSImplemented:
                addMethod("add", True,
                          BuiltinTypes[IDLBuiltinType.Types.object], [getKeyArg()],
                          chromeOnly=True)
            return

        # If we get this far, we're a maplike declaration.

        # valueType get(keyType key)
        #
        # Note that instead of the value type, we're using any here. The
        # validity checks should happen as things are inserted into the map,
        # and using any as the return type makes code generation much simpler.
        #
        # TODO: Bug 1155340 may change this to use specific type to provide
        # more info to JIT.
        addMethod("get", False, BuiltinTypes[IDLBuiltinType.Types.any],
                  [getKeyArg()], isPure=True)

        def getValueArg():
            return IDLArgument(self.location,
                               IDLUnresolvedIdentifier(self.location, "value"),
                               self.valueType)

        if not self.readonly:
            addMethod("set", True, BuiltinTypes[IDLBuiltinType.Types.object],
                      [getKeyArg(), getValueArg()])
            if isJSImplemented:
                addMethod("set", True, BuiltinTypes[IDLBuiltinType.Types.object],
                          [getKeyArg(), getValueArg()], chromeOnly=True)

    def resolve(self, parentScope):
        # Resolve the key/value types against the enclosing scope.
        self.keyType.resolveType(parentScope)
        self.valueType.resolveType(parentScope)

    def finish(self, scope):
        IDLInterfaceMember.finish(self, scope)
        # Complete the key/value types now that all names are known.
        if not self.keyType.isComplete():
            t = self.keyType.complete(scope)

            assert not isinstance(t, IDLUnresolvedType)
            assert not isinstance(t, IDLTypedefType)
            assert not isinstance(t.name, IDLUnresolvedIdentifier)
            self.keyType = t
        if not self.valueType.isComplete():
            t = self.valueType.complete(scope)

            assert not isinstance(t, IDLUnresolvedType)
            assert not isinstance(t, IDLTypedefType)
            assert not isinstance(t.name, IDLUnresolvedIdentifier)
            self.valueType = t

    def validate(self):
        IDLInterfaceMember.validate(self)

    def handleExtendedAttribute(self, attr):
        IDLInterfaceMember.handleExtendedAttribute(self, attr)

    def _getDependentObjects(self):
        return set([self.keyType, self.valueType])
class IDLConst(IDLInterfaceMember):
    """A 'const' member of an interface: a typed compile-time constant."""

    def __init__(self, location, identifier, type, value):
        IDLInterfaceMember.__init__(self, location, identifier,
                                    IDLInterfaceMember.Tags.Const)

        assert isinstance(type, IDLType)
        if type.isDictionary():
            raise WebIDLError("A constant cannot be of a dictionary type",
                              [self.location])
        self.type = type
        self.value = value

        if identifier.name == "prototype":
            raise WebIDLError("The identifier of a constant must not be 'prototype'",
                              [location])

    def __str__(self):
        return "'%s' const '%s'" % (self.type, self.identifier)

    def finish(self, scope):
        """Complete our type and coerce the parsed value to it."""
        IDLInterfaceMember.finish(self, scope)

        if not self.type.isComplete():
            type = self.type.complete(scope)
            if not type.isPrimitive() and not type.isString():
                locations = [self.type.location, type.location]
                try:
                    locations.append(type.inner.location)
                except AttributeError:
                    # Not every type has an 'inner'; the two locations we
                    # already collected are enough for the error report.
                    # (Narrowed from a bare 'except:', which could also hide
                    # unrelated failures.)
                    pass
                raise WebIDLError("Incorrect type for constant", locations)
            self.type = type

        # The value might not match the type
        coercedValue = self.value.coerceToType(self.type, self.location)
        assert coercedValue

        self.value = coercedValue

    def validate(self):
        IDLInterfaceMember.validate(self)

    def handleExtendedAttribute(self, attr):
        identifier = attr.identifier()
        if identifier == "Exposed":
            convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames)
        elif (identifier == "Pref" or
              identifier == "ChromeOnly" or
              identifier == "Func" or
              identifier == "AvailableIn" or
              identifier == "CheckAnyPermissions" or
              identifier == "CheckAllPermissions"):
            # Known attributes that we don't need to do anything with here
            pass
        else:
            raise WebIDLError("Unknown extended attribute %s on constant" % identifier,
                              [attr.location])
        IDLInterfaceMember.handleExtendedAttribute(self, attr)

    def _getDependentObjects(self):
        return set([self.type, self.value])
class IDLAttribute(IDLInterfaceMember):
    """An attribute member of an interface."""

    def __init__(self, location, identifier, type, readonly, inherit=False,
                 static=False, stringifier=False, maplikeOrSetlike=None):
        IDLInterfaceMember.__init__(self, location, identifier,
                                    IDLInterfaceMember.Tags.Attr)

        assert isinstance(type, IDLType)
        self.type = type
        self.readonly = readonly
        self.inherit = inherit
        self.static = static
        # Set to True by the [LenientThis] extended attribute.
        self.lenientThis = False
        # Set to True by the [Unforgeable] extended attribute.
        self._unforgeable = False
        self.stringifier = stringifier
        # [EnforceRange]/[Clamp] numeric-conversion flags.
        self.enforceRange = False
        self.clamp = False
        # Reserved-slot index; None until assigned (presumably during later
        # processing of [Cached]/[StoreInSlot] — not visible here).
        self.slotIndex = None
        assert maplikeOrSetlike is None or isinstance(maplikeOrSetlike, IDLMaplikeOrSetlike)
        # Non-None when this attribute was auto-generated by a maplike/setlike
        # declaration (e.g. the 'size' attribute).
        self.maplikeOrSetlike = maplikeOrSetlike
        # [DependsOn]/[Affects] defaults; refined via _setDependsOn/_setAffects.
        self.dependsOn = "Everything"
        self.affects = "Everything"

        if static and identifier.name == "prototype":
            raise WebIDLError("The identifier of a static attribute must not be 'prototype'",
                              [location])

        if readonly and inherit:
            raise WebIDLError("An attribute cannot be both 'readonly' and 'inherit'",
                              [self.location])

    def isStatic(self):
        return self.static

    def __str__(self):
        return "'%s' attribute '%s'" % (self.type, self.identifier)

    def finish(self, scope):
        """Complete our type and enforce type-based restrictions on
        attributes (no dictionaries, no uncached sequences/MozMaps, etc.)."""
        IDLInterfaceMember.finish(self, scope)

        if not self.type.isComplete():
            t = self.type.complete(scope)

            assert not isinstance(t, IDLUnresolvedType)
            assert not isinstance(t, IDLTypedefType)
            assert not isinstance(t.name, IDLUnresolvedIdentifier)
            self.type = t

        if self.type.isDictionary() and not self.getExtendedAttribute("Cached"):
            raise WebIDLError("An attribute cannot be of a dictionary type",
                              [self.location])
        if self.type.isSequence() and not self.getExtendedAttribute("Cached"):
            raise WebIDLError("A non-cached attribute cannot be of a sequence "
                              "type", [self.location])
        if self.type.isMozMap() and not self.getExtendedAttribute("Cached"):
            raise WebIDLError("A non-cached attribute cannot be of a MozMap "
                              "type", [self.location])
        if self.type.isUnion():
            # Unions are checked recursively via their flattened member types.
            for f in self.type.unroll().flatMemberTypes:
                if f.isDictionary():
                    raise WebIDLError("An attribute cannot be of a union "
                                      "type if one of its member types (or "
                                      "one of its member types's member "
                                      "types, and so on) is a dictionary "
                                      "type", [self.location, f.location])
                if f.isSequence():
                    raise WebIDLError("An attribute cannot be of a union "
                                      "type if one of its member types (or "
                                      "one of its member types's member "
                                      "types, and so on) is a sequence "
                                      "type", [self.location, f.location])
                if f.isMozMap():
                    raise WebIDLError("An attribute cannot be of a union "
                                      "type if one of its member types (or "
                                      "one of its member types's member "
                                      "types, and so on) is a MozMap "
                                      "type", [self.location, f.location])
        if not self.type.isInterface() and self.getExtendedAttribute("PutForwards"):
            raise WebIDLError("An attribute with [PutForwards] must have an "
                              "interface type as its type", [self.location])
        if not self.type.isInterface() and self.getExtendedAttribute("SameObject"):
            raise WebIDLError("An attribute with [SameObject] must have an "
                              "interface type as its type", [self.location])

    def validate(self):
        IDLInterfaceMember.validate(self)

        # [Cached]/[StoreInSlot] getters may be skipped at runtime, so the
        # getter must not have observable side effects.
        if ((self.getExtendedAttribute("Cached") or
             self.getExtendedAttribute("StoreInSlot")) and
            not self.affects == "Nothing"):
            raise WebIDLError("Cached attributes and attributes stored in "
                              "slots must be Constant or Pure or "
                              "Affects=Nothing, since the getter won't always "
                              "be called.",
                              [self.location])
        if self.getExtendedAttribute("Frozen"):
            if (not self.type.isSequence() and not self.type.isDictionary() and
                not self.type.isMozMap()):
                raise WebIDLError("[Frozen] is only allowed on "
                                  "sequence-valued, dictionary-valued, and "
                                  "MozMap-valued attributes",
                                  [self.location])
        if not self.type.unroll().isExposedInAllOf(self.exposureSet):
            raise WebIDLError("Attribute returns a type that is not exposed "
                              "everywhere where the attribute is exposed",
                              [self.location])

    def handleExtendedAttribute(self, attr):
        """Validate and record one extended attribute.

        NOTE: branch order is significant; e.g. invalid uses of
        [Constant]/[SameObject] on writable attributes are rejected by early
        branches before the valid-use branch further down is reached.
        """
        identifier = attr.identifier()
        if identifier == "SetterThrows" and self.readonly:
            raise WebIDLError("Readonly attributes must not be flagged as "
                              "[SetterThrows]",
                              [self.location])
        elif (((identifier == "Throws" or identifier == "GetterThrows") and
               self.getExtendedAttribute("StoreInSlot")) or
              (identifier == "StoreInSlot" and
               (self.getExtendedAttribute("Throws") or
                self.getExtendedAttribute("GetterThrows")))):
            raise WebIDLError("Throwing things can't be [StoreInSlot]",
                              [attr.location])
        elif identifier == "LenientThis":
            if not attr.noArguments():
                raise WebIDLError("[LenientThis] must take no arguments",
                                  [attr.location])
            if self.isStatic():
                raise WebIDLError("[LenientThis] is only allowed on non-static "
                                  "attributes", [attr.location, self.location])
            if self.getExtendedAttribute("CrossOriginReadable"):
                raise WebIDLError("[LenientThis] is not allowed in combination "
                                  "with [CrossOriginReadable]",
                                  [attr.location, self.location])
            if self.getExtendedAttribute("CrossOriginWritable"):
                raise WebIDLError("[LenientThis] is not allowed in combination "
                                  "with [CrossOriginWritable]",
                                  [attr.location, self.location])
            self.lenientThis = True
        elif identifier == "Unforgeable":
            if self.isStatic():
                raise WebIDLError("[Unforgeable] is only allowed on non-static "
                                  "attributes", [attr.location, self.location])
            self._unforgeable = True
        elif identifier == "SameObject" and not self.readonly:
            raise WebIDLError("[SameObject] only allowed on readonly attributes",
                              [attr.location, self.location])
        elif identifier == "Constant" and not self.readonly:
            raise WebIDLError("[Constant] only allowed on readonly attributes",
                              [attr.location, self.location])
        elif identifier == "PutForwards":
            if not self.readonly:
                raise WebIDLError("[PutForwards] is only allowed on readonly "
                                  "attributes", [attr.location, self.location])
            if self.isStatic():
                raise WebIDLError("[PutForwards] is only allowed on non-static "
                                  "attributes", [attr.location, self.location])
            if self.getExtendedAttribute("Replaceable") is not None:
                raise WebIDLError("[PutForwards] and [Replaceable] can't both "
                                  "appear on the same attribute",
                                  [attr.location, self.location])
            if not attr.hasValue():
                raise WebIDLError("[PutForwards] takes an identifier",
                                  [attr.location, self.location])
        elif identifier == "Replaceable":
            if not attr.noArguments():
                raise WebIDLError("[Replaceable] must take no arguments",
                                  [attr.location])
            if not self.readonly:
                raise WebIDLError("[Replaceable] is only allowed on readonly "
                                  "attributes", [attr.location, self.location])
            if self.isStatic():
                raise WebIDLError("[Replaceable] is only allowed on non-static "
                                  "attributes", [attr.location, self.location])
            if self.getExtendedAttribute("PutForwards") is not None:
                raise WebIDLError("[PutForwards] and [Replaceable] can't both "
                                  "appear on the same attribute",
                                  [attr.location, self.location])
        elif identifier == "LenientFloat":
            if self.readonly:
                raise WebIDLError("[LenientFloat] used on a readonly attribute",
                                  [attr.location, self.location])
            if not self.type.includesRestrictedFloat():
                raise WebIDLError("[LenientFloat] used on an attribute with a "
                                  "non-restricted-float type",
                                  [attr.location, self.location])
        elif identifier == "EnforceRange":
            if self.readonly:
                raise WebIDLError("[EnforceRange] used on a readonly attribute",
                                  [attr.location, self.location])
            self.enforceRange = True
        elif identifier == "Clamp":
            if self.readonly:
                raise WebIDLError("[Clamp] used on a readonly attribute",
                                  [attr.location, self.location])
            self.clamp = True
        elif identifier == "StoreInSlot":
            if self.getExtendedAttribute("Cached"):
                raise WebIDLError("[StoreInSlot] and [Cached] must not be "
                                  "specified on the same attribute",
                                  [attr.location, self.location])
        elif identifier == "Cached":
            if self.getExtendedAttribute("StoreInSlot"):
                raise WebIDLError("[Cached] and [StoreInSlot] must not be "
                                  "specified on the same attribute",
                                  [attr.location, self.location])
        elif (identifier == "CrossOriginReadable" or
              identifier == "CrossOriginWritable"):
            # NOTE(review): only CrossOriginReadable is checked for taking no
            # arguments here; CrossOriginWritable with arguments is silently
            # accepted — confirm whether that asymmetry is intended.
            if not attr.noArguments() and identifier == "CrossOriginReadable":
                raise WebIDLError("[%s] must take no arguments" % identifier,
                                  [attr.location])
            if self.isStatic():
                raise WebIDLError("[%s] is only allowed on non-static "
                                  "attributes" % identifier,
                                  [attr.location, self.location])
            if self.getExtendedAttribute("LenientThis"):
                raise WebIDLError("[LenientThis] is not allowed in combination "
                                  "with [%s]" % identifier,
                                  [attr.location, self.location])
        elif identifier == "Exposed":
            convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames)
        elif identifier == "Pure":
            if not attr.noArguments():
                raise WebIDLError("[Pure] must take no arguments",
                                  [attr.location])
            # [Pure] == depends only on DOM state, affects nothing.
            self._setDependsOn("DOMState")
            self._setAffects("Nothing")
        elif identifier == "Constant" or identifier == "SameObject":
            if not attr.noArguments():
                raise WebIDLError("[%s] must take no arguments" % identifier,
                                  [attr.location])
            # [Constant]/[SameObject] == depends on nothing, affects nothing.
            self._setDependsOn("Nothing")
            self._setAffects("Nothing")
        elif identifier == "Affects":
            if not attr.hasValue():
                raise WebIDLError("[Affects] takes an identifier",
                                  [attr.location])
            self._setAffects(attr.value())
        elif identifier == "DependsOn":
            if not attr.hasValue():
                raise WebIDLError("[DependsOn] takes an identifier",
                                  [attr.location])
            if (attr.value() != "Everything" and attr.value() != "DOMState" and
                not self.readonly):
                raise WebIDLError("[DependsOn=%s] only allowed on "
                                  "readonly attributes" % attr.value(),
                                  [attr.location, self.location])
            self._setDependsOn(attr.value())
        elif (identifier == "Pref" or
              identifier == "Deprecated" or
              identifier == "SetterThrows" or
              identifier == "Throws" or
              identifier == "GetterThrows" or
              identifier == "ChromeOnly" or
              identifier == "Func" or
              identifier == "Frozen" or
              identifier == "AvailableIn" or
              identifier == "NewObject" or
              identifier == "UnsafeInPrerendering" or
              identifier == "CheckAnyPermissions" or
              identifier == "CheckAllPermissions" or
              identifier == "BinaryName"):
            # Known attributes that we don't need to do anything with here
            pass
        else:
            raise WebIDLError("Unknown extended attribute %s on attribute" % identifier,
                              [attr.location])
        IDLInterfaceMember.handleExtendedAttribute(self, attr)

    def resolve(self, parentScope):
        assert isinstance(parentScope, IDLScope)
        self.type.resolveType(parentScope)
        IDLObjectWithIdentifier.resolve(self, parentScope)

    def addExtendedAttributes(self, attrs):
        # String-handling attributes ([TreatNullAs] and friends) are filtered
        # out before the generic handling runs.
        attrs = self.checkForStringHandlingExtendedAttributes(attrs)
        IDLInterfaceMember.addExtendedAttributes(self, attrs)

    def hasLenientThis(self):
        return self.lenientThis

    def isMaplikeOrSetlikeAttr(self):
        """
        True if this attribute was generated from an interface with
        maplike/setlike (e.g. this is the size attribute for
        maplike/setlike)
        """
        return self.maplikeOrSetlike is not None

    def isUnforgeable(self):
        return self._unforgeable

    def _getDependentObjects(self):
        return set([self.type])
class IDLArgument(IDLObjectWithIdentifier):
    """An argument to an operation, or a dictionary member — the parser
    reuses this class for both, distinguished by dictionaryMember."""

    def __init__(self, location, identifier, type, optional=False, defaultValue=None, variadic=False, dictionaryMember=False):
        IDLObjectWithIdentifier.__init__(self, location, None, identifier)

        assert isinstance(type, IDLType)
        self.type = type

        self.optional = optional
        self.defaultValue = defaultValue
        self.variadic = variadic
        self.dictionaryMember = dictionaryMember
        self._isComplete = False
        # [EnforceRange]/[Clamp] numeric-conversion flags.
        self.enforceRange = False
        self.clamp = False
        self._allowTreatNonCallableAsNull = False

        # A variadic argument is implicitly optional and cannot carry a
        # default value.
        assert not variadic or optional
        assert not variadic or not defaultValue

    def addExtendedAttributes(self, attrs):
        # String-handling attributes are filtered out first; the remainder
        # must be one of the few attributes arguments understand.
        attrs = self.checkForStringHandlingExtendedAttributes(
            attrs,
            isDictionaryMember=self.dictionaryMember,
            isOptional=self.optional)
        for attribute in attrs:
            identifier = attribute.identifier()
            if identifier == "Clamp":
                if not attribute.noArguments():
                    raise WebIDLError("[Clamp] must take no arguments",
                                      [attribute.location])
                if self.enforceRange:
                    raise WebIDLError("[EnforceRange] and [Clamp] are mutually exclusive",
                                      [self.location])
                self.clamp = True
            elif identifier == "EnforceRange":
                if not attribute.noArguments():
                    raise WebIDLError("[EnforceRange] must take no arguments",
                                      [attribute.location])
                if self.clamp:
                    raise WebIDLError("[EnforceRange] and [Clamp] are mutually exclusive",
                                      [self.location])
                self.enforceRange = True
            elif identifier == "TreatNonCallableAsNull":
                self._allowTreatNonCallableAsNull = True
            else:
                raise WebIDLError("Unhandled extended attribute on %s" %
                                  ("a dictionary member" if self.dictionaryMember else
                                   "an argument"),
                                  [attribute.location])

    def isComplete(self):
        return self._isComplete

    def complete(self, scope):
        """Complete our type and synthesize implicit default values:
        optional non-variadic dictionaries default to null, and optional
        'any' defaults to undefined, so codegen needn't special-case them.
        The default (explicit or synthesized) is then coerced to our type."""
        if self._isComplete:
            return

        self._isComplete = True

        if not self.type.isComplete():
            type = self.type.complete(scope)
            assert not isinstance(type, IDLUnresolvedType)
            assert not isinstance(type, IDLTypedefType)
            assert not isinstance(type.name, IDLUnresolvedIdentifier)
            self.type = type

        if ((self.type.isDictionary() or
             self.type.isUnion() and self.type.unroll().hasDictionaryType()) and
            self.optional and not self.defaultValue and not self.variadic):
            # Default optional non-variadic dictionaries to null,
            # for simplicity, so the codegen doesn't have to special-case this.
            self.defaultValue = IDLNullValue(self.location)
        elif self.type.isAny():
            assert (self.defaultValue is None or
                    isinstance(self.defaultValue, IDLNullValue))
            # optional 'any' values always have a default value
            if self.optional and not self.defaultValue and not self.variadic:
                # Set the default value to undefined, for simplicity, so the
                # codegen doesn't have to special-case this.
                self.defaultValue = IDLUndefinedValue(self.location)

        # Now do the coercing thing; this needs to happen after the
        # above creation of a default value.
        if self.defaultValue:
            self.defaultValue = self.defaultValue.coerceToType(self.type,
                                                               self.location)
            assert self.defaultValue

    def allowTreatNonCallableAsNull(self):
        return self._allowTreatNonCallableAsNull

    def _getDependentObjects(self):
        deps = set([self.type])
        if self.defaultValue:
            deps.add(self.defaultValue)
        return deps

    def canHaveMissingValue(self):
        # True when the argument may legitimately be absent at call time
        # (optional with no default, explicit or synthesized).
        return self.optional and not self.defaultValue
class IDLCallback(IDLObjectWithScope):
    """A WebIDL callback function: a named (returnType, arguments) signature.

    This is a callback *function*, not a callback interface.  The instance
    also acts as the scope in which its argument identifiers are resolved.
    """
    def __init__(self, location, parentScope, identifier, returnType, arguments):
        assert isinstance(returnType, IDLType)
        self._returnType = returnType
        # Clone the list
        self._arguments = list(arguments)
        IDLObjectWithScope.__init__(self, location, parentScope, identifier)
        # Resolve argument identifiers inside our own scope.
        for (returnType, arguments) in self.signatures():
            for argument in arguments:
                argument.resolve(self)
        self._treatNonCallableAsNull = False
        self._treatNonObjectAsNull = False
    def module(self):
        # Derive a binding module name from the defining .webidl file,
        # e.g. "Foo.webidl" -> "FooBinding".
        return self.location.filename().split('/')[-1].split('.webidl')[0] + 'Binding'
    def isCallback(self):
        return True
    def signatures(self):
        # A callback function has exactly one signature.
        return [(self._returnType, self._arguments)]
    def finish(self, scope):
        # Complete the return type and every argument type that is still
        # unresolved; assert that completion produced concrete types.
        if not self._returnType.isComplete():
            type = self._returnType.complete(scope)
            assert not isinstance(type, IDLUnresolvedType)
            assert not isinstance(type, IDLTypedefType)
            assert not isinstance(type.name, IDLUnresolvedIdentifier)
            self._returnType = type
        for argument in self._arguments:
            if argument.type.isComplete():
                continue
            type = argument.type.complete(scope)
            assert not isinstance(type, IDLUnresolvedType)
            assert not isinstance(type, IDLTypedefType)
            assert not isinstance(type.name, IDLUnresolvedIdentifier)
            argument.type = type
    def validate(self):
        pass
    def addExtendedAttributes(self, attrs):
        """Consume [TreatNonCallableAsNull]/[TreatNonObjectAsNull]; forward
        anything else to the base implementation."""
        unhandledAttrs = []
        for attr in attrs:
            if attr.identifier() == "TreatNonCallableAsNull":
                self._treatNonCallableAsNull = True
            elif attr.identifier() == "TreatNonObjectAsNull":
                self._treatNonObjectAsNull = True
            else:
                unhandledAttrs.append(attr)
        if self._treatNonCallableAsNull and self._treatNonObjectAsNull:
            raise WebIDLError("Cannot specify both [TreatNonCallableAsNull] "
                              "and [TreatNonObjectAsNull]", [self.location])
        if len(unhandledAttrs) != 0:
            # NOTE(review): this calls IDLType.addExtendedAttributes even
            # though IDLCallback derives from IDLObjectWithScope, not IDLType.
            # Presumably intentional delegation, but verify it is reachable
            # without raising a TypeError on Python 2 unbound-method checks.
            IDLType.addExtendedAttributes(self, unhandledAttrs)
    def _getDependentObjects(self):
        return set([self._returnType] + self._arguments)
class IDLCallbackType(IDLType):
    """The IDLType wrapper around an IDLCallback definition."""
    def __init__(self, location, callback):
        IDLType.__init__(self, location, callback.identifier.name)
        self.callback = callback

    def isCallback(self):
        """A callback type is, by definition, a callback."""
        return True

    def tag(self):
        return IDLType.Tags.callback

    def isDistinguishableFrom(self, other):
        """A callback is distinguishable from any type whose values can
        never be callable objects."""
        if other.isPromise():
            return False
        if other.isUnion():
            # Let the union work out distinguishability against us.
            return other.isDistinguishableFrom(self)
        distinguishable = (other.isPrimitive() or
                           other.isString() or
                           other.isEnum() or
                           other.isNonCallbackInterface() or
                           other.isDate() or
                           other.isSequence())
        return distinguishable

    def _getDependentObjects(self):
        return self.callback._getDependentObjects()
class IDLMethodOverload:
    """One concrete (returnType, arguments, location) signature of a method.

    Not quite an element of the spec's "effective overload set": optional
    arguments do not multiply entries here.  When several methods share a
    name there is one IDLMethodOverload per declaration, all owned by a
    single IDLMethod that represents the full overload set.
    """
    def __init__(self, returnType, arguments, location):
        self.returnType = returnType
        # Defensive copy: later mutation of the caller's list must not
        # affect this overload.
        self.arguments = list(arguments)
        self.location = location

    def _getDependentObjects(self):
        """Everything this overload's validity depends on."""
        return set(self.arguments) | {self.returnType}
class IDLMethod(IDLInterfaceMember, IDLScope):
    """A WebIDL operation, including all of its overloads.

    Acts both as an interface member and as the scope for its arguments.
    Special operations (getter/setter/creator/deleter/legacycaller,
    stringifier, jsonifier) cannot be overloaded; see addOverload.
    """
    Special = enum(
        'Getter',
        'Setter',
        'Creator',
        'Deleter',
        'LegacyCaller',
        base=IDLInterfaceMember.Special
    )
    TypeSuffixModifier = enum(
        'None',
        'QMark',
        'Brackets'
    )
    NamedOrIndexed = enum(
        'Neither',
        'Named',
        'Indexed'
    )
    def __init__(self, location, identifier, returnType, arguments,
                 static=False, getter=False, setter=False, creator=False,
                 deleter=False, specialType=NamedOrIndexed.Neither,
                 legacycaller=False, stringifier=False, jsonifier=False,
                 maplikeOrSetlike=None):
        # REVIEW: specialType is NamedOrIndexed -- wow, this is messed up.
        IDLInterfaceMember.__init__(self, location, identifier,
                                    IDLInterfaceMember.Tags.Method)
        self._hasOverloads = False
        assert isinstance(returnType, IDLType)
        # self._overloads is a list of IDLMethodOverloads
        self._overloads = [IDLMethodOverload(returnType, arguments, location)]
        assert isinstance(static, bool)
        self._static = static
        assert isinstance(getter, bool)
        self._getter = getter
        assert isinstance(setter, bool)
        self._setter = setter
        assert isinstance(creator, bool)
        self._creator = creator
        assert isinstance(deleter, bool)
        self._deleter = deleter
        assert isinstance(legacycaller, bool)
        self._legacycaller = legacycaller
        assert isinstance(stringifier, bool)
        self._stringifier = stringifier
        assert isinstance(jsonifier, bool)
        self._jsonifier = jsonifier
        assert maplikeOrSetlike is None or isinstance(maplikeOrSetlike, IDLMaplikeOrSetlike)
        self.maplikeOrSetlike = maplikeOrSetlike
        self._specialType = specialType
        self._unforgeable = False
        # Conservative defaults for the dependency/effect tracking used by
        # [Pure]/[Affects]/[DependsOn].
        self.dependsOn = "Everything"
        self.affects = "Everything"
        self.aliases = []
        if static and identifier.name == "prototype":
            raise WebIDLError("The identifier of a static operation must not be 'prototype'",
                              [location])
        self.assertSignatureConstraints()
    def __str__(self):
        return "Method '%s'" % self.identifier
    def assertSignatureConstraints(self):
        # Sanity-check the argument/return shapes the parser must have
        # enforced for special operations.
        if self._getter or self._deleter:
            assert len(self._overloads) == 1
            overload = self._overloads[0]
            arguments = overload.arguments
            assert len(arguments) == 1
            assert (arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.domstring] or
                    arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.unsigned_long])
            assert not arguments[0].optional and not arguments[0].variadic
            assert not self._getter or not overload.returnType.isVoid()
        if self._setter or self._creator:
            assert len(self._overloads) == 1
            arguments = self._overloads[0].arguments
            assert len(arguments) == 2
            assert (arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.domstring] or
                    arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.unsigned_long])
            assert not arguments[0].optional and not arguments[0].variadic
            assert not arguments[1].optional and not arguments[1].variadic
        if self._stringifier:
            assert len(self._overloads) == 1
            overload = self._overloads[0]
            assert len(overload.arguments) == 0
            assert overload.returnType == BuiltinTypes[IDLBuiltinType.Types.domstring]
        if self._jsonifier:
            assert len(self._overloads) == 1
            overload = self._overloads[0]
            assert len(overload.arguments) == 0
            assert overload.returnType == BuiltinTypes[IDLBuiltinType.Types.object]
    def isStatic(self):
        return self._static
    def isGetter(self):
        return self._getter
    def isSetter(self):
        return self._setter
    def isCreator(self):
        return self._creator
    def isDeleter(self):
        return self._deleter
    def isNamed(self):
        assert (self._specialType == IDLMethod.NamedOrIndexed.Named or
                self._specialType == IDLMethod.NamedOrIndexed.Indexed)
        return self._specialType == IDLMethod.NamedOrIndexed.Named
    def isIndexed(self):
        assert (self._specialType == IDLMethod.NamedOrIndexed.Named or
                self._specialType == IDLMethod.NamedOrIndexed.Indexed)
        return self._specialType == IDLMethod.NamedOrIndexed.Indexed
    def isLegacycaller(self):
        return self._legacycaller
    def isStringifier(self):
        return self._stringifier
    def isJsonifier(self):
        return self._jsonifier
    def isMaplikeOrSetlikeMethod(self):
        """
        True if this method was generated as part of a
        maplike/setlike/etc interface (e.g. has/get methods)
        """
        return self.maplikeOrSetlike is not None
    def hasOverloads(self):
        return self._hasOverloads
    def isIdentifierLess(self):
        """
        True if the method name started with __, and if the method is not a
        maplike/setlike method. Interfaces with maplike/setlike will generate
        methods starting with __ for chrome only backing object access in JS
        implemented interfaces, so while these functions use what is considered
        an non-identifier name, they actually DO have an identifier.
        """
        return (self.identifier.name[:2] == "__" and
                not self.isMaplikeOrSetlikeMethod())
    def resolve(self, parentScope):
        # Resolve the method itself in its parent, then become a scope and
        # resolve every argument of every overload inside it.
        assert isinstance(parentScope, IDLScope)
        IDLObjectWithIdentifier.resolve(self, parentScope)
        IDLScope.__init__(self, self.location, parentScope, self.identifier)
        for (returnType, arguments) in self.signatures():
            for argument in arguments:
                argument.resolve(self)
    def addOverload(self, method):
        """Merge another method with the same name into this one's overload
        set; extended attributes and static/legacycaller flags must match."""
        assert len(method._overloads) == 1
        if self._extendedAttrDict != method ._extendedAttrDict:
            raise WebIDLError("Extended attributes differ on different "
                              "overloads of %s" % method.identifier,
                              [self.location, method.location])
        self._overloads.extend(method._overloads)
        self._hasOverloads = True
        if self.isStatic() != method.isStatic():
            raise WebIDLError("Overloaded identifier %s appears with different values of the 'static' attribute" % method.identifier,
                              [method.location])
        if self.isLegacycaller() != method.isLegacycaller():
            raise WebIDLError("Overloaded identifier %s appears with different values of the 'legacycaller' attribute" % method.identifier,
                              [method.location])
        # Can't overload special things!
        assert not self.isGetter()
        assert not method.isGetter()
        assert not self.isSetter()
        assert not method.isSetter()
        assert not self.isCreator()
        assert not method.isCreator()
        assert not self.isDeleter()
        assert not method.isDeleter()
        assert not self.isStringifier()
        assert not method.isStringifier()
        assert not self.isJsonifier()
        assert not method.isJsonifier()
        return self
    def signatures(self):
        # Each signature is a (returnType, arguments) pair.
        return [(overload.returnType, overload.arguments) for overload in
                self._overloads]
    def finish(self, scope):
        """Complete all overload types, then precompute the argument-count
        data used by the WebIDL overload resolution algorithm."""
        IDLInterfaceMember.finish(self, scope)
        for overload in self._overloads:
            returnType = overload.returnType
            if not returnType.isComplete():
                returnType = returnType.complete(scope)
                assert not isinstance(returnType, IDLUnresolvedType)
                assert not isinstance(returnType, IDLTypedefType)
                assert not isinstance(returnType.name, IDLUnresolvedIdentifier)
                overload.returnType = returnType
            for argument in overload.arguments:
                if not argument.isComplete():
                    argument.complete(scope)
                assert argument.type.isComplete()
        # Now compute various information that will be used by the
        # WebIDL overload resolution algorithm.
        self.maxArgCount = max(len(s[1]) for s in self.signatures())
        self.allowedArgCounts = [i for i in range(self.maxArgCount+1)
                                 if len(self.signaturesForArgCount(i)) != 0]
    def validate(self):
        """Enforce overload distinguishability, dictionary-argument rules,
        variadic placement, and Promise-return consistency."""
        IDLInterfaceMember.validate(self)
        # Make sure our overloads are properly distinguishable and don't have
        # different argument types before the distinguishing args.
        for argCount in self.allowedArgCounts:
            possibleOverloads = self.overloadsForArgCount(argCount)
            if len(possibleOverloads) == 1:
                continue
            distinguishingIndex = self.distinguishingIndexForArgCount(argCount)
            for idx in range(distinguishingIndex):
                firstSigType = possibleOverloads[0].arguments[idx].type
                for overload in possibleOverloads[1:]:
                    if overload.arguments[idx].type != firstSigType:
                        raise WebIDLError(
                            "Signatures for method '%s' with %d arguments have "
                            "different types of arguments at index %d, which "
                            "is before distinguishing index %d" %
                            (self.identifier.name, argCount, idx,
                             distinguishingIndex),
                            [self.location, overload.location])
        overloadWithPromiseReturnType = None
        overloadWithoutPromiseReturnType = None
        for overload in self._overloads:
            returnType = overload.returnType
            if not returnType.unroll().isExposedInAllOf(self.exposureSet):
                raise WebIDLError("Overload returns a type that is not exposed "
                                  "everywhere where the method is exposed",
                                  [overload.location])
            variadicArgument = None
            arguments = overload.arguments
            for (idx, argument) in enumerate(arguments):
                assert argument.type.isComplete()
                if ((argument.type.isDictionary() and
                     argument.type.inner.canBeEmpty())or
                    (argument.type.isUnion() and
                     argument.type.unroll().hasPossiblyEmptyDictionaryType())):
                    # Optional dictionaries and unions containing optional
                    # dictionaries at the end of the list or followed by
                    # optional arguments must be optional.
                    if (not argument.optional and
                        all(arg.optional for arg in arguments[idx+1:])):
                        raise WebIDLError("Dictionary argument or union "
                                          "argument containing a dictionary "
                                          "not followed by a required argument "
                                          "must be optional",
                                          [argument.location])
                    # An argument cannot be a Nullable Dictionary
                    if argument.type.nullable():
                        raise WebIDLError("An argument cannot be a nullable "
                                          "dictionary or nullable union "
                                          "containing a dictionary",
                                          [argument.location])
                # Only the last argument can be variadic
                if variadicArgument:
                    raise WebIDLError("Variadic argument is not last argument",
                                      [variadicArgument.location])
                if argument.variadic:
                    variadicArgument = argument
            if returnType.isPromise():
                overloadWithPromiseReturnType = overload
            else:
                overloadWithoutPromiseReturnType = overload
        # Make sure either all our overloads return Promises or none do
        if overloadWithPromiseReturnType and overloadWithoutPromiseReturnType:
            raise WebIDLError("We have overloads with both Promise and "
                              "non-Promise return types",
                              [overloadWithPromiseReturnType.location,
                               overloadWithoutPromiseReturnType.location])
        if overloadWithPromiseReturnType and self._legacycaller:
            raise WebIDLError("May not have a Promise return type for a "
                              "legacycaller.",
                              [overloadWithPromiseReturnType.location])
        if self.getExtendedAttribute("StaticClassOverride") and not \
           (self.identifier.scope.isJSImplemented() and self.isStatic()):
            raise WebIDLError("StaticClassOverride can be applied to static"
                              " methods on JS-implemented classes only.",
                              [self.location])
    def overloadsForArgCount(self, argc):
        # An overload is viable for argc arguments when it has exactly argc,
        # when the surplus trailing arguments are all optional, or when it
        # ends in a variadic that can absorb the extras.
        return [overload for overload in self._overloads if
                len(overload.arguments) == argc or
                (len(overload.arguments) > argc and
                 all(arg.optional for arg in overload.arguments[argc:])) or
                (len(overload.arguments) < argc and
                 len(overload.arguments) > 0 and
                 overload.arguments[-1].variadic)]
    def signaturesForArgCount(self, argc):
        return [(overload.returnType, overload.arguments) for overload
                in self.overloadsForArgCount(argc)]
    def locationsForArgCount(self, argc):
        return [overload.location for overload in self.overloadsForArgCount(argc)]
    def distinguishingIndexForArgCount(self, argc):
        """Return the first argument index at which every pair of signatures
        viable for argc arguments is distinguishable; raise if none exists."""
        def isValidDistinguishingIndex(idx, signatures):
            for (firstSigIndex, (firstRetval, firstArgs)) in enumerate(signatures[:-1]):
                for (secondRetval, secondArgs) in signatures[firstSigIndex+1:]:
                    if idx < len(firstArgs):
                        firstType = firstArgs[idx].type
                    else:
                        assert(firstArgs[-1].variadic)
                        firstType = firstArgs[-1].type
                    if idx < len(secondArgs):
                        secondType = secondArgs[idx].type
                    else:
                        assert(secondArgs[-1].variadic)
                        secondType = secondArgs[-1].type
                    if not firstType.isDistinguishableFrom(secondType):
                        return False
            return True
        signatures = self.signaturesForArgCount(argc)
        for idx in range(argc):
            if isValidDistinguishingIndex(idx, signatures):
                return idx
        # No valid distinguishing index.  Time to throw
        locations = self.locationsForArgCount(argc)
        raise WebIDLError("Signatures with %d arguments for method '%s' are not "
                          "distinguishable" % (argc, self.identifier.name),
                          locations)
    def handleExtendedAttribute(self, attr):
        """Validate/apply one extended attribute; unknown names are errors."""
        identifier = attr.identifier()
        if identifier == "GetterThrows":
            raise WebIDLError("Methods must not be flagged as "
                              "[GetterThrows]",
                              [attr.location, self.location])
        elif identifier == "SetterThrows":
            raise WebIDLError("Methods must not be flagged as "
                              "[SetterThrows]",
                              [attr.location, self.location])
        elif identifier == "Unforgeable":
            if self.isStatic():
                raise WebIDLError("[Unforgeable] is only allowed on non-static "
                                  "methods", [attr.location, self.location])
            self._unforgeable = True
        elif identifier == "SameObject":
            raise WebIDLError("Methods must not be flagged as [SameObject]",
                              [attr.location, self.location])
        elif identifier == "Constant":
            raise WebIDLError("Methods must not be flagged as [Constant]",
                              [attr.location, self.location])
        elif identifier == "PutForwards":
            raise WebIDLError("Only attributes support [PutForwards]",
                              [attr.location, self.location])
        elif identifier == "LenientFloat":
            # This is called before we've done overload resolution
            assert len(self.signatures()) == 1
            sig = self.signatures()[0]
            if not sig[0].isVoid():
                raise WebIDLError("[LenientFloat] used on a non-void method",
                                  [attr.location, self.location])
            if not any(arg.type.includesRestrictedFloat() for arg in sig[1]):
                raise WebIDLError("[LenientFloat] used on an operation with no "
                                  "restricted float type arguments",
                                  [attr.location, self.location])
        elif identifier == "Exposed":
            convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames)
        elif (identifier == "CrossOriginCallable" or
              identifier == "WebGLHandlesContextLoss"):
            # Known no-argument attributes.
            if not attr.noArguments():
                raise WebIDLError("[%s] must take no arguments" % identifier,
                                  [attr.location])
        elif identifier == "Pure":
            if not attr.noArguments():
                raise WebIDLError("[Pure] must take no arguments",
                                  [attr.location])
            self._setDependsOn("DOMState")
            self._setAffects("Nothing")
        elif identifier == "Affects":
            if not attr.hasValue():
                raise WebIDLError("[Affects] takes an identifier",
                                  [attr.location])
            self._setAffects(attr.value())
        elif identifier == "DependsOn":
            if not attr.hasValue():
                raise WebIDLError("[DependsOn] takes an identifier",
                                  [attr.location])
            self._setDependsOn(attr.value())
        elif identifier == "Alias":
            if not attr.hasValue():
                raise WebIDLError("[Alias] takes an identifier or string",
                                  [attr.location])
            self._addAlias(attr.value())
        elif (identifier == "Throws" or
              identifier == "NewObject" or
              identifier == "ChromeOnly" or
              identifier == "UnsafeInPrerendering" or
              identifier == "Pref" or
              identifier == "Deprecated" or
              identifier == "Func" or
              identifier == "AvailableIn" or
              identifier == "CheckAnyPermissions" or
              identifier == "CheckAllPermissions" or
              identifier == "BinaryName" or
              identifier == "MethodIdentityTestable" or
              identifier == "StaticClassOverride"):
            # Known attributes that we don't need to do anything with here
            pass
        else:
            raise WebIDLError("Unknown extended attribute %s on method" % identifier,
                              [attr.location])
        IDLInterfaceMember.handleExtendedAttribute(self, attr)
    def returnsPromise(self):
        # validate() guarantees all overloads agree on Promise-ness, so
        # checking the first one is sufficient.
        return self._overloads[0].returnType.isPromise()
    def isUnforgeable(self):
        return self._unforgeable
    def _getDependentObjects(self):
        deps = set()
        for overload in self._overloads:
            deps.update(overload._getDependentObjects())
        return deps
class IDLImplementsStatement(IDLObject):
    """An `A implements B;` statement.

    Until finish() runs, implementor/implementee are
    IDLIdentifierPlaceholders; afterwards they are real IDLInterfaces.
    """
    def __init__(self, location, implementor, implementee):
        IDLObject.__init__(self, location)
        self.implementor = implementor
        self.implementee = implementee
        self._finished = False
    def finish(self, scope):
        """Resolve both sides, validate they are non-callback interfaces,
        and register the implements relationship on the implementor."""
        if self._finished:
            return
        assert(isinstance(self.implementor, IDLIdentifierPlaceholder))
        assert(isinstance(self.implementee, IDLIdentifierPlaceholder))
        implementor = self.implementor.finish(scope)
        implementee = self.implementee.finish(scope)
        # NOTE: we depend on not setting self.implementor and
        # self.implementee here to keep track of the original
        # locations.
        if not isinstance(implementor, IDLInterface):
            raise WebIDLError("Left-hand side of 'implements' is not an "
                              "interface",
                              [self.implementor.location])
        if implementor.isCallback():
            raise WebIDLError("Left-hand side of 'implements' is a callback "
                              "interface",
                              [self.implementor.location])
        if not isinstance(implementee, IDLInterface):
            raise WebIDLError("Right-hand side of 'implements' is not an "
                              "interface",
                              [self.implementee.location])
        if implementee.isCallback():
            raise WebIDLError("Right-hand side of 'implements' is a callback "
                              "interface",
                              [self.implementee.location])
        implementor.addImplementedInterface(implementee)
        # Only now replace the placeholders with the resolved interfaces.
        self.implementor = implementor
        self.implementee = implementee
    def validate(self):
        pass
    def addExtendedAttributes(self, attrs):
        # Extended attributes are not allowed on implements statements.
        assert len(attrs) == 0
class IDLExtendedAttribute(IDLObject):
    """
    A class to represent IDL extended attributes so we can give them locations
    """
    def __init__(self, location, tuple):
        IDLObject.__init__(self, location)
        self._tuple = tuple

    def identifier(self):
        """The attribute's name — always the first tuple element."""
        return self._tuple[0]

    def noArguments(self):
        """True for bare attributes like [Throws]."""
        return len(self._tuple) == 1

    def hasValue(self):
        """True for value-bearing attributes like [Pref="x"]."""
        if len(self._tuple) < 2:
            return False
        return isinstance(self._tuple[1], str)

    def value(self):
        assert(self.hasValue())
        return self._tuple[1]

    def hasArgs(self):
        """True for call-like attributes such as [Constructor(...)]."""
        n = len(self._tuple)
        return n == 3 or (n == 2 and isinstance(self._tuple[1], list))

    def args(self):
        assert(self.hasArgs())
        # The argument list is always the last tuple element.
        return self._tuple[-1]

    def listValue(self):
        """
        Backdoor for storing random data in _extendedAttrDict
        """
        return list(self._tuple[1:])
# Parser
class Tokenizer(object):
    """PLY lexer definition for WebIDL.

    PLY derives each token's regex from the t_* function docstring and
    orders token functions by definition position, so neither the regex
    docstrings nor the relative order of these methods may be changed.
    """
    tokens = [
        "INTEGER",
        "FLOATLITERAL",
        "IDENTIFIER",
        "STRING",
        "WHITESPACE",
        "OTHER"
        ]
    def t_FLOATLITERAL(self, t):
        r'(-?(([0-9]+\.[0-9]*|[0-9]*\.[0-9]+)([Ee][+-]?[0-9]+)?|[0-9]+[Ee][+-]?[0-9]+|Infinity))|NaN'
        # Convert the matched lexeme to a Python float.
        t.value = float(t.value)
        return t
    def t_INTEGER(self, t):
        r'-?(0([0-7]+|[Xx][0-9A-Fa-f]+)?|[1-9][0-9]*)'
        try:
            # Can't use int(), because that doesn't handle octal properly.
            t.value = parseInt(t.value)
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are no longer swallowed; any parse failure still becomes a
            # WebIDLError with a proper location.
            raise WebIDLError("Invalid integer literal",
                              [Location(lexer=self.lexer,
                                        lineno=self.lexer.lineno,
                                        lexpos=self.lexer.lexpos,
                                        filename=self._filename)])
        return t
    def t_IDENTIFIER(self, t):
        r'[A-Z_a-z][0-9A-Z_a-z-]*'
        # Keywords win over plain identifiers.
        t.type = self.keywords.get(t.value, 'IDENTIFIER')
        return t
    def t_STRING(self, t):
        r'"[^"]*"'
        # Strip the surrounding quotes.
        t.value = t.value[1:-1]
        return t
    def t_WHITESPACE(self, t):
        r'[\t\n\r ]+|[\t\n\r ]*((//[^\n]*|/\*.*?\*/)[\t\n\r ]*)+'
        # Whitespace and comments are discarded (no token returned).
        pass
    def t_ELLIPSIS(self, t):
        r'\.\.\.'
        t.type = self.keywords.get(t.value)
        return t
    def t_OTHER(self, t):
        r'[^\t\n\r 0-9A-Z_a-z]'
        t.type = self.keywords.get(t.value, 'OTHER')
        return t
    # Maps source spellings to token type names; consulted by t_IDENTIFIER,
    # t_ELLIPSIS and t_OTHER above.
    keywords = {
        "module": "MODULE",
        "interface": "INTERFACE",
        "partial": "PARTIAL",
        "dictionary": "DICTIONARY",
        "exception": "EXCEPTION",
        "enum": "ENUM",
        "callback": "CALLBACK",
        "typedef": "TYPEDEF",
        "implements": "IMPLEMENTS",
        "const": "CONST",
        "null": "NULL",
        "true": "TRUE",
        "false": "FALSE",
        "serializer": "SERIALIZER",
        "stringifier": "STRINGIFIER",
        "jsonifier": "JSONIFIER",
        "unrestricted": "UNRESTRICTED",
        "attribute": "ATTRIBUTE",
        "readonly": "READONLY",
        "inherit": "INHERIT",
        "static": "STATIC",
        "getter": "GETTER",
        "setter": "SETTER",
        "creator": "CREATOR",
        "deleter": "DELETER",
        "legacycaller": "LEGACYCALLER",
        "optional": "OPTIONAL",
        "...": "ELLIPSIS",
        "::": "SCOPE",
        "Date": "DATE",
        "DOMString": "DOMSTRING",
        "ByteString": "BYTESTRING",
        "USVString": "USVSTRING",
        "any": "ANY",
        "boolean": "BOOLEAN",
        "byte": "BYTE",
        "double": "DOUBLE",
        "float": "FLOAT",
        "long": "LONG",
        "object": "OBJECT",
        "octet": "OCTET",
        "Promise": "PROMISE",
        "required": "REQUIRED",
        "sequence": "SEQUENCE",
        "MozMap": "MOZMAP",
        "short": "SHORT",
        "unsigned": "UNSIGNED",
        "void": "VOID",
        ":": "COLON",
        ";": "SEMICOLON",
        "{": "LBRACE",
        "}": "RBRACE",
        "(": "LPAREN",
        ")": "RPAREN",
        "[": "LBRACKET",
        "]": "RBRACKET",
        "?": "QUESTIONMARK",
        ",": "COMMA",
        "=": "EQUALS",
        "<": "LT",
        ">": "GT",
        "ArrayBuffer": "ARRAYBUFFER",
        "SharedArrayBuffer": "SHAREDARRAYBUFFER",
        "or": "OR",
        "maplike": "MAPLIKE",
        "setlike": "SETLIKE"
        }
    tokens.extend(keywords.values())
    def t_error(self, t):
        # Bug fix: this used self.filename, which is never assigned (see
        # t_INTEGER and Parser.getLocation, which both use self._filename),
        # so lexer errors died with AttributeError instead of reporting the
        # real problem.
        raise WebIDLError("Unrecognized Input",
                          [Location(lexer=self.lexer,
                                    lineno=self.lexer.lineno,
                                    lexpos=self.lexer.lexpos,
                                    filename=self._filename)])
    def __init__(self, outputdir, lexer=None):
        """Wrap an existing PLY lexer, or build one from this class's
        t_* rules, writing the generated table into `outputdir`."""
        if lexer:
            self.lexer = lexer
        else:
            self.lexer = lex.lex(object=self,
                                 outputdir=outputdir,
                                 lextab='webidllex',
                                 reflags=re.DOTALL)
class SqueakyCleanLogger(object):
    """PLY logger that drops known-benign grammar diagnostics and collects
    everything else, so the build can fail loudly on real grammar problems."""
    # Diagnostics PLY is expected to emit for this grammar; deliberately
    # ignored.
    errorWhitelist = [
        # Web IDL defines the WHITESPACE token, but doesn't actually
        # use it ... so far.
        "Token 'WHITESPACE' defined, but not used",
        # And that means we have an unused token
        "There is 1 unused token",
        # Web IDL defines a OtherOrComma rule that's only used in
        # ExtendedAttributeInner, which we don't use yet.
        "Rule 'OtherOrComma' defined, but not used",
        # And an unused rule
        "There is 1 unused rule",
        # And the OtherOrComma grammar symbol is unreachable.
        "Symbol 'OtherOrComma' is unreachable",
        # Which means the Other symbol is unreachable.
        "Symbol 'Other' is unreachable",
        ]

    def __init__(self):
        self.errors = []

    def debug(self, msg, *args, **kwargs):
        # Debug chatter is never interesting here.
        pass
    info = debug

    def warning(self, msg, *args, **kwargs):
        """Record the formatted message unless it is whitelisted."""
        if msg == "%s:%d: Rule '%s' defined, but not used":
            # Drop the filename/lineno prefix so the whitelist doesn't have
            # to hardcode build-specific paths and line numbers.
            comparableMsg = "Rule '%s' defined, but not used"
            comparableArgs = args[2:]
        else:
            comparableMsg = msg
            comparableArgs = args
        if (comparableMsg % comparableArgs) not in SqueakyCleanLogger.errorWhitelist:
            self.errors.append(msg % args)
    error = warning

    def reportGrammarErrors(self):
        """Raise one WebIDLError aggregating every recorded message."""
        if self.errors:
            raise WebIDLError("\n".join(self.errors), [])
class Parser(Tokenizer):
    def getLocation(self, p, i):
        # Build a Location for grammar symbol i of production p, using the
        # filename recorded when parsing started.
        return Location(self.lexer, p.lineno(i), p.lexpos(i), self._filename)
    def globalScope(self):
        # The root scope that all top-level definitions are registered in.
        return self._globalScope
# The p_Foo functions here must match the WebIDL spec's grammar.
# It's acceptable to split things at '|' boundaries.
    def p_Definitions(self, p):
        """
        Definitions : ExtendedAttributeList Definition Definitions
        """
        # Note: the docstrings of all p_* methods are PLY grammar
        # productions, not documentation; they must not be edited.
        if p[2]:
            p[0] = [p[2]]
            p[2].addExtendedAttributes(p[1])
        else:
            assert not p[1]
            p[0] = []
        p[0].extend(p[3])
    def p_DefinitionsEmpty(self, p):
        """
        Definitions :
        """
        p[0] = []
    def p_Definition(self, p):
        """
        Definition : CallbackOrInterface
                   | PartialInterface
                   | Dictionary
                   | Exception
                   | Enum
                   | Typedef
                   | ImplementsStatement
        """
        p[0] = p[1]
        assert p[1] # We might not have implemented something ...
    def p_CallbackOrInterfaceCallback(self, p):
        """
        CallbackOrInterface : CALLBACK CallbackRestOrInterface
        """
        # `callback interface Foo` marks the interface as a callback
        # interface; a plain `callback Foo = ...` is already an IDLCallback.
        if p[2].isInterface():
            assert isinstance(p[2], IDLInterface)
            p[2].setCallback(True)
        p[0] = p[2]
    def p_CallbackOrInterfaceInterface(self, p):
        """
        CallbackOrInterface : Interface
        """
        p[0] = p[1]
    def p_CallbackRestOrInterface(self, p):
        """
        CallbackRestOrInterface : CallbackRest
                                | Interface
        """
        assert p[1]
        p[0] = p[1]
def p_Interface(self, p):
"""
Interface : INTERFACE IDENTIFIER Inheritance LBRACE InterfaceMembers RBRACE SEMICOLON
"""
location = self.getLocation(p, 1)
identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2])
members = p[5]
parent = p[3]
try:
existingObj = self.globalScope()._lookupIdentifier(identifier)
if existingObj:
p[0] = existingObj
if not isinstance(p[0], IDLInterface):
raise WebIDLError("Interface has the same name as "
"non-interface object",
[location, p[0].location])
p[0].setNonPartial(location, parent, members)
return
except Exception, ex:
if isinstance(ex, WebIDLError):
raise ex
pass
p[0] = IDLInterface(location, self.globalScope(), identifier, parent,
members, isKnownNonPartial=True)
def p_InterfaceForwardDecl(self, p):
"""
Interface : INTERFACE IDENTIFIER SEMICOLON
"""
location = self.getLocation(p, 1)
identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2])
try:
if self.globalScope()._lookupIdentifier(identifier):
p[0] = self.globalScope()._lookupIdentifier(identifier)
if not isinstance(p[0], IDLExternalInterface):
raise WebIDLError("Name collision between external "
"interface declaration for identifier "
"%s and %s" % (identifier.name, p[0]),
[location, p[0].location])
return
except Exception, ex:
if isinstance(ex, WebIDLError):
raise ex
pass
p[0] = IDLExternalInterface(location, self.globalScope(), identifier)
def p_PartialInterface(self, p):
"""
PartialInterface : PARTIAL INTERFACE IDENTIFIER LBRACE InterfaceMembers RBRACE SEMICOLON
"""
location = self.getLocation(p, 2)
identifier = IDLUnresolvedIdentifier(self.getLocation(p, 3), p[3])
members = p[5]
nonPartialInterface = None
try:
nonPartialInterface = self.globalScope()._lookupIdentifier(identifier)
if nonPartialInterface:
if not isinstance(nonPartialInterface, IDLInterface):
raise WebIDLError("Partial interface has the same name as "
"non-interface object",
[location, nonPartialInterface.location])
except Exception, ex:
if isinstance(ex, WebIDLError):
raise ex
pass
if not nonPartialInterface:
nonPartialInterface = IDLInterface(location, self.globalScope(),
identifier, None,
[], isKnownNonPartial=False)
partialInterface = IDLPartialInterface(location, identifier, members,
nonPartialInterface)
p[0] = partialInterface
    def p_Inheritance(self, p):
        """
        Inheritance : COLON ScopedName
        """
        # The parent is only a name at this point; it is resolved later.
        p[0] = IDLIdentifierPlaceholder(self.getLocation(p, 2), p[2])
    def p_InheritanceEmpty(self, p):
        """
        Inheritance :
        """
        pass
    def p_InterfaceMembers(self, p):
        """
        InterfaceMembers : ExtendedAttributeList InterfaceMember InterfaceMembers
        """
        p[0] = [p[2]] if p[2] else []
        assert not p[1] or p[2]
        # NOTE(review): if p[2] were ever falsy this line would raise
        # AttributeError; presumably InterfaceMember productions always
        # yield a truthy member — verify against the grammar.
        p[2].addExtendedAttributes(p[1])
        p[0].extend(p[3])
    def p_InterfaceMembersEmpty(self, p):
        """
        InterfaceMembers :
        """
        p[0] = []
    def p_InterfaceMember(self, p):
        """
        InterfaceMember : Const
                        | AttributeOrOperationOrMaplikeOrSetlike
        """
        p[0] = p[1]
    def p_Dictionary(self, p):
        """
        Dictionary : DICTIONARY IDENTIFIER Inheritance LBRACE DictionaryMembers RBRACE SEMICOLON
        """
        location = self.getLocation(p, 1)
        identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2])
        members = p[5]
        p[0] = IDLDictionary(location, self.globalScope(), identifier, p[3], members)
    def p_DictionaryMembers(self, p):
        """
        DictionaryMembers : ExtendedAttributeList DictionaryMember DictionaryMembers
                          |
        """
        if len(p) == 1:
            # We're at the end of the list
            p[0] = []
            return
        # Add our extended attributes
        p[2].addExtendedAttributes(p[1])
        p[0] = [p[2]]
        p[0].extend(p[3])
    def p_DictionaryMember(self, p):
        """
        DictionaryMember : Required Type IDENTIFIER Default SEMICOLON
        """
        # These quack a lot like optional arguments, so just treat them that way.
        t = p[2]
        assert isinstance(t, IDLType)
        identifier = IDLUnresolvedIdentifier(self.getLocation(p, 3), p[3])
        defaultValue = p[4]
        # A member is optional unless prefixed with `required`.
        optional = not p[1]
        if not optional and defaultValue:
            raise WebIDLError("Required dictionary members can't have a default value.",
                              [self.getLocation(p, 4)])
        p[0] = IDLArgument(self.getLocation(p, 3), identifier, t,
                           optional=optional,
                           defaultValue=defaultValue, variadic=False,
                           dictionaryMember=True)
    def p_Default(self, p):
        """
        Default : EQUALS DefaultValue
                |
        """
        # None signals "no default was given".
        if len(p) > 1:
            p[0] = p[2]
        else:
            p[0] = None
    def p_DefaultValue(self, p):
        """
        DefaultValue : ConstValue
                     | LBRACKET RBRACKET
        """
        if len(p) == 2:
            p[0] = p[1]
        else:
            assert len(p) == 3 # Must be []
            p[0] = IDLEmptySequenceValue(self.getLocation(p, 1))
    def p_Exception(self, p):
        """
        Exception : EXCEPTION IDENTIFIER Inheritance LBRACE ExceptionMembers RBRACE SEMICOLON
        """
        # Exceptions are parsed but deliberately not modeled.
        pass
    def p_Enum(self, p):
        """
        Enum : ENUM IDENTIFIER LBRACE EnumValueList RBRACE SEMICOLON
        """
        location = self.getLocation(p, 1)
        identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2])
        values = p[4]
        assert values
        p[0] = IDLEnum(location, self.globalScope(), identifier, values)
    # The next four productions implement a comma-separated string list
    # that tolerates a trailing comma.
    def p_EnumValueList(self, p):
        """
        EnumValueList : STRING EnumValueListComma
        """
        p[0] = [p[1]]
        p[0].extend(p[2])
    def p_EnumValueListComma(self, p):
        """
        EnumValueListComma : COMMA EnumValueListString
        """
        p[0] = p[2]
    def p_EnumValueListCommaEmpty(self, p):
        """
        EnumValueListComma :
        """
        p[0] = []
    def p_EnumValueListString(self, p):
        """
        EnumValueListString : STRING EnumValueListComma
        """
        p[0] = [p[1]]
        p[0].extend(p[2])
    def p_EnumValueListStringEmpty(self, p):
        """
        EnumValueListString :
        """
        # Empty alternative: this is what permits the trailing comma.
        p[0] = []
    def p_CallbackRest(self, p):
        """
        CallbackRest : IDENTIFIER EQUALS ReturnType LPAREN ArgumentList RPAREN SEMICOLON
        """
        identifier = IDLUnresolvedIdentifier(self.getLocation(p, 1), p[1])
        p[0] = IDLCallback(self.getLocation(p, 1), self.globalScope(),
                           identifier, p[3], p[5])
    def p_ExceptionMembers(self, p):
        """
        ExceptionMembers : ExtendedAttributeList ExceptionMember ExceptionMembers
                         |
        """
        # Exception members are parsed but deliberately not modeled.
        pass
    def p_Typedef(self, p):
        """
        Typedef : TYPEDEF Type IDENTIFIER SEMICOLON
        """
        typedef = IDLTypedef(self.getLocation(p, 1), self.globalScope(),
                             p[2], p[3])
        p[0] = typedef
    def p_ImplementsStatement(self, p):
        """
        ImplementsStatement : ScopedName IMPLEMENTS ScopedName SEMICOLON
        """
        assert(p[2] == "implements")
        # Both sides are placeholders; resolution happens in finish().
        implementor = IDLIdentifierPlaceholder(self.getLocation(p, 1), p[1])
        implementee = IDLIdentifierPlaceholder(self.getLocation(p, 3), p[3])
        p[0] = IDLImplementsStatement(self.getLocation(p, 1), implementor,
                                      implementee)
    def p_Const(self, p):
        """
        Const : CONST ConstType IDENTIFIER EQUALS ConstValue SEMICOLON
        """
        location = self.getLocation(p, 1)
        type = p[2]
        identifier = IDLUnresolvedIdentifier(self.getLocation(p, 3), p[3])
        value = p[5]
        p[0] = IDLConst(location, identifier, type, value)
    def p_ConstValueBoolean(self, p):
        """
        ConstValue : BooleanLiteral
        """
        location = self.getLocation(p, 1)
        booleanType = BuiltinTypes[IDLBuiltinType.Types.boolean]
        p[0] = IDLValue(location, booleanType, p[1])
    def p_ConstValueInteger(self, p):
        """
        ConstValue : INTEGER
        """
        location = self.getLocation(p, 1)
        # We don't know ahead of time what type the integer literal is.
        # Determine the smallest type it could possibly fit in and use that.
        integerType = matchIntegerValueToType(p[1])
        if integerType is None:
            raise WebIDLError("Integer literal out of range", [location])
        p[0] = IDLValue(location, integerType, p[1])
    def p_ConstValueFloat(self, p):
        """
        ConstValue : FLOATLITERAL
        """
        location = self.getLocation(p, 1)
        # Use the widest float type; coercion narrows it later if needed.
        p[0] = IDLValue(location, BuiltinTypes[IDLBuiltinType.Types.unrestricted_float], p[1])
    def p_ConstValueString(self, p):
        """
        ConstValue : STRING
        """
        location = self.getLocation(p, 1)
        stringType = BuiltinTypes[IDLBuiltinType.Types.domstring]
        p[0] = IDLValue(location, stringType, p[1])
    def p_ConstValueNull(self, p):
        """
        ConstValue : NULL
        """
        p[0] = IDLNullValue(self.getLocation(p, 1))
    def p_BooleanLiteralTrue(self, p):
        """
        BooleanLiteral : TRUE
        """
        p[0] = True
    def p_BooleanLiteralFalse(self, p):
        """
        BooleanLiteral : FALSE
        """
        p[0] = False
def p_AttributeOrOperationOrMaplikeOrSetlike(self, p):
"""
AttributeOrOperationOrMaplikeOrSetlike : Attribute
| Maplike
| Setlike
| Operation
"""
p[0] = p[1]
def p_Setlike(self, p):
"""
Setlike : ReadOnly SETLIKE LT Type GT SEMICOLON
"""
readonly = p[1]
maplikeOrSetlikeType = p[2]
location = self.getLocation(p, 2)
identifier = IDLUnresolvedIdentifier(location, "__setlike",
allowDoubleUnderscore=True)
keyType = p[4]
valueType = keyType
p[0] = IDLMaplikeOrSetlike(location, identifier, maplikeOrSetlikeType,
readonly, keyType, valueType)
def p_Maplike(self, p):
"""
Maplike : ReadOnly MAPLIKE LT Type COMMA Type GT SEMICOLON
"""
readonly = p[1]
maplikeOrSetlikeType = p[2]
location = self.getLocation(p, 2)
identifier = IDLUnresolvedIdentifier(location, "__maplike",
allowDoubleUnderscore=True)
keyType = p[4]
valueType = p[6]
p[0] = IDLMaplikeOrSetlike(location, identifier, maplikeOrSetlikeType,
readonly, keyType, valueType)
def p_AttributeWithQualifier(self, p):
"""
Attribute : Qualifier AttributeRest
"""
static = IDLInterfaceMember.Special.Static in p[1]
stringifier = IDLInterfaceMember.Special.Stringifier in p[1]
(location, identifier, type, readonly) = p[2]
p[0] = IDLAttribute(location, identifier, type, readonly,
static=static, stringifier=stringifier)
def p_AttributeInherited(self, p):
"""
Attribute : INHERIT AttributeRest
"""
(location, identifier, type, readonly) = p[2]
p[0] = IDLAttribute(location, identifier, type, readonly, inherit=True)
def p_Attribute(self, p):
"""
Attribute : AttributeRest
"""
(location, identifier, type, readonly) = p[1]
p[0] = IDLAttribute(location, identifier, type, readonly, inherit=False)
def p_AttributeRest(self, p):
"""
AttributeRest : ReadOnly ATTRIBUTE Type AttributeName SEMICOLON
"""
location = self.getLocation(p, 2)
readonly = p[1]
t = p[3]
identifier = IDLUnresolvedIdentifier(self.getLocation(p, 4), p[4])
p[0] = (location, identifier, t, readonly)
def p_ReadOnly(self, p):
"""
ReadOnly : READONLY
"""
p[0] = True
def p_ReadOnlyEmpty(self, p):
"""
ReadOnly :
"""
p[0] = False
def p_Operation(self, p):
    """
    Operation : Qualifiers OperationRest
    """
    # Validates the qualifier list (static / stringifier / the special
    # getter/setter/creator/deleter/legacycaller set), checks the argument
    # shapes required for each special operation, synthesizes an identifier
    # for anonymous specials, and builds the IDLMethod.
    qualifiers = p[1]
    # Disallow duplicates in the qualifier set
    if not len(set(qualifiers)) == len(qualifiers):
        raise WebIDLError("Duplicate qualifiers are not allowed",
                          [self.getLocation(p, 1)])
    static = IDLInterfaceMember.Special.Static in p[1]
    # If static is there that's all that's allowed.  This is disallowed
    # by the parser, so we can assert here.
    assert not static or len(qualifiers) == 1
    stringifier = IDLInterfaceMember.Special.Stringifier in p[1]
    # If stringifier is there that's all that's allowed.  This is disallowed
    # by the parser, so we can assert here.
    assert not stringifier or len(qualifiers) == 1
    getter = True if IDLMethod.Special.Getter in p[1] else False
    setter = True if IDLMethod.Special.Setter in p[1] else False
    creator = True if IDLMethod.Special.Creator in p[1] else False
    deleter = True if IDLMethod.Special.Deleter in p[1] else False
    legacycaller = True if IDLMethod.Special.LegacyCaller in p[1] else False
    if getter or deleter:
        if setter or creator:
            raise WebIDLError("getter and deleter are incompatible with setter and creator",
                              [self.getLocation(p, 1)])
    (returnType, identifier, arguments) = p[2]
    assert isinstance(returnType, IDLType)
    specialType = IDLMethod.NamedOrIndexed.Neither
    # Getters/deleters take exactly one argument whose type decides whether
    # the operation is Named (DOMString) or Indexed (unsigned long).
    if getter or deleter:
        if len(arguments) != 1:
            raise WebIDLError("%s has wrong number of arguments" %
                              ("getter" if getter else "deleter"),
                              [self.getLocation(p, 2)])
        argType = arguments[0].type
        if argType == BuiltinTypes[IDLBuiltinType.Types.domstring]:
            specialType = IDLMethod.NamedOrIndexed.Named
        elif argType == BuiltinTypes[IDLBuiltinType.Types.unsigned_long]:
            specialType = IDLMethod.NamedOrIndexed.Indexed
        else:
            raise WebIDLError("%s has wrong argument type (must be DOMString or UnsignedLong)" %
                              ("getter" if getter else "deleter"),
                              [arguments[0].location])
        if arguments[0].optional or arguments[0].variadic:
            raise WebIDLError("%s cannot have %s argument" %
                              ("getter" if getter else "deleter",
                               "optional" if arguments[0].optional else "variadic"),
                              [arguments[0].location])
    if getter:
        if returnType.isVoid():
            raise WebIDLError("getter cannot have void return type",
                              [self.getLocation(p, 2)])
    # Setters/creators take exactly two arguments; the first decides
    # Named vs Indexed, and neither may be optional or variadic.
    if setter or creator:
        if len(arguments) != 2:
            raise WebIDLError("%s has wrong number of arguments" %
                              ("setter" if setter else "creator"),
                              [self.getLocation(p, 2)])
        argType = arguments[0].type
        if argType == BuiltinTypes[IDLBuiltinType.Types.domstring]:
            specialType = IDLMethod.NamedOrIndexed.Named
        elif argType == BuiltinTypes[IDLBuiltinType.Types.unsigned_long]:
            specialType = IDLMethod.NamedOrIndexed.Indexed
        else:
            raise WebIDLError("%s has wrong argument type (must be DOMString or UnsignedLong)" %
                              ("setter" if setter else "creator"),
                              [arguments[0].location])
        if arguments[0].optional or arguments[0].variadic:
            raise WebIDLError("%s cannot have %s argument" %
                              ("setter" if setter else "creator",
                               "optional" if arguments[0].optional else "variadic"),
                              [arguments[0].location])
        if arguments[1].optional or arguments[1].variadic:
            raise WebIDLError("%s cannot have %s argument" %
                              ("setter" if setter else "creator",
                               "optional" if arguments[1].optional else "variadic"),
                              [arguments[1].location])
    # A stringifier operation takes no arguments and returns DOMString.
    if stringifier:
        if len(arguments) != 0:
            raise WebIDLError("stringifier has wrong number of arguments",
                              [self.getLocation(p, 2)])
        if not returnType.isDOMString():
            raise WebIDLError("stringifier must have DOMString return type",
                              [self.getLocation(p, 2)])
    # identifier might be None.  This is only permitted for special methods.
    # In that case synthesize a double-underscore name such as
    # "__indexedgetter" from the special flags that are set.
    if not identifier:
        if (not getter and not setter and not creator and
            not deleter and not legacycaller and not stringifier):
            raise WebIDLError("Identifier required for non-special methods",
                              [self.getLocation(p, 2)])
        location = BuiltinLocation("<auto-generated-identifier>")
        identifier = IDLUnresolvedIdentifier(
            location,
            "__%s%s%s%s%s%s%s" %
            ("named" if specialType == IDLMethod.NamedOrIndexed.Named else
             "indexed" if specialType == IDLMethod.NamedOrIndexed.Indexed else "",
             "getter" if getter else "",
             "setter" if setter else "",
             "deleter" if deleter else "",
             "creator" if creator else "",
             "legacycaller" if legacycaller else "",
             "stringifier" if stringifier else ""),
            allowDoubleUnderscore=True)
    method = IDLMethod(self.getLocation(p, 2), identifier, returnType, arguments,
                       static=static, getter=getter, setter=setter, creator=creator,
                       deleter=deleter, specialType=specialType,
                       legacycaller=legacycaller, stringifier=stringifier)
    p[0] = method
def p_Stringifier(self, p):
"""
Operation : STRINGIFIER SEMICOLON
"""
identifier = IDLUnresolvedIdentifier(BuiltinLocation("<auto-generated-identifier>"),
"__stringifier",
allowDoubleUnderscore=True)
method = IDLMethod(self.getLocation(p, 1),
identifier,
returnType=BuiltinTypes[IDLBuiltinType.Types.domstring],
arguments=[],
stringifier=True)
p[0] = method
def p_Jsonifier(self, p):
"""
Operation : JSONIFIER SEMICOLON
"""
identifier = IDLUnresolvedIdentifier(BuiltinLocation("<auto-generated-identifier>"),
"__jsonifier", allowDoubleUnderscore=True)
method = IDLMethod(self.getLocation(p, 1),
identifier,
returnType=BuiltinTypes[IDLBuiltinType.Types.object],
arguments=[],
jsonifier=True)
p[0] = method
def p_QualifierStatic(self, p):
"""
Qualifier : STATIC
"""
p[0] = [IDLInterfaceMember.Special.Static]
def p_QualifierStringifier(self, p):
"""
Qualifier : STRINGIFIER
"""
p[0] = [IDLInterfaceMember.Special.Stringifier]
def p_Qualifiers(self, p):
"""
Qualifiers : Qualifier
| Specials
"""
p[0] = p[1]
def p_Specials(self, p):
"""
Specials : Special Specials
"""
p[0] = [p[1]]
p[0].extend(p[2])
def p_SpecialsEmpty(self, p):
"""
Specials :
"""
p[0] = []
def p_SpecialGetter(self, p):
"""
Special : GETTER
"""
p[0] = IDLMethod.Special.Getter
def p_SpecialSetter(self, p):
"""
Special : SETTER
"""
p[0] = IDLMethod.Special.Setter
def p_SpecialCreator(self, p):
"""
Special : CREATOR
"""
p[0] = IDLMethod.Special.Creator
def p_SpecialDeleter(self, p):
"""
Special : DELETER
"""
p[0] = IDLMethod.Special.Deleter
def p_SpecialLegacyCaller(self, p):
"""
Special : LEGACYCALLER
"""
p[0] = IDLMethod.Special.LegacyCaller
def p_OperationRest(self, p):
"""
OperationRest : ReturnType OptionalIdentifier LPAREN ArgumentList RPAREN SEMICOLON
"""
p[0] = (p[1], p[2], p[4])
def p_OptionalIdentifier(self, p):
"""
OptionalIdentifier : IDENTIFIER
"""
p[0] = IDLUnresolvedIdentifier(self.getLocation(p, 1), p[1])
def p_OptionalIdentifierEmpty(self, p):
"""
OptionalIdentifier :
"""
pass
def p_ArgumentList(self, p):
    """
    ArgumentList : Argument Arguments
    """
    # Collect the head argument (when present) followed by the tail list.
    # The grammar text above is read by PLY and must not change.
    collected = []
    if p[1]:
        collected.append(p[1])
    collected.extend(p[2])
    p[0] = collected
def p_ArgumentListEmpty(self, p):
"""
ArgumentList :
"""
p[0] = []
def p_Arguments(self, p):
"""
Arguments : COMMA Argument Arguments
"""
p[0] = [p[2]] if p[2] else []
p[0].extend(p[3])
def p_ArgumentsEmpty(self, p):
"""
Arguments :
"""
p[0] = []
def p_Argument(self, p):
    """
    Argument : ExtendedAttributeList Optional Type Ellipsis ArgumentName Default
    """
    # Builds an IDLArgument, enforcing that default values appear only on
    # optional arguments and that variadic arguments are not marked optional.
    t = p[3]
    assert isinstance(t, IDLType)
    identifier = IDLUnresolvedIdentifier(self.getLocation(p, 5), p[5])
    optional = p[2]
    variadic = p[4]
    defaultValue = p[6]
    if not optional and defaultValue:
        raise WebIDLError("Mandatory arguments can't have a default value.",
                          [self.getLocation(p, 6)])
    # We can't test t.isAny() here and give it a default value as needed,
    # since at this point t is not a fully resolved type yet (e.g. it might
    # be a typedef).  We'll handle the 'any' case in IDLArgument.complete.
    if variadic:
        if optional:
            raise WebIDLError("Variadic arguments should not be marked optional.",
                              [self.getLocation(p, 2)])
        # A variadic argument is implicitly optional (zero occurrences allowed).
        optional = variadic
    p[0] = IDLArgument(self.getLocation(p, 5), identifier, t, optional, defaultValue, variadic)
    p[0].addExtendedAttributes(p[1])
def p_ArgumentName(self, p):
"""
ArgumentName : IDENTIFIER
| ATTRIBUTE
| CALLBACK
| CONST
| CREATOR
| DELETER
| DICTIONARY
| ENUM
| EXCEPTION
| GETTER
| IMPLEMENTS
| INHERIT
| INTERFACE
| LEGACYCALLER
| MAPLIKE
| PARTIAL
| REQUIRED
| SERIALIZER
| SETLIKE
| SETTER
| STATIC
| STRINGIFIER
| JSONIFIER
| TYPEDEF
| UNRESTRICTED
"""
p[0] = p[1]
def p_AttributeName(self, p):
"""
AttributeName : IDENTIFIER
| REQUIRED
"""
p[0] = p[1]
def p_Optional(self, p):
"""
Optional : OPTIONAL
"""
p[0] = True
def p_OptionalEmpty(self, p):
"""
Optional :
"""
p[0] = False
def p_Required(self, p):
"""
Required : REQUIRED
"""
p[0] = True
def p_RequiredEmpty(self, p):
"""
Required :
"""
p[0] = False
def p_Ellipsis(self, p):
"""
Ellipsis : ELLIPSIS
"""
p[0] = True
def p_EllipsisEmpty(self, p):
"""
Ellipsis :
"""
p[0] = False
def p_ExceptionMember(self, p):
"""
ExceptionMember : Const
| ExceptionField
"""
pass
def p_ExceptionField(self, p):
"""
ExceptionField : Type IDENTIFIER SEMICOLON
"""
pass
def p_ExtendedAttributeList(self, p):
"""
ExtendedAttributeList : LBRACKET ExtendedAttribute ExtendedAttributes RBRACKET
"""
p[0] = [p[2]]
if p[3]:
p[0].extend(p[3])
def p_ExtendedAttributeListEmpty(self, p):
"""
ExtendedAttributeList :
"""
p[0] = []
def p_ExtendedAttribute(self, p):
"""
ExtendedAttribute : ExtendedAttributeNoArgs
| ExtendedAttributeArgList
| ExtendedAttributeIdent
| ExtendedAttributeNamedArgList
| ExtendedAttributeIdentList
"""
p[0] = IDLExtendedAttribute(self.getLocation(p, 1), p[1])
def p_ExtendedAttributeEmpty(self, p):
"""
ExtendedAttribute :
"""
pass
def p_ExtendedAttributes(self, p):
"""
ExtendedAttributes : COMMA ExtendedAttribute ExtendedAttributes
"""
p[0] = [p[2]] if p[2] else []
p[0].extend(p[3])
def p_ExtendedAttributesEmpty(self, p):
"""
ExtendedAttributes :
"""
p[0] = []
def p_Other(self, p):
"""
Other : INTEGER
| FLOATLITERAL
| IDENTIFIER
| STRING
| OTHER
| ELLIPSIS
| COLON
| SCOPE
| SEMICOLON
| LT
| EQUALS
| GT
| QUESTIONMARK
| DATE
| DOMSTRING
| BYTESTRING
| USVSTRING
| ANY
| ATTRIBUTE
| BOOLEAN
| BYTE
| LEGACYCALLER
| CONST
| CREATOR
| DELETER
| DOUBLE
| EXCEPTION
| FALSE
| FLOAT
| GETTER
| IMPLEMENTS
| INHERIT
| INTERFACE
| LONG
| MODULE
| NULL
| OBJECT
| OCTET
| OPTIONAL
| SEQUENCE
| MOZMAP
| SETTER
| SHORT
| STATIC
| STRINGIFIER
| JSONIFIER
| TRUE
| TYPEDEF
| UNSIGNED
| VOID
"""
pass
def p_OtherOrComma(self, p):
"""
OtherOrComma : Other
| COMMA
"""
pass
def p_TypeSingleType(self, p):
"""
Type : SingleType
"""
p[0] = p[1]
def p_TypeUnionType(self, p):
"""
Type : UnionType TypeSuffix
"""
p[0] = self.handleModifiers(p[1], p[2])
def p_SingleTypeNonAnyType(self, p):
"""
SingleType : NonAnyType
"""
p[0] = p[1]
def p_SingleTypeAnyType(self, p):
"""
SingleType : ANY TypeSuffixStartingWithArray
"""
p[0] = self.handleModifiers(BuiltinTypes[IDLBuiltinType.Types.any], p[2])
def p_UnionType(self, p):
"""
UnionType : LPAREN UnionMemberType OR UnionMemberType UnionMemberTypes RPAREN
"""
types = [p[2], p[4]]
types.extend(p[5])
p[0] = IDLUnionType(self.getLocation(p, 1), types)
def p_UnionMemberTypeNonAnyType(self, p):
"""
UnionMemberType : NonAnyType
"""
p[0] = p[1]
def p_UnionMemberTypeArrayOfAny(self, p):
"""
UnionMemberTypeArrayOfAny : ANY LBRACKET RBRACKET
"""
p[0] = IDLArrayType(self.getLocation(p, 2),
BuiltinTypes[IDLBuiltinType.Types.any])
def p_UnionMemberType(self, p):
"""
UnionMemberType : UnionType TypeSuffix
| UnionMemberTypeArrayOfAny TypeSuffix
"""
p[0] = self.handleModifiers(p[1], p[2])
def p_UnionMemberTypes(self, p):
"""
UnionMemberTypes : OR UnionMemberType UnionMemberTypes
"""
p[0] = [p[2]]
p[0].extend(p[3])
def p_UnionMemberTypesEmpty(self, p):
"""
UnionMemberTypes :
"""
p[0] = []
def p_NonAnyType(self, p):
"""
NonAnyType : PrimitiveOrStringType TypeSuffix
| ARRAYBUFFER TypeSuffix
| SHAREDARRAYBUFFER TypeSuffix
| OBJECT TypeSuffix
"""
if p[1] == "object":
type = BuiltinTypes[IDLBuiltinType.Types.object]
elif p[1] == "ArrayBuffer":
type = BuiltinTypes[IDLBuiltinType.Types.ArrayBuffer]
elif p[1] == "SharedArrayBuffer":
type = BuiltinTypes[IDLBuiltinType.Types.SharedArrayBuffer]
else:
type = BuiltinTypes[p[1]]
p[0] = self.handleModifiers(type, p[2])
def p_NonAnyTypeSequenceType(self, p):
"""
NonAnyType : SEQUENCE LT Type GT Null
"""
innerType = p[3]
type = IDLSequenceType(self.getLocation(p, 1), innerType)
if p[5]:
type = IDLNullableType(self.getLocation(p, 5), type)
p[0] = type
# Note: Promise<void> is allowed, so we want to parametrize on
# ReturnType, not Type. Also, we want this to end up picking up
# the Promise interface for now, hence the games with IDLUnresolvedType.
def p_NonAnyTypePromiseType(self, p):
    """
    NonAnyType : PROMISE LT ReturnType GT Null
    """
    # Promise<T> is parametrized on ReturnType (so Promise<void> parses) and
    # is modeled as an unresolved "Promise" type so later resolution picks up
    # the Promise interface; see the comment above this production.
    # Fix: the original assigned innerType but then passed p[3] directly,
    # leaving the local unused; use it, matching the sibling productions.
    innerType = p[3]
    promiseIdent = IDLUnresolvedIdentifier(self.getLocation(p, 1), "Promise")
    type = IDLUnresolvedType(self.getLocation(p, 1), promiseIdent, innerType)
    if p[5]:
        # Trailing "?" makes the whole Promise type nullable.
        type = IDLNullableType(self.getLocation(p, 5), type)
    p[0] = type
def p_NonAnyTypeMozMapType(self, p):
"""
NonAnyType : MOZMAP LT Type GT Null
"""
innerType = p[3]
type = IDLMozMapType(self.getLocation(p, 1), innerType)
if p[5]:
type = IDLNullableType(self.getLocation(p, 5), type)
p[0] = type
def p_NonAnyTypeScopedName(self, p):
    """
    NonAnyType : ScopedName TypeSuffix
    """
    # A bare identifier used as a type.  If the name already resolves in the
    # global scope, wrap the known object (typedef / callback / interface);
    # otherwise leave it as an IDLUnresolvedType to be resolved later.
    assert isinstance(p[1], IDLUnresolvedIdentifier)
    if p[1].name == "Promise":
        raise WebIDLError("Promise used without saying what it's "
                          "parametrized over",
                          [self.getLocation(p, 1)])
    # Fixes vs. the original: look the identifier up once instead of twice,
    # and narrow the bare "except:" (which silently swallowed even the
    # assertion below) to just the lookup, catching Exception rather than
    # BaseException so KeyboardInterrupt/SystemExit propagate.
    try:
        obj = self.globalScope()._lookupIdentifier(p[1])
    except Exception:
        # Lookup failure simply means the name isn't defined yet.
        obj = None
    if obj:
        assert not obj.isType()
        if obj.isTypedef():
            type = IDLTypedefType(self.getLocation(p, 1), obj.innerType,
                                  obj.identifier.name)
        elif obj.isCallback() and not obj.isInterface():
            type = IDLCallbackType(self.getLocation(p, 1), obj)
        else:
            type = IDLWrapperType(self.getLocation(p, 1), p[1])
        p[0] = self.handleModifiers(type, p[2])
        return
    type = IDLUnresolvedType(self.getLocation(p, 1), p[1])
    p[0] = self.handleModifiers(type, p[2])
def p_NonAnyTypeDate(self, p):
"""
NonAnyType : DATE TypeSuffix
"""
p[0] = self.handleModifiers(BuiltinTypes[IDLBuiltinType.Types.date],
p[2])
def p_ConstType(self, p):
"""
ConstType : PrimitiveOrStringType Null
"""
type = BuiltinTypes[p[1]]
if p[2]:
type = IDLNullableType(self.getLocation(p, 1), type)
p[0] = type
def p_ConstTypeIdentifier(self, p):
"""
ConstType : IDENTIFIER Null
"""
identifier = IDLUnresolvedIdentifier(self.getLocation(p, 1), p[1])
type = IDLUnresolvedType(self.getLocation(p, 1), identifier)
if p[2]:
type = IDLNullableType(self.getLocation(p, 1), type)
p[0] = type
def p_PrimitiveOrStringTypeUint(self, p):
"""
PrimitiveOrStringType : UnsignedIntegerType
"""
p[0] = p[1]
def p_PrimitiveOrStringTypeBoolean(self, p):
"""
PrimitiveOrStringType : BOOLEAN
"""
p[0] = IDLBuiltinType.Types.boolean
def p_PrimitiveOrStringTypeByte(self, p):
"""
PrimitiveOrStringType : BYTE
"""
p[0] = IDLBuiltinType.Types.byte
def p_PrimitiveOrStringTypeOctet(self, p):
"""
PrimitiveOrStringType : OCTET
"""
p[0] = IDLBuiltinType.Types.octet
def p_PrimitiveOrStringTypeFloat(self, p):
"""
PrimitiveOrStringType : FLOAT
"""
p[0] = IDLBuiltinType.Types.float
def p_PrimitiveOrStringTypeUnrestictedFloat(self, p):
"""
PrimitiveOrStringType : UNRESTRICTED FLOAT
"""
p[0] = IDLBuiltinType.Types.unrestricted_float
def p_PrimitiveOrStringTypeDouble(self, p):
"""
PrimitiveOrStringType : DOUBLE
"""
p[0] = IDLBuiltinType.Types.double
def p_PrimitiveOrStringTypeUnrestictedDouble(self, p):
"""
PrimitiveOrStringType : UNRESTRICTED DOUBLE
"""
p[0] = IDLBuiltinType.Types.unrestricted_double
def p_PrimitiveOrStringTypeDOMString(self, p):
"""
PrimitiveOrStringType : DOMSTRING
"""
p[0] = IDLBuiltinType.Types.domstring
def p_PrimitiveOrStringTypeBytestring(self, p):
"""
PrimitiveOrStringType : BYTESTRING
"""
p[0] = IDLBuiltinType.Types.bytestring
def p_PrimitiveOrStringTypeUSVString(self, p):
"""
PrimitiveOrStringType : USVSTRING
"""
p[0] = IDLBuiltinType.Types.usvstring
def p_UnsignedIntegerTypeUnsigned(self, p):
"""
UnsignedIntegerType : UNSIGNED IntegerType
"""
# Adding one to a given signed integer type gets you the unsigned type:
p[0] = p[2] + 1
def p_UnsignedIntegerType(self, p):
"""
UnsignedIntegerType : IntegerType
"""
p[0] = p[1]
def p_IntegerTypeShort(self, p):
"""
IntegerType : SHORT
"""
p[0] = IDLBuiltinType.Types.short
def p_IntegerTypeLong(self, p):
"""
IntegerType : LONG OptionalLong
"""
if p[2]:
p[0] = IDLBuiltinType.Types.long_long
else:
p[0] = IDLBuiltinType.Types.long
def p_OptionalLong(self, p):
"""
OptionalLong : LONG
"""
p[0] = True
def p_OptionalLongEmpty(self, p):
"""
OptionalLong :
"""
p[0] = False
def p_TypeSuffixBrackets(self, p):
"""
TypeSuffix : LBRACKET RBRACKET TypeSuffix
"""
p[0] = [(IDLMethod.TypeSuffixModifier.Brackets, self.getLocation(p, 1))]
p[0].extend(p[3])
def p_TypeSuffixQMark(self, p):
"""
TypeSuffix : QUESTIONMARK TypeSuffixStartingWithArray
"""
p[0] = [(IDLMethod.TypeSuffixModifier.QMark, self.getLocation(p, 1))]
p[0].extend(p[2])
def p_TypeSuffixEmpty(self, p):
"""
TypeSuffix :
"""
p[0] = []
def p_TypeSuffixStartingWithArray(self, p):
"""
TypeSuffixStartingWithArray : LBRACKET RBRACKET TypeSuffix
"""
p[0] = [(IDLMethod.TypeSuffixModifier.Brackets, self.getLocation(p, 1))]
p[0].extend(p[3])
def p_TypeSuffixStartingWithArrayEmpty(self, p):
"""
TypeSuffixStartingWithArray :
"""
p[0] = []
def p_Null(self, p):
    """
    Null : QUESTIONMARK
         |
    """
    # True exactly when the optional "?" token was matched (the production
    # slice then has more than just the result slot).  PLY reads the grammar
    # from the docstring above, so it is kept intact.
    p[0] = len(p) > 1
def p_ReturnTypeType(self, p):
"""
ReturnType : Type
"""
p[0] = p[1]
def p_ReturnTypeVoid(self, p):
"""
ReturnType : VOID
"""
p[0] = BuiltinTypes[IDLBuiltinType.Types.void]
def p_ScopedName(self, p):
"""
ScopedName : AbsoluteScopedName
| RelativeScopedName
"""
p[0] = p[1]
def p_AbsoluteScopedName(self, p):
"""
AbsoluteScopedName : SCOPE IDENTIFIER ScopedNameParts
"""
assert False
pass
def p_RelativeScopedName(self, p):
"""
RelativeScopedName : IDENTIFIER ScopedNameParts
"""
assert not p[2] # Not implemented!
p[0] = IDLUnresolvedIdentifier(self.getLocation(p, 1), p[1])
def p_ScopedNameParts(self, p):
"""
ScopedNameParts : SCOPE IDENTIFIER ScopedNameParts
"""
assert False
pass
def p_ScopedNamePartsEmpty(self, p):
"""
ScopedNameParts :
"""
p[0] = None
def p_ExtendedAttributeNoArgs(self, p):
"""
ExtendedAttributeNoArgs : IDENTIFIER
"""
p[0] = (p[1],)
def p_ExtendedAttributeArgList(self, p):
"""
ExtendedAttributeArgList : IDENTIFIER LPAREN ArgumentList RPAREN
"""
p[0] = (p[1], p[3])
def p_ExtendedAttributeIdent(self, p):
"""
ExtendedAttributeIdent : IDENTIFIER EQUALS STRING
| IDENTIFIER EQUALS IDENTIFIER
"""
p[0] = (p[1], p[3])
def p_ExtendedAttributeNamedArgList(self, p):
"""
ExtendedAttributeNamedArgList : IDENTIFIER EQUALS IDENTIFIER LPAREN ArgumentList RPAREN
"""
p[0] = (p[1], p[3], p[5])
def p_ExtendedAttributeIdentList(self, p):
"""
ExtendedAttributeIdentList : IDENTIFIER EQUALS LPAREN IdentifierList RPAREN
"""
p[0] = (p[1], p[4])
def p_IdentifierList(self, p):
    """
    IdentifierList : IDENTIFIER Identifiers
    """
    # Head identifier followed by the (possibly empty) tail, as a new list.
    p[0] = [p[1]] + list(p[2])
def p_IdentifiersList(self, p):
    """
    Identifiers : COMMA IDENTIFIER Identifiers
    """
    # Identifier after the comma, prepended to the recursive tail.
    p[0] = [p[2]] + list(p[3])
def p_IdentifiersEmpty(self, p):
"""
Identifiers :
"""
p[0] = []
def p_error(self, p):
    # PLY error hook: p is None when the error is at end of input,
    # otherwise it is the offending token (with lineno/lexpos).
    if not p:
        raise WebIDLError("Syntax Error at end of file. Possibly due to missing semicolon(;), braces(}) or both",
                          [self._filename])
    else:
        raise WebIDLError("invalid syntax", [Location(self.lexer, p.lineno, p.lexpos, self._filename)])
def __init__(self, outputdir='', lexer=None):
    """Build the yacc parser from this class's p_* docstrings and prime
    the global scope: fake globals for the test harness, builtin typedefs,
    and the builtin IDL snippet (Parser._builtins)."""
    Tokenizer.__init__(self, outputdir, lexer)
    logger = SqueakyCleanLogger()
    self.parser = yacc.yacc(module=self,
                            outputdir=outputdir,
                            tabmodule='webidlyacc',
                            errorlog=logger
                            # Pickling the grammar is a speedup in
                            # some cases (older Python?) but a
                            # significant slowdown in others.
                            # We're not pickling for now, until it
                            # becomes a speedup again.
                            # , picklefile='WebIDLGrammar.pkl'
                            )
    logger.reportGrammarErrors()
    self._globalScope = IDLScope(BuiltinLocation("<Global Scope>"), None, None)
    # To make our test harness work, pretend like we have a primary global already.
    # Note that we _don't_ set _globalScope.primaryGlobalAttr,
    # so we'll still be able to detect multiple PrimaryGlobal extended attributes.
    self._globalScope.primaryGlobalName = "FakeTestPrimaryGlobal"
    self._globalScope.globalNames.add("FakeTestPrimaryGlobal")
    self._globalScope.globalNameMapping["FakeTestPrimaryGlobal"].add("FakeTestPrimaryGlobal")
    # And we add the special-cased "System" global name, which
    # doesn't have any corresponding interfaces.
    self._globalScope.globalNames.add("System")
    self._globalScope.globalNameMapping["System"].add("BackstagePass")
    self._installBuiltins(self._globalScope)
    self._productions = []
    # Parse the builtin IDL before any user input; _filename is set to
    # "<builtin>" only for the duration of lexing it, then cleared.
    self._filename = "<builtin>"
    self.lexer.input(Parser._builtins)
    self._filename = None
    self.parser.parse(lexer=self.lexer, tracking=True)
def _installBuiltins(self, scope):
    """Register a typedef in *scope* for every builtin typed-array-range
    type from ArrayBuffer through SharedFloat64Array (inclusive)."""
    assert isinstance(scope, IDLScope)
    # xrange omits the last value.
    for x in xrange(IDLBuiltinType.Types.ArrayBuffer, IDLBuiltinType.Types.SharedFloat64Array + 1):
        builtin = BuiltinTypes[x]
        name = builtin.name
        # NOTE(review): the typedef local is unused here; presumably the
        # IDLTypedef constructor registers itself into scope — confirm.
        typedef = IDLTypedef(BuiltinLocation("<builtin type>"), scope, builtin, name)
@staticmethod
def handleModifiers(type, modifiers):
    """Apply trailing type-suffix modifiers, in order, to *type*.

    Each modifier is a (kind, location) pair: QMark wraps the type in
    IDLNullableType, Brackets wraps it in IDLArrayType.  Returns the
    wrapped type."""
    for (modifier, modifierLocation) in modifiers:
        assert (modifier == IDLMethod.TypeSuffixModifier.QMark or
                modifier == IDLMethod.TypeSuffixModifier.Brackets)
        if modifier == IDLMethod.TypeSuffixModifier.QMark:
            type = IDLNullableType(modifierLocation, type)
        elif modifier == IDLMethod.TypeSuffixModifier.Brackets:
            type = IDLArrayType(modifierLocation, type)
    return type
def parse(self, t, filename=None):
    """Parse WebIDL source text *t*, accumulating results into
    self._productions; *filename* is used only for error locations."""
    self.lexer.input(t)
    self._filename = filename
    self._productions.extend(self.parser.parse(lexer=self.lexer, tracking=True))
    self._filename = None
def finish(self):
    """Finish and validate everything parsed so far.

    Returns self._productions de-duplicated while preserving order."""
    # First, finish all the IDLImplementsStatements. In particular, we
    # have to make sure we do those before we do the IDLInterfaces.
    # XXX khuey hates this bit and wants to nuke it from orbit.
    implementsStatements = [p for p in self._productions if
                            isinstance(p, IDLImplementsStatement)]
    otherStatements = [p for p in self._productions if
                       not isinstance(p, IDLImplementsStatement)]
    for production in implementsStatements:
        production.finish(self.globalScope())
    for production in otherStatements:
        production.finish(self.globalScope())
    # Do any post-finish validation we need to do
    for production in self._productions:
        production.validate()
    # De-duplicate self._productions, without modifying its order.
    seen = set()
    result = []
    for p in self._productions:
        if p not in seen:
            seen.add(p)
            result.append(p)
    return result
def reset(self):
return Parser(lexer=self.lexer)
# Builtin IDL defined by WebIDL
_builtins = """
typedef unsigned long long DOMTimeStamp;
typedef (ArrayBufferView or ArrayBuffer) BufferSource;
typedef (SharedArrayBufferView or SharedArrayBuffer) SharedBufferSource;
"""
def main():
    """Command-line driver: parse each WebIDL file given on the command
    line and run finish(); errors print as WebIDLError messages (or full
    tracebacks with --verbose-errors).

    NOTE: this file is Python 2 (print statements, "except X, e" syntax)."""
    # Parse arguments.
    from optparse import OptionParser
    usageString = "usage: %prog [options] files"
    o = OptionParser(usage=usageString)
    o.add_option("--cachedir", dest='cachedir', default=None,
                 help="Directory in which to cache lex/parse tables.")
    o.add_option("--verbose-errors", action='store_true', default=False,
                 help="When an error happens, display the Python traceback.")
    (options, args) = o.parse_args()
    if len(args) < 1:
        o.error(usageString)
    fileList = args
    baseDir = os.getcwd()
    # Parse the WebIDL.
    parser = Parser(options.cachedir)
    try:
        for filename in fileList:
            fullPath = os.path.normpath(os.path.join(baseDir, filename))
            # NOTE(review): the file is not closed if readlines() raises;
            # a "with open(...)" would be safer — left unchanged here.
            f = open(fullPath, 'rb')
            lines = f.readlines()
            f.close()
            print fullPath
            parser.parse(''.join(lines), fullPath)
        parser.finish()
    except WebIDLError, e:
        if options.verbose_errors:
            traceback.print_exc()
        else:
            print e

if __name__ == '__main__':
    main()
| mpl-2.0 |
p0psicles/SickRage | lib/unidecode/x05f.py | 252 | 4660 | data = (
'Kai ', # 0x00
'Bian ', # 0x01
'Yi ', # 0x02
'Qi ', # 0x03
'Nong ', # 0x04
'Fen ', # 0x05
'Ju ', # 0x06
'Yan ', # 0x07
'Yi ', # 0x08
'Zang ', # 0x09
'Bi ', # 0x0a
'Yi ', # 0x0b
'Yi ', # 0x0c
'Er ', # 0x0d
'San ', # 0x0e
'Shi ', # 0x0f
'Er ', # 0x10
'Shi ', # 0x11
'Shi ', # 0x12
'Gong ', # 0x13
'Diao ', # 0x14
'Yin ', # 0x15
'Hu ', # 0x16
'Fu ', # 0x17
'Hong ', # 0x18
'Wu ', # 0x19
'Tui ', # 0x1a
'Chi ', # 0x1b
'Jiang ', # 0x1c
'Ba ', # 0x1d
'Shen ', # 0x1e
'Di ', # 0x1f
'Zhang ', # 0x20
'Jue ', # 0x21
'Tao ', # 0x22
'Fu ', # 0x23
'Di ', # 0x24
'Mi ', # 0x25
'Xian ', # 0x26
'Hu ', # 0x27
'Chao ', # 0x28
'Nu ', # 0x29
'Jing ', # 0x2a
'Zhen ', # 0x2b
'Yi ', # 0x2c
'Mi ', # 0x2d
'Quan ', # 0x2e
'Wan ', # 0x2f
'Shao ', # 0x30
'Ruo ', # 0x31
'Xuan ', # 0x32
'Jing ', # 0x33
'Dun ', # 0x34
'Zhang ', # 0x35
'Jiang ', # 0x36
'Qiang ', # 0x37
'Peng ', # 0x38
'Dan ', # 0x39
'Qiang ', # 0x3a
'Bi ', # 0x3b
'Bi ', # 0x3c
'She ', # 0x3d
'Dan ', # 0x3e
'Jian ', # 0x3f
'Gou ', # 0x40
'Sei ', # 0x41
'Fa ', # 0x42
'Bi ', # 0x43
'Kou ', # 0x44
'Nagi ', # 0x45
'Bie ', # 0x46
'Xiao ', # 0x47
'Dan ', # 0x48
'Kuo ', # 0x49
'Qiang ', # 0x4a
'Hong ', # 0x4b
'Mi ', # 0x4c
'Kuo ', # 0x4d
'Wan ', # 0x4e
'Jue ', # 0x4f
'Ji ', # 0x50
'Ji ', # 0x51
'Gui ', # 0x52
'Dang ', # 0x53
'Lu ', # 0x54
'Lu ', # 0x55
'Tuan ', # 0x56
'Hui ', # 0x57
'Zhi ', # 0x58
'Hui ', # 0x59
'Hui ', # 0x5a
'Yi ', # 0x5b
'Yi ', # 0x5c
'Yi ', # 0x5d
'Yi ', # 0x5e
'Huo ', # 0x5f
'Huo ', # 0x60
'Shan ', # 0x61
'Xing ', # 0x62
'Wen ', # 0x63
'Tong ', # 0x64
'Yan ', # 0x65
'Yan ', # 0x66
'Yu ', # 0x67
'Chi ', # 0x68
'Cai ', # 0x69
'Biao ', # 0x6a
'Diao ', # 0x6b
'Bin ', # 0x6c
'Peng ', # 0x6d
'Yong ', # 0x6e
'Piao ', # 0x6f
'Zhang ', # 0x70
'Ying ', # 0x71
'Chi ', # 0x72
'Chi ', # 0x73
'Zhuo ', # 0x74
'Tuo ', # 0x75
'Ji ', # 0x76
'Pang ', # 0x77
'Zhong ', # 0x78
'Yi ', # 0x79
'Wang ', # 0x7a
'Che ', # 0x7b
'Bi ', # 0x7c
'Chi ', # 0x7d
'Ling ', # 0x7e
'Fu ', # 0x7f
'Wang ', # 0x80
'Zheng ', # 0x81
'Cu ', # 0x82
'Wang ', # 0x83
'Jing ', # 0x84
'Dai ', # 0x85
'Xi ', # 0x86
'Xun ', # 0x87
'Hen ', # 0x88
'Yang ', # 0x89
'Huai ', # 0x8a
'Lu ', # 0x8b
'Hou ', # 0x8c
'Wa ', # 0x8d
'Cheng ', # 0x8e
'Zhi ', # 0x8f
'Xu ', # 0x90
'Jing ', # 0x91
'Tu ', # 0x92
'Cong ', # 0x93
'[?] ', # 0x94
'Lai ', # 0x95
'Cong ', # 0x96
'De ', # 0x97
'Pai ', # 0x98
'Xi ', # 0x99
'[?] ', # 0x9a
'Qi ', # 0x9b
'Chang ', # 0x9c
'Zhi ', # 0x9d
'Cong ', # 0x9e
'Zhou ', # 0x9f
'Lai ', # 0xa0
'Yu ', # 0xa1
'Xie ', # 0xa2
'Jie ', # 0xa3
'Jian ', # 0xa4
'Chi ', # 0xa5
'Jia ', # 0xa6
'Bian ', # 0xa7
'Huang ', # 0xa8
'Fu ', # 0xa9
'Xun ', # 0xaa
'Wei ', # 0xab
'Pang ', # 0xac
'Yao ', # 0xad
'Wei ', # 0xae
'Xi ', # 0xaf
'Zheng ', # 0xb0
'Piao ', # 0xb1
'Chi ', # 0xb2
'De ', # 0xb3
'Zheng ', # 0xb4
'Zheng ', # 0xb5
'Bie ', # 0xb6
'De ', # 0xb7
'Chong ', # 0xb8
'Che ', # 0xb9
'Jiao ', # 0xba
'Wei ', # 0xbb
'Jiao ', # 0xbc
'Hui ', # 0xbd
'Mei ', # 0xbe
'Long ', # 0xbf
'Xiang ', # 0xc0
'Bao ', # 0xc1
'Qu ', # 0xc2
'Xin ', # 0xc3
'Shu ', # 0xc4
'Bi ', # 0xc5
'Yi ', # 0xc6
'Le ', # 0xc7
'Ren ', # 0xc8
'Dao ', # 0xc9
'Ding ', # 0xca
'Gai ', # 0xcb
'Ji ', # 0xcc
'Ren ', # 0xcd
'Ren ', # 0xce
'Chan ', # 0xcf
'Tan ', # 0xd0
'Te ', # 0xd1
'Te ', # 0xd2
'Gan ', # 0xd3
'Qi ', # 0xd4
'Shi ', # 0xd5
'Cun ', # 0xd6
'Zhi ', # 0xd7
'Wang ', # 0xd8
'Mang ', # 0xd9
'Xi ', # 0xda
'Fan ', # 0xdb
'Ying ', # 0xdc
'Tian ', # 0xdd
'Min ', # 0xde
'Min ', # 0xdf
'Zhong ', # 0xe0
'Chong ', # 0xe1
'Wu ', # 0xe2
'Ji ', # 0xe3
'Wu ', # 0xe4
'Xi ', # 0xe5
'Ye ', # 0xe6
'You ', # 0xe7
'Wan ', # 0xe8
'Cong ', # 0xe9
'Zhong ', # 0xea
'Kuai ', # 0xeb
'Yu ', # 0xec
'Bian ', # 0xed
'Zhi ', # 0xee
'Qi ', # 0xef
'Cui ', # 0xf0
'Chen ', # 0xf1
'Tai ', # 0xf2
'Tun ', # 0xf3
'Qian ', # 0xf4
'Nian ', # 0xf5
'Hun ', # 0xf6
'Xiong ', # 0xf7
'Niu ', # 0xf8
'Wang ', # 0xf9
'Xian ', # 0xfa
'Xin ', # 0xfb
'Kang ', # 0xfc
'Hu ', # 0xfd
'Kai ', # 0xfe
'Fen ', # 0xff
)
| gpl-3.0 |
othercriteria/StochasticBlockmodel | test_block.py | 1 | 7829 | #!/usr/bin/env python
# Test of "new style" network inference, finally with blockmodel
# Daniel Klein, 7/8/2012
import numpy as np
from Network import Network
from Models import StationaryLogistic, NonstationaryLogistic, Blockmodel
from Models import alpha_zero, alpha_norm, alpha_unif, alpha_gamma
from Experiment import RandomSubnetworks, Results, add_array_stats
from Experiment import minimum_disagreement, rel_mse
# Parameters
params = { 'N': 100,
'K': 2,
'class_conc': 10.0,
'Theta_diag': 4.0,
'Theta_mean': 0.0,
'Theta_sd': 1.0,
'B': 1,
'beta_sd': 1.0,
'alpha_unif': 0.0,
'alpha_norm_sd': 2.0,
'alpha_gamma_sd': 0.0,
'kappa_target': ('row_sum', 5),
'fit_nonstationary': True,
'fit_conditional': True,
'fit_K': 2,
'initialize_true_z': False,
'cycles': 5,
'sweeps': 1,
'verbose': False,
'num_reps': 5,
'sub_sizes': np.arange(10, 60, 5),
'plot_mse': True,
'plot_network': True }
# Set random seed for reproducible output
np.random.seed(136)
# Report parameters for the run
print 'Parameters:'
for field in params:
print '%s: %s' % (field, str(params[field]))
# Initialize full network
net = Network(params['N'])
# Generate node-level propensities to extend and receive edges
if params['alpha_norm_sd'] > 0.0:
alpha_norm(net, params['alpha_norm_sd'])
elif params['alpha_unif'] > 0.0:
alpha_unif(net, params['alpha_unif'])
elif params['alpha_gamma_sd'] > 0.0:
# Choosing location somewhat arbitrarily to give unit skewness
alpha_gamma(net, 4.0, params['alpha_gamma_sd'])
else:
alpha_zero(net)
# Generate covariates and associated coefficients
data_base_model = NonstationaryLogistic()
for b in range(params['B']):
name = 'x_%d' % b
data_base_model.beta[name] = np.random.normal(0, params['beta_sd'])
def f_x(i_1, i_2):
return np.random.uniform(-np.sqrt(3), np.sqrt(3))
net.new_edge_covariate(name).from_binary_function_ind(f_x)
# Initialize data (block)model from base model
class_probs = np.random.dirichlet(np.repeat(params['class_conc'], params['K']))
z = np.where(np.random.multinomial(1, class_probs, params['N']) == 1)[1]
net.new_node_covariate_int('z_true')[:] = z
data_model = Blockmodel(data_base_model, params['K'], 'z_true')
Theta = np.random.normal(params['Theta_mean'], params['Theta_sd'],
(params['K'],params['K']))
Theta += params['Theta_diag'] * np.identity(params['K'])
Theta -= np.mean(Theta)
data_model.Theta = Theta
net.generate(data_model)
if params['plot_network']:
net.show_heatmap('z_true')
# Initialize fitting model
fit_base_model = StationaryLogistic()
for b in data_base_model.beta:
fit_base_model.beta[b] = None
fit_model = Blockmodel(fit_base_model, params['fit_K'])
if params['fit_nonstationary']:
n_fit_base_model = NonstationaryLogistic()
for b in data_base_model.beta:
n_fit_base_model.beta[b] = None
n_fit_model = Blockmodel(n_fit_base_model, params['fit_K'])
net.new_node_covariate_int('z')
# Set up recording of results from experiment
s_results = Results(params['sub_sizes'], params['sub_sizes'],
params['num_reps'], 'Stationary fit')
add_array_stats(s_results)
s_results.new('Subnetwork kappa', 'm', lambda d, f: d.base_model.kappa)
def f_b(b):
    """Return a (true, estimated) pair of extractors for coefficient ``b``.

    Binding ``b`` through this factory sidesteps the late-binding
    pitfall of defining the accessors directly inside a for-loop.
    Each returned callable takes (data_model, fit_model) and reads the
    coefficient from the appropriate model's base model.
    """
    def true_beta(d, f):
        return d.base_model.beta[b]

    def estimated_beta(d, f):
        return f.base_model.beta[b]

    return (true_beta, estimated_beta)
# Register per-coefficient recovery metrics, one pair per covariate.
for b in data_base_model.beta:
    # Need to do this hackily to avoid for-loop/lambda-binding weirdness.
    f_true, f_estimated = f_b(b)
    s_results.new('True beta_{%s}' % b, 'm', f_true)
    s_results.new('Estimated beta_{%s}' % b, 'm', f_estimated)
# Fraction of nodes whose inferred class disagrees with the truth
# (up to label permutation, handled by minimum_disagreement).
s_results.new('Class mismatch', 'n',
              lambda n: minimum_disagreement(n.node_covariates['z_true'][:], \
                                             n.node_covariates['z'][:]))
# MSE of fitted edge probabilities relative to the fit model's baseline.
def rel_mse_p_ij(n, d, f):
    P = d.edge_probabilities(n)
    return rel_mse(f.edge_probabilities(n), f.baseline(n), P)
s_results.new('Rel. MSE(P)', 'nm', rel_mse_p_ij)
# Same comparison on the logit scale.
def rel_mse_logit_p_ij(n, d, f):
    logit_P = d.edge_probabilities(n, logit = True)
    logit_Q = f.baseline_logit(n)
    return rel_mse(f.edge_probabilities(n, logit = True), logit_Q, logit_P)
s_results.new('Rel. MSE(logit_P)', 'nm', rel_mse_logit_p_ij)
# Clone the metric set for each enabled fitting variant; keys are
# 's' (stationary), 'n' (nonstationary), 'c' (conditional).
all_results = { 's': s_results }
if params['fit_nonstationary']:
    n_results = s_results.copy()
    n_results.title = 'Nonstationary fit'
    all_results['n'] = n_results
if params['fit_conditional']:
    c_results = s_results.copy()
    c_results.title = 'Conditional fit'
    all_results['c'] = c_results
def initialize(s, f):
    """Seed the working class labels 'z' of subnetwork ``s`` before a fit.

    With 'initialize_true_z' set, copies the true labels (oracle start);
    otherwise draws independent uniform labels in [0, fit_K).  The fit
    model ``f`` is unused but kept for interface symmetry.
    """
    z = s.node_covariates['z']
    if params['initialize_true_z']:
        z[:] = s.node_covariates['z_true'][:]
    else:
        z[:] = np.random.randint(0, params['fit_K'], s.N)
# Main experiment: for each subnetwork size, sample num_reps subnetworks,
# regenerate edges at the target sparsity, and fit each enabled variant.
for sub_size in params['sub_sizes']:
    size = (sub_size, sub_size)
    print 'subnetwork size = %s' % str(size)
    gen = RandomSubnetworks(net, size)
    for rep in range(params['num_reps']):
        subnet = gen.sample(as_network = True)
        # Rescale the data model so generated subnetworks hit the
        # configured degree target, then regenerate edges.
        data_model.match_kappa(subnet, params['kappa_target'])
        subnet.generate(data_model)
        initialize(subnet, fit_model)
        fit_model.fit(subnet, params['cycles'], params['sweeps'],
                      verbose = params['verbose'])
        s_results.record(size, rep, subnet, data_model, fit_model)
        print
        if params['fit_conditional']:
            initialize(subnet, fit_model)
            # NOTE(review): this monkey-patch of fit_base_model.fit is
            # never undone, so every later "stationary" fit (rep >= 2)
            # also uses fit_conditional — confirm this is intended.
            fit_base_model.fit = fit_base_model.fit_conditional
            fit_model.fit(subnet, params['cycles'], params['sweeps'])
            c_results.record(size, rep, subnet, data_model, fit_model)
            print
        if params['fit_nonstationary']:
            subnet.offset_extremes()
            initialize(subnet, n_fit_model)
            n_fit_model.fit(subnet, params['cycles'], params['sweeps'],
                            verbose = params['verbose'])
            n_results.record(size, rep, subnet, data_model, n_fit_model)
            print
# Compute beta MSEs
covariate_mses = []
for b in fit_base_model.beta:
    name = 'MSE(beta_{%s})' % b
    covariate_mses.append(name)
    for model in all_results:
        results = all_results[model]
        results.estimate_mse(name,
                             'True beta_{%s}' % b, 'Estimated beta_{%s}' % b)
# Print a text summary for every fitted variant.
for model in all_results:
    results = all_results[model]
    print results.title
    results.summary()
    print
# Plot inference performance, in terms of MSE(beta), MSE(P_ij), and
# inferred class disagreement; also plot kappas chosen for data models
if params['plot_mse']:
    for model in all_results:
        results = all_results[model]
        to_plot = [(['MSE(beta_i)'] + covariate_mses,
                    {'ymin': 0, 'ymax': 0.5, 'plot_mean': True}),
                   ('Rel. MSE(P)', {'ymin': 0, 'ymax': 2, 'baseline': 1}),
                   ('Rel. MSE(logit_P)', {'ymin': 0, 'ymax': 2, 'baseline': 1}),
                   ('Class mismatch', {'ymin': 0, 'ymax': 2})]
        # The nonstationary fit has no logit-scale baseline to plot.
        if model == 'n': to_plot.pop(2)
        results.plot(to_plot)
# Plot network statistics as well as sparsity parameter
if params['plot_network']:
    s_results.title = None
    s_results.plot([('Average row-sum', {'ymin': 0, 'plot_mean': True}),
                    ('Average col-sum', {'ymin': 0, 'plot_mean': True}),
                    (['row-sum', 'Max row-sum', 'Min row-sum'],
                     {'ymin': 0, 'plot_mean': True}),
                    # NOTE(review): 'Max row-sum' in this col-sum panel
                    # looks like a copy-paste slip ('Max col-sum'?) — confirm.
                    (['col-sum', 'Max row-sum', 'Min col-sum'],
                     {'ymin': 0, 'plot_mean': True}),
                    'Subnetwork kappa'])
| mit |
frogbywyplay/genbox_xov | xov/config.py | 1 | 2984 | #
# Copyright (C) 2006-2014 Wyplay, All Rights Reserved.
# This file is part of xov.
#
# xov is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# xov is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; see file COPYING.
# If not, see <http://www.gnu.org/licenses/>.
#
#
import exceptions
import os
import re
import overlay
import xtarget
class XovConfigError(exceptions.Exception):
    """Exception raised for xov configuration errors."""

    def __init__(self, error=None):
        self.error = error

    def __str__(self):
        # An unset message renders as the empty string.
        return self.error or ""
class XovConfig(object):
    """Reader/writer for the xov overlay configuration file.

    Each overlay is stored as ``PORTAGE_<NAME>_<FIELD>="<value>"`` lines,
    alongside the portage ``PORTDIR`` / ``PORTDIR_OVERLAY`` variables.
    """

    def __init__(self, cfg_dir, ov_dir, is_targets=False):
        """
        :param cfg_dir: directory holding the configuration file
        :param ov_dir: directory where overlays are checked out
        :param is_targets: use the xtarget system overlay config path
                           instead of the default '/xov.conf'
        """
        self.cfg_dir = cfg_dir
        self.cfg_file = self.cfg_dir + (xtarget.consts.XTARGET_SYS_OV_CFG[4:] if is_targets else '/xov.conf')
        self.ov_dir = ov_dir
        # PORTAGE_<NAME>_<FIELD>="<value>" overlay attribute lines
        self.ov_re = re.compile('^PORTAGE_(.*)_(.*)="(.*)"$')
        # PORTDIR="<value>" (double-quoted, allowing escaped characters)
        self.portdir_re = re.compile(r'^\s*PORTDIR\s*=\s*(?P<dbl>\")(?P<value>(?:[^\\"]|\\.)*)(?(dbl)\")')
        self.portdir_ov_re = re.compile('PORTDIR_OVERLAY\s*=\s*"([^"]*)"')

    def __ov_config(self, line, ovs):
        """Parse one PORTAGE_* line into the overlay dict ``ovs``.

        Creates the overlay entry on first sight of its name.
        Returns 0 on success, 1 for an unrecognized attribute.
        """
        elems = self.ov_re.split(line)
        name = elems[1].lower()
        # 'in' replaces the deprecated dict.has_key() (removed in Python 3).
        if name not in ovs:
            ovs[name] = overlay.Overlay()
            ovs[name].name = name
        if elems[2] == "PROTO":
            ovs[name].protocol = elems[3]
        elif elems[2] == "URI":
            ovs[name].uri = elems[3]
        elif elems[2] == "REVISION":
            ovs[name].revision = elems[3]
        elif elems[2] == "BRANCH":
            ovs[name].branch = elems[3]
        else:
            return 1
        return 0

    def __ov_portdir(self, line, ovs):
        """Flag the overlay named by a PORTDIR line as the portdir."""
        portdir = self.portdir_re.match(line).group('value')
        name = os.path.basename(portdir)
        if name not in ovs:
            ovs[name] = overlay.Overlay()
        ovs[name].set_portdir()

    def read(self):
        """Parse the configuration file.

        :return: dict mapping overlay name -> overlay.Overlay;
                 empty if the file does not exist.
        """
        ovs = {}
        if os.path.isfile(self.cfg_file):
            fd = open(self.cfg_file, 'r')
            for ii in fd.readlines():
                if self.ov_re.match(ii):
                    self.__ov_config(ii, ovs)
                elif self.portdir_re.match(ii):
                    self.__ov_portdir(ii, ovs)
            fd.close()
        return ovs

    def write(self, ovs):
        """Write the overlays in ``ovs`` back to the configuration file.

        The first overlay flagged as portdir becomes PORTDIR; every other
        overlay is appended to PORTDIR_OVERLAY.
        """
        fd = open(self.cfg_file, 'w')
        paths = []
        portdir = None
        for ii in ovs.values():
            ii.write(fd)
            if (ii.is_portdir() and not portdir):
                portdir = 'PORTDIR="%s/%s"\n' % (self.ov_dir, ii.name)
            else:
                paths.append(self.ov_dir + '/' + ii.name)
        # set PORTDIR_OVERLAY variable, preserving any inherited value
        overlays = 'PORTDIR_OVERLAY="\n'
        overlays += '$PORTDIR_OVERLAY\n'
        overlays += '\n'.join(paths) + '\n'
        overlays += '"'
        print >> fd, "# portage configuration"
        if portdir:
            print >> fd, portdir
        print >> fd, overlays
        fd.close()
| gpl-2.0 |
tsgit/invenio | modules/bibformat/lib/elements/bfe_external_publications.py | 35 | 1342 | ## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibFormat element - Prints links to external publications
"""
__revision__ = "$Id$"
def format_element(bfo, separator='<br/>'):
    """
    Prints list of links to external publications.

    @param bfo: BibFormatObject for the record being formatted
    @param separator: string inserted between successive links
    """
    publications = bfo.fields('909C4')
    # Keep only entries carrying both a URL ('d') and a title ('p').
    # Membership test with 'in' replaces the deprecated dict.has_key(),
    # which no longer exists on Python 3.
    out = ['<a href="' + pub['d'] + '">' + pub['p'] + '</a>'
           for pub in publications
           if 'd' in pub and 'p' in pub]
    return separator.join(out)
def escape_values(bfo):
    """
    Called by BibFormat in order to check if output of this element
    should be escaped.
    """
    # The element emits HTML anchors that must pass through verbatim.
    return 0
| gpl-2.0 |
v-iam/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_06_01/operations/subnets_operations.py | 2 | 16210 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrestazure.azure_operation import AzureOperationPoller
import uuid
from .. import models
# NOTE: AutoRest-generated operations class; manual edits are overwritten
# on regeneration.
class SubnetsOperations(object):
    """SubnetsOperations operations.
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    :ivar api_version: Client API version. Constant value: "2017-06-01".
    """
    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self.api_version = "2017-06-01"
        self.config = config
    def delete(
            self, resource_group_name, virtual_network_name, subnet_name, custom_headers=None, raw=False, **operation_config):
        """Deletes the specified subnet.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param virtual_network_name: The name of the virtual network.
        :type virtual_network_name: str
        :param subnet_name: The name of the subnet.
        :type subnet_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :rtype:
         :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
         instance that returns None
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}'
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
            'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct and send request
        # Helpers handed to AzureOperationPoller for long-running-operation
        # polling: initial send, status polling, and terminal handling.
        def long_running_send():
            request = self._client.delete(url, query_parameters)
            return self._client.send(request, header_parameters, **operation_config)
        def get_long_running_status(status_link, headers=None):
            request = self._client.get(status_link)
            if headers:
                request.headers.update(headers)
            return self._client.send(
                request, header_parameters, **operation_config)
        def get_long_running_output(response):
            # 200/204 are terminal success; 202 means accepted for async delete.
            if response.status_code not in [200, 204, 202]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp
            if raw:
                client_raw_response = ClientRawResponse(None, response)
                return client_raw_response
        # raw=True bypasses polling and returns the first response directly.
        if raw:
            response = long_running_send()
            return get_long_running_output(response)
        long_running_operation_timeout = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        return AzureOperationPoller(
            long_running_send, get_long_running_output,
            get_long_running_status, long_running_operation_timeout)
    def get(
            self, resource_group_name, virtual_network_name, subnet_name, expand=None, custom_headers=None, raw=False, **operation_config):
        """Gets the specified subnet by virtual network and resource group.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param virtual_network_name: The name of the virtual network.
        :type virtual_network_name: str
        :param subnet_name: The name of the subnet.
        :type subnet_name: str
        :param expand: Expands referenced resources.
        :type expand: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`Subnet <azure.mgmt.network.v2017_06_01.models.Subnet>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}'
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
            'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        if expand is not None:
            query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('Subnet', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized
    def create_or_update(
            self, resource_group_name, virtual_network_name, subnet_name, subnet_parameters, custom_headers=None, raw=False, **operation_config):
        """Creates or updates a subnet in the specified virtual network.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param virtual_network_name: The name of the virtual network.
        :type virtual_network_name: str
        :param subnet_name: The name of the subnet.
        :type subnet_name: str
        :param subnet_parameters: Parameters supplied to the create or update
         subnet operation.
        :type subnet_parameters: :class:`Subnet
         <azure.mgmt.network.v2017_06_01.models.Subnet>`
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :rtype:
         :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
         instance that returns :class:`Subnet
         <azure.mgmt.network.v2017_06_01.models.Subnet>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}'
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
            'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct body
        body_content = self._serialize.body(subnet_parameters, 'Subnet')
        # Construct and send request
        # Long-running-operation helpers (see delete() for the pattern).
        def long_running_send():
            request = self._client.put(url, query_parameters)
            return self._client.send(
                request, header_parameters, body_content, **operation_config)
        def get_long_running_status(status_link, headers=None):
            request = self._client.get(status_link)
            if headers:
                request.headers.update(headers)
            return self._client.send(
                request, header_parameters, **operation_config)
        def get_long_running_output(response):
            # 200 = updated, 201 = created; both carry a Subnet body.
            if response.status_code not in [200, 201]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp
            deserialized = None
            if response.status_code == 200:
                deserialized = self._deserialize('Subnet', response)
            if response.status_code == 201:
                deserialized = self._deserialize('Subnet', response)
            if raw:
                client_raw_response = ClientRawResponse(deserialized, response)
                return client_raw_response
            return deserialized
        if raw:
            response = long_running_send()
            return get_long_running_output(response)
        long_running_operation_timeout = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        return AzureOperationPoller(
            long_running_send, get_long_running_output,
            get_long_running_status, long_running_operation_timeout)
    def list(
            self, resource_group_name, virtual_network_name, custom_headers=None, raw=False, **operation_config):
        """Gets all subnets in a virtual network.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param virtual_network_name: The name of the virtual network.
        :type virtual_network_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`SubnetPaged
         <azure.mgmt.network.v2017_06_01.models.SubnetPaged>`
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Page fetcher handed to SubnetPaged; called lazily per page with
        # the server-provided next_link after the first request.
        def internal_paging(next_link=None, raw=False):
            if not next_link:
                # Construct URL
                url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets'
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
            else:
                url = next_link
                query_parameters = {}
            # Construct headers
            header_parameters = {}
            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
            # Construct and send request
            request = self._client.get(url, query_parameters)
            response = self._client.send(
                request, header_parameters, **operation_config)
            if response.status_code not in [200]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp
            return response
        # Deserialize response
        deserialized = models.SubnetPaged(internal_paging, self._deserialize.dependencies)
        if raw:
            header_dict = {}
            client_raw_response = models.SubnetPaged(internal_paging, self._deserialize.dependencies, header_dict)
            return client_raw_response
        return deserialized
| mit |
hrishioa/Aviato | app/kartograph/layersource/special/graticule.py | 4 | 2307 |
from kartograph.geometry import MultiLineFeature
from kartograph.layersource.layersource import LayerSource
from shapely.geometry import LineString
class GraticuleLayer(LayerSource):
    """
    special layer source for grid of longitudes and latitudes (graticule)
    """
    def get_features(self, latitudes, longitudes, proj,
                     bbox=(-180, -90, 180, 90)):
        """
        returns a list of line features that make up
        the graticule

        :param latitudes: latitudes to draw parallels for
        :param longitudes: longitudes to draw meridians for
        :param proj: projection; must provide minLat/maxLat attributes and
                     a _visible(lon, lat) predicate
        :param bbox: (minLon, minLat, maxLon, maxLat) clipping box
                     (tuple default: read-only, avoids the shared
                     mutable-default pitfall)
        """
        minLat = max(proj.minLat, bbox[1])
        maxLat = min(proj.maxLat, bbox[3])
        minLon = bbox[0]
        maxLon = bbox[2]

        def xfrange(start, stop, step):
            # float analogue of range(); supports positive and negative
            # steps.  FIX: the negative-step termination test compared
            # ``start > step`` instead of ``start > stop``, so negative
            # steps iterated far past the intended end.
            while (step > 0 and start < stop) or (step < 0 and start > stop):
                yield start
                start += step

        line_features = []
        # latitudes: one parallel (constant-lat polyline) per requested value
        for lat in latitudes:
            if lat < minLat or lat > maxLat:
                continue
            pts = []
            props = {'lat': lat}
            for lon in xfrange(-180, 181, 0.5):
                if lon < minLon or lon > maxLon:
                    continue
                if proj._visible(lon, lat):
                    pts.append((lon, lat))
            if len(pts) > 1:
                line = MultiLineFeature(LineString(pts), props)
                line_features.append(line)
        # longitudes: one meridian (constant-lon polyline) per requested value
        for lon in longitudes:
            if lon < minLon or lon > maxLon:
                continue
            pts = []
            props = {'lon': lon}
            for lat in xfrange(0, 181, 0.5):
                lat_ = lat - 90  # shift [0, 181) sample range to latitudes
                if lat_ < minLat or lat_ > maxLat:
                    continue
                if proj._visible(lon, lat_):
                    pts.append((lon, lat_))
            if len(pts) > 1:
                line = MultiLineFeature(LineString(pts), props)
                line_features.append(line)
        return line_features
| gpl-2.0 |
kenshay/ImageScript | ProgramData/SystemFiles/Python/Lib/site-packages/pandas/tests/test_nanops.py | 7 | 44169 | # -*- coding: utf-8 -*-
from __future__ import division, print_function
from functools import partial
import warnings
import numpy as np
from pandas import Series, isnull
from pandas.types.common import is_integer_dtype
import pandas.core.nanops as nanops
import pandas.util.testing as tm
use_bn = nanops._USE_BOTTLENECK
class TestnanopsDataFrame(tm.TestCase):
    def setUp(self):
        # Deterministic fixtures; bottleneck is disabled so the pure-numpy
        # nanops code paths are the ones under test (restored in tearDown).
        np.random.seed(11235)
        nanops._USE_BOTTLENECK = False
        # Base 3-D arrays of every dtype family exercised by the nanops.
        self.arr_shape = (11, 7, 5)
        self.arr_float = np.random.randn(*self.arr_shape)
        self.arr_float1 = np.random.randn(*self.arr_shape)
        self.arr_complex = self.arr_float + self.arr_float1 * 1j
        self.arr_int = np.random.randint(-10, 10, self.arr_shape)
        self.arr_bool = np.random.randint(0, 2, self.arr_shape) == 0
        self.arr_str = np.abs(self.arr_float).astype('S')
        self.arr_utf = np.abs(self.arr_float).astype('U')
        self.arr_date = np.random.randint(0, 20000,
                                          self.arr_shape).astype('M8[ns]')
        self.arr_tdelta = np.random.randint(0, 20000,
                                            self.arr_shape).astype('m8[ns]')
        # Stacked variants mixing NaN and inf with the base data; the name
        # encodes the stacking order (e.g. arr_float_nan = floats over NaNs).
        self.arr_nan = np.tile(np.nan, self.arr_shape)
        self.arr_float_nan = np.vstack([self.arr_float, self.arr_nan])
        self.arr_float1_nan = np.vstack([self.arr_float1, self.arr_nan])
        self.arr_nan_float1 = np.vstack([self.arr_nan, self.arr_float1])
        self.arr_nan_nan = np.vstack([self.arr_nan, self.arr_nan])
        self.arr_inf = self.arr_float * np.inf
        self.arr_float_inf = np.vstack([self.arr_float, self.arr_inf])
        self.arr_float1_inf = np.vstack([self.arr_float1, self.arr_inf])
        self.arr_inf_float1 = np.vstack([self.arr_inf, self.arr_float1])
        self.arr_inf_inf = np.vstack([self.arr_inf, self.arr_inf])
        self.arr_nan_inf = np.vstack([self.arr_nan, self.arr_inf])
        self.arr_float_nan_inf = np.vstack([self.arr_float, self.arr_nan,
                                            self.arr_inf])
        self.arr_nan_float1_inf = np.vstack([self.arr_float, self.arr_inf,
                                             self.arr_nan])
        self.arr_nan_nan_inf = np.vstack([self.arr_nan, self.arr_nan,
                                          self.arr_inf])
        self.arr_obj = np.vstack([self.arr_float.astype(
            'O'), self.arr_int.astype('O'), self.arr_bool.astype(
            'O'), self.arr_complex.astype('O'), self.arr_str.astype(
            'O'), self.arr_utf.astype('O'), self.arr_date.astype('O'),
            self.arr_tdelta.astype('O')])
        # Complex NaN/inf arithmetic raises invalid-value warnings; suppress.
        with np.errstate(invalid='ignore'):
            self.arr_nan_nanj = self.arr_nan + self.arr_nan * 1j
            self.arr_complex_nan = np.vstack([self.arr_complex,
                                              self.arr_nan_nanj])
            self.arr_nan_infj = self.arr_inf * 1j
            self.arr_complex_nan_infj = np.vstack([self.arr_complex,
                                                   self.arr_nan_infj])
        # 2-D slices of all of the above (first plane of the last axis).
        self.arr_float_2d = self.arr_float[:, :, 0]
        self.arr_float1_2d = self.arr_float1[:, :, 0]
        self.arr_complex_2d = self.arr_complex[:, :, 0]
        self.arr_int_2d = self.arr_int[:, :, 0]
        self.arr_bool_2d = self.arr_bool[:, :, 0]
        self.arr_str_2d = self.arr_str[:, :, 0]
        self.arr_utf_2d = self.arr_utf[:, :, 0]
        self.arr_date_2d = self.arr_date[:, :, 0]
        self.arr_tdelta_2d = self.arr_tdelta[:, :, 0]
        self.arr_nan_2d = self.arr_nan[:, :, 0]
        self.arr_float_nan_2d = self.arr_float_nan[:, :, 0]
        self.arr_float1_nan_2d = self.arr_float1_nan[:, :, 0]
        self.arr_nan_float1_2d = self.arr_nan_float1[:, :, 0]
        self.arr_nan_nan_2d = self.arr_nan_nan[:, :, 0]
        self.arr_nan_nanj_2d = self.arr_nan_nanj[:, :, 0]
        self.arr_complex_nan_2d = self.arr_complex_nan[:, :, 0]
        self.arr_inf_2d = self.arr_inf[:, :, 0]
        self.arr_float_inf_2d = self.arr_float_inf[:, :, 0]
        self.arr_nan_inf_2d = self.arr_nan_inf[:, :, 0]
        self.arr_float_nan_inf_2d = self.arr_float_nan_inf[:, :, 0]
        self.arr_nan_nan_inf_2d = self.arr_nan_nan_inf[:, :, 0]
        # 1-D slices (first column of the first plane).
        self.arr_float_1d = self.arr_float[:, 0, 0]
        self.arr_float1_1d = self.arr_float1[:, 0, 0]
        self.arr_complex_1d = self.arr_complex[:, 0, 0]
        self.arr_int_1d = self.arr_int[:, 0, 0]
        self.arr_bool_1d = self.arr_bool[:, 0, 0]
        self.arr_str_1d = self.arr_str[:, 0, 0]
        self.arr_utf_1d = self.arr_utf[:, 0, 0]
        self.arr_date_1d = self.arr_date[:, 0, 0]
        self.arr_tdelta_1d = self.arr_tdelta[:, 0, 0]
        self.arr_nan_1d = self.arr_nan[:, 0, 0]
        self.arr_float_nan_1d = self.arr_float_nan[:, 0, 0]
        self.arr_float1_nan_1d = self.arr_float1_nan[:, 0, 0]
        self.arr_nan_float1_1d = self.arr_nan_float1[:, 0, 0]
        self.arr_nan_nan_1d = self.arr_nan_nan[:, 0, 0]
        self.arr_nan_nanj_1d = self.arr_nan_nanj[:, 0, 0]
        self.arr_complex_nan_1d = self.arr_complex_nan[:, 0, 0]
        self.arr_inf_1d = self.arr_inf.ravel()
        self.arr_float_inf_1d = self.arr_float_inf[:, 0, 0]
        self.arr_nan_inf_1d = self.arr_nan_inf[:, 0, 0]
        self.arr_float_nan_inf_1d = self.arr_float_nan_inf[:, 0, 0]
        self.arr_nan_nan_inf_1d = self.arr_nan_nan_inf[:, 0, 0]
    def tearDown(self):
        # Restore the module-level bottleneck switch disabled in setUp.
        nanops._USE_BOTTLENECK = use_bn
    def check_results(self, targ, res, axis, check_dtype=True):
        """Compare a nanop result ``res`` against the target ``targ``,
        coercing timedelta/complex/object results as needed before the
        almost-equal comparison."""
        # Unwrap Timestamp-like (.asm8) and Series-like (.values) results.
        res = getattr(res, 'asm8', res)
        res = getattr(res, 'values', res)
        # timedeltas are a beast here
        def _coerce_tds(targ, res):
            # View m8[ns] data as scalars/int64 so they compare cleanly.
            if hasattr(targ, 'dtype') and targ.dtype == 'm8[ns]':
                if len(targ) == 1:
                    targ = targ[0].item()
                    res = res.item()
                else:
                    targ = targ.view('i8')
            return targ, res
        try:
            # Trim res when a reduction over axis != 0 left extra rows.
            if axis != 0 and hasattr(
                    targ, 'shape') and targ.ndim and targ.shape != res.shape:
                res = np.split(res, [targ.shape[0]], axis=0)[0]
        except:
            targ, res = _coerce_tds(targ, res)
        try:
            tm.assert_almost_equal(targ, res, check_dtype=check_dtype)
        except:
            # handle timedelta dtypes
            if hasattr(targ, 'dtype') and targ.dtype == 'm8[ns]':
                targ, res = _coerce_tds(targ, res)
                tm.assert_almost_equal(targ, res, check_dtype=check_dtype)
                return
            # There are sometimes rounding errors with
            # complex and object dtypes.
            # If it isn't one of those, re-raise the error.
            if not hasattr(res, 'dtype') or res.dtype.kind not in ['c', 'O']:
                raise
            # convert object dtypes to something that can be split into
            # real and imaginary parts
            if res.dtype.kind == 'O':
                if targ.dtype.kind != 'O':
                    res = res.astype(targ.dtype)
                else:
                    try:
                        res = res.astype('c16')
                    except:
                        res = res.astype('f8')
                    try:
                        targ = targ.astype('c16')
                    except:
                        targ = targ.astype('f8')
            # there should never be a case where numpy returns an object
            # but nanops doesn't, so make that an exception
            elif targ.dtype.kind == 'O':
                raise
            # Compare real and imaginary parts separately to tolerate
            # component-wise rounding differences.
            tm.assert_almost_equal(targ.real, res.real,
                                   check_dtype=check_dtype)
            tm.assert_almost_equal(targ.imag, res.imag,
                                   check_dtype=check_dtype)
    def check_fun_data(self, testfunc, targfunc, testarval, targarval,
                       targarnanval, check_dtype=True, **kwargs):
        """Run ``testfunc`` (the nanop) against ``targfunc`` (the reference)
        over every axis (and axis=None), with and without skipna, then
        recurse on successively lower-dimensional slices of the data."""
        for axis in list(range(targarval.ndim)) + [None]:
            for skipna in [False, True]:
                # The reference sees NaN-free data when skipna is on.
                targartempval = targarval if skipna else targarnanval
                try:
                    targ = targfunc(targartempval, axis=axis, **kwargs)
                    res = testfunc(testarval, axis=axis, skipna=skipna,
                                   **kwargs)
                    self.check_results(targ, res, axis,
                                       check_dtype=check_dtype)
                    # Also exercise the defaulted-argument call forms.
                    if skipna:
                        res = testfunc(testarval, axis=axis, **kwargs)
                        self.check_results(targ, res, axis,
                                           check_dtype=check_dtype)
                    if axis is None:
                        res = testfunc(testarval, skipna=skipna, **kwargs)
                        self.check_results(targ, res, axis,
                                           check_dtype=check_dtype)
                    if skipna and axis is None:
                        res = testfunc(testarval, **kwargs)
                        self.check_results(targ, res, axis,
                                           check_dtype=check_dtype)
                except BaseException as exc:
                    # Annotate the failure with the parameter combination.
                    exc.args += ('axis: %s of %s' % (axis, testarval.ndim - 1),
                                 'skipna: %s' % skipna, 'kwargs: %s' % kwargs)
                    raise
        if testarval.ndim <= 1:
            return
        # Recurse on the first slice along the last axis, dropping one
        # dimension per level; stop if the arrays cannot be sliced alike.
        try:
            testarval2 = np.take(testarval, 0, axis=-1)
            targarval2 = np.take(targarval, 0, axis=-1)
            targarnanval2 = np.take(targarnanval, 0, axis=-1)
        except ValueError:
            return
        self.check_fun_data(testfunc, targfunc, testarval2, targarval2,
                            targarnanval2, check_dtype=check_dtype, **kwargs)
def check_fun(self, testfunc, targfunc, testar, targar=None,
targarnan=None, **kwargs):
if targar is None:
targar = testar
if targarnan is None:
targarnan = testar
testarval = getattr(self, testar)
targarval = getattr(self, targar)
targarnanval = getattr(self, targarnan)
try:
self.check_fun_data(testfunc, targfunc, testarval, targarval,
targarnanval, **kwargs)
except BaseException as exc:
exc.args += ('testar: %s' % testar, 'targar: %s' % targar,
'targarnan: %s' % targarnan)
raise
    def check_funs(self, testfunc, targfunc, allow_complex=True,
                   allow_all_nan=True, allow_str=True, allow_date=True,
                   allow_tdelta=True, allow_obj=True, **kwargs):
        """Run the full battery of dtype checks for one nanops function.

        The ``allow_*`` flags switch individual dtype families on or off.
        The families that were exercised are stacked into ``self.arr_obj``
        for a final object-dtype pass.  ``allow_obj='convert'`` wraps
        ``targfunc`` with ``_badobj_wrap`` so object input is cast to a
        numeric dtype before the numpy reference function sees it.
        """
        self.check_fun(testfunc, targfunc, 'arr_float', **kwargs)
        self.check_fun(testfunc, targfunc, 'arr_float_nan', 'arr_float',
                       **kwargs)
        self.check_fun(testfunc, targfunc, 'arr_int', **kwargs)
        self.check_fun(testfunc, targfunc, 'arr_bool', **kwargs)
        objs = [self.arr_float.astype('O'), self.arr_int.astype('O'),
                self.arr_bool.astype('O')]
        if allow_all_nan:
            self.check_fun(testfunc, targfunc, 'arr_nan', **kwargs)
        if allow_complex:
            self.check_fun(testfunc, targfunc, 'arr_complex', **kwargs)
            self.check_fun(testfunc, targfunc, 'arr_complex_nan',
                           'arr_complex', **kwargs)
            if allow_all_nan:
                self.check_fun(testfunc, targfunc, 'arr_nan_nanj', **kwargs)
            objs += [self.arr_complex.astype('O')]
        if allow_str:
            self.check_fun(testfunc, targfunc, 'arr_str', **kwargs)
            self.check_fun(testfunc, targfunc, 'arr_utf', **kwargs)
            objs += [self.arr_str.astype('O'), self.arr_utf.astype('O')]
        if allow_date:
            # only test datetimes if the reference function accepts them
            try:
                targfunc(self.arr_date)
            except TypeError:
                pass
            else:
                self.check_fun(testfunc, targfunc, 'arr_date', **kwargs)
                objs += [self.arr_date.astype('O')]
        if allow_tdelta:
            # only test timedeltas if the reference function accepts them
            try:
                targfunc(self.arr_tdelta)
            except TypeError:
                pass
            else:
                self.check_fun(testfunc, targfunc, 'arr_tdelta', **kwargs)
                objs += [self.arr_tdelta.astype('O')]
        if allow_obj:
            self.arr_obj = np.vstack(objs)
            # some nanops handle object dtypes better than their numpy
            # counterparts, so the numpy functions need to be given something
            # else
            if allow_obj == 'convert':
                targfunc = partial(self._badobj_wrap, func=targfunc,
                                   allow_complex=allow_complex)
            self.check_fun(testfunc, targfunc, 'arr_obj', **kwargs)
    def check_funs_ddof(self,
                        testfunc,
                        targfunc,
                        allow_complex=True,
                        allow_all_nan=True,
                        allow_str=True,
                        allow_date=False,
                        allow_tdelta=False,
                        allow_obj=True, ):
        """Run check_funs for ddof=0, 1 and 2.

        Used for var/std/sem-style functions that take a delta-degrees-of-
        freedom argument.
        """
        for ddof in range(3):
            try:
                self.check_funs(testfunc, targfunc, allow_complex,
                                allow_all_nan, allow_str, allow_date,
                                allow_tdelta, allow_obj, ddof=ddof)
            except BaseException as exc:
                # annotate the failure with the ddof that triggered it
                exc.args += ('ddof %s' % ddof, )
                raise
def _badobj_wrap(self, value, func, allow_complex=True, **kwargs):
if value.dtype.kind == 'O':
if allow_complex:
value = value.astype('c16')
else:
value = value.astype('f8')
return func(value, **kwargs)
    def test_nanany(self):
        # nanany vs np.any; all-NaN/str/date/tdelta inputs are unsupported
        self.check_funs(nanops.nanany, np.any, allow_all_nan=False,
                        allow_str=False, allow_date=False, allow_tdelta=False)
    def test_nanall(self):
        # nanall vs np.all under the same restrictions as nanany
        self.check_funs(nanops.nanall, np.all, allow_all_nan=False,
                        allow_str=False, allow_date=False, allow_tdelta=False)
    def test_nansum(self):
        # dtype check disabled: nansum may upcast relative to np.sum
        self.check_funs(nanops.nansum, np.sum, allow_str=False,
                        allow_date=False, allow_tdelta=True, check_dtype=False)
    def test_nanmean(self):
        # nanmean vs np.mean; complex/object input is not supported
        self.check_funs(nanops.nanmean, np.mean, allow_complex=False,
                        allow_obj=False, allow_str=False, allow_date=False,
                        allow_tdelta=True)
    def test_nanmean_overflow(self):
        """Mean of large int64 values must not overflow (GH 10155)."""
        # GH 10155
        # In the previous implementation mean can overflow for int dtypes, it
        # is now consistent with numpy
        # numpy < 1.9.0 is not computing this correctly
        from distutils.version import LooseVersion
        if LooseVersion(np.__version__) >= '1.9.0':
            for a in [2 ** 55, -2 ** 55, 20150515061816532]:
                # a constant series: its mean must equal the constant exactly
                s = Series(a, index=range(500), dtype=np.int64)
                result = s.mean()
                np_result = s.values.mean()
                self.assertEqual(result, a)
                self.assertEqual(result, np_result)
                self.assertTrue(result.dtype == np.float64)
    def test_returned_dtype(self):
        """Reductions on int dtypes must upcast to float64 for the
        statistics in group_a, and preserve the dtype otherwise."""
        dtypes = [np.int16, np.int32, np.int64, np.float32, np.float64]
        if hasattr(np, 'float128'):
            # float128 is platform-dependent
            dtypes.append(np.float128)
        for dtype in dtypes:
            s = Series(range(10), dtype=dtype)
            # group_a reductions upcast ints; group_b preserves the dtype
            group_a = ['mean', 'std', 'var', 'skew', 'kurt']
            group_b = ['min', 'max']
            for method in group_a + group_b:
                result = getattr(s, method)()
                if is_integer_dtype(dtype) and method in group_a:
                    self.assertTrue(
                        result.dtype == np.float64,
                        "return dtype expected from %s is np.float64, "
                        "got %s instead" % (method, result.dtype))
                else:
                    self.assertTrue(
                        result.dtype == dtype,
                        "return dtype expected from %s is %s, "
                        "got %s instead" % (method, dtype, result.dtype))
    def test_nanmedian(self):
        # np.median warns on NaN input; suppress so only results are compared
        with warnings.catch_warnings(record=True):
            self.check_funs(nanops.nanmedian, np.median, allow_complex=False,
                            allow_str=False, allow_date=False,
                            allow_tdelta=True, allow_obj='convert')
    def test_nanvar(self):
        # variance across ddof 0..2 vs np.var
        self.check_funs_ddof(nanops.nanvar, np.var, allow_complex=False,
                             allow_str=False, allow_date=False,
                             allow_tdelta=True, allow_obj='convert')
    def test_nanstd(self):
        # standard deviation across ddof 0..2 vs np.std
        self.check_funs_ddof(nanops.nanstd, np.std, allow_complex=False,
                             allow_str=False, allow_date=False,
                             allow_tdelta=True, allow_obj='convert')
    def test_nansem(self):
        # standard error of the mean vs scipy.stats.sem (skipped without scipy)
        tm.skip_if_no_package('scipy.stats')
        tm._skip_if_scipy_0_17()
        from scipy.stats import sem
        self.check_funs_ddof(nanops.nansem, sem, allow_complex=False,
                             allow_str=False, allow_date=False,
                             allow_tdelta=True, allow_obj='convert')
def _minmax_wrap(self, value, axis=None, func=None):
res = func(value, axis)
if res.dtype.kind == 'm':
res = np.atleast_1d(res)
return res
    def test_nanmin(self):
        # nanmin vs np.min (via _minmax_wrap for timedelta normalisation)
        func = partial(self._minmax_wrap, func=np.min)
        self.check_funs(nanops.nanmin, func, allow_str=False, allow_obj=False)
    def test_nanmax(self):
        # nanmax vs np.max (via _minmax_wrap for timedelta normalisation)
        func = partial(self._minmax_wrap, func=np.max)
        self.check_funs(nanops.nanmax, func, allow_str=False, allow_obj=False)
def _argminmax_wrap(self, value, axis=None, func=None):
res = func(value, axis)
nans = np.min(value, axis)
nullnan = isnull(nans)
if res.ndim:
res[nullnan] = -1
elif (hasattr(nullnan, 'all') and nullnan.all() or
not hasattr(nullnan, 'all') and nullnan):
res = -1
return res
    def test_nanargmax(self):
        # nanargmax vs np.argmax (with -1 marking all-NaN slices)
        func = partial(self._argminmax_wrap, func=np.argmax)
        self.check_funs(nanops.nanargmax, func, allow_str=False,
                        allow_obj=False, allow_date=True, allow_tdelta=True)
    def test_nanargmin(self):
        # nanargmin vs np.argmin (with -1 marking all-NaN slices)
        func = partial(self._argminmax_wrap, func=np.argmin)
        # NOTE(review): both branches pass the same flags; presumably this
        # once worked around a Python 2.6 difference -- confirm before
        # collapsing the branch
        if tm.sys.version_info[0:2] == (2, 6):
            self.check_funs(nanops.nanargmin, func, allow_date=True,
                            allow_tdelta=True, allow_str=False,
                            allow_obj=False)
        else:
            self.check_funs(nanops.nanargmin, func, allow_str=False,
                            allow_obj=False)
def _skew_kurt_wrap(self, values, axis=None, func=None):
if not isinstance(values.dtype.type, np.floating):
values = values.astype('f8')
result = func(values, axis=axis, bias=False)
# fix for handling cases where all elements in an axis are the same
if isinstance(result, np.ndarray):
result[np.max(values, axis=axis) == np.min(values, axis=axis)] = 0
return result
elif np.max(values) == np.min(values):
return 0.
return result
    def test_nanskew(self):
        # nanskew vs scipy.stats.skew (bias-corrected, constant slices -> 0)
        tm.skip_if_no_package('scipy.stats')
        tm._skip_if_scipy_0_17()
        from scipy.stats import skew
        func = partial(self._skew_kurt_wrap, func=skew)
        self.check_funs(nanops.nanskew, func, allow_complex=False,
                        allow_str=False, allow_date=False, allow_tdelta=False)
    def test_nankurt(self):
        # nankurt vs scipy.stats.kurtosis with Fisher's definition
        tm.skip_if_no_package('scipy.stats')
        tm._skip_if_scipy_0_17()
        from scipy.stats import kurtosis
        func1 = partial(kurtosis, fisher=True)
        func = partial(self._skew_kurt_wrap, func=func1)
        self.check_funs(nanops.nankurt, func, allow_complex=False,
                        allow_str=False, allow_date=False, allow_tdelta=False)
    def test_nanprod(self):
        # nanprod vs np.prod
        self.check_funs(nanops.nanprod, np.prod, allow_str=False,
                        allow_date=False, allow_tdelta=False)
    def check_nancorr_nancov_2d(self, checkfun, targ0, targ1, **kwargs):
        """Check a corr/cov function on 2d fixtures.

        ``targ0`` is the expected value on fully-valid data, ``targ1`` on
        the NaN-bearing variants; NaN is expected whenever an input column
        is all-NaN or min_periods exceeds the available observations.
        """
        res00 = checkfun(self.arr_float_2d, self.arr_float1_2d, **kwargs)
        res01 = checkfun(self.arr_float_2d, self.arr_float1_2d,
                         min_periods=len(self.arr_float_2d) - 1, **kwargs)
        tm.assert_almost_equal(targ0, res00)
        tm.assert_almost_equal(targ0, res01)
        res10 = checkfun(self.arr_float_nan_2d, self.arr_float1_nan_2d,
                         **kwargs)
        res11 = checkfun(self.arr_float_nan_2d, self.arr_float1_nan_2d,
                         min_periods=len(self.arr_float_2d) - 1, **kwargs)
        tm.assert_almost_equal(targ1, res10)
        tm.assert_almost_equal(targ1, res11)
        targ2 = np.nan
        # any all-NaN operand, or an unsatisfiable min_periods, yields NaN
        res20 = checkfun(self.arr_nan_2d, self.arr_float1_2d, **kwargs)
        res21 = checkfun(self.arr_float_2d, self.arr_nan_2d, **kwargs)
        res22 = checkfun(self.arr_nan_2d, self.arr_nan_2d, **kwargs)
        res23 = checkfun(self.arr_float_nan_2d, self.arr_nan_float1_2d,
                         **kwargs)
        res24 = checkfun(self.arr_float_nan_2d, self.arr_nan_float1_2d,
                         min_periods=len(self.arr_float_2d) - 1, **kwargs)
        res25 = checkfun(self.arr_float_2d, self.arr_float1_2d,
                         min_periods=len(self.arr_float_2d) + 1, **kwargs)
        tm.assert_almost_equal(targ2, res20)
        tm.assert_almost_equal(targ2, res21)
        tm.assert_almost_equal(targ2, res22)
        tm.assert_almost_equal(targ2, res23)
        tm.assert_almost_equal(targ2, res24)
        tm.assert_almost_equal(targ2, res25)
    def check_nancorr_nancov_1d(self, checkfun, targ0, targ1, **kwargs):
        """Check a corr/cov function on 1d fixtures.

        Mirrors check_nancorr_nancov_2d: ``targ0`` for fully-valid data,
        ``targ1`` for NaN-bearing variants, NaN for all-NaN operands or an
        unsatisfiable min_periods.
        """
        res00 = checkfun(self.arr_float_1d, self.arr_float1_1d, **kwargs)
        res01 = checkfun(self.arr_float_1d, self.arr_float1_1d,
                         min_periods=len(self.arr_float_1d) - 1, **kwargs)
        tm.assert_almost_equal(targ0, res00)
        tm.assert_almost_equal(targ0, res01)
        res10 = checkfun(self.arr_float_nan_1d, self.arr_float1_nan_1d,
                         **kwargs)
        res11 = checkfun(self.arr_float_nan_1d, self.arr_float1_nan_1d,
                         min_periods=len(self.arr_float_1d) - 1, **kwargs)
        tm.assert_almost_equal(targ1, res10)
        tm.assert_almost_equal(targ1, res11)
        targ2 = np.nan
        # any all-NaN operand, or an unsatisfiable min_periods, yields NaN
        res20 = checkfun(self.arr_nan_1d, self.arr_float1_1d, **kwargs)
        res21 = checkfun(self.arr_float_1d, self.arr_nan_1d, **kwargs)
        res22 = checkfun(self.arr_nan_1d, self.arr_nan_1d, **kwargs)
        res23 = checkfun(self.arr_float_nan_1d, self.arr_nan_float1_1d,
                         **kwargs)
        res24 = checkfun(self.arr_float_nan_1d, self.arr_nan_float1_1d,
                         min_periods=len(self.arr_float_1d) - 1, **kwargs)
        res25 = checkfun(self.arr_float_1d, self.arr_float1_1d,
                         min_periods=len(self.arr_float_1d) + 1, **kwargs)
        tm.assert_almost_equal(targ2, res20)
        tm.assert_almost_equal(targ2, res21)
        tm.assert_almost_equal(targ2, res22)
        tm.assert_almost_equal(targ2, res23)
        tm.assert_almost_equal(targ2, res24)
        tm.assert_almost_equal(targ2, res25)
    def test_nancorr(self):
        # 2d case exercises nancorr's default method; 1d passes it explicitly
        targ0 = np.corrcoef(self.arr_float_2d, self.arr_float1_2d)[0, 1]
        targ1 = np.corrcoef(self.arr_float_2d.flat,
                            self.arr_float1_2d.flat)[0, 1]
        self.check_nancorr_nancov_2d(nanops.nancorr, targ0, targ1)
        targ0 = np.corrcoef(self.arr_float_1d, self.arr_float1_1d)[0, 1]
        targ1 = np.corrcoef(self.arr_float_1d.flat,
                            self.arr_float1_1d.flat)[0, 1]
        self.check_nancorr_nancov_1d(nanops.nancorr, targ0, targ1,
                                     method='pearson')
    def test_nancorr_pearson(self):
        # explicit method='pearson' must match np.corrcoef
        targ0 = np.corrcoef(self.arr_float_2d, self.arr_float1_2d)[0, 1]
        targ1 = np.corrcoef(self.arr_float_2d.flat,
                            self.arr_float1_2d.flat)[0, 1]
        self.check_nancorr_nancov_2d(nanops.nancorr, targ0, targ1,
                                     method='pearson')
        targ0 = np.corrcoef(self.arr_float_1d, self.arr_float1_1d)[0, 1]
        targ1 = np.corrcoef(self.arr_float_1d.flat,
                            self.arr_float1_1d.flat)[0, 1]
        self.check_nancorr_nancov_1d(nanops.nancorr, targ0, targ1,
                                     method='pearson')
    def test_nancorr_kendall(self):
        # method='kendall' must match scipy.stats.kendalltau
        tm.skip_if_no_package('scipy.stats')
        from scipy.stats import kendalltau
        targ0 = kendalltau(self.arr_float_2d, self.arr_float1_2d)[0]
        targ1 = kendalltau(self.arr_float_2d.flat, self.arr_float1_2d.flat)[0]
        self.check_nancorr_nancov_2d(nanops.nancorr, targ0, targ1,
                                     method='kendall')
        targ0 = kendalltau(self.arr_float_1d, self.arr_float1_1d)[0]
        targ1 = kendalltau(self.arr_float_1d.flat, self.arr_float1_1d.flat)[0]
        self.check_nancorr_nancov_1d(nanops.nancorr, targ0, targ1,
                                     method='kendall')
    def test_nancorr_spearman(self):
        # method='spearman' must match scipy.stats.spearmanr
        tm.skip_if_no_package('scipy.stats')
        from scipy.stats import spearmanr
        targ0 = spearmanr(self.arr_float_2d, self.arr_float1_2d)[0]
        targ1 = spearmanr(self.arr_float_2d.flat, self.arr_float1_2d.flat)[0]
        self.check_nancorr_nancov_2d(nanops.nancorr, targ0, targ1,
                                     method='spearman')
        targ0 = spearmanr(self.arr_float_1d, self.arr_float1_1d)[0]
        targ1 = spearmanr(self.arr_float_1d.flat, self.arr_float1_1d.flat)[0]
        self.check_nancorr_nancov_1d(nanops.nancorr, targ0, targ1,
                                     method='spearman')
    def test_nancov(self):
        # nancov must match np.cov on valid data
        targ0 = np.cov(self.arr_float_2d, self.arr_float1_2d)[0, 1]
        targ1 = np.cov(self.arr_float_2d.flat, self.arr_float1_2d.flat)[0, 1]
        self.check_nancorr_nancov_2d(nanops.nancov, targ0, targ1)
        targ0 = np.cov(self.arr_float_1d, self.arr_float1_1d)[0, 1]
        targ1 = np.cov(self.arr_float_1d.flat, self.arr_float1_1d.flat)[0, 1]
        self.check_nancorr_nancov_1d(nanops.nancov, targ0, targ1)
    def check_nancomp(self, checkfun, targ0):
        """Check a nan-aware comparison function against expected ``targ0``.

        The loop repeatedly drops the last axis (via np.take) and re-checks,
        so the function is validated at every dimensionality down to 1d.
        NaN positions are expected to compare False/NaN-propagating per the
        fixture arrays.
        """
        arr_float = self.arr_float
        arr_float1 = self.arr_float1
        arr_nan = self.arr_nan
        arr_nan_nan = self.arr_nan_nan
        arr_float_nan = self.arr_float_nan
        arr_float1_nan = self.arr_float1_nan
        arr_nan_float1 = self.arr_nan_float1
        while targ0.ndim:
            try:
                res0 = checkfun(arr_float, arr_float1)
                tm.assert_almost_equal(targ0, res0)
                # expected result with a NaN block appended
                if targ0.ndim > 1:
                    targ1 = np.vstack([targ0, arr_nan])
                else:
                    targ1 = np.hstack([targ0, arr_nan])
                res1 = checkfun(arr_float_nan, arr_float1_nan)
                tm.assert_numpy_array_equal(targ1, res1, check_dtype=False)
                # NaN compared against NaN
                targ2 = arr_nan_nan
                res2 = checkfun(arr_float_nan, arr_nan_float1)
                tm.assert_numpy_array_equal(targ2, res2, check_dtype=False)
            except Exception as exc:
                # annotate the failure with the dimensionality being checked
                exc.args += ('ndim: %s' % arr_float.ndim, )
                raise
            # drop the last axis on every fixture and re-check; ValueError
            # from np.take means we cannot reduce any further
            try:
                arr_float = np.take(arr_float, 0, axis=-1)
                arr_float1 = np.take(arr_float1, 0, axis=-1)
                arr_nan = np.take(arr_nan, 0, axis=-1)
                arr_nan_nan = np.take(arr_nan_nan, 0, axis=-1)
                arr_float_nan = np.take(arr_float_nan, 0, axis=-1)
                arr_float1_nan = np.take(arr_float1_nan, 0, axis=-1)
                arr_nan_float1 = np.take(arr_nan_float1, 0, axis=-1)
                targ0 = np.take(targ0, 0, axis=-1)
            except ValueError:
                break
    def test_nangt(self):
        # nan-aware > must match elementwise comparison on valid data
        targ0 = self.arr_float > self.arr_float1
        self.check_nancomp(nanops.nangt, targ0)
    def test_nange(self):
        # nan-aware >=
        targ0 = self.arr_float >= self.arr_float1
        self.check_nancomp(nanops.nange, targ0)
    def test_nanlt(self):
        # nan-aware <
        targ0 = self.arr_float < self.arr_float1
        self.check_nancomp(nanops.nanlt, targ0)
    def test_nanle(self):
        # nan-aware <=
        targ0 = self.arr_float <= self.arr_float1
        self.check_nancomp(nanops.nanle, targ0)
    def test_naneq(self):
        # nan-aware ==
        targ0 = self.arr_float == self.arr_float1
        self.check_nancomp(nanops.naneq, targ0)
    def test_nanne(self):
        # nan-aware !=
        targ0 = self.arr_float != self.arr_float1
        self.check_nancomp(nanops.nanne, targ0)
    def check_bool(self, func, value, correct, *args, **kwargs):
        """Assert ``func(value)`` is truthy iff ``correct``, repeatedly
        dropping the last axis so every dimensionality is exercised.

        Non-array values (no ``ndim``) are checked exactly once.
        """
        while getattr(value, 'ndim', True):
            try:
                res0 = func(value, *args, **kwargs)
                if correct:
                    self.assertTrue(res0)
                else:
                    self.assertFalse(res0)
            except BaseException as exc:
                # annotate the failure with the current dimensionality
                exc.args += ('dim: %s' % getattr(value, 'ndim', value), )
                raise
            if not hasattr(value, 'ndim'):
                break
            # reduce by one dimension; ValueError means we cannot go lower
            try:
                value = np.take(value, 0, axis=-1)
            except ValueError:
                break
    def test__has_infs(self):
        """nanops._has_infs must detect +/-inf (and infj) and nothing else."""
        # NOTE(review): ('arr_complex', False) appears twice in this list --
        # presumably one entry was meant to be a different fixture; confirm
        pairs = [('arr_complex', False), ('arr_int', False),
                 ('arr_bool', False), ('arr_str', False), ('arr_utf', False),
                 ('arr_complex', False), ('arr_complex_nan', False),
                 ('arr_nan_nanj', False), ('arr_nan_infj', True),
                 ('arr_complex_nan_infj', True)]
        pairs_float = [('arr_float', False), ('arr_nan', False),
                       ('arr_float_nan', False), ('arr_nan_nan', False),
                       ('arr_float_inf', True), ('arr_inf', True),
                       ('arr_nan_inf', True), ('arr_float_nan_inf', True),
                       ('arr_nan_nan_inf', True)]
        for arr, correct in pairs:
            val = getattr(self, arr)
            try:
                self.check_bool(nanops._has_infs, val, correct)
            except BaseException as exc:
                # annotate the failure with the fixture name
                exc.args += (arr, )
                raise
        for arr, correct in pairs_float:
            val = getattr(self, arr)
            try:
                # float fixtures are additionally checked at f4/f2 precision
                self.check_bool(nanops._has_infs, val, correct)
                self.check_bool(nanops._has_infs, val.astype('f4'), correct)
                self.check_bool(nanops._has_infs, val.astype('f2'), correct)
            except BaseException as exc:
                exc.args += (arr, )
                raise
    def test__isfinite(self):
        """Check nanops._isfinite flags non-finite entries per fixture.

        ``correct`` is the expected value of ``any(_isfinite(arr))`` --
        i.e. whether the fixture contains at least one flagged element.
        """
        # NOTE(review): ('arr_complex', False) appears twice in this list --
        # presumably one entry was meant to be a different fixture; confirm
        pairs = [('arr_complex', False), ('arr_int', False),
                 ('arr_bool', False), ('arr_str', False), ('arr_utf', False),
                 ('arr_complex', False), ('arr_complex_nan', True),
                 ('arr_nan_nanj', True), ('arr_nan_infj', True),
                 ('arr_complex_nan_infj', True)]
        pairs_float = [('arr_float', False), ('arr_nan', True),
                       ('arr_float_nan', True), ('arr_nan_nan', True),
                       ('arr_float_inf', True), ('arr_inf', True),
                       ('arr_nan_inf', True), ('arr_float_nan_inf', True),
                       ('arr_nan_nan_inf', True)]
        func1 = lambda x: np.any(nanops._isfinite(x).ravel())
        # TODO: unused?
        # func2 = lambda x: np.any(nanops._isfinite(x).values.ravel())
        for arr, correct in pairs:
            val = getattr(self, arr)
            try:
                self.check_bool(func1, val, correct)
            except BaseException as exc:
                # annotate the failure with the fixture name
                exc.args += (arr, )
                raise
        for arr, correct in pairs_float:
            val = getattr(self, arr)
            try:
                # float fixtures are additionally checked at f4/f2 precision
                self.check_bool(func1, val, correct)
                self.check_bool(func1, val.astype('f4'), correct)
                self.check_bool(func1, val.astype('f2'), correct)
            except BaseException as exc:
                exc.args += (arr, )
                raise
    def test__bn_ok_dtype(self):
        """bottleneck may be used for plain numeric/string dtypes, but not
        for datetime, timedelta or object dtypes."""
        self.assertTrue(nanops._bn_ok_dtype(self.arr_float.dtype, 'test'))
        self.assertTrue(nanops._bn_ok_dtype(self.arr_complex.dtype, 'test'))
        self.assertTrue(nanops._bn_ok_dtype(self.arr_int.dtype, 'test'))
        self.assertTrue(nanops._bn_ok_dtype(self.arr_bool.dtype, 'test'))
        self.assertTrue(nanops._bn_ok_dtype(self.arr_str.dtype, 'test'))
        self.assertTrue(nanops._bn_ok_dtype(self.arr_utf.dtype, 'test'))
        self.assertFalse(nanops._bn_ok_dtype(self.arr_date.dtype, 'test'))
        self.assertFalse(nanops._bn_ok_dtype(self.arr_tdelta.dtype, 'test'))
        self.assertFalse(nanops._bn_ok_dtype(self.arr_obj.dtype, 'test'))
class TestEnsureNumeric(tm.TestCase):
    """Unit tests for the nanops._ensure_numeric coercion helper."""
    def test_numeric_values(self):
        # scalars that are already numeric must pass through unchanged
        self.assertEqual(nanops._ensure_numeric(1), 1, 'Failed for int')
        self.assertEqual(nanops._ensure_numeric(1.1), 1.1, 'Failed for float')
        self.assertEqual(nanops._ensure_numeric(1 + 2j), 1 + 2j,
                         'Failed for complex')
    def test_ndarray(self):
        # numeric, object and numeric-string ndarrays are coerced; arrays of
        # arbitrary strings raise
        numeric = np.array([1, 2, 3])
        self.assertTrue(np.allclose(nanops._ensure_numeric(numeric), numeric),
                        'Failed for numeric ndarray')
        boxed = numeric.astype(object)
        self.assertTrue(np.allclose(nanops._ensure_numeric(boxed), numeric),
                        'Failed for object ndarray')
        digits = np.array(['1', '2', '3'], dtype=object)
        self.assertTrue(np.allclose(nanops._ensure_numeric(digits), numeric),
                        'Failed for convertible string ndarray')
        words = np.array(['foo', 'bar', 'baz'], dtype=object)
        self.assertRaises(ValueError, lambda: nanops._ensure_numeric(words))
    def test_convertable_values(self):
        # numeric-looking strings are parsed to numbers
        self.assertTrue(np.allclose(nanops._ensure_numeric('1'), 1.0),
                        'Failed for convertible integer string')
        self.assertTrue(np.allclose(nanops._ensure_numeric('1.1'), 1.1),
                        'Failed for convertible float string')
        self.assertTrue(np.allclose(nanops._ensure_numeric('1+1j'), 1 + 1j),
                        'Failed for convertible complex string')
    def test_non_convertable_values(self):
        # anything that cannot be interpreted as a number raises TypeError
        self.assertRaises(TypeError, lambda: nanops._ensure_numeric('foo'))
        self.assertRaises(TypeError, lambda: nanops._ensure_numeric({}))
        self.assertRaises(TypeError, lambda: nanops._ensure_numeric([]))
class TestNanvarFixedValues(tm.TestCase):
    """Tests nanvar/nanstd against samples with known variance (GH 10242)."""
    # xref GH10242
    def setUp(self):
        # Samples from a normal distribution.
        self.variance = variance = 3.0
        self.samples = self.prng.normal(scale=variance ** 0.5, size=100000)
    def test_nanvar_all_finite(self):
        # the sample variance should approximate the population variance
        samples = self.samples
        actual_variance = nanops.nanvar(samples)
        tm.assert_almost_equal(actual_variance, self.variance,
                               check_less_precise=2)
    def test_nanvar_nans(self):
        # interleave NaNs with the samples: skipna=True recovers the
        # variance, skipna=False propagates NaN
        samples = np.nan * np.ones(2 * self.samples.shape[0])
        samples[::2] = self.samples
        actual_variance = nanops.nanvar(samples, skipna=True)
        tm.assert_almost_equal(actual_variance, self.variance,
                               check_less_precise=2)
        actual_variance = nanops.nanvar(samples, skipna=False)
        tm.assert_almost_equal(actual_variance, np.nan, check_less_precise=2)
    def test_nanstd_nans(self):
        # same as test_nanvar_nans but for the standard deviation
        samples = np.nan * np.ones(2 * self.samples.shape[0])
        samples[::2] = self.samples
        actual_std = nanops.nanstd(samples, skipna=True)
        tm.assert_almost_equal(actual_std, self.variance ** 0.5,
                               check_less_precise=2)
        # BUG FIX: this previously called nanops.nanvar (a copy-paste from
        # test_nanvar_nans); it was masked because both nanvar and nanstd
        # return NaN here, but the intent is clearly to exercise nanstd.
        actual_std = nanops.nanstd(samples, skipna=False)
        tm.assert_almost_equal(actual_std, np.nan,
                               check_less_precise=2)
    def test_nanvar_axis(self):
        # Generate some sample data: one normal row, one uniform row.
        samples_norm = self.samples
        samples_unif = self.prng.uniform(size=samples_norm.shape[0])
        samples = np.vstack([samples_norm, samples_unif])
        actual_variance = nanops.nanvar(samples, axis=1)
        # variance of U(0,1) is 1/12
        tm.assert_almost_equal(actual_variance, np.array(
            [self.variance, 1.0 / 12]), check_less_precise=2)
    def test_nanvar_ddof(self):
        # check the ddof correction on uniform samples
        n = 5
        samples = self.prng.uniform(size=(10000, n + 1))
        samples[:, -1] = np.nan  # Force use of our own algorithm.
        variance_0 = nanops.nanvar(samples, axis=1, skipna=True, ddof=0).mean()
        variance_1 = nanops.nanvar(samples, axis=1, skipna=True, ddof=1).mean()
        variance_2 = nanops.nanvar(samples, axis=1, skipna=True, ddof=2).mean()
        # The unbiased estimate.
        var = 1.0 / 12
        tm.assert_almost_equal(variance_1, var,
                               check_less_precise=2)
        # The underestimated variance.
        tm.assert_almost_equal(variance_0, (n - 1.0) / n * var,
                               check_less_precise=2)
        # The overestimated variance.
        tm.assert_almost_equal(variance_2, (n - 1.0) / (n - 2.0) * var,
                               check_less_precise=2)
    def test_ground_truth(self):
        # Test against values that were precomputed with Numpy.
        samples = np.empty((4, 4))
        samples[:3, :3] = np.array([[0.97303362, 0.21869576, 0.55560287
                                     ], [0.72980153, 0.03109364, 0.99155171],
                                    [0.09317602, 0.60078248, 0.15871292]])
        samples[3] = samples[:, 3] = np.nan
        # Actual variances along axis=0, 1 for ddof=0, 1, 2
        variance = np.array([[[0.13762259, 0.05619224, 0.11568816
                               ], [0.20643388, 0.08428837, 0.17353224],
                              [0.41286776, 0.16857673, 0.34706449]],
                             [[0.09519783, 0.16435395, 0.05082054
                               ], [0.14279674, 0.24653093, 0.07623082],
                              [0.28559348, 0.49306186, 0.15246163]]])
        # Test nanvar.
        for axis in range(2):
            for ddof in range(3):
                var = nanops.nanvar(samples, skipna=True, axis=axis, ddof=ddof)
                tm.assert_almost_equal(var[:3], variance[axis, ddof])
                # the all-NaN row/column must stay NaN
                self.assertTrue(np.isnan(var[3]))
        # Test nanstd.
        for axis in range(2):
            for ddof in range(3):
                std = nanops.nanstd(samples, skipna=True, axis=axis, ddof=ddof)
                tm.assert_almost_equal(std[:3], variance[axis, ddof] ** 0.5)
                self.assertTrue(np.isnan(std[3]))
    def test_nanstd_roundoff(self):
        # Regression test for GH 10242 (test data taken from GH 10489). Ensure
        # that variance is stable.
        data = Series(766897346 * np.ones(10))
        for ddof in range(3):
            result = data.std(ddof=ddof)
            self.assertEqual(result, 0.0)
    @property
    def prng(self):
        # fixed seed so every test sees identical samples
        return np.random.RandomState(1234)
class TestNanskewFixedValues(tm.TestCase):
    """Tests nanskew against a fixed reference value (xref GH 11974)."""
    # xref GH 11974
    def setUp(self):
        # Test data + skewness value (computed with scipy.stats.skew)
        self.samples = np.sin(np.linspace(0, 1, 200))
        self.actual_skew = -0.1875895205961754
    def test_constant_series(self):
        # xref GH 11974
        # a constant series has zero skew by convention (not NaN)
        for val in [3075.2, 3075.3, 3075.5]:
            data = val * np.ones(300)
            skew = nanops.nanskew(data)
            self.assertEqual(skew, 0.0)
    def test_all_finite(self):
        # skew sign must match the tail direction of a beta distribution
        alpha, beta = 0.3, 0.1
        left_tailed = self.prng.beta(alpha, beta, size=100)
        self.assertLess(nanops.nanskew(left_tailed), 0)
        alpha, beta = 0.1, 0.3
        right_tailed = self.prng.beta(alpha, beta, size=100)
        self.assertGreater(nanops.nanskew(right_tailed), 0)
    def test_ground_truth(self):
        # exact agreement with the precomputed scipy value
        skew = nanops.nanskew(self.samples)
        self.assertAlmostEqual(skew, self.actual_skew)
    def test_axis(self):
        # per-row skew; the all-NaN row yields NaN
        samples = np.vstack([self.samples,
                             np.nan * np.ones(len(self.samples))])
        skew = nanops.nanskew(samples, axis=1)
        tm.assert_almost_equal(skew, np.array([self.actual_skew, np.nan]))
    def test_nans(self):
        # skipna=False propagates the NaN
        samples = np.hstack([self.samples, np.nan])
        skew = nanops.nanskew(samples, skipna=False)
        self.assertTrue(np.isnan(skew))
    def test_nans_skipna(self):
        # skipna=True ignores the NaN and matches the reference value
        samples = np.hstack([self.samples, np.nan])
        skew = nanops.nanskew(samples, skipna=True)
        tm.assert_almost_equal(skew, self.actual_skew)
    @property
    def prng(self):
        # fixed seed so every test sees identical samples
        return np.random.RandomState(1234)
class TestNankurtFixedValues(tm.TestCase):
    """Tests nankurt against a fixed reference value (xref GH 11974)."""
    # xref GH 11974
    def setUp(self):
        # Test data + kurtosis value (computed with scipy.stats.kurtosis)
        self.samples = np.sin(np.linspace(0, 1, 200))
        self.actual_kurt = -1.2058303433799713
    def test_constant_series(self):
        # xref GH 11974
        # a constant series has zero kurtosis by convention (not NaN)
        for val in [3075.2, 3075.3, 3075.5]:
            data = val * np.ones(300)
            kurt = nanops.nankurt(data)
            self.assertEqual(kurt, 0.0)
    def test_all_finite(self):
        # kurtosis sign checks on skewed beta distributions
        alpha, beta = 0.3, 0.1
        left_tailed = self.prng.beta(alpha, beta, size=100)
        self.assertLess(nanops.nankurt(left_tailed), 0)
        alpha, beta = 0.1, 0.3
        right_tailed = self.prng.beta(alpha, beta, size=100)
        self.assertGreater(nanops.nankurt(right_tailed), 0)
    def test_ground_truth(self):
        # exact agreement with the precomputed scipy value
        kurt = nanops.nankurt(self.samples)
        self.assertAlmostEqual(kurt, self.actual_kurt)
    def test_axis(self):
        # per-row kurtosis; the all-NaN row yields NaN
        samples = np.vstack([self.samples,
                             np.nan * np.ones(len(self.samples))])
        kurt = nanops.nankurt(samples, axis=1)
        tm.assert_almost_equal(kurt, np.array([self.actual_kurt, np.nan]))
    def test_nans(self):
        # skipna=False propagates the NaN
        samples = np.hstack([self.samples, np.nan])
        kurt = nanops.nankurt(samples, skipna=False)
        self.assertTrue(np.isnan(kurt))
    def test_nans_skipna(self):
        # skipna=True ignores the NaN and matches the reference value
        samples = np.hstack([self.samples, np.nan])
        kurt = nanops.nankurt(samples, skipna=True)
        tm.assert_almost_equal(kurt, self.actual_kurt)
    @property
    def prng(self):
        # fixed seed so every test sees identical samples
        return np.random.RandomState(1234)
def test_int64_add_overflow():
    """_checked_add_with_arr must raise OverflowError on int64 overflow
    and underflow, for scalar and array addends alike (gh-14068)."""
    # see gh-14068
    msg = "Overflow in int64 addition"
    m = np.iinfo(np.int64).max
    n = np.iinfo(np.int64).min
    with tm.assertRaisesRegexp(OverflowError, msg):
        nanops._checked_add_with_arr(np.array([m, m]), m)
    with tm.assertRaisesRegexp(OverflowError, msg):
        nanops._checked_add_with_arr(np.array([m, m]), np.array([m, m]))
    with tm.assertRaisesRegexp(OverflowError, msg):
        nanops._checked_add_with_arr(np.array([n, n]), n)
    with tm.assertRaisesRegexp(OverflowError, msg):
        nanops._checked_add_with_arr(np.array([n, n]), np.array([n, n]))
    with tm.assertRaisesRegexp(OverflowError, msg):
        nanops._checked_add_with_arr(np.array([m, n]), np.array([n, n]))
    # NaN in the addend triggers a RuntimeWarning before the overflow check
    with tm.assertRaisesRegexp(OverflowError, msg):
        with tm.assert_produces_warning(RuntimeWarning):
            nanops._checked_add_with_arr(np.array([m, m]),
                                         np.array([np.nan, m]))
# Allow running this test module directly under nose, stopping at the first
# failure and dropping into pdb for debugging.
if __name__ == '__main__':
    import nose
    nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure', '-s'
                   ], exit=False)
| gpl-3.0 |
olivierdalang/QGIS | python/plugins/processing/algs/gdal/warp.py | 29 | 11677 | # -*- coding: utf-8 -*-
"""
***************************************************************************
warp.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
import os
from qgis.PyQt.QtGui import QIcon
from qgis.core import (QgsRasterFileWriter,
QgsProcessingException,
QgsProcessingParameterDefinition,
QgsProcessingParameterRasterLayer,
QgsProcessingParameterCrs,
QgsProcessingParameterString,
QgsProcessingParameterNumber,
QgsProcessingParameterEnum,
QgsProcessingParameterBoolean,
QgsProcessingParameterExtent,
QgsProcessingParameterString,
QgsProcessingParameterRasterDestination,
QgsProcessingUtils)
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.algs.gdal.GdalUtils import GdalUtils
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
class warp(GdalAlgorithm):
    """QGIS Processing wrapper around the GDAL ``gdalwarp`` utility
    (raster reprojection/resampling)."""
    # Parameter identifiers used by the Processing framework
    INPUT = 'INPUT'
    SOURCE_CRS = 'SOURCE_CRS'
    TARGET_CRS = 'TARGET_CRS'
    NODATA = 'NODATA'
    TARGET_RESOLUTION = 'TARGET_RESOLUTION'
    OPTIONS = 'OPTIONS'
    RESAMPLING = 'RESAMPLING'
    DATA_TYPE = 'DATA_TYPE'
    TARGET_EXTENT = 'TARGET_EXTENT'
    TARGET_EXTENT_CRS = 'TARGET_EXTENT_CRS'
    MULTITHREADING = 'MULTITHREADING'
    EXTRA = 'EXTRA'
    OUTPUT = 'OUTPUT'
    def __init__(self):
        # no extra state; defer entirely to GdalAlgorithm's initialisation
        super().__init__()
    def initAlgorithm(self, config=None):
        """Declare all input/output parameters for gdalwarp."""
        # (display name, gdalwarp -r argument) pairs
        self.methods = ((self.tr('Nearest Neighbour'), 'near'),
                        (self.tr('Bilinear'), 'bilinear'),
                        (self.tr('Cubic'), 'cubic'),
                        (self.tr('Cubic Spline'), 'cubicspline'),
                        (self.tr('Lanczos Windowed Sinc'), 'lanczos'),
                        (self.tr('Average'), 'average'),
                        (self.tr('Mode'), 'mode'),
                        (self.tr('Maximum'), 'max'),
                        (self.tr('Minimum'), 'min'),
                        (self.tr('Median'), 'med'),
                        (self.tr('First Quartile'), 'q1'),
                        (self.tr('Third Quartile'), 'q3'))
        # index 0 means "keep the input layer's data type"
        self.TYPES = [self.tr('Use Input Layer Data Type'), 'Byte', 'Int16', 'UInt16', 'UInt32', 'Int32', 'Float32', 'Float64', 'CInt16', 'CInt32', 'CFloat32', 'CFloat64']
        self.addParameter(QgsProcessingParameterRasterLayer(self.INPUT, self.tr('Input layer')))
        self.addParameter(QgsProcessingParameterCrs(self.SOURCE_CRS,
                                                    self.tr('Source CRS'),
                                                    optional=True))
        self.addParameter(QgsProcessingParameterCrs(self.TARGET_CRS,
                                                    self.tr('Target CRS'),
                                                    optional=True))
        self.addParameter(QgsProcessingParameterEnum(self.RESAMPLING,
                                                     self.tr('Resampling method to use'),
                                                     options=[i[0] for i in self.methods],
                                                     defaultValue=0))
        self.addParameter(QgsProcessingParameterNumber(self.NODATA,
                                                       self.tr('Nodata value for output bands'),
                                                       type=QgsProcessingParameterNumber.Double,
                                                       defaultValue=None,
                                                       optional=True))
        self.addParameter(QgsProcessingParameterNumber(self.TARGET_RESOLUTION,
                                                       self.tr('Output file resolution in target georeferenced units'),
                                                       type=QgsProcessingParameterNumber.Double,
                                                       minValue=0.0,
                                                       defaultValue=None,
                                                       optional=True))
        # parameters flagged FlagAdvanced below are hidden by default in the
        # algorithm dialog
        options_param = QgsProcessingParameterString(self.OPTIONS,
                                                     self.tr('Additional creation options'),
                                                     defaultValue='',
                                                     optional=True)
        options_param.setFlags(options_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        options_param.setMetadata({
            'widget_wrapper': {
                'class': 'processing.algs.gdal.ui.RasterOptionsWidget.RasterOptionsWidgetWrapper'}})
        self.addParameter(options_param)
        dataType_param = QgsProcessingParameterEnum(self.DATA_TYPE,
                                                    self.tr('Output data type'),
                                                    self.TYPES,
                                                    allowMultiple=False,
                                                    defaultValue=0)
        dataType_param.setFlags(dataType_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        self.addParameter(dataType_param)
        target_extent_param = QgsProcessingParameterExtent(self.TARGET_EXTENT,
                                                           self.tr('Georeferenced extents of output file to be created'),
                                                           optional=True)
        target_extent_param.setFlags(target_extent_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        self.addParameter(target_extent_param)
        target_extent_crs_param = QgsProcessingParameterCrs(self.TARGET_EXTENT_CRS,
                                                            self.tr('CRS of the target raster extent'),
                                                            optional=True)
        target_extent_crs_param.setFlags(target_extent_crs_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        self.addParameter(target_extent_crs_param)
        multithreading_param = QgsProcessingParameterBoolean(self.MULTITHREADING,
                                                             self.tr('Use multithreaded warping implementation'),
                                                             defaultValue=False)
        multithreading_param.setFlags(multithreading_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        self.addParameter(multithreading_param)
        extra_param = QgsProcessingParameterString(self.EXTRA,
                                                   self.tr('Additional command-line parameters'),
                                                   defaultValue=None,
                                                   optional=True)
        extra_param.setFlags(extra_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        self.addParameter(extra_param)
        self.addParameter(QgsProcessingParameterRasterDestination(self.OUTPUT,
                                                                  self.tr('Reprojected')))
    def name(self):
        """Unique algorithm id used internally by Processing."""
        return 'warpreproject'
    def displayName(self):
        """Human-readable name shown in the Processing toolbox."""
        return self.tr('Warp (reproject)')
    def group(self):
        """Human-readable group name."""
        return self.tr('Raster projections')
    def groupId(self):
        """Unique group id."""
        return 'rasterprojections'
    def icon(self):
        """Toolbox icon for this algorithm."""
        return QIcon(os.path.join(pluginPath, 'images', 'gdaltools', 'warp.png'))
    def commandName(self):
        """Name of the underlying GDAL executable."""
        return 'gdalwarp'
    def tags(self):
        """Search tags; extends the tags inherited from GdalAlgorithm."""
        tags = self.tr('transform,reproject,crs,srs').split(',')
        tags.extend(super().tags())
        return tags
def getConsoleCommands(self, parameters, context, feedback, executing=True):
    """Build the gdalwarp invocation for the given parameter values.

    Returns a two-element list: the executable name and the escaped,
    space-joined argument string.

    Raises QgsProcessingException when the input raster cannot be loaded.
    """
    inLayer = self.parameterAsRasterLayer(parameters, self.INPUT, context)
    if inLayer is None:
        raise QgsProcessingException(self.invalidRasterError(parameters, self.INPUT))

    sourceCrs = self.parameterAsCrs(parameters, self.SOURCE_CRS, context)
    targetCrs = self.parameterAsCrs(parameters, self.TARGET_CRS, context)
    # NODATA is optional: only read it when the caller actually supplied it.
    if self.NODATA in parameters and parameters[self.NODATA] is not None:
        nodata = self.parameterAsDouble(parameters, self.NODATA, context)
    else:
        nodata = None
    resolution = self.parameterAsDouble(parameters, self.TARGET_RESOLUTION, context)

    arguments = []
    if sourceCrs.isValid():
        arguments.append('-s_srs')
        arguments.append(GdalUtils.gdal_crs_string(sourceCrs))

    if targetCrs.isValid():
        arguments.append('-t_srs')
        arguments.append(GdalUtils.gdal_crs_string(targetCrs))

    if nodata is not None:
        arguments.append('-dstnodata')
        arguments.append(str(nodata))

    if resolution:
        # gdalwarp takes X and Y resolution; use the same value for both.
        arguments.append('-tr')
        arguments.append(str(resolution))
        arguments.append(str(resolution))

    arguments.append('-r')
    arguments.append(self.methods[self.parameterAsEnum(parameters, self.RESAMPLING, context)][1])

    extent = self.parameterAsExtent(parameters, self.TARGET_EXTENT, context)
    if not extent.isNull():
        arguments.append('-te')
        # BUG FIX: every other argument is appended as a string, and
        # GdalUtils.escapeAndJoin() joins strings, but the extent
        # coordinates were appended as raw floats.
        arguments.append(str(extent.xMinimum()))
        arguments.append(str(extent.yMinimum()))
        arguments.append(str(extent.xMaximum()))
        arguments.append(str(extent.yMaximum()))

        # Only meaningful together with -te, so keep it inside this branch.
        extentCrs = self.parameterAsCrs(parameters, self.TARGET_EXTENT_CRS, context)
        if extentCrs.isValid():
            arguments.append('-te_srs')
            arguments.append(GdalUtils.gdal_crs_string(extentCrs))

    if self.parameterAsBoolean(parameters, self.MULTITHREADING, context):
        arguments.append('-multi')

    data_type = self.parameterAsEnum(parameters, self.DATA_TYPE, context)
    if data_type:
        # Index 0 means "use input data type", so -ot is only emitted
        # for a non-zero selection.
        arguments.append('-ot ' + self.TYPES[data_type])

    out = self.parameterAsOutputLayer(parameters, self.OUTPUT, context)
    self.setOutputValue(self.OUTPUT, out)
    arguments.append('-of')
    arguments.append(QgsRasterFileWriter.driverForExtension(os.path.splitext(out)[1]))

    options = self.parameterAsString(parameters, self.OPTIONS, context)
    if options:
        arguments.extend(GdalUtils.parseCreationOptions(options))

    if self.EXTRA in parameters and parameters[self.EXTRA] not in (None, ''):
        extra = self.parameterAsString(parameters, self.EXTRA, context)
        arguments.append(extra)

    arguments.append(inLayer.source())
    arguments.append(out)
    return [self.commandName(), GdalUtils.escapeAndJoin(arguments)]
| gpl-2.0 |
kmonsoor/python-for-android | python3-alpha/python3-src/Tools/scripts/reindent.py | 48 | 11022 | #! /usr/bin/env python3
# Released to the public domain, by Tim Peters, 03 October 2000.
"""reindent [-d][-r][-v] [ path ... ]
-d (--dryrun) Dry run. Analyze, but don't make any changes to, files.
-r (--recurse) Recurse. Search for all .py files in subdirectories too.
-n (--nobackup) No backup. Does not make a ".bak" file before reindenting.
-v (--verbose) Verbose. Print informative msgs; else no output.
-h (--help) Help. Print this usage information and exit.
Change Python (.py) files to use 4-space indents and no hard tab characters.
Also trim excess spaces and tabs from ends of lines, and remove empty lines
at the end of files. Also ensure the last line ends with a newline.
If no paths are given on the command line, reindent operates as a filter,
reading a single source file from standard input and writing the transformed
source to standard output. In this case, the -d, -r and -v flags are
ignored.
You can pass one or more file and/or directory paths. When a directory
path, all .py files within the directory will be examined, and, if the -r
option is given, likewise recursively for subdirectories.
If output is not to standard output, reindent overwrites files in place,
renaming the originals with a .bak extension. If it finds nothing to
change, the file is left alone. If reindent does change a file, the changed
file is a fixed-point for future runs (i.e., running reindent on the
resulting .py file won't change it again).
The hard part of reindenting is figuring out what to do with comment
lines. So long as the input files get a clean bill of health from
tabnanny.py, reindent should do a good job.
The backup file is a copy of the one that is being reindented. The ".bak"
file is generated with shutil.copy(), but some corner cases regarding
user/group and permissions could leave the backup file more readable than
you'd prefer. You can always use the --nobackup option to prevent this.
"""
__version__ = "1"
import tokenize
import os
import shutil
import sys
verbose = False
recurse = False
dryrun = False
makebackup = True
def usage(msg=None):
    """Print *msg* to stderr; with no argument, print the module docstring."""
    print(__doc__ if msg is None else msg, file=sys.stderr)
def errprint(*args):
    """Write the space-joined string forms of *args* to stderr, newline-terminated."""
    sys.stderr.write(" ".join(str(arg) for arg in args) + "\n")
def main():
    """Parse command-line options, then reindent stdin or the given paths."""
    import getopt
    global verbose, recurse, dryrun, makebackup
    try:
        opts, args = getopt.getopt(
            sys.argv[1:], "drnvh",
            ["dryrun", "recurse", "nobackup", "verbose", "help"])
    except getopt.error as msg:
        usage(msg)
        return
    for opt, _ in opts:
        if opt in ('-h', '--help'):
            usage()
            return
        if opt in ('-d', '--dryrun'):
            dryrun = True
        elif opt in ('-r', '--recurse'):
            recurse = True
        elif opt in ('-n', '--nobackup'):
            makebackup = False
        elif opt in ('-v', '--verbose'):
            verbose = True
    if args:
        for path in args:
            check(path)
    else:
        # No paths given: act as a filter from stdin to stdout.
        reindenter = Reindenter(sys.stdin)
        reindenter.run()
        reindenter.write(sys.stdout)
def check(file):
    """Reindent *file*, recursing into it when it is a directory.

    Returns True when the file was rewritten, False when it was already
    clean, and None for directories or files that could not be processed.
    """
    if os.path.isdir(file) and not os.path.islink(file):
        if verbose:
            print("listing directory", file)
        for entry in os.listdir(file):
            fullname = os.path.join(file, entry)
            descend = (recurse and os.path.isdir(fullname) and
                       not os.path.islink(fullname) and
                       not os.path.split(fullname)[1].startswith("."))
            if descend or entry.lower().endswith(".py"):
                check(fullname)
        return

    if verbose:
        print("checking", file, "...", end=' ')
    with open(file, 'rb') as f:
        encoding, _ = tokenize.detect_encoding(f.readline)
    try:
        with open(file, encoding=encoding) as f:
            reindenter = Reindenter(f)
    except IOError as msg:
        errprint("%s: I/O Error: %s" % (file, str(msg)))
        return

    newline = reindenter.newlines
    if isinstance(newline, tuple):
        errprint("%s: mixed newlines detected; cannot process file" % file)
        return

    if not reindenter.run():
        if verbose:
            print("unchanged.")
        return False

    if verbose:
        print("changed.")
        if dryrun:
            print("But this is a dry run, so leaving it alone.")
    if not dryrun:
        bak = file + ".bak"
        if makebackup:
            shutil.copyfile(file, bak)
            if verbose:
                print("backed up", file, "to", bak)
        with open(file, "w", encoding=encoding, newline=newline) as f:
            reindenter.write(f)
        if verbose:
            print("wrote new", file)
    return True
def _rstrip(line, JUNK='\n \t'):
"""Return line stripped of trailing spaces, tabs, newlines.
Note that line.rstrip() instead also strips sundry control characters,
but at least one known Emacs user expects to keep junk like that, not
mentioning Barry by name or anything <wink>.
"""
i = len(line)
while i > 0 and line[i - 1] in JUNK:
i -= 1
return line[:i]
class Reindenter:
    """Reindents one Python source stream to 4-space indents.

    Feed a file-like object to the constructor, call run() to compute the
    transformation, then write() to emit the result.
    """

    def __init__(self, f):
        self.find_stmt = 1  # next token begins a fresh stmt?
        self.level = 0      # current indent level

        # Raw file lines.
        self.raw = f.readlines()

        # File lines, rstripped & tab-expanded.  Dummy at start is so
        # that we can use tokenize's 1-based line numbering easily.
        # Note that a line is all-blank iff it's "\n".
        self.lines = [_rstrip(line).expandtabs() + "\n"
                      for line in self.raw]
        self.lines.insert(0, None)
        self.index = 1  # index into self.lines of next line

        # List of (lineno, indentlevel) pairs, one for each stmt and
        # comment line.  indentlevel is -1 for comment lines, as a
        # signal that tokenize doesn't know what to do about them;
        # indeed, they're our headache!
        self.stats = []

        # Save the newlines found in the file so they can be used to
        # create output without mutating the newlines.
        self.newlines = f.newlines

    def run(self):
        """Tokenize the input and compute the reindented text.

        Returns True when the output differs from the input.
        """
        tokens = tokenize.generate_tokens(self.getline)
        for _token in tokens:
            self.tokeneater(*_token)
        # Remove trailing empty lines.
        lines = self.lines
        while lines and lines[-1] == "\n":
            lines.pop()
        # Sentinel.
        stats = self.stats
        stats.append((len(lines), 0))
        # Map count of leading spaces to # we want.
        have2want = {}
        # Program after transformation.
        after = self.after = []
        # Copy over initial empty lines -- there's nothing to do until
        # we see a line with *something* on it.
        i = stats[0][0]
        after.extend(lines[1:i])
        for i in range(len(stats) - 1):
            thisstmt, thislevel = stats[i]
            nextstmt = stats[i + 1][0]
            have = getlspace(lines[thisstmt])
            want = thislevel * 4
            if want < 0:
                # A comment line.
                if have:
                    # An indented comment line.  If we saw the same
                    # indentation before, reuse what it most recently
                    # mapped to.
                    want = have2want.get(have, -1)
                    if want < 0:
                        # Then it probably belongs to the next real stmt.
                        for j in range(i + 1, len(stats) - 1):
                            jline, jlevel = stats[j]
                            if jlevel >= 0:
                                if have == getlspace(lines[jline]):
                                    want = jlevel * 4
                                break
                    if want < 0:
                        # Maybe it's a hanging comment like this one,
                        # in which case we should shift it like its base
                        # line got shifted.
                        for j in range(i - 1, -1, -1):
                            jline, jlevel = stats[j]
                            if jlevel >= 0:
                                want = have + (getlspace(after[jline - 1]) -
                                               getlspace(lines[jline]))
                                break
                    if want < 0:
                        # Still no luck -- leave it alone.
                        want = have
                else:
                    want = 0
            assert want >= 0
            have2want[have] = want
            diff = want - have
            if diff == 0 or have == 0:
                after.extend(lines[thisstmt:nextstmt])
            else:
                for line in lines[thisstmt:nextstmt]:
                    if diff > 0:
                        if line == "\n":
                            after.append(line)
                        else:
                            after.append(" " * diff + line)
                    else:
                        remove = min(getlspace(line), -diff)
                        after.append(line[remove:])
        return self.raw != self.after

    def write(self, f):
        """Write the reindented source to file-like object *f*."""
        f.writelines(self.after)

    # Line-getter for tokenize.
    def getline(self):
        if self.index >= len(self.lines):
            line = ""
        else:
            line = self.lines[self.index]
            self.index += 1
        return line

    # Line-eater for tokenize.
    def tokeneater(self, type, token, slinecol, end, line,
                   INDENT=tokenize.INDENT,
                   DEDENT=tokenize.DEDENT,
                   NEWLINE=tokenize.NEWLINE,
                   COMMENT=tokenize.COMMENT,
                   NL=tokenize.NL):
        if type == NEWLINE:
            # A program statement, or ENDMARKER, will eventually follow,
            # after some (possibly empty) run of tokens of the form
            #     (NL | COMMENT)* (INDENT | DEDENT+)?
            self.find_stmt = 1
        elif type == INDENT:
            self.find_stmt = 1
            self.level += 1
        elif type == DEDENT:
            self.find_stmt = 1
            self.level -= 1
        elif type == COMMENT:
            if self.find_stmt:
                self.stats.append((slinecol[0], -1))
                # but we're still looking for a new stmt, so leave
                # find_stmt alone
        elif type == NL:
            pass
        elif self.find_stmt:
            # This is the first "real token" following a NEWLINE, so it
            # must be the first token of the next program statement, or an
            # ENDMARKER.
            self.find_stmt = 0
            if line:  # not endmarker
                self.stats.append((slinecol[0], self.level))
# Count number of leading blanks.
def getlspace(line):
    """Return the number of leading space characters in *line*."""
    return len(line) - len(line.lstrip(" "))
# Script entry point: reindent stdin or the paths given on the command line.
if __name__ == '__main__':
    main()
| apache-2.0 |
aidiary/keras_examples | imagenet/image_crawler.py | 1 | 2572 | import os
import time
import json
import random
import imghdr
import requests
IMAGE_URL_API = 'http://www.image-net.org/api/text/imagenet.synset.geturls?wnid='
OUTPUT_DIR = "images"
MAX_NUM_IMAGES_PER_CATEGORY = 100
def download_image(url, filename):
    """Download a JPEG image from *url* and save it as *filename*.

    Returns True on success.  Returns False when the request fails, the
    response is not OK, the payload is not JPEG data, or the payload is
    the 2051-byte Flickr "photo unavailable" placeholder (which is
    deleted after being written).
    """
    try:
        # Time-box the request so one dead host cannot stall the whole crawl.
        r = requests.get(url, timeout=30)
    except Exception:
        return False
    if not r.ok:
        return False
    # Reject anything that is not actually JPEG data (HTML error pages etc.).
    if imghdr.what(None, h=r.content) != "jpeg":
        return False
    with open(filename, 'wb') as fp:
        fp.write(r.content)
    # flickr error image: a fixed-size placeholder counts as a failure.
    if os.path.getsize(filename) == 2051:
        os.remove(filename)
        return False
    return True
# Crawl ImageNet: for each ILSVRC category, download up to
# MAX_NUM_IMAGES_PER_CATEGORY JPEG images into OUTPUT_DIR/<category>/.
if __name__ == '__main__':
    if not os.path.exists(OUTPUT_DIR):
        os.mkdir(OUTPUT_DIR)

    # Load the list of categories to collect images for.
    # The 1000 categories of ILSVRC2014:
    # http://image-net.org/challenges/LSVRC/2014/browse-synsets
    # https://github.com/fchollet/keras/blob/master/keras/applications/imagenet_utils.py
    with open('imagenet_class_index.json', 'r') as fp:
        class_list = json.load(fp)

    # Collect images for each category.
    for wnid, category in class_list.values():
        print("*** category = %s (%s)" % (category, wnid))

        # If the image directory already exists, this category was
        # collected on a previous run, so skip it.
        if os.path.exists(os.path.join(OUTPUT_DIR, category)):
            print("SKIP")
            continue

        # Fetch the list of image URLs belonging to this wnid from the API.
        r = requests.get(IMAGE_URL_API + wnid)
        if not r.ok:
            print("WARNING: cannot get image list: wnid = %s" % wnid)
            continue

        page = r.text
        image_url_list = page.rstrip().split('\r\n')
        random.shuffle(image_url_list)

        os.mkdir(os.path.join(OUTPUT_DIR, category))

        num_ok = 0
        for image_url in image_url_list:
            try:
                print("%s ... " % image_url, end='')
                filename = image_url.split('/')[-1]
                ret = download_image(image_url, os.path.join(OUTPUT_DIR, category, filename))
                if ret:
                    print("OK")
                    num_ok += 1
                    if num_ok == MAX_NUM_IMAGES_PER_CATEGORY:
                        break
                else:
                    print("NG")
                # Consecutive requests may hit the same domain, so sleep
                # a little between downloads.
                time.sleep(3)
            except Exception:
                continue
| mit |
hnoerdli/hussa | node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py | 1569 | 23354 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility functions to perform Xcode-style build steps.
These functions are executed via gyp-mac-tool when using the Makefile generator.
"""
import fcntl
import fnmatch
import glob
import json
import os
import plistlib
import re
import shutil
import string
import subprocess
import sys
import tempfile
def main(args):
  """Dispatch *args* to a MacTool command; exit with its status code if any."""
  status = MacTool().Dispatch(args)
  if status is not None:
    sys.exit(status)
class MacTool(object):
"""This class performs all the Mac tooling steps. The methods can either be
executed directly, or dispatched from an argument list."""
def Dispatch(self, args):
"""Dispatches a string command to a method."""
if len(args) < 1:
raise Exception("Not enough arguments")
method = "Exec%s" % self._CommandifyName(args[0])
return getattr(self, method)(*args[1:])
def _CommandifyName(self, name_string):
"""Transforms a tool name like copy-info-plist to CopyInfoPlist"""
return name_string.title().replace('-', '')
def ExecCopyBundleResource(self, source, dest, convert_to_binary):
"""Copies a resource file to the bundle/Resources directory, performing any
necessary compilation on each resource."""
extension = os.path.splitext(source)[1].lower()
if os.path.isdir(source):
# Copy tree.
# TODO(thakis): This copies file attributes like mtime, while the
# single-file branch below doesn't. This should probably be changed to
# be consistent with the single-file branch.
if os.path.exists(dest):
shutil.rmtree(dest)
shutil.copytree(source, dest)
elif extension == '.xib':
return self._CopyXIBFile(source, dest)
elif extension == '.storyboard':
return self._CopyXIBFile(source, dest)
elif extension == '.strings':
self._CopyStringsFile(source, dest, convert_to_binary)
else:
shutil.copy(source, dest)
def _CopyXIBFile(self, source, dest):
"""Compiles a XIB file with ibtool into a binary plist in the bundle."""
# ibtool sometimes crashes with relative paths. See crbug.com/314728.
base = os.path.dirname(os.path.realpath(__file__))
if os.path.relpath(source):
source = os.path.join(base, source)
if os.path.relpath(dest):
dest = os.path.join(base, dest)
args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
'--output-format', 'human-readable-text', '--compile', dest, source]
ibtool_section_re = re.compile(r'/\*.*\*/')
ibtool_re = re.compile(r'.*note:.*is clipping its content')
ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
current_section_header = None
for line in ibtoolout.stdout:
if ibtool_section_re.match(line):
current_section_header = line
elif not ibtool_re.match(line):
if current_section_header:
sys.stdout.write(current_section_header)
current_section_header = None
sys.stdout.write(line)
return ibtoolout.returncode
def _ConvertToBinary(self, dest):
subprocess.check_call([
'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
def _CopyStringsFile(self, source, dest, convert_to_binary):
"""Copies a .strings file using iconv to reconvert the input into UTF-16."""
input_code = self._DetectInputEncoding(source) or "UTF-8"
# Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
# CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
# CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
# semicolon in dictionary.
# on invalid files. Do the same kind of validation.
import CoreFoundation
s = open(source, 'rb').read()
d = CoreFoundation.CFDataCreate(None, s, len(s))
_, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
if error:
return
fp = open(dest, 'wb')
fp.write(s.decode(input_code).encode('UTF-16'))
fp.close()
if convert_to_binary == 'True':
self._ConvertToBinary(dest)
def _DetectInputEncoding(self, file_name):
"""Reads the first few bytes from file_name and tries to guess the text
encoding. Returns None as a guess if it can't detect it."""
fp = open(file_name, 'rb')
try:
header = fp.read(3)
except e:
fp.close()
return None
fp.close()
if header.startswith("\xFE\xFF"):
return "UTF-16"
elif header.startswith("\xFF\xFE"):
return "UTF-16"
elif header.startswith("\xEF\xBB\xBF"):
return "UTF-8"
else:
return None
def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
"""Copies the |source| Info.plist to the destination directory |dest|."""
# Read the source Info.plist into memory.
fd = open(source, 'r')
lines = fd.read()
fd.close()
# Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
plist = plistlib.readPlistFromString(lines)
if keys:
plist = dict(plist.items() + json.loads(keys[0]).items())
lines = plistlib.writePlistToString(plist)
# Go through all the environment variables and replace them as variables in
# the file.
IDENT_RE = re.compile(r'[/\s]')
for key in os.environ:
if key.startswith('_'):
continue
evar = '${%s}' % key
evalue = os.environ[key]
lines = string.replace(lines, evar, evalue)
# Xcode supports various suffices on environment variables, which are
# all undocumented. :rfc1034identifier is used in the standard project
# template these days, and :identifier was used earlier. They are used to
# convert non-url characters into things that look like valid urls --
# except that the replacement character for :identifier, '_' isn't valid
# in a URL either -- oops, hence :rfc1034identifier was born.
evar = '${%s:identifier}' % key
evalue = IDENT_RE.sub('_', os.environ[key])
lines = string.replace(lines, evar, evalue)
evar = '${%s:rfc1034identifier}' % key
evalue = IDENT_RE.sub('-', os.environ[key])
lines = string.replace(lines, evar, evalue)
# Remove any keys with values that haven't been replaced.
lines = lines.split('\n')
for i in range(len(lines)):
if lines[i].strip().startswith("<string>${"):
lines[i] = None
lines[i - 1] = None
lines = '\n'.join(filter(lambda x: x is not None, lines))
# Write out the file with variables replaced.
fd = open(dest, 'w')
fd.write(lines)
fd.close()
# Now write out PkgInfo file now that the Info.plist file has been
# "compiled".
self._WritePkgInfo(dest)
if convert_to_binary == 'True':
self._ConvertToBinary(dest)
def _WritePkgInfo(self, info_plist):
"""This writes the PkgInfo file from the data stored in Info.plist."""
plist = plistlib.readPlist(info_plist)
if not plist:
return
# Only create PkgInfo for executable types.
package_type = plist['CFBundlePackageType']
if package_type != 'APPL':
return
# The format of PkgInfo is eight characters, representing the bundle type
# and bundle signature, each four characters. If that is missing, four
# '?' characters are used instead.
signature_code = plist.get('CFBundleSignature', '????')
if len(signature_code) != 4: # Wrong length resets everything, too.
signature_code = '?' * 4
dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
fp = open(dest, 'w')
fp.write('%s%s' % (package_type, signature_code))
fp.close()
def ExecFlock(self, lockfile, *cmd_list):
"""Emulates the most basic behavior of Linux's flock(1)."""
# Rely on exception handling to report errors.
fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
fcntl.flock(fd, fcntl.LOCK_EX)
return subprocess.call(cmd_list)
def ExecFilterLibtool(self, *cmd_list):
"""Calls libtool and filters out '/path/to/libtool: file: foo.o has no
symbols'."""
libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
libtool_re5 = re.compile(
r'^.*libtool: warning for library: ' +
r'.* the table of contents is empty ' +
r'\(no object file members in the library define global symbols\)$')
env = os.environ.copy()
# Ref:
# http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
# The problem with this flag is that it resets the file mtime on the file to
# epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
env['ZERO_AR_DATE'] = '1'
libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
_, err = libtoolout.communicate()
for line in err.splitlines():
if not libtool_re.match(line) and not libtool_re5.match(line):
print >>sys.stderr, line
# Unconditionally touch the output .a file on the command line if present
# and the command succeeded. A bit hacky.
if not libtoolout.returncode:
for i in range(len(cmd_list) - 1):
if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
os.utime(cmd_list[i+1], None)
break
return libtoolout.returncode
def ExecPackageFramework(self, framework, version):
"""Takes a path to Something.framework and the Current version of that and
sets up all the symlinks."""
# Find the name of the binary based on the part before the ".framework".
binary = os.path.basename(framework).split('.')[0]
CURRENT = 'Current'
RESOURCES = 'Resources'
VERSIONS = 'Versions'
if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
# Binary-less frameworks don't seem to contain symlinks (see e.g.
# chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
return
# Move into the framework directory to set the symlinks correctly.
pwd = os.getcwd()
os.chdir(framework)
# Set up the Current version.
self._Relink(version, os.path.join(VERSIONS, CURRENT))
# Set up the root symlinks.
self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
# Back to where we were before!
os.chdir(pwd)
def _Relink(self, dest, link):
"""Creates a symlink to |dest| named |link|. If |link| already exists,
it is overwritten."""
if os.path.lexists(link):
os.remove(link)
os.symlink(dest, link)
def ExecCompileXcassets(self, keys, *inputs):
"""Compiles multiple .xcassets files into a single .car file.
This invokes 'actool' to compile all the inputs .xcassets files. The
|keys| arguments is a json-encoded dictionary of extra arguments to
pass to 'actool' when the asset catalogs contains an application icon
or a launch image.
Note that 'actool' does not create the Assets.car file if the asset
catalogs does not contains imageset.
"""
command_line = [
'xcrun', 'actool', '--output-format', 'human-readable-text',
'--compress-pngs', '--notices', '--warnings', '--errors',
]
is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
if is_iphone_target:
platform = os.environ['CONFIGURATION'].split('-')[-1]
if platform not in ('iphoneos', 'iphonesimulator'):
platform = 'iphonesimulator'
command_line.extend([
'--platform', platform, '--target-device', 'iphone',
'--target-device', 'ipad', '--minimum-deployment-target',
os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
])
else:
command_line.extend([
'--platform', 'macosx', '--target-device', 'mac',
'--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
'--compile',
os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
])
if keys:
keys = json.loads(keys)
for key, value in keys.iteritems():
arg_name = '--' + key
if isinstance(value, bool):
if value:
command_line.append(arg_name)
elif isinstance(value, list):
for v in value:
command_line.append(arg_name)
command_line.append(str(v))
else:
command_line.append(arg_name)
command_line.append(str(value))
# Note: actool crashes if inputs path are relative, so use os.path.abspath
# to get absolute path name for inputs.
command_line.extend(map(os.path.abspath, inputs))
subprocess.check_call(command_line)
def ExecMergeInfoPlist(self, output, *inputs):
"""Merge multiple .plist files into a single .plist file."""
merged_plist = {}
for path in inputs:
plist = self._LoadPlistMaybeBinary(path)
self._MergePlist(merged_plist, plist)
plistlib.writePlist(merged_plist, output)
def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
"""Code sign a bundle.
This function tries to code sign an iOS bundle, following the same
algorithm as Xcode:
1. copy ResourceRules.plist from the user or the SDK into the bundle,
2. pick the provisioning profile that best match the bundle identifier,
and copy it into the bundle as embedded.mobileprovision,
3. copy Entitlements.plist from user or SDK next to the bundle,
4. code sign the bundle.
"""
resource_rules_path = self._InstallResourceRules(resource_rules)
substitutions, overrides = self._InstallProvisioningProfile(
provisioning, self._GetCFBundleIdentifier())
entitlements_path = self._InstallEntitlements(
entitlements, substitutions, overrides)
subprocess.check_call([
'codesign', '--force', '--sign', key, '--resource-rules',
resource_rules_path, '--entitlements', entitlements_path,
os.path.join(
os.environ['TARGET_BUILD_DIR'],
os.environ['FULL_PRODUCT_NAME'])])
def _InstallResourceRules(self, resource_rules):
"""Installs ResourceRules.plist from user or SDK into the bundle.
Args:
resource_rules: string, optional, path to the ResourceRules.plist file
to use, default to "${SDKROOT}/ResourceRules.plist"
Returns:
Path to the copy of ResourceRules.plist into the bundle.
"""
source_path = resource_rules
target_path = os.path.join(
os.environ['BUILT_PRODUCTS_DIR'],
os.environ['CONTENTS_FOLDER_PATH'],
'ResourceRules.plist')
if not source_path:
source_path = os.path.join(
os.environ['SDKROOT'], 'ResourceRules.plist')
shutil.copy2(source_path, target_path)
return target_path
def _InstallProvisioningProfile(self, profile, bundle_identifier):
"""Installs embedded.mobileprovision into the bundle.
Args:
profile: string, optional, short name of the .mobileprovision file
to use, if empty or the file is missing, the best file installed
will be used
bundle_identifier: string, value of CFBundleIdentifier from Info.plist
Returns:
A tuple containing two dictionary: variables substitutions and values
to overrides when generating the entitlements file.
"""
source_path, provisioning_data, team_id = self._FindProvisioningProfile(
profile, bundle_identifier)
target_path = os.path.join(
os.environ['BUILT_PRODUCTS_DIR'],
os.environ['CONTENTS_FOLDER_PATH'],
'embedded.mobileprovision')
shutil.copy2(source_path, target_path)
substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
return substitutions, provisioning_data['Entitlements']
def _FindProvisioningProfile(self, profile, bundle_identifier):
"""Finds the .mobileprovision file to use for signing the bundle.
Checks all the installed provisioning profiles (or if the user specified
the PROVISIONING_PROFILE variable, only consult it) and select the most
specific that correspond to the bundle identifier.
Args:
profile: string, optional, short name of the .mobileprovision file
to use, if empty or the file is missing, the best file installed
will be used
bundle_identifier: string, value of CFBundleIdentifier from Info.plist
Returns:
A tuple of the path to the selected provisioning profile, the data of
the embedded plist in the provisioning profile and the team identifier
to use for code signing.
Raises:
SystemExit: if no .mobileprovision can be used to sign the bundle.
"""
profiles_dir = os.path.join(
os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
if not os.path.isdir(profiles_dir):
print >>sys.stderr, (
'cannot find mobile provisioning for %s' % bundle_identifier)
sys.exit(1)
provisioning_profiles = None
if profile:
profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
if os.path.exists(profile_path):
provisioning_profiles = [profile_path]
if not provisioning_profiles:
provisioning_profiles = glob.glob(
os.path.join(profiles_dir, '*.mobileprovision'))
valid_provisioning_profiles = {}
for profile_path in provisioning_profiles:
profile_data = self._LoadProvisioningProfile(profile_path)
app_id_pattern = profile_data.get(
'Entitlements', {}).get('application-identifier', '')
for team_identifier in profile_data.get('TeamIdentifier', []):
app_id = '%s.%s' % (team_identifier, bundle_identifier)
if fnmatch.fnmatch(app_id, app_id_pattern):
valid_provisioning_profiles[app_id_pattern] = (
profile_path, profile_data, team_identifier)
if not valid_provisioning_profiles:
print >>sys.stderr, (
'cannot find mobile provisioning for %s' % bundle_identifier)
sys.exit(1)
# If the user has multiple provisioning profiles installed that can be
# used for ${bundle_identifier}, pick the most specific one (ie. the
# provisioning profile whose pattern is the longest).
selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
return valid_provisioning_profiles[selected_key]
def _LoadProvisioningProfile(self, profile_path):
"""Extracts the plist embedded in a provisioning profile.
Args:
profile_path: string, path to the .mobileprovision file
Returns:
Content of the plist embedded in the provisioning profile as a dictionary.
"""
with tempfile.NamedTemporaryFile() as temp:
subprocess.check_call([
'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
return self._LoadPlistMaybeBinary(temp.name)
def _MergePlist(self, merged_plist, plist):
"""Merge |plist| into |merged_plist|."""
for key, value in plist.iteritems():
if isinstance(value, dict):
merged_value = merged_plist.get(key, {})
if isinstance(merged_value, dict):
self._MergePlist(merged_value, value)
merged_plist[key] = merged_value
else:
merged_plist[key] = value
else:
merged_plist[key] = value
def _LoadPlistMaybeBinary(self, plist_path):
"""Loads into a memory a plist possibly encoded in binary format.
This is a wrapper around plistlib.readPlist that tries to convert the
plist to the XML format if it can't be parsed (assuming that it is in
the binary format).
Args:
plist_path: string, path to a plist file, in XML or binary format
Returns:
Content of the plist as a dictionary.
"""
try:
# First, try to read the file using plistlib that only supports XML,
# and if an exception is raised, convert a temporary copy to XML and
# load that copy.
return plistlib.readPlist(plist_path)
except:
pass
with tempfile.NamedTemporaryFile() as temp:
shutil.copy2(plist_path, temp.name)
subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
return plistlib.readPlist(temp.name)
def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
"""Constructs a dictionary of variable substitutions for Entitlements.plist.
Args:
bundle_identifier: string, value of CFBundleIdentifier from Info.plist
app_identifier_prefix: string, value for AppIdentifierPrefix
Returns:
Dictionary of substitutions to apply when generating Entitlements.plist.
"""
return {
'CFBundleIdentifier': bundle_identifier,
'AppIdentifierPrefix': app_identifier_prefix,
}
def _GetCFBundleIdentifier(self):
"""Extracts CFBundleIdentifier value from Info.plist in the bundle.
Returns:
Value of CFBundleIdentifier in the Info.plist located in the bundle.
"""
info_plist_path = os.path.join(
os.environ['TARGET_BUILD_DIR'],
os.environ['INFOPLIST_PATH'])
info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
return info_plist_data['CFBundleIdentifier']
def _InstallEntitlements(self, entitlements, substitutions, overrides):
    """Generates and install the ${BundleName}.xcent entitlements file.

    Expands variables "$(variable)" pattern in the source entitlements file,
    add extra entitlements defined in the .mobileprovision file and the copy
    the generated plist to "${BundlePath}.xcent".

    Args:
      entitlements: string, optional, path to the Entitlements.plist template
        to use, defaults to "${SDKROOT}/Entitlements.plist"
      substitutions: dictionary, variable substitutions
      overrides: dictionary, values to add to the entitlements

    Returns:
      Path to the generated entitlements file.
    """
    source_path = entitlements
    # The generated file lives next to the built products, named after the
    # product with an .xcent extension.
    target_path = os.path.join(
        os.environ['BUILT_PRODUCTS_DIR'],
        os.environ['PRODUCT_NAME'] + '.xcent')
    if not source_path:
        # No template supplied: fall back to the SDK's default template.
        source_path = os.path.join(
            os.environ['SDKROOT'],
            'Entitlements.plist')
    shutil.copy2(source_path, target_path)
    data = self._LoadPlistMaybeBinary(target_path)
    data = self._ExpandVariables(data, substitutions)
    if overrides:
        # Overrides only fill in missing keys; values already present in
        # the template always win.
        for key in overrides:
            if key not in data:
                data[key] = overrides[key]
    plistlib.writePlist(data, target_path)
    return target_path
def _ExpandVariables(self, data, substitutions):
"""Expands variables "$(variable)" in data.
Args:
data: object, can be either string, list or dictionary
substitutions: dictionary, variable substitutions to perform
Returns:
Copy of data where each references to "$(variable)" has been replaced
by the corresponding value found in substitutions, or left intact if
the key was not found.
"""
if isinstance(data, str):
for key, value in substitutions.iteritems():
data = data.replace('$(%s)' % key, value)
return data
if isinstance(data, list):
return [self._ExpandVariables(v, substitutions) for v in data]
if isinstance(data, dict):
return {k: self._ExpandVariables(data[k], substitutions) for k in data}
return data
if __name__ == '__main__':
    # Script entry point: delegate to main() (defined earlier in this file,
    # not shown here) with the command-line arguments, propagating its
    # return value as the process exit code.
    sys.exit(main(sys.argv[1:]))
| mit |
moijes12/oh-mainline | vendor/packages/twisted/twisted/python/roots.py | 68 | 7311 | # -*- test-case-name: twisted.test.test_roots -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Twisted Python Roots: an abstract hierarchy representation for Twisted.
Maintainer: Glyph Lefkowitz
"""
# System imports
import types
from twisted.python import reflect
# Subclasses NotImplementedError so generic "not implemented" handlers
# still catch it.
class NotSupportedError(NotImplementedError):
    """
    An exception meaning that the tree-manipulation operation
    you're attempting to perform is not supported.
    """
class Request:
    """I am an abstract representation of a request for an entity.

    I also function as the response. The request is responded to by calling
    self.write(data) until there is no data left and then calling
    self.finish().
    """

    # This attribute should be set to the string name of the protocol being
    # responded to (e.g. HTTP or FTP)
    wireProtocol = None

    def write(self, data):
        """Add some data to the response to this request.

        Abstract: subclasses must override.
        """
        raise NotImplementedError("%s.write" % reflect.qual(self.__class__))

    def finish(self):
        """The response to this request is finished; flush all data to the
        network stream.

        Abstract: subclasses must override.
        """
        raise NotImplementedError("%s.finish" % reflect.qual(self.__class__))
class Entity:
    """I am a terminal object in a hierarchy, with no children.

    I represent a null interface; certain non-instance objects (strings and
    integers, notably) are Entities.

    Methods on this class are suggested to be implemented, but are not
    required, and will be emulated on a per-protocol basis for types which do
    not handle them.
    """

    def render(self, request):
        """
        I produce a stream of bytes for the request, by calling request.write()
        and request.finish().

        Abstract: concrete entities must override this.
        """
        raise NotImplementedError("%s.render" % reflect.qual(self.__class__))
class Collection:
    """A static collection of named entities.

    Also provides hooks (getDynamicEntity / listDynamicEntities) so
    subclasses can produce entities on demand; static entities always
    take precedence over dynamic ones.
    """

    def __init__(self, entities=None):
        """Initialize me, optionally with an existing name->entity mapping."""
        self.entities = {} if entities is None else entities

    def getStaticEntity(self, name):
        """Return the entity stored under 'name' via putEntity, or None."""
        return self.entities.get(name)

    def getDynamicEntity(self, name, request):
        """Subclass hook: generate an entity on demand.

        Should return None on failure; this base implementation always
        fails (implicitly returning None).
        """

    def getEntity(self, name, request):
        """Retrieve an entity, static entities obscuring dynamic ones.

        Returns None if neither the static nor the dynamic lookup succeeds.
        """
        entity = self.getStaticEntity(name)
        if entity is None:
            # Only consult the dynamic hook when no static entity exists.
            entity = self.getDynamicEntity(name, request)
        return entity

    def putEntity(self, name, entity):
        """Store a static reference on 'name' for 'entity'.

        Raises a KeyError if the operation fails.
        """
        self.entities[name] = entity

    def delEntity(self, name):
        """Remove the static reference for 'name'.

        Raises a KeyError if no such reference exists.
        """
        del self.entities[name]

    def storeEntity(self, name, request):
        """Store an entity for 'name', based on the content of 'request'.

        Not supported by this base class.
        """
        raise NotSupportedError("%s.storeEntity" % reflect.qual(self.__class__))

    def removeEntity(self, name, request):
        """Remove an entity for 'name', based on the content of 'request'.

        Not supported by this base class.
        """
        raise NotSupportedError("%s.removeEntity" % reflect.qual(self.__class__))

    def listStaticEntities(self):
        """All (name, entity) pairs I hold static references to."""
        return self.entities.items()

    def listDynamicEntities(self, request):
        """Subclass hook: (name, entity) pairs I can generate on demand."""
        return []

    def listEntities(self, request):
        """All (name, entity) pairs I contain, static then dynamic."""
        return self.listStaticEntities() + self.listDynamicEntities(request)

    def listStaticNames(self):
        """Names of the entities I hold static references to."""
        return self.entities.keys()

    def listDynamicNames(self):
        """Names of the entities I can generate on demand."""
        return []

    def listNames(self, request):
        """All names for entities that I contain."""
        return self.listStaticNames()
class ConstraintViolation(Exception):
    """An exception raised when a constraint is violated.

    Raised by Constrained.putEntity when a name or entity constraint
    rejects the item being stored.
    """
class Constrained(Collection):
    """A collection that has constraints on its names and/or entities."""

    def nameConstraint(self, name):
        """Decide whether an entity may be stored under 'name'.

        Return 1 (true) to allow; return 0 or raise a descriptive
        ConstraintViolation to refuse.  This base implementation allows
        every name.
        """
        return 1

    def entityConstraint(self, entity):
        """Decide whether 'entity' may be stored at all.

        Return 1 (true) to allow; return 0 or raise a descriptive
        ConstraintViolation to refuse.  This base implementation allows
        every entity.
        """
        return 1

    def reallyPutEntity(self, name, entity):
        """Store the entity unconditionally, bypassing the constraints."""
        Collection.putEntity(self, name, entity)

    def putEntity(self, name, entity):
        """Store an entity if it meets both constraints.

        Otherwise raise a ConstraintViolation.  The name constraint is
        checked before the entity constraint, matching the original order.
        """
        if not self.nameConstraint(name):
            raise ConstraintViolation("Name constraint violated.")
        if not self.entityConstraint(entity):
            raise ConstraintViolation("Entity constraint violated.")
        self.reallyPutEntity(name, entity)
class Locked(Constrained):
    """A collection that can be locked from adding entities."""

    # 0 = unlocked (default).  There is no unlock method; once lock() has
    # been called the collection refuses all further entities.
    locked = 0

    def lock(self):
        """Disallow all further putEntity calls on this collection."""
        self.locked = 1

    def entityConstraint(self, entity):
        # Any entity is acceptable until the collection is locked.
        return not self.locked
class Homogenous(Constrained):
    """A homogenous collection of entities.

    I will only contain entities that are an instance of the class or type
    specified by my 'entityType' attribute.
    """

    # Default accepts any old-style class instance.  NOTE(review):
    # types.InstanceType exists only on Python 2; subclasses are expected
    # to override entityType with a concrete class anyway.
    entityType = types.InstanceType

    def entityConstraint(self, entity):
        # Accept only instances of entityType; reject loudly otherwise.
        if isinstance(entity, self.entityType):
            return 1
        else:
            raise ConstraintViolation("%s of incorrect type (%s)" %
                                      (entity, self.entityType))

    def getNameType(self):
        """Human-readable label for the kind of names this collection uses."""
        return "Name"

    def getEntityType(self):
        """Human-readable name of the accepted entity type."""
        return self.entityType.__name__
| agpl-3.0 |
cloudbase/nova | nova/scheduler/driver.py | 3 | 2145 | # Copyright (c) 2010 OpenStack Foundation
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Scheduler base class that all Schedulers should inherit from
"""
import abc
import six
from stevedore import driver
import nova.conf
from nova import objects
from nova import servicegroup
CONF = nova.conf.CONF
@six.add_metaclass(abc.ABCMeta)
class Scheduler(object):
    """The base class that all Scheduler classes should inherit from."""

    def __init__(self):
        # Load the configured host manager implementation through the
        # "nova.scheduler.host_manager" stevedore entry point; the class
        # name comes from the scheduler_host_manager config option.
        self.host_manager = driver.DriverManager(
                "nova.scheduler.host_manager",
                CONF.scheduler_host_manager,
                invoke_on_load=True).driver
        self.servicegroup_api = servicegroup.API()

    def run_periodic_tasks(self, context):
        """Manager calls this so drivers can perform periodic tasks.

        No-op by default; subclasses may override.
        """
        pass

    def hosts_up(self, context, topic):
        """Return the list of hosts that have a running service for topic."""
        services = objects.ServiceList.get_by_topic(context, topic)
        # Keep only hosts whose service the servicegroup API currently
        # reports as up.
        return [service.host
                for service in services
                if self.servicegroup_api.service_is_up(service)]

    @abc.abstractmethod
    def select_destinations(self, context, spec_obj):
        """Must override select_destinations method.

        :return: A list of dicts with 'host', 'nodename' and 'limits' as keys
            that satisfies the request_spec and filter_properties.
        """
        return []
| apache-2.0 |
basvandenberg/spiceweb | spiceweb/project.py | 1 | 14038 | import os
import re
import zipfile
import traceback
#import urllib2
import simplejson
import cherrypy
from cherrypy.lib.static import serve_file
import spiceweb
from biopy import file_io
class Project:
    """CherryPy controller for the project pages of the spiceweb site.

    Handles listing, creating, inspecting, downloading and deleting
    projects for the current (possibly unregistered) user.  The active
    project id is kept in the CherryPy session.
    """

    # Session key under which the currently active project id is stored.
    SESSION_PROJECT_KEY = 'project_id'
    # Directory (relative to the spiceweb root) holding bundled examples.
    EXAMPLE_DIR = 'example_projects'
    # (project id, sequence file, sequence type, labeling file) per example.
    EXAMPLES = [
        ('aniger-secretion', 'protein.fsa', 'prot_seq', 'secretion.txt'),
        ('yeast-expression', 'orf.fsa', 'orf_seq', 'expression.txt'),
        ('human-localization', 'protein.fsa', 'prot_seq', 'localization.txt'),
        ('ecoli-solubility', 'protein.fsa', 'prot_seq', 'solubility.txt')
    ]
    # (name, NCBI taxon id) pairs for the top-level taxonomy domains.
    TAXON_DOMAINS = [
        ('Bacteria', 2),
        ('Archea', 2157),
        ('Eukaryota', 2759),
        ('Viruses', 10239)
    ]

    def __init__(self, auth, project_manager, root_url, main_menu,
                 main_menu_index, sub_menu):
        """Store collaborators and resolve this controller's menu entry.

        auth: authentication helper exposing get_user()
        project_manager: backend managing per-user project storage
        root_url: site root used to build redirect URLs
        main_menu: list of (name, url) tuples; main_menu_index selects ours
        sub_menu: list of sub-page url fragments, indexed by smi below
        """
        self.auth = auth
        self.project_manager = project_manager
        self.root_url = root_url
        self.mmi = main_menu_index
        self.mm_name, self.mm_url = main_menu[self.mmi]
        self.sub_menu = sub_menu

    def get_project_id(self):
        """Return the active project id from the session, or None."""
        return cherrypy.session.get(self.SESSION_PROJECT_KEY, None)

    def fetch_session_data(self):
        """Resolve the current user and project and push them into the
        project manager.  Must be called at the start of every handler."""
        # retrieve logged in user
        self.user_id = self.auth.get_user()
        # if no logged in user, request unregistered user id from cookie
        if(self.user_id is None):
            self.user_id = cherrypy.request.cookie['spice.session'].value
        # fetch current project id from session data
        self.project_id = self.get_project_id()
        if not(self.project_id is None):
            # fetch project ids for current user
            existing_projects = [p[0] for p in
                                 self.project_manager.get_projects()]
            # if user does not have project with project id
            if not(self.project_id in existing_projects):
                # reset session project id to None
                cherrypy.session[self.SESSION_PROJECT_KEY] = None
                # set project id to None
                self.project_id = None
        self.project_manager.set_user(self.user_id)
        self.project_manager.set_project(self.project_id)

    def get_url(self, smi):
        """Absolute URL for sub-menu index smi of this controller."""
        return '%s%s%s' % (self.root_url, self.mm_url, self.sub_menu[smi])

    def get_template_f(self, smi):
        """Template file name for sub-menu index smi."""
        return '%s_%s.html' % (self.mm_name, self.sub_menu[smi])

    def get_template_args(self, smi):
        """Base keyword arguments for rendering the page at sub-menu smi."""
        return spiceweb.get_template_args(main_menu_index=self.mmi,
                                          sub_menu_index=smi)

    @cherrypy.expose
    def index(self):
        """Redirect the bare controller URL to the project list page."""
        raise cherrypy.HTTPRedirect(self.get_url(0))

    @cherrypy.expose
    def list(self):
        """Render the list of the current user's projects (sub-menu 0)."""
        self.fetch_session_data()
        smi = 0
        projects = self.project_manager.get_projects()
        kw_args = self.get_template_args(smi)
        kw_args['projects'] = projects
        template_f = self.get_template_f(smi)
        return spiceweb.get_template(template_f, **kw_args)

    @cherrypy.expose
    def new(self, project_id=None, fasta_file=None, sequence_type=None,
            use_reference=None, taxon_domain=None, taxon=None):
        """Render the new-project form and, on POST with all required
        fields, validate the input and create the project.

        On success the new project becomes the active one and the browser
        is redirected to the project list; on failure the form is
        re-rendered with an error message.
        """
        self.fetch_session_data()
        smi = 1
        kw_args = self.get_template_args(smi)
        # does this stay the same over time???
        # taxon domain and corresponding uniprot ancestor numbers
        kw_args['taxon_domains'] = self.TAXON_DOMAINS
        error_msg = None
        # start a new project
        if((fasta_file and sequence_type) and project_id):
            # taxon domain is not used...
            taxon_id = None
            if not(use_reference is None):
                taxon_id = int(taxon)
            # Validate the upload and the project id before touching disk.
            if(fasta_file.file is None):
                error_msg = 'No fasta file provided'
            elif(len(project_id) < 4):
                error_msg = 'Project id should be at least 4 characters long'
            elif(' ' in project_id):
                error_msg = 'Spaces are not allowed in the project id'
            elif not(re.match('^[A-Za-z0-9_-]*$', project_id)):
                error_msg = 'Only characters, digits, dashes, and ' +\
                            'underscores are allowed in a project id'
            else:
                try:
                    # initiate new project; an empty string means success
                    error_msg = self.project_manager.start_new_project(
                        project_id, fasta_file, sequence_type,
                        reference_taxon=taxon_id)
                except:
                    print(traceback.format_exc())
                    error_msg = 'Error creating new project'
            if(error_msg == ''):
                # store project id in session
                cherrypy.session[self.SESSION_PROJECT_KEY] = project_id
                # redirect to project list page
                url = self.get_url(0)
                raise cherrypy.HTTPRedirect(url)
            else:
                kw_args['msg'] = error_msg
        template_f = self.get_template_f(smi)
        return spiceweb.get_template(template_f, **kw_args)

    @cherrypy.expose
    def details(self, project_id, data_type=None, data_name=None,
                data_file=None):
        """Render the detail page for project_id (sub-menu 2), making it
        the active project, and optionally handle an uploaded labeling or
        sequence data file."""
        self.fetch_session_data()
        smi = 2
        # first check if the provided project_id excists
        existing_projects = [p[0] for p in self.project_manager.get_projects()]
        if not(project_id in existing_projects):
            # return message that this project does not exist
            kw_args = self.get_template_args(smi)
            template_f = 'no_such_project.html'
            return spiceweb.get_template(template_f, **kw_args)
        # store project id in session
        cherrypy.session[self.SESSION_PROJECT_KEY] = project_id
        # reset the session data, using the new project id
        self.fetch_session_data()
        msg_lab = ''
        msg_seq = ''
        # in case of a data file upload
        if((data_type and data_name) and data_file):
            pm = self.project_manager
            # the upload labeling case
            if(data_type == 'labeling'):
                # check labeling input data
                if(data_file.file is None):
                    msg_lab = 'No labeling file provided'
                elif(' ' in data_name):
                    msg_lab = 'Spaces are not allowed in the project id'
                elif not(re.match('^[A-Za-z0-9_-]*$', data_name)):
                    msg_lab = 'Only characters, digits, dashes, and ' +\
                              'underscores are allowed in a project id'
                # if no incorrect input data
                else:
                    # try to add the labeling, storing errors in msg_lab
                    try:
                        msg_lab = pm.add_labeling(data_name, data_file.file)
                    except Exception:
                        print(traceback.format_exc())
                        msg_lab = 'Error adding labeling'
                # chop labeling message to reasonable size
                if(len(msg_lab) > 100):
                    msg_lab = msg_lab[:100] + '...'
            # the upload sequence data case
            elif(data_type == 'data_source'):
                # check sequence input data
                if(data_file.file == None):
                    msg_seq = 'No file provided.'
                # if no incorrect input data
                else:
                    # try to add sequence data
                    try:
                        msg_seq = pm.add_data_source(data_name, data_file.file)
                    except Exception:
                        msg_seq = 'Error adding sequence data.'
                    # Append a format hint when the backend reports a data
                    # problem.
                    if(msg_seq[:13] == 'Error in data'):
                        msg_seq = msg_seq + '<br /><br />NOTE:<ul><li>Secundary structure sequences should consist of the letters C, H, and E (same as output psipred)</li><li>Solvent accessibility sequences should consist of the letters B (buried), and E (exposed)</li></ul>'
        fe = self.project_manager.get_feature_extraction()
        kw_args = self.get_template_args(smi)
        kw_args['fe'] = fe
        kw_args['data_sources'] = ['prot_seq', 'orf_seq', 'ss_seq', 'sa_seq']
        kw_args['msg_lab'] = msg_lab
        kw_args['msg_seq'] = msg_seq
        template_f = self.get_template_f(smi)
        return spiceweb.get_template(template_f, **kw_args)

    @cherrypy.expose
    def load_example(self, example_number):
        """Create one of the bundled example projects (by EXAMPLES index)
        for the current user and redirect to the project list."""
        self.fetch_session_data()
        smi = 1
        try:
            example_number = int(example_number)
        except ValueError:
            # Non-numeric input falls through to the range check below.
            example_number = -1
        if(example_number < 0 or example_number >= len(self.EXAMPLES)):
            kw_args = self.get_template_args(smi)
            template_f = 'no_such_example.html'
            return spiceweb.get_template(template_f, **kw_args)
        pm = self.project_manager
        (pid, seq_f, seq_type, labeling_f) = self.EXAMPLES[example_number]
        root_d = spiceweb.spiceweb_dir
        seq_f = os.path.join(root_d, self.EXAMPLE_DIR, pid, seq_f)
        labeling_f = os.path.join(root_d, self.EXAMPLE_DIR, pid, labeling_f)
        error_msg = pm.start_example_project(pid, seq_f, seq_type, labeling_f)
        if(error_msg == ''):
            # store project id in session
            cherrypy.session[self.SESSION_PROJECT_KEY] = pid
            # redirect to project list page
            url = self.get_url(0)
            raise cherrypy.HTTPRedirect(url)
        else:
            # Example data is shipped with the app, so a failure here is
            # unexpected; log it and redirect to the list page anyway.
            print
            print 'This should not happen...'
            print error_msg
            url = self.get_url(0)
            raise cherrypy.HTTPRedirect(url)

    #
    # ajax methods
    #

    @cherrypy.expose
    def taxon_list(self, taxon_domain=None):
        """Ajax: return a JSON object with an HTML <option> list of the
        taxonomies available for the given taxon domain id, read from
        pre-generated files in the project manager's reference data dir."""
        self.fetch_session_data()
        pm = self.project_manager
        taxon_id = int(taxon_domain)
        #top_lists = self.FAVO_TAXONS
        #top_list = top_lists[taxon_id]
        # obtain all taxons of this domain from uniprot
        #url = 'http://www.uniprot.org/taxonomy/' +\
        #    '?query=complete:yes+ancestor:%i&format=tab' % (taxon_id)
        #response = urllib2.urlopen(url)
        #full_taxon_list = response.read()
        f = os.path.join(pm.ref_data_dir, '%i.txt' % (taxon_id))
        f_favo = os.path.join(pm.ref_data_dir, '%i_favo.txt' % (taxon_id))
        # Each line of the taxon file: "<taxon id> <name words...>".
        taxon_tuples = []
        if(os.path.exists(f)):
            with open(f, 'r') as fin:
                for line in fin:
                    tokens = line.split()
                    taxon_tuples.append((int(tokens[0]), ' '.join(tokens[1:])))
        ids_favo = []
        if(os.path.exists(f_favo)):
            ids_favo = [int(i) for i in file_io.read_ids(f_favo)]
        # parse result, fetch ids and names
        '''
        ids = []
        names = []
        first_line = True
        for line in full_taxon_list.split('\n'):
            if(len(line.strip()) > 0):
                if(first_line):
                    first_line = False
                else:
                    tokens = line.split('\t')
                    ids.append(int(tokens[0]))
                    names.append(tokens[2])
        '''
        # turn it into select list, would be nicer to let javascript do this
        select_str = ''
        if(len(ids_favo) > 0):
            taxon_dict = dict(taxon_tuples)
            select_str += '<optgroup label="Short list">\n'
            for i in ids_favo:
                select_str += '<option value="%i">%s (taxon id: %i)</option>\n' % (i, taxon_dict[i], i)
            select_str += '</optgroup>\n'
        select_str += '<optgroup label="All uniprot complete proteome taxonomies">\n'
        for i, name in taxon_tuples:
            select_str += '<option value="%i">%s (taxon id: %i)</option>\n' % (i, name, i)
        select_str += '</optgroup>\n'
        cherrypy.response.headers['Content-Type'] = 'application/json'
        return simplejson.dumps(dict(taxon_list=select_str))

    @cherrypy.expose
    def download(self, data_type='project', data_name=None):
        """Serve a download: the whole project as a zip, or a single data
        source / labeling file as plain text."""
        self.fetch_session_data()
        pm = self.project_manager
        if(data_type == 'project'):
            filetype = 'application/zip'
            filepath = os.path.join(pm.user_dir, '%s.zip' % (self.project_id))
            # Zip the whole project directory, archiving paths relative to
            # the project directory's parent.
            with zipfile.ZipFile(filepath, 'w') as fout:
                first = True
                for root, dirs, files in os.walk(pm.project_dir):
                    if first:
                        rootroot = os.path.dirname(root)
                        first = False
                    arcroot = os.path.relpath(root, rootroot)
                    for file in files:
                        fout.write(os.path.join(root, file),
                                   arcname=os.path.join(arcroot, file))
        elif(data_type == 'data_source'):
            filetype = 'text/plain'
            fe = pm.get_feature_extraction()
            filepath = fe.protein_data_set.ds_dict[data_name].get_data_path()
        elif(data_type == 'labeling'):
            filetype = 'text/plain'
            fe = pm.get_feature_extraction()
            fm = fe.fm_protein
            labeling_d = os.path.join(fe.fm_protein_d, fm.LABELING_D)
            filepath = os.path.join(labeling_d, '%s.txt' % (data_name))
        return serve_file(filepath, filetype, 'attachment')

    @cherrypy.expose
    def delete(self, project_id):
        '''
        This function handles an ajax call to delete a project.
        '''
        self.fetch_session_data()
        self.project_manager.delete_project(project_id)
        # remove project id from session if it is the currently active one
        if(self.SESSION_PROJECT_KEY in cherrypy.session and
           cherrypy.session[self.SESSION_PROJECT_KEY] == project_id):
            cherrypy.session[self.SESSION_PROJECT_KEY] = None
sam-m888/gprime | gprime/plug/menu/_enumeratedlist.py | 1 | 5086 | #
# gPrime - A web-based genealogy program
#
# Copyright (C) 2007-2008 Brian G. Matherly
# Copyright (C) 2008 Gary Burton
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
Option class representing an enumerated list of possible values.
"""
#-------------------------------------------------------------------------
#
# gramps modules
#
#-------------------------------------------------------------------------
from . import Option
from gprime.const import LOCALE as glocale
_ = glocale.translation.sgettext
#-------------------------------------------------------------------------
#
# set up logging
#
#-------------------------------------------------------------------------
import logging
#-------------------------------------------------------------------------
#
# EnumeratedListOption class
#
#-------------------------------------------------------------------------
class EnumeratedListOption(Option):
    """
    This class describes an option that provides a finite number of values.
    Each possible value is assigned a value and a description.
    """

    # Emitted whenever the set of possible values changes.
    __signals__ = { 'options-changed' : None }

    def __init__(self, label, value):
        """
        :param label: A friendly label to be applied to this option.
            Example: "Paper Size"
        :type label: string
        :param value: An initial value for this option.
            Example: 5
        :type value: int
        :return: nothing
        """
        Option.__init__(self, label, value)
        # (value, localized description) tuples
        self.__items = []
        # (value, untranslated description) tuples; only populated for
        # items added with xml_item/xml_items=True
        self.__xml_items = []

    def add_item(self, value, description, xml_item=False):
        """
        Add an item to the list of possible values.

        :param value: The value that corresponds to this item.
            Example: 5
        :type value: int
        :param description: A description of this value.
            Example: "8.5 x 11"
        :type description: string
        :param xml_item: allow deferred translation of item description
        :type _xml_item: Boolean
        :return: nothing
        """
        if not xml_item: # localized item description is being passed in
            self.__items.append((value, description))
        else: # English item description is being passed in
            # Keep the untranslated form and store a translated copy in the
            # regular item list.
            self.__xml_items.append((value, description))
            self.__items.append((value, _(description)))
        self.emit('options-changed')

    def set_items(self, items, xml_items=False):
        """
        Add a list of items to the list of possible values.

        :param items: A list of tuples containing value, description pairs.
            Example: [ (5,"8.5 x 11"), (6,"11 x 17")]
        :type items: array
        :param xml_items: allow deferred translation of item descriptions
        :type _xml_items: Boolean
        :return: nothing
        """
        if not xml_items: # localized item descriptions are being passed in
            self.__items = items
        else: # English item descriptions are being passed in
            # NOTE(review): unlike the branch above, this appends the
            # translated items to any existing ones instead of replacing
            # them — verify whether that asymmetry is intentional.
            self.__xml_items = items
            for (value, description) in items:
                self.__items.append((value, _(description)))
        self.emit('options-changed')

    def get_items(self, xml_items=False):
        """
        Get all the possible values for this option.

        :param xml_items: allow deferred translation of item descriptions
        :type _xml_items: Boolean
        :return: an array of tuples containing (value,description) pairs.
        """
        if not xml_items: # localized item descriptions are wanted
            return self.__items
        return self.__xml_items # English item descriptions are wanted

    def clear(self):
        """
        Clear all possible values from this option.

        :return: nothing.
        """
        # NOTE(review): only the localized list is cleared; __xml_items is
        # left untouched here.
        self.__items = []
        self.emit('options-changed')

    def set_value(self, value):
        """
        Set the value of this option.

        :param value: A value for this option.
            Example: True
        :type value: The type will depend on the type of option.
        :return: nothing
        """
        # Reject (with a warning, not an exception) values that are not in
        # the list of possible items.
        if value in (v for v, d in self.__items):
            Option.set_value(self, value)
        else:
            logging.warning(_("Value '%(val)s' not found for option '%(opt)s'") %
                            {'val' : str(value), 'opt' : self.get_label()})
            logging.warning(_("Valid values: ") + str(self.__items))
| gpl-2.0 |
tragiclifestories/django | tests/auth_tests/test_management.py | 65 | 23053 | from __future__ import unicode_literals
import locale
import sys
from datetime import date
from django.apps import apps
from django.contrib.auth import management, models
from django.contrib.auth.checks import check_user_model
from django.contrib.auth.management import create_permissions
from django.contrib.auth.management.commands import (
changepassword, createsuperuser,
)
from django.contrib.auth.models import Group, User
from django.contrib.auth.tests.custom_user import CustomUser
from django.contrib.contenttypes.models import ContentType
from django.core import checks, exceptions
from django.core.management import call_command
from django.core.management.base import CommandError
from django.test import (
SimpleTestCase, TestCase, override_settings, override_system_checks,
)
from django.utils import six
from django.utils.encoding import force_str
from django.utils.translation import ugettext_lazy as _
from .models import (
CustomUserBadRequiredFields, CustomUserNonListRequiredFields,
CustomUserNonUniqueUsername, CustomUserWithFK, Email,
)
def mock_inputs(inputs):
    """
    Decorator to temporarily replace input/getpass to allow interactive
    createsuperuser.

    `inputs` maps a (lowercased) substring of an expected prompt to the
    canned response to return for it; the special key 'password' is what
    the fake getpass() returns.
    """
    def inner(test_func):
        def wrapped(*args):
            class mock_getpass:
                @staticmethod
                def getpass(prompt=b'Password: ', stream=None):
                    if six.PY2:
                        # getpass on Windows only supports prompt as bytestring (#19807)
                        assert isinstance(prompt, six.binary_type)
                    return inputs['password']

            def mock_input(prompt):
                # prompt should be encoded in Python 2. This line will raise an
                # Exception if prompt contains unencoded non-ASCII on Python 2.
                prompt = str(prompt)
                assert str('__proxy__') not in prompt
                response = ''
                # First key found as a substring of the prompt wins.
                for key, val in inputs.items():
                    if force_str(key) in prompt.lower():
                        response = val
                        break
                return response

            # Monkeypatch the createsuperuser module for the duration of
            # the wrapped test, restoring the real implementations in a
            # finally block even if the test fails.
            old_getpass = createsuperuser.getpass
            old_input = createsuperuser.input
            createsuperuser.getpass = mock_getpass
            createsuperuser.input = mock_input
            try:
                test_func(*args)
            finally:
                createsuperuser.getpass = old_getpass
                createsuperuser.input = old_input
        return wrapped
    return inner
class MockTTY(object):
    """A stand-in for stdin that reports itself as a terminal.

    Used together with mock_inputs so interactive code paths (such as
    createsuperuser prompting) can be exercised from a test.
    """

    def isatty(self):
        """Claim to be connected to an interactive terminal."""
        return True
class GetDefaultUsernameTestCase(TestCase):
    """Tests for management.get_default_username()."""

    def setUp(self):
        # Save the real implementation so individual tests can stub it.
        self.old_get_system_username = management.get_system_username

    def tearDown(self):
        # Restore the real implementation after each test.
        management.get_system_username = self.old_get_system_username

    def test_actual_implementation(self):
        self.assertIsInstance(management.get_system_username(), six.text_type)

    def test_simple(self):
        management.get_system_username = lambda: 'joe'
        self.assertEqual(management.get_default_username(), 'joe')

    def test_existing(self):
        # The system username must not be suggested when a user with that
        # name already exists — unless the database check is skipped.
        models.User.objects.create(username='joe')
        management.get_system_username = lambda: 'joe'
        self.assertEqual(management.get_default_username(), '')
        self.assertEqual(
            management.get_default_username(check_db=False), 'joe')

    def test_i18n(self):
        # 'Julia' with accented 'u': non-ASCII system names are normalized.
        management.get_system_username = lambda: 'J\xfalia'
        self.assertEqual(management.get_default_username(), 'julia')
class ChangepasswordManagementCommandTestCase(TestCase):
    """Tests for the `changepassword` management command."""

    def setUp(self):
        self.user = models.User.objects.create_user(username='joe', password='qwerty')
        # Capture command output instead of writing to the real streams.
        self.stdout = six.StringIO()
        self.stderr = six.StringIO()

    def tearDown(self):
        self.stdout.close()
        self.stderr.close()

    def test_that_changepassword_command_changes_joes_password(self):
        "Executing the changepassword management command should change joe's password"
        self.assertTrue(self.user.check_password('qwerty'))
        command = changepassword.Command()
        # Stub the interactive prompt so the command runs unattended.
        command._get_pass = lambda *args: 'not qwerty'

        command.execute(username="joe", stdout=self.stdout)
        command_output = self.stdout.getvalue().strip()

        self.assertEqual(
            command_output,
            "Changing password for user 'joe'\nPassword changed successfully for user 'joe'"
        )
        self.assertTrue(models.User.objects.get(username="joe").check_password("not qwerty"))

    def test_that_max_tries_exits_1(self):
        """
        A CommandError should be thrown by handle() if the user enters in
        mismatched passwords three times.
        """
        command = changepassword.Command()
        # Return a different value each call so confirmation never matches.
        command._get_pass = lambda *args: args or 'foo'

        with self.assertRaises(CommandError):
            command.execute(username="joe", stdout=self.stdout, stderr=self.stderr)

    def test_that_changepassword_command_works_with_nonascii_output(self):
        """
        #21627 -- Executing the changepassword management command should allow
        non-ASCII characters from the User object representation.
        """
        # 'Julia' with accented 'u':
        models.User.objects.create_user(username='J\xfalia', password='qwerty')
        command = changepassword.Command()
        command._get_pass = lambda *args: 'not qwerty'

        command.execute(username="J\xfalia", stdout=self.stdout)
@override_settings(SILENCED_SYSTEM_CHECKS=['fields.W342']) # ForeignKey(unique=True)
class CreatesuperuserManagementCommandTestCase(TestCase):
    """End-to-end tests for the ``createsuperuser`` management command.

    The command is driven through ``call_command`` with ``stdout``/``stderr``
    redirected to ``StringIO`` so console output can be asserted on.
    Interactive runs feed prompt answers through the ``mock_inputs``
    decorator and a ``MockTTY`` stdin (both defined elsewhere in this file).
    """
    def test_basic_usage(self):
        "Check the operation of the createsuperuser management command"
        # We can use the management command to create a superuser
        new_io = six.StringIO()
        call_command(
            "createsuperuser",
            interactive=False,
            username="joe",
            email="joe@somewhere.org",
            stdout=new_io
        )
        command_output = new_io.getvalue().strip()
        self.assertEqual(command_output, 'Superuser created successfully.')
        u = User.objects.get(username="joe")
        self.assertEqual(u.email, 'joe@somewhere.org')
        # created password should be unusable
        self.assertFalse(u.has_usable_password())
    @mock_inputs({'password': "nopasswd"})
    def test_nolocale(self):
        """
        Check that createsuperuser does not break when no locale is set. See
        ticket #16017.
        """
        old_getdefaultlocale = locale.getdefaultlocale
        try:
            # Temporarily remove locale information
            locale.getdefaultlocale = lambda: (None, None)
            # Call the command in this new environment
            call_command(
                "createsuperuser",
                interactive=True,
                username="nolocale@somewhere.org",
                email="nolocale@somewhere.org",
                verbosity=0,
                stdin=MockTTY(),
            )
        except TypeError:
            self.fail("createsuperuser fails if the OS provides no information about the current locale")
        finally:
            # Re-apply locale information
            locale.getdefaultlocale = old_getdefaultlocale
        # If we were successful, a user should have been created
        u = User.objects.get(username="nolocale@somewhere.org")
        self.assertEqual(u.email, 'nolocale@somewhere.org')
    @mock_inputs({
        'password': "nopasswd",
        'u\u017eivatel': 'foo', # username (cz)
        'email': 'nolocale@somewhere.org'})
    def test_non_ascii_verbose_name(self):
        # The interactive prompt is keyed on the username field's
        # verbose_name, so a non-ASCII verbose_name must not crash it; the
        # answer is supplied above under the 'u\u017eivatel' key.
        username_field = User._meta.get_field('username')
        old_verbose_name = username_field.verbose_name
        username_field.verbose_name = _('u\u017eivatel')
        new_io = six.StringIO()
        try:
            call_command(
                "createsuperuser",
                interactive=True,
                stdout=new_io,
                stdin=MockTTY(),
            )
        finally:
            # Always restore the patched verbose_name on the shared model.
            username_field.verbose_name = old_verbose_name
        command_output = new_io.getvalue().strip()
        self.assertEqual(command_output, 'Superuser created successfully.')
    def test_verbosity_zero(self):
        # We can suppress output on the management command
        new_io = six.StringIO()
        call_command(
            "createsuperuser",
            interactive=False,
            username="joe2",
            email="joe2@somewhere.org",
            verbosity=0,
            stdout=new_io
        )
        command_output = new_io.getvalue().strip()
        self.assertEqual(command_output, '')
        u = User.objects.get(username="joe2")
        self.assertEqual(u.email, 'joe2@somewhere.org')
        self.assertFalse(u.has_usable_password())
    def test_email_in_username(self):
        # Usernames containing '@' and '+' are accepted verbatim.
        new_io = six.StringIO()
        call_command(
            "createsuperuser",
            interactive=False,
            username="joe+admin@somewhere.org",
            email="joe@somewhere.org",
            stdout=new_io
        )
        u = User._default_manager.get(username="joe+admin@somewhere.org")
        self.assertEqual(u.email, 'joe@somewhere.org')
        self.assertFalse(u.has_usable_password())
    @override_settings(AUTH_USER_MODEL='auth.CustomUser')
    def test_swappable_user(self):
        "A superuser can be created when a custom User model is in use"
        # We can use the management command to create a superuser
        # We skip validation because the temporary substitution of the
        # swappable User model messes with validation.
        new_io = six.StringIO()
        call_command(
            "createsuperuser",
            interactive=False,
            email="joe@somewhere.org",
            date_of_birth="1976-04-01",
            stdout=new_io,
        )
        command_output = new_io.getvalue().strip()
        self.assertEqual(command_output, 'Superuser created successfully.')
        u = CustomUser._default_manager.get(email="joe@somewhere.org")
        self.assertEqual(u.date_of_birth, date(1976, 4, 1))
        # created password should be unusable
        self.assertFalse(u.has_usable_password())
    @override_settings(AUTH_USER_MODEL='auth.CustomUser')
    def test_swappable_user_missing_required_field(self):
        "A Custom superuser won't be created when a required field isn't provided"
        # We can use the management command to create a superuser
        # We skip validation because the temporary substitution of the
        # swappable User model messes with validation.
        new_io = six.StringIO()
        with self.assertRaises(CommandError):
            call_command(
                "createsuperuser",
                interactive=False,
                username="joe@somewhere.org",
                stdout=new_io,
                stderr=new_io,
            )
        self.assertEqual(CustomUser._default_manager.count(), 0)
    @override_settings(
        AUTH_USER_MODEL='auth.CustomUserNonUniqueUsername',
        AUTHENTICATION_BACKENDS=['my.custom.backend'],
    )
    def test_swappable_user_username_non_unique(self):
        # With a non-unique USERNAME_FIELD and a non-default auth backend,
        # the same username can be used to create several superusers.
        @mock_inputs({
            'username': 'joe',
            'password': 'nopasswd',
        })
        def createsuperuser():
            new_io = six.StringIO()
            call_command(
                "createsuperuser",
                interactive=True,
                email="joe@somewhere.org",
                stdout=new_io,
                stdin=MockTTY(),
            )
            command_output = new_io.getvalue().strip()
            self.assertEqual(command_output, 'Superuser created successfully.')
        for i in range(2):
            createsuperuser()
        users = CustomUserNonUniqueUsername.objects.filter(username="joe")
        self.assertEqual(users.count(), 2)
    def test_skip_if_not_in_TTY(self):
        """
        If the command is not called from a TTY, it should be skipped and a
        message should be displayed (#7423).
        """
        class FakeStdin(object):
            """A fake stdin object that has isatty() return False."""
            def isatty(self):
                return False
        out = six.StringIO()
        call_command(
            "createsuperuser",
            stdin=FakeStdin(),
            stdout=out,
            interactive=True,
        )
        # No user created, and the skip message was printed.
        self.assertEqual(User._default_manager.count(), 0)
        self.assertIn("Superuser creation skipped", out.getvalue())
    def test_passing_stdin(self):
        """
        You can pass a stdin object as an option and it should be
        available on self.stdin.
        If no such option is passed, it defaults to sys.stdin.
        """
        sentinel = object()
        command = createsuperuser.Command()
        # check() is stubbed out so system checks do not interfere here.
        command.check = lambda: []
        command.execute(
            stdin=sentinel,
            stdout=six.StringIO(),
            stderr=six.StringIO(),
            interactive=False,
            verbosity=0,
            username='janet',
            email='janet@example.com',
        )
        self.assertIs(command.stdin, sentinel)
        command = createsuperuser.Command()
        command.check = lambda: []
        command.execute(
            stdout=six.StringIO(),
            stderr=six.StringIO(),
            interactive=False,
            verbosity=0,
            username='joe',
            email='joe@example.com',
        )
        self.assertIs(command.stdin, sys.stdin)
    @override_settings(AUTH_USER_MODEL='auth.CustomUserWithFK')
    def test_fields_with_fk(self):
        # Foreign-key REQUIRED_FIELDS are resolved from the given primary
        # keys; a failed lookup must raise CommandError.
        new_io = six.StringIO()
        group = Group.objects.create(name='mygroup')
        email = Email.objects.create(email='mymail@gmail.com')
        call_command(
            'createsuperuser',
            interactive=False,
            username=email.pk,
            email=email.email,
            group=group.pk,
            stdout=new_io,
        )
        command_output = new_io.getvalue().strip()
        self.assertEqual(command_output, 'Superuser created successfully.')
        u = CustomUserWithFK._default_manager.get(email=email)
        self.assertEqual(u.username, email)
        self.assertEqual(u.group, group)
        non_existent_email = 'mymail2@gmail.com'
        with self.assertRaisesMessage(CommandError,
                'email instance with email %r does not exist.' % non_existent_email):
            call_command(
                'createsuperuser',
                interactive=False,
                username=email.pk,
                email=non_existent_email,
                stdout=new_io,
            )
    @override_settings(AUTH_USER_MODEL='auth.CustomUserWithFK')
    def test_fields_with_fk_interactive(self):
        # Same as test_fields_with_fk but through the interactive prompts;
        # the prompt keys embed the remote field name, e.g. 'username (email.id)'.
        new_io = six.StringIO()
        group = Group.objects.create(name='mygroup')
        email = Email.objects.create(email='mymail@gmail.com')
        @mock_inputs({
            'password': 'nopasswd',
            'username (email.id)': email.pk,
            'email (email.email)': email.email,
            'group (group.id)': group.pk,
        })
        def test(self):
            call_command(
                'createsuperuser',
                interactive=True,
                stdout=new_io,
                stdin=MockTTY(),
            )
            command_output = new_io.getvalue().strip()
            self.assertEqual(command_output, 'Superuser created successfully.')
            u = CustomUserWithFK._default_manager.get(email=email)
            self.assertEqual(u.username, email)
            self.assertEqual(u.group, group)
        test(self)
class CustomUserModelValidationTestCase(SimpleTestCase):
    """System-check (``check_user_model``) validation of swapped user models.

    Each test swaps in a deliberately broken custom user model and asserts
    that ``checks.run_checks()`` reports exactly the expected error/warning.
    """
    @override_settings(AUTH_USER_MODEL='auth.CustomUserNonListRequiredFields')
    @override_system_checks([check_user_model])
    def test_required_fields_is_list(self):
        "REQUIRED_FIELDS should be a list."
        errors = checks.run_checks()
        expected = [
            checks.Error(
                "'REQUIRED_FIELDS' must be a list or tuple.",
                hint=None,
                obj=CustomUserNonListRequiredFields,
                id='auth.E001',
            ),
        ]
        self.assertEqual(errors, expected)
    @override_settings(AUTH_USER_MODEL='auth.CustomUserBadRequiredFields')
    @override_system_checks([check_user_model])
    def test_username_not_in_required_fields(self):
        "USERNAME_FIELD should not appear in REQUIRED_FIELDS."
        errors = checks.run_checks()
        expected = [
            checks.Error(
                ("The field named as the 'USERNAME_FIELD' for a custom user model "
                 "must not be included in 'REQUIRED_FIELDS'."),
                hint=None,
                obj=CustomUserBadRequiredFields,
                id='auth.E002',
            ),
        ]
        self.assertEqual(errors, expected)
    @override_settings(AUTH_USER_MODEL='auth.CustomUserNonUniqueUsername')
    @override_system_checks([check_user_model])
    def test_username_non_unique(self):
        "A non-unique USERNAME_FIELD should raise a model validation error."
        errors = checks.run_checks()
        expected = [
            checks.Error(
                ("'CustomUserNonUniqueUsername.username' must be "
                 "unique because it is named as the 'USERNAME_FIELD'."),
                hint=None,
                obj=CustomUserNonUniqueUsername,
                id='auth.E003',
            ),
        ]
        self.assertEqual(errors, expected)
    @override_settings(AUTH_USER_MODEL='auth.CustomUserNonUniqueUsername',
                       AUTHENTICATION_BACKENDS=[
                           'my.custom.backend',
                       ])
    @override_system_checks([check_user_model])
    def test_username_non_unique_with_custom_backend(self):
        """ A non-unique USERNAME_FIELD should raise an error only if we use the
        default authentication backend. Otherwise, an warning should be raised.
        """
        errors = checks.run_checks()
        expected = [
            checks.Warning(
                ("'CustomUserNonUniqueUsername.username' is named as "
                 "the 'USERNAME_FIELD', but it is not unique."),
                hint=('Ensure that your authentication backend(s) can handle '
                      'non-unique usernames.'),
                obj=CustomUserNonUniqueUsername,
                id='auth.W004',
            )
        ]
        self.assertEqual(errors, expected)
class PermissionTestCase(TestCase):
    """Tests for ``create_permissions`` with patched ``Permission._meta``.

    setUp/tearDown save and restore the Meta attributes (``permissions``,
    ``default_permissions``, ``verbose_name``) that individual tests patch,
    so patching never leaks between tests.
    """
    def setUp(self):
        # Copy the list ([:]) so in-place edits by tests can't corrupt it.
        self._original_permissions = models.Permission._meta.permissions[:]
        self._original_default_permissions = models.Permission._meta.default_permissions
        self._original_verbose_name = models.Permission._meta.verbose_name
    def tearDown(self):
        models.Permission._meta.permissions = self._original_permissions
        models.Permission._meta.default_permissions = self._original_default_permissions
        models.Permission._meta.verbose_name = self._original_verbose_name
        # Content types may have been fetched while Meta was patched.
        ContentType.objects.clear_cache()
    def test_duplicated_permissions(self):
        """
        Test that we show proper error message if we are trying to create
        duplicate permissions.
        """
        auth_app_config = apps.get_app_config('auth')
        # check duplicated default permission
        models.Permission._meta.permissions = [
            ('change_permission', 'Can edit permission (duplicate)')]
        six.assertRaisesRegex(self, CommandError,
            "The permission codename 'change_permission' clashes with a "
            "builtin permission for model 'auth.Permission'.",
            create_permissions, auth_app_config, verbosity=0)
        # check duplicated custom permissions
        models.Permission._meta.permissions = [
            ('my_custom_permission', 'Some permission'),
            ('other_one', 'Some other permission'),
            ('my_custom_permission', 'Some permission with duplicate permission code'),
        ]
        six.assertRaisesRegex(self, CommandError,
            "The permission codename 'my_custom_permission' is duplicated for model "
            "'auth.Permission'.",
            create_permissions, auth_app_config, verbosity=0)
        # should not raise anything
        models.Permission._meta.permissions = [
            ('my_custom_permission', 'Some permission'),
            ('other_one', 'Some other permission'),
        ]
        create_permissions(auth_app_config, verbosity=0)
    def test_default_permissions(self):
        auth_app_config = apps.get_app_config('auth')
        permission_content_type = ContentType.objects.get_by_natural_key('auth', 'permission')
        models.Permission._meta.permissions = [
            ('my_custom_permission', 'Some permission'),
        ]
        create_permissions(auth_app_config, verbosity=0)
        # add/change/delete permission by default + custom permission
        self.assertEqual(models.Permission.objects.filter(
            content_type=permission_content_type,
        ).count(), 4)
        models.Permission.objects.filter(content_type=permission_content_type).delete()
        models.Permission._meta.default_permissions = []
        create_permissions(auth_app_config, verbosity=0)
        # custom permission only since default permissions is empty
        self.assertEqual(models.Permission.objects.filter(
            content_type=permission_content_type,
        ).count(), 1)
    def test_verbose_name_length(self):
        auth_app_config = apps.get_app_config('auth')
        permission_content_type = ContentType.objects.get_by_natural_key('auth', 'permission')
        models.Permission.objects.filter(content_type=permission_content_type).delete()
        # NOTE(review): limit is 244 chars per the expected message —
        # presumably because default permission names embed the verbose_name.
        models.Permission._meta.verbose_name = "some ridiculously long verbose name that is out of control" * 5
        six.assertRaisesRegex(self, exceptions.ValidationError,
            "The verbose_name of auth.permission is longer than 244 characters",
            create_permissions, auth_app_config, verbosity=0)
    def test_custom_permission_name_length(self):
        auth_app_config = apps.get_app_config('auth')
        ContentType.objects.get_by_natural_key('auth', 'permission')
        custom_perm_name = 'a' * 256
        models.Permission._meta.permissions = [
            ('my_custom_permission', custom_perm_name),
        ]
        try:
            msg = (
                "The permission name %s of auth.permission is longer than "
                "255 characters" % custom_perm_name
            )
            with self.assertRaisesMessage(exceptions.ValidationError, msg):
                create_permissions(auth_app_config, verbosity=0)
        finally:
            models.Permission._meta.permissions = []
| bsd-3-clause |
javierTerry/odoo | addons/l10n_be_intrastat/l10n_be_intrastat.py | 258 | 7828 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Odoo, Open Source Business Applications
# Copyright (C) 2014-2015 Odoo S.A. <http://www.odoo.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class account_invoice(osv.osv):
    """Extend invoices with the extra fields required by the Belgian
    Intrastat declaration (incoterm, transaction type, transport mode,
    and the country used for the declaration)."""
    _inherit = "account.invoice"
    _columns = {
        'incoterm_id': fields.many2one(
            'stock.incoterms', 'Incoterm',
            help="International Commercial Terms are a series of predefined commercial terms "
                 "used in international transactions."),
        'intrastat_transaction_id': fields.many2one(
            'l10n_be_intrastat.transaction', 'Intrastat Transaction Type',
            help="Intrastat nature of transaction"),
        'transport_mode_id': fields.many2one(
            'l10n_be_intrastat.transport_mode', 'Intrastat Transport Mode'),
        # Delivery country for sales, origin country for purchases; limited
        # to countries flagged as intrastat members.
        'intrastat_country_id': fields.many2one(
            'res.country', 'Intrastat Country',
            help='Intrastat country, delivery for sales, origin for purchases',
            domain=[('intrastat','=',True)]),
    }
class intrastat_region(osv.osv):
    """Intrastat region referential: a unique code, optionally tied to a country."""
    _name = 'l10n_be_intrastat.region'
    _columns = {
        'code': fields.char('Code', required=True),
        'country_id': fields.many2one('res.country', 'Country'),
        'name': fields.char('Name', translate=True),
        'description': fields.char('Description'),
    }
    _sql_constraints = [
        ('l10n_be_intrastat_regioncodeunique', 'UNIQUE (code)', 'Code must be unique.'),
    ]
class intrastat_transaction(osv.osv):
    """Intrastat nature-of-transaction codes; records display as their code."""
    _name = 'l10n_be_intrastat.transaction'
    _rec_name = 'code'
    _columns = {
        'code': fields.char('Code', required=True, readonly=True),
        'description': fields.text('Description', readonly=True),
    }
    _sql_constraints = [
        ('l10n_be_intrastat_trcodeunique', 'UNIQUE (code)', 'Code must be unique.'),
    ]
class intrastat_transport_mode(osv.osv):
    """Intrastat transport mode referential (read-only code + label)."""
    _name = 'l10n_be_intrastat.transport_mode'
    _columns = {
        'code': fields.char('Code', required=True, readonly=True),
        'name': fields.char('Description', readonly=True),
    }
    _sql_constraints = [
        ('l10n_be_intrastat_trmodecodeunique', 'UNIQUE (code)', 'Code must be unique.'),
    ]
class product_category(osv.osv):
    """Let product categories carry an intrastat code, inherited from parents."""
    _name = "product.category"
    _inherit = "product.category"
    _columns = {
        'intrastat_id': fields.many2one('report.intrastat.code', 'Intrastat Code'),
    }
    def get_intrastat_recursively(self, cr, uid, category, context=None):
        """ Recursively search in categories to find an intrastat code id
        :param category : Browse record of a category
        :return: id of the nearest intrastat code found, or None
        """
        # Prefer the code set on the category itself, then climb the tree.
        if category.intrastat_id:
            return category.intrastat_id.id
        if category.parent_id:
            return self.get_intrastat_recursively(cr, uid, category.parent_id, context=context)
        return None
class product_product(osv.osv):
    """Resolve a product's intrastat code, falling back to its category tree."""
    _name = "product.product"
    _inherit = "product.product"
    def get_intrastat_recursively(self, cr, uid, id, context=None):
        """ Recursively search in categories to find an intrastat code id
        :return: id of the intrastat code on the product or its categories, or None
        """
        product = self.browse(cr, uid, id, context=context)
        # A code set directly on the product wins over the category tree.
        if product.intrastat_id:
            return product.intrastat_id.id
        if product.categ_id:
            category_model = self.pool['product.category']
            return category_model.get_intrastat_recursively(
                cr, uid, product.categ_id, context=context)
        return None
class purchase_order(osv.osv):
    _inherit = "purchase.order"
    def _prepare_invoice(self, cr, uid, order, line_ids, context=None):
        """
        copy incoterm from purchase order to invoice
        """
        vals = super(purchase_order, self)._prepare_invoice(
            cr, uid, order, line_ids, context=context)
        incoterm = order.incoterm_id
        if incoterm:
            vals['incoterm_id'] = incoterm.id
        #Try to determine products origin
        supplier_country = order.partner_id.country_id
        if supplier_country:
            #It comes from supplier
            vals['intrastat_country_id'] = supplier_country.id
        return vals
class report_intrastat_code(osv.osv):
    """Add a translatable description to the base intrastat code model."""
    _inherit = "report.intrastat.code"
    _columns = {
        'description': fields.text('Description', translate=True),
    }
class res_company(osv.osv):
    """Company-level defaults used when filling the intrastat declaration."""
    _inherit = "res.company"
    _columns = {
        'region_id': fields.many2one('l10n_be_intrastat.region', 'Intrastat region'),
        'transport_mode_id': fields.many2one('l10n_be_intrastat.transport_mode',
                                             'Default transport mode'),
        'incoterm_id': fields.many2one('stock.incoterms', 'Default incoterm for Intrastat',
                                       help="International Commercial Terms are a series of "
                                            "predefined commercial terms used in international "
                                            "transactions."),
    }
class sale_order(osv.osv):
    _inherit = "sale.order"
    def _prepare_invoice(self, cr, uid, saleorder, lines, context=None):
        """
        copy incoterm from sale order to invoice
        """
        vals = super(sale_order, self)._prepare_invoice(
            cr, uid, saleorder, lines, context=context)
        if saleorder.incoterm:
            vals['incoterm_id'] = saleorder.incoterm.id
        # Guess products destination: shipping address first, then the
        # customer itself, then the invoicing address.
        candidates = (saleorder.partner_shipping_id,
                      saleorder.partner_id,
                      saleorder.partner_invoice_id)
        for partner in candidates:
            if partner.country_id:
                vals['intrastat_country_id'] = partner.country_id.id
                break
        return vals
class stock_warehouse(osv.osv):
    """Attach an intrastat region to warehouses and resolve it from a location."""
    _inherit = "stock.warehouse"
    _columns = {
        'region_id': fields.many2one('l10n_be_intrastat.region', 'Intrastat region'),
    }
    def get_regionid_from_locationid(self, cr, uid, location_id, context=None):
        """Return the intrastat region id of a warehouse whose stock location
        is related to ``location_id``, or None when no such warehouse exists."""
        location_model = self.pool['stock.location']
        location = location_model.browse(cr, uid, location_id, context=context)
        # All locations related to the given one in the nested-set
        # (parent_left/parent_right) hierarchy.
        related_location_ids = location_model.search(
            cr, uid,
            [('parent_left', '<=', location.parent_left),
             ('parent_right', '>=', location.parent_right)],
            context=context)
        warehouse_ids = self.search(
            cr, uid,
            [('lot_stock_id', 'in', related_location_ids),
             ('region_id', '!=', False)],
            context=context)
        warehouses = self.browse(cr, uid, warehouse_ids, context=context)
        if warehouses and warehouses[0]:
            return warehouses[0].region_id.id
        return None
| agpl-3.0 |
erickuofucker/nw.js | tools/package_binaries.py | 64 | 14511 | #!/usr/bin/env python
import argparse
import getnwisrelease
import getnwversion
import gzip
import os
import platform
import shutil
import sys
import tarfile
import zipfile
from subprocess import call
# Names of the packaging steps; --step/--skip and the generators dict below
# are keyed on these.
steps = ['nw', 'chromedriver', 'symbol', 'headers', 'others']
################################
# Parse command line args
parser = argparse.ArgumentParser(description='Package nw binaries.')
parser.add_argument('-p','--path', help='Where to find the binaries, like out/Release', required=False)
parser.add_argument('-a','--arch', help='target arch', required=False)
parser.add_argument('-m','--mode', help='package mode', required=False)
parser.add_argument('-i','--icudat', help='icudat override', required=False)
group = parser.add_mutually_exclusive_group()
group.add_argument('-s','--step', choices=steps, help='Execute specified step.', required=False)
group.add_argument('-n','--skip', choices=steps, help='Skip specified step.', required=False)
args = parser.parse_args()
################################
# Init variables.
binaries_location = None # .../out/Release
platform_name = None # win/linux/osx
arch = None # ia32/x64
step = None # nw/chromedriver/symbol
skip = None
nw_ver = None # x.xx
dist_dir = None # .../out/Release/dist
flavor = args.mode
is_headers_ok = False # record whether nw-headers generated
# Package base name; sdk/nacl flavors get a suffixed name.
package_name = 'nwjs'
if flavor in ['sdk', 'nacl']:
    package_name = 'nwjs-' + args.mode
step = args.step
skip = args.skip
binaries_location = args.path
# If the binaries location is not given, calculate it from script related dir.
if binaries_location == None:
    binaries_location = os.path.join(os.path.dirname(__file__),
            os.pardir, os.pardir, os.pardir, 'out', 'Release')
if not os.path.isabs(binaries_location):
    binaries_location = os.path.join(os.getcwd(), binaries_location)
if not os.path.isdir(binaries_location):
    print 'Invalid path: ' + binaries_location
    exit(-1)
binaries_location = os.path.normpath(binaries_location)
dist_dir = os.path.join(binaries_location, 'dist')
print 'Working on ' + binaries_location
if args.icudat != None:
    #FIXME: for some reason they are the same file (hard link) and copy will fail
    os.remove(os.path.join(binaries_location, 'icudtl.dat'))
    shutil.copy(args.icudat, binaries_location)
# Detect host platform; packaging is host-specific.
if sys.platform.startswith('linux'):
    platform_name = 'linux'
elif sys.platform in ('win32', 'cygwin'):
    platform_name = 'win'
elif sys.platform == 'darwin':
    platform_name = 'osx'
else:
    print 'Unsupported platform: ' + sys.platform
    exit(-1)
_arch = platform.architecture()[0]
if _arch == '64bit':
    arch = 'x64'
elif _arch == '32bit':
    arch = 'ia32'
else:
    print 'Unsupported arch: ' + _arch
    exit(-1)
if platform_name == 'win':
    # Rename the import library/export file of nw.dll to the shipped names.
    libfile = os.path.join(binaries_location, 'nw.lib')
    expfile = os.path.join(binaries_location, 'nw.exp')
    shutil.copy(os.path.join(binaries_location, 'nw.dll.lib'), libfile)
    shutil.copy(os.path.join(binaries_location, 'nw.dll.exp'), expfile)
if platform_name == 'win':
    # NOTE(review): Windows arch is forced to ia32 here (may be overridden
    # below by --arch).
    arch = 'ia32'
if platform_name != 'osx':
    # Ship only the en-US locale: flatten it out, recreate the locales dir
    # with just that one .pak file.
    try:
        os.remove(os.path.join(binaries_location, 'en-US.pak'))
    except OSError:
        pass
    shutil.copy(os.path.join(binaries_location, 'locales', 'en-US.pak'), binaries_location)
    shutil.rmtree(os.path.join(binaries_location, 'locales'))
    os.mkdir(os.path.join(binaries_location, 'locales'))
    shutil.copy(os.path.join(binaries_location, 'en-US.pak'), os.path.join(binaries_location, 'locales'))
if platform_name == 'osx':
    # detect output arch
    nw_bin = binaries_location + '/nwjs.app/Contents/MacOS/nwjs'
    import subprocess
    if 'i386' in subprocess.check_output(['file',nw_bin]):
        arch = 'ia32'
    else: # should be 'x86_64'
        arch = 'x64'
if args.arch != None:
    arch = args.arch
nw_ver = getnwversion.nw_version
if getnwisrelease.release == 0:
    nw_ver += getnwisrelease.postfix
################################
# Generate targets
#
# target example:
# {
# 'input' : [ 'nw', 'nw.pak', ... ]
# 'output' : 'nwjs-v0.9.2-linux-x64'
# 'compress' : 'tar.gz'
# 'folder' : True # Optional. More than 2 files will be put into a seprate folder
# # normally, if you want do to this for only 1 file, set this flag.
# }
def generate_target_nw(platform_name, arch, version):
    """Build the packaging target for the main nwjs binaries.

    Uses the module-level ``package_name`` and ``flavor``; nacl/sdk flavors
    additionally ship the NaCl helpers and IRT for the target arch.
    """
    target = {
        'output': '%s-v%s-%s-%s' % (package_name, version, platform_name, arch),
        # Linux ships tarballs; win/osx ship zips.
        'compress': 'tar.gz' if platform_name == 'linux' else 'zip',
    }
    if platform_name == 'linux':
        files = [
            'credits.html',
            'resources.pak',
            'nw_100_percent.pak',
            'nw',
            'icudtl.dat',
            'locales',
            'snapshot_blob.bin',
            'natives_blob.bin',
        ]
        if flavor in ('nacl', 'sdk'):
            files += ['nacl_helper', 'nacl_helper_bootstrap', 'pnacl']
            if arch == 'x64':
                files.append('nacl_irt_x86_64.nexe')
            else:
                files.append('nacl_irt_x86_32.nexe')
    elif platform_name == 'win':
        files = [
            'snapshot_blob.bin',
            'natives_blob.bin',
            'd3dcompiler_47.dll',
            'libEGL.dll',
            'libGLESv2.dll',
            'nw.dll',
            'nw_elf.dll',
            'nw.exe',
            'locales',
            'icudtl.dat',
            'credits.html',
            'resources.pak',
            'nw_100_percent.pak',
            'nw_200_percent.pak',
        ]
        if flavor in ('nacl', 'sdk'):
            files.append('pnacl')
            if arch == 'x64':
                files.append('nacl_irt_x86_64.nexe')
            else:
                files.append('nacl_irt_x86_32.nexe')
    elif platform_name == 'osx':
        # The .app bundle already contains everything else.
        files = [
            'nwjs.app',
            'credits.html',
        ]
    else:
        print('Unsupported platform: ' + platform_name)
        exit(-1)
    target['input'] = files
    return target
def generate_target_chromedriver(platform_name, arch, version):
    """Build the packaging target for the chromedriver binary.

    chromedriver ships only with the SDK flavor; other flavors get an
    empty (no-op) target.
    """
    if args.mode != 'sdk':
        return generate_target_empty(platform_name, arch, version)
    binary = 'chromedriver.exe' if platform_name == 'win' else 'chromedriver'
    return {
        'output': 'chromedriver-nw-v%s-%s-%s' % (version, platform_name, arch),
        'compress': 'tar.gz' if platform_name == 'linux' else 'zip',
        'input': [binary],
        'folder': True,  # always create a folder, even for the single file
    }
def generate_target_symbols(platform_name, arch, version):
    """Build the packaging target for the debugging symbol files."""
    base_name = '%s-symbol-v%s-%s-%s' % (package_name, version, platform_name, arch)
    if platform_name == 'linux':
        return {
            'output': base_name,
            'compress': 'tar.gz',
            'input': ['nw.breakpad.' + arch],
            'folder': True,
        }
    if platform_name == 'win':
        # Windows symbols arrive pre-packed as a 7z archive; copy it as-is
        # under the versioned name.
        return {
            'output': base_name + '.7z',
            'compress': None,
            'input': ['nw.sym.7z'],
        }
    if platform_name == 'osx':
        return {
            'output': base_name,
            'compress': 'zip',
            'input': [
                'nwjs.breakpad.tar'
            ],
            'folder': True,
        }
    print('Unsupported platform: ' + platform_name)
    exit(-1)
def generate_target_headers(platform_name, arch, version):
    """Build the packaging target for the nw node headers tarball.

    Side effects (osx only): runs make-nw-headers.py in a subprocess and
    moves the produced tarball into ``binaries_location``. On win/linux
    this is a no-op target.
    """
    # here, call make_nw_header tool to generate headers
    # then, move to binaries_location
    target = {}
    target['output'] = ''
    target['compress'] = None
    if platform_name == 'osx':
        target['input'] = []
        # here , call make-nw-headers.py to generate nw headers
        make_nw_header = os.path.join(os.path.dirname(__file__), \
            'make-nw-headers.py')
        print make_nw_header
        res = call(['python', make_nw_header])
        if res == 0:
            print 'nw-headers generated'
            nw_headers_name = 'nw-headers-v' + version + '.tar.gz'
            nw_headers_path = os.path.join(os.path.dirname(__file__), \
                os.pardir, 'tmp', nw_headers_name)
            # Replace any stale tarball of the same name before moving.
            if os.path.isfile(os.path.join(binaries_location, nw_headers_name)):
                os.remove(os.path.join(binaries_location, nw_headers_name))
            shutil.move(nw_headers_path, binaries_location)
            target['input'].append(nw_headers_name)
        else:
            #TODO, handle err
            print 'nw-headers generate failed'
    elif platform_name == 'win':
        target['input'] = []
    elif platform_name == 'linux':
        target['input'] = []
    else:
        print 'Unsupported platform: ' + platform_name
        exit(-1)
    return target
def generate_target_empty(platform_name, arch, version):
    """Return a placeholder target that packages nothing.

    Used when a step does not apply to the current flavor (e.g. the
    chromedriver step outside SDK builds). The arguments are accepted only
    to keep the generator signature uniform; they are unused.
    """
    # The original branched on platform_name, but every branch produced the
    # same empty input list — one dict covers all platforms.
    return {
        'output': '',
        'compress': None,
        'input': [],
    }
def generate_target_others(platform_name, arch, version):
    """Build the target for loose extra files copied as-is into dist/.

    Only Windows has extras: the import library and export file of nw.dll.
    """
    extra_files = []
    if platform_name == 'win':
        extra_files = ['nw.exp', 'nw.lib']
    return {
        'output': '',      # copied directly, no archive name
        'compress': None,  # never packed
        'input': extra_files,
    }
################################
# Make packages
def compress(from_dir, to_dir, fname, compress):
    """Archive ``from_dir/fname`` as ``to_dir/fname`` plus an extension.

    ``compress`` selects the format: 'zip' (file or folder), 'tar.gz'
    (folders only) or 'gz' (single file only); anything else aborts.
    """
    from_dir = os.path.normpath(from_dir)
    to_dir = os.path.normpath(to_dir)
    _from = os.path.join(from_dir, fname)
    _to = os.path.join(to_dir, fname)
    if compress == 'zip':
        z = zipfile.ZipFile(_to + '.zip', 'w', compression=zipfile.ZIP_DEFLATED)
        if os.path.isdir(_from):
            for root, dirs, files in os.walk(_from):
                for f in files:
                    _path = os.path.join(root, f)
                    # store archive members relative to from_dir
                    z.write(_path, _path.replace(from_dir+os.sep, ''))
        else:
            z.write(_from, fname)
        z.close()
    elif compress == 'tar.gz': # only for folders
        if not os.path.isdir(_from):
            print 'Will not create tar.gz for a single file: ' + _from
            exit(-1)
        with tarfile.open(_to + '.tar.gz', 'w:gz') as tar:
            tar.add(_from, arcname=os.path.basename(_from))
    elif compress == 'gz': # only for single file
        if os.path.isdir(_from):
            print 'Will not create gz for a folder: ' + _from
            exit(-1)
        f_in = open(_from, 'rb')
        f_out = gzip.open(_to + '.gz', 'wb')
        f_out.writelines(f_in)
        f_out.close()
        f_in.close()
    else:
        print 'Unsupported compression format: ' + compress
        exit(-1)
def make_packages(targets):
    """Materialize every target into ``dist_dir``.

    Verifies all inputs exist first, recreates ``dist_dir`` from scratch,
    then for each target either copies files verbatim (compress=None),
    stages them in a temp folder and packs it, or packs a single file.
    """
    # check file existance
    for t in targets:
        for f in t['input']:
            src = os.path.join(binaries_location, f)
            if not os.path.exists(src):
                print 'File does not exist: ', src
                exit(-1)
    # clear the output folder
    if os.path.exists(dist_dir):
        if not os.path.isdir(dist_dir):
            print 'Invalid path: ' + dist_dir
            exit(-1)
        else:
            shutil.rmtree(dist_dir)
    # now let's do it
    os.mkdir(dist_dir)
    for t in targets:
        if len(t['input']) == 0:
            continue
        if t['compress'] == None:
            # Plain copy; 'output' (if set) renames the single file.
            for f in t['input']:
                src = os.path.join(binaries_location, f)
                if t['output'] != '':
                    dest = os.path.join(dist_dir, t['output'])
                else:
                    dest = os.path.join(dist_dir, f)
                print "Copying " + f
                shutil.copy(src, dest)
        elif (t.has_key('folder') and t['folder'] == True) or len(t['input']) > 1:
            print 'Making "' + t['output'] + '.' + t['compress'] + '"'
            # copy files into a folder then pack
            folder = os.path.join(dist_dir, t['output'])
            os.mkdir(folder)
            for f in t['input']:
                src = os.path.join(binaries_location, f)
                dest = os.path.join(folder, f)
                if os.path.isdir(src): # like nw.app
                    shutil.copytree(src, dest)
                else:
                    shutil.copy(src, dest)
            compress(dist_dir, dist_dir, t['output'], t['compress'])
            # remove temp folders
            shutil.rmtree(folder)
        else:
            # single file
            print 'Making "' + t['output'] + '.' + t['compress'] + '"'
            compress(binaries_location, dist_dir, t['input'][0], t['compress'])
# must be aligned with steps
generators = {}
generators['nw'] = generate_target_nw
generators['chromedriver'] = generate_target_chromedriver
generators['symbol'] = generate_target_symbols
generators['headers'] = generate_target_headers
generators['others'] = generate_target_others
################################
# Process targets
targets = []
for s in steps:
    # honour --step (run exactly one step) and --skip (run all but one)
    if (step != None) and (s != step):
        continue
    if (skip != None) and (s == skip):
        continue
    targets.append(generators[s](platform_name, arch, nw_ver))
print 'Creating packages...'
make_packages(targets)
# vim: et:ts=4:sw=4
| mit |
luotao1/Paddle | python/paddle/fluid/tests/unittests/dygraph_to_static/test_basic_api_transformation.py | 1 | 14296 | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import numpy as np
import unittest
import inspect
import gast
import paddle
import paddle.fluid as fluid
import paddle.fluid.dygraph as dygraph
from paddle import to_tensor
from paddle.fluid.dygraph import to_variable
from paddle.fluid.dygraph.jit import dygraph_to_static_func
from paddle.fluid.dygraph.dygraph_to_static.utils import is_dygraph_api
SEED = 2020  # fixed program seed so dygraph and static runs are comparable
np.random.seed(SEED)
def dyfunc_to_variable(x):
    # Exercises the fully-qualified fluid.dygraph.to_variable spelling with
    # explicit default kwargs (cf. dyfunc_to_variable_3, which uses the bare
    # imported name).
    res = fluid.dygraph.to_variable(x, name=None, zero_copy=None)
    return res
def dyfunc_to_variable_2(x):
    # Exercises the dygraph.to_variable alias with a keyword 'value' argument;
    # note the input x is deliberately ignored and a fixed zeros array is used.
    res = dygraph.to_variable(value=np.zeros(shape=(1), dtype=np.int32))
    return res
def dyfunc_to_variable_3(x):
    # Exercises the directly-imported to_variable name (from fluid.dygraph).
    res = to_variable(x, name=None, zero_copy=None)
    return res
def dyfunc_to_tensor(x):
    # Chains all three spellings of to_tensor (paddle.to_tensor,
    # paddle.tensor.to_tensor, imported to_tensor) so each is exercised.
    res1 = paddle.to_tensor(x, dtype=None, place=None, stop_gradient=True)
    res2 = paddle.tensor.to_tensor(data=res1)
    res3 = to_tensor(data=res2)
    return res3
class TestDygraphBasicApi_ToVariable(unittest.TestCase):
    """Check that each to_variable/to_tensor spelling produces the same
    result when run eagerly (dygraph) and after dygraph-to-static
    transformation via ``dygraph_to_static_func``."""
    def setUp(self):
        self.input = np.ones(5).astype("int32")
        self.test_funcs = [
            dyfunc_to_tensor, dyfunc_to_variable, dyfunc_to_variable_2,
            dyfunc_to_variable_3
        ]
        self.place = fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda(
        ) else fluid.CPUPlace()
    def get_dygraph_output(self):
        # Eager-mode reference result.
        with fluid.dygraph.guard():
            res = self.dygraph_func(self.input).numpy()
        return res
    def get_static_output(self):
        # Transform the dygraph function and run it inside a static program.
        main_program = fluid.Program()
        main_program.random_seed = SEED
        with fluid.program_guard(main_program):
            static_out = dygraph_to_static_func(self.dygraph_func)(self.input)
        exe = fluid.Executor(self.place)
        static_res = exe.run(main_program, fetch_list=static_out)
        return static_res[0]
    def test_transformed_static_result(self):
        for func in self.test_funcs:
            self.dygraph_func = func
            dygraph_res = self.get_dygraph_output()
            static_res = self.get_static_output()
            self.assertTrue(
                np.allclose(dygraph_res, static_res),
                msg='dygraph is {}\n static_res is {}'.format(dygraph_res,
                                                              static_res))
# 1. test Apis that inherit from layers.Layer
def dyfunc_BilinearTensorProduct(layer1, layer2):
    # Constant initializers make the parameters deterministic, so dygraph and
    # transformed static outputs can be compared numerically.
    bilinearTensorProduct = fluid.dygraph.nn.BilinearTensorProduct(
        input1_dim=5,
        input2_dim=4,
        output_dim=1000,
        param_attr=fluid.ParamAttr(
            initializer=fluid.initializer.Constant(value=0.99)),
        bias_attr=fluid.ParamAttr(
            initializer=fluid.initializer.Constant(value=0.5)))
    res = bilinearTensorProduct(
        fluid.dygraph.base.to_variable(layer1),
        fluid.dygraph.base.to_variable(layer2))
    return res
def dyfunc_Conv2D(input):
    """Applies a constant-initialized Conv2D layer to the input."""
    conv2d = fluid.dygraph.Conv2D(
        num_channels=3,
        num_filters=2,
        filter_size=3,
        param_attr=fluid.ParamAttr(
            initializer=fluid.initializer.Constant(value=0.99)),
        bias_attr=fluid.ParamAttr(
            initializer=fluid.initializer.Constant(value=0.5)), )
    res = conv2d(input)
    return res
def dyfunc_Conv3D(input):
    """Applies a constant-initialized Conv3D layer to the input."""
    conv3d = fluid.dygraph.Conv3D(
        num_channels=3,
        num_filters=2,
        filter_size=3,
        param_attr=fluid.ParamAttr(
            initializer=fluid.initializer.Constant(value=0.99)),
        bias_attr=fluid.ParamAttr(
            initializer=fluid.initializer.Constant(value=0.5)), )
    res = conv3d(input)
    return res
def dyfunc_Conv2DTranspose(input):
    """Applies a constant-initialized Conv2DTranspose layer (cuDNN disabled
    for deterministic output) to the input."""
    conv2dTranspose = fluid.dygraph.nn.Conv2DTranspose(
        num_channels=3,
        num_filters=12,
        filter_size=12,
        use_cudnn=False,
        param_attr=fluid.ParamAttr(
            initializer=fluid.initializer.Constant(value=0.99)),
        bias_attr=fluid.ParamAttr(
            initializer=fluid.initializer.Constant(value=0.5)), )
    ret = conv2dTranspose(input)
    return ret
def dyfunc_Conv3DTranspose(input):
    """Applies a constant-initialized Conv3DTranspose layer (cuDNN disabled
    for deterministic output) to the input."""
    conv3dTranspose = fluid.dygraph.nn.Conv3DTranspose(
        num_channels=3,
        num_filters=12,
        filter_size=12,
        use_cudnn=False,
        param_attr=fluid.ParamAttr(
            initializer=fluid.initializer.Constant(value=0.99)),
        bias_attr=fluid.ParamAttr(
            initializer=fluid.initializer.Constant(value=0.5)), )
    ret = conv3dTranspose(input)
    return ret
def dyfunc_Linear(input):
    """Applies a constant-initialized Linear layer with ReLU activation."""
    fc = fluid.dygraph.Linear(
        input_dim=10,
        output_dim=5,
        act='relu',
        param_attr=fluid.ParamAttr(
            initializer=fluid.initializer.Constant(value=0.99)),
        bias_attr=fluid.ParamAttr(
            initializer=fluid.initializer.Constant(value=0.5)), )
    res = fc(input)
    return res
def dyfunc_Pool2D(input):
    """Applies an average Pool2D layer to the input."""
    # The first Pool2D instance is created and immediately discarded;
    # presumably this exercises the transform on an unused dygraph layer
    # construction -- TODO confirm intent before removing it.
    fluid.dygraph.Pool2D(
        pool_size=2, pool_type='avg', pool_stride=1, global_pooling=False)
    pool2d = fluid.dygraph.Pool2D(
        pool_size=2, pool_type='avg', pool_stride=1, global_pooling=False)
    res = pool2d(input)
    return res
def dyfunc_Prelu(input):
    """Applies a PRelu layer (single shared alpha, initialized to 1.0)."""
    prelu0 = fluid.PRelu(
        mode='all',
        param_attr=fluid.ParamAttr(initializer=fluid.initializer.Constant(1.0)))
    res = prelu0(input=input)
    return res
class TestDygraphBasicApi(unittest.TestCase):
    """
    Compare results of a dynamic-graph function and its transformed
    static-graph counterpart, for functions that only use a basic
    layers.Layer-derived API.
    """

    def setUp(self):
        self.input = np.random.random((1, 4, 3, 3)).astype('float32')
        self.dygraph_func = dyfunc_Pool2D

    def get_dygraph_output(self):
        """Run self.dygraph_func eagerly with fixed seeds; return numpy result."""
        with fluid.dygraph.guard():
            # BUGFIX: default_startup_program/default_main_program are
            # functions.  The original code assigned `random_seed` on the
            # function objects themselves, which is a silent no-op; they
            # must be called to obtain the Program whose seed is set.
            fluid.default_startup_program().random_seed = SEED
            fluid.default_main_program().random_seed = SEED
            data = fluid.dygraph.to_variable(self.input)
            res = self.dygraph_func(data).numpy()
            return res

    def get_static_output(self):
        """Run the dygraph-to-static transformed function; return numpy result."""
        startup_program = fluid.Program()
        startup_program.random_seed = SEED
        main_program = fluid.Program()
        main_program.random_seed = SEED
        with fluid.program_guard(main_program, startup_program):
            data = fluid.layers.assign(self.input)
            static_out = dygraph_to_static_func(self.dygraph_func)(data)
        exe = fluid.Executor(fluid.CPUPlace())
        exe.run(startup_program)
        static_res = exe.run(main_program, fetch_list=static_out)
        return static_res[0]

    def test_transformed_static_result(self):
        dygraph_res = self.get_dygraph_output()
        static_res = self.get_static_output()
        self.assertTrue(
            np.allclose(dygraph_res, static_res),
            msg='dygraph is {}\n static_res is \n{}'.format(dygraph_res,
                                                            static_res))
class TestDygraphBasicApi_BilinearTensorProduct(TestDygraphBasicApi):
    """Same comparison for BilinearTensorProduct, which takes two inputs."""

    def setUp(self):
        self.input1 = np.random.random((5, 5)).astype('float32')
        self.input2 = np.random.random((5, 4)).astype('float32')
        self.dygraph_func = dyfunc_BilinearTensorProduct

    def get_dygraph_output(self):
        with fluid.dygraph.guard():
            # BUGFIX: call default_startup_program()/default_main_program();
            # assigning on the function object is a silent no-op.
            fluid.default_startup_program().random_seed = SEED
            fluid.default_main_program().random_seed = SEED
            res = self.dygraph_func(self.input1, self.input2).numpy()
            return res

    def get_static_output(self):
        startup_program = fluid.Program()
        startup_program.random_seed = SEED
        main_program = fluid.Program()
        main_program.random_seed = SEED
        with fluid.program_guard(main_program, startup_program):
            static_out = dygraph_to_static_func(self.dygraph_func)(self.input1,
                                                                   self.input2)
        exe = fluid.Executor(fluid.CPUPlace())
        exe.run(startup_program)
        static_res = exe.run(main_program, fetch_list=static_out)
        return static_res[0]
class TestDygraphBasicApi_Conv2D(TestDygraphBasicApi):
    """Shared comparison test with dyfunc_Conv2D on an NCHW float32 input."""

    def setUp(self):
        self.input = np.random.random((1, 3, 3, 5)).astype('float32')
        self.dygraph_func = dyfunc_Conv2D
class TestDygraphBasicApi_Conv3D(TestDygraphBasicApi):
    """Shared comparison test with dyfunc_Conv3D on a 5-D float32 input."""

    def setUp(self):
        self.input = np.random.random((1, 3, 3, 3, 5)).astype('float32')
        self.dygraph_func = dyfunc_Conv3D
class TestDygraphBasicApi_Conv2DTranspose(TestDygraphBasicApi):
    """Shared comparison test with dyfunc_Conv2DTranspose."""

    def setUp(self):
        self.input = np.random.random((5, 3, 32, 32)).astype('float32')
        self.dygraph_func = dyfunc_Conv2DTranspose
class TestDygraphBasicApi_Conv3DTranspose(TestDygraphBasicApi):
    """Shared comparison test with dyfunc_Conv3DTranspose."""

    def setUp(self):
        self.input = np.random.random((5, 3, 12, 32, 32)).astype('float32')
        self.dygraph_func = dyfunc_Conv3DTranspose
class TestDygraphBasicApi_Linear(TestDygraphBasicApi):
    """Shared comparison test with dyfunc_Linear."""

    def setUp(self):
        self.input = np.random.random((4, 3, 10)).astype('float32')
        self.dygraph_func = dyfunc_Linear
class TestDygraphBasicApi_Prelu(TestDygraphBasicApi):
    """Shared comparison test with dyfunc_Prelu on an all-ones input."""

    def setUp(self):
        self.input = np.ones([5, 20, 10, 10]).astype('float32')
        self.dygraph_func = dyfunc_Prelu
# 2. test Apis that inherit from LearningRateDecay
def dyfunc_CosineDecay():
    """Builds a CosineDecay schedule and returns the current learning rate."""
    base_lr = 0.1
    CosineDecay = fluid.dygraph.CosineDecay(
        learning_rate=base_lr, step_each_epoch=10000, epochs=120)
    lr = CosineDecay()
    return lr
def dyfunc_ExponentialDecay():
    """Builds an ExponentialDecay schedule and returns the current rate."""
    base_lr = 0.1
    exponential_decay = fluid.dygraph.ExponentialDecay(
        learning_rate=base_lr,
        decay_steps=10000,
        decay_rate=0.5,
        staircase=True)
    lr = exponential_decay()
    return lr
def dyfunc_InverseTimeDecay():
    """Builds an InverseTimeDecay schedule and returns the current rate."""
    base_lr = 0.1
    inverse_time_decay = fluid.dygraph.InverseTimeDecay(
        learning_rate=base_lr,
        decay_steps=10000,
        decay_rate=0.5,
        staircase=True)
    lr = inverse_time_decay()
    return lr
def dyfunc_NaturalExpDecay():
    """Builds a NaturalExpDecay schedule and returns the current rate."""
    base_lr = 0.1
    natural_exp_decay = fluid.dygraph.NaturalExpDecay(
        learning_rate=base_lr,
        decay_steps=10000,
        decay_rate=0.5,
        staircase=True)
    lr = natural_exp_decay()
    return lr
def dyfunc_NoamDecay():
    """Builds a NoamDecay schedule (d_model=100, warmup=100) and returns the rate."""
    noam_decay = fluid.dygraph.NoamDecay(100, 100)
    lr = noam_decay()
    return lr
def dyfunc_PiecewiseDecay():
    """Builds a PiecewiseDecay schedule and returns the current rate."""
    boundaries = [10000, 20000]
    values = [1.0, 0.5, 0.1]
    pd = fluid.dygraph.PiecewiseDecay(boundaries, values, begin=0)
    lr = pd()
    return lr
def dyfunc_PolynomialDecay():
    """Builds a PolynomialDecay schedule and returns the current rate."""
    start_lr = 0.01
    total_step = 5000
    end_lr = 0
    pd = fluid.dygraph.PolynomialDecay(start_lr, total_step, end_lr, power=1.0)
    lr = pd()
    return lr
class TestDygraphBasicApi_CosineDecay(unittest.TestCase):
    """Compares a zero-argument learning-rate-decay function run eagerly
    against its dygraph-to-static transformed version."""

    def setUp(self):
        self.dygraph_func = dyfunc_CosineDecay

    def get_dygraph_output(self):
        with fluid.dygraph.guard():
            # BUGFIX: default_startup_program/default_main_program are
            # functions; assigning `random_seed` on the function object is a
            # silent no-op.  They must be called first.
            fluid.default_startup_program().random_seed = SEED
            fluid.default_main_program().random_seed = SEED
            res = self.dygraph_func().numpy()
            return res

    def get_static_output(self):
        startup_program = fluid.Program()
        startup_program.random_seed = SEED
        main_program = fluid.Program()
        main_program.random_seed = SEED
        with fluid.program_guard(main_program, startup_program):
            static_out = dygraph_to_static_func(self.dygraph_func)()
        exe = fluid.Executor(fluid.CPUPlace())
        exe.run(startup_program)
        static_res = exe.run(main_program, fetch_list=static_out)
        return static_res[0]

    def test_transformed_static_result(self):
        dygraph_res = self.get_dygraph_output()
        static_res = self.get_static_output()
        self.assertTrue(
            np.allclose(dygraph_res, static_res),
            msg='dygraph is {}\n static_res is \n{}'.format(dygraph_res,
                                                            static_res))
class TestDygraphBasicApi_ExponentialDecay(TestDygraphBasicApi_CosineDecay):
    """Shared decay comparison with dyfunc_ExponentialDecay."""

    def setUp(self):
        self.dygraph_func = dyfunc_ExponentialDecay
class TestDygraphBasicApi_InverseTimeDecay(TestDygraphBasicApi_CosineDecay):
    """Shared decay comparison with dyfunc_InverseTimeDecay."""

    def setUp(self):
        self.dygraph_func = dyfunc_InverseTimeDecay
class TestDygraphBasicApi_NaturalExpDecay(TestDygraphBasicApi_CosineDecay):
    """Shared decay comparison with dyfunc_NaturalExpDecay."""

    def setUp(self):
        self.dygraph_func = dyfunc_NaturalExpDecay
class TestDygraphBasicApi_NoamDecay(TestDygraphBasicApi_CosineDecay):
    """Shared decay comparison with dyfunc_NoamDecay."""

    def setUp(self):
        self.dygraph_func = dyfunc_NoamDecay
class TestDygraphBasicApi_PiecewiseDecay(TestDygraphBasicApi_CosineDecay):
    """Shared decay comparison with dyfunc_PiecewiseDecay."""

    def setUp(self):
        self.dygraph_func = dyfunc_PiecewiseDecay
class TestDygraphBasicApi_PolynomialDecay(TestDygraphBasicApi_CosineDecay):
    """Shared decay comparison with dyfunc_PolynomialDecay."""

    def setUp(self):
        self.dygraph_func = dyfunc_PolynomialDecay
def _dygraph_fn():
    # NOTE: this function is introspected by TestDygraphApiRecognition via
    # inspect.getsource + gast.parse.  Its statement layout (import, assign,
    # with-block containing exactly two statements) is asserted by index,
    # so no docstring is added and no statements may be inserted.
    import paddle.fluid as fluid
    x = np.random.random((1, 3)).astype('float32')
    with fluid.dygraph.guard():
        fluid.dygraph.to_variable(x)  # dygraph API call (with-body index 0)
        np.random.random((1))  # plain numpy call, not a dygraph API (index 1)
class TestDygraphApiRecognition(unittest.TestCase):
    """Verifies is_dygraph_api distinguishes dygraph calls from numpy calls
    by parsing the source of _dygraph_fn."""

    def setUp(self):
        self.src = inspect.getsource(_dygraph_fn)
        self.root = gast.parse(self.src)

    def _get_dygraph_ast_node(self):
        # body[0] is the function def; body[2] its with-block; index 0 is
        # the fluid.dygraph.to_variable call.
        return self.root.body[0].body[2].body[0].value

    def _get_static_ast_node(self):
        # Second statement of the with-block: the plain numpy call.
        return self.root.body[0].body[2].body[1].value

    def test_dygraph_api(self):
        self.assertTrue(is_dygraph_api(self._get_dygraph_ast_node()) is True)
        self.assertTrue(is_dygraph_api(self._get_static_ast_node()) is False)
# Script entry point: run all unittest cases in this module.
if __name__ == '__main__':
    unittest.main()
| apache-2.0 |
levelrf/level_basestation | grc/gui/Bars.py | 17 | 4092 | """
Copyright 2007, 2008, 2009 Free Software Foundation, Inc.
This file is part of GNU Radio
GNU Radio Companion is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
GNU Radio Companion is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
import Actions
import pygtk
pygtk.require('2.0')
import gtk
##The list of actions for the toolbar.
# None entries are rendered as separator items between action groups
# (see Toolbar.__init__ below).
TOOLBAR_LIST = (
    Actions.FLOW_GRAPH_NEW,
    Actions.FLOW_GRAPH_OPEN,
    Actions.FLOW_GRAPH_SAVE,
    Actions.FLOW_GRAPH_CLOSE,
    None,
    Actions.FLOW_GRAPH_SCREEN_CAPTURE,
    None,
    Actions.BLOCK_CUT,
    Actions.BLOCK_COPY,
    Actions.BLOCK_PASTE,
    Actions.ELEMENT_DELETE,
    None,
    Actions.FLOW_GRAPH_UNDO,
    Actions.FLOW_GRAPH_REDO,
    None,
    Actions.ERRORS_WINDOW_DISPLAY,
    Actions.FLOW_GRAPH_GEN,
    Actions.FLOW_GRAPH_EXEC,
    Actions.FLOW_GRAPH_KILL,
    None,
    Actions.BLOCK_ROTATE_CCW,
    Actions.BLOCK_ROTATE_CW,
    None,
    Actions.BLOCK_ENABLE,
    Actions.BLOCK_DISABLE,
    None,
    Actions.RELOAD_BLOCKS,
    Actions.OPEN_HIER,
)
##The list of actions and categories for the menu bar.
# Each entry pairs a top-level gtk.Action (the menu title) with the list of
# actions in its submenu; None entries become separator items.
MENU_BAR_LIST = (
    (gtk.Action('File', '_File', None, None), [
        Actions.FLOW_GRAPH_NEW,
        Actions.FLOW_GRAPH_OPEN,
        None,
        Actions.FLOW_GRAPH_SAVE,
        Actions.FLOW_GRAPH_SAVE_AS,
        None,
        Actions.FLOW_GRAPH_SCREEN_CAPTURE,
        None,
        Actions.FLOW_GRAPH_CLOSE,
        Actions.APPLICATION_QUIT,
    ]),
    (gtk.Action('Edit', '_Edit', None, None), [
        Actions.FLOW_GRAPH_UNDO,
        Actions.FLOW_GRAPH_REDO,
        None,
        Actions.BLOCK_CUT,
        Actions.BLOCK_COPY,
        Actions.BLOCK_PASTE,
        Actions.ELEMENT_DELETE,
        None,
        Actions.BLOCK_ROTATE_CCW,
        Actions.BLOCK_ROTATE_CW,
        None,
        Actions.BLOCK_ENABLE,
        Actions.BLOCK_DISABLE,
        None,
        Actions.BLOCK_PARAM_MODIFY,
    ]),
    (gtk.Action('View', '_View', None, None), [
        Actions.ERRORS_WINDOW_DISPLAY,
    ]),
    (gtk.Action('Build', '_Build', None, None), [
        Actions.FLOW_GRAPH_GEN,
        Actions.FLOW_GRAPH_EXEC,
        Actions.FLOW_GRAPH_KILL,
    ]),
    (gtk.Action('Help', '_Help', None, None), [
        Actions.HELP_WINDOW_DISPLAY,
        Actions.TYPES_WINDOW_DISPLAY,
        None,
        Actions.ABOUT_WINDOW_DISPLAY,
    ]),
)
class Toolbar(gtk.Toolbar):
    """The gtk toolbar with actions added from the toolbar list."""

    def __init__(self):
        """
        Parse the list of action names in the toolbar list.
        Look up the action for each name in the action list and add it to the toolbar.
        """
        gtk.Toolbar.__init__(self)
        self.set_style(gtk.TOOLBAR_ICONS)
        for action in TOOLBAR_LIST:
            if action: #add a tool item
                self.add(action.create_tool_item())
                #this reset of the tooltip property is required (after creating the tool item) for the tooltip to show
                action.set_property('tooltip', action.get_property('tooltip'))
            else: self.add(gtk.SeparatorToolItem())
class MenuBar(gtk.MenuBar):
    """The gtk menu bar with actions added from the menu bar list."""

    def __init__(self):
        """
        Parse the list of submenus from the menubar list.
        For each submenu, get a list of action names.
        Look up the action for each name in the action list and add it to the submenu.
        Add the submenu to the menu bar.
        """
        gtk.MenuBar.__init__(self)
        for main_action, actions in MENU_BAR_LIST:
            #create the main menu item
            main_menu_item = main_action.create_menu_item()
            self.append(main_menu_item)
            #create the menu
            main_menu = gtk.Menu()
            main_menu_item.set_submenu(main_menu)
            for action in actions:
                if action: #append a menu item
                    main_menu.append(action.create_menu_item())
                else: main_menu.append(gtk.SeparatorMenuItem())
            main_menu.show_all() #this show all is required for the separators to show
pepitogithub/PythonScripts | varios/finalrl.py | 1 | 2373 | #-------------------------------------------------
#------------------- Imports -------------------
#-------------------------------------------------
import os
#-------------------------------------------------
# --------- Sacar el enter de un STR -----------
#-------------------------------------------------
# copia un string igual que el que entra pero sin el "\n"
def sacarenter(palabra):
    # Return `palabra` without its trailing newline, if any.
    #
    # BUGFIX: the original copied every character except the last one,
    # unconditionally chopping the final character even when the string did
    # not end in "\n" (e.g. the last line of a file without a trailing
    # newline, as returned by readline() at EOF).  It was also O(n^2) due
    # to repeated string concatenation.  Stripping only a trailing "\n"
    # matches the documented intent ("sacar el enter" = remove the enter).
    if palabra.endswith('\n'):
        return palabra[:-1]
    return palabra
#-------------------------------------------------
#---------------- Clase mapa -------------------
#-------------------------------------------------
class mapa():
    """Tile map loaded from a text file, together with the NPCs placed on it."""

    def __init__(self):
        # matriz holds the bare map grid; map is the same grid with the NPC
        # markers drawn on top.
        # NOTE(review): `self.map = self.matriz` in cargar() aliases the two
        # attributes to the same row lists -- drawing an NPC also mutates
        # matriz.  Verify whether an independent copy was intended.
        self.matriz = []
        self.map = []
        self.npcs = {}

    def ver(self):
        # Render the grid row by row (Python 2 print statement).
        for i in range(0,len(self.map)):
            for j in range(0,len(self.map[0])):
                print self.map[i][j],
            print '\n',

    def cargar(self,archivo):
        # Load the grid section (up to the "-npcs-" marker line) and then
        # the NPC section (triples of lines: name, x, y) from the file.
        mapa = open(archivo)
        vector = []
        # Load the map into a matrix, one character per cell.
        linea = mapa.readline()
        while (linea != "-npcs-\n"):
            linea = sacarenter(linea)
            for dato in linea:
                vector.append(dato)
            self.matriz.append(vector)
            vector = []
            linea = mapa.readline()
        self.map = self.matriz
        # Read NPC records until an empty name line (EOF) is hit.  The outer
        # condition never changes; the loop only exits through the break.
        while (linea != ''):
            npc = sacarenter(mapa.readline())
            if (npc != ''):
                x = sacarenter(mapa.readline())
                y = sacarenter(mapa.readline())
                self.cargarnpc(npc,x,y)
            else:
                break
        print self.npcs
        mapa.close()

    def cargarnpc(self,npc,x,y):
        # Register the NPC and draw its marker on the grid.  x and y arrive
        # as strings read from the file, hence the int() conversions.
        print npc
        print x
        print y
        self.npcs[npc] = personaje(x,y)
        self.map[int(y)][int(x)] = npc
#-------------------------------------------------
#--------------- Clase personaje ---------------
#-------------------------------------------------
class personaje():
    """A character on the map, identified by its (x, y) position.

    Coordinates are stored exactly as passed in (the map loader hands over
    the raw strings read from the map file).
    """

    def __init__(self, x, y):
        self.x, self.y = x, y

    def mover(self):
        """Movement behaviour -- not implemented yet."""
        pass

    def pasar(self):
        """Turn-skipping behaviour -- not implemented yet."""
        pass
#-------------------------------------------------
#----------------- Turnador --------------------
#-------------------------------------------------
class turnador():
    """Turn scheduler -- placeholder, not implemented yet."""

    def __init__(self):
        pass
#-------------------------------------------------
#----------------- acciones --------------------
#-------------------------------------------------
os.system("clear")
mapa1 = mapa()
mapa1.cargar("./mapa1")
mapa1.ver()
| gpl-2.0 |
akubera/AliPhysics | PWGJE/EMCALJetTasks/Tracks/analysis/util/SpectrumTools.py | 41 | 27252 | #**************************************************************************
#* Copyright(c) 1998-2014, ALICE Experiment at CERN, All rights reserved. *
#* *
#* Author: The ALICE Off-line Project. *
#* Contributors are mentioned in the code where appropriate. *
#* *
#* Permission to use, copy, modify and distribute this software and its *
#* documentation strictly for non-commercial purposes is hereby granted *
#* without fee, provided that the above copyright notice appears in all *
#* copies and that both the copyright notice and this permission notice *
#* appear in the supporting documentation. The authors make no claims *
#* about the suitability of this software for any purpose. It is *
#* provided "as is" without express or implied warranty. *
#**************************************************************************
"""
Toolset module for histogram operations
Original author (ROOT macro):
@author: Jacek Otwinowski
@organization: ALICE Collaboration
Translated into PYTHON by
@author: Markus Fasel
@contact: markus.fasel@cern.ch
@organization: ALICE Collaboration
@organization: Lawrence Berkeley National Laboratory
@copyright: 1998-2014, ALICE Experiment at CERN, All rights reserved.
"""
from ROOT import TF1, TGraph, TGraphAsymmErrors, TGraphErrors, TMultiGraph
from PWGJE.EMCALJetTasks.Tracks.analysis.util.Interpolator import Interpolator
import math
from copy import deepcopy
from numpy import array
from scipy.optimize import fsolve
import functools
import operator
class SpectrumTools(object):
    """
    Toolset for histogram operations
    """

    def __init__(self):
        """
        Constructor (stateless: all methods operate only on their arguments)
        """
        pass
def DivideGraphs(self, g1, g2, interpolationMethod = "lin"):
"""
Divide 2 graphs g1 and g2, by each other applying an interpolation between the points for the denominator
@param g1: graph 1
@param g2: graph 2
@param interpolationMethod: method used for interpolation between the two graphs
@return: The ratio graphs
"""
#calculate x range
xmin = max(g1.GetX()[0], g2.GetX()[0])
xmax = min(g1.GetX()[g1.GetN()-1], g2.GetX()[g2.GetN()-1])
# create new TGraphErors for result1
result1 = self.MakeGraphDivision(g1, g2, xmin, xmax, interpolationMethod)
result1.SetMarkerColor(1);
result1.SetMarkerStyle(20);
# create new TGraphErors for result2
result2 = self.MakeGraphDivision(g2, g1, xmin, xmax, interpolationMethod)
result2.SetMarkerColor(2)
result2.SetMarkerStyle(20)
result = TMultiGraph("result"," ")
result.Add(result1)
result.Add(result2)
result.Draw("AP")
result.GetXaxis().SetRangeUser(xmin,xmax)
result.GetXaxis().SetTitle("p_{T} [Gev/c]")
result.GetXaxis().SetTitleOffset(1.2)
result.GetYaxis().SetTitleOffset(1.2)
return result
def MakeGraphDivision(self, numerator, denominator, xmin, xmax, interpolationMethod = "lin"):
"""
Divide 2 graphs by each other.
For the denominator graph use interpolation
@param numerator: Numerator graph
@param denominator: Denominator graph
@param xmin: lower boundary for calculation
@param xmax: upper boundary for calculation
@param interpolationMethod: method used for the interpolation
@return: the ratio of the graphs
"""
startBin = numerator.GetN()
endBin = -1
nBins = 0
for i in range(0, numerator.GetN()+1):
x = numerator.GetX()[i]
if x >= xmin and x <= xmax:
nBins += 1
if i > endBin:
endBin = i
if i < startBin:
startBin = i
print "startBin %d" %(startBin)
print "endBin %d" %(endBin)
print "nBins %d" %(nBins)
print "xmin %f" %(xmin)
print "xmax %f" %(xmax)
# create new TGraphErors for result1
result = TGraphErrors(nBins);
interpolator = Interpolator()
for i in range(startBin, endBin+1):
x = numerator.GetX()[i]
lower, upper = self.__FindNeighbors(x, denominator)
# after break the two neighboring bins are found: k-1 and k
y1 = denominator.GetY()[lower]
y2 = denominator.GetY()[upper]
ey1 = denominator.GetEY()[lower]
ey2 = denominator.GetEY()[upper]
if x == lower:
y = y1
ey = ey1
elif x == upper:
y = y2
ey = ey2
else:
y = interpolator.Interpolate(x,lower,y1,upper,y2, method = interpolationMethod)
ey = interpolator.Interpolate(x,lower,ey1,upper,ey2, method=interpolationMethod)
numy = numerator.GetY()[i]
numey = numerator.GetEY()[i]
yr = 0
eyr = 0
if y == 0:
yr = 0
else:
yr = numy / y
if y != 0 and numy != 0:
eyr = (yr * math.sqrt((ey/y)*(ey/y) + (numey/numy)*(numey/numy)))
else:
eyr = 0
result.SetPoint(i-startBin,x,yr);
result.SetPointError(i-startBin,numerator.GetEX()[i],eyr)
return result
def __FindNeighbors(self, x, graph):
"""
Find the upper and lower neighbor of point x in the graph
@param x: point to evaluate
@param graph: graph to check
"""
lower = -1
upper = -1
for i in range(0, graph.GetN()-1):
if x >= graph.GetX()[i] and x <= graph.GetX()[i+1]:
lower = i
upper = i+1
break
return lower, upper
    def RebinPtSpectrum(self, h, nBins = 0, xBins = None):
        """
        Apply rebinning of the spectrum
        @param h: Input histogram (assumed to hold dN/dpt-like content)
        @param nBins: Number of bins of the target binning
        @param xBins: Bin edges of the new histogram (len nBins + 1)
        @return: The rebinned histogram; the input histogram is returned
                 unchanged when nBins or xBins is missing
        """
        if not h:
            return None
        if not nBins:
            return h
        if not xBins:
            return h
        h1 = deepcopy(h)
        h1.Sumw2()
        # Convert each bin to a weight (content * pt * width) so that
        # Rebin() sums physically meaningful quantities.
        for i in range(1, h1.GetNbinsX()+1):
            value = h1.GetBinContent(i)
            width = h1.GetBinWidth(i)
            center = h1.GetBinCenter(i)
            error = h1.GetBinError(i)
            h1.SetBinContent(i,value*center*width)
            h1.SetBinError(i,error*center*width)
        h2 = h1.Rebin(nBins,"hnew",array(xBins))
        # Undo the weighting with the new bin centers/widths.
        for i in range(1, h2.GetNbinsX()+1):
            value = h2.GetBinContent(i)
            width = h2.GetBinWidth(i)
            center = h2.GetBinCenter(i)
            error = h2.GetBinError(i)
            h2.SetBinContent(i,value/(center*width))
            h2.SetBinError(i,error/(center*width))
        return h2
def ApplyBinShiftCorrection(self, hist):
"""
Apply bin-shift correction to the input spectrum using an iterative procedure
@param hist: Input spectrum
@return: Bin-shift corrected spectrum
"""
h = deepcopy(hist)
h.SetName("htemp")
# Bin shift correction performed in model specturm * pt
for i in range(1, h.GetNbinsX()+1):
pt = h.GetBinCenter(i)
h.SetBinContent(i, h.GetBinContent(i)*pt)
h.SetBinError(i, h.GetBinError(i)*pt)
result = TGraphErrors(h)
for i in range(0, result.GetN()):
result.GetEX()[i] = 0.
fitfun = TF1("fitfun","([0]*(1.+x/[1])^(-[2])*x)-[3]",0.15,100.0)
fitfun.SetParameter(0,1000)
fitfun.SetParameter(1,1)
fitfun.SetParameter(2,5)
fitfun.FixParameter(3,0)
h.Fit(fitfun,"")
self.__StableFit(h, fitfun, True)
# Iterative approach:
# - Use model to get the mean of the function inside the bin
# - Get the X where the mean is found
# - Use the new coordinate (x,y) for the next iteration of the fit
# for now 10 iterations fixed
for k in range(1, 11):
for i in range(1, h.GetNbinsX()+1):
y = fitfun.Integral(h.GetBinLowEdge(i), h.GetBinUpEdge(i)) / h.GetBinWidth(i)
result.GetX()[i-1] = self.FindX(y, fitfun, h.GetBinLowEdge(i), h.GetBinUpEdge(i))
self.__StableFit(result, fitfun, False)
# Undo multiplication with pt
for i in range(0, result.GetN()):
pt = result.GetX()[i]
result.GetY()[i] /= pt
result.GetEY()[i] /= pt
#remove points that are 0
while result.GetY()[0] < 1.e-99:
result.RemovePoint(0)
bval = 0
for mybin in range(0, result.GetN()+1):
if result.GetY()[bin] < 1.e-99:
bval = mybin
break
while result.RemovePoint(bval) > 0:
continue
return result
def ApplyBinShiftCorrectionGeneral(self, hist, fit):
"""
Alternative method for bin shift correction:
- Apply user-default model for bin-shift correction
- don't multiply by pt
@param hist: Input spectrum for the bin shift correction
@param fit: Model for the bin-shift correction
@return: The bin-shift corrected spectrum as graph
"""
h = deepcopy(hist)
hist.SetName("htemp")
result = TGraphErrors(h);
for i in range(0, result.GetN()):
result.GetEX()[i] = 0.
y = 0
#for now 10 iterations fixes
for k in range(0, 10):
for i in range(1, h.GetNbinsX()+1):
y = fit.Integral(h.GetBinLowEdge(i),h.GetBinUpEdge(i)) / h.GetBinWidth(i)
x = self.FindX(y, fit, h.GetBinLowEdge(i), h.GetBinUpEdge(i))
result.GetX()[i-1] = x
# remove points that are 0
while result.GetY()[0] < 1.e-99:
result.RemovePoint(0)
mybin = 0
for biniter in range(0, result.GetN()):
if result.GetY()[biniter] < 1.e-99:
mybin = biniter
break
while result.RemovePoint(mybin) > 0:
continue
return result
    def __StableFit(self, inputdata, model, doIntegral = False):
        """
        Perform stable fit: Fit until parameters don't change anymore with iteration
        @param inputdata: Input data to fit (TGraph or TH)
        @param model: Fit model
        @param doIntegral: if true we perform integration during fit (ROOT "I" option)
        """
        # Convergence heuristic: the *product* of all fit parameters must
        # repeat between two consecutive fits.
        # NOTE(review): `last` starts at 0, so a first fit whose parameter
        # product happens to be 0 terminates immediately; and if the fit
        # never stabilizes this loop never ends -- confirm acceptable.
        last = 0
        while True:
            inputdata.Fit(model,"IMR" if doIntegral else "MR")
            params = []
            for ipar in range(0, model.GetNumberOfParameters()):
                params.append(model.GetParameter(ipar))
            current = functools.reduce(operator.mul, params)
            if current == last:
                break
            last= current
def FindX(self, y, function, xmin, xmax):
"""
ROOT-finding in PYTHON style
@param y: y-value
@param function: input of the equation
@param xmin: min. x (for initial guess)
@param xmax: max. x (for initial guess)
@return: Solution of the equation f(x) = y
"""
return fsolve(lambda x : function.Eval(x)-y, (xmax - xmin)/2.)
    def PerformSpectrumNormalization(self, h, nevents, etarange):
        """
        Perform normalization (in place)
        1/(2 pi pt deltaEta Nevents), including division by the bin width
        @param h: Spectrum histogram (modified in place)
        @param nevents: Number of events to scale
        @param etarange: Delta eta
        @deprecated: Use Normalization class in the correction package
        """
        for i in range(1, h.GetNbinsX()+1):
            pt = h.GetBinCenter(i)
            width = h.GetBinWidth(i)
            val = h.GetBinContent(i)
            err = h.GetBinError(i)
            cval = 0
            cerr = 0
            # Guard against a vanishing normalization denominator; such
            # bins are zeroed instead of dividing by 0.
            if (etarange*nevents>0):
                cval = (val)/(width * 2.0 * math.pi * etarange * nevents * pt)
                cerr = (err)/(width * 2.0 * math.pi * etarange * nevents * pt)
            h.SetBinContent(i,cval);
            h.SetBinError(i,cerr);
def MakeHisrogramFromGraph(self, g, prototype, options = "lin"):
"""
Create a histogram from a TGraphErrors:
options can be "lin" (default), "log", "exp", "pow" for the functional shape
and "I" for intergal
if g is a TGraphErrors errors will be calculated assuming uncorrelated errors
if option "E" is given errors are calculated
if option "EX" is given errors only from X errors
"EY" is give only from Y
"E0" supresses calculation of errors
"EC" calculates correlated errors (for systematics)
"I2" uses integral for value, but eval for errors
@param g: inputgraph
@param prototype: prototype histogram
@param option: see above
@return: Histogram created from the graph
"""
errx = False # flag for x errors
erry = False # flag for y errors
errc = False # flag for syst errors
if isinstance(g, TGraphErrors):
errx = True
erry = True
if "E" in options:
errx = True
erry = True
if "EX" in options:
erry = False
if "EY" in options:
errx = False
if "EC" in options:
errx = False
erry = True
errc = True
if "E0" in options:
errx = False
erry = False
errc = False
if isinstance(g, TGraphErrors) and not g.GetEX():
errx = False
if isinstance(g, TGraphErrors) and not g.GetEY():
erry = False
xmin = g.GetX()[0]
xmax = g.GetX()[g.GetN()-1]
h = deepcopy(prototype)
h.Reset()
dx1 = 0
dx2 = 0
ey = 0
for i in range(1, h.GetNbinsX()+1):
x = h.GetBinCenter(i)
# check the x range
if x < xmin:
continue
if x > xmax:
break
# find point k in g closest in x
lower, upper = self.__FindNeighbors(x, g)
# now x1 and x2 are the points next to x
x1 = g.GetX()[lower]
x2 = g.GetX()[upper]
y1 = g.GetY()[lower]
y2 = g.GetY()[upper]
y = self.__GetInterpolatedValue(x,x1,y1,x2,y2,options,h.GetXaxis().GetBinLowEdge(i),h.GetXaxis().GetBinUpEdge(i))
if errx:
exlow, exhigh = self.__GetXerrors(g, lower)
dx1 = max(exlow, exhigh) if exlow and exhigh else 0.
exlow, exhigh = self.__GetXerrors(g, upper)
dx2 = max(exlow, exhigh) if exlow and exhigh else 0.
if erry:
eylow, eyhigh = self.__GetYerrors(g, lower)
dy1 = max(eylow, eyhigh) if eylow and eyhigh else 0.
eylow, eyhigh = self.__GetYerrors(g, upper)
dy2 = max(eylow, eyhigh) if eylow and eyhigh else 0.
if errx or erry:
if errc:
ymax = self.__GetInterpolatedValue(x,x1,y1+dy1,x2,y2+dy2,options,h.GetXaxis().GetBinLowEdge(i),h.GetXaxis().GetBinUpEdge(i))
ymin = self.__GetInterpolatedValue(x,x1,y1-dy1,x2,y2-dy2,options,h.GetXaxis().GetBinLowEdge(i),h.GetXaxis().GetBinUpEdge(i))
ey = max(math.fabs(y-ymin),math.fabs(y-ymax))
else:
ey = self.__GetInterpolatedUncertainty(x,x1,y1,x2,y2,dx1,dy1,dx2,dy2,options,h.GetXaxis().GetBinLowEdge(i),h.GetXaxis().GetBinUpEdge(i))
h.SetBinContent(i,y)
h.SetBinError(i,ey)
h.SetName(g.GetName())
return h
def __GetXerrors(self, inputgraph, pointID):
"""
Get the x-errors (low, up) in a transparent way for TGraph and TGraphErrors
@param inputgraph: input for the graph
@return: tuple of lower and upper x error
"""
if isinstance(inputgraph, TGraphAsymmErrors):
return inputgraph.GetEXlow()[pointID], inputgraph.GetEXhigh()[pointID]
elif isinstance(inputgraph, TGraphErrors):
return inputgraph.GetEX()[pointID], inputgraph.GetEX()[pointID]
else:
return None
def __GetYerrors(self, inputgraph, pointID):
"""
Get the y-errors (low, up) in a transparent way for TGraph and TGraphErrors
@param inputgraph: input for the graph
@return: tuple of lower and upper y error
"""
if isinstance(inputgraph, TGraphAsymmErrors):
return inputgraph.GetEYlow()[pointID], inputgraph.GetEYhigh()[pointID]
elif isinstance(inputgraph, TGraphErrors):
return inputgraph.GetEY()[pointID], inputgraph.GetEY()[pointID]
else:
return None
    def SetParameters(self, f, x1, y1, x2, y2):
        """
        Set function parameters such that f passes through (x1,y1) and (x2,y2)
        @param f: TF1 named "lin", "exp" or "pow" (any other name triggers a
                  two-point fit instead of a closed-form solution)
        @param x1: lower x value
        @param y1: y-value at x1
        @param x2: upper x value
        @param y2: y-value at x2
        """
        if f.GetName() == "lin":
            f.SetParameter(1,(y1-y2)/(x1-x2)) # slope
            f.SetParameter(0,(x1*y2-x2*y1)/(x1-x2)) # const
        elif f.GetName() == "exp":
            f.SetParameter(1, math.log(y1/y2)/(x1-x2)) # exp
            f.SetParameter(0,y1*math.pow(y1/y2,x1/(x2-x1))) # constfactor
        elif f.GetName() == "pow":
            f.SetParameter(1,math.log(y1/y2)/math.log(x1/x2)) # power
            f.SetParameter(0,y1*math.pow(x1,-math.log(y1/y2)/math.log(x1/x2))); #constfactor
        else:
            # Unknown shape: determine the parameters by fitting a
            # two-point graph.
            gtemp = TGraph(2)
            gtemp.SetPoint(0,x1,y1)
            gtemp.SetPoint(1,x2,y2)
            gtemp.Fit(f,"")
def MakeHistogramFromGraphSimple(self, g, prototype):
"""
Create a histogram using TGraph's
interpolation between points
@param g: input graph
@param prototype: prototype for the
@return: the new histogram
"""
h = deepcopy(prototype)
h.SetName(g.GetName())
h.Reset();
for i in range(1, g.GetNbinsX()+1):
h.SetBinContent(i, g.Eval(h.GetBinCenter(i)))
h.SetBinError(i,0)
return h
def __GetInterpolatedValue(self, x, x1, y1, x2, y2, options = "lin", xmin = 0, xmax = 0):
"""
Get value at x, interpolated using the points (x1,y1) and (x2,y2) as steps for the interpolation.
Several models can be applied:
- linear
- exponential
- power law
@param x: Step at which to perform the interpolation
@param x1: lower x coordinate
@param y1: Function value at x1
@param x2: upper x coordinate
@param y2: Function value at x2
@param option: Interpolatoin method
@param xmin:
@param xmax:
@return: interpolation value
"""
integrate = True if "I" in options else False
if "lin" in options:
if integrate:
return (-2*x2*y1 + (xmax + xmin)*(y1 - y2) + 2*x1*y2)/(2.*(x1 - x2))
else:
return (x*y1 - x2*y1 - x*y2 + x1*y2)/(x1 - x2)
# end of lin
elif "exp" in options:
if integrate:
return ((x1 - x2)*y1*(math.pow(y1/y2,xmax/(x1 - x2)) - math.pow(y1/y2,xmin/(x1 - x2)))* math.pow(y1/y2,x1/(-x1 + x2)))/((xmax - xmin)*math.log(y1/y2));
else:
return y1*math.pow(y1/y2,(x - x1)/(x1 - x2));
#end of "exp"
elif "pow" in options:
c = math.pow(x1,-math.log(y1/y2)/(math.log(x1) - math.log(x2)))
n = math.log(y1/y2)/(math.log(x1) - math.log(x2))
if integrate:
if math.fabs(n+1.) < 1e-6:
return (c*(math.log(xmax) - math.log(xmin)))/(xmax - xmin)
else:
(c*(math.pow(xmax,1 + n) - math.pow(xmin,1 + n)))/((1 + n)*(xmax - xmin))
else:
return c*math.pow(x,n)
# end of "pow"
else:
return 0
def __GetInterpolatedUncertainty(self, x, x1, y1, x2, y2, dx1, dy1, dx2, dy2, options = "lin", xmin = 0, xmax = 0):
    """
    Get error at x, interpolated using the points (x1,y1) and (x2,y2) as steps for the interpolation.
    Several models can be applied:
    - linear
    - exponential
    - power law
    The closed-form expressions below are Gaussian error propagation of the
    corresponding formulas in __GetInterpolatedValue through the four
    uncertain inputs (x1, y1, x2, y2).
    @param x: Step at which to perform the interpolation
    @param x1: lower x coordinate
    @param y1: Function value at x1
    @param x2: upper x coordinate
    @param y2: Function value at x2
    @param dx1: uncertainty in x at point 1
    @param dy1: uncertainty in y at point 1
    @param dx2: uncertainty in x at point 2
    @param dy2: uncertainty in y at point 2
    @param options: Interpolation method
    @param xmin: lower integration bound (only used with "I")
    @param xmax: upper integration bound (only used with "I")
    @return: error value; 0 if the method is not recognized
    """
    # NOTE: the "I2" test is redundant ("I2" contains "I"); kept as-is.
    integrate = True if "I" in options or "I2" in options else False
    if "lin" in options:
        if integrate:
            # Propagated uncertainty of the interval-averaged linear interpolant.
            return math.sqrt((math.pow(dy2,2)*math.pow(x1 - x2,2)*math.pow(-2*x1 + xmax + xmin,2) +
                math.pow(dy1,2)*math.pow(x1 - x2,2)*math.pow(-2*x2 + xmax + xmin,2) +
                (math.pow(dx2,2)*math.pow(-2*x1 + xmax + xmin,2) +
                math.pow(dx1,2)*math.pow(-2*x2 + xmax + xmin,2))*math.pow(y1 - y2,2))/math.pow(x1 - x2,4))/2.;
        else:
            # Propagated uncertainty of the point-value linear interpolant.
            return math.sqrt((math.pow(dy2,2)*math.pow(x - x1,2)*math.pow(x1 - x2,2) +
                math.pow(dy1,2)*math.pow(x - x2,2)*math.pow(x1 - x2,2) +
                (math.pow(dx2,2)*math.pow(x - x1,2) + math.pow(dx1,2)*math.pow(x - x2,2))*math.pow(y1 - y2,2))/math.pow(x1 - x2,4));
    # end of "lin"
    elif "exp" in options:
        if integrate:
            return math.sqrt((math.pow(y1/y2,(2*x1)/(-x1 + x2))*(math.pow(dy1*math.pow(y1/y2,xmax/(x1 - x2))*
                (x1 - x2 + (x2 - xmax)*math.log(y1/y2)) - dy1*math.pow(y1/y2,xmin/(x1 - x2))*
                (x1 - x2 + (x2 - xmin)*math.log(y1/y2)),2) + (math.pow(dy2,2)*math.pow(y1,2)*
                math.pow(math.pow(y1/y2,xmax/(x1 - x2))*(x1 - x2 + (x1 - xmax)*math.log(y1/y2)) +
                math.pow(y1/y2,xmin/(x1 - x2))*(-x1 + x2 + (-x1 + xmin)*math.log(y1/y2)),2))/math.pow(y2,2) +
                (math.pow(dx2,2)*math.pow(y1,2)*math.pow(math.log(y1/y2),2)*math.pow(math.pow(y1/y2,xmax/(x1 - x2))*
                (x1 - x2 + (x1 - xmax)*math.log(y1/y2)) + math.pow(y1/y2,xmin/(x1 - x2))*(-x1 + x2 + (-x1 + xmin)*
                math.log(y1/y2)),2))/math.pow(x1 - x2,2) + (math.pow(dx1,2)*math.pow(y1,2)*math.pow(math.log(y1/y2),2)*
                math.pow(math.pow(y1/y2,xmax/(x1 - x2))*(x1 - x2 + (x2 - xmax)*math.log(y1/y2)) + math.pow(y1/y2,xmin/(x1 - x2))*
                (-x1 + x2 + (-x2 + xmin)*math.log(y1/y2)),2))/math.pow(x1 - x2,2)))/(math.pow(xmax - xmin,2)*math.pow(math.log(y1/y2),4)))
        else:
            return math.sqrt((math.pow(y1/y2,(2*(x - x1))/(x1 - x2))*((math.pow(x1 - x2,2)*(math.pow(dy2,2)*math.pow(x - x1,2)*
                math.pow(y1,2) + math.pow(dy1,2)*math.pow(x - x2,2)*math.pow(y2,2)))/math.pow(y2,2) + (math.pow(dx2,2)*math.pow(x - x1,2)
                + math.pow(dx1,2)*math.pow(x - x2,2))*math.pow(y1,2)*math.pow(math.log(y1/y2),2)))/math.pow(x1 - x2,4))
    # end of "exp"
    elif "pow" in options:
        # NOTE(review): same power-law parameters as __GetInterpolatedValue;
        # c = x1**(-n) has no y1 factor -- confirm against the intended model.
        c = math.pow(x1,-math.log(y1/y2)/(math.log(x1) - math.log(x2)))
        n = math.log(y1/y2)/(math.log(x1) - math.log(x2))
        if integrate:
            if math.fabs(n+1.) < 1e-6:
                # n ~ -1: logarithmic form of the integral.
                return math.sqrt((math.pow(c,2)*math.pow(math.log(xmax) - math.log(xmin),2)*
                    ((math.pow(dx2,2)*math.pow(-2*math.log(x1) + math.log(xmax) + math.log(xmin),2))/
                    math.pow(x2,2) + (math.pow(dy2,2)*math.pow(-2*math.log(x1) + math.log(xmax) + math.log(xmin),2))/math.pow(y2,2) +
                    (math.pow(dx1,2)*math.pow(-2*math.log(x2) + math.log(xmax) + math.log(xmin),2))/math.pow(x1,2) + (math.pow(dy1,2)*
                    math.pow(-2*math.log(x2) + math.log(xmax) + math.log(xmin),2))/math.pow(y1,2)))/
                    (math.pow(xmax - xmin,2)*math.pow(math.log(x1) - math.log(x2),2)))/2.
            else:
                return math.sqrt(((math.pow(c,2)*math.pow(dx2,2)*math.pow(n,2)*math.pow(math.pow(xmax,1 + n)*
                    (-1 - (1 + n)*math.log(x1) + (1 + n)*math.log(xmax)) + math.pow(xmin,1 + n)*(1 + (1 + n)*math.log(x1) -
                    (1 + n)*math.log(xmin)),2))/math.pow(x2,2) + (math.pow(dy2,2)*math.pow(c*math.pow(xmax,1 + n)*
                    (1 + (1 + n)*math.log(x1) - (1 + n)*math.log(xmax)) - c*math.pow(xmin,1 + n)*(1 + (1 + n)*math.log(x1) -
                    (1 + n)*math.log(xmin)),2))/math.pow(y2,2) + (math.pow(dy1,2)*math.pow(c*math.pow(xmax,1 + n)*
                    (1 + (1 + n)*math.log(x2) - (1 + n)*math.log(xmax)) - c*math.pow(xmin,1 + n)*(1 + (1 + n)*math.log(x2) -
                    (1 + n)*math.log(xmin)),2))/math.pow(y1,2) + (math.pow(c,2)*math.pow(dx1,2)*math.pow(n,2)*
                    math.pow(math.pow(xmax,1 + n)*(1 + (1 + n)*math.log(x2) - (1 + n)*math.log(xmax)) + math.pow(xmin,1 + n)*
                    (-1 - (1 + n)*math.log(x2) + (1 + n)*math.log(xmin)),2))/math.pow(x1,2))/
                    (math.pow(1 + n,4)*math.pow(xmax - xmin,2)*math.pow(math.log(x1) - math.log(x2),2)));
        else:
            return math.sqrt((math.pow(c,2)*math.pow(x,2*n)*((math.pow(dx2,2)*math.pow(n,2)*math.pow(math.log(x) -
                math.log(x1),2))/math.pow(x2,2) + (math.pow(dy2,2)*math.pow(math.log(x) - math.log(x1),2))/math.pow(y2,2) +
                (math.pow(dx1,2)*math.pow(n,2)*math.pow(math.log(x) - math.log(x2),2))/math.pow(x1,2) +
                (math.pow(dy1,2)*math.pow(math.log(x) - math.log(x2),2))/math.pow(y1,2)))/math.pow(math.log(x1) - math.log(x2),2));
    # end of "pow"
    else:
        return 0
musically-ut/statsmodels | statsmodels/tsa/vector_ar/dynamic.py | 27 | 9932 | # pylint: disable=W0201
from statsmodels.compat.python import iteritems, string_types, range
import numpy as np
from statsmodels.tools.decorators import cache_readonly
import pandas as pd
from . import var_model as _model
from . import util
from . import plotting
# Window-type codes accepted by DynamicVAR / _get_window_type.
FULL_SAMPLE = 0
ROLLING = 1
EXPANDING = 2


def _get_window_type(window_type):
    """Normalize *window_type* to one of the integer codes above.

    Accepts either the integer codes themselves or the (case-insensitive)
    strings 'full sample' / 'full_sample', 'rolling', 'expanding'.

    Raises
    ------
    ValueError
        If the value is neither a known code nor a known string.
        (Previously a bare ``Exception``; ``ValueError`` is more precise
        and remains backward-compatible for callers catching Exception.)
    """
    if window_type in (FULL_SAMPLE, ROLLING, EXPANDING):
        return window_type
    elif isinstance(window_type, string_types):
        window_type_up = window_type.upper()
        if window_type_up in ('FULL SAMPLE', 'FULL_SAMPLE'):
            return FULL_SAMPLE
        elif window_type_up == 'ROLLING':
            return ROLLING
        elif window_type_up == 'EXPANDING':
            return EXPANDING
    raise ValueError('Unrecognized window type: %s' % window_type)
class DynamicVAR(object):
    """
    Estimates time-varying vector autoregression (VAR(p)) using
    equation-by-equation least squares

    Parameters
    ----------
    data : pandas.DataFrame
    lag_order : int, default 1
    window : int
    window_type : {'expanding', 'rolling'}
    min_periods : int or None
        Minimum number of observations to require in window, defaults to window
        size if None specified
    trend : {'c', 'nc', 'ct', 'ctt'}
        TODO

    Returns
    -------
    **Attributes**:

    coefs : WidePanel
        items : coefficient names
        major_axis : dates
        minor_axis : VAR equation names

    Notes
    -----
    NOTE(review): relies on ``pandas.ols`` and ``pandas.WidePanel``, which
    were removed from pandas long ago -- this class only works against very
    old pandas versions.
    """

    def __init__(self, data, lag_order=1, window=None, window_type='expanding',
                 trend='c', min_periods=None):
        self.lag_order = lag_order

        self.names = list(data.columns)
        self.neqs = len(self.names)

        self._y_orig = data

        # TODO: deal with trend
        self._x_orig = _make_lag_matrix(data, lag_order)
        self._x_orig['intercept'] = 1

        # Align endog with the lagged regressors and drop incomplete rows.
        (self.y, self.x, self.x_filtered, self._index,
         self._time_has_obs) = _filter_data(self._y_orig, self._x_orig)

        self.lag_order = lag_order
        self.trendorder = util.get_trendorder(trend)

        self._set_window(window_type, window, min_periods)

    def _set_window(self, window_type, window, min_periods):
        # Normalize/validate the moving-window specification.
        self._window_type = _get_window_type(window_type)

        if self._is_rolling:
            if window is None:
                raise Exception('Must pass window when doing rolling '
                                'regression')

            if min_periods is None:
                min_periods = window
        else:
            # Expanding / full-sample: the window covers all observations.
            window = len(self.x)
            if min_periods is None:
                min_periods = 1

        self._window = int(window)
        self._min_periods = min_periods

    @cache_readonly
    def T(self):
        """
        Number of time periods in results
        """
        return len(self.result_index)

    @property
    def nobs(self):
        # Stub, do I need this?
        data = dict((eq, r.nobs) for eq, r in iteritems(self.equations))
        return pd.DataFrame(data)

    @cache_readonly
    def equations(self):
        # One moving-window OLS regression per endogenous variable.
        eqs = {}
        for col, ts in iteritems(self.y):
            # NOTE(review): pd.ols no longer exists in modern pandas.
            model = pd.ols(y=ts, x=self.x, window=self._window,
                           window_type=self._window_type,
                           min_periods=self._min_periods)

            eqs[col] = model

        return eqs

    @cache_readonly
    def coefs(self):
        """
        Return dynamic regression coefficients as WidePanel
        """
        data = {}
        for eq, result in iteritems(self.equations):
            data[eq] = result.beta

        panel = pd.WidePanel.fromDict(data)

        # Coefficient names become items
        return panel.swapaxes('items', 'minor')

    @property
    def result_index(self):
        return self.coefs.major_axis

    @cache_readonly
    def _coefs_raw(self):
        """
        Reshape coefficients to be more amenable to dynamic calculations

        Returns
        -------
        coefs : (time_periods x lag_order x neqs x neqs)
        """
        coef_panel = self.coefs.copy()
        del coef_panel['intercept']

        coef_values = coef_panel.swapaxes('items', 'major').values
        coef_values = coef_values.reshape((len(coef_values),
                                           self.lag_order,
                                           self.neqs, self.neqs))

        return coef_values

    @cache_readonly
    def _intercepts_raw(self):
        """
        Similar to _coefs_raw, return intercept values in easy-to-use matrix
        form

        Returns
        -------
        intercepts : (T x K)
        """
        return self.coefs['intercept'].values

    @cache_readonly
    def resid(self):
        # Per-equation residuals, one column per endogenous variable.
        data = {}
        for eq, result in iteritems(self.equations):
            data[eq] = result.resid
        return pd.DataFrame(data)

    def forecast(self, steps=1):
        """
        Produce dynamic forecast

        Parameters
        ----------
        steps : int
            Forecast horizon; each forecast uses only coefficients estimated
            `steps` periods before the forecasted date.

        Returns
        -------
        forecasts : pandas.DataFrame
        """
        output = np.empty((self.T - steps, self.neqs))

        y_values = self.y.values
        y_index_map = dict((d, idx) for idx, d in enumerate(self.y.index))
        result_index_map = dict((d, idx) for idx, d in enumerate(self.result_index))

        coefs = self._coefs_raw
        intercepts = self._intercepts_raw

        # can only produce this many forecasts
        forc_index = self.result_index[steps:]
        for i, date in enumerate(forc_index):
            # TODO: check that this does the right thing in weird cases...
            idx = y_index_map[date] - steps
            result_idx = result_index_map[date] - steps

            # Use only data observed strictly before the forecast origin.
            y_slice = y_values[:idx]

            forcs = _model.forecast(y_slice, coefs[result_idx],
                                    intercepts[result_idx], steps)

            # Keep only the `steps`-ahead value of the forecast path.
            output[i] = forcs[-1]

        return pd.DataFrame(output, index=forc_index, columns=self.names)

    def plot_forecast(self, steps=1, figsize=(10, 10)):
        """
        Plot h-step ahead forecasts against actual realizations of time
        series. Note that forecasts are lined up with their respective
        realizations.

        Parameters
        ----------
        steps : int
            Forecast horizon to plot.
        """
        import matplotlib.pyplot as plt

        fig, axes = plt.subplots(figsize=figsize, nrows=self.neqs,
                                 sharex=True)

        forc = self.forecast(steps=steps)
        dates = forc.index

        y_overlay = self.y.reindex(dates)

        for i, col in enumerate(forc.columns):
            ax = axes[i]

            y_ts = y_overlay[col]
            forc_ts = forc[col]

            y_handle = ax.plot(dates, y_ts.values, 'k.', ms=2)
            forc_handle = ax.plot(dates, forc_ts.values, 'k-')

        fig.legend((y_handle, forc_handle), ('Y', 'Forecast'))
        fig.autofmt_xdate()

        fig.suptitle('Dynamic %d-step forecast' % steps)

        # pretty things up a bit
        plotting.adjust_subplots(bottom=0.15, left=0.10)
        plt.draw_if_interactive()

    @property
    def _is_rolling(self):
        # True when a fixed-length rolling window was requested.
        return self._window_type == ROLLING

    @cache_readonly
    def r2(self):
        """Returns the r-squared values."""
        data = dict((eq, r.r2) for eq, r in iteritems(self.equations))
        return pd.DataFrame(data)
class DynamicPanelVAR(DynamicVAR):
    """
    Dynamic (time-varying) panel vector autoregression using panel ordinary
    least squares

    Parameters
    ----------
    """

    def __init__(self, data, lag_order=1, window=None, window_type='expanding',
                 trend='c', min_periods=None):
        # NOTE(review): near-duplicate of DynamicVAR.__init__ (it only omits
        # setting ``self.names``); consider delegating to super().__init__.
        self.lag_order = lag_order
        self.neqs = len(data.columns)

        self._y_orig = data

        # TODO: deal with trend
        self._x_orig = _make_lag_matrix(data, lag_order)
        self._x_orig['intercept'] = 1

        (self.y, self.x, self.x_filtered, self._index,
         self._time_has_obs) = _filter_data(self._y_orig, self._x_orig)

        self.lag_order = lag_order
        self.trendorder = util.get_trendorder(trend)

        self._set_window(window_type, window, min_periods)
def _filter_data(lhs, rhs):
    """
    Data filtering routine for dynamic VAR

    lhs : DataFrame
        original data
    rhs : DataFrame
        lagged variables

    Returns
    -------
    (filtered_lhs, filtered_rhs, pre_filtered_rhs, index, valid) where
    `valid` is the boolean mask of rows with complete data on both sides.
    """
    def _has_all_columns(df):
        # True for each row with no NaN/inf in any column.
        return np.isfinite(df.values).sum(1) == len(df.columns)

    # Regressor rows that are complete before any index alignment.
    rhs_valid = _has_all_columns(rhs)
    if not rhs_valid.all():
        pre_filtered_rhs = rhs[rhs_valid]
    else:
        pre_filtered_rhs = rhs

    # Align both frames on the union of their indexes, then re-test
    # completeness (reindexing can introduce NaN rows).
    index = lhs.index.union(rhs.index)
    if not index.equals(rhs.index) or not index.equals(lhs.index):
        rhs = rhs.reindex(index)
        lhs = lhs.reindex(index)

        rhs_valid = _has_all_columns(rhs)

    lhs_valid = _has_all_columns(lhs)
    valid = rhs_valid & lhs_valid

    if not valid.all():
        filt_index = rhs.index[valid]
        filtered_rhs = rhs.reindex(filt_index)
        filtered_lhs = lhs.reindex(filt_index)
    else:
        filtered_rhs, filtered_lhs = rhs, lhs

    return filtered_lhs, filtered_rhs, pre_filtered_rhs, index, valid
def _make_lag_matrix(x, lags):
    """Stack lags 1..`lags` of DataFrame `x` into one wide DataFrame.

    Columns are named ``'L<k>.<original column name>'`` and ordered lag by
    lag (all lag-1 columns, then all lag-2 columns, ...).
    """
    data = {}
    columns = []
    for i in range(1, 1 + lags):
        lagstr = 'L%d.'% i
        lag = x.shift(i).rename(columns=lambda c: lagstr + c)
        # NOTE(review): ``lag._series`` is a private pandas attribute --
        # verify it exists on the targeted pandas version.
        data.update(lag._series)
        columns.extend(lag.columns)

    return pd.DataFrame(data, columns=columns)
class Equation(object):
    """
    Stub, estimate one equation
    """

    # TODO: unimplemented placeholder; y and x are accepted but ignored.
    def __init__(self, y, x):
        pass
if __name__ == '__main__':
    # Smoke test: fit expanding- and rolling-window dynamic VARs on a
    # random-walk DataFrame built from pandas' testing utilities.
    import pandas.util.testing as ptest

    ptest.N = 500
    data = ptest.makeTimeDataFrame().cumsum(0)

    var = DynamicVAR(data, lag_order=2, window_type='expanding')
    var2 = DynamicVAR(data, lag_order=2, window=10,
                      window_type='rolling')
| bsd-3-clause |
bakerlover/project2 | lib/flask/sessions.py | 348 | 12882 | # -*- coding: utf-8 -*-
"""
flask.sessions
~~~~~~~~~~~~~~
Implements cookie based sessions based on itsdangerous.
:copyright: (c) 2012 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import uuid
import hashlib
from datetime import datetime
from werkzeug.http import http_date, parse_date
from werkzeug.datastructures import CallbackDict
from . import Markup, json
from ._compat import iteritems, text_type
from itsdangerous import URLSafeTimedSerializer, BadSignature
def total_seconds(td):
    """Return the duration of timedelta *td* in whole seconds.

    Sub-second precision (``td.microseconds``) is deliberately ignored.
    """
    day_seconds = 24 * 60 * 60
    return td.seconds + td.days * day_seconds
class SessionMixin(object):
    """Mixin layered onto a basic dictionary to provide the accessors that
    Flask extensions and users expect from the session object.
    """

    #: some session backends can tell you if a session is new, but that is
    #: not necessarily guaranteed. Use with caution. The default mixin
    #: implementation just hardcodes `False` in.
    new = False

    #: for some backends this will always be `True`, but some backends will
    #: default this to false and detect changes in the dictionary for as
    #: long as changes do not happen on mutable structures in the session.
    #: The default mixin implementation just hardcodes `True` in.
    modified = True

    @property
    def permanent(self):
        """Reflects the ``'_permanent'`` key in the dict."""
        return self.get('_permanent', False)

    @permanent.setter
    def permanent(self, value):
        self['_permanent'] = bool(value)
class TaggedJSONSerializer(object):
    """A customized JSON serializer that supports a few extra types that
    we take for granted when serializing (tuples, markup objects, datetime).

    Non-JSON types are wrapped in single-item marker dicts (keys ' t',
    ' u', ' m', ' d') on dump and unwrapped again on load.
    """

    def dumps(self, value):
        def _tag(value):
            # Recursively replace rich types with their tagged JSON form.
            if isinstance(value, tuple):
                return {' t': [_tag(x) for x in value]}
            elif isinstance(value, uuid.UUID):
                return {' u': value.hex}
            elif callable(getattr(value, '__html__', None)):
                return {' m': text_type(value.__html__())}
            elif isinstance(value, list):
                return [_tag(x) for x in value]
            elif isinstance(value, datetime):
                return {' d': http_date(value)}
            elif isinstance(value, dict):
                return dict((k, _tag(v)) for k, v in iteritems(value))
            elif isinstance(value, str):
                # On Python 2 a byte string must be ASCII-decodable to be
                # stored; otherwise give a helpful error instead of failing
                # deep inside the json module.
                try:
                    return text_type(value)
                except UnicodeError:
                    raise UnexpectedUnicodeError(u'A byte string with '
                        u'non-ASCII data was passed to the session system '
                        u'which can only store unicode strings. Consider '
                        u'base64 encoding your string (String was %r)' % value)
            return value
        return json.dumps(_tag(value), separators=(',', ':'))

    def loads(self, value):
        def object_hook(obj):
            # Only a one-item dict can be a tag wrapper from dumps().
            if len(obj) != 1:
                return obj
            the_key, the_value = next(iteritems(obj))
            if the_key == ' t':
                return tuple(the_value)
            elif the_key == ' u':
                return uuid.UUID(the_value)
            elif the_key == ' m':
                return Markup(the_value)
            elif the_key == ' d':
                return parse_date(the_value)
            return obj
        return json.loads(value, object_hook=object_hook)
#: Shared serializer instance used as the default payload serializer for
#: :class:`SecureCookieSessionInterface`.
session_json_serializer = TaggedJSONSerializer()
class SecureCookieSession(CallbackDict, SessionMixin):
    """Baseclass for sessions based on signed cookies."""

    def __init__(self, initial=None):
        # CallbackDict calls on_update with the dict instance itself, which
        # is this session object -- so naming the parameter ``self`` here
        # is intentional, not an accident.
        def on_update(self):
            self.modified = True
        CallbackDict.__init__(self, initial, on_update)
        # Freshly constructed sessions start out unmodified.
        self.modified = False
class NullSession(SecureCookieSession):
    """Class used to generate nicer error messages if sessions are not
    available. Will still allow read-only access to the empty session
    but fail on setting.
    """

    def _fail(self, *args, **kwargs):
        raise RuntimeError('the session is unavailable because no secret '
                           'key was set. Set the secret_key on the '
                           'application to something unique and secret.')

    # Every mutating dict method shares the same failing implementation;
    # _fail itself is then removed from the class namespace so it does not
    # leak into the public API.
    __setitem__ = __delitem__ = clear = pop = popitem = \
        update = setdefault = _fail
    del _fail
class SessionInterface(object):
    """The basic interface you have to implement in order to replace the
    default session interface which uses werkzeug's securecookie
    implementation. The only methods you have to implement are
    :meth:`open_session` and :meth:`save_session`, the others have
    useful defaults which you don't need to change.

    The session object returned by the :meth:`open_session` method has to
    provide a dictionary like interface plus the properties and methods
    from the :class:`SessionMixin`. We recommend just subclassing a dict
    and adding that mixin::

        class Session(dict, SessionMixin):
            pass

    If :meth:`open_session` returns `None` Flask will call into
    :meth:`make_null_session` to create a session that acts as replacement
    if the session support cannot work because some requirement is not
    fulfilled. The default :class:`NullSession` class that is created
    will complain that the secret key was not set.

    To replace the session interface on an application all you have to do
    is to assign :attr:`flask.Flask.session_interface`::

        app = Flask(__name__)
        app.session_interface = MySessionInterface()

    .. versionadded:: 0.8
    """

    #: :meth:`make_null_session` will look here for the class that should
    #: be created when a null session is requested. Likewise the
    #: :meth:`is_null_session` method will perform a typecheck against
    #: this type.
    null_session_class = NullSession

    #: A flag that indicates if the session interface is pickle based.
    #: This can be used by flask extensions to make a decision in regards
    #: to how to deal with the session object.
    #:
    #: .. versionadded:: 0.10
    pickle_based = False

    def make_null_session(self, app):
        """Creates a null session which acts as a replacement object if the
        real session support could not be loaded due to a configuration
        error. This mainly aids the user experience because the job of the
        null session is to still support lookup without complaining but
        modifications are answered with a helpful error message of what
        failed.

        This creates an instance of :attr:`null_session_class` by default.
        """
        return self.null_session_class()

    def is_null_session(self, obj):
        """Checks if a given object is a null session. Null sessions are
        not asked to be saved.

        This checks if the object is an instance of :attr:`null_session_class`
        by default.
        """
        return isinstance(obj, self.null_session_class)

    def get_cookie_domain(self, app):
        """Helpful helper method that returns the cookie domain that should
        be used for the session cookie if session cookies are used.

        Returns `None` (browser-scoped cookie) when neither
        ``SESSION_COOKIE_DOMAIN`` nor ``SERVER_NAME`` yields a usable domain.
        """
        if app.config['SESSION_COOKIE_DOMAIN'] is not None:
            return app.config['SESSION_COOKIE_DOMAIN']
        if app.config['SERVER_NAME'] is not None:
            # chop off the port which is usually not supported by browsers
            rv = '.' + app.config['SERVER_NAME'].rsplit(':', 1)[0]
            # Google chrome does not like cookies set to .localhost, so
            # we just go with no domain then. Flask documents anyways that
            # cross domain cookies need a fully qualified domain name
            if rv == '.localhost':
                rv = None
            # If we infer the cookie domain from the server name we need
            # to check if we are in a subpath. In that case we can't
            # set a cross domain cookie.
            if rv is not None:
                path = self.get_cookie_path(app)
                if path != '/':
                    rv = rv.lstrip('.')
            return rv

    def get_cookie_path(self, app):
        """Returns the path for which the cookie should be valid. The
        default implementation uses the value from the ``SESSION_COOKIE_PATH``
        config var if it's set, and falls back to ``APPLICATION_ROOT`` or
        uses ``/`` if it's `None`.
        """
        return app.config['SESSION_COOKIE_PATH'] or \
               app.config['APPLICATION_ROOT'] or '/'

    def get_cookie_httponly(self, app):
        """Returns True if the session cookie should be httponly. This
        currently just returns the value of the ``SESSION_COOKIE_HTTPONLY``
        config var.
        """
        return app.config['SESSION_COOKIE_HTTPONLY']

    def get_cookie_secure(self, app):
        """Returns True if the cookie should be secure. This currently
        just returns the value of the ``SESSION_COOKIE_SECURE`` setting.
        """
        return app.config['SESSION_COOKIE_SECURE']

    def get_expiration_time(self, app, session):
        """A helper method that returns an expiration date for the session
        or `None` if the session is linked to the browser session. The
        default implementation returns now + the permanent session
        lifetime configured on the application.
        """
        # Non-permanent sessions fall through and implicitly return None,
        # i.e. a cookie that expires with the browser session.
        if session.permanent:
            return datetime.utcnow() + app.permanent_session_lifetime

    def open_session(self, app, request):
        """This method has to be implemented and must either return `None`
        in case the loading failed because of a configuration error or an
        instance of a session object which implements a dictionary like
        interface + the methods and attributes on :class:`SessionMixin`.
        """
        raise NotImplementedError()

    def save_session(self, app, session, response):
        """This is called for actual sessions returned by :meth:`open_session`
        at the end of the request. This is still called during a request
        context so if you absolutely need access to the request you can do
        that.
        """
        raise NotImplementedError()
class SecureCookieSessionInterface(SessionInterface):
    """The default session interface that stores sessions in signed cookies
    through the :mod:`itsdangerous` module.
    """

    #: the salt that should be applied on top of the secret key for the
    #: signing of cookie based sessions.
    salt = 'cookie-session'
    #: the hash function to use for the signature. The default is sha1
    digest_method = staticmethod(hashlib.sha1)
    #: the name of the itsdangerous supported key derivation. The default
    #: is hmac.
    key_derivation = 'hmac'
    #: A python serializer for the payload. The default is a compact
    #: JSON derived serializer with support for some extra Python types
    #: such as datetime objects or tuples.
    serializer = session_json_serializer
    session_class = SecureCookieSession

    def get_signing_serializer(self, app):
        """Build the itsdangerous serializer used to sign the cookie, or
        return `None` if no secret key is configured."""
        if not app.secret_key:
            return None
        signer_kwargs = dict(
            key_derivation=self.key_derivation,
            digest_method=self.digest_method
        )
        return URLSafeTimedSerializer(app.secret_key, salt=self.salt,
                                      serializer=self.serializer,
                                      signer_kwargs=signer_kwargs)

    def open_session(self, app, request):
        s = self.get_signing_serializer(app)
        if s is None:
            # No secret key: signal the caller to fall back to a NullSession.
            return None
        val = request.cookies.get(app.session_cookie_name)
        if not val:
            return self.session_class()
        max_age = total_seconds(app.permanent_session_lifetime)
        try:
            data = s.loads(val, max_age=max_age)
            return self.session_class(data)
        except BadSignature:
            # Tampered-with or expired cookie: start a fresh, empty session.
            return self.session_class()

    def save_session(self, app, session, response):
        domain = self.get_cookie_domain(app)
        path = self.get_cookie_path(app)
        if not session:
            # An empty session that was modified means the user emptied it;
            # drop the cookie.  An untouched empty session needs no cookie.
            if session.modified:
                response.delete_cookie(app.session_cookie_name,
                                       domain=domain, path=path)
            return
        httponly = self.get_cookie_httponly(app)
        secure = self.get_cookie_secure(app)
        expires = self.get_expiration_time(app, session)
        val = self.get_signing_serializer(app).dumps(dict(session))
        response.set_cookie(app.session_cookie_name, val,
                            expires=expires, httponly=httponly,
                            domain=domain, path=path, secure=secure)
from flask.debughelpers import UnexpectedUnicodeError
| apache-2.0 |
easmetz/inasafe | docs/conf.py | 17 | 8532 | # -*- coding: utf-8 -*-
#
# InaSAFE documentation build configuration file, created by
# sphinx-quickstart on Wed Sep 16 11:54:36 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# Sphinx build configuration for the InaSAFE API documentation.
import sys
import os

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# (The InaSAFE package root lives one level above this docs/ directory.)
sys.path.insert(0, os.path.abspath('..'))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.doctest',
    'sphinx.ext.todo',
    'sphinx.ext.coverage',
    'sphinx.ext.viewcode',
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
# source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'InaSAFE'
copyright = u'2015, InaSAFE Project'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '3.2'
# The full version, including alpha/beta/rc tags.
release = '3.2'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# html_theme = 'default'
# NOTE: custom theme shipped in the separate inasafe-doc repository (see
# html_theme_path below), which must be checked out next to this one.
html_theme = 'inasafe-theme'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['../../inasafe-doc/docs/themes']

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
html_short_title = 'InaSAFE API Docs'

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = '../../inasafe-doc/docs/source/favicon.ico'

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%H:%M %b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}

# If false, no module index is generated.
# html_domain_indices = True

# If false, no index is generated.
# html_use_index = True

# If true, the index is split into individual pages for each letter.
# html_split_index = False

# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'InaSAFEdoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    # 'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (
        'index',
        'InaSAFE.tex',
        u'InaSAFE Documentation',
        u'Tim Sutton',
        'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False

# If true, show page references after internal links.
# latex_show_pagerefs = False

# If true, show URL addresses after external links.
# latex_show_urls = False

# Documents to append as an appendix to all manuals.
# latex_appendices = []

# If false, no module index is generated.
# latex_domain_indices = True

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'inasafe', u'InaSAFE Documentation',
     [u'Tim Sutton'], 1)
]

# If true, show URL addresses after external links.
# man_show_urls = False

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (
        'index',
        'InaSAFE',
        u'InaSAFE Documentation',
        u'Tim Sutton',
        'InaSAFE', 'One line description of project.',
        'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
# texinfo_appendices = []

# If false, no module index is generated.
# texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
| gpl-3.0 |
egenerat/flight-manager | django/template/loaders/filesystem.py | 9 | 2414 | """
Wrapper for loading templates from the filesystem.
"""
from django.conf import settings
from django.template import TemplateDoesNotExist
from django.template.loader import BaseLoader
from django.utils._os import safe_join
class Loader(BaseLoader):
    # Filesystem loading needs no special setup, so it is always usable.
    is_usable = True

    def get_template_sources(self, template_name, template_dirs=None):
        """
        Returns the absolute paths to "template_name", when appended to each
        directory in "template_dirs". Any paths that don't lie inside one of the
        template dirs are excluded from the result set, for security reasons.
        """
        if not template_dirs:
            template_dirs = settings.TEMPLATE_DIRS
        for template_dir in template_dirs:
            try:
                yield safe_join(template_dir, template_name)
            except UnicodeDecodeError:
                # The template dir name was a bytestring that wasn't valid UTF-8.
                raise
            except ValueError:
                # The joined path was located outside of this particular
                # template_dir (it might be inside another one, so this isn't
                # fatal).
                pass

    def load_template_source(self, template_name, template_dirs=None):
        """Return ``(template_string, filepath)`` for the first candidate path
        that can be opened, or raise TemplateDoesNotExist listing every path
        tried."""
        tried = []
        for filepath in self.get_template_sources(template_name, template_dirs):
            try:
                file = open(filepath)
                try:
                    # Templates are stored on disk in FILE_CHARSET; decode to
                    # unicode before handing them to the template engine.
                    return (file.read().decode(settings.FILE_CHARSET), filepath)
                finally:
                    file.close()
            except IOError:
                tried.append(filepath)
        if tried:
            error_msg = "Tried %s" % tried
        else:
            error_msg = "Your TEMPLATE_DIRS setting is empty. Change it to point to at least one template directory."
        raise TemplateDoesNotExist(error_msg)
    # Flag read by the template machinery to know this loader is active.
    load_template_source.is_usable = True
# Module-level loader instance backing the deprecated function API below.
_loader = Loader()


def load_template_source(template_name, template_dirs=None):
    """Deprecated module-level entry point; delegates to ``Loader``."""
    # For backwards compatibility
    import warnings
    warnings.warn(
        "'django.template.loaders.filesystem.load_template_source' is deprecated; use 'django.template.loaders.filesystem.Loader' instead.",
        DeprecationWarning
    )
    return _loader.load_template_source(template_name, template_dirs)
load_template_source.is_usable = True
| mit |
def bubble_sort(input_list):
    """Sort ``input_list`` in place using bubble sort and return it.

    Improvements over the previous version: the inner scan shrinks by one
    element per pass (the largest remaining value has already bubbled to
    the end), and a pass with no swaps terminates the sort early.
    """
    length = len(input_list)
    for passed in range(length - 1):
        swapped = False
        for index in range(length - 1 - passed):
            if input_list[index] > input_list[index + 1]:
                input_list[index], input_list[index + 1] = \
                    input_list[index + 1], input_list[index]
                swapped = True
        if not swapped:
            # Already sorted: no pair was out of order on this pass.
            break
    return input_list
def insert_sort(input_list):
    """Return a new list with the elements of ``input_list`` in ascending order.

    The input list is not mutated.  Bug fixed: the previous version relied on
    ``insert_sort_single_sort``, whose missing ``break`` inserted an element
    once per larger neighbour (e.g. ``insert_sort([5, 6, 1])`` returned
    ``[1, 1, 5, 6]``).  The insertion is now done directly and correctly.
    """
    result = []
    for element in input_list:
        for position in range(len(result)):
            if result[position] > element:
                result.insert(position, element)
                break
        else:
            # No larger element found: element belongs at the end.
            result.append(element)
    return result
def insert_sort_single_sort(input_list, new_element):
    """Insert ``new_element`` into the sorted ``input_list`` in place and
    return the list.

    Bug fixed: the loop previously kept running after the first insertion,
    so the element was inserted once for every larger value in the list
    (``insert_sort_single_sort([5, 6], 3)`` returned ``[3, 3, 5, 6]``).
    A ``break`` now guarantees exactly one insertion.
    """
    for index in range(len(input_list)):
        if input_list[index] > new_element:
            input_list.insert(index, new_element)
            break
    else:
        # No element was larger: new_element is the new maximum.
        input_list.append(new_element)
    return input_list
def merge_sort(input_list):
    """Return the elements of ``input_list`` in ascending order using a
    top-down recursive merge sort.  Lists of length 0 or 1 are returned
    unchanged."""
    if len(input_list) <= 1:
        return input_list
    middle = len(input_list) // 2
    left = merge_sort(input_list[:middle])
    right = merge_sort(input_list[middle:])
    # Merge the two sorted halves; on ties the right half wins, matching a
    # strict less-than comparison on the left.
    merged = []
    i = j = 0
    while i < len(left) and j < len(right):
        if left[i] < right[j]:
            merged.append(left[i])
            i += 1
        else:
            merged.append(right[j])
            j += 1
    # At most one of these extends with anything.
    merged.extend(left[i:])
    merged.extend(right[j:])
    return merged
def quick_sort(input_list):
    """Return a sorted copy of ``input_list`` using quicksort with the
    first element as the pivot.  Elements equal to the pivot go to the
    smaller partition, as in the original implementation."""
    if len(input_list) <= 1:
        return input_list
    pivot, rest = input_list[0], input_list[1:]
    smaller = [value for value in rest if value <= pivot]
    larger = [value for value in rest if value > pivot]
    return quick_sort(smaller) + [pivot] + quick_sort(larger)
def selection_sort(input_list):
    """Sort ``input_list`` in place using selection sort and return it.

    Bug fixed: the previous version compared against a stale ``min_value``
    that was never refreshed after a swap, so later (larger) elements could
    overwrite smaller ones — ``selection_sort([3, 1, 2])`` returned
    ``[2, 3, 3]``, silently losing data.  Each pass now finds the index of
    the true minimum and performs a single swap.
    """
    length = len(input_list)
    for mark in range(length):
        smallest = mark
        for index in range(mark + 1, length):
            if input_list[index] < input_list[smallest]:
                smallest = index
        if smallest != mark:
            input_list[mark], input_list[smallest] = \
                input_list[smallest], input_list[mark]
    return input_list
| mit |
0x0all/carcade | tests/environments_tests.py | 2 | 2016 | # coding: utf-8
import os
import shutil
import tempfile
import unittest
from jinja2 import TemplateSyntaxError
from webassets import Bundle
from carcade.core import get_translations
from carcade.environments import create_assets_env, create_jinja2_env
class WebassetsEnvironmentTest(unittest.TestCase):
    """Checks that a webassets environment created by ``create_assets_env``
    can build a bundle into the target directory."""

    def setUp(self):
        self.build_dir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.build_dir)

    def test(self):
        assets_env = create_assets_env(
            source_dir='./tests/fixtures/bundle',
            build_dir=self.build_dir,
            static_url='/',
            bundles={})
        bundle = Bundle('one.css', 'two.css', output='styles.css')
        bundle.build(env=assets_env)
        self.assertIn('styles.css', os.listdir(self.build_dir))
class Jinja2EnvironmentTest(unittest.TestCase):
    """Checks Jinja2 environment integration with webassets bundles and
    gettext translations."""

    def setUp(self):
        self.build_dir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.build_dir)

    def test_webassets_integration(self):
        bundles = {'css': Bundle('one.css', 'two.css', output='styles.css')}
        assets_env = create_assets_env(
            source_dir='./tests/fixtures/bundle',
            build_dir=self.build_dir,
            static_url='/',
            bundles=bundles)
        jinja2_env = create_jinja2_env(assets_env=assets_env)
        template = '{% assets "css" %}{{ ASSET_URL }}{% endassets %}'
        rendered = jinja2_env.from_string(template).render()
        self.assertIn('styles.css', rendered)

    def test_translations_integration(self):
        template = '{% trans %}Hey!{% endtrans %}'
        # Without translations the source string passes through unchanged.
        untranslated = create_jinja2_env().from_string(template).render()
        self.assertEqual('Hey!', untranslated)
        translations = get_translations('tests/fixtures/ru.po')
        jinja2_env = create_jinja2_env(translations=translations)
        translated = jinja2_env.from_string(template).render()
        self.assertEqual(u'Привет!', translated)
gerryhd/diabot-assistant | lib/python2.7/site-packages/werkzeug/contrib/jsrouting.py | 513 | 8564 | # -*- coding: utf-8 -*-
"""
werkzeug.contrib.jsrouting
~~~~~~~~~~~~~~~~~~~~~~~~~~
Addon module that allows to create a JavaScript function from a map
that generates rules.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
# Resolve a JSON serializer: prefer simplejson if installed, fall back to
# the stdlib json module, and as a last resort install a stub that fails
# loudly only when JS generation is actually attempted.
try:
    from simplejson import dumps
except ImportError:
    try:
        from json import dumps
    except ImportError:
        def dumps(*args):
            raise RuntimeError('simplejson required for jsrouting')
from inspect import getmro
from werkzeug.routing import NumberConverter
from werkzeug._compat import iteritems
def render_template(name_parts, rules, converters):
    """Build the JavaScript source for a client-side URL builder.

    ``name_parts`` is the dotted target name split into components (empty
    for a bare expression), ``rules`` is a JSON string of serialized rules
    and ``converters`` a list of JS converter function expressions.  The
    returned unicode string assigns (or evaluates to) a factory that, given
    server_name/script_name/subdomain/url_scheme, returns a
    ``url_for(endpoint, args, force_external)`` function.
    """
    result = u''
    if name_parts:
        # Emit guards so that intermediate namespace objects exist before
        # assigning to the final dotted name.
        for idx in range(0, len(name_parts) - 1):
            name = u'.'.join(name_parts[:idx + 1])
            result += u"if (typeof %s === 'undefined') %s = {}\n" % (name, name)
        result += '%s = ' % '.'.join(name_parts)
    # NOTE(review): indentation inside the template literal was lost in this
    # copy and has been reconstructed; it only affects generated-JS cosmetics.
    result += """(function (server_name, script_name, subdomain, url_scheme) {
    var converters = [%(converters)s];
    var rules = %(rules)s;
    function in_array(array, value) {
        if (array.indexOf != undefined) {
            return array.indexOf(value) != -1;
        }
        for (var i = 0; i < array.length; i++) {
            if (array[i] == value) {
                return true;
            }
        }
        return false;
    }
    function array_diff(array1, array2) {
        array1 = array1.slice();
        for (var i = array1.length-1; i >= 0; i--) {
            if (in_array(array2, array1[i])) {
                array1.splice(i, 1);
            }
        }
        return array1;
    }
    function split_obj(obj) {
        var names = [];
        var values = [];
        for (var name in obj) {
            if (typeof(obj[name]) != 'function') {
                names.push(name);
                values.push(obj[name]);
            }
        }
        return {names: names, values: values, original: obj};
    }
    function suitable(rule, args) {
        var default_args = split_obj(rule.defaults || {});
        var diff_arg_names = array_diff(rule.arguments, default_args.names);
        for (var i = 0; i < diff_arg_names.length; i++) {
            if (!in_array(args.names, diff_arg_names[i])) {
                return false;
            }
        }
        if (array_diff(rule.arguments, args.names).length == 0) {
            if (rule.defaults == null) {
                return true;
            }
            for (var i = 0; i < default_args.names.length; i++) {
                var key = default_args.names[i];
                var value = default_args.values[i];
                if (value != args.original[key]) {
                    return false;
                }
            }
        }
        return true;
    }
    function build(rule, args) {
        var tmp = [];
        var processed = rule.arguments.slice();
        for (var i = 0; i < rule.trace.length; i++) {
            var part = rule.trace[i];
            if (part.is_dynamic) {
                var converter = converters[rule.converters[part.data]];
                var data = converter(args.original[part.data]);
                if (data == null) {
                    return null;
                }
                tmp.push(data);
                processed.push(part.name);
            } else {
                tmp.push(part.data);
            }
        }
        tmp = tmp.join('');
        var pipe = tmp.indexOf('|');
        var subdomain = tmp.substring(0, pipe);
        var url = tmp.substring(pipe+1);
        var unprocessed = array_diff(args.names, processed);
        var first_query_var = true;
        for (var i = 0; i < unprocessed.length; i++) {
            if (first_query_var) {
                url += '?';
            } else {
                url += '&';
            }
            first_query_var = false;
            url += encodeURIComponent(unprocessed[i]);
            url += '=';
            url += encodeURIComponent(args.original[unprocessed[i]]);
        }
        return {subdomain: subdomain, path: url};
    }
    function lstrip(s, c) {
        while (s && s.substring(0, 1) == c) {
            s = s.substring(1);
        }
        return s;
    }
    function rstrip(s, c) {
        while (s && s.substring(s.length-1, s.length) == c) {
            s = s.substring(0, s.length-1);
        }
        return s;
    }
    return function(endpoint, args, force_external) {
        args = split_obj(args);
        var rv = null;
        for (var i = 0; i < rules.length; i++) {
            var rule = rules[i];
            if (rule.endpoint != endpoint) continue;
            if (suitable(rule, args)) {
                rv = build(rule, args);
                if (rv != null) {
                    break;
                }
            }
        }
        if (rv == null) {
            return null;
        }
        if (!force_external && rv.subdomain == subdomain) {
            return rstrip(script_name, '/') + '/' + lstrip(rv.path, '/');
        } else {
            return url_scheme + '://'
                   + (rv.subdomain ? rv.subdomain + '.' : '')
                   + server_name + rstrip(script_name, '/')
                   + '/' + lstrip(rv.path, '/');
        }
    };
})""" % {'converters': u', '.join(converters),
         'rules': rules}
    return result
def generate_map(map, name='url_map'):
    """
    Generates a JavaScript function containing the rules defined in
    this map, to be used with a MapAdapter's generate_javascript
    method.  If you don't pass a name the returned JavaScript code is
    an expression that returns a function.  Otherwise it's a standalone
    script that assigns the function with that name.  Dotted names are
    resolved (so you can use a name like 'obj.url_for')

    In order to use JavaScript generation, simplejson must be installed.

    Note that using this feature will expose the rules
    defined in your map to users.  If your rules contain sensitive
    information, don't use JavaScript generation!
    """
    from warnings import warn
    warn(DeprecationWarning('This module is deprecated'))
    map.update()
    rules = []
    converters = []
    for rule in map.iter_rules():
        # Serialize the rule's trace as {is_dynamic, data} records.
        trace = [{
            'is_dynamic': is_dynamic,
            'data': data
        } for is_dynamic, data in rule._trace]
        # Deduplicate converter JS functions; each rule stores an index into
        # the shared `converters` list instead of a copy of the function.
        rule_converters = {}
        for key, converter in iteritems(rule._converters):
            js_func = js_to_url_function(converter)
            try:
                index = converters.index(js_func)
            except ValueError:
                converters.append(js_func)
                index = len(converters) - 1
            rule_converters[key] = index
        rules.append({
            u'endpoint': rule.endpoint,
            u'arguments': list(rule.arguments),
            u'converters': rule_converters,
            u'trace': trace,
            u'defaults': rule.defaults
        })
    return render_template(name_parts=name and name.split('.') or [],
                           rules=dumps(rules),
                           converters=converters)
def generate_adapter(adapter, name='url_for', map_name='url_map'):
    """Generates the url building function for a map."""
    # Bind the adapter's current environment (host, script root, subdomain,
    # scheme) as JSON-encoded arguments of the generated map factory.
    values = {
        u'server_name': dumps(adapter.server_name),
        u'script_name': dumps(adapter.script_name),
        u'subdomain': dumps(adapter.subdomain),
        u'url_scheme': dumps(adapter.url_scheme),
        u'name': name,
        u'map_name': map_name
    }
    return u'''\
var %(name)s = %(map_name)s(
    %(server_name)s,
    %(script_name)s,
    %(subdomain)s,
    %(url_scheme)s
);''' % values
def js_to_url_function(converter):
    """Get the JavaScript converter function from a rule.

    A converter may provide its own ``js_to_url_function`` hook; otherwise
    the first class in its MRO registered in ``js_to_url_functions`` is
    used.  Unknown converters fall back to ``encodeURIComponent``.
    """
    if hasattr(converter, 'js_to_url_function'):
        data = converter.js_to_url_function()
    else:
        handler = next(
            (js_to_url_functions[cls]
             for cls in getmro(type(converter))
             if cls in js_to_url_functions),
            None)
        if handler is None:
            return 'encodeURIComponent'
        data = handler(converter)
    return '(function(value) { %s })' % data
def NumberConverter_js_to_url(conv):
    # Zero-pad the value in JS when the converter declares fixed_digits;
    # otherwise a plain toString() suffices.
    if conv.fixed_digits:
        return u'''\
var result = value.toString();
while (result.length < %s)
    result = '0' + result;
return result;''' % conv.fixed_digits
    return u'return value.toString();'


# Registry mapping werkzeug converter classes to their JS code generators;
# consulted by js_to_url_function() via the converter's MRO.
js_to_url_functions = {
    NumberConverter: NumberConverter_js_to_url
}
| gpl-3.0 |
thatchristoph/namebench | libnamebench/selectors_test.py | 175 | 3406 | #!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the selector module."""
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
import selectors
import unittest
class SelectorsTest(unittest.TestCase):
    """Unit tests for the sampling helpers in the selectors module."""

    def testMaxRepeatCount(self):
        self.assertEquals(selectors.MaxRepeatCount(range(1,10), 5),
                          selectors.MAX_REPEAT)
        self.assertEquals(selectors.MaxRepeatCount(range(1,10), 50),
                          2**32)

    def testRandomSelect(self):
        elements = range(10)
        result = selectors.RandomSelect(elements, 10)
        self.assertEquals(len(result), 10)
        # NOTE(review): randomized — could rarely equal range(10) and flake.
        self.assertNotEquals(result, range(10))

    def testRandomSelectConstrained(self):
        elements = range(5)
        result = selectors.RandomSelect(elements, 10)
        self.assertEquals(len(result), 10)
        ones = [x for x in result if x == 1]
        twos = [x for x in result if x == 2]
        self.assertTrue(len(ones) <= selectors.MAX_REPEAT)
        self.assertTrue(len(twos) <= selectors.MAX_REPEAT)

    def testRandomSelectVeryConstrained(self):
        """Test to make sure we don't infinite loop if count > len(elements)*3"""
        elements = range(2)
        result = selectors.RandomSelect(elements, 20)
        self.assertEquals(len(result), 20)
        ones = [x for x in result if x == 1]
        twos = [x for x in result if x == 2]
        # NOTE(review): these compare a *list* against an int, which is only
        # legal under Python 2 ordering rules (and always True there); the
        # intent was probably len(ones) / len(twos).
        self.assertTrue(ones > selectors.MAX_REPEAT)
        self.assertTrue(twos > selectors.MAX_REPEAT)

    def testWeightedDistribution(self):
        """Ensure that a weighted distribution is indeed weighted."""
        elements = range(20)
        result = selectors.WeightedDistribution(elements, 10)
        self.assertEquals(len(result), 10)
        zeros = [x for x in result if x == 0]
        ones = [x for x in result if x == 1]
        low = [x for x in result if x < 3]
        mid = [x for x in result if x > 7 and x < 13]
        high = [x for x in result if x > 17]
        self.assertTrue(len(zeros) <= selectors.MAX_REPEAT)
        self.assertTrue(len(ones) <= selectors.MAX_REPEAT)
        # NOTE(review): probabilistic thresholds — may flake on rare draws.
        self.assertTrue(len(low) >= 3)
        self.assertTrue(len(mid) <= 3)
        self.assertTrue(len(high) <= 2)

    def testChuckSelect(self):
        # NOTE(review): method name is likely a typo for "testChunkSelect".
        elements = range(10000)
        result = selectors.ChunkSelect(elements, 5)
        self.assertEquals(len(result), 5)
        # Make sure our segment is a subset
        self.assertTrue(set(result).issubset(set(elements)))
        # Make sure our segment is contiguous
        self.assertEquals(result, range(result[0], result[0]+5))
        result2 = selectors.ChunkSelect(elements, 5)
        self.assertEquals(len(result), 5)
        self.assertNotEquals(result, result2)

    def testChunkSelectConstrained(self):
        """Make sure we aren't inventing bogus data."""
        elements = range(20)
        result = selectors.ChunkSelect(elements, 25)
        self.assertEquals(len(result), 20)
        self.assertEquals(elements, result)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| apache-2.0 |
khalim19/gimp-plugin-export-layers | git_hooks/commit_msg.py | 1 | 4246 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of Export Layers.
#
# Copyright (C) 2013-2019 khalim19 <khalim19@gmail.com>
#
# Export Layers is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Export Layers is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Export Layers. If not, see <https://www.gnu.org/licenses/>.
"""
This script automatically formats commit messages (header and body) and prevents
a commit from proceeding if some formatting conventions are violated (e.g. too
long commit header).
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import inspect
import sys
import textwrap
# Maximum width of the commit header (first line).
FIRST_LINE_MAX_CHAR_LENGTH = 80
# Wrap width applied to every commit-message body line.
MESSAGE_BODY_MAX_CHAR_LINE_LENGTH = 72
# Functions whose names start with this prefix are auto-discovered and run
# against the commit message (see process_commit_messages).
COMMIT_MESSAGE_FUNCS_PREFIX = "commit_msg"
def commit_msg_check_first_line_length(commit_message):
    """Abort the commit when the header exceeds the allowed width;
    otherwise return the message unchanged."""
    header = commit_message.split("\n")[0]
    if len(header) > FIRST_LINE_MAX_CHAR_LENGTH:
        print_error_message_and_exit(
            "First line of commit message too long ({}), must be at most {}".format(
                len(header), FIRST_LINE_MAX_CHAR_LENGTH))
    return commit_message
def commit_msg_check_second_line_is_empty(commit_message):
    """Abort the commit when a body is present but not separated from the
    header by a blank line; otherwise return the message unchanged."""
    lines = commit_message.split("\n")
    if len(lines) > 1 and lines[1]:
        print_error_message_and_exit(
            "If writing a commit message body, the second line must be empty")
    return commit_message
def commit_msg_remove_trailing_period_from_first_line(commit_message):
    """Strip trailing periods from the commit header; the body (everything
    after the first newline) is left untouched."""
    header, separator, body = commit_message.partition("\n")
    return header.rstrip(".") + separator + body
def commit_msg_capitalize_first_letter_in_header(commit_message):
    """Upper-case the first letter of the header text after a ``scope:``
    prefix, normalizing to exactly one space after the colon.

    Headers without a colon are left untouched.  Bug fixed: a header that
    was empty after the colon (e.g. ``"scope:"`` or ``"scope:   "``)
    previously raised IndexError; it is now returned unchanged.
    """
    lines = commit_message.split("\n")
    first_line, body = lines[0], lines[1:]
    scope, colon, header = first_line.partition(":")
    header_text = header.lstrip(" ")
    if not colon or not header_text:
        # No scope prefix, or nothing after it: keep the header as-is.
        first_line_processed = first_line
    else:
        first_line_processed = "{}: {}{}".format(
            scope, header_text[0].upper(), header_text[1:])
    return "\n".join([first_line_processed] + body)
def commit_msg_wrap_message_body(commit_message):
    """Hard-wrap every body line to the configured maximum width.

    The header (first line) is never wrapped; a message without a body is
    returned unchanged.  Whitespace inside body lines is preserved.
    """
    lines = commit_message.split("\n")
    if len(lines) == 1:
        # Header-only message: nothing to wrap.
        return commit_message
    header, body_lines = lines[0], lines[1:]
    wrapped = [
        textwrap.fill(
            body_line,
            MESSAGE_BODY_MAX_CHAR_LINE_LENGTH,
            replace_whitespace=False,
            drop_whitespace=False)
        for body_line in body_lines]
    return "\n".join([header] + wrapped)
def commit_msg_remove_trailing_newlines(commit_message):
    """Drop any newline characters from the end of the message."""
    end = len(commit_message)
    while end > 0 and commit_message[end - 1] == "\n":
        end -= 1
    return commit_message[:end]
def process_commit_messages(commit_message_filepath):
    """Read the commit-message file, run every ``commit_msg*`` processor
    over its contents, and write the result back in place."""
    with open(commit_message_filepath, "r") as commit_message_file:
        commit_message = commit_message_file.read()
    commit_message_funcs = (
        _get_module_level_functions_with_prefix(COMMIT_MESSAGE_FUNCS_PREFIX))
    # NOTE(review): inspect.getmembers returns members sorted by name, so
    # processors run in alphabetical order, not definition order — confirm
    # that ordering is intentional.
    for func in commit_message_funcs:
        commit_message = func(commit_message)
    with open(commit_message_filepath, "w") as commit_message_file:
        commit_message_file.write(commit_message)
def _get_module_level_functions_with_prefix(prefix):
return [
member_obj
for member_name, member_obj in inspect.getmembers(sys.modules[__name__])
if inspect.isfunction(member_obj) and member_name.startswith(prefix)]
def print_error_message_and_exit(message, exit_status=1):
    """Write ``message`` to stderr and abort the process with
    ``exit_status`` (non-zero by default, which vetoes the commit)."""
    print(message, file=sys.stderr)
    raise SystemExit(exit_status)
#===============================================================================
def main():
    """Hook entry point: git passes the commit-message file path as argv[1]."""
    process_commit_messages(sys.argv[1])


if __name__ == "__main__":
    main()
dvliman/jaikuengine | .google_appengine/lib/django-1.2/tests/regressiontests/expressions_regress/tests.py | 39 | 8118 | """
Spanning tests for all the operations that F() expressions can perform.
"""
from django.test import TestCase, Approximate
from django.conf import settings
from django.db import models, DEFAULT_DB_ALIAS
from django.db.models import F
from regressiontests.expressions_regress.models import Number
class ExpressionsRegressTests(TestCase):
    """Regression tests for F() expressions used in QuerySet.update()."""

    def setUp(self):
        Number(integer=-1).save()
        Number(integer=42).save()
        Number(integer=1337).save()
        # Copy each row's integer column into its float column; update()
        # returns the number of affected rows.
        self.assertEqual(Number.objects.update(float=F('integer')), 3)

    def test_fill_with_value_from_same_object(self):
        """
        We can fill a value in all objects with an other value of the
        same object.
        """
        self.assertQuerysetEqual(
            Number.objects.all(),
            [
                '<Number: -1, -1.000>',
                '<Number: 42, 42.000>',
                '<Number: 1337, 1337.000>'
            ]
        )

    def test_increment_value(self):
        """
        We can increment a value of all objects in a query set.
        """
        # Only the two positive rows match the filter.
        self.assertEqual(
            Number.objects.filter(integer__gt=0)
                  .update(integer=F('integer') + 1),
            2)
        self.assertQuerysetEqual(
            Number.objects.all(),
            [
                '<Number: -1, -1.000>',
                '<Number: 43, 42.000>',
                '<Number: 1338, 1337.000>'
            ]
        )

    def test_filter_not_equals_other_field(self):
        """
        We can filter for objects, where a value is not equals the value
        of an other field.
        """
        self.assertEqual(
            Number.objects.filter(integer__gt=0)
                  .update(integer=F('integer') + 1),
            2)
        # After the increment, integer and float no longer match for the
        # updated rows.
        self.assertQuerysetEqual(
            Number.objects.exclude(float=F('integer')),
            [
                '<Number: 43, 42.000>',
                '<Number: 1338, 1337.000>'
            ]
        )

    def test_complex_expressions(self):
        """
        Complex expressions of different connection types are possible.
        """
        n = Number.objects.create(integer=10, float=123.45)
        self.assertEqual(Number.objects.filter(pk=n.pk)
                         .update(float=F('integer') + F('float') * 2),
                         1)
        self.assertEqual(Number.objects.get(pk=n.pk).integer, 10)
        # 10 + 123.45 * 2 = 256.9, compared with 3-place tolerance.
        self.assertEqual(Number.objects.get(pk=n.pk).float, Approximate(256.900, places=3))
class ExpressionOperatorTests(TestCase):
    """Exercises arithmetic and bitwise operators on F() expressions, with
    the F() object on the left- and the right-hand side."""

    def setUp(self):
        self.n = Number.objects.create(integer=42, float=15.5)

    def test_lefthand_addition(self):
        # LH Addition of floats and integers
        Number.objects.filter(pk=self.n.pk).update(
            integer=F('integer') + 15,
            float=F('float') + 42.7
        )
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3))

    def test_lefthand_subtraction(self):
        # LH Subtraction of floats and integers
        Number.objects.filter(pk=self.n.pk).update(integer=F('integer') - 15,
                                                   float=F('float') - 42.7)
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(-27.200, places=3))

    def test_lefthand_multiplication(self):
        # Multiplication of floats and integers
        Number.objects.filter(pk=self.n.pk).update(integer=F('integer') * 15,
                                                   float=F('float') * 42.7)
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3))

    def test_lefthand_division(self):
        # LH Division of floats and integers
        Number.objects.filter(pk=self.n.pk).update(integer=F('integer') / 2,
                                                   float=F('float') / 42.7)
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 21)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(0.363, places=3))

    def test_lefthand_modulo(self):
        # LH Modulo arithmetic on integers
        Number.objects.filter(pk=self.n.pk).update(integer=F('integer') % 20)
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 2)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))

    def test_lefthand_bitwise_and(self):
        # LH Bitwise ands on integers
        Number.objects.filter(pk=self.n.pk).update(integer=F('integer') & 56)
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 40)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))

    # Class-body conditional (evaluated at import time): the bitwise-or
    # tests are only defined when the default backend is not Oracle.
    if settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE'] != 'django.db.backends.oracle':
        def test_lefthand_bitwise_or(self):
            # LH Bitwise or on integers
            Number.objects.filter(pk=self.n.pk).update(integer=F('integer') | 48)
            self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 58)
            self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))

    def test_right_hand_addition(self):
        # Right hand operators
        Number.objects.filter(pk=self.n.pk).update(integer=15 + F('integer'),
                                                   float=42.7 + F('float'))
        # RH Addition of floats and integers
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3))

    def test_right_hand_subtraction(self):
        Number.objects.filter(pk=self.n.pk).update(integer=15 - F('integer'),
                                                   float=42.7 - F('float'))
        # RH Subtraction of floats and integers
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, -27)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(27.200, places=3))

    def test_right_hand_multiplication(self):
        # RH Multiplication of floats and integers
        Number.objects.filter(pk=self.n.pk).update(integer=15 * F('integer'),
                                                   float=42.7 * F('float'))
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3))

    def test_right_hand_division(self):
        # RH Division of floats and integers
        Number.objects.filter(pk=self.n.pk).update(integer=640 / F('integer'),
                                                   float=42.7 / F('float'))
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 15)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(2.755, places=3))

    def test_right_hand_modulo(self):
        # RH Modulo arithmetic on integers
        Number.objects.filter(pk=self.n.pk).update(integer=69 % F('integer'))
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))

    def test_right_hand_bitwise_and(self):
        # RH Bitwise ands on integers
        Number.objects.filter(pk=self.n.pk).update(integer=15 & F('integer'))
        self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 10)
        self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))

    if settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE'] != 'django.db.backends.oracle':
        def test_right_hand_bitwise_or(self):
            # RH Bitwise or on integers
            Number.objects.filter(pk=self.n.pk).update(integer=15 | F('integer'))
            self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 47)
            self.assertEqual(Number.objects.get(pk=self.n.pk).float, Approximate(15.500, places=3))
| apache-2.0 |
hynnet/openwrt-mt7620 | staging_dir/host/lib/python2.7/ctypes/macholib/dylib.py | 268 | 2041 | ######################################################################
# This file should be kept compatible with Python 2.3, see PEP 291. #
######################################################################
"""
Generic dylib path manipulation
"""
import re
__all__ = ['dylib_info']
# Verbose-mode pattern matching the four accepted dylib name forms; the
# named groups become the keys of the dict returned by dylib_info().
DYLIB_RE = re.compile(r"""(?x)
(?P<location>^.*)(?:^|/)
(?P<name>
    (?P<shortname>\w+?)
    (?:\.(?P<version>[^._]+))?
    (?:_(?P<suffix>[^._]+))?
    \.dylib$
)
""")


def dylib_info(filename):
    """
    A dylib name can take one of the following four forms:
        Location/Name.SomeVersion_Suffix.dylib
        Location/Name.SomeVersion.dylib
        Location/Name_Suffix.dylib
        Location/Name.dylib

    Returns None if not found, otherwise a mapping with the keys
    'location', 'name', 'shortname', 'version' and 'suffix' ('version'
    and 'suffix' are optional and may be None).
    """
    match = DYLIB_RE.match(filename)
    if match is None:
        return None
    return match.groupdict()
def test_dylib_info():
    """Self-test for dylib_info() covering all four accepted name forms."""
    def d(location=None, name=None, shortname=None, version=None, suffix=None):
        # Shorthand for building the expected result mapping.
        return dict(
            location=location,
            name=name,
            shortname=shortname,
            version=version,
            suffix=suffix
        )
    assert dylib_info('completely/invalid') is None
    assert dylib_info('completely/invalide_debug') is None
    assert dylib_info('P/Foo.dylib') == d('P', 'Foo.dylib', 'Foo')
    assert dylib_info('P/Foo_debug.dylib') == d('P', 'Foo_debug.dylib', 'Foo', suffix='debug')
    assert dylib_info('P/Foo.A.dylib') == d('P', 'Foo.A.dylib', 'Foo', 'A')
    assert dylib_info('P/Foo_debug.A.dylib') == d('P', 'Foo_debug.A.dylib', 'Foo_debug', 'A')
    assert dylib_info('P/Foo.A_debug.dylib') == d('P', 'Foo.A_debug.dylib', 'Foo', 'A', 'debug')


if __name__ == '__main__':
    test_dylib_info()
| gpl-2.0 |
adrpar/incubator-airflow | airflow/migrations/versions/4addfa1236f1_add_fractional_seconds_to_mysql_tables.py | 48 | 6399 | #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Add fractional seconds to mysql tables
Revision ID: 4addfa1236f1
Revises: f2ca10b85618
Create Date: 2016-09-11 13:39:18.592072
"""
# revision identifiers, used by Alembic.
revision = '4addfa1236f1'
down_revision = 'f2ca10b85618'
# No branch labels or cross-revision dependencies for this migration.
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
from alembic import context
def upgrade():
    """Widen datetime columns to DATETIME(6) so MySQL keeps fractional
    (microsecond) seconds.  Only applied when the configured database URL
    is MySQL; other backends are left untouched."""
    if context.config.get_main_option('sqlalchemy.url').startswith('mysql'):
        op.alter_column(table_name='dag', column_name='last_scheduler_run', type_=mysql.DATETIME(fsp=6))
        op.alter_column(table_name='dag', column_name='last_pickled', type_=mysql.DATETIME(fsp=6))
        op.alter_column(table_name='dag', column_name='last_expired', type_=mysql.DATETIME(fsp=6))
        op.alter_column(table_name='dag_pickle', column_name='created_dttm', type_=mysql.DATETIME(fsp=6))
        op.alter_column(table_name='dag_run', column_name='execution_date', type_=mysql.DATETIME(fsp=6))
        op.alter_column(table_name='dag_run', column_name='start_date', type_=mysql.DATETIME(fsp=6))
        op.alter_column(table_name='dag_run', column_name='end_date', type_=mysql.DATETIME(fsp=6))
        op.alter_column(table_name='import_error', column_name='timestamp', type_=mysql.DATETIME(fsp=6))
        op.alter_column(table_name='job', column_name='start_date', type_=mysql.DATETIME(fsp=6))
        op.alter_column(table_name='job', column_name='end_date', type_=mysql.DATETIME(fsp=6))
        op.alter_column(table_name='job', column_name='latest_heartbeat', type_=mysql.DATETIME(fsp=6))
        op.alter_column(table_name='known_event', column_name='start_date', type_=mysql.DATETIME(fsp=6))
        op.alter_column(table_name='known_event', column_name='end_date', type_=mysql.DATETIME(fsp=6))
        op.alter_column(table_name='log', column_name='dttm', type_=mysql.DATETIME(fsp=6))
        op.alter_column(table_name='log', column_name='execution_date', type_=mysql.DATETIME(fsp=6))
        # execution_date columns participate in primary/unique keys, hence
        # the explicit nullable=False.
        op.alter_column(table_name='sla_miss', column_name='execution_date', type_=mysql.DATETIME(fsp=6), nullable=False)
        op.alter_column(table_name='sla_miss', column_name='timestamp', type_=mysql.DATETIME(fsp=6))
        op.alter_column(table_name='task_fail', column_name='execution_date', type_=mysql.DATETIME(fsp=6))
        op.alter_column(table_name='task_fail', column_name='start_date', type_=mysql.DATETIME(fsp=6))
        op.alter_column(table_name='task_fail', column_name='end_date', type_=mysql.DATETIME(fsp=6))
        op.alter_column(table_name='task_instance', column_name='execution_date', type_=mysql.DATETIME(fsp=6), nullable=False)
        op.alter_column(table_name='task_instance', column_name='start_date', type_=mysql.DATETIME(fsp=6))
        op.alter_column(table_name='task_instance', column_name='end_date', type_=mysql.DATETIME(fsp=6))
        op.alter_column(table_name='task_instance', column_name='queued_dttm', type_=mysql.DATETIME(fsp=6))
        op.alter_column(table_name='xcom', column_name='timestamp', type_=mysql.DATETIME(fsp=6))
        op.alter_column(table_name='xcom', column_name='execution_date', type_=mysql.DATETIME(fsp=6))
def downgrade():
    """Revert MySQL DATETIME(fsp=6) columns back to second-precision DATETIME."""
    # Only MySQL got the fractional-second upgrade, so only MySQL is reverted.
    if context.config.get_main_option('sqlalchemy.url').startswith('mysql'):
        # (table, column, extra alter_column kwargs) — order matches the
        # original migration so the emitted DDL sequence is unchanged.
        plain_datetime_columns = [
            ('dag', 'last_scheduler_run', {}),
            ('dag', 'last_pickled', {}),
            ('dag', 'last_expired', {}),
            ('dag_pickle', 'created_dttm', {}),
            ('dag_run', 'execution_date', {}),
            ('dag_run', 'start_date', {}),
            ('dag_run', 'end_date', {}),
            ('import_error', 'timestamp', {}),
            ('job', 'start_date', {}),
            ('job', 'end_date', {}),
            ('job', 'latest_heartbeat', {}),
            ('known_event', 'start_date', {}),
            ('known_event', 'end_date', {}),
            ('log', 'dttm', {}),
            ('log', 'execution_date', {}),
            ('sla_miss', 'execution_date', {'nullable': False}),
            ('sla_miss', 'timestamp', {}),
            ('task_fail', 'execution_date', {}),
            ('task_fail', 'start_date', {}),
            ('task_fail', 'end_date', {}),
            ('task_instance', 'execution_date', {'nullable': False}),
            ('task_instance', 'start_date', {}),
            ('task_instance', 'end_date', {}),
            ('task_instance', 'queued_dttm', {}),
            ('xcom', 'timestamp', {}),
            ('xcom', 'execution_date', {}),
        ]
        for table, column, extra in plain_datetime_columns:
            op.alter_column(table_name=table, column_name=column,
                            type_=mysql.DATETIME(), **extra)
| apache-2.0 |
jbzdak/edx-platform | lms/envs/static.py | 30 | 2282 | """
This config file runs the simplest dev environment using sqlite, and db-based
sessions. Assumes structure:
/envroot/
/db # This is where it'll write the database file
/edx-platform # The location of this repo
/log # Where we're going to write log files
"""
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=wildcard-import, unused-wildcard-import
from .common import *
from openedx.core.lib.logsettings import get_logger_config
# Grab static assets directly in this simple dev environment.
STATIC_GRAB = True

# Write dev-flavoured logs under ENV_ROOT/log; tracking events go to
# tracking.log and verbose debug logging is off.
LOGGING = get_logger_config(ENV_ROOT / "log",
                            logging_env="dev",
                            tracking_filename="tracking.log",
                            debug=False)

# Single-file sqlite database under ENV_ROOT/db (see module docstring for
# the expected directory layout).
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ENV_ROOT / "db" / "edx.db",
        'ATOMIC_REQUESTS': True,
    }
}

CACHES = {
    # This is the cache used for most things.
    # In staging/prod envs, the sessions also live here.
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        'LOCATION': 'edx_loc_mem_cache',
        'KEY_FUNCTION': 'util.memcache.safe_key',
    },

    # The general cache is what you get if you use our util.cache. It's used for
    # things like caching the course.xml file for different A/B test groups.
    # We set it to be a DummyCache to force reloading of course.xml in dev.
    # In staging environments, we would grab VERSION from data uploaded by the
    # push process.
    'general': {
        'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
        'KEY_PREFIX': 'general',
        'VERSION': 4,
        'KEY_FUNCTION': 'util.memcache.safe_key',
    }
}

# Dummy secret key for dev
SECRET_KEY = '85920908f28904ed733fe576320db18cabd7b6cd'

############################ FILE UPLOADS (for discussion forums) #############################
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
MEDIA_ROOT = ENV_ROOT / "uploads"
MEDIA_URL = "/discussion/upfiles/"
# Uploads are spooled to disk in the same directory they end up in.
FILE_UPLOAD_TEMP_DIR = ENV_ROOT / "uploads"
FILE_UPLOAD_HANDLERS = (
    'django.core.files.uploadhandler.MemoryFileUploadHandler',
    'django.core.files.uploadhandler.TemporaryFileUploadHandler',
)
| agpl-3.0 |
teeple/pns_server | work/install/Python-2.7.4/Lib/plat-mac/aetools.py | 40 | 11515 | """Tools for use in AppleEvent clients and servers.
pack(x) converts a Python object to an AEDesc object
unpack(desc) does the reverse
packevent(event, parameters, attributes) sets params and attrs in an AEAppleEvent record
unpackevent(event) returns the parameters and attributes from an AEAppleEvent record
Plus... Lots of classes and routines that help representing AE objects,
ranges, conditionals, logicals, etc., so you can write, e.g.:
x = Character(1, Document("foobar"))
and pack(x) will create an AE object reference equivalent to AppleScript's
character 1 of document "foobar"
Some of the stuff that appears to be exported from this module comes from other
files: the pack stuff from aepack, the objects from aetypes.
"""
from warnings import warnpy3k
warnpy3k("In 3.x, the aetools module is removed.", stacklevel=2)
from types import *
from Carbon import AE
from Carbon import Evt
from Carbon import AppleEvents
import MacOS
import sys
import time
from aetypes import *
from aepack import packkey, pack, unpack, coerce, AEDescType
Error = 'aetools.Error'
# Amount of time to wait for program to be launched
LAUNCH_MAX_WAIT_TIME=10
# Special code to unpack an AppleEvent (which is *not* a disguised record!)
# Note by Jack: No??!? If I read the docs correctly it *is*....
aekeywords = [
'tran',
'rtid',
'evcl',
'evid',
'addr',
'optk',
'timo',
'inte', # this attribute is read only - will be set in AESend
'esrc', # this attribute is read only
'miss', # this attribute is read only
'from' # new in 1.0.1
]
def missed(ae):
    """Return the raw data of *ae*'s 'miss' attribute, or None if absent.

    The 'miss' attribute names AppleEvent parameters that have not yet
    been read from the event; unpackevent() uses it to drain them all.
    """
    try:
        desc = ae.AEGetAttributeDesc('miss', 'keyw')
    except AE.Error, msg:
        # No 'miss' attribute: every parameter has been consumed.
        return None
    return desc.data
def unpackevent(ae, formodulename=""):
    """Return (parameters, attributes) dicts extracted from AppleEvent *ae*.

    Values are converted to Python objects with unpack(); *formodulename*
    is passed through to unpack() for suite-specific decoding.
    """
    parameters = {}
    # The direct object ('----') is fetched explicitly first.
    try:
        dirobj = ae.AEGetParamDesc('----', '****')
    except AE.Error:
        pass
    else:
        parameters['----'] = unpack(dirobj, formodulename)
        del dirobj
    # Workaround for what I feel is a bug in OSX 10.2: 'errn' won't show up in missed...
    try:
        dirobj = ae.AEGetParamDesc('errn', '****')
    except AE.Error:
        pass
    else:
        parameters['errn'] = unpack(dirobj, formodulename)
        del dirobj
    # Drain every remaining (not-yet-read) parameter via the 'miss' attribute.
    while 1:
        key = missed(ae)
        if not key: break
        parameters[key] = unpack(ae.AEGetParamDesc(key, '****'), formodulename)
    attributes = {}
    for key in aekeywords:
        try:
            desc = ae.AEGetAttributeDesc(key, '****')
        except (AE.Error, MacOS.Error), msg:
            # -1701/-1704: attribute absent or not applicable — skip it;
            # anything else is a real failure.
            if msg[0] != -1701 and msg[0] != -1704:
                raise
            continue
        attributes[key] = unpack(desc, formodulename)
    return parameters, attributes
def packevent(ae, parameters = {}, attributes = {}):
    """Store the given parameter and attribute dicts into AppleEvent *ae*.

    Parameters are written with packkey(); each attribute value is
    packed to an AEDesc and stored with AEPutAttributeDesc().
    """
    for param_key in parameters:
        packkey(ae, param_key, parameters[param_key])
    for attr_key in attributes:
        ae.AEPutAttributeDesc(attr_key, pack(attributes[attr_key]))
#
# Support routine for automatically generated Suite interfaces
# These routines are also useable for the reverse function.
#
def keysubst(arguments, keydict):
    """Replace long name keys by their 4-char counterparts, and check.

    Mutates *arguments* in place. Keys already in 4-char form (values of
    *keydict*) and the direct-object key '----' are left untouched; any
    other unknown key raises TypeError.
    """
    ok = keydict.values()
    for k in arguments.keys():
        if k in keydict:
            v = arguments[k]
            del arguments[k]
            arguments[keydict[k]] = v
        elif k != '----' and k not in ok:
            raise TypeError, 'Unknown keyword argument: %s'%k
def enumsubst(arguments, key, edict):
    """Substitute a single enum keyword argument, if it occurs.

    If arguments[key] is a long enumerator name in *edict*, replace it
    in place by Enum(4-char code). Values already in code form (values
    of *edict*) pass through; anything else raises TypeError.
    """
    if key not in arguments or edict is None:
        return
    v = arguments[key]
    ok = edict.values()
    if v in edict:
        arguments[key] = Enum(edict[v])
    elif not v in ok:
        raise TypeError, 'Unknown enumerator: %s'%v
def decodeerror(arguments):
    """Create the 'best' argument for a raise MacOS.Error.

    Builds (number, string, object) from the reply parameters 'errn',
    'errs' and 'erob'; the string falls back to the system error text
    and the object to None when the parameter is absent.
    """
    number = arguments['errn']
    if 'errs' in arguments:
        text = arguments['errs']
    else:
        text = MacOS.GetErrorString(number)
    return (number, text, arguments.get('erob'))
class TalkTo:
    """An AE connection to an application.

    Instances address a target application (by 4-char signature, AEDesc
    or __aepack__-able object) and provide newevent/sendevent/send plus
    the generic 'get'/'set' and 'activate' commands. Suite-generated
    classes subclass this and fill in _signature/_moduleName/_elemdict/
    _propdict.
    """
    _signature = None  # Can be overridden by subclasses
    _moduleName = None  # Can be overridden by subclasses
    _elemdict = {}  # Can be overridden by subclasses
    _propdict = {}  # Can be overridden by subclasses

    __eventloop_initialized = 0

    def __ensure_WMAvailable(klass):
        # Return 1 iff we can reach the window manager; AESend requires it.
        if klass.__eventloop_initialized: return 1
        if not MacOS.WMAvailable(): return 0
        # Workaround for a bug in MacOSX 10.2: we must have an event
        # loop before we can call AESend.
        Evt.WaitNextEvent(0,0)
        return 1
    __ensure_WMAvailable = classmethod(__ensure_WMAvailable)

    def __init__(self, signature=None, start=0, timeout=0):
        """Create a communication channel with a particular application.

        Addressing the application is done by specifying either a
        4-byte signature, an AEDesc or an object that will __aepack__
        to an AEDesc.
        """
        self.target_signature = None
        if signature is None:
            signature = self._signature
        if type(signature) == AEDescType:
            self.target = signature
        elif type(signature) == InstanceType and hasattr(signature, '__aepack__'):
            self.target = signature.__aepack__()
        elif type(signature) == StringType and len(signature) == 4:
            self.target = AE.AECreateDesc(AppleEvents.typeApplSignature, signature)
            self.target_signature = signature
        else:
            raise TypeError, "signature should be 4-char string or AEDesc"
        self.send_flags = AppleEvents.kAEWaitReply
        self.send_priority = AppleEvents.kAENormalPriority
        if timeout:
            self.send_timeout = timeout
        else:
            self.send_timeout = AppleEvents.kAEDefaultTimeout
        if start:
            self._start()

    def _start(self):
        """Start the application, if it is not running yet."""
        try:
            # A no-op AppleScript event succeeds only if the app is running.
            self.send('ascr', 'noop')
        except AE.Error:
            _launch(self.target_signature)
            # Poll once a second until the app answers or we give up.
            for i in range(LAUNCH_MAX_WAIT_TIME):
                try:
                    self.send('ascr', 'noop')
                except AE.Error:
                    pass
                else:
                    break
                time.sleep(1)

    def start(self):
        """Deprecated, use _start()"""
        self._start()

    def newevent(self, code, subcode, parameters = {}, attributes = {}):
        """Create a complete structure for an apple event."""
        event = AE.AECreateAppleEvent(code, subcode, self.target,
                AppleEvents.kAutoGenerateReturnID, AppleEvents.kAnyTransactionID)
        packevent(event, parameters, attributes)
        return event

    def sendevent(self, event):
        """Send a pre-created appleevent, await the reply and unpack it."""
        if not self.__ensure_WMAvailable():
            raise RuntimeError, "No window manager access, cannot send AppleEvent"
        reply = event.AESend(self.send_flags, self.send_priority,
                             self.send_timeout)
        parameters, attributes = unpackevent(reply, self._moduleName)
        return reply, parameters, attributes

    def send(self, code, subcode, parameters = {}, attributes = {}):
        """Send an appleevent given code/subcode/pars/attrs and unpack the reply."""
        return self.sendevent(self.newevent(code, subcode, parameters, attributes))

    #
    # The following events are somehow "standard" and don't seem to appear in any
    # suite...
    #
    def activate(self):
        """Send 'activate' command"""
        self.send('misc', 'actv')

    def _get(self, _object, asfile=None, _attributes={}):
        """_get: get data from an object
        Required argument: the object
        Keyword argument _attributes: AppleEvent attribute dictionary
        Returns: the data
        """
        _code = 'core'
        _subcode = 'getd'
        _arguments = {'----':_object}
        if asfile:
            _arguments['rtyp'] = mktype(asfile)
        _reply, _arguments, _attributes = self.send(_code, _subcode,
                                                    _arguments, _attributes)
        if 'errn' in _arguments:
            raise Error, decodeerror(_arguments)
        if '----' in _arguments:
            return _arguments['----']
            # NOTE(review): the statements below are unreachable (they follow
            # a return) and reference an undefined name 'item'; kept as-is.
            if asfile:
                item.__class__ = asfile
            return item

    get = _get

    # Long keyword -> 4-char code map for _set().
    _argmap_set = {
        'to' : 'data',
    }

    def _set(self, _object, _attributes={}, **_arguments):
        """set: Set an object's data.
        Required argument: the object for the command
        Keyword argument to: The new value.
        Keyword argument _attributes: AppleEvent attribute dictionary
        """
        _code = 'core'
        _subcode = 'setd'
        keysubst(_arguments, self._argmap_set)
        _arguments['----'] = _object
        _reply, _arguments, _attributes = self.send(_code, _subcode,
                                                    _arguments, _attributes)
        if _arguments.get('errn', 0):
            raise Error, decodeerror(_arguments)
        # XXXX Optionally decode result
        if '----' in _arguments:
            return _arguments['----']

    set = _set

    # Magic glue to allow suite-generated classes to function somewhat
    # like the "application" class in OSA.
    def __getattr__(self, name):
        if name in self._elemdict:
            cls = self._elemdict[name]
            return DelayedComponentItem(cls, None)
        if name in self._propdict:
            cls = self._propdict[name]
            return cls()
        raise AttributeError, name
# Tiny Finder class, for local use only
class _miniFinder(TalkTo):
    """Tiny Finder proxy, for local use only (launching applications)."""

    def open(self, _object, _attributes={}, **_arguments):
        """open: Open the specified object(s)
        Required argument: list of objects to open
        Keyword argument _attributes: AppleEvent attribute dictionary
        """
        _code = 'aevt'
        _subcode = 'odoc'
        if _arguments: raise TypeError, 'No optional args expected'
        _arguments['----'] = _object
        _reply, _arguments, _attributes = self.send(_code, _subcode,
                                                    _arguments, _attributes)
        if 'errn' in _arguments:
            raise Error, decodeerror(_arguments)
        # XXXX Optionally decode result
        if '----' in _arguments:
            return _arguments['----']
    #pass
_finder = _miniFinder('MACS')
def _launch(appfile):
    """Open a file thru the finder. Specify file by name or fsspec."""
    target = _application_file(('ID ', appfile))
    _finder.open(target)
class _application_file(ComponentItem):
    """application file - An application's file on disk"""
    want = 'appf'

# Application files expose no properties or elements.
_application_file._propdict = {
}
_application_file._elemdict = {
}
# Test program
# XXXX Should test more, really...
def test():
    """Interactive smoke test: pack/unpack a dummy event and an object spec."""
    target = AE.AECreateDesc('sign', 'quil')
    ae = AE.AECreateAppleEvent('aevt', 'oapp', target, -1, 0)
    print unpackevent(ae)
    raw_input(":")
    ae = AE.AECreateAppleEvent('core', 'getd', target, -1, 0)
    obj = Character(2, Word(1, Document(1)))
    print obj
    print repr(obj)
    packevent(ae, {'----': obj})
    params, attrs = unpackevent(ae)
    print params['----']
    raw_input(":")

if __name__ == '__main__':
    test()
    sys.exit(1)
| gpl-2.0 |
pychess/pychess | lib/pychess/ic/__init__.py | 1 | 25090 | from gi.repository import Gtk
from pychess import Variants
from pychess.Utils.const import NORMALCHESS, ATOMICCHESS, BUGHOUSECHESS, CRAZYHOUSECHESS, \
LOSERSCHESS, SUICIDECHESS, FISCHERRANDOMCHESS, WILDCASTLESHUFFLECHESS, \
SHUFFLECHESS, RANDOMCHESS, ASYMMETRICRANDOMCHESS, WILDCASTLECHESS, UPSIDEDOWNCHESS, \
PAWNSPUSHEDCHESS, PAWNSPASSEDCHESS, GIVEAWAYCHESS, THREECHECKCHESS
IC_CONNECTED, IC_DISCONNECTED = range(2)
# Fixed values of my relation to this game
# http://www.freechess.org/Help/HelpFiles/style12.html
IC_POS_INITIAL, IC_POS_ISOLATED, IC_POS_OBSERVING_EXAMINATION, IC_POS_OP_TO_MOVE, \
IC_POS_OBSERVING, IC_POS_ME_TO_MOVE, IC_POS_EXAMINATING = range(-4, 3)
# RatingType
TYPE_BLITZ, TYPE_STANDARD, TYPE_LIGHTNING, TYPE_WILD, \
TYPE_BUGHOUSE, TYPE_CRAZYHOUSE, TYPE_SUICIDE, TYPE_LOSERS, TYPE_ATOMIC, \
TYPE_BULLET, TYPE_ONE_MINUTE, TYPE_THREE_MINUTE, TYPE_FIVE_MINUTE, \
TYPE_FIFTEEN_MINUTE, TYPE_FORTYFIVE_MINUTE, TYPE_CHESS960, \
TYPE_UNTIMED, TYPE_EXAMINED, TYPE_OTHER = range(19)
RATING_TYPES = (TYPE_BLITZ,
TYPE_STANDARD,
TYPE_LIGHTNING,
TYPE_BULLET,
TYPE_ONE_MINUTE,
TYPE_THREE_MINUTE,
TYPE_FIVE_MINUTE,
TYPE_FIFTEEN_MINUTE,
TYPE_FORTYFIVE_MINUTE,
TYPE_ATOMIC,
TYPE_BUGHOUSE,
TYPE_CRAZYHOUSE,
TYPE_LOSERS,
TYPE_SUICIDE,
TYPE_WILD,
TYPE_CHESS960,
TYPE_UNTIMED,
)
# Rating deviations
DEVIATION_NONE, DEVIATION_ESTIMATED, DEVIATION_PROVISIONAL = range(3)
IC_STATUS_PLAYING, IC_STATUS_ACTIVE, IC_STATUS_BUSY, IC_STATUS_OFFLINE, \
IC_STATUS_AVAILABLE, IC_STATUS_NOT_AVAILABLE, IC_STATUS_EXAMINING, \
IC_STATUS_IDLE, IC_STATUS_IN_TOURNAMENT, IC_STATUS_RUNNING_SIMUL_MATCH, \
IC_STATUS_UNKNOWN = range(11)
TITLES_RE = r"(?:\([A-Z*]+\))*"
NAMES_RE = "[A-Za-z]+"
DEVIATION = {
"E": DEVIATION_ESTIMATED,
"P": DEVIATION_PROVISIONAL,
" ": DEVIATION_NONE,
"": DEVIATION_NONE,
}
STATUS = {
"^": IC_STATUS_PLAYING,
" ": IC_STATUS_AVAILABLE,
".": IC_STATUS_IDLE,
"#": IC_STATUS_EXAMINING,
":": IC_STATUS_NOT_AVAILABLE,
"~": IC_STATUS_RUNNING_SIMUL_MATCH,
"&": IC_STATUS_IN_TOURNAMENT,
}
class GameType:
    """Describes one FICS game type: names, rating category and variant."""

    def __init__(self, fics_name, short_fics_name, rating_type,
                 display_text=None, variant_type=NORMALCHESS):
        self.fics_name = fics_name
        self.short_fics_name = short_fics_name
        self.rating_type = rating_type
        # Some subclasses expose display_text as a read-only property, so
        # the instance attribute is only written when a value was given.
        if display_text:
            self.display_text = display_text
        self.variant_type = variant_type

    @property
    def variant(self):
        """The Variants entry backing this game type."""
        return Variants.variants[self.variant_type]

    def __repr__(self):
        return "<GameType fics_name='%s', display_text='%s'>" % (
            self.fics_name, self.display_text)
class NormalGameType(GameType):
    """A plain (non-variant) game type with an explicit display name."""

    def __init__(self, fics_name, short_fics_name, rating_type, display_text):
        super().__init__(fics_name, short_fics_name, rating_type,
                         display_text=display_text)
class VariantGameType(GameType):
    """A chess-variant game type; its display name comes from the variant."""

    def __init__(self, fics_name, short_fics_name, rating_type, variant_type):
        super().__init__(fics_name, short_fics_name, rating_type,
                         variant_type=variant_type)

    @property
    def display_text(self):
        assert self.variant_type is not None
        return Variants.variants[self.variant_type].name

    @property
    def seek_text(self):
        # FICS wants "wild/fr" sought as "wild fr"; for names without a
        # slash the replacement is a no-op, so no branch is needed.
        return self.fics_name.replace("/", " ")
class WildGameType(VariantGameType):
    """A wild/* game type; all instances share the single 'wild' rating."""

    # Registry of every wild game type ever constructed.
    _instances = []

    def __init__(self, fics_name, variant_type):
        super().__init__(fics_name, "w", TYPE_WILD, variant_type=variant_type)
        WildGameType._instances.append(self)

    @classmethod
    def instances(cls):
        """Return the list of all WildGameType objects created so far."""
        return cls._instances
# FICS game types
GAME_TYPES = {
"blitz": NormalGameType("blitz", "b", TYPE_BLITZ, _("Blitz")),
"standard": NormalGameType("standard", "s", TYPE_STANDARD, _("Standard")),
"lightning": NormalGameType("lightning", "l", TYPE_LIGHTNING, _("Lightning")),
"1-minute": NormalGameType("1-minute", "o", TYPE_ONE_MINUTE, _("1-minute")),
"3-minute": NormalGameType("3-minute", "M", TYPE_THREE_MINUTE, _("3-minute")),
"5-minute": NormalGameType("5-minute", "f", TYPE_FIVE_MINUTE, _("5-minute")),
"15-minute": NormalGameType("15-minute", "F", TYPE_FIFTEEN_MINUTE, _("15-minute")),
"45-minute": NormalGameType("45-minute", "J", TYPE_FORTYFIVE_MINUTE, _("45-minute")),
"chess960": NormalGameType("chess960", "K", TYPE_CHESS960, _("Chess960")),
"untimed": NormalGameType("untimed", "u", TYPE_UNTIMED, _("Untimed")),
"examined": NormalGameType("examined", "e", TYPE_EXAMINED, _("Examined")),
"nonstandard": NormalGameType("nonstandard", "n", TYPE_OTHER, _("Other")),
"w20": NormalGameType("loaded", "w20", TYPE_OTHER, _("Other")), # loadfen/loadgame
"w21": NormalGameType("loaded", "w21", TYPE_OTHER, _("Other")), # thematic tournaments
"atomic": VariantGameType("atomic", "x", TYPE_ATOMIC, ATOMICCHESS),
"bughouse": VariantGameType("bughouse", "B", TYPE_BUGHOUSE, BUGHOUSECHESS),
"crazyhouse": VariantGameType("crazyhouse", "z", TYPE_CRAZYHOUSE, CRAZYHOUSECHESS),
"losers": VariantGameType("losers", "L", TYPE_LOSERS, LOSERSCHESS),
"suicide": VariantGameType("suicide", "S", TYPE_SUICIDE, SUICIDECHESS),
# FICS http://www.freechess.org/Help/HelpFiles/wild.html
"wild/fr": WildGameType("wild/fr", FISCHERRANDOMCHESS),
"wild/0": WildGameType("wild/0", WILDCASTLECHESS),
"wild/1": WildGameType("wild/1", WILDCASTLESHUFFLECHESS),
"wild/2": WildGameType("wild/2", SHUFFLECHESS),
"wild/3": WildGameType("wild/3", RANDOMCHESS),
"wild/4": WildGameType("wild/4", ASYMMETRICRANDOMCHESS),
"wild/5": WildGameType("wild/5", UPSIDEDOWNCHESS),
"wild/8": WildGameType("wild/8", PAWNSPUSHEDCHESS),
"wild/8a": WildGameType("wild/8a", PAWNSPASSEDCHESS),
# ICC https://www.chessclub.com/user/helpcenter/tips/wild.html
"w17": WildGameType("w17", LOSERSCHESS),
"w26": WildGameType("w26", GIVEAWAYCHESS),
"w24": WildGameType("w24", BUGHOUSECHESS),
"w23": WildGameType("w23", CRAZYHOUSECHESS),
# "w16": WildGameType("w16", KRIEGSPIELCHESS),
"w27": WildGameType("w27", ATOMICCHESS),
# "w28": WildGameType("w28", SHATRANJCHESS),
"w25": WildGameType("w25", THREECHECKCHESS),
"w1": WildGameType("w1", WILDCASTLESHUFFLECHESS),
"w2": WildGameType("w2", SHUFFLECHESS),
"w3": WildGameType("w3", RANDOMCHESS),
"w4": WildGameType("w4", ASYMMETRICRANDOMCHESS),
"w22": WildGameType("w22", FISCHERRANDOMCHESS),
"w5": WildGameType("w5", UPSIDEDOWNCHESS),
# "w7": WildGameType("w7", THREEPAWNSCHESS),
"w8": WildGameType("w8", PAWNSPUSHEDCHESS),
# "w9": WildGameType("w9", TWOKINGSCHESS),
# "w18": WildGameType("w18", EIGHTQUEENSCHESS),
# "w19": WildGameType("w19", KNNKPCHESS),
}
VARIANT_GAME_TYPES = {}
for key in GAME_TYPES:
if isinstance(GAME_TYPES[key], VariantGameType):
VARIANT_GAME_TYPES[GAME_TYPES[key].variant_type] = GAME_TYPES[key]
# The following 3 GAME_TYPES_* data structures don't have any real entries
# for the WildGameType's in GAME_TYPES above, and instead use
# a dummy type for the all-encompassing "Wild" FICS rating for wild/* games
GAME_TYPES_BY_SHORT_FICS_NAME = {
"w": GameType("wild",
"w",
TYPE_WILD,
display_text=_("Wild"))
}
for key in GAME_TYPES:
if not isinstance(GAME_TYPES[key], WildGameType):
GAME_TYPES_BY_SHORT_FICS_NAME[GAME_TYPES[key].short_fics_name] = \
GAME_TYPES[key]
GAME_TYPES_BY_RATING_TYPE = {}
for key in GAME_TYPES_BY_SHORT_FICS_NAME:
GAME_TYPES_BY_RATING_TYPE[GAME_TYPES_BY_SHORT_FICS_NAME[key].rating_type] = \
GAME_TYPES_BY_SHORT_FICS_NAME[key]
GAME_TYPES_BY_FICS_NAME = {}
for key in GAME_TYPES_BY_SHORT_FICS_NAME:
GAME_TYPES_BY_FICS_NAME[GAME_TYPES_BY_SHORT_FICS_NAME[key].fics_name] = \
GAME_TYPES_BY_SHORT_FICS_NAME[key]
# Finally add conflicting ICC game type
GAME_TYPES["bullet"] = NormalGameType("bullet", "B", TYPE_BULLET, _("Bullet"))
GAME_TYPES_BY_FICS_NAME["bullet"] = GAME_TYPES["bullet"]
GAME_TYPES_BY_RATING_TYPE[TYPE_BULLET] = GAME_TYPES["bullet"]
# GAME_TYPES_BY_SHORT_FICS_NAME["B"] will be fixed in FICSConnections.py
# and VARIANT_GAME_TYPES[FISCHERRANDOMCHESS] also
def type_to_display_text(typename):
    """Return a human-readable display name for a FICS game-type string."""
    if "loaded from" in typename.lower():
        typename = typename.split()[-1]
    if typename in GAME_TYPES:
        return GAME_TYPES[typename].display_text
    if "/" in typename:
        # Default solution for eco/A00 and a few others: capitalize both halves.
        left, right = typename.split("/")
        return "%s %s" % (left[0].upper() + left[1:],
                          right[0].upper() + right[1:])
    # Otherwise just capitalize the first letter.
    return typename[0].upper() + typename[1:]
def time_control_to_gametype(minutes, gain):
    """Map a (minutes, gain) time control onto a standard FICS game type."""
    assert isinstance(minutes, int) and isinstance(gain, int)
    assert minutes >= 0 and gain >= 0
    if minutes == 0 and gain == 0:
        return GAME_TYPES["untimed"]
    # A positive increment counts as (gain*60 - 1) extra seconds of game time.
    expected_seconds = minutes * 60 + ((gain * 60) - 1 if gain > 0 else 0)
    if expected_seconds >= 15 * 60:
        return GAME_TYPES["standard"]
    if expected_seconds >= 3 * 60:
        return GAME_TYPES["blitz"]
    return GAME_TYPES["lightning"]
TYPE_ADMINISTRATOR, TYPE_BLINDFOLD, TYPE_COMPUTER, \
TYPE_TEAM, TYPE_UNREGISTERED, TYPE_CHESS_ADVISOR, \
TYPE_SERVICE_REPRESENTATIVE, TYPE_TOURNAMENT_DIRECTOR, TYPE_MAMER_MANAGER, \
TYPE_GRAND_MASTER, TYPE_INTERNATIONAL_MASTER, TYPE_FIDE_MASTER, \
TYPE_WOMAN_GRAND_MASTER, TYPE_WOMAN_INTERNATIONAL_MASTER, TYPE_WOMAN_FIDE_MASTER,\
TYPE_DUMMY_ACCOUNT, TYPE_CANDIDATE_MASTER, TYPE_FIDE_ARBEITER, TYPE_NATIONAL_MASTER, \
TYPE_DISPLAY_MASTER = range(20)
TITLE_TYPE_DISPLAY_TEXTS = (_("Administrator"),
_("Blindfold Account"),
_("Computer"),
_("Team Account"),
_("Unregistered"),
_("Chess Advisor"),
_("Service Representative"),
_("Tournament Director"),
_("Mamer Manager"),
_("Grand Master"),
_("International Master"),
_("FIDE Master"),
_("Woman Grand Master"),
_("Woman International Master"),
_("Woman FIDE Master"),
_("Dummy Account"),
_("Candidate Master"),
_("FIDE Arbeiter"),
_("National Master"),
_("Display Master"),
)
TITLE_TYPE_DISPLAY_TEXTS_SHORT = (
_("*"), _("B"), _("C"), _("T"), _("U"), _("CA"), _("SR"), _("TD"), _("TM"),
_("GM"), _("IM"), _("FM"), _("WGM"), _("WIM"), _("WFM"), _("D"), _("H"),
_("CM"), _("FA"), _("NM"), _("DM"))
TITLES = { # From FICS 'help who'
"*": TYPE_ADMINISTRATOR,
"B": TYPE_BLINDFOLD,
"C": TYPE_COMPUTER,
"T": TYPE_TEAM,
"U": TYPE_UNREGISTERED,
"CA": TYPE_CHESS_ADVISOR,
"SR": TYPE_SERVICE_REPRESENTATIVE,
"TD": TYPE_TOURNAMENT_DIRECTOR,
"TM": TYPE_MAMER_MANAGER,
"GM": TYPE_GRAND_MASTER,
"IM": TYPE_INTERNATIONAL_MASTER,
"FM": TYPE_FIDE_MASTER,
"WFM": TYPE_WOMAN_FIDE_MASTER,
"WIM": TYPE_WOMAN_INTERNATIONAL_MASTER,
"WGM": TYPE_WOMAN_GRAND_MASTER,
"D": TYPE_DUMMY_ACCOUNT,
"H": TYPE_SERVICE_REPRESENTATIVE,
"CM": TYPE_CANDIDATE_MASTER,
"FA": TYPE_FIDE_ARBEITER,
"NM": TYPE_NATIONAL_MASTER,
"DM": TYPE_DISPLAY_MASTER,
}
HEX_TO_TITLE = {
0x1: TYPE_UNREGISTERED,
0x2: TYPE_COMPUTER,
0x4: TYPE_GRAND_MASTER,
0x8: TYPE_INTERNATIONAL_MASTER,
0x10: TYPE_FIDE_MASTER,
0x20: TYPE_WOMAN_GRAND_MASTER,
0x40: TYPE_WOMAN_INTERNATIONAL_MASTER,
0x80: TYPE_WOMAN_FIDE_MASTER,
}
def parse_title_hex(titlehex):
    """Decode a hex bitmask string into the set of matching title constants."""
    mask = int(titlehex, 16)
    return {title for bit, title in HEX_TO_TITLE.items() if mask & bit}
def parseRating(rating):
    """Parse a FICS rating string into an int.

    Tolerates surrounding whitespace and a trailing provisional/estimated
    letter (e.g. "1657P"). Returns 0 for non-numeric ratings such as
    "----" or "++++", and — unlike the previous version, which raised
    IndexError — also for an empty string.
    """
    rating = rating.strip()
    if rating and rating[-1].isalpha():
        rating = rating[:-1]
    return int(rating) if rating.isdigit() else 0
def get_infobarmessage_content(player, text, gametype=None):
    """Build an HBox holding the player's icon, their markup name and *text*."""
    box = Gtk.HBox()

    avatar = Gtk.Image()
    avatar.set_from_pixbuf(player.getIcon(size=32, gametype=gametype))
    box.pack_start(avatar, False, False, 4)

    name_label = Gtk.Label()
    name_label.set_markup(player.getMarkup(gametype=gametype))
    box.pack_start(name_label, False, False, 0)

    text_label = Gtk.Label()
    text_label.set_markup(text)
    box.pack_start(text_label, False, False, 0)

    return box
def get_infobarmessage_content2(player,
                                heading_text,
                                message_text,
                                gametype=None):
    """Build a VBox: player icon + markup + heading on top, wrapped message below."""
    hbox = Gtk.HBox()
    image = Gtk.Image()
    image.set_from_pixbuf(player.getIcon(size=24, gametype=gametype))
    hbox.pack_start(image, False, False, 0)
    label = Gtk.Label()
    markup = player.getMarkup(gametype=gametype, long_titles=False)
    label.set_markup(markup + heading_text)
    hbox.pack_start(label, False, False, 0)
    vbox = Gtk.VBox()
    vbox.pack_start(hbox, False, False, 0)
    # Message body: left-aligned, wrapped at roughly 70 characters.
    label = Gtk.Label()
    label.props.xalign = 0
    label.props.xpad = 4
    label.props.justify = Gtk.Justification.LEFT
    label.props.wrap = True
    label.set_width_chars(70)
    label.set_text(message_text)
    vbox.pack_start(label, False, False, 5)
    return vbox
"""
Internal command codes used in FICS block mode
(see "help block_codes" and "help iv_block").
Used mostly by internal library functions.
BLOCK_ variables are message boundary markers.
BLKCMD_ variables are command codes.
"""
BLOCK_START = chr(21) # \U
BLOCK_SEPARATOR = chr(22) # \V
BLOCK_END = chr(23) # \W
BLOCK_POSE_START = chr(24) # \X
BLOCK_POSE_END = chr(25) # \Y
BLKCMD_NULL = 0
BLKCMD_GAME_MOVE = 1
BLKCMD_ABORT = 10
BLKCMD_ACCEPT = 11
BLKCMD_ADDLIST = 12
BLKCMD_ADJOURN = 13
BLKCMD_ALLOBSERVERS = 14
BLKCMD_ASSESS = 15
BLKCMD_BACKWARD = 16
BLKCMD_BELL = 17
BLKCMD_BEST = 18
BLKCMD_BNAME = 19
BLKCMD_BOARDS = 20
BLKCMD_BSETUP = 21
BLKCMD_BUGWHO = 22
BLKCMD_CBEST = 23
BLKCMD_CLEARMESSAGES = 24
BLKCMD_CLRSQUARE = 25
BLKCMD_CONVERT_BCF = 26
BLKCMD_CONVERT_ELO = 27
BLKCMD_CONVERT_USCF = 28
BLKCMD_COPYGAME = 29
BLKCMD_CRANK = 30
BLKCMD_CSHOUT = 31
BLKCMD_DATE = 32
BLKCMD_DECLINE = 33
BLKCMD_DRAW = 34
BLKCMD_ECO = 35
BLKCMD_EXAMINE = 36
BLKCMD_FINGER = 37
BLKCMD_FLAG = 38
BLKCMD_FLIP = 39
BLKCMD_FMESSAGE = 40
BLKCMD_FOLLOW = 41
BLKCMD_FORWARD = 42
BLKCMD_GAMES = 43
BLKCMD_GETGI = 44
BLKCMD_GETPI = 45
BLKCMD_GINFO = 46
BLKCMD_GOBOARD = 47
BLKCMD_HANDLES = 48
BLKCMD_HBEST = 49
BLKCMD_HELP = 50
BLKCMD_HISTORY = 51
BLKCMD_HRANK = 52
BLKCMD_INCHANNEL = 53
BLKCMD_INDEX = 54
BLKCMD_INFO = 55
BLKCMD_ISET = 56
BLKCMD_IT = 57
BLKCMD_IVARIABLES = 58
BLKCMD_JKILL = 59
BLKCMD_JOURNAL = 60
BLKCMD_JSAVE = 61
BLKCMD_KIBITZ = 62
BLKCMD_LIMITS = 63
BLKCMD_LINE = 64 # Not on FICS
BLKCMD_LLOGONS = 65
BLKCMD_LOGONS = 66
BLKCMD_MAILHELP = 67
BLKCMD_MAILMESS = 68
BLKCMD_MAILMOVES = 69
BLKCMD_MAILOLDMOVES = 70
BLKCMD_MAILSOURCE = 71
BLKCMD_MAILSTORED = 72
BLKCMD_MATCH = 73
BLKCMD_MESSAGES = 74
BLKCMD_MEXAMINE = 75
BLKCMD_MORETIME = 76
BLKCMD_MOVES = 77
BLKCMD_NEWS = 78
BLKCMD_NEXT = 79
BLKCMD_OBSERVE = 80
BLKCMD_OLDMOVES = 81
BLKCMD_OLDSTORED = 82
BLKCMD_OPEN = 83
BLKCMD_PARTNER = 84
BLKCMD_PASSWORD = 85
BLKCMD_PAUSE = 86
BLKCMD_PENDING = 87
BLKCMD_PFOLLOW = 88
BLKCMD_POBSERVE = 89
BLKCMD_PREFRESH = 90
BLKCMD_PRIMARY = 91
BLKCMD_PROMOTE = 92
BLKCMD_PSTAT = 93
BLKCMD_PTELL = 94
BLKCMD_PTIME = 95
BLKCMD_QTELL = 96
BLKCMD_QUIT = 97
BLKCMD_RANK = 98
BLKCMD_RCOPYGAME = 99
BLKCMD_RFOLLOW = 100
BLKCMD_REFRESH = 101
BLKCMD_REMATCH = 102
BLKCMD_RESIGN = 103
BLKCMD_RESUME = 104
BLKCMD_REVERT = 105
BLKCMD_ROBSERVE = 106
BLKCMD_SAY = 107
BLKCMD_SERVERS = 108
BLKCMD_SET = 109
BLKCMD_SHOUT = 110
BLKCMD_SHOWLIST = 111
BLKCMD_SIMABORT = 112
BLKCMD_SIMALLABORT = 113
BLKCMD_SIMADJOURN = 114
BLKCMD_SIMALLADJOURN = 115
BLKCMD_SIMGAMES = 116
BLKCMD_SIMMATCH = 117
BLKCMD_SIMNEXT = 118
BLKCMD_SIMOBSERVE = 119
BLKCMD_SIMOPEN = 120
BLKCMD_SIMPASS = 121
BLKCMD_SIMPREV = 122
BLKCMD_SMOVES = 123
BLKCMD_SMPOSITION = 124
BLKCMD_SPOSITION = 125
BLKCMD_STATISTICS = 126
BLKCMD_STORED = 127
BLKCMD_STYLE = 128
BLKCMD_SWITCH = 130
BLKCMD_TAKEBACK = 131
BLKCMD_TELL = 132
BLKCMD_TIME = 133
BLKCMD_TOMOVE = 134
BLKCMD_TOURNSET = 135
BLKCMD_UNALIAS = 136
BLKCMD_UNEXAMINE = 137
BLKCMD_UNOBSERVE = 138
BLKCMD_UNPAUSE = 139
BLKCMD_UPTIME = 140
BLKCMD_USCF = 141
BLKCMD_USTAT = 142
BLKCMD_VARIABLES = 143
BLKCMD_WHENSHUT = 144
BLKCMD_WHISPER = 145
BLKCMD_WHO = 146
BLKCMD_WITHDRAW = 147
BLKCMD_WNAME = 148
BLKCMD_XKIBITZ = 149
BLKCMD_XTELL = 150
BLKCMD_XWHISPER = 151
BLKCMD_ZNOTIFY = 152
BLKCMD_REPLY = 153 # Not on FICS
BLKCMD_SUMMON = 154
BLKCMD_SEEK = 155
BLKCMD_UNSEEK = 156
BLKCMD_SOUGHT = 157
BLKCMD_PLAY = 158
BLKCMD_ALIAS = 159
BLKCMD_NEWBIES = 160
BLKCMD_SR = 161
BLKCMD_CA = 162
BLKCMD_TM = 163
BLKCMD_GETGAME = 164
BLKCMD_CCNEWSE = 165
BLKCMD_CCNEWSF = 166
BLKCMD_CCNEWSI = 167
BLKCMD_CCNEWSP = 168
BLKCMD_CCNEWST = 169
BLKCMD_CSNEWSE = 170
BLKCMD_CSNEWSF = 171
BLKCMD_CSNEWSI = 172
BLKCMD_CSNEWSP = 173
BLKCMD_CSNEWST = 174
BLKCMD_CTNEWSE = 175
BLKCMD_CTNEWSF = 176
BLKCMD_CTNEWSI = 177
BLKCMD_CTNEWSP = 178
BLKCMD_CTNEWST = 179
BLKCMD_CNEWS = 180
BLKCMD_SNEWS = 181
BLKCMD_TNEWS = 182
BLKCMD_RMATCH = 183
BLKCMD_RSTAT = 184
BLKCMD_CRSTAT = 185
BLKCMD_HRSTAT = 186
BLKCMD_GSTAT = 187
# Note admin codes start from 300.
BLKCMD_ERROR_BADCOMMAND = 512
BLKCMD_ERROR_BADPARAMS = 513
BLKCMD_ERROR_AMBIGUOUS = 514
BLKCMD_ERROR_RIGHTS = 515
BLKCMD_ERROR_OBSOLETE = 516
BLKCMD_ERROR_REMOVED = 517
BLKCMD_ERROR_NOTPLAYING = 518
BLKCMD_ERROR_NOSEQUENCE = 519
BLKCMD_ERROR_LENGTH = 520
LIMIT_BLKCMD_ERRORS = 500
FICS_COMMANDS = [
'abort', 'accept', 'addlist', 'adjourn', 'alias', 'allobservers', 'assess',
'backward', 'bell', 'best', 'boards', 'bsetup', 'bugwho', 'cbest',
'clearmessages', 'convert_bcf', 'convert_elo', 'convert_uscf', 'copygame',
'crank', 'cshout', 'date', 'decline', 'draw', 'examine', 'finger', 'flag',
'flip', 'fmessage', 'follow', 'forward', 'games', 'gnotify', 'goboard',
'handles', 'hbest', 'help', 'history', 'hrank', 'inchannel', 'index',
'info', 'it', 'jkill', 'jsave', 'kibitz', 'limits', 'llogons', 'logons',
'mailhelp', 'mailmess', 'mailmoves', 'mailoldmoves', 'mailsource',
'mailstored', 'match', 'messages', 'mexamine', 'moretime', 'moves', 'news',
'next', 'observe', 'oldmoves', 'open', 'password', 'pause', 'pending',
'pfollow', 'play', 'pobserve', 'promote', 'pstat', 'qtell', 'quit', 'rank',
'refresh', 'resign', 'resume', 'revert', 'say', 'seek', 'servers', 'set',
'shout', 'showlist', 'simabort', 'simallabort', 'simadjourn',
'simalladjourn', 'simgames', 'simmatch', 'simnext', 'simobserve',
'simopen', 'simpass', 'simprev', 'smoves', 'smposition', 'sought',
'sposition', 'statistics', 'stored', 'style', 'sublist', 'switch',
'takeback', 'tell', 'time', 'unalias', 'unexamine', 'unobserve', 'unpause',
'unseek', 'uptime', 'ustat', 'variables', 'whisper', 'who', 'withdraw',
'xkibitz', 'xtell', 'xwhisper', 'znotify']
# Names of the FICS help topics (arguments accepted by the server's
# "help" command); a superset of FICS_COMMANDS above.
FICS_HELP = [
    '_index', 'abort', 'abuse', 'academy', 'accept', 'addlist', 'addresses',
    'adjourn', 'adjournments', 'adjudicate', 'adjudication', 'adm_app',
    'adm_info', 'adm_new', 'admins', 'alias', 'allobservers', 'assess',
    'atomic', 'audiochat', 'avail_vars', 'backward', 'bclock', 'bell', 'best',
    'blind', 'blindfold', 'blindh', 'blitz', 'block_codes', 'bname', 'boards',
    'brating', 'bsetup', 'bughouse', 'bughouse_strat', 'bugreport', 'bugwho',
    'busy', 'ca', 'category', 'cbest', 'censor', 'chan_1', 'chan_4', 'channel',
    'channel_list', 'channels', 'chess_adviser', 'chess_advisor',
    'clearmessage', 'clearmessages', 'clock', 'clocks', 'clrsquare', 'cls',
    'cls_info', 'command', 'commands', 'commit', 'computer_app',
    'computer_list', 'computers', 'confidentiality', 'convert_bcf',
    'convert_elo', 'convert_uscf', 'copygame', 'crank', 'crazyhouse',
    'crazyhouse_strat', 'credit', 'crstat', 'cshout', 'csnewse', 'csnewsf',
    'csnewsi', 'csnewsp', 'csnewst', 'date', 'decline', 'disclaimer',
    'disconnection', 'draw', 'eco', 'eggo', 'email', 'etime', 'examine', 'exl',
    'fen', 'fics_faq', 'fics_lingo', 'finger', 'flag', 'flip', 'fmessage',
    'follow', 'formula', 'forward', 'fr', 'fr_rules', 'ftp_hints', 'games',
    'games', 'getgame', 'getgi', 'getpi', 'ginfo', 'glicko', 'gnotify',
    'goboard', 'handle', 'handles', 'hbest', 'help', 'highlight', 'history',
    'hrank', 'hrstat', 'hstat', 'icsdrone', 'idlenotify', 'inchannel', 'index',
    'indexfile', 'inetchesslib', 'info', 'intellegence', 'interfaces',
    'intro_analysis', 'intro_basics', 'intro_general', 'intro_information',
    'intro_moving', 'intro_playing', 'intro_settings', 'intro_talking',
    'intro_welcome', 'irc_help', 'iset', 'it', 'iv_allresults', 'iv_atomic',
    'iv_audiochat', 'iv_block', 'iv_boardinfo', 'iv_compressmove',
    'iv_crazyhouse', 'iv_defprompt', 'iv_extascii', 'iv_extuserinfo', 'iv_fr',
    'iv_gameinfo', 'iv_graph', 'iv_list', 'iv_lock', 'iv_pendinfo',
    'iv_seekinfo', 'iv_seekremove', 'iv_startpos', 'ivariables', 'jkill',
    'journal', 'jsave', 'kibitz', 'kiblevel', 'lag', 'lecture1', 'lessons',
    'lightning', 'limits', 'links', 'lists', 'llogons', 'logons', 'losers',
    'losers_chess', 'mailhelp', 'mailmess', 'mailmoves', 'mailoldmoves',
    'mailstored', 'mamer', 'manual_usage', 'manual_vars', 'match',
    'meeting_1_followup', 'meeting_1_long', 'meeting_1_short',
    'meetings_index', 'messages', 'mexamine', 'moretime', 'motd', 'motd_fri',
    'motd_help', 'motd_mon', 'motd_sat', 'motd_sun', 'motd_thu', 'motd_tue',
    'motd_wed', 'moves', 'mule', 'new_features', 'newbie', 'news', 'next',
    'noescape', 'noplay', 'notes', 'notify', 'observe', 'odds', 'oldmoves',
    'oldpstat', 'open', 'partner', 'password', 'pause', 'pending', 'pfollow',
    'pgn', 'ping', 'play', 'pobserve', 'powericsfaq', 'prefresh', 'primary',
    'private', 'promote', 'pstat', 'ptell', 'ptime', 'qtell', 'quit', 'rank',
    'rating_changes', 'ratings', 'rcopygame', 'rd', 'refresh', 'register',
    'relay', 'relay_operator', 'rematch', 'replay', 'resign', 'result',
    'resume', 'revert', 'rfollow', 'rmatch', 'robofics', 'robserve', 'rstat',
    'sabort', 'say', 'sdraw', 'seek', 'servers', 'set', 'setup', 'shout',
    'shout_quota', 'showadmins', 'showlist', 'showsrs', 'simabort',
    'simadjourn', 'simallabort', 'simalladjourn', 'simgames', 'simmatch',
    'simnext', 'simobserve', 'simopen', 'simpass', 'simprev', 'simuls',
    'skype', 'smoves', 'smposition', 'sought', 'spending', 'sposition', 'sr',
    'sr_info', 'standard', 'statistics', 'stats', 'stc', 'stored', 'style',
    'style12', 'sublist', 'suicide_chess', 'summon', 'switch', 'system_alias',
    'takeback', 'team', 'teamgames', 'tell', 'time', 'timeseal',
    'timeseal_mac', 'timeseal_os2', 'timeseal_unix', 'timeseal_windows',
    'timezones', 'tm', 'tomove', 'totals', 'totals_info', 'tournset',
    'town_meetings', 'townmtg1', 'unalias', 'unexamine', 'unobserve',
    'unpause', 'unseek', 'untimed', 'uptime', 'uscf', 'uscf_faq', 'ustat',
    'v_autoflag', 'v_automail', 'v_availinfo', 'v_availmax', 'v_availmin',
    'v_bell', 'v_bugopen', 'v_chanoff', 'v_cshout', 'v_ctell', 'v_echo',
    'v_flip', 'v_formula', 'v_gin', 'v_height', 'v_highlight', 'v_inc',
    'v_interface', 'v_jprivate', 'v_kibitz', 'v_kiblevel', 'v_language',
    'v_mailmess', 'v_messreply', 'v_notakeback', 'v_notifiedby', 'v_open',
    'v_pgn', 'v_pin', 'v_private', 'v_prompt', 'v_provshow', 'v_ptime',
    'v_rated', 'v_ropen', 'v_seek', 'v_shout', 'v_silence', 'v_simopen',
    'v_style', 'v_tell', 'v_time', 'v_tolerance', 'v_tourney', 'v_tzone',
    'v_unobserve', 'v_width', 'variables', 'wclock', 'webpage', 'whenshut',
    'whisper', 'who', 'wild', 'withdraw', 'wname', 'wrating', 'xkibitz',
    'xtell', 'xwhisper', 'zhouse', 'znotify']
| gpl-3.0 |
VipSaran/Py_Led_Clock | Adafruit_7Segment.py | 1 | 2204 | #!/usr/bin/python
import time
import datetime
from Adafruit_LEDBackpack import LEDBackpack
# ===========================================================================
# 7-Segment Display
# ===========================================================================
# This class is meant to be used with the four-character, seven segment
# displays available from Adafruit
class SevenSegment:
    """Driver for Adafruit's four-character, seven-segment I2C display.

    Thin convenience layer over LEDBackpack: translates digit values into
    segment bitmasks and writes them into the backpack's display buffer.
    (Python 2 module -- note the print statements.)
    """

    # Backing LEDBackpack driver; assigned in __init__.
    disp = None

    # Hexadecimal character lookup table (row 1 = 0..9, row 2 = A..F)
    digits = [ 0x3F, 0x06, 0x5B, 0x4F, 0x66, 0x6D, 0x7D, 0x07, 0x7F, 0x6F, \
               0x77, 0x7C, 0x39, 0x5E, 0x79, 0x71 ]

    # Constructor
    def __init__(self, address=0x72, debug=False):
        if (debug):
            print "Initializing a new instance of LEDBackpack at 0x%02X" % address
        self.disp = LEDBackpack(address=address, debug=debug)
        # self.disp.setBrightness(1)

    def writeDigitRaw(self, charNumber, value):
        "Sets a digit using the raw 16-bit value"
        if (charNumber > 7):
            # Only buffer rows 0..7 exist on the backpack.
            return
        # Set the appropriate digit
        self.disp.setBufferRow(charNumber, value)

    def writeDigit(self, charNumber, value, dot=False):
        "Sets a single decimal or hexadecimal value (0..9 and A..F)"
        if (charNumber > 7):
            return
        if (value > 0xF):
            return
        # Set the appropriate digit; bit 7 of the segment mask drives the
        # decimal point.
        self.disp.setBufferRow(charNumber, self.digits[value] | (dot << 7))

    def setColon(self, state=True):
        "Enables or disables the colon character"
        # Warning: This function assumes that the colon is character '2',
        # which is the case on 4 char displays, but may need to be modified
        # if another display type is used
        if (state):
            self.disp.setBufferRow(2, 0xFFFF)
        else:
            self.disp.setBufferRow(2, 0)

    def setBrightness(self, brightness=15):
        # print "setBrightness called", brightness
        "Sets the display brightness"
        # Silently ignores non-numeric or out-of-range values (0..15).
        if (self.is_number(brightness) and brightness >= 0 and brightness <= 15):
            # print "Setting brightness to", brightness
            self.disp.setBrightness(brightness)

    def clear(self, update=True):
        # Clear the display buffer; when update is True, also push the
        # cleared buffer to the hardware immediately.
        self.disp.clear(update)

    def is_number(self, s):
        # True if s can be interpreted as a float; used to validate the
        # brightness argument above.
        try:
            float(s)
            return True
        except ValueError:
            return False
| apache-2.0 |
HelloLily/hellolily | lily/messaging/email/templatetags/email.py | 1 | 8094 | from __future__ import absolute_import
from bs4 import BeautifulSoup
from collections import OrderedDict
import urllib
from datetime import datetime
from dateutil.tz import gettz, tzutc
from dateutil.parser import parse
from django import template
from django.conf import settings
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from lily.messaging.email.utils import INBOX, SENT, DRAFTS, TRASH, SPAM
register = template.Library()
def localized_times(time):
    """Return ``(localized_time, localized_now)`` for the given time.

    *time* may be a datetime or a parseable date string; both returned values
    are converted to the timezone configured in ``settings.TIME_ZONE``.

    Raises:
        TypeError: if *time* is neither a string nor a datetime.
    """
    if isinstance(time, basestring):
        parsed_time = parse(time)
        parsed_time.tzinfo._name = None  # clear tzname to rely solely on the offset (not all tznames are supported)
        utc_time = parsed_time.astimezone(tzutc())
    elif isinstance(time, datetime):
        utc_time = time.astimezone(tzutc())
    else:
        # BUG FIX: this used to return a bare None, which made both callers
        # fail with an opaque "cannot unpack" TypeError; fail loudly instead.
        raise TypeError('localized_times() expects a datetime or a date string, got %r' % type(time))

    # Convert to local
    local_tz = gettz(settings.TIME_ZONE)
    localized_time = utc_time.astimezone(local_tz)
    localized_now = datetime.now(tzutc()).astimezone(local_tz)

    return localized_time, localized_now
@register.filter(name='pretty_datetime')
def pretty_datetime(time, format=None):
    """Render a UTC datetime compactly in the local timezone.

    When *format* is a string it is used verbatim; otherwise the output is
    time-of-day for today, day-month-year for other years, and day-month
    for everything else this year.
    """
    local_time, local_now = localized_times(time)

    if isinstance(format, basestring):
        # An explicit format supplied by the caller always wins.
        return local_time.strftime(format)

    # Choose a format based on how far the timestamp is from "now".
    if local_now.toordinal() == local_time.toordinal():
        return local_time.strftime('%H:%M')
    if local_now.year != local_time.year:
        return local_time.strftime('%d-%m-%y')
    return local_time.strftime('%d-%b.')
@register.filter(name='pretty_datetime_relative')
def pretty_datetime_relative(time, format=None):
    """Like ``pretty_datetime``, with a relative suffix such as
    "(5 minutes ago)" appended for times within the last two weeks.

    Falls back to the plain ``pretty_datetime`` output for anything older
    than 14 days or in the future.
    """
    result = pretty_datetime(time, format)

    # Convert to local
    localized_time, localized_now = localized_times(time)
    diff = localized_now - localized_time

    if diff.days > 14 or diff.days < 0:
        # Too old (or in the future): the absolute form alone is clearer.
        return result

    s = diff.seconds
    if diff.days > 1:
        return _('%s (%s days ago)') % (result, diff.days)
    elif diff.days == 1:
        if localized_now.toordinal() - localized_time.toordinal() == 1:
            return _('%s (yesterday)') % localized_time.strftime('%H:%M')
        else:
            # More than 24h elapsed but two calendar days back.
            return _('%s (2 days ago)') % result
    elif s <= 1:
        return _('%s (just now)') % result
    elif s < 60:
        return _('%s (%d seconds ago)') % (result, s)
    elif s < 120:
        return _('%s (1 minute ago)') % result
    elif s < 3600:
        return _('%s (%d minutes ago)') % (result, (s / 60))
    elif s < 7200:
        # BUG FIX: every sibling branch prefixes the absolute timestamp, but
        # this one returned the bare string '1 hour ago'; made it consistent.
        return _('%s (1 hour ago)') % result
    else:
        return _('%s (%d hours ago)') % (result, (s / 3600))
@register.filter(name='other_mailbox_folders')
def other_mailbox_folders(email_account, active_url):
    # Render the sidebar folder tree for every folder of *email_account* that
    # is NOT one of the standard mailboxes (inbox/sent/drafts/trash/spam),
    # marking the folder whose URL equals *active_url* as selected.
    def filter_other_folders(folder_tree):
        # Keep only folders without a standard-mailbox flag, then sort them
        # case-insensitively by display name.
        other_folders = OrderedDict()
        for folder_name, folder in folder_tree.items():
            if not len(set([INBOX, SENT, DRAFTS, TRASH, SPAM]).intersection(set(folder.get('flags', [])))):
                other_folders[folder_name] = folder

        # Sort other mailbox folders
        other_folders_sorted = OrderedDict()
        for folder_name in sorted(other_folders, key=unicode.lower):
            other_folders_sorted[folder_name] = other_folders.get(folder_name)

        return other_folders_sorted

    def get_folder_html(folder_name, folder):
        # Render a single folder (recursing into children for parents).
        # Returns (is_active, html): is_active is True when this folder or
        # any descendant matches active_url.
        folder_url = reverse('messaging_email_account_folder', kwargs={
            'account_id': email_account.id,
            'folder': urllib.quote_plus(folder.get('full_name').encode('utf-8'))
        })
        is_endpoint = is_active = urllib.unquote_plus(folder_url.encode('utf-8')) == urllib.unquote_plus(active_url)
        data_href = u'data-href="%(folder_url)s"' % {'folder_url': folder_url}
        html = u''
        if folder.get('is_parent', False):
            if u'\\Noselect' in folder.get('flags'):
                # Folder is only a container and cannot itself be opened.
                data_href = u''
            else:
                # NOTE(review): this template references
                # %(unread_emails_count)s but the mapping supplies only
                # 'folder_name', so rendering a selectable parent folder
                # presumably raises KeyError -- confirm and supply the count.
                folder_name = u'''%(folder_name)s (%(unread_emails_count)s)''' % {
                    'folder_name': folder_name,
                }
            html += u'''<div class="tree-folder">
                <div class="tree-folder-header %(tree_folder_class)s" %(data_href)s>
                    <i class="icon-folder-%(state)s"></i>
                    <div class="tree-folder-name">%(folder)s</div>
                </div>
                <div class="tree-folder-content %(folder_content_class)s" data-scroller="true"
                     data-max-height="256px" data-always-visible="1" data-rail-visible="0">'''
            is_folder_active, subfolder_html = get_subfolder_html(folder.get('children'))
            # Make sure parent is marked active as well
            if is_folder_active:
                is_active = True
            html %= {
                'tree_folder_class': 'tree-selected' if is_endpoint else '',
                'data_href': data_href,
                'state': 'open' if is_active else 'close',
                'folder': folder_name,
                'folder_content_class': '' if is_active else 'hide',
            }
            html += subfolder_html
            html += u'''</div>
            </div>'''
        else:
            # NOTE(review): same missing 'unread_emails_count' mapping key as
            # above -- rendering a leaf folder presumably raises KeyError.
            html += u'''<div class="tree-item %(tree_item_class)s" %(data_href)s>
                            <div class="tree-item-name">%(folder_name)s (%(unread_emails_count)s)</div>
                        </div>''' % {
                'tree_item_class': 'tree-selected' if is_endpoint else '',
                'data_href': data_href,
                'folder_name': folder_name,
            }
        return is_active, html

    def get_subfolder_html(folder_tree):
        # Render all non-standard folders within *folder_tree*, propagating
        # whether any of them is the active one.
        other_folder_tree = filter_other_folders(folder_tree)
        html = u''
        is_active = False
        for folder_name, folder in other_folder_tree.items():
            is_folder_active, folder_html = get_folder_html(folder_name, folder)
            if is_folder_active:
                is_active = True
            html += folder_html
        return is_active, html

    # Find email_account's other mailbox folders
    other_folders = filter_other_folders(email_account.folders)

    # Generate output
    html = u''
    if len(other_folders):
        # Wrap everything in an "Other" parent folder node.
        html += u'''<div class="tree-folder">
            <div class="tree-folder-header">
                <i class="icon-folder-%(state)s"></i>
                <div class="tree-folder-name">%(folder_name)s</div>
            </div>
            <div class="tree-folder-content %(folder_content_class)s">'''

        # Loop through other mailbox folder trees
        folders_html = u''
        is_active = False
        for folder_name, folder in other_folders.items():
            is_folder_active, folder_html = get_folder_html(folder_name, folder)
            if is_folder_active:
                is_active = True
            folders_html += folder_html

        html %= {
            'state': 'open' if is_active else 'close',
            'folder_name': _('Other'),
            'folder_content_class': '' if is_active else 'hide',
        }
        html += folders_html
        html += u'''</div>
        </div>'''
    return html
@register.filter(name='replace_mailto')
def replace_mailto(value):
    """Rewrite every ``mailto:`` anchor in *value* so it opens Lily's own
    compose-email view instead of the user's mail client."""
    soup = BeautifulSoup(value, 'lxml')
    for anchor in soup.select('a[href^=mailto]'):
        anchor['href'] = anchor['href'].replace("mailto:", "/#/email/compose/")
        anchor['target'] = '_top'  # Break out of iframe.
    return str(soup)
| agpl-3.0 |
tucbill/manila | manila/openstack/common/network_utils.py | 13 | 2157 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Network-related utilities and helper functions.
"""
import logging
LOG = logging.getLogger(__name__)
def parse_host_port(address, default_port=None):
    """
    Interpret a string as a host:port pair.

    An IPv6 address MUST be escaped if accompanied by a port,
    because otherwise ambiguity ensues: 2001:db8:85a3::8a2e:370:7334
    means both [2001:db8:85a3::8a2e:370:7334] and
    [2001:db8:85a3::8a2e:370]:7334.

    >>> parse_host_port('server01:80')
    ('server01', 80)
    >>> parse_host_port('server01')
    ('server01', None)
    >>> parse_host_port('server01', default_port=1234)
    ('server01', 1234)
    >>> parse_host_port('[::1]:80')
    ('::1', 80)
    >>> parse_host_port('[::1]')
    ('::1', None)
    >>> parse_host_port('[::1]', default_port=1234)
    ('::1', 1234)
    >>> parse_host_port('2001:db8:85a3::8a2e:370:7334', default_port=1234)
    ('2001:db8:85a3::8a2e:370:7334', 1234)
    """
    if address.startswith('['):
        # Bracket-escaped IPv6 literal: "[host]" optionally followed by ":port".
        host, tail = address[1:].split(']')
        port = tail.split(':')[1] if ':' in tail else default_port
    else:
        pieces = address.split(':')
        if len(pieces) == 2:
            # Exactly one colon: hostname/IPv4 plus a port.
            host, port = pieces
        else:
            # 0 colons means plain IPv4/hostname; >1 means an unescaped IPv6
            # address, which we refuse to split -- the whole string is the host.
            host = address
            port = default_port

    return (host, None if port is None else int(port))
| apache-2.0 |
diagramsoftware/account-financial-reporting | account_chart_report/wizard/__init__.py | 34 | 1040 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2014 Savoir-faire Linux (<www.savoirfairelinux.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import account_report_chart_of_account
| agpl-3.0 |
benvermaercke/pyqtgraph | pyqtgraph/graphicsItems/PlotCurveItem.py | 28 | 22481 | from ..Qt import QtGui, QtCore
try:
from ..Qt import QtOpenGL
HAVE_OPENGL = True
except:
HAVE_OPENGL = False
import numpy as np
from .GraphicsObject import GraphicsObject
from .. import functions as fn
from ..Point import Point
import struct, sys
from .. import getConfigOption
from .. import debug
__all__ = ['PlotCurveItem']
class PlotCurveItem(GraphicsObject):
"""
Class representing a single plot curve. Instances of this class are created
automatically as part of PlotDataItem; these rarely need to be instantiated
directly.
Features:
- Fast data update
- Fill under curve
- Mouse interaction
==================== ===============================================
**Signals:**
sigPlotChanged(self) Emitted when the data being plotted has changed
sigClicked(self) Emitted when the curve is clicked
==================== ===============================================
"""
sigPlotChanged = QtCore.Signal(object)
sigClicked = QtCore.Signal(object)
    def __init__(self, *args, **kargs):
        """
        Forwards all arguments to :func:`setData <pyqtgraph.PlotCurveItem.setData>`.
        Some extra arguments are accepted as well:

        ============== =======================================================
        **Arguments:**
        parent         The parent GraphicsObject (optional)
        clickable      If True, the item will emit sigClicked when it is
                       clicked on. Defaults to False.
        ============== =======================================================
        """
        GraphicsObject.__init__(self, kargs.get('parent', None))
        self.clear()  # initialize all data/cache attributes to the empty state

        ## this is disastrous for performance.
        #self.setCacheMode(QtGui.QGraphicsItem.DeviceCoordinateCache)

        self.metaData = {}
        # Default display options; individual keys are overridden by setData()
        # and the set* methods below.
        self.opts = {
            'pen': fn.mkPen('w'),
            'shadowPen': None,
            'fillLevel': None,
            'brush': None,
            'stepMode': False,
            'name': None,
            'antialias': getConfigOption('antialias'),
            'connect': 'all',
            'mouseWidth': 8, # width of shape responding to mouse click
        }
        self.setClickable(kargs.get('clickable', False))
        self.setData(*args, **kargs)
    def implements(self, interface=None):
        # With no argument, return the list of supported interfaces; with an
        # interface name, return whether it is supported.
        ints = ['plotData']
        if interface is None:
            return ints
        return interface in ints

    def name(self):
        # Name assigned via setData(name=...); None if never set.
        return self.opts.get('name', None)

    def setClickable(self, s, width=None):
        """Sets whether the item responds to mouse clicks.

        The *width* argument specifies the width in pixels orthogonal to the
        curve that will respond to a mouse click.
        """
        self.clickable = s
        if width is not None:
            self.opts['mouseWidth'] = width
            # The clickable width affects both the cached mouse shape and the
            # padded bounding rect, so drop both caches.
            self._mouseShape = None
            self._boundingRect = None

    def getData(self):
        # Return the raw (x, y) arrays last passed to setData().
        return self.xData, self.yData
    def dataBounds(self, ax, frac=1.0, orthoRange=None):
        # Return the (min, max) bounds of the data along axis *ax* (0=x, 1=y),
        # optionally restricted to *orthoRange* along the other axis and/or to
        # the central *frac* fraction of the data. Results are cached per axis.
        ## Need this to run as fast as possible.
        ## check cache first:
        cache = self._boundsCache[ax]
        if cache is not None and cache[0] == (frac, orthoRange):
            return cache[1]

        (x, y) = self.getData()
        if x is None or len(x) == 0:
            return (None, None)

        # d is the data along the requested axis; d2 is the orthogonal axis,
        # used only for range masking below.
        if ax == 0:
            d = x
            d2 = y
        elif ax == 1:
            d = y
            d2 = x

        ## If an orthogonal range is specified, mask the data now
        if orthoRange is not None:
            mask = (d2 >= orthoRange[0]) * (d2 <= orthoRange[1])
            d = d[mask]
            #d2 = d2[mask]

        if len(d) == 0:
            return (None, None)

        ## Get min/max (or percentiles) of the requested data range
        if frac >= 1.0:
            b = (np.nanmin(d), np.nanmax(d))
        elif frac <= 0.0:
            raise Exception("Value for parameter 'frac' must be > 0. (got %s)" % str(frac))
        else:
            # Fractional bounds: drop non-finite values and take symmetric
            # percentiles around the median.
            mask = np.isfinite(d)
            d = d[mask]
            b = np.percentile(d, [50 * (1 - frac), 50 * (1 + frac)])

        ## adjust for fill level
        if ax == 1 and self.opts['fillLevel'] is not None:
            b = (min(b[0], self.opts['fillLevel']), max(b[1], self.opts['fillLevel']))

        ## Add pen width only if it is non-cosmetic.
        # (0.7072 is presumably ~sqrt(2)/2, the worst-case half-width of a
        # diagonal stroke -- same constant as in pixelPadding().)
        pen = self.opts['pen']
        spen = self.opts['shadowPen']
        if not pen.isCosmetic():
            b = (b[0] - pen.widthF()*0.7072, b[1] + pen.widthF()*0.7072)
        if spen is not None and not spen.isCosmetic() and spen.style() != QtCore.Qt.NoPen:
            b = (b[0] - spen.widthF()*0.7072, b[1] + spen.widthF()*0.7072)

        self._boundsCache[ax] = [(frac, orthoRange), b]
        return b
def pixelPadding(self):
pen = self.opts['pen']
spen = self.opts['shadowPen']
w = 0
if pen.isCosmetic():
w += pen.widthF()*0.7072
if spen is not None and spen.isCosmetic() and spen.style() != QtCore.Qt.NoPen:
w = max(w, spen.widthF()*0.7072)
if self.clickable:
w = max(w, self.opts['mouseWidth']//2 + 1)
return w
    def boundingRect(self):
        # Lazily compute the bounding rect from the data bounds, expanded by
        # pixelPadding() converted from pixels into local coordinates.
        if self._boundingRect is None:
            (xmn, xmx) = self.dataBounds(ax=0)
            (ymn, ymx) = self.dataBounds(ax=1)
            if xmn is None:
                return QtCore.QRectF()

            px = py = 0.0
            pxPad = self.pixelPadding()
            if pxPad > 0:
                # determine length of pixel in local x, y directions
                px, py = self.pixelVectors()
                # pixelVectors() may return None or degenerate vectors; treat
                # those (and overflow during length()) as zero-size pixels.
                try:
                    px = 0 if px is None else px.length()
                except OverflowError:
                    px = 0
                try:
                    py = 0 if py is None else py.length()
                except OverflowError:
                    py = 0

                # return bounds expanded by pixel size
                px *= pxPad
                py *= pxPad
            #px += self._maxSpotWidth * 0.5
            #py += self._maxSpotWidth * 0.5
            self._boundingRect = QtCore.QRectF(xmn-px, ymn-py, (2*px)+xmx-xmn, (2*py)+ymx-ymn)
        return self._boundingRect

    def viewTransformChanged(self):
        # A view transform change alters the pixel size, which the padded
        # bounding rect depends on, so the cached bounds must be dropped.
        self.invalidateBounds()
        self.prepareGeometryChange()
#def boundingRect(self):
#if self._boundingRect is None:
#(x, y) = self.getData()
#if x is None or y is None or len(x) == 0 or len(y) == 0:
#return QtCore.QRectF()
#if self.opts['shadowPen'] is not None:
#lineWidth = (max(self.opts['pen'].width(), self.opts['shadowPen'].width()) + 1)
#else:
#lineWidth = (self.opts['pen'].width()+1)
#pixels = self.pixelVectors()
#if pixels == (None, None):
#pixels = [Point(0,0), Point(0,0)]
#xmin = x.min()
#xmax = x.max()
#ymin = y.min()
#ymax = y.max()
#if self.opts['fillLevel'] is not None:
#ymin = min(ymin, self.opts['fillLevel'])
#ymax = max(ymax, self.opts['fillLevel'])
#xmin -= pixels[0].x() * lineWidth
#xmax += pixels[0].x() * lineWidth
#ymin -= abs(pixels[1].y()) * lineWidth
#ymax += abs(pixels[1].y()) * lineWidth
#self._boundingRect = QtCore.QRectF(xmin, ymin, xmax-xmin, ymax-ymin)
#return self._boundingRect
    def invalidateBounds(self):
        # Drop the cached bounding rect and per-axis data bounds; they are
        # recomputed lazily on next access.
        self._boundingRect = None
        self._boundsCache = [None, None]

    def setPen(self, *args, **kargs):
        """Set the pen used to draw the curve."""
        self.opts['pen'] = fn.mkPen(*args, **kargs)
        self.invalidateBounds()
        self.update()

    def setShadowPen(self, *args, **kargs):
        """Set the shadow pen used to draw behind the primary pen.

        This pen must have a larger width than the primary
        pen to be visible.
        """
        self.opts['shadowPen'] = fn.mkPen(*args, **kargs)
        self.invalidateBounds()
        self.update()

    def setBrush(self, *args, **kargs):
        """Set the brush used when filling the area under the curve"""
        self.opts['brush'] = fn.mkBrush(*args, **kargs)
        self.invalidateBounds()
        self.update()

    def setFillLevel(self, level):
        """Set the level filled to when filling under the curve"""
        self.opts['fillLevel'] = level
        self.fillPath = None  # the fill path depends on the fill level
        self.invalidateBounds()
        self.update()
    def setData(self, *args, **kargs):
        """
        ============== ========================================================
        **Arguments:**
        x, y           (numpy arrays) Data to show
        pen            Pen to use when drawing. Any single argument accepted by
                       :func:`mkPen <pyqtgraph.mkPen>` is allowed.
        shadowPen      Pen for drawing behind the primary pen. Usually this
                       is used to emphasize the curve by providing a
                       high-contrast border. Any single argument accepted by
                       :func:`mkPen <pyqtgraph.mkPen>` is allowed.
        fillLevel      (float or None) Fill the area 'under' the curve to
                       *fillLevel*
        brush          QBrush to use when filling. Any single argument accepted
                       by :func:`mkBrush <pyqtgraph.mkBrush>` is allowed.
        antialias      (bool) Whether to use antialiasing when drawing. This
                       is disabled by default because it decreases performance.
        stepMode       If True, two orthogonal lines are drawn for each sample
                       as steps. This is commonly used when drawing histograms.
                       Note that in this case, len(x) == len(y) + 1
        connect        Argument specifying how vertexes should be connected
                       by line segments. Default is "all", indicating full
                       connection. "pairs" causes only even-numbered segments
                       to be drawn. "finite" causes segments to be omitted if
                       they are attached to nan or inf values. For any other
                       connectivity, specify an array of boolean values.
        ============== ========================================================

        If non-keyword arguments are used, they will be interpreted as
        setData(y) for a single argument and setData(x, y) for two
        arguments.
        """
        # All the real work happens in updateData().
        self.updateData(*args, **kargs)
    def updateData(self, *args, **kargs):
        # Implementation behind setData(): normalize positional arguments,
        # validate the arrays, store them, apply display options, then emit
        # sigPlotChanged.
        profiler = debug.Profiler()

        # Positional arguments: (y,) or (x, y).
        if len(args) == 1:
            kargs['y'] = args[0]
        elif len(args) == 2:
            kargs['x'] = args[0]
            kargs['y'] = args[1]

        # Missing data defaults to an empty y and an integer-range x.
        if 'y' not in kargs or kargs['y'] is None:
            kargs['y'] = np.array([])
        if 'x' not in kargs or kargs['x'] is None:
            kargs['x'] = np.arange(len(kargs['y']))

        for k in ['x', 'y']:
            data = kargs[k]
            if isinstance(data, list):
                data = np.array(data)
                kargs[k] = data
            if not isinstance(data, np.ndarray) or data.ndim > 1:
                raise Exception("Plot data must be 1D ndarray.")
            if 'complex' in str(data.dtype):
                raise Exception("Can not plot complex data types.")
        profiler("data checks")

        #self.setCacheMode(QtGui.QGraphicsItem.NoCache)  ## Disabling and re-enabling the cache works around a bug in Qt 4.6 causing the cached results to display incorrectly
        ## Test this bug with test_PlotWidget and zoom in on the animated plot
        self.invalidateBounds()
        self.prepareGeometryChange()
        self.informViewBoundsChanged()
        # .view() avoids carrying subclass behavior of the input arrays.
        self.yData = kargs['y'].view(np.ndarray)
        self.xData = kargs['x'].view(np.ndarray)
        profiler('copy')

        if 'stepMode' in kargs:
            self.opts['stepMode'] = kargs['stepMode']

        # In step mode x holds bin edges, so it must be one element longer.
        if self.opts['stepMode'] is True:
            if len(self.xData) != len(self.yData)+1:  ## allow difference of 1 for step mode plots
                raise Exception("len(X) must be len(Y)+1 since stepMode=True (got %s and %s)" % (self.xData.shape, self.yData.shape))
        else:
            if self.xData.shape != self.yData.shape:  ## allow difference of 1 for step mode plots
                raise Exception("X and Y arrays must be the same shape--got %s and %s." % (self.xData.shape, self.yData.shape))

        # New data invalidates all cached geometry.
        self.path = None
        self.fillPath = None
        self._mouseShape = None
        #self.xDisp = self.yDisp = None

        if 'name' in kargs:
            self.opts['name'] = kargs['name']
        if 'connect' in kargs:
            self.opts['connect'] = kargs['connect']
        if 'pen' in kargs:
            self.setPen(kargs['pen'])
        if 'shadowPen' in kargs:
            self.setShadowPen(kargs['shadowPen'])
        if 'fillLevel' in kargs:
            self.setFillLevel(kargs['fillLevel'])
        if 'brush' in kargs:
            self.setBrush(kargs['brush'])
        if 'antialias' in kargs:
            self.opts['antialias'] = kargs['antialias']

        profiler('set')
        self.update()
        profiler('update')
        self.sigPlotChanged.emit(self)
        profiler('emit')
    def generatePath(self, x, y):
        # Build a QPainterPath from the data arrays. In step mode each value
        # is duplicated so the curve is drawn as horizontal/vertical segments
        # (histogram style).
        if self.opts['stepMode']:
            ## each value in the x/y arrays generates 2 points.
            x2 = np.empty((len(x),2), dtype=x.dtype)
            x2[:] = x[:,np.newaxis]
            if self.opts['fillLevel'] is None:
                # Trim the first/last duplicate so the curve starts and ends
                # exactly on the data.
                x = x2.reshape(x2.size)[1:-1]
                y2 = np.empty((len(y),2), dtype=y.dtype)
                y2[:] = y[:,np.newaxis]
                y = y2.reshape(y2.size)
            else:
                ## If we have a fill level, add two extra points at either end
                x = x2.reshape(x2.size)
                y2 = np.empty((len(y)+2,2), dtype=y.dtype)
                y2[1:-1] = y[:,np.newaxis]
                y = y2.reshape(y2.size)[1:-1]
                y[0] = self.opts['fillLevel']
                y[-1] = self.opts['fillLevel']

        path = fn.arrayToQPath(x, y, connect=self.opts['connect'])

        return path
def getPath(self):
if self.path is None:
x,y = self.getData()
if x is None or len(x) == 0 or y is None or len(y) == 0:
self.path = QtGui.QPainterPath()
else:
self.path = self.generatePath(*self.getData())
self.fillPath = None
self._mouseShape = None
return self.path
    @debug.warnOnException  ## raising an exception here causes crash
    def paint(self, p, opt, widget):
        # Draw the curve: fill under the curve first (if configured), then
        # the shadow pen, then the primary pen. Delegates to paintGL() when
        # experimental OpenGL rendering is enabled and available.
        profiler = debug.Profiler()
        if self.xData is None or len(self.xData) == 0:
            return

        if HAVE_OPENGL and getConfigOption('enableExperimental') and isinstance(widget, QtOpenGL.QGLWidget):
            self.paintGL(p, opt, widget)
            return

        x = None
        y = None
        path = self.getPath()
        profiler('generate path')

        # Export may override the antialias setting.
        if self._exportOpts is not False:
            aa = self._exportOpts.get('antialias', True)
        else:
            aa = self.opts['antialias']

        p.setRenderHint(p.Antialiasing, aa)

        if self.opts['brush'] is not None and self.opts['fillLevel'] is not None:
            if self.fillPath is None:
                # Build the fill path lazily: close the curve down to the
                # fill level at both ends.
                if x is None:
                    x,y = self.getData()
                p2 = QtGui.QPainterPath(self.path)
                p2.lineTo(x[-1], self.opts['fillLevel'])
                p2.lineTo(x[0], self.opts['fillLevel'])
                p2.lineTo(x[0], y[0])
                p2.closeSubpath()
                self.fillPath = p2

            profiler('generate fill path')
            p.fillPath(self.fillPath, self.opts['brush'])
            profiler('draw fill path')

        sp = fn.mkPen(self.opts['shadowPen'])
        cp = fn.mkPen(self.opts['pen'])

        ## Copy pens and apply alpha adjustment
        #sp = QtGui.QPen(self.opts['shadowPen'])
        #cp = QtGui.QPen(self.opts['pen'])
        #for pen in [sp, cp]:
            #if pen is None:
                #continue
            #c = pen.color()
            #c.setAlpha(c.alpha() * self.opts['alphaHint'])
            #pen.setColor(c)
            ##pen.setCosmetic(True)

        # Shadow pen is drawn first so the primary pen sits on top of it.
        if sp is not None and sp.style() != QtCore.Qt.NoPen:
            p.setPen(sp)
            p.drawPath(path)
        p.setPen(cp)
        p.drawPath(path)
        profiler('drawPath')

        #print "Render hints:", int(p.renderHints())
        #p.setPen(QtGui.QPen(QtGui.QColor(255,0,0)))
        #p.drawRect(self.boundingRect())
    def paintGL(self, p, opt, widget):
        # Experimental OpenGL rendering path: clip to the view box using the
        # stencil buffer, then draw the curve as a single GL line strip.
        p.beginNativePainting()
        import OpenGL.GL as gl

        ## set clipping viewport
        view = self.getViewBox()
        if view is not None:
            rect = view.mapRectToItem(self, view.boundingRect())
            #gl.glViewport(int(rect.x()), int(rect.y()), int(rect.width()), int(rect.height()))

            #gl.glTranslate(-rect.x(), -rect.y(), 0)

            gl.glEnable(gl.GL_STENCIL_TEST)
            gl.glColorMask(gl.GL_FALSE, gl.GL_FALSE, gl.GL_FALSE, gl.GL_FALSE) # disable drawing to frame buffer
            gl.glDepthMask(gl.GL_FALSE) # disable drawing to depth buffer
            gl.glStencilFunc(gl.GL_NEVER, 1, 0xFF)
            gl.glStencilOp(gl.GL_REPLACE, gl.GL_KEEP, gl.GL_KEEP)

            ## draw stencil pattern
            # Two triangles covering the view rect; only pixels inside the
            # stencil will be drawn below.
            gl.glStencilMask(0xFF)
            gl.glClear(gl.GL_STENCIL_BUFFER_BIT)
            gl.glBegin(gl.GL_TRIANGLES)
            gl.glVertex2f(rect.x(), rect.y())
            gl.glVertex2f(rect.x()+rect.width(), rect.y())
            gl.glVertex2f(rect.x(), rect.y()+rect.height())
            gl.glVertex2f(rect.x()+rect.width(), rect.y()+rect.height())
            gl.glVertex2f(rect.x()+rect.width(), rect.y())
            gl.glVertex2f(rect.x(), rect.y()+rect.height())
            gl.glEnd()

            # Re-enable normal drawing, restricted to the stenciled region.
            gl.glColorMask(gl.GL_TRUE, gl.GL_TRUE, gl.GL_TRUE, gl.GL_TRUE)
            gl.glDepthMask(gl.GL_TRUE)
            gl.glStencilMask(0x00)
            gl.glStencilFunc(gl.GL_EQUAL, 1, 0xFF)

        try:
            x, y = self.getData()
            # Pack (x, y) into an N x 2 vertex array for glVertexPointerf.
            pos = np.empty((len(x), 2))
            pos[:,0] = x
            pos[:,1] = y
            gl.glEnableClientState(gl.GL_VERTEX_ARRAY)
            try:
                gl.glVertexPointerf(pos)
                pen = fn.mkPen(self.opts['pen'])
                color = pen.color()
                gl.glColor4f(color.red()/255., color.green()/255., color.blue()/255., color.alpha()/255.)
                width = pen.width()
                if pen.isCosmetic() and width < 1:
                    width = 1
                gl.glPointSize(width)
                gl.glEnable(gl.GL_LINE_SMOOTH)
                gl.glEnable(gl.GL_BLEND)
                gl.glBlendFunc(gl.GL_SRC_ALPHA, gl.GL_ONE_MINUS_SRC_ALPHA)
                gl.glHint(gl.GL_LINE_SMOOTH_HINT, gl.GL_NICEST)
                # NOTE(review): '/' here is integer division on Python 2 (this
                # codebase's target); under Python 3 it would produce a float.
                gl.glDrawArrays(gl.GL_LINE_STRIP, 0, pos.size / pos.shape[-1])
            finally:
                gl.glDisableClientState(gl.GL_VERTEX_ARRAY)
        finally:
            p.endNativePainting()
    def clear(self):
        # Reset all data and cached geometry to the empty state.
        self.xData = None  ## raw values
        self.yData = None
        self.xDisp = None  ## display values (after log / fft)
        self.yDisp = None
        self.path = None
        self.fillPath = None
        self._mouseShape = None
        self._mouseBounds = None
        self._boundsCache = [None, None]
        #del self.xData, self.yData, self.xDisp, self.yDisp, self.path

    def mouseShape(self):
        """
        Return a QPainterPath representing the clickable shape of the curve
        """
        if self._mouseShape is None:
            view = self.getViewBox()
            if view is None:
                return QtGui.QPainterPath()
            stroker = QtGui.QPainterPathStroker()
            path = self.getPath()
            # Stroke in view coordinates so 'mouseWidth' is measured in view
            # pixels, then map the stroked outline back to item coordinates.
            path = self.mapToItem(view, path)
            stroker.setWidth(self.opts['mouseWidth'])
            mousePath = stroker.createStroke(path)
            self._mouseShape = self.mapFromItem(view, mousePath)
        return self._mouseShape

    def mouseClickEvent(self, ev):
        # Emit sigClicked when a left-click lands within mouseShape().
        if not self.clickable or ev.button() != QtCore.Qt.LeftButton:
            return
        if self.mouseShape().contains(ev.pos()):
            ev.accept()
            self.sigClicked.emit(self)
class ROIPlotItem(PlotCurveItem):
    """Plot curve that monitors an ROI and image for changes to automatically replot."""
    def __init__(self, roi, data, img, axes=(0,1), xVals=None, color=None):
        # Keep references to the ROI, the source array, and the image item so
        # the curve can be regenerated whenever the ROI region changes.
        self.roi = roi
        self.roiData = data
        self.roiImg = img
        self.axes = axes
        self.xVals = xVals
        PlotCurveItem.__init__(self, self.getRoiData(), x=self.xVals, color=color)
        #roi.connect(roi, QtCore.SIGNAL('regionChanged'), self.roiChangedEvent)
        roi.sigRegionChanged.connect(self.roiChangedEvent)
        #self.roiChangedEvent()

    def getRoiData(self):
        # Extract the region of roiData selected by the ROI and reduce it to
        # 1D by repeatedly averaging over axis 1. Returns None when the ROI
        # yields no region.
        d = self.roi.getArrayRegion(self.roiData, self.roiImg, axes=self.axes)
        if d is None:
            return
        while d.ndim > 1:
            d = d.mean(axis=1)
        return d

    def roiChangedEvent(self):
        # Slot for roi.sigRegionChanged: re-extract the data and replot.
        d = self.getRoiData()
        self.updateData(d, self.xVals)
| mit |
phoebusliang/parallel-lettuce | tests/integration/lib/Django-1.3/django/contrib/localflavor/fi/fi_municipalities.py | 394 | 10822 | # -*- coding: utf-8 -*-
"""
An alphabetical list of Finnish municipalities for use as `choices` in a
formfield.
This exists in this standalone file so that it's only imported into memory
when explicitly needed.
"""
MUNICIPALITY_CHOICES = (
('akaa', u"Akaa"),
('alajarvi', u"Alajärvi"),
('alavieska', u"Alavieska"),
('alavus', u"Alavus"),
('artjarvi', u"Artjärvi"),
('asikkala', u"Asikkala"),
('askola', u"Askola"),
('aura', u"Aura"),
('brando', u"Brändö"),
('eckero', u"Eckerö"),
('enonkoski', u"Enonkoski"),
('enontekio', u"Enontekiö"),
('espoo', u"Espoo"),
('eura', u"Eura"),
('eurajoki', u"Eurajoki"),
('evijarvi', u"Evijärvi"),
('finstrom', u"Finström"),
('forssa', u"Forssa"),
('foglo', u"Föglö"),
('geta', u"Geta"),
('haapajarvi', u"Haapajärvi"),
('haapavesi', u"Haapavesi"),
('hailuoto', u"Hailuoto"),
('halsua', u"Halsua"),
('hamina', u"Hamina"),
('hammarland', u"Hammarland"),
('hankasalmi', u"Hankasalmi"),
('hanko', u"Hanko"),
('harjavalta', u"Harjavalta"),
('hartola', u"Hartola"),
('hattula', u"Hattula"),
('haukipudas', u"Haukipudas"),
('hausjarvi', u"Hausjärvi"),
('heinola', u"Heinola"),
('heinavesi', u"Heinävesi"),
('helsinki', u"Helsinki"),
('hirvensalmi', u"Hirvensalmi"),
('hollola', u"Hollola"),
('honkajoki', u"Honkajoki"),
('huittinen', u"Huittinen"),
('humppila', u"Humppila"),
('hyrynsalmi', u"Hyrynsalmi"),
('hyvinkaa', u"Hyvinkää"),
('hameenkoski', u"Hämeenkoski"),
('hameenkyro', u"Hämeenkyrö"),
('hameenlinna', u"Hämeenlinna"),
('ii', u"Ii"),
('iisalmi', u"Iisalmi"),
('iitti', u"Iitti"),
('ikaalinen', u"Ikaalinen"),
('ilmajoki', u"Ilmajoki"),
('ilomantsi', u"Ilomantsi"),
('imatra', u"Imatra"),
('inari', u"Inari"),
('inkoo', u"Inkoo"),
('isojoki', u"Isojoki"),
('isokyro', u"Isokyrö"),
('jalasjarvi', u"Jalasjärvi"),
('janakkala', u"Janakkala"),
('joensuu', u"Joensuu"),
('jokioinen', u"Jokioinen"),
('jomala', u"Jomala"),
('joroinen', u"Joroinen"),
('joutsa', u"Joutsa"),
('juankoski', u"Juankoski"),
('juuka', u"Juuka"),
('juupajoki', u"Juupajoki"),
('juva', u"Juva"),
('jyvaskyla', u"Jyväskylä"),
('jamijarvi', u"Jämijärvi"),
('jamsa', u"Jämsä"),
('jarvenpaa', u"Järvenpää"),
('kaarina', u"Kaarina"),
('kaavi', u"Kaavi"),
('kajaani', u"Kajaani"),
('kalajoki', u"Kalajoki"),
('kangasala', u"Kangasala"),
('kangasniemi', u"Kangasniemi"),
('kankaanpaa', u"Kankaanpää"),
('kannonkoski', u"Kannonkoski"),
('kannus', u"Kannus"),
('karijoki', u"Karijoki"),
('karjalohja', u"Karjalohja"),
('karkkila', u"Karkkila"),
('karstula', u"Karstula"),
('karttula', u"Karttula"),
('karvia', u"Karvia"),
('kaskinen', u"Kaskinen"),
('kauhajoki', u"Kauhajoki"),
('kauhava', u"Kauhava"),
('kauniainen', u"Kauniainen"),
('kaustinen', u"Kaustinen"),
('keitele', u"Keitele"),
('kemi', u"Kemi"),
('kemijarvi', u"Kemijärvi"),
('keminmaa', u"Keminmaa"),
('kemionsaari', u"Kemiönsaari"),
('kempele', u"Kempele"),
('kerava', u"Kerava"),
('kerimaki', u"Kerimäki"),
('kesalahti', u"Kesälahti"),
('keuruu', u"Keuruu"),
('kihnio', u"Kihniö"),
('kiikoinen', u"Kiikoinen"),
('kiiminki', u"Kiiminki"),
('kinnula', u"Kinnula"),
('kirkkonummi', u"Kirkkonummi"),
('kitee', u"Kitee"),
('kittila', u"Kittilä"),
('kiuruvesi', u"Kiuruvesi"),
('kivijarvi', u"Kivijärvi"),
('kokemaki', u"Kokemäki"),
('kokkola', u"Kokkola"),
('kolari', u"Kolari"),
('konnevesi', u"Konnevesi"),
('kontiolahti', u"Kontiolahti"),
('korsnas', u"Korsnäs"),
('koskitl', u"Koski Tl"),
('kotka', u"Kotka"),
('kouvola', u"Kouvola"),
('kristiinankaupunki', u"Kristiinankaupunki"),
('kruunupyy', u"Kruunupyy"),
('kuhmalahti', u"Kuhmalahti"),
('kuhmo', u"Kuhmo"),
('kuhmoinen', u"Kuhmoinen"),
('kumlinge', u"Kumlinge"),
('kuopio', u"Kuopio"),
('kuortane', u"Kuortane"),
('kurikka', u"Kurikka"),
('kustavi', u"Kustavi"),
('kuusamo', u"Kuusamo"),
('kylmakoski', u"Kylmäkoski"),
('kyyjarvi', u"Kyyjärvi"),
('karkola', u"Kärkölä"),
('karsamaki', u"Kärsämäki"),
('kokar', u"Kökar"),
('koylio', u"Köyliö"),
('lahti', u"Lahti"),
('laihia', u"Laihia"),
('laitila', u"Laitila"),
('lapinjarvi', u"Lapinjärvi"),
('lapinlahti', u"Lapinlahti"),
('lappajarvi', u"Lappajärvi"),
('lappeenranta', u"Lappeenranta"),
('lapua', u"Lapua"),
('laukaa', u"Laukaa"),
('lavia', u"Lavia"),
('lemi', u"Lemi"),
('lemland', u"Lemland"),
('lempaala', u"Lempäälä"),
('leppavirta', u"Leppävirta"),
('lestijarvi', u"Lestijärvi"),
('lieksa', u"Lieksa"),
('lieto', u"Lieto"),
('liminka', u"Liminka"),
('liperi', u"Liperi"),
('lohja', u"Lohja"),
('loimaa', u"Loimaa"),
('loppi', u"Loppi"),
('loviisa', u"Loviisa"),
('luhanka', u"Luhanka"),
('lumijoki', u"Lumijoki"),
('lumparland', u"Lumparland"),
('luoto', u"Luoto"),
('luumaki', u"Luumäki"),
('luvia', u"Luvia"),
('lansi-turunmaa', u"Länsi-Turunmaa"),
('maalahti', u"Maalahti"),
('maaninka', u"Maaninka"),
('maarianhamina', u"Maarianhamina"),
('marttila', u"Marttila"),
('masku', u"Masku"),
('merijarvi', u"Merijärvi"),
('merikarvia', u"Merikarvia"),
('miehikkala', u"Miehikkälä"),
('mikkeli', u"Mikkeli"),
('muhos', u"Muhos"),
('multia', u"Multia"),
('muonio', u"Muonio"),
('mustasaari', u"Mustasaari"),
('muurame', u"Muurame"),
('mynamaki', u"Mynämäki"),
('myrskyla', u"Myrskylä"),
('mantsala', u"Mäntsälä"),
('mantta-vilppula', u"Mänttä-Vilppula"),
('mantyharju', u"Mäntyharju"),
('naantali', u"Naantali"),
('nakkila', u"Nakkila"),
('nastola', u"Nastola"),
('nilsia', u"Nilsiä"),
('nivala', u"Nivala"),
('nokia', u"Nokia"),
('nousiainen', u"Nousiainen"),
('nummi-pusula', u"Nummi-Pusula"),
('nurmes', u"Nurmes"),
('nurmijarvi', u"Nurmijärvi"),
('narpio', u"Närpiö"),
('oravainen', u"Oravainen"),
('orimattila', u"Orimattila"),
('oripaa', u"Oripää"),
('orivesi', u"Orivesi"),
('oulainen', u"Oulainen"),
('oulu', u"Oulu"),
('oulunsalo', u"Oulunsalo"),
('outokumpu', u"Outokumpu"),
('padasjoki', u"Padasjoki"),
('paimio', u"Paimio"),
('paltamo', u"Paltamo"),
('parikkala', u"Parikkala"),
('parkano', u"Parkano"),
('pedersore', u"Pedersöre"),
('pelkosenniemi', u"Pelkosenniemi"),
('pello', u"Pello"),
('perho', u"Perho"),
('pertunmaa', u"Pertunmaa"),
('petajavesi', u"Petäjävesi"),
('pieksamaki', u"Pieksämäki"),
('pielavesi', u"Pielavesi"),
('pietarsaari', u"Pietarsaari"),
('pihtipudas', u"Pihtipudas"),
('pirkkala', u"Pirkkala"),
('polvijarvi', u"Polvijärvi"),
('pomarkku', u"Pomarkku"),
('pori', u"Pori"),
('pornainen', u"Pornainen"),
('porvoo', u"Porvoo"),
('posio', u"Posio"),
('pudasjarvi', u"Pudasjärvi"),
('pukkila', u"Pukkila"),
('punkaharju', u"Punkaharju"),
('punkalaidun', u"Punkalaidun"),
('puolanka', u"Puolanka"),
('puumala', u"Puumala"),
('pyhtaa', u"Pyhtää"),
('pyhajoki', u"Pyhäjoki"),
('pyhajarvi', u"Pyhäjärvi"),
('pyhanta', u"Pyhäntä"),
('pyharanta', u"Pyhäranta"),
('palkane', u"Pälkäne"),
('poytya', u"Pöytyä"),
('raahe', u"Raahe"),
('raasepori', u"Raasepori"),
('raisio', u"Raisio"),
('rantasalmi', u"Rantasalmi"),
('ranua', u"Ranua"),
('rauma', u"Rauma"),
('rautalampi', u"Rautalampi"),
('rautavaara', u"Rautavaara"),
('rautjarvi', u"Rautjärvi"),
('reisjarvi', u"Reisjärvi"),
('riihimaki', u"Riihimäki"),
('ristiina', u"Ristiina"),
('ristijarvi', u"Ristijärvi"),
('rovaniemi', u"Rovaniemi"),
('ruokolahti', u"Ruokolahti"),
('ruovesi', u"Ruovesi"),
('rusko', u"Rusko"),
('raakkyla', u"Rääkkylä"),
('saarijarvi', u"Saarijärvi"),
('salla', u"Salla"),
('salo', u"Salo"),
('saltvik', u"Saltvik"),
('sastamala', u"Sastamala"),
('sauvo', u"Sauvo"),
('savitaipale', u"Savitaipale"),
('savonlinna', u"Savonlinna"),
('savukoski', u"Savukoski"),
('seinajoki', u"Seinäjoki"),
('sievi', u"Sievi"),
('siikainen', u"Siikainen"),
('siikajoki', u"Siikajoki"),
('siikalatva', u"Siikalatva"),
('siilinjarvi', u"Siilinjärvi"),
('simo', u"Simo"),
('sipoo', u"Sipoo"),
('siuntio', u"Siuntio"),
('sodankyla', u"Sodankylä"),
('soini', u"Soini"),
('somero', u"Somero"),
('sonkajarvi', u"Sonkajärvi"),
('sotkamo', u"Sotkamo"),
('sottunga', u"Sottunga"),
('sulkava', u"Sulkava"),
('sund', u"Sund"),
('suomenniemi', u"Suomenniemi"),
('suomussalmi', u"Suomussalmi"),
('suonenjoki', u"Suonenjoki"),
('sysma', u"Sysmä"),
('sakyla', u"Säkylä"),
('taipalsaari', u"Taipalsaari"),
('taivalkoski', u"Taivalkoski"),
('taivassalo', u"Taivassalo"),
('tammela', u"Tammela"),
('tampere', u"Tampere"),
('tarvasjoki', u"Tarvasjoki"),
('tervo', u"Tervo"),
('tervola', u"Tervola"),
('teuva', u"Teuva"),
('tohmajarvi', u"Tohmajärvi"),
('toholampi', u"Toholampi"),
('toivakka', u"Toivakka"),
('tornio', u"Tornio"),
('turku', u"Turku"),
('tuusniemi', u"Tuusniemi"),
('tuusula', u"Tuusula"),
('tyrnava', u"Tyrnävä"),
('toysa', u"Töysä"),
('ulvila', u"Ulvila"),
('urjala', u"Urjala"),
('utajarvi', u"Utajärvi"),
('utsjoki', u"Utsjoki"),
('uurainen', u"Uurainen"),
('uusikaarlepyy', u"Uusikaarlepyy"),
('uusikaupunki', u"Uusikaupunki"),
('vaala', u"Vaala"),
('vaasa', u"Vaasa"),
('valkeakoski', u"Valkeakoski"),
('valtimo', u"Valtimo"),
('vantaa', u"Vantaa"),
('varkaus', u"Varkaus"),
('varpaisjarvi', u"Varpaisjärvi"),
('vehmaa', u"Vehmaa"),
('vesanto', u"Vesanto"),
('vesilahti', u"Vesilahti"),
('veteli', u"Veteli"),
('vierema', u"Vieremä"),
('vihanti', u"Vihanti"),
('vihti', u"Vihti"),
('viitasaari', u"Viitasaari"),
('vimpeli', u"Vimpeli"),
('virolahti', u"Virolahti"),
('virrat', u"Virrat"),
('vardo', u"Vårdö"),
('vahakyro', u"Vähäkyrö"),
('voyri-maksamaa', u"Vöyri-Maksamaa"),
('yli-ii', u"Yli-Ii"),
('ylitornio', u"Ylitornio"),
('ylivieska', u"Ylivieska"),
('ylojarvi', u"Ylöjärvi"),
('ypaja', u"Ypäjä"),
('ahtari', u"Ähtäri"),
('aanekoski', u"Äänekoski")
) | gpl-3.0 |
Kami/libcloud | libcloud/test/__init__.py | 6 | 8581 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import random
import requests
from libcloud.common.base import Response
from libcloud.http import LibcloudConnection
from libcloud.utils.py3 import PY2
if PY2:
from StringIO import StringIO
else:
from io import StringIO
import requests_mock
from libcloud.utils.py3 import httplib
from libcloud.utils.py3 import urlparse
from libcloud.utils.py3 import parse_qs
from libcloud.utils.py3 import parse_qsl
from libcloud.utils.py3 import urlquote
XML_HEADERS = {'content-type': 'application/xml'}
class LibcloudTestCase(unittest.TestCase):
    """TestCase that records which mock URLs and mock methods were exercised."""

    def __init__(self, *args, **kwargs):
        self._visited_urls = []
        self._executed_mock_methods = []
        super(LibcloudTestCase, self).__init__(*args, **kwargs)

    def setUp(self):
        # Reset the tracking state before every test method runs.
        self._visited_urls = []
        self._executed_mock_methods = []

    def _add_visited_url(self, url):
        self._visited_urls.append(url)

    def _add_executed_mock_method(self, method_name):
        self._executed_mock_methods.append(method_name)

    def assertExecutedMethodCount(self, expected):
        """Assert that exactly ``expected`` mock methods were invoked."""
        actual = len(self._executed_mock_methods)
        message = 'expected %d, but %d mock methods were executed' % (expected, actual)
        self.assertEqual(actual, expected, message)
class multipleresponse(object):
    """
    A decorator that allows MockHttp objects to return multiple responses:
    each call yields the next element of the wrapped function's result.
    """
    count = 0
    func = None

    def __init__(self, f):
        self.func = f

    def __call__(self, *args, **kwargs):
        # NOTE(review): the wrapped function receives its own class object as
        # the first argument (legacy behavior) -- confirm before changing.
        responses = self.func(self.func.__class__, *args, **kwargs)
        current = responses[self.count]
        self.count += 1
        return current
class BodyStream(StringIO):
    """StringIO subclass whose read/next accept (and ignore) a chunk_size argument."""

    def next(self, chunk_size=None):
        # Python 2 iteration protocol; chunk_size kept only for API compatibility.
        return StringIO.next(self)

    def __next__(self, chunk_size=None):
        # Python 3 iteration protocol.
        return super(BodyStream, self).__next__()

    def read(self, chunk_size=None):
        # Always reads the full remaining contents regardless of chunk_size.
        return super(BodyStream, self).read()
class MockHttp(LibcloudConnection):
    """
    A mock HTTP client/server suitable for testing purposes. This replaces
    `HTTPConnection` by implementing its API and returning a mock response.

    Define methods by request path, replacing slashes (/) with underscores (_).

    Each of these mock methods should return a tuple of:

        (int status, str body, dict headers, str reason)
    """
    type = None
    use_param = None  # will use this param to namespace the request function
    test = None  # TestCase instance which is using this mock
    proxy_url = None

    def __init__(self, *args, **kwargs):
        # Load assertion methods into the class, in case people want to assert
        # within a response
        if isinstance(self, unittest.TestCase):
            unittest.TestCase.__init__(self, '__init__')
        super(MockHttp, self).__init__(*args, **kwargs)

    def _get_request(self, method, url, body=None, headers=None):
        """
        Resolve the mock method matching this request's path and invoke it,
        returning its (status, body, headers, reason) tuple.
        """
        # Find a method we can use for this request
        parsed = urlparse.urlparse(url)
        _, _, path, _, query, _ = parsed
        qs = parse_qs(query)
        if path.endswith('/'):
            path = path[:-1]
        meth_name = self._get_method_name(type=self.type,
                                          use_param=self.use_param,
                                          qs=qs, path=path)
        # '%' can appear in percent-encoded method names (e.g. '~' -> '%7E');
        # Python identifiers cannot contain it, so it is mapped to '_'.
        meth = getattr(self, meth_name.replace('%', '_'))

        if self.test and isinstance(self.test, LibcloudTestCase):
            self.test._add_visited_url(url=url)
            self.test._add_executed_mock_method(method_name=meth_name)

        return meth(method, url, body, headers)

    def request(self, method, url, body=None, headers=None, raw=False, stream=False):
        """
        Serve the request from the matching mock method by registering its
        response with requests_mock, then delegating to the real connection.
        """
        headers = self._normalize_headers(headers=headers)
        r_status, r_body, r_headers, r_reason = self._get_request(method, url, body, headers)
        if r_body is None:
            r_body = ''
        # this is to catch any special chars e.g. ~ in the request. URL
        url = urlquote(url)

        with requests_mock.mock() as m:
            m.register_uri(method, url, text=r_body, reason=r_reason,
                           headers=r_headers, status_code=r_status)
            try:
                super(MockHttp, self).request(
                    method=method, url=url, body=body, headers=headers,
                    raw=raw, stream=stream)
            except requests_mock.exceptions.NoMockAddress as nma:
                # Surface a mock-setup error as AttributeError so tests fail
                # with a clear message instead of a requests_mock internal one.
                raise AttributeError("Failed to mock out URL {0} - {1}".format(
                    url, nma.request.url
                ))

    def prepared_request(self, method, url, body=None,
                         headers=None, raw=False, stream=False):
        """Same as request() but for the prepared-request code path."""
        headers = self._normalize_headers(headers=headers)
        r_status, r_body, r_headers, r_reason = self._get_request(method, url, body, headers)

        with requests_mock.mock() as m:
            m.register_uri(method, url, text=r_body, reason=r_reason,
                           headers=r_headers, status_code=r_status)
            super(MockHttp, self).prepared_request(
                method=method, url=url, body=body, headers=headers,
                raw=raw, stream=stream)

    # Mock request/response example
    def _example(self, method, url, body, headers):
        """
        Return a simple message and header, regardless of input.
        """
        return (httplib.OK, 'Hello World!', {'X-Foo': 'libcloud'},
                httplib.responses[httplib.OK])

    def _example_fail(self, method, url, body, headers):
        # Example of a failing (403) mock response.
        return (httplib.FORBIDDEN, 'Oh Noes!', {'X-Foo': 'fail'},
                httplib.responses[httplib.FORBIDDEN])

    def _get_method_name(self, type, use_param, qs, path):
        """
        Derive the mock method name from the request path, optionally
        namespaced by ``type`` and by the value of the ``use_param``
        query-string parameter.
        """
        path = path.split('?')[0]
        meth_name = (
            path
            .replace('/', '_')
            .replace('.', '_')
            .replace('-', '_')
            .replace('~', '%7E'))  # Python 3.7 no longer quotes ~

        if type:
            meth_name = '%s_%s' % (meth_name, self.type)

        if use_param and use_param in qs:
            param = qs[use_param][0].replace('.', '_').replace('-', '_')
            meth_name = '%s_%s' % (meth_name, param)

        if meth_name == '':
            # Request to the bare root path maps to the 'root' method.
            meth_name = 'root'

        return meth_name

    def assertUrlContainsQueryParams(self, url, expected_params, strict=False):
        """
        Assert that provided url contains provided query parameters.

        :param url: URL to assert.
        :type url: ``str``

        :param expected_params: Dictionary of expected query parameters.
        :type expected_params: ``dict``

        :param strict: Assert that provided url contains only expected_params.
                       (defaults to ``False``)
        :type strict: ``bool``
        """
        question_mark_index = url.find('?')
        if question_mark_index != -1:
            url = url[question_mark_index + 1:]
        params = dict(parse_qsl(url))

        if strict:
            assert params == expected_params
        else:
            for key, value in expected_params.items():
                assert key in params
                assert params[key] == value
class MockConnection(object):
    """Minimal stand-in for a connection object; only records the action."""

    def __init__(self, action):
        self.action = action
StorageMockHttp = MockHttp
def make_response(status=200, headers=None, connection=None):
    """
    Build a libcloud ``Response`` wrapping a synthetic ``requests.Response``.

    :param status: HTTP status code for the fake response.
    :param headers: Optional dict of response headers (defaults to empty).
    :param connection: Connection object to attach to the Response.
    :return: A ``libcloud.common.base.Response`` instance.
    """
    # BUGFIX: the original signature used a mutable default argument
    # (``headers={}``); the same dict object was shared across all calls, so
    # a test mutating one fake response's headers leaked into later ones.
    if headers is None:
        headers = {}
    response = requests.Response()
    response.status_code = status
    response.headers = headers
    return Response(response, connection)
def generate_random_data(size):
    """
    Return a string of ``size`` random decimal digit characters.

    :param size: Desired length of the generated string.
    :type size: ``int``

    :return: Random digit string of exactly ``size`` characters
             (empty for ``size`` <= 0).
    :rtype: ``str``
    """
    # str(random.randint(0, 9)) is always exactly one character, so joining
    # ``size`` of them preserves the original behavior while avoiding the
    # quadratic string concatenation of the accumulate-in-a-loop approach.
    return ''.join(str(random.randint(0, 9)) for _ in range(size))
# Allow running this module directly to execute any doctests it contains.
if __name__ == "__main__":
    import doctest
    doctest.testmod()
| apache-2.0 |
DazWorrall/ansible | lib/ansible/utils/module_docs_fragments/sros.py | 224 | 2754 | #
# (c) 2015, Peter Sprygada <psprygada@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class ModuleDocFragment(object):
    """Reusable documentation fragment describing the shared ``provider``
    connection options for SR OS network modules."""

    # Standard files documentation fragment
    DOCUMENTATION = """
options:
  provider:
    description:
      - A dict object containing connection details.
    default: null
    suboptions:
      host:
        description:
          - Specifies the DNS host name or address for connecting to the remote
            device over the specified transport. The value of host is used as
            the destination address for the transport.
        required: true
      port:
        description:
          - Specifies the port to use when building the connection to the remote
            device.
        default: 22
      username:
        description:
          - Configures the username to use to authenticate the connection to
            the remote device. This value is used to authenticate
            the SSH session. If the value is not specified in the task, the
            value of environment variable C(ANSIBLE_NET_USERNAME) will be used instead.
      password:
        description:
          - Specifies the password to use to authenticate the connection to
            the remote device. This value is used to authenticate
            the SSH session. If the value is not specified in the task, the
            value of environment variable C(ANSIBLE_NET_PASSWORD) will be used instead.
        default: null
      timeout:
        description:
          - Specifies the timeout in seconds for communicating with the network device
            for either connecting or sending commands. If the timeout is
            exceeded before the operation is completed, the module will error.
        default: 10
      ssh_keyfile:
        description:
          - Specifies the SSH key to use to authenticate the connection to
            the remote device. This value is the path to the
            key used to authenticate the SSH session. If the value is not specified
            in the task, the value of environment variable C(ANSIBLE_NET_SSH_KEYFILE)
            will be used instead.
"""
| gpl-3.0 |
EDUlib/edx-platform | common/djangoapps/third_party_auth/saml.py | 1 | 23124 | """
Slightly customized python-social-auth backend for SAML 2.0 support
"""
import logging
from copy import deepcopy
import requests
from django.contrib.sites.models import Site
from django.http import Http404
from django.utils.functional import cached_property
from django_countries import countries
from onelogin.saml2.settings import OneLogin_Saml2_Settings
from social_core.backends.saml import OID_EDU_PERSON_ENTITLEMENT, SAMLAuth, SAMLIdentityProvider
from social_core.exceptions import AuthForbidden
from openedx.core.djangoapps.theming.helpers import get_current_request
from common.djangoapps.third_party_auth.exceptions import IncorrectConfigurationException
STANDARD_SAML_PROVIDER_KEY = 'standard_saml_provider'
SAP_SUCCESSFACTORS_SAML_KEY = 'sap_success_factors'
log = logging.getLogger(__name__)
class SAMLAuthBackend(SAMLAuth):  # pylint: disable=abstract-method
    """
    Customized version of SAMLAuth that gets the list of IdPs from common.djangoapps.third_party_auth's list of
    enabled providers.
    """
    name = "tpa-saml"

    def get_idp(self, idp_name):
        """ Given the name of an IdP, get a SAMLIdentityProvider instance """
        # Imported locally to avoid a circular import at module load time.
        from .models import SAMLProviderConfig
        return SAMLProviderConfig.current(idp_name).get_config()

    def setting(self, name, default=None):
        """ Get a setting, from SAMLConfiguration """
        try:
            return self._config.get_setting(name)
        except KeyError:
            # Fall back to the python-social-auth strategy settings.
            return self.strategy.setting(name, default, backend=self)

    def get_idp_setting(self, idp, name, default=None):
        """ Get a setting from the IdP's own SAML configuration, falling back to setting(). """
        try:
            return idp.saml_sp_configuration.get_setting(name)
        except KeyError:
            return self.setting(name, default)

    def generate_saml_config(self, idp=None):
        """
        Override of SAMLAuth.generate_saml_config to use an idp's configured saml_sp_configuration if given.
        """
        if idp:
            abs_completion_url = self.redirect_uri
            config = {
                'contactPerson': {
                    'technical': self.get_idp_setting(idp, 'TECHNICAL_CONTACT'),
                    'support': self.get_idp_setting(idp, 'SUPPORT_CONTACT')
                },
                'debug': True,
                'idp': idp.saml_config_dict if idp else {},
                'organization': self.get_idp_setting(idp, 'ORG_INFO'),
                'security': {
                    'metadataValidUntil': '',
                    'metadataCacheDuration': 'P10D',  # metadata valid for ten days
                },
                'sp': {
                    'assertionConsumerService': {
                        'url': abs_completion_url,
                        # python-saml only supports HTTP-POST
                        'binding': 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST'
                    },
                    'entityId': self.get_idp_setting(idp, 'SP_ENTITY_ID'),
                    'x509cert': self.get_idp_setting(idp, 'SP_PUBLIC_CERT'),
                    'privateKey': self.get_idp_setting(idp, 'SP_PRIVATE_KEY'),
                },
                'strict': True,  # We must force strict mode - for security
            }
            config["security"].update(self.get_idp_setting(idp, "SECURITY_CONFIG", {}))
            config["sp"].update(self.get_idp_setting(idp, "SP_EXTRA", {}))
            return config
        else:
            return super().generate_saml_config()

    def get_user_id(self, details, response):
        """
        Calling the parent function and handling the exception properly.
        """
        try:
            return super().get_user_id(details, response)
        except KeyError as ex:
            # BUGFIX: Python 3 exceptions have no ``.message`` attribute, so
            # the original ``ex.message`` raised AttributeError inside this
            # handler, masking the real configuration problem. ``str(ex)``
            # provides the same information portably.
            log.warning(
                '[THIRD_PARTY_AUTH] Error in SAML authentication flow. '
                'Provider: {idp_name}, Message: {message}'.format(
                    message=str(ex),
                    idp_name=response.get('idp_name')
                )
            )
            raise IncorrectConfigurationException(self)  # lint-amnesty, pylint: disable=raise-missing-from

    def generate_metadata_xml(self, idp_name=None):  # pylint: disable=arguments-differ
        """
        Override of SAMLAuth.generate_metadata_xml to accept an optional idp parameter.

        Returns a (metadata, errors) tuple from python3-saml's settings object.
        """
        idp = self.get_idp(idp_name) if idp_name else None
        config = self.generate_saml_config(idp)
        saml_settings = OneLogin_Saml2_Settings(
            config,
            sp_validation_only=True
        )
        metadata = saml_settings.get_sp_metadata()
        errors = saml_settings.validate_metadata(metadata)
        return metadata, errors

    def auth_url(self):
        """
        Check that SAML is enabled and that the request includes an 'idp'
        parameter before getting the URL to which we must redirect in order to
        authenticate the user.

        raise Http404 if SAML authentication is disabled.
        """
        if not self._config.enabled:
            log.error('[THIRD_PARTY_AUTH] SAML authentication is not enabled')
            raise Http404

        return super().auth_url()

    def disconnect(self, *args, **kwargs):
        """
        Override of SAMLAuth.disconnect to unlink the learner from enterprise customer if associated.
        """
        from openedx.features.enterprise_support.api import unlink_enterprise_user_from_idp
        user = kwargs.get('user', None)
        unlink_enterprise_user_from_idp(self.strategy.request, user, self.name)
        return super().disconnect(*args, **kwargs)

    def _check_entitlements(self, idp, attributes):
        """
        Check if we require the presence of any specific eduPersonEntitlement.

        raise AuthForbidden if the user should not be authenticated, or do nothing
        to allow the login pipeline to continue.
        """
        if "requiredEntitlements" in idp.conf:
            entitlements = attributes.get(OID_EDU_PERSON_ENTITLEMENT, [])
            for expected in idp.conf['requiredEntitlements']:
                if expected not in entitlements:
                    log.warning(
                        '[THIRD_PARTY_AUTH] SAML user rejected due to missing eduPersonEntitlement. '
                        'Provider: {provider}, Entitlement: {entitlement}'.format(
                            provider=idp.name,
                            entitlement=expected)
                    )
                    raise AuthForbidden(self)

    def _create_saml_auth(self, idp):
        """
        Get an instance of OneLogin_Saml2_Auth

        idp: The Identity Provider - a social_core.backends.saml.SAMLIdentityProvider instance
        """
        # We only override this method so that we can add extra debugging when debug_mode is True
        # Note that auth_inst is instantiated just for the current HTTP request, then is destroyed
        auth_inst = super()._create_saml_auth(idp)
        from .models import SAMLProviderConfig
        if SAMLProviderConfig.current(idp.name).debug_mode:

            def wrap_with_logging(method_name, action_description, xml_getter, request_data, next_url):
                """ Wrap the request and response handlers to add debug mode logging """
                method = getattr(auth_inst, method_name)

                def wrapped_method(*args, **kwargs):
                    """ Wrapped login or process_response method """
                    result = method(*args, **kwargs)
                    log.info(
                        "SAML login %s for IdP %s. Data: %s. Next url %s. XML is:\n%s",
                        action_description, idp.name, request_data, next_url, xml_getter()
                    )
                    return result
                setattr(auth_inst, method_name, wrapped_method)

            request_data = self.strategy.request_data()
            next_url = self.strategy.session_get('next')
            wrap_with_logging("login", "request", auth_inst.get_last_request_xml, request_data, next_url)
            wrap_with_logging("process_response", "response", auth_inst.get_last_response_xml, request_data, next_url)

        return auth_inst

    @cached_property
    def _config(self):
        # SAMLConfiguration for the current site; cached for the request lifetime.
        from .models import SAMLConfiguration
        return SAMLConfiguration.current(Site.objects.get_current(get_current_request()), 'default')
class EdXSAMLIdentityProvider(SAMLIdentityProvider):
    """
    Customized version of SAMLIdentityProvider that can retrieve details beyond the standard
    details supported by the canonical upstream version.
    """

    def get_user_details(self, attributes):
        """
        Build the base user-details dict from the SAML attributes, then add
        one entry per configured extra field definition (None when the SAML
        response did not carry the attribute).
        """
        details = super().get_user_details(attributes)
        for definition in self.conf.get('extra_field_definitions', []):
            urn = definition['urn']
            details[definition['name']] = attributes[urn][0] if urn in attributes else None
        return details

    def get_attr(self, attributes, conf_key, default_attribute):
        """
        Internal helper method.
        Get the attribute 'default_attribute' out of the attributes,
        unless self.conf[conf_key] overrides the default by specifying
        another attribute to use.
        """
        key = self.conf.get(conf_key, default_attribute)
        if key not in attributes:
            return self.conf['attr_defaults'].get(conf_key) or None
        try:
            return attributes[key][0]
        except IndexError:
            log.warning('[THIRD_PARTY_AUTH] SAML attribute value not found. '
                        'SamlAttribute: {attribute}'.format(attribute=key))
            return self.conf['attr_defaults'].get(conf_key) or None

    @property
    def saml_sp_configuration(self):
        """Get the SAMLConfiguration for this IdP"""
        return self.conf['saml_sp_configuration']
class SapSuccessFactorsIdentityProvider(EdXSAMLIdentityProvider):
"""
Customized version of EdXSAMLIdentityProvider that knows how to retrieve user details
from the SAPSuccessFactors OData API, rather than parse them directly off the
SAML assertion that we get in response to a login attempt.
"""
required_variables = (
'sapsf_oauth_root_url',
'sapsf_private_key',
'odata_api_root_url',
'odata_company_id',
'odata_client_id',
)
# Define the relationships between SAPSF record fields and Open edX logistration fields.
default_field_mapping = {
'firstName': ['username', 'first_name'],
'lastName': 'last_name',
'defaultFullName': 'fullname',
'email': 'email',
'country': 'country',
}
defaults_value_mapping = {
'defaultFullName': 'attr_full_name',
'firstName': 'attr_first_name',
'lastName': 'attr_last_name',
'username': 'attr_username',
'email': 'attr_email',
}
# Define a simple mapping to relate SAPSF values to Open edX-compatible values for
# any given field. By default, this only contains the Country field, as SAPSF supplies
# a country name, which has to be translated to a country code.
default_value_mapping = {
'country': {name: code for code, name in countries}
}
# Unfortunately, not everything has a 1:1 name mapping between Open edX and SAPSF, so
# we need some overrides. TODO: Fill in necessary mappings
default_value_mapping.update({
'United States': 'US',
})
def get_registration_fields(self, response):
"""
Get a dictionary mapping registration field names to default values.
"""
field_mapping = self.field_mappings
value_defaults = self.conf.get('attr_defaults', {})
value_defaults = {key: value_defaults.get(value, '') for key, value in self.defaults_value_mapping.items()}
registration_fields = {}
for odata_name, edx_name in field_mapping.items():
if isinstance(edx_name, list):
for value in edx_name:
registration_fields[value] = response['d'].get(odata_name, value_defaults.get(odata_name, ''))
else:
registration_fields[edx_name] = response['d'].get(odata_name, value_defaults.get(odata_name, ''))
value_mapping = self.value_mappings
for field, value in registration_fields.items():
if field in value_mapping and value in value_mapping[field]:
registration_fields[field] = value_mapping[field][value]
return registration_fields
@property
def field_mappings(self):
"""
Get a dictionary mapping the field names returned in an SAP SuccessFactors
user entity to the field names with which those values should be used in
the Open edX registration form.
"""
overrides = self.conf.get('sapsf_field_mappings', {})
base = self.default_field_mapping.copy()
base.update(overrides)
return base
@property
def value_mappings(self):
"""
Get a dictionary mapping of field names to override objects which each
map values received from SAP SuccessFactors to values expected in the
Open edX platform registration form.
"""
overrides = self.conf.get('sapsf_value_mappings', {})
base = deepcopy(self.default_value_mapping)
for field, override in overrides.items():
if field in base:
base[field].update(override)
else:
base[field] = override[field]
return base
@property
def timeout(self):
"""
The number of seconds OData API requests should wait for a response before failing.
"""
return self.conf.get('odata_api_request_timeout', 10)
@property
def sapsf_idp_url(self):
return self.conf['sapsf_oauth_root_url'] + 'idp'
@property
def sapsf_token_url(self):
return self.conf['sapsf_oauth_root_url'] + 'token'
@property
def sapsf_private_key(self):
return self.conf['sapsf_private_key']
@property
def odata_api_root_url(self):
return self.conf['odata_api_root_url']
@property
def odata_company_id(self):
return self.conf['odata_company_id']
@property
def odata_client_id(self):
return self.conf['odata_client_id']
@property
def oauth_user_id(self):
return self.conf.get('oauth_user_id')
def invalid_configuration(self):
"""
Check that we have all the details we need to properly retrieve rich data from the
SAP SuccessFactors BizX OData API. If we don't, then we should log a warning indicating
the specific variables that are missing.
"""
if not all(var in self.conf for var in self.required_variables):
missing = [var for var in self.required_variables if var not in self.conf]
log.warning(
'[THIRD_PARTY_AUTH] To retrieve rich user data for a SAP SuccessFactors identity provider, '
'the following keys in other_settings are required, but were missing. MissingKeys: {keys}'.format(
keys=missing
)
)
return missing
def log_bizx_api_exception(self, transaction_data, err):
    """
    Log a warning (with traceback) describing a failed BizX API call.

    ``transaction_data`` supplies the request context keys; ``err`` is the
    exception raised by the request, which may or may not carry an HTTP
    response object.
    """
    def _response_attr(getter):
        # The exception may have no .response (connection errors), so any
        # missing attribute degrades to a placeholder string.
        try:
            return getter()
        except AttributeError:
            return 'Not available'

    sys_msg = _response_attr(lambda: err.response.content)
    headers = _response_attr(lambda: err.response.headers)
    token_data = transaction_data.get('token_data') or 'Not available'
    log_msg = (
        'SAPSuccessFactors exception received for {operation_name} request. '
        'URL: {url} '
        'Company ID: {company_id}. '
        'User ID: {user_id}. '
        'Error message: {err_msg}. '
        'System message: {sys_msg}. '
        'Headers: {headers}. '
        'Token Data: {token_data}.'
    ).format(
        operation_name=transaction_data['operation_name'],
        url=transaction_data['endpoint_url'],
        company_id=transaction_data['company_id'],
        user_id=transaction_data['user_id'],
        err_msg=str(err),
        sys_msg=sys_msg,
        headers=headers,
        token_data=token_data,
    )
    log.warning(log_msg, exc_info=True)
def generate_bizx_oauth_api_saml_assertion(self, user_id):
    """
    Obtain a SAML assertion from the SAP SuccessFactors BizX OAuth2 identity
    provider service, using the "Advanced Settings" configuration.

    The OAuth user_id from Advanced Settings takes precedence; otherwise the
    current user's id is used. Returns the assertion text, or None when the
    request fails (the failure is logged).
    """
    effective_user_id = self.oauth_user_id or user_id
    request_payload = {
        'token_url': self.sapsf_token_url,
        'client_id': self.odata_client_id,
        'user_id': effective_user_id,
        'private_key': self.sapsf_private_key,
    }
    session = requests.Session()
    try:
        response = session.post(
            self.sapsf_idp_url,
            data=request_payload,
            timeout=self.timeout,
        )
        response.raise_for_status()
    except requests.RequestException as err:
        # Reuse the payload dict as the logging context.
        request_payload['operation_name'] = 'generate_bizx_oauth_api_saml_assertion'
        request_payload['endpoint_url'] = self.sapsf_idp_url
        request_payload['company_id'] = self.odata_company_id
        self.log_bizx_api_exception(request_payload, err)
        return None
    return response.text
def generate_bizx_oauth_api_access_token(self, user_id):
    """
    Request a new access token from the SuccessFactors BizX OAuth2 identity
    provider service using a valid SAML assertion (see
    generate_bizx_oauth_api_saml_assertion) and the information specified in
    the third party authentication configuration "Advanced Settings" section.

    Returns the decoded JSON token payload, or None on failure.
    """
    saml_assertion = self.generate_bizx_oauth_api_saml_assertion(user_id)
    if not saml_assertion:
        return None
    request_payload = {
        'client_id': self.odata_client_id,
        'company_id': self.odata_company_id,
        'grant_type': 'urn:ietf:params:oauth:grant-type:saml2-bearer',
        'assertion': saml_assertion,
    }
    session = requests.Session()
    try:
        token_response = session.post(
            self.sapsf_token_url,
            data=request_payload,
            timeout=self.timeout,
        )
        token_response.raise_for_status()
    except requests.RequestException as err:
        # Reuse the payload dict as the logging context.
        request_payload['operation_name'] = 'generate_bizx_oauth_api_access_token'
        request_payload['endpoint_url'] = self.sapsf_token_url
        request_payload['user_id'] = user_id
        self.log_bizx_api_exception(request_payload, err)
        return None
    return token_response.json()
def get_bizx_odata_api_client(self, user_id):
    """
    Return a requests.Session pre-authorized (Bearer token, JSON Accept
    header) for the BizX OData API, or None when no token could be obtained.
    The raw token payload is stashed on the session as ``token_data``.
    """
    access_token_data = self.generate_bizx_oauth_api_access_token(user_id)
    if not access_token_data:
        return None
    token_string = access_token_data['access_token']
    client = requests.Session()
    client.headers.update({'Authorization': f'Bearer {token_string}', 'Accept': 'application/json'})
    client.token_data = access_token_data
    return client
def get_user_details(self, attributes):
    """
    Attempt to get rich user details from the SAP SuccessFactors OData API. If we're missing any
    of the info we need to do that, or if the request triggers an exception, then fail nicely by
    returning the basic user details we're able to extract from just the SAML response.
    """
    basic_details = super().get_user_details(attributes)
    if self.invalid_configuration():
        # Missing required settings: fall back to SAML-only details.
        return basic_details
    user_id = basic_details['username']
    # endpoint_url is constructed from field_mappings setting of SAML Provider config.
    # We convert field_mappings to make comma separated list of the fields which needs to be pulled from BizX
    fields = ','.join(self.field_mappings)
    endpoint_url = '{root_url}User(userId=\'{user_id}\')?$select={fields}'.format(
        root_url=self.odata_api_root_url,
        user_id=user_id,
        fields=fields,
    )
    client = self.get_bizx_odata_api_client(user_id=user_id)
    if not client:
        # Token acquisition failed (already logged); degrade gracefully.
        return basic_details
    try:
        response = client.get(
            endpoint_url,
            timeout=self.timeout,
        )
        response.raise_for_status()
        response = response.json()
    except requests.RequestException as err:
        transaction_data = {
            'operation_name': 'get_user_details',
            'endpoint_url': endpoint_url,
            'user_id': user_id,
            'company_id': self.odata_company_id,
            'token_data': client.token_data,
        }
        self.log_bizx_api_exception(transaction_data, err)
        return basic_details
    log.info('[THIRD_PARTY_AUTH] BizX Odata response for user [%s] %s', user_id, response)
    return self.get_registration_fields(response)
def get_saml_idp_choices():
    """
    Get a list of the available SAMLIdentityProvider subclasses that can be used to process
    SAML requests, for use in the Django administration form.
    """
    standard = (STANDARD_SAML_PROVIDER_KEY, 'Standard SAML provider')
    successfactors = (SAP_SUCCESSFACTORS_SAML_KEY, 'SAP SuccessFactors provider')
    return (standard, successfactors)
def get_saml_idp_class(idp_identifier_string):
    """
    Given a string ID indicating the type of identity provider in use during a given request, return
    the SAMLIdentityProvider subclass able to handle requests for that type of identity provider.

    Unknown identifiers are logged and fall back to EdXSAMLIdentityProvider.
    """
    registry = {
        STANDARD_SAML_PROVIDER_KEY: EdXSAMLIdentityProvider,
        SAP_SUCCESSFACTORS_SAML_KEY: SapSuccessFactorsIdentityProvider,
    }
    if idp_identifier_string not in registry:
        log.error(
            '[THIRD_PARTY_AUTH] Invalid EdXSAMLIdentityProvider subclass--'
            'using EdXSAMLIdentityProvider base class. Provider: {provider}'.format(provider=idp_identifier_string)
        )
    return registry.get(idp_identifier_string, EdXSAMLIdentityProvider)
| agpl-3.0 |
moollaza/zeroclickinfo-goodies | share/goodie/currency_in/parse.py | 87 | 2347 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Released under the GPL v2 license
# https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
import lxml.html
import sys
#url = "http://en.wikipedia.org/wiki/List_of_circulating_currencies"
# NOTE(review): ``url`` documents the scrape source only; the parser below
# reads the pre-downloaded copy at download/page.dat.
url = "https://secure.wikimedia.org/wikipedia/en/wiki/List_of_circulating_currencies"
countries = {}; # country:[[currency, code] [currency, code],...]
country = "" # store current country for each row
currency = "" # store current currency for each row
iso_code = "" # store current iso code for currency
description = "" # store currency and iso code when saving into file
def add_currency(country, currency, iso_code, countries):
    """Register one [currency, iso_code] pair under the (utf-8 encoded) country."""
    key = country.encode("utf8")
    countries.setdefault(key, []).append([currency, iso_code])
def clear_text(text):
    """Strip a trailing annotation like '[A]' (e.g. 'Ascension pound[A]')."""
    # Everything before the first '[' -- the whole string when there is none.
    return text.split("[", 1)[0]
# Walk every sortable wikitable row; 6-cell rows start a new country,
# 5-cell rows are continuation rows for the current country.
tree = lxml.html.parse("download/page.dat").getroot()
tables = tree.find_class("wikitable sortable")
for table in tables:
    for row in table.findall('tr'):
        cells = row.findall('td')
        if len(cells) == 6:
            country = cells[0].text_content()
            currency = cells[1].text_content()
            iso_code = cells[3].text_content()
        if len(cells) == 5:
            currency = cells[0].text_content()
            iso_code = cells[2].text_content()
        currency = clear_text(currency)
        iso_code = iso_code if iso_code != "None" else ""
        if currency != "None" and currency != "":
            # country[1:] drops the first character -- presumably a leading
            # newline emitted by text_content(); TODO confirm against the page.
            add_currency(country[1:], currency, iso_code, countries)
# Write 'currency.txt' as the Perl-'hash-table' input for the CurrencyIn.pm
# module: for each country, the lowercased name on one line, then a
# comma-separated "Currency (ISO)" list on the next.
output = "currency.txt"
with open(output, "w") as f:
    # with-block replaces the old manual open/close; also dropped the unused
    # ``result = []`` accumulator and the dead ``description = ""`` reset.
    for country in sorted(countries):
        formated_record = []
        for record in countries[country]:
            iso_code = "" if record[1] == "" else (" (" + record[1] + ")")
            currency = record[0]
            formated_record.append((currency + iso_code).encode("utf8"))
        description = ','.join(str(x) for x in formated_record)
        f.write(country.lower() + "\n" + description + "\n")
| apache-2.0 |
digris/openbroadcast.org | website/apps/abcast/signals.py | 2 | 1470 | from __future__ import unicode_literals
import logging
from django.apps import apps
from django.db.models.signals import post_save
from django.dispatch import receiver, Signal
from django_elasticsearch_dsl.registries import registry
from .utils import notify
from .models import Emission, Channel
log = logging.getLogger(__name__)

# provided signals
# Fired when playout of an object starts; handlers receive the content-type
# string ("app.Model"), the object uuid and the emission uuid.
playout_started = Signal(providing_args=["obj_ct", "obj_uuid", "emission_uuid"])
@receiver(post_save, sender=Emission)
def emission_post_save(sender, instance, created, **kwargs):
    """Reindex the emission's content object in Elasticsearch on every save."""
    content_object = instance.content_object
    log.debug("emission saved - {} - {}".format(instance, content_object))
    registry.update(content_object)
@receiver(post_save, sender=Channel)
def channel_post_save(sender, instance, created, **kwargs):
    """Push now-playing metadata for the saved channel (best-effort)."""
    log.debug("channel saved - {}".format(instance))
    try:
        notify.start_play(instance.on_air, instance)
    except Exception as exc:
        # Metadata push is non-critical; log and carry on.
        # TODO: handle specific exception types instead of a blanket catch.
        log.warning("unable to update metadata: {}".format(exc))
    # registry.update(instance.content_object)
@receiver(playout_started)
def _playout_started(sender, obj_ct, obj_uuid, emission_uuid, **kwargs):
    """Reindex both the emission's content object and the played object."""
    log.debug("_playout_started - {}".format(kwargs))
    current_emission = Emission.objects.get(uuid=emission_uuid)
    registry.update(current_emission.content_object)
    played_model = apps.get_model(*obj_ct.split("."))
    registry.update(played_model.objects.get(uuid=obj_uuid))
| gpl-3.0 |
jumpstarter-io/nova | nova/tests/fake_network.py | 10 | 18653 | # Copyright 2011 Rackspace
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
from nova.compute import api as compute_api
from nova.compute import manager as compute_manager
import nova.context
from nova import db
from nova import exception
from nova.network import api as network_api
from nova.network import manager as network_manager
from nova.network import model as network_model
from nova.network import nova_ipam_lib
from nova.network import rpcapi as network_rpcapi
from nova import objects
from nova.objects import base as obj_base
from nova.objects import virtual_interface as vif_obj
from nova.openstack.common import jsonutils
from nova.pci import pci_device
from nova.tests.objects import test_fixed_ip
from nova.tests.objects import test_instance_info_cache
from nova.tests.objects import test_pci_device
# Hostname used for every fake network manager in these fixtures.
HOST = "testhost"

CONF = cfg.CONF
# Ensure the use_ipv6 option is registered before the fixtures read it.
CONF.import_opt('use_ipv6', 'nova.netconf')
class FakeModel(dict):
    """Represent a model from the db."""

    def __init__(self, *args, **kwargs):
        # Positional args are accepted for signature compatibility but are
        # ignored; keyword args become the row's "columns".
        for key, value in kwargs.items():
            self[key] = value
class FakeNetworkManager(network_manager.NetworkManager):
    """This NetworkManager doesn't call the base class so we can bypass all
    inherited service cruft and just perform unit tests.
    """

    class FakeDB:
        """In-memory stand-in for the nova DB API used by the manager."""

        # Three fake virtual interfaces, one per fake instance/network.
        vifs = [{'id': 0,
                 'created_at': None,
                 'updated_at': None,
                 'deleted_at': None,
                 'deleted': 0,
                 'instance_uuid': '00000000-0000-0000-0000-000000000010',
                 'network_id': 1,
                 'uuid': 'fake-uuid',
                 'address': 'DC:AD:BE:FF:EF:01'},
                {'id': 1,
                 'created_at': None,
                 'updated_at': None,
                 'deleted_at': None,
                 'deleted': 0,
                 'instance_uuid': '00000000-0000-0000-0000-000000000020',
                 'network_id': 21,
                 'uuid': 'fake-uuid2',
                 'address': 'DC:AD:BE:FF:EF:02'},
                {'id': 2,
                 'created_at': None,
                 'updated_at': None,
                 'deleted_at': None,
                 'deleted': 0,
                 'instance_uuid': '00000000-0000-0000-0000-000000000030',
                 'network_id': 31,
                 'uuid': 'fake-uuid3',
                 'address': 'DC:AD:BE:FF:EF:03'}]

        floating_ips = [dict(address='172.16.1.1',
                             fixed_ip_id=100),
                        dict(address='172.16.1.2',
                             fixed_ip_id=200),
                        dict(address='173.16.1.2',
                             fixed_ip_id=210)]

        # Fixed ips; virtual_interface_id links each one to a vif above.
        fixed_ips = [dict(test_fixed_ip.fake_fixed_ip,
                          id=100,
                          address='172.16.0.1',
                          virtual_interface_id=0),
                     dict(test_fixed_ip.fake_fixed_ip,
                          id=200,
                          address='172.16.0.2',
                          virtual_interface_id=1),
                     dict(test_fixed_ip.fake_fixed_ip,
                          id=210,
                          address='173.16.0.2',
                          virtual_interface_id=2)]

        def fixed_ip_get_by_instance(self, context, instance_uuid):
            # Canned addresses regardless of the instance asked for.
            return [dict(address='10.0.0.0'), dict(address='10.0.0.1'),
                    dict(address='10.0.0.2')]

        def network_get_by_cidr(self, context, cidr):
            # Simulate "no such network" for every cidr lookup.
            raise exception.NetworkNotFoundForCidr(cidr=cidr)

        def network_create_safe(self, context, net):
            # Echo the network back with a fixed fake id.
            fakenet = dict(net)
            fakenet['id'] = 999
            return fakenet

        def network_get(self, context, network_id, project_only="allow_none"):
            return {'cidr_v6': '2001:db8:69:%x::/64' % network_id}

        def network_get_by_uuid(self, context, network_uuid):
            raise exception.NetworkNotFoundForUUID(uuid=network_uuid)

        def network_get_all(self, context):
            raise exception.NoNetworksFound()

        def network_get_all_by_uuids(self, context, project_only="allow_none"):
            raise exception.NoNetworksFound()

        def network_disassociate(self, context, network_id):
            return True

        def virtual_interface_get_all(self, context):
            return self.vifs

        def fixed_ips_by_virtual_interface(self, context, vif_id):
            return [ip for ip in self.fixed_ips
                    if ip['virtual_interface_id'] == vif_id]

        def fixed_ip_disassociate(self, context, address):
            return True

    def __init__(self, stubs=None):
        self.db = self.FakeDB()
        if stubs:
            stubs.Set(vif_obj, 'db', self.db)
        # Recorded by deallocate_fixed_ip() so tests can assert on calls.
        self.deallocate_called = None
        self.deallocate_fixed_ip_calls = []
        self.network_rpcapi = network_rpcapi.NetworkAPI()

    # TODO(matelakat) method signature should align with the faked one's
    def deallocate_fixed_ip(self, context, address=None, host=None,
                            instance=None):
        self.deallocate_fixed_ip_calls.append((context, address, host))
        # TODO(matelakat) use the deallocate_fixed_ip_calls instead
        self.deallocate_called = address

    def _create_fixed_ips(self, context, network_id, fixed_cidr=None,
                          extra_reserved=None, bottom_reserved=0,
                          top_reserved=0):
        # No-op: tests never need real fixed-ip rows created.
        pass
def get_instance_nw_info(context, instance_id, rxtx_factor,
                         host, instance_uuid=None, **kwargs):
    """Module-level no-op stand-in for the network API call of the same name."""
    pass
def fake_network(network_id, ipv6=None):
    """Build a fake network dict whose addresses embed ``network_id``.

    ``ipv6`` defaults to the CONF.use_ipv6 setting; when truthy, v6 cidr,
    gateway and netmask fields are filled in as well.
    """
    if ipv6 is None:
        ipv6 = CONF.use_ipv6
    fake_network = {'id': network_id,
                    'uuid': '00000000-0000-0000-0000-00000000000000%02d' % network_id,
                    'label': 'test%d' % network_id,
                    'injected': False,
                    'multi_host': False,
                    'cidr': '192.168.%d.0/24' % network_id,
                    'cidr_v6': None,
                    'netmask': '255.255.255.0',
                    'netmask_v6': None,
                    'bridge': 'fake_br%d' % network_id,
                    'bridge_interface': 'fake_eth%d' % network_id,
                    'gateway': '192.168.%d.1' % network_id,
                    'gateway_v6': None,
                    'broadcast': '192.168.%d.255' % network_id,
                    'dns1': '192.168.%d.3' % network_id,
                    'dns2': '192.168.%d.4' % network_id,
                    'dns3': '192.168.%d.3' % network_id,
                    'vlan': None,
                    'host': None,
                    'project_id': 'fake_project',
                    'vpn_public_address': '192.168.%d.2' % network_id,
                    'vpn_public_port': None,
                    'vpn_private_address': None,
                    'dhcp_start': None,
                    'rxtx_base': network_id * 10,
                    'priority': None,
                    'deleted': False,
                    'created_at': None,
                    'updated_at': None,
                    'deleted_at': None,
                    'mtu': None,
                    'dhcp_server': '192.168.%d.1' % network_id,
                    'enable_dhcp': True,
                    'share_address': False}
    if ipv6:
        fake_network['cidr_v6'] = '2001:db8:0:%x::/64' % network_id
        fake_network['gateway_v6'] = '2001:db8:0:%x::1' % network_id
        fake_network['netmask_v6'] = '64'
    if CONF.flat_injected:
        fake_network['injected'] = True

    return fake_network
def vifs(n):
    """Yield ``n`` fake virtual-interface dicts with ids 1..n (Python 2 code)."""
    for x in xrange(1, n + 1):
        yield {'id': x,
               'created_at': None,
               'updated_at': None,
               'deleted_at': None,
               'deleted': 0,
               'address': 'DE:AD:BE:EF:00:%02x' % x,
               'uuid': '00000000-0000-0000-0000-00000000000000%02d' % x,
               'network_id': x,
               'instance_uuid': 'fake-uuid'}


def floating_ip_ids():
    """Generator of floating-ip ids 1..99."""
    for i in xrange(1, 100):
        yield i


def fixed_ip_ids():
    """Generator of fixed-ip ids 1..99."""
    for i in xrange(1, 100):
        yield i


# Module-level id generators; fake_get_instance_nw_info() resets them so
# each test run starts numbering from 1 again.
floating_ip_id = floating_ip_ids()
fixed_ip_id = fixed_ip_ids()


def next_fixed_ip(network_id, num_floating_ips=0):
    """Build the next fake fixed-ip dict for ``network_id`` with the
    requested number of attached floating ips (uses Python 2 gen.next())."""
    next_id = fixed_ip_id.next()
    f_ips = [FakeModel(**next_floating_ip(next_id))
             for i in xrange(num_floating_ips)]
    return {'id': next_id,
            'network_id': network_id,
            'address': '192.168.%d.%03d' % (network_id, (next_id + 99)),
            'instance_uuid': 1,
            'allocated': False,
            # and since network_id and vif_id happen to be equivalent
            'virtual_interface_id': network_id,
            'floating_ips': f_ips}


def next_floating_ip(fixed_ip_id):
    """Build the next fake floating-ip dict attached to ``fixed_ip_id``."""
    next_id = floating_ip_id.next()
    return {'id': next_id,
            'address': '10.10.10.%03d' % (next_id + 99),
            'fixed_ip_id': fixed_ip_id,
            'project_id': None,
            'auto_assigned': False}
def ipv4_like(ip, match_string):
    """Return True when ``ip`` matches ``match_string``, where '*' in the
    pattern is a wildcard octet (e.g. '192.168.*.*')."""
    octets = ip.split('.')
    for idx, wanted in enumerate(match_string.split('.')):
        if wanted == '*':
            continue
        if wanted != octets[idx]:
            return False
    return True
def fake_get_instance_nw_info(stubs, num_networks=1, ips_per_vif=2,
                              floating_ips_per_fixed_ip=0):
    """Stub the DB/IPAM layers and return a real NetworkInfo model built
    from the module's fake fixtures."""
    # stubs is the self.stubs from the test
    # ips_per_vif is the number of ips each vif will have
    # num_floating_ips is number of float ips for each fixed ip
    network = network_manager.FlatManager(host=HOST)
    network.db = db

    # reset the fixed and floating ip generators
    global floating_ip_id, fixed_ip_id, fixed_ips
    floating_ip_id = floating_ip_ids()
    fixed_ip_id = fixed_ip_ids()
    fixed_ips = []

    networks = [fake_network(x) for x in xrange(1, num_networks + 1)]

    def fixed_ips_fake(*args, **kwargs):
        # Builds ips_per_vif fixed ips for every network and caches them in
        # the module-global ``fixed_ips`` for floating_ips_fake below.
        global fixed_ips
        ips = [next_fixed_ip(i, floating_ips_per_fixed_ip)
               for i in xrange(1, num_networks + 1)
               for j in xrange(ips_per_vif)]
        fixed_ips = ips
        return ips

    def floating_ips_fake(context, address):
        for ip in fixed_ips:
            if address == ip['address']:
                return ip['floating_ips']
        return []

    def fixed_ips_v6_fake():
        return ['2001:db8:0:%x::1' % i
                for i in xrange(1, num_networks + 1)]

    def virtual_interfaces_fake(*args, **kwargs):
        return [vif for vif in vifs(num_networks)]

    def vif_by_uuid_fake(context, uuid):
        return {'id': 1,
                'address': 'DE:AD:BE:EF:00:01',
                'uuid': uuid,
                'network_id': 1,
                'network': None,
                'instance_uuid': 'fake-uuid'}

    def network_get_fake(context, network_id, project_only='allow_none'):
        nets = [n for n in networks if n['id'] == network_id]
        if not nets:
            raise exception.NetworkNotFound(network_id=network_id)
        return nets[0]

    def update_cache_fake(*args, **kwargs):
        pass

    def get_subnets_by_net_id(self, context, project_id, network_uuid,
                              vif_uuid):
        # The network index is encoded in the last two uuid characters.
        i = int(network_uuid[-2:])
        subnet_v4 = dict(
            cidr='192.168.%d.0/24' % i,
            dns1='192.168.%d.3' % i,
            dns2='192.168.%d.4' % i,
            gateway='192.168.%d.1' % i)

        subnet_v6 = dict(
            cidr='2001:db8:0:%x::/64' % i,
            gateway='2001:db8:0:%x::1' % i)
        return [subnet_v4, subnet_v6]

    def get_network_by_uuid(context, uuid):
        return dict(id=1,
                    cidr_v6='fe80::/64',
                    bridge='br0',
                    label='public')

    def get_v4_fake(*args, **kwargs):
        ips = fixed_ips_fake(*args, **kwargs)
        return [ip['address'] for ip in ips]

    def get_v6_fake(*args, **kwargs):
        return fixed_ips_v6_fake()

    stubs.Set(db, 'fixed_ip_get_by_instance', fixed_ips_fake)
    stubs.Set(db, 'floating_ip_get_by_fixed_address', floating_ips_fake)
    stubs.Set(db, 'virtual_interface_get_by_uuid', vif_by_uuid_fake)
    stubs.Set(db, 'network_get_by_uuid', get_network_by_uuid)
    stubs.Set(db, 'virtual_interface_get_by_instance', virtual_interfaces_fake)
    stubs.Set(db, 'network_get', network_get_fake)
    stubs.Set(db, 'instance_info_cache_update', update_cache_fake)
    stubs.Set(nova_ipam_lib.NeutronNovaIPAMLib, 'get_subnets_by_net_id',
              get_subnets_by_net_id)
    stubs.Set(nova_ipam_lib.NeutronNovaIPAMLib, 'get_v4_ips_by_interface',
              get_v4_fake)
    stubs.Set(nova_ipam_lib.NeutronNovaIPAMLib, 'get_v6_ips_by_interface',
              get_v6_fake)

    class FakeContext(nova.context.RequestContext):
        def is_admin(self):
            return True

    nw_model = network.get_instance_nw_info(
        FakeContext('fakeuser', 'fake_project'),
        0, 3, None)
    return nw_model
def stub_out_nw_api_get_instance_nw_info(stubs, func=None,
                                         num_networks=1,
                                         ips_per_vif=1,
                                         floating_ips_per_fixed_ip=0):
    """Replace network_api.API.get_instance_nw_info with ``func``, or with a
    default fake built from this module's fixtures when ``func`` is None."""

    def _default_get_instance_nw_info(self, context, instance,
                                      conductor_api=None):
        # Ignores the instance entirely; returns a synthetic model.
        return fake_get_instance_nw_info(
            stubs, num_networks=num_networks, ips_per_vif=ips_per_vif,
            floating_ips_per_fixed_ip=floating_ips_per_fixed_ip)

    replacement = func if func is not None else _default_get_instance_nw_info
    stubs.Set(network_api.API, 'get_instance_nw_info', replacement)
def stub_out_network_cleanup(stubs):
    """Make network deallocation a no-op so tests skip real teardown."""
    stubs.Set(network_api.API, 'deallocate_for_instance',
              lambda *args, **kwargs: None)


# Original ComputeManager methods saved by set_stub_network_methods() so
# that unset_stub_network_methods() can restore them.
_real_functions = {}
def set_stub_network_methods(stubs):
    """Replace ComputeManager networking hooks with no-op fakes, saving the
    originals (once) in _real_functions for later restoration."""
    global _real_functions
    cm = compute_manager.ComputeManager
    if not _real_functions:
        _real_functions = {
            '_get_instance_nw_info': cm._get_instance_nw_info,
            '_allocate_network': cm._allocate_network,
            '_deallocate_network': cm._deallocate_network}

    def fake_networkinfo(*args, **kwargs):
        return network_model.NetworkInfo()

    def fake_async_networkinfo(*args, **kwargs):
        return network_model.NetworkInfoAsyncWrapper(fake_networkinfo)

    stubs.Set(cm, '_get_instance_nw_info', fake_networkinfo)
    stubs.Set(cm, '_allocate_network', fake_async_networkinfo)
    stubs.Set(cm, '_deallocate_network', lambda *args, **kwargs: None)
def unset_stub_network_methods(stubs):
    """Restore the ComputeManager methods saved by set_stub_network_methods."""
    if not _real_functions:
        return
    cm = compute_manager.ComputeManager
    for name, real_fn in _real_functions.items():
        stubs.Set(cm, name, real_fn)
def stub_compute_with_ips(stubs):
    """Wrap compute_api get/get_all/create so returned instances carry a fake
    network-info cache and a fake PCI device claim."""
    orig_get = compute_api.API.get
    orig_get_all = compute_api.API.get_all
    orig_create = compute_api.API.create

    def fake_get(*args, **kwargs):
        return _get_instances_with_cached_ips(orig_get, *args, **kwargs)

    def fake_get_all(*args, **kwargs):
        return _get_instances_with_cached_ips(orig_get_all, *args, **kwargs)

    def fake_create(*args, **kwargs):
        return _create_instances_with_cached_ips(orig_create, *args, **kwargs)

    def fake_pci_device_get_by_addr(context, node_id, dev_addr):
        return test_pci_device.fake_db_dev

    stubs.Set(db, 'pci_device_get_by_addr', fake_pci_device_get_by_addr)
    stubs.Set(compute_api.API, 'get', fake_get)
    stubs.Set(compute_api.API, 'get_all', fake_get_all)
    stubs.Set(compute_api.API, 'create', fake_create)
def _get_fake_cache():
    """Return a JSON-serialized fake network-info cache blob (one vif on the
    'private' network; an ipv6 subnet is added when CONF.use_ipv6 is set)."""
    def _ip(ip, fixed=True, floats=None):
        ip_dict = {'address': ip, 'type': 'fixed'}
        if not fixed:
            ip_dict['type'] = 'floating'
        if fixed and floats:
            ip_dict['floating_ips'] = [_ip(f, fixed=False) for f in floats]
        return ip_dict

    info = [{'address': 'aa:bb:cc:dd:ee:ff',
             'id': 1,
             'network': {'bridge': 'br0',
                         'id': 1,
                         'label': 'private',
                         'subnets': [{'cidr': '192.168.0.0/24',
                                      'ips': [_ip('192.168.0.3')]}]}}]
    if CONF.use_ipv6:
        ipv6_addr = 'fe80:b33f::a8bb:ccff:fedd:eeff'
        info[0]['network']['subnets'].append({'cidr': 'fe80:b33f::/64',
                                              'ips': [_ip(ipv6_addr)]})
    return jsonutils.dumps(info)
def _get_instances_with_cached_ips(orig_func, *args, **kwargs):
    """Kludge the cache into instance(s) without having to create DB
    entries
    """
    instances = orig_func(*args, **kwargs)
    # args[0] is the request context passed through from the compute API.
    context = args[0]
    fake_device = objects.PciDevice.get_by_dev_addr(context, 1, 'a')

    def _info_cache_for(instance):
        info_cache = dict(test_instance_info_cache.fake_info_cache,
                          network_info=_get_fake_cache(),
                          instance_uuid=instance['uuid'])
        if isinstance(instance, obj_base.NovaObject):
            # Object-based instances need a real InstanceInfoCache object,
            # not a plain dict.
            _info_cache = objects.InstanceInfoCache(context)
            objects.InstanceInfoCache._from_db_object(context, _info_cache,
                                                      info_cache)
            info_cache = _info_cache
        instance['info_cache'] = info_cache

    if isinstance(instances, (list, obj_base.ObjectListBase)):
        for instance in instances:
            _info_cache_for(instance)
            pci_device.claim(fake_device, instance)
            pci_device.allocate(fake_device, instance)
    else:
        _info_cache_for(instances)
        pci_device.claim(fake_device, instances)
        pci_device.allocate(fake_device, instances)
    return instances
def _create_instances_with_cached_ips(orig_func, *args, **kwargs):
    """Kludge the above kludge so that the database doesn't get out
    of sync with the actual instance.
    """
    instances, reservation_id = orig_func(*args, **kwargs)
    fake_cache = _get_fake_cache()
    for instance in instances:
        instance['info_cache']['network_info'] = fake_cache
        # args[1] is the context for the create() call signature.
        db.instance_info_cache_update(args[1], instance['uuid'],
                                      {'network_info': fake_cache})
    return (instances, reservation_id)
| apache-2.0 |
jorge-marques/shoop | shoop/notify/enums.py | 6 | 1163 | # -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from django.utils.translation import ugettext_lazy as _
from enumfields.enums import Enum
# Pseudo-language key used when a notification template is not translated
# per-language.
UNILINGUAL_TEMPLATE_LANGUAGE = "default"


class TemplateUse(Enum):
    """How a notification action uses templates (none, single- or multi-language)."""
    NONE = 0
    UNILINGUAL = 1
    MULTILINGUAL = 2
class ConstantUse(Enum):
    """Whether a binding accepts a variable, a constant, or either."""
    VARIABLE_ONLY = 1
    CONSTANT_ONLY = 2
    VARIABLE_OR_CONSTANT = 3
class StepNext(Enum):
    """What to do after a notification step finishes."""
    CONTINUE = "continue"
    STOP = "stop"

    class Labels:
        CONTINUE = _("continue to the next step")
        STOP = _("stop processing")
class StepConditionOperator(Enum):
    """Boolean combinator applied to a step's conditions."""
    ALL = "all"
    ANY = "any"
    NONE = "none"

    class Labels:
        ALL = _("all")
        ANY = _("any")
        NONE = _("none")
class RecipientType(Enum):
    """Who receives a notification."""
    ADMINS = 1
    SPECIFIC_USER = 2

    class Labels:
        ADMINS = _("any shop administrator")
        SPECIFIC_USER = _("a specific user")
class Priority(Enum):
    """Notification priority, in ascending order of urgency."""
    LOW = 1
    NORMAL = 2
    HIGH = 3
    CRITICAL = 4
| agpl-3.0 |
mmoya/ansible | v2/ansible/plugins/cache/base.py | 8 | 1316 | # (c) 2014, Brian Coca, Josh Drake, et al
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from abc import ABCMeta, abstractmethod
from six import add_metaclass
@add_metaclass(ABCMeta)
class BaseCacheModule:
    """Abstract interface that every Ansible fact-cache plugin must implement."""

    @abstractmethod
    def get(self, key):
        """Return the cached value stored under ``key``."""
        pass

    @abstractmethod
    def set(self, key, value):
        """Store ``value`` in the cache under ``key``."""
        pass

    @abstractmethod
    def keys(self):
        """Return all keys currently held by the cache."""
        pass

    @abstractmethod
    def contains(self, key):
        """Return whether ``key`` exists in the cache."""
        pass

    @abstractmethod
    def delete(self, key):
        """Remove the entry stored under ``key``."""
        pass

    @abstractmethod
    def flush(self):
        """Remove every entry from the cache."""
        pass

    @abstractmethod
    def copy(self):
        """Return a (shallow) copy of the cache contents."""
        pass
| gpl-3.0 |
techtonik/pip | src/pip/_vendor/cachecontrol/adapter.py | 42 | 4863 | import types
import functools
import zlib
from pip._vendor.requests.adapters import HTTPAdapter
from .controller import CacheController
from .cache import DictCache
from .filewrapper import CallbackFileWrapper
class CacheControlAdapter(HTTPAdapter):
    """Transport adapter that layers HTTP caching on requests' HTTPAdapter."""

    # Successful responses to these methods invalidate the cached entry
    # for the request URL.
    invalidating_methods = {"PUT", "DELETE"}

    def __init__(
        self,
        cache=None,
        cache_etags=True,
        controller_class=None,
        serializer=None,
        heuristic=None,
        cacheable_methods=None,
        *args,
        **kw
    ):
        """Create the adapter; defaults to an in-memory DictCache and to
        caching GET requests only."""
        super(CacheControlAdapter, self).__init__(*args, **kw)
        self.cache = cache or DictCache()
        self.heuristic = heuristic
        self.cacheable_methods = cacheable_methods or ("GET",)

        controller_factory = controller_class or CacheController
        self.controller = controller_factory(
            self.cache, cache_etags=cache_etags, serializer=serializer
        )

    def send(self, request, cacheable_methods=None, **kw):
        """
        Send a request. Use the request information to see if it
        exists in the cache and cache the response if we need to and can.
        """
        cacheable = cacheable_methods or self.cacheable_methods
        if request.method in cacheable:
            try:
                cached_response = self.controller.cached_request(request)
            except zlib.error:
                # Corrupt compressed cache entry: treat as a cache miss.
                cached_response = None
            if cached_response:
                return self.build_response(request, cached_response, from_cache=True)

            # check for etags and add headers if appropriate
            request.headers.update(self.controller.conditional_headers(request))

        resp = super(CacheControlAdapter, self).send(request, **kw)

        return resp

    def build_response(
        self, request, response, from_cache=False, cacheable_methods=None
    ):
        """
        Build a response by making a request or using the cache.

        This will end up calling send and returning a potentially
        cached response
        """
        cacheable = cacheable_methods or self.cacheable_methods
        if not from_cache and request.method in cacheable:
            # Check for any heuristics that might update headers
            # before trying to cache.
            if self.heuristic:
                response = self.heuristic.apply(response)

            # apply any expiration heuristics
            if response.status == 304:
                # We must have sent an ETag request. This could mean
                # that we've been expired already or that we simply
                # have an etag. In either case, we want to try and
                # update the cache if that is the case.
                cached_response = self.controller.update_cached_response(
                    request, response
                )

                if cached_response is not response:
                    from_cache = True

                # We are done with the server response, read a
                # possible response body (compliant servers will
                # not return one, but we cannot be 100% sure) and
                # release the connection back to the pool.
                response.read(decode_content=False)
                response.release_conn()

                response = cached_response

            # We always cache the 301 responses
            elif response.status == 301:
                self.controller.cache_response(request, response)
            else:
                # Wrap the response file with a wrapper that will cache the
                # response when the stream has been consumed.
                response._fp = CallbackFileWrapper(
                    response._fp,
                    functools.partial(
                        self.controller.cache_response, request, response
                    ),
                )
                if response.chunked:
                    super_update_chunk_length = response._update_chunk_length

                    def _update_chunk_length(self):
                        # Cache only once the final chunk has been read.
                        super_update_chunk_length()
                        if self.chunk_left == 0:
                            self._fp._close()

                    response._update_chunk_length = types.MethodType(
                        _update_chunk_length, response
                    )

        resp = super(CacheControlAdapter, self).build_response(request, response)

        # See if we should invalidate the cache.
        if request.method in self.invalidating_methods and resp.ok:
            cache_url = self.controller.cache_url(request.url)
            self.cache.delete(cache_url)

        # Give the request a from_cache attr to let people use it
        resp.from_cache = from_cache

        return resp

    def close(self):
        """Close the cache backend, then the underlying HTTPAdapter."""
        self.cache.close()
        super(CacheControlAdapter, self).close()
| mit |
resmo/cloudstack | test/integration/component/test_network_offering.py | 6 | 67587 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" P1 tests for network offering
"""
#Import Local Modules
from nose.plugins.attrib import attr
from marvin.cloudstackTestCase import cloudstackTestCase
#from marvin.cloudstackAPI import *
from marvin.lib.utils import (cleanup_resources,
validateList)
from marvin.lib.base import (VirtualMachine,
Account,
Network,
LoadBalancerRule,
PublicIPAddress,
FireWallRule,
NATRule,
Vpn,
ServiceOffering,
NetworkOffering)
from marvin.lib.common import (get_domain,
get_zone,
get_template)
from marvin.codes import *
class Services:
    """Static test data for the network-offering tests.

    Bundles every fixture dictionary (account, offerings, LB/PF/firewall
    rules, VM parameters) into ``self.services`` so test classes can pull
    what they need by key.
    """

    def __init__(self):
        # Provider map for an offering where the virtual router supplies
        # every service.  The netscaler variant is identical except that
        # load balancing is delegated to a Netscaler device.
        all_vr_providers = {
            "Dhcp": 'VirtualRouter',
            "Dns": 'VirtualRouter',
            "SourceNat": 'VirtualRouter',
            "PortForwarding": 'VirtualRouter',
            "Vpn": 'VirtualRouter',
            "Firewall": 'VirtualRouter',
            "Lb": 'VirtualRouter',
            "UserData": 'VirtualRouter',
            "StaticNat": 'VirtualRouter',
        }
        netscaler_providers = dict(all_vr_providers)
        netscaler_providers["Lb"] = 'Netscaler'

        all_services = ('Dhcp,Dns,SourceNat,PortForwarding,Vpn,'
                        'Firewall,Lb,UserData,StaticNat')

        self.services = {
            "account": {
                "email": "test@test.com",
                "firstname": "Test",
                "lastname": "User",
                # Random characters are appended for a unique username
                "username": "test",
                "password": "password",
            },
            "service_offering": {
                "name": "Tiny Instance",
                "displaytext": "Tiny Instance",
                "cpunumber": 1,
                "cpuspeed": 100,   # in MHz
                "memory": 128,     # in MBs
            },
            # All services on the virtual router.
            "network_offering": {
                "name": 'Network offering-VR services',
                "displaytext": 'Network offering-VR services',
                "guestiptype": 'Isolated',
                "supportedservices": all_services,
                "traffictype": 'GUEST',
                "availability": 'Optional',
                "serviceProviderList": dict(all_vr_providers),
            },
            # Same as above, but load balancing via Netscaler.
            "network_offering_netscaler": {
                "name": 'Network offering-netscaler',
                "displaytext": 'Network offering-netscaler',
                "guestiptype": 'Isolated',
                "supportedservices": all_services,
                "traffictype": 'GUEST',
                "availability": 'Optional',
                "serviceProviderList": netscaler_providers,
            },
            # Minimal offering: source NAT plus DHCP/DNS only.
            "network_offering_sourcenat": {
                "name": 'Network offering - SourceNat only',
                "displaytext": 'Network offering - SourceNat only',
                "guestiptype": 'Isolated',
                "supportedservices": 'SourceNat,Dhcp,Dns',
                "traffictype": 'GUEST',
                "availability": 'Optional',
                "serviceProviderList": {
                    "Dhcp": 'VirtualRouter',
                    "Dns": 'VirtualRouter',
                    "SourceNat": 'VirtualRouter',
                },
            },
            # Offering that deliberately omits the DNS service.
            "network_offering_withoutDNS": {
                "name": 'NW offering without DNS',
                "displaytext": 'NW offering without DNS',
                "guestiptype": 'Isolated',
                "supportedservices": 'SourceNat,StaticNat,Dhcp',
                "traffictype": 'GUEST',
                "availability": 'Optional',
                "serviceProviderList": {
                    "Dhcp": 'VirtualRouter',
                    "SourceNat": 'VirtualRouter',
                    "StaticNat": 'VirtualRouter',
                },
            },
            "network": {
                "name": "Test Network",
                "displaytext": "Test Network",
            },
            # Two LB rules that differ only by public port, so tests can
            # program multiple rules on one IP.
            "lbrule": {
                "name": "SSH",
                "alg": "leastconn",   # load-balancing algorithm
                "privateport": 22,
                "publicport": 2222,
                "openfirewall": False,
            },
            "lbrule_port_2221": {
                "name": "SSH",
                "alg": "leastconn",   # load-balancing algorithm
                "privateport": 22,
                "publicport": 2221,
                "openfirewall": False,
            },
            # Two PF rules that differ only by public port.
            "natrule": {
                "privateport": 22,
                "publicport": 22,
                "protocol": "TCP",
            },
            "natrule_port_66": {
                "privateport": 22,
                "publicport": 66,
                "protocol": "TCP",
            },
            "fw_rule": {
                "startport": 1,
                "endport": 6000,
                # Wide CIDR so the firewall rule matches broadly
                "cidr": '55.55.0.0/11',
            },
            "virtual_machine": {
                "displayname": "Test VM",
                "username": "root",
                "password": "password",
                "ssh_port": 22,
                # Hypervisor type should match the cluster's hypervisor
                "hypervisor": 'XenServer',
                "privateport": 22,
                "publicport": 22,
                "protocol": 'TCP',
            },
            "ostype": 'CentOS 5.3 (64-bit)',
            "sleep": 60,      # seconds to wait between retries
            "timeout": 10,    # retry count
        }
class TestNOVirtualRouter(cloudstackTestCase):
    """P1 tests for network offerings whose services all run on the
    virtual router (VR).

    Covers conserve mode OFF (test_01), conserve mode ON (test_02),
    a custom system offering for the VR (test_03, CS-5332), and editing
    a network created without a network domain (test_04, CS-19303).

    Fixes applied during review:
    - ``self.assertEquals`` (deprecated alias, removed in Python 3.12)
      replaced with ``self.assertEqual``.
    - Instance methods now consistently use ``self.apiclient`` instead of
      mixing in the class-level ``self.api_client``.
    - test_02's debug message wrongly said "conserve mode:off" while the
      offering is created with ``conservemode=True``.
    - test_03 no longer mutates the shared ``self.services`` dict in
      place; it works on a copy so sibling tests see unpolluted data.
    """

    @classmethod
    def setUpClass(cls):
        # One-time setup shared by every test in this class: resolve
        # zone/domain/template and create the guest-VM compute offering.
        cls.testClient = super(TestNOVirtualRouter, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.services = Services().services
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.api_client)
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        cls.services['mode'] = cls.zone.networktype
        cls.template = get_template(
            cls.api_client,
            cls.zone.id,
            cls.services["ostype"]
        )
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.services["virtual_machine"]["template"] = cls.template.id
        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.services["service_offering"]
        )
        cls._cleanup = [
            cls.service_offering,
        ]
        return

    @classmethod
    def tearDownClass(cls):
        try:
            # Cleanup resources used
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        # Per-test setup: a fresh admin account isolates each test's
        # networks/VMs/IPs and lets tearDown remove them wholesale.
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.account = Account.create(
            self.apiclient,
            self.services["account"],
            admin=True,
            domainid=self.domain.id
        )
        self.cleanup = []
        return

    def tearDown(self):
        try:
            # Deleting the account cascades to the resources it owns.
            self.account.delete(self.apiclient)
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced"], required_hardware="false")
    def test_01_network_off_without_conserve_mode(self):
        """Test Network offering with Conserve mode off and VR - All services
        """
        # Validate the following
        # 1. Create a Network from the above network offering and deploy a VM.
        # 2. On source NAT ipaddress, we should NOT be allowed to add a
        #    LB rules
        # 3. On source NAT ipaddress, we should NOT be allowed to add a
        #    PF rule
        # 4. On an ipaddress that has PF rules, we should NOT be allowed to
        #    add a LB rules.
        # 5. On an ipaddress that has Lb rules, we should NOT allow PF rules
        #    to be programmed.
        # 6. We should be allowed to program multiple PF rules on the same Ip
        #    address on different public ports.
        # 7. We should be allowed to program multiple LB rules on the same Ip
        #    address for different public port ranges.
        # 8. On source NAT ipaddress, we should be allowed to Enable VPN.
        # 9. On source NAT ipaddress, we will be allowed to add firewall rule

        # Create a network offering with all virtual router services enabled
        self.debug(
            "Creating n/w offering with all services in VR & conserve mode:off"
        )
        self.network_offering = NetworkOffering.create(
            self.apiclient,
            self.services["network_offering"],
            conservemode=False
        )
        self.cleanup.append(self.network_offering)
        self.debug("Created n/w offering with ID: %s" %
                   self.network_offering.id)
        # Enable Network offering
        self.network_offering.update(self.apiclient, state='Enabled')

        # Creating network using the network offering created
        self.debug("Creating network with network offering: %s" %
                   self.network_offering.id)
        self.network = Network.create(
            self.apiclient,
            self.services["network"],
            accountid=self.account.name,
            domainid=self.account.domainid,
            networkofferingid=self.network_offering.id,
            zoneid=self.zone.id
        )
        self.debug("Created network with ID: %s" % self.network.id)

        self.debug("Deploying VM in account: %s" % self.account.name)
        # Spawn an instance in that network
        virtual_machine = VirtualMachine.create(
            self.apiclient,
            self.services["virtual_machine"],
            accountid=self.account.name,
            domainid=self.account.domainid,
            serviceofferingid=self.service_offering.id,
            networkids=[str(self.network.id)]
        )
        self.debug("Deployed VM in network: %s" % self.network.id)

        src_nat_list = PublicIPAddress.list(
            self.apiclient,
            associatednetworkid=self.network.id,
            account=self.account.name,
            domainid=self.account.domainid,
            listall=True,
            issourcenat=True,
        )
        self.assertEqual(
            isinstance(src_nat_list, list),
            True,
            "List Public IP should return a valid source NAT"
        )
        self.assertNotEqual(
            len(src_nat_list),
            0,
            "Length of response from listPublicIp should not be 0"
        )
        src_nat = src_nat_list[0]

        self.debug("Trying to create LB rule on source NAT IP: %s" %
                   src_nat.ipaddress)
        # Conserve mode off: LB rule on source NAT must be rejected
        with self.assertRaises(Exception):
            LoadBalancerRule.create(
                self.apiclient,
                self.services["lbrule"],
                ipaddressid=src_nat.id,
                accountid=self.account.name
            )
        self.debug(
            "Trying to create a port forwarding rule in source NAT: %s" %
            src_nat.ipaddress)
        # Conserve mode off: PF rule on source NAT must be rejected
        with self.assertRaises(Exception):
            NATRule.create(
                self.apiclient,
                virtual_machine,
                self.services["natrule"],
                ipaddressid=src_nat.id
            )

        self.debug("Associating public IP for network: %s" % self.network.id)
        ip_with_nat_rule = PublicIPAddress.create(
            self.apiclient,
            accountid=self.account.name,
            zoneid=self.zone.id,
            domainid=self.account.domainid,
            networkid=self.network.id
        )
        self.debug("Associated %s with network %s" % (
            ip_with_nat_rule.ipaddress,
            self.network.id
        ))
        self.debug("Creating PF rule for IP address: %s" %
                   ip_with_nat_rule.ipaddress)
        NATRule.create(
            self.apiclient,
            virtual_machine,
            self.services["natrule"],
            ipaddressid=ip_with_nat_rule.ipaddress.id
        )
        self.debug("Trying to create LB rule on IP with NAT: %s" %
                   ip_with_nat_rule.ipaddress)
        # Conserve mode off: LB rule on an IP that already has a PF rule
        # must be rejected
        with self.assertRaises(Exception):
            LoadBalancerRule.create(
                self.apiclient,
                self.services["lbrule"],
                ipaddressid=ip_with_nat_rule.ipaddress.id,
                accountid=self.account.name
            )
        # Multiple PF rules on different public ports are allowed
        self.debug("Creating PF rule with public port: 66")
        nat_rule = NATRule.create(
            self.apiclient,
            virtual_machine,
            self.services["natrule_port_66"],
            ipaddressid=ip_with_nat_rule.ipaddress.id
        )
        # Check if NAT rule created successfully
        nat_rules = NATRule.list(
            self.apiclient,
            id=nat_rule.id
        )
        self.assertEqual(
            isinstance(nat_rules, list),
            True,
            "List NAT rules should return valid list"
        )

        self.debug("Associating public IP for network: %s" % self.network.id)
        ip_with_lb_rule = PublicIPAddress.create(
            self.apiclient,
            accountid=self.account.name,
            zoneid=self.zone.id,
            domainid=self.account.domainid,
            networkid=self.network.id
        )
        self.debug("Associated %s with network %s" % (
            ip_with_lb_rule.ipaddress,
            self.network.id
        ))
        self.debug("Creating LB rule for IP address: %s" %
                   ip_with_lb_rule.ipaddress)
        LoadBalancerRule.create(
            self.apiclient,
            self.services["lbrule"],
            ipaddressid=ip_with_lb_rule.ipaddress.id,
            accountid=self.account.name
        )
        self.debug("Trying to create PF rule on IP with LB rule: %s" %
                   ip_with_nat_rule.ipaddress)
        # Conserve mode off: PF rule on an IP that already has an LB rule
        # must be rejected
        with self.assertRaises(Exception):
            NATRule.create(
                self.apiclient,
                virtual_machine,
                self.services["natrule"],
                ipaddressid=ip_with_lb_rule.ipaddress.id
            )
        # Multiple LB rules on different public ports are allowed
        self.debug("Creating LB rule with public port: 2221")
        lb_rule = LoadBalancerRule.create(
            self.apiclient,
            self.services["lbrule_port_2221"],
            ipaddressid=ip_with_lb_rule.ipaddress.id,
            accountid=self.account.name
        )
        # Check if LB rule created successfully
        lb_rules = LoadBalancerRule.list(
            self.apiclient,
            id=lb_rule.id
        )
        self.assertEqual(
            isinstance(lb_rules, list),
            True,
            "List LB rules should return valid list"
        )

        self.debug("Creating firewall rule on source NAT: %s" %
                   src_nat.ipaddress)
        # Firewall rules remain allowed on the source NAT IP
        fw_rule = FireWallRule.create(
            self.apiclient,
            ipaddressid=src_nat.id,
            protocol='TCP',
            cidrlist=[self.services["fw_rule"]["cidr"]],
            startport=self.services["fw_rule"]["startport"],
            endport=self.services["fw_rule"]["endport"]
        )
        self.debug("Created firewall rule: %s" % fw_rule.id)
        fw_rules = FireWallRule.list(
            self.apiclient,
            id=fw_rule.id
        )
        self.assertEqual(
            isinstance(fw_rules, list),
            True,
            "List fw rules should return a valid firewall rules"
        )
        self.assertNotEqual(
            len(fw_rules),
            0,
            "Length of fw rules response should not be zero"
        )
        return

    @attr(tags=["advanced"], required_hardware="false")
    def test_02_network_off_with_conserve_mode(self):
        """Test Network offering with Conserve mode ON and VR - All services
        """
        # Validate the following
        # 1. Create a Network from the above network offering and deploy a VM.
        # 2. On source NAT ipaddress, we should be allowed to add a LB rules
        # 3. On source NAT ipaddress, we should be allowed to add a PF rules
        # 4. On source NAT ipaddress, we should be allowed to add a Firewall
        #    rules
        # 5. On an ipaddress that has Lb rules, we should be allowed to
        #    program PF rules.
        # 6. We should be allowed to program multiple PF rules on the same Ip
        #    address on different public ports.
        # 7. We should be allowed to program multiple LB rules on the same Ip
        #    address for different public port ranges.
        # 8. On source NAT ipaddress, we should be allowed to Enable VPN
        #    access.

        # Create a network offering with all virtual router services enabled
        self.debug(
            "Creating n/w offering with all services in VR & conserve mode:on"
        )
        self.network_offering = NetworkOffering.create(
            self.apiclient,
            self.services["network_offering"],
            conservemode=True
        )
        self.cleanup.append(self.network_offering)
        self.debug("Created n/w offering with ID: %s" %
                   self.network_offering.id)
        # Enable Network offering
        self.network_offering.update(self.apiclient, state='Enabled')

        # Creating network using the network offering created
        self.debug("Creating network with network offering: %s" %
                   self.network_offering.id)
        self.network = Network.create(
            self.apiclient,
            self.services["network"],
            accountid=self.account.name,
            domainid=self.account.domainid,
            networkofferingid=self.network_offering.id,
            zoneid=self.zone.id
        )
        self.debug("Created network with ID: %s" % self.network.id)

        self.debug("Deploying VM in account: %s" % self.account.name)
        # Spawn an instance in that network
        virtual_machine = VirtualMachine.create(
            self.apiclient,
            self.services["virtual_machine"],
            accountid=self.account.name,
            domainid=self.account.domainid,
            serviceofferingid=self.service_offering.id,
            networkids=[str(self.network.id)]
        )
        self.debug("Deployed VM in network: %s" % self.network.id)

        src_nat_list = PublicIPAddress.list(
            self.apiclient,
            associatednetworkid=self.network.id,
            account=self.account.name,
            domainid=self.account.domainid,
            listall=True,
            issourcenat=True,
        )
        self.assertEqual(
            isinstance(src_nat_list, list),
            True,
            "List Public IP should return a valid source NAT"
        )
        self.assertNotEqual(
            len(src_nat_list),
            0,
            "Length of response from listPublicIp should not be 0"
        )
        src_nat = src_nat_list[0]

        self.debug("Trying to create LB rule on source NAT IP: %s" %
                   src_nat.ipaddress)
        # Conserve mode on: LB rule on source NAT is allowed
        lb_rule = LoadBalancerRule.create(
            self.apiclient,
            self.services["lbrule"],
            ipaddressid=src_nat.id,
            accountid=self.account.name
        )
        self.debug("Created LB rule on source NAT: %s" % src_nat.ipaddress)
        lb_rules = LoadBalancerRule.list(
            self.apiclient,
            id=lb_rule.id
        )
        self.assertEqual(
            isinstance(lb_rules, list),
            True,
            "List lb rules should return a valid lb rules"
        )
        self.assertNotEqual(
            len(lb_rules),
            0,
            "Length of response from listLbRules should not be 0"
        )
        self.debug(
            "Trying to create a port forwarding rule in source NAT: %s" %
            src_nat.ipaddress)
        # Conserve mode on: PF rule on source NAT is allowed
        nat_rule = NATRule.create(
            self.apiclient,
            virtual_machine,
            self.services["natrule"],
            ipaddressid=src_nat.id
        )
        self.debug("Created PF rule on source NAT: %s" % src_nat.ipaddress)
        nat_rules = NATRule.list(
            self.apiclient,
            id=nat_rule.id
        )
        self.assertEqual(
            isinstance(nat_rules, list),
            True,
            "List NAT should return a valid port forwarding rules"
        )
        self.assertNotEqual(
            len(nat_rules),
            0,
            "Length of response from listLbRules should not be 0"
        )

        self.debug("Creating firewall rule on source NAT: %s" %
                   src_nat.ipaddress)
        # Create Firewall rule on source NAT
        fw_rule = FireWallRule.create(
            self.apiclient,
            ipaddressid=src_nat.id,
            protocol='TCP',
            cidrlist=[self.services["fw_rule"]["cidr"]],
            startport=self.services["fw_rule"]["startport"],
            endport=self.services["fw_rule"]["endport"]
        )
        self.debug("Created firewall rule: %s" % fw_rule.id)
        fw_rules = FireWallRule.list(
            self.apiclient,
            id=fw_rule.id
        )
        self.assertEqual(
            isinstance(fw_rules, list),
            True,
            "List fw rules should return a valid firewall rules"
        )
        self.assertNotEqual(
            len(fw_rules),
            0,
            "Length of fw rules response should not be zero"
        )

        self.debug("Associating public IP for network: %s" % self.network.id)
        public_ip = PublicIPAddress.create(
            self.apiclient,
            accountid=self.account.name,
            zoneid=self.zone.id,
            domainid=self.account.domainid,
            networkid=self.network.id
        )
        self.debug("Associated %s with network %s" % (
            public_ip.ipaddress,
            self.network.id
        ))
        self.debug("Creating PF rule for IP address: %s" %
                   public_ip.ipaddress)
        NATRule.create(
            self.apiclient,
            virtual_machine,
            self.services["natrule"],
            ipaddressid=public_ip.ipaddress.id
        )
        self.debug("Trying to create LB rule on IP with NAT: %s" %
                   public_ip.ipaddress)
        # Conserve mode on: LB rule on an IP that already has a PF rule
        # is allowed
        lb_rule = LoadBalancerRule.create(
            self.apiclient,
            self.services["lbrule"],
            ipaddressid=public_ip.ipaddress.id,
            accountid=self.account.name
        )
        self.debug("Creating PF rule with public port: 66")
        nat_rule = NATRule.create(
            self.apiclient,
            virtual_machine,
            self.services["natrule_port_66"],
            ipaddressid=public_ip.ipaddress.id
        )
        # Check if NAT rule created successfully
        nat_rules = NATRule.list(
            self.apiclient,
            id=nat_rule.id
        )
        self.assertEqual(
            isinstance(nat_rules, list),
            True,
            "List NAT rules should return valid list"
        )
        self.debug("Creating LB rule with public port: 2221")
        lb_rule = LoadBalancerRule.create(
            self.apiclient,
            self.services["lbrule_port_2221"],
            ipaddressid=public_ip.ipaddress.id,
            accountid=self.account.name
        )
        # Check if LB rule created successfully
        lb_rules = LoadBalancerRule.list(
            self.apiclient,
            id=lb_rule.id
        )
        self.assertEqual(
            isinstance(lb_rules, list),
            True,
            "List LB rules should return valid list"
        )

        # User should be able to enable VPN on source NAT
        self.debug("Created VPN with source NAT IP: %s" % src_nat.ipaddress)
        # Assign VPN to source NAT
        Vpn.create(
            self.apiclient,
            src_nat.id,
            account=self.account.name,
            domainid=self.account.domainid
        )
        vpns = Vpn.list(
            self.apiclient,
            publicipid=src_nat.id,
            listall=True,
        )
        self.assertEqual(
            isinstance(vpns, list),
            True,
            "List VPNs should return a valid VPN list"
        )
        self.assertNotEqual(
            len(vpns),
            0,
            "Length of list VPN response should not be zero"
        )
        return

    @attr(tags=["advanced"], required_hardware="false")
    def test_03_network_off_CS5332(self):
        """
        @Desc: Test Network offering with Custom system offering for VR
        @Steps:
        Step1: Create new system offering for domain router
        Step2: Verify the custom system offering creation for domain router
        Step3: Create new network offering with domain router system offering created in step1
        Step4: Verify the network offering creation with custom system offering for VR
        Step5: Enable the network offering created in step3
        Step5: Create isolated guest network using network offering created in step3
        Step6: Deploy guest vm in network created above
        """
        # Create a custom system offering for the VR.  Work on a copy so
        # the shared class-level services dict is not polluted for the
        # other tests in this class.
        vr_sys_off_data = self.services["service_offering"].copy()
        vr_sys_off_data["name"] = "test_service_offering_for_router"
        vr_sys_off_data["displaytext"] = "test_service_offering_for_router"
        vr_sys_off_data["cpuspeed"] = 500
        vr_sys_off_data["memory"] = 512
        vr_sys_off_data["systemvmtype"] = "domainrouter"
        vr_sys_off_data["storagetype"] = "shared"
        vr_sys_off_data["issystem"] = "true"
        vr_sys_off = ServiceOffering.create(
            self.apiclient,
            vr_sys_off_data,
        )
        self.assertIsNotNone(
            vr_sys_off,
            "Failed to create custom system offering for VR"
        )
        vr_sys_off_res = ServiceOffering.list(
            self.apiclient,
            id=vr_sys_off.id,
            issystem="true"
        )
        status = validateList(vr_sys_off_res)
        self.assertEqual(
            PASS,
            status[0],
            "Listing of VR system offering failed"
        )
        self.assertEqual(
            len(vr_sys_off_res),
            1,
            "Listing more than VR system offerings created"
        )
        self.debug("Created system offering with id %s" % vr_sys_off.id)

        # Create a network offering with all VR services enabled, using
        # the custom system offering for the VR
        self.debug(
            "Creating n/w offering with all services in VR & using custom system offering for VR"
        )
        self.network_offering = NetworkOffering.create(
            self.apiclient,
            self.services["network_offering"],
            conservemode=False,
            serviceofferingid=vr_sys_off.id
        )
        self.assertIsNotNone(
            self.network_offering,
            "Failed to create network offering with custom system offering for VR"
        )
        network_off_res = NetworkOffering.list(
            self.apiclient,
            id=self.network_offering.id
        )
        status = validateList(network_off_res)
        self.assertEqual(
            PASS,
            status[0],
            "Listing of network offerings failed"
        )
        self.assertEqual(
            len(network_off_res),
            1,
            "More than one network offerings are created"
        )
        self.assertEqual(
            network_off_res[0].serviceofferingid,
            vr_sys_off.id,
            "FAIL: Network offering has been created with default system offering"
        )
        # The network offering must be removed before the system offering
        # it references, hence this cleanup order.
        self.cleanup.append(self.network_offering)
        self.cleanup.append(vr_sys_off)
        self.debug("Created n/w offering with ID: %s" % self.network_offering.id)
        # Enable Network offering
        self.network_offering.update(self.apiclient, state='Enabled')

        # Creating network using the network offering created
        self.debug("Creating network with network offering: %s" % self.network_offering.id)
        self.network = Network.create(
            self.apiclient,
            self.services["network"],
            accountid=self.account.name,
            domainid=self.account.domainid,
            networkofferingid=self.network_offering.id,
            zoneid=self.zone.id
        )
        self.assertIsNotNone(self.network, "Failed to create network")
        self.debug("Created network with ID: %s" % self.network.id)

        self.debug("Deploying VM in account: %s" % self.account.name)
        # Spawn an instance in that network
        virtual_machine = VirtualMachine.create(
            self.apiclient,
            self.services["virtual_machine"],
            accountid=self.account.name,
            domainid=self.account.domainid,
            serviceofferingid=self.service_offering.id,
            networkids=[str(self.network.id)]
        )
        self.assertIsNotNone(
            virtual_machine,
            "VM creation failed with network %s" % self.network.id
        )
        self.debug("Deployed VM in network: %s" % self.network.id)
        return

    @attr(tags=["advanced"], required_hardware="false")
    def test_04_network_without_domain_CS19303(self):
        """
        @Desc: Errors editing a network without a network domain specified
        @Steps:
        Step1: Create a network offering with SourceNAT,staticNAT and dhcp services
        Step2: Verify the network offering creation
        Step3: Create an isolated network with the offering created in step1 and without a network domain specified
        Step4: Verify the network creation
        Step5: Edit the network and verify that updating network should not error out
        """
        self.debug(
            "Creating n/w offering with SourceNat,StaticNat and DHCP services in VR & conserve mode:off"
        )
        self.network_offering = NetworkOffering.create(
            self.apiclient,
            self.services["network_offering_withoutDNS"],
            conservemode=False
        )
        self.assertIsNotNone(
            self.network_offering,
            "Failed to create NO with Sourcenat,staticnat and dhcp only services"
        )
        self.cleanup.append(self.network_offering)
        self.debug("Created n/w offering with ID: %s" % self.network_offering.id)
        # Enable Network offering
        self.network_offering.update(self.apiclient, state='Enabled')
        self.debug("Creating nw without dns service using no id: %s" % self.network_offering.id)
        self.network = Network.create(
            self.apiclient,
            self.services["network"],
            accountid=self.account.name,
            domainid=self.account.domainid,
            networkofferingid=self.network_offering.id,
            zoneid=self.zone.id
        )
        self.assertIsNotNone(
            self.network,
            "Failed to create network without DNS service and network domain"
        )
        self.debug("Created network with NO: %s" % self.network_offering.id)
        # Regression check for CS-19303: updating such a network must not
        # raise even though no network domain was specified at creation.
        try:
            self.network_update = self.network.update(
                self.apiclient,
                name="NW without nw domain"
            )
            self.debug("Success:Network update has been successful without network domain")
        except Exception as e:
            self.fail("Error editing a network without network domain specified: %s" % e)
        return
class TestNetworkUpgrade(cloudstackTestCase):
    """P1 tests for upgrading a VR-backed network to an offering whose
    load balancing is provided by Netscaler, with conserve mode ON
    (test_01) and OFF (test_02).

    Fixes applied during review:
    - test_02's debug message wrongly said "conserve mode:ON" while the
      target offering is created with ``conservemode=False``.
    - Instance methods now consistently use ``self.apiclient`` instead of
      mixing in the class-level ``self.api_client``.
    """

    @classmethod
    def setUpClass(cls):
        # One-time setup: resolve zone/domain/template, create the guest
        # compute offering and the conserve-mode-ON VR network offering
        # that every test starts from.
        cls.testClient = super(TestNetworkUpgrade, cls).getClsTestClient()
        cls.api_client = cls.testClient.getApiClient()
        cls.services = Services().services
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.api_client)
        cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
        cls.services['mode'] = cls.zone.networktype
        cls.template = get_template(
            cls.api_client,
            cls.zone.id,
            cls.services["ostype"]
        )
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.services["virtual_machine"]["template"] = cls.template.id
        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.services["service_offering"]
        )
        cls.network_offering = NetworkOffering.create(
            cls.api_client,
            cls.services["network_offering"],
            conservemode=True
        )
        # Enable Network offering
        cls.network_offering.update(cls.api_client, state='Enabled')
        cls._cleanup = [
            cls.service_offering,
            cls.network_offering
        ]
        return

    @classmethod
    def tearDownClass(cls):
        try:
            # Cleanup resources used
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        # Per-test setup: a fresh admin account isolates each test's
        # networks/VMs/IPs and lets tearDown remove them wholesale.
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.account = Account.create(
            self.apiclient,
            self.services["account"],
            admin=True,
            domainid=self.domain.id
        )
        self.cleanup = []
        return

    def tearDown(self):
        try:
            # Deleting the account cascades to the resources it owns.
            self.account.delete(self.apiclient)
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(speed="slow")
    @attr(tags=["advancedns"], required_hardware="true")
    def test_01_nwupgrade_netscaler_conserve_on(self):
        """Test Nw upgrade to netscaler lb service and conserve mode ON
        """
        # Validate the following
        # 1. Upgrade a network with VR and conserve mode ON TO
        #    a network that has Lb provided by "Netscaler" and all other
        #    services provided by "VR" and Conserve mode ON
        # 2. Have PF and LB rules on the same ip address. Upgrade network
        #    should fail.
        # 3. Have SourceNat,PF and VPN on the same IP address. Upgrade of
        #    network should succeed.

        # Creating network using the network offering created
        self.debug("Creating network with network offering: %s" %
                   self.network_offering.id)
        self.network = Network.create(
            self.apiclient,
            self.services["network"],
            accountid=self.account.name,
            domainid=self.account.domainid,
            networkofferingid=self.network_offering.id,
            zoneid=self.zone.id
        )
        self.debug("Created network with ID: %s" % self.network.id)

        self.debug("Deploying VM in account: %s" % self.account.name)
        # Spawn an instance in that network
        virtual_machine = VirtualMachine.create(
            self.apiclient,
            self.services["virtual_machine"],
            accountid=self.account.name,
            domainid=self.account.domainid,
            serviceofferingid=self.service_offering.id,
            networkids=[str(self.network.id)]
        )
        self.debug("Deployed VM in network: %s" % self.network.id)

        src_nat_list = PublicIPAddress.list(
            self.apiclient,
            associatednetworkid=self.network.id,
            account=self.account.name,
            domainid=self.account.domainid,
            listall=True,
            issourcenat=True,
        )
        self.assertEqual(
            isinstance(src_nat_list, list),
            True,
            "List Public IP should return a valid source NAT"
        )
        self.assertNotEqual(
            len(src_nat_list),
            0,
            "Length of response from listPublicIp should not be 0"
        )
        src_nat = src_nat_list[0]

        self.debug("Trying to create LB rule on source NAT IP: %s" %
                   src_nat.ipaddress)
        # Create Load Balancer rule with source NAT
        lb_rule = LoadBalancerRule.create(
            self.apiclient,
            self.services["lbrule"],
            ipaddressid=src_nat.id,
            accountid=self.account.name
        )
        self.debug("Created LB rule on source NAT: %s" % src_nat.ipaddress)
        lb_rules = LoadBalancerRule.list(
            self.apiclient,
            id=lb_rule.id
        )
        self.assertEqual(
            isinstance(lb_rules, list),
            True,
            "List lb rules should return a valid lb rules"
        )
        self.assertNotEqual(
            len(lb_rules),
            0,
            "Length of response from listLbRules should not be 0"
        )
        self.debug(
            "Trying to create a port forwarding rule in source NAT: %s" %
            src_nat.ipaddress)
        # Create NAT rule
        nat_rule = NATRule.create(
            self.apiclient,
            virtual_machine,
            self.services["natrule"],
            ipaddressid=src_nat.id
        )
        self.debug("Created PF rule on source NAT: %s" % src_nat.ipaddress)
        nat_rules = NATRule.list(
            self.apiclient,
            id=nat_rule.id
        )
        self.assertEqual(
            isinstance(nat_rules, list),
            True,
            "List NAT should return a valid port forwarding rules"
        )
        self.assertNotEqual(
            len(nat_rules),
            0,
            "Length of response from listLbRules should not be 0"
        )

        # Target offering: LB on Netscaler, all other services on the VR
        self.debug(
            "Creating n/w offering with all services in VR & conserve mode:ON LB- Netscaler"
        )
        ns_lb_offering = NetworkOffering.create(
            self.apiclient,
            self.services["network_offering_netscaler"],
            conservemode=True
        )
        self.cleanup.append(ns_lb_offering)
        ns_lb_offering.update(self.apiclient, state='Enabled')

        # Stop all the VMs associated with network to update cidr
        self.debug("Stopping the VM: %s" % virtual_machine.name)
        virtual_machine.stop(self.apiclient)

        # With PF and LB on the same IP the upgrade must fail
        self.debug("Updating network offering for network: %s" %
                   self.network.id)
        with self.assertRaises(Exception):
            self.network.update(
                self.apiclient,
                networkofferingid=ns_lb_offering.id,
                changecidr=True
            )
        self.debug("Network upgrade failed!")

        self.debug("Deleting LB Rule: %s" % lb_rule.id)
        lb_rule.delete(self.apiclient)
        self.debug("LB rule deleted")

        # Assign VPN to source NAT
        self.debug("Enabling VPN on source NAT")
        Vpn.create(
            self.apiclient,
            src_nat.id,
            account=self.account.name,
            domainid=self.account.domainid
        )
        vpns = Vpn.list(
            self.apiclient,
            publicipid=src_nat.id,
            listall=True,
        )
        self.assertEqual(
            isinstance(vpns, list),
            True,
            "List VPNs should return a valid VPN list"
        )
        self.assertNotEqual(
            len(vpns),
            0,
            "Length of list VPN response should not be zero"
        )

        # With SourceNat + PF + VPN (no LB) the upgrade must succeed
        self.debug("Upgrading the network: %s" % self.network.id)
        self.network.update(
            self.apiclient,
            networkofferingid=ns_lb_offering.id,
            changecidr=True
        )
        networks = Network.list(
            self.apiclient,
            id=self.network.id,
            listall=True
        )
        self.assertEqual(
            isinstance(networks, list),
            True,
            "List Networks should return a valid list for given network ID"
        )
        self.assertNotEqual(
            len(networks),
            0,
            "Length of list networks should not be 0"
        )
        network = networks[0]
        self.assertEqual(
            network.networkofferingid,
            ns_lb_offering.id,
            "Network offering ID should match with new offering ID"
        )
        return

    @attr(speed="slow")
    @attr(tags=["advancedns"], required_hardware="true")
    def test_02_nwupgrade_netscaler_conserve_off(self):
        """Test Nw upgrade to netscaler lb service and conserve mode OFF
        """
        # Validate the following
        # 1. Upgrade a network with VR and conserve mode ON TO
        #    a network that has Lb provided by "Netscaler" and all other
        #    services provided by "VR" and Conserve mode OFF
        # 2. Have PF and LB rules on the same ip address. Upgrade network
        #    should fail.
        # 3. Have SourceNat,PF and VPN on the same IP address. Upgrade of
        #    network should fail.

        # Creating network using the network offering created
        self.debug("Creating network with network offering: %s" %
                   self.network_offering.id)
        self.network = Network.create(
            self.apiclient,
            self.services["network"],
            accountid=self.account.name,
            domainid=self.account.domainid,
            networkofferingid=self.network_offering.id,
            zoneid=self.zone.id
        )
        self.debug("Created network with ID: %s" % self.network.id)

        self.debug("Deploying VM in account: %s" % self.account.name)
        # Spawn an instance in that network
        virtual_machine = VirtualMachine.create(
            self.apiclient,
            self.services["virtual_machine"],
            accountid=self.account.name,
            domainid=self.account.domainid,
            serviceofferingid=self.service_offering.id,
            networkids=[str(self.network.id)]
        )
        self.debug("Deployed VM in network: %s" % self.network.id)

        src_nat_list = PublicIPAddress.list(
            self.apiclient,
            associatednetworkid=self.network.id,
            account=self.account.name,
            domainid=self.account.domainid,
            listall=True,
            issourcenat=True,
        )
        self.assertEqual(
            isinstance(src_nat_list, list),
            True,
            "List Public IP should return a valid source NAT"
        )
        self.assertNotEqual(
            len(src_nat_list),
            0,
            "Length of response from listPublicIp should not be 0"
        )
        src_nat = src_nat_list[0]

        self.debug("Trying to create LB rule on source NAT IP: %s" %
                   src_nat.ipaddress)
        # Create Load Balancer rule with source NAT
        lb_rule = LoadBalancerRule.create(
            self.apiclient,
            self.services["lbrule"],
            ipaddressid=src_nat.id,
            accountid=self.account.name
        )
        self.debug("Created LB rule on source NAT: %s" % src_nat.ipaddress)
        lb_rules = LoadBalancerRule.list(
            self.apiclient,
            id=lb_rule.id
        )
        self.assertEqual(
            isinstance(lb_rules, list),
            True,
            "List lb rules should return a valid lb rules"
        )
        self.assertNotEqual(
            len(lb_rules),
            0,
            "Length of response from listLbRules should not be 0"
        )
        self.debug(
            "Trying to create a port forwarding rule in source NAT: %s" %
            src_nat.ipaddress)
        # Create NAT rule
        nat_rule = NATRule.create(
            self.apiclient,
            virtual_machine,
            self.services["natrule"],
            ipaddressid=src_nat.id
        )
        self.debug("Created PF rule on source NAT: %s" % src_nat.ipaddress)
        nat_rules = NATRule.list(
            self.apiclient,
            id=nat_rule.id
        )
        self.assertEqual(
            isinstance(nat_rules, list),
            True,
            "List NAT should return a valid port forwarding rules"
        )
        self.assertNotEqual(
            len(nat_rules),
            0,
            "Length of response from listLbRules should not be 0"
        )

        # Target offering: LB on Netscaler, conserve mode OFF
        self.debug(
            "Creating n/w offering with all services in VR & conserve mode:OFF LB- Netscaler"
        )
        ns_lb_offering = NetworkOffering.create(
            self.apiclient,
            self.services["network_offering_netscaler"],
            conservemode=False
        )
        self.cleanup.append(ns_lb_offering)
        ns_lb_offering.update(self.apiclient, state='Enabled')

        # Stop all the VMs associated with network to update cidr
        self.debug("Stopping the VM: %s" % virtual_machine.name)
        virtual_machine.stop(self.apiclient)

        # With PF and LB on the same IP the upgrade must fail
        self.debug("Updating network offering for network: %s" %
                   self.network.id)
        with self.assertRaises(Exception):
            self.network.update(
                self.apiclient,
                networkofferingid=ns_lb_offering.id,
                changecidr=True
            )
        self.debug("Network upgrade failed!")

        self.debug("Deleting LB Rule: %s" % lb_rule.id)
        lb_rule.delete(self.apiclient)
        self.debug("LB rule deleted")

        # Assign VPN to source NAT
        self.debug("Enabling VPN on source NAT")
        Vpn.create(
            self.apiclient,
            src_nat.id,
            account=self.account.name,
            domainid=self.account.domainid
        )
        vpns = Vpn.list(
            self.apiclient,
            publicipid=src_nat.id,
            listall=True,
        )
        self.assertEqual(
            isinstance(vpns, list),
            True,
            "List VPNs should return a valid VPN list"
        )
        self.assertNotEqual(
            len(vpns),
            0,
            "Length of list VPN response should not be zero"
        )

        # Conserve mode OFF: even with LB removed, the combination of
        # SourceNat + PF + VPN on one IP must make the upgrade fail
        self.debug("Upgrading the network: %s" % self.network.id)
        with self.assertRaises(Exception):
            self.network.update(
                self.apiclient,
                networkofferingid=ns_lb_offering.id,
                changecidr=True
            )
        return
class TestNOWithOnlySourceNAT(cloudstackTestCase):
    """Tests for a network offering that provides ONLY the SourceNAT service.

    Verifies that a guest network backed by such an offering can be created
    and can host a VM, and that services the offering does not include
    (here: firewall rules) are rejected by the API.
    """

    @classmethod
    def setUpClass(cls):
        """One-time setup: resolve zone/domain/template and create the
        shared service offering used by the tests in this class."""
        cls.testClient = super(TestNOWithOnlySourceNAT, cls).getClsTestClient()
        cls.apiclient = cls.testClient.getApiClient()
        cls.services = Services().services
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.apiclient)
        cls.zone = get_zone(cls.apiclient, cls.testClient.getZoneForTests())
        # Test mode follows the zone's network type (Advanced/Basic).
        cls.services['mode'] = cls.zone.networktype
        cls.template = get_template(
            cls.apiclient,
            cls.zone.id,
            cls.services["ostype"]
        )
        # Pin VM deployment parameters to the resolved zone/template.
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.services["virtual_machine"]["template"] = cls.template.id
        cls.service_offering = ServiceOffering.create(
            cls.apiclient,
            cls.services["service_offering"]
        )
        # Resources listed here are deleted in tearDownClass; individual
        # tests append their own resources (e.g. the account) to this list.
        cls.cleanup = [
            cls.service_offering,
        ]
        return

    @classmethod
    def tearDownClass(cls):
        """Delete every resource accumulated in ``cls.cleanup``."""
        try:
            # Cleanup resources used
            cleanup_resources(cls.apiclient, cls.cleanup)
        except Exception as e:
            # Re-raise so cleanup failures surface in the test run.
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced", "advancedns"], required_hardware="false")
    def test_create_network_with_snat(self):
        """Test to create a network with SourceNAT service only"""
        # Validate the following
        # 1. create a network offering with source nat service
        # 2. create a network and deploy a vm within the network
        # 3. deployment and network creation should be successful
        # 4. attempt to create a fw rule. should fail since offering hasn't allowed it
        # 5. try to ping out of the guest to www.google.com to check SourceNAT is working
        # NOTE(review): step 5 (the outbound ping from the guest) is not
        # implemented below -- confirm whether it is covered elsewhere or
        # still TODO.
        self.account = Account.create(
            self.apiclient,
            self.services["account"],
            admin=False,
            domainid=self.domain.id
        )
        # Account is removed by tearDownClass via the class cleanup list.
        self.cleanup.append(self.account)
        # Create a network offering VR and only SourceNAT service
        self.debug(
            "creating network offering with source NAT only"
        )
        self.network_offering = NetworkOffering.create(
            self.apiclient,
            self.services["network_offering_sourcenat"]
        )
        # NOTE(review): this offering is not appended to self.cleanup --
        # verify it is intentionally left behind after the test.
        # Enable Network offering
        self.network_offering.update(self.apiclient, state='Enabled')
        self.debug("Created n/w offering with ID: %s" %
                   self.network_offering.id)
        # Creating network using the network offering created
        self.debug("Creating network with network offering: %s" %
                   self.network_offering.id)
        self.network = Network.create(
            self.apiclient,
            self.services["network"],
            accountid=self.account.name,
            domainid=self.account.domainid,
            networkofferingid=self.network_offering.id,
            zoneid=self.zone.id
        )
        self.debug("Created guest network with ID: %s within account %s" % (self.network.id, self.account.name))
        self.debug("Deploying VM in account: %s on the network %s" % (self.account.name, self.network.id))
        # Spawn an instance in that network; deploying the first VM is what
        # implements the network and allocates its source NAT IP.
        VirtualMachine.create(
            self.apiclient,
            self.services["virtual_machine"],
            accountid=self.account.name,
            domainid=self.account.domainid,
            serviceofferingid=self.service_offering.id,
            networkids=[str(self.network.id)]
        )
        self.debug("Deployed VM in network: %s" % self.network.id)
        # Fetch the automatically-allocated source NAT IP of the network.
        src_nat_list = PublicIPAddress.list(
            self.apiclient,
            associatednetworkid=self.network.id,
            account=self.account.name,
            domainid=self.account.domainid,
            listall=True,
            issourcenat=True,
        )
        self.assertEqual(
            isinstance(src_nat_list, list),
            True,
            "List Public IP should return a valid source NAT"
        )
        self.assertNotEqual(
            len(src_nat_list),
            0,
            "Length of response from listPublicIp should not be 0"
        )
        src_nat = src_nat_list[0]
        self.debug("Successfully implemented network with source NAT IP: %s" %
                   src_nat.ipaddress)
        # The offering provides no Firewall service, so creating a firewall
        # rule on the source NAT IP must be rejected by the API.
        with self.assertRaises(Exception):
            FireWallRule.create(
                self.apiclient,
                ipaddressid=src_nat.id,
                protocol='TCP',
                cidrlist=[self.services["fw_rule"]["cidr"]],
                startport=self.services["fw_rule"]["startport"],
                endport=self.services["fw_rule"]["endport"]
            )
        return