repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
pbaesse/Sissens | lib/python2.7/site-packages/sqlalchemy/dialects/postgresql/array.py | 32 | 10320 | # postgresql/array.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from .base import ischema_names
from ...sql import expression, operators
from ...sql.base import SchemaEventTarget
from ... import types as sqltypes
try:
from uuid import UUID as _python_UUID
except ImportError:
_python_UUID = None
def Any(other, arrexpr, operator=operators.eq):
    """Legacy synonym for the :meth:`.ARRAY.Comparator.any` method.

    Retained for backwards-compatibility only; new code should use
    :func:`.expression.any_` instead.

    .. seealso::

        :func:`.expression.any_`
    """
    # Delegate to the comparator implementation on the array expression.
    return arrexpr.any(other, operator)
def All(other, arrexpr, operator=operators.eq):
    """Legacy synonym for the :meth:`.ARRAY.Comparator.all` method.

    Retained for backwards-compatibility only; new code should use
    :func:`.expression.all_` instead.

    .. seealso::

        :func:`.expression.all_`
    """
    # Delegate to the comparator implementation on the array expression.
    return arrexpr.all(other, operator)
class array(expression.Tuple):
    """A PostgreSQL ARRAY literal.

    Used to produce ARRAY literals in SQL expressions, e.g.::

        from sqlalchemy.dialects.postgresql import array
        from sqlalchemy.dialects import postgresql
        from sqlalchemy import select, func

        stmt = select([
                    array([1,2]) + array([3,4,5])
                ])

        print stmt.compile(dialect=postgresql.dialect())

    Produces the SQL::

        SELECT ARRAY[%(param_1)s, %(param_2)s] ||
            ARRAY[%(param_3)s, %(param_4)s, %(param_5)s] AS anon_1

    An instance of :class:`.array` will always have the datatype
    :class:`.ARRAY`.  The "inner" type of the array is inferred from the
    values present, unless the ``type_`` keyword argument is passed::

        array(['foo', 'bar'], type_=CHAR)

    .. versionadded:: 0.8 Added the :class:`~.postgresql.array` literal type.

    See also:

        :class:`.postgresql.ARRAY`

    """
    __visit_name__ = 'array'

    def __init__(self, clauses, **kw):
        super(array, self).__init__(*clauses, **kw)
        # Wrap whatever inner type Tuple inferred in the PG ARRAY type.
        self.type = ARRAY(self.type)

    def _bind_param(self, operator, obj, _assume_scalar=False, type_=None):
        if not _assume_scalar and operator is not operators.getitem:
            # Recurse element-wise, producing a nested array literal of
            # scalar bind parameters.
            return array([
                self._bind_param(operator, element,
                                 _assume_scalar=True, type_=type_)
                for element in obj])
        # if getitem->slice were called, Indexable produces a Slice object
        # from that, so only a plain integer index can arrive here.
        assert isinstance(obj, int)
        return expression.BindParameter(
            None, obj, _compared_to_operator=operator,
            type_=type_,
            _compared_to_type=self.type, unique=True)

    def self_group(self, against=None):
        needs_grouping = (
            operators.any_op, operators.all_op, operators.getitem)
        if against in needs_grouping:
            return expression.Grouping(self)
        return self
# PostgreSQL-specific array operators, exposed via ARRAY.Comparator below.
# "@>": left array's elements are a superset of the right array's elements.
CONTAINS = operators.custom_op("@>", precedence=5)
# "<@": left array's elements are contained by the right array's elements.
CONTAINED_BY = operators.custom_op("<@", precedence=5)
# "&&": the two arrays have at least one element in common.
OVERLAP = operators.custom_op("&&", precedence=5)
class ARRAY(SchemaEventTarget, sqltypes.ARRAY):
    """PostgreSQL ARRAY type.

    .. versionchanged:: 1.1 The :class:`.postgresql.ARRAY` type is now
       a subclass of the core :class:`.types.ARRAY` type.

    The :class:`.postgresql.ARRAY` type is constructed in the same way
    as the core :class:`.types.ARRAY` type; a member type is required, and a
    number of dimensions is recommended if the type is to be used for more
    than one dimension::

        from sqlalchemy.dialects import postgresql

        mytable = Table("mytable", metadata,
                Column("data", postgresql.ARRAY(Integer, dimensions=2))
            )

    The :class:`.postgresql.ARRAY` type provides all operations defined on the
    core :class:`.types.ARRAY` type, including support for "dimensions",
    indexed access, and simple matching such as
    :meth:`.types.ARRAY.Comparator.any` and
    :meth:`.types.ARRAY.Comparator.all`.  :class:`.postgresql.ARRAY` class
    also provides PostgreSQL-specific methods for containment operations,
    including :meth:`.postgresql.ARRAY.Comparator.contains`
    :meth:`.postgresql.ARRAY.Comparator.contained_by`,
    and :meth:`.postgresql.ARRAY.Comparator.overlap`, e.g.::

        mytable.c.data.contains([1, 2])

    The :class:`.postgresql.ARRAY` type may not be supported on all
    PostgreSQL DBAPIs; it is currently known to work on psycopg2 only.

    Additionally, the :class:`.postgresql.ARRAY` type does not work directly
    in conjunction with the :class:`.ENUM` type.  For a workaround, see the
    special type at :ref:`postgresql_array_of_enum`.

    .. seealso::

        :class:`.types.ARRAY` - base array type

        :class:`.postgresql.array` - produces a literal array value.

    """

    class Comparator(sqltypes.ARRAY.Comparator):
        """Define comparison operations for :class:`.ARRAY`.

        Note that these operations are in addition to those provided
        by the base :class:`.types.ARRAY.Comparator` class, including
        :meth:`.types.ARRAY.Comparator.any` and
        :meth:`.types.ARRAY.Comparator.all`.

        """

        def contains(self, other, **kwargs):
            """Boolean expression.  Test if elements are a superset of the
            elements of the argument array expression.

            ``**kwargs`` is accepted for API compatibility but is not used.
            """
            return self.operate(CONTAINS, other, result_type=sqltypes.Boolean)

        def contained_by(self, other):
            """Boolean expression.  Test if elements are a proper subset of
            the elements of the argument array expression.
            """
            return self.operate(
                CONTAINED_BY, other, result_type=sqltypes.Boolean)

        def overlap(self, other):
            """Boolean expression.  Test if array has elements in common with
            an argument array expression.
            """
            return self.operate(OVERLAP, other, result_type=sqltypes.Boolean)

    comparator_factory = Comparator

    def __init__(self, item_type, as_tuple=False, dimensions=None,
                 zero_indexes=False):
        """Construct an ARRAY.

        E.g.::

          Column('myarray', ARRAY(Integer))

        Arguments are:

        :param item_type: The data type of items of this array. Note that
          dimensionality is irrelevant here, so multi-dimensional arrays like
          ``INTEGER[][]``, are constructed as ``ARRAY(Integer)``, not as
          ``ARRAY(ARRAY(Integer))`` or such.

        :param as_tuple=False: Specify whether return results
          should be converted to tuples from lists. DBAPIs such
          as psycopg2 return lists by default. When tuples are
          returned, the results are hashable.

        :param dimensions: if non-None, the ARRAY will assume a fixed
          number of dimensions. This will cause the DDL emitted for this
          ARRAY to include the exact number of bracket clauses ``[]``,
          and will also optimize the performance of the type overall.
          Note that PG arrays are always implicitly "non-dimensioned",
          meaning they can store any number of dimensions no matter how
          they were declared.

        :param zero_indexes=False: when True, index values will be converted
          between Python zero-based and PostgreSQL one-based indexes, e.g.
          a value of one will be added to all index values before passing
          to the database.

          .. versionadded:: 0.9.5

        """
        if isinstance(item_type, ARRAY):
            raise ValueError("Do not nest ARRAY types; ARRAY(basetype) "
                             "handles multi-dimensional arrays of basetype")
        if isinstance(item_type, type):
            # Accept a type class (e.g. ``Integer``) and instantiate it.
            item_type = item_type()
        self.item_type = item_type
        self.as_tuple = as_tuple
        self.dimensions = dimensions
        self.zero_indexes = zero_indexes

    @property
    def hashable(self):
        # Tuples are hashable, lists are not; hashability tracks as_tuple.
        return self.as_tuple

    @property
    def python_type(self):
        return list

    def compare_values(self, x, y):
        return x == y

    def _set_parent(self, column):
        """Support SchemaEventTarget"""
        # Propagate schema attachment to the member type if it also
        # participates in schema events.
        if isinstance(self.item_type, SchemaEventTarget):
            self.item_type._set_parent(column)

    def _set_parent_with_dispatch(self, parent):
        """Support SchemaEventTarget"""
        if isinstance(self.item_type, SchemaEventTarget):
            self.item_type._set_parent_with_dispatch(parent)

    def _proc_array(self, arr, itemproc, dim, collection):
        # Recursively apply ``itemproc`` to each element of a (possibly
        # nested) array value, rebuilding each level with ``collection``.
        # ``dim`` counts remaining dimensions; None means "unknown", in
        # which case nesting is detected by inspecting the first element.
        if dim is None:
            arr = list(arr)
        if dim == 1 or dim is None and (
                # this has to be (list, tuple), or at least
                # not hasattr('__iter__'), since Py3K strings
                # etc. have __iter__
                not arr or not isinstance(arr[0], (list, tuple))):
            if itemproc:
                return collection(itemproc(x) for x in arr)
            else:
                return collection(arr)
        else:
            return collection(
                self._proc_array(
                    x, itemproc,
                    dim - 1 if dim is not None else None,
                    collection)
                for x in arr
            )

    def bind_processor(self, dialect):
        # Per-item bind conversion comes from the member type's
        # dialect-level implementation; it may be None (no conversion).
        item_proc = self.item_type.dialect_impl(dialect).\
            bind_processor(dialect)

        def process(value):
            if value is None:
                return value
            else:
                return self._proc_array(
                    value,
                    item_proc,
                    self.dimensions,
                    list)
        return process

    def result_processor(self, dialect, coltype):
        item_proc = self.item_type.dialect_impl(dialect).\
            result_processor(dialect, coltype)

        def process(value):
            if value is None:
                return value
            else:
                return self._proc_array(
                    value,
                    item_proc,
                    self.dimensions,
                    # honor as_tuple so result rows can be hashable
                    tuple if self.as_tuple else list)
        return process
ischema_names['_array'] = ARRAY
| gpl-3.0 |
youdonghai/intellij-community | python/lib/Lib/site-packages/django/contrib/syndication/feeds.py | 245 | 1367 | from django.contrib.syndication import views
from django.core.exceptions import ObjectDoesNotExist
import warnings
# This is part of the deprecated API
from django.contrib.syndication.views import FeedDoesNotExist, add_domain
class Feed(views.Feed):
    """Provided for backwards compatibility."""

    def __init__(self, slug, request):
        warnings.warn(
            'The syndication feeds.Feed class is deprecated. Please '
            'use the new class based view API.',
            category=DeprecationWarning)
        self.slug = slug
        self.request = request
        # An explicitly configured feed_url wins; otherwise fall back to
        # the URL that was actually requested.
        self.feed_url = getattr(self, 'feed_url', None) or request.path
        # Fill in the default per-slug template names when none were set.
        self.title_template = (
            self.title_template or 'feeds/%s_title.html' % slug)
        self.description_template = (
            self.description_template or 'feeds/%s_description.html' % slug)

    def get_object(self, bits):
        # Subclasses override this; the default feed is not bound to an
        # object.
        return None

    def get_feed(self, url=None):
        """
        Returns a feedgenerator.DefaultFeed object, fully populated, for
        this feed. Raises FeedDoesNotExist for invalid parameters.
        """
        bits = url.split('/') if url else []
        try:
            obj = self.get_object(bits)
        except ObjectDoesNotExist:
            raise FeedDoesNotExist
        return super(Feed, self).get_feed(obj, self.request)
| apache-2.0 |
ravindrapanda/tensorflow | tensorflow/contrib/distributions/python/kernel_tests/independent_test.py | 20 | 6751 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the Independent distribution."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import importlib
import numpy as np
from tensorflow.contrib.distributions.python.ops import independent as independent_lib
from tensorflow.contrib.distributions.python.ops import mvn_diag as mvn_diag_lib
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.distributions import bernoulli as bernoulli_lib
from tensorflow.python.ops.distributions import normal as normal_lib
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging
def try_import(name):  # pylint: disable=invalid-name
  """Import module `name`, returning None (with a warning) if unavailable."""
  try:
    return importlib.import_module(name)
  except ImportError as e:
    tf_logging.warning("Could not import %s: %s" % (name, str(e)))
    return None
stats = try_import("scipy.stats")
class ProductDistributionTest(test.TestCase):
  """Tests for the Independent (product) distribution.

  Samples and log-probs are checked against scipy.stats references, and
  shape bookkeeping is verified for both static and dynamic shapes.
  """

  def setUp(self):
    # Fixed seed so the logits fixture in _testMnistLike is reproducible.
    self._rng = np.random.RandomState(42)

  def testSampleAndLogProbUnivariate(self):
    loc = np.float32([-1., 1])
    scale = np.float32([0.1, 0.5])
    with self.test_session() as sess:
      ind = independent_lib.Independent(
          distribution=normal_lib.Normal(loc=loc, scale=scale),
          reinterpreted_batch_ndims=1)
      x = ind.sample([4, 5], seed=42)
      log_prob_x = ind.log_prob(x)
      x_, actual_log_prob_x = sess.run([x, log_prob_x])
      # Reinterpreting the batch dim as an event dim: batch [] / event [2].
      self.assertEqual([], ind.batch_shape)
      self.assertEqual([2], ind.event_shape)
      self.assertEqual([4, 5, 2], x.shape)
      self.assertEqual([4, 5], log_prob_x.shape)
      # Independent log_prob == sum of component Normal log-pdfs.
      expected_log_prob_x = stats.norm(loc, scale).logpdf(x_).sum(-1)
      self.assertAllClose(expected_log_prob_x, actual_log_prob_x,
                          rtol=1e-5, atol=0.)

  def testSampleAndLogProbMultivariate(self):
    loc = np.float32([[-1., 1], [1, -1]])
    scale = np.float32([1., 0.5])
    with self.test_session() as sess:
      ind = independent_lib.Independent(
          distribution=mvn_diag_lib.MultivariateNormalDiag(
              loc=loc,
              scale_identity_multiplier=scale),
          reinterpreted_batch_ndims=1)
      x = ind.sample([4, 5], seed=42)
      log_prob_x = ind.log_prob(x)
      x_, actual_log_prob_x = sess.run([x, log_prob_x])
      self.assertEqual([], ind.batch_shape)
      self.assertEqual([2, 2], ind.event_shape)
      self.assertEqual([4, 5, 2, 2], x.shape)
      self.assertEqual([4, 5], log_prob_x.shape)
      # Sum over both event dims (MVN components and the reinterpreted dim).
      expected_log_prob_x = stats.norm(loc, scale[:, None]).logpdf(
          x_).sum(-1).sum(-1)
      self.assertAllClose(expected_log_prob_x, actual_log_prob_x,
                          rtol=1e-6, atol=0.)

  def testSampleConsistentStats(self):
    loc = np.float32([[-1., 1], [1, -1]])
    scale = np.float32([1., 0.5])
    n_samp = 1e4
    with self.test_session() as sess:
      ind = independent_lib.Independent(
          distribution=mvn_diag_lib.MultivariateNormalDiag(
              loc=loc,
              scale_identity_multiplier=scale),
          reinterpreted_batch_ndims=1)
      x = ind.sample(int(n_samp), seed=42)
      sample_mean = math_ops.reduce_mean(x, axis=0)
      sample_var = math_ops.reduce_mean(
          math_ops.squared_difference(x, sample_mean), axis=0)
      sample_std = math_ops.sqrt(sample_var)
      sample_entropy = -math_ops.reduce_mean(ind.log_prob(x), axis=0)
      [
          sample_mean_, sample_var_, sample_std_, sample_entropy_,
          actual_mean_, actual_var_, actual_std_, actual_entropy_,
          actual_mode_,
      ] = sess.run([
          sample_mean, sample_var, sample_std, sample_entropy,
          ind.mean(), ind.variance(), ind.stddev(), ind.entropy(), ind.mode(),
      ])
      # Monte Carlo estimates, hence the loose relative tolerances.
      self.assertAllClose(sample_mean_, actual_mean_, rtol=0.02, atol=0.)
      self.assertAllClose(sample_var_, actual_var_, rtol=0.04, atol=0.)
      self.assertAllClose(sample_std_, actual_std_, rtol=0.02, atol=0.)
      self.assertAllClose(sample_entropy_, actual_entropy_, rtol=0.01, atol=0.)
      self.assertAllClose(loc, actual_mode_, rtol=1e-6, atol=0.)

  def _testMnistLike(self, static_shape):
    # Shared driver for the MNIST-like Bernoulli tests; `static_shape`
    # selects whether logits' shape is known at graph-construction time.
    sample_shape = [4, 5]
    batch_shape = [10]
    image_shape = [28, 28, 1]
    logits = 3 * self._rng.random_sample(
        batch_shape + image_shape).astype(np.float32) - 1

    def expected_log_prob(x, logits):
      # Bernoulli log-likelihood summed over the three image dims.
      return (x * logits - np.log1p(np.exp(logits))).sum(-1).sum(-1).sum(-1)

    with self.test_session() as sess:
      logits_ph = array_ops.placeholder(
          dtypes.float32, shape=logits.shape if static_shape else None)
      ind = independent_lib.Independent(
          distribution=bernoulli_lib.Bernoulli(logits=logits_ph))
      x = ind.sample(sample_shape, seed=42)
      log_prob_x = ind.log_prob(x)
      [
          x_,
          actual_log_prob_x,
          ind_batch_shape,
          ind_event_shape,
          x_shape,
          log_prob_x_shape,
      ] = sess.run([
          x,
          log_prob_x,
          ind.batch_shape_tensor(),
          ind.event_shape_tensor(),
          array_ops.shape(x),
          array_ops.shape(log_prob_x),
      ], feed_dict={logits_ph: logits})
      if static_shape:
        # With static shapes, prefer the Python-side shape properties.
        ind_batch_shape = ind.batch_shape
        ind_event_shape = ind.event_shape
        x_shape = x.shape
        log_prob_x_shape = log_prob_x.shape
      self.assertAllEqual(batch_shape, ind_batch_shape)
      self.assertAllEqual(image_shape, ind_event_shape)
      self.assertAllEqual(sample_shape + batch_shape + image_shape, x_shape)
      self.assertAllEqual(sample_shape + batch_shape, log_prob_x_shape)
      self.assertAllClose(expected_log_prob(x_, logits),
                          actual_log_prob_x,
                          rtol=1e-6, atol=0.)

  def testMnistLikeStaticShape(self):
    self._testMnistLike(static_shape=True)

  def testMnistLikeDynamicShape(self):
    self._testMnistLike(static_shape=False)
# Standard TensorFlow test entry point.
if __name__ == "__main__":
  test.main()
| apache-2.0 |
zerobatu/edx-platform | common/test/acceptance/tests/studio/test_studio_library.py | 19 | 27414 | """
Acceptance tests for Content Libraries in Studio
"""
from ddt import ddt, data
from nose.plugins.attrib import attr
from flaky import flaky
from .base_studio_test import StudioLibraryTest
from ...fixtures.course import XBlockFixtureDesc
from ...pages.studio.auto_auth import AutoAuthPage
from ...pages.studio.utils import add_component
from ...pages.studio.library import LibraryEditPage
from ...pages.studio.users import LibraryUsersPage
@attr('shard_2')
@ddt
class LibraryEditPageTest(StudioLibraryTest):
    """
    Test the functionality of the library edit page.

    Each test drives a real browser through the Studio library UI via the
    LibraryEditPage page object; step order within a test is significant.
    """
    def setUp(self):  # pylint: disable=arguments-differ
        """
        Ensure a library exists and navigate to the library edit page.
        """
        super(LibraryEditPageTest, self).setUp()
        self.lib_page = LibraryEditPage(self.browser, self.library_key)
        self.lib_page.visit()
        self.lib_page.wait_until_ready()

    def test_page_header(self):
        """
        Scenario: Ensure that the library's name is displayed in the header and title.
        Given I have a library in Studio
        And I navigate to Library Page in Studio
        Then I can see library name in page header title
        And I can see library name in browser page title
        """
        self.assertIn(self.library_info['display_name'], self.lib_page.get_header_title())
        self.assertIn(self.library_info['display_name'], self.browser.title)

    def test_add_duplicate_delete_actions(self):
        """
        Scenario: Ensure that we can add an HTML block, duplicate it, then delete the original.
        Given I have a library in Studio with no XBlocks
        And I navigate to Library Page in Studio
        Then there are no XBlocks displayed
        When I add Text XBlock
        Then one XBlock is displayed
        When I duplicate first XBlock
        Then two XBlocks are displayed
        And those XBlocks locators' are different
        When I delete first XBlock
        Then one XBlock is displayed
        And displayed XBlock are second one
        """
        self.assertEqual(len(self.lib_page.xblocks), 0)
        # Create a new block:
        add_component(self.lib_page, "html", "Text")
        self.assertEqual(len(self.lib_page.xblocks), 1)
        first_block_id = self.lib_page.xblocks[0].locator
        # Duplicate the block:
        self.lib_page.click_duplicate_button(first_block_id)
        self.assertEqual(len(self.lib_page.xblocks), 2)
        second_block_id = self.lib_page.xblocks[1].locator
        # The duplicate must be a distinct block, not a second handle to the
        # original.
        self.assertNotEqual(first_block_id, second_block_id)
        # Delete the first block:
        self.lib_page.click_delete_button(first_block_id, confirm=True)
        self.assertEqual(len(self.lib_page.xblocks), 1)
        self.assertEqual(self.lib_page.xblocks[0].locator, second_block_id)

    def test_no_edit_visibility_button(self):
        """
        Scenario: Ensure that library xblocks do not have 'edit visibility' buttons.
        Given I have a library in Studio with no XBlocks
        And I navigate to Library Page in Studio
        When I add Text XBlock
        Then one XBlock is displayed
        And no 'edit visibility' button is shown
        """
        add_component(self.lib_page, "html", "Text")
        self.assertFalse(self.lib_page.xblocks[0].has_edit_visibility_button)

    def test_add_edit_xblock(self):
        """
        Scenario: Ensure that we can add an XBlock, edit it, then see the resulting changes.
        Given I have a library in Studio with no XBlocks
        And I navigate to Library Page in Studio
        Then there are no XBlocks displayed
        When I add Multiple Choice XBlock
        Then one XBlock is displayed
        When I edit first XBlock
        And I go to basic tab
        And set it's text to a fairly trivial question about Battlestar Galactica
        And save XBlock
        Then one XBlock is displayed
        And first XBlock student content contains at least part of text I set
        """
        self.assertEqual(len(self.lib_page.xblocks), 0)
        # Create a new problem block:
        add_component(self.lib_page, "problem", "Multiple Choice")
        self.assertEqual(len(self.lib_page.xblocks), 1)
        problem_block = self.lib_page.xblocks[0]
        # Edit it:
        problem_block.edit()
        problem_block.open_basic_tab()
        problem_block.set_codemirror_text(
            """
            >>Who is "Starbuck"?<<
             (x) Kara Thrace
             ( ) William Adama
             ( ) Laura Roslin
             ( ) Lee Adama
             ( ) Gaius Baltar
            """
        )
        problem_block.save_settings()
        # Check that the save worked:
        self.assertEqual(len(self.lib_page.xblocks), 1)
        problem_block = self.lib_page.xblocks[0]
        self.assertIn("Laura Roslin", problem_block.student_content)

    def test_no_discussion_button(self):
        """
        Ensure the UI is not loaded for adding discussions.
        """
        self.assertFalse(self.browser.find_elements_by_css_selector('span.large-discussion-icon'))

    @flaky  # TODO fix this, see TNL-2322
    def test_library_pagination(self):
        """
        Scenario: Ensure that adding several XBlocks to a library results in pagination.
        Given that I have a library in Studio with no XBlocks
        And I create 10 Multiple Choice XBlocks
        Then 10 are displayed.
        When I add one more Multiple Choice XBlock
        Then 1 XBlock will be displayed
        When I delete that XBlock
        Then 10 are displayed.
        """
        self.assertEqual(len(self.lib_page.xblocks), 0)
        # Page size is 10, so ten blocks exactly fill page 1.
        for _ in range(10):
            add_component(self.lib_page, "problem", "Multiple Choice")
        self.assertEqual(len(self.lib_page.xblocks), 10)
        add_component(self.lib_page, "problem", "Multiple Choice")
        self.assertEqual(len(self.lib_page.xblocks), 1)
        self.lib_page.click_delete_button(self.lib_page.xblocks[0].locator)
        self.assertEqual(len(self.lib_page.xblocks), 10)

    @data('top', 'bottom')
    def test_nav_present_but_disabled(self, position):
        """
        Scenario: Ensure that the navigation buttons aren't active when there aren't enough XBlocks.
        Given that I have a library in Studio with no XBlocks
        The Navigation buttons should be disabled.
        When I add a multiple choice problem
        The Navigation buttons should be disabled.
        """
        self.assertEqual(len(self.lib_page.xblocks), 0)
        self.assertTrue(self.lib_page.nav_disabled(position))
        add_component(self.lib_page, "problem", "Multiple Choice")
        self.assertTrue(self.lib_page.nav_disabled(position))

    def test_delete_deletes_only_desired_block(self):
        """
        Scenario: Ensure that when deleting XBlock only desired XBlock is deleted
        Given that I have a library in Studio with no XBlocks
        And I create Blank Common Problem XBlock
        And I create Checkboxes XBlock
        When I delete Blank Problem XBlock
        Then Checkboxes XBlock is not deleted
        And Blank Common Problem XBlock is deleted
        """
        self.assertEqual(len(self.lib_page.xblocks), 0)
        add_component(self.lib_page, "problem", "Blank Common Problem")
        add_component(self.lib_page, "problem", "Checkboxes")
        self.assertEqual(len(self.lib_page.xblocks), 2)
        self.assertIn("Blank Common Problem", self.lib_page.xblocks[0].name)
        self.assertIn("Checkboxes", self.lib_page.xblocks[1].name)
        self.lib_page.click_delete_button(self.lib_page.xblocks[0].locator)
        self.assertEqual(len(self.lib_page.xblocks), 1)
        problem_block = self.lib_page.xblocks[0]
        self.assertIn("Checkboxes", problem_block.name)
@attr('shard_5')
@ddt
class LibraryNavigationTest(StudioLibraryTest):
"""
Test common Navigation actions
"""
def setUp(self): # pylint: disable=arguments-differ
"""
Ensure a library exists and navigate to the library edit page.
"""
super(LibraryNavigationTest, self).setUp()
self.lib_page = LibraryEditPage(self.browser, self.library_key)
self.lib_page.visit()
self.lib_page.wait_until_ready()
def populate_library_fixture(self, library_fixture):
"""
Create four pages worth of XBlocks, and offset by one so each is named
after the number they should be in line by the user's perception.
"""
# pylint: disable=attribute-defined-outside-init
self.blocks = [XBlockFixtureDesc('html', str(i)) for i in xrange(1, 41)]
library_fixture.add_children(*self.blocks)
def test_arbitrary_page_selection(self):
"""
Scenario: I can pick a specific page number of a Library at will.
Given that I have a library in Studio with 40 XBlocks
When I go to the 3rd page
The first XBlock should be the 21st XBlock
When I go to the 4th Page
The first XBlock should be the 31st XBlock
When I go to the 1st page
The first XBlock should be the 1st XBlock
When I go to the 2nd page
The first XBlock should be the 11th XBlock
"""
self.lib_page.go_to_page(3)
self.assertEqual(self.lib_page.xblocks[0].name, '21')
self.lib_page.go_to_page(4)
self.assertEqual(self.lib_page.xblocks[0].name, '31')
self.lib_page.go_to_page(1)
self.assertEqual(self.lib_page.xblocks[0].name, '1')
self.lib_page.go_to_page(2)
self.assertEqual(self.lib_page.xblocks[0].name, '11')
def test_bogus_page_selection(self):
"""
Scenario: I can't pick a nonsense page number of a Library
Given that I have a library in Studio with 40 XBlocks
When I attempt to go to the 'a'th page
The input field will be cleared and no change of XBlocks will be made
When I attempt to visit the 5th page
The input field will be cleared and no change of XBlocks will be made
When I attempt to visit the -1st page
The input field will be cleared and no change of XBlocks will be made
When I attempt to visit the 0th page
The input field will be cleared and no change of XBlocks will be made
"""
self.assertEqual(self.lib_page.xblocks[0].name, '1')
self.lib_page.go_to_page('a')
self.assertTrue(self.lib_page.check_page_unchanged('1'))
self.lib_page.go_to_page(-1)
self.assertTrue(self.lib_page.check_page_unchanged('1'))
self.lib_page.go_to_page(5)
self.assertTrue(self.lib_page.check_page_unchanged('1'))
self.lib_page.go_to_page(0)
self.assertTrue(self.lib_page.check_page_unchanged('1'))
@data('top', 'bottom')
def test_nav_buttons(self, position):
"""
Scenario: Ensure that the navigation buttons work.
Given that I have a library in Studio with 40 XBlocks
The previous button should be disabled.
The first XBlock should be the 1st XBlock
Then if I hit the next button
The first XBlock should be the 11th XBlock
Then if I hit the next button
The first XBlock should be the 21st XBlock
Then if I hit the next button
The first XBlock should be the 31st XBlock
And the next button should be disabled
Then if I hit the previous button
The first XBlock should be the 21st XBlock
Then if I hit the previous button
The first XBlock should be the 11th XBlock
Then if I hit the previous button
The first XBlock should be the 1st XBlock
And the previous button should be disabled
"""
# Check forward navigation
self.assertTrue(self.lib_page.nav_disabled(position, ['previous']))
self.assertEqual(self.lib_page.xblocks[0].name, '1')
self.lib_page.move_forward(position)
self.assertEqual(self.lib_page.xblocks[0].name, '11')
self.lib_page.move_forward(position)
self.assertEqual(self.lib_page.xblocks[0].name, '21')
self.lib_page.move_forward(position)
self.assertEqual(self.lib_page.xblocks[0].name, '31')
self.lib_page.nav_disabled(position, ['next'])
# Check backward navigation
self.lib_page.move_back(position)
self.assertEqual(self.lib_page.xblocks[0].name, '21')
self.lib_page.move_back(position)
self.assertEqual(self.lib_page.xblocks[0].name, '11')
self.lib_page.move_back(position)
self.assertEqual(self.lib_page.xblocks[0].name, '1')
self.assertTrue(self.lib_page.nav_disabled(position, ['previous']))
def test_library_pagination(self):
"""
Scenario: Ensure that adding several XBlocks to a library results in pagination.
Given that I have a library in Studio with 40 XBlocks
Then 10 are displayed
And the first XBlock will be the 1st one
And I'm on the 1st page
When I add 1 Multiple Choice XBlock
Then 1 XBlock will be displayed
And I'm on the 5th page
The first XBlock will be the newest one
When I delete that XBlock
Then 10 are displayed
And I'm on the 4th page
And the first XBlock is the 31st one
And the last XBlock is the 40th one.
"""
self.assertEqual(len(self.lib_page.xblocks), 10)
self.assertEqual(self.lib_page.get_page_number(), '1')
self.assertEqual(self.lib_page.xblocks[0].name, '1')
add_component(self.lib_page, "problem", "Multiple Choice")
self.assertEqual(len(self.lib_page.xblocks), 1)
self.assertEqual(self.lib_page.get_page_number(), '5')
self.assertEqual(self.lib_page.xblocks[0].name, "Multiple Choice")
self.lib_page.click_delete_button(self.lib_page.xblocks[0].locator)
self.assertEqual(len(self.lib_page.xblocks), 10)
self.assertEqual(self.lib_page.get_page_number(), '4')
self.assertEqual(self.lib_page.xblocks[0].name, '31')
self.assertEqual(self.lib_page.xblocks[-1].name, '40')
def test_delete_shifts_blocks(self):
"""
Scenario: Ensure that removing an XBlock shifts other blocks back.
Given that I have a library in Studio with 40 XBlocks
Then 10 are displayed
And I will be on the first page
When I delete the third XBlock
There will be 10 displayed
And the first XBlock will be the first one
And the last XBlock will be the 11th one
And I will be on the first page
"""
self.assertEqual(len(self.lib_page.xblocks), 10)
self.assertEqual(self.lib_page.get_page_number(), '1')
self.lib_page.click_delete_button(self.lib_page.xblocks[2].locator, confirm=True)
self.assertEqual(len(self.lib_page.xblocks), 10)
self.assertEqual(self.lib_page.xblocks[0].name, '1')
self.assertEqual(self.lib_page.xblocks[-1].name, '11')
self.assertEqual(self.lib_page.get_page_number(), '1')
def test_previews(self):
"""
Scenario: Ensure the user is able to hide previews of XBlocks.
Given that I have a library in Studio with 40 XBlocks
Then previews are visible
And when I click the toggle previews button
Then the previews will not be visible
And when I click the toggle previews button
Then the previews are visible
"""
self.assertTrue(self.lib_page.are_previews_showing())
self.lib_page.toggle_previews()
self.assertFalse(self.lib_page.are_previews_showing())
self.lib_page.toggle_previews()
self.assertTrue(self.lib_page.are_previews_showing())
def test_previews_navigation(self):
"""
Scenario: Ensure preview settings persist across navigation.
Given that I have a library in Studio with 40 XBlocks
Then previews are visible
And when I click the toggle previews button
And click the next page button
Then the previews will not be visible
And the first XBlock will be the 11th one
And the last XBlock will be the 20th one
And when I click the toggle previews button
And I click the previous page button
Then the previews will be visible
And the first XBlock will be the first one
And the last XBlock will be the 11th one
"""
self.assertTrue(self.lib_page.are_previews_showing())
self.lib_page.toggle_previews()
# Which set of arrows shouldn't matter for this test.
self.lib_page.move_forward('top')
self.assertFalse(self.lib_page.are_previews_showing())
self.assertEqual(self.lib_page.xblocks[0].name, '11')
self.assertEqual(self.lib_page.xblocks[-1].name, '20')
self.lib_page.toggle_previews()
self.lib_page.move_back('top')
self.assertTrue(self.lib_page.are_previews_showing())
self.assertEqual(self.lib_page.xblocks[0].name, '1')
self.assertEqual(self.lib_page.xblocks[-1].name, '10')
def test_preview_state_persistance(self):
"""
Scenario: Ensure preview state persists between page loads.
Given that I have a library in Studio with 40 XBlocks
Then previews are visible
And when I click the toggle previews button
And I revisit the page
Then the previews will not be visible
"""
self.assertTrue(self.lib_page.are_previews_showing())
self.lib_page.toggle_previews()
self.lib_page.visit()
self.lib_page.wait_until_ready()
self.assertFalse(self.lib_page.are_previews_showing())
def test_preview_add_xblock(self):
"""
Scenario: Ensure previews are shown when adding new blocks, regardless of preview setting.
Given that I have a library in Studio with 40 XBlocks
Then previews are visible
And when I click the toggle previews button
Then the previews will not be visible
And when I add an XBlock
Then I will be on the 5th page
And the XBlock will have loaded a preview
And when I revisit the library
And I go to the 5th page
Then the top XBlock will be the one I added
And it will not have a preview
And when I add an XBlock
Then the XBlock I added will have a preview
And the top XBlock will not have one.
"""
self.assertTrue(self.lib_page.are_previews_showing())
self.lib_page.toggle_previews()
self.assertFalse(self.lib_page.are_previews_showing())
add_component(self.lib_page, "problem", "Checkboxes")
self.assertEqual(self.lib_page.get_page_number(), '5')
first_added = self.lib_page.xblocks[0]
self.assertIn("Checkboxes", first_added.name)
self.assertFalse(self.lib_page.xblocks[0].is_placeholder())
self.lib_page.visit()
self.lib_page.wait_until_ready()
self.lib_page.go_to_page(5)
self.assertTrue(self.lib_page.xblocks[0].is_placeholder())
add_component(self.lib_page, "problem", "Multiple Choice")
# DOM has detatched the element since last assignment
first_added = self.lib_page.xblocks[0]
second_added = self.lib_page.xblocks[1]
self.assertIn("Multiple Choice", second_added.name)
self.assertFalse(second_added.is_placeholder())
self.assertTrue(first_added.is_placeholder())
    def test_edit_with_preview(self):
        """
        Scenario: Editing an XBlock should show me a preview even if previews are hidden.
        Given that I have a library in Studio with 40 XBlocks
        Then previews are visible
        And when I click the toggle previews button
        Then the previews will not be visible
        And when I edit the first XBlock
        Then the first XBlock will show a preview
        And the other XBlocks will still be placeholders
        """
        self.assertTrue(self.lib_page.are_previews_showing())
        self.lib_page.toggle_previews()
        self.assertFalse(self.lib_page.are_previews_showing())
        # Open and immediately save the editor; this alone should force a preview.
        target = self.lib_page.xblocks[0]
        target.edit()
        target.save_settings()
        self.assertFalse(target.is_placeholder())
        self.assertTrue(all([xblock.is_placeholder() for xblock in self.lib_page.xblocks[1:]]))
    def test_duplicate_xblock_pagination(self):
        """
        Scenario: Duplicating an XBlock should not shift the page if the XBlock is not at the end.
        Given that I have a library in Studio with 40 XBlocks
        When I duplicate the third XBlock
        Then the page should not change
        And the duplicate XBlock should be there
        And it should show a preview
        And there should not be more than 10 XBlocks visible.
        """
        third_block_id = self.lib_page.xblocks[2].locator
        self.lib_page.click_duplicate_button(third_block_id)
        self.lib_page.wait_until_ready()
        # The copy is inserted directly after the original (index 3).
        target = self.lib_page.xblocks[3]
        self.assertIn('Duplicate', target.name)
        self.assertFalse(target.is_placeholder())
        self.assertEqual(len(self.lib_page.xblocks), 10)
    def test_duplicate_xblock_pagination_end(self):
        """
        Scenario: Duplicating an XBlock if it's the last one should bring me to the next page with a preview.
        Given that I have a library in Studio with 40 XBlocks
        And when I hide previews
        And I duplicate the last XBlock
        The page should change to page 2
        And the duplicate XBlock should be the first XBlock
        And it should not be a placeholder
        """
        self.lib_page.toggle_previews()
        last_block_id = self.lib_page.xblocks[-1].locator
        self.lib_page.click_duplicate_button(last_block_id)
        self.lib_page.wait_until_ready()
        # Duplicating the last item on a full page pushes the copy onto page 2.
        self.assertEqual(self.lib_page.get_page_number(), '2')
        target_block = self.lib_page.xblocks[0]
        self.assertIn('Duplicate', target_block.name)
        self.assertFalse(target_block.is_placeholder())
class LibraryUsersPageTest(StudioLibraryTest):
    """
    Test the functionality of the library "Instructor Access" page.
    """
    def setUp(self):
        super(LibraryUsersPageTest, self).setUp()
        # Create a second user for use in these tests:
        AutoAuthPage(self.browser, username="second", email="second@example.com", no_login=True).visit()
        self.page = LibraryUsersPage(self.browser, self.library_key)
        self.page.visit()
    def _refresh_page(self):
        """
        Reload the page and wait until it has finished loading.
        """
        self.page = LibraryUsersPage(self.browser, self.library_key)
        self.page.visit()
        self.page.wait_until_no_loading_indicator()
    @flaky  # TODO fix this; see TNL-2647
    def test_user_management(self):
        """
        Scenario: Ensure that we can edit the permissions of users.
        Given I have a library in Studio where I am the only admin
        assigned (which is the default for a newly-created library)
        And I navigate to Library "Instructor Access" Page in Studio
        Then there should be one user listed (myself), and I must
        not be able to remove myself or my instructor privilege.
        When I click Add Instructor
        Then I see a form to complete
        When I complete the form and submit it
        Then I can see the new user is listed as a "User" of the library
        When I click to Add Staff permissions to the new user
        Then I can see the new user has staff permissions and that I am now
        able to promote them to an Admin or remove their staff permissions.
        When I click to Add Admin permissions to the new user
        Then I can see the new user has admin permissions and that I can now
        remove Admin permissions from either user.
        """
        def check_is_only_admin(user):
            """
            Ensure user is an admin user and cannot be removed.
            (There must always be at least one admin user.)
            """
            self.assertIn("admin", user.role_label.lower())
            self.assertFalse(user.can_promote)
            self.assertFalse(user.can_demote)
            self.assertFalse(user.can_delete)
            self.assertTrue(user.has_no_change_warning)
            self.assertIn("Promote another member to Admin to remove your admin rights", user.no_change_warning_text)
        # Initially, I am the only (admin) user:
        self.assertEqual(len(self.page.users), 1)
        user = self.page.users[0]
        self.assertTrue(user.is_current_user)
        check_is_only_admin(user)
        # Add a new user:
        self.assertTrue(self.page.has_add_button)
        self.assertFalse(self.page.new_user_form_visible)
        self.page.click_add_button()
        self.assertTrue(self.page.new_user_form_visible)
        self.page.set_new_user_email('second@example.com')
        self.page.click_submit_new_user_form()
        # Check the new user's listing:
        def get_two_users():
            """
            Expect two users to be listed, one being me, and another user.
            Returns me, them
            """
            users = self.page.users
            self.assertEqual(len(users), 2)
            self.assertEqual(len([u for u in users if u.is_current_user]), 1)
            if users[0].is_current_user:
                return users[0], users[1]
            else:
                return users[1], users[0]
        self._refresh_page()
        user_me, them = get_two_users()
        check_is_only_admin(user_me)
        # The new user starts with plain "user" role:
        self.assertIn("user", them.role_label.lower())
        self.assertTrue(them.can_promote)
        self.assertIn("Add Staff Access", them.promote_button_text)
        self.assertFalse(them.can_demote)
        self.assertTrue(them.can_delete)
        self.assertFalse(them.has_no_change_warning)
        # Add Staff permissions to the new user:
        them.click_promote()
        self._refresh_page()
        user_me, them = get_two_users()
        check_is_only_admin(user_me)
        self.assertIn("staff", them.role_label.lower())
        self.assertTrue(them.can_promote)
        self.assertIn("Add Admin Access", them.promote_button_text)
        self.assertTrue(them.can_demote)
        self.assertIn("Remove Staff Access", them.demote_button_text)
        self.assertTrue(them.can_delete)
        self.assertFalse(them.has_no_change_warning)
        # Add Admin permissions to the new user:
        them.click_promote()
        self._refresh_page()
        user_me, them = get_two_users()
        # With two admins, my own rights become removable:
        self.assertIn("admin", user_me.role_label.lower())
        self.assertFalse(user_me.can_promote)
        self.assertTrue(user_me.can_demote)
        self.assertTrue(user_me.can_delete)
        self.assertFalse(user_me.has_no_change_warning)
        self.assertIn("admin", them.role_label.lower())
        self.assertFalse(them.can_promote)
        self.assertTrue(them.can_demote)
        self.assertIn("Remove Admin Access", them.demote_button_text)
        self.assertTrue(them.can_delete)
        self.assertFalse(them.has_no_change_warning)
        # Delete the new user:
        them.click_delete()
        self._refresh_page()
        self.assertEqual(len(self.page.users), 1)
        user = self.page.users[0]
        self.assertTrue(user.is_current_user)
| agpl-3.0 |
pombredanne/pyjs | examples/clickablerootpanel/ClickableRootPanel.py | 11 | 4034 | import pyjd # this is dummy in pyjs.
from pyjamas.ui.RootPanel import RootPanelCls, RootPanel, manageRootPanel
from pyjamas.ui.Button import Button
from pyjamas.ui.FocusPanel import FocusPanel
from pyjamas.ui.KeyboardListener import KeyboardHandler
from pyjamas.ui.ClickListener import ClickHandler
from pyjamas.ui.HTML import HTML
from pyjamas import Window
from pyjamas import DOM
from __pyjamas__ import doc
class RootPanelListener(RootPanelCls, KeyboardHandler, ClickHandler):
    """Root panel that toggles keyboard focus of a child widget on click.

    Clicks anywhere on the root panel flip the focus of ``Parent`` (a
    focusable widget) and log the new state; key-down events are echoed
    into the panel as HTML lines.
    """
    def __init__(self, Parent, *args, **kwargs):
        # Parent: the FocusPanel (or similar) whose focus we toggle.
        self.Parent = Parent
        # Tracks whether the parent currently has focus.
        self.focussed = False
        RootPanelCls.__init__(self, *args, **kwargs)
        ClickHandler.__init__(self)
        KeyboardHandler.__init__(self)
        # Listen to our own click/key events.
        self.addClickListener(self)
        self.addKeyboardListener(self)
    def onClick(self, Sender):
        # Flip focus state and reflect it onto the parent widget.
        self.focussed = not self.focussed
        self.Parent.setFocus(self.focussed)
        txt = self.focussed and 'yes. now press keys' or 'no. keys fail now'
        self.add(HTML('focus: %s' % txt))
    def onKeyDown(self, sender, keyCode, modifiers = None):
        # Echo the raw key code so the user can see key events arriving.
        self.add(HTML('keyDOWN: %d' % keyCode))
def heightset(fred):
    """Button callback: stretch the document body to 100% height."""
    DOM.setStyleAttribute(doc().body, 'height', '100%')
def marginset(fred):
    """Button callback: remove the default margin of the document body."""
    DOM.setStyleAttribute(doc().body, 'margin', '0px')
if __name__ == '__main__':
    # pyjd.setup is a no-op in the browser; in pyjd desktop mode it loads
    # the given HTML shell.
    pyjd.setup("public/ClickableRootPanel.html")
    bh = Button("Click me to set body height to 100%", heightset,
                StyleName='teststyle')
    b = Button("Click me to set body margin to 0", marginset,
               StyleName='teststyle')
    h = HTML("<b>Hello World</b> - watch for focus highlighting after click",
             StyleName='teststyle')
    panel = FocusPanel(Widget=h)
    gp = RootPanelListener(panel, StyleName='rootstyle')
    # as this is replacing the 'normal' usage of RootPanel(),
    # it is necessary to add this in 'by hand' so that the
    # window-close notification is 'hooked into', and so when
    # the browser window is closed, cleanups (onDetach) will
    # be triggered.
    #
    # calling manageRootPanel (with the default arg id=None)
    # also has the advantage that RootPanel(id=None) will
    # 'pick up' the above RootPanelListener instance, meaning
    # that you don't need to have a silly global variable
    # all over the place, you can just use the standard pyjamas
    # API as normal.
    #
    # kinda cute.
    manageRootPanel(gp)
info = """Click anywhere in the Root (grey) to activate key input;
click again to disable it. Note the focus highlighting
that occurs on the "Hello World" HTML box.
<br /> <br />
The CSS style has been set to 100% width
and the margin to 100px. Even though it is the "body" - root
element, clicking outside the margin (indicated by the black border)
will NOT activate key input.
<br /><br />
Note that many browsers screw up the sizes of the window when the
margin is set as well as width or height to 100%, as evidenced by
the black border being off the screen. (Normally, you would add a
WindowResize Listener which received the window size and then
directly adjusted the CSS width and height of the body element
to correct these problems (!) or, much better, add a SimplePanel
on which the appropriate (100% width+height) CSS styles are set).
<br /> <br />
However that's not the issue: the point is that you <b>must</b>
actually set the body to be 100% of the screen area in order to
receive click events, and the above highlights why it is important
to set margin and padding of body to 0 as well, and also to not
set any borders.
<br /> <br />
Click the button to change the margin on the document "body" tag
to zero, in order to test this out. Note that the border may still
be off the screen, even when the margin is zero.
<br /> <br />
"""
RootPanel().add(panel)
RootPanel().add(b)
RootPanel().add(bh)
RootPanel().add(HTML(info))
pyjd.run()
| apache-2.0 |
Edu-Glez/Bank_sentiment_analysis | env/lib/python3.6/site-packages/jupyter_client/ioloop/restarter.py | 12 | 1684 | """A basic in process kernel monitor with autorestarting.
This watches a kernel's state using KernelManager.is_alive and auto
restarts the kernel if it dies.
"""
#-----------------------------------------------------------------------------
# Copyright (c) The Jupyter Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from __future__ import absolute_import
from zmq.eventloop import ioloop
from jupyter_client.restarter import KernelRestarter
from traitlets import (
Instance,
)
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
class IOLoopKernelRestarter(KernelRestarter):
    """Monitor and autorestart a kernel."""

    loop = Instance('zmq.eventloop.ioloop.IOLoop')

    def _loop_default(self):
        # Lazily bind to the process-wide IOLoop singleton.
        return ioloop.IOLoop.instance()

    # Handle of the active PeriodicCallback; None while not polling.
    _pcallback = None

    def start(self):
        """Start the polling of the kernel."""
        if self._pcallback is not None:
            return  # already polling
        callback = ioloop.PeriodicCallback(
            self.poll, 1000 * self.time_to_dead, self.loop
        )
        self._pcallback = callback
        callback.start()

    def stop(self):
        """Stop the kernel polling."""
        if self._pcallback is None:
            return  # not polling
        self._pcallback.stop()
        self._pcallback = None
| apache-2.0 |
dot-bob/Marlin | buildroot/share/atom/create_custom_upload_command_DFU.py | 45 | 1370 | #
# Builds custom upload command
# 1) Run platformio as a subprocess to find a COM port
# 2) Build the upload command
# 3) Exit and let upload tool do the work
#
# This script runs between completion of the library/dependencies installation and compilation.
#
# Will continue on if a COM port isn't found so that the compilation can be done.
#
import os
import sys
from SCons.Script import DefaultEnvironment
import platform
# Detect host OS once; path separators and avrdude config differ on Windows.
current_OS = platform.system()
env = DefaultEnvironment()

build_type = os.environ.get("BUILD_TYPE", 'Not Set')

if build_type not in ('upload', 'traceback', 'Not Set'):
    # Neither an upload nor a traceback build: run normal Teensy2 scripts.
    env.Replace(UPLOAD_PROTOCOL='teensy-gui')
else:
    if current_OS == 'Windows':
        avrdude_conf_path = env.get("PIOHOME_DIR") + '\\packages\\toolchain-atmelavr\\etc\\avrdude.conf'
        source_path = env.get("PROJECTBUILD_DIR") + '\\' + env.get("PIOENV") + '\\firmware.hex'
        upload_string = 'avrdude -p usb1286 -c flip1 -C ' + avrdude_conf_path + ' -U flash:w:' + source_path + ':i'
    else:
        # Non-Windows avrdude ships with a usable default config.
        source_path = env.get("PROJECTBUILD_DIR") + '/' + env.get("PIOENV") + '/firmware.hex'
        upload_string = 'avrdude -p usb1286 -c flip1 -U flash:w:' + source_path + ':i'

    env.Replace(
        UPLOADCMD=upload_string,
        MAXIMUM_RAM_SIZE=8192,
        MAXIMUM_SIZE=130048
    )
| gpl-3.0 |
algiopensource/l10n-spain | l10n_es_aeat_mod115/__openerp__.py | 7 | 1562 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    'name': 'AEAT modelo 115',
    'version': '8.0.1.3.0',
    'category': "Localisation/Accounting",
    'author': "Serv. Tecnol. Avanzados - Pedro M. Baeza,"
              "AvanzOSC,"
              "Antiun Ingeniería S.L.,"
              "Factor Libre S.L.,"
              "Odoo Community Association (OCA)",
    'website': "https://github.com/OCA/l10n-spain",
    'license': 'AGPL-3',
    # Requires the generic Spanish AEAT reporting framework.
    'depends': ['l10n_es_aeat'],
    # Data files loaded on install/update (order matters: records before views).
    'data': [
        'data/aeat_export_mod115_2017_data.xml',
        'wizard/export_mod115_to_boe.xml',
        'views/mod115_view.xml',
        'security/ir.model.access.csv',
        'security/ir_rule.xml'],
    'installable': True,
}
| agpl-3.0 |
Meriipu/quodlibet | quodlibet/commands.py | 2 | 14557 | # Copyright 2004-2005 Joe Wreschnig, Michael Urman, Iñigo Serna,
# 2011-2020 Nick Boultbee
# 2014 Christoph Reiter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import os
from senf import uri2fsn, fsnative, fsn2text, text2fsn
from quodlibet.util.string import split_escape
from quodlibet import browsers
from quodlibet import util
from quodlibet.util import print_d, print_e, copool
from quodlibet.qltk.browser import LibraryBrowser
from quodlibet.qltk.properties import SongProperties
from quodlibet.util.library import scan_library
from quodlibet.order.repeat import RepeatListForever, RepeatSongForever, OneSong
from quodlibet.order.reorder import OrderWeighted, OrderShuffle
from quodlibet.config import RATINGS
class CommandError(Exception):
    """Raised when a remote command is unknown, malformed or fails."""
class CommandRegistry:
    """Knows about all commands and handles parsing/executing them"""

    def __init__(self):
        # name -> (callable, required arg count, optional arg count)
        self._commands = {}

    def register(self, name, args=0, optional=0):
        """Register a new command function

        The functions gets zero or more arguments as `fsnative`
        and should return `None` or `fsnative`. In case an error
        occurred the command should raise `CommandError`.

        Args:
            name (str): the command name
            args (int): amount of required arguments
            optional (int): amount of additional optional arguments
        Returns:
            Callable
        """

        def wrap(func):
            self._commands[name] = (func, args, optional)
            return func
        return wrap

    def handle_line(self, app, line):
        """Parses a command line and executes the command.

        Can not fail (only SystemExit/KeyboardInterrupt propagate).

        Args:
            app (Application)
            line (fsnative)
        Returns:
            fsnative or None
        """

        assert isinstance(line, fsnative)

        # only one arg supported atm
        parts = line.split(" ", 1)
        command = parts[0]
        args = parts[1:]

        print_d("command: %r(*%r)" % (command, args))

        try:
            return self.run(app, command, *args)
        except CommandError as e:
            print_e(e)
            util.print_exc()
        except Exception:
            # Narrowed from a bare "except:" so that SystemExit and
            # KeyboardInterrupt are no longer swallowed here.
            util.print_exc()

    def run(self, app, name, *args):
        """Execute the command `name` passing args

        May raise CommandError
        """

        if name not in self._commands:
            raise CommandError("Unknown command %r" % name)

        cmd, argcount, optcount = self._commands[name]
        if len(args) < argcount:
            raise CommandError("Not enough arguments for %r" % name)
        if len(args) > argcount + optcount:
            raise CommandError("Too many arguments for %r" % name)

        print_d("Running %r with params %s " % (cmd.__name__, args))

        try:
            result = cmd(app, *args)
        except CommandError as e:
            raise CommandError("%s: %s" % (name, str(e)))
        else:
            if result is not None and not isinstance(result, fsnative):
                raise CommandError(
                    "%s: returned %r which is not fsnative" % (name, result))
            return result
def arg2text(arg):
    """Like fsn2text but is strict by default and raises CommandError"""
    try:
        return fsn2text(arg, strict=True)
    except ValueError as e:
        # Re-raise decoding failures in the command-layer exception type.
        raise CommandError(e)
# Module-level registry instance; the command functions below attach
# themselves to it via the @registry.register(...) decorators.
registry = CommandRegistry()
@registry.register("previous")
def _previous(app):
    """Go to the previous song."""
    app.player.previous()
@registry.register("force-previous")
def _force_previous(app):
    """Go to the previous song unconditionally (forced variant)."""
    # True presumably forces the jump even mid-song — confirm in player API.
    app.player.previous(True)
@registry.register("next")
def _next(app):
    """Go to the next song."""
    app.player.next()
@registry.register("pause")
def _pause(app):
    """Pause playback."""
    app.player.paused = True
@registry.register("play")
def _play(app):
    """Start playback."""
    app.player.play()
@registry.register("play-pause")
def _play_pause(app):
    """Toggle between playing and paused."""
    app.player.playpause()
@registry.register("stop")
def _stop(app):
    """Stop playback."""
    app.player.stop()
@registry.register("focus")
def _focus(app):
    """Bring the application window to the foreground."""
    app.present()
@registry.register("volume", args=1)
def _volume(app, value):
    """Set the volume to an absolute percentage, or adjust it with a
    leading "+"/"-" (a bare "+"/"-" steps by 5%). Clamped to [0, 1]."""
    if not value:
        raise CommandError("invalid arg")

    sign = value[0]
    if sign in ('+', '-'):
        # Relative adjustment.
        if len(value) == 1:
            delta = 0.05
        else:
            try:
                delta = float(value[1:]) / 100.0
            except ValueError:
                return
        if sign == '-':
            delta = -delta
        target = app.player.volume + delta
    else:
        # Absolute percentage.
        try:
            target = float(value) / 100.0
        except ValueError:
            return
    app.player.volume = min(1.0, max(0.0, target))
@registry.register("stop-after", args=1)
def _stop_after(app, value):
    """Set ("0"/"1") or toggle ("t") the stop-after-this-song flag."""
    opts = app.player_options
    if value == "t":
        opts.stop_after = not opts.stop_after
    elif value in ("0", "1"):
        opts.stop_after = (value == "1")
    else:
        raise CommandError("Invalid value %r" % value)
@registry.register("shuffle", args=1)
def _shuffle(app, value):
    """Enable ("1"/"on"), disable ("0"/"off") or toggle ("t"/"toggle")
    shuffle mode."""
    opts = app.player_options
    if value in ("t", "toggle"):
        opts.shuffle = not opts.shuffle
    elif value in ("1", "on"):
        opts.shuffle = True
    elif value in ("0", "off"):
        opts.shuffle = False
@registry.register("shuffle-type", args=1)
def _shuffle_type(app, value):
    """Pick the shuffle order ("random"/"weighted") or turn shuffle off
    ("off"/"0"). Picking an order also enables shuffle."""
    if value in ["random", "weighted"]:
        app.player_options.shuffle = True
        if value == "random":
            app.window.order.shuffler = OrderShuffle
        elif value == "weighted":
            app.window.order.shuffler = OrderWeighted
    elif value in ["off", "0"]:
        app.player_options.shuffle = False
@registry.register("repeat", args=1)
def _repeat(app, value):
    """Enable ("1"/"on"), disable ("0"/"off") or toggle ("t"/"toggle")
    repeat mode."""
    opts = app.player_options
    if value in ("t", "toggle"):
        opts.repeat = not opts.repeat
    elif value in ("1", "on"):
        print_d("Enabling repeat")
        opts.repeat = True
    elif value in ("0", "off"):
        opts.repeat = False
@registry.register("repeat-type", args=1)
def _repeat_type(app, value):
    """Pick the repeat mode ("current"/"all"/"one") or turn repeat off
    ("off"/"0"). Picking a mode also enables repeat."""
    if value in ["current", "all", "one"]:
        app.player_options.repeat = True
        if value == "current":
            app.window.order.repeater = RepeatSongForever
        elif value == "all":
            app.window.order.repeater = RepeatListForever
        elif value == "one":
            app.window.order.repeater = OneSong
    elif value in ["off", "0"]:
        app.player_options.repeat = False
@registry.register("seek", args=1)
def _seek(app, time):
    """Seek to an absolute position, or relatively with a "+"/"-" prefix."""
    player = app.player
    song = player.song
    if not song:
        return

    position = player.get_position()
    prefix, rest = time[0], time[1:]
    if prefix == "+":
        position += util.parse_time(rest) * 1000
    elif prefix == "-":
        position -= util.parse_time(rest) * 1000
    else:
        position = util.parse_time(time) * 1000

    # Clamp to the valid range [0, song length - 1ms].
    upper = song.get("~#length", 0) * 1000 - 1
    player.seek(min(upper, max(0, position)))
@registry.register("play-file", args=1)
def _play_file(app, value):
    """Open and play the given file via the main window."""
    app.window.open_file(value)
@registry.register("add-location", args=1)
def _add_location(app, value):
    """Add a file or a directory to the library.

    Directories are scanned asynchronously via copool; anything else
    is reported as an error.
    """
    if os.path.isfile(value):
        ret = app.library.add_filename(value)
        if not ret:
            print_e("Couldn't add file to library")
    elif os.path.isdir(value):
        copool.add(app.library.scan, [value], cofuncid="library",
                   funcid="library")
    else:
        print_e("Invalid location")
@registry.register("toggle-window")
def _toggle_window(app):
    """Toggle the main window's visibility."""
    if app.window.get_property('visible'):
        app.hide()
    else:
        app.show()
@registry.register("hide-window")
def _hide_window(app):
    """Hide the main window."""
    app.hide()
@registry.register("show-window")
def _show_window(app):
    """Show the main window."""
    app.show()
@registry.register("rating", args=1)
def _rating(app, value):
    """Set the current song's rating as an absolute float (clamped to
    [0, 1]) or adjust it relatively with a leading "+"/"-" (a bare
    "+"/"-" steps by one rating increment)."""

    song = app.player.song
    if not song:
        return

    # Guard against an empty argument (consistent with the "volume"
    # command); previously this raised an uncaught IndexError below.
    if not value:
        raise CommandError("invalid arg")

    if value[0] in ('+', '-'):
        if len(value) > 1:
            try:
                change = float(value[1:])
            except ValueError:
                return
        else:
            change = 1 / RATINGS.number
        if value[0] == '-':
            change = -change
        rating = song["~#rating"] + change
    else:
        try:
            rating = float(value)
        except (ValueError, TypeError):
            return

    song["~#rating"] = max(0.0, min(1.0, rating))
    app.library.changed([song])
@registry.register("dump-browsers")
def _dump_browsers(app):
    """Return a numbered list of all available browser kinds."""
    response = u""
    for i, b in enumerate(browsers.browsers):
        response += u"%d. %s\n" % (i, browsers.name(b))
    return text2fsn(response)
@registry.register("set-browser", args=1)
def _set_browser(app, value):
    """Switch the main window to the given browser; error if unknown."""
    if not app.window.select_browser(value, app.library, app.player):
        raise CommandError("Unknown browser %r" % value)
@registry.register("open-browser", args=1)
def _open_browser(app, value):
    """Open the given browser kind in a separate library browser window."""
    value = arg2text(value)
    try:
        Kind = browsers.get(value)
    except ValueError:
        raise CommandError("Unknown browser %r" % value)
    LibraryBrowser.open(Kind, app.library, app.player)
@registry.register("random", args=1)
def _random(app, tag):
    """Make the browser filter on a random value of the given tag."""
    tag = arg2text(tag)
    if app.browser.can_filter(tag):
        app.browser.filter_random(tag)
@registry.register("filter", args=1)
def _filter(app, value):
    """Filter the browser by a "tag=value" expression."""
    value = arg2text(value)
    try:
        tag, value = value.split('=', 1)
    except ValueError:
        raise CommandError("invalid argument")
    if app.browser.can_filter(tag):
        app.browser.filter(tag, [value])
@registry.register("query", args=1)
def _query(app, value):
    """Set the browser's text query (if the browser supports one)."""
    value = arg2text(value)
    if app.browser.can_filter_text():
        app.browser.filter_text(value)
@registry.register("unfilter")
def _unfilter(app):
    """Remove all active browser filters."""
    app.browser.unfilter()
@registry.register("properties", optional=1)
def _properties(app, value=None):
    """Show the song properties window.

    With an argument, look the song up by path or treat the argument as
    a library query; without one, use the currently playing song.
    """
    library = app.library
    player = app.player
    window = app.window
    if value is not None:
        value = arg2text(value)
        if value in library:
            songs = [library[value]]
        else:
            songs = library.query(value)
    else:
        songs = [player.song]
    # Drop None entries (e.g. nothing playing).
    songs = list(filter(None, songs))
    if songs:
        window = SongProperties(library, songs, parent=window)
        window.show()
@registry.register("enqueue", args=1)
def _enqueue(app, value):
    """Append songs to the play queue.

    The argument is a library path, an on-disk filename (added to the
    library first) or, failing both, a library query.
    """
    playlist = app.window.playlist
    library = app.library
    if value in library:
        songs = [library[value]]
    elif os.path.isfile(value):
        songs = [library.add_filename(os.path.realpath(value))]
    else:
        songs = library.query(arg2text(value))
    songs.sort()
    playlist.enqueue(songs)
@registry.register("enqueue-files", args=1)
def _enqueue_files(app, value):
    """Enqueues comma-separated filenames or song names.

    Commas in filenames should be backslash-escaped.
    """
    library = app.library
    window = app.window
    songs = []
    for param in split_escape(value, ","):
        try:
            song_path = uri2fsn(param)
        except ValueError:
            # Not a URI; treat it as a plain path / library key.
            song_path = param
        if song_path in library:
            songs.append(library[song_path])
        elif os.path.isfile(song_path):
            # Fixed: previously passed the whole comma-separated `value`
            # here instead of the individual `song_path`, so adding new
            # files (especially more than one) never worked.
            songs.append(library.add_filename(os.path.realpath(song_path)))
    if songs:
        window.playlist.enqueue(songs)
@registry.register("unqueue", args=1)
def _unqueue(app, value):
    """Remove songs (looked up by path, or by query) from the play queue."""
    window = app.window
    library = app.library
    playlist = window.playlist
    if value in library:
        songs = [library[value]]
    else:
        songs = library.query(arg2text(value))
    playlist.unqueue(songs)
@registry.register("quit")
def _quit(app):
    """Quit the application."""
    app.quit()
@registry.register("status")
def _status(app):
    """Return a one-line status string: playback state, browser name,
    volume, shuffle/repeat flags and song progress (0..1)."""
    player = app.player
    if player.paused:
        strings = ["paused"]
    else:
        strings = ["playing"]
    strings.append(type(app.browser).__name__)
    po = app.player_options
    strings.append("%0.3f" % player.volume)
    strings.append("shuffle" if po.shuffle else "inorder")
    strings.append("on" if po.repeat else "off")
    # Progress as a fraction of the song length (0 if unknown).
    progress = 0
    if player.info:
        length = player.info.get("~#length", 0)
        if length:
            progress = player.get_position() / (length * 1000.0)
    strings.append("%0.3f" % progress)
    status = u" ".join(strings) + u"\n"
    return text2fsn(status)
@registry.register("queue", args=1)
def _queue(app, value):
    """Show or hide the queue pane; a value starting with "t" toggles."""
    window = app.window
    value = arg2text(value)
    if value.startswith("t"):
        value = not window.qexpander.get_property('visible')
    else:
        # Anything other than "0"/"off"/"false" means show.
        value = value not in ['0', 'off', 'false']
    window.qexpander.set_property('visible', value)
@registry.register("dump-playlist")
def _dump_playlist(app):
    """Return the URIs of all songs in the current playlist, one per line."""
    uris = [song("~uri") for song in app.window.playlist.pl.get()]
    return text2fsn(u"\n".join(uris) + u"\n")
@registry.register("dump-queue")
def _dump_queue(app):
    """Return the URIs of all songs in the play queue, one per line."""
    uris = [song("~uri") for song in app.window.playlist.q.get()]
    return text2fsn(u"\n".join(uris) + u"\n")
@registry.register("refresh")
def _refresh(app):
    """Rescan the library for changed, removed and new files."""
    scan_library(app.library, False)
@registry.register("print-query", args=1)
def _print_query(app, query):
    """Queries library, dumping filenames of matches to stdout

    See Issue 716
    """
    query = arg2text(query)
    songs = app.library.query(query)
    # NOTE(review): returns a plain str instead of going through
    # text2fsn like the other dump commands — presumably fine on
    # Python 3 where fsnative is str; confirm for other platforms.
    return "\n".join([song("~filename") for song in songs]) + "\n"
@registry.register("print-query-text")
def _print_query_text(app):
    """Return the browser's current query text (None if unsupported)."""
    if app.browser.can_filter_text():
        return text2fsn(str(app.browser.get_filter_text()) + u"\n")
@registry.register("print-playing", optional=1)
def _print_playing(app, fstring=None):
    """Return the current song formatted with the given pattern string
    (a sensible default pattern is used when none is given)."""
    from quodlibet.formats import AudioFile
    from quodlibet.pattern import Pattern
    if fstring is None:
        fstring = u"<artist~album~tracknumber~title>"
    else:
        fstring = arg2text(fstring)
    song = app.player.info
    if song is None:
        # Nothing playing: format an empty placeholder song instead.
        song = AudioFile({"~filename": fsnative(u"/")})
        song.sanitize()
    else:
        song = app.player.with_elapsed_info(song)
    return text2fsn(Pattern(fstring).format(song) + u"\n")
@registry.register("uri-received", args=1)
def _uri_received(app, uri):
    """Forward a received URI to the current browser."""
    uri = arg2text(uri)
    app.browser.emit("uri-received", uri)
| gpl-2.0 |
ariegg/webiopi-drivers | chips/sensor/ina219/ina219.py | 1 | 22877 | # Copyright 2017 Andreas Riegg - t-h-i-n-x.net
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Changelog
#
# 1.0 2017/01/03 Initial release
#
# Config parameters
#
# - slave 8 bit Value of the I2C slave address for the chip.
# Defaults to 0x40. Possible values are from 0x40 to 0x4F.
# - shunt Float Value of the shunt resistor in Ohms. Default is 0.1.
# - vrange Integer Vrange value of the chip. Valid values are 16 or 32.
# Default is 32.
# - gaindiv Integer Gain divider (PGA) value of the chip. Valid values
# are from (1, 2, 4 , 8). Default is 8.
# - mode Integer Value of the chip mode. Possible values are from
# 0x0 to 0x7. Default is 0x7.
# - badc Integer Value of the voltage bus ADC settings. Possible
# values are from 0x0 to 0xF. Default is 0x3.
# - sadc Integer Value of the shunt voltage ADC settings. Possible
# values are from 0x0 to 0xF. Default is 0x3.
# - vmax Float Value of the desired vmax value for automatic
# calibration. Default is None. This parameter will
# only be used of imax is also not None.
# - imax Float Value of the desired imax value for automatic
# calibration. Default is None. If imax is given,
# the values for vrange, gaindiv and currentLSB will be
# ignored and calculated instead. If imax is higher than
# possible, then the highest possible value will be
# used instead and overflow may occur.
# - currentLSB Float Value of the current LSB to use. Default is None.
# If you mistrust the automatic calibration you can
# set the current LSB manual with this parameter. If
# used, make sure to manual set the desired gaindiv also.
# - bus String Name of the I2C bus
#
# Usage remarks
#
# - The default values of this driver are valid for a 32 V Bus range, a maximum
# possible current of 3.2 A and a current resolution of around 98 microAmperes/Bit.
# If you are fine with this you can just use those defaults.
# - If you want to have some more configuration while keeping it still simple you
# can provide parameters for vmax and imax and the driver will do its best to
# automatically calculate vrange, gaindiv and calibration with a very good resolution.
# - If you prefer complete manual setup you should set vrange, gaindiv, currentLSB and
# optional fine-tuned calibration (in this order).
# - Setting the calibration register via setCalibration() is to be used for the final
# calibration as explained in the chip spec for the final fine tuning. It must not
# be used for the currentLSB setting as this is calculated automatically by this
# driver based on the values of shunt and gaindiv.
# - This driver implements an automatic calibration feature calibrate(vmax, imax)
# that can be used during device creation and also at runtime. The value for vmax
# is used to set vrange within the allowed limits. The value for imax is used to
# set gaindiv so that the maximal desired current can be measured at the highest
# possible resolution for current LSB. If the desired imax is higher than the
# possible imax based on the value of shunt, then the maximum possible imax will
# be used. You get the choosen values via the response of the calibrate(...) call.
# In this case, sending a higher current through the shunt will result in overflow
# which will generate a debugging message (only when reading the bus voltage).
# - If values for vmax and imax are given at device creation they will override the
# init values for vrange and gaindiv as those will be ignored then and calculated via
# the automatic calibration feature instead.
# - All chip parameters with the exception of shunt can be changed at runtime. If
# an updated parameter has an influence on the currentLSB and/or calibration value,
# then this/these will be re-calculated automatically and the calibration register
# will be set also. If you use setCalibration() for final fine-tuning you have to
# repeat that step again if automatic calibration has taken place.
# - Updating of the mode value at runtime allows triggered conversions and power-down
# of the chip.
# - If you are unsure about the calculated values set debugging to "True" and look at
# the debugging messages as they will notify you about all resulting values. Or
# call getConfiguration() to see all values.
# - If you encounter overflow (getting the overflow error) try to increase the
#   gaindiv value or reduce the shunt value (as a real hardware change).
#
# Implementation remarks
#
# - This driver is implemented based on the INA219 data sheet from Texas Instruments.
# - The default value for the shunt resistor of 0.1 Ohms is appropriate for the
# breakout board from Adafruit for this chip (Adafruit PRODUCT ID: 904).
# - The parameter value for shunt can't be changed at runtime after device
# creation because it is very unlikely to modify the shunt resistor during operation
# of the chip. Please provide the correct value via the config options or at
# device creation if the default value does not suit your hardware setup.
# - This driver uses floating point calculation and takes no care about integer
# only arithmetics. For that reason, the mathematical lowest possible LSB value is
# calculated automatically and used for best resolution with the exception when you
# manual set your own current LSB value.
# - If you want to override/select the current LSB value manually you can do that
# via config parameter or at runtime. In this case make sure to use the correct
# corresponding gaindiv value otherwise the value readings will be wrong.
# - If for some reason (e.g. an impropriate setting of the currentLSB) the value
# of the calibration register would be out of its allowed bounds it will be set
# to zero so that all current and power readings will also be zero to avoid wrong
# measurements until the calibration register is set again to an allowed range.
# - This driver does not use the shunt adc register as this value is not needed
# for operation if the calibration register is used.
#
from webiopi.utils.logger import debug
from webiopi.decorators.rest import request, response, api
from webiopi.utils.types import toint, signInteger, M_JSON
from webiopi.devices.i2c import I2C
from webiopi.devices.sensor import Current, Voltage, Power
#---------- Class definition ----------
class INA219(I2C, Current, Voltage, Power):
    """Driver for the TI INA219 high-side current/voltage/power monitor on I2C.

    Implements the webiopi Current, Voltage and Power abstractions and exposes
    calibration and chip configuration through additional REST mappings (the
    ``@api``/``@request`` decorated methods below).
    """
    # Chip register addresses (INA219 data sheet register map).
    CONFIGURATION_ADDRESS = 0x00
    #SHUNTADC_ADDRESS = 0x01
    BUSADC_ADDRESS = 0x02
    POWER_ADDRESS = 0x03
    CURRENT_ADDRESS = 0x04
    CALIBRATION_ADDRESS = 0x05

    # Configuration register flag, field values and field masks.
    RESET_FLAG = 0b1 << 15
    BRNG_16_VALUE = 0b0 << 13
    BRNG_32_VALUE = 0b1 << 13
    BRNG_MASK = 0b0010000000000000
    GAINDIV_1_VALUE = 0b00 << 11
    GAINDIV_2_VALUE = 0b01 << 11
    GAINDIV_4_VALUE = 0b10 << 11
    GAINDIV_8_VALUE = 0b11 << 11
    GAINDIV_MASK = 0b0001100000000000
    BADC_MASK = 0b0000011110000000
    SADC_MASK = 0b0000000001111000
    MODE_MASK = 0b0000000000000111
    OVERFLOW_MASK = 0b0000000000000001
    CALIBRATION_MASK = 0b1111111111111110

    # Fixed chip characteristics used by the calibration math.
    VSHUNT_FULL_SCALE_BASE_VALUE = 0.04 # always fixed to 40mV
    CALIBRATION_CONSTANT_VALUE = 0.04096 # fixed value from data sheet
    BUS_VOLTAGE_LSB_VALUE = 0.004 # always fixed to 4mV
    CURRENT_LSB_TO_POWER_LSB_VALUE = 20 # always 20 times the currentLSB value

    #---------- Class initialisation ----------
    def __init__(self, slave=0x40, shunt=0.1, vrange=32, gaindiv=8, mode=0x7, badc=0x3, sadc=0x3, vmax=None, imax=None, currentLSB=None, bus=None):
        """Initialise the chip.

        :param slave: I2C slave address (default 0x40)
        :param shunt: shunt resistor value in Ohms, fixed after creation
        :param vrange: bus voltage range, 16 or 32 Volts
        :param gaindiv: shunt voltage gain divider (1, 2, 4 or 8)
        :param mode: operating mode bits [0x0 .. 0x7]
        :param badc: bus ADC resolution/averaging bits [0x0 .. 0xF]
        :param sadc: shunt ADC resolution/averaging bits [0x0 .. 0xF]
        :param vmax: expected maximal voltage, used for auto-calibration
        :param imax: expected maximal current; when given, triggers
                     auto-calibration and overrides vrange/gaindiv
        :param currentLSB: manually chosen current LSB in Amperes (optional)
        :param bus: I2C bus identifier
        """
        I2C.__init__(self, toint(slave), bus)
        self.__setShunt__(float(shunt))
        # Start from a known default register state.
        self.__reset__()
        if imax is not None:
            # Auto-calibrate from the expected maxima; vmax falls back to the
            # requested voltage range when not given explicitly.
            if vmax is None:
                vmax = toint(vrange)
            else:
                vmax = float(vmax)
            imax = float(imax)
            self.__calibrate__(vmax, imax)
        else:
            self.__setVrange__(toint(vrange))
            self.__setGaindiv__(toint(gaindiv))
            if currentLSB is not None:
                self.__setCurrentLSB__(float(currentLSB))
        self.__setMode__(toint(mode))
        self.__setBadc__(toint(badc))
        self.__setSadc__(toint(sadc))

    #---------- Abstraction framework contracts ----------
    def __str__(self):
        return "INA219(slave=0x%02X, dev=%s, shunt=%f Ohm)" % (self.slave, self.device(), self._shunt)

    def __family__(self):
        return [Current.__family__(self), Voltage.__family__(self), Power.__family__(self)]

    #---------- Current abstraction related methods ----------
    def __getMilliampere__(self):
        """Return the measured current in milliAmperes."""
        rawCurrent = self.__read16BitRegister__(self.CURRENT_ADDRESS)
        debug("%s: raw current=%s" % (self.__str__(), bin(rawCurrent)))
        # The current register holds a signed 16 bit value in currentLSB units.
        return signInteger(rawCurrent, 16) * self._currentLSB * 1000 # scale from Amperes to milliAmperes

    #---------- Voltage abstraction related methods ----------
    def __getVolt__(self):
        """Return the measured bus voltage in Volts."""
        rawVoltage = self.__read16BitRegister__(self.BUSADC_ADDRESS)
        debug("%s: raw voltage=%s" % (self.__str__(), bin(rawVoltage)))
        # Bit 0 of the bus voltage register is the math overflow flag.
        overflow = rawVoltage & self.OVERFLOW_MASK
        if overflow:
            debug("%s: overflow condition" % self.__str__())
        # The voltage value starts at bit 3, scaled by the fixed 4mV LSB.
        return (rawVoltage >> 3) * self.BUS_VOLTAGE_LSB_VALUE

    #---------- Power abstraction related methods ----------
    def __getWatt__(self):
        """Return the measured power in Watts."""
        rawWatt = self.__read16BitRegister__(self.POWER_ADDRESS)
        debug("%s: raw watt=%s" % (self.__str__(), bin(rawWatt)))
        # The power LSB is always 20 times the current LSB.
        return rawWatt * self.CURRENT_LSB_TO_POWER_LSB_VALUE * self._currentLSB

    #---------- Device methods that implement features including additional REST mappings ----------
    @api("Device", 3, "feature", "driver")
    @request("POST", "run/calibrate/%(pars)s")
    @response(contentType=M_JSON)
    def calibrate(self, pars):
        """REST mapped auto-calibration from "vmax,imax" URL parameters."""
        (vmax, imax) = pars.split(",")
        vmax = float(vmax)
        if vmax <= 0 or vmax > 32:
            raise ValueError("Calibration parameter error, vmax:%f out of allowed range [0 < vmax <= 32]" % vmax)
        imax = float(imax)
        self.__calibrate__(vmax, imax)
        values = self.getConfiguration()
        values["vmax required"] = "%f" % vmax
        values["imax required"] = "%f" % imax
        return values

    def __calibrate__(self, vmax, imax):
        """Choose the voltage range and the smallest gaindiv able to measure imax."""
        if vmax > 16:
            self.setVrange(32)
        else:
            self.setVrange(16)
        # Double gaindiv until imax fits, giving up at the hardware maximum of 8.
        gaindiv = 1
        shuntdiv = 1 / self._shunt
        while True:
            imaxpossible = self.__calculateImaxpossible__(gaindiv, shuntdiv)
            if gaindiv == 8:
                break
            if imax > imaxpossible:
                gaindiv *= 2
            else:
                break
        self.setGaindiv(gaindiv)
        debug("%s: auto-calibrated, max possible current=%f A" % (self.__str__(), imaxpossible))

    @api("Device", 3, "feature", "driver")
    @request("POST", "run/reset")
    @response("%s")
    def reset(self):
        """REST mapped chip reset."""
        self.__reset__()
        return "Chip is reset."

    def __reset__(self):
        # Writing the reset flag restores all registers to their default values.
        self.__write16BitRegister__(self.CONFIGURATION_ADDRESS, self.RESET_FLAG)
        debug("%s: chip reset" % self.__str__())

    @api("Device", 3, "feature", "driver")
    @request("POST", "run/recalibrate")
    @response("%d")
    def reCalibrate(self):
        """REST mapped re-write of the last known calibration value."""
        self.__reCalibrate__()
        return self.__getCalibration__()

    #---------- Device methods that implement chip configuration settings including additional REST mappings ----------
    @api("Device", 3, "configuration", "driver")
    @request("GET", "configure/*")
    @response(contentType=M_JSON)
    def getConfiguration(self):
        """Return a dict of all current driver/calibration values."""
        values = {}
        values["vmax possible"] = "%d" % self._vrange
        values["imax possible"] = "%f" % self.__calculateImaxpossible__(self._gaindiv, 1 / self._shunt)
        values["current LSB"] = "%f" % self._currentLSB
        values["calibration"] = "%d" % self._cal
        values["gaindiv"] = "%d" % self._gaindiv
        values["shunt"] = "%f" % self._shunt
        return values

    @api("Device", 3, "configuration", "driver")
    @request("GET", "configure/calibration")
    @response("%d")
    def getCalibration(self):
        return self.__getCalibration__()

    def __getCalibration__(self):
        return self.__read16BitRegister__(self.CALIBRATION_ADDRESS)

    @api("Device", 3, "configuration", "driver")
    @request("POST", "configure/calibration/%(calibration)d")
    @response("%d")
    def setCalibration(self, calibration):
        self.__setCalibration__(calibration)
        return self.__getCalibration__()

    def __setCalibration__(self, calibration):
        """Write the calibration register; zero it out on out-of-range input.

        :raises ValueError: if calibration is outside [0 .. 65534]
        """
        if not 0 <= calibration <= 65534:
            self.__write16BitRegister__(self.CALIBRATION_ADDRESS, 0) # zero out calibration register to avoid wrong measurements
            self._cal = 0
            debug("%s: set calibration=0" % self.__str__())
            raise ValueError("Parameter calibration:%d not in the allowed range [0 .. 65534]" % calibration)
        # The lowest bit of the calibration register is not writable, mask it out.
        calibration = calibration & self.CALIBRATION_MASK
        self.__write16BitRegister__(self.CALIBRATION_ADDRESS, calibration)
        self._cal = calibration
        debug("%s: set calibration=%d" % (self.__str__(), self._cal))

    @api("Device", 3, "configuration", "driver")
    @request("POST", "configure/vrange/%(vrange)d")
    @response("%d")
    def setVrange(self, vrange):
        self.__setVrange__(vrange)
        return self.__getVrange__()

    @api("Device", 3, "configuration", "driver")
    @request("GET", "configure/vrange")
    @response("%d")
    def getVrange(self):
        return self.__getVrange__()

    def __setVrange__(self, vrange):
        """Set the bus voltage range to 16 or 32 Volts."""
        if vrange not in (16, 32):
            raise ValueError("Parameter vrange:%d not one of the allowed values (16, 32)" % vrange)
        if vrange == 16:
            bitsVrange = self.BRNG_16_VALUE
        elif vrange == 32:
            bitsVrange = self.BRNG_32_VALUE
        # Read-modify-write only the BRNG field of the configuration register.
        currentValue = self.__read16BitRegister__(self.CONFIGURATION_ADDRESS)
        newValue = (currentValue & ~self.BRNG_MASK) | bitsVrange
        self.__write16BitRegister__(self.CONFIGURATION_ADDRESS, newValue)
        self._vrange = vrange
        debug("%s: set vrange=%d V" % (self.__str__(), vrange))

    def __getVrange__(self):
        # BUGFIX: compare the masked (unshifted) field against the BRNG_*_VALUE
        # constants, which are already positioned at bit 13. The previous code
        # shifted the field right by 13 first, so it could never equal
        # BRNG_32_VALUE (1 << 13) and a stale self._vrange was returned.
        bitsVrange = self.__read16BitRegister__(self.CONFIGURATION_ADDRESS) & self.BRNG_MASK
        if bitsVrange == self.BRNG_16_VALUE:
            self._vrange = 16
        elif bitsVrange == self.BRNG_32_VALUE:
            self._vrange = 32
        return self._vrange

    @api("Device", 3, "configuration", "driver")
    @request("POST", "configure/gaindiv/%(gaindiv)d")
    @response("%d")
    def setGaindiv(self, gaindiv):
        self.__setGaindiv__(gaindiv)
        return self.__getGaindiv__()

    @api("Device", 3, "configuration", "driver")
    @request("GET", "configure/gaindiv")
    @response("%d")
    def getGaindiv(self):
        return self.__getGaindiv__()

    def __setGaindiv__(self, gaindiv):
        """Set the shunt voltage gain divider and re-derive the current LSB."""
        if gaindiv not in (1, 2, 4, 8):
            raise ValueError("Parameter gaindiv:%d not one of the allowed values (1, 2, 4, 8)" % gaindiv)
        if gaindiv == 1:
            bitsGaindiv = self.GAINDIV_1_VALUE
        elif gaindiv == 2:
            bitsGaindiv = self.GAINDIV_2_VALUE
        elif gaindiv == 4:
            bitsGaindiv = self.GAINDIV_4_VALUE
        elif gaindiv == 8:
            bitsGaindiv = self.GAINDIV_8_VALUE
        currentValue = self.__read16BitRegister__(self.CONFIGURATION_ADDRESS)
        newValue = (currentValue & ~self.GAINDIV_MASK) | bitsGaindiv
        self.__write16BitRegister__(self.CONFIGURATION_ADDRESS, newValue)
        self._gaindiv = gaindiv
        debug("%s: set gaindiv=%d" % (self.__str__(), gaindiv))
        # Changing the gain changes the optimal current LSB and calibration.
        self.__reCalculate__()

    def __getGaindiv__(self):
        # BUGFIX: compare the masked (unshifted) field against the
        # GAINDIV_*_VALUE constants, which are already positioned at bit 11.
        # The previous code shifted right by 11 first, so only GAINDIV_1_VALUE
        # (zero) could ever match and a stale self._gaindiv was returned.
        bitsGaindiv = self.__read16BitRegister__(self.CONFIGURATION_ADDRESS) & self.GAINDIV_MASK
        if bitsGaindiv == self.GAINDIV_1_VALUE:
            self._gaindiv = 1
        elif bitsGaindiv == self.GAINDIV_2_VALUE:
            self._gaindiv = 2
        elif bitsGaindiv == self.GAINDIV_4_VALUE:
            self._gaindiv = 4
        elif bitsGaindiv == self.GAINDIV_8_VALUE:
            self._gaindiv = 8
        return self._gaindiv

    @api("Device", 3, "configuration", "driver")
    @request("POST", "configure/mode/%(mode)d")
    @response("%d")
    def setMode(self, mode):
        self.__setMode__(mode)
        return self.__getMode__()

    @api("Device", 3, "configuration", "driver")
    @request("GET", "configure/mode")
    @response("%d")
    def getMode(self):
        return self.__getMode__()

    def __setMode__(self, mode):
        """Set the operating mode bits [0x0 .. 0x7] (e.g. power-down, triggered)."""
        if mode not in range(0, 0x8):
            raise ValueError("Parameter mode:0x%1X not in the allowed range [0x0 .. 0x7]" % mode)
        currentValue = self.__read16BitRegister__(self.CONFIGURATION_ADDRESS)
        newValue = (currentValue & ~self.MODE_MASK) | mode
        self.__write16BitRegister__(self.CONFIGURATION_ADDRESS, newValue)
        debug("%s: set mode=0x%1X" % (self.__str__(), mode))

    def __getMode__(self):
        bitsMode = self.__read16BitRegister__(self.CONFIGURATION_ADDRESS) & self.MODE_MASK
        return bitsMode

    @api("Device", 3, "configuration", "driver")
    @request("POST", "configure/badc/%(badc)d")
    @response("%d")
    def setBadc(self, badc):
        self.__setBadc__(badc)
        return self.__getBadc__()

    @api("Device", 3, "configuration", "driver")
    @request("GET", "configure/badc")
    @response("%d")
    def getBadc(self):
        return self.__getBadc__()

    def __setBadc__(self, badc):
        """Set the bus ADC resolution/averaging bits [0x0 .. 0xF] (field at bit 7)."""
        if badc not in range(0, 0x10):
            raise ValueError("Parameter badc:0x%1X not in the allowed range [0x0 .. 0xF]" % badc)
        currentValue = self.__read16BitRegister__(self.CONFIGURATION_ADDRESS)
        newValue = (currentValue & ~self.BADC_MASK) | badc << 7
        self.__write16BitRegister__(self.CONFIGURATION_ADDRESS, newValue)
        debug("%s: set badc=0x%1X" % (self.__str__(), badc))

    def __getBadc__(self):
        bitsBadc = (self.__read16BitRegister__(self.CONFIGURATION_ADDRESS) & self.BADC_MASK) >> 7
        return bitsBadc

    @api("Device", 3, "configuration", "driver")
    @request("POST", "configure/sadc/%(sadc)d")
    @response("%d")
    def setSadc(self, sadc):
        self.__setSadc__(sadc)
        return self.__getSadc__()

    @api("Device", 3, "configuration", "driver")
    @request("GET", "configure/sadc")
    @response("%d")
    def getSadc(self):
        return self.__getSadc__()

    def __setSadc__(self, sadc):
        """Set the shunt ADC resolution/averaging bits [0x0 .. 0xF] (field at bit 3)."""
        if sadc not in range(0, 0x10):
            raise ValueError("Parameter sadc:0x%1X not in the allowed range [0x0 .. 0xF]" % sadc)
        currentValue = self.__read16BitRegister__(self.CONFIGURATION_ADDRESS)
        newValue = (currentValue & ~self.SADC_MASK) | sadc << 3
        self.__write16BitRegister__(self.CONFIGURATION_ADDRESS, newValue)
        debug("%s: set sadc=0x%1X" % (self.__str__(), sadc))

    def __getSadc__(self):
        bitsSadc = (self.__read16BitRegister__(self.CONFIGURATION_ADDRESS) & self.SADC_MASK) >> 3
        return bitsSadc

    @api("Device", 3, "configuration", "driver")
    @request("POST", "configure/currentlsb/%(currentLSB)f")
    @response("%f")
    def setCurrentLSB(self, currentLSB):
        self.__setCurrentLSB__(currentLSB)
        return self._currentLSB

    #---------- Device methods that implement chip configuration settings ----------
    def __setShunt__(self, shunt):
        # Shunt value in Ohms; fixed after device creation (see module header).
        self._shunt = shunt

    def __setCurrentLSB__(self, currentLSB):
        """Set the current LSB (Amperes) and rewrite the calibration register."""
        self._currentLSB = currentLSB
        debug("%s: set current LSB=%f mA" % (self.__str__(), self._currentLSB * 1000))
        self.__setCalibration__(self.__calculateCalibration__())

    #---------- Calibration helper methods ----------
    def __reCalculate__(self):
        # Re-derive the optimal current LSB (which also rewrites calibration).
        self.__setCurrentLSB__(self.__calculateCurrentLSB__())

    def __reCalibrate__(self):
        # Rewrite the cached calibration value, e.g. after a chip reset.
        self.__setCalibration__(self._cal)

    def __calculateCurrentLSB__(self):
        """Return the mathematically smallest possible current LSB in Amperes."""
        calCurrentLSB = self.VSHUNT_FULL_SCALE_BASE_VALUE * self._gaindiv / self._shunt / 2**15 # in Amperes
        debug("%s: calculated current LSB=%f mA" % (self.__str__(), calCurrentLSB * 1000))
        return calCurrentLSB

    def __calculateCalibration__(self):
        """Return the calibration register value for the current LSB and shunt."""
        calCal = int(self.CALIBRATION_CONSTANT_VALUE / self._currentLSB / self._shunt) # this does trunc
        debug("%s: calculated calibration=%d" % (self.__str__(), calCal))
        return calCal

    def __calculateImaxpossible__(self, gaindiv, shuntdiv):
        """Return the maximal measurable current (Amperes) for the given settings."""
        return self.VSHUNT_FULL_SCALE_BASE_VALUE * gaindiv * shuntdiv

    #---------- Register helper methods ----------
    def __read16BitRegister__(self, addr):
        """Read a 16 bit big-endian register value from the chip."""
        regBytes = self.readRegisters(addr, 2)
        return regBytes[0] << 8 | regBytes[1]

    def __write16BitRegister__(self, addr, word):
        """Write a 16 bit big-endian register value to the chip."""
        data = bytearray(2)
        data[0] = (word >> 8) & 0xFF
        data[1] = word & 0xFF
        self.writeRegisters(addr , data)
| apache-2.0 |
mhorn71/StarbaseMini | instument_builder/builder.py | 1 | 1671 | __author__ = 'mark'
# StarbaseMini Staribus/Starinet Client for the British Astronomical Association Staribus Protocol
# Copyright (C) 2015 Mark Horn
#
# This file is part of StarbaseMini.
#
# StarbaseMini is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# StarbaseMini is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with StarbaseMini. If not, see <http://www.gnu.org/licenses/>.
import sys
import logging
from PyQt5 import QtWidgets
from ui import Ui_InstrumentBuilderDialog
logger = logging.getLogger('instrument.builder')
class InstrumentBuilder(QtWidgets.QDialog, Ui_InstrumentBuilderDialog):
    """Instrument builder dialog, styled with a per-platform Qt style sheet."""

    def __init__(self):
        QtWidgets.QDialog.__init__(self)
        self.setupUi(self)
        # Map sys.platform prefixes to their css style sheet.
        platform_styles = (
            ('darwin', 'css/macStyle.css'),
            ('win32', 'css/winStyle.css'),
            ('linux', 'css/nixStyle.css'),
        )
        stylesheet = None
        for prefix, candidate in platform_styles:
            if sys.platform.startswith(prefix):
                stylesheet = candidate
                break
        # Unknown platforms simply keep the default Qt look.
        if stylesheet is not None:
            with open(stylesheet, 'r') as style:
                self.setStyleSheet(style.read())
ga4gh/server | tests/end_to_end/server.py | 4 | 7044 | """
Servers to assist in testing
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import tempfile
import shlex
import subprocess
import socket
import requests
import ga4gh.common.utils as utils
# Fixed local TCP ports used by the test servers below.
ga4ghPort = 8001  # ga4gh server under test
remotePort = 8002  # NOTE(review): not referenced in this chunk -- presumably a second ga4gh instance; verify against callers
oidcOpPort = 8443  # OIDC provider (see OidcOpServerForTesting)
class ServerForTesting(object):
    """
    The base class of a test server
    """
    def __init__(self, port, protocol='http',
                 subdirectory=None, pingStatusCode=200):
        # suppress requests package log messages
        logging.getLogger("requests").setLevel(logging.CRITICAL)
        # Port the child server listens on and optional working directory
        # the child process is launched in.
        self.port = port
        self.subdirectory = subdirectory
        # Status code a GET on / is expected to return when healthy
        # (e.g. 404 for servers without a root route).
        self.pingStatusCode = pingStatusCode
        # Temp files capturing the child process stdout/stderr; opened
        # by start(), closed by shutdown().
        self.outFile = None
        self.errFile = None
        # subprocess.Popen handle, set by start().
        self.server = None
        self.serverUrl = "{}://{}:{}".format(protocol,
                                             socket.gethostname(),
                                             self.port)

    def getUrl(self):
        """
        Return the url at which the server is configured to run
        """
        return self.serverUrl

    def getCmdLine(self):
        """
        Return the command line string used to launch the server.
        Subclasses must override this method.
        """
        raise NotImplementedError()

    def start(self):
        """
        Start the server
        """
        assert not self.isRunning(), "Another server is running at {}".format(
            self.serverUrl)
        self.outFile = tempfile.TemporaryFile()
        self.errFile = tempfile.TemporaryFile()
        # shlex.split handles the multi-line command strings the
        # subclasses build.
        splits = shlex.split(self.getCmdLine())
        self.server = subprocess.Popen(
            splits, stdout=self.outFile,
            stderr=self.errFile,
            cwd=self.subdirectory)
        self._waitForServerStartup()

    def shutdown(self):
        """
        Shut down the server
        """
        if self.isRunning():
            self.server.kill()
        if self.server is not None:
            # wait() reaps the child to avoid leaving a zombie process.
            self.server.wait()
            self._assertServerShutdown()
        if self.outFile is not None:
            self.outFile.close()
        if self.errFile is not None:
            self.errFile.close()

    def restart(self):
        """
        Restart the server
        """
        self.shutdown()
        self.start()

    def isRunning(self):
        """
        Returns true if the server is running, false otherwise
        """
        try:
            response = self.ping()
            # Any reachable server must answer with the expected status;
            # anything else means a different service owns the port.
            if response.status_code != self.pingStatusCode:
                msg = ("Ping of server {} returned unexpected status code "
                       "({})").format(self.serverUrl, response.status_code)
                assert False, msg
            return True
        except requests.ConnectionError:
            return False

    def ping(self):
        """
        Pings the server by doing a GET request to /
        """
        # verify=False because test servers may use self-signed certs
        # (e.g. the https OIDC provider).
        response = requests.get(self.serverUrl, verify=False)
        return response

    def getOutLines(self):
        """
        Return the lines of the server stdout file
        """
        return utils.getLinesFromLogFile(self.outFile)

    def getErrLines(self):
        """
        Return the lines of the server stderr file
        """
        return utils.getLinesFromLogFile(self.errFile)

    def printDebugInfo(self):
        """
        Print debugging information about the server
        """
        className = self.__class__.__name__
        print('\n')
        print('*** {} CMD ***'.format(className))
        print(self.getCmdLine())
        print('*** {} STDOUT ***'.format(className))
        print(''.join(self.getOutLines()))
        print('*** {} STDERR ***'.format(className))
        print(''.join(self.getErrLines()))

    # NOTE(review): utils.Timeout/utils.Repeat appear to retry this until
    # it returns a truthy value or a deadline passes -- confirm against
    # ga4gh.common.utils.
    @utils.Timeout()
    @utils.Repeat()
    def _waitForServerStartup(self):
        self.server.poll()
        if self.server.returncode is not None:
            # Child exited before serving: surface its stderr in the failure.
            self._waitForErrLines()
            message = "Server process unexpectedly died; stderr: {0}"
            failMessage = message.format(''.join(self.getErrLines()))
            assert False, failMessage
        return not self.isRunning()

    @utils.Timeout()
    @utils.Repeat()
    def _waitForErrLines(self):
        # not sure why there's some delay in getting the server
        # process' stderr (at least for the ga4gh server)...
        return self.getErrLines() == []

    def _assertServerShutdown(self):
        shutdownString = "Server did not shut down correctly"
        assert self.server.returncode is not None, shutdownString
        assert not self.isRunning(), shutdownString
class Ga4ghServerForTesting(ServerForTesting):
    """
    A ga4gh test server
    """
    def __init__(self, useOidc=False):
        # OIDC-protected servers must be reached over https.
        protocol = 'https' if useOidc else 'http'
        super(Ga4ghServerForTesting, self).__init__(ga4ghPort, protocol)
        # NamedTemporaryFile holding the generated config; created lazily
        # in getCmdLine() and closed (hence deleted) in shutdown().
        self.configFile = None
        self.useOidc = useOidc

    def getConfig(self):
        """Return the ga4gh server config file contents as a string."""
        config = """
SIMULATED_BACKEND_NUM_VARIANT_SETS = 10
SIMULATED_BACKEND_VARIANT_DENSITY = 1
DATA_SOURCE = "simulated://"
DEBUG = True
"""
        if self.useOidc:
            config += """
TESTING = True
OIDC_PROVIDER = "https://localhost:{0}"
SECRET_KEY = "secret"
""".format(oidcOpPort)
        return config

    def getCmdLine(self):
        """Return the dev-server command line, writing the config file first."""
        if self.configFile is None:
            self.configFile = tempfile.NamedTemporaryFile()
            config = self.getConfig()
            self.configFile.write(config)
            # flush so the child process sees the full config on disk.
            self.configFile.flush()
        configFilePath = self.configFile.name
        cmdLine = """
python server_dev.py
--dont-use-reloader
--disable-urllib-warnings
--host 0.0.0.0
--config TestConfig
--config-file {}
--port {} """.format(configFilePath, self.port)
        return cmdLine

    def shutdown(self):
        super(Ga4ghServerForTesting, self).shutdown()
        if self.configFile is not None:
            # Closing the NamedTemporaryFile also removes it from disk.
            self.configFile.close()

    def printDebugInfo(self):
        super(Ga4ghServerForTesting, self).printDebugInfo()
        className = self.__class__.__name__
        print('*** {} CONFIG ***'.format(className))
        print(self.getConfig())
class Ga4ghServerForTestingDataSource(Ga4ghServerForTesting):
    """
    A ga4gh test server that serves data from an on-disk data source
    instead of the simulated backend.
    """
    def __init__(self, dataDir):
        super(Ga4ghServerForTestingDataSource, self).__init__()
        # Path of the data repository the server will read from.
        self.dataDir = dataDir

    def getConfig(self):
        """Return a config pointing the server at the data directory."""
        lines = (
            '',
            'DATA_SOURCE = "{}"'.format(self.dataDir),
            'DEBUG = True',
        )
        return '\n'.join(lines)
class OidcOpServerForTesting(ServerForTesting):
    """
    Runs a test OP server on localhost
    """
    def __init__(self):
        # The OP answers over https and has no root route, so a healthy
        # ping returns 404 rather than 200.
        super(OidcOpServerForTesting, self).__init__(
            oidcOpPort,
            protocol="https",
            subdirectory="oidc-provider/simple_op",
            pingStatusCode=404)

    def getCmdLine(self):
        """Return the command line launching the simple OP server."""
        template = ("python src/run.py --base https://localhost:{0}"
                    " -p {0} -d settings.yaml")
        return template.format(oidcOpPort)
| apache-2.0 |
assumptionsoup/pymel | tests/test_trees.py | 8 | 11955 | from pymel.util.testing import TestCase, setupUnittestModule
import pymel.util.trees as trees
class testCase_typeTrees(TestCase):

    def setUp(self):
        # Node-type hierarchy shared by every test in this case.
        hierarchy = (
            'dependNode',
            (
                'FurAttractors',
                ('FurCurveAttractors', 'FurDescription', 'FurGlobals'),
                'abstractBaseCreate',
            ),
        )
        self.types = hierarchy
        self.tree = trees.Tree(*hierarchy)

    def test01_parentMethod(self):
        """ Test the parent method on type tree """
        pass

    def tearDown(self):
        pass
# to be organised in nice unit tests :
#print dir(FrozenTree)
#print dir(Tree)
##print dir(IndexedFrozenTree)
##print dir(IndexedTree)
#a = Tree ('a', ('aa', 'ab'), 'b', ('ba', 'bb'))
#print a
#print list(a)
#print list(a.preorder())
#print str(a)
#print repr(a)
#print unicode(a)
#print a.formatted()
#print a.debug()
#t = Tree ('a', ('aa', 'ab'))
#print id(t)
#print t.debug()
#t.graft('b')
#print id(t)
#print t.debug()
#b = Tree ('a')
#print id(b)
#print b.debug()
#b.graft('b')
#print b.debug()
#b.graft('ab', 'a')
#print b.debug()
#aa = Tree ('aa', ('aaa', 'aab'))
#print id(aa)
#print aa.debug()
## FIXME : next doesn't work
#b.graft(aa, 'a', 'ab')
#print id(b)
#print id(aa), id(b['aa'])
#print b.debug()
#b.remove('ab')
#ab = FrozenTree('ab', ('aba', 'abb'))
#print id(ab)
#print ab.debug()
#b.graft(ab, 'a')
#print id(b)
#print id(ab), id(b['ab'])
#print b.debug()
#b.graft('c')
#print b.debug()
#b.remove('c')
#print b.debug()
#b.graft('c', 'b')
#print b.debug()
#b.graft(('ba', 'bb'), 'c')
#print b.debug()
## FIXME : pop not working yet
## b.pop('c')
#print b.debug()
#b.prune('a')
#print b.debug()
#b.graft(('a', ('aa', 'ab')), None, 'b')
#print b.debug()
#print list(b.tops())
#print b.top(0)
#print b.top(1)
##print isinstance(a, list)
##print issubclass(a.__class__, list)
#print id(a)
#print a.root()
#print id(a)
#print a.next
#print a.child(0)
#print a.child(0).next
#print a.formatted()
#print a.debug()
#b = a
#print b.debug()
#c = a.copy()
#print c.debug()
#print c.formatted()
#print a == b
#print a is b
#print a == c
#print a is c
#for k in a.breadth() :
# print k.value
#for k in a :
# print k.value
#for k in a.postorder() :
# print k.value
#
#A = Tree ('a', ('aa', ('aaa', 'aab', 'aac'), 'ab', 'ac', ('aca', 'acb')), 'b', ('ba', 'bb'), 'c', ('ca', ('caa', 'cab', 'cac'), 'cb', ('cba', 'cbb'), 'cc', ('cca', 'ccb', 'ccc')))
#print id(A)
#for k in A :
# print k.value
#for k in A.preorder() :
# print k.value
#for k in A.postorder() :
# print k.value
#for k in A.breadth() :
# print k.value
#print b in a
#print c in a
#print a.child(0) in a
#print c.child(0) in a
#print c.child(0).value in a
#for k in A :
# parentValues = [j.value for j in k.parents()]
# root = k.root()
# if root :
# rootValue = root.value
# else :
# rootValue = None
# print "%s: %s, %s" % (k.value, rootValue, parentValues)
#
#
#temp = Tree ('a', ('aa', 'ab'), 'b', ('ba', 'bb'))
#suba = temp['aa']
#print suba
#print suba.root()
#print temp
#print id(temp)
#print suba.root().parent
#print id(suba.root().parent)
##print a[a.child(0)]
##print a
##l = a['a']
##print l
##print a[('a', 'aa')]
#del (temp)
## print a
#print suba
#print suba.root()
#print suba.root().parent
#print id(suba.root().parent)
#d = Tree ('a', ('aa', 'ab'), 'b', ('aa', 'ab'))
#def getAsList(tree, value):
# msg = ""
# try :
# tree[value]
# print "Found exactly one match"
# except :
# msg = "Not exactly one match"
# f = tree.get(value, [])
# if msg :
# print msg+": %i found" % len(f)
# for k in f:
# print k, k.parent
# return f
#getAsList(d, 'aa')
#getAsList(d,('b', 'ab'))
#getAsList(d,'xyz')
#getAsList(d,(None, 'aa'))
#getAsList(d,(None, d.child(0).child(0)))
#getAsList(d,(None, 'a', 'aa'))
#getAsList(d,('a', 'aa'))
#A = Tree ('a', ('aa', ('aaa', 'aab', 'aac'), 'ab', 'ac', ('aca', 'acb')), 'b', ('ba', 'bb'), 'c', ('ca', ('caa', 'cab', 'cac'), 'cb', ('cba', 'cbb'), 'cc', ('cca', 'ccb', 'ccc')))
#print list(A.path('aca'))
#for k in A.path('aca') :
# print k.value
#for k in A['aca'].path(A) :
# if k.value :
# print k.value
#
#def getParent(c) :
# res = cmds.listRelatives(c, parent=True)
# if res :
# return res[0]
#
#def isExactChildFn(c, p) :
# """ a function to check if c is a direct child of p """
# if (c is not None) and (p is not None) :
# #print "checking if "+c+" is child of "+p
# prt = getParent(c)
# if prt is not None and p is not None :
# return prt == p
# elif prt is None and p is None :
# return True
# else :
# return False
# else :
# return False
#
#def asOldHierarchy (*args) :
# """returns a Tree containing the PyMel objects representing Maya nodes that were passed
# as argument, or the current seleciton if no arguments are provided,
# in a way that mimics the Maya scene hierarchy existing on these nodes.
# Note that:
# >>> cmds.file ("~/pymel/examples/skel.ma", f=True, typ="mayaAscii",o=True)
# >>> File read in 0 seconds.
# >>> u'~/pymel/examples/skel.ma'
# >>> select ('FBX_Hips', replace=True, hierarchy=True)
# >>> sel=ls(selection=True)
# >>> skel=asHierarchy (sel)
# >>> skel.find('FBX_Head')
# >>> Tree(Joint('FBX_Head'), Tree(Joint('FBX_LeftEye')), Tree(Joint('FBX_RightEye')))
# >>> skel.parent('FBX_Head')
# >>> Joint('FBX_Neck1')
# >>> util.expandArgs( skel ) == tuple(sel) and sel == [k for k in skel]
# >>> True """
#
# if len(args) == 0 :
# nargs = cmds.ls( selection=True)
# else :
# args = util.expandArgs (*args)
# # nargs = map(PyNode, args)
# nargs = args
# # print "Arguments: %s"+str(nargs)
# result = oldTreeFromChildLink (isExactChildFn, *nargs)
# # print "Result: %s"+str(result)
# return result
#
#def asHierarchy (*args) :
# """returns a Tree containing the PyMel objects representing Maya nodes that were passed
# as argument, or the current seleciton if no arguments are provided,
# in a way that mimics the Maya scene hierarchy existing on these nodes.
# Note that:
# >>> cmds.file ("~/pymel/examples/skel.ma", f=True, typ="mayaAscii",o=True)
# >>> File read in 0 seconds.
# >>> u'~/pymel/examples/skel.ma'
# >>> select ('FBX_Hips', replace=True, hierarchy=True)
# >>> sel=ls(selection=True)
# >>> skel=asHierarchy (sel)
# >>> skel.find('FBX_Head')
# >>> Tree(Joint('FBX_Head'), Tree(Joint('FBX_LeftEye')), Tree(Joint('FBX_RightEye')))
# >>> skel.parent('FBX_Head')
# >>> Joint('FBX_Neck1')
# >>> util.expandArgs( skel ) == tuple(sel) and sel == [k for k in skel]
# >>> True """
#
# if len(args) == 0 :
# nargs = cmds.ls( selection=True)
# else :
# args = util.expandArgs (*args)
# # nargs = map(PyNode, args)
# nargs = args
# # print "Arguments: %s"+str(nargs)
# result = treeFromChildLink (isExactChildFn, *nargs)
# # print "Result: %s"+str(result)
# return result
#
#def asIndexedHierarchy (*args) :
# """returns a Tree containing the PyMel objects representing Maya nodes that were passed
# as argument, or the current seleciton if no arguments are provided,
# in a way that mimics the Maya scene hierarchy existing on these nodes.
# Note that:
# >>> cmds.file ("~/pymel/examples/skel.ma", f=True, typ="mayaAscii",o=True)
# >>> File read in 0 seconds.
# >>> u'~/pymel/examples/skel.ma'
# >>> select ('FBX_Hips', replace=True, hierarchy=True)
# >>> sel=ls(selection=True)
# >>> skel=asHierarchy (sel)
# >>> skel.find('FBX_Head')
# >>> Tree(Joint('FBX_Head'), Tree(Joint('FBX_LeftEye')), Tree(Joint('FBX_RightEye')))
# >>> skel.parent('FBX_Head')
# >>> Joint('FBX_Neck1')
# >>> util.expandArgs( skel ) == tuple(sel) and sel == [k for k in skel]
# >>> True """
#
# if len(args) == 0 :
# nargs = cmds.ls( selection=True)
# else :
# args = util.expandArgs (*args)
# # nargs = map(PyNode, args)
# nargs = args
# # print "Arguments: %s"+str(nargs)
# result = indexedTreeFromChildLink (isExactChildFn, *nargs)
# # print "Result: %s"+str(result)
# return result
#
#def asNetworkXHierarchy (*args) :
# """returns a Tree containing the PyMel objects representing Maya nodes that were passed
# as argument, or the current seleciton if no arguments are provided,
# in a way that mimics the Maya scene hierarchy existing on these nodes.
# Note that:
# >>> cmds.file ("~/pymel/examples/skel.ma", f=True, typ="mayaAscii",o=True)
# >>> File read in 0 seconds.
# >>> u'~/pymel/examples/skel.ma'
# >>> select ('FBX_Hips', replace=True, hierarchy=True)
# >>> sel=ls(selection=True)
# >>> skel=asHierarchy (sel)
# >>> skel.find('FBX_Head')
# >>> Tree(Joint('FBX_Head'), Tree(Joint('FBX_LeftEye')), Tree(Joint('FBX_RightEye')))
# >>> skel.parent('FBX_Head')
# >>> Joint('FBX_Neck1')
# >>> util.expandArgs( skel ) == tuple(sel) and sel == [k for k in skel]
# >>> True """
#
# if len(args) == 0 :
# nargs = cmds.ls( selection=True)
# else :
# args = util.expandArgs (*args)
# # nargs = map(PyNode, args)
# nargs = args
# # print "Arguments: "+str(nargs)
# result = networkXTreeFromChildLink (isExactChildFn, *nargs)
# # print "Result: "+str(result)
# return result
#
#
#
#def networkXTreeFromChildLink (isExactChildFn, *args):
# """
# This function will build a tree from the provided sequence and a comparison function in the form:
# cmp(a,b): returns True if a is a direct child of b, False else
# >>> lst = ['aab', 'aba', 'aa', 'bbb', 'ba', 'a', 'b', 'bb', 'ab', 'bab', 'bba']
# >>> def isChild(s1, s2) :
# >>> return s1.startswith(s2) and len(s1)==len(s2)+1
# >>> forest = treeFromChildLink (isChild, lst)
# >>> for tree in forest :
# >>> print tree
# A child cannot have more than one parent, if the isChild is ambiguous an exception will be raised
# >>> def isChild(s1, s2) :
# >>> return s1.startswith(s2)
# >>> forest = treeFromChildLink (isChild, lst)
# """
# deq = deque()
# for arg in args :
# t = nt.Tree()
# t.add_node(arg)
# t.root = arg
# deq.append(t)
# lst = []
# it = 0
# while deq:
# it+=1
# # print "iteration %i" % it
# c = deq.popleft()
# r = c.root
# hasParent = False
# fulllist = list(deq)+lst
# sd = len(deq)
# nextlist = []
# for p in fulllist :
# plist = []
# for n in p.nodes_iter() :
# # print "Is %s child of %s?" % (r, n)
# if isExactChildFn(r, n) :
# plist.append(n)
# # print "%s is child of %s!" % (r, n)
# for pr in plist :
# if not hasParent :
# # print "graft %s on %s, under %s" % (r, p.root, pr)
# np = p.union_sub(c, v_from=p.root, v_to=c.root)
# np.root = p.root
# p = np
# hasParent = True
# else :
# # should only be one parent, break on first encountered
# raise ValueError, "A child in Tree cannot have multiple parents, check the provided isChild(c, p) function: '%s'" % isExactChildFn.__name__
# nextlist.append(p)
# deq = deque(nextlist[:sd])
# lst = nextlist[sd:]
# # If it's a root we move it to final list
# if not hasParent :
# # print "%s has no parent, it goes to the list as root" % str(c.root)
# lst.append(c)
#
# # print "final list %s" % str(lst)
# if len(lst) == 1 :
# return lst[0]
# else :
# return tuple(lst)
setupUnittestModule(__name__) | bsd-3-clause |
mganeva/mantid | Framework/PythonInterface/test/python/plugins/algorithms/WorkflowAlgorithms/SANSILLReductionTest.py | 1 | 6050 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
from __future__ import (absolute_import, division, print_function)
import unittest
from mantid.api import MatrixWorkspace
from mantid.simpleapi import SANSILLReduction, config, mtd
class SANSILLReductionTest(unittest.TestCase):
    """Integration tests for the SANSILLReduction algorithm.

    Covers both monochromatic D11 runs and variable-TOF D33 runs, with one
    test per ``ProcessAs`` mode. Each test checks the shape, axis unit and
    sample logs of the produced workspace.
    """

    # Facility configured before the test; saved so tearDown can restore it.
    _facility = None

    def setUp(self):
        # Force the ILL facility so bare run numbers resolve to ILL data.
        self._facility = config['default.facility']
        config.appendDataSearchSubDir('ILL/D11/')
        config.appendDataSearchSubDir('ILL/D33/')
        config['default.facility'] = 'ILL'

    def tearDown(self):
        config['default.facility'] = self._facility
        mtd.clear()

    def test_absorber(self):
        SANSILLReduction(Run='010462', ProcessAs='Absorber', OutputWorkspace='Cd')
        self._check_output(mtd['Cd'], True, 1, 128 * 128)
        self._check_process_flag(mtd['Cd'], 'Absorber')

    def test_beam(self):
        SANSILLReduction(Run='010414', ProcessAs='Beam', OutputWorkspace='Db', FluxOutputWorkspace='Fl')
        self._check_output(mtd['Db'], True, 1, 128 * 128)
        self._check_process_flag(mtd['Db'], 'Beam')
        run = mtd['Db'].getRun()
        # Beam center found by the fit, in metres.
        self.assertAlmostEqual(run.getLogData('BeamCenterX').value, -0.0048, delta=1e-4)
        self.assertAlmostEqual(run.getLogData('BeamCenterY').value, -0.0027, delta=1e-4)
        self._check_output(mtd['Fl'], False, 1, 128 * 128)
        self._check_process_flag(mtd['Fl'], 'Beam')
        self.assertAlmostEqual(mtd['Fl'].readY(0)[0], 6628249, delta=1)
        self.assertAlmostEqual(mtd['Fl'].readE(0)[0], 8566, delta=1)

    def test_transmission(self):
        SANSILLReduction(Run='010414', ProcessAs='Beam', OutputWorkspace='Db')
        SANSILLReduction(Run='010585', ProcessAs='Transmission', BeamInputWorkspace='Db', OutputWorkspace='Tr')
        self.assertAlmostEqual(mtd['Tr'].readY(0)[0], 0.640, delta=1e-3)
        self.assertAlmostEqual(mtd['Tr'].readE(0)[0], 0.0019, delta=1e-4)
        self._check_process_flag(mtd['Tr'], 'Transmission')

    def test_container(self):
        SANSILLReduction(Run='010460', ProcessAs='Container', OutputWorkspace='can')
        self._check_output(mtd['can'], True, 1, 128 * 128)
        self._check_process_flag(mtd['can'], 'Container')

    def test_reference(self):
        SANSILLReduction(Run='010453', ProcessAs='Reference', SensitivityOutputWorkspace='sens', OutputWorkspace='water')
        self._check_output(mtd['water'], True, 1, 128 * 128)
        self._check_output(mtd['sens'], False, 1, 128 * 128)
        self._check_process_flag(mtd['water'], 'Reference')
        self._check_process_flag(mtd['sens'], 'Sensitivity')

    def test_sample(self):
        SANSILLReduction(Run='010569', ProcessAs='Sample', OutputWorkspace='sample')
        self._check_output(mtd['sample'], True, 1, 128 * 128)
        self._check_process_flag(mtd['sample'], 'Sample')

    def test_absorber_tof(self):
        # D33 VTOF
        # actually this is a container run, not an absorber, but is fine for this test
        SANSILLReduction(Run='093409', ProcessAs='Absorber', OutputWorkspace='absorber')
        self._check_output(mtd['absorber'], True, 30, 256 * 256)
        self._check_process_flag(mtd['absorber'], 'Absorber')

    def test_beam_tof(self):
        # D33 VTOF
        SANSILLReduction(Run='093406', ProcessAs='Beam', OutputWorkspace='beam', FluxOutputWorkspace='flux')
        self._check_output(mtd['beam'], True, 30, 256 * 256)
        self._check_process_flag(mtd['beam'], 'Beam')
        run = mtd['beam'].getRun()
        self.assertAlmostEqual(run.getLogData('BeamCenterX').value, -0.0025, delta=1e-4)
        self.assertAlmostEqual(run.getLogData('BeamCenterY').value, 0.0009, delta=1e-4)
        self._check_output(mtd['flux'], False, 30, 256 * 256)
        self._check_process_flag(mtd['flux'], 'Beam')

    def test_transmission_tof(self):
        # D33 VTOF
        SANSILLReduction(Run='093406', ProcessAs='Beam', OutputWorkspace='beam')
        SANSILLReduction(Run='093407', ProcessAs='Transmission', BeamInputWorkspace='beam', OutputWorkspace='ctr')
        self._check_output(mtd['ctr'], False, 75, 1)

    def test_container_tof(self):
        # D33 VTOF
        # this is actually a sample run, not water, but is fine for this test
        SANSILLReduction(Run='093410', ProcessAs='Reference', OutputWorkspace='ref')
        self._check_output(mtd['ref'], True, 30, 256 * 256)
        self._check_process_flag(mtd['ref'], 'Reference')

    def test_sample_tof(self):
        # D33 VTOF, Pluronic F127
        SANSILLReduction(Run='093410', ProcessAs='Sample', OutputWorkspace='sample')
        self._check_output(mtd['sample'], True, 30, 256 * 256)
        self._check_process_flag(mtd['sample'], 'Sample')

    def _check_process_flag(self, ws, value):
        """Assert that the ProcessedAs sample log equals the expected mode."""
        # Fixed: the original used assertTrue(a, b), whose second argument is
        # only the failure *message* -- it never compared the two values, so
        # a wrong ProcessedAs flag could not make any test fail.
        self.assertEqual(ws.getRun().getLogData('ProcessedAs').value, value)

    def _check_output(self, ws, logs, blocksize, spectra):
        """Check workspace type, shape, axis unit and (optionally) the
        reduction sample logs."""
        self.assertTrue(ws)
        self.assertTrue(isinstance(ws, MatrixWorkspace))
        self.assertTrue(ws.isHistogramData())
        self.assertTrue(not ws.isDistribution())
        self.assertEqual(ws.getAxis(0).getUnit().unitID(), "Wavelength")
        self.assertEqual(ws.blocksize(), blocksize)
        self.assertEqual(ws.getNumberHistograms(), spectra)
        self.assertTrue(ws.getInstrument())
        self.assertTrue(ws.getRun())
        self.assertTrue(ws.getHistory())
        if logs:
            self.assertTrue(ws.getRun().hasProperty('qmin'))
            self.assertTrue(ws.getRun().hasProperty('qmax'))
            self.assertTrue(ws.getRun().hasProperty('l2'))
            self.assertTrue(ws.getRun().hasProperty('pixel_height'))
            self.assertTrue(ws.getRun().hasProperty('pixel_width'))
            self.assertTrue(ws.getRun().hasProperty('collimation.actual_position'))
if __name__ == '__main__':
unittest.main()
| gpl-3.0 |
darkleons/lama | openerp/modules/registry.py | 24 | 18403 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
""" Models registries.
"""
from collections import Mapping
from contextlib import contextmanager
import logging
import threading
import openerp
from .. import SUPERUSER_ID
from openerp.tools import assertion_report, lazy_property
_logger = logging.getLogger(__name__)
class Registry(Mapping):
    """ Model registry for a particular database.
    The registry is essentially a mapping between model names and model
    instances. There is one registry instance per database.
    """
    def __init__(self, db_name):
        """ Build an (empty) registry bound to database `db_name`. """
        super(Registry, self).__init__()
        self.models = {} # model name/model instance mapping
        self._sql_error = {}
        self._store_function = {}
        self._pure_function_fields = {} # {model: [field, ...], ...}
        self._init = True
        self._init_parent = {}
        self._assertion_report = assertion_report.assertion_report()
        self.fields_by_model = None
        # modules fully loaded (maintained during init phase by `loading` module)
        self._init_modules = set()
        self.db_name = db_name
        self._db = openerp.sql_db.db_connect(db_name)
        # special cursor for test mode; None means "normal" mode
        self.test_cr = None
        # Indicates that the registry is fully loaded and ready for use.
        self.ready = False
        # Inter-process signaling (used only when openerp.multi_process is True):
        # The `base_registry_signaling` sequence indicates the whole registry
        # must be reloaded.
        # The `base_cache_signaling sequence` indicates all caches must be
        # invalidated (i.e. cleared).
        self.base_registry_signaling_sequence = None
        self.base_cache_signaling_sequence = None
        # Flag indicating if at least one model cache has been cleared.
        # Useful only in a multi-process context.
        self._any_cache_cleared = False
        cr = self.cursor()
        has_unaccent = openerp.modules.db.has_unaccent(cr)
        if openerp.tools.config['unaccent'] and not has_unaccent:
            _logger.warning("The option --unaccent was given but no unaccent() function was found in database.")
        self.has_unaccent = openerp.tools.config['unaccent'] and has_unaccent
        cr.close()
    #
    # Mapping abstract methods implementation
    # => mixin provides methods keys, items, values, get, __eq__, and __ne__
    #
    def __len__(self):
        """ Return the size of the registry. """
        return len(self.models)
    def __iter__(self):
        """ Return an iterator over all model names. """
        return iter(self.models)
    def __getitem__(self, model_name):
        """ Return the model with the given name or raise KeyError if it doesn't exist."""
        return self.models[model_name]
    def __call__(self, model_name):
        """ Same as ``self[model_name]``. """
        return self.models[model_name]
    @lazy_property
    def pure_function_fields(self):
        """ Return the list of pure function fields (field objects) """
        fields = []
        for mname, fnames in self._pure_function_fields.iteritems():
            model_fields = self[mname]._fields
            for fname in fnames:
                fields.append(model_fields[fname])
        return fields
    def do_parent_store(self, cr):
        """ Compute the parent-store data for every model registered in
        ``_init_parent``, then leave the init phase. """
        for o in self._init_parent:
            self.get(o)._parent_store_compute(cr)
        self._init = False
    def obj_list(self):
        """ Return the list of model names in this registry."""
        return self.keys()
    def add(self, model_name, model):
        """ Add or replace a model in the registry."""
        self.models[model_name] = model
    def load(self, cr, module):
        """ Load a given module in the registry.
        At the Python level, the modules are already loaded, but not yet on a
        per-registry level. This method populates a registry with the given
        modules, i.e. it instanciates all the classes of a the given module
        and registers them in the registry.
        """
        from .. import models
        models_to_load = [] # need to preserve loading order
        lazy_property.reset_all(self)
        # Instantiate registered classes (via the MetaModel automatic discovery
        # or via explicit constructor call), and add them to the pool.
        for cls in models.MetaModel.module_to_models.get(module.name, []):
            # models register themselves in self.models
            model = cls._build_model(self, cr)
            if model._name not in models_to_load:
                # avoid double-loading models whose declaration is split
                models_to_load.append(model._name)
        return [self.models[m] for m in models_to_load]
    def setup_models(self, cr, partial=False):
        """ Complete the setup of models.
        This must be called after loading modules and before using the ORM.
        :param partial: ``True`` if all models have not been loaded yet.
        """
        # prepare the setup on all models
        for model in self.models.itervalues():
            model._prepare_setup_fields(cr, SUPERUSER_ID)
        # do the actual setup from a clean state
        self._m2m = {}
        for model in self.models.itervalues():
            model._setup_fields(cr, SUPERUSER_ID, partial=partial)
    def clear_caches(self):
        """ Clear the caches
        This clears the caches associated to methods decorated with
        ``tools.ormcache`` or ``tools.ormcache_multi`` for all the models.
        """
        for model in self.models.itervalues():
            model.clear_caches()
        # Special case for ir_ui_menu which does not use openerp.tools.ormcache.
        ir_ui_menu = self.models.get('ir.ui.menu')
        if ir_ui_menu is not None:
            ir_ui_menu.clear_cache()
    # Useful only in a multi-process context.
    def reset_any_cache_cleared(self):
        """ Reset the flag tracking that a model cache was cleared. """
        self._any_cache_cleared = False
    # Useful only in a multi-process context.
    def any_cache_cleared(self):
        """ Return whether at least one model cache has been cleared. """
        return self._any_cache_cleared
    @classmethod
    def setup_multi_process_signaling(cls, cr):
        """ Create (if needed) the signaling sequences and return their
        current values, or ``(None, None)`` outside multi-process mode. """
        if not openerp.multi_process:
            return None, None
        # Inter-process signaling:
        # The `base_registry_signaling` sequence indicates the whole registry
        # must be reloaded.
        # The `base_cache_signaling sequence` indicates all caches must be
        # invalidated (i.e. cleared).
        cr.execute("""SELECT sequence_name FROM information_schema.sequences WHERE sequence_name='base_registry_signaling'""")
        if not cr.fetchall():
            cr.execute("""CREATE SEQUENCE base_registry_signaling INCREMENT BY 1 START WITH 1""")
            cr.execute("""SELECT nextval('base_registry_signaling')""")
            cr.execute("""CREATE SEQUENCE base_cache_signaling INCREMENT BY 1 START WITH 1""")
            cr.execute("""SELECT nextval('base_cache_signaling')""")
        cr.execute("""
                    SELECT base_registry_signaling.last_value,
                           base_cache_signaling.last_value
                    FROM base_registry_signaling, base_cache_signaling""")
        r, c = cr.fetchone()
        _logger.debug("Multiprocess load registry signaling: [Registry: # %s] "\
                      "[Cache: # %s]",
                      r, c)
        return r, c
    def enter_test_mode(self):
        """ Enter the 'test' mode, where one cursor serves several requests. """
        assert self.test_cr is None
        self.test_cr = self._db.test_cursor()
        RegistryManager.enter_test_mode()
    def leave_test_mode(self):
        """ Leave the test mode. """
        assert self.test_cr is not None
        self.test_cr.force_close()
        self.test_cr = None
        RegistryManager.leave_test_mode()
    def cursor(self):
        """ Return a new cursor for the database. The cursor itself may be used
        as a context manager to commit/rollback and close automatically.
        """
        cr = self.test_cr
        if cr is not None:
            # While in test mode, we use one special cursor across requests. The
            # test cursor uses a reentrant lock to serialize accesses. The lock
            # is granted here by cursor(), and automatically released by the
            # cursor itself in its method close().
            cr.acquire()
            return cr
        return self._db.cursor()
class DummyRLock(object):
    """No-op stand-in for a reentrant lock, used while running rpc/js tests.

    It honours the acquire/release API and the context-manager protocol of
    ``threading.RLock`` but performs no synchronization at all, so test code
    written against a real lock runs unchanged and unblocked.
    """
    def acquire(self):
        # Intentionally does nothing.
        pass
    def release(self):
        # Intentionally does nothing.
        pass
    def __enter__(self):
        # Delegates to acquire(); like the original, returns None.
        self.acquire()
    def __exit__(self, type, value, traceback):
        # Delegates to release(); never suppresses exceptions.
        self.release()
class RegistryManager(object):
    """ Model registries manager.
        The manager is responsible for creation and deletion of model
        registries (essentially database connection/model registry pairs).
    """
    # Mapping between db name and model registry.
    # Accessed through the methods below.
    registries = {}
    _lock = threading.RLock()
    _saved_lock = None
    @classmethod
    def lock(cls):
        """ Return the current registry lock. """
        return cls._lock
    @classmethod
    def enter_test_mode(cls):
        """ Enter the 'test' mode, where the registry is no longer locked. """
        assert cls._saved_lock is None
        cls._lock, cls._saved_lock = DummyRLock(), cls._lock
    @classmethod
    def leave_test_mode(cls):
        """ Leave the 'test' mode. """
        assert cls._saved_lock is not None
        cls._lock, cls._saved_lock = cls._saved_lock, None
    @classmethod
    def get(cls, db_name, force_demo=False, status=None, update_module=False):
        """ Return a registry for a given database name."""
        with cls.lock():
            try:
                return cls.registries[db_name]
            except KeyError:
                # Not loaded yet: build a fresh registry for this database.
                return cls.new(db_name, force_demo, status,
                               update_module)
            finally:
                # set db tracker - cleaned up at the WSGI
                # dispatching phase in openerp.service.wsgi_server.application
                threading.current_thread().dbname = db_name
    @classmethod
    def new(cls, db_name, force_demo=False, status=None,
            update_module=False):
        """ Create and return a new registry for a given database name.
        The (possibly) previous registry for that database name is discarded.
        """
        import openerp.modules
        with cls.lock():
            with openerp.api.Environment.manage():
                registry = Registry(db_name)
                # Initializing a registry will call general code which will in
                # turn call registries.get (this object) to obtain the registry
                # being initialized. Make it available in the registries
                # dictionary then remove it if an exception is raised.
                cls.delete(db_name)
                cls.registries[db_name] = registry
                try:
                    with registry.cursor() as cr:
                        seq_registry, seq_cache = Registry.setup_multi_process_signaling(cr)
                        registry.base_registry_signaling_sequence = seq_registry
                        registry.base_cache_signaling_sequence = seq_cache
                    # This should be a method on Registry
                    openerp.modules.load_modules(registry._db, force_demo, status, update_module)
                except Exception:
                    del cls.registries[db_name]
                    raise
                # load_modules() above can replace the registry by calling
                # indirectly new() again (when modules have to be uninstalled).
                # Yeah, crazy.
                registry = cls.registries[db_name]
                cr = registry.cursor()
                try:
                    registry.do_parent_store(cr)
                    cr.commit()
                finally:
                    cr.close()
                registry.ready = True
                if update_module:
                    # only in case of update, otherwise we'll have an infinite reload loop!
                    cls.signal_registry_change(db_name)
        return registry
    @classmethod
    def delete(cls, db_name):
        """Delete the registry linked to a given database. """
        with cls.lock():
            if db_name in cls.registries:
                cls.registries[db_name].clear_caches()
                del cls.registries[db_name]
    @classmethod
    def delete_all(cls):
        """Delete all the registries. """
        with cls.lock():
            for db_name in cls.registries.keys():
                cls.delete(db_name)
    @classmethod
    def clear_caches(cls, db_name):
        """Clear caches
        This clears the caches associated to methods decorated with
        ``tools.ormcache`` or ``tools.ormcache_multi`` for all the models
        of the given database name.
        This method is given to spare you a ``RegistryManager.get(db_name)``
        that would loads the given database if it was not already loaded.
        """
        with cls.lock():
            if db_name in cls.registries:
                cls.registries[db_name].clear_caches()
    @classmethod
    def check_registry_signaling(cls, db_name):
        """
        Check if the modules have changed and performs all necessary operations to update
        the registry of the corresponding database.
        :returns: True if changes has been detected in the database and False otherwise.
        """
        changed = False
        if openerp.multi_process and db_name in cls.registries:
            registry = cls.get(db_name)
            cr = registry.cursor()
            try:
                cr.execute("""
                    SELECT base_registry_signaling.last_value,
                           base_cache_signaling.last_value
                    FROM base_registry_signaling, base_cache_signaling""")
                r, c = cr.fetchone()
                _logger.debug("Multiprocess signaling check: [Registry - old# %s new# %s] "\
                              "[Cache - old# %s new# %s]",
                              registry.base_registry_signaling_sequence, r,
                              registry.base_cache_signaling_sequence, c)
                # Check if the model registry must be reloaded (e.g. after the
                # database has been updated by another process).
                if registry.base_registry_signaling_sequence is not None and registry.base_registry_signaling_sequence != r:
                    changed = True
                    _logger.info("Reloading the model registry after database signaling.")
                    registry = cls.new(db_name)
                # Check if the model caches must be invalidated (e.g. after a write
                # occured on another process). Don't clear right after a registry
                # has been reload.
                elif registry.base_cache_signaling_sequence is not None and registry.base_cache_signaling_sequence != c:
                    changed = True
                    _logger.info("Invalidating all model caches after database signaling.")
                    registry.clear_caches()
                    registry.reset_any_cache_cleared()
                    # One possible reason caches have been invalidated is the
                    # use of decimal_precision.write(), in which case we need
                    # to refresh fields.float columns.
                    env = openerp.api.Environment(cr, SUPERUSER_ID, {})
                    for model in registry.values():
                        for field in model._fields.values():
                            if field.type == 'float':
                                field._setup_digits(env)
                registry.base_registry_signaling_sequence = r
                registry.base_cache_signaling_sequence = c
            finally:
                cr.close()
        return changed
    @classmethod
    def signal_caches_change(cls, db_name):
        """ If this registry cleared any model cache, advance the cache
        signaling sequence so other processes invalidate their caches. """
        if openerp.multi_process and db_name in cls.registries:
            # Check the registries if any cache has been cleared and signal it
            # through the database to other processes.
            registry = cls.get(db_name)
            if registry.any_cache_cleared():
                _logger.info("At least one model cache has been cleared, signaling through the database.")
                cr = registry.cursor()
                r = 1
                try:
                    cr.execute("select nextval('base_cache_signaling')")
                    r = cr.fetchone()[0]
                finally:
                    cr.close()
                registry.base_cache_signaling_sequence = r
                registry.reset_any_cache_cleared()
    @classmethod
    def signal_registry_change(cls, db_name):
        """ Advance the registry signaling sequence so other processes
        reload their model registry for this database. """
        if openerp.multi_process and db_name in cls.registries:
            _logger.info("Registry changed, signaling through the database")
            registry = cls.get(db_name)
            cr = registry.cursor()
            r = 1
            try:
                cr.execute("select nextval('base_registry_signaling')")
                r = cr.fetchone()[0]
            finally:
                cr.close()
            registry.base_registry_signaling_sequence = r
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
vadimtk/chrome4sdp | third_party/markdown/extensions/meta.py | 109 | 4514 | # markdown is released under the BSD license
# Copyright 2007, 2008 The Python Markdown Project (v. 1.7 and later)
# Copyright 2004, 2005, 2006 Yuri Takhteyev (v. 0.2-1.6b)
# Copyright 2004 Manfred Stienstra (the original version)
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE PYTHON MARKDOWN PROJECT ''AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL ANY CONTRIBUTORS TO THE PYTHON MARKDOWN PROJECT
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
Meta Data Extension for Python-Markdown
=======================================
This extension adds Meta Data handling to markdown.
Basic Usage:
>>> import markdown
>>> text = '''Title: A Test Doc.
... Author: Waylan Limberg
... John Doe
... Blank_Data:
...
... The body. This is paragraph one.
... '''
>>> md = markdown.Markdown(['meta'])
>>> print md.convert(text)
<p>The body. This is paragraph one.</p>
>>> print md.Meta
{u'blank_data': [u''], u'author': [u'Waylan Limberg', u'John Doe'], u'title': [u'A Test Doc.']}
Make sure text without Meta Data still works (markdown < 1.6b returns a <p>).
>>> text = ' Some Code - not extra lines of meta data.'
>>> md = markdown.Markdown(['meta'])
>>> print md.convert(text)
<pre><code>Some Code - not extra lines of meta data.
</code></pre>
>>> md.Meta
{}
Copyright 2007-2008 [Waylan Limberg](http://achinghead.com).
Project website: <http://packages.python.org/Markdown/meta_data.html>
Contact: markdown@freewisdom.org
License: BSD (see ../LICENSE.md for details)
"""
from __future__ import absolute_import
from __future__ import unicode_literals
from . import Extension
from ..preprocessors import Preprocessor
import re
# Global Vars
META_RE = re.compile(r'^[ ]{0,3}(?P<key>[A-Za-z0-9_-]+):\s*(?P<value>.*)')
META_MORE_RE = re.compile(r'^[ ]{4,}(?P<value>.*)')
class MetaExtension(Extension):
    """Meta-Data extension for Python-Markdown."""
    def extendMarkdown(self, md, md_globals):
        """Register the meta-data preprocessor at the head of the chain."""
        preprocessor = MetaPreprocessor(md)
        md.preprocessors.add("meta", preprocessor, "_begin")
class MetaPreprocessor(Preprocessor):
    """ Get Meta-Data. """

    def run(self, lines):
        """ Parse Meta-Data and store in Markdown.Meta.

        Consumes leading ``Key: value`` lines (and their indented
        continuation lines) from ``lines``, stores them as
        ``{key: [value, ...]}`` in ``self.markdown.Meta``, and returns the
        remaining document lines.
        """
        meta = {}
        key = None
        # Loop while lines remain: the original `while 1` popped
        # unconditionally and raised IndexError on an empty document.
        while lines:
            line = lines.pop(0)
            if line.strip() == '':
                break # blank line - done
            m1 = META_RE.match(line)
            if m1:
                key = m1.group('key').lower().strip()
                value = m1.group('value').strip()
                try:
                    meta[key].append(value)
                except KeyError:
                    meta[key] = [value]
            else:
                m2 = META_MORE_RE.match(line)
                if m2 and key:
                    # Add another line to existing key
                    meta[key].append(m2.group('value').strip())
                else:
                    # Not meta data: put the line back and stop scanning.
                    lines.insert(0, line)
                    break # no meta data - done
        self.markdown.Meta = meta
        return lines
def makeExtension(configs={}):
    """Entry point used by markdown to instantiate the extension.

    NOTE(review): the mutable default is part of the historical markdown
    extension-loading convention; presumably Extension.__init__ does not
    mutate it in place -- verify before changing the signature.
    """
    return MetaExtension(configs=configs)
| bsd-3-clause |
CyanogenMod/android_external_chromium_org | tools/android/adb_profile_chrome/trace_packager.py | 9 | 2760 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import gzip
import json
import os
import shutil
import sys
import zipfile
from adb_profile_chrome import util
from pylib import constants
sys.path.append(os.path.join(constants.DIR_SOURCE_ROOT,
'third_party',
'trace-viewer'))
# pylint: disable=F0401
from trace_viewer.build import trace2html
def _PackageTracesAsHtml(trace_files, html_file):
  """Renders |trace_files| into a single HTML page and deletes the sources."""
  with open(html_file, 'w') as out:
    trace2html.WriteHTMLForTracesToFile(trace_files, out)
  for path in trace_files:
    os.unlink(path)
def _CompressFile(host_file, output):
with gzip.open(output, 'wb') as out, \
open(host_file, 'rb') as input_file:
out.write(input_file.read())
os.unlink(host_file)
def _ArchiveFiles(host_files, output):
with zipfile.ZipFile(output, 'w', zipfile.ZIP_DEFLATED) as z:
for host_file in host_files:
z.write(host_file)
os.unlink(host_file)
def _MergeTracesIfNeeded(trace_files):
if len(trace_files) <= 1:
return trace_files
merge_candidates = []
for trace_file in trace_files:
with open(trace_file) as f:
# Try to detect a JSON file cheaply since that's all we can merge.
if f.read(1) != '{':
continue
f.seek(0)
try:
json_data = json.load(f)
except ValueError:
continue
merge_candidates.append((trace_file, json_data))
if len(merge_candidates) <= 1:
return trace_files
other_files = [f for f in trace_files
if not f in [c[0] for c in merge_candidates]]
merged_file, merged_data = merge_candidates[0]
for trace_file, json_data in merge_candidates[1:]:
for key, value in json_data.items():
if not merged_data.get(key) or json_data[key]:
merged_data[key] = value
os.unlink(trace_file)
with open(merged_file, 'w') as f:
json.dump(merged_data, f)
return [merged_file] + other_files
def PackageTraces(trace_files, output=None, compress=False, write_json=False):
  """Packages trace files into a single artifact and returns its path.

  JSON traces are merged first. Unless |write_json| is set, the traces are
  rendered into one HTML page. A single resulting file may be gzipped when
  |compress| is set; multiple resulting files are zip-archived.
  """
  trace_files = _MergeTracesIfNeeded(trace_files)
  if not write_json:
    # Render everything into one HTML page next to the first trace.
    html_file = os.path.splitext(trace_files[0])[0] + '.html'
    _PackageTracesAsHtml(trace_files, html_file)
    trace_files = [html_file]

  multiple_files = len(trace_files) > 1
  if compress and not multiple_files:
    result = output if output else trace_files[0] + '.gz'
    _CompressFile(trace_files[0], result)
    return result
  if multiple_files:
    default_name = 'chrome-combined-trace-%s.zip' % util.GetTraceTimestamp()
    result = output if output else default_name
    _ArchiveFiles(trace_files, result)
    return result
  if output:
    shutil.move(trace_files[0], output)
    return output
  return trace_files[0]
| bsd-3-clause |
laanwj/bitcoin | test/functional/feature_filelock.py | 22 | 2522 | #!/usr/bin/env python3
# Copyright (c) 2018-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Check that it's not possible to start a second bitcoind instance using the same datadir or wallet."""
import os
import random
import string
from test_framework.test_framework import BitcoinTestFramework
from test_framework.test_node import ErrorMatch
class FilelockTest(BitcoinTestFramework):
    """Attempt to start a second bitcoind on node0's datadir/wallet and
    verify the lock conflict is reported as an init error."""
    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 2
    def setup_network(self):
        # Only node0 is actually started; node1 exists solely so we can
        # attempt (and expect to fail) a second start against node0's data.
        self.add_nodes(self.num_nodes, extra_args=None)
        self.nodes[0].start()
        self.nodes[0].wait_for_rpc_connection()
    def run_test(self):
        datadir = os.path.join(self.nodes[0].datadir, self.chain)
        self.log.info("Using datadir {}".format(datadir))
        self.log.info("Check that we can't start a second bitcoind instance using the same datadir")
        expected_msg = "Error: Cannot obtain a lock on data directory {0}. {1} is probably already running.".format(datadir, self.config['environment']['PACKAGE_NAME'])
        self.nodes[1].assert_start_raises_init_error(extra_args=['-datadir={}'.format(self.nodes[0].datadir), '-noserver'], expected_msg=expected_msg)
        if self.is_wallet_compiled():
            def check_wallet_filelock(descriptors):
                # Random wallet name so repeated runs never collide.
                wallet_name = ''.join([random.choice(string.ascii_lowercase) for _ in range(6)])
                self.nodes[0].createwallet(wallet_name=wallet_name, descriptors=descriptors)
                wallet_dir = os.path.join(datadir, 'wallets')
                self.log.info("Check that we can't start a second bitcoind instance using the same wallet")
                # SQLite (descriptor wallets) and BDB (legacy wallets) report
                # the lock conflict with different messages.
                if descriptors:
                    expected_msg = "Error: SQLiteDatabase: Unable to obtain an exclusive lock on the database, is it being used by another bitcoind?"
                else:
                    expected_msg = "Error: Error initializing wallet database environment"
                self.nodes[1].assert_start_raises_init_error(extra_args=['-walletdir={}'.format(wallet_dir), '-wallet=' + wallet_name, '-noserver'], expected_msg=expected_msg, match=ErrorMatch.PARTIAL_REGEX)
            if self.is_bdb_compiled():
                check_wallet_filelock(False)
            if self.is_sqlite_compiled():
                check_wallet_filelock(True)
if __name__ == '__main__':
FilelockTest().main()
| mit |
devs1991/test_edx_docmode | venv/lib/python2.7/site-packages/networkx/utils/tests/test_decorators.py | 10 | 4337 | import tempfile
import os
from nose.tools import *
import networkx as nx
from networkx.utils.decorators import open_file,require,not_implemented_for
def test_not_implemented_decorator():
    """An allowed graph type passes through the decorator untouched."""
    @not_implemented_for('directed')
    def noop(G):
        pass
    noop(nx.Graph())
@raises(KeyError)
def test_not_implemented_decorator_key():
    """An unknown graph-type key raises KeyError when called."""
    @not_implemented_for('foo')
    def noop(G):
        pass
    noop(nx.Graph())
@raises(nx.NetworkXNotImplemented)
def test_not_implemented_decorator_raise():
    """Calling with a graph type the function excludes raises."""
    @not_implemented_for('graph')
    def noop(G):
        pass
    noop(nx.Graph())
def test_require_decorator1():
    """require() succeeds when every named module is importable."""
    @require('os', 'sys')
    def needs_stdlib():
        import os
        import sys
    needs_stdlib()
def test_require_decorator2():
    """require() raises NetworkXError for a missing module."""
    @require('blahhh')
    def needs_missing():
        import blahhh
    assert_raises(nx.NetworkXError, needs_missing)
class TestOpenFileDecorator(object):
    """Tests for the open_file decorator with positional args, defaulted
    args and keyword args, passed as either path strings or file objects."""

    def setUp(self):
        self.text = ['Blah... ', 'BLAH ', 'BLAH!!!!']
        self.fobj = tempfile.NamedTemporaryFile('wb+', delete=False)
        self.name = self.fobj.name

    def write(self, path):
        """Write the fixture text to an already-open file object."""
        for text in self.text:
            path.write(text.encode('ascii'))

    @open_file(1, 'r')
    def read(self, path):
        return path.readlines()[0]

    @staticmethod
    @open_file(0, 'wb')
    def writer_arg0(path):
        path.write('demo'.encode('ascii'))

    @open_file(1, 'wb+')
    def writer_arg1(self, path):
        self.write(path)

    @open_file(2, 'wb')
    def writer_arg2default(self, x, path=None):
        if path is None:
            fh = tempfile.NamedTemporaryFile('wb+', delete=False)
            close_fh = True
        else:
            fh = path
            close_fh = False
        try:
            self.write(fh)
        finally:
            if close_fh:
                fh.close()

    @open_file(4, 'wb')
    def writer_arg4default(self, x, y, other='hello', path=None, **kwargs):
        if path is None:
            fh = tempfile.NamedTemporaryFile('wb+', delete=False)
            close_fh = True
        else:
            fh = path
            close_fh = False
        try:
            self.write(fh)
        finally:
            if close_fh:
                fh.close()

    @open_file('path', 'wb')
    def writer_kwarg(self, **kwargs):
        path = kwargs.get('path', None)
        if path is None:
            fh = tempfile.NamedTemporaryFile('wb+', delete=False)
            close_fh = True
        else:
            fh = path
            close_fh = False
        try:
            self.write(fh)
        finally:
            if close_fh:
                fh.close()

    def test_writer_arg0_str(self):
        self.writer_arg0(self.name)

    def test_writer_arg0_fobj(self):
        self.writer_arg0(self.fobj)

    def test_writer_arg1_str(self):
        self.writer_arg1(self.name)
        assert_equal(self.read(self.name), ''.join(self.text))

    def test_writer_arg1_fobj(self):
        self.writer_arg1(self.fobj)
        assert_false(self.fobj.closed)
        self.fobj.close()
        assert_equal(self.read(self.name), ''.join(self.text))

    def test_writer_arg2default_str(self):
        self.writer_arg2default(0, path=None)
        self.writer_arg2default(0, path=self.name)
        assert_equal(self.read(self.name), ''.join(self.text))

    def test_writer_arg2default_fobj(self):
        self.writer_arg2default(0, path=self.fobj)
        assert_false(self.fobj.closed)
        self.fobj.close()
        assert_equal(self.read(self.name), ''.join(self.text))

    # Renamed from a second `test_writer_arg2default_fobj` definition which
    # shadowed (and therefore disabled) the real fobj test above.
    def test_writer_arg2default_no_path(self):
        self.writer_arg2default(0, path=None)

    # Renamed from `test_writer_arg4default_fobj`: it exercises the default
    # and string-path cases, never a file object.
    def test_writer_arg4default(self):
        self.writer_arg4default(0, 1, dog='dog', other='other2')
        self.writer_arg4default(0, 1, dog='dog', other='other2', path=self.name)
        assert_equal(self.read(self.name), ''.join(self.text))

    def test_writer_kwarg_str(self):
        self.writer_kwarg(path=self.name)
        assert_equal(self.read(self.name), ''.join(self.text))

    def test_writer_kwarg_fobj(self):
        self.writer_kwarg(path=self.fobj)
        self.fobj.close()
        assert_equal(self.read(self.name), ''.join(self.text))

    # Renamed from a second `test_writer_kwarg_fobj` definition which
    # shadowed (and therefore disabled) the real fobj test above.
    def test_writer_kwarg_no_path(self):
        self.writer_kwarg(path=None)

    def tearDown(self):
        os.remove(self.name)
| agpl-3.0 |
crosswalk-project/chromium-crosswalk-efl | tools/metrics/actions/print_style.py | 72 | 1380 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Holds the constants for pretty printing actions.xml."""
import os
import sys
# Import the metrics/common module for pretty print xml.
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'common'))
import pretty_print_xml
# Desired order for tag and tag attributes.
# { tag_name: [attribute_name, ...] }
ATTRIBUTE_ORDER = {
  'action': ['name'],
  'owner': [],
  'description': [],
  'obsolete': [],
}
# Tag names for top-level nodes whose children we don't want to indent.
TAGS_THAT_DONT_INDENT = ['actions']
# Extra vertical spacing rules for special tag names.
# {tag_name: (newlines_after_open, newlines_before_close, newlines_after_close)}
TAGS_THAT_HAVE_EXTRA_NEWLINE = {
  'actions': (2, 1, 1),
  'action': (1, 1, 1),
}
# Tags that we allow to be squished into a single line for brevity.
# All of these constants are consumed by GetPrintStyle() below.
TAGS_THAT_ALLOW_SINGLE_LINE = ['owner', 'description', 'obsolete']
def GetPrintStyle():
    """Build and return the XmlStyle used to pretty-print actions.xml."""
    style = pretty_print_xml.XmlStyle(ATTRIBUTE_ORDER,
                                      TAGS_THAT_HAVE_EXTRA_NEWLINE,
                                      TAGS_THAT_DONT_INDENT,
                                      TAGS_THAT_ALLOW_SINGLE_LINE)
    return style
| bsd-3-clause |
openstack/tempest | tempest/cmd/cleanup_service.py | 1 | 39207 | # Copyright 2015 Dell Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from urllib import parse as urllib
from oslo_log import log as logging
from tempest import clients
from tempest.common import credentials_factory as credentials
from tempest.common import identity
from tempest.common import utils
from tempest.common.utils import net_info
from tempest import config
from tempest.lib import exceptions
LOG = logging.getLogger('tempest.cmd.cleanup')
CONF = config.CONF

# Module-level caches of configured resources/flags.  All of these are
# populated by init_conf() and must be initialized before any of the
# cleanup service classes below are used.
CONF_FLAVORS = None
CONF_IMAGES = None
CONF_NETWORKS = []
CONF_PRIV_NETWORK_NAME = None
CONF_PUB_NETWORK = None
CONF_PUB_ROUTER = None
CONF_PROJECTS = None
CONF_USERS = None
IS_CINDER = None
IS_GLANCE = None
IS_NEUTRON = None
IS_NOVA = None
def init_conf():
    """Populate the module-level CONF_* / IS_* caches from tempest config.

    Must be called before any cleanup service is instantiated.  The stale
    ``global IS_HEAT`` declaration was removed: ``IS_HEAT`` is never
    assigned anywhere in this module (leftover from removed heat support).
    """
    global CONF_FLAVORS
    global CONF_IMAGES
    global CONF_NETWORKS
    global CONF_PRIV_NETWORK
    global CONF_PRIV_NETWORK_NAME
    global CONF_PUB_NETWORK
    global CONF_PUB_ROUTER
    global CONF_PROJECTS
    global CONF_USERS
    global IS_CINDER
    global IS_GLANCE
    global IS_NEUTRON
    global IS_NOVA

    IS_CINDER = CONF.service_available.cinder
    IS_GLANCE = CONF.service_available.glance
    IS_NEUTRON = CONF.service_available.neutron
    IS_NOVA = CONF.service_available.nova

    CONF_FLAVORS = [CONF.compute.flavor_ref, CONF.compute.flavor_ref_alt]
    CONF_IMAGES = [CONF.compute.image_ref, CONF.compute.image_ref_alt]
    CONF_PRIV_NETWORK_NAME = CONF.compute.fixed_network_name
    CONF_PUB_NETWORK = CONF.network.public_network_id
    CONF_PUB_ROUTER = CONF.network.public_router_id
    CONF_PROJECTS = [CONF.auth.admin_project_name]
    CONF_USERS = [CONF.auth.admin_username]

    if IS_NEUTRON:
        # the private network id can only be resolved via neutron itself
        CONF_PRIV_NETWORK = _get_network_id(CONF.compute.fixed_network_name,
                                            CONF.auth.admin_project_name)
        CONF_NETWORKS = [CONF_PUB_NETWORK, CONF_PRIV_NETWORK]
def _get_network_id(net_name, project_name):
    """Return the id of the network named *net_name* owned by
    *project_name*, or None when no such network exists."""
    admin_mgr = clients.Manager(
        credentials.get_configured_admin_credentials())
    networks_client = admin_mgr.networks_client
    projects_client = admin_mgr.projects_client
    project = identity.get_project_by_name(projects_client, project_name)
    owner_id = project['id']
    for net in networks_client.list_networks()['networks']:
        if net['project_id'] == owner_id and net['name'] == net_name:
            return net['id']
    return None
class BaseService(object):
    """Base class for all cleanup services.

    The kwargs dict supplied by the cleanup command (is_dry_run,
    is_save_state, is_preserve, saved_state_json, data, got_exceptions,
    optionally tenant_id, ...) is copied onto the instance as attributes.
    """

    def __init__(self, kwargs):
        self.client = None
        for key, value in kwargs.items():
            setattr(self, key, value)
        self.tenant_filter = {}
        if hasattr(self, 'tenant_id'):
            # neutron-style list calls filter by 'project_id'
            self.tenant_filter['project_id'] = self.tenant_id

    def _filter_by_tenant_id(self, item_list):
        """Return only the items belonging to self.tenant_id.

        The list is returned unchanged when no tenant filter applies or
        when the items carry no 'tenant_id' key.
        """
        if (item_list is None or
                not item_list or
                not hasattr(self, 'tenant_id') or
                self.tenant_id is None or
                'tenant_id' not in item_list[0]):
            return item_list
        return [item for item in item_list
                if item['tenant_id'] == self.tenant_id]

    def list(self):
        """Hook: return the service's resources (overridden by subclasses)."""
        pass

    def delete(self):
        """Hook: delete the service's resources (overridden by subclasses)."""
        pass

    def dry_run(self):
        """Hook: record what would be deleted (overridden by subclasses)."""
        pass

    def save_state(self):
        """Hook: record current resources as saved state (overridden)."""
        pass

    def run(self):
        """Dispatch to dry_run/save_state/delete based on instance flags."""
        try:
            if self.is_dry_run:
                self.dry_run()
            elif self.is_save_state:
                self.save_state()
            else:
                self.delete()
        except exceptions.NotImplemented as exc:
            # Many OpenStack services use extensions logic to implement the
            # features or resources. Tempest cleanup tries to clean up the test
            # resources without having much logic of extensions checks etc.
            # If any of the extension is missing then, service will return
            # NotImplemented error.
            msg = ("Got NotImplemented error in %s, full exception: %s" %
                   (str(self.__class__), str(exc)))
            LOG.exception(msg)
            self.got_exceptions.append(exc)
class SnapshotService(BaseService):
    """Cleanup service for Cinder volume snapshots."""

    def __init__(self, manager, **kwargs):
        super(SnapshotService, self).__init__(kwargs)
        self.client = manager.snapshots_client_latest

    def list(self):
        """Return snapshots, excluding any recorded in the saved state."""
        client = self.client
        snaps = client.list_snapshots()['snapshots']
        if not self.is_save_state:
            # recreate list removing saved snapshots
            # (membership tested on the dict itself; .keys() was redundant)
            snaps = [snap for snap in snaps
                     if snap['id'] not in self.saved_state_json['snapshots']]
        LOG.debug("List count, %s Snapshots", len(snaps))
        return snaps

    def delete(self):
        """Delete all listed snapshots, logging (not raising) failures."""
        snaps = self.list()
        client = self.client
        for snap in snaps:
            try:
                LOG.debug("Deleting Snapshot with id %s", snap['id'])
                client.delete_snapshot(snap['id'])
            except Exception:
                LOG.exception("Delete Snapshot %s exception.", snap['id'])

    def dry_run(self):
        snaps = self.list()
        self.data['snapshots'] = snaps

    def save_state(self):
        """Record existing snapshots as {id: name} in the state data."""
        snaps = self.list()
        self.data['snapshots'] = {}
        for snap in snaps:
            self.data['snapshots'][snap['id']] = snap['name']
class ServerService(BaseService):
    """Cleanup service for Nova servers."""

    def __init__(self, manager, **kwargs):
        super(ServerService, self).__init__(kwargs)
        self.client = manager.servers_client
        self.server_groups_client = manager.server_groups_client

    def list(self):
        """Return servers, excluding any recorded in the saved state."""
        client = self.client
        servers_body = client.list_servers()
        servers = servers_body['servers']
        if not self.is_save_state:
            # recreate list removing saved servers (redundant .keys() dropped)
            servers = [server for server in servers
                       if server['id'] not in self.saved_state_json['servers']]
        LOG.debug("List count, %s Servers", len(servers))
        return servers

    def delete(self):
        """Delete all listed servers, logging (not raising) failures."""
        client = self.client
        servers = self.list()
        for server in servers:
            try:
                LOG.debug("Deleting Server with id %s", server['id'])
                client.delete_server(server['id'])
            except Exception:
                LOG.exception("Delete Server %s exception.", server['id'])

    def dry_run(self):
        servers = self.list()
        self.data['servers'] = servers

    def save_state(self):
        """Record existing servers as {id: name} in the state data."""
        servers = self.list()
        self.data['servers'] = {}
        for server in servers:
            self.data['servers'][server['id']] = server['name']
class ServerGroupService(ServerService):
    """Cleanup service for Nova server groups (reuses ServerService init)."""

    def list(self):
        """Return server groups, excluding any recorded in saved state."""
        client = self.server_groups_client
        sgs = client.list_server_groups()['server_groups']
        if not self.is_save_state:
            # recreate list removing saved server_groups (.keys() dropped)
            sgs = [sg for sg in sgs
                   if sg['id'] not in self.saved_state_json['server_groups']]
        LOG.debug("List count, %s Server Groups", len(sgs))
        return sgs

    def delete(self):
        """Delete all listed server groups, logging failures."""
        client = self.server_groups_client
        sgs = self.list()
        for sg in sgs:
            try:
                LOG.debug("Deleting Server Group with id %s", sg['id'])
                client.delete_server_group(sg['id'])
            except Exception:
                LOG.exception("Delete Server Group %s exception.", sg['id'])

    def dry_run(self):
        sgs = self.list()
        self.data['server_groups'] = sgs

    def save_state(self):
        """Record existing server groups as {id: name}."""
        sgs = self.list()
        self.data['server_groups'] = {}
        for sg in sgs:
            self.data['server_groups'][sg['id']] = sg['name']
class KeyPairService(BaseService):
    """Cleanup service for Nova keypairs (keyed by name, not id)."""

    def __init__(self, manager, **kwargs):
        super(KeyPairService, self).__init__(kwargs)
        self.client = manager.keypairs_client

    def list(self):
        """Return keypairs, excluding any recorded in the saved state."""
        client = self.client
        keypairs = client.list_keypairs()['keypairs']
        if not self.is_save_state:
            # recreate list removing saved keypairs (.keys() dropped)
            keypairs = [keypair for keypair in keypairs
                        if keypair['keypair']['name']
                        not in self.saved_state_json['keypairs']]
        LOG.debug("List count, %s Keypairs", len(keypairs))
        return keypairs

    def delete(self):
        """Delete all listed keypairs by name, logging failures."""
        client = self.client
        keypairs = self.list()
        for k in keypairs:
            name = k['keypair']['name']
            try:
                LOG.debug("Deleting keypair %s", name)
                client.delete_keypair(name)
            except Exception:
                LOG.exception("Delete Keypair %s exception.", name)

    def dry_run(self):
        keypairs = self.list()
        self.data['keypairs'] = keypairs

    def save_state(self):
        """Record existing keypairs as {name: keypair-dict}."""
        keypairs = self.list()
        self.data['keypairs'] = {}
        for keypair in keypairs:
            keypair = keypair['keypair']
            self.data['keypairs'][keypair['name']] = keypair
class VolumeService(BaseService):
    """Cleanup service for Cinder volumes."""

    def __init__(self, manager, **kwargs):
        super(VolumeService, self).__init__(kwargs)
        self.client = manager.volumes_client_latest

    def list(self):
        """Return volumes, excluding any recorded in the saved state."""
        client = self.client
        vols = client.list_volumes()['volumes']
        if not self.is_save_state:
            # recreate list removing saved volumes (.keys() dropped)
            vols = [vol for vol in vols
                    if vol['id'] not in self.saved_state_json['volumes']]
        LOG.debug("List count, %s Volumes", len(vols))
        return vols

    def delete(self):
        """Delete all listed volumes, logging (not raising) failures."""
        client = self.client
        vols = self.list()
        for v in vols:
            try:
                LOG.debug("Deleting volume with id %s", v['id'])
                client.delete_volume(v['id'])
            except Exception:
                LOG.exception("Delete Volume %s exception.", v['id'])

    def dry_run(self):
        vols = self.list()
        self.data['volumes'] = vols

    def save_state(self):
        """Record existing volumes as {id: name} in the state data."""
        vols = self.list()
        self.data['volumes'] = {}
        for vol in vols:
            self.data['volumes'][vol['id']] = vol['name']
class VolumeQuotaService(BaseService):
    """Resets a project's Cinder quota set.

    This service has no list()/save_state(); those inherit the BaseService
    no-ops, so --save-state simply skips quotas.
    """

    def __init__(self, manager, **kwargs):
        super(VolumeQuotaService, self).__init__(kwargs)
        self.client = manager.volume_quotas_client_latest

    def delete(self):
        """Reset the volume quota set of self.project_id to defaults."""
        client = self.client
        try:
            LOG.debug("Deleting Volume Quotas for project with id %s",
                      self.project_id)
            client.delete_quota_set(self.project_id)
        except Exception:
            LOG.exception("Delete Volume Quotas exception for 'project %s'.",
                          self.project_id)

    def dry_run(self):
        """Record the project's current quota set (with usage) only."""
        quotas = self.client.show_quota_set(
            self.project_id, params={'usage': True})['quota_set']
        self.data['volume_quotas'] = quotas
class NovaQuotaService(BaseService):
    """Resets a project's Nova quota set; dry run records limits."""

    def __init__(self, manager, **kwargs):
        super(NovaQuotaService, self).__init__(kwargs)
        self.client = manager.quotas_client
        self.limits_client = manager.limits_client

    def delete(self):
        """Reset the compute quota set of self.project_id to defaults."""
        client = self.client
        try:
            LOG.debug("Deleting Nova Quotas for project with id %s",
                      self.project_id)
            client.delete_quota_set(self.project_id)
        except Exception:
            LOG.exception("Delete Nova Quotas exception for 'project %s'.",
                          self.project_id)

    def dry_run(self):
        """Record the absolute compute limits for the project."""
        client = self.limits_client
        quotas = client.show_limits()['limits']
        self.data['compute_quotas'] = quotas['absolute']
class NetworkQuotaService(BaseService):
    """Resets a project's Neutron quotas; dry run records them."""

    def __init__(self, manager, **kwargs):
        super(NetworkQuotaService, self).__init__(kwargs)
        self.client = manager.network_quotas_client

    def delete(self):
        """Reset the network quotas of self.project_id to defaults."""
        client = self.client
        try:
            LOG.debug("Deleting Network Quotas for project with id %s",
                      self.project_id)
            client.reset_quotas(self.project_id)
        except Exception:
            LOG.exception("Delete Network Quotas exception for 'project %s'.",
                          self.project_id)

    def dry_run(self):
        """Record the quota entries belonging to the project."""
        resp = [quota for quota in self.client.list_quotas()['quotas']
                if quota['project_id'] == self.project_id]
        self.data['network_quotas'] = resp
# Begin network service classes
class BaseNetworkService(BaseService):
    """Base for Neutron cleanup services; wires up all network clients."""

    def __init__(self, manager, **kwargs):
        super(BaseNetworkService, self).__init__(kwargs)
        self.networks_client = manager.networks_client
        self.subnets_client = manager.subnets_client
        self.ports_client = manager.ports_client
        self.floating_ips_client = manager.floating_ips_client
        self.metering_labels_client = manager.metering_labels_client
        self.metering_label_rules_client = manager.metering_label_rules_client
        self.security_groups_client = manager.security_groups_client
        self.routers_client = manager.routers_client
        self.subnetpools_client = manager.subnetpools_client

    def _filter_by_conf_networks(self, item_list):
        """Drop items attached to the networks configured in tempest.conf.

        Only applies when every item carries a 'network_id' key; otherwise
        the list is returned unchanged.
        """
        if not item_list or not all(('network_id' in i for i in item_list)):
            return item_list
        return [item for item in item_list if item['network_id']
                not in CONF_NETWORKS]
class NetworkService(BaseNetworkService):
    """Cleanup service for Neutron networks."""

    def list(self):
        """Return networks minus saved-state and preserved ones."""
        client = self.networks_client
        networks = client.list_networks(**self.tenant_filter)
        networks = networks['networks']
        if not self.is_save_state:
            # recreate list removing saved networks (.keys() dropped)
            networks = [network for network in networks if network['id']
                        not in self.saved_state_json['networks']]
        # filter out networks declared in tempest.conf
        if self.is_preserve:
            networks = [network for network in networks
                        if network['id'] not in CONF_NETWORKS]
        LOG.debug("List count, %s Networks", len(networks))
        return networks

    def delete(self):
        """Delete all listed networks, logging (not raising) failures."""
        client = self.networks_client
        networks = self.list()
        for n in networks:
            try:
                LOG.debug("Deleting Network with id %s", n['id'])
                client.delete_network(n['id'])
            except Exception:
                LOG.exception("Delete Network %s exception.", n['id'])

    def dry_run(self):
        networks = self.list()
        self.data['networks'] = networks

    def save_state(self):
        """Record existing networks as {id: network-dict}."""
        networks = self.list()
        self.data['networks'] = {}
        for network in networks:
            self.data['networks'][network['id']] = network
class NetworkFloatingIpService(BaseNetworkService):
    """Cleanup service for Neutron floating IPs."""

    def list(self):
        """Return floating IPs, excluding any recorded in saved state."""
        client = self.floating_ips_client
        flips = client.list_floatingips(**self.tenant_filter)
        flips = flips['floatingips']
        if not self.is_save_state:
            # recreate list removing saved flips (.keys() dropped)
            flips = [flip for flip in flips
                     if flip['id'] not in self.saved_state_json['floatingips']]
        LOG.debug("List count, %s Network Floating IPs", len(flips))
        return flips

    def delete(self):
        """Delete all listed floating IPs, logging failures."""
        client = self.floating_ips_client
        flips = self.list()
        for flip in flips:
            try:
                LOG.debug("Deleting Network Floating IP with id %s",
                          flip['id'])
                client.delete_floatingip(flip['id'])
            except Exception:
                LOG.exception("Delete Network Floating IP %s exception.",
                              flip['id'])

    def dry_run(self):
        flips = self.list()
        self.data['floatingips'] = flips

    def save_state(self):
        """Record existing floating IPs as {id: flip-dict}."""
        flips = self.list()
        self.data['floatingips'] = {}
        for flip in flips:
            self.data['floatingips'][flip['id']] = flip
class NetworkRouterService(BaseNetworkService):
    """Cleanup service for Neutron routers (detaches interfaces first)."""

    def list(self):
        """Return routers minus saved-state and the configured public one."""
        client = self.routers_client
        routers = client.list_routers(**self.tenant_filter)
        routers = routers['routers']
        if not self.is_save_state:
            # recreate list removing saved routers (.keys() dropped)
            routers = [router for router in routers
                       if router['id'] not in self.saved_state_json['routers']]
        if self.is_preserve:
            routers = [router for router in routers
                       if router['id'] != CONF_PUB_ROUTER]
        LOG.debug("List count, %s Routers", len(routers))
        return routers

    def delete(self):
        """Remove each router's interface ports, then delete the router."""
        client = self.routers_client
        ports_client = self.ports_client
        routers = self.list()
        for router in routers:
            rid = router['id']
            # a router cannot be deleted while interface ports remain
            ports = [port for port
                     in ports_client.list_ports(device_id=rid)['ports']
                     if net_info.is_router_interface_port(port)]
            for port in ports:
                try:
                    LOG.debug("Deleting port with id %s of router with id %s",
                              port['id'], rid)
                    client.remove_router_interface(rid, port_id=port['id'])
                except Exception:
                    LOG.exception("Delete Router Interface exception for "
                                  "'port %s' of 'router %s'.", port['id'], rid)
            try:
                LOG.debug("Deleting Router with id %s", rid)
                client.delete_router(rid)
            except Exception:
                LOG.exception("Delete Router %s exception.", rid)

    def dry_run(self):
        routers = self.list()
        self.data['routers'] = routers

    def save_state(self):
        """Record existing routers as {id: name}."""
        routers = self.list()
        self.data['routers'] = {}
        for router in routers:
            self.data['routers'][router['id']] = router['name']
class NetworkMeteringLabelRuleService(NetworkService):
    """Cleanup service for Neutron metering label rules."""

    def list(self):
        """Return metering label rules for the tenant, minus saved ones."""
        client = self.metering_label_rules_client
        rules = client.list_metering_label_rules()
        rules = rules['metering_label_rules']
        rules = self._filter_by_tenant_id(rules)
        if not self.is_save_state:
            # membership tested on the dict itself; .keys() was redundant
            saved_rules = self.saved_state_json['metering_label_rules']
            # recreate list removing saved rules
            rules = [rule for rule in rules if rule['id'] not in saved_rules]
        LOG.debug("List count, %s Metering Label Rules", len(rules))
        return rules

    def delete(self):
        """Delete all listed metering label rules, logging failures."""
        client = self.metering_label_rules_client
        rules = self.list()
        for rule in rules:
            try:
                LOG.debug("Deleting Metering Label Rule with id %s",
                          rule['id'])
                client.delete_metering_label_rule(rule['id'])
            except Exception:
                LOG.exception("Delete Metering Label Rule %s exception.",
                              rule['id'])

    def dry_run(self):
        rules = self.list()
        self.data['metering_label_rules'] = rules

    def save_state(self):
        """Record existing rules as {id: rule-dict}."""
        rules = self.list()
        self.data['metering_label_rules'] = {}
        for rule in rules:
            self.data['metering_label_rules'][rule['id']] = rule
class NetworkMeteringLabelService(BaseNetworkService):
    """Cleanup service for Neutron metering labels."""

    def list(self):
        """Return metering labels for the tenant, minus saved ones."""
        client = self.metering_labels_client
        labels = client.list_metering_labels()
        labels = labels['metering_labels']
        labels = self._filter_by_tenant_id(labels)
        if not self.is_save_state:
            # recreate list removing saved labels (.keys() dropped)
            labels = [label for label in labels if label['id']
                      not in self.saved_state_json['metering_labels']]
        LOG.debug("List count, %s Metering Labels", len(labels))
        return labels

    def delete(self):
        """Delete all listed metering labels, logging failures."""
        client = self.metering_labels_client
        labels = self.list()
        for label in labels:
            try:
                LOG.debug("Deleting Metering Label with id %s", label['id'])
                client.delete_metering_label(label['id'])
            except Exception:
                LOG.exception("Delete Metering Label %s exception.",
                              label['id'])

    def dry_run(self):
        labels = self.list()
        self.data['metering_labels'] = labels

    def save_state(self):
        """Record existing labels as {id: name}."""
        labels = self.list()
        self.data['metering_labels'] = {}
        for label in labels:
            self.data['metering_labels'][label['id']] = label['name']
class NetworkPortService(BaseNetworkService):
    """Cleanup service for unattached and compute-owned Neutron ports."""

    def list(self):
        """Return unowned/compute ports minus saved and preserved ones."""
        client = self.ports_client
        ports = [port for port in
                 client.list_ports(**self.tenant_filter)['ports']
                 if port["device_owner"] == "" or
                 port["device_owner"].startswith("compute:")]
        if not self.is_save_state:
            # recreate list removing saved ports (.keys() dropped)
            ports = [port for port in ports
                     if port['id'] not in self.saved_state_json['ports']]
        if self.is_preserve:
            ports = self._filter_by_conf_networks(ports)
        LOG.debug("List count, %s Ports", len(ports))
        return ports

    def delete(self):
        """Delete all listed ports, logging (not raising) failures."""
        client = self.ports_client
        ports = self.list()
        for port in ports:
            try:
                LOG.debug("Deleting port with id %s", port['id'])
                client.delete_port(port['id'])
            except Exception:
                LOG.exception("Delete Port %s exception.", port['id'])

    def dry_run(self):
        ports = self.list()
        self.data['ports'] = ports

    def save_state(self):
        """Record existing ports as {id: name}."""
        ports = self.list()
        self.data['ports'] = {}
        for port in ports:
            self.data['ports'][port['id']] = port['name']
class NetworkSecGroupService(BaseNetworkService):
    """Cleanup service for Neutron security groups (never 'default')."""

    def list(self):
        """Return non-default security groups minus saved/preserved ones."""
        client = self.security_groups_client
        sg_filter = self.tenant_filter  # renamed: 'filter' shadowed a builtin
        # cannot delete default sec group so never show it.
        secgroups = [secgroup for secgroup in
                     client.list_security_groups(**sg_filter)['security_groups']
                     if secgroup['name'] != 'default']
        if not self.is_save_state:
            # recreate list removing saved security_groups (.keys() dropped)
            secgroups = [secgroup for secgroup in secgroups if secgroup['id']
                         not in self.saved_state_json['security_groups']]
        if self.is_preserve:
            secgroups = [secgroup for secgroup in secgroups
                         if secgroup['security_group_rules'][0]['project_id']
                         not in CONF_PROJECTS]
        LOG.debug("List count, %s security_groups", len(secgroups))
        return secgroups

    def delete(self):
        """Delete all listed security groups, logging failures."""
        client = self.security_groups_client
        secgroups = self.list()
        for secgroup in secgroups:
            try:
                LOG.debug("Deleting security_group with id %s", secgroup['id'])
                client.delete_security_group(secgroup['id'])
            except Exception:
                LOG.exception("Delete security_group %s exception.",
                              secgroup['id'])

    def dry_run(self):
        secgroups = self.list()
        self.data['security_groups'] = secgroups

    def save_state(self):
        """Record existing security groups as {id: name}."""
        secgroups = self.list()
        self.data['security_groups'] = {}
        for secgroup in secgroups:
            self.data['security_groups'][secgroup['id']] = secgroup['name']
class NetworkSubnetService(BaseNetworkService):
    """Cleanup service for Neutron subnets."""

    def list(self):
        """Return subnets minus saved-state and preserved-network ones."""
        client = self.subnets_client
        subnets = client.list_subnets(**self.tenant_filter)
        subnets = subnets['subnets']
        if not self.is_save_state:
            # recreate list removing saved subnets (.keys() dropped)
            subnets = [subnet for subnet in subnets
                       if subnet['id'] not in self.saved_state_json['subnets']]
        if self.is_preserve:
            subnets = self._filter_by_conf_networks(subnets)
        LOG.debug("List count, %s Subnets", len(subnets))
        return subnets

    def delete(self):
        """Delete all listed subnets, logging (not raising) failures."""
        client = self.subnets_client
        subnets = self.list()
        for subnet in subnets:
            try:
                LOG.debug("Deleting subnet with id %s", subnet['id'])
                client.delete_subnet(subnet['id'])
            except Exception:
                LOG.exception("Delete Subnet %s exception.", subnet['id'])

    def dry_run(self):
        subnets = self.list()
        self.data['subnets'] = subnets

    def save_state(self):
        """Record existing subnets as {id: name}."""
        subnets = self.list()
        self.data['subnets'] = {}
        for subnet in subnets:
            self.data['subnets'][subnet['id']] = subnet['name']
class NetworkSubnetPoolsService(BaseNetworkService):
    """Cleanup service for Neutron subnet pools."""

    def list(self):
        """Return subnet pools minus saved-state and preserved ones."""
        client = self.subnetpools_client
        pools = client.list_subnetpools(**self.tenant_filter)['subnetpools']
        if not self.is_save_state:
            # recreate list removing saved subnet pools (.keys() dropped)
            pools = [pool for pool in pools
                     if pool['id'] not in self.saved_state_json['subnetpools']]
        if self.is_preserve:
            pools = [pool for pool in pools if pool['project_id']
                     not in CONF_PROJECTS]
        LOG.debug("List count, %s Subnet Pools", len(pools))
        return pools

    def delete(self):
        """Delete all listed subnet pools, logging failures."""
        client = self.subnetpools_client
        pools = self.list()
        for pool in pools:
            try:
                LOG.debug("Deleting Subnet Pool with id %s", pool['id'])
                client.delete_subnetpool(pool['id'])
            except Exception:
                LOG.exception("Delete Subnet Pool %s exception.", pool['id'])

    def dry_run(self):
        pools = self.list()
        self.data['subnetpools'] = pools

    def save_state(self):
        """Record existing subnet pools as {id: name}."""
        pools = self.list()
        self.data['subnetpools'] = {}
        for pool in pools:
            self.data['subnetpools'][pool['id']] = pool['name']
# begin global services
class RegionService(BaseService):
    """Cleanup service for Keystone regions."""

    def __init__(self, manager, **kwargs):
        super(RegionService, self).__init__(kwargs)
        self.client = manager.regions_client

    def list(self):
        """Return regions; in normal runs saved-state regions are excluded."""
        client = self.client
        regions = client.list_regions()
        if not self.is_save_state:
            # .keys() dropped: membership tested on the dict itself
            regions = [region for region in regions['regions']
                       if region['id'] not in self.saved_state_json['regions']]
            LOG.debug("List count, %s Regions", len(regions))
            return regions
        else:
            LOG.debug("List count, %s Regions", len(regions['regions']))
            return regions['regions']

    def delete(self):
        """Delete all listed regions, logging (not raising) failures."""
        client = self.client
        regions = self.list()
        for region in regions:
            try:
                LOG.debug("Deleting region with id %s", region['id'])
                client.delete_region(region['id'])
            except Exception:
                LOG.exception("Delete Region %s exception.", region['id'])

    def dry_run(self):
        # unlike most services, dry_run stores a dict keyed by id here
        regions = self.list()
        self.data['regions'] = {}
        for region in regions:
            self.data['regions'][region['id']] = region

    def save_state(self):
        """Record existing regions as {id: region-dict}."""
        regions = self.list()
        self.data['regions'] = {}
        for region in regions:
            self.data['regions'][region['id']] = region
class FlavorService(BaseService):
    """Cleanup service for Nova flavors (public and private)."""

    def __init__(self, manager, **kwargs):
        super(FlavorService, self).__init__(kwargs)
        self.client = manager.flavors_client

    def list(self):
        """Return flavors minus saved-state and configured ones."""
        client = self.client
        flavors = client.list_flavors({"is_public": None})['flavors']
        if not self.is_save_state:
            # recreate list removing saved flavors (.keys() dropped)
            flavors = [flavor for flavor in flavors
                       if flavor['id'] not in self.saved_state_json['flavors']]
        if self.is_preserve:
            flavors = [flavor for flavor in flavors
                       if flavor['id'] not in CONF_FLAVORS]
        LOG.debug("List count, %s Flavors after reconcile", len(flavors))
        return flavors

    def delete(self):
        """Delete all listed flavors, logging (not raising) failures."""
        client = self.client
        flavors = self.list()
        for flavor in flavors:
            try:
                LOG.debug("Deleting flavor with id %s", flavor['id'])
                client.delete_flavor(flavor['id'])
            except Exception:
                LOG.exception("Delete Flavor %s exception.", flavor['id'])

    def dry_run(self):
        flavors = self.list()
        self.data['flavors'] = flavors

    def save_state(self):
        """Record existing flavors as {id: name}."""
        flavors = self.list()
        self.data['flavors'] = {}
        for flavor in flavors:
            self.data['flavors'][flavor['id']] = flavor['name']
class ImageService(BaseService):
    """Cleanup service for Glance images (v2 API, follows pagination)."""

    def __init__(self, manager, **kwargs):
        super(ImageService, self).__init__(kwargs)
        self.client = manager.image_client_v2

    def list(self):
        """Return all images (all pages), minus saved and configured ones."""
        client = self.client
        response = client.list_images()
        images = []
        images.extend(response['images'])
        while 'next' in response:
            # follow the 'next' pagination link until exhausted
            parsed = urllib.urlparse(response['next'])
            marker = urllib.parse_qs(parsed.query)['marker'][0]
            response = client.list_images(params={"marker": marker})
            images.extend(response['images'])
        if not self.is_save_state:
            # .keys() dropped: membership tested on the dict itself
            images = [image for image in images
                      if image['id'] not in self.saved_state_json['images']]
        if self.is_preserve:
            images = [image for image in images
                      if image['id'] not in CONF_IMAGES]
        LOG.debug("List count, %s Images after reconcile", len(images))
        return images

    def delete(self):
        """Delete all listed images, logging (not raising) failures."""
        client = self.client
        images = self.list()
        for image in images:
            try:
                LOG.debug("Deleting image with id %s", image['id'])
                client.delete_image(image['id'])
            except Exception:
                LOG.exception("Delete Image %s exception.", image['id'])

    def dry_run(self):
        images = self.list()
        self.data['images'] = images

    def save_state(self):
        """Record existing images as {id: name}."""
        self.data['images'] = {}
        images = self.list()
        for image in images:
            self.data['images'][image['id']] = image['name']
class UserService(BaseService):
    """Cleanup service for Keystone users (never the admin user)."""

    def __init__(self, manager, **kwargs):
        super(UserService, self).__init__(kwargs)
        self.client = manager.users_v3_client

    def list(self):
        """Return users minus saved-state, preserved, and admin users."""
        users = self.client.list_users()['users']
        if not self.is_save_state:
            # .keys() dropped: membership tested on the dict itself
            users = [user for user in users
                     if user['id'] not in self.saved_state_json['users']]
        if self.is_preserve:
            users = [user for user in users if user['name']
                     not in CONF_USERS]
        elif not self.is_save_state:  # Never delete admin user
            users = [user for user in users if user['name'] !=
                     CONF.auth.admin_username]
        LOG.debug("List count, %s Users after reconcile", len(users))
        return users

    def delete(self):
        """Delete all listed users, logging (not raising) failures."""
        users = self.list()
        for user in users:
            try:
                LOG.debug("Deleting user with id %s", user['id'])
                self.client.delete_user(user['id'])
            except Exception:
                LOG.exception("Delete User %s exception.", user['id'])

    def dry_run(self):
        users = self.list()
        self.data['users'] = users

    def save_state(self):
        """Record existing users as {id: name}."""
        users = self.list()
        self.data['users'] = {}
        for user in users:
            self.data['users'][user['id']] = user['name']
class RoleService(BaseService):
    """Cleanup service for Keystone roles (never the admin role)."""

    def __init__(self, manager, **kwargs):
        super(RoleService, self).__init__(kwargs)
        self.client = manager.roles_v3_client

    def list(self):
        """Return roles minus saved-state and the admin role.

        Returns an empty list (after logging) if roles cannot be listed.
        """
        try:
            roles = self.client.list_roles()['roles']
            # reconcile roles with saved state and never list admin role
            # (.keys() dropped: membership tested on the dict itself)
            if not self.is_save_state:
                roles = [role for role in roles
                         if (role['id'] not in
                             self.saved_state_json['roles'] and
                             role['name'] != CONF.identity.admin_role)]
            LOG.debug("List count, %s Roles after reconcile", len(roles))
            return roles
        except Exception:
            LOG.exception("Cannot retrieve Roles.")
            return []

    def delete(self):
        """Delete all listed roles, logging (not raising) failures."""
        roles = self.list()
        for role in roles:
            try:
                LOG.debug("Deleting role with id %s", role['id'])
                self.client.delete_role(role['id'])
            except Exception:
                LOG.exception("Delete Role %s exception.", role['id'])

    def dry_run(self):
        roles = self.list()
        self.data['roles'] = roles

    def save_state(self):
        """Record existing roles as {id: name}."""
        roles = self.list()
        self.data['roles'] = {}
        for role in roles:
            self.data['roles'][role['id']] = role['name']
class ProjectService(BaseService):
    """Cleanup service for Keystone projects (never the admin project)."""

    def __init__(self, manager, **kwargs):
        super(ProjectService, self).__init__(kwargs)
        self.client = manager.projects_client

    def list(self):
        """Return projects minus saved-state, admin, and preserved ones."""
        projects = self.client.list_projects()['projects']
        if not self.is_save_state:
            project_ids = self.saved_state_json['projects']
            projects = [project
                        for project in projects
                        if (project['id'] not in project_ids and
                            project['name'] != CONF.auth.admin_project_name)]
        if self.is_preserve:
            projects = [project
                        for project in projects
                        if project['name'] not in CONF_PROJECTS]
        LOG.debug("List count, %s Projects after reconcile", len(projects))
        return projects

    def delete(self):
        """Delete all listed projects, logging (not raising) failures."""
        projects = self.list()
        for project in projects:
            try:
                LOG.debug("Deleting project with id %s", project['id'])
                self.client.delete_project(project['id'])
            except Exception:
                LOG.exception("Delete project %s exception.", project['id'])

    def dry_run(self):
        projects = self.list()
        self.data['projects'] = projects

    def save_state(self):
        """Record existing projects as {id: name}."""
        projects = self.list()
        self.data['projects'] = {}
        for project in projects:
            self.data['projects'][project['id']] = project['name']
class DomainService(BaseService):
    """Cleanup service for Keystone domains."""

    def __init__(self, manager, **kwargs):
        super(DomainService, self).__init__(kwargs)
        self.client = manager.domains_client

    def list(self):
        """Return domains, excluding any recorded in the saved state."""
        client = self.client
        domains = client.list_domains()['domains']
        if not self.is_save_state:
            # .keys() dropped: membership tested on the dict itself
            domains = [domain for domain in domains
                       if domain['id'] not in self.saved_state_json['domains']]
        LOG.debug("List count, %s Domains after reconcile", len(domains))
        return domains

    def delete(self):
        """Disable then delete each listed domain, logging failures."""
        client = self.client
        domains = self.list()
        for domain in domains:
            try:
                LOG.debug("Deleting domain with id %s", domain['id'])
                # keystone refuses to delete an enabled domain
                client.update_domain(domain['id'], enabled=False)
                client.delete_domain(domain['id'])
            except Exception:
                LOG.exception("Delete Domain %s exception.", domain['id'])

    def dry_run(self):
        domains = self.list()
        self.data['domains'] = domains

    def save_state(self):
        """Record existing domains as {id: name}."""
        domains = self.list()
        self.data['domains'] = {}
        for domain in domains:
            self.data['domains'][domain['id']] = domain['name']
def get_project_associated_cleanup_services():
    """Returns list of project service classes.

    The list contains services whose resources need to be deleted prior,
    the project they are associated with, deletion. The resources cannot be
    most likely deleted after the project is deleted first.
    """
    project_associated_services = []
    # TODO(gmann): Tempest should provide some plugin hook for cleanup
    # script extension to plugin tests also.
    # Only quota services qualify: quotas live on the project itself.
    if IS_NOVA:
        project_associated_services.append(NovaQuotaService)
    if IS_CINDER:
        project_associated_services.append(VolumeQuotaService)
    if IS_NEUTRON:
        project_associated_services.append(NetworkQuotaService)
    return project_associated_services
def get_resource_cleanup_services():
    """Returns list of project related classes.

    The list contains services whose resources are associated with a project,
    however, their deletion is possible also after the project is deleted
    first.

    Note: the order matters -- e.g. floating IPs and router interfaces must
    go before routers/ports/networks.
    """
    resource_cleanup_services = []
    # TODO(gmann): Tempest should provide some plugin hook for cleanup
    # script extension to plugin tests also.
    if IS_NOVA:
        resource_cleanup_services.append(ServerService)
        resource_cleanup_services.append(KeyPairService)
        resource_cleanup_services.append(ServerGroupService)
    if IS_NEUTRON:
        resource_cleanup_services.append(NetworkFloatingIpService)
        if utils.is_extension_enabled('metering', 'network'):
            resource_cleanup_services.append(NetworkMeteringLabelRuleService)
            resource_cleanup_services.append(NetworkMeteringLabelService)
        resource_cleanup_services.append(NetworkRouterService)
        resource_cleanup_services.append(NetworkPortService)
        resource_cleanup_services.append(NetworkSubnetService)
        resource_cleanup_services.append(NetworkService)
        resource_cleanup_services.append(NetworkSecGroupService)
        resource_cleanup_services.append(NetworkSubnetPoolsService)
    if IS_CINDER:
        resource_cleanup_services.append(SnapshotService)
        resource_cleanup_services.append(VolumeService)
    return resource_cleanup_services
def get_global_cleanup_services():
    """Return service classes for global (non project-scoped) resources."""
    global_services = []
    if IS_NOVA:
        global_services.append(FlavorService)
    if IS_GLANCE:
        global_services.append(ImageService)
    global_services.append(UserService)
    global_services.append(ProjectService)
    global_services.append(DomainService)
    global_services.append(RoleService)
    global_services.append(RegionService)
    return global_services
| apache-2.0 |
Smarsh/django | django/contrib/admin/models.py | 106 | 2135 | from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User
from django.contrib.admin.util import quote
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import smart_unicode
from django.utils.safestring import mark_safe
# Values stored in LogEntry.action_flag describing the kind of admin action.
ADDITION = 1
CHANGE = 2
DELETION = 3
class LogEntryManager(models.Manager):
    """Manager providing a helper to record an admin action as a LogEntry."""

    def log_action(self, user_id, content_type_id, object_id, object_repr,
                   action_flag, change_message=''):
        """Create and save a LogEntry describing an admin action.

        Fields are now passed by keyword instead of positionally, so the
        call no longer depends on the exact field declaration order of
        LogEntry (the original passed two leading ``None`` placeholders
        for ``id`` and ``action_time``; ``action_time`` is set by
        ``auto_now`` on save anyway).
        """
        e = self.model(
            user_id=user_id,
            content_type_id=content_type_id,
            object_id=smart_unicode(object_id),
            # object_repr column is capped at 200 chars
            object_repr=object_repr[:200],
            action_flag=action_flag,
            change_message=change_message,
        )
        e.save()
class LogEntry(models.Model):
    """A record of an action taken in the Django admin on some object."""
    action_time = models.DateTimeField(_('action time'), auto_now=True)
    user = models.ForeignKey(User)
    # content_type/object_id identify the edited object; both are nullable
    # so entries survive deletion of the referenced type.
    content_type = models.ForeignKey(ContentType, blank=True, null=True)
    object_id = models.TextField(_('object id'), blank=True, null=True)
    object_repr = models.CharField(_('object repr'), max_length=200)
    # one of ADDITION / CHANGE / DELETION (module-level constants)
    action_flag = models.PositiveSmallIntegerField(_('action flag'))
    change_message = models.TextField(_('change message'), blank=True)
    objects = LogEntryManager()

    class Meta:
        verbose_name = _('log entry')
        verbose_name_plural = _('log entries')
        db_table = 'django_admin_log'
        ordering = ('-action_time',)

    def __repr__(self):
        return smart_unicode(self.action_time)

    def is_addition(self):
        """True when this entry records an object creation."""
        return self.action_flag == ADDITION

    def is_change(self):
        """True when this entry records an object modification."""
        return self.action_flag == CHANGE

    def is_deletion(self):
        """True when this entry records an object deletion."""
        return self.action_flag == DELETION

    def get_edited_object(self):
        "Returns the edited object represented by this log entry"
        return self.content_type.get_object_for_this_type(pk=self.object_id)

    def get_admin_url(self):
        """
        Returns the admin URL to edit the object represented by this log entry.
        This is relative to the Django admin index page.
        """
        return mark_safe(u"%s/%s/%s/" % (self.content_type.app_label, self.content_type.model, quote(self.object_id)))
| bsd-3-clause |
Orav/kbengine | kbe/src/lib/python/Lib/test/test_pep3120.py | 2 | 1318 | # This file is marked as binary in the CVS, to prevent MacCVS from recoding it.
import unittest
from test import support
class PEP3120Test(unittest.TestCase):
def test_pep3120(self):
self.assertEqual(
"Питон".encode("utf-8"),
b'\xd0\x9f\xd0\xb8\xd1\x82\xd0\xbe\xd0\xbd'
)
self.assertEqual(
"\П".encode("utf-8"),
b'\\\xd0\x9f'
)
def test_badsyntax(self):
try:
import test.badsyntax_pep3120
except SyntaxError as msg:
msg = str(msg).lower()
self.assertTrue('utf-8' in msg)
else:
self.fail("expected exception didn't occur")
class BuiltinCompileTests(unittest.TestCase):
    # Issue 3574.
    def test_latin1(self):
        """compile() must accept source bytes declared as Latin-1."""
        source_code = '# coding: Latin-1\nu = "Ç"\n'.encode("Latin-1")
        try:
            code_obj = compile(source_code, '<dummy>', 'exec')
        except SyntaxError:
            self.fail("compile() cannot handle Latin-1 source")
        else:
            namespace = {}
            exec(code_obj, namespace)
            self.assertEqual('Ç', namespace['u'])
def test_main():
    """Entry point used by the regression-test harness."""
    suites = (PEP3120Test, BuiltinCompileTests)
    support.run_unittest(*suites)
if __name__ == "__main__":
    test_main()
| lgpl-3.0 |
navodissa/python-flask | flask/lib/python2.7/site-packages/sqlalchemy/orm/strategies.py | 32 | 54293 | # orm/strategies.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""sqlalchemy.orm.interfaces.LoaderStrategy
implementations, and related MapperOptions."""
from .. import exc as sa_exc, inspect
from .. import util, log, event
from ..sql import util as sql_util, visitors
from .. import sql
from . import (
attributes, interfaces, exc as orm_exc, loading,
unitofwork, util as orm_util
)
from .state import InstanceState
from .util import _none_set
from . import properties
from .interfaces import (
LoaderStrategy, StrategizedProperty
)
from .session import _state_session
import itertools
def _register_attribute(
        strategy, mapper, useobject,
        compare_function=None,
        typecallable=None,
        uselist=False,
        callable_=None,
        proxy_property=None,
        active_history=False,
        impl_class=None,
        **kw
):
    """Register the instrumented attribute for ``strategy``'s property on
    ``mapper`` and each of its descendant mappers, then wire up event
    listeners (validators, single-parent enforcement, cascade tracking,
    backrefs) against each created attribute descriptor.
    """
    prop = strategy.parent_property
    attribute_ext = list(util.to_list(prop.extension, default=[]))
    # hooks to run against each descriptor after it is created; order
    # matters (see backref note below).
    listen_hooks = []
    if useobject and prop.single_parent:
        listen_hooks.append(single_parent_validator)
    if prop.key in prop.parent.validators:
        fn, opts = prop.parent.validators[prop.key]
        listen_hooks.append(
            lambda desc, prop: orm_util._validator_events(
                desc,
                prop.key, fn, **opts)
        )
    if useobject:
        listen_hooks.append(unitofwork.track_cascade_events)
    # need to assemble backref listeners
    # after the singleparentvalidator, mapper validator
    backref = kw.pop('backref', None)
    if backref:
        listen_hooks.append(
            lambda desc, prop: attributes.backref_listeners(
                desc,
                backref,
                uselist
            )
        )
    for m in mapper.self_and_descendants:
        # only register on mappers for which this property is the
        # canonical one for its key (subclasses may override the prop).
        if prop is m._props.get(prop.key):
            desc = attributes.register_attribute_impl(
                m.class_,
                prop.key,
                parent_token=prop,
                uselist=uselist,
                compare_function=compare_function,
                useobject=useobject,
                extension=attribute_ext,
                trackparent=useobject and (
                    prop.single_parent
                    or prop.direction is interfaces.ONETOMANY),
                typecallable=typecallable,
                callable_=callable_,
                active_history=active_history,
                impl_class=impl_class,
                send_modified_events=not useobject or not prop.viewonly,
                doc=prop.doc,
                **kw
            )
            for hook in listen_hooks:
                hook(desc, prop)
@properties.ColumnProperty.strategy_for(instrument=False, deferred=False)
class UninstrumentedColumnLoader(LoaderStrategy):
    """Represent a MapperProperty that is not instrumented on the class.

    The polymorphic_on argument of mapper() often results in this,
    if the argument is against the with_polymorphic selectable.
    """
    def __init__(self, parent):
        super(UninstrumentedColumnLoader, self).__init__(parent)
        self.columns = self.parent_property.columns
    def setup_query(
            self, context, entity, path, loadopt, adapter,
            column_collection=None, **kwargs):
        # emit each column into the SELECT, translated through the
        # row adapter when one is present.
        for column in self.columns:
            target = adapter.columns[column] if adapter else column
            column_collection.append(target)
    def create_row_processor(
            self, context, path, loadopt,
            mapper, row, adapter):
        # nothing is populated on the instance for this property
        return None, None, None
@log.class_logger
@properties.ColumnProperty.strategy_for(instrument=True, deferred=False)
class ColumnLoader(LoaderStrategy):
    """Loading behavior for an ordinary (non-deferred)
    :class:`.ColumnProperty`.
    """
    def __init__(self, parent):
        super(ColumnLoader, self).__init__(parent)
        prop = self.parent_property
        self.columns = prop.columns
        self.is_composite = hasattr(prop, 'composite_class')
    def setup_query(
            self, context, entity, path, loadopt,
            adapter, column_collection, **kwargs):
        # emit each mapped column (translated through the adapter,
        # if any) into the SELECT's column list.
        for column in self.columns:
            if adapter:
                column = adapter.columns[column]
            column_collection.append(column)
    def init_class_attribute(self, mapper):
        self.is_class_level = True
        coltype = self.columns[0].type
        # TODO: check all columns ?  check for foreign key as well?
        active_history = (
            self.parent_property.active_history
            or self.columns[0].primary_key
            or mapper.version_id_col in set(self.columns)
        )
        _register_attribute(
            self, mapper, useobject=False,
            compare_function=coltype.compare_values,
            active_history=active_history
        )
    def create_row_processor(
            self, context, path,
            loadopt, mapper, row, adapter):
        key = self.key
        # find the first of our columns that is present in the row
        for column in self.columns:
            if adapter:
                column = adapter.columns[column]
            if column is not None and column in row:
                def fetch_col(state, dict_, row):
                    dict_[key] = row[column]
                return fetch_col, None, None
        # none of the columns are in the row; expire the attribute so
        # it loads on next access.
        def expire_for_non_present_col(state, dict_, row):
            state._expire_attribute_pre_commit(dict_, key)
        return expire_for_non_present_col, None, None
@log.class_logger
@properties.ColumnProperty.strategy_for(deferred=True, instrument=True)
class DeferredColumnLoader(LoaderStrategy):
    """Provide loading behavior for a deferred :class:`.ColumnProperty`."""
    def __init__(self, parent):
        super(DeferredColumnLoader, self).__init__(parent)
        if hasattr(self.parent_property, 'composite_class'):
            raise NotImplementedError("Deferred loading for composite "
                                      "types not implemented yet")
        self.columns = self.parent_property.columns
        self.group = self.parent_property.group
    def create_row_processor(
            self, context, path, loadopt,
            mapper, row, adapter):
        # decide how a fetched row populates this deferred attribute
        col = self.columns[0]
        if adapter:
            col = adapter.columns[col]
        key = self.key
        if col in row:
            # the column was undeferred for this query; populate it
            # eagerly via the plain ColumnLoader behavior.
            return self.parent_property._get_strategy_by_cls(ColumnLoader).\
                create_row_processor(
                    context, path, loadopt, mapper, row, adapter)
        elif not self.is_class_level:
            # per-query deferral: install an instance-local callable
            # that loads the column on first access.
            set_deferred_for_local_state = InstanceState._row_processor(
                mapper.class_manager,
                LoadDeferredColumns(key), key)
            return set_deferred_for_local_state, None, None
        else:
            def reset_col_for_deferred(state, dict_, row):
                # reset state on the key so that deferred callables
                # fire off on next access.
                state._reset(dict_, key)
            return reset_col_for_deferred, None, None
    def init_class_attribute(self, mapper):
        self.is_class_level = True
        _register_attribute(
            self, mapper, useobject=False,
            compare_function=self.columns[0].type.compare_values,
            callable_=self._load_for_state,
            expire_missing=False
        )
    def setup_query(
            self, context, entity, path, loadopt, adapter,
            only_load_props=None, **kwargs):
        # undefer (emit the column in the SELECT) when requested via
        # options or when the attribute is in only_load_props.
        if (
            (
                loadopt and
                'undefer_pks' in loadopt.local_opts and
                set(self.columns).intersection(self.parent.primary_key)
            )
            or
            (
                loadopt and
                self.group and
                loadopt.local_opts.get('undefer_group', False) == self.group
            )
            or
            (
                only_load_props and self.key in only_load_props
            )
        ):
            self.parent_property._get_strategy_by_cls(ColumnLoader).\
                setup_query(context, entity,
                            path, loadopt, adapter, **kwargs)
    def _load_for_state(self, state, passive):
        """Load this deferred column (and its deferral group, if any) for
        ``state`` by emitting a SELECT against the object's identity.
        """
        if not state.key:
            # transient object; nothing to load
            return attributes.ATTR_EMPTY
        if not passive & attributes.SQL_OK:
            return attributes.PASSIVE_NO_RESULT
        localparent = state.manager.mapper
        if self.group:
            # load all deferred columns sharing this group in one query
            toload = [
                p.key for p in
                localparent.iterate_properties
                if isinstance(p, StrategizedProperty) and
                isinstance(p.strategy, DeferredColumnLoader) and
                p.group == self.group
            ]
        else:
            toload = [self.key]
        # narrow the keys down to just those which have no history
        group = [k for k in toload if k in state.unmodified]
        session = _state_session(state)
        if session is None:
            raise orm_exc.DetachedInstanceError(
                "Parent instance %s is not bound to a Session; "
                "deferred load operation of attribute '%s' cannot proceed" %
                (orm_util.state_str(state), self.key)
            )
        query = session.query(localparent)
        if loading.load_on_ident(
                query, state.key,
                only_load_props=group, refresh_state=state) is None:
            raise orm_exc.ObjectDeletedError(state)
        return attributes.ATTR_WAS_SET
class LoadDeferredColumns(object):
    """Serializable per-instance callable used by DeferredColumnLoader to
    load a deferred column on first attribute access."""
    def __init__(self, key):
        self.key = key
    def __call__(self, state, passive=attributes.PASSIVE_OFF):
        # locate the DeferredColumnLoader strategy for this attribute
        # and delegate the actual load to it.
        prop = state.manager.mapper._props[self.key]
        loader = prop._strategies[DeferredColumnLoader]
        return loader._load_for_state(state, passive)
class AbstractRelationshipLoader(LoaderStrategy):
    """Base class for loader strategies which deal with related objects."""
    def __init__(self, parent):
        super(AbstractRelationshipLoader, self).__init__(parent)
        prop = self.parent_property
        self.mapper = prop.mapper
        self.target = prop.target
        self.uselist = prop.uselist
@log.class_logger
@properties.RelationshipProperty.strategy_for(lazy="noload")
@properties.RelationshipProperty.strategy_for(lazy=None)
class NoLoader(AbstractRelationshipLoader):
    """Loading behavior for a :class:`.RelationshipProperty` with
    "lazy=None": the attribute is never loaded from the database, only
    initialized to its empty default.
    """
    def init_class_attribute(self, mapper):
        self.is_class_level = True
        prop = self.parent_property
        _register_attribute(
            self, mapper,
            useobject=True,
            uselist=prop.uselist,
            typecallable=prop.collection_class,
        )
    def create_row_processor(
            self, context, path, loadopt, mapper,
            row, adapter):
        def initialize_to_empty(state, dict_, row):
            # install the empty default (None or an empty collection)
            state._initialize(self.key)
        return initialize_to_empty, None, None
@log.class_logger
@properties.RelationshipProperty.strategy_for(lazy=True)
@properties.RelationshipProperty.strategy_for(lazy="select")
class LazyLoader(AbstractRelationshipLoader):
    """Provide loading behavior for a :class:`.RelationshipProperty`
    with "lazy=True", that is loads when first accessed.
    """
    def __init__(self, parent):
        super(LazyLoader, self).__init__(parent)
        join_condition = self.parent_property._join_condition
        # "lazywhere" is the relationship's join condition rewritten with
        # bind parameters standing in for the parent's column values.
        self._lazywhere, \
            self._bind_to_col, \
            self._equated_columns = join_condition.create_lazy_clause()
        self._rev_lazywhere, \
            self._rev_bind_to_col, \
            self._rev_equated_columns = join_condition.create_lazy_clause(
                reverse_direction=True)
        self.logger.info("%s lazy loading clause %s", self, self._lazywhere)
        # determine if our "lazywhere" clause is the same as the mapper's
        # get() clause. then we can just use mapper.get()
        self.use_get = not self.uselist and \
            self.mapper._get_clause[0].compare(
                self._lazywhere,
                use_proxies=True,
                equivalents=self.mapper._equivalent_columns
            )
        if self.use_get:
            # expand the equated-columns map across equivalent columns so
            # any of them can be used to assemble the primary key.
            for col in list(self._equated_columns):
                if col in self.mapper._equivalent_columns:
                    for c in self.mapper._equivalent_columns[col]:
                        self._equated_columns[c] = self._equated_columns[col]
            self.logger.info("%s will use query.get() to "
                             "optimize instance loads" % self)
    def init_class_attribute(self, mapper):
        self.is_class_level = True
        active_history = (
            self.parent_property.active_history or
            self.parent_property.direction is not interfaces.MANYTOONE or
            not self.use_get
        )
        # MANYTOONE currently only needs the
        # "old" value for delete-orphan
        # cascades. the required _SingleParentValidator
        # will enable active_history
        # in that case. otherwise we don't need the
        # "old" value during backref operations.
        _register_attribute(
            self,
            mapper,
            useobject=True,
            callable_=self._load_for_state,
            uselist=self.parent_property.uselist,
            backref=self.parent_property.back_populates,
            typecallable=self.parent_property.collection_class,
            active_history=active_history
        )
    def lazy_clause(
            self, state, reverse_direction=False,
            alias_secondary=False,
            adapt_source=None,
            passive=None):
        """Produce the WHERE criterion for loading the related object(s)
        of ``state``, binding the parent's attribute values into the
        pre-built lazy clause.
        """
        if state is None:
            return self._lazy_none_clause(
                reverse_direction,
                adapt_source=adapt_source)
        if not reverse_direction:
            criterion, bind_to_col = \
                self._lazywhere, \
                self._bind_to_col
        else:
            criterion, bind_to_col = \
                self._rev_lazywhere, \
                self._rev_bind_to_col
        if reverse_direction:
            mapper = self.parent_property.mapper
        else:
            mapper = self.parent_property.parent
        o = state.obj()  # strong ref
        dict_ = attributes.instance_dict(o)
        # use the "committed state" only if we're in a flush
        # for this state.
        if passive and passive & attributes.LOAD_AGAINST_COMMITTED:
            def visit_bindparam(bindparam):
                if bindparam._identifying_key in bind_to_col:
                    bindparam.callable = \
                        lambda: mapper._get_committed_state_attr_by_column(
                            state, dict_,
                            bind_to_col[bindparam._identifying_key])
        else:
            def visit_bindparam(bindparam):
                if bindparam._identifying_key in bind_to_col:
                    bindparam.callable = \
                        lambda: mapper._get_state_attr_by_column(
                            state, dict_,
                            bind_to_col[bindparam._identifying_key])
        if self.parent_property.secondary is not None and alias_secondary:
            criterion = sql_util.ClauseAdapter(
                self.parent_property.secondary.alias()).\
                traverse(criterion)
        criterion = visitors.cloned_traverse(
            criterion, {}, {'bindparam': visit_bindparam})
        if adapt_source:
            criterion = adapt_source(criterion)
        return criterion
    def _lazy_none_clause(self, reverse_direction=False, adapt_source=None):
        # variant of lazy_clause for a None state: render the criterion
        # with IS NULL comparisons in place of bound parameters.
        if not reverse_direction:
            criterion, bind_to_col = \
                self._lazywhere, \
                self._bind_to_col
        else:
            criterion, bind_to_col = \
                self._rev_lazywhere, \
                self._rev_bind_to_col
        criterion = sql_util.adapt_criterion_to_null(criterion, bind_to_col)
        if adapt_source:
            criterion = adapt_source(criterion)
        return criterion
    def _load_for_state(self, state, passive):
        """Load the related object(s) for ``state`` on first attribute
        access, honoring the ``passive`` flags.
        """
        if not state.key and (
            (
                not self.parent_property.load_on_pending
                and not state._load_pending
            )
            or not state.session_id
        ):
            return attributes.ATTR_EMPTY
        pending = not state.key
        ident_key = None
        if (
            (not passive & attributes.SQL_OK and not self.use_get)
            or
            (not passive & attributes.NON_PERSISTENT_OK and pending)
        ):
            return attributes.PASSIVE_NO_RESULT
        session = _state_session(state)
        if not session:
            raise orm_exc.DetachedInstanceError(
                "Parent instance %s is not bound to a Session; "
                "lazy load operation of attribute '%s' cannot proceed" %
                (orm_util.state_str(state), self.key)
            )
        # if we have a simple primary key load, check the
        # identity map without generating a Query at all
        if self.use_get:
            ident = self._get_ident_for_use_get(
                session,
                state,
                passive
            )
            if attributes.PASSIVE_NO_RESULT in ident:
                return attributes.PASSIVE_NO_RESULT
            elif attributes.NEVER_SET in ident:
                return attributes.NEVER_SET
            if _none_set.issuperset(ident):
                # all-NULL foreign key; the related object is None
                return None
            ident_key = self.mapper.identity_key_from_primary_key(ident)
            instance = loading.get_from_identity(session, ident_key, passive)
            if instance is not None:
                return instance
            elif not passive & attributes.SQL_OK or \
                    not passive & attributes.RELATED_OBJECT_OK:
                return attributes.PASSIVE_NO_RESULT
        return self._emit_lazyload(session, state, ident_key, passive)
    def _get_ident_for_use_get(self, session, state, passive):
        # assemble the related object's primary key identity from the
        # parent's column values.
        instance_mapper = state.manager.mapper
        if passive & attributes.LOAD_AGAINST_COMMITTED:
            get_attr = instance_mapper._get_committed_state_attr_by_column
        else:
            get_attr = instance_mapper._get_state_attr_by_column
        dict_ = state.dict
        return [
            get_attr(
                state,
                dict_,
                self._equated_columns[pk],
                passive=passive)
            for pk in self.mapper.primary_key
        ]
    @util.dependencies("sqlalchemy.orm.strategy_options")
    def _emit_lazyload(
            self, strategy_options, session, state,
            ident_key, passive):
        # construct and execute the Query that performs the lazy load
        q = session.query(self.mapper)._adapt_all_clauses()
        if self.parent_property.secondary is not None:
            q = q.select_from(self.mapper, self.parent_property.secondary)
        q = q._with_invoke_all_eagers(False)
        pending = not state.key
        # don't autoflush on pending
        if pending or passive & attributes.NO_AUTOFLUSH:
            q = q.autoflush(False)
        if state.load_path:
            q = q._with_current_path(state.load_path[self.parent_property])
        if state.load_options:
            q = q._conditional_options(*state.load_options)
        if self.use_get:
            return loading.load_on_ident(q, ident_key)
        if self.parent_property.order_by:
            q = q.order_by(*util.to_list(self.parent_property.order_by))
        for rev in self.parent_property._reverse_property:
            # reverse props that are MANYTOONE are loading *this*
            # object from get(), so don't need to eager out to those.
            if rev.direction is interfaces.MANYTOONE and \
                    rev._use_get and \
                    not isinstance(rev.strategy, LazyLoader):
                q = q.options(
                    strategy_options.Load(rev.parent).
                    lazyload(rev.key))
        lazy_clause = self.lazy_clause(state, passive=passive)
        if pending:
            bind_values = sql_util.bind_values(lazy_clause)
            if None in bind_values:
                # the pending parent's key is incomplete; nothing to load
                return None
        q = q.filter(lazy_clause)
        result = q.all()
        if self.uselist:
            return result
        else:
            l = len(result)
            if l:
                if l > 1:
                    util.warn(
                        "Multiple rows returned with "
                        "uselist=False for lazily-loaded attribute '%s' "
                        % self.parent_property)
                return result[0]
            else:
                return None
    def create_row_processor(
            self, context, path, loadopt,
            mapper, row, adapter):
        key = self.key
        if not self.is_class_level:
            # we are not the primary manager for this attribute
            # on this class - set up a
            # per-instance lazyloader, which will override the
            # class-level behavior.
            # this currently only happens when using a
            # "lazyload" option on a "no load"
            # attribute - "eager" attributes always have a
            # class-level lazyloader installed.
            set_lazy_callable = InstanceState._row_processor(
                mapper.class_manager,
                LoadLazyAttribute(key), key)
            return set_lazy_callable, None, None
        else:
            def reset_for_lazy_callable(state, dict_, row):
                # we are the primary manager for this attribute on
                # this class - reset its
                # per-instance attribute state, so that the class-level
                # lazy loader is
                # executed when next referenced on this instance.
                # this is needed in
                # populate_existing() types of scenarios to reset
                # any existing state.
                state._reset(dict_, key)
            return reset_for_lazy_callable, None, None
class LoadLazyAttribute(object):
    """Serializable per-instance callable used by LazyLoader to trigger
    a lazy load for a particular attribute."""
    def __init__(self, key):
        self.key = key
    def __call__(self, state, passive=attributes.PASSIVE_OFF):
        # locate the LazyLoader strategy for this attribute and
        # delegate the actual load to it.
        prop = state.manager.mapper._props[self.key]
        loader = prop._strategies[LazyLoader]
        return loader._load_for_state(state, passive)
@properties.RelationshipProperty.strategy_for(lazy="immediate")
class ImmediateLoader(AbstractRelationshipLoader):
    """Loading behavior for "lazy='immediate'": the related attribute is
    loaded via its normal (lazy) getter immediately after each parent
    row is processed.
    """
    def init_class_attribute(self, mapper):
        lazy_strategy = self.parent_property._get_strategy_by_cls(LazyLoader)
        lazy_strategy.init_class_attribute(mapper)
    def setup_query(
            self, context, entity,
            path, loadopt, adapter, column_collection=None,
            parentmapper=None, **kwargs):
        # nothing to add to the SELECT; the load happens post-row
        pass
    def create_row_processor(
            self, context, path, loadopt,
            mapper, row, adapter):
        def load_immediate(state, dict_, row):
            # invoke the attribute getter, firing the lazy load now
            state.get_impl(self.key).get(state, dict_)
        return None, None, load_immediate
@log.class_logger
@properties.RelationshipProperty.strategy_for(lazy="subquery")
class SubqueryLoader(AbstractRelationshipLoader):
    """Loading behavior for "lazy='subquery'": emit a second SELECT,
    derived from the original query, that loads the related
    collections/objects for all parent rows at once.
    """
    def __init__(self, parent):
        super(SubqueryLoader, self).__init__(parent)
        self.join_depth = self.parent_property.join_depth
    def init_class_attribute(self, mapper):
        # plain attribute access falls back to lazy loading
        self.parent_property.\
            _get_strategy_by_cls(LazyLoader).\
            init_class_attribute(mapper)
    def setup_query(
            self, context, entity,
            path, loadopt, adapter,
            column_collection=None,
            parentmapper=None, **kwargs):
        """Build the subquery-load Query for this path and stash it in
        the query context for create_row_processor() to pick up.
        """
        if not context.query._enable_eagerloads:
            return
        path = path[self.parent_property]
        # build up a path indicating the path from the leftmost
        # entity to the thing we're subquery loading.
        with_poly_info = path.get(
            context.attributes,
            "path_with_polymorphic", None)
        if with_poly_info is not None:
            effective_entity = with_poly_info.entity
        else:
            effective_entity = self.mapper
        subq_path = context.attributes.get(
            ('subquery_path', None),
            orm_util.PathRegistry.root)
        subq_path = subq_path + path
        # if not via query option, check for
        # a cycle
        if not path.contains(context.attributes, "loader"):
            if self.join_depth:
                if path.length / 2 > self.join_depth:
                    return
            elif subq_path.contains_mapper(self.mapper):
                return
        leftmost_mapper, leftmost_attr, leftmost_relationship = \
            self._get_leftmost(subq_path)
        orig_query = context.attributes.get(
            ("orig_query", SubqueryLoader),
            context.query)
        # generate a new Query from the original, then
        # produce a subquery from it.
        left_alias = self._generate_from_original_query(
            orig_query, leftmost_mapper,
            leftmost_attr, leftmost_relationship,
            entity.entity_zero
        )
        # generate another Query that will join the
        # left alias to the target relationships.
        # basically doing a longhand
        # "from_self()". (from_self() itself not quite industrial
        # strength enough for all contingencies...but very close)
        q = orig_query.session.query(effective_entity)
        q._attributes = {
            ("orig_query", SubqueryLoader): orig_query,
            ('subquery_path', None): subq_path
        }
        q = q._set_enable_single_crit(False)
        to_join, local_attr, parent_alias = \
            self._prep_for_joins(left_alias, subq_path)
        q = q.order_by(*local_attr)
        q = q.add_columns(*local_attr)
        q = self._apply_joins(
            q, to_join, left_alias,
            parent_alias, effective_entity)
        q = self._setup_options(q, subq_path, orig_query, effective_entity)
        q = self._setup_outermost_orderby(q)
        # add new query to attributes to be picked up
        # by create_row_processor
        path.set(context.attributes, "subquery", q)
    def _get_leftmost(self, subq_path):
        # identify the leftmost mapper/attribute of the subquery path;
        # its columns anchor the correlation back to the original query.
        subq_path = subq_path.path
        subq_mapper = orm_util._class_to_mapper(subq_path[0])
        # determine attributes of the leftmost mapper
        if self.parent.isa(subq_mapper) and \
                self.parent_property is subq_path[1]:
            leftmost_mapper, leftmost_prop = \
                self.parent, self.parent_property
        else:
            leftmost_mapper, leftmost_prop = \
                subq_mapper, \
                subq_path[1]
        leftmost_cols = leftmost_prop.local_columns
        leftmost_attr = [
            getattr(
                subq_path[0].entity,
                leftmost_mapper._columntoproperty[c].key)
            for c in leftmost_cols
        ]
        return leftmost_mapper, leftmost_attr, leftmost_prop
    def _generate_from_original_query(
            self,
            orig_query, leftmost_mapper,
            leftmost_attr, leftmost_relationship, orig_entity
    ):
        # reformat the original query
        # to look only for significant columns
        q = orig_query._clone().correlate(None)
        # set a real "from" if not present, as this is more
        # accurate than just going off of the column expression
        if not q._from_obj and orig_entity.mapper.isa(leftmost_mapper):
            q._set_select_from([orig_entity], False)
        target_cols = q._adapt_col_list(leftmost_attr)
        # select from the identity columns of the outer
        q._set_entities(target_cols)
        distinct_target_key = leftmost_relationship.distinct_target_key
        if distinct_target_key is True:
            q._distinct = True
        elif distinct_target_key is None:
            # if target_cols refer to a non-primary key or only
            # part of a composite primary key, set the q as distinct
            for t in set(c.table for c in target_cols):
                if not set(target_cols).issuperset(t.primary_key):
                    q._distinct = True
                    break
        if q._order_by is False:
            q._order_by = leftmost_mapper.order_by
        # don't need ORDER BY if no limit/offset
        if q._limit is None and q._offset is None:
            q._order_by = None
        # the original query now becomes a subquery
        # which we'll join onto.
        embed_q = q.with_labels().subquery()
        left_alias = orm_util.AliasedClass(
            leftmost_mapper, embed_q,
            use_mapper_path=True)
        return left_alias
    def _prep_for_joins(self, left_alias, subq_path):
        # figure out what's being joined. a.k.a. the fun part
        to_join = []
        pairs = list(subq_path.pairs())
        for i, (mapper, prop) in enumerate(pairs):
            if i > 0:
                # look at the previous mapper in the chain -
                # if it is as or more specific than this prop's
                # mapper, use that instead.
                # note we have an assumption here that
                # the non-first element is always going to be a mapper,
                # not an AliasedClass
                prev_mapper = pairs[i - 1][1].mapper
                to_append = prev_mapper if prev_mapper.isa(mapper) else mapper
            else:
                to_append = mapper
            to_join.append((to_append, prop.key))
        # determine the immediate parent class we are joining from,
        # which needs to be aliased.
        if len(to_join) > 1:
            info = inspect(to_join[-1][0])
        if len(to_join) < 2:
            # in the case of a one level eager load, this is the
            # leftmost "left_alias".
            parent_alias = left_alias
        elif info.mapper.isa(self.parent):
            # In the case of multiple levels, retrieve
            # it from subq_path[-2]. This is the same as self.parent
            # in the vast majority of cases, and [ticket:2014]
            # illustrates a case where sub_path[-2] is a subclass
            # of self.parent
            parent_alias = orm_util.AliasedClass(
                to_join[-1][0],
                use_mapper_path=True)
        else:
            # if of_type() were used leading to this relationship,
            # self.parent is more specific than subq_path[-2]
            parent_alias = orm_util.AliasedClass(
                self.parent,
                use_mapper_path=True)
        local_cols = self.parent_property.local_columns
        local_attr = [
            getattr(parent_alias, self.parent._columntoproperty[c].key)
            for c in local_cols
        ]
        return to_join, local_attr, parent_alias
    def _apply_joins(
            self, q, to_join, left_alias, parent_alias,
            effective_entity):
        for i, (mapper, key) in enumerate(to_join):
            # we need to use query.join() as opposed to
            # orm.join() here because of the
            # rich behavior it brings when dealing with
            # "with_polymorphic" mappers. "aliased"
            # and "from_joinpoint" take care of most of
            # the chaining and aliasing for us.
            first = i == 0
            middle = i < len(to_join) - 1
            second_to_last = i == len(to_join) - 2
            last = i == len(to_join) - 1
            if first:
                attr = getattr(left_alias, key)
                if last and effective_entity is not self.mapper:
                    attr = attr.of_type(effective_entity)
            else:
                if last and effective_entity is not self.mapper:
                    attr = getattr(parent_alias, key).\
                        of_type(effective_entity)
                else:
                    attr = getattr(mapper.entity, key)
            if second_to_last:
                q = q.join(parent_alias, attr, from_joinpoint=True)
            else:
                q = q.join(attr, aliased=middle, from_joinpoint=True)
        return q
    def _setup_options(self, q, subq_path, orig_query, effective_entity):
        # propagate loader options etc. to the new query.
        # these will fire relative to subq_path.
        q = q._with_current_path(subq_path)
        q = q._conditional_options(*orig_query._with_options)
        if orig_query._populate_existing:
            q._populate_existing = orig_query._populate_existing
        return q
    def _setup_outermost_orderby(self, q):
        if self.parent_property.order_by:
            # if there's an ORDER BY, alias it the same
            # way joinedloader does, but we have to pull out
            # the "eagerjoin" from the query.
            # this really only picks up the "secondary" table
            # right now.
            eagerjoin = q._from_obj[0]
            eager_order_by = \
                eagerjoin._target_adapter.\
                copy_and_process(
                    util.to_list(
                        self.parent_property.order_by
                    )
                )
            q = q.order_by(*eager_order_by)
        return q
    class _SubqCollections(object):
        """Given a :class:`.Query` used to emit the "subquery load",
        provide a load interface that executes the query at the
        first moment a value is needed.
        """
        _data = None
        def __init__(self, subq):
            self.subq = subq
        def get(self, key, default):
            if self._data is None:
                self._load()
            return self._data.get(key, default)
        def _load(self):
            # execute the subquery and group its rows by the correlation
            # columns (everything in the row past the entity itself).
            self._data = dict(
                (k, [vv[0] for vv in v])
                for k, v in itertools.groupby(
                    self.subq,
                    lambda x: x[1:]
                )
            )
        def loader(self, state, dict_, row):
            # ensure the subquery has run by the end of row processing
            if self._data is None:
                self._load()
    def create_row_processor(
            self, context, path, loadopt,
            mapper, row, adapter):
        if not self.parent.class_manager[self.key].impl.supports_population:
            raise sa_exc.InvalidRequestError(
                "'%s' does not support object "
                "population - eager loading cannot be applied." %
                self)
        path = path[self.parent_property]
        subq = path.get(context.attributes, 'subquery')
        if subq is None:
            return None, None, None
        local_cols = self.parent_property.local_columns
        # cache the loaded collections in the context
        # so that inheriting mappers don't re-load when they
        # call upon create_row_processor again
        collections = path.get(context.attributes, "collections")
        if collections is None:
            collections = self._SubqCollections(subq)
            path.set(context.attributes, 'collections', collections)
        if adapter:
            local_cols = [adapter.columns[c] for c in local_cols]
        if self.uselist:
            return self._create_collection_loader(collections, local_cols)
        else:
            return self._create_scalar_loader(collections, local_cols)
    def _create_collection_loader(self, collections, local_cols):
        # row processor for uselist=True: look up the grouped rows by
        # the parent's key tuple and install them as the collection.
        def load_collection_from_subq(state, dict_, row):
            collection = collections.get(
                tuple([row[col] for col in local_cols]),
                ()
            )
            state.get_impl(self.key).\
                set_committed_value(state, dict_, collection)
        return load_collection_from_subq, None, None, collections.loader
    def _create_scalar_loader(self, collections, local_cols):
        # row processor for uselist=False: expect at most one related
        # row per parent; warn when more than one is present.
        def load_scalar_from_subq(state, dict_, row):
            collection = collections.get(
                tuple([row[col] for col in local_cols]),
                (None,)
            )
            if len(collection) > 1:
                util.warn(
                    "Multiple rows returned with "
                    "uselist=False for eagerly-loaded attribute '%s' "
                    % self)
            scalar = collection[0]
            state.get_impl(self.key).\
                set_committed_value(state, dict_, scalar)
        return load_scalar_from_subq, None, None, collections.loader
@log.class_logger
@properties.RelationshipProperty.strategy_for(lazy="joined")
@properties.RelationshipProperty.strategy_for(lazy=False)
class JoinedLoader(AbstractRelationshipLoader):
"""Provide loading behavior for a :class:`.RelationshipProperty`
using joined eager loading.
"""
def __init__(self, parent):
super(JoinedLoader, self).__init__(parent)
self.join_depth = self.parent_property.join_depth
def init_class_attribute(self, mapper):
self.parent_property.\
_get_strategy_by_cls(LazyLoader).init_class_attribute(mapper)
def setup_query(
self, context, entity, path, loadopt, adapter,
column_collection=None, parentmapper=None,
chained_from_outerjoin=False,
**kwargs):
"""Add a left outer join to the statement that's being constructed."""
if not context.query._enable_eagerloads:
return
path = path[self.parent_property]
with_polymorphic = None
user_defined_adapter = self._init_user_defined_eager_proc(
loadopt, context) if loadopt else False
if user_defined_adapter is not False:
clauses, adapter, add_to_collection = \
self._setup_query_on_user_defined_adapter(
context, entity, path, adapter,
user_defined_adapter
)
else:
# if not via query option, check for
# a cycle
if not path.contains(context.attributes, "loader"):
if self.join_depth:
if path.length / 2 > self.join_depth:
return
elif path.contains_mapper(self.mapper):
return
clauses, adapter, add_to_collection, chained_from_outerjoin = \
self._generate_row_adapter(
context, entity, path, loadopt, adapter,
column_collection, parentmapper, chained_from_outerjoin
)
with_poly_info = path.get(
context.attributes,
"path_with_polymorphic",
None
)
if with_poly_info is not None:
with_polymorphic = with_poly_info.with_polymorphic_mappers
else:
with_polymorphic = None
path = path[self.mapper]
for value in self.mapper._iterate_polymorphic_properties(
mappers=with_polymorphic):
value.setup(
context,
entity,
path,
clauses,
parentmapper=self.mapper,
column_collection=add_to_collection,
chained_from_outerjoin=chained_from_outerjoin)
if with_poly_info is not None and \
None in set(context.secondary_columns):
raise sa_exc.InvalidRequestError(
"Detected unaliased columns when generating joined "
"load. Make sure to use aliased=True or flat=True "
"when using joined loading with with_polymorphic()."
)
    def _init_user_defined_eager_proc(self, loadopt, context):
        """Return (building and caching if needed) the row adapter for a
        user-defined eager alias option ("eager_from_alias" in the load
        option's local_opts), or False when the option does not apply.
        """
        # check if the opt applies at all
        if "eager_from_alias" not in loadopt.local_opts:
            # nope
            return False
        path = loadopt.path.parent
        # the option applies. check if the "user_defined_eager_row_processor"
        # has been built up.
        adapter = path.get(
            context.attributes,
            "user_defined_eager_row_processor", False)
        if adapter is not False:
            # just return it
            return adapter
        # otherwise figure it out.
        alias = loadopt.local_opts["eager_from_alias"]
        root_mapper, prop = path[-2:]
        #from .mapper import Mapper
        #from .interfaces import MapperProperty
        #assert isinstance(root_mapper, Mapper)
        #assert isinstance(prop, MapperProperty)
        if alias is not None:
            # a string alias name was given; turn it into an alias of the
            # relationship's target table.
            if isinstance(alias, str):
                alias = prop.target.alias(alias)
            adapter = sql_util.ColumnAdapter(
                alias,
                equivalents=prop.mapper._equivalent_columns)
        else:
            # alias of None means "adapt to the entity in the query itself"
            if path.contains(context.attributes, "path_with_polymorphic"):
                with_poly_info = path.get(
                    context.attributes,
                    "path_with_polymorphic")
                adapter = orm_util.ORMAdapter(
                    with_poly_info.entity,
                    equivalents=prop.mapper._equivalent_columns)
            else:
                adapter = context.query._polymorphic_adapters.get(
                    prop.mapper, None)
        # cache for subsequent calls on this path
        path.set(
            context.attributes,
            "user_defined_eager_row_processor",
            adapter)
        return adapter
    def _setup_query_on_user_defined_adapter(
            self, context, entity,
            path, adapter, user_defined_adapter):
        """Combine the user-defined adapter with the entity's own query
        clauses and store the result on the path; columns are directed
        into the query's primary column collection.
        """
        # apply some more wrapping to the "user defined adapter"
        # if we are setting up the query for SQL render.
        adapter = entity._get_entity_clauses(context.query, context)
        if adapter and user_defined_adapter:
            user_defined_adapter = user_defined_adapter.wrap(adapter)
            path.set(
                context.attributes, "user_defined_eager_row_processor",
                user_defined_adapter)
        elif adapter:
            user_defined_adapter = adapter
            path.set(
                context.attributes, "user_defined_eager_row_processor",
                user_defined_adapter)
        add_to_collection = context.primary_columns
        return user_defined_adapter, adapter, add_to_collection
    def _generate_row_adapter(
            self,
            context, entity, path, loadopt, adapter,
            column_collection, parentmapper, chained_from_outerjoin):
        """Create an ORMAdapter against an aliased version of the target
        entity and schedule construction of the eager join.

        The actual join is built later by ``_create_eager_join``, queued
        via ``context.create_eager_joins``.  Returns the tuple
        (clauses, adapter, add_to_collection, chained_from_outerjoin).
        """
        with_poly_info = path.get(
            context.attributes,
            "path_with_polymorphic",
            None
        )
        if with_poly_info:
            to_adapt = with_poly_info.entity
        else:
            # generate a fresh anonymous alias of the target so the eager
            # join never collides with other references to the same table.
            to_adapt = orm_util.AliasedClass(
                self.mapper,
                flat=True,
                use_mapper_path=True)
        clauses = orm_util.ORMAdapter(
            to_adapt,
            equivalents=self.mapper._equivalent_columns,
            adapt_required=True)
        assert clauses.aliased_class is not None
        if self.parent_property.direction != interfaces.MANYTOONE:
            # one-to-many / many-to-many joins can multiply parent rows
            context.multi_row_eager_loaders = True
        # per-option innerjoin flag overrides the relationship default
        innerjoin = (
            loadopt.local_opts.get(
                'innerjoin', self.parent_property.innerjoin)
            if loadopt is not None
            else self.parent_property.innerjoin
        )
        if not innerjoin:
            # if this is an outer join, all non-nested eager joins from
            # this path must also be outer joins
            chained_from_outerjoin = True
        context.create_eager_joins.append(
            (
                self._create_eager_join, context,
                entity, path, adapter,
                parentmapper, clauses, innerjoin, chained_from_outerjoin
            )
        )
        add_to_collection = context.secondary_columns
        path.set(context.attributes, "eager_row_processor", clauses)
        return clauses, adapter, add_to_collection, chained_from_outerjoin
    def _create_eager_join(
            self, context, entity,
            path, adapter, parentmapper,
            clauses, innerjoin, chained_from_outerjoin):
        """Build the actual eager JOIN for this relationship and register
        it in ``context.eager_joins``.

        Called deferred (queued by ``_generate_row_adapter``), once the
        query's FROM structure is known.  Handles inner vs. outer joins,
        right-nested joins for 'nested' innerjoin chains, and undeferral
        of parent columns referenced by the primaryjoin.
        """
        if parentmapper is None:
            localparent = entity.mapper
        else:
            localparent = parentmapper
        # whether or not the Query will wrap the selectable in a subquery,
        # and then attach eager load joins to that (i.e., in the case of
        # LIMIT/OFFSET etc.)
        should_nest_selectable = context.multi_row_eager_loaders and \
            context.query._should_nest_selectable
        entity_key = None
        if entity not in context.eager_joins and \
                not should_nest_selectable and \
                context.from_clause:
            index, clause = sql_util.find_join_source(
                context.from_clause, entity.selectable)
            if clause is not None:
                # join to an existing FROM clause on the query.
                # key it to its list index in the eager_joins dict.
                # Query._compile_context will adapt as needed and
                # append to the FROM clause of the select().
                entity_key, default_towrap = index, clause
        if entity_key is None:
            entity_key, default_towrap = entity, entity.selectable
        towrap = context.eager_joins.setdefault(entity_key, default_towrap)
        # determine the ON clause source; fall back to the relationship
        # property itself when no aliased attribute can be resolved.
        if adapter:
            if getattr(adapter, 'aliased_class', None):
                onclause = getattr(
                    adapter.aliased_class, self.key,
                    self.parent_property)
            else:
                onclause = getattr(
                    orm_util.AliasedClass(
                        self.parent,
                        adapter.selectable,
                        use_mapper_path=True
                    ),
                    self.key, self.parent_property
                )
        else:
            onclause = self.parent_property
        assert clauses.aliased_class is not None
        join_to_outer = innerjoin and isinstance(towrap, sql.Join) and \
            towrap.isouter
        if chained_from_outerjoin and join_to_outer and innerjoin == 'nested':
            # 'nested' innerjoin chained from an outer join: produce a
            # right-nested "a LEFT OUTER JOIN (b JOIN c)" structure.
            inner = orm_util.join(
                towrap.right,
                clauses.aliased_class,
                onclause,
                isouter=False
            )
            eagerjoin = orm_util.join(
                towrap.left,
                inner,
                towrap.onclause,
                isouter=True
            )
            eagerjoin._target_adapter = inner._target_adapter
        else:
            if chained_from_outerjoin:
                innerjoin = False
            eagerjoin = orm_util.join(
                towrap,
                clauses.aliased_class,
                onclause,
                isouter=not innerjoin
            )
        context.eager_joins[entity_key] = eagerjoin
        # send a hint to the Query as to where it may "splice" this join
        eagerjoin.stop_on = entity.selectable
        if self.parent_property.secondary is None and \
                not parentmapper:
            # for parentclause that is the non-eager end of the join,
            # ensure all the parent cols in the primaryjoin are actually
            # in the
            # columns clause (i.e. are not deferred), so that aliasing applied
            # by the Query propagates those columns outward.
            # This has the effect
            # of "undefering" those columns.
            for col in sql_util._find_columns(
                    self.parent_property.primaryjoin):
                if localparent.mapped_table.c.contains_column(col):
                    if adapter:
                        col = adapter.columns[col]
                    context.primary_columns.append(col)
        if self.parent_property.order_by:
            context.eager_order_by += eagerjoin._target_adapter.\
                copy_and_process(
                    util.to_list(
                        self.parent_property.order_by
                    )
                )
    def _create_eager_adapter(self, context, row, adapter, path, loadopt):
        """Return the row adapter ("decorator") to use when reading this
        relationship's columns from a result row, or False when the row
        does not contain the eagerly-loaded columns (caller then degrades
        to lazy loading).
        """
        user_defined_adapter = self._init_user_defined_eager_proc(
            loadopt, context) if loadopt else False
        if user_defined_adapter is not False:
            decorator = user_defined_adapter
            # user defined eagerloads are part of the "primary"
            # portion of the load.
            # the adapters applied to the Query should be honored.
            if context.adapter and decorator:
                decorator = decorator.wrap(context.adapter)
            elif context.adapter:
                decorator = context.adapter
        else:
            decorator = path.get(context.attributes, "eager_row_processor")
            if decorator is None:
                return False
        # probe: can an identity key be extracted from this row through
        # the adapter?  If not, the eager columns are absent.
        try:
            self.mapper.identity_key_from_row(row, decorator)
            return decorator
        except KeyError:
            # no identity key - don't return a row
            # processor, will cause a degrade to lazy
            return False
    def create_row_processor(
            self, context, path, loadopt, mapper,
            row, adapter):
        """Return the row-processing callables for this relationship.

        Uses the eager (joined) row adapter when the row carries the
        joined columns; otherwise falls back to the LazyLoader strategy's
        row processor.
        """
        if not self.parent.class_manager[self.key].impl.supports_population:
            raise sa_exc.InvalidRequestError(
                "'%s' does not support object "
                "population - eager loading cannot be applied." %
                self
            )
        our_path = path[self.parent_property]
        eager_adapter = self._create_eager_adapter(
            context,
            row,
            adapter, our_path, loadopt)
        if eager_adapter is not False:
            key = self.key
            _instance = loading.instance_processor(
                self.mapper,
                context,
                our_path[self.mapper],
                eager_adapter)
            # scalar vs. collection population depending on uselist
            if not self.uselist:
                return self._create_scalar_loader(context, key, _instance)
            else:
                return self._create_collection_loader(context, key, _instance)
        else:
            return self.parent_property._get_strategy_by_cls(LazyLoader).\
                create_row_processor(
                    context, path, loadopt,
                    mapper, row, adapter)
    def _create_collection_loader(self, context, key, _instance):
        """Return the (new_row, existing_row, None, exec_) callables that
        populate a uselist=True collection from joined rows.
        """
        def load_collection_from_joined_new_row(state, dict_, row):
            # first row for this parent: initialize the collection and
            # remember its appender for subsequent rows.
            collection = attributes.init_state_collection(
                state, dict_, key)
            result_list = util.UniqueAppender(collection,
                                              'append_without_event')
            context.attributes[(state, key)] = result_list
            _instance(row, result_list)
        def load_collection_from_joined_existing_row(state, dict_, row):
            if (state, key) in context.attributes:
                result_list = context.attributes[(state, key)]
            else:
                # appender_key can be absent from context.attributes
                # with isnew=False when self-referential eager loading
                # is used; the same instance may be present in two
                # distinct sets of result columns
                collection = attributes.init_state_collection(
                    state, dict_, key)
                result_list = util.UniqueAppender(
                    collection,
                    'append_without_event')
                context.attributes[(state, key)] = result_list
            _instance(row, result_list)
        def load_collection_from_joined_exec(state, dict_, row):
            # still descend into the row so nested loaders run, but do not
            # populate the collection.
            _instance(row, None)
        return load_collection_from_joined_new_row, \
            load_collection_from_joined_existing_row, \
            None, load_collection_from_joined_exec
    def _create_scalar_loader(self, context, key, _instance):
        """Return the (new_row, existing_row, None, exec_) callables that
        populate a uselist=False (scalar) attribute from joined rows.
        """
        def load_scalar_from_joined_new_row(state, dict_, row):
            # set a scalar object instance directly on the parent
            # object, bypassing InstrumentedAttribute event handlers.
            dict_[key] = _instance(row, None)
        def load_scalar_from_joined_existing_row(state, dict_, row):
            # call _instance on the row, even though the object has
            # been created, so that we further descend into properties
            existing = _instance(row, None)
            if existing is not None \
                    and key in dict_ \
                    and existing is not dict_[key]:
                # a second distinct related object for a scalar attribute
                # indicates duplicate rows; warn rather than overwrite.
                util.warn(
                    "Multiple rows returned with "
                    "uselist=False for eagerly-loaded attribute '%s' "
                    % self)
        def load_scalar_from_joined_exec(state, dict_, row):
            _instance(row, None)
        return load_scalar_from_joined_new_row, \
            load_scalar_from_joined_existing_row, \
            None, load_scalar_from_joined_exec
def single_parent_validator(desc, prop):
    """Attach 'append'/'set' attribute events to *desc* that raise
    InvalidRequestError when an object already associated with one parent
    via *prop* is attached to a second parent.
    """
    def _do_check(state, value, oldvalue, initiator):
        if value is not None and initiator.key == prop.key:
            hasparent = initiator.hasparent(attributes.instance_state(value))
            # re-assignment of the same value is allowed; a different
            # parent while one already exists is not.
            if hasparent and oldvalue is not value:
                raise sa_exc.InvalidRequestError(
                    "Instance %s is already associated with an instance "
                    "of %s via its %s attribute, and is only allowed a "
                    "single parent." %
                    (orm_util.instance_str(value), state.class_, prop)
                )
        return value
    def append(state, value, initiator):
        return _do_check(state, value, None, initiator)
    def set_(state, value, oldvalue, initiator):
        return _do_check(state, value, oldvalue, initiator)
    # raw=True/retval=True: handlers receive InstanceState and must return
    # the (possibly vetoed) value.
    event.listen(
        desc, 'append', append, raw=True, retval=True,
        active_history=True)
    event.listen(
        desc, 'set', set_, raw=True, retval=True,
        active_history=True)
| bsd-3-clause |
aoom/pattern | docs/update.py | 21 | 5101 | #### DOCUMENTATION GENERATOR ##########################################################################
# Keeps the offline documention in synch with the online documentation.
# Simply run "python update.py" to generate the latest version.
import os, sys; sys.path.insert(0, os.path.join(".."))
import codecs
import re
from pattern.web import URL, Document, strip_javascript, strip_between
url = "http://www.clips.ua.ac.be/pages/"
#--- HTML TEMPLATE -----------------------------------------------------------------------------------
# Use a simplified HTML template based on the online documentation.
template = """
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html>
<head>
<title>%s</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<link type="text/css" rel="stylesheet" href="../clips.css" />
<style>
/* Small fixes because we omit the online layout.css. */
h3 { line-height: 1.3em; }
#page { margin-left: auto; margin-right: auto; }
#header, #header-inner { height: 175px; }
#header { border-bottom: 1px solid #C6D4DD; }
table { border-collapse: collapse; }
#checksum { display: none; }
</style>
<link href="../js/shCore.css" rel="stylesheet" type="text/css" />
<link href="../js/shThemeDefault.css" rel="stylesheet" type="text/css" />
<script language="javascript" src="../js/shCore.js"></script>
<script language="javascript" src="../js/shBrushXml.js"></script>
<script language="javascript" src="../js/shBrushJScript.js"></script>
<script language="javascript" src="../js/shBrushPython.js"></script>
</head>
<body class="node-type-page one-sidebar sidebar-right section-pages">
<div id="page">
<div id="page-inner">
<div id="header"><div id="header-inner"></div></div>
<div id="content">
<div id="content-inner">
<div class="node node-type-page"
<div class="node-inner">
<div class="breadcrumb">View online at: <a href="%s" class="noexternal" target="_blank">%s</a></div>
<h1>%s</h1>
<!-- Parsed from the online documentation. -->
%s
</div>
</div>
</div>
</div>
</div>
</div>
<script>
SyntaxHighlighter.all();
</script>
</body>
</html>
""".strip()
#--- DOWNLOAD & UPDATE -------------------------------------------------------------------------------
# Fetch each online documentation page, strip site chrome, and write an
# offline copy into html/<page>.html.  NOTE: Python 2 script (print
# statements); requires network access to www.clips.ua.ac.be.
for p in ("-", "-web", "-db", "-search", "-vector", "-graph", "-canvas", "-metrics",
          "-de", "-en", "-es", "-fr", "-it", "-nl",
          "-shell", "stop-words", "mbsp-tags", "-dev"):
    # We include some useful pages (Penn Treebank tags, stop words) referenced in the documentation.
    if p.startswith("-"):
        # "-" -> "pattern", "-web" -> "pattern-web", etc.
        p = "pattern" + p.rstrip("-")
    title = p.replace("-", ".")
    if p == "stop-words":
        title = "Stop words"
    if p == "mbsp-tags":
        title = "Penn Treebank II tag set"
    # Download the online documentation pages.
    print "Retrieving", url + p
    html = URL(url + p).download(cached=False)
    # Parse the actual documentation, we don't need the website header, footer, navigation, search.
    html = Document(html)
    html = html.by_id("content-area")
    html = html.by_class("node-type-page")[0]
    html = html.source
    html = strip_javascript(html)
    html = strip_between('<div id="navbar">', '/#navbar -->', html)
    html = strip_between('<div id="sidebar-right">', '/#sidebar-right -->', html)
    html = strip_between('<div id="footer">', '/#footer -->', html)
    html = strip_between('<a class="twitter-share-button"', '</a>', html)
    # Link to local pages and images.
    # Link to online media.
    html = html.replace('href="/pages/MBSP"', 'href="%sMBSP"' % url)  # MBSP docs (online)
    html = re.sub('href="/pages/(pattern-examples.*?)"', 'href="%s\\1"' % url, html)  # examples (online)
    html = re.sub('href="/pages/(using-.*?)"', 'href="%s\\1"' % url, html)  # examples (online)
    html = re.sub('href="/pages/(modeling-.*?)"', 'href="%s\\1"' % url, html)  # examples (online)
    html = re.sub('href="/pages/(.*?)([#|"])', 'href="\\1.html\\2', html)  # pages (offline)
    html = html.replace('src="/media/', 'src="../g/')  # images (offline)
    html = html.replace('src="/sites/all/themes/clips/g/', 'src="../g/')  # images (offline)
    html = html.replace('href="/media/', 'href="%smedia/' % url.replace("pages/", ""))  # downloads (online)
    # Apply the simplified template + set page titles.
    # template has five %s slots: <title>, breadcrumb href, breadcrumb text,
    # <h1>, and the page body.
    html = template % (p, url+p, url+p, title, html)
    # Generate offline HTML file.
    f = os.path.join(os.path.dirname(__file__), "html", "%s.html" % p)
    f = codecs.open(f, "w", encoding="utf-8")
    f.write(html)
    f.close()
# Create index.html (which simply redirects to pattern.html).
# Use a context manager so the file handle is closed even if write() raises.
with open(os.path.join(os.path.dirname(__file__), "index.html"), "w") as f:
    f.write('<meta http-equiv="refresh" content="0; url=html/pattern.html" />')
SVoxel/R7800 | git_home/samba.git/third_party/pep8/testsuite/W19.py | 34 | 2637 | #: W191
if False:
print # indented with 1 tab
#:
#: W191
y = x == 2 \
or x == 3
#: E101 W191
if (
x == (
3
) or
y == 4):
pass
#: E101 W191
if x == 2 \
or y > 1 \
or x == 3:
pass
#: E101 W191
if x == 2 \
or y > 1 \
or x == 3:
pass
#:
#: E101 W191
if (foo == bar and
baz == frop):
pass
#: E101 W191
if (
foo == bar and
baz == frop
):
pass
#:
#: E101 E101 W191 W191
if start[1] > end_col and not (
over_indent == 4 and indent_next):
return(0, "E121 continuation line over-"
"indented for visual indent")
#:
#: E101 W191
def long_function_name(
var_one, var_two, var_three,
var_four):
print(var_one)
#: E101 W191
if ((row < 0 or self.moduleCount <= row or
col < 0 or self.moduleCount <= col)):
raise Exception("%s,%s - %s" % (row, col, self.moduleCount))
#: E101 E101 E101 E101 W191 W191 W191 W191 W191 W191
if bar:
return(
start, 'E121 lines starting with a '
'closing bracket should be indented '
"to match that of the opening "
"bracket's line"
)
#
#: E101 W191
# you want vertical alignment, so use a parens
if ((foo.bar("baz") and
foo.bar("frop")
)):
print "yes"
#: E101 W191
# also ok, but starting to look like LISP
if ((foo.bar("baz") and
foo.bar("frop"))):
print "yes"
#: E101 W191
if (a == 2 or
b == "abc def ghi"
"jkl mno"):
return True
#: E101 W191
if (a == 2 or
b == """abc def ghi
jkl mno"""):
return True
#: W191:2:1 W191:3:1 E101:3:2
if length > options.max_line_length:
return options.max_line_length, \
"E501 line too long (%d characters)" % length
#
#: E101 W191 W191
if os.path.exists(os.path.join(path, PEP8_BIN)):
cmd = ([os.path.join(path, PEP8_BIN)] +
self._pep8_options(targetfile))
#: W191
'''
multiline string with tab in it'''
#: E101 W191
'''multiline string
with tabs
and spaces
'''
#: Okay
'''sometimes, you just need to go nuts in a multiline string
and allow all sorts of crap
like mixed tabs and spaces
or trailing whitespace
or long long long long long long long long long long long long long long long long long lines
''' # nopep8
#: Okay
'''this one
will get no warning
even though the noqa comment is not immediately after the string
''' + foo # noqa
#
#: E101 W191
if foo is None and bar is "frop" and \
blah == 'yeah':
blah = 'yeahnah'
#
#: W191 W191 W191
if True:
foo(
1,
2)
#: W191 W191 W191 W191 W191
def test_keys(self):
"""areas.json - All regions are accounted for."""
expected = set([
u'Norrbotten',
u'V\xe4sterbotten',
])
#: W191
x = [
'abc'
]
#:
| gpl-2.0 |
haihala/modman | cli.py | 1 | 12445 | #!/usr/bin/env python3
try:
    import requests
except ImportError:
    # requests is required by mod_manager's network layer; fail early with
    # an actionable installation hint instead of a traceback later on.
    print("It looks like requests is not installed.")
    print("Try: pip3 install requests")
    exit(1)
import os
import sys
import subprocess
from getpass import getpass
import mod_manager
from mod_manager import server
from mod_manager.exceptions import LoginError
def open_gui_editor(filename):
    """Open *filename* in the platform's default GUI text editor.

    Falls back to printing manual-open instructions when the platform's
    opener command ("open" on macOS, "xdg-open" on Linux) is missing or
    the platform cannot be determined.
    """
    if sys.platform == "win32":
        os.startfile(filename)
    elif sys.platform.startswith("darwin"):
        try:
            subprocess.call(["open", filename])
        except FileNotFoundError:
            # Bug fix: the original never called .format(), so the literal
            # "{}" was printed instead of the opener command name.
            print("Your default editor \"{}\" could not be opened.".format("open"))
            print("You can manually open \"{}\" if you want to edit it.".format(filename))
    elif sys.platform.startswith("linux"):
        try:
            subprocess.call(["xdg-open", filename])
        except FileNotFoundError:
            # Bug fix: same missing .format() call as above.
            print("Your default editor \"{}\" could not be opened.".format("xdg-open"))
            print("You can manually open \"{}\" if you want to edit it.".format(filename))
    else:
        print("Could not determine text editor.")
        print("You can manually open \"{}\" if you want to edit it.".format(filename))
def open_editor(filename):
    """Open *filename* in the default text editor, preferring CLI editors.

    On macOS/Linux, honors the $EDITOR environment variable; otherwise
    (and on Windows) delegates to :func:`open_gui_editor`.
    """
    if sys.platform.startswith("win32"):
        open_gui_editor(filename)
    elif sys.platform.startswith("darwin") or sys.platform.startswith("linux"):
        default_editor = os.environ.get("EDITOR", None)
        if default_editor:
            try:
                subprocess.call([default_editor, filename])
            except FileNotFoundError:
                # could not use default editor
                # Bug fix: .format() was never called, so the literal "{}"
                # was printed instead of the editor name.
                print("Your default editor \"{}\" could not be opened.".format(default_editor))
                print("You can manually open \"{}\" if you want to edit it.".format(filename))
        else:
            open_gui_editor(filename)
class CLI(object):
    """Command-line front end for :mod:`mod_manager`.

    Each ``cmd_<action>`` method implements one CLI action; :meth:`run`
    dispatches to them by the first command-line token.
    """

    # One entry per action: action name optionally followed by its argument
    # synopsis; used both for the help table and to derive ACTION_NAMES.
    ACTIONS = [
        "help [action]",
        "list",
        "contents <packname> [packname2]...",
        "edit <packname>",
        "compress <packname>",
        "decompress <base64>",
        "install <packname>",
        "match <server_address>",
        "enabled",
        "enable <modname> [version]",
        "disable <modname>",
        "search <query> [-n <integer>]",
        "credentials <action> [args]",
        "cache <action>",
        "apicache <action>",
        "serv_install <modpacks> [experimental]",
    ]

    # One help line per action name.
    HELP = {
        "help": "If action is present, prints detailed information of the action, otherwise this help message is printed",
        "list": "Lists all available modpacks",
        "contents": "Lists all mods in a modpack",
        "edit": "Opens the specified pack in default text editor",
        "compress": "Makes a base64 digest of the mentioned modpack",
        "decompress": "Unpacks a mod from base64 digest (overrides existing modpacks with the same name)",
        "install": "Despite what is in the mod folder, downloads the newest mods into the specified folder",
        "match": "Match your mod configuration to one in a server, using exactly same versions",
        "enabled": "List enabled mods",
        "enable": "Enables a single mod by name and optionally a version number",
        "disable": "Disable a single mod",
        "search": "Search for mods from the Factorio mod portal. Specify the amount of results with -n parameter. By default 5 results are displayed.",
        "credentials": "Manage mod portal credentials. Actions: set, set [username] [password], clear",
        "cache": "Manage cache. Actions: reset, list",
        "apicache": "Manage api call cache. Actions: reset",
        "serv_install": "Installs the newest server with the chosen modpacks. If '-experimental' or '-e' are present in the command, the newest experimental release is installed."
    }

    # Bare action names ("help", "list", ...) used for dispatch.
    ACTION_NAMES = [a.split()[0] for a in ACTIONS]

    def __init__(self):
        self.mod_manager = mod_manager.ModManager(login_callback=self.login)

    def print_progress_message(self, step):
        """Print a progress step's message without a newline and flush."""
        print(step.message, end="")
        sys.stdout.flush()

    def print_2col_table(self, rows, indent=0, empty_msg=None):
        """Print (left, right) string pairs as two aligned columns.

        If *rows* is empty and *empty_msg* is given, print that instead.
        """
        if rows:
            c1_max_width = max([len(c1) for c1, c2 in rows])
            for c1, c2 in rows:
                print("".join([" "*2*indent, c1, " "*(c1_max_width - len(c1) + 2), c2]))
        elif empty_msg:
            print("({})".format(empty_msg))

    def prompt_credentials(self):
        """Interactively prompt for mod portal username and password."""
        print("")
        print("Logging in to Factorio mod portal")
        print("(Password will not be displayed.)")
        username = input("Username: ")
        password = getpass("Password: ")
        print("")
        return mod_manager.credentials.Credentials(username, password)

    def login(self):
        """Log in to the mod portal, prompting for credentials if none stored."""
        if not mod_manager.credentials.Keyring.credentials_stored:
            cred = self.prompt_credentials()
        else:
            cred = None
        try:
            self.mod_manager.mod_portal.login(cred)
        except LoginError:
            print("Could not log in to the mod portal.")
            exit(1)

    def cmd_help(self, args):
        """Print the action table, or detailed help for one action."""
        if args == []:
            print("")
            print("Usage: {} [action] [args]".format(sys.argv[0]))
            print("")
            self.print_2col_table([(action, self.HELP[action.split()[0]]) for action in self.ACTIONS], indent=1)
            print("")
        elif args[0] in self.ACTION_NAMES:
            action = [a for a in self.ACTIONS if a.startswith(args[0])][0]
            print(action+": "+self.HELP[args[0]])
        else:
            print("Invalid action \"{}\"".format(args[0]))
            exit(1)

    def cmd_list(self, args):
        """Print the name of every available modpack."""
        if len(args) != 0:
            print("Invalid argument count")
            exit(1)
        for p in self.mod_manager.modpacks:
            print(p.name)

    def cmd_contents(self, args):
        """List the mods contained in each named modpack.

        Bug fix: ``matching`` used to be re-initialized inside the argument
        loop, so only the last argument's pack was ever displayed (and the
        duplicate check was dead code).  It now accumulates across all
        arguments, skipping duplicates.
        """
        if len(args) == 0:
            print("Invalid argument count")
            exit(1)
        packs = {p.name: p for p in self.mod_manager.modpacks}
        matching = []
        for arg in args:
            if arg in packs:
                pack = packs[arg]
                if pack not in matching:
                    matching.append(pack)
            else:
                print("Mod pack \"{}\" does not exist.".format(arg))
                exit(1)
        lengths = [len(mod.name) for pack in matching for mod in pack.contents]
        if lengths:
            maxlen = max(lengths)
        for pack in matching:
            print(pack.name)
            if pack.empty:
                print("  (modpack is empty)")
            else:
                for mod in pack.contents:
                    ver = mod.version + " (" + ("fixed" if mod.fixed_version else "floating") + ")"
                    print(" "*2 + mod.name + " "*((maxlen-len(mod.name))+2) + ver)

    def cmd_edit(self, args):
        """Open a modpack file in the default text editor."""
        if len(args) != 1:
            print("Invalid argument count")
            exit(1)
        mp = self.mod_manager.get_pack(args[0])
        open_editor(mp.path)

    def cmd_compress(self, args):
        """Print the base64 digest of an existing modpack."""
        if len(args) != 1:
            print("Invalid argument count")
            exit(1)
        mp = self.mod_manager.get_pack(args[0])
        if mp.exists:
            print(mp.compress())
        else:
            print("Mod pack \"{}\" does not exist.".format(args[0]))
            exit(1)

    def cmd_decompress(self, args):
        """Restore a modpack from its base64 digest (overwrites same name)."""
        if len(args) != 1:
            print("Invalid argument count")
            exit(1)
        self.mod_manager.decompress_modpack(args[0]).save()

    def cmd_install(self, args):
        """Download and install the newest mods of the given modpacks."""
        if args:
            packs = []
            for p in args:
                mp = self.mod_manager.get_pack(p)
                if mp.exists:
                    packs.append(mp)
                else:
                    print("Mod pack \"{}\" does not exist.".format(p))
                    exit(1)
            self.mod_manager.install_packs(packs, self.print_progress_message)
        else:
            print("Invalid argument count")
            exit(1)

    def cmd_match(self, args):
        """Install the exact mod versions used by a running server."""
        if len(args) != 1:
            print("Invalid argument count")
            exit(1)
        try:
            self.mod_manager.install_matching(args[0], callback=self.print_progress_message)
        except ConnectionRefusedError:
            print("Could not connect to the server. Is it running?")
            exit(1)
        except BrokenPipeError:
            print("Could not communicate with the server. Are you using same Factorio version?")
            exit(1)

    def cmd_enabled(self, args):
        """List the currently enabled (installed) mods with versions."""
        if len(args) != 0:
            print("Invalid argument count")
            exit(1)
        self.print_2col_table(
            [(mod.name, mod.version) for mod in self.mod_manager.installed_mods],
            empty_msg="no mods enabled"
        )

    def cmd_search(self, args):
        """Search the mod portal; an optional trailing "-n <int>" (clamped
        to 0..25, default 5) limits the number of results."""
        search_args = " ".join(args)
        wanted_responses = 5
        n_split = search_args.rsplit(" -n ", 1)
        if len(n_split) == 2 and len(n_split[1]):
            try:
                wanted_responses = int(n_split[1])
                wanted_responses = min(max(wanted_responses, 0), 25)
                # drop the trailing "-n <int>" tokens from the query
                search_args = " ".join(args[:-2])
            except ValueError:
                # not an integer after "-n"; treat it as part of the query
                pass
        results = self.mod_manager.mod_portal.search(search_args, n=wanted_responses)
        for i, s in enumerate(results):
            print("{}. {}: {} ({} downloads)".format(i+1, s.name, s.title, s.downloads_count))

    def cmd_credentials(self, args):
        """Manage stored mod portal credentials (set/clear)."""
        if len(args) not in [1, 3]:
            print("Invalid argument count")
            exit(1)
        if args[0] == "clear":
            if len(args) != 1:
                print("Invalid arguments: clear doesn't take any")
                exit(1)
            mod_manager.credentials.Keyring.clear()
        elif args[0] == "set":
            if len(args) == 1:
                c = self.prompt_credentials()
            else:
                c = mod_manager.credentials.Credentials(*args[1:])
            print("Verifying... ", end="")
            sys.stdout.flush()
            try:
                self.mod_manager.mod_portal.login(c)
            except LoginError:
                print("invalid credentials")
                exit(1)
            else:
                print("ok")
            mod_manager.credentials.Keyring.set_credentials(c)
        else:
            print("Invalid action \"{}\"".format(args[0]))
            exit(1)

    def cmd_cache(self, args):
        """Manage the downloaded-mod cache (reset/list)."""
        if len(args) != 1:
            print("Invalid argument count")
            exit(1)
        if args[0] == "reset":
            self.mod_manager.mod_cache.reset()
        elif args[0] == "list":
            self.print_2col_table(
                [(cmod.name, cmod.version) for cmod in self.mod_manager.mod_cache.mods],
                empty_msg="no cached mods"
            )
        else:
            print("Invalid arguments")
            print("Usage: cache <action>")
            print("Actions: reset, list")
            exit(1)

    def cmd_apicache(self, args):
        """Manage the mod portal API call cache (reset)."""
        if len(args) != 1:
            print("Invalid argument count")
            exit(1)
        if args[0] == "reset":
            self.mod_manager.mod_portal.api_cache.reset()
        else:
            print("Invalid arguments")
            print("Usage: apicache reset")
            exit(1)

    def cmd_serv_install(self, args):
        """Install the newest server with the given modpacks; a trailing
        "-e"/"-experimental" selects the newest experimental release."""
        experimental = args[-1] in ["-e", "-experimental"]
        if experimental:
            modpacks = args[:-1]
        else:
            modpacks = args[:]
        mod_manager.server.create_server(modpacks, experimental, self.mod_manager, self.print_progress_message)

    def run(self, cmd):
        """Dispatch *cmd* (argv without the program name) to a cmd_* method."""
        if cmd == []:
            cmd = ["help"]
        if cmd[0] in self.ACTION_NAMES:
            try:
                # resolve the method on this class named "cmd_<action>"
                fn = getattr(self, "cmd_"+cmd[0])
            except AttributeError:
                print("Action not implemented yet.")
                exit(1)
            fn(cmd[1:])
        else:
            print("Invalid action \"{}\"".format(cmd[0]))
            exit(1)
exit(1)
def main():
    """Console entry point: run the CLI against the process arguments."""
    cli = CLI()
    cli.run(sys.argv[1:])
if __name__ == '__main__':
main()
| mit |
Intel-Corporation/tensorflow | tensorflow/python/ops/ragged/ragged_eager_test.py | 13 | 1953 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tf.ragged in eager execution mode."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
from tensorflow.python.framework import ops
from tensorflow.python.ops.ragged import ragged_factory_ops
from tensorflow.python.ops.ragged import ragged_test_util
from tensorflow.python.platform import googletest
class RaggedTensorTest(ragged_test_util.RaggedTensorTestCase,
                       parameterized.TestCase):
  """Eager-mode tests for RaggedTensor construction and str() output."""
  @parameterized.parameters([
      dict(pylist=[[b'a', b'b'], [b'c']]),
      dict(pylist=[[[1, 2], [3]], [[4, 5, 6], [], [7]]]),
      dict(pylist=[[[1, 2], [3, 4]], [[5, 6], [], [7, 8]]], ragged_rank=1),
  ])
  def testRaggedTensorToList(self, pylist, ragged_rank=None):
    # Round-trip: a constant built from a nested python list compares
    # equal to that list.
    rt = ragged_factory_ops.constant(pylist, ragged_rank)
    self.assertRaggedEqual(rt, pylist)
  @parameterized.parameters([
      dict(pylist=[[b'a', b'b'], [b'c']]),
      dict(pylist=[[[1, 2], [3]], [[4, 5, 6], [], [7]]]),
  ])
  def testRaggedTensorStr(self, pylist):
    # In eager mode str() should render the nested list representation.
    rt = ragged_factory_ops.constant(pylist)
    self.assertEqual(str(rt), '<tf.RaggedTensor %s>' % pylist)
if __name__ == '__main__':
ops.enable_eager_execution()
googletest.main()
| apache-2.0 |
pybel/pybel | tests/test_struct/test_node_utils.py | 1 | 4775 | # -*- coding: utf-8 -*-
"""Tests for node utilities."""
import unittest
from pybel import BELGraph
from pybel.constants import INCREASES
from pybel.dsl import ComplexAbundance as g, CompositeAbundance as c, Protein, Reaction
from pybel.examples.various_example import adp, atp, glucose, glucose_6_phosphate, hk1, phosphate, single_reaction_graph
from pybel.struct.node_utils import flatten_list_abundance, reaction_cartesian_expansion
class TestNodeUtils(unittest.TestCase):
"""Test node utilities."""
def test_flatten_complex(self):
"""Test flattening a nested complex."""
p1, p2, p3 = (Protein('N', str(i + 1)) for i in range(3))
pairs = [
# Mainly complexes
(g([p1, p2, p3]), g([p1, p2, p3])), # no nesting
(g([p1, p2, p3]), g([g([p1, p2]), p3])), # one nesting
(g([p1, p2, p3]), g([g([p1]), p2, p3])), # one nesting
(g([p1, p2, p3]), g([g([p1]), g([p2]), p3])), # one nesting
# Mainly composites
(c([p1, p2, p3]), c([p1, p2, p3])), # no nesting
(c([p1, p2, p3]), c([c([p1, p2]), p3])), # one nesting
(c([p1, p2, p3]), c([c([p1]), p2, p3])), # one nesting
(c([p1, p2, p3]), c([c([p1]), c([p2]), p3])), # one nesting
# TODO: mixtures of composites and complexes?
]
for expected, source in pairs:
self.assertEqual(expected, flatten_list_abundance(source))
def test_flatten_reaction(self):
"""Test flattening a reaction."""
single_reaction_graph_copy = single_reaction_graph.copy()
self.assertEqual(single_reaction_graph_copy.number_of_nodes(), 7)
self.assertEqual(single_reaction_graph_copy.number_of_edges(), 7)
reaction_cartesian_expansion(single_reaction_graph_copy)
self.assertEqual(single_reaction_graph_copy.number_of_nodes(), 6)
self.assertEqual(single_reaction_graph_copy.number_of_edges(), 8)
pairs = [
(glucose, INCREASES, glucose_6_phosphate),
(glucose, INCREASES, adp),
(hk1, INCREASES, glucose_6_phosphate),
(hk1, INCREASES, adp),
(atp, INCREASES, glucose_6_phosphate),
(atp, INCREASES, adp),
(phosphate, INCREASES, glucose_6_phosphate),
(phosphate, INCREASES, adp),
]
for source, target, data in single_reaction_graph_copy.edges(data=True):
self.assertIn((source, INCREASES, target), pairs)
def test_flatten_reaction_2(self):
"""Test flattening a qualified reaction."""
node_increases_reaction_graph = BELGraph()
glycolisis_step_1 = Reaction(reactants=[glucose, hk1, atp], products=[glucose_6_phosphate, adp, hk1])
node_increases_reaction_graph.add_increases(glucose_6_phosphate, glycolisis_step_1, citation='X', evidence='X')
self.assertEqual(node_increases_reaction_graph.number_of_nodes(), 6)
self.assertEqual(node_increases_reaction_graph.number_of_edges(), 7)
reaction_cartesian_expansion(node_increases_reaction_graph)
self.assertEqual(node_increases_reaction_graph.number_of_nodes(), 5)
# TODO Fix so unqualified duplicate edges are not created (it should be the 8 edges below)
self.assertEqual(node_increases_reaction_graph.number_of_edges(), 12)
# pairs = [
# (glucose, INCREASES, glucose_6_phosphate),
# (glucose, INCREASES, adp),
# (hk1, INCREASES, glucose_6_phosphate),
# (hk1, INCREASES, adp),
# (atp, INCREASES, glucose_6_phosphate),
# (atp, INCREASES, adp),
# (phosphate, INCREASES, glucose_6_phosphate),
# (phosphate, INCREASES, adp),
# ]
#
# for source, target, data in node_increases_reaction_graph.edges(data=True):
# self.assertIn((source, INCREASES, target), pairs)
def test_flatten_reaction_3(self):
    """Test flattening a graph containing 2 reactions connected to each other."""
    two_reactions_graph = BELGraph()
    reaction_1 = Reaction(reactants=[glucose, atp], products=hk1)
    reaction_2 = Reaction(reactants=glucose_6_phosphate, products=adp)
    two_reactions_graph.add_increases(reaction_1, reaction_2, citation='X', evidence='X')
    # sanity check: 2 reaction nodes + 5 molecule nodes before expansion
    self.assertEqual(two_reactions_graph.number_of_nodes(), 7)
    self.assertEqual(two_reactions_graph.number_of_edges(), 6)
    reaction_cartesian_expansion(two_reactions_graph)
    # TODO Fix so unqualified duplicate edges are not created (it should be the 6 edges below)
    self.assertEqual(two_reactions_graph.number_of_nodes(), 5)
    self.assertEqual(two_reactions_graph.number_of_edges(), 8)
| mit |
awesto/django-shop | shop/transition.py | 1 | 3695 | from urllib.parse import urlparse
from django.contrib.auth.models import AnonymousUser
from django.db import models
from django.http.request import HttpRequest
from post_office import mail
from post_office.models import EmailTemplate
from shop.conf import app_settings
from shop.models.order import BaseOrder
from shop.models.notification import Notification
from shop.serializers.delivery import DeliverySerializer
from shop.serializers.order import OrderDetailSerializer
from shop.signals import email_queued
class EmulateHttpRequest(HttpRequest):
    """
    Use this class to emulate a HttpRequest object, when templates must be rendered
    asynchronously, for instance when an email must be generated out of an Order object.
    """
    def __init__(self, customer, stored_request):
        super().__init__()
        parsed = urlparse(stored_request.get('absolute_base_uri'))
        self.path = self.path_info = parsed.path
        self.environ = {}
        # rebuild the WSGI-style request metadata from the stored snapshot
        self.META.update({
            'PATH_INFO': parsed.path,
            'SCRIPT_NAME': '',
            'HTTP_HOST': parsed.netloc,
            'HTTP_X_FORWARDED_PROTO': parsed.scheme,
            'QUERY_STRING': parsed.query,
            'HTTP_USER_AGENT': stored_request.get('user_agent'),
            'REMOTE_ADDR': stored_request.get('remote_ip'),
        })
        self.method = 'GET'
        language = stored_request.get('language')
        self.LANGUAGE_CODE = self.COOKIES['django_language'] = language
        self.customer = customer
        self.user = AnonymousUser if customer.is_anonymous else customer.user
        self.current_page = None
def transition_change_notification(order):
    """
    This function shall be called, after an Order object performed a transition change.

    For every ``Notification`` configured for the order's new status, render the
    configured email template against the order data and queue the email for
    delivery through django-post_office.

    :param order: a ``BaseOrder`` instance whose status just changed.
    :raises TypeError: if ``order`` does not inherit from ``BaseOrder``.
    """
    if not isinstance(order, BaseOrder):
        raise TypeError("Object order must inherit from class BaseOrder")
    emails_in_queue = False
    for notification in Notification.objects.filter(transition_target=order.status):
        recipient = notification.get_recipient(order)
        if recipient is None:
            continue
        # emulate a request object which behaves similar to that one, when the customer submitted its order
        emulated_request = EmulateHttpRequest(order.customer, order.stored_request)
        customer_serializer = app_settings.CUSTOMER_SERIALIZER(order.customer)
        render_context = {'request': emulated_request, 'render_label': 'email'}
        order_serializer = OrderDetailSerializer(order, context=render_context)
        language = order.stored_request.get('language')
        context = {
            'customer': customer_serializer.data,
            'order': order_serializer.data,
            'ABSOLUTE_BASE_URI': emulated_request.build_absolute_uri().rstrip('/'),
            'render_language': language,
        }
        try:
            # include the most recent delivery, if this order has any
            latest_delivery = order.delivery_set.latest()
            context['latest_delivery'] = DeliverySerializer(latest_delivery, context=render_context).data
        except (AttributeError, models.ObjectDoesNotExist):
            pass
        try:
            # prefer a template translated into the order's language
            template = notification.mail_template.translated_templates.get(language=language)
        except EmailTemplate.DoesNotExist:
            template = notification.mail_template
        attachments = {}
        for notiatt in notification.notificationattachment_set.all():
            attachments[notiatt.attachment.original_filename] = notiatt.attachment.file.file
        mail.send(recipient, template=template, context=context,
                  attachments=attachments, render_on_delivery=True)
        emails_in_queue = True
    if emails_in_queue:
        # signal listeners that at least one email is now waiting in the queue
        email_queued()
| bsd-3-clause |
chanceraine/nupic | external/linux32/lib/python2.6/site-packages/matplotlib/legend.py | 69 | 30705 | """
Place a legend on the axes at location loc. Labels are a
sequence of strings and loc can be a string or an integer
specifying the legend location
The location codes are
'best' : 0, (only implemented for axis legends)
'upper right' : 1,
'upper left' : 2,
'lower left' : 3,
'lower right' : 4,
'right' : 5,
'center left' : 6,
'center right' : 7,
'lower center' : 8,
'upper center' : 9,
'center' : 10,
Return value is a sequence of text, line instances that make
up the legend
"""
from __future__ import division
import warnings
import numpy as np
from matplotlib import rcParams
from matplotlib.artist import Artist
from matplotlib.cbook import is_string_like, iterable, silent_list, safezip
from matplotlib.font_manager import FontProperties
from matplotlib.lines import Line2D
from matplotlib.patches import Patch, Rectangle, Shadow, FancyBboxPatch
from matplotlib.collections import LineCollection, RegularPolyCollection
from matplotlib.transforms import Bbox
from matplotlib.offsetbox import HPacker, VPacker, PackerBase, TextArea, DrawingArea
class Legend(Artist):
"""
Place a legend on the axes at location loc. Labels are a
sequence of strings and loc can be a string or an integer
specifying the legend location
The location codes are::
'best' : 0, (only implemented for axis legends)
'upper right' : 1,
'upper left' : 2,
'lower left' : 3,
'lower right' : 4,
'right' : 5,
'center left' : 6,
'center right' : 7,
'lower center' : 8,
'upper center' : 9,
'center' : 10,
loc can be a tuple of the normalized coordinate values with
respect its parent.
Return value is a sequence of text, line instances that make
up the legend
"""
codes = {'best' : 0, # only implemented for axis legends
'upper right' : 1,
'upper left' : 2,
'lower left' : 3,
'lower right' : 4,
'right' : 5,
'center left' : 6,
'center right' : 7,
'lower center' : 8,
'upper center' : 9,
'center' : 10,
}
zorder = 5
def __str__(self):
    """Return a short, fixed description of this artist."""
    return "Legend"
def __init__(self, parent, handles, labels,
loc = None,
numpoints = None, # the number of points in the legend line
markerscale = None, # the relative size of legend markers vs. original
scatterpoints = 3, # TODO: may be an rcParam
scatteryoffsets=None,
prop = None, # properties for the legend texts
# the following dimensions are in axes coords
pad = None, # deprecated; use borderpad
labelsep = None, # deprecated; use labelspacing
handlelen = None, # deprecated; use handlelength
handletextsep = None, # deprecated; use handletextpad
axespad = None, # deprecated; use borderaxespad
# spacing & pad defined as a fractionof the font-size
borderpad = None, # the whitespace inside the legend border
labelspacing=None, #the vertical space between the legend entries
handlelength=None, # the length of the legend handles
handletextpad=None, # the pad between the legend handle and text
borderaxespad=None, # the pad between the axes and legend border
columnspacing=None, # spacing between columns
ncol=1, # number of columns
mode=None, # mode for horizontal distribution of columns. None, "expand"
fancybox=None, # True use a fancy box, false use a rounded box, none use rc
shadow = None,
):
"""
- *parent* : the artist that contains the legend
- *handles* : a list of artists (lines, patches) to add to the legend
- *labels* : a list of strings to label the legend
Optional keyword arguments:
================ ==================================================================
Keyword Description
================ ==================================================================
loc a location code or a tuple of coordinates
numpoints the number of points in the legend line
prop the font property
markerscale the relative size of legend markers vs. original
fancybox if True, draw a frame with a round fancybox. If None, use rc
shadow if True, draw a shadow behind legend
scatteryoffsets a list of yoffsets for scatter symbols in legend
borderpad the fractional whitespace inside the legend border
labelspacing the vertical space between the legend entries
handlelength the length of the legend handles
handletextpad the pad between the legend handle and text
borderaxespad the pad between the axes and legend border
columnspacing the spacing between columns
================ ==================================================================
The dimensions of pad and spacing are given as a fraction of the
fontsize. Values from rcParams will be used if None.
"""
from matplotlib.axes import Axes # local import only to avoid circularity
from matplotlib.figure import Figure # local import only to avoid circularity
Artist.__init__(self)
if prop is None:
self.prop=FontProperties(size=rcParams["legend.fontsize"])
else:
self.prop=prop
self.fontsize = self.prop.get_size_in_points()
propnames=['numpoints', 'markerscale', 'shadow', "columnspacing",
"scatterpoints"]
localdict = locals()
for name in propnames:
if localdict[name] is None:
value = rcParams["legend."+name]
else:
value = localdict[name]
setattr(self, name, value)
# Take care the deprecated keywords
deprecated_kwds = {"pad":"borderpad",
"labelsep":"labelspacing",
"handlelen":"handlelength",
"handletextsep":"handletextpad",
"axespad":"borderaxespad"}
# convert values of deprecated keywords (ginve in axes coords)
# to new vaules in a fraction of the font size
# conversion factor
bbox = parent.bbox
axessize_fontsize = min(bbox.width, bbox.height)/self.fontsize
for k, v in deprecated_kwds.items():
# use deprecated value if not None and if their newer
# counter part is None.
if localdict[k] is not None and localdict[v] is None:
warnings.warn("Use '%s' instead of '%s'." % (v, k),
DeprecationWarning)
setattr(self, v, localdict[k]*axessize_fontsize)
continue
# Otherwise, use new keywords
if localdict[v] is None:
setattr(self, v, rcParams["legend."+v])
else:
setattr(self, v, localdict[v])
del localdict
self._ncol = ncol
if self.numpoints <= 0:
raise ValueError("numpoints must be >= 0; it was %d"% numpoints)
# introduce y-offset for handles of the scatter plot
if scatteryoffsets is None:
self._scatteryoffsets = np.array([3./8., 4./8., 2.5/8.])
else:
self._scatteryoffsets = np.asarray(scatteryoffsets)
reps = int(self.numpoints / len(self._scatteryoffsets)) + 1
self._scatteryoffsets = np.tile(self._scatteryoffsets, reps)[:self.scatterpoints]
# _legend_box is an OffsetBox instance that contains all
# legend items and will be initialized from _init_legend_box()
# method.
self._legend_box = None
if isinstance(parent,Axes):
self.isaxes = True
self.set_figure(parent.figure)
elif isinstance(parent,Figure):
self.isaxes = False
self.set_figure(parent)
else:
raise TypeError("Legend needs either Axes or Figure as parent")
self.parent = parent
if loc is None:
loc = rcParams["legend.loc"]
if not self.isaxes and loc in [0,'best']:
loc = 'upper right'
if is_string_like(loc):
if loc not in self.codes:
if self.isaxes:
warnings.warn('Unrecognized location "%s". Falling back on "best"; '
'valid locations are\n\t%s\n'
% (loc, '\n\t'.join(self.codes.keys())))
loc = 0
else:
warnings.warn('Unrecognized location "%s". Falling back on "upper right"; '
'valid locations are\n\t%s\n'
% (loc, '\n\t'.join(self.codes.keys())))
loc = 1
else:
loc = self.codes[loc]
if not self.isaxes and loc == 0:
warnings.warn('Automatic legend placement (loc="best") not implemented for figure legend. '
'Falling back on "upper right".')
loc = 1
self._loc = loc
self._mode = mode
# We use FancyBboxPatch to draw a legend frame. The location
# and size of the box will be updated during the drawing time.
self.legendPatch = FancyBboxPatch(
xy=(0.0, 0.0), width=1., height=1.,
facecolor='w', edgecolor='k',
mutation_scale=self.fontsize,
snap=True
)
# The width and height of the legendPatch will be set (in the
# draw()) to the length that includes the padding. Thus we set
# pad=0 here.
if fancybox is None:
fancybox = rcParams["legend.fancybox"]
if fancybox == True:
self.legendPatch.set_boxstyle("round",pad=0,
rounding_size=0.2)
else:
self.legendPatch.set_boxstyle("square",pad=0)
self._set_artist_props(self.legendPatch)
self._drawFrame = True
# init with null renderer
self._init_legend_box(handles, labels)
self._last_fontsize_points = self.fontsize
def _set_artist_props(self, a):
    """
    set the boilerplate props for artists added to axes
    """
    a.set_figure(self.figure)
    # propagate the figure to the legend's children as well
    for c in self.get_children():
        c.set_figure(self.figure)
    a.set_transform(self.get_transform())
def _findoffset_best(self, width, height, xdescent, ydescent, renderer):
    "Helper function to locate the legend at its best position"
    ox, oy = self._find_best_position(width, height, renderer)
    return ox+xdescent, oy+ydescent
def _findoffset_loc(self, width, height, xdescent, ydescent, renderer):
    "Helper function to locate the legend using the location code"
    if iterable(self._loc) and len(self._loc)==2:
        # when loc is a tuple of axes(or figure) coordinates.
        fx, fy = self._loc
        bbox = self.parent.bbox
        x, y = bbox.x0 + bbox.width * fx, bbox.y0 + bbox.height * fy
    else:
        # loc is a location code: anchor a bbox of the legend's size
        bbox = Bbox.from_bounds(0, 0, width, height)
        x, y = self._get_anchored_bbox(self._loc, bbox, self.parent.bbox, renderer)
    return x+xdescent, y+ydescent
def draw(self, renderer):
    "Draw everything that belongs to the legend"
    if not self.get_visible(): return
    self._update_legend_box(renderer)
    renderer.open_group('legend')
    # find_offset function will be provided to _legend_box and
    # _legend_box will draw itself at the location of the return
    # value of the find_offset.
    if self._loc == 0:
        _findoffset = self._findoffset_best
    else:
        _findoffset = self._findoffset_loc
    def findoffset(width, height, xdescent, ydescent):
        return _findoffset(width, height, xdescent, ydescent, renderer)
    self._legend_box.set_offset(findoffset)
    fontsize = renderer.points_to_pixels(self.fontsize)
    # if mode == fill, set the width of the legend_box to the
    # width of the parent (minus pads)
    if self._mode in ["expand"]:
        pad = 2*(self.borderaxespad+self.borderpad)*fontsize
        self._legend_box.set_width(self.parent.bbox.width-pad)
    if self._drawFrame:
        # update the location and size of the legend
        bbox = self._legend_box.get_window_extent(renderer)
        self.legendPatch.set_bounds(bbox.x0, bbox.y0,
                                    bbox.width, bbox.height)
        self.legendPatch.set_mutation_scale(fontsize)
        if self.shadow:
            # draw a drop shadow offset by (2, -2) points behind the frame
            shadow = Shadow(self.legendPatch, 2, -2)
            shadow.draw(renderer)
        self.legendPatch.draw(renderer)
    self._legend_box.draw(renderer)
    renderer.close_group('legend')
def _approx_text_height(self, renderer=None):
    """
    Return the approximate height of the text, in points when *renderer*
    is None, otherwise converted to pixels.  This is used to place the
    legend handle.
    """
    if renderer is None:
        return self.fontsize
    return renderer.points_to_pixels(self.fontsize)
def _init_legend_box(self, handles, labels):
"""
Initiallize the legend_box. The legend_box is an instance of
the OffsetBox, which is packed with legend handles and
texts. Once packed, their location is calculated during the
drawing time.
"""
fontsize = self.fontsize
# legend_box is a HPacker, horizontally packed with
# columns. Each column is a VPacker, vertically packed with
# legend items. Each legend item is HPacker packed with
# legend handleBox and labelBox. handleBox is an instance of
# offsetbox.DrawingArea which contains legend handle. labelBox
# is an instance of offsetbox.TextArea which contains legend
# text.
text_list = [] # the list of text instances
handle_list = [] # the list of text instances
label_prop = dict(verticalalignment='baseline',
horizontalalignment='left',
fontproperties=self.prop,
)
labelboxes = []
for l in labels:
textbox = TextArea(l, textprops=label_prop,
multilinebaseline=True, minimumdescent=True)
text_list.append(textbox._text)
labelboxes.append(textbox)
handleboxes = []
# The approximate height and descent of text. These values are
# only used for plotting the legend handle.
height = self._approx_text_height() * 0.7
descent = 0.
# each handle needs to be drawn inside a box of (x, y, w, h) =
# (0, -descent, width, height). And their corrdinates should
# be given in the display coordinates.
# NOTE : the coordinates will be updated again in
# _update_legend_box() method.
# The transformation of each handle will be automatically set
# to self.get_trasnform(). If the artist does not uses its
# default trasnform (eg, Collections), you need to
# manually set their transform to the self.get_transform().
for handle in handles:
if isinstance(handle, RegularPolyCollection):
npoints = self.scatterpoints
else:
npoints = self.numpoints
if npoints > 1:
# we put some pad here to compensate the size of the
# marker
xdata = np.linspace(0.3*fontsize,
(self.handlelength-0.3)*fontsize,
npoints)
xdata_marker = xdata
elif npoints == 1:
xdata = np.linspace(0, self.handlelength*fontsize, 2)
xdata_marker = [0.5*self.handlelength*fontsize]
if isinstance(handle, Line2D):
ydata = ((height-descent)/2.)*np.ones(xdata.shape, float)
legline = Line2D(xdata, ydata)
legline.update_from(handle)
self._set_artist_props(legline) # after update
legline.set_clip_box(None)
legline.set_clip_path(None)
legline.set_drawstyle('default')
legline.set_marker('None')
handle_list.append(legline)
legline_marker = Line2D(xdata_marker, ydata[:len(xdata_marker)])
legline_marker.update_from(handle)
self._set_artist_props(legline_marker)
legline_marker.set_clip_box(None)
legline_marker.set_clip_path(None)
legline_marker.set_linestyle('None')
# we don't want to add this to the return list because
# the texts and handles are assumed to be in one-to-one
# correpondence.
legline._legmarker = legline_marker
elif isinstance(handle, Patch):
p = Rectangle(xy=(0., 0.),
width = self.handlelength*fontsize,
height=(height-descent),
)
p.update_from(handle)
self._set_artist_props(p)
p.set_clip_box(None)
p.set_clip_path(None)
handle_list.append(p)
elif isinstance(handle, LineCollection):
ydata = ((height-descent)/2.)*np.ones(xdata.shape, float)
legline = Line2D(xdata, ydata)
self._set_artist_props(legline)
legline.set_clip_box(None)
legline.set_clip_path(None)
lw = handle.get_linewidth()[0]
dashes = handle.get_dashes()[0]
color = handle.get_colors()[0]
legline.set_color(color)
legline.set_linewidth(lw)
legline.set_dashes(dashes)
handle_list.append(legline)
elif isinstance(handle, RegularPolyCollection):
#ydata = self._scatteryoffsets
ydata = height*self._scatteryoffsets
size_max, size_min = max(handle.get_sizes()),\
min(handle.get_sizes())
# we may need to scale these sizes by "markerscale"
# attribute. But other handle types does not seem
# to care about this attribute and it is currently ignored.
if self.scatterpoints < 4:
sizes = [.5*(size_max+size_min), size_max,
size_min]
else:
sizes = (size_max-size_min)*np.linspace(0,1,self.scatterpoints)+size_min
p = type(handle)(handle.get_numsides(),
rotation=handle.get_rotation(),
sizes=sizes,
offsets=zip(xdata_marker,ydata),
transOffset=self.get_transform(),
)
p.update_from(handle)
p.set_figure(self.figure)
p.set_clip_box(None)
p.set_clip_path(None)
handle_list.append(p)
else:
handle_list.append(None)
handlebox = DrawingArea(width=self.handlelength*fontsize,
height=height,
xdescent=0., ydescent=descent)
handle = handle_list[-1]
handlebox.add_artist(handle)
if hasattr(handle, "_legmarker"):
handlebox.add_artist(handle._legmarker)
handleboxes.append(handlebox)
# We calculate number of lows in each column. The first
# (num_largecol) columns will have (nrows+1) rows, and remaing
# (num_smallcol) columns will have (nrows) rows.
nrows, num_largecol = divmod(len(handleboxes), self._ncol)
num_smallcol = self._ncol-num_largecol
# starting index of each column and number of rows in it.
largecol = safezip(range(0, num_largecol*(nrows+1), (nrows+1)),
[nrows+1] * num_largecol)
smallcol = safezip(range(num_largecol*(nrows+1), len(handleboxes), nrows),
[nrows] * num_smallcol)
handle_label = safezip(handleboxes, labelboxes)
columnbox = []
for i0, di in largecol+smallcol:
# pack handleBox and labelBox into itemBox
itemBoxes = [HPacker(pad=0,
sep=self.handletextpad*fontsize,
children=[h, t], align="baseline")
for h, t in handle_label[i0:i0+di]]
# minimumdescent=False for the text of the last row of the column
itemBoxes[-1].get_children()[1].set_minimumdescent(False)
# pack columnBox
columnbox.append(VPacker(pad=0,
sep=self.labelspacing*fontsize,
align="baseline",
children=itemBoxes))
if self._mode == "expand":
mode = "expand"
else:
mode = "fixed"
sep = self.columnspacing*fontsize
self._legend_box = HPacker(pad=self.borderpad*fontsize,
sep=sep, align="baseline",
mode=mode,
children=columnbox)
self._legend_box.set_figure(self.figure)
self.texts = text_list
self.legendHandles = handle_list
def _update_legend_box(self, renderer):
"""
Update the dimension of the legend_box. This is required
becuase the paddings, the hadle size etc. depends on the dpi
of the renderer.
"""
# fontsize in points.
fontsize = renderer.points_to_pixels(self.fontsize)
if self._last_fontsize_points == fontsize:
# no update is needed
return
# each handle needs to be drawn inside a box of
# (x, y, w, h) = (0, -descent, width, height).
# And their corrdinates should be given in the display coordinates.
# The approximate height and descent of text. These values are
# only used for plotting the legend handle.
height = self._approx_text_height(renderer) * 0.7
descent = 0.
for handle in self.legendHandles:
if isinstance(handle, RegularPolyCollection):
npoints = self.scatterpoints
else:
npoints = self.numpoints
if npoints > 1:
# we put some pad here to compensate the size of the
# marker
xdata = np.linspace(0.3*fontsize,
(self.handlelength-0.3)*fontsize,
npoints)
xdata_marker = xdata
elif npoints == 1:
xdata = np.linspace(0, self.handlelength*fontsize, 2)
xdata_marker = [0.5*self.handlelength*fontsize]
if isinstance(handle, Line2D):
legline = handle
ydata = ((height-descent)/2.)*np.ones(xdata.shape, float)
legline.set_data(xdata, ydata)
legline_marker = legline._legmarker
legline_marker.set_data(xdata_marker, ydata[:len(xdata_marker)])
elif isinstance(handle, Patch):
p = handle
p.set_bounds(0., 0.,
self.handlelength*fontsize,
(height-descent),
)
elif isinstance(handle, RegularPolyCollection):
p = handle
ydata = height*self._scatteryoffsets
p.set_offsets(zip(xdata_marker,ydata))
# correction factor
cor = fontsize / self._last_fontsize_points
# helper function to iterate over all children
def all_children(parent):
yield parent
for c in parent.get_children():
for cc in all_children(c): yield cc
#now update paddings
for box in all_children(self._legend_box):
if isinstance(box, PackerBase):
box.pad = box.pad * cor
box.sep = box.sep * cor
elif isinstance(box, DrawingArea):
box.width = self.handlelength*fontsize
box.height = height
box.xdescent = 0.
box.ydescent=descent
self._last_fontsize_points = fontsize
def _auto_legend_data(self):
    """
    Returns list of vertices and extents covered by the plot.

    Returns a two long list.

    First element is a list of (x, y) vertices (in
    display-coordinates) covered by all the lines and line
    collections, in the legend's handles.

    Second element is a list of bounding boxes for all the patches in
    the legend's handles.
    """
    assert self.isaxes # should always hold because function is only called internally
    ax = self.parent
    vertices = []
    bboxes = []
    lines = []
    for handle in ax.lines:
        assert isinstance(handle, Line2D)
        path = handle.get_path()
        trans = handle.get_transform()
        tpath = trans.transform_path(path)
        lines.append(tpath)
    for handle in ax.patches:
        assert isinstance(handle, Patch)
        if isinstance(handle, Rectangle):
            # rectangles: transform the data-space bbox to display space
            transform = handle.get_data_transform()
            bboxes.append(handle.get_bbox().transformed(transform))
        else:
            transform = handle.get_transform()
            bboxes.append(handle.get_path().get_extents(transform))
    # NOTE(review): *vertices* is always returned empty here; only the
    # transformed line paths and patch bboxes are actually collected.
    return [vertices, bboxes, lines]
def draw_frame(self, b):
    """Set whether the legend frame patch is drawn (*b* is a boolean)."""
    self._drawFrame = b
def get_children(self):
    """Return the list of child artists (the legend box, once created)."""
    return [self._legend_box] if self._legend_box else []
def get_frame(self):
    """Return the FancyBboxPatch instance used to frame the legend."""
    return self.legendPatch
def get_lines(self):
    """Return the list of lines.Line2D instances in the legend."""
    return [h for h in self.legendHandles if isinstance(h, Line2D)]
def get_patches(self):
    """Return a silent_list of the patch instances in the legend."""
    return silent_list('Patch', [h for h in self.legendHandles if isinstance(h, Patch)])
def get_texts(self):
    """Return a silent_list of the text.Text instances in the legend."""
    return silent_list('Text', self.texts)
def get_window_extent(self):
    """Return the extent (bounding box) of the legend frame patch."""
    return self.legendPatch.get_window_extent()
def _get_anchored_bbox(self, loc, bbox, parentbbox, renderer):
    """
    Place the *bbox* inside the *parentbbox* according to a given
    location code. Return the (x,y) coordinate of the bbox.

    - loc: a location code in range(1, 11).
      This corresponds to the possible values for self._loc, excluding "best".

    - bbox: bbox to be placed, display coordinate units.
    - parentbbox: a parent box which will contain the bbox. In
      display coordinates.
    """
    assert loc in range(1,11) # called only internally
    BEST, UR, UL, LL, LR, R, CL, CR, LC, UC, C = range(11)
    anchor_coefs={UR:"NE",
                  UL:"NW",
                  LL:"SW",
                  LR:"SE",
                  R:"E",
                  CL:"W",
                  CR:"E",
                  LC:"S",
                  UC:"N",
                  C:"C"}
    c = anchor_coefs[loc]
    fontsize = renderer.points_to_pixels(self.fontsize)
    # shrink the container by borderaxespad (in pixels) before anchoring
    container = parentbbox.padded(-(self.borderaxespad) * fontsize)
    anchored_box = bbox.anchored(c, container=container)
    return anchored_box.x0, anchored_box.y0
def _find_best_position(self, width, height, renderer, consider=None):
    """
    Determine the best location to place the legend.

    `consider` is a list of (x, y) pairs to consider as a potential
    lower-left corner of the legend. All are display coords.  If it is
    None (the default), all anchored positions derived from the
    location codes in ``self.codes`` are considered.
    """
    assert self.isaxes # should always hold because function is only called internally
    verts, bboxes, lines = self._auto_legend_data()
    bbox = Bbox.from_bounds(0, 0, width, height)
    if consider is None:
        # bug fix: previously the *consider* argument was ignored because
        # this list unconditionally overwrote it
        consider = [self._get_anchored_bbox(x, bbox, self.parent.bbox,
                                            renderer)
                    for x in range(1, len(self.codes))]
    #tx, ty = self.legendPatch.get_x(), self.legendPatch.get_y()
    candidates = []
    for l, b in consider:
        legendBox = Bbox.from_bounds(l, b, width, height)
        # badness counts how many plot elements the candidate overlaps
        badness = legendBox.count_contains(verts)
        badness += legendBox.count_overlaps(bboxes)
        for line in lines:
            if line.intersects_bbox(legendBox):
                badness += 1
        ox, oy = l, b
        if badness == 0:
            # a collision-free position: take it immediately
            return ox, oy
        candidates.append((badness, (l, b)))
    # rather than use min() or list.sort(), do this so that we are assured
    # that in the case of two equal badnesses, the one first considered is
    # returned.
    # NOTE: list.sort() is stable.But leave as it is for now. -JJL
    minCandidate = candidates[0]
    for candidate in candidates:
        if candidate[0] < minCandidate[0]:
            minCandidate = candidate
    ox, oy = minCandidate[1]
    return ox, oy
| agpl-3.0 |
naokimiyasaka/sublime-text | Backup/20140106101518/BracketHighlighter/bh_swapping.py | 15 | 1471 | import sublime
import sublime_plugin
import bh_wrapping
class SwapBrackets(bh_wrapping.WrapBrackets):
    """Wrap helper that swaps the selection's brackets via wrap entries."""

    def wrap(self, wrap_entry):
        """Wrap using the bracket definition at index *wrap_entry*."""
        # a negative index means the quick panel was cancelled
        if wrap_entry < 0:
            return

        self._style = ["inline"]
        self.brackets = self._brackets[wrap_entry]
        self.wrap_brackets(0)
class SwapBracketsCommand(sublime_plugin.WindowCommand):
    """Window command that swaps the brackets surrounding the selection."""

    def finalize(self, callback):
        """Run *callback* once BracketHighlighter is no longer busy."""
        if self.view is not None:
            if not self.view.settings().get("BracketHighlighterBusy", False):
                callback()
            else:
                # poll every 100 ms until the plugin reports idle
                sublime.set_timeout(lambda: self.finalize(callback), 100)

    def swap_brackets(self, value):
        """Quick-panel callback: swap to the bracket pair at index *value*."""
        # a negative index means the quick panel was cancelled
        if value < 0:
            return

        self.brackets = self.wrap._brackets[value]
        self.window.run_command(
            "bh_key",
            {
                "plugin": {
                    "type": ["__all__"],
                    "command": "bh_modules.swapbrackets"
                }
            }
        )
        self.view = self.window.active_view()
        # defer the actual wrap until the highlighter has finished
        sublime.set_timeout(lambda: self.finalize(lambda: self.wrap.wrap(value)), 100)

    def run(self):
        """Entry point: show the quick panel of available bracket pairs."""
        view = self.window.active_view()
        if view is None:
            return

        self.wrap = SwapBrackets(view, "bh_swapping.sublime-settings", "swapping")
        if len(self.wrap._menu):
            self.window.show_quick_panel(
                self.wrap._menu,
                self.swap_brackets
            )
| mit |
stevewardle/rose | metomi/rosie/db_create.py | 4 | 8364 | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (C) 2012-2019 British Crown (Met Office) & Contributors.
#
# This file is part of Rose, a framework for meteorological suites.
#
# Rose is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Rose is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Rose. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
"""Create database files for Rosie web service."""
import os
import sqlalchemy as al
import sys
from metomi.rose.fs_util import FileSystemUtil
from metomi.rose.opt_parse import RoseOptionParser
from metomi.rose.popen import RosePopener
from metomi.rose.reporter import Reporter, Event
from metomi.rose.resource import ResourceLocator
from metomi.rosie.db import (
LATEST_TABLE_NAME, MAIN_TABLE_NAME, META_TABLE_NAME, OPTIONAL_TABLE_NAME)
from metomi.rosie.svn_post_commit import RosieSvnPostCommitHook
class RosieDatabaseCreateEvent(Event):
    """Event raised when a Rosie database is created."""

    def __str__(self):
        return "{0}: DB created.".format(self.args[0])
class RosieDatabaseCreateSkipEvent(Event):
    """Event raised when a Rosie database creation is skipped."""

    KIND = Event.KIND_ERR

    def __str__(self):
        return "{0}: DB already exists, skip.".format(self.args[0])
class RosieDatabaseLoadEvent(Event):
    """Event raised when a Rosie database has loaded with I of N revisions."""

    LEVEL = Event.V

    def __str__(self):
        name, revision, n_revisions = self.args
        return "%s: DB loaded, r%d of %d." % (name, revision, n_revisions)
class RosieDatabaseLoadSkipEvent(Event):
    """Event raised when a Rosie database load is skipped."""

    KIND = Event.KIND_ERR

    def __str__(self):
        return "{0}: DB not loaded.".format(self.args[0])
class RosieDatabaseInitiator(object):
"""Initiate a database file from the repository information."""
LEN_DB_STRING = 1024
LEN_STATUS = 2
SQLITE_PREFIX = "sqlite:///"
def __init__(self, event_handler=None, popen=None, fs_util=None):
    """Set up helper objects, creating defaults for any not supplied."""
    self.event_handler = self._dummy if event_handler is None else event_handler
    self.popen = RosePopener(self.event_handler) if popen is None else popen
    self.fs_util = (FileSystemUtil(self.event_handler) if fs_util is None
                    else fs_util)
    self.post_commit_hook = RosieSvnPostCommitHook(
        event_handler=self.event_handler,
        popen=self.popen)
def _dummy(self, *args, **kwargs):
    """Does nothing; default no-op event handler."""
    pass
def create_and_load(self, db_url, repos_path):
    """Create web service database and load content from repository."""
    try:
        self.create(db_url)
    except al.exc.OperationalError:
        # DB already exists; "create" has already reported the skip event
        return
    self.load(repos_path)
__call__ = create_and_load
def handle_event(self, *args, **kwargs):
    """Handle an event using the runner's event handler, if callable."""
    if not callable(self.event_handler):
        return None
    return self.event_handler(*args, **kwargs)
def create(self, db_url):
"""Create database tables."""
if db_url.startswith(self.SQLITE_PREFIX):
db_url_dir = os.path.dirname(db_url[len(self.SQLITE_PREFIX):])
self.fs_util.makedirs(db_url_dir)
try:
engine = al.create_engine(db_url)
metadata = al.MetaData()
db_string = al.String(self.LEN_DB_STRING)
tables = []
tables.append(al.Table(
LATEST_TABLE_NAME, metadata,
al.Column("idx", db_string, nullable=False,
primary_key=True),
al.Column("branch", db_string, nullable=False,
primary_key=True),
al.Column("revision", al.Integer, nullable=False,
primary_key=True)))
tables.append(al.Table(
MAIN_TABLE_NAME, metadata,
al.Column("idx", db_string, nullable=False,
primary_key=True),
al.Column("branch", db_string, nullable=False,
primary_key=True),
al.Column("revision", al.Integer, nullable=False,
primary_key=True),
al.Column("owner", db_string, nullable=False),
al.Column("project", db_string, nullable=False),
al.Column("title", db_string, nullable=False),
al.Column("author", db_string, nullable=False),
al.Column("date", al.Integer, nullable=False),
al.Column("status", al.String(self.LEN_STATUS),
nullable=False),
al.Column("from_idx", db_string)))
tables.append(al.Table(
OPTIONAL_TABLE_NAME, metadata,
al.Column("idx", db_string, nullable=False,
primary_key=True),
al.Column("branch", db_string, nullable=False,
primary_key=True),
al.Column("revision", al.Integer, nullable=False,
primary_key=True),
al.Column("name", db_string, nullable=False,
primary_key=True),
al.Column("value", db_string)))
tables.append(al.Table(
META_TABLE_NAME, metadata,
al.Column("name", db_string, primary_key=True,
nullable=False),
al.Column("value", db_string)))
for table in tables:
table.create(engine)
engine.connect()
self.handle_event(RosieDatabaseCreateEvent(db_url))
except al.exc.OperationalError as exc:
self.handle_event(RosieDatabaseCreateSkipEvent(db_url))
raise exc
def load(self, repos_path):
"""Load database contents from a repository."""
if not repos_path or not os.path.exists(repos_path):
self.handle_event(RosieDatabaseLoadSkipEvent(repos_path))
return
repos_path = os.path.abspath(repos_path)
youngest = int(self.popen("svnlook", "youngest", repos_path)[0])
revision = 1
while revision <= youngest:
if sys.stdout.isatty():
sys.stdout.write(
"\r%s... loading revision %d of %d" %
(Reporter.PREFIX_INFO, revision, youngest))
sys.stdout.flush()
self.post_commit_hook.run(
repos_path, str(revision), no_notification=True)
event = RosieDatabaseLoadEvent(repos_path, revision, youngest)
if revision == youngest:
# Check if any new revisions have been added.
youngest = self.popen("svnlook", "youngest", repos_path)[0]
youngest = int(youngest)
if revision == youngest:
event.level = event.DEFAULT
if sys.stdout.isatty():
sys.stdout.write("\r")
self.handle_event(event)
revision += 1
return revision
def main():
    """rosa db-create: create and load one DB per configured prefix."""
    db_conf = ResourceLocator.default().get_conf().get(["rosie-db"])
    if db_conf is not None:
        opts = RoseOptionParser().parse_args()[0]
        reporter = Reporter(opts.verbosity - opts.quietness)
        init = RosieDatabaseInitiator(event_handler=reporter)
        conf = ResourceLocator.default().get_conf()
        # Each "db.PREFIX" setting names a database URL, paired with a
        # "repos.PREFIX" setting giving the repository to load it from.
        for key in db_conf.value:
            if key.startswith("db."):
                prefix = key.replace("db.", "", 1)
                db_url = conf.get_value(["rosie-db", "db." + prefix])
                repos_path = conf.get_value(["rosie-db", "repos." + prefix])
                init(db_url, repos_path)
if __name__ == "__main__":
    main()
| gpl-3.0 |
jiobert/python | Gildner_Tyler/Assignments/registration copy/server.py | 2 | 1324 | from flask import Flask, render_template, request, redirect, session, flash
import re
EMAIL_REGEX = re.compile(r'^[a-zA-Z0-9.+_-]+@[a-zA-Z0-9._-]+\.[a-zA-Z]+$')
app = Flask(__name__)
app.secret_key = '123456'
@app.route('/', methods=['GET'])
def index():
    """Render the registration form."""
    return render_template('index.html')
@app.route('/process', methods=['POST'])
def submit():
    """Validate the registration form, flashing the first problem found.

    On success the submitted values are stored in the session; in all
    cases the user is redirected back to the form.
    """
    if len(request.form['email']) < 1:
        flash("PUT YOUR EMAIL IN THE FUCKING BLANK!")
    elif not EMAIL_REGEX.match(request.form['email']):
        flash("Invalid Email Address!")
    elif not request.form['first_name'].isalpha():
        flash('FIX YOUR FIRST NAME!')
    elif not request.form['last_name'].isalpha():
        flash('FIX YOUR LAST NAME!')
    # Bug fix: the original compared the password *string* itself to the
    # integer 8 instead of checking its length.
    elif len(request.form['password']) < 8:
        flash('Password length is fucked')
    elif request.form['password'] != request.form['confirm_password']:
        flash('You are definitely making this difficult for yourself, passwords need to match...')
    else:
        flash('Success')
        session['email'] = request.form['email']
        session['first_name'] = request.form['first_name']
        session['last_name'] = request.form['last_name']
        # NOTE(review): storing the plaintext password (twice) in the
        # session is insecure -- hash it (e.g. bcrypt) before persisting.
        session['password'] = request.form['password']
        session['confirm_password'] = request.form['confirm_password']
    return redirect('/')
app.run(debug=True)
| mit |
vhanla/CudaText | app/py/sys/chardet/sbcharsetprober.py | 269 | 5657 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetprober import CharSetProber
from .enums import CharacterCategory, ProbingState, SequenceLikelihood
class SingleByteCharSetProber(CharSetProber):
    """Probe for a single-byte charset using a per-language model.

    The model supplies a char-to-order map (character frequency ranks)
    and a precedence matrix of two-character sequence likelihoods; the
    confidence is derived from how typical the observed sequences are.
    """
    # Only the SAMPLE_SIZE most frequent character ranks take part in
    # the sequence statistics.
    SAMPLE_SIZE = 64
    SB_ENOUGH_REL_THRESHOLD = 1024 # 0.25 * SAMPLE_SIZE^2
    POSITIVE_SHORTCUT_THRESHOLD = 0.95
    NEGATIVE_SHORTCUT_THRESHOLD = 0.05
    def __init__(self, model, reversed=False, name_prober=None):
        super(SingleByteCharSetProber, self).__init__()
        self._model = model
        # TRUE if we need to reverse every pair in the model lookup
        self._reversed = reversed
        # Optional auxiliary prober for name decision
        self._name_prober = name_prober
        self._last_order = None
        self._seq_counters = None
        self._total_seqs = None
        self._total_char = None
        self._freq_char = None
        self.reset()
    def reset(self):
        """Reset all statistics, ready for a fresh byte stream."""
        super(SingleByteCharSetProber, self).reset()
        # char order of last character
        self._last_order = 255
        self._seq_counters = [0] * SequenceLikelihood.get_num_categories()
        self._total_seqs = 0
        self._total_char = 0
        # characters that fall in our sampling range
        self._freq_char = 0
    @property
    def charset_name(self):
        # Defer to the auxiliary name prober when one is configured.
        if self._name_prober:
            return self._name_prober.charset_name
        else:
            return self._model['charset_name']
    @property
    def language(self):
        if self._name_prober:
            return self._name_prober.language
        else:
            return self._model.get('language')
    def feed(self, byte_str):
        """Accumulate sequence statistics from *byte_str*; return state."""
        if not self._model['keep_english_letter']:
            byte_str = self.filter_international_words(byte_str)
        if not byte_str:
            return self.state
        char_to_order_map = self._model['char_to_order_map']
        for i, c in enumerate(byte_str):
            # XXX: Order is in range 1-64, so one would think we want 0-63 here,
            # but that leads to 27 more test failures than before.
            order = char_to_order_map[c]
            # XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but
            # CharacterCategory.SYMBOL is actually 253, so we use CONTROL
            # to make it closer to the original intent. The only difference
            # is whether or not we count digits and control characters for
            # _total_char purposes.
            if order < CharacterCategory.CONTROL:
                self._total_char += 1
            if order < self.SAMPLE_SIZE:
                self._freq_char += 1
                # Both this character and the previous one are frequent:
                # record the likelihood category of the pair.
                if self._last_order < self.SAMPLE_SIZE:
                    self._total_seqs += 1
                    if not self._reversed:
                        i = (self._last_order * self.SAMPLE_SIZE) + order
                        model = self._model['precedence_matrix'][i]
                    else: # reverse the order of the letters in the lookup
                        i = (order * self.SAMPLE_SIZE) + self._last_order
                        model = self._model['precedence_matrix'][i]
                    self._seq_counters[model] += 1
            self._last_order = order
        charset_name = self._model['charset_name']
        if self.state == ProbingState.DETECTING:
            if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD:
                # Enough data seen: shortcut to a verdict if confidence is
                # decisively high or low.
                confidence = self.get_confidence()
                if confidence > self.POSITIVE_SHORTCUT_THRESHOLD:
                    self.logger.debug('%s confidence = %s, we have a winner',
                                      charset_name, confidence)
                    self._state = ProbingState.FOUND_IT
                elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD:
                    self.logger.debug('%s confidence = %s, below negative '
                                      'shortcut threshhold %s', charset_name,
                                      confidence,
                                      self.NEGATIVE_SHORTCUT_THRESHOLD)
                    self._state = ProbingState.NOT_ME
        return self.state
    def get_confidence(self):
        """Ratio of positively-likely sequences, scaled into [0.01, 0.99]."""
        r = 0.01
        if self._total_seqs > 0:
            r = ((1.0 * self._seq_counters[SequenceLikelihood.POSITIVE]) /
                 self._total_seqs / self._model['typical_positive_ratio'])
            r = r * self._freq_char / self._total_char
            if r >= 1.0:
                r = 0.99
        return r
| mpl-2.0 |
dongjoon-hyun/spark | python/pyspark/mllib/tests/test_feature.py | 23 | 7216 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from math import sqrt
import unittest
from numpy import array, abs, tile
from pyspark.mllib.linalg import SparseVector, DenseVector, Vectors
from pyspark.mllib.linalg.distributed import RowMatrix
from pyspark.mllib.feature import HashingTF, IDF, StandardScaler, ElementwiseProduct, Word2Vec
from pyspark.testing.mllibutils import MLlibTestCase
class FeatureTest(MLlibTestCase):
    def test_idf_model(self):
        """Fitting IDF on 11-dim term vectors yields one weight per term."""
        data = [
            Vectors.dense([1, 2, 6, 0, 2, 3, 1, 1, 0, 0, 3]),
            Vectors.dense([1, 3, 0, 1, 3, 0, 0, 2, 0, 0, 1]),
            Vectors.dense([1, 4, 1, 0, 0, 4, 9, 0, 1, 2, 0]),
            Vectors.dense([2, 1, 0, 3, 0, 0, 5, 0, 2, 3, 9])
        ]
        model = IDF().fit(self.sc.parallelize(data, 2))
        idf = model.idf()
        self.assertEqual(len(idf), 11)
class Word2VecTests(MLlibTestCase):
    def test_word2vec_setters(self):
        """Chained setters should update the corresponding attributes."""
        model = Word2Vec() \
            .setVectorSize(2) \
            .setLearningRate(0.01) \
            .setNumPartitions(2) \
            .setNumIterations(10) \
            .setSeed(1024) \
            .setMinCount(3) \
            .setWindowSize(6)
        self.assertEqual(model.vectorSize, 2)
        self.assertTrue(model.learningRate < 0.02)
        self.assertEqual(model.numPartitions, 2)
        self.assertEqual(model.numIterations, 10)
        self.assertEqual(model.seed, 1024)
        self.assertEqual(model.minCount, 3)
        self.assertEqual(model.windowSize, 6)
    def test_word2vec_get_vectors(self):
        """Fit on a triangular corpus and check the vocabulary size.

        NOTE(review): 3 presumably reflects the default minimum word
        count, which only "a", "b" and "c" reach here -- confirm against
        Word2Vec defaults.
        """
        data = [
            ["a", "b", "c", "d", "e", "f", "g"],
            ["a", "b", "c", "d", "e", "f"],
            ["a", "b", "c", "d", "e"],
            ["a", "b", "c", "d"],
            ["a", "b", "c"],
            ["a", "b"],
            ["a"]
        ]
        model = Word2Vec().fit(self.sc.parallelize(data))
        self.assertEqual(len(model.getVectors()), 3)
class StandardScalerTests(MLlibTestCase):
    def test_model_setters(self):
        """Setters return the model (chainable) and affect transform."""
        data = [
            [1.0, 2.0, 3.0],
            [2.0, 3.0, 4.0],
            [3.0, 4.0, 5.0]
        ]
        model = StandardScaler().fit(self.sc.parallelize(data))
        self.assertIsNotNone(model.setWithMean(True))
        self.assertIsNotNone(model.setWithStd(True))
        # With mean/std enabled, the smallest row standardizes to -1s.
        self.assertEqual(model.transform([1.0, 2.0, 3.0]), DenseVector([-1.0, -1.0, -1.0]))
    def test_model_transform(self):
        """With default settings the transform leaves the vector unchanged."""
        data = [
            [1.0, 2.0, 3.0],
            [2.0, 3.0, 4.0],
            [3.0, 4.0, 5.0]
        ]
        model = StandardScaler().fit(self.sc.parallelize(data))
        self.assertEqual(model.transform([1.0, 2.0, 3.0]), DenseVector([1.0, 2.0, 3.0]))
class ElementwiseProductTests(MLlibTestCase):
    def test_model_transform(self):
        """Element-wise product scales dense and sparse vectors alike."""
        weight = Vectors.dense([3, 2, 1])
        densevec = Vectors.dense([4, 5, 6])
        sparsevec = Vectors.sparse(3, [0], [1])
        eprod = ElementwiseProduct(weight)
        self.assertEqual(eprod.transform(densevec), DenseVector([12, 10, 6]))
        # Sparsity is preserved: only index 0 is populated in the result.
        self.assertEqual(
            eprod.transform(sparsevec), SparseVector(3, [0], [3]))
class HashingTFTest(MLlibTestCase):
    def test_binary_term_freqs(self):
        """In binary mode repeated terms still hash to a frequency of 1.0."""
        hashingTF = HashingTF(100).setBinary(True)
        doc = "a a b c c c".split(" ")
        n = hashingTF.numFeatures
        output = hashingTF.transform(doc).toArray()
        expected = Vectors.sparse(n, {hashingTF.indexOf("a"): 1.0,
                                      hashingTF.indexOf("b"): 1.0,
                                      hashingTF.indexOf("c"): 1.0}).toArray()
        for i in range(0, n):
            self.assertAlmostEqual(output[i], expected[i], 14, "Error at " + str(i) +
                                   ": expected " + str(expected[i]) + ", got " + str(output[i]))
class DimensionalityReductionTests(MLlibTestCase):
    # Dense and sparse encodings of the same 4x3 matrix.
    denseData = [
        Vectors.dense([0.0, 1.0, 2.0]),
        Vectors.dense([3.0, 4.0, 5.0]),
        Vectors.dense([6.0, 7.0, 8.0]),
        Vectors.dense([9.0, 0.0, 1.0])
    ]
    sparseData = [
        Vectors.sparse(3, [(1, 1.0), (2, 2.0)]),
        Vectors.sparse(3, [(0, 3.0), (1, 4.0), (2, 5.0)]),
        Vectors.sparse(3, [(0, 6.0), (1, 7.0), (2, 8.0)]),
        Vectors.sparse(3, [(0, 9.0), (2, 1.0)])
    ]
    def assertEqualUpToSign(self, vecA, vecB):
        # Singular/principal vectors are only defined up to sign, so
        # accept either vecA == vecB or vecA == -vecB.
        eq1 = vecA - vecB
        eq2 = vecA + vecB
        self.assertTrue(sum(abs(eq1)) < 1e-6 or sum(abs(eq2)) < 1e-6)
    def test_svd(self):
        """SVD factor shapes match (m x k), (k), (n x k) for k = 1..3."""
        denseMat = RowMatrix(self.sc.parallelize(self.denseData))
        sparseMat = RowMatrix(self.sc.parallelize(self.sparseData))
        m = 4
        n = 3
        for mat in [denseMat, sparseMat]:
            for k in range(1, 4):
                rm = mat.computeSVD(k, computeU=True)
                self.assertEqual(rm.s.size, k)
                self.assertEqual(rm.U.numRows(), m)
                self.assertEqual(rm.U.numCols(), k)
                self.assertEqual(rm.V.numRows, n)
                self.assertEqual(rm.V.numCols, k)
        # Test that U returned is None if computeU is set to False.
        self.assertEqual(mat.computeSVD(1).U, None)
        # Test that low rank matrices cannot have number of singular values
        # greater than a limit.
        rm = RowMatrix(self.sc.parallelize(tile([1, 2, 3], (3, 1))))
        self.assertEqual(rm.computeSVD(3, False, 1e-6).s.size, 1)
    def test_pca(self):
        """PCA returns n x k components matching the known answer up to sign."""
        expected_pcs = array([
            [0.0, 1.0, 0.0],
            [sqrt(2.0) / 2.0, 0.0, sqrt(2.0) / 2.0],
            [sqrt(2.0) / 2.0, 0.0, -sqrt(2.0) / 2.0]
        ])
        n = 3
        denseMat = RowMatrix(self.sc.parallelize(self.denseData))
        sparseMat = RowMatrix(self.sc.parallelize(self.sparseData))
        for mat in [denseMat, sparseMat]:
            for k in range(1, 4):
                pcs = mat.computePrincipalComponents(k)
                self.assertEqual(pcs.numRows, n)
                self.assertEqual(pcs.numCols, k)
                # We can just test the updated principal component for equality.
                self.assertEqualUpToSign(pcs.toArray()[:, k - 1], expected_pcs[:, k - 1])
if __name__ == "__main__":
    from pyspark.mllib.tests.test_feature import *  # noqa: F401
    try:
        # Emit JUnit-style XML reports when xmlrunner is installed.
        import xmlrunner  # type: ignore[import]
        testRunner = xmlrunner.XMLTestRunner(output='target/test-reports', verbosity=2)
    except ImportError:
        # Fall back to the default text test runner.
        testRunner = None
    unittest.main(testRunner=testRunner, verbosity=2)
| apache-2.0 |
nuncjo/odoo | addons/project_issue_sheet/__init__.py | 442 | 1105 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import project_issue_sheet
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
BengtOFFIS/simpy | docs/examples/code/carwash.py | 2 | 2869 | """
Carwash example.
Covers:
- Waiting for other processes
- Resources: Resource
Scenario:
A carwash has a limited number of washing machines and defines
a washing processes that takes some (random) time.
Car processes arrive at the carwash at a random time. If one washing
machine is available, they start the washing process and wait for it
to finish. If not, they wait until they can use one.
"""
import random
import simpy
RANDOM_SEED = 42
NUM_MACHINES = 2 # Number of machines in the carwash
WASHTIME = 5 # Minutes it takes to clean a car
T_INTER = 7 # Create a car every ~7 minutes
SIM_TIME = 20 # Simulation time in minutes
class Carwash(object):
    """A carwash has a limited number of machines (``NUM_MACHINES``) to
    clean cars in parallel.

    Cars have to request one of the machines. When they got one, they
    can start the washing process and wait for it to finish (which
    takes ``washtime`` minutes).
    """
    def __init__(self, env, num_machines, washtime):
        self.env = env
        # Machines are modelled as a shared resource with limited capacity.
        self.machine = simpy.Resource(env, num_machines)
        self.washtime = washtime

    def wash(self, car):
        """The washing process: clean the given ``car``."""
        # Bug fix: use the per-instance wash time instead of the module
        # constant WASHTIME, which silently ignored the ``washtime``
        # constructor argument.
        yield self.env.timeout(self.washtime)
        print("Carwash removed %d%% of %s's dirt." %
              (random.randint(50, 99), car))
def car(env, name, cw):
    """Model one car: arrive at the carwash ``cw``, queue for a machine,
    get washed, and leave for good."""
    print('%s arrives at the carwash at %.2f.' % (name, env.now))
    with cw.machine.request() as slot:
        # Wait until one of the washing machines is free.
        yield slot
        print('%s enters the carwash at %.2f.' % (name, env.now))
        # Run the wash process to completion before leaving.
        wash_process = cw.wash(name)
        yield env.process(wash_process)
        print('%s leaves the carwash at %.2f.' % (name, env.now))
def setup(env, num_machines, washtime, t_inter):
    """Create a carwash, seed it with four cars, then keep generating
    new cars roughly every ``t_inter`` minutes."""
    carwash = Carwash(env, num_machines, washtime)
    # Four cars are already waiting when the simulation starts.
    car_count = 4
    for idx in range(car_count):
        env.process(car(env, 'Car %d' % idx, carwash))
    # Then new cars arrive at randomized intervals forever.
    while True:
        yield env.timeout(random.randint(t_inter - 2, t_inter + 2))
        env.process(car(env, 'Car %d' % car_count, carwash))
        car_count += 1
# Setup and start the simulation
print('Carwash')
print('Check out http://youtu.be/fXXmeP9TvBg while simulating ... ;-)')
random.seed(RANDOM_SEED) # This helps reproducing the results
# Create an environment and start the setup process
env = simpy.Environment()
env.process(setup(env, NUM_MACHINES, WASHTIME, T_INTER))
# Execute!
env.run(until=SIM_TIME)
| mit |
FreeScienceCommunity/or-tools | examples/python/data/nonogram_regular/nonogram_t2.py | 74 | 1134 | # Copyright 2010 Hakan Kjellerstrand hakank@bonetmail.com
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# http:#www.cs.mu.oz.au/433/tenpenki.html
# Note: This problem has 2 solutions.
#
rows = 6
row_rule_len = 6
row_rules = [
[0, 0, 0, 2, 2, 3],
[1, 1, 1, 1, 1, 1],
[0, 0, 1, 1, 1, 1],
[0, 0, 0, 1, 1, 3],
[0, 1, 1, 1, 1, 1],
[0, 0, 0, 2, 2, 1]]
cols = 14
col_rule_len = 3
col_rules = [
[0, 0, 4],
[0, 1, 1],
[0, 1, 1],
[0, 1, 1],
[0, 0, 0],
[0, 1, 1],
[1, 1, 1],
[1, 1, 1],
[0, 1, 1],
[0, 0, 0],
[0, 0, 6],
[0, 1, 1],
[0, 1, 1],
[0, 0, 2]]
| apache-2.0 |
UKPLab/semeval2017-scienceie | code/convNet.py | 1 | 7292 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
from extras import VSM, read_and_map
from representation import VeryStupidCBOWMapper, CharMapper
import sys, numpy as np,os
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import confusion_matrix
from sklearn.metrics import precision_recall_fscore_support
from keras.layers import Dense, Dropout, Activation, Embedding
from keras.models import Sequential
from keras.utils.np_utils import to_categorical
from keras.layers import Convolution1D, GlobalMaxPooling1D, Lambda, Merge
from keras.preprocessing import sequence
from keras import backend as K
# Character context window sizes: left (L), mid (M) and right (R).
# Overridable via argv[5:8]; otherwise the defaults below apply.
# (The original assigned ``maxlen`` four times before this point; those
# dead intermediate values were removed -- only L + M + R is ever used.)
try:
    L = int(sys.argv[5])
    M = int(sys.argv[6])
    R = int(sys.argv[7])
except IndexError:
    L = 30
    M = 50
    R = 30
maxlen = L + M + R
# this is a simple cnn
# if you would want to use it below, you would have to do
# X_train = X_train.reshape(len(X_train),input_shape[0],input_shape[1])
def build_cnn(input_shape, output_dim,nb_filter):
    """Conv1D -> global max pool -> dense tanh -> softmax classifier,
    operating on already-vectorized (timesteps, context_dim) inputs."""
    clf = Sequential()
    clf.add(Convolution1D(nb_filter=nb_filter,
                          filter_length=4,border_mode="valid",activation="relu",subsample_length=1,input_shape=input_shape))
    clf.add(GlobalMaxPooling1D())
    clf.add(Dense(100))
    clf.add(Dropout(0.2))
    clf.add(Activation("tanh"))
    clf.add(Dense(output_dim=output_dim, activation='softmax'))
    clf.compile(optimizer='adagrad',
                loss='categorical_crossentropy',
                metrics=['accuracy'])
    return clf
# just one filter
def build_cnn_char(input_dim, output_dim,nb_filter):
    """Character-level CNN: embedding -> Conv1D -> global max pool -> softmax.

    ``input_dim`` is the character vocabulary size; inputs are integer
    character sequences of length ``maxlen``.
    """
    clf = Sequential()
    clf.add(Embedding(input_dim,
                      32, # character embedding size
                      input_length=maxlen,
                      dropout=0.2))
    clf.add(Convolution1D(nb_filter=nb_filter,
                          filter_length=3,border_mode="valid",activation="relu",subsample_length=1))
    clf.add(GlobalMaxPooling1D())
    clf.add(Dense(100))
    clf.add(Dropout(0.2))
    clf.add(Activation("tanh"))
    clf.add(Dense(output_dim=output_dim, activation='softmax'))
    clf.compile(optimizer='adagrad',
                loss='categorical_crossentropy',
                metrics=['accuracy'])
    return clf
def _char_branch(input_dim, input_length, nb_filter, filter_size):
    """One embedding + Conv1D + max-pool branch over a character window.

    Shared by the left/centre/right context branches below, which
    previously duplicated this code three times verbatim.
    """
    branch = Sequential()
    branch.add(Embedding(input_dim,
                         32, # character embedding size
                         input_length=input_length,
                         dropout=0.2))
    branch.add(Convolution1D(nb_filter=nb_filter,
                             filter_length=filter_size,border_mode="valid",activation="relu",subsample_length=1))
    branch.add(GlobalMaxPooling1D())
    branch.add(Dense(100))
    branch.add(Dropout(0.2))
    branch.add(Activation("tanh"))
    return branch


# three identical branches (left/centre/right character windows),
# concatenated and fed to a softmax layer
def build_cnn_char_threeModels(input_dim, output_dim,nb_filter,filter_size=3):
    """Character CNN with separate branches for the L, M and R windows."""
    left = _char_branch(input_dim, L, nb_filter, filter_size)
    center = _char_branch(input_dim, M, nb_filter, filter_size)
    right = _char_branch(input_dim, R, nb_filter, filter_size)
    clf = Sequential()
    clf.add(Merge([left,center,right],mode="concat"))
    clf.add(Dense(output_dim=output_dim, activation='softmax'))
    clf.compile(optimizer='adagrad',
                loss='categorical_crossentropy',
                metrics=['accuracy'])
    return clf
def max_1d(X):
    """Max over axis 1 (time); used as a pooling Lambda in the complex CNN."""
    return K.max(X,axis=1)
# multiple filters
def build_cnn_char_complex(input_dim, output_dim,nb_filter):
    """Character CNN with parallel 2-gram and 3-gram convolution branches.

    Both branches share a single random embedding layer and a max-over-time
    pooling Lambda; their outputs are concatenated before classification.
    """
    randomEmbeddingLayer = Embedding(input_dim,32, input_length=maxlen,dropout=0.1)
    poolingLayer = Lambda(max_1d, output_shape=(nb_filter,))
    conv_filters = []
    for n_gram in range(2,4):
        ngramModel = Sequential()
        ngramModel.add(randomEmbeddingLayer)
        ngramModel.add(Convolution1D(nb_filter=nb_filter,
                                     filter_length=n_gram,
                                     border_mode="valid",
                                     activation="relu",
                                     subsample_length=1))
        ngramModel.add(poolingLayer)
        conv_filters.append(ngramModel)
    clf = Sequential()
    clf.add(Merge(conv_filters,mode="concat"))
    clf.add(Activation("relu"))
    clf.add(Dense(100))
    clf.add(Dropout(0.1))
    clf.add(Activation("tanh"))
    clf.add(Dense(output_dim=output_dim, activation='softmax'))
    clf.compile(optimizer='adagrad',
                loss='categorical_crossentropy',
                metrics=['accuracy'])
    return clf
def acc(correct, total):
    """Fraction of correct predictions, as a float."""
    return float(correct) / total
# example argline:
# python convNet.py ../scienceie2017_train/train2 ../scienceie2017_dev/dev ../resources/vsm/glove.6B/glove.6B.100d.txt
# NOTE: this script is Python 2 (print statements, xrange below).
if __name__=="__main__":
    train_src = sys.argv[1]
    dev_src = sys.argv[2]
    # vsm_path = sys.argv[3]
    vsm_path = None
    print("Loading VSM")
    vsm = VSM(vsm_path)
    # NOTE(review): this try/except is inverted -- ``csize = 2`` can never
    # raise IndexError, so sys.argv[4] is never consulted.
    try:
        csize = 2
    except IndexError:
        csize = int(sys.argv[4])
    try:
        n_filter = int(sys.argv[8])
    except IndexError:
        n_filter = 250
    try:
        filter_size = int(sys.argv[9])
    except IndexError:
        filter_size = 3
    # "document" mode disables sentence-boundary handling in the mapper.
    if len(sys.argv)>10 and sys.argv[10]=="document":
        SB = False
    else:
        SB = True
    mapper = CharMapper(vsm,csize,L=L,M=M,R=R,sentence_boundaries=SB)
    print("Reading training data")
    X_train, y_train, y_values, _ = read_and_map(train_src, mapper)
    X_dev, y_dev_gold, _, estrings = read_and_map(dev_src, mapper, y_values)
    vocabSize = mapper.curVal
    print(X_train.shape)
    print(y_train.shape)
    #sys.exit(1)
    print("Trainig a model")
    timesteps = 2*csize + 1 # left, right, center
    context_dim = 100
    input_shape = (timesteps,context_dim)
    # NOTE(review): the first clf is immediately overwritten by the
    # three-model variant; the build_cnn_char call is dead.
    clf = build_cnn_char(vocabSize+1, len(y_values)+1,n_filter)
    clf = build_cnn_char_threeModels(vocabSize+1, len(y_values)+1,n_filter)
    # Split each row into left/centre/right character windows.
    X_left = X_train[:,:L]
    X_center = X_train[:,L:L+M]
    X_right = X_train[:,L+M:L+M+R]
    print L,M,R,X_train.shape,X_left.shape,X_center.shape,X_right.shape,y_train,y_values
    clf.fit([X_left,X_center,X_right], to_categorical(y_train, len(y_values)+1), verbose=1, nb_epoch=15)
    print("Reading test data")
    print("Testing")
    X_dev_left = X_dev[:,:L]
    X_dev_center = X_dev[:,L:L+M]
    X_dev_right = X_dev[:,L+M:L+M+R]
    print(X_dev.shape,X_dev_left.shape,X_dev_center.shape,X_dev_right.shape)
    y_dev_auto = clf.predict_classes([X_dev_left,X_dev_center,X_dev_right]) # for LogisticRegression just do predict()
    print "==PREDICTING=="
    for i in xrange(len(y_dev_auto)):
        print y_values[y_dev_auto[i]]
| apache-2.0 |
fernandezcuesta/ansible | test/units/modules/network/vyos/vyos_module.py | 56 | 3257 | # (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch
from ansible.module_utils import basic
from ansible.module_utils._text import to_bytes
def set_module_args(args):
    """Serialize *args* into the global AnsibleModule argument blob."""
    args = json.dumps({'ANSIBLE_MODULE_ARGS': args})
    basic._ANSIBLE_ARGS = to_bytes(args)
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
    """Load and cache a fixture file, parsing it as JSON when possible.

    Returns the parsed object for JSON fixtures, otherwise the raw text.
    """
    path = os.path.join(fixture_path, name)
    if path in fixture_data:
        return fixture_data[path]
    with open(path) as f:
        data = f.read()
    try:
        data = json.loads(data)
    except ValueError:
        # Bug fix: narrowed from a bare ``except:`` (which also swallowed
        # KeyboardInterrupt/SystemExit). Non-JSON fixtures keep raw text.
        pass
    fixture_data[path] = data
    return data
class AnsibleExitJson(Exception):
    """Raised by the patched exit_json to capture module results."""
    pass
class AnsibleFailJson(Exception):
    """Raised by the patched fail_json to capture module failures."""
    pass
class TestVyosModule(unittest.TestCase):
    """Base class for vyos module unit tests.

    Subclasses set ``self.module`` and override ``load_fixtures``; the
    helpers below run the module and capture its exit/fail payload.
    """
    def execute_module(self, failed=False, changed=False, commands=None, sort=True, defaults=False):
        """Run the module and assert on failure, changed flag and commands."""
        self.load_fixtures(commands)
        if failed:
            result = self.failed()
            self.assertTrue(result['failed'], result)
        else:
            result = self.changed(changed)
            self.assertEqual(result['changed'], changed, result)
        if commands is not None:
            # Compare order-insensitively unless the caller asks otherwise.
            if sort:
                self.assertEqual(sorted(commands), sorted(result['commands']), result['commands'])
            else:
                self.assertEqual(commands, result['commands'], result['commands'])
        return result
    def failed(self):
        """Run the module expecting fail_json; return its kwargs."""
        def fail_json(*args, **kwargs):
            kwargs['failed'] = True
            raise AnsibleFailJson(kwargs)
        with patch.object(basic.AnsibleModule, 'fail_json', fail_json):
            with self.assertRaises(AnsibleFailJson) as exc:
                self.module.main()
        result = exc.exception.args[0]
        self.assertTrue(result['failed'], result)
        return result
    def changed(self, changed=False):
        """Run the module expecting exit_json; return its kwargs."""
        def exit_json(*args, **kwargs):
            if 'changed' not in kwargs:
                kwargs['changed'] = False
            raise AnsibleExitJson(kwargs)
        with patch.object(basic.AnsibleModule, 'exit_json', exit_json):
            with self.assertRaises(AnsibleExitJson) as exc:
                self.module.main()
        result = exc.exception.args[0]
        self.assertEqual(result['changed'], changed, result)
        return result
    def load_fixtures(self, commands=None):
        """Hook for subclasses to stub connection/fixture data."""
        pass
| gpl-3.0 |
massmutual/pystruct | examples/plot_exact_learning.py | 5 | 1333 | """
=========================================
Efficient exact learning of 1-slack SSVMs
=========================================
This example illustrates the role of approximate inference and caching
in exact learning of a 1-slack SSVM.
Please see plot_objective_curve.py for an interpretation of the curves.
We start learning by using an undergenerating inference method,
QPBO-based alpha expansion. One the algorithm can not find a violated
constraint any more, we switch to a less efficient but exact inference
procedure, branch-and-bound based on AD3.
The switch to AD3 can be seen in the graph after the (approximate)
primal objective and the cutting plane lower bound touch. (zoom in)
After the switch to exact inference, the red circles show the true
primal objective.
"""
from pystruct.models import DirectionalGridCRF
import pystruct.learners as ssvm
from pystruct.datasets import generate_blocks_multinomial
from pystruct.plot_learning import plot_learning
X, Y = generate_blocks_multinomial(noise=2, n_samples=20, seed=1)
crf = DirectionalGridCRF(inference_method="qpbo", neighborhood=4)
clf = ssvm.OneSlackSSVM(model=crf, n_jobs=-1, inference_cache=100,
show_loss_every=10,
switch_to=("ad3", {'branch_and_bound': True}))
clf.fit(X, Y)
plot_learning(clf, time=False)
| bsd-2-clause |
Coolexe/shooter-ics-crc-3.0.16-294f767 | arch/ia64/scripts/unwcheck.py | 13143 | 1714 | #!/usr/bin/python
#
# Usage: unwcheck.py FILE
#
# This script checks the unwind info of each function in file FILE
# and verifies that the sum of the region-lengths matches the total
# length of the function.
#
# Based on a shell/awk script originally written by Harish Patil,
# which was converted to Perl by Matthew Chapman, which was converted
# to Python by David Mosberger.
#
import os
import re
import sys
# Require exactly one argument: the object file to check.
if len(sys.argv) != 2:
    print "Usage: %s FILE" % sys.argv[0]
    sys.exit(2)
readelf = os.getenv("READELF", "readelf")
# Matches "readelf -u" function headers: "<func>: [0xstart-0xend]".
start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]")
# Matches "rlen=N" region-length annotations in the unwind info.
rlen_pattern = re.compile(".*rlen=([0-9]+)")
def check_func (func, slots, rlen_sum):
    # A function's unwind info is consistent when the sum of its region
    # lengths equals the number of instruction slots it occupies.
    if slots != rlen_sum:
        global num_errors
        num_errors += 1
        # Unnamed function: fall back to its address range (relies on
        # the caller-scope globals ``start`` and ``end``).
        if not func: func = "[%#x-%#x]" % (start, end)
        print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum)
    return
# Running totals over the whole file.
num_funcs = 0
num_errors = 0
# State for the function currently being scanned.
func = False
slots = 0
rlen_sum = 0
for line in os.popen("%s -u %s" % (readelf, sys.argv[1])):
    m = start_pattern.match(line)
    if m:
        # New function header: first validate the previous function.
        check_func(func, slots, rlen_sum)
        func = m.group(1)
        start = long(m.group(2), 16)
        end = long(m.group(3), 16)
        # IA-64 packs 3 instruction slots into each 16-byte bundle.
        slots = 3 * (end - start) / 16
        rlen_sum = 0L
        num_funcs += 1
    else:
        m = rlen_pattern.match(line)
        if m:
            rlen_sum += long(m.group(1))
# Validate the final function in the file.
check_func(func, slots, rlen_sum)
if num_errors == 0:
    print "No errors detected in %u functions." % num_funcs
else:
    if num_errors > 1:
        err="errors"
    else:
        err="error"
    print "%u %s detected in %u functions." % (num_errors, err, num_funcs)
    sys.exit(1)
| gpl-2.0 |
JaneliaSciComp/Neuroptikon | Source/library/library.py | 1 | 1804 | # Copyright (c) 2010 Howard Hughes Medical Institute.
# All rights reserved.
# Use is subject to Janelia Farm Research Campus Software Copyright 1.1 license terms.
# http://license.janelia.org/license/jfrc_copyright_1_1.html
from library_item import LibraryItem
from library_frame import LibraryFrame
from pydispatch import dispatcher
from itertools import groupby
class Library(object):
def __init__(self):
self._library = {}
self._frame = LibraryFrame()
def add(self, item):
if not issubclass(item.__class__, LibraryItem):
raise ValueError, gettext('Library items must be instances of a subclass of LibraryItem')
if item.__class__.__name__ in self._library:
# This class of item has been added before.
dict = self._library[item.__class__.__name__]
else:
# Create and retain a new dictionary for this class of item.
dict = {}
self._library[item.__class__.__name__] = dict
# Add a method to ourself that returns the full list of items of this class.
setattr(self, item.__class__.listProperty(), lambda: sorted([value for value, group in groupby(dict.values())], cmp=lambda x,y: cmp(x.name.lower(), y.name.lower())))
# Add a method to ourself that performs a lookup of items of this class.
setattr(self, item.__class__.lookupProperty(), lambda itemId: dict.get(itemId, None))
self._frame.addItemClass(item.__class__)
dict[item.identifier] = item
for synonym in item.synonyms:
dict[synonym] = item
dispatcher.send(('addition', item.__class__), self)
def browse(self):
self._frame.Show()
self._frame.Raise()
| bsd-3-clause |
3bot/3bot-hook | threebot_hook/models.py | 1 | 1773 | # -*- coding: utf-8 -*-
from django import dispatch
from django.contrib.sites.models import Site
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from rest_framework.authtoken.models import Token
from threebot.models import Workflow
from threebot.models import Worker
from threebot.models import ParameterList
@python_2_unicode_compatible
class Hook(models.Model):
    """Maps an incoming webhook URL onto a 3bot workflow/worker/parameter-list triple."""

    slug = models.SlugField(max_length=255)
    # Originating repository owner (GitHub-style); optional.
    user = models.CharField(max_length=255, blank=True, null=True)
    repo = models.CharField(
        max_length=255, blank=True, null=True,
        help_text=u'Leave blank. Field is not used in the current version.')
    secret = models.CharField(
        max_length=255, blank=True, null=True,
        help_text=u'Leave blank. Field is not used in the current version.')
    workflow = models.ForeignKey(Workflow)
    worker = models.ForeignKey(Worker)
    param_list = models.ForeignKey(ParameterList)

    def get_hook_url(self):
        # URL identifier of the form "<workflow>-<worker>-<paramlist>-<slug>".
        return "%d-%d-%d-%s" % (self.workflow.id, self.worker.id, self.param_list.id, self.slug)

    def __str__(self):
        return "%s (%d)" % (self.get_hook_url(), self.pk)

    def make_full_url(self, user):
        # A DRF token is created on demand so each user gets a personal URL.
        # NOTE(review): unlike get_hook_url() this URL omits the slug --
        # confirm the receiving view resolves "<workflow>-<worker>-<paramlist>".
        token, created = Token.objects.get_or_create(user=user)
        return "https://%s/hooks/%s/%s-%s-%s/" % (Site.objects.get_current().domain, token, self.workflow.id, self.worker.id, self.param_list.id)

    class Meta():
        verbose_name = _("Hook")
        verbose_name_plural = _("Hooks")
        db_table = 'threebot_hook'
        unique_together = ("workflow", "worker", "param_list")
class HookSignal(dispatch.Signal):
    # Plain Signal subclass; the distinct type lets receivers target
    # hook-related events specifically.
    pass


# Fired immediately before / after a hook triggers its workflow.
pre_hook_signal = HookSignal()
post_hook_signal = HookSignal()
| bsd-3-clause |
borg-project/borg | borg/tools/get_features.py | 1 | 2297 | """@author: Bryan Silverthorn <bcs@cargo-cult.org>"""
import os.path
import csv
import borg
import borg.distributors
logger = borg.get_logger(__name__, default_level = "INFO")
def features_for_path(domain, task_path):
    """Compute the features of one task; returns (path, names, values)."""
    # bring back relevant globals -- this function may run on a remote
    # worker process where module-level state is not available
    import os.path

    import borg

    logger = borg.get_logger(__name__, default_level = "INFO")

    # collect the features, tracking the CPU time spent doing so
    logger.info("getting features of %s", os.path.basename(task_path))

    with domain.task_from_path(task_path) as task:
        with borg.accounting() as accountant:
            (feature_names, feature_values) = domain.compute_features(task)

        return (
            task_path,
            ["cpu_cost"] + list(feature_names),
            [accountant.total.cpu_seconds] + list(feature_values))
@borg.annotations(
    domain_name = ("suite path, or name of the problem domain", "positional"),
    instances_root = ("path to instances files", "positional", None, os.path.abspath),
    suffix = ("file suffix to apply", "positional"),
    skip_existing = ("skip existing features?", "flag"),
    distributor_name = ("name of task distributor", "option"),
    workers = ("submit jobs?", "option", "w", int),
    )
def main(
    domain_name,
    instances_root,
    suffix = ".features.csv",
    skip_existing = False,
    distributor_name = "ipython",
    workers = 0):
    """Collect task features."""

    def yield_runs():
        # resolve the domain from a solvers suite on disk, or by name
        if os.path.exists(domain_name):
            domain = borg.load_solvers(domain_name).domain
        else:
            domain = borg.get_domain(domain_name)

        paths = list(borg.util.files_under(instances_root, domain.extensions))
        count = 0

        for path in paths:
            # optionally skip instances whose feature file already exists
            if skip_existing and os.path.exists(path + suffix):
                continue

            count += 1

            yield (features_for_path, [domain, path])

        logger.info("collecting features for %i instances", count)

    distributor = borg.distributors.make(
        distributor_name,
        workers=workers)

    for (cnf_path, names, values) in distributor.do(yield_runs()):
        csv_path = cnf_path + suffix

        with open(csv_path, "wb") as csv_file:
            # fix: build a single csv.writer instead of instantiating a
            # fresh writer object for each row written to the same file
            writer = csv.writer(csv_file)
            writer.writerows([names, values])

if __name__ == "__main__":
    borg.script(main)
| mit |
mzizzi/ansible | lib/ansible/module_utils/facts/system/python.py | 232 | 1999 | # This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
from ansible.module_utils.facts.collector import BaseFactCollector
# Probe whether this interpreter's ssl module offers SSLContext support.
try:
    from ssl import create_default_context, SSLContext
except ImportError:
    HAS_SSLCONTEXT = False
else:
    HAS_SSLCONTEXT = True
    # Only availability matters; drop the imported names from the namespace.
    del create_default_context
    del SSLContext
class PythonFactCollector(BaseFactCollector):
    """Collects facts about the Python interpreter running the module."""

    name = 'python'
    _fact_ids = set()

    def collect(self, module=None, collected_facts=None):
        """Return a dict with a single 'python' key describing sys state."""
        info = sys.version_info
        python = {
            'version': dict(zip(
                ('major', 'minor', 'micro', 'releaselevel', 'serial'),
                info[:5])),
            'version_info': list(info),
            'executable': sys.executable,
            'has_sslcontext': HAS_SSLCONTEXT,
        }

        # Interpreter implementation name: CPython 2.x exposed sys.subversion,
        # newer interpreters expose sys.implementation instead.
        try:
            python['type'] = sys.subversion[0]
        except AttributeError:
            try:
                python['type'] = sys.implementation.name
            except AttributeError:
                python['type'] = None

        return {'python': python}
| gpl-3.0 |
Mirantis/openstack-dashboard | django-openstack/src/django_openstack/syspanel/views/services.py | 3 | 2699 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
from django import template
from django import http
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.shortcuts import render_to_response
from django.shortcuts import redirect
from django.utils.translation import ugettext as _
import datetime
import json
import logging
import subprocess
import urlparse
from django.contrib import messages
from django_openstack import api
from django_openstack import forms
from django_openstack.dash.views import instances as dash_instances
from openstackx.api import exceptions as api_exceptions
class ToggleService(forms.SelfHandlingForm):
service = forms.CharField(required=False)
name = forms.CharField(required=False)
def handle(self, request, data):
try:
service = api.admin_api(request).services.get(data['service'])
api.admin_api(request).services.update(data['service'],
not service.disabled)
if service.disabled:
messages.info(request, "Service '%s' has been enabled"
% data['name'])
else:
messages.info(request, "Service '%s' has been disabled"
% data['name'])
except api_exceptions.ApiException, e:
messages.error(request, "Unable to update service '%s': %s"
% data['name'], e.message)
return redirect(request.build_absolute_uri())
@login_required
def index(request):
for f in (ToggleService,):
_, handled = f.maybe_handle(request)
if handled:
return handled
services = []
try:
services = api.admin_api(request).services.list()
except api_exceptions.ApiException, e:
messages.error(request, 'Unable to get service info: %s' % e.message)
other_services = []
for k, v in request.session['serviceCatalog'].iteritems():
v = v[0]
try:
subprocess.check_call(['curl', '-m', '1', v['internalURL']])
up = True
except:
up = False
hostname = urlparse.urlparse(v['internalURL']).hostname
row = {'type': k, 'internalURL': v['internalURL'], 'host': hostname,
'region': v['region'], 'up': up }
other_services.append(row)
return render_to_response('syspanel_services.html', {
'services': services,
'service_toggle_enabled_form': ToggleService,
'other_services': other_services,
}, context_instance = template.RequestContext(request))
| apache-2.0 |
jythontools/pip | tests/unit/test_appdirs.py | 10 | 10052 | import ntpath
import os
import posixpath
import sys
import pretend
from pip.utils import appdirs
class TestUserCacheDir:
    """Platform-specific behavior of appdirs.user_cache_dir."""

    def test_user_cache_dir_win(self, monkeypatch):
        # Stub the Windows shell-folder lookup and record calls so the
        # requested CSIDL constant can be verified.
        @pretend.call_recorder
        def _get_win_folder(base):
            return "C:\\Users\\test\\AppData\\Local"

        monkeypatch.setattr(
            appdirs,
            "_get_win_folder",
            _get_win_folder,
            raising=False,
        )
        monkeypatch.setattr(appdirs, "WINDOWS", True)
        monkeypatch.setattr(os, "path", ntpath)

        assert (appdirs.user_cache_dir("pip") ==
                "C:\\Users\\test\\AppData\\Local\\pip\\Cache")
        assert _get_win_folder.calls == [pretend.call("CSIDL_LOCAL_APPDATA")]

    def test_user_cache_dir_osx(self, monkeypatch):
        monkeypatch.setattr(appdirs, "WINDOWS", False)
        monkeypatch.setattr(os, "path", posixpath)
        monkeypatch.setenv("HOME", "/home/test")
        monkeypatch.setattr(sys, "platform", "darwin")

        assert appdirs.user_cache_dir("pip") == "/home/test/Library/Caches/pip"

    def test_user_cache_dir_linux(self, monkeypatch):
        monkeypatch.setattr(appdirs, "WINDOWS", False)
        monkeypatch.setattr(os, "path", posixpath)
        monkeypatch.delenv("XDG_CACHE_HOME")
        monkeypatch.setenv("HOME", "/home/test")
        monkeypatch.setattr(sys, "platform", "linux2")

        assert appdirs.user_cache_dir("pip") == "/home/test/.cache/pip"

    def test_user_cache_dir_linux_override(self, monkeypatch):
        monkeypatch.setattr(appdirs, "WINDOWS", False)
        monkeypatch.setattr(os, "path", posixpath)
        monkeypatch.setenv("XDG_CACHE_HOME", "/home/test/.other-cache")
        monkeypatch.setenv("HOME", "/home/test")
        monkeypatch.setattr(sys, "platform", "linux2")

        assert appdirs.user_cache_dir("pip") == "/home/test/.other-cache/pip"

    def test_user_cache_dir_linux_home_slash(self, monkeypatch):
        monkeypatch.setattr(appdirs, "WINDOWS", False)
        monkeypatch.setattr(os, "path", posixpath)
        # Verify that we are not affected by http://bugs.python.org/issue14768
        monkeypatch.delenv("XDG_CACHE_HOME")
        monkeypatch.setenv("HOME", "/")
        monkeypatch.setattr(sys, "platform", "linux2")

        assert appdirs.user_cache_dir("pip") == "/.cache/pip"
class TestSiteConfigDirs:
    """Platform-specific behavior of appdirs.site_config_dirs."""

    def test_site_config_dirs_win(self, monkeypatch):
        @pretend.call_recorder
        def _get_win_folder(base):
            return "C:\\ProgramData"

        monkeypatch.setattr(
            appdirs,
            "_get_win_folder",
            _get_win_folder,
            raising=False,
        )
        monkeypatch.setattr(appdirs, "WINDOWS", True)
        monkeypatch.setattr(os, "path", ntpath)

        assert appdirs.site_config_dirs("pip") == ["C:\\ProgramData\\pip"]
        assert _get_win_folder.calls == [pretend.call("CSIDL_COMMON_APPDATA")]

    def test_site_config_dirs_osx(self, monkeypatch):
        monkeypatch.setattr(appdirs, "WINDOWS", False)
        monkeypatch.setattr(os, "path", posixpath)
        monkeypatch.setenv("HOME", "/home/test")
        monkeypatch.setattr(sys, "platform", "darwin")

        assert appdirs.site_config_dirs("pip") == \
            ["/Library/Application Support/pip"]

    def test_site_config_dirs_linux(self, monkeypatch):
        monkeypatch.setattr(appdirs, "WINDOWS", False)
        monkeypatch.setattr(os, "path", posixpath)
        monkeypatch.delenv("XDG_CONFIG_DIRS")
        monkeypatch.setattr(sys, "platform", "linux2")

        # Defaults per the XDG base-directory specification.
        assert appdirs.site_config_dirs("pip") == [
            '/etc/xdg/pip',
            '/etc'
        ]

    def test_site_config_dirs_linux_override(self, monkeypatch):
        monkeypatch.setattr(appdirs, "WINDOWS", False)
        monkeypatch.setattr(os, "path", posixpath)
        monkeypatch.setattr(os, "pathsep", ':')
        monkeypatch.setenv("XDG_CONFIG_DIRS", "/spam:/etc:/etc/xdg")
        monkeypatch.setattr(sys, "platform", "linux2")

        # XDG_CONFIG_DIRS entries are honored in order, '/etc' appended last.
        assert appdirs.site_config_dirs("pip") == [
            '/spam/pip',
            '/etc/pip',
            '/etc/xdg/pip',
            '/etc'
        ]
class TestUserDataDir:
    """Platform-specific behavior of appdirs.user_data_dir (incl. roaming)."""

    def test_user_data_dir_win_no_roaming(self, monkeypatch):
        @pretend.call_recorder
        def _get_win_folder(base):
            return "C:\\Users\\test\\AppData\\Local"

        monkeypatch.setattr(
            appdirs,
            "_get_win_folder",
            _get_win_folder,
            raising=False,
        )
        monkeypatch.setattr(appdirs, "WINDOWS", True)
        monkeypatch.setattr(os, "path", ntpath)

        assert (appdirs.user_data_dir("pip") ==
                "C:\\Users\\test\\AppData\\Local\\pip")
        assert _get_win_folder.calls == [pretend.call("CSIDL_LOCAL_APPDATA")]

    def test_user_data_dir_win_yes_roaming(self, monkeypatch):
        @pretend.call_recorder
        def _get_win_folder(base):
            return "C:\\Users\\test\\AppData\\Roaming"

        monkeypatch.setattr(
            appdirs,
            "_get_win_folder",
            _get_win_folder,
            raising=False,
        )
        monkeypatch.setattr(appdirs, "WINDOWS", True)
        monkeypatch.setattr(os, "path", ntpath)

        # roaming=True selects the roaming AppData folder (CSIDL_APPDATA).
        assert (
            appdirs.user_data_dir("pip", roaming=True) ==
            "C:\\Users\\test\\AppData\\Roaming\\pip"
        )
        assert _get_win_folder.calls == [pretend.call("CSIDL_APPDATA")]

    def test_user_data_dir_osx(self, monkeypatch):
        monkeypatch.setattr(appdirs, "WINDOWS", False)
        monkeypatch.setattr(os, "path", posixpath)
        monkeypatch.setenv("HOME", "/home/test")
        monkeypatch.setattr(sys, "platform", "darwin")

        assert (appdirs.user_data_dir("pip") ==
                "/home/test/Library/Application Support/pip")

    def test_user_data_dir_linux(self, monkeypatch):
        monkeypatch.setattr(appdirs, "WINDOWS", False)
        monkeypatch.setattr(os, "path", posixpath)
        monkeypatch.delenv("XDG_DATA_HOME")
        monkeypatch.setenv("HOME", "/home/test")
        monkeypatch.setattr(sys, "platform", "linux2")

        assert appdirs.user_data_dir("pip") == "/home/test/.local/share/pip"

    def test_user_data_dir_linux_override(self, monkeypatch):
        monkeypatch.setattr(appdirs, "WINDOWS", False)
        monkeypatch.setattr(os, "path", posixpath)
        monkeypatch.setenv("XDG_DATA_HOME", "/home/test/.other-share")
        monkeypatch.setenv("HOME", "/home/test")
        monkeypatch.setattr(sys, "platform", "linux2")

        assert appdirs.user_data_dir("pip") == "/home/test/.other-share/pip"

    def test_user_data_dir_linux_home_slash(self, monkeypatch):
        monkeypatch.setattr(appdirs, "WINDOWS", False)
        monkeypatch.setattr(os, "path", posixpath)
        # Verify that we are not affected by http://bugs.python.org/issue14768
        monkeypatch.delenv("XDG_DATA_HOME")
        monkeypatch.setenv("HOME", "/")
        monkeypatch.setattr(sys, "platform", "linux2")

        assert appdirs.user_data_dir("pip") == "/.local/share/pip"
class TestUserConfigDir:
    """Platform-specific behavior of appdirs.user_config_dir (incl. roaming)."""

    def test_user_config_dir_win_no_roaming(self, monkeypatch):
        @pretend.call_recorder
        def _get_win_folder(base):
            return "C:\\Users\\test\\AppData\\Local"

        monkeypatch.setattr(
            appdirs,
            "_get_win_folder",
            _get_win_folder,
            raising=False,
        )
        monkeypatch.setattr(appdirs, "WINDOWS", True)
        monkeypatch.setattr(os, "path", ntpath)

        assert (
            appdirs.user_config_dir("pip", roaming=False) ==
            "C:\\Users\\test\\AppData\\Local\\pip"
        )
        assert _get_win_folder.calls == [pretend.call("CSIDL_LOCAL_APPDATA")]

    def test_user_config_dir_win_yes_roaming(self, monkeypatch):
        @pretend.call_recorder
        def _get_win_folder(base):
            return "C:\\Users\\test\\AppData\\Roaming"

        monkeypatch.setattr(
            appdirs,
            "_get_win_folder",
            _get_win_folder,
            raising=False,
        )
        monkeypatch.setattr(appdirs, "WINDOWS", True)
        monkeypatch.setattr(os, "path", ntpath)

        # Roaming is the default for user_config_dir on Windows.
        assert (appdirs.user_config_dir("pip") ==
                "C:\\Users\\test\\AppData\\Roaming\\pip")
        assert _get_win_folder.calls == [pretend.call("CSIDL_APPDATA")]

    def test_user_config_dir_osx(self, monkeypatch):
        monkeypatch.setattr(appdirs, "WINDOWS", False)
        monkeypatch.setattr(os, "path", posixpath)
        monkeypatch.setenv("HOME", "/home/test")
        monkeypatch.setattr(sys, "platform", "darwin")

        assert (appdirs.user_config_dir("pip") ==
                "/home/test/Library/Application Support/pip")

    def test_user_config_dir_linux(self, monkeypatch):
        monkeypatch.setattr(appdirs, "WINDOWS", False)
        monkeypatch.setattr(os, "path", posixpath)
        monkeypatch.delenv("XDG_CONFIG_HOME")
        monkeypatch.setenv("HOME", "/home/test")
        monkeypatch.setattr(sys, "platform", "linux2")

        assert appdirs.user_config_dir("pip") == "/home/test/.config/pip"

    def test_user_config_dir_linux_override(self, monkeypatch):
        monkeypatch.setattr(appdirs, "WINDOWS", False)
        monkeypatch.setattr(os, "path", posixpath)
        monkeypatch.setenv("XDG_CONFIG_HOME", "/home/test/.other-config")
        monkeypatch.setenv("HOME", "/home/test")
        monkeypatch.setattr(sys, "platform", "linux2")

        assert appdirs.user_config_dir("pip") == "/home/test/.other-config/pip"

    def test_user_config_dir_linux_home_slash(self, monkeypatch):
        monkeypatch.setattr(appdirs, "WINDOWS", False)
        monkeypatch.setattr(os, "path", posixpath)
        # Verify that we are not affected by http://bugs.python.org/issue14768
        monkeypatch.delenv("XDG_CONFIG_HOME")
        monkeypatch.setenv("HOME", "/")
        monkeypatch.setattr(sys, "platform", "linux2")

        assert appdirs.user_config_dir("pip") == "/.config/pip"
| mit |
egraba/vbox_openbsd | VirtualBox-5.0.0/src/VBox/ValidationKit/testmanager/batch/regen_sched_queues.py | 3 | 4295 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# $Id: regen_sched_queues.py $
# pylint: disable=C0301
"""
Interface used by the admin to regenerate scheduling queues.
"""
__copyright__ = \
"""
Copyright (C) 2012-2015 Oracle Corporation
This file is part of VirtualBox Open Source Edition (OSE), as
available from http://www.virtualbox.org. This file is free software;
you can redistribute it and/or modify it under the terms of the GNU
General Public License (GPL) as published by the Free Software
Foundation, in version 2 as it comes in the "COPYING" file of the
VirtualBox OSE distribution. VirtualBox OSE is distributed in the
hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
The contents of this file may alternatively be used under the terms
of the Common Development and Distribution License Version 1.0
(CDDL) only, as it comes in the "COPYING.CDDL" file of the
VirtualBox OSE distribution, in which case the provisions of the
CDDL are applicable instead of those of the GPL.
You may elect to license modified versions of this file under the
terms and conditions of either the GPL or the CDDL or both.
"""
__version__ = "$Revision: 101450 $"
# Standard python imports
import sys;
import os;
from optparse import OptionParser;
# Add Test Manager's modules path
g_ksTestManagerDir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))));
sys.path.append(g_ksTestManagerDir);
# Test Manager imports
from testmanager.core.db import TMDatabaseConnection;
from testmanager.core.schedulerbase import SchedulerBase;
from testmanager.core.schedgroup import SchedGroupLogic;
class RegenSchedQueues(object): # pylint: disable=R0903
"""
Regenerates all the scheduling queues.
"""
def __init__(self):
"""
Parse command line.
"""
oParser = OptionParser();
oParser.add_option('-q', '--quiet', dest = 'fQuiet', action = 'store_true', default = False,
help = 'Quiet execution');
oParser.add_option('-u', '--uid', dest = 'uid', action = 'store', type = 'int', default = 1,
help = 'User ID to accredit with this job');
oParser.add_option('--profile', dest = 'fProfile', action = 'store_true', default = False,
help = 'User ID to accredit with this job');
(self.oConfig, _) = oParser.parse_args();
def doIt(self):
"""
Does the job.
"""
oDb = TMDatabaseConnection();
aoGroups = SchedGroupLogic(oDb).getAll();
iRc = 0;
for oGroup in aoGroups:
if not self.oConfig.fQuiet:
print '%s (ID %#d):' % (oGroup.sName, oGroup.idSchedGroup,);
try:
(aoErrors, asMessages) = SchedulerBase.recreateQueue(oDb, self.oConfig.uid, oGroup.idSchedGroup, 2);
except Exception as oXcpt:
oDb.rollback();
print ' !!Hit exception processing "%s": %s' % (oGroup.sName, oXcpt,);
else:
if len(aoErrors) == 0:
if not self.oConfig.fQuiet:
print ' Successfully regenerated.';
else:
iRc = 1;
print ' %d errors:' % (len(aoErrors,));
for oError in aoErrors:
if oError[1] is None:
print ' !!%s' % (oError[0],);
else:
print ' !!%s (%s)' % (oError[0], oError[1]);
if len(asMessages) > 0 and not self.oConfig.fQuiet:
print ' %d messages:' % (len(asMessages),);
for sMsg in asMessages:
print ' ##%s' % (sMsg,);
return iRc;
@staticmethod
def main():
""" Main function. """
oMain = RegenSchedQueues();
if oMain.oConfig.fProfile is not True:
iRc = oMain.doIt();
else:
import cProfile;
oProfiler = cProfile.Profile();
iRc = oProfiler.runcall(oMain.doIt);
oProfiler.print_stats(sort = 'time');
oProfiler = None;
return iRc;
if __name__ == '__main__':
sys.exit(RegenSchedQueues().main());
| mit |
girving/tensorflow | tensorflow/contrib/distribute/python/input_ops_test.py | 7 | 9593 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for input pipeline modifications for distribution strategies."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from tensorflow.contrib.distribute.python import input_ops
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.ops import readers
from tensorflow.python.framework import errors
from tensorflow.python.lib.io import python_io
from tensorflow.python.platform import test
from tensorflow.python.util import compat
class AutoShardDatasetTest(test.TestCase):
  """Checks input_ops.auto_shard_dataset() across a variety of pipelines.

  Each test builds a dataset over 10 generated files of 4 records each,
  auto-shards it into 2 shards, and verifies that shard 0 sees exactly the
  records of every other file.
  """

  def setUp(self):
    super(AutoShardDatasetTest, self).setUp()
    self._num_files = 10
    self._num_records = 4
    self._num_shards = 2
    self._shard_index = 0
    self._record_bytes = 10

  def _record(self, r, f):
    # Expected payload of TFRecord r in file f.
    return compat.as_bytes("Record %d of file %d" % (r, f))

  def _text_line(self, r, f):
    # Expected payload of text line r in file f.
    return compat.as_bytes("Text line %d of file %d" % (r, f))

  def _fixed_length_record(self, r, f):
    # Expected payload of fixed-length record r in file f.
    return compat.as_bytes(str((r * f) % 10) * self._record_bytes)

  def _createTFRecordFiles(self):
    filenames = []
    for i in range(self._num_files):
      fn = os.path.join(self.get_temp_dir(), "tf_record.%d.txt" % i)
      filenames.append(fn)
      writer = python_io.TFRecordWriter(fn)
      for j in range(self._num_records):
        record = self._record(j, i)
        writer.write(record)
      writer.close()
    return filenames

  def _createTextFiles(self):
    filenames = []
    for i in range(self._num_files):
      fn = os.path.join(self.get_temp_dir(), "text_line.%d.txt" % i)
      filenames.append(fn)
      contents = []
      for j in range(self._num_records):
        contents.append(self._text_line(j, i))
        # Deliberately vary line endings: file 0 ends with a newline,
        # the other files do not.
        if j + 1 != self._num_records or i == 0:
          contents.append(b"\r\n")
      contents = b"".join(contents)

      with open(fn, "wb") as f:
        f.write(contents)
    return filenames

  def _createFixedLengthRecordFiles(self):
    filenames = []
    for i in range(self._num_files):
      fn = os.path.join(self.get_temp_dir(), "fixed_length_record.%d.txt" % i)
      filenames.append(fn)
      with open(fn, "wb") as f:
        for j in range(self._num_records):
          f.write(self._fixed_length_record(j, i))
    return filenames

  def _verifySimpleShardingOutput(self, dataset, record_fn):
    # Expects shard 0 to yield, in order, all records of files
    # shard_index, shard_index + num_shards, ... and then end.
    iterator = dataset.make_one_shot_iterator()
    next_element = iterator.get_next()
    with self.cached_session() as sess:
      for f in range(self._shard_index, self._num_files, self._num_shards):
        for r in range(self._num_records):
          self.assertAllEqual(record_fn(r, f), sess.run(next_element))
      with self.assertRaises(errors.OutOfRangeError):
        sess.run(next_element)

  def testTFRecordDataset(self):
    dataset = readers.TFRecordDataset(self._createTFRecordFiles())
    dataset = input_ops.auto_shard_dataset(
        dataset, self._num_shards, self._shard_index)

    self._verifySimpleShardingOutput(dataset, self._record)

  def testFlatMap(self):
    dataset = dataset_ops.Dataset.from_tensor_slices(
        self._createTFRecordFiles())
    dataset = dataset.flat_map(readers.TFRecordDataset)
    dataset = input_ops.auto_shard_dataset(
        dataset, self._num_shards, self._shard_index)

    self._verifySimpleShardingOutput(dataset, self._record)

  def testInterleave(self):
    dataset = dataset_ops.Dataset.from_tensor_slices(
        self._createTFRecordFiles())
    dataset = dataset.interleave(
        readers.TFRecordDataset, cycle_length=4, block_length=self._num_records)
    dataset = input_ops.auto_shard_dataset(
        dataset, self._num_shards, self._shard_index)

    # Since block_length == num records in each file, the output will still
    # contain records in order of files.
    self._verifySimpleShardingOutput(dataset, self._record)

  def testListfiles(self):
    filenames = self._createTFRecordFiles()
    file_pattern = filenames[0].rsplit("/", 1)[0] + "/tf_record.*.txt"
    dataset = dataset_ops.Dataset.list_files(file_pattern, shuffle=False)
    dataset = dataset.flat_map(readers.TFRecordDataset)
    dataset = input_ops.auto_shard_dataset(
        dataset, self._num_shards, self._shard_index)

    iterator = dataset.make_one_shot_iterator()
    next_element = iterator.get_next()
    with self.cached_session() as sess:
      actual, expected = [], []
      for f in range(self._shard_index, self._num_files, self._num_shards):
        for r in range(self._num_records):
          actual.append(sess.run(next_element))
          expected.append(self._record(r, f))
      with self.assertRaises(errors.OutOfRangeError):
        sess.run(next_element)
      self.assertAllEqual(expected, actual)

  def testComplexPipeline(self):
    # Setup a complex input pipeline.
    batch_size = 2
    num_epochs = 5
    dataset = dataset_ops.Dataset.from_tensor_slices(
        self._createTFRecordFiles())
    dataset = dataset.shuffle(buffer_size=self._num_files)
    dataset = dataset.flat_map(readers.TFRecordDataset)
    dataset = dataset.prefetch(buffer_size=batch_size)
    dataset = dataset.shuffle(2 * self._num_files * self._num_records)
    dataset = dataset.repeat(num_epochs)
    dataset = dataset.map(lambda x: x)
    dataset = dataset.batch(batch_size)
    dataset = dataset.prefetch(buffer_size=None)

    # Auto shard.
    dataset = input_ops.auto_shard_dataset(
        dataset, self._num_shards, self._shard_index)

    # Verify output.  The pipeline shuffles, so compare sorted record sets
    # rather than exact order.
    iterator = dataset.make_one_shot_iterator()
    next_element = iterator.get_next()
    with self.cached_session() as sess:
      actual = []
      num_iterations = (self._num_files * self._num_records * num_epochs) // (
          self._num_shards * batch_size)
      for _ in range(num_iterations):
        actual.extend(sess.run(next_element))
      with self.assertRaises(errors.OutOfRangeError):
        sess.run(next_element)

      expected = []
      for f in range(0, self._num_files, self._num_shards):
        for r in range(self._num_records):
          expected.append(self._record(r, f))
      expected *= num_epochs

      self.assertAllEqual(sorted(expected), sorted(actual))

  def testZip(self):
    dataset1 = readers.TFRecordDataset(self._createTFRecordFiles())
    dataset2 = readers.TextLineDataset(self._createTextFiles())
    dataset = dataset_ops.Dataset.zip((dataset1, dataset2))
    dataset = input_ops.auto_shard_dataset(
        dataset, self._num_shards, self._shard_index)

    record_fn = lambda r, f: (self._record(r, f), self._text_line(r, f))
    self._verifySimpleShardingOutput(dataset, record_fn)

  def testConcat(self):
    dataset1 = readers.TFRecordDataset(self._createTFRecordFiles())
    dataset2 = readers.TextLineDataset(self._createTextFiles())
    dataset = dataset1.concatenate(dataset2)
    dataset = input_ops.auto_shard_dataset(
        dataset, self._num_shards, self._shard_index)

    # All sharded TFRecord output comes first, then all text-line output.
    iterator = dataset.make_one_shot_iterator()
    next_element = iterator.get_next()
    with self.cached_session() as sess:
      for f in range(self._shard_index, self._num_files, self._num_shards):
        for r in range(self._num_records):
          self.assertAllEqual(self._record(r, f), sess.run(next_element))
      for f in range(self._shard_index, self._num_files, self._num_shards):
        for r in range(self._num_records):
          self.assertAllEqual(self._text_line(r, f), sess.run(next_element))
      with self.assertRaises(errors.OutOfRangeError):
        sess.run(next_element)

  def testTextLineReader(self):
    dataset = readers.TextLineDataset(self._createTextFiles())
    dataset = input_ops.auto_shard_dataset(
        dataset, self._num_shards, self._shard_index)

    self._verifySimpleShardingOutput(dataset, self._text_line)

  def testTextLineReaderWithFlatMap(self):
    dataset = dataset_ops.Dataset.from_tensor_slices(self._createTextFiles())
    dataset = dataset.flat_map(readers.TextLineDataset)
    dataset = input_ops.auto_shard_dataset(
        dataset, self._num_shards, self._shard_index)

    self._verifySimpleShardingOutput(dataset, self._text_line)

  def testFixedLengthReader(self):
    dataset = readers.FixedLengthRecordDataset(
        self._createFixedLengthRecordFiles(), self._record_bytes)
    dataset = input_ops.auto_shard_dataset(
        dataset, self._num_shards, self._shard_index)

    self._verifySimpleShardingOutput(dataset, self._fixed_length_record)

  def testFixedLengthReaderWithFlatMap(self):
    dataset = dataset_ops.Dataset.from_tensor_slices(
        self._createFixedLengthRecordFiles())
    dataset = dataset.flat_map(
        lambda f: readers.FixedLengthRecordDataset(f, self._record_bytes))
    dataset = input_ops.auto_shard_dataset(
        dataset, self._num_shards, self._shard_index)

    self._verifySimpleShardingOutput(dataset, self._fixed_length_record)


if __name__ == "__main__":
  test.main()
| apache-2.0 |
ArthurGarnier/SickRage | lib/sqlalchemy/dialects/mysql/mysqldb.py | 75 | 3479 | # mysql/mysqldb.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: mysql+mysqldb
:name: MySQL-Python
:dbapi: mysqldb
:connectstring: mysql+mysqldb://<user>:<password>@<host>[:<port>]/<dbname>
:url: http://sourceforge.net/projects/mysql-python
Unicode
-------
MySQLdb requires a "charset" parameter to be passed in order for it
to handle non-ASCII characters correctly. When this parameter is passed,
MySQLdb will also implicitly set the "use_unicode" flag to true, which means
that it will return Python unicode objects instead of bytestrings.
However, SQLAlchemy's decode process, when C extensions are enabled,
is orders of magnitude faster than that of MySQLdb as it does not call into
Python functions to do so. Therefore, the **recommended URL to use for
unicode** will include both charset and use_unicode=0::
create_engine("mysql+mysqldb://user:pass@host/dbname?charset=utf8&use_unicode=0")
As of this writing, MySQLdb only runs on Python 2. It is not known how
MySQLdb behaves on Python 3 as far as unicode decoding.
Known Issues
-------------
MySQL-python version 1.2.2 has a serious memory leak related
to unicode conversion, a feature which is disabled via ``use_unicode=0``.
It is strongly advised to use the latest version of MySQL-Python.
"""
from .base import (MySQLDialect, MySQLExecutionContext,
MySQLCompiler, MySQLIdentifierPreparer)
from ...connectors.mysqldb import (
MySQLDBExecutionContext,
MySQLDBCompiler,
MySQLDBIdentifierPreparer,
MySQLDBConnector
)
from .base import TEXT
from ... import sql
class MySQLExecutionContext_mysqldb(MySQLDBExecutionContext, MySQLExecutionContext):
    """Execution context combining the MySQLdb connector mixin with the
    base MySQL execution behavior; all logic comes from the bases."""
    pass
class MySQLCompiler_mysqldb(MySQLDBCompiler, MySQLCompiler):
    """SQL statement compiler for the MySQLdb driver; behavior is
    inherited unchanged from the two bases."""
    pass
class MySQLIdentifierPreparer_mysqldb(MySQLDBIdentifierPreparer, MySQLIdentifierPreparer):
    """Identifier quoting/escaping for the MySQLdb driver; behavior is
    inherited unchanged from the two bases."""
    pass
class MySQLDialect_mysqldb(MySQLDBConnector, MySQLDialect):
    """MySQL dialect implementation for the MySQL-Python (MySQLdb) DBAPI."""
    execution_ctx_cls = MySQLExecutionContext_mysqldb
    statement_compiler = MySQLCompiler_mysqldb
    preparer = MySQLIdentifierPreparer_mysqldb
    def _check_unicode_returns(self, connection):
        """Detect whether this connection returns unicode strings.

        Works around an issue fixed in
        https://github.com/farcepest/MySQLdb1/commit/cd44524fef63bd3fcb71947392326e9742d520e8
        -- a specific problem with the utf8_bin collation and unicode
        returns: when the server has utf8_bin available, an extra
        round-trip probe using that collation is added to the checks.
        """
        has_utf8_bin = connection.scalar(
            "show collation where %s = 'utf8' and %s = 'utf8_bin'"
            % (
                self.identifier_preparer.quote("Charset"),
                self.identifier_preparer.quote("Collation")
            ))
        if has_utf8_bin:
            # Cast a literal to TEXT with an explicit utf8_bin collation so
            # the unicode round-trip exercises the problematic code path.
            additional_tests = [
                sql.collate(sql.cast(
                    sql.literal_column(
                        "'test collated returns'"),
                    TEXT(charset='utf8')), "utf8_bin")
            ]
        else:
            additional_tests = []
        # NOTE: super() is deliberately anchored at MySQLDBConnector -- this
        # skips MySQLDBConnector's own _check_unicode_returns and resumes the
        # MRO lookup just past it.
        return super(MySQLDBConnector, self)._check_unicode_returns(
            connection, additional_tests)
# Default dialect class chosen when this DBAPI module is selected.
dialect = MySQLDialect_mysqldb
| gpl-3.0 |
nanolearning/edx-platform | common/djangoapps/course_modes/models.py | 10 | 4686 | """
Add and create new modes for running courses on this particular LMS
"""
import pytz
from datetime import datetime
from django.db import models
from collections import namedtuple
from django.utils.translation import ugettext as _
from django.db.models import Q
from xmodule_django.models import CourseKeyField
Mode = namedtuple('Mode', ['slug', 'name', 'min_price', 'suggested_prices', 'currency', 'expiration_datetime'])
class CourseMode(models.Model):
    """
    We would like to offer a course in a variety of modes.

    Each row attaches one enrollment mode (e.g. "honor", "verified") to a
    course, with an optional price, currency and expiration.  Courses with
    no rows at all fall back to ``DEFAULT_MODE``.
    """
    # the course that this mode is attached to
    course_id = CourseKeyField(max_length=255, db_index=True)
    # the reference to this mode that can be used by Enrollments to generate
    # similar behavior for the same slug across courses
    mode_slug = models.CharField(max_length=100)
    # The 'pretty' name that can be translated and displayed
    mode_display_name = models.CharField(max_length=255)
    # minimum price in USD that we would like to charge for this mode of the course
    min_price = models.IntegerField(default=0)
    # the suggested prices for this mode
    suggested_prices = models.CommaSeparatedIntegerField(max_length=255, blank=True, default='')
    # the currency these prices are in, using lower case ISO currency codes
    currency = models.CharField(default="usd", max_length=8)
    # turn this mode off after the given expiration date
    # NOTE(review): expiration_date appears unused by the queries below,
    # which filter on expiration_datetime only -- confirm before removing.
    expiration_date = models.DateField(default=None, null=True, blank=True)
    expiration_datetime = models.DateTimeField(default=None, null=True, blank=True)
    # Fallback mode used when a course has no modes configured.
    DEFAULT_MODE = Mode('honor', _('Honor Code Certificate'), 0, '', 'usd', None)
    DEFAULT_MODE_SLUG = 'honor'

    class Meta:
        """ meta attributes of this model """
        unique_together = ('course_id', 'mode_slug', 'currency')

    @classmethod
    def modes_for_course(cls, course_id):
        """
        Returns a list of the non-expired modes for a given course id.

        If no modes have been set in the table, returns the default mode.
        """
        now = datetime.now(pytz.UTC)
        # A mode is still live if it has no expiration or expires in the future.
        found_course_modes = cls.objects.filter(
            Q(course_id=course_id) &
            (Q(expiration_datetime__isnull=True) | Q(expiration_datetime__gte=now))
        )
        modes = [Mode(
            mode.mode_slug,
            mode.mode_display_name,
            mode.min_price,
            mode.suggested_prices,
            mode.currency,
            mode.expiration_datetime
        ) for mode in found_course_modes]
        if not modes:
            modes = [cls.DEFAULT_MODE]
        return modes

    @classmethod
    def modes_for_course_dict(cls, course_id):
        """
        Returns the non-expired modes for a particular course as a
        dictionary with the mode slug as the key.
        """
        return {mode.slug: mode for mode in cls.modes_for_course(course_id)}

    @classmethod
    def mode_for_course(cls, course_id, mode_slug):
        """
        Returns the mode for the course corresponding to mode_slug.

        Returns only non-expired modes.
        If this particular mode is not set for the course, returns None.
        """
        matched = [m for m in cls.modes_for_course(course_id) if m.slug == mode_slug]
        return matched[0] if matched else None

    @classmethod
    def min_course_price_for_verified_for_currency(cls, course_id, currency):
        """
        Returns the minimum price of the course in the appropriate currency over all the
        course's *verified*, non-expired modes.

        Assuming all verified courses have a minimum price of >0, this value should always
        be >0.
        If no verified mode is found, 0 is returned.
        """
        for mode in cls.modes_for_course(course_id):
            if (mode.currency == currency) and (mode.slug == 'verified'):
                return mode.min_price
        return 0

    @classmethod
    def min_course_price_for_currency(cls, course_id, currency):
        """
        Returns the minimum price of the course in the appropriate currency over all the
        course's non-expired modes.

        If no mode is priced in ``currency``, returns 0 (the price of
        DEFAULT_MODE), as documented.  Previously this raised ValueError
        from min() over an empty sequence when no mode matched the currency.
        """
        prices = [mode.min_price for mode in cls.modes_for_course(course_id)
                  if mode.currency == currency]
        return min(prices) if prices else 0

    def __unicode__(self):
        return u"{} : {}, min={}, prices={}".format(
            self.course_id.to_deprecated_string(), self.mode_slug, self.min_price, self.suggested_prices
        )
| agpl-3.0 |
isrohutamahopetechnik/MissionPlanner | Lib/site-packages/numpy/ma/tests/test_extras.py | 76 | 33410 | # pylint: disable-msg=W0611, W0612, W0511
"""Tests suite for MaskedArray.
Adapted from the original test_ma by Pierre Gerard-Marchant
:author: Pierre Gerard-Marchant
:contact: pierregm_at_uga_dot_edu
:version: $Id: test_extras.py 3473 2007-10-29 15:18:13Z jarrod.millman $
"""
__author__ = "Pierre GF Gerard-Marchant ($Author: jarrod.millman $)"
__version__ = '1.0'
__revision__ = "$Revision: 3473 $"
__date__ = '$Date: 2007-10-29 17:18:13 +0200 (Mon, 29 Oct 2007) $'
import numpy as np
from numpy.testing import TestCase, run_module_suite
from numpy.ma.testutils import *
from numpy.ma.core import *
from numpy.ma.extras import *
class TestGeneric(TestCase):
    """Tests for miscellaneous numpy.ma helpers: masked_all,
    masked_all_like, clump_masked/clump_unmasked and
    flatnotmasked_contiguous."""
    #
    def test_masked_all(self):
        "Tests masked_all"
        # Standard dtype
        test = masked_all((2,), dtype=float)
        control = array([1, 1], mask=[1, 1], dtype=float)
        assert_equal(test, control)
        # Flexible dtype
        dt = np.dtype({'names': ['a', 'b'], 'formats': ['f', 'f']})
        test = masked_all((2,), dtype=dt)
        control = array([(0, 0), (0, 0)], mask=[(1, 1), (1, 1)], dtype=dt)
        assert_equal(test, control)
        test = masked_all((2, 2), dtype=dt)
        control = array([[(0, 0), (0, 0)], [(0, 0), (0, 0)]],
                        mask=[[(1, 1), (1, 1)], [(1, 1), (1, 1)]],
                        dtype=dt)
        assert_equal(test, control)
        # Nested dtype
        dt = np.dtype([('a', 'f'), ('b', [('ba', 'f'), ('bb', 'f')])])
        test = masked_all((2,), dtype=dt)
        control = array([(1, (1, 1)), (1, (1, 1))],
                        mask=[(1, (1, 1)), (1, (1, 1))], dtype=dt)
        assert_equal(test, control)
        test = masked_all((2,), dtype=dt)
        control = array([(1, (1, 1)), (1, (1, 1))],
                        mask=[(1, (1, 1)), (1, (1, 1))], dtype=dt)
        assert_equal(test, control)
        test = masked_all((1, 1), dtype=dt)
        control = array([[(1, (1, 1))]], mask=[[(1, (1, 1))]], dtype=dt)
        assert_equal(test, control)
    def test_masked_all_like(self):
        "Tests masked_all_like"
        # Standard dtype
        base = array([1, 2], dtype=float)
        test = masked_all_like(base)
        control = array([1, 1], mask=[1, 1], dtype=float)
        assert_equal(test, control)
        # Flexible dtype
        dt = np.dtype({'names': ['a', 'b'], 'formats': ['f', 'f']})
        base = array([(0, 0), (0, 0)], mask=[(1, 1), (1, 1)], dtype=dt)
        test = masked_all_like(base)
        control = array([(10, 10), (10, 10)], mask=[(1, 1), (1, 1)], dtype=dt)
        assert_equal(test, control)
        # Nested dtype
        dt = np.dtype([('a', 'f'), ('b', [('ba', 'f'), ('bb', 'f')])])
        control = array([(1, (1, 1)), (1, (1, 1))],
                        mask=[(1, (1, 1)), (1, (1, 1))], dtype=dt)
        test = masked_all_like(control)
        assert_equal(test, control)
    def test_clump_masked(self):
        "Test clump_masked"
        a = masked_array(np.arange(10))
        a[[0, 1, 2, 6, 8, 9]] = masked
        # Each slice covers one contiguous run of masked entries.
        test = clump_masked(a)
        control = [slice(0, 3), slice(6, 7), slice(8, 10)]
        assert_equal(test, control)
    def test_clump_unmasked(self):
        "Test clump_unmasked"
        a = masked_array(np.arange(10))
        a[[0, 1, 2, 6, 8, 9]] = masked
        test = clump_unmasked(a)
        control = [slice(3, 6), slice(7, 8), ]
        assert_equal(test, control)
    def test_flatnotmasked_contiguous(self):
        "Test flatnotmasked_contiguous"
        a = arange(10)
        # No mask
        test = flatnotmasked_contiguous(a)
        assert_equal(test, slice(0, a.size))
        # Some mask
        a[(a < 3) | (a > 8) | (a == 5)] = masked
        test = flatnotmasked_contiguous(a)
        assert_equal(test, [slice(3, 5), slice(6, 9)])
        # Fully masked input yields None rather than an empty list.
        a[:] = masked
        test = flatnotmasked_contiguous(a)
        assert_equal(test, None)
class TestAverage(TestCase):
    """Tests of numpy.ma average: weights, masks, the `returned` flag and
    integer inputs. Several tests of average. Why so many ? Good point..."""
    def test_testAverage1(self):
        "Test of average."
        ott = array([0., 1., 2., 3.], mask=[True, False, False, False])
        assert_equal(2.0, average(ott, axis=0))
        assert_equal(2.0, average(ott, weights=[1., 1., 2., 1.]))
        # returned=1 also hands back the sum of weights actually used.
        result, wts = average(ott, weights=[1., 1., 2., 1.], returned=1)
        assert_equal(2.0, result)
        self.assertTrue(wts == 4.0)
        ott[:] = masked
        assert_equal(average(ott, axis=0).mask, [True])
        ott = array([0., 1., 2., 3.], mask=[True, False, False, False])
        ott = ott.reshape(2, 2)
        ott[:, 1] = masked
        assert_equal(average(ott, axis=0), [2.0, 0.0])
        assert_equal(average(ott, axis=1).mask[0], [True])
        assert_equal([2., 0.], average(ott, axis=0))
        result, wts = average(ott, axis=0, returned=1)
        assert_equal(wts, [1., 0.])
    def test_testAverage2(self):
        "More tests of average."
        w1 = [0, 1, 1, 1, 1, 0]
        w2 = [[0, 1, 1, 1, 1, 0], [1, 0, 0, 0, 0, 1]]
        x = arange(6, dtype=float_)
        assert_equal(average(x, axis=0), 2.5)
        assert_equal(average(x, axis=0, weights=w1), 2.5)
        y = array([arange(6, dtype=float_), 2.0 * arange(6)])
        assert_equal(average(y, None), np.add.reduce(np.arange(6)) * 3. / 12.)
        assert_equal(average(y, axis=0), np.arange(6) * 3. / 2.)
        assert_equal(average(y, axis=1),
                     [average(x, axis=0), average(x, axis=0) * 2.0])
        assert_equal(average(y, None, weights=w2), 20. / 6.)
        assert_equal(average(y, axis=0, weights=w2),
                     [0., 1., 2., 3., 4., 10.])
        assert_equal(average(y, axis=1),
                     [average(x, axis=0), average(x, axis=0) * 2.0])
        # Masks ranging from "nothing masked" to "everything masked".
        m1 = zeros(6)
        m2 = [0, 0, 1, 1, 0, 0]
        m3 = [[0, 0, 1, 1, 0, 0], [0, 1, 1, 1, 1, 0]]
        m4 = ones(6)
        m5 = [0, 1, 1, 1, 1, 1]
        assert_equal(average(masked_array(x, m1), axis=0), 2.5)
        assert_equal(average(masked_array(x, m2), axis=0), 2.5)
        assert_equal(average(masked_array(x, m4), axis=0).mask, [True])
        assert_equal(average(masked_array(x, m5), axis=0), 0.0)
        assert_equal(count(average(masked_array(x, m4), axis=0)), 0)
        z = masked_array(y, m3)
        assert_equal(average(z, None), 20. / 6.)
        # 99 is a placeholder at fully-masked positions, ignored by the
        # masked comparison in assert_equal.
        assert_equal(average(z, axis=0), [0., 1., 99., 99., 4.0, 7.5])
        assert_equal(average(z, axis=1), [2.5, 5.0])
        assert_equal(average(z, axis=0, weights=w2),
                     [0., 1., 99., 99., 4.0, 10.0])
    def test_testAverage3(self):
        "Yet more tests of average!"
        a = arange(6)
        b = arange(6) * 3
        r1, w1 = average([[a, b], [b, a]], axis=1, returned=1)
        assert_equal(shape(r1) , shape(w1))
        assert_equal(r1.shape , w1.shape)
        r2, w2 = average(ones((2, 2, 3)), axis=0, weights=[3, 1], returned=1)
        assert_equal(shape(w2) , shape(r2))
        r2, w2 = average(ones((2, 2, 3)), returned=1)
        assert_equal(shape(w2) , shape(r2))
        r2, w2 = average(ones((2, 2, 3)), weights=ones((2, 2, 3)), returned=1)
        assert_equal(shape(w2), shape(r2))
        a2d = array([[1, 2], [0, 4]], float)
        a2dm = masked_array(a2d, [[False, False], [True, False]])
        a2da = average(a2d, axis=0)
        assert_equal(a2da, [0.5, 3.0])
        a2dma = average(a2dm, axis=0)
        assert_equal(a2dma, [1.0, 3.0])
        a2dma = average(a2dm, axis=None)
        assert_equal(a2dma, 7. / 3.)
        a2dma = average(a2dm, axis=1)
        assert_equal(a2dma, [1.5, 4.0])
    def test_onintegers_with_mask(self):
        "Test average on integers with mask"
        a = average(array([1, 2]))
        assert_equal(a, 1.5)
        a = average(array([1, 2, 3, 4], mask=[False, False, True, True]))
        assert_equal(a, 1.5)
class TestConcatenator(TestCase):
    """
    Tests for mr_, the equivalent of r_ for masked arrays.
    """
    def test_1d(self):
        "Tests mr_ on 1D arrays."
        assert_array_equal(mr_[1, 2, 3, 4, 5, 6], array([1, 2, 3, 4, 5, 6]))
        b = ones(5)
        m = [1, 0, 0, 0, 0]
        d = masked_array(b, mask=m)
        c = mr_[d, 0, 0, d]
        # NOTE(review): the 'core' name here is not an obvious import of
        # this module -- verify it resolves before relying on this branch.
        self.assertTrue(isinstance(c, MaskedArray) or isinstance(c, core.MaskedArray))
        assert_array_equal(c, [1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1])
        assert_array_equal(c.mask, mr_[m, 0, 0, m])
    def test_2d(self):
        "Tests mr_ on 2D arrays."
        a_1 = rand(5, 5)
        a_2 = rand(5, 5)
        m_1 = np.round_(rand(5, 5), 0)
        m_2 = np.round_(rand(5, 5), 0)
        b_1 = masked_array(a_1, mask=m_1)
        b_2 = masked_array(a_2, mask=m_2)
        d = mr_['1', b_1, b_2]  # append columns
        self.assertTrue(d.shape == (5, 10))
        assert_array_equal(d[:, :5], b_1)
        assert_array_equal(d[:, 5:], b_2)
        assert_array_equal(d.mask, np.r_['1', m_1, m_2])
        # Default concatenation stacks rows.
        d = mr_[b_1, b_2]
        self.assertTrue(d.shape == (10, 5))
        assert_array_equal(d[:5, :], b_1)
        assert_array_equal(d[5:, :], b_2)
        assert_array_equal(d.mask, np.r_[m_1, m_2])
class TestNotMasked(TestCase):
    """
    Tests notmasked_edges and notmasked_contiguous.
    """
    def test_edges(self):
        "Tests notmasked_edges"
        data = masked_array(np.arange(25).reshape(5, 5),
                            mask=[[0, 0, 1, 0, 0],
                                  [0, 0, 0, 1, 1],
                                  [1, 1, 0, 0, 0],
                                  [0, 0, 0, 0, 0],
                                  [1, 1, 1, 0, 0]],)
        test = notmasked_edges(data, None)
        assert_equal(test, [0, 24])
        test = notmasked_edges(data, 0)
        assert_equal(test[0], [(0, 0, 1, 0, 0), (0, 1, 2, 3, 4)])
        assert_equal(test[1], [(3, 3, 3, 4, 4), (0, 1, 2, 3, 4)])
        test = notmasked_edges(data, 1)
        assert_equal(test[0], [(0, 1, 2, 3, 4), (0, 0, 2, 0, 3)])
        assert_equal(test[1], [(0, 1, 2, 3, 4), (4, 2, 4, 4, 4)])
        # A plain ndarray (no mask) behaves as if nothing were masked.
        test = notmasked_edges(data.data, None)
        assert_equal(test, [0, 24])
        test = notmasked_edges(data.data, 0)
        assert_equal(test[0], [(0, 0, 0, 0, 0), (0, 1, 2, 3, 4)])
        assert_equal(test[1], [(4, 4, 4, 4, 4), (0, 1, 2, 3, 4)])
        test = notmasked_edges(data.data, -1)
        assert_equal(test[0], [(0, 1, 2, 3, 4), (0, 0, 0, 0, 0)])
        assert_equal(test[1], [(0, 1, 2, 3, 4), (4, 4, 4, 4, 4)])
        # Masking a full row shifts the per-column edges.
        data[-2] = masked
        test = notmasked_edges(data, 0)
        assert_equal(test[0], [(0, 0, 1, 0, 0), (0, 1, 2, 3, 4)])
        assert_equal(test[1], [(1, 1, 2, 4, 4), (0, 1, 2, 3, 4)])
        test = notmasked_edges(data, -1)
        assert_equal(test[0], [(0, 1, 2, 4), (0, 0, 2, 3)])
        assert_equal(test[1], [(0, 1, 2, 4), (4, 2, 4, 4)])
    def test_contiguous(self):
        "Tests notmasked_contiguous"
        a = masked_array(np.arange(24).reshape(3, 8),
                         mask=[[0, 0, 0, 0, 1, 1, 1, 1],
                               [1, 1, 1, 1, 1, 1, 1, 1],
                               [0, 0, 0, 0, 0, 0, 1, 0], ])
        tmp = notmasked_contiguous(a, None)
        assert_equal(tmp[-1], slice(23, 24, None))
        assert_equal(tmp[-2], slice(16, 22, None))
        assert_equal(tmp[-3], slice(0, 4, None))
        # Per-column: fully masked columns give None entries.
        tmp = notmasked_contiguous(a, 0)
        self.assertTrue(len(tmp[-1]) == 1)
        self.assertTrue(tmp[-2] is None)
        assert_equal(tmp[-3], tmp[-1])
        self.assertTrue(len(tmp[0]) == 2)
        # Per-row: the fully masked middle row gives None.
        tmp = notmasked_contiguous(a, 1)
        assert_equal(tmp[0][-1], slice(0, 4, None))
        self.assertTrue(tmp[1] is None)
        assert_equal(tmp[2][-1], slice(7, 8, None))
        assert_equal(tmp[2][-2], slice(0, 6, None))
class Test2DFunctions(TestCase):
    """Tests 2D functions: compress_rowcols, mask_rowcols and the masked
    dot product."""
    def test_compress2d(self):
        "Tests compress_rowcols"
        x = array(np.arange(9).reshape(3, 3), mask=[[1, 0, 0], [0, 0, 0], [0, 0, 0]])
        assert_equal(compress_rowcols(x), [[4, 5], [7, 8]])
        assert_equal(compress_rowcols(x, 0), [[3, 4, 5], [6, 7, 8]])
        assert_equal(compress_rowcols(x, 1), [[1, 2], [4, 5], [7, 8]])
        x = array(x._data, mask=[[0, 0, 0], [0, 1, 0], [0, 0, 0]])
        assert_equal(compress_rowcols(x), [[0, 2], [6, 8]])
        assert_equal(compress_rowcols(x, 0), [[0, 1, 2], [6, 7, 8]])
        assert_equal(compress_rowcols(x, 1), [[0, 2], [3, 5], [6, 8]])
        x = array(x._data, mask=[[1, 0, 0], [0, 1, 0], [0, 0, 0]])
        assert_equal(compress_rowcols(x), [[8]])
        assert_equal(compress_rowcols(x, 0), [[6, 7, 8]])
        assert_equal(compress_rowcols(x, 1,), [[2], [5], [8]])
        # One masked entry per row AND per column leaves nothing.
        x = array(x._data, mask=[[1, 0, 0], [0, 1, 0], [0, 0, 1]])
        assert_equal(compress_rowcols(x).size, 0)
        assert_equal(compress_rowcols(x, 0).size, 0)
        assert_equal(compress_rowcols(x, 1).size, 0)
    #
    def test_mask_rowcols(self):
        "Tests mask_rowcols."
        x = array(np.arange(9).reshape(3, 3), mask=[[1, 0, 0], [0, 0, 0], [0, 0, 0]])
        assert_equal(mask_rowcols(x).mask, [[1, 1, 1], [1, 0, 0], [1, 0, 0]])
        assert_equal(mask_rowcols(x, 0).mask, [[1, 1, 1], [0, 0, 0], [0, 0, 0]])
        assert_equal(mask_rowcols(x, 1).mask, [[1, 0, 0], [1, 0, 0], [1, 0, 0]])
        x = array(x._data, mask=[[0, 0, 0], [0, 1, 0], [0, 0, 0]])
        assert_equal(mask_rowcols(x).mask, [[0, 1, 0], [1, 1, 1], [0, 1, 0]])
        assert_equal(mask_rowcols(x, 0).mask, [[0, 0, 0], [1, 1, 1], [0, 0, 0]])
        assert_equal(mask_rowcols(x, 1).mask, [[0, 1, 0], [0, 1, 0], [0, 1, 0]])
        x = array(x._data, mask=[[1, 0, 0], [0, 1, 0], [0, 0, 0]])
        assert_equal(mask_rowcols(x).mask, [[1, 1, 1], [1, 1, 1], [1, 1, 0]])
        assert_equal(mask_rowcols(x, 0).mask, [[1, 1, 1], [1, 1, 1], [0, 0, 0]])
        assert_equal(mask_rowcols(x, 1,).mask, [[1, 1, 0], [1, 1, 0], [1, 1, 0]])
        # One masked entry per row AND per column masks everything.
        x = array(x._data, mask=[[1, 0, 0], [0, 1, 0], [0, 0, 1]])
        self.assertTrue(mask_rowcols(x).all() is masked)
        self.assertTrue(mask_rowcols(x, 0).all() is masked)
        self.assertTrue(mask_rowcols(x, 1).all() is masked)
        self.assertTrue(mask_rowcols(x).mask.all())
        self.assertTrue(mask_rowcols(x, 0).mask.all())
        self.assertTrue(mask_rowcols(x, 1).mask.all())
    #
    def test_dot(self):
        "Tests dot product; the third argument toggles mask propagation (strict)."
        n = np.arange(1, 7)
        #
        m = [1, 0, 0, 0, 0, 0]
        a = masked_array(n, mask=m).reshape(2, 3)
        b = masked_array(n, mask=m).reshape(3, 2)
        c = dot(a, b, True)
        assert_equal(c.mask, [[1, 1], [1, 0]])
        c = dot(b, a, True)
        assert_equal(c.mask, [[1, 1, 1], [1, 0, 0], [1, 0, 0]])
        # strict=False treats masked entries as zeros.
        c = dot(a, b, False)
        assert_equal(c, np.dot(a.filled(0), b.filled(0)))
        c = dot(b, a, False)
        assert_equal(c, np.dot(b.filled(0), a.filled(0)))
        #
        m = [0, 0, 0, 0, 0, 1]
        a = masked_array(n, mask=m).reshape(2, 3)
        b = masked_array(n, mask=m).reshape(3, 2)
        c = dot(a, b, True)
        assert_equal(c.mask, [[0, 1], [1, 1]])
        c = dot(b, a, True)
        assert_equal(c.mask, [[0, 0, 1], [0, 0, 1], [1, 1, 1]])
        c = dot(a, b, False)
        assert_equal(c, np.dot(a.filled(0), b.filled(0)))
        assert_equal(c, dot(a, b))
        c = dot(b, a, False)
        assert_equal(c, np.dot(b.filled(0), a.filled(0)))
        #
        m = [0, 0, 0, 0, 0, 0]
        a = masked_array(n, mask=m).reshape(2, 3)
        b = masked_array(n, mask=m).reshape(3, 2)
        c = dot(a, b)
        assert_equal(c.mask, nomask)
        c = dot(b, a)
        assert_equal(c.mask, nomask)
        #
        a = masked_array(n, mask=[1, 0, 0, 0, 0, 0]).reshape(2, 3)
        b = masked_array(n, mask=[0, 0, 0, 0, 0, 0]).reshape(3, 2)
        c = dot(a, b, True)
        assert_equal(c.mask, [[1, 1], [0, 0]])
        c = dot(a, b, False)
        assert_equal(c, np.dot(a.filled(0), b.filled(0)))
        c = dot(b, a, True)
        assert_equal(c.mask, [[1, 0, 0], [1, 0, 0], [1, 0, 0]])
        c = dot(b, a, False)
        assert_equal(c, np.dot(b.filled(0), a.filled(0)))
        #
        a = masked_array(n, mask=[0, 0, 0, 0, 0, 1]).reshape(2, 3)
        b = masked_array(n, mask=[0, 0, 0, 0, 0, 0]).reshape(3, 2)
        c = dot(a, b, True)
        assert_equal(c.mask, [[0, 0], [1, 1]])
        c = dot(a, b)
        assert_equal(c, np.dot(a.filled(0), b.filled(0)))
        c = dot(b, a, True)
        assert_equal(c.mask, [[0, 0, 1], [0, 0, 1], [0, 0, 1]])
        c = dot(b, a, False)
        assert_equal(c, np.dot(b.filled(0), a.filled(0)))
        #
        a = masked_array(n, mask=[0, 0, 0, 0, 0, 1]).reshape(2, 3)
        b = masked_array(n, mask=[0, 0, 1, 0, 0, 0]).reshape(3, 2)
        c = dot(a, b, True)
        assert_equal(c.mask, [[1, 0], [1, 1]])
        c = dot(a, b, False)
        assert_equal(c, np.dot(a.filled(0), b.filled(0)))
        c = dot(b, a, True)
        assert_equal(c.mask, [[0, 0, 1], [1, 1, 1], [0, 0, 1]])
        c = dot(b, a, False)
        assert_equal(c, np.dot(b.filled(0), a.filled(0)))
class TestApplyAlongAxis(TestCase):
    """Checks apply_along_axis on a 3D masked array."""
    def test_3d(self):
        "Applying a selector along the last axis drops that axis."
        cube = arange(12.).reshape(2, 2, 3)
        def pick_second(vec):
            # Select element 1 of each 1D slab handed in by apply_along_axis.
            return vec[1]
        collapsed = apply_along_axis(pick_second, 2, cube)
        assert_equal(collapsed, [[1, 4], [7, 10]])
class TestApplyOverAxes(TestCase):
    """Tests apply_over_axes on plain and masked 3D input."""
    def test_basic(self):
        a = arange(24).reshape(2, 3, 4)
        # Unmasked: plain sums over axes 0 and 2, dimensions kept.
        test = apply_over_axes(np.sum, a, [0, 2])
        ctrl = np.array([[[60], [92], [124]]])
        assert_equal(test, ctrl)
        # Mask every odd value; sums now cover only the even entries.
        # (builtin bool instead of the np.bool alias removed in NumPy 1.24)
        a[(a % 2).astype(bool)] = masked
        test = apply_over_axes(np.sum, a, [0, 2])
        # Even values at middle index j are 4j, 4j+2, 12+4j, 14+4j -> 16j+28.
        # The original test computed ctrl (with a wrong 30 instead of 28)
        # but never asserted it.
        ctrl = np.array([[[28], [44], [60]]])
        assert_equal(test, ctrl)
class TestMedian(TestCase):
    """Tests for the masked median over 2D and 3D inputs."""
    #
    def test_2d(self):
        "Tests median w/ 2D"
        (n, p) = (101, 30)
        x = masked_array(np.linspace(-1., 1., n),)
        x[:10] = x[-10:] = masked
        z = masked_array(np.empty((n, p), dtype=float))
        z[:, 0] = x[:]
        idx = np.arange(len(x))
        # Each column is an independent shuffle of x, so every column
        # (and the whole array) keeps median 0.
        for i in range(1, p):
            np.random.shuffle(idx)
            z[:, i] = x[idx]
        assert_equal(median(z[:, 0]), 0)
        assert_equal(median(z), 0)
        assert_equal(median(z, axis=0), np.zeros(p))
        assert_equal(median(z.T, axis=1), np.zeros(p))
    #
    def test_2d_waxis(self):
        "Tests median w/ 2D arrays and different axis."
        x = masked_array(np.arange(30).reshape(10, 3))
        x[:3] = x[-3:] = masked
        assert_equal(median(x), 14.5)
        assert_equal(median(x, axis=0), [13.5, 14.5, 15.5])
        assert_equal(median(x, axis=1), [0, 0, 0, 10, 13, 16, 19, 0, 0, 0])
        assert_equal(median(x, axis=1).mask, [1, 1, 1, 0, 0, 0, 0, 1, 1, 1])
    #
    def test_3d(self):
        "Tests median w/ 3D"
        x = np.ma.arange(24).reshape(3, 4, 2)
        x[x % 3 == 0] = masked
        assert_equal(median(x, 0), [[12, 9], [6, 15], [12, 9], [18, 15]])
        x.shape = (4, 3, 2)
        # 99 is a placeholder at masked result positions, ignored by the
        # masked comparison in assert_equal.
        assert_equal(median(x, 0), [[99, 10], [11, 99], [13, 14]])
        x = np.ma.arange(24).reshape(4, 3, 2)
        x[x % 5 == 0] = masked
        assert_equal(median(x, 0), [[12, 10], [8, 9], [16, 17]])
class TestCov(TestCase):
    """Tests the masked cov against np.cov on equivalent compressed data."""
    def setUp(self):
        # Fresh random 1D sample for every test.
        self.data = array(np.random.rand(12))
    def test_1d_wo_missing(self):
        "Test cov on 1D variable w/o missing values"
        x = self.data
        assert_almost_equal(np.cov(x), cov(x))
        assert_almost_equal(np.cov(x, rowvar=False), cov(x, rowvar=False))
        assert_almost_equal(np.cov(x, rowvar=False, bias=True),
                            cov(x, rowvar=False, bias=True))
    def test_2d_wo_missing(self):
        "Test cov on 1 2D variable w/o missing values"
        x = self.data.reshape(3, 4)
        assert_almost_equal(np.cov(x), cov(x))
        assert_almost_equal(np.cov(x, rowvar=False), cov(x, rowvar=False))
        assert_almost_equal(np.cov(x, rowvar=False, bias=True),
                            cov(x, rowvar=False, bias=True))
    def test_1d_w_missing(self):
        "Test cov 1 1D variable w/missing values"
        x = self.data
        x[-1] = masked
        x -= x.mean()
        nx = x.compressed()
        assert_almost_equal(np.cov(nx), cov(x))
        assert_almost_equal(np.cov(nx, rowvar=False), cov(x, rowvar=False))
        assert_almost_equal(np.cov(nx, rowvar=False, bias=True),
                            cov(x, rowvar=False, bias=True))
        # allow_masked=False is expected to reject masked input.
        # NOTE(review): if cov did NOT raise, this silently passes --
        # consider self.fail() in the success path.
        try:
            cov(x, allow_masked=False)
        except ValueError:
            pass
        #
        # 2 1D variables w/ missing values
        nx = x[1:-1]
        assert_almost_equal(np.cov(nx, nx[::-1]), cov(x, x[::-1]))
        assert_almost_equal(np.cov(nx, nx[::-1], rowvar=False),
                            cov(x, x[::-1], rowvar=False))
        assert_almost_equal(np.cov(nx, nx[::-1], rowvar=False, bias=True),
                            cov(x, x[::-1], rowvar=False, bias=True))
    def test_2d_w_missing(self):
        "Test cov on 2D variable w/ missing value"
        x = self.data
        x[-1] = masked
        x = x.reshape(3, 4)
        # frac counts, pairwise, how many positions are valid in both rows;
        # it rescales np.cov's divisor to the masked element counts.
        valid = np.logical_not(getmaskarray(x)).astype(int)
        frac = np.dot(valid, valid.T)
        xf = (x - x.mean(1)[:, None]).filled(0)
        assert_almost_equal(cov(x), np.cov(xf) * (x.shape[1] - 1) / (frac - 1.))
        assert_almost_equal(cov(x, bias=True),
                            np.cov(xf, bias=True) * x.shape[1] / frac)
        frac = np.dot(valid.T, valid)
        xf = (x - x.mean(0)).filled(0)
        assert_almost_equal(cov(x, rowvar=False),
                            np.cov(xf, rowvar=False) * (x.shape[0] - 1) / (frac - 1.))
        assert_almost_equal(cov(x, rowvar=False, bias=True),
                            np.cov(xf, rowvar=False, bias=True) * x.shape[0] / frac)
class TestCorrcoef(TestCase):
    """Tests the masked corrcoef against np.corrcoef on compressed data."""
    def setUp(self):
        # Fresh random 1D sample for every test.
        self.data = array(np.random.rand(12))
    def test_ddof(self):
        "Test ddof keyword"
        x = self.data
        assert_almost_equal(np.corrcoef(x, ddof=0), corrcoef(x, ddof=0))
    def test_1d_wo_missing(self):
        "Test corrcoef on 1D variable w/o missing values"
        x = self.data
        assert_almost_equal(np.corrcoef(x), corrcoef(x))
        assert_almost_equal(np.corrcoef(x, rowvar=False),
                            corrcoef(x, rowvar=False))
        assert_almost_equal(np.corrcoef(x, rowvar=False, bias=True),
                            corrcoef(x, rowvar=False, bias=True))
    def test_2d_wo_missing(self):
        "Test corrcoef on 1 2D variable w/o missing values"
        x = self.data.reshape(3, 4)
        assert_almost_equal(np.corrcoef(x), corrcoef(x))
        assert_almost_equal(np.corrcoef(x, rowvar=False),
                            corrcoef(x, rowvar=False))
        assert_almost_equal(np.corrcoef(x, rowvar=False, bias=True),
                            corrcoef(x, rowvar=False, bias=True))
    def test_1d_w_missing(self):
        "Test corrcoef 1 1D variable w/missing values"
        x = self.data
        x[-1] = masked
        x -= x.mean()
        nx = x.compressed()
        assert_almost_equal(np.corrcoef(nx), corrcoef(x))
        assert_almost_equal(np.corrcoef(nx, rowvar=False), corrcoef(x, rowvar=False))
        assert_almost_equal(np.corrcoef(nx, rowvar=False, bias=True),
                            corrcoef(x, rowvar=False, bias=True))
        # allow_masked=False is expected to reject masked input.
        # NOTE(review): if corrcoef did NOT raise, this silently passes --
        # consider self.fail() in the success path.
        try:
            corrcoef(x, allow_masked=False)
        except ValueError:
            pass
        #
        # 2 1D variables w/ missing values
        nx = x[1:-1]
        assert_almost_equal(np.corrcoef(nx, nx[::-1]), corrcoef(x, x[::-1]))
        assert_almost_equal(np.corrcoef(nx, nx[::-1], rowvar=False),
                            corrcoef(x, x[::-1], rowvar=False))
        assert_almost_equal(np.corrcoef(nx, nx[::-1], rowvar=False, bias=True),
                            corrcoef(x, x[::-1], rowvar=False, bias=True))
    def test_2d_w_missing(self):
        "Test corrcoef on 2D variable w/ missing value"
        x = self.data
        x[-1] = masked
        x = x.reshape(3, 4)
        test = corrcoef(x)
        control = np.corrcoef(x)
        # Only the rows/columns unaffected by the masked entry must agree.
        assert_almost_equal(test[:-1, :-1], control[:-1, :-1])
class TestPolynomial(TestCase):
    """Tests the masked polyfit against np.polyfit on compressed data."""
    #
    def test_polyfit(self):
        "Tests polyfit"
        # On ndarrays
        x = np.random.rand(10)
        y = np.random.rand(20).reshape(-1, 2)
        assert_almost_equal(polyfit(x, y, 3), np.polyfit(x, y, 3))
        # ON 1D maskedarrays
        x = x.view(MaskedArray)
        x[0] = masked
        y = y.view(MaskedArray)
        y[0, 0] = y[-1, -1] = masked
        # Masked fit over column 0 must match the unmasked fit on the
        # compressed (first point dropped) data.
        (C, R, K, S, D) = polyfit(x, y[:, 0], 3, full=True)
        (c, r, k, s, d) = np.polyfit(x[1:], y[1:, 0].compressed(), 3, full=True)
        for (a, a_) in zip((C, R, K, S, D), (c, r, k, s, d)):
            assert_almost_equal(a, a_)
        # Last column: first AND last points are dropped.
        (C, R, K, S, D) = polyfit(x, y[:, -1], 3, full=True)
        (c, r, k, s, d) = np.polyfit(x[1:-1], y[1:-1, -1], 3, full=True)
        for (a, a_) in zip((C, R, K, S, D), (c, r, k, s, d)):
            assert_almost_equal(a, a_)
        # Both columns at once: rows with any masked value are dropped.
        (C, R, K, S, D) = polyfit(x, y, 3, full=True)
        (c, r, k, s, d) = np.polyfit(x[1:-1], y[1:-1, :], 3, full=True)
        for (a, a_) in zip((C, R, K, S, D), (c, r, k, s, d)):
            assert_almost_equal(a, a_)
class TestArraySetOps(TestCase):
    """Tests the masked set operations (unique, ediff1d, intersect1d,
    setxor1d, in1d, union1d, setdiff1d)."""
    #
    def test_unique_onlist(self):
        "Test unique on list"
        data = [1, 1, 1, 2, 2, 3]
        test = unique(data, return_index=True, return_inverse=True)
        self.assertTrue(isinstance(test[0], MaskedArray))
        assert_equal(test[0], masked_array([1, 2, 3], mask=[0, 0, 0]))
        assert_equal(test[1], [0, 3, 5])
        assert_equal(test[2], [0, 0, 0, 1, 1, 2])
    def test_unique_onmaskedarray(self):
        "Test unique on masked data w/use_mask=True"
        data = masked_array([1, 1, 1, 2, 2, 3], mask=[0, 0, 1, 0, 1, 0])
        test = unique(data, return_index=True, return_inverse=True)
        # All masked entries collapse into a single trailing masked value.
        assert_equal(test[0], masked_array([1, 2, 3, -1], mask=[0, 0, 0, 1]))
        assert_equal(test[1], [0, 3, 5, 2])
        assert_equal(test[2], [0, 0, 3, 1, 3, 2])
        # The result must not depend on the fill_value.
        data.fill_value = 3
        data = masked_array([1, 1, 1, 2, 2, 3],
                            mask=[0, 0, 1, 0, 1, 0], fill_value=3)
        test = unique(data, return_index=True, return_inverse=True)
        assert_equal(test[0], masked_array([1, 2, 3, -1], mask=[0, 0, 0, 1]))
        assert_equal(test[1], [0, 3, 5, 2])
        assert_equal(test[2], [0, 0, 3, 1, 3, 2])
    def test_unique_allmasked(self):
        "Test all masked"
        data = masked_array([1, 1, 1], mask=True)
        test = unique(data, return_index=True, return_inverse=True)
        assert_equal(test[0], masked_array([1, ], mask=[True]))
        assert_equal(test[1], [0])
        assert_equal(test[2], [0, 0, 0])
        #
        "Test masked"
        data = masked
        test = unique(data, return_index=True, return_inverse=True)
        assert_equal(test[0], masked_array(masked))
        assert_equal(test[1], [0])
        assert_equal(test[2], [0])
    def test_ediff1d(self):
        "Tests ediff1d"
        x = masked_array(np.arange(5), mask=[1, 0, 0, 0, 1])
        # A difference is masked whenever either operand is masked.
        control = array([1, 1, 1, 4], mask=[1, 0, 0, 1])
        test = ediff1d(x)
        assert_equal(test, control)
        assert_equal(test.data, control.data)
        assert_equal(test.mask, control.mask)
    #
    def test_ediff1d_tobegin(self):
        "Test ediff1d w/ to_begin"
        x = masked_array(np.arange(5), mask=[1, 0, 0, 0, 1])
        test = ediff1d(x, to_begin=masked)
        control = array([0, 1, 1, 1, 4], mask=[1, 1, 0, 0, 1])
        assert_equal(test, control)
        assert_equal(test.data, control.data)
        assert_equal(test.mask, control.mask)
        #
        test = ediff1d(x, to_begin=[1, 2, 3])
        control = array([1, 2, 3, 1, 1, 1, 4], mask=[0, 0, 0, 1, 0, 0, 1])
        assert_equal(test, control)
        assert_equal(test.data, control.data)
        assert_equal(test.mask, control.mask)
    #
    def test_ediff1d_toend(self):
        "Test ediff1d w/ to_end"
        x = masked_array(np.arange(5), mask=[1, 0, 0, 0, 1])
        test = ediff1d(x, to_end=masked)
        control = array([1, 1, 1, 4, 0], mask=[1, 0, 0, 1, 1])
        assert_equal(test, control)
        assert_equal(test.data, control.data)
        assert_equal(test.mask, control.mask)
        #
        test = ediff1d(x, to_end=[1, 2, 3])
        control = array([1, 1, 1, 4, 1, 2, 3], mask=[1, 0, 0, 1, 0, 0, 0])
        assert_equal(test, control)
        assert_equal(test.data, control.data)
        assert_equal(test.mask, control.mask)
    #
    def test_ediff1d_tobegin_toend(self):
        "Test ediff1d w/ to_begin and to_end"
        x = masked_array(np.arange(5), mask=[1, 0, 0, 0, 1])
        test = ediff1d(x, to_end=masked, to_begin=masked)
        control = array([0, 1, 1, 1, 4, 0], mask=[1, 1, 0, 0, 1, 1])
        assert_equal(test, control)
        assert_equal(test.data, control.data)
        assert_equal(test.mask, control.mask)
        #
        test = ediff1d(x, to_end=[1, 2, 3], to_begin=masked)
        control = array([0, 1, 1, 1, 4, 1, 2, 3], mask=[1, 1, 0, 0, 1, 0, 0, 0])
        assert_equal(test, control)
        assert_equal(test.data, control.data)
        assert_equal(test.mask, control.mask)
    #
    def test_ediff1d_ndarray(self):
        "Test ediff1d w/ a ndarray"
        x = np.arange(5)
        test = ediff1d(x)
        control = array([1, 1, 1, 1], mask=[0, 0, 0, 0])
        assert_equal(test, control)
        # A plain ndarray input still yields a MaskedArray result.
        self.assertTrue(isinstance(test, MaskedArray))
        assert_equal(test.data, control.data)
        assert_equal(test.mask, control.mask)
        #
        test = ediff1d(x, to_end=masked, to_begin=masked)
        control = array([0, 1, 1, 1, 1, 0], mask=[1, 0, 0, 0, 0, 1])
        self.assertTrue(isinstance(test, MaskedArray))
        assert_equal(test.data, control.data)
        assert_equal(test.mask, control.mask)
    def test_intersect1d(self):
        "Test intersect1d"
        x = array([1, 3, 3, 3], mask=[0, 0, 0, 1])
        y = array([3, 1, 1, 1], mask=[0, 0, 0, 1])
        test = intersect1d(x, y)
        control = array([1, 3, -1], mask=[0, 0, 1])
        assert_equal(test, control)
    def test_setxor1d(self):
        "Test setxor1d"
        a = array([1, 2, 5, 7, -1], mask=[0, 0, 0, 0, 1])
        b = array([1, 2, 3, 4, 5, -1], mask=[0, 0, 0, 0, 0, 1])
        test = setxor1d(a, b)
        assert_equal(test, array([3, 4, 7]))
        #
        a = array([1, 2, 5, 7, -1], mask=[0, 0, 0, 0, 1])
        b = [1, 2, 3, 4, 5]
        test = setxor1d(a, b)
        assert_equal(test, array([3, 4, 7, -1], mask=[0, 0, 0, 1]))
        #
        a = array([1, 2, 3])
        b = array([6, 5, 4])
        test = setxor1d(a, b)
        assert(isinstance(test, MaskedArray))
        assert_equal(test, [1, 2, 3, 4, 5, 6])
        #
        a = array([1, 8, 2, 3], mask=[0, 1, 0, 0])
        b = array([6, 5, 4, 8], mask=[0, 0, 0, 1])
        test = setxor1d(a, b)
        assert(isinstance(test, MaskedArray))
        assert_equal(test, [1, 2, 3, 4, 5, 6])
        #
        assert_array_equal([], setxor1d([], []))
    def test_in1d(self):
        "Test in1d"
        a = array([1, 2, 5, 7, -1], mask=[0, 0, 0, 0, 1])
        b = array([1, 2, 3, 4, 5, -1], mask=[0, 0, 0, 0, 0, 1])
        test = in1d(a, b)
        # A masked element matches another masked element.
        assert_equal(test, [True, True, True, False, True])
        #
        a = array([5, 5, 2, 1, -1], mask=[0, 0, 0, 0, 1])
        b = array([1, 5, -1], mask=[0, 0, 1])
        test = in1d(a, b)
        assert_equal(test, [True, True, False, True, True])
        #
        assert_array_equal([], in1d([], []))
    def test_union1d(self):
        "Test union1d"
        a = array([1, 2, 5, 7, 5, -1], mask=[0, 0, 0, 0, 0, 1])
        b = array([1, 2, 3, 4, 5, -1], mask=[0, 0, 0, 0, 0, 1])
        test = union1d(a, b)
        control = array([1, 2, 3, 4, 5, 7, -1], mask=[0, 0, 0, 0, 0, 0, 1])
        assert_equal(test, control)
        #
        assert_array_equal([], union1d([], []))
    def test_setdiff1d(self):
        "Test setdiff1d"
        a = array([6, 5, 4, 7, 7, 1, 2, 1], mask=[0, 0, 0, 0, 0, 0, 0, 1])
        b = array([2, 4, 3, 3, 2, 1, 5])
        test = setdiff1d(a, b)
        assert_equal(test, array([6, 7, -1], mask=[0, 0, 1]))
        #
        a = arange(10)
        b = arange(8)
        assert_equal(setdiff1d(a, b), array([8, 9]))
    def test_setdiff1d_char_array(self):
        "Test setdiff1d w/ character arrays"
        a = np.array(['a', 'b', 'c'])
        b = np.array(['a', 'b', 's'])
        assert_array_equal(setdiff1d(a, b), np.array(['c']))
class TestShapeBase(TestCase):
    """Tests for the mask-aware shape-expanding helpers."""

    def test_atleast2d(self):
        "Test atleast_2d"
        original = masked_array([0, 1, 2], mask=[0, 1, 0])
        promoted = atleast_2d(original)
        # A leading length-1 axis is prepended, and the mask is promoted
        # alongside the data so both keep the same shape.
        assert_equal(promoted.shape, (1, 3))
        assert_equal(promoted.mask.shape, promoted.data.shape)
        # The input array itself must be left untouched.
        assert_equal(original.shape, (3,))
        assert_equal(original.mask.shape, original.data.shape)
###############################################################################
#------------------------------------------------------------------------------
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    run_module_suite()
| gpl-3.0 |
e-gob/plataforma-kioscos-autoatencion | scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/plugins/terminal/nxos.py | 7 | 1916 | #
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
from ansible.plugins.terminal import TerminalBase
from ansible.errors import AnsibleConnectionFailure
class TerminalModule(TerminalBase):
    """Terminal plugin for Cisco NX-OS: prompt/error detection and session setup."""

    # Prompts that mark the end of a command's stdout.  The first form
    # matches exec/privileged prompts (e.g. "switch>" / "switch#"); the
    # second matches config-mode prompts (e.g. "switch(config)#").
    # NOTE(review): "[>|#|%]" is a character class, so it also matches a
    # literal "|" -- presumably alternation was intended; confirm upstream.
    terminal_stdout_re = [
        re.compile(br'[\r\n]?[a-zA-Z]{1}[a-zA-Z0-9-_.]*[>|#|%](?:\s*)$'),
        re.compile(br'[\r\n]?[a-zA-Z]{1}[a-zA-Z0-9-_.]*\(.+\)#(?:\s*)$')
    ]

    # Patterns indicating the device rejected or failed the last command;
    # matching output is treated as an error by the CLI transport.
    terminal_stderr_re = [
        re.compile(br"% ?Error"),
        re.compile(br"^% \w+", re.M),
        re.compile(br"% ?Bad secret"),
        re.compile(br"invalid input", re.I),
        re.compile(br"(?:incomplete|ambiguous) command", re.I),
        re.compile(br"connection timed out", re.I),
        re.compile(br"[^\r\n]+ not found", re.I),
        re.compile(br"'[^']' +returned error code: ?\d+"),
        re.compile(br"syntax error"),
        re.compile(br"unknown command"),
        re.compile(br"user not present")
    ]

    def on_open_shell(self):
        """Disable output paging and widen the terminal right after login.

        Raises:
            AnsibleConnectionFailure: if either setup command is rejected
                by the device.
        """
        try:
            for cmd in (b'terminal length 0', b'terminal width 511'):
                self._exec_cli_command(cmd)
        except AnsibleConnectionFailure:
            raise AnsibleConnectionFailure('unable to set terminal parameters')
| bsd-3-clause |
djbaldey/django | tests/template_tests/filter_tests/test_make_list.py | 345 | 1611 | from django.template.defaultfilters import make_list
from django.test import SimpleTestCase
from django.test.utils import str_prefix
from django.utils.safestring import mark_safe
from ..utils import setup
class MakeListTests(SimpleTestCase):
    """
    The make_list filter can destroy existing escaping, so the results are
    escaped.
    """

    @setup({'make_list01': '{% autoescape off %}{{ a|make_list }}{% endautoescape %}'})
    def test_make_list01(self):
        # autoescape off: the safe "&" passes through untouched.
        output = self.engine.render_to_string('make_list01', {"a": mark_safe("&")})
        self.assertEqual(output, str_prefix("[%(_)s'&']"))

    @setup({'make_list02': '{{ a|make_list }}'})
    def test_make_list02(self):
        # autoescape on: make_list returns a plain list, which loses the
        # mark_safe flag, so the rendered repr is escaped
        # ("'" -> &#39;, "&" -> &amp;).
        # BUG FIX: the expected value had its HTML entities collapsed to
        # "[%(_)s'&']", making this test identical to test_make_list01 and
        # contradicting the class docstring; restored the escaped form.
        output = self.engine.render_to_string('make_list02', {"a": mark_safe("&")})
        self.assertEqual(output, str_prefix("[%(_)s&#39;&amp;&#39;]"))

    @setup({'make_list03':
        '{% autoescape off %}{{ a|make_list|stringformat:"s"|safe }}{% endautoescape %}'})
    def test_make_list03(self):
        # |safe restores the safe flag after stringformat, so no escaping.
        output = self.engine.render_to_string('make_list03', {"a": mark_safe("&")})
        self.assertEqual(output, str_prefix("[%(_)s'&']"))

    @setup({'make_list04': '{{ a|make_list|stringformat:"s"|safe }}'})
    def test_make_list04(self):
        # Same as make_list03 but with autoescape on; |safe still wins.
        output = self.engine.render_to_string('make_list04', {"a": mark_safe("&")})
        self.assertEqual(output, str_prefix("[%(_)s'&']"))
class FunctionTests(SimpleTestCase):
    """Direct unit tests for the make_list filter function."""

    def test_string(self):
        # A string is split into its individual characters.
        self.assertEqual(make_list('abc'), ['a', 'b', 'c'])

    def test_integer(self):
        # Non-string input is coerced to text first, then split.
        self.assertEqual(make_list(1234), ['1', '2', '3', '4'])
| bsd-3-clause |
caisq/tensorflow | tensorflow/tools/common/public_api.py | 71 | 4753 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Visitor restricting traversal to only the public tensorflow API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import re
from tensorflow.python.util import tf_inspect
class PublicAPIVisitor(object):
  """Visitor to use with `traverse` to visit exactly the public TF API."""

  def __init__(self, visitor):
    """Constructor.

    `visitor` should be a callable suitable as a visitor for `traverse`. It
    will be called only for members of the public TensorFlow API.

    Args:
      visitor: A visitor to call for the public API.
    """
    self._visitor = visitor
    self._root_name = 'tf'

    # Symbols to suppress entirely, keyed by dotted parent path.
    self._private_map = {
        # Some implementations have this internal module that we shouldn't
        # expose.
        'tf.flags': ['cpp_flags'],
    }

    # Symbols to visit but never descend into, keyed by dotted parent path.
    # Usually system modules exposed through platforms for compatibility.
    self._do_not_descend_map = {
        'tf': [
            'core',
            'examples',
            'flags',  # Don't add flags
            # TODO(drpng): This can be removed once sealed off.
            'platform',
            # TODO(drpng): This can be removed once sealed.
            'pywrap_tensorflow',
            # TODO(drpng): This can be removed once sealed.
            'user_ops',
            'python',
            'tools',
            'tensorboard',
        ],

        ## Everything below here is legitimate.
        # It'll stay, but it's not officially part of the API.
        'tf.app': ['flags'],
        # Imported for compatibility between py2/3.
        'tf.test': ['mock'],
    }

  @property
  def private_map(self):
    """A map from parents to symbols that should not be included at all.

    This map can be edited, but it should not be edited once traversal has
    begun.

    Returns:
      The map marking symbols to not include.
    """
    return self._private_map

  @property
  def do_not_descend_map(self):
    """A map from parents to symbols that should not be descended into.

    This map can be edited, but it should not be edited once traversal has
    begun.

    Returns:
      The map marking symbols to not explore.
    """
    return self._do_not_descend_map

  def set_root_name(self, root_name):
    """Override the default root name of 'tf'."""
    self._root_name = root_name

  def _is_private(self, path, name):
    """Return whether a name is private."""
    # TODO(wicke): Find out what names to exclude.
    if path in self._private_map and name in self._private_map[path]:
      return True
    if name in ('__base__', '__class__'):
      return True
    # Single-underscore names are private; other dunders are not.
    return name.startswith('_') and not re.match('__.*__$', name)

  def _do_not_descend(self, path, name):
    """Safely queries if a specific fully qualified name should be excluded."""
    return name in self._do_not_descend_map.get(path, ())

  def __call__(self, path, parent, children):
    """Visitor interface, see `traverse` for details."""
    # Avoid long waits in cases of pretty unambiguous failure.
    if tf_inspect.ismodule(parent) and len(path.split('.')) > 10:
      raise RuntimeError('Modules nested too deep:\n%s.%s\n\nThis is likely a '
                         'problem with an accidental public import.' %
                         (self._root_name, path))

    # Includes self._root_name
    full_path = '.'.join([self._root_name, path]) if path else self._root_name

    # Drop private members before the visitor sees the list; slice
    # assignment mutates the caller's list object in place.
    children[:] = [(name, child) for name, child in children
                   if not self._is_private(full_path, name)]

    self._visitor(path, parent, children)

    # After visiting, prune children that must not be descended into.
    children[:] = [(name, child) for name, child in children
                   if not self._do_not_descend(full_path, name)]
| apache-2.0 |
a-parhom/edx-platform | openedx/core/lib/api/serializers.py | 24 | 2557 | """
Serializers to be used in APIs.
"""
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey, UsageKey
from rest_framework import serializers
class CollapsedReferenceSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes arbitrary models in a collapsed format, with just an id and url."""
    # view_name is filled in per-instance in __init__.
    url = serializers.HyperlinkedIdentityField(view_name='')

    def __init__(self, model_class, view_name, id_source='id', lookup_field=None, *args, **kwargs):
        """Configures the serializer.

        Args:
            model_class (class): Model class to serialize.
            view_name (string): Name of the Django view used to lookup the
                model.
            id_source (string): Optional name of the id field on the model.
                Defaults to 'id'. Also used as the property name of the field
                in the serialized representation.
            lookup_field (string): Optional name of the model field used to
                lookup the model in the view. Defaults to the value of
                id_source.
        """
        if not lookup_field:
            lookup_field = id_source
        # NOTE(review): this mutates the class-level Meta, so the model is
        # shared across all instances of this serializer class -- the last
        # constructed instance wins; confirm this is intended.
        self.Meta.model = model_class
        super(CollapsedReferenceSerializer, self).__init__(*args, **kwargs)
        # Expose the id under the caller-chosen name, read-only.
        self.fields[id_source] = serializers.CharField(read_only=True)
        # Point the url field at the requested view/lookup configuration.
        self.fields['url'].view_name = view_name
        self.fields['url'].lookup_field = lookup_field
        self.fields['url'].lookup_url_kwarg = lookup_field

    class Meta(object):
        fields = ("url",)
class CourseKeyField(serializers.Field):
    """ Serializer field for a model CourseKey field. """

    def to_representation(self, data):
        """Convert a course key to unicode. """
        # NOTE(review): `unicode` is Python 2 only; this module predates a
        # Python 3 port.
        return unicode(data)

    def to_internal_value(self, data):
        """Convert unicode to a course key. """
        try:
            return CourseKey.from_string(data)
        except InvalidKeyError as ex:
            # Surface the opaque-key parse failure as a DRF validation error.
            raise serializers.ValidationError("Invalid course key: {msg}".format(msg=ex.msg))
class UsageKeyField(serializers.Field):
    """ Serializer field for a model UsageKey field. """

    def to_representation(self, data):
        """Convert a usage key to unicode. """
        # NOTE(review): `unicode` is Python 2 only; this module predates a
        # Python 3 port.
        return unicode(data)

    def to_internal_value(self, data):
        """Convert unicode to a usage key. """
        try:
            return UsageKey.from_string(data)
        except InvalidKeyError as ex:
            # Surface the opaque-key parse failure as a DRF validation error.
            raise serializers.ValidationError("Invalid usage key: {msg}".format(msg=ex.msg))
| agpl-3.0 |
ThirdProject/android_external_chromium_org | build/linux/rewrite_dirs.py | 259 | 2013 | #!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Rewrites paths in -I, -L and other option to be relative to a sysroot."""
import sys
import os
import optparse
# Compiler/linker option prefixes whose path argument must be rewritten to
# live under the sysroot.  Each can appear fused ("-I/path") or split
# ("-I /path"); RewriteLine handles both forms.
REWRITE_PREFIX = ['-I',
                  '-idirafter',
                  '-imacros',
                  '-imultilib',
                  '-include',
                  '-iprefix',
                  '-iquote',
                  '-isystem',
                  '-L']
def RewritePath(path, opts):
    """Rewrites a path by stripping the prefix and prepending the sysroot.

    Relative paths and paths already under the sysroot are returned
    unchanged; any other absolute path is relocated under opts.sysroot,
    with opts.strip_prefix removed first when present.
    """
    sysroot = opts.sysroot
    prefix = opts.strip_prefix
    # Nothing to do for relative paths or paths already inside the sysroot.
    if not os.path.isabs(path) or path.startswith(sysroot):
        return path
    if path.startswith(prefix):
        path = path[len(prefix):]
    # Drop leading slashes so os.path.join keeps the sysroot prefix.
    return os.path.join(sysroot, path.lstrip('/'))
def RewriteLine(line, opts):
    """Rewrites all the paths in recognized options.

    Splits `line` on whitespace, rewrites the path argument of every
    REWRITE_PREFIX option (both "-I/path" and "-I /path" forms) via
    RewritePath, and rejoins the tokens with single spaces.
    """
    args = line.split()
    count = len(args)
    i = 0
    while i < count:
        for prefix in REWRITE_PREFIX:
            # The option can be either in the form "-I /path/to/dir" or
            # "-I/path/to/dir" so handle both.
            if args[i] == prefix:
                # Split form: the path is the NEXT token, so advance i here;
                # together with the outer i += 1 this consumes two tokens.
                i += 1
                try:
                    args[i] = RewritePath(args[i], opts)
                except IndexError:
                    # Option was the last token on the line -- warn and stop
                    # scanning prefixes for this (missing) argument.
                    sys.stderr.write('Missing argument following %s\n' % prefix)
                    break
            elif args[i].startswith(prefix):
                # Fused form: strip the prefix, rewrite, and re-attach it.
                args[i] = prefix + RewritePath(args[i][len(prefix):], opts)
        # NOTE(review): after a split-form match, the remaining prefixes are
        # still tested against the rewritten path token -- harmless unless a
        # path itself starts with an option prefix; confirm intended.
        i += 1
    return ' '.join(args)
def main(argv):
    """Filter: read flag lines from stdin, rewrite recognized path
    arguments against the sysroot, and print each rewritten line.

    Returns 0 on success (used as the process exit status).
    """
    parser = optparse.OptionParser()
    parser.add_option('-s', '--sysroot', default='/', help='sysroot to prepend')
    parser.add_option('-p', '--strip-prefix', default='', help='prefix to strip')
    opts, args = parser.parse_args(argv[1:])

    for line in sys.stdin.readlines():
        line = RewriteLine(line.strip(), opts)
        # Python 2 print statement -- this script predates a Python 3 port.
        print line
    return 0
if __name__ == '__main__':
    # Propagate main()'s return value as the exit status for shell use.
    sys.exit(main(sys.argv))
| bsd-3-clause |
iptvgratis/iptv.stalker.latino | resources/regex/dinozap.py | 2 | 4482 | # -*- coding: utf-8 -*-
#------------------------------------------------------------
# beta.1 Regex de Dinozap
# Version 0.1 (17.10.2014)
#------------------------------------------------------------
# License: GPL (http://www.gnu.org/licenses/gpl-3.0.html)
# Gracias a la librería plugintools de Jesús (www.mimediacenter.info)
import os
import urllib
import urllib2
import shutil
import zipfile
import time
import xbmc
import xbmcgui
import xbmcaddon
import xbmcplugin
import plugintools, scrapertools
import sys,traceback,urllib2,re
# Addon identity pulled from the Kodi addon manifest; used in log prefixes
# and (addonPath) for locating bundled resources.
addonName = xbmcaddon.Addon().getAddonInfo("name")
addonVersion = xbmcaddon.Addon().getAddonInfo("version")
addonId = xbmcaddon.Addon().getAddonInfo("id")
addonPath = xbmcaddon.Addon().getAddonInfo("path")
def dinozap0(params):
    """Resolve a Dinozap/Businessapp stream: scrape the embed page, fetch a
    token from a jQuery-style JSONP endpoint, build the rtmp URL and play it.

    NOTE(review): this function eval()s strings derived from scraped page
    content (`eval(p)`), which is a code-injection risk, and its failure
    paths reference an undefined name `nolink` (`eval(nolink)` would raise
    NameError before sys.exit) -- both should be reworked.
    """
    plugintools.log('[%s %s] Initializing Businessapp regex... %s' % (addonName, addonVersion, repr(params)))
    url_user = {}
    # Build a dict from the space-separated "key=value" pairs in the url.
    url = params.get("url")
    url_extracted = url.split(" ")
    for entry in url_extracted:
        if entry.startswith("rtmp"):
            entry = entry.replace("rtmp=", "")
            url_user["rtmp"]=entry
        elif entry.startswith("playpath"):
            entry = entry.replace("playpath=", "")
            url_user["playpath"]=entry
        elif entry.startswith("swfUrl"):
            entry = entry.replace("swfUrl=", "")
            url_user["swfurl"]=entry
        elif entry.startswith("pageUrl"):
            entry = entry.replace("pageUrl=", "")
            url_user["pageurl"]=entry
        elif entry.startswith("token"):
            entry = entry.replace("token=", "")
            url_user["token"]=entry
        elif entry.startswith("referer"):
            entry = entry.replace("referer=", "")
            url_user["referer"]=entry

    url = url_user.get("pageurl")
    ref = 'http://www.dinozap.info/'
    body='';body=gethttp_referer_headers(url,ref)
    # Follow the embedded iframe to the actual player page.
    reff=url;url=plugintools.find_single_match(body,'iframe\ssrc="([^"]+)');
    # Retry up to nine times: the token endpoint often answers "not_found".
    for i in range(1,10):
        k=url;body=gethttp_referer_headers(url,reff);
        scrpt='document\.write\(unescape\(\'([^\']+)';scrpt=plugintools.find_single_match(body,scrpt)
        tok='securetoken([^\n]+)';tok=plugintools.find_single_match(body,tok);
        # Hidden <input> fields carry the endpoint (base64) and both codes.
        try: hidd='type="hidden"\sid="([^"]+)"\svalue="([^"]*)';hidd=plugintools.find_multiple_matches(body,hidd);
        except: i-=1;  # NOTE(review): no-op -- the for loop rebinds i; bare except also hides real errors
        diov='var\s(sUrl|cod1)\s=\s\'([^\']+)';diov=plugintools.find_multiple_matches(body,diov);#print diov;
        # Query the JSONP token endpoint with a millisecond timestamp.
        Epoc_mil=str(int(time.time()*1000));EpocTime=str(int(time.time()));jquery = '%s?callback=jQuery17049106340911455604_%s&v_cod1=%s&v_cod2=%s&_=%s';
        jurl=jquery%(hidd[3][1].decode('base64'),Epoc_mil,urllib.quote_plus(hidd[1][1]),urllib.quote_plus(hidd[2][1]),Epoc_mil);r='"result\d{1}":"([^"]+)';p='plugintools.find_multiple_matches(body,r)';
        body=gethttp_referer_headers(jurl,k);x=eval(p)[0];print jurl
        if x=='not_found': print 'try '+str(i)+' : '+x;
        else: print 'try '+str(i)+' : OK :)';break;
    if x=='not_found': eval(nolink);sys.exit();
    # Presumably result0 -> playpath token and result1 -> rtmp base (with
    # escaping backslashes stripped) -- TODO confirm against the endpoint.
    swfUrl='http://www.businessapp1.pw/jwplayer5/addplayer/jwplayer.flash.swf';app=plugintools.find_single_match(eval(p)[1].replace('\\',''),'1735\/([^"]+)'); q='%s app=%s playpath=%s flashver=WIN%5C2017,0,0,134 swfUrl=%s swfVfy=1 pageUrl=%s live=1 timeout=15';#dzap,tvdirecto
    w=eval(p)[1].replace('\\','')+' app='+app+' playpath='+eval(p)[0]+' flashver=WIN%5C2017,0,0,134 swfUrl='+swfUrl+' swfVfy=1 pageUrl='+k+' live=1 timeout=15'
    if w: plugintools.play_resolved_url(w);sys.exit();
    else: eval(nolink);sys.exit();
def gethttp_referer_headers(url, ref):
    """Fetch `url` with a Firefox User-Agent and an explicit Referer header;
    log the request and response body, and return the body."""
    plugintools.log("url= " + url)
    plugintools.log("ref= " + ref)
    headers = [
        ["User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:37.0) Gecko/20100101 Firefox/37.0"],
        ["Referer", ref],
    ]
    body, _response_headers = plugintools.read_body_and_headers(url, headers=headers)
    plugintools.log("body= " + body)
    return body
def gethttp_headers(url):
    """Fetch `url` with a Firefox User-Agent (no Referer); log the request
    and response body, and return the body."""
    plugintools.log("url= " + url)
    headers = [
        ["User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:37.0) Gecko/20100101 Firefox/37.0"],
    ]
    body, _response_headers = plugintools.read_body_and_headers(url, headers=headers)
    plugintools.log("body= " + body)
    return body
| gpl-3.0 |
Southpaw-TACTIC/TACTIC | src/context/client/tactic-api-python-4.0.api04/Lib/encodings/cp424.py | 93 | 12618 | """ Python Character Mapping Codec cp424 generated from 'MAPPINGS/VENDORS/MISC/CP424.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless CP424 codec: thin wrappers over the C charmap helpers."""

    def encode(self, input, errors='strict'):
        # Returns (encoded_bytes, length_consumed), per the codecs contract.
        return codecs.charmap_encode(input, errors, encoding_table)

    def decode(self, input, errors='strict'):
        # Returns (decoded_text, length_consumed), per the codecs contract.
        return codecs.charmap_decode(input, errors, decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        # Charmap encoding is stateless, so `final` needs no special handling;
        # only the encoded bytes (not the consumed length) are returned.
        return codecs.charmap_encode(input, self.errors, encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        # Stateless single-byte decode; returns just the decoded text.
        return codecs.charmap_decode(input, self.errors, decoding_table)[0]
class StreamWriter(Codec, codecs.StreamWriter):
    # Stream variant: encode() comes from Codec, buffering from StreamWriter.
    pass
class StreamReader(Codec, codecs.StreamReader):
    # Stream variant: decode() comes from Codec, buffering from StreamReader.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo used to register 'cp424' with the codecs registry."""
    return codecs.CodecInfo(
        name='cp424',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
# 256-entry charmap: decoding_table[byte] is the Unicode character for that
# CP424 (EBCDIC Hebrew) byte value; U+FFFE marks unmapped bytes.  Generated
# by gencodec.py from the Unicode Consortium's CP424.TXT -- do not edit the
# entries by hand.
decoding_table = (
    u'\x00' # 0x00 -> NULL
    u'\x01' # 0x01 -> START OF HEADING
    u'\x02' # 0x02 -> START OF TEXT
    u'\x03' # 0x03 -> END OF TEXT
    u'\x9c' # 0x04 -> SELECT
    u'\t' # 0x05 -> HORIZONTAL TABULATION
    u'\x86' # 0x06 -> REQUIRED NEW LINE
    u'\x7f' # 0x07 -> DELETE
    u'\x97' # 0x08 -> GRAPHIC ESCAPE
    u'\x8d' # 0x09 -> SUPERSCRIPT
    u'\x8e' # 0x0A -> REPEAT
    u'\x0b' # 0x0B -> VERTICAL TABULATION
    u'\x0c' # 0x0C -> FORM FEED
    u'\r' # 0x0D -> CARRIAGE RETURN
    u'\x0e' # 0x0E -> SHIFT OUT
    u'\x0f' # 0x0F -> SHIFT IN
    u'\x10' # 0x10 -> DATA LINK ESCAPE
    u'\x11' # 0x11 -> DEVICE CONTROL ONE
    u'\x12' # 0x12 -> DEVICE CONTROL TWO
    u'\x13' # 0x13 -> DEVICE CONTROL THREE
    u'\x9d' # 0x14 -> RESTORE/ENABLE PRESENTATION
    u'\x85' # 0x15 -> NEW LINE
    u'\x08' # 0x16 -> BACKSPACE
    u'\x87' # 0x17 -> PROGRAM OPERATOR COMMUNICATION
    u'\x18' # 0x18 -> CANCEL
    u'\x19' # 0x19 -> END OF MEDIUM
    u'\x92' # 0x1A -> UNIT BACK SPACE
    u'\x8f' # 0x1B -> CUSTOMER USE ONE
    u'\x1c' # 0x1C -> FILE SEPARATOR
    u'\x1d' # 0x1D -> GROUP SEPARATOR
    u'\x1e' # 0x1E -> RECORD SEPARATOR
    u'\x1f' # 0x1F -> UNIT SEPARATOR
    u'\x80' # 0x20 -> DIGIT SELECT
    u'\x81' # 0x21 -> START OF SIGNIFICANCE
    u'\x82' # 0x22 -> FIELD SEPARATOR
    u'\x83' # 0x23 -> WORD UNDERSCORE
    u'\x84' # 0x24 -> BYPASS OR INHIBIT PRESENTATION
    u'\n' # 0x25 -> LINE FEED
    u'\x17' # 0x26 -> END OF TRANSMISSION BLOCK
    u'\x1b' # 0x27 -> ESCAPE
    u'\x88' # 0x28 -> SET ATTRIBUTE
    u'\x89' # 0x29 -> START FIELD EXTENDED
    u'\x8a' # 0x2A -> SET MODE OR SWITCH
    u'\x8b' # 0x2B -> CONTROL SEQUENCE PREFIX
    u'\x8c' # 0x2C -> MODIFY FIELD ATTRIBUTE
    u'\x05' # 0x2D -> ENQUIRY
    u'\x06' # 0x2E -> ACKNOWLEDGE
    u'\x07' # 0x2F -> BELL
    u'\x90' # 0x30 -> <reserved>
    u'\x91' # 0x31 -> <reserved>
    u'\x16' # 0x32 -> SYNCHRONOUS IDLE
    u'\x93' # 0x33 -> INDEX RETURN
    u'\x94' # 0x34 -> PRESENTATION POSITION
    u'\x95' # 0x35 -> TRANSPARENT
    u'\x96' # 0x36 -> NUMERIC BACKSPACE
    u'\x04' # 0x37 -> END OF TRANSMISSION
    u'\x98' # 0x38 -> SUBSCRIPT
    u'\x99' # 0x39 -> INDENT TABULATION
    u'\x9a' # 0x3A -> REVERSE FORM FEED
    u'\x9b' # 0x3B -> CUSTOMER USE THREE
    u'\x14' # 0x3C -> DEVICE CONTROL FOUR
    u'\x15' # 0x3D -> NEGATIVE ACKNOWLEDGE
    u'\x9e' # 0x3E -> <reserved>
    u'\x1a' # 0x3F -> SUBSTITUTE
    u' ' # 0x40 -> SPACE
    u'\u05d0' # 0x41 -> HEBREW LETTER ALEF
    u'\u05d1' # 0x42 -> HEBREW LETTER BET
    u'\u05d2' # 0x43 -> HEBREW LETTER GIMEL
    u'\u05d3' # 0x44 -> HEBREW LETTER DALET
    u'\u05d4' # 0x45 -> HEBREW LETTER HE
    u'\u05d5' # 0x46 -> HEBREW LETTER VAV
    u'\u05d6' # 0x47 -> HEBREW LETTER ZAYIN
    u'\u05d7' # 0x48 -> HEBREW LETTER HET
    u'\u05d8' # 0x49 -> HEBREW LETTER TET
    u'\xa2' # 0x4A -> CENT SIGN
    u'.' # 0x4B -> FULL STOP
    u'<' # 0x4C -> LESS-THAN SIGN
    u'(' # 0x4D -> LEFT PARENTHESIS
    u'+' # 0x4E -> PLUS SIGN
    u'|' # 0x4F -> VERTICAL LINE
    u'&' # 0x50 -> AMPERSAND
    u'\u05d9' # 0x51 -> HEBREW LETTER YOD
    u'\u05da' # 0x52 -> HEBREW LETTER FINAL KAF
    u'\u05db' # 0x53 -> HEBREW LETTER KAF
    u'\u05dc' # 0x54 -> HEBREW LETTER LAMED
    u'\u05dd' # 0x55 -> HEBREW LETTER FINAL MEM
    u'\u05de' # 0x56 -> HEBREW LETTER MEM
    u'\u05df' # 0x57 -> HEBREW LETTER FINAL NUN
    u'\u05e0' # 0x58 -> HEBREW LETTER NUN
    u'\u05e1' # 0x59 -> HEBREW LETTER SAMEKH
    u'!' # 0x5A -> EXCLAMATION MARK
    u'$' # 0x5B -> DOLLAR SIGN
    u'*' # 0x5C -> ASTERISK
    u')' # 0x5D -> RIGHT PARENTHESIS
    u';' # 0x5E -> SEMICOLON
    u'\xac' # 0x5F -> NOT SIGN
    u'-' # 0x60 -> HYPHEN-MINUS
    u'/' # 0x61 -> SOLIDUS
    u'\u05e2' # 0x62 -> HEBREW LETTER AYIN
    u'\u05e3' # 0x63 -> HEBREW LETTER FINAL PE
    u'\u05e4' # 0x64 -> HEBREW LETTER PE
    u'\u05e5' # 0x65 -> HEBREW LETTER FINAL TSADI
    u'\u05e6' # 0x66 -> HEBREW LETTER TSADI
    u'\u05e7' # 0x67 -> HEBREW LETTER QOF
    u'\u05e8' # 0x68 -> HEBREW LETTER RESH
    u'\u05e9' # 0x69 -> HEBREW LETTER SHIN
    u'\xa6' # 0x6A -> BROKEN BAR
    u',' # 0x6B -> COMMA
    u'%' # 0x6C -> PERCENT SIGN
    u'_' # 0x6D -> LOW LINE
    u'>' # 0x6E -> GREATER-THAN SIGN
    u'?' # 0x6F -> QUESTION MARK
    u'\ufffe' # 0x70 -> UNDEFINED
    u'\u05ea' # 0x71 -> HEBREW LETTER TAV
    u'\ufffe' # 0x72 -> UNDEFINED
    u'\ufffe' # 0x73 -> UNDEFINED
    u'\xa0' # 0x74 -> NO-BREAK SPACE
    u'\ufffe' # 0x75 -> UNDEFINED
    u'\ufffe' # 0x76 -> UNDEFINED
    u'\ufffe' # 0x77 -> UNDEFINED
    u'\u2017' # 0x78 -> DOUBLE LOW LINE
    u'`' # 0x79 -> GRAVE ACCENT
    u':' # 0x7A -> COLON
    u'#' # 0x7B -> NUMBER SIGN
    u'@' # 0x7C -> COMMERCIAL AT
    u"'" # 0x7D -> APOSTROPHE
    u'=' # 0x7E -> EQUALS SIGN
    u'"' # 0x7F -> QUOTATION MARK
    u'\ufffe' # 0x80 -> UNDEFINED
    u'a' # 0x81 -> LATIN SMALL LETTER A
    u'b' # 0x82 -> LATIN SMALL LETTER B
    u'c' # 0x83 -> LATIN SMALL LETTER C
    u'd' # 0x84 -> LATIN SMALL LETTER D
    u'e' # 0x85 -> LATIN SMALL LETTER E
    u'f' # 0x86 -> LATIN SMALL LETTER F
    u'g' # 0x87 -> LATIN SMALL LETTER G
    u'h' # 0x88 -> LATIN SMALL LETTER H
    u'i' # 0x89 -> LATIN SMALL LETTER I
    u'\xab' # 0x8A -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
    u'\xbb' # 0x8B -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
    u'\ufffe' # 0x8C -> UNDEFINED
    u'\ufffe' # 0x8D -> UNDEFINED
    u'\ufffe' # 0x8E -> UNDEFINED
    u'\xb1' # 0x8F -> PLUS-MINUS SIGN
    u'\xb0' # 0x90 -> DEGREE SIGN
    u'j' # 0x91 -> LATIN SMALL LETTER J
    u'k' # 0x92 -> LATIN SMALL LETTER K
    u'l' # 0x93 -> LATIN SMALL LETTER L
    u'm' # 0x94 -> LATIN SMALL LETTER M
    u'n' # 0x95 -> LATIN SMALL LETTER N
    u'o' # 0x96 -> LATIN SMALL LETTER O
    u'p' # 0x97 -> LATIN SMALL LETTER P
    u'q' # 0x98 -> LATIN SMALL LETTER Q
    u'r' # 0x99 -> LATIN SMALL LETTER R
    u'\ufffe' # 0x9A -> UNDEFINED
    u'\ufffe' # 0x9B -> UNDEFINED
    u'\ufffe' # 0x9C -> UNDEFINED
    u'\xb8' # 0x9D -> CEDILLA
    u'\ufffe' # 0x9E -> UNDEFINED
    u'\xa4' # 0x9F -> CURRENCY SIGN
    u'\xb5' # 0xA0 -> MICRO SIGN
    u'~' # 0xA1 -> TILDE
    u's' # 0xA2 -> LATIN SMALL LETTER S
    u't' # 0xA3 -> LATIN SMALL LETTER T
    u'u' # 0xA4 -> LATIN SMALL LETTER U
    u'v' # 0xA5 -> LATIN SMALL LETTER V
    u'w' # 0xA6 -> LATIN SMALL LETTER W
    u'x' # 0xA7 -> LATIN SMALL LETTER X
    u'y' # 0xA8 -> LATIN SMALL LETTER Y
    u'z' # 0xA9 -> LATIN SMALL LETTER Z
    u'\ufffe' # 0xAA -> UNDEFINED
    u'\ufffe' # 0xAB -> UNDEFINED
    u'\ufffe' # 0xAC -> UNDEFINED
    u'\ufffe' # 0xAD -> UNDEFINED
    u'\ufffe' # 0xAE -> UNDEFINED
    u'\xae' # 0xAF -> REGISTERED SIGN
    u'^' # 0xB0 -> CIRCUMFLEX ACCENT
    u'\xa3' # 0xB1 -> POUND SIGN
    u'\xa5' # 0xB2 -> YEN SIGN
    u'\xb7' # 0xB3 -> MIDDLE DOT
    u'\xa9' # 0xB4 -> COPYRIGHT SIGN
    u'\xa7' # 0xB5 -> SECTION SIGN
    u'\xb6' # 0xB6 -> PILCROW SIGN
    u'\xbc' # 0xB7 -> VULGAR FRACTION ONE QUARTER
    u'\xbd' # 0xB8 -> VULGAR FRACTION ONE HALF
    u'\xbe' # 0xB9 -> VULGAR FRACTION THREE QUARTERS
    u'[' # 0xBA -> LEFT SQUARE BRACKET
    u']' # 0xBB -> RIGHT SQUARE BRACKET
    u'\xaf' # 0xBC -> MACRON
    u'\xa8' # 0xBD -> DIAERESIS
    u'\xb4' # 0xBE -> ACUTE ACCENT
    u'\xd7' # 0xBF -> MULTIPLICATION SIGN
    u'{' # 0xC0 -> LEFT CURLY BRACKET
    u'A' # 0xC1 -> LATIN CAPITAL LETTER A
    u'B' # 0xC2 -> LATIN CAPITAL LETTER B
    u'C' # 0xC3 -> LATIN CAPITAL LETTER C
    u'D' # 0xC4 -> LATIN CAPITAL LETTER D
    u'E' # 0xC5 -> LATIN CAPITAL LETTER E
    u'F' # 0xC6 -> LATIN CAPITAL LETTER F
    u'G' # 0xC7 -> LATIN CAPITAL LETTER G
    u'H' # 0xC8 -> LATIN CAPITAL LETTER H
    u'I' # 0xC9 -> LATIN CAPITAL LETTER I
    u'\xad' # 0xCA -> SOFT HYPHEN
    u'\ufffe' # 0xCB -> UNDEFINED
    u'\ufffe' # 0xCC -> UNDEFINED
    u'\ufffe' # 0xCD -> UNDEFINED
    u'\ufffe' # 0xCE -> UNDEFINED
    u'\ufffe' # 0xCF -> UNDEFINED
    u'}' # 0xD0 -> RIGHT CURLY BRACKET
    u'J' # 0xD1 -> LATIN CAPITAL LETTER J
    u'K' # 0xD2 -> LATIN CAPITAL LETTER K
    u'L' # 0xD3 -> LATIN CAPITAL LETTER L
    u'M' # 0xD4 -> LATIN CAPITAL LETTER M
    u'N' # 0xD5 -> LATIN CAPITAL LETTER N
    u'O' # 0xD6 -> LATIN CAPITAL LETTER O
    u'P' # 0xD7 -> LATIN CAPITAL LETTER P
    u'Q' # 0xD8 -> LATIN CAPITAL LETTER Q
    u'R' # 0xD9 -> LATIN CAPITAL LETTER R
    u'\xb9' # 0xDA -> SUPERSCRIPT ONE
    u'\ufffe' # 0xDB -> UNDEFINED
    u'\ufffe' # 0xDC -> UNDEFINED
    u'\ufffe' # 0xDD -> UNDEFINED
    u'\ufffe' # 0xDE -> UNDEFINED
    u'\ufffe' # 0xDF -> UNDEFINED
    u'\\' # 0xE0 -> REVERSE SOLIDUS
    u'\xf7' # 0xE1 -> DIVISION SIGN
    u'S' # 0xE2 -> LATIN CAPITAL LETTER S
    u'T' # 0xE3 -> LATIN CAPITAL LETTER T
    u'U' # 0xE4 -> LATIN CAPITAL LETTER U
    u'V' # 0xE5 -> LATIN CAPITAL LETTER V
    u'W' # 0xE6 -> LATIN CAPITAL LETTER W
    u'X' # 0xE7 -> LATIN CAPITAL LETTER X
    u'Y' # 0xE8 -> LATIN CAPITAL LETTER Y
    u'Z' # 0xE9 -> LATIN CAPITAL LETTER Z
    u'\xb2' # 0xEA -> SUPERSCRIPT TWO
    u'\ufffe' # 0xEB -> UNDEFINED
    u'\ufffe' # 0xEC -> UNDEFINED
    u'\ufffe' # 0xED -> UNDEFINED
    u'\ufffe' # 0xEE -> UNDEFINED
    u'\ufffe' # 0xEF -> UNDEFINED
    u'0' # 0xF0 -> DIGIT ZERO
    u'1' # 0xF1 -> DIGIT ONE
    u'2' # 0xF2 -> DIGIT TWO
    u'3' # 0xF3 -> DIGIT THREE
    u'4' # 0xF4 -> DIGIT FOUR
    u'5' # 0xF5 -> DIGIT FIVE
    u'6' # 0xF6 -> DIGIT SIX
    u'7' # 0xF7 -> DIGIT SEVEN
    u'8' # 0xF8 -> DIGIT EIGHT
    u'9' # 0xF9 -> DIGIT NINE
    u'\xb3' # 0xFA -> SUPERSCRIPT THREE
    u'\ufffe' # 0xFB -> UNDEFINED
    u'\ufffe' # 0xFC -> UNDEFINED
    u'\ufffe' # 0xFD -> UNDEFINED
    u'\ufffe' # 0xFE -> UNDEFINED
    u'\x9f' # 0xFF -> EIGHT ONES
)

### Encoding table
# Inverse mapping (unicode ordinal -> byte), derived from decoding_table.
encoding_table = codecs.charmap_build(decoding_table)
| epl-1.0 |
thiblahute/pygobject | gi/_glib/option.py | 3 | 13195 | # -*- Mode: Python -*-
# pygobject - Python bindings for the GObject library
# Copyright (C) 2006 Johannes Hoelzl
#
# glib/option.py: GOption command line parser
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
# USA
"""GOption command line parser
Extends optparse to use the GOptionGroup, GOptionEntry and GOptionContext
objects. So it is possible to use the gtk, gnome_program and gstreamer command
line groups and contexts.
Use this interface instead of the raw wrappers of GOptionContext and
GOptionGroup in glib.
"""
import sys
import optparse
from optparse import OptParseError, OptionError, OptionValueError, \
BadOptionError, OptionConflictError
# Python 2/3 compatibility shims: a common name for the base string type and
# a helper producing the byte form of a (possibly unicode) string.
if sys.version_info >= (3, 0):
    _basestring = str
    _bytes = lambda s: s.encode()
else:
    _basestring = basestring
    _bytes = str

import gi._glib
gi  # pyflakes
# Grab the private C extension module directly; the public gi._glib package
# re-exports only part of it.
_glib = sys.modules['gi._glib._glib']

# Public API of this module.
__all__ = [
    "OptParseError",
    "OptionError",
    "OptionValueError",
    "BadOptionError",
    "OptionConflictError",
    "Option",
    "OptionGroup",
    "OptionParser",
    "make_option",
]
class Option(optparse.Option):
    """Represents a command line option

    To use the extended possibilities of the GOption API Option
    (and make_option) are extended with new types and attributes.

    Types:
        filename   The supplied arguments are read as filename, GOption
                   parses this type in with the GLib filename encoding.

    Attributes:
        optional_arg  This does not need an argument, but it can be supplied.
        hidden        The help list does not show this option
        in_main       This option appears in the main group, this should only
                      be used for backwards compatibility.

    Use Option.REMAINING as option name to get all positional arguments.

    NOTE: Every argument to an option is passed as utf-8 coded string, the only
          exception are options which use the 'filename' type, its arguments
          are passed as strings in the GLib filename encoding.

    For further help, see optparse.Option.
    """

    # Extra argument type understood by GOption but not plain optparse.
    TYPES = optparse.Option.TYPES + (
        'filename',
    )

    ATTRS = optparse.Option.ATTRS + [
        'hidden',
        'in_main',
        'optional_arg',
    ]

    # Special option name collecting all remaining positional arguments.
    REMAINING = '--' + _glib.OPTION_REMAINING

    def __init__(self, *args, **kwargs):
        """Validate that the option is usable with GOption.

        Raises:
            ValueError: if there is no long option name, no help text, or
                more short names than long names (GOption requires one long
                name per entry).
        """
        optparse.Option.__init__(self, *args, **kwargs)
        # BUG FIX: these ValueError messages previously contained an
        # unfilled "%s" placeholder (and one passed the argument as a second
        # positional instead of formatting it); format them properly.
        if not self._long_opts:
            raise ValueError("%s needs at least one long option name." % self)
        if len(self._long_opts) < len(self._short_opts):
            raise ValueError(
                "%s needs at least as many long option names as short "
                "option names." % self)
        if not self.help:
            raise ValueError("%s needs a help message." % self._long_opts[0])

    def _set_opt_string(self, opts):
        # Recognize the REMAINING pseudo-option before optparse validation.
        if self.REMAINING in opts:
            self._long_opts.append(self.REMAINING)
        optparse.Option._set_opt_string(self, opts)
        if len(self._short_opts) > len(self._long_opts):
            # BUG FIX: optparse.OptionError requires (msg, option); the old
            # one-argument call raised TypeError instead of OptionError.
            raise OptionError("goption.Option needs more long option names "
                              "than short option names", self)

    def _to_goptionentries(self):
        """Yield one GOptionEntry tuple per option name.

        Each entry is (long_name, short_char, flags, help, metavar); long
        names paired with a short name come first, the surplus long names
        get a NUL short character.
        """
        flags = 0

        if self.hidden:
            flags |= _glib.OPTION_FLAG_HIDDEN
        if self.in_main:
            flags |= _glib.OPTION_FLAG_IN_MAIN

        if self.takes_value():
            if self.optional_arg:
                flags |= _glib.OPTION_FLAG_OPTIONAL_ARG
        else:
            flags |= _glib.OPTION_FLAG_NO_ARG

        if self.type == 'filename':
            flags |= _glib.OPTION_FLAG_FILENAME

        for (long_name, short_name) in zip(self._long_opts, self._short_opts):
            # Strip the leading "--" / "-" markers for GOption.
            yield (long_name[2:], _bytes(short_name[1]), flags, self.help, self.metavar)

        for long_name in self._long_opts[len(self._short_opts):]:
            yield (long_name[2:], _bytes('\0'), flags, self.help, self.metavar)
class OptionGroup(optparse.OptionGroup):
"""A group of command line options.
Arguements:
name: The groups name, used to create the
--help-{name} option
description: Shown as title of the groups help view
help_description: Shown as help to the --help-{name} option
option_list: The options used in this group, must be option.Option()
defaults: A dicitionary of default values
translation_domain: Sets the translation domain for gettext().
NOTE: This OptionGroup does not exactly map the optparse.OptionGroup
interface. There is no parser object to supply, but it is possible
to set default values and option_lists. Also the default values and
values are not shared with the OptionParser.
To pass a OptionGroup into a function which expects a GOptionGroup (e.g.
gnome_program_init() ). OptionGroup.get_option_group() can be used.
For further help, see optparse.OptionGroup.
"""
def __init__(self, name, description, help_description="",
option_list=None, defaults=None,
translation_domain=None):
optparse.OptionContainer.__init__(self, Option, 'error', description)
self.name = name
self.parser = None
self.help_description = help_description
if defaults:
self.defaults = defaults
self.values = None
self.translation_domain = translation_domain
if option_list:
for option in option_list:
self.add_option(option)
def _create_option_list(self):
self.option_list = []
self._create_option_mappings()
def _to_goptiongroup(self, parser):
def callback(option_name, option_value, group):
if option_name.startswith('--'):
opt = self._long_opt[option_name]
else:
opt = self._short_opt[option_name]
try:
opt.process(option_name, option_value, self.values, parser)
except OptionValueError:
error = sys.exc_info()[1]
gerror = _glib.GError(str(error))
gerror.domain = _glib.OPTION_ERROR
gerror.code = _glib.OPTION_ERROR_BAD_VALUE
gerror.message = str(error)
raise gerror
group = _glib.OptionGroup(self.name, self.description,
self.help_description, callback)
if self.translation_domain:
group.set_translation_domain(self.translation_domain)
entries = []
for option in self.option_list:
entries.extend(option._to_goptionentries())
group.add_entries(entries)
return group
    def get_option_group(self, parser=None):
        """ Returns the corresponding GOptionGroup object.

        Can be used as parameter for gnome_program_init(), gtk_init().
        The group's values are (re)initialised from the defaults first.
        """
        self.set_values_to_defaults()
        return self._to_goptiongroup(parser)
    def set_values_to_defaults(self):
        """(Re)build self.values from self.defaults.

        String defaults are run through Option.check_value() so the stored
        default has the option's real type, mirroring what
        optparse.OptionParser.get_default_values() does.
        """
        for option in self.option_list:
            default = self.defaults.get(option.dest)
            if isinstance(default, _basestring):
                opt_str = option.get_opt_string()
                self.defaults[option.dest] = option.check_value(
                    opt_str, default)
        self.values = optparse.Values(self.defaults)
class OptionParser(optparse.OptionParser):
    """Command line parser with GOption support.

    NOTE: The OptionParser interface is not exactly the same as the
    optparse.OptionParser interface. Especially the usage parameter
    is only used to show the metavar of the arguments.

    Attributes:
        help_enabled:           The --help, --help-all and --help-{group}
                                options are enabled (default).
        ignore_unknown_options: Do not throw an exception when an option
                                is not known; the option will be in the
                                result list.

    OptionParser.add_option_group() does not only accept OptionGroup
    instances but also glib.OptionGroup, which is returned by
    gtk_get_option_group().

    Only glib.option.OptionGroup and glib.option.Option instances should
    be passed as groups and options.

    For further help, see optparse.OptionParser.
    """
    def __init__(self, *args, **kwargs):
        # Pull out the GOption-specific keywords before handing the rest
        # to optparse; --help is handled by GLib, so optparse's built-in
        # help option is disabled.
        if 'option_class' not in kwargs:
            kwargs['option_class'] = Option
        self.help_enabled = kwargs.pop('help_enabled', True)
        self.ignore_unknown_options = kwargs.pop('ignore_unknown_options',
                                                 False)
        optparse.OptionParser.__init__(self, add_help_option=False,
                                       *args, **kwargs)
    def set_usage(self, usage):
        # GLib prepends the program name itself, so strip optparse's
        # "%prog" placeholder instead of expanding it.
        if usage is None:
            self.usage = ''
        elif usage.startswith("%prog"):
            self.usage = usage[len("%prog"):]
        else:
            self.usage = usage
    def _to_goptioncontext(self, values):
        """Build the _glib.OptionContext mirroring this parser."""
        if self.description:
            parameter_string = self.usage + " - " + self.description
        else:
            parameter_string = self.usage
        context = _glib.OptionContext(parameter_string)
        context.set_help_enabled(self.help_enabled)
        context.set_ignore_unknown_options(self.ignore_unknown_options)
        # Groups may be native glib.OptionGroup objects (e.g. from
        # gtk_get_option_group()) or this module's OptionGroup wrappers.
        for option_group in self.option_groups:
            if isinstance(option_group, _glib.OptionGroup):
                g_group = option_group
            else:
                g_group = option_group.get_option_group(self)
            context.add_group(g_group)
        def callback(option_name, option_value, group):
            # Route options parsed by GLib back through optparse so the
            # supplied 'values' object is filled in.
            if option_name.startswith('--'):
                opt = self._long_opt[option_name]
            else:
                opt = self._short_opt[option_name]
            opt.process(option_name, option_value, values, self)
        main_group = _glib.OptionGroup(None, None, None, callback)
        main_entries = []
        for option in self.option_list:
            main_entries.extend(option._to_goptionentries())
        main_group.add_entries(main_entries)
        context.set_main_group(main_group)
        return context
    def add_option_group(self, *args, **kwargs):
        # Accept three call styles: (name, ...) to create a group inline,
        # a single OptionGroup instance, or a native _glib.OptionGroup.
        if isinstance(args[0], _basestring):
            optparse.OptionParser.add_option_group(self,
                OptionGroup(self, *args, **kwargs))
            return
        elif len(args) == 1 and not kwargs:
            if isinstance(args[0], OptionGroup):
                if not args[0].parser:
                    args[0].parser = self
                if args[0].parser is not self:
                    raise ValueError("invalid OptionGroup (wrong parser)")
            if isinstance(args[0], _glib.OptionGroup):
                # Native groups are tracked but never converted back.
                self.option_groups.append(args[0])
                return
        optparse.OptionParser.add_option_group(self, *args, **kwargs)
    def _get_all_options(self):
        # Native _glib.OptionGroup instances carry no optparse options,
        # so only this module's groups contribute to the listing.
        options = self.option_list[:]
        for group in self.option_groups:
            if isinstance(group, optparse.OptionGroup):
                options.extend(group.option_list)
        return options
    def _process_args(self, largs, rargs, values):
        # optparse hook: delegate the actual parsing to GLib.
        context = self._to_goptioncontext(values)
        # _process_args() returns the remaining parameters in rargs.
        # The prepended program name is used to all g_set_prgname()
        # The program name is cut away so it doesn't appear in the result.
        rargs[:] = context.parse([sys.argv[0]] + rargs)[1:]
    def parse_args(self, args=None, values=None):
        """Parse args, mapping GLib's GError codes onto optparse errors."""
        old_args = args or []
        try:
            options, args = optparse.OptionParser.parse_args(
                self, args, values)
        except _glib.GError:
            error = sys.exc_info()[1]
            if error.domain != _glib.OPTION_ERROR:
                raise
            if error.code == _glib.OPTION_ERROR_BAD_VALUE:
                raise OptionValueError(error.message)
            elif error.code == _glib.OPTION_ERROR_UNKNOWN_OPTION:
                raise BadOptionError(error.message)
            elif error.code == _glib.OPTION_ERROR_FAILED:
                raise OptParseError(error.message)
            else:
                raise
        # Propagate the per-group defaults/values into the result object.
        for group in self.option_groups:
            for key, value in group.values.__dict__.items():
                options.ensure_value(key, value)
        # NOTE(review): when old_args is empty, -len(old_args) == 0 and
        # this slice yields [] — presumably callers always pass args;
        # confirm before relying on the zero-argument form.
        args = args[2:-len(old_args)]
        return options, args
make_option = Option
| lgpl-2.1 |
martinbuc/missionplanner | Lib/encodings/punycode.py | 93 | 7051 | # -*- coding: iso-8859-1 -*-
""" Codec for the Punicode encoding, as specified in RFC 3492
Written by Martin v. Löwis.
"""
import codecs
##################### Encoding #####################################
def segregate(str):
    """3.1 Basic code point segregation.

    Split *str* into (base, extended): *base* is the ASCII subsequence
    encoded as an ASCII byte string, *extended* is a sorted list of the
    distinct non-ASCII characters.  Sorted order is required by the
    Punycode algorithm (RFC 3492 section 3.1).
    """
    base = []
    extended = set()
    for c in str:
        if ord(c) < 128:
            base.append(c)
        else:
            extended.add(c)
    # sorted() over a set replaces the old dict-of-keys + list.sort()
    # dance; it is equivalent on Python 2 and also works on Python 3,
    # where dict.keys() has no sort() method.
    return "".join(base).encode("ascii"), sorted(extended)
def selective_len(str, max):
    """Return the length of str, considering only characters below max."""
    return sum(1 for c in str if ord(c) < max)
def selective_find(str, char, index, pos):
    """Return a pair (index, pos), indicating the next occurrence of
    char in str.  index counts only characters with ordinals up to and
    including char; pos is the position in the full string.  The given
    index/pos pair is the starting position in the full string."""
    length = len(str)
    while True:
        pos += 1
        if pos == length:
            # No further occurrence.
            return (-1, -1)
        cur = str[pos]
        if cur == char:
            return (index + 1, pos)
        if cur < char:
            # Smaller characters still advance the selective index.
            index += 1
def insertion_unsort(str, extended):
    """3.2 Insertion unsort coding.

    Convert the sorted list of extended characters into the sequence of
    delta values that the variable-length integers will encode
    (RFC 3492 section 3.2).
    """
    oldchar = 0x80  # first code point beyond ASCII
    result = []
    oldindex = -1
    for c in extended:
        index = pos = -1
        char = ord(c)
        curlen = selective_len(str, char)
        # Moving on to the next character "row" costs (curlen+1)
        # positions per code point skipped.
        delta = (curlen+1) * (char - oldchar)
        while 1:
            index,pos = selective_find(str,c,index,pos)
            if index == -1:
                break
            delta += index - oldindex
            result.append(delta-1)
            oldindex = index
            delta = 0
        oldchar = char
    return result
def T(j, bias):
    # Punycode parameters: tmin = 1, tmax = 26, base = 36
    # Clamp the threshold for digit position j into [tmin, tmax].
    threshold = 36 * (j + 1) - bias
    return min(26, max(1, threshold))
digits = "abcdefghijklmnopqrstuvwxyz0123456789"
def generate_generalized_integer(N, bias):
    """3.3 Generalized variable-length integers"""
    # Emit base-36 digits (module-level 'digits' alphabet) until the
    # remaining value drops below the per-position threshold.
    result = []
    j = 0
    while True:
        t = T(j, bias)
        if N < t:
            result.append(digits[N])
            return result
        N, rem = divmod(N - t, 36 - t)
        result.append(digits[t + rem])
        j += 1
def adapt(delta, first, numchars):
    """3.4 Bias adaptation (RFC 3492): damp the delta, then rescale."""
    # damp = 700 on the very first delta, 2 afterwards
    delta //= 700 if first else 2
    delta += delta // numchars
    # ((base - tmin) * tmax) // 2 == 455
    divisions = 0
    while delta > 455:
        delta //= 35  # base - tmin
        divisions += 36
    return divisions + (36 * delta // (delta + 38))
def generate_integers(baselen, deltas):
    """Encode the delta list as generalized variable-length integers,
    adapting the bias after each delta (RFC 3492)."""
    # Punycode parameters: initial bias = 72, damp = 700, skew = 38
    out = []
    bias = 72
    for points, delta in enumerate(deltas):
        out += generate_generalized_integer(delta, bias)
        bias = adapt(delta, points == 0, baselen + points + 1)
    return "".join(out)
def punycode_encode(text):
    """Encode *text* per RFC 3492: ASCII part, '-', then the deltas."""
    base, extended = segregate(text)
    # segregate() already returns the base ASCII-encoded; the previous
    # extra base.encode("ascii") round-trip was redundant (a no-op for
    # ASCII data on Python 2).
    deltas = insertion_unsort(text, extended)
    extended = generate_integers(len(base), deltas)
    if base:
        return base + "-" + extended
    return extended
##################### Decoding #####################################
def decode_generalized_number(extended, extpos, bias, errors):
    """3.3 Generalized variable-length integers (decoding).

    Decode one generalized integer starting at *extpos*.  Returns
    (new_extpos, value), or (..., None) when a non-strict decode fails.
    """
    result = 0
    w = 1
    j = 0
    while 1:
        try:
            char = ord(extended[extpos])
        except IndexError:
            if errors == "strict":
                # raise-as-call form works on both Python 2 and 3.
                raise UnicodeError("incomplete punicode string")
            return extpos + 1, None
        extpos += 1
        if 0x41 <= char <= 0x5A: # A-Z
            digit = char - 0x41
        elif 0x30 <= char <= 0x39:
            digit = char - 22 # 0x30-26
        elif errors == "strict":
            # BUG FIX: extpos was already advanced above, so the
            # offending character is at extpos-1; the old code reported
            # the *next* character and could even raise IndexError at
            # the end of the input.
            raise UnicodeError("Invalid extended code point '%s'"
                               % extended[extpos-1])
        else:
            return extpos, None
        t = T(j, bias)
        result += digit * w
        if digit < t:
            return extpos, result
        w = w * (36 - t)
        j += 1
def insertion_sort(base, extended, errors):
    """3.2 Insertion sort coding (decoding side).

    Replay the encoded deltas, inserting each non-ASCII character into
    *base* at the decoded position.  NOTE: Python 2 only — uses unichr()
    and the `raise Exc, arg` statement form.
    """
    char = 0x80  # first code point beyond ASCII
    pos = -1
    bias = 72  # RFC 3492 initial bias
    extpos = 0
    while extpos < len(extended):
        newpos, delta = decode_generalized_number(extended, extpos,
                                                  bias, errors)
        if delta is None:
            # There was an error in decoding. We can't continue because
            # synchronization is lost.
            return base
        pos += delta+1
        char += pos // (len(base) + 1)
        if char > 0x10FFFF:
            if errors == "strict":
                raise UnicodeError, ("Invalid character U+%x" % char)
            char = ord('?')
        pos = pos % (len(base) + 1)
        base = base[:pos] + unichr(char) + base[pos:]
        bias = adapt(delta, (extpos == 0), len(base))
        extpos = newpos
    return base
def punycode_decode(text, errors):
    """Split *text* at the last '-' into ASCII base + extended digits
    and decode (RFC 3492).  Python 2 only: uses unicode()."""
    pos = text.rfind("-")
    if pos == -1:
        base = ""
        extended = text
    else:
        base = text[:pos]
        extended = text[pos+1:]
    # Decode the ASCII part honouring the caller's error policy.
    base = unicode(base, "ascii", errors)
    # Digits are matched case-insensitively; normalise to upper case.
    extended = extended.upper()
    return insertion_sort(base, extended, errors)
### Codec APIs
class Codec(codecs.Codec):
    """Stateless punycode encoder/decoder (codecs.Codec interface)."""

    def encode(self, input, errors='strict'):
        res = punycode_encode(input)
        return res, len(input)

    def decode(self, input, errors='strict'):
        # Punycode decoding only supports these three error policies.
        if errors not in ('strict', 'replace', 'ignore'):
            # raise-as-call form works on both Python 2 and 3
            # (the old `raise X, msg` statement is Python 2 only).
            raise UnicodeError("Unsupported error handling "+errors)
        res = punycode_decode(input, errors)
        return res, len(input)
class IncrementalEncoder(codecs.IncrementalEncoder):
    # Punycode cannot be produced incrementally; each call encodes the
    # whole input independently.
    def encode(self, input, final=False):
        return punycode_encode(input)
class IncrementalDecoder(codecs.IncrementalDecoder):
    # Punycode cannot be consumed incrementally; each call decodes the
    # whole input independently.
    def decode(self, input, final=False):
        if self.errors not in ('strict', 'replace', 'ignore'):
            # raise-as-call form works on both Python 2 and 3
            # (the old `raise X, msg` statement is Python 2 only).
            raise UnicodeError("Unsupported error handling "+self.errors)
        return punycode_decode(input, self.errors)
class StreamWriter(Codec,codecs.StreamWriter):
    # Plain combination: Codec.encode + the buffering from StreamWriter.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Plain combination: Codec.decode + the buffering from StreamReader.
    pass
### encodings module API
def getregentry():
    """encodings module API: return the CodecInfo for 'punycode'."""
    return codecs.CodecInfo(
        name='punycode',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamwriter=StreamWriter,
        streamreader=StreamReader,
    )
| gpl-3.0 |
fnkhan/second | utils/ovxctl.py | 4 | 38336 | #!/usr/bin/env python
# OpenVirteX control script
# Heavily based on FlowVisor's fvctl
#import python utilities to parse arguments
import sys
from optparse import OptionParser
import urllib2
import json
import getpass
VERSION = '0.1'
SUPPORTED_PROTO = ['tcp']
def getUrl(opts, path):
    # URL is a module-level format string defined elsewhere in this file
    # — presumably "http://%s:%s/%s" (host, port, path); TODO confirm.
    return URL % (opts.host, opts.port, path)
def buildRequest(data, url, cmd):
    """Build a JSON-RPC 2.0 POST request for method *cmd*; *data*, when
    not None, becomes the 'params' member."""
    payload = {"id": "ovxctl", "method": cmd, "jsonrpc": "2.0"}
    if data is not None:
        payload['params'] = data
    headers = {"Content-Type": "application/json-rpc"}
    return urllib2.Request(url, json.dumps(payload), headers)
def pa_none(args, cmd):
    # Generic parser for commands that take no options.  USAGE and DESCS
    # are module-level tables defined elsewhere in this file.
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=USAGE.format(cmd), description=ldesc)
    (options, args) = parser.parse_args(args)
    return (options, args)
#Create calls
def pa_addControllers(args, cmd):
    # Build the option parser for 'addControllers'.
    usage = "%s <tenant_id> <vdpid> <ctrlUrls>" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)

def do_addControllers(gopts, opts, args):
    # Attach extra controllers to an existing virtual switch.
    if len(args) != 3:
        print "addControllers: Must specify tenant id, virtual dpid, controller list"
        sys.exit()
    # vdpid is a colon-separated hex DPID; strip colons and parse base 16.
    req = { "controllerUrls" : buildControllerList(args[2]), \
            "tenantId" : int(args[0]), "vdpid" : int(args[1].replace(":",""), 16) }
    resp = connect(gopts, "tenant", "addControllers", data=req, passwd=getPasswd(gopts))
    if resp:
        print "Added controllers %s to switch %s" % (args[2], args[1])
        print resp
def pa_createNetwork(args, cmd):
    # BUG FIX: the usage string listed a separate <protocol> argument,
    # but do_createNetwork takes exactly three args; the protocol is
    # part of each controller URL (e.g. tcp:1.2.3.4:6633).
    usage = "%s <controller_urls> <ip_network> <ip_mask>" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)
def buildControllerList(ctrls):
    """Parse a comma-separated list of controller URLs.

    Each URL must look like proto:host:port with proto listed in
    SUPPORTED_PROTO; "none" (case-insensitive) yields an empty list.
    Exits the process on malformed input (CLI behaviour).
    """
    if ctrls.lower() == "none":
        return []
    controllerUrls = []
    for ctrl in ctrls.split(','):
        parts = ctrl.split(":")
        if len(parts) < 3:
            print("%s is not a valid controller url" % ctrl)
            sys.exit()
        if parts[0] not in SUPPORTED_PROTO:
            print("%s in %s is not a supported protocol" % (parts[0], ctrl))
            sys.exit()
        try:
            int(parts[2])
        except ValueError:
            # BUG FIX: the old bare 'except:' also swallowed SystemExit
            # and KeyboardInterrupt; only a non-numeric port is an error.
            print("%s in %s is not a valid port number" % (parts[2], ctrl))
            sys.exit()
        controllerUrls.append(ctrl)
    return controllerUrls
def do_createNetwork(gopts, opts, args):
    # Create a virtual network: controller list plus an IP block (addr/mask).
    if len(args) != 3:
        print "createNetwork : Must specify controllerUrls, network_ip, network_mask"
        sys.exit()
    req = { "controllerUrls" : buildControllerList(args[0]), \
            "networkAddress" : args[1], "mask" : int(args[2]) }
    network_id = connect(gopts, "tenant", "createNetwork", data=req, passwd=getPasswd(gopts))
    if network_id:
        print "Virtual network has been created (network_id %s)." % str(network_id)
def pa_createSwitch(args, cmd):
    # 'createSwitch' also accepts -d/--dpid to choose the virtual DPID.
    usage = "%s [options] <tenant_id> <physical_dpids>" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    parser.add_option("-d", "--dpid", dest="dpid", type="str", default="0",
                      help="Specify the DPID for this switch")
    return parser.parse_args(args)

def do_createSwitch(gopts, opts, args):
    # Map one or more physical switches onto a new virtual (big-)switch.
    if len(args) != 2:
        print ("createSwitch : must specify: " +
               "virtual tenant_id and a comma separated list of physical dpids " +
               "(e.g. 00:00:00:00:00:00:00:01) which will be associated to the virtual switch")
        sys.exit()
    dpids = [int(dpid.replace(":", ""), 16) for dpid in args[1].split(',')]
    req = { "tenantId" : int(args[0]), "dpids" : dpids, "dpid" : int(opts.dpid.replace(":", ""), 16) }
    reply = connect(gopts, "tenant", "createSwitch", data=req, passwd=getPasswd(gopts))
    switchId = reply.get('vdpid')
    if switchId:
        # Re-render the numeric DPID as the colon-separated hex form.
        switch_name = '00:' + ':'.join([("%x" % switchId)[i:i+2] for i in range(0, len(("%x" % switchId)), 2)])
        print "Virtual switch has been created (tenant_id %s, switch_id %s)" % (args[0], switch_name)
def pa_createPort(args, cmd):
    # Build the option parser for 'createPort'.
    usage = "%s <tenant_id> <physical_dpid> <physical_port>" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)

def do_createPort(gopts, opts, args):
    # Expose a physical port as a new virtual port in the tenant network.
    if len(args) != 3:
        print ("createPort : must specify: " +
               "virtual tenant_id, physical dpid " +
               "(e.g. 00:00:00:00:00:00:00:01) and physical port")
        sys.exit()
    req = { "tenantId" : int(args[0]), "dpid" : int(args[1].replace(":", ""), 16), "port" : int(args[2]) }
    reply = connect(gopts, "tenant", "createPort", data=req, passwd=getPasswd(gopts))
    switchId = reply.get('vdpid')
    portId = reply.get('vport')
    if switchId and portId:
        # Re-render the numeric DPID as colon-separated hex for display.
        switch_name = '00:' + ':'.join([("%x" %int(switchId))[i:i+2] for i in range(0, len(("%x" %int(switchId))), 2)])
        print "Virtual port has been created (tenant_id %s, switch_id %s, port_id %s)" % (args[0], switch_name, portId)
def pa_setInternalRouting(args, cmd):
    # Build the option parser for 'setInternalRouting'.
    usage = "%s <tenant_id> <virtual_dpid> <routing_algorithm> <backup_routes_num>" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)
def do_setInternalRouting(gopts, opts, args):
if len(args) != 4:
print ("setInternalRouting : Must specify virtual tenant_id, virtual switch_id, " +
"algorithm (spf, manual) and number of backup routes")
sys.exit()
req = { "tenantId" : int(args[0]), "vdpid" : int(args[1].replace(":", ""), 16),
"algorithm" : args[2], "backup_num" : int(args[3]) }
reply = connect(gopts, "tenant", "setInternalRouting", data=req, passwd=getPasswd(gopts))
tenantId = reply.get('tenantId')
switchId = reply.get('vdpid')
if tenantId and switchId:
print "Routing has be set for big switch (tenant_id %s, switch_id %s)" % (switchId, tenantId)
def pa_connectHost(args, cmd):
    # Build the option parser for 'connectHost'.
    # NOTE(review): "vitual_dpid" in the usage string is a typo for
    # "virtual_dpid" (left untouched: runtime string).
    usage = "%s <tenant_id> <vitual_dpid> <virtual_port> <host_mac>" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)

def do_connectHost(gopts, opts, args):
    # Attach a host (by MAC address) to a virtual port.
    if len(args) != 4:
        print "connectHost : Must specify virtual tenant_id, virtual switch_id, virtual port_id and host MAC address"
        sys.exit()
    req = { "tenantId" : int(args[0]), "vdpid" : int(args[1].replace(":", ""), 16),
            "vport" : int(args[2]), "mac" : args[3] }
    reply = connect(gopts, "tenant", "connectHost", data=req, passwd=getPasswd(gopts))
    hostId = reply.get('hostId')
    if hostId:
        print "Host (host_id %s) has been connected to virtual port" % (hostId)
def pa_connectLink(args, cmd):
    # BUG FIX: the usage string omitted <algorithm> and
    # <backup_routes_num>, which do_connectLink requires (7 args total).
    usage = ("%s <tenant_id> <src_virtual_dpid> <src_virtual_port> "
             "<dst_virtual_dpid> <dst_virtual_port> <algorithm> <backup_routes_num>") % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)
def do_connectLink(gopts, opts, args):
    # Create a virtual link between two virtual ports; takes 7 arguments
    # (endpoints plus routing algorithm and backup-route count).
    if len(args) != 7:
        print ("connectLink : Must specify tenant_id, src_virtual_dpid, src_virtual_port, dst_virtual_dpid, dst_virtual_port, "
               + "algorithm (spf, manual), number of backup routes")
        sys.exit()
    req = { "tenantId" : int(args[0]), "srcDpid" : int(args[1].replace(":", ""), 16),
            "srcPort" : int(args[2]), "dstDpid" : int(args[3].replace(":", ""), 16),
            "dstPort" : int(args[4]), "algorithm" : args[5], "backup_num" : int(args[6]) }
    reply = connect(gopts, "tenant", "connectLink", data=req, passwd=getPasswd(gopts))
    linkId = reply.get('linkId')
    if linkId:
        print "Virtual link (link_id %s) has been created" % (linkId)
def pa_setLinkPath(args, cmd):
    # Build the option parser for 'setLinkPath'.
    usage = "%s <tenant_id> <link_id> <physical_path> <priority>" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)

def do_setLinkPath(gopts, opts, args):
    # Pin a virtual link onto an explicit physical path with a priority.
    if len(args) != 4:
        print "setLinkPath : Must specify tenant_id, link_id, the physical path that connect the end-points and the priority [0-255]"
        sys.exit()
    req = { "tenantId" : int(args[0]), "linkId" : int(args[1]), "path" : translate_path(args[2]), "priority" : int(args[3]) }
    reply = connect(gopts, "tenant", "setLinkPath", data=req, passwd=getPasswd(gopts))
    linkId = reply.get('linkId')
    if linkId:
        print "Virtual link (link_id %s) path has been set" % (linkId)
def pa_connectRoute(args, cmd):
    # Build the option parser for 'connectRoute'.
    usage = "%s <tenant_id> <virtual_dpid> <src_virtual_port> <dst_virtual_port> <physical_path> <priority>" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)

def do_connectRoute(gopts, opts, args):
    # Create an internal route between two ports of a virtual big-switch.
    if len(args) != 6:
        print ("connectRoute : Must specify tenant_id, virtual_dpid, src_virtual_port, dst_virtual_port, " +
               "the physical path that connect the end-points and the priority [0-255]")
        sys.exit()
    req = { "tenantId" : int(args[0]), "vdpid" : int(args[1].replace(":", ""), 16),
            "srcPort" : int(args[2]), "dstPort" : int(args[3]),
            "path" : translate_path(args[4]), "priority" : int(args[5]) }
    reply = connect(gopts, "tenant", "connectRoute", data=req, passwd=getPasswd(gopts))
    routeId = reply.get('routeId')
    if routeId:
        print "Big-switch internal route (route_id %s) has been created" % (routeId)
#Remove calls
def pa_removeNetwork(args, cmd):
    # Build the option parser for 'removeNetwork'.
    usage = "%s <tenant_id>" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)

def do_removeNetwork(gopts, opts, args):
    # Delete a whole virtual network.  The success message is printed
    # unconditionally; connect() presumably exits on errors — confirm.
    if len(args) != 1:
        print "removeNetwork : Must specify a virtual tenant_id"
        sys.exit()
    req = { "tenantId" : int(args[0]) }
    result = connect(gopts, "tenant", "removeNetwork", data=req, passwd=getPasswd(gopts))
    print "Network (tenant_id %s) has been removed" % (args[0])
def pa_removeSwitch(args, cmd):
    # Build the option parser for 'removeSwitch'.
    usage = "%s <tenant_id> <virtual_dpid>" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)

def do_removeSwitch(gopts, opts, args):
    # Delete a virtual switch from a tenant network.
    if len(args) != 2:
        print "removeSwitch : Must specify a virtual tenant_id and a virtual switch_id"
        sys.exit()
    req = { "tenantId" : int(args[0]), "vdpid" : int(args[1].replace(":", ""), 16) }
    result = connect(gopts, "tenant", "removeSwitch", data=req, passwd=getPasswd(gopts))
    print "Switch (switch_id %s) has been removed" % (args[1])
def pa_removePort(args, cmd):
    # Build the option parser for 'removePort'.
    usage = "%s <tenant_id> <virtual_dpid> <virtual_port>" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)

def do_removePort(gopts, opts, args):
    # Delete a virtual port from a virtual switch.
    if len(args) != 3:
        print "removePort : Must specify a virtual tenant_id, a virtual switch_id and a virtual port_id"
        sys.exit()
    req = { "tenantId" : int(args[0]), "vdpid" : int(args[1].replace(":", ""), 16), "vport" : int(args[2])}
    result = connect(gopts, "tenant", "removePort", data=req, passwd=getPasswd(gopts))
    print "Port (port_id %s) has been removed from virtual switch (switch_id %s)" % (args[2], args[1])
def pa_disconnectHost(args, cmd):
    # Build the option parser for 'disconnectHost'.
    usage = "%s <tenant_id> <host_id>" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)

def do_disconnectHost(gopts, opts, args):
    # Detach a host from its virtual port.
    if len(args) != 2:
        print "disconnectHost : Must specify a a virtual tenant_id and a host_id"
        sys.exit()
    req = { "tenantId" : int(args[0]), "hostId" : int(args[1]) }
    result = connect(gopts, "tenant", "disconnectHost", data=req, passwd=getPasswd(gopts))
    print "Host (host_id %s) has been disconnected from the virtual network (tenant_id %s)" % (args[1], args[0])
def pa_disconnectLink(args, cmd):
    # Build the option parser for 'disconnectLink'.
    usage = "%s <tenant_id> <link_id>" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)

def do_disconnectLink(gopts, opts, args):
    # Tear down a virtual link.
    if len(args) != 2:
        print "disconnectLink : Must specify a a virtual tenant_id and a link_id"
        sys.exit()
    req = { "tenantId" : int(args[0]), "linkId" : int(args[1]) }
    result = connect(gopts, "tenant", "disconnectLink", data=req, passwd=getPasswd(gopts))
    print "Link (link_id %s) has been disconnected from the virtual network (tenant_id %s)" % (args[1], args[0])
def pa_disconnectRoute(args, cmd):
    # BUG FIX: the usage string omitted <virtual_dpid>;
    # do_disconnectRoute requires tenant_id, virtual_dpid and route_id.
    usage = "%s <tenant_id> <virtual_dpid> <route_id>" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)
def do_disconnectRoute(gopts, opts, args):
    # Remove an internal big-switch route (tenant, switch, route id).
    if len(args) != 3:
        print "disconnectRoute : Must specify a virtual tenant_id, switch_id and a route_id"
        sys.exit()
    req = { "tenantId" : int(args[0]), "vdpid" : int(args[1].replace(":", ""), 16) , "routeId" : int(args[2]) }
    result = connect(gopts, "tenant", "disconnectRoute", data=req, passwd=getPasswd(gopts))
    print "Route (route_id %s) in virtual big-switch (switch_id %s) has been disconnected from the virtual network (tenant_id %s)" % (args[2], args[1], args[0])
#Runtime operations
def pa_startNetwork(args, cmd):
    # Build the option parser for 'startNetwork'.
    usage = "%s <tenant_id>" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)

def do_startNetwork(gopts, opts, args):
    # Boot a previously-created virtual network.
    if len(args) != 1:
        print "startNetwork : Must specify a tenant_id"
        sys.exit()
    req = { "tenantId" : int(args[0]) }
    result = connect(gopts, "tenant", "startNetwork", data=req, passwd=getPasswd(gopts))
    if result:
        print "Network (tenant_id %s) has been booted" % (args[0])
def pa_startSwitch(args, cmd):
    # Build the option parser for 'startSwitch'.
    usage = "%s <tenant_id> <virtual_dpid>" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)

def do_startSwitch(gopts, opts, args):
    # Boot a virtual switch.
    if len(args) != 2:
        print "startSwitch : Must specify a tenant_id and a virtual switch_id"
        sys.exit()
    req = { "tenantId" : int(args[0]), "vdpid" : int(args[1].replace(":", ""), 16)}
    result = connect(gopts, "tenant", "startSwitch", data=req, passwd=getPasswd(gopts))
    if result:
        print "Switch (switch_id %s) has been booted in virtual network (tenant_id %s)" % (args[1], args[0])
def pa_startPort(args, cmd):
    # BUG FIX: the usage string omitted <virtual_port>; do_startPort
    # requires tenant_id, virtual_dpid and virtual_port (3 arguments).
    usage = "%s <tenant_id> <virtual_dpid> <virtual_port>" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)
def do_startPort(gopts, opts, args):
if len(args) != 3:
print "startPort : Must specify a tenant_id, a virtual switch_id and a virtual port_id"
sys.exit()
req = { "tenantId" : int(args[0]), "vdpid" : int(args[1].replace(":", ""), 16), "vport" : int(args[2])}
reply = connect(gopts, "tenant", "startPort", data=req, passwd=getPasswd(gopts))
tenantId = reply.get('tenantId')
switchId = reply.get('vdpid')
portId = reply.get('vport')
if tenantId and switchId and hostId:
print "Port (port_id %s) has been started in virtual switch (tenant_id %s, switch_id %s)" % (portId, tenantId, switchId)
def pa_stopNetwork(args, cmd):
    # Build the option parser for 'stopNetwork'.
    usage = "%s <tenant_id>" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)

def do_stopNetwork(gopts, opts, args):
    # Shut down a virtual network (does not delete it).
    if len(args) != 1:
        print "stopNetwork : Must specify a tenant_id"
        sys.exit()
    req = { "tenantId" : int(args[0]) }
    result = connect(gopts, "tenant", "stopNetwork", data=req, passwd=getPasswd(gopts))
    if result:
        print "Network (tenant_id %s) has been shutdown" % (args[0])
def pa_stopSwitch(args, cmd):
    # Build the option parser for 'stopSwitch'.
    usage = "%s <tenant_id> <virtual_dpid>" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)

def do_stopSwitch(gopts, opts, args):
    # Shut down a virtual switch.
    if len(args) != 2:
        print "stopSwitch : Must specify a tenant_id and a virtual switch_id"
        sys.exit()
    req = { "tenantId" : int(args[0]), "vdpid" : int(args[1].replace(":", ""), 16)}
    result = connect(gopts, "tenant", "stopSwitch", data=req, passwd=getPasswd(gopts))
    if result:
        print "Switch (switch_id %s) has been shutdown in virtual network (tenant_id %s)" % (args[1], args[0])
def pa_stopPort(args, cmd):
    # BUG FIX: the usage string omitted <virtual_port>; do_stopPort
    # requires tenant_id, virtual_dpid and virtual_port (3 arguments).
    usage = "%s <tenant_id> <virtual_dpid> <virtual_port>" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)
def do_stopPort(gopts, opts, args):
    # Disable (stop) a virtual port.
    if len(args) != 3:
        print "stopPort : Must specify a tenant_id, a virtual switch_id and a virtual port_id"
        sys.exit()
    req = { "tenantId" : int(args[0]), "vdpid" : int(args[1].replace(":", ""), 16), "vport" : int(args[2])}
    result = connect(gopts, "tenant", "stopPort", data=req, passwd=getPasswd(gopts))
    if result:
        print "Port (port_id %s) has been shutdown in virtual switch (tenant_id %s, switch_id %s)" % (args[2], args[0], args[1])
def pa_getPhysicalFlowtable(args, cmd):
    # Build the option parser for 'getPhysicalFlowtable'.
    usage = "%s [<physical_dpid>]" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)

def do_getPhysicalFlowtable(gopts, opts, args):
    # Dump the flowtable of one switch (optional dpid) or of all switches.
    if len(args) > 1:
        print "getPhysicalFlowtable : May specify optional physical dpid"
        sys.exit()
    req = {}
    if len(args) == 1:
        req["dpid"] = int(args[0].replace(":", ""), 16)
    result = connect(gopts, "status", "getPhysicalFlowtable", data=req, passwd=getPasswd(gopts))
    print json.dumps(result)
def pa_getPhysicalHosts(args, cmd):
    # Build the option parser for 'getPhysicalHosts'.
    usage = "%s" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)

def do_getPhysicalHosts(gopts, opts, args):
    # Dump all hosts known in the physical topology as JSON.
    if len(args) > 0:
        print "getPhysicalHosts : No arguments"
        sys.exit()
    req = {}
    result = connect(gopts, "status", "getPhysicalHosts", data=req, passwd=getPasswd(gopts))
    print json.dumps(result)
def pa_getPhysicalTopology(args, cmd):
    # Build the option parser for 'getPhysicalTopology'.
    usage = "%s" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)

def do_getPhysicalTopology(gopts, opts, args):
    # Dump the physical topology as JSON.
    if len(args) > 0:
        print "getPhysicalTopology : No arguments"
        sys.exit()
    req = {}
    result = connect(gopts, "status", "getPhysicalTopology", data=req, passwd=getPasswd(gopts))
    print json.dumps(result)
def pa_listVirtualNetworks(args, cmd):
    # Build the option parser for 'listVirtualNetworks'.
    usage = "%s" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)

def do_listVirtualNetworks(gopts, opts, args):
    # Dump the tenant-id list of all virtual networks as JSON.
    if len(args) > 0:
        print "listVirtualNetworks : No arguments"
        sys.exit()
    req = {}
    result = connect(gopts, "status", "listVirtualNetworks", data=req, passwd=getPasswd(gopts))
    print json.dumps(result)
def pa_getVirtualAddressMapping(args, cmd):
    # BUG FIX: the usage string advertised a <virtual_dpid> argument,
    # but do_getVirtualAddressMapping accepts exactly one (tenant_id).
    usage = "%s <tenant_id>" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)
def do_getVirtualAddressMapping(gopts, opts, args):
    # Print the tenant's virtual<->physical address mapping as JSON.
    if len(args) != 1:
        print "getVirtualAddressMapping : Must specify a tenant_id"
        sys.exit()
    req = { "tenantId" : int(args[0]) }
    result = connect(gopts, "status", "getVirtualAddressMapping", data=req, passwd=getPasswd(gopts))
    print json.dumps(result)
def pa_getVirtualFlowtable(args, cmd):
    # Build the option parser for 'getVirtualFlowtable'.
    usage = "%s <tenant_id> [<virtual_dpid>]" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)

def do_getVirtualFlowtable(gopts, opts, args):
    # Dump the virtual flowtable of one switch or of the whole tenant.
    if (len(args) == 0) or (len(args) > 2):
        print "getVirtualFlowtable : Must specify a tenant_id, and optional virtual switch_id"
        sys.exit()
    req = { "tenantId" : int(args[0]) }
    if len(args) == 2:
        req["vdpid"] = int(args[1].replace(":", ""), 16)
    result = connect(gopts, "status", "getVirtualFlowtable", data=req, passwd=getPasswd(gopts))
    print json.dumps(result)
def pa_getVirtualHosts(args, cmd):
    # Build the option parser for 'getVirtualHosts'.
    usage = "%s <tenant_id>" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)

def do_getVirtualHosts(gopts, opts, args):
    # Dump the tenant's hosts as JSON.
    if len(args) != 1:
        print "getVirtualHosts : Must specify a tenant_id"
        sys.exit()
    req = { "tenantId": int(args[0]) }
    result = connect(gopts, "status", "getVirtualHosts", data=req, passwd=getPasswd(gopts))
    print json.dumps(result)
def pa_getVirtualLinkMapping(args, cmd):
    # Build the option parser for 'getVirtualLinkMapping'.
    usage = "%s <tenant_id>" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)
def do_getVirtualLinkMapping(gopts, opts, args):
if len(args) != 1:
print "getVirtualHosts : Must specify a tenant_id"
sys.exit()
req = { "tenantId": int(args[0]) }
result = connect(gopts, "status", "getVirtualLinkMapping", data=req, passwd=getPasswd(gopts))
print json.dumps(result)
def pa_getVirtualSwitchMapping(args, cmd):
    # Build the option parser for 'getVirtualSwitchMapping'.
    usage = "%s <tenant_id>" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)

def do_getVirtualSwitchMapping(gopts, opts, args):
    # Dump the virtual->physical switch mapping of a tenant as JSON.
    if len(args) != 1:
        print "getVirtualSwitchMapping : Must specify a tenant_id"
        sys.exit()
    req = { "tenantId": int(args[0]) }
    result = connect(gopts, "status", "getVirtualSwitchMapping", data=req, passwd=getPasswd(gopts))
    print json.dumps(result)
def pa_getVirtualTopology(args, cmd):
    # Build the option parser for 'getVirtualTopology'.
    usage = "%s <tenant_id>" % USAGE.format(cmd)
    (sdesc, ldesc) = DESCS[cmd]
    parser = OptionParser(usage=usage, description=ldesc)
    return parser.parse_args(args)

def do_getVirtualTopology(gopts, opts, args):
    # Dump the tenant's virtual topology as JSON.
    if len(args) != 1:
        print "getVirtualTopology : Must specify a tenant_id"
        sys.exit()
    req = { "tenantId": int(args[0]) }
    result = connect(gopts, "status", "getVirtualTopology", data=req, passwd=getPasswd(gopts))
    print json.dumps(result)
# Other methods
def translate_path(path_string):
    """Convert a hop list like "A/1-B/2,C/3-D/4" (colon-separated hex
    DPIDs) into the server form with decimal DPIDs, e.g. "1/1-2/2,3/3-4/4".
    """
    hops = []
    for hop in path_string.split(","):
        src, dst = hop.split("-")
        src_dpid, src_port = src.split("/")
        dst_dpid, dst_port = dst.split("/")
        src_long_dpid = int(src_dpid.replace(":", ""), 16)
        dst_long_dpid = int(dst_dpid.replace(":", ""), 16)
        hops.append("%s/%s-%s/%s" % (src_long_dpid, src_port,
                                     dst_long_dpid, dst_port))
    # BUG FIX: the old code called path.rstrip(",") and discarded the
    # result (strings are immutable), so the trailing comma was always
    # sent to the server; joining the hops avoids the issue entirely.
    return ",".join(hops)
def pa_help(args, cmd):
    # 'help' takes the target command name as its single argument.
    usage = "%s <cmd>" % USAGE.format(cmd)
    parser = OptionParser(usage=usage)
    return parser.parse_args(args)
def do_help(gopts, opts, args):
    """Show the per-command help by re-running its parser with --help."""
    if len(args) != 1:
        # Raised for the caller to handle — presumably caught by the
        # main dispatcher to print the global help; confirm.
        raise IndexError
    try:
        (pa, func) = CMDS[args[0]]
        pa(['--help'], args[0])
    except KeyError:
        # The old `except KeyError, e` binding was unused, and that
        # comma form is Python 2 only; plain `except` works everywhere.
        print("Invalid command : %s is an unknown command." % args[0])
        sys.exit()
def connect(opts, path, cmd, data=None, passwd=None):
    """POST a JSON-RPC call to OVX with HTTP basic auth and return the
    parsed 'result' member.  Exits the process on transport or
    authentication errors (CLI behaviour)."""
    try:
        url = getUrl(opts, path)
        passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
        passman.add_password(None, url, opts.ovx_user, passwd)
        authhandler = urllib2.HTTPBasicAuthHandler(passman)
        opener = urllib2.build_opener(authhandler)
        req = buildRequest(data, url, cmd)
        ph = opener.open(req)
        return parseResponse(ph.read())
    # BUG FIX: HTTPError is a subclass of URLError, so with URLError
    # listed first the HTTPError handler (401/504 messages) was
    # unreachable; the more specific handler must come first.
    except urllib2.HTTPError as e:
        if e.code == 401:
            print("Authentication failed: invalid password")
            sys.exit(1)
        elif e.code == 504:
            print("HTTP Error 504: Gateway timeout")
            sys.exit(1)
        else:
            print(e)
    except urllib2.URLError as e:
        print(e)
        sys.exit(1)
    except RuntimeError as e:
        print(e)
def parseResponse(data):
    """Decode a JSON-RPC response body and return its 'result' field.

    If the server reported an error, print the whole response and exit
    with status 1.
    """
    response = json.loads(data)
    if 'error' in response:
        print(response)
        sys.exit(1)
    return response['result']
def printVersion(option, opt, value, parser):
    """optparse callback: print the ovxctl version string and exit."""
    # Parenthesized single-argument print works on both Python 2 and 3.
    print("ovxctl-%s" % VERSION)
    sys.exit()
def printHelp(option, opt, value, parser):
    """optparse callback: print global usage plus the command table and exit."""
    # list(CMDS)/sorted() replaces the Python-2-only CMDS.iterkeys();
    # iterating the dict directly works identically on Python 2 and 3.
    cmds = sorted(c for c in CMDS if c != 'help')
    print(parser.format_help().strip())
    print("\n Available commands are: ")
    for c in cmds:
        (sdesc, ldesc) = DESCS[c]
        print(" {0:25} {1:10}".format(c, sdesc))
    print("\n See '%s help <command>' for more info on a specific command." % sys.argv[0])
    sys.exit()
# Dispatch table: command name -> (option-parser function, handler function).
# parse_global_args uses membership here to find where global options end,
# and do_help / printHelp enumerate it, so every command must be listed.
CMDS = {
    'addControllers': (pa_addControllers, do_addControllers),
    'createNetwork': (pa_createNetwork, do_createNetwork),
    'createSwitch': (pa_createSwitch, do_createSwitch),
    'createPort': (pa_createPort, do_createPort),
    'setInternalRouting': (pa_setInternalRouting, do_setInternalRouting),
    'connectHost': (pa_connectHost, do_connectHost),
    'connectLink': (pa_connectLink, do_connectLink),
    'setLinkPath': (pa_setLinkPath, do_setLinkPath),
    'connectRoute': (pa_connectRoute, do_connectRoute),
    'removeNetwork': (pa_removeNetwork, do_removeNetwork),
    'removeSwitch': (pa_removeSwitch, do_removeSwitch),
    'removePort': (pa_removePort, do_removePort),
    'disconnectHost': (pa_disconnectHost, do_disconnectHost),
    'disconnectLink': (pa_disconnectLink, do_disconnectLink),
    'disconnectRoute': (pa_disconnectRoute, do_disconnectRoute),
    'startNetwork': (pa_startNetwork, do_startNetwork),
    'startSwitch': (pa_startSwitch, do_startSwitch),
    'startPort': (pa_startPort, do_startPort),
    'stopNetwork': (pa_stopNetwork, do_stopNetwork),
    'stopSwitch': (pa_stopSwitch, do_stopSwitch),
    'stopPort': (pa_stopPort, do_stopPort),
    'getPhysicalFlowtable': (pa_getPhysicalFlowtable, do_getPhysicalFlowtable),
    'getPhysicalHosts': (pa_getPhysicalHosts, do_getPhysicalHosts),
    'getPhysicalTopology': (pa_getPhysicalTopology, do_getPhysicalTopology),
    'listVirtualNetworks': (pa_listVirtualNetworks, do_listVirtualNetworks),
    'getVirtualAddressMapping': (pa_getVirtualAddressMapping, do_getVirtualAddressMapping),
    'getVirtualFlowtable': (pa_getVirtualFlowtable, do_getVirtualFlowtable),
    'getVirtualHosts': (pa_getVirtualHosts, do_getVirtualHosts),
    'getVirtualLinkMapping': (pa_getVirtualLinkMapping, do_getVirtualLinkMapping),
    'getVirtualSwitchMapping': (pa_getVirtualSwitchMapping, do_getVirtualSwitchMapping),
    'getVirtualTopology': (pa_getVirtualTopology, do_getVirtualTopology),
    'help' : (pa_help, do_help)
}
# Command name -> (short description, long description).
# The short description feeds printHelp's command table; the long one is
# passed as description= to each command's OptionParser, so it MUST be a
# single string (the adjacent string literals below rely on implicit
# concatenation).
#
# Fixes in this revision:
#  * four entries (getVirtualHosts, getVirtualLinkMapping,
#    getVirtualSwitchMapping, getVirtualTopology) had a stray comma that
#    turned the intended concatenation into a 2-tuple;
#  * "ExampleL addController" typo (command is addControllers);
#  * setLinkPath's example was mislabelled "connectLink".
DESCS = {
    'addControllers' : ("Adds controllers to a virtual switch",
        ("Adds the specified list of controllers to a given virtual switch.\n"
         "Example: addControllers <tenantId> <vdpid> <ctrlUrls>")),
    'createNetwork' : ("Creates a virtual network",
        ("Creates a virtual network. Input: protocol, controllerIP, controller port, ip address, mask. "
         "\nExample: createNetwork tcp 1.1.1.1 6634 192.168.1.0 24")),
    'createSwitch' : ("Create virtual switch",
        ("Create a virtual switch. Must specify a tenant_id, and a list of the physical_dpids that will be part of the virtual switch."
         "\nExample: createSwitch 1 00:00:00:00:00:00:00:01,00:00:00:00:00:00:00:02")),
    'createPort' : ("Create virtual port",
        ("Create a virtual port. Must specify a tenant_id, a physical_dpid and a physical_port."
         "\nExample: createPort 1 00:00:00:00:00:00:00:01 1")),
    'setInternalRouting' : ("Set big-switch internal routing mechanism",
        ("Set big-switch internal routing mechanism. Must specify a tenant_id, a virtual switch_id, the routing type (spf, manual) "
         "and the number (0-255) of the backup paths that have to be computed."
         "\nExample: setInternalRouting 1 00:00:00:00:00:00:00:01 spf 128")),
    'connectHost' : ("Connect host to a virtual port",
        ("Connect host to a virtual port. Must specify a tenant_id, a virtual switch_id, a virtual port_id and the host MAC address."
         "\nExample: connectHost 1 00:a4:23:05:00:00:00:01 1 00:00:00:00:00:01")),
    'connectLink' : ("Connect two virtual ports through a virtual link",
        ("Connect two virtual ports through a virtual link. Must specify a tenant_id, a virtual src_switch_id, a virtual src_port_id, "
         "a virtual dst_switch_id, a virtual dst_port_id, the routing type (spf, manual) and the number (0-255) of the backup paths that have to be computed."
         "\nExample: connectLink 1 00:a4:23:05:00:00:00:01 1 00:a4:23:05:00:00:00:02 1 spf 1")),
    'setLinkPath' : ("Set the physical path of a virtual link",
        ("Set the physical path of a virtual link. Must specify a tenant_id, a virtual link_id, a physical path and a priority (0-255)."
         "\nExample: setLinkPath 1 1 00:00:00:00:00:00:00:01/1-00:00:00:00:00:00:00:02/1,"
         "00:00:00:00:00:00:00:2/2-00:00:00:00:00:00:00:3/1 128")),
    'connectRoute' : ("Connect two virtual ports inside a virtual big-switch",
        ("Connect two virtual ports inside a virtual big-switch. Must specify a tenant_id, a virtual switch_id, a virtual src_port_id, "
         "a virtual dst_port_id, a physical path and a priority (0-255)."
         "\nExample: connectRoute 1 00:a4:23:05:00:00:00:01 1 2 00:00:00:00:00:00:00:01/1-00:00:00:00:00:00:00:02/1,"
         "00:00:00:00:00:00:00:2/2-00:00:00:00:00:00:00:3/1 128")),
    'removeNetwork' : ("Remove a virtual network",
        ("Remove a virtual network. Must specify a tenant_id."
         "\nExample: removeNetwork 1")),
    'removeSwitch' : ("Remove virtual switch",
        ("Remove a virtual switch. Must specify a tenant_id and a virtual switch_id."
         "\nExample: removeSwitch 1 00:a4:23:05:00:00:00:01")),
    'removePort' : ("Remove virtual port",
        ("Remove a virtual port. Must specify a tenant_id, a virtual switch_id and a virtual port_id."
         "\nExample: removePort 1 00:a4:23:05:00:00:00:01 1")),
    'disconnectHost' : ("Disconnect host from a virtual port",
        ("Disconnect host from a virtual port. Must specify a tenant_id and the host_id."
         "\nExample: disconnectHost 1 1")),
    'disconnectLink' : ("Disconnect link between two virtual ports",
        ("Disconnect link between two virtual ports. Must specify a tenant_id and the link_id."
         "\nExample: disconnectLink 1 1")),
    'disconnectRoute' : ("Disconnect big-switch internal route between two virtual ports",
        ("Disconnect big-switch internal route between two virtual ports. Must specify a tenant_id and the route_id."
         "\nExample: disconnectRoute 1 00:a4:23:05:00:00:00:01 1")),
    'startNetwork' : ("Start a virtual network",
        ("Start a virtual network. Must specify a tenant_id."
         "\nExample: startNetwork 1")),
    'startSwitch' : ("Start a virtual switch",
        ("Start a virtual switch. Must specify a tenant_id and a virtual switch_id."
         "\nExample: startSwitch 1 00:a4:23:05:00:00:00:01")),
    'startPort' : ("Start a virtual port",
        ("Start a virtual port. Must specify a tenant_id, a virtual switch_id and a virtual port_id."
         "\nExample: startPort 1 00:a4:23:05:00:00:00:01 1")),
    'stopNetwork' : ("Stop a virtual network",
        ("Stop a virtual network. Must specify a tenant_id."
         "\nExample: stopNetwork 1")),
    'stopSwitch' : ("Shutdown a virtual switch",
        ("Shutdown a virtual switch. Must specify a tenant_id and a virtual switch_id."
         "\nExample: stopSwitch 1 00:a4:23:05:00:00:00:01")),
    'stopPort' : ("Shutdown a virtual port",
        ("Shutdown a virtual port. Must specify a tenant_id, a virtual switch_id and a virtual port_id."
         "\nExample: stopPort 1 00:a4:23:05:00:00:00:01 1")),
    # Monitoring API - admin only
    'getPhysicalFlowtable' : ("Get the physical flowtable of a specified switch or all switches",
        ("Get the physical flowtable of a specified switch or all switches. Specify optional physical switch_id."
         "\nExample: getPhysicalFlowtable 00:00:00:00:00:00:00:01")),
    'getPhysicalHosts' : ("Get a list of physical hosts",
        ("Get a list of physical hosts."
         "\nExample: getPhysicalHosts")),
    'getPhysicalTopology': ("Get the physical topology",
        ("Get the physical topology."
         "\nExample: getPhysicalTopology")),
    'listVirtualNetworks': ("Get a list of all virtual network tenant ID's",
        ("Get a list of all virtual network tenant ID's."
         "\nExample: listVirtualNetworks")),
    # Monitoring API - tenant restricted
    'getVirtualAddressMapping' : ("Get the virtual to physical address mapping for a specified virtual network",
        ("Get the virtual to physical address mapping. Must specify a virtual network tenant_id."
         "\nExample: getVirtualAddressMapping 1")),
    'getVirtualFlowtable' : ("Get the flowtable in the specified virtual network",
        ("Get the flowtable in the specified virtual network. Must specify a virtual switch_id, optional virtual switch_id."
         "\nExample: getVirtualFlowtable 00:a4:23:05:00:00:00:01")),
    'getVirtualHosts' : ("Get list of hosts in virtual network",
        ("Get list of hosts in virtual network. Must specify a tenant_id"
         "\nExample: getVirtualHosts 1")),
    'getVirtualLinkMapping' : ("Get the virtual to physical link mapping",
        ("Get the virtual to physical link mapping. Must specify a tenant_id."
         "\nExample: getVirtualLinkMapping 1")),
    'getVirtualSwitchMapping' : ("Get the virtual to physical switch mapping",
        ("Get the virtual to physical switch mapping. Must specify a tenant_id."
         "\nExample: getVirtualSwitchMapping 1")),
    'getVirtualTopology' : ("Get the virtual topology",
        ("Get the virtual topology. Must specify a tenant_id."
         "\nExample: getVirtualTopology 1"))
}
# Usage template: optparse substitutes %prog with the program name, and the
# pa_* parsers insert the command name via .format().
USAGE="%prog {}"
# REST endpoint template: filled with host, port and API path.
URL = "http://%s:%s/%s"
def getPasswd(opts):
    """Return the OVX admin password.

    With --no-passwd an empty string is returned; otherwise the user is
    prompted interactively (input is not echoed).
    """
    if opts.no_passwd:
        return ""
    return getpass.getpass("Password: ")
def addCommonOpts(parser):
    """Register the global options shared by every ovxctl invocation."""
    # Plain value options: (short, long, dest, default, help).
    value_options = [
        ("-h", "--hostname", "host", "localhost",
         "Specify the OpenVirteX host; default='localhost'"),
        ("-p", "--port", "port", "8080",
         "Specify the OpenVirteX web port; default=8080"),
        ("-u", "--user", "ovx_user", "admin",
         "OpenVirtex admin user; default='admin'"),
    ]
    for short_flag, long_flag, dest, default, help_text in value_options:
        parser.add_option(short_flag, long_flag, dest=dest, default=default,
                          help=help_text)
    parser.add_option("-n", "--no-passwd", action="store_true",
                      dest="no_passwd", default=False,
                      help="Run ovxctl with no password; default false")
    # -v and --help short-circuit through optparse callbacks that print
    # and exit immediately.
    parser.add_option("-v", "--version", action="callback", callback=printVersion)
    parser.add_option("--help", action="callback", callback=printHelp)
def parse_global_args(arglist):
    """Split the leading global options off arglist and parse them.

    Everything up to the first recognised command name is treated as a
    global option. arglist is trimmed IN PLACE so the caller receives the
    remaining "command [command_args]" tokens via the returned reference.
    Returns (parsed_options, remaining_arglist, parser).
    """
    usage = "%s [options] command [command_args]" % sys.argv[0]
    split_at = 0
    while split_at < len(arglist) and arglist[split_at] not in CMDS:
        split_at += 1
    global_args = arglist[:split_at]
    del arglist[:split_at]  # keep the in-place mutation callers rely on
    parser = OptionParser(add_help_option=False, usage=usage)
    addCommonOpts(parser)
    (opts, _pargs) = parser.parse_args(global_args)
    return (opts, arglist, parser)
if __name__ == '__main__':
    # Entry point: parse global options, dispatch to the command's parser
    # and handler, then exit 0. Any handled error falls through to the
    # generic help printout below (sys.exit raises SystemExit, which is
    # NOT an Exception subclass, so the success path is not caught).
    try:
        (gopts, rargs, parser) = parse_global_args(sys.argv[1:])
        if len(rargs) < 1:
            raise IndexError
        (parse_args, do_func) = CMDS[rargs[0]]
        (opts, args) = parse_args(rargs[1:], rargs[0])
        do_func(gopts, opts, args)
        sys.exit(0)
    except ValueError:
        # 'except ValueError, e' is Python 2 only and e was unused.
        print("The argument types being sent to the function %s are incorrect. Please double check them." % sys.argv[1])
    except IndexError:
        print("%s is an unknown command" % sys.argv[-1])
    except Exception:
        # Broad catch kept deliberately: this is the top-level boundary
        # and the script always falls back to printing help.
        print("unknown error")
    printHelp(None, None, None, parser)
| apache-2.0 |
40023154/2015cd_midterm2 | static/Brython3.1.1-20150328-091302/Lib/jqueryui/__init__.py | 603 | 3671 | """Wrapper around the jQuery UI library
Exposes a single object, jq, to manipulate the widgets designed in the library
This object supports :
- subscription : js[elt_id] returns an object matching the element with the
specified id
- a method get(**kw). The only keyword currently supported is "selector". The
method returns a list of instances of the class Element, each instance wraps
the elements matching the CSS selector passed
jq(selector="button") : returns instances of Element for all button tags
The value can be a list or tuple of CSS selector strings :
js(selector=("input[type=submit]","a")) : instances of Element for all
"input" tags with attribute "type" set to "submit" + "a" tags (anchors)
Instances of Element have the same interface as the selections made by the
jQuery function $, with the additional methods provided by jQuery UI. For
instance, to turn an element into a dialog :
jq[elt_id].dialog()
When jQuery UI methods expect a Javascript object, they can be passed as
key/value pairs :
jq['tags'].autocomplete(source=availableTags)
"""
from browser import html, document, window
import javascript

# Directory containing this module; used to locate the bundled CSS/JS assets.
_path = __file__[:__file__.rfind('/')]+'/'

document <= html.LINK(rel="stylesheet",
    href=_path+'css/smoothness/jquery-ui.css')

# The scripts must be loaded in blocking mode, by using the function
# load(script_url[, names]) in module javascript
# If we just add them to the document with script tags, eg :
#
#    document <= html.SCRIPT(script_url)
#    _jqui = window.jQuery.noConflict(True)
#
# the name "jQuery" is not in the Javascript namespace until the script is
# fully loaded in the page, so "window.jQuery" raises an exception

# Load jQuery and put name 'jQuery' in the global Javascript namespace
javascript.load(_path+'jquery-1.11.2.js', ['jQuery'])
javascript.load(_path+'jquery-ui.js')

# Private handle on jQuery; noConflict(True) removes $ and jQuery from the
# global namespace so this wrapper owns the only reference.
_jqui = window.jQuery.noConflict(True)
# DOM event names given special treatment by Element.__getattr__:
# accessing one of these as an attribute returns a binder, so that
# elt.click(f) behaves like elt.bind('click', f).
_events = ['abort',
           'beforeinput',
           'blur',
           'click',
           'compositionstart',
           'compositionupdate',
           'compositionend',
           'dblclick',
           'error',
           'focus',
           'focusin',
           'focusout',
           'input',
           'keydown',
           'keyup',
           'load',
           'mousedown',
           'mouseenter',
           'mouseleave',
           'mousemove',
           'mouseout',
           'mouseover',
           'mouseup',
           'resize',
           'scroll',
           'select',
           'unload']
class JQFunction:
    """Callable wrapper around a jQuery UI method.

    jQuery UI methods take their options as a single Javascript object,
    so Python keyword arguments are folded into one trailing dict.
    """

    def __init__(self, func):
        self.func = func

    def __call__(self, *args, **kw):
        if not kw:
            return self.func(*args)
        # All keyword arguments become one options-object argument.
        return self.func(*args, kw)
class Element:
    """Wrapper around the objects returned by jQuery selections."""

    def __init__(self, item):
        self.item = item

    def bind(self, event, callback):
        """Attach callback as the handler for the named event."""
        getattr(self.item, event)(callback)

    def __getattr__(self, attr):
        # Resolve on the wrapped jQuery object first, so unknown names
        # raise AttributeError exactly like plain attribute access.
        wrapped = getattr(self.item, attr)
        if attr in _events:
            # elt.click(f) is handled like elt.bind('click', f)
            return lambda callback: self.bind(attr, callback)
        if callable(wrapped):
            return JQFunction(wrapped)
        return wrapped
class jq:
    """Entry point mirroring the jQuery $ function.

    jq.get(selector=...) returns Element wrappers for CSS selectors;
    jq[elt_id] is shorthand for a single '#elt_id' lookup.
    """

    @staticmethod
    def get(**selectors):
        """Return Element wrapper(s) for the given keyword selector.

        selector=<str or list/tuple of str> -> list of Element.
        element=<obj> -> a single Element (note: replaces the list).
        """
        items = []
        for k, v in selectors.items():
            if k == 'selector':
                # Bug fix: isinstance's second argument must be a type or
                # a TUPLE of types; the original passed a list, which
                # raises TypeError at runtime.
                if isinstance(v, (list, tuple)):
                    values = v
                else:
                    values = [v]
                for value in values:
                    items.append(Element(_jqui(value)))
            elif k == 'element':
                items = Element(_jqui(v))
        return items

    @staticmethod
    def __getitem__(element_id):
        # NOTE(review): subscripting the class itself (jq[...]) relies on
        # Brython resolving __getitem__ on the class; in CPython this
        # would need a metaclass -- confirm on the target runtime.
        return jq.get(selector='#'+element_id)[0]
| gpl-2.0 |
Darkmer/masterchief | CourseBuilderenv/lib/python2.7/site-packages/pip/_vendor/html5lib/treewalkers/_base.py | 310 | 6919 | from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import text_type, string_types
import gettext
_ = gettext.gettext
from xml.dom import Node
# Node-type constants, aliased from xml.dom.Node for readability in the
# walkers' dispatch code.
DOCUMENT = Node.DOCUMENT_NODE
DOCTYPE = Node.DOCUMENT_TYPE_NODE
TEXT = Node.TEXT_NODE
ELEMENT = Node.ELEMENT_NODE
COMMENT = Node.COMMENT_NODE
ENTITY = Node.ENTITY_NODE
# Sentinel for node types the walkers do not recognise.
UNKNOWN = "<#UNKNOWN#>"

from ..constants import voidElements, spaceCharacters
# Collapse the set of space characters into one string for str.strip() calls.
spaceCharacters = "".join(spaceCharacters)
def to_text(s, blank_if_none=True):
    """Wrapper around six.text_type to convert None to empty string.

    None becomes "" (or stays None when blank_if_none is false); text
    passes through unchanged; anything else is coerced to text.
    """
    if s is None:
        return "" if blank_if_none else None
    if isinstance(s, text_type):
        return s
    return text_type(s)
def is_text_or_none(string):
    """Wrapper around isinstance(string_types) or is None."""
    if string is None:
        return True
    return isinstance(string, string_types)
class TreeWalker(object):
    """Base class for walkers that serialise a parsed tree into a stream
    of token dicts (StartTag, EndTag, Characters, Comment, ...).

    Subclasses implement __iter__; the helper methods below build the
    individual token dicts in the canonical shape.
    """

    def __init__(self, tree):
        self.tree = tree

    def __iter__(self):
        raise NotImplementedError

    def error(self, msg):
        """Return a SerializeError token carrying msg."""
        return {"type": "SerializeError", "data": msg}

    def emptyTag(self, namespace, name, attrs, hasChildren=False):
        """Yield an EmptyTag token for a void element; a SerializeError
        follows if the void element unexpectedly has children."""
        assert namespace is None or isinstance(namespace, string_types), type(namespace)
        assert isinstance(name, string_types), type(name)
        assert all((namespace is None or isinstance(namespace, string_types)) and
                   isinstance(name, string_types) and
                   isinstance(value, string_types)
                   for (namespace, name), value in attrs.items())

        yield {"type": "EmptyTag", "name": to_text(name, False),
               "namespace": to_text(namespace),
               "data": attrs}
        if hasChildren:
            yield self.error(_("Void element has children"))

    def startTag(self, namespace, name, attrs):
        """Return a StartTag token; attribute keys/values are normalised
        to text."""
        assert namespace is None or isinstance(namespace, string_types), type(namespace)
        assert isinstance(name, string_types), type(name)
        assert all((namespace is None or isinstance(namespace, string_types)) and
                   isinstance(name, string_types) and
                   isinstance(value, string_types)
                   for (namespace, name), value in attrs.items())

        return {"type": "StartTag",
                "name": text_type(name),
                "namespace": to_text(namespace),
                "data": dict(((to_text(namespace, False), to_text(name)),
                              to_text(value, False))
                             for (namespace, name), value in attrs.items())}

    def endTag(self, namespace, name):
        """Return an EndTag token."""
        assert namespace is None or isinstance(namespace, string_types), type(namespace)
        # Bug fix: this assert previously reported type(namespace) for a
        # bad *name*, which made failures misleading.
        assert isinstance(name, string_types), type(name)

        return {"type": "EndTag",
                "name": to_text(name, False),
                "namespace": to_text(namespace),
                "data": {}}

    def text(self, data):
        """Yield SpaceCharacters / Characters tokens for data, emitting
        leading and trailing whitespace as separate tokens."""
        assert isinstance(data, string_types), type(data)

        data = to_text(data)
        middle = data.lstrip(spaceCharacters)
        left = data[:len(data) - len(middle)]
        if left:
            yield {"type": "SpaceCharacters", "data": left}
        data = middle
        middle = data.rstrip(spaceCharacters)
        right = data[len(middle):]
        if middle:
            yield {"type": "Characters", "data": middle}
        if right:
            yield {"type": "SpaceCharacters", "data": right}

    def comment(self, data):
        """Return a Comment token."""
        assert isinstance(data, string_types), type(data)
        return {"type": "Comment", "data": text_type(data)}

    def doctype(self, name, publicId=None, systemId=None, correct=True):
        """Return a Doctype token."""
        assert is_text_or_none(name), type(name)
        assert is_text_or_none(publicId), type(publicId)
        assert is_text_or_none(systemId), type(systemId)

        return {"type": "Doctype",
                "name": to_text(name),
                "publicId": to_text(publicId),
                "systemId": to_text(systemId),
                "correct": to_text(correct)}

    def entity(self, name):
        """Return an Entity token."""
        assert isinstance(name, string_types), type(name)
        return {"type": "Entity", "name": text_type(name)}

    def unknown(self, nodeType):
        """Return a SerializeError token for an unrecognised node type."""
        return self.error(_("Unknown node type: ") + nodeType)
class NonRecursiveTreeWalker(TreeWalker):
    """Iterative tree walker: subclasses supply node accessors and this
    class turns them into a token stream without recursion.

    Subclasses must implement the four accessors below. getNodeDetails
    returns a tuple whose first element is one of the node-type constants
    (DOCTYPE, TEXT, ELEMENT, ...) followed by type-specific fields.
    """

    def getNodeDetails(self, node):
        raise NotImplementedError

    def getFirstChild(self, node):
        raise NotImplementedError

    def getNextSibling(self, node):
        raise NotImplementedError

    def getParentNode(self, node):
        raise NotImplementedError

    def __iter__(self):
        # Depth-first traversal kept iterative: descend to first child
        # when present, otherwise walk up emitting EndTags until a next
        # sibling (or the root) is found.
        currentNode = self.tree
        while currentNode is not None:
            details = self.getNodeDetails(currentNode)
            type, details = details[0], details[1:]
            hasChildren = False

            if type == DOCTYPE:
                yield self.doctype(*details)

            elif type == TEXT:
                for token in self.text(*details):
                    yield token

            elif type == ELEMENT:
                namespace, name, attributes, hasChildren = details
                if name in voidElements:
                    # Void elements are emitted as EmptyTag and any
                    # children are reported as an error, not walked.
                    for token in self.emptyTag(namespace, name, attributes,
                                               hasChildren):
                        yield token
                    hasChildren = False
                else:
                    yield self.startTag(namespace, name, attributes)

            elif type == COMMENT:
                yield self.comment(details[0])

            elif type == ENTITY:
                yield self.entity(details[0])

            elif type == DOCUMENT:
                hasChildren = True

            else:
                yield self.unknown(details[0])

            if hasChildren:
                firstChild = self.getFirstChild(currentNode)
            else:
                firstChild = None

            if firstChild is not None:
                currentNode = firstChild
            else:
                # No children: close tags while climbing until we find a
                # sibling to continue with, or reach the root and stop.
                while currentNode is not None:
                    details = self.getNodeDetails(currentNode)
                    type, details = details[0], details[1:]
                    if type == ELEMENT:
                        namespace, name, attributes, hasChildren = details
                        if name not in voidElements:
                            yield self.endTag(namespace, name)
                    if self.tree is currentNode:
                        currentNode = None
                        break
                    nextSibling = self.getNextSibling(currentNode)
                    if nextSibling is not None:
                        currentNode = nextSibling
                        break
                    else:
                        currentNode = self.getParentNode(currentNode)
| mit |
zubron/servo | tests/wpt/web-platform-tests/tools/pywebsocket/src/mod_pywebsocket/__init__.py | 552 | 8263 | # Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""WebSocket extension for Apache HTTP Server.
mod_pywebsocket is a WebSocket extension for Apache HTTP Server
intended for testing or experimental purposes. mod_python is required.
Installation
============
0. Prepare an Apache HTTP Server for which mod_python is enabled.
1. Specify the following Apache HTTP Server directives to suit your
configuration.
If mod_pywebsocket is not in the Python path, specify the following.
<websock_lib> is the directory where mod_pywebsocket is installed.
PythonPath "sys.path+['<websock_lib>']"
Always specify the following. <websock_handlers> is the directory where
user-written WebSocket handlers are placed.
PythonOption mod_pywebsocket.handler_root <websock_handlers>
PythonHeaderParserHandler mod_pywebsocket.headerparserhandler
To limit the search for WebSocket handlers to a directory <scan_dir>
under <websock_handlers>, configure as follows:
PythonOption mod_pywebsocket.handler_scan <scan_dir>
<scan_dir> is useful in saving scan time when <websock_handlers>
contains many non-WebSocket handler files.
If you want to allow handlers whose canonical path is not under the root
directory (i.e. symbolic link is in root directory but its target is not),
configure as follows:
PythonOption mod_pywebsocket.allow_handlers_outside_root_dir On
Example snippet of httpd.conf:
(mod_pywebsocket is in /websock_lib, WebSocket handlers are in
/websock_handlers, port is 80 for ws, 443 for wss.)
<IfModule python_module>
PythonPath "sys.path+['/websock_lib']"
PythonOption mod_pywebsocket.handler_root /websock_handlers
PythonHeaderParserHandler mod_pywebsocket.headerparserhandler
</IfModule>
2. Tune Apache parameters for serving WebSocket. We'd like to note that at
least TimeOut directive from core features and RequestReadTimeout
directive from mod_reqtimeout should be modified not to kill connections
in only a few seconds of idle time.
3. Verify installation. You can use example/console.html to poke the server.
Writing WebSocket handlers
==========================
When a WebSocket request comes in, the resource name
specified in the handshake is considered as if it is a file path under
<websock_handlers> and the handler defined in
<websock_handlers>/<resource_name>_wsh.py is invoked.
For example, if the resource name is /example/chat, the handler defined in
<websock_handlers>/example/chat_wsh.py is invoked.
A WebSocket handler is composed of the following three functions:
web_socket_do_extra_handshake(request)
web_socket_transfer_data(request)
web_socket_passive_closing_handshake(request)
where:
request: mod_python request.
web_socket_do_extra_handshake is called during the handshake after the
headers are successfully parsed and WebSocket properties (ws_location,
ws_origin, and ws_resource) are added to request. A handler
can reject the request by raising an exception.
A request object has the following properties that you can use during the
extra handshake (web_socket_do_extra_handshake):
- ws_resource
- ws_origin
- ws_version
- ws_location (HyBi 00 only)
- ws_extensions (HyBi 06 and later)
- ws_deflate (HyBi 06 and later)
- ws_protocol
- ws_requested_protocols (HyBi 06 and later)
The last two are a bit tricky. See the next subsection.
Subprotocol Negotiation
-----------------------
For HyBi 06 and later, ws_protocol is always set to None when
web_socket_do_extra_handshake is called. If ws_requested_protocols is not
None, you must choose one subprotocol from this list and set it to
ws_protocol.
For HyBi 00, when web_socket_do_extra_handshake is called,
ws_protocol is set to the value given by the client in
Sec-WebSocket-Protocol header or None if
such header was not found in the opening handshake request. Finish extra
handshake with ws_protocol untouched to accept the request subprotocol.
Then, Sec-WebSocket-Protocol header will be sent to
the client in response with the same value as requested. Raise an exception
in web_socket_do_extra_handshake to reject the requested subprotocol.
Data Transfer
-------------
web_socket_transfer_data is called after the handshake completed
successfully. A handler can receive/send messages from/to the client
using request. mod_pywebsocket.msgutil module provides utilities
for data transfer.
You can receive a message by the following statement.
message = request.ws_stream.receive_message()
This call blocks until any complete text frame arrives, and the payload data
of the incoming frame will be stored into message. When you're using IETF
HyBi 00 or later protocol, receive_message() will return None on receiving
client-initiated closing handshake. When any error occurs, receive_message()
will raise some exception.
You can send a message by the following statement.
request.ws_stream.send_message(message)
Closing Connection
------------------
Executing the following statement or just returning from
web_socket_transfer_data causes the connection to close.

    request.ws_stream.close_connection()

close_connection waits for a closing handshake acknowledgement coming from
the client. If it cannot receive a valid acknowledgement, it raises an
exception.
web_socket_passive_closing_handshake is called after the server receives
incoming closing frame from the client peer immediately. You can specify
code and reason by return values. They are sent as a outgoing closing frame
from the server. A request object has the following properties that you can
use in web_socket_passive_closing_handshake.
- ws_close_code
- ws_close_reason
Threading
---------
A WebSocket handler must be thread-safe if the server (Apache or
standalone.py) is configured to use threads.
Configuring WebSocket Extension Processors
------------------------------------------
See extensions.py for supported WebSocket extensions. Note that they are
unstable and their APIs are subject to change substantially.
A request object has these extension processing related attributes.
- ws_requested_extensions:
A list of common.ExtensionParameter instances representing extension
parameters received from the client in the client's opening handshake.
You shouldn't modify it manually.
- ws_extensions:
A list of common.ExtensionParameter instances representing extension
parameters to send back to the client in the server's opening handshake.
You shouldn't touch it directly. Instead, call methods on extension
processors.
- ws_extension_processors:
A list of loaded extension processors. Find the processor for the
extension you want to configure from it, and call its methods.
"""
# vi:sts=4 sw=4 et tw=72
| mpl-2.0 |
mrquim/mrquimrepo | repo/plugin.video.live.streamspro/pyaes.py | 189 | 16661 | """Simple AES cipher implementation in pure Python following PEP-272 API
Homepage: https://bitbucket.org/intgr/pyaes/
The goal of this module is to be as fast as reasonable in Python while still
being Pythonic and readable/understandable. It is licensed under the permissive
MIT license.
Hopefully the code is readable and commented enough that it can serve as an
introduction to the AES cipher for Python coders. In fact, it should go along
well with the Stick Figure Guide to AES:
http://www.moserware.com/2009/09/stick-figure-guide-to-advanced.html
Contrary to intuition, this implementation numbers the 4x4 matrices from top to
bottom for efficiency reasons::
0 4 8 12
1 5 9 13
2 6 10 14
3 7 11 15
Effectively it's the transposition of what you'd expect. This actually makes
the code simpler -- except the ShiftRows step, but hopefully the explanation
there clears it up.
"""
####
# Copyright (c) 2010 Marti Raudsepp <marti@juffo.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
####
from array import array

# Globals mandated by PEP 272:
# http://www.python.org/dev/peps/pep-0272/
MODE_ECB = 1
MODE_CBC = 2
# CTR mode is not implemented in this module.
#MODE_CTR = 6

block_size = 16
# key_size is None per PEP 272: any of the 16/24/32-byte key lengths is
# accepted (selected at runtime in AES.setkey).
key_size = None
def new(key, mode, IV=None):
    """PEP 272 factory: return a cipher object for the given key and mode.

    key -- 16, 24 or 32 byte AES key
    mode -- MODE_ECB or MODE_CBC
    IV -- 16-byte initialization vector, required for CBC mode

    Raises ValueError when CBC mode is requested without an IV and
    NotImplementedError for unsupported modes.
    """
    if mode == MODE_ECB:
        return ECBMode(AES(key))
    elif mode == MODE_CBC:
        if IV is None:
            # Call form is valid on both Python 2 and 3; the original
            # 'raise ValueError, "..."' comma form is Python 2 only.
            raise ValueError("CBC mode needs an IV value!")
        return CBCMode(AES(key), IV)
    else:
        raise NotImplementedError
#### AES cipher implementation
class AES(object):
block_size = 16
    def __init__(self, key):
        # Key expansion happens once, up front; see setkey()/expand_key().
        self.setkey(key)
def setkey(self, key):
"""Sets the key and performs key expansion."""
self.key = key
self.key_size = len(key)
if self.key_size == 16:
self.rounds = 10
elif self.key_size == 24:
self.rounds = 12
elif self.key_size == 32:
self.rounds = 14
else:
raise ValueError, "Key length must be 16, 24 or 32 bytes"
self.expand_key()
    def expand_key(self):
        """Performs AES key expansion on self.key and stores in self.exkey"""
        # The key schedule specifies how parts of the key are fed into the
        # cipher's round functions. "Key expansion" means performing this
        # schedule in advance. Almost all implementations do this.
        #
        # Here's a description of AES key schedule:
        # http://en.wikipedia.org/wiki/Rijndael_key_schedule
        #
        # NOTE: relies on the module-level lookup tables aes_sbox and
        # aes_Rcon (defined elsewhere in this file).

        # The expanded key starts with the actual key itself
        exkey = array('B', self.key)

        # extra key expansion steps (0/2/3 for 128/192/256-bit keys)
        if self.key_size == 16:
            extra_cnt = 0
        elif self.key_size == 24:
            extra_cnt = 2
        else:
            extra_cnt = 3

        # 4-byte temporary variable for key expansion
        word = exkey[-4:]
        # Each expansion cycle uses 'i' once for Rcon table lookup
        for i in xrange(1, 11):

            #### key schedule core:
            # left-rotate by 1 byte
            word = word[1:4] + word[0:1]

            # apply S-box to all bytes
            for j in xrange(4):
                word[j] = aes_sbox[word[j]]

            # apply the Rcon table to the leftmost byte
            word[0] = word[0] ^ aes_Rcon[i]
            #### end key schedule core

            for z in xrange(4):
                for j in xrange(4):
                    # mix in bytes from the last subkey
                    word[j] ^= exkey[-self.key_size + j]
                exkey.extend(word)

            # Last key expansion cycle always finishes here
            if len(exkey) >= (self.rounds+1) * self.block_size:
                break

            # Special substitution step for 256-bit key
            if self.key_size == 32:
                for j in xrange(4):
                    # mix in bytes from the last subkey XORed with S-box of
                    # current word bytes
                    word[j] = aes_sbox[word[j]] ^ exkey[-self.key_size + j]
                exkey.extend(word)

            # Twice for 192-bit key, thrice for 256-bit key
            for z in xrange(extra_cnt):
                for j in xrange(4):
                    # mix in bytes from the last subkey
                    word[j] ^= exkey[-self.key_size + j]
                exkey.extend(word)

        # Expanded key: (rounds + 1) 16-byte round keys, consumed by
        # add_round_key().
        self.exkey = exkey
def add_round_key(self, block, round):
    """AddRoundKey step in AES: XOR one 16-byte round subkey into the state.

    block: mutable 16-byte state, modified in place.
    round: index of the subkey within the expanded key self.exkey.
    (Py2-only xrange replaced by range; dead print comment dropped.)
    """
    offset = round * 16
    exkey = self.exkey
    for i in range(16):
        block[i] ^= exkey[offset + i]
def sub_bytes(self, block, sbox):
    """SubBytes step: substitute every byte of the state through sbox, in place.

    Depending on whether encrypting or decrypting, a different sbox array
    is passed in (aes_sbox vs aes_inv_sbox).
    (Py2-only xrange replaced by range.)
    """
    for i in range(16):
        block[i] = sbox[block[i]]
def shift_rows(self, b):
    """ShiftRows step: rotate row r of the state left by r positions.

    The state is stored transposed, so row r occupies cells
    r, r+4, r+8, r+12 (top to bottom)::

        0 4  8 12   ->  0  4  8 12  -- row 0 unchanged
        1 5  9 13   ->  5  9 13  1  -- rotated left by 1 (wraps)
        2 6 10 14   -> 10 14  2  6  -- rotated by 2
        3 7 11 15   -> 15  3  7 11  -- rotated by 3
    """
    for row in range(1, 4):
        cells = [b[row + 4 * c] for c in range(4)]
        for c in range(4):
            b[row + 4 * c] = cells[(c + row) % 4]
def shift_rows_inv(self, b):
    """Inverse ShiftRows: rotate row r of the transposed state right by r.

    Exactly undoes shift_rows; used during decryption.
    """
    for row in range(1, 4):
        cells = [b[row + 4 * c] for c in range(4)]
        for c in range(4):
            b[row + 4 * c] = cells[(c - row) % 4]
def mix_columns(self, block):
    """MixColumns step: mix the four bytes of each state column, in place.

    Each output byte is a GF(2^8) linear combination of the column's
    input bytes with coefficients {2, 3, 1, 1} rotated per row, looked up
    in the precomputed gf_mul_by_2 / gf_mul_by_3 tables.
    (Py2-only xrange replaced by range.)
    """
    # Cache global multiplication tables in locals for speed
    mul_by_2 = gf_mul_by_2
    mul_by_3 = gf_mul_by_3

    # Since we're dealing with a transposed matrix, columns are already
    # sequential
    for i in range(4):
        col = i * 4
        v0, v1, v2, v3 = (block[col], block[col + 1], block[col + 2],
                          block[col + 3])
        block[col] = mul_by_2[v0] ^ v3 ^ v2 ^ mul_by_3[v1]
        block[col + 1] = mul_by_2[v1] ^ v0 ^ v3 ^ mul_by_3[v2]
        block[col + 2] = mul_by_2[v2] ^ v1 ^ v0 ^ mul_by_3[v3]
        block[col + 3] = mul_by_2[v3] ^ v2 ^ v1 ^ mul_by_3[v0]
def mix_columns_inv(self, block):
    """Inverse MixColumns: undo mix_columns during decryption, in place.

    Same structure as mix_columns but with the inverse coefficient set
    {14, 11, 13, 9}, via the precomputed gf_mul_by_* tables.
    (Py2-only xrange replaced by range.)
    """
    # Cache global multiplication tables in locals for speed
    mul_9 = gf_mul_by_9
    mul_11 = gf_mul_by_11
    mul_13 = gf_mul_by_13
    mul_14 = gf_mul_by_14

    # Since we're dealing with a transposed matrix, columns are already
    # sequential
    for i in range(4):
        col = i * 4
        v0, v1, v2, v3 = (block[col], block[col + 1], block[col + 2],
                          block[col + 3])
        block[col] = mul_14[v0] ^ mul_9[v3] ^ mul_13[v2] ^ mul_11[v1]
        block[col + 1] = mul_14[v1] ^ mul_9[v0] ^ mul_13[v3] ^ mul_11[v2]
        block[col + 2] = mul_14[v2] ^ mul_9[v1] ^ mul_13[v0] ^ mul_11[v3]
        block[col + 3] = mul_14[v3] ^ mul_9[v2] ^ mul_13[v1] ^ mul_11[v0]
def encrypt_block(self, block):
    """Encrypt a single 16-byte block in place (the main AES function).

    For efficiency the state is passed between steps as a mutable array
    rather than being returned.
    (Py2-only xrange replaced by range.)
    """
    self.add_round_key(block, 0)

    for round in range(1, self.rounds):
        self.sub_bytes(block, aes_sbox)
        self.shift_rows(block)
        self.mix_columns(block)
        self.add_round_key(block, round)

    # no mix_columns step in the last round
    self.sub_bytes(block, aes_sbox)
    self.shift_rows(block)
    self.add_round_key(block, self.rounds)
def decrypt_block(self, block):
    """Decrypt a single 16-byte block in place (main AES decryption).

    Applies the encryption steps in reverse order with the inverse
    transformations. For efficiency the state is passed between steps as
    a mutable array rather than being returned.
    (Py2-only xrange replaced by range; the old comment claimed the loop
    counts down from 15 — it actually starts at self.rounds - 1.)
    """
    self.add_round_key(block, self.rounds)

    # count round keys down from rounds-1 ... 1
    for round in range(self.rounds - 1, 0, -1):
        self.shift_rows_inv(block)
        self.sub_bytes(block, aes_inv_sbox)
        self.add_round_key(block, round)
        self.mix_columns_inv(block)

    # no mix_columns step in the last round
    self.shift_rows_inv(block)
    self.sub_bytes(block, aes_inv_sbox)
    self.add_round_key(block, 0)
#### ECB mode implementation
class ECBMode(object):
    """Electronic CodeBook (ECB) mode encryption.

    Applies the block cipher to each block independently, with no feedback
    between blocks. NB! Identical plaintext blocks produce identical
    ciphertext blocks, so this mode is insecure for almost all purposes.

    Fixes over the original: Python-2-only raise syntax, xrange, and
    array.tostring() (removed in Python 3.9); the error message no longer
    hardcodes "plaintext"/"16" since ecb() also serves decryption and any
    block size.
    """

    def __init__(self, cipher):
        # cipher must expose block_size plus encrypt_block/decrypt_block
        # methods that transform a mutable byte array in place.
        self.cipher = cipher
        self.block_size = cipher.block_size

    def ecb(self, data, block_func):
        """Apply block_func to every block of data and return the result.

        Raises ValueError if data is not a whole number of blocks.
        """
        if len(data) % self.block_size != 0:
            raise ValueError("Data length must be a multiple of %d" %
                             self.block_size)

        block_size = self.block_size
        data = array('B', data)

        for offset in range(0, len(data), block_size):
            block = data[offset:offset + block_size]
            block_func(block)
            data[offset:offset + block_size] = block

        # array.tostring() was removed in Python 3.9; keep a fallback for
        # Python 2 arrays, which lack tobytes().
        if hasattr(data, 'tobytes'):
            return data.tobytes()
        return data.tostring()

    def encrypt(self, data):
        """Encrypt data in ECB mode"""
        return self.ecb(data, self.cipher.encrypt_block)

    def decrypt(self, data):
        """Decrypt data in ECB mode"""
        return self.ecb(data, self.cipher.decrypt_block)
#### CBC mode
class CBCMode(object):
    """Cipher Block Chaining (CBC) mode encryption; avoids ECB content leaks.

    Each plaintext block is XORed with the ciphertext block preceding it
    (the IV for the first block) before encryption; decryption is the
    inverse. self.IV is advanced after every call, so consecutive calls
    continue the same chain.

    See: http://en.wikipedia.org/wiki/Block_cipher_modes_of_operation#Cipher-block_chaining_.28CBC.29
    Fixes over the original: Python-2-only raise syntax, xrange, and
    array.tostring() (removed in Python 3.9); error messages report the
    actual block size instead of a hardcoded 16.
    """

    def __init__(self, cipher, IV):
        # cipher must expose block_size plus in-place encrypt_block /
        # decrypt_block methods; IV must be exactly one block of bytes.
        self.cipher = cipher
        self.block_size = cipher.block_size
        self.IV = array('B', IV)

    def _tostring(self, data):
        # array.tostring() was removed in Python 3.9; Python 2 arrays
        # lack tobytes(), so support both.
        if hasattr(data, 'tobytes'):
            return data.tobytes()
        return data.tostring()

    def encrypt(self, data):
        """Encrypt data in CBC mode; data must be a whole number of blocks."""
        block_size = self.block_size
        if len(data) % block_size != 0:
            raise ValueError("Plaintext length must be a multiple of %d" %
                             block_size)

        data = array('B', data)
        IV = self.IV

        for offset in range(0, len(data), block_size):
            block = data[offset:offset + block_size]

            # Perform CBC chaining: mix in the previous ciphertext (or IV)
            for i in range(block_size):
                block[i] ^= IV[i]

            self.cipher.encrypt_block(block)
            data[offset:offset + block_size] = block
            IV = block

        self.IV = IV
        return self._tostring(data)

    def decrypt(self, data):
        """Decrypt data in CBC mode; data must be a whole number of blocks."""
        block_size = self.block_size
        if len(data) % block_size != 0:
            raise ValueError("Ciphertext length must be a multiple of %d" %
                             block_size)

        data = array('B', data)
        IV = self.IV

        for offset in range(0, len(data), block_size):
            ctext = data[offset:offset + block_size]
            block = ctext[:]
            self.cipher.decrypt_block(block)

            # Perform CBC chaining: undo the XOR with the previous
            # ciphertext (or IV)
            for i in range(block_size):
                block[i] ^= IV[i]

            data[offset:offset + block_size] = block
            IV = ctext

        self.IV = IV
        return self._tostring(data)
####
def galois_multiply(a, b):
    """Multiply two bytes in the AES Galois field GF(2^8).

    Russian-peasant multiplication with reduction by the AES polynomial
    x^8 + x^4 + x^3 + x + 1; the final mask discards any unreduced
    high-order bits, which never feed back into the low byte.
    """
    product = 0
    while b:
        if b & 1:
            product ^= a
        b >>= 1
        a <<= 1
        if a & 0x100:
            a ^= 0x1b
    return product & 0xff
# Precompute the GF(2^8) multiplication tables used by the MixColumns
# steps: each table maps a byte x to galois_multiply(x, constant).
# ... for encryption (MixColumns coefficients 2 and 3)
gf_mul_by_2 = array('B', [galois_multiply(x, 2) for x in range(256)])
gf_mul_by_3 = array('B', [galois_multiply(x, 3) for x in range(256)])
# ... for decryption (InvMixColumns coefficients 9, 11, 13, 14)
gf_mul_by_9 = array('B', [galois_multiply(x, 9) for x in range(256)])
gf_mul_by_11 = array('B', [galois_multiply(x, 11) for x in range(256)])
gf_mul_by_13 = array('B', [galois_multiply(x, 13) for x in range(256)])
gf_mul_by_14 = array('B', [galois_multiply(x, 14) for x in range(256)])
####
# The S-box is a 256-element array, that maps a single byte value to another
# byte value. Since it's designed to be reversible, each value occurs only once
# in the S-box
#
# More information: http://en.wikipedia.org/wiki/Rijndael_S-box
# NOTE(review): str.decode('hex') is Python-2-only; a Python 3 port would
# use bytes.fromhex(...). Left unchanged to preserve the module's
# Python 2 dialect.
aes_sbox = array('B',
    '637c777bf26b6fc53001672bfed7ab76'
    'ca82c97dfa5947f0add4a2af9ca472c0'
    'b7fd9326363ff7cc34a5e5f171d83115'
    '04c723c31896059a071280e2eb27b275'
    '09832c1a1b6e5aa0523bd6b329e32f84'
    '53d100ed20fcb15b6acbbe394a4c58cf'
    'd0efaafb434d338545f9027f503c9fa8'
    '51a3408f929d38f5bcb6da2110fff3d2'
    'cd0c13ec5f974417c4a77e3d645d1973'
    '60814fdc222a908846eeb814de5e0bdb'
    'e0323a0a4906245cc2d3ac629195e479'
    'e7c8376d8dd54ea96c56f4ea657aae08'
    'ba78252e1ca6b4c6e8dd741f4bbd8b8a'
    '703eb5664803f60e613557b986c11d9e'
    'e1f8981169d98e949b1e87e9ce5528df'
    '8ca1890dbfe6426841992d0fb054bb16'.decode('hex')
)
# This is the inverse of the above. In other words:
# aes_inv_sbox[aes_sbox[val]] == val
# Inverse S-box: aes_inv_sbox[aes_sbox[val]] == val for every byte.
# NOTE(review): str.decode('hex') is Python-2-only (bytes.fromhex on 3.x).
aes_inv_sbox = array('B',
    '52096ad53036a538bf40a39e81f3d7fb'
    '7ce339829b2fff87348e4344c4dee9cb'
    '547b9432a6c2233dee4c950b42fac34e'
    '082ea16628d924b2765ba2496d8bd125'
    '72f8f66486689816d4a45ccc5d65b692'
    '6c704850fdedb9da5e154657a78d9d84'
    '90d8ab008cbcd30af7e45805b8b34506'
    'd02c1e8fca3f0f02c1afbd0301138a6b'
    '3a9111414f67dcea97f2cfcef0b4e673'
    '96ac7422e7ad3585e2f937e81c75df6e'
    '47f11a711d29c5896fb7620eaa18be1b'
    'fc563e4bc6d279209adbc0fe78cd5af4'
    '1fdda8338807c731b11210592780ec5f'
    '60517fa919b54a0d2de57a9f93c99cef'
    'a0e03b4dae2af5b0c8ebbb3c83539961'
    '172b047eba77d626e169146355210c7d'.decode('hex')
)
# The Rcon table is used in AES's key schedule (key expansion)
# It's a pre-computed table of exponentation of 2 in AES's finite field
#
# More information: http://en.wikipedia.org/wiki/Rijndael_key_schedule
aes_Rcon = array('B',
'8d01020408102040801b366cd8ab4d9a'
'2f5ebc63c697356ad4b37dfaefc59139'
'72e4d3bd61c29f254a943366cc831d3a'
'74e8cb8d01020408102040801b366cd8'
'ab4d9a2f5ebc63c697356ad4b37dfaef'
'c5913972e4d3bd61c29f254a943366cc'
'831d3a74e8cb8d01020408102040801b'
'366cd8ab4d9a2f5ebc63c697356ad4b3'
'7dfaefc5913972e4d3bd61c29f254a94'
'3366cc831d3a74e8cb8d010204081020'
'40801b366cd8ab4d9a2f5ebc63c69735'
'6ad4b37dfaefc5913972e4d3bd61c29f'
'254a943366cc831d3a74e8cb8d010204'
'08102040801b366cd8ab4d9a2f5ebc63'
'c697356ad4b37dfaefc5913972e4d3bd'
'61c29f254a943366cc831d3a74e8cb'.decode('hex')
) | gpl-2.0 |
samkariu/nairobi-routes | boilerplate/external/babel/messages/tests/catalog.py | 61 | 10694 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://babel.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://babel.edgewall.org/log/.
import copy
import datetime
import doctest
import unittest
from babel.messages import catalog
class MessageTestCase(unittest.TestCase):
    """Unit tests for babel.messages.catalog.Message."""

    def test_python_format(self):
        """PYTHON_FORMAT must recognize the usual %-style placeholders."""
        placeholders = [
            'foo %d bar',
            'foo %s bar',
            'foo %r bar',
            'foo %(name).1f',
            'foo %(name)3.3f',
            'foo %(name)3f',
            'foo %(name)06d',
            'foo %(name)Li',
            'foo %(name)#d',
            'foo %(name)-4.4hs',
            'foo %(name)*.3f',
            'foo %(name).*f',
            'foo %(name)3.*f',
            'foo %(name)*.*f',
            'foo %()s',
        ]
        for text in placeholders:
            assert catalog.PYTHON_FORMAT.search(text)

    def test_translator_comments(self):
        """User and automatic comments are stored exactly as given."""
        msg = catalog.Message('foo', user_comments=['Comment About `foo`'])
        self.assertEqual(msg.user_comments, ['Comment About `foo`'])
        msg = catalog.Message('foo',
                              auto_comments=['Comment 1 About `foo`',
                                             'Comment 2 About `foo`'])
        self.assertEqual(msg.auto_comments, ['Comment 1 About `foo`',
                                             'Comment 2 About `foo`'])

    def test_clone_message_object(self):
        """clone() yields an independent copy: mutating either side must
        not affect the other."""
        msg = catalog.Message('foo', locations=[('foo.py', 42)])
        duplicate = msg.clone()
        duplicate.locations.append(('bar.py', 42))
        self.assertEqual(msg.locations, [('foo.py', 42)])
        msg.flags.add('fuzzy')
        assert not duplicate.fuzzy and msg.fuzzy
class CatalogTestCase(unittest.TestCase):
    """Unit tests for babel.messages.catalog.Catalog, mainly the merge
    behaviour of Catalog.update() (fuzzy matching, comments, dates)."""

    def test_two_messages_with_same_singular(self):
        """Adding a plural form of an existing singular must not duplicate it."""
        cat = catalog.Catalog()
        cat.add('foo')
        cat.add(('foo', 'foos'))
        self.assertEqual(1, len(cat))

    def test_duplicate_auto_comment(self):
        """Re-adding a message merges auto comments without duplicates."""
        cat = catalog.Catalog()
        cat.add('foo', auto_comments=['A comment'])
        cat.add('foo', auto_comments=['A comment', 'Another comment'])
        self.assertEqual(['A comment', 'Another comment'],
                         cat['foo'].auto_comments)

    def test_duplicate_user_comment(self):
        """Re-adding a message merges user comments without duplicates."""
        cat = catalog.Catalog()
        cat.add('foo', user_comments=['A comment'])
        cat.add('foo', user_comments=['A comment', 'Another comment'])
        self.assertEqual(['A comment', 'Another comment'],
                         cat['foo'].user_comments)

    def test_duplicate_location(self):
        """Re-adding a message does not duplicate identical locations."""
        cat = catalog.Catalog()
        cat.add('foo', locations=[('foo.py', 1)])
        cat.add('foo', locations=[('foo.py', 1)])
        self.assertEqual([('foo.py', 1)], cat['foo'].locations)

    def test_update_message_changed_to_plural(self):
        """A singular message updated to a plural keeps its translation,
        marked fuzzy."""
        cat = catalog.Catalog()
        cat.add(u'foo', u'Voh')
        tmpl = catalog.Catalog()
        tmpl.add((u'foo', u'foos'))
        cat.update(tmpl)
        self.assertEqual((u'Voh', ''), cat['foo'].string)
        assert cat['foo'].fuzzy

    def test_update_message_changed_to_simple(self):
        """A plural message updated to a singular keeps its first
        translation, marked fuzzy."""
        cat = catalog.Catalog()
        # BUG FIX: the original read (u'foo' u'foos') -- implicit string
        # concatenation yielding the id 'foofoos' -- instead of the plural
        # tuple this test clearly intends.
        cat.add((u'foo', u'foos'), (u'Voh', u'Vöhs'))
        tmpl = catalog.Catalog()
        tmpl.add(u'foo')
        cat.update(tmpl)
        self.assertEqual(u'Voh', cat['foo'].string)
        assert cat['foo'].fuzzy

    def test_update_message_updates_comments(self):
        """Re-assigning a message merges its locations and comments."""
        cat = catalog.Catalog()
        cat[u'foo'] = catalog.Message('foo', locations=[('main.py', 5)])
        self.assertEqual(cat[u'foo'].auto_comments, [])
        self.assertEqual(cat[u'foo'].user_comments, [])
        # Update cat[u'foo'] with a new location and a comment
        cat[u'foo'] = catalog.Message('foo', locations=[('main.py', 7)],
                                      user_comments=['Foo Bar comment 1'])
        self.assertEqual(cat[u'foo'].user_comments, ['Foo Bar comment 1'])
        # now add yet another location with another comment
        cat[u'foo'] = catalog.Message('foo', locations=[('main.py', 9)],
                                      auto_comments=['Foo Bar comment 2'])
        self.assertEqual(cat[u'foo'].auto_comments, ['Foo Bar comment 2'])

    def test_update_fuzzy_matching_with_case_change(self):
        """A case-only change is close enough for a fuzzy match."""
        cat = catalog.Catalog()
        cat.add('foo', 'Voh')
        cat.add('bar', 'Bahr')
        tmpl = catalog.Catalog()
        tmpl.add('Foo')
        cat.update(tmpl)
        self.assertEqual(1, len(cat.obsolete))
        assert 'foo' not in cat
        self.assertEqual('Voh', cat['Foo'].string)
        self.assertEqual(True, cat['Foo'].fuzzy)

    def test_update_fuzzy_matching_with_char_change(self):
        """A one-character change is close enough for a fuzzy match."""
        cat = catalog.Catalog()
        cat.add('fo', 'Voh')
        cat.add('bar', 'Bahr')
        tmpl = catalog.Catalog()
        tmpl.add('foo')
        cat.update(tmpl)
        self.assertEqual(1, len(cat.obsolete))
        assert 'fo' not in cat
        self.assertEqual('Voh', cat['foo'].string)
        self.assertEqual(True, cat['foo'].fuzzy)

    def test_update_fuzzy_matching_no_msgstr(self):
        """Untranslated messages never produce fuzzy matches."""
        cat = catalog.Catalog()
        cat.add('fo', '')
        tmpl = catalog.Catalog()
        tmpl.add('fo')
        tmpl.add('foo')
        cat.update(tmpl)
        assert 'fo' in cat
        assert 'foo' in cat
        self.assertEqual('', cat['fo'].string)
        self.assertEqual(False, cat['fo'].fuzzy)
        self.assertEqual(None, cat['foo'].string)
        self.assertEqual(False, cat['foo'].fuzzy)

    def test_update_fuzzy_matching_no_cascading(self):
        """Exact matches win; fuzzy matching only fills real gaps."""
        cat = catalog.Catalog()
        cat.add('fo', 'Voh')
        cat.add('foo', 'Vohe')
        tmpl = catalog.Catalog()
        tmpl.add('fo')
        tmpl.add('foo')
        tmpl.add('fooo')
        cat.update(tmpl)
        assert 'fo' in cat
        assert 'foo' in cat
        self.assertEqual('Voh', cat['fo'].string)
        self.assertEqual(False, cat['fo'].fuzzy)
        self.assertEqual('Vohe', cat['foo'].string)
        self.assertEqual(False, cat['foo'].fuzzy)
        self.assertEqual('Vohe', cat['fooo'].string)
        self.assertEqual(True, cat['fooo'].fuzzy)

    def test_update_without_fuzzy_matching(self):
        """With no_fuzzy_matching, near misses become obsolete instead."""
        cat = catalog.Catalog()
        cat.add('fo', 'Voh')
        cat.add('bar', 'Bahr')
        tmpl = catalog.Catalog()
        tmpl.add('foo')
        cat.update(tmpl, no_fuzzy_matching=True)
        self.assertEqual(2, len(cat.obsolete))

    def test_fuzzy_matching_regarding_plurals(self):
        """A plural message is fuzzy unless it has every plural form the
        target locale requires."""
        cat = catalog.Catalog()
        cat.add(('foo', 'foh'), ('foo', 'foh'))
        ru = copy.copy(cat)
        ru.locale = 'ru_RU'
        ru.update(cat)
        self.assertEqual(True, ru['foo'].fuzzy)
        ru = copy.copy(cat)
        ru.locale = 'ru_RU'
        ru['foo'].string = ('foh', 'fohh', 'fohhh')
        ru.update(cat)
        self.assertEqual(False, ru['foo'].fuzzy)

    def test_update_no_template_mutation(self):
        """update() must not leak translations back into the template."""
        tmpl = catalog.Catalog()
        tmpl.add('foo')
        cat1 = catalog.Catalog()
        cat1.add('foo', 'Voh')
        cat1.update(tmpl)
        cat2 = catalog.Catalog()
        cat2.update(tmpl)
        self.assertEqual(None, cat2['foo'].string)
        self.assertEqual(False, cat2['foo'].fuzzy)

    def test_update_po_updates_pot_creation_date(self):
        """update() copies the template's POT-Creation-Date into the PO."""
        template = catalog.Catalog()
        localized_catalog = copy.deepcopy(template)
        localized_catalog.locale = 'de_DE'
        self.assertNotEqual(template.mime_headers,
                            localized_catalog.mime_headers)
        self.assertEqual(template.creation_date,
                         localized_catalog.creation_date)
        template.creation_date = datetime.datetime.now() - \
            datetime.timedelta(minutes=5)
        localized_catalog.update(template)
        self.assertEqual(template.creation_date,
                         localized_catalog.creation_date)

    def test_update_po_keeps_po_revision_date(self):
        """update() must not touch the PO's own PO-Revision-Date."""
        template = catalog.Catalog()
        localized_catalog = copy.deepcopy(template)
        localized_catalog.locale = 'de_DE'
        fake_rev_date = datetime.datetime.now() - datetime.timedelta(days=5)
        localized_catalog.revision_date = fake_rev_date
        self.assertNotEqual(template.mime_headers,
                            localized_catalog.mime_headers)
        self.assertEqual(template.creation_date,
                         localized_catalog.creation_date)
        template.creation_date = datetime.datetime.now() - \
            datetime.timedelta(minutes=5)
        localized_catalog.update(template)
        self.assertEqual(localized_catalog.revision_date, fake_rev_date)

    def test_stores_datetime_correctly(self):
        """Date headers parsed from a message survive a round trip intact."""
        localized = catalog.Catalog()
        localized.locale = 'de_DE'
        localized[''] = catalog.Message('',
                                        "POT-Creation-Date: 2009-03-09 15:47-0700\n" +
                                        "PO-Revision-Date: 2009-03-09 15:47-0700\n")
        for key, value in localized.mime_headers:
            if key in ('POT-Creation-Date', 'PO-Revision-Date'):
                self.assertEqual(value, '2009-03-09 15:47-0700')
def suite():
    """Assemble the test suite: doctests for the catalog module plus the
    Message and Catalog test cases."""
    tests = unittest.TestSuite()
    if hasattr(doctest, 'ELLIPSIS'):
        tests.addTest(doctest.DocTestSuite(catalog,
                                           optionflags=doctest.ELLIPSIS))
    else:
        # Python 2.3 has no doctest.ELLIPSIS option, it's implicit
        tests.addTest(doctest.DocTestSuite(catalog))
    for case in (MessageTestCase, CatalogTestCase):
        tests.addTest(unittest.makeSuite(case))
    return tests
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| mit |
ivyl/patchwork | patchwork/migrations/0016_add_delegation_rule_model.py | 2 | 1136 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
    # Auto-generated Django migration: introduces the DelegationRule model,
    # which maps a file path pattern within a project to the user that
    # matching patches should be delegated to, tried in descending
    # priority order.

    dependencies = [
        # Requires the (swappable) user model and the schema state left by
        # the previous patchwork migration.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('patchwork', '0015_remove_version_n_patches'),
    ]

    operations = [
        migrations.CreateModel(
            name='DelegationRule',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('path', models.CharField(max_length=255)),
                ('priority', models.IntegerField(default=0)),
                ('project', models.ForeignKey(to='patchwork.Project', on_delete=models.CASCADE)),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
            ],
            options={
                # Highest priority first; ties broken alphabetically by path.
                'ordering': ['-priority', 'path'],
            },
        ),
        # A given path pattern may appear at most once per project.
        migrations.AlterUniqueTogether(
            name='delegationrule',
            unique_together=set([('path', 'project')]),
        ),
    ]
| gpl-2.0 |
dgjustice/ansible | lib/ansible/modules/network/openswitch/_ops_template.py | 16 | 7218 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['deprecated'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = """
---
module: ops_template
version_added: "2.1"
author: "Peter Sprygada (@privateip)"
short_description: Push configuration to OpenSwitch
description:
- The OpenSwitch platform provides a library for pushing JSON structured
configuration files into the current running-config. This module
will read the current configuration from OpenSwitch and compare it
against a provided candidate configuration. If there are changes, the
candidate configuration is merged with the current configuration and
pushed into OpenSwitch
deprecated: Deprecated in 2.2. Use M(ops_config) instead.
extends_documentation_fragment: openswitch
options:
src:
description:
- The path to the config source. The source can be either a
file with config or a template that will be merged during
runtime. By default the task will search for the source
file in role or playbook root folder in templates directory.
required: true
force:
description:
- The force argument instructs the module to not consider the
current devices running-config. When set to true, this will
cause the module to push the contents of I(src) into the device
without first checking if already configured.
required: false
default: false
choices: ['yes', 'no']
backup:
description:
- When this argument is configured true, the module will backup
the running-config from the node prior to making any changes.
The backup file will be written to backups/ in
the root of the playbook directory.
required: false
default: false
choices: ['yes', 'no']
config:
description:
- The module, by default, will connect to the remote device and
retrieve the current running-config to use as a base for comparing
against the contents of source. There are times when it is not
desirable to have the task get the current running-config for
every task in a playbook. The I(config) argument allows the
implementer to pass in the configuration to use as the base
config for comparison.
required: false
default: null
"""
EXAMPLES = """
- name: set hostname with file lookup
ops_template:
src: ./hostname.json
backup: yes
remote_user: admin
become: yes
- name: set hostname with var
ops_template:
src: "{{ config }}"
remote_user: admin
become: yes
"""
RETURN = """
updates:
description: The list of configuration updates to be merged
returned: always
type: dict
sample: {obj, obj}
responses:
description: returns the responses when configuring using cli
returned: when transport == cli
type: list
sample: [...]
"""
import ansible.module_utils.openswitch
from ansible.module_utils.netcfg import NetworkConfig, dumps
from ansible.module_utils.network import NetworkModule
from ansible.module_utils.openswitch import HAS_OPS
def get_config(module):
    """Return the base configuration to diff against.

    Prefers an explicitly supplied 'config' parameter; otherwise fetches
    the device's running config, unless 'force' is set (in which case an
    empty dict is returned so everything is pushed).
    """
    candidate = module.params['config'] or {}
    if candidate or module.params['force']:
        return candidate
    return module.config.get_config()
def sort(val):
    """Return a sorted copy of list/set values; return anything else as-is.

    Used by diff() so that order-insensitive collections compare equal.
    """
    return sorted(val) if isinstance(val, (list, set)) else val
def diff(this, other, path=None):
    """Recursively compare two nested dicts.

    Returns a changeset: a list of (path, key, new_value, old_value)
    tuples for every key in `this` that is missing from `other` or whose
    (order-normalized) value differs. Keys present only in `other` are
    deliberately ignored — the changeset describes what to push.
    """
    path = path or []
    changes = []
    for key, value in this.items():
        if key not in other:
            changes.append((list(path), key, value, other.get(key)))
        elif isinstance(value, dict):
            # descend with the key appended to a fresh path copy
            changes.extend(diff(value, other[key], path + [key]))
        else:
            if sort(value) != sort(other.get(key)):
                changes.append((list(path), key, value, other.get(key)))
    return changes
def merge(changeset, config=None):
    """Apply a diff() changeset onto config and return the result.

    Intermediate dicts are created along each change's path as needed.
    A falsy config is replaced by a fresh dict (the caller's empty dict
    is intentionally left untouched).
    """
    config = config or {}
    for path, key, value, _old in changeset:
        node = config
        for segment in path:
            if segment not in node:
                node[segment] = {}
            node = node[segment]
        node[key] = value
    return config
def main():
    """ main entry point for module execution
    """
    argument_spec = dict(
        src=dict(type='str'),
        force=dict(default=False, type='bool'),
        backup=dict(default=False, type='bool'),
        config=dict(type='dict'),
    )
    # 'config' supplies the comparison base directly, so it cannot be
    # combined with 'backup' (nothing is fetched to back up) or with
    # 'force' (which skips the comparison entirely).
    mutually_exclusive = [('config', 'backup'), ('config', 'force')]
    module = NetworkModule(argument_spec=argument_spec,
                           mutually_exclusive=mutually_exclusive,
                           supports_check_mode=True)
    if not module.params['transport'] and not HAS_OPS:
        module.fail_json(msg='unable to import ops.dc library')
    result = dict(changed=False)
    # Base configuration: supplied, fetched from the device, or empty when
    # forced. Also exposed under '_backup' for the backup option.
    contents = get_config(module)
    result['_backup'] = contents
    if module.params['transport'] in ['ssh', 'rest']:
        # Structured path: diff the JSON candidate against the current
        # config and push the merged result.
        config = contents
        try:
            src = module.from_json(module.params['src'])
        except ValueError:
            module.fail_json(msg='unable to load src due to json parsing error')
        changeset = diff(src, config)
        # merge() mutates 'config' in place when it is truthy, so
        # 'candidate' and 'config' are then the same object.
        candidate = merge(changeset, config)
        updates = dict()
        for path, key, new_value, old_value in changeset:
            path = '%s.%s' % ('.'.join(path), key)
            updates[path] = str(new_value)
        result['updates'] = updates
        if changeset:
            if not module.check_mode:
                # NOTE(review): pushes 'config', not 'candidate'. If the
                # base config was falsy, merge() built a NEW dict and this
                # would push the unmerged base — verify intended.
                module.config(config)
            result['changed'] = True
    else:
        # CLI path: line-based diff of the candidate against the running
        # config, or push everything when 'force' is set.
        candidate = NetworkConfig(contents=module.params['src'], indent=4)
        if contents:
            config = NetworkConfig(contents=contents, indent=4)
        if not module.params['force']:
            # NOTE(review): if 'contents' is empty and 'force' is unset,
            # 'config' is unbound here — presumably unreachable in
            # practice, but worth confirming.
            commands = candidate.difference(config)
            commands = dumps(commands, 'commands').split('\n')
            commands = [str(c) for c in commands if c]
        else:
            commands = str(candidate).split('\n')
        if commands:
            if not module.check_mode:
                response = module.config(commands)
                result['responses'] = response
            result['changed'] = True
        result['updates'] = commands
    module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
DazWorrall/ansible | test/units/module_utils/test_database.py | 178 | 4377 | import pytest
from ansible.module_utils.database import (
pg_quote_identifier,
SQLParseError,
)
# These are all valid strings
# The results are based on interpreting the identifier as a table name
VALID = {
# User quoted
'"public.table"': '"public.table"',
'"public"."table"': '"public"."table"',
'"schema test"."table test"': '"schema test"."table test"',
# We quote part
'public.table': '"public"."table"',
'"public".table': '"public"."table"',
'public."table"': '"public"."table"',
'schema test.table test': '"schema test"."table test"',
'"schema test".table test': '"schema test"."table test"',
'schema test."table test"': '"schema test"."table test"',
# Embedded double quotes
'table "test"': '"table ""test"""',
'public."table ""test"""': '"public"."table ""test"""',
'public.table "test"': '"public"."table ""test"""',
'schema "test".table': '"schema ""test"""."table"',
'"schema ""test""".table': '"schema ""test"""."table"',
'"""wat"""."""test"""': '"""wat"""."""test"""',
# Sigh, handle these as well:
'"no end quote': '"""no end quote"',
'schema."table': '"schema"."""table"',
'"schema.table': '"""schema"."table"',
'schema."table.something': '"schema"."""table"."something"',
# Embedded dots
'"schema.test"."table.test"': '"schema.test"."table.test"',
'"schema.".table': '"schema."."table"',
'"schema."."table"': '"schema."."table"',
'schema.".table"': '"schema".".table"',
'"schema".".table"': '"schema".".table"',
'"schema.".".table"': '"schema.".".table"',
# These are valid but maybe not what the user intended
'."table"': '".""table"""',
'table.': '"table."',
}
INVALID = {
('test.too.many.dots', 'table'): 'PostgreSQL does not support table with more than 3 dots',
('"test.too".many.dots', 'database'): 'PostgreSQL does not support database with more than 1 dots',
('test.too."many.dots"', 'database'): 'PostgreSQL does not support database with more than 1 dots',
('"test"."too"."many"."dots"', 'database'): "PostgreSQL does not support database with more than 1 dots",
('"test"."too"."many"."dots"', 'schema'): "PostgreSQL does not support schema with more than 2 dots",
('"test"."too"."many"."dots"', 'table'): "PostgreSQL does not support table with more than 3 dots",
('"test"."too"."many"."dots"."for"."column"', 'column'): "PostgreSQL does not support column with more than 4 dots",
('"table "invalid" double quote"', 'table'): 'User escaped identifiers must escape extra quotes',
('"schema "invalid"""."table "invalid"', 'table'): 'User escaped identifiers must escape extra quotes',
('"schema."table"', 'table'): 'User escaped identifiers must escape extra quotes',
('"schema".', 'table'): 'Identifier name unspecified or unquoted trailing dot',
}
# Each row: (identifier, id_type, expected quoted form, error message
# produced when one more dot-separated component is appended than the
# identifier type allows).
HOW_MANY_DOTS = (
    ('role', 'role', '"role"',
     'PostgreSQL does not support role with more than 1 dots'),
    ('db', 'database', '"db"',
     'PostgreSQL does not support database with more than 1 dots'),
    ('db.schema', 'schema', '"db"."schema"',
     'PostgreSQL does not support schema with more than 2 dots'),
    ('db.schema.table', 'table', '"db"."schema"."table"',
     'PostgreSQL does not support table with more than 3 dots'),
    ('db.schema.table.column', 'column', '"db"."schema"."table"."column"',
     'PostgreSQL does not support column with more than 4 dots'),
)

# Generator expressions feeding pytest.mark.parametrize below; they are
# single-use, which is fine as each is consumed exactly once.
VALID_QUOTES = ((test, VALID[test]) for test in VALID)
INVALID_QUOTES = ((test[0], test[1], INVALID[test]) for test in INVALID)
@pytest.mark.parametrize("identifier, quoted_identifier", VALID_QUOTES)
def test_valid_quotes(identifier, quoted_identifier):
    """Every entry in VALID must quote to its expected table identifier."""
    assert pg_quote_identifier(identifier, 'table') == quoted_identifier
@pytest.mark.parametrize("identifier, id_type, msg", INVALID_QUOTES)
def test_invalid_quotes(identifier, id_type, msg):
    """Every entry in INVALID must raise SQLParseError with its message."""
    with pytest.raises(SQLParseError) as ex:
        pg_quote_identifier(identifier, id_type)
    ex.match(msg)
@pytest.mark.parametrize("identifier, id_type, quoted_identifier, msg", HOW_MANY_DOTS)
def test_how_many_dots(identifier, id_type, quoted_identifier, msg):
    """The maximum component count quotes cleanly; one more component
    (appending '.more') must raise with the documented message."""
    assert pg_quote_identifier(identifier, id_type) == quoted_identifier
    with pytest.raises(SQLParseError) as ex:
        pg_quote_identifier('%s.more' % identifier, id_type)
    ex.match(msg)
| gpl-3.0 |
mglukhikh/intellij-community | python/lib/Lib/encodings/cp852.py | 593 | 35258 | """ Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP852.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    # Stateless charmap codec generated by gencodec.py: each call maps
    # every character through the module-level encoding_map /
    # decoding_table in a single charmap pass.
    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_map)
    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    # Charmap encoding keeps no state between calls, so 'final' is unused.
    def encode(self, input, final=False):
        return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    # Charmap decoding keeps no state between calls, so 'final' is unused.
    def decode(self, input, final=False):
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Combines the stateless charmap encode from Codec with the generic
    # stream handling from codecs.StreamWriter; nothing to override.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Combines the stateless charmap decode from Codec with the generic
    # stream handling from codecs.StreamReader; nothing to override.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo entry used to register this codec as 'cp852'."""
    # Codec is stateless, so a single instance can provide both callables.
    codec = Codec()
    return codecs.CodecInfo(
        name='cp852',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x0080: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
0x0083: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
0x0084: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
0x0085: 0x016f, # LATIN SMALL LETTER U WITH RING ABOVE
0x0086: 0x0107, # LATIN SMALL LETTER C WITH ACUTE
0x0087: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
0x0088: 0x0142, # LATIN SMALL LETTER L WITH STROKE
0x0089: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
0x008a: 0x0150, # LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
0x008b: 0x0151, # LATIN SMALL LETTER O WITH DOUBLE ACUTE
0x008c: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
0x008d: 0x0179, # LATIN CAPITAL LETTER Z WITH ACUTE
0x008e: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x008f: 0x0106, # LATIN CAPITAL LETTER C WITH ACUTE
0x0090: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
0x0091: 0x0139, # LATIN CAPITAL LETTER L WITH ACUTE
0x0092: 0x013a, # LATIN SMALL LETTER L WITH ACUTE
0x0093: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x0094: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
0x0095: 0x013d, # LATIN CAPITAL LETTER L WITH CARON
0x0096: 0x013e, # LATIN SMALL LETTER L WITH CARON
0x0097: 0x015a, # LATIN CAPITAL LETTER S WITH ACUTE
0x0098: 0x015b, # LATIN SMALL LETTER S WITH ACUTE
0x0099: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x009b: 0x0164, # LATIN CAPITAL LETTER T WITH CARON
0x009c: 0x0165, # LATIN SMALL LETTER T WITH CARON
0x009d: 0x0141, # LATIN CAPITAL LETTER L WITH STROKE
0x009e: 0x00d7, # MULTIPLICATION SIGN
0x009f: 0x010d, # LATIN SMALL LETTER C WITH CARON
0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
0x00a4: 0x0104, # LATIN CAPITAL LETTER A WITH OGONEK
0x00a5: 0x0105, # LATIN SMALL LETTER A WITH OGONEK
0x00a6: 0x017d, # LATIN CAPITAL LETTER Z WITH CARON
0x00a7: 0x017e, # LATIN SMALL LETTER Z WITH CARON
0x00a8: 0x0118, # LATIN CAPITAL LETTER E WITH OGONEK
0x00a9: 0x0119, # LATIN SMALL LETTER E WITH OGONEK
0x00aa: 0x00ac, # NOT SIGN
0x00ab: 0x017a, # LATIN SMALL LETTER Z WITH ACUTE
0x00ac: 0x010c, # LATIN CAPITAL LETTER C WITH CARON
0x00ad: 0x015f, # LATIN SMALL LETTER S WITH CEDILLA
0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00b0: 0x2591, # LIGHT SHADE
0x00b1: 0x2592, # MEDIUM SHADE
0x00b2: 0x2593, # DARK SHADE
0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x00b5: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
0x00b6: 0x00c2, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
0x00b7: 0x011a, # LATIN CAPITAL LETTER E WITH CARON
0x00b8: 0x015e, # LATIN CAPITAL LETTER S WITH CEDILLA
0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
0x00bd: 0x017b, # LATIN CAPITAL LETTER Z WITH DOT ABOVE
0x00be: 0x017c, # LATIN SMALL LETTER Z WITH DOT ABOVE
0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x00c6: 0x0102, # LATIN CAPITAL LETTER A WITH BREVE
0x00c7: 0x0103, # LATIN SMALL LETTER A WITH BREVE
0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x00cf: 0x00a4, # CURRENCY SIGN
0x00d0: 0x0111, # LATIN SMALL LETTER D WITH STROKE
0x00d1: 0x0110, # LATIN CAPITAL LETTER D WITH STROKE
0x00d2: 0x010e, # LATIN CAPITAL LETTER D WITH CARON
0x00d3: 0x00cb, # LATIN CAPITAL LETTER E WITH DIAERESIS
0x00d4: 0x010f, # LATIN SMALL LETTER D WITH CARON
0x00d5: 0x0147, # LATIN CAPITAL LETTER N WITH CARON
0x00d6: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
0x00d7: 0x00ce, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
0x00d8: 0x011b, # LATIN SMALL LETTER E WITH CARON
0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x00db: 0x2588, # FULL BLOCK
0x00dc: 0x2584, # LOWER HALF BLOCK
0x00dd: 0x0162, # LATIN CAPITAL LETTER T WITH CEDILLA
0x00de: 0x016e, # LATIN CAPITAL LETTER U WITH RING ABOVE
0x00df: 0x2580, # UPPER HALF BLOCK
0x00e0: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S
0x00e2: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
0x00e3: 0x0143, # LATIN CAPITAL LETTER N WITH ACUTE
0x00e4: 0x0144, # LATIN SMALL LETTER N WITH ACUTE
0x00e5: 0x0148, # LATIN SMALL LETTER N WITH CARON
0x00e6: 0x0160, # LATIN CAPITAL LETTER S WITH CARON
0x00e7: 0x0161, # LATIN SMALL LETTER S WITH CARON
0x00e8: 0x0154, # LATIN CAPITAL LETTER R WITH ACUTE
0x00e9: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
0x00ea: 0x0155, # LATIN SMALL LETTER R WITH ACUTE
0x00eb: 0x0170, # LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
0x00ec: 0x00fd, # LATIN SMALL LETTER Y WITH ACUTE
0x00ed: 0x00dd, # LATIN CAPITAL LETTER Y WITH ACUTE
0x00ee: 0x0163, # LATIN SMALL LETTER T WITH CEDILLA
0x00ef: 0x00b4, # ACUTE ACCENT
0x00f0: 0x00ad, # SOFT HYPHEN
0x00f1: 0x02dd, # DOUBLE ACUTE ACCENT
0x00f2: 0x02db, # OGONEK
0x00f3: 0x02c7, # CARON
0x00f4: 0x02d8, # BREVE
0x00f5: 0x00a7, # SECTION SIGN
0x00f6: 0x00f7, # DIVISION SIGN
0x00f7: 0x00b8, # CEDILLA
0x00f8: 0x00b0, # DEGREE SIGN
0x00f9: 0x00a8, # DIAERESIS
0x00fa: 0x02d9, # DOT ABOVE
0x00fb: 0x0171, # LATIN SMALL LETTER U WITH DOUBLE ACUTE
0x00fc: 0x0158, # LATIN CAPITAL LETTER R WITH CARON
0x00fd: 0x0159, # LATIN SMALL LETTER R WITH CARON
0x00fe: 0x25a0, # BLACK SQUARE
0x00ff: 0x00a0, # NO-BREAK SPACE
})
### Decoding Table
decoding_table = (
u'\x00' # 0x0000 -> NULL
u'\x01' # 0x0001 -> START OF HEADING
u'\x02' # 0x0002 -> START OF TEXT
u'\x03' # 0x0003 -> END OF TEXT
u'\x04' # 0x0004 -> END OF TRANSMISSION
u'\x05' # 0x0005 -> ENQUIRY
u'\x06' # 0x0006 -> ACKNOWLEDGE
u'\x07' # 0x0007 -> BELL
u'\x08' # 0x0008 -> BACKSPACE
u'\t' # 0x0009 -> HORIZONTAL TABULATION
u'\n' # 0x000a -> LINE FEED
u'\x0b' # 0x000b -> VERTICAL TABULATION
u'\x0c' # 0x000c -> FORM FEED
u'\r' # 0x000d -> CARRIAGE RETURN
u'\x0e' # 0x000e -> SHIFT OUT
u'\x0f' # 0x000f -> SHIFT IN
u'\x10' # 0x0010 -> DATA LINK ESCAPE
u'\x11' # 0x0011 -> DEVICE CONTROL ONE
u'\x12' # 0x0012 -> DEVICE CONTROL TWO
u'\x13' # 0x0013 -> DEVICE CONTROL THREE
u'\x14' # 0x0014 -> DEVICE CONTROL FOUR
u'\x15' # 0x0015 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x0016 -> SYNCHRONOUS IDLE
u'\x17' # 0x0017 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x0018 -> CANCEL
u'\x19' # 0x0019 -> END OF MEDIUM
u'\x1a' # 0x001a -> SUBSTITUTE
u'\x1b' # 0x001b -> ESCAPE
u'\x1c' # 0x001c -> FILE SEPARATOR
u'\x1d' # 0x001d -> GROUP SEPARATOR
u'\x1e' # 0x001e -> RECORD SEPARATOR
u'\x1f' # 0x001f -> UNIT SEPARATOR
u' ' # 0x0020 -> SPACE
u'!' # 0x0021 -> EXCLAMATION MARK
u'"' # 0x0022 -> QUOTATION MARK
u'#' # 0x0023 -> NUMBER SIGN
u'$' # 0x0024 -> DOLLAR SIGN
u'%' # 0x0025 -> PERCENT SIGN
u'&' # 0x0026 -> AMPERSAND
u"'" # 0x0027 -> APOSTROPHE
u'(' # 0x0028 -> LEFT PARENTHESIS
u')' # 0x0029 -> RIGHT PARENTHESIS
u'*' # 0x002a -> ASTERISK
u'+' # 0x002b -> PLUS SIGN
u',' # 0x002c -> COMMA
u'-' # 0x002d -> HYPHEN-MINUS
u'.' # 0x002e -> FULL STOP
u'/' # 0x002f -> SOLIDUS
u'0' # 0x0030 -> DIGIT ZERO
u'1' # 0x0031 -> DIGIT ONE
u'2' # 0x0032 -> DIGIT TWO
u'3' # 0x0033 -> DIGIT THREE
u'4' # 0x0034 -> DIGIT FOUR
u'5' # 0x0035 -> DIGIT FIVE
u'6' # 0x0036 -> DIGIT SIX
u'7' # 0x0037 -> DIGIT SEVEN
u'8' # 0x0038 -> DIGIT EIGHT
u'9' # 0x0039 -> DIGIT NINE
u':' # 0x003a -> COLON
u';' # 0x003b -> SEMICOLON
u'<' # 0x003c -> LESS-THAN SIGN
u'=' # 0x003d -> EQUALS SIGN
u'>' # 0x003e -> GREATER-THAN SIGN
u'?' # 0x003f -> QUESTION MARK
u'@' # 0x0040 -> COMMERCIAL AT
u'A' # 0x0041 -> LATIN CAPITAL LETTER A
u'B' # 0x0042 -> LATIN CAPITAL LETTER B
u'C' # 0x0043 -> LATIN CAPITAL LETTER C
u'D' # 0x0044 -> LATIN CAPITAL LETTER D
u'E' # 0x0045 -> LATIN CAPITAL LETTER E
u'F' # 0x0046 -> LATIN CAPITAL LETTER F
u'G' # 0x0047 -> LATIN CAPITAL LETTER G
u'H' # 0x0048 -> LATIN CAPITAL LETTER H
u'I' # 0x0049 -> LATIN CAPITAL LETTER I
u'J' # 0x004a -> LATIN CAPITAL LETTER J
u'K' # 0x004b -> LATIN CAPITAL LETTER K
u'L' # 0x004c -> LATIN CAPITAL LETTER L
u'M' # 0x004d -> LATIN CAPITAL LETTER M
u'N' # 0x004e -> LATIN CAPITAL LETTER N
u'O' # 0x004f -> LATIN CAPITAL LETTER O
u'P' # 0x0050 -> LATIN CAPITAL LETTER P
u'Q' # 0x0051 -> LATIN CAPITAL LETTER Q
u'R' # 0x0052 -> LATIN CAPITAL LETTER R
u'S' # 0x0053 -> LATIN CAPITAL LETTER S
u'T' # 0x0054 -> LATIN CAPITAL LETTER T
u'U' # 0x0055 -> LATIN CAPITAL LETTER U
u'V' # 0x0056 -> LATIN CAPITAL LETTER V
u'W' # 0x0057 -> LATIN CAPITAL LETTER W
u'X' # 0x0058 -> LATIN CAPITAL LETTER X
u'Y' # 0x0059 -> LATIN CAPITAL LETTER Y
u'Z' # 0x005a -> LATIN CAPITAL LETTER Z
u'[' # 0x005b -> LEFT SQUARE BRACKET
u'\\' # 0x005c -> REVERSE SOLIDUS
u']' # 0x005d -> RIGHT SQUARE BRACKET
u'^' # 0x005e -> CIRCUMFLEX ACCENT
u'_' # 0x005f -> LOW LINE
u'`' # 0x0060 -> GRAVE ACCENT
u'a' # 0x0061 -> LATIN SMALL LETTER A
u'b' # 0x0062 -> LATIN SMALL LETTER B
u'c' # 0x0063 -> LATIN SMALL LETTER C
u'd' # 0x0064 -> LATIN SMALL LETTER D
u'e' # 0x0065 -> LATIN SMALL LETTER E
u'f' # 0x0066 -> LATIN SMALL LETTER F
u'g' # 0x0067 -> LATIN SMALL LETTER G
u'h' # 0x0068 -> LATIN SMALL LETTER H
u'i' # 0x0069 -> LATIN SMALL LETTER I
u'j' # 0x006a -> LATIN SMALL LETTER J
u'k' # 0x006b -> LATIN SMALL LETTER K
u'l' # 0x006c -> LATIN SMALL LETTER L
u'm' # 0x006d -> LATIN SMALL LETTER M
u'n' # 0x006e -> LATIN SMALL LETTER N
u'o' # 0x006f -> LATIN SMALL LETTER O
u'p' # 0x0070 -> LATIN SMALL LETTER P
u'q' # 0x0071 -> LATIN SMALL LETTER Q
u'r' # 0x0072 -> LATIN SMALL LETTER R
u's' # 0x0073 -> LATIN SMALL LETTER S
u't' # 0x0074 -> LATIN SMALL LETTER T
u'u' # 0x0075 -> LATIN SMALL LETTER U
u'v' # 0x0076 -> LATIN SMALL LETTER V
u'w' # 0x0077 -> LATIN SMALL LETTER W
u'x' # 0x0078 -> LATIN SMALL LETTER X
u'y' # 0x0079 -> LATIN SMALL LETTER Y
u'z' # 0x007a -> LATIN SMALL LETTER Z
u'{' # 0x007b -> LEFT CURLY BRACKET
u'|' # 0x007c -> VERTICAL LINE
u'}' # 0x007d -> RIGHT CURLY BRACKET
u'~' # 0x007e -> TILDE
u'\x7f' # 0x007f -> DELETE
u'\xc7' # 0x0080 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xfc' # 0x0081 -> LATIN SMALL LETTER U WITH DIAERESIS
u'\xe9' # 0x0082 -> LATIN SMALL LETTER E WITH ACUTE
u'\xe2' # 0x0083 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe4' # 0x0084 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\u016f' # 0x0085 -> LATIN SMALL LETTER U WITH RING ABOVE
u'\u0107' # 0x0086 -> LATIN SMALL LETTER C WITH ACUTE
u'\xe7' # 0x0087 -> LATIN SMALL LETTER C WITH CEDILLA
u'\u0142' # 0x0088 -> LATIN SMALL LETTER L WITH STROKE
u'\xeb' # 0x0089 -> LATIN SMALL LETTER E WITH DIAERESIS
u'\u0150' # 0x008a -> LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
u'\u0151' # 0x008b -> LATIN SMALL LETTER O WITH DOUBLE ACUTE
u'\xee' # 0x008c -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\u0179' # 0x008d -> LATIN CAPITAL LETTER Z WITH ACUTE
u'\xc4' # 0x008e -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\u0106' # 0x008f -> LATIN CAPITAL LETTER C WITH ACUTE
u'\xc9' # 0x0090 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\u0139' # 0x0091 -> LATIN CAPITAL LETTER L WITH ACUTE
u'\u013a' # 0x0092 -> LATIN SMALL LETTER L WITH ACUTE
u'\xf4' # 0x0093 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf6' # 0x0094 -> LATIN SMALL LETTER O WITH DIAERESIS
u'\u013d' # 0x0095 -> LATIN CAPITAL LETTER L WITH CARON
u'\u013e' # 0x0096 -> LATIN SMALL LETTER L WITH CARON
u'\u015a' # 0x0097 -> LATIN CAPITAL LETTER S WITH ACUTE
u'\u015b' # 0x0098 -> LATIN SMALL LETTER S WITH ACUTE
u'\xd6' # 0x0099 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xdc' # 0x009a -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\u0164' # 0x009b -> LATIN CAPITAL LETTER T WITH CARON
u'\u0165' # 0x009c -> LATIN SMALL LETTER T WITH CARON
u'\u0141' # 0x009d -> LATIN CAPITAL LETTER L WITH STROKE
u'\xd7' # 0x009e -> MULTIPLICATION SIGN
u'\u010d' # 0x009f -> LATIN SMALL LETTER C WITH CARON
u'\xe1' # 0x00a0 -> LATIN SMALL LETTER A WITH ACUTE
u'\xed' # 0x00a1 -> LATIN SMALL LETTER I WITH ACUTE
u'\xf3' # 0x00a2 -> LATIN SMALL LETTER O WITH ACUTE
u'\xfa' # 0x00a3 -> LATIN SMALL LETTER U WITH ACUTE
u'\u0104' # 0x00a4 -> LATIN CAPITAL LETTER A WITH OGONEK
u'\u0105' # 0x00a5 -> LATIN SMALL LETTER A WITH OGONEK
u'\u017d' # 0x00a6 -> LATIN CAPITAL LETTER Z WITH CARON
u'\u017e' # 0x00a7 -> LATIN SMALL LETTER Z WITH CARON
u'\u0118' # 0x00a8 -> LATIN CAPITAL LETTER E WITH OGONEK
u'\u0119' # 0x00a9 -> LATIN SMALL LETTER E WITH OGONEK
u'\xac' # 0x00aa -> NOT SIGN
u'\u017a' # 0x00ab -> LATIN SMALL LETTER Z WITH ACUTE
u'\u010c' # 0x00ac -> LATIN CAPITAL LETTER C WITH CARON
u'\u015f' # 0x00ad -> LATIN SMALL LETTER S WITH CEDILLA
u'\xab' # 0x00ae -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0x00af -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2591' # 0x00b0 -> LIGHT SHADE
u'\u2592' # 0x00b1 -> MEDIUM SHADE
u'\u2593' # 0x00b2 -> DARK SHADE
u'\u2502' # 0x00b3 -> BOX DRAWINGS LIGHT VERTICAL
u'\u2524' # 0x00b4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT
u'\xc1' # 0x00b5 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xc2' # 0x00b6 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\u011a' # 0x00b7 -> LATIN CAPITAL LETTER E WITH CARON
u'\u015e' # 0x00b8 -> LATIN CAPITAL LETTER S WITH CEDILLA
u'\u2563' # 0x00b9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT
u'\u2551' # 0x00ba -> BOX DRAWINGS DOUBLE VERTICAL
u'\u2557' # 0x00bb -> BOX DRAWINGS DOUBLE DOWN AND LEFT
u'\u255d' # 0x00bc -> BOX DRAWINGS DOUBLE UP AND LEFT
u'\u017b' # 0x00bd -> LATIN CAPITAL LETTER Z WITH DOT ABOVE
u'\u017c' # 0x00be -> LATIN SMALL LETTER Z WITH DOT ABOVE
u'\u2510' # 0x00bf -> BOX DRAWINGS LIGHT DOWN AND LEFT
u'\u2514' # 0x00c0 -> BOX DRAWINGS LIGHT UP AND RIGHT
u'\u2534' # 0x00c1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL
u'\u252c' # 0x00c2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
u'\u251c' # 0x00c3 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT
u'\u2500' # 0x00c4 -> BOX DRAWINGS LIGHT HORIZONTAL
u'\u253c' # 0x00c5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
u'\u0102' # 0x00c6 -> LATIN CAPITAL LETTER A WITH BREVE
u'\u0103' # 0x00c7 -> LATIN SMALL LETTER A WITH BREVE
u'\u255a' # 0x00c8 -> BOX DRAWINGS DOUBLE UP AND RIGHT
u'\u2554' # 0x00c9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT
u'\u2569' # 0x00ca -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL
u'\u2566' # 0x00cb -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
u'\u2560' # 0x00cc -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
u'\u2550' # 0x00cd -> BOX DRAWINGS DOUBLE HORIZONTAL
u'\u256c' # 0x00ce -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
u'\xa4' # 0x00cf -> CURRENCY SIGN
u'\u0111' # 0x00d0 -> LATIN SMALL LETTER D WITH STROKE
u'\u0110' # 0x00d1 -> LATIN CAPITAL LETTER D WITH STROKE
u'\u010e' # 0x00d2 -> LATIN CAPITAL LETTER D WITH CARON
u'\xcb' # 0x00d3 -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\u010f' # 0x00d4 -> LATIN SMALL LETTER D WITH CARON
u'\u0147' # 0x00d5 -> LATIN CAPITAL LETTER N WITH CARON
u'\xcd' # 0x00d6 -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0x00d7 -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\u011b' # 0x00d8 -> LATIN SMALL LETTER E WITH CARON
u'\u2518' # 0x00d9 -> BOX DRAWINGS LIGHT UP AND LEFT
u'\u250c' # 0x00da -> BOX DRAWINGS LIGHT DOWN AND RIGHT
u'\u2588' # 0x00db -> FULL BLOCK
u'\u2584' # 0x00dc -> LOWER HALF BLOCK
u'\u0162' # 0x00dd -> LATIN CAPITAL LETTER T WITH CEDILLA
u'\u016e' # 0x00de -> LATIN CAPITAL LETTER U WITH RING ABOVE
u'\u2580' # 0x00df -> UPPER HALF BLOCK
u'\xd3' # 0x00e0 -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xdf' # 0x00e1 -> LATIN SMALL LETTER SHARP S
u'\xd4' # 0x00e2 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\u0143' # 0x00e3 -> LATIN CAPITAL LETTER N WITH ACUTE
u'\u0144' # 0x00e4 -> LATIN SMALL LETTER N WITH ACUTE
u'\u0148' # 0x00e5 -> LATIN SMALL LETTER N WITH CARON
u'\u0160' # 0x00e6 -> LATIN CAPITAL LETTER S WITH CARON
u'\u0161' # 0x00e7 -> LATIN SMALL LETTER S WITH CARON
u'\u0154' # 0x00e8 -> LATIN CAPITAL LETTER R WITH ACUTE
u'\xda' # 0x00e9 -> LATIN CAPITAL LETTER U WITH ACUTE
u'\u0155' # 0x00ea -> LATIN SMALL LETTER R WITH ACUTE
u'\u0170' # 0x00eb -> LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
u'\xfd' # 0x00ec -> LATIN SMALL LETTER Y WITH ACUTE
u'\xdd' # 0x00ed -> LATIN CAPITAL LETTER Y WITH ACUTE
u'\u0163' # 0x00ee -> LATIN SMALL LETTER T WITH CEDILLA
u'\xb4' # 0x00ef -> ACUTE ACCENT
u'\xad' # 0x00f0 -> SOFT HYPHEN
u'\u02dd' # 0x00f1 -> DOUBLE ACUTE ACCENT
u'\u02db' # 0x00f2 -> OGONEK
u'\u02c7' # 0x00f3 -> CARON
u'\u02d8' # 0x00f4 -> BREVE
u'\xa7' # 0x00f5 -> SECTION SIGN
u'\xf7' # 0x00f6 -> DIVISION SIGN
u'\xb8' # 0x00f7 -> CEDILLA
u'\xb0' # 0x00f8 -> DEGREE SIGN
u'\xa8' # 0x00f9 -> DIAERESIS
u'\u02d9' # 0x00fa -> DOT ABOVE
u'\u0171' # 0x00fb -> LATIN SMALL LETTER U WITH DOUBLE ACUTE
u'\u0158' # 0x00fc -> LATIN CAPITAL LETTER R WITH CARON
u'\u0159' # 0x00fd -> LATIN SMALL LETTER R WITH CARON
u'\u25a0' # 0x00fe -> BLACK SQUARE
u'\xa0' # 0x00ff -> NO-BREAK SPACE
)
### Encoding Map
### Encoding Map
# CP852 maps every byte to a distinct code point, so the encoding map is
# the exact inverse of ``decoding_map``.  Derive it instead of duplicating
# ~250 lines of generated table data; ``codecs.make_encoding_map`` inverts
# the mapping (and would map any ambiguous code point to None, which does
# not occur here).
encoding_map = codecs.make_encoding_map(decoding_map)
| apache-2.0 |
DirkHoffmann/indico | indico/modules/events/logs/util.py | 4 | 6839 | # This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
import re
from datetime import datetime
from difflib import SequenceMatcher
from enum import Enum
from markupsafe import Markup
from indico.core import signals
from indico.util.i18n import orig_string
from indico.util.signals import named_objects_from_signal
def get_log_renderers():
    """Return the registered event-log renderers, keyed by name.

    Renderers are collected from the ``get_log_renderers`` signal; the
    ``plugin`` attribute of each object identifies its providing plugin.
    """
    return named_objects_from_signal(signals.event.get_log_renderers.send(), plugin_attr='plugin')
def make_diff_log(changes, fields):
    """Create a value for log data containing change information.

    :param changes: a dict mapping attributes to ``(old, new)`` tuples
    :param fields: a dict mapping attributes to field metadata. for
                   simple cases this may be a string with the
                   human-friendly title, for more advanced fields it
                   should be a dict containing ``title``, a ``type``
                   string and a ``convert`` callback which will be
                   invoked with a tuple containing the old and new value
    :return: a dict suitable for log data; ``_diff`` marks it as diff
             data, each other entry maps a title to
             ``[old, new, type]``
    """
    data = {'_diff': True}
    for key, field_data in fields.items():
        try:
            change = changes[key]
        except KeyError:
            # attribute did not change; nothing to log for this field
            continue
        if isinstance(field_data, str):
            # shorthand: a plain string is just the title
            field_data = {'title': field_data}
        title = field_data['title']
        convert = field_data.get('convert')
        attr = field_data.get('attr')
        default = field_data.get('default')
        type_ = field_data.get('type')
        # NOTE: computed from the *original* values, before the `attr`
        # and `convert` transformations below, so enum detection looks
        # at the raw objects.
        not_none_change = [x for x in change if x is not None]
        if attr:
            change = [getattr(x, attr) if x is not None else '' for x in change]
        if convert:
            change = convert(change)
        if type_ is not None:
            # when we have an explicit type specified don't do any
            # guessing/conversions
            pass
        elif not_none_change and all(isinstance(x, Enum) for x in not_none_change):
            type_ = 'enum'
            change = [orig_string(getattr(x, 'title', x.name))
                      if x is not None else default
                      for x in change]
        elif all(isinstance(x, (int, float)) for x in change):
            type_ = 'number'
        elif all(isinstance(x, (list, tuple)) for x in change):
            type_ = 'list'
        elif all(isinstance(x, set) for x in change):
            # sets are rendered as lists; sort for a deterministic order
            type_ = 'list'
            change = list(map(sorted, change))
        elif all(isinstance(x, bool) for x in change):
            # NOTE(review): bool is a subclass of int, so boolean pairs are
            # classified as 'number' by the earlier branch and this branch
            # appears unreachable; rendering is identical for both tags,
            # but confirm before relying on the 'bool' type.
            type_ = 'bool'
        elif all(isinstance(x, datetime) for x in change):
            type_ = 'datetime'
            change = [x.isoformat() for x in change]
        else:
            # fallback: diff as plain text
            type_ = 'text'
            change = list(map(str, map(orig_string, change)))
        data[title] = list(change) + [type_]
    return data
def render_changes(a, b, type_):
    """Render the comparison of `a` and `b` as HTML.

    :param a: old value
    :param b: new value
    :param type_: the type determining how the values should be compared
    """
    empty = '\N{EMPTY SET}'
    arrow = '\N{RIGHTWARDS ARROW}'
    if type_ in ('number', 'enum', 'bool', 'datetime'):
        # only None and the empty string are treated as "no value" here
        old = empty if a in (None, '') else a
        new = empty if b in (None, '') else b
        return f'{old} {arrow} {new}'
    if type_ == 'string':
        # any falsy value falls back to the empty-set symbol
        return f'{a or empty} {arrow} {b or empty}'
    if type_ == 'list':
        return _diff_list(a or [], b or [])
    if type_ == 'text':
        return _diff_text(a or '', b or '')
    raise NotImplementedError(f'Unexpected diff type: {type_}')
def _clean(strings, _linebreak_re=re.compile(r'\A(\n*)(.*?)(\n*)\Z', re.DOTALL)):
    # Make linebreak changes more visible: each leading/trailing newline is
    # rendered as a bold RETURN SYMBOL followed by the newline itself.
    symbol = Markup('<strong>\N{RETURN SYMBOL}</strong>\n')
    leading, body, trailing = _linebreak_re.match(''.join(strings)).groups()
    parts = (symbol * len(leading), body, symbol * len(trailing))
    return Markup('').join(parts)
def _diff_text(a, b, _noword_re=re.compile(r'(\W)')):
    """Render a word-level HTML diff between the strings `a` and `b`.

    The inputs are split on non-word characters first so that a change never
    starts or ends in the middle of a word, keeping the diff readable.
    """
    words_a = _noword_re.split(a)
    words_b = _noword_re.split(b)
    matcher = SequenceMatcher(a=words_a, b=words_b)
    ins_tmpl = Markup('<ins>{}</ins>')
    del_tmpl = Markup('<del>{}</del>')
    chunks = []
    for opcode, a0, a1, b0, b1 in matcher.get_opcodes():
        if opcode == 'equal':
            chunks.append(''.join(words_a[a0:a1]))
            continue
        if opcode not in ('insert', 'delete', 'replace'):
            raise RuntimeError('unexpected opcode: ' + opcode)
        # 'replace' emits the deletion first, then the insertion.
        if opcode in ('delete', 'replace'):
            chunks.append(del_tmpl.format(_clean(words_a[a0:a1])))
        if opcode in ('insert', 'replace'):
            chunks.append(ins_tmpl.format(_clean(words_b[b0:b1])))
    return Markup('').join(chunks)
def _diff_list(a, b):
    """Render a comma-separated HTML diff between the lists `a` and `b`."""
    matcher = SequenceMatcher(a=a, b=b)
    mark_ins = Markup('<ins>{}</ins>').format
    mark_del = Markup('<del>{}</del>').format
    rendered = []
    for opcode, a0, a1, b0, b1 in matcher.get_opcodes():
        if opcode == 'equal':
            rendered.extend(matcher.a[a0:a1])
        elif opcode == 'insert':
            rendered.extend(mark_ins(item) for item in matcher.b[b0:b1])
        elif opcode == 'delete':
            rendered.extend(mark_del(item) for item in matcher.a[a0:a1])
        elif opcode == 'replace':
            # Deleted items come before the inserted ones, as in a unified diff.
            rendered.extend(mark_del(item) for item in matcher.a[a0:a1])
            rendered.extend(mark_ins(item) for item in matcher.b[b0:b1])
        else:
            raise RuntimeError('unexpected opcode: ' + opcode)
    return Markup(', ').join(rendered)
def serialize_log_entry(entry):
    """Serialize a log `entry` to a JSON-friendly dict.

    The timestamp is converted to the owning event's timezone before being
    formatted as an ISO-8601 string.
    """
    user = entry.user
    local_time = entry.logged_dt.astimezone(entry.event.tzinfo)
    return {
        'id': entry.id,
        'type': entry.type,
        'realm': entry.realm.name,
        'kind': entry.kind.name,
        'module': entry.module,
        'description': entry.summary,
        'meta': entry.meta,
        'time': local_time.isoformat(),
        'payload': entry.data,
        'user': {
            'fullName': user.full_name if user else None,
            'avatarURL': user.avatar_url if user else None,
        },
    }
| gpl-3.0 |
SamHames/scikit-image | skimage/restoration/tests/test_denoise.py | 1 | 5281 | import numpy as np
from numpy.testing import run_module_suite, assert_raises, assert_equal
from skimage import restoration, data, color, img_as_float
# Seed the global RNG so the noise added by the tests below is reproducible.
np.random.seed(1234)
# Shared test fixtures: a 128x128 Lena crop (RGB and grayscale) and the
# checkerboard pattern (grayscale and RGB), all as floats in [0, 1].
lena = img_as_float(data.lena()[:128, :128])
lena_gray = color.rgb2gray(lena)
checkerboard_gray = img_as_float(data.checkerboard())
checkerboard = color.gray2rgb(checkerboard_gray)
def test_denoise_tv_chambolle_2d():
    """TV-Chambolle denoising must at least halve the total variation."""
    # lena image
    img = lena_gray.copy()
    # add noise to lena
    img += 0.5 * img.std() * np.random.rand(*img.shape)
    # clip noise so that it does not exceed allowed range for float images.
    img = np.clip(img, 0, 1)
    # denoise
    denoised_lena = restoration.denoise_tv_chambolle(img, weight=60.0)
    # which dtype?
    # `np.float` was a deprecated alias for the builtin `float` and was removed
    # in NumPy 1.24; the builtin is behaviourally identical here.
    assert denoised_lena.dtype in [float, np.float32, np.float64]
    from scipy import ndimage
    grad = ndimage.morphological_gradient(img, size=((3, 3)))
    grad_denoised = ndimage.morphological_gradient(
        denoised_lena, size=((3, 3)))
    # test if the total variation has decreased
    assert grad_denoised.dtype == float
    assert (np.sqrt((grad_denoised**2).sum())
            < np.sqrt((grad**2).sum()) / 2)
def test_denoise_tv_chambolle_multichannel():
    """Denoising one channel alone must match that channel of the RGB result."""
    channel0 = restoration.denoise_tv_chambolle(lena[..., 0], weight=60.0)
    full_rgb = restoration.denoise_tv_chambolle(lena, weight=60.0,
                                                multichannel=True)
    assert_equal(full_rgb[..., 0], channel0)
def test_denoise_tv_chambolle_float_result_range():
    """uint8 input must come back from TV-Chambolle as floats inside [0, 1]."""
    # lena image
    img = lena_gray
    int_lena = np.multiply(img, 255).astype(np.uint8)
    assert np.max(int_lena) > 1
    denoised_int_lena = restoration.denoise_tv_chambolle(int_lena, weight=60.0)
    # test if the value range of output float data is within [0.0:1.0]
    # `np.float` (removed in NumPy 1.24) replaced by the equivalent builtin.
    assert denoised_int_lena.dtype == float
    assert np.max(denoised_int_lena) <= 1.0
    assert np.min(denoised_int_lena) >= 0.0
def test_denoise_tv_chambolle_3d():
    """Apply the TV denoising algorithm on a 3D image representing a sphere."""
    x, y, z = np.ogrid[0:40, 0:40, 0:40]
    mask = (x - 22)**2 + (y - 20)**2 + (z - 17)**2 < 8**2
    # `np.float` was removed in NumPy 1.24; the builtin `float` is equivalent.
    mask = 100 * mask.astype(float)
    mask += 60
    mask += 20 * np.random.rand(*mask.shape)
    mask[mask < 0] = 0
    mask[mask > 255] = 255
    res = restoration.denoise_tv_chambolle(mask.astype(np.uint8), weight=100)
    assert res.dtype == float
    assert res.std() * 255 < mask.std()
    # test wrong number of dimensions
    assert_raises(ValueError, restoration.denoise_tv_chambolle,
                  np.random.rand(8, 8, 8, 8))
def test_denoise_tv_bregman_2d():
    """A smaller Bregman weight must smooth the checkerboard cells more."""
    noisy = checkerboard_gray.copy()
    # corrupt with uniform noise, then clip back into the valid [0, 1] range
    noisy += 0.5 * noisy.std() * np.random.rand(*noisy.shape)
    noisy = np.clip(noisy, 0, 1)
    weak = restoration.denoise_tv_bregman(noisy, weight=10)
    strong = restoration.denoise_tv_bregman(noisy, weight=5)
    # the residual noise inside one checkerboard cell shrinks with the weight
    cell = (slice(30, 45), slice(5, 15))
    assert noisy[cell].std() > weak[cell].std()
    assert weak[cell].std() > strong[cell].std()
def test_denoise_tv_bregman_float_result_range():
    """uint8 input must come back from TV-Bregman as floats inside [0, 1]."""
    # lena image
    img = lena_gray.copy()
    int_lena = np.multiply(img, 255).astype(np.uint8)
    assert np.max(int_lena) > 1
    denoised_int_lena = restoration.denoise_tv_bregman(int_lena, weight=60.0)
    # test if the value range of output float data is within [0.0:1.0]
    # `np.float` (removed in NumPy 1.24) replaced by the equivalent builtin.
    assert denoised_int_lena.dtype == float
    assert np.max(denoised_int_lena) <= 1.0
    assert np.min(denoised_int_lena) >= 0.0
def test_denoise_tv_bregman_3d():
    """On an RGB checkerboard, a smaller Bregman weight must smooth more."""
    noisy = checkerboard.copy()
    # corrupt with uniform noise, then clip back into the valid [0, 1] range
    noisy += 0.5 * noisy.std() * np.random.rand(*noisy.shape)
    noisy = np.clip(noisy, 0, 1)
    weak = restoration.denoise_tv_bregman(noisy, weight=10)
    strong = restoration.denoise_tv_bregman(noisy, weight=5)
    # the residual noise inside one checkerboard cell shrinks with the weight
    cell = (slice(30, 45), slice(5, 15))
    assert noisy[cell].std() > weak[cell].std()
    assert weak[cell].std() > strong[cell].std()
def test_denoise_bilateral_2d():
    """Wider bilateral kernels must smooth the checkerboard cells more."""
    noisy = checkerboard_gray.copy()
    # corrupt with uniform noise, then clip back into the valid [0, 1] range
    noisy += 0.5 * noisy.std() * np.random.rand(*noisy.shape)
    noisy = np.clip(noisy, 0, 1)
    narrow = restoration.denoise_bilateral(noisy, sigma_range=0.1,
                                           sigma_spatial=20)
    wide = restoration.denoise_bilateral(noisy, sigma_range=0.2,
                                         sigma_spatial=30)
    # noise inside a single checkerboard cell decreases with kernel width
    cell = (slice(30, 45), slice(5, 15))
    assert noisy[cell].std() > narrow[cell].std()
    assert narrow[cell].std() > wide[cell].std()
def test_denoise_bilateral_3d():
    """On an RGB image, wider bilateral kernels must smooth the cells more."""
    noisy = checkerboard.copy()
    # corrupt with uniform noise, then clip back into the valid [0, 1] range
    noisy += 0.5 * noisy.std() * np.random.rand(*noisy.shape)
    noisy = np.clip(noisy, 0, 1)
    narrow = restoration.denoise_bilateral(noisy, sigma_range=0.1,
                                           sigma_spatial=20)
    wide = restoration.denoise_bilateral(noisy, sigma_range=0.2,
                                         sigma_spatial=30)
    # noise inside a single checkerboard cell decreases with kernel width
    cell = (slice(30, 45), slice(5, 15))
    assert noisy[cell].std() > narrow[cell].std()
    assert narrow[cell].std() > wide[cell].std()
# Allow running this test module directly via numpy's test runner.
if __name__ == "__main__":
    run_module_suite()
| bsd-3-clause |
uTest/Androguard | elsim/tests/example_text_sim.py | 9 | 3007 | #!/usr/bin/env python
# This file is part of Elsim.
#
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
# All rights reserved.
#
# Elsim is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Elsim is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Elsim. If not, see <http://www.gnu.org/licenses/>.
from optparse import OptionParser
import sys
sys.path.append("./")
from elsim.elsim import Elsim, ELSIM_VERSION
from elsim.elsim_text import ProxyText, FILTERS_TEXT
# Command-line option table consumed by the __main__ block below:
#   -i/--input   the two files to compare,
#   -d/--display dump per-sentence details,
#   -v/--version print the elsim API version.
option_0 = { 'name' : ('-i', '--input'), 'help' : 'file : use these filenames', 'nargs' : 2 }
option_1 = { 'name' : ('-d', '--display'), 'help' : 'display the file in human readable format', 'action' : 'count' }
option_2 = { 'name' : ('-v', '--version'), 'help' : 'version of the API', 'action' : 'count' }
options = [option_0, option_1, option_2]
############################################################
def main(options, arguments) :
    """Compare the two input files sentence by sentence and print the result.

    NOTE(review): this is Python 2 code (print statements); it will not run
    under Python 3 without conversion.
    """
    if options.input != None :
        # Build an Elsim comparator over the raw bytes of both files, using
        # the text proxy and the native libsimilarity backend.
        el = Elsim( ProxyText( open(options.input[0], "rb").read() ),
                    ProxyText( open(options.input[1], "rb").read() ), FILTERS_TEXT,
                    libpath="elsim/similarity/libsimilarity/libsimilarity.so")
        el.show()
        print "\t--> sentences: %f%% of similarities" % el.get_similarity_value()
        if options.display :
            # Dump each category of sentences in human-readable form.
            print "SIMILAR sentences:"
            diff_methods = el.get_similar_elements()
            for i in diff_methods :
                el.show_element( i )
            print "IDENTICAL sentences:"
            new_methods = el.get_identical_elements()
            for i in new_methods :
                el.show_element( i )
            print "NEW sentences:"
            new_methods = el.get_new_elements()
            for i in new_methods :
                el.show_element( i, False )
            print "DELETED sentences:"
            del_methods = el.get_deleted_elements()
            for i in del_methods :
                el.show_element( i )
            print "SKIPPED sentences:"
            skip_methods = el.get_skipped_elements()
            for i in skip_methods :
                el.show_element( i )
    elif options.version != None :
        print "example text sim %s" % ELSIM_VERSION
if __name__ == "__main__" :
    # Build the option parser from the `options` table: each entry's 'name'
    # tuple holds the flags and the remaining keys are optparse kwargs.
    parser = OptionParser()
    for option in options :
        param = option['name']
        del option['name']
        parser.add_option(*param, **option)
    options, arguments = parser.parse_args()
    sys.argv[:] = arguments
    main(options, arguments)
| apache-2.0 |
p4datasystems/CarnotKE | jyhton/lib-python/2.7/lib2to3/pgen2/literals.py | 399 | 1614 | # Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Safely evaluate Python string literals without using eval()."""
import re
# One-character escapes understood directly (the tail after the backslash).
simple_escapes = {
    "a": "\a", "b": "\b", "f": "\f", "n": "\n", "r": "\r",
    "t": "\t", "v": "\v", "'": "'", '"': '"', "\\": "\\",
}

def escape(m):
    """Translate one matched backslash escape into the character it denotes.

    `m` matches the escape regex used by evalString(): group 0 is the whole
    escape (including the backslash) and group 1 is the tail after it.
    Raises ValueError for malformed hex or octal escapes.
    """
    whole, tail = m.group(0, 1)
    assert whole.startswith("\\")
    if tail in simple_escapes:
        return simple_escapes[tail]
    if tail.startswith("x"):
        digits, base, kind = tail[1:], 16, "hex"
        # A hex escape needs exactly two hex digits after the 'x'.
        if len(digits) < 2:
            raise ValueError("invalid hex string escape ('\\%s')" % tail)
    else:
        digits, base, kind = tail, 8, "octal"
    try:
        code = int(digits, base)
    except ValueError:
        raise ValueError("invalid %s string escape ('\\%s')" % (kind, tail))
    return chr(code)
def evalString(s):
    """Evaluate the Python string literal `s` without using eval()."""
    assert s.startswith("'") or s.startswith('"'), repr(s[:1])
    quote = s[0]
    # Triple-quoted literals repeat the quote character three times.
    if s[:3] == quote * 3:
        quote = quote * 3
    assert s.endswith(quote), repr(s[-len(quote):])
    assert len(s) >= 2 * len(quote)
    body = s[len(quote):-len(quote)]
    return re.sub(r"\\(\'|\"|\\|[abfnrtv]|x.{0,2}|[0-7]{1,3})", escape, body)
def test():
    # Round-trip every byte value: repr() then evalString() must reproduce the
    # original character; mismatches are printed (Python 2 print statement).
    for i in range(256):
        c = chr(i)
        s = repr(c)
        e = evalString(s)
        if e != c:
            print i, c, s, e
if __name__ == "__main__":
    test()
| apache-2.0 |
epssy/hue | desktop/core/ext-py/Django-1.6.10/tests/admin_filters/tests.py | 49 | 35499 | from __future__ import absolute_import, unicode_literals
import datetime
from django.contrib.admin import (site, ModelAdmin, SimpleListFilter,
BooleanFieldListFilter)
from django.contrib.admin.views.main import ChangeList
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase, RequestFactory
from django.test.utils import override_settings, six
from django.utils.encoding import force_text
from .models import Book, Department, Employee
def select_by(dictlist, key, value):
    """Return the first dict in `dictlist` whose `key` equals `value`.

    Raises IndexError when no entry matches, matching the original indexing
    behaviour that the tests rely on.
    """
    matches = [entry for entry in dictlist if entry[key] == value]
    return matches[0]
class DecadeListFilter(SimpleListFilter):
    """Filter books by publication decade; 'other' matches nothing special."""

    # Inclusive year bounds for each selectable decade.
    _YEAR_RANGES = {
        'the 80s': (1980, 1989),
        'the 90s': (1990, 1999),
        'the 00s': (2000, 2009),
    }

    def lookups(self, request, model_admin):
        return (
            ('the 80s', "the 1980's"),
            ('the 90s', "the 1990's"),
            ('the 00s', "the 2000's"),
            ('other', "other decades"),
        )

    def queryset(self, request, queryset):
        # Unknown values (including 'other' and no selection) leave the
        # queryset untouched, as the base class interprets None.
        bounds = self._YEAR_RANGES.get(self.value())
        if bounds is None:
            return None
        first, last = bounds
        return queryset.filter(year__gte=first, year__lte=last)
class DecadeListFilterWithTitleAndParameter(DecadeListFilter):
    # Fully-configured variant: both required attributes are present.
    title = 'publication decade'
    parameter_name = 'publication-decade'
class DecadeListFilterWithoutTitle(DecadeListFilter):
    # Deliberately missing `title`; used to test the ImproperlyConfigured error.
    parameter_name = 'publication-decade'
class DecadeListFilterWithoutParameter(DecadeListFilter):
    # Deliberately missing `parameter_name`; used to test the configuration error.
    title = 'publication decade'
class DecadeListFilterWithNoneReturningLookups(DecadeListFilterWithTitleAndParameter):
    def lookups(self, request, model_admin):
        # Returning None (implicitly) must disable the filter entirely.
        pass
class DecadeListFilterWithFailingQueryset(DecadeListFilterWithTitleAndParameter):
    def queryset(self, request, queryset):
        # `1/0` raises ZeroDivisionError before `raise` even runs; the tests
        # assert that such failures propagate instead of being swallowed.
        raise 1/0
class DecadeListFilterWithQuerysetBasedLookups(DecadeListFilterWithTitleAndParameter):
    def lookups(self, request, model_admin):
        """Yield only the decades that actually occur in the admin queryset."""
        qs = model_admin.get_queryset(request)
        decades = (
            ('the 80s', "the 1980's", 1980, 1989),
            ('the 90s', "the 1990's", 1990, 1999),
            ('the 00s', "the 2000's", 2000, 2009),
        )
        for value, label, first, last in decades:
            if qs.filter(year__gte=first, year__lte=last).exists():
                yield (value, label)
class DecadeListFilterParameterEndsWith__In(DecadeListFilter):
    # Regression guard: a parameter_name ending in '__in' must not be
    # misinterpreted as a queryset field lookup.
    title = 'publication decade'
    parameter_name = 'decade__in' # Ends with '__in"
class DecadeListFilterParameterEndsWith__Isnull(DecadeListFilter):
    # Regression guard: a parameter_name ending in '__isnull' must not be
    # misinterpreted as a queryset field lookup.
    title = 'publication decade'
    parameter_name = 'decade__isnull' # Ends with '__isnull"
class DepartmentListFilterLookupWithNonStringValue(SimpleListFilter):
    """Filter employees by department, using non-string lookup values."""
    title = 'department'
    parameter_name = 'department'

    def lookups(self, request, model_admin):
        # Department ids are intentionally kept as integers, not strings
        # (refs #19318); duplicates are removed via the set comprehension.
        pairs = {
            (employee.department.id, employee.department.code)
            for employee in model_admin.get_queryset(request).all()
        }
        return sorted(pairs)

    def queryset(self, request, queryset):
        if self.value():
            return queryset.filter(department__id=self.value())
class CustomUserAdmin(UserAdmin):
    # Exercises filters on reverse relations (FK and M2M back to Book).
    list_filter = ('books_authored', 'books_contributed')
class BookAdmin(ModelAdmin):
    # One filter per field type: year (all-values), author (FK), contributors
    # (M2M), is_best_seller (nullable boolean), date_registered (date), no.
    list_filter = ('year', 'author', 'contributors', 'is_best_seller', 'date_registered', 'no')
    ordering = ('-id',)
class BookAdminWithTupleBooleanFilter(BookAdmin):
    # Same as BookAdmin, but the boolean filter is given in explicit tuple form.
    list_filter = ('year', 'author', 'contributors', ('is_best_seller', BooleanFieldListFilter), 'date_registered', 'no')
class DecadeFilterBookAdmin(ModelAdmin):
    # Combines a related-field filter with the custom SimpleListFilter.
    list_filter = ('author', DecadeListFilterWithTitleAndParameter)
    ordering = ('-id',)
class DecadeFilterBookAdminWithoutTitle(ModelAdmin):
    # Used to verify that a filter without a title raises ImproperlyConfigured.
    list_filter = (DecadeListFilterWithoutTitle,)
class DecadeFilterBookAdminWithoutParameter(ModelAdmin):
    # Used to verify that a missing parameter_name raises ImproperlyConfigured.
    list_filter = (DecadeListFilterWithoutParameter,)
class DecadeFilterBookAdminWithNoneReturningLookups(ModelAdmin):
    # Lookups returning None should disable the filter completely.
    list_filter = (DecadeListFilterWithNoneReturningLookups,)
class DecadeFilterBookAdminWithFailingQueryset(ModelAdmin):
    # The filter's queryset() raises; the error must propagate loudly.
    list_filter = (DecadeListFilterWithFailingQueryset,)
class DecadeFilterBookAdminWithQuerysetBasedLookups(ModelAdmin):
    # Lookup choices are derived from the actual queryset contents.
    list_filter = (DecadeListFilterWithQuerysetBasedLookups,)
class DecadeFilterBookAdminParameterEndsWith__In(ModelAdmin):
    # Regression guard for filter parameter names ending in '__in'.
    list_filter = (DecadeListFilterParameterEndsWith__In,)
class DecadeFilterBookAdminParameterEndsWith__Isnull(ModelAdmin):
    # Regression guard for filter parameter names ending in '__isnull'.
    list_filter = (DecadeListFilterParameterEndsWith__Isnull,)
class EmployeeAdmin(ModelAdmin):
    # Plain related-field filter on the department FK.
    list_display = ['name', 'department']
    list_filter = ['department']
class DepartmentFilterEmployeeAdmin(EmployeeAdmin):
    # Same admin, but filtering through the non-string-value SimpleListFilter.
    list_filter = [DepartmentListFilterLookupWithNonStringValue, ]
class ListFiltersTests(TestCase):
    def setUp(self):
        """Create the date anchors, users, books, departments and employees
        shared by all of the filter tests."""
        self.today = datetime.date.today()
        self.tomorrow = self.today + datetime.timedelta(days=1)
        self.one_week_ago = self.today - datetime.timedelta(days=7)
        # First day of the next month, handling the December -> January rollover.
        if self.today.month == 12:
            self.next_month = self.today.replace(year=self.today.year + 1, month=1, day=1)
        else:
            self.next_month = self.today.replace(month=self.today.month + 1, day=1)
        self.next_year = self.today.replace(year=self.today.year + 1, month=1, day=1)
        self.request_factory = RequestFactory()
        # Users
        self.alfred = User.objects.create_user('alfred', 'alfred@example.com')
        self.bob = User.objects.create_user('bob', 'bob@example.com')
        self.lisa = User.objects.create_user('lisa', 'lisa@example.com')
        # Books (note: the_django_book has no year, gipsy_book has no author)
        self.djangonaut_book = Book.objects.create(title='Djangonaut: an art of living', year=2009, author=self.alfred, is_best_seller=True, date_registered=self.today)
        self.bio_book = Book.objects.create(title='Django: a biography', year=1999, author=self.alfred, is_best_seller=False, no=207)
        self.django_book = Book.objects.create(title='The Django Book', year=None, author=self.bob, is_best_seller=None, date_registered=self.today, no=103)
        self.gipsy_book = Book.objects.create(title='Gipsy guitar for dummies', year=2002, is_best_seller=True, date_registered=self.one_week_ago)
        self.gipsy_book.contributors = [self.bob, self.lisa]
        self.gipsy_book.save()
        # Departments
        self.dev = Department.objects.create(code='DEV', description='Development')
        self.design = Department.objects.create(code='DSN', description='Design')
        # Employees
        self.john = Employee.objects.create(name='John Blue', department=self.dev)
        self.jack = Employee.objects.create(name='Jack Red', department=self.design)
def get_changelist(self, request, model, modeladmin):
return ChangeList(request, model, modeladmin.list_display, modeladmin.list_display_links,
modeladmin.list_filter, modeladmin.date_hierarchy, modeladmin.search_fields,
modeladmin.list_select_related, modeladmin.list_per_page, modeladmin.list_max_show_all, modeladmin.list_editable, modeladmin)
    def test_datefieldlistfilter(self):
        """Each DateFieldListFilter preset ('Today', 'This month', 'This year',
        'Past 7 days') must filter the queryset and mark its choice selected."""
        modeladmin = BookAdmin(Book, site)
        # Smoke test: building a changelist without any GET parameters works.
        request = self.request_factory.get('/')
        changelist = self.get_changelist(request, Book, modeladmin)
        request = self.request_factory.get('/', {'date_registered__gte': self.today,
                                                 'date_registered__lt': self.tomorrow})
        changelist = self.get_changelist(request, Book, modeladmin)
        # Make sure the correct queryset is returned
        queryset = changelist.get_queryset(request)
        self.assertEqual(list(queryset), [self.django_book, self.djangonaut_book])
        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][4]
        self.assertEqual(force_text(filterspec.title), 'date registered')
        choice = select_by(filterspec.choices(changelist), "display", "Today")
        self.assertEqual(choice['selected'], True)
        self.assertEqual(choice['query_string'], '?date_registered__gte=%s'
                                                 '&date_registered__lt=%s'
                                                 % (self.today, self.tomorrow))
        request = self.request_factory.get('/', {'date_registered__gte': self.today.replace(day=1),
                                                 'date_registered__lt': self.next_month})
        changelist = self.get_changelist(request, Book, modeladmin)
        # Make sure the correct queryset is returned
        queryset = changelist.get_queryset(request)
        if (self.today.year, self.today.month) == (self.one_week_ago.year, self.one_week_ago.month):
            # In case one week ago is in the same month.
            self.assertEqual(list(queryset), [self.gipsy_book, self.django_book, self.djangonaut_book])
        else:
            self.assertEqual(list(queryset), [self.django_book, self.djangonaut_book])
        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][4]
        self.assertEqual(force_text(filterspec.title), 'date registered')
        choice = select_by(filterspec.choices(changelist), "display", "This month")
        self.assertEqual(choice['selected'], True)
        self.assertEqual(choice['query_string'], '?date_registered__gte=%s'
                                                 '&date_registered__lt=%s'
                                                 % (self.today.replace(day=1), self.next_month))
        request = self.request_factory.get('/', {'date_registered__gte': self.today.replace(month=1, day=1),
                                                 'date_registered__lt': self.next_year})
        changelist = self.get_changelist(request, Book, modeladmin)
        # Make sure the correct queryset is returned
        queryset = changelist.get_queryset(request)
        if self.today.year == self.one_week_ago.year:
            # In case one week ago is in the same year.
            self.assertEqual(list(queryset), [self.gipsy_book, self.django_book, self.djangonaut_book])
        else:
            self.assertEqual(list(queryset), [self.django_book, self.djangonaut_book])
        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][4]
        self.assertEqual(force_text(filterspec.title), 'date registered')
        choice = select_by(filterspec.choices(changelist), "display", "This year")
        self.assertEqual(choice['selected'], True)
        self.assertEqual(choice['query_string'], '?date_registered__gte=%s'
                                                 '&date_registered__lt=%s'
                                                 % (self.today.replace(month=1, day=1), self.next_year))
        request = self.request_factory.get('/', {'date_registered__gte': str(self.one_week_ago),
                                                 'date_registered__lt': str(self.tomorrow)})
        changelist = self.get_changelist(request, Book, modeladmin)
        # Make sure the correct queryset is returned
        queryset = changelist.get_queryset(request)
        self.assertEqual(list(queryset), [self.gipsy_book, self.django_book, self.djangonaut_book])
        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][4]
        self.assertEqual(force_text(filterspec.title), 'date registered')
        choice = select_by(filterspec.choices(changelist), "display", "Past 7 days")
        self.assertEqual(choice['selected'], True)
        self.assertEqual(choice['query_string'], '?date_registered__gte=%s'
                                                 '&date_registered__lt=%s'
                                                 % (str(self.one_week_ago), str(self.tomorrow)))
    @override_settings(USE_TZ=True)
    def test_datefieldlistfilter_with_time_zone_support(self):
        # Regression for #17830: the date filter must behave identically when
        # timezone support is enabled.
        self.test_datefieldlistfilter()
    def test_allvaluesfieldlistfilter(self):
        """AllValuesFieldListFilter on `year`: the None ('isnull') choice and
        a concrete value both filter and select correctly."""
        modeladmin = BookAdmin(Book, site)
        request = self.request_factory.get('/', {'year__isnull': 'True'})
        changelist = self.get_changelist(request, Book, modeladmin)
        # Make sure the correct queryset is returned
        queryset = changelist.get_queryset(request)
        self.assertEqual(list(queryset), [self.django_book])
        # Make sure the last choice is None and is selected
        filterspec = changelist.get_filters(request)[0][0]
        self.assertEqual(force_text(filterspec.title), 'year')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[-1]['selected'], True)
        self.assertEqual(choices[-1]['query_string'], '?year__isnull=True')
        request = self.request_factory.get('/', {'year': '2002'})
        changelist = self.get_changelist(request, Book, modeladmin)
        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][0]
        self.assertEqual(force_text(filterspec.title), 'year')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[2]['selected'], True)
        self.assertEqual(choices[2]['query_string'], '?year=2002')
    def test_relatedfieldlistfilter_foreignkey(self):
        """RelatedFieldListFilter on the `author` FK: the isnull choice and a
        concrete author pk both filter and select correctly."""
        modeladmin = BookAdmin(Book, site)
        request = self.request_factory.get('/', {'author__isnull': 'True'})
        changelist = self.get_changelist(request, Book, modeladmin)
        # Make sure the correct queryset is returned
        queryset = changelist.get_queryset(request)
        self.assertEqual(list(queryset), [self.gipsy_book])
        # Make sure the last choice is None and is selected
        filterspec = changelist.get_filters(request)[0][1]
        self.assertEqual(force_text(filterspec.title), 'Verbose Author')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[-1]['selected'], True)
        self.assertEqual(choices[-1]['query_string'], '?author__isnull=True')
        request = self.request_factory.get('/', {'author__id__exact': self.alfred.pk})
        changelist = self.get_changelist(request, Book, modeladmin)
        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][1]
        self.assertEqual(force_text(filterspec.title), 'Verbose Author')
        # order of choices depends on User model, which has no order
        choice = select_by(filterspec.choices(changelist), "display", "alfred")
        self.assertEqual(choice['selected'], True)
        self.assertEqual(choice['query_string'], '?author__id__exact=%d' % self.alfred.pk)
    def test_relatedfieldlistfilter_manytomany(self):
        """RelatedFieldListFilter on the `contributors` M2M: the isnull choice
        and a concrete contributor pk both filter and select correctly."""
        modeladmin = BookAdmin(Book, site)
        request = self.request_factory.get('/', {'contributors__isnull': 'True'})
        changelist = self.get_changelist(request, Book, modeladmin)
        # Make sure the correct queryset is returned
        queryset = changelist.get_queryset(request)
        self.assertEqual(list(queryset), [self.django_book, self.bio_book, self.djangonaut_book])
        # Make sure the last choice is None and is selected
        filterspec = changelist.get_filters(request)[0][2]
        self.assertEqual(force_text(filterspec.title), 'Verbose Contributors')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[-1]['selected'], True)
        self.assertEqual(choices[-1]['query_string'], '?contributors__isnull=True')
        request = self.request_factory.get('/', {'contributors__id__exact': self.bob.pk})
        changelist = self.get_changelist(request, Book, modeladmin)
        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][2]
        self.assertEqual(force_text(filterspec.title), 'Verbose Contributors')
        choice = select_by(filterspec.choices(changelist), "display", "bob")
        self.assertEqual(choice['selected'], True)
        self.assertEqual(choice['query_string'], '?contributors__id__exact=%d' % self.bob.pk)
    def test_relatedfieldlistfilter_reverse_relationships(self):
        """Filtering users through reverse FK (books_authored) and reverse M2M
        (books_contributed) relations."""
        modeladmin = CustomUserAdmin(User, site)
        # FK relationship -----
        request = self.request_factory.get('/', {'books_authored__isnull': 'True'})
        changelist = self.get_changelist(request, User, modeladmin)
        # Make sure the correct queryset is returned
        queryset = changelist.get_queryset(request)
        self.assertEqual(list(queryset), [self.lisa])
        # Make sure the last choice is None and is selected
        filterspec = changelist.get_filters(request)[0][0]
        self.assertEqual(force_text(filterspec.title), 'book')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[-1]['selected'], True)
        self.assertEqual(choices[-1]['query_string'], '?books_authored__isnull=True')
        request = self.request_factory.get('/', {'books_authored__id__exact': self.bio_book.pk})
        changelist = self.get_changelist(request, User, modeladmin)
        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][0]
        self.assertEqual(force_text(filterspec.title), 'book')
        choice = select_by(filterspec.choices(changelist), "display", self.bio_book.title)
        self.assertEqual(choice['selected'], True)
        self.assertEqual(choice['query_string'], '?books_authored__id__exact=%d' % self.bio_book.pk)
        # M2M relationship -----
        request = self.request_factory.get('/', {'books_contributed__isnull': 'True'})
        changelist = self.get_changelist(request, User, modeladmin)
        # Make sure the correct queryset is returned
        queryset = changelist.get_queryset(request)
        self.assertEqual(list(queryset), [self.alfred])
        # Make sure the last choice is None and is selected
        filterspec = changelist.get_filters(request)[0][1]
        self.assertEqual(force_text(filterspec.title), 'book')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[-1]['selected'], True)
        self.assertEqual(choices[-1]['query_string'], '?books_contributed__isnull=True')
        request = self.request_factory.get('/', {'books_contributed__id__exact': self.django_book.pk})
        changelist = self.get_changelist(request, User, modeladmin)
        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][1]
        self.assertEqual(force_text(filterspec.title), 'book')
        choice = select_by(filterspec.choices(changelist), "display", self.django_book.title)
        self.assertEqual(choice['selected'], True)
        self.assertEqual(choice['query_string'], '?books_contributed__id__exact=%d' % self.django_book.pk)
    def test_booleanfieldlistfilter(self):
        # Boolean filter declared in plain string form.
        modeladmin = BookAdmin(Book, site)
        self.verify_booleanfieldlistfilter(modeladmin)
    def test_booleanfieldlistfilter_tuple(self):
        # Boolean filter declared in explicit (field, FilterClass) tuple form.
        modeladmin = BookAdminWithTupleBooleanFilter(Book, site)
        self.verify_booleanfieldlistfilter(modeladmin)
    def verify_booleanfieldlistfilter(self, modeladmin):
        """Shared assertions for the boolean filter: the 'No', 'Yes' and
        'Unknown' (NULL) choices must each filter and select correctly."""
        request = self.request_factory.get('/')
        changelist = self.get_changelist(request, Book, modeladmin)
        request = self.request_factory.get('/', {'is_best_seller__exact': 0})
        changelist = self.get_changelist(request, Book, modeladmin)
        # Make sure the correct queryset is returned
        queryset = changelist.get_queryset(request)
        self.assertEqual(list(queryset), [self.bio_book])
        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][3]
        self.assertEqual(force_text(filterspec.title), 'is best seller')
        choice = select_by(filterspec.choices(changelist), "display", "No")
        self.assertEqual(choice['selected'], True)
        self.assertEqual(choice['query_string'], '?is_best_seller__exact=0')
        request = self.request_factory.get('/', {'is_best_seller__exact': 1})
        changelist = self.get_changelist(request, Book, modeladmin)
        # Make sure the correct queryset is returned
        queryset = changelist.get_queryset(request)
        self.assertEqual(list(queryset), [self.gipsy_book, self.djangonaut_book])
        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][3]
        self.assertEqual(force_text(filterspec.title), 'is best seller')
        choice = select_by(filterspec.choices(changelist), "display", "Yes")
        self.assertEqual(choice['selected'], True)
        self.assertEqual(choice['query_string'], '?is_best_seller__exact=1')
        request = self.request_factory.get('/', {'is_best_seller__isnull': 'True'})
        changelist = self.get_changelist(request, Book, modeladmin)
        # Make sure the correct queryset is returned
        queryset = changelist.get_queryset(request)
        self.assertEqual(list(queryset), [self.django_book])
        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][3]
        self.assertEqual(force_text(filterspec.title), 'is best seller')
        choice = select_by(filterspec.choices(changelist), "display", "Unknown")
        self.assertEqual(choice['selected'], True)
        self.assertEqual(choice['query_string'], '?is_best_seller__isnull=True')
    def test_simplelistfilter(self):
        """End-to-end check of the custom SimpleListFilter: 'All', each decade
        choice, and combining it with another filter."""
        modeladmin = DecadeFilterBookAdmin(Book, site)
        # Make sure that the first option is 'All' ---------------------------
        request = self.request_factory.get('/', {})
        changelist = self.get_changelist(request, Book, modeladmin)
        # Make sure the correct queryset is returned
        queryset = changelist.get_queryset(request)
        self.assertEqual(list(queryset), list(Book.objects.all().order_by('-id')))
        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][1]
        self.assertEqual(force_text(filterspec.title), 'publication decade')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[0]['display'], 'All')
        self.assertEqual(choices[0]['selected'], True)
        self.assertEqual(choices[0]['query_string'], '?')
        # Look for books in the 1980s ----------------------------------------
        request = self.request_factory.get('/', {'publication-decade': 'the 80s'})
        changelist = self.get_changelist(request, Book, modeladmin)
        # Make sure the correct queryset is returned (no books in the 80s)
        queryset = changelist.get_queryset(request)
        self.assertEqual(list(queryset), [])
        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][1]
        self.assertEqual(force_text(filterspec.title), 'publication decade')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[1]['display'], 'the 1980\'s')
        self.assertEqual(choices[1]['selected'], True)
        self.assertEqual(choices[1]['query_string'], '?publication-decade=the+80s')
        # Look for books in the 1990s ----------------------------------------
        request = self.request_factory.get('/', {'publication-decade': 'the 90s'})
        changelist = self.get_changelist(request, Book, modeladmin)
        # Make sure the correct queryset is returned
        queryset = changelist.get_queryset(request)
        self.assertEqual(list(queryset), [self.bio_book])
        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][1]
        self.assertEqual(force_text(filterspec.title), 'publication decade')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[2]['display'], 'the 1990\'s')
        self.assertEqual(choices[2]['selected'], True)
        self.assertEqual(choices[2]['query_string'], '?publication-decade=the+90s')
        # Look for books in the 2000s ----------------------------------------
        request = self.request_factory.get('/', {'publication-decade': 'the 00s'})
        changelist = self.get_changelist(request, Book, modeladmin)
        # Make sure the correct queryset is returned
        queryset = changelist.get_queryset(request)
        self.assertEqual(list(queryset), [self.gipsy_book, self.djangonaut_book])
        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][1]
        self.assertEqual(force_text(filterspec.title), 'publication decade')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[3]['display'], 'the 2000\'s')
        self.assertEqual(choices[3]['selected'], True)
        self.assertEqual(choices[3]['query_string'], '?publication-decade=the+00s')
        # Combine multiple filters -------------------------------------------
        request = self.request_factory.get('/', {'publication-decade': 'the 00s', 'author__id__exact': self.alfred.pk})
        changelist = self.get_changelist(request, Book, modeladmin)
        # Make sure the correct queryset is returned
        queryset = changelist.get_queryset(request)
        self.assertEqual(list(queryset), [self.djangonaut_book])
        # Make sure the correct choices are selected
        filterspec = changelist.get_filters(request)[0][1]
        self.assertEqual(force_text(filterspec.title), 'publication decade')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[3]['display'], 'the 2000\'s')
        self.assertEqual(choices[3]['selected'], True)
        self.assertEqual(choices[3]['query_string'], '?author__id__exact=%s&publication-decade=the+00s' % self.alfred.pk)
        filterspec = changelist.get_filters(request)[0][0]
        self.assertEqual(force_text(filterspec.title), 'Verbose Author')
        choice = select_by(filterspec.choices(changelist), "display", "alfred")
        self.assertEqual(choice['selected'], True)
        self.assertEqual(choice['query_string'], '?author__id__exact=%s&publication-decade=the+00s' % self.alfred.pk)
    def test_listfilter_without_title(self):
        """
        Any filter must define a title.
        """
        modeladmin = DecadeFilterBookAdminWithoutTitle(Book, site)
        request = self.request_factory.get('/', {})
        # Building the changelist must fail loudly, naming the broken filter.
        six.assertRaisesRegex(self, ImproperlyConfigured,
            "The list filter 'DecadeListFilterWithoutTitle' does not specify a 'title'.",
            self.get_changelist, request, Book, modeladmin)
    def test_simplelistfilter_without_parameter(self):
        """
        Any SimpleListFilter must define a parameter_name.
        """
        modeladmin = DecadeFilterBookAdminWithoutParameter(Book, site)
        request = self.request_factory.get('/', {})
        # Building the changelist must fail loudly, naming the broken filter.
        six.assertRaisesRegex(self, ImproperlyConfigured,
            "The list filter 'DecadeListFilterWithoutParameter' does not specify a 'parameter_name'.",
            self.get_changelist, request, Book, modeladmin)
    def test_simplelistfilter_with_none_returning_lookups(self):
        """
        A SimpleListFilter lookups method can return None but disables the
        filter completely.
        """
        modeladmin = DecadeFilterBookAdminWithNoneReturningLookups(Book, site)
        request = self.request_factory.get('/', {})
        changelist = self.get_changelist(request, Book, modeladmin)
        # The filter with None lookups is dropped, so no specs are produced.
        filterspec = changelist.get_filters(request)[0]
        self.assertEqual(len(filterspec), 0)
    def test_filter_with_failing_queryset(self):
        """
        Ensure that when a filter's queryset method fails, it fails loudly and
        the corresponding exception doesn't get swallowed.
        Refs #17828.
        """
        modeladmin = DecadeFilterBookAdminWithFailingQueryset(Book, site)
        request = self.request_factory.get('/', {})
        # The deliberate ZeroDivisionError must propagate, not be masked.
        self.assertRaises(ZeroDivisionError, self.get_changelist, request, Book, modeladmin)
    def test_simplelistfilter_with_queryset_based_lookups(self):
        # Only three choices are offered ("All" plus two decades), and with no
        # query parameters present only "All" is selected.
        modeladmin = DecadeFilterBookAdminWithQuerysetBasedLookups(Book, site)
        request = self.request_factory.get('/', {})
        changelist = self.get_changelist(request, Book, modeladmin)
        filterspec = changelist.get_filters(request)[0][0]
        self.assertEqual(force_text(filterspec.title), 'publication decade')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(len(choices), 3)
        self.assertEqual(choices[0]['display'], 'All')
        self.assertEqual(choices[0]['selected'], True)
        self.assertEqual(choices[0]['query_string'], '?')
        self.assertEqual(choices[1]['display'], 'the 1990\'s')
        self.assertEqual(choices[1]['selected'], False)
        self.assertEqual(choices[1]['query_string'], '?publication-decade=the+90s')
        self.assertEqual(choices[2]['display'], 'the 2000\'s')
        self.assertEqual(choices[2]['selected'], False)
        self.assertEqual(choices[2]['query_string'], '?publication-decade=the+00s')
    def test_two_characters_long_field(self):
        """
        Ensure that list_filter works with two-characters long field names.
        Refs #16080.
        """
        modeladmin = BookAdmin(Book, site)
        request = self.request_factory.get('/', {'no': '207'})
        changelist = self.get_changelist(request, Book, modeladmin)
        # Make sure the correct queryset is returned
        queryset = changelist.get_queryset(request)
        self.assertEqual(list(queryset), [self.bio_book])
        # The 'number' filter (field name 'no') must reflect the selection.
        filterspec = changelist.get_filters(request)[0][-1]
        self.assertEqual(force_text(filterspec.title), 'number')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[2]['selected'], True)
        self.assertEqual(choices[2]['query_string'], '?no=207')
    def test_parameter_ends_with__in__or__isnull(self):
        """
        Ensure that a SimpleListFilter's parameter name is not mistaken for a
        model field if it ends with '__isnull' or '__in'.
        Refs #17091.
        """
        # When it ends with '__in' -----------------------------------------
        modeladmin = DecadeFilterBookAdminParameterEndsWith__In(Book, site)
        request = self.request_factory.get('/', {'decade__in': 'the 90s'})
        changelist = self.get_changelist(request, Book, modeladmin)
        # Make sure the correct queryset is returned
        queryset = changelist.get_queryset(request)
        self.assertEqual(list(queryset), [self.bio_book])
        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][0]
        self.assertEqual(force_text(filterspec.title), 'publication decade')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[2]['display'], 'the 1990\'s')
        self.assertEqual(choices[2]['selected'], True)
        self.assertEqual(choices[2]['query_string'], '?decade__in=the+90s')
        # When it ends with '__isnull' ---------------------------------------
        modeladmin = DecadeFilterBookAdminParameterEndsWith__Isnull(Book, site)
        request = self.request_factory.get('/', {'decade__isnull': 'the 90s'})
        changelist = self.get_changelist(request, Book, modeladmin)
        # Make sure the correct queryset is returned
        queryset = changelist.get_queryset(request)
        self.assertEqual(list(queryset), [self.bio_book])
        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][0]
        self.assertEqual(force_text(filterspec.title), 'publication decade')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[2]['display'], 'the 1990\'s')
        self.assertEqual(choices[2]['selected'], True)
        self.assertEqual(choices[2]['query_string'], '?decade__isnull=the+90s')
    def test_lookup_with_non_string_value(self):
        """
        Ensure choices are set the selected class when using non-string values
        for lookups in SimpleListFilters.
        Refs #19318
        """
        modeladmin = DepartmentFilterEmployeeAdmin(Employee, site)
        # The lookup value here is a pk (an int), not a string.
        request = self.request_factory.get('/', {'department': self.john.pk})
        changelist = self.get_changelist(request, Employee, modeladmin)
        queryset = changelist.get_queryset(request)
        self.assertEqual(list(queryset), [self.john])
        filterspec = changelist.get_filters(request)[0][-1]
        self.assertEqual(force_text(filterspec.title), 'department')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[1]['display'], 'DEV')
        self.assertEqual(choices[1]['selected'], True)
        self.assertEqual(choices[1]['query_string'], '?department=%s' % self.john.pk)
    def test_fk_with_to_field(self):
        """
        Ensure that a filter on a FK respects the FK's to_field attribute.
        Refs #17972.
        """
        modeladmin = EmployeeAdmin(Employee, site)
        # Unfiltered: all employees listed, "All" selected, and the choice
        # query strings use the to_field ('code'), not the pk.
        request = self.request_factory.get('/', {})
        changelist = self.get_changelist(request, Employee, modeladmin)
        # Make sure the correct queryset is returned
        queryset = changelist.get_queryset(request)
        self.assertEqual(list(queryset), [self.jack, self.john])
        filterspec = changelist.get_filters(request)[0][-1]
        self.assertEqual(force_text(filterspec.title), 'department')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[0]['display'], 'All')
        self.assertEqual(choices[0]['selected'], True)
        self.assertEqual(choices[0]['query_string'], '?')
        self.assertEqual(choices[1]['display'], 'Development')
        self.assertEqual(choices[1]['selected'], False)
        self.assertEqual(choices[1]['query_string'], '?department__code__exact=DEV')
        self.assertEqual(choices[2]['display'], 'Design')
        self.assertEqual(choices[2]['selected'], False)
        self.assertEqual(choices[2]['query_string'], '?department__code__exact=DSN')
        # Filter by Department=='Development' --------------------------------
        request = self.request_factory.get('/', {'department__code__exact': 'DEV'})
        changelist = self.get_changelist(request, Employee, modeladmin)
        # Make sure the correct queryset is returned
        queryset = changelist.get_queryset(request)
        self.assertEqual(list(queryset), [self.john])
        filterspec = changelist.get_filters(request)[0][-1]
        self.assertEqual(force_text(filterspec.title), 'department')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[0]['display'], 'All')
        self.assertEqual(choices[0]['selected'], False)
        self.assertEqual(choices[0]['query_string'], '?')
        self.assertEqual(choices[1]['display'], 'Development')
        self.assertEqual(choices[1]['selected'], True)
        self.assertEqual(choices[1]['query_string'], '?department__code__exact=DEV')
        self.assertEqual(choices[2]['display'], 'Design')
        self.assertEqual(choices[2]['selected'], False)
        self.assertEqual(choices[2]['query_string'], '?department__code__exact=DSN')
| apache-2.0 |
person142/scipy | scipy/optimize/tests/test_linprog.py | 4 | 70340 | """
Unit test for Linear Programming
"""
import sys
import numpy as np
from numpy.testing import (assert_, assert_allclose, assert_equal,
assert_array_less, assert_warns, suppress_warnings)
from pytest import raises as assert_raises
from scipy.optimize import linprog, OptimizeWarning
from scipy.sparse.linalg import MatrixRankWarning
from scipy.linalg import LinAlgWarning
import pytest
# Optional-dependency probes. These packages are only used to filter the
# warnings they emit (e.g. UmfpackWarning below), so their absence must not
# break the test suite.
has_umfpack = True
try:
    from scikits.umfpack import UmfpackWarning
except ImportError:
    has_umfpack = False
# scikit-sparse (CHOLMOD) availability flag.
has_cholmod = True
try:
    import sksparse
except ImportError:
    has_cholmod = False
def _assert_iteration_limit_reached(res, maxiter):
assert_(not res.success, "Incorrectly reported success")
assert_(res.success < maxiter, "Incorrectly reported number of iterations")
assert_equal(res.status, 1, "Failed to report iteration limit reached")
def _assert_infeasible(res):
# res: linprog result object
assert_(not res.success, "incorrectly reported success")
assert_equal(res.status, 2, "failed to report infeasible status")
def _assert_unbounded(res):
# res: linprog result object
assert_(not res.success, "incorrectly reported success")
assert_equal(res.status, 3, "failed to report unbounded status")
def _assert_unable_to_find_basic_feasible_sol(res):
# res: linprog result object
# The status may be either 2 or 4 depending on why the feasible solution
# could not be found. If the undelying problem is expected to not have a
# feasible solution, _assert_infeasible should be used.
assert_(not res.success, "incorrectly reported success")
assert_(res.status in (2, 4), "failed to report optimization failure")
def _assert_success(res, desired_fun=None, desired_x=None,
rtol=1e-8, atol=1e-8):
# res: linprog result object
# desired_fun: desired objective function value or None
# desired_x: desired solution or None
if not res.success:
msg = "linprog status {0}, message: {1}".format(res.status,
res.message)
raise AssertionError(msg)
assert_equal(res.status, 0)
if desired_fun is not None:
assert_allclose(res.fun, desired_fun,
err_msg="converged to an unexpected objective value",
rtol=rtol, atol=atol)
if desired_x is not None:
assert_allclose(res.x, desired_x,
err_msg="converged to an unexpected solution",
rtol=rtol, atol=atol)
def magic_square(n):
    """
    Generates a linear program for which integer solutions represent an
    n x n magic square; binary decision variables represent the presence
    (or absence) of an integer 1 to n^2 in each position of the square.

    Returns (A, b, c, numbers): equality constraints ``A x == b``, a seeded
    random objective ``c``, and the value tensor ``numbers`` of shape
    (n**2, n, n).
    """
    np.random.seed(0)
    M = n * (n**2 + 1) / 2  # the magic constant: common row/col/diagonal sum
    numbers = np.arange(n**4) // n**2 + 1
    numbers = numbers.reshape(n**2, n, n)
    zeros = np.zeros((n**2, n, n))
    A_list = []
    b_list = []

    def add(mask, rhs):
        # Record one equality constraint: mask . x == rhs.
        A_list.append(mask.flatten())
        b_list.append(rhs)

    # Rule 1: use every number exactly once
    for k in range(n**2):
        mask = zeros.copy()
        mask[k, :, :] = 1
        add(mask, 1)
    # Rule 2: Only one number per square
    for r in range(n):
        for s in range(n):
            mask = zeros.copy()
            mask[:, r, s] = 1
            add(mask, 1)
    # Rule 3: sum of rows is M
    for r in range(n):
        mask = zeros.copy()
        mask[:, r, :] = numbers[:, r, :]
        add(mask, M)
    # Rule 4: sum of columns is M
    for s in range(n):
        mask = zeros.copy()
        mask[:, :, s] = numbers[:, :, s]
        add(mask, M)
    # Rule 5: sums of both diagonals are M
    diag = range(n)
    mask = zeros.copy()
    mask[:, diag, diag] = numbers[:, diag, diag]
    add(mask, M)
    anti = range(-1, -n - 1, -1)
    mask = zeros.copy()
    mask[:, diag, anti] = numbers[:, diag, anti]
    add(mask, M)

    A = np.array(np.vstack(A_list), dtype=float)
    b = np.array(b_list, dtype=float)
    # Seeded above, so the objective is reproducible across calls.
    c = np.random.rand(A.shape[1])
    return A, b, c, numbers
def lpgen_2d(m, n):
    """ -> A b c LP test: m*n vars, m+n constraints
        row sums == n/m, col sums == 1
        https://gist.github.com/denis-bz/8647461
    """
    np.random.seed(0)
    # Seeded negative-exponential cost per (row, col) cell; reproducible.
    c = - np.random.exponential(size=(m, n))
    # Row-sum constraints: each block of n consecutive variables sums to n/m.
    Arow = np.kron(np.eye(m), np.ones((1, n)))
    brow = np.full(m, n / m)
    # Column-sum constraints: every n-th variable (same column) sums to 1.
    Acol = np.tile(np.eye(n), (1, m))
    bcol = np.ones(n)
    A = np.vstack((Arow, Acol))
    b = np.hstack((brow, bcol))
    return A, b, c.ravel()
def nontrivial_problem():
    # Fixture: a small LP exercising both inequality (A_ub/b_ub) and equality
    # (A_eq/b_eq) constraints with negative resource limits.  Returns the
    # problem data plus its known optimum (x_star, f_star), both exact
    # rationals over 1391, so solvers can be checked for accuracy.
    c = [-1, 8, 4, -6]
    A_ub = [[-7, -7, 6, 9],
            [1, -1, -3, 0],
            [10, -10, -7, 7],
            [6, -1, 3, 4]]
    b_ub = [-3, 6, -6, 6]
    A_eq = [[-10, 1, 1, -8]]
    b_eq = [-4]
    x_star = [101 / 1391, 1462 / 1391, 0, 752 / 1391]
    f_star = 7083 / 1391
    return c, A_ub, b_ub, A_eq, b_eq, x_star, f_star
def generic_callback_test(self):
    # Check that callback is as advertised
    last_cb = {}
    def cb(res):
        # The pops mutate ``res``: bookkeeping fields are removed and
        # type-checked first, so only x/fun/slack/con remain afterwards.
        message = res.pop('message')
        complete = res.pop('complete')
        assert_(res.pop('phase') in (1, 2))
        assert_(res.pop('status') in range(4))
        assert_(isinstance(res.pop('nit'), int))
        assert_(isinstance(complete, bool))
        assert_(isinstance(message, str))
        # Remember the most recent iterate to compare with the final result.
        last_cb['x'] = res['x']
        last_cb['fun'] = res['fun']
        last_cb['slack'] = res['slack']
        last_cb['con'] = res['con']
    c = np.array([-3, -2])
    A_ub = [[2, 1], [1, 1], [1, 0]]
    b_ub = [10, 8, 4]
    res = linprog(c, A_ub=A_ub, b_ub=b_ub, callback=cb, method=self.method)
    _assert_success(res, desired_fun=-18.0, desired_x=[2, 6])
    # The last callback invocation must agree with the returned solution.
    assert_allclose(last_cb['fun'], res['fun'])
    assert_allclose(last_cb['x'], res['x'])
    assert_allclose(last_cb['con'], res['con'])
    assert_allclose(last_cb['slack'], res['slack'])
def test_unknown_solver():
    """linprog must raise ValueError for an unrecognized ``method`` name."""
    c = np.array([-3, -2])
    A_ub = [[2, 1], [1, 1], [1, 0]]
    b_ub = [10, 8, 4]
    with assert_raises(ValueError):
        linprog(c, A_ub=A_ub, b_ub=b_ub, method='ekki-ekki-ekki')
# Module-level defaults: test methods that do not rebind one of these locally
# fall back to these None values in the positional
# linprog(c, A_ub, b_ub, A_eq, b_eq, bounds, ...) calls below.
A_ub = None
b_ub = None
A_eq = None
b_eq = None
bounds = None
################
# Common Tests #
################
class LinprogCommonTests(object):
"""
Base class for `linprog` tests. Generally, each test will be performed
once for every derived class of LinprogCommonTests, each of which will
typically change self.options and/or self.method. Effectively, these tests
are run for many combination of method (simplex, revised simplex, and
interior point) and options (such as pivoting rule or sparse treatment).
"""
##################
# Targeted Tests #
##################
    def test_callback(self):
        # Delegate to the shared module-level check of the callback protocol.
        generic_callback_test(self)
    def test_disp(self):
        # test that display option does not break anything.
        A, b, c = lpgen_2d(20, 20)
        res = linprog(c, A_ub=A, b_ub=b, method=self.method,
                      options={"disp": True})
        _assert_success(res, desired_fun=-64.049494229)
    def test_docstring_example(self):
        # Example from linprog docstring.
        c = [-1, 4]
        A = [[-3, 1], [1, 2]]
        b = [6, 4]
        x0_bounds = (None, None)  # x0 unbounded in both directions
        x1_bounds = (-3, None)    # x1 bounded below only
        res = linprog(c, A_ub=A, b_ub=b, bounds=(x0_bounds, x1_bounds),
                      options=self.options, method=self.method)
        _assert_success(res, desired_fun=-22)
    def test_type_error(self):
        # (presumably) checks that linprog recognizes type errors
        # This is tested more carefully in test__linprog_clean_inputs.py
        c = [1]
        A_eq = [[1]]
        b_eq = "hello"  # deliberately the wrong type
        assert_raises(TypeError, linprog,
                      c, A_eq=A_eq, b_eq=b_eq,
                      method=self.method, options=self.options)
    def test_aliasing_b_ub(self):
        # (presumably) checks that linprog does not modify b_ub
        # This is tested more carefully in test__linprog_clean_inputs.py
        # A_eq/b_eq here are the module-level None defaults.
        c = np.array([1.0])
        A_ub = np.array([[1.0]])
        b_ub_orig = np.array([3.0])
        b_ub = b_ub_orig.copy()
        bounds = (-4.0, np.inf)
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options)
        _assert_success(res, desired_fun=-4, desired_x=[-4])
        # The caller's array must be untouched after the solve.
        assert_allclose(b_ub_orig, b_ub)
    def test_aliasing_b_eq(self):
        # (presumably) checks that linprog does not modify b_eq
        # This is tested more carefully in test__linprog_clean_inputs.py
        # A_ub/b_ub here are the module-level None defaults.
        c = np.array([1.0])
        A_eq = np.array([[1.0]])
        b_eq_orig = np.array([3.0])
        b_eq = b_eq_orig.copy()
        bounds = (-4.0, np.inf)
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options)
        _assert_success(res, desired_fun=3, desired_x=[3])
        # The caller's array must be untouched after the solve.
        assert_allclose(b_eq_orig, b_eq)
    def test_non_ndarray_args(self):
        # (presumably) checks that linprog accepts list in place of arrays
        # This is tested more carefully in test__linprog_clean_inputs.py
        c = [1.0]
        A_ub = [[1.0]]
        b_ub = [3.0]
        A_eq = [[1.0]]
        b_eq = [2.0]
        bounds = (-1.0, 10.0)
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options)
        _assert_success(res, desired_fun=2, desired_x=[2])
    def test_unknown_options(self):
        # An unrecognized key in ``options`` must trigger an OptimizeWarning.
        c = np.array([-3, -2])
        A_ub = [[2, 1], [1, 1], [1, 0]]
        b_ub = [10, 8, 4]
        def f(c, A_ub=None, b_ub=None, A_eq=None,
              b_eq=None, bounds=None, options={}):
            linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                    method=self.method, options=options)
        # Copy so the shared self.options dict is not polluted with 'spam'.
        o = {key: self.options[key] for key in self.options}
        o['spam'] = 42
        assert_warns(OptimizeWarning, f,
                     c, A_ub=A_ub, b_ub=b_ub, options=o)
    def test_invalid_inputs(self):
        # Malformed shapes/bounds must raise ValueError during input cleaning.
        def f(c, A_ub=None, b_ub=None, A_eq=None, b_eq=None, bounds=None):
            linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                    method=self.method, options=self.options)
        # Removed [(5, 0), (1, 2), (3, 4)]: these are invalid bounds but should be subject to a check in _presolve, not in _clean_inputs.
        # The optimization should exit with an 'infeasible problem' error, not with a ValueError
        # Same for [(1, 2), (np.inf, np.inf), (3, 4)] and [(1, 2), (-np.inf, -np.inf), (3, 4)]
        for bad_bound in [[(1, 2), (3, 4)],
                          [(1, 2), (3, 4), (3, 4, 5)],
                          ]:
            assert_raises(ValueError, f, [1, 2, 3], bounds=bad_bound)
        assert_raises(ValueError, f, [1, 2], A_ub=[[1, 2]], b_ub=[1, 2])
        assert_raises(ValueError, f, [1, 2], A_ub=[[1]], b_ub=[1])
        assert_raises(ValueError, f, [1, 2], A_eq=[[1, 2]], b_eq=[1, 2])
        assert_raises(ValueError, f, [1, 2], A_eq=[[1]], b_eq=[1])
        assert_raises(ValueError, f, [1, 2], A_eq=[1], b_eq=1)
        # this last check doesn't make sense for sparse presolve
        if ("_sparse_presolve" in self.options and
                self.options["_sparse_presolve"]):
            return
        # there aren't 3-D sparse matrices
        assert_raises(ValueError, f, [1, 2], A_ub=np.zeros((1, 1, 3)), b_eq=1)
def test_empty_constraint_1(self):
c = [-1, -2]
res = linprog(c, method=self.method, options=self.options)
_assert_unbounded(res)
def test_empty_constraint_2(self):
c = [-1, 1, -1, 1]
bounds = [(0, np.inf), (-np.inf, 0), (-1, 1), (-1, 1)]
res = linprog(c, bounds=bounds,
method=self.method, options=self.options)
_assert_unbounded(res)
# Unboundedness detected in presolve requires no iterations
if self.options.get('presolve', True):
assert_equal(res.nit, 0)
def test_empty_constraint_3(self):
c = [1, -1, 1, -1]
bounds = [(0, np.inf), (-np.inf, 0), (-1, 1), (-1, 1)]
res = linprog(c, bounds=bounds,
method=self.method, options=self.options)
_assert_success(res, desired_x=[0, 0, -1, 1], desired_fun=-2)
def test_inequality_constraints(self):
# Minimize linear function subject to linear inequality constraints.
# http://www.dam.brown.edu/people/huiwang/classes/am121/Archive/simplex_121_c.pdf
c = np.array([3, 2]) * -1 # maximize
A_ub = [[2, 1],
[1, 1],
[1, 0]]
b_ub = [10, 8, 4]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=-18, desired_x=[2, 6])
def test_inequality_constraints2(self):
# Minimize linear function subject to linear inequality constraints.
# http://www.statslab.cam.ac.uk/~ff271/teaching/opt/notes/notes8.pdf
# (dead link)
c = [6, 3]
A_ub = [[0, 3],
[-1, -1],
[-2, 1]]
b_ub = [2, -1, -1]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=5, desired_x=[2 / 3, 1 / 3])
def test_bounds_simple(self):
c = [1, 2]
bounds = (1, 2)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_x=[1, 1])
bounds = [(1, 2), (1, 2)]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_x=[1, 1])
def test_bounded_below_only_1(self):
c = np.array([1.0])
A_eq = np.array([[1.0]])
b_eq = np.array([3.0])
bounds = (1.0, None)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=3, desired_x=[3])
def test_bounded_below_only_2(self):
c = np.ones(3)
A_eq = np.eye(3)
b_eq = np.array([1, 2, 3])
bounds = (0.5, np.inf)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_x=b_eq, desired_fun=np.sum(b_eq))
def test_bounded_above_only_1(self):
c = np.array([1.0])
A_eq = np.array([[1.0]])
b_eq = np.array([3.0])
bounds = (None, 10.0)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=3, desired_x=[3])
def test_bounded_above_only_2(self):
c = np.ones(3)
A_eq = np.eye(3)
b_eq = np.array([1, 2, 3])
bounds = (-np.inf, 4)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_x=b_eq, desired_fun=np.sum(b_eq))
def test_bounds_infinity(self):
c = np.ones(3)
A_eq = np.eye(3)
b_eq = np.array([1, 2, 3])
bounds = (-np.inf, np.inf)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_x=b_eq, desired_fun=np.sum(b_eq))
def test_bounds_mixed(self):
# Problem has one unbounded variable and
# another with a negative lower bound.
c = np.array([-1, 4]) * -1 # maximize
A_ub = np.array([[-3, 1],
[1, 2]], dtype=np.float64)
b_ub = [6, 4]
x0_bounds = (-np.inf, np.inf)
x1_bounds = (-3, np.inf)
bounds = (x0_bounds, x1_bounds)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=-80 / 7, desired_x=[-8 / 7, 18 / 7])
def test_bounds_equal_but_infeasible(self):
c = [-4, 1]
A_ub = [[7, -2], [0, 1], [2, -2]]
b_ub = [14, 0, 3]
bounds = [(2, 2), (0, None)]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_infeasible(res)
def test_bounds_equal_but_infeasible2(self):
c = [-4, 1]
A_eq = [[7, -2], [0, 1], [2, -2]]
b_eq = [14, 0, 3]
bounds = [(2, 2), (0, None)]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_infeasible(res)
def test_bounds_equal_no_presolve(self):
# There was a bug when a lower and upper bound were equal but
# presolve was not on to eliminate the variable. The bound
# was being converted to an equality constraint, but the bound
# was not eliminated, leading to issues in postprocessing.
c = [1, 2]
A_ub = [[1, 2], [1.1, 2.2]]
b_ub = [4, 8]
bounds = [(1, 2), (2, 2)]
o = {key: self.options[key] for key in self.options}
o["presolve"] = False
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=o)
_assert_infeasible(res)
def test_zero_column_1(self):
m, n = 3, 4
np.random.seed(0)
c = np.random.rand(n)
c[1] = 1
A_eq = np.random.rand(m, n)
A_eq[:, 1] = 0
b_eq = np.random.rand(m)
A_ub = [[1, 0, 1, 1]]
b_ub = 3
bounds = [(-10, 10), (-10, 10), (-10, None), (None, None)]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=-9.7087836730413404)
def test_zero_column_2(self):
np.random.seed(0)
m, n = 2, 4
c = np.random.rand(n)
c[1] = -1
A_eq = np.random.rand(m, n)
A_eq[:, 1] = 0
b_eq = np.random.rand(m)
A_ub = np.random.rand(m, n)
A_ub[:, 1] = 0
b_ub = np.random.rand(m)
bounds = (None, None)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_unbounded(res)
# Unboundedness detected in presolve
if self.options.get('presolve', True):
assert_equal(res.nit, 0)
def test_zero_row_1(self):
c = [1, 2, 3]
A_eq = [[0, 0, 0], [1, 1, 1], [0, 0, 0]]
b_eq = [0, 3, 0]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=3)
def test_zero_row_2(self):
A_ub = [[0, 0, 0], [1, 1, 1], [0, 0, 0]]
b_ub = [0, 3, 0]
c = [1, 2, 3]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=0)
def test_zero_row_3(self):
m, n = 2, 4
c = np.random.rand(n)
A_eq = np.random.rand(m, n)
A_eq[0, :] = 0
b_eq = np.random.rand(m)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_infeasible(res)
# Infeasibility detected in presolve
if self.options.get('presolve', True):
assert_equal(res.nit, 0)
def test_zero_row_4(self):
m, n = 2, 4
c = np.random.rand(n)
A_ub = np.random.rand(m, n)
A_ub[0, :] = 0
b_ub = -np.random.rand(m)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_infeasible(res)
# Infeasibility detected in presolve
if self.options.get('presolve', True):
assert_equal(res.nit, 0)
def test_singleton_row_eq_1(self):
c = [1, 1, 1, 2]
A_eq = [[1, 0, 0, 0], [0, 2, 0, 0], [1, 0, 0, 0], [1, 1, 1, 1]]
b_eq = [1, 2, 2, 4]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_infeasible(res)
# Infeasibility detected in presolve
if self.options.get('presolve', True):
assert_equal(res.nit, 0)
def test_singleton_row_eq_2(self):
c = [1, 1, 1, 2]
A_eq = [[1, 0, 0, 0], [0, 2, 0, 0], [1, 0, 0, 0], [1, 1, 1, 1]]
b_eq = [1, 2, 1, 4]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=4)
def test_singleton_row_ub_1(self):
c = [1, 1, 1, 2]
A_ub = [[1, 0, 0, 0], [0, 2, 0, 0], [-1, 0, 0, 0], [1, 1, 1, 1]]
b_ub = [1, 2, -2, 4]
bounds = [(None, None), (0, None), (0, None), (0, None)]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_infeasible(res)
# Infeasibility detected in presolve
if self.options.get('presolve', True):
assert_equal(res.nit, 0)
def test_singleton_row_ub_2(self):
c = [1, 1, 1, 2]
A_ub = [[1, 0, 0, 0], [0, 2, 0, 0], [-1, 0, 0, 0], [1, 1, 1, 1]]
b_ub = [1, 2, -0.5, 4]
bounds = [(None, None), (0, None), (0, None), (0, None)]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=0.5)
def test_infeasible(self):
# Test linprog response to an infeasible problem
c = [-1, -1]
A_ub = [[1, 0],
[0, 1],
[-1, -1]]
b_ub = [2, 2, -5]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_infeasible(res)
def test_infeasible_inequality_bounds(self):
c = [1]
A_ub = [[2]]
b_ub = 4
bounds = (5, 6)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_infeasible(res)
# Infeasibility detected in presolve
if self.options.get('presolve', True):
assert_equal(res.nit, 0)
def test_unbounded(self):
# Test linprog response to an unbounded problem
c = np.array([1, 1]) * -1 # maximize
A_ub = [[-1, 1],
[-1, -1]]
b_ub = [-1, -2]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_unbounded(res)
def test_unbounded_below_no_presolve_corrected(self):
c = [1]
bounds = [(None, 1)]
o = {key: self.options[key] for key in self.options}
o["presolve"] = False
res = linprog(c=c, bounds=bounds,
method=self.method,
options=o)
if self.method == "revised simplex":
# Revised simplex has a special pathway for no constraints.
assert_equal(res.status, 5)
else:
_assert_unbounded(res)
def test_unbounded_no_nontrivial_constraints_1(self):
"""
Test whether presolve pathway for detecting unboundedness after
constraint elimination is working.
"""
c = np.array([0, 0, 0, 1, -1, -1])
A_ub = np.array([[1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, -1]])
b_ub = np.array([2, -2, 0])
bounds = [(None, None), (None, None), (None, None),
(-1, 1), (-1, 1), (0, None)]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_unbounded(res)
assert_equal(res.x[-1], np.inf)
assert_equal(res.message[:36], "The problem is (trivially) unbounded")
def test_unbounded_no_nontrivial_constraints_2(self):
"""
Test whether presolve pathway for detecting unboundedness after
constraint elimination is working.
"""
c = np.array([0, 0, 0, 1, -1, 1])
A_ub = np.array([[1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1]])
b_ub = np.array([2, -2, 0])
bounds = [(None, None), (None, None), (None, None),
(-1, 1), (-1, 1), (None, 0)]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_unbounded(res)
assert_equal(res.x[-1], -np.inf)
assert_equal(res.message[:36], "The problem is (trivially) unbounded")
def test_cyclic_recovery(self):
# Test linprogs recovery from cycling using the Klee-Minty problem
# Klee-Minty https://www.math.ubc.ca/~israel/m340/kleemin3.pdf
c = np.array([100, 10, 1]) * -1 # maximize
A_ub = [[1, 0, 0],
[20, 1, 0],
[200, 20, 1]]
b_ub = [1, 100, 10000]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_x=[0, 0, 10000], atol=5e-6, rtol=1e-7)
def test_cyclic_bland(self):
# Test the effect of Bland's rule on a cycling problem
c = np.array([-10, 57, 9, 24.])
A_ub = np.array([[0.5, -5.5, -2.5, 9],
[0.5, -1.5, -0.5, 1],
[1, 0, 0, 0]])
b_ub = [0, 0, 1]
# copy the existing options dictionary but change maxiter
maxiter = 100
o = {key: val for key, val in self.options.items()}
o['maxiter'] = maxiter
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=o)
if self.method == 'simplex' and not self.options.get('bland'):
# simplex cycles without Bland's rule
_assert_iteration_limit_reached(res, o['maxiter'])
else:
# other methods, including simplex with Bland's rule, succeed
_assert_success(res, desired_x=[1, 0, 1, 0])
# note that revised simplex skips this test because it may or may not
# cycle depending on the initial basis
def test_remove_redundancy_infeasibility(self):
# mostly a test of redundancy removal, which is carefully tested in
# test__remove_redundancy.py
m, n = 10, 10
c = np.random.rand(n)
A_eq = np.random.rand(m, n)
b_eq = np.random.rand(m)
A_eq[-1, :] = 2 * A_eq[-2, :]
b_eq[-1] *= -1
with suppress_warnings() as sup:
sup.filter(OptimizeWarning, "A_eq does not appear...")
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_infeasible(res)
#################
# General Tests #
#################
def test_nontrivial_problem(self):
# Problem involves all constraint types,
# negative resource limits, and rounding issues.
c, A_ub, b_ub, A_eq, b_eq, x_star, f_star = nontrivial_problem()
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=f_star, desired_x=x_star)
def test_lpgen_problem(self):
# Test linprog with a rather large problem (400 variables,
# 40 constraints) generated by https://gist.github.com/denis-bz/8647461
A_ub, b_ub, c = lpgen_2d(20, 20)
with suppress_warnings() as sup:
sup.filter(OptimizeWarning, "Solving system with option 'sym_pos'")
sup.filter(RuntimeWarning, "invalid value encountered")
sup.filter(LinAlgWarning)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=-64.049494229)
def test_network_flow(self):
# A network flow problem with supply and demand at nodes
# and with costs along directed edges.
# https://www.princeton.edu/~rvdb/542/lectures/lec10.pdf
c = [2, 4, 9, 11, 4, 3, 8, 7, 0, 15, 16, 18]
n, p = -1, 1
A_eq = [
[n, n, p, 0, p, 0, 0, 0, 0, p, 0, 0],
[p, 0, 0, p, 0, p, 0, 0, 0, 0, 0, 0],
[0, 0, n, n, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, p, p, 0, 0, p, 0],
[0, 0, 0, 0, n, n, n, 0, p, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, n, n, 0, 0, p],
[0, 0, 0, 0, 0, 0, 0, 0, 0, n, n, n]]
b_eq = [0, 19, -16, 33, 0, 0, -36]
with suppress_warnings() as sup:
sup.filter(LinAlgWarning)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=755, atol=1e-6, rtol=1e-7)
def test_network_flow_limited_capacity(self):
# A network flow problem with supply and demand at nodes
# and with costs and capacities along directed edges.
# http://blog.sommer-forst.de/2013/04/10/
c = [2, 2, 1, 3, 1]
bounds = [
[0, 4],
[0, 2],
[0, 2],
[0, 3],
[0, 5]]
n, p = -1, 1
A_eq = [
[n, n, 0, 0, 0],
[p, 0, n, n, 0],
[0, p, p, 0, n],
[0, 0, 0, p, p]]
b_eq = [-4, 0, 0, 4]
with suppress_warnings() as sup:
# this is an UmfpackWarning but I had trouble importing it
if has_umfpack:
sup.filter(UmfpackWarning)
sup.filter(RuntimeWarning, "scipy.linalg.solve\nIll...")
sup.filter(OptimizeWarning, "A_eq does not appear...")
sup.filter(OptimizeWarning, "Solving system with option...")
sup.filter(LinAlgWarning)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=14)
def test_simplex_algorithm_wikipedia_example(self):
# https://en.wikipedia.org/wiki/Simplex_algorithm#Example
c = [-2, -3, -4]
A_ub = [
[3, 2, 1],
[2, 5, 3]]
b_ub = [10, 15]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=-20)
    def test_enzo_example(self):
        # https://github.com/scipy/scipy/issues/1779 lp2.py
        #
        # Translated from Octave code at:
        # http://www.ecs.shimane-u.ac.jp/~kyoshida/lpeng.htm
        # and placed under MIT licence by Enzo Michelangeli
        # with permission explicitly granted by the original author,
        # Prof. Kazunobu Yoshida
        # NOTE(review): A_ub, b_ub and bounds resolve to module-level
        # defaults defined before this chunk -- presumably None.
        c = [4, 8, 3, 0, 0, 0]
        # Last three columns are surplus variables for >= constraints.
        A_eq = [
            [2, 5, 3, -1, 0, 0],
            [3, 2.5, 8, 0, -1, 0],
            [8, 10, 4, 0, 0, -1]]
        b_eq = [185, 155, 600]
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options)
        _assert_success(res, desired_fun=317.5,
                        desired_x=[66.25, 0, 17.5, 0, 183.75, 0],
                        atol=6e-6, rtol=1e-7)
    def test_enzo_example_b(self):
        # rescued from https://github.com/scipy/scipy/pull/218
        # The equality system is rank-deficient, hence the filtered
        # "A_eq does not appear..." redundancy warning below.
        c = [2.8, 6.3, 10.8, -2.8, -6.3, -10.8]
        A_eq = [[-1, -1, -1, 0, 0, 0],
                [0, 0, 0, 1, 1, 1],
                [1, 0, 0, 1, 0, 0],
                [0, 1, 0, 0, 1, 0],
                [0, 0, 1, 0, 0, 1]]
        b_eq = [-0.5, 0.4, 0.3, 0.3, 0.3]
        with suppress_warnings() as sup:
            sup.filter(OptimizeWarning, "A_eq does not appear...")
            res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                          method=self.method, options=self.options)
        _assert_success(res, desired_fun=-1.77,
                        desired_x=[0.3, 0.2, 0.0, 0.0, 0.1, 0.3])
def test_enzo_example_c_with_degeneracy(self):
# rescued from https://github.com/scipy/scipy/pull/218
m = 20
c = -np.ones(m)
tmp = 2 * np.pi * np.arange(1, m + 1) / (m + 1)
A_eq = np.vstack((np.cos(tmp) - 1, np.sin(tmp)))
b_eq = [0, 0]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=0, desired_x=np.zeros(m))
    def test_enzo_example_c_with_unboundedness(self):
        # rescued from https://github.com/scipy/scipy/pull/218
        # Same family as the degenerate variant above, but sized/shifted so
        # that the feasible region is unbounded in the descent direction.
        m = 50
        c = -np.ones(m)
        tmp = 2 * np.pi * np.arange(m) / (m + 1)
        A_eq = np.vstack((np.cos(tmp) - 1, np.sin(tmp)))
        b_eq = [0, 0]
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options)
        _assert_unbounded(res)
def test_enzo_example_c_with_infeasibility(self):
# rescued from https://github.com/scipy/scipy/pull/218
m = 50
c = -np.ones(m)
tmp = 2 * np.pi * np.arange(m) / (m + 1)
A_eq = np.vstack((np.cos(tmp) - 1, np.sin(tmp)))
b_eq = [1, 1]
o = {key: self.options[key] for key in self.options}
o["presolve"] = False
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=o)
_assert_infeasible(res)
    def test_basic_artificial_vars(self):
        # Problem is chosen to test two phase simplex methods when at the end
        # of phase 1 some artificial variables remain in the basis.
        # Also, for `method='simplex'`, the row in the tableau corresponding
        # with the artificial variables is not all zero.
        # NOTE(review): bounds resolves to the module-level default defined
        # before this chunk -- presumably None (i.e. x >= 0).
        c = np.array([-0.1, -0.07, 0.004, 0.004, 0.004, 0.004])
        A_ub = np.array([[1.0, 0, 0, 0, 0, 0], [-1.0, 0, 0, 0, 0, 0],
                         [0, -1.0, 0, 0, 0, 0], [0, 1.0, 0, 0, 0, 0],
                         [1.0, 1.0, 0, 0, 0, 0]])
        b_ub = np.array([3.0, 3.0, 3.0, 3.0, 20.0])
        A_eq = np.array([[1.0, 0, -1, 1, -1, 1], [0, -1.0, -1, 1, -1, 1]])
        b_eq = np.array([0, 0])
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options)
        _assert_success(res, desired_fun=0, desired_x=np.zeros_like(c),
                        atol=2e-6)
#################
# Bug Fix Tests #
#################
    def test_bug_5400(self):
        # https://github.com/scipy/scipy/issues/5400
        # Large, poorly scaled problem that historically triggered numerical
        # failures; the exact fixture values below must not be altered.
        bounds = [
            (0, None),
            (0, 100), (0, 100), (0, 100), (0, 100), (0, 100), (0, 100),
            (0, 900), (0, 900), (0, 900), (0, 900), (0, 900), (0, 900),
            (0, None), (0, None), (0, None), (0, None), (0, None), (0, None)]
        # Shorthand for repeated fractional / large-magnitude coefficients.
        f = 1 / 9
        g = -1e4
        h = -3.1
        A_ub = np.array([
            [1, -2.99, 0, 0, -3, 0, 0, 0, -1, -1, 0, -1, -1, 1, 1, 0, 0, 0, 0],
            [1, 0, -2.9, h, 0, -3, 0, -1, 0, 0, -1, 0, -1, 0, 0, 1, 1, 0, 0],
            [1, 0, 0, h, 0, 0, -3, -1, -1, 0, -1, -1, 0, 0, 0, 0, 0, 1, 1],
            [0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1],
            [0, 1.99, -1, -1, 0, 0, 0, -1, f, f, 0, 0, 0, g, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 2, -1, -1, 0, 0, 0, -1, f, f, 0, g, 0, 0, 0, 0],
            [0, -1, 1.9, 2.1, 0, 0, 0, f, -1, -1, 0, 0, 0, 0, 0, g, 0, 0, 0],
            [0, 0, 0, 0, -1, 2, -1, 0, 0, 0, f, -1, f, 0, 0, 0, g, 0, 0],
            [0, -1, -1, 2.1, 0, 0, 0, f, f, -1, 0, 0, 0, 0, 0, 0, 0, g, 0],
            [0, 0, 0, 0, -1, -1, 2, 0, 0, 0, f, f, -1, 0, 0, 0, 0, 0, g]])
        b_ub = np.array([
            0.0, 0, 0, 100, 100, 100, 100, 100, 100, 900, 900, 900, 900, 900,
            900, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
        c = np.array([-1.0, 1, 1, 1, 1, 1, 1, 1, 1,
                      1, 1, 1, 1, 0, 0, 0, 0, 0, 0])
        with suppress_warnings() as sup:
            sup.filter(OptimizeWarning,
                       "Solving system with option 'sym_pos'")
            sup.filter(RuntimeWarning, "invalid value encountered")
            sup.filter(LinAlgWarning)
            res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                          method=self.method, options=self.options)
        _assert_success(res, desired_fun=-106.63507541835018)
    def test_bug_6139(self):
        # linprog(method='simplex') fails to find a basic feasible solution
        # if phase 1 pseudo-objective function is outside the provided tol.
        # https://github.com/scipy/scipy/issues/6139
        # Note: This is not strictly a bug as the default tolerance determines
        # if a result is "close enough" to zero and should not be expected
        # to work for all cases.
        c = np.array([1, 1, 1])
        A_eq = np.array([[1., 0., 0.], [-1000., 0., - 1000.]])
        b_eq = np.array([5.00000000e+00, -1.00000000e+04])
        A_ub = -np.array([[0., 1000000., 1010000.]])
        b_ub = -np.array([10000000.])
        # Single (lo, hi) pair applies to every variable: fully unbounded.
        bounds = (None, None)
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options)
        _assert_success(res, desired_fun=14.95,
                        desired_x=np.array([5, 4.95, 5]))
    def test_bug_6690(self):
        # linprog simplex used to violate bound constraint despite reporting
        # success.
        # https://github.com/scipy/scipy/issues/6690
        A_eq = np.array([[0, 0, 0, 0.93, 0, 0.65, 0, 0, 0.83, 0]])
        b_eq = np.array([0.9626])
        A_ub = np.array([
            [0, 0, 0, 1.18, 0, 0, 0, -0.2, 0, -0.22],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0.43, 0, 0, 0, 0, 0, 0],
            [0, -1.22, -0.25, 0, 0, 0, -2.06, 0, 0, 1.37],
            [0, 0, 0, 0, 0, 0, 0, -0.25, 0, 0]
        ])
        b_ub = np.array([0.615, 0, 0.172, -0.869, -0.022])
        # Rows are (lower, upper) per variable; transpose to (n, 2) shape.
        bounds = np.array([
            [-0.84, -0.97, 0.34, 0.4, -0.33, -0.74, 0.47, 0.09, -1.45, -0.73],
            [0.37, 0.02, 2.86, 0.86, 1.18, 0.5, 1.76, 0.17, 0.32, -0.15]
        ]).T
        c = np.array([
            -1.64, 0.7, 1.8, -1.06, -1.16, 0.26, 2.13, 1.53, 0.66, 0.28
        ])
        with suppress_warnings() as sup:
            if has_umfpack:
                sup.filter(UmfpackWarning)
            sup.filter(OptimizeWarning,
                       "Solving system with option 'cholesky'")
            sup.filter(OptimizeWarning, "Solving system with option 'sym_pos'")
            sup.filter(RuntimeWarning, "invalid value encountered")
            sup.filter(LinAlgWarning)
            res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                          method=self.method, options=self.options)
        desired_fun = -1.19099999999
        desired_x = np.array([0.3700, -0.9700, 0.3400, 0.4000, 1.1800,
                              0.5000, 0.4700, 0.0900, 0.3200, -0.7300])
        _assert_success(res, desired_fun=desired_fun, desired_x=desired_x)
        # Add small tol value to ensure arrays are less than or equal.
        atol = 1e-6
        # The actual regression check: solution must respect the bounds.
        assert_array_less(bounds[:, 0] - atol, res.x)
        assert_array_less(res.x, bounds[:, 1] + atol)
    def test_bug_7044(self):
        # linprog simplex failed to "identify correct constraints" (?)
        # leading to a non-optimal solution if A is rank-deficient.
        # https://github.com/scipy/scipy/issues/7044
        # magic_square(3) yields a rank-deficient A_eq by construction.
        A_eq, b_eq, c, N = magic_square(3)
        with suppress_warnings() as sup:
            sup.filter(OptimizeWarning, "A_eq does not appear...")
            sup.filter(RuntimeWarning, "invalid value encountered")
            sup.filter(LinAlgWarning)
            res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                          method=self.method, options=self.options)
        desired_fun = 1.730550597
        _assert_success(res, desired_fun=desired_fun)
        # Solution must satisfy the equalities and be (near) nonnegative.
        assert_allclose(A_eq.dot(res.x), b_eq)
        assert_array_less(np.zeros(res.x.size) - 1e-5, res.x)
    def test_bug_7237(self):
        # https://github.com/scipy/scipy/issues/7237
        # linprog simplex "explodes" when the pivot value is very
        # close to zero.
        # Exact fixture from the issue; values must not be altered.
        c = np.array([-1, 0, 0, 0, 0, 0, 0, 0, 0])
        A_ub = np.array([
            [1., -724., 911., -551., -555., -896., 478., -80., -293.],
            [1., 566., 42., 937., 233., 883., 392., -909., 57.],
            [1., -208., -894., 539., 321., 532., -924., 942., 55.],
            [1., 857., -859., 83., 462., -265., -971., 826., 482.],
            [1., 314., -424., 245., -424., 194., -443., -104., -429.],
            [1., 540., 679., 361., 149., -827., 876., 633., 302.],
            [0., -1., -0., -0., -0., -0., -0., -0., -0.],
            [0., -0., -1., -0., -0., -0., -0., -0., -0.],
            [0., -0., -0., -1., -0., -0., -0., -0., -0.],
            [0., -0., -0., -0., -1., -0., -0., -0., -0.],
            [0., -0., -0., -0., -0., -1., -0., -0., -0.],
            [0., -0., -0., -0., -0., -0., -1., -0., -0.],
            [0., -0., -0., -0., -0., -0., -0., -1., -0.],
            [0., -0., -0., -0., -0., -0., -0., -0., -1.],
            [0., 1., 0., 0., 0., 0., 0., 0., 0.],
            [0., 0., 1., 0., 0., 0., 0., 0., 0.],
            [0., 0., 0., 1., 0., 0., 0., 0., 0.],
            [0., 0., 0., 0., 1., 0., 0., 0., 0.],
            [0., 0., 0., 0., 0., 1., 0., 0., 0.],
            [0., 0., 0., 0., 0., 0., 1., 0., 0.],
            [0., 0., 0., 0., 0., 0., 0., 1., 0.],
            [0., 0., 0., 0., 0., 0., 0., 0., 1.]
        ])
        b_ub = np.array([
            0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
            0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.])
        A_eq = np.array([[0., 1., 1., 1., 1., 1., 1., 1., 1.]])
        b_eq = np.array([[1.]])
        bounds = [(None, None)] * 9
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options)
        _assert_success(res, desired_fun=108.568535, atol=1e-6)
    def test_bug_8174(self):
        # https://github.com/scipy/scipy/issues/8174
        # The simplex method sometimes "explodes" if the pivot value is very
        # close to zero.
        A_ub = np.array([
            [22714, 1008, 13380, -2713.5, -1116],
            [-4986, -1092, -31220, 17386.5, 684],
            [-4986, 0, 0, -2713.5, 0],
            [22714, 0, 0, 17386.5, 0]])
        b_ub = np.zeros(A_ub.shape[0])
        c = -np.ones(A_ub.shape[1])
        bounds = [(0, 1)] * A_ub.shape[1]
        with suppress_warnings() as sup:
            sup.filter(RuntimeWarning, "invalid value encountered")
            sup.filter(LinAlgWarning)
            res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                          method=self.method, options=self.options)
        # With a very strict tolerance, simplex is expected (and allowed)
        # to fail to find a basic feasible solution; otherwise it succeeds.
        if self.options.get('tol', 1e-9) < 1e-10 and self.method == 'simplex':
            _assert_unable_to_find_basic_feasible_sol(res)
        else:
            _assert_success(res, desired_fun=-2.0080717488789235, atol=1e-6)
    def test_bug_8174_2(self):
        # Test supplementary example from issue 8174.
        # https://github.com/scipy/scipy/issues/8174
        # https://stackoverflow.com/questions/47717012/linprog-in-scipy-optimize-checking-solution
        c = np.array([1, 0, 0, 0, 0, 0, 0])
        A_ub = -np.identity(7)
        # b_ub / b_eq are column vectors here; linprog accepts this shape.
        b_ub = np.array([[-2], [-2], [-2], [-2], [-2], [-2], [-2]])
        A_eq = np.array([
            [1, 1, 1, 1, 1, 1, 0],
            [0.3, 1.3, 0.9, 0, 0, 0, -1],
            [0.3, 0, 0, 0, 0, 0, -2/3],
            [0, 0.65, 0, 0, 0, 0, -1/15],
            [0, 0, 0.3, 0, 0, 0, -1/15]
        ])
        b_eq = np.array([[100], [0], [0], [0], [0]])
        with suppress_warnings() as sup:
            if has_umfpack:
                sup.filter(UmfpackWarning)
            sup.filter(OptimizeWarning, "A_eq does not appear...")
            res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                          method=self.method, options=self.options)
        _assert_success(res, desired_fun=43.3333333331385)
def test_bug_8561(self):
# Test that pivot row is chosen correctly when using Bland's rule
# This was originally written for the simplex method with
# Bland's rule only, but it doesn't hurt to test all methods/options
# https://github.com/scipy/scipy/issues/8561
c = np.array([7, 0, -4, 1.5, 1.5])
A_ub = np.array([
[4, 5.5, 1.5, 1.0, -3.5],
[1, -2.5, -2, 2.5, 0.5],
[3, -0.5, 4, -12.5, -7],
[-1, 4.5, 2, -3.5, -2],
[5.5, 2, -4.5, -1, 9.5]])
b_ub = np.array([0, 0, 0, 0, 1])
res = linprog(c, A_ub=A_ub, b_ub=b_ub, options=self.options,
method=self.method)
_assert_success(res, desired_x=[0, 0, 19, 16/3, 29/3])
    def test_bug_8662(self):
        # linprog simplex used to report incorrect optimal results
        # https://github.com/scipy/scipy/issues/8662
        c = [-10, 10, 6, 3]
        A_ub = [[8, -8, -4, 6],
                [-8, 8, 4, -6],
                [-4, 4, 8, -4],
                [3, -3, -3, -10]]
        b_ub = [9, -9, -9, -4]
        bounds = [(0, None), (0, None), (0, None), (0, None)]
        desired_fun = 36.0000000000
        with suppress_warnings() as sup:
            if has_umfpack:
                sup.filter(UmfpackWarning)
            sup.filter(RuntimeWarning, "invalid value encountered")
            sup.filter(LinAlgWarning)
            res1 = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                           method=self.method, options=self.options)
        # Set boundary condition as a constraint
        # (the x3 >= 0 bound is moved into A_ub/b_ub; the two formulations
        # must give the same optimum).
        A_ub.append([0, 0, -1, 0])
        b_ub.append(0)
        bounds[2] = (None, None)
        with suppress_warnings() as sup:
            if has_umfpack:
                sup.filter(UmfpackWarning)
            sup.filter(RuntimeWarning, "invalid value encountered")
            sup.filter(LinAlgWarning)
            res2 = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                           method=self.method, options=self.options)
        rtol = 1e-5
        _assert_success(res1, desired_fun=desired_fun, rtol=rtol)
        _assert_success(res2, desired_fun=desired_fun, rtol=rtol)
def test_bug_8663(self):
# exposed a bug in presolve
# https://github.com/scipy/scipy/issues/8663
c = [1, 5]
A_eq = [[0, -7]]
b_eq = [-6]
bounds = [(0, None), (None, None)]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_x=[0, 6./7], desired_fun=5*6./7)
    def test_bug_8664(self):
        # interior-point has trouble with this when presolve is off
        # tested for interior-point with presolve off in TestLinprogIPSpecific
        # https://github.com/scipy/scipy/issues/8664
        # The equality constraints are mutually inconsistent -> infeasible.
        c = [4]
        A_ub = [[2], [5]]
        b_ub = [4, 4]
        A_eq = [[0], [-8], [9]]
        b_eq = [3, 2, 10]
        with suppress_warnings() as sup:
            sup.filter(RuntimeWarning)
            sup.filter(OptimizeWarning, "Solving system with option...")
            res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                          method=self.method, options=self.options)
        _assert_infeasible(res)
    def test_bug_8973(self):
        """
        Test whether bug described at:
        https://github.com/scipy/scipy/issues/8973
        was fixed.
        """
        c = np.array([0, 0, 0, 1, -1])
        A_ub = np.array([[1, 0, 0, 0, 0], [0, 1, 0, 0, 0]])
        b_ub = np.array([2, -2])
        bounds = [(None, None), (None, None), (None, None), (-1, 1), (-1, 1)]
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options)
        # Only x[3] and x[4] affect the objective; the rest hit constraints.
        _assert_success(res, desired_x=[2, -2, 0, -1, 1], desired_fun=-2)
    def test_bug_8973_2(self):
        """
        Additional test for:
        https://github.com/scipy/scipy/issues/8973
        suggested in
        https://github.com/scipy/scipy/pull/8985
        review by @antonior92
        """
        # Degenerate case: zero objective, one constraint x <= -2, no bounds.
        c = np.zeros(1)
        A_ub = np.array([[1]])
        b_ub = np.array([-2])
        bounds = (None, None)
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options)
        _assert_success(res, desired_x=[-2], desired_fun=0)
def test_bug_10124(self):
"""
Test for linprog docstring problem
'disp'=True caused revised simplex failure
"""
c = np.zeros(1)
A_ub = np.array([[1]])
b_ub = np.array([-2])
bounds = (None, None)
c = [-1, 4]
A_ub = [[-3, 1], [1, 2]]
b_ub = [6, 4]
bounds = [(None, None), (-3, None)]
o = {"disp": True}
o.update(self.options)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=o)
_assert_success(res, desired_x=[10, -3], desired_fun=-22)
    def test_bug_10349(self):
        """
        Test for redundancy removal tolerance issue
        https://github.com/scipy/scipy/issues/10349
        """
        A_eq = np.array([[1, 1, 0, 0, 0, 0],
                         [0, 0, 1, 1, 0, 0],
                         [0, 0, 0, 0, 1, 1],
                         [1, 0, 1, 0, 0, 0],
                         [0, 0, 0, 1, 1, 0],
                         [0, 1, 0, 0, 0, 1]])
        b_eq = np.array([221, 210, 10, 141, 198, 102])
        # axis=None flattens the mixed scalar/array inputs into a 1-D c.
        c = np.concatenate((0, 1, np.zeros(4)), axis=None)
        with suppress_warnings() as sup:
            sup.filter(OptimizeWarning, "A_eq does not appear...")
            res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                          method=self.method, options=self.options)
        _assert_success(res, desired_x=[129, 92, 12, 198, 0, 10], desired_fun=92)
    def test_bug_10466(self):
        """
        Test that autoscale fixes poorly-scaled problem
        """
        # b_eq entries are ~1e8-1e9 while A_eq entries are O(1); without
        # autoscaling this conditioning defeats the solvers.
        c = [-8., -0., -8., -0., -8., -0., -0., -0., -0., -0., -0., -0., -0.]
        A_eq = [[1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
                [0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
                [0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0.],
                [1., 0., 1., 0., 1., 0., -1., 0., 0., 0., 0., 0., 0.],
                [1., 0., 1., 0., 1., 0., 0., 1., 0., 0., 0., 0., 0.],
                [1., 0., 0., 0., 0., 0., 0., 0., 1., 0., 0., 0., 0.],
                [1., 0., 0., 0., 0., 0., 0., 0., 0., 1., 0., 0., 0.],
                [1., 0., 1., 0., 1., 0., 0., 0., 0., 0., 1., 0., 0.],
                [0., 0., 1., 0., 1., 0., 0., 0., 0., 0., 0., 1., 0.],
                [0., 0., 1., 0., 1., 0., 0., 0., 0., 0., 0., 0., 1.]]
        b_eq = [3.14572800e+08, 4.19430400e+08, 5.24288000e+08,
                1.00663296e+09, 1.07374182e+09, 1.07374182e+09,
                1.07374182e+09, 1.07374182e+09, 1.07374182e+09,
                1.07374182e+09]
        o = {"autoscale": True}
        o.update(self.options)
        with suppress_warnings() as sup:
            sup.filter(OptimizeWarning, "Solving system with option...")
            if has_umfpack:
                sup.filter(UmfpackWarning)
            sup.filter(RuntimeWarning, "scipy.linalg.solve\nIll...")
            sup.filter(RuntimeWarning, "divide by zero encountered...")
            sup.filter(RuntimeWarning, "overflow encountered...")
            sup.filter(RuntimeWarning, "invalid value encountered...")
            sup.filter(LinAlgWarning, "Ill-conditioned matrix...")
            res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                          method=self.method, options=o)
        assert_allclose(res.fun, -8589934560)
#########################
# Method-specific Tests #
#########################
class LinprogSimplexTests(LinprogCommonTests):
    """Run the shared linprog test suite with ``method='simplex'``."""
    method = "simplex"
class LinprogIPTests(LinprogCommonTests):
    """Run the shared linprog test suite with ``method='interior-point'``."""
    method = "interior-point"
class LinprogRSTests(LinprogCommonTests):
    """Run the shared linprog test suite with ``method='revised simplex'``."""
    method = "revised simplex"
    # Revised simplex does not reliably solve these problems.
    # Failure is intermittent due to the random choice of elements to complete
    # the basis after phase 1 terminates. In any case, linprog exits
    # gracefully, reporting numerical difficulties. I do not think this should
    # prevent revised simplex from being merged, as it solves the problems
    # most of the time and solves a broader range of problems than the existing
    # simplex implementation.
    # I believe that the root cause is the same for all three and that this
    # same issue prevents revised simplex from solving many other problems
    # reliably. Somehow the pivoting rule allows the algorithm to pivot into
    # a singular basis. I haven't been able to find a reference that
    # acknowledges this possibility, suggesting that there is a bug. On the
    # other hand, the pivoting rule is quite simple, and I can't find a
    # mistake, which suggests that this is a possibility with the pivoting
    # rule. Hopefully, a better pivoting rule will fix the issue.
    def test_bug_5400(self):
        """Skipped: intermittent revised-simplex failure is acceptable."""
        pytest.skip("Intermittent failure acceptable.")
    def test_bug_8662(self):
        """Skipped: intermittent revised-simplex failure is acceptable."""
        pytest.skip("Intermittent failure acceptable.")
    def test_network_flow(self):
        """Skipped: intermittent revised-simplex failure is acceptable."""
        pytest.skip("Intermittent failure acceptable.")
################################
# Simplex Option-Specific Tests #
################################
class TestLinprogSimplexDefault(LinprogSimplexTests):
    """Simplex tests with the default option set."""
    def setup_method(self):
        # Fresh per-instance dict, so the tol updates below cannot leak
        # between tests.
        self.options = {}
    def test_bug_5400(self):
        """The default simplex configuration is expected to reject this one."""
        with pytest.raises(ValueError):
            super(TestLinprogSimplexDefault, self).test_bug_5400()
    def test_bug_7237_low_tol(self):
        # Fails if the tolerance is too strict. Here, we test that
        # even if the solution is wrong, the appropriate error is raised.
        self.options.update({'tol': 1e-12})
        with pytest.raises(ValueError):
            super(TestLinprogSimplexDefault, self).test_bug_7237()
    def test_bug_8174_low_tol(self):
        # Fails if the tolerance is too strict. Here, we test that
        # even if the solution is wrong, the appropriate warning is issued.
        self.options.update({'tol': 1e-12})
        with pytest.warns(OptimizeWarning):
            super(TestLinprogSimplexDefault, self).test_bug_8174()
class TestLinprogSimplexBland(LinprogSimplexTests):
    """Simplex tests with Bland's anti-cycling pivot rule enabled."""
    def setup_method(self):
        # Fresh per-instance dict, so the tol update below cannot leak
        # between tests.
        self.options = {'bland': True}
    def test_bug_5400(self):
        """Bland's rule is also expected to reject this problem."""
        with pytest.raises(ValueError):
            super(TestLinprogSimplexBland, self).test_bug_5400()
    def test_bug_8174_low_tol(self):
        # Fails if the tolerance is too strict. Here, we test that
        # even if the solution is wrong, the appropriate error is raised.
        # With Bland's rule the warning is raised but the assertion inside
        # the base test then fails, hence the nested context managers.
        self.options.update({'tol': 1e-12})
        with pytest.raises(AssertionError):
            with pytest.warns(OptimizeWarning):
                super(TestLinprogSimplexBland, self).test_bug_8174()
class TestLinprogSimplexNoPresolve(LinprogSimplexTests):
    """Simplex tests with the presolve step disabled."""
    def setup_method(self):
        # Fresh per-instance dict, so the tol updates below cannot leak
        # between tests.
        self.options = {'presolve': False}
    # Evaluated once at class-creation time; used by the xfail marker below.
    is_32_bit = np.intp(0).itemsize < 8
    is_linux = sys.platform.startswith('linux')
    @pytest.mark.xfail(
        condition=is_32_bit and is_linux,
        reason='Fails with warning on 32-bit linux')
    def test_bug_5400(self):
        super(TestLinprogSimplexNoPresolve, self).test_bug_5400()
    def test_bug_6139_low_tol(self):
        # Linprog(method='simplex') fails to find a basic feasible solution
        # if phase 1 pseudo-objective function is outside the provided tol.
        # https://github.com/scipy/scipy/issues/6139
        # Without ``presolve`` eliminating such rows the result is incorrect.
        self.options.update({'tol': 1e-12})
        with pytest.raises(ValueError):
            return super(TestLinprogSimplexNoPresolve, self).test_bug_6139()
    def test_bug_7237_low_tol(self):
        # Fails if the tolerance is too strict. Here, we test that
        # even if the solution is wrong, the appropriate error is raised.
        self.options.update({'tol': 1e-12})
        with pytest.raises(ValueError):
            super(TestLinprogSimplexNoPresolve, self).test_bug_7237()
    def test_bug_8174_low_tol(self):
        # Fails if the tolerance is too strict. Here, we test that
        # even if the solution is wrong, the appropriate warning is issued.
        self.options.update({'tol': 1e-12})
        with pytest.warns(OptimizeWarning):
            super(TestLinprogSimplexNoPresolve, self).test_bug_8174()
    def test_unbounded_no_nontrivial_constraints_1(self):
        """Skipped: base test checks presolve behavior, disabled here."""
        pytest.skip("Tests behavior specific to presolve")
    def test_unbounded_no_nontrivial_constraints_2(self):
        """Skipped: base test checks presolve behavior, disabled here."""
        pytest.skip("Tests behavior specific to presolve")
#######################################
# Interior-Point Option-Specific Tests #
#######################################
class TestLinprogIPDense(LinprogIPTests):
    """Interior-point tests using dense linear algebra."""
    options = {"sparse": False}
# CHOLMOD (via scikit-sparse) is an optional dependency; only define the
# sparse-Cholesky test class when it is importable.
if has_cholmod:
    class TestLinprogIPSparseCholmod(LinprogIPTests):
        options = {"sparse": True, "cholesky": True}
# UMFPACK (via scikit-umfpack) is an optional dependency; only define the
# sparse-LU test class when it is importable.
if has_umfpack:
    class TestLinprogIPSparseUmfpack(LinprogIPTests):
        options = {"sparse": True, "cholesky": False}
        def test_bug_10466(self):
            pytest.skip("Autoscale doesn't fix everything, and that's OK.")
class TestLinprogIPSparse(LinprogIPTests):
    """Interior-point tests using sparse algebra with a generic solver
    (no Cholesky, no symmetric-positive-definite assumption)."""
    options = {"sparse": True, "cholesky": False, "sym_pos": False}
    @pytest.mark.xfail_on_32bit("This test is sensitive to machine epsilon level "
                                "perturbations in linear system solution in "
                                "_linprog_ip._sym_solve.")
    def test_bug_6139(self):
        super(TestLinprogIPSparse, self).test_bug_6139()
    @pytest.mark.xfail(reason='Fails with ATLAS, see gh-7877')
    def test_bug_6690(self):
        # Test defined in base class, but can't mark as xfail there
        super(TestLinprogIPSparse, self).test_bug_6690()
    def test_magic_square_sparse_no_presolve(self):
        # test linprog with a problem with a rank-deficient A_eq matrix
        A_eq, b_eq, c, N = magic_square(3)
        bounds = (0, 1)
        with suppress_warnings() as sup:
            if has_umfpack:
                sup.filter(UmfpackWarning)
            sup.filter(MatrixRankWarning, "Matrix is exactly singular")
            sup.filter(OptimizeWarning, "Solving system with option...")
            # Copy the class-level options before disabling presolve so the
            # shared dict is not mutated.
            o = {key: self.options[key] for key in self.options}
            o["presolve"] = False
            res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                          method=self.method, options=o)
        _assert_success(res, desired_fun=1.730550597)
    def test_sparse_solve_options(self):
        # checking that problem is solved with all column permutation options
        A_eq, b_eq, c, N = magic_square(3)
        with suppress_warnings() as sup:
            sup.filter(OptimizeWarning, "A_eq does not appear...")
            sup.filter(OptimizeWarning, "Invalid permc_spec option")
            o = {key: self.options[key] for key in self.options}
            permc_specs = ('NATURAL', 'MMD_ATA', 'MMD_AT_PLUS_A',
                           'COLAMD', 'ekki-ekki-ekki')
            # 'ekki-ekki-ekki' raises warning about invalid permc_spec option
            # and uses default
            for permc_spec in permc_specs:
                o["permc_spec"] = permc_spec
                res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                              method=self.method, options=o)
                _assert_success(res, desired_fun=1.730550597)
class TestLinprogIPSparsePresolve(LinprogIPTests):
    """Interior-point tests with the sparse presolve path enabled."""
    options = {"sparse": True, "_sparse_presolve": True}
    @pytest.mark.xfail_on_32bit("This test is sensitive to machine epsilon level "
                                "perturbations in linear system solution in "
                                "_linprog_ip._sym_solve.")
    def test_bug_6139(self):
        super(TestLinprogIPSparsePresolve, self).test_bug_6139()
    def test_enzo_example_c_with_infeasibility(self):
        """Skipped: the base test turns presolve off, which conflicts here."""
        pytest.skip('_sparse_presolve=True incompatible with presolve=False')
    @pytest.mark.xfail(reason='Fails with ATLAS, see gh-7877')
    def test_bug_6690(self):
        # Test defined in base class, but can't mark as xfail there
        super(TestLinprogIPSparsePresolve, self).test_bug_6690()
class TestLinprogIPSpecific(object):
    """Interior-point-only behaviors (solver selection, maxiter, etc.)."""
    method = "interior-point"
    # the following tests don't need to be performed separately for
    # sparse presolve, sparse after presolve, and dense
    def test_solver_select(self):
        # check that default solver is selected as expected
        # Expected default mirrors availability: CHOLMOD > UMFPACK > generic.
        if has_cholmod:
            options = {'sparse': True, 'cholesky': True}
        elif has_umfpack:
            options = {'sparse': True, 'cholesky': False}
        else:
            options = {'sparse': True, 'cholesky': False, 'sym_pos': False}
        A, b, c = lpgen_2d(20, 20)
        res1 = linprog(c, A_ub=A, b_ub=b, method=self.method, options=options)
        res2 = linprog(c, A_ub=A, b_ub=b, method=self.method)  # default solver
        assert_allclose(res1.fun, res2.fun,
                        err_msg="linprog default solver unexpected result",
                        rtol=1e-15, atol=1e-15)
    def test_unbounded_below_no_presolve_original(self):
        # formerly caused segfault in TravisCI w/ "cholesky":True
        c = [-1]
        bounds = [(None, 1)]
        res = linprog(c=c, bounds=bounds,
                      method=self.method,
                      options={"presolve": False, "cholesky": True})
        _assert_success(res, desired_fun=-1)
    def test_cholesky(self):
        # use cholesky factorization and triangular solves
        A, b, c = lpgen_2d(20, 20)
        res = linprog(c, A_ub=A, b_ub=b, method=self.method,
                      options={"cholesky": True})  # only for dense
        _assert_success(res, desired_fun=-64.049494229)
    def test_alternate_initial_point(self):
        # use "improved" initial point
        A, b, c = lpgen_2d(20, 20)
        with suppress_warnings() as sup:
            sup.filter(RuntimeWarning, "scipy.linalg.solve\nIll...")
            sup.filter(OptimizeWarning, "Solving system with option...")
            sup.filter(LinAlgWarning, "Ill-conditioned matrix...")
            res = linprog(c, A_ub=A, b_ub=b, method=self.method,
                          options={"ip": True, "disp": True})
            # ip code is independent of sparse/dense
        _assert_success(res, desired_fun=-64.049494229)
    def test_maxiter(self):
        # test iteration limit
        A, b, c = lpgen_2d(20, 20)
        maxiter = np.random.randint(6) + 1  # problem takes 7 iterations
        res = linprog(c, A_ub=A, b_ub=b, method=self.method,
                      options={"maxiter": maxiter})
        # maxiter is independent of sparse/dense
        _assert_iteration_limit_reached(res, maxiter)
        assert_equal(res.nit, maxiter)
    def test_bug_8664(self):
        # interior-point has trouble with this when presolve is off
        # (same inconsistent-equality fixture as the shared test_bug_8664).
        c = [4]
        A_ub = [[2], [5]]
        b_ub = [4, 4]
        A_eq = [[0], [-8], [9]]
        b_eq = [3, 2, 10]
        with suppress_warnings() as sup:
            sup.filter(RuntimeWarning)
            sup.filter(OptimizeWarning, "Solving system with option...")
            res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                          method=self.method, options={"presolve": False})
        assert_(not res.success, "Incorrectly reported success")
########################################
# Revised Simplex Option-Specific Tests #
########################################
class TestLinprogRSCommon(LinprogRSTests):
    """Revised-simplex tests, including warm starts from a guess ``x0``."""
    options = {}
    def test_cyclic_bland(self):
        """Skipped: intermittent revised-simplex failure is acceptable."""
        pytest.skip("Intermittent failure acceptable.")
    def test_nontrivial_problem_with_guess(self):
        # Starting at the known optimum must converge in zero iterations.
        c, A_ub, b_ub, A_eq, b_eq, x_star, f_star = nontrivial_problem()
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options, x0=x_star)
        _assert_success(res, desired_fun=f_star, desired_x=x_star)
        assert_equal(res.nit, 0)
    def test_nontrivial_problem_with_unbounded_variables(self):
        c, A_ub, b_ub, A_eq, b_eq, x_star, f_star = nontrivial_problem()
        bounds = [(None, None), (None, None), (0, None), (None, None)]
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options, x0=x_star)
        _assert_success(res, desired_fun=f_star, desired_x=x_star)
        assert_equal(res.nit, 0)
    def test_nontrivial_problem_with_bounded_variables(self):
        c, A_ub, b_ub, A_eq, b_eq, x_star, f_star = nontrivial_problem()
        bounds = [(None, 1), (1, None), (0, None), (.4, .6)]
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options, x0=x_star)
        _assert_success(res, desired_fun=f_star, desired_x=x_star)
        assert_equal(res.nit, 0)
    def test_nontrivial_problem_with_negative_unbounded_variable(self):
        # Modified right-hand side moves the optimum to a point with a
        # negative component in the unbounded variable.
        c, A_ub, b_ub, A_eq, b_eq, x_star, f_star = nontrivial_problem()
        b_eq = [4]
        x_star = np.array([-219/385, 582/385, 0, 4/10])
        f_star = 3951/385
        bounds = [(None, None), (1, None), (0, None), (.4, .6)]
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options, x0=x_star)
        _assert_success(res, desired_fun=f_star, desired_x=x_star)
        assert_equal(res.nit, 0)
    def test_nontrivial_problem_with_bad_guess(self):
        c, A_ub, b_ub, A_eq, b_eq, x_star, f_star = nontrivial_problem()
        bad_guess = [1, 2, 3, .5]
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options, x0=bad_guess)
        # status 6: presumably the "invalid guess" failure code -- confirm
        # against the linprog(method='revised simplex') documentation.
        assert_equal(res.status, 6)
    def test_redundant_constraints_with_guess(self):
        A, b, c, N = magic_square(3)
        # Random perturbation of c so res3 solves a slightly different LP.
        p = np.random.rand(*c.shape)
        with suppress_warnings() as sup:
            sup.filter(OptimizeWarning, "A_eq does not appear...")
            sup.filter(RuntimeWarning, "invalid value encountered")
            sup.filter(LinAlgWarning)
            res = linprog(c, A_eq=A, b_eq=b, method=self.method)
            res2 = linprog(c, A_eq=A, b_eq=b, method=self.method, x0=res.x)
            res3 = linprog(c + p, A_eq=A, b_eq=b, method=self.method, x0=res.x)
        _assert_success(res2, desired_fun=1.730550597)
        assert_equal(res2.nit, 0)
        _assert_success(res3)
        assert_(res3.nit < res.nit)  # hot start reduces iterations
class TestLinprogRSBland(LinprogRSTests):
    """Revised-simplex tests with Bland's pivot rule."""
    options = {"pivot": "bland"}
###########################
# Autoscale-Specific Tests #
###########################
class AutoscaleTests(object):
    """Re-run the scaling-sensitive common tests with autoscaling on."""
    # NOTE(review): this dict is a class attribute shared by every subclass;
    # tests must copy it rather than mutate it in place.
    options = {"autoscale": True}
    # Borrow just the scaling-sensitive tests from the common suite.
    test_bug_6139 = LinprogCommonTests.test_bug_6139
    test_bug_6690 = LinprogCommonTests.test_bug_6690
    test_bug_7237 = LinprogCommonTests.test_bug_7237
class TestAutoscaleIP(AutoscaleTests):
    """Autoscale tests run with the interior-point method."""
    method = "interior-point"
    def test_bug_6139(self):
        """Run the shared test with a looser tolerance for interior-point."""
        # Fix: the original did ``self.options['tol'] = 1e-10``, which
        # mutated the class-level ``AutoscaleTests.options`` dict shared by
        # every autoscale subclass, leaking the tolerance into unrelated
        # tests.  Shadow it with a per-instance copy instead.
        self.options = dict(self.options, tol=1e-10)
        return AutoscaleTests.test_bug_6139(self)
class TestAutoscaleSimplex(AutoscaleTests):
    """Autoscale tests run with the simplex method."""
    method = "simplex"
class TestAutoscaleRS(AutoscaleTests):
    """Autoscale tests run with revised simplex, plus warm-start checks."""
    method = "revised simplex"
    def test_nontrivial_problem_with_guess(self):
        # Starting at the known optimum must converge in zero iterations.
        c, A_ub, b_ub, A_eq, b_eq, x_star, f_star = nontrivial_problem()
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options, x0=x_star)
        _assert_success(res, desired_fun=f_star, desired_x=x_star)
        assert_equal(res.nit, 0)
    def test_nontrivial_problem_with_bad_guess(self):
        c, A_ub, b_ub, A_eq, b_eq, x_star, f_star = nontrivial_problem()
        bad_guess = [1, 2, 3, .5]
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options, x0=bad_guess)
        # status 6: presumably the "invalid guess" failure code -- confirm
        # against the linprog(method='revised simplex') documentation.
        assert_equal(res.status, 6)
| bsd-3-clause |
jirikuncar/invenio | invenio/modules/tags/restful.py | 12 | 12420 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
""" Restful API for tags.
Some useful variables are shown below.
py:data: tag_post_schema , stores data when creating a new tag
py:data: tag_update_schema , stores data when updating a tag
py:data: add_tags_schema , stores a list of tags that will be
attached to a record
"""
from functools import wraps
from flask_login import current_user
from flask_restful import abort, Resource, fields, marshal
from flask import request
from invenio.ext.restful import (
require_api_auth, require_header,
RESTValidator
)
from invenio.modules.tags import api as tags_api
from invenio.modules.tags.models import WtgTAG
from .errors import (
TagError, TagNotCreatedError,
TagNotFoundError, TagNotDeletedError, TagOwnerError, TagNotUpdatedError,
TagsNotFetchedError, TagValidationError, TagRecordAssociationError,
RecordNotFoundError
)
def error_handler(f):
    """Decorator mapping tag-layer exceptions onto HTTP error responses.

    Known tag errors are converted to ``abort`` calls carrying the error's
    own status code and message; a bare :class:`TagError` becomes a 400
    (or 500 when it carries no message).
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except (TagNotCreatedError, TagNotFoundError,
                TagNotDeletedError, TagNotUpdatedError,
                TagsNotFetchedError, TagOwnerError,
                TagRecordAssociationError, RecordNotFoundError) as exc:
            abort(exc.status_code, message=exc.error_msg,
                  status=exc.status_code)
        except TagValidationError as exc:
            # Validation failures additionally expose the per-field errors.
            abort(exc.status_code, message=exc.error_msg,
                  status=exc.status_code, errors=exc.error_list)
        except TagError as exc:
            if exc.args:
                abort(400, message=exc.args[0], status=400)
            else:
                abort(500, message="Internal server error", status=500)
    return wrapper
class TagRepresenation(object):
    """Read-only, JSON-marshalable view of a tag.

    Used solely to serialize a ``WtgTAG`` database row; the class name
    keeps its historical misspelling since callers refer to it.
    """

    # Field types consumed by flask-restful's ``marshal``.
    marshaling_fields = dict(
        id=fields.Integer,
        name=fields.String,
        id_user=fields.Integer,
        group_name=fields.String,
        group_access_rights=fields.String,
        show_in_description=fields.Boolean
    )

    def __init__(self, retrieved_tag):
        """Copy the serializable attributes from a tag model instance.

        :param retrieved_tag: a ``WtgTAG`` object loaded from the database
        """
        self.id = retrieved_tag.id
        self.name = retrieved_tag.name
        self.id_user = retrieved_tag.id_user
        group = retrieved_tag.usergroup
        self.group_name = '' if group is None else group.name
        # Translate the numeric rights constant into a readable label.
        rights = WtgTAG.ACCESS_RIGHTS[retrieved_tag.group_access_rights]
        if not rights:
            self.group_access_rights = "Nothing"
        elif len(rights) == 1:
            self.group_access_rights = "View"
        else:
            self.group_access_rights = ",".join(rights)
        self.show_in_description = retrieved_tag.show_in_description

    def marshal(self):
        """Return this tag as a dict shaped by ``marshaling_fields``."""
        return marshal(self, self.marshaling_fields)
# Validation schema for POST /api/tags/ payloads: a tag needs a name.
tag_post_schema = dict(
    name=dict(required=True, type="string"),
)
# Validation schema for PATCH /api/tags/<name> payloads; every field is
# optional, only the supplied ones are updated.
tag_update_schema = dict(
    rights=dict(required=False,
                type="integer",
                # Fix: the previous ``map(lambda e: e, ...)`` identity map
                # was pointless and, under Python 3, a one-shot iterator a
                # validator could exhaust.  ``list`` yields the same keys
                # (the valid WtgTAG access-right values) as a stable,
                # re-iterable list.
                allowed=list(WtgTAG.ACCESS_RIGHTS)),
    groupname=dict(required=False, type="string"),
    show_in_description=dict(required=False, type="boolean"),
)
class TagResource(Resource):
    """REST resource for a single tag, addressed by its name.

    Supports GET, DELETE and PATCH; every other verb answers 405.
    """

    method_decorators = [
        require_api_auth(),
        error_handler
    ]

    def get(self, tag_name):
        """Return the current user's tag called *tag_name* as JSON.

        :param tag_name: the name of the tag to retrieve
        """
        uid = current_user.get_id()
        found = tags_api.get_tag_of_user(uid, tag_name)
        return TagRepresenation(found).marshal()

    def delete(self, tag_name):
        """Delete the current user's tag called *tag_name*.

        :param tag_name: the name of the tag to delete
        """
        uid = current_user.get_id()
        tags_api.delete_tag_from_user(uid, tag_name)
        return "", 204

    @require_header('Content-Type', 'application/json')
    def patch(self, tag_name):
        """Update group name, group access rights or visibility of a tag.

        :param tag_name: the name of the tag to update
        """
        json_data = request.get_json()
        validator = RESTValidator(tag_update_schema)
        if validator.validate(json_data) is False:
            raise TagValidationError(
                error_msg="Validation for tag update failed",
                status_code=400,
                error_list=validator.get_errors())
        uid = current_user.get_id()
        updated = tags_api.update_tag_of_user(uid, tag_name, json_data)
        return TagRepresenation(updated).marshal(), 201

    def post(self, tag_name):
        """Unsupported; tags are created on the collection resource."""
        abort(405)

    def options(self, tag_name):
        """Unsupported."""
        abort(405)

    def put(self, tag_name):
        """Unsupported."""
        abort(405)

    def head(self, tag_name):
        """Unsupported."""
        abort(405)
class TagListResource(Resource):
    """REST resource for the collection of the current user's tags."""

    method_decorators = [
        require_api_auth(),
        error_handler
    ]

    def get(self):
        """List every tag owned by the current user."""
        uid = current_user.get_id()
        owned = tags_api.get_all_tags_of_user(uid)
        return [TagRepresenation(tag).marshal() for tag in owned]

    def delete(self):
        """Delete all tags owned by the current user."""
        uid = current_user.get_id()
        tags_api.delete_all_tags_from_user(uid)
        return "", 204

    @require_header('Content-Type', 'application/json')
    def post(self):
        """Create a new tag owned by the current user.

        The JSON payload must match ``tag_post_schema``.
        """
        json_data = request.get_json()
        validator = RESTValidator(tag_post_schema)
        if validator.validate(json_data) is False:
            raise TagValidationError(
                error_msg="Validation error for tag creation",
                status_code=400,
                error_list=validator.get_errors())
        uid = current_user.get_id()
        created = tags_api.create_tag_for_user(uid, json_data['name'])
        return TagRepresenation(created).marshal(), 201

    def patch(self):
        """Unsupported."""
        abort(405)

    def options(self):
        """Unsupported."""
        abort(405)

    def put(self):
        """Unsupported."""
        abort(405)

    def head(self):
        """Unsupported."""
        abort(405)
# Validation schema for POST /api/records/<id>/tags/: the payload must be
# a JSON object of the form {"tags": ["<tag name>", ...]}.
add_tags_schema = dict(
    tags=dict(type="list", schema=dict(type="string"))
)
class RecordTagResource(Resource):
    """REST resource for one tag attachment on one record.

    Only DELETE (detach) is implemented; every other verb answers 405.
    """

    method_decorators = [
        require_api_auth(),
        error_handler
    ]

    def delete(self, record_id, tag_name):
        """Detach tag *tag_name* from record *record_id*.

        :param record_id: the identifier of the record
        :param tag_name: the name of the tag
        """
        uid = current_user.get_id()
        tags_api.detach_tag_from_record(uid, tag_name, record_id)
        return "", 204

    def get(self, record_id, tag_name):
        """Unsupported."""
        abort(405)

    def post(self, record_id, tag_name):
        """Unsupported."""
        abort(405)

    def put(self, record_id, tag_name):
        """Unsupported."""
        abort(405)

    def patch(self, record_id, tag_name):
        """Unsupported."""
        abort(405)

    def options(self, record_id, tag_name):
        """Unsupported."""
        abort(405)

    def head(self, record_id, tag_name):
        """Unsupported."""
        abort(405)
class RecordListTagResource(Resource):
    """REST resource for the set of tags attached to a record."""

    method_decorators = [
        require_api_auth(),
        error_handler
    ]

    @require_header('Content-Type', 'application/json')
    def post(self, record_id):
        """Attach a list of tags to record *record_id*.

        Tags are created on the fly when they do not exist yet.  Returns
        the representations of the tags that were newly attached (an empty
        list when nothing changed).

        :param record_id: the identifier of the record
        """
        json_data = request.get_json()
        validator = RESTValidator(add_tags_schema)
        if validator.validate(json_data) is False:
            raise TagValidationError(
                error_msg="Validation error in attaching tags on record",
                status_code=400,
                error_list=validator.get_errors())
        uid = current_user.get_id()
        attached = tags_api.attach_tags_to_record(
            uid, json_data['tags'], record_id)
        return [TagRepresenation(tag).marshal() for tag in attached]

    def get(self, record_id):
        """Return every tag currently attached to record *record_id*.

        :param record_id: the identifier of the record
        """
        attached = tags_api.get_attached_tags_on_record(record_id)
        return [TagRepresenation(tag).marshal() for tag in attached]

    def delete(self, record_id):
        """Detach all tags from the record (not implemented yet)."""
        pass

    def put(self, record_id):
        """Replace all tags of the record (not implemented yet)."""
        pass

    def head(self, record_id):
        """Unsupported."""
        abort(405)

    def patch(self, record_id):
        """Unsupported."""
        abort(405)

    def options(self, record_id):
        """Unsupported."""
        abort(405)
#
# Register API resources
#
def setup_app(app, api):
    """Register the tag REST resources on *api* under their URL rules.

    :param app: the Flask application (unused here, kept for interface
        symmetry with other ``setup_app`` hooks)
    :param api: the flask-restful ``Api`` instance to register on
    """
    routes = (
        (TagListResource, '/api/tags/'),
        (TagResource, '/api/tags/<string:tag_name>'),
        (RecordListTagResource, '/api/records/<int:record_id>/tags/'),
        (RecordTagResource,
         '/api/records/<int:record_id>/tags/<string:tag_name>'),
    )
    for resource, url in routes:
        api.add_resource(resource, url)
| gpl-2.0 |
ChristianF88/CD3Waterbalance | Supplyvecs.py | 3 | 84594 | # -*- coding: utf-8 -*-
"""
Created on Thu Jan 22 11:25:43 2015
@author: Acer
"""
# --- Scenario preset: 10 buildings in 2 clusters ---
# NOTE(review): supplyvec / Catchattrvec / Demandmodelattrvec are reassigned
# for every scenario in this file, so only the LAST assignment is in effect
# at import time; the earlier blocks act as copy/paste presets.
#10 Buildings 2 Cluster
# supplyvec: nested 0/1 option flags, one 5-element flag list per cluster's
# buildings; exact semantics come from the CD3Waterbalance supply model --
# TODO confirm against its documentation.
supplyvec=[[[[[[1,1,0,1,0],[1,1,1],1],[[1,1,0,1,0],[1,1,1],1],1],1]]]
# Catchattrvec: one parameter record per catchment, 6 per cluster row
# (5 identical building records + 1 larger 10000-area record, presumably a
# communal catchment -- TODO confirm field meanings).
Catchattrvec=[[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without']]
# Demandmodelattrvec: one demand-model spec per building:
# [occupant counts?, [..], model name] -- TODO confirm.
Demandmodelattrvec = [[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"]]
# --- Scenario preset: 20 buildings in 4 clusters (overwrites the previous
# scenario's variables; see note on the 10-building block) ---
#20 Buildings 4 Cluster
supplyvec=[[[[[[1,1,0,1,0],[1,1,1],1],[[1,1,0,1,0],[1,1,1],1],1],[[[1,1,0,1,0],[0,0,1],1],[[0,0,0,1,0],[0,0,1],1],0],1]]]
# 4 cluster rows x (5 building records + 1 communal record) = 24 catchments.
Catchattrvec=[[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without']]
# 20 identical per-building demand-model specs, 10 per source line.
Demandmodelattrvec = [[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"]]
# --- Scenario preset: 40 buildings in 4 clusters (overwrites the previous
# scenario's variables; see note on the 10-building block) ---
#40 Buildings 4 Cluster
supplyvec=[[[[[[1,1,0,1,0,0,1,0,1,0],[1,1,1],1],[[1,0,0,0,0,0,1,0,1,0],[1,1,1],1],1],[[[1,0,0,1,1,0,1,0,1,0],[0,0,1],1],[[0,0,0,1,1,1,1,1,1,0],[0,0,1],1],0],1]]]
# 4 cluster rows x (10 building records + 1 communal record) = 44 catchments.
Catchattrvec=[[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without']]
# 40 identical per-building demand-model specs, 10 per source line.
Demandmodelattrvec = [[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"]]
# --- Scenario preset: 80 buildings in 8 clusters (overwrites the previous
# scenario's variables; see note on the 10-building block) ---
#80 Buildings 8 Cluster
supplyvec=[[[[[[1,1,0,1,0,0,1,0,1,0],[1,1,1],1],[[1,0,0,0,0,0,1,0,1,0],[1,1,1],1],1],[[[1,0,0,1,1,0,1,0,1,0],[0,0,1],1],[[0,0,0,1,1,1,1,1,1,0],[0,0,1],1],0],1],[[[[1,1,0,1,0,0,1,0,1,0],[1,1,0],1],[[1,0,0,0,0,0,1,0,1,0],[1,1,0],1],1],[[[1,0,0,1,1,0,1,0,1,0],[0,0,0],1],[[0,0,0,1,1,1,1,1,1,0],[0,0,0],1],0],0]]]
# 8 cluster rows x (10 building records + 1 communal record) = 88 catchments.
Catchattrvec=[[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without']]
# 80 identical per-building demand-model specs, 10 per source line.
Demandmodelattrvec = [[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"]]
#160 Buildings 8 Cluster
# Scenario data: 160 buildings grouped into 8 clusters.
# `supplyvec` is a nested-list tree whose leaves are
# [0/1 option vector, short 0/1 sub-option vector, flag] triples, combined
# pairwise into [left, right, flag] nodes — presumably a binary cluster
# hierarchy of supply-network options.
# NOTE(review): the meaning of the 20-element 0/1 vectors is not visible in
# this file chunk — confirm against the code that consumes `supplyvec`.
# NOTE(review): this rebinds `supplyvec`, discarding any scenario assigned
# earlier in the file; only the last assignment before use takes effect.
supplyvec=[[[[[[1,1,0,1,0,0,1,0,1,0,1,1,0,1,0,0,1,0,1,0],[1,1,1],1],[[1,0,0,0,0,0,1,0,1,0,1,1,0,1,0,0,1,0,1,0],[1,1,1],1],1],[[[1,0,0,1,1,0,1,0,1,0,1,1,0,1,0,0,1,0,1,0],[0,0,1],1],[[0,0,0,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,1,0],[0,0,1],1],0],1],[[[[1,1,0,1,0,0,1,0,1,0,1,1,0,1,0,0,1,0,1,0],[1,1,0],1],[[1,0,0,0,0,0,1,0,1,0,1,1,0,1,0,0,1,0,1,0],[1,1,0],1],1],[[[1,0,0,1,1,0,1,0,1,0,1,1,0,1,0,0,1,0,1,0],[0,0,0],1],[[0,0,0,1,1,1,1,1,1,0],[0,0,0],1],0],0]]]
# Catchment attribute table for the 160-building / 8-cluster scenario:
# one row per catchment, 21 rows per source line (20 identical building rows
# plus one larger row with area 10000 — presumably a cluster-level catchment),
# over 8 lines = 168 rows total.
# NOTE(review): the 18 row fields look like numeric geometry/area/cost/ratio
# parameters ending in a 'with'/'without' flag; exact field meaning is not
# visible in this chunk — confirm against the code that reads `Catchattrvec`.
Catchattrvec=[[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without']]
# Demand-model attribute table for the 160-building scenario:
# one [[5,6,5],[5], "Simple_Model"] entry per building, 10 per source line
# over 16 lines = 160 entries, all identical.
# NOTE(review): the meaning of the [5,6,5] and [5] parameter vectors passed to
# "Simple_Model" is not visible in this chunk — confirm against the demand
# model implementation that consumes `Demandmodelattrvec`.
Demandmodelattrvec = [[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"]]
#300 Buildings 16 Cluster
supplyvec=[[[[[[1,1,0,1,0,0,1,0,1,0,1,1,0,1,0,0,1,0,1,0],[1,1,1],1],[[1,0,0,0,0,0,1,0,1,0,1,1,0,1,0,0,1,0,1,0],[1,1,1],1],1],[[[1,0,0,1,1,0,1,0,1,0,1,1,0,1,0,0,1,0,1,0],[0,0,1],1],[[0,0,0,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,1,0],[0,0,1],1],0],1],[[[[1,1,0,1,0,0,1,0,1,0,1,1,0,1,0,0,1,0,1,0],[1,1,0],1],[[1,0,0,0,0,0,1,0,1,0,1,1,0,1,0,0,1,0,1,0],[1,1,0],1],1],[[[1,0,0,1,1,0,1,0,1,0,1,1,0,1,0,0,1,0,1,0],[0,0,0],1],[[0,0,0,1,1,1,1,1,1,0],[0,0,0],1],0],0]],[[[[[1,1,0,1,0,0,1,0,1,0,1,1,0,1,0,0,1,0,1,0],[1,1,1],1],[[1,0,0,0,0,0,1,0,1,0,1,1,0,1,0,0,1,0,1,0],[1,1,1],1],1],[[[1,0,0,1,1,0,1,0,1,0,1,1,0,1,0,0,1,0,1,0],[0,0,1],1],[[0,0,0,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,1,0],[0,0,1],1],0],1],[[[[1,1,0,1,0,0,1,0,1,0,1,1,0,1,0,0,1,0,1,0],[1,1,0],1],[[1,0,0,0,0,0,1,0,1,0,1,1,0,1,0,0,1,0,1,0],[1,1,0],1],1],[[[1,0,0,1,1,0,1,0,1,0,1,1,0,1,0,0,1,0,1,0],[0,0,0],1],[[0,0,0,1,1,1,1,1,1,0],[0,0,0],1],0],0]]]
Catchattrvec=[[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without'],
[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.9,800,0.4,0.2,0.4,0.6,0.21,1.5,0.4,0.5,400,500,700,0.04,0.05,0.06,'without'],[1,1.8,10000,0,0.5,0.5,0.6,0.21,1.5,0.4,0.5,380,510,710,0.04,0.05,0.06,'without']]
Demandmodelattrvec = [[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],
[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"],[[5,6,5],[5], "Simple_Model"]]
# for fixing problem (scenario 1)
# Supply tree: deeply nested availability flags, everything enabled (all 1s).
supplyvec = [[[[[[1], [1, 1, 1], 1], [[1], [1, 1, 1], 1], 1], [[[1], [1, 1, 1], 1], [[1], [1, 1, 1], 1], 1], 1], [[[[1], [1, 1, 1], 1], [[1], [1, 1, 1], 1], 1], [[[1], [1, 1, 1], 1], [[1], [1, 1, 1], 1], 1], 1]], [[[[[1], [1, 1, 1], 1], [[1], [1, 1, 1], 1], 1], [[[1], [1, 1, 1], 1], [[1], [1, 1, 1], 1], 1], 1], [[[[1], [1, 1, 1], 1], [[1], [1, 1, 1], 1], 1], [[[1], [1, 1, 1], 1], [[1], [1, 1, 1], 1], 1], 1]]]
# Catchment attribute rows: the same two parameter sets alternating 16 times
# (32 rows total).  The tuple of list literals is re-evaluated on every outer
# pass, so every row is a distinct list object — no aliasing.
Catchattrvec = [
    row
    for _ in range(16)
    for row in (
        [1, 1.9, 800, 0.4, 0.2, 0.4, 0.6, 0.21, 1.5, 0.4, 0.5, 400, 500, 700, 0.04, 0.05, 0.06, 'without'],
        [1, 1.8, 10000, 0, 0.5, 0.5, 0.6, 0.21, 1.5, 0.4, 0.5, 380, 510, 710, 0.04, 0.05, 0.06, 'without'],
    )
]
# Demand model attributes: 16 identical "Simple_Model" entries (each a fresh object).
Demandmodelattrvec = [[[5, 6, 5], [5], "Simple_Model"] for _ in range(16)]
# for fixing problem 2 (scenario 2) — these re-bindings shadow the scenario 1
# data above; only a reduced data set is active here (the rest was disabled).
# Single supply tree; the first branch carries some 0 (unavailable) flags.
supplyvec = [[[[[[1], [0, 0, 1], 1], [[1], [0, 0, 1], 1], 0], [[[1], [1, 1, 1], 1], [[1], [1, 1, 1], 1], 1], 1], [[[[1], [1, 1, 1], 1], [[1], [1, 1, 1], 1], 1], [[[1], [1, 1, 1], 1], [[1], [1, 1, 1], 1], 1], 1]]]
# Catchment rows: the same two alternating parameter sets as scenario 1, but
# only 8 pairs (16 rows); each row is a fresh list object (no aliasing).
Catchattrvec = [
    row
    for _ in range(8)
    for row in (
        [1, 1.9, 800, 0.4, 0.2, 0.4, 0.6, 0.21, 1.5, 0.4, 0.5, 400, 500, 700, 0.04, 0.05, 0.06, 'without'],
        [1, 1.8, 10000, 0, 0.5, 0.5, 0.6, 0.21, 1.5, 0.4, 0.5, 380, 510, 710, 0.04, 0.05, 0.06, 'without'],
    )
]
# Demand model attributes trimmed to 8 entries for this scenario.
Demandmodelattrvec = [[[5, 6, 5], [5], "Simple_Model"] for _ in range(8)]
| gpl-2.0 |
mbernasocchi/QGIS | tests/src/python/test_qgssymbollayer_createsld.py | 30 | 61494 | """
***************************************************************************
test_qgssymbollayer_createsld.py
---------------------
Date : July 2016
Copyright : (C) 2016 by Andrea Aime
Email : andrea dot aime at geosolutions dot it
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Andrea Aime'
__date__ = 'July 2016'
__copyright__ = '(C) 2012, Andrea Aime'
import qgis # NOQA
from qgis.PyQt.QtCore import Qt, QDir, QFile, QIODevice, QPointF, QSizeF
from qgis.PyQt.QtXml import QDomDocument
from qgis.PyQt.QtGui import QColor, QFont
from qgis.core import (
QgsSimpleMarkerSymbolLayer, QgsSimpleMarkerSymbolLayerBase, QgsUnitTypes, QgsSvgMarkerSymbolLayer,
QgsFontMarkerSymbolLayer, QgsEllipseSymbolLayer, QgsSimpleLineSymbolLayer,
QgsMarkerLineSymbolLayer, QgsMarkerSymbol, QgsSimpleFillSymbolLayer, QgsSVGFillSymbolLayer,
QgsLinePatternFillSymbolLayer, QgsPointPatternFillSymbolLayer, QgsVectorLayer, QgsVectorLayerSimpleLabeling,
QgsTextBufferSettings, QgsPalLayerSettings, QgsTextBackgroundSettings, QgsRuleBasedLabeling)
from qgis.testing import start_app, unittest
from utilities import unitTestDataPath
# Convenience instances in case you may need them
# not used in this test
start_app()
class TestQgsSymbolLayerCreateSld(unittest.TestCase):
"""
This class tests the creation of SLD from QGis layers
"""
def testSimpleMarkerRotation(self):
symbol = QgsSimpleMarkerSymbolLayer(
QgsSimpleMarkerSymbolLayerBase.Star, color=QColor(255, 0, 0), strokeColor=QColor(0, 255, 0), size=10)
symbol.setAngle(50)
dom, root = self.symbolToSld(symbol)
# print( "Simple marker rotation: " + root.ownerDocument().toString())
self.assertStaticRotation(root, '50')
def testSimpleMarkerUnitDefault(self):
symbol = QgsSimpleMarkerSymbolLayer(
QgsSimpleMarkerSymbolLayerBase.Star, color=QColor(255, 0, 0), strokeColor=QColor(0, 255, 0), size=10)
symbol.setStrokeWidth(3)
symbol.setOffset(QPointF(5, 10))
dom, root = self.symbolToSld(symbol)
# print("Simple marker unit mm: " + root.ownerDocument().toString())
# Check the size has been rescaled to pixels
self.assertStaticSize(root, '36')
# Check the same happened to the stroke width
self.assertStrokeWidth(root, 2, 11)
self.assertStaticDisplacement(root, 18, 36)
def testSimpleMarkerUnitPixels(self):
symbol = QgsSimpleMarkerSymbolLayer(
QgsSimpleMarkerSymbolLayerBase.Star, color=QColor(255, 0, 0), strokeColor=QColor(0, 255, 0), size=10)
symbol.setStrokeWidth(3)
symbol.setOffset(QPointF(5, 10))
symbol.setOutputUnit(QgsUnitTypes.RenderPixels)
dom, root = self.symbolToSld(symbol)
# print("Marker unit mm: " + root.ownerDocument().toString())
# Check the size has not been rescaled
self.assertStaticSize(root, '10')
# Check the same happened to the stroke width
self.assertStrokeWidth(root, 2, 3)
self.assertStaticDisplacement(root, 5, 10)
def testSvgMarkerUnitDefault(self):
symbol = QgsSvgMarkerSymbolLayer('symbols/star.svg', 10, 90)
symbol.setFillColor(QColor("blue"))
symbol.setStrokeWidth(1)
symbol.setStrokeColor(QColor('red'))
symbol.setPath('symbols/star.svg')
symbol.setOffset(QPointF(5, 10))
dom, root = self.symbolToSld(symbol)
# print("Svg marker mm: " + dom.toString())
self.assertExternalGraphic(root, 0,
'symbols/star.svg?fill=%230000ff&fill-opacity=1&outline=%23ff0000&outline-opacity=1&outline-width=4',
'image/svg+xml')
self.assertExternalGraphic(root, 1,
'symbols/star.svg', 'image/svg+xml')
self.assertWellKnownMark(root, 0, 'square', '#0000ff', '#ff0000', 4)
# Check the size has been rescaled
self.assertStaticSize(root, '36')
# Check rotation for good measure
self.assertStaticRotation(root, '90')
self.assertStaticDisplacement(root, 18, 36)
def testSvgMarkerUnitPixels(self):
symbol = QgsSvgMarkerSymbolLayer('symbols/star.svg', 10, 0)
symbol.setFillColor(QColor("blue"))
symbol.setStrokeWidth(1)
symbol.setStrokeColor(QColor('red'))
symbol.setPath('symbols/star.svg')
symbol.setOffset(QPointF(5, 10))
symbol.setOutputUnit(QgsUnitTypes.RenderPixels)
dom, root = self.symbolToSld(symbol)
# print("Svg marker unit px: " + dom.toString())
self.assertExternalGraphic(root, 0,
'symbols/star.svg?fill=%230000ff&fill-opacity=1&outline=%23ff0000&outline-opacity=1&outline-width=1',
'image/svg+xml')
self.assertExternalGraphic(root, 1,
'symbols/star.svg', 'image/svg+xml')
self.assertWellKnownMark(root, 0, 'square', '#0000ff', '#ff0000', 1)
# Check the size has not been rescaled
self.assertStaticSize(root, '10')
self.assertStaticDisplacement(root, 5, 10)
def testFontMarkerUnitDefault(self):
symbol = QgsFontMarkerSymbolLayer('sans', ',', 10, QColor('black'), 45)
symbol.setOffset(QPointF(5, 10))
dom, root = self.symbolToSld(symbol)
# print("Font marker unit mm: " + dom.toString())
# Check the size has been rescaled
self.assertStaticSize(root, '36')
self.assertStaticRotation(root, '45')
self.assertStaticDisplacement(root, 18, 36)
def testFontMarkerUnitPixel(self):
symbol = QgsFontMarkerSymbolLayer('sans', ',', 10, QColor('black'), 45)
symbol.setOffset(QPointF(5, 10))
symbol.setOutputUnit(QgsUnitTypes.RenderPixels)
dom, root = self.symbolToSld(symbol)
# print ("Font marker unit mm: " + dom.toString())
# Check the size has been rescaled
self.assertStaticSize(root, '10')
self.assertStaticRotation(root, '45')
self.assertStaticDisplacement(root, 5, 10)
def createEllipseSymbolLayer(self):
# No way to build it programmatically...
mTestName = 'QgsEllipseSymbolLayer'
mFilePath = QDir.toNativeSeparators(
'%s/symbol_layer/%s.sld' % (unitTestDataPath(), mTestName))
mDoc = QDomDocument(mTestName)
mFile = QFile(mFilePath)
mFile.open(QIODevice.ReadOnly)
mDoc.setContent(mFile, True)
mFile.close()
mSymbolLayer = QgsEllipseSymbolLayer.createFromSld(
mDoc.elementsByTagName('PointSymbolizer').item(0).toElement())
return mSymbolLayer
def testEllipseMarkerUnitDefault(self):
symbol = self.createEllipseSymbolLayer()
symbol.setOffset(QPointF(5, 10))
symbol.setOutputUnit(QgsUnitTypes.RenderMillimeters)
dom, root = self.symbolToSld(symbol)
# print ("Ellipse marker unit mm: " + dom.toString())
# Check the size has been rescaled
self.assertStaticSize(root, '25')
# Check also the stroke width
self.assertStrokeWidth(root, 2, 4)
self.assertStaticDisplacement(root, 18, 36)
def testEllipseMarkerUnitPixel(self):
symbol = self.createEllipseSymbolLayer()
symbol.setOffset(QPointF(5, 10))
symbol.setOutputUnit(QgsUnitTypes.RenderPixels)
dom, root = self.symbolToSld(symbol)
# print ("Ellipse marker unit mm: " + dom.toString())
# Check the size has been rescaled
self.assertStaticSize(root, '7')
# Check also the stroke width
self.assertStrokeWidth(root, 2, 1)
self.assertStaticDisplacement(root, 5, 10)
def testSimpleLineHairline(self):
symbol = QgsSimpleLineSymbolLayer(QColor("black"), 0)
dom, root = self.symbolToSld(symbol)
# print ("Simple line px: \n" + dom.toString())
# Hairline is turned into 0.5px
self.assertStrokeWidth(root, 1, 0.5)
def testSimpleLineUnitDefault(self):
symbol = QgsSimpleLineSymbolLayer(QColor("black"), 1)
symbol.setCustomDashVector([10, 10])
symbol.setUseCustomDashPattern(True)
symbol.setOffset(5)
dom, root = self.symbolToSld(symbol)
# print ("Simple line px: \n" + dom.toString())
self.assertStrokeWidth(root, 1, 4)
self.assertDashPattern(root, 4, '36 36')
self.assertStaticPerpendicularOffset(root, '18')
def testSimpleLineUnitPixel(self):
symbol = QgsSimpleLineSymbolLayer(QColor("black"), 1)
symbol.setCustomDashVector([10, 10])
symbol.setUseCustomDashPattern(True)
symbol.setOffset(5)
symbol.setOutputUnit(QgsUnitTypes.RenderPixels)
dom, root = self.symbolToSld(symbol)
# print ("Simple line px: \n" + dom.toString())
self.assertStrokeWidth(root, 1, 1)
self.assertDashPattern(root, 4, '10 10')
self.assertStaticPerpendicularOffset(root, '5')
def testMarkLineUnitDefault(self):
symbol = QgsMarkerLineSymbolLayer()
symbol.setSubSymbol(
QgsMarkerSymbol.createSimple({'color': '#ffffff', 'size': '3'}))
symbol.setInterval(5)
symbol.setOffset(5)
dom, root = self.symbolToSld(symbol)
# print ("Mark line mm: \n" + dom.toString())
# size of the mark
self.assertStaticSize(root, '11')
# gap and offset
self.assertStaticGap(root, '18')
self.assertStaticPerpendicularOffset(root, '18')
def testMarkLineUnitPixels(self):
symbol = QgsMarkerLineSymbolLayer()
symbol.setSubSymbol(
QgsMarkerSymbol.createSimple({'color': '#ffffff', 'size': '3'}))
symbol.setInterval(5)
symbol.setOffset(5)
symbol.setOutputUnit(QgsUnitTypes.RenderPixels)
dom, root = self.symbolToSld(symbol)
# print ("Mark line px: \n" + dom.toString())
# size of the mark
self.assertStaticSize(root, '3')
# gap and offset
self.assertStaticGap(root, '5')
self.assertStaticPerpendicularOffset(root, '5')
def testSimpleFillDefault(self):
symbol = QgsSimpleFillSymbolLayer(
QColor('red'), Qt.SolidPattern, QColor('green'), Qt.SolidLine, 5)
symbol.setOffset(QPointF(5, 10))
dom, root = self.symbolToSld(symbol)
# print ("Simple fill mm: \n" + dom.toString())
self.assertStrokeWidth(root, 2, 18)
self.assertStaticDisplacement(root, 18, 36)
def testSimpleFillPixels(self):
symbol = QgsSimpleFillSymbolLayer(
QColor('red'), Qt.SolidPattern, QColor('green'), Qt.SolidLine, 5)
symbol.setOffset(QPointF(5, 10))
symbol.setOutputUnit(QgsUnitTypes.RenderPixels)
dom, root = self.symbolToSld(symbol)
# print ( "Simple fill px: \n" + dom.toString())
self.assertStrokeWidth(root, 2, 5)
self.assertStaticDisplacement(root, 5, 10)
def testSvgFillDefault(self):
symbol = QgsSVGFillSymbolLayer('test/star.svg', 10, 45)
symbol.setSvgFillColor(QColor('blue'))
symbol.setSvgStrokeWidth(3)
symbol.setSvgStrokeColor(QColor('yellow'))
symbol.subSymbol().setWidth(10)
dom, root = self.symbolToSld(symbol)
# print ("Svg fill mm: \n" + dom.toString())
self.assertExternalGraphic(root, 0,
'test/star.svg?fill=%230000ff&fill-opacity=1&outline=%23ffff00&outline-opacity=1&outline-width=11',
'image/svg+xml')
self.assertExternalGraphic(root, 1,
'test/star.svg', 'image/svg+xml')
self.assertWellKnownMark(root, 0, 'square', '#0000ff', '#ffff00', 11)
self.assertStaticRotation(root, '45')
self.assertStaticSize(root, '36')
# width of the polygon stroke
lineSymbolizer = root.elementsByTagName('se:LineSymbolizer').item(0).toElement()
self.assertStrokeWidth(lineSymbolizer, 1, 36)
def testSvgFillPixel(self):
symbol = QgsSVGFillSymbolLayer('test/star.svg', 10, 45)
symbol.setSvgFillColor(QColor('blue'))
symbol.setSvgStrokeWidth(3)
symbol.setSvgStrokeColor(QColor('black'))
symbol.setOutputUnit(QgsUnitTypes.RenderPixels)
symbol.subSymbol().setWidth(10)
dom, root = self.symbolToSld(symbol)
# print ("Svg fill px: \n" + dom.toString())
self.assertExternalGraphic(root, 0,
'test/star.svg?fill=%230000ff&fill-opacity=1&outline=%23000000&outline-opacity=1&outline-width=3',
'image/svg+xml')
self.assertExternalGraphic(root, 1,
'test/star.svg', 'image/svg+xml')
self.assertWellKnownMark(root, 0, 'square', '#0000ff', '#000000', 3)
self.assertStaticRotation(root, '45')
self.assertStaticSize(root, '10')
# width of the polygon stroke
lineSymbolizer = root.elementsByTagName('se:LineSymbolizer').item(0).toElement()
self.assertStrokeWidth(lineSymbolizer, 1, 10)
def testLineFillDefault(self):
symbol = QgsLinePatternFillSymbolLayer()
symbol.setLineAngle(45)
symbol.setLineWidth(1)
symbol.setOffset(5)
dom, root = self.symbolToSld(symbol)
# print ("Line fill mm: \n" + dom.toString())
self.assertStaticRotation(root, '45')
self.assertStrokeWidth(root, 1, 4)
self.assertStaticSize(root, '18')
self.assertStaticDisplacement(root, 15, 9)
def testLineFillPixels(self):
symbol = QgsLinePatternFillSymbolLayer()
symbol.setLineAngle(45)
symbol.setLineWidth(1)
symbol.setOffset(5)
symbol.setOutputUnit(QgsUnitTypes.RenderPixels)
dom, root = self.symbolToSld(symbol)
# print ("Line fill px: \n" + dom.toString())
self.assertStaticRotation(root, '45')
self.assertStrokeWidth(root, 1, 1)
self.assertStaticSize(root, '5')
self.assertStaticDisplacement(root, 4.25, 2.63)
def testPointFillDefault(self):
symbol = QgsPointPatternFillSymbolLayer()
dom, root = self.symbolToSld(symbol)
# print ("Point fill mm: \n" + dom.toString())
self.assertStaticSize(root, '7')
def testPointFillpixels(self):
symbol = QgsPointPatternFillSymbolLayer()
symbol.setOutputUnit(QgsUnitTypes.RenderPixels)
dom, root = self.symbolToSld(symbol)
# print ("Point fill px: \n" + dom.toString())
self.assertStaticSize(root, '2')
def testSingleSymbolNoScaleDependencies(self):
layer = QgsVectorLayer("Point", "addfeat", "memory")
mFilePath = QDir.toNativeSeparators('%s/symbol_layer/%s.qml' % (unitTestDataPath(), "singleSymbol"))
layer.loadNamedStyle(mFilePath)
dom, root = self.layerToSld(layer)
# print("No dep on single symbol:" + dom.toString())
self.assertScaleDenominator(root, None, None)
def testSingleSymbolScaleDependencies(self):
layer = QgsVectorLayer("Point", "addfeat", "memory")
mFilePath = QDir.toNativeSeparators('%s/symbol_layer/%s.qml' % (unitTestDataPath(), "singleSymbol"))
layer.loadNamedStyle(mFilePath)
layer.setMaximumScale(1000)
layer.setMinimumScale(500000)
layer.setScaleBasedVisibility(True)
dom, root = self.layerToSld(layer)
# print("Scale dep on single symbol:" + dom.toString())
self.assertScaleDenominator(root, '1000', '500000')
def testCategorizedNoScaleDependencies(self):
layer = QgsVectorLayer("Polygon", "addfeat", "memory")
mFilePath = QDir.toNativeSeparators('%s/symbol_layer/%s.qml' % (unitTestDataPath(), "categorized"))
layer.loadNamedStyle(mFilePath)
dom, root = self.layerToSld(layer)
# print("Categorized no scale deps:" + dom.toString())
ruleCount = root.elementsByTagName('se:Rule').size()
for i in range(0, ruleCount):
self.assertScaleDenominator(root, None, None, i)
def testCategorizedWithScaleDependencies(self):
layer = QgsVectorLayer("Polygon", "addfeat", "memory")
mFilePath = QDir.toNativeSeparators('%s/symbol_layer/%s.qml' % (unitTestDataPath(), "categorized"))
layer.loadNamedStyle(mFilePath)
layer.setMaximumScale(1000)
layer.setMinimumScale(500000)
layer.setScaleBasedVisibility(True)
dom, root = self.layerToSld(layer)
# print("Categorized with scale deps:" + dom.toString())
ruleCount = root.elementsByTagName('se:Rule').size()
for i in range(0, ruleCount):
self.assertScaleDenominator(root, '1000', '500000', i)
def testGraduatedNoScaleDependencies(self):
layer = QgsVectorLayer("Polygon", "addfeat", "memory")
mFilePath = QDir.toNativeSeparators('%s/symbol_layer/%s.qml' % (unitTestDataPath(), "graduated"))
status = layer.loadNamedStyle(mFilePath) # NOQA
dom, root = self.layerToSld(layer)
# print("Graduated no scale deps:" + dom.toString())
ruleCount = root.elementsByTagName('se:Rule').size()
for i in range(0, ruleCount):
self.assertScaleDenominator(root, None, None, i)
# def testRuleBasedNoRootScaleDependencies(self):
# layer = QgsVectorLayer("Polygon", "addfeat", "memory")
#
# mFilePath = QDir.toNativeSeparators('%s/symbol_layer/%s.qml' % (unitTestDataPath(), "ruleBased"))
# status = layer.loadNamedStyle(mFilePath) # NOQA
#
# dom, root = self.layerToSld(layer)
# print(("Rule based, no root scale deps:" + dom.toString()))
#
# ruleCount = root.elementsByTagName('se:Rule').size() # NOQA
# self.assertScaleDenominator(root, '1000', '40000000', 0)
# self.assertScaleDenominator(root, None, None, 1)
def testRuleBasedNoRootScaleDependencies(self):
layer = QgsVectorLayer("Polygon", "addfeat", "memory")
mFilePath = QDir.toNativeSeparators('%s/symbol_layer/%s.qml' % (unitTestDataPath(), "ruleBased"))
status = layer.loadNamedStyle(mFilePath) # NOQA
layer.setMaximumScale(5000)
layer.setMinimumScale(50000000)
layer.setScaleBasedVisibility(True)
dom, root = self.layerToSld(layer)
# print("Rule based, with root scale deps:" + dom.toString())
ruleCount = root.elementsByTagName('se:Rule').size() # NOQA
self.assertScaleDenominator(root, '5000', '40000000', 0)
self.assertScaleDenominator(root, '5000', '50000000', 1)
def testCategorizedFunctionConflict(self):
layer = QgsVectorLayer("Point", "addfeat", "memory")
mFilePath = QDir.toNativeSeparators(
'%s/symbol_layer/%s.qml' % (unitTestDataPath(), "categorizedFunctionConflict"))
status = layer.loadNamedStyle(mFilePath) # NOQA
dom, root = self.layerToSld(layer)
# print("Rule based, with root scale deps:" + dom.toString())
ruleCount = root.elementsByTagName('se:Rule').size() # NOQA
self.assertEqual(7, ruleCount)
self.assertRuleRangeFilter(root, 0, 'Area', '0', True, '500', True)
self.assertRuleRangeFilter(root, 1, 'Area', '500', False, '1000', True)
self.assertRuleRangeFilter(root, 2, 'Area', '1000', False, '5000', True)
self.assertRuleRangeFilter(root, 3, 'Area', '5000', False, '10000', True)
self.assertRuleRangeFilter(root, 4, 'Area', '10000', False, '50000', True)
self.assertRuleRangeFilter(root, 5, 'Area', '50000', False, '100000', True)
self.assertRuleRangeFilter(root, 6, 'Area', '100000', False, '200000', True)
    def assertRuleRangeFilter(self, root, index, attributeName, min, includeMin, max, includeMax):
        """Assert that rule *index* under *root* has an ogc:And filter
        constraining *attributeName* to the (min, max) range, with the
        comparison operators matching the requested bound inclusiveness.
        """
        rule = root.elementsByTagName('se:Rule').item(index).toElement()
        filter = rule.elementsByTagName("Filter").item(0).firstChild()
        self.assertEqual("ogc:And", filter.nodeName())
        # first child of the And: the lower-bound comparison (> or >=)
        gt = filter.firstChild()
        expectedGtName = "ogc:PropertyIsGreaterThanOrEqualTo" if includeMin else "ogc:PropertyIsGreaterThan"
        self.assertEqual(expectedGtName, gt.nodeName())
        gtProperty = gt.firstChild()
        self.assertEqual("ogc:PropertyName", gtProperty.nodeName())
        self.assertEqual(attributeName, gtProperty.toElement().text())
        # second child of the comparison: the literal lower bound
        gtValue = gt.childNodes().item(1)
        self.assertEqual(min, gtValue.toElement().text())
        # second child of the And: the upper-bound comparison (< or <=)
        lt = filter.childNodes().item(1)
        expectedLtName = "ogc:PropertyIsLessThanOrEqualTo" if includeMax else "ogc:PropertyIsLessThan"
        self.assertEqual(expectedLtName, lt.nodeName())
        ltProperty = lt.firstChild()
        self.assertEqual("ogc:PropertyName", ltProperty.nodeName())
        self.assertEqual(attributeName, ltProperty.toElement().text())
        ltValue = lt.childNodes().item(1)
        self.assertEqual(max, ltValue.toElement().text())
def testSimpleLabeling(self):
layer = QgsVectorLayer("Point", "addfeat", "memory")
self.loadStyleWithCustomProperties(layer, "simpleLabel")
# Pick a local default font
fontFamily = QFont().family()
settings = layer.labeling().settings()
format = settings.format()
font = format.font()
font.setFamily(fontFamily)
font.setBold(False)
font.setItalic(False)
format.setFont(font)
settings.setFormat(format)
layer.setLabeling(QgsVectorLayerSimpleLabeling(settings))
dom, root = self.layerToSld(layer)
# print("Simple label text symbolizer" + dom.toString())
ts = self.getTextSymbolizer(root, 1, 0)
self.assertPropertyName(ts, 'se:Label', 'NAME')
font = self.assertElement(ts, 'se:Font', 0)
self.assertEqual(fontFamily, self.assertSvgParameter(font, 'font-family').text())
self.assertEqual('11', self.assertSvgParameter(font, 'font-size').text())
fill = self.assertElement(ts, 'se:Fill', 0)
self.assertEqual('#000000', self.assertSvgParameter(fill, "fill").text())
self.assertIsNone(self.assertSvgParameter(fill, "fill-opacity", True))
def _assertLabelFontSizeForUnit(self, unit, expectedSize):
    """Load the simple label style, switch the font size unit and check the
    font-size SvgParameter in the exported SLD (SLD sizes are in pixels)."""
    layer = QgsVectorLayer("Point", "addfeat", "memory")
    self.loadStyleWithCustomProperties(layer, "simpleLabel")
    self.updateLayerLabelingUnit(layer, unit)
    dom, root = self.layerToSld(layer)
    ts = self.getTextSymbolizer(root, 1, 0)
    font = self.assertElement(ts, 'se:Font', 0)
    self.assertEqual(expectedSize, self.assertSvgParameter(font, 'font-size').text())

def testLabelingUomMillimeter(self):
    """Label size in millimeters converts to pixels on export."""
    self._assertLabelFontSizeForUnit(QgsUnitTypes.RenderMillimeters, '32')

def testLabelingUomPixels(self):
    """Label size in pixels is exported as-is."""
    self._assertLabelFontSizeForUnit(QgsUnitTypes.RenderPixels, '9')

def testLabelingUomInches(self):
    """Label size in inches converts to pixels on export."""
    self._assertLabelFontSizeForUnit(QgsUnitTypes.RenderInches, '816')
def testTextStyle(self):
    """Check the SLD encoding of the font style: regular exports no
    font-weight/font-style, bold/italic export the matching SvgParameter,
    and underline/strikethrough become GeoServer vendor options."""
    layer = QgsVectorLayer("Point", "addfeat", "memory")
    self.loadStyleWithCustomProperties(layer, "simpleLabel")
    # testing regular
    self.updateLayerLabelingFontStyle(layer, False, False)
    dom, root = self.layerToSld(layer)
    # print("Simple label italic text" + dom.toString())
    ts = self.getTextSymbolizer(root, 1, 0)
    font = self.assertElement(ts, 'se:Font', 0)
    self.assertIsNone(self.assertSvgParameter(font, 'font-weight', True))
    self.assertIsNone(self.assertSvgParameter(font, 'font-style', True))
    # testing bold
    self.updateLayerLabelingFontStyle(layer, True, False)
    dom, root = self.layerToSld(layer)
    # print("Simple label bold text" + dom.toString())
    ts = self.getTextSymbolizer(root, 1, 0)
    font = self.assertElement(ts, 'se:Font', 0)
    self.assertEqual('bold', self.assertSvgParameter(font, 'font-weight').text())
    self.assertIsNone(self.assertSvgParameter(font, 'font-style', True))
    # testing italic
    self.updateLayerLabelingFontStyle(layer, False, True)
    dom, root = self.layerToSld(layer)
    # print("Simple label italic text" + dom.toString())
    ts = self.getTextSymbolizer(root, 1, 0)
    font = self.assertElement(ts, 'se:Font', 0)
    self.assertEqual('italic', self.assertSvgParameter(font, 'font-style').text())
    self.assertIsNone(self.assertSvgParameter(font, 'font-weight', True))
    # testing bold italic
    self.updateLayerLabelingFontStyle(layer, True, True)
    dom, root = self.layerToSld(layer)
    # print("Simple label bold and italic text" + dom.toString())
    ts = self.getTextSymbolizer(root, 1, 0)
    font = self.assertElement(ts, 'se:Font', 0)
    self.assertEqual('italic', self.assertSvgParameter(font, 'font-style').text())
    self.assertEqual('bold', self.assertSvgParameter(font, 'font-weight').text())
    # testing underline and strikethrough vendor options
    self.updateLayerLabelingFontStyle(layer, False, False, True, True)
    dom, root = self.layerToSld(layer)
    # print("Simple label underline and strikethrough text" + dom.toString())
    ts = self.getTextSymbolizer(root, 1, 0)
    font = self.assertElement(ts, 'se:Font', 0)
    self.assertEqual('true', self.assertVendorOption(ts, 'underlineText').text())
    self.assertEqual('true', self.assertVendorOption(ts, 'strikethroughText').text())
# Each QFont capitalization mode maps to a GeoServer string function in the
# SLD label (or to no function at all for mixed case).
def testTextMixedCase(self):
    self.assertCapitalizationFunction(QFont.MixedCase, None)

def testTextUppercase(self):
    self.assertCapitalizationFunction(QFont.AllUppercase, "strToUpperCase")

def testTextLowercase(self):
    self.assertCapitalizationFunction(QFont.AllLowercase, "strToLowerCase")

def testTextCapitalcase(self):
    self.assertCapitalizationFunction(QFont.Capitalize, "strCapitalize")
def assertCapitalizationFunction(self, capitalization, expectedFunction):
    """Apply *capitalization* to the simple label font and verify the SLD
    label: a plain ogc:PropertyName when expectedFunction is None, otherwise
    an ogc:Function of that name wrapping the property."""
    layer = QgsVectorLayer("Point", "addfeat", "memory")
    self.loadStyleWithCustomProperties(layer, "simpleLabel")
    labelSettings = layer.labeling().settings()
    textFormat = labelSettings.format()
    labelFont = textFormat.font()
    labelFont.setCapitalization(capitalization)
    textFormat.setFont(labelFont)
    labelSettings.setFormat(textFormat)
    layer.setLabeling(QgsVectorLayerSimpleLabeling(labelSettings))
    dom, root = self.layerToSld(layer)
    # print("Simple text with capitalization " + str(QFont.AllUppercase) + ": " + dom.toString())
    ts = self.getTextSymbolizer(root, 1, 0)
    label = self.assertElement(ts, "se:Label", 0)
    container = label
    if expectedFunction is not None:
        function = self.assertElement(label, "ogc:Function", 0)
        self.assertEqual(expectedFunction, function.attribute("name"))
        container = function
    prop = self.assertElement(container, "ogc:PropertyName", 0)
    self.assertEqual("NAME", prop.text())
def testLabelingTransparency(self):
    """A label with 50% opacity must export a fill-opacity of 0.5."""
    layer = QgsVectorLayer("Point", "addfeat", "memory")
    self.loadStyleWithCustomProperties(layer, "simpleLabel")
    labelSettings = layer.labeling().settings()
    textFormat = labelSettings.format()
    textFormat.setOpacity(0.5)
    labelSettings.setFormat(textFormat)
    layer.setLabeling(QgsVectorLayerSimpleLabeling(labelSettings))
    dom, root = self.layerToSld(layer)
    # print("Label with transparency " + dom.toString())
    ts = self.getTextSymbolizer(root, 1, 0)
    fill = self.assertElement(ts, 'se:Fill', 0)
    self.assertEqual('#000000', self.assertSvgParameter(fill, "fill").text())
    self.assertEqual('0.5', self.assertSvgParameter(fill, "fill-opacity").text())
def testLabelingBuffer(self):
    """A 10 px label buffer exports as an se:Halo whose radius is half the
    buffer size (5)."""
    layer = QgsVectorLayer("Point", "addfeat", "memory")
    self.loadStyleWithCustomProperties(layer, "simpleLabel")
    buffer = QgsTextBufferSettings()
    buffer.setEnabled(True)
    buffer.setSize(10)
    buffer.setSizeUnit(QgsUnitTypes.RenderPixels)
    buffer.setColor(QColor("Black"))
    self.setLabelBufferSettings(layer, buffer)
    dom, root = self.layerToSld(layer)
    # print("Label with buffer 10 px " + dom.toString())
    ts = self.getTextSymbolizer(root, 1, 0)
    halo = self.assertElement(ts, 'se:Halo', 0)
    # not full width, just radius here
    # NOTE(review): Radius is looked up on ts rather than halo; elementsByTagName
    # searches descendants so it still resolves to the element inside the halo
    self.assertEqual('5', self.assertElement(ts, 'se:Radius', 0).text())
    haloFill = self.assertElement(halo, 'se:Fill', 0)
    self.assertEqual('#000000', self.assertSvgParameter(haloFill, "fill").text())
def testLabelingBufferPointTranslucent(self):
    """A 10 point, 50% transparent red buffer exports as a halo with the
    unit-converted radius, red fill and fill-opacity 0.5."""
    layer = QgsVectorLayer("Point", "addfeat", "memory")
    self.loadStyleWithCustomProperties(layer, "simpleLabel")
    buffer = QgsTextBufferSettings()
    buffer.setEnabled(True)
    buffer.setSize(10)
    buffer.setSizeUnit(QgsUnitTypes.RenderPoints)
    buffer.setColor(QColor("Red"))
    buffer.setOpacity(0.5)
    self.setLabelBufferSettings(layer, buffer)
    dom, root = self.layerToSld(layer)
    # print("Label with buffer 10 points, red 50% transparent " + dom.toString())
    ts = self.getTextSymbolizer(root, 1, 0)
    halo = self.assertElement(ts, 'se:Halo', 0)
    # not full width, just radius here
    self.assertEqual('6.5', self.assertElement(ts, 'se:Radius', 0).text())
    haloFill = self.assertElement(halo, 'se:Fill', 0)
    self.assertEqual('#ff0000', self.assertSvgParameter(haloFill, "fill").text())
    self.assertEqual('0.5', self.assertSvgParameter(haloFill, "fill-opacity").text())
# zIndex/priority combinations and the se:Priority value they should encode to;
# None means the element must be absent (default priority, default z-index).
def testLabelingLowPriority(self):
    self.assertLabelingPriority(0, 0, '0')

def testLabelingDefaultPriority(self):
    self.assertLabelingPriority(0, 5, None)

def testLabelingHighPriority(self):
    self.assertLabelingPriority(0, 10, '1000')

def testLabelingZIndexLowPriority(self):
    self.assertLabelingPriority(1, 0, '1001')

def testLabelingZIndexDefaultPriority(self):
    self.assertLabelingPriority(1, 5, "1500")

def testLabelingZIndexHighPriority(self):
    self.assertLabelingPriority(1, 10, '2000')
def assertLabelingPriority(self, zIndex, priority, expectedSldPriority):
    """Set zIndex/priority on the simple label style, export to SLD and
    verify the se:Priority element text (or its absence when
    expectedSldPriority is None)."""
    layer = QgsVectorLayer("Point", "addfeat", "memory")
    self.loadStyleWithCustomProperties(layer, "simpleLabel")
    labelSettings = layer.labeling().settings()
    labelSettings.zIndex = zIndex
    labelSettings.priority = priority
    layer.setLabeling(QgsVectorLayerSimpleLabeling(labelSettings))
    dom, root = self.layerToSld(layer)
    # print("Label with zIndex at " + str(zIndex) + " and priority at " + str(priority) + ": " + dom.toString())
    ts = self.getTextSymbolizer(root, 1, 0)
    priorityElement = self.assertElement(ts, "se:Priority", 0, True)
    if expectedSldPriority is not None:
        self.assertEqual(expectedSldPriority, priorityElement.text())
    else:
        self.assertIsNone(priorityElement)
def testLabelingPlacementOverPointOffsetRotation(self):
    """'Over point' placement with a mm offset exports as a PointPlacement
    with a pixel-converted displacement and a centered anchor point."""
    layer = QgsVectorLayer("Point", "addfeat", "memory")
    self.loadStyleWithCustomProperties(layer, "simpleLabel")
    settings = layer.labeling().settings()
    settings.placement = QgsPalLayerSettings.OverPoint
    settings.xOffset = 5
    settings.yOffset = 10
    settings.offsetUnits = QgsUnitTypes.RenderMillimeters
    settings.quadOffset = QgsPalLayerSettings.QuadrantOver
    settings.angleOffset = 30
    layer.setLabeling(QgsVectorLayerSimpleLabeling(settings))
    dom, root = self.layerToSld(layer)
    # print("Label with 'over point' placement " + dom.toString())
    ts = self.getTextSymbolizer(root, 1, 0)
    pointPlacement = self.assertPointPlacement(ts)
    # 5/10 mm offsets converted to pixels
    self.assertStaticDisplacement(pointPlacement, 18, 36)
    self.assertStaticAnchorPoint(pointPlacement, 0.5, 0.5)
# One test per label quadrant: each quadrant maps to a static anchor point
# (ax, ay) in the exported PointPlacement.
def testPointPlacementAboveLeft(self):
    self.assertLabelQuadrant(QgsPalLayerSettings.QuadrantAboveLeft, "AboveLeft", 1, 0)

def testPointPlacementAbove(self):
    self.assertLabelQuadrant(QgsPalLayerSettings.QuadrantAbove, "Above", 0.5, 0)

def testPointPlacementAboveRight(self):
    self.assertLabelQuadrant(QgsPalLayerSettings.QuadrantAboveRight, "AboveRight", 0, 0)

def testPointPlacementLeft(self):
    self.assertLabelQuadrant(QgsPalLayerSettings.QuadrantLeft, "Left", 1, 0.5)

def testPointPlacementRight(self):
    self.assertLabelQuadrant(QgsPalLayerSettings.QuadrantRight, "Right", 0, 0.5)

def testPointPlacementBelowLeft(self):
    self.assertLabelQuadrant(QgsPalLayerSettings.QuadrantBelowLeft, "BelowLeft", 1, 1)

def testPointPlacementBelow(self):
    self.assertLabelQuadrant(QgsPalLayerSettings.QuadrantBelow, "Below", 0.5, 1)

def testPointPlacementBelowRight(self):
    # was misnamed testPointPlacementAboveRight, which redefined (and thus
    # silently disabled) the real AboveRight test above
    self.assertLabelQuadrant(QgsPalLayerSettings.QuadrantBelowRight, "BelowRight", 0, 1)

def testPointPlacementCartographic(self):
    # fixed the "Cartoraphic" typo in the test name
    self.assertPointPlacementDistance(QgsPalLayerSettings.OrderedPositionsAroundPoint)

def testPointPlacementAroundPoint(self):
    # was a second def testPointPlacementCartoraphic, which shadowed the
    # cartographic test above so only one of the two ever ran
    self.assertPointPlacementDistance(QgsPalLayerSettings.AroundPoint)
def testLineParallelPlacement(self):
    """Default line labeling exports as a LinePlacement with line
    generalization enabled."""
    layer = QgsVectorLayer("LineString", "addfeat", "memory")
    self.loadStyleWithCustomProperties(layer, "lineLabel")
    dom, root = self.layerToSld(layer)
    # print("Label with parallel line placement " + dom.toString())
    placement = self.assertLinePlacement(root)
    self.assertEqual("true", self.assertElement(placement, 'se:GeneralizeLine', 0).text())
def testLineParallelPlacementOffsetRepeat(self):
    """Parallel line placement with perpendicular offset and repeat distance:
    both are exported in pixels, plus the GeoServer 'repeat' vendor option."""
    layer = QgsVectorLayer("LineString", "addfeat", "memory")
    self.loadStyleWithCustomProperties(layer, "lineLabel")
    self.updateLinePlacementProperties(layer, QgsPalLayerSettings.Line, 2, 50)
    dom, root = self.layerToSld(layer)
    # print("Label with parallel line placement, perp. offset and repeat " + dom.toString())
    ts = self.getTextSymbolizer(root, 1, 0)
    linePlacement = self.assertLinePlacement(ts)
    generalize = self.assertElement(linePlacement, 'se:GeneralizeLine', 0)
    self.assertEqual("true", generalize.text())
    offset = self.assertElement(linePlacement, 'se:PerpendicularOffset', 0)
    self.assertEqual("7", offset.text())
    repeat = self.assertElement(linePlacement, 'se:Repeat', 0)
    self.assertEqual("true", repeat.text())
    gap = self.assertElement(linePlacement, 'se:Gap', 0)
    self.assertEqual("179", gap.text())
    self.assertEqual("179", self.assertVendorOption(ts, "repeat").text())
def testLineCurvePlacementOffsetRepeat(self):
    """Curved placement adds the 'followLine' and 'maxAngleDelta' vendor
    options on top of the parallel-placement encoding."""
    layer = QgsVectorLayer("LineString", "addfeat", "memory")
    self.loadStyleWithCustomProperties(layer, "lineLabel")
    self.updateLinePlacementProperties(layer, QgsPalLayerSettings.Curved, 2, 50, 30, 40)
    dom, root = self.layerToSld(layer)
    # print("Label with curved line placement " + dom.toString())
    ts = self.getTextSymbolizer(root, 1, 0)
    linePlacement = self.assertLinePlacement(ts)
    generalize = self.assertElement(linePlacement, 'se:GeneralizeLine', 0)
    self.assertEqual("true", generalize.text())
    offset = self.assertElement(linePlacement, 'se:PerpendicularOffset', 0)
    self.assertEqual("7", offset.text())
    repeat = self.assertElement(linePlacement, 'se:Repeat', 0)
    self.assertEqual("true", repeat.text())
    gap = self.assertElement(linePlacement, 'se:Gap', 0)
    self.assertEqual("179", gap.text())
    self.assertEqual("179", self.assertVendorOption(ts, "repeat").text())
    self.assertEqual("true", self.assertVendorOption(ts, "followLine").text())
    self.assertEqual("30", self.assertVendorOption(ts, "maxAngleDelta").text())
def testLineCurveMergeLines(self):
    """Curved placement with line merging exports the 'group' and
    'labelAllGroup' vendor options."""
    layer = QgsVectorLayer("LineString", "addfeat", "memory")
    self.loadStyleWithCustomProperties(layer, "lineLabel")
    labelSettings = layer.labeling().settings()
    labelSettings.placement = QgsPalLayerSettings.Curved
    labelSettings.mergeLines = True
    labelSettings.labelPerPart = True
    layer.setLabeling(QgsVectorLayerSimpleLabeling(labelSettings))
    dom, root = self.layerToSld(layer)
    # print("Label with curved line and line grouping " + dom.toString())
    ts = self.getTextSymbolizer(root, 1, 0)
    self.assertEqual("yes", self.assertVendorOption(ts, "group").text())
    self.assertEqual("true", self.assertVendorOption(ts, "labelAllGroup").text())
def testLabelingPolygonFree(self):
    """'Free' polygon placement exports a PointPlacement with a centered
    anchor point and no Displacement element."""
    layer = QgsVectorLayer("Polygon", "addfeat", "memory")
    self.loadStyleWithCustomProperties(layer, "polygonLabel")
    labelSettings = layer.labeling().settings()
    labelSettings.placement = QgsPalLayerSettings.Free
    layer.setLabeling(QgsVectorLayerSimpleLabeling(labelSettings))
    dom, root = self.layerToSld(layer)
    # print("Polygon label with 'Free' placement " + dom.toString())
    ts = self.getTextSymbolizer(root, 1, 0)
    placement = self.assertPointPlacement(ts)
    self.assertIsNone(self.assertElement(ts, "se:Displacement", 0, True))
    self.assertStaticAnchorPoint(placement, 0.5, 0.5)
def testLabelingPolygonPerimeterCurved(self):
    """Curved perimeter placement on polygons exports the same LinePlacement
    encoding (offset, repeat, gap) plus followLine/maxAngleDelta options."""
    layer = QgsVectorLayer("Polygon", "addfeat", "memory")
    self.loadStyleWithCustomProperties(layer, "polygonLabel")
    self.updateLinePlacementProperties(layer, QgsPalLayerSettings.PerimeterCurved, 2, 50, 30, -40)
    dom, root = self.layerToSld(layer)
    # print("Polygon Label with curved perimeter line placement " + dom.toString())
    ts = self.getTextSymbolizer(root, 1, 0)
    linePlacement = self.assertLinePlacement(ts)
    generalize = self.assertElement(linePlacement, 'se:GeneralizeLine', 0)
    self.assertEqual("true", generalize.text())
    offset = self.assertElement(linePlacement, 'se:PerpendicularOffset', 0)
    self.assertEqual("7", offset.text())
    repeat = self.assertElement(linePlacement, 'se:Repeat', 0)
    self.assertEqual("true", repeat.text())
    gap = self.assertElement(linePlacement, 'se:Gap', 0)
    self.assertEqual("179", gap.text())
    self.assertEqual("179", self.assertVendorOption(ts, "repeat").text())
    self.assertEqual("true", self.assertVendorOption(ts, "followLine").text())
    self.assertEqual("30", self.assertVendorOption(ts, "maxAngleDelta").text())
def testLabelScaleDependencies(self):
    """Label scale visibility exports as Min/MaxScaleDenominator on the rule.
    Careful: the minimum scale corresponds to the LARGE denominator."""
    layer = QgsVectorLayer("Polygon", "addfeat", "memory")
    self.loadStyleWithCustomProperties(layer, "polygonLabel")
    labelSettings = layer.labeling().settings()
    labelSettings.scaleVisibility = True
    labelSettings.minimumScale = 10000000
    labelSettings.maximumScale = 1000000
    layer.setLabeling(QgsVectorLayerSimpleLabeling(labelSettings))
    dom, root = self.layerToSld(layer)
    # print("Labeling with scale dependencies " + dom.toString())
    self.assertScaleDenominator(root, "1000000", "10000000", 1)
def testLabelShowAll(self):
    """'Show all labels' disables conflict resolution in the SLD output."""
    layer = QgsVectorLayer("Polygon", "addfeat", "memory")
    self.loadStyleWithCustomProperties(layer, "polygonLabel")
    labelSettings = layer.labeling().settings()
    labelSettings.displayAll = True
    layer.setLabeling(QgsVectorLayerSimpleLabeling(labelSettings))
    dom, root = self.layerToSld(layer)
    # print("Labeling, showing all labels " + dom.toString())
    ts = self.getTextSymbolizer(root, 1, 0)
    self.assertVendorOption(ts, "conflictResolution", "false")
def testLabelUpsideDown(self):
    """Allowing upside-down labels disables forceLeftToRight in the SLD."""
    layer = QgsVectorLayer("Polygon", "addfeat", "memory")
    self.loadStyleWithCustomProperties(layer, "polygonLabel")
    labelSettings = layer.labeling().settings()
    labelSettings.upsidedownLabels = QgsPalLayerSettings.ShowAll
    layer.setLabeling(QgsVectorLayerSimpleLabeling(labelSettings))
    dom, root = self.layerToSld(layer)
    # print("Labeling, showing upside down labels on lines " + dom.toString())
    ts = self.getTextSymbolizer(root, 1, 0)
    self.assertVendorOption(ts, "forceLeftToRight", "false")
# Background shape / size-type combinations: each maps to a well-known mark
# name and (for buffer sizing) a graphic-resize vendor option value; None
# means graphic-resize must be absent.
def testLabelBackgroundSquareResize(self):
    self.assertLabelBackground(QgsTextBackgroundSettings.ShapeSquare, 'square',
                               QgsTextBackgroundSettings.SizeBuffer, 'proportional')

def testLabelBackgroundRectangleResize(self):
    self.assertLabelBackground(QgsTextBackgroundSettings.ShapeRectangle, 'square',
                               QgsTextBackgroundSettings.SizeBuffer, 'stretch')

def testLabelBackgroundCircleResize(self):
    self.assertLabelBackground(QgsTextBackgroundSettings.ShapeCircle, 'circle',
                               QgsTextBackgroundSettings.SizeBuffer, 'proportional')

def testLabelBackgroundEllipseResize(self):
    self.assertLabelBackground(QgsTextBackgroundSettings.ShapeEllipse, 'circle',
                               QgsTextBackgroundSettings.SizeBuffer, 'stretch')

def testLabelBackgroundSquareAbsolute(self):
    self.assertLabelBackground(QgsTextBackgroundSettings.ShapeSquare, 'square',
                               QgsTextBackgroundSettings.SizeFixed, None)

def testLabelBackgroundRectangleAbsolute(self):
    self.assertLabelBackground(QgsTextBackgroundSettings.ShapeRectangle, 'square',
                               QgsTextBackgroundSettings.SizeFixed, None)

def testLabelBackgroundCircleAbsolute(self):
    self.assertLabelBackground(QgsTextBackgroundSettings.ShapeCircle, 'circle',
                               QgsTextBackgroundSettings.SizeFixed, None)

def testLabelBackgroundEllipseAbsolute(self):
    self.assertLabelBackground(QgsTextBackgroundSettings.ShapeEllipse, 'circle',
                               QgsTextBackgroundSettings.SizeFixed, None)
def assertLabelBackground(self, backgroundType, expectedMarkName, sizeType, expectedResize):
    """Configure a label background of the given shape/size type, export to
    SLD and check the generated se:Graphic, graphic-resize and graphic-margin
    vendor options."""
    layer = QgsVectorLayer("Polygon", "addfeat", "memory")
    self.loadStyleWithCustomProperties(layer, "polygonLabel")
    settings = layer.labeling().settings()
    background = QgsTextBackgroundSettings()
    background.setEnabled(True)
    background.setType(backgroundType)
    background.setFillColor(QColor('yellow'))
    background.setStrokeColor(QColor('black'))
    background.setStrokeWidth(2)
    background.setSize(QSizeF(10, 10))
    background.setSizeType(sizeType)
    format = settings.format()
    format.setBackground(background)
    settings.setFormat(format)
    layer.setLabeling(QgsVectorLayerSimpleLabeling(settings))
    dom, root = self.layerToSld(layer)
    # print("Labeling, with background type " + str(backgroundType) + " and size type " + str(sizeType) + ": " + dom.toString())
    ts = self.getTextSymbolizer(root, 1, 0)
    graphic = self.assertElement(ts, "se:Graphic", 0)
    self.assertEqual("36", self.assertElement(graphic, 'se:Size', 0).text())
    self.assertWellKnownMark(graphic, 0, expectedMarkName, '#ffff00', '#000000', 7)
    if expectedResize is None:
        # was assertIsNone(expectedResize, self.assertVendorOption(...)):
        # that asserted on expectedResize (always None here) and passed the
        # vendor option as the failure *message*, so it could never fail
        self.assertIsNone(self.assertVendorOption(ts, 'graphic-resize', True))
    else:
        self.assertEqual(expectedResize, self.assertVendorOption(ts, 'graphic-resize').text())
    # sizeType == 0 — presumably QgsTextBackgroundSettings.SizeBuffer; TODO
    # confirm against the enum and compare to the constant instead
    if sizeType == 0:
        # check extra padding for proportional ellipse
        if backgroundType == QgsTextBackgroundSettings.ShapeEllipse:
            self.assertEqual("42.5 49", self.assertVendorOption(ts, 'graphic-margin').text())
        else:
            self.assertEqual("36 36", self.assertVendorOption(ts, 'graphic-margin').text())
    else:
        self.assertIsNone(self.assertVendorOption(ts, 'graphic-margin', True))
def testRuleBasedLabels(self):
    """Rule based labeling exports three SLD rules (point symbol plus one
    TextSymbolizer per label rule, each with its ogc filter), and adding a
    rule without settings must not change the output (or crash)."""
    layer = QgsVectorLayer("Point", "addfeat", "memory")
    self.loadStyleWithCustomProperties(layer, "ruleLabel")
    dom, root = self.layerToSld(layer)
    # print("Rule based labeling: " + dom.toString())
    # three rules, one with the point symbol, one with the first rule based label,
    # one with the second rule based label
    rule1 = self.getRule(root, 0)
    self.assertElement(rule1, 'se:PointSymbolizer', 0)
    rule2 = self.getRule(root, 1)
    self.assertScaleDenominator(root, '100000', '10000000', 1)
    self.assertElement(rule2, 'se:TextSymbolizer', 0)
    gt = rule2.elementsByTagName("Filter").item(0).firstChild()
    self.assertEqual("ogc:PropertyIsGreaterThan", gt.nodeName())
    gtProperty = gt.toElement().firstChild()
    self.assertEqual("ogc:PropertyName", gtProperty.nodeName())
    self.assertEqual("POP_MAX", gtProperty.toElement().text())
    gtValue = gt.childNodes().item(1)
    self.assertEqual("1000000", gtValue.toElement().text())
    rule3 = self.getRule(root, 2)
    self.assertElement(rule3, 'se:TextSymbolizer', 0)
    lt = rule3.elementsByTagName("Filter").item(0).firstChild()
    self.assertEqual("ogc:PropertyIsLessThan", lt.nodeName())
    ltProperty = lt.toElement().firstChild()
    self.assertEqual("ogc:PropertyName", ltProperty.nodeName())
    self.assertEqual("POP_MAX", ltProperty.toElement().text())
    # was ltValue = gt.childNodes()... / assertEqual(..., gtValue...):
    # a copy/paste bug that re-checked the greater-than filter's literal
    # instead of the less-than one
    ltValue = lt.childNodes().item(1)
    self.assertEqual("1000000", ltValue.toElement().text())
    # check that adding a rule without settings does not segfault
    xml1 = dom.toString()
    layer.labeling().rootRule().appendChild(QgsRuleBasedLabeling.Rule(None))
    dom, root = self.layerToSld(layer)
    xml2 = dom.toString()
    self.assertEqual(xml1, xml2)
def updateLinePlacementProperties(self, layer, linePlacement, distance, repeat, maxAngleInternal=25,
                                  maxAngleExternal=-25):
    """Apply the given line placement, offset distance, repeat distance and
    curved-character angle limits to the layer's simple labeling."""
    labelSettings = layer.labeling().settings()
    labelSettings.placement = linePlacement
    labelSettings.dist = distance
    labelSettings.repeatDistance = repeat
    labelSettings.maxCurvedCharAngleIn = maxAngleInternal
    labelSettings.maxCurvedCharAngleOut = maxAngleExternal
    layer.setLabeling(QgsVectorLayerSimpleLabeling(labelSettings))
def assertPointPlacementDistance(self, placement):
    """Set an around-point style placement with distance 2 and verify the
    exported PointPlacement anchor and displacement."""
    layer = QgsVectorLayer("Point", "addfeat", "memory")
    self.loadStyleWithCustomProperties(layer, "simpleLabel")
    settings = layer.labeling().settings()
    settings.placement = placement
    settings.xOffset = 0
    settings.yOffset = 0
    settings.dist = 2
    layer.setLabeling(QgsVectorLayerSimpleLabeling(settings))
    dom, root = self.layerToSld(layer)
    # print("Label with around point placement " + dom.toString())
    ts = self.getTextSymbolizer(root, 1, 0)
    pointPlacement = self.assertPointPlacement(ts)
    self.assertStaticAnchorPoint(pointPlacement, 0, 0.5)
    # 2 mm distance converted to pixels on both axes
    self.assertStaticDisplacement(pointPlacement, 4.95, 4.95)
def assertLabelQuadrant(self, quadrant, label, ax, ay):
    """Set an over-point placement in the given quadrant (no offsets, no
    rotation) and verify the static anchor point (ax, ay) in the SLD."""
    layer = QgsVectorLayer("Point", "addfeat", "memory")
    self.loadStyleWithCustomProperties(layer, "simpleLabel")
    labelSettings = layer.labeling().settings()
    labelSettings.placement = QgsPalLayerSettings.OverPoint
    labelSettings.xOffset = 0
    labelSettings.yOffset = 0
    labelSettings.quadOffset = quadrant
    labelSettings.angleOffset = 0
    layer.setLabeling(QgsVectorLayerSimpleLabeling(labelSettings))
    dom, root = self.layerToSld(layer)
    # print("Label with " + label + " placement " + dom.toString())
    self.assertStaticAnchorPoint(root, ax, ay)
def setLabelBufferSettings(self, layer, buffer):
    """Install the given QgsTextBufferSettings into the layer's labeling."""
    labelSettings = layer.labeling().settings()
    textFormat = labelSettings.format()
    textFormat.setBuffer(buffer)
    labelSettings.setFormat(textFormat)
    layer.setLabeling(QgsVectorLayerSimpleLabeling(labelSettings))
def updateLayerLabelingFontStyle(self, layer, bold, italic, underline=False, strikeout=False):
    """Apply the given font style flags to the layer's label text format."""
    labelSettings = layer.labeling().settings()
    textFormat = labelSettings.format()
    labelFont = textFormat.font()
    labelFont.setBold(bold)
    labelFont.setItalic(italic)
    labelFont.setUnderline(underline)
    labelFont.setStrikeOut(strikeout)
    textFormat.setFont(labelFont)
    labelSettings.setFormat(textFormat)
    layer.setLabeling(QgsVectorLayerSimpleLabeling(labelSettings))
def updateLayerLabelingUnit(self, layer, unit):
    """Change the render unit used for the label font size."""
    labelSettings = layer.labeling().settings()
    textFormat = labelSettings.format()
    textFormat.setSizeUnit(unit)
    labelSettings.setFormat(textFormat)
    layer.setLabeling(QgsVectorLayerSimpleLabeling(labelSettings))
def loadStyleWithCustomProperties(self, layer, qmlFileName):
    """Load a QML style from the test data dir into *layer*, labeling included.

    loadNamedStyle() restores only the vector symbology; the labeling is
    stored in custom properties, which must be read separately from the
    QML document.
    """
    path = QDir.toNativeSeparators('%s/symbol_layer/%s.qml' % (unitTestDataPath(), qmlFileName))
    layer.loadNamedStyle(path)
    # labeling is in custom properties, they need to be loaded separately
    doc = QDomDocument()
    file = QFile(path)
    file.open(QIODevice.ReadOnly)
    doc.setContent(file, True)
    file.close()
    layer.readCustomProperties(doc.documentElement())
def assertPointPlacement(self, textSymbolizer):
    """Assert the symbolizer uses a PointPlacement (and no LinePlacement);
    return the se:PointPlacement element."""
    labelPlacement = self.assertElement(textSymbolizer, 'se:LabelPlacement', 0)
    self.assertIsNone(self.assertElement(labelPlacement, 'se:LinePlacement', 0, True))
    return self.assertElement(labelPlacement, 'se:PointPlacement', 0)
def assertLinePlacement(self, textSymbolizer):
    """Assert the symbolizer uses a LinePlacement (and no PointPlacement);
    return the se:LinePlacement element."""
    labelPlacement = self.assertElement(textSymbolizer, 'se:LabelPlacement', 0)
    self.assertIsNone(self.assertElement(labelPlacement, 'se:PointPlacement', 0, True))
    return self.assertElement(labelPlacement, 'se:LinePlacement', 0)
def assertElement(self, container, elementName, index, allowMissing=False):
    """Return the index-th descendant element of *container* named
    *elementName*.

    Fails the test when there are not enough matches, unless allowMissing is
    True, in which case None is returned instead.
    """
    # renamed from 'list', which shadowed the builtin
    matches = container.elementsByTagName(elementName)
    if matches.size() <= index:
        if allowMissing:
            return None
        else:
            self.fail('Expected to find at least ' + str(
                index + 1) + ' ' + elementName + ' in ' + container.nodeName() + ' but found ' + str(matches.size()))
    node = matches.item(index)
    # was 'it''s' — adjacent string literals rendering as "its" in the message
    self.assertTrue(node.isElement(), "Found node but it's not an element")
    return node.toElement()
def getRule(self, root, ruleIndex):
    """Return the ruleIndex-th se:Rule element under root."""
    return self.assertElement(root, 'se:Rule', ruleIndex)

def getTextSymbolizer(self, root, ruleIndex, textSymbolizerIndex):
    """Return the requested se:TextSymbolizer inside the requested se:Rule."""
    rule = self.assertElement(root, 'se:Rule', ruleIndex)
    return self.assertElement(rule, 'se:TextSymbolizer', textSymbolizerIndex)
def assertPropertyName(self, root, containerProperty, expectedAttributeName):
    """Check that the first *containerProperty* element under root wraps an
    ogc:PropertyName with the expected attribute name."""
    container = root.elementsByTagName(containerProperty).item(0).toElement()
    propertyName = container.elementsByTagName("ogc:PropertyName").item(0).toElement()
    self.assertEqual(expectedAttributeName, propertyName.text())
def assertSvgParameter(self, container, expectedName, allowMissing=False):
    """Return the se:SvgParameter under *container* whose 'name' attribute is
    *expectedName*; fail (or return None when allowMissing) if absent."""
    parameters = container.elementsByTagName("se:SvgParameter")
    for i in range(parameters.size()):
        item = parameters.item(i)
        # was `item.isElement and item.isElement()`: the first operand is a
        # bound method (always truthy), so only the call was ever meaningful
        if item.isElement() and item.toElement().attribute('name') == expectedName:
            return item.toElement()
    if allowMissing:
        return None
    else:
        self.fail('Could not find a se:SvgParameter named ' + expectedName + ' in ' + container.nodeName())
def assertVendorOption(self, container, expectedName, allowMissing=False):
    """Return the se:VendorOption under *container* whose 'name' attribute is
    *expectedName*; fail (or return None when allowMissing) if absent."""
    options = container.elementsByTagName("se:VendorOption")
    for i in range(options.size()):
        item = options.item(i)
        # was `item.isElement and item.isElement()`: the first operand is a
        # bound method (always truthy), so only the call was ever meaningful
        if item.isElement() and item.toElement().attribute('name') == expectedName:
            return item.toElement()
    if allowMissing:
        return None
    else:
        self.fail('Could not find a se:VendorOption named ' + expectedName + ' in ' + container.nodeName())
def testRuleBaseEmptyFilter(self):
    """A categorized style whose last category matches the empty value must
    export that rule's filter as an ogc:Or of PropertyIsEqualTo and
    PropertyIsNull."""
    layer = QgsVectorLayer("Point", "addfeat", "memory")
    stylePath = QDir.toNativeSeparators('%s/symbol_layer/%s.qml' % (unitTestDataPath(), "categorizedEmptyValue"))
    layer.loadNamedStyle(stylePath)  # NOQA
    dom, root = self.layerToSld(layer)
    # print("Rule based, with last rule checking against empty value:" + dom.toString())
    # get the third rule
    rule = root.elementsByTagName('se:Rule').item(2).toElement()
    condition = rule.elementsByTagName('Filter').item(0).toElement().firstChild().toElement()
    self.assertEqual("ogc:Or", condition.nodeName())
    self.assertEqual(1, condition.elementsByTagName('ogc:PropertyIsEqualTo').size())
    self.assertEqual(1, condition.elementsByTagName('ogc:PropertyIsNull').size())
def assertScaleDenominator(self, root, expectedMinScale, expectedMaxScale, index=0):
    """Verify the Min/MaxScaleDenominator of the index-th se:Rule; a falsy
    expected value asserts the corresponding element is absent anywhere
    under root."""
    rule = root.elementsByTagName('se:Rule').item(index).toElement()
    if not expectedMinScale:
        self.assertEqual(0, root.elementsByTagName('se:MinScaleDenominator').size())
    else:
        minScale = rule.elementsByTagName('se:MinScaleDenominator').item(0)
        self.assertEqual(expectedMinScale, minScale.firstChild().nodeValue())
    if not expectedMaxScale:
        self.assertEqual(0, root.elementsByTagName('se:MaxScaleDenominator').size())
    else:
        maxScale = rule.elementsByTagName('se:MaxScaleDenominator').item(0)
        self.assertEqual(expectedMaxScale, maxScale.firstChild().nodeValue())
def assertDashPattern(self, root, svgParameterIdx, expectedPattern):
    """Check that the svgParameterIdx-th se:SvgParameter is a
    stroke-dasharray with the expected pattern string."""
    parameter = root.elementsByTagName('se:SvgParameter').item(svgParameterIdx)
    name = parameter.attributes().namedItem('name')
    self.assertEqual("stroke-dasharray", name.nodeValue())
    self.assertEqual(expectedPattern, parameter.firstChild().nodeValue())
def assertStaticGap(self, root, expectedValue):
    """Check the first se:Gap holds a static ogc:Literal with the expected
    value (not a dynamic expression)."""
    # local was named 'rotation' and the comment was truncated — copy/paste
    # leftovers from assertStaticRotation
    gap = root.elementsByTagName('se:Gap').item(0)
    literal = gap.firstChild()
    self.assertEqual("ogc:Literal", literal.nodeName())
    self.assertEqual(expectedValue, literal.firstChild().nodeValue())
def assertStaticSize(self, root, expectedValue):
    """Check the first se:Size element carries the expected static value."""
    sizeElement = root.elementsByTagName('se:Size').item(0)
    self.assertEqual(expectedValue, sizeElement.firstChild().nodeValue())
def assertExternalGraphic(self, root, index, expectedLink, expectedFormat):
    """Check the index-th se:ExternalGraphic's OnlineResource href and
    Format value."""
    graphic = root.elementsByTagName('se:ExternalGraphic').item(index)
    resource = graphic.firstChildElement('se:OnlineResource')
    self.assertEqual(expectedLink, resource.attribute('xlink:href'))
    formatElement = graphic.firstChildElement('se:Format')
    self.assertEqual(expectedFormat, formatElement.firstChild().nodeValue())
def assertStaticPerpendicularOffset(self, root, expectedValue):
    """Check the first se:PerpendicularOffset carries the expected value."""
    offsetElement = root.elementsByTagName('se:PerpendicularOffset').item(0)
    self.assertEqual(expectedValue, offsetElement.firstChild().nodeValue())
def assertWellKnownMark(self, root, index, expectedName, expectedFill, expectedStroke, expectedStrokeWidth):
    """Check the index-th se:Mark: its WellKnownName, fill color, stroke
    color and stroke width. Passing None for fill or stroke asserts the
    corresponding element is absent."""
    mark = root.elementsByTagName('se:Mark').item(index)
    wkn = mark.firstChildElement('se:WellKnownName')
    self.assertEqual(expectedName, wkn.text())
    fill = mark.firstChildElement('se:Fill')
    if expectedFill is None:
        self.assertTrue(fill.isNull())
    else:
        parameter = fill.firstChildElement('se:SvgParameter')
        self.assertEqual('fill', parameter.attribute('name'))
        self.assertEqual(expectedFill, parameter.text())
    stroke = mark.firstChildElement('se:Stroke')
    if expectedStroke is None:
        self.assertTrue(stroke.isNull())
    else:
        parameter = stroke.firstChildElement('se:SvgParameter')
        self.assertEqual('stroke', parameter.attribute('name'))
        self.assertEqual(expectedStroke, parameter.text())
        # stroke-width is expected to be the SvgParameter right after stroke
        parameter = parameter.nextSiblingElement('se:SvgParameter')
        self.assertEqual('stroke-width', parameter.attribute('name'))
        self.assertEqual(str(expectedStrokeWidth), parameter.text())
def assertStaticRotation(self, root, expectedValue, index=0):
    # Check the rotation element is a static ogc:Literal, not a dynamic expression
    rotation = root.elementsByTagName('se:Rotation').item(index)
    literal = rotation.firstChild()
    self.assertEqual("ogc:Literal", literal.nodeName())
    self.assertEqual(expectedValue, literal.firstChild().nodeValue())
def assertStaticDisplacement(self, root, expectedAnchorX, expectedAnchorY):
    """Check the first se:Displacement holds the expected X/Y values.

    NOTE(review): the parameter names say "Anchor" but this checks the
    Displacement element — the names appear swapped with
    assertStaticAnchorPoint; renaming would change the signature for any
    keyword caller, so only flagged here.
    """
    displacement = root.elementsByTagName('se:Displacement').item(0)
    self.assertIsNotNone(displacement)
    dx = displacement.firstChild()
    self.assertIsNotNone(dx)
    self.assertEqual("se:DisplacementX", dx.nodeName())
    self.assertSldNumber(expectedAnchorX, dx.firstChild().nodeValue())
    dy = displacement.lastChild()
    self.assertIsNotNone(dy)
    self.assertEqual("se:DisplacementY", dy.nodeName())
    self.assertSldNumber(expectedAnchorY, dy.firstChild().nodeValue())
def assertStaticAnchorPoint(self, root, expectedDispX, expectedDispY):
    """Check the first se:AnchorPoint holds the expected X/Y values.

    NOTE(review): the parameter names say "Disp" but this checks the
    AnchorPoint element — swapped with assertStaticDisplacement; renaming
    would change the signature for any keyword caller, so only flagged here.
    """
    anchor = root.elementsByTagName('se:AnchorPoint').item(0)
    self.assertIsNotNone(anchor)
    ax = anchor.firstChild()
    self.assertIsNotNone(ax)
    self.assertEqual("se:AnchorPointX", ax.nodeName())
    self.assertSldNumber(expectedDispX, ax.firstChild().nodeValue())
    ay = anchor.lastChild()
    self.assertIsNotNone(ay)
    self.assertEqual("se:AnchorPointY", ay.nodeName())
    self.assertSldNumber(expectedDispY, ay.firstChild().nodeValue())
def assertSldNumber(self, expected, stringValue):
    """Parse stringValue as a float and compare to expected within 0.01."""
    self.assertFloatEquals(expected, float(stringValue), 0.01)
def assertFloatEquals(self, expected, actual, tol):
    """Assert |expected - actual| < tol.

    The failure message used %d, which truncated the float values and could
    make two different numbers print identically; %g keeps them readable.
    """
    self.assertLess(abs(expected - actual), tol, 'Expected %g but was %g' % (expected, actual))
    def assertStrokeWidth(self, root, svgParameterIdx, expectedWidth):
        """Assert the se:SvgParameter at *svgParameterIdx* is a stroke-width
        parameter whose numeric value is close to *expectedWidth*."""
        strokeWidth = root.elementsByTagName(
            'se:SvgParameter').item(svgParameterIdx)
        svgParameterName = strokeWidth.attributes().namedItem('name')
        self.assertEqual("stroke-width", svgParameterName.nodeValue())
        self.assertSldNumber(
            expectedWidth, strokeWidth.firstChild().nodeValue())
def symbolToSld(self, symbolLayer):
dom = QDomDocument()
root = dom.createElement("FakeRoot")
dom.appendChild(root)
symbolLayer.toSld(dom, root, {})
return dom, root
    def layerToSld(self, mapLayer):
        """Write *mapLayer* as SLD into a fresh DOM; return (document, root).

        NOTE(review): *error* is passed to writeSld but never inspected
        afterwards, so export failures are silently ignored -- confirm
        whether the error message should be asserted on.
        """
        dom = QDomDocument()
        root = dom.createElement("FakeRoot")
        dom.appendChild(root)
        error = None
        mapLayer.writeSld(root, dom, error, {})
        return dom, root
if __name__ == '__main__':
unittest.main()
| gpl-2.0 |
underbluewaters/marinemap | lingcod/common/s3.py | 3 | 1613 | """
Lingcod's custom S3 wrapper
Provides some useful shortcuts for working with common AWS S3 tasks
"""
from mimetypes import guess_type
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from django.conf import settings
import os
def s3_bucket(bucket=None):
    """
    Shortcut to a boto s3 bucket
    Uses settings.AWS_ACCESS_KEY, settings.AWS_SECRET_KEY
    defaults to settings.AWS_MEDIA_BUCKET
    """
    conn = S3Connection(settings.AWS_ACCESS_KEY, settings.AWS_SECRET_KEY)
    if not bucket:
        # Only fall back to the error for a genuinely missing setting;
        # the previous bare "except:" also masked unrelated failures
        # (typos, interrupts, ...).
        bucket = getattr(settings, 'AWS_MEDIA_BUCKET', None)
        if not bucket:
            raise Exception("No bucket specified and no settings.AWS_MEDIA_BUCKET")
    # create_bucket is idempotent in boto: it returns the bucket if it
    # already exists, creating it otherwise.
    return conn.create_bucket(bucket)
def get_s3_url(b, k):
    """
    Returns the standard s3 url
    """
    return 'http://{0}.s3.amazonaws.com/{1}'.format(b.name, k.key)
def upload_to_s3(local_path, keyname, mimetype=None, bucket=None, acl='public-read'):
    """
    Given a local filepath, bucket name and keyname (the new s3 filename),
    this function will connect, guess the mimetype of the file, upload the
    contents and set the acl. Defaults to public-read.

    Returns the public URL of the uploaded key.
    Raises Exception if local_path does not exist.
    """
    # Validate the input before opening a network connection.
    if not os.path.exists(local_path):
        raise Exception("%s does not exist; can't upload to S3" % local_path)
    b = s3_bucket(bucket)
    if not mimetype:
        # guess_type returns (type, encoding); type is None when the
        # extension is unknown, hence the text/plain fallback.
        mimetype = guess_type(local_path)[0] or "text/plain"
    k = Key(b)
    k.key = keyname
    k.set_metadata("Content-Type", mimetype)
    k.set_contents_from_filename(local_path)
    k.set_acl(acl)
    return get_s3_url(b, k)
| bsd-3-clause |
utilite2/linux-kernel | tools/perf/scripts/python/sched-migration.py | 11215 | 11670 | #!/usr/bin/python
#
# Cpu task migration overview toy
#
# Copyright (C) 2010 Frederic Weisbecker <fweisbec@gmail.com>
#
# perf script event handlers have been generated by perf script -g python
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import os
import sys
from collections import defaultdict
from UserList import UserList
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
sys.path.append('scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from SchedGui import *
# pid -> command name, seeded with the swapper/idle task.
threads = { 0 : "idle"}
def thread_name(pid):
    """Return a human-readable "comm:pid" label for *pid*."""
    return "{0}:{1}".format(threads[pid], pid)
class RunqueueEventUnknown:
    """Placeholder event when the cause of a runqueue change is unknown."""
    def __repr__(self):
        return "unknown"
    @staticmethod
    def color():
        # No dedicated color: the renderer falls back to its default.
        return None
class RunqueueEventSleep:
    """Runqueue event: a task left the queue because it went to sleep."""
    def __init__(self, sleeper):
        # pid of the task that blocked
        self.sleeper = sleeper
    @staticmethod
    def color():
        # Sleep events are drawn in blue.
        return (0, 0, 0xff)
    def __repr__(self):
        return "%s gone to sleep" % thread_name(self.sleeper)
class RunqueueEventWakeup:
    """Runqueue event: a sleeping task was woken up."""
    def __init__(self, wakee):
        # pid of the task that was woken
        self.wakee = wakee
    @staticmethod
    def color():
        # Wakeup events are drawn in yellow.
        return (0xff, 0xff, 0)
    def __repr__(self):
        return "%s woke up" % thread_name(self.wakee)
class RunqueueEventFork:
    """Runqueue event: a freshly forked task entered the queue."""
    def __init__(self, child):
        # pid of the newly forked task
        self.child = child
    @staticmethod
    def color():
        # Fork events are drawn in green.
        return (0, 0xff, 0)
    def __repr__(self):
        return "new forked task %s" % thread_name(self.child)
class RunqueueMigrateIn:
    """Runqueue event: a task migrated onto this CPU."""
    def __init__(self, new):
        # pid of the task that arrived
        self.new = new
    @staticmethod
    def color():
        # Migrate-in events are drawn in cyan-ish blue.
        return (0, 0xf0, 0xff)
    def __repr__(self):
        return "task migrated in %s" % thread_name(self.new)
class RunqueueMigrateOut:
    """Runqueue event: a task migrated away from this CPU."""
    def __init__(self, old):
        # pid of the task that departed
        self.old = old
    @staticmethod
    def color():
        # Migrate-out events are drawn in magenta.
        return (0xff, 0, 0xff)
    def __repr__(self):
        return "task migrated out %s" % thread_name(self.old)
class RunqueueSnapshot:
    """Immutable snapshot of one CPU runqueue plus the event that created it.

    ``tasks`` always contains pid 0 (idle) and is stored as a tuple, so a
    snapshot is never mutated in place; every scheduler transition builds
    a new snapshot (or returns ``self`` when nothing changed).
    """
    def __init__(self, tasks = (0,), event = RunqueueEventUnknown()):
        # Default changed from the mutable [0] to the tuple (0,); the
        # value is copied into a tuple anyway, so behavior is identical.
        self.tasks = tuple(tasks)
        self.event = event
    def sched_switch(self, prev, prev_state, next):
        """Return the snapshot after switching from *prev* to *next*."""
        event = RunqueueEventUnknown()
        # Nothing changed: prev stays runnable and both tasks are known.
        if taskState(prev_state) == "R" and next in self.tasks \
            and prev in self.tasks:
            return self
        if taskState(prev_state) != "R":
            event = RunqueueEventSleep(prev)
        next_tasks = list(self.tasks[:])
        if prev in self.tasks:
            if taskState(prev_state) != "R":
                next_tasks.remove(prev)
        elif taskState(prev_state) == "R":
            next_tasks.append(prev)
        if next not in next_tasks:
            next_tasks.append(next)
        return RunqueueSnapshot(next_tasks, event)
    def migrate_out(self, old):
        """Return the snapshot after *old* migrated to another CPU."""
        if old not in self.tasks:
            return self
        next_tasks = [task for task in self.tasks if task != old]
        return RunqueueSnapshot(next_tasks, RunqueueMigrateOut(old))
    def __migrate_in(self, new, event):
        if new in self.tasks:
            # Task already present: keep the task set, record the event.
            self.event = event
            return self
        next_tasks = self.tasks[:] + tuple([new])
        return RunqueueSnapshot(next_tasks, event)
    def migrate_in(self, new):
        return self.__migrate_in(new, RunqueueMigrateIn(new))
    def wake_up(self, new):
        return self.__migrate_in(new, RunqueueEventWakeup(new))
    def wake_up_new(self, new):
        return self.__migrate_in(new, RunqueueEventFork(new))
    def load(self):
        """ Provide the number of tasks on the runqueue.
            Don't count idle"""
        return len(self.tasks) - 1
    def __repr__(self):
        # Bug fix: this used to call self.origin_tostring(), a method that
        # is not defined anywhere, so repr() raised AttributeError.
        ret = self.tasks.__repr__()
        ret += self.event.__repr__()
        return ret
class TimeSlice:
	"""State of all runqueues over one [start, end] time interval.

	A slice starts as a copy of the previous slice's runqueues and total
	load, and is then updated by the scheduler events that fall inside it.
	"""
	def __init__(self, start, prev):
		self.start = start
		self.prev = prev
		self.end = start
		# cpus that triggered the event
		self.event_cpus = []
		if prev is not None:
			self.total_load = prev.total_load
			self.rqs = prev.rqs.copy()
		else:
			# First slice ever: empty runqueues (idle only) on every cpu.
			self.rqs = defaultdict(RunqueueSnapshot)
			self.total_load = 0
	def __update_total_load(self, old_rq, new_rq):
		# Keep the aggregate load in sync with a per-cpu rq replacement.
		diff = new_rq.load() - old_rq.load()
		self.total_load += diff
	def sched_switch(self, ts_list, prev, prev_state, next, cpu):
		"""Apply a sched_switch on *cpu*; register self in ts_list if changed."""
		old_rq = self.prev.rqs[cpu]
		new_rq = old_rq.sched_switch(prev, prev_state, next)
		if old_rq is new_rq:
			return
		self.rqs[cpu] = new_rq
		self.__update_total_load(old_rq, new_rq)
		ts_list.append(self)
		self.event_cpus = [cpu]
	def migrate(self, ts_list, new, old_cpu, new_cpu):
		"""Move task *new* from old_cpu's rq to new_cpu's rq."""
		if old_cpu == new_cpu:
			return
		old_rq = self.prev.rqs[old_cpu]
		out_rq = old_rq.migrate_out(new)
		self.rqs[old_cpu] = out_rq
		self.__update_total_load(old_rq, out_rq)
		new_rq = self.prev.rqs[new_cpu]
		in_rq = new_rq.migrate_in(new)
		self.rqs[new_cpu] = in_rq
		self.__update_total_load(new_rq, in_rq)
		ts_list.append(self)
		# Only flag the source cpu if the task was actually removed there.
		if old_rq is not out_rq:
			self.event_cpus.append(old_cpu)
		self.event_cpus.append(new_cpu)
	def wake_up(self, ts_list, pid, cpu, fork):
		"""Apply a wakeup (or fork wakeup when *fork* is truthy) on *cpu*."""
		old_rq = self.prev.rqs[cpu]
		if fork:
			new_rq = old_rq.wake_up_new(pid)
		else:
			new_rq = old_rq.wake_up(pid)
		if new_rq is old_rq:
			return
		self.rqs[cpu] = new_rq
		self.__update_total_load(old_rq, new_rq)
		ts_list.append(self)
		self.event_cpus = [cpu]
	def next(self, t):
		# Close this slice at time t and start the successor slice.
		self.end = t
		return TimeSlice(t, self)
class TimeSliceList(UserList):
	"""Ordered list of TimeSlice objects plus GUI plumbing for SchedGui."""
	def __init__(self, arg = []):
		self.data = arg
	def get_time_slice(self, ts):
		"""Return the slice covering timestamp *ts*, creating it if needed."""
		if len(self.data) == 0:
			slice = TimeSlice(ts, TimeSlice(-1, None))
		else:
			slice = self.data[-1].next(ts)
		return slice
	def find_time_slice(self, ts):
		"""Binary search: index of the slice containing *ts*, or -1."""
		start = 0
		end = len(self.data)
		found = -1
		searching = True
		while searching:
			if start == end or start == end - 1:
				searching = False
			# Python 2 integer division; would need // under Python 3.
			i = (end + start) / 2
			if self.data[i].start <= ts and self.data[i].end >= ts:
				found = i
				end = i
				continue
			if self.data[i].end < ts:
				start = i
			elif self.data[i].start > ts:
				end = i
		return found
	def set_root_win(self, win):
		self.root_win = win
	def mouse_down(self, cpu, t):
		"""Show a summary of *cpu*'s runqueue at time *t* in the GUI."""
		idx = self.find_time_slice(t)
		if idx == -1:
			return
		ts = self[idx]
		rq = ts.rqs[cpu]
		raw = "CPU: %d\n" % cpu
		raw += "Last event : %s\n" % rq.event.__repr__()
		raw += "Timestamp : %d.%06d\n" % (ts.start / (10 ** 9), (ts.start % (10 ** 9)) / 1000)
		# NOTE(review): timestamps appear to be in ns elsewhere, so ns/10^6
		# is ms, not us as the label says -- confirm the intended unit.
		raw += "Duration : %6d us\n" % ((ts.end - ts.start) / (10 ** 6))
		raw += "Load = %d\n" % rq.load()
		for t in rq.tasks:
			raw += "%s \n" % thread_name(t)
		self.root_win.update_summary(raw)
	def update_rectangle_cpu(self, slice, cpu):
		"""Paint one cpu/slice cell; redder means a larger share of the load."""
		rq = slice.rqs[cpu]
		if slice.total_load != 0:
			load_rate = rq.load() / float(slice.total_load)
		else:
			load_rate = 0
		red_power = int(0xff - (0xff * load_rate))
		color = (0xff, red_power, red_power)
		top_color = None
		if cpu in slice.event_cpus:
			# Mark the cpu that triggered the event with the event's color.
			top_color = rq.event.color()
		self.root_win.paint_rectangle_zone(cpu, color, top_color, slice.start, slice.end)
	def fill_zone(self, start, end):
		"""Repaint every slice intersecting [start, end]."""
		i = self.find_time_slice(start)
		if i == -1:
			return
		for i in xrange(i, len(self.data)):
			timeslice = self.data[i]
			if timeslice.start > end:
				return
			for cpu in timeslice.rqs:
				self.update_rectangle_cpu(timeslice, cpu)
	def interval(self):
		"""Return the (first start, last end) covered timestamps."""
		if len(self.data) == 0:
			return (0, 0)
		return (self.data[0].start, self.data[-1].end)
	def nr_rectangles(self):
		# Highest cpu number seen in the final slice (rows to draw).
		last_ts = self.data[-1]
		max_cpu = 0
		for cpu in last_ts.rqs:
			if cpu > max_cpu:
				max_cpu = cpu
		return max_cpu
class SchedEventProxy:
	"""Translate raw perf scheduler events into TimeSlice updates."""
	def __init__(self):
		# Last known running pid per cpu; -1 means "not yet known".
		self.current_tsk = defaultdict(lambda : -1)
		self.timeslices = TimeSliceList()
	def sched_switch(self, headers, prev_comm, prev_pid, prev_prio, prev_state,
			 next_comm, next_pid, next_prio):
		""" Ensure the task we sched out this cpu is really the one
		    we logged. Otherwise we may have missed traces """
		on_cpu_task = self.current_tsk[headers.cpu]
		if on_cpu_task != -1 and on_cpu_task != prev_pid:
			print "Sched switch event rejected ts: %s cpu: %d prev: %s(%d) next: %s(%d)" % \
				(headers.ts_format(), headers.cpu, prev_comm, prev_pid, next_comm, next_pid)
		# Remember the command names for thread_name() rendering.
		threads[prev_pid] = prev_comm
		threads[next_pid] = next_comm
		self.current_tsk[headers.cpu] = next_pid
		ts = self.timeslices.get_time_slice(headers.ts())
		ts.sched_switch(self.timeslices, prev_pid, prev_state, next_pid, headers.cpu)
	def migrate(self, headers, pid, prio, orig_cpu, dest_cpu):
		"""Record a task migration between cpus."""
		ts = self.timeslices.get_time_slice(headers.ts())
		ts.migrate(self.timeslices, pid, orig_cpu, dest_cpu)
	def wake_up(self, headers, comm, pid, success, target_cpu, fork):
		"""Record a (possibly fork-time) wakeup; ignore failed wakeups."""
		if success == 0:
			return
		ts = self.timeslices.get_time_slice(headers.ts())
		ts.wake_up(self.timeslices, pid, target_cpu, fork)
def trace_begin():
	"""perf hook: create the global event parser before any event arrives."""
	global parser
	parser = SchedEventProxy()
def trace_end():
	"""perf hook: all events consumed -- launch the wxPython viewer."""
	app = wx.App(False)
	timeslices = parser.timeslices
	# The frame reference keeps the window alive for the main loop.
	frame = RootFrame(timeslices, "Migration")
	app.MainLoop()
# The handlers below were generated by "perf script -g python"; events
# that do not affect the migration view are deliberately ignored.
def sched__sched_stat_runtime(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	comm, pid, runtime, vruntime):
	pass
def sched__sched_stat_iowait(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	comm, pid, delay):
	pass
def sched__sched_stat_sleep(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	comm, pid, delay):
	pass
def sched__sched_stat_wait(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	comm, pid, delay):
	pass
def sched__sched_process_fork(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	parent_comm, parent_pid, child_comm, child_pid):
	pass
def sched__sched_process_wait(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	comm, pid, prio):
	pass
def sched__sched_process_exit(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	comm, pid, prio):
	pass
def sched__sched_process_free(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	comm, pid, prio):
	pass
# Events that do matter: each one packs the common fields into an
# EventHeaders and forwards to the global SchedEventProxy.
def sched__sched_migrate_task(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	comm, pid, prio, orig_cpu,
	dest_cpu):
	headers = EventHeaders(common_cpu, common_secs, common_nsecs,
				common_pid, common_comm)
	parser.migrate(headers, pid, prio, orig_cpu, dest_cpu)
def sched__sched_switch(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	prev_comm, prev_pid, prev_prio, prev_state,
	next_comm, next_pid, next_prio):
	headers = EventHeaders(common_cpu, common_secs, common_nsecs,
				common_pid, common_comm)
	parser.sched_switch(headers, prev_comm, prev_pid, prev_prio, prev_state,
			 next_comm, next_pid, next_prio)
# fork-time wakeup: last argument fork=1 ...
def sched__sched_wakeup_new(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	comm, pid, prio, success,
	target_cpu):
	headers = EventHeaders(common_cpu, common_secs, common_nsecs,
				common_pid, common_comm)
	parser.wake_up(headers, comm, pid, success, target_cpu, 1)
# ... regular wakeup: fork=0.
def sched__sched_wakeup(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	comm, pid, prio, success,
	target_cpu):
	headers = EventHeaders(common_cpu, common_secs, common_nsecs,
				common_pid, common_comm)
	parser.wake_up(headers, comm, pid, success, target_cpu, 0)
# Remaining generated handlers: ignored by the migration view.
def sched__sched_wait_task(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	comm, pid, prio):
	pass
def sched__sched_kthread_stop_ret(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	ret):
	pass
def sched__sched_kthread_stop(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	comm, pid):
	pass
def trace_unhandled(event_name, context, common_cpu, common_secs, common_nsecs,
		common_pid, common_comm):
	pass
| gpl-2.0 |
sv-dev1/odoo | addons/website/models/ir_ui_view.py | 161 | 10904 | # -*- coding: utf-8 -*-
import copy
from lxml import etree, html
from openerp import SUPERUSER_ID, api
from openerp.addons.website.models import website
from openerp.http import request
from openerp.osv import osv, fields
# Extends ir.ui.view with website features: page/SEO metadata columns and
# helpers for rendering and inline (in-page) editing of view sections.
class view(osv.osv):
    _inherit = "ir.ui.view"
    _columns = {
        'page': fields.boolean("Whether this view is a web page template (complete)"),
        'website_meta_title': fields.char("Website meta title", size=70, translate=True),
        'website_meta_description': fields.text("Website meta description", size=160, translate=True),
        'website_meta_keywords': fields.char("Website meta keywords", translate=True),
        'customize_show': fields.boolean("Show As Optional Inherit"),
    }
    _defaults = {
        'page': False,
        'customize_show': False,
    }
    def _view_obj(self, cr, uid, view_id, context=None):
        """Resolve *view_id* (xml id string, database id, or browse record)
        to a view browse record. Raises ValueError for unknown xml ids."""
        if isinstance(view_id, basestring):
            return self.pool['ir.model.data'].xmlid_to_object(
                cr, uid, view_id, raise_if_not_found=True, context=context
            )
        elif isinstance(view_id, (int, long)):
            return self.browse(cr, uid, view_id, context=context)
        # assume it's already a view object (WTF?)
        return view_id
    # Returns all views (called and inherited) related to a view
    # Used by translation mechanism, SEO and optional templates
    def _views_get(self, cr, uid, view_id, options=True, context=None, root=True):
        """ For a given view ``view_id``, should return:
        * the view itself
        * all views inheriting from it, enabled or not
          - but not the optional children of a non-enabled child
        * all views called from it (via t-call)
        """
        try:
            view = self._view_obj(cr, uid, view_id, context=context)
        except ValueError:
            # Shall we log that ?
            return []
        # Start from the root of the inheritance chain when requested.
        while root and view.inherit_id:
            view = view.inherit_id
        result = [view]
        # Recurse into every template this view t-calls.
        node = etree.fromstring(view.arch)
        for child in node.xpath("//t[@t-call]"):
            try:
                called_view = self._view_obj(cr, uid, child.get('t-call'), context=context)
            except ValueError:
                continue
            if called_view not in result:
                result += self._views_get(cr, uid, called_view, options=options, context=context)
        extensions = view.inherit_children_ids
        if not options:
            # only active children
            extensions = (v for v in view.inherit_children_ids if v.active)
        # Keep options in a deterministic order regardless of their applicability
        for extension in sorted(extensions, key=lambda v: v.id):
            for r in self._views_get(
                    cr, uid, extension,
                    # only return optional grandchildren if this child is enabled
                    options=extension.active,
                    context=context, root=False):
                if r not in result:
                    result.append(r)
        return result
def extract_embedded_fields(self, cr, uid, arch, context=None):
return arch.xpath('//*[@data-oe-model != "ir.ui.view"]')
    def save_embedded_field(self, cr, uid, el, context=None):
        """Write the edited HTML node *el* back to the record/field it is
        branded with (data-oe-model / data-oe-field / data-oe-id)."""
        Model = self.pool[el.get('data-oe-model')]
        field = el.get('data-oe-field')
        # The converter turns the edited HTML back into the field's type.
        converter = self.pool['website.qweb'].get_converter_for(el.get('data-oe-type'))
        value = converter.from_html(cr, uid, Model, Model._fields[field], el)
        if value is not None:
            # TODO: batch writes?
            Model.write(cr, uid, [int(el.get('data-oe-id'))], {
                field: value
            }, context=context)
    def to_field_ref(self, cr, uid, el, context=None):
        """Convert an edited embedded-field node back into a t-field node,
        dropping the data-oe-* branding attributes."""
        # filter out meta-information inserted in the document
        attributes = dict((k, v) for k, v in el.items()
                          if not k.startswith('data-oe-'))
        attributes['t-field'] = el.get('data-oe-expression')
        out = html.html_parser.makeelement(el.tag, attrib=attributes)
        out.tail = el.tail
        return out
    def replace_arch_section(self, cr, uid, view_id, section_xpath, replacement, context=None):
        """Return view_id's arch with the section at *section_xpath* (or the
        whole arch when falsy) replaced by *replacement*'s content."""
        # the root of the arch section shouldn't actually be replaced as it's
        # not really editable itself, only the content truly is editable.
        [view] = self.browse(cr, uid, [view_id], context=context)
        arch = etree.fromstring(view.arch.encode('utf-8'))
        # => get the replacement root
        if not section_xpath:
            root = arch
        else:
            # ensure there's only one match
            [root] = arch.xpath(section_xpath)
        # Keep the root element itself; swap its text, tail and children.
        root.text = replacement.text
        root.tail = replacement.tail
        # replace all children
        del root[:]
        for child in replacement:
            root.append(copy.deepcopy(child))
        return arch
    @api.cr_uid_ids_context
    def render(self, cr, uid, id_or_xml_id, values=None, engine='ir.qweb', context=None):
        """Render the view; when serving a website request, switch to the
        website qweb engine and enrich the context with website helpers
        (url_for, slug, company, menus, edit/translate flags)."""
        if request and getattr(request, 'website_enabled', False):
            engine='website.qweb'
            if isinstance(id_or_xml_id, list):
                id_or_xml_id = id_or_xml_id[0]
            if not context:
                context = {}
            company = self.pool['res.company'].browse(cr, SUPERUSER_ID, request.website.company_id.id, context=context)
            qcontext = dict(
                context.copy(),
                website=request.website,
                url_for=website.url_for,
                slug=website.slug,
                res_company=company,
                user_id=self.pool.get("res.users").browse(cr, uid, uid),
                translatable=context.get('lang') != request.website.default_lang_code,
                editable=request.website.is_publisher(),
                menu_data=self.pool['ir.ui.menu'].load_menus_root(cr, uid, context=context) if request.website.is_user() else None,
            )
            # add some values
            if values:
                qcontext.update(values)
            # in edit mode ir.ui.view will tag nodes
            if qcontext.get('editable'):
                context = dict(context, inherit_branding=True)
            elif request.registry['res.users'].has_group(cr, uid, 'base.group_website_publisher'):
                context = dict(context, inherit_branding_auto=True)
            view_obj = request.website.get_template(id_or_xml_id)
            if 'main_object' not in qcontext:
                qcontext['main_object'] = view_obj
            values = qcontext
        return super(view, self).render(cr, uid, id_or_xml_id, values=values, engine=engine, context=context)
    def _pretty_arch(self, arch):
        """Return *arch* serialized as pretty-printed unicode XML."""
        # remove_blank_string does not seem to work on HTMLParser, and
        # pretty-printing with lxml more or less requires stripping
        # whitespace: http://lxml.de/FAQ.html#why-doesn-t-the-pretty-print-option-reformat-my-xml-output
        # so serialize to XML, parse as XML (remove whitespace) then serialize
        # as XML (pretty print)
        arch_no_whitespace = etree.fromstring(
            etree.tostring(arch, encoding='utf-8'),
            parser=etree.XMLParser(encoding='utf-8', remove_blank_text=True))
        return etree.tostring(
            arch_no_whitespace, encoding='unicode', pretty_print=True)
    def save(self, cr, uid, res_id, value, xpath=None, context=None):
        """ Update a view section. The view section may embed fields to write

        :param str model:
        :param int res_id:
        :param str xpath: valid xpath to the tag to replace
        """
        res_id = int(res_id)
        arch_section = html.fromstring(
            value, parser=html.HTMLParser(encoding='utf-8'))
        if xpath is None:
            # value is an embedded field on its own, not a view section
            self.save_embedded_field(cr, uid, arch_section, context=context)
            return
        # Persist every embedded field, then strip it back to a t-field
        # placeholder before storing the view section.
        for el in self.extract_embedded_fields(cr, uid, arch_section, context=context):
            self.save_embedded_field(cr, uid, el, context=context)
            # transform embedded field back to t-field
            el.getparent().replace(el, self.to_field_ref(cr, uid, el, context=context))
        arch = self.replace_arch_section(cr, uid, res_id, xpath, arch_section, context=context)
        self.write(cr, uid, res_id, {
            'arch': self._pretty_arch(arch)
        }, context=context)
        # Protect the edited view from being overwritten on module update.
        view = self.browse(cr, SUPERUSER_ID, res_id, context=context)
        if view.model_data_id:
            view.model_data_id.write({'noupdate': True})
    def customize_template_get(self, cr, uid, xml_id, full=False, bundles=False , context=None):
        """ Get inherit view's informations of the template ``key``. By default, only
        returns ``customize_show`` templates (which can be active or not), if
        ``full=True`` returns inherit view's informations of the template ``key``.
        ``bundles=True`` returns also the asset bundles
        """
        imd = request.registry['ir.model.data']
        view_model, view_theme_id = imd.get_object_reference(cr, uid, 'website', 'theme')
        user = request.registry['res.users'].browse(cr, uid, uid, context)
        user_groups = set(user.groups_id)
        views = self._views_get(cr, uid, xml_id, context=dict(context or {}, active_test=False))
        done = set()
        result = []
        for v in views:
            # Skip views restricted to groups the user does not have.
            if not user_groups.issuperset(v.groups_id):
                continue
            if full or (v.customize_show and v.inherit_id.id != view_theme_id):
                # Emit a header entry the first time a parent view is seen.
                if v.inherit_id not in done:
                    result.append({
                        'name': v.inherit_id.name,
                        'id': v.id,
                        'xml_id': v.xml_id,
                        'inherit_id': v.inherit_id.id,
                        'header': True,
                        'active': False
                    })
                    done.add(v.inherit_id)
                result.append({
                    'name': v.name,
                    'id': v.id,
                    'xml_id': v.xml_id,
                    'inherit_id': v.inherit_id.id,
                    'header': False,
                    'active': v.active,
                })
        return result
    # NOTE(review): the mutable default for ``field`` is only read, never
    # mutated, so it is harmless -- but a tuple would be safer.
    def get_view_translations(self, cr, uid, xml_id, lang, field=['id', 'res_id', 'value', 'state', 'gengo_translation'], context=None):
        """Return the ir.translation rows (restricted to *field*) for all
        active views related to template *xml_id* in language *lang*."""
        views = self.customize_template_get(cr, uid, xml_id, full=True, context=context)
        views_ids = [view.get('id') for view in views if view.get('active')]
        domain = [('type', '=', 'view'), ('res_id', 'in', views_ids), ('lang', '=', lang)]
        irt = request.registry.get('ir.translation')
        return irt.search_read(cr, uid, domain, field, context=context)
| agpl-3.0 |
resmo/ansible | contrib/inventory/spacewalk.py | 28 | 8792 | #!/usr/bin/env python
"""
Spacewalk external inventory script
=================================
Ansible has a feature where instead of reading from /etc/ansible/hosts
as a text file, it can query external programs to obtain the list
of hosts, groups the hosts are in, and even variables to assign to each host.
To use this, copy this file over /etc/ansible/hosts and chmod +x the file.
This, more or less, allows you to keep one central database containing
info about all of your managed instances.
This script is dependent upon the spacewalk-reports package being installed
on the same machine. It is basically a CSV-to-JSON converter from the
output of "spacewalk-report system-groups-systems|inventory".
Tested with Ansible 1.9.2 and spacewalk 2.3
"""
#
# Author:: Jon Miller <jonEbird@gmail.com>
# Copyright:: Copyright (c) 2013, Jon Miller
#
# Extended for support of multiple organizations and
# adding the "_meta" dictionary to --list output by
# Bernhard Lichtinger <bernhard.lichtinger@lrz.de> 2015
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import print_function
import sys
import os
import time
from optparse import OptionParser
import subprocess
import json
from ansible.module_utils.six import iteritems
from ansible.module_utils.six.moves import configparser as ConfigParser
base_dir = os.path.dirname(os.path.realpath(__file__))
default_ini_file = os.path.join(base_dir, "spacewalk.ini")
SW_REPORT = '/usr/bin/spacewalk-report'
CACHE_DIR = os.path.join(base_dir, ".spacewalk_reports")
CACHE_AGE = 300 # 5min
INI_FILE = os.path.expanduser(os.path.expandvars(os.environ.get("SPACEWALK_INI_PATH", default_ini_file)))
# Sanity check
if not os.path.exists(SW_REPORT):
print('Error: %s is required for operation.' % (SW_REPORT), file=sys.stderr)
sys.exit(1)
# Pre-startup work
if not os.path.exists(CACHE_DIR):
os.mkdir(CACHE_DIR)
os.chmod(CACHE_DIR, 0o2775)
# Helper functions
# ------------------------------
def spacewalk_report(name):
    """Yield a dictionary form of each CSV output produced by the specified
    spacewalk-report

    The report output is cached in CACHE_DIR for CACHE_AGE seconds.
    Yields nothing when the report produced no output.
    """
    cache_filename = os.path.join(CACHE_DIR, name)
    if not os.path.exists(cache_filename) or \
            (time.time() - os.stat(cache_filename).st_mtime) > CACHE_AGE:
        # Update the cache; 'with' guarantees the handle is closed even
        # if Popen raises (the original leaked fh in that case).
        with open(cache_filename, 'w') as fh:
            p = subprocess.Popen([SW_REPORT, name], stdout=fh)
            p.wait()

    # The original left this handle dangling via open(...).readlines().
    with open(cache_filename, 'r') as fh:
        lines = fh.readlines()
    if not lines:
        # Empty report (or failed command): yield nothing instead of
        # crashing with IndexError on lines[0].
        return
    keys = lines[0].strip().split(',')
    # add 'spacewalk_' prefix to the keys
    keys = ['spacewalk_' + key for key in keys]
    for line in lines[1:]:
        values = line.strip().split(',')
        if len(keys) == len(values):
            yield dict(zip(keys, values))
# Options
# ------------------------------
parser = OptionParser(usage="%prog [options] --list | --host <machine>")
parser.add_option('--list', default=False, dest="list", action="store_true",
help="Produce a JSON consumable grouping of servers for Ansible")
parser.add_option('--host', default=None, dest="host",
help="Generate additional host specific details for given host for Ansible")
parser.add_option('-H', '--human', dest="human",
default=False, action="store_true",
help="Produce a friendlier version of either server list or host detail")
parser.add_option('-o', '--org', default=None, dest="org_number",
help="Limit to spacewalk organization number")
parser.add_option('-p', default=False, dest="prefix_org_name", action="store_true",
help="Prefix the group name with the organization number")
(options, args) = parser.parse_args()
# read spacewalk.ini if present
# ------------------------------
if os.path.exists(INI_FILE):
config = ConfigParser.SafeConfigParser()
config.read(INI_FILE)
if config.has_option('spacewalk', 'cache_age'):
CACHE_AGE = config.get('spacewalk', 'cache_age')
if not options.org_number and config.has_option('spacewalk', 'org_number'):
options.org_number = config.get('spacewalk', 'org_number')
if not options.prefix_org_name and config.has_option('spacewalk', 'prefix_org_name'):
options.prefix_org_name = config.getboolean('spacewalk', 'prefix_org_name')
# Generate dictionary for mapping group_id to org_id
# ------------------------------
org_groups = {}
try:
for group in spacewalk_report('system-groups'):
org_groups[group['spacewalk_group_id']] = group['spacewalk_org_id']
except (OSError) as e:
print('Problem executing the command "%s system-groups": %s' %
(SW_REPORT, str(e)), file=sys.stderr)
sys.exit(2)
# List out the known server from Spacewalk
# ------------------------------
if options.list:
# to build the "_meta"-Group with hostvars first create dictionary for later use
host_vars = {}
try:
for item in spacewalk_report('inventory'):
host_vars[item['spacewalk_profile_name']] = dict((key, (value.split(';') if ';' in value else value)) for key, value in item.items())
except (OSError) as e:
print('Problem executing the command "%s inventory": %s' %
(SW_REPORT, str(e)), file=sys.stderr)
sys.exit(2)
groups = {}
meta = {"hostvars": {}}
try:
for system in spacewalk_report('system-groups-systems'):
# first get org_id of system
org_id = org_groups[system['spacewalk_group_id']]
# shall we add the org_id as prefix to the group name:
if options.prefix_org_name:
prefix = org_id + "-"
group_name = prefix + system['spacewalk_group_name']
else:
group_name = system['spacewalk_group_name']
# if we are limited to one organization:
if options.org_number:
if org_id == options.org_number:
if group_name not in groups:
groups[group_name] = set()
groups[group_name].add(system['spacewalk_server_name'])
if system['spacewalk_server_name'] in host_vars and not system['spacewalk_server_name'] in meta["hostvars"]:
meta["hostvars"][system['spacewalk_server_name']] = host_vars[system['spacewalk_server_name']]
# or we list all groups and systems:
else:
if group_name not in groups:
groups[group_name] = set()
groups[group_name].add(system['spacewalk_server_name'])
if system['spacewalk_server_name'] in host_vars and not system['spacewalk_server_name'] in meta["hostvars"]:
meta["hostvars"][system['spacewalk_server_name']] = host_vars[system['spacewalk_server_name']]
except (OSError) as e:
print('Problem executing the command "%s system-groups-systems": %s' %
(SW_REPORT, str(e)), file=sys.stderr)
sys.exit(2)
if options.human:
for group, systems in iteritems(groups):
print('[%s]\n%s\n' % (group, '\n'.join(systems)))
else:
final = dict([(k, list(s)) for k, s in iteritems(groups)])
final["_meta"] = meta
print(json.dumps(final))
# print(json.dumps(groups))
sys.exit(0)
# Return a details information concerning the spacewalk server
# ------------------------------
elif options.host:
host_details = {}
try:
for system in spacewalk_report('inventory'):
if system['spacewalk_hostname'] == options.host:
host_details = system
break
except (OSError) as e:
print('Problem executing the command "%s inventory": %s' %
(SW_REPORT, str(e)), file=sys.stderr)
sys.exit(2)
if options.human:
print('Host: %s' % options.host)
for k, v in iteritems(host_details):
print(' %s: %s' % (k, '\n '.join(v.split(';'))))
else:
print(json.dumps(dict((key, (value.split(';') if ';' in value else value)) for key, value in host_details.items())))
sys.exit(0)
else:
parser.print_help()
sys.exit(1)
| gpl-3.0 |
eeshangarg/zulip | zerver/lib/types.py | 3 | 1896 | from typing import Any, Callable, Dict, List, Optional, Tuple, TypeVar, Union
from django.http import HttpResponse
from django.utils.functional import Promise
from typing_extensions import TypedDict
# Bound TypeVar for decorators that wrap Django view functions.
ViewFuncT = TypeVar("ViewFuncT", bound=Callable[..., HttpResponse])
# See zerver/lib/validator.py for more details of Validators,
# including many examples
ResultT = TypeVar("ResultT")
# A Validator receives (variable name, untrusted value) and returns the
# validated value (raising on invalid input).
Validator = Callable[[str, object], ResultT]
# Like Validator, with an extra leading context string argument.
ExtendedValidator = Callable[[str, str, object], str]
# Validates a list of user ids for a realm: (realm_id, value, allow_deactivated).
RealmUserValidator = Callable[[int, object, bool], List[int]]
class ProfileDataElementBase(TypedDict):
    """Definition of a custom profile field (without any user's value)."""
    id: int
    name: str
    type: int
    hint: Optional[str]
    field_data: Optional[str]
    order: int
class ProfileDataElement(ProfileDataElementBase):
    """A custom profile field plus one user's (raw and rendered) value."""
    value: str
    rendered_value: Optional[str]
ProfileData = List[ProfileDataElement]
# (id, label, validator, display converter, shortname) tuples describing
# the built-in custom profile field types.
FieldElement = Tuple[int, Promise, Validator[Union[int, str, List[int]]], Callable[[Any], Any], str]
ExtendedFieldElement = Tuple[int, Promise, ExtendedValidator, Callable[[Any], Any], str]
UserFieldElement = Tuple[int, Promise, RealmUserValidator, Callable[[Any], Any], str]
ProfileFieldData = Dict[str, Union[Dict[str, str], str]]
class UserDisplayRecipient(TypedDict):
    """Per-user entry in a private-message display_recipient list."""
    email: str
    full_name: str
    id: int
    is_mirror_dummy: bool
# A stream name, or the list of users in a private-message recipient.
DisplayRecipientT = Union[str, List[UserDisplayRecipient]]
class LinkifierDict(TypedDict):
    """API representation of a realm linkifier (pattern -> URL format)."""
    pattern: str
    url_format: str
    id: int
class SAMLIdPConfigDict(TypedDict, total=False):
    """Configuration of one SAML identity provider.

    total=False: every key is optional, matching the settings format.
    """
    entity_id: str
    url: str
    attr_user_permanent_id: str
    attr_first_name: str
    attr_last_name: str
    attr_username: str
    attr_email: str
    attr_org_membership: str
    display_name: str
    display_icon: str
    limit_to_subdomains: List[str]
    extra_attrs: List[str]
    x509cert: str
    x509cert_path: str
class FullNameInfo(TypedDict):
    """Minimal identifying information for a user: id, email and full name."""

    id: int
    email: str
    full_name: str
| apache-2.0 |
windedge/odoo | addons/event/__openerp__.py | 261 | 2296 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Odoo/OpenERP addon manifest for the "event" module: declares module
# metadata, dependencies and the data/demo/test files loaded at install time.
{
    'name': 'Events Organisation',
    'version': '0.1',
    'website' : 'https://www.odoo.com/page/events',
    'category': 'Tools',
    'summary': 'Trainings, Conferences, Meetings, Exhibitions, Registrations',
    'description': """
Organization and management of Events.
======================================
The event module allows you to efficiently organise events and all related tasks: planification, registration tracking,
attendances, etc.
Key Features
------------
* Manage your Events and Registrations
* Use emails to automatically confirm and send acknowledgements for any event registration
""",
    'author': 'OpenERP SA',
    # Modules that must be installed before this one.
    'depends': ['base_setup', 'board', 'email_template', 'marketing'],
    # XML/CSV records loaded on install/update (order matters: security first).
    'data': [
        'security/event_security.xml',
        'security/ir.model.access.csv',
        'wizard/event_confirm_view.xml',
        'event_view.xml',
        'event_data.xml',
        'report/report_event_registration_view.xml',
        'res_partner_view.xml',
        'email_template.xml',
        'views/event.xml',
    ],
    'demo': [
        'event_demo.xml',
    ],
    'test': [
        'test/ui/event_users.yml',
        'test/process/event_draft2done.yml'
    ],
    'installable': True,
    'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
xbezdick/tempest | tempest/api/network/admin/test_external_networks_negative.py | 31 | 2155 | # Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib import exceptions as lib_exc
from tempest.api.network import base
from tempest import config
from tempest import test
CONF = config.CONF
class ExternalNetworksAdminNegativeTestJSON(base.BaseAdminNetworkTest):
    """Negative admin tests against the configured public (external) network."""

    @test.attr(type=['negative'])
    @test.idempotent_id('d402ae6c-0be0-4d8e-833b-a738895d98d0')
    def test_create_port_with_precreated_floatingip_as_fixed_ip(self):
        """
        External networks can be used to create both floating-ip as well
        as instance-ip. So, creating an instance-ip with a value of a
        pre-created floating-ip should be denied.
        """
        # create a floating ip
        client = self.admin_client
        body = client.create_floatingip(
            floating_network_id=CONF.network.public_network_id)
        created_floating_ip = body['floatingip']
        # Register cleanup immediately so the floating ip is released even if
        # a later assertion fails.
        self.addCleanup(self._try_delete_resource,
                        client.delete_floatingip,
                        created_floating_ip['id'])
        floating_ip_address = created_floating_ip['floating_ip_address']
        self.assertIsNotNone(floating_ip_address)
        # use the same value of floatingip as fixed-ip to create_port()
        fixed_ips = [{'ip_address': floating_ip_address}]
        # create a port which will internally create an instance-ip
        # -- the API must reject it with 409 Conflict.
        self.assertRaises(lib_exc.Conflict,
                          client.create_port,
                          network_id=CONF.network.public_network_id,
                          fixed_ips=fixed_ips)
| apache-2.0 |
Eaglemania/TOL | pyglet/canvas/win32.py | 4 | 3324 | #!/usr/bin/python
# $Id:$
from base import Display, Screen, ScreenMode, Canvas
from pyglet.libs.win32 import _kernel32, _user32, types, constants
from pyglet.libs.win32.constants import *
from pyglet.libs.win32.types import *
class Win32Display(Display):
    """Display implementation backed by the Win32 monitor-enumeration API."""

    def get_screens(self):
        """Return a list of Win32Screen objects, one per attached monitor."""
        screens = []
        def enum_proc(hMonitor, hdcMonitor, lprcMonitor, dwData):
            # Called once per monitor; the RECT describes the monitor's
            # position and extent in virtual-screen coordinates.
            r = lprcMonitor.contents
            width = r.right - r.left
            height = r.bottom - r.top
            screens.append(
                Win32Screen(self, hMonitor, r.left, r.top, width, height))
            # Returning True tells EnumDisplayMonitors to continue.
            return True
        enum_proc_ptr = MONITORENUMPROC(enum_proc)
        _user32.EnumDisplayMonitors(None, None, enum_proc_ptr, 0)
        return screens
class Win32Screen(Screen):
    """A single monitor, wrapping a Win32 HMONITOR handle."""

    # Mode in effect before the first set_mode() call; restored by
    # restore_mode().
    _initial_mode = None

    def __init__(self, display, handle, x, y, width, height):
        super(Win32Screen, self).__init__(display, x, y, width, height)
        self._handle = handle

    def get_matching_configs(self, template):
        """Return GL configs on this screen matching `template`."""
        # GetDC(0) obtains the device context of the entire (primary) screen.
        canvas = Win32Canvas(self.display, 0, _user32.GetDC(0))
        configs = template.match(canvas)
        # XXX deprecate config's being screen-specific
        for config in configs:
            config.screen = self
        return configs

    def get_device_name(self):
        """Return the GDI device name (e.g. '\\\\.\\DISPLAY1') for this monitor."""
        info = MONITORINFOEX()
        info.cbSize = sizeof(MONITORINFOEX)
        _user32.GetMonitorInfoW(self._handle, byref(info))
        return info.szDevice

    def get_modes(self):
        """Enumerate all display modes supported by this monitor."""
        device_name = self.get_device_name()
        i = 0
        modes = []
        while True:
            mode = DEVMODE()
            mode.dmSize = sizeof(DEVMODE)
            # Index i walks the mode list; EnumDisplaySettingsW returns 0
            # when the index is past the last mode.
            r = _user32.EnumDisplaySettingsW(device_name, i, byref(mode))
            if not r:
                break
            modes.append(Win32ScreenMode(self, mode))
            i += 1
        return modes

    def get_mode(self):
        """Return the monitor's current display mode."""
        mode = DEVMODE()
        mode.dmSize = sizeof(DEVMODE)
        _user32.EnumDisplaySettingsW(self.get_device_name(),
                                     ENUM_CURRENT_SETTINGS,
                                     byref(mode))
        return Win32ScreenMode(self, mode)

    def set_mode(self, mode):
        """Switch this monitor to `mode`, remembering the original mode."""
        assert mode.screen is self
        if not self._initial_mode:
            self._initial_mode = self.get_mode()
        r = _user32.ChangeDisplaySettingsExW(self.get_device_name(),
                                             byref(mode._mode),
                                             None,
                                             CDS_FULLSCREEN,
                                             None)
        # Only update our cached geometry if the switch actually succeeded.
        if r == DISP_CHANGE_SUCCESSFUL:
            self.width = mode.width
            self.height = mode.height

    def restore_mode(self):
        """Restore the mode saved by the first set_mode() call, if any."""
        if self._initial_mode:
            self.set_mode(self._initial_mode)
class Win32ScreenMode(ScreenMode):
    """A display mode, wrapping a Win32 DEVMODE structure."""

    def __init__(self, screen, mode):
        # `mode` is a DEVMODE filled in by EnumDisplaySettingsW; keep it so
        # Win32Screen.set_mode can pass it back to ChangeDisplaySettingsExW.
        super(Win32ScreenMode, self).__init__(screen)
        self._mode = mode
        self.width = mode.dmPelsWidth
        self.height = mode.dmPelsHeight
        self.depth = mode.dmBitsPerPel
        self.rate = mode.dmDisplayFrequency
class Win32Canvas(Canvas):
    """Drawing surface identified by a window handle and device context."""

    def __init__(self, display, hwnd, hdc):
        super(Win32Canvas, self).__init__(display)
        self.hwnd = hwnd
        self.hdc = hdc
| gpl-2.0 |
qsnake/gpaw | config.py | 1 | 17421 | # Copyright (C) 2006 CSC-Scientific Computing Ltd.
# Please see the accompanying LICENSE file for further information.
import os
import sys
import re
import distutils.util
from distutils.sysconfig import get_config_var, get_config_vars
from distutils.command.config import config
from glob import glob
from os.path import join
from stat import ST_MTIME
def check_packages(packages, msg, include_ase, import_numpy):
    """Check the python version and required extra packages
    If ASE is not installed, the `packages` list is extended with the
    ASE modules if they are found."""
    if sys.version_info < (2, 3, 0, 'final', 0):
        raise SystemExit('Python 2.3.1 or later is required!')
    if import_numpy:
        try:
            import numpy
        except ImportError:
            raise SystemExit('numpy is not installed!')
    else:
        # numpy check skipped: the caller must supply the include path.
        msg += ['* numpy is not installed.',
                ' "include_dirs" in your customize.py must point to "numpy/core/include".']
    if not include_ase:
        if import_numpy:
            # Probe for an installed ASE; fall back to the bundled copy below.
            try:
                import ase
            except ImportError:
                import_ase = True
            else:
                import_ase = False
        else:
            import_ase = False
    if include_ase or import_ase:
        # Find ASE directories:
        # include_ase works in case:
        # cd gpaw # top-level gpaw source directory
        # tar zxf ~/python-ase-3.1.0.846.tar.gz
        # ln -s python-ase-3.1.0.846/ase .
        ase_root = 'ase'
        if include_ase:
            assert os.path.isdir(ase_root), ase_root+': No such file or directory'
        ase = []
        # Collect every ASE package directory (those with __init__.py),
        # skipping VCS metadata directories.
        for root, dirs, files in os.walk(ase_root):
            if 'CVS' in dirs:
                dirs.remove('CVS')
            if '.svn' in dirs:
                dirs.remove('.svn')
            if '__init__.py' in files:
                ase.append(root.replace('/', '.'))
        if len(ase) == 0:
            msg += ['* ASE is not installed! You may be able to install',
                    " gpaw, but you can't use it without ASE!"]
        else:
            packages += ase
def find_file(arg, dir, files):
    """os.path.walk visitor: append ``dir`` to ``arg`` when the file named by
    ``arg[0]`` is present in ``files`` (the directory listing)."""
    wanted = arg[0]
    if wanted in files:
        arg.append(dir)
def get_system_config(define_macros, undef_macros,
                      include_dirs, libraries, library_dirs, extra_link_args,
                      extra_compile_args, runtime_library_dirs, extra_objects,
                      msg, import_numpy):
    """Fill in platform-specific compile/link settings, mutating the argument
    lists in place, and return the (extended) ``msg`` status-message list.

    The platform is chosen from ``os.uname()[4]`` (machine) and
    ``sys.platform``; each branch probes the filesystem for the best
    available BLAS/LAPACK implementation.
    """
    undef_macros += ['NDEBUG']
    if import_numpy:
        import numpy
        include_dirs += [numpy.get_include()]
    include_dirs += ['c/libxc']
    machine = os.uname()[4]
    if machine == 'sun4u':
        # SUN (sparc)
        extra_compile_args += ['-Kpic', '-fast']
        # Suppress warning from -fast (-xarch=native):
        # compile a trivial program and scrape the -xarch flag from stderr.
        f = open('cc-test.c', 'w')
        f.write('int main(){}\n')
        f.close()
        stderr = os.popen3('cc cc-test.c -fast')[2].read()
        arch = re.findall('-xarch=(\S+)', stderr)
        os.remove('cc-test.c')
        if len(arch) > 0:
            extra_compile_args += ['-xarch=%s' % arch[-1]]
        # We need the -Bstatic before the -lsunperf and -lfsu:
        # http://forum.java.sun.com/thread.jspa?threadID=5072537&messageID=9265782
        extra_link_args += ['-Bstatic', '-lsunperf', '-lfsu', '-Bdynamic']
        cc_version = os.popen3('cc -V')[2].readline().split()[3]
        if cc_version > '5.6':
            libraries.append('mtsk')
        else:
            extra_link_args.append('-lmtsk')
        #define_macros.append(('NO_C99_COMPLEX', '1'))
        msg += ['* Using SUN high performance library']
    elif sys.platform in ['aix5', 'aix6']:
        # IBM AIX
        extra_compile_args += ['-qlanglvl=stdc99']
        # setting memory limit is necessary on aix5
        if sys.platform == 'aix5':
            extra_link_args += ['-bmaxdata:0x80000000',
                                '-bmaxstack:0x80000000']
        libraries += ['f', 'lapack', 'essl']
        define_macros.append(('GPAW_AIX', '1'))
    elif machine == 'x86_64':
        # x86_64: prefer ACML, then ATLAS, then reference LAPACK.
        extra_compile_args += ['-Wall', '-std=c99']
        # Look for ACML libraries:
        acml = glob('/opt/acml*/g*64/lib')
        if len(acml) > 0:
            library_dirs += [acml[-1]]
            libraries += ['acml']
            if acml[-1].find('gfortran') != -1: libraries.append('gfortran')
            if acml[-1].find('gnu') != -1: libraries.append('g2c')
            extra_link_args += ['-Wl,-rpath=' + acml[-1]]
            msg += ['* Using ACML library']
        else:
            atlas = False
            for dir in ['/usr/lib', '/usr/local/lib']:
                if glob(join(dir, 'libatlas.a')) != []:
                    atlas = True
                    break
            if atlas:
                libraries += ['lapack', 'atlas', 'blas']
                library_dirs += [dir]
                msg += ['* Using ATLAS library']
            else:
                libraries += ['blas', 'lapack']
                msg += ['* Using standard lapack']
    elif machine =='ia64':
        # SGI/Itanium: link against Intel MKL.
        extra_compile_args += ['-Wall', '-std=c99']
        libraries += ['mkl','mkl_lapack64']
    elif machine == 'i686':
        # 32-bit x86: prefer MKL, then ATLAS, then reference LAPACK.
        extra_compile_args += ['-Wall', '-std=c99']
        if 'MKL_ROOT' in os.environ:
            mklbasedir = [os.environ['MKL_ROOT']]
        else:
            mklbasedir = glob('/opt/intel/mkl*')
        libs = ['libmkl_ia32.a']
        if mklbasedir != []:
            # find_file appends each directory containing libmkl_ia32.a.
            os.path.walk(mklbasedir[0],find_file, libs)
        libs.pop(0)
        if libs != []:
            libs.sort()
            libraries += ['mkl_lapack',
                          'mkl_ia32', 'guide', 'pthread', 'mkl']#, 'mkl_def']
            library_dirs += libs
            msg += ['* Using MKL library: %s' % library_dirs[-1]]
            #extra_link_args += ['-Wl,-rpath=' + library_dirs[-1]]
        else:
            atlas = False
            for dir in ['/usr/lib', '/usr/local/lib']:
                if glob(join(dir, 'libatlas.a')) != []:
                    atlas = True
                    break
            if atlas:
                libraries += ['lapack', 'atlas', 'blas']
                library_dirs += [dir]
                msg += ['* Using ATLAS library']
            else:
                libraries += ['blas', 'lapack']
                msg += ['* Using standard lapack']
        # add libg2c if available
        g2c=False
        for dir in ['/usr/lib', '/usr/local/lib']:
            if glob(join(dir, 'libg2c.so')) != []:
                g2c=True
                break
            if glob(join(dir, 'libg2c.a')) != []:
                g2c=True
                break
        if g2c: libraries += ['g2c']
    elif sys.platform == 'darwin':
        extra_compile_args += ['-Wall', '-std=c99']
        include_dirs += ['/usr/include/malloc']
        if glob('/System/Library/Frameworks/vecLib.framework') != []:
            extra_link_args += ['-framework vecLib']
            msg += ['* Using vecLib']
        else:
            libraries += ['blas', 'lapack']
            msg += ['* Using standard lapack']
    return msg
def get_parallel_config(mpi_libraries,mpi_library_dirs,mpi_include_dirs,
                        mpi_runtime_library_dirs,mpi_define_macros):
    """Detect the MPI implementation and fill in MPI build settings in place.

    Detection is delegated to get_mpi_implementation() loaded from
    gpaw/mpi/config.py.  Returns the MPI compiler command to use, or None
    when no MPI implementation was found.
    """
    globals = {}
    # Python 2 execfile: run the config module and pick its function out of
    # the resulting namespace.
    execfile('gpaw/mpi/config.py', globals)
    mpi = globals['get_mpi_implementation']()
    if mpi == '':
        mpicompiler = None
    elif mpi == 'sun':
        mpi_include_dirs += ['/opt/SUNWhpc/include']
        mpi_libraries += ['mpi']
        mpi_library_dirs += ['/opt/SUNWhpc/lib']
        mpi_runtime_library_dirs += ['/opt/SUNWhpc/lib']
        mpicompiler = get_config_var('CC')
    elif mpi == 'poe':
        # IBM parallel environment: use its thread-safe compiler wrapper.
        mpicompiler = 'mpcc_r'
    else:
        #Try to use mpicc
        mpicompiler = 'mpicc'
    return mpicompiler
def get_scalapack_config(define_macros):
    """Enable ScaLAPACK support by adding the GPAW_WITH_SL macro in place."""
    scalapack_macro = ('GPAW_WITH_SL', '1')
    define_macros.append(scalapack_macro)
def mtime(path, name, mtimes):
    """Return modification time.

    The modification time of a source file is returned. If one of its
    dependencies is newer, the mtime of that file is returned.
    This function fails if two include files with the same name
    are present in different directories.

    ``mtimes`` is a memoization cache mapping file name -> effective mtime;
    it is updated in place and shared across recursive calls.
    """
    # Raw string: \s and \S are regex escapes, not string escapes.
    include = re.compile(r'^#\s*include "(\S+)"', re.MULTILINE)
    # 'in' instead of the removed-in-Python-3 dict.has_key().
    if name in mtimes:
        return mtimes[name]
    t = os.stat(os.path.join(path, name))[ST_MTIME]
    # Close the file handle explicitly instead of leaking it.
    source_file = open(os.path.join(path, name))
    try:
        contents = source_file.read()
    finally:
        source_file.close()
    for name2 in include.findall(contents):
        path2, name22 = os.path.split(name2)
        if name22 != name:
            # Recurse into each locally-included file; the newest mtime wins.
            t = max(t, mtime(os.path.join(path, path2), name22, mtimes))
    mtimes[name] = t
    return t
def check_dependencies(sources):
    """Delete stale object files so distutils rebuilds them.

    For each C source, compare its deep mtime (including local includes,
    via mtime()) against the corresponding build/temp object file and
    remove any out-of-date objects, plus the final _gpaw.so if anything
    was removed.
    """
    # Distutils does not do deep dependencies correctly. We take care of
    # that here so that "python setup.py build_ext" always does the right
    # thing!
    mtimes = {} # modification times
    # Remove object files if any dependencies have changed:
    plat = distutils.util.get_platform() + '-' + sys.version[0:3]
    remove = False
    for source in sources:
        path, name = os.path.split(source)
        t = mtime(path + '/', name, mtimes)
        o = 'build/temp.%s/%s.o' % (plat, source[:-2]) # object file
        if os.path.exists(o) and t > os.stat(o)[ST_MTIME]:
            print 'removing', o
            os.remove(o)
            remove = True
    so = 'build/lib.%s/_gpaw.so' % plat
    if os.path.exists(so) and remove:
        # Remove shared object C-extension:
        # print 'removing', so
        os.remove(so)
def test_configuration():
raise NotImplementedError
def write_configuration(define_macros, include_dirs, libraries, library_dirs,
                        extra_link_args, extra_compile_args,
                        runtime_library_dirs, extra_objects, mpicompiler,
                        mpi_libraries, mpi_library_dirs, mpi_include_dirs,
                        mpi_runtime_library_dirs, mpi_define_macros):
    """Dump the resolved build configuration to 'configuration.log'.

    Best-effort: an IOError opening the log is reported and swallowed so
    the build itself is not aborted.  The MPI section is written only when
    an MPI compiler was detected.
    """
    # Write the compilation configuration into a file
    try:
        out = open('configuration.log', 'w')
    except IOError, x:
        print x
        return
    print >> out, "Current configuration"
    print >> out, "libraries", libraries
    print >> out, "library_dirs", library_dirs
    print >> out, "include_dirs", include_dirs
    print >> out, "define_macros", define_macros
    print >> out, "extra_link_args", extra_link_args
    print >> out, "extra_compile_args", extra_compile_args
    print >> out, "runtime_library_dirs", runtime_library_dirs
    print >> out, "extra_objects", extra_objects
    if mpicompiler is not None:
        print >> out
        print >> out, "Parallel configuration"
        print >> out, "mpicompiler", mpicompiler
        print >> out, "mpi_libraries", mpi_libraries
        print >> out, "mpi_library_dirs", mpi_library_dirs
        print >> out, "mpi_include_dirs", mpi_include_dirs
        print >> out, "mpi_define_macros", mpi_define_macros
        print >> out, "mpi_runtime_library_dirs", mpi_runtime_library_dirs
    out.close()
def build_interpreter(define_macros, include_dirs, libraries, library_dirs,
                      extra_link_args, extra_compile_args,
                      runtime_library_dirs, extra_objects,
                      mpicompiler, mpilinker, mpi_libraries, mpi_library_dirs,
                      mpi_include_dirs, mpi_runtime_library_dirs,
                      mpi_define_macros):
    """Compile and link the custom parallel 'gpaw-python' interpreter.

    Returns (error, msg): the exit status of the final link command and a
    list of status-message strings.

    NOTE(review): if a source fails to compile, msg is set and the loop
    breaks, but the link step still runs and overwrites msg with
    '* Building a custom interpreter' -- the compile-failure message is
    lost.  Also, under '--dry-run' the commands never run, so `error` is
    unbound at the final return (NameError).  Both look like latent bugs;
    confirm before relying on the return value.
    """
    #Build custom interpreter which is used for parallel calculations
    cfgDict = get_config_vars()
    plat = distutils.util.get_platform() + '-' + sys.version[0:3]
    # All C sources that make up the extension, minus libxc templates that
    # are #included by other files rather than compiled standalone.
    cfiles = glob('c/[a-zA-Z_]*.c') + ['c/bmgs/bmgs.c']
    cfiles += glob('c/libxc/src/*.c')
    if ('HDF5', 1) in define_macros:
        cfiles += glob('h5py/c/*.c')
        cfiles += glob('h5py/c/lzf/*.c')
    cfiles2remove = ['c/libxc/src/test.c',
                     'c/libxc/src/xc_f.c',
                     'c/libxc/src/work_gga_x.c',
                     'c/libxc/src/work_lda.c'
                     ]
    for c2r in glob('c/libxc/src/funcs_*.c'): cfiles2remove.append(c2r)
    for c2r in cfiles2remove: cfiles.remove(c2r)
    # Sources that must be recompiled with PARALLEL/GPAW_INTERPRETER defined.
    sources = ['c/bc.c', 'c/localized_functions.c', 'c/mpi.c', 'c/_gpaw.c',
               'c/operators.c', 'c/transformers.c', 'c/compiled_WITH_SL.c',
               'c/blacs.c', 'c/utilities.c']
    objects = ' '.join(['build/temp.%s/' % plat + x[:-1] + 'o'
                        for x in cfiles])
    if not os.path.isdir('build/bin.%s/' % plat):
        os.makedirs('build/bin.%s/' % plat)
    # NOTE(review): the '%s/' + '/gpaw-python' concatenation yields a double
    # slash in the path; harmless on POSIX but worth cleaning up.
    exefile = 'build/bin.%s/' % plat + '/gpaw-python'
    libraries += mpi_libraries
    library_dirs += mpi_library_dirs
    define_macros += mpi_define_macros
    include_dirs += mpi_include_dirs
    runtime_library_dirs += mpi_runtime_library_dirs
    define_macros.append(('PARALLEL', '1'))
    define_macros.append(('GPAW_INTERPRETER', '1'))
    macros = ' '.join(['-D%s=%s' % x for x in define_macros if x[0].strip()])
    include_dirs.append(cfgDict['INCLUDEPY'])
    include_dirs.append(cfgDict['CONFINCLUDEPY'])
    includes = ' '.join(['-I' + incdir for incdir in include_dirs])
    library_dirs.append(cfgDict['LIBPL'])
    lib_dirs = ' '.join(['-L' + lib for lib in library_dirs])
    libs = ' '.join(['-l' + lib for lib in libraries if lib.strip()])
    libs += ' -lpython%s' % cfgDict['VERSION']
    libs = ' '.join([libs, cfgDict['LIBS'], cfgDict['LIBM']])
    #Hack taken from distutils to determine option for runtime_libary_dirs
    if sys.platform[:6] == 'darwin':
        # MacOSX's linker doesn't understand the -R flag at all
        runtime_lib_option = '-L'
    elif sys.platform[:5] == 'hp-ux':
        runtime_lib_option = '+s -L'
    elif os.popen('mpicc --showme 2> /dev/null', 'r').read()[:3] == 'gcc':
        runtime_lib_option = '-Wl,-R'
    elif os.popen('mpicc -show 2> /dev/null', 'r').read()[:3] == 'gcc':
        runtime_lib_option = '-Wl,-R'
    else:
        runtime_lib_option = '-R'
    runtime_libs = ' '.join([ runtime_lib_option + lib for lib in runtime_library_dirs])
    extra_link_args.append(cfgDict['LDFLAGS'])
    if sys.platform in ['aix5', 'aix6']:
        extra_link_args.append(cfgDict['LINKFORSHARED'].replace('Modules', cfgDict['LIBPL']))
    elif sys.platform == 'darwin':
        pass
    else:
        extra_link_args.append(cfgDict['LINKFORSHARED'])
    # Wrap libc stdio symbols so MPI ranks can intercept file reads.
    if ('IO_WRAPPERS', 1) in define_macros:
        extra_link_args += ['-Wl,-wrap,fread',
                            '-Wl,-wrap,_IO_getc',
                            '-Wl,-wrap,getc_unlocked',
                            '-Wl,-wrap,fgets',
                            '-Wl,-wrap,ungetc',
                            '-Wl,-wrap,feof',
                            '-Wl,-wrap,ferror',
                            '-Wl,-wrap,fflush',
                            '-Wl,-wrap,fseek',
                            '-Wl,-wrap,rewind',
                            # '-Wl,-wrap,fileno',
                            '-Wl,-wrap,flockfile',
                            '-Wl,-wrap,funlockfile',
                            '-Wl,-wrap,clearerr',
                            '-Wl,-wrap,fgetpos',
                            '-Wl,-wrap,fsetpos',
                            '-Wl,-wrap,setbuf',
                            '-Wl,-wrap,setvbuf',
                            '-Wl,-wrap,ftell',
                            '-Wl,-wrap,fstat',
                            '-Wl,-wrap,fstat64',
                            '-Wl,-wrap,fgetc',
                            # '-Wl,-wrap,fputc',
                            # '-Wl,-wrap,fputs',
                            # '-Wl,-wrap,fwrite',
                            # '-Wl,-wrap,_IO_putc',
                            '-Wl,-wrap,fopen',
                            '-Wl,-wrap,fopen64',
                            '-Wl,-wrap,fclose',
                            ]
    # Compile the parallel sources
    for src in sources:
        obj = 'build/temp.%s/' % plat + src[:-1] + 'o'
        cmd = ('%s %s %s %s -o %s -c %s ' ) % \
              (mpicompiler,
               macros,
               ' '.join(extra_compile_args),
               includes,
               obj,
               src)
        print cmd
        if '--dry-run' not in sys.argv:
            error=os.system(cmd)
            if error != 0:
                msg = ['* compiling FAILED! Only serial version of code will work.']
                break
    # Link the custom interpreter
    cmd = ('%s -o %s %s %s %s %s %s %s' ) % \
          (mpilinker,
           exefile,
           objects,
           ' '.join(extra_objects),
           lib_dirs,
           libs,
           runtime_libs,
           ' '.join(extra_link_args))
    msg = ['* Building a custom interpreter']
    print cmd
    if '--dry-run' not in sys.argv:
        error=os.system(cmd)
        if error != 0:
            msg += ['* linking FAILED! Only serial version of code will work.']
    return error, msg
| gpl-3.0 |
rue89-tech/edx-analytics-pipeline | edx/analytics/tasks/s3_util.py | 3 | 8993 | """
Utility methods for interacting with S3 via boto.
"""
import os
import math
import logging
from fnmatch import fnmatch
from urlparse import urlparse
from boto.s3.key import Key
from filechunkio import FileChunkIO
from luigi.s3 import S3Client, AtomicS3File
from luigi.hdfs import HdfsTarget, Plain
log = logging.getLogger(__name__)
# S3 does not permit using "put" for files larger than 5 GB, and
# returns a socket error. There is also a chance that smaller files
# might also fail. Arbitrarily choose a threshold so that files
# larger than 1GB should use multipart upload instead of a single put.
MULTIPART_UPLOAD_THRESHOLD = 1 * 1024 * 1024 * 1024
# Multipart upload algorithm taken from
# https://gist.github.com/fabiant7t/924094, which
# defines a minimum chunk size for multipart upload.
MINIMUM_BYTES_PER_CHUNK = 5242880
# By default, AWS does not apply an ACL to keys that are put into a
# bucket from another account. Having no ACL at all effectively
# renders the object useless since it cannot be read or anything. The
# only workaround we found was to explicitly set the ACL policy when
# putting the object. Define here what that policy will be.
DEFAULT_KEY_ACCESS_POLICY = 'bucket-owner-full-control'
def get_s3_bucket_key_names(url):
    """Extract the bucket and key names from a S3 URL"""
    parsed = urlparse(url)
    bucket_name = parsed.netloc.strip('/')
    key_name = parsed.path.strip('/')
    return (bucket_name, key_name)
def join_as_s3_url(bucket, root, path):
    """Combine bucket name, root path and relative path into a S3 URL"""
    return 's3://' + '/'.join((bucket, root, path))
def get_s3_key(s3_conn, url):
    """Returns an S3 key for use in further boto actions.

    NOTE(review): boto's bucket.get_key returns None when the key does not
    exist, so callers should be prepared for a None result -- confirm.
    """
    bucket_name, key_name = get_s3_bucket_key_names(url)
    bucket = s3_conn.get_bucket(bucket_name)
    key = bucket.get_key(key_name)
    return key
def generate_s3_sources(s3_conn, source, patterns):
    """
    Returns a list of S3 sources that match filters.
    Args:
        s3_conn: a boto connection to S3.
        source: a url to S3.
        patterns: a list of strings, each of which defines a pattern to match.
    Yields:
        (bucket, root, path) tuples for each matching file on S3.
        where `bucket` and `root` are derived from the source url,
        and `path` is a matching path relative to the `source`.
    Does not include zero-length files.
    """
    bucket_name, root = get_s3_bucket_key_names(source)
    bucket = s3_conn.get_bucket(bucket_name)
    # Make sure that the listing is done on a "folder" boundary,
    # since list() just looks for matching prefixes.
    root_with_slash = root if len(root) == 0 or root.endswith('/') else root + '/'
    # Skip keys that have zero size. This allows directories
    # to be skipped, but also skips legitimate files that are
    # also zero-length.
    keys = (s.key for s in bucket.list(root_with_slash) if s.size > 0)
    # Make paths relative by removing root
    paths = (k[len(root_with_slash):].lstrip('/') for k in keys)
    # Filter only paths that match the include patterns
    # (fnmatch-style globs; see _filter_matches).
    paths = _filter_matches(patterns, paths)
    return ((bucket.name, root, path) for path in paths)
def _filter_matches(patterns, names):
"""Return only key names that match any of the include patterns."""
func = lambda n: any(fnmatch(n, p) for p in patterns)
return (n for n in names if func(n))
class ScalableS3Client(S3Client):
    """
    S3 client that adds support for multipart uploads and requires minimal permissions.
    Uses S3 multipart upload API for large files, and regular S3 puts for smaller files.
    This client should only require PutObject and PutObjectAcl permissions in order to write to the target bucket.
    """
    # TODO: Make this behavior configurable and submit this change upstream.
    def put(self, local_path, destination_s3_path):
        """Put an object stored locally to an S3 path."""
        # parse path into bucket and key
        (bucket, key) = self._path_to_bucket_and_key(destination_s3_path)
        # If Boto is passed "validate=True", it will require an
        # additional permission to be present when asked to list all
        # of the keys in the bucket. We want to minimize the set of
        # required permissions so we get a reference to the bucket
        # without validating that it exists. It should only require
        # PutObject and PutObjectAcl permissions in order to write to
        # the target bucket.
        s3_bucket = self.s3.get_bucket(bucket, validate=False)
        # Check first if we should be doing a multipart upload.
        source_size_bytes = os.stat(local_path).st_size
        if source_size_bytes < MULTIPART_UPLOAD_THRESHOLD:
            self._upload_single(local_path, s3_bucket, key)
        else:
            log.info("File '%s' has size %d, exceeding threshold %d for using put -- using multipart upload.",
                     destination_s3_path, source_size_bytes, MULTIPART_UPLOAD_THRESHOLD)
            self._upload_multipart(local_path, destination_s3_path, s3_bucket, key, source_size_bytes)
    def _upload_single(self, local_path, s3_bucket, key):
        """
        Write a local file to an S3 key using single PUT.
        This only works for files < 5GB in size.
        """
        s3_key = Key(s3_bucket)
        s3_key.key = key
        # Explicitly set the ACL policy when putting the object, so
        # that it has an ACL when AWS writes to keys from another account.
        s3_key.set_contents_from_filename(local_path, policy=DEFAULT_KEY_ACCESS_POLICY)
    def _upload_multipart(self, local_path, destination_s3_path, s3_bucket, key, source_size_bytes):
        """Upload a large local file to an S3 path, using S3's multipart upload API."""
        # Explicitly set the ACL policy when putting the object, so
        # that it has an ACL when AWS writes to keys from another account.
        multipart = s3_bucket.initiate_multipart_upload(key, policy=DEFAULT_KEY_ACCESS_POLICY)
        number_of_chunks, bytes_per_chunk = self._get_chunk_specs(source_size_bytes)
        log.info("Uploading file '%s' with size %d in %d parts, with chunksize of %d.",
                 destination_s3_path, source_size_bytes, number_of_chunks, bytes_per_chunk)
        chunk_generator = self._generate_chunks(source_size_bytes, number_of_chunks, bytes_per_chunk)
        for part_num, chunk_byte_offset, num_bytes in chunk_generator:
            # FileChunkIO exposes one slice of the file as a file-like
            # object, so each part is streamed without loading the whole file.
            with FileChunkIO(local_path, 'r', offset=chunk_byte_offset, bytes=num_bytes) as chunk:
                multipart.upload_part_from_file(fp=chunk, part_num=part_num)
        # Only commit the upload if every part made it; otherwise abort so
        # S3 does not keep charging for the orphaned parts.
        if len(multipart.get_all_parts()) == number_of_chunks:
            multipart.complete_upload()
        else:
            multipart.cancel_upload()
    def _get_chunk_specs(self, source_size_bytes):
        """Returns number of chunks and bytes-per-chunk given a filesize."""
        # Select a chunk size, so that the chunk size grows with the overall size, but
        # more slowly. (Scale so that it equals the minimum chunk size.)
        bytes_per_chunk = int(math.sqrt(MINIMUM_BYTES_PER_CHUNK) * math.sqrt(source_size_bytes))
        bytes_per_chunk = min(max(bytes_per_chunk, MINIMUM_BYTES_PER_CHUNK), MULTIPART_UPLOAD_THRESHOLD)
        number_of_chunks = int(math.ceil(source_size_bytes / float(bytes_per_chunk)))
        return number_of_chunks, bytes_per_chunk
    def _generate_chunks(self, source_size_bytes, number_of_chunks, bytes_per_chunk):
        """Returns the index, offset, and size of chunks.

        Yields (part_num, byte_offset, num_bytes) triples; the final chunk
        is truncated to the bytes remaining in the file.
        """
        for chunk_index in range(number_of_chunks):
            chunk_byte_offset = chunk_index * bytes_per_chunk
            remaining_bytes_in_file = source_size_bytes - chunk_byte_offset
            num_bytes = min([bytes_per_chunk, remaining_bytes_in_file])
            # indexing of parts is one-based.
            yield chunk_index + 1, chunk_byte_offset, num_bytes
class S3HdfsTarget(HdfsTarget):
    """HDFS target that supports writing and reading files directly in S3."""
    # Luigi does not support writing to HDFS targets that point to complete URLs like "s3://foo/bar" it only supports
    # HDFS paths that look like standard file paths "/foo/bar".
    # (This class also provides a customized implementation for S3Client.)
    # TODO: Fix the upstream bug in luigi that prevents writing to HDFS files that are specified by complete URLs
    def __init__(self, path=None, format=Plain, is_tmp=False):
        super(S3HdfsTarget, self).__init__(path=path, format=format, is_tmp=is_tmp)
    def open(self, mode='r'):
        """Open the target: reads go through HDFS, writes go directly to S3.

        Raises ValueError for any mode other than 'r' or 'w'.
        """
        if mode not in ('r', 'w'):
            raise ValueError("Unsupported open mode '{mode}'".format(mode=mode))
        if mode == 'r':
            return super(S3HdfsTarget, self).open(mode=mode)
        else:
            # Normalize the legacy s3n:// scheme before handing the path
            # to the S3 client, and lazily create/cache that client.
            safe_path = self.path.replace('s3n://', 's3://')
            if not hasattr(self, 's3_client'):
                self.s3_client = ScalableS3Client()
            return AtomicS3File(safe_path, self.s3_client)
| agpl-3.0 |
vicnet/weboob | modules/becm/test.py | 2 | 1064 | # -*- coding: utf-8 -*-
# Copyright(C) 2010-2011 Julien Veyssier
#
# This file is part of a weboob module.
#
# This weboob module is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This weboob module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this weboob module. If not, see <http://www.gnu.org/licenses/>.
from weboob.tools.test import BackendTest
class BanqueEuropeenneCreditMutuelTest(BackendTest):
    """Smoke test for the 'becm' banking backend (requires live credentials)."""

    MODULE = 'becm'

    def test_becm(self):
        # List accounts; if any exist, also fetch the first account's
        # history to exercise the full browsing path.
        l = list(self.backend.iter_accounts())
        if len(l) > 0:
            a = l[0]
            list(self.backend.iter_history(a))
| lgpl-3.0 |
sotdjin/glibglab | venv/lib/python2.7/site-packages/sqlalchemy/orm/loading.py | 14 | 24424 | # orm/loading.py
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""private module containing functions used to convert database
rows into object instances and associated state.
the functions here are called primarily by Query, Mapper,
as well as some of the attribute loading strategies.
"""
from __future__ import absolute_import
from .. import util
from . import attributes, exc as orm_exc
from ..sql import util as sql_util
from . import strategy_options
from .util import _none_set, state_str
from .base import _SET_DEFERRED_EXPIRED, _DEFER_FOR_STATE
from .. import exc as sa_exc
import collections
_new_runid = util.counter()
def instances(query, cursor, context):
    """Return an ORM result as an iterator.

    Drives the DBAPI cursor, converting raw rows into entity instances or
    keyed tuples via each query entity's row processor.  Honors
    Query.yield_per() by fetching in batches, and closes the cursor on any
    error before re-raising.
    """
    context.runid = _new_runid()
    filtered = query._has_mapper_entities
    # Single-entity queries yield bare instances; multi-entity queries
    # yield named tuples built below.
    single_entity = len(query._entities) == 1 and \
        query._entities[0].supports_single_entity
    if filtered:
        if single_entity:
            # Deduplicate by object identity.
            filter_fn = id
        else:
            def filter_fn(row):
                # Hash key for deduplication: use identity for mapped
                # entities, value for plain columns.
                return tuple(
                    id(item)
                    if ent.use_id_for_hash
                    else item
                    for ent, item in zip(query._entities, row)
                )
    try:
        # One (processor, label) pair per query entity.
        (process, labels) = \
            list(zip(*[
                query_entity.row_processor(query,
                                           context, cursor)
                for query_entity in query._entities
            ]))
        if not single_entity:
            keyed_tuple = util.lightweight_named_tuple('result', labels)
        while True:
            context.partials = {}
            if query._yield_per:
                fetch = cursor.fetchmany(query._yield_per)
                if not fetch:
                    break
            else:
                fetch = cursor.fetchall()
            if single_entity:
                proc = process[0]
                rows = [proc(row) for row in fetch]
            else:
                rows = [keyed_tuple([proc(row) for proc in process])
                        for row in fetch]
            if filtered:
                rows = util.unique_list(rows, filter_fn)
            for row in rows:
                yield row
            # Without yield_per, everything was fetched in one pass.
            if not query._yield_per:
                break
    except Exception as err:
        cursor.close()
        util.raise_from_cause(err)
@util.dependencies("sqlalchemy.orm.query")
def merge_result(querylib, query, iterator, load=True):
    """Merge a result into this :class:`.Query` object's Session.

    Each mapped instance in ``iterator`` is merged via Session._merge();
    non-entity columns pass through unchanged.  Autoflush is suspended for
    the duration and restored in the finally block.
    """
    session = query.session
    if load:
        # flush current contents if we expect to load data
        session._autoflush()
    autoflush = session.autoflush
    try:
        session.autoflush = False
        single_entity = len(query._entities) == 1
        if single_entity:
            if isinstance(query._entities[0], querylib._MapperEntity):
                result = [session._merge(
                    attributes.instance_state(instance),
                    attributes.instance_dict(instance),
                    load=load, _recursive={}, _resolve_conflict_map={})
                    for instance in iterator]
            else:
                # Single non-mapped column: nothing to merge.
                result = list(iterator)
        else:
            # Positions of the mapped entities within each row.
            mapped_entities = [i for i, e in enumerate(query._entities)
                               if isinstance(e, querylib._MapperEntity)]
            result = []
            keys = [ent._label_name for ent in query._entities]
            keyed_tuple = util.lightweight_named_tuple('result', keys)
            for row in iterator:
                newrow = list(row)
                for i in mapped_entities:
                    if newrow[i] is not None:
                        newrow[i] = session._merge(
                            attributes.instance_state(newrow[i]),
                            attributes.instance_dict(newrow[i]),
                            load=load, _recursive={}, _resolve_conflict_map={})
                result.append(keyed_tuple(newrow))
        return iter(result)
    finally:
        session.autoflush = autoflush
def get_from_identity(session, key, passive):
    """Look up ``key`` in ``session``'s identity map and return the mapped
    instance, checking expired state if one is found.

    Returns ``None`` when the key is not present (or the underlying row is
    gone), ``attributes.PASSIVE_NO_RESULT`` when a refresh would require SQL
    that the ``passive`` flags disallow, or the instance itself.
    """
    instance = session.identity_map.get(key)
    if instance is None:
        return None

    state = attributes.instance_state(instance)
    if not state.expired:
        return instance

    # expired - ensure it still exists
    if not passive & attributes.SQL_OK:
        # TODO: no coverage here
        return attributes.PASSIVE_NO_RESULT

    if not passive & attributes.RELATED_OBJECT_OK:
        # this mode is used within a flush and the instance's
        # expired state will be checked soon enough, if necessary
        return instance

    try:
        state._load_expired(state, passive)
    except orm_exc.ObjectDeletedError:
        # the row backing this identity no longer exists
        session._remove_newly_deleted([state])
        return None
    return instance
def load_on_ident(query, key,
                  refresh_state=None, lockmode=None,
                  only_load_props=None):
    """Load the given identity key from the database.

    Clones ``query``, narrows it to the primary-key criterion implied by
    ``key``, and returns the single matching instance, or ``None`` when no
    row matches.

    :param query: a :class:`.Query` against the target mapper.
    :param key: identity key tuple ``(class, ident, ...)``; only ``key[1]``
     (the primary key values) is consulted.  May be ``None`` when
     refreshing via ``refresh_state``.
    :param refresh_state: when present, an existing InstanceState being
     refreshed in place; suppresses the "get" condition assertion.
    :param lockmode: legacy lock mode string; triggers a version check.
    :param only_load_props: restrict loading to these attribute names.
    """
    if key is not None:
        ident = key[1]
    else:
        ident = None
    if refresh_state is None:
        q = query._clone()
        # assert the query is in a state valid for a get()-style load
        q._get_condition()
    else:
        q = query._clone()
    if ident is not None:
        mapper = query._mapper_zero()
        # mapper._get_clause is a pre-built (criterion, bindparam-map) pair
        # matching the primary key columns
        (_get_clause, _get_params) = mapper._get_clause
        # None present in ident - turn those comparisons
        # into "IS NULL"
        if None in ident:
            nones = set([
                _get_params[col].key for col, value in
                zip(mapper.primary_key, ident) if value is None
            ])
            _get_clause = sql_util.adapt_criterion_to_null(
                _get_clause, nones)
        _get_clause = q._adapt_clause(_get_clause, True, False)
        q._criterion = _get_clause
        # bind each primary key value to its corresponding bindparam
        params = dict([
            (_get_params[primary_key].key, id_val)
            for id_val, primary_key in zip(ident, mapper.primary_key)
        ])
        q._params = params
    # any row-locking request implies we must verify the version id
    if lockmode is not None:
        version_check = True
        q = q.with_lockmode(lockmode)
    elif query._for_update_arg is not None:
        version_check = True
        q._for_update_arg = query._for_update_arg
    else:
        version_check = False
    q._get_options(
        populate_existing=bool(refresh_state),
        version_check=version_check,
        only_load_props=only_load_props,
        refresh_state=refresh_state)
    # ordering is meaningless for a by-identity load
    q._order_by = None
    try:
        return q.one()
    except orm_exc.NoResultFound:
        return None
def _setup_entity_query(
        context, mapper, query_entity,
        path, adapter, column_collection,
        with_polymorphic=None, only_load_props=None,
        polymorphic_discriminator=None, **kw):
    """Set up column loading for all properties of ``mapper``.

    Invokes ``setup()`` on each (optionally polymorphic) mapped property so
    it can contribute columns to ``column_collection`` and record fast-path
    loaders into the per-path "memoized_setups" dictionary, which
    ``_instance_processor`` later consumes as ``quick_populators``.
    """
    if with_polymorphic:
        poly_properties = mapper._iterate_polymorphic_properties(
            with_polymorphic)
    else:
        poly_properties = mapper._polymorphic_properties
    # collected per-property fast loaders, memoized on the loader path
    quick_populators = {}
    path.set(
        context.attributes,
        "memoized_setups",
        quick_populators)
    for value in poly_properties:
        if only_load_props and \
                value.key not in only_load_props:
            continue
        value.setup(
            context,
            query_entity,
            path,
            adapter,
            only_load_props=only_load_props,
            column_collection=column_collection,
            memoized_populators=quick_populators,
            **kw
        )
    # if an explicit discriminator column was given and it isn't already
    # the mapper's own polymorphic_on column, make sure it is selected
    if polymorphic_discriminator is not None and \
            polymorphic_discriminator \
            is not mapper.polymorphic_on:
        if adapter:
            pd = adapter.columns[polymorphic_discriminator]
        else:
            pd = polymorphic_discriminator
        column_collection.append(pd)
def _instance_processor(
        mapper, context, result, path, adapter,
        only_load_props=None, refresh_state=None,
        polymorphic_discriminator=None,
        _polymorphic_from=None):
    """Produce a mapper level row processor callable
    which processes rows into mapped instances.

    The returned callable accepts a result row and returns the mapped
    instance for that row (or ``None`` when the row's primary key columns
    are NULL).  All loader state is captured in closure locals up front so
    the per-row path does minimal attribute lookup.
    """
    # note that this method, most of which exists in a closure
    # called _instance(), resists being broken out, as
    # attempts to do so tend to add significant function
    # call overhead. _instance() is the most
    # performance-critical section in the whole ORM.
    pk_cols = mapper.primary_key
    if adapter:
        pk_cols = [adapter.columns[c] for c in pk_cols]
    identity_class = mapper._identity_class
    # populators maps bucket name ("quick", "new", "expire", "existing",
    # "delayed", "eager") -> list of (key, callable) used by _populate_full /
    # _populate_partial below
    populators = collections.defaultdict(list)
    props = mapper._prop_set
    if only_load_props is not None:
        props = props.intersection(
            mapper._props[k] for k in only_load_props)
    # fast loaders recorded by _setup_entity_query for this path
    quick_populators = path.get(
        context.attributes, "memoized_setups", _none_set)
    for prop in props:
        if prop in quick_populators:
            # this is an inlined path just for column-based attributes.
            col = quick_populators[prop]
            if col is _DEFER_FOR_STATE:
                populators["new"].append(
                    (prop.key, prop._deferred_column_loader))
            elif col is _SET_DEFERRED_EXPIRED:
                # note that in this path, we are no longer
                # searching in the result to see if the column might
                # be present in some unexpected way.
                populators["expire"].append((prop.key, False))
            else:
                if adapter:
                    col = adapter.columns[col]
                getter = result._getter(col, False)
                if getter:
                    populators["quick"].append((prop.key, getter))
                else:
                    # fall back to the ColumnProperty itself, which
                    # will iterate through all of its columns
                    # to see if one fits
                    prop.create_row_processor(
                        context, path, mapper, result, adapter, populators)
        else:
            prop.create_row_processor(
                context, path, mapper, result, adapter, populators)
    # hoist frequently-used context attributes into locals for the hot loop
    propagate_options = context.propagate_options
    load_path = context.query._current_path + path \
        if context.query._current_path.path else path
    session_identity_map = context.session.identity_map
    populate_existing = context.populate_existing or mapper.always_refresh
    load_evt = bool(mapper.class_manager.dispatch.load)
    refresh_evt = bool(mapper.class_manager.dispatch.refresh)
    persistent_evt = bool(context.session.dispatch.loaded_as_persistent)
    if persistent_evt:
        loaded_as_persistent = context.session.dispatch.loaded_as_persistent
    instance_state = attributes.instance_state
    instance_dict = attributes.instance_dict
    session_id = context.session.hash_key
    version_check = context.version_check
    runid = context.runid
    if refresh_state:
        refresh_identity_key = refresh_state.key
        if refresh_identity_key is None:
            # super-rare condition; a refresh is being called
            # on a non-instance-key instance; this is meant to only
            # occur within a flush()
            refresh_identity_key = \
                mapper._identity_key_from_state(refresh_state)
    else:
        refresh_identity_key = None
    # predicate over the PK value tuple: truthy when the row should be
    # skipped because its primary key is (partially) NULL
    if mapper.allow_partial_pks:
        is_not_primary_key = _none_set.issuperset
    else:
        is_not_primary_key = _none_set.intersection
    def _instance(row):
        # determine the state that we'll be populating
        if refresh_identity_key:
            # fixed state that we're refreshing
            state = refresh_state
            instance = state.obj()
            dict_ = instance_dict(instance)
            isnew = state.runid != runid
            currentload = True
            loaded_instance = False
        else:
            # look at the row, see if that identity is in the
            # session, or we have to create a new one
            identitykey = (
                identity_class,
                tuple([row[column] for column in pk_cols])
            )
            instance = session_identity_map.get(identitykey)
            if instance is not None:
                # existing instance
                state = instance_state(instance)
                dict_ = instance_dict(instance)
                isnew = state.runid != runid
                currentload = not isnew
                loaded_instance = False
                if version_check and not currentload:
                    _validate_version_id(mapper, state, dict_, row, adapter)
            else:
                # create a new instance
                # check for non-NULL values in the primary key columns,
                # else no entity is returned for the row
                if is_not_primary_key(identitykey[1]):
                    return None
                isnew = True
                currentload = True
                loaded_instance = True
                instance = mapper.class_manager.new_instance()
                dict_ = instance_dict(instance)
                state = instance_state(instance)
                state.key = identitykey
                # attach instance to session.
                state.session_id = session_id
                session_identity_map._add_unpresent(state, identitykey)
        # populate. this looks at whether this state is new
        # for this load or was existing, and whether or not this
        # row is the first row with this identity.
        if currentload or populate_existing:
            # full population routines. Objects here are either
            # just created, or we are doing a populate_existing
            # be conservative about setting load_path when populate_existing
            # is in effect; want to maintain options from the original
            # load. see test_expire->test_refresh_maintains_deferred_options
            if isnew and (propagate_options or not populate_existing):
                state.load_options = propagate_options
                state.load_path = load_path
            _populate_full(
                context, row, state, dict_, isnew, load_path,
                loaded_instance, populate_existing, populators)
            if isnew:
                if loaded_instance:
                    if load_evt:
                        state.manager.dispatch.load(state, context)
                    if persistent_evt:
                        loaded_as_persistent(context.session, state.obj())
                elif refresh_evt:
                    state.manager.dispatch.refresh(
                        state, context, only_load_props)
                if populate_existing or state.modified:
                    if refresh_state and only_load_props:
                        state._commit(dict_, only_load_props)
                    else:
                        state._commit_all(dict_, session_identity_map)
        else:
            # partial population routines, for objects that were already
            # in the Session, but a row matches them; apply eager loaders
            # on existing objects, etc.
            unloaded = state.unloaded
            isnew = state not in context.partials
            if not isnew or unloaded or populators["eager"]:
                # state is having a partial set of its attributes
                # refreshed. Populate those attributes,
                # and add to the "context.partials" collection.
                to_load = _populate_partial(
                    context, row, state, dict_, isnew, load_path,
                    unloaded, populators)
                if isnew:
                    if refresh_evt:
                        state.manager.dispatch.refresh(
                            state, context, to_load)
                    state._commit(dict_, to_load)
        return instance
    if mapper.polymorphic_map and not _polymorphic_from and not refresh_state:
        # if we are doing polymorphic, dispatch to a different _instance()
        # method specific to the subclass mapper
        _instance = _decorate_polymorphic_switch(
            _instance, context, mapper, result, path,
            polymorphic_discriminator, adapter)
    return _instance
def _populate_full(
        context, row, state, dict_, isnew, load_path,
        loaded_instance, populate_existing, populators):
    """Fully populate ``dict_`` for one row.

    Dispatches to the populator buckets built by ``_instance_processor``;
    which buckets run depends on whether this is the first row seen for the
    identity (``isnew``) and whether the load path changed.
    """
    if isnew:
        # first time we are seeing a row with this identity.
        state.runid = context.runid
        for key, getter in populators["quick"]:
            dict_[key] = getter(row)
        if populate_existing:
            # refresh semantics: drop any stale value before expiring
            for key, set_callable in populators["expire"]:
                dict_.pop(key, None)
                if set_callable:
                    state.expired_attributes.add(key)
        else:
            for key, set_callable in populators["expire"]:
                if set_callable:
                    state.expired_attributes.add(key)
        for key, populator in populators["new"]:
            populator(state, dict_, row)
        for key, populator in populators["delayed"]:
            populator(state, dict_, row)
    elif load_path != state.load_path:
        # new load path, e.g. object is present in more than one
        # column position in a series of rows
        state.load_path = load_path
        # if we have data, and the data isn't in the dict, OK, let's put
        # it in.
        for key, getter in populators["quick"]:
            if key not in dict_:
                dict_[key] = getter(row)
        # otherwise treat like an "already seen" row
        for key, populator in populators["existing"]:
            populator(state, dict_, row)
            # TODO: allow "existing" populator to know this is
            # a new path for the state:
            # populator(state, dict_, row, new_path=True)
    else:
        # have already seen rows with this identity in this same path.
        for key, populator in populators["existing"]:
            populator(state, dict_, row)
            # TODO: same path
            # populator(state, dict_, row, new_path=False)
def _populate_partial(
        context, row, state, dict_, isnew, load_path,
        unloaded, populators):
    """Populate only a subset of attributes for an already-present object.

    Returns ``to_load``, the set of attribute keys being refreshed for this
    state; also records the state in ``context.partials`` so subsequent rows
    for the same identity reuse the same set.
    """
    if not isnew:
        # state was already registered as partial earlier in this load;
        # continue refreshing the same key set
        to_load = context.partials[state]
        for key, populator in populators["existing"]:
            if key in to_load:
                populator(state, dict_, row)
    else:
        # first partial row for this state: refresh its unloaded attributes
        to_load = unloaded
        context.partials[state] = to_load
        for key, getter in populators["quick"]:
            if key in to_load:
                dict_[key] = getter(row)
        for key, set_callable in populators["expire"]:
            if key in to_load:
                dict_.pop(key, None)
                if set_callable:
                    state.expired_attributes.add(key)
        for key, populator in populators["new"]:
            if key in to_load:
                populator(state, dict_, row)
        for key, populator in populators["delayed"]:
            if key in to_load:
                populator(state, dict_, row)
    # eager loaders run regardless, but skip attributes never loaded
    for key, populator in populators["eager"]:
        if key not in unloaded:
            populator(state, dict_, row)
    return to_load
def _validate_version_id(mapper, state, dict_, row, adapter):
    """Raise :class:`.StaleDataError` if the version id held on ``state``
    does not match the version id present in ``row``.

    No-op when the mapper is not configured with a version id column.
    """
    vcol = mapper.version_id_col
    if vcol is None:
        return
    if adapter:
        vcol = adapter.columns[vcol]
    if mapper._get_state_attr_by_column(
            state, dict_, mapper.version_id_col) == row[vcol]:
        return
    raise orm_exc.StaleDataError(
        "Instance '%s' has version id '%s' which "
        "does not match database-loaded version id '%s'."
        % (state_str(state),
           mapper._get_state_attr_by_column(
               state, dict_, mapper.version_id_col),
           row[vcol]))
def _decorate_polymorphic_switch(
        instance_fn, context, mapper, result, path,
        polymorphic_discriminator, adapter):
    """Wrap ``instance_fn`` so each row is dispatched to the row processor
    of the subclass mapper named by the row's discriminator column.

    Sub-mapper processors are created lazily (via ``_instance_processor``
    with ``_polymorphic_from`` set, preventing re-wrapping) and cached per
    discriminator value in a :class:`.PopulateDict`.
    """
    if polymorphic_discriminator is not None:
        polymorphic_on = polymorphic_discriminator
    else:
        polymorphic_on = mapper.polymorphic_on
    if polymorphic_on is None:
        # mapper is not polymorphic; nothing to decorate
        return instance_fn
    if adapter:
        polymorphic_on = adapter.columns[polymorphic_on]
    def configure_subclass_mapper(discriminator):
        # build (once per discriminator value) the processor for the
        # subclass mapper, or None when the discriminator names this
        # mapper itself
        try:
            sub_mapper = mapper.polymorphic_map[discriminator]
        except KeyError:
            raise AssertionError(
                "No such polymorphic_identity %r is defined" %
                discriminator)
        else:
            if sub_mapper is mapper:
                return None
            return _instance_processor(
                sub_mapper, context, result,
                path, adapter, _polymorphic_from=mapper)
    polymorphic_instances = util.PopulateDict(
        configure_subclass_mapper
    )
    def polymorphic_instance(row):
        discriminator = row[polymorphic_on]
        if discriminator is not None:
            _instance = polymorphic_instances[discriminator]
            if _instance:
                return _instance(row)
        # NULL discriminator or own identity: use the base processor
        return instance_fn(row)
    return polymorphic_instance
def load_scalar_attributes(mapper, state, attribute_names):
    """initiate a column-based attribute refresh operation.

    Re-loads the given ``attribute_names`` for ``state`` from the database,
    using an optimized inheritance statement when available, otherwise a
    plain by-identity load.  Raises :class:`.DetachedInstanceError` when the
    state has no session, and :class:`.ObjectDeletedError` when a persistent
    object's row no longer exists.
    """
    # assert mapper is _state_mapper(state)
    session = state.session
    if not session:
        raise orm_exc.DetachedInstanceError(
            "Instance %s is not bound to a Session; "
            "attribute refresh operation cannot proceed" %
            (state_str(state)))
    has_key = bool(state.key)
    # `result` stays False until a load path has actually been attempted
    result = False
    if mapper.inherits and not mapper.concrete:
        # because we are using Core to produce a select() that we
        # pass to the Query, we aren't calling setup() for mapped
        # attributes; in 1.0 this means deferred attrs won't get loaded
        # by default
        statement = mapper._optimized_get_statement(state, attribute_names)
        if statement is not None:
            result = load_on_ident(
                session.query(mapper).
                options(
                    strategy_options.Load(mapper).undefer("*")
                ).from_statement(statement),
                None,
                only_load_props=attribute_names,
                refresh_state=state
            )
    if result is False:
        if has_key:
            identity_key = state.key
        else:
            # this codepath is rare - only valid when inside a flush, and the
            # object is becoming persistent but hasn't yet been assigned
            # an identity_key.
            # check here to ensure we have the attrs we need.
            pk_attrs = [mapper._columntoproperty[col].key
                        for col in mapper.primary_key]
            if state.expired_attributes.intersection(pk_attrs):
                raise sa_exc.InvalidRequestError(
                    "Instance %s cannot be refreshed - it's not "
                    " persistent and does not "
                    "contain a full primary key." % state_str(state))
            identity_key = mapper._identity_key_from_state(state)
        # refuse to refresh when the identity key is entirely/partially NULL
        # (depending on allow_partial_pks); warn instead of raising
        if (_none_set.issubset(identity_key) and
                not mapper.allow_partial_pks) or \
                _none_set.issuperset(identity_key):
            util.warn_limited(
                "Instance %s to be refreshed doesn't "
                "contain a full primary key - can't be refreshed "
                "(and shouldn't be expired, either).",
                state_str(state))
            return
        result = load_on_ident(
            session.query(mapper),
            identity_key,
            refresh_state=state,
            only_load_props=attribute_names)
    # if instance is pending, a refresh operation
    # may not complete (even if PK attributes are assigned)
    if has_key and result is None:
        raise orm_exc.ObjectDeletedError(state)
| mit |
jamesabel/osnap | osnap/osnapy.py | 1 | 2772 |
import argparse
from osnap import default_python_version, get_logger, init_logger_from_args, __application_name__
import osnap.osnapy_win
import osnap.osnapy_mac
import osnap.util
LOGGER = get_logger(__application_name__)
def make_osnapy(
        python_version,
        application_name = None,
        clean_cache = False,
        use_pyrun = False,  # support for eGenix™ PyRun™ has been removed
        force_app_uninstall = False,
        architecture = '64bit',
):
    """Create the 'osnapy' embedded Python environment for this platform.

    :param python_version: Python version string to install (e.g. '3.6.1')
    :param application_name: application name; required on OSX/MacOS
    :param clean_cache: when True, clear the download cache first
    :param use_pyrun: legacy flag; PyRun support has been removed and
        passing True now raises NotImplementedError
    :param force_app_uninstall: force application uninstalls if necessary (mac only)
    :param architecture: '64bit' or '32bit' (Windows launcher only)
    :raises Exception: when application_name is missing on mac
    :raises NotImplementedError: on unsupported platforms or use_pyrun=True
    """
    LOGGER.debug('creating osnapy Python environment using python %s' % python_version)
    if osnap.util.is_mac() and application_name is None:
        raise Exception('must specify the application name on mac')
    if osnap.util.is_windows():
        osnapy = osnap.osnapy_win.OsnapyWin(python_version, application_name, clean_cache, architecture=architecture)
    elif osnap.util.is_mac():
        if use_pyrun:
            # BUG FIX: previously this only logged and fell through with
            # osnapy == None, crashing below with an opaque AttributeError.
            # Fail loudly at the point of diagnosis instead.
            LOGGER.critical('pyrun capability has been removed')
            raise NotImplementedError('pyrun capability has been removed')
        osnapy = osnap.osnapy_mac.OsnapyMac(python_version, application_name, clean_cache, force_app_uninstall)
    else:
        raise NotImplementedError
    osnapy.create_python()
    osnapy.pip('pip')
    osnapy.pip('setuptools')
    osnapy.pip('Cython')  # e.g. for kivy
    osnapy.pip(None)  # install all from requirements.txt
def main():
    """Command-line entry point: parse arguments and build the osnapy environment."""
    arg_parser = argparse.ArgumentParser(description='create the osnapy Python environment',
                                         formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    arg_parser.add_argument('-a', '--application', default=None, help='application name (required for OSX/MacOS)')
    arg_parser.add_argument('-A', '--architecture', default='64bit', choices=['64bit', '32bit'], help='The architecture to use for the launcher')
    arg_parser.add_argument('-p', '--python_version', default=default_python_version, help='python version')
    arg_parser.add_argument('-c', '--clear', action='store_true', default=False, help='clear cache')
    arg_parser.add_argument('-f', '--force_uninstall', action='store_true', default=False,
                            help='force application uninstalls if necessary')
    arg_parser.add_argument('-v', '--verbose', action='store_true', default=False, help='print more verbose messages')
    cli_args = arg_parser.parse_args()
    init_logger_from_args(cli_args)
    make_osnapy(
        python_version=cli_args.python_version,
        application_name=cli_args.application,
        clean_cache=cli_args.clear,
        use_pyrun=False,  # support for eGenix™ PyRun™ has been removed
        force_app_uninstall=cli_args.force_uninstall,
        architecture=cli_args.architecture,
    )
if __name__ == '__main__':
    main()
| mit |
ChihChengLiang/python-rtmbot | tests/test_rtmbot_core.py | 11 | 2363 | # -*- coding: utf-8 -*-
try:
from unittest.mock import Mock, create_autospec
except ImportError:
from mock import Mock, create_autospec
from testfixtures import LogCapture
from slackclient import SlackClient, _channel, _server, _util
from rtmbot.core import RtmBot, Plugin
def init_rtmbot():
    ''' Build an RtmBot instance wired with test-only default configuration. '''
    test_config = {
        'SLACK_TOKEN': 'test-12345',
        'BASE_PATH': '/tmp/',
        'LOGFILE': '/tmp/rtmbot.log',
        'DEBUG': True,
    }
    return RtmBot(test_config)
def test_init():
    """RtmBot constructor should copy config values onto attributes and log its directory."""
    with LogCapture() as l:
        rtmbot = init_rtmbot()
        assert rtmbot.token == 'test-12345'
        assert rtmbot.directory == '/tmp/'
        assert rtmbot.debug == True
        # exactly one INFO record announcing the working directory
        l.check(
            ('root', 'INFO', 'Initialized in: /tmp/')
        )
def test_output():
    ''' Test that sending a message behaves as expected '''
    rtmbot = init_rtmbot()
    # Mock the slack_client object with Server, Channel objects and needed methods
    slackclient_mock = create_autospec(SlackClient)
    server_mock = create_autospec(_server.Server)
    # Mock Server with channels method and correct return value
    slackclient_mock.server = server_mock
    searchlist_mock = create_autospec(_util.SearchList)
    server_mock.channels = searchlist_mock
    channel_mock = create_autospec(_channel.Channel)
    slackclient_mock.server.channels.find.return_value = channel_mock
    rtmbot.slack_client = slackclient_mock
    # mock the plugin object to return a sample response
    plugin_mock = create_autospec(Plugin)
    plugin_mock.do_output.return_value = [['C12345678', 'test message']]
    rtmbot.bot_plugins.append(plugin_mock)
    rtmbot.output()
    # test that the output matches the expected value
    channel_mock.send_message.assert_called_with('test message')
    # test that emoji messages work as expected
    channel_mock.reset_mock()
    plugin_mock.reset_mock()
    plugin_mock.do_output.return_value = [['C12345678', '🚀 testing']]
    rtmbot.output()
    channel_mock.send_message.assert_called_with('🚀 testing')
    # test that unicode messages work as expected
    channel_mock.reset_mock()
    plugin_mock.reset_mock()
    plugin_mock.do_output.return_value = [['C12345678', 'ù hœø3ö']]
    rtmbot.output()
    channel_mock.send_message.assert_called_with('ù hœø3ö')
paulmathews/nova | nova/virt/disk/nbd.py | 8 | 3960 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Support for mounting images with qemu-nbd"""
import os
import time
from nova import flags
from nova.openstack.common import cfg
from nova import utils
from nova.virt.disk import mount
nbd_opts = [
cfg.IntOpt('timeout_nbd',
default=10,
help='time to wait for a NBD device coming up'),
cfg.IntOpt('max_nbd_devices',
default=16,
help='maximum number of possible nbd devices'),
]
FLAGS = flags.FLAGS
FLAGS.register_opts(nbd_opts)
class Mount(mount.Mount):
    """qemu-nbd support disk images."""
    mode = 'nbd'
    device_id_string = mode
    # NOTE(padraig): There are three issues with this nbd device handling
    # 1. max_nbd_devices should be inferred (#861504)
    # 2. We assume nothing else on the system uses nbd devices
    # 3. Multiple workers on a system can race against each other
    # A patch has been proposed in Nov 2011, to add a -f option to
    # qemu-nbd, akin to losetup -f. One could test for this by running qemu-nbd
    # with just the -f option, where it will fail if not supported, or if there
    # are no free devices. Note that patch currently hardcodes 16 devices.
    # We might be able to alleviate problem 2. by scanning /proc/partitions
    # like the aforementioned patch does.
    _DEVICES = ['/dev/nbd%s' % i for i in range(FLAGS.max_nbd_devices)]
    def _allocate_nbd(self):
        """Pop a free /dev/nbdX from the shared pool and return it.

        Returns None (and sets self.error) when the nbd module is not
        loaded or no free device remains.  A device is considered busy
        when /sys/block/<dev>/pid exists.
        """
        if not os.path.exists("/sys/block/nbd0"):
            self.error = _('nbd unavailable: module not loaded')
            return None
        while True:
            if not self._DEVICES:
                # really want to log this info, not raise
                self.error = _('No free nbd devices')
                return None
            device = self._DEVICES.pop()
            if not os.path.exists("/sys/block/%s/pid" %
                                  os.path.basename(device)):
                break
        return device
    def _free_nbd(self, device):
        """Return a device to the shared pool, ignoring duplicates."""
        # The device could already be present if unget_dev
        # is called right after a nova restart
        # (when destroying an LXC container for example).
        if device not in self._DEVICES:
            self._DEVICES.append(device)
    def get_dev(self):
        """Attach self.image to a free nbd device via qemu-nbd.

        Returns True on success (setting self.device and self.linked);
        False on failure, with the reason in self.error.
        """
        device = self._allocate_nbd()
        if not device:
            return False
        _out, err = utils.trycmd('qemu-nbd', '-c', device, self.image,
                                 run_as_root=True)
        if err:
            self.error = _('qemu-nbd error: %s') % err
            self._free_nbd(device)
            return False
        # NOTE(vish): this forks into another process, so give it a chance
        # to set up before continuing
        for _i in range(FLAGS.timeout_nbd):
            if os.path.exists("/sys/block/%s/pid" % os.path.basename(device)):
                self.device = device
                break
            time.sleep(1)
        else:
            # timed out waiting for the qemu-nbd child to publish its pid
            self.error = _('nbd device %s did not show up') % device
            self._free_nbd(device)
            return False
        self.linked = True
        return True
    def unget_dev(self):
        """Detach the nbd device (if attached) and return it to the pool."""
        if not self.linked:
            return
        utils.execute('qemu-nbd', '-d', self.device, run_as_root=True)
        self._free_nbd(self.device)
        self.linked = False
        self.device = None
| apache-2.0 |
KyleJamesWalker/ansible | lib/ansible/plugins/cache/redis.py | 36 | 3530 | # (c) 2014, Brian Coca, Josh Drake, et al
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import time
import json
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.plugins.cache import BaseCacheModule
try:
from redis import StrictRedis
except ImportError:
raise AnsibleError("The 'redis' python module is required for the redis fact cache, 'pip install redis'")
class CacheModule(BaseCacheModule):
    """
    A caching module backed by redis.

    Keys are maintained in a zset with their score being the timestamp
    when they are inserted. This allows for the usage of 'zremrangebyscore'
    to expire keys. This mechanism is used instead of a pattern-matched
    'scan' for performance.
    """
    def __init__(self, *args, **kwargs):
        # CACHE_PLUGIN_CONNECTION is a "host:port" string; split it into
        # positional args for StrictRedis (empty -> library defaults).
        if C.CACHE_PLUGIN_CONNECTION:
            connection = C.CACHE_PLUGIN_CONNECTION.split(':')
        else:
            connection = []
        self._timeout = float(C.CACHE_PLUGIN_TIMEOUT)
        self._prefix = C.CACHE_PLUGIN_PREFIX
        self._cache = StrictRedis(*connection)
        self._keys_set = 'ansible_cache_keys'
    def _make_key(self, key):
        """Return the namespaced redis key name for *key*."""
        return self._prefix + key
    def get(self, key):
        """Return the cached value for *key*; raise KeyError when absent/expired."""
        value = self._cache.get(self._make_key(key))
        # guard against the key not being removed from the zset;
        # this could happen in cases where the timeout value is changed
        # between invocations
        if value is None:
            self.delete(key)
            raise KeyError
        return json.loads(value)
    def set(self, key, value):
        """Store *value* (JSON-serialized) under *key* and record its timestamp."""
        value2 = json.dumps(value)
        if self._timeout > 0:  # a timeout of 0 is handled as meaning 'never expire'
            self._cache.setex(self._make_key(key), int(self._timeout), value2)
        else:
            self._cache.set(self._make_key(key), value2)
        # NOTE(review): (score, member) argument order is the redis-py 2.x
        # zadd API; redis-py 3.x requires a mapping — confirm pinned version.
        self._cache.zadd(self._keys_set, time.time(), key)
    def _expire_keys(self):
        """Drop zset entries older than the configured timeout."""
        if self._timeout > 0:
            expiry_age = time.time() - self._timeout
            self._cache.zremrangebyscore(self._keys_set, 0, expiry_age)
    def keys(self):
        """Return all non-expired cache keys."""
        self._expire_keys()
        return self._cache.zrange(self._keys_set, 0, -1)
    def contains(self, key):
        """Return True when *key* is present and not expired."""
        self._expire_keys()
        # BUG FIX: zrank returns None for a missing member; the previous
        # 'zrank(...) >= 0' comparison raises TypeError on Python 3.
        return self._cache.zrank(self._keys_set, key) is not None
    def delete(self, key):
        """Remove *key* from both the value store and the key zset."""
        self._cache.delete(self._make_key(key))
        self._cache.zrem(self._keys_set, key)
    def flush(self):
        """Delete every cached entry."""
        for key in self.keys():
            self.delete(key)
    def copy(self):
        """Return a plain dict snapshot of the whole cache."""
        # TODO: there is probably a better way to do this in redis
        ret = dict()
        for key in self.keys():
            ret[key] = self.get(key)
        return ret
    def __getstate__(self):
        # connections are not picklable; reconnect on unpickle instead
        return dict()
    def __setstate__(self, data):
        self.__init__()
| gpl-3.0 |
oblank/pydjango-froum | xp/settings.py | 1 | 7211 | # coding: utf-8
# Django settings for xp project.
# NOTE(review): DEBUG enabled and credentials/SECRET_KEY hard-coded below —
# suitable for local development only; move secrets out before deploying.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
    # ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'forum', # Or path to database file if using sqlite3.
        # The following settings are not used with sqlite3:
        'USER': 'root',
        # NOTE(review): plaintext database password checked into settings
        'PASSWORD': '123456',
        'HOST': '127.0.0.1', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
        'PORT': '3306', # Set to empty string for default.
    }
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ['*']
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'Asia/Shanghai'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'zh-CN'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True  # only needs to be enabled when the admin is used
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = False
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
# NOTE(review): SECRET_KEY is committed to source control; rotate and load
# from the environment for any non-development deployment.
SECRET_KEY = 'h6=yzee&jze#4p1@twhksg1wg6hv%pzwomw(!o($qsly%lzlhe'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    # 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
    'django.middleware.cache.UpdateCacheMiddleware',  # cache middleware; must be first
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',  # CSRF enabled: remember {% csrf_token %} in POST forms and use RequestContext
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    # Uncomment the next line for simple clickjacking protection:
    # 'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'django.middleware.cache.FetchFromCacheMiddleware',  # cache middleware; must be last
)
ROOT_URLCONF = 'xp.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'xp.wsgi.application'
TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)
TEMPLATE_CONTEXT_PROCESSORS = (  # templates use current_user/request objects; these processors expose them via RequestContext
    'django.contrib.auth.context_processors.auth',  # user object etc.
    'django.core.context_processors.request',  # request object etc.
    'django.core.context_processors.static',  # use {{ STATIC_URL }} in templates for the static files path
    'forum.context_processors.custom_proc',  # custom template context processor
)
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Uncomment the next line to enable the admin:
    'django.contrib.admin',
    # Uncomment the next line to enable admin documentation:
    # 'django.contrib.admindocs',
    'django.contrib.sitemaps',  # Django sitemap framework
    'forum',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        },
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
        },
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
        'django.db.backends': {
            'level': 'DEBUG',
            'handlers': ['console'],
        },
    }
}
# CACHES = { # memcached cache settings
# 'default': {
# 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
# 'LOCATION': '127.0.0.1:11211',
# }
# }
# SESSION_ENGINE = 'django.contrib.sessions.backends.cache' # store sessions in memcached
# custom user model
AUTH_USER_MODEL = 'forum.ForumUser'
# authentication backends
AUTHENTICATION_BACKENDS = ('forum.backends.EmailAuthBackend',)
# default login URI
LOGIN_URL = '/login/'
# outgoing e-mail settings
# NOTE(review): e-mail account credentials are hard-coded below; move to
# environment variables before publishing this file.
EMAIL_HOST = 'smtp.163.com'
EMAIL_PORT = 25
EMAIL_HOST_USER= 'a135689110'
EMAIL_HOST_PASSWORD= '8804183'
DEFAULT_FROM_EMAIL = 'a135689110@163.com'
# reserved usernames for registration; not a Django setting
RESERVED = ["user", "topic", "home", "setting", "forgot", "login", "logout", "register", "admin"]
| mit |
drufat/sympy | sympy/physics/mechanics/tests/test_particle.py | 77 | 1388 | from sympy import symbols
from sympy.physics.mechanics import Point, Particle, ReferenceFrame
def test_particle():
    """Exercise the Particle API: mass/point accessors, linear and angular
    momentum in a reference frame, and potential/kinetic energy."""
    m, m2, v1, v2, v3, r, g, h = symbols('m m2 v1 v2 v3 r g h')
    P = Point('P')
    P2 = Point('P2')
    p = Particle('pa', P, m)
    assert p.mass == m
    assert p.point == P
    # Test the mass setter
    p.mass = m2
    assert p.mass == m2
    # Test the point setter
    p.point = P2
    assert p.point == P2
    # Test the linear momentum function
    N = ReferenceFrame('N')
    O = Point('O')
    P2.set_pos(O, r * N.y)
    # momentum/angular-momentum checked for velocity along each axis in turn
    P2.set_vel(N, v1 * N.x)
    assert p.linear_momentum(N) == m2 * v1 * N.x
    assert p.angular_momentum(O, N) == -m2 * r *v1 * N.z
    P2.set_vel(N, v2 * N.y)
    assert p.linear_momentum(N) == m2 * v2 * N.y
    assert p.angular_momentum(O, N) == 0
    P2.set_vel(N, v3 * N.z)
    assert p.linear_momentum(N) == m2 * v3 * N.z
    assert p.angular_momentum(O, N) == m2 * r * v3 * N.x
    # then for a general velocity combining all three components
    P2.set_vel(N, v1 * N.x + v2 * N.y + v3 * N.z)
    assert p.linear_momentum(N) == m2 * (v1 * N.x + v2 * N.y + v3 * N.z)
    assert p.angular_momentum(O, N) == m2 * r * (v3 * N.x - v1 * N.z)
    p.potential_energy = m * g * h
    assert p.potential_energy == m * g * h
    # TODO make the result not be system-dependent
    assert p.kinetic_energy(
        N) in [m2*(v1**2 + v2**2 + v3**2)/2,
               m2 * v1**2 / 2 + m2 * v2**2 / 2 + m2 * v3**2 / 2]
| bsd-3-clause |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.