repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
RyanSkraba/beam | sdks/python/apache_beam/coders/typecoders.py | 1 | 8078 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Type coders registration.
This module contains functionality to define and use coders for custom classes.
Let's say we have a class Xyz and we are processing a PCollection with elements
of type Xyz. If we do not register a coder for Xyz, a default pickle-based
fallback coder will be used. This can be undesirable for two reasons. First, we
may want a faster coder or a more space efficient one. Second, the pickle-based
coder is not deterministic in the sense that objects like dictionaries or sets
are not guaranteed to be encoded in the same way every time (elements are not
really ordered).
Two (sometimes three) steps are needed to define and use a custom coder:
- define the coder class
- associate the coder with the class (a.k.a. coder registration)
- typehint DoFns or transforms with the new class or composite types using
the class.
A coder class is defined by subclassing from CoderBase and defining the
encode_to_bytes and decode_from_bytes methods. The framework uses duck-typing
for coders so it is not strictly required to subclass from CoderBase as long as
the encode/decode methods are defined.
Registering a coder class is made with a register_coder() call::
from apache_beam import coders
...
coders.registry.register_coder(Xyz, XyzCoder)
Additionally, DoFns and PTransforms may need type hints. This is not always
necessary since there is functionality to infer the return types of DoFns by
analyzing the code. For instance, for the function below the return type of
'Xyz' will be inferred::
def MakeXyzs(v):
return Xyz(v)
If Xyz is inferred then its coder will be used whenever the framework needs to
serialize data (e.g., writing to the shuffler subsystem responsible for group by
key operations). If a typehint is needed it can be specified by decorating the
DoFns or using with_input_types/with_output_types methods on PTransforms. For
example, the above function can be decorated::
@with_output_types(Xyz)
def MakeXyzs(v):
return complex_operation_returning_Xyz(v)
See apache_beam.typehints.decorators module for more details.
"""
from __future__ import absolute_import
from builtins import object
from typing import Any
from typing import Dict
from typing import Iterable
from typing import List
from typing import Type
from past.builtins import unicode
from apache_beam.coders import coders
from apache_beam.typehints import typehints
__all__ = ['registry']
class CoderRegistry(object):
  """A coder registry for typehint/coder associations.

  Maps typehints (plain Python types and typehints.TypeConstraint classes)
  to coder classes, and falls back to a configurable fallback coder when no
  explicit association exists.
  """

  def __init__(self, fallback_coder=None):
    # type: (coders.Coder) -> None
    """Creates a registry pre-populated with the standard coders.

    Args:
      fallback_coder: optional coder used when no registered coder matches;
        defaults to a FirstOf over ProtoCoder and FastPrimitivesCoder.
    """
    self._coders = {}  # type: Dict[Any, Type[coders.Coder]]
    self.custom_types = []  # type: List[Any]
    self.register_standard_coders(fallback_coder)

  def register_standard_coders(self, fallback_coder):
    """Register coders for all basic and composite types."""
    for known_type, coder_class in (
        (int, coders.VarIntCoder),
        (float, coders.FloatCoder),
        (bytes, coders.BytesCoder),
        (bool, coders.BooleanCoder),
        (unicode, coders.StrUtf8Coder),
        (typehints.TupleConstraint, coders.TupleCoder)):
      self._register_coder_internal(known_type, coder_class)
    # Default fallback coders applied in that order until the first matching
    # coder found.
    default_fallback_coders = [coders.ProtoCoder, coders.FastPrimitivesCoder]
    self._fallback_coder = fallback_coder or FirstOf(default_fallback_coders)

  def _register_coder_internal(self, typehint_type, typehint_coder_class):
    # type: (Any, Type[coders.Coder]) -> None
    # Internal registration: no bookkeeping of custom (user-defined) types.
    self._coders[typehint_type] = typehint_coder_class

  def register_coder(self, typehint_type, typehint_coder_class):
    # type: (Any, Type[coders.Coder]) -> None
    """Registers a coder class for a type, tracking it as a custom type."""
    if not isinstance(typehint_coder_class, type):
      raise TypeError('Coder registration requires a coder class object. '
                      'Received %r instead.' % typehint_coder_class)
    # Remember user-registered types (deduplicated) so they can be reported
    # by get_custom_type_coder_tuples().
    if typehint_type not in self.custom_types:
      self.custom_types.append(typehint_type)
    self._register_coder_internal(typehint_type, typehint_coder_class)

  def get_coder(self, typehint):
    # type: (Any) -> coders.Coder
    """Returns a coder instance for the given typehint.

    TypeConstraints are looked up by their class (e.g. all tuple hints map
    to TupleCoder); anything unregistered goes to the fallback coder.
    """
    is_constraint = isinstance(typehint, typehints.TypeConstraint)
    lookup_key = typehint.__class__ if is_constraint else typehint
    registered = self._coders.get(lookup_key, None)
    if is_constraint and registered is not None:
      return registered.from_type_hint(typehint, self)
    if registered is None:
      # We use the fallback coder when there is no coder registered for a
      # typehint. For example a user defined class with no coder specified.
      if not hasattr(self, '_fallback_coder'):
        raise RuntimeError(
            'Coder registry has no fallback coder. This can happen if the '
            'fast_coders module could not be imported.')
      if isinstance(typehint, (typehints.IterableTypeConstraint,
                               typehints.ListConstraint)):
        return coders.IterableCoder.from_type_hint(typehint, self)
      # None (legacy stand-in for Any), object/Any, and TypeVariables all
      # deliberately fall through to the fallback coder.
      # TODO(robertwb): Clean this up when type inference is fully enabled,
      # and re-enable a warning for other unregistered typehints when it is
      # actionable.
      registered = self._fallback_coder
    return registered.from_type_hint(typehint, self)

  def get_custom_type_coder_tuples(self, types):
    """Returns type/coder tuples for all custom types passed in."""
    matches = []
    for candidate in types:
      if candidate in self.custom_types:
        matches.append((candidate, self._coders[candidate]))
    return matches

  def verify_deterministic(self, key_coder, op_name, silent=True):
    """Returns a deterministic version of key_coder, or key_coder itself.

    Non-deterministic key coders are wrapped via as_deterministic_coder so
    that grouping operations produce consistent results.
    """
    if key_coder.is_deterministic():
      return key_coder
    error_msg = ('The key coder "%s" for %s '
                 'is not deterministic. This may result in incorrect '
                 'pipeline output. This can be fixed by adding a type '
                 'hint to the operation preceding the GroupByKey step, '
                 'and for custom key classes, by writing a '
                 'deterministic custom Coder. Please see the '
                 'documentation for more details.' % (key_coder, op_name))
    return key_coder.as_deterministic_coder(op_name, error_msg)
class FirstOf(object):
  """For internal use only; no backwards-compatibility guarantees.

  A class used to get the first matching coder from a list of coders."""

  def __init__(self, coders):
    # type: (Iterable[Type[coders.Coder]]) -> None
    self._coders = coders

  def from_type_hint(self, typehint, registry):
    """Returns an instance of the first coder that accepts the typehint.

    Args:
      typehint: the typehint a coder is requested for.
      registry: the CoderRegistry, forwarded so composite coders can look up
        coders for their component types.

    Raises:
      ValueError: if none of the candidate coders can handle the typehint.
    """
    messages = []
    for coder in self._coders:
      try:
        # Bug fix: forward the actual registry rather than `self` — a
        # FirstOf instance is not a CoderRegistry, so passing `self` broke
        # any coder that needed component-coder lookups.
        return coder.from_type_hint(typehint, registry)
      except Exception as e:
        msg = ('%s could not provide a Coder for type %s: %s' %
               (coder, typehint, e))
        messages.append(msg)
    raise ValueError('Cannot provide coder for %s: %s' %
                     (typehint, ';'.join(messages)))
# The module-level coder registry shared across the process; this is the
# `registry` object exported via __all__ and used as
# `coders.registry.register_coder(...)`.
registry = CoderRegistry()
| apache-2.0 |
arnaud-morvan/QGIS | python/plugins/processing/algs/qgis/HypsometricCurves.py | 4 | 9490 | # -*- coding: utf-8 -*-
"""
***************************************************************************
HypsometricCurves.py
---------------------
Date : November 2014
Copyright : (C) 2014 by Alexander Bruy
Email : alexander dot bruy at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Alexander Bruy'
__date__ = 'November 2014'
__copyright__ = '(C) 2014, Alexander Bruy'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
import numpy
import csv
from osgeo import gdal, ogr, osr
from qgis.core import (QgsRectangle,
QgsGeometry,
QgsFeatureRequest,
QgsProcessingException,
QgsProcessing,
QgsProcessingParameterBoolean,
QgsProcessingParameterNumber,
QgsProcessingParameterRasterLayer,
QgsProcessingParameterFeatureSource,
QgsProcessingParameterFolderDestination)
from processing.algs.qgis.QgisAlgorithm import QgisAlgorithm
from processing.tools import raster
class HypsometricCurves(QgisAlgorithm):
    """Computes hypsometric curves for a DEM.

    For each polygon of a boundary layer, the DEM cells inside the polygon
    are binned by elevation step and the (cumulative) area per bin is
    written to one CSV file per feature in the output directory.
    """

    # Processing-framework parameter identifiers.
    INPUT_DEM = 'INPUT_DEM'
    BOUNDARY_LAYER = 'BOUNDARY_LAYER'
    STEP = 'STEP'
    USE_PERCENTAGE = 'USE_PERCENTAGE'
    OUTPUT_DIRECTORY = 'OUTPUT_DIRECTORY'

    def group(self):
        # Translated group label shown in the Processing toolbox.
        return self.tr('Raster terrain analysis')

    def groupId(self):
        # Stable, non-translated group identifier.
        return 'rasterterrainanalysis'

    def __init__(self):
        super().__init__()

    def initAlgorithm(self, config=None):
        """Declares the algorithm's input and output parameters."""
        self.addParameter(QgsProcessingParameterRasterLayer(self.INPUT_DEM,
                                                            self.tr('DEM to analyze')))
        self.addParameter(QgsProcessingParameterFeatureSource(self.BOUNDARY_LAYER,
                                                              self.tr('Boundary layer'), [QgsProcessing.TypeVectorPolygon]))
        # Elevation bin width (map/vertical units of the DEM).
        self.addParameter(QgsProcessingParameterNumber(self.STEP,
                                                       self.tr('Step'), type=QgsProcessingParameterNumber.Double, minValue=0.0, defaultValue=100.0))
        self.addParameter(QgsProcessingParameterBoolean(self.USE_PERCENTAGE,
                                                        self.tr('Use % of area instead of absolute value'), defaultValue=False))
        self.addParameter(QgsProcessingParameterFolderDestination(self.OUTPUT_DIRECTORY,
                                                                  self.tr('Hypsometric curves')))

    def name(self):
        return 'hypsometriccurves'

    def displayName(self):
        return self.tr('Hypsometric curves')

    def processAlgorithm(self, parameters, context, feedback):
        """Rasterizes each boundary polygon over the DEM window it covers,
        masks the DEM to the polygon, and delegates the per-feature binning
        to calculateHypsometry().

        Returns a dict with the output directory path.
        """
        raster_layer = self.parameterAsRasterLayer(parameters, self.INPUT_DEM, context)
        target_crs = raster_layer.crs()
        rasterPath = raster_layer.source()
        source = self.parameterAsSource(parameters, self.BOUNDARY_LAYER, context)
        if source is None:
            raise QgsProcessingException(self.invalidSourceError(parameters, self.BOUNDARY_LAYER))
        step = self.parameterAsDouble(parameters, self.STEP, context)
        percentage = self.parameterAsBool(parameters, self.USE_PERCENTAGE, context)
        outputPath = self.parameterAsString(parameters, self.OUTPUT_DIRECTORY, context)
        # Open the DEM directly through GDAL (read-only) for pixel access.
        rasterDS = gdal.Open(rasterPath, gdal.GA_ReadOnly)
        geoTransform = rasterDS.GetGeoTransform()
        rasterBand = rasterDS.GetRasterBand(1)
        noData = rasterBand.GetNoDataValue()
        # abs() because the y pixel size (geoTransform[5]) is negative for
        # north-up rasters.
        cellXSize = abs(geoTransform[1])
        cellYSize = abs(geoTransform[5])
        rasterXSize = rasterDS.RasterXSize
        rasterYSize = rasterDS.RasterYSize
        # Full raster extent in map coordinates, derived from the geotransform.
        rasterBBox = QgsRectangle(geoTransform[0],
                                  geoTransform[3] - cellYSize * rasterYSize,
                                  geoTransform[0] + cellXSize * rasterXSize,
                                  geoTransform[3])
        rasterGeom = QgsGeometry.fromRect(rasterBBox)
        crs = osr.SpatialReference()
        crs.ImportFromProj4(str(target_crs.toProj4()))
        # In-memory drivers: vector side for the single-feature polygon layer,
        # raster side for the rasterized mask.
        memVectorDriver = ogr.GetDriverByName('Memory')
        memRasterDriver = gdal.GetDriverByName('MEM')
        # Reproject boundary features into the DEM's CRS on the fly.
        features = source.getFeatures(QgsFeatureRequest().setDestinationCrs(target_crs, context.transformContext()))
        total = 100.0 / source.featureCount() if source.featureCount() else 0
        for current, f in enumerate(features):
            if not f.hasGeometry():
                continue
            if feedback.isCanceled():
                break
            geom = f.geometry()
            # Clip the polygon to the raster extent; features fully outside
            # the DEM are skipped with a notice.
            intersectedGeom = rasterGeom.intersection(geom)
            if intersectedGeom.isEmpty():
                feedback.pushInfo(
                    self.tr('Feature {0} does not intersect raster or '
                            'entirely located in NODATA area').format(f.id()))
                continue
            # NOTE(review): 'hystogram' is a long-standing typo in the output
            # file name; kept as-is since downstream tooling may rely on it.
            fName = os.path.join(
                outputPath, 'hystogram_%s_%s.csv' % (source.sourceName(), f.id()))
            ogrGeom = ogr.CreateGeometryFromWkt(intersectedGeom.asWkt())
            bbox = intersectedGeom.boundingBox()
            xMin = bbox.xMinimum()
            xMax = bbox.xMaximum()
            yMin = bbox.yMinimum()
            yMax = bbox.yMaximum()
            # Convert the polygon's bounding box (map coords) to a pixel
            # window; (xMin, yMax) is the upper-left corner.
            (startColumn, startRow) = raster.mapToPixel(xMin, yMax, geoTransform)
            (endColumn, endRow) = raster.mapToPixel(xMax, yMin, geoTransform)
            width = endColumn - startColumn
            height = endRow - startRow
            srcOffset = (startColumn, startRow, width, height)
            # Read only the DEM window covering this feature.
            srcArray = rasterBand.ReadAsArray(*srcOffset)
            if srcOffset[2] == 0 or srcOffset[3] == 0:
                feedback.pushInfo(
                    self.tr('Feature {0} is smaller than raster '
                            'cell size').format(f.id()))
                continue
            # Geotransform of the sub-window (same pixel size, shifted origin).
            newGeoTransform = (
                geoTransform[0] + srcOffset[0] * geoTransform[1],
                geoTransform[1],
                0.0,
                geoTransform[3] + srcOffset[1] * geoTransform[5],
                0.0,
                geoTransform[5]
            )
            # Build a one-feature in-memory layer and burn it into a byte
            # mask raster aligned with the DEM window (1 = inside polygon).
            memVDS = memVectorDriver.CreateDataSource('out')
            memLayer = memVDS.CreateLayer('poly', crs, ogr.wkbPolygon)
            ft = ogr.Feature(memLayer.GetLayerDefn())
            ft.SetGeometry(ogrGeom)
            memLayer.CreateFeature(ft)
            ft.Destroy()
            rasterizedDS = memRasterDriver.Create('', srcOffset[2],
                                                  srcOffset[3], 1, gdal.GDT_Byte)
            rasterizedDS.SetGeoTransform(newGeoTransform)
            gdal.RasterizeLayer(rasterizedDS, [1], memLayer, burn_values=[1])
            rasterizedArray = rasterizedDS.ReadAsArray()
            # Replace NaNs before comparison, then mask out NODATA cells and
            # cells outside the rasterized polygon.
            srcArray = numpy.nan_to_num(srcArray)
            masked = numpy.ma.MaskedArray(srcArray,
                                          mask=numpy.logical_or(srcArray == noData,
                                                                numpy.logical_not(rasterizedArray)))
            self.calculateHypsometry(f.id(), fName, feedback, masked,
                                     cellXSize, cellYSize, percentage, step)
            # Drop references so GDAL/OGR release the in-memory datasets.
            memVDS = None
            rasterizedDS = None
            feedback.setProgress(int(current * total))
        # Close the DEM dataset.
        rasterDS = None
        return {self.OUTPUT_DIRECTORY: outputPath}

    def calculateHypsometry(self, fid, fName, feedback, data, pX, pY,
                            percentage, step):
        """Bins the masked DEM values by elevation `step`, converts counts to
        area (or % of area), accumulates them, and writes the curve to CSV.

        Args:
            fid: feature id (used only in messages).
            fName: output CSV path for this feature.
            feedback: processing feedback object for user messages.
            data: numpy masked array of DEM values inside the feature.
            pX, pY: cell sizes, used for absolute-area conversion.
            percentage: if True, report % of feature area instead of area.
            step: elevation bin width.
        """
        out = dict()
        # compressed() drops masked (NODATA / outside-polygon) cells.
        d = data.compressed()
        if d.size == 0:
            feedback.pushInfo(
                self.tr('Feature {0} does not intersect raster or '
                        'entirely located in NODATA area').format(fid))
            return
        minValue = d.min()
        maxValue = d.max()
        startValue = minValue
        tmpValue = minValue + step
        # Count cells per half-open elevation bin [startValue, tmpValue);
        # the bin's upper bound is the dict key.
        while startValue < maxValue:
            out[tmpValue] = ((startValue <= d) & (d < tmpValue)).sum()
            startValue = tmpValue
            tmpValue += step
        if percentage:
            multiplier = 100.0 / len(d.flat)
        else:
            # Cell count -> area via cell footprint.
            multiplier = pX * pY
        for k, v in out.items():
            out[k] = v * multiplier
        # Convert per-bin areas to a cumulative (hypsometric) curve by
        # walking the bins in ascending elevation order.
        prev = None
        for i in sorted(out.items()):
            if prev is None:
                out[i[0]] = i[1]
            else:
                out[i[0]] = i[1] + out[prev]
            prev = i[0]
        with open(fName, 'w', newline='', encoding='utf-8') as out_file:
            writer = csv.writer(out_file)
            writer.writerow([self.tr('Area'), self.tr('Elevation')])
            for i in sorted(out.items()):
                writer.writerow([i[1], i[0]])
| gpl-2.0 |
rsvip/Django | tests/migrations/test_executor.py | 202 | 24097 | from django.apps.registry import apps as global_apps
from django.db import connection
from django.db.migrations.executor import MigrationExecutor
from django.db.migrations.graph import MigrationGraph
from django.db.migrations.recorder import MigrationRecorder
from django.db.utils import DatabaseError
from django.test import TestCase, modify_settings, override_settings
from .test_base import MigrationTestBase
@modify_settings(INSTALLED_APPS={'append': 'migrations2'})
class ExecutorTests(MigrationTestBase):
    """
    Tests the migration executor (full end-to-end running).
    Bear in mind that if these are failing you should fix the other
    test failures first, as they may be propagating into here.
    """
    available_apps = ["migrations", "migrations2", "django.contrib.auth", "django.contrib.contenttypes"]

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_run(self):
        """
        Tests running a simple set of migrations.
        """
        executor = MigrationExecutor(connection)
        # Let's look at the plan first and make sure it's up to scratch
        plan = executor.migration_plan([("migrations", "0002_second")])
        self.assertEqual(
            plan,
            [
                (executor.loader.graph.nodes["migrations", "0001_initial"], False),
                (executor.loader.graph.nodes["migrations", "0002_second"], False),
            ],
        )
        # Were the tables there before?
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_book")
        # Alright, let's try running it
        executor.migrate([("migrations", "0002_second")])
        # Are the tables there now?
        self.assertTableExists("migrations_author")
        self.assertTableExists("migrations_book")
        # Rebuild the graph to reflect the new DB state
        executor.loader.build_graph()
        # Alright, let's undo what we did. The second element of each plan
        # tuple is True for a backwards (unapply) step.
        plan = executor.migration_plan([("migrations", None)])
        self.assertEqual(
            plan,
            [
                (executor.loader.graph.nodes["migrations", "0002_second"], True),
                (executor.loader.graph.nodes["migrations", "0001_initial"], True),
            ],
        )
        executor.migrate([("migrations", None)])
        # Are the tables gone?
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_book")

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"})
    def test_run_with_squashed(self):
        """
        Tests running a squashed migration from zero (should ignore what it replaces)
        """
        executor = MigrationExecutor(connection)
        # Check our leaf node is the squashed one
        leaves = [key for key in executor.loader.graph.leaf_nodes() if key[0] == "migrations"]
        self.assertEqual(leaves, [("migrations", "0001_squashed_0002")])
        # Check the plan: only the squashed migration should be applied, not
        # the two migrations it replaces.
        plan = executor.migration_plan([("migrations", "0001_squashed_0002")])
        self.assertEqual(
            plan,
            [
                (executor.loader.graph.nodes["migrations", "0001_squashed_0002"], False),
            ],
        )
        # Were the tables there before?
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_book")
        # Alright, let's try running it
        executor.migrate([("migrations", "0001_squashed_0002")])
        # Are the tables there now?
        self.assertTableExists("migrations_author")
        self.assertTableExists("migrations_book")
        # Rebuild the graph to reflect the new DB state
        executor.loader.build_graph()
        # Alright, let's undo what we did. Should also just use squashed.
        plan = executor.migration_plan([("migrations", None)])
        self.assertEqual(
            plan,
            [
                (executor.loader.graph.nodes["migrations", "0001_squashed_0002"], True),
            ],
        )
        executor.migrate([("migrations", None)])
        # Are the tables gone?
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_book")

    @override_settings(MIGRATION_MODULES={
        "migrations": "migrations.test_migrations",
        "migrations2": "migrations2.test_migrations_2",
    })
    def test_empty_plan(self):
        """
        Tests that re-planning a full migration of a fully-migrated set doesn't
        perform spurious unmigrations and remigrations.
        There was previously a bug where the executor just always performed the
        backwards plan for applied migrations - which even for the most recent
        migration in an app, might include other, dependent apps, and these
        were being unmigrated.
        """
        # Make the initial plan, check it
        executor = MigrationExecutor(connection)
        plan = executor.migration_plan([
            ("migrations", "0002_second"),
            ("migrations2", "0001_initial"),
        ])
        self.assertEqual(
            plan,
            [
                (executor.loader.graph.nodes["migrations", "0001_initial"], False),
                (executor.loader.graph.nodes["migrations", "0002_second"], False),
                (executor.loader.graph.nodes["migrations2", "0001_initial"], False),
            ],
        )
        # Fake-apply all migrations (records them as applied without touching
        # the schema).
        executor.migrate([
            ("migrations", "0002_second"),
            ("migrations2", "0001_initial")
        ], fake=True)
        # Rebuild the graph to reflect the new DB state
        executor.loader.build_graph()
        # Now plan a second time and make sure it's empty
        plan = executor.migration_plan([
            ("migrations", "0002_second"),
            ("migrations2", "0001_initial"),
        ])
        self.assertEqual(plan, [])
        # Erase all the fake records
        executor.recorder.record_unapplied("migrations2", "0001_initial")
        executor.recorder.record_unapplied("migrations", "0002_second")
        executor.recorder.record_unapplied("migrations", "0001_initial")

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_soft_apply(self):
        """
        Tests detection of initial migrations already having been applied.
        """
        # Mutable cell observed by the progress callback below; records
        # whether the last apply/unapply was faked.
        state = {"faked": None}

        def fake_storer(phase, migration=None, fake=None):
            state["faked"] = fake
        executor = MigrationExecutor(connection, progress_callback=fake_storer)
        # Were the tables there before?
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        # Run it normally
        self.assertEqual(
            executor.migration_plan([("migrations", "0001_initial")]),
            [
                (executor.loader.graph.nodes["migrations", "0001_initial"], False),
            ],
        )
        executor.migrate([("migrations", "0001_initial")])
        # Are the tables there now?
        self.assertTableExists("migrations_author")
        self.assertTableExists("migrations_tribble")
        # We shouldn't have faked that one
        self.assertEqual(state["faked"], False)
        # Rebuild the graph to reflect the new DB state
        executor.loader.build_graph()
        # Fake-reverse that
        executor.migrate([("migrations", None)], fake=True)
        # Are the tables still there?
        self.assertTableExists("migrations_author")
        self.assertTableExists("migrations_tribble")
        # Make sure that was faked
        self.assertEqual(state["faked"], True)
        # Finally, migrate forwards; this should fake-apply our initial migration
        executor.loader.build_graph()
        self.assertEqual(
            executor.migration_plan([("migrations", "0001_initial")]),
            [
                (executor.loader.graph.nodes["migrations", "0001_initial"], False),
            ],
        )
        # Applying the migration should raise a database level error
        # because we haven't given the --fake-initial option
        with self.assertRaises(DatabaseError):
            executor.migrate([("migrations", "0001_initial")])
        # Reset the faked state (fake_storer's closure sees the rebound dict)
        state = {"faked": None}
        # Allow faking of initial CreateModel operations
        executor.migrate([("migrations", "0001_initial")], fake_initial=True)
        self.assertEqual(state["faked"], True)
        # And migrate back to clean up the database
        executor.loader.build_graph()
        executor.migrate([("migrations", None)])
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")

    @override_settings(
        MIGRATION_MODULES={
            "migrations": "migrations.test_migrations_custom_user",
            "django.contrib.auth": "django.contrib.auth.migrations",
        },
        AUTH_USER_MODEL="migrations.Author",
    )
    def test_custom_user(self):
        """
        Regression test for #22325 - references to a custom user model defined in the
        same app are not resolved correctly.
        """
        executor = MigrationExecutor(connection)
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        # Migrate forwards
        executor.migrate([("migrations", "0001_initial")])
        self.assertTableExists("migrations_author")
        self.assertTableExists("migrations_tribble")
        # Make sure the soft-application detection works (#23093)
        # Change table_names to not return auth_user during this as
        # it wouldn't be there in a normal run, and ensure migrations.Author
        # exists in the global app registry temporarily.
        old_table_names = connection.introspection.table_names
        connection.introspection.table_names = lambda c: [x for x in old_table_names(c) if x != "auth_user"]
        migrations_apps = executor.loader.project_state(("migrations", "0001_initial")).apps
        global_apps.get_app_config("migrations").models["author"] = migrations_apps.get_model("migrations", "author")
        try:
            migration = executor.loader.get_migration("auth", "0001_initial")
            self.assertEqual(executor.detect_soft_applied(None, migration)[0], True)
        finally:
            # Always restore the patched introspection and app registry, even
            # if the assertions above fail.
            connection.introspection.table_names = old_table_names
            del global_apps.get_app_config("migrations").models["author"]
        # And migrate back to clean up the database
        executor.loader.build_graph()
        executor.migrate([("migrations", None)])
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")

    @override_settings(
        INSTALLED_APPS=[
            "migrations.migrations_test_apps.lookuperror_a",
            "migrations.migrations_test_apps.lookuperror_b",
            "migrations.migrations_test_apps.lookuperror_c"
        ]
    )
    def test_unrelated_model_lookups_forwards(self):
        """
        #24123 - Tests that all models of apps already applied which are
        unrelated to the first app being applied are part of the initial model
        state.
        """
        try:
            executor = MigrationExecutor(connection)
            self.assertTableNotExists("lookuperror_a_a1")
            self.assertTableNotExists("lookuperror_b_b1")
            self.assertTableNotExists("lookuperror_c_c1")
            executor.migrate([("lookuperror_b", "0003_b3")])
            self.assertTableExists("lookuperror_b_b3")
            # Rebuild the graph to reflect the new DB state
            executor.loader.build_graph()
            # Migrate forwards -- This led to a lookup LookupErrors because
            # lookuperror_b.B2 is already applied
            executor.migrate([
                ("lookuperror_a", "0004_a4"),
                ("lookuperror_c", "0003_c3"),
            ])
            self.assertTableExists("lookuperror_a_a4")
            self.assertTableExists("lookuperror_c_c3")
            # Rebuild the graph to reflect the new DB state
            executor.loader.build_graph()
        finally:
            # Cleanup: unapply everything regardless of test outcome.
            executor.migrate([
                ("lookuperror_a", None),
                ("lookuperror_b", None),
                ("lookuperror_c", None),
            ])
            self.assertTableNotExists("lookuperror_a_a1")
            self.assertTableNotExists("lookuperror_b_b1")
            self.assertTableNotExists("lookuperror_c_c1")

    @override_settings(
        INSTALLED_APPS=[
            "migrations.migrations_test_apps.lookuperror_a",
            "migrations.migrations_test_apps.lookuperror_b",
            "migrations.migrations_test_apps.lookuperror_c"
        ]
    )
    def test_unrelated_model_lookups_backwards(self):
        """
        #24123 - Tests that all models of apps being unapplied which are
        unrelated to the first app being unapplied are part of the initial
        model state.
        """
        try:
            executor = MigrationExecutor(connection)
            self.assertTableNotExists("lookuperror_a_a1")
            self.assertTableNotExists("lookuperror_b_b1")
            self.assertTableNotExists("lookuperror_c_c1")
            executor.migrate([
                ("lookuperror_a", "0004_a4"),
                ("lookuperror_b", "0003_b3"),
                ("lookuperror_c", "0003_c3"),
            ])
            self.assertTableExists("lookuperror_b_b3")
            self.assertTableExists("lookuperror_a_a4")
            self.assertTableExists("lookuperror_c_c3")
            # Rebuild the graph to reflect the new DB state
            executor.loader.build_graph()
            # Migrate backwards -- This led to a lookup LookupErrors because
            # lookuperror_b.B2 is not in the initial state (unrelated to app c)
            executor.migrate([("lookuperror_a", None)])
            # Rebuild the graph to reflect the new DB state
            executor.loader.build_graph()
        finally:
            # Cleanup: app a was already unapplied above; unapply b and c.
            executor.migrate([
                ("lookuperror_b", None),
                ("lookuperror_c", None)
            ])
            self.assertTableNotExists("lookuperror_a_a1")
            self.assertTableNotExists("lookuperror_b_b1")
            self.assertTableNotExists("lookuperror_c_c1")

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_process_callback(self):
        """
        #24129 - Tests callback process
        """
        call_args_list = []

        def callback(*args):
            call_args_list.append(args)
        executor = MigrationExecutor(connection, progress_callback=callback)
        # Were the tables there before?
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        executor.migrate([
            ("migrations", "0001_initial"),
            ("migrations", "0002_second"),
        ])
        # Rebuild the graph to reflect the new DB state
        executor.loader.build_graph()
        executor.migrate([
            ("migrations", None),
            ("migrations", None),
        ])
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        # The callback should have been invoked once per render/apply/unapply
        # phase, in this exact order.
        migrations = executor.loader.graph.nodes
        expected = [
            ("render_start", ),
            ("render_success", ),
            ("apply_start", migrations['migrations', '0001_initial'], False),
            ("apply_success", migrations['migrations', '0001_initial'], False),
            ("apply_start", migrations['migrations', '0002_second'], False),
            ("apply_success", migrations['migrations', '0002_second'], False),
            ("render_start", ),
            ("render_success", ),
            ("unapply_start", migrations['migrations', '0002_second'], False),
            ("unapply_success", migrations['migrations', '0002_second'], False),
            ("unapply_start", migrations['migrations', '0001_initial'], False),
            ("unapply_success", migrations['migrations', '0001_initial'], False),
        ]
        self.assertEqual(call_args_list, expected)

    @override_settings(
        INSTALLED_APPS=[
            "migrations.migrations_test_apps.alter_fk.author_app",
            "migrations.migrations_test_apps.alter_fk.book_app",
        ]
    )
    def test_alter_id_type_with_fk(self):
        """
        Altering the type of a primary key that a foreign key in another app
        points to should migrate cleanly.
        """
        try:
            executor = MigrationExecutor(connection)
            self.assertTableNotExists("author_app_author")
            self.assertTableNotExists("book_app_book")
            # Apply initial migrations
            executor.migrate([
                ("author_app", "0001_initial"),
                ("book_app", "0001_initial"),
            ])
            self.assertTableExists("author_app_author")
            self.assertTableExists("book_app_book")
            # Rebuild the graph to reflect the new DB state
            executor.loader.build_graph()
            # Apply PK type alteration
            executor.migrate([("author_app", "0002_alter_id")])
            # Rebuild the graph to reflect the new DB state
            executor.loader.build_graph()
        finally:
            # We can't simply unapply the migrations here because there is no
            # implicit cast from VARCHAR to INT on the database level.
            with connection.schema_editor() as editor:
                editor.execute(editor.sql_delete_table % {"table": "book_app_book"})
                editor.execute(editor.sql_delete_table % {"table": "author_app_author"})
            self.assertTableNotExists("author_app_author")
            self.assertTableNotExists("book_app_book")

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"})
    def test_apply_all_replaced_marks_replacement_as_applied(self):
        """
        Applying all replaced migrations marks replacement as applied (#24628).
        """
        recorder = MigrationRecorder(connection)
        # Place the database in a state where the replaced migrations are
        # partially applied: 0001 is applied, 0002 is not.
        recorder.record_applied("migrations", "0001_initial")
        executor = MigrationExecutor(connection)
        # Use fake because we don't actually have the first migration
        # applied, so the second will fail. And there's no need to actually
        # create/modify tables here, we're just testing the
        # MigrationRecord, which works the same with or without fake.
        executor.migrate([("migrations", "0002_second")], fake=True)
        # Because we've now applied 0001 and 0002 both, their squashed
        # replacement should be marked as applied.
        self.assertIn(
            ("migrations", "0001_squashed_0002"),
            recorder.applied_migrations(),
        )

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"})
    def test_migrate_marks_replacement_applied_even_if_it_did_nothing(self):
        """
        A new squash migration will be marked as applied even if all its
        replaced migrations were previously already applied (#24628).
        """
        recorder = MigrationRecorder(connection)
        # Record all replaced migrations as applied
        recorder.record_applied("migrations", "0001_initial")
        recorder.record_applied("migrations", "0002_second")
        executor = MigrationExecutor(connection)
        executor.migrate([("migrations", "0001_squashed_0002")])
        # Because 0001 and 0002 are both applied, even though this migrate run
        # didn't apply anything new, their squashed replacement should be
        # marked as applied.
        self.assertIn(
            ("migrations", "0001_squashed_0002"),
            recorder.applied_migrations(),
        )
class FakeLoader(object):
    """Minimal MigrationLoader stand-in: just a migration graph plus the
    set of already-applied migration keys."""

    def __init__(self, graph, applied):
        self.applied_migrations = applied
        self.graph = graph
class FakeMigration(object):
    """Stub migration: only carries a name and renders it in a repr that is
    useful when a test assertion on a plan fails."""

    def __init__(self, name):
        self.name = name

    def __repr__(self):
        return 'M<{}>'.format(self.name)
class ExecutorUnitTests(TestCase):
    """(More) isolated unit tests for executor methods.

    These build a MigrationGraph by hand and plug it into the executor via
    FakeLoader, so no database or real migration modules are involved.
    """

    def test_minimize_rollbacks(self):
        """
        Minimize unnecessary rollbacks in connected apps.
        When you say "./manage.py migrate appA 0001", rather than migrating to
        just after appA-0001 in the linearized migration plan (which could roll
        back migrations in other apps that depend on appA 0001, but don't need
        to be rolled back since we're not rolling back appA 0001), we migrate
        to just before appA-0002.
        """
        a1_impl = FakeMigration('a1')
        a1 = ('a', '1')
        a2_impl = FakeMigration('a2')
        a2 = ('a', '2')
        b1_impl = FakeMigration('b1')
        b1 = ('b', '1')
        graph = MigrationGraph()
        graph.add_node(a1, a1_impl)
        graph.add_node(a2, a2_impl)
        graph.add_node(b1, b1_impl)
        graph.add_dependency(None, b1, a1)
        graph.add_dependency(None, a2, a1)
        executor = MigrationExecutor(None)
        # All three migrations are recorded as applied.
        executor.loader = FakeLoader(graph, {a1, b1, a2})
        plan = executor.migration_plan({a1})
        # Only a2 needs rolling back (True = backwards); b1 depends on a1 but
        # a1 itself stays applied, so b1 must not be touched.
        self.assertEqual(plan, [(a2_impl, True)])

    def test_minimize_rollbacks_branchy(self):
        r"""
        Minimize rollbacks when target has multiple in-app children.
        a: 1 <---- 3 <--\
            \ \- 2 <--- 4
             \ \
        b:    \- 1 <--- 2
        """
        a1_impl = FakeMigration('a1')
        a1 = ('a', '1')
        a2_impl = FakeMigration('a2')
        a2 = ('a', '2')
        a3_impl = FakeMigration('a3')
        a3 = ('a', '3')
        a4_impl = FakeMigration('a4')
        a4 = ('a', '4')
        b1_impl = FakeMigration('b1')
        b1 = ('b', '1')
        b2_impl = FakeMigration('b2')
        b2 = ('b', '2')
        graph = MigrationGraph()
        graph.add_node(a1, a1_impl)
        graph.add_node(a2, a2_impl)
        graph.add_node(a3, a3_impl)
        graph.add_node(a4, a4_impl)
        graph.add_node(b1, b1_impl)
        graph.add_node(b2, b2_impl)
        graph.add_dependency(None, a2, a1)
        graph.add_dependency(None, a3, a1)
        graph.add_dependency(None, a4, a2)
        graph.add_dependency(None, a4, a3)
        graph.add_dependency(None, b2, b1)
        graph.add_dependency(None, b1, a1)
        graph.add_dependency(None, b2, a2)
        executor = MigrationExecutor(None)
        # Everything is applied; migrating back to a1 must unwind every
        # descendant of a1 within app a, plus b2 (which depends on a2),
        # but not b1 (which only depends on a1 itself).
        executor.loader = FakeLoader(graph, {a1, b1, a2, b2, a3, a4})
        plan = executor.migration_plan({a1})
        should_be_rolled_back = [b2_impl, a4_impl, a2_impl, a3_impl]
        exp = [(m, True) for m in should_be_rolled_back]
        self.assertEqual(plan, exp)

    def test_backwards_nothing_to_do(self):
        r"""
        If the current state satisfies the given target, do nothing.
        a: 1 <--- 2
        b:   \- 1
        c:    \- 1
        If a1 is applied already and a2 is not, and we're asked to migrate to
        a1, don't apply or unapply b1 or c1, regardless of their current state.
        """
        a1_impl = FakeMigration('a1')
        a1 = ('a', '1')
        a2_impl = FakeMigration('a2')
        a2 = ('a', '2')
        b1_impl = FakeMigration('b1')
        b1 = ('b', '1')
        c1_impl = FakeMigration('c1')
        c1 = ('c', '1')
        graph = MigrationGraph()
        graph.add_node(a1, a1_impl)
        graph.add_node(a2, a2_impl)
        graph.add_node(b1, b1_impl)
        graph.add_node(c1, c1_impl)
        graph.add_dependency(None, a2, a1)
        graph.add_dependency(None, b1, a1)
        graph.add_dependency(None, c1, a1)
        executor = MigrationExecutor(None)
        # Only a1 and b1 applied; target {a1} is already satisfied.
        executor.loader = FakeLoader(graph, {a1, b1})
        plan = executor.migration_plan({a1})
        self.assertEqual(plan, [])
| bsd-3-clause |
NetDBNCKU/GAE-Conference-Web-App | django/contrib/admin/views/decorators.py | 100 | 1285 | from functools import wraps
from django.utils.translation import ugettext as _
from django.contrib.admin.forms import AdminAuthenticationForm
from django.contrib.auth.views import login
from django.contrib.auth import REDIRECT_FIELD_NAME
def staff_member_required(view_func):
    """
    Decorator for views that checks that the user is logged in and is a staff
    member, displaying the login page if necessary.
    """
    @wraps(view_func)
    def _wrapped_view(request, *args, **kwargs):
        user = request.user
        # Active staff users go straight through to the wrapped admin view.
        if user.is_active and user.is_staff:
            return view_func(request, *args, **kwargs)
        assert hasattr(request, 'session'), "The Django admin requires session middleware to be installed. Edit your MIDDLEWARE_CLASSES setting to insert 'django.contrib.sessions.middleware.SessionMiddleware'."
        # Anyone else is shown the admin login form, redirecting back to the
        # originally requested path on success.
        path = request.get_full_path()
        return login(
            request,
            template_name='admin/login.html',
            authentication_form=AdminAuthenticationForm,
            extra_context={
                'title': _('Log in'),
                'app_path': path,
                REDIRECT_FIELD_NAME: path,
            },
        )
    return _wrapped_view
| bsd-3-clause |
sunshinelover/chanlun | vn.trader/ctaAlgo/uiChanlunWidget.py | 1 | 68647 | # encoding: UTF-8
"""
缠论模块相关的GUI控制组件
"""
from vtGateway import VtSubscribeReq
from uiBasicWidget import QtGui, QtCore, BasicCell,BasicMonitor,TradingWidget
from eventEngine import *
from ctaBase import *
import pyqtgraph as pg
import numpy as np
import pymongo
from pymongo.errors import *
from datetime import datetime, timedelta
from ctaHistoryData import HistoryDataEngine
import time
import types
import pandas as pd
########################################################################
class MyStringAxis(pg.AxisItem):
    """Axis item that maps numeric tick positions to caller-supplied strings."""
    def __init__(self, xdict, *args, **kwargs):
        pg.AxisItem.__init__(self, *args, **kwargs)
        # Numeric keys kept as an ndarray so the nearest-key lookup below
        # can be done with vectorized arithmetic.
        self.x_values = np.asarray(xdict.keys())
        self.x_strings = xdict.values()
    def tickStrings(self, values, scale, spacing):
        labels = []
        for value in values:
            # Undo the axis scaling to recover the original tick value.
            original = value * scale
            if original in self.x_values:
                # Exact (or closest) match -> show its label.
                idx = np.abs(self.x_values - original).argmin()
                labels.append(self.x_strings[idx])
            else:
                # Unknown position -> blank label.
                labels.append("")
        return labels
########################################################################
class ChanlunEngineManager(QtGui.QWidget):
    """Management widget for the chanlun (缠论) strategy engine."""
    signal = QtCore.pyqtSignal(type(Event()))
    # ----------------------------------------------------------------------
    def __init__(self, chanlunEngine, eventEngine, mainEngine, parent=None):
        """Constructor"""
        super(ChanlunEngineManager, self).__init__(parent)
        self.chanlunEngine = chanlunEngine
        self.eventEngine = eventEngine
        self.mainEngine = mainEngine
        # Flags tracking which overlays/widgets are currently on the chart.
        self.penLoaded = False
        self.segmentLoaded = False
        self.tickLoaded = False
        self.zhongShuLoaded = False
        self.instrumentid = ''
        self.initUi()
        self.registerEvent()
        # Log engine start-up.
        self.chanlunEngine.writeChanlunLog(u'缠论引擎启动成功')
    # ----------------------------------------------------------------------
    def initUi(self):
        """Build the UI: code input, action buttons, period buttons, chart and log."""
        self.setWindowTitle(u'缠论策略')
        # Futures-code input box.
        self.codeEdit = QtGui.QLineEdit()
        self.codeEdit.setPlaceholderText(u'在此输入期货代码')
        self.codeEdit.setMaximumWidth(200)
        self.data = pd.DataFrame()  # data used for plotting (important)
        self.fenX = []  # x coordinates for pens/segments
        self.fenY = []  # y coordinates for pens/segments
        self.zhongshuPos = []  # positions of the pivots (中枢)
        self.zhongShuType = []  # direction of each pivot
        # Price chart widget.
        self.PriceW = PriceWidget(self.eventEngine, self.chanlunEngine, self.data)
        self.TickW = None
        # MongoDB connection state.
        self.__mongoConnected = False
        self.__mongoConnection = None
        # Connect to MongoDB.
        self.__connectMongo()
        # Analysis action buttons.
        penButton = QtGui.QPushButton(u'分笔')
        segmentButton = QtGui.QPushButton(u'分段')
        zhongshuButton = QtGui.QPushButton(u'走势中枢')
        shopButton = QtGui.QPushButton(u'买卖点')
        restoreButton = QtGui.QPushButton(u'还原')
        penButton.clicked.connect(self.pen)
        segmentButton.clicked.connect(self.segment)
        zhongshuButton.clicked.connect(self.zhongShu)
        shopButton.clicked.connect(self.shop)
        restoreButton.clicked.connect(self.restore)
        # Log monitor for the chanlun component.
        self.chanlunLogMonitor = QtGui.QTextEdit()
        self.chanlunLogMonitor.setReadOnly(True)
        self.chanlunLogMonitor.setMaximumHeight(180)
        # Layout: top row of action buttons.
        self.hbox2 = QtGui.QHBoxLayout()
        self.hbox2.addWidget(self.codeEdit)
        self.hbox2.addWidget(penButton)
        self.hbox2.addWidget(segmentButton)
        self.hbox2.addWidget(zhongshuButton)
        self.hbox2.addWidget(shopButton)
        self.hbox2.addWidget(restoreButton)
        self.hbox2.addStretch()
        # Period-selection buttons.
        tickButton = QtGui.QPushButton(u'Tick')
        oneMButton = QtGui.QPushButton(u"1分")
        fiveMButton = QtGui.QPushButton(u'5分')
        fifteenMButton = QtGui.QPushButton(u'15分')
        thirtyMButton = QtGui.QPushButton(u'30分')
        sixtyMButton = QtGui.QPushButton(u'60分')
        dayButton = QtGui.QPushButton(u'日')
        weekButton = QtGui.QPushButton(u'周')
        monthButton = QtGui.QPushButton(u'月')
        oneMButton.checked = True
        self.vbox1 = QtGui.QVBoxLayout()
        tickButton.clicked.connect(self.openTick)
        oneMButton.clicked.connect(self.oneM)
        fiveMButton.clicked.connect(self.fiveM)
        fifteenMButton.clicked.connect(self.fifteenM)
        thirtyMButton.clicked.connect(self.thirtyM)
        sixtyMButton.clicked.connect(self.sixtyM)
        dayButton.clicked.connect(self.daily)
        weekButton.clicked.connect(self.weekly)
        monthButton.clicked.connect(self.monthly)
        self.vbox2 = QtGui.QVBoxLayout()
        self.vbox1.addWidget(self.PriceW)
        self.vbox2.addWidget(tickButton)
        self.vbox2.addWidget(oneMButton)
        self.vbox2.addWidget(fiveMButton)
        self.vbox2.addWidget(fifteenMButton)
        self.vbox2.addWidget(thirtyMButton)
        self.vbox2.addWidget(sixtyMButton)
        self.vbox2.addWidget(dayButton)
        self.vbox2.addWidget(weekButton)
        self.vbox2.addWidget(monthButton)
        self.vbox2.addStretch()
        self.hbox3 = QtGui.QHBoxLayout()
        self.hbox3.addStretch()
        self.hbox3.addLayout(self.vbox1)
        self.hbox3.addLayout(self.vbox2)
        self.vbox = QtGui.QVBoxLayout()
        self.vbox.addLayout(self.hbox2)
        self.vbox.addLayout(self.hbox3)
        self.vbox.addWidget(self.chanlunLogMonitor)
        self.setLayout(self.vbox)
        # Pressing Enter in the code box switches instruments.
        self.codeEdit.returnPressed.connect(self.updateSymbol)
    #-----------------------------------------------------------------------
    # Fetch historical bars from the DataYes (通联) data service.
    def downloadData(self, symbol, unit):
        """Return a DataFrame of bars for *symbol*.

        unit: int -> minute bars (downloaded then re-read from MongoDB);
              str -> 'daily'/'weekly'/'monthly' bars fetched directly.
        Columns: num, open, close, low, high; index is the bar time.
        Returns None for an unsupported unit type.
        """
        listBar = []  # raw K-line tuples
        num = 0
        # Client for the DataYes service.
        historyDataEngine = HistoryDataEngine()
        # int unit -> minute data; string unit -> daily/weekly/monthly data.
        if type(unit) is types.IntType:
            # Download today's minute bars into the database...
            historyDataEngine.downloadFuturesIntradayBar(symbol, unit)
            # ...then read the last few days of minute bars back out.
            cx = self.getDbData(symbol, unit)
            if cx:
                for data in cx:
                    barOpen = data['open']
                    barClose = data['close']
                    barLow = data['low']
                    barHigh = data['high']
                    barTime = data['datetime']
                    listBar.append((num, barTime, barOpen, barClose, barLow, barHigh))
                    num += 1
        elif type(unit) is types.StringType:
            data = historyDataEngine.downloadFuturesBar(symbol, unit)
            if data:
                for d in data:
                    barOpen = d.get('openPrice', 0)
                    barClose = d.get('closePrice', 0)
                    barLow = d.get('lowestPrice', 0)
                    barHigh = d.get('highestPrice', 0)
                    if unit == "daily":
                        barTime = d.get('tradeDate', '').replace('-', '')
                    else:
                        barTime = d.get('endDate', '').replace('-', '')
                    listBar.append((num, barTime, barOpen, barClose, barLow, barHigh))
                    num += 1
            # Weekly/monthly data arrives newest-first; flip to ascending.
            if unit == "monthly" or unit == "weekly":
                listBar.reverse()
        else:
            print "参数格式错误"
            return
        # Convert the list into a DataFrame for easier handling downstream.
        df = pd.DataFrame(listBar, columns=['num', 'time', 'open', 'close', 'low', 'high'])
        df.index = df['time'].tolist()
        df = df.drop('time', 1)
        return df
    #-----------------------------------------------------------------------
    # Read the last few days of minute bars from MongoDB.
    def getDbData(self, symbol, unit):
        """Return a Mongo cursor of minute bars for *symbol*, or None when
        the Mongo connection is down.  *unit* selects the per-period DB."""
        # No trading (hence no minute data) on Saturday/Sunday.
        # Pick the database matching the bar period.
        dbname = ''
        # NOTE(review): dbname stays '' for units other than 1/5/15/30/60 —
        # the later collection lookup would then hit a '' database; confirm
        # callers only pass supported units.
        days = 7
        if unit == 1:
            dbname = MINUTE_DB_NAME
        elif unit == 5:
            dbname = MINUTE5_DB_NAME
        elif unit == 15:
            dbname = MINUTE15_DB_NAME
        elif unit == 30:
            dbname = MINUTE30_DB_NAME
        elif unit == 60:
            dbname = MINUTE60_DB_NAME
        weekday = datetime.now().weekday()  # weekday(): 0-6 == Monday-Sunday
        # NOTE(review): days is hard-coded to 7 above, so this weekend-aware
        # branch is currently dead code and aDay is always 7 days.
        if days == 2:
            if weekday == 6:
                aDay = timedelta(days=3)
            elif weekday == 0 or weekday == 1:
                aDay = timedelta(days=4)
            else:
                aDay = timedelta(days=2)
        else:
            aDay = timedelta(days=7)
        startDate = (datetime.now() - aDay).strftime('%Y%m%d')
        print startDate
        if self.__mongoConnected:
            collection = self.__mongoConnection[dbname][symbol]
            # Bars on/after startDate (string compare works for YYYYMMDD).
            cx = collection.find({'date': {'$gte': startDate}})
            return cx
        else:
            return None
    #----------------------------------------------------------------------------------
    # Instrument changed (Enter pressed in the code box).
    def updateSymbol(self):
        """Switch to the instrument typed in the code box and redraw 1-min bars."""
        # Read the instrument id from the input widget.
        instrumentid = str(self.codeEdit.text())
        self.chanlunEngine.writeChanlunLog(u'查询合约%s' % (instrumentid))
        # Fetch today's 1-minute bars from the DataYes client.
        self.data = self.downloadData(instrumentid, 1)
        if self.data.empty:
            self.chanlunEngine.writeChanlunLog(u'合约%s 不存在' % (instrumentid))
        else:
            # Tear down whichever chart widget is currently mounted.
            if self.tickLoaded:
                self.vbox1.removeWidget(self.TickW)
                self.TickW.deleteLater()
            else:
                self.vbox1.removeWidget(self.PriceW)
                self.PriceW.deleteLater()
            self.PriceW = PriceWidget(self.eventEngine, self.chanlunEngine, self.data)
            self.vbox1.addWidget(self.PriceW)
            # Draw the K-line chart.
            self.PriceW.plotHistorticData()
            self.chanlunEngine.writeChanlunLog(u'打开合约%s 1分钟K线图' % (instrumentid))
            # All overlays are stale on the new chart.
            self.penLoaded = False
            self.segmentLoaded = False
            self.tickLoaded = False
            self.zhongShuLoaded = False
            # # Subscribe to the contract (modelled on ctaEngine.py):
            # # unsubscribe the previous contract, then subscribe the new one.
            # contract = self.mainEngine.getContract(self.instrumentid)
            # if contract:
            #     req = VtSubscribeReq()
            #     req.symbol = contract.symbol
            #     self.mainEngine.unsubscribe(req, contract.gatewayName)
            #
            # contract = self.mainEngine.getContract(instrumentid)
            # if contract:
            #     req = VtSubscribeReq()
            #     req.symbol = contract.symbol
            #     self.mainEngine.subscribe(req, contract.gatewayName)
            # else:
            #     self.chanlunEngine.writeChanlunLog(u'交易合约%s无法找到' % (instrumentid))
            #
            # # Re-register the tick event listener.
            # self.eventEngine.unregister(EVENT_TICK + self.instrumentid, self.signal.emit)
            # self.eventEngine.register(EVENT_TICK + instrumentid, self.signal.emit)
            # Remember the current instrument.
            self.instrumentid = instrumentid
def oneM(self):
"打开1分钟K线图"
self.chanlunEngine.writeChanlunLog(u'打开合约%s 1分钟K线图' % (self.instrumentid))
# 从通联数据客户端获取数据
self.data = self.downloadData(self.instrumentid, 1)
if self.tickLoaded:
self.vbox1.removeWidget(self.TickW)
self.TickW.deleteLater()
else:
self.vbox1.removeWidget(self.PriceW)
self.PriceW.deleteLater()
self.PriceW = PriceWidget(self.eventEngine, self.chanlunEngine, self.data)
self.vbox1.addWidget(self.PriceW)
# 画K线图
self.PriceW.plotHistorticData()
self.tickLoaded = False
self.penLoaded = False
self.segmentLoaded = False
self.zhongShuLoaded = False
# ----------------------------------------------------------------------
def fiveM(self):
"打开5分钟K线图"
self.chanlunEngine.writeChanlunLog(u'打开合约%s 5分钟K线图' % (self.instrumentid))
# 从通联数据客户端获取数据
self.data = self.downloadData(self.instrumentid, 5)
if self.tickLoaded:
self.vbox1.removeWidget(self.TickW)
self.TickW.deleteLater()
else:
self.vbox1.removeWidget(self.PriceW)
self.PriceW.deleteLater()
self.PriceW = PriceWidget(self.eventEngine, self.chanlunEngine, self.data)
self.vbox1.addWidget(self.PriceW)
# 画K线图
self.PriceW.plotHistorticData()
self.tickLoaded = False
self.penLoaded = False
self.segmentLoaded = False
self.zhongShuLoaded = False
# ----------------------------------------------------------------------
def fifteenM(self):
"打开15分钟K线图"
self.chanlunEngine.writeChanlunLog(u'打开合约%s 15分钟K线图' % (self.instrumentid))
# 从通联数据客户端获取数据
self.data = self.downloadData(self.instrumentid, 15)
if self.tickLoaded:
self.vbox1.removeWidget(self.TickW)
self.TickW.deleteLater()
else:
self.vbox1.removeWidget(self.PriceW)
self.PriceW.deleteLater()
self.PriceW = PriceWidget(self.eventEngine, self.chanlunEngine, self.data)
self.vbox1.addWidget(self.PriceW)
# 画K线图
self.PriceW.plotHistorticData()
self.tickLoaded = False
self.penLoaded = False
self.segmentLoaded = False
self.zhongShuLoaded = False
# ----------------------------------------------------------------------
def thirtyM(self):
"打开30分钟K线图"
self.chanlunEngine.writeChanlunLog(u'打开合约%s 30分钟K线图' % (self.instrumentid))
# 从通联数据客户端获取数据
self.data = self.downloadData(self.instrumentid, 30)
if self.tickLoaded:
self.vbox1.removeWidget(self.TickW)
self.TickW.deleteLater()
else:
self.vbox1.removeWidget(self.PriceW)
self.PriceW.deleteLater()
self.PriceW = PriceWidget(self.eventEngine, self.chanlunEngine, self.data)
self.vbox1.addWidget(self.PriceW)
# 画K线图
self.PriceW.plotHistorticData()
self.tickLoaded = False
self.penLoaded = False
self.segmentLoaded = False
self.zhongShuLoaded = False
# ----------------------------------------------------------------------
def sixtyM(self):
"打开60分钟K线图"
self.chanlunEngine.writeChanlunLog(u'打开合约%s 60分钟K线图' % (self.instrumentid))
# 从通联数据客户端获取数据
self.data = self.downloadData(self.instrumentid, 60)
if self.tickLoaded:
self.vbox1.removeWidget(self.TickW)
self.TickW.deleteLater()
else:
self.vbox1.removeWidget(self.PriceW)
self.PriceW.deleteLater()
self.PriceW = PriceWidget(self.eventEngine, self.chanlunEngine, self.data)
self.vbox1.addWidget(self.PriceW)
# 画K线图
self.PriceW.plotHistorticData()
self.tickLoaded = False
self.penLoaded = False
self.segmentLoaded = False
self.zhongShuLoaded = False
# ----------------------------------------------------------------------
def daily(self):
"""打开日K线图"""
self.chanlunEngine.writeChanlunLog(u'打开合约%s 日K线图' % (self.instrumentid))
# 从通联数据客户端获取数据
self.data = self.downloadData(self.instrumentid, "daily")
if self.tickLoaded:
self.vbox1.removeWidget(self.TickW)
self.TickW.deleteLater()
else:
self.vbox1.removeWidget(self.PriceW)
self.PriceW.deleteLater()
self.PriceW = PriceWidget(self.eventEngine, self.chanlunEngine, self.data)
self.vbox1.addWidget(self.PriceW)
# 画K线图
self.PriceW.plotHistorticData()
self.tickLoaded = False
self.penLoaded = False
self.segmentLoaded = False
self.zhongShuLoaded = False
# ----------------------------------------------------------------------
def weekly(self):
"""打开周K线图"""
self.chanlunEngine.writeChanlunLog(u'打开合约%s 周K线图' % (self.instrumentid))
# 从通联数据客户端获取数据
self.data = self.downloadData(self.instrumentid, "weekly")
if self.tickLoaded:
self.vbox1.removeWidget(self.TickW)
self.TickW.deleteLater()
else:
self.vbox1.removeWidget(self.PriceW)
self.PriceW.deleteLater()
self.PriceW = PriceWidget(self.eventEngine, self.chanlunEngine, self.data)
self.vbox1.addWidget(self.PriceW)
# 画K线图
self.PriceW.plotHistorticData()
self.tickLoaded = False
self.penLoaded = False
self.segmentLoaded = False
self.zhongShuLoaded = False
def monthly(self):
"""打开月K线图"""
self.chanlunEngine.writeChanlunLog(u'打开合约%s 月K线图' % (self.instrumentid))
# 从通联数据客户端获取数据并画图
self.data = self.downloadData(self.instrumentid, "monthly")
if self.tickLoaded:
self.vbox1.removeWidget(self.TickW)
self.TickW.deleteLater()
else:
self.vbox1.removeWidget(self.PriceW)
self.PriceW.deleteLater()
self.PriceW = PriceWidget(self.eventEngine, self.chanlunEngine, self.data)
self.vbox1.addWidget(self.PriceW)
# 画K线图
self.PriceW.plotHistorticData()
self.tickLoaded = False
self.penLoaded = False
self.segmentLoaded = False
self.zhongShuLoaded = False
    # ----------------------------------------------------------------------
    def openTick(self):
        """Switch the chart area to the tick view."""
        self.chanlunEngine.writeChanlunLog(u'打开tick图')
        # Replace the price widget with a tick widget.
        self.vbox1.removeWidget(self.PriceW)
        self.PriceW.deleteLater()
        self.TickW = TickWidget(self.eventEngine, self.chanlunEngine)
        self.vbox1.addWidget(self.TickW)
        self.tickLoaded = True
        # Pen/segment/pivot overlays do not exist on the tick view.
        self.penLoaded = False
        self.segmentLoaded = False
        self.zhongShuLoaded = False
# ----------------------------------------------------------------------
def restore(self):
"""还原初始k线状态"""
self.chanlunEngine.writeChanlunLog(u'还原加载成功')
if self.tickLoaded:
self.vbox1.removeWidget(self.TickW)
self.TickW.deleteLater()
else:
self.vbox1.removeWidget(self.PriceW)
self.PriceW.deleteLater()
self.data = self.downloadData(self.instrumentid, 1)
self.PriceW = PriceWidget(self.eventEngine, self.chanlunEngine, self.data, self)
self.vbox1.addWidget(self.PriceW)
# 画K线图
self.PriceW.plotHistorticData()
self.chanlunEngine.writeChanlunLog(u'还原为1分钟k线图')
self.penLoaded = False
self.segmentLoaded = False
self.tickLoaded = False
    # ----------------------------------------------------------------------
    def pen(self):
        """Load and draw the pens (分笔)."""
        # First merge included K-lines, keeping the merged data used for the
        # rebuilt PriceW.
        if not self.penLoaded:
            after_fenxing = self.judgeInclude()  # resolve bar-inclusion relations in self.data
            # Clear the canvas: remove the existing widget, then create a new one.
            self.vbox1.removeWidget(self.PriceW)
            self.PriceW.deleteLater()
            self.PriceW = PriceWidget(self.eventEngine, self.chanlunEngine, after_fenxing)
            self.vbox1.addWidget(self.PriceW)
            # Re-draw the K-line chart from the merged bars.
            self.plotAfterFenXing(after_fenxing)
            # Locate the top and bottom fractals.
            fenxing_data, fenxing_type = self.findTopAndLow(after_fenxing)
            arrayFenxingdata = np.array(fenxing_data)
            arrayTypedata = np.array(fenxing_type)
            self.fenY = []
            self.fenX = [m[0] for m in arrayFenxingdata]
            fenbiY1 = [m[4] for m in arrayFenxingdata]  # top fractal -> marked by its high
            fenbiY2 = [m[3] for m in arrayFenxingdata]  # bottom fractal -> marked by its low
            for i in xrange(len(self.fenX)):
                if arrayTypedata[i] == 1:
                    self.fenY.append(fenbiY1[i])
                else:
                    self.fenY.append(fenbiY2[i])
        if not self.penLoaded:
            if self.fenX:
                # Duplicate the last point so the polyline extends to it,
                # then pop the duplicates to keep fenX/fenY clean.
                self.fenX.append(self.fenX[-1])
                self.fenY.append(self.fenY[-1])
                print "self.fenX: ", self.fenX
                print "self.fenY: ", self.fenY
                self.fenbi(self.fenX, self.fenY)
                self.fenX.pop()
                self.fenY.pop()
        self.chanlunEngine.writeChanlunLog(u'分笔加载成功')
        self.penLoaded = True
    # ----------------------------------------------------------------------
    def segment(self):
        """Build and draw line segments (分段) from the pen endpoints."""
        if not self.penLoaded:
            self.pen()  # pens must exist before segments can be built
        segmentX = []  # x coordinates of segment points
        segmentY = []  # y coordinates of segment points
        temp_type = 0  # segment direction: 1 up, -1 down, 0 = overlap of first three pens not yet checked
        i = 0
        while i < len(self.fenX) - 4:
            if temp_type == 0:
                if self.fenY[i] > self.fenY[i+1] and self.fenY[i] > self.fenY[i+3]:
                    temp_type = -1  # down segment, three pens overlap
                    segmentX.append(self.fenX[i])
                    segmentY.append(self.fenY[i])
                elif self.fenY[i] < self.fenY[i+1] and self.fenY[i] < self.fenY[i+3]:
                    temp_type = 1  # up segment, three pens overlap
                    segmentX.append(self.fenX[i])
                    segmentY.append(self.fenY[i])
                else:
                    temp_type = 0
                    i += 1
                    continue
            if temp_type == 1:  # up segment
                j = i+1
                high = []  # tops
                low = []  # bottoms
                while j < len(self.fenX) - 1:  # record tops and bottoms
                    high.append(self.fenY[j])
                    low.append(self.fenY[j+1])
                    j += 2
                if self.fenY[i+4] < self.fenY[i+1]:  # up segment broken by a down pen
                    j = 0
                    while j < len(high)-2:
                        # Top fractal among the tops: the up segment ends.
                        if high[j+1] > high[j] and high[j+1] > high[j+2]:
                            num = i + 2 * j + 3  # index of the segment end point
                            segmentX.append(self.fenX[num])
                            segmentY.append(self.fenY[num])
                            i = num
                            temp_type = -1  # an up segment is always ended by a down segment
                            break
                        j += 1
                    if j == len(high)-2:
                        break
                else:  # up segment not broken by a down pen
                    j = 1
                    while j < len(high)-2:
                        # Bottom fractal among the bottoms: the up segment ends.
                        if low[j + 1] < low[j] and low[j + 1] < low[j + 2]:
                            num = i + 2 * j + 1  # index of the segment end point
                            segmentX.append(self.fenX[num])
                            segmentY.append(self.fenY[num])
                            i = num
                            temp_type = -1  # an up segment is always ended by a down segment
                            break
                        j += 1
                    if j == len(high)-2:
                        break
            elif temp_type == -1:  # down segment
                j = i + 1
                high = []  # tops
                low = []  # bottoms
                while j < len(self.fenX) - 1:  # record tops and bottoms
                    high.append(self.fenY[j + 1])
                    low.append(self.fenY[j])
                    j += 2
                if self.fenY[i + 4] > self.fenY[i + 1]:  # down segment broken by an up pen
                    j = 0
                    while j < len(high) - 2:
                        # Bottom fractal among the bottoms: the down segment ends.
                        if low[j + 1] < low[j] and low[j + 1] < low[j + 2]:
                            num = i + 2 * j + 3  # index of the segment end point
                            segmentX.append(self.fenX[num])
                            segmentY.append(self.fenY[num])
                            i = num
                            temp_type = 1  # a down segment is always ended by an up segment
                            break
                        j += 1
                    if j == len(high) - 2:
                        break
                else:  # down segment not broken by an up pen
                    j = 1
                    while j < len(high) - 2:
                        # Top fractal among the tops: the down segment ends.
                        if high[j + 1] > high[j] and high[j + 1] > high[j + 2]:
                            num = i + 2 * j + 1  # index of the segment end point
                            segmentX.append(self.fenX[num])
                            segmentY.append(self.fenY[num])
                            i = num
                            temp_type = 1  # a down segment is always ended by an up segment
                            break
                        j += 1
                    if j == len(high) - 2:
                        break
        print "segmentX: ", segmentX
        print "segmentY: ", segmentY
        if not self.segmentLoaded:
            if len(segmentX) > 1:
                segmentX.append(segmentX[-1])
                segmentY.append(segmentY[-1])
                segmentX = [int(x) for x in segmentX]
                segmentY = [int(y) for y in segmentY]
                self.fenduan(segmentX, segmentY)
        self.chanlunEngine.writeChanlunLog(u'分段加载成功')
        self.segmentLoaded = True
    # ----------------------------------------------------------------------
    def updateChanlunLog(self, event):
        """Append a chanlun log record carried by *event* to the log monitor."""
        log = event.dict_['data']
        # print type(log)
        if(log.logTime):
            content = '\t'.join([log.logTime, log.logContent])
            self.chanlunLogMonitor.append(content)
        else:
            # NOTE(review): leftover debug output for records without a
            # timestamp — consider removing or logging properly.
            print 0
    #-----------------------------------------------------------------------
    def zhongShu(self):
        """Detect and draw the trend pivots (走势中枢) from the pen points."""
        if not self.penLoaded:
            self.pen()  # pens must exist before pivots can be drawn
        # temp_type = 0  # pivot direction flag: 1 up, -1 down
        i = 0
        temp_high, temp_low = 0, 0
        # NOTE(review): only minX and maxY are initialized here; maxX is
        # first assigned inside the inner loops, so the 'maxX == 0' test
        # below can hit an unbound name if no inner iteration ran — verify.
        minX, maxY = 0, 0
        self.zhongshuPos = []  # start/end segment positions of every pivot
        self.zhongShuType = []  # direction of every pivot
        while i < len(self.fenX) - 4:
            if (self.fenY[i] > self.fenY[i + 1] and self.fenY[i + 1] < self.fenY[i + 4]):  # entering segment points down
                temp_low = max(self.fenY[i + 1], self.fenY[i + 3])
                temp_high = min(self.fenY[i + 2], self.fenY[i + 4])  # min of tops / max of bottoms inside the pivot
                minX = self.fenX[i+1]
                self.zhongshuPos.append(i)
                self.zhongShuType.append(-1)
                j = i
                # Extend the pivot while successive pens keep overlapping it.
                while i < len(self.fenX) - 4:
                    j = i
                    if self.fenY[i + 1] < self.fenY[i + 4] and self.fenY[i + 4] > temp_low and self.fenY[i + 3] < temp_high :
                        maxX = self.fenX[i+4]
                        if self.fenY[i + 3] > temp_low:
                            temp_low = self.fenY[i + 3]
                        if self.fenY[i + 4] < temp_high:
                            temp_high = self.fenY[i + 4]
                        i = i + 1
                    elif self.fenY[i + 1] > self.fenY[i + 4] and self.fenY[i + 4] < temp_high and self.fenY[i + 3] > temp_low :
                        maxX = self.fenX[i + 4]
                        if self.fenY[i + 3] < temp_high:
                            temp_high = self.fenY[i + 3]
                        if self.fenY[i + 4] > temp_low:
                            temp_low = self.fenY[i + 4]
                        i = i + 1
                    if j == i:
                        break
            elif (self.fenY[i] < self.fenY[i + 1] and self.fenY[i + 1] > self.fenY[i + 4]):  # entering segment points up
                temp_high = min(self.fenY[i + 1], self.fenY[i + 3])
                temp_low = max(self.fenY[i + 2], self.fenY[i + 4])
                minX = self.fenX[i + 1]
                self.zhongshuPos.append(i)
                self.zhongShuType.append(1)
                j = i
                # Extend the pivot while successive pens keep overlapping it.
                while i < len(self.fenX) - 4:
                    j = i
                    if self.fenY[i + 1] > self.fenY[i + 4] and self.fenY[i + 4] < temp_high and self.fenY[i + 3] > temp_low:
                        maxX = self.fenX[i + 4]
                        if self.fenY[i + 3] < temp_high:
                            temp_high = self.fenY[i + 3]
                        if self.fenY[i + 4] > temp_low:
                            temp_low = self.fenY[i + 4]
                        i = i + 1
                    elif self.fenY[i + 1] < self.fenY[i + 4] and self.fenY[i + 4] > temp_low and self.fenY[i + 3] < temp_high:
                        maxX = self.fenX[i + 4]
                        if self.fenY[i + 3] > temp_low:
                            temp_low = self.fenY[i + 3]
                        if self.fenY[i + 4] < temp_high:
                            temp_high = self.fenY[i + 4]
                        i = i + 1
                    if i == j:
                        break
            else:
                i += 1
                continue
            # Draw the pivot that was just identified.
            if minX != 0 and maxX == 0:
                maxX = self.fenX[i+4]
                i = i + 1
                self.zhongshuPos.append(i + 4)
            else:
                self.zhongshuPos.append(i + 3)
            minY, maxY = temp_low, temp_high
            print minX, minY, maxX, maxY
            if int(maxY) > int(minY):
                plotX = [minX, minX, maxX, maxX, minX]
                plotY = [minY, maxY, maxY, minY, minY]
                plotX = [int(x) for x in plotX]
                plotY = [int(y) for y in plotY]
                self.zhongshu(plotX, plotY)
            i = i + 4
        self.zhongShuLoaded = True
        self.chanlunEngine.writeChanlunLog(u'走势中枢加载成功')
    # ----------------------------------------------------------------------
    def shop(self):
        """Load the buy/sell points (买卖点), derived from pivot divergence."""
        if not self.zhongShuLoaded:
            self.zhongShu()
        i = 0
        while i < len(self.zhongShuType) - 1:
            startPos, endPos = self.zhongshuPos[2*i], self.zhongshuPos[2*i + 1]  # pivot's entering and leaving segment positions
            startY = self.fenY[startPos + 1] - self.fenY[startPos]  # y extent of the entering segment
            startX = self.fenX[startPos + 1] - self.fenX[startPos]  # x extent of the entering segment
            startK = abs(startY * startX)  # projected area of the entering segment
            endY = self.fenY[endPos + 1] - self.fenY[endPos]  # y extent of the leaving segment
            endX = self.fenX[endPos + 1] - self.fenX[endPos]  # x extent of the leaving segment
            endK = abs(endY * endX)  # projected area of the leaving segment
            # Leaving move weaker than entering move => divergence (背驰).
            if endK < startK:
                print startPos, endPos
                if self.zhongShuType[i] == 1 and self.zhongShuType[i + 1] == -1:
                    # First sell point.
                    self.sellpoint([self.fenX[endPos + 1]], [self.fenY[endPos + 1]], 1)
                    # Second sell point: the top after the first sell.
                    self.sellpoint([self.fenX[endPos + 3]], [self.fenY[endPos + 3]], 2)
                    # Third sell point: first top of the next pivot's leaving segment.
                    i = i + 1
                    nextPos = self.zhongshuPos[2*i + 1]  # end of the next pivot
                    if nextPos + 1 < len(self.fenY):
                        if self.fenY[nextPos + 1] > self.fenY[nextPos]:
                            self.sellpoint([self.fenX[nextPos + 1]], [self.fenY[nextPos + 1]], 3)
                        else:
                            self.sellpoint([self.fenX[nextPos]], [self.fenY[nextPos]], 3)
                elif self.zhongShuType[i] == -1 and self.zhongShuType[i + 1] == 1:
                    # First buy point.
                    self.buypoint([self.fenX[endPos + 1]], [self.fenY[endPos + 1]], 1)
                    # Second buy point: the bottom after the first buy.
                    self.buypoint([self.fenX[endPos + 3]], [self.fenY[endPos + 3]], 2)
                    # Third buy point: first point of the next pivot's leaving segment.
                    i = i + 1
                    nextPos = self.zhongshuPos[2*i + 1]  # end of the next pivot
                    if nextPos + 1 < len(self.fenY):
                        if self.fenY[nextPos + 1] < self.fenY[nextPos]:
                            self.buypoint([self.fenX[nextPos + 1]], [self.fenY[nextPos + 1]], 3)
                        else:
                            self.buypoint([self.fenX[nextPos]], [self.fenY[nextPos]], 3)
            i = i + 1  # continue scanning later pivots for divergence
        self.chanlunEngine.writeChanlunLog(u'买卖点加载成功')
    # ----------------------------------------------------------------------
    def fenbi(self, fenbix, fenbiy):
        # Draw the pen polyline (light yellow) on the price plot.
        self.PriceW.pw2.plotItem.plot(x=fenbix, y=fenbiy, pen=QtGui.QPen(QtGui.QColor(255, 236, 139)))
    def fenduan(self, fenduanx, fenduany):
        # Draw the segment polyline with circle markers (light purple).
        self.PriceW.pw2.plot(x=fenduanx, y=fenduany, symbol='o', pen=QtGui.QPen(QtGui.QColor(131, 111, 255)))
    def zhongshu(self, zhongshux, zhongshuy):
        # Draw a pivot rectangle outline (orange).
        self.PriceW.pw2.plot(x=zhongshux, y=zhongshuy, pen=QtGui.QPen(QtGui.QColor(255,165,0)))
def buypoint(self, buyx, buyy, point):
if point == 1:
self.PriceW.pw2.plot(x=buyx, y=buyy, symbolSize=18, symbolBrush=(255,0,0), symbolPen=(255,0,0), symbol='star')
elif point == 2:
self.PriceW.pw2.plot(x=buyx, y=buyy, symbolSize=18, symbolBrush=(238,130,238), symbolPen=(238,130,238),symbol='star')
elif point == 3:
self.PriceW.pw2.plot(x=buyx, y=buyy, symbolSize=18, symbolBrush=(138,43,226), symbolPen=(138,43,226),symbol='star')
def sellpoint(self, sellx, selly, point):
if point == 1:
self.PriceW.pw2.plot(x=sellx, y=selly, symbolSize=18, symbolBrush=(119,172,48), symbolPen=(119,172,48), symbol='star')
elif point == 2:
self.PriceW.pw2.plot(x=sellx, y=selly, symbolSize=18, symbolBrush=(221,221,34), symbolPen=(221,221,34),symbol='star')
elif point == 3:
self.PriceW.pw2.plot(x=sellx, y=selly, symbolSize=18, symbolBrush=(179,158,77), symbolPen=(179,158,77),symbol='star')
    # ----------------------------------------------------------------------
    # Resolve bar-inclusion relations and merge K-lines (modelled on JuKuang).
    def judgeInclude(self):
        """Merge bars of self.data that 'include' one another; return the
        merged DataFrame (same columns as self.data)."""
        k_data = self.data
        # Accumulates the merged (post-inclusion) rows.
        after_fenxing = pd.DataFrame()
        temp_data = k_data[:1]
        zoushi = [3]  # trend so far: 3 flat, 4 down, 5 up
        for i in xrange(len(k_data)):
            case1 = temp_data.high[-1] >= k_data.high[i] and temp_data.low[-1] <= k_data.low[i]  # bar 1 includes bar 2
            case2 = temp_data.high[-1] <= k_data.high[i] and temp_data.low[-1] >= k_data.low[i]  # bar 2 includes bar 1
            case3 = temp_data.high[-1] == k_data.high[i] and temp_data.low[-1] == k_data.low[i]  # bars are equal
            case4 = temp_data.high[-1] > k_data.high[i] and temp_data.low[-1] > k_data.low[i]  # downtrend
            case5 = temp_data.high[-1] < k_data.high[i] and temp_data.low[-1] < k_data.low[i]  # uptrend
            if case3:
                zoushi.append(3)
                continue
            elif case1:
                print temp_data
                # NOTE(review): .ix positional assignment — column 4 is
                # 'high', column 3 is 'low' under this frame's layout.
                if zoushi[-1] == 4:
                    temp_data.ix[0, 4] = k_data.high[i]  # falling: take the lower high
                else:
                    temp_data.ix[0, 3] = k_data.low[i]  # rising: take the higher low
            elif case2:
                temp_temp = temp_data[-1:]
                temp_data = k_data[i:i + 1]
                if zoushi[-1] == 4:
                    temp_data.ix[0, 4] = temp_temp.high[0]
                else:
                    temp_data.ix[0, 3] = temp_temp.low[0]
            elif case4:
                zoushi.append(4)
                # No inclusion: commit the pending bar and move on.
                after_fenxing = pd.concat([after_fenxing, temp_data], axis=0)
                temp_data = k_data[i:i + 1]
            elif case5:
                zoushi.append(5)
                after_fenxing = pd.concat([after_fenxing, temp_data], axis=0)
                temp_data = k_data[i:i + 1]
        return after_fenxing
    # ----------------------------------------------------------------------
    # Plot the merged K-lines used for pen analysis.
    def plotAfterFenXing(self, after_fenxing):
        """Renumber and normalize the merged bars, feed them to the price
        widget bar by bar, then draw the chart (no shadows after merging)."""
        for i in xrange(len(after_fenxing)):
            # Rewrite num/open/close so that merged bars show no wicks:
            # body spans the full low..high range in trend direction.
            after_fenxing.iloc[i, 0] = i
            if after_fenxing.open[i] > after_fenxing.close[i]:
                after_fenxing.iloc[i, 1] = after_fenxing.high[i]
                after_fenxing.iloc[i, 2] = after_fenxing.low[i]
            else:
                after_fenxing.iloc[i, 1] = after_fenxing.low[i]
                after_fenxing.iloc[i, 2] = after_fenxing.high[i]
            self.PriceW.onBarAfterFenXing(i, after_fenxing.index[i], after_fenxing.open[i], after_fenxing.close[i], after_fenxing.low[i], after_fenxing.high[i])
        self.PriceW.plotKlineAfterFenXing()
        print "plotKLine after fenxing"
    # ----------------------------------------------------------------------
    # Locate the tops and bottoms (fractals) in the merged bars.
    def findTopAndLow(self, after_fenxing):
        """Return (fenxing_data, fenxing_type): the fractal rows and, per
        fractal, 1 for a top and -1 for a bottom."""
        temp_num = 0  # index of the previous candidate top/bottom
        temp_high = 0  # high of the previous candidate top
        temp_low = 0  # low of the previous candidate bottom
        temp_type = 0  # type of the previous candidate (1 top, 2 bottom)
        i = 1
        fenxing_type = []  # fractal types: 1 top, -1 bottom
        fenxing_data = pd.DataFrame()  # fractal rows
        while (i < len(after_fenxing) - 1):
            case1 = after_fenxing.high[i - 1] < after_fenxing.high[i] and after_fenxing.high[i] > after_fenxing.high[i + 1]  # top fractal
            case2 = after_fenxing.low[i - 1] > after_fenxing.low[i] and after_fenxing.low[i] < after_fenxing.low[i + 1]  # bottom fractal
            if case1:
                if temp_type == 1:  # previous was a top: keep whichever top is higher
                    if after_fenxing.high[i] <= temp_high:
                        i += 1
                    else:
                        temp_high = after_fenxing.high[i]
                        temp_num = i
                        temp_type = 1
                        i += 1
                elif temp_type == 2:  # previous was a bottom: commit it, then track this top
                    if temp_low >= after_fenxing.high[i]:  # previous bottom above this top: skip this top
                        i += 1
                    elif i < temp_num + 4:  # a top and bottom must be at least 5 bars apart
                        i += 1
                    else:
                        fenxing_type.append(-1)
                        fenxing_data = pd.concat([fenxing_data, after_fenxing[temp_num:temp_num + 1]], axis=0)
                        temp_high = after_fenxing.high[i]
                        temp_num = i
                        temp_type = 1
                        i += 1
                else:  # no previous candidate: start tracking this top
                    temp_high = after_fenxing.high[i]
                    temp_num = i
                    temp_type = 1
                    i += 1
            elif case2:
                if temp_type == 2:  # previous was a bottom: keep whichever bottom is lower
                    if after_fenxing.low[i] >= temp_low:
                        i += 1
                    else:
                        temp_low = after_fenxing.low[i]
                        temp_num = i
                        temp_type = 2
                        i += 1
                elif temp_type == 1:  # previous was a top: commit it, then track this bottom
                    if temp_high <= after_fenxing.low[i]:  # previous top below this bottom: skip this bottom
                        i += 1
                    elif i < temp_num + 4:  # a top and bottom must be at least 5 bars apart
                        i += 1
                    else:
                        fenxing_type.append(1)
                        fenxing_data = pd.concat([fenxing_data, after_fenxing[temp_num:temp_num + 1]], axis=0)
                        temp_low = after_fenxing.low[i]
                        temp_num = i
                        temp_type = 2
                        i += 1
                else:  # no previous candidate: start tracking this bottom
                    temp_low = after_fenxing.low[i]
                    temp_num = i
                    temp_type = 2
                    i += 1
            else:
                i += 1
        # NOTE(review): the trailing candidate is never committed; the
        # commented-out code below would flush it — confirm intent.
        # if fenxing_type:
        #     if fenxing_type[-1] == 1 and temp_type == 2:
        #         fenxing_type.append(-1)
        #         fenxing_data = pd.concat([fenxing_data, after_fenxing[temp_num:temp_num + 1]], axis=0)
        #
        #     if fenxing_type[-1] == -1 and temp_type == 1:
        #         fenxing_type.append(1)
        #         fenxing_data = pd.concat([fenxing_data, after_fenxing[temp_num:temp_num + 1]], axis=0)
        return fenxing_data, fenxing_type
# ----------------------------------------------------------------------
# 连接MongoDB数据库
    def __connectMongo(self):
        """Connect to the local MongoDB server (best effort).

        On success sets ``__mongoConnected`` True; on failure it is left
        False and the widget simply runs without database access.
        """
        try:
            self.__mongoConnection = pymongo.MongoClient("localhost", 27017)
            self.__mongoConnected = True
        except ConnectionFailure:
            # Deliberate best-effort: callers are expected to consult
            # __mongoConnected before using the connection.
            pass
# ----------------------------------------------------------------------
    def registerEvent(self):
        """Register the chanlun-log event listener.

        The engine callback emits the Qt signal so the actual UI update
        (updateChanlunLog) runs on the GUI thread.
        """
        self.signal.connect(self.updateChanlunLog)
        self.eventEngine.register(EVENT_CHANLUN_LOG, self.signal.emit)
########################################################################
class PriceWidget(QtGui.QWidget):
    """Candlestick (K-line) chart widget with fast/slow EMA overlays.

    Bars come either from the ``data`` DataFrame passed to the constructor
    or from 1-minute bars stored in MongoDB.  Python 2 code (``print``
    statements, ``xrange``).
    """

    # Qt signal used to marshal engine events onto the GUI thread.
    signal = QtCore.pyqtSignal(type(Event()))
    symbol = ''  # contract code; '' means "no symbol selected"

    class CandlestickItem(pg.GraphicsObject):
        """pyqtgraph item that pre-renders OHLC bars into a QPicture."""

        def __init__(self, data):
            pg.GraphicsObject.__init__(self)
            self.data = data  ## data must have fields: time, open, close, min, max
            self.generatePicture()

        def generatePicture(self):
            ## pre-computing a QPicture object allows paint() to run much more quickly,
            ## rather than re-drawing the shapes every time.
            self.picture = QtGui.QPicture()
            p = QtGui.QPainter(self.picture)
            p.setPen(pg.mkPen(color='w', width=0.4))  # 0.4 means w*2
            # w = (self.data[1][0] - self.data[0][0]) / 3.
            w = 0.2  # half-width of each candle body
            for (n, t, open, close, min, max) in self.data:
                # Wick: vertical line from low to high.
                # NOTE(review): loop targets shadow builtins open/min/max.
                p.drawLine(QtCore.QPointF(n, min), QtCore.QPointF(n, max))
                if open > close:
                    p.setBrush(pg.mkBrush('g'))  # falling bar: green
                else:
                    p.setBrush(pg.mkBrush('r'))  # rising/flat bar: red
                p.drawRect(QtCore.QRectF(n-w, open, w*2, close-open))
                # NOTE(review): global, idempotent pyqtgraph setting executed
                # once per bar; could be hoisted out of the loop.
                pg.setConfigOption('leftButtonPan', False)
            p.end()

        def paint(self, p, *args):
            p.drawPicture(0, 0, self.picture)

        def boundingRect(self):
            ## boundingRect _must_ indicate the entire area that will be drawn on
            ## or else we will get artifacts and possibly crashing.
            ## (in this case, QPicture does all the work of computing the bouning rect for us)
            return QtCore.QRectF(self.picture.boundingRect())

    # ----------------------------------------------------------------------
    def __init__(self, eventEngine, chanlunEngine, data, parent=None):
        """Constructor."""
        super(PriceWidget, self).__init__(parent)
        # EMA parameters and running state for the K-line chart.
        self.EMAFastAlpha = 0.0167  # fast EMA smoothing factor (~60 periods)
        self.EMASlowAlpha = 0.0083  # slow EMA smoothing factor (~120 periods)
        self.fastEMA = 0            # current fast EMA value
        self.slowEMA = 0            # current slow EMA value
        self.listfastEMA = []
        self.listslowEMA = []
        # Bar (K-line) history buffers.
        self.listBar = []
        self.listClose = []
        self.listHigh = []
        self.listLow = []
        self.listOpen = []
        # Whether historical data has finished loading.
        self.initCompleted = False
        self.__eventEngine = eventEngine
        self.__chanlunEngine = chanlunEngine
        self.data = data  # DataFrame used for plotting
        # MongoDB connection state.
        self.__mongoConnected = False
        self.__mongoConnection = None
        # Wire everything up.
        self.__connectMongo()
        self.initUi()
        # self.registerEvent()

    # ----------------------------------------------------------------------
    def initUi(self):
        """Initialise the widget layout."""
        self.setWindowTitle(u'Price')
        self.vbl_1 = QtGui.QHBoxLayout()
        self.initplotKline()  # set up the K-line plot
        self.setLayout(self.vbl_1)

    # ----------------------------------------------------------------------
    def initplotKline(self):
        """Create the K-line PlotWidget with a string-valued time axis."""
        s = self.data.index  # x-axis values
        print "numbers of KLine: ", len(s)
        xdict = dict(enumerate(s))
        # NOTE(review): MyStringAxis must be defined elsewhere in this module.
        self.__axisTime = MyStringAxis(xdict, orientation='bottom')
        self.pw2 = pg.PlotWidget(axisItems={'bottom': self.__axisTime})  # K-line plot
        pw2x = self.pw2.getAxis('bottom')
        pw2x.setGrid(150)  # default x-axis grid opacity
        pw2y = self.pw2.getAxis('left')
        pw2y.setGrid(150)  # default y-axis grid opacity
        self.vbl_1.addWidget(self.pw2)
        self.pw2.setMinimumWidth(1500)
        self.pw2.setMaximumWidth(1800)
        self.pw2.setDownsampling(mode='peak')
        self.pw2.setClipToView(True)
        self.curve5 = self.pw2.plot()  # fast EMA curve
        self.curve6 = self.pw2.plot()  # slow EMA curve
        self.candle = self.CandlestickItem(self.listBar)
        self.pw2.addItem(self.candle)
        ## Draw an arrowhead next to the text box
        # self.arrow = pg.ArrowItem()
        # self.pw2.addItem(self.arrow)

    # Plot one-minute bars read from the database.
    def plotMin(self, symbol):
        """Load 1-minute bars for *symbol* from MongoDB and feed onBar."""
        self.initCompleted = True
        cx = self.__mongoMinDB[symbol].find()
        print cx.count()
        if cx:
            for data in cx:
                self.barOpen = data['open']
                self.barClose = data['close']
                self.barLow = data['low']
                self.barHigh = data['high']
                self.barOpenInterest = data['openInterest']
                # NOTE(review): self.num is never initialised in this class
                # (AttributeError on first use), and the argument order does
                # not match onBar(n, t, o, c, l, h): here t receives the open
                # price and h the open interest.  Confirm intent before use.
                self.onBar(self.num, self.barOpen, self.barClose, self.barLow, self.barHigh, self.barOpenInterest)
                self.num += 1

    # Plot the historical K-line chart.
    def plotHistorticData(self):
        """Feed every row of self.data through onBar, then draw the chart."""
        self.initCompleted = True
        for i in xrange(len(self.data)):
            self.onBar(i, self.data.index[i], self.data.open[i], self.data.close[i], self.data.low[i], self.data.high[i])
        self.plotKline()
        print "plotKLine success"

    # ----------------------------------------------------------------------
    def initHistoricalData(self):
        """Initialise historical tick data.

        NOTE(review): the statements that assigned ``cx`` are commented out
        below, so calling this method raises NameError at ``cx.count()``.
        Restore the loadTick call or remove the method.
        """
        if self.symbol!='':
            print "download histrical data:",self.symbol
        self.initCompleted = True  # historical data load finished
        td = timedelta(days=1)     # intended window of historical TICK data
        # if startDate:
        #     cx = self.loadTick(self.symbol, startDate-td)
        # else:
        #     today = datetime.today().replace(hour=0, minute=0, second=0, microsecond=0)
        #     cx = self.loadTick(self.symbol, today-td)
        print cx.count()
        if cx:
            for data in cx:
                tick = Tick(data['symbol'])
                tick.openPrice = data['lastPrice']
                tick.highPrice = data['upperLimit']
                tick.lowPrice = data['lowerLimit']
                tick.lastPrice = data['lastPrice']
                tick.volume = data['volume']
                tick.openInterest = data['openInterest']
                tick.upperLimit = data['upperLimit']
                tick.lowerLimit = data['lowerLimit']
                tick.time = data['time']
                # tick.ms = data['UpdateMillisec']
                tick.bidPrice1 = data['bidPrice1']
                tick.bidPrice2 = data['bidPrice2']
                tick.bidPrice3 = data['bidPrice3']
                tick.bidPrice4 = data['bidPrice4']
                tick.bidPrice5 = data['bidPrice5']
                tick.askPrice1 = data['askPrice1']
                tick.askPrice2 = data['askPrice2']
                tick.askPrice3 = data['askPrice3']
                tick.askPrice4 = data['askPrice4']
                tick.askPrice5 = data['askPrice5']
                tick.bidVolume1 = data['bidVolume1']
                tick.bidVolume2 = data['bidVolume2']
                tick.bidVolume3 = data['bidVolume3']
                tick.bidVolume4 = data['bidVolume4']
                tick.bidVolume5 = data['bidVolume5']
                tick.askVolume1 = data['askVolume1']
                tick.askVolume2 = data['askVolume2']
                tick.askVolume3 = data['askVolume3']
                tick.askVolume4 = data['askVolume4']
                tick.askVolume5 = data['askVolume5']
                self.onTick(tick)
        print('load historic data completed')

    # ----------------------------------------------------------------------
    def plotKline(self):
        """Redraw the K-line chart: EMA curves plus candlesticks."""
        if self.initCompleted:
            # EMA overlay curves.
            self.curve5.setData(self.listfastEMA, pen=(255, 0, 0), name="Red curve")
            self.curve6.setData(self.listslowEMA, pen=(0, 255, 0), name="Green curve")
            # Candles are redrawn by replacing the whole item each call.
            self.pw2.removeItem(self.candle)
            self.candle = self.CandlestickItem(self.listBar)
            self.pw2.addItem(self.candle)

    # ----------------------------------------------------------------------
    def plotText(self):
        """Draw an arrow marker at a 5-bar fractal signal, EMA-filtered."""
        lenClose = len(self.listClose)
        if lenClose >= 5:  # Fractal Signal
            if self.listClose[-1] > self.listClose[-2] and self.listClose[-3] > self.listClose[-2] and self.listClose[-4] > self.listClose[-2] and self.listClose[-5] > self.listClose[-2] and self.listfastEMA[-1] > self.listslowEMA[-1]:
                ## Draw an arrowhead next to the text box
                # self.pw2.removeItem(self.arrow)
                self.arrow = pg.ArrowItem(pos=(lenClose-1, self.listLow[-1]), angle=90, brush=(255, 0, 0))  # red arrow
                self.pw2.addItem(self.arrow)
            elif self.listClose[-1] < self.listClose[-2] and self.listClose[-3] < self.listClose[-2] and self.listClose[-4] < self.listClose[-2] and self.listClose[-5] < self.listClose[-2] and self.listfastEMA[-1] < self.listslowEMA[-1]:
                ## Draw an arrowhead next to the text box
                # self.pw2.removeItem(self.arrow)
                self.arrow = pg.ArrowItem(pos=(lenClose-1, self.listHigh[-1]), angle=-90, brush=(0, 255, 0))  # green arrow
                self.pw2.addItem(self.arrow)

    # ----------------------------------------------------------------------
    def onBar(self, n, t, o, c, l, h):
        """Append one bar (index, time, open, close, low, high); update EMAs."""
        self.listBar.append((n, t, o, c, l, h))
        self.listOpen.append(o)
        self.listClose.append(c)
        self.listHigh.append(h)
        self.listLow.append(l)
        # Update the EMA running state for the K-line chart.
        # NOTE(review): the truthiness test also treats a legitimate EMA
        # value of exactly 0 as "not yet seeded".
        if self.fastEMA:
            self.fastEMA = c*self.EMAFastAlpha + self.fastEMA*(1-self.EMAFastAlpha)
            self.slowEMA = c*self.EMASlowAlpha + self.slowEMA*(1-self.EMASlowAlpha)
        else:
            self.fastEMA = c
            self.slowEMA = c
        self.listfastEMA.append(self.fastEMA)
        self.listslowEMA.append(self.slowEMA)
        self.plotText()  # draw entry-signal markers

    # ----------------------------------------------------------------------
    # Append a bar produced by the fenxing (K-line merge) pre-processing.
    def onBarAfterFenXing(self, n, t, o, c, l, h):
        self.listBar.append((n, t, o, c, l, h))

    def plotKlineAfterFenXing(self):
        """Redraw candles after the fenxing merge (no EMA update)."""
        self.pw2.removeItem(self.candle)
        self.candle = self.CandlestickItem(self.listBar)
        self.pw2.addItem(self.candle)

    # ----------------------------------------------------------------------
    def __connectMongo(self):
        """Connect to local MongoDB and select the 1-minute bar database."""
        try:
            self.__mongoConnection = pymongo.MongoClient("localhost", 27017)
            self.__mongoConnected = True
            self.__mongoMinDB = self.__mongoConnection['VnTrader_1Min_Db']
        except ConnectionFailure:
            # Best effort: __mongoConnected stays False when the DB is down.
            pass
########################################################################
class TickWidget(QtGui.QWidget):
    """Tick (last-price) chart widget with fast/mid/slow moving averages.

    NOTE(review): the buffers and counters below are *class* attributes, so
    every TickWidget instance shares the same numpy arrays and ``ptr``.
    """

    signal = QtCore.pyqtSignal(type(Event()))
    # Tick-chart parameters and state.
    listlastPrice = np.empty(1000)
    fastMA = 0
    midMA = 0
    slowMA = 0
    listfastMA = np.empty(1000)
    listmidMA = np.empty(1000)
    listslowMA = np.empty(1000)
    tickFastAlpha = 0.0333  # fast MA smoothing factor (~30 ticks)
    tickMidAlpha = 0.0167   # mid MA smoothing factor (~60 ticks)
    tickSlowAlpha = 0.0083  # slow MA smoothing factor (~120 ticks)
    ptr = 0                 # number of ticks stored so far
    ticktime = None         # time of the most recent tick

    class CandlestickItem(pg.GraphicsObject):
        """pyqtgraph item that pre-renders OHLC bars into a QPicture."""

        def __init__(self, data):
            pg.GraphicsObject.__init__(self)
            self.data = data  ## data must have fields: time, open, close, min, max
            self.generatePicture()

        def generatePicture(self):
            ## pre-computing a QPicture object allows paint() to run much more quickly,
            ## rather than re-drawing the shapes every time.
            self.picture = QtGui.QPicture()
            p = QtGui.QPainter(self.picture)
            p.setPen(pg.mkPen(color='w', width=0.4))  # 0.4 means w*2
            # NOTE(review): this AxisItem is configured but never attached to
            # anything -- it appears to be dead code.
            a = pg.AxisItem('bottom', pen=None, linkView=None, parent=None, maxTickLength=-5, showValues=True)
            a.setFixedWidth(1)
            a.setWidth(1)
            a.setLabel(show=True)
            a.setGrid(grid=True)
            labelStyle = {'color': '#FFF', 'font-size': '14pt'}
            a.setLabel('label text', units='V', **labelStyle)
            # w = (self.data[1][0] - self.data[0][0]) / 3.
            w = 0.2  # half-width of each candle body
            for (t, open, close, min, max) in self.data:
                # NOTE(review): loop targets shadow builtins open/min/max.
                p.drawLine(QtCore.QPointF(t, min), QtCore.QPointF(t, max))
                if open > close:
                    p.setBrush(pg.mkBrush('g'))  # falling bar: green
                else:
                    p.setBrush(pg.mkBrush('r'))  # rising/flat bar: red
                p.drawRect(QtCore.QRectF(t-w, open, w*2, close-open))
                # NOTE(review): idempotent global setting; could be hoisted.
                pg.setConfigOption('leftButtonPan', False)
            p.end()

        def paint(self, p, *args):
            p.drawPicture(0, 0, self.picture)

        def boundingRect(self):
            ## boundingRect _must_ indicate the entire area that will be drawn on
            ## or else we will get artifacts and possibly crashing.
            ## (in this case, QPicture does all the work of computing the bouning rect for us)
            return QtCore.QRectF(self.picture.boundingRect())

    # ----------------------------------------------------------------------
    def __init__(self, eventEngine, chanlunEngine, parent=None):
        """Constructor."""
        super(TickWidget, self).__init__(parent)
        self.__eventEngine = eventEngine
        self.__chanlunEngine = chanlunEngine
        # MongoDB connection state.
        # NOTE(review): __connectMongo() is never invoked here, so
        # __mongoConnected remains False and __recordTick() never writes.
        self.__mongoConnected = False
        self.__mongoConnection = None
        self.__mongoTickDB = None
        # Wire everything up.
        self.initUi()
        self.registerEvent()

    # ----------------------------------------------------------------------
    def initUi(self):
        """Initialise the widget layout."""
        self.setWindowTitle(u'Tick')
        self.vbl_1 = QtGui.QHBoxLayout()
        self.initplotTick()  # set up the tick plot
        self.setLayout(self.vbl_1)

    # ----------------------------------------------------------------------
    def initplotTick(self):
        """Create the tick PlotWidget and its four curves (price + 3 MAs)."""
        self.pw1 = pg.PlotWidget(name='Plot1')
        self.vbl_1.addWidget(self.pw1)
        self.pw1.setMinimumWidth(1500)
        self.pw1.setMaximumWidth(1800)
        self.pw1.setRange(xRange=[-360, 0])  # show the last 360 ticks
        self.pw1.setLimits(xMax=5)
        self.pw1.setDownsampling(mode='peak')
        self.pw1.setClipToView(True)
        self.curve1 = self.pw1.plot()  # last price
        self.curve2 = self.pw1.plot()  # fast MA
        self.curve3 = self.pw1.plot()  # mid MA
        self.curve4 = self.pw1.plot()  # slow MA

    # (A large commented-out initHistoricalData() implementation was removed
    # here -- it duplicated PriceWidget.initHistoricalData; see VCS history.)

    # ----------------------------------------------------------------------
    def plotTick(self):
        """Redraw the tick chart, shifting curves so the newest tick is at x=0."""
        self.curve1.setData(self.listlastPrice[:self.ptr])
        self.curve2.setData(self.listfastMA[:self.ptr], pen=(255, 0, 0), name="Red curve")
        self.curve3.setData(self.listmidMA[:self.ptr], pen=(0, 255, 0), name="Green curve")
        self.curve4.setData(self.listslowMA[:self.ptr], pen=(0, 0, 255), name="Blue curve")
        self.curve1.setPos(-self.ptr, 0)
        self.curve2.setPos(-self.ptr, 0)
        self.curve3.setPos(-self.ptr, 0)
        self.curve4.setPos(-self.ptr, 0)

    # ----------------------------------------------------------------------
    def updateMarketData(self, event):
        """Translate a CTP-style market-data event into a Tick and process it."""
        data = event.dict_['data']
        print "update", data['InstrumentID']
        symbol = data['InstrumentID']
        tick = Tick(symbol)
        tick.openPrice = data['OpenPrice']
        tick.highPrice = data['HighestPrice']
        tick.lowPrice = data['LowestPrice']
        tick.lastPrice = data['LastPrice']
        tick.volume = data['Volume']
        tick.openInterest = data['OpenInterest']
        tick.upperLimit = data['UpperLimitPrice']
        tick.lowerLimit = data['LowerLimitPrice']
        tick.time = data['UpdateTime']
        tick.ms = data['UpdateMillisec']
        tick.bidPrice1 = data['BidPrice1']
        tick.bidPrice2 = data['BidPrice2']
        tick.bidPrice3 = data['BidPrice3']
        tick.bidPrice4 = data['BidPrice4']
        tick.bidPrice5 = data['BidPrice5']
        tick.askPrice1 = data['AskPrice1']
        tick.askPrice2 = data['AskPrice2']
        tick.askPrice3 = data['AskPrice3']
        tick.askPrice4 = data['AskPrice4']
        tick.askPrice5 = data['AskPrice5']
        tick.bidVolume1 = data['BidVolume1']
        tick.bidVolume2 = data['BidVolume2']
        tick.bidVolume3 = data['BidVolume3']
        tick.bidVolume4 = data['BidVolume4']
        tick.bidVolume5 = data['BidVolume5']
        tick.askVolume1 = data['AskVolume1']
        tick.askVolume2 = data['AskVolume2']
        tick.askVolume3 = data['AskVolume3']
        tick.askVolume4 = data['AskVolume4']
        tick.askVolume5 = data['AskVolume5']
        self.onTick(tick)          # update the tick chart
        self.__recordTick(tick)    # persist the tick (see NOTE in __recordTick)

    # ----------------------------------------------------------------------
    def onTick(self, tick):
        """Process one tick: update MAs, grow buffers as needed, redraw."""
        from datetime import time
        # Build a datetime.time from the "HH:MM:SS" string so ticks can be
        # compared chronologically.
        hh, mm, ss = tick.time.split(':')
        self.ticktime = time(int(hh), int(mm), int(ss), microsecond=tick.ms)
        # Exponential moving averages, seeded from the very first tick.
        if self.ptr == 0:
            self.fastMA = tick.lastPrice
            self.midMA = tick.lastPrice
            self.slowMA = tick.lastPrice
        else:
            self.fastMA = (1-self.tickFastAlpha) * self.fastMA + self.tickFastAlpha * tick.lastPrice
            self.midMA = (1-self.tickMidAlpha) * self.midMA + self.tickMidAlpha * tick.lastPrice
            self.slowMA = (1-self.tickSlowAlpha) * self.slowMA + self.tickSlowAlpha * tick.lastPrice
        # NOTE(review): values are truncated to int before being stored.
        self.listlastPrice[self.ptr] = int(tick.lastPrice)
        self.listfastMA[self.ptr] = int(self.fastMA)
        self.listmidMA[self.ptr] = int(self.midMA)
        self.listslowMA[self.ptr] = int(self.slowMA)
        self.ptr += 1
        print(self.ptr)
        # Amortised growth: double each numpy buffer when full.
        if self.ptr >= self.listlastPrice.shape[0]:
            tmp = self.listlastPrice
            self.listlastPrice = np.empty(self.listlastPrice.shape[0] * 2)
            self.listlastPrice[:tmp.shape[0]] = tmp
            tmp = self.listfastMA
            self.listfastMA = np.empty(self.listfastMA.shape[0] * 2)
            self.listfastMA[:tmp.shape[0]] = tmp
            tmp = self.listmidMA
            self.listmidMA = np.empty(self.listmidMA.shape[0] * 2)
            self.listmidMA[:tmp.shape[0]] = tmp
            tmp = self.listslowMA
            self.listslowMA = np.empty(self.listslowMA.shape[0] * 2)
            self.listslowMA[:tmp.shape[0]] = tmp
        # Redraw the tick chart.
        self.plotTick()

    # ----------------------------------------------------------------------
    def __connectMongo(self):
        """Connect to local MongoDB and select the tick database."""
        try:
            self.__mongoConnection = pymongo.MongoClient("localhost", 27017)
            self.__mongoConnected = True
            self.__mongoTickDB = self.__mongoConnection['VnTrader_Tick_Db']
        except ConnectionFailure:
            # Best effort: __mongoConnected stays False when the DB is down.
            pass

    # ----------------------------------------------------------------------
    def __recordTick(self, data):
        """Insert one tick into MongoDB.

        NOTE(review): updateMarketData passes a Tick *object*, but this
        method indexes ``data`` like a dict and inserts it directly; it
        would raise TypeError if __mongoConnected were ever True.  Confirm
        whether this should receive the raw event dict instead.
        """
        if self.__mongoConnected:
            symbol = data['InstrumentID']
            data['date'] = datetime.now().strftime('%Y%m%d')
            self.__mongoTickDB[symbol].insert(data)

    # (A commented-out loadTick() helper was removed here; see VCS history.)

    # ----------------------------------------------------------------------
    def registerEvent(self):
        """Register the market-data event listener."""
        print "connect"
        self.signal.connect(self.updateMarketData)
        self.__eventEngine.register(EVENT_MARKETDATA, self.signal.emit)
class Tick:
    """Market-data tick container: one quote snapshot for a single contract."""

    # ----------------------------------------------------------------------
    def __init__(self, symbol):
        """Constructor: zero-initialise every field for *symbol*."""
        self.symbol = symbol        # contract code
        self.openPrice = 0          # OHLC
        self.highPrice = 0
        self.lowPrice = 0
        self.lastPrice = 0
        self.volume = 0             # traded volume
        self.openInterest = 0       # open interest
        self.upperLimit = 0         # limit-up price
        self.lowerLimit = 0         # limit-down price
        self.time = ''              # update time string and milliseconds
        self.ms = 0
        self.bidPrice1 = 0          # five levels of market depth
        self.bidPrice2 = 0
        self.bidPrice3 = 0
        self.bidPrice4 = 0
        self.bidPrice5 = 0
        self.askPrice1 = 0
        self.askPrice2 = 0
        self.askPrice3 = 0
        self.askPrice4 = 0
        self.askPrice5 = 0
        self.bidVolume1 = 0
        self.bidVolume2 = 0
        self.bidVolume3 = 0
        self.bidVolume4 = 0
        self.bidVolume5 = 0
        self.askVolume1 = 0
        self.askVolume2 = 0
        self.askVolume3 = 0
        self.askVolume4 = 0
        # Fixed: a dataset/license artifact (" | mit |") was fused onto this
        # line, which made the module unparseable.
        self.askVolume5 = 0
# (end of file -- license: mit)
# --- next file: dracos/django, tests/gis_tests/gdal_tests/test_srs.py ---
import unittest
from django.contrib.gis.gdal import (
CoordTransform, GDALException, SpatialReference, SRSException,
)
class TestSRS:
    """Expected-value fixture: a WKT string plus arbitrary named attributes."""

    def __init__(self, wkt, **kwargs):
        """Store *wkt* and attach every keyword argument as an attribute."""
        self.wkt = wkt
        # epsg, projected, auth, attr, ... all become instance attributes.
        self.__dict__.update(kwargs)
# A WGS84 definition in PROJ.4 format (trailing space intentional).
WGS84_proj = '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs '

# Some Spatial Reference examples.  Each TestSRS bundles a WKT string with
# the values the tests expect back from SpatialReference (EPSG code,
# projected/geographic/local flags, unit names/sizes, authority lookups and
# WKT attribute lookups).
srlist = (
    TestSRS(
        'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,'
        'AUTHORITY["EPSG","7030"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]],'
        'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",'
        '0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]',
        epsg=4326, projected=False, geographic=True, local=False,
        lin_name='unknown', ang_name='degree', lin_units=1.0, ang_units=0.0174532925199,
        auth={'GEOGCS': ('EPSG', '4326'), 'spheroid': ('EPSG', '7030')},
        attr=(('DATUM', 'WGS_1984'), (('SPHEROID', 1), '6378137'), ('primem|authority', 'EPSG'),),
    ),
    TestSRS(
        'PROJCS["NAD83 / Texas South Central",GEOGCS["NAD83",DATUM["North_American_Datum_1983",'
        'SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],'
        'AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],'
        'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],'
        'AUTHORITY["EPSG","4269"]],PROJECTION["Lambert_Conformal_Conic_2SP"],'
        'PARAMETER["standard_parallel_1",30.28333333333333],'
        'PARAMETER["standard_parallel_2",28.38333333333333],'
        'PARAMETER["latitude_of_origin",27.83333333333333],'
        'PARAMETER["central_meridian",-99],PARAMETER["false_easting",600000],'
        'PARAMETER["false_northing",4000000],UNIT["metre",1,AUTHORITY["EPSG","9001"]],'
        'AUTHORITY["EPSG","32140"]]',
        epsg=32140, projected=True, geographic=False, local=False,
        lin_name='metre', ang_name='degree', lin_units=1.0, ang_units=0.0174532925199,
        auth={'PROJCS': ('EPSG', '32140'), 'spheroid': ('EPSG', '7019'), 'unit': ('EPSG', '9001')},
        attr=(
            ('DATUM', 'North_American_Datum_1983'),
            (('SPHEROID', 2), '298.257222101'),
            ('PROJECTION', 'Lambert_Conformal_Conic_2SP'),
        ),
    ),
    TestSRS(
        'PROJCS["NAD_1983_StatePlane_Texas_South_Central_FIPS_4204_Feet",'
        'GEOGCS["GCS_North_American_1983",DATUM["North_American_Datum_1983",'
        'SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],'
        'UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic_2SP"],'
        'PARAMETER["False_Easting",1968500.0],PARAMETER["False_Northing",13123333.33333333],'
        'PARAMETER["Central_Meridian",-99.0],PARAMETER["Standard_Parallel_1",28.38333333333333],'
        'PARAMETER["Standard_Parallel_2",30.28333333333334],PARAMETER["Latitude_Of_Origin",27.83333333333333],'
        'UNIT["Foot_US",0.3048006096012192]]',
        epsg=None, projected=True, geographic=False, local=False,
        lin_name='Foot_US', ang_name='Degree', lin_units=0.3048006096012192, ang_units=0.0174532925199,
        auth={'PROJCS': (None, None)},
        attr=(('PROJCS|GeOgCs|spheroid', 'GRS_1980'), (('projcs', 9), 'UNIT'), (('projcs', 11), None),),
    ),
    # This is really ESRI format, not WKT -- but the import should work the same
    TestSRS(
        'LOCAL_CS["Non-Earth (Meter)",LOCAL_DATUM["Local Datum",0],UNIT["Meter",1.0],AXIS["X",EAST],AXIS["Y",NORTH]]',
        esri=True, epsg=None, projected=False, geographic=False, local=True,
        lin_name='Meter', ang_name='degree', lin_units=1.0, ang_units=0.0174532925199,
        attr=(('LOCAL_DATUM', 'Local Datum'), ('unit', 'Meter')),
    ),
)

# Well-Known Names: shorthand identifiers (WGS84, EPSG:NNNN, ...) that
# SpatialReference should resolve to the full definitions below.
well_known = (
    TestSRS(
        'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,'
        'AUTHORITY["EPSG","7030"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]],'
        'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,'
        'AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]',
        wk='WGS84', name='WGS 84',
        attrs=(('GEOGCS|AUTHORITY', 1, '4326'), ('SPHEROID', 'WGS 84')),
    ),
    TestSRS(
        'GEOGCS["WGS 72",DATUM["WGS_1972",SPHEROID["WGS 72",6378135,298.26,'
        'AUTHORITY["EPSG","7043"]],AUTHORITY["EPSG","6322"]],'
        'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],'
        'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],'
        'AUTHORITY["EPSG","4322"]]',
        wk='WGS72', name='WGS 72',
        attrs=(('GEOGCS|AUTHORITY', 1, '4322'), ('SPHEROID', 'WGS 72')),
    ),
    TestSRS(
        'GEOGCS["NAD27",DATUM["North_American_Datum_1927",'
        'SPHEROID["Clarke 1866",6378206.4,294.9786982138982,'
        'AUTHORITY["EPSG","7008"]],AUTHORITY["EPSG","6267"]],'
        'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],'
        'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],'
        'AUTHORITY["EPSG","4267"]]',
        wk='NAD27', name='NAD27',
        attrs=(('GEOGCS|AUTHORITY', 1, '4267'), ('SPHEROID', 'Clarke 1866'))
    ),
    TestSRS(
        'GEOGCS["NAD83",DATUM["North_American_Datum_1983",'
        'SPHEROID["GRS 1980",6378137,298.257222101,'
        'AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],'
        'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],'
        'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],'
        'AUTHORITY["EPSG","4269"]]',
        wk='NAD83', name='NAD83',
        attrs=(('GEOGCS|AUTHORITY', 1, '4269'), ('SPHEROID', 'GRS 1980')),
    ),
    TestSRS(
        'PROJCS["NZGD49 / Karamea Circuit",GEOGCS["NZGD49",'
        'DATUM["New_Zealand_Geodetic_Datum_1949",'
        'SPHEROID["International 1924",6378388,297,'
        'AUTHORITY["EPSG","7022"]],'
        'TOWGS84[59.47,-5.04,187.44,0.47,-0.1,1.024,-4.5993],'
        'AUTHORITY["EPSG","6272"]],PRIMEM["Greenwich",0,'
        'AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,'
        'AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4272"]],'
        'PROJECTION["Transverse_Mercator"],'
        'PARAMETER["latitude_of_origin",-41.28991152777778],'
        'PARAMETER["central_meridian",172.1090281944444],'
        'PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],'
        'PARAMETER["false_northing",700000],'
        'UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","27216"]]',
        wk='EPSG:27216', name='NZGD49 / Karamea Circuit',
        attrs=(('PROJECTION', 'Transverse_Mercator'), ('SPHEROID', 'International 1924')),
    ),
)

# Strings that must be rejected by SpatialReference/validate().
bad_srlist = (
    'Foobar',
    'OOJCS["NAD83 / Texas South Central",GEOGCS["NAD83",'
    'DATUM["North_American_Datum_1983",'
    'SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],'
    'AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],'
    'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],'
    'AUTHORITY["EPSG","4269"]],PROJECTION["Lambert_Conformal_Conic_2SP"],'
    'PARAMETER["standard_parallel_1",30.28333333333333],'
    'PARAMETER["standard_parallel_2",28.38333333333333],'
    'PARAMETER["latitude_of_origin",27.83333333333333],'
    'PARAMETER["central_meridian",-99],PARAMETER["false_easting",600000],'
    'PARAMETER["false_northing",4000000],UNIT["metre",1,'
    'AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","32140"]]',
)
class SpatialRefTest(unittest.TestCase):
    """Exercises gdal.SpatialReference and CoordTransform against the
    module-level fixtures (srlist, well_known, bad_srlist)."""

    def test01_wkt(self):
        "Testing initialization on valid OGC WKT."
        for s in srlist:
            SpatialReference(s.wkt)

    def test02_bad_wkt(self):
        "Testing initialization on invalid WKT."
        for bad in bad_srlist:
            try:
                srs = SpatialReference(bad)
                srs.validate()
            except (SRSException, GDALException):
                pass
            else:
                # Fix: interpolate the offending WKT -- previously the "%s"
                # placeholder was never filled, so failures printed a
                # literal '%s' instead of the bad string.
                self.fail('Should not have initialized on bad WKT "%s"!' % bad)

    def test03_get_wkt(self):
        "Testing getting the WKT."
        for s in srlist:
            srs = SpatialReference(s.wkt)
            self.assertEqual(s.wkt, srs.wkt)

    def test04_proj(self):
        "Test PROJ.4 import and export."
        proj_parts = [
            '+proj=longlat', '+ellps=WGS84', '+towgs84=0,0,0,0,0,0,0', '+datum=WGS84', '+no_defs'
        ]
        srs1 = SpatialReference(srlist[0].wkt)
        srs2 = SpatialReference(WGS84_proj)
        # Token order in the PROJ.4 string may vary, so compare as sets.
        self.assertTrue(all(part in proj_parts for part in srs1.proj.split()))
        self.assertTrue(all(part in proj_parts for part in srs2.proj.split()))

    def test05_epsg(self):
        "Test EPSG import."
        for s in srlist:
            if s.epsg:
                # The same SRS should be reachable via WKT, an int EPSG
                # code, a string code, and the 'EPSG:n' form.
                srs1 = SpatialReference(s.wkt)
                srs2 = SpatialReference(s.epsg)
                srs3 = SpatialReference(str(s.epsg))
                srs4 = SpatialReference('EPSG:%d' % s.epsg)
                for srs in (srs1, srs2, srs3, srs4):
                    for attr, expected in s.attr:
                        self.assertEqual(expected, srs[attr])

    def test07_boolean_props(self):
        "Testing the boolean properties."
        for s in srlist:
            srs = SpatialReference(s.wkt)
            self.assertEqual(s.projected, srs.projected)
            self.assertEqual(s.geographic, srs.geographic)

    def test08_angular_linear(self):
        "Testing the linear and angular units routines."
        for s in srlist:
            srs = SpatialReference(s.wkt)
            self.assertEqual(s.ang_name, srs.angular_name)
            self.assertEqual(s.lin_name, srs.linear_name)
            self.assertAlmostEqual(s.ang_units, srs.angular_units, 9)
            self.assertAlmostEqual(s.lin_units, srs.linear_units, 9)

    def test09_authority(self):
        "Testing the authority name & code routines."
        for s in srlist:
            if hasattr(s, 'auth'):
                srs = SpatialReference(s.wkt)
                for target, tup in s.auth.items():
                    self.assertEqual(tup[0], srs.auth_name(target))
                    self.assertEqual(tup[1], srs.auth_code(target))

    def test10_attributes(self):
        "Testing the attribute retrieval routines."
        for s in srlist:
            srs = SpatialReference(s.wkt)
            for tup in s.attr:
                att = tup[0]  # Attribute to test
                exp = tup[1]  # Expected result
                self.assertEqual(exp, srs[att])

    def test11_wellknown(self):
        "Testing Well Known Names of Spatial References."
        for s in well_known:
            srs = SpatialReference(s.wk)
            self.assertEqual(s.name, srs.name)
            for tup in s.attrs:
                # 2-tuples are (key, expected); 3-tuples are
                # (key, index, expected) where (key, index) forms the lookup.
                if len(tup) == 2:
                    key = tup[0]
                    exp = tup[1]
                elif len(tup) == 3:
                    key = tup[:2]
                    exp = tup[2]
                self.assertEqual(srs[key], exp)

    def test12_coordtransform(self):
        "Testing initialization of a CoordTransform."
        target = SpatialReference('WGS84')
        CoordTransform(SpatialReference(srlist[0].wkt), target)

    def test13_attr_value(self):
        "Testing the attr_value() method."
        s1 = SpatialReference('WGS84')
        # Keys must be strings, or (string, int) pairs.
        with self.assertRaises(TypeError):
            s1.__getitem__(0)
        with self.assertRaises(TypeError):
            s1.__getitem__(('GEOGCS', 'foo'))
        self.assertEqual('WGS 84', s1['GEOGCS'])
        self.assertEqual('WGS_1984', s1['DATUM'])
        self.assertEqual('EPSG', s1['AUTHORITY'])
        self.assertEqual(4326, int(s1['AUTHORITY', 1]))
        self.assertIsNone(s1['FOOBAR'])

    def test_unicode(self):
        # Non-ASCII characters must survive the WKT round trip (#18044).
        wkt = (
            'PROJCS["DHDN / Soldner 39 Langschoß",'
            'GEOGCS["DHDN",DATUM["Deutsches_Hauptdreiecksnetz",'
            'SPHEROID["Bessel 1841",6377397.155,299.1528128,AUTHORITY["EPSG","7004"]],AUTHORITY["EPSG","6314"]],'
            'PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],'
            'UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],'
            'AUTHORITY["EPSG","4314"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],'
            'PROJECTION["Cassini_Soldner"],PARAMETER["latitude_of_origin",50.66738711],'
            'PARAMETER["central_meridian",6.28935703],PARAMETER["false_easting",0],'
            'PARAMETER["false_northing",0],AUTHORITY["mj10777.de","187939"],AXIS["x",NORTH],AXIS["y",EAST]]'
        )
        srs = SpatialReference(wkt)
        srs_list = [srs, srs.clone()]
        srs.import_wkt(wkt)
        for srs in srs_list:
            self.assertEqual(srs.name, 'DHDN / Soldner 39 Langschoß')
            self.assertEqual(srs.wkt, wkt)
            self.assertIn('Langschoß', srs.pretty_wkt)
            self.assertIn('Langschoß', srs.xml)
# (end of test_srs.py -- license: bsd-3-clause)
# --- next file: blsmit5728/PokeAlarm, PokeAlarm/Events/MonEvent.py ---
# Standard Library Imports
from datetime import datetime
# 3rd Party Imports
# Local Imports
from PokeAlarm import Unknown
from PokeAlarm.Utilities import MonUtils
from PokeAlarm.Utils import (
get_gmaps_link, get_move_type, get_move_damage, get_move_dps,
get_move_duration, get_move_energy, get_pokemon_size,
get_applemaps_link, get_time_as_str, get_seconds_remaining,
get_base_types, get_dist_as_str, get_weather_emoji,
get_type_emoji)
from . import BaseEvent
class MonEvent(BaseEvent):
""" Event representing the discovery of a Pokemon. """
    def __init__(self, data):
        """Creates a new Monster Event based on the given dict.

        ``data`` is a webhook payload; 'encounter_id', 'pokemon_id',
        'disappear_time', 'latitude' and 'longitude' are required, every
        other field falls back to an ``Unknown`` sentinel via
        ``check_for_none``.  (Schema assumed to follow the scanner webhook
        format PokeAlarm supports -- confirm against the producers.)
        """
        super(MonEvent, self).__init__('monster')
        check_for_none = BaseEvent.check_for_none  # local alias for brevity
        # Identification
        self.enc_id = data['encounter_id']
        self.monster_id = int(data['pokemon_id'])
        # Time Left ('disappear_time' is a POSIX timestamp, read as UTC)
        self.disappear_time = datetime.utcfromtimestamp(data['disappear_time'])
        self.time_left = get_seconds_remaining(self.disappear_time)
        # Spawn Data
        self.spawn_start = check_for_none(
            int, data.get('spawn_start'), Unknown.REGULAR)
        self.spawn_end = check_for_none(
            int, data.get('spawn_end'), Unknown.REGULAR)
        self.spawn_verified = check_for_none(bool, data.get('verified'), False)
        # Location
        self.lat = float(data['latitude'])
        self.lng = float(data['longitude'])
        self.distance = Unknown.SMALL  # Completed by Manager
        self.direction = Unknown.TINY  # Completed by Manager
        self.weather_id = check_for_none(
            int, data.get('weather'), Unknown.TINY)
        # Two producer spellings for boosted weather; 0 means "no boost".
        self.boosted_weather_id = check_for_none(
            int, data.get('boosted_weather')
            or data.get('weather_boosted_condition'), 0)
        # Encounter Stats
        self.mon_lvl = check_for_none(
            int, data.get('pokemon_level'), Unknown.TINY)
        self.cp = check_for_none(int, data.get('cp'), Unknown.TINY)
        # IVs
        self.atk_iv = check_for_none(
            int, data.get('individual_attack'), Unknown.TINY)
        self.def_iv = check_for_none(
            int, data.get('individual_defense'), Unknown.TINY)
        self.sta_iv = check_for_none(
            int, data.get('individual_stamina'), Unknown.TINY)
        if Unknown.is_not(self.atk_iv, self.def_iv, self.sta_iv):
            # Percentage of the 45-point maximum (15 + 15 + 15).
            self.iv = \
                100 * (self.atk_iv + self.def_iv + self.sta_iv) / float(45)
        else:
            self.iv = Unknown.SMALL
        # Quick Move
        self.quick_id = check_for_none(
            int, data.get('move_1'), Unknown.TINY)
        self.quick_type = get_move_type(self.quick_id)
        self.quick_damage = get_move_damage(self.quick_id)
        self.quick_dps = get_move_dps(self.quick_id)
        self.quick_duration = get_move_duration(self.quick_id)
        self.quick_energy = get_move_energy(self.quick_id)
        # Charge Move
        self.charge_id = check_for_none(
            int, data.get('move_2'), Unknown.TINY)
        self.charge_type = get_move_type(self.charge_id)
        self.charge_damage = get_move_damage(self.charge_id)
        self.charge_dps = get_move_dps(self.charge_id)
        self.charge_duration = get_move_duration(self.charge_id)
        self.charge_energy = get_move_energy(self.charge_id)
        # Catch Probs
        self.base_catch = check_for_none(
            float, data.get('base_catch'), Unknown.TINY)
        self.great_catch = check_for_none(
            float, data.get('great_catch'), Unknown.TINY)
        self.ultra_catch = check_for_none(
            float, data.get('ultra_catch'), Unknown.TINY)
        # Attack Rating
        self.atk_grade = check_for_none(
            str, data.get('atk_grade'), Unknown.TINY)
        self.def_grade = check_for_none(
            str, data.get('def_grade'), Unknown.TINY)
        # Cosmetic
        self.gender = MonUtils.get_gender_sym(
            check_for_none(int, data.get('gender'), Unknown.TINY))
        self.height = check_for_none(float, data.get('height'), Unknown.SMALL)
        self.weight = check_for_none(float, data.get('weight'), Unknown.SMALL)
        if Unknown.is_not(self.height, self.weight):
            # Size bucket derived from height/weight vs. species baseline.
            self.size_id = get_pokemon_size(
                self.monster_id, self.height, self.weight)
        else:
            self.size_id = Unknown.SMALL
        self.types = get_base_types(self.monster_id)
        # Form
        self.form_id = check_for_none(int, data.get('form'), 0)
        # Costume
        self.costume_id = check_for_none(int, data.get('costume'), 0)
        # Correct this later
        self.name = self.monster_id
        self.geofence = Unknown.REGULAR
        self.custom_dts = {}
    def generate_dts(self, locale, timezone, units):
        """Return a dict of dynamic text substitutions (DTS) for this event.

        Args:
            locale: locale object used to resolve localized display names
                (pokemon, moves, types, weather, forms, costumes, sizes).
            timezone: timezone used to render the disappear time.
            units: distance units used for the 'distance' substitution.

        Returns:
            dict: ``self.custom_dts`` merged with the generated
            substitutions; generated entries win on key clashes because
            the generated dict is applied last via ``update``.
        """
        # Pre-resolve values that are used by several entries below.
        time = get_time_as_str(self.disappear_time, timezone)
        form_name = locale.get_form_name(self.monster_id, self.form_id)
        costume_name = locale.get_costume_name(
            self.monster_id, self.costume_id)
        weather_name = locale.get_weather_name(self.weather_id)
        boosted_weather_name = locale.get_weather_name(self.boosted_weather_id)
        type1 = locale.get_type_name(self.types[0])
        type2 = locale.get_type_name(self.types[1])
        dts = self.custom_dts.copy()
        dts.update({
            # Identification
            'encounter_id': self.enc_id,
            'mon_name': locale.get_pokemon_name(self.monster_id),
            'mon_id': self.monster_id,
            'mon_id_3': "{:03}".format(self.monster_id),
            # Time Remaining
            'time_left': time[0],
            '12h_time': time[1],
            '24h_time': time[2],
            # Spawn Data
            'spawn_start': self.spawn_start,
            'spawn_end': self.spawn_end,
            'spawn_verified': self.spawn_verified,
            # Location
            'lat': self.lat,
            'lng': self.lng,
            'lat_5': "{:.5f}".format(self.lat),
            'lng_5': "{:.5f}".format(self.lng),
            'distance': (
                get_dist_as_str(self.distance, units)
                if Unknown.is_not(self.distance) else Unknown.SMALL),
            'direction': self.direction,
            'gmaps': get_gmaps_link(self.lat, self.lng),
            'applemaps': get_applemaps_link(self.lat, self.lng),
            'geofence': self.geofence,
            # Weather
            'weather_id': self.weather_id,
            'weather': weather_name,
            'weather_or_empty': Unknown.or_empty(weather_name),
            'weather_emoji': get_weather_emoji(self.weather_id),
            'boosted_weather_id': self.boosted_weather_id,
            'boosted_weather': boosted_weather_name,
            'boosted_weather_or_empty': (
                '' if self.boosted_weather_id == 0
                else Unknown.or_empty(boosted_weather_name)),
            'boosted_weather_emoji':
                get_weather_emoji(self.boosted_weather_id),
            'boosted_or_empty': locale.get_boosted_text() if \
                Unknown.is_not(self.boosted_weather_id) and
                self.boosted_weather_id != 0 else '',
            # Encounter Stats
            'mon_lvl': self.mon_lvl,
            'cp': self.cp,
            # IVs
            'iv_0': (
                "{:.0f}".format(self.iv) if Unknown.is_not(self.iv)
                else Unknown.TINY),
            'iv': (
                "{:.1f}".format(self.iv) if Unknown.is_not(self.iv)
                else Unknown.SMALL),
            'iv_2': (
                "{:.2f}".format(self.iv) if Unknown.is_not(self.iv)
                else Unknown.SMALL),
            'atk': self.atk_iv,
            'def': self.def_iv,
            'sta': self.sta_iv,
            # Type
            'type1': type1,
            'type1_or_empty': Unknown.or_empty(type1),
            'type1_emoji': Unknown.or_empty(get_type_emoji(self.types[0])),
            'type2': type2,
            'type2_or_empty': Unknown.or_empty(type2),
            'type2_emoji': Unknown.or_empty(get_type_emoji(self.types[1])),
            'types': (
                "{}/{}".format(type1, type2)
                if Unknown.is_not(type2) else type1),
            'types_emoji': (
                "{}{}".format(
                    get_type_emoji(self.types[0]),
                    get_type_emoji(self.types[1]))
                if Unknown.is_not(type2) else get_type_emoji(self.types[0])),
            # Form
            'form': form_name,
            'form_or_empty': Unknown.or_empty(form_name),
            'form_id': self.form_id,
            'form_id_3': "{:03d}".format(self.form_id),
            # Costume
            'costume': costume_name,
            'costume_or_empty': Unknown.or_empty(costume_name),
            'costume_id': self.costume_id,
            'costume_id_3': "{:03d}".format(self.costume_id),
            # Quick Move
            'quick_move': locale.get_move_name(self.quick_id),
            'quick_id': self.quick_id,
            'quick_type_id': self.quick_type,
            'quick_type': locale.get_type_name(self.quick_type),
            'quick_type_emoji': get_type_emoji(self.quick_type),
            'quick_damage': self.quick_damage,
            'quick_dps': self.quick_dps,
            'quick_duration': self.quick_duration,
            'quick_energy': self.quick_energy,
            # Charge Move
            'charge_move': locale.get_move_name(self.charge_id),
            'charge_id': self.charge_id,
            'charge_type_id': self.charge_type,
            'charge_type': locale.get_type_name(self.charge_type),
            'charge_type_emoji': get_type_emoji(self.charge_type),
            'charge_damage': self.charge_damage,
            'charge_dps': self.charge_dps,
            'charge_duration': self.charge_duration,
            'charge_energy': self.charge_energy,
            # Cosmetic
            'gender': self.gender,
            'height_0': (
                "{:.0f}".format(self.height) if Unknown.is_not(self.height)
                else Unknown.TINY),
            'height': (
                "{:.1f}".format(self.height) if Unknown.is_not(self.height)
                else Unknown.SMALL),
            'height_2': (
                "{:.2f}".format(self.height) if Unknown.is_not(self.height)
                else Unknown.SMALL),
            'weight_0': (
                "{:.0f}".format(self.weight) if Unknown.is_not(self.weight)
                else Unknown.TINY),
            'weight': (
                "{:.1f}".format(self.weight) if Unknown.is_not(self.weight)
                else Unknown.SMALL),
            'weight_2': (
                "{:.2f}".format(self.weight) if Unknown.is_not(self.weight)
                else Unknown.SMALL),
            'size': locale.get_size_name(self.size_id),
            # Attack rating
            'atk_grade': (
                Unknown.or_empty(self.atk_grade, Unknown.TINY)),
            'def_grade': (
                Unknown.or_empty(self.def_grade, Unknown.TINY)),
            # Catch Prob (stored as 0..1 fractions, rendered as percentages)
            'base_catch_0': (
                "{:.0f}".format(self.base_catch * 100)
                if Unknown.is_not(self.base_catch)
                else Unknown.TINY),
            'base_catch': (
                "{:.1f}".format(self.base_catch * 100)
                if Unknown.is_not(self.base_catch)
                else Unknown.SMALL),
            'base_catch_2': (
                "{:.2f}".format(self.base_catch * 100)
                if Unknown.is_not(self.base_catch)
                else Unknown.SMALL),
            'great_catch_0': (
                "{:.0f}".format(self.great_catch * 100)
                if Unknown.is_not(self.great_catch)
                else Unknown.TINY),
            'great_catch': (
                "{:.1f}".format(self.great_catch * 100)
                if Unknown.is_not(self.great_catch)
                else Unknown.SMALL),
            'great_catch_2': (
                "{:.2f}".format(self.great_catch * 100)
                if Unknown.is_not(self.great_catch)
                else Unknown.SMALL),
            'ultra_catch_0': (
                "{:.0f}".format(self.ultra_catch * 100)
                if Unknown.is_not(self.ultra_catch)
                else Unknown.TINY),
            'ultra_catch': (
                "{:.1f}".format(self.ultra_catch * 100)
                if Unknown.is_not(self.ultra_catch)
                else Unknown.SMALL),
            'ultra_catch_2': (
                "{:.2f}".format(self.ultra_catch * 100)
                if Unknown.is_not(self.ultra_catch)
                else Unknown.SMALL),
            # Misc
            'big_karp': (
                'big' if self.monster_id == 129 and Unknown.is_not(self.weight)
                and self.weight >= 13.13 else ''),
            'tiny_rat': (
                'tiny' if self.monster_id == 19 and Unknown.is_not(self.weight)
                and self.weight <= 2.41 else '')
        })
        return dts
| agpl-3.0 |
robbymeals/scikit-learn | sklearn/externals/joblib/numpy_pickle.py | 194 | 19716 | """
Utilities for fast persistence of big data, with optional compression.
"""
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# Copyright (c) 2009 Gael Varoquaux
# License: BSD Style, 3 clauses.
import pickle
import traceback
import sys
import os
import zlib
import warnings
import struct
import codecs
from ._compat import _basestring
from io import BytesIO
# True when running under Python 3; selects the pickler implementations below.
PY3 = sys.version_info[0] >= 3
if PY3:
    # Use the pure-Python (de)pickler classes: unlike the C-accelerated
    # ones they expose a per-class ``dispatch`` table that can be copied
    # and extended by the subclasses defined later in this module.
    Unpickler = pickle._Unpickler
    Pickler = pickle._Pickler
    def asbytes(s):
        """Return *s* as bytes, encoding text via latin1 when needed."""
        if isinstance(s, bytes):
            return s
        return s.encode('latin1')
else:
    Unpickler = pickle.Unpickler
    Pickler = pickle.Pickler
    # On Python 2, str is already the bytes type.
    asbytes = str
def hex_str(an_int):
    """Return the hexadecimal string representation of *an_int*."""
    # format() with the '#x' spec is equivalent to '{0:#x}'.format(...).
    return format(an_int, '#x')
# One megabyte; used to convert ``cache_size`` (given in MB) into bytes.
_MEGA = 2 ** 20
# Compressed pickle header format: _ZFILE_PREFIX followed by _MAX_LEN
# bytes which contains the length of the zlib compressed data as an
# hexadecimal string. For example: 'ZF0x139 '
_ZFILE_PREFIX = asbytes('ZF')
# Width of the hex-encoded length field; wide enough for 64-bit sizes.
_MAX_LEN = len(hex_str(2 ** 64))
###############################################################################
# Compressed file with Zlib
def _read_magic(file_handle):
    """Return the leading bytes (magic signature) of *file_handle*.

    The handle is rewound afterwards, since unpickling must start from
    the beginning of the file.
    """
    prefix_size = len(_ZFILE_PREFIX)
    magic = file_handle.read(prefix_size)
    file_handle.seek(0)
    return magic
def read_zfile(file_handle):
    """Read the z-file and return the content as a string
    Z-files are raw data compressed with zlib used internally by joblib
    for persistence. Backward compatibility is not guaranteed. Do not
    use for external purposes.
    """
    file_handle.seek(0)
    assert _read_magic(file_handle) == _ZFILE_PREFIX, \
        "File does not have the right magic"
    # Header layout: magic prefix, then the payload length as a
    # left-justified hexadecimal string (see write_zfile).
    header_length = len(_ZFILE_PREFIX) + _MAX_LEN
    length = file_handle.read(header_length)
    length = length[len(_ZFILE_PREFIX):]
    length = int(length, 16)
    # With python2 and joblib version <= 0.8.4 compressed pickle header is one
    # character wider so we need to ignore an additional space if present.
    # Note: the first byte of the zlib data is guaranteed not to be a
    # space according to
    # https://tools.ietf.org/html/rfc6713#section-2.1
    next_byte = file_handle.read(1)
    if next_byte != b' ':
        # The zlib compressed data has started and we need to go back
        # one byte
        file_handle.seek(header_length)
    # We use the known length of the data to tell Zlib the size of the
    # buffer to allocate.
    data = zlib.decompress(file_handle.read(), 15, length)
    assert len(data) == length, (
        "Incorrect data length while decompressing %s."
        "The file could be corrupted." % file_handle)
    return data
def write_zfile(file_handle, data, compress=1):
    """Write the data in the given file as a Z-file.
    Z-files are raw data compressed with zlib used internally by joblib
    for persistence. Backward compatibility is not guaranteed. Do not
    use for external purposes.
    """
    # Magic prefix first, so _read_magic/read_zfile can identify the file.
    file_handle.write(_ZFILE_PREFIX)
    length = hex_str(len(data))
    # Store the length of the data
    file_handle.write(asbytes(length.ljust(_MAX_LEN)))
    file_handle.write(zlib.compress(asbytes(data), compress))
###############################################################################
# Utility objects for persistence.
class NDArrayWrapper(object):
    """An object to be persisted instead of numpy arrays.

    The only thing this object does, is to carry the filename in which
    the array has been persisted, and the array subclass.
    """
    def __init__(self, filename, subclass, allow_mmap=True):
        """Store the useful information for later.

        filename: basename of the companion .npy file on disk.
        subclass: ndarray subclass to reconstruct at load time.
        allow_mmap: whether np.load may memory-map the file.
        """
        self.filename = filename
        self.subclass = subclass
        self.allow_mmap = allow_mmap
    def read(self, unpickler):
        """Reconstruct the array from its companion file on disk."""
        filename = os.path.join(unpickler._dirname, self.filename)
        # use getattr instead of self.allow_mmap to ensure backward compat
        # with NDArrayWrapper instances pickled with joblib < 0.9.0
        allow_mmap = getattr(self, 'allow_mmap', True)
        memmap_kwargs = ({} if not allow_mmap
                         else {'mmap_mode': unpickler.mmap_mode})
        array = unpickler.np.load(filename, **memmap_kwargs)
        # Reconstruct subclasses. This does not work with old
        # versions of numpy
        if (hasattr(array, '__array_prepare__')
                and self.subclass not in (unpickler.np.ndarray,
                                          unpickler.np.memmap)):
            # We need to reconstruct another subclass
            new_array = unpickler.np.core.multiarray._reconstruct(
                self.subclass, (0,), 'b')
            new_array.__array_prepare__(array)
            array = new_array
        return array
class ZNDArrayWrapper(NDArrayWrapper):
    """An object to be persisted instead of numpy arrays.
    This object stores the Zfile filename in which
    the data array has been persisted, and the meta information to
    retrieve it.
    The reason that we store the raw buffer data of the array and
    the meta information, rather than array representation routine
    (tostring) is that it enables us to use completely the strided
    model to avoid memory copies (a and a.T store as fast). In
    addition saving the heavy information separately can avoid
    creating large temporary buffers when unpickling data with
    large arrays.
    """
    def __init__(self, filename, init_args, state):
        "Store the useful information for later"
        # init_args/state mirror ndarray.__reduce__() output, minus the
        # raw data buffer which lives in the z-file.
        self.filename = filename
        self.state = state
        self.init_args = init_args
    def read(self, unpickler):
        "Reconstruct the array from the meta-information and the z-file"
        # Here we are simply reproducing the unpickling mechanism for numpy
        # arrays
        filename = os.path.join(unpickler._dirname, self.filename)
        array = unpickler.np.core.multiarray._reconstruct(*self.init_args)
        with open(filename, 'rb') as f:
            data = read_zfile(f)
        state = self.state + (data,)
        array.__setstate__(state)
        return array
###############################################################################
# Pickler classes
class NumpyPickler(Pickler):
    """A pickler to persist big data efficiently.
    The main features of this object are:
    * persistence of numpy arrays in separate .npy files, for which
      I/O is fast.
    * optional compression using Zlib, with a special care on avoiding
      temporaries.
    """
    # Private copy so the save_bytes registration below does not leak
    # into the base Pickler dispatch table.
    dispatch = Pickler.dispatch.copy()
    def __init__(self, filename, compress=0, cache_size=10):
        """Open the target file (or an in-memory buffer when compressing).
        cache_size is an order of magnitude, in megabytes, below which
        arrays are pickled inline rather than stored in separate files.
        """
        self._filename = filename
        self._filenames = [filename, ]
        self.cache_size = cache_size
        self.compress = compress
        if not self.compress:
            self.file = open(filename, 'wb')
        else:
            # Buffered in memory; written compressed to disk by close().
            self.file = BytesIO()
        # Count the number of npy files that we have created:
        self._npy_counter = 0
        highest_python_2_3_compatible_protocol = 2
        Pickler.__init__(self, self.file,
                         protocol=highest_python_2_3_compatible_protocol)
        # delayed import of numpy, to avoid tight coupling
        try:
            import numpy as np
        except ImportError:
            np = None
        self.np = np
    def _write_array(self, array, filename):
        """Persist *array* and return (placeholder, filename used)."""
        if not self.compress:
            self.np.save(filename, array)
            # Object arrays cannot be memory-mapped back from disk.
            allow_mmap = not array.dtype.hasobject
            container = NDArrayWrapper(os.path.basename(filename),
                                       type(array),
                                       allow_mmap=allow_mmap)
        else:
            filename += '.z'
            # Efficient compressed storage:
            # The meta data is stored in the container, and the core
            # numerics in a z-file
            _, init_args, state = array.__reduce__()
            # the last entry of 'state' is the data itself
            with open(filename, 'wb') as zfile:
                write_zfile(zfile, state[-1], compress=self.compress)
            state = state[:-1]
            container = ZNDArrayWrapper(os.path.basename(filename),
                                        init_args, state)
        return container, filename
    def save(self, obj):
        """ Subclass the save method, to save ndarray subclasses in npy
        files, rather than pickling them. Of course, this is a
        total abuse of the Pickler class.
        """
        if self.np is not None and type(obj) in (self.np.ndarray,
                self.np.matrix, self.np.memmap):
            size = obj.size * obj.itemsize
            if self.compress and size < self.cache_size * _MEGA:
                # When compressing, as we are not writing directly to the
                # disk, it is more efficient to use standard pickling
                if type(obj) is self.np.memmap:
                    # Pickling doesn't work with memmaped arrays
                    obj = self.np.asarray(obj)
                return Pickler.save(self, obj)
            self._npy_counter += 1
            try:
                filename = '%s_%02i.npy' % (self._filename,
                                            self._npy_counter)
                # This converts the array in a container
                obj, filename = self._write_array(obj, filename)
                self._filenames.append(filename)
            except:
                # NOTE(review): bare except deliberately degrades to plain
                # pickling of the array on any failure.
                self._npy_counter -= 1
                # XXX: We should have a logging mechanism
                print('Failed to save %s to .npy file:\n%s' % (
                    type(obj),
                    traceback.format_exc()))
        return Pickler.save(self, obj)
    def save_bytes(self, obj):
        """Strongly inspired from python 2.7 pickle.Pickler.save_string"""
        if self.bin:
            n = len(obj)
            if n < 256:
                self.write(pickle.SHORT_BINSTRING + asbytes(chr(n)) + obj)
            else:
                self.write(pickle.BINSTRING + struct.pack("<i", n) + obj)
            self.memoize(obj)
        else:
            Pickler.save_bytes(self, obj)
    # We need to override save_bytes for python 3. We are using
    # protocol=2 for python 2/3 compatibility and save_bytes for
    # protocol < 3 ends up creating a unicode string which is very
    # inefficient resulting in pickles up to 1.5 times the size you
    # would get with protocol=4 or protocol=2 with python 2.7. This
    # cause severe slowdowns in joblib.dump and joblib.load. See
    # https://github.com/joblib/joblib/issues/194 for more details.
    if PY3:
        dispatch[bytes] = save_bytes
    def close(self):
        """Flush the in-memory buffer to a compressed z-file on disk."""
        if self.compress:
            with open(self._filename, 'wb') as zfile:
                write_zfile(zfile, self.file.getvalue(), self.compress)
class NumpyUnpickler(Unpickler):
    """A subclass of the Unpickler to unpickle our numpy pickles.
    Replaces NDArrayWrapper placeholders found in the pickle stream by
    the arrays they refer to (loaded from companion .npy / .z files).
    """
    dispatch = Unpickler.dispatch.copy()
    def __init__(self, filename, file_handle, mmap_mode=None):
        """Prepare to unpickle from *file_handle*; companion array files
        are resolved relative to the directory of *filename*.
        """
        self._filename = os.path.basename(filename)
        self._dirname = os.path.dirname(filename)
        self.mmap_mode = mmap_mode
        self.file_handle = self._open_pickle(file_handle)
        Unpickler.__init__(self, self.file_handle)
        try:
            import numpy as np
        except ImportError:
            np = None
        self.np = np
        if PY3:
            self.encoding = 'bytes'
    # Python 3.2 and 3.3 do not support encoding=bytes so I copied
    # _decode_string, load_string, load_binstring and
    # load_short_binstring from python 3.4 to emulate this
    # functionality
    if PY3 and sys.version_info.minor < 4:
        def _decode_string(self, value):
            """Copied from python 3.4 pickle.Unpickler._decode_string"""
            # Used to allow strings from Python 2 to be decoded either as
            # bytes or Unicode strings. This should be used only with the
            # STRING, BINSTRING and SHORT_BINSTRING opcodes.
            if self.encoding == "bytes":
                return value
            else:
                return value.decode(self.encoding, self.errors)
        def load_string(self):
            """Copied from python 3.4 pickle.Unpickler.load_string"""
            data = self.readline()[:-1]
            # Strip outermost quotes
            if len(data) >= 2 and data[0] == data[-1] and data[0] in b'"\'':
                data = data[1:-1]
            else:
                raise pickle.UnpicklingError(
                    "the STRING opcode argument must be quoted")
            self.append(self._decode_string(codecs.escape_decode(data)[0]))
        dispatch[pickle.STRING[0]] = load_string
        def load_binstring(self):
            """Copied from python 3.4 pickle.Unpickler.load_binstring"""
            # Deprecated BINSTRING uses signed 32-bit length
            len, = struct.unpack('<i', self.read(4))
            if len < 0:
                raise pickle.UnpicklingError(
                    "BINSTRING pickle has negative byte count")
            data = self.read(len)
            self.append(self._decode_string(data))
        dispatch[pickle.BINSTRING[0]] = load_binstring
        def load_short_binstring(self):
            """Copied from python 3.4 pickle.Unpickler.load_short_binstring"""
            len = self.read(1)[0]
            data = self.read(len)
            self.append(self._decode_string(data))
        dispatch[pickle.SHORT_BINSTRING[0]] = load_short_binstring
    def _open_pickle(self, file_handle):
        # Hook for subclasses (e.g. decompression); identity by default.
        return file_handle
    def load_build(self):
        """ This method is called to set the state of a newly created
        object.
        We capture it to replace our place-holder objects,
        NDArrayWrapper, by the array we are interested in. We
        replace them directly in the stack of pickler.
        """
        Unpickler.load_build(self)
        if isinstance(self.stack[-1], NDArrayWrapper):
            if self.np is None:
                raise ImportError('Trying to unpickle an ndarray, '
                                  "but numpy didn't import correctly")
            nd_array_wrapper = self.stack.pop()
            array = nd_array_wrapper.read(self)
            self.stack.append(array)
    # Be careful to register our new method.
    if PY3:
        dispatch[pickle.BUILD[0]] = load_build
    else:
        dispatch[pickle.BUILD] = load_build
class ZipNumpyUnpickler(NumpyUnpickler):
    """Unpickler variant that transparently decompresses a z-file.

    Memory-mapping is meaningless for compressed storage, so the
    ``mmap_mode`` of the base class is forced to None.
    """
    def __init__(self, filename, file_handle):
        super(ZipNumpyUnpickler, self).__init__(
            filename, file_handle, mmap_mode=None)
    def _open_pickle(self, file_handle):
        # Decompress the whole payload once and hand the base class an
        # in-memory stream over it.
        decompressed = read_zfile(file_handle)
        return BytesIO(decompressed)
###############################################################################
# Utility functions
def dump(value, filename, compress=0, cache_size=100):
    """Fast persistence of an arbitrary Python object into a file, with
    dedicated storage for numpy arrays.
    Parameters
    -----------
    value: any Python object
        The object to store to disk
    filename: string
        The name of the file in which it is to be stored
    compress: integer from 0 to 9, optional
        Optional compression level for the data. 0 is no compression.
        Higher means more compression, but also slower read and
        write times. Using a value of 3 is often a good compromise.
        See the notes for more details.
    cache_size: positive number, optional
        Fixes the order of magnitude (in megabytes) of the cache used
        for in-memory compression. Note that this is just an order of
        magnitude estimate and that for big arrays, the code will go
        over this value at dump and at load time.
    Returns
    -------
    filenames: list of strings
        The list of file names in which the data is stored. If
        compress is false, each array is stored in a different file.
    See Also
    --------
    joblib.load : corresponding loader
    Notes
    -----
    Memmapping on load cannot be used for compressed files. Thus
    using compression can significantly slow down loading. In
    addition, compressed files take extra memory during
    dump and load.
    """
    if compress is True:
        # By default, if compress is enabled, we want to be using 3 by
        # default
        compress = 3
    if not isinstance(filename, _basestring):
        # People keep inverting arguments, and the resulting error is
        # incomprehensible
        raise ValueError(
            'Second argument should be a filename, %s (type %s) was given'
            % (filename, type(filename))
        )
    try:
        pickler = NumpyPickler(filename, compress=compress,
                               cache_size=cache_size)
        pickler.dump(value)
        pickler.close()
    finally:
        # 'pickler' may be missing from locals() if its constructor raised.
        if 'pickler' in locals() and hasattr(pickler, 'file'):
            pickler.file.flush()
            pickler.file.close()
    return pickler._filenames
def load(filename, mmap_mode=None):
    """Reconstruct a Python object from a file persisted with joblib.dump.
    Parameters
    -----------
    filename: string
        The name of the file from which to load the object
    mmap_mode: {None, 'r+', 'r', 'w+', 'c'}, optional
        If not None, the arrays are memory-mapped from the disk. This
        mode has no effect for compressed files. Note that in this
        case the reconstructed object might no longer match exactly
        the originally pickled object.
    Returns
    -------
    result: any Python object
        The object stored in the file.
    See Also
    --------
    joblib.dump : function to save an object
    Notes
    -----
    This function can load numpy array files saved separately during the
    dump. If the mmap_mode argument is given, it is passed to np.load and
    arrays are loaded as memmaps. As a consequence, the reconstructed
    object might not match the original pickled object. Note that if the
    file was saved with compression, the arrays cannot be memmaped.
    """
    with open(filename, 'rb') as file_handle:
        # We are careful to open the file handle early and keep it open to
        # avoid race-conditions on renames. That said, if data are stored in
        # companion files, moving the directory will create a race when
        # joblib tries to access the companion files.
        if _read_magic(file_handle) == _ZFILE_PREFIX:
            if mmap_mode is not None:
                warnings.warn('file "%(filename)s" appears to be a zip, '
                              'ignoring mmap_mode "%(mmap_mode)s" flag passed'
                              % locals(), Warning, stacklevel=2)
            unpickler = ZipNumpyUnpickler(filename, file_handle=file_handle)
        else:
            unpickler = NumpyUnpickler(filename, file_handle=file_handle,
                                       mmap_mode=mmap_mode)
        try:
            obj = unpickler.load()
        finally:
            # Close the (possibly decompressed, in-memory) pickle stream.
            if hasattr(unpickler, 'file_handle'):
                unpickler.file_handle.close()
        return obj
| bsd-3-clause |
rahushen/ansible | lib/ansible/modules/windows/win_user.py | 52 | 5133 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2014, Matt Martz <matt@sivel.net>, and others
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = r'''
---
module: win_user
version_added: "1.7"
short_description: Manages local Windows user accounts
description:
- Manages local Windows user accounts.
- For non-Windows targets, use the M(user) module instead.
options:
name:
description:
- Name of the user to create, remove or modify.
required: yes
fullname:
description:
- Full name of the user.
version_added: "1.9"
description:
description:
- Description of the user.
version_added: "1.9"
password:
description:
- Optionally set the user's password to this (plain text) value.
update_password:
description:
- C(always) will update passwords if they differ. C(on_create) will
only set the password for newly created users.
choices: [ always, on_create ]
default: always
version_added: "1.9"
password_expired:
description:
- C(yes) will require the user to change their password at next login.
- C(no) will clear the expired password flag.
type: bool
version_added: "1.9"
password_never_expires:
description:
- C(yes) will set the password to never expire.
- C(no) will allow the password to expire.
type: bool
version_added: "1.9"
user_cannot_change_password:
description:
- C(yes) will prevent the user from changing their password.
- C(no) will allow the user to change their password.
type: bool
version_added: "1.9"
account_disabled:
description:
- C(yes) will disable the user account.
- C(no) will clear the disabled flag.
type: bool
version_added: "1.9"
account_locked:
description:
- C(no) will unlock the user account if locked.
choices: [ 'no' ]
version_added: "1.9"
groups:
description:
      - Adds or removes the user from this comma-separated list of groups,
depending on the value of I(groups_action). When I(groups_action) is
C(replace) and I(groups) is set to the empty string ('groups='), the
user is removed from all groups.
version_added: "1.9"
groups_action:
description:
- If C(add), the user is added to each group in I(groups) where not
already a member.
- If C(replace), the user is added as a member of each group in
I(groups) and removed from any other groups.
- If C(remove), the user is removed from each group in I(groups).
choices: [ add, replace, remove ]
default: replace
version_added: "1.9"
state:
description:
- When C(absent), removes the user account if it exists.
- When C(present), creates or updates the user account.
- When C(query) (new in 1.9), retrieves the user account details
without making any changes.
choices: [ absent, present, query ]
default: present
notes:
- For non-Windows targets, use the M(user) module instead.
author:
- Paul Durivage (@angstwad)
- Chris Church (@cchurch)
'''
EXAMPLES = r'''
- name: Ensure user bob is present
win_user:
name: bob
password: B0bP4ssw0rd
state: present
groups:
- Users
- name: Ensure user bob is absent
win_user:
name: bob
state: absent
'''
RETURN = r'''
account_disabled:
description: Whether the user is disabled.
returned: user exists
type: bool
sample: false
account_locked:
description: Whether the user is locked.
returned: user exists
type: bool
sample: false
description:
description: The description set for the user.
returned: user exists
type: str
sample: Username for test
fullname:
description: The full name set for the user.
returned: user exists
type: str
sample: Test Username
groups:
description: A list of groups and their ADSI path the user is a member of.
returned: user exists
type: list
sample: [
{
"name": "Administrators",
"path": "WinNT://WORKGROUP/USER-PC/Administrators"
}
]
name:
description: The name of the user
returned: always
type: str
sample: username
password_expired:
description: Whether the password is expired.
returned: user exists
type: bool
sample: false
password_never_expires:
description: Whether the password is set to never expire.
returned: user exists
type: bool
sample: true
path:
description: The ADSI path for the user.
returned: user exists
type: str
sample: "WinNT://WORKGROUP/USER-PC/username"
sid:
description: The SID for the user.
returned: user exists
type: str
sample: S-1-5-21-3322259488-2828151810-3939402796-1001
user_cannot_change_password:
description: Whether the user can change their own password.
returned: user exists
type: bool
sample: false
'''
| gpl-3.0 |
fiunchinho/ansible-modules-extras | system/open_iscsi.py | 89 | 11683 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Serge van Ginderachter <serge@vanginderachter.be>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: open_iscsi
author: "Serge van Ginderachter (@srvg)"
version_added: "1.4"
short_description: Manage iscsi targets with open-iscsi
description:
- Discover targets on given portal, (dis)connect targets, mark targets to
manually or auto start, return device nodes of connected targets.
requirements:
- open_iscsi library and tools (iscsiadm)
options:
portal:
required: false
aliases: [ip]
description:
- the ip address of the iscsi target
port:
required: false
default: 3260
description:
- the port on which the iscsi target process listens
target:
required: false
aliases: [name, targetname]
description:
- the iscsi target name
login:
required: false
choices: [true, false]
description:
- whether the target node should be connected
node_auth:
required: false
default: CHAP
description:
- discovery.sendtargets.auth.authmethod
node_user:
required: false
description:
- discovery.sendtargets.auth.username
node_pass:
required: false
description:
- discovery.sendtargets.auth.password
auto_node_startup:
aliases: [automatic]
required: false
choices: [true, false]
description:
- whether the target node should be automatically connected at startup
discover:
required: false
choices: [true, false]
description:
- whether the list of target nodes on the portal should be
(re)discovered and added to the persistent iscsi database.
        Keep in mind that iscsiadm discovery resets configuration, like node.startup
        to manual, hence combined with auto_node_startup=yes will always return
a changed state.
show_nodes:
required: false
choices: [true, false]
description:
- whether the list of nodes in the persistent iscsi database should be
returned by the module
'''
EXAMPLES = '''
# perform a discovery on 10.1.2.3 and show available target nodes
- open_iscsi: show_nodes=yes discover=yes portal=10.1.2.3
# discover targets on portal and login to the one available
# (only works if exactly one target is exported to the initiator)
- open_iscsi: portal={{iscsi_target}} login=yes discover=yes
# description: connect to the named target, after updating the local
# persistent database (cache)
- open_iscsi: login=yes target=iqn.1986-03.com.sun:02:f8c1f9e0-c3ec-ec84-c9c9-8bfb0cd5de3d
# description: disconnect from the cached named target
- open_iscsi: login=no target=iqn.1986-03.com.sun:02:f8c1f9e0-c3ec-ec84-c9c9-8bfb0cd5de3d
'''
import glob
import time
ISCSIADM = 'iscsiadm'
def compare_nodelists(l1, l2):
    """Return True if the two node-name lists contain the same elements.

    Compares sorted copies so the caller's lists are not mutated (the
    previous implementation sorted both arguments in place).
    """
    return sorted(l1) == sorted(l2)
def iscsi_get_cached_nodes(module, portal=None):
    """Return target names from the persistent iscsi node database,
    optionally restricted to nodes whose portal address equals *portal*.
    """
    cmd = '%s --mode node' % iscsiadm_cmd
    (rc, out, err) = module.run_command(cmd)
    if rc == 0:
        lines = out.splitlines()
        nodes = []
        for line in lines:
            # line format is "ip:port,target_portal_group_tag targetname"
            parts = line.split()
            # NOTE(review): only >2 fields is rejected; a line with fewer
            # than 2 fields would raise IndexError on parts[1] below.
            if len(parts) > 2:
                module.fail_json(msg='error parsing output', cmd=cmd)
            target = parts[1]
            parts = parts[0].split(':')
            target_portal = parts[0]
            if portal is None or portal == target_portal:
                nodes.append(target)
    # older versions of scsiadm don't have nice return codes
    # for newer versions see iscsiadm(8); also usr/iscsiadm.c for details
    # err can contain [N|n]o records...
    elif rc == 21 or (rc == 255 and "o records found" in err):
        nodes = []
    else:
        module.fail_json(cmd=cmd, rc=rc, msg=err)
    return nodes
def iscsi_discover(module, portal, port):
    """Run sendtargets discovery against portal:port, refreshing the
    persistent node database."""
    command = '%s --mode discovery --type sendtargets --portal %s:%s' \
              % (iscsiadm_cmd, portal, port)
    rc, out, err = module.run_command(command)
    if rc > 0:
        module.fail_json(cmd=command, rc=rc, msg=err)
def target_loggedon(module, target):
    """Return True when an active iscsi session exists for *target*."""
    cmd = '%s --mode session' % iscsiadm_cmd
    rc, out, err = module.run_command(cmd)
    # rc 21 means "no active sessions" for modern iscsiadm.
    if rc == 21:
        return False
    if rc == 0:
        return target in out
    module.fail_json(cmd=cmd, rc=rc, msg=err)
def target_login(module, target):
    """Log in to *target*, first configuring CHAP session credentials
    when a node_user is supplied.
    """
    node_auth = module.params['node_auth']
    node_user = module.params['node_user']
    node_pass = module.params['node_pass']
    if node_user:
        params = [('node.session.auth.authmethod', node_auth),
                  ('node.session.auth.username', node_user),
                  ('node.session.auth.password', node_pass)]
        for (name, value) in params:
            # NOTE(review): the password is interpolated into the command
            # line and may be visible in the process list while it runs.
            cmd = '%s --mode node --targetname %s --op=update --name %s --value %s' % (iscsiadm_cmd, target, name, value)
            (rc, out, err) = module.run_command(cmd)
            if rc > 0:
                module.fail_json(cmd=cmd, rc=rc, msg=err)
    cmd = '%s --mode node --targetname %s --login' % (iscsiadm_cmd, target)
    (rc, out, err) = module.run_command(cmd)
    if rc > 0:
        module.fail_json(cmd=cmd, rc=rc, msg=err)
def target_logout(module, target):
    """Close the iscsi session for *target* (iscsiadm --logout)."""
    logout_cmd = '%s --mode node --targetname %s --logout' % (iscsiadm_cmd, target)
    rc, out, err = module.run_command(logout_cmd)
    if rc > 0:
        module.fail_json(cmd=logout_cmd, rc=rc, msg=err)
def target_device_node(module, target):
    """Return the whole-disk device nodes (e.g. /dev/sdX) backed by *target*.

    Partition entries are skipped and duplicate resolutions (multipath)
    are collapsed. The *module* argument is unused but kept for signature
    consistency with the other target_* helpers.
    """
    # if anyone know a better way to find out which devicenodes get created
    # for a given target...
    disks = []
    for by_path in glob.glob('/dev/disk/by-path/*%s*' % target):
        # '-part' entries are partitions of a disk we already report
        if '-part' in by_path:
            continue
        real = os.path.realpath(by_path)
        # only add once (multi-path?)
        if real not in disks:
            disks.append(real)
    return disks
def target_isauto(module, target):
    """Return True when node.startup for *target* is set to 'automatic'."""
    show_cmd = '%s --mode node --targetname %s' % (iscsiadm_cmd, target)
    rc, out, err = module.run_command(show_cmd)
    if rc != 0:
        module.fail_json(cmd=show_cmd, rc=rc, msg=err)
    # Scan the node record dump for the startup setting.
    for record_line in out.splitlines():
        if 'node.startup' in record_line:
            return 'automatic' in record_line
    return False
def target_setauto(module, target):
    """Mark *target* for automatic login at system startup."""
    update_cmd = ('%s --mode node --targetname %s --op=update '
                  '--name node.startup --value automatic'
                  % (iscsiadm_cmd, target))
    rc, out, err = module.run_command(update_cmd)
    if rc > 0:
        module.fail_json(cmd=update_cmd, rc=rc, msg=err)
def target_setmanual(module, target):
    """Mark *target* for manual login only (no automatic startup)."""
    update_cmd = ('%s --mode node --targetname %s --op=update '
                  '--name node.startup --value manual'
                  % (iscsiadm_cmd, target))
    rc, out, err = module.run_command(update_cmd)
    if rc > 0:
        module.fail_json(cmd=update_cmd, rc=rc, msg=err)
def main():
    # Module entry point: parse parameters, optionally discover targets,
    # then converge login state and automatic-startup state, reporting
    # 'changed' per Ansible convention.
    # load ansible module object
    module = AnsibleModule(
        argument_spec = dict(
            # target
            portal = dict(required=False, aliases=['ip']),
            port = dict(required=False, default=3260),
            target = dict(required=False, aliases=['name', 'targetname']),
            node_auth = dict(required=False, default='CHAP'),
            node_user = dict(required=False),
            node_pass = dict(required=False),
            # actions
            login = dict(type='bool', aliases=['state']),
            auto_node_startup = dict(type='bool', aliases=['automatic']),
            discover = dict(type='bool', default=False),
            show_nodes = dict(type='bool', default=False)
        ),
        # NOTE(review): 'discover_user'/'discover_pass' are not declared in
        # argument_spec above, so this required_together pair can never
        # trigger -- confirm whether discovery auth params were intended.
        required_together=[['discover_user', 'discover_pass'],
                           ['node_user', 'node_pass']],
        supports_check_mode=True
    )
    # iscsiadm_cmd is a module-level global shared by all helper functions.
    global iscsiadm_cmd
    iscsiadm_cmd = module.get_bin_path('iscsiadm', required=True)
    # parameters
    portal = module.params['portal']
    target = module.params['target']
    port = module.params['port']
    login = module.params['login']
    automatic = module.params['auto_node_startup']
    discover = module.params['discover']
    show_nodes = module.params['show_nodes']
    check = module.check_mode
    # Nodes already known to the local iscsiadm cache (no network traffic).
    cached = iscsi_get_cached_nodes(module, portal)
    # return json dict
    result = {}
    result['changed'] = False
    if discover:
        if portal is None:
            module.fail_json(msg = "Need to specify at least the portal (ip) to discover")
        elif check:
            # Check mode: pretend discovery found what is already cached.
            nodes = cached
        else:
            iscsi_discover(module, portal, port)
            nodes = iscsi_get_cached_nodes(module, portal)
            # |= on a bool exploits bool-as-int OR; result stays truthy
            # once any step reports a change.
            if not compare_nodelists(cached, nodes):
                result['changed'] |= True
                result['cache_updated'] = True
    else:
        nodes = cached
    if login is not None or automatic is not None:
        if target is None:
            # Without an explicit target we can only proceed when the
            # cache is unambiguous (exactly one node).
            if len(nodes) > 1:
                module.fail_json(msg = "Need to specify a target")
            else:
                target = nodes[0]
        else:
            # check given target is in cache
            check_target = False
            for node in nodes:
                if node == target:
                    check_target = True
                    break
            if not check_target:
                module.fail_json(msg = "Specified target not found")
    if show_nodes:
        result['nodes'] = nodes
    if login is not None:
        loggedon = target_loggedon(module, target)
        if (login and loggedon) or (not login and not loggedon):
            # Desired state already satisfied -- no change.
            result['changed'] |= False
            if login:
                result['devicenodes'] = target_device_node(module, target)
        elif not check:
            if login:
                target_login(module, target)
                # give udev some time
                time.sleep(1)
                result['devicenodes'] = target_device_node(module, target)
            else:
                target_logout(module, target)
            result['changed'] |= True
            result['connection_changed'] = True
        else:
            # Check mode: report that a change would have been made.
            result['changed'] |= True
            result['connection_changed'] = True
    if automatic is not None:
        isauto = target_isauto(module, target)
        if (automatic and isauto) or (not automatic and not isauto):
            result['changed'] |= False
            result['automatic_changed'] = False
        elif not check:
            if automatic:
                target_setauto(module, target)
            else:
                target_setmanual(module, target)
            result['changed'] |= True
            result['automatic_changed'] = True
        else:
            # Check mode: report the pending change without applying it.
            result['changed'] |= True
            result['automatic_changed'] = True
    module.exit_json(**result)
# import module snippets
# The wildcard import is the historical Ansible convention: the framework
# substitutes its module boilerplate here at delivery time, so this must
# stay near the bottom of the file, after all definitions.
from ansible.module_utils.basic import *
# Old-style Ansible modules invoke main() unconditionally rather than
# under an `if __name__ == '__main__':` guard.
main()
| gpl-3.0 |
ryanGT/sympy | sympy/thirdparty/pyglet/pyglet/image/codecs/pypng.py | 5 | 41165 | # ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2007 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
# png.py - PNG encoder in pure Python
# Copyright (C) 2006 Johann C. Rocholl <johann@browsershots.org>
# <ah> Modifications for pyglet by Alex Holkner <alex.holkner@gmail.com>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Contributors (alphabetical):
# Nicko van Someren <nicko@nicko.org>
#
# Changelog (recent first):
# 2006-06-17 Nicko: Reworked into a class, faster interlacing.
# 2006-06-17 Johann: Very simple prototype PNG decoder.
# 2006-06-17 Nicko: Test suite with various image generators.
# 2006-06-17 Nicko: Alpha-channel, grey-scale, 16-bit/plane support.
# 2006-06-15 Johann: Scanline iterator interface for large input files.
# 2006-06-09 Johann: Very simple prototype PNG encoder.
"""
Pure Python PNG Reader/Writer
This is an implementation of a subset of the PNG specification at
http://www.w3.org/TR/2003/REC-PNG-20031110 in pure Python. It reads
and writes PNG files with 8/16/24/32/48/64 bits per pixel (greyscale,
RGB, RGBA, with 8 or 16 bits per layer), with a number of options. For
help, type "import png; help(png)" in your python interpreter.
This file can also be used as a command-line utility to convert PNM
files to PNG. The interface is similar to that of the pnmtopng program
from the netpbm package. Type "python png.py --help" at the shell
prompt for usage and a list of options.
"""
# Subversion keyword placeholders; expanded by the original VCS, left
# unexpanded in this vendored copy.
__revision__ = '$Rev$'
__date__ = '$Date$'
__author__ = '$Author$'
import sys
import zlib
import struct
import math
from array import array
# Adam7 interlacing passes: one (xstart, ystart, xstep, ystep) tuple per
# pass, in pass order. See http://www.w3.org/TR/PNG/#8InterlaceMethods
_adam7 = ((0, 0, 8, 8),
          (4, 0, 8, 8),
          (0, 4, 4, 8),
          (2, 0, 4, 4),
          (0, 2, 2, 4),
          (1, 0, 2, 2),
          (0, 1, 1, 2))
def interleave_planes(ipixels, apixels, ipsize, apsize):
    """
    Interleave color planes, e.g. RGB + A = RGBA.

    Produce an array whose pixels consist of the ipsize bytes of each
    pixel of ipixels immediately followed by the apsize bytes of the
    matching pixel of apixels.
    """
    ilen = len(ipixels)
    alen = len(apixels)
    total = ilen + alen
    out_psize = ipsize + apsize
    # Allocate the full-size output up front: the cheapest way to size an
    # array is to fill it with both inputs and then overwrite in place.
    interleaved = array('B')
    interleaved.extend(ipixels)
    interleaved.extend(apixels)
    # Strided slice-assignment shuffles each byte plane into position at
    # C speed instead of looping per pixel.
    for plane in range(ipsize):
        interleaved[plane:total:out_psize] = ipixels[plane:ilen:ipsize]
    for plane in range(apsize):
        interleaved[plane + ipsize:total:out_psize] = apixels[plane:alen:apsize]
    return interleaved
class Error(Exception):
    """Base exception for PNG reading/writing failures in this module."""
    pass
class Writer:
    """
    PNG encoder in pure Python.
    """
    # NOTE(review): this class targets Python 2 -- chunk tags and IDAT
    # payloads are byte strings (str) and array.tostring() is used; it
    # will not run unmodified on Python 3.
    def __init__(self, width, height,
                 transparent=None,
                 background=None,
                 gamma=None,
                 greyscale=False,
                 has_alpha=False,
                 bytes_per_sample=1,
                 compression=None,
                 interlaced=False,
                 chunk_limit=2**20):
        """
        Create a PNG encoder object.
        Arguments:
        width, height - size of the image in pixels
        transparent - create a tRNS chunk
        background - create a bKGD chunk
        gamma - create a gAMA chunk
        greyscale - input data is greyscale, not RGB
        has_alpha - input data has alpha channel (RGBA)
        bytes_per_sample - 8-bit or 16-bit input data
        compression - zlib compression level (1-9)
        interlaced - write an Adam7-interlaced image
        chunk_limit - write multiple IDAT chunks to save memory
        If specified, the transparent and background parameters must
        be a tuple with three integer values for red, green, blue, or
        a simple integer (or singleton tuple) for a greyscale image.
        If specified, the gamma parameter must be a float value.
        """
        # Validate everything up front so write() can assume consistency.
        if width <= 0 or height <= 0:
            raise ValueError("width and height must be greater than zero")
        if has_alpha and transparent is not None:
            raise ValueError(
                "transparent color not allowed with alpha channel")
        if bytes_per_sample < 1 or bytes_per_sample > 2:
            raise ValueError("bytes per sample must be 1 or 2")
        if transparent is not None:
            if greyscale:
                if type(transparent) is not int:
                    raise ValueError(
                        "transparent color for greyscale must be integer")
            else:
                if not (len(transparent) == 3 and
                        type(transparent[0]) is int and
                        type(transparent[1]) is int and
                        type(transparent[2]) is int):
                    raise ValueError(
                        "transparent color must be a triple of integers")
        if background is not None:
            if greyscale:
                if type(background) is not int:
                    raise ValueError(
                        "background color for greyscale must be integer")
            else:
                if not (len(background) == 3 and
                        type(background[0]) is int and
                        type(background[1]) is int and
                        type(background[2]) is int):
                    raise ValueError(
                        "background color must be a triple of integers")
        self.width = width
        self.height = height
        self.transparent = transparent
        self.background = background
        self.gamma = gamma
        self.greyscale = greyscale
        self.has_alpha = has_alpha
        self.bytes_per_sample = bytes_per_sample
        self.compression = compression
        self.chunk_limit = chunk_limit
        self.interlaced = interlaced
        # Derive the PNG colour type (IHDR byte 9) and per-pixel byte size:
        #   0 = greyscale, 2 = RGB, 4 = greyscale+alpha, 6 = RGBA.
        if self.greyscale:
            self.color_depth = 1
            if self.has_alpha:
                self.color_type = 4
                self.psize = self.bytes_per_sample * 2
            else:
                self.color_type = 0
                self.psize = self.bytes_per_sample
        else:
            self.color_depth = 3
            if self.has_alpha:
                self.color_type = 6
                self.psize = self.bytes_per_sample * 4
            else:
                self.color_type = 2
                self.psize = self.bytes_per_sample * 3
    def write_chunk(self, outfile, tag, data):
        """
        Write a PNG chunk to the output file, including length and checksum.
        """
        # http://www.w3.org/TR/PNG/#5Chunk-layout
        outfile.write(struct.pack("!I", len(data)))
        outfile.write(tag)
        outfile.write(data)
        # CRC covers the tag and the data, not the length field.
        checksum = zlib.crc32(tag)
        checksum = zlib.crc32(data, checksum)
        # <ah> Avoid DeprecationWarning: struct integer overflow masking
        # with Python2.5/Windows.
        checksum = checksum & 0xffffffff
        outfile.write(struct.pack("!I", checksum))
    def write(self, outfile, scanlines):
        """
        Write a PNG image to the output file.

        scanlines is an iterable of per-row byte sequences WITHOUT
        filter-type bytes (a zero filter byte is prepended here).
        """
        # http://www.w3.org/TR/PNG/#5PNG-file-signature
        outfile.write(struct.pack("8B", 137, 80, 78, 71, 13, 10, 26, 10))
        # http://www.w3.org/TR/PNG/#11IHDR
        if self.interlaced:
            interlaced = 1
        else:
            interlaced = 0
        self.write_chunk(outfile, 'IHDR',
                         struct.pack("!2I5B", self.width, self.height,
                                     self.bytes_per_sample * 8,
                                     self.color_type, 0, 0, interlaced))
        # http://www.w3.org/TR/PNG/#11tRNS
        if self.transparent is not None:
            if self.greyscale:
                # NOTE(review): greyscale transparent is validated as a
                # plain int in __init__ but unpacked with '*' here, which
                # would raise TypeError for an int -- confirm intended
                # input shape (singleton tuple?).
                self.write_chunk(outfile, 'tRNS',
                                 struct.pack("!1H", *self.transparent))
            else:
                self.write_chunk(outfile, 'tRNS',
                                 struct.pack("!3H", *self.transparent))
        # http://www.w3.org/TR/PNG/#11bKGD
        if self.background is not None:
            if self.greyscale:
                # NOTE(review): same int-vs-tuple concern as tRNS above.
                self.write_chunk(outfile, 'bKGD',
                                 struct.pack("!1H", *self.background))
            else:
                self.write_chunk(outfile, 'bKGD',
                                 struct.pack("!3H", *self.background))
        # http://www.w3.org/TR/PNG/#11gAMA
        if self.gamma is not None:
            self.write_chunk(outfile, 'gAMA',
                             struct.pack("!L", int(self.gamma * 100000)))
        # http://www.w3.org/TR/PNG/#11IDAT
        if self.compression is not None:
            compressor = zlib.compressobj(self.compression)
        else:
            compressor = zlib.compressobj()
        data = array('B')
        for scanline in scanlines:
            # Filter-type byte per scanline; 0 = no filtering.
            data.append(0)
            data.extend(scanline)
            if len(data) > self.chunk_limit:
                compressed = compressor.compress(data.tostring())
                if len(compressed):
                    # print >> sys.stderr, len(data), len(compressed)
                    self.write_chunk(outfile, 'IDAT', compressed)
                # Reset the buffer so peak memory stays near chunk_limit.
                data = array('B')
        if len(data):
            compressed = compressor.compress(data.tostring())
        else:
            compressed = ''
        flushed = compressor.flush()
        if len(compressed) or len(flushed):
            # print >> sys.stderr, len(data), len(compressed), len(flushed)
            self.write_chunk(outfile, 'IDAT', compressed + flushed)
        # http://www.w3.org/TR/PNG/#11IEND
        self.write_chunk(outfile, 'IEND', '')
    def write_array(self, outfile, pixels):
        """
        Encode a pixel array to PNG and write output file.
        """
        if self.interlaced:
            self.write(outfile, self.array_scanlines_interlace(pixels))
        else:
            self.write(outfile, self.array_scanlines(pixels))
    def convert_ppm(self, ppmfile, outfile):
        """
        Convert a PPM file containing raw pixel data into a PNG file
        with the parameters set in the writer object.
        """
        if self.interlaced:
            # Interlacing needs random access, so slurp the whole image.
            pixels = array('B')
            pixels.fromfile(ppmfile,
                            self.bytes_per_sample * self.color_depth *
                            self.width * self.height)
            self.write(outfile, self.array_scanlines_interlace(pixels))
        else:
            self.write(outfile, self.file_scanlines(ppmfile))
    def convert_ppm_and_pgm(self, ppmfile, pgmfile, outfile):
        """
        Convert a PPM and PGM file containing raw pixel data into a
        PNG outfile with the parameters set in the writer object.

        The PGM supplies the alpha plane which is interleaved with the
        PPM colour planes.
        """
        pixels = array('B')
        pixels.fromfile(ppmfile,
                        self.bytes_per_sample * self.color_depth *
                        self.width * self.height)
        apixels = array('B')
        apixels.fromfile(pgmfile,
                         self.bytes_per_sample *
                         self.width * self.height)
        pixels = interleave_planes(pixels, apixels,
                                   self.bytes_per_sample * self.color_depth,
                                   self.bytes_per_sample)
        if self.interlaced:
            self.write(outfile, self.array_scanlines_interlace(pixels))
        else:
            self.write(outfile, self.array_scanlines(pixels))
    def file_scanlines(self, infile):
        """
        Generator for scanlines from an input file.
        """
        row_bytes = self.psize * self.width
        for y in range(self.height):
            scanline = array('B')
            scanline.fromfile(infile, row_bytes)
            yield scanline
    def array_scanlines(self, pixels):
        """
        Generator for scanlines from an array.
        """
        row_bytes = self.width * self.psize
        stop = 0
        for y in range(self.height):
            start = stop
            stop = start + row_bytes
            yield pixels[start:stop]
    def old_array_scanlines_interlace(self, pixels):
        """
        Generator for interlaced scanlines from an array.
        http://www.w3.org/TR/PNG/#8InterlaceMethods

        Superseded by array_scanlines_interlace below, which replaces the
        per-pixel inner loop with strided slice assignment.
        """
        row_bytes = self.psize * self.width
        for xstart, ystart, xstep, ystep in _adam7:
            for y in range(ystart, self.height, ystep):
                if xstart < self.width:
                    if xstep == 1:
                        offset = y*row_bytes
                        yield pixels[offset:offset+row_bytes]
                    else:
                        row = array('B')
                        offset = y*row_bytes + xstart* self.psize
                        skip = self.psize * xstep
                        for x in range(xstart, self.width, xstep):
                            row.extend(pixels[offset:offset + self.psize])
                            offset += skip
                        yield row
    def array_scanlines_interlace(self, pixels):
        """
        Generator for interlaced scanlines from an array.
        http://www.w3.org/TR/PNG/#8InterlaceMethods
        """
        row_bytes = self.psize * self.width
        for xstart, ystart, xstep, ystep in _adam7:
            for y in range(ystart, self.height, ystep):
                if xstart >= self.width:
                    continue
                if xstep == 1:
                    offset = y * row_bytes
                    yield pixels[offset:offset+row_bytes]
                else:
                    row = array('B')
                    # Note we want the ceiling of (self.width - xstart) / xtep
                    # NOTE(review): '/' is Python 2 integer division here;
                    # under Python 3 this would produce a float.
                    row_len = self.psize * (
                        (self.width - xstart + xstep - 1) / xstep)
                    # There's no easier way to set the length of an array
                    row.extend(pixels[0:row_len])
                    offset = y * row_bytes + xstart * self.psize
                    end_offset = (y+1) * row_bytes
                    skip = self.psize * xstep
                    # Gather each byte plane of the pass with one strided
                    # slice assignment instead of a per-pixel loop.
                    for i in range(self.psize):
                        row[i:row_len:self.psize] = \
                            pixels[offset+i:end_offset:skip]
                    yield row
class _readable:
"""
A simple file-like interface for strings and arrays.
"""
def __init__(self, buf):
self.buf = buf
self.offset = 0
def read(self, n):
r = buf[offset:offset+n]
if isinstance(r, array):
r = r.tostring()
offset += n
return r
class Reader:
    """
    PNG decoder in pure Python.
    """
    # NOTE(review): Python 2 only -- uses the 'file' builtin and the
    # 'except E, e' syntax, and treats chunk data as byte strings (str).
    def __init__(self, _guess=None, **kw):
        """
        Create a PNG decoder object.
        The constructor expects exactly one keyword argument. If you
        supply a positional argument instead, it will guess the input
        type. You can choose among the following arguments:
        filename - name of PNG input file
        file - object with a read() method
        pixels - array or string with PNG data
        """
        if ((_guess is not None and len(kw) != 0) or
            (_guess is None and len(kw) != 1)):
            raise TypeError("Reader() takes exactly 1 argument")
        # Positional argument: infer which keyword it stands for.
        if _guess is not None:
            if isinstance(_guess, array):
                kw["pixels"] = _guess
            elif isinstance(_guess, str):
                kw["filename"] = _guess
            elif isinstance(_guess, file):
                kw["file"] = _guess
        # Normalise all three input forms to a file-like self.file.
        if "filename" in kw:
            self.file = file(kw["filename"])
        elif "file" in kw:
            self.file = kw["file"]
        elif "pixels" in kw:
            self.file = _readable(kw["pixels"])
        else:
            raise TypeError("expecting filename, file or pixels array")
    def read_chunk(self):
        """
        Read a PNG chunk from the input file, return tag name and data.

        Raises ValueError on truncation or CRC mismatch.
        """
        # http://www.w3.org/TR/PNG/#5Chunk-layout
        try:
            data_bytes, tag = struct.unpack('!I4s', self.file.read(8))
        except struct.error:
            raise ValueError('Chunk too short for header')
        data = self.file.read(data_bytes)
        if len(data) != data_bytes:
            raise ValueError('Chunk %s too short for required %i data octets'
                             % (tag, data_bytes))
        checksum = self.file.read(4)
        if len(checksum) != 4:
            raise ValueError('Chunk %s too short for checksum', tag)
        verify = zlib.crc32(tag)
        verify = zlib.crc32(data, verify)
        # '!i' (signed) matches the sign of Python 2's zlib.crc32 result.
        verify = struct.pack('!i', verify)
        if checksum != verify:
            # print repr(checksum)
            (a,) = struct.unpack('!I', checksum)
            (b,) = struct.unpack('!I', verify)
            raise ValueError("Checksum error in %s chunk: 0x%X != 0x%X"
                             % (tag, a, b))
        return tag, data
    def _reconstruct_sub(self, offset, xstep, ystep):
        """
        Reverse sub filter.

        offset points at the first byte of the row inside self.pixels;
        xstep/ystep > 1 only for interlaced passes.
        """
        pixels = self.pixels
        a_offset = offset
        offset += self.psize * xstep
        if xstep == 1:
            # Fast path for non-interlaced rows.
            for index in range(self.psize, self.row_bytes):
                x = pixels[offset]
                a = pixels[a_offset]
                pixels[offset] = (x + a) & 0xff
                offset += 1
                a_offset += 1
        else:
            byte_step = self.psize * xstep
            for index in range(byte_step, self.row_bytes, byte_step):
                for i in range(self.psize):
                    x = pixels[offset + i]
                    a = pixels[a_offset + i]
                    pixels[offset + i] = (x + a) & 0xff
                offset += self.psize * xstep
                a_offset += self.psize * xstep
    def _reconstruct_up(self, offset, xstep, ystep):
        """
        Reverse up filter.
        """
        pixels = self.pixels
        # b_offset: same byte in the previous row of this pass.
        b_offset = offset - (self.row_bytes * ystep)
        if xstep == 1:
            for index in range(self.row_bytes):
                x = pixels[offset]
                b = pixels[b_offset]
                pixels[offset] = (x + b) & 0xff
                offset += 1
                b_offset += 1
        else:
            for index in range(0, self.row_bytes, xstep * self.psize):
                for i in range(self.psize):
                    x = pixels[offset + i]
                    b = pixels[b_offset + i]
                    pixels[offset + i] = (x + b) & 0xff
                offset += self.psize * xstep
                b_offset += self.psize * xstep
    def _reconstruct_average(self, offset, xstep, ystep):
        """
        Reverse average filter.

        Handles the first row (b = 0) and first pixel (a = 0) explicitly.
        """
        pixels = self.pixels
        a_offset = offset - (self.psize * xstep)
        b_offset = offset - (self.row_bytes * ystep)
        if xstep == 1:
            for index in range(self.row_bytes):
                x = pixels[offset]
                if index < self.psize:
                    a = 0
                else:
                    a = pixels[a_offset]
                if b_offset < 0:
                    b = 0
                else:
                    b = pixels[b_offset]
                pixels[offset] = (x + ((a + b) >> 1)) & 0xff
                offset += 1
                a_offset += 1
                b_offset += 1
        else:
            for index in range(0, self.row_bytes, self.psize * xstep):
                for i in range(self.psize):
                    x = pixels[offset+i]
                    if index < self.psize:
                        a = 0
                    else:
                        a = pixels[a_offset + i]
                    if b_offset < 0:
                        b = 0
                    else:
                        b = pixels[b_offset + i]
                    pixels[offset + i] = (x + ((a + b) >> 1)) & 0xff
                offset += self.psize * xstep
                a_offset += self.psize * xstep
                b_offset += self.psize * xstep
    def _reconstruct_paeth(self, offset, xstep, ystep):
        """
        Reverse Paeth filter.
        """
        pixels = self.pixels
        a_offset = offset - (self.psize * xstep)
        b_offset = offset - (self.row_bytes * ystep)
        c_offset = b_offset - (self.psize * xstep)
        # There's enough inside this loop that it's probably not worth
        # optimising for xstep == 1
        for index in range(0, self.row_bytes, self.psize * xstep):
            for i in range(self.psize):
                x = pixels[offset+i]
                if index < self.psize:
                    # First pixel of the row: left and upper-left are 0.
                    a = c = 0
                    b = pixels[b_offset+i]
                else:
                    a = pixels[a_offset+i]
                    b = pixels[b_offset+i]
                    c = pixels[c_offset+i]
                # Paeth predictor: pick whichever of a/b/c is closest to
                # the linear estimate p = a + b - c.
                p = a + b - c
                pa = abs(p - a)
                pb = abs(p - b)
                pc = abs(p - c)
                if pa <= pb and pa <= pc:
                    pr = a
                elif pb <= pc:
                    pr = b
                else:
                    pr = c
                pixels[offset+i] = (x + pr) & 0xff
            offset += self.psize * xstep
            a_offset += self.psize * xstep
            b_offset += self.psize * xstep
            c_offset += self.psize * xstep
    # N.B. PNG files with 'up', 'average' or 'paeth' filters on the
    # first line of a pass are legal. The code above for 'average'
    # deals with this case explicitly. For up we map to the null
    # filter and for paeth we map to the sub filter.
    def reconstruct_line(self, filter_type, first_line, offset, xstep, ystep):
        # Dispatch on (first_line << 8) | filter_type so the legal
        # first-line degenerate cases (see N.B. above) fold into the
        # right handler: 0x102 -> null, 0x104 -> sub.
        # print >> sys.stderr, "Filter type %s, first_line=%s" % (
        # filter_type, first_line)
        filter_type += (first_line << 8)
        if filter_type == 1 or filter_type == 0x101 or filter_type == 0x104:
            self._reconstruct_sub(offset, xstep, ystep)
        elif filter_type == 2:
            self._reconstruct_up(offset, xstep, ystep)
        elif filter_type == 3 or filter_type == 0x103:
            self._reconstruct_average(offset, xstep, ystep)
        elif filter_type == 4:
            self._reconstruct_paeth(offset, xstep, ystep)
        return
    def deinterlace(self, scanlines):
        # Expand Adam7 passes from the decompressed stream into a full
        # flat pixel array, un-filtering each pass row as it lands.
        # print >> sys.stderr, ("Reading interlaced, w=%s, r=%s, planes=%s," +
        # " bpp=%s") % (self.width, self.height, self.planes, self.bps)
        a = array('B')
        self.pixels = a
        # Make the array big enough
        temp = scanlines[0:self.width*self.height*self.psize]
        a.extend(temp)
        source_offset = 0
        for xstart, ystart, xstep, ystep in _adam7:
            # print >> sys.stderr, "Adam7: start=%s,%s step=%s,%s" % (
            # xstart, ystart, xstep, ystep)
            filter_first_line = 1
            for y in range(ystart, self.height, ystep):
                if xstart >= self.width:
                    continue
                filter_type = scanlines[source_offset]
                source_offset += 1
                if xstep == 1:
                    offset = y * self.row_bytes
                    a[offset:offset+self.row_bytes] = \
                        scanlines[source_offset:source_offset + self.row_bytes]
                    source_offset += self.row_bytes
                else:
                    # Note we want the ceiling of (width - xstart) / xtep
                    # NOTE(review): '/' is Python 2 integer division here.
                    row_len = self.psize * (
                        (self.width - xstart + xstep - 1) / xstep)
                    offset = y * self.row_bytes + xstart * self.psize
                    end_offset = (y+1) * self.row_bytes
                    skip = self.psize * xstep
                    # Scatter each byte plane of the pass row into place
                    # with a strided slice assignment.
                    for i in range(self.psize):
                        a[offset+i:end_offset:skip] = \
                            scanlines[source_offset + i:
                                      source_offset + row_len:
                                      self.psize]
                    source_offset += row_len
                if filter_type:
                    self.reconstruct_line(filter_type, filter_first_line,
                                          offset, xstep, ystep)
                filter_first_line = 0
        return a
    def read_flat(self, scanlines):
        # Non-interlaced path: strip the per-row filter byte and
        # un-filter each row in sequence.
        a = array('B')
        self.pixels = a
        offset = 0
        source_offset = 0
        filter_first_line = 1
        for y in range(self.height):
            filter_type = scanlines[source_offset]
            source_offset += 1
            a.extend(scanlines[source_offset: source_offset + self.row_bytes])
            if filter_type:
                self.reconstruct_line(filter_type, filter_first_line,
                                      offset, 1, 1)
            filter_first_line = 0
            offset += self.row_bytes
            source_offset += self.row_bytes
        return a
    def read(self):
        """
        Read a simple PNG file, return width, height, pixels and image metadata
        This function is a very early prototype with limited flexibility
        and excessive use of memory.
        """
        signature = self.file.read(8)
        if (signature != struct.pack("8B", 137, 80, 78, 71, 13, 10, 26, 10)):
            raise Error("PNG file has invalid header")
        compressed = []
        image_metadata = {}
        while True:
            try:
                tag, data = self.read_chunk()
            except ValueError, e:
                raise Error('Chunk error: ' + e.args[0])
            # print >> sys.stderr, tag, len(data)
            if tag == 'IHDR': # http://www.w3.org/TR/PNG/#11IHDR
                (width, height, bits_per_sample, color_type,
                 compression_method, filter_method,
                 interlaced) = struct.unpack("!2I5B", data)
                # bps = bytes per sample (Python 2 integer division).
                bps = bits_per_sample / 8
                if bps == 0:
                    raise Error("unsupported pixel depth")
                if bps > 2 or bits_per_sample != (bps * 8):
                    raise Error("invalid pixel depth")
                if color_type == 0:
                    greyscale = True
                    has_alpha = False
                    planes = 1
                elif color_type == 2:
                    greyscale = False
                    has_alpha = False
                    planes = 3
                elif color_type == 4:
                    greyscale = True
                    has_alpha = True
                    planes = 2
                elif color_type == 6:
                    greyscale = False
                    has_alpha = True
                    planes = 4
                else:
                    raise Error("unknown PNG colour type %s" % color_type)
                if compression_method != 0:
                    raise Error("unknown compression method")
                if filter_method != 0:
                    raise Error("unknown filter method")
                self.bps = bps
                self.planes = planes
                self.psize = bps * planes
                self.width = width
                self.height = height
                self.row_bytes = width * self.psize
            elif tag == 'IDAT': # http://www.w3.org/TR/PNG/#11IDAT
                # IDAT payloads concatenate into one zlib stream.
                compressed.append(data)
            elif tag == 'bKGD':
                if greyscale:
                    image_metadata["background"] = struct.unpack("!1H", data)
                else:
                    image_metadata["background"] = struct.unpack("!3H", data)
            elif tag == 'tRNS':
                if greyscale:
                    image_metadata["transparent"] = struct.unpack("!1H", data)
                else:
                    image_metadata["transparent"] = struct.unpack("!3H", data)
            elif tag == 'gAMA':
                image_metadata["gamma"] = (
                    struct.unpack("!L", data)[0]) / 100000.0
            elif tag == 'IEND': # http://www.w3.org/TR/PNG/#11IEND
                break
        scanlines = array('B', zlib.decompress(''.join(compressed)))
        if interlaced:
            pixels = self.deinterlace(scanlines)
        else:
            pixels = self.read_flat(scanlines)
        image_metadata["greyscale"] = greyscale
        image_metadata["has_alpha"] = has_alpha
        image_metadata["bytes_per_sample"] = bps
        image_metadata["interlaced"] = interlaced
        return width, height, pixels, image_metadata
def test_suite(options):
    """
    Run regression test and write PNG file to stdout.

    options is the optparse result from _main(); the test_* attributes
    select the generator pattern per colour layer.
    """
    # Below is a big stack of test image generators
    # Each generator maps normalised coordinates (x, y) in [0, 1) to an
    # intensity in [0, 1].
    def test_gradient_horizontal_lr(x, y):
        return x
    def test_gradient_horizontal_rl(x, y):
        return 1-x
    def test_gradient_vertical_tb(x, y):
        return y
    def test_gradient_vertical_bt(x, y):
        return 1-y
    def test_radial_tl(x, y):
        return max(1-math.sqrt(x*x+y*y), 0.0)
    def test_radial_center(x, y):
        return test_radial_tl(x-0.5, y-0.5)
    def test_radial_tr(x, y):
        return test_radial_tl(1-x, y)
    def test_radial_bl(x, y):
        return test_radial_tl(x, 1-y)
    def test_radial_br(x, y):
        return test_radial_tl(1-x, 1-y)
    def test_stripe(x, n):
        return 1.0*(int(x*n) & 1)
    def test_stripe_h_2(x, y):
        return test_stripe(x, 2)
    def test_stripe_h_4(x, y):
        return test_stripe(x, 4)
    def test_stripe_h_10(x, y):
        return test_stripe(x, 10)
    def test_stripe_v_2(x, y):
        return test_stripe(y, 2)
    def test_stripe_v_4(x, y):
        return test_stripe(y, 4)
    def test_stripe_v_10(x, y):
        return test_stripe(y, 10)
    def test_stripe_lr_10(x, y):
        return test_stripe(x+y, 10)
    def test_stripe_rl_10(x, y):
        return test_stripe(x-y, 10)
    def test_checker(x, y, n):
        return 1.0*((int(x*n) & 1) ^ (int(y*n) & 1))
    def test_checker_8(x, y):
        return test_checker(x, y, 8)
    def test_checker_15(x, y):
        return test_checker(x, y, 15)
    def test_zero(x, y):
        return 0
    def test_one(x, y):
        return 1
    # Short pattern names accepted on the command line (-R/-G/-B/-A).
    test_patterns = {
        "GLR": test_gradient_horizontal_lr,
        "GRL": test_gradient_horizontal_rl,
        "GTB": test_gradient_vertical_tb,
        "GBT": test_gradient_vertical_bt,
        "RTL": test_radial_tl,
        "RTR": test_radial_tr,
        "RBL": test_radial_bl,
        "RBR": test_radial_br,
        "RCTR": test_radial_center,
        "HS2": test_stripe_h_2,
        "HS4": test_stripe_h_4,
        "HS10": test_stripe_h_10,
        "VS2": test_stripe_v_2,
        "VS4": test_stripe_v_4,
        "VS10": test_stripe_v_10,
        "LRS": test_stripe_lr_10,
        "RLS": test_stripe_rl_10,
        "CK8": test_checker_8,
        "CK15": test_checker_15,
        "ZERO": test_zero,
        "ONE": test_one,
        }
    def test_pattern(width, height, depth, pattern):
        # Render one plane of width x height samples at 8 or 16 bits.
        a = array('B')
        fw = float(width)
        fh = float(height)
        pfun = test_patterns[pattern]
        if depth == 1:
            for y in range(height):
                for x in range(width):
                    a.append(int(pfun(float(x)/fw, float(y)/fh) * 255))
        elif depth == 2:
            for y in range(height):
                for x in range(width):
                    v = int(pfun(float(x)/fw, float(y)/fh) * 65535)
                    # 16-bit samples are stored big-endian (PNG network
                    # byte order).
                    a.append(v >> 8)
                    a.append(v & 0xff)
        return a
    def test_rgba(size=256, depth=1,
                  red="GTB", green="GLR", blue="RTL", alpha=None):
        # Render each plane separately, then interleave into RGB(A).
        r = test_pattern(size, size, depth, red)
        g = test_pattern(size, size, depth, green)
        b = test_pattern(size, size, depth, blue)
        if alpha:
            a = test_pattern(size, size, depth, alpha)
        i = interleave_planes(r, g, depth, depth)
        i = interleave_planes(i, b, 2 * depth, depth)
        if alpha:
            i = interleave_planes(i, a, 3 * depth, depth)
        return i
    # The body of test_suite()
    size = 256
    if options.test_size:
        size = options.test_size
    depth = 1
    if options.test_deep:
        depth = 2
    kwargs = {}
    if options.test_red:
        kwargs["red"] = options.test_red
    if options.test_green:
        kwargs["green"] = options.test_green
    if options.test_blue:
        kwargs["blue"] = options.test_blue
    if options.test_alpha:
        kwargs["alpha"] = options.test_alpha
    pixels = test_rgba(size, depth, **kwargs)
    writer = Writer(size, size,
                    bytes_per_sample=depth,
                    transparent=options.transparent,
                    background=options.background,
                    gamma=options.gamma,
                    has_alpha=options.test_alpha,
                    compression=options.compression,
                    interlaced=options.interlace)
    writer.write_array(sys.stdout, pixels)
def read_pnm_header(infile, supported='P6'):
    """
    Read a PNM header, return width and height of the image in pixels.

    Arguments:
    infile - file-like object yielding text lines
    supported - accepted magic number(s); membership is tested with 'in',
        so a string, tuple or list works

    Raises ValueError on a truncated header and NotImplementedError for
    unsupported formats or maxvals.
    """
    # Header tokens: magic, width, height, maxval (PBM 'P4' has no maxval).
    header = []
    while len(header) < 4:
        line = infile.readline()
        if not line:
            # Bug fix: readline() returns '' forever at EOF, so a
            # truncated header used to spin in an infinite loop here.
            raise ValueError('unexpected end of PNM header')
        # Strip comments ('#' to end of line) before tokenising.
        sharp = line.find('#')
        if sharp > -1:
            line = line[:sharp]
        header.extend(line.split())
        if len(header) == 3 and header[0] == 'P4':
            break # PBM doesn't have maxval
    if header[0] not in supported:
        raise NotImplementedError('file format %s not supported' % header[0])
    if header[0] != 'P4' and header[3] != '255':
        raise NotImplementedError('maxval %s not supported' % header[3])
    return int(header[1]), int(header[2])
def color_triple(color):
    """
    Convert a command line color value to a RGB triple of integers.
    FIXME: Somewhere we need support for greyscale backgrounds etc.
    """
    if color.startswith('#'):
        hexdigits = color[1:]
        if len(hexdigits) == 3:
            # '#rgb' -> one hex digit per channel
            return tuple(int(digit, 16) for digit in hexdigits)
        if len(hexdigits) == 6:
            # '#rrggbb' -> two hex digits per channel
            return (int(hexdigits[0:2], 16),
                    int(hexdigits[2:4], 16),
                    int(hexdigits[4:6], 16))
        if len(hexdigits) == 12:
            # '#rrrrggggbbbb' -> 16-bit channels
            return (int(hexdigits[0:4], 16),
                    int(hexdigits[4:8], 16),
                    int(hexdigits[8:12], 16))
    # Any other format falls through and returns None, as before.
def _main():
    """
    Run the PNG encoder with options from the command line.

    Reads a PNM image (PPM, or PGM for the alpha channel) from a file or
    stdin and writes the encoded PNG to stdout.  With --test, generates a
    synthetic test image instead of reading input.
    """
    # Parse command line arguments
    from optparse import OptionParser
    # __revision__ is a module-level SVN keyword string defined elsewhere
    # in this file (not visible in this chunk).
    version = '%prog ' + __revision__.strip('$').replace('Rev: ', 'r')
    parser = OptionParser(version=version)
    parser.set_usage("%prog [options] [pnmfile]")
    parser.add_option("-i", "--interlace",
                      default=False, action="store_true",
                      help="create an interlaced PNG file (Adam7)")
    parser.add_option("-t", "--transparent",
                      action="store", type="string", metavar="color",
                      help="mark the specified color as transparent")
    parser.add_option("-b", "--background",
                      action="store", type="string", metavar="color",
                      help="save the specified background color")
    parser.add_option("-a", "--alpha",
                      action="store", type="string", metavar="pgmfile",
                      help="alpha channel transparency (RGBA)")
    parser.add_option("-g", "--gamma",
                      action="store", type="float", metavar="value",
                      help="save the specified gamma value")
    parser.add_option("-c", "--compression",
                      action="store", type="int", metavar="level",
                      help="zlib compression level (0-9)")
    parser.add_option("-T", "--test",
                      default=False, action="store_true",
                      help="create a test image")
    parser.add_option("-R", "--test-red",
                      action="store", type="string", metavar="pattern",
                      help="test pattern for the red image layer")
    parser.add_option("-G", "--test-green",
                      action="store", type="string", metavar="pattern",
                      help="test pattern for the green image layer")
    parser.add_option("-B", "--test-blue",
                      action="store", type="string", metavar="pattern",
                      help="test pattern for the blue image layer")
    parser.add_option("-A", "--test-alpha",
                      action="store", type="string", metavar="pattern",
                      help="test pattern for the alpha image layer")
    parser.add_option("-D", "--test-deep",
                      default=False, action="store_true",
                      help="use test patterns with 16 bits per layer")
    parser.add_option("-S", "--test-size",
                      action="store", type="int", metavar="size",
                      help="width and height of the test image")
    (options, args) = parser.parse_args()
    # Convert options: color strings become RGB integer triples.
    if options.transparent is not None:
        options.transparent = color_triple(options.transparent)
    if options.background is not None:
        options.background = color_triple(options.background)
    # Run regression tests (test_suite is defined elsewhere in this file).
    if options.test:
        return test_suite(options)
    # Prepare input and output files.  '-' is a placeholder name for stdin.
    if len(args) == 0:
        ppmfilename = '-'
        ppmfile = sys.stdin
    elif len(args) == 1:
        ppmfilename = args[0]
        ppmfile = open(ppmfilename, 'rb')
    else:
        parser.error("more than one input file")
    outfile = sys.stdout
    # Encode PNM to PNG.  Writer is the PNG encoder class defined earlier
    # in this file (not visible in this chunk).
    width, height = read_pnm_header(ppmfile)
    writer = Writer(width, height,
                    transparent=options.transparent,
                    background=options.background,
                    has_alpha=options.alpha is not None,
                    gamma=options.gamma,
                    compression=options.compression)
    if options.alpha is not None:
        # The alpha channel comes from a separate PGM (P5) file and must
        # match the main image's dimensions exactly.
        pgmfile = open(options.alpha, 'rb')
        awidth, aheight = read_pnm_header(pgmfile, 'P5')
        if (awidth, aheight) != (width, height):
            raise ValueError("alpha channel image size mismatch" +
                             " (%s has %sx%s but %s has %sx%s)"
                             % (ppmfilename, width, height,
                                options.alpha, awidth, aheight))
        writer.convert_ppm_and_pgm(ppmfile, pgmfile, outfile,
                                   interlace=options.interlace)
    else:
        writer.convert_ppm(ppmfile, outfile,
                           interlace=options.interlace)
# Allow use either as a command-line tool or as an importable module.
if __name__ == '__main__':
    _main()
| bsd-3-clause |
PLyczkowski/Sticky-Keymap | 2.74/scripts/addons/io_anim_bvh/__init__.py | 1 | 8032 | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8-80 compliant>
# Add-on metadata shown in Blender's add-on manager.
bl_info = {
    "name": "BioVision Motion Capture (BVH) format",
    "author": "Campbell Barton",
    "blender": (2, 74, 0),
    "location": "File > Import-Export",
    "description": "Import-Export BVH from armature objects",
    "warning": "",
    "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
                "Scripts/Import-Export/MotionCapture_BVH",
    "support": 'OFFICIAL',
    "category": "Import-Export"}
# Support Blender's "Reload Scripts" operator: if this module was already
# imported, re-import the submodules so code edits take effect.
if "bpy" in locals():
    import importlib
    if "import_bvh" in locals():
        importlib.reload(import_bvh)
    if "export_bvh" in locals():
        importlib.reload(export_bvh)
import bpy
from bpy.props import (StringProperty,
FloatProperty,
IntProperty,
BoolProperty,
EnumProperty,
)
from bpy_extras.io_utils import (ImportHelper,
ExportHelper,
orientation_helper_factory,
axis_conversion,
)
# Mixin providing axis_forward/axis_up operator properties with BVH defaults.
ImportBVHOrientationHelper = orientation_helper_factory("ImportBVHOrientationHelper", axis_forward='-Z', axis_up='Y')
class ImportBVH(bpy.types.Operator, ImportHelper, ImportBVHOrientationHelper):
    """Load a BVH motion capture file"""
    bl_idname = "import_anim.bvh"
    bl_label = "Import BVH"
    bl_options = {'REGISTER', 'UNDO'}

    filename_ext = ".bvh"
    filter_glob = StringProperty(default="*.bvh", options={'HIDDEN'})

    # Operator options, drawn in the file browser sidebar.
    target = EnumProperty(items=(
            ('ARMATURE', "Armature", ""),
            ('OBJECT', "Object", ""),
            ),
        name="Target",
        description="Import target type",
        default='ARMATURE')
    global_scale = FloatProperty(
        name="Scale",
        description="Scale the BVH by this value",
        min=0.0001, max=1000000.0,
        soft_min=0.001, soft_max=100.0,
        default=1.0,
        )
    frame_start = IntProperty(
        name="Start Frame",
        description="Starting frame for the animation",
        default=1,
        )
    use_fps_scale = BoolProperty(
        name="Scale FPS",
        description=("Scale the framerate from the BVH to "
                     "the current scenes, otherwise each "
                     "BVH frame maps directly to a Blender frame"),
        default=False,
        )
    use_cyclic = BoolProperty(
        name="Loop",
        description="Loop the animation playback",
        default=False,
        )
    rotate_mode = EnumProperty(
        name="Rotation",
        description="Rotation conversion",
        items=(('QUATERNION', "Quaternion",
                "Convert rotations to quaternions"),
               ('NATIVE', "Euler (Native)", ("Use the rotation order "
                                             "defined in the BVH file")),
               ('XYZ', "Euler (XYZ)", "Convert rotations to euler XYZ"),
               ('XZY', "Euler (XZY)", "Convert rotations to euler XZY"),
               ('YXZ', "Euler (YXZ)", "Convert rotations to euler YXZ"),
               ('YZX', "Euler (YZX)", "Convert rotations to euler YZX"),
               ('ZXY', "Euler (ZXY)", "Convert rotations to euler ZXY"),
               ('ZYX', "Euler (ZYX)", "Convert rotations to euler ZYX"),
               ),
        default='NATIVE',
        )

    def execute(self, context):
        # Forward all operator properties to the importer, except the
        # orientation axes (folded into global_matrix below) and the
        # UI-only file filter.
        keywords = self.as_keywords(ignore=("axis_forward",
                                            "axis_up",
                                            "filter_glob",
                                            ))
        global_matrix = axis_conversion(from_forward=self.axis_forward,
                                        from_up=self.axis_up,
                                        ).to_4x4()
        keywords["global_matrix"] = global_matrix
        from . import import_bvh
        return import_bvh.load(self, context, **keywords)
class ExportBVH(bpy.types.Operator, ExportHelper):
    """Save a BVH motion capture file from an armature"""
    bl_idname = "export_anim.bvh"
    bl_label = "Export BVH"

    filename_ext = ".bvh"
    filter_glob = StringProperty(
        default="*.bvh",
        options={'HIDDEN'},
        )

    # Operator options, drawn in the file browser sidebar.
    global_scale = FloatProperty(
        name="Scale",
        description="Scale the BVH by this value",
        min=0.0001, max=1000000.0,
        soft_min=0.001, soft_max=100.0,
        default=1.0,
        )
    frame_start = IntProperty(
        name="Start Frame",
        description="Starting frame to export",
        default=0,
        )
    frame_end = IntProperty(
        name="End Frame",
        description="End frame to export",
        default=0,
        )
    rotate_mode = EnumProperty(
        name="Rotation",
        description="Rotation conversion",
        items=(('NATIVE', "Euler (Native)",
                "Use the rotation order defined in the BVH file"),
               ('XYZ', "Euler (XYZ)", "Convert rotations to euler XYZ"),
               ('XZY', "Euler (XZY)", "Convert rotations to euler XZY"),
               ('YXZ', "Euler (YXZ)", "Convert rotations to euler YXZ"),
               ('YZX', "Euler (YZX)", "Convert rotations to euler YZX"),
               ('ZXY', "Euler (ZXY)", "Convert rotations to euler ZXY"),
               ('ZYX', "Euler (ZYX)", "Convert rotations to euler ZYX"),
               ),
        default='NATIVE',
        )
    root_transform_only = BoolProperty(
        name="Root Translation Only",
        description="Only write out translation channels for the root bone",
        default=False,
        )

    @classmethod
    def poll(cls, context):
        # Only available when the active object is an armature.
        obj = context.object
        return obj and obj.type == 'ARMATURE'

    def invoke(self, context, event):
        # Pre-fill the frame range from the scene before showing the dialog.
        self.frame_start = context.scene.frame_start
        self.frame_end = context.scene.frame_end
        return super().invoke(context, event)

    def execute(self, context):
        # 0/0 means "not set" (e.g. when invoked directly from a script):
        # fall back to the scene's frame range.
        if self.frame_start == 0 and self.frame_end == 0:
            self.frame_start = context.scene.frame_start
            self.frame_end = context.scene.frame_end

        keywords = self.as_keywords(ignore=("check_existing", "filter_glob"))
        from . import export_bvh
        return export_bvh.save(self, context, **keywords)
def menu_func_import(self, context):
    # Entry added to the File > Import menu.
    self.layout.operator(ImportBVH.bl_idname, text="Motion Capture (.bvh)")
def menu_func_export(self, context):
    # Entry added to the File > Export menu.
    self.layout.operator(ExportBVH.bl_idname, text="Motion Capture (.bvh)")
def register():
    """Register this add-on's operator classes and menu entries."""
    bpy.utils.register_module(__name__)

    bpy.types.INFO_MT_file_import.append(menu_func_import)
    bpy.types.INFO_MT_file_export.append(menu_func_export)
def unregister():
    """Remove this add-on's operator classes and menu entries."""
    bpy.utils.unregister_module(__name__)

    bpy.types.INFO_MT_file_import.remove(menu_func_import)
    bpy.types.INFO_MT_file_export.remove(menu_func_export)
# Allow running this file directly from Blender's text editor for testing.
if __name__ == "__main__":
    register()
| gpl-2.0 |
desihub/desimodel | py/desimodel/weather.py | 1 | 15591 | # See LICENSE.rst for BSD 3-clause license info
# -*- coding: utf-8 -*-
"""
desimodel.weather
=================
Model of the expected weather conditions at KPNO during the DESI survey.
To generate a random time series of expected FWHM seeing in arcsecs and
atmospheric transparency, use, for example::
n = 10000
dt = 300 # seconds
t = np.arange(n) * dt
gen = np.random.RandomState(seed=123)
seeing = sample_seeing(n, dt_sec=dt, gen=gen)
transp = sample_transp(n, dt_sec=dt, gen=gen)
The resulting arrays are randomly sampled from models of the 1D probability
density and 2-point power spectral density derived from MzLS observations.
See `DESI-doc-3087
<https://desi.lbl.gov/DocDB/cgi-bin/private/ShowDocument?docid=3087>`__
for details.
Used by :mod:`surveysim.weather` for simulations of DESI observing and
survey strategy studies.
"""
from __future__ import print_function, division
import os
import datetime
import calendar
import numpy as np
import scipy.interpolate
import scipy.special
import astropy.table
def whiten_transforms_from_cdf(x, cdf):
    """
    Calculate a pair of transforms to whiten and unwhiten a distribution.

    The whitening transform is monotonic and invertible.

    Parameters
    ----------
    x : array
        1D array of non-decreasing values giving bin edges for the distribution
        to whiten and unwhiten.
    cdf : array
        1D array of non-decreasing values giving the cummulative probability
        density associated with each bin edge. Does not need to be normalized.
        Must have the same length as x.  The input array is not modified.

    Returns
    -------
    tuple
        Tuple (F,G) of callable objects that whiten y=F(x) and unwhiten x=G(y)
        samples x of the input distribution, so that y has a Gaussian
        distribution with zero mean and unit variance.

    Raises
    ------
    ValueError
        If the inputs have different shapes, are not 1D, or are not
        non-decreasing.
    """
    x = np.asarray(x)
    cdf = np.asarray(cdf)
    if x.shape != cdf.shape:
        raise ValueError('Input arrays must have same shape.')
    if len(x.shape) != 1:
        raise ValueError('Input arrays must be 1D.')
    if not np.all(np.diff(x) >= 0):
        raise ValueError('Values of x must be non-decreasing.')
    if not np.all(np.diff(cdf) >= 0):
        raise ValueError('Values of cdf must be non-decreasing.')
    # Normalize so that cdf[-1] == 1.  Use an out-of-place divide: the
    # previous in-place ``cdf /= cdf[-1]`` silently modified the caller's
    # array whenever np.asarray() returned a view of the input (i.e. for
    # any float ndarray argument).
    cdf = cdf / cdf[-1]
    # Use linear interpolation for the forward and inverse transforms between
    # the input range and Gaussian CDF values.
    args = dict(
        kind='linear', assume_sorted=True, copy=False, bounds_error=True)
    forward = scipy.interpolate.interp1d(x, cdf, **args)
    backward = scipy.interpolate.interp1d(cdf, x, **args)
    # Add wrappers to convert between CDF and PDF samples.
    root2 = np.sqrt(2)
    forward_transform = (
        lambda x: root2 * scipy.special.erfinv(2 * forward(x) - 1))
    inverse_transform = (
        lambda y: backward(0.5 * (1 + scipy.special.erf(y / root2))))
    return forward_transform, inverse_transform
def whiten_transforms(data, data_min=None, data_max=None):
    """Calculate a pair of transforms to whiten and unwhiten a distribution.

    Uses :func:`desimodel.weather.whiten_transforms_from_cdf`.

    Parameters
    ----------
    data : array
        1D array of samples from the distribution to whiten.
    data_min : float or None
        Clip the distribution to this minimum value, or at min(data) if None.
        Must be <= min(data).
    data_max : float or None
        Clip the distribution to this maximum value, or at max(data) if None.
        Must be >= max(data).

    Returns
    -------
    tuple
        See :func:`desimodel.weather.whiten_transforms_from_cdf`.
    """
    n_data = len(data)
    # Build a sorted copy of the data with one extra slot at each end for
    # the clipping limits, preserving the input dtype.
    padded = np.empty(shape=n_data + 2, dtype=data.dtype)
    padded[1:-1] = np.sort(data)
    if data_min is None:
        padded[0] = padded[1]
    elif data_min > padded[1]:
        raise ValueError('data_min > min(data)')
    else:
        padded[0] = data_min
    if data_max is None:
        padded[-1] = padded[-2]
    elif data_max < padded[-2]:
        raise ValueError('data_max < max(data)')
    else:
        padded[-1] = data_max
    # Assign uniformly spaced CDF values: the pad entries map to 0 and 1,
    # and each data value in sorted order maps to i / (n_data + 1).
    cdf = np.arange(n_data + 2) / (n_data + 1.)
    return whiten_transforms_from_cdf(padded, cdf)
def _seeing_fit_model(x):
"""Evalute the fit to MzLS seeing described in DESI-doc-3087.
"""
p = np.array([ 0.07511146, 0.44276671, 23.02442192, 38.07691498])
y = (1 + ((x - p[0]) / p[1]) ** 2) ** (-p[2]) * x ** p[3]
return y / (y.sum() * np.gradient(x))
def get_seeing_pdf(median_seeing=1.1, max_seeing=2.5, n=250):
    """Return PDF of FWHM seeing for specified clipped median value.

    Note that this is atmospheric seeing, not delivered image quality.
    The reference wavelength for seeing values is 6355A, in the r band,
    and the observed wavelength dependence in Dey & Valdes is closer to
    ``lambda ** (-1/15)`` than the ``lambda ** (-1/5)`` predicted by
    Kolmogorov theory. See DESI-doc-3087 for details.

    Scales the clipped MzLS seeing PDF in order to achieve the requested
    median value. Note that clipping is applied before scaling, so
    the output PDF is clipped at scale * max_seeing.

    Parameters
    ----------
    median_seeing : float
        Target FWHM seeing value in arcsec. Must be in the range [0.95, 1.30].
    max_seeing : float
        Calculate scaled median using unscaled values below this value.
    n : int
        Size of grid to use for tabulating the returned arrays.

    Returns
    -------
    tuple
        Tuple (fwhm, pdf) that tabulates pdf[fwhm]. Normalized so that
        ``np.sum(pdf * np.gradient(fwhm)) = 1``.

    Raises
    ------
    ValueError
        If median_seeing is outside the range reachable with a scale
        factor in [0.9, 1.4].
    """
    # Tabulate the nominal (scale=1) seeing PDF.
    fwhm = np.linspace(0., max_seeing, n)
    pdf = _seeing_fit_model(fwhm)
    pdf /= (pdf.sum() * np.gradient(fwhm))
    cdf = np.cumsum(pdf)
    cdf /= cdf[-1]
    # Tabulate the median as a function of FWHM scale.
    scale = np.linspace(0.9, 1.4, 11)
    median = np.empty_like(scale)
    for i, s in enumerate(scale):
        # Median of the scaled distribution: invert the CDF at 0.5.
        median[i] = np.interp(0.5, cdf, s * fwhm)
    if median_seeing < median[0] or median_seeing > median[-1]:
        raise ValueError('Requested median is outside allowed range.')
    # Interpolate to find the scale factor that gives the requested median.
    s = np.interp(median_seeing, median, scale)
    # Scaling the abscissa by s requires dividing the density by s to
    # keep the PDF normalized.
    return fwhm * s, pdf / s
def sample_timeseries(x_grid, pdf_grid, psd, n_sample, dt_sec=180., gen=None):
    """Sample a time series specified by a power spectrum and 1D PDF.

    The PSD should describe the temporal correlations of whitened samples.
    Generated samples will then be unwhitened to recover the input 1D PDF.
    See DESI-doc-3087 for details.

    Uses :func:`whiten_transforms_from_cdf`.

    Parameters
    ----------
    x_grid : array
        1D array of N increasing grid values covering the parameter range
        to sample from.
    pdf_grid : array
        1D array of N increasing PDF values corresponding to each x_grid.
        Does not need to be normalized.
    psd : callable
        Function of frequency in 1/days that returns the power-spectral
        density of whitened temporal fluctations to sample from. Will only be
        called for positive frequencies. Normalization does not matter.
    n_sample : int
        Number of equally spaced samples to generate.
    dt_sec : float
        Time interval between samples in seconds.
    gen : np.random.RandomState or None
        Provide an existing RandomState for full control of reproducible random
        numbers, or None for non-reproducible random numbers.

    Returns
    -------
    array
        1D array of n_sample values drawn from the pdf_grid distribution
        with the temporal correlations described by psd.
    """
    x_grid = np.array(x_grid)
    pdf_grid = np.array(pdf_grid)
    if not np.all(np.diff(x_grid) > 0):
        raise ValueError('x_grid values are not increasing.')
    if x_grid.shape != pdf_grid.shape:
        raise ValueError('x_grid and pdf_grid arrays have different shapes.')
    # Initialize random numbers if necessary.
    if gen is None:
        gen = np.random.RandomState()
    # Calculate the CDF.
    cdf_grid = np.cumsum(pdf_grid)
    cdf_grid /= cdf_grid[-1]
    # Calculate whitening / unwhitening transforms.
    whiten, unwhiten = whiten_transforms_from_cdf(x_grid, cdf_grid)
    # Build a linear grid of frequencies present in the Fourier transform
    # of the requested time series.  Frequency units are 1/day.
    dt_day = dt_sec / (24. * 3600.)
    df_day = 1. / (n_sample * dt_day)
    f_grid = np.arange(1 + (n_sample // 2)) * df_day
    # Tabulate the power spectral density at each frequency.  The PSD
    # describes seeing fluctuations that have been "whitened", i.e., mapped
    # via a non-linear monotonic transform to have unit Gaussian probability
    # density.
    psd_grid = np.empty_like(f_grid)
    psd_grid[1:] = psd(f_grid[1:])
    # Force the mean to zero.
    psd_grid[0] = 0.
    # Force the variance to one.
    psd_grid[1:] /= psd_grid[1:].sum() * df_day ** 2
    # Generate random whitened samples: Gaussian Fourier coefficients
    # weighted by the PSD, transformed back to the time domain.
    n_psd = len(psd_grid)
    x_fft = np.ones(n_psd, dtype=complex)
    x_fft[1:-1].real = gen.normal(size=n_psd - 2)
    x_fft[1:-1].imag = gen.normal(size=n_psd - 2)
    x_fft *= np.sqrt(psd_grid) / (2 * dt_day)
    x_fft[0] *= np.sqrt(2)
    x = np.fft.irfft(x_fft, n_sample)
    # Un-whiten the samples to recover the desired 1D PDF.
    x_cdf = 0.5 * (1 + scipy.special.erf(x / np.sqrt(2)))
    return np.interp(x_cdf, cdf_grid, x_grid)
def _seeing_psd(freq):
"""Evaluate the 'chi-by-eye' fit of the seeing PSD described in
DESI-doc-3087.
"""
N, f0, a0, a1 = 8000, 0.10, 2.8, -1.1
return (N * (freq/f0)**a0 / (1 + (freq/f0)**a0) *
(freq/f0) ** a1 / (10 + (freq/f0) ** a1))
def sample_seeing(n_sample, dt_sec=180., median_seeing=1.1, max_seeing=2.5,
                  gen=None):
    """Generate a random time series of FWHM seeing values.

    See DESI-doc-3087 for details. Uses :func:`get_seeing_pdf`,
    :func:`_seeing_psd` and :func:`sample_timeseries`.

    Parameters
    ----------
    n_sample : int
        Number of equally spaced samples to generate.
    dt_sec : float
        Time interval between samples in seconds.
    median_seeing : float
        See :func:`get_seeing_pdf`.
    max_seeing : float
        See :func:`get_seeing_pdf`.
    gen : np.random.RandomState or None
        Provide an existing RandomState for full control of reproducible random
        numbers, or None for non-reproducible random numbers.

    Returns
    -------
    array
        1D array of randomly generated samples.
    """
    fwhm_grid, pdf_grid = get_seeing_pdf(median_seeing, max_seeing)
    return sample_timeseries(
        fwhm_grid, pdf_grid, _seeing_psd, n_sample, dt_sec, gen)
# Piecewise power-law parameterization of the transparency PDF used by
# get_transp_pdf(): cumulative weight of each component and the
# corresponding power of transparency.  See DESI-doc-3087.
_transp_pdf_cum = np.array([0.06,0.11,1.0])
_transp_pdf_powers = np.array([0., 2.5, 35.])
def get_transp_pdf(n=250):
    """Return PDF of atmospheric transparency.

    Derived from MzLS observations, but corrected for dust accumulation and
    measurement error. See DESI-doc-3087 for details.

    Parameters
    ----------
    n : int
        Size of grid to use for tabulating the returned arrays.

    Returns
    -------
    tuple
        Tuple (transp, pdf) that tabulates pdf[transp]. Normalized so that
        ``np.sum(pdf * np.gradient(transp)) = 1``.
    """
    transp = np.linspace(0., 1., n)
    pdf = np.zeros_like(transp)
    # Sum the power-law components, weighting each one by the incremental
    # cumulative probability it contributes.
    weights = np.diff(np.concatenate(([0.], _transp_pdf_cum)))
    for weight, power in zip(weights, _transp_pdf_powers):
        pdf += weight * (power + 1) * np.power(transp, power)
    # Normalize the tabulated PDF on this grid.
    pdf /= pdf.sum() * np.gradient(transp)
    return transp, pdf
def _transp_psd(freq):
"""Evaluate the 'chi-by-eye' fit of the transparency PSD described in
DESI-doc-3087.
"""
N, f0, a0, a1 = 500, 1.5, 0.0, -1.5
return (N * (freq/f0)**a0 / (1 + (freq/f0)**a0) *
(freq/f0) ** a1 / (1 + (freq/f0) ** a1))
def sample_transp(n_sample, dt_sec=180., gen=None):
    """Generate a random time series of atmospheric transparency values.

    See DESI-doc-3087 for details. Uses :func:`get_transp_pdf`,
    :func:`_transp_psd` and :func:`sample_timeseries`.

    Parameters
    ----------
    n_sample : int
        Number of equally spaced samples to generate.
    dt_sec : float
        Time interval between samples in seconds.
    gen : np.random.RandomState or None
        Provide an existing RandomState for full control of reproducible random
        numbers, or None for non-reproducible random numbers.

    Returns
    -------
    array
        1D array of randomly generated samples.
    """
    # The transparency PDF is tabulated on its default (n=250) grid.
    transp_grid, pdf_grid = get_transp_pdf()
    return sample_timeseries(
        transp_grid, pdf_grid, _transp_psd, n_sample, dt_sec, gen)
def dome_closed_fractions(start_date, stop_date,
                          replay='Y2007,Y2008,Y2009,Y2010,Y2011,Y2012,Y2013,Y2014'):
    """Return dome-closed fractions for each night of the survey.

    Years can be replayed in any order. If the number of years to replay is less
    than the survey duration, they are repeated.

    Parameters
    ----------
    start_date : datetime.date
        Survey starts on the evening of this date.
    stop_date : datetime.date
        Survey stops on the morning of this date.  Must be after start_date.
    replay : str
        Comma-separated list of years to replay, identified by arbitrary strings
        that must match column names in the DESIMODEL weather history.

    Returns
    -------
    numpy array
        1D array of N probabilities between 0-1, where N is the number of nights
        spanned by the start and stop dates.

    Raises
    ------
    ValueError
        If the date range is empty, the weather history file does not have
        365 rows, or a replay year is missing or invalid.
    """
    # Check the inputs.
    num_nights = (stop_date - start_date).days
    if num_nights <= 0:
        raise ValueError('Expected start_date < stop_date.')
    replay = replay.split(',')
    # Load tabulated daily weather history ($DESIMODEL must be set).
    DESIMODEL = os.getenv('DESIMODEL')
    path = os.path.join(DESIMODEL, 'data', 'weather', 'daily-2007-2017.csv')
    t = astropy.table.Table.read(path)
    if not len(t) == 365:
        raise ValueError('Invalid weather history length (expected 365).')
    years = t.colnames
    lostfracs = []
    for yr in replay:
        if yr not in years:
            raise ValueError('Replay year "{}" not in weather history.'.format(yr))
        lostfrac = t[yr].data
        if not np.all((lostfrac >= 0) & (lostfrac <= 1)):
            raise ValueError('Invalid weather history for replay year "{}".'.format(yr))
        lostfracs.append(lostfrac)
    # Replay the specified years (with wrap-around if necessary),
    # overlaid on the actual survey dates.
    probs = np.zeros(num_nights)
    start = start_date
    for year_num, year in enumerate(range(start_date.year, stop_date.year + 1)):
        first = datetime.date(year=year, month=1, day=1)
        stop = datetime.date(year=year + 1, month=1, day=1)
        if stop > stop_date:
            stop = stop_date
        n = (stop - start).days
        if n == 0:
            break
        # The history table has 365 rows, so drop one night in leap years.
        # NOTE(review): this appears to leave the final night of a leap
        # year at probability zero rather than skipping Feb 29 — confirm
        # that this is the intended behavior.
        if calendar.isleap(year):
            n -= 1
        idx = (start - start_date).days
        jdx = (start - first).days
        lostfrac = lostfracs[year_num % len(replay)]
        probs[idx:idx + n] = lostfrac[jdx:jdx + n]
        start = stop
    return probs
| bsd-3-clause |
robbiet480/home-assistant | homeassistant/components/agent_dvr/__init__.py | 5 | 2201 | """Support for Agent."""
import asyncio
import logging
from agent import AgentError
from agent.a import Agent
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import CONNECTION, DOMAIN as AGENT_DOMAIN, SERVER_URL
# Attribution string for data provided by this integration.
ATTRIBUTION = "ispyconnect.com"
DEFAULT_BRAND = "Agent DVR by ispyconnect.com"

_LOGGER = logging.getLogger(__name__)

# Platforms that config entries are forwarded to.
FORWARDS = ["camera"]
async def async_setup(hass, config):
    """Old way to set up integrations."""
    # Config-flow only integration: YAML setup is a no-op.
    return True
async def async_setup_entry(hass, config_entry):
    """Set up the Agent component.

    Connects to the Agent DVR server, registers its device and forwards
    the entry to the camera platform.  Raises ConfigEntryNotReady (so
    Home Assistant retries later) when the server cannot be reached or
    reports itself unavailable.
    """
    hass.data.setdefault(AGENT_DOMAIN, {})

    server_origin = config_entry.data[SERVER_URL]
    agent_client = Agent(server_origin, async_get_clientsession(hass))
    try:
        await agent_client.update()
    except AgentError:
        await agent_client.close()
        raise ConfigEntryNotReady
    if not agent_client.is_available:
        # Close the client before raising: the previous code leaked the
        # connection when the server responded but was not available.
        await agent_client.close()
        raise ConfigEntryNotReady

    await agent_client.get_devices()

    hass.data[AGENT_DOMAIN][config_entry.entry_id] = {CONNECTION: agent_client}

    device_registry = await dr.async_get_registry(hass)

    device_registry.async_get_or_create(
        config_entry_id=config_entry.entry_id,
        identifiers={(AGENT_DOMAIN, agent_client.unique)},
        manufacturer="iSpyConnect",
        name=f"Agent {agent_client.name}",
        model="Agent DVR",
        sw_version=agent_client.version,
    )

    for forward in FORWARDS:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(config_entry, forward)
        )

    return True
async def async_unload_entry(hass, config_entry):
    """Unload a config entry."""
    # Unload every forwarded platform; only drop our data if all succeed.
    unload_ok = all(
        await asyncio.gather(
            *[
                hass.config_entries.async_forward_entry_unload(config_entry, forward)
                for forward in FORWARDS
            ]
        )
    )

    # Always close the client connection, even if a platform failed to unload.
    await hass.data[AGENT_DOMAIN][config_entry.entry_id][CONNECTION].close()

    if unload_ok:
        hass.data[AGENT_DOMAIN].pop(config_entry.entry_id)

    return unload_ok
| apache-2.0 |
dh4nav/lammps | tools/moltemplate/examples/coarse_grained_examples/chromosome_metaphase_Naumova2013/moltemplate_files/interpolate_coords.py | 22 | 1934 | #!/usr/bin/env python
err_msg = """
Usage:
interpolate_coords.py Ndesired [scale] < coords_orig.raw > coords.raw
Example:
interpolate_coords.py 30118 3.0 < coords_orig.raw > coords.raw
# (Note: 30117 ~= 128000/4.25, but using 30118 makes interpolation cleaner.
# See the supplemental section of Naumova et al Science 2013, p 18.)
"""
import sys
from math import *
# Parse the argument list:
if len(sys.argv) <= 1:
sys.stderr.write("Error:\n\nTypical Usage:\n\n"+err_msg+"\n")
exit(1)
n_new = int(sys.argv[1])
if len(sys.argv) > 2:
scale = float(sys.argv[2])
else:
scale = 1.0
coords_orig = []
lines = sys.stdin.readlines()
for line in lines:
tokens = line.split()
if (len(tokens) > 0):
coords_orig.append(map(float, tokens))
g_dim = len(tokens)
n_orig = len(coords_orig)
if n_orig < 2:
sys.stderr.write("Error:\n\nInput file contains less than two lines of coordinates\n")
exit(1)
if n_new < 2:
sys.stderr.write("Error:\n\nOutput file will contain less than two lines of coordinates\n")
exit(1)
coords_new = [[0.0 for d in range(0, g_dim)] for i in range(0, n_new)]
for i_new in range(0, n_new):
I_orig = (i_new) * (float(n_orig-1) / float(n_new-1))
i_orig = int(floor(I_orig))
i_remainder = I_orig - i_orig
if (i_new < n_new-1):
for d in range(0, g_dim):
coords_new[i_new][d] = scale*(coords_orig[i_orig][d]
+
i_remainder*(coords_orig[i_orig+1][d]-
coords_orig[i_orig][d]))
else:
for d in range(0, g_dim):
coords_new[i_new][d] = scale*coords_orig[n_orig-1][d]
# print the coordates
for d in range(0, g_dim-1):
sys.stdout.write(str(coords_new[i_new][d]) + ' ')
sys.stdout.write(str(coords_new[i_new][g_dim-1]) + "\n")
| gpl-2.0 |
grevutiu-gabriel/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/style/checkers/common.py | 203 | 3229 | # Copyright (C) 2010 Chris Jerdonek (cjerdonek@webkit.org)
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Supports style checking not specific to any one file type."""
# Style-error categories that the checkers in this module can report.
# FIXME: Test this list in the same way that the list of CppChecker
# categories is tested, for example by checking that all of its
# elements appear in the unit tests. This should probably be done
# after moving the relevant cpp_unittest.ErrorCollector code
# into a shared location and refactoring appropriately.
categories = set([
    "whitespace/carriage_return",
    "whitespace/tab"])
class CarriageReturnChecker(object):

    """Supports checking for and handling carriage returns."""

    def __init__(self, handle_style_error):
        self._handle_style_error = handle_style_error

    def check(self, lines):
        """Check for and strip trailing carriage returns from lines."""
        for index, line in enumerate(lines):
            if not line.endswith("\r"):
                continue

            # Report with 1-based line numbering.
            self._handle_style_error(index + 1,
                                     "whitespace/carriage_return",
                                     1,
                                     "One or more unexpected \\r (^M) found; "
                                     "better to use only a \\n")

            lines[index] = line.rstrip("\r")

        return lines
class TabChecker(object):

    """Supports checking for and handling tabs."""

    def __init__(self, file_path, handle_style_error):
        self.file_path = file_path
        self.handle_style_error = handle_style_error

    def check(self, lines):
        # FIXME: share with cpp_style.
        for line_number, line in enumerate(lines, start=1):
            if line.find("\t") != -1:
                self.handle_style_error(line_number,
                                        "whitespace/tab", 5,
                                        "Line contains tab character.")
| bsd-3-clause |
MartinEnder/erpnext-de | erpnext/accounts/party.py | 4 | 11993 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
import datetime
from frappe import _, msgprint, scrub
from frappe.defaults import get_user_permissions
from frappe.utils import add_days, getdate, formatdate, get_first_day, date_diff
from erpnext.utilities.doctype.address.address import get_address_display
from erpnext.utilities.doctype.contact.contact import get_contact_details
from erpnext.exceptions import InvalidAccountCurrency
# Validation error for duplicate party accounts; raised by callers of this
# module (usage not visible in this chunk).
class DuplicatePartyAccountError(frappe.ValidationError): pass
@frappe.whitelist()
def get_party_details(party=None, account=None, party_type="Customer", company=None,
	posting_date=None, price_list=None, currency=None, doctype=None):
	"""Whitelisted entry point for fetching party details.

	Returns an empty dict when no party is given and validates that the
	party exists before delegating to _get_party_details (which performs
	the permission check)."""

	if not party:
		return {}

	if not frappe.db.exists(party_type, party):
		frappe.throw(_("{0}: {1} does not exists").format(party_type, party))

	return _get_party_details(party, account, party_type,
		company, posting_date, price_list, currency, doctype)
def _get_party_details(party=None, account=None, party_type="Customer", company=None,
	posting_date=None, price_list=None, currency=None, doctype=None, ignore_permissions=False):
	"""Assemble the dict of party details: account/due date, address,
	contact, price list, taxes and (for customers) the sales team.

	Raises frappe.PermissionError unless ignore_permissions is set or the
	user can read the party."""

	out = frappe._dict(set_account_and_due_date(party, account, party_type, company, posting_date, doctype))

	party = out[party_type.lower()]

	if not ignore_permissions and not frappe.has_permission(party_type, "read", party):
		frappe.throw(_("Not permitted for {0}").format(party), frappe.PermissionError)

	party = frappe.get_doc(party_type, party)

	set_address_details(out, party, party_type)
	set_contact_details(out, party, party_type)
	set_other_values(out, party, party_type)
	set_price_list(out, party, party_type, price_list)
	out["taxes_and_charges"] = set_taxes(party.name, party_type, posting_date, company, out.customer_group, out.supplier_type)

	# Fall back to the explicitly requested currency when the party has none.
	if not out.get("currency"):
		out["currency"] = currency

	# sales team
	if party_type=="Customer":
		out["sales_team"] = [{
			"sales_person": d.sales_person,
			"allocated_percentage": d.allocated_percentage or None
		} for d in party.get("sales_team")]

	return out
def set_address_details(out, party, party_type):
    """Populate billing (and, for Customer/Lead, shipping) address fields on ``out``."""
    party_field = party_type.lower()
    if party_type == "Lead":
        billing_field = "customer_address"
    else:
        billing_field = party_field + "_address"

    out[billing_field] = frappe.db.get_value("Address",
        {party_field: party.name, "is_primary_address": 1}, "name")

    # human-readable rendering of the billing address
    out.address_display = get_address_display(out[billing_field])

    # shipping address applies to customers and leads only
    if party_type in ("Customer", "Lead"):
        out.shipping_address_name = frappe.db.get_value("Address",
            {party_field: party.name, "is_shipping_address": 1}, "name")
        out.shipping_address = get_address_display(out["shipping_address_name"])
def set_contact_details(out, party, party_type):
    """Copy the primary contact's details onto ``out``, or blank the contact fields."""
    out.contact_person = frappe.db.get_value("Contact",
        {party_type.lower(): party.name, "is_primary_contact": 1}, "name")

    if out.contact_person:
        out.update(get_contact_details(out.contact_person))
    else:
        # no primary contact: explicitly clear every contact-related field
        blank_fields = ("contact_person", "contact_display", "contact_email",
            "contact_mobile", "contact_phone", "contact_designation",
            "contact_department")
        out.update(dict.fromkeys(blank_fields))
def set_other_values(out, party, party_type):
    """Copy identifying fields and ``default_*`` values from the party onto ``out``."""
    # fields copied verbatim from the party record
    if party_type == "Customer":
        copy_fields = ["customer_name", "customer_group", "territory"]
    else:
        copy_fields = ["supplier_name", "supplier_type"]
    for fieldname in copy_fields:
        out[fieldname] = party.get(fieldname)

    # fields stored on the party with a "default_" prefix (Customer doctype)
    default_fields = ["currency"]
    if party_type == "Customer":
        default_fields += ["sales_partner", "commission_rate"]
    for fieldname in default_fields:
        value = party.get("default_" + fieldname)
        if value:
            out[fieldname] = value
def get_default_price_list(party):
    """Return the default price list for ``party`` (a Document object).

    Falls back to the Customer Group default for customers; returns None
    when no default is configured anywhere.
    """
    own_price_list = party.default_price_list
    if own_price_list:
        return own_price_list

    if party.doctype == "Customer":
        return frappe.db.get_value("Customer Group",
            party.customer_group, "default_price_list") or None

    return None
def set_price_list(out, party, party_type, given_price_list):
    """Set ``out.selling_price_list`` / ``out.buying_price_list`` and its currency.

    Priority: a single user-permitted Price List, then the party's (or its
    group's) default, then ``given_price_list``.
    """
    # list(...) is required: on Python 3 ``filter`` returns an iterator, which
    # is always truthy and is not a list -- the original isinstance/len logic
    # silently broke and could pass a filter object to the DB lookup below.
    permitted = list(filter(None, get_user_permissions().get("Price List", [])))
    price_list = permitted[0] if len(permitted) == 1 else None

    if not price_list:
        price_list = get_default_price_list(party)

    if not price_list:
        price_list = given_price_list

    if price_list:
        out.price_list_currency = frappe.db.get_value("Price List", price_list, "currency")

    out["selling_price_list" if party.doctype=="Customer" else "buying_price_list"] = price_list
def set_account_and_due_date(party, account, party_type, company, posting_date, doctype):
    """Return a dict with the party name plus, for invoices, the
    receivable/payable account and the computed due date."""
    party_key = party_type.lower()

    if doctype not in ("Sales Invoice", "Purchase Invoice"):
        # not an invoice: account and due date do not apply
        return {party_key: party}

    if party:
        account = get_party_account(party_type, party, company)

    account_field = "debit_to" if party_type == "Customer" else "credit_to"
    return {
        party_key: party,
        account_field: account,
        "due_date": get_due_date(posting_date, party_type, party, company),
    }
def get_company_currency():
    """Return a frappe._dict mapping each Company name to its default currency."""
    companies = frappe.get_all("Company", fields=["name", "default_currency"])
    company_currency = frappe._dict()
    for company in companies:
        company_currency.setdefault(company.name, company.default_currency)
    return company_currency
@frappe.whitelist()
def get_party_account(party_type, party, company):
    """Returns the account for the given `party`.

    Will first search in party (Customer / Supplier) record, if not found,
    will search in group (Customer Group / Supplier Type),
    finally will return default."""
    if not company:
        frappe.throw(_("Please select a Company"))

    if party:
        # 1. account mapped directly on the party record
        account = frappe.db.get_value("Party Account",
            {"parenttype": party_type, "parent": party, "company": company}, "account")

        if not account:
            # 2. account mapped on the party's group (Customer Group / Supplier Type)
            party_group_doctype = "Customer Group" if party_type=="Customer" else "Supplier Type"
            group = frappe.db.get_value(party_type, party, scrub(party_group_doctype))
            account = frappe.db.get_value("Party Account",
                {"parenttype": party_group_doctype, "parent": group, "company": company}, "account")

        if not account:
            # 3. company-level default receivable/payable account
            default_account_name = "default_receivable_account" if party_type=="Customer" else "default_payable_account"
            account = frappe.db.get_value("Company", company, default_account_name)

        return account
    # NOTE(review): implicitly returns None when no party is given
def get_party_account_currency(party_type, party, company):
    """Return the currency of the party's account, cached for this request."""
    def generator():
        party_account = get_party_account(party_type, party, company)
        return frappe.db.get_value("Account", party_account, "account_currency")

    # frappe.local_cache memoizes per (party_type, party, company) key
    return frappe.local_cache("party_account_currency", (party_type, party, company), generator)
def get_party_gle_currency(party_type, party, company):
    """Return the currency of any submitted GL Entry for this party, or None."""
    def generator():
        # any single submitted GL Entry suffices; all entries for a party
        # within one company are expected to share a currency
        existing_gle_currency = frappe.db.sql("""select account_currency from `tabGL Entry`
            where docstatus=1 and company=%(company)s and party_type=%(party_type)s and party=%(party)s
            limit 1""", { "company": company, "party_type": party_type, "party": party })

        return existing_gle_currency[0][0] if existing_gle_currency else None

    # regenerate_if_none: a cached None is re-queried, since a GL Entry may
    # have been created after the first lookup
    return frappe.local_cache("party_gle_currency", (party_type, party, company), generator,
        regenerate_if_none=True)
def validate_party_gle_currency(party_type, party, company, party_account_currency=None):
    """Validate party account currency with existing GL Entry's currency"""
    if not party_account_currency:
        party_account_currency = get_party_account_currency(party_type, party, company)

    gle_currency = get_party_gle_currency(party_type, party, company)

    # once entries exist in one currency, the account currency may not diverge
    if gle_currency and party_account_currency != gle_currency:
        frappe.throw(_("Accounting Entry for {0}: {1} can only be made in currency: {2}")
            .format(party_type, party, gle_currency), InvalidAccountCurrency)
def validate_party_accounts(doc):
    """Ensure at most one account per company on ``doc`` and that each account's
    currency agrees with already-posted GL Entries."""
    seen_companies = []
    for party_account in doc.get("accounts"):
        if party_account.company in seen_companies:
            frappe.throw(_("There can only be 1 Account per Company in {0} {1}")
                .format(doc.doctype, doc.name), DuplicatePartyAccountError)
        else:
            seen_companies.append(party_account.company)

        account_currency = frappe.db.get_value("Account", party_account.account, "account_currency")
        gle_currency = get_party_gle_currency(doc.doctype, doc.name, party_account.company)
        if gle_currency and account_currency != gle_currency:
            frappe.throw(_("Accounting entries have already been made in currency {0} for company {1}. Please select a receivable or payable account with currency {0}.").format(gle_currency, party_account.company))
@frappe.whitelist()
def get_due_date(posting_date, party_type, party, company):
    """Set Due Date = Posting Date + Credit Days"""
    due_date = None
    if posting_date and party:
        due_date = posting_date
        if party_type=="Customer":
            # for Customers, get_credit_days returns a (basis, days) pair
            credit_days_based_on, credit_days = get_credit_days(party_type, party, company)
            if credit_days_based_on == "Fixed Days" and credit_days:
                due_date = add_days(posting_date, credit_days)
            elif credit_days_based_on == "Last Day of the Next Month":
                # first day of the month-after-next, minus one day
                due_date = (get_first_day(posting_date, 0, 2) + datetime.timedelta(-1)).strftime("%Y-%m-%d")
        else:
            # for other party types, get_credit_days returns a bare day count
            credit_days = get_credit_days(party_type, party, company)
            if credit_days:
                due_date = add_days(posting_date, credit_days)

    return due_date
def get_credit_days(party_type, party, company):
    # Returns (credit_days_based_on, credit_days) for Customers, a bare
    # credit_days value for other party types, and implicitly None when
    # party_type or party is missing -- callers must handle each shape.
    if party_type and party:
        if party_type == "Customer":
            credit_days_based_on, credit_days, customer_group = \
                frappe.db.get_value(party_type, party, ["credit_days_based_on", "credit_days", "customer_group"])

            if not credit_days_based_on:
                # fall back to the Customer Group, then Company-level settings
                credit_days_based_on, credit_days = \
                    frappe.db.get_value("Customer Group", customer_group, ["credit_days_based_on", "credit_days"]) \
                    or frappe.db.get_value("Company", company, ["credit_days_based_on", "credit_days"])

            return credit_days_based_on, credit_days
        else:
            credit_days, supplier_type = frappe.db.get_value(party_type, party, ["credit_days", "supplier_type"])
            if not credit_days:
                # fall back to the Supplier Type, then Company-level setting
                credit_days = frappe.db.get_value("Supplier Type", supplier_type, "credit_days") \
                    or frappe.db.get_value("Company", company, "credit_days")

            return credit_days
def validate_due_date(posting_date, due_date, party_type, party, company):
    """Throw if ``due_date`` precedes posting or exceeds the allowed credit period."""
    if getdate(due_date) < getdate(posting_date):
        frappe.throw(_("Due Date cannot be before Posting Date"))
    else:
        default_due_date = get_due_date(posting_date, party_type, party, company)
        if not default_due_date:
            return
        if default_due_date != posting_date and getdate(due_date) > getdate(default_due_date):
            is_credit_controller = frappe.db.get_single_value("Accounts Settings", "credit_controller") in frappe.get_roles()
            if is_credit_controller:
                # credit controllers may exceed the limit, with a warning only
                msgprint(_("Note: Due / Reference Date exceeds allowed customer credit days by {0} day(s)")
                    .format(date_diff(due_date, default_due_date)))
            else:
                frappe.throw(_("Due / Reference Date cannot be after {0}").format(formatdate(default_due_date)))
@frappe.whitelist()
def set_taxes(party, party_type, posting_date, company, customer_group=None, supplier_type=None,
        billing_address=None, shipping_address=None, use_for_shopping_cart=None):
    """Return the taxes-and-charges template matching the party's tax rules."""
    # local import avoids a circular dependency; note that this get_party_details
    # (from tax_rule) deliberately shadows the module-level function of the same name
    from erpnext.accounts.doctype.tax_rule.tax_rule import get_tax_template, get_party_details
    args = {
        party_type.lower(): party,
        "customer_group": customer_group,
        "supplier_type": supplier_type,
        "company": company
    }

    if billing_address or shipping_address:
        args.update(get_party_details(party, party_type, {"billing_address": billing_address, \
            "shipping_address": shipping_address }))
    else:
        args.update(get_party_details(party, party_type))

    if party_type=="Customer":
        args.update({"tax_type": "Sales"})
    else:
        args.update({"tax_type": "Purchase"})

    if use_for_shopping_cart:
        args.update({"use_for_shopping_cart": use_for_shopping_cart})

    return get_tax_template(posting_date, args)
| agpl-3.0 |
bgreenlee/sublime-github | lib/requests/packages/urllib3/connectionpool.py | 57 | 21510 | # urllib3/connectionpool.py
# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import logging
import socket
import errno
from socket import error as SocketError, timeout as SocketTimeout
from .util import resolve_cert_reqs, resolve_ssl_version, assert_fingerprint
try: # Python 3
from http.client import HTTPConnection, HTTPException
from http.client import HTTP_PORT, HTTPS_PORT
except ImportError:
from httplib import HTTPConnection, HTTPException
from httplib import HTTP_PORT, HTTPS_PORT
try: # Python 3
from queue import LifoQueue, Empty, Full
except ImportError:
from Queue import LifoQueue, Empty, Full
try: # Compiled with SSL?
HTTPSConnection = object
BaseSSLError = None
ssl = None
try: # Python 3
from http.client import HTTPSConnection
except ImportError:
from httplib import HTTPSConnection
import ssl
BaseSSLError = ssl.SSLError
except (ImportError, AttributeError): # Platform-specific: No SSL.
pass
from .request import RequestMethods
from .response import HTTPResponse
from .util import get_host, is_connection_dropped, ssl_wrap_socket
from .exceptions import (
ClosedPoolError,
EmptyPoolError,
HostChangedError,
MaxRetryError,
SSLError,
TimeoutError,
)
from .packages.ssl_match_hostname import match_hostname, CertificateError
from .packages import six
xrange = six.moves.xrange  # Python 2/3 compatible range iterator

log = logging.getLogger(__name__)

# Sentinel distinguishing "argument not supplied" from an explicit None.
_Default = object()

# Default port for each supported scheme.
port_by_scheme = {
    'http': HTTP_PORT,
    'https': HTTPS_PORT,
}
## Connection objects (extension of httplib)
class VerifiedHTTPSConnection(HTTPSConnection):
    """
    Based on httplib.HTTPSConnection but wraps the socket with
    SSL certification.
    """
    # class-level defaults, overridden per-instance via set_cert()
    cert_reqs = None
    ca_certs = None
    ssl_version = None

    def set_cert(self, key_file=None, cert_file=None,
                 cert_reqs=None, ca_certs=None,
                 assert_hostname=None, assert_fingerprint=None):
        # Store the TLS parameters; they are only applied in connect().
        self.key_file = key_file
        self.cert_file = cert_file
        self.cert_reqs = cert_reqs
        self.ca_certs = ca_certs
        self.assert_hostname = assert_hostname
        self.assert_fingerprint = assert_fingerprint

    def connect(self):
        # Add certificate verification
        sock = socket.create_connection((self.host, self.port), self.timeout)

        # normalize string/None settings into ssl module constants
        resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs)
        resolved_ssl_version = resolve_ssl_version(self.ssl_version)

        # Wrap socket using verification with the root certs in
        # trusted_root_certs
        self.sock = ssl_wrap_socket(sock, self.key_file, self.cert_file,
                                    cert_reqs=resolved_cert_reqs,
                                    ca_certs=self.ca_certs,
                                    server_hostname=self.host,
                                    ssl_version=resolved_ssl_version)

        if resolved_cert_reqs != ssl.CERT_NONE:
            if self.assert_fingerprint:
                # pinning: verify the peer cert by its fingerprint only
                assert_fingerprint(self.sock.getpeercert(binary_form=True),
                                   self.assert_fingerprint)
            else:
                # otherwise verify the hostname against the certificate
                match_hostname(self.sock.getpeercert(),
                               self.assert_hostname or self.host)
## Pool objects
class ConnectionPool(object):
    """
    Base class for all connection pools, such as
    :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
    """

    # subclasses override the scheme; the queue class is LIFO so that the
    # most-recently-used connection is reused first
    scheme = None
    QueueCls = LifoQueue

    def __init__(self, host, port=None):
        self.host = host
        self.port = port

    def __str__(self):
        cls_name = type(self).__name__
        return '%s(host=%r, port=%r)' % (cls_name, self.host, self.port)
class HTTPConnectionPool(ConnectionPool, RequestMethods):
    """
    Thread-safe connection pool for one host.

    :param host:
        Host used for this HTTP Connection (e.g. "localhost"), passed into
        :class:`httplib.HTTPConnection`.

    :param port:
        Port used for this HTTP Connection (None is equivalent to 80), passed
        into :class:`httplib.HTTPConnection`.

    :param strict:
        Causes BadStatusLine to be raised if the status line can't be parsed
        as a valid HTTP/1.0 or 1.1 status line, passed into
        :class:`httplib.HTTPConnection`.

    :param timeout:
        Socket timeout for each individual connection, can be a float. None
        disables timeout.

    :param maxsize:
        Number of connections to save that can be reused. More than 1 is useful
        in multithreaded situations. If ``block`` is set to false, more
        connections will be created but they will not be saved once they've
        been used.

    :param block:
        If set to True, no more than ``maxsize`` connections will be used at
        a time. When no free connections are available, the call will block
        until a connection has been released. This is a useful side effect for
        particular multithreaded situations where one does not want to use more
        than maxsize connections per host to prevent flooding.

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.
    """

    scheme = 'http'

    def __init__(self, host, port=None, strict=False, timeout=None, maxsize=1,
                 block=False, headers=None):
        ConnectionPool.__init__(self, host, port)
        RequestMethods.__init__(self, headers)

        self.strict = strict
        self.timeout = timeout
        self.pool = self.QueueCls(maxsize)
        self.block = block

        # Fill the queue up so that doing get() on it will block properly
        for _ in xrange(maxsize):
            self.pool.put(None)

        # These are mostly for testing and debugging purposes.
        self.num_connections = 0
        self.num_requests = 0

    def _new_conn(self):
        """
        Return a fresh :class:`httplib.HTTPConnection`.
        """
        self.num_connections += 1
        log.info("Starting new HTTP connection (%d): %s" %
                 (self.num_connections, self.host))
        return HTTPConnection(host=self.host,
                              port=self.port,
                              strict=self.strict)

    def _get_conn(self, timeout=None):
        """
        Get a connection. Will return a pooled connection if one is available.

        If no connections are available and :prop:`.block` is ``False``, then a
        fresh connection is returned.

        :param timeout:
            Seconds to wait before giving up and raising
            :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
            :prop:`.block` is ``True``.
        """
        conn = None
        try:
            conn = self.pool.get(block=self.block, timeout=timeout)

        except AttributeError:  # self.pool is None
            raise ClosedPoolError(self, "Pool is closed.")

        except Empty:
            if self.block:
                raise EmptyPoolError(self,
                                     "Pool reached maximum size and no more "
                                     "connections are allowed.")
            pass  # Oh well, we'll create a new connection then

        # If this is a persistent connection, check if it got disconnected
        if conn and is_connection_dropped(conn):
            log.info("Resetting dropped connection: %s" % self.host)
            conn.close()

        return conn or self._new_conn()

    def _put_conn(self, conn):
        """
        Put a connection back into the pool.

        :param conn:
            Connection object for the current host and port as returned by
            :meth:`._new_conn` or :meth:`._get_conn`.

        If the pool is already full, the connection is closed and discarded
        because we exceeded maxsize. If connections are discarded frequently,
        then maxsize should be increased.

        If the pool is closed, then the connection will be closed and discarded.
        """
        try:
            self.pool.put(conn, block=False)
            return  # Everything is dandy, done.
        except AttributeError:
            # self.pool is None.
            pass
        except Full:
            # This should never happen if self.block == True
            log.warning("HttpConnectionPool is full, discarding connection: %s"
                        % self.host)

        # Connection never got put back into the pool, close it.
        conn.close()

    def _make_request(self, conn, method, url, timeout=_Default,
                      **httplib_request_kw):
        """
        Perform a request on a given httplib connection object taken from our
        pool.
        """
        self.num_requests += 1

        if timeout is _Default:
            timeout = self.timeout

        conn.timeout = timeout  # This only does anything in Py26+
        conn.request(method, url, **httplib_request_kw)

        # Set timeout
        sock = getattr(conn, 'sock', False)  # AppEngine doesn't have sock attr.
        if sock:
            sock.settimeout(timeout)

        try:  # Python 2.7+, use buffering of HTTP responses
            httplib_response = conn.getresponse(buffering=True)
        except TypeError:  # Python 2.6 and older
            httplib_response = conn.getresponse()

        # AppEngine doesn't have a version attr.
        http_version = getattr(conn, '_http_vsn_str', 'HTTP/?')
        log.debug("\"%s %s %s\" %s %s" % (method, url, http_version,
                                          httplib_response.status,
                                          httplib_response.length))
        return httplib_response

    def close(self):
        """
        Close all pooled connections and disable the pool.
        """
        # Disable access to the pool
        old_pool, self.pool = self.pool, None

        try:
            while True:
                conn = old_pool.get(block=False)
                if conn:
                    conn.close()

        except Empty:
            pass  # Done.

    def is_same_host(self, url):
        """
        Check if the given ``url`` is a member of the same host as this
        connection pool.
        """
        # relative URLs are always the same host
        if url.startswith('/'):
            return True

        # TODO: Add optional support for socket.gethostbyname checking.
        scheme, host, port = get_host(url)

        if self.port and not port:
            # Use explicit default port for comparison when none is given.
            port = port_by_scheme.get(scheme)

        return (scheme, host, port) == (self.scheme, self.host, self.port)

    def urlopen(self, method, url, body=None, headers=None, retries=3,
                redirect=True, assert_same_host=True, timeout=_Default,
                pool_timeout=None, release_conn=None, **response_kw):
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.

        .. note::

           More commonly, it's appropriate to use a convenience method provided
           by :class:`.RequestMethods`, such as :meth:`request`.

        .. note::

           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.

        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)

        :param body:
            Data to send in the request body (useful for creating
            POST requests, see HTTPConnectionPool.post_url for
            more convenience).

        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.

        :param retries:
            Number of retries to allow before raising a MaxRetryError exception.

        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307). Each redirect counts as a retry.

        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When False, you can
            use the pool on an HTTP proxy and request foreign hosts.

        :param timeout:
            If specified, overrides the default timeout for this one request.

        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.

        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of
            ``response_kw.get('preload_content', True)``.

        :param \**response_kw:
            Additional parameters are passed to
            :meth:`urllib3.response.HTTPResponse.from_httplib`
        """
        if headers is None:
            headers = self.headers

        if retries < 0:
            raise MaxRetryError(self, url)

        if timeout is _Default:
            timeout = self.timeout

        if release_conn is None:
            release_conn = response_kw.get('preload_content', True)

        # Check host
        if assert_same_host and not self.is_same_host(url):
            host = "%s://%s" % (self.scheme, self.host)
            if self.port:
                host = "%s:%d" % (host, self.port)

            raise HostChangedError(self, url, retries - 1)

        conn = None

        try:
            # Request a connection from the queue
            conn = self._get_conn(timeout=pool_timeout)

            # Make the request on the httplib connection object
            httplib_response = self._make_request(conn, method, url,
                                                  timeout=timeout,
                                                  body=body, headers=headers)

            # If we're going to release the connection in ``finally:``, then
            # the request doesn't need to know about the connection. Otherwise
            # it will also try to release it and we'll have a double-release
            # mess.
            response_conn = not release_conn and conn

            # Import httplib's response into our own wrapper object
            response = HTTPResponse.from_httplib(httplib_response,
                                                 pool=self,
                                                 connection=response_conn,
                                                 **response_kw)

            # else:
            #     The connection will be put back into the pool when
            #     ``response.release_conn()`` is called (implicitly by
            #     ``response.read()``)

        except Empty as e:
            # Timed out by queue
            raise TimeoutError(self, url,
                               "Request timed out. (pool_timeout=%s)" %
                               pool_timeout)

        except SocketTimeout as e:
            # Timed out by socket
            raise TimeoutError(self, url,
                               "Request timed out. (timeout=%s)" %
                               timeout)

        except BaseSSLError as e:
            # SSL certificate error
            raise SSLError(e)

        except CertificateError as e:
            # Name mismatch
            raise SSLError(e)

        except (HTTPException, SocketError) as e:
            # Connection broken, discard. It will be replaced next _get_conn().
            conn = None
            # This is necessary so we can access e below
            err = e

            if retries == 0:
                raise MaxRetryError(self, url, e)

        finally:
            if release_conn:
                # Put the connection back to be reused. If the connection is
                # expired then it will be None, which will get replaced with a
                # fresh connection during _get_conn.
                self._put_conn(conn)

        if not conn:
            # Try again
            log.warn("Retrying (%d attempts remain) after connection "
                     "broken by '%r': %s" % (retries, err, url))
            return self.urlopen(method, url, body, headers, retries - 1,
                                redirect, assert_same_host,
                                timeout=timeout, pool_timeout=pool_timeout,
                                release_conn=release_conn, **response_kw)

        # Handle redirect?
        redirect_location = redirect and response.get_redirect_location()
        if redirect_location:
            if response.status == 303:
                # see-other redirects must be followed with GET
                method = 'GET'
            log.info("Redirecting %s -> %s" % (url, redirect_location))
            return self.urlopen(method, redirect_location, body, headers,
                                retries - 1, redirect, assert_same_host,
                                timeout=timeout, pool_timeout=pool_timeout,
                                release_conn=release_conn, **response_kw)

        return response
class HTTPSConnectionPool(HTTPConnectionPool):
    """
    Same as :class:`.HTTPConnectionPool`, but HTTPS.

    When Python is compiled with the :mod:`ssl` module, then
    :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
    instead of :class:`httplib.HTTPSConnection`.

    :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
    ``assert_hostname`` and ``host`` in this order to verify connections.

    The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs`` and
    ``ssl_version`` are only used if :mod:`ssl` is available and are fed into
    :meth:`urllib3.util.ssl_wrap_socket` to upgrade the connection socket
    into an SSL socket.
    """

    scheme = 'https'

    def __init__(self, host, port=None,
                 strict=False, timeout=None, maxsize=1,
                 block=False, headers=None,
                 key_file=None, cert_file=None, cert_reqs=None,
                 ca_certs=None, ssl_version=None,
                 assert_hostname=None, assert_fingerprint=None):

        HTTPConnectionPool.__init__(self, host, port,
                                    strict, timeout, maxsize,
                                    block, headers)
        # TLS settings forwarded to each new connection in _new_conn()
        self.key_file = key_file
        self.cert_file = cert_file
        self.cert_reqs = cert_reqs
        self.ca_certs = ca_certs
        self.ssl_version = ssl_version
        self.assert_hostname = assert_hostname
        self.assert_fingerprint = assert_fingerprint

    def _new_conn(self):
        """
        Return a fresh :class:`httplib.HTTPSConnection`.
        """
        self.num_connections += 1
        log.info("Starting new HTTPS connection (%d): %s"
                 % (self.num_connections, self.host))

        if not ssl:  # Platform-specific: Python compiled without +ssl
            if not HTTPSConnection or HTTPSConnection is object:
                raise SSLError("Can't connect to HTTPS URL because the SSL "
                               "module is not available.")

            # unverified fallback when the ssl module is unavailable
            return HTTPSConnection(host=self.host,
                                   port=self.port,
                                   strict=self.strict)

        connection = VerifiedHTTPSConnection(host=self.host,
                                             port=self.port,
                                             strict=self.strict)
        connection.set_cert(key_file=self.key_file, cert_file=self.cert_file,
                            cert_reqs=self.cert_reqs, ca_certs=self.ca_certs,
                            assert_hostname=self.assert_hostname,
                            assert_fingerprint=self.assert_fingerprint)

        connection.ssl_version = self.ssl_version

        return connection
def connection_from_url(url, **kw):
    """
    Build a :class:`.ConnectionPool` for the host of ``url``.

    Saves the caller from having to parse the scheme, host and port out of
    the URL before instantiating a pool.

    :param url:
        Absolute URL string that must include the scheme. Port is optional.
    :param kw:
        Extra keyword arguments forwarded to the constructor of the chosen
        pool class (timeout, maxsize, headers, etc.).

    Example: ::
        >>> conn = connection_from_url('http://google.com/')
        >>> r = conn.request('GET', '/')
    """
    scheme, host, port = get_host(url)
    # Dispatch on the scheme: HTTPS gets the certificate-aware pool.
    pool_cls = HTTPSConnectionPool if scheme == 'https' else HTTPConnectionPool
    return pool_cls(host, port=port, **kw)
| mit |
dfalt974/SickRage | lib/chardet/gb2312freq.py | 343 | 20715 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# GB2312 most frequently used character table
#
# Char to FreqOrder table , from hz6763
# 512 --> 0.79 -- 0.79
# 1024 --> 0.92 -- 0.13
# 2048 --> 0.98 -- 0.06
# 6768 --> 1.00 -- 0.02
#
# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79
# Random Distribution Ration = 512 / (3755 - 512) = 0.157
#
# Typical Distribution Ratio about 25% of Ideal one, still much higher that RDR
# Distribution-ratio threshold used by the GB2312 analyser; per the notes
# above, ~25% of the ideal ratio (3.79) still comfortably beats random text.
GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9
# Number of leading entries of the frequency-order table that are consulted.
GB2312_TABLE_SIZE = 3760
GB2312_CHAR_TO_FREQ_ORDER = (
1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205,
2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842,
2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409,
249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670,
1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820,
1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585,
152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566,
1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575,
2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853,
3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061,
544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155,
1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406,
927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816,
2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606,
360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023,
2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414,
1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513,
3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052,
198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570,
1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575,
253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250,
2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506,
1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26,
3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835,
1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686,
2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054,
1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894,
585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105,
3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403,
3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694,
252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873,
3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940,
836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121,
1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 110,4549,2066, 648,
3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992,
2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233,
1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157,
755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807,
1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094,
4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258,
887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478,
3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152,
3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909,
509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272,
1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221,
2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252,
1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301,
1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254,
389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070,
3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461,
3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360,
4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124,
296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535,
3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243,
1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713,
1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071,
4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442,
215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946,
814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257,
3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180,
1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427,
602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781,
1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724,
2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937,
930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943,
432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789,
396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552,
3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246,
4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451,
3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310,
750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860,
2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297,
2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780,
2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745,
776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936,
2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032,
968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657,
163,2167, 290,1209,1622,3378, 550, 634,2508,2510, 695,2634,2384,2512,1476,1414,
220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976,
3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436,
2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254,
2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536,
1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238,
18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059,
2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741,
90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447,
286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601,
1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269,
1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894,
915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173,
681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994,
1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956,
2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437,
3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154,
2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240,
2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143,
2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634,
3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472,
1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541,
1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143,
2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312,
1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414,
3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754,
1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424,
1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302,
3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739,
795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004,
2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484,
1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739,
4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535,
1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641,
1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307,
3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573,
1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533,
47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965,
504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99,
1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280,
160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505,
1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012,
1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039,
744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 892,2481,1623,4077, 982,
3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530,
4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392,
3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656,
2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220,
2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766,
1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535,
3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728,
2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338,
1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627,
1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885,
125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411,
2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671,
2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162,
3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774,
4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524,
3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346,
180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040,
3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188,
2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280,
1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131,
259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947,
774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970,
3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814,
4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557,
2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997,
1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972,
1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369,
766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376,
1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480,
3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610,
955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128,
642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769,
1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207,
57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392,
1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623,
193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782,
2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650,
158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478,
2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773,
2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007,
1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323,
1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598,
2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961,
819,1541, 142,2284, 44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302,
1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409,
1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683,
2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191,
2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616,
3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302,
1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774,
4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147,
571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731,
845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464,
3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377,
1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315,
470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557,
3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903,
1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060,
4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261,
1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092,
2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810,
1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708,
498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658,
1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871,
3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503,
448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229,
2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112,
136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504,
1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389,
1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27,
1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542,
3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861,
2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845,
3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700,
3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469,
3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582,
996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999,
2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274,
786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020,
2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601,
12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628,
1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31,
475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668,
233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778,
1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169,
3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667,
3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 63,2076, 314,1881,
1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276,
1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320,
3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751,
2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432,
2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772,
1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843,
3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116,
451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904,
4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652,
1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664,
2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770,
3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283,
3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626,
1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713,
768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333,
391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062,
2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555,
931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014,
1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510,
386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015,
1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459,
1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390,
1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238,
1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232,
1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624,
381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189,
852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, #last 512
)
| gpl-3.0 |
Hellowlol/PyTunes | libs/simplejson/tests/test_namedtuple.py | 147 | 4004 | from __future__ import absolute_import
import unittest
import simplejson as json
from simplejson.compat import StringIO
try:
    from collections import namedtuple
except ImportError:
    # Pre-2.6 Pythons lack collections.namedtuple; provide minimal tuple
    # subclasses exposing the only namedtuple feature these tests rely on:
    # a callable _asdict() returning a field-name -> value mapping.
    class Value(tuple):
        def __new__(cls, *args):
            return tuple.__new__(cls, args)
        def _asdict(self):
            return {'value': self[0]}
    class Point(tuple):
        def __new__(cls, *args):
            return tuple.__new__(cls, args)
        def _asdict(self):
            return {'x': self[0], 'y': self[1]}
else:
    # Real namedtuples: a one-field Value and a two-field Point.
    Value = namedtuple('Value', ['value'])
    Point = namedtuple('Point', ['x', 'y'])
class DuckValue(object):
    # Not a tuple subclass, but "quacks" like Value: it exposes a callable
    # _asdict(), which is all simplejson's namedtuple support looks for.
    def __init__(self, *args):
        self.value = Value(*args)
    def _asdict(self):
        return self.value._asdict()
class DuckPoint(object):
    # Duck-typed counterpart of Point: delegates _asdict() to a wrapped
    # Point instance without being a tuple itself.
    def __init__(self, *args):
        self.point = Point(*args)
    def _asdict(self):
        return self.point._asdict()
class DeadDuck(object):
    # _asdict exists but is not callable: dump/dumps must raise TypeError
    # for this object when namedtuple_as_object is enabled.
    _asdict = None
class DeadDict(dict):
    # A dict subclass with a non-callable _asdict: must still serialize as
    # a plain dict instead of raising.
    _asdict = None
def _bare(v):
    # Pass the value through unchanged.
    return v
def _in_list(v):
    # Embed the value inside a list.
    return [v]
def _in_dict_in_list(v):
    # Embed the value inside a dict nested in a list.
    return [{'key': v}]
# Factories that wrap a test value at increasing nesting depths, so the
# _asdict-related tests exercise serialization at several container levels.
CONSTRUCTORS = [
    _bare,
    _in_list,
    _in_dict_in_list,
]
class TestNamedTuple(unittest.TestCase):
    """Exercise simplejson's namedtuple support: objects with a callable
    _asdict() serialize as dicts when namedtuple_as_object is enabled (the
    default), fall back to arrays when it is disabled, and raise TypeError
    when neither representation is allowed or _asdict is not callable."""
    def test_namedtuple_dumps(self):
        # With namedtuple_as_object on (default), dumps() emits the
        # _asdict() mapping regardless of the tuple_as_array setting.
        for v in [Value(1), Point(1, 2), DuckValue(1), DuckPoint(1, 2)]:
            d = v._asdict()
            self.assertEqual(d, json.loads(json.dumps(v)))
            self.assertEqual(
                d,
                json.loads(json.dumps(v, namedtuple_as_object=True)))
            self.assertEqual(d, json.loads(json.dumps(v, tuple_as_array=False)))
            self.assertEqual(
                d,
                json.loads(json.dumps(v, namedtuple_as_object=True,
                                      tuple_as_array=False)))
    def test_namedtuple_dumps_false(self):
        # With namedtuple_as_object off, namedtuples serialize as plain
        # arrays; disabling tuple_as_array too leaves no representation and
        # must raise TypeError.
        for v in [Value(1), Point(1, 2)]:
            l = list(v)
            self.assertEqual(
                l,
                json.loads(json.dumps(v, namedtuple_as_object=False)))
            self.assertRaises(TypeError, json.dumps, v,
                              tuple_as_array=False, namedtuple_as_object=False)
    def test_namedtuple_dump(self):
        # Same expectations as test_namedtuple_dumps, via the streaming
        # dump() API writing into a StringIO.
        for v in [Value(1), Point(1, 2), DuckValue(1), DuckPoint(1, 2)]:
            d = v._asdict()
            sio = StringIO()
            json.dump(v, sio)
            self.assertEqual(d, json.loads(sio.getvalue()))
            sio = StringIO()
            json.dump(v, sio, namedtuple_as_object=True)
            self.assertEqual(
                d,
                json.loads(sio.getvalue()))
            sio = StringIO()
            json.dump(v, sio, tuple_as_array=False)
            self.assertEqual(d, json.loads(sio.getvalue()))
            sio = StringIO()
            json.dump(v, sio, namedtuple_as_object=True,
                      tuple_as_array=False)
            self.assertEqual(
                d,
                json.loads(sio.getvalue()))
    def test_namedtuple_dump_false(self):
        # Same expectations as test_namedtuple_dumps_false, via dump().
        for v in [Value(1), Point(1, 2)]:
            l = list(v)
            sio = StringIO()
            json.dump(v, sio, namedtuple_as_object=False)
            self.assertEqual(
                l,
                json.loads(sio.getvalue()))
            self.assertRaises(TypeError, json.dump, v, StringIO(),
                tuple_as_array=False, namedtuple_as_object=False)
    def test_asdict_not_callable_dump(self):
        # A non-callable _asdict must raise via dump() for non-dict objects,
        # while a dict subclass still serializes as a plain dict.
        for f in CONSTRUCTORS:
            self.assertRaises(TypeError,
                json.dump, f(DeadDuck()), StringIO(), namedtuple_as_object=True)
            sio = StringIO()
            json.dump(f(DeadDict()), sio, namedtuple_as_object=True)
            self.assertEqual(
                json.dumps(f({})),
                sio.getvalue())
    def test_asdict_not_callable_dumps(self):
        # dumps() counterpart of test_asdict_not_callable_dump.
        for f in CONSTRUCTORS:
            self.assertRaises(TypeError,
                json.dumps, f(DeadDuck()), namedtuple_as_object=True)
            self.assertEqual(
                json.dumps(f({})),
                json.dumps(f(DeadDict()), namedtuple_as_object=True))
| gpl-3.0 |
taoliu/taolib | Scripts/kmeans2image.py | 1 | 1598 | #!/usr/bin/env python
# Time-stamp: <2009-04-14 14:07:21 Tao Liu>
import os
import sys
import re
from PIL import Image, ImageDraw
# ------------------------------------
# Main function
# ------------------------------------
help_message = """
Draw the K-means clustering result.
need 6 parameter: %s <kmeans_file> <lim> <x_points> <y_points> <x_ext> <y_ext>
kmeans_file : tab-delimited plain text file. First column is cluster number by k-means, and following columns are data columns.
lim : data value limit
x_points : number of data value columns
y_points : number of rows
x_ext : pixels extended in x-axis
y_ext : pixels extended in y-axis
""" % sys.argv[0]
def main():
if len(sys.argv) < 7:
sys.stderr.write(help_message)
sys.exit(1)
fhd = open (sys.argv[1])
lim = int(sys.argv[2])
x_points = int(sys.argv[3])
y_points = int(sys.argv[4])
x_ext = int(sys.argv[5])
y_ext = int(sys.argv[6])
a = Image.new("RGB",(x_points*x_ext,y_points*y_ext),"white")
d = ImageDraw.Draw(a)
y = 0
for i in fhd:
y += 1
i.strip()
if not re.search("^\d+",i):
continue
values = map(float,i.split())
x = 0
cl = values[0]
for v in values[1:]:
x += 1
c = "hsl(%d,100%%,%d%%)" % (cl*70,min(1,v/lim)*90.0)
d.rectangle([(int(x*x_ext),int(y*y_ext)),(int((x+1)*x_ext),int((y+1)*y_ext))],outline=c,fill=c)
a.save(sys.argv[1]+".png")
print "check %s!" % (sys.argv[1]+".png")
if __name__ == '__main__':
main()
| bsd-3-clause |
inspyration/odoo | addons/l10n_ch/__init__.py | 424 | 1212 | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi. Copyright Camptocamp SA
# Financial contributors: Hasa SA, Open Net SA,
# Prisme Solutions Informatique SA, Quod SA
#
# Translation contributors: brain-tec AG, Agile Business Group
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import account_wizard
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
nzavagli/UnrealPy | UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/django-1.8.2/django/contrib/gis/geos/prepared.py | 137 | 2437 | from .base import GEOSBase
from .error import GEOSException
from .geometry import GEOSGeometry
from .libgeos import geos_version_info
from .prototypes import prepared as capi
class PreparedGeometry(GEOSBase):
    """
    A geometry that is prepared for performing certain operations.

    Preparing a geometry builds an internal index that speeds up repeated
    predicate tests (contains, covers, intersects; with GEOS >= 3.3.0 also
    crosses, disjoint, overlaps, touches, within) of the same geometry
    against many others.
    """
    ptr_type = capi.PREPGEOM_PTR

    def __init__(self, geom):
        # Keeping a reference to the original geometry object to prevent it
        # from being garbage collected which could then crash the prepared one
        # See #21662
        self._base_geom = geom
        if not isinstance(geom, GEOSGeometry):
            raise TypeError
        self.ptr = capi.geos_prepare(geom.ptr)

    def __del__(self):
        # capi may already be None during interpreter shutdown.
        if self._ptr and capi:
            capi.prepared_destroy(self._ptr)

    def _require_geos_33(self, op_name):
        """
        Raise GEOSException unless the underlying GEOS is at least 3.3.0.

        Bug fix: the previous per-method checks compared version strings
        lexicographically, which misclassifies e.g. '3.10.0' as older than
        '3.3.0'.  Compare numeric components when possible, falling back to
        the old string comparison for non-numeric version strings.
        """
        version = geos_version_info()['version']
        parts = version.split('.')[:3]
        try:
            numeric = tuple(int(p) for p in parts) + (0,) * (3 - len(parts))
        except ValueError:
            numeric = None  # non-numeric component (e.g. a beta suffix)
        too_old = numeric < (3, 3, 0) if numeric is not None else version < '3.3.0'
        if too_old:
            raise GEOSException(
                "%s on prepared geometries requires GEOS >= 3.3.0" % op_name)

    def contains(self, other):
        return capi.prepared_contains(self.ptr, other.ptr)

    def contains_properly(self, other):
        return capi.prepared_contains_properly(self.ptr, other.ptr)

    def covers(self, other):
        return capi.prepared_covers(self.ptr, other.ptr)

    def intersects(self, other):
        return capi.prepared_intersects(self.ptr, other.ptr)

    # The predicates below were added in GEOS 3.3.

    def crosses(self, other):
        self._require_geos_33('crosses')
        return capi.prepared_crosses(self.ptr, other.ptr)

    def disjoint(self, other):
        self._require_geos_33('disjoint')
        return capi.prepared_disjoint(self.ptr, other.ptr)

    def overlaps(self, other):
        self._require_geos_33('overlaps')
        return capi.prepared_overlaps(self.ptr, other.ptr)

    def touches(self, other):
        self._require_geos_33('touches')
        return capi.prepared_touches(self.ptr, other.ptr)

    def within(self, other):
        self._require_geos_33('within')
        return capi.prepared_within(self.ptr, other.ptr)
| mit |
vipullakhani/mi-instrument | mi/instrument/kml/cam/camds/test/test_driver.py | 6 | 32447 | """
@package mi.instrument.kml.cam.camds.driver
@file marine-integrations/mi/instrument/kml/cam/camds/test/test_driver.py
@author Sung Ahn
@brief Test Driver for CAMDS
Release notes:
"""
__author__ = 'Sung Ahn'
__license__ = 'Apache 2.0'
import copy
import time
from nose.plugins.attrib import attr
from mock import Mock
from mi.core.instrument.chunker import StringChunker
from mi.core.log import get_logger
log = get_logger()
from mi.idk.unit_test import InstrumentDriverUnitTestCase
from mi.idk.unit_test import InstrumentDriverIntegrationTestCase
from mi.instrument.kml.cam.camds.driver import DataParticleType, CamdsDiskStatusKey, CamdsHealthStatusKey
from mi.idk.unit_test import DriverTestMixin
from mi.idk.unit_test import DriverStartupConfigKey
from mi.instrument.kml.cam.camds.driver import Parameter, ParameterIndex
from mi.instrument.kml.cam.camds.driver import CAMDSPrompt, InstrumentDriver, CAMDSProtocol
from mi.instrument.kml.cam.camds.driver import ScheduledJob
from mi.instrument.kml.cam.camds.driver import InstrumentCommands, ProtocolState, ProtocolEvent, Capability
from mi.idk.unit_test import InstrumentDriverTestCase, ParameterTestConfigKey
from mi.core.common import BaseEnum
NEWLINE = '\r\n'  # line terminator used by the CAMDS instrument protocol
# ##
# Driver parameters for tests
###
# Register the driver under test with the IDK framework: the module/class to
# load, agent identifiers, the particle types to expect, and a startup config
# that seeds every parameter with its driver-defined default plus the three
# scheduled jobs (sampling, status polling, capture stop) the driver manages.
InstrumentDriverTestCase.initialize(
    driver_module='mi.instrument.kml.cam.camds.driver',
    driver_class="InstrumentDriver",
    instrument_agent_resource_id='HTWZMW',
    instrument_agent_preload_id='IA7',
    instrument_agent_name='kml cam',
    instrument_agent_packet_config=DataParticleType(),
    driver_startup_config={
        DriverStartupConfigKey.PARAMETERS: {
            Parameter.ACQUIRE_STATUS_INTERVAL[ParameterIndex.KEY]: Parameter.ACQUIRE_STATUS_INTERVAL[
                ParameterIndex.DEFAULT_DATA],
            Parameter.AUTO_CAPTURE_DURATION[ParameterIndex.KEY]: Parameter.AUTO_CAPTURE_DURATION[
                ParameterIndex.DEFAULT_DATA],
            Parameter.CAMERA_GAIN[ParameterIndex.KEY]: Parameter.CAMERA_GAIN[ParameterIndex.DEFAULT_DATA],
            Parameter.CAMERA_MODE[ParameterIndex.KEY]: Parameter.CAMERA_MODE[ParameterIndex.DEFAULT_DATA],
            Parameter.COMPRESSION_RATIO[ParameterIndex.KEY]: Parameter.COMPRESSION_RATIO[ParameterIndex.DEFAULT_DATA],
            Parameter.FOCUS_POSITION[ParameterIndex.KEY]: Parameter.FOCUS_POSITION[ParameterIndex.DEFAULT_DATA],
            Parameter.FRAME_RATE[ParameterIndex.KEY]: Parameter.FRAME_RATE[ParameterIndex.DEFAULT_DATA],
            Parameter.IMAGE_RESOLUTION[ParameterIndex.KEY]: Parameter.IMAGE_RESOLUTION[ParameterIndex.DEFAULT_DATA],
            Parameter.IRIS_POSITION[ParameterIndex.KEY]: Parameter.IRIS_POSITION[ParameterIndex.DEFAULT_DATA],
            Parameter.LAMP_BRIGHTNESS[ParameterIndex.KEY]: Parameter.LAMP_BRIGHTNESS[ParameterIndex.DEFAULT_DATA],
            Parameter.NTP_SETTING[ParameterIndex.KEY]: Parameter.NTP_SETTING[ParameterIndex.DEFAULT_DATA],
            Parameter.PAN_POSITION[ParameterIndex.KEY]: Parameter.PAN_POSITION[ParameterIndex.DEFAULT_DATA],
            Parameter.PRESET_NUMBER[ParameterIndex.KEY]: Parameter.PRESET_NUMBER[ParameterIndex.DEFAULT_DATA],
            Parameter.SAMPLE_INTERVAL[ParameterIndex.KEY]: Parameter.SAMPLE_INTERVAL[ParameterIndex.DEFAULT_DATA],
            Parameter.TILT_POSITION[ParameterIndex.KEY]: Parameter.TILT_POSITION[ParameterIndex.DEFAULT_DATA],
            Parameter.WHEN_DISK_IS_FULL[ParameterIndex.KEY]: Parameter.WHEN_DISK_IS_FULL[ParameterIndex.DEFAULT_DATA],
            Parameter.ZOOM_POSITION[ParameterIndex.KEY]: Parameter.ZOOM_POSITION[ParameterIndex.DEFAULT_DATA]
        },
        DriverStartupConfigKey.SCHEDULER: {
            ScheduledJob.SAMPLE: {},
            ScheduledJob.STATUS: {},
            ScheduledJob.STOP_CAPTURE: {}
        }
    }
)
class TeledynePrompt(BaseEnum):
    """
    Prompt strings emitted by the instrument over its serial interface.
    """
    # Prompt sequence marking a completed command.
    COMMAND = '\r\n>\r\n>'
    # Prefix of an error response from the device.
    ERR = 'ERR:'
###################################################################
###
# Driver constant definitions
###
###############################################################################
# DATA PARTICLE TEST MIXIN #
# Defines a set of assert methods used for data particle verification #
# #
# In python mixin classes are classes designed such that they wouldn't be #
# able to stand on their own, but are inherited by other classes generally #
# using multiple inheritance. #
# #
# This class defines a configuration structure for testing and common assert #
# methods for validating data particles.                                      #
###############################################################################
class CAMDSMixin(DriverTestMixin):
    """
    Mixin class used for storing data particle constants
    and common data assertion methods shared by the unit, integration
    and qualification test classes.
    """
    # Create some short names for the parameter test config
    TYPE = ParameterTestConfigKey.TYPE
    READONLY = ParameterTestConfigKey.READONLY
    STARTUP = ParameterTestConfigKey.STARTUP
    DA = ParameterTestConfigKey.DIRECT_ACCESS
    VALUE = ParameterTestConfigKey.VALUE
    REQUIRED = ParameterTestConfigKey.REQUIRED
    DEFAULT = ParameterTestConfigKey.DEFAULT
    STATES = ParameterTestConfigKey.STATES
    ###
    # Parameter and Type Definitions
    ###
    # Expected type/access/default/value metadata for every driver parameter,
    # keyed by the parameter's wire key (Parameter.*[ParameterIndex.KEY]).
    _driver_parameters = {
        Parameter.CAMERA_GAIN[ParameterIndex.KEY]:
            {TYPE: int, READONLY: False, DA: False, STARTUP: False,
             DEFAULT: None,
             VALUE: Parameter.CAMERA_GAIN[ParameterIndex.D_DEFAULT]},
        Parameter.CAMERA_MODE[ParameterIndex.KEY]:
            {TYPE: int, READONLY: False, DA: True, STARTUP: True,
             DEFAULT: Parameter.CAMERA_MODE[ParameterIndex.D_DEFAULT],
             VALUE: Parameter.CAMERA_MODE[ParameterIndex.D_DEFAULT]},
        Parameter.COMPRESSION_RATIO[ParameterIndex.KEY]:
            {TYPE: int, READONLY: False, DA: True, STARTUP: True,
             DEFAULT: Parameter.COMPRESSION_RATIO[ParameterIndex.D_DEFAULT],
             VALUE: Parameter.COMPRESSION_RATIO[ParameterIndex.D_DEFAULT]},
        Parameter.FOCUS_POSITION[ParameterIndex.KEY]:
            {TYPE: int, READONLY: False, DA: False, STARTUP: False,
             DEFAULT: Parameter.FOCUS_POSITION[ParameterIndex.D_DEFAULT],
             VALUE: Parameter.FOCUS_POSITION[ParameterIndex.D_DEFAULT]},
        Parameter.FRAME_RATE[ParameterIndex.KEY]:
            {TYPE: int, READONLY: False, DA: True, STARTUP: True,
             DEFAULT: Parameter.FRAME_RATE[ParameterIndex.D_DEFAULT],
             VALUE: Parameter.FRAME_RATE[ParameterIndex.D_DEFAULT]},
        Parameter.IMAGE_RESOLUTION[ParameterIndex.KEY]:
            {TYPE: int, READONLY: False, DA: True, STARTUP: True,
             DEFAULT: Parameter.IMAGE_RESOLUTION[ParameterIndex.D_DEFAULT],
             VALUE: Parameter.IMAGE_RESOLUTION[ParameterIndex.D_DEFAULT]},
        Parameter.IRIS_POSITION[ParameterIndex.KEY]:
            {TYPE: int, READONLY: False, DA: False, STARTUP: False,
             DEFAULT: None,
             VALUE: Parameter.IRIS_POSITION[ParameterIndex.D_DEFAULT]},
        Parameter.LAMP_BRIGHTNESS[ParameterIndex.KEY]:
            {TYPE: int, READONLY: False, DA: False, STARTUP: False,
             DEFAULT: None,
             VALUE: Parameter.LAMP_BRIGHTNESS[ParameterIndex.D_DEFAULT]},
        Parameter.NTP_SETTING[ParameterIndex.KEY]:
            {TYPE: str, READONLY: True, DA: True, STARTUP: False,
             DEFAULT: None, VALUE: None},
        Parameter.PAN_POSITION[ParameterIndex.KEY]:
            {TYPE: int, READONLY: True, DA: False, STARTUP: False,
             DEFAULT: None,
             VALUE: Parameter.PAN_POSITION[ParameterIndex.D_DEFAULT]},
        Parameter.TILT_POSITION[ParameterIndex.KEY]:
            {TYPE: int, READONLY: True, DA: False, STARTUP: False,
             DEFAULT: None,
             VALUE: Parameter.TILT_POSITION[ParameterIndex.D_DEFAULT]},
        Parameter.WHEN_DISK_IS_FULL[ParameterIndex.KEY]:
            {TYPE: int, READONLY: True, DA: True, STARTUP: False,
             DEFAULT: None, VALUE: None},
        Parameter.ZOOM_POSITION[ParameterIndex.KEY]:
            {TYPE: int, READONLY: False, DA: False, STARTUP: False,
             DEFAULT: None,
             VALUE: Parameter.ZOOM_POSITION[ParameterIndex.D_DEFAULT]},
        # Engineering parameters
        Parameter.PRESET_NUMBER[ParameterIndex.KEY]:
            {TYPE: int, READONLY: False, DA: False, STARTUP: False,
             DEFAULT: Parameter.PRESET_NUMBER[ParameterIndex.DEFAULT_DATA],
             VALUE: Parameter.PRESET_NUMBER[ParameterIndex.D_DEFAULT]},
        Parameter.ACQUIRE_STATUS_INTERVAL[ParameterIndex.KEY]:
            {TYPE: str, READONLY: False, DA: False, STARTUP: False,
             DEFAULT: Parameter.ACQUIRE_STATUS_INTERVAL[ParameterIndex.DEFAULT_DATA],
             VALUE: Parameter.ACQUIRE_STATUS_INTERVAL[ParameterIndex.D_DEFAULT]},
        Parameter.SAMPLE_INTERVAL[ParameterIndex.KEY]:
            {TYPE: str, READONLY: False, DA: False, STARTUP: False,
             DEFAULT: Parameter.SAMPLE_INTERVAL[ParameterIndex.DEFAULT_DATA],
             VALUE: Parameter.SAMPLE_INTERVAL[ParameterIndex.D_DEFAULT]},
        Parameter.AUTO_CAPTURE_DURATION[ParameterIndex.KEY]:
            {TYPE: int, READONLY: False, DA: False, STARTUP: False,
             DEFAULT: Parameter.AUTO_CAPTURE_DURATION[ParameterIndex.DEFAULT_DATA],
             VALUE: Parameter.AUTO_CAPTURE_DURATION[ParameterIndex.D_DEFAULT]}
    }
    # Protocol states from which each capability may legally be invoked.
    _driver_capabilities = {
        # capabilities defined in the IOS
        Capability.DISCOVER: {STATES: [ProtocolState.UNKNOWN]},
        Capability.START_AUTOSAMPLE: {STATES: [ProtocolState.COMMAND, ProtocolState.AUTOSAMPLE]},
        Capability.STOP_AUTOSAMPLE: {STATES: [ProtocolState.COMMAND, ProtocolState.AUTOSAMPLE]},
        Capability.ACQUIRE_STATUS: {STATES: [ProtocolState.COMMAND, ProtocolState.AUTOSAMPLE]},
        Capability.GOTO_PRESET: {STATES: [ProtocolState.COMMAND, ProtocolState.AUTOSAMPLE]},
        Capability.LAMP_OFF: {STATES: [ProtocolState.COMMAND, ProtocolState.AUTOSAMPLE]},
        Capability.LAMP_ON: {STATES: [ProtocolState.COMMAND, ProtocolState.AUTOSAMPLE]},
        Capability.LASERS_OFF: {STATES: [ProtocolState.COMMAND, ProtocolState.AUTOSAMPLE]},
        Capability.LASERS_ON: {STATES: [ProtocolState.COMMAND, ProtocolState.AUTOSAMPLE]},
        Capability.ACQUIRE_SAMPLE: {STATES: [ProtocolState.COMMAND, ProtocolState.AUTOSAMPLE]},
        Capability.EXECUTE_AUTO_CAPTURE: {STATES: [ProtocolState.COMMAND, ProtocolState.AUTOSAMPLE]},
    }
    # Single-byte length markers used when composing fake instrument messages.
    size_1 = chr(0x01)
    size_2 = chr(0x02)
    size_3 = chr(0x03)
    size_5 = chr(0x05)
    size_6 = chr(0x06)
    size_9 = chr(0x09)
    size_A = chr(0x0A)
    size_C = chr(0x0C)
    size_4 = chr(0x04)
    size_7 = chr(0x07)
    size_8 = chr(0x08)
    size_B = chr(0x0B)
    _ACK = chr(0x06)
    # Canned health-status message ('HS') and the values it should decode to.
    _health_data = '<' + size_7 + ':' + size_6 + ':' + 'HS' + size_1 + size_2 + size_3 + '>'
    _health_dict = {
        CamdsHealthStatusKey.humidity: {'type': int, 'value': 2},
        CamdsHealthStatusKey.temp: {'type': int, 'value': 1},
        CamdsHealthStatusKey.error: {'type': int, 'value': 3}
    }
    # Canned disk-status message ('GC') and the values it should decode to.
    _disk_data = '<' + size_B + ':' + size_6 + ':' + 'GC' + size_1 + size_2 + \
                 size_3 + size_4 + size_5 + size_6 + size_7 + '>'
    _disk_status_dict = {
        CamdsDiskStatusKey.disk_remaining: {'type': int, 'value': 100},
        CamdsDiskStatusKey.image_on_disk: {'type': int, 'value': 3},
        CamdsDiskStatusKey.image_remaining: {'type': int, 'value': 1029},
        CamdsDiskStatusKey.size: {'type': int, 'value': 1543},
    }
    # Driver Parameter Methods
    ###
    def assert_driver_parameters(self, current_parameters, verify_values=False):
        """
        Verify that all driver parameters are correct and potentially verify values.
        @param current_parameters: driver parameters read from the driver instance
        @param verify_values: should we verify values against definition?
        """
        log.debug("assert_driver_parameters current_parameters = " + str(current_parameters))
        temp_parameters = copy.deepcopy(self._driver_parameters)
        # NOTE(review): this update with the same dict is a no-op after the
        # deepcopy above; it looks like a leftover from a merged-config pattern.
        temp_parameters.update(self._driver_parameters)
        self.assert_parameters(current_parameters, temp_parameters, verify_values)
    def assert_health_data(self, data_particle, verify_values=True):
        """
        Verify CAMDS health status data particle
        @param data_particle: CAMDS health status DataParticle
        @param verify_values: bool, should we verify parameter values
        """
        self.assert_data_particle_header(data_particle, DataParticleType.CAMDS_HEALTH_STATUS)
        self.assert_data_particle_parameters(data_particle, self._health_dict)  # , verify_values
    def assert_disk_data(self, data_particle, verify_values=True):
        """
        Verify CAMDS disk status data particle
        @param data_particle: CAMDS disk status data particle
        @param verify_values: bool, should we verify parameter values
        """
        self.assert_data_particle_header(data_particle, DataParticleType.CAMDS_DISK_STATUS)
        self.assert_data_particle_parameters(data_particle, self._disk_status_dict)  # , verify_values
###############################################################################
# UNIT TESTS #
# Unit tests test the method calls and parameters using Mock. #
###############################################################################
@attr('UNIT', group='mi')
class DriverUnitTest(InstrumentDriverUnitTestCase, CAMDSMixin):
    """Unit tests: exercise driver method calls and parameters using mocks."""
    def setUp(self):
        InstrumentDriverUnitTestCase.setUp(self)
    def test_driver_schema(self):
        """
        get the driver schema and verify it is configured properly
        """
        temp_parameters = copy.deepcopy(self._driver_parameters)
        driver = InstrumentDriver(self._got_data_event_callback)
        self.assert_driver_schema(driver, temp_parameters, self._driver_capabilities)
    def test_got_data(self):
        """
        Verify sample data passed through the got data method produces the correct data particles
        """
        # Create and initialize the instrument driver with a mock port agent
        driver = InstrumentDriver(self._got_data_event_callback)
        self.assert_initialize_driver(driver)
        self.assert_raw_particle_published(driver, True)
        # Start validating data particles
        self.assert_particle_published(driver, self._health_data, self.assert_health_data, True)
        self.assert_particle_published(driver, self._disk_data, self.assert_disk_data, True)
    def test_driver_parameters(self):
        """
        Verify the set of parameters known by the driver
        """
        driver = InstrumentDriver(self._got_data_event_callback)
        self.assert_initialize_driver(driver, ProtocolState.COMMAND)
        expected_parameters = sorted(self._driver_parameters.keys())
        # NOTE(review): the second sort below is redundant; the list is
        # already sorted by the line above.
        expected_parameters = sorted(expected_parameters)
        reported_parameters = sorted(driver.get_resource(Parameter.ALL))
        self.assertEqual(reported_parameters, expected_parameters)
        # Verify the parameter definitions
        self.assert_driver_parameter_definition(driver, self._driver_parameters)
    def test_driver_enums(self):
        """
        Verify that all driver enumeration has no duplicate values that might cause confusion. Also
        do a little extra validation for the Capabilities
        """
        self.assert_enum_has_no_duplicates(InstrumentCommands())
        self.assert_enum_has_no_duplicates(ProtocolState())
        self.assert_enum_has_no_duplicates(ProtocolEvent())
        self.assert_enum_has_no_duplicates(Parameter())
        self.assert_enum_has_no_duplicates(DataParticleType())
        self.assert_enum_has_no_duplicates(ScheduledJob())
        # Test capabilities for duplicates, them verify that capabilities is a subset of proto events
        self.assert_enum_has_no_duplicates(Capability())
        self.assert_enum_complete(Capability(), ProtocolEvent())
    def test_chunker(self):
        """
        Test the chunker and verify the particles created.
        """
        chunker = StringChunker(CAMDSProtocol.sieve_function)
        # Each canned message is fed whole, with noise, fragmented, and combined.
        self.assert_chunker_sample(chunker, self._health_data)
        self.assert_chunker_sample_with_noise(chunker, self._health_data)
        self.assert_chunker_fragmented_sample(chunker, self._health_data, 5)
        self.assert_chunker_combined_sample(chunker, self._health_data)
        self.assert_chunker_sample(chunker, self._disk_data)
        self.assert_chunker_sample_with_noise(chunker, self._disk_data)
        self.assert_chunker_fragmented_sample(chunker, self._disk_data, 6)
        self.assert_chunker_combined_sample(chunker, self._disk_data)
    def test_protocol_filter_capabilities(self):
        """
        This tests driver filter_capabilities.
        Iterate through available capabilities, and verify that they can pass successfully through the filter.
        Test silly made up capabilities to verify they are blocked by filter.
        """
        my_event_callback = Mock(spec="UNKNOWN WHAT SHOULD GO HERE FOR evt_callback")
        protocol = CAMDSProtocol(CAMDSPrompt, NEWLINE, my_event_callback)
        driver_capabilities = Capability().list()
        test_capabilities = Capability().list()
        # Add a bogus capability that will be filtered out.
        test_capabilities.append("BOGUS_CAPABILITY")
        # Verify "BOGUS_CAPABILITY" was filtered out
        self.assertEquals(driver_capabilities, protocol._filter_capabilities(test_capabilities))
    def test_set(self):
        # Each tuple is (parameter record, value to set, expected wire command).
        # The expected strings embed raw length/value bytes, e.g. '\x04' is the
        # payload length marker and the final byte is the encoded value.
        params = [
            (Parameter.CAMERA_GAIN, 1, '<\x04:GS:\x01>'),
            (Parameter.CAMERA_GAIN, 2, '<\x04:GS:\x02>'),
            (Parameter.CAMERA_GAIN, 3, '<\x04:GS:\x03>'),
            (Parameter.CAMERA_GAIN, 4, '<\x04:GS:\x04>'),
            (Parameter.CAMERA_GAIN, 5, '<\x04:GS:\x05>'),
            (Parameter.CAMERA_GAIN, 32, '<\x04:GS:\x20>'),
            (Parameter.CAMERA_GAIN, 255, '<\x04:GS:\xff>'),
            (Parameter.CAMERA_MODE, 0, '<\x04:SV:\x00>'),
            (Parameter.CAMERA_MODE, 9, '<\x04:SV:\x09>'),
            (Parameter.CAMERA_MODE, 10, '<\x04:SV:\x0a>'),
            (Parameter.CAMERA_MODE, 11, '<\x04:SV:\x0b>'),
            (Parameter.FRAME_RATE, 1, '<\x04:FR:\x01>'),
            (Parameter.FRAME_RATE, 5, '<\x04:FR:\x05>'),
            (Parameter.FRAME_RATE, 10, '<\x04:FR:\x0a>'),
            (Parameter.FRAME_RATE, 20, '<\x04:FR:\x14>'),
            (Parameter.FRAME_RATE, 30, '<\x04:FR:\x1e>'),
            (Parameter.IMAGE_RESOLUTION, 1, '<\x04:SD:\x01>'),
            (Parameter.IMAGE_RESOLUTION, 2, '<\x04:SD:\x02>'),
            (Parameter.IMAGE_RESOLUTION, 4, '<\x04:SD:\x04>'),
            (Parameter.IMAGE_RESOLUTION, 8, '<\x04:SD:\x08>'),
            (Parameter.IMAGE_RESOLUTION, 16, '<\x04:SD:\x10>'),
            (Parameter.IMAGE_RESOLUTION, 32, '<\x04:SD:\x20>'),
            (Parameter.IMAGE_RESOLUTION, 64, '<\x04:SD:\x40>'),
            (Parameter.IMAGE_RESOLUTION, 100, '<\x04:SD:\x64>'),
            (Parameter.COMPRESSION_RATIO, 1, '<\x04:CD:\x01>'),
            (Parameter.COMPRESSION_RATIO, 2, '<\x04:CD:\x02>'),
            (Parameter.COMPRESSION_RATIO, 4, '<\x04:CD:\x04>'),
            (Parameter.COMPRESSION_RATIO, 8, '<\x04:CD:\x08>'),
            (Parameter.COMPRESSION_RATIO, 16, '<\x04:CD:\x10>'),
            (Parameter.COMPRESSION_RATIO, 32, '<\x04:CD:\x20>'),
            (Parameter.COMPRESSION_RATIO, 64, '<\x04:CD:\x40>'),
            (Parameter.COMPRESSION_RATIO, 100, '<\x04:CD:\x64>'),
            (Parameter.FOCUS_POSITION, 0, '<\x04:FG:\x00>'),
            (Parameter.FOCUS_POSITION, 100, '<\x04:FG:\x64>'),
            (Parameter.FOCUS_POSITION, 200, '<\x04:FG:\xc8>'),
            (Parameter.CAMERA_MODE, 9, '<\x04:SV:\t>'),
            (Parameter.IRIS_POSITION, 8, '<\x04:IG:\x08>'),
            (Parameter.FOCUS_POSITION, 100, '<\x04:FG:d>'),
            (Parameter.COMPRESSION_RATIO, 100, '<\x04:CD:d>'),
            (Parameter.LAMP_BRIGHTNESS, '3:50', '<\x05:BF:\x032>'),
        ]
        for param, input_value, output_value in params:
            key = param[ParameterIndex.KEY]
            self.assertEqual(output_value, self._build_set_command(key, input_value))
    def _build_set_command(self, param, val):
        """
        Build handler for set commands. param=val followed by newline.
        String val constructed by param dict formatting function.
        @param param the parameter key to set.
        @param val the parameter value to set.
        @return The set command to be sent to the device.
        @throws InstrumentParameterException if the parameter is not valid or
        if the formatting function could not accept the value passed.
        """
        try:
            # String values like '3:50' encode colon-separated byte values.
            if isinstance(val, basestring):
                val = ''.join(chr(int(x)) for x in val.split(':'))
            else:
                if param == Parameter.LAMP_BRIGHTNESS[ParameterIndex.KEY]:
                    # Set both lamps to an equal value by setting first byte to \x03 which indicates to the instrument
                    # to apply the given value to both lamps
                    val = ''.join( (chr(3), chr(val)) )
                elif param == Parameter.CAMERA_GAIN[ParameterIndex.KEY]:
                    # CAMERA_GAIN must be an integer between 1 and 32, or equal to 255 (auto gain)
                    if val == 255 or (0 < val < 33):
                        val = chr(val)
                    else:
                        # NOTE(review): this message reads awkwardly ("integer
                        # less either equal to..."); wording should be fixed in
                        # the production driver it mirrors.
                        raise Exception('The desired value for %s must be an integer less '
                                        'either equal to 255 or between 1 and 32: %s' % (param, val))
                else:
                    val = chr(val)
            if param == Parameter.NTP_SETTING[ParameterIndex.KEY]:
                val = val + Parameter.NTP_SETTING[ParameterIndex.DEFAULT_DATA]
            # Payload length = value bytes plus the two ':' separators and
            # the two-character set-command mnemonic... framed as <len:CMD:val>.
            data_size = len(val) + 3
            param_tuple = getattr(Parameter, param)
            set_cmd = '<%s:%s:%s>' % (chr(data_size), param_tuple[ParameterIndex.SET], val)
            log.debug("Set command: %r" % set_cmd)
        except KeyError:
            raise Exception('Unknown driver parameter. %s' % param)
        return set_cmd
###############################################################################
# INTEGRATION TESTS #
# Integration test test the direct driver / instrument interaction #
# but making direct calls via zeromq. #
# - Common Integration tests test the driver through the instrument agent #
# and common for all drivers (minimum requirement for ION ingestion) #
###############################################################################
@attr('INT', group='mi')
class DriverIntegrationTest(InstrumentDriverIntegrationTestCase, CAMDSMixin):
    """Integration tests: drive the real driver process via zeromq."""
    _tested = {}
    def setUp(self):
        self.port_agents = {}
        InstrumentDriverIntegrationTestCase.setUp(self)
    def assert_disk_status(self, data_particle, verify_values=True):
        """
        Verify a disk status particle
        @param data_particle: CAMDS disk status particle
        @param verify_values: bool, should we verify parameter values
        """
        self.assert_data_particle_header(data_particle, DataParticleType.CAMDS_DISK_STATUS)
        self.assert_data_particle_parameters(data_particle, self._disk_status_dict)  # , verify_values
    def assert_health_status(self, data_particle, verify_values=True):
        """
        Verify a health status particle
        @param data_particle: CAMDS health status particle
        @param verify_values: bool, should we verify parameter values
        """
        self.assert_data_particle_header(data_particle, DataParticleType.CAMDS_HEALTH_STATUS)
        self.assert_data_particle_parameters(data_particle, self._health_dict)  # , verify_values
    def assert_sample_meta(self, data_particle, verify_values=True):
        """
        Verify an image meta particle
        @param data_particle: CAMDS image meta data particle
        @param verify_values: bool, should we verify parameter values
        """
        self.assert_data_particle_header(data_particle, DataParticleType.CAMDS_IMAGE_METADATA)
    def assert_acquire_status(self):
        """
        Check data stream types for acquire_status()
        """
        self.assert_async_particle_generation(DataParticleType.CAMDS_DISK_STATUS, self.assert_disk_status,
                                              timeout=60)
        self.assert_async_particle_generation(DataParticleType.CAMDS_HEALTH_STATUS,
                                              self.assert_health_status, timeout=60)
    def assert_acquire_sample(self):
        """
        Check data stream types for acquire_sample()
        """
        self.assert_async_particle_generation(DataParticleType.CAMDS_IMAGE_METADATA, self.assert_sample_meta,
                                              timeout=60)
    def test_connection(self):
        """Verify the driver can be initialized and connected."""
        log.debug("######## Starting test_connection ##########")
        self.assert_initialize_driver()
    # Overwritten method
    def test_driver_process(self):
        """
        Test for correct launch of driver process and communications, including asynchronous driver events.
        Overridden to support multiple port agents.
        """
        log.info("Ensuring driver process was started properly ...")
        # Verify processes exist.
        self.assertNotEqual(self.driver_process, None)
        drv_pid = self.driver_process.getpid()
        self.assertTrue(isinstance(drv_pid, int))
        self.assertNotEqual(self.port_agents, None)
        for port_agent in self.port_agents.values():
            pagent_pid = port_agent.get_pid()
            self.assertTrue(isinstance(pagent_pid, int))
        # Send a test message to the process interface, confirm result.
        reply = self.driver_client.cmd_dvr('process_echo')
        self.assert_(reply.startswith('ping from resource ppid:'))
        reply = self.driver_client.cmd_dvr('driver_ping', 'foo')
        self.assert_(reply.startswith('driver_ping: foo'))
        # Test the event thread publishes and client side picks up events.
        events = [
            'I am important event #1!',
            'And I am important event #2!'
        ]
        self.driver_client.cmd_dvr('test_events', events=events)
        time.sleep(1)
        # Confirm the events received are as expected.
        self.assertEqual(self.events, events)
        # Test the exception mechanism.
        # with self.assertRaises(ResourceError):
        #     exception_str = 'Oh no, something bad happened!'
        #     self.driver_client.cmd_dvr('test_exceptions', exception_str)
    # Set bulk params and test auto sampling
    def test_autosample_particle_generation(self):
        """
        Test that we can generate particles when in autosample
        """
        self.assert_initialize_driver()
        params = {
            Parameter.CAMERA_GAIN: 255,
            Parameter.CAMERA_MODE: 9,
            Parameter.FRAME_RATE: 30,
            Parameter.IMAGE_RESOLUTION: 1,
            Parameter.COMPRESSION_RATIO: 100,
            Parameter.FOCUS_POSITION: 100,
            Parameter.PAN_POSITION: 90,
            Parameter.TILT_POSITION: 90
        }
        self.assert_set_bulk(params)
        self.assert_driver_command(ProtocolEvent.START_AUTOSAMPLE, state=ProtocolState.AUTOSAMPLE, delay=1)
        self.assert_driver_command(ProtocolEvent.STOP_AUTOSAMPLE, state=ProtocolState.COMMAND, delay=10)
    # test commands in different modes
    def test_commands(self):
        """
        Run instrument commands from both command and streaming mode.
        """
        self.assert_initialize_driver()
        ####
        # First test in command mode
        ####
        self.assert_driver_command(ProtocolEvent.ACQUIRE_STATUS)
        self.assert_driver_command(ProtocolEvent.START_AUTOSAMPLE, state=ProtocolState.AUTOSAMPLE,
                                   delay=20)
        self.assert_driver_command(ProtocolEvent.STOP_AUTOSAMPLE, state=ProtocolState.COMMAND, delay=1)
        self.assert_driver_command(ProtocolEvent.ACQUIRE_STATUS, delay=2)
        self.assert_acquire_status()
        self.assert_driver_command(ProtocolEvent.ACQUIRE_SAMPLE, delay=2)
        self.assert_acquire_sample()
        self.assert_driver_command(ProtocolEvent.GOTO_PRESET)
        self.assert_driver_command(ProtocolEvent.LAMP_ON)
        self.assert_driver_command(ProtocolEvent.LAMP_OFF)
        self.assert_driver_command(ProtocolEvent.LASERS_ON)
        self.assert_driver_command(ProtocolEvent.LASERS_OFF)
        # ####
        # # Test in streaming mode
        # ####
        # # Put us in streaming
        self.assert_driver_command(ProtocolEvent.START_AUTOSAMPLE, state=ProtocolState.AUTOSAMPLE,
                                   delay=1)
        self.assert_driver_command(ProtocolEvent.ACQUIRE_STATUS, delay=2)
        self.assert_acquire_status()
        self.assert_driver_command(ProtocolEvent.ACQUIRE_SAMPLE, delay=2)
        self.assert_acquire_sample()
        self.assert_driver_command(ProtocolEvent.GOTO_PRESET)
        self.assert_driver_command(ProtocolEvent.LAMP_ON)
        self.assert_driver_command(ProtocolEvent.LAMP_OFF)
        self.assert_driver_command(ProtocolEvent.LASERS_ON)
        self.assert_driver_command(ProtocolEvent.LASERS_OFF)
        self.assert_driver_command(ProtocolEvent.STOP_AUTOSAMPLE, state=ProtocolState.COMMAND, delay=1)
    def test_scheduled_acquire_status_command(self):
        """
        Verify the scheduled acquire status is triggered and functions as expected
        """
        self.assert_initialize_driver()
        self.assert_set(Parameter.ACQUIRE_STATUS_INTERVAL[ParameterIndex.KEY], '00:00:07')
        time.sleep(15)
        self.assert_acquire_status()
        # Interval of zero disables the scheduled job.
        self.assert_set(Parameter.ACQUIRE_STATUS_INTERVAL[ParameterIndex.KEY], '00:00:00')
        self.assert_current_state(ProtocolState.COMMAND)
    def test_scheduled_acquire_status_autosample(self):
        """
        Verify the scheduled sample is triggered and functions as expected in autosample
        """
        self.assert_initialize_driver()
        self.assert_current_state(ProtocolState.COMMAND)
        self.assert_set(Parameter.SAMPLE_INTERVAL, '00:00:04')
        self.assert_driver_command(ProtocolEvent.START_AUTOSAMPLE)
        self.assert_current_state(ProtocolState.AUTOSAMPLE)
        time.sleep(10)
        self.assert_acquire_sample()
        self.assert_driver_command(ProtocolEvent.STOP_AUTOSAMPLE)
        self.assert_current_state(ProtocolState.COMMAND)
        self.assert_set(Parameter.SAMPLE_INTERVAL, '00:00:00')
        self.assert_current_state(ProtocolState.COMMAND)
    def test_scheduled_capture(self):
        """
        Verify a timed auto capture is triggered and functions as expected
        """
        self.assert_initialize_driver()
        self.assert_current_state(ProtocolState.COMMAND)
        self.assert_set(Parameter.AUTO_CAPTURE_DURATION, 2)
        self.assert_driver_command(InstrumentCommands.START_CAPTURE)
        time.sleep(1)
        self.assert_acquire_sample()
        time.sleep(2)
        self.assert_current_state(ProtocolState.COMMAND)
    def test_acquire_status(self):
        """
        Verify the acquire_status command is functional
        """
        self.assert_initialize_driver()
        self.assert_driver_command(ProtocolEvent.ACQUIRE_STATUS)
        self.assert_acquire_status()
###############################################################################
# QUALIFICATION TESTS #
# Device specific qualification tests are for #
# testing device specific capabilities #
###############################################################################
| bsd-2-clause |
TripleSnail/blender-zombie | python/text.py | 1 | 1754 | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import bge
import bgl
import blf
DPI = 72
class TextObject(object):
    """A piece of on-screen text with a relative position, size and lifetime."""

    def __init__(self, text, px, py, size, time=0):
        """Store the string, relative (px, py) position, font size and time."""
        self.text = text
        self.size = size
        self.px, self.py = px, py
        self.time = time
text_objects = []
def init(controller):
    """Load the UI font and install write() as the scene's post-draw hook.

    Intended to be called once from a BGE logic controller; the argument is
    unused.
    """
    font_path = bge.logic.expandPath('//fonts/DejaVuSans.ttf')
    # Stash the font id globally so write() can reach it each frame.
    bge.logic.font_id = blf.load(font_path)
    scene = bge.logic.getCurrentScene()
    # Replaces (not appends to) any existing post_draw callbacks.
    scene.post_draw = [write]
def write():
    """post_draw callback: draw every TextObject as a 2D screen overlay."""
    width = bge.render.getWindowWidth()
    height = bge.render.getWindowHeight()
    # Switch to an orthographic projection matching the window in pixels so
    # text can be placed using window coordinates.
    bgl.glMatrixMode(bgl.GL_PROJECTION)
    bgl.glLoadIdentity()
    bgl.gluOrtho2D(0, width, 0, height)
    bgl.glMatrixMode(bgl.GL_MODELVIEW)
    bgl.glLoadIdentity()
    font_id = bge.logic.font_id
    for text_obj in text_objects:
        # px/py are fractions of the window size (0.0 - 1.0).
        blf.position(font_id, width * text_obj.px , height * text_obj.py, 0)
        blf.size(font_id, text_obj.size, DPI)
        blf.draw(font_id, text_obj.text)
| gpl-2.0 |
salguarnieri/intellij-community | python/helpers/pydev/pydev_run_in_console.py | 8 | 2131 | '''
Entry point module to run a file in the interactive console.
'''
from pydevconsole import *
from _pydev_bundle import pydev_imports
from _pydevd_bundle.pydevd_utils import save_main_module
def run_file(file, globals=None, locals=None):
    """
    Execute ``file`` as a script and return the globals it produced.

    @param file: path to a script, or a directory containing ``__main__.py``
        (note: the parameter shadows the ``file`` builtin, kept for
        compatibility with existing callers).
    @param globals: globals dict to run with; when None a fresh fake
        ``__main__`` module namespace is created.
    @param locals: locals dict; defaults to ``globals``.
    @return: the globals dict after the script has run.
    """
    # Mirror `python <dir>` behaviour: run the directory's __main__.py.
    if os.path.isdir(file):
        new_target = os.path.join(file, '__main__.py')
        if os.path.isfile(new_target):
            file = new_target
    if globals is None:
        m = save_main_module(file, 'pydev_run_in_console')
        globals = m.__dict__
        try:
            globals['__builtins__'] = __builtins__
        except NameError:
            pass  # Not there on Jython...
    if locals is None:
        locals = globals
    # Make imports relative to the script's own directory resolve.
    sys.path.insert(0, os.path.split(file)[0])
    print('Running %s'%file)
    pydev_imports.execfile(file, globals, locals)  # execute the script
    return globals
#=======================================================================================================================
# main
#=======================================================================================================================
if __name__ == '__main__':
    sys.stdin = BaseStdIn()
    # argv: [this_script, port, client_port, target_file, target_args...]
    port, client_port = sys.argv[1:3]
    del sys.argv[1]
    del sys.argv[1]
    file = sys.argv[1]
    # Shift argv so the target script sees itself as argv[0].
    del sys.argv[0]
    from _pydev_bundle import pydev_localhost
    # Port 0/0 means "pick a free port for the client side".
    if int(port) == 0 and int(client_port) == 0:
        (h, p) = pydev_localhost.get_socket_name()
        client_port = p
    host = pydev_localhost.get_localhost()
    #replace exit (see comments on method)
    #note that this does not work in jython!!! (sys method can't be replaced).
    sys.exit = do_exit
    interpreter = InterpreterInterface(host, int(client_port), threading.currentThread())
    # Serve console requests on a daemon thread while the script runs here.
    server_thread = threading.Thread(target=start_console_server,
                                     name='ServerThread',
                                     args=(host, int(port), interpreter))
    server_thread.setDaemon(True)
    server_thread.start()
    globals = run_file(file, None, None)
    # Expose the script's resulting globals to the interactive console.
    interpreter.get_namespace().update(globals)
    process_exec_queue(interpreter)
renyi/drum | drum/links/templatetags/drum_tags.py | 1 | 1472 | from __future__ import unicode_literals
from collections import defaultdict
from django.template.defaultfilters import timesince
from mezzanine import template
from mezzanine.generic.models import ThreadedComment
from drum.links.utils import order_by_score
from drum.links.models import LinkCategory
from drum.links.views import CommentList, USER_PROFILE_RELATED_NAME
register = template.Library()
@register.filter
def get_profile(user):
    """
    Template filter returning the profile object attached to ``user``.
    """
    related_name = USER_PROFILE_RELATED_NAME
    return getattr(user, related_name)
@register.simple_tag(takes_context=True)
def order_comments_by_score_for(context, link):
    """
    Preload the link's visible threaded comments the same way Mezzanine
    does initially, but ordered by score, grouped by parent comment id,
    and stored in the template context as "all_comments".
    """
    queryset = link.comments.visible().select_related(
        "user",
        "user__%s" % (USER_PROFILE_RELATED_NAME)
    )
    by_parent = defaultdict(list)
    ranked = order_by_score(queryset, CommentList.score_fields, "submit_date")
    for comment in ranked:
        by_parent[comment.replied_to_id].append(comment)
    context["all_comments"] = by_parent
    return ""
@register.filter
def short_timesince(date):
    """Return only the largest unit of Django's ``timesince`` output."""
    return timesince(date).partition(",")[0]
@register.as_tag
def link_category_list(*args):
    """Template tag returning every ``LinkCategory`` for category menus."""
    return LinkCategory.objects.all()
@register.as_tag
def latest_comments(limit=5, *args):
    """Template tag returning the ``limit`` most recent visible comments."""
    visible = ThreadedComment.objects.filter(is_removed=False, is_public=True)
    return visible.reverse()[:limit]
| bsd-2-clause |
vabue/RatticWeb | staff/importloaders.py | 8 | 1208 | from keepassdb import Database
from django.utils.encoding import smart_text
def keepass(filep, password):
    """Parse a KeePass database and flatten it for the importer.

    @param filep: file object (or path) holding the .kdb data.
    @param password: master password for the database.
    @return: dict with "tags" (all group titles) and "entries"
        (credential dicts produced by ``_walkkeepass``).
    """
    groups, entries = [], []
    database = Database(filep, password)
    _walkkeepass(groups, entries, [], database.root)
    return {'tags': groups, 'entries': entries}
def _walkkeepass(groups, entries, groupstack, root):
    """Recursively collect group titles and credential entries under ``root``.

    ``groupstack`` tracks the current group path so every entry records the
    full chain of groups it is nested under as its tags. Internal
    'Meta-Info' entries are skipped. All text is coerced through
    ``smart_text`` with errors='replace' to survive odd encodings.
    """
    for group in root.children:
        title = smart_text(group.title, errors='replace')
        groupstack.append(title)
        groups.append(title)
        for entry in group.entries:
            if entry.title == 'Meta-Info':
                continue
            entries.append({
                'title': smart_text(entry.title, errors='replace'),
                'username': smart_text(entry.username, errors='replace'),
                'password': smart_text(entry.password, errors='replace'),
                'description': smart_text(entry.notes, errors='replace'),
                'url': smart_text(entry.url, errors='replace'),
                'tags': list(groupstack),
                'filecontent': entry.binary,
                'filename': smart_text(entry.binary_desc, errors='replace'),
            })
        _walkkeepass(groups, entries, groupstack, group)
        groupstack.pop()
| gpl-2.0 |
piotroxp/scibibscan | scib/lib/python3.6/site-packages/setuptools/dist.py | 45 | 37129 | __all__ = ['Distribution']
import re
import os
import warnings
import numbers
import distutils.log
import distutils.core
import distutils.cmd
import distutils.dist
from distutils.errors import (DistutilsOptionError, DistutilsPlatformError,
DistutilsSetupError)
from distutils.util import rfc822_escape
from setuptools.extern import six
from setuptools.extern.six.moves import map
from pkg_resources.extern import packaging
from setuptools.depends import Require
from setuptools import windows_support
from setuptools.monkey import get_unpatched
import pkg_resources
def _get_unpatched(cls):
    """Deprecated shim; use ``setuptools.monkey.get_unpatched`` directly."""
    warnings.warn("Do not call this function", DeprecationWarning)
    return get_unpatched(cls)
# Based on Python 3.5 version
def write_pkg_file(self, file):
    """Write the PKG-INFO format data to a file object.

    The metadata version is derived from the fields present: 1.1 when any
    PEP 314 field (provides/requires/obsoletes/classifiers/download_url)
    is used, and 1.2 when ``python_requires`` is set (setuptools-specific,
    per PEP 345). Field order below matters - it defines the PKG-INFO
    layout consumers expect.
    """
    version = '1.0'
    if (self.provides or self.requires or self.obsoletes or
            self.classifiers or self.download_url):
        version = '1.1'
    # Setuptools specific for PEP 345
    if hasattr(self, 'python_requires'):
        version = '1.2'
    file.write('Metadata-Version: %s\n' % version)
    file.write('Name: %s\n' % self.get_name())
    file.write('Version: %s\n' % self.get_version())
    file.write('Summary: %s\n' % self.get_description())
    file.write('Home-page: %s\n' % self.get_url())
    file.write('Author: %s\n' % self.get_contact())
    file.write('Author-email: %s\n' % self.get_contact_email())
    file.write('License: %s\n' % self.get_license())
    if self.download_url:
        file.write('Download-URL: %s\n' % self.download_url)
    # Long description must be RFC 822-escaped to stay a single header value.
    long_desc = rfc822_escape(self.get_long_description())
    file.write('Description: %s\n' % long_desc)
    keywords = ','.join(self.get_keywords())
    if keywords:
        file.write('Keywords: %s\n' % keywords)
    self._write_list(file, 'Platform', self.get_platforms())
    self._write_list(file, 'Classifier', self.get_classifiers())
    # PEP 314
    self._write_list(file, 'Requires', self.get_requires())
    self._write_list(file, 'Provides', self.get_provides())
    self._write_list(file, 'Obsoletes', self.get_obsoletes())
    # Setuptools specific for PEP 345
    if hasattr(self, 'python_requires'):
        file.write('Requires-Python: %s\n' % self.python_requires)
# from Python 3.4
def write_pkg_info(self, base_dir):
    """Write the PKG-INFO file into the release tree.

    Opens ``PKG-INFO`` inside *base_dir* as UTF-8 text and delegates the
    actual formatting to ``write_pkg_file``.
    """
    target = os.path.join(base_dir, 'PKG-INFO')
    with open(target, 'w', encoding='UTF-8') as stream:
        self.write_pkg_file(stream)
sequence = tuple, list
def check_importable(dist, attr, value):
    """Verify that *value* is an importable 'module:attrs' style string."""
    try:
        # Parsing a dummy entry point validates the syntax; extras are
        # not meaningful here and therefore rejected.
        entry_point = pkg_resources.EntryPoint.parse('x=' + value)
        assert not entry_point.extras
    except (TypeError, ValueError, AttributeError, AssertionError):
        tmpl = "%r must be importable 'module:attrs' string (got %r)"
        raise DistutilsSetupError(tmpl % (attr, value))
def assert_string_list(dist, attr, value):
    """Verify that *value* is a list (or tuple) of strings; raise otherwise."""
    try:
        # Joining the elements must not reproduce the value itself; that
        # rules out a bare string being passed where a list is expected.
        joined = ''.join(value)
        assert joined != value
    except (TypeError, ValueError, AttributeError, AssertionError):
        tmpl = "%r must be a list of strings (got %r)"
        raise DistutilsSetupError(tmpl % (attr, value))
def check_nsp(dist, attr, value):
    """Verify that the declared namespace packages are valid."""
    assert_string_list(dist, attr, value)
    for nsp in value:
        # Every declared namespace package must actually contribute
        # something to the distribution.
        if not dist.has_contents_for(nsp):
            raise DistutilsSetupError(
                "Distribution contains no modules or packages for " +
                "namespace package %r" % nsp
            )
        # A dotted namespace package needs its parent declared as well;
        # this is only worth a warning, not an error.
        parent = nsp.rpartition('.')[0]
        if parent and parent not in value:
            distutils.log.warn(
                "WARNING: %r is declared as a package namespace, but %r"
                " is not: please correct this in setup.py", nsp, parent
            )
def check_extras(dist, attr, value):
    """Verify that the 'extras_require' mapping is well-formed."""
    try:
        for name, reqs in value.items():
            # An extra name may carry an environment marker after a colon.
            if ':' in name:
                name, marker = name.split(':', 1)
                if pkg_resources.invalid_marker(marker):
                    raise DistutilsSetupError("Invalid environment marker: " + marker)
            # Force evaluation so malformed specifiers surface here.
            list(pkg_resources.parse_requirements(reqs))
    except (TypeError, ValueError, AttributeError):
        raise DistutilsSetupError(
            "'extras_require' must be a dictionary whose values are "
            "strings or lists of strings containing valid project/version "
            "requirement specifiers."
        )
def assert_bool(dist, attr, value):
    """Verify that *value* is a genuine boolean (True, False, 0, or 1)."""
    # bool(value) == value only holds for True/False/0/1.
    if bool(value) != value:
        raise DistutilsSetupError(
            "{attr!r} must be a boolean value (got {value!r})".format(
                attr=attr, value=value))
def check_requirements(dist, attr, value):
    """Verify that *value* parses as a valid requirements list."""
    tmpl = (
        "{attr!r} must be a string or list of strings "
        "containing valid project/version requirement specifiers; {error}"
    )
    try:
        # Drain the lazy parser so that any bad specifier raises here.
        for _ in pkg_resources.parse_requirements(value):
            pass
    except (TypeError, ValueError) as error:
        raise DistutilsSetupError(tmpl.format(attr=attr, error=error))
def check_specifier(dist, attr, value):
    """Verify that *value* is a valid PEP 440 version specifier set."""
    try:
        packaging.specifiers.SpecifierSet(value)
    except packaging.specifiers.InvalidSpecifier as error:
        message = (
            "{attr!r} must be a string or list of strings "
            "containing valid version specifiers; {error}"
        ).format(attr=attr, error=error)
        raise DistutilsSetupError(message)
def check_entry_points(dist, attr, value):
    """Verify that the entry_points map can be parsed."""
    try:
        pkg_resources.EntryPoint.parse_map(value)
    except ValueError as err:
        # Re-raise as the setup-time error type distutils expects.
        raise DistutilsSetupError(err)
def check_test_suite(dist, attr, value):
    """Verify that the 'test_suite' setting is a (dotted) string name."""
    if isinstance(value, six.string_types):
        return
    raise DistutilsSetupError("test_suite must be a string")
def check_package_data(dist, attr, value):
    """Verify that *value* maps package names to iterables of glob patterns."""
    def _entry_ok(package, patterns):
        # Keys must be strings; values need only be iterable.
        if not isinstance(package, str):
            return False
        try:
            iter(patterns)
        except TypeError:
            return False
        return True

    if isinstance(value, dict) and all(
            _entry_ok(k, v) for k, v in value.items()):
        return
    raise DistutilsSetupError(
        attr + " must be a dictionary mapping package names to lists of "
        "wildcard patterns"
    )
def check_packages(dist, attr, value):
    """Warn about package names that are not valid dotted identifiers.

    Invalid names are only logged (not raised), preserving the historical
    lenient behavior of this check.
    """
    for pkgname in value:
        # \Z anchors the match at end-of-string.  The pattern was
        # previously unanchored, so a name such as 'foo-bar' or 'foo bar'
        # matched its leading 'foo' prefix and escaped the warning.
        if not re.match(r'\w+(\.\w+)*\Z', pkgname):
            distutils.log.warn(
                "WARNING: %r not a valid package name; please use only "
                ".-separated package names in setup.py", pkgname
            )
# The original distutils Distribution class, before any setuptools
# monkeypatching, used as the base class below.
_Distribution = get_unpatched(distutils.core.Distribution)
class Distribution(_Distribution):
    """Distribution with support for features, tests, and package data
    This is an enhanced version of 'distutils.dist.Distribution' that
    effectively adds the following new optional keyword arguments to 'setup()':
    'install_requires' -- a string or sequence of strings specifying project
        versions that the distribution requires when installed, in the format
        used by 'pkg_resources.require()'. They will be installed
        automatically when the package is installed. If you wish to use
        packages that are not available in PyPI, or want to give your users an
        alternate download location, you can add a 'find_links' option to the
        '[easy_install]' section of your project's 'setup.cfg' file, and then
        setuptools will scan the listed web pages for links that satisfy the
        requirements.
    'extras_require' -- a dictionary mapping names of optional "extras" to the
        additional requirement(s) that using those extras incurs. For example,
        this::
            extras_require = dict(reST = ["docutils>=0.3", "reSTedit"])
        indicates that the distribution can optionally provide an extra
        capability called "reST", but it can only be used if docutils and
        reSTedit are installed. If the user installs your package using
        EasyInstall and requests one of your extras, the corresponding
        additional requirements will be installed if needed.
    'features' **deprecated** -- a dictionary mapping option names to
        'setuptools.Feature'
        objects. Features are a portion of the distribution that can be
        included or excluded based on user options, inter-feature dependencies,
        and availability on the current system. Excluded features are omitted
        from all setup commands, including source and binary distributions, so
        you can create multiple distributions from the same source tree.
        Feature names should be valid Python identifiers, except that they may
        contain the '-' (minus) sign. Features can be included or excluded
        via the command line options '--with-X' and '--without-X', where 'X' is
        the name of the feature. Whether a feature is included by default, and
        whether you are allowed to control this from the command line, is
        determined by the Feature object. See the 'Feature' class for more
        information.
    'test_suite' -- the name of a test suite to run for the 'test' command.
        If the user runs 'python setup.py test', the package will be installed,
        and the named test suite will be run. The format is the same as
        would be used on a 'unittest.py' command line. That is, it is the
        dotted name of an object to import and call to generate a test suite.
    'package_data' -- a dictionary mapping package names to lists of filenames
        or globs to use to find data files contained in the named packages.
        If the dictionary has filenames or globs listed under '""' (the empty
        string), those names will be searched for in every package, in addition
        to any names for the specific package. Data files found using these
        names/globs will be installed along with the package, in the same
        location as the package. Note that globs are allowed to reference
        the contents of non-package subdirectories, as long as you use '/' as
        a path separator. (Globs are automatically converted to
        platform-specific paths at runtime.)
    In addition to these new keywords, this class also has several new methods
    for manipulating the distribution's contents. For example, the 'include()'
    and 'exclude()' methods can be thought of as in-place add and subtract
    commands that add or remove packages, modules, extensions, and so on from
    the distribution. They are used by the feature subsystem to configure the
    distribution for the included and excluded features.
    """
    # Installed dist whose version was faked up by patch_missing_pkg_info(),
    # if any.
    _patched_dist = None
    def patch_missing_pkg_info(self, attrs):
        # Fake up a replacement for the data that would normally come from
        # PKG-INFO, but which might not yet be built if this is a fresh
        # checkout.
        #
        if not attrs or 'name' not in attrs or 'version' not in attrs:
            return
        key = pkg_resources.safe_name(str(attrs['name'])).lower()
        dist = pkg_resources.working_set.by_key.get(key)
        if dist is not None and not dist.has_metadata('PKG-INFO'):
            dist._version = pkg_resources.safe_version(str(attrs['version']))
            self._patched_dist = dist
    def __init__(self, attrs=None):
        """Initialize, fetching any 'setup_requires' eggs before distutils
        sees the attrs, and normalizing the version per PEP 440."""
        have_package_data = hasattr(self, "package_data")
        if not have_package_data:
            self.package_data = {}
        _attrs_dict = attrs or {}
        if 'features' in _attrs_dict or 'require_features' in _attrs_dict:
            Feature.warn_deprecated()
        self.require_features = []
        self.features = {}
        self.dist_files = []
        # 'src_root' is popped so base distutils never sees it.
        self.src_root = attrs and attrs.pop("src_root", None)
        self.patch_missing_pkg_info(attrs)
        # Make sure we have any eggs needed to interpret 'attrs'
        if attrs is not None:
            self.dependency_links = attrs.pop('dependency_links', [])
            assert_string_list(self, 'dependency_links', self.dependency_links)
        if attrs and 'setup_requires' in attrs:
            self.fetch_build_eggs(attrs['setup_requires'])
        # Pre-seed attributes for every registered setup() keyword so that
        # plugin-provided keywords default to None.
        for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
            vars(self).setdefault(ep.name, None)
        _Distribution.__init__(self, attrs)
        if isinstance(self.metadata.version, numbers.Number):
            # Some people apparently take "version number" too literally :)
            self.metadata.version = str(self.metadata.version)
        if self.metadata.version is not None:
            try:
                ver = packaging.version.Version(self.metadata.version)
                normalized_version = str(ver)
                if self.metadata.version != normalized_version:
                    warnings.warn(
                        "Normalizing '%s' to '%s'" % (
                            self.metadata.version,
                            normalized_version,
                        )
                    )
                    self.metadata.version = normalized_version
            except (packaging.version.InvalidVersion, TypeError):
                warnings.warn(
                    "The version specified (%r) is an invalid version, this "
                    "may not work as expected with newer versions of "
                    "setuptools, pip, and PyPI. Please see PEP 440 for more "
                    "details." % self.metadata.version
                )
        if getattr(self, 'python_requires', None):
            self.metadata.python_requires = self.python_requires
    def parse_command_line(self):
        """Process features after parsing command line options"""
        result = _Distribution.parse_command_line(self)
        if self.features:
            self._finalize_features()
        return result
    def _feature_attrname(self, name):
        """Convert feature name to corresponding option attribute name"""
        return 'with_' + name.replace('-', '_')
    def fetch_build_eggs(self, requires):
        """Resolve pre-setup requirements"""
        resolved_dists = pkg_resources.working_set.resolve(
            pkg_resources.parse_requirements(requires),
            installer=self.fetch_build_egg,
            replace_conflicting=True,
        )
        for dist in resolved_dists:
            pkg_resources.working_set.add(dist, replace=True)
        return resolved_dists
    def finalize_options(self):
        """Finalize, then run each registered setup-keyword hook."""
        _Distribution.finalize_options(self)
        if self.features:
            self._set_global_opts_from_features()
        for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
            value = getattr(self, ep.name, None)
            if value is not None:
                ep.require(installer=self.fetch_build_egg)
                ep.load()(self, ep.name, value)
        if getattr(self, 'convert_2to3_doctests', None):
            # XXX may convert to set here when we can rely on set being builtin
            self.convert_2to3_doctests = [os.path.abspath(p) for p in self.convert_2to3_doctests]
        else:
            self.convert_2to3_doctests = []
    def get_egg_cache_dir(self):
        """Return the '.eggs' cache directory, creating it (with a README
        explaining its purpose) on first use."""
        egg_cache_dir = os.path.join(os.curdir, '.eggs')
        if not os.path.exists(egg_cache_dir):
            os.mkdir(egg_cache_dir)
            windows_support.hide_file(egg_cache_dir)
            readme_txt_filename = os.path.join(egg_cache_dir, 'README.txt')
            with open(readme_txt_filename, 'w') as f:
                f.write('This directory contains eggs that were downloaded '
                        'by setuptools to build, test, and run plug-ins.\n\n')
                f.write('This directory caches those eggs to prevent '
                        'repeated downloads.\n\n')
                f.write('However, it is safe to delete this directory.\n\n')
        return egg_cache_dir
    def fetch_build_egg(self, req):
        """Fetch an egg needed for building"""
        try:
            cmd = self._egg_fetcher
            cmd.package_index.to_scan = []
        except AttributeError:
            # Lazily build (and cache on self) an easy_install command
            # configured from setup.cfg, restricted to download options.
            from setuptools.command.easy_install import easy_install
            dist = self.__class__({'script_args': ['easy_install']})
            dist.parse_config_files()
            opts = dist.get_option_dict('easy_install')
            keep = (
                'find_links', 'site_dirs', 'index_url', 'optimize',
                'site_dirs', 'allow_hosts'
            )
            for key in list(opts):
                if key not in keep:
                    del opts[key] # don't use any other settings
            if self.dependency_links:
                links = self.dependency_links[:]
                if 'find_links' in opts:
                    links = opts['find_links'][1].split() + links
                opts['find_links'] = ('setup', links)
            install_dir = self.get_egg_cache_dir()
            cmd = easy_install(
                dist, args=["x"], install_dir=install_dir, exclude_scripts=True,
                always_copy=False, build_directory=None, editable=False,
                upgrade=False, multi_version=True, no_report=True, user=False
            )
            cmd.ensure_finalized()
            self._egg_fetcher = cmd
        return cmd.easy_install(req)
    def _set_global_opts_from_features(self):
        """Add --with-X/--without-X options based on optional features"""
        go = []
        no = self.negative_opt.copy()
        for name, feature in self.features.items():
            self._set_feature(name, None)
            feature.validate(self)
            if feature.optional:
                descr = feature.description
                incdef = ' (default)'
                excdef = ''
                if not feature.include_by_default():
                    # Move the "(default)" marker onto the exclude option.
                    excdef, incdef = incdef, excdef
                go.append(('with-' + name, None, 'include ' + descr + incdef))
                go.append(('without-' + name, None, 'exclude ' + descr + excdef))
                no['without-' + name] = 'with-' + name
        self.global_options = self.feature_options = go + self.global_options
        self.negative_opt = self.feature_negopt = no
    def _finalize_features(self):
        """Add/remove features and resolve dependencies between them"""
        # First, flag all the enabled items (and thus their dependencies)
        for name, feature in self.features.items():
            enabled = self.feature_is_included(name)
            if enabled or (enabled is None and feature.include_by_default()):
                feature.include_in(self)
                self._set_feature(name, 1)
        # Then disable the rest, so that off-by-default features don't
        # get flagged as errors when they're required by an enabled feature
        for name, feature in self.features.items():
            if not self.feature_is_included(name):
                feature.exclude_from(self)
                self._set_feature(name, 0)
    def get_command_class(self, command):
        """Pluggable version of get_command_class()"""
        if command in self.cmdclass:
            return self.cmdclass[command]
        # Commands may be contributed by plugins via entry points.
        for ep in pkg_resources.iter_entry_points('distutils.commands', command):
            ep.require(installer=self.fetch_build_egg)
            self.cmdclass[command] = cmdclass = ep.load()
            return cmdclass
        else:
            return _Distribution.get_command_class(self, command)
    def print_commands(self):
        """Register plugin commands first so --help-commands lists them."""
        for ep in pkg_resources.iter_entry_points('distutils.commands'):
            if ep.name not in self.cmdclass:
                # don't require extras as the commands won't be invoked
                cmdclass = ep.resolve()
                self.cmdclass[ep.name] = cmdclass
        return _Distribution.print_commands(self)
    def get_command_list(self):
        """Register plugin commands first so they appear in the list."""
        for ep in pkg_resources.iter_entry_points('distutils.commands'):
            if ep.name not in self.cmdclass:
                # don't require extras as the commands won't be invoked
                cmdclass = ep.resolve()
                self.cmdclass[ep.name] = cmdclass
        return _Distribution.get_command_list(self)
    def _set_feature(self, name, status):
        """Set feature's inclusion status"""
        setattr(self, self._feature_attrname(name), status)
    def feature_is_included(self, name):
        """Return 1 if feature is included, 0 if excluded, 'None' if unknown"""
        return getattr(self, self._feature_attrname(name))
    def include_feature(self, name):
        """Request inclusion of feature named 'name'"""
        if self.feature_is_included(name) == 0:
            descr = self.features[name].description
            raise DistutilsOptionError(
                descr + " is required, but was excluded or is not available"
            )
        self.features[name].include_in(self)
        self._set_feature(name, 1)
    def include(self, **attrs):
        """Add items to distribution that are named in keyword arguments
        For example, 'dist.exclude(py_modules=["x"])' would add 'x' to
        the distribution's 'py_modules' attribute, if it was not already
        there.
        Currently, this method only supports inclusion for attributes that are
        lists or tuples. If you need to add support for adding to other
        attributes in this or a subclass, you can add an '_include_X' method,
        where 'X' is the name of the attribute. The method will be called with
        the value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})'
        will try to call 'dist._include_foo({"bar":"baz"})', which can then
        handle whatever special inclusion logic is needed.
        """
        for k, v in attrs.items():
            # Per-attribute hook takes precedence over the generic handler.
            include = getattr(self, '_include_' + k, None)
            if include:
                include(v)
            else:
                self._include_misc(k, v)
    def exclude_package(self, package):
        """Remove packages, modules, and extensions in named package"""
        pfx = package + '.'
        if self.packages:
            self.packages = [
                p for p in self.packages
                if p != package and not p.startswith(pfx)
            ]
        if self.py_modules:
            self.py_modules = [
                p for p in self.py_modules
                if p != package and not p.startswith(pfx)
            ]
        if self.ext_modules:
            self.ext_modules = [
                p for p in self.ext_modules
                if p.name != package and not p.name.startswith(pfx)
            ]
    def has_contents_for(self, package):
        """Return true if 'exclude_package(package)' would do something"""
        pfx = package + '.'
        for p in self.iter_distribution_names():
            if p == package or p.startswith(pfx):
                return True
    def _exclude_misc(self, name, value):
        """Handle 'exclude()' for list/tuple attrs without a special handler"""
        if not isinstance(value, sequence):
            raise DistutilsSetupError(
                "%s: setting must be a list or tuple (%r)" % (name, value)
            )
        try:
            old = getattr(self, name)
        except AttributeError:
            raise DistutilsSetupError(
                "%s: No such distribution setting" % name
            )
        if old is not None and not isinstance(old, sequence):
            raise DistutilsSetupError(
                name + ": this setting cannot be changed via include/exclude"
            )
        elif old:
            # Keep only items that were not asked to be excluded.
            setattr(self, name, [item for item in old if item not in value])
    def _include_misc(self, name, value):
        """Handle 'include()' for list/tuple attrs without a special handler"""
        if not isinstance(value, sequence):
            raise DistutilsSetupError(
                "%s: setting must be a list (%r)" % (name, value)
            )
        try:
            old = getattr(self, name)
        except AttributeError:
            raise DistutilsSetupError(
                "%s: No such distribution setting" % name
            )
        if old is None:
            setattr(self, name, value)
        elif not isinstance(old, sequence):
            raise DistutilsSetupError(
                name + ": this setting cannot be changed via include/exclude"
            )
        else:
            # Append only items that are not already present.
            setattr(self, name, old + [item for item in value if item not in old])
    def exclude(self, **attrs):
        """Remove items from distribution that are named in keyword arguments
        For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from
        the distribution's 'py_modules' attribute. Excluding packages uses
        the 'exclude_package()' method, so all of the package's contained
        packages, modules, and extensions are also excluded.
        Currently, this method only supports exclusion from attributes that are
        lists or tuples. If you need to add support for excluding from other
        attributes in this or a subclass, you can add an '_exclude_X' method,
        where 'X' is the name of the attribute. The method will be called with
        the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})'
        will try to call 'dist._exclude_foo({"bar":"baz"})', which can then
        handle whatever special exclusion logic is needed.
        """
        for k, v in attrs.items():
            # Per-attribute hook takes precedence over the generic handler.
            exclude = getattr(self, '_exclude_' + k, None)
            if exclude:
                exclude(v)
            else:
                self._exclude_misc(k, v)
    def _exclude_packages(self, packages):
        if not isinstance(packages, sequence):
            raise DistutilsSetupError(
                "packages: setting must be a list or tuple (%r)" % (packages,)
            )
        list(map(self.exclude_package, packages))
    def _parse_command_opts(self, parser, args):
        """Parse one command's options, expanding aliases and honoring
        'command_consumes_arguments' on the command class."""
        # Remove --with-X/--without-X options when processing command args
        self.global_options = self.__class__.global_options
        self.negative_opt = self.__class__.negative_opt
        # First, expand any aliases
        command = args[0]
        aliases = self.get_option_dict('aliases')
        while command in aliases:
            src, alias = aliases[command]
            del aliases[command] # ensure each alias can expand only once!
            import shlex
            args[:1] = shlex.split(alias, True)
            command = args[0]
        nargs = _Distribution._parse_command_opts(self, parser, args)
        # Handle commands that want to consume all remaining arguments
        cmd_class = self.get_command_class(command)
        if getattr(cmd_class, 'command_consumes_arguments', None):
            self.get_option_dict(command)['args'] = ("command line", nargs)
            if nargs is not None:
                return []
        return nargs
    def get_cmdline_options(self):
        """Return a '{cmd: {opt:val}}' map of all command-line options
        Option names are all long, but do not include the leading '--', and
        contain dashes rather than underscores. If the option doesn't take
        an argument (e.g. '--quiet'), the 'val' is 'None'.
        Note that options provided by config files are intentionally excluded.
        """
        d = {}
        for cmd, opts in self.command_options.items():
            for opt, (src, val) in opts.items():
                if src != "command line":
                    continue
                opt = opt.replace('_', '-')
                if val == 0:
                    # A false value means a negative option was used; map it
                    # back to the '--without'/'--no' style name.
                    cmdobj = self.get_command_obj(cmd)
                    neg_opt = self.negative_opt.copy()
                    neg_opt.update(getattr(cmdobj, 'negative_opt', {}))
                    for neg, pos in neg_opt.items():
                        if pos == opt:
                            opt = neg
                            val = None
                            break
                    else:
                        raise AssertionError("Shouldn't be able to get here")
                elif val == 1:
                    val = None
                d.setdefault(cmd, {})[opt] = val
        return d
    def iter_distribution_names(self):
        """Yield all packages, modules, and extension names in distribution"""
        for pkg in self.packages or ():
            yield pkg
        for module in self.py_modules or ():
            yield module
        for ext in self.ext_modules or ():
            if isinstance(ext, tuple):
                name, buildinfo = ext
            else:
                name = ext.name
            # Strip the conventional 'module' suffix from extension names.
            if name.endswith('module'):
                name = name[:-6]
            yield name
    def handle_display_options(self, option_order):
        """If there were any non-global "display-only" options
        (--help-commands or the metadata display options) on the command
        line, display the requested info and return true; else return
        false.
        """
        import sys
        if six.PY2 or self.help_commands:
            return _Distribution.handle_display_options(self, option_order)
        # Stdout may be StringIO (e.g. in tests)
        import io
        if not isinstance(sys.stdout, io.TextIOWrapper):
            return _Distribution.handle_display_options(self, option_order)
        # Don't wrap stdout if utf-8 is already the encoding. Provides
        # workaround for #334.
        if sys.stdout.encoding.lower() in ('utf-8', 'utf8'):
            return _Distribution.handle_display_options(self, option_order)
        # Print metadata in UTF-8 no matter the platform
        encoding = sys.stdout.encoding
        errors = sys.stdout.errors
        newline = sys.platform != 'win32' and '\n' or None
        line_buffering = sys.stdout.line_buffering
        sys.stdout = io.TextIOWrapper(
            sys.stdout.detach(), 'utf-8', errors, newline, line_buffering)
        try:
            return _Distribution.handle_display_options(self, option_order)
        finally:
            # Restore the original stdout wrapper whatever happens above.
            sys.stdout = io.TextIOWrapper(
                sys.stdout.detach(), encoding, errors, newline, line_buffering)
class Feature:
    """
    **deprecated** -- The `Feature` facility was never completely implemented
    or supported, `has reported issues
    <https://github.com/pypa/setuptools/issues/58>`_ and will be removed in
    a future version.
    A subset of the distribution that can be excluded if unneeded/wanted
    Features are created using these keyword arguments:
    'description' -- a short, human readable description of the feature, to
        be used in error messages, and option help messages.
    'standard' -- if true, the feature is included by default if it is
        available on the current system. Otherwise, the feature is only
        included if requested via a command line '--with-X' option, or if
        another included feature requires it. The default setting is 'False'.
    'available' -- if true, the feature is available for installation on the
        current system. The default setting is 'True'.
    'optional' -- if true, the feature's inclusion can be controlled from the
        command line, using the '--with-X' or '--without-X' options. If
        false, the feature's inclusion status is determined automatically,
        based on 'available', 'standard', and whether any other feature
        requires it. The default setting is 'True'.
    'require_features' -- a string or sequence of strings naming features
        that should also be included if this feature is included. Defaults to
        empty list. May also contain 'Require' objects that should be
        added/removed from the distribution.
    'remove' -- a string or list of strings naming packages to be removed
        from the distribution if this feature is *not* included. If the
        feature *is* included, this argument is ignored. This argument exists
        to support removing features that "crosscut" a distribution, such as
        defining a 'tests' feature that removes all the 'tests' subpackages
        provided by other features. The default for this argument is an empty
        list. (Note: the named package(s) or modules must exist in the base
        distribution when the 'setup()' function is initially called.)
    other keywords -- any other keyword arguments are saved, and passed to
        the distribution's 'include()' and 'exclude()' methods when the
        feature is included or excluded, respectively. So, for example, you
        could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be
        added or removed from the distribution as appropriate.
    A feature must include at least one 'requires', 'remove', or other
    keyword argument. Otherwise, it can't affect the distribution in any way.
    Note also that you can subclass 'Feature' to create your own specialized
    feature types that modify the distribution in other ways when included or
    excluded. See the docstrings for the various methods here for more detail.
    Aside from the methods, the only feature attributes that distributions look
    at are 'description' and 'optional'.
    """
    @staticmethod
    def warn_deprecated():
        warnings.warn(
            "Features are deprecated and will be removed in a future "
            "version. See https://github.com/pypa/setuptools/issues/65.",
            DeprecationWarning,
            stacklevel=3,
        )
    def __init__(self, description, standard=False, available=True,
                 optional=True, require_features=(), remove=(), **extras):
        self.warn_deprecated()
        self.description = description
        self.standard = standard
        self.available = available
        self.optional = optional
        # A single string or Require is shorthand for a one-element tuple.
        if isinstance(require_features, (str, Require)):
            require_features = require_features,
        # String entries name other features; Require objects are treated
        # as extra include/exclude arguments instead.
        self.require_features = [
            r for r in require_features if isinstance(r, str)
        ]
        er = [r for r in require_features if not isinstance(r, str)]
        if er:
            extras['require_features'] = er
        if isinstance(remove, str):
            remove = remove,
        self.remove = remove
        self.extras = extras
        if not remove and not require_features and not extras:
            # Bug fix: the '%s' placeholder was previously never filled in,
            # so the message literally printed "Feature %s:".
            raise DistutilsSetupError(
                "Feature %s: must define 'require_features', 'remove', or at least one"
                " of 'packages', 'py_modules', etc." % description
            )
    def include_by_default(self):
        """Should this feature be included by default?"""
        return self.available and self.standard
    def include_in(self, dist):
        """Ensure feature and its requirements are included in distribution
        You may override this in a subclass to perform additional operations on
        the distribution. Note that this method may be called more than once
        per feature, and so should be idempotent.
        """
        if not self.available:
            raise DistutilsPlatformError(
                self.description + " is required, "
                "but is not available on this platform"
            )
        dist.include(**self.extras)
        for f in self.require_features:
            dist.include_feature(f)
    def exclude_from(self, dist):
        """Ensure feature is excluded from distribution
        You may override this in a subclass to perform additional operations on
        the distribution. This method will be called at most once per
        feature, and only after all included features have been asked to
        include themselves.
        """
        dist.exclude(**self.extras)
        if self.remove:
            for item in self.remove:
                dist.exclude_package(item)
    def validate(self, dist):
        """Verify that feature makes sense in context of distribution
        This method is called by the distribution just before it parses its
        command line. It checks to ensure that the 'remove' attribute, if any,
        contains only valid package/module names that are present in the base
        distribution when 'setup()' is called. You may override it in a
        subclass to perform any other required validation of the feature
        against a target distribution.
        """
        for item in self.remove:
            if not dist.has_contents_for(item):
                raise DistutilsSetupError(
                    "%s wants to be able to remove %s, but the distribution"
                    " doesn't contain any packages or modules under %s"
                    % (self.description, item, item)
                )
| mit |
Alberto-Beralix/Beralix | i386-squashfs-root/usr/lib/python2.7/Queue.py | 188 | 8561 | """A multi-producer, multi-consumer queue."""
from time import time as _time
try:
import threading as _threading
except ImportError:
import dummy_threading as _threading
from collections import deque
import heapq
__all__ = ['Empty', 'Full', 'Queue', 'PriorityQueue', 'LifoQueue']
class Empty(Exception):
    """Raised by Queue.get(block=0)/get_nowait() when no item is available."""
class Full(Exception):
    """Raised by Queue.put(block=0)/put_nowait() when no free slot exists."""
class Queue:
"""Create a queue object with a given maximum size.
If maxsize is <= 0, the queue size is infinite.
"""
def __init__(self, maxsize=0):
self.maxsize = maxsize
self._init(maxsize)
# mutex must be held whenever the queue is mutating. All methods
# that acquire mutex must release it before returning. mutex
# is shared between the three conditions, so acquiring and
# releasing the conditions also acquires and releases mutex.
self.mutex = _threading.Lock()
# Notify not_empty whenever an item is added to the queue; a
# thread waiting to get is notified then.
self.not_empty = _threading.Condition(self.mutex)
# Notify not_full whenever an item is removed from the queue;
# a thread waiting to put is notified then.
self.not_full = _threading.Condition(self.mutex)
# Notify all_tasks_done whenever the number of unfinished tasks
# drops to zero; thread waiting to join() is notified to resume
self.all_tasks_done = _threading.Condition(self.mutex)
self.unfinished_tasks = 0
def task_done(self):
"""Indicate that a formerly enqueued task is complete.
Used by Queue consumer threads. For each get() used to fetch a task,
a subsequent call to task_done() tells the queue that the processing
on the task is complete.
If a join() is currently blocking, it will resume when all items
have been processed (meaning that a task_done() call was received
for every item that had been put() into the queue).
Raises a ValueError if called more times than there were items
placed in the queue.
"""
self.all_tasks_done.acquire()
try:
unfinished = self.unfinished_tasks - 1
if unfinished <= 0:
if unfinished < 0:
raise ValueError('task_done() called too many times')
self.all_tasks_done.notify_all()
self.unfinished_tasks = unfinished
finally:
self.all_tasks_done.release()
def join(self):
"""Blocks until all items in the Queue have been gotten and processed.
The count of unfinished tasks goes up whenever an item is added to the
queue. The count goes down whenever a consumer thread calls task_done()
to indicate the item was retrieved and all work on it is complete.
When the count of unfinished tasks drops to zero, join() unblocks.
"""
self.all_tasks_done.acquire()
try:
while self.unfinished_tasks:
self.all_tasks_done.wait()
finally:
self.all_tasks_done.release()
def qsize(self):
"""Return the approximate size of the queue (not reliable!)."""
self.mutex.acquire()
n = self._qsize()
self.mutex.release()
return n
def empty(self):
"""Return True if the queue is empty, False otherwise (not reliable!)."""
self.mutex.acquire()
n = not self._qsize()
self.mutex.release()
return n
def full(self):
"""Return True if the queue is full, False otherwise (not reliable!)."""
self.mutex.acquire()
n = 0 < self.maxsize == self._qsize()
self.mutex.release()
return n
def put(self, item, block=True, timeout=None):
"""Put an item into the queue.
If optional args 'block' is true and 'timeout' is None (the default),
block if necessary until a free slot is available. If 'timeout' is
a positive number, it blocks at most 'timeout' seconds and raises
the Full exception if no free slot was available within that time.
Otherwise ('block' is false), put an item on the queue if a free slot
is immediately available, else raise the Full exception ('timeout'
is ignored in that case).
"""
self.not_full.acquire()
try:
if self.maxsize > 0:
if not block:
if self._qsize() == self.maxsize:
raise Full
elif timeout is None:
while self._qsize() == self.maxsize:
self.not_full.wait()
elif timeout < 0:
raise ValueError("'timeout' must be a positive number")
else:
endtime = _time() + timeout
while self._qsize() == self.maxsize:
remaining = endtime - _time()
if remaining <= 0.0:
raise Full
self.not_full.wait(remaining)
self._put(item)
self.unfinished_tasks += 1
self.not_empty.notify()
finally:
self.not_full.release()
def put_nowait(self, item):
"""Put an item into the queue without blocking.
Only enqueue the item if a free slot is immediately available.
Otherwise raise the Full exception.
"""
return self.put(item, False)
    def get(self, block=True, timeout=None):
        """Remove and return an item from the queue.
        If optional args 'block' is true and 'timeout' is None (the default),
        block if necessary until an item is available. If 'timeout' is
        a positive number, it blocks at most 'timeout' seconds and raises
        the Empty exception if no item was available within that time.
        Otherwise ('block' is false), return an item if one is immediately
        available, else raise the Empty exception ('timeout' is ignored
        in that case).
        """
        self.not_empty.acquire()
        try:
            if not block:
                if not self._qsize():
                    raise Empty
            elif timeout is None:
                # Loop guards against spurious wakeups and racing consumers.
                while not self._qsize():
                    self.not_empty.wait()
            elif timeout < 0:
                raise ValueError("'timeout' must be a positive number")
            else:
                # Absolute deadline so each retry waits only the remainder.
                endtime = _time() + timeout
                while not self._qsize():
                    remaining = endtime - _time()
                    if remaining <= 0.0:
                        raise Empty
                    self.not_empty.wait(remaining)
            item = self._get()
            # A slot just freed up: wake one producer blocked in put().
            self.not_full.notify()
            return item
        finally:
            self.not_empty.release()
def get_nowait(self):
"""Remove and return an item from the queue without blocking.
Only get an item if one is immediately available. Otherwise
raise the Empty exception.
"""
return self.get(False)
# Override these methods to implement other queue organizations
# (e.g. stack or priority queue).
# These will only be called with appropriate locks held
# Initialize the queue representation
    def _init(self, maxsize):
        # FIFO representation: deque gives O(1) append and popleft.
        # maxsize enforcement happens in the public methods, not here.
        self.queue = deque()
    def _qsize(self, len=len):
        # 'len=len' binds the builtin as a default argument (a CPython
        # micro-optimization turning a global lookup into a local one);
        # callers never pass this parameter.
        return len(self.queue)
# Put a new item in the queue
    def _put(self, item):
        # Append on the right; _get pops from the left (FIFO order).
        self.queue.append(item)
# Get an item from the queue
    def _get(self):
        # Pop from the left end, i.e. the oldest queued item.
        return self.queue.popleft()
class PriorityQueue(Queue):
    """Queue variant that always hands out the lowest-valued entry first.

    Entries are typically (priority_number, data) tuples.
    """

    def _init(self, maxsize):
        # The list is maintained as a binary heap by the heapq functions.
        self.queue = []

    def _qsize(self, len=len):
        return len(self.queue)

    def _put(self, item, heappush=heapq.heappush):
        # heappush bound as a default argument (lookup micro-optimization).
        heappush(self.queue, item)

    def _get(self, heappop=heapq.heappop):
        return heappop(self.queue)
class LifoQueue(Queue):
    """Queue variant with stack (last-in, first-out) retrieval order."""

    def _init(self, maxsize):
        # A plain list works as the stack; push and pop both act on the tail.
        self.queue = []

    def _qsize(self, len=len):
        return len(self.queue)

    def _put(self, item):
        self.queue.append(item)

    def _get(self):
        return self.queue.pop()
| gpl-3.0 |
meghana1995/sympy | sympy/physics/quantum/tests/test_gate.py | 88 | 11664 | from sympy import exp, symbols, sqrt, I, pi, Mul, Integer, Wild
from sympy.core.compatibility import range
from sympy.matrices import Matrix
from sympy.physics.quantum.gate import (XGate, YGate, ZGate, random_circuit,
CNOT, IdentityGate, H, X, Y, S, T, Z, SwapGate, gate_simp, gate_sort,
CNotGate, TGate, HadamardGate, PhaseGate, UGate, CGate)
from sympy.physics.quantum.commutator import Commutator
from sympy.physics.quantum.anticommutator import AntiCommutator
from sympy.physics.quantum.represent import represent
from sympy.physics.quantum.qapply import qapply
from sympy.physics.quantum.qubit import Qubit, IntQubit, qubit_to_matrix, \
matrix_to_qubit
from sympy.physics.quantum.matrixutils import matrix_to_zero
from sympy.physics.quantum.matrixcache import sqrt2_inv
from sympy.physics.quantum import Dagger
def test_gate():
    """Basic gate attributes, plus equality on symbolic (Wild) targets."""
    gate = HadamardGate(1)
    assert gate.min_qubits == 2
    assert gate.nqubits == 1

    w0, w1 = Wild('i0'), Wild('i1')
    assert HadamardGate(w0) == HadamardGate(w0)
    assert HadamardGate(w0) != HadamardGate(w1)
    assert HadamardGate(w1) != HadamardGate(w0)

    assert CNOT(w1, w0) == CNOT(w1, w0)
    assert CNOT(w1, w0) != CNOT(w0, w1)
    assert CNOT(w1, w0) != CNOT(w0, w1)
def test_UGate():
    """A one-qubit UGate acts by its matrix, alone and embedded."""
    a, b, c, d = symbols('a,b,c,d')
    mat = Matrix([[a, b], [c, d]])

    # Gate on qubit 0 of a 1-qubit register.
    gate0 = UGate((0,), mat)
    assert represent(gate0, nqubits=1) == mat
    assert qapply(gate0 * Qubit('0')) == a*Qubit('0') + c*Qubit('1')
    assert qapply(gate0 * Qubit('1')) == b*Qubit('0') + d*Qubit('1')

    # Same gate embedded on qubit 1 of a 2-qubit register.
    gate1 = UGate((1,), mat)
    rep = represent(gate1, nqubits=2)
    for k in range(4):
        assert rep * qubit_to_matrix(IntQubit(k, 2)) == \
            qubit_to_matrix(qapply(gate1 * IntQubit(k, 2)))
def test_cgate():
    """Test the general CGate: representation, application, decompose,
    and the dagger/inverse/power relations."""
    # Test single control functionality
    CNOTMatrix = Matrix(
        [[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 0, 1], [0, 0, 1, 0]])
    assert represent(CGate(1, XGate(0)), nqubits=2) == CNOTMatrix
    # Test multiple control bit functionality
    ToffoliGate = CGate((1, 2), XGate(0))
    assert represent(ToffoliGate, nqubits=3) == \
        Matrix(
            [[1, 0, 0, 0, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0, 0, 0], [0, 0, 1, 0, 0, 0, 0, 0],
             [0, 0, 0, 1, 0, 0, 0, 0], [0, 0, 0, 0, 1, 0, 0, 0], [0, 0, 0, 0, 0,
             1, 0, 0], [0, 0, 0, 0, 0, 0, 0, 1],
             [0, 0, 0, 0, 0, 0, 1, 0]])
    # Symbolic application must match the matrix representation.
    ToffoliGate = CGate((3, 0), XGate(1))
    assert qapply(ToffoliGate*Qubit('1001')) == \
        matrix_to_qubit(represent(ToffoliGate*Qubit('1001'), nqubits=4))
    assert qapply(ToffoliGate*Qubit('0000')) == \
        matrix_to_qubit(represent(ToffoliGate*Qubit('0000'), nqubits=4))
    CYGate = CGate(1, YGate(0))
    CYGate_matrix = Matrix(
        ((1, 0, 0, 0), (0, 1, 0, 0), (0, 0, 0, -I), (0, 0, I, 0)))
    # Test 2 qubit controlled-Y gate decompose method.
    assert represent(CYGate.decompose(), nqubits=2) == CYGate_matrix
    CZGate = CGate(0, ZGate(1))
    CZGate_matrix = Matrix(
        ((1, 0, 0, 0), (0, 1, 0, 0), (0, 0, 1, 0), (0, 0, 0, -1)))
    assert qapply(CZGate*Qubit('11')) == -Qubit('11')
    assert matrix_to_qubit(represent(CZGate*Qubit('11'), nqubits=2)) == \
        -Qubit('11')
    # Test 2 qubit controlled-Z gate decompose method.
    assert represent(CZGate.decompose(), nqubits=2) == CZGate_matrix
    CPhaseGate = CGate(0, PhaseGate(1))
    assert qapply(CPhaseGate*Qubit('11')) == \
        I*Qubit('11')
    assert matrix_to_qubit(represent(CPhaseGate*Qubit('11'), nqubits=2)) == \
        I*Qubit('11')
    # Test that the dagger, inverse, and power of CGate is evaluated properly
    # (CZ is self-adjoint; CPhase is not).
    assert Dagger(CZGate) == CZGate
    assert pow(CZGate, 1) == Dagger(CZGate)
    assert Dagger(CZGate) == CZGate.inverse()
    assert Dagger(CPhaseGate) != CPhaseGate
    assert Dagger(CPhaseGate) == CPhaseGate.inverse()
    assert Dagger(CPhaseGate) == pow(CPhaseGate, -1)
    assert pow(CPhaseGate, -1) == CPhaseGate.inverse()
def test_UGate_CGate_combo():
    """A controlled UGate must act as a block-diagonal matrix."""
    a, b, c, d = symbols('a,b,c,d')
    uMat = Matrix([[a, b], [c, d]])
    # Expected controlled-U matrix: identity on the control-0 subspace,
    # uMat on the control-1 subspace.
    cMat = Matrix([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, a, b], [0, 0, c, d]])
    # Test basic case where gate exists in 1-qubit space.
    u1 = UGate((0,), uMat)
    cu1 = CGate(1, u1)
    assert represent(cu1, nqubits=2) == cMat
    assert qapply(cu1*Qubit('10')) == a*Qubit('10') + c*Qubit('11')
    assert qapply(cu1*Qubit('11')) == b*Qubit('10') + d*Qubit('11')
    assert qapply(cu1*Qubit('01')) == Qubit('01')
    assert qapply(cu1*Qubit('00')) == Qubit('00')
    # Test case where gate exists in a larger space.
    u2 = UGate((1,), uMat)
    u2Rep = represent(u2, nqubits=2)
    for i in range(4):
        assert u2Rep*qubit_to_matrix(IntQubit(i, 2)) == \
            qubit_to_matrix(qapply(u2*IntQubit(i, 2)))
def test_represent_hadamard():
    """H on qubit 0 of |00> gives equal amplitude on |00> and |01>."""
    state = represent(HadamardGate(0) * Qubit('00'), nqubits=2)
    assert state == Matrix([sqrt2_inv, sqrt2_inv, 0, 0])
def test_represent_xgate():
    """X on qubit 0 maps |00> to |01>."""
    state = represent(XGate(0) * Qubit('00'), nqubits=2)
    assert state == Matrix([0, 1, 0, 0])
def test_represent_ygate():
    """Y on qubit 0 maps |00> to I*|01>."""
    state = represent(YGate(0) * Qubit('00'), nqubits=2)
    assert state[0] == 0 and state[1] == I
    assert state[2] == 0 and state[3] == 0
def test_represent_zgate():
    """Z leaves |00> unchanged."""
    state = represent(ZGate(0) * Qubit('00'), nqubits=2)
    assert state == Matrix([1, 0, 0, 0])
def test_represent_phasegate():
    """The phase gate S multiplies |01> by I."""
    state = represent(PhaseGate(0) * Qubit('01'), nqubits=2)
    assert state == Matrix([0, I, 0, 0])
def test_represent_tgate():
    """The T gate multiplies |01> by exp(I*pi/4)."""
    state = represent(TGate(0) * Qubit('01'), nqubits=2)
    assert state == Matrix([0, exp(I*pi/4), 0, 0])
def test_compound_gates():
    """A product of single-qubit gates applied to |00>."""
    state = represent(
        YGate(0)*ZGate(0)*XGate(0)*HadamardGate(0)*Qubit('00'), nqubits=2)
    assert state == Matrix([I/sqrt(2), I/sqrt(2), 0, 0])
def test_cnot_gate():
    """CNOT matrix form, application to a state, and self-inverse property."""
    gate = CNotGate(1, 0)
    assert represent(gate, nqubits=2) == Matrix(
        [[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 0, 1], [0, 0, 1, 0]])

    circuit = gate * Qubit('111')
    assert matrix_to_qubit(represent(circuit, nqubits=3)) == qapply(circuit)

    gate = CNotGate(1, 0)
    assert Dagger(gate) == gate
    assert Dagger(Dagger(gate)) == gate
    assert gate * gate == 1
def test_gate_sort():
    """Test gate_sort: gates on distinct qubits sort into index order,
    and anticommuting same-qubit pairs pick up the expected sign."""
    for g in (X, Y, Z, H, S, T):
        assert gate_sort(g(2)*g(1)*g(0)) == g(0)*g(1)*g(2)
    e = gate_sort(X(1)*H(0)**2*CNOT(0, 1)*X(1)*X(0))
    assert e == H(0)**2*CNOT(0, 1)*X(0)*X(1)**2
    # Anticommuting Pauli/Hadamard pairs introduce a -1 factor when swapped.
    assert gate_sort(Z(0)*X(0)) == -X(0)*Z(0)
    assert gate_sort(Z(0)*X(0)**2) == X(0)**2*Z(0)
    assert gate_sort(Y(0)*H(0)) == -H(0)*Y(0)
    assert gate_sort(Y(0)*X(0)) == -X(0)*Y(0)
    assert gate_sort(Z(0)*Y(0)) == -Y(0)*Z(0)
    # Diagonal gates commute, so no sign change.
    assert gate_sort(T(0)*S(0)) == S(0)*T(0)
    assert gate_sort(Z(0)*S(0)) == S(0)*Z(0)
    assert gate_sort(Z(0)*T(0)) == T(0)*Z(0)
    assert gate_sort(Z(0)*CNOT(0, 1)) == CNOT(0, 1)*Z(0)
    assert gate_sort(S(0)*CNOT(0, 1)) == CNOT(0, 1)*S(0)
    assert gate_sort(T(0)*CNOT(0, 1)) == CNOT(0, 1)*T(0)
    assert gate_sort(X(1)*CNOT(0, 1)) == CNOT(0, 1)*X(1)
    # This takes a long time and should only be uncommented once in a while.
    # nqubits = 5
    # ngates = 10
    # trials = 10
    # for i in range(trials):
    #     c = random_circuit(ngates, nqubits)
    #     assert represent(c, nqubits=nqubits) == \
    #         represent(gate_sort(c), nqubits=nqubits)
def test_gate_simp():
    """Test gate_simp: involutions collapse, T*T -> S, S*S -> Z."""
    e = H(0)*X(1)*H(0)**2*CNOT(0, 1)*X(1)**3*X(0)*Z(3)**2*S(4)**3
    assert gate_simp(e) == H(0)*CNOT(0, 1)*S(4)*X(0)*Z(4)
    # X, Y, Z, H are involutions (their square is the identity).
    assert gate_simp(X(0)*X(0)) == 1
    assert gate_simp(Y(0)*Y(0)) == 1
    assert gate_simp(Z(0)*Z(0)) == 1
    assert gate_simp(H(0)*H(0)) == 1
    assert gate_simp(T(0)*T(0)) == S(0)
    assert gate_simp(S(0)*S(0)) == Z(0)
    assert gate_simp(Integer(1)) == Integer(1)
    assert gate_simp(X(0)**2 + Y(0)**2) == Integer(2)
def test_swap_gate():
    """SWAP decomposition and its action on basis states."""
    swap_matrix = Matrix(
        ((1, 0, 0, 0), (0, 0, 1, 0), (0, 1, 0, 0), (0, 0, 0, 1)))
    assert represent(SwapGate(1, 0).decompose(), nqubits=2) == swap_matrix
    assert qapply(SwapGate(1, 3)*Qubit('0010')) == Qubit('1000')

    # The decomposition must agree with the direct representation for
    # every ordered pair of distinct qubits.
    n = 4
    for i in range(n):
        for j in range(i):
            assert represent(SwapGate(i, j), nqubits=n) == \
                represent(SwapGate(i, j).decompose(), nqubits=n)
def test_one_qubit_commutators():
    """Commutator matrices match doit(); gates on distinct qubits commute."""
    gates = (IdentityGate, X, Y, Z, H, T, S)
    for g1 in gates:
        for g2 in gates:
            comm = Commutator(g1(0), g2(0))
            lhs = matrix_to_zero(represent(comm, nqubits=1, format='sympy'))
            rhs = matrix_to_zero(
                represent(comm.doit(), nqubits=1, format='sympy'))
            assert lhs == rhs
            assert Commutator(g1(0), g2(1)).doit() == 0
def test_one_qubit_anticommutators():
    """Anticommutator matrices match doit(), same-qubit and cross-qubit."""
    gates = (IdentityGate, X, Y, Z, H)
    for g1 in gates:
        for g2 in gates:
            anti = AntiCommutator(g1(0), g2(0))
            assert matrix_to_zero(represent(anti, nqubits=1, format='sympy')) == \
                matrix_to_zero(represent(anti.doit(), nqubits=1, format='sympy'))
            anti = AntiCommutator(g1(0), g2(1))
            assert matrix_to_zero(represent(anti, nqubits=2, format='sympy')) == \
                matrix_to_zero(represent(anti.doit(), nqubits=2, format='sympy'))
def test_cnot_commutators():
    """Commutators involving CNOT gates vanish for these combinations
    (diagonal gates on the control, X on the target, disjoint CNOTs)."""
    assert Commutator(CNOT(0, 1), Z(0)).doit() == 0
    assert Commutator(CNOT(0, 1), T(0)).doit() == 0
    assert Commutator(CNOT(0, 1), S(0)).doit() == 0
    assert Commutator(CNOT(0, 1), X(1)).doit() == 0
    assert Commutator(CNOT(0, 1), CNOT(0, 1)).doit() == 0
    assert Commutator(CNOT(0, 1), CNOT(0, 2)).doit() == 0
    assert Commutator(CNOT(0, 2), CNOT(0, 1)).doit() == 0
    assert Commutator(CNOT(1, 2), CNOT(1, 0)).doit() == 0
def test_random_circuit():
    """random_circuit returns a Mul whose 3-qubit representation is 8x8."""
    circuit = random_circuit(10, 3)
    assert isinstance(circuit, Mul)
    mat = represent(circuit, nqubits=3)
    assert isinstance(mat, Matrix)
    assert mat.shape == (8, 8)
def test_hermitian_XGate():
    """XGate is self-adjoint."""
    gate = XGate(1, 2)
    assert gate == Dagger(gate)
def test_hermitian_YGate():
    """YGate is self-adjoint."""
    gate = YGate(1, 2)
    assert gate == Dagger(gate)
def test_hermitian_ZGate():
    """ZGate is self-adjoint."""
    gate = ZGate(1, 2)
    assert gate == Dagger(gate)
def test_unitary_XGate():
    """XGate times its adjoint is the identity."""
    gate = XGate(1, 2)
    assert gate * Dagger(gate) == 1
def test_unitary_YGate():
    """YGate times its adjoint is the identity."""
    gate = YGate(1, 2)
    assert gate * Dagger(gate) == 1
def test_unitary_ZGate():
    """ZGate times its adjoint is the identity."""
    gate = ZGate(1, 2)
    assert gate * Dagger(gate) == 1
| bsd-3-clause |
grantsewell/nzbToMedia | libs/xdg/RecentFiles.py | 17 | 6137 | """
Implementation of the XDG Recent File Storage Specification Version 0.2
http://standards.freedesktop.org/recent-file-spec
"""
import xml.dom.minidom, xml.sax.saxutils
import os, time, fcntl
from xdg.Exceptions import ParsingError
class RecentFiles:
    """In-memory model of the XDG recently-used file list.

    Entries are kept sorted newest-first and can be loaded from / written
    back to the ``~/.recently-used`` XML document described by the XDG
    Recent File Storage Specification.
    """

    def __init__(self):
        # Sorted (newest first) list of RecentFile entries.
        self.RecentFiles = []
        # Path of the file the list was last parsed from, if any.
        self.filename = ""

    def parse(self, filename=None):
        """Parse a list of recently used files.

        filename defaults to ``~/.recently-used``.

        Raises ParsingError if the file is missing or not valid XML.
        """
        if not filename:
            filename = os.path.join(os.getenv("HOME"), ".recently-used")
        try:
            doc = xml.dom.minidom.parse(filename)
        except IOError:
            raise ParsingError('File not found', filename)
        except xml.parsers.expat.ExpatError:
            raise ParsingError('Not a valid .menu file', filename)
        self.filename = filename
        for child in doc.childNodes:
            if child.nodeType == xml.dom.Node.ELEMENT_NODE:
                if child.tagName == "RecentFiles":
                    for recent in child.childNodes:
                        if recent.nodeType == xml.dom.Node.ELEMENT_NODE:
                            if recent.tagName == "RecentItem":
                                self.__parseRecentItem(recent)
        self.sort()

    def __parseRecentItem(self, item):
        # Build one RecentFile entry from a <RecentItem> element.
        recent = RecentFile()
        self.RecentFiles.append(recent)
        for attribute in item.childNodes:
            if attribute.nodeType == xml.dom.Node.ELEMENT_NODE:
                if attribute.tagName == "URI":
                    recent.URI = attribute.childNodes[0].nodeValue
                elif attribute.tagName == "Mime-Type":
                    recent.MimeType = attribute.childNodes[0].nodeValue
                elif attribute.tagName == "Timestamp":
                    recent.Timestamp = int(attribute.childNodes[0].nodeValue)
                elif attribute.tagName == "Private":
                    # Bug fix: this was previously assigned to a misspelled
                    # attribute ("Prviate"), so <Private/> entries were never
                    # actually marked private and leaked out of getFiles().
                    recent.Private = True
                elif attribute.tagName == "Groups":
                    for group in attribute.childNodes:
                        if group.nodeType == xml.dom.Node.ELEMENT_NODE:
                            if group.tagName == "Group":
                                recent.Groups.append(group.childNodes[0].nodeValue)

    def write(self, filename=None):
        """Write the list of recently used files to disk.

        If the instance is already associated with a file, filename can be
        omitted to save it there again.
        """
        if not filename and not self.filename:
            raise ParsingError('File not found', filename)
        elif not filename:
            filename = self.filename
        f = open(filename, "w")
        try:
            # Hold an exclusive lock while writing so concurrent writers
            # cannot interleave output.
            fcntl.lockf(f, fcntl.LOCK_EX)
            try:
                f.write('<?xml version="1.0"?>\n')
                f.write("<RecentFiles>\n")
                for r in self.RecentFiles:
                    f.write(" <RecentItem>\n")
                    # Only the URI may contain XML-special characters.
                    f.write(" <URI>%s</URI>\n" % xml.sax.saxutils.escape(r.URI))
                    f.write(" <Mime-Type>%s</Mime-Type>\n" % r.MimeType)
                    f.write(" <Timestamp>%s</Timestamp>\n" % r.Timestamp)
                    if r.Private == True:
                        f.write(" <Private/>\n")
                    if len(r.Groups) > 0:
                        f.write(" <Groups>\n")
                        for group in r.Groups:
                            f.write(" <Group>%s</Group>\n" % group)
                        f.write(" </Groups>\n")
                    f.write(" </RecentItem>\n")
                f.write("</RecentFiles>\n")
            finally:
                fcntl.lockf(f, fcntl.LOCK_UN)
        finally:
            # Previously the file was never closed explicitly, leaking the
            # descriptor (and the lock) if a write failed.
            f.close()

    def getFiles(self, mimetypes=None, groups=None, limit=0):
        """Get a list of recently used files.

        The parameters can be used to filter by mime types, by group, or to
        limit the number of items returned. By default, the entire list is
        returned, except for items marked private.
        """
        tmp = []
        i = 0
        for item in self.RecentFiles:
            if groups:
                # Bug fix: the old code appended the item once per matching
                # group, duplicating entries that belong to several of the
                # requested groups.
                if any(group in item.Groups for group in groups):
                    tmp.append(item)
                    i += 1
            elif mimetypes:
                if item.MimeType in mimetypes:
                    tmp.append(item)
                    i += 1
            else:
                if item.Private == False:
                    tmp.append(item)
                    i += 1
            if limit != 0 and i == limit:
                break
        return tmp

    def addFile(self, item, mimetype, groups=None, private=False):
        """Add (or refresh) a recently used file.

        item should be the URI of the file, typically starting with
        ``file:///``.
        """
        # Check if the entry is already there (RecentFile compares by URI).
        if item in self.RecentFiles:
            index = self.RecentFiles.index(item)
            recent = self.RecentFiles[index]
        else:
            # The spec caps the list at 500 entries; drop the oldest, which
            # is last because the list is kept sorted newest-first.
            # (Was "== 500", which let an over-full list keep growing.)
            if len(self.RecentFiles) >= 500:
                self.RecentFiles.pop()
            recent = RecentFile()
            self.RecentFiles.append(recent)
        recent.URI = item
        recent.MimeType = mimetype
        recent.Timestamp = int(time.time())
        recent.Private = private
        if groups:
            recent.Groups = groups
        self.sort()

    def deleteFile(self, item):
        """Remove a recently used file, by URI, from the list."""
        if item in self.RecentFiles:
            self.RecentFiles.remove(item)

    def sort(self):
        """Sort the entries newest-first (descending timestamp)."""
        self.RecentFiles.sort()
        self.RecentFiles.reverse()
class RecentFile:
    """One entry of the recently-used list, identified by its URI."""

    def __init__(self):
        self.URI = ""
        self.MimeType = ""
        self.Timestamp = ""
        self.Private = False
        self.Groups = []

    def __str__(self):
        return self.URI

    def __eq__(self, other):
        # Identity is the URI only, so an entry compares equal both to a
        # plain URI string and to another RecentFile with the same URI.
        return self.URI == str(other)

    def __lt__(self, other):
        # Ordering is by timestamp; callers sort then reverse to get the
        # newest-first list.
        return self.Timestamp < other.Timestamp

    def __cmp__(self, other):
        # Python 2 three-way comparison hook; ignored on Python 3.
        return cmp(self.Timestamp, other.Timestamp)
| gpl-3.0 |
sugarguo/Flask_Blog | ext_lib/pip/_vendor/requests/packages/urllib3/contrib/pyopenssl.py | 216 | 13871 | '''SSL with SNI_-support for Python 2.
This needs the following packages installed:
* pyOpenSSL (tested with 0.13)
* ndg-httpsclient (tested with 0.3.2)
* pyasn1 (tested with 0.1.6)
To activate it call :func:`~urllib3.contrib.pyopenssl.inject_into_urllib3`.
This can be done in a ``sitecustomize`` module, or at any other time before
your application begins using ``urllib3``, like this::
try:
import urllib3.contrib.pyopenssl
urllib3.contrib.pyopenssl.inject_into_urllib3()
except ImportError:
pass
Now you can use :mod:`urllib3` as you normally would, and it will support SNI
when the required modules are installed.
Activating this module also has the positive side effect of disabling SSL/TLS
encryption in Python 2 (see `CRIME attack`_).
If you want to configure the default list of supported cipher suites, you can
set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable.
Module Variables
----------------
:var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites.
Default: ``EECDH+ECDSA+AESGCM EECDH+aRSA+AESGCM EECDH+ECDSA+SHA256
EECDH+aRSA+SHA256 EECDH+aRSA+RC4 EDH+aRSA EECDH RC4 !aNULL !eNULL !LOW !3DES
!MD5 !EXP !PSK !SRP !DSS'``
.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
'''
from ndg.httpsclient.ssl_peer_verification import SUBJ_ALT_NAME_SUPPORT
from ndg.httpsclient.subj_alt_name import SubjectAltName as BaseSubjectAltName
import OpenSSL.SSL
from pyasn1.codec.der import decoder as der_decoder
from pyasn1.type import univ, constraint
from socket import _fileobject
import ssl
import select
from cStringIO import StringIO
from .. import connection
from .. import util
__all__ = ['inject_into_urllib3', 'extract_from_urllib3']

# SNI only *really* works if we can read the subjectAltName of certificates.
HAS_SNI = SUBJ_ALT_NAME_SUPPORT

# Map from urllib3 to PyOpenSSL compatible parameter-values.
_openssl_versions = {
    ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD,
    ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD,
    ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
}

# ssl.CERT_* constants mapped onto OpenSSL VERIFY_* flag combinations.
_openssl_verify = {
    ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
    ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
    ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER
    + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
}

# Default SSL/TLS cipher list.
# Recommendation by https://community.qualys.com/blogs/securitylabs/2013/08/05/
# configuring-apache-nginx-and-openssl-for-forward-secrecy
DEFAULT_SSL_CIPHER_LIST = 'EECDH+ECDSA+AESGCM EECDH+aRSA+AESGCM ' + \
    'EECDH+ECDSA+SHA256 EECDH+aRSA+SHA256 EECDH+aRSA+RC4 EDH+aRSA ' + \
    'EECDH RC4 !aNULL !eNULL !LOW !3DES !MD5 !EXP !PSK !SRP !DSS'

# Originals saved here so extract_from_urllib3() can restore them.
orig_util_HAS_SNI = util.HAS_SNI
orig_connection_ssl_wrap_socket = connection.ssl_wrap_socket
def inject_into_urllib3():
    """Monkey-patch urllib3 so it uses PyOpenSSL-backed TLS (enables SNI)."""
    util.HAS_SNI = HAS_SNI
    connection.ssl_wrap_socket = ssl_wrap_socket
def extract_from_urllib3():
    """Undo the monkey-patching performed by inject_into_urllib3()."""
    util.HAS_SNI = orig_util_HAS_SNI
    connection.ssl_wrap_socket = orig_connection_ssl_wrap_socket
### Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
class SubjectAltName(BaseSubjectAltName):
    '''ASN.1 implementation for subjectAltNames support'''

    # There is no limit to how many SAN certificates a certificate may have,
    # however this needs to have some limit so we'll set an arbitrarily high
    # limit (the base class from ndg-httpsclient is stricter; this widens it).
    sizeSpec = univ.SequenceOf.sizeSpec + \
        constraint.ValueSizeConstraint(1, 1024)
### Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
def get_subj_alt_name(peer_cert):
    """Return the list of DNS names found in *peer_cert*'s subjectAltName
    extension (empty when SAN support is unavailable)."""
    # Search through extensions
    dns_name = []
    if not SUBJ_ALT_NAME_SUPPORT:
        return dns_name

    general_names = SubjectAltName()
    for i in range(peer_cert.get_extension_count()):
        ext = peer_cert.get_extension(i)
        ext_name = ext.get_short_name()
        if ext_name != 'subjectAltName':
            continue

        # PyOpenSSL returns extension data in ASN.1 encoded form
        ext_dat = ext.get_data()
        decoded_dat = der_decoder.decode(ext_dat,
                                         asn1Spec=general_names)

        for name in decoded_dat:
            if not isinstance(name, SubjectAltName):
                continue
            for entry in range(len(name)):
                component = name.getComponentByPosition(entry)
                # Only dNSName entries are collected; other SAN kinds
                # (IP, email, URI) are ignored.
                if component.getName() != 'dNSName':
                    continue
                dns_name.append(str(component.getComponent()))

    return dns_name
class fileobject(_fileobject):
    """socket._fileobject subclass whose read/readline retry when the TLS
    layer raises OpenSSL.SSL.WantReadError (non-blocking TLS sockets raise
    it instead of returning data), preserving the buffered semantics of
    the Python 2 socket file object."""

    def read(self, size=-1):
        # Use max, disallow tiny reads in a loop as they are very inefficient.
        # We never leave read() with any leftover data from a new recv() call
        # in our internal buffer.
        rbufsize = max(self._rbufsize, self.default_bufsize)
        # Our use of StringIO rather than lists of string objects returned by
        # recv() minimizes memory usage and fragmentation that occurs when
        # rbufsize is large compared to the typical return value of recv().
        buf = self._rbuf
        buf.seek(0, 2)  # seek end
        if size < 0:
            # Read until EOF
            self._rbuf = StringIO()  # reset _rbuf.  we consume it via buf.
            while True:
                try:
                    data = self._sock.recv(rbufsize)
                except OpenSSL.SSL.WantReadError:
                    # TLS layer needs more raw bytes before it can decrypt;
                    # retry the recv.
                    continue
                if not data:
                    break
                buf.write(data)
            return buf.getvalue()
        else:
            # Read until size bytes or EOF seen, whichever comes first
            buf_len = buf.tell()
            if buf_len >= size:
                # Already have size bytes in our buffer?  Extract and return.
                buf.seek(0)
                rv = buf.read(size)
                self._rbuf = StringIO()
                self._rbuf.write(buf.read())
                return rv

            self._rbuf = StringIO()  # reset _rbuf.  we consume it via buf.
            while True:
                left = size - buf_len
                # recv() will malloc the amount of memory given as its
                # parameter even though it often returns much less data
                # than that.  The returned data string is short lived
                # as we copy it into a StringIO and free it.  This avoids
                # fragmentation issues on many platforms.
                try:
                    data = self._sock.recv(left)
                except OpenSSL.SSL.WantReadError:
                    continue
                if not data:
                    break
                n = len(data)
                if n == size and not buf_len:
                    # Shortcut.  Avoid buffer data copies when:
                    # - We have no data in our buffer.
                    # AND
                    # - Our call to recv returned exactly the
                    #   number of bytes we were asked to read.
                    return data
                if n == left:
                    buf.write(data)
                    del data  # explicit free
                    break
                assert n <= left, "recv(%d) returned %d bytes" % (left, n)
                buf.write(data)
                buf_len += n
                del data  # explicit free
                #assert buf_len == buf.tell()
            return buf.getvalue()

    def readline(self, size=-1):
        buf = self._rbuf
        buf.seek(0, 2)  # seek end
        if buf.tell() > 0:
            # check if we already have it in our buffer
            buf.seek(0)
            bline = buf.readline(size)
            if bline.endswith('\n') or len(bline) == size:
                self._rbuf = StringIO()
                self._rbuf.write(buf.read())
                return bline
            del bline
        if size < 0:
            # Read until \n or EOF, whichever comes first
            if self._rbufsize <= 1:
                # Speed up unbuffered case
                buf.seek(0)
                buffers = [buf.read()]
                self._rbuf = StringIO()  # reset _rbuf.  we consume it via buf.
                data = None
                recv = self._sock.recv
                while True:
                    try:
                        while data != "\n":
                            data = recv(1)
                            if not data:
                                break
                            buffers.append(data)
                    except OpenSSL.SSL.WantReadError:
                        # Retry the byte-at-a-time read loop on TLS rebuffer.
                        continue
                    break
                return "".join(buffers)

            buf.seek(0, 2)  # seek end
            self._rbuf = StringIO()  # reset _rbuf.  we consume it via buf.
            while True:
                try:
                    data = self._sock.recv(self._rbufsize)
                except OpenSSL.SSL.WantReadError:
                    continue
                if not data:
                    break
                nl = data.find('\n')
                if nl >= 0:
                    nl += 1
                    buf.write(data[:nl])
                    # Keep everything past the newline for the next call.
                    self._rbuf.write(data[nl:])
                    del data
                    break
                buf.write(data)
            return buf.getvalue()
        else:
            # Read until size bytes or \n or EOF seen, whichever comes first
            buf.seek(0, 2)  # seek end
            buf_len = buf.tell()
            if buf_len >= size:
                buf.seek(0)
                rv = buf.read(size)
                self._rbuf = StringIO()
                self._rbuf.write(buf.read())
                return rv
            self._rbuf = StringIO()  # reset _rbuf.  we consume it via buf.
            while True:
                try:
                    data = self._sock.recv(self._rbufsize)
                except OpenSSL.SSL.WantReadError:
                    continue
                if not data:
                    break
                left = size - buf_len
                # did we just receive a newline?
                nl = data.find('\n', 0, left)
                if nl >= 0:
                    nl += 1
                    # save the excess data to _rbuf
                    self._rbuf.write(data[nl:])
                    if buf_len:
                        buf.write(data[:nl])
                        break
                    else:
                        # Shortcut.  Avoid data copy through buf when returning
                        # a substring of our first recv().
                        return data[:nl]
                n = len(data)
                if n == size and not buf_len:
                    # Shortcut.  Avoid data copy through buf when
                    # returning exactly all of our first recv().
                    return data
                if n >= left:
                    buf.write(data[:left])
                    self._rbuf.write(data[left:])
                    break
                buf.write(data)
                buf_len += n
                #assert buf_len == buf.tell()
            return buf.getvalue()
class WrappedSocket(object):
    '''API-compatibility wrapper for Python OpenSSL's Connection-class.'''

    def __init__(self, connection, socket):
        # connection: the OpenSSL.SSL.Connection doing the TLS work.
        # socket: the underlying plain socket (kept for fileno/timeout).
        self.connection = connection
        self.socket = socket

    def fileno(self):
        # Delegate to the raw socket so select()/poll() keep working.
        return self.socket.fileno()

    def makefile(self, mode, bufsize=-1):
        # Use the WantReadError-aware fileobject, not socket._fileobject.
        return fileobject(self.connection, mode, bufsize)

    def settimeout(self, timeout):
        return self.socket.settimeout(timeout)

    def sendall(self, data):
        return self.connection.sendall(data)

    def close(self):
        # Performs the TLS shutdown on the OpenSSL connection; the raw
        # socket itself is not closed here.
        return self.connection.shutdown()

    def getpeercert(self, binary_form=False):
        """Return the peer certificate: DER bytes when binary_form is true,
        otherwise a dict shaped like ssl.SSLSocket.getpeercert() output
        with 'subject' and 'subjectAltName' entries."""
        x509 = self.connection.get_peer_certificate()

        if not x509:
            return x509

        if binary_form:
            return OpenSSL.crypto.dump_certificate(
                OpenSSL.crypto.FILETYPE_ASN1,
                x509)

        return {
            'subject': (
                (('commonName', x509.get_subject().CN),),
            ),
            'subjectAltName': [
                ('DNS', value)
                for value in get_subj_alt_name(x509)
            ]
        }
def _verify_callback(cnx, x509, err_no, err_depth, return_code):
    # OpenSSL verify callback: accept the certificate exactly when the
    # library reported no verification error for this depth.
    return not err_no
def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
                    ca_certs=None, server_hostname=None,
                    ssl_version=None):
    """PyOpenSSL replacement for urllib3's ssl_wrap_socket: wrap *sock* in a
    TLS connection, sending *server_hostname* via SNI, and return a
    WrappedSocket.  Raises ssl.SSLError on CA-load or handshake failure."""
    ctx = OpenSSL.SSL.Context(_openssl_versions[ssl_version])
    if certfile:
        ctx.use_certificate_file(certfile)
    if keyfile:
        ctx.use_privatekey_file(keyfile)
    if cert_reqs != ssl.CERT_NONE:
        ctx.set_verify(_openssl_verify[cert_reqs], _verify_callback)
    if ca_certs:
        try:
            ctx.load_verify_locations(ca_certs, None)
        except OpenSSL.SSL.Error as e:
            raise ssl.SSLError('bad ca_certs: %r' % ca_certs, e)

    # Disable TLS compression to migitate CRIME attack (issue #309)
    OP_NO_COMPRESSION = 0x20000
    ctx.set_options(OP_NO_COMPRESSION)

    # Set list of supported ciphersuites.
    ctx.set_cipher_list(DEFAULT_SSL_CIPHER_LIST)

    cnx = OpenSSL.SSL.Connection(ctx, sock)
    cnx.set_tlsext_host_name(server_hostname)
    cnx.set_connect_state()
    while True:
        try:
            cnx.do_handshake()
        except OpenSSL.SSL.WantReadError:
            # Handshake needs more data: wait until the socket is readable,
            # then retry.
            select.select([sock], [], [])
            continue
        except OpenSSL.SSL.Error as e:
            raise ssl.SSLError('bad handshake', e)
        break

    return WrappedSocket(cnx, sock)
| gpl-3.0 |
liaoqingwei/flask | setup.py | 141 | 2096 | """
Flask
-----
Flask is a microframework for Python based on Werkzeug, Jinja 2 and good
intentions. And before you ask: It's BSD licensed!
Flask is Fun
````````````
Save in a hello.py:
.. code:: python
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
if __name__ == "__main__":
app.run()
And Easy to Setup
`````````````````
And run it:
.. code:: bash
$ pip install Flask
$ python hello.py
* Running on http://localhost:5000/
Links
`````
* `website <http://flask.pocoo.org/>`_
* `documentation <http://flask.pocoo.org/docs/>`_
* `development version
<http://github.com/mitsuhiko/flask/zipball/master#egg=Flask-dev>`_
"""
import re
import ast
from setuptools import setup
# Regex that locates the "__version__ = ..." assignment in flask/__init__.py.
_version_re = re.compile(r'__version__\s+=\s+(.*)')

# Read the version without importing flask itself (importing would require
# its runtime dependencies to already be installed at setup time).
with open('flask/__init__.py', 'rb') as f:
    version = str(ast.literal_eval(_version_re.search(
        f.read().decode('utf-8')).group(1)))

setup(
    name='Flask',
    version=version,
    url='http://github.com/mitsuhiko/flask/',
    license='BSD',
    author='Armin Ronacher',
    author_email='armin.ronacher@active-4.com',
    description='A microframework based on Werkzeug, Jinja2 '
                'and good intentions',
    # The module docstring at the top of this file doubles as the PyPI
    # long description.
    long_description=__doc__,
    packages=['flask', 'flask.ext'],
    include_package_data=True,
    zip_safe=False,
    platforms='any',
    install_requires=[
        'Werkzeug>=0.7',
        'Jinja2>=2.4',
        'itsdangerous>=0.21',
        'click>=2.0',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ],
    entry_points='''
        [console_scripts]
        flask=flask.cli:main
    '''
)
| bsd-3-clause |
gdsfactory/gdsfactory | pp/layers.py | 1 | 9564 | """A GDS layer is a tuple of two integers.
You can:
- Define your layers in a dataclass
- Load it from Klayout XML file (.lyp)
LayerSet adapted from phidl.device_layout
load_lyp, name_to_description, name_to_short_name adapted from phidl.utilities
preview_layerset adapted from phidl.geometry
"""
import pathlib
from pathlib import Path
from typing import Optional, Tuple
import xmltodict
from phidl.device_layout import Layer as LayerPhidl
from phidl.device_layout import LayerSet as LayerSetPhidl
from pp.component import Component
from pp.name import clean_name
from pp.tech import TECH
from pp.types import PathType
LAYER = TECH.layer
class LayerSet(LayerSetPhidl):
    """phidl LayerSet with duplicate-name protection and lookups by name
    or by (gds_layer, gds_datatype) tuple.  Layers live in the ``_layers``
    dict inherited from phidl's LayerSet."""

    def add_layer(
        self,
        name: str = "unnamed",
        gds_layer: int = 0,
        gds_datatype: int = 0,
        description: Optional[str] = None,
        color: Optional[str] = None,
        inverted: bool = False,
        alpha: float = 0.6,
        # NOTE(review): annotation was `bool = None`; dither looks like it
        # should actually be a KLayout dither-pattern string — confirm.
        dither: Optional[bool] = None,
    ) -> None:
        """Adds a layer to an existing LayerSet object for nice colors.

        Args:
            name: Name of the Layer.
            gds_layer: GDSII Layer number.
            gds_datatype: GDSII datatype.
            description: Layer description.
            color: Hex code of color for the Layer.
            inverted: If true, inverts the Layer.
            alpha: layer opacity between 0 and 1.
            dither: KLayout dither style, only used in phidl.utilities.write_lyp().

        Raises:
            ValueError: if a layer named *name* is already registered.
        """
        new_layer = LayerPhidl(
            gds_layer=gds_layer,
            gds_datatype=gds_datatype,
            name=name,
            description=description,
            inverted=inverted,
            color=color,
            alpha=alpha,
            dither=dither,
        )
        if name in self._layers:
            raise ValueError(
                f"Adding {name} already defined {list(self._layers.keys())}"
            )
        else:
            self._layers[name] = new_layer

    # def __getitem__(self, val: str) -> Tuple[int, int]:
    #     """Returns gds layer tuple."""
    #     if val not in self._layers:
    #         raise ValueError(f"Layer {val} not in {list(self._layers.keys())}")
    #     else:
    #         layer = self._layers[val]
    #         return layer.gds_layer, layer.gds_datatype

    def __repr__(self) -> str:
        """Prints the number of Layers in the LayerSet object."""
        return (
            f"LayerSet ({len(self._layers)} layers total) \n"
            + f"{list(self._layers.keys())}"
        )

    def get(self, name: str) -> LayerPhidl:
        """Returns Layer from name.

        Raises:
            ValueError: if no layer called *name* is registered.
        """
        if name not in self._layers:
            raise ValueError(f"Layer {name} not in {list(self._layers.keys())}")
        else:
            return self._layers[name]

    def get_from_tuple(self, layer_tuple: Tuple[int, int]) -> LayerPhidl:
        """Returns Layer from layer tuple (gds_layer, gds_datatype).

        Raises:
            ValueError: if no registered layer has that tuple.
        """
        # Reverse map is rebuilt on every call; fine for small layer sets.
        tuple_to_name = {
            (v.gds_layer, v.gds_datatype): k for k, v in self._layers.items()
        }
        if layer_tuple not in tuple_to_name:
            raise ValueError(f"Layer {layer_tuple} not in {list(tuple_to_name.keys())}")

        name = tuple_to_name[layer_tuple]
        return self._layers[name]
# Default display properties (color/alpha) per layer so plotgds previews look
# good.  Only the GDS layer number of each TECH layer is used; datatype is 0.
LAYER_COLORS = LayerSet()  # Layerset makes plotgds look good
LAYER_COLORS.add_layer("WG", LAYER.WG[0], 0, "wg", color="gray", alpha=1)
LAYER_COLORS.add_layer("WGCLAD", LAYER.WGCLAD[0], 0, "", color="gray", alpha=0)
LAYER_COLORS.add_layer("SLAB150", LAYER.SLAB150[0], 0, "", color="lightblue", alpha=0.6)
LAYER_COLORS.add_layer("SLAB90", LAYER.SLAB90[0], 0, "", color="lightblue", alpha=0.2)
LAYER_COLORS.add_layer("WGN", LAYER.WGN[0], 0, "", color="orange", alpha=1)
LAYER_COLORS.add_layer("WGN_CLAD", LAYER.WGN_CLAD[0], 0, "", color="gray", alpha=0)
LAYER_COLORS.add_layer("DEVREC", LAYER.DEVREC[0], 0, "", color="gray", alpha=0.1)

# Port-marker layer -> port type, used when labelling ports, and its inverse.
PORT_LAYER_TO_TYPE = {
    LAYER.PORT: "optical",
    LAYER.PORTE: "dc",
    LAYER.PORTH: "heater",
    LAYER.TE: "vertical_te",
    LAYER.TM: "vertical_tm",
}
PORT_TYPE_TO_LAYER = {v: k for k, v in PORT_LAYER_TO_TYPE.items()}
def preview_layerset(
    ls: LayerSet = LAYER_COLORS, size: float = 100.0, spacing: float = 100.0
) -> Component:
    """Generates a preview Device with representations of all the layers,
    used for previewing LayerSet color schemes in quickplot or saved .gds
    files.

    Draws one 100x100 (scaled) rectangle plus a "name\\nlayer / datatype"
    caption per layer, laid out on a near-square grid.
    """
    import numpy as np

    import pp

    preview = Component(name="layerset")
    scale = size / 100
    grid_side = int(np.ceil(np.sqrt(len(ls._layers))))

    # Deterministic ordering: by (layer number, datatype).
    ordered = sorted(ls._layers.values(), key=lambda lay: (lay.gds_layer, lay.gds_datatype))

    for index, layer in enumerate(ordered):
        row, col = divmod(index, grid_side)
        dx = (100 + spacing) * col * scale
        dy = -(100 + spacing) * row * scale

        swatch = pp.components.rectangle(size=(100 * scale, 100 * scale), layer=layer)
        caption = pp.components.text(
            text="%s\n%s / %s" % (layer.name, layer.gds_layer, layer.gds_datatype),
            size=20 * scale,
            position=(50 * scale, -20 * scale),
            justify="center",
            layer=layer,
        )

        preview.add_ref(swatch).movex(dx).movey(dy)
        preview.add_ref(caption).movex(dx).movey(dy)

    return preview
def _name_to_short_name(name_str: str) -> str:
    """Maps the name entry of the lyp element to a name of the layer,
    i.e. the dictionary key used to access it.

    Default format of the lyp name is

        key - layer/datatype - description
    or
        key - description

    Only the first whitespace-delimited token before the first '-' is kept,
    then normalised through ``clean_name``.
    """
    if name_str is None:
        raise IOError(f"layer {name_str} has no name")
    before_dash = name_str.split("-")[0]
    key = before_dash.split()[0].strip()
    return clean_name(key)
def _name_to_description(name_str) -> str:
"""Gets the description of the layer contained in the lyp name field.
It is not strictly necessary to have a description. If none there, it returns ''.
Default format of the lyp name is
key - layer/datatype - description
or
key - description
"""
if name_str is None:
raise IOError(f"layer {name_str} has no name")
fields = name_str.split()
description = ""
if len(fields) > 1:
description = " ".join(fields[1:])
return description
def _add_layer(entry, lys: LayerSet) -> LayerSet:
    """Adds one lyp ``properties`` entry to ``lys`` and returns ``lys``.

    ``entry`` is a dict for one element of 'properties' as parsed by
    xmltodict.  Entries whose ``source`` contains a quote or a wildcard
    (layers without a concrete layer/datatype) are skipped, in which case
    None is returned instead of ``lys``.
    """
    # "source" looks like e.g. "WG 1/0@1"; keep only the part before '@'.
    info = entry["source"].split("@")[0]

    # skip layers without name or with */*
    if "'" in info or "*" in info:
        return

    name = entry.get("name") or entry.get("source")
    if not name:
        return

    gds_layer, gds_datatype = info.split("/")
    # The numbers may be prefixed by a label; keep the last token of each side.
    gds_layer = gds_layer.split()[-1]
    gds_datatype = gds_datatype.split()[-1]

    settings = dict()
    settings["gds_layer"] = int(gds_layer)
    settings["gds_datatype"] = int(gds_datatype)
    settings["color"] = entry["fill-color"]
    settings["dither"] = entry["dither-pattern"]
    settings["name"] = _name_to_short_name(name)
    settings["description"] = _name_to_description(name)
    lys.add_layer(**settings)
    return lys
def load_lyp(filepath: Path) -> LayerSet:
    """Returns a LayerSet object from a Klayout lyp file in XML format."""
    with open(filepath, "r") as lyp_file:
        parsed = xmltodict.parse(lyp_file.read(), process_namespaces=True)

    # A lyp file has a single top-level 'layer-properties' dict whose
    # 'properties' child is one entry per layer (or a list of them).
    entries = parsed["layer-properties"]["properties"]
    if not isinstance(entries, list):
        entries = [entries]

    lys = LayerSet()
    for entry in entries:
        if "group-members" not in entry:
            # plain layer
            _add_layer(entry, lys)
            continue
        # group of layers: flatten its members
        members = entry["group-members"]
        if not isinstance(members, list):
            members = [members]
        for member in members:
            _add_layer(member, lys)
    return lys
# For port labelling purpose
# LAYERS_OPTICAL = [LAYER.WG]
# LAYERS_ELECTRICAL = [LAYER.M1, LAYER.M2, LAYER.M3]
# LAYERS_HEATER = [LAYER.HEATER]
def lyp_to_dataclass(lyp_filepath: PathType, overwrite: bool = True) -> str:
    """Writes a Python dataclass module next to a KLayout ``.lyp`` file.

    The generated ``<name>.py`` defines a ``LayerMap`` dataclass with one
    ``name: Layer = (layer, datatype)`` field per layer found in the lyp.
    Returns the generated source as a string.

    NOTE(review): the generated code references a ``Layer`` type without
    importing it — presumably the consumer adds the import; confirm.

    Raises:
        FileExistsError: if the output file exists and ``overwrite`` is False.
    """
    filepathin = pathlib.Path(lyp_filepath)
    filepathout = filepathin.with_suffix(".py")

    if filepathout.exists() and not overwrite:
        raise FileExistsError(f"You can delete {filepathout}")

    script = """
import dataclasses
@dataclasses.dataclass
class LayerMap():
"""
    lys = load_lyp(filepathin)
    # Sort by name for a stable, diff-friendly output.
    for layer_name, layer in sorted(lys._layers.items()):
        script += (
            f"    {layer_name}: Layer = ({layer.gds_layer}, {layer.gds_datatype})\n"
        )

    filepathout.write_text(script)
    return script
def test_load_lyp():
    """Regression check: the default lyp file should define 82 layers."""
    from pp.config import layer_path

    lys = load_lyp(layer_path)
    assert len(lys._layers) == 82
    return lys
if __name__ == "__main__":
    # Ad-hoc manual checks kept around for development; nothing runs on import.
    pass
    # print(LAYER_STACK.get_from_tuple((1, 0)))
    # print(LAYER_STACK.get_layer_to_material())
    # lys = test_load_lyp()
    # c = preview_layerset(ls)
    # c.show()
    # print(LAYERS_OPTICAL)
    # print(layer("wgcore"))
    # print(layer("wgclad"))
    # print(layer("padding"))
    # print(layer("TEXT"))
    # print(type(layer("wgcore")))
| mit |
n4hy/gnuradio | gr-shd/grc/gen_shd_smini_blocks.py | 17 | 8247 | """
Copyright 2011 Free Software Foundation, Inc.
This file is part of GNU Radio
GNU Radio Companion is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
GNU Radio Companion is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
# Cheetah template producing the GRC <block> XML for an SHD SMINI source/sink.
# Template variables supplied by parse_tmpl(): $sourk ('source'/'sink'),
# $direction ('in'/'out'), $max_nchan, $max_mboards and $params (the
# concatenated per-channel PARAMS_TMPL renderings).  Backslash-escaped
# \$ and \# sequences are emitted literally so that GRC's own Cheetah pass
# evaluates them later.
MAIN_TMPL = """\
<?xml version="1.0"?>
<block>
<name>SHD: SMINI $sourk.title()</name>
<key>shd_smini_$(sourk)</key>
<import>from gnuradio import shd</import>
<make>shd.smini_$(sourk)(
device_addr=\$dev_addr,
io_type=shd.io_type.\$type.type,
num_channels=\$nchan,
)
\#if \$clock_rate()
self.\$(id).set_clock_rate(\$clock_rate, shd.ALL_MBOARDS)
\#end if
#for $m in range($max_mboards)
########################################################################
\#if \$num_mboards() > $m and \$ref_source$(m)() == 'external'
self.\$(id).set_clock_config(shd.clock_config.external(), $m)
\#end if
########################################################################
\#if \$num_mboards() > $m and \$ref_source$(m)() == 'internal'
self.\$(id).set_clock_config(shd.clock_config.internal(), $m)
\#end if
########################################################################
\#if \$num_mboards() > $m and \$ref_source$(m)() == 'mimo'
_config = shd.clock_config()
_config.ref_source = shd.clock_config.REF_MIMO
_config.pps_source = shd.clock_config.PPS_MIMO
self.\$(id).set_clock_config(_config, $m)
\#end if
########################################################################
\#if \$num_mboards() > $m and \$sd_spec$(m)()
self.\$(id).set_subdev_spec(\$sd_spec$(m), $m)
\#end if
########################################################################
#end for
\#if \$sync()
self.\$(id).set_time_unknown_pps(shd.time_spec())
\#end if
self.\$(id).set_samp_rate(\$samp_rate)
#for $n in range($max_nchan)
\#if \$nchan() > $n
self.\$(id).set_center_freq(\$center_freq$(n), $n)
self.\$(id).set_gain(\$gain$(n), $n)
\#end if
#end for
</make>
<callback>set_samp_rate(\$samp_rate)</callback>
#for $n in range($max_nchan)
<callback>set_center_freq(\$center_freq$(n), $n)</callback>
<callback>set_gain(\$gain$(n), $n)</callback>
#end for
<param>
<name>$(direction.title())put Type</name>
<key>type</key>
<type>enum</type>
<option>
<name>Complex</name>
<key>complex</key>
<opt>type:COMPLEX_FLOAT32</opt>
<opt>vlen:1</opt>
</option>
<option>
<name>Short</name>
<key>short</key>
<opt>type:COMPLEX_INT16</opt>
<opt>vlen:2</opt>
</option>
</param>
<param>
<name>Device Addr</name>
<key>dev_addr</key>
<value></value>
<type>string</type>
<hide>
\#if \$dev_addr()
none
\#else
part
\#end if
</hide>
</param>
<param>
<name>Sync</name>
<key>sync</key>
<value></value>
<type>enum</type>
<hide>\#if \$sync() then 'none' else 'part'#</hide>
<option>
<name>unknown PPS</name>
<key>sync</key>
</option>
<option>
<name>don't sync</name>
<key></key>
</option>
</param>
<param>
<name>Clock Rate (Hz)</name>
<key>clock_rate</key>
<value>0.0</value>
<type>real</type>
<hide>\#if \$clock_rate() then 'none' else 'part'#</hide>
<option>
<name>Default</name>
<key>0.0</key>
</option>
</param>
<param>
<name>Num Mboards</name>
<key>num_mboards</key>
<value>1</value>
<type>int</type>
<hide>part</hide>
#for $m in range(1, $max_mboards+1)
<option>
<name>$(m)</name>
<key>$m</key>
</option>
#end for
</param>
#for $m in range($max_mboards)
<param>
<name>Mb$(m): Ref Source</name>
<key>ref_source$(m)</key>
<value></value>
<type>enum</type>
<hide>
\#if not \$num_mboards() > $m
all
\#elif \$ref_source$(m)()
none
\#else
part
\#end if
</hide>
<option><name>Default</name><key></key></option>
<option><name>Internal</name><key>internal</key></option>
<option><name>External</name><key>external</key></option>
<option><name>MIMO Cable</name><key>mimo</key></option>
</param>
<param>
<name>Mb$(m): Subdev Spec</name>
<key>sd_spec$(m)</key>
<value></value>
<type>string</type>
<hide>
\#if not \$num_mboards() > $m
all
\#elif \$sd_spec$(m)()
none
\#else
part
\#end if
</hide>
</param>
#end for
<param>
<name>Num Channels</name>
<key>nchan</key>
<value>1</value>
<type>int</type>
#for $n in range(1, $max_nchan+1)
<option>
<name>$(n)</name>
<key>$n</key>
</option>
#end for
</param>
<param>
<name>Samp Rate (Sps)</name>
<key>samp_rate</key>
<value>samp_rate</value>
<type>real</type>
</param>
$params
<check>$max_nchan >= \$nchan</check>
<check>\$nchan > 0</check>
<check>$max_mboards >= \$num_mboards</check>
<check>\$num_mboards > 0</check>
<check>\$nchan >= \$num_mboards</check>
<$sourk>
<name>$direction</name>
<type>\$type</type>
<vlen>\$type.vlen</vlen>
<nports>\$nchan</nports>
</$sourk>
<doc>
The SHD SMINI $sourk.title() Block:
Device Address:
The device address is a delimited string used to locate SHD devices on your system. \\
If left blank, the first SHD device found will be used. \\
Use the device address to specify a specific device or list of devices.
SMINI1 Example: serial=12345678
SMINI2 Example: type=xmini
Num Motherboards:
Selects the number of SMINI motherboards in this device configuration.
Reference Source:
Where the motherboard should sync its time and clock references.
If source and sink blocks reference the same device,
it is only necessary to set the reference source on one of the blocks.
Subdevice specification:
Each motherboard should have its own subdevice specification \\
and all subdevice specifications should be the same length. \\
Select the subdevice or subdevices for each channel using a markup string. \\
The markup string consists of a list of dboard_slot:subdev_name pairs (one pair per channel). \\
If left blank, the SHD will try to select the first subdevice on your system. \\
See the application notes for further details.
Single channel example: :AB
Dual channel example: :A :B
Num Channels:
Selects the total number of channels in this multi-SMINI configuration.
Ex: 4 motherboards with 2 channels per board = 8 channels total
Sample rate:
The sample rate is the number of samples per second input by this block. \\
The SHD device driver will try its best to match the requested sample rate. \\
If the requested rate is not possible, the SHD block will print an error at runtime.
Center frequency:
The center frequency is the overall frequency of the RF chain. \\
For greater control of how the SHD tunes elements in the RF chain, \\
pass a tune_request object rather than a simple target frequency.
Tuning with an LO offset example: shd.tune_request(freq, lo_off)
</doc>
</block>
"""

# Per-channel <param> entries (center frequency and gain).  Rendered once per
# channel index $n and concatenated into MAIN_TMPL's $params placeholder.
PARAMS_TMPL = """
<param>
<name>Ch$(n): Center Freq (Hz)</name>
<key>center_freq$(n)</key>
<value>0</value>
<type>real</type>
<hide>\#if \$nchan() > $n then 'none' else 'all'#</hide>
</param>
<param>
<name>Ch$(n): Gain (dB)</name>
<key>gain$(n)</key>
<value>0</value>
<type>real</type>
<hide>\#if \$nchan() > $n then 'none' else 'all'#</hide>
</param>
"""
def parse_tmpl(_tmpl, **kwargs):
    """Render the Cheetah template string *_tmpl* using *kwargs* as namespace."""
    from Cheetah import Template
    return str(Template.Template(_tmpl, kwargs))
# Upper bounds baked into the generated XML: up to 8 motherboards with
# 4 channels each.
max_num_mboards = 8
max_num_channels = max_num_mboards*4
if __name__ == '__main__':
    import sys
    # For each requested output file, decide from its name whether to render
    # the source or the sink flavour of the block description.
    for filename in sys.argv[1:]:  # renamed from 'file': don't shadow the builtin
        if 'source' in filename:
            sourk = 'source'
            direction = 'out'
        elif 'sink' in filename:
            sourk = 'sink'
            direction = 'in'
        else:
            # call syntax (valid in py2 and py3) instead of the statement
            # form 'raise Exception, msg'
            raise Exception('is %s a source or sink?' % filename)

        # One center-freq/gain <param> pair per possible channel.
        params = ''.join([parse_tmpl(PARAMS_TMPL, n=n) for n in range(max_num_channels)])

        # Write the rendered XML and close the handle deterministically
        # (the previous open(...).write(...) leaked the file object).
        out = open(filename, 'w')
        try:
            out.write(parse_tmpl(MAIN_TMPL,
                max_nchan=max_num_channels,
                max_mboards=max_num_mboards,
                params=params,
                sourk=sourk,
                direction=direction,
            ))
        finally:
            out.close()
| gpl-3.0 |
labordoc/labordoc-next | modules/bibformat/lib/bibformat_xslt_engine.py | 7 | 23437 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
bibformat_xslt_engine - Wrapper for an XSLT engine.
Some functions are registered in order to be used in XSL code:
- creation_date(recID)
- modification_date(recID)
Dependencies: Need one of the following XSLT processors:
- lxml
- libxml2 & libxslt
- 4suite
Used by: bibformat_engine.py
"""
__revision__ = "$Id$"
import sys
import os
from invenio.config import \
CFG_SITE_URL
from invenio.bibformat_config import \
CFG_BIBFORMAT_TEMPLATES_PATH
from invenio.bibformat_dblayer import \
get_creation_date, \
get_modification_date
# The namespace used for BibFormat function
# The namespace used for BibFormat function
CFG_BIBFORMAT_FUNCTION_NS = "http://cdsweb.cern.ch/bibformat/fn"

# Import one XSLT processor.  The first one that imports wins; the
# processor_type flag selects the matching code path in format().
#
# processor_type:
#        0 : No processor found
#        1 : lxml
#        2 : libxslt
#        3 : 4suite
processor_type = 0
try:
    # lxml
    from lxml import etree
    processor_type = 1
except ImportError:
    pass

if processor_type == 0:
    try:
        # libxml2 & libxslt
        import libxml2
        import libxslt
        processor_type = 2
    except ImportError:
        pass

if processor_type == 0:
    try:
        # 4suite
        from Ft.Xml.Xslt import Processor
        from Ft.Xml import InputSource
        from xml.dom import Node
        processor_type = 3
    except ImportError:
        pass

if processor_type == 0:
    # No XSLT processor found: warn at import time; format() will also
    # complain when called, but the module still imports.
    sys.stderr.write('No XSLT processor could be found.\n' \
                     'No output produced.\n')
    #sys.exit(1)
##################################################################
# Support for 'creation_date' and 'modification_date' functions #
def get_creation_date_lxml(ctx, recID, fmt="%Y-%m-%dT%H:%M:%SZ"):
    """
    lxml extension function:
    Bridge between BibFormat and XSL stylesheets.
    Returns record creation date.

    Can be used in that way in XSL stylesheet
    (provided xmlns:fn="http://cdsweb.cern.ch/bibformat/fn" has been declared):
    <xsl:value-of select="fn:creation_date(445)"/> where 445 is a recID

    if recID is string, value is converted to int
    if recID is Node, first child node (text node) is taken as value

    @param ctx: context as passed by lxml
    @param recID: record ID
    @param fmt: format of the returned date
    @return: creation date of X{recID}
    @rtype: string
    """
    try:
        # lxml hands XPath arguments over as a plain string/int or as a
        # one-element list of nodes; normalise every case to an int record ID.
        if isinstance(recID, str):
            recID_int = int(recID)
        elif isinstance(recID, (int, long)):
            recID_int = recID
        elif isinstance(recID, list):
            recID = recID[0]
            if isinstance(recID, str):
                recID_int = int(recID)
            else:
                # element node: use its text content
                recID_int = int(recID.text)
        else:
            recID_int = int(recID.text)

        # Same normalisation for the optional strftime format argument.
        if isinstance(fmt, str):
            fmt_str = fmt
        elif isinstance(fmt, list):
            fmt = fmt[0]
            if isinstance(fmt, str):
                fmt_str = fmt
            else:
                fmt_str = fmt.text
        else:
            fmt_str = fmt.text

        return get_creation_date(recID_int, fmt_str)
    except Exception, err:
        # Never let an exception propagate into the XSLT engine: report on
        # stderr and yield an empty string instead.
        sys.stderr.write("Error during formatting function evaluation: " + \
                         str(err) + \
                         '\n')
        return ''
def get_creation_date_libxslt(ctx, recID, fmt="%Y-%m-%dT%H:%M:%SZ"):
"""
libxslt extension function:
Bridge between BibFormat and XSL stylesheets.
Returns record creation date.
Can be used in that way in XSL stylesheet
(provided xmlns:fn="http://cdsweb.cern.ch/bibformat/fn" has been declared):
<xsl:value-of select="fn:creation_date(445)"/> where 445 is a recID
if recID is string, value is converted to int
if recID is Node, first child node (text node) is taken as value
@param ctx: context as passed by libxslt
@param recID: record ID
@param fmt: format of the returned date
@return: creation date of X{recID}
@rtype: string
"""
try:
if isinstance(recID, str):
recID_int = int(recID)
elif isinstance(recID, (int, long)):
recID_int = recID
else:
recID_int = libxml2.xmlNode(_obj=recID[0]).children.content
if isinstance(fmt, str):
fmt_str = fmt
else:
fmt_str = libxml2.xmlNode(_obj=recID[0]).children.content
return get_creation_date(recID_int, fmt_str)
except Exception, err:
sys.stderr.write("Error during formatting function evaluation: " + \
str(err) + \
'\n')
return ''
def get_creation_date_4suite(ctx, recID, fmt="%Y-%m-%dT%H:%M:%SZ"):
"""
4suite extension function:
Bridge between BibFormat and XSL stylesheets.
Returns record creation date.
Can be used in that way in XSL stylesheet
(provided xmlns:fn="http://cdsweb.cern.ch/bibformat/fn" has been declared):
<xsl:value-of select="fn:creation_date(445)"/>
if value is int, value is converted to string
if value is Node, first child node (text node) is taken as value
@param ctx: context as passed by 4suite
@param recID: record ID
@param fmt: format of the returned date
@return: creation date of X{recID}
@rtype: string
"""
try:
if len(recID) > 0 and isinstance(recID[0], Node):
recID_int = recID[0].firstChild.nodeValue
if recID_int is None:
return ''
else:
recID_int = int(recID)
if len(fmt) > 0 and isinstance(fmt[0], Node):
fmt_str = fmt[0].firstChild.nodeValue
if fmt_str is None:
fmt_str = "%Y-%m-%dT%H:%M:%SZ"
else:
fmt_str = str(fmt)
return get_creation_date(recID_int, fmt_str)
except Exception, err:
sys.stderr.write("Error during formatting function evaluation: " + \
str(err) + \
'\n')
return ''
def get_modification_date_lxml(ctx, recID, fmt="%Y-%m-%dT%H:%M:%SZ"):
    """
    lxml extension function:
    Bridge between BibFormat and XSL stylesheets.
    Returns record modification date.

    Can be used in that way in XSL stylesheet
    (provided xmlns:fn="http://cdsweb.cern.ch/bibformat/fn" has been declared):
    <xsl:value-of select="fn:modification_date(445)"/> where 445 is a recID

    if recID is string, value is converted to int
    if recID is Node, first child node (text node) is taken as value

    @param ctx: context as passed by lxml
    @param recID: record ID
    @param fmt: format of the returned date
    @return: modification date of X{recID}
    @rtype: string
    """
    try:
        # lxml hands XPath arguments over as a plain string/int or as a
        # one-element list of nodes; normalise every case to an int record ID.
        if isinstance(recID, str):
            recID_int = int(recID)
        elif isinstance(recID, (int, long)):
            recID_int = recID
        elif isinstance(recID, list):
            recID = recID[0]
            if isinstance(recID, str):
                recID_int = int(recID)
            else:
                # element node: use its text content
                recID_int = int(recID.text)
        else:
            recID_int = int(recID.text)

        # Same normalisation for the optional strftime format argument.
        if isinstance(fmt, str):
            fmt_str = fmt
        elif isinstance(fmt, list):
            fmt = fmt[0]
            if isinstance(fmt, str):
                fmt_str = fmt
            else:
                fmt_str = fmt.text
        else:
            fmt_str = fmt.text

        return get_modification_date(recID_int, fmt_str)
    except Exception, err:
        # Never let an exception propagate into the XSLT engine: report on
        # stderr and yield an empty string instead.
        sys.stderr.write("Error during formatting function evaluation: " + \
                         str(err) + \
                         '\n')
        return ''
def get_modification_date_libxslt(ctx, recID, fmt="%Y-%m-%dT%H:%M:%SZ"):
"""
libxslt extension function:
Bridge between BibFormat and XSL stylesheets.
Returns record modification date.
Can be used in that way in XSL stylesheet
(provided xmlns:fn="http://cdsweb.cern.ch/bibformat/fn" has been declared):
<xsl:value-of select="fn:creation_date(445)"/> where 445 is a recID
if recID is string, value is converted to int
if recID is Node, first child node (text node) is taken as value
@param ctx: context as passed by libxslt
@param recID: record ID
@param fmt: format of the returned date
@return: modification date of X{recID}
@rtype: string
"""
try:
if isinstance(recID, str):
recID_int = int(recID)
elif isinstance(recID, (int, long)):
recID_int = recID
else:
recID_int = libxml2.xmlNode(_obj=recID[0]).children.content
if isinstance(fmt, str):
fmt_str = fmt
else:
fmt_str = libxml2.xmlNode(_obj=recID[0]).children.content
return get_modification_date(recID_int, fmt_str)
except Exception, err:
sys.stderr.write("Error during formatting function evaluation: " + \
str(err) + \
'\n')
return ''
def get_modification_date_4suite(ctx, recID, fmt="%Y-%m-%dT%H:%M:%SZ"):
"""
4suite extension function:
Bridge between BibFormat and XSL stylesheets.
Returns record modification date.
Can be used in that way in XSL stylesheet
(provided xmlns:fn="http://cdsweb.cern.ch/bibformat/fn" has been declared):
<xsl:value-of select="fn:modification_date(445)"/>
if value is int, value is converted to string
if value is Node, first child node (text node) is taken as value
@param ctx: context as passed by 4suite
@param recID: record ID
@param fmt: format of the returned date
@return: modification date of X{recID}
@rtype: string
"""
try:
if len(recID) > 0 and isinstance(recID[0], Node):
recID_int = recID[0].firstChild.nodeValue
if recID_int is None:
return ''
else:
recID_int = int(recID_int)
if len(fmt) > 0 and isinstance(fmt[0], Node):
fmt_str = fmt[0].firstChild.nodeValue
if fmt_str is None:
fmt_str = "%Y-%m-%dT%H:%M:%SZ"
else:
fmt_str = str(fmt)
return get_modification_date(recID_int, fmt_str)
except Exception, err:
sys.stderr.write("Error during formatting function evaluation: " + \
str(err) + \
'\n')
return ''
def eval_bibformat_lxml(ctx, recID, template_code):
    """
    lxml extension function:
    Bridge between BibFormat and XSL stylesheets.
    Returns the evaluation of the given piece of format template

    Can be used in that way in XSL stylesheet
    (provided xmlns:fn="http://cdsweb.cern.ch/bibformat/fn" has been declared):
    <xsl:value-of select="fn:eval_bibformat(marc:controlfield[@tag='001'],'&lt;BFE_SERVER_INFO var=&quot;recurl&quot;&gt;')" />

    if recID is string, value is converted to int
    if recID is Node, first child node (text node) is taken as value
    template_code is evaluated as a format template piece of code. '&lt;'
    and '"' need to be escaped with '&amp;lt;' and '&amp;quot;'

    @param ctx: context as passed by lxml
    @param recID: record ID
    @param template_code: the code calling a BFE_ as it would be use in format template
    @return: the evalued call to a format template (usually a call to a format element)
    @rtype: string
    """ #'
    # Imported lazily to avoid a circular import with bibformat_engine.
    from invenio.bibformat_engine import \
    format_with_format_template, \
    BibFormatObject
    try:
        # lxml hands XPath arguments over as a plain string/int or as a
        # one-element list of nodes; normalise every case to an int record ID.
        if isinstance(recID, str):
            recID_int = int(recID)
        elif isinstance(recID, (int, long)):
            recID_int = recID
        elif isinstance(recID, list):
            recID = recID[0]
            if isinstance(recID, str):
                recID_int = int(recID)
            else:
                recID_int = int(recID.text)
        else:
            recID_int = int(recID.text)

        bfo = BibFormatObject(recID_int)
        return format_with_format_template(None, bfo,
                                           verbose=0,
                                           format_template_code=template_code)
    except Exception, err:
        # Never propagate into the XSLT engine: report and return ''.
        sys.stderr.write("Error during formatting function evaluation: " + \
                         str(err) + \
                         '\n')
        return ''
def eval_bibformat_libxslt(ctx, recID, template_code):
    """
    libxslt extension function:
    Bridge between BibFormat and XSL stylesheets.
    Returns the evaluation of the given piece of format template

    Can be used in that way in XSL stylesheet
    (provided xmlns:fn="http://cdsweb.cern.ch/bibformat/fn" has been declared):
    <xsl:value-of select="fn:eval_bibformat(marc:controlfield[@tag='001'],'&lt;BFE_SERVER_INFO var=&quot;recurl&quot;&gt;')" />

    if recID is string, value is converted to int
    if recID is Node, first child node (text node) is taken as value
    template_code is evaluated as a format template piece of code. '&lt;'
    and '"' need to be escaped with '&amp;lt;' and '&amp;quot;'

    @param ctx: context as passed by libxslt
    @param recID: record ID
    @param template_code: the code calling a BFE_ as it would be use in format template
    @return: the evalued call to a format template (usually a call to a format element)
    @rtype: string
    """ #'
    # Imported lazily to avoid a circular import with bibformat_engine.
    from invenio.bibformat_engine import \
    format_with_format_template, \
    BibFormatObject
    try:
        # Normalise the record ID: plain string/int, or a libxml2 node list
        # whose first child (text node) carries the value.
        if isinstance(recID, str):
            recID_int = int(recID)
        elif isinstance(recID, (int, long)):
            recID_int = recID
        else:
            recID_int = libxml2.xmlNode(_obj=recID[0]).children.content

        bfo = BibFormatObject(recID_int)
        return format_with_format_template(None, bfo,
                                           verbose=0,
                                           format_template_code=template_code)
    except Exception, err:
        # Never propagate into the XSLT engine: report and return ''.
        sys.stderr.write("Error during formatting function evaluation: " + \
                         str(err) + \
                         '\n')
        return ''
def eval_bibformat_4suite(ctx, recID, template_code):
"""
4suite extension function:
Bridge between BibFormat and XSL stylesheets.
Returns the evaluation of the given piece of format template
Can be used in that way in XSL stylesheet
(provided xmlns:fn="http://cdsweb.cern.ch/bibformat/fn" has been declared):
<xsl:value-of select="fn:eval_bibformat(marc:controlfield[@tag='001'],'<BFE_SERVER_INFO var="recurl">')" />
if recID is string, value is converted to int
if recID is Node, first child node (text node) is taken as value
template_code is evaluated as a format template piece of code. '<'
and '"' need to be escaped with '<' and '"'
@param ctx: context as passed by 4suite
@param recID: record ID
@param template_code: the code calling a BFE_ as it would be use in format template
@return: the evalued call to a format template (usually a call to a format element)
@rtype: string
""" #'
from invenio.bibformat_engine import \
format_with_format_template, \
BibFormatObject
try:
if len(recID) > 0 and isinstance(recID[0], Node):
recID_int = recID[0].firstChild.nodeValue
if recID_int is None:
return ''
else:
recID_int = int(recID_int)
bfo = BibFormatObject(recID_int)
return format_with_format_template(None, bfo,
verbose=0,
format_template_code=template_code)
except Exception, err:
sys.stderr.write("Error during formatting function evaluation: " + \
str(err) + \
'\n')
return ''
# End of date-related functions #
##################################################################
def format(xmltext, template_filename=None, template_source=None):
"""
Processes an XML text according to a template, and returns the result.
The template can be given either by name (or by path) or by source.
If source is given, name is ignored.
bibformat_xslt_engine will look for template_filename in standard directories
for templates. If not found, template_filename will be assumed to be a path to
a template. If none can be found, return None.
@param xmltext: The string representation of the XML to process
@param template_filename: The name of the template to use for the processing
@param template_source: The configuration describing the processing.
@return: the transformed XML text.
"""
if processor_type == 0:
# No XSLT processor found
sys.stderr.write('No XSLT processor could be found.')
#sys.exit(1)
# Retrieve template and read it
if template_source:
template = template_source
elif template_filename:
try:
path_to_templates = (CFG_BIBFORMAT_TEMPLATES_PATH + os.sep +
template_filename)
if os.path.exists(path_to_templates):
template = file(path_to_templates).read()
elif os.path.exists(template_filename):
template = file(template_filename).read()
else:
sys.stderr.write(template_filename +' does not exist.')
return None
except IOError:
sys.stderr.write(template_filename +' could not be read.')
return None
else:
sys.stderr.write(template_filename +' was not given.')
return None
# Some massaging of the input to avoid the default namespace issue
# in XPath. More elegant solution might be found though.
xmltext = xmltext.replace('xmlns="http://www.loc.gov/MARC21/slim"', '')
# For older MARCXML records stored in bibfmt with empty indicators
xmltext = xmltext.replace('ind1=""', 'ind1=" "')
xmltext = xmltext.replace('ind2=""', 'ind2=" "')
result = ""
if processor_type == 1:
# lxml
try:
xml = etree.XML(xmltext)
except etree.XMLSyntaxError, e:
error = 'The XML code given is invalid. [%s]' % (e,)
sys.stderr.write(error)
return result
except:
error = 'Failed to process the XML code.'
sys.stderr.write(error)
return result
try:
xsl = etree.XML(template)
except etree.XMLSyntaxError, e:
error = 'The XSL code given is invalid. [%s]' % (e,)
sys.stderr.write(error)
return result
except:
error = 'Failed to process the XSL code.'
sys.stderr.write(error)
return result
try:
fns = etree.FunctionNamespace(CFG_BIBFORMAT_FUNCTION_NS)
fns["creation_date"] = get_creation_date_lxml
fns["modification_date"] = get_modification_date_lxml
fns["eval_bibformat"] = eval_bibformat_lxml
except etree.NamespaceRegistryError, e:
error = 'Failed registering the XPath extension function. [%s]' % (e,)
sys.stderr.write(error)
return result
try:
xslt = etree.XSLT(xsl)
except etree.XSLTParseError, e:
error = 'The XSL code given is invalid. [%s]' % (e,)
sys.stderr.write(error)
return result
except:
error = 'Failed to process the XSL code.'
sys.stderr.write(error)
return result
try:
temporary_result = xslt(xml)
except:
error = 'Failed to perform the XSL transformation.'
sys.stderr.write(error)
return result
result = str(temporary_result)
# Housekeeping
del temporary_result
del xslt
del xsl
del xml
elif processor_type == 2:
# libxml2 & libxslt
# Register BibFormat functions for use in XSL
libxslt.registerExtModuleFunction("creation_date",
CFG_BIBFORMAT_FUNCTION_NS,
get_creation_date_libxslt)
libxslt.registerExtModuleFunction("modification_date",
CFG_BIBFORMAT_FUNCTION_NS,
get_modification_date_libxslt)
libxslt.registerExtModuleFunction("eval_bibformat",
CFG_BIBFORMAT_FUNCTION_NS,
eval_bibformat_libxslt)
# Load template and source
template_xml = libxml2.parseDoc(template)
processor = libxslt.parseStylesheetDoc(template_xml)
source = libxml2.parseDoc(xmltext)
# Transform
result_object = processor.applyStylesheet(source, None)
try:
result = processor.saveResultToString(result_object)
except SystemError :
# Catch an exception thrown when result is empty,
# due to a bug in libxslt
result = ''
# Deallocate
processor.freeStylesheet()
source.freeDoc()
result_object.freeDoc()
elif processor_type == 3:
# 4suite
# Init
processor = Processor.Processor()
# Register BibFormat functions for use in XSL
processor.registerExtensionFunction(CFG_BIBFORMAT_FUNCTION_NS,
"creation_date",
get_creation_date_4suite)
processor.registerExtensionFunction(CFG_BIBFORMAT_FUNCTION_NS,
"modification_date",
get_modification_date_4suite)
processor.registerExtensionFunction(CFG_BIBFORMAT_FUNCTION_NS,
"eval_bibformat",
eval_bibformat_4suite)
# Load template and source
transform = InputSource.DefaultFactory.fromString(template,
uri=CFG_SITE_URL)
source = InputSource.DefaultFactory.fromString(xmltext,
uri=CFG_SITE_URL)
processor.appendStylesheet(transform)
# Transform
result = processor.run(source)
else:
sys.stderr.write("No XSLT processor could be found")
return result
if __name__ == "__main__":
pass
| gpl-2.0 |
mensler/ansible | lib/ansible/modules/cloud/azure/azure_rm_publicipaddress_facts.py | 68 | 6200 | #!/usr/bin/python
#
# Copyright (c) 2016 Matt Davis, <mdavis@ansible.com>
# Chris Houseknecht, <house@redhat.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'curated'}
DOCUMENTATION = '''
---
module: azure_rm_publicipaddress_facts
version_added: "2.1"
short_description: Get public IP facts.
description:
- Get facts for a specific public IP or all public IPs within a resource group.
options:
name:
description:
- Only show results for a specific Public IP.
required: false
default: null
resource_group:
description:
- Limit results by resource group. Required when using name parameter.
required: false
default: null
tags:
description:
- Limit results by providing a list of tags. Format tags as 'key' or 'key:value'.
required: false
default: null
extends_documentation_fragment:
- azure
author:
- "Chris Houseknecht (@chouseknecht)"
- "Matt Davis (@nitzmahone)"
'''
EXAMPLES = '''
    - name: Get facts for one Public IP
      azure_rm_publicipaddress_facts:
        resource_group: Testing
        name: publicip001

    - name: Get facts for all Public IPs within a resource groups
      azure_rm_publicipaddress_facts:
        resource_group: Testing
'''
RETURN = '''
azure_publicipaddresses:
description: List of public IP address dicts.
returned: always
type: list
example: [{
"etag": 'W/"a31a6d7d-cb18-40a5-b16d-9f4a36c1b18a"',
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Network/publicIPAddresses/pip2001",
"location": "eastus2",
"name": "pip2001",
"properties": {
"idleTimeoutInMinutes": 4,
"provisioningState": "Succeeded",
"publicIPAllocationMethod": "Dynamic",
"resourceGuid": "29de82f4-a7da-440e-bd3d-9cabb79af95a"
},
"type": "Microsoft.Network/publicIPAddresses"
}]
'''
from ansible.module_utils.basic import *
from ansible.module_utils.azure_rm_common import *
try:
from msrestazure.azure_exceptions import CloudError
from azure.common import AzureMissingResourceHttpError, AzureHttpError
except:
# This is handled in azure_rm_common
pass
AZURE_OBJECT_CLASS = 'PublicIp'
class AzureRMPublicIPFacts(AzureRMModuleBase):
    """Gather facts about Azure public IP addresses.

    Results land in ``results['ansible_facts']['azure_publicipaddresses']``
    as a list of serialized public IP dicts, filtered by name, resource
    group and/or tags.
    """

    def __init__(self):

        self.module_arg_spec = dict(
            name=dict(type='str'),
            resource_group=dict(type='str'),
            tags=dict(type='list')
        )

        # changed is always False: this is a read-only facts module.
        self.results = dict(
            changed=False,
            ansible_facts=dict(azure_publicipaddresses=[])
        )

        self.name = None
        self.resource_group = None
        self.tags = None

        super(AzureRMPublicIPFacts, self).__init__(self.module_arg_spec,
                                                   supports_tags=False,
                                                   facts_module=True)

    def exec_module(self, **kwargs):
        """Validate parameters and dispatch to the appropriate lister."""
        for key in self.module_arg_spec:
            setattr(self, key, kwargs[key])

        if self.name and not self.resource_group:
            self.fail("Parameter error: resource group required when filtering by name.")

        if self.name:
            result = self.get_item()
        elif self.resource_group:
            result = self.list_resource_group()
        else:
            result = self.list_all()

        self.results['ansible_facts']['azure_publicipaddresses'] = result
        return self.results

    def _pip_to_dict(self, item):
        """Serialize one SDK public IP object into the documented dict shape."""
        pip = self.serialize_obj(item, AZURE_OBJECT_CLASS)
        pip['name'] = item.name
        pip['type'] = item.type
        return pip

    def get_item(self):
        """Return the named public IP as a one-element list, or [] if absent."""
        self.log('Get properties for {0}'.format(self.name))
        item = None
        try:
            item = self.network_client.public_ip_addresses.get(self.resource_group, self.name)
        except CloudError:
            # Not found: an empty result, not a failure.
            pass

        if item and self.has_tags(item.tags, self.tags):
            return [self._pip_to_dict(item)]
        return []

    def list_resource_group(self):
        """Return all public IPs in self.resource_group matching the tag filter."""
        self.log('List items in resource groups')
        try:
            response = self.network_client.public_ip_addresses.list(self.resource_group)
        except AzureHttpError as exc:
            self.fail("Error listing items in resource groups {0} - {1}".format(self.resource_group, str(exc)))

        return [self._pip_to_dict(item) for item in response
                if self.has_tags(item.tags, self.tags)]

    def list_all(self):
        """Return all public IPs in the subscription matching the tag filter."""
        self.log('List all items')
        try:
            response = self.network_client.public_ip_addresses.list_all()
        except AzureHttpError as exc:
            self.fail("Error listing all items - {0}".format(str(exc)))

        return [self._pip_to_dict(item) for item in response
                if self.has_tags(item.tags, self.tags)]
def main():
    # Instantiating the module class runs the whole facts flow: the
    # AzureRMModuleBase constructor parses arguments, invokes exec_module()
    # and exits the process with the results.
    AzureRMPublicIPFacts()

if __name__ == '__main__':
    main()
| gpl-3.0 |
ddico/project | project_baseuser/project.py | 28 | 1678 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2013 Daniel Reis
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm
from openerp import SUPERUSER_ID
class ProjectTask(orm.Model):
    # Extend the existing project.task model rather than define a new one.
    _inherit = 'project.task'

    def message_post(
            self, cr, uid, thread_id, body='', subject=None,
            type='notification', subtype=None, parent_id=False,
            attachments=None, context=None, content_subtype='html', **kwargs):
        """ Overrides mail_thread message_post so that we can write messages
        on read only documents.

        The caller's uid is deliberately replaced by SUPERUSER_ID in the
        super() call, so followers without write access to the task can
        still post messages on it.
        """
        return super(ProjectTask, self).message_post(
            cr, SUPERUSER_ID,
            thread_id, body=body, subject=subject, type=type, subtype=subtype,
            parent_id=parent_id, attachments=attachments, context=context,
            content_subtype=content_subtype, **kwargs)
| agpl-3.0 |
alissonperez/django-onmydesk | onmydesk/utils.py | 1 | 1501 | """Module with common utilities to this package"""
import re
from datetime import timedelta
import importlib
def my_import(class_name):
    """
    Import and return a class (or any module attribute) by dotted path.

    Usage example::

        Report = my_import('myclass.models.Report')

        model_instance = Report()
        model_instance.name = 'Test'
        model_instance.save()

    :param str class_name: Fully qualified name, e.g. ``'pkg.module.Class'``
    :returns: Class object
    :raises ImportError: if the module cannot be imported, the attribute
        does not exist, or ``class_name`` contains no package part at all.
    """
    *packs, attr_name = class_name.split('.')

    try:
        # A dotless class_name leaves packs empty and import_module('')
        # raises ValueError, so that case is normalized to ImportError too.
        module = importlib.import_module('.'.join(packs))
        return getattr(module, attr_name)
    except (ImportError, AttributeError, ValueError) as e:
        msg = 'Could not import "{}" from {}: {}.'.format(
            attr_name, e.__class__.__name__, e)
        raise ImportError(msg) from e
def str_to_date(value, reference_date):
    '''
    Convert a string like 'D-1' to a "reference_date - timedelta(days=1)"

    Whitespace is ignored and the 'd' may be lower case.

    :param str value: String like 'D-1', 'D+1', 'D'...
    :param date reference_date: Date to be used as 'D'
    :returns: Result date
    :rtype: date
    :raises ValueError: if value does not match 'D', 'D+n' or 'D-n'
    '''
    n_value = value.strip(' ').replace(' ', '').upper()

    # Raw string avoids the invalid '\-' escape of the original pattern;
    # the optional group captures sign and day count in one pass.
    match = re.match(r'^D(?:([-+])([0-9]+))?$', n_value)
    if not match:
        raise ValueError('Wrong value "{}"'.format(value))

    sign, days = match.groups()
    if sign is None:
        return reference_date

    delta = timedelta(days=int(days))
    return reference_date - delta if sign == '-' else reference_date + delta
| mit |
yfried/ansible | lib/ansible/modules/network/iosxr/iosxr_command.py | 40 | 6710 | #!/usr/bin/python
#
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: iosxr_command
version_added: "2.1"
author: "Ricardo Carrillo Cruz (@rcarrillocruz)"
short_description: Run commands on remote devices running Cisco IOS XR
description:
- Sends arbitrary commands to an IOS XR node and returns the results
read from the device. This module includes an
argument that will cause the module to wait for a specific condition
before returning or timing out if the condition is not met.
- This module does not support running commands in configuration mode.
Please use M(iosxr_config) to configure iosxr devices.
extends_documentation_fragment: iosxr
notes:
- This module does not support netconf connection
- Tested against IOS XR 6.1.2
options:
commands:
description:
- List of commands to send to the remote iosxr device over the
configured provider. The resulting output from the command
is returned. If the I(wait_for) argument is provided, the
module is not returned until the condition is satisfied or
the number of retries has expired.
required: true
wait_for:
description:
- List of conditions to evaluate against the output of the
command. The task will wait for each condition to be true
before moving forward. If the conditional is not true
within the configured number of retries, the task fails.
See examples.
aliases: ['waitfor']
version_added: "2.2"
match:
description:
- The I(match) argument is used in conjunction with the
I(wait_for) argument to specify the match policy. Valid
values are C(all) or C(any). If the value is set to C(all)
then all conditionals in the wait_for must be satisfied. If
the value is set to C(any) then only one of the values must be
satisfied.
default: all
choices: ['any', 'all']
version_added: "2.2"
retries:
description:
- Specifies the number of retries a command should by tried
before it is considered failed. The command is run on the
target device every retry and evaluated against the
I(wait_for) conditions.
default: 10
interval:
description:
- Configures the interval in seconds to wait between retries
of the command. If the command does not pass the specified
conditions, the interval indicates how long to wait before
trying the command again.
default: 1
"""
EXAMPLES = """
tasks:
- name: run show version on remote devices
iosxr_command:
commands: show version
- name: run show version and check to see if output contains iosxr
iosxr_command:
commands: show version
wait_for: result[0] contains IOS-XR
- name: run multiple commands on remote nodes
iosxr_command:
commands:
- show version
- show interfaces
- { command: example command that prompts, prompt: expected prompt, answer: yes}
- name: run multiple commands and evaluate the output
iosxr_command:
commands:
- show version
- show interfaces
wait_for:
- result[0] contains IOS-XR
- result[1] contains Loopback0
"""
RETURN = """
stdout:
description: The set of responses from the commands
returned: always apart from low level errors (such as action plugin)
type: list
sample: ['...', '...']
stdout_lines:
description: The value of stdout split into a list
returned: always apart from low level errors (such as action plugin)
type: list
sample: [['...', '...'], ['...'], ['...']]
failed_conditions:
description: The list of conditionals that have failed
returned: failed
type: list
sample: ['...', '...']
"""
import time
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.common.parsing import Conditional
from ansible.module_utils.network.common.utils import to_lines
from ansible.module_utils.network.iosxr.iosxr import run_commands, iosxr_argument_spec
from ansible.module_utils.network.iosxr.iosxr import command_spec
def parse_commands(module, warnings):
    """Prune the requested command list for check mode.

    In check mode only 'show' commands may run; anything else is removed
    from module.params['commands'] in place and a warning is appended to
    *warnings*.  Returns the (possibly pruned) list.
    """
    requested = module.params['commands']
    for entry in tuple(requested):
        # Entries may be plain strings or dicts with a 'command' key.
        try:
            text = entry['command']
        except Exception:
            text = entry
        if not module.check_mode or text.startswith('show'):
            continue
        warnings.append(
            'Only show commands are supported when using check mode, not '
            'executing %s' % text
        )
        requested.remove(entry)
    return requested
def main():
    """Module entry point: run the commands, evaluate wait_for conditionals."""
    argument_spec = dict(
        commands=dict(type='list', required=True),
        wait_for=dict(type='list', aliases=['waitfor']),
        match=dict(default='all', choices=['all', 'any']),
        retries=dict(default=10, type='int'),
        interval=dict(default=1, type='int')
    )
    # Merge in the shared provider/command arguments for iosxr.
    argument_spec.update(iosxr_argument_spec)
    argument_spec.update(command_spec)
    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True)
    warnings = list()
    result = {'changed': False, 'warnings': warnings}
    commands = parse_commands(module, warnings)
    wait_for = module.params['wait_for'] or list()
    try:
        # Conditional() parses strings like 'result[0] contains IOS-XR';
        # a malformed expression surfaces as AttributeError.
        conditionals = [Conditional(c) for c in wait_for]
    except AttributeError as exc:
        module.fail_json(msg=to_text(exc))
    retries = module.params['retries']
    interval = module.params['interval']
    match = module.params['match']
    # Re-run the commands until every conditional has been satisfied
    # (match=all) or any one of them has (match=any), or retries run out.
    while retries > 0:
        responses = run_commands(module, commands)
        # Iterate a copy: satisfied conditionals are removed from the list.
        for item in list(conditionals):
            if item(responses):
                if match == 'any':
                    conditionals = list()
                    break
                conditionals.remove(item)
        if not conditionals:
            break
        time.sleep(interval)
        retries -= 1
    # Any conditional left unsatisfied means failure.
    if conditionals:
        failed_conditions = [item.raw for item in conditionals]
        msg = 'One or more conditional statements have not been satisfied'
        module.fail_json(msg=msg, failed_conditions=failed_conditions)
    result.update({
        'stdout': responses,
        'stdout_lines': list(to_lines(responses)),
    })
    module.exit_json(**result)

if __name__ == '__main__':
    main()
| gpl-3.0 |
CasataliaLabs/biscuit_drishtiman | Pmw-2.0.0/Pmw/Pmw_2_0_0/demos/Args.py | 2 | 6332 | """Handle command line arguments.
This module contains functions to parse and access the arguments given
to the program on the command line.
"""
import types
import string
import sys
# Symbolic constants for the indexes into an argument specifier tuple.
NAME = 0          # argument name (without the leading '-')
MANDATORY = 1     # true if the argument must be supplied
TYPE = 2          # Bool for flag arguments, else a string naming the option
HELP = 3          # short help text
DEFAULT = 4       # default value (only present in 5-item specifiers)
SPEC_LENGTH = 5   # length of a specifier that carries a default value

# Unique sentinel: a spec whose TYPE *is* this object takes no option value.
# Identity comparison is used throughout, the empty-list value is irrelevant.
Bool = []

# Implicit '-help' flag, prepended to every user-supplied spec list.
helpSpec = (
    ('help', 0, Bool, 'print help and exit'),
)
def parseArgs(title, argv, argSpecs, filesOK):
    """Parse and check command line arguments.

    Scan the command line arguments in *argv* according to the argument
    specifier *argSpecs*.  Return **None** if there are no errors in
    the arguments, otherwise return an error string describing the error.
    This function must be called to initialise this module.

    title -- The name of the program, used in error messages and help text.
    argv -- Sequence of program arguments, normally **sys.argv**.
    argSpecs -- Sequence of 4- or 5-item argument specifiers:
        (name, mandatory, type-or-Bool, help[, default]); see the module
        constants NAME/MANDATORY/TYPE/HELP/DEFAULT.
    filesOK -- If false, arguments not matching a specifier are errors;
        otherwise they are collected and available via fileList().
    """
    global programName
    global _fileList
    errMsg = title + ' command line error: '
    programName = argv[0]
    argSpecs = helpSpec + argSpecs
    # Index the specs by name and seed every argument's default value.
    argSpecDic = {}
    for spec in argSpecs:
        arg = spec[NAME]
        argSpecDic[arg] = spec
        if len(spec) >= SPEC_LENGTH:
            set(arg, spec[DEFAULT])
        elif spec[TYPE] is Bool:
            set(arg, 0)
        else:
            set(arg, None)
    knownKeys = list(argSpecDic.keys())
    i = 1
    _fileList = []
    argc = len(argv)
    while i < argc:
        arg = argv[i]
        key = arg[1:]
        if key in knownKeys:
            spec = argSpecDic[key]
            if spec[TYPE] is Bool:
                set(key, 1)
            else:
                # Option arguments consume the following argv entry.
                i = i + 1
                if i >= argc:
                    return errMsg + 'missing argument to \'' + arg + '\' option.'
                value = argv[i]
                if len(spec) >= SPEC_LENGTH:
                    try:
                        # int()/float() replace the Python 2-only
                        # string.atoi()/string.atof() of the original.
                        if type(spec[DEFAULT]) == int:
                            typeStr = 'integer'
                            value = int(value)
                        elif type(spec[DEFAULT]) == float:
                            typeStr = 'float'
                            value = float(value)
                    except ValueError:
                        # Conversion failed; the bogus tuple assignment
                        # 'sys.exc_info()[2] = None' of the original is gone.
                        return errMsg + 'cannot convert string \'' + value + \
                               '\' to ' + typeStr + ' for option \'-' + key + '\'.'
                set(key, value)
        else:
            _fileList.append(arg)
        i = i + 1
    if get('help'):
        return _helpString(title, argSpecs)
    if not filesOK and len(_fileList) > 0:
        if len(_fileList) == 1:
            return errMsg + 'unknown option \'' + str(_fileList[0]) + '\'.'
        else:
            return errMsg + 'unknown options ' + str(_fileList) + '.'
    # Check that every mandatory argument received a value.
    _missing = []
    for spec in argSpecs:
        if spec[MANDATORY] and get(spec[NAME]) is None:
            _missing.append(spec[NAME])
    if len(_missing) == 1:
        return errMsg + 'required argument \'-' + \
            str(_missing[0]) + '\' is missing.'
    elif len(_missing) > 1:
        return errMsg + 'required arguments ' + \
            str(['-' + s for s in _missing]) + ' are missing.'
    return None
def fileList():
    # Accessor for the non-option arguments collected by parseArgs().
    return _fileList
def _helpString(title, argSpecs):
    """Return the formatted help text for *title* built from *argSpecs*.

    Arguments are listed mandatory-first; each line shows '-name <option>'
    (or just '-name' for Bool flags) padded to a common width, followed by
    the help text and, when present, the default value in parentheses.
    """
    # Width of the widest argument column, so descriptions line up.
    max = 0
    for spec in argSpecs:
        if spec[TYPE] is Bool:
            width = len(spec[NAME]) + 1
        else:
            width = len(spec[NAME]) + 4 + len(spec[TYPE])
        if width > max:
            max = width
    rtn = title + ' command line arguments:'
    format = '\n %-' + str(max) + 's %s'
    # Two passes over the specs: mandatory arguments first, then optional.
    for mandatory in (1, 0):
        needHeader = 1
        for spec in argSpecs:
            if mandatory and spec[MANDATORY] or not mandatory and not spec[MANDATORY]:
                if needHeader:
                    if mandatory:
                        rtn = rtn + '\n Mandatory arguments:'
                    else:
                        rtn = rtn + '\n Optional arguments (defaults in parentheses):'
                    needHeader = 0
                if spec[TYPE] is Bool:
                    arg = '-%s' % spec[NAME]
                else:
                    arg = '-%s <%s>' % (spec[NAME], spec[TYPE])
                if len(spec) >= SPEC_LENGTH:
                    # '%s' applies str() itself, so one branch replaces the
                    # original's duplicated string/non-string branches (whose
                    # py2->py3 conversion had left a never-true 'bytes' test).
                    definition = '%s (%s)' % (spec[HELP], spec[DEFAULT])
                else:
                    definition = spec[HELP]
                rtn = rtn + format % (arg, definition)
    return rtn
def exists(key):
    """Return true if *key* has been recorded in the argument dictionary."""
    return key in configDict

def get(key):
    """Return the stored value for *key* (raises KeyError if never set)."""
    return configDict[key]

def set(key, value):
    """Record *value* for *key* in the module-wide argument dictionary."""
    # Mutating the dict needs no 'global' declaration.
    configDict[key] = value

# Module-wide storage for parsed argument values.
configDict = {}
| gpl-3.0 |
jtorrents/networkx | networkx/algorithms/richclub.py | 47 | 3516 | # -*- coding: utf-8 -*-
import networkx as nx
__author__ = """\n""".join(['Ben Edwards',
'Aric Hagberg <hagberg@lanl.gov>'])
__all__ = ['rich_club_coefficient']
def rich_club_coefficient(G, normalized=True, Q=100):
    """Return the rich-club coefficient of the graph G.

    The rich-club coefficient is the ratio, for every degree k, of the
    number of actual to the number of potential edges for nodes
    with degree greater than k:

    .. math::

        \\phi(k) = \\frac{2 Ek}{Nk(Nk-1)}

    where Nk is the number of nodes with degree larger than k, and Ek
    be the number of edges among those nodes.

    Parameters
    ----------
    G : NetworkX graph
    normalized : bool (optional)
       Normalize using randomized network (see [1]_)
    Q : float (optional, default=100)
       If normalized=True build a random network by performing
       Q*M double-edge swaps, where M is the number of edges in G,
       to use as a null-model for normalization.

    Returns
    -------
    rc : dictionary
       A dictionary, keyed by degree, with rich club coefficient values.

    Examples
    --------
    >>> G = nx.Graph([(0,1),(0,2),(1,2),(1,3),(1,4),(4,5)])
    >>> rc = nx.rich_club_coefficient(G,normalized=False)
    >>> rc[0] # doctest: +SKIP
    0.4

    Notes
    ------
    The rich club definition and algorithm are found in [1]_.  This
    algorithm ignores any edge weights and is not defined for directed
    graphs or graphs with parallel edges or self loops.

    Estimates for appropriate values of Q are found in [2]_.

    References
    ----------
    .. [1] Julian J. McAuley, Luciano da Fontoura Costa, and Tibério S. Caetano,
       "The rich-club phenomenon across complex network hierarchies",
       Applied Physics Letters Vol 91 Issue 8, August 2007.
       http://arxiv.org/abs/physics/0701290
    .. [2] R. Milo, N. Kashtan, S. Itzkovitz, M. E. J. Newman, U. Alon,
       "Uniform generation of random graphs with arbitrary degree
       sequences", 2006. http://arxiv.org/abs/cond-mat/0312028
    """
    # Single concatenated message: the original passed two comma-separated
    # strings to Exception, which rendered the message as a tuple.
    if G.is_multigraph() or G.is_directed():
        raise Exception('rich_club_coefficient is not implemented for '
                        'directed or multiedge graphs.')
    if len(G.selfloop_edges()) > 0:
        raise Exception('rich_club_coefficient is not implemented for '
                        'graphs with self loops.')
    rc = _compute_rc(G)
    if normalized:
        # make R a copy of G, randomize with Q*|E| double edge swaps
        # and use rich_club coefficient of R to normalize
        R = G.copy()
        E = R.number_of_edges()
        nx.double_edge_swap(R, Q * E, max_tries=Q * E * 10)
        rcran = _compute_rc(R)
        for d in rc:
            # NOTE(review): rcran[d] == 0 would raise ZeroDivisionError here,
            # matching the original behavior (its guard was commented out).
            rc[d] /= rcran[d]
    return rc
def _compute_rc(G):
    # compute rich club coefficient for all k degrees in G
    deghist = nx.degree_histogram(G)
    total = sum(deghist)
    # number of nodes with degree > k (omit last entry which is zero)
    nks = [total-cs for cs in nx.utils.cumulative_sum(deghist) if total-cs > 1]
    deg=G.degree()
    # Each edge as a sorted (low degree, high degree) pair, sorted overall so
    # edges whose lower endpoint degree is smallest come first.
    edge_degrees=sorted(sorted((deg[u],deg[v])) for u,v in G.edges_iter())
    ek=G.number_of_edges()
    k1,k2=edge_degrees.pop(0)
    rc={}
    # Sweep k upward; drop edges whose lower endpoint degree is <= k so that
    # ek counts only edges between nodes of degree > k.
    for d,nk in zip(range(len(nks)),nks):
        while k1 <= d:
            if len(edge_degrees)==0:
                break
            k1,k2=edge_degrees.pop(0)
            ek-=1
        # phi(k) = 2*Ek / (Nk*(Nk-1))
        rc[d] = 2.0*ek/(nk*(nk-1))
    return rc
| bsd-3-clause |
bradallred/gemrb | gemrb/GUIScripts/iwd2/Abilities.py | 1 | 7433 | # GemRB - Infinity Engine Emulator
# Copyright (C) 2003 The GemRB Project
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#
#character generation, ability (GUICG4)
import GemRB
from GUIDefines import *
import CharOverview
import CommonTables
from ie_stats import IE_STR, IE_DEX, IE_CON, IE_INT, IE_WIS, IE_CHR
# Module-level UI state shared between the event handlers below.
AbilityWindow = 0     # the GUICG/GUIREC window once loaded
TextAreaControl = 0   # description text area (control 29)
DoneButton = 0        # 'Done' button, enabled only when PointsLeft == 0
AbilityTable = 0      # 'ability' table with per-ability descriptions
PointsLeft = 0        # unspent ability points
Minimum = 0           # current lower bound for the selected ability
Maximum = 0           # current upper bound for the selected ability
Add = 0               # racial adjustment read from ABRACEAD
KitIndex = 0          # row index into ABCLASRQ for the chosen class/kit
CharGen = 0           # nonzero during character generation, zero on level-up
Stats = [ IE_STR, IE_DEX, IE_CON, IE_INT, IE_WIS, IE_CHR ]
def CalcLimits(Abidx):
	"""Compute Minimum/Maximum/Add for ability Abidx from the rule tables.

	On level-up (not CharGen) the minimum is the character's current base
	stat and the maximum a flat 25.  During chargen the 3..18 base range is
	tightened by class (ABCLASRQ) and race (ABRACERQ) requirements and then
	shifted by the racial adjustment (ABRACEAD).
	"""
	global Minimum, Maximum, Add
	if not CharGen:
		pc = GemRB.GameGetSelectedPCSingle ()
		Minimum = GemRB.GetPlayerStat (pc, Stats[Abidx], 1)
		Maximum = 25
		return
	Abracead = GemRB.LoadTable("ABRACEAD")
	RaceID = GemRB.GetVar("Race")
	RowIndex = CommonTables.Races.FindValue(3, RaceID)
	RaceName = CommonTables.Races.GetRowName(RowIndex)
	Minimum = 3
	Maximum = 18
	# Class/kit minimum requirement.
	Abclasrq = GemRB.LoadTable("ABCLASRQ")
	tmp = Abclasrq.GetValue(KitIndex, Abidx)
	if tmp!=0 and tmp>Minimum:
		Minimum = tmp
	# Racial minimum (even columns) and maximum (odd columns).
	Abracerq = GemRB.LoadTable("ABRACERQ")
	Race = Abracerq.GetRowIndex(RaceName)
	tmp = Abracerq.GetValue(Race, Abidx*2)
	if tmp!=0 and tmp>Minimum:
		Minimum = tmp
	tmp = Abracerq.GetValue(Race, Abidx*2+1)
	if tmp!=0 and tmp>Maximum:
		Maximum = tmp
	# Racial add/subtract shifts both bounds.
	Race = Abracead.GetRowIndex(RaceName)
	Add = Abracead.GetValue(Race, Abidx)
	Maximum = Maximum + Add
	Minimum = Minimum + Add
	if Minimum<1:
		Minimum=1
	return
def GetModColor(mod):
	"""Pick the RGB colour for an ability modifier.

	Negative modifiers are red, positive ones green, zero is white.
	"""
	if mod == 0:
		return {'r' : 255, 'g' : 255, 'b' : 255}
	positive = mod > 0
	return {'r' : 0 if positive else 255,
		'g' : 255 if positive else 0,
		'b' : 0}
def RollPress():
	"""Reset all six abilities to their starting values and redraw labels."""
	global Add
	GemRB.SetVar("Ability",0)
	# Points-left counter, drawn in yellow.
	SumLabel = AbilityWindow.GetControl(0x10000002)
	SumLabel.SetTextColor ({'r' : 255, 'g' : 255, 'b' : 0})
	SumLabel.SetUseRGB(1)
	SumLabel.SetText(str(PointsLeft))
	for i in range(0,6):
		CalcLimits(i)
		# Chargen starts at 10 plus the racial adjustment; on level-up the
		# starting value is the character's current (minimum) stat.
		v = 10+Add
		if not CharGen:
			v = Minimum
		# D&D-style modifier: (score // 2) - 5.
		b = v//2-5
		GemRB.SetVar("Ability "+str(i), v )
		Label = AbilityWindow.GetControl(0x10000003+i)
		Label.SetText(str(v) )
		Label = AbilityWindow.GetControl(0x10000024+i)
		Label.SetUseRGB(1)
		Label.SetTextColor (GetModColor (b))
		Label.SetText("%+d"%(b))
	return
def OnLoad():
	# Chargen entry point: open the window in chargen mode with 16 points.
	OpenAbilitiesWindow (1, 16)
def OpenAbilitiesWindow(chargen, points):
	"""Open the ability-score window.

	chargen -- nonzero for character generation, zero for level-up
	points  -- number of ability points the player may distribute
	"""
	global AbilityWindow, TextAreaControl, DoneButton
	global CharGen, PointsLeft
	global AbilityTable
	global KitIndex, Minimum, Maximum
	CharGen = chargen
	PointsLeft = points
	AbilityTable = GemRB.LoadTable ("ability")
	if chargen:
		# Resolve the class/kit row used later for ABCLASRQ lookups.
		Kit = GemRB.GetVar("Class Kit")
		Class = GemRB.GetVar("Class")-1
		if Kit == 0:
			KitName = CommonTables.Classes.GetRowName(Class)
		else:
			#rowname is just a number, first value row what we need here
			KitName = CommonTables.KitList.GetValue(Kit, 0)
		Abclasrq = GemRB.LoadTable("ABCLASRQ")
		KitIndex = Abclasrq.GetRowIndex(KitName)
	# in a fit of clarity, they used the same ids in both windowpacks
	if chargen:
		AbilityWindow = GemRB.LoadWindow (4, "GUICG")
	else:
		AbilityWindow = GemRB.LoadWindow (7, "GUIREC")
	CharOverview.PositionCharGenWin(AbilityWindow)
	RollPress ()
	# Per-ability controls: label button (30+i), minus (even) / plus (odd).
	for i in range(0,6):
		Button = AbilityWindow.GetControl(i+30)
		Button.SetEvent(IE_GUI_BUTTON_ON_PRESS, JustPress)
		Button.SetVarAssoc("Ability", i)
		Button = AbilityWindow.GetControl(i*2+16)
		Button.SetEvent(IE_GUI_BUTTON_ON_PRESS, LeftPress)
		Button.SetVarAssoc("Ability", i )
		Button.SetActionInterval (200)
		Button = AbilityWindow.GetControl(i*2+17)
		Button.SetEvent(IE_GUI_BUTTON_ON_PRESS, RightPress)
		Button.SetVarAssoc("Ability", i )
		Button.SetActionInterval (200)
	if chargen:
		BackButton = AbilityWindow.GetControl (36)
		BackButton.SetText (15416)
		BackButton.MakeEscape()
		BackButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, BackPress)
	else:
		# No 'Back' step when levelling up.
		AbilityWindow.DeleteControl (36)
	# 'Done' stays disabled until every point has been spent.
	DoneButton = AbilityWindow.GetControl(0)
	DoneButton.SetText(36789)
	DoneButton.MakeDefault()
	DoneButton.SetState(IE_GUI_BUTTON_DISABLED)
	DoneButton.SetEvent(IE_GUI_BUTTON_ON_PRESS, NextPress)
	TextAreaControl = AbilityWindow.GetControl(29)
	TextAreaControl.SetText(17247)
	if not chargen:
		AbilityWindow.ShowModal (MODAL_SHADOW_GRAY)
	else:
		AbilityWindow.Focus()
	return
def RightPress(btn, Abidx):
	"""Handle the minus button: decrease ability Abidx, refund one point."""
	global PointsLeft
	Ability = GemRB.GetVar("Ability "+str(Abidx) )
	#should be more elaborate
	CalcLimits(Abidx)
	GemRB.SetToken("MINIMUM",str(Minimum) )
	GemRB.SetToken("MAXIMUM",str(Maximum) )
	TextAreaControl.SetText(AbilityTable.GetValue(Abidx, 1) )
	if Ability<=Minimum:
		return
	Ability -= 1
	GemRB.SetVar("Ability "+str(Abidx), Ability)
	PointsLeft = PointsLeft + 1
	SumLabel = AbilityWindow.GetControl(0x10000002)
	SumLabel.SetText(str(PointsLeft) )
	SumLabel.SetTextColor ({'r' : 255, 'g' : 255, 'b' : 0})
	Label = AbilityWindow.GetControl(0x10000003+Abidx)
	Label.SetText(str(Ability) )
	# Redraw the modifier label in the matching colour.
	Label = AbilityWindow.GetControl(0x10000024+Abidx)
	b = Ability // 2 - 5
	Label.SetTextColor (GetModColor (b))
	Label.SetText("%+d"%(b))
	# A point was refunded, so 'Done' must be disabled again.
	DoneButton.SetState(IE_GUI_BUTTON_DISABLED)
	return
def JustPress(btn, Abidx):
	"""Handle a click on an ability label: show its description text."""
	Ability = GemRB.GetVar("Ability "+str(Abidx) )
	#should be more elaborate
	CalcLimits(Abidx)
	GemRB.SetToken("MINIMUM",str(Minimum) )
	GemRB.SetToken("MAXIMUM",str(Maximum) )
	TextAreaControl.SetText(AbilityTable.GetValue(Abidx, 1) )
	return
def LeftPress(btn, Abidx):
	"""Handle the plus button: increase ability Abidx, spend one point."""
	global PointsLeft
	CalcLimits(Abidx)
	GemRB.SetToken("MINIMUM",str(Minimum) )
	GemRB.SetToken("MAXIMUM",str(Maximum) )
	Ability = GemRB.GetVar("Ability "+str(Abidx) )
	TextAreaControl.SetText(AbilityTable.GetValue(Abidx, 1) )
	if PointsLeft == 0:
		return
	if Ability>=Maximum: #should be more elaborate
		return
	Ability += 1
	GemRB.SetVar("Ability "+str(Abidx), Ability)
	PointsLeft = PointsLeft - 1
	SumLabel = AbilityWindow.GetControl(0x10000002)
	# When the last point is spent the counter turns white.
	if PointsLeft == 0:
		SumLabel.SetTextColor({'r' : 255, 'g' : 255, 'b' : 255})
	SumLabel.SetText(str(PointsLeft) )
	Label = AbilityWindow.GetControl(0x10000003+Abidx)
	Label.SetText(str(Ability) )
	# Redraw the modifier label in the matching colour.
	Label = AbilityWindow.GetControl(0x10000024+Abidx)
	b = Ability // 2 - 5
	Label.SetTextColor (GetModColor (b))
	Label.SetText("%+d"%(b))
	# All points spent: the player may proceed.
	if PointsLeft == 0:
		DoneButton.SetState(IE_GUI_BUTTON_ENABLED)
	return
def BackPress():
	"""Return to the previous chargen step, discarding chosen abilities."""
	if AbilityWindow:
		AbilityWindow.Unload()
	GemRB.SetNextScript("CharGen5")
	for i in range(6):
		GemRB.SetVar("Ability "+str(i),0) #scrapping the abilities
	return
def NextPress():
	"""Confirm the abilities: continue chargen or commit a level-up."""
	if AbilityWindow:
		AbilityWindow.Unload()
	if CharGen:
		GemRB.SetNextScript("CharGen6") #skills
	else:
		# set the upgraded stats
		pc = GemRB.GameGetSelectedPCSingle ()
		for i in range (len(Stats)):
			newValue = GemRB.GetVar ("Ability "+str(i))
			GemRB.SetPlayerStat (pc, Stats[i], newValue)
		# open up the next levelup window
		import Enemy
		Enemy.OpenEnemyWindow ()
	return
| gpl-2.0 |
Southpaw-TACTIC/Team | src/python/Lib/site-packages/pythonwin/pywin/tools/browseProjects.py | 1 | 8295 | import hierlist, string, regutil, os
import win32con, win32ui, win32api
import commctrl
from pywin.mfc import dialog
import glob
import pyclbr
import pywin.framework.scriptutils
import afxres
class HLIErrorItem(hierlist.HierListItem):
    # Leaf item that just displays an error/status message in the tree.
    def __init__(self, text):
        self.text = text
        hierlist.HierListItem.__init__(self)
    def GetText(self):
        return self.text
class HLICLBRItem(hierlist.HierListItem):
    """Base hierarchy item for a pyclbr entity (class, function or method).

    Items are sorted by name; clicking jumps to the definition when the
    source file is known.
    """
    def __init__(self, name, file, lineno, suffix = ""):
        # If the 'name' object itself has a .name, use it. Not sure
        # how this happens, but seems pyclbr related.
        # See PyWin32 bug 817035
        self.name = getattr(name, "name", name)
        self.file = file
        self.lineno = lineno
        self.suffix = suffix
    def __cmp__(self, other):
        # Python 2 ordering hook; kept for backward compatibility.
        return cmp(self.name, other.name)
    # Python 3 ignores __cmp__, so explicit rich comparisons are needed to
    # keep the list.sort() calls over these items working there too.
    def __eq__(self, other):
        return self.name == other.name
    def __lt__(self, other):
        return self.name < other.name
    # Preserve identity-based hashing (defining __eq__ would otherwise make
    # instances unhashable on Python 3).
    __hash__ = object.__hash__
    def GetText(self):
        return self.name + self.suffix
    def TakeDefaultAction(self):
        if self.file:
            pywin.framework.scriptutils.JumpToDocument(self.file, self.lineno, bScrollToTop=1)
        else:
            win32ui.SetStatusText("The source of this object is unknown")
    def PerformItemSelected(self):
        if self.file is None:
            msg = "%s - source can not be located." % (self.name, )
        else:
            msg = "%s defined at line %d of %s" % (self.name, self.lineno, self.file)
        win32ui.SetStatusText(msg)
class HLICLBRClass(HLICLBRItem):
    # Hierarchy item for a pyclbr Class: expands to parent classes + methods.
    def __init__(self, clbrclass, suffix = ""):
        # clbrclass may be a pyclbr.Class object or (for unresolved bases)
        # a plain string; the AttributeError path handles the latter.
        try:
            name = clbrclass.name
            file = clbrclass.file
            lineno = clbrclass.lineno
            self.super = clbrclass.super
            self.methods = clbrclass.methods
        except AttributeError:
            name = clbrclass
            file = lineno = None
            self.super = []; self.methods = {}
        HLICLBRItem.__init__(self, name, file, lineno, suffix)
    def GetSubList(self):
        # Children: one item per base class, then one per method.
        ret = []
        for c in self.super:
            ret.append(HLICLBRClass(c, " (Parent class)"))
        for meth, lineno in self.methods.items():
            ret.append(HLICLBRMethod(meth, self.file, lineno, " (method)"))
        return ret
    def IsExpandable(self):
        # Nonzero (truthy) when there is anything to expand.
        return len(self.methods) + len(self.super)
    def GetBitmapColumn(self):
        # Column 21: the 'class' icon.
        return 21
class HLICLBRFunction(HLICLBRClass):
    # Module-level function: same behavior as a class item, different icon.
    def GetBitmapColumn(self):
        return 22
class HLICLBRMethod(HLICLBRItem):
    """Tree item for a class method; differs from the base only by icon."""
    def GetBitmapColumn(self):
        return 22  # same icon as functions
class HLIModuleItem(hierlist.HierListItem):
    """Tree item for a Python module file; expands to its classes/functions."""
    def __init__(self, path):
        hierlist.HierListItem.__init__(self)
        self.path = path  # full path of the .py/.pyw file
    def GetText(self):
        return os.path.split(self.path)[1] + " (module)"
    def IsExpandable(self):
        return 1
    def TakeDefaultAction(self):
        # Open the module in the Pythonwin editor.
        win32ui.GetApp().OpenDocumentFile( self.path )
    def GetBitmapColumn(self):
        col = 4 # Default
        try:
            if win32api.GetFileAttributes(self.path) & win32con.FILE_ATTRIBUTE_READONLY:
                col = 5  # read-only file icon
        except win32api.error:
            pass
        return col
    def GetSubList(self):
        # Scan the module with pyclbr and build one child item per top-level
        # class (and, with the post-1.5.2 interface, per function).
        mod, path = pywin.framework.scriptutils.GetPackageModuleName(self.path)
        win32ui.SetStatusText("Building class list - please wait...", 1)
        win32ui.DoWaitCursor(1)
        try:
            try:
                reader = pyclbr.readmodule_ex # Post 1.5.2 interface.
                extra_msg = " or functions"
            except AttributeError:
                reader = pyclbr.readmodule
                extra_msg = ""
            data = reader(mod, [path])
            if data:
                ret = []
                for item in data.values():
                    if item.__class__ != pyclbr.Class: # ie, it is a pyclbr Function instance (only introduced post 1.5.2)
                        ret.append(HLICLBRFunction( item, " (function)" ) )
                    else:
                        ret.append(HLICLBRClass( item, " (class)") )
                ret.sort()
                return ret
            else:
                return [HLIErrorItem("No Python classes%s in module." % (extra_msg,))]
        finally:
            # Always restore the cursor and idle status text, even on error.
            win32ui.DoWaitCursor(0)
            win32ui.SetStatusText(win32ui.LoadString(afxres.AFX_IDS_IDLEMESSAGE))
def MakePathSubList(path):
    """Return browser items for *path*: one HLIDirectoryItem per package
    sub-directory (a directory containing __init__.py) and one
    HLIModuleItem per .py/.pyw file."""
    ret = []
    for filename in glob.glob(os.path.join(path, '*')):
        if os.path.isdir(filename) and os.path.isfile(os.path.join(filename, "__init__.py")):
            # Only real packages are shown as expandable directories.
            ret.append(HLIDirectoryItem(filename, os.path.split(filename)[1]))
        else:
            # str.lower() replaces the deprecated string.lower() helper and
            # behaves identically on both Python 2 and 3.
            if os.path.splitext(filename)[1].lower() in ['.py', '.pyw']:
                ret.append(HLIModuleItem(filename))
    return ret
class HLIDirectoryItem(hierlist.HierListItem):
    """Tree item for a directory; expands to its packages and modules."""
    def __init__(self, path, displayName = None, bSubDirs = 0):
        hierlist.HierListItem.__init__(self)
        self.path = path
        self.bSubDirs = bSubDirs  # NOTE(review): stored but never read in this class
        if displayName:
            self.displayName = displayName
        else:
            self.displayName = path
    def IsExpandable(self):
        return 1
    def GetText(self):
        return self.displayName
    def GetSubList(self):
        ret = MakePathSubList(self.path)
        if os.path.split(self.path)[1] == "win32com": # Complete and utter hack for win32com.
            # win32com extensions live in a sibling win32comext directory;
            # merge its contents into the win32com node.
            try:
                path = win32api.GetFullPathName(os.path.join(self.path, "..\\win32comext"))
                ret = ret + MakePathSubList(path)
            except win32ui.error:
                pass
        return ret
class HLIProjectRoot(hierlist.HierListItem):
    """Root item for one registered Python path ("project")."""
    def __init__(self, projectName, displayName = None):
        hierlist.HierListItem.__init__(self)
        self.projectName = projectName
        self.displayName = displayName or projectName
    def GetText(self):
        return self.displayName
    def IsExpandable(self):
        return 1
    def GetSubList(self):
        # The registered path may hold several ';'-separated directories.
        paths = regutil.GetRegisteredNamedPath(self.projectName)
        # str.split() replaces the deprecated string.split() helper and
        # behaves identically on both Python 2 and 3.
        pathList = paths.split(";")
        if len(pathList) == 1: # Single dir - don't bother putting the dir in
            ret = MakePathSubList(pathList[0])
        else:
            ret = map( HLIDirectoryItem, pathList )
        return ret
class HLIRoot(hierlist.HierListItem):
    """Root of the browser tree: one child per registered PythonPath key."""
    def __init__(self):
        hierlist.HierListItem.__init__(self)
    def IsExpandable(self):
        return 1
    def GetSubList(self):
        keyStr = regutil.BuildDefaultPythonKey() + "\\PythonPath"
        hKey = win32api.RegOpenKey(regutil.GetRootKey(), keyStr)
        try:
            ret = []
            ret.append(HLIProjectRoot("", "Standard Python Library")) # The core path.
            index = 0
            while 1:
                # Enumerate registry sub-keys until RegEnumKey runs off
                # the end (signalled by a win32api.error).
                try:
                    ret.append(HLIProjectRoot(win32api.RegEnumKey(hKey, index)))
                    index = index + 1
                except win32api.error:
                    break
            return ret
        finally:
            # Always release the registry handle.
            win32api.RegCloseKey(hKey)
class dynamic_browser (dialog.Dialog):
    """Resizable dialog hosting a tree control bound to an HLI root item."""
    style = win32con.WS_OVERLAPPEDWINDOW | win32con.WS_VISIBLE
    # Styles for the embedded tree control.
    cs = (
        win32con.WS_CHILD |
        win32con.WS_VISIBLE |
        commctrl.TVS_HASLINES |
        commctrl.TVS_LINESATROOT |
        commctrl.TVS_HASBUTTONS
    )
    # Dialog template: caption line plus a SysTreeView32 filling the client area.
    dt = [
        ["Python Projects", (0, 0, 200, 200), style, None, (8, "MS Sans Serif")],
        ["SysTreeView32", None, win32ui.IDC_LIST1, (0, 0, 200, 200), cs]
    ]
    def __init__ (self, hli_root):
        dialog.Dialog.__init__ (self, self.dt)
        self.hier_list = hierlist.HierListWithItems (
            hli_root,
            win32ui.IDB_BROWSER_HIER
        )
        # Keep the tree control sized to the dialog on resize.
        self.HookMessage (self.on_size, win32con.WM_SIZE)
    def OnInitDialog (self):
        self.hier_list.HierInit (self)
        return dialog.Dialog.OnInitDialog (self)
    def on_size (self, params):
        # params[3] is the WM_SIZE lParam: LOWORD=width, HIWORD=height.
        lparam = params[3]
        w = win32api.LOWORD(lparam)
        h = win32api.HIWORD(lparam)
        self.GetDlgItem (win32ui.IDC_LIST1).MoveWindow((0,0,w,h))
def BrowseDialog():
    """Open the project browser in a standalone dialog window."""
    root = HLIRoot()
    if not root.IsExpandable():
        # Parenthesised raise works on both Python 2 and 3; the original
        # ``raise TypeError, "..."`` statement is Python-2-only syntax.
        raise TypeError("Browse() argument must have __dict__ attribute, or be a Browser supported type")
    dlg = dynamic_browser (root)
    dlg.CreateWindow()
def DockableBrowserCreator(parent):
    """Create and initialise the tree control for the dockable path browser."""
    root = HLIRoot()
    hl = hierlist.HierListWithItems (
        root,
        win32ui.IDB_BROWSER_HIER
    )
    style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_BORDER | commctrl.TVS_HASLINES | commctrl.TVS_LINESATROOT | commctrl.TVS_HASBUTTONS
    control = win32ui.CreateTreeCtrl()
    control.CreateWindow(style, (0, 0, 150, 300), parent, win32ui.IDC_LIST1)
    list = hl.HierInit (parent, control)
    return control
def DockablePathBrowser():
    """Show the path browser in a dockable bar attached to the main frame."""
    import pywin.docking.DockingBar
    bar = pywin.docking.DockingBar.DockingBar()
    # 0x8e0a is the child-window ID assigned to the bar.
    bar.CreateWindow(win32ui.GetMainFrame(), DockableBrowserCreator, "Path Browser", 0x8e0a)
    bar.SetBarStyle( bar.GetBarStyle()|afxres.CBRS_TOOLTIPS|afxres.CBRS_FLYBY|afxres.CBRS_SIZE_DYNAMIC)
    bar.EnableDocking(afxres.CBRS_ALIGN_ANY)
    win32ui.GetMainFrame().DockControlBar(bar)

# The "default" entry point
Browse = DockablePathBrowser
| epl-1.0 |
markmichaelrichter/Heroku_Buildpack | test/django-1.3-skeleton/haystack/settings.py | 81 | 5035 | # Django settings for haystack project.
# Standard Django 1.3 settings skeleton for the "haystack" test project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG

ADMINS = (
    # ('Your Name', 'your_email@example.com'),
)

MANAGERS = ADMINS

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': '',                      # Or path to database file if using sqlite3.
        'USER': '',                      # Not used with sqlite3.
        'PASSWORD': '',                  # Not used with sqlite3.
        'HOST': '',                      # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '',                      # Set to empty string for default. Not used with sqlite3.
    }
}

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True

# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''

# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''

# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'

# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'

# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)

# Make this unique, and don't share it with anybody.
# NOTE(review): a committed SECRET_KEY is acceptable only for a test
# skeleton; real deployments must override it.
SECRET_KEY = '@$87s&royz$nvav^3*$4u6^htybq*o=ge504rqp7r2)@ec*g(3'

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    # 'django.template.loaders.eggs.Loader',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
)

ROOT_URLCONF = 'haystack.urls'

TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Uncomment the next line to enable the admin:
    # 'django.contrib.admin',
    # Uncomment the next line to enable admin documentation:
    # 'django.contrib.admindocs',
)

# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
| mit |
qrkourier/ansible | lib/ansible/plugins/strategy/debug.py | 20 | 5917 | # This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
'''
DOCUMENTATION:
strategy: debug
short_description: Executes tasks in interactive debug session.
description:
- Task execution is 'linear' but controlled by an interactive debug session.
version_added: "2.1"
author: Kishin Yagami
'''
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import cmd
import pprint
import sys
from ansible.module_utils.six.moves import reduce
from ansible.plugins.strategy.linear import StrategyModule as LinearStrategyModule
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class NextAction(object):
    """Outcome chosen when the interactive debugger prompt is left.

    One of REDO (re-run the failed task), CONTINUE (carry on with the
    play) or EXIT (abort the run, which is the default).
    """

    REDO = 1
    CONTINUE = 2
    EXIT = 3

    def __init__(self, result=EXIT):
        # Default to aborting unless the debugger explicitly selects
        # another outcome.
        self.result = result
class StrategyModule(LinearStrategyModule):
    """Linear strategy that drops into an interactive debugger whenever a
    task result is failed or unreachable, letting the user redo the task,
    continue the play, or abort the run."""
    def __init__(self, tqm):
        self.curr_tqm = tqm
        super(StrategyModule, self).__init__(tqm)

    def _queue_task(self, host, task, task_vars, play_context):
        # Remember the most recently queued task so it can be re-queued
        # when the user chooses "redo".
        self.curr_host = host
        self.curr_task = task
        self.curr_task_vars = task_vars
        self.curr_play_context = play_context
        super(StrategyModule, self)._queue_task(host, task, task_vars, play_context)

    def _process_pending_results(self, iterator, one_pass=False, max_passes=None):
        if not hasattr(self, "curr_host"):
            # Nothing has been queued yet, so there is nothing to debug.
            return super(StrategyModule, self)._process_pending_results(iterator, one_pass, max_passes)
        # Snapshot the host state so a "redo" can roll back to it.
        prev_host_state = iterator.get_host_state(self.curr_host)
        results = super(StrategyModule, self)._process_pending_results(iterator, one_pass)
        while self._need_debug(results):
            next_action = NextAction()
            dbg = Debugger(self, results, next_action)
            dbg.cmdloop()
            if next_action.result == NextAction.REDO:
                # rollback host state
                self.curr_tqm.clear_failed_hosts()
                iterator._host_states[self.curr_host.name] = prev_host_state
                # Undo the failure/unreachable counter bumped by this run.
                if reduce(lambda total, res: res.is_failed() or total, results, False):
                    self._tqm._stats.failures[self.curr_host.name] -= 1
                elif reduce(lambda total, res: res.is_unreachable() or total, results, False):
                    self._tqm._stats.dark[self.curr_host.name] -= 1
                # redo
                super(StrategyModule, self)._queue_task(self.curr_host, self.curr_task, self.curr_task_vars, self.curr_play_context)
                results = super(StrategyModule, self)._process_pending_results(iterator, one_pass)
            elif next_action.result == NextAction.CONTINUE:
                break
            elif next_action.result == NextAction.EXIT:
                # Abort the whole ansible run with a non-zero exit code.
                exit(1)
        return results

    def _need_debug(self, results):
        # True when any result in the batch failed or was unreachable.
        return reduce(lambda total, res: res.is_failed() or res.is_unreachable() or total, results, False)
class Debugger(cmd.Cmd):
    """Interactive prompt shown on task failure.

    Exposes the failed task, its vars, host and result via ``self.scope``
    and stores the chosen follow-up action (redo/continue/exit) in the
    shared NextAction object. Note: do_* methods deliberately use plain
    comments rather than docstrings, since cmd.Cmd surfaces docstrings
    as its built-in help text.
    """
    prompt = '(debug) '  # debugger
    prompt_continuous = '> '  # multiple lines

    def __init__(self, strategy_module, results, next_action):
        # cmd.Cmd is old-style class
        cmd.Cmd.__init__(self)

        self.intro = "Debugger invoked"
        # Names made visible to user-entered expressions/statements.
        self.scope = {}
        self.scope['task'] = strategy_module.curr_task
        self.scope['vars'] = strategy_module.curr_task_vars
        self.scope['host'] = strategy_module.curr_host
        self.scope['result'] = results[0]._result
        self.scope['results'] = results  # for debug of this debugger
        self.next_action = next_action

    def cmdloop(self):
        try:
            cmd.Cmd.cmdloop(self)
        except KeyboardInterrupt:
            # Ctrl-C just leaves the prompt without selecting an action.
            pass

    def do_EOF(self, args):
        # Treat end-of-file (Ctrl-D) the same as "quit".
        return self.do_quit(args)

    def do_quit(self, args):
        # Abort the whole run.
        display.display('aborted')
        self.next_action.result = NextAction.EXIT
        return True

    do_q = do_quit

    def do_continue(self, args):
        # Ignore the failure and continue with the play.
        self.next_action.result = NextAction.CONTINUE
        return True

    do_c = do_continue

    def do_redo(self, args):
        # Re-run the failed task.
        self.next_action.result = NextAction.REDO
        return True

    do_r = do_redo

    def evaluate(self, args):
        # Evaluate an expression inside the debugger scope, echoing and
        # re-raising any error so callers can decide how to react.
        try:
            return eval(args, globals(), self.scope)
        except:
            t, v = sys.exc_info()[:2]
            if isinstance(t, str):
                # Python 2 string exceptions have no __name__ attribute.
                exc_type_name = t
            else:
                exc_type_name = t.__name__
            display.display('***%s:%s' % (exc_type_name, repr(v)))
            raise

    def do_p(self, args):
        # Pretty-print the value of an expression.
        try:
            result = self.evaluate(args)
            display.display(pprint.pformat(result))
        except:
            pass

    def execute(self, args):
        # Compile and run a statement inside the debugger scope, echoing
        # and re-raising any error.
        try:
            code = compile(args + '\n', '<stdin>', 'single')
            exec(code, globals(), self.scope)
        except:
            t, v = sys.exc_info()[:2]
            if isinstance(t, str):
                exc_type_name = t
            else:
                exc_type_name = t.__name__
            display.display('***%s:%s' % (exc_type_name, repr(v)))
            raise

    def default(self, line):
        # Any unrecognised input is executed as Python in the task scope.
        try:
            self.execute(line)
        except:
            pass
| gpl-3.0 |
thoughtpalette/thoughts.thoughtpalette.com | node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/build-2.7/pygments/styles/friendly.py | 364 | 2515 | # -*- coding: utf-8 -*-
"""
pygments.styles.friendly
~~~~~~~~~~~~~~~~~~~~~~~~
A modern style based on the VIM pyte theme.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace
class FriendlyStyle(Style):
    """
    A modern style based on the VIM pyte theme.
    """

    background_color = "#f0f0f0"
    default_style = ""

    # Token-type -> style-definition map; values use the Pygments style
    # mini-language ("bold", "italic", "noitalic", "bg:#rrggbb", "#rrggbb").
    styles = {
        Whitespace:                "#bbbbbb",
        Comment:                   "italic #60a0b0",
        Comment.Preproc:           "noitalic #007020",
        Comment.Special:           "noitalic bg:#fff0f0",

        Keyword:                   "bold #007020",
        Keyword.Pseudo:            "nobold",
        Keyword.Type:              "nobold #902000",

        Operator:                  "#666666",
        Operator.Word:             "bold #007020",

        Name.Builtin:              "#007020",
        Name.Function:             "#06287e",
        Name.Class:                "bold #0e84b5",
        Name.Namespace:            "bold #0e84b5",
        Name.Exception:            "#007020",
        Name.Variable:             "#bb60d5",
        Name.Constant:             "#60add5",
        Name.Label:                "bold #002070",
        Name.Entity:               "bold #d55537",
        Name.Attribute:            "#4070a0",
        Name.Tag:                  "bold #062873",
        Name.Decorator:            "bold #555555",

        String:                    "#4070a0",
        String.Doc:                "italic",
        String.Interpol:           "italic #70a0d0",
        String.Escape:             "bold #4070a0",
        String.Regex:              "#235388",
        String.Symbol:             "#517918",
        String.Other:              "#c65d09",

        Number:                    "#40a070",

        Generic.Heading:           "bold #000080",
        Generic.Subheading:        "bold #800080",
        Generic.Deleted:           "#A00000",
        Generic.Inserted:          "#00A000",
        Generic.Error:             "#FF0000",
        Generic.Emph:              "italic",
        Generic.Strong:            "bold",
        Generic.Prompt:            "bold #c65d09",
        Generic.Output:            "#888",
        Generic.Traceback:         "#04D",

        Error:                     "border:#FF0000"
    }
| mit |
Commonists/SurfaceImageContentGap | surfaceimagecontentgap/rc.py | 1 | 2812 | from argparse import ArgumentParser
import datetime
import time
from surfaceimagecontentgap.imagegap import isthereanimage
from surfaceimagecontentgap.bot import SurfaceContentGapBot
def last_rc_time(site):
    """Return the UTC datetime of the wiki's most recent change.

    :param site: mwclient-style site object exposing ``recentchanges()``,
        whose entries carry a ``time.struct_time`` under ``'timestamp'``.
    """
    rc = site.recentchanges()
    # The next() builtin works on both Python 2 and 3 iterators, unlike
    # the Python-2-only ``rc.next()`` method call used previously.
    last_rev = next(rc)
    return datetime.datetime \
        .utcfromtimestamp(time.mktime(last_rev['timestamp']))
def previoushour(dt):
    """Return the datetime exactly one hour before *dt*."""
    return dt - datetime.timedelta(hours=1)
def previousday(dt):
    """Return the datetime exactly one day before *dt*."""
    return dt - datetime.timedelta(days=1)
def rc_from(site, dt):
    """Collect recent changes back to *dt*.

    Returns a dict with 'list_revisions' (every change entry) and
    'list_pages' (UTF-8 encoded page titles, de-duplicated, main
    namespace only).
    """
    revisions = []
    titles = []
    changes = site.recentchanges(end=dt.strftime('%Y%m%d%H%M%S'),
                                 namespace=0)
    for change in changes:
        revisions.append(change)
        encoded_title = change['title'].encode('utf-8')
        if encoded_title not in titles:
            titles.append(encoded_title)
    return {
        'list_revisions': revisions,
        'list_pages': titles
    }
def articles_from_titles(site, titles):
    """Resolve UTF-8 encoded titles into page objects via ``site.Pages``."""
    articles = []
    for title in titles:
        articles.append(site.Pages[title.decode('utf-8')])
    return articles
def list_articles(bot):
    """Return the articles touched during the last hour of recent changes."""
    site = bot.site
    # Stop one hour before the newest recorded change.
    cutoff = previoushour(last_rc_time(site))
    changes = rc_from(site, cutoff)
    return articles_from_titles(site, changes['list_pages'])
def main():
    """Command-line entry point: parse options and run the recent-changes bot."""
    parser = ArgumentParser(
        description='Analyzing Wikipedia to surface image content gap (rc).')
    parser.add_argument('-w', '--wikipedia',
                        type=str,
                        dest='lang',
                        required=False,
                        default='fr',
                        help='Language code for Wikipedia')
    parser.add_argument('-r', '--report',
                        type=str,
                        dest='report',
                        required=True,
                        help='Page name to write a report.')
    parser.add_argument('-f', '--configfile',
                        type=str,
                        dest='config',
                        required=True,
                        help='Config file with login and password.')
    args = parser.parse_args()
    bot_options = {
        'config_file': args.config,
        'lang': args.lang,
        'report': args.report,
        'list_fun': list_articles,
        # Keep only pages without an image; no ranking, poll every minute.
        'filter_fun': lambda bot, x: not isthereanimage(x),
        'rank_fun': lambda bot, x: 0,
        'frequency': 60,
    }
    SurfaceContentGapBot(**bot_options).run()


if __name__ == '__main__':
    main()
| mit |
vitalyvolkov/fontbakery | bakery/project/views.py | 1 | 16523 | # coding: utf-8
# Copyright 2013 The Font Bakery Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# See AUTHORS.txt for the list of Authors and LICENSE.txt for the License.
# pylint:disable-msg=E1101
from flask import (Blueprint, render_template, g, flash, request,
url_for, redirect, json, Markup, current_app, abort, make_response)
from flask.ext.babel import gettext as _
from ..decorators import login_required
from ..utils import project_fontaine
from .models import Project, ProjectBuild
from functools import wraps
import itsdangerous
project = Blueprint('project', __name__, url_prefix='/project')
# Subsets offered by default on the setup page: the 'menu' subset plus
# latin-based script combinations.
DEFAULT_SUBSET_LIST = [
    'menu', 'latin', 'latin-ext+latin', 'cyrillic+latin', 'cyrillic-ext+latin',
    'greek+latin', 'greek-ext+latin', 'vietnamese+latin']
def chkhash(hashstr):
    """Abort the request with HTTP 500 unless *hashstr* is valid hexadecimal."""
    is_hex = True
    try:
        int(hashstr, 16)
    except ValueError:
        is_hex = False
    if not is_hex:
        flash(_('Error in provided data'))
        abort(500)
@project.before_request
def before_request():
    # Expose the current user's project list on ``g`` for every request
    # handled by this blueprint (used by templates/views).
    if g.user:
        g.projects = Project.query.filter_by(login=g.user.login).all()
# project resolve decorator
def project_required(f):
    """ Decorator reads project_id from arguments list and resolve it into project object.
        In parallel it check if project object is ready Usage:

        @project.route('/test', methods=['GET'])
        @project_required
        def test(p):
            # p is Project model instance
            return "Project is available"

    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        # ``args`` arrives as a tuple; convert it to a list up front so
        # the positional ``pop(0)`` below works (tuples have no pop()).
        args = list(args)
        if 'project_id' in kwargs:
            project_id = kwargs.pop('project_id')
        else:
            project_id = args.pop(0)
        p = Project.query.filter_by(
            login=g.user.login, id=project_id).first_or_404()
        # Here can be located ownership access checks in the future.
        if p.is_ready:
            args.insert(0, p)
            return f(*args, **kwargs)
        else:
            flash(_('Project is being synchronized, wait until it is done'))
            return redirect(url_for('frontend.splash'))

    return decorated_function
# API methods
@project.route('/api/<int:project_id>/build', methods=['GET'])
@login_required
@project_required
def build(p):
    """ Revision id is dangerous parameter, because it added to command line to
    git call. That is why it always should be signed with hash.
    """
    if not p.config['local'].get('setup'):
        flash(_("Complete setup first"))
        return redirect(url_for('project.setup', project_id=p.id))

    if request.args.get('revision'):
        # Unsigning verifies the revision string was issued by this app
        # and not forged by the client.
        signer = itsdangerous.Signer(current_app.secret_key)
        revision = signer.unsign(request.args.get('revision'))

        build = ProjectBuild.make_build(p, revision)
    else:
        build = ProjectBuild.make_build(p, 'HEAD')

    flash(Markup(_("Updated repository (<a href='%(repo)s'>see files</a>) Next step: <a href='%(step)s'>set it up</a>",
                   repo=url_for('project.ufiles', project_id=p.id), step=url_for('project.setup', project_id=p.id))))
    return redirect(url_for('project.log', project_id=p.id, build_id=build.id))
@project.route('/api/<int:project_id>/pull', methods=['GET'])
@login_required
# this is only exception where decorator @project_required is not needed
def pull(project_id):
    # Resolved manually because pulling must work even before the project
    # is marked ready (which @project_required would reject).
    p = Project.query.filter_by(
        login=g.user.login, id=project_id).first_or_404()

    p.sync()

    flash(_("Changes will be pulled from upstream in a moment"))
    return redirect(url_for('project.git', project_id=p.id))
# Setup views
@project.route('/<int:project_id>/setup', methods=['GET', 'POST'])
@login_required
@project_required
def setup(p):
    """Show (GET) or validate and save (POST) the project setup form."""
    config = p.config
    # NOTE(review): this is the same object as ``config``, so the
    # ``originalConfig != config`` check below can never be True — a
    # deep copy would be needed to detect changes. Left as-is.
    originalConfig = p.config
    error = False
    if request.method == 'GET':
        return render_template('project/setup.html', project=p)

    # License file must be one of the text files found in the repo.
    if not request.form.get('license_file') in config['local']['txt_files']:
        error = True
        flash(_("Please select the license file"))
    config['state']['license_file'] = request.form.get('license_file')

    if request.form.get('familyname'):
        if len(request.form.get('familyname')) > 0:
            config['state']['familyname'] = request.form.get('familyname')
    else:
        if 'familyname' in config['state']:
            config['state'].pop('familyname')

    # A primary source type is only required when both UFO and TTX
    # sources exist in the repository.
    if config['local']['ufo_dirs'] and config['local']['ttx_files']:
        if request.form.get('source_files_type'):
            if request.form.get('source_files_type') in ['ttx', 'ufo']:
                config['state']['source_files_type'] = request.form.get('source_files_type')
            else:
                config['state'].pop('source_files_type')
        else:
            error = True
            flash(_('Select UFO or TTX as primary source'))

    txt_files_to_copy = request.form.getlist('txt_files')
    config['state']['txt_files_copied'] = txt_files_to_copy

    # XXX: unsure should it be local or state property
    process_files = request.form.getlist('process_files')
    config['state']['process_files'] = process_files

    subset_list = request.form.getlist('subset')
    for i in subset_list:
        if i not in dict(p.get_subsets()).keys():
            error = True
            flash(_('Subset value is wrong'))
    # BUGFIX: was ``len(subset_list) < 0`` which is always False, so the
    # "at least one subset" validation never triggered.
    if len(subset_list) == 0:
        error = True
        flash(_("Select at least one subset from list"))
    config['state']['subset'] = subset_list

    if request.form.get('ttfautohint'):
        if len(request.form.get('ttfautohint')) > 0:
            config['state']['ttfautohint'] = request.form.get('ttfautohint')
    else:
        if 'ttfautohint' in config['state']:
            config['state'].pop('ttfautohint')

    if error:
        return render_template('project/setup.html', project=p)

    if originalConfig != config:
        flash(_("Setup updated"))

    config['local']['setup'] = True
    p.save_state()
    if request.form.get('bake'):
        p.save_state()
        return redirect(url_for('project.build', project_id=p.id))
    else:
        flash(_("Setup saved"))
        return redirect(url_for('project.setup', project_id=p.id))
@project.route('/<int:project_id>/setup/dashboard_save', methods=['POST'])
@login_required
@project_required
def dashboard_save(p):
    """Save or clear arbitrary project state keys posted from the dashboard."""
    if not p.is_ready:
        return redirect(url_for('project.log', project_id=p.id))

    for item in request.form:
        if request.form.get(item):
            if len(request.form.get(item)) > 0:
                # Non-empty value: set/overwrite the state key.
                p.config['state'][item] = request.form.get(item)
                flash(_('Set %(item)s', item=item))
        else:
            # Empty value: remove the key if it was previously set.
            if item in p.config['state']:
                del p.config['state'][item]
                flash(_('Unset %(item)s', item=item))

    p.save_state()
    return redirect(url_for('project.setup', project_id=p.id))
# File browser views
@project.route('/<int:project_id>/files/', methods=['GET'])
@project.route('/<int:project_id>/files/<revision>/', methods=['GET'])
@login_required
@project_required
def ufiles(p, revision=None, name=None):
    """Render the repository file browser at the given revision."""
    # this page can be visible by others, not only by owner
    # TODO consider all pages for that
    if revision and revision != 'HEAD':
        # Any explicit revision must be a hex hash; otherwise abort(500).
        chkhash(revision)
    else:
        revision = 'HEAD'

    return render_template('project/ufiles.html', project=p,
                           revision=revision)
@project.route('/<int:project_id>/files/<revision>/<path:name>', methods=['GET'])
@login_required
@project_required
def ufile(p, revision=None, name=None):
    """Render a single repository file at the given revision."""
    # this page can be visible by others, not only by owner
    # TODO consider all pages for that
    if revision and revision != 'HEAD':
        # Any explicit revision must be a hex hash; otherwise abort(500).
        chkhash(revision)
    else:
        revision = 'HEAD'

    mime, data = p.revision_file(revision, name)

    return render_template('project/ufile.html', project=p,
                           revision=revision, name=name, mime=mime, data=data)
@project.route('/<int:project_id>/files/<revision>/blob', methods=['GET'])
@login_required
@project_required
def ufileblob(p, revision=None):
    """ Mandatory parameter is `name` signed by cypher hash on server side.
    This view is pretty much "heavy", each request spawn additional process and
    read its output.
    """
    if revision and revision != 'HEAD':
        chkhash(revision)
    else:
        revision = 'HEAD'

    # The name is only accepted when it carries a valid server-side
    # signature, preventing clients from requesting arbitrary paths.
    signer = itsdangerous.Signer(current_app.secret_key)
    name = signer.unsign(request.args.get('name'))

    mime, data = p.revision_file(revision, name)

    if mime.startswith('image'):
        # Only images are served as downloadable blobs.
        response = make_response(data)
        response.headers['Content-Type'] = mime
        response.headers['Content-Disposition'] = 'attachment; filename=%s' % name
        return response
    else:
        abort(500)
# Builds views
@project.route('/<int:project_id>/build', methods=['GET'])
@login_required
@project_required
def history(p):
    """ Results of processing tests, for ttf files """
    # Newest builds first.
    b = ProjectBuild.query.filter_by(project=p).order_by("id desc").all()
    return render_template('project/history.html', project=p, builds=b)
@project.route('/<int:project_id>/build/<int:build_id>/log', methods=['GET'])
@login_required
@project_required
def log(p, build_id):
    """Show the live process log for one build."""
    b = ProjectBuild.query.filter_by(id=build_id, project=p).first_or_404()
    param = {'login': p.login, 'id': p.id, 'revision': b.revision, 'build': b.id}
    # Path of the build's process log inside the user's output directory.
    log_file = "%(login)s/%(id)s.out/%(build)s.%(revision)s.process.log" % param
    return render_template('project/log.html', project=p, build=b, log_file=log_file)
@project.route('/<int:project_id>/build/<int:build_id>/rfiles', methods=['GET'])
@login_required
@project_required
def rfiles(p, build_id):
    """Show the resulting files of a finished build, with Fontaine coverage."""
    b = ProjectBuild.query.filter_by(id=build_id, project=p).first_or_404()
    if not b.is_done:
        # Build still running - send the user to the live log instead.
        return redirect(url_for('project.log', project_id=p.id, build_id=b.id))

    yaml = p.read_asset('yaml')
    f = project_fontaine(p, b)
    tree = b.files()
    return render_template('project/rfiles.html', project=p, yaml=yaml,
                           fontaineFonts=f, build=b, tree=tree)
@project.route('/<int:project_id>/build/<int:build_id>/tests', methods=['GET'])
@login_required
@project_required
def rtests(p, build_id):
    """ Results of processing tests, for ttf files """
    b = ProjectBuild.query.filter_by(id=build_id, project=p).first_or_404()
    if not p.is_ready:
        return redirect(url_for('project.log', project_id=p.id))

    test_result = b.result_tests()
    summary = {
        'all_tests': sum([int(y.get('sum', 0)) for x, y in test_result.items()]),
        'fonts': test_result.keys(),
        'all_error': sum([len(x.get('error', [])) for x in test_result.values()]),
        'all_failure': sum([len(x.get('failure', [])) for x in test_result.values()]),
        'all_fixed': sum([len(x.get('fixed', [])) for x in test_result.values()]),
        'all_success': sum([len(x.get('success', [])) for x in test_result.values()]),
        # BUGFIX: the loop clauses must bind (font, results) *before*
        # iterating that font's failures — the original order referenced
        # ``x`` before it was bound. Also use .get() like summary() does,
        # so fonts without failures don't raise KeyError.
        'fix_asap': [dict(font=y, **t)
                     for y, x in test_result.items()
                     for t in x.get('failure', [])
                     if 'required' in t['tags']],
    }
    return render_template('project/rtests.html', project=p,
                           tests=test_result, build=b, summary=summary)
@project.route('/<int:project_id>/build/<int:build_id>/', methods=['GET'])
@login_required
@project_required
def summary(p, build_id):
    """ Results of processing tests, for ttf files """
    b = ProjectBuild.query.filter_by(id=build_id, project=p).first_or_404()
    if not p.is_ready:
        return redirect(url_for('project.log', project_id=p.id))

    test_result = b.result_tests()
    summary = {
        'all_tests': sum([int(y.get('sum', 0)) for x, y in test_result.items()]),
        'fonts': test_result.keys(),
        'all_error': sum([len(x.get('error', [])) for x in test_result.values()]),
        'all_failure': sum([len(x.get('failure', [])) for x in test_result.values()]),
        'all_fixed': sum([len(x.get('fixed', [])) for x in test_result.values()]),
        'all_success': sum([len(x.get('success', [])) for x in test_result.values()]),
        # BUGFIX: the loop clauses must bind (font, results) *before*
        # iterating that font's failures — the original order referenced
        # ``x`` before it was bound.
        'fix_asap': [dict(font=y, **t)
                     for y, x in test_result.items()
                     for t in x.get('failure', [])
                     if 'required' in t['tags']],
    }
    return render_template('project/summary.html', project=p,
                           tests=test_result, build=b, summary=summary)
@project.route('/<int:project_id>/build/<int:build_id>/description', methods=['GET', 'POST'])
@login_required
@project_required
def description(p, build_id):
    """ Description file management """
    b = ProjectBuild.query.filter_by(id=build_id, project=p).first_or_404()

    if request.method == 'GET':
        # Show the current description for editing.
        data = b.read_asset('description')
        return render_template('project/description.html', project=p, build=b,
                               description=data)

    # POST
    b.save_asset('description', request.form.get('description'))
    flash(_('Description saved'))
    return redirect(url_for('project.description', build_id=b.id, project_id=p.id))
@project.route('/<int:project_id>/build/<int:build_id>/metadatajson', methods=['GET', 'POST'])
@login_required
@project_required
def metadatajson(p, build_id):
    """View (GET) or validate-and-save (POST) the build's METADATA.json."""
    b = ProjectBuild.query.filter_by(id=build_id, project=p).first_or_404()

    if request.method == 'GET':
        metadata = b.read_asset('metadata')
        metadata_new = b.read_asset('metadata_new')
        return render_template('project/metadatajson.html', project=p, build=b,
                               metadata=metadata, metadata_new=metadata_new)

    # POST
    try:
        # this line trying to parse json
        json.loads(request.form.get('metadata'))
        b.save_asset('metadata', request.form.get('metadata'),
                     del_new=request.form.get('delete', None))
        flash(_('METADATA.json saved'))
        return redirect(url_for('project.metadatajson', project_id=p.id, build_id=b.id))
    except ValueError:
        # Invalid JSON: re-render the edit form with the rejected content.
        flash(_('Wrong format for METADATA.json file'))
        metadata_new = b.read_asset('metadata_new')
        return render_template('project/metadatajson.html', project=p, build=b,
                               metadata=request.form.get('metadata'),
                               metadata_new=metadata_new)
# Base views
@project.route('/<int:project_id>/tests/<revision>', methods=['GET'])
@login_required
@project_required
def utests(p, revision):
    """ Results of processing tests, for ufo files """
    if not p.is_ready:
        # Project still building: send the user to the build log instead.
        return redirect(url_for('project.log', project_id=p.id))
    results = p.revision_tests(revision)
    return render_template('project/utests.html', project=p, revision=revision,
                           tests=results)
@project.route('/<int:project_id>/git', methods=['GET'])
@login_required
@project_required
def git(p):
    """ Results of processing tests, for ttf files """
    # Render the project's git history.
    return render_template('project/gitlog.html', project=p, log=p.gitlog())
@project.route('/<int:project_id>/diff', methods=['GET'])
@login_required
@project_required
def diff(p):
    """Show the diff between two revisions.

    Since we want this view to be user friendly we cannot trust the
    ``left``/``right`` query arguments and must validate them before use.
    Invalid or missing input redirects back to the git log.
    """
    if not all([request.args.get('left'), request.args.get('right')]):
        flash(_("Left and right hash for comparsion should be provided"))
        # BUG FIX: previously execution fell through here, so a missing hash
        # reached int(None, 16) below and raised an uncaught TypeError.
        return redirect(url_for('project.git', project_id=p.id))
    try:
        left = request.args.get('left')
        right = request.args.get('right')
        # Hashes must be hexadecimal; int(..., 16) raises ValueError otherwise.
        int(left, 16)
        int(right, 16)
    except ValueError:
        flash(_('Error in provided data'))
        return redirect(url_for('project.git', project_id=p.id))
    diffdata = p.diff_files(left, right)
    return render_template('project/diff.html', project=p,
                           diff=diffdata, left=left, right=right)
| apache-2.0 |
WildfireDEV/android_kernel_samsung_s6 | tools/perf/scripts/python/check-perf-trace.py | 11214 | 2503 | # perf script event handlers, generated by perf script -g python
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# This script tests basic functionality such as flag and symbol
# strings, common_xxx() calls back into perf, begin, end, unhandled
# events, etc. Basically, if this script runs successfully and
# displays expected results, Python scripting support should be ok.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Core import *
from perf_trace_context import *
unhandled = autodict()
def trace_begin():
    # Called by perf once before any events are processed.
    print "trace_begin"
    pass
def trace_end():
    # Called by perf after the last event; report events with no handler.
    print_unhandled()
def irq__softirq_entry(event_name, context, common_cpu,
                       common_secs, common_nsecs, common_pid, common_comm,
                       vec):
    # Handler for the irq:softirq_entry tracepoint.
    print_header(event_name, common_cpu, common_secs, common_nsecs,
                 common_pid, common_comm)
    print_uncommon(context)
    # symbol_str() resolves the numeric softirq vector to its symbolic name.
    print "vec=%s\n" % \
        (symbol_str("irq__softirq_entry", "vec", vec)),
def kmem__kmalloc(event_name, context, common_cpu,
                  common_secs, common_nsecs, common_pid, common_comm,
                  call_site, ptr, bytes_req, bytes_alloc,
                  gfp_flags):
    # Handler for the kmem:kmalloc tracepoint.
    print_header(event_name, common_cpu, common_secs, common_nsecs,
                 common_pid, common_comm)
    print_uncommon(context)
    # flag_str() renders the gfp_flags bitmask as symbolic flag names.
    print "call_site=%u, ptr=%u, bytes_req=%u, " \
        "bytes_alloc=%u, gfp_flags=%s\n" % \
        (call_site, ptr, bytes_req, bytes_alloc,
        flag_str("kmem__kmalloc", "gfp_flags", gfp_flags)),
def trace_unhandled(event_name, context, event_fields_dict):
    # Count events that have no dedicated handler.  `unhandled` is a perf
    # autodict: the first lookup auto-creates an (empty) autodict value, so
    # `+= 1` raises TypeError, which is used to initialise the counter to 1.
    try:
        unhandled[event_name] += 1
    except TypeError:
        unhandled[event_name] = 1
def print_header(event_name, cpu, secs, nsecs, pid, comm):
    # One-line prefix shared by all handlers; the trailing comma suppresses
    # the newline so the caller can append event-specific fields.
    print "%-20s %5u %05u.%09u %8u %-20s " % \
        (event_name, cpu, secs, nsecs, pid, comm),
# print trace fields not included in handler args
def print_uncommon(context):
    # These common fields are not passed as handler arguments; they are
    # fetched back from perf through the opaque context object.
    print "common_preempt_count=%d, common_flags=%s, common_lock_depth=%d, " \
        % (common_pc(context), trace_flag_str(common_flags(context)), \
        common_lock_depth(context))
def print_unhandled():
    # Print an "event -> count" table for events that had no handler.
    keys = unhandled.keys()
    if not keys:
        return
    print "\nunhandled events:\n\n",
    print "%-40s %10s\n" % ("event", "count"),
    print "%-40s %10s\n" % ("----------------------------------------", \
                            "-----------"),
    for event_name in keys:
        print "%-40s %10d\n" % (event_name, unhandled[event_name])
| gpl-2.0 |
robbiet480/home-assistant | homeassistant/components/input_select/reproduce_state.py | 16 | 2636 | """Reproduce an Input select state."""
import asyncio
import logging
from types import MappingProxyType
from typing import Any, Dict, Iterable, Optional
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import Context, State
from homeassistant.helpers.typing import HomeAssistantType
from . import (
ATTR_OPTION,
ATTR_OPTIONS,
DOMAIN,
SERVICE_SELECT_OPTION,
SERVICE_SET_OPTIONS,
)
ATTR_GROUP = [ATTR_OPTION, ATTR_OPTIONS]
_LOGGER = logging.getLogger(__name__)
async def _async_reproduce_state(
    hass: HomeAssistantType,
    state: State,
    *,
    context: Optional[Context] = None,
    reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
    """Reproduce a single state."""
    current = hass.states.get(state.entity_id)

    # Nothing to do for an entity that does not exist.
    if current is None:
        _LOGGER.warning("Unable to find entity %s", state.entity_id)
        return

    # Skip redundant service calls when state and tracked attributes match.
    attrs_match = all(
        check_attr_equal(current.attributes, state.attributes, attr)
        for attr in ATTR_GROUP
    )
    if current.state == state.state and attrs_match:
        return

    service_data = {ATTR_ENTITY_ID: state.entity_id}

    # Restore the option list first (if present) so the selected option
    # below is guaranteed to be valid.
    if ATTR_OPTIONS in state.attributes:
        await hass.services.async_call(
            DOMAIN,
            SERVICE_SET_OPTIONS,
            {**service_data, ATTR_OPTIONS: state.attributes[ATTR_OPTIONS]},
            context=context,
            blocking=True,
        )

    # Finally select the stored option.
    service_data[ATTR_OPTION] = state.state
    await hass.services.async_call(
        DOMAIN, SERVICE_SELECT_OPTION, service_data, context=context, blocking=True
    )
async def async_reproduce_states(
    hass: HomeAssistantType,
    states: Iterable[State],
    *,
    context: Optional[Context] = None,
    reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
    """Reproduce Input select states."""
    # Fan out so all states are reproduced in parallel.
    tasks = [
        _async_reproduce_state(
            hass, state, context=context, reproduce_options=reproduce_options
        )
        for state in states
    ]
    await asyncio.gather(*tasks)
def check_attr_equal(
    attr1: MappingProxyType, attr2: MappingProxyType, attr_str: str
) -> bool:
    """Return true when both mappings hold the same value for *attr_str*.

    A key missing from both mappings counts as equal (both sides are None).
    """
    first = attr1.get(attr_str)
    second = attr2.get(attr_str)
    return first == second
| apache-2.0 |
crosswalk-project/crosswalk-test-suite | misc/sampleapp-android-tests/sampleapp/spacedodgegame_manifestorientationscale.py | 14 | 4198 | #!/usr/bin/env python
#
# Copyright (c) 2015 Intel Corporation.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of works must retain the original copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the original copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this work without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors:
# Li, Hao<haox.li@intel.com>
import unittest
import os
import sys
import commands
import comm
from TestApp import *
# Application under test and the Crosswalk package/activity names derived
# from it.
app_name = "Spacedodgegame"
package_name = "org.xwalk." + app_name.lower()
active_name = app_name + "Activity"
# Sample sources packed with the manifest orientation/scale variant.
sample_src = comm.sample_src_pref + "space-dodge-game/manifest-orientation-scale/"
# Shared TestApp handle: created by test_2_install, reused by later tests.
testapp = None
comm.setUp()
class Spacedodgegame(unittest.TestCase):
    """Packs, installs and exercises the Spacedodgegame sample app.

    NOTE(review): the numeric test-name prefixes enforce execution order;
    tests 3-7 depend on the module-level `testapp` created in test_2_install.
    """

    def test_1_pack(self):
        """Pack the sample sources into an apk with crosswalk-app-tools."""
        #clean up old apk
        commands.getstatusoutput("rm %s%s*" % (comm.build_app_dest, "org.xwalk." + app_name.lower()))
        cmd = "%s --crosswalk=%s --platforms=android --android=%s --targets=%s --enable-remote-debugging %s" % \
              (comm.apptools,
               comm.crosswalkzip,
               comm.MODE,
               comm.ARCH,
               sample_src)
        comm.pack(cmd, app_name.lower(), self)

    def test_2_install(self):
        """Install the packed apk, reinstalling if already present."""
        apk_file = commands.getstatusoutput("ls %s| grep %s" % (comm.build_app_dest, app_name.lower()))[1]
        if apk_file.endswith(".apk"):
            global testapp
            testapp = TestApp(comm.device, comm.build_app_dest + apk_file, package_name, active_name)
            if testapp.isInstalled():
                testapp.uninstall()
            self.assertTrue(testapp.install())
        else:
            print("-->> No packed %s apk in %s" % (app_name, comm.build_app_dest))
            self.assertTrue(False)

    def test_3_launch(self):
        """Launch the installed app."""
        if testapp is not None:
            self.assertTrue(testapp.launch())
        else:
            print("-->> Fail to pack %s apk" % app_name)
            self.assertTrue(False)

    def test_4_switch(self):
        """Switch the app to the background and back."""
        if testapp is not None:
            self.assertTrue(testapp.switch())
        else:
            print("-->> Fail to pack %s apk" % app_name)
            self.assertTrue(False)

    def test_5_stop(self):
        """Stop the running app."""
        if testapp is not None:
            self.assertTrue(testapp.stop())
        else:
            print("-->> Fail to pack %s apk" % app_name)
            self.assertTrue(False)

    def test_6_uninstall(self):
        """Uninstall the (stopped) app."""
        if testapp is not None:
            self.assertTrue(testapp.uninstall())
        else:
            print("-->> Fail to pack %s apk" % app_name)
            self.assertTrue(False)

    def test_7_uninstall_when_app_running(self):
        """Uninstalling must succeed even while the app is running."""
        if testapp is not None:
            if not testapp.isInstalled():
                testapp.install()
            if not testapp.isRunning():
                testapp.launch()
            self.assertTrue(testapp.uninstall())
        else:
            print("-->> Fail to pack %s apk" % app_name)
            self.assertTrue(False)
if __name__ == '__main__':
    # Script entry point: run the ordered test sequence above.
    unittest.main()
| bsd-3-clause |
BonexGu/Blik2D-SDK | Blik2D/addon/tensorflow-1.2.1_for_blik/tensorflow/contrib/keras/__init__.py | 60 | 1261 | # -*- coding: utf-8 -*-
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Implementation of the Keras API meant to be a high-level API for TensorFlow.
Detailed documentation and user guides are available at
[keras.io](https://keras.io).
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=wildcard-import
from tensorflow.contrib.keras.api.keras import *
try:
    # Importing then deleting the internal `python` submodule registers it
    # without leaking the name into the public keras namespace.
    from tensorflow.contrib.keras import python  # pylint: disable=g-import-not-at-top
    del python
except ImportError:
    pass
# Scrub the __future__ names so they are not exported as module attributes.
del absolute_import
del division
del print_function
| mit |
AutorestCI/azure-sdk-for-python | azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/models/storage_account_regenerate_key_parameters.py | 4 | 1068 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class StorageAccountRegenerateKeyParameters(Model):
    """The parameters used to regenerate the storage account key.

    :param key_name: The name of storage keys that want to be regenerated,
     possible values are key1, key2.
    :type key_name: str
    """

    # msrest validation rules: key_name is mandatory.
    _validation = {
        'key_name': {'required': True},
    }

    # Maps Python attribute names to wire-format keys and types.
    _attribute_map = {
        'key_name': {'key': 'keyName', 'type': 'str'},
    }

    def __init__(self, key_name):
        super(StorageAccountRegenerateKeyParameters, self).__init__()
        # Name of the key to regenerate ("key1" or "key2").
        self.key_name = key_name
| mit |
ehsan/airmozilla | airmozilla/manage/views/locations.py | 9 | 5376 | import collections
from django import http
from django.contrib import messages
from django.shortcuts import render, redirect, get_object_or_404
from django.db import transaction
from jsonview.decorators import json_view
from airmozilla.main.models import (
Event,
Location,
SuggestedEvent,
LocationDefaultEnvironment
)
from airmozilla.manage import forms
from .decorators import (
staff_required,
permission_required,
cancel_redirect
)
@staff_required
@permission_required('main.change_location')
def locations(request):
    """List every Location with counts of associated (suggested) events."""
    event_counts = collections.defaultdict(int)
    for row in Event.objects.exclude(location__isnull=True).values('location_id'):
        event_counts[row['location_id']] += 1

    suggested_counts = collections.defaultdict(int)
    for row in SuggestedEvent.objects.exclude(location__isnull=True).values('location_id'):
        suggested_counts[row['location_id']] += 1

    context = {
        'locations': Location.objects.all(),
        'associated_events': event_counts,
        'associated_suggested_events': suggested_counts,
    }
    return render(request, 'manage/locations.html', context)
@staff_required
@permission_required('main.change_location')
@cancel_redirect('manage:locations')
@transaction.atomic
def location_edit(request, id):
    """Edit a Location and manage its default template environments.

    Handles three POST variants on one URL: deleting a default-environment
    row ('delete' present), saving the plain location form (no 'default'),
    and saving a default environment ('default' present).
    """
    location = get_object_or_404(Location, id=id)
    if request.method == 'POST' and request.POST.get('delete'):
        # Delete one LocationDefaultEnvironment row belonging to this location.
        LocationDefaultEnvironment.objects.get(
            id=request.POST.get('delete'),
            location=location
        ).delete()
        messages.info(request, 'Configuration deleted.')
        return redirect('manage:location_edit', location.id)
    if request.method == 'POST' and not request.POST.get('default'):
        # Plain location edit form submission.
        form = forms.LocationEditForm(request.POST, instance=location)
        if form.is_valid():
            form.save()
            messages.info(request, 'Location "%s" saved.' % location)
            return redirect('manage:locations')
    else:
        form = forms.LocationEditForm(instance=location)
    if request.method == 'POST' and request.POST.get('default'):
        # Saving a default environment; at most one row per privacy level.
        default_environment_form = forms.LocationDefaultEnvironmentForm(
            request.POST
        )
        if default_environment_form.is_valid():
            fc = default_environment_form.cleaned_data
            if LocationDefaultEnvironment.objects.filter(
                location=location,
                privacy=fc['privacy']
            ):
                # there can only be one of them
                lde = LocationDefaultEnvironment.objects.get(
                    location=location,
                    privacy=fc['privacy']
                )
                lde.template = fc['template']
            else:
                lde = LocationDefaultEnvironment.objects.create(
                    location=location,
                    privacy=fc['privacy'],
                    template=fc['template']
                )
            lde.template_environment = fc['template_environment']
            lde.save()
            messages.info(request, 'Default location environment saved.')
            return redirect('manage:location_edit', location.id)
    else:
        default_environment_form = forms.LocationDefaultEnvironmentForm()
    context = {
        'form': form,
        'location': location,
        'default_environment_form': default_environment_form
    }
    context['location_default_environments'] = (
        LocationDefaultEnvironment.objects
        .filter(location=location).order_by('privacy', 'template')
    )
    return render(request, 'manage/location_edit.html', context)
@staff_required
@permission_required('main.add_location')
@cancel_redirect('manage:events')
@transaction.atomic
def location_new(request):
    """Create a new Location."""
    if request.method != 'POST':
        # Initial visit: render an unbound form.
        form = forms.LocationEditForm()
        return render(request, 'manage/location_new.html', {'form': form})
    form = forms.LocationEditForm(request.POST, instance=Location())
    if form.is_valid():
        form.save()
        messages.success(request, 'Location created.')
        return redirect('manage:locations')
    # Invalid submission: re-render with the bound form and its errors.
    return render(request, 'manage/location_new.html', {'form': form})
@staff_required
@permission_required('main.delete_location')
@transaction.atomic
def location_remove(request, id):
    """Delete a Location, refused while any (suggested) event still uses it."""
    location = get_object_or_404(Location, id=id)
    if request.method == 'POST':
        # Deletion is only allowed if no events or suggested events are
        # associated with this location.
        in_use = (
            Event.objects.filter(location=location) or
            SuggestedEvent.objects.filter(location=location)
        )
        if in_use:
            return http.HttpResponseBadRequest("Still being used")
        location.delete()
        messages.info(request, 'Location "%s" removed.' % location.name)
    return redirect('manage:locations')
@staff_required
@json_view
def location_timezone(request):
    """Responds with the timezone for the requested Location. Used to
    auto-fill the timezone form in event requests/edits."""
    location_id = request.GET.get('location')
    if not location_id:
        raise http.Http404('no location')
    location = get_object_or_404(Location, id=location_id)
    return {'timezone': location.timezone}
| bsd-3-clause |
mskrzypkows/servo | tests/wpt/web-platform-tests/referrer-policy/generic/tools/spec_validator.py | 326 | 6673 | #!/usr/bin/env python
import json, sys
from common_paths import *
def assert_non_empty_string(obj, field):
    """Assert obj[field] exists and is a non-empty string."""
    assert field in obj, 'Missing field "%s"' % field
    # basestring covers both str and unicode (Python 2 only).
    assert isinstance(obj[field], basestring), \
        'Field "%s" must be a string' % field
    assert len(obj[field]) > 0, 'Field "%s" must not be empty' % field
def assert_non_empty_list(obj, field):
    """Assert that obj[field] is a list with at least one element."""
    value = obj[field]
    assert isinstance(value, list), \
        '%s must be a list' % field
    assert value, \
        '%s list must not be empty' % field
def assert_non_empty_dict(obj, field):
    """Assert that obj[field] is a dict with at least one entry."""
    value = obj[field]
    assert isinstance(value, dict), \
        '%s must be a dict' % field
    assert value, \
        '%s dict must not be empty' % field
def assert_contains(obj, field):
    """Assert that *field* is present in *obj*."""
    message = 'Must contain field "%s"' % field
    assert field in obj, message
def assert_value_from(obj, field, items):
    """Assert that obj[field] is one of the allowed *items*."""
    message = 'Field "%s" must be from: %s' % (field, str(items))
    assert obj[field] in items, message
def assert_atom_or_list_items_from(obj, field, items):
    """Assert obj[field] is an allowed atom, or a list of allowed atoms.

    A bare string/int is validated directly against *items*; otherwise the
    value must be a non-empty list whose members are all drawn from *items*.
    The '*' wildcard is rejected inside lists.
    """
    # basestring is Python 2 only (covers str and unicode).
    if isinstance(obj[field], basestring) or isinstance(obj[field], int):
        assert_value_from(obj, field, items)
        return

    assert_non_empty_list(obj, field)
    for allowed_value in obj[field]:
        assert allowed_value != '*', "Wildcard is not supported for lists!"
        assert allowed_value in items, \
            'Field "%s" must be from: %s' % (field, str(items))
def assert_contains_only_fields(obj, expected_fields):
    """Assert *obj* has every expected field and nothing else."""
    for required in expected_fields:
        assert_contains(obj, required)
    for present in obj:
        assert present in expected_fields, \
            'Unexpected field "%s".' % present
def assert_value_unique_in(value, used_values):
    """Assert *value* was not seen before, then record it in *used_values*."""
    if value in used_values:
        raise AssertionError('Duplicate value "%s"!' % str(value))
    # Side effect: mark the value as seen for subsequent calls.
    used_values[value] = True
def validate(spec_json, details):
    """ Validates the json specification for generating tests.

    *details* is continuously updated with the object currently being
    inspected so the caller can report where a failing assertion happened.
    Raises AssertionError on the first problem found.
    """
    details['object'] = spec_json
    assert_contains_only_fields(spec_json, ["specification",
                                            "referrer_policy_schema",
                                            "test_expansion_schema",
                                            "subresource_path",
                                            "excluded_tests"])
    assert_non_empty_list(spec_json, "specification")
    assert_non_empty_list(spec_json, "referrer_policy_schema")
    assert_non_empty_dict(spec_json, "test_expansion_schema")
    assert_non_empty_list(spec_json, "excluded_tests")

    specification = spec_json['specification']
    referrer_policy_schema = spec_json['referrer_policy_schema']
    test_expansion_schema = spec_json['test_expansion_schema']
    excluded_tests = spec_json['excluded_tests']
    subresource_path = spec_json['subresource_path']

    valid_test_expansion_fields = ['name'] + test_expansion_schema.keys()

    # Validate each single spec.
    for spec in specification:
        details['object'] = spec
        # Validate required fields for a single spec.
        assert_contains_only_fields(spec, ['name',
                                           'title',
                                           'description',
                                           'referrer_policy',
                                           'specification_url',
                                           'test_expansion'])
        assert_non_empty_string(spec, 'name')
        assert_non_empty_string(spec, 'title')
        assert_non_empty_string(spec, 'description')
        assert_non_empty_string(spec, 'specification_url')
        assert_value_from(spec, 'referrer_policy', referrer_policy_schema)
        assert_non_empty_list(spec, 'test_expansion')

        # Validate spec's test expansion.
        used_spec_names = {}
        for spec_exp in spec['test_expansion']:
            details['object'] = spec_exp
            assert_non_empty_string(spec_exp, 'name')
            # The name is unique in same expansion group.
            assert_value_unique_in((spec_exp['expansion'], spec_exp['name']),
                                   used_spec_names)
            assert_contains_only_fields(spec_exp, valid_test_expansion_fields)
            for artifact in test_expansion_schema:
                details['test_expansion_field'] = artifact
                assert_atom_or_list_items_from(
                    spec_exp, artifact, ['*'] + test_expansion_schema[artifact])
                del details['test_expansion_field']

    # Validate the test_expansion schema members.
    details['object'] = test_expansion_schema
    assert_contains_only_fields(test_expansion_schema, ['expansion',
                                                        'delivery_method',
                                                        'redirection',
                                                        'origin',
                                                        'source_protocol',
                                                        'target_protocol',
                                                        'subresource',
                                                        'referrer_url'])

    # Validate excluded tests.
    details['object'] = excluded_tests
    for excluded_test_expansion in excluded_tests:
        assert_contains_only_fields(excluded_test_expansion,
                                    valid_test_expansion_fields)
        details['object'] = excluded_test_expansion
        for artifact in test_expansion_schema:
            details['test_expansion_field'] = artifact
            assert_atom_or_list_items_from(
                excluded_test_expansion,
                artifact,
                ['*'] + test_expansion_schema[artifact])
            del details['test_expansion_field']

    # Validate subresource paths.
    details['object'] = subresource_path
    assert_contains_only_fields(subresource_path,
                                test_expansion_schema['subresource'])
    for subresource in subresource_path:
        local_rel_path = "." + subresource_path[subresource]
        full_path = os.path.join(test_root_directory, local_rel_path)
        # BUG FIX: the failure message previously referenced the undefined
        # name `path`, so a missing file raised NameError instead of the
        # intended AssertionError message.
        assert os.path.isfile(full_path), \
            "%s is not an existing file" % full_path

    del details['object']
def assert_valid_spec_json(spec_json):
    """Validate *spec_json*; on failure print the error context and exit(1)."""
    error_details = {}
    try:
        validate(spec_json, error_details)
    except AssertionError, err:
        # Python 2 except syntax.  error_details holds the object validate()
        # was inspecting when the assertion fired.
        print 'ERROR:', err.message
        print json.dumps(error_details, indent=4)
        sys.exit(1)
def main():
    """Load the spec JSON (via common_paths helpers) and validate it."""
    spec_json = load_spec_json();
    assert_valid_spec_json(spec_json)
    print "Spec JSON is valid."


if __name__ == '__main__':
    # Script entry point.
    main()
| mpl-2.0 |
creasyw/IMTAphy | documentation/doctools/tags/0.3/sphinx/web/markup.py | 4 | 7490 | # -*- coding: utf-8 -*-
"""
sphinx.web.markup
~~~~~~~~~~~~~~~~~
Awfully simple markup used in comments. Syntax:
`this is some <code>`
like <tt> in HTML
``this is like ` just that i can contain backticks``
like <tt> in HTML
*emphasized*
translates to <em class="important">
**strong**
translates to <strong>
!!!very important message!!!
use this to mark important or dangerous things.
Translates to <em class="dangerous">
[[http://www.google.com/]]
Simple link with the link target as caption. If the
URL is relative the provided callback is called to get
the full URL.
[[http://www.google.com/ go to google]]
Link with "go to google" as caption.
<code>preformatted code that could by python code</code>
Python code (most of the time), otherwise preformatted.
<quote>cite someone</quote>
Like <blockquote> in HTML.
:copyright: 2007-2008 by Armin Ronacher.
:license: BSD.
"""
import cgi
import re
from urlparse import urlparse
from sphinx.highlighting import highlight_block
# Markup name -> (start delimiter, end delimiter).  An end of None means the
# "start" entry is a regex pattern for a token with no paired end tag.
inline_formatting = {
    'escaped_code': ('``', '``'),
    'code': ('`', '`'),
    'strong': ('**', '**'),
    'emphasized': ('*', '*'),
    'important': ('!!!', '!!!'),
    'link': ('[[', ']]'),
    'quote': ('<quote>', '</quote>'),
    'code_block': ('<code>', '</code>'),
    'paragraph': (r'\n{2,}', None),
    'newline': (r'\\$', None)
}

# Token name -> literal HTML emitted for simple begin/end formatting.
simple_formattings = {
    'strong_begin': '<strong>',
    'strong_end': '</strong>',
    'emphasized_begin': '<em>',
    'emphasized_end': '</em>',
    'important_begin': '<em class="important">',
    'important_end': '</em>',
    'quote_begin': '<blockquote>',
    'quote_end': '</blockquote>'
}

# Tokens whose raw content is captured verbatim (no nested markup inside).
raw_formatting = set(['link', 'code', 'escaped_code', 'code_block'])

# One alternation matching any start delimiter.  Sorted longest-first so
# e.g. '**' wins over '*'; literal delimiters are escaped, regex ones kept.
formatting_start_re = re.compile('|'.join(
    '(?P<%s>%s)' % (name, end is not None and re.escape(start) or start)
    for name, (start, end)
    in sorted(inline_formatting.items(), key=lambda x: -len(x[1][0]))
), re.S | re.M)

# Per-token compiled end-delimiter regexes (only tokens that have an end).
formatting_end_res = dict(
    (name, re.compile(re.escape(end))) for name, (start, end)
    in inline_formatting.iteritems() if end is not None
)

# Tokens emitted as single events, with no matching *_end token.
without_end_tag = set(name for name, (_, end) in inline_formatting.iteritems()
                      if end is None)
class StreamProcessor(object):
    """Iterator wrapper with one-token push-back, used by the parser."""

    def __init__(self, stream):
        self._pushed = []
        self._stream = stream

    def __iter__(self):
        return self

    def next(self):
        # Python 2 iterator protocol; pushed-back tokens win over the stream.
        if self._pushed:
            return self._pushed.pop()
        return self._stream.next()

    def push(self, token, data):
        """Push a (token, data) pair back; it is returned by the next next()."""
        self._pushed.append((token, data))

    def get_data(self, drop_needle=False):
        """Concatenate consecutive 'text' tokens into one string.

        Stops at the first non-text token, which is pushed back unless
        *drop_needle* is true.
        """
        result = []
        try:
            while True:
                token, data = self.next()
                if token != 'text':
                    if not drop_needle:
                        self.push(token, data)
                    break
                result.append(data)
        except StopIteration:
            pass
        return ''.join(result)
class MarkupParser(object):
    """Tokenizes the comment markup and renders it to HTML.

    *make_rel_url* resolves relative link targets to full URLs.
    """

    def __init__(self, make_rel_url):
        self.make_rel_url = make_rel_url

    def tokenize(self, text):
        """Yield (token, data) pairs for *text*.

        Walks the text character by character, matching end delimiters of
        the innermost open formatting first, then start delimiters; raw
        tokens (code, links) swallow their contents verbatim.
        """
        # Normalise line endings.
        text = '\n'.join(text.splitlines())
        last_pos = 0
        pos = 0
        end = len(text)
        stack = []          # currently open (non-raw) formatting tokens
        text_buffer = []    # plain characters accumulated since last token
        while pos < end:
            if stack:
                # Does the innermost open formatting end here?
                m = formatting_end_res[stack[-1]].match(text, pos)
                if m is not None:
                    if text_buffer:
                        yield 'text', ''.join(text_buffer)
                        del text_buffer[:]
                    yield stack[-1] + '_end', None
                    stack.pop()
                    pos = m.end()
                    continue
            m = formatting_start_re.match(text, pos)
            if m is not None:
                if text_buffer:
                    yield 'text', ''.join(text_buffer)
                    del text_buffer[:]
                for key, value in m.groupdict().iteritems():
                    if value is not None:
                        if key in without_end_tag:
                            # Single-event token (paragraph break, newline).
                            yield key, None
                        else:
                            if key in raw_formatting:
                                # Capture everything up to the end delimiter
                                # (or to EOF if it is missing) verbatim.
                                regex = formatting_end_res[key]
                                m2 = regex.search(text, m.end())
                                if m2 is None:
                                    yield key, text[m.end():]
                                else:
                                    yield key, text[m.end():m2.start()]
                                m = m2
                            else:
                                yield key + '_begin', None
                                stack.append(key)
                        break
                if m is None:
                    # Unterminated raw token consumed the rest of the text.
                    break
                else:
                    pos = m.end()
                    continue
            text_buffer.append(text[pos])
            pos += 1
        yield 'text', ''.join(text_buffer)
        # Close any formatting still open at end of input.
        for token in reversed(stack):
            yield token + '_end', None

    def stream_to_html(self, text):
        """Yield HTML fragments (paragraphs and code blocks) for *text*."""
        stream = StreamProcessor(self.tokenize(text))
        paragraph = []
        result = []

        def new_paragraph():
            # Flush the current paragraph buffer into the result list.
            result.append(paragraph[:])
            del paragraph[:]

        for token, data in stream:
            if token in simple_formattings:
                paragraph.append(simple_formattings[token])
            elif token in ('text', 'escaped_code', 'code'):
                if data:
                    data = cgi.escape(data)
                    if token in ('escaped_code', 'code'):
                        data = '<tt>%s</tt>' % data
                    paragraph.append(data)
            elif token == 'link':
                # "[[url caption]]" or "[[url]]" (caption defaults to url).
                if ' ' in data:
                    href, caption = data.split(' ', 1)
                else:
                    href = caption = data
                protocol = urlparse(href)[0]
                nofollow = True
                if not protocol:
                    # Relative link: resolve through the callback.
                    href = self.make_rel_url(href)
                    nofollow = False
                elif protocol == 'javascript':
                    # Strip the javascript: scheme (11 characters).
                    href = href[11:]
                paragraph.append('<a href="%s"%s>%s</a>' % (cgi.escape(href),
                                 nofollow and ' rel="nofollow"' or '',
                                 cgi.escape(caption)))
            elif token == 'code_block':
                result.append(highlight_block(data, 'python'))
                new_paragraph()
            elif token == 'paragraph':
                new_paragraph()
            elif token == 'newline':
                paragraph.append('<br>')
        if paragraph:
            result.append(paragraph)
        for item in result:
            if isinstance(item, list):
                # A buffered paragraph; skip if empty.
                if item:
                    yield '<p>%s</p>' % ''.join(item)
            else:
                # A pre-rendered code block string.
                yield item

    def to_html(self, text):
        """Render *text* to a single HTML string."""
        return ''.join(self.stream_to_html(text))
def markup(text, make_rel_url=lambda x: './' + x):
    """Convenience wrapper: render *text* with a throwaway MarkupParser."""
    parser = MarkupParser(make_rel_url)
    return parser.to_html(text)
| gpl-2.0 |
Purg/SMQTK | python/smqtk/bin/classifyFiles.py | 1 | 5843 | """
Based on an input, trained classifier configuration, classify a number of media
files, whose descriptor is computed by the configured descriptor generator.
Input files that classify as the given label are then output to standard out.
Thus, this script acts like a filter.
"""
import glob
import json
import logging
import os
from smqtk.algorithms import get_classifier_impls
from smqtk.algorithms import get_descriptor_generator_impls
from smqtk.representation import ClassificationElementFactory
from smqtk.representation import DescriptorElementFactory
from smqtk.representation.data_element.file_element import DataFileElement
from smqtk.utils import plugin
from smqtk.utils.bin_utils import (
initialize_logging,
output_config,
basic_cli_parser,
)
__author__ = "paul.tunison@kitware.com"
def get_cli_parser():
    """Build the argument parser for this tool."""
    parser = basic_cli_parser(__doc__)

    group = parser.add_argument_group("Classification")
    group.add_argument('--overwrite',
                       action='store_true', default=False,
                       help='When generating a configuration file, '
                            'overwrite an existing file.')
    group.add_argument('-l', '--label',
                       type=str, default=None,
                       help='The class to filter by. This is based on '
                            'the classifier configuration/model used. '
                            'If this is not provided, we will list the '
                            'available labels in the provided '
                            'classifier configuration.')

    # Positional
    parser.add_argument("file_globs",
                        nargs='*',
                        metavar='GLOB',
                        help='Series of shell globs specifying the files to '
                             'classify.')
    return parser
def get_default_config():
    """Return the default JSON-configuration skeleton for this tool."""
    config = {
        "descriptor_factory":
            DescriptorElementFactory.get_default_config(),
        "descriptor_generator":
            plugin.make_config(get_descriptor_generator_impls()),
        "classification_factory":
            ClassificationElementFactory.get_default_config(),
        "classifier":
            plugin.make_config(get_classifier_impls()),
    }
    return config
def main():
    """Parse the CLI, load/emit configuration, then run classification."""
    log = logging.getLogger(__name__)
    parser = get_cli_parser()
    args = parser.parse_args()

    config_path = args.config
    generate_config = args.generate_config
    config_overwrite = args.overwrite
    is_debug = args.verbose

    label = args.label
    file_globs = args.file_globs

    initialize_logging(logging.getLogger(__name__),
                       is_debug and logging.DEBUG or logging.INFO)
    initialize_logging(logging.getLogger('smqtk'),
                       is_debug and logging.DEBUG or logging.INFO)
    log.debug("Showing debug messages.")

    # Start from defaults and overlay the user's configuration file, if any.
    config = get_default_config()
    config_loaded = False
    if config_path and os.path.isfile(config_path):
        with open(config_path) as f:
            log.info("Loading configuration: %s", config_path)
            config.update(
                json.load(f)
            )
            config_loaded = True
    # May write the config and exit when --generate-config was requested.
    output_config(generate_config, config, log, config_overwrite, 100)

    if not config_loaded:
        # Exit code 101: no usable configuration was provided.
        log.error("No configuration provided")
        exit(101)

    classify_files(config, label, file_globs)
def classify_files(config, label, file_globs):
    """Classify files matched by *file_globs* and print those whose
    maximum-confidence classification equals *label*.

    When *label* is None or unknown to the classifier, the available labels
    are logged and nothing is classified.
    """
    log = logging.getLogger(__name__)

    #: :type: smqtk.algorithms.Classifier
    classifier = \
        plugin.from_plugin_config(config['classifier'],
                                  get_classifier_impls())

    def log_avaialable_labels():
        # (sic) original helper name kept; lists the labels the model knows.
        log.info("Available classifier labels:")
        for l in classifier.get_labels():
            log.info("- %s", l)

    if label is None:
        log_avaialable_labels()
        return
    elif label not in classifier.get_labels():
        log.error("Invalid classification label provided to compute and filter "
                  "on: '%s'", label)
        log_avaialable_labels()
        return

    log.info("Collecting files from globs")
    #: :type: list[DataFileElement]
    data_elements = []
    uuid2filepath = {}
    for g in file_globs:
        if os.path.isfile(g):
            d = DataFileElement(g)
            data_elements.append(d)
            uuid2filepath[d.uuid()] = g
        else:
            # Treat the argument as a shell glob and expand it.
            log.debug("expanding glob: %s", g)
            for fp in glob.iglob(g):
                d = DataFileElement(fp)
                data_elements.append(d)
                uuid2filepath[d.uuid()] = fp
    if not data_elements:
        raise RuntimeError("No files provided for classification.")

    log.info("Computing descriptors")
    descriptor_factory = \
        DescriptorElementFactory.from_config(config['descriptor_factory'])
    #: :type: smqtk.algorithms.DescriptorGenerator
    descriptor_generator = \
        plugin.from_plugin_config(config['descriptor_generator'],
                                  get_descriptor_generator_impls())
    descr_map = descriptor_generator\
        .compute_descriptor_async(data_elements, descriptor_factory)

    log.info("Classifying descriptors")
    classification_factory = ClassificationElementFactory \
        .from_config(config['classification_factory'])
    classification_map = classifier\
        .classify_async(descr_map.values(), classification_factory)

    log.info("Printing input file paths that classified as the given label.")
    # map of UUID to filepath:
    uuid2c = dict((c.uuid, c) for c in classification_map.itervalues())
    for data in data_elements:
        if uuid2c[data.uuid()].max_label() == label:
            print uuid2filepath[data.uuid()]
if __name__ == '__main__':
    # Script entry point.
    main()
| bsd-3-clause |
tumbl3w33d/ansible | lib/ansible/modules/network/onyx/onyx_buffer_pool.py | 37 | 4908 | #!/usr/bin/python
#
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: onyx_buffer_pool
version_added: "2.8"
author: "Anas Badaha (@anasb)"
short_description: Configures Buffer Pool
description:
- This module provides declarative management of Onyx Buffer Pool configuration
on Mellanox ONYX network devices.
notes:
- Tested on ONYX 3.6.8130
options:
name:
description:
- pool name.
required: true
pool_type:
description:
- pool type.
choices: ['lossless', 'lossy']
default: lossy
memory_percent:
description:
- memory percent.
switch_priority:
description:
- switch priority, range 1-7.
"""
EXAMPLES = """
- name: configure buffer pool
onyx_buffer_pool:
name: roce
pool_type: lossless
memory_percent: 50.00
switch_priority: 3
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device.
returned: always
type: list
sample:
- traffic pool roce type lossless
- traffic pool roce memory percent 50.00
- traffic pool roce map switch-priority 3
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.onyx.onyx import show_cmd
from ansible.module_utils.network.onyx.onyx import BaseOnyxModule
class OnyxBufferPoolModule(BaseOnyxModule):
    """Onyx module managing 'traffic pool' (buffer pool) configuration.

    Declarative workflow (driven by ``BaseOnyxModule``): load the pool's
    current state from the device, diff it against the task parameters and
    emit only the CLI commands needed to reconcile the two.
    """

    def init_module(self):
        """Initialize the AnsibleModule with the buffer pool argument spec."""
        element_spec = dict(
            name=dict(type='str', required=True),
            pool_type=dict(choices=['lossless', 'lossy'], default='lossy'),
            memory_percent=dict(type='float'),
            switch_priority=dict(type='int')
        )
        argument_spec = dict()
        argument_spec.update(element_spec)
        self._module = AnsibleModule(
            argument_spec=argument_spec,
            supports_check_mode=True)

    def get_required_config(self):
        """Copy the validated task parameters into ``self._required_config``."""
        module_params = self._module.params
        self._required_config = dict(module_params)
        self.validate_param_values(self._required_config)

    def validate_switch_priority(self, value):
        """Fail the module if ``switch_priority`` lies outside 0-7.

        NOTE(review): the DOCUMENTATION block advertises "range 1-7" while
        this check (and its message) allow 0 -- confirm which is intended.
        """
        if value and not 0 <= int(value) <= 7:
            self._module.fail_json(msg='switch_priority value must be between 0 and 7')

    def _set_traffic_pool_config(self, traffic_pool_config):
        """Populate ``self._current_config`` from ``show traffic pool`` JSON."""
        if traffic_pool_config is None:
            return
        traffic_pool_config = traffic_pool_config.get(self._required_config.get('name'))
        if not traffic_pool_config:
            # Bug fix: the device answered, but the requested pool name is
            # absent from the output.  Indexing ``None[0]`` here used to raise
            # TypeError; leaving the current config empty instead lets
            # generate_commands() create the pool from scratch.
            return
        self._current_config['pool_type'] = traffic_pool_config[0].get("Type")
        self._current_config['switch_priority'] = int(
            traffic_pool_config[0].get("Switch Priorities"))
        self._current_config['memory_percent'] = float(
            traffic_pool_config[0].get("Memory [%]"))

    def _show_traffic_pool(self):
        """Run ``show traffic pool <name>`` on the device, returning JSON."""
        cmd = "show traffic pool {0}".format(self._required_config.get("name"))
        return show_cmd(self._module, cmd, json_fmt=True, fail_on_error=False)

    def load_current_config(self):
        """Load the device's current pool settings into ``self._current_config``."""
        self._current_config = dict()
        traffic_pool_config = self._show_traffic_pool()
        self._set_traffic_pool_config(traffic_pool_config)

    def generate_commands(self):
        """Emit CLI commands for every attribute that differs from the device."""
        name = self._required_config.get("name")
        pool_type = self._required_config.get("pool_type")
        if self._current_config is None:
            self._add_add_traffic_pool_cmds(name, pool_type)
        else:
            current_pool_type = self._current_config.get("pool_type")
            if pool_type != current_pool_type:
                # Also covers a freshly-empty current config (type is None).
                self._add_add_traffic_pool_cmds(name, pool_type)

        memory_percent = self._required_config.get("memory_percent")
        if memory_percent is not None:
            curr_memory_percent = self._current_config.get("memory_percent")
            if curr_memory_percent is None or memory_percent != curr_memory_percent:
                self._commands.append(
                    'traffic pool {0} memory percent {1}'.format(name, memory_percent))

        switch_priority = self._required_config.get("switch_priority")
        if switch_priority is not None:
            curr_switch_priority = self._current_config.get("switch_priority")
            if curr_switch_priority is None or switch_priority != curr_switch_priority:
                self._commands.append(
                    'traffic pool {0} map switch-priority {1}'.format(name, switch_priority))

    def _add_add_traffic_pool_cmds(self, name, pool_type):
        """Append the command that creates (or retypes) the pool."""
        self._commands.append('traffic pool {0} type {1}'.format(name, pool_type))
def main():
    """ main entry point for module execution
    """
    # NOTE(review): ``main`` here is the runner inherited from
    # BaseOnyxModule -- confirm it instantiates and executes this subclass.
    OnyxBufferPoolModule.main()


if __name__ == '__main__':
    main()
| gpl-3.0 |
bgris/ODL_bgris | lib/python3.5/site-packages/odl/util/graphics.py | 1 | 15419 | # Copyright 2014-2016 The ODL development group
#
# This file is part of ODL.
#
# ODL is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ODL is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ODL. If not, see <http://www.gnu.org/licenses/>.
"""Functions for graphical output."""
# Imports for common Python 2/3 codebase
from __future__ import print_function, division, absolute_import
from future import standard_library
standard_library.install_aliases()
import numpy as np
from odl.util.testutils import run_doctests
from odl.util.utility import is_real_dtype
__all__ = ('show_discrete_data',)
def _safe_minmax(values):
"""Calculate min and max of array with guards for nan and inf."""
# Nan and inf guarded min and max
minval = np.min(values[np.isfinite(values)])
maxval = np.max(values[np.isfinite(values)])
return minval, maxval
def _colorbar_ticks(minval, maxval):
"""Return the ticks (values show) in the colorbar."""
return [minval, (maxval + minval) / 2., maxval]
def _digits(minval, maxval):
"""Digits needed to comforatbly display values in [minval, maxval]"""
if minval == maxval:
return 3
else:
return min(10, max(2, int(1 + abs(np.log10(maxval - minval)))))
def _colorbar_format(minval, maxval):
    """Return the printf-style format string for colorbar tick labels."""
    ndigits = _digits(minval, maxval)
    return '%.' + str(ndigits) + 'f'
def _axes_info(grid, npoints=5):
    """Collect tick positions and labels for every axis of ``grid``.

    Returns a list holding one ``(points, tick_labels)`` pair per axis,
    where ``points`` are ``npoints`` equispaced positions and
    ``tick_labels`` the correspondingly formatted grid coordinates.
    """
    min_pt = grid.min()
    max_pt = grid.max()
    infos = []
    for axis in range(grid.ndim):
        lo = min_pt[axis]
        hi = max_pt[axis]
        points = np.linspace(lo, hi, npoints)
        indices = np.linspace(0, grid.shape[axis] - 1, npoints, dtype=int)
        tick_values = grid.coord_vectors[axis][indices]
        # Do not use corner point in case of a partition, use outer corner
        tick_values[[0, -1]] = lo, hi
        fmt = '{{:.{}f}}'.format(_digits(lo, hi))
        infos.append((points, [fmt.format(v) for v in tick_values]))
    return infos
def show_discrete_data(values, grid, title=None, method='',
                       force_show=False, fig=None, **kwargs):
    """Display a discrete 1d or 2d function.

    Parameters
    ----------
    values : `numpy.ndarray`
        The values to visualize
    grid : `TensorGrid` or `RectPartition`
        Grid of the values
    title : string, optional
        Set the title of the figure
    method : string, optional
        1d methods:

        'plot' : graph plot

        'scatter' : scattered 2d points (2nd axis <-> value)

        2d methods:

        'imshow' : image plot with coloring according to value,
        including a colorbar.

        'scatter' : cloud of scattered 3d points (3rd axis <-> value)

        'wireframe', 'plot_wireframe' : surface plot
    force_show : bool, optional
        Whether the plot should be forced to be shown now or deferred until
        later. Note that some backends always displays the plot, regardless
        of this value.
    fig : `matplotlib.figure.Figure`, optional
        The figure to show in. Expected to be of same "style", as the figure
        given by this function. The most common usecase is that fig is the
        return value from an earlier call to this function.
        Default: New figure
    interp : {'nearest', 'linear'}, optional
        Interpolation method to use.
        Default: 'nearest'
    axis_labels : string, optional
        Axis labels, default: ['x', 'y']
    update_in_place : bool, optional
        Update the content of the figure in place. Intended for faster real
        time plotting, typically ~5 times faster.
        This is only performed for ``method == 'imshow'`` with real data and
        ``fig != None``. Otherwise this parameter is treated as False.
        Default: False
    axis_fontsize : int, optional
        Fontsize for the axes. Default: 16
    kwargs : {'figsize', 'saveto', ...}
        Extra keyword arguments passed on to display method
        See the Matplotlib functions for documentation of extra
        options.

    Returns
    -------
    fig : `matplotlib.figure.Figure`
        The resulting figure. It is also shown to the user.

    See Also
    --------
    matplotlib.pyplot.plot : Show graph plot

    matplotlib.pyplot.imshow : Show data as image

    matplotlib.pyplot.scatter : Show scattered 3d points
    """
    # Importing pyplot takes ~2 sec, only import when needed.
    import matplotlib.pyplot as plt

    args_re = []
    args_im = []
    dsp_kwargs = {}
    sub_kwargs = {}
    arrange_subplots = (121, 122)  # horizontal arrangement (real | imag)

    # Create axis labels which remember their original meaning
    axis_labels = kwargs.pop('axis_labels', ['x', 'y'])

    values_are_complex = not is_real_dtype(values.dtype)
    figsize = kwargs.pop('figsize', None)
    saveto = kwargs.pop('saveto', None)
    interp = kwargs.pop('interp', 'nearest')
    axis_fontsize = kwargs.pop('axis_fontsize', 16)

    # Check if we should and can update the plot in place.  In-place updating
    # is only supported for real-valued 2d data shown via 'imshow' into an
    # existing figure; anything else falls back to a full redraw.
    update_in_place = kwargs.pop('update_in_place', False)
    if (update_in_place and
            (fig is None or values_are_complex or values.ndim != 2 or
             (values.ndim == 2 and method not in ('', 'imshow')))):
        update_in_place = False

    if values.ndim == 1:  # TODO: maybe a plotter class would be better
        if not method:
            # Default 1d method depends on the interpolation scheme
            if interp == 'nearest':
                method = 'step'
                dsp_kwargs['where'] = 'mid'
            elif interp == 'linear':
                method = 'plot'
            else:
                method = 'plot'

        if method == 'plot' or method == 'step' or method == 'scatter':
            args_re += [grid.coord_vectors[0], values.real]
            args_im += [grid.coord_vectors[0], values.imag]
        else:
            raise ValueError('`method` {!r} not supported'
                             ''.format(method))

    elif values.ndim == 2:
        if not method:
            method = 'imshow'

        if method == 'imshow':
            # rot90 aligns array axes with the plot's x/y orientation
            args_re = [np.rot90(values.real)]
            args_im = [np.rot90(values.imag)] if values_are_complex else []

            extent = [grid.min()[0], grid.max()[0],
                      grid.min()[1], grid.max()[1]]

            if interp == 'nearest':
                interpolation = 'nearest'
            elif interp == 'linear':
                interpolation = 'bilinear'
            else:
                interpolation = 'none'

            dsp_kwargs.update({'interpolation': interpolation,
                               'cmap': 'bone',
                               'extent': extent,
                               'aspect': 'auto'})
        elif method == 'scatter':
            pts = grid.points()
            args_re = [pts[:, 0], pts[:, 1], values.ravel().real]
            args_im = ([pts[:, 0], pts[:, 1], values.ravel().imag]
                       if values_are_complex else [])
            sub_kwargs.update({'projection': '3d'})
        elif method in ('wireframe', 'plot_wireframe'):
            method = 'plot_wireframe'
            x, y = grid.meshgrid
            args_re = [x, y, np.rot90(values.real)]
            args_im = ([x, y, np.rot90(values.imag)] if values_are_complex
                       else [])
            sub_kwargs.update({'projection': '3d'})
        else:
            raise ValueError('`method` {!r} not supported'
                             ''.format(method))

    else:
        raise NotImplementedError('no method for {}d display implemented'
                                  ''.format(values.ndim))

    # Additional keyword args are passed on to the display method
    dsp_kwargs.update(**kwargs)

    if fig is not None:
        # Reuse figure if given as input
        if not isinstance(fig, plt.Figure):
            raise TypeError('`fig` {} not a matplotlib figure'.format(fig))

        if not plt.fignum_exists(fig.number):
            # If figure does not exist, user either closed the figure or
            # is using IPython, in this case we need a new figure.
            fig = plt.figure(figsize=figsize)
            updatefig = False
        else:
            # Set current figure to given input
            fig = plt.figure(fig.number)
            updatefig = True

            if values.ndim > 1 and not update_in_place:
                # If the figure is larger than 1d, we can clear it since we
                # dont reuse anything. Keeping it causes performance problems.
                fig.clf()
    else:
        fig = plt.figure(figsize=figsize)
        updatefig = False

    if values_are_complex:
        # Complex data: show real and imaginary parts side by side.
        # Real part
        if len(fig.axes) == 0:
            # Create new axis if needed
            sub_re = plt.subplot(arrange_subplots[0], **sub_kwargs)
            sub_re.set_title('Real part')
            sub_re.set_xlabel(axis_labels[0], fontsize=axis_fontsize)
            if values.ndim == 2:
                sub_re.set_ylabel(axis_labels[1], fontsize=axis_fontsize)
            else:
                sub_re.set_ylabel('value')
        else:
            sub_re = fig.axes[0]

        display_re = getattr(sub_re, method)
        csub_re = display_re(*args_re, **dsp_kwargs)

        # Axis ticks: non-uniform grids need explicit tick placement
        if method == 'imshow' and not grid.is_uniform:
            (xpts, xlabels), (ypts, ylabels) = _axes_info(grid)
            plt.xticks(xpts, xlabels)
            plt.yticks(ypts, ylabels)

        if method == 'imshow' and len(fig.axes) < 2:
            # Create colorbar if none seems to exist

            # Use clim from kwargs if given
            if 'clim' not in kwargs:
                minval_re, maxval_re = _safe_minmax(values.real)
            else:
                minval_re, maxval_re = kwargs['clim']

            ticks_re = _colorbar_ticks(minval_re, maxval_re)
            format_re = _colorbar_format(minval_re, maxval_re)

            plt.colorbar(csub_re, orientation='horizontal',
                         ticks=ticks_re, format=format_re)

        # Imaginary part
        if len(fig.axes) < 3:
            sub_im = plt.subplot(arrange_subplots[1], **sub_kwargs)
            sub_im.set_title('Imaginary part')
            sub_im.set_xlabel(axis_labels[0], fontsize=axis_fontsize)
            if values.ndim == 2:
                sub_im.set_ylabel(axis_labels[1], fontsize=axis_fontsize)
            else:
                sub_im.set_ylabel('value')
        else:
            sub_im = fig.axes[2]

        display_im = getattr(sub_im, method)
        csub_im = display_im(*args_im, **dsp_kwargs)

        # Axis ticks
        if method == 'imshow' and not grid.is_uniform:
            (xpts, xlabels), (ypts, ylabels) = _axes_info(grid)
            plt.xticks(xpts, xlabels)
            plt.yticks(ypts, ylabels)

        if method == 'imshow' and len(fig.axes) < 4:
            # Create colorbar if none seems to exist

            # Use clim from kwargs if given
            if 'clim' not in kwargs:
                minval_im, maxval_im = _safe_minmax(values.imag)
            else:
                minval_im, maxval_im = kwargs['clim']

            ticks_im = _colorbar_ticks(minval_im, maxval_im)
            format_im = _colorbar_format(minval_im, maxval_im)

            plt.colorbar(csub_im, orientation='horizontal',
                         ticks=ticks_im, format=format_im)

    else:
        # Real data: single subplot.
        if len(fig.axes) == 0:
            # Create new axis object if needed
            sub = plt.subplot(111, **sub_kwargs)
            sub.set_xlabel(axis_labels[0], fontsize=axis_fontsize)
            if values.ndim == 2:
                sub.set_ylabel(axis_labels[1], fontsize=axis_fontsize)
            else:
                sub.set_ylabel('value')
            try:
                # For 3d plots
                sub.set_zlabel('z')
            except AttributeError:
                pass
        else:
            sub = fig.axes[0]

        if update_in_place:
            import matplotlib as mpl
            imgs = [obj for obj in sub.get_children()
                    if isinstance(obj, mpl.image.AxesImage)]
            if len(imgs) > 0 and updatefig:
                # Fast path: swap the image data of the existing AxesImage
                imgs[0].set_data(args_re[0])
                csub = imgs[0]

                # Update min-max
                if 'clim' not in kwargs:
                    minval, maxval = _safe_minmax(values)
                else:
                    minval, maxval = kwargs['clim']

                csub.set_clim(minval, maxval)
            else:
                display = getattr(sub, method)
                csub = display(*args_re, **dsp_kwargs)
        else:
            display = getattr(sub, method)
            csub = display(*args_re, **dsp_kwargs)

        # Axis ticks
        if method == 'imshow' and not grid.is_uniform:
            (xpts, xlabels), (ypts, ylabels) = _axes_info(grid)
            plt.xticks(xpts, xlabels)
            plt.yticks(ypts, ylabels)

        if method == 'imshow':
            # Add colorbar
            # Use clim from kwargs if given
            if 'clim' not in kwargs:
                minval, maxval = _safe_minmax(values)
            else:
                minval, maxval = kwargs['clim']

            ticks = _colorbar_ticks(minval, maxval)
            format = _colorbar_format(minval, maxval)
            if len(fig.axes) < 2:
                # Create colorbar if none seems to exist
                plt.colorbar(mappable=csub, ticks=ticks, format=format)
            elif update_in_place:
                # If it exists and we should update it
                csub.colorbar.set_clim(minval, maxval)
                csub.colorbar.set_ticks(ticks)
                csub.colorbar.set_ticklabels([format % tick for tick in ticks])
                csub.colorbar.draw_all()

    # Fixes overlapping stuff at the expense of potentially squashed subplots
    if not update_in_place:
        fig.tight_layout()

    if title is not None:
        if not values_are_complex:
            # Do not overwrite title for complex values
            plt.title(title)
        fig.canvas.manager.set_window_title(title)

    if updatefig or plt.isinteractive():
        # If we are running in interactive mode, we can always show the fig
        # This causes an artifact, where users of `CallbackShow` without
        # interactive mode only shows the figure after the second iteration.
        plt.show(block=False)
        if not update_in_place:
            plt.draw()
            plt.pause(0.0001)
        else:
            try:
                # Blitting is much faster than a full draw, but not every
                # backend supports canvas.update() -- fall back if missing.
                sub.draw_artist(csub)
                fig.canvas.blit(fig.bbox)
                fig.canvas.update()
                fig.canvas.flush_events()
            except AttributeError:
                plt.draw()
                plt.pause(0.0001)

    if force_show:
        plt.show()

    if saveto is not None:
        fig.savefig(saveto)

    return fig
if __name__ == '__main__':
    # Running this module directly executes its doctests.
    run_doctests()
| gpl-3.0 |
kylejusticemagnuson/pyti | tests/test_stochastic.py | 1 | 15612 | from __future__ import absolute_import
import unittest
import numpy as np
from tests.sample_data import SampleData
from pyti import stochastic
class TestStochastic(unittest.TestCase):
def setUp(self):
"""Create data to use for testing."""
self.data = SampleData().get_sample_close_data()
self.percent_k_period_6_expected = [np.nan, np.nan, np.nan, np.nan,
np.nan, 0.9856979405034324, 1.0, 1.0, 0.63513513513513342,
0.27567567567568274, 1.0, 1.0, 0.68322981366460012, 0.0,
0.15515515515516184, 0.0, 0.0, 0.0, 0.06131650135257203, 0.0, 0.0,
0.4255711127487089, 1.0, 0.85463958582237798, 0.63201911589008342,
0.58422939068100166, 0.67256637168141331, 0.55555555555554825, 0.0, 1.0,
0.39352306182532032, 0.0, 0.0, 0.56253794778384958, 0.82179720704310821,
1.0, 1.0, 0.83066712049012859, 0.23241362167536711,
0.059955822025878437, 0.23704663212435031, 0.78950777202072531, 1.0,
1.0, 0.94086165373294273, 1.0, 1.0, 1.0, 0.36487221315932178,
0.23273518216421837, 0.38695960311835798, 0.0, 0.0, 0.0, 0.0,
0.33420252064319617, 0.31533601378518206, 1.0, 0.0, 0.17607726597325543,
0.038632986627041961, 0.15453194650816784, 0.0, 1.0,
0.61413043478261453, 1.0, 1.0, 0.21932367149758231, 1.0, 1.0,
0.17894736842105138, 0.0, 0.0, 0.12548638132295883, 0.2840466926070046,
0.0, 0.0, 0.80735411670663715, 0.0, 1.0, 1.0, 1.0, 0.42937563971340847,
0.14943705220061232, 0.0, 0.11392405063290814, 0.32856356631810901,
0.48005698005698194, 0.24288107202678813, 0.62814070351758511, 1.0, 1.0,
1.0, 1.0, 1.0, 0.52095130237826281, 1.0, 1.0, 1.0, 1.0,
0.86164383561643876, 0.0, 0.52147239263801737, 0.0, 0.14857651245551226,
0.28054740957966762, 0.3811983471074456, 0.0, 0.0, 0.0, 0.0, 0.0,
0.052040212891779666, 0.0, 0.35317460317461002, 0.0, 0.0, 0.0,
0.0079254079254060007, 0.0, 0.12661930631007018, 0.0, 0.0, 0.0,
0.067722772277229157, 0.0, 0.24025100851636036]
self.percent_k_period_8_expected = [np.nan, np.nan, np.nan, np.nan,
np.nan, np.nan, np.nan, 1.0, 0.78084415584415301, 0.49576669802445755,
1.0, 1.0, 0.68940316686967806, 0.0, 0.15515515515516184, 0.0, 0.0, 0.0,
0.048909134500121687, 0.0, 0.0, 0.25598404255319046,
0.81420233463035285, 0.79071481208548022, 0.63201911589008342,
0.58422939068100166, 0.82317801672640178, 0.81521306252488657,
0.0066371681415952387, 0.75649591685225837, 0.39352306182532032, 0.0,
0.0, 0.56253794778384958, 0.82179720704310821, 1.0, 1.0,
0.83066712049012859, 0.47447243022464258, 0.49302246426140284,
0.41436738752174873, 0.79488797727989935, 0.93264248704663077, 1.0,
0.94253770150806226, 1.0, 1.0, 1.0, 0.61401189689358671,
0.45394736842105277, 0.52963567156063163, 0.22512234910277268, 0.0, 0.0,
0.0, 0.33420252064319617, 0.23859191655801873, 0.43850499782702834, 0.0,
0.17607726597325543, 0.038632986627041961, 0.15453194650816784, 0.0,
0.26686004350978676, 0.16388687454677281, 1.0, 1.0, 0.21932367149758231,
1.0, 1.0, 0.17956423741547525, 0.0, 0.0, 0.12548638132295883,
0.2840466926070046, 0.0, 0.0, 0.61925199264255404, 0.0, 1.0, 1.0, 1.0,
0.42937563971340847, 0.14943705220061232, 0.070112589559877536,
0.17604912998976188, 0.32856356631810901, 0.18547055586131053,
0.079801871216287013, 0.53418803418803562, 1.0, 1.0, 1.0, 1.0, 1.0,
0.7004249291784771, 1.0, 1.0, 1.0, 1.0, 0.86164383561643876,
0.55342465753424508, 0.78630136986300425, 0.0, 0.14857651245551226,
0.25533807829181515, 0.32829181494662379, 0.0, 0.0, 0.0, 0.0, 0.0,
0.040534315983417502, 0.0, 0.07229894394801159, 0.0, 0.0, 0.0,
0.0071881606765310463, 0.0, 0.1097826086956511, 0.0, 0.0, 0.0,
0.059915907498249425, 0.0, 0.19406227371469995]
self.percent_k_period_10_expected = [np.nan, np.nan, np.nan, np.nan,
np.nan, np.nan, np.nan, np.nan, np.nan, 0.76439560439560383, 1.0, 1.0,
0.74727452923687354, 0.009910802775026999, 0.15515515515516184, 0.0,
0.0, 0.0, 0.048909134500121687, 0.0, 0.0, 0.22642619094295152,
0.55651595744680871, 0.47562056737588476, 0.51459143968871746,
0.54053058216654259, 0.82317801672640178, 0.81521306252488657,
0.46356033452807566, 0.86937475109517781, 0.30235988200590008, 0.0, 0.0,
0.56253794778384958, 0.82179720704310821, 1.0, 1.0, 0.83066712049012859,
0.47447243022464258, 0.49302246426140284, 0.59904697072838564,
0.88938053097345127, 0.94829729057916878, 1.0, 0.94253770150806226, 1.0,
1.0, 1.0, 0.78188608776843938, 0.70181741335587489, 0.7141440846001329,
0.44852941176470656, 0.0, 0.0, 0.0, 0.24289324068224727,
0.17340492735312743, 0.43850499782702834, 0.0, 0.089840788476118455,
0.025024061597689246, 0.15453194650816784, 0.0, 0.26686004350978676,
0.16388687454677281, 0.70195794053661897, 0.75054387237128717,
0.21932367149758231, 1.0, 1.0, 0.2986512524084754, 0.0, 0.0,
0.12548638132295883, 0.2840466926070046, 0.0, 0.0, 0.3709144326110913,
0.0, 0.86767371601208776, 1.0, 1.0, 0.42937563971340847,
0.14943705220061232, 0.070112589559877536, 0.17604912998976188,
0.37563971340839536, 0.24257932446264166, 0.079801871216287013,
0.2063841496973037, 0.37094111172262106, 1.0, 1.0, 1.0, 1.0,
0.7004249291784771, 1.0, 1.0, 1.0, 1.0, 0.9124783362218376,
0.63122171945701588, 0.78630136986300425, 0.0, 0.14857651245551226,
0.25533807829181515, 0.32829181494662379, 0.0, 0.0, 0.0, 0.0, 0.0,
0.040534315983417502, 0.0, 0.057382333978080118, 0.0, 0.0, 0.0,
0.0064540622627167372, 0.0, 0.10167785234899253, 0.0, 0.0, 0.0,
0.037053087757313918, 0.0, 0.17340666450986797]
self.percent_d_period_6_expected = [np.nan, np.nan, np.nan, np.nan,
np.nan, np.nan, np.nan, 0.99523264683447754, 0.87837837837837773,
0.63693693693693865, 0.63693693693693876, 0.75855855855856091,
0.8944099378882, 0.56107660455486663, 0.27946165627325398,
0.051718385051720613, 0.051718385051720613, 0.0, 0.020438833784190678,
0.020438833784190678, 0.020438833784190678, 0.14185703758290297,
0.47519037091623634, 0.76007023285702902, 0.82888623390415372,
0.69029603079782087, 0.62960495941749939, 0.60411710597265433,
0.40937397574565387, 0.51851851851851605, 0.46450768727510677,
0.46450768727510677, 0.13117435394177343, 0.18751264926128319,
0.46144505160898591, 0.79477838494231923, 0.9405990690143694,
0.94355570683004286, 0.68769358072183184, 0.37434552139712474,
0.17647202527519865, 0.36217007539031804, 0.6755181347150252,
0.9298359240069084, 0.98028721791098095, 0.98028721791098095,
0.98028721791098095, 1.0, 0.78829073771977398, 0.53253579844118004,
0.32818899948063268, 0.20656492842752547, 0.12898653437278598, 0.0, 0.0,
0.11140084021439872, 0.2165128448094594, 0.54984617814279269,
0.43844533792839407, 0.39202575532441847, 0.071570084200099124,
0.12308073303615508, 0.064388311045069938, 0.38484398216938925,
0.53804347826087151, 0.87137681159420488, 0.87137681159420488,
0.73977455716586071, 0.73977455716586071, 0.73977455716586071,
0.7263157894736838, 0.39298245614035049, 0.059649122807017126,
0.041828793774319611, 0.13651102464332113, 0.13651102464332113,
0.09468223086900153, 0.26911803890221236, 0.26911803890221236,
0.60245137223554568, 0.66666666666666663, 1.0, 0.80979187990446944,
0.52627089730467358, 0.19293756397134029, 0.08778703427784014,
0.1474958723170057, 0.307514865669333, 0.35050053946729304,
0.45035958520045166, 0.6236739251814577, 0.87604690117252837, 1.0, 1.0,
1.0, 0.84031710079275435, 0.84031710079275435, 0.84031710079275435, 1.0,
1.0, 0.95388127853881288, 0.62054794520547951, 0.46103874275148532,
0.17382413087933912, 0.22334963503117655, 0.14304130734505996,
0.27010742304754182, 0.22058191889570442, 0.12706611570248186, 0.0, 0.0,
0.0, 0.017346737630593221, 0.017346737630593221, 0.13507160535546323,
0.11772486772487001, 0.11772486772487001, 0.0, 0.0026418026418020004,
0.0026418026418020004, 0.044848238078492059, 0.04220643543669006,
0.04220643543669006, 0.0, 0.022574257425743052, 0.022574257425743052,
0.10265792693119651]
self.percent_d_period_8_expected = [np.nan, np.nan, np.nan, np.nan,
np.nan, np.nan, np.nan, np.nan, np.nan, 0.75887028462287021,
0.75887028462287009, 0.83192223267481913, 0.89646772228989269,
0.56313438895655932, 0.28151944067494666, 0.051718385051720613,
0.051718385051720613, 0.0, 0.016303044833373897, 0.016303044833373897,
0.016303044833373897, 0.085328014184396825, 0.35672879239451444,
0.62030039642300794, 0.74564542086863883, 0.66898777288552169,
0.67980884109916229, 0.74087348997742997, 0.54834274913096126,
0.52611538250624668, 0.38555204893972461, 0.38333965955919291,
0.13117435394177343, 0.18751264926128319, 0.46144505160898591,
0.79477838494231923, 0.9405990690143694, 0.94355570683004286,
0.76837985023825706, 0.59938733832539137, 0.46062076066926472,
0.56742594302101701, 0.71396595061609303, 0.9091768214421766,
0.95839339618489772, 0.98084590050268738, 0.98084590050268738, 1.0,
0.87133729896452883, 0.68931975510487975, 0.53253164562509037,
0.40290179636148565, 0.25158600688780147, 0.075040783034257555, 0.0,
0.11140084021439872, 0.19093147906707164, 0.33709981167608111,
0.22569897146168236, 0.20486075460009459, 0.071570084200099124,
0.12308073303615508, 0.064388311045069938, 0.14046399667265153,
0.14358230601885319, 0.4769156393521865, 0.72129562484892418,
0.73977455716586071, 0.73977455716586071, 0.73977455716586071,
0.72652141247182511, 0.3931880791384918, 0.05985474580515842,
0.041828793774319611, 0.13651102464332113, 0.13651102464332113,
0.09468223086900153, 0.20641733088085135, 0.20641733088085135,
0.53975066421418472, 0.66666666666666663, 1.0, 0.80979187990446944,
0.52627089730467358, 0.21630842715796614, 0.13186625725008391,
0.19157509528924946, 0.23002775072306048, 0.19794533113190219,
0.26648682042187771, 0.53799663513477425, 0.84472934472934524, 1.0, 1.0,
1.0, 0.9001416430594924, 0.9001416430594924, 0.9001416430594924, 1.0,
1.0, 0.95388127853881288, 0.80502283105022787, 0.73378995433789607,
0.44657534246574976, 0.31162596077283883, 0.1346381969157758,
0.24406880189798374, 0.19454329774614632, 0.10943060498220793, 0.0, 0.0,
0.0, 0.013511438661139167, 0.013511438661139167, 0.037611086643809695,
0.02409964798267053, 0.02409964798267053, 0.0, 0.0023960535588436823,
0.0023960535588436823, 0.038990256457394047, 0.036594202898550365,
0.036594202898550365, 0.0, 0.019971969166083143, 0.019971969166083143,
0.084659393737649788]
self.percent_d_period_10_expected = [np.nan, np.nan, np.nan, np.nan,
np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan,
0.92146520146520128, 0.91575817641229118, 0.58572844400396684,
0.30411349572235413, 0.055021985976729616, 0.051718385051720613, 0.0,
0.016303044833373897, 0.016303044833373897, 0.016303044833373897,
0.075475396980983836, 0.26098071612992008, 0.41952090525521496,
0.51557598817047035, 0.5102475297437149, 0.62610001286055394,
0.72630722047261032, 0.70065047125978808, 0.71604938271604668,
0.54509832254305113, 0.39057821103369261, 0.10078662733530003,
0.18751264926128319, 0.46144505160898591, 0.79477838494231923,
0.9405990690143694, 0.94355570683004286, 0.76837985023825706,
0.59938733832539137, 0.52218062173814372, 0.66048332198774662,
0.81224159742700186, 0.94589260718420665, 0.96361166402907694,
0.98084590050268738, 0.98084590050268738, 1.0, 0.92729536258947975,
0.82790116704143812, 0.73261586190814898, 0.62149696990690473,
0.38755783212161315, 0.14950980392156885, 0.0, 0.080964413560749085,
0.13876605601179157, 0.284934388620801, 0.20396997506005191,
0.17611526210104891, 0.038288283357935902, 0.089798932193991862,
0.059852002701952366, 0.14046399667265153, 0.14358230601885319,
0.37756828619772614, 0.53879622915155967, 0.55727516146849621,
0.65662251462295651, 0.73977455716586071, 0.76621708413615852,
0.43288375080282515, 0.099550417469491795, 0.041828793774319611,
0.13651102464332113, 0.13651102464332113, 0.09468223086900153,
0.12363814420369711, 0.12363814420369711, 0.41286271620772635,
0.62255790533736255, 0.95589123867069592, 0.80979187990446944,
0.52627089730467358, 0.21630842715796614, 0.13186625725008391,
0.20726714431934493, 0.26475605595359963, 0.23267363636244134,
0.17625511512541078, 0.21904237754540393, 0.52577508713997501,
0.79031370390754041, 1.0, 1.0, 0.9001416430594924, 0.9001416430594924,
0.9001416430594924, 1.0, 1.0, 0.97082611207394587, 0.84790001855961783,
0.7766671418472858, 0.47250769644000673, 0.31162596077283883,
0.1346381969157758, 0.24406880189798374, 0.19454329774614632,
0.10943060498220793, 0.0, 0.0, 0.0, 0.013511438661139167,
0.013511438661139167, 0.032638883320499211, 0.019127444659360039,
0.019127444659360039, 0.0, 0.0021513540875722457, 0.0021513540875722457,
0.036043971537236423, 0.033892617449664174, 0.033892617449664174, 0.0,
0.012351029252437973, 0.012351029252437973, 0.070153250755727301]
def test_percent_k_period_6(self):
period = 6
percent_k = stochastic.percent_k(self.data, period)
np.testing.assert_array_equal(percent_k, self.percent_k_period_6_expected)
def test_percent_k_period_8(self):
period = 8
percent_k = stochastic.percent_k(self.data, period)
np.testing.assert_array_equal(percent_k, self.percent_k_period_8_expected)
def test_percent_k_period_10(self):
period = 10
percent_k = stochastic.percent_k(self.data, period)
np.testing.assert_array_equal(percent_k, self.percent_k_period_10_expected)
def test_percent_k_invalid_period(self):
period = 128
with self.assertRaises(Exception):
stochastic.percent_k(self.data, period)
def test_percent_d_period_6(self):
period = 6
percent_d = stochastic.percent_d(self.data, period)
np.testing.assert_array_equal(percent_d, self.percent_d_period_6_expected)
def test_percent_d_period_8(self):
period = 8
percent_d = stochastic.percent_d(self.data, period)
np.testing.assert_array_equal(percent_d, self.percent_d_period_8_expected)
def test_percent_d_period_10(self):
period = 10
percent_d = stochastic.percent_d(self.data, period)
np.testing.assert_array_equal(percent_d, self.percent_d_period_10_expected)
def test_percent_d_invalid_period(self):
period = 128
with self.assertRaises(Exception) as cm:
stochastic.percent_d(self.data, period)
expected = "Error: data_len < period"
self.assertEqual(str(cm.exception), expected)
| mit |
kenshay/ImageScripter | Script_Runner/PYTHON/Lib/site-packages/setuptools/command/install_lib.py | 431 | 3840 | import os
import imp
from itertools import product, starmap
import distutils.command.install_lib as orig
class install_lib(orig.install_lib):
    """Don't add compiled flags to filenames of non-Python files"""

    def run(self):
        """Build, install, and byte-compile the installed files."""
        self.build()
        outfiles = self.install()
        if outfiles is not None:
            # always compile, in case we have any extension stubs to deal with
            self.byte_compile(outfiles)

    def get_exclusions(self):
        """
        Return a collections.Sized collections.Container of paths to be
        excluded for single_version_externally_managed installations.
        """
        # All namespace packages plus each of their parent packages.
        all_packages = (
            pkg
            for ns_pkg in self._get_SVEM_NSPs()
            for pkg in self._all_packages(ns_pkg)
        )
        # Cross every package with every bytecode-cache exclusion path.
        excl_specs = product(all_packages, self._gen_exclusion_paths())
        return set(starmap(self._exclude_pkg_path, excl_specs))

    def _exclude_pkg_path(self, pkg, exclusion_path):
        """
        Given a package name and exclusion path within that package,
        compute the full exclusion path.
        """
        parts = pkg.split('.') + [exclusion_path]
        return os.path.join(self.install_dir, *parts)

    @staticmethod
    def _all_packages(pkg_name):
        """Yield the package and each of its ancestors, leaf first.

        >>> list(install_lib._all_packages('foo.bar.baz'))
        ['foo.bar.baz', 'foo.bar', 'foo']
        """
        while pkg_name:
            yield pkg_name
            pkg_name, sep, child = pkg_name.rpartition('.')

    def _get_SVEM_NSPs(self):
        """
        Get namespace packages (list) but only for
        single_version_externally_managed installations and empty otherwise.
        """
        # TODO: is it necessary to short-circuit here? i.e. what's the cost
        # if get_finalized_command is called even when namespace_packages is
        # False?
        if not self.distribution.namespace_packages:
            return []

        install_cmd = self.get_finalized_command('install')
        svem = install_cmd.single_version_externally_managed

        return self.distribution.namespace_packages if svem else []

    @staticmethod
    def _gen_exclusion_paths():
        """
        Generate file paths to be excluded for namespace packages (bytecode
        cache files).
        """
        # always exclude the package module itself
        yield '__init__.py'

        yield '__init__.pyc'
        yield '__init__.pyo'

        # ``imp`` is deprecated; the hasattr guard keeps compatibility with
        # very old Pythons whose imp lacks get_tag().
        if not hasattr(imp, 'get_tag'):
            return

        base = os.path.join('__pycache__', '__init__.' + imp.get_tag())
        yield base + '.pyc'
        yield base + '.pyo'
        yield base + '.opt-1.pyc'
        yield base + '.opt-2.pyc'

    def copy_tree(
            self, infile, outfile,
            preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
    ):
        """Copy the tree, filtering out namespace-package __init__ files."""
        assert preserve_mode and preserve_times and not preserve_symlinks
        exclude = self.get_exclusions()

        if not exclude:
            # Nothing to filter; defer to the stock distutils implementation.
            return orig.install_lib.copy_tree(self, infile, outfile)

        # Exclude namespace package __init__.py* files from the output
        from setuptools.archive_util import unpack_directory
        from distutils import log

        outfiles = []

        def pf(src, dst):
            # Progress filter: returning False skips the file entirely.
            if dst in exclude:
                log.warn("Skipping installation of %s (namespace package)",
                         dst)
                return False

            log.info("copying %s -> %s", src, os.path.dirname(dst))
            outfiles.append(dst)
            return dst

        unpack_directory(infile, outfile, pf)

        return outfiles

    def get_outputs(self):
        """Return installed files minus the excluded namespace-package files."""
        outputs = orig.install_lib.get_outputs(self)
        exclude = self.get_exclusions()
        if exclude:
            return [f for f in outputs if f not in exclude]
        return outputs
| gpl-3.0 |
edcast-inc/edx-platform-edcast | common/djangoapps/student/tests/test_login.py | 1 | 25194 | '''
Tests for student activation and login
'''
import json
import unittest
from unittest import skip
from django.test import TestCase
from django.test.client import Client
from django.test.utils import override_settings
from django.conf import settings
from django.core.cache import cache
from django.core.urlresolvers import reverse, NoReverseMatch
from django.http import HttpResponseBadRequest, HttpResponse
import httpretty
from mock import patch
from social.apps.django_app.default.models import UserSocialAuth
from external_auth.models import ExternalAuthMap
from student.tests.factories import UserFactory, RegistrationFactory, UserProfileFactory
from student.views import login_oauth_token
from third_party_auth.tests.utils import (
ThirdPartyOAuthTestMixin,
ThirdPartyOAuthTestMixinFacebook,
ThirdPartyOAuthTestMixinGoogle
)
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
class LoginTest(TestCase):
    '''
    Test student.views.login_user() view
    '''
    def setUp(self):
        super(LoginTest, self).setUp()
        # Create one user and save it to the database
        self.user = UserFactory.build(username='test', email='test@edx.org')
        self.user.set_password('test_password')
        self.user.save()
        # Create a registration for the user
        RegistrationFactory(user=self.user)
        # Create a profile for the user
        UserProfileFactory(user=self.user)
        # Create the test client
        self.client = Client()
        cache.clear()
        # Store the login url
        # 'login_post' exists in LMS; CMS only exposes 'login'.
        try:
            self.url = reverse('login_post')
        except NoReverseMatch:
            self.url = reverse('login')
    # Happy path: valid credentials succeed and are recorded in the audit log.
    def test_login_success(self):
        response, mock_audit_log = self._login_response('test@edx.org', 'test_password', patched_audit_log='student.models.AUDIT_LOG')
        self._assert_response(response, success=True)
        self._assert_audit_log(mock_audit_log, 'info', [u'Login success', u'test@edx.org'])
    # With SQUELCH_PII_IN_LOGS the email must be kept out of the audit log.
    @patch.dict("django.conf.settings.FEATURES", {'SQUELCH_PII_IN_LOGS': True})
    def test_login_success_no_pii(self):
        response, mock_audit_log = self._login_response('test@edx.org', 'test_password', patched_audit_log='student.models.AUDIT_LOG')
        self._assert_response(response, success=True)
        self._assert_audit_log(mock_audit_log, 'info', [u'Login success'])
        self._assert_not_in_audit_log(mock_audit_log, 'info', [u'test@edx.org'])
    def test_login_success_unicode_email(self):
        # Non-ASCII email addresses must round-trip through login and logging.
        unicode_email = u'test' + unichr(40960) + u'@edx.org'
        self.user.email = unicode_email
        self.user.save()
        response, mock_audit_log = self._login_response(unicode_email, 'test_password', patched_audit_log='student.models.AUDIT_LOG')
        self._assert_response(response, success=True)
        self._assert_audit_log(mock_audit_log, 'info', [u'Login success', unicode_email])
    # Unknown emails get the same generic error as wrong passwords, so the
    # response does not reveal whether an account exists.
    def test_login_fail_no_user_exists(self):
        nonexistent_email = u'not_a_user@edx.org'
        response, mock_audit_log = self._login_response(nonexistent_email, 'test_password')
        self._assert_response(response, success=False,
                              value='Email or password is incorrect')
        self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', u'Unknown user email', nonexistent_email])
    @patch.dict("django.conf.settings.FEATURES", {'ADVANCED_SECURITY': True})
    def test_login_fail_incorrect_email_with_advanced_security(self):
        nonexistent_email = u'not_a_user@edx.org'
        response, mock_audit_log = self._login_response(nonexistent_email, 'test_password')
        self._assert_response(response, success=False,
                              value='Email or password is incorrect')
        self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', u'Unknown user email', nonexistent_email])
    @patch.dict("django.conf.settings.FEATURES", {'SQUELCH_PII_IN_LOGS': True})
    def test_login_fail_no_user_exists_no_pii(self):
        nonexistent_email = u'not_a_user@edx.org'
        response, mock_audit_log = self._login_response(nonexistent_email, 'test_password')
        self._assert_response(response, success=False,
                              value='Email or password is incorrect')
        self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', u'Unknown user email'])
        self._assert_not_in_audit_log(mock_audit_log, 'warning', [nonexistent_email])
    def test_login_fail_wrong_password(self):
        response, mock_audit_log = self._login_response('test@edx.org', 'wrong_password')
        self._assert_response(response, success=False,
                              value='Email or password is incorrect')
        self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', u'password for', u'test@edx.org', u'invalid'])
    @patch.dict("django.conf.settings.FEATURES", {'SQUELCH_PII_IN_LOGS': True})
    def test_login_fail_wrong_password_no_pii(self):
        response, mock_audit_log = self._login_response('test@edx.org', 'wrong_password')
        self._assert_response(response, success=False,
                              value='Email or password is incorrect')
        self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', u'password for', u'invalid'])
        self._assert_not_in_audit_log(mock_audit_log, 'warning', [u'test@edx.org'])
    def test_login_not_activated(self):
        # De-activate the user
        self.user.is_active = False
        self.user.save()
        # Should now be unable to login
        response, mock_audit_log = self._login_response('test@edx.org', 'test_password')
        self._assert_response(response, success=False,
                              value="This account has not been activated")
        self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', u'Account not active for user'])
    @patch.dict("django.conf.settings.FEATURES", {'SQUELCH_PII_IN_LOGS': True})
    def test_login_not_activated_no_pii(self):
        # De-activate the user
        self.user.is_active = False
        self.user.save()
        # Should now be unable to login
        response, mock_audit_log = self._login_response('test@edx.org', 'test_password')
        self._assert_response(response, success=False,
                              value="This account has not been activated")
        self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', u'Account not active for user'])
        self._assert_not_in_audit_log(mock_audit_log, 'warning', [u'test'])
    def test_login_unicode_email(self):
        # An email that does not match any user (because of the appended
        # non-ASCII char) must fail cleanly, not crash on encoding.
        unicode_email = u'test@edx.org' + unichr(40960)
        response, mock_audit_log = self._login_response(unicode_email, 'test_password')
        self._assert_response(response, success=False)
        self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', unicode_email])
    def test_login_unicode_password(self):
        unicode_password = u'test_password' + unichr(1972)
        response, mock_audit_log = self._login_response('test@edx.org', unicode_password)
        self._assert_response(response, success=False)
        self._assert_audit_log(mock_audit_log, 'warning', [u'Login failed', u'password for', u'test@edx.org', u'invalid'])
    def test_logout_logging(self):
        # Logout redirects (302) and leaves an info-level audit entry.
        response, _ = self._login_response('test@edx.org', 'test_password')
        self._assert_response(response, success=True)
        logout_url = reverse('logout')
        with patch('student.models.AUDIT_LOG') as mock_audit_log:
            response = self.client.post(logout_url)
        self.assertEqual(response.status_code, 302)
        self._assert_audit_log(mock_audit_log, 'info', [u'Logout', u'test'])
    def test_login_user_info_cookie(self):
        response, _ = self._login_response('test@edx.org', 'test_password')
        self._assert_response(response, success=True)
        # Verify the format of the "user info" cookie set on login
        cookie = self.client.cookies[settings.EDXMKTG_USER_INFO_COOKIE_NAME]
        user_info = json.loads(cookie.value)
        # Check that the version is set
        self.assertEqual(user_info["version"], settings.EDXMKTG_USER_INFO_COOKIE_VERSION)
        # Check that the username and email are set
        self.assertEqual(user_info["username"], self.user.username)
        self.assertEqual(user_info["email"], self.user.email)
        # Check that the URLs are absolute
        for url in user_info["header_urls"].values():
            self.assertIn("http://testserver/", url)
    @skip('we skip in edcast')
    def test_logout_deletes_mktg_cookies(self):
        response, _ = self._login_response('test@edx.org', 'test_password')
        self._assert_response(response, success=True)
        # Check that the marketing site cookies have been set
        self.assertIn(settings.EDXMKTG_LOGGED_IN_COOKIE_NAME, self.client.cookies)
        self.assertIn(settings.EDXMKTG_USER_INFO_COOKIE_NAME, self.client.cookies)
        # Log out
        logout_url = reverse('logout')
        response = self.client.post(logout_url)
        # Check that the marketing site cookies have been deleted
        # (cookies are deleted by setting an expiration date in 1970)
        for cookie_name in [settings.EDXMKTG_LOGGED_IN_COOKIE_NAME, settings.EDXMKTG_USER_INFO_COOKIE_NAME]:
            cookie = self.client.cookies[cookie_name]
            self.assertIn("01-Jan-1970", cookie.get('expires'))
    @override_settings(
        EDXMKTG_LOGGED_IN_COOKIE_NAME=u"unicode-logged-in",
        EDXMKTG_USER_INFO_COOKIE_NAME=u"unicode-user-info",
    )
    @skip('we skip in edcast')
    def test_unicode_mktg_cookie_names(self):
        # When logged in cookie names are loaded from JSON files, they may
        # have type `unicode` instead of `str`, which can cause errors
        # when calling Django cookie manipulation functions.
        response, _ = self._login_response('test@edx.org', 'test_password')
        self._assert_response(response, success=True)
        response = self.client.post(reverse('logout'))
        self.assertRedirects(response, "/")
    @patch.dict("django.conf.settings.FEATURES", {'SQUELCH_PII_IN_LOGS': True})
    def test_logout_logging_no_pii(self):
        response, _ = self._login_response('test@edx.org', 'test_password')
        self._assert_response(response, success=True)
        logout_url = reverse('logout')
        with patch('student.models.AUDIT_LOG') as mock_audit_log:
            response = self.client.post(logout_url)
        self.assertEqual(response.status_code, 302)
        self._assert_audit_log(mock_audit_log, 'info', [u'Logout'])
        self._assert_not_in_audit_log(mock_audit_log, 'info', [u'test'])
    def test_login_ratelimited_success(self):
        # Try (and fail) logging in with fewer attempts than the limit of 30
        # and verify that you can still successfully log in afterwards.
        for i in xrange(20):
            password = u'test_password{0}'.format(i)
            response, _audit_log = self._login_response('test@edx.org', password)
            self._assert_response(response, success=False)
        # now try logging in with a valid password
        response, _audit_log = self._login_response('test@edx.org', 'test_password')
        self._assert_response(response, success=True)
    def test_login_ratelimited(self):
        # try logging in 30 times, the default limit in the number of failed
        # login attempts in one 5 minute period before the rate gets limited
        for i in xrange(30):
            password = u'test_password{0}'.format(i)
            self._login_response('test@edx.org', password)
        # check to see if this response indicates that this was ratelimited
        response, _audit_log = self._login_response('test@edx.org', 'wrong_password')
        self._assert_response(response, success=False, value='Too many failed login attempts')
    @patch.dict("django.conf.settings.FEATURES", {'PREVENT_CONCURRENT_LOGINS': True})
    def test_single_session(self):
        creds = {'email': 'test@edx.org', 'password': 'test_password'}
        client1 = Client()
        client2 = Client()
        response = client1.post(self.url, creds)
        self._assert_response(response, success=True)
        # Reload the user from the database
        self.user = UserFactory.FACTORY_FOR.objects.get(pk=self.user.pk)
        self.assertEqual(self.user.profile.get_meta()['session_id'], client1.session.session_key)
        # second login should log out the first
        response = client2.post(self.url, creds)
        self._assert_response(response, success=True)
        try:
            # this test can be run with either lms or studio settings
            # since studio does not have a dashboard url, we should
            # look for another url that is login_required, in that case
            url = reverse('dashboard')
        except NoReverseMatch:
            url = reverse('upload_transcripts')
        response = client1.get(url)
        # client1 will be logged out
        self.assertEqual(response.status_code, 302)
    @patch.dict("django.conf.settings.FEATURES", {'PREVENT_CONCURRENT_LOGINS': True})
    def test_single_session_with_url_not_having_login_required_decorator(self):
        # accessing logout url as it does not have login-required decorator it will avoid redirect
        # and go inside the enforce_single_login
        creds = {'email': 'test@edx.org', 'password': 'test_password'}
        client1 = Client()
        client2 = Client()
        response = client1.post(self.url, creds)
        self._assert_response(response, success=True)
        self.assertEqual(self.user.profile.get_meta()['session_id'], client1.session.session_key)
        # second login should log out the first
        response = client2.post(self.url, creds)
        self._assert_response(response, success=True)
        url = reverse('logout')
        response = client1.get(url)
        self.assertEqual(response.status_code, 302)
    def test_change_enrollment_400(self):
        """
        Tests that a 400 in change_enrollment doesn't lead to a 404
        and in fact just logs in the user without incident
        """
        # add this post param to trigger a call to change_enrollment
        extra_post_params = {"enrollment_action": "enroll"}
        with patch('student.views.change_enrollment') as mock_change_enrollment:
            mock_change_enrollment.return_value = HttpResponseBadRequest("I am a 400")
            response, _ = self._login_response(
                'test@edx.org',
                'test_password',
                extra_post_params=extra_post_params,
            )
        response_content = json.loads(response.content)
        self.assertIsNone(response_content["redirect_url"])
        self._assert_response(response, success=True)
    def test_change_enrollment_200_no_redirect(self):
        """
        Tests "redirect_url" is None if change_enrollment returns a HttpResponse
        with no content
        """
        # add this post param to trigger a call to change_enrollment
        extra_post_params = {"enrollment_action": "enroll"}
        with patch('student.views.change_enrollment') as mock_change_enrollment:
            mock_change_enrollment.return_value = HttpResponse()
            response, _ = self._login_response(
                'test@edx.org',
                'test_password',
                extra_post_params=extra_post_params,
            )
        response_content = json.loads(response.content)
        self.assertIsNone(response_content["redirect_url"])
        self._assert_response(response, success=True)
    def _login_response(self, email, password, patched_audit_log='student.views.AUDIT_LOG', extra_post_params=None):
        ''' Post the login info '''
        # Returns (response, mock_audit_log) so callers can assert on both
        # the HTTP result and what was logged while handling it.
        post_params = {'email': email, 'password': password}
        if extra_post_params is not None:
            post_params.update(extra_post_params)
        with patch(patched_audit_log) as mock_audit_log:
            result = self.client.post(self.url, post_params)
        return result, mock_audit_log
    def _assert_response(self, response, success=None, value=None):
        '''
        Assert that the response had status 200 and returned a valid
        JSON-parseable dict.
        If success is provided, assert that the response had that
        value for 'success' in the JSON dict.
        If value is provided, assert that the response contained that
        value for 'value' in the JSON dict.
        '''
        self.assertEqual(response.status_code, 200)
        try:
            response_dict = json.loads(response.content)
        except ValueError:
            self.fail("Could not parse response content as JSON: %s"
                      % str(response.content))
        if success is not None:
            self.assertEqual(response_dict['success'], success)
        if value is not None:
            msg = ("'%s' did not contain '%s'" %
                   (str(response_dict['value']), str(value)))
            self.assertTrue(value in response_dict['value'], msg)
    def _assert_audit_log(self, mock_audit_log, level, log_strings):
        """
        Check that the audit log has received the expected call as its last call.
        """
        method_calls = mock_audit_log.method_calls
        # Only the most recent call is inspected.
        name, args, _kwargs = method_calls[-1]
        self.assertEquals(name, level)
        self.assertEquals(len(args), 1)
        format_string = args[0]
        for log_string in log_strings:
            self.assertIn(log_string, format_string)
    def _assert_not_in_audit_log(self, mock_audit_log, level, log_strings):
        """
        Check that the audit log's last call was at the given level but did
        NOT contain any of the given strings (used for PII-squelch tests).
        """
        method_calls = mock_audit_log.method_calls
        name, args, _kwargs = method_calls[-1]
        self.assertEquals(name, level)
        self.assertEquals(len(args), 1)
        format_string = args[0]
        for log_string in log_strings:
            self.assertNotIn(log_string, format_string)
class ExternalAuthShibTest(ModuleStoreTestCase):
    """
    Tests how login_user() interacts with ExternalAuth, in particular Shib
    """
    def setUp(self):
        super(ExternalAuthShibTest, self).setUp()
        # self.user is presumably created by ModuleStoreTestCase.setUp() --
        # TODO confirm against the base class.
        # One open course and one course restricted to the Shib IdP domain.
        self.course = CourseFactory.create(
            org='Stanford',
            number='456',
            display_name='NO SHIB',
            user_id=self.user.id,
        )
        self.shib_course = CourseFactory.create(
            org='Stanford',
            number='123',
            display_name='Shib Only',
            enrollment_domain='shib:https://idp.stanford.edu/',
            user_id=self.user.id,
        )
        # A user with an ExternalAuthMap pointing at the Stanford IdP...
        self.user_w_map = UserFactory.create(email='withmap@stanford.edu')
        self.extauth = ExternalAuthMap(external_id='withmap@stanford.edu',
                                       external_email='withmap@stanford.edu',
                                       external_domain='shib:https://idp.stanford.edu/',
                                       external_credentials="",
                                       user=self.user_w_map)
        self.user_w_map.save()
        self.extauth.save()
        # ...and one without any external-auth mapping.
        self.user_wo_map = UserFactory.create(email='womap@gmail.com')
        self.user_wo_map.save()
    @unittest.skipUnless(settings.FEATURES.get('AUTH_USE_SHIB'), "AUTH_USE_SHIB not set")
    def test_login_page_redirect(self):
        """
        Tests that when a shib user types their email address into the login page, they get redirected
        to the shib login.
        """
        response = self.client.post(reverse('login'), {'email': self.user_w_map.email, 'password': ''})
        self.assertEqual(response.status_code, 200)
        obj = json.loads(response.content)
        self.assertEqual(obj, {
            'success': False,
            'redirect': reverse('shib-login'),
        })
    @unittest.skipUnless(settings.FEATURES.get('AUTH_USE_SHIB'), "AUTH_USE_SHIB not set")
    def test_login_required_dashboard(self):
        """
        Tests redirects to when @login_required to dashboard, which should always be the normal login,
        since there is no course context
        """
        response = self.client.get(reverse('dashboard'))
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response['Location'], 'http://testserver/login?next=/dashboard')
    @unittest.skipUnless(settings.FEATURES.get('AUTH_USE_SHIB'), "AUTH_USE_SHIB not set")
    def test_externalauth_login_required_course_context(self):
        """
        Tests the redirects when visiting course-specific URL with @login_required.
        Should vary by course depending on its enrollment_domain
        """
        TARGET_URL = reverse('courseware', args=[self.course.id.to_deprecated_string()])  # pylint: disable=invalid-name
        noshib_response = self.client.get(TARGET_URL, follow=True)
        self.assertEqual(noshib_response.redirect_chain[-1],
                         ('http://testserver/login?next={url}'.format(url=TARGET_URL), 302))
        self.assertContains(noshib_response, ("Sign in or Register | {platform_name}"
                                              .format(platform_name=settings.PLATFORM_NAME)))
        self.assertEqual(noshib_response.status_code, 200)
        TARGET_URL_SHIB = reverse('courseware', args=[self.shib_course.id.to_deprecated_string()])  # pylint: disable=invalid-name
        # Simulate the Shib-authenticated request headers the IdP would set.
        shib_response = self.client.get(**{'path': TARGET_URL_SHIB,
                                           'follow': True,
                                           'REMOTE_USER': self.extauth.external_id,
                                           'Shib-Identity-Provider': 'https://idp.stanford.edu/'})
        # Test that the shib-login redirect page with ?next= and the desired page are part of the redirect chain
        # The 'courseware' page actually causes a redirect itself, so it's not the end of the chain and we
        # won't test its contents
        self.assertEqual(shib_response.redirect_chain[-3],
                         ('http://testserver/shib-login/?next={url}'.format(url=TARGET_URL_SHIB), 302))
        self.assertEqual(shib_response.redirect_chain[-2],
                         ('http://testserver{url}'.format(url=TARGET_URL_SHIB), 302))
        self.assertEqual(shib_response.status_code, 200)
@httpretty.activate
class LoginOAuthTokenMixin(ThirdPartyOAuthTestMixin):
    """
    Mixin with tests for the login_oauth_token view. A TestCase that includes
    this must define the following:
    BACKEND: The name of the backend from python-social-auth
    USER_URL: The URL of the endpoint that the backend retrieves user data from
    UID_FIELD: The field in the user data that the backend uses as the user id
    """
    def setUp(self):
        super(LoginOAuthTokenMixin, self).setUp()
        # Resolve the view URL once for the concrete backend under test.
        self.url = reverse(login_oauth_token, kwargs={"backend": self.BACKEND})
    def _assert_error(self, response, status_code, error):
        """Assert that the given response was a 400 with the given error code"""
        # (status_code is actually parameterized; the view also returns 401s.)
        self.assertEqual(response.status_code, status_code)
        self.assertEqual(json.loads(response.content), {"error": error})
        # A failed token login must not leave a half-finished auth pipeline.
        self.assertNotIn("partial_pipeline", self.client.session)
    def test_success(self):
        self._setup_provider_response(success=True)
        response = self.client.post(self.url, {"access_token": "dummy"})
        self.assertEqual(response.status_code, 204)
        self.assertEqual(self.client.session['_auth_user_id'], self.user.id)  # pylint: disable=no-member
    def test_invalid_token(self):
        self._setup_provider_response(success=False)
        response = self.client.post(self.url, {"access_token": "dummy"})
        self._assert_error(response, 401, "invalid_token")
    def test_missing_token(self):
        response = self.client.post(self.url)
        self._assert_error(response, 400, "invalid_request")
    def test_unlinked_user(self):
        # Token is valid, but no UserSocialAuth row links it to a user.
        UserSocialAuth.objects.all().delete()
        self._setup_provider_response(success=True)
        response = self.client.post(self.url, {"access_token": "dummy"})
        self._assert_error(response, 401, "invalid_token")
    def test_get_method(self):
        # Only POST is allowed for token login.
        response = self.client.get(self.url, {"access_token": "dummy"})
        self.assertEqual(response.status_code, 405)
# This is necessary because cms does not implement third party auth
@unittest.skipUnless(settings.FEATURES.get("ENABLE_THIRD_PARTY_AUTH"), "third party auth not enabled")
class LoginOAuthTokenTestFacebook(LoginOAuthTokenMixin, ThirdPartyOAuthTestMixinFacebook, TestCase):
    """Tests login_oauth_token with the Facebook backend"""
    pass
# This is necessary because cms does not implement third party auth
@unittest.skipUnless(settings.FEATURES.get("ENABLE_THIRD_PARTY_AUTH"), "third party auth not enabled")
class LoginOAuthTokenTestGoogle(LoginOAuthTokenMixin, ThirdPartyOAuthTestMixinGoogle, TestCase):
    """Tests login_oauth_token with the Google backend"""
    pass
| agpl-3.0 |
endlessm/chromium-browser | net/tools/build_hpack_constants.py | 43 | 18730 | #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This script builds a table that has to be manully inserted into
net/spdy/hpack/hpack_constants.cc. It also contains data that potentially has to
be updated if the http hpack algorithm changes its constants."""
import re
# This is from
# http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-08
# It may include dummy rows as long as those don't look too much like real
# data.
SPEC_DATA_DRAFT_08 = r"""
code
code as bits as hex len
sym aligned to MSB aligned in
to LSB bits
( 0) |11111111|11000 1ff8 [13]
( 1) |11111111|11111111|1011000 7fffd8 [23]
( 2) |11111111|11111111|11111110|0010 fffffe2 [28]
( 3) |11111111|11111111|11111110|0011 fffffe3 [28]
( 4) |11111111|11111111|11111110|0100 fffffe4 [28]
( 5) |11111111|11111111|11111110|0101 fffffe5 [28]
( 6) |11111111|11111111|11111110|0110 fffffe6 [28]
( 7) |11111111|11111111|11111110|0111 fffffe7 [28]
( 8) |11111111|11111111|11111110|1000 fffffe8 [28]
( 9) |11111111|11111111|11101010 ffffea [24]
( 10) |11111111|11111111|11111111|111100 3ffffffc [30]
( 11) |11111111|11111111|11111110|1001 fffffe9 [28]
( 12) |11111111|11111111|11111110|1010 fffffea [28]
( 13) |11111111|11111111|11111111|111101 3ffffffd [30]
( 14) |11111111|11111111|11111110|1011 fffffeb [28]
( 15) |11111111|11111111|11111110|1100 fffffec [28]
( 16) |11111111|11111111|11111110|1101 fffffed [28]
( 17) |11111111|11111111|11111110|1110 fffffee [28]
( 18) |11111111|11111111|11111110|1111 fffffef [28]
( 19) |11111111|11111111|11111111|0000 ffffff0 [28]
( 20) |11111111|11111111|11111111|0001 ffffff1 [28]
( 21) |11111111|11111111|11111111|0010 ffffff2 [28]
( 22) |11111111|11111111|11111111|111110 3ffffffe [30]
( 23) |11111111|11111111|11111111|0011 ffffff3 [28]
( 24) |11111111|11111111|11111111|0100 ffffff4 [28]
( 25) |11111111|11111111|11111111|0101 ffffff5 [28]
( 26) |11111111|11111111|11111111|0110 ffffff6 [28]
( 27) |11111111|11111111|11111111|0111 ffffff7 [28]
( 28) |11111111|11111111|11111111|1000 ffffff8 [28]
( 29) |11111111|11111111|11111111|1001 ffffff9 [28]
( 30) |11111111|11111111|11111111|1010 ffffffa [28]
( 31) |11111111|11111111|11111111|1011 ffffffb [28]
' ' ( 32) |010100 14 [ 6]
'!' ( 33) |11111110|00 3f8 [10]
'"' ( 34) |11111110|01 3f9 [10]
'#' ( 35) |11111111|1010 ffa [12]
'$' ( 36) |11111111|11001 1ff9 [13]
'%' ( 37) |010101 15 [ 6]
'&' ( 38) |11111000 f8 [ 8]
''' ( 39) |11111111|010 7fa [11]
'(' ( 40) |11111110|10 3fa [10]
')' ( 41) |11111110|11 3fb [10]
'*' ( 42) |11111001 f9 [ 8]
'+' ( 43) |11111111|011 7fb [11]
',' ( 44) |11111010 fa [ 8]
'-' ( 45) |010110 16 [ 6]
'.' ( 46) |010111 17 [ 6]
'/' ( 47) |011000 18 [ 6]
'0' ( 48) |00000 0 [ 5]
'1' ( 49) |00001 1 [ 5]
'2' ( 50) |00010 2 [ 5]
'3' ( 51) |011001 19 [ 6]
'4' ( 52) |011010 1a [ 6]
'5' ( 53) |011011 1b [ 6]
'6' ( 54) |011100 1c [ 6]
'7' ( 55) |011101 1d [ 6]
'8' ( 56) |011110 1e [ 6]
'9' ( 57) |011111 1f [ 6]
':' ( 58) |1011100 5c [ 7]
';' ( 59) |11111011 fb [ 8]
'<' ( 60) |11111111|1111100 7ffc [15]
'=' ( 61) |100000 20 [ 6]
'>' ( 62) |11111111|1011 ffb [12]
'?' ( 63) |11111111|00 3fc [10]
'@' ( 64) |11111111|11010 1ffa [13]
'A' ( 65) |100001 21 [ 6]
'B' ( 66) |1011101 5d [ 7]
'C' ( 67) |1011110 5e [ 7]
'D' ( 68) |1011111 5f [ 7]
'E' ( 69) |1100000 60 [ 7]
'F' ( 70) |1100001 61 [ 7]
'G' ( 71) |1100010 62 [ 7]
'H' ( 72) |1100011 63 [ 7]
'I' ( 73) |1100100 64 [ 7]
'J' ( 74) |1100101 65 [ 7]
'K' ( 75) |1100110 66 [ 7]
'L' ( 76) |1100111 67 [ 7]
'M' ( 77) |1101000 68 [ 7]
'N' ( 78) |1101001 69 [ 7]
'O' ( 79) |1101010 6a [ 7]
'P' ( 80) |1101011 6b [ 7]
'Q' ( 81) |1101100 6c [ 7]
'R' ( 82) |1101101 6d [ 7]
'S' ( 83) |1101110 6e [ 7]
'T' ( 84) |1101111 6f [ 7]
'U' ( 85) |1110000 70 [ 7]
'V' ( 86) |1110001 71 [ 7]
'W' ( 87) |1110010 72 [ 7]
'X' ( 88) |11111100 fc [ 8]
'Y' ( 89) |1110011 73 [ 7]
'Z' ( 90) |11111101 fd [ 8]
'[' ( 91) |11111111|11011 1ffb [13]
'\' ( 92) |11111111|11111110|000 7fff0 [19]
']' ( 93) |11111111|11100 1ffc [13]
'^' ( 94) |11111111|111100 3ffc [14]
'_' ( 95) |100010 22 [ 6]
'`' ( 96) |11111111|1111101 7ffd [15]
'a' ( 97) |00011 3 [ 5]
'b' ( 98) |100011 23 [ 6]
'c' ( 99) |00100 4 [ 5]
'd' (100) |100100 24 [ 6]
'e' (101) |00101 5 [ 5]
'f' (102) |100101 25 [ 6]
'g' (103) |100110 26 [ 6]
'h' (104) |100111 27 [ 6]
'i' (105) |00110 6 [ 5]
'j' (106) |1110100 74 [ 7]
'k' (107) |1110101 75 [ 7]
'l' (108) |101000 28 [ 6]
'm' (109) |101001 29 [ 6]
'n' (110) |101010 2a [ 6]
'o' (111) |00111 7 [ 5]
'p' (112) |101011 2b [ 6]
'q' (113) |1110110 76 [ 7]
'r' (114) |101100 2c [ 6]
's' (115) |01000 8 [ 5]
't' (116) |01001 9 [ 5]
'u' (117) |101101 2d [ 6]
'v' (118) |1110111 77 [ 7]
'w' (119) |1111000 78 [ 7]
'x' (120) |1111001 79 [ 7]
'y' (121) |1111010 7a [ 7]
'z' (122) |1111011 7b [ 7]
'{' (123) |11111111|1111110 7ffe [15]
'|' (124) |11111111|100 7fc [11]
'}' (125) |11111111|111101 3ffd [14]
'~' (126) |11111111|11101 1ffd [13]
(127) |11111111|11111111|11111111|1100 ffffffc [28]
(128) |11111111|11111110|0110 fffe6 [20]
(129) |11111111|11111111|010010 3fffd2 [22]
(130) |11111111|11111110|0111 fffe7 [20]
(131) |11111111|11111110|1000 fffe8 [20]
(132) |11111111|11111111|010011 3fffd3 [22]
(133) |11111111|11111111|010100 3fffd4 [22]
(134) |11111111|11111111|010101 3fffd5 [22]
(135) |11111111|11111111|1011001 7fffd9 [23]
(136) |11111111|11111111|010110 3fffd6 [22]
(137) |11111111|11111111|1011010 7fffda [23]
(138) |11111111|11111111|1011011 7fffdb [23]
(139) |11111111|11111111|1011100 7fffdc [23]
(140) |11111111|11111111|1011101 7fffdd [23]
(141) |11111111|11111111|1011110 7fffde [23]
(142) |11111111|11111111|11101011 ffffeb [24]
(143) |11111111|11111111|1011111 7fffdf [23]
(144) |11111111|11111111|11101100 ffffec [24]
(145) |11111111|11111111|11101101 ffffed [24]
(146) |11111111|11111111|010111 3fffd7 [22]
(147) |11111111|11111111|1100000 7fffe0 [23]
(148) |11111111|11111111|11101110 ffffee [24]
(149) |11111111|11111111|1100001 7fffe1 [23]
(150) |11111111|11111111|1100010 7fffe2 [23]
(151) |11111111|11111111|1100011 7fffe3 [23]
(152) |11111111|11111111|1100100 7fffe4 [23]
(153) |11111111|11111110|11100 1fffdc [21]
(154) |11111111|11111111|011000 3fffd8 [22]
(155) |11111111|11111111|1100101 7fffe5 [23]
(156) |11111111|11111111|011001 3fffd9 [22]
(157) |11111111|11111111|1100110 7fffe6 [23]
(158) |11111111|11111111|1100111 7fffe7 [23]
(159) |11111111|11111111|11101111 ffffef [24]
(160) |11111111|11111111|011010 3fffda [22]
(161) |11111111|11111110|11101 1fffdd [21]
(162) |11111111|11111110|1001 fffe9 [20]
(163) |11111111|11111111|011011 3fffdb [22]
(164) |11111111|11111111|011100 3fffdc [22]
(165) |11111111|11111111|1101000 7fffe8 [23]
(166) |11111111|11111111|1101001 7fffe9 [23]
(167) |11111111|11111110|11110 1fffde [21]
(168) |11111111|11111111|1101010 7fffea [23]
(169) |11111111|11111111|011101 3fffdd [22]
(170) |11111111|11111111|011110 3fffde [22]
(171) |11111111|11111111|11110000 fffff0 [24]
(172) |11111111|11111110|11111 1fffdf [21]
(173) |11111111|11111111|011111 3fffdf [22]
(174) |11111111|11111111|1101011 7fffeb [23]
(175) |11111111|11111111|1101100 7fffec [23]
(176) |11111111|11111111|00000 1fffe0 [21]
(177) |11111111|11111111|00001 1fffe1 [21]
(178) |11111111|11111111|100000 3fffe0 [22]
(179) |11111111|11111111|00010 1fffe2 [21]
(180) |11111111|11111111|1101101 7fffed [23]
(181) |11111111|11111111|100001 3fffe1 [22]
(182) |11111111|11111111|1101110 7fffee [23]
(183) |11111111|11111111|1101111 7fffef [23]
(184) |11111111|11111110|1010 fffea [20]
(185) |11111111|11111111|100010 3fffe2 [22]
(186) |11111111|11111111|100011 3fffe3 [22]
(187) |11111111|11111111|100100 3fffe4 [22]
(188) |11111111|11111111|1110000 7ffff0 [23]
(189) |11111111|11111111|100101 3fffe5 [22]
(190) |11111111|11111111|100110 3fffe6 [22]
(191) |11111111|11111111|1110001 7ffff1 [23]
(192) |11111111|11111111|11111000|00 3ffffe0 [26]
(193) |11111111|11111111|11111000|01 3ffffe1 [26]
(194) |11111111|11111110|1011 fffeb [20]
(195) |11111111|11111110|001 7fff1 [19]
(196) |11111111|11111111|100111 3fffe7 [22]
(197) |11111111|11111111|1110010 7ffff2 [23]
(198) |11111111|11111111|101000 3fffe8 [22]
(199) |11111111|11111111|11110110|0 1ffffec [25]
(200) |11111111|11111111|11111000|10 3ffffe2 [26]
(201) |11111111|11111111|11111000|11 3ffffe3 [26]
(202) |11111111|11111111|11111001|00 3ffffe4 [26]
(203) |11111111|11111111|11111011|110 7ffffde [27]
(204) |11111111|11111111|11111011|111 7ffffdf [27]
(205) |11111111|11111111|11111001|01 3ffffe5 [26]
(206) |11111111|11111111|11110001 fffff1 [24]
(207) |11111111|11111111|11110110|1 1ffffed [25]
(208) |11111111|11111110|010 7fff2 [19]
(209) |11111111|11111111|00011 1fffe3 [21]
(210) |11111111|11111111|11111001|10 3ffffe6 [26]
(211) |11111111|11111111|11111100|000 7ffffe0 [27]
(212) |11111111|11111111|11111100|001 7ffffe1 [27]
(213) |11111111|11111111|11111001|11 3ffffe7 [26]
(214) |11111111|11111111|11111100|010 7ffffe2 [27]
(215) |11111111|11111111|11110010 fffff2 [24]
(216) |11111111|11111111|00100 1fffe4 [21]
(217) |11111111|11111111|00101 1fffe5 [21]
(218) |11111111|11111111|11111010|00 3ffffe8 [26]
(219) |11111111|11111111|11111010|01 3ffffe9 [26]
(220) |11111111|11111111|11111111|1101 ffffffd [28]
(221) |11111111|11111111|11111100|011 7ffffe3 [27]
(222) |11111111|11111111|11111100|100 7ffffe4 [27]
(223) |11111111|11111111|11111100|101 7ffffe5 [27]
(224) |11111111|11111110|1100 fffec [20]
(225) |11111111|11111111|11110011 fffff3 [24]
(226) |11111111|11111110|1101 fffed [20]
(227) |11111111|11111111|00110 1fffe6 [21]
(228) |11111111|11111111|101001 3fffe9 [22]
(229) |11111111|11111111|00111 1fffe7 [21]
(230) |11111111|11111111|01000 1fffe8 [21]
(231) |11111111|11111111|1110011 7ffff3 [23]
(232) |11111111|11111111|101010 3fffea [22]
(233) |11111111|11111111|101011 3fffeb [22]
(234) |11111111|11111111|11110111|0 1ffffee [25]
(235) |11111111|11111111|11110111|1 1ffffef [25]
(236) |11111111|11111111|11110100 fffff4 [24]
(237) |11111111|11111111|11110101 fffff5 [24]
(238) |11111111|11111111|11111010|10 3ffffea [26]
(239) |11111111|11111111|1110100 7ffff4 [23]
(240) |11111111|11111111|11111010|11 3ffffeb [26]
(241) |11111111|11111111|11111100|110 7ffffe6 [27]
(242) |11111111|11111111|11111011|00 3ffffec [26]
(243) |11111111|11111111|11111011|01 3ffffed [26]
(244) |11111111|11111111|11111100|111 7ffffe7 [27]
(245) |11111111|11111111|11111101|000 7ffffe8 [27]
(246) |11111111|11111111|11111101|001 7ffffe9 [27]
(247) |11111111|11111111|11111101|010 7ffffea [27]
(248) |11111111|11111111|11111101|011 7ffffeb [27]
(249) |11111111|11111111|11111111|1110 ffffffe [28]
(250) |11111111|11111111|11111101|100 7ffffec [27]
(251) |11111111|11111111|11111101|101 7ffffed [27]
(252) |11111111|11111111|11111101|110 7ffffee [27]
(253) |11111111|11111111|11111101|111 7ffffef [27]
(254) |11111111|11111111|11111110|000 7fffff0 [27]
(255) |11111111|11111111|11111011|10 3ffffee [26]
EOS (256) |11111111|11111111|11111111|111111 3fffffff [30]
"""
# Parse every row of the spec table above and emit one C++ initializer line
# per Huffman symbol, suitable for pasting into hpack_constants.cc.
count = 0
spec_lines = SPEC_DATA_DRAFT_08.splitlines()
for l in spec_lines:
  # Row shape: optional symbol ("'c'" or EOS), "(id)", pipe-separated bit
  # pattern, hex value (ignored), and "[len]". Non-matching lines (headers,
  # blanks) are skipped.
  m = re.match(
      r"^\s*('.+'|EOS)? \( *(\d+)\) \|([10\|]+) +\w+ \[ ?(\d+)\]", l)
  if m:
    g = m.groups()
    spec_comment = g[0]
    spec_bitstring = g[2]
    # Left-align the code bits in a 32-bit word (MSB-aligned, zero-padded).
    bitstring = spec_bitstring.replace('|','').ljust(32,'0')
    bitvalue = int(bitstring, 2)
    bitcount = g[3]
    valueid = g[1]
    comment = ' // %s' % spec_bitstring
    # Include the printable symbol in the trailing comment when the spec
    # row has one.
    if spec_comment:
      comment = ' // %3s %s' % (spec_comment, spec_bitstring)
    else:
      comment = ' // %s' % spec_bitstring
    print(' {0x%08xul, %02s, %03s},%s' % (
        bitvalue, bitcount, valueid, comment))
    # Rows must appear in symbol order with no gaps.
    assert int(valueid) == count, "Expected data for %s, got %s." % (count,
                                                                     valueid)
    count += 1
print("Total: %s" % count)
# 256 byte values plus the EOS symbol.
assert count == 257, "Expected 257 values, got %d." % count
| bsd-3-clause |
yantrabuddhi/blocos | tabs/UploadTab.py | 1 | 13455 | # -*- coding: utf-8 -*-
# Este arquivo é parte do programa Monitor
# Monitor é um software livre; você pode redistribui-lo e/ou
# modifica-lo dentro dos termos da Licença Pública Geral GNU como
# publicada pela Fundação do Software Livre (FSF); na versão 3 da
# Licença, ou (na sua opinião) qualquer versão.
#
# Este programa é distribuido na esperança que possa ser util,
# mas SEM NENHUMA GARANTIA; sem uma garantia implicita de ADEQUAÇÂO a qualquer
# MERCADO ou APLICAÇÃO EM PARTICULAR. Veja a
# Licença Pública Geral GNU para maiores detalhes.
#
# Você deve ter recebido uma cópia da Licença Pública Geral GNU
# junto com este programa, se não, escreva para a Fundação do Software
# Livre(FSF) Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# Centro de Tecnologia da Informação Renato Archer, Campinas-SP, Brasil
# Projeto realizado com fundos do Conselho Nacional de Desenvolvimento Científico e Tecnológico (CNPQ)
# Esse código faz parte do projeto BR-Gogo, disponível em http://sourceforge.net/projects/br-gogo/
import os
if os.name=='nt':
import win32api
import win32con
from gettext import gettext as _
try:
import gtk
except ImportError:
#print _('GTK+ Runtime Enviromnt precisa ser instalado:')
print _('GTK+ Runtime Enviroment needs to be installed:')
print "http://downloads.sourceforge.net/gladewin32/gtk-2.12.9-win32-1.exe?modtime=1208401479&big_mirror=0"
raw_input()
from Tab import Tab
from pyLogoCompiler.Exceptions import ConnectionProblem
import pango
import math
from cairoplot import plots
from cairoplot.series import Series
# >>>>>>>>>>>>>>>>> temp
# For non-dev machines, quick hack at attempt to show traceback in in a msg dialog
import sys
import traceback
def logexception(exc_type, exc_value, exc_tb):
    """sys.excepthook replacement: print the formatted traceback and, as a
    best effort, also show it in a modal GTK message dialog so errors are
    visible on non-dev machines without a console."""
    # BUG FIX: traceback.format_exception() returns lines that already end
    # with newlines; joining them with spaces produced a malformed, doubly
    # spaced traceback.  Join with '' to reproduce the standard layout.
    text = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
    print(text)
    try:
        dialog = gtk.MessageDialog(None, gtk.DIALOG_MODAL,
                                   gtk.MESSAGE_INFO,
                                   gtk.BUTTONS_OK,
                                   text)
        dialog.run()
        dialog.destroy()
    except:
        # Never let the excepthook itself raise (e.g. no display available).
        pass

sys.excepthook = logexception
# <<<<<<<<<<<<<<<<<<< temp
class UploadTab(Tab):
    """Tab that uploads logged sensor data from the GoGo board, displays it
    as calibrated columns of text or as a graph, and saves it as CSV."""

    LAST_DATA_FILENAME = '.last_data.txt'
    defaultTab = 9  # minimum column width (characters) in the text view

    def __init__(self, gui, GoGo, liststoreSensorsTypes, sensorTypes):
        self.gui = gui
        self.GoGo = GoGo
        self.sensorTypes = sensorTypes
        self.dataFilename = ""
        self.data = []           # raw uploaded values as one flat list
        self.colDataRaw = []     # raw values split into per-column lists
        self.colDataMapped = []  # calibrated values, one list per column
        self.textviewData = self.gui.get_widget('textviewData')
        self.textviewData.modify_font(pango.FontDescription('monospace'))
        self.textviewBuffer = gtk.TextBuffer()
        self.textviewData.set_buffer(self.textviewBuffer)
        self.spinbuttonColumns = self.gui.get_widget('spinbuttonColumns')
        self.checkbuttonShowHeaders = self.gui.get_widget('checkbuttonShowHeaders')
        self.checkbuttonTwoLineHeader = self.gui.get_widget('checkbuttonTwoLineHeader')
        self.radiobuttonUploadAuto = self.gui.get_widget("radiobuttonUploadAuto")
        self.uploadCount = self.gui.get_widget("spinbuttonUploadCount")
        self.progressbar = self.gui.get_widget('progressbarUpload')
        self.lblProgress = self.gui.get_widget('labelValuesUploaded')

        # One sensor-type combobox per possible data column; only the
        # first one is active until more columns are selected.
        self.colSpec = []
        for c in range(8):
            w = self.gui.get_widget('comboboxC%i' % c)
            w.set_active(0)
            w.set_sensitive(c == 0)
            w.set_model(liststoreSensorsTypes)
            self.colSpec.append(w)

        # Restore the text shown in the previous session (best effort).
        try:
            f = open(self.LAST_DATA_FILENAME, 'r')
            self.textviewBuffer.set_text(f.read())
            f.close()
        except:
            pass

        self.graphContainer = None
        self.graphWidth = 50
        self.graphHeight = 50
        self.graphData = None
        self.graph = None
        self.graphVisible = False
        self.graphUpdateRequired = False
        self.notebookDataView = self.gui.get_widget('notebookDataView')
        #self.notebookDataView.set_current_page(0)

    def buttonStartUpload_clicked_cb(self, widget):
        """Upload data from the GoGo board (automatically sized, or a
        user-selected count), then refresh the display."""
        try:
            self.progressbar.set_fraction(0.0)
            self.lblProgress.set_text(_("%i Values Uploaded") % 0)
            while gtk.events_pending():
                gtk.main_iteration(False)
            if self.radiobuttonUploadAuto.get_active():
                self.data = self.GoGo.autoUpload(None, self.uploadProgress_cb)
            else:
                count = self.uploadCount.get_value_as_int()
                self.data = self.GoGo.autoUpload(count, self.uploadProgress_cb)
        except ConnectionProblem:
            self.showWarning(_("Check GoGo plugged in, turned on and connected"))
            return
        except:
            self.showError(_("Error communicating"))
            return
        else:
            self.lblProgress.set_text(_("%i Values Uploaded") % len(self.data))
            if self.refreshTextView():
                self.showInfo(_("Data successfully uploaded."), self.gui.get_widget('mainWindow'))

    def buttonSaveData_clicked_cb(self, widget):
        """Ask for a filename and save the calibrated data as CSV."""
        if len(self.data) == 0:
            return
        dialog = gtk.FileChooserDialog(_("Save As.."), None, gtk.FILE_CHOOSER_ACTION_SAVE,
                (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_SAVE, gtk.RESPONSE_OK))
        dialog.set_default_response(gtk.RESPONSE_OK)
        response = dialog.run()
        if response == gtk.RESPONSE_OK:
            self.dataFilename = dialog.get_filename()
            try:
                FILE = open(self.dataFilename, "w")
                try:
                    FILE.write(self.dataFormattedForSaving())
                finally:
                    # Close the file even when write() fails.
                    FILE.close()
            except Exception as e:
                # BUG FIX: this used to call Exception.__str__() on the
                # class itself (a TypeError) instead of showing the actual
                # error message of the caught exception.
                self.showError(str(e))
        dialog.destroy()

    def buttonClearData_clicked_cb(self, widget):
        """Discard all uploaded data and reset the progress display."""
        self.data = []
        self.colDataRaw = []
        self.colDataMapped = []
        self.dataFilename = ""
        self.progressbar.set_fraction(0.0)
        self.lblProgress.set_text(_("%i Values Uploaded") % 0)
        self.refreshTextView()

    def spinbuttonColumns_changed_cb(self, widget):
        """Enable one sensor combobox per selected column and redisplay."""
        cc = self.spinbuttonColumns.get_value_as_int()
        for c in range(8):
            self.colSpec[c].set_sensitive(c < cc)
        self.refreshTextView()

    def colSpec_changed_cb(self, widget):
        # A different sensor type was chosen for a column.
        self.refreshTextView()

    def checkbuttonShowHeaders_toggled_cb(self, widget):
        # The two-line option only makes sense when headers are shown.
        self.checkbuttonTwoLineHeader.set_sensitive(widget.get_active())
        self.refreshTextView()

    def checkbuttonTwoLineHeader_toggled_cb(self, widget):
        self.refreshTextView()

    def notebookDataView_switch_page_cb(self, widget, page, page_num):
        # Page 1 is the graph page; redraw lazily when it becomes visible.
        self.graphVisible = page_num == 1
        if self.graphVisible:
            self.refreshGraph()

    def getSelectedSensors(self):
        """Return the sensor objects chosen in the active column
        comboboxes, or None if an index has no matching sensor type."""
        sensorIndexes = [w.get_active() for w in self.colSpec[:self.spinbuttonColumns.get_value_as_int()]]
        # Comboboxes without a selection report -1; fall back to sensor 0.
        for i in [i for i, v in enumerate(sensorIndexes) if v == -1]:
            sensorIndexes[i] = 0
        try:
            return [self.sensorTypes[n] for n in sensorIndexes]
        except:
            return None

    def calibrateData(self):
        """Map each raw column through its sensor calibration (rounded to
        3 decimals), padding shorter columns with '' to equal length."""
        self.colDataMapped = []
        maxRows = max([len(c) for c in self.colDataRaw])
        sensors = self.getSelectedSensors()
        for c, data in enumerate(self.colDataRaw):
            m = [round(sensors[c].get_new_value(v), 3) for v in data]
            if len(m) < maxRows:
                m += [''] * (maxRows - len(m))
            self.colDataMapped += [m]

    def getSensorHeaders(self):
        """Build self.hdrs as [name, unit] pairs for the selected sensors.

        Returns False when headers are disabled or no sensors are
        configured; sets self.useHdrs accordingly.
        """
        self.useHdrs = False
        self.hdrs = []
        if not self.checkbuttonShowHeaders.get_active():
            return False
        sensors = self.getSelectedSensors()
        if not sensors:
            return False
        self.hdrs = [[s.name, s.unit] for s in sensors]
        # Display 'None' for sensors without a unit.
        for i in [i for i, h in enumerate(self.hdrs) if h[1] is None or h[1] == '']:
            self.hdrs[i][1] = 'None'
        self.useHdrs = True
        return True

    def csvHeaders(self):
        """Return the CSV header line(s), or '' when headers are off."""
        if not self.useHdrs:
            return ''
        if not self.checkbuttonTwoLineHeader.get_active():
            t = ','.join([('%s (%s)' % (h[0], h[1])) for h in self.hdrs]) + '\n'
            return t
        t = ','.join([h[0] for h in self.hdrs]) + '\n'
        t += ','.join([h[1] for h in self.hdrs]) + '\n'
        return t

    def displayHeaders(self):
        """Return the right-justified header block for the text view and
        record each column's display width in self.hdrTabs."""
        if not self.useHdrs:
            return ''
        t = ''
        if not self.checkbuttonTwoLineHeader.get_active():
            hdrs = [('%s (%s)' % (h[0], h[1])) for h in self.hdrs]
            hdrs = [h.rjust(max(len(h), self.defaultTab), ' ') for h in hdrs]
            self.hdrTabs = []
            for h in hdrs:
                t += h + ' '
                self.hdrTabs.extend([len(h)])
            return t + '\n' + ('-' * len(t)) + '\n'
        # Two-line variant: names on the first line, units on the second.
        hdrs0 = []
        hdrs1 = []
        for h in self.hdrs:
            w = max(len(h[0]), len(h[1]), self.defaultTab)
            hdrs0 += [h[0].rjust(w, ' ')]
            hdrs1 += [h[1].rjust(w, ' ')]
        self.hdrTabs = []
        for h in hdrs0:
            t += h + ' '
            self.hdrTabs.extend([len(h)])
        w = len(t)
        t += '\n'
        for h in hdrs1:
            t += h + ' '
        return t + '\n' + ('-' * w) + '\n'

    def dataFormattedForSaving(self):
        """Return the calibrated data as CSV text, one row per sample."""
        t = self.csvHeaders()
        # BUG FIX: write one CSV row per sample by zipping the columns.
        # The old code wrote each *column* on a single line, transposing
        # the data relative to the header row produced by csvHeaders().
        for rowData in zip(*self.colDataMapped):
            t = t + ','.join(map(str, rowData)) + '\n'
        return t

    def dataFormattedForDisplay(self):
        """Return the calibrated data as right-justified text columns."""
        t = self.displayHeaders()
        if len(self.colDataMapped) == 1:
            d = zip(self.colDataMapped[0])
        else:
            d = zip(*self.colDataMapped)
        for r, rowData in enumerate(d):
            for c, v in enumerate(rowData):
                if self.useHdrs:
                    t = t + str(v).rjust(self.hdrTabs[c], ' ') + ' '
                else:
                    t = t + str(v).rjust(self.defaultTab, ' ') + ' '
            t = t + '\n'
        return t

    def refreshTextView(self):
        """Re-split, re-calibrate and re-display self.data.

        Returns True when data was displayed, False otherwise.
        """
        if len(self.data) == 0:
            self.textviewBuffer.set_text("")
            return False
        if self.getSensorHeaders():
            nCols = self.spinbuttonColumns.get_value_as_int()
            if nCols == 1:
                self.colDataRaw = [self.data]
            else:
                # Stride-slice the flat upload into nCols columns.
                self.colDataRaw = list(self.data[i::nCols] for i in range(nCols))
                # NOTE(review): this loop was 'range(nCols-1, -1)', which
                # is always empty (missing the -1 step), so it has never
                # trimmed anything.  It is kept inert to preserve
                # behaviour -- shorter trailing columns are padded in
                # calibrateData() instead.
                for i in range(nCols - 1, -1):
                    if len(self.colDataRaw[i]) > len(self.colDataRaw[i + 1]):
                        self.colDataRaw[i].pop()
            self.calibrateData()
            self.textviewBuffer.set_text(self.dataFormattedForDisplay())
            self.graphUpdateRequired = True
            self.refreshGraph()
            return True
        else:
            self.showWarning(_("Please, add at least one sensor in Sensors Tab"))
            return False

    def refreshGraph(self):
        """Redraw the graph when it is visible and data has changed."""
        if not (self.graphVisible and self.graphUpdateRequired): return
        if self.graphContainer is None:
            self.graphContainer = self.gui.get_widget("dataGraphContainer")
            if self.graphContainer is None: return
            r = self.graphContainer.get_allocation()
            self.graphWidth, self.graphHeight = (r.width, r.height)
            self.graph = None
        # One labelled series per calibrated column.
        data = {}
        for c, t in enumerate(self.colDataMapped):
            lbl = '%(colNum)i-%(name)s (%(units)s)' % \
                {'colNum': c + 1, 'name': self.hdrs[c][0], 'units': self.hdrs[c][1]}
            data[lbl] = t
        #if len(self.data) % self.spinbuttonColumns.get_value_as_int() > 0:
        #    self.showWarning(_("The graph can not be generated with this configuration.\nPlease check the number of columns."))
        #else:
        self.drawGraph(data, [str(x) for x in range(len(self.colDataMapped[0]))])
        self.graphUpdateRequired = False

    # NOTE(review): mutable default arguments are an anti-pattern, but
    # both defaults are never mutated and callers always pass both
    # arguments, so the signature is kept for compatibility.
    def drawGraph(self, data=[], xLabels=[]):
        """Replace the current cairoplot graph with one for *data*."""
        if data == {}: return
        if self.graph is not None:
            self.graphContainer.remove(self.graph.handler)
        self.graph = plots.DotLinePlot('gtk', data=data, x_labels=xLabels,
                width=self.graphWidth, height=self.graphHeight, background="white",
                border=5, axis=True, grid=True, series_legend=True)
        self.graphContainer.add(self.graph.handler)
        self.graph.handler.show()

    def uploadProgress_cb(self, count, total):
        """Progress callback invoked by the GoGo board while uploading."""
        self.progressbar.set_fraction(float(count) / total)
        # BUG FIX: translate the format string first, then substitute the
        # count.  It was _('%i Values Uploaded' % count), which formats
        # before translation and so never matches the message catalogue
        # (the other three call sites in this class do it correctly).
        self.lblProgress.set_text(_('%i Values Uploaded') % count)
        while gtk.events_pending():
            gtk.main_iteration(False)
| gpl-3.0 |
thaumos/ansible | test/integration/targets/module_precedence/multiple_roles/foo/library/ping.py | 480 | 2146 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2016, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = '''
---
module: ping
version_added: historical
short_description: Try to connect to host, verify a usable python and return C(pong) on success.
description:
- A trivial test module, this module always returns C(pong) on successful
contact. It does not make sense in playbooks, but it is useful from
C(/usr/bin/ansible) to verify the ability to login and that a usable python is configured.
- This is NOT ICMP ping, this is just a trivial test module.
options: {}
author:
- "Ansible Core Team"
- "Michael DeHaan"
'''
EXAMPLES = '''
# Test we can logon to 'webservers' and execute python with json lib.
ansible webservers -m ping
'''
from ansible.module_utils.basic import AnsibleModule
def main():
    """Entry point: reply with 'pong' (or the supplied data) plus this
    role's location, or raise when asked to crash."""
    module = AnsibleModule(
        argument_spec=dict(
            data=dict(required=False, default=None),
        ),
        supports_check_mode=True
    )
    data = module.params['data']
    if data == 'crash':
        raise Exception("boom")
    result = {
        'ping': data if data else 'pong',
        'location': 'role: foo',
    }
    module.exit_json(**result)


if __name__ == '__main__':
    main()
| gpl-3.0 |
Spiderlover/Toontown | toontown/cogdominium/DistCogdoMazeGameAI.py | 4 | 1117 | from direct.directnotify import DirectNotifyGlobal
from toontown.cogdominium.DistCogdoGameAI import DistCogdoGameAI
class DistCogdoMazeGameAI(DistCogdoGameAI):
    """AI-side distributed object for the Cog-dominium maze mini-game.

    Every handler below is an unimplemented stub: the generated parameter
    names (todo0, todo1, ...) come straight from the distributed-class
    definition and the bodies are placeholders awaiting implementation.
    """
    notify = DirectNotifyGlobal.directNotify.newCategory("DistCogdoMazeGameAI")

    # --- generic action requests ---
    def requestAction(self, todo0, todo1):
        pass
    def doAction(self, todo0, todo1, todo2):
        pass

    # --- game setup ---
    def setNumSuits(self, todo0):
        pass

    # --- gag usage and hits ---
    def requestUseGag(self, todo0, todo1, todo2, todo3):
        pass
    def toonUsedGag(self, todo0, todo1, todo2, todo3, todo4):
        pass
    def requestSuitHitByGag(self, todo0, todo1):
        pass
    def suitHitByGag(self, todo0, todo1, todo2):
        pass

    # --- toon damage events ---
    def requestHitBySuit(self, todo0, todo1, todo2):
        pass
    def toonHitBySuit(self, todo0, todo1, todo2, todo3):
        pass
    def requestHitByDrop(self):
        pass
    def toonHitByDrop(self, todo0):
        pass

    # --- pickups ---
    def requestPickUp(self, todo0):
        pass
    def pickUp(self, todo0, todo1, todo2):
        pass
    def requestGag(self, todo0):
        pass
    def hasGag(self, todo0, todo1):
        pass
| mit |
fureszpeter/a2billing | CallBack/callback-daemon-py/setup.py | 6 | 2172 | # This file is part of A2Billing (http://www.a2billing.net/)
#
# A2Billing, Commercial Open Source Telecom Billing platform,
# powered by Star2billing S.L. <http://www.star2billing.com/>
#
# @copyright Copyright (C) 2004-2015 - Star2billing S.L.
# @author Belaid Arezqui <areski@gmail.com>
# @license http://www.fsf.org/licensing/licenses/agpl-3.0.html
# @package A2Billing
#
# Software License Agreement (GNU Affero General Public License)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
# Daemon to proceed Call-Back request from the a2billing plaftorm
#
from setuptools import setup, find_packages
setup(
    name="callback_daemon",
    version="1.1",
    packages=['callback_daemon'],
    # Ship ancillary config/init files found inside the package.
    package_data={
        '': ['*.txt', '*.conf', '*.debian', '*.rc']
    },
    entry_points={
        'console_scripts': [
            'a2b_callback_daemon=callback_daemon.a2b_callback_daemon:main'
        ]
    },
    author="Belaid Arezqui",
    author_email="areski@gmail.com",
    description="This Package provide a callback daemon for a2billing",
    license="AGPLv3+",
    keywords="callback a2billing daemon",
    url="http://www.asterisk2billing.org/",
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        # BUG FIX: 'Intended Audience :: Developers, Users' is not a valid
        # trove classifier and is rejected by PyPI; use the two official
        # entries instead.
        'Intended Audience :: Developers',
        'Intended Audience :: End Users/Desktop',
        'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Software Development'
    ],
)
| agpl-3.0 |
liquidg3/altair | node_modules/npm/node_modules/node-gyp/gyp/tools/pretty_sln.py | 1831 | 5099 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Prints the information in a sln file in a diffable way.
It first outputs each projects in alphabetical order with their
dependencies.
Then it outputs a possible build order.
"""
__author__ = 'nsylvain (Nicolas Sylvain)'
import os
import re
import sys
import pretty_vcproj
def BuildProject(project, built, projects, deps):
  """Print *project* after all of its dependencies, depth-first, appending
  each printed project to *built*.

  Not infinite-recursion proof: a dependency cycle overflows the stack.
  """
  for dependency in deps[project]:
    if dependency not in built:
      BuildProject(dependency, built, projects, deps)
  print(project)
  built.append(project)
def ParseSolution(solution_file):
  """Parse a Visual Studio .sln file.

  Returns (projects, dependencies) where projects maps a project name to
  [path, clsid, original path] and dependencies maps a project name to a
  sorted list of the names it depends on.  Projects containing 'icu' are
  skipped and '_gyp' suffixes are stripped to ease diffing.
  """
  # All projects, their clsid and paths.
  projects = dict()

  # A list of dependencies associated with a project.
  dependencies = dict()

  # Regular expressions that matches the SLN format.
  # The first line of a project definition.
  begin_project = re.compile(r'^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
                             r'}"\) = "(.*)", "(.*)", "(.*)"$')
  # The last line of a project definition.
  end_project = re.compile('^EndProject$')
  # The first line of a dependency list.
  begin_dep = re.compile(
      r'ProjectSection\(ProjectDependencies\) = postProject$')
  # The last line of a dependency list.
  end_dep = re.compile('EndProjectSection$')
  # A line describing a dependency.
  dep_line = re.compile(' *({.*}) = ({.*})$')

  in_deps = False
  # BUG FIX: the file handle was opened without ever being closed; the
  # with-statement guarantees it is released even on a parse error.
  with open(solution_file) as solution:
    for line in solution:
      results = begin_project.search(line)
      if results:
        # Hack to remove icu because the diff is too different.
        if results.group(1).find('icu') != -1:
          continue
        # We remove "_gyp" from the names because it helps to diff them.
        current_project = results.group(1).replace('_gyp', '')
        projects[current_project] = [results.group(2).replace('_gyp', ''),
                                     results.group(3),
                                     results.group(2)]
        dependencies[current_project] = []
        continue

      results = end_project.search(line)
      if results:
        current_project = None
        continue

      results = begin_dep.search(line)
      if results:
        in_deps = True
        continue

      results = end_dep.search(line)
      if results:
        in_deps = False
        continue

      results = dep_line.search(line)
      if results and in_deps and current_project:
        dependencies[current_project].append(results.group(1))
        continue

  # Change all dependencies clsid to name instead.
  for project in dependencies:
    # For each dependency in this project...
    new_dep_array = []
    for dep in dependencies[project]:
      # ...look for the project name matching this clsid.
      for project_info in projects:
        if projects[project_info][1] == dep:
          new_dep_array.append(project_info)
    dependencies[project] = sorted(new_dep_array)

  return (projects, dependencies)
def PrintDependencies(projects, deps):
  """Print every project (alphabetically) with its path and dependencies."""
  separator = "---------------------------------------"
  print(separator)
  print("Dependencies for all projects")
  print(separator)
  print("-- --")

  for (project, dep_list) in sorted(deps.items()):
    print("Project : %s" % project)
    print("Path : %s" % projects[project][0])
    if dep_list:
      for dep in dep_list:
        print(" - %s" % dep)
    print("")

  print("-- --")
def PrintBuildOrder(projects, deps):
  """Print one possible dependency-respecting build order."""
  separator = "---------------------------------------"
  print(separator)
  print("Build order ")
  print(separator)
  print("-- --")

  built = []
  for (project, _) in sorted(deps.items()):
    if project not in built:
      BuildProject(project, built, projects, deps)

  print("-- --")
def PrintVCProj(projects):
  # Pretty-print each project's .vcproj file via the pretty_vcproj tool.
  # The project path is resolved relative to the solution file given as
  # sys.argv[1]; extra command-line flags (sys.argv[3:]) are forwarded.
  for project in projects:
    print "-------------------------------------"
    print "-------------------------------------"
    print project
    print project
    print project
    print "-------------------------------------"
    print "-------------------------------------"

    # projects[project][2] is the original (un-stripped) project path.
    project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]),
                                                projects[project][2]))

    pretty = pretty_vcproj
    argv = [ '',
             project_path,
             '$(SolutionDir)=%s\\' % os.path.dirname(sys.argv[1]),
           ]
    argv.extend(sys.argv[3:])
    pretty.main(argv)
def main():
  # Entry point: parse the .sln given on the command line, print its
  # dependency table and a build order, and optionally (--recursive)
  # pretty-print every referenced .vcproj.  Returns a process exit code.
  # check if we have exactly 1 parameter.
  if len(sys.argv) < 2:
    print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0]
    return 1

  (projects, deps) = ParseSolution(sys.argv[1])
  PrintDependencies(projects, deps)
  PrintBuildOrder(projects, deps)

  if '--recursive' in sys.argv:
    PrintVCProj(projects)
  return 0

if __name__ == '__main__':
  sys.exit(main())
| mit |
edcast-inc/edx-platform-edcast | docs/en_us/platform_api/source/conf.py | 23 | 6203 | # -*- coding: utf-8 -*-
# pylint: disable=invalid-name
# pylint: disable=redefined-builtin
# pylint: disable=protected-access
# pylint: disable=unused-argument
import os
from path import path
import sys
import mock
# Modules mocked out in sys.modules so Sphinx autodoc can import
# edx-platform code without the full Django/runtime environment.
# BUG FIX: duplicate entries ('fields', 'ratelimitbackend', 'pdfgen' and
# 'boto' each appeared twice) were removed; each entry simply becomes a
# Mock in sys.modules, so duplicates were redundant.
MOCK_MODULES = [
    'lxml',
    'requests',
    'xblock',
    'fields',
    'xblock.fields',
    'frament',
    'xblock.fragment',
    'webob',
    'multidict',
    'webob.multidict',
    'core',
    'xblock.core',
    'runtime',
    'xblock.runtime',
    'sortedcontainers',
    'contracts',
    'plugin',
    'xblock.plugin',
    'opaque_keys.edx.asides',
    'asides',
    'dogstats_wrapper',
    'fs',
    'fs.errors',
    'edxmako',
    'edxmako.shortcuts',
    'shortcuts',
    'crum',
    'opaque_keys.edx.locator',
    'LibraryLocator',
    'Location',
    'ipware',
    'ip',
    'ipware.ip',
    'get_ip',
    'pygeoip',
    'ipaddr',
    'django_countries',
    'django_countries.fields',
    'opaque_keys',
    'opaque_keys.edx',
    'opaque_keys.edx.keys',
    'CourseKey',
    'UsageKey',
    'BlockTypeKey',
    'opaque_keys.edx.locations',
    'SlashSeparatedCourseKey',
    'Locator',
    'south',
    'modelsinspector',
    'south.modelsinspector',
    'add_introspection_rules',
    'courseware',
    'access',
    'courseware.access',
    'is_mobile_available_for_user',
    'courseware.model_data',
    'courseware.module_render',
    'courseware.views',
    'util.request',
    'eventtracking',
    'xmodule',
    'xmodule.exceptions',
    'xmodule.modulestore',
    'xmodule.modulestore.exceptions',
    'xmodule.modulestore.django',
    'courseware.models',
    'milestones',
    'milestones.api',
    'milestones.models',
    'milestones.exceptions',
    'ratelimitbackend',
    'analytics',
    'courseware.courses',
    'staticfiles',
    'storage',
    'staticfiles.storage',
    'content',
    'xmodule.contentstore',
    'xmodule.contentstore.content',
    'xblock.exceptions',
    'xmodule.seq_module',
    'xmodule.vertical_module',
    'xmodule.x_module',
    'nltk',
    'ratelimitbackend.exceptions',
    'social',
    'social.apps',
    'social.apps.django_app',
    'social.backends',
    'mako',
    'exceptions',
    'mako.exceptions',
    'boto',
    'exception',
    'boto.exception',
    'PIL',
    'reportlab',
    'lib',
    'reportlab.lib',
    'pdfgen',
    'canvas',
    'pdfgen.canvas',
    'reportlab.pdfgen',
    'reportlab.pdfgen.canvas',
    'reportlab.lib.pagesizes',
    'reportlab.lib.units',
    'reportlab.lib.styles',
    'reportlab.platypus',
    'reportlab.platypus.tables',
    's3',
    'connection',
    'boto.s3',
    'boto.s3.connection',
    'boto.s3.key',
    'Crypto',
    'Crypto.Cipher',
    'Crypto.PublicKey',
    'openid',
    'store',
    'interface',
    'openid.store',
    'store.interface',
    'openid.store.interface',
    'external_auth.views',
    'html_to_text',
    'mail_utils',
    'ratelimitbackend.backends',
    'social.apps.django_app.default',
    'social.exceptions',
    'social.pipeline',
    'xmodule.error_module',
    'accounts.api',
    'modulestore.mongo.base',
    'xmodule.modulestore.mongo',
    'xmodule.modulestore.mongo.base',
    'edxval',
    'edxval.api',
    'model_utils',
    'model_utils.models',
    'model_utils.managers',
    'certificates',
    'certificates.models',
    'certificates.models.GeneratedCertificate',
    'shoppingcart',
    'shopppingcart.models',
    'shopppingcart.api',
    'api',
    'student',
    'student.views',
    'student.forms',
    'student.models',
    'celery',
    'celery.task',
    'student.roles',
    'embargo.models',
    'xmodule.vertical_block',
    'vertical_block',
    'errors',
    'UserNotFound',
    'UserNotAuthorized',
    'AccountUpdateError',
    'AccountValidationError',
    'transaction',
    'parsers',
    'MergePatchParser',
    'get_account_settings',
    'update_account_settings',
    'serializers',
    'profile_images.images',
    'xmodule.course_module'
]
# Replace every heavy/unavailable dependency with a Mock so autodoc can
# import the documented modules.  class_that_is_extended=object lets
# classes inherit from mocked base classes.
for mod_name in MOCK_MODULES:
    sys.modules[mod_name] = mock.Mock(class_that_is_extended=object)

# True when building on readthedocs.org.
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'

sys.path.append('../../../../')

from docs.shared.conf import *

# Add any paths that contain templates here, relative to this directory.
#templates_path.append('source/_templates')

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path.append('source/_static')

if not on_rtd:  # only import and set the theme if we're building docs locally
    import sphinx_rtd_theme
    html_theme = 'sphinx_rtd_theme'
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
root = path('../../../..').abspath()
sys.path.insert(0, root)
sys.path.append(root / "common/lib/xmodule")
sys.path.append(root / "common/djangoapps")
sys.path.append(root / "lms/djangoapps")
sys.path.append(root / "openedx/core/djangoapps")

sys.path.insert(
    0,
    os.path.abspath(
        os.path.normpath(
            os.path.dirname(__file__) + '/../../../'
        )
    )
)
sys.path.append('.')

# django configuration - careful here
# NOTE(review): both branches set the same value; the on_rtd distinction
# is currently redundant and presumably kept for future divergence.
if on_rtd:
    os.environ['DJANGO_SETTINGS_MODULE'] = 'lms'
else:
    os.environ['DJANGO_SETTINGS_MODULE'] = 'lms'

# -- General configuration -----------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx',
    'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.pngmath',
    'sphinx.ext.mathjax', 'sphinx.ext.viewcode', 'sphinxcontrib.napoleon']

project = u'EdX Platform APIs'
copyright = u'2015, edX'

exclude_patterns = ['build', 'links.rst']
| agpl-3.0 |
vanng822/cssutils | src/cssutils/tests/test_mediaquery.py | 3 | 3579 | # -*- coding: iso-8859-1 -*-
"""Testcases for cssutils.stylesheets.MediaQuery"""
import xml.dom
import basetest
import cssutils.stylesheets
class MediaQueryTestCase(basetest.BaseTestCase):
    # Tests for parsing and re-serialization of CSS media queries via
    # cssutils.stylesheets.MediaQuery.

    def setUp(self):
        super(MediaQueryTestCase, self).setUp()
        self.r = cssutils.stylesheets.MediaQuery()

    def test_mediaText(self):
        "MediaQuery.mediaText"
        # Valid queries: a value of None means the input serializes
        # unchanged; otherwise the value is the expected normalization.
        tests = {
            u'all': None,
            u'braille': None,
            u'embossed': None,
            u'handheld': None,
            u'print': None,
            u'projection': None,
            u'screen': None,
            u'speech': None,
            u'tty': None,
            u'tv': None,
            u'ALL': None,
            u'a\\ll': None,
            u'not tv': None,
            u'n\\ot t\\v': None,
            u'only tv': None,
            u'\\only \\tv': None,
            u'PRINT': None,
            u'NOT PRINT': None,
            u'ONLY PRINT': None,
            u'tv and (color)': None,
            u'not tv and (color)': None,
            u'only tv and (color)': None,
            u'print and(color)': u'print and (color)'
        }
        self.do_equal_r(tests, att='mediaText')

        # Invalid queries must raise a DOM SyntaxErr.
        tests = {
            u'': xml.dom.SyntaxErr,
            u'two values': xml.dom.SyntaxErr,
            u'or even three': xml.dom.SyntaxErr,
            u'aural': xml.dom.SyntaxErr,  # a dimension
            u'3d': xml.dom.SyntaxErr,  # a dimension
        }
        self.do_raise_r(tests, att='_setMediaText')

    def test_mediaType(self):
        "MediaQuery.mediaType"
        mq = cssutils.stylesheets.MediaQuery()
        self.assertEqual(u'', mq.mediaText)

        # Every known media type round-trips, preserving case.
        for mt in cssutils.stylesheets.MediaQuery.MEDIA_TYPES:
            mq.mediaType = mt
            self.assertEqual(mq.mediaType, mt)
            mq.mediaType = mt.upper()
            self.assertEqual(mq.mediaType, mt.upper())

        # An unknown media type is rejected with a SyntaxErr.
        mt = u'3D-UNKOwn-MEDIAtype0123'
        #mq.mediaType = mt
        self.assertRaises(xml.dom.SyntaxErr, mq._setMediaType, mt)
        #self.assertRaises(xml.dom.InvalidCharacterErr, mq._setMediaType, mt)

    def test_comments(self):
        "MediaQuery.mediaText comments"
        # CSS comments inside queries are preserved (and re-spaced where
        # the serializer normalizes around 'and').
        tests = {
            u'all': None,
            u'print': None,
            u'not print': None,
            u'only print': None,
            u'print and (color)': None,
            u'print and (color) and (width)': None,
            u'print and (color: 2)': None,
            u'print and (min-width: 100px)': None,
            u'print and (min-width: 100px) and (color: red)': None,
            u'not print and (min-width: 100px)': None,
            u'only print and (min-width: 100px)': None,
            u'/*1*/ tv /*2*/': None,
            u'/*0*/ only /*1*/ tv /*2*/': None,
            u'/*0* /not /*1*/ tv /*2*/': None,
            u'/*x*/ only /*x*/ print /*x*/ and /*x*/ (/*x*/ min-width /*x*/: /*x*/ 100px /*x*/)': None,
            u'print and/*1*/(color)': u'print and /*1*/ (color)'
        }
        self.do_equal_r(tests, att='mediaText')

    def test_reprANDstr(self):
        "MediaQuery.__repr__(), .__str__()"
        # repr() must evaluate back to an equivalent object.
        mediaText='tv and (color)'
        s = cssutils.stylesheets.MediaQuery(mediaText=mediaText)
        self.assertTrue(mediaText in str(s))
        s2 = eval(repr(s))
        self.assertEqual(mediaText, s2.mediaText)
        self.assertTrue(isinstance(s2, s.__class__))

if __name__ == '__main__':
    import unittest
    unittest.main()
| gpl-3.0 |
AnumSheraz/IP-Controlled-Robotic-Car | Manual-IP-Controlled-Robotic-Car/Code.py | 1 | 1696 |
import sys
from PyQt4 import QtGui, QtCore
import time, socket, json
from main import Ui_MainWindow
s=socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
IP = "localhost"
PORT = 8001
class main_menu(QtGui.QMainWindow):
    """Main window: translates WASD key presses/releases into JSON drive
    commands and sends them as UDP datagrams to the robot (module-level
    socket ``s``, address ``IP``/``PORT``)."""

    # key -> (field, command) sent while the key is held down.
    PRESS_COMMANDS = {
        QtCore.Qt.Key_W: ("FB", "F"),  # forward
        QtCore.Qt.Key_S: ("FB", "B"),  # backward
        QtCore.Qt.Key_A: ("LR", "L"),  # left
        QtCore.Qt.Key_D: ("LR", "R"),  # right
    }
    # key -> (field, command) sent when the key is released (stop that axis).
    RELEASE_COMMANDS = {
        QtCore.Qt.Key_W: ("FB", "S"),
        QtCore.Qt.Key_S: ("FB", "S"),
        QtCore.Qt.Key_A: ("LR", "S"),
        QtCore.Qt.Key_D: ("LR", "S"),
    }

    def __init__(self):
        super(main_menu, self).__init__()
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)
        self.show()

    def _send_command(self, key, commands):
        """Build the {"FB": ..., "LR": ...} message for *key* and send it.

        Unmapped keys still send a message with both fields empty, which
        matches the original per-key if-chain behaviour.
        """
        verbose = {"FB": "", "LR": ""}
        if key in commands:
            field, command = commands[key]
            verbose[field] = command
        print(verbose)
        json_data = json.dumps(verbose)
        s.sendto((json_data), (IP, PORT))

    # The two Qt handlers below previously duplicated four nearly
    # identical if-blocks each; they now share the table-driven helper.
    def keyPressEvent(self, event1):
        self._send_command(event1.key(), self.PRESS_COMMANDS)

    def keyReleaseEvent(self, event):
        self._send_command(event.key(), self.RELEASE_COMMANDS)
def main():
    """Start the Qt application and show the robot control window."""
    application = QtGui.QApplication(sys.argv)
    window = main_menu()  # keep a reference so the window is not collected
    application.exec_()


if __name__ == '__main__':
    main()
| gpl-2.0 |
DanielNeugebauer/adhocracy | src/adhocracy/lib/util.py | 4 | 4767 | import collections
import logging
import os
import os.path
import shutil
import time
import uuid
from pylons import config
from pylons.i18n import _
log = logging.getLogger(__name__)
def timedelta2seconds(delta):
    """Return the given :class:`datetime.timedelta` as a float number of
    seconds.

    Uses the standard-library ``timedelta.total_seconds()`` (Python 2.7+),
    which computes exactly the former hand-rolled
    microseconds/seconds/days arithmetic.
    """
    return delta.total_seconds()
def datetime2seconds(dt):
'''
convert a :class:`datetime.datetime` object into seconds since
the epoche.
'''
return time.mktime(dt.timetuple())
def random_token():
    """Return a short random token: the last character group of a freshly
    generated uuid4 (12 hex digits).

    NOTE(review): the previous docstring said "first" group, but
    ``split('-').pop()`` takes the final one.
    """
    return unicode(uuid.uuid4()).rpartition(u'-')[2]
def random_username():
    """Return a pronounceable pseudo-name of 6 to 9 letters that
    alternates between consonants and vowels, starting with either."""
    import random
    vowel_pool = 'aeiouy'
    consonant_pool = 'bcdfghjklmnprstvwxz'
    length = random.randint(6, 9)
    use_consonant = random.randint(0, 1) == 0
    letters = []
    for _ in range(length):
        pool = consonant_pool if use_consonant else vowel_pool
        letters.append(random.choice(pool))
        use_consonant = not use_consonant
    return u''.join(letters)
def get_entity_or_abort(cls, id, instance_filter=True, **kwargs):
    """
    Return either the instance identified by the given ID or
    raise a HTTP 404 Exception within the controller.

    Raises TypeError when *cls* has no ``find()`` method.
    """
    # BUG FIX: the docstring above used to sit *after* an import
    # statement, which made it a no-op string expression instead of the
    # function's __doc__.  The templating import is now done lazily, only
    # on the abort path, which also avoids the circular import the lazy
    # import was there for in the first place.
    if not hasattr(cls, 'find'):
        raise TypeError("The given class does not have a find() method")
    obj = cls.find(id, instance_filter=instance_filter, **kwargs)
    if not obj:
        from templating import ret_abort
        ret_abort(_("Could not find the entity '%s'") % id, code=404)
    return obj
# File system related functions:
def get_site_directory(app_conf=None):
    """Return the absolute path of the site directory, creating it if it
    does not exist.

    Uses ``adhocracy.site.dir`` from *app_conf* (or the global pylons
    config), falling back to ``<here>/site``.  Raises IOError when the
    configured path exists but is not a directory.
    """
    conf = config if app_conf is None else app_conf
    default = os.path.join(conf.get('here'), 'site')
    site_directory = os.path.abspath(conf.get('adhocracy.site.dir', default))
    if os.path.exists(site_directory):
        if not os.path.isdir(site_directory):
            raise IOError("adhocracy.site.dir must be a directory!")
    else:
        os.makedirs(site_directory)
    return site_directory
def get_fallback_directory(app_conf=None):
    """Return the absolute pylons root path from *app_conf* (or the
    global configuration) -- the packaged fallback location."""
    conf = app_conf if app_conf is not None else config
    return os.path.abspath(conf.get('pylons.paths').get('root'))
def compose_path(basedir, *a):
    """Join the components *a* onto *basedir* and return the absolute
    result.

    Raises IOError when the resulting path escapes *basedir* (e.g. via
    ``..`` components).
    """
    path = os.path.abspath(os.path.join(basedir, *a))
    base = os.path.abspath(basedir)
    # BUG FIX: the old check was path.startswith(basedir), which accepted
    # sibling directories sharing a string prefix (e.g. '/base2' passed
    # the check for basedir '/base').  Compare against the base directory
    # with a trailing separator instead.
    if path != base and not path.startswith(base + os.sep):
        # escape attempt
        raise IOError("Path outside scope")
    return path
def get_site_path(*a, **kwargs):
    """Compose a path for *a* inside the (auto-created) site directory."""
    conf = kwargs.get('app_conf', None)
    return compose_path(get_site_directory(app_conf=conf), *a)
def get_path(*a, **kwargs):
    """Resolve *a* against the site directory first and the packaged
    fallback directory second; return None when neither file exists."""
    conf = kwargs.get('app_conf', None)
    site_candidate = compose_path(get_site_directory(app_conf=conf), *a)
    if os.path.exists(site_candidate):
        return site_candidate
    fallback_candidate = compose_path(get_fallback_directory(app_conf=conf), *a)
    if os.path.exists(fallback_candidate):
        return fallback_candidate
    return None
def create_site_subdirectory(*a, **kwargs):
    """Ensure the subdirectory *a* exists below the site directory and
    return its absolute path."""
    subdir = get_site_path(*a, app_conf=kwargs.get('app_conf'))
    if not os.path.exists(subdir):
        os.makedirs(subdir)
    return subdir
def replicate_fallback(*a, **kwargs):
    # Ensure the site directory contains its own copy of the requested
    # file: on first access, copy the packaged fallback version into the
    # site tree; afterwards, always return the site path.
    to_path = get_site_path(*a, **kwargs)
    if not os.path.exists(to_path):
        log.debug("Setting up site item at: %s" % to_path)
        to_dir = os.path.dirname(to_path)
        if not os.path.exists(to_dir):
            os.makedirs(to_dir)
        # get_path() returns the first existing source (site or fallback).
        from_path = get_path(*a, **kwargs)
        if from_path is None:
            raise IOError("Site file does not exist.")
        if not from_path == to_path:
            shutil.copy(from_path, to_path)
    return to_path
def generate_sequence(initial=10,
                      factors=(2, 2.5, 2),
                      minimum=None,
                      maximum=None):
    """Yield an increasing sequence of ints starting at *initial*,
    repeatedly multiplied by the cycling *factors*.

    Values below *minimum* are skipped; generation stops once *maximum*
    is reached, always yielding one final value >= *maximum*.  With
    ``maximum=None`` the generator is infinite.

    BUG FIX: the *factors* default was a mutable list; it is now a tuple
    (the previous code never mutated it, so behaviour is unchanged, but
    the mutable-default anti-pattern is removed).
    """
    factor_deque = collections.deque(factors)
    current = initial
    while maximum is None or current < maximum:
        if minimum is None or current >= minimum:
            yield int(current)
        current *= factor_deque[0]
        factor_deque.rotate(-1)
    yield int(current)
def get_client_ip(environ, config=config):
    # Return the client's IP address from the WSGI environ.  Imported
    # lazily to avoid a circular import at module load time.
    import adhocracy.config
    if adhocracy.config.get_bool('adhocracy.behind_proxy', config=config):
        # Behind a reverse proxy the real client address is the last
        # (right-most) entry of the X-Forwarded-For header.
        try:
            header_val = environ['HTTP_X_FORWARDED_FOR']
            return header_val.rpartition(u',')[2].strip()
        except KeyError:
            pass
    return environ['REMOTE_ADDR']
def split_filter(condition, seq):
    '''
    Split *seq* into two lists: the first holds the items for which
    *condition* is true, the second the items for which it is false.
    *condition* is evaluated exactly once per item, so side-effecting
    predicates and single-pass iterables are handled correctly.
    '''
    matching, rest = [], []
    for item in seq:
        target = matching if condition(item) else rest
        target.append(item)
    return matching, rest
| agpl-3.0 |
gooddata/openstack-nova | nova/pci/whitelist.py | 5 | 3433 | # Copyright (c) 2013 Intel, Inc.
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils
from nova import exception
from nova.i18n import _
from nova.pci import devspec
class Whitelist(object):
    """Represents the set of assignable PCI devices.

    Not every device on a compute node may be handed to a guest; the
    cloud administrator whitelists devices by properties such as
    ``vendor_id`` or ``product_id``. With no whitelist configured,
    nothing is assignable.
    """

    def __init__(self, whitelist_spec=None):
        """Build the whitelist from configuration.

        :param whitelist_spec: A JSON string for a dictionary or list
            thereof; each dictionary specifies required PCI device
            properties. For example,
            '[{"product_id":"1520", "vendor_id":"8086"}]' permits
            devices with that vendor/product pair. See
            ``passthrough_whitelist`` in ``nova.conf.pci`` for details.
        """
        self.specs = (self._parse_white_list_from_config(whitelist_spec)
                      if whitelist_spec else [])

    @staticmethod
    def _parse_white_list_from_config(whitelists):
        """Parse and validate the pci whitelist from the nova config."""
        specs = []
        for jsonspec in whitelists:
            try:
                parsed = jsonutils.loads(jsonspec)
            except ValueError:
                raise exception.PciConfigInvalidWhitelist(
                    reason=_("Invalid entry: '%s'") % jsonspec)
            # Accept a single dict as shorthand for a one-element list.
            if isinstance(parsed, dict):
                parsed = [parsed]
            elif not isinstance(parsed, list):
                raise exception.PciConfigInvalidWhitelist(
                    reason=_("Invalid entry: '%s'; "
                             "Expecting list or dict") % jsonspec)
            for entry in parsed:
                if not isinstance(entry, dict):
                    raise exception.PciConfigInvalidWhitelist(
                        reason=_("Invalid entry: '%s'; "
                                 "Expecting dict") % entry)
                specs.append(devspec.PciDeviceSpec(entry))
        return specs

    def device_assignable(self, dev):
        """Return True when *dev* matches any whitelist entry.

        :param dev: A dictionary describing the device properties
        """
        return any(spec.match(dev) for spec in self.specs)

    def get_devspec(self, pci_dev):
        """Return the first spec matching *pci_dev*, or None."""
        for spec in self.specs:
            if spec.match_pci_obj(pci_dev):
                return spec
        return None
| apache-2.0 |
yongshengwang/hue | build/env/lib/python2.7/site-packages/Django-1.6.10-py2.7.egg/django/contrib/staticfiles/management/commands/findstatic.py | 123 | 1282 | from __future__ import unicode_literals
import os
from optparse import make_option
from django.core.management.base import LabelCommand
from django.utils.encoding import force_text
from django.contrib.staticfiles import finders
class Command(LabelCommand):
    """Management command printing where the given static files are found."""
    help = "Finds the absolute paths for the given static file(s)."
    args = "[file ...]"
    label = 'static file'
    option_list = LabelCommand.option_list + (
        make_option('--first', action='store_false', dest='all', default=True,
                    help="Only return the first match for each static file."),
    )

    def handle_label(self, path, **options):
        """Locate *path* via the staticfiles finders and report the result."""
        verbosity = int(options.get('verbosity', 1))
        matches = finders.find(path, all=options['all'])
        path = force_text(path)
        if not matches:
            if verbosity >= 1:
                self.stderr.write("No matching file found for '%s'." % path)
            return
        if not isinstance(matches, (list, tuple)):
            matches = [matches]
        resolved = (force_text(os.path.realpath(match)) for match in matches)
        if verbosity >= 1:
            return "Found '%s' here:\n %s" % (path, '\n '.join(resolved))
        return '\n'.join(resolved)
| apache-2.0 |
robinro/ansible | test/units/parsing/utils/test_addresses.py | 135 | 3742 | # -*- coding: utf-8 -*-
import unittest
from ansible.parsing.utils.addresses import parse_address
class TestParseAddress(unittest.TestCase):
    """Exercise parse_address() against valid and invalid hosts/ranges.

    Each table maps an input string to the expected (host, port) pair;
    [None, None] means parsing is expected to fail.
    """

    tests = {
        # IPv4 addresses
        '192.0.2.3': ['192.0.2.3', None],
        '192.0.2.3:23': ['192.0.2.3', 23],

        # IPv6 addresses
        '::': ['::', None],
        '::1': ['::1', None],
        '[::1]:442': ['::1', 442],
        'abcd:ef98:7654:3210:abcd:ef98:7654:3210': ['abcd:ef98:7654:3210:abcd:ef98:7654:3210', None],
        '[abcd:ef98:7654:3210:abcd:ef98:7654:3210]:42': ['abcd:ef98:7654:3210:abcd:ef98:7654:3210', 42],
        '1234:5678:9abc:def0:1234:5678:9abc:def0': ['1234:5678:9abc:def0:1234:5678:9abc:def0', None],
        '1234::9abc:def0:1234:5678:9abc:def0': ['1234::9abc:def0:1234:5678:9abc:def0', None],
        '1234:5678::def0:1234:5678:9abc:def0': ['1234:5678::def0:1234:5678:9abc:def0', None],
        '1234:5678:9abc::1234:5678:9abc:def0': ['1234:5678:9abc::1234:5678:9abc:def0', None],
        '1234:5678:9abc:def0::5678:9abc:def0': ['1234:5678:9abc:def0::5678:9abc:def0', None],
        '1234:5678:9abc:def0:1234::9abc:def0': ['1234:5678:9abc:def0:1234::9abc:def0', None],
        '1234:5678:9abc:def0:1234:5678::def0': ['1234:5678:9abc:def0:1234:5678::def0', None],
        '1234:5678:9abc:def0:1234:5678::': ['1234:5678:9abc:def0:1234:5678::', None],
        '::9abc:def0:1234:5678:9abc:def0': ['::9abc:def0:1234:5678:9abc:def0', None],
        '0:0:0:0:0:ffff:1.2.3.4': ['0:0:0:0:0:ffff:1.2.3.4', None],
        '0:0:0:0:0:0:1.2.3.4': ['0:0:0:0:0:0:1.2.3.4', None],
        '::ffff:1.2.3.4': ['::ffff:1.2.3.4', None],
        '::1.2.3.4': ['::1.2.3.4', None],
        '1234::': ['1234::', None],

        # Hostnames
        'some-host': ['some-host', None],
        'some-host:80': ['some-host', 80],
        'some.host.com:492': ['some.host.com', 492],
        '[some.host.com]:493': ['some.host.com', 493],
        'a-b.3foo_bar.com:23': ['a-b.3foo_bar.com', 23],
        u'fóöbär': [u'fóöbär', None],
        u'fóöbär:32': [u'fóöbär', 32],
        u'fóöbär.éxàmplê.com:632': [u'fóöbär.éxàmplê.com', 632],

        # Various errors
        '': [None, None],
        'some..host': [None, None],
        'some.': [None, None],
        '[example.com]': [None, None],
        'some-': [None, None],
        'some-.foo.com': [None, None],
        'some.-foo.com': [None, None],
    }

    range_tests = {
        '192.0.2.[3:10]': ['192.0.2.[3:10]', None],
        '192.0.2.[3:10]:23': ['192.0.2.[3:10]', 23],
        'abcd:ef98::7654:[1:9]': ['abcd:ef98::7654:[1:9]', None],
        '[abcd:ef98::7654:[6:32]]:2222': ['abcd:ef98::7654:[6:32]', 2222],
        '[abcd:ef98::7654:[9ab3:fcb7]]:2222': ['abcd:ef98::7654:[9ab3:fcb7]', 2222],
        u'fóöb[a:c]r.éxàmplê.com:632': [u'fóöb[a:c]r.éxàmplê.com', 632],
        '[a:b]foo.com': ['[a:b]foo.com', None],
        'foo[a:b].com': ['foo[a:b].com', None],
        'foo[a:b]:42': ['foo[a:b]', 42],
        'foo[a-b]-.com': [None, None],
        'foo[a-b]:32': [None, None],
        'foo[x-y]': [None, None],
    }

    def _check(self, cases, allow_ranges=False):
        """Assert parse_address() yields the expected (host, port) pairs.

        A failing parse maps to (None, None). The original code used a
        bare ``except:`` (which also swallows SystemExit and
        KeyboardInterrupt) and plain ``assert`` statements (stripped
        under ``python -O``); both are fixed here.
        """
        for address, expected in cases.items():
            try:
                (host, port) = parse_address(address,
                                             allow_ranges=allow_ranges)
            except Exception:
                host = None
                port = None
            self.assertEqual(host, expected[0])
            self.assertEqual(port, expected[1])

    def test_without_ranges(self):
        self._check(self.tests)

    def test_with_ranges(self):
        self._check(self.range_tests, allow_ranges=True)
| gpl-3.0 |
Yuriy-Leonov/nova | nova/tests/virt/libvirt/test_designer.py | 12 | 1298 | # Copyright (C) 2013 eNovance SAS <licensing@enovance.com>
#
# Author: Cyril Roelandt <cyril.roelandt@enovance.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import test
from nova.virt.libvirt import designer
class DesignerTestCase(test.NoDBTestCase):
    """Tests for nova.virt.libvirt.designer helpers."""

    def test_set_vif_bandwidth_config_no_extra_specs(self):
        """set_vif_bandwidth_config() must tolerate a flavor dict that
        has no 'extra_specs' entry."""
        try:
            # The conf argument is never used on this path, so None is
            # fine; an empty dict exercises the missing-key case.
            designer.set_vif_bandwidth_config(None, {})
        except KeyError as e:
            self.fail('KeyError: %s' % e)
| apache-2.0 |
robjohnson189/home-assistant | homeassistant/components/sensor/tellstick.py | 19 | 3900 | """
Support for Tellstick sensors.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.tellstick/
"""
import logging
from collections import namedtuple
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import TEMP_CELSIUS
from homeassistant.helpers.entity import Entity
import homeassistant.helpers.config_validation as cv
# Python package required by this platform (installed by Home Assistant).
REQUIREMENTS = ['tellcore-py==1.1.2']

_LOGGER = logging.getLogger(__name__)

# (name, unit) pair describing one tellcore datatype.
DatatypeDescription = namedtuple('DatatypeDescription', ['name', 'unit'])

# Configuration keys accepted by this platform.
CONF_DATATYPE_MASK = 'datatype_mask'
CONF_ONLY_NAMED = 'only_named'
CONF_TEMPERATURE_SCALE = 'temperature_scale'

# Defaults; 127 presumably enables the bits of all seven datatypes
# handled in setup_platform() below — TODO confirm against tellcore.
DEFAULT_DATATYPE_MASK = 127
DEFAULT_ONLY_NAMED = False
DEFAULT_TEMPERATURE_SCALE = TEMP_CELSIUS

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_ONLY_NAMED, default=DEFAULT_ONLY_NAMED): cv.boolean,
    vol.Optional(CONF_TEMPERATURE_SCALE, default=DEFAULT_TEMPERATURE_SCALE):
        cv.string,
    vol.Optional(CONF_DATATYPE_MASK, default=DEFAULT_DATATYPE_MASK):
        cv.positive_int,
})
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup the Tellstick sensors."""
    import tellcore.telldus as telldus
    import tellcore.constants as tellcore_constants

    sensor_value_descriptions = {
        tellcore_constants.TELLSTICK_TEMPERATURE:
            DatatypeDescription(
                'temperature', config.get(CONF_TEMPERATURE_SCALE)),
        tellcore_constants.TELLSTICK_HUMIDITY:
            DatatypeDescription('humidity', '%'),
        tellcore_constants.TELLSTICK_RAINRATE:
            DatatypeDescription('rain rate', ''),
        tellcore_constants.TELLSTICK_RAINTOTAL:
            DatatypeDescription('rain total', ''),
        tellcore_constants.TELLSTICK_WINDDIRECTION:
            DatatypeDescription('wind direction', ''),
        tellcore_constants.TELLSTICK_WINDAVERAGE:
            DatatypeDescription('wind average', ''),
        tellcore_constants.TELLSTICK_WINDGUST:
            DatatypeDescription('wind gust', ''),
    }

    try:
        tellcore_lib = telldus.TelldusCore()
    except OSError:
        _LOGGER.exception('Could not initialize Tellstick')
        return

    datatype_mask = config.get(CONF_DATATYPE_MASK)
    entities = []
    for tellcore_sensor in tellcore_lib.sensors():
        # Sensors may be given friendly names in the config, keyed by id.
        try:
            sensor_name = config[tellcore_sensor.id]
        except KeyError:
            if config.get(CONF_ONLY_NAMED):
                continue
            sensor_name = str(tellcore_sensor.id)
        # One entity per datatype enabled in the mask and reported by
        # the physical sensor.
        for datatype, description in sensor_value_descriptions.items():
            if datatype & datatype_mask and \
                    tellcore_sensor.has_value(datatype):
                entities.append(TellstickSensor(
                    sensor_name, tellcore_sensor, datatype, description))
    add_devices(entities)
class TellstickSensor(Entity):
    """Representation of a Tellstick sensor."""

    def __init__(self, name, tellcore_sensor, datatype, sensor_info):
        """Initialize the sensor."""
        self._tellcore_sensor = tellcore_sensor
        self._datatype = datatype
        self._value = None
        self._unit_of_measurement = sensor_info.unit or None
        self._name = '{} {}'.format(name, sensor_info.name)

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this entity, if any."""
        return self._unit_of_measurement

    @property
    def state(self):
        """Return the state of the sensor."""
        return self._value

    def update(self):
        """Fetch the latest reading from the Tellstick sensor."""
        reading = self._tellcore_sensor.value(self._datatype)
        self._value = reading.value
| mit |
abdellatifkarroum/odoo | addons/calendar/contacts.py | 389 | 1414 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2011 OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class calendar_contacts(osv.osv):
    """Link between a user and the partners whose calendars they follow."""
    _name = 'calendar.contacts'

    _columns = {
        'user_id': fields.many2one('res.users', 'Me'),
        'partner_id': fields.many2one('res.partner', 'Employee',
                                      required=True, domain=[]),
        'active': fields.boolean('active'),
    }

    _defaults = {
        # Default the owner to the current user.
        'user_id': lambda self, cr, uid, ctx: uid,
        'active': True,
    }
hongtoushizi/kids | test/funcational/qps.py | 6 | 1847 | #! /usr/bin/env python
# coding=utf-8
import redis
from multiprocessing import Process, Queue
from time import sleep
KIDS_HOST = {'host': 't-infra', 'port': 3389}
def publisher(topics):
    """Publish each topic name to its own channel in a tight loop, forever."""
    client = redis.StrictRedis(**KIDS_HOST)
    sent = 0
    while True:
        for topic in topics:
            client.publish(topic, topic)
            sent += 1
def sub_msg(topic):
    """Subscribe to a single channel and drain its messages forever."""
    pubsub = redis.StrictRedis(**KIDS_HOST).pubsub()
    pubsub.subscribe(topic)
    for _ in pubsub.listen():
        pass
def psub_msg(pattern):
    """Pattern-subscribe and drain the matching messages forever."""
    pubsub = redis.StrictRedis(**KIDS_HOST).pubsub()
    pubsub.psubscribe(pattern)
    for _ in pubsub.listen():
        pass
def main():
    """Spawn subscriber and publisher processes against the kids server
    and print the measured publish QPS every five seconds, forever."""
    # NOTE(review): the report loop below uses Python 2 print statements;
    # this script requires Python 2.
    pub_num = 300   # publisher processes
    sub_num = 10    # plain-subscribe processes
    topic_num = 10  # distinct channels
    psub_num = 3    # pattern-subscribe processes ('*')
    topics = []
    for i in range(0, topic_num):
        t = 'topic' + str(i)
        topics.append(t)
    subers = []
    psubers = []
    pubers = []
    red = redis.StrictRedis(**KIDS_HOST)
    for i in range(0, sub_num):
        p = Process(target=sub_msg, args=(topics[i],))
        p.start()
        subers.append(p)
    for i in range(0, psub_num):
        p = Process(target=psub_msg, args=('*',))
        p.start()
        psubers.append(p)
    print("all subscribers have connected")
    for i in range(0, pub_num):
        p = Process(target=publisher, args=(topics,))
        p.start()
        pubers.append(p)
    print("all publishers have connected")
    # Baseline server stats; QPS is computed as the delta since baseline
    # divided by elapsed server uptime.
    binfo = red.info()
    while True:
        sleep(5)
        nowinfo = red.info()
        time_pass = int(nowinfo['uptime_in_seconds']) - int(binfo['uptime_in_seconds'])
        print 'time passed =%10d' % time_pass,
        print 'QPS = %10d' % ((int(nowinfo['message_in']) - int(binfo['message_in'])) / time_pass),
        print 'QPS from server =%10d' % nowinfo['message_in_per_second']


if __name__ == '__main__':
    main()
| bsd-3-clause |
mkaluza/external_chromium_org | tools/perf/metrics/io.py | 24 | 2489 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from metrics import Metric
class IOMetric(Metric):
  """IO-related metrics, obtained via telemetry.core.Browser."""

  # (io_stats key, result-name prefix, unit, divisor) for each summary
  # value; replaces four near-identical copy-pasted if-blocks.
  _SUMMARY_SPECS = (
      ('ReadOperationCount', 'read_operations_', 'count', 1),
      ('WriteOperationCount', 'write_operations_', 'count', 1),
      ('ReadTransferCount', 'read_bytes_', 'kb', 1024),
      ('WriteTransferCount', 'write_bytes_', 'kb', 1024),
  )

  @classmethod
  def CustomizeBrowserOptions(cls, options):
    options.AppendExtraBrowserArgs('--no-sandbox')

  def Start(self, page, tab):
    raise NotImplementedError()

  def Stop(self, page, tab):
    raise NotImplementedError()

  def AddResults(self, tab, results):
    # This metric currently only returns summary results, not per-page results.
    raise NotImplementedError()

  def AddSummaryResults(self, browser, results):
    """Add summary results to the results object."""
    io_stats = browser.io_stats
    if not io_stats['Browser']:
      return

    def AddSummariesForProcessType(process_type_io, process_type_trace):
      """For a given process type, add all relevant summary results.

      Args:
        process_type_io: Type of process (eg Browser or Renderer).
        process_type_trace: String to be added to the trace name in the results.
      """
      stats = io_stats[process_type_io]
      for key, prefix, unit, divisor in self._SUMMARY_SPECS:
        if key in stats:
          results.AddSummary(prefix + process_type_trace, unit,
                             stats[key] / divisor,
                             data_type='unimportant')

    AddSummariesForProcessType('Browser', 'browser')
    AddSummariesForProcessType('Renderer', 'renderer')
    AddSummariesForProcessType('Gpu', 'gpu')
| bsd-3-clause |
SeanCameronConklin/aima-python | submissions/Porter/vacuum2Runner.py | 18 | 6343 | import agents as ag
import envgui as gui
# change this line ONLY to refer to your project
import submissions.Porter.vacuum2 as v2
# ______________________________________________________________________________
# Vacuum environmenty
class Dirt(ag.Thing):
    """Marker Thing representing dirt the vacuum agent can suck up."""
    pass
class VacuumEnvironment(ag.XYEnvironment):
    """The environment of [Ex. 2.12]. Agent perceives dirty or clean,
    and bump (into obstacle) or not; 2D discrete world of unknown size;
    performance measure is 100 for each dirt cleaned, and -1 for
    each turn taken."""

    def __init__(self, width=4, height=3):
        super(VacuumEnvironment, self).__init__(width, height)
        self.add_walls()

    def thing_classes(self):
        return [ag.Wall, Dirt,
                # ReflexVacuumAgent, RandomVacuumAgent,
                # TableDrivenVacuumAgent, ModelBasedVacuumAgent
                ]

    def percept(self, agent):
        """The percept is a tuple of ('Dirty' or 'Clean', 'Bump' or 'None').
        Unlike the TrivialVacuumEnvironment, location is NOT perceived."""
        dirty = self.some_things_at(agent.location, Dirt)
        status = 'Dirty' if dirty else 'Clean'
        bump = 'Bump' if agent.bump else 'None'
        return (bump, status)

    def execute_action(self, agent, action):
        if action == 'Suck':
            # Sucking removes one dirt (if present) and earns 100 points.
            dirt_here = self.list_things_at(agent.location, Dirt)
            if dirt_here:
                agent.performance += 100
                self.delete_thing(dirt_here[0])
        else:
            super(VacuumEnvironment, self).execute_action(agent, action)
        if action != 'NoOp':
            agent.performance -= 1
# # Launch a Text-Based Environment
# print('Two Cells, Agent on Left:')
# v = VacuumEnvironment(4, 3)
# v.add_thing(Dirt(), (1, 1))
# v.add_thing(Dirt(), (2, 1))
# a = v2.HW2Agent()
# a = ag.TraceAgent(a)
# v.add_thing(a, (1, 1))
# t = gui.EnvTUI(v)
# t.mapImageNames({
# ag.Wall: '#',
# Dirt: '@',
# ag.Agent: 'V',
# })
# t.step(0)
# t.list_things(Dirt)
# t.step(4)
# if len(t.env.get_things(Dirt)) > 0:
# t.list_things(Dirt)
# else:
# print('All clean!')
#
# # Check to continue
# if input('Do you want to continue [y/N]? ') != 'y':
# exit(0)
# else:
# print('----------------------------------------')
#
# # Repeat, but put Agent on the Right
# print('Two Cells, Agent on Right:')
# v = VacuumEnvironment(4, 3)
# v.add_thing(Dirt(), (1, 1))
# v.add_thing(Dirt(), (2, 1))
# a = v2.HW2Agent()
# a = ag.TraceAgent(a)
# v.add_thing(a, (2, 1))
# t = gui.EnvTUI(v)
# t.mapImageNames({
# ag.Wall: '#',
# Dirt: '@',
# ag.Agent: 'V',
# })
# t.step(0)
# t.list_things(Dirt)
# t.step(4)
# if len(t.env.get_things(Dirt)) > 0:
# t.list_things(Dirt)
# else:
# print('All clean!')
#
# # Check to continue
# if input('Do you want to continue [y/N]? ') != 'y':
# exit(0)
# else:
# print('----------------------------------------')
#
# # Repeat, but put Agent on the Right
# print('Two Cells, Agent on Top:')
# v = VacuumEnvironment(3, 4)
# v.add_thing(Dirt(), (1, 1))
# v.add_thing(Dirt(), (1, 2))
# a = v2.HW2Agent()
# a = ag.TraceAgent(a)
# v.add_thing(a, (1, 1))
# t = gui.EnvTUI(v)
# t.mapImageNames({
# ag.Wall: '#',
# Dirt: '@',
# ag.Agent: 'V',
# })
# t.step(0)
# t.list_things(Dirt)
# t.step(4)
# if len(t.env.get_things(Dirt)) > 0:
# t.list_things(Dirt)
# else:
# print('All clean!')
#
# # Check to continue
# if input('Do you want to continue [y/N]? ') != 'y':
# exit(0)
# else:
# print('----------------------------------------')
#
# # Repeat, but put Agent on the Right
# print('Two Cells, Agent on Bottom:')
# v = VacuumEnvironment(3, 4)
# v.add_thing(Dirt(), (1, 1))
# v.add_thing(Dirt(), (1, 2))
# a = v2.HW2Agent()
# a = ag.TraceAgent(a)
# v.add_thing(a, (1, 2))
# t = gui.EnvTUI(v)
# t.mapImageNames({
# ag.Wall: '#',
# Dirt: '@',
# ag.Agent: 'V',
# })
# t.step(0)
# t.list_things(Dirt)
# t.step(4)
# if len(t.env.get_things(Dirt)) > 0:
# t.list_things(Dirt)
# else:
# print('All clean!')
#
# # Check to continue
# if input('Do you want to continue [y/N]? ') != 'y':
# exit(0)
# else:
# print('----------------------------------------')
def testVacuum(label, w=4, h=3,
               dloc=((1, 1), (2, 1)),
               vloc=(1, 1),
               limit=6):
    """Run one vacuum scenario and prompt before continuing.

    Builds a w x h environment, drops Dirt at each location in *dloc*,
    places the HW2 agent at *vloc*, steps the text UI *limit* times and
    reports any dirt left over. The *dloc* default is a tuple rather
    than a list to avoid the mutable-default-argument pitfall.
    """
    print(label)
    v = VacuumEnvironment(w, h)
    for loc in dloc:
        v.add_thing(Dirt(), loc)
    a = v2.HW2Agent()
    a = ag.TraceAgent(a)
    v.add_thing(a, vloc)
    t = gui.EnvTUI(v)
    t.mapImageNames({
        ag.Wall: '#',
        Dirt: '@',
        ag.Agent: 'V',
    })
    t.step(0)
    t.list_things(Dirt)
    t.step(limit)
    if len(t.env.get_things(Dirt)) > 0:
        t.list_things(Dirt)
    else:
        print('All clean!')

    # Check to continue
    if input('Do you want to continue [Y/n]? ') == 'n':
        exit(0)
    else:
        print('----------------------------------------')
# Scripted text-mode scenarios: two-cell and five-cell corridors and a
# 5x4 grid, starting the agent from each side in turn.
testVacuum('Two Cells, Agent on Left:')
testVacuum('Two Cells, Agent on Right:', vloc=(2,1))
testVacuum('Two Cells, Agent on Top:', w=3, h=4,
           dloc=[(1,1), (1,2)], vloc=(1,1) )
testVacuum('Two Cells, Agent on Bottom:', w=3, h=4,
           dloc=[(1,1), (1,2)], vloc=(1,2) )
testVacuum('Five Cells, Agent on Left:', w=7, h=3,
           dloc=[(2,1), (4,1)], vloc=(1,1), limit=12)
testVacuum('Five Cells, Agent near Right:', w=7, h=3,
           dloc=[(2,1), (3,1)], vloc=(4,1), limit=12)
testVacuum('Five Cells, Agent on Top:', w=3, h=7,
           dloc=[(1,2), (1,4)], vloc=(1,1), limit=12 )
testVacuum('Five Cells, Agent Near Bottom:', w=3, h=7,
           dloc=[(1,2), (1,3)], vloc=(1,4), limit=12 )
testVacuum('5x4 Grid, Agent in Top Left:', w=7, h=6,
           dloc=[(1,4), (2,2), (3, 3), (4,1), (5,2)],
           vloc=(1,1), limit=46 )
testVacuum('5x4 Grid, Agent near Bottom Right:', w=7, h=6,
           dloc=[(1,3), (2,2), (3, 4), (4,1), (5,2)],
           vloc=(4, 3), limit=46 )

# Final graphical run: random agent location, randomly scattered dirt.
v = VacuumEnvironment(6, 3)
a = v2.HW2Agent()
a = ag.TraceAgent(a)
loc = v.random_location_inbounds()
v.add_thing(a, location=loc)
v.scatter_things(Dirt)
g = gui.EnvGUI(v, 'Vaccuum')
c = g.getCanvas()
c.mapImageNames({
    ag.Wall: 'images/wall.jpg',
    # Floor: 'images/floor.png',
    Dirt: 'images/dirt.png',
    ag.Agent: 'images/vacuum.png',
})
c.update()
g.mainloop() | mit |
be-cloud-be/horizon-addons | horizon/school_evaluations/wizard/evaluation_summary.py | 1 | 3973 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2015 be-cloud.be
# Jerome Sonnet <jerome.sonnet@be-cloud.be>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
from openerp import api, fields, models, _
from openerp.exceptions import UserError
from openerp.tools.safe_eval import safe_eval
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT
from dateutil.relativedelta import relativedelta
from datetime import datetime,date
import openerp.addons.decimal_precision as dp
_logger = logging.getLogger(__name__)
class EvaluationSummaryWizard(models.TransientModel):
    """Wizard collecting the parameters of an evaluation summary report."""
    _name = "school.evaluation.summary.wizard"
    _description = "School Evaluation Summary Wizard"

    year_id = fields.Many2one('school.year', string='Year',
                              default=lambda self: self.env.user.current_year_id,
                              ondelete='cascade')
    domain_id = fields.Many2one('school.domain', string='Domain',
                                ondelete='cascade')
    session = fields.Selection([
        ('first', 'First Session'),
        ('second', 'Second Session'),
    ], string="Session")

    @api.multi
    def generate_summary(self):
        """Render the evaluation summary report for the chosen criteria."""
        self.ensure_one()
        data = {
            'year_id': self.year_id.id,
            'domain_id': self.domain_id.id,
            'session': self.session,
        }
        return self.env['report'].get_action(
            self, 'school_evaluations.evaluation_summary_content', data=data)
class ReportEvaluationSummary(models.AbstractModel):
    """QWeb report grouping individual blocs by study level."""
    _name = 'report.school_evaluations.evaluation_summary_content'

    @api.multi
    def render_html(self, data):
        """Build the rendering context for the summary report.

        Blocs are selected by year, session-dependent states and an
        optional domain, then bucketed by source_bloc_level (1..5).
        """
        _logger.info('render_html')
        year_id = data['year_id']
        session = data['session']
        domain_id = data['domain_id']
        if session == 'first':
            states = ['postponed', 'awarded_first_session']
        else:
            states = ['awarded_second_session', 'failed']
        search_domain = [('year_id', '=', year_id), ('state', 'in', states)]
        if domain_id:
            search_domain.append(('source_bloc_domain_id', '=', domain_id))
        records = self.env['school.individual_bloc'].search(
            search_domain, order="source_bloc_level, name")
        # One bucket per study level, in level order.
        docs = [{'name': level_name, 'blocs': []}
                for level_name in ('Bac 1', 'Bac 2', 'Bac 3',
                                   'Master 1', 'Master 2')]
        for record in records:
            docs[int(record.source_bloc_level) - 1]['blocs'].append(record)
        docargs = {
            'doc_model': 'school.individual_bloc',
            'docs': docs,
            'year': self.env['school.year'].browse(year_id).name,
        }
        return self.env['report'].render(
            'school_evaluations.evaluation_summary_content', docargs)
chen2aaron/SnirteneCodes | JustForFUN/v2ex_mission.py | 1 | 1598 | # -*- coding: utf-8 -*-
import re
# Toggle to route traffic through a SOCKS proxy (requesocks).
USE_SOCKS_PROXY = 0
if USE_SOCKS_PROXY:
    import requesocks as requests
else:
    import requests
    # import socks, socket
    # socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", 1080)
    # socket.socket = socks.socksocket

# NOTE(review): credentials hard-coded in source; keep real secrets out
# of version control.
username = 'x@gmail.com'
password = 'xyz'

host = 'http://www.v2ex.com'
signin_url = host + '/signin'
mission_url = host + '/mission/daily'
coin_url = mission_url + '/redeem'

headers = {
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:37.0) Gecko/20100101 Firefox/37.0',
    'Origin': 'http://v2ex.com',
    'Referer': 'http://v2ex.com/signin',
}

proxies = {
    'http': 'socks5://127.0.0.1:1080',
    'https': 'socks5://127.0.0.1:1080'
}

params = {
    'u': username,
    'p': password,
    'next': '/',
}

# Create a session so cookies persist across requests.
s = requests.session()
# s.proxies = proxies if USE_SOCKS_PROXY else ''
# Fetch the login page and scrape the 'once' anti-CSRF token.
signin_resp = s.get(signin_url)
signin_once = re.findall(r'value="(\d+)" name="once"', signin_resp.text)[0]
# Add the token to the POST form and submit the login.
params['once'] = signin_once
r = s.post(url=signin_url, data=params, headers=headers, verify=True)
if r.url != host + '/':
    print('FAIL: %s' % r.url)
else:
    # Logged in: fetch /mission/daily and scrape its 'once' token.
    daily_once = re.findall(r'once=(\d+)', s.get(mission_url).text)[0]
    if daily_once:
        # Redeem the daily mission using the scraped token.
        resp = s.get(url=coin_url, data={'once': daily_once}, headers=headers, verify=True)
        print('SUCCESS: %s' % resp.url)
    else:
        print('BOOM: %s' % daily_once)
| gpl-2.0 |
HLFH/CouchPotatoServer | libs/CodernityDB/database_safe_shared.py | 44 | 8158 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011-2013 Codernity (http://codernity.com)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from CodernityDB.env import cdb_environment
from CodernityDB.database import PreconditionsException, RevConflict, Database
# from database import Database
from collections import defaultdict
from functools import wraps
from types import MethodType
class th_safe_gen:
    """Iterator wrapper that serializes each step of *gen* through the
    lock *l*, making index generators safe to consume across threads.

    Fix: the original called ``self.__gen.next()``, a Python-2-only
    spelling, and defined no ``__next__`` — so the class was not
    iterable on Python 3. Using the ``next()`` builtin plus a
    ``__next__`` alias is behavior-identical on Python 2 and also
    works on Python 3.
    """

    def __init__(self, name, gen, l=None):
        self.lock = l      # lock guarding each advance of the generator
        self.__gen = gen   # the wrapped generator
        self.name = name   # "<index>_<method>" label, for debugging

    def __iter__(self):
        return self

    def next(self):
        # Advance the underlying generator while holding the lock.
        with self.lock:
            return next(self.__gen)

    __next__ = next  # Python 3 iterator protocol

    @staticmethod
    def wrapper(method, index_name, meth_name, l=None):
        """Wrap *method* so the generator it returns is thread-safe."""
        @wraps(method)
        def _inner(*args, **kwargs):
            res = method(*args, **kwargs)
            return th_safe_gen(index_name + "_" + meth_name, res, l)
        return _inner
def safe_wrapper(method, lock):
    """Return *method* wrapped so that every call runs under *lock*."""
    @wraps(method)
    def _locked(*args, **kwargs):
        with lock:
            return method(*args, **kwargs)
    return _locked
class SafeDatabase(Database):
    def __init__(self, path, *args, **kwargs):
        """Thread-safe Database: every index gets its own re-entrant lock."""
        super(SafeDatabase, self).__init__(path, *args, **kwargs)
        # Per-index re-entrant locks, created lazily on first access.
        self.indexes_locks = defaultdict(
            lambda: cdb_environment['rlock_obj']())
        # Serializes open/create/close/destroy against each other.
        self.close_open_lock = cdb_environment['rlock_obj']()
        # Guards cross-index bookkeeping (id_revs, index [re]configuration).
        self.main_lock = cdb_environment['rlock_obj']()
        self.id_revs = {}  # last seen _rev per _id (stale-update detection)
    def __patch_index_gens(self, name):
        """Wrap the generator-returning methods ('all', 'get_many') of the
        named index so the yielded generators take the index lock on every
        step (see th_safe_gen)."""
        ind = self.indexes_names[name]
        for c in ('all', 'get_many'):
            m = getattr(ind, c)
            # Already wrapped (an *_orig backup exists) -> nothing to do.
            if getattr(ind, c + "_orig", None):
                return
            m_fixed = th_safe_gen.wrapper(m, name, c, self.indexes_locks[name])
            setattr(ind, c, m_fixed)
            setattr(ind, c + '_orig', m)  # keep the unwrapped original
    def __patch_index_methods(self, name):
        """Wrap every public method of the named index, and of its storage,
        so each call runs while holding that index's lock."""
        ind = self.indexes_names[name]
        lock = self.indexes_locks[name]
        for curr in dir(ind):
            meth = getattr(ind, curr)
            if not curr.startswith('_') and isinstance(meth, MethodType):
                setattr(ind, curr, safe_wrapper(meth, lock))
        stor = ind.storage
        for curr in dir(stor):
            meth = getattr(stor, curr)
            if not curr.startswith('_') and isinstance(meth, MethodType):
                setattr(stor, curr, safe_wrapper(meth, lock))
    def __patch_index(self, name):
        # Apply both patch passes: lock-wrap the plain methods, then wrap
        # the generator-returning ones.
        self.__patch_index_methods(name)
        self.__patch_index_gens(name)
def initialize(self, *args, **kwargs):
with self.close_open_lock:
self.close_open_lock.acquire()
res = super(SafeDatabase, self).initialize(*args, **kwargs)
for name in self.indexes_names.iterkeys():
self.indexes_locks[name] = cdb_environment['rlock_obj']()
return res
    def open(self, *args, **kwargs):
        """Open the database, then (re)create each index's lock and apply
        the thread-safety wrappers to every index."""
        with self.close_open_lock:
            res = super(SafeDatabase, self).open(*args, **kwargs)
            for name in self.indexes_names.iterkeys():
                self.indexes_locks[name] = cdb_environment['rlock_obj']()
                self.__patch_index(name)
            return res
    def create(self, *args, **kwargs):
        """Create the database, then (re)create each index's lock and apply
        the thread-safety wrappers to every index."""
        with self.close_open_lock:
            res = super(SafeDatabase, self).create(*args, **kwargs)
            for name in self.indexes_names.iterkeys():
                self.indexes_locks[name] = cdb_environment['rlock_obj']()
                self.__patch_index(name)
            return res
    def close(self):
        """Close the database; serialized with open/create/destroy."""
        with self.close_open_lock:
            return super(SafeDatabase, self).close()

    def destroy(self):
        """Destroy the database; serialized with open/create/close."""
        with self.close_open_lock:
            return super(SafeDatabase, self).destroy()
    def add_index(self, *args, **kwargs):
        """Add an index; when the database is already open, give the new
        index its own lock and patch it for thread safety."""
        with self.main_lock:
            res = super(SafeDatabase, self).add_index(*args, **kwargs)
            if self.opened:
                self.indexes_locks[res] = cdb_environment['rlock_obj']()
                self.__patch_index(res)
            return res
def _single_update_index(self, index, data, db_data, doc_id):
with self.indexes_locks[index.name]:
super(SafeDatabase, self)._single_update_index(
index, data, db_data, doc_id)
    def _single_delete_index(self, index, data, doc_id, old_data):
        """Delete from one index while holding that index's own lock."""
        with self.indexes_locks[index.name]:
            super(SafeDatabase, self)._single_delete_index(
                index, data, doc_id, old_data)
    def edit_index(self, *args, **kwargs):
        """Edit an index under the main lock; if the database is open,
        re-create its lock and re-patch it (same post-processing as
        ``add_index``)."""
        with self.main_lock:
            res = super(SafeDatabase, self).edit_index(*args, **kwargs)
            if self.opened:
                self.indexes_locks[res] = cdb_environment['rlock_obj']()
                self.__patch_index(res)
            return res
def set_indexes(self, *args, **kwargs):
try:
self.main_lock.acquire()
super(SafeDatabase, self).set_indexes(*args, **kwargs)
finally:
self.main_lock.release()
def reindex_index(self, index, *args, **kwargs):
if isinstance(index, basestring):
if not index in self.indexes_names:
raise PreconditionsException("No index named %s" % index)
index = self.indexes_names[index]
key = index.name + "reind"
self.main_lock.acquire()
if key in self.indexes_locks:
lock = self.indexes_locks[index.name + "reind"]
else:
self.indexes_locks[index.name +
"reind"] = cdb_environment['rlock_obj']()
lock = self.indexes_locks[index.name + "reind"]
self.main_lock.release()
try:
lock.acquire()
super(SafeDatabase, self).reindex_index(
index, *args, **kwargs)
finally:
lock.release()
def flush(self):
try:
self.main_lock.acquire()
super(SafeDatabase, self).flush()
finally:
self.main_lock.release()
    def fsync(self):
        """Fsync the database under the main lock, serialized with
        ``flush`` and index-set changes."""
        try:
            self.main_lock.acquire()
            super(SafeDatabase, self).fsync()
        finally:
            self.main_lock.release()
    def _update_id_index(self, _rev, data):
        """Update the primary 'id' index under its per-index lock."""
        with self.indexes_locks['id']:
            return super(SafeDatabase, self)._update_id_index(_rev, data)
    def _delete_id_index(self, _id, _rev, data):
        """Delete from the primary 'id' index under its per-index lock."""
        with self.indexes_locks['id']:
            return super(SafeDatabase, self)._delete_id_index(_id, _rev, data)
    def _update_indexes(self, _rev, data):
        """Update the id index first, then every secondary index.

        ``id_revs`` tracks the newest revision seen per document.  If a
        newer update of the same document lands while the secondary
        indexes are being walked, the loop stops early and leaves the
        remaining indexes to that newer update.  Returns
        ``(_id, new_rev)``.
        """
        _id, new_rev, db_data = self._update_id_index(_rev, data)
        with self.main_lock:
            self.id_revs[_id] = new_rev
        for index in self.indexes[1:]:
            with self.main_lock:
                curr_rev = self.id_revs.get(_id)  # newest known rev for _id
                if curr_rev != new_rev:
                    break  # a newer update superseded us; stop this walk
            self._single_update_index(index, data, db_data, _id)
        with self.main_lock:
            # clean up only if no newer update replaced our entry
            if self.id_revs[_id] == new_rev:
                del self.id_revs[_id]
        return _id, new_rev
    def _delete_indexes(self, _id, _rev, data):
        """Delete a document from every index: secondary indexes first,
        then the primary 'id' index.  Raises RevConflict when ``_rev``
        is not the document's current revision."""
        old_data = self.get('id', _id)
        if old_data['_rev'] != _rev:
            raise RevConflict()
        with self.main_lock:
            self.id_revs[_id] = _rev
        for index in self.indexes[1:]:
            self._single_delete_index(index, data, _id, old_data)
        self._delete_id_index(_id, _rev, data)
        with self.main_lock:
            # clean up only if no concurrent update replaced our entry
            if self.id_revs[_id] == _rev:
                del self.id_revs[_id]
| gpl-3.0 |
rubikloud/gpdb | gpMgmt/bin/gppylib/programs/gppkg.py | 19 | 11246 | # Line too long - pylint: disable=C0301
# Copyright (c) Greenplum Inc 2011. All Rights Reserved.
from optparse import OptionGroup
import os
import sys
import tarfile
try:
from gppylib import gplog, pgconf
from gppylib.commands import gp
from gppylib.commands.base import Command, ExecutionError
from gppylib.commands.unix import curr_platform, SUNOS
from gppylib.db import dbconn
from gppylib.gparray import GpArray
from gppylib.gpversion import GpVersion
from gppylib.gpparseopts import OptParser, OptChecker
from gppylib.mainUtils import addMasterDirectoryOptionForSingleClusterProgram, addStandardLoggingAndHelpOptions, ExceptionNoStackTraceNeeded
from gppylib.operations.package import MigratePackages, InstallPackage, UninstallPackage, QueryPackage, BuildGppkg, UpdatePackage, CleanGppkg, Gppkg, GPPKG_EXTENSION, GPPKG_ARCHIVE_PATH
from gppylib.userinput import ask_yesno
from gppylib.operations.unix import ListFilesByPattern
import yaml
except ImportError, ex:
sys.exit('Cannot import modules. Please check that you have sourced greenplum_path.sh. Detail: ' + str(ex))
logger = gplog.get_default_logger()
class GpPkgProgram:
""" This is the CLI entry point to package management code. """
def __init__(self, options, args):
self.master_datadir = options.masterDataDirectory
# TODO: AK: Program logic should not be dictating master, standby, and segment information
# In other words, the fundamental Operations should have APIs that preclude the need for this.
self.master_host = None
self.standby_host = None
self.segment_host_list = None
self.query = options.query
self.build = options.build
self.install = options.install
self.remove = options.remove
self.update = options.update
self.clean = options.clean
self.migrate = options.migrate
self.interactive = options.interactive
# only one of the following may be provided: --install, --remove, --update, --query, --build, --clean, --migrate
count = sum([1 for opt in ['install', 'remove', 'update', 'query', 'build', 'clean', 'migrate'] if getattr(self, opt)])
if count != 1:
raise ExceptionNoStackTraceNeeded('Exactly one of the following must be provided: --install, --remove, -update, --query, --clean, --migrate')
if self.query:
# gppkg -q can be supplemented with --info, --list, --all
count = sum([1 for opt in ['info', 'list', 'all'] if options.__dict__[opt]])
if count > 1:
raise ExceptionNoStackTraceNeeded('For --query, at most one of the following can be provided: --info, --list, --all')
# for all query options other than --all, a package path must be provided
if not options.all and len(args) != 1:
raise ExceptionNoStackTraceNeeded('A package must be specified for -q, -q --info, and -q --list.')
if options.info:
self.query = (QueryPackage.INFO, args[0])
elif options.list:
self.query = (QueryPackage.LIST, args[0])
elif options.all:
self.query = (QueryPackage.ALL, None)
else:
self.query = (None, args[0])
elif self.migrate:
if len(args) != 2:
raise ExceptionNoStackTraceNeeded('Invalid syntax, expecting "gppkg --migrate <from_gphome> <to_gphome>".')
self.migrate = (args[0], args[1])
@staticmethod
def create_parser():
parser = OptParser(option_class=OptChecker,
description="Greenplum Package Manager",
version='%prog version $Revision: #1 $')
parser.setHelp([])
addStandardLoggingAndHelpOptions(parser, includeNonInteractiveOption=True)
parser.remove_option('-q')
parser.remove_option('-l')
add_to = OptionGroup(parser, 'General Options')
parser.add_option_group(add_to)
addMasterDirectoryOptionForSingleClusterProgram(add_to)
# TODO: AK: Eventually, these options may need to be flexible enough to accept mutiple packages
# in one invocation. If so, the structure of this parser may need to change.
add_to.add_option('-i', '--install', help='install the given gppkg', metavar='<package>')
add_to.add_option('-u', '--update', help='update the given gppkg', metavar='<package>')
add_to.add_option('-r', '--remove', help='remove the given gppkg', metavar='<name>-<version>')
add_to.add_option('-q', '--query', help='query the gppkg database or a particular gppkg', action='store_true')
add_to.add_option('-b', '--build', help='build a gppkg', metavar='<directory>')
add_to.add_option('-c', '--clean', help='clean the cluster of the given gppkg', action='store_true')
add_to.add_option('--migrate', help='migrate gppkgs from a separate $GPHOME', metavar='<from_gphome> <to_gphome>', action='store_true', default=False)
add_to = OptionGroup(parser, 'Query Options')
parser.add_option_group(add_to)
add_to.add_option('--info', action='store_true', help='print information about the gppkg including name, version, description')
add_to.add_option('--list', action='store_true', help='print all the files present in the gppkg')
add_to.add_option('--all', action='store_true', help='print all the gppkgs installed by gppkg')
return parser
@staticmethod
def create_program(options, args):
""" TODO: AK: This convention may be unnecessary. """
return GpPkgProgram(options, args)
def _get_gpdb_host_list(self):
"""
TODO: AK: Get rid of this. Program logic should not be driving host list building .
This method gets the host names
of all hosts in the gpdb array.
It sets the following variables
GpPkgProgram.master_host to master
GpPkgProgram.standby_host to standby
GpPkgProgram.segment_host_list to segment hosts
"""
logger.debug('_get_gpdb_host_list')
#Get host list
gparr = GpArray.initFromCatalog(dbconn.DbURL(port = self.master_port), utility = True)
master_host = None
standby_host = None
segment_host_list = []
segs = gparr.getDbList()
for seg in segs:
if seg.isSegmentMaster(current_role = True):
master_host = seg.getSegmentHostName()
elif seg.isSegmentStandby(current_role = True):
standby_host = seg.getSegmentHostName()
else:
segment_host_list.append(seg.getSegmentHostName())
#Deduplicate the hosts so that we
#dont install multiple times on the same host
segment_host_list = list(set(segment_host_list))
#Segments might exist on the master host. Since we store the
#master host separately in self.master_host, storing the master_host
#in the segment_host_list is redundant.
for host in segment_host_list:
if host == master_host or host == standby_host:
segment_host_list.remove(host)
self.master_host = master_host
self.standby_host = standby_host
self.segment_host_list = segment_host_list
def _get_master_port(self, datadir):
'''
Obtain the master port from the pgconf file
'''
logger.debug('_get_master_port')
pgconf_dict = pgconf.readfile(os.path.join(datadir, 'postgresql.conf'))
return pgconf_dict.int('port')
def run(self):
if self.build:
BuildGppkg(self.build).run()
return
#Check for RPM and Solaris OS
if curr_platform == SUNOS:
raise ExceptionNoStackTraceNeeded('gppkg is not supported on Solaris')
try:
cmd = Command(name = 'Check for rpm', cmdStr = 'rpm --version')
cmd.run(validateAfter = True)
results = cmd.get_results().stdout.strip()
rpm_version_string = results.split(' ')[-1]
if not rpm_version_string.startswith('4.'):
raise ExceptionNoStackTraceNeeded('gppkg requires rpm version 4.x')
except ExecutionError, ex:
results = ex.cmd.get_results().stderr.strip()
if len(results) != 0 and 'not found' in results:
raise ExceptionNoStackTraceNeeded('gppkg requires RPM to be available in PATH')
if self.migrate:
MigratePackages(from_gphome = self.migrate[0],
to_gphome = self.migrate[1]).run()
return
# MASTER_DATA_DIRECTORY and PGPORT must not need to be set for
# --build and --migrate to function properly
if self.master_datadir is None:
self.master_datadir = gp.get_masterdatadir()
self.master_port = self._get_master_port(self.master_datadir)
# TODO: AK: Program logic should not drive host decisions.
self._get_gpdb_host_list()
if self.install:
pkg = Gppkg.from_package_path(self.install)
InstallPackage(pkg, self.master_host, self.standby_host, self.segment_host_list).run()
elif self.query:
query_type, package_path = self.query
QueryPackage(query_type, package_path).run()
elif self.remove:
if self.remove.count('-') != 1:
raise ExceptionNoStackTraceNeeded('Please specify the correct <name>-<version>.')
pkg_file_list = ListFilesByPattern(GPPKG_ARCHIVE_PATH, self.remove + '-*-*' + GPPKG_EXTENSION).run()
if len(pkg_file_list) == 0:
raise ExceptionNoStackTraceNeeded('Package %s has not been installed.' % self.remove)
assert len(pkg_file_list) == 1
pkg_file = pkg_file_list[0]
pkg = Gppkg.from_package_path(os.path.join(GPPKG_ARCHIVE_PATH, pkg_file))
UninstallPackage(pkg, self.master_host, self.standby_host, self.segment_host_list).run()
elif self.update:
logger.warning('WARNING: The process of updating a package includes removing all')
logger.warning('previous versions of the system objects related to the package. For')
logger.warning('example, previous versions of shared libraries are removed.')
logger.warning('After the update process, a database function will fail when it is')
logger.warning('called if the function references a package file that has been removed.')
if self.interactive:
if not ask_yesno(None, 'Do you still want to continue ?', 'N'):
logger.info('Skipping update of gppkg based on user input')
return
pkg = Gppkg.from_package_path(self.update)
UpdatePackage(pkg, self.master_host, self.standby_host, self.segment_host_list).run()
elif self.clean:
CleanGppkg(self.standby_host, self.segment_host_list).run()
def cleanup(self):
pass
| apache-2.0 |
migihajami/memin | memin/frontend.py | 1 | 10715 | __author__ = 'algol'
import cherrypy
from jinja2 import Environment, PackageLoader
import memin.core as mc
from configparser import ConfigParser
class Menu:
    """Site navigation: an ordered list of {'name', 'link'} entries."""

    def __init__(self):
        entries = (
            ('Главная', '/'),
            ('Персоны', '/persons'),
            ('Залы', '/classrooms'),
            ('Занятия', '/lessons'),
            ('Типы платежей', '/payment_types'),
        )
        self.menu = [{'name': name, 'link': link} for name, link in entries]
class FrontendBase:
    """Shared base for page controllers: owns the Jinja2 environment
    (templates from the ``memin/templates`` package dir) and the menu."""

    def __init__(self):
        self.env = Environment(loader=PackageLoader('memin', 'templates'))
        self.menu = Menu()

    def get_template(self, template_name='index.html'):
        """Return the named Jinja2 template (defaults to the index page)."""
        return self.env.get_template(template_name)
class Main(FrontendBase):
    """Top-level CherryPy controller: renders the site pages."""

    def __init__(self):
        super().__init__()

    @staticmethod
    def checkPassword(realm, username, password):
        """Basic-auth checker: compares against the [users] section of
        config.ini (values stored wrapped in single quotes).

        NOTE(review): plaintext passwords in config and a non-constant-time
        comparison — acceptable only for a trusted deployment; confirm.
        """
        c = ConfigParser()
        c.read('config.ini')
        users = {k: c['users'][k].strip("'") for k in c['users']}
        if password == users.get(username, None):
            return True
        return False

    @cherrypy.expose
    def index(self, name=''):
        """Render the main page."""
        return self.get_template('index.html').render(nick=name,
                                                      title='Main page',
                                                      h1='Главная страница',
                                                      menu=self.menu.menu
                                                      )

    @cherrypy.expose
    def halt(self):
        """Stop the CherryPy engine (shuts the server down)."""
        cherrypy.engine.exit()

    @cherrypy.expose
    def persons(self):
        """Render the persons page; lookup dicts (payment types, rooms,
        lessons) are passed stringified for client-side use."""
        return self.get_template('persons.html').render(
            title='Main page',
            h1='Персоны',
            menu=self.menu.menu,
            table_title='Персоны',
            url_prefix='person',
            ptypes=str({a.pk_value: a.name for a in mc.PaymentType.get_all()}),
            classrooms=str({a.pk_value: a.name for a in mc.Classroom.get_all()}),
            lessons=str({a.pk_value: a.name for a in mc.Lesson.get_all()})
        )

    @cherrypy.expose
    def payment_types(self):
        """Render the payment-types page."""
        return self.get_template('payment_types.html').render(
            title='Типы платежей',
            h1='Типы платежей',
            menu=self.menu.menu,
            table_title='Типы платежей',
            url_prefix='ptype'
        )

    @cherrypy.expose
    def classrooms(self):
        """Render the classrooms page."""
        return self.get_template('classrooms.html').render(
            title='Залы для занятий',
            h1='Залы для занятий',
            menu=self.menu.menu,
            table_title='список залов',
            url_prefix='classroom'
        )

    @cherrypy.expose
    def lessons(self):
        """Render the lessons page."""
        return self.get_template('lessons.html').render(
            title='Занятия',
            h1='Занятия',
            menu=self.menu.menu,
            table_title='Список занятий',
            url_prefix='lesson'
        )
class MeminCrud(FrontendBase):
    """Abstract CRUD controller; subclasses implement the JSON-returning
    list/create/update/delete handlers."""

    def __init__(self):
        super().__init__()

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def list(self, **args):
        # NotImplementedError is the idiomatic marker for an abstract
        # method; it is still an Exception subclass, so existing callers
        # catching Exception keep working.
        raise NotImplementedError("Not implemented yet")

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def create(self, **args):
        raise NotImplementedError("Not implemented yet")

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def update(self, **args):
        raise NotImplementedError("Not implemented yet")

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def delete(self, **args):
        raise NotImplementedError("Not implemented yet")
class Person(MeminCrud):
    """CRUD controller for persons (jTable-style JSON endpoints)."""

    def __init__(self):
        super().__init__()

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def list(self, **args):
        """Return every person as jTable records.

        NOTE(review): an empty table yields Result='ERROR' here —
        presumably intentional for the UI, but worth confirming."""
        prs = mc.Person.get_all()
        persons = [{'PersonID': p.pk_value,
                    'Fname': p.fname,
                    'Lname': p.lname,
                    'Phone': p.phone,
                    'Email': p.email,
                    'InsertDate': p.insert_date
                    } for p in prs]
        res = {'Result': 'OK' if prs else 'ERROR', 'Records': persons, 'Args': args}
        return res

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def create(self, **args):
        """Create a person from form fields and return it with its new id."""
        p = mc.Person(args['Fname'], args['Lname'], args['Phone'], args['Email'])
        args['PersonID'] = p.save()
        return {'Result': 'OK', 'Record': args}

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def update(self, **args):
        """Load the person by id, overwrite its fields and persist."""
        p = mc.Person.load(args['PersonID'])
        p.fname = args['Fname']
        p.lname = args['Lname']
        p.phone = args['Phone']
        p.email = args['Email']
        p.save()
        return {'Result': 'OK'}
class PaymentType(MeminCrud):
    """CRUD controller for payment types."""

    def __init__(self):
        super().__init__()

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def list(self, **args):
        """Return every payment type as jTable records."""
        ptypes = mc.PaymentType.get_all()
        res = [{'Name': p.name,
                'Comment': p.comment,
                'PaymentTypeID': p.pk_value} for p in ptypes]
        return {'Result': 'OK' if ptypes else 'ERROR', 'Records': res}

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def create(self, **args):
        """Create a payment type.

        NOTE(review): the returned key is spelled 'PaymenTypeID' (missing
        't') — likely a typo, but the client may depend on it; confirm
        before changing."""
        pt = mc.PaymentType(args['Name'], args['Comment'])
        args['PaymenTypeID'] = pt.save()
        return {'Result': 'OK', 'Record': args}

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def update(self, **args):
        """Load the payment type by id, overwrite its fields and persist."""
        pt = mc.PaymentType.load(args['PaymentTypeID'])
        pt.name = args['Name']
        pt.comment = args['Comment']
        pt.save()
        return {'Result': 'OK'}

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def delete(self, **args):
        raise Exception("Not implemented yet")
class Classroom(MeminCrud):
    """CRUD controller for classrooms."""

    def __init__(self):
        super().__init__()

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def list(self, **args):
        """Return every classroom as jTable records."""
        cl = mc.Classroom.get_all()
        res = [{'Name': c.name,
                'Address': c.address,
                'Comment': c.comment,
                'Active': c.active,
                'ClassroomID': c.pk_value} for c in cl]
        return {'Result': 'OK' if cl else 'ERROR', 'Records': res}

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def create(self, **args):
        """Create a classroom; 'Active' is an optional checkbox field and
        defaults to 0 when absent from the form."""
        cl = mc.Classroom(args['Name'],
                          args['Address'],
                          args['Comment'],
                          args['Active'] if 'Active' in args else 0
                          )
        args['ClassroomID'] = cl.save()
        return {'Result': 'OK', 'Record': args}

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def update(self, **args):
        """Load the classroom by id, overwrite its fields and persist."""
        cl = mc.Classroom.load(args['ClassroomID'])
        cl.comment = args['Comment']
        cl.name = args['Name']
        cl.active = args['Active'] if 'Active' in args else 0
        cl.address = args['Address']
        cl.save()
        return {'Result': 'OK'}

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def delete(self, **args):
        raise Exception("Not implemented yet")
class Lesson(MeminCrud):
    """CRUD controller for lessons."""

    def __init__(self):
        super().__init__()

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def list(self, **args):
        """Return every lesson as jTable records."""
        lsns = mc.Lesson.get_all()
        res = [{'Name': l.name,
                'Comment': l.comment,
                'Duration': l.duration,
                'LessonID': l.pk_value
                } for l in lsns]
        return {'Result': 'OK' if lsns else 'ERROR', 'Records': res}

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def create(self, **args):
        """Create a lesson from form fields and return it with its new id."""
        l = mc.Lesson(args['Name'], args['Duration'], args['Comment'])
        args['LessonID'] = l.save()
        return {'Result': 'OK', 'Record': args}

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def update(self, **args):
        """Load the lesson by id, overwrite its fields and persist."""
        l = mc.Lesson.load(args['LessonID'])
        l.name = args['Name']
        l.comment = args['Comment']
        l.duration = args['Duration']
        l.save()
        return {'Result': 'OK'}

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def delete(self, **args):
        raise Exception("Not implemented yet")
class Payment(MeminCrud):
    """CRUD controller for a person's payments (read + create only)."""

    def __init__(self):
        super().__init__()

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def list(self, **args):
        """Return the payments of one person (filtered by PersonID).

        The stored date is 'd.m.Y'; reversing the dot-split pieces turns
        it into ISO-style 'Y-m-d' for the client."""
        pl = mc.Payment.get_all({'PersonID': args['PersonID']})
        res = [{'PersonID': p.person_id,
                'PaymentType': p.payment_type_id,
                'PaymentTypeID': p.payment_type_id,
                'PaymentID': p.pk_value,
                'Amount': p.amount,
                'Date': '-'.join(reversed(p.date.split('.')))
                } for p in pl]
        return {'Result': 'OK' if pl else 'ERROR', 'Records': res}

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def create(self, **args):
        """Create a payment; the date is assigned by the model on save."""
        p = mc.Payment(args['PersonID'], args['Amount'], args['PaymentType'])
        args['PaymentID'] = p.save()
        args['Date'] = p.date
        return {'Result': 'OK', 'Record': args}

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def update(self, **args):
        raise Exception("Not implemented yet")

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def delete(self, **args):
        raise Exception("Not implemented yet")
class Visit(MeminCrud):
    """CRUD controller for a person's lesson visits."""

    def __init__(self):
        super().__init__()

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def list(self, PersonID, **args):
        """Return the visits of one person; stored 'd.m.Y' dates are
        converted to 'Y-m-d' for the client."""
        visits = mc.Visit.get_all({'PersonID': PersonID})
        res = [{'VisitID': a.pk_value,
                'Classroom': a.classroom_id,
                'Lesson': a.lesson_id,
                'Date': '-'.join(reversed(a.date.split('.')))
                } for a in visits]
        return {'Result': 'OK' if visits else 'ERROR', 'Records': res}

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def create(self, **args):
        """Create a visit from form fields and return it with its new id."""
        v = mc.Visit(args['PersonID'], args['Classroom'], args['Lesson'], args['Date'])
        args['VisitID'] = v.save()
        return {'Result': 'OK', 'Record': args}

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def update(self, **args):
        """Update a visit if it exists; ERROR result when the id is unknown."""
        v = mc.Visit.load(args.get('VisitID'))
        if v:
            v.classroom_id = args['Classroom']
            v.lesson_id = args['Lesson']
            v.date = args['Date']
            v.save()
            return {'Result': 'OK'}
        return {'Result': 'ERROR'}

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def delete(self, **args):
        raise Exception("Not implemented yet")
| bsd-3-clause |
eXistenZNL/SickRage | lib/unidecode/x0c1.py | 253 | 4765 | data = (
'syae', # 0x00
'syaeg', # 0x01
'syaegg', # 0x02
'syaegs', # 0x03
'syaen', # 0x04
'syaenj', # 0x05
'syaenh', # 0x06
'syaed', # 0x07
'syael', # 0x08
'syaelg', # 0x09
'syaelm', # 0x0a
'syaelb', # 0x0b
'syaels', # 0x0c
'syaelt', # 0x0d
'syaelp', # 0x0e
'syaelh', # 0x0f
'syaem', # 0x10
'syaeb', # 0x11
'syaebs', # 0x12
'syaes', # 0x13
'syaess', # 0x14
'syaeng', # 0x15
'syaej', # 0x16
'syaec', # 0x17
'syaek', # 0x18
'syaet', # 0x19
'syaep', # 0x1a
'syaeh', # 0x1b
'seo', # 0x1c
'seog', # 0x1d
'seogg', # 0x1e
'seogs', # 0x1f
'seon', # 0x20
'seonj', # 0x21
'seonh', # 0x22
'seod', # 0x23
'seol', # 0x24
'seolg', # 0x25
'seolm', # 0x26
'seolb', # 0x27
'seols', # 0x28
'seolt', # 0x29
'seolp', # 0x2a
'seolh', # 0x2b
'seom', # 0x2c
'seob', # 0x2d
'seobs', # 0x2e
'seos', # 0x2f
'seoss', # 0x30
'seong', # 0x31
'seoj', # 0x32
'seoc', # 0x33
'seok', # 0x34
'seot', # 0x35
'seop', # 0x36
'seoh', # 0x37
'se', # 0x38
'seg', # 0x39
'segg', # 0x3a
'segs', # 0x3b
'sen', # 0x3c
'senj', # 0x3d
'senh', # 0x3e
'sed', # 0x3f
'sel', # 0x40
'selg', # 0x41
'selm', # 0x42
'selb', # 0x43
'sels', # 0x44
'selt', # 0x45
'selp', # 0x46
'selh', # 0x47
'sem', # 0x48
'seb', # 0x49
'sebs', # 0x4a
'ses', # 0x4b
'sess', # 0x4c
'seng', # 0x4d
'sej', # 0x4e
'sec', # 0x4f
'sek', # 0x50
'set', # 0x51
'sep', # 0x52
'seh', # 0x53
'syeo', # 0x54
'syeog', # 0x55
'syeogg', # 0x56
'syeogs', # 0x57
'syeon', # 0x58
'syeonj', # 0x59
'syeonh', # 0x5a
'syeod', # 0x5b
'syeol', # 0x5c
'syeolg', # 0x5d
'syeolm', # 0x5e
'syeolb', # 0x5f
'syeols', # 0x60
'syeolt', # 0x61
'syeolp', # 0x62
'syeolh', # 0x63
'syeom', # 0x64
'syeob', # 0x65
'syeobs', # 0x66
'syeos', # 0x67
'syeoss', # 0x68
'syeong', # 0x69
'syeoj', # 0x6a
'syeoc', # 0x6b
'syeok', # 0x6c
'syeot', # 0x6d
'syeop', # 0x6e
'syeoh', # 0x6f
'sye', # 0x70
'syeg', # 0x71
'syegg', # 0x72
'syegs', # 0x73
'syen', # 0x74
'syenj', # 0x75
'syenh', # 0x76
'syed', # 0x77
'syel', # 0x78
'syelg', # 0x79
'syelm', # 0x7a
'syelb', # 0x7b
'syels', # 0x7c
'syelt', # 0x7d
'syelp', # 0x7e
'syelh', # 0x7f
'syem', # 0x80
'syeb', # 0x81
'syebs', # 0x82
'syes', # 0x83
'syess', # 0x84
'syeng', # 0x85
'syej', # 0x86
'syec', # 0x87
'syek', # 0x88
'syet', # 0x89
'syep', # 0x8a
'syeh', # 0x8b
'so', # 0x8c
'sog', # 0x8d
'sogg', # 0x8e
'sogs', # 0x8f
'son', # 0x90
'sonj', # 0x91
'sonh', # 0x92
'sod', # 0x93
'sol', # 0x94
'solg', # 0x95
'solm', # 0x96
'solb', # 0x97
'sols', # 0x98
'solt', # 0x99
'solp', # 0x9a
'solh', # 0x9b
'som', # 0x9c
'sob', # 0x9d
'sobs', # 0x9e
'sos', # 0x9f
'soss', # 0xa0
'song', # 0xa1
'soj', # 0xa2
'soc', # 0xa3
'sok', # 0xa4
'sot', # 0xa5
'sop', # 0xa6
'soh', # 0xa7
'swa', # 0xa8
'swag', # 0xa9
'swagg', # 0xaa
'swags', # 0xab
'swan', # 0xac
'swanj', # 0xad
'swanh', # 0xae
'swad', # 0xaf
'swal', # 0xb0
'swalg', # 0xb1
'swalm', # 0xb2
'swalb', # 0xb3
'swals', # 0xb4
'swalt', # 0xb5
'swalp', # 0xb6
'swalh', # 0xb7
'swam', # 0xb8
'swab', # 0xb9
'swabs', # 0xba
'swas', # 0xbb
'swass', # 0xbc
'swang', # 0xbd
'swaj', # 0xbe
'swac', # 0xbf
'swak', # 0xc0
'swat', # 0xc1
'swap', # 0xc2
'swah', # 0xc3
'swae', # 0xc4
'swaeg', # 0xc5
'swaegg', # 0xc6
'swaegs', # 0xc7
'swaen', # 0xc8
'swaenj', # 0xc9
'swaenh', # 0xca
'swaed', # 0xcb
'swael', # 0xcc
'swaelg', # 0xcd
'swaelm', # 0xce
'swaelb', # 0xcf
'swaels', # 0xd0
'swaelt', # 0xd1
'swaelp', # 0xd2
'swaelh', # 0xd3
'swaem', # 0xd4
'swaeb', # 0xd5
'swaebs', # 0xd6
'swaes', # 0xd7
'swaess', # 0xd8
'swaeng', # 0xd9
'swaej', # 0xda
'swaec', # 0xdb
'swaek', # 0xdc
'swaet', # 0xdd
'swaep', # 0xde
'swaeh', # 0xdf
'soe', # 0xe0
'soeg', # 0xe1
'soegg', # 0xe2
'soegs', # 0xe3
'soen', # 0xe4
'soenj', # 0xe5
'soenh', # 0xe6
'soed', # 0xe7
'soel', # 0xe8
'soelg', # 0xe9
'soelm', # 0xea
'soelb', # 0xeb
'soels', # 0xec
'soelt', # 0xed
'soelp', # 0xee
'soelh', # 0xef
'soem', # 0xf0
'soeb', # 0xf1
'soebs', # 0xf2
'soes', # 0xf3
'soess', # 0xf4
'soeng', # 0xf5
'soej', # 0xf6
'soec', # 0xf7
'soek', # 0xf8
'soet', # 0xf9
'soep', # 0xfa
'soeh', # 0xfb
'syo', # 0xfc
'syog', # 0xfd
'syogg', # 0xfe
'syogs', # 0xff
)
| gpl-3.0 |
eckucukoglu/arm-linux-gnueabihf | lib/python2.7/site-packages/sepolicy/templates/network.py | 5 | 13603 | # Copyright (C) 2007-2012 Red Hat
# see file 'COPYING' for use and warranty information
#
# policygentool is a tool for the initial generation of SELinux policy
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
# 02111-1307 USA
#
#
########################### Type Enforcement File #############################
# Templates below are SELinux Type Enforcement (.te) fragments; the policy
# generator substitutes TEMPLATETYPE with the new domain's name.  The string
# contents are emitted verbatim into policy files and must not be altered.

# Declare a dedicated port type for the domain.
te_types="""
type TEMPLATETYPE_port_t;
corenet_port(TEMPLATETYPE_port_t)
"""

# Baseline networking: DNS resolution and unlabeled traffic.
te_network="""\
sysnet_dns_name_resolve(TEMPLATETYPE_t)
corenet_all_recvfrom_unlabeled(TEMPLATETYPE_t)
"""

# Generic TCP socket use (create plus send/receive on any port).
te_tcp="""\
allow TEMPLATETYPE_t self:tcp_socket create_stream_socket_perms;
corenet_tcp_sendrecv_generic_if(TEMPLATETYPE_t)
corenet_tcp_sendrecv_generic_node(TEMPLATETYPE_t)
corenet_tcp_sendrecv_all_ports(TEMPLATETYPE_t)
"""

# Inbound TCP: bind to generic nodes / the domain's own port type.
te_in_tcp="""\
corenet_tcp_bind_generic_node(TEMPLATETYPE_t)
"""

te_in_need_port_tcp="""\
allow TEMPLATETYPE_t TEMPLATETYPE_port_t:tcp_socket name_bind;
"""

# Outbound TCP connect to the domain's own port type.
te_out_need_port_tcp="""\
allow TEMPLATETYPE_t TEMPLATETYPE_port_t:tcp_socket name_connect;
"""

# Generic UDP socket use.
te_udp="""\
allow TEMPLATETYPE_t self:udp_socket { create_socket_perms listen };
corenet_udp_sendrecv_generic_if(TEMPLATETYPE_t)
corenet_udp_sendrecv_generic_node(TEMPLATETYPE_t)
corenet_udp_sendrecv_all_ports(TEMPLATETYPE_t)
"""

# Inbound UDP: bind to generic nodes / the domain's own port type.
te_in_udp="""\
corenet_udp_bind_generic_node(TEMPLATETYPE_t)
"""

te_in_need_port_udp="""\
allow TEMPLATETYPE_t TEMPLATETYPE_port_t:udp_socket name_bind;
"""

# Outbound TCP connect rules of widening scope: all / RPC / unreserved.
te_out_all_ports_tcp="""\
corenet_tcp_connect_all_ports(TEMPLATETYPE_t)
"""

te_out_reserved_ports_tcp="""\
corenet_tcp_connect_all_rpc_ports(TEMPLATETYPE_t)
"""

te_out_unreserved_ports_tcp="""\
corenet_tcp_connect_all_unreserved_ports(TEMPLATETYPE_t)
"""

# Inbound TCP bind rules of widening scope: all / RPC / unreserved.
te_in_all_ports_tcp="""\
corenet_tcp_bind_all_ports(TEMPLATETYPE_t)
"""

te_in_reserved_ports_tcp="""\
corenet_tcp_bind_all_rpc_ports(TEMPLATETYPE_t)
"""

te_in_unreserved_ports_tcp="""\
corenet_tcp_bind_all_unreserved_ports(TEMPLATETYPE_t)
"""

# Inbound UDP bind rules of widening scope: all / RPC / unreserved.
te_in_all_ports_udp="""\
corenet_udp_bind_all_ports(TEMPLATETYPE_t)
"""

te_in_reserved_ports_udp="""\
corenet_udp_bind_all_rpc_ports(TEMPLATETYPE_t)
"""

te_in_unreserved_ports_udp="""\
corenet_udp_bind_all_unreserved_ports(TEMPLATETYPE_t)
"""
if_rules="""\
########################################
## <summary>
## Send and receive TCP traffic on the TEMPLATETYPE port.
## </summary>
## <param name="domain">
## <summary>
## Domain allowed access.
## </summary>
## </param>
## <infoflow type="both" weight="10"/>
#
interface(`corenet_tcp_sendrecv_TEMPLATETYPE_port',`
gen_require(`
type TEMPLATETYPE_port_t;
')
allow $1 TEMPLATETYPE_port_t:tcp_socket { send_msg recv_msg };
')
########################################
## <summary>
## Send UDP traffic on the TEMPLATETYPE port.
## </summary>
## <param name="domain">
## <summary>
## Domain allowed access.
## </summary>
## </param>
## <infoflow type="write" weight="10"/>
#
interface(`corenet_udp_send_TEMPLATETYPE_port',`
gen_require(`
type TEMPLATETYPE_port_t;
')
allow $1 TEMPLATETYPE_port_t:udp_socket send_msg;
')
########################################
## <summary>
## Do not audit attempts to send UDP traffic on the TEMPLATETYPE port.
## </summary>
## <param name="domain">
## <summary>
## Domain to not audit.
## </summary>
## </param>
## <infoflow type="none"/>
#
interface(`corenet_dontaudit_udp_send_TEMPLATETYPE_port',`
gen_require(`
type TEMPLATETYPE_port_t;
')
dontaudit $1 TEMPLATETYPE_port_t:udp_socket send_msg;
')
########################################
## <summary>
## Receive UDP traffic on the TEMPLATETYPE port.
## </summary>
## <param name="domain">
## <summary>
## Domain allowed access.
## </summary>
## </param>
## <infoflow type="read" weight="10"/>
#
interface(`corenet_udp_receive_TEMPLATETYPE_port',`
gen_require(`
type TEMPLATETYPE_port_t;
')
allow $1 TEMPLATETYPE_port_t:udp_socket recv_msg;
')
########################################
## <summary>
## Do not audit attempts to receive UDP traffic on the TEMPLATETYPE port.
## </summary>
## <param name="domain">
## <summary>
## Domain to not audit.
## </summary>
## </param>
## <infoflow type="none"/>
#
interface(`corenet_dontaudit_udp_receive_TEMPLATETYPE_port',`
gen_require(`
type TEMPLATETYPE_port_t;
')
dontaudit $1 TEMPLATETYPE_port_t:udp_socket recv_msg;
')
########################################
## <summary>
## Send and receive UDP traffic on the TEMPLATETYPE port.
## </summary>
## <param name="domain">
## <summary>
## Domain allowed access.
## </summary>
## </param>
## <infoflow type="both" weight="10"/>
#
interface(`corenet_udp_sendrecv_TEMPLATETYPE_port',`
corenet_udp_send_TEMPLATETYPE_port($1)
corenet_udp_receive_TEMPLATETYPE_port($1)
')
########################################
## <summary>
## Do not audit attempts to send and receive
## UDP traffic on the TEMPLATETYPE port.
## </summary>
## <param name="domain">
## <summary>
## Domain to not audit.
## </summary>
## </param>
## <infoflow type="none"/>
#
interface(`corenet_dontaudit_udp_sendrecv_TEMPLATETYPE_port',`
corenet_dontaudit_udp_send_TEMPLATETYPE_port($1)
corenet_dontaudit_udp_receive_TEMPLATETYPE_port($1)
')
########################################
## <summary>
## Bind TCP sockets to the TEMPLATETYPE port.
## </summary>
## <param name="domain">
## <summary>
## Domain allowed access.
## </summary>
## </param>
## <infoflow type="none"/>
#
interface(`corenet_tcp_bind_TEMPLATETYPE_port',`
gen_require(`
type TEMPLATETYPE_port_t;
')
allow $1 TEMPLATETYPE_port_t:tcp_socket name_bind;
')
########################################
## <summary>
## Bind UDP sockets to the TEMPLATETYPE port.
## </summary>
## <param name="domain">
## <summary>
## Domain allowed access.
## </summary>
## </param>
## <infoflow type="none"/>
#
interface(`corenet_udp_bind_TEMPLATETYPE_port',`
gen_require(`
type TEMPLATETYPE_port_t;
')
allow $1 TEMPLATETYPE_port_t:udp_socket name_bind;
')
########################################
## <summary>
## Do not audit attempts to sbind to TEMPLATETYPE port.
## </summary>
## <param name="domain">
## <summary>
## Domain to not audit.
## </summary>
## </param>
## <infoflow type="none"/>
#
interface(`corenet_dontaudit_udp_bind_TEMPLATETYPE_port',`
gen_require(`
type TEMPLATETYPE_port_t;
')
dontaudit $1 TEMPLATETYPE_port_t:udp_socket name_bind;
')
########################################
## <summary>
## Make a TCP connection to the TEMPLATETYPE port.
## </summary>
## <param name="domain">
## <summary>
## Domain allowed access.
## </summary>
## </param>
#
interface(`corenet_tcp_connect_TEMPLATETYPE_port',`
gen_require(`
type TEMPLATETYPE_port_t;
')
allow $1 TEMPLATETYPE_port_t:tcp_socket name_connect;
')
########################################
## <summary>
## Do not audit attempts to make a TCP connection to TEMPLATETYPE port.
## </summary>
## <param name="domain">
## <summary>
## Domain allowed access.
## </summary>
## </param>
#
interface(`corenet_dontaudit_tcp_connect_TEMPLATETYPE_port',`
gen_require(`
type TEMPLATETYPE_port_t;
')
dontaudit $1 TEMPLATETYPE_port_t:tcp_socket name_connect;
')
########################################
## <summary>
## Send TEMPLATETYPE_client packets.
## </summary>
## <param name="domain">
## <summary>
## Domain allowed access.
## </summary>
## </param>
## <infoflow type="write" weight="10"/>
#
interface(`corenet_send_TEMPLATETYPE_client_packets',`
gen_require(`
type TEMPLATETYPE_client_packet_t;
')
allow $1 TEMPLATETYPE_client_packet_t:packet send;
')
########################################
## <summary>
## Do not audit attempts to send TEMPLATETYPE_client packets.
## </summary>
## <param name="domain">
## <summary>
## Domain to not audit.
## </summary>
## </param>
## <infoflow type="none"/>
#
interface(`corenet_dontaudit_send_TEMPLATETYPE_client_packets',`
gen_require(`
type TEMPLATETYPE_client_packet_t;
')
dontaudit $1 TEMPLATETYPE_client_packet_t:packet send;
')
########################################
## <summary>
## Receive TEMPLATETYPE_client packets.
## </summary>
## <param name="domain">
## <summary>
## Domain allowed access.
## </summary>
## </param>
## <infoflow type="read" weight="10"/>
#
interface(`corenet_receive_TEMPLATETYPE_client_packets',`
gen_require(`
type TEMPLATETYPE_client_packet_t;
')
allow $1 TEMPLATETYPE_client_packet_t:packet recv;
')
########################################
## <summary>
## Do not audit attempts to receive TEMPLATETYPE_client packets.
## </summary>
## <param name="domain">
## <summary>
## Domain allowed access.
## </summary>
## </param>
## <infoflow type="none"/>
#
interface(`corenet_dontaudit_receive_TEMPLATETYPE_client_packets',`
gen_require(`
type TEMPLATETYPE_client_packet_t;
')
dontaudit $1 TEMPLATETYPE_client_packet_t:packet recv;
')
########################################
## <summary>
## Send and receive TEMPLATETYPE_client packets.
## </summary>
## <param name="domain">
## <summary>
## Domain allowed access.
## </summary>
## </param>
## <infoflow type="both" weight="10"/>
#
interface(`corenet_sendrecv_TEMPLATETYPE_client_packets',`
corenet_send_TEMPLATETYPE_client_packets($1)
corenet_receive_TEMPLATETYPE_client_packets($1)
')
########################################
## <summary>
## Do not audit attempts to send and receive TEMPLATETYPE_client packets.
## </summary>
## <param name="domain">
## <summary>
## Domain to not audit.
## </summary>
## </param>
## <infoflow type="none"/>
#
interface(`corenet_dontaudit_sendrecv_TEMPLATETYPE_client_packets',`
corenet_dontaudit_send_TEMPLATETYPE_client_packets($1)
corenet_dontaudit_receive_TEMPLATETYPE_client_packets($1)
')
########################################
## <summary>
## Relabel packets to TEMPLATETYPE_client the packet type.
## </summary>
## <param name="domain">
## <summary>
## Domain allowed access.
## </summary>
## </param>
#
interface(`corenet_relabelto_TEMPLATETYPE_client_packets',`
gen_require(`
type TEMPLATETYPE_client_packet_t;
')
allow $1 TEMPLATETYPE_client_packet_t:packet relabelto;
')
########################################
## <summary>
## Send TEMPLATETYPE_server packets.
## </summary>
## <param name="domain">
## <summary>
## Domain allowed access.
## </summary>
## </param>
## <infoflow type="write" weight="10"/>
#
interface(`corenet_send_TEMPLATETYPE_server_packets',`
gen_require(`
type TEMPLATETYPE_server_packet_t;
')
allow $1 TEMPLATETYPE_server_packet_t:packet send;
')
########################################
## <summary>
## Do not audit attempts to send TEMPLATETYPE_server packets.
## </summary>
## <param name="domain">
## <summary>
## Domain to not audit.
## </summary>
## </param>
## <infoflow type="none"/>
#
interface(`corenet_dontaudit_send_TEMPLATETYPE_server_packets',`
gen_require(`
type TEMPLATETYPE_server_packet_t;
')
dontaudit $1 TEMPLATETYPE_server_packet_t:packet send;
')
########################################
## <summary>
## Receive TEMPLATETYPE_server packets.
## </summary>
## <param name="domain">
## <summary>
## Domain allowed access.
## </summary>
## </param>
## <infoflow type="read" weight="10"/>
#
interface(`corenet_receive_TEMPLATETYPE_server_packets',`
gen_require(`
type TEMPLATETYPE_server_packet_t;
')
allow $1 TEMPLATETYPE_server_packet_t:packet recv;
')
########################################
## <summary>
## Do not audit attempts to receive TEMPLATETYPE_server packets.
## </summary>
## <param name="domain">
## <summary>
## Domain allowed access.
## </summary>
## </param>
## <infoflow type="none"/>
#
interface(`corenet_dontaudit_receive_TEMPLATETYPE_server_packets',`
gen_require(`
type TEMPLATETYPE_server_packet_t;
')
dontaudit $1 TEMPLATETYPE_server_packet_t:packet recv;
')
########################################
## <summary>
## Send and receive TEMPLATETYPE_server packets.
## </summary>
## <param name="domain">
## <summary>
## Domain allowed access.
## </summary>
## </param>
## <infoflow type="both" weight="10"/>
#
interface(`corenet_sendrecv_TEMPLATETYPE_server_packets',`
corenet_send_TEMPLATETYPE_server_packets($1)
corenet_receive_TEMPLATETYPE_server_packets($1)
')
########################################
## <summary>
## Do not audit attempts to send and receive TEMPLATETYPE_server packets.
## </summary>
## <param name="domain">
## <summary>
## Domain to not audit.
## </summary>
## </param>
## <infoflow type="none"/>
#
interface(`corenet_dontaudit_sendrecv_TEMPLATETYPE_server_packets',`
corenet_dontaudit_send_TEMPLATETYPE_server_packets($1)
corenet_dontaudit_receive_TEMPLATETYPE_server_packets($1)
')
########################################
## <summary>
## Relabel packets to TEMPLATETYPE_server the packet type.
## </summary>
## <param name="domain">
## <summary>
## Domain allowed access.
## </summary>
## </param>
#
interface(`corenet_relabelto_TEMPLATETYPE_server_packets',`
gen_require(`
type TEMPLATETYPE_server_packet_t;
')
allow $1 TEMPLATETYPE_server_packet_t:packet relabelto;
')
"""
te_rules="""
"""
| gpl-2.0 |
Klaudit/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/steps/runtests_unittest.py | 123 | 3157 | # Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import platform
import sys
import unittest2 as unittest
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.tool.mocktool import MockOptions, MockTool
from webkitpy.tool.steps.runtests import RunTests
class RunTestsTest(unittest.TestCase):
    """Verifies the exact shell commands the RunTests step issues.

    MockTool(log_executive=True) records every command that would be spawned
    instead of executing it, so the test asserts on the captured log text.
    """

    def test_webkit_run_unit_tests(self):
        tool = MockTool(log_executive=True)
        # Stub out the python/perl unit-test runners so only the bindings
        # tests, WebKit unit tests, and run-webkit-tests invocations remain.
        tool._deprecated_port.run_python_unittests_command = lambda: None
        tool._deprecated_port.run_perl_unittests_command = lambda: None
        step = RunTests(tool, MockOptions(test=True, non_interactive=True, quiet=False))

        if sys.platform != "cygwin":
            # Non-cygwin non-interactive runs add result-limiting flags.
            expected_logs = """Running bindings generation tests
MOCK run_and_throw_if_fail: ['mock-run-bindings-tests'], cwd=/mock-checkout
Running WebKit unit tests
MOCK run_and_throw_if_fail: ['mock-run-webkit-unit-tests'], cwd=/mock-checkout
Running run-webkit-tests
MOCK run_and_throw_if_fail: ['mock-run-webkit-tests', '--no-new-test-results', '--no-show-results', '--exit-after-n-failures=30', '--quiet', '--skip-failing-tests'], cwd=/mock-checkout
"""
        else:
            # On cygwin the step passes --no-build instead of the quiet/skip flags.
            expected_logs = """Running bindings generation tests
MOCK run_and_throw_if_fail: ['mock-run-bindings-tests'], cwd=/mock-checkout
Running WebKit unit tests
MOCK run_and_throw_if_fail: ['mock-run-webkit-unit-tests'], cwd=/mock-checkout
Running run-webkit-tests
MOCK run_and_throw_if_fail: ['mock-run-webkit-tests', '--no-new-test-results', '--no-show-results', '--exit-after-n-failures=30', '--no-build'], cwd=/mock-checkout
"""

        OutputCapture().assert_outputs(self, step.run, [{}], expected_logs=expected_logs)
| bsd-3-clause |
YZHANGFPE/baxter_pykdl | src/urdf_parser_py/xml_reflection/basics.py | 19 | 2400 | import string
import yaml
import collections
from lxml import etree
# Different implementations mix well it seems
# @todo Do not use this?
from xml.etree.ElementTree import ElementTree
def xml_string(rootXml, addHeader = True):
    """Serialize *rootXml* to a pretty-printed XML string.

    When *addHeader* is true, the result is prefixed with an XML
    declaration header.
    """
    serialized = etree.tostring(rootXml, pretty_print = True)
    if not addHeader:
        return serialized
    return '<?xml version="1.0"?>\n' + serialized
def dict_sub(obj, keys):
    """Return a new dict with only the given *keys*, looked up on *obj*."""
    return {key: obj[key] for key in keys}
def node_add(doc, sub):
    """Attach *sub* to *doc* and return the resulting element.

    *sub* may be:
      - None: no-op, returns None;
      - a tag name string: a new SubElement of *doc* is created and returned;
      - an existing etree element: appended to *doc* and returned.
    Anything else raises Exception.
    """
    if sub is None:
        return None
    # isinstance is the idiomatic type check (original used type(sub) == str).
    if isinstance(sub, str):
        return etree.SubElement(doc, sub)
    elif isinstance(sub, etree._Element):
        doc.append(sub) # This screws up the rest of the tree for prettyprint...
        return sub
    else:
        raise Exception('Invalid sub value')
def pfloat(x):
    """Render *x* as a string with any trailing '.' characters removed."""
    text = str(x)
    return text.rstrip('.')
def xml_children(node):
    """Return the element children of *node*, excluding XML comment nodes."""
    return [child for child in node.getchildren()
            if not isinstance(child, etree._Comment)]
def isstring(obj):
    """True if *obj* is a string (Python 2 basestring or Python 3 str)."""
    try:
        string_types = basestring
    except NameError:
        # Python 3: basestring no longer exists.
        string_types = str
    return isinstance(obj, string_types)
def to_yaml(obj):
    """ Simplify yaml representation for pretty printing.

    Recursively converts *obj* into plain strings, numbers, dicts and lists
    suitable for yaml.dump. Objects providing a to_yaml() method, etree
    elements, numpy arrays (via tolist) and generic iterables are handled.
    """
    # Is there a better way to do this by adding a representation with yaml.Dumper?
    # Ordered dict: http://pyyaml.org/ticket/29#comment:11
    if obj is None or isstring(obj):
        out = str(obj)
    elif type(obj) in [int, float, bool]:
        return obj
    elif hasattr(obj, 'to_yaml'):
        out = obj.to_yaml()
    elif isinstance(obj, etree._Element):
        out = etree.tostring(obj, pretty_print = True)
    elif type(obj) == dict:
        out = {}
        for (var, value) in obj.items():
            out[str(var)] = to_yaml(value)
    elif hasattr(obj, 'tolist'):
        # For numpy objects
        out = to_yaml(obj.tolist())
    elif isinstance(obj, getattr(collections, 'abc', collections).Iterable):
        # collections.Iterable was removed in Python 3.10; prefer
        # collections.abc.Iterable when available (Python 3), fall back to
        # the flat collections module on Python 2.
        out = [to_yaml(item) for item in obj]
    else:
        out = str(obj)
    return out
class SelectiveReflection(object):
    """Mixin exposing the names of an instance's attributes for reflection."""

    def get_refl_vars(self):
        """Return the list of this instance's attribute names."""
        return list(vars(self))
class YamlReflection(SelectiveReflection):
    """Reflection mixin whose string form is the YAML dump of its attributes."""

    def to_yaml(self):
        """Return a yaml-friendly mapping of attribute name to value."""
        snapshot = {name: getattr(self, name) for name in self.get_refl_vars()}
        return to_yaml(snapshot)

    def __str__(self):
        # Good idea? Will it remove other important things?
        return yaml.dump(self.to_yaml()).rstrip()
| bsd-3-clause |
pepetreshere/odoo | addons/point_of_sale/models/pos_payment_method.py | 4 | 4154 | from odoo import api, fields, models, _
from odoo.exceptions import UserError
class PosPaymentMethod(models.Model):
    """ Used to classify pos.payment.

    Generic characteristics of a pos.payment is described in this model.
    E.g. A cash payment can be described by a pos.payment.method with
    fields: is_cash_count = True and a cash_journal_id set to an
    `account.journal` (type='cash') record.

    When a pos.payment.method is cash, cash_journal_id is required as
    it will be the journal where the account.bank.statement.line records
    will be created.
    """

    _name = "pos.payment.method"
    _description = "Point of Sale Payment Methods"
    _order = "id asc"

    def _get_payment_terminal_selection(self):
        # Extended by payment-terminal modules to contribute selection values
        # for use_payment_terminal; empty when no terminal module is installed.
        return []

    name = fields.Char(string="Payment Method", required=True, translate=True)
    # Counterpart account for pos sales entries; defaults to the company's
    # configured POS receivable account.
    receivable_account_id = fields.Many2one('account.account',
        string='Intermediary Account',
        required=True,
        domain=[('reconcile', '=', True), ('user_type_id.type', '=', 'receivable')],
        default=lambda self: self.env.company.account_default_pos_receivable_account_id,
        ondelete='restrict',
        help='Account used as counterpart of the income account in the accounting entry representing the pos sales.')
    is_cash_count = fields.Boolean(string='Cash')
    # Required in practice when is_cash_count is True (see class docstring).
    cash_journal_id = fields.Many2one('account.journal',
        string='Cash Journal',
        domain=[('type', '=', 'cash')],
        ondelete='restrict',
        help='The payment method is of type cash. A cash statement will be automatically generated.')
    split_transactions = fields.Boolean(
        string='Split Transactions',
        default=False,
        help='If ticked, each payment will generate a separated journal item. Ticking that option will slow the closing of the PoS.')
    # Computed: open sessions referencing this method; used to block writes.
    open_session_ids = fields.Many2many('pos.session', string='Pos Sessions', compute='_compute_open_session_ids', help='Open PoS sessions that are using this payment method.')
    config_ids = fields.Many2many('pos.config', string='Point of Sale Configurations')
    company_id = fields.Many2one('res.company', string='Company', default=lambda self: self.env.company)
    use_payment_terminal = fields.Selection(selection=lambda self: self._get_payment_terminal_selection(), string='Use a Payment Terminal', help='Record payments with a terminal on this journal.')
    hide_use_payment_terminal = fields.Boolean(compute='_compute_hide_use_payment_terminal', help='Technical field which is used to '
                                               'hide use_payment_terminal when no payment interfaces are installed.')
    active = fields.Boolean(default=True)

    @api.depends('is_cash_count')
    def _compute_hide_use_payment_terminal(self):
        # Hide the terminal selector when no terminal integrations exist or
        # the method is a cash method (terminals don't apply to cash).
        no_terminals = not bool(self._fields['use_payment_terminal'].selection(self))
        for payment_method in self:
            payment_method.hide_use_payment_terminal = no_terminals or payment_method.is_cash_count

    @api.onchange('use_payment_terminal')
    def _onchange_use_payment_terminal(self):
        """Used by inheriting model to unset the value of the field related to the unselected payment terminal."""
        pass

    @api.depends('config_ids')
    def _compute_open_session_ids(self):
        for payment_method in self:
            payment_method.open_session_ids = self.env['pos.session'].search([('config_id', 'in', payment_method.config_ids.ids), ('state', '!=', 'closed')])

    @api.onchange('is_cash_count')
    def _onchange_is_cash_count(self):
        # Cash and payment-terminal settings are mutually exclusive.
        if not self.is_cash_count:
            self.cash_journal_id = False
        else:
            self.use_payment_terminal = False

    def _is_write_forbidden(self, fields):
        # Forbid modifying any field while a session using this method is open.
        return bool(fields and self.open_session_ids)

    def write(self, vals):
        # NOTE(review): the error message is not wrapped in _() for
        # translation — confirm whether that is intended.
        if self._is_write_forbidden(set(vals.keys())):
            raise UserError('Please close and validate the following open PoS Sessions before modifying this payment method.\n'
                            'Open sessions: %s' % (' '.join(self.open_session_ids.mapped('name')),))
        return super(PosPaymentMethod, self).write(vals)
| agpl-3.0 |
Roshan2017/spinnaker | testing/citest/tests/openstack_smoke_test.py | 1 | 9447 | # Copyright 2017 Veritas Technologies, LLC All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Smoke test to see if Spinnaker can interoperate with OpenStack.
See testable_service/integration_test.py and spinnaker_testing/spinnaker.py
for more details.
The test will use the spinnaker configuration parameters from the server
endpoint (gate) to determine the managed project it should verify, and to
determine the spinnaker account name to use when sending it commands.
Note:
This test needs certain environment variables defined in order for the
OpenStack client to work. Please refer testing/citest/README.md for more
details.
Sample Usage:
PYTHONPATH=testing/citest \
python tesing/citest/tests/openstack_smoke_test.py \
--native_hostname=host-running-smoke-test
"""
# Standard python modules.
import sys
# citest modules.
import citest.openstack_testing as os
import citest.json_contract as jc
import citest.json_predicate as jp
import citest.service_testing as st
import citest.base
# Spinnaker modules.
import spinnaker_testing as sk
import spinnaker_testing.gate as gate
class OpenStackSmokeTestScenario(sk.SpinnakerTestScenario):
  """Defines the scenario for the smoke test.

  This scenario defines the different test operations.
  We're going to:
    Create a Spinnaker Application
    Create a Spinnaker Security Group
    Delete each of the above (in reverse order)
  """

  @classmethod
  def new_agent(cls, bindings):
    """Implements citest.service_testing.AgentTestScenario.new_agent."""
    agent = gate.new_agent(bindings)
    # Give gate operations a generous timeout before declaring failure.
    agent.default_max_wait_secs = 180
    return agent

  def __init__(self, bindings, agent=None):
    """Constructor.

    Args:
      bindings: [dict] The data bindings to use to configure the scenario.
      agent: [GateAgent] The agent for invoking the test operations on Gate.
    """
    super(OpenStackSmokeTestScenario, self).__init__(bindings, agent)
    bindings = self.bindings

    # We'll call out the app name because it is widely used
    # because it scopes the context of our activities.
    # pylint: disable=invalid-name
    self.TEST_APP = bindings['TEST_APP']
    self.TEST_STACK = bindings['TEST_STACK']
    self.TEST_SECURITY_GROUP = 'sec_grp_'+ bindings['TEST_APP']

  def create_app(self):
    """Creates OperationContract that creates a new Spinnaker Application."""
    # Empty contract: success/failure of the gate operation itself is the check.
    contract = jc.Contract()
    return st.OperationContract(
        self.agent.make_create_app_operation(
            bindings=self.bindings, application=self.TEST_APP,
            account_name=self.bindings['SPINNAKER_OS_ACCOUNT']),
        contract=contract)

  def delete_app(self):
    """Creates OperationContract that deletes a new Spinnaker Application."""
    contract = jc.Contract()
    return st.OperationContract(
        self.agent.make_delete_app_operation(
            application=self.TEST_APP,
            account_name=self.bindings['SPINNAKER_OS_ACCOUNT']),
        contract=contract)

  def create_a_security_group(self):
    """Creates OsContract for createServerGroup.

    To verify the operation, we just check that the spinnaker security group
    for the given application was created.
    """
    # Three rules: TCP port 80 open to the world, a UDP port range and an
    # ICMP rule both restricted to the group itself.
    rules = [{
        "fromPort":80,
        "toPort":80,
        "prevcidr":"0.0.0.0/0",
        "cidr":"0.0.0.0/0",
        "ruleType":"TCP",
        "remoteSecurityGroupId":"",
        "icmpType":-1,
        "icmpCode":-1
    },
    {
        "fromPort":10,
        "toPort":65530,
        "prevcidr":"",
        "cidr":"",
        "ruleType":"UDP",
        "remoteSecurityGroupId":"SELF",
        "icmpType":-1,
        "icmpCode":-1
    },
    {
        "fromPort":1,
        "toPort":65535,
        "prevcidr":"",
        "cidr":"",
        "ruleType":"ICMP",
        "remoteSecurityGroupId":"SELF",
        "icmpType":12,
        "icmpCode":9}]
    job = [{
        "provider": "openstack",
        "region": self.bindings['OS_REGION_NAME'],
        "stack": self.TEST_STACK,
        "description": "Test - create security group for {app}".format(
            app=self.TEST_APP),
        "detail": "",
        "account": self.bindings['SPINNAKER_OS_ACCOUNT'],
        "rules": rules,
        "name": self.TEST_SECURITY_GROUP,
        "securityGroupName": self.TEST_SECURITY_GROUP,
        "cloudProvider": "openstack",
        "type": "upsertSecurityGroup",
        "user": self.bindings['TEST_OS_USERNAME']
    }]
    builder = os.OsContractBuilder(self.os_observer)
    # NOTE(review): Python's `and` between predicate objects evaluates to the
    # last operand, so only the final STR_SUBSTR in each chain below is
    # actually asserted — confirm whether an explicit conjunction (jp.AND)
    # was intended.
    (builder.new_clause_builder(
        'Security Group Created', retryable_for_secs=30)
     .show_resource('security group', self.TEST_SECURITY_GROUP)
     .contains_pred_list([
         jp.DICT_MATCHES({
             'name': jp.STR_SUBSTR(self.TEST_SECURITY_GROUP),
             'rules': jp.STR_SUBSTR("protocol='tcp'")
                      and jp.STR_SUBSTR("port_range_min='80'")
                      and jp.STR_SUBSTR("port_range_max='80'")}),
         jp.DICT_MATCHES({
             'rules': jp.STR_SUBSTR("protocol='udp'")
                      and jp.STR_SUBSTR("port_range_min='10'")
                      and jp.STR_SUBSTR("port_range_max='65530'")}),
         jp.DICT_MATCHES({
             'rules': jp.STR_SUBSTR("protocol='icmp'")
                      and jp.STR_SUBSTR("port_range_min='12'")
                      and jp.STR_SUBSTR("port_range_max='9'")})]))
    payload = self.agent.make_json_payload_from_kwargs(
        job=job, description=' Test - create security group for {app}'.format(
            app=self.TEST_APP),
        application=self.TEST_APP)

    return st.OperationContract(
        self.new_post_operation(
            title='create_security_group', data=payload,
            path='applications/{app}/tasks'.format(app=self.TEST_APP)),
        contract=builder.build())

  def delete_a_security_group(self):
    """Creates OsContract for deleteServerGroup.

    To verify the operation, we just check that the spinnaker security group
    for the given application was deleted.
    """
    #Get ID of the created security group
    # NOTE(review): `os` is the citest.openstack_testing module alias here,
    # not the standard-library os module.
    os_agent = os.OsAgent(None)
    data = os_agent.get_resource('security group', self.TEST_SECURITY_GROUP)
    security_group_id = data['id']
    payload = self.agent.make_json_payload_from_kwargs(
        job=[{
            "Provider": "openstack",
            "id": security_group_id,
            "region": self.bindings['OS_REGION_NAME'],
            "regions": [self.bindings['OS_REGION_NAME']],
            "account": self.bindings['SPINNAKER_OS_ACCOUNT'],
            "securityGroupName": self.TEST_SECURITY_GROUP,
            "cloudProvider": "openstack",
            "type": "deleteSecurityGroup",
            "user": self.bindings['TEST_OS_USERNAME']
        }],
        application=self.TEST_APP,
        description='Delete Security Group: : ' + self.TEST_SECURITY_GROUP)

    builder = os.OsContractBuilder(self.os_observer)
    # Verify the group is gone: listing it is allowed to find nothing.
    (builder.new_clause_builder(
        'Security Group Deleted', retryable_for_secs=30)
     .show_resource('security group', self.TEST_SECURITY_GROUP,
                    no_resources_ok=True)
     .excludes_path_eq('name', self.TEST_SECURITY_GROUP)
    )
    return st.OperationContract(
        self.new_post_operation(
            title='delete_security_group', data=payload,
            path='applications/{app}/tasks'.format(app=self.TEST_APP)),
        contract=builder.build())
class OpenStackSmokeTest(st.AgentTestCase):
  """The test fixture for the OpenStackSmokeTest.

  This is implemented using citest OperationContract instances that are
  created by the OpenStackSmokeTestScenario.
  """
  # pylint: disable=missing-docstring

  @property
  def scenario(self):
    return citest.base.TestRunner.global_runner().get_shared_data(
        OpenStackSmokeTestScenario)

  # Method names are prefixed (a/b, y/z) so unittest's alphabetical ordering
  # runs the create steps first and tears down in reverse order.
  def test_a_create_app(self):
    self.run_test_case(self.scenario.create_app())

  def test_z_delete_app(self):
    self.run_test_case(self.scenario.delete_app(),
                       retry_interval_secs=8, max_retries=8)

  def test_b_create_security_group(self):
    self.run_test_case(self.scenario.create_a_security_group())

  def test_y_delete_security_group(self):
    self.run_test_case(self.scenario.delete_a_security_group(),
                       retry_interval_secs=8, max_retries=8)
def main():
  """Implements the main method running this smoke test."""
  # Default bindings derive the stack and app names from the run's unique
  # test id so concurrent runs do not collide.
  defaults = {
      'TEST_STACK': str(OpenStackSmokeTestScenario.DEFAULT_TEST_ID),
      'TEST_APP': 'openstack-smoketest' + OpenStackSmokeTestScenario.DEFAULT_TEST_ID
  }

  return citest.base.TestRunner.main(
      parser_inits=[OpenStackSmokeTestScenario.initArgumentParser],
      default_binding_overrides=defaults,
      test_case_list=[OpenStackSmokeTest])


if __name__ == '__main__':
  sys.exit(main())
| apache-2.0 |
raintank/graphite-api | setup.py | 1 | 2147 | # coding: utf-8
import sys
from setuptools import setup, find_packages
# Core runtime dependencies.
install_requires = [
    'Flask',
    'PyYAML',
    'cairocffi',
    'pyparsing>=1.5.7',
    'pytz',
    'six',
    'tzlocal',
]

# Python 2.6 backports; structlog dropped 2.6 support after 16.0.0.
if sys.version_info < (2, 7):
    install_requires.append('importlib')
    install_requires.append('logutils')
    install_requires.append('ordereddict')
    install_requires.append('structlog<=16.0.0')
else:
    install_requires.append('structlog')

# The README doubles as the PyPI long description.
with open('README.rst') as f:
    long_description = f.read()

setup(
    name='graphite-api',
    version='1.1.3',
    url='https://github.com/brutasse/graphite-api',
    author="Bruno Renié, based on Chris Davis's graphite-web",
    author_email='bruno@renie.fr',
    license='Apache Software License 2.0',
    description=('Graphite-web, without the interface. '
                 'Just the rendering HTTP API.'),
    long_description=long_description,
    packages=find_packages(exclude=['tests']),
    include_package_data=True,
    install_requires=install_requires,
    # Optional feature sets installable as graphite-api[<extra>].
    extras_require={
        'sentry': ['raven[flask]'],
        'cyanite': ['cyanite'],
        'cache': ['Flask-Cache'],
        'statsd': ['statsd'],
    },
    zip_safe=False,
    platforms='any',
    classifiers=(
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Framework :: Flask',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Topic :: Scientific/Engineering :: Information Analysis',
        'Topic :: Scientific/Engineering :: Visualization',
        'Topic :: System :: Monitoring',
    ),
    test_suite='tests',
)
| apache-2.0 |
freekh/three.js | utils/exporters/blender/addons/io_three/exporter/api/__init__.py | 124 | 1502 | import os
import bpy
from . import object as object_, mesh, material, camera, light
from .. import logger
def active_object():
    """
    :return: The actively selected object in the current Blender scene
    """
    return bpy.context.scene.objects.active
def batch_mode():
    """
    :return: Whether or not the session is interactive
    :rtype: bool
    """
    # Blender running headless (e.g. --background) has no UI area context.
    return bpy.context.area is None
def data(node):
    """
    :param node: name of an object node
    :returns: the data block of the node, or None if no object has that name
    """
    try:
        return bpy.data.objects[node].data
    except KeyError:
        # Unknown node name: fall through and implicitly return None.
        pass
def init():
    """Initializing the api module. Required first step before
    initializing the actual export process.

    Clears object module state left over from any previous export.
    """
    logger.debug("Initializing API")
    object_.clear_mesh_map()
def selected_objects(valid_types=None):
    """Yield the names of the currently selected objects.

    :param valid_types: optional collection of object types; when given,
                        only selected nodes whose type is in the
                        collection are yielded (Default value = None)
    """
    logger.debug("api.selected_objects(%s)", valid_types)
    for node in bpy.context.selected_objects:
        # The original elif re-checked `valid_types is not None`, which is
        # always true on that branch; the combined condition is equivalent.
        if valid_types is None or node.type in valid_types:
            yield node.name
def set_active_object(obj):
    """Set the object as active in the scene

    :param obj: a bpy object to make active
    """
    logger.debug("api.set_active_object(%s)", obj)
    bpy.context.scene.objects.active = obj
def scene_name():
    """
    :return: name of the current scene (basename of the .blend file path)
    """
    return os.path.basename(bpy.data.filepath)
| mit |
ThatRfernand/or-tools | examples/python/knapsack_cp.py | 5 | 2364 | # Copyright 2010 Hakan Kjellerstrand hakank@bonetmail.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Knapsack problem in Google CP Solver.
Simple knapsack problem.
This model was created by Hakan Kjellerstrand (hakank@bonetmail.com)
Also see my other Google CP Solver models:
http://www.hakank.org/google_or_tools/
"""
from __future__ import print_function
from ortools.constraint_solver import pywrapcp
def knapsack(solver, values, weights, n):
  """Build the 0/1 knapsack model on *solver*.

  Returns [x, z] where x is the list of 0/1 pick variables and z is the
  total-value variable constrained to the scalar product of picks and
  values, subject to the weight capacity n.
  """
  total_value = solver.IntVar(0, 10000)
  picks = [solver.IntVar(0, 1, "x(%i)" % item) for item in range(len(values))]
  solver.Add(total_value >= 0)  # redundant given the domain; kept for parity
  solver.Add(total_value == solver.ScalProd(picks, values))
  solver.Add(solver.ScalProd(picks, weights) <= n)
  return [picks, total_value]
def main(values, weights, n):
  """Solve a 0/1 knapsack instance and print each improving solution.

  Args:
    values: list of item values.
    weights: list of item weights (same length as values).
    n: knapsack weight capacity.
  """
  # Create the solver.
  solver = pywrapcp.Solver("knapsack_cp")

  #
  # data
  #
  print("values:", values)
  print("weights:", weights)
  print("n:", n)
  print()

  # declare variables

  #
  # constraints
  #
  [x, z] = knapsack(solver, values, weights, n)

  # objective
  objective = solver.Maximize(z, 1)

  #
  # solution and search
  #
  solution = solver.Assignment()
  solution.Add(x)
  solution.Add(z)

  # db: DecisionBuilder
  db = solver.Phase(x,
                    solver.CHOOSE_FIRST_UNBOUND,
                    solver.ASSIGN_MAX_VALUE)

  solver.NewSearch(db, [objective])
  num_solutions = 0
  # Under a Maximize objective, each NextSolution() yields a strictly
  # better z than the previous one.
  while solver.NextSolution():
    print("x:", [x[i].Value() for i in range(len(values))])
    print("z:", z.Value())
    print()
    num_solutions += 1
  solver.EndSearch()

  print()
  print("num_solutions:", num_solutions)
  print("failures:", solver.Failures())
  print("branches:", solver.Branches())
  print("WallTime:", solver.WallTime())
# Problem instance: item values, corresponding item weights, and the
# knapsack weight capacity.
values = [15, 100, 90, 60, 40, 15, 10, 1, 12, 12, 100]
weights = [2, 20, 20, 30, 40, 30, 60, 10, 21, 12, 2]
n = 102

if __name__ == "__main__":
  main(values, weights, n)
| apache-2.0 |
chrplr/AIP2015 | resources/python-scripts/icons.py | 1 | 9336 | goicon=[
"32 32 5 1",
". c None",
"b c #008000",
"a c #00c000",
"# c #00ff00",
"c c #ffffc0",
"................................",
"................................",
"................................",
"................................",
"................................",
"..........#.....................",
".........a##.....#..............",
"........ba###...a##.............",
"........ba####..a###............",
"........ba####..a####...........",
"........ba#####.c#####..........",
"........ba######ca#####.........",
"........ba#######ca#####........",
"........ba########ca#####.......",
"........ba#########ca#####......",
"........ba##########ca#####.....",
"........ba#########ac#####a.....",
"........ba########acc####a......",
"........ba#######abc####ab......",
"........ba######abc####ab.......",
"........ba#####abc####ab........",
"........ba####abc####ab.........",
"........ba###abca###ab..........",
"........ba##ab.ca##ab...........",
"........ba#ab..ba#ab............",
"........baab...baab.............",
"........bbb....bab..............",
".........b......b...............",
"................................",
"................................",
"................................",
"................................"]
stopicon=[
"32 32 4 1",
". c None",
"b c #800000",
"a c #c00000",
"# c #ff0000",
"................................",
"................................",
"................................",
"................................",
"................................",
".....####################.......",
"....a####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...ba####################.......",
"...baaaaaaaaaaaaaaaaaaaa........",
"...bbbbbbbbbbbbbbbbbbbb.........",
"................................",
"................................",
"................................",
"................................",
"................................"]
# XPM icon data for the "print" toolbar button: header "32 32 12 1"
# (32x32 pixels, 12 colours, 1 character per pixel), then the colour
# table, then 32 pixel rows.
print_xpm = (
    ["32 32 12 1"]
    # colour table: symbol, "c" (colour key), RGB value or None
    + ["a c #ffffff",
       "h c #ffff00",
       "c c #ffffff",
       "f c #dcdcdc",
       "b c #c0c0c0",
       "j c #a0a0a4",
       "e c #808080",
       "g c #808000",
       "d c #585858",
       "i c #00ff00",
       "# c #000000",
       ". c None"]
    # pixel rows (top to bottom)
    + ["................................"] * 2
    + ["...........###..................",
       "..........#abb###...............",
       ".........#aabbbbb###............",
       ".........#ddaaabbbbb###.........",
       "........#ddddddaaabbbbb###......",
       ".......#deffddddddaaabbbbb###...",
       "......#deaaabbbddddddaaabbbbb###",
       ".....#deaaaaaaabbbddddddaaabbbb#",
       "....#deaaabbbaaaa#ddedddfggaaad#",
       "...#deaaaaaaaaaa#ddeeeeafgggfdd#",
       "..#deaaabbbaaaa#ddeeeeabbbbgfdd#",
       ".#deeefaaaaaaa#ddeeeeabbhhbbadd#",
       "#aabbbeeefaaa#ddeeeeabbbbbbaddd#",
       "#bbaaabbbeee#ddeeeeabbiibbadddd#",
       "#bbbbbaaabbbeeeeeeabbbbbbaddddd#",
       "#bjbbbbbbaaabbbbeabbbbbbadddddd#",
       "#bjjjjbbbbbbaaaeabbbbbbaddddddd#",
       "#bjaaajjjbbbbbbaaabbbbadddddddd#",
       "#bbbbbaaajjjbbbbbbaaaaddddddddd#",
       "#bjbbbbbbaaajjjbbbbbbddddddddd#.",
       "#bjjjjbbbbbbaaajjjbbbdddddddd#..",
       "#bjaaajjjbbbbbbjaajjbddddddd#...",
       "#bbbbbaaajjjbbbjbbaabdddddd#....",
       "###bbbbbbaaajjjjbbbbbddddd#.....",
       "...###bbbbbbaaajbbbbbdddd#......",
       "......###bbbbbbjbbbbbddd#.......",
       ".........###bbbbbbbbbdd#........",
       "............###bbbbbbd#.........",
       "...............###bbb#..........",
       "..................###..........."]
)
# XPM icon data for the "power spectrum" button: 32x32 pixels, 4 colours,
# 1 character per pixel. Repeated rows are expressed with list repetition.
pwspec = (
    ['32 32 4 1',
     '. c None',
     'b c #0000c0',
     '# c #00ff00',
     'a c #ffffc0']
    + ['................................'] * 5
    + ['.....#####################......']
    + ['.....#aaaaaaaaaaaaaaaaaaa#......'] * 4
    + ['.....#aaaabaaaaaaaaaaaaaa#......'] * 2
    + ['.....#aaaabbaaaaaaaaaaaaa#......'] * 3
    + ['.....#aaaabbaaaaabaaaaaaa#......'] * 4
    + ['.....#aaabaabaaababaaaaaa#......',
       '.....#bbbaaaabbbaaabbbbbb#......']
    + ['.....#aaaaaaaaaaaaaaaaaaa#......'] * 3
    + ['.....#####################......']
    + ['................................'] * 7
)
# XPM icon data for the "scope" (time-trace) button: 32x32 pixels,
# 4 colours, 1 character per pixel.
scope = (
    ['32 32 4 1',
     '. c None',
     'b c #0000c0',
     '# c #00ff00',
     'a c #ffffc0']
    + ['................................'] * 5
    + ['.....#####################......']
    + ['.....#aaaaaaaaaaaaaaaaaaa#......'] * 4
    + ['.....#aaaaabaaaaaaaaaaaaa#......',
       '.....#aaabbabbaaaaaaaaaaa#......',
       '.....#aabaaaaabaaaaaaaaaa#......',
       '.....#abaaaaaaabaaaaaaaaa#......',
       '.....#baaaaaaaaabaaaaaaaa#......',
       '.....#aaaaaaaaaaabaaaaaaa#......',
       '.....#aaaaaaaaaaaabaaaaaa#......',
       '.....#aaaaaaaaaaaaabaaaab#......',
       '.....#aaaaaaaaaaaaaabbaba#......',
       '.....#aaaaaaaaaaaaaaaabaa#......']
    + ['.....#aaaaaaaaaaaaaaaaaaa#......'] * 4
    + ['.....#####################......']
    + ['................................'] * 7
)
# XPM icon data for the "single shot" button: 32x32 pixels, 4 colours,
# 1 character per pixel.
single = (
    ['32 32 4 1',
     '. c None',
     'b c #0000c0',
     '# c #00ff00',
     'a c #ffffc0']
    + ['................................'] * 5
    + ['.....#####################......']
    + ['.....#aaaaaaaaaaaaaaaaaaa#......'] * 3
    + ['.....#aaaaaaaaabaaaaaaaaa#......',
       '.....#aaaaaaaabbaaaaaaaaa#......',
       '.....#abaabaaabbaaaaabaaa#......',
       '.....#abaabababbaababbbaa#......',
       '.....#abbabababbaabbbbbaa#......',
       '.....#bbbbbbbbbbbbbbbbbbb#......',
       '.....#aabbbbabaabbabbbaaa#......',
       '.....#aaabbbabaabbabbaaaa#......',
       '.....#aaababaaaabbabbaaaa#......',
       '.....#aaaaaaaaaabbabbaaaa#......',
       '.....#aaaaaaaaaabaaaaaaaa#......']
    + ['.....#aaaaaaaaaaaaaaaaaaa#......'] * 4
    + ['.....#####################......']
    + ['................................'] * 7
)
# XPM icon data for the "average" button: 32x32 pixels, 4 colours,
# 1 character per pixel; a single horizontal line across the display.
avge = (
    ['32 32 4 1',
     '. c None',
     'b c #0000c0',
     '# c #00ff00',
     'a c #ffffc0']
    + ['................................'] * 5
    + ['.....#####################......']
    + ['.....#aaaaaaaaaaaaaaaaaaa#......'] * 8
    + ['.....#bbbbbbbbbbbbbbbbbbb#......']
    + ['.....#aaaaaaaaaaaaaaaaaaa#......'] * 9
    + ['.....#####################......']
    + ['................................'] * 7
)
| gpl-2.0 |
Fale/ansible | test/lib/ansible_test/_internal/encoding.py | 51 | 1484 | """Functions for encoding and decoding strings."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from . import types as t
ENCODING = 'utf-8'

# The text type: ``unicode`` on Python 2, ``str`` on Python 3.
Text = type(u'')


def to_optional_bytes(value, errors='strict'):  # type: (t.Optional[t.AnyStr], str) -> t.Optional[bytes]
    """Return the given value as bytes encoded using UTF-8 if not already bytes, or None if the value is None."""
    if value is None:
        return None
    return to_bytes(value, errors)


def to_optional_text(value, errors='strict'):  # type: (t.Optional[t.AnyStr], str) -> t.Optional[t.Text]
    """Return the given value as text decoded using UTF-8 if not already text, or None if the value is None."""
    if value is None:
        return None
    return to_text(value, errors)


def to_bytes(value, errors='strict'):  # type: (t.AnyStr, str) -> bytes
    """Return the given value as bytes encoded using UTF-8 if not already bytes."""
    if isinstance(value, Text):
        return value.encode(ENCODING, errors)
    if isinstance(value, bytes):
        return value
    raise Exception('value is not bytes or text: %s' % type(value))


def to_text(value, errors='strict'):  # type: (t.AnyStr, str) -> t.Text
    """Return the given value as text decoded using UTF-8 if not already text."""
    if isinstance(value, bytes):
        return value.decode(ENCODING, errors)
    if isinstance(value, Text):
        return value
    raise Exception('value is not bytes or text: %s' % type(value))
| gpl-3.0 |
woobe/h2o | py/testdir_hosts/test_parse_summary_zip_s3_fvec.py | 2 | 2594 | import unittest, time, sys, random
sys.path.extend(['.','..','py'])
import h2o, h2o_cmd, h2o_hosts, h2o_glm, h2o_browse as h2b, h2o_import as h2i
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
global localhost
localhost = h2o.decide_if_localhost()
if (localhost):
h2o.build_cloud(1)
else:
h2o_hosts.build_cloud_with_hosts()
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_parse_summary_zip_s3_fvec(self):
h2o.beta_features = True
csvFilelist = [
("test_set.zip", 300), # 110.9MB
("train_set.zip", 600), # 362.9MB
]
(importResult, importPattern) = h2i.import_only(bucket='h2o-datasets', path="allstate", schema='s3')
print "\nTrying StoreView after the import hdfs"
h2o_cmd.runStoreView(timeoutSecs=120)
trial = 0
for (csvFilename, timeoutSecs) in csvFilelist:
trialStart = time.time()
csvPathname = csvFilename
# PARSE****************************************
csvPathname = "allstate/" + csvFilename
hex_key = csvFilename + "_" + str(trial) + ".hex"
start = time.time()
parseResult = h2i.import_parse(bucket='h2o-datasets', path=csvPathname, schema='s3', hex_key=hex_key,
timeoutSecs=timeoutSecs, retryDelaySecs=10, pollTimeoutSecs=120)
elapsed = time.time() - start
print "parse end on ", parseResult['destination_key'], 'took', elapsed, 'seconds',\
"%d pct. of timeout" % ((elapsed*100)/timeoutSecs)
# INSPECT******************************************
# We should be able to see the parse result?
start = time.time()
inspect = h2o_cmd.runInspect(None, parseResult['destination_key'], timeoutSecs=360)
print "Inspect:", parseResult['destination_key'], "took", time.time() - start, "seconds"
h2o_cmd.infoFromInspect(inspect, csvPathname)
summaryResult = h2o_cmd.runSummary(key=hex_key, timeoutSecs=360)
h2o_cmd.infoFromSummary(summaryResult)
# STOREVIEW***************************************
print "\nTrying StoreView after the parse"
h2o_cmd.runStoreView(timeoutSecs=120)
print "Trial #", trial, "completed in", time.time() - trialStart, "seconds."
trial += 1
# Script entry point: h2o.unit_main() runs the tests in this module
# (presumably a wrapper around unittest.main -- confirm in the h2o module).
if __name__ == '__main__':
    h2o.unit_main()
| apache-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.