repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990
values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15
values |
|---|---|---|---|---|---|
HBehrens/feedsanitizer | django/contrib/gis/geos/geometry.py | 230 | 24281 | """
This module contains the 'base' GEOSGeometry object -- all GEOS Geometries
inherit from this object.
"""
# Python, ctypes and types dependencies.
import re
import warnings
from ctypes import addressof, byref, c_double, c_size_t
# super-class for mutable list behavior
from django.contrib.gis.geos.mutable_list import ListMixin
# GEOS-related dependencies.
from django.contrib.gis.geos.base import GEOSBase, gdal
from django.contrib.gis.geos.coordseq import GEOSCoordSeq
from django.contrib.gis.geos.error import GEOSException, GEOSIndexError
from django.contrib.gis.geos.libgeos import GEOM_PTR, GEOS_PREPARE
from django.contrib.gis.geos.mutable_list import ListMixin
# All other functions in this module come from the ctypes
# prototypes module -- which handles all interaction with
# the underlying GEOS library.
from django.contrib.gis.geos import prototypes as capi
# These functions provide access to a thread-local instance
# of their corresponding GEOS I/O class.
from django.contrib.gis.geos.prototypes.io import wkt_r, wkt_w, wkb_r, wkb_w, ewkb_w, ewkb_w3d
# For recognizing geometry input.
from django.contrib.gis.geometry.regex import hex_regex, wkt_regex, json_regex
class GEOSGeometry(GEOSBase, ListMixin):
"A class that, generally, encapsulates a GEOS geometry."
# Raise GEOSIndexError instead of plain IndexError
# (see ticket #4740 and GEOSIndexError docstring)
_IndexError = GEOSIndexError
ptr_type = GEOM_PTR
    #### Python 'magic' routines ####
    def __init__(self, geo_input, srid=None):
        """
        The base constructor for GEOS geometry objects, and may take the
        following inputs:
         * strings:
            - WKT
            - HEXEWKB (a PostGIS-specific canonical form)
            - GeoJSON (requires GDAL)
         * buffer:
            - WKB
        The `srid` keyword is used to specify the Source Reference Identifier
        (SRID) number for this Geometry. If not set, the SRID will be None.

        Raises ValueError for unrecognized string input, TypeError for an
        unsupported input type, and GEOSException if GEOS could not build a
        geometry from the input.
        """
        if isinstance(geo_input, basestring):
            if isinstance(geo_input, unicode):
                # Encoding to ASCII, WKT or HEXEWKB doesn't need any more.
                geo_input = geo_input.encode('ascii')
            # Try each supported textual format in turn: WKT/EWKT first,
            # then HEXEWKB, then GeoJSON (only when GDAL is available).
            wkt_m = wkt_regex.match(geo_input)
            if wkt_m:
                # Handling WKT input.
                if wkt_m.group('srid'): srid = int(wkt_m.group('srid'))
                g = wkt_r().read(wkt_m.group('wkt'))
            elif hex_regex.match(geo_input):
                # Handling HEXEWKB input.
                g = wkb_r().read(geo_input)
            elif gdal.GEOJSON and json_regex.match(geo_input):
                # Handling GeoJSON input: round-trip through OGR to WKB.
                g = wkb_r().read(gdal.OGRGeometry(geo_input).wkb)
            else:
                raise ValueError('String or unicode input unrecognized as WKT EWKT, and HEXEWKB.')
        elif isinstance(geo_input, GEOM_PTR):
            # When the input is a pointer to a geometry (GEOM_PTR).
            # NOTE(review): this appears to take ownership of the pointer --
            # it will be freed in __del__; confirm callers do not free it too.
            g = geo_input
        elif isinstance(geo_input, buffer):
            # When the input is a buffer (WKB).
            g = wkb_r().read(geo_input)
        elif isinstance(geo_input, GEOSGeometry):
            # Copy construction: clone the underlying C geometry.
            g = capi.geom_clone(geo_input.ptr)
        else:
            # Invalid geometry type.
            raise TypeError('Improper geometry input type: %s' % str(type(geo_input)))
        if bool(g):
            # Setting the pointer object with a valid pointer.
            self.ptr = g
        else:
            raise GEOSException('Could not initialize GEOS Geometry with given input.')
        # Post-initialization setup.
        self._post_init(srid)
    def _post_init(self, srid):
        "Helper routine for performing post-initialization setup."
        # Setting the SRID, if given.
        if srid and isinstance(srid, int): self.srid = srid
        # Setting the class type (e.g., Point, Polygon, etc.) by re-assigning
        # __class__ from the GEOS type id -> subclass map at module bottom.
        self.__class__ = GEOS_CLASSES[self.geom_typeid]
        # Setting the coordinate sequence for the geometry (will be None on
        # geometries that do not have coordinate sequences)
        self._set_cs()
    def __del__(self):
        """
        Destroys this Geometry; in other words, frees the memory used by the
        GEOS C++ object.
        """
        # _ptr (the raw attribute, not the validating `ptr` property) is
        # checked so that no error is raised when __init__ failed early and
        # the pointer was never assigned.
        if self._ptr: capi.destroy_geom(self._ptr)
    def __copy__(self):
        """
        Returns a clone because the copy of a GEOSGeometry may contain an
        invalid pointer location if the original is garbage collected.
        """
        return self.clone()
    def __deepcopy__(self, memodict):
        """
        The `deepcopy` routine is used by the `Node` class of django.utils.tree;
        thus, the protocol routine needs to be implemented to return correct
        copies (clones) of these GEOS objects, which use C pointers.
        """
        # A clone duplicates the underlying C geometry, so no deeper copy
        # is possible or needed; `memodict` is intentionally ignored.
        return self.clone()
    def __str__(self):
        "WKT is used for the string representation."
        return self.wkt
    def __repr__(self):
        "Short-hand representation because WKT may be very large."
        return '<%s object at %s>' % (self.geom_type, hex(addressof(self.ptr)))
    # Pickling support
    def __getstate__(self):
        # The pickled state is simply a tuple of the WKB (in string form)
        # and the SRID.  The raw C pointer cannot be pickled.
        return str(self.wkb), self.srid
    def __setstate__(self, state):
        # Instantiating from the tuple state that was pickled: rebuild the
        # GEOS geometry from WKB, then redo post-init (class, SRID, coords).
        wkb, srid = state
        ptr = wkb_r().read(buffer(wkb))
        if not ptr: raise GEOSException('Invalid Geometry loaded from pickled state.')
        self.ptr = ptr
        self._post_init(srid)
# Comparison operators
def __eq__(self, other):
"""
Equivalence testing, a Geometry may be compared with another Geometry
or a WKT representation.
"""
if isinstance(other, basestring):
return self.wkt == other
elif isinstance(other, GEOSGeometry):
return self.equals_exact(other)
else:
return False
def __ne__(self, other):
"The not equals operator."
return not (self == other)
    ### Geometry set-like operations ###
    # Thanks to Sean Gillies for inspiration:
    #  http://lists.gispython.org/pipermail/community/2007-July/001034.html
    # Each operator simply delegates to the corresponding named topology
    # method further down in this class.
    # g = g1 | g2
    def __or__(self, other):
        "Returns the union of this Geometry and the other."
        return self.union(other)
    # g = g1 & g2
    def __and__(self, other):
        "Returns the intersection of this Geometry and the other."
        return self.intersection(other)
    # g = g1 - g2
    def __sub__(self, other):
        "Return the difference this Geometry and the other."
        return self.difference(other)
    # g = g1 ^ g2
    def __xor__(self, other):
        "Return the symmetric difference of this Geometry and the other."
        return self.sym_difference(other)
#### Coordinate Sequence Routines ####
@property
def has_cs(self):
"Returns True if this Geometry has a coordinate sequence, False if not."
# Only these geometries are allowed to have coordinate sequences.
if isinstance(self, (Point, LineString, LinearRing)):
return True
else:
return False
def _set_cs(self):
"Sets the coordinate sequence for this Geometry."
if self.has_cs:
self._cs = GEOSCoordSeq(capi.get_cs(self.ptr), self.hasz)
else:
self._cs = None
@property
def coord_seq(self):
"Returns a clone of the coordinate sequence for this Geometry."
if self.has_cs:
return self._cs.clone()
    #### Geometry Info ####
    @property
    def geom_type(self):
        "Returns a string representing the Geometry type, e.g. 'Polygon'"
        return capi.geos_type(self.ptr)
    @property
    def geom_typeid(self):
        "Returns an integer representing the Geometry type (key into GEOS_CLASSES)."
        return capi.geos_typeid(self.ptr)
    @property
    def num_geom(self):
        "Returns the number of geometries in the Geometry."
        return capi.get_num_geoms(self.ptr)
    @property
    def num_coords(self):
        "Returns the number of coordinates in the Geometry."
        return capi.get_num_coords(self.ptr)
    @property
    def num_points(self):
        "Returns the number points, or coordinates, in the Geometry (alias of num_coords)."
        return self.num_coords
    @property
    def dims(self):
        "Returns the dimension of this Geometry (0=point, 1=line, 2=surface)."
        return capi.get_dims(self.ptr)
    def normalize(self):
        "Converts this Geometry to normal form (or canonical form), in place."
        return capi.geos_normalize(self.ptr)
    #### Unary predicates ####
    @property
    def empty(self):
        """
        Returns a boolean indicating whether the set of points in this Geometry
        are empty.
        """
        return capi.geos_isempty(self.ptr)
    @property
    def hasz(self):
        "Returns whether the geometry has a 3D dimension."
        return capi.geos_hasz(self.ptr)
    @property
    def ring(self):
        "Returns whether or not the geometry is a ring."
        return capi.geos_isring(self.ptr)
    @property
    def simple(self):
        "Returns False if the Geometry is not simple (e.g. self-intersecting)."
        return capi.geos_issimple(self.ptr)
    @property
    def valid(self):
        "This property tests the validity of this Geometry."
        return capi.geos_isvalid(self.ptr)
    @property
    def valid_reason(self):
        """
        Returns a string containing the reason for any invalidity.
        Requires GEOS 3.1+.
        """
        if not GEOS_PREPARE:
            raise GEOSException('Upgrade GEOS to 3.1 to get validity reason.')
        return capi.geos_isvalidreason(self.ptr)
    #### Binary predicates. ####
    def contains(self, other):
        "Returns true if other.within(this) returns true."
        return capi.geos_contains(self.ptr, other.ptr)
    def crosses(self, other):
        """
        Returns true if the DE-9IM intersection matrix for the two Geometries
        is T*T****** (for a point and a curve, a point and an area or a line
        and an area) 0******** (for two curves).
        """
        return capi.geos_crosses(self.ptr, other.ptr)
    def disjoint(self, other):
        """
        Returns true if the DE-9IM intersection matrix for the two Geometries
        is FF*FF****.
        """
        return capi.geos_disjoint(self.ptr, other.ptr)
    def equals(self, other):
        """
        Returns true if the DE-9IM intersection matrix for the two Geometries
        is T*F**FFF*.
        """
        return capi.geos_equals(self.ptr, other.ptr)
    def equals_exact(self, other, tolerance=0):
        """
        Returns true if the two Geometries are exactly equal, up to a
        specified tolerance.
        """
        return capi.geos_equalsexact(self.ptr, other.ptr, float(tolerance))
    def intersects(self, other):
        "Returns true if disjoint returns false."
        return capi.geos_intersects(self.ptr, other.ptr)
    def overlaps(self, other):
        """
        Returns true if the DE-9IM intersection matrix for the two Geometries
        is T*T***T** (for two points or two surfaces) 1*T***T** (for two curves).
        """
        return capi.geos_overlaps(self.ptr, other.ptr)
    def relate_pattern(self, other, pattern):
        """
        Returns true if the elements in the DE-9IM intersection matrix for the
        two Geometries match the elements in pattern.
        """
        # NOTE(review): only patterns *longer* than 9 chars are rejected here;
        # shorter patterns fall through to GEOS -- confirm that is intended.
        if not isinstance(pattern, basestring) or len(pattern) > 9:
            raise GEOSException('invalid intersection matrix pattern')
        return capi.geos_relatepattern(self.ptr, other.ptr, pattern)
    def touches(self, other):
        """
        Returns true if the DE-9IM intersection matrix for the two Geometries
        is FT*******, F**T***** or F***T****.
        """
        return capi.geos_touches(self.ptr, other.ptr)
    def within(self, other):
        """
        Returns true if the DE-9IM intersection matrix for the two Geometries
        is T*F**F***.
        """
        return capi.geos_within(self.ptr, other.ptr)
    #### SRID Routines ####
    def get_srid(self):
        "Gets the SRID for the geometry, returns None if no SRID is set."
        s = capi.geos_get_srid(self.ptr)
        # GEOS uses 0 to mean "no SRID"; expose that as None instead.
        if s == 0: return None
        else: return s
    def set_srid(self, srid):
        "Sets the SRID for the geometry."
        capi.geos_set_srid(self.ptr, srid)
    srid = property(get_srid, set_srid)
    #### Output Routines ####
    @property
    def ewkt(self):
        """
        Returns the EWKT (WKT + SRID) of the Geometry. Note that Z values
        are *not* included in this representation because GEOS does not yet
        support serializing them.
        """
        if self.get_srid(): return 'SRID=%s;%s' % (self.srid, self.wkt)
        else: return self.wkt
    @property
    def wkt(self):
        "Returns the WKT (Well-Known Text) representation of this Geometry."
        return wkt_w().write(self)
    @property
    def hex(self):
        """
        Returns the WKB of this Geometry in hexadecimal form. Please note
        that the SRID and Z values are not included in this representation
        because it is not a part of the OGC specification (use the `hexewkb`
        property instead).
        """
        # A possible faster, all-python, implementation:
        #  str(self.wkb).encode('hex')
        return wkb_w().write_hex(self)
    @property
    def hexewkb(self):
        """
        Returns the EWKB of this Geometry in hexadecimal form. This is an
        extension of the WKB specification that includes SRID and Z values
        that are a part of this geometry.
        """
        if self.hasz:
            if not GEOS_PREPARE:
                # See: http://trac.osgeo.org/geos/ticket/216
                raise GEOSException('Upgrade GEOS to 3.1 to get valid 3D HEXEWKB.')
            return ewkb_w3d().write_hex(self)
        else:
            return ewkb_w().write_hex(self)
    @property
    def json(self):
        """
        Returns GeoJSON representation of this Geometry if GDAL 1.5+
        is installed.
        """
        if gdal.GEOJSON:
            return self.ogr.json
        else:
            raise GEOSException('GeoJSON output only supported on GDAL 1.5+.')
    # `geojson` is simply an alias for the `json` property.
    geojson = json
    @property
    def wkb(self):
        """
        Returns the WKB (Well-Known Binary) representation of this Geometry
        as a Python buffer. SRID and Z values are not included, use the
        `ewkb` property instead.
        """
        return wkb_w().write(self)
    @property
    def ewkb(self):
        """
        Return the EWKB representation of this Geometry as a Python buffer.
        This is an extension of the WKB specification that includes any SRID
        and Z values that are a part of this geometry.
        """
        if self.hasz:
            if not GEOS_PREPARE:
                # See: http://trac.osgeo.org/geos/ticket/216
                raise GEOSException('Upgrade GEOS to 3.1 to get valid 3D EWKB.')
            return ewkb_w3d().write(self)
        else:
            return ewkb_w().write(self)
    @property
    def kml(self):
        "Returns the KML representation of this Geometry."
        # Relies on the coordinate sequence's own KML serialization, so this
        # is only usable on geometry types that have a coordinate sequence.
        gtype = self.geom_type
        return '<%s>%s</%s>' % (gtype, self.coord_seq.kml, gtype)
    @property
    def prepared(self):
        """
        Returns a PreparedGeometry corresponding to this geometry -- it is
        optimized for the contains, intersects, and covers operations.
        Requires GEOS 3.1+.
        """
        if GEOS_PREPARE:
            return PreparedGeometry(self)
        else:
            raise GEOSException('GEOS 3.1+ required for prepared geometry support.')
    #### GDAL-specific output routines ####
    @property
    def ogr(self):
        "Returns the OGR Geometry for this Geometry."
        if gdal.HAS_GDAL:
            if self.srid:
                return gdal.OGRGeometry(self.wkb, self.srid)
            else:
                return gdal.OGRGeometry(self.wkb)
        else:
            raise GEOSException('GDAL required to convert to an OGRGeometry.')
    @property
    def srs(self):
        "Returns the OSR SpatialReference for SRID of this Geometry (None if no SRID)."
        if gdal.HAS_GDAL:
            if self.srid:
                return gdal.SpatialReference(self.srid)
            else:
                return None
        else:
            raise GEOSException('GDAL required to return a SpatialReference object.')
    @property
    def crs(self):
        "Alias for `srs` property."
        return self.srs
    def transform(self, ct, clone=False):
        """
        Requires GDAL. Transforms the geometry according to the given
        transformation object, which may be an integer SRID, and WKT or
        PROJ.4 string. By default, the geometry is transformed in-place and
        nothing is returned. However if the `clone` keyword is set, then this
        geometry will not be modified and a transformed clone will be returned
        instead.
        """
        srid = self.srid
        if ct == srid:
            # short-circuit where source & dest SRIDs match
            if clone:
                return self.clone()
            else:
                return
        if (srid is None) or (srid < 0):
            # Currently a no-op with warnings; scheduled to become an error.
            warnings.warn("Calling transform() with no SRID set does no transformation!",
                          stacklevel=2)
            warnings.warn("Calling transform() with no SRID will raise GEOSException in v1.5",
                          FutureWarning, stacklevel=2)
            return
        if not gdal.HAS_GDAL:
            raise GEOSException("GDAL library is not available to transform() geometry.")
        # Creating an OGR Geometry, which is then transformed.
        g = gdal.OGRGeometry(self.wkb, srid)
        g.transform(ct)
        # Getting a new GEOS pointer from the transformed OGR geometry's WKB.
        ptr = wkb_r().read(g.wkb)
        if clone:
            # User wants a cloned transformed geometry returned.
            return GEOSGeometry(ptr, srid=g.srid)
        if ptr:
            # Reassigning pointer, and performing post-initialization setup
            # again due to the reassignment.  The old C geometry must be
            # destroyed first to avoid leaking it.
            capi.destroy_geom(self.ptr)
            self.ptr = ptr
            self._post_init(g.srid)
        else:
            raise GEOSException('Transformed WKB was invalid.')
    #### Topology Routines ####
    def _topology(self, gptr):
        "Helper routine to return Geometry from the given pointer, preserving this geometry's SRID."
        return GEOSGeometry(gptr, srid=self.srid)
    @property
    def boundary(self):
        "Returns the boundary as a newly allocated Geometry object."
        return self._topology(capi.geos_boundary(self.ptr))
    def buffer(self, width, quadsegs=8):
        """
        Returns a geometry that represents all points whose distance from this
        Geometry is less than or equal to distance. Calculations are in the
        Spatial Reference System of this Geometry. The optional third parameter sets
        the number of segment used to approximate a quarter circle (defaults to 8).
        (Text from PostGIS documentation at ch. 6.1.3)
        """
        return self._topology(capi.geos_buffer(self.ptr, width, quadsegs))
    @property
    def centroid(self):
        """
        The centroid is equal to the centroid of the set of component Geometries
        of highest dimension (since the lower-dimension geometries contribute zero
        "weight" to the centroid).
        """
        return self._topology(capi.geos_centroid(self.ptr))
    @property
    def convex_hull(self):
        """
        Returns the smallest convex Polygon that contains all the points
        in the Geometry.
        """
        return self._topology(capi.geos_convexhull(self.ptr))
    def difference(self, other):
        """
        Returns a Geometry representing the points making up this Geometry
        that do not make up other.
        """
        return self._topology(capi.geos_difference(self.ptr, other.ptr))
    @property
    def envelope(self):
        "Return the envelope for this geometry (a polygon)."
        return self._topology(capi.geos_envelope(self.ptr))
    def intersection(self, other):
        "Returns a Geometry representing the points shared by this Geometry and other."
        return self._topology(capi.geos_intersection(self.ptr, other.ptr))
    @property
    def point_on_surface(self):
        "Computes an interior point of this Geometry."
        return self._topology(capi.geos_pointonsurface(self.ptr))
    def relate(self, other):
        "Returns the DE-9IM intersection matrix for this Geometry and the other."
        return capi.geos_relate(self.ptr, other.ptr)
    def simplify(self, tolerance=0.0, preserve_topology=False):
        """
        Returns the Geometry, simplified using the Douglas-Peucker algorithm
        to the specified tolerance (higher tolerance => less points). If no
        tolerance provided, defaults to 0.

        By default, this function does not preserve topology -- e.g. polygons
        can be split, holes can be created or disappear, and lines can cross.
        By specifying preserve_topology=True, the result will have the same
        dimension and number of components as the input. This is significantly
        slower.
        """
        if preserve_topology:
            return self._topology(capi.geos_preservesimplify(self.ptr, tolerance))
        else:
            return self._topology(capi.geos_simplify(self.ptr, tolerance))
    def sym_difference(self, other):
        """
        Returns a set combining the points in this Geometry not in other,
        and the points in other not in this Geometry.
        """
        return self._topology(capi.geos_symdifference(self.ptr, other.ptr))
    def union(self, other):
        "Returns a Geometry representing all the points in this Geometry and other."
        return self._topology(capi.geos_union(self.ptr, other.ptr))
    #### Other Routines ####
    @property
    def area(self):
        "Returns the area of the Geometry."
        # The c_double out-parameter receives the value; the prototype
        # wrapper returns it as a Python float.
        return capi.geos_area(self.ptr, byref(c_double()))
    def distance(self, other):
        """
        Returns the distance between the closest points on this Geometry
        and the other. Units will be in those of the coordinate system of
        the Geometry.
        """
        if not isinstance(other, GEOSGeometry):
            raise TypeError('distance() works only on other GEOS Geometries.')
        return capi.geos_distance(self.ptr, other.ptr, byref(c_double()))
    @property
    def extent(self):
        """
        Returns the extent of this geometry as a 4-tuple, consisting of
        (xmin, ymin, xmax, ymax).
        """
        env = self.envelope
        if isinstance(env, Point):
            # Degenerate case: a point's envelope is the point itself.
            xmin, ymin = env.tuple
            xmax, ymax = xmin, ymin
        else:
            # The envelope is a rectangle; corners 0 and 2 of its exterior
            # ring are the (min, min) and (max, max) vertices.
            xmin, ymin = env[0][0]
            xmax, ymax = env[0][2]
        return (xmin, ymin, xmax, ymax)
    @property
    def length(self):
        """
        Returns the length of this Geometry (e.g., 0 for point, or the
        circumfrence of a Polygon).
        """
        return capi.geos_length(self.ptr, byref(c_double()))
    def clone(self):
        "Clones this Geometry, duplicating the underlying C geometry."
        return GEOSGeometry(capi.geom_clone(self.ptr), srid=self.srid)
# Class mapping dictionary. Has to be at the end to avoid import
# conflicts with GEOSGeometry.
from django.contrib.gis.geos.linestring import LineString, LinearRing
from django.contrib.gis.geos.point import Point
from django.contrib.gis.geos.polygon import Polygon
from django.contrib.gis.geos.collections import GeometryCollection, MultiPoint, MultiLineString, MultiPolygon
# Maps the GEOS geometry type id (see `geom_typeid`) to the GEOSGeometry
# subclass used by `_post_init()` to re-class each instance.
GEOS_CLASSES = {0 : Point,
                1 : LineString,
                2 : LinearRing,
                3 : Polygon,
                4 : MultiPoint,
                5 : MultiLineString,
                6 : MultiPolygon,
                7 : GeometryCollection,
                }
# If supported, import the PreparedGeometry class.
if GEOS_PREPARE:
    from django.contrib.gis.geos.prepared import PreparedGeometry
| mit |
lordmos/blink | Tools/Scripts/webkitpy/common/system/user_mock.py | 155 | 2432 | # Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
_log = logging.getLogger(__name__)
class MockUser(object):
    """
    Test double for the interactive User object: every prompt returns a
    canned answer, edits and paging are no-ops, and opened URLs are recorded
    on `opened_urls` so tests can assert against them.
    """
    # NOTE(review): the `raw_input=raw_input` defaults are Python 2 only --
    # this module predates the raw_input -> input rename.
    @classmethod
    def prompt(cls, message, repeat=1, raw_input=raw_input):
        return "Mock user response"
    @classmethod
    def prompt_with_list(cls, list_title, list_items, can_choose_multiple=False, raw_input=raw_input):
        pass
    def __init__(self):
        # Record of every URL passed to open_url(), for assertions.
        self.opened_urls = []
    def edit(self, files):
        pass
    def edit_changelog(self, files):
        pass
    def page(self, message):
        pass
    def confirm(self, message=None, default='y'):
        # Logs the prompt and answers with the default ('y' -> True).
        _log.info(message)
        return default == 'y'
    def can_open_url(self):
        return True
    def open_url(self, url):
        self.opened_urls.append(url)
        if url.startswith("file://"):
            # Local paths vary between machines; log a stable placeholder.
            _log.info("MOCK: user.open_url: file://...")
            return
        _log.info("MOCK: user.open_url: %s" % url)
| mit |
hashbrowncipher/pushmanager | pushmanager/tests/test_servlet_delayrequest.py | 1 | 2896 | from contextlib import nested
import mock
from pushmanager.core import db
from pushmanager.core.util import get_servlet_urlspec
from pushmanager.servlets.delayrequest import DelayRequestServlet
from pushmanager.testing.mocksettings import MockedSettings
from pushmanager.testing.testservlet import ServletTestMixin
import pushmanager.testing as T
import types
class DelayRequestServletTest(T.TestCase, ServletTestMixin):
    """
    Tests that DelayRequestServlet.on_db_complete enqueues the expected
    notification emails, with and without watchers on the request.
    """
    @T.class_setup_teardown
    def mock_servlet_env(self):
        # Patch settings and the authenticated user for the duration of
        # every test in this class.
        self.results = []
        with nested(
            mock.patch.dict(db.Settings, MockedSettings),
            mock.patch.object(
                DelayRequestServlet,
                "get_current_user",
                return_value="testuser"
            )
        ):
            yield
    def get_handlers(self):
        return [get_servlet_urlspec(DelayRequestServlet)]
    def call_on_db_complete(self, req):
        # Invoke the servlet's unbound on_db_complete with a mocked servlet
        # instance and a mocked DB result whose third entry yields `req`.
        mocked_self = mock.Mock()
        mocked_self.current_user = 'fake_pushmaster'
        mocked_self.check_db_results = mock.Mock(return_value=None)
        mocked_self.on_db_complete = types.MethodType(DelayRequestServlet.on_db_complete.im_func, mocked_self)
        def first():
            return req
        mreq = mock.Mock()
        mreq.first = first
        mocked_self.on_db_complete('success', [mock.ANY, mock.ANY, mreq])
    @mock.patch('pushmanager.core.mail.MailQueue.enqueue_user_email')
    def test_no_watched_mailqueue_on_db_complete(self, mailq):
        # Without watchers, only the request owner should be emailed.
        req = {
            'user': 'testuser',
            'watchers': None,
            'repo': 'repo',
            'branch': 'branch',
            'title': 'title',
            'state': 'delayed',
        }
        self.call_on_db_complete(req)
        no_watcher_call_args = mailq.call_args_list[0][0]
        T.assert_equal(['testuser'], no_watcher_call_args[0])
        T.assert_in('Request for testuser', no_watcher_call_args[1])
        T.assert_in('testuser - title', no_watcher_call_args[1])
        T.assert_in('[push] testuser - title', no_watcher_call_args[2])
    @mock.patch('pushmanager.core.mail.MailQueue.enqueue_user_email')
    def test_watched_mailqueue_on_db_complete(self, mailq):
        # With watchers, owner plus all watchers are emailed and the
        # subject/body mention the watcher list.
        req = {
            'user': 'testuser',
            'watchers': 'testuser1,testuser2',
            'repo': 'repo',
            'branch': 'branch',
            'title': 'title',
            'state': 'delayed',
        }
        self.call_on_db_complete(req)
        watched_call_args = mailq.call_args_list[0][0]
        T.assert_equal(['testuser', 'testuser1', 'testuser2'], watched_call_args[0])
        T.assert_in('Request for testuser (testuser1,testuser2)', watched_call_args[1])
        T.assert_in('testuser (testuser1,testuser2) - title', watched_call_args[1])
        T.assert_in('[push] testuser (testuser1,testuser2) - title', watched_call_args[2])
if __name__ == '__main__':
    T.run()
| apache-2.0 |
sbalde/edxplatform | lms/envs/yaml_config.py | 7 | 10389 | """
This is the default settings files for all
production servers.
Before importing this settings file the following MUST be
defined in the environment:
* SERVICE_VARIANT - can be either "lms" or "cms"
* CONFIG_ROOT - the directory where the application
yaml config files are located
"""
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=wildcard-import, unused-wildcard-import, undefined-variable, used-before-assignment
import yaml
from .common import *
from openedx.core.lib.logsettings import get_logger_config
from util.config_parse import convert_tokens
import os
from path import Path as path
# https://stackoverflow.com/questions/2890146/how-to-force-pyyaml-to-load-strings-as-unicode-objects
from yaml import Loader, SafeLoader
def construct_yaml_str(self, node):
    """
    Replacement YAML string constructor.

    Returns the scalar exactly as the loader's `construct_scalar` produces
    it, so string nodes are always loaded as unicode objects instead of
    being downcast to byte strings.
    """
    value = self.construct_scalar(node)
    return value
# Register the unicode-preserving constructor on both the full and safe
# PyYAML loaders so every string scalar is loaded as unicode.
Loader.add_constructor(u'tag:yaml.org,2002:str', construct_yaml_str)
SafeLoader.add_constructor(u'tag:yaml.org,2002:str', construct_yaml_str)
# SERVICE_VARIANT specifies name of the variant used, which decides what YAML
# configuration files are read during startup.
SERVICE_VARIANT = os.environ.get('SERVICE_VARIANT', None)
# CONFIG_ROOT specifies the directory where the YAML configuration
# files are expected to be found. If not specified, use the project
# directory.
CONFIG_ROOT = path(os.environ.get('CONFIG_ROOT', ENV_ROOT))
# CONFIG_PREFIX specifies the prefix of the YAML configuration files,
# based on the service variant. If no variant is in use, don't use a
# prefix.
CONFIG_PREFIX = SERVICE_VARIANT + "." if SERVICE_VARIANT else ""
##############################################################
#
# DEFAULT SETTINGS FOR PRODUCTION
#
# These are defaults common for all production deployments,
# any of which may be overridden by the YAML token files below.
#
DEBUG = False
TEMPLATE_DEBUG = False
EMAIL_BACKEND = 'django_ses.SESBackend'
SESSION_ENGINE = 'django.contrib.sessions.backends.cache'
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
# IMPORTANT: With this enabled, the server must always be behind a proxy that
# strips the header HTTP_X_FORWARDED_PROTO from client requests. Otherwise,
# a user can fool our server into thinking it was an https connection.
# See
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-proxy-ssl-header
# for other warnings.
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SESSION_COOKIE_NAME = None
GIT_REPO_DIR = '/edx/var/edxapp/course_repos'
MICROSITE_ROOT_DIR = ''
CAS_SERVER_URL = None
CAS_ATTRIBUTE_CALLBACK = None
##### Defaults for OAUTH2 Provider ##############
OAUTH_OIDC_ISSUER = None
OAUTH_ENFORCE_SECURE = True
OAUTH_ENFORCE_CLIENT_SECURE = True
#### Course Registration Code length ####
REGISTRATION_CODE_LENGTH = 8
# SSL external authentication settings
SSL_AUTH_EMAIL_DOMAIN = "MIT.EDU"
SSL_AUTH_DN_FORMAT_STRING = "/C=US/ST=Massachusetts/O=Massachusetts Institute of Technology/OU=Client CA v1/CN={0}/emailAddress={1}"
GIT_IMPORT_STATIC = True
META_UNIVERSITIES = {}
DATADOG = {}
EMAIL_FILE_PATH = None
MONGODB_LOG = {}
SESSION_INACTIVITY_TIMEOUT_IN_SECONDS = None
ADDL_INSTALLED_APPS = []
LOCAL_LOGLEVEL = 'INFO'
##############################################################
#
# DEFAULT SETTINGS FOR CELERY
#
# Don't use a connection pool, since connections are dropped by ELB.
BROKER_POOL_LIMIT = 0
BROKER_CONNECTION_TIMEOUT = 1
# For the Result Store, use the django cache named 'celery'
CELERY_RESULT_BACKEND = 'djcelery.backends.cache:CacheBackend'
# When the broker is behind an ELB, use a heartbeat to refresh the
# connection and to detect if it has been dropped.
BROKER_HEARTBEAT = 10.0
BROKER_HEARTBEAT_CHECKRATE = 2
# Each worker should only fetch one message at a time
CELERYD_PREFETCH_MULTIPLIER = 1
# Skip djcelery migrations, since we don't use the database as the broker
SOUTH_MIGRATION_MODULES = {
    'djcelery': 'ignore',
}
# Rename the exchange and queues for each variant so that lms and cms
# deployments do not share queues.
QUEUE_VARIANT = CONFIG_PREFIX.lower()
CELERY_DEFAULT_EXCHANGE = 'edx.{0}core'.format(QUEUE_VARIANT)
HIGH_PRIORITY_QUEUE = 'edx.{0}core.high'.format(QUEUE_VARIANT)
DEFAULT_PRIORITY_QUEUE = 'edx.{0}core.default'.format(QUEUE_VARIANT)
LOW_PRIORITY_QUEUE = 'edx.{0}core.low'.format(QUEUE_VARIANT)
HIGH_MEM_QUEUE = 'edx.{0}core.high_mem'.format(QUEUE_VARIANT)
CELERY_DEFAULT_QUEUE = DEFAULT_PRIORITY_QUEUE
CELERY_DEFAULT_ROUTING_KEY = DEFAULT_PRIORITY_QUEUE
CELERY_QUEUES = {
    HIGH_PRIORITY_QUEUE: {},
    LOW_PRIORITY_QUEUE: {},
    DEFAULT_PRIORITY_QUEUE: {},
    HIGH_MEM_QUEUE: {},
}
# If we're a worker on the high_mem queue, set ourselves to die after processing
# one request to avoid having memory leaks take down the worker server. This env
# var is set in /etc/init/edx-workers.conf -- this should probably be replaced
# with some celery API call to see what queue we started listening to, but I
# don't know what that call is or if it's active at this point in the code.
if os.environ.get('QUEUE') == 'high_mem':
    CELERYD_MAX_TASKS_PER_CHILD = 1
##############################################################
#
# ENV TOKEN IMPORT
#
# Currently non-secure and secure settings are managed
# in two yaml files. This section imports the non-secure
# settings and modifies them in code if necessary.
#
with open(CONFIG_ROOT / CONFIG_PREFIX + "env.yaml") as env_file:
    # NOTE(review): yaml.load without an explicit Loader can construct
    # arbitrary Python objects; consider yaml.safe_load for config files.
    ENV_TOKENS = yaml.load(env_file)
# Works around an Ansible bug
ENV_TOKENS = convert_tokens(ENV_TOKENS)
##########################################
# Merge settings from common.py
#
# Before the tokens are imported directly
# into settings some dictionary settings
# need to be merged from common.py
ENV_FEATURES = ENV_TOKENS.get('FEATURES', ENV_TOKENS.get('MITX_FEATURES', {}))
for feature, value in ENV_FEATURES.items():
    FEATURES[feature] = value
MKTG_URL_LINK_MAP.update(ENV_TOKENS.get('MKTG_URL_LINK_MAP', {}))
# Delete keys from ENV_TOKENS so that when it's imported
# into settings it doesn't override what was set above
if 'FEATURES' in ENV_TOKENS:
    del ENV_TOKENS['FEATURES']
if 'MKTG_URL_LINK_MAP' in ENV_TOKENS:
    del ENV_TOKENS['MKTG_URL_LINK_MAP']
# Update the token dictionary directly into settings
vars().update(ENV_TOKENS)
##########################################
# Manipulate imported settings with code
#
# For historical reasons some settings need
# to be modified in code. For example
# conversions to other data structures that
# cannot be represented in YAML.
if SESSION_COOKIE_NAME:
# NOTE, there's a bug in Django (http://bugs.python.org/issue18012) which necessitates this being a str()
SESSION_COOKIE_NAME = str(SESSION_COOKIE_NAME)
MICROSITE_ROOT_DIR = path(MICROSITE_ROOT_DIR)
# Cache used for location mapping -- called many times with the same key/value
# in a given request.
if 'loc_cache' not in CACHES:
CACHES['loc_cache'] = {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'edx_location_mem_cache',
}
# We want Bulk Email running on the high-priority queue, so we define the
# routing key that points to it. At the moment, the name is the same.
# We have to reset the value here, since we have changed the value of the queue name.
BULK_EMAIL_ROUTING_KEY = HIGH_PRIORITY_QUEUE
# We can run smaller jobs on the low priority queue. See note above for why
# we have to reset the value here.
BULK_EMAIL_ROUTING_KEY_SMALL_JOBS = LOW_PRIORITY_QUEUE
LANGUAGE_DICT = dict(LANGUAGES)
# Additional installed apps
for app in ADDL_INSTALLED_APPS:
INSTALLED_APPS += (app,)
LOGGING = get_logger_config(LOG_DIR,
logging_env=LOGGING_ENV,
local_loglevel=LOCAL_LOGLEVEL,
debug=False,
service_variant=SERVICE_VARIANT)
for name, value in ENV_TOKENS.get("CODE_JAIL", {}).items():
oldvalue = CODE_JAIL.get(name)
if isinstance(oldvalue, dict):
for subname, subvalue in value.items():
oldvalue[subname] = subvalue
else:
CODE_JAIL[name] = value
if FEATURES.get('AUTH_USE_CAS'):
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'django_cas.backends.CASBackend',
)
INSTALLED_APPS += ('django_cas',)
MIDDLEWARE_CLASSES += ('django_cas.middleware.CASMiddleware',)
if CAS_ATTRIBUTE_CALLBACK:
import importlib
CAS_USER_DETAILS_RESOLVER = getattr(
importlib.import_module(CAS_ATTRIBUTE_CALLBACK['module']),
CAS_ATTRIBUTE_CALLBACK['function']
)
STATIC_ROOT = path(STATIC_ROOT_BASE)

##############################################################
#
# AUTH TOKEN IMPORT
#
with open(CONFIG_ROOT / CONFIG_PREFIX + "auth.yaml") as auth_file:
    # NOTE(review): yaml.load() without an explicit Loader can construct
    # arbitrary Python objects from the file; acceptable only because the
    # config file is trusted -- yaml.safe_load() would be safer.
    AUTH_TOKENS = yaml.load(auth_file)

# Works around an Ansible bug
AUTH_TOKENS = convert_tokens(AUTH_TOKENS)

# Same mechanism as ENV_TOKENS above: every auth token becomes a setting.
vars().update(AUTH_TOKENS)

##########################################
# Manipulate imported settings with code
#
# Treat empty-string AWS credentials as unset.
if AWS_ACCESS_KEY_ID == "":
    AWS_ACCESS_KEY_ID = None

if AWS_SECRET_ACCESS_KEY == "":
    AWS_SECRET_ACCESS_KEY = None

# TODO: deprecated (compatibility with previous settings)
if 'DATADOG_API' in AUTH_TOKENS:
    DATADOG['api_key'] = AUTH_TOKENS['DATADOG_API']

# Celery broker URL assembled from its individual credential components.
BROKER_URL = "{0}://{1}:{2}@{3}/{4}".format(CELERY_BROKER_TRANSPORT,
                                            CELERY_BROKER_USER,
                                            CELERY_BROKER_PASSWORD,
                                            CELERY_BROKER_HOSTNAME,
                                            CELERY_BROKER_VHOST)

# Grades download
GRADES_DOWNLOAD_ROUTING_KEY = HIGH_MEM_QUEUE

##### Custom Courses for EdX #####
if FEATURES.get('CUSTOM_COURSES_EDX'):
    INSTALLED_APPS += ('ccx',)
    FIELD_OVERRIDE_PROVIDERS += (
        'ccx.overrides.CustomCoursesForEdxOverrideProvider',
    )

##### Individual Due Date Extensions #####
if FEATURES.get('INDIVIDUAL_DUE_DATES'):
    FIELD_OVERRIDE_PROVIDERS += (
        'courseware.student_field_overrides.IndividualStudentOverrideProvider',
    )

##################### LTI Provider #####################
if FEATURES.get('ENABLE_LTI_PROVIDER'):
    INSTALLED_APPS += ('lti_provider',)
    AUTHENTICATION_BACKENDS += ('lti_provider.users.LtiBackend', )
| agpl-3.0 |
keo/bitcoin | qa/rpc-tests/mempool_limit.py | 1 | 2126 | #!/usr/bin/env python2
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Test mempool limiting together/eviction with the wallet
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class MempoolLimitTest(BitcoinTestFramework):
    """Fill the mempool with large, well-paying transactions and verify
    that a small low-fee wallet transaction gets evicted."""

    def __init__(self):
        # Pre-generated OP_RETURN outputs used to inflate transaction size.
        # NOTE(review): does not chain to BitcoinTestFramework.__init__;
        # the framework's main() presumably performs setup -- confirm.
        self.txouts = gen_return_txouts()

    def setup_network(self):
        """Start one node with a 5 MB mempool cap and record its relay fee."""
        extra_args = ["-maxmempool=5", "-spendzeroconfchange=0", "-debug"]
        self.nodes = [start_node(0, self.options.tmpdir, extra_args)]
        self.is_network_split = False
        self.sync_all()
        self.relayfee = self.nodes[0].getnetworkinfo()['relayfee']

    def setup_chain(self):
        """Create a fresh chain directory for two nodes."""
        print("Initializing test directory " + self.options.tmpdir)
        initialize_chain_clean(self.options.tmpdir, 2)

    def run_test(self):
        node = self.nodes[0]
        utxos = create_confirmed_utxos(self.relayfee, node, 90)

        # Create one small mempool tx that will later be evicted.
        victim_utxo = utxos.pop()
        raw_tx = node.createrawtransaction(
            [{"txid": victim_utxo["txid"], "vout": victim_utxo["vout"]}],
            {node.getnewaddress(): 0.0001})
        funded = node.fundrawtransaction(raw_tx)
        signed = node.signrawtransaction(funded['hex'])
        victim_txid = node.sendrawtransaction(signed['hex'])
        node.lockunspent(True, [victim_utxo])

        # Flood the mempool in four batches of 30 big transactions, each
        # batch paying a higher fee than the previous one.
        relayfee = node.getnetworkinfo()['relayfee']
        base_fee = relayfee * 100
        txids = []
        for batch in xrange(4):
            txids.append(create_lots_of_big_transactions(
                node, self.txouts, utxos[30 * batch:30 * batch + 30],
                (batch + 1) * base_fee))

        # By now the low-fee tx should be evicted; its confirmation count
        # in the wallet must still be zero.
        assert(victim_txid not in node.getrawmempool())
        wallet_entry = node.gettransaction(victim_txid)
        assert(wallet_entry['confirmations'] == 0)
# Script entry point: delegate to the test framework's main() driver.
if __name__ == '__main__':
    MempoolLimitTest().main()
| mit |
danielperna84/hass-configurator | configurator.py | 1 | 234825 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
# pylint: disable=too-many-lines
"""
Configurator for Home Assistant.
https://github.com/danielperna84/hass-configurator
"""
import os
import sys
import argparse
import json
import ssl
import socket
import socketserver
import base64
import ipaddress
import signal
import cgi
import shlex
import subprocess
import logging
import fnmatch
import hashlib
import mimetypes
from string import Template
from http.server import BaseHTTPRequestHandler
import urllib.request
from urllib.parse import urlparse, parse_qs, unquote
### Some options for you to change
LISTENIP = "0.0.0.0"
PORT = 3218
# Set BASEPATH to something like "/home/hass/.homeassistant/" if you're not
# running the configurator from that path
BASEPATH = None
# Set ENFORCE_BASEPATH to True to lock the configurator into the basepath and
# thereby prevent it from opening files outside of the BASEPATH
ENFORCE_BASEPATH = False
# Set the paths to a certificate and the key if you're using SSL,
# e.g. "/etc/ssl/certs/mycert.pem"
SSL_CERTIFICATE = None
SSL_KEY = None
# Set the destination where the HASS API is reachable
HASS_API = "http://127.0.0.1:8123/api/"
# Set the destination where the websocket API is reachable (if different
# from HASS_API, e.g. wss://hass.example.com/api/websocket)
HASS_WS_API = None
# If a password is required to access the API, set it in the form of "password"
# if you have HA ignoring SSL locally this is not needed if on same machine.
HASS_API_PASSWORD = None
# Using the CREDENTIALS variable is deprecated.
# It will still work though if USERNAME and PASSWORD are not set.
CREDENTIALS = None
# Set the username used for basic authentication.
USERNAME = None
# Set the password used for basic authentication.
PASSWORD = None
# Limit access to the configurator by adding allowed IP addresses / networks to
# the list, e.g. ALLOWED_NETWORKS = ["192.168.0.0/24", "172.16.47.23"]
ALLOWED_NETWORKS = []
# Allow access to the configurator to client IP addresses which match the
# result of DNS lookups for the specified domains.
ALLOWED_DOMAINS = []
# List of statically banned IP addresses, e.g. ["1.1.1.1", "2.2.2.2"]
BANNED_IPS = []
# Ban IPs after n failed login attempts. Restart service to reset banning.
# The default of `0` disables this feature.
BANLIMIT = 0
# Enable git integration.
# GitPython (https://gitpython.readthedocs.io/en/stable/) has to be installed.
GIT = False
# Files to ignore in the UI. A good example list that cleans up the UI is
# [".*", "*.log", "deps", "icloud", "*.conf", "*.json", "certs", "__pycache__"]
IGNORE_PATTERN = []
# if DIRSFIRST is set to `true`, directories will be displayed at the top
DIRSFIRST = False
# Don't display hidden files (starting with .)
HIDEHIDDEN = False
# Sesame token. Browse to the configurator URL + /secrettoken to unban your
# client IP and add it to the list of allowed IPs.
SESAME = None
# Instead of a static SESAME token you may also use a TOTP based token that
# changes every 30 seconds. The value needs to be a base 32 encoded string.
SESAME_TOTP_SECRET = None
# Verify the hostname used in the request. Block access if it doesn't match
# this value
VERIFY_HOSTNAME = None
# Prefix for environment variables
ENV_PREFIX = "HC_"
# Ignore SSL errors when connecting to the HASS API
IGNORE_SSL = False
# Notification service like `notify.mytelegram`. Default is `persistent_notification.create`
NOTIFY_SERVICE_DEFAULT = "persistent_notification.create"
NOTIFY_SERVICE = NOTIFY_SERVICE_DEFAULT
### End of options
# Map textual loglevel names (as accepted in HC_LOGLEVEL) to the numeric
# levels of the logging module.
LOGLEVEL_MAPPING = {
    "critical": logging.CRITICAL,
    "error": logging.ERROR,
    "warning": logging.WARNING,
    "info": logging.INFO,
    "debug": logging.DEBUG
}
DEFAULT_LOGLEVEL = "info"
# Look the requested level up case-insensitively and fall back to the default
# level for unknown values. Previously an unrecognized (or upper-case)
# HC_LOGLEVEL made .get() return None, and LOG.setLevel(None) raised at
# import time.
LOGLEVEL = LOGLEVEL_MAPPING.get(
    os.environ.get("HC_LOGLEVEL", DEFAULT_LOGLEVEL).lower(),
    LOGLEVEL_MAPPING[DEFAULT_LOGLEVEL])
# Module-level logger that writes to stdout with a timestamped format.
LOG = logging.getLogger(__name__)
LOG.setLevel(LOGLEVEL)
SO = logging.StreamHandler(sys.stdout)
SO.setLevel(LOGLEVEL)
SO.setFormatter(
    logging.Formatter('%(levelname)s:%(asctime)s:%(name)s:%(message)s'))
LOG.addHandler(SO)
# URL of the latest hass-configurator release on the GitHub API.
RELEASEURL = "https://api.github.com/repos/danielperna84/hass-configurator/releases/latest"
VERSION = "0.3.7"
# Mutable runtime globals, populated later during startup / request handling.
BASEDIR = "."
DEV = False
LISTENPORT = None
TOTP = None
HTTPD = None
# Per-client-IP failed-login bookkeeping (presumably used to enforce
# BANLIMIT -- confirm against the request handler).
FAIL2BAN_IPS = {}
REPO = None
INDEX = Template(r"""<!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1.0" />
<title>HASS Configurator</title>
<link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
<link href="https://cdnjs.cloudflare.com/ajax/libs/MaterialDesign-Webfont/3.4.93/css/materialdesignicons.min.css" rel="stylesheet">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.100.2/css/materialize.min.css">
<style type="text/css" media="screen">
body {
margin: 0;
padding: 0;
background-color: #fafafa;
display: flex;
min-height: 100vh;
flex-direction: column;
}
main {
flex: 1 0 auto;
}
#editor {
position: fixed;
top: 135px;
right: 0;
bottom: 0;
}
@media only screen and (max-width: 600px) {
#editor {
top: 125px;
}
.toolbar_mobile {
margin-bottom: 0;
}
}
.leftellipsis {
overflow: hidden;
direction: rtl;
text-overflow: ellipsis;
white-space: nowrap;
}
.select-wrapper input.select-dropdown {
width: 96%;
overflow: hidden;
direction: ltr;
text-overflow: ellipsis;
white-space: nowrap;
}
#edit_float {
z-index: 10;
}
#filebrowser {
background-color: #fff;
}
#fbheader {
display: block;
cursor: initial;
pointer-events: none;
color: #424242 !important;
font-weight: 400;
font-size: .9em;
min-height: 64px;
padding-top: 8px;
margin-left: -5px;
max-width: 250px;
}
#fbheaderbranch {
padding: 5px 10px !important;
display: none;
color: #757575 !important;
}
#branchselector {
font-weight: 400;
}
a.branch_select.active {
color: white !important;
}
#fbelements {
margin: 0;
position: relative;
}
a.collection-item {
color: #616161 !important;
}
.fbtoolbarbutton {
color: #757575 !important;
min-height: 64px !important;
}
.fbmenubutton {
color: #616161 !important;
display: inline-block;
float: right;
min-height: 64px;
padding-top: 8px !important;
padding-left: 20px !important;
}
.filename {
color: #616161 !important;
font-weight: 400;
display: inline-block;
width: 182px;
white-space: nowrap;
text-overflow: ellipsis;
cursor: pointer;
}
.nowrap {
white-space: nowrap;
}
.text_darkgreen {
color: #1b5e20 !important;
}
.text_darkred {
color: #b71c1c !important;
}
span.stats {
margin: -10px 0 0 0;
padding: 0;
font-size: 0.5em;
color: #616161 !important;
line-height: 16px;
display: inherit;
}
.collection-item #uplink {
background-color: #f5f5f5;
width: 323px !important;
margin-left: -3px !important;
}
input.currentfile_input {
margin-bottom: 0;
margin-top: 0;
padding-left: 5px;
border-bottom: 0;
}
.side_tools {
vertical-align: middle;
}
.fbtoolbarbutton_icon {
margin-top: 20px;
}
.collection {
margin: 0;
background-color: #fff;
}
li.collection-item {
border-bottom: 1px solid #eeeeee !important;
}
.side-nav {
width: 337px !important;
height: 100% !important;
}
.fb_side-nav li {
line-height: 36px;
}
.fb_side-nav a {
padding: 0 0 0 16px;
display: inline-block !important;
}
.fb_side-nav li>a>i {
margin-right: 16px !important;
cursor: pointer;
}
.green {
color: #fff;
}
.red {
color: #fff;
}
#file_history, #dropdown_menu, #dropdown_menu_mobile {
min-width: 235px;
}
#dropdown_gitmenu {
min-width: 140px !important;
}
.dropdown-content li>a,
.dropdown-content li>span {
color: #616161 !important;
}
.fb_dd {
margin-left: -15px !important;
}
.blue_check:checked+label:before {
border-right: 2px solid #03a9f4;
border-bottom: 2px solid #03a9f4;
}
.input-field input:focus+label {
color: #03a9f4 !important;
}
.input-field input[type=text].valid {
border-bottom: 1px solid #03a9f4 !important;
box-shadow: 0 1px 0 0 #03a9f4 !important;
}
.input-field input[type=text]:focus {
border-bottom: 1px solid #03a9f4 !important;
box-shadow: 0 1px 0 0 #03a9f4 !important;
}
.input-field input:focus+label {
color: #03a9f4 !important;
}
.input-field input[type=password].valid {
border-bottom: 1px solid #03a9f4 !important;
box-shadow: 0 1px 0 0 #03a9f4 !important;
}
.input-field input[type=password]:focus {
border-bottom: 1px solid #03a9f4 !important;
box-shadow: 0 1px 0 0 #03a9f4 !important;
}
.input-field textarea:focus+label {
color: #03a9f4 !important;
}
.input-field textarea:focus {
border-bottom: 1px solid #03a9f4 !important;
box-shadow: 0 1px 0 0 #03a9f4 !important;
}
#modal_acekeyboard {
top: auto;
width: 96%;
min-height: 96%;
border-radius: 0;
margin: auto;
}
.modal .modal-content_nopad {
padding: 0;
}
.waves-effect.waves-blue .waves-ripple {
background-color: #03a9f4;
}
.preloader-background {
display: flex;
align-items: center;
justify-content: center;
background-color: #eee;
position: fixed;
z-index: 10000;
top: 0;
left: 0;
right: 0;
bottom: 0;
}
.modal-content_nopad {
position: relative;
}
.modal-content_nopad .modal_btn {
position: absolute;
top: 2px;
right:0;
}
footer {
z-index: 10;
}
.shadow {
height: 25px;
margin: -26px;
min-width: 320px;
background-color: transparent;
}
.ace_optionsMenuEntry input {
position: relative !important;
left: 0 !important;
opacity: 1 !important;
}
.ace_optionsMenuEntry select {
position: relative !important;
left: 0 !important;
opacity: 1 !important;
display: block !important;
}
.ace_search {
background-color: #eeeeee !important;
border-radius: 0 !important;
border: 0 !important;
box-shadow: 0 6px 10px 0 rgba(0, 0, 0, 0.14), 0 1px 18px 0 rgba(0, 0, 0, 0.12), 0 3px 5px -1px rgba(0, 0, 0, 0.3);
}
.ace_search_form {
background-color: #fafafa;
width: 300px;
border: 0 !important;
border-radius: 0 !important;
outline: none !important;
box-shadow: 0 2px 2px 0 rgba(0, 0, 0, 0.14), 0 1px 5px 0 rgba(0, 0, 0, 0.12), 0 2px 1px -2px rgba(0, 0, 0, 0.2);
margin-bottom: 15px !important;
margin-left: 8px !important;
color: #424242 !important;
}
.ace_search_field {
padding-left: 4px !important;
margin-left: 10px !important;
max-width: 275px !important;
font-family: 'Roboto', sans-serif !important;
border-bottom: 1px solid #03a9f4 !important;
color: #424242 !important;
}
.ace_replace_form {
background-color: #fafafa;
width: 300px;
border: 0 !important;
border-radius: 0 !important;
outline: none !important;
box-shadow: 0 2px 2px 0 rgba(0, 0, 0, 0.14), 0 1px 5px 0 rgba(0, 0, 0, 0.12), 0 2px 1px -2px rgba(0, 0, 0, 0.2);
margin-bottom: 15px !important;
margin-left: 8px !important;
}
.ace_search_options {
background-color: #eeeeee;
text-align: left !important;
letter-spacing: .5px !important;
transition: .2s ease-out;
font-family: 'Roboto', sans-serif !important;
font-size: 130%;
top: 0 !important;
}
.ace_searchbtn {
text-decoration: none !important;
min-width: 40px !important;
min-height: 30px !important;
color: #424242 !important;
text-align: center !important;
letter-spacing: .5px !important;
transition: .2s ease-out;
cursor: pointer;
font-family: 'Roboto', sans-serif !important;
}
.ace_searchbtn:hover {
background-color: #03a9f4;
}
.ace_replacebtn {
text-decoration: none !important;
min-width: 40px !important;
min-height: 30px !important;
color: #424242 !important;
text-align: center !important;
letter-spacing: .5px !important;
transition: .2s ease-out;
cursor: pointer;
font-family: 'Roboto', sans-serif !important;
}
.ace_replacebtn:hover {
background-color: #03a9f4;
}
.ace_button {
text-decoration: none !important;
min-width: 40px !important;
min-height: 30px !important;
border-radius: 0 !important;
outline: none !important;
color: #424242 !important;
background-color: #fafafa;
text-align: center;
letter-spacing: .5px;
transition: .2s ease-out;
cursor: pointer;
font-family: 'Roboto', sans-serif !important;
}
.ace_button:hover {
background-color: #03a9f4 !important;
}
.ace_invisible {
color: rgba(191, 191, 191, 0.5) !important;
}
.fbicon_pad {
min-height: 64px !important;
}
.fbmenuicon_pad {
min-height: 64px;
margin-top: 6px !important;
margin-right: 18px !important;
color: #616161 !important;
}
.no-padding {
padding: 0 !important;
}
.branch_select {
min-width: 300px !important;
font-size: 14px !important;
font-weight: 400 !important;
}
a.branch_hover:hover {
background-color: #e0e0e0 !important;
}
.hidesave {
opacity: 0;
-webkit-transition: all 0.5s ease-in-out;
-moz-transition: all 0.5s ease-in-out;
-ms-transition: all 0.5s ease-in-out;
-o-transition: all 0.5s ease-in-out;
transition: all 0.5s ease-in-out;
}
.pathtip_color {
-webkit-animation: fadeinout 1.75s linear 1 forwards;
animation: fadeinout 1.75s linear 1 forwards;
}
@-webkit-keyframes fadeinout {
0% { background-color: #f5f5f5; }
50% { background-color: #ff8a80; }
100% { background-color: #f5f5f5; }
}
@keyframes fadeinout {
0% { background-color: #f5f5f5; }
50% { background-color: #ff8a80; }
100% { background-color: #f5f5f5; }
}
#lint-status {
position: absolute;
top: 0.75rem;
right: 10px;
}
.cursor-pointer {
cursor: pointer;
}
#modal_lint.modal {
width: 80%;
}
#modal_lint textarea {
resize: none;
height: auto;
}
</style>
<script src="https://cdnjs.cloudflare.com/ajax/libs/ace/1.4.7/ace.js" type="text/javascript" charset="utf-8"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/ace/1.4.7/ext-modelist.js" type="text/javascript" charset="utf-8"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/ace/1.4.7/ext-language_tools.js" type="text/javascript" charset="utf-8"></script>
</head>
<body>
<div class="preloader-background">
<div class="preloader-wrapper big active">
<div class="spinner-layer spinner-blue">
<div class="circle-clipper left">
<div class="circle"></div>
</div>
<div class="gap-patch">
<div class="circle"></div>
</div>
<div class="circle-clipper right">
<div class="circle"></div>
</div>
</div>
<div class="spinner-layer spinner-red">
<div class="circle-clipper left">
<div class="circle"></div>
</div>
<div class="gap-patch">
<div class="circle"></div>
</div>
<div class="circle-clipper right">
<div class="circle"></div>
</div>
</div>
<div class="spinner-layer spinner-yellow">
<div class="circle-clipper left">
<div class="circle"></div>
</div>
<div class="gap-patch">
<div class="circle"></div>
</div>
<div class="circle-clipper right">
<div class="circle"></div>
</div>
</div>
<div class="spinner-layer spinner-green">
<div class="circle-clipper left">
<div class="circle"></div>
</div>
<div class="gap-patch">
<div class="circle"></div>
</div>
<div class="circle-clipper right">
<div class="circle"></div>
</div>
</div>
</div>
</div>
<header>
<div class="navbar-fixed">
<nav class="light-blue">
<div class="nav-wrapper">
<ul class="left">
<li><a class="waves-effect waves-light tooltipped files-collapse hide-on-small-only" data-activates="slide-out" data-position="bottom" data-delay="500" data-tooltip="Browse Filesystem" style="padding-left: 25px; padding-right: 25px;"><i class="material-icons">folder</i></a></li>
<li><a class="waves-effect waves-light tooltipped dropdown-button hide-on-small-only" data-activates="file_history" data-beloworigin="true" data-delay="500" data-tooltip="File History" style="padding-left: 25px; padding-right: 25px;"><i class="material-icons">history</i></a></li>
<li><a class="waves-effect waves-light files-collapse hide-on-med-and-up" data-activates="slide-out" style="padding-left: 25px; padding-right: 25px;"><i class="material-icons">folder</i></a></li>
<li><a class="waves-effect waves-light dropdown-button hide-on-med-and-up" data-activates="file_history" data-beloworigin="true" style="padding-left: 25px; padding-right: 25px;"><i class="material-icons">history</i></a></li>
</ul>
<ul class="right">
<li><a class="waves-effect waves-light tooltipped hide-on-small-only markdirty hidesave" data-position="bottom" data-delay="500" data-tooltip="Save" onclick="save_check()"><i class="material-icons">save</i></a></li>
<li><a class="waves-effect waves-light tooltipped hide-on-small-only modal-trigger" data-position="bottom" data-delay="500" data-tooltip="Close" href="#modal_close"><i class="material-icons">close</i></a></li>
<li><a class="waves-effect waves-light tooltipped hide-on-small-only" data-position="bottom" data-delay="500" data-tooltip="Search" onclick="editor.execCommand('replace')"><i class="material-icons">search</i></a></li>
<li><a class="waves-effect waves-light dropdown-button hide-on-small-only $versionclass" data-activates="dropdown_menu" data-beloworigin="true"><i class="material-icons right">settings</i></a></li>
<li><a class="waves-effect waves-light hide-on-med-and-up markdirty hidesave" onclick="save_check()"><i class="material-icons">save</i></a></li>
<li><a class="waves-effect waves-light hide-on-med-and-up modal-trigger" href="#modal_close"><i class="material-icons">close</i></a></li>
<li><a class="waves-effect waves-light hide-on-med-and-up" onclick="editor.execCommand('replace')"><i class="material-icons">search</i></a></li>
<li><a class="waves-effect waves-light dropdown-button hide-on-med-and-up $versionclass" data-activates="dropdown_menu_mobile" data-beloworigin="true"><i class="material-icons right">settings</i></a></li>
</ul>
</div>
</nav>
</div>
</header>
<main>
<ul id="file_history" class="dropdown-content z-depth-4"></ul>
<ul id="dropdown_menu" class="dropdown-content z-depth-4">
<li><a onclick="localStorage.setItem('new_tab', true);window.open(window.location.origin+window.location.pathname, '_blank');">New tab</a></li>
<li class="divider"></li>
<li><a target="_blank" href="https://home-assistant.io/components/">Components</a></li>
<li><a target="_blank" href="https://materialdesignicons.com/">Material Icons</a></li>
<li><a href="#" data-activates="ace_settings" class="ace_settings-collapse">Editor Settings</a></li>
<li><a class="modal-trigger" href="#modal_netstat" onclick="get_netstat()">Network status</a></li>
<li><a class="modal-trigger" href="#modal_about">About HASS-Configurator</a></li>
<li class="divider"></li>
<!--<li><a href="#modal_check_config">Check HASS Configuration</a></li>-->
<li><a class="modal-trigger" href="#modal_events">Observe events</a></li>
<li><a class="modal-trigger" href="#modal_reload_automations">Reload automations</a></li>
<li><a class="modal-trigger" href="#modal_reload_scripts">Reload scripts</a></li>
<li><a class="modal-trigger" href="#modal_reload_groups">Reload groups</a></li>
<li><a class="modal-trigger" href="#modal_reload_core">Reload core</a></li>
<li><a class="modal-trigger" href="#modal_restart">Restart HASS</a></li>
<li class="divider"></li>
<li><a class="modal-trigger" href="#modal_exec_command">Execute shell command</a></li>
<li><a onclick="toggle_hass_panels()">Toggle HASS panel</a></li>
</ul>
<ul id="dropdown_menu_mobile" class="dropdown-content z-depth-4">
<li><a onclick="localStorage.setItem('new_tab', true);window.open(window.location.origin+window.location.pathname, '_blank');">New tab</a></li>
<li class="divider"></li>
<li><a target="_blank" href="https://home-assistant.io/help/">Help</a></li>
<li><a target="_blank" href="https://home-assistant.io/components/">Components</a></li>
<li><a target="_blank" href="https://materialdesignicons.com/">Material Icons</a></li>
<li><a href="#" data-activates="ace_settings" class="ace_settings-collapse">Editor Settings</a></li>
<li><a class="modal-trigger" href="#modal_netstat" onclick="get_netstat()">Network status</a></li>
<li><a class="modal-trigger" href="#modal_about">About HASS-Configurator</a></li>
<li class="divider"></li>
<!--<li><a href="#modal_check_config">Check HASS Configuration</a></li>-->
<li><a class="modal-trigger" href="#modal_events">Observe events</a></li>
<li><a class="modal-trigger" href="#modal_reload_automations">Reload automations</a></li>
<li><a class="modal-trigger" href="#modal_reload_scripts">Reload scripts</a></li>
<li><a class="modal-trigger" href="#modal_reload_groups">Reload groups</a></li>
<li><a class="modal-trigger" href="#modal_reload_core">Reload core</a></li>
<li><a class="modal-trigger" href="#modal_restart">Restart HASS</a></li>
<li class="divider"></li>
<li><a class="modal-trigger" href="#modal_exec_command">Execute shell command</a></li>
<li><a onclick="toggle_hass_panels()">Toggle HASS panel</a></li>
</ul>
<ul id="dropdown_gitmenu" class="dropdown-content z-depth-4">
<li><a class="modal-trigger" href="#modal_init" class="nowrap waves-effect">git init</a></li>
<li><a class="modal-trigger" href="#modal_commit" class="nowrap waves-effect">git commit</a></li>
<li><a class="modal-trigger" href="#modal_push" class="nowrap waves-effect">git push</a></li>
<li><a class="modal-trigger" href="#modal_stash" class="nowrap waves-effect">git stash</a></li>
</ul>
<ul id="dropdown_gitmenu_mobile" class="dropdown-content z-depth-4">
<li><a class="modal-trigger" href="#modal_init" class="nowrap waves-effect">git init</a></li>
<li><a class="modal-trigger" href="#modal_commit" class="nowrap waves-effect">git commit</a></li>
<li><a class="modal-trigger" href="#modal_push" class="nowrap waves-effect">git push</a></li>
<li><a class="modal-trigger" href="#modal_stash" class="nowrap waves-effect">git stash</a></li>
</ul>
<div id="modal_acekeyboard" class="modal bottom-sheet modal-fixed-footer">
<div class="modal-content centered">
<h4 class="grey-text text-darken-3">Ace Keyboard Shortcuts<i class="mdi mdi-keyboard right grey-text text-darken-3" style="font-size: 2rem;"></i></h4>
<br>
<ul class="collapsible popout" data-collapsible="expandable">
<li>
<div class="collapsible-header"><i class="material-icons">view_headline</i>Line Operations</div>
<div class="collapsible-body">
<table class="bordered highlight centered">
<thead>
<tr>
<th>Windows/Linux</th>
<th>Mac</th>
<th>Action</th>
</tr>
</thead>
<tbody>
<tr>
<td>Ctrl-D</td>
<td>Command-D</td>
<td>Remove line</td>
</tr>
<tr>
<td>Alt-Shift-Down</td>
<td>Command-Option-Down</td>
<td>Copy lines down</td>
</tr>
<tr>
<td>Alt-Shift-Up</td>
<td>Command-Option-Up</td>
<td>Copy lines up</td>
</tr>
<tr>
<td>Alt-Down</td>
<td>Option-Down</td>
<td>Move lines down</td>
</tr>
<tr>
<td>Alt-Up</td>
<td>Option-Up</td>
<td>Move lines up</td>
</tr>
<tr>
<td>Alt-Delete</td>
<td>Ctrl-K</td>
<td>Remove to line end</td>
</tr>
<tr>
<td>Alt-Backspace</td>
<td>Command-Backspace</td>
<td>Remove to linestart</td>
</tr>
<tr>
<td>Ctrl-Backspace</td>
<td>Option-Backspace, Ctrl-Option-Backspace</td>
<td>Remove word left</td>
</tr>
<tr>
<td>Ctrl-Delete</td>
<td>Option-Delete</td>
<td>Remove word right</td>
</tr>
<tr>
<td>---</td>
<td>Ctrl-O</td>
<td>Split line</td>
</tr>
</tbody>
</table>
</div>
</li>
<li>
<div class="collapsible-header"><i class="material-icons">photo_size_select_small</i>Selection</div>
<div class="collapsible-body">
<table class="bordered highlight centered">
<thead>
<tr>
<th >Windows/Linux</th>
<th >Mac</th>
<th >Action</th>
</tr>
</thead>
<tbody>
<tr>
<td >Ctrl-A</td>
<td >Command-A</td>
<td >Select all</td>
</tr>
<tr>
<td >Shift-Left</td>
<td >Shift-Left</td>
<td >Select left</td>
</tr>
<tr>
<td >Shift-Right</td>
<td >Shift-Right</td>
<td >Select right</td>
</tr>
<tr>
<td >Ctrl-Shift-Left</td>
<td >Option-Shift-Left</td>
<td >Select word left</td>
</tr>
<tr>
<td >Ctrl-Shift-Right</td>
<td >Option-Shift-Right</td>
<td >Select word right</td>
</tr>
<tr>
<td >Shift-Home</td>
<td >Shift-Home</td>
<td >Select line start</td>
</tr>
<tr>
<td >Shift-End</td>
<td >Shift-End</td>
<td >Select line end</td>
</tr>
<tr>
<td >Alt-Shift-Right</td>
<td >Command-Shift-Right</td>
<td >Select to line end</td>
</tr>
<tr>
<td >Alt-Shift-Left</td>
<td >Command-Shift-Left</td>
<td >Select to line start</td>
</tr>
<tr>
<td >Shift-Up</td>
<td >Shift-Up</td>
<td >Select up</td>
</tr>
<tr>
<td >Shift-Down</td>
<td >Shift-Down</td>
<td >Select down</td>
</tr>
<tr>
<td >Shift-PageUp</td>
<td >Shift-PageUp</td>
<td >Select page up</td>
</tr>
<tr>
<td >Shift-PageDown</td>
<td >Shift-PageDown</td>
<td >Select page down</td>
</tr>
<tr>
<td >Ctrl-Shift-Home</td>
<td >Command-Shift-Up</td>
<td >Select to start</td>
</tr>
<tr>
<td >Ctrl-Shift-End</td>
<td >Command-Shift-Down</td>
<td >Select to end</td>
</tr>
<tr>
<td >Ctrl-Shift-D</td>
<td >Command-Shift-D</td>
<td >Duplicate selection</td>
</tr>
<tr>
<td >Ctrl-Shift-P</td>
<td >---</td>
<td >Select to matching bracket</td>
</tr>
</tbody>
</table>
</div>
</li>
<li>
<div class="collapsible-header"><i class="material-icons">multiline_chart</i>Multicursor</div>
<div class="collapsible-body">
<table class="bordered highlight centered">
<thead>
<tr>
<th>Windows/Linux</th>
<th>Mac</th>
<th>Action</th>
</tr>
</thead>
<tbody>
<tr>
<td>Ctrl-Alt-Up</td>
<td>Ctrl-Option-Up</td>
<td>Add multi-cursor above</td>
</tr>
<tr>
<td>Ctrl-Alt-Down</td>
<td>Ctrl-Option-Down</td>
<td>Add multi-cursor below</td>
</tr>
<tr>
<td>Ctrl-Alt-Right</td>
<td>Ctrl-Option-Right</td>
<td>Add next occurrence to multi-selection</td>
</tr>
<tr>
<td>Ctrl-Alt-Left</td>
<td>Ctrl-Option-Left</td>
<td>Add previous occurrence to multi-selection</td>
</tr>
<tr>
<td>Ctrl-Alt-Shift-Up</td>
<td>Ctrl-Option-Shift-Up</td>
<td>Move multicursor from current line to the line above</td>
</tr>
<tr>
<td>Ctrl-Alt-Shift-Down</td>
<td>Ctrl-Option-Shift-Down</td>
<td>Move multicursor from current line to the line below</td>
</tr>
<tr>
<td>Ctrl-Alt-Shift-Right</td>
<td>Ctrl-Option-Shift-Right</td>
<td>Remove current occurrence from multi-selection and move to next</td>
</tr>
<tr>
<td>Ctrl-Alt-Shift-Left</td>
<td>Ctrl-Option-Shift-Left</td>
<td>Remove current occurrence from multi-selection and move to previous</td>
</tr>
<tr>
<td>Ctrl-Shift-L</td>
<td>Ctrl-Shift-L</td>
<td>Select all from multi-selection</td>
</tr>
</tbody>
</table>
</div>
</li>
<li>
<div class="collapsible-header"><i class="material-icons">call_missed_outgoing</i>Go To</div>
<div class="collapsible-body">
<table class="bordered highlight centered">
<thead>
<tr>
<th>Windows/Linux</th>
<th>Mac</th>
<th>Action</th>
</tr>
</thead>
<tbody>
<tr>
<td>Left</td>
<td>Left, Ctrl-B</td>
<td>Go to left</td>
</tr>
<tr>
<td>Right</td>
<td>Right, Ctrl-F</td>
<td>Go to right</td>
</tr>
<tr>
<td>Ctrl-Left</td>
<td>Option-Left</td>
<td>Go to word left</td>
</tr>
<tr>
<td>Ctrl-Right</td>
<td>Option-Right</td>
<td>Go to word right</td>
</tr>
<tr>
<td>Up</td>
<td>Up, Ctrl-P</td>
<td>Go line up</td>
</tr>
<tr>
<td>Down</td>
<td>Down, Ctrl-N</td>
<td>Go line down</td>
</tr>
<tr>
<td>Alt-Left, Home</td>
<td>Command-Left, Home, Ctrl-A</td>
<td>Go to line start</td>
</tr>
<tr>
<td>Alt-Right, End</td>
<td>Command-Right, End, Ctrl-E</td>
<td>Go to line end</td>
</tr>
<tr>
<td>PageUp</td>
<td>Option-PageUp</td>
<td>Go to page up</td>
</tr>
<tr>
<td>PageDown</td>
<td>Option-PageDown, Ctrl-V</td>
<td>Go to page down</td>
</tr>
<tr>
<td>Ctrl-Home</td>
<td>Command-Home, Command-Up</td>
<td>Go to start</td>
</tr>
<tr>
<td>Ctrl-End</td>
<td>Command-End, Command-Down</td>
<td>Go to end</td>
</tr>
<tr>
<td>Ctrl-L</td>
<td>Command-L</td>
<td>Go to line</td>
</tr>
<tr>
<td>Ctrl-Down</td>
<td>Command-Down</td>
<td>Scroll line down</td>
</tr>
<tr>
<td>Ctrl-Up</td>
<td>---</td>
<td>Scroll line up</td>
</tr>
<tr>
<td>Ctrl-P</td>
<td>---</td>
<td>Go to matching bracket</td>
</tr>
<tr>
<td>---</td>
<td>Option-PageDown</td>
<td>Scroll page down</td>
</tr>
<tr>
<td>---</td>
<td>Option-PageUp</td>
<td>Scroll page up</td>
</tr>
</tbody>
</table>
</div>
</li>
<li>
<div class="collapsible-header"><i class="material-icons">find_replace</i>Find/Replace</div>
<div class="collapsible-body">
<table class="bordered highlight centered">
<thead>
<tr>
<th>Windows/Linux</th>
<th>Mac</th>
<th>Action</th>
</tr>
</thead>
<tbody>
<tr>
<td>Ctrl-F</td>
<td>Command-F</td>
<td>Find</td>
</tr>
<tr>
<td>Ctrl-H</td>
<td>Command-Option-F</td>
<td>Replace</td>
</tr>
<tr>
<td>Ctrl-K</td>
<td>Command-G</td>
<td>Find next</td>
</tr>
<tr>
<td>Ctrl-Shift-K</td>
<td>Command-Shift-G</td>
<td>Find previous</td>
</tr>
</tbody>
</table>
</div>
</li>
<li>
<div class="collapsible-header"><i class="material-icons">all_out</i>Folding</div>
<div class="collapsible-body">
<table class="bordered highlight centered">
<thead>
<tr>
<th>Windows/Linux</th>
<th>Mac</th>
<th>Action</th>
</tr>
</thead>
<tbody>
<tr>
<td>Alt-L, Ctrl-F1</td>
<td>Command-Option-L, Command-F1</td>
<td>Fold selection</td>
</tr>
<tr>
<td>Alt-Shift-L, Ctrl-Shift-F1</td>
<td>Command-Option-Shift-L, Command-Shift-F1</td>
<td>Unfold</td>
</tr>
<tr>
<td>Alt-0</td>
<td>Command-Option-0</td>
<td>Fold all</td>
</tr>
<tr>
<td>Alt-Shift-0</td>
<td>Command-Option-Shift-0</td>
<td>Unfold all</td>
</tr>
</tbody>
</table>
</div>
</li>
<li>
<div class="collapsible-header"><i class="material-icons">devices_other</i>Other</div>
<div class="collapsible-body">
<table class="bordered highlight centered">
<thead>
<tr>
<th>Windows/Linux</th>
<th>Mac</th>
<th>Action</th>
</tr>
</thead>
<tbody>
<tr>
<td>Tab</td>
<td>Tab</td>
<td>Indent</td>
</tr>
<tr>
<td>Shift-Tab</td>
<td>Shift-Tab</td>
<td>Outdent</td>
</tr>
<tr>
<td>Ctrl-Z</td>
<td>Command-Z</td>
<td>Undo</td>
</tr>
<tr>
<td>Ctrl-Shift-Z, Ctrl-Y</td>
<td>Command-Shift-Z, Command-Y</td>
<td>Redo</td>
</tr>
<tr>
<td>Ctrl-,</td>
<td>Command-,</td>
<td>Show the settings menu</td>
</tr>
<tr>
<td>Ctrl-/</td>
<td>Command-/</td>
<td>Toggle comment</td>
</tr>
<tr>
<td>Ctrl-T</td>
<td>Ctrl-T</td>
<td>Transpose letters</td>
</tr>
<tr>
<td>Ctrl-Enter</td>
<td>Command-Enter</td>
<td>Enter full screen</td>
</tr>
<tr>
<td>Ctrl-Shift-U</td>
<td>Ctrl-Shift-U</td>
<td>Change to lower case</td>
</tr>
<tr>
<td>Ctrl-U</td>
<td>Ctrl-U</td>
<td>Change to upper case</td>
</tr>
<tr>
<td>Insert</td>
<td>Insert</td>
<td>Overwrite</td>
</tr>
<tr>
<td>Ctrl-Shift-E</td>
<td>Command-Shift-E</td>
<td>Macros replay</td>
</tr>
<tr>
<td>Ctrl-Alt-E</td>
<td>---</td>
<td>Macros recording</td>
</tr>
<tr>
<td>Delete</td>
<td>---</td>
<td>Delete</td>
</tr>
<tr>
<td>---</td>
<td>Ctrl-L</td>
<td>Center selection</td>
</tr>
</tbody>
</table>
</div>
</li>
</ul>
</div>
<div class="modal-footer">
<a class="modal-action modal-close waves-effect btn-flat light-blue-text">Close</a>
</div>
</div>
<div id="modal_events" class="modal modal-fixed-footer">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Event Observer<i class="grey-text text-darken-3 material-icons right" style="font-size: 2rem;">error_outline</i></h4>
<br />
<div class="row">
<form class="col s12">
<div class="row">
<div class="input-field col s12">
<input type="text" id="ws_uri" placeholder="ws://127.0.0.1:8123/api/websocket" value="$hass_ws_address"/>
<label for="ws_uri">Websocket URI</label>
</div>
</div>
<div class="row">
<div class="input-field col s12">
<input type="password" id="ws_password" value="$api_password"/>
<label for="ws_password">API password</label>
</div>
</div>
<div class="row">
<div class="input-field col s12">
<textarea id="ws_events" class="materialize-textarea"></textarea>
</div>
</div>
</form>
</div>
</div>
<div class="modal-footer">
<a onclick="ws_connect()" id="ws_b_c" class="modal-action waves-effect waves-green btn-flat light-blue-text">Connect</a>
<a onclick="ws_disconnect()" id="ws_b_d" class="modal-action waves-effect waves-green btn-flat light-blue-text disabled">Disconnect</a>
<a onclick="ws_disconnect()" class="modal-action modal-close waves-effect waves-red btn-flat light-blue-text">Close</a>
</div>
</div>
<div id="modal_save" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Save<i class="grey-text text-darken-3 material-icons right" style="font-size: 2rem;">save</i></h4>
<p>Do you really want to save?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="save()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_upload" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Upload File<i class="grey-text text-darken-3 material-icons right" style="font-size: 2.28rem;">file_upload</i></h4>
<p>Please choose a file to upload</p>
<form action="#" id="uploadform">
<div class="file-field input-field">
<div class="btn light-blue waves-effect">
<span>File</span>
<input type="file" id="uploadfile" />
</div>
<div class="file-path-wrapper">
<input class="file-path validate" type="text">
</div>
</div>
</form>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">Cancel</a>
<a onclick="upload()" class="modal-action modal-close waves-effect waves-green btn-flat light-blue-text">OK</a>
</div>
</div>
<div id="modal_init" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">git init<i class="mdi mdi-git right grey-text text-darken-3" style="font-size: 2.48rem;"></i></h4>
<p>Are you sure you want to initialize a repository at the current path?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">Cancel</a>
<a onclick="gitinit()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">OK</a>
</div>
</div>
<div id="modal_commit" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">git commit<i class="mdi mdi-git right grey-text text-darken-3" style="font-size: 2.48rem;"></i></h4>
<div class="row">
<div class="input-field col s12">
<input type="text" id="commitmessage">
<label class="active" for="commitmessage">Commit message</label>
</div>
</div>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">Cancel</a>
<a onclick="commit(document.getElementById('commitmessage').value)" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">OK</a>
</div>
</div>
<div id="modal_push" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">git push<i class="mdi mdi-git right grey-text text-darken-3" style="font-size: 2.48rem;"></i></h4>
<p>Are you sure you want to push your committed changes to the configured remote / origin?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">Cancel</a>
<a onclick="gitpush()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">OK</a>
</div>
</div>
<div id="modal_stash" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">git stash<i class="mdi mdi-git right grey-text text-darken-3" style="font-size: 2.48rem;"></i></h4>
<p>Are you sure you want to stash your changes?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">Cancel</a>
<a onclick="gitstash()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">OK</a>
</div>
</div>
<div id="modal_close" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Close File<i class="grey-text text-darken-3 material-icons right" style="font-size: 2.28rem;">close</i></h4>
<p>Are you sure you want to close the current file? Unsaved changes will be lost.</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="closefile()" class="modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_rename" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Rename</h4>
<p>Please enter a new name for <span class="fb_currentfile"></span>.</p>
<input type="text" id="rename_name_new" />
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">Cancel</a>
<a onclick="rename_file()" class="modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Apply</a>
</div>
</div>
<div id="modal_delete" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Delete</h4>
<p>Are you sure you want to delete <span class="fb_currentfile"></span>?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="delete_element()" class="modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_gitadd" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">git add<i class="mdi mdi-git right grey-text text-darken-3" style="font-size: 2.48rem;"></i></h4>
<p>Are you sure you want to add <span class="fb_currentfile"></span> to the index?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="gitadd()" class="modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_check_config" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Check configuration<i class="mdi mdi-settings right grey-text text-darken-3" style="font-size: 2rem;"></i></h4>
<p>Do you want to check the configuration?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="check_config()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_reload_automations" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Reload automations<i class="mdi mdi-settings right grey-text text-darken-3" style="font-size: 2rem;"></i></h4>
<p>Do you want to reload the automations?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="reload_automations()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_reload_scripts" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Reload scripts<i class="mdi mdi-settings right grey-text text-darken-3" style="font-size: 2rem;"></i></h4>
<p>Do you want to reload the scripts?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="reload_scripts()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_reload_groups" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Reload groups<i class="mdi mdi-settings right grey-text text-darken-3" style="font-size: 2rem;"></i></h4>
<p>Do you want to reload the groups?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="reload_groups()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_reload_core" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Reload core<i class="mdi mdi-settings right grey-text text-darken-3" style="font-size: 2rem;"></i></h4>
<p>Do you want to reload the core?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="reload_core()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_restart" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Restart<i class="mdi mdi-restart right grey-text text-darken-3" style="font-size: 2rem;"></i></h4>
<p>Do you really want to restart Home Assistant?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="restart()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_a_net_remove" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Remove allowed network / IP<i class="mdi mdi-settings right grey-text text-darken-3" style="font-size: 2rem;"></i></h4>
<p>Do you really want to remove the network / IP <b><span id="removenet"></span></b> from the list of allowed networks?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="a_net_remove()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_a_net_add" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Add allowed network / IP<i class="mdi mdi-settings right grey-text text-darken-3" style="font-size: 2rem;"></i></h4>
<p>Do you really want to add the network / IP <b><span id="addnet"></span></b> to the list of allowed networks?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="a_net_add()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_unban" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Unban IP<i class="mdi mdi-settings right grey-text text-darken-3" style="font-size: 2rem;"></i></h4>
<p>Do you really want to unban the IP <b><span id="unbanip"></span></b>?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="banned_unban()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_ban" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Ban IP<i class="mdi mdi-settings right grey-text text-darken-3" style="font-size: 2rem;"></i></h4>
<p>Do you really want to ban the IP <b><span id="banip"></span></b>?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="banned_ban()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_exec_command" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Execute shell command<i class="mdi mdi-laptop right grey-text text-darken-3" style="font-size: 2rem;"></i></h4>
<pre class="col s6" id="command_history"></pre>
<br>
<div class="row">
<div class="input-field col s12">
<input placeholder="/bin/ls -l /var/log" id="commandline" type="text">
<label for="commandline">Command</label>
</div>
</div>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">Close</a>
<a onclick="document.getElementById('command_history').innerText='';" class=" modal-action waves-effect waves-green btn-flat light-blue-text">Clear</a>
<a onclick="exec_command()" class=" modal-action waves-effect waves-green btn-flat light-blue-text">Execute</a>
</div>
</div>
<div id="modal_markdirty" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Unsaved Changes<i class="grey-text text-darken-3 material-icons right" style="font-size: 2rem;">save</i></h4>
<p>You have unsaved changes in the current file. Please save the changes or close the file before opening a new one.</p>
</div>
<div class="modal-footer">
<a class="modal-action modal-close waves-effect waves-red btn-flat light-blue-text">Abort</a>
<a onclick="document.getElementById('currentfile').value='';editor.getSession().setValue('');$('.markdirty').each(function(i, o){o.classList.remove('red');});" class="modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Close file</a>
<a onclick="save()" class="modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Save changes</a>
</div>
</div>
<div id="modal_newfolder" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">New Folder<i class="grey-text text-darken-3 material-icons right" style="font-size: 2rem;">create_new_folder</i></h4>
<br>
<div class="row">
<div class="input-field col s12">
<input type="text" id="newfoldername">
<label class="active" for="newfoldername">New Folder Name</label>
</div>
</div>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">Cancel</a>
<a onclick="newfolder(document.getElementById('newfoldername').value)" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">OK</a>
</div>
</div>
<div id="modal_newfile" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">New File<i class="grey-text text-darken-3 material-icons right" style="font-size: 2rem;">note_add</i></h4>
<br>
<div class="row">
<div class="input-field col s12">
<input type="text" id="newfilename">
<label class="active" for="newfilename">New File Name</label>
</div>
</div>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">Cancel</a>
<a onclick="newfile(document.getElementById('newfilename').value)" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">OK</a>
</div>
</div>
<div id="modal_newbranch" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">New Branch<i class="mdi mdi-git right grey-text text-darken-3" style="font-size: 2.48rem;"></i></h4>
<div class="row">
<div class="input-field col s12">
<input type="text" id="newbranch">
<label class="active" for="newbranch">New Branch Name</label>
</div>
</div>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">Cancel</a>
<a onclick="newbranch(document.getElementById('newbranch').value)" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">OK</a>
</div>
</div>
<div id="modal_netstat" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Network status<i class="mdi mdi-network right grey-text text-darken-3" style="font-size: 2.48rem;"></i></h4>
<p><label for="your_address">Your address: </label><span id="your_address">$your_address</span></p>
<p><label for="listening_address">Listening address: </label><span id="listening_address">$listening_address</span></p>
<p><label for="hass_api_address">HASS API address: </label><span id="hass_api_address">$hass_api_address</span></p>
<p>Modifying the following lists is not persistent. To statically control access please use the configuration file.</p>
<p>
<ul id="allowed_networks" class="collection with-header"></ul>
<br />
<div class="input-field">
<a href="#" class="prefix" onclick="helper_a_net_add()"><i class="mdi mdi-plus-circle prefix light-blue-text"></i></a>
<input placeholder="192.168.0.0/16" id="add_net_ip" type="text">
<label for="add_net_ip">Add network / IP</label>
</div>
</p>
<p>
<ul id="banned_ips" class="collection with-header"></ul>
<br />
<div class="input-field">
<a href="#" class="prefix" onclick="helper_banned_ban()"><i class="mdi mdi-plus-circle prefix light-blue-text"></i></a>
<input placeholder="1.2.3.4" id="add_banned_ip" type="text">
<label for="add_banned_ip">Ban IP</label>
</div>
</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">Cancel</a>
</div>
</div>
<div id="modal_about" class="modal modal-fixed-footer">
<div class="modal-content">
<h4 class="grey-text text-darken-3"><a class="black-text" href="https://github.com/danielperna84/hass-configurator/" target="_blank">HASS Configurator</a></h4>
<p>Version: <a class="$versionclass" href="https://github.com/danielperna84/hass-configurator/releases/" target="_blank">$current</a></p>
<p>Web-based file editor designed to modify configuration files of <a class="light-blue-text" href="https://home-assistant.io/" target="_blank">Home Assistant</a> or other textual files. Use at your own risk.</p>
<p>Published under the MIT license</p>
<p>Developed by:</p>
<ul>
<li>
<div class="chip"> <img src="https://avatars3.githubusercontent.com/u/7396998?v=4&s=400" alt="Contact Person"> <a class="black-text" href="https://github.com/danielperna84" target="_blank">Daniel Perna</a> </div>
</li>
<li>
<div class="chip"> <img src="https://avatars2.githubusercontent.com/u/1509640?v=4&s=400" alt="Contact Person"> <a class="black-text" href="https://github.com/jmart518" target="_blank">JT Martinez</a> </div>
</li>
<li>
<div class="chip"> <img src="https://avatars0.githubusercontent.com/u/1525413?v=4&s=400" alt="Contact Person"> <a class="black-text" href="https://github.com/AtoxIO" target="_blank">AtoxIO</a> </div>
</li>
<li>
<div class="chip"> <img src="https://avatars0.githubusercontent.com/u/646513?s=400&v=4" alt="Contact Person"> <a class="black-text" href="https://github.com/Munsio" target="_blank">Martin Treml</a> </div>
</li>
<li>
<div class="chip"> <img src="https://avatars2.githubusercontent.com/u/1399443?s=460&v=4" alt="Contact Person"> <a class="black-text" href="https://github.com/sytone" target="_blank">Sytone</a> </div>
</li>
<li>
<div class="chip"> <img src="https://avatars3.githubusercontent.com/u/1561226?s=400&v=4" alt="Contact Person"> <a class="black-text" href="https://github.com/dimagoltsman" target="_blank">Dima Goltsman</a> </div>
</li>
<li>
<div class="chip"> <img src="https://avatars3.githubusercontent.com/u/14281572?s=400&v=4" alt="Contact Person"> <a class="black-text" href="https://github.com/emontnemery" target="_blank">Erik Montnemery</a> </div>
</li>
</ul>
<p>Libraries used:</p>
<div class="row">
<div class="col s6 m3 l3">
<a href="https://ace.c9.io/" target="_blank">
<div class="card grey lighten-3 hoverable waves-effect">
<div class="card-image">
<img src="https://drive.google.com/uc?export=view&id=0B6wTGzSOtvNBeld4U09LQkV0c2M">
</div>
<div class="card-content">
<p class="grey-text text-darken-2">Ace Editor</p>
</div>
</div>
</a>
</div>
<div class="col s6 m3 l3">
<a class="light-blue-text" href="http://materializecss.com/" target="_blank">
<div class="card grey lighten-3 hoverable">
<div class="card-image">
<img src="https://evwilkin.github.io/images/materializecss.png">
</div>
<div class="card-content">
<p class="grey-text text-darken-2">Materialize</p>
</div>
</div>
</a>
</div>
<div class="col s6 m3 l3">
<a class="light-blue-text" href="https://jquery.com/" target="_blank">
<div class="card grey lighten-3 hoverable">
<div class="card-image">
<img src="https://drive.google.com/uc?export=view&id=0B6wTGzSOtvNBdFI0ZXRGb01xNzQ">
</div>
<div class="card-content">
<p class="grey-text text-darken-2">JQuery</p>
</div>
</div>
</a>
</div>
<div class="col s6 m3 l3">
<a class="light-blue-text" href="https://gitpython.readthedocs.io" target="_blank">
<div class="card grey lighten-3 hoverable">
<div class="card-image">
<img src="https://drive.google.com/uc?export=view&id=0B6wTGzSOtvNBakk4ek1uRGxqYVE">
</div>
<div class="card-content">
<p class="grey-text text-darken-2">GitPython</p>
</div>
</div>
</a>
</div>
<div class="col s6 m3 l3">
<a class="light-blue-text" href="https://github.com/nodeca/js-yaml" target="_blank">
<div class="card grey lighten-3 hoverable">
<div class="card-image">
</div>
<div class="card-content">
<p class="grey-text text-darken-2">js-yaml</p>
</div>
</div>
</a>
</div>
</div>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect btn-flat light-blue-text">OK</a>
</div>
</div>
<div id="modal_lint" class="modal">
<div class="modal-content">
<textarea rows="8" readonly></textarea>
</div>
<div class="modal-footer">
<a class="modal-action modal-close waves-effect btn-flat light-blue-text">OK</a>
</div>
</div>
<!-- Main Editor Area -->
<div class="row">
<div id="hass_menu_l" class="col m4 l3 hide-on-small-only">
<br>
<div class="input-field col s12">
<select onchange="insert(this.value)">
<option value="" disabled selected>Select trigger platform</option>
<option value="event">Event</option>
<option value="homeassistant">Home Assistant</option>
<option value="mqtt">MQTT</option>
<option value="numeric_state">Numeric State</option>
<option value="state">State</option>
<option value="sun">Sun</option>
<option value="template">Template</option>
<option value="time">Time</option>
<option value="zone">Zone</option>
</select>
<label>Trigger platforms</label>
</div>
<div class="input-field col s12">
<select id="events" onchange="insert(this.value)"></select>
<label>Events</label>
</div>
<div class="input-field col s12">
<input type="text" id="entities-search" class="autocomplete" autocomplete="off" placeholder="sensor.example">
<label>Search entity</label>
</div>
<div class="input-field col s12">
<select id="entities" onchange="insert(this.value)"></select>
<label>Entities</label>
</div>
<div class="input-field col s12">
<select onchange="insert(this.value)">
<option value="" disabled selected>Select condition</option>
<option value="numeric_state">Numeric state</option>
<option value="state">State</option>
<option value="sun">Sun</option>
<option value="template">Template</option>
<option value="time">Time</option>
<option value="zone">Zone</option>
</select>
<label>Conditions</label>
</div>
<div class="input-field col s12">
<select id="services" onchange="insert(this.value)"> </select>
<label>Services</label>
</div>
</div>
<div id="filename_row" class="col s12 m8 l9">
<div class="card input-field col s12 grey lighten-4 hoverable pathtip">
<input class="currentfile_input" value="" id="currentfile" type="text">
<i class="material-icons" id="lint-status" onclick="show_lint_error()"></i>
</div>
</div>
<div class="col s12 m8 l9 z-depth-2" id="editor"></div>
<div id="edit_float" class="fixed-action-btn vertical click-to-toggle">
<a class="btn-floating btn-large red accent-2 hoverable">
<i class="material-icons">edit</i>
</a>
<ul>
<li><a class="btn-floating yellow tooltipped" data-position="left" data-delay="50" data-tooltip="Undo" onclick="editor.execCommand('undo')"><i class="material-icons">undo</i></a></li>
<li><a class="btn-floating green tooltipped" data-position="left" data-delay="50" data-tooltip="Redo" onclick="editor.execCommand('redo')"><i class="material-icons">redo</i></a></li>
<li><a class="btn-floating blue tooltipped" data-position="left" data-delay="50" data-tooltip="Indent" onclick="editor.execCommand('indent')"><i class="material-icons">format_indent_increase</i></a></li>
<li><a class="btn-floating orange tooltipped" data-position="left" data-delay="50" data-tooltip="Outdent" onclick="editor.execCommand('outdent')"><i class="material-icons">format_indent_decrease</i></a></li>
<li><a class="btn-floating brown tooltipped" data-position="left" data-delay="50" data-tooltip="Fold" onclick="toggle_fold()"><i class="material-icons">all_out</i></a></li>
<li><a class="btn-floating grey tooltipped" data-position="left" data-delay="50" data-tooltip="(Un)comment" onclick="editor.execCommand('togglecomment')">#</a></li>
</ul>
</div>
</div>
<!-- Left filebrowser sidenav -->
<div class="row">
<ul id="slide-out" class="side-nav grey lighten-4">
<li class="no-padding">
<ul class="row no-padding center hide-on-small-only grey lighten-4" style="margin-bottom: 0;">
<a class="col s3 waves-effect fbtoolbarbutton tooltipped modal-trigger" href="#modal_newfile" data-position="bottom" data-delay="500" data-tooltip="New File"><i class="grey-text text-darken-2 material-icons fbtoolbarbutton_icon">note_add</i></a>
<a class="col s3 waves-effect fbtoolbarbutton tooltipped modal-trigger" href="#modal_newfolder" data-position="bottom" data-delay="500" data-tooltip="New Folder"><i class="grey-text text-darken-2 material-icons fbtoolbarbutton_icon">create_new_folder</i></a>
<a class="col s3 waves-effect fbtoolbarbutton tooltipped modal-trigger" href="#modal_upload" data-position="bottom" data-delay="500" data-tooltip="Upload File"><i class="grey-text text-darken-2 material-icons fbtoolbarbutton_icon">file_upload</i></a>
<a class="col s3 waves-effect fbtoolbarbutton tooltipped dropdown-button $githidden" data-activates="dropdown_gitmenu" data-alignment='right' data-beloworigin='true' data-delay='500' data-position="bottom" data-tooltip="Git"><i class="mdi mdi-git grey-text text-darken-2 material-icons" style="padding-top: 17px;"></i></a>
</ul>
<ul class="row center toolbar_mobile hide-on-med-and-up grey lighten-4" style="margin-bottom: 0;">
<a class="col s3 waves-effect fbtoolbarbutton modal-trigger" href="#modal_newfile"><i class="grey-text text-darken-2 material-icons fbtoolbarbutton_icon">note_add</i></a>
<a class="col s3 waves-effect fbtoolbarbutton modal-trigger" href="#modal_newfolder"><i class="grey-text text-darken-2 material-icons fbtoolbarbutton_icon">create_new_folder</i></a>
<a class="col s3 waves-effect fbtoolbarbutton modal-trigger" href="#modal_upload"><i class="grey-text text-darken-2 material-icons fbtoolbarbutton_icon">file_upload</i></a>
<a class="col s3 waves-effect fbtoolbarbutton dropdown-button $githidden" data-activates="dropdown_gitmenu_mobile" data-alignment='right' data-beloworigin='true'><i class="mdi mdi-git grey-text text-darken-2 material-icons" style="padding-top: 17px;"></i></a>
</ul>
</li>
<li>
<div class="col s2 no-padding" style="min-height: 64px">
<a id="uplink" class="col s12 waves-effect" style="min-height: 64px; padding-top: 15px; cursor: pointer;"><i class="arrow grey-text text-darken-2 material-icons">arrow_back</i></a>
</div>
<div class="col s10 " style="white-space: nowrap; overflow: auto; min-height: 64px">
<div id="fbheader" class="leftellipsis"></div>
</div>
</li>
<ul id='branches' class="dropdown-content branch_select z-depth-2 grey lighten-4">
<ul id="branchlist"></ul>
</ul>
<li>
<ul class="row no-padding" style="margin-bottom: 0;">
<a id="branchselector" class="col s10 dropdown-button waves-effect truncate grey-text text-darken-2" data-beloworigin="true" data-activates='branches'><i class="grey-text text-darken-2 left material-icons" style="margin-left: 0; margin-right: 0; padding-top: 12px; padding-right: 8px;">arrow_drop_down</i>Branch:<span id="fbheaderbranch"></span></a>
<a id="newbranchbutton" class="waves-effect col s2 center modal-trigger" href="#modal_newbranch"><i class="grey-text text-darken-2 center material-icons" style="padding-top: 12px;">add</i></a>
</ul>
<div class="divider" style="margin-top: 0;"></div>
</li>
<li>
<ul id="fbelements"></ul>
</li>
<div class="row col s12 shadow"></div>
<div id="hass_menu_s" class="z-depth-3 hide-on-med-and-up">
<div class="input-field col s12" style="margin-top: 30px;">
<select onchange="insert(this.value)">
<option value="" disabled selected>Select trigger platform</option>
<option value="event">Event</option>
<option value="mqtt">MQTT</option>
<option value="numeric_state">Numeric State</option>
<option value="state">State</option>
<option value="sun">Sun</option>
<option value="template">Template</option>
<option value="time">Time</option>
<option value="zone">Zone</option>
</select>
<label>Trigger Platforms</label>
</div>
<div class="input-field col s12">
<select id="events_side" onchange="insert(this.value)"></select>
<label>Events</label>
</div>
<div class="input-field col s12">
<input type="text" id="entities-search_side" class="autocomplete" placeholder="sensor.example">
<label>Search entity</label>
</div>
<div class="input-field col s12">
<select id="entities_side" onchange="insert(this.value)"></select>
<label>Entities</label>
</div>
<div class="input-field col s12">
<select onchange="insert(this.value)">
<option value="" disabled selected>Select condition</option>
<option value="numeric_state">Numeric state</option>
<option value="state">State</option>
<option value="sun">Sun</option>
<option value="template">Template</option>
<option value="time">Time</option>
<option value="zone">Zone</option>
</select>
<label>Conditions</label>
</div>
<div class="input-field col s12">
<select id="services_side" onchange="insert(this.value)"></select>
<label>Services</label>
</div>
</div>
</ul>
</div>
<!-- Ace Editor SideNav -->
<div class="row">
<ul id="ace_settings" class="side-nav">
<li class="center s12 grey lighten-3 z-depth-1 subheader">Editor Settings</li>
<div class="row col s12">
<p class="col s12"> <a class="waves-effect waves-light btn light-blue modal-trigger" href="#modal_acekeyboard">Keyboard Shortcuts</a> </p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="set_save_prompt(this.checked)" id="savePrompt" />
<Label for="savePrompt">Prompt before save</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="set_hide_filedetails(this.checked)" id="hideDetails" />
<Label for="hideDetails">Hide details in browser</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('animatedScroll', !editor.getOptions().animatedScroll)" id="animatedScroll" />
<Label for="animatedScroll">Animated Scroll</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('behavioursEnabled', !editor.getOptions().behavioursEnabled)" id="behavioursEnabled" />
<Label for="behavioursEnabled">Behaviour Enabled</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('displayIndentGuides', !editor.getOptions().displayIndentGuides)" id="displayIndentGuides" />
<Label for="displayIndentGuides">Display Indent Guides</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('fadeFoldWidgets', !editor.getOptions().fadeFoldWidgets)" id="fadeFoldWidgets" />
<Label for="fadeFoldWidgets">Fade Fold Widgets</label>
</p>
<div class="input-field col s12">
<input type="number" onchange="editor.setOption('fontSize', parseInt(this.value))" min="6" id="fontSize">
<label class="active" for="fontSize">Font Size</label>
</div>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('highlightActiveLine', !editor.getOptions().highlightActiveLine)" id="highlightActiveLine" />
            <label for="highlightActiveLine">Highlight Active Line</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('highlightGutterLine', !editor.getOptions().highlightGutterLine)" id="highlightGutterLine" />
            <label for="highlightGutterLine">Highlight Gutter Line</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('highlightSelectedWord', !editor.getOptions().highlightSelectedWord)" id="highlightSelectedWord" />
            <label for="highlightSelectedWord">Highlight Selected Word</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('hScrollBarAlwaysVisible', !editor.getOptions().hScrollBarAlwaysVisible)" id="hScrollBarAlwaysVisible" />
<Label for="hScrollBarAlwaysVisible">H Scroll Bar Always Visible</label>
</p>
<div class="input-field col s12">
<select onchange="editor.setKeyboardHandler(this.value)" id="setKeyboardHandler">
<option value="">ace</option>
<option value="ace/keyboard/vim">vim</option>
<option value="ace/keyboard/emacs">emacs</option>
</select>
<label for="setKeyboardHandler">Keyboard Handler</label>
</div>
<div class="input-field col s12">
<select onchange="editor.setOption('mode', this.value)" id="mode">
<option value="ace/mode/abap">abap</option>
<option value="ace/mode/abc">abc</option>
<option value="ace/mode/actionscript">actionscript</option>
<option value="ace/mode/ada">ada</option>
<option value="ace/mode/apache_conf">apache_conf</option>
<option value="ace/mode/asciidoc">asciidoc</option>
<option value="ace/mode/assembly_x86">assembly_x86</option>
<option value="ace/mode/autohotkey">autohotkey</option>
<option value="ace/mode/batchfile">batchfile</option>
<option value="ace/mode/bro">bro</option>
<option value="ace/mode/c_cpp">c_cpp</option>
<option value="ace/mode/c9search">c9search</option>
<option value="ace/mode/cirru">cirru</option>
<option value="ace/mode/clojure">clojure</option>
<option value="ace/mode/cobol">cobol</option>
<option value="ace/mode/coffee">coffee</option>
<option value="ace/mode/coldfusion">coldfusion</option>
<option value="ace/mode/csharp">csharp</option>
<option value="ace/mode/css">css</option>
<option value="ace/mode/curly">curly</option>
<option value="ace/mode/d">d</option>
<option value="ace/mode/dart">dart</option>
<option value="ace/mode/diff">diff</option>
<option value="ace/mode/django">django</option>
<option value="ace/mode/dockerfile">dockerfile</option>
<option value="ace/mode/dot">dot</option>
<option value="ace/mode/drools">drools</option>
<option value="ace/mode/dummy">dummy</option>
<option value="ace/mode/dummysyntax">dummysyntax</option>
<option value="ace/mode/eiffel">eiffel</option>
<option value="ace/mode/ejs">ejs</option>
<option value="ace/mode/elixir">elixir</option>
<option value="ace/mode/elm">elm</option>
<option value="ace/mode/erlang">erlang</option>
<option value="ace/mode/forth">forth</option>
<option value="ace/mode/fortran">fortran</option>
<option value="ace/mode/ftl">ftl</option>
<option value="ace/mode/gcode">gcode</option>
<option value="ace/mode/gherkin">gherkin</option>
<option value="ace/mode/gitignore">gitignore</option>
<option value="ace/mode/glsl">glsl</option>
<option value="ace/mode/gobstones">gobstones</option>
<option value="ace/mode/golang">golang</option>
<option value="ace/mode/groovy">groovy</option>
<option value="ace/mode/haml">haml</option>
<option value="ace/mode/handlebars">handlebars</option>
<option value="ace/mode/haskell">haskell</option>
<option value="ace/mode/haskell_cabal">haskell_cabal</option>
<option value="ace/mode/haxe">haxe</option>
<option value="ace/mode/hjson">hjson</option>
<option value="ace/mode/html">html</option>
<option value="ace/mode/html_elixir">html_elixir</option>
<option value="ace/mode/html_ruby">html_ruby</option>
<option value="ace/mode/ini">ini</option>
<option value="ace/mode/io">io</option>
<option value="ace/mode/jack">jack</option>
<option value="ace/mode/jade">jade</option>
<option value="ace/mode/java">java</option>
<option value="ace/mode/javascript">javascript</option>
<option value="ace/mode/json">json</option>
<option value="ace/mode/jsoniq">jsoniq</option>
<option value="ace/mode/jsp">jsp</option>
<option value="ace/mode/jsx">jsx</option>
<option value="ace/mode/julia">julia</option>
<option value="ace/mode/kotlin">kotlin</option>
<option value="ace/mode/latex">latex</option>
<option value="ace/mode/less">less</option>
<option value="ace/mode/liquid">liquid</option>
<option value="ace/mode/lisp">lisp</option>
<option value="ace/mode/livescript">livescript</option>
<option value="ace/mode/logiql">logiql</option>
<option value="ace/mode/lsl">lsl</option>
<option value="ace/mode/lua">lua</option>
<option value="ace/mode/luapage">luapage</option>
<option value="ace/mode/lucene">lucene</option>
<option value="ace/mode/makefile">makefile</option>
<option value="ace/mode/markdown">markdown</option>
<option value="ace/mode/mask">mask</option>
<option value="ace/mode/matlab">matlab</option>
<option value="ace/mode/maze">maze</option>
<option value="ace/mode/mel">mel</option>
<option value="ace/mode/mushcode">mushcode</option>
<option value="ace/mode/mysql">mysql</option>
<option value="ace/mode/nix">nix</option>
<option value="ace/mode/nsis">nsis</option>
<option value="ace/mode/objectivec">objectivec</option>
<option value="ace/mode/ocaml">ocaml</option>
<option value="ace/mode/pascal">pascal</option>
<option value="ace/mode/perl">perl</option>
<option value="ace/mode/pgsql">pgsql</option>
<option value="ace/mode/php">php</option>
<option value="ace/mode/powershell">powershell</option>
<option value="ace/mode/praat">praat</option>
<option value="ace/mode/prolog">prolog</option>
<option value="ace/mode/properties">properties</option>
<option value="ace/mode/protobuf">protobuf</option>
<option value="ace/mode/python">python</option>
<option value="ace/mode/r">r</option>
<option value="ace/mode/razor">razor</option>
<option value="ace/mode/rdoc">rdoc</option>
<option value="ace/mode/rhtml">rhtml</option>
<option value="ace/mode/rst">rst</option>
<option value="ace/mode/ruby">ruby</option>
<option value="ace/mode/rust">rust</option>
<option value="ace/mode/sass">sass</option>
<option value="ace/mode/scad">scad</option>
<option value="ace/mode/scala">scala</option>
<option value="ace/mode/scheme">scheme</option>
<option value="ace/mode/scss">scss</option>
<option value="ace/mode/sh">sh</option>
<option value="ace/mode/sjs">sjs</option>
<option value="ace/mode/smarty">smarty</option>
<option value="ace/mode/snippets">snippets</option>
<option value="ace/mode/soy_template">soy_template</option>
<option value="ace/mode/space">space</option>
<option value="ace/mode/sql">sql</option>
<option value="ace/mode/sqlserver">sqlserver</option>
<option value="ace/mode/stylus">stylus</option>
<option value="ace/mode/svg">svg</option>
<option value="ace/mode/swift">swift</option>
<option value="ace/mode/tcl">tcl</option>
<option value="ace/mode/tex">tex</option>
<option value="ace/mode/text">text</option>
<option value="ace/mode/textile">textile</option>
<option value="ace/mode/toml">toml</option>
<option value="ace/mode/tsx">tsx</option>
<option value="ace/mode/twig">twig</option>
<option value="ace/mode/typescript">typescript</option>
<option value="ace/mode/vala">vala</option>
<option value="ace/mode/vbscript">vbscript</option>
<option value="ace/mode/velocity">velocity</option>
<option value="ace/mode/verilog">verilog</option>
<option value="ace/mode/vhdl">vhdl</option>
<option value="ace/mode/wollok">wollok</option>
<option value="ace/mode/xml">xml</option>
<option value="ace/mode/xquery">xquery</option>
<option value="ace/mode/yaml">yaml</option>
</select>
<label for="mode">Mode</label>
</div>
<div class="input-field col s12">
<select onchange="editor.setOption('newLineMode', this.value)" id="newLineMode">
<option value="auto">Auto</option>
<option value="windows">Windows</option>
<option value="unix">Unix</option>
</select>
<label for="newLineMode">New Line Mode</label>
</div>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('overwrite', !editor.getOptions().overwrite)" id="overwrite" />
<Label for="overwrite">Overwrite</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('readOnly', !editor.getOptions().readOnly)" id="readOnly" />
<Label for="readOnly">Read Only</label>
</p>
<div class="input-field col s12">
<input value="2" type="number" onchange="editor.setOption('scrollSpeed', parseInt(this.value))" id="scrollSpeed">
<label class="active" for="scrollSpeed">Scroll Speed</label>
</div>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('showFoldWidgets', !editor.getOptions().showFoldWidgets)" id="showFoldWidgets" />
<Label for="showFoldWidgets">Show Fold Widgets</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('showGutter', !editor.getOptions().showGutter)" id="showGutter" />
<Label for="showGutter">Show Gutter</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('showInvisibles', !editor.getOptions().showInvisibles)" id="showInvisibles" />
<Label for="showInvisibles">Show Invisibles</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('showPrintMargin', !editor.getOptions().showPrintMargin)" id="showPrintMargin" />
<Label for="showPrintMargin">Show Print Margin</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('showLineNumbers', !editor.getOptions().showLineNumbers)" id="showLineNumbers" />
<Label for="showLineNumbers">Show Line Numbers</label>
</p>
<div class="input-field col s12">
<input type="number" onchange="editor.setOption('tabSize', parseInt(this.value))" min="1" id="tabSize">
<label class="active" for="tabSize">Tab Size</label>
</div>
<div class="input-field col s12">
<select onchange="editor.setTheme(this.value)" id="theme">
<optgroup label="Light Themes">
<option value="ace/theme/chrome">Chrome</option>
<option value="ace/theme/clouds">Clouds</option>
<option value="ace/theme/crimson_editor">Crimson Editor</option>
<option value="ace/theme/dawn">Dawn</option>
<option value="ace/theme/dreamweaver">Dreamweaver</option>
<option value="ace/theme/eclipse">Eclipse</option>
<option value="ace/theme/github">GitHub</option>
<option value="ace/theme/iplastic">IPlastic</option>
<option value="ace/theme/solarized_light">Solarized Light</option>
<option value="ace/theme/textmate">TextMate</option>
<option value="ace/theme/tomorrow">Tomorrow</option>
<option value="ace/theme/xcode">XCode</option>
<option value="ace/theme/kuroir">Kuroir</option>
<option value="ace/theme/katzenmilch">KatzenMilch</option>
<option value="ace/theme/sqlserver">SQL Server</option>
</optgroup>
<optgroup label="Dark Themes">
<option value="ace/theme/ambiance">Ambiance</option>
<option value="ace/theme/chaos">Chaos</option>
<option value="ace/theme/clouds_midnight">Clouds Midnight</option>
<option value="ace/theme/cobalt">Cobalt</option>
<option value="ace/theme/gruvbox">Gruvbox</option>
<option value="ace/theme/idle_fingers">idle Fingers</option>
<option value="ace/theme/kr_theme">krTheme</option>
<option value="ace/theme/merbivore">Merbivore</option>
<option value="ace/theme/merbivore_soft">Merbivore Soft</option>
<option value="ace/theme/mono_industrial">Mono Industrial</option>
<option value="ace/theme/monokai">Monokai</option>
<option value="ace/theme/pastel_on_dark">Pastel on dark</option>
<option value="ace/theme/solarized_dark">Solarized Dark</option>
<option value="ace/theme/terminal">Terminal</option>
<option value="ace/theme/tomorrow_night">Tomorrow Night</option>
<option value="ace/theme/tomorrow_night_blue">Tomorrow Night Blue</option>
<option value="ace/theme/tomorrow_night_bright">Tomorrow Night Bright</option>
<option value="ace/theme/tomorrow_night_eighties">Tomorrow Night 80s</option>
<option value="ace/theme/twilight">Twilight</option>
<option value="ace/theme/vibrant_ink">Vibrant Ink</option>
</optgroup>
</select>
<label for="theme">Theme</label>
</div>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('useSoftTabs', !editor.getOptions().useSoftTabs)" id="useSoftTabs" />
<Label for="useSoftTabs">Use Soft Tabs</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('useWorker', !editor.getOptions().useWorker)" id="useWorker" />
<Label for="useWorker">Use Worker</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('vScrollBarAlwaysVisible', !editor.getOptions().vScrollBarAlwaysVisible)" id="vScrollBarAlwaysVisible" />
<Label for="vScrollBarAlwaysVisible">V Scroll Bar Always Visible</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('wrapBehavioursEnabled', !editor.getOptions().wrapBehavioursEnabled)" id="wrapBehavioursEnabled" />
<Label for="wrapBehavioursEnabled">Wrap Behaviours Enabled</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.getSession().setUseWrapMode(!editor.getSession().getUseWrapMode());if(editor.getSession().getUseWrapMode()){document.getElementById('wrap_limit').focus();document.getElementById('wrap_limit').onchange();}" id="wrap" />
<Label for="wrap">Wrap Mode</label>
</p>
<div class="input-field col s12">
<input id="wrap_limit" type="number" onchange="editor.setOption('wrap', parseInt(this.value))" min="1" value="80">
<label class="active" for="wrap_limit">Wrap Limit</label>
</div>
<a class="waves-effect waves-light btn light-blue" onclick="save_ace_settings()">Save Settings Locally</a>
<p class="center col s12"> Ace Editor 1.4.7 </p>
</div>
</ul>
</div>
</main>
<input type="hidden" id="fb_currentfile" value="" />
<!-- Scripts -->
<script src="https://code.jquery.com/jquery-3.4.1.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.100.2/js/materialize.min.js"></script>
<script>
function ws_connect() {
function msg(str) {
document.getElementById("ws_events").value = str + "\n\n" + document.getElementById("ws_events").value;
$('#ws_events').trigger('autoresize');
}
try {
ws = new WebSocket(document.getElementById("ws_uri").value);
ws.addEventListener("open", function(event) {
if (document.getElementById("ws_password").value.split(".").length == 3) {
var auth = {
type: "auth",
access_token: document.getElementById("ws_password").value
};
}
else {
var auth = {
type: "auth",
api_password: document.getElementById("ws_password").value
};
}
var data = {
id: 1,
type: "subscribe_events"
};
if (document.getElementById("ws_password").value) {
ws.send(JSON.stringify(auth));
}
ws.send(JSON.stringify(data));
});
ws.onmessage = function(event) {
msg(event.data);
}
ws.onclose = function() {
msg('Socket closed');
document.getElementById('ws_b_c').classList.remove('disabled');
document.getElementById('ws_b_d').classList.add('disabled');
};
ws.onopen = function() {
msg('Socket connected');
document.getElementById('ws_b_c').classList.add('disabled');
document.getElementById('ws_b_d').classList.remove('disabled');
};
}
catch(err) {
console.log("Error: " + err.message);
}
}
function ws_disconnect() {
try {
ws.close();
}
catch(err) {
console.log("Error: " + err.message);
}
}
</script>
<script type="text/javascript">
var init_loadfile = $loadfile;
var global_current_filepath = null;
var global_current_filename = null;
function toggle_hass_panels() {
if (document.getElementById("hass_menu_l").style.display == "none") {
document.getElementById("hass_menu_l").style.display = "";
document.getElementById("editor").classList.remove("l12");
document.getElementById("editor").classList.add("l9");
document.getElementById("filename_row").classList.remove("l12");
document.getElementById("filename_row").classList.add("l9");
}
else {
document.getElementById("hass_menu_l").style.display = "none";
document.getElementById("editor").classList.remove("l9");
document.getElementById("editor").classList.add("l12");
document.getElementById("filename_row").classList.remove("l9");
document.getElementById("filename_row").classList.add("l12");
}
if (document.getElementById("hass_menu_s").style.display == "none") {
document.getElementById("hass_menu_s").style.display = "";
document.getElementById("editor").classList.remove("l12");
document.getElementById("editor").classList.add("l9");
document.getElementById("filename_row").classList.remove("l12");
document.getElementById("filename_row").classList.add("l9");
}
else {
document.getElementById("hass_menu_s").style.display = "none";
document.getElementById("editor").classList.remove("l9");
document.getElementById("editor").classList.add("l12");
document.getElementById("filename_row").classList.remove("l9");
document.getElementById("filename_row").classList.add("l12");
}
}
function got_focus_or_visibility() {
if (global_current_filename && global_current_filepath) {
// The globals are set, set the localStorage to those values
var current_file = {current_filepath: global_current_filepath,
current_filename: global_current_filename}
localStorage.setItem('current_file', JSON.stringify(current_file));
}
else {
// This tab had no prior file opened, clearing from localStorage
localStorage.removeItem('current_file');
}
}
window.onfocus = function() {
got_focus_or_visibility();
}
//window.onblur = function() {
// console.log("lost focus");
//}
// Got this from here: https://developer.mozilla.org/en-US/docs/Web/API/Page_Visibility_API
// Set the name of the hidden property and the change event for visibility
var hidden, visibilityChange;
if (typeof document.hidden !== "undefined") { // Opera 12.10 and Firefox 18 and later support
hidden = "hidden";
visibilityChange = "visibilitychange";
}
else if (typeof document.msHidden !== "undefined") {
hidden = "msHidden";
visibilityChange = "msvisibilitychange";
}
else if (typeof document.webkitHidden !== "undefined") {
hidden = "webkitHidden";
visibilityChange = "webkitvisibilitychange";
}
function handleVisibilityChange() {
if (document[hidden]) {
// We're doing nothing when the tab gets out of vision
}
else {
// We're doing this if the tab becomes visible
got_focus_or_visibility();
}
}
// Warn if the browser doesn't support addEventListener or the Page Visibility API
if (typeof document.addEventListener === "undefined" || typeof document.hidden === "undefined") {
console.log("This requires a browser, such as Google Chrome or Firefox, that supports the Page Visibility API.");
}
else {
// Handle page visibility change
document.addEventListener(visibilityChange, handleVisibilityChange, false);
}
$(document).keydown(function(e) {
if ((e.key == 's' || e.key == 'S' ) && (e.ctrlKey || e.metaKey)) {
e.preventDefault();
save_check();
return false;
}
return true;
});
$(document).ready(function () {
$('select').material_select();
$('.modal').modal();
$('ul.tabs').tabs();
$('.collapsible').collapsible({
onOpen: function(el) {
$('#branch_tab').click();
},
});
$('.dropdown-button').dropdown({
inDuration: 300,
outDuration: 225,
constrainWidth: false,
hover: false,
gutter: 0,
belowOrigin: true,
alignment: 'right',
stopPropagation: false
});
$('.files-collapse').sideNav({
menuWidth: 320,
edge: 'left',
closeOnClick: false,
draggable: true
});
$('.ace_settings-collapse').sideNav({
menuWidth: 300,
edge: 'right',
closeOnClick: true,
draggable: false
});
// This fixes the dead spaces when trying to close the file browser
$(document).on('click', '.drag-target', function(){$('.button-collapse').sideNav('hide');})
listdir('.');
document.getElementById('savePrompt').checked = get_save_prompt();
document.getElementById('hideDetails').checked = get_hide_filedetails();
var entities_search = new Object();
if (states_list) {
for (var i = 0; i < states_list.length; i++) {
entities_search[states_list[i].attributes.friendly_name + ' (' + states_list[i].entity_id + ')'] = null;
}
}
$('#entities-search').autocomplete({
data: entities_search,
limit: 40,
onAutocomplete: function(val) {
insert(val.split("(")[1].split(")")[0]);
},
minLength: 1,
});
$('#entities-search_side').autocomplete({
data: entities_search,
limit: 40,
onAutocomplete: function(val) {
insert(val.split("(")[1].split(")")[0]);
},
minLength: 1,
});
$standalone
});
</script>
<script type="text/javascript">
document.addEventListener("DOMContentLoaded", function() {
$('.preloader-background').delay(800).fadeOut('slow');
$('.preloader-wrapper').delay(800).fadeOut('slow');
if (init_loadfile) {
init_loadfile_name = init_loadfile.split('/').pop();
loadfile(init_loadfile, init_loadfile_name);
}
else {
if (!localStorage.getItem("new_tab")) {
var old_file = localStorage.getItem("current_file");
if (old_file) {
old_file = JSON.parse(old_file);
loadfile(old_file.current_filepath, old_file.current_filename);
}
}
else {
localStorage.removeItem("current_file");
}
localStorage.removeItem("new_tab");
}
});
</script>
<script>
var modemapping = new Object();
modemapping['c'] = 'ace/mode/c_cpp';
modemapping['cpp'] = 'ace/mode/c_cpp';
modemapping['css'] = 'ace/mode/css';
modemapping['gitignore'] = 'ace/mode/gitignore';
modemapping['htm'] = 'ace/mode/html';
modemapping['html'] = 'ace/mode/html';
modemapping['js'] = 'ace/mode/javascript';
modemapping['json'] = 'ace/mode/json';
modemapping['php'] = 'ace/mode/php';
modemapping['py'] = 'ace/mode/python';
modemapping['sh'] = 'ace/mode/sh';
modemapping['sql'] = 'ace/mode/sql';
modemapping['txt'] = 'ace/mode/text';
modemapping['xml'] = 'ace/mode/xml';
modemapping['yaml'] = 'ace/mode/yaml';
function sort_select(id) {
var options = $('#' + id + ' option');
var arr = options.map(function (_, o) {
return {
t: $(o).text(), v: o.value
};
}).get();
arr.sort(function (o1, o2) {
var t1 = o1.t.toLowerCase(), t2 = o2.t.toLowerCase();
return t1 > t2 ? 1 : t1 < t2 ? -1 : 0;
});
options.each(function (i, o) {
o.value = arr[i].v;
$(o).text(arr[i].t);
});
}
var separator = '$separator';
var services_list = $services;
var events_list = $events;
var states_list = $states;
if (events_list) {
var events = document.getElementById("events");
for (var i = 0; i < events_list.length; i++) {
var option = document.createElement("option");
option.value = events_list[i].event;
option.text = events_list[i].event;
events.add(option);
}
var events = document.getElementById("events_side");
for (var i = 0; i < events_list.length; i++) {
var option = document.createElement("option");
option.value = events_list[i].event;
option.text = events_list[i].event;
events.add(option);
}
sort_select('events');
sort_select('events_side');
}
if (states_list) {
var entities = document.getElementById("entities");
for (var i = 0; i < states_list.length; i++) {
var option = document.createElement("option");
option.value = states_list[i].entity_id;
option.text = states_list[i].attributes.friendly_name + ' (' + states_list[i].entity_id + ')';
entities.add(option);
}
var entities = document.getElementById("entities_side");
for (var i = 0; i < states_list.length; i++) {
var option = document.createElement("option");
option.value = states_list[i].entity_id;
option.text = states_list[i].attributes.friendly_name + ' (' + states_list[i].entity_id + ')';
entities.add(option);
}
sort_select('entities');
sort_select('entities_side');
}
if (services_list) {
var services = document.getElementById("services");
for (var i = 0; i < services_list.length; i++) {
for (var k in services_list[i].services) {
var option = document.createElement("option");
option.value = services_list[i].domain + '.' + k;
option.text = services_list[i].domain + '.' + k;
services.add(option);
}
}
var services = document.getElementById("services_side");
for (var i = 0; i < services_list.length; i++) {
for (var k in services_list[i].services) {
var option = document.createElement("option");
option.value = services_list[i].domain + '.' + k;
option.text = services_list[i].domain + '.' + k;
services.add(option);
}
}
sort_select('services');
sort_select('services_side');
}
function listdir(path) {
$.get(encodeURI("api/listdir?path=" + path), function(data) {
if (!data.error) {
renderpath(data);
}
else {
console.log("Permission denied.");
}
});
document.getElementById("slide-out").scrollTop = 0;
}
function renderitem(itemdata, index) {
var li = document.createElement('li');
li.classList.add("collection-item", "fbicon_pad", "col", "s12", "no-padding", "white");
var item = document.createElement('a');
item.classList.add("waves-effect", "col", "s10", "fbicon_pad");
var iicon = document.createElement('i');
iicon.classList.add("material-icons", "fbmenuicon_pad");
var stats = document.createElement('span');
date = new Date(itemdata.modified*1000);
stats.classList.add('stats');
if (itemdata.type == 'dir') {
iicon.innerHTML = 'folder';
item.setAttribute("onclick", "listdir('" + encodeURI(itemdata.fullpath) + "')");
stats.innerHTML = "Mod.: " + date.toUTCString();
}
else {
nameparts = itemdata.name.split('.');
extension = nameparts[nameparts.length -1];
if (['c', 'cpp', 'css', 'htm', 'html', 'js', 'json', 'php', 'py', 'sh', 'sql', 'xml', 'yaml'].indexOf(extension.toLocaleLowerCase()) > +1 ) {
iicon.classList.add('mdi', 'mdi-file-xml');
}
else if (['txt', 'doc', 'docx'].indexOf(extension.toLocaleLowerCase()) > -1 ) {
iicon.classList.add('mdi', 'mdi-file-document');
}
else if (['bmp', 'gif', 'jpg', 'jpeg', 'png', 'tif', 'webp'].indexOf(extension.toLocaleLowerCase()) > -1 ) {
iicon.classList.add('mdi', 'mdi-file-image');
}
else if (['mp3', 'ogg', 'wav'].indexOf(extension) > -1 ) {
iicon.classList.add('mdi', 'mdi-file-music');
}
else if (['avi', 'flv', 'mkv', 'mp4', 'mpg', 'mpeg', 'webm'].indexOf(extension.toLocaleLowerCase()) > -1 ) {
iicon.classList.add('mdi', 'mdi-file-video');
}
else if (['pdf'].indexOf(extension.toLocaleLowerCase()) > -1 ) {
iicon.classList.add('mdi', 'mdi-file-pdf');
}
else {
iicon.classList.add('mdi', 'mdi-file');
}
item.setAttribute("onclick", "loadfile('" + encodeURI(itemdata.fullpath) + "', '" + itemdata.name + "')");
stats.innerHTML = "Mod.: " + date.toUTCString() + " Size: " + (itemdata.size/1024).toFixed(1) + " KiB";
}
item.appendChild(iicon);
var itext = document.createElement('div');
itext.innerHTML = itemdata.name;
itext.classList.add("filename");
var hasgitadd = false;
if (itemdata.gitstatus) {
if (itemdata.gittracked == 'untracked') {
itext.classList.add('text_darkred');
hasgitadd = true;
}
else {
if(itemdata.gitstatus == 'unstaged') {
itext.classList.add('text_darkred');
hasgitadd = true;
}
else if (itemdata.gitstatus == 'staged') {
itext.classList.add('text_darkgreen');
}
}
}
item.appendChild(itext);
if (!get_hide_filedetails()) {
item.appendChild(stats);
}
var dropdown = document.createElement('ul');
dropdown.id = 'fb_dropdown_' + index;
dropdown.classList.add('dropdown-content');
dropdown.classList.add("z-depth-4");
// Download button
var dd_download = document.createElement('li');
var dd_download_a = document.createElement('a');
dd_download_a.classList.add("waves-effect", "fb_dd");
dd_download_a.setAttribute('onclick', "download_file('" + encodeURI(itemdata.fullpath) + "')");
dd_download_a.innerHTML = "Download";
dd_download.appendChild(dd_download_a);
dropdown.appendChild(dd_download);
// Rename button
var dd_rename = document.createElement('li');
var dd_rename_a = document.createElement('a');
dd_rename_a.classList.add("waves-effect", "fb_dd");
dd_rename_a.setAttribute('href', "#modal_rename");
dd_rename_a.classList.add("modal-trigger");
dd_rename_a.innerHTML = "Rename";
dd_rename.appendChild(dd_rename_a);
dropdown.appendChild(dd_rename);
// Delete button
var dd_delete = document.createElement('li');
var dd_delete_a = document.createElement('a');
dd_delete_a.classList.add("waves-effect", "fb_dd");
dd_delete_a.setAttribute('href', "#modal_delete");
dd_delete_a.classList.add("modal-trigger");
dd_delete_a.innerHTML = "Delete";
dd_delete.appendChild(dd_delete_a);
dropdown.appendChild(dd_delete);
if (itemdata.gitstatus) {
if (hasgitadd) {
var divider = document.createElement('li');
divider.classList.add('divider');
dropdown.appendChild(divider);
// git add button
var dd_gitadd = document.createElement('li');
var dd_gitadd_a = document.createElement('a');
dd_gitadd_a.classList.add('waves-effect', 'fb_dd', 'modal-trigger');
dd_gitadd_a.setAttribute('href', "#modal_gitadd");
dd_gitadd_a.innerHTML = "git add";
dd_gitadd.appendChild(dd_gitadd_a);
dropdown.appendChild(dd_gitadd);
// git diff button
var dd_gitdiff = document.createElement('li');
var dd_gitdiff_a = document.createElement('a');
dd_gitdiff_a.classList.add('waves-effect', 'fb_dd', 'modal-trigger');
dd_gitdiff_a.setAttribute('onclick', "gitdiff()");
dd_gitdiff_a.innerHTML = "git diff";
dd_gitdiff.appendChild(dd_gitdiff_a);
dropdown.appendChild(dd_gitdiff);
}
}
var menubutton = document.createElement('a');
menubutton.classList.add("fbmenubutton", "waves-effect", "dropdown-button", "col", "s2", "fbicon_pad");
menubutton.classList.add('waves-effect');
menubutton.classList.add('dropdown-button');
menubutton.setAttribute('data-activates', dropdown.id);
menubutton.setAttribute('data-alignment', 'right');
var menubuttonicon = document.createElement('i');
menubutton.classList.add('material-icons');
menubutton.classList.add("right");
menubutton.innerHTML = 'more_vert';
menubutton.setAttribute('onclick', "document.getElementById('fb_currentfile').value='" + encodeURI(itemdata.fullpath) + "';$('span.fb_currentfile').html('" + itemdata.name + "')");
li.appendChild(item);
li.appendChild(menubutton);
li.setAttribute("title", itemdata.name)
li.appendChild(dropdown);
return li;
}
function renderpath(dirdata) {
var newbranchbutton = document.getElementById('newbranchbutton');
newbranchbutton.style.cssText = "display: none !important"
var fbelements = document.getElementById("fbelements");
while (fbelements.firstChild) {
fbelements.removeChild(fbelements.firstChild);
}
var fbheader = document.getElementById('fbheader');
fbheader.innerHTML = dirdata.abspath;
var branchselector = document.getElementById('branchselector');
var fbheaderbranch = document.getElementById('fbheaderbranch');
var branchlist = document.getElementById('branchlist');
while (branchlist.firstChild) {
branchlist.removeChild(branchlist.firstChild);
}
if (dirdata.activebranch) {
newbranchbutton.style.display = "inline-block";
fbheaderbranch.innerHTML = dirdata.activebranch;
fbheaderbranch.style.display = "inline";
branchselector.style.display = "block";
for (var i = 0; i < dirdata.branches.length; i++) {
var branch = document.createElement('li');
var link = document.createElement('a');
link.classList.add("branch_select", "truncate");
link.innerHTML = dirdata.branches[i];
link.href = '#';
link.setAttribute('onclick', 'checkout("' + dirdata.branches[i] + '");collapseAll()')
branch.appendChild(link);
if (dirdata.branches[i] == dirdata.activebranch) {
link.classList.add("active", "grey", "darken-1");
}
else {
link.classList.add("grey-text", "text-darken-3", "branch_hover", "waves-effect", "grey", "lighten-4");
}
branchlist.appendChild(branch);
}
}
else {
fbheaderbranch.innerHTML = "";
fbheaderbranch.style.display = "";
branchselector.style.display = "none";
}
var uplink = document.getElementById('uplink');
uplink.setAttribute("onclick", "listdir('" + encodeURI(dirdata.parent) + "')")
for (var i = 0; i < dirdata.content.length; i++) {
fbelements.appendChild(renderitem(dirdata.content[i], i));
}
$(".dropdown-button").dropdown();
}
function collapseAll() {
$(".collapsible-header").removeClass(function() { return "active"; });
$(".collapsible").collapsible({accordion: true});
$(".collapsible").collapsible({accordion: false});
}
function checkout(){
$(".collapsible-header").removeClass(function(){
return "active";
});
$(".collapsible").collapsible({accordion: true});
$(".collapsible").collapsible({accordion: false});
}
function loadfile(filepath, filenameonly) {
if ($('.markdirty.red').length) {
$('#modal_markdirty').modal('open');
}
else {
url = "api/file?filename=" + filepath;
fileparts = filepath.split('.');
extension = fileparts[fileparts.length -1];
raw_open = [
"jpg",
"jpeg",
"png",
"svg",
"bmp",
"webp",
"gif"
]
if (raw_open.indexOf(extension) > -1) {
window.open(url, '_blank');
}
else {
$.get(url, function(data) {
if (modemapping.hasOwnProperty(extension)) {
editor.setOption('mode', modemapping[extension]);
}
else {
editor.setOption('mode', "ace/mode/text");
}
editor.getSession().setValue(data, -1);
document.getElementById('currentfile').value = decodeURI(filepath);
editor.session.getUndoManager().markClean();
$('.markdirty').each(function(i, o){o.classList.remove('red');});
$('.hidesave').css('opacity', 0);
document.title = filenameonly + " - HASS Configurator";
global_current_filepath = filepath;
global_current_filename = filenameonly;
var current_file = {current_filepath: global_current_filepath,
current_filename: global_current_filename}
localStorage.setItem('current_file', JSON.stringify(current_file));
check_lint();
if (localStorage.getItem("filehistory") === null) {
localStorage.setItem("filehistory", JSON.stringify([filepath]));
var filehistory = JSON.parse(localStorage.getItem("filehistory"));
}
else {
var filehistory = JSON.parse(localStorage.getItem("filehistory"));
if (filehistory[filehistory.length -1] != filepath) {
filehistory.push(filepath);
filehistory = filehistory.reduce(function(a,b){if(a.indexOf(b)<0)a.push(b);return a;},[]);
while (filehistory.length > 10) {
filehistory.shift();
}
localStorage.setItem("filehistory", JSON.stringify(filehistory));
}
}
var history_ul = document.getElementById("file_history");
while (history_ul.firstChild) {
history_ul.removeChild(history_ul.firstChild);
}
filehistory.reverse();
for (i = 0; i < filehistory.length; i++) {
var li = document.createElement('li');
var item = document.createElement('span');
var parts = decodeURI(filehistory[i]).split(separator);
var filename = parts[parts.length - 1];
item.innerHTML = "..." + decodeURI(filehistory[i].slice(filehistory[i].length - 25));
item.setAttribute("onclick", "loadfile('" + filehistory[i] + "', '" + filename + "')");
li.appendChild(item);
history_ul.appendChild(li);
}
});
}
}
}
function closefile() {
document.getElementById('currentfile').value='';
editor.getSession().setValue('');
$('.markdirty').each(function(i, o) {
o.classList.remove('red');
});
localStorage.removeItem('current_file');
global_current_filepath = null;
global_current_filename = null;
document.title = 'HASS Configurator';
}
function check_config() {
$.get("api/check_config", function (resp) {
if (resp.length == 0) {
var $toastContent = $("<div><pre>Configuration seems valid.</pre></div>");
Materialize.toast($toastContent, 2000);
}
else {
var $toastContent = $("<div><pre>" + resp[0].state + "</pre></div>");
Materialize.toast($toastContent, 2000);
}
});
}
function reload_automations() {
$.get("api/reload_automations", function (resp) {
var $toastContent = $("<div>Automations reloaded</div>");
Materialize.toast($toastContent, 2000);
});
}
function reload_scripts() {
$.get("api/reload_scripts", function (resp) {
var $toastContent = $("<div>Scripts reloaded</div>");
Materialize.toast($toastContent, 2000);
});
}
function reload_groups() {
$.get("api/reload_groups", function (resp) {
var $toastContent = $("<div><pre>Groups reloaded</pre></div>");
Materialize.toast($toastContent, 2000);
});
}
function reload_core() {
$.get("api/reload_core", function (resp) {
var $toastContent = $("<div><pre>Core reloaded</pre></div>");
Materialize.toast($toastContent, 2000);
});
}
function restart() {
$.get("api/restart", function (resp) {
if (resp.length == 0) {
var $toastContent = $("<div><pre>Restarting HASS</pre></div>");
Materialize.toast($toastContent, 2000);
}
else {
var $toastContent = $("<div><pre>" + resp + "</pre></div>");
Materialize.toast($toastContent, 2000);
}
});
}
function get_netstat() {
$.get("api/netstat", function (resp) {
if (resp.hasOwnProperty("allowed_networks")) {
var allowed_list = document.getElementById("allowed_networks");
while (allowed_list.firstChild) {
allowed_list.removeChild(allowed_list.firstChild);
}
var header = document.createElement("li");
header.classList.add("collection-header");
var header_h4 = document.createElement("h4");
header_h4.innerText = "Allowed networks";
header_h4.classList.add("grey-text");
header_h4.classList.add("text-darken-3");
header.appendChild(header_h4);
allowed_list.appendChild(header);
for (var i = 0; i < resp.allowed_networks.length; i++) {
var li = document.createElement("li");
li.classList.add("collection-item");
var li_div = document.createElement("div");
var address = document.createElement("span");
address.innerText = resp.allowed_networks[i];
li_div.appendChild(address);
var li_a = document.createElement("a");
li_a.classList.add("light-blue-text");
li_a.href = "#!";
li_a.classList.add("secondary-content");
var li_a_i = document.createElement("i");
li_a_i.classList.add("mdi");
li_a_i.classList.add("mdi-delete");
li_a_i.innerText = "Remove";
li_a.appendChild(li_a_i);
li_a.setAttribute("onclick", "helper_a_net_remove('" + resp.allowed_networks[i] + "')");
li_div.appendChild(li_a);
li.appendChild(li_div);
allowed_list.appendChild(li);
}
}
if (resp.hasOwnProperty("banned_ips")) {
var banlist = document.getElementById("banned_ips");
while (banlist.firstChild) {
banlist.removeChild(banlist.firstChild);
}
var header = document.createElement("li");
header.classList.add("collection-header");
var header_h4 = document.createElement("h4");
header_h4.innerText = "Banned IPs";
header_h4.classList.add("grey-text");
header_h4.classList.add("text-darken-3");
header.appendChild(header_h4);
banlist.appendChild(header);
for (var i = 0; i < resp.banned_ips.length; i++) {
var li = document.createElement("li");
li.classList.add("collection-item");
var li_div = document.createElement("div");
var address = document.createElement("span");
address.innerText = resp.banned_ips[i];
li_div.appendChild(address);
var li_a = document.createElement("a");
li_a.classList.add("light-blue-text");
li_a.href = "#!";
li_a.classList.add("secondary-content");
var li_a_i = document.createElement("i");
li_a_i.classList.add("mdi");
li_a_i.classList.add("mdi-delete");
li_a_i.innerText = "Unban";
li_a.appendChild(li_a_i);
li_a.setAttribute("onclick", "helper_banned_unban('" + resp.banned_ips[i] + "')");
li_div.appendChild(li_a);
li.appendChild(li_div);
banlist.appendChild(li);
}
}
});
}
function helper_a_net_remove(network) {
document.getElementById("removenet").innerText = network;
$('#modal_netstat').modal('close');
$('#modal_a_net_remove').modal('open');
}
function a_net_remove() {
var network = document.getElementById("removenet").innerText
data = new Object();
data.network = network;
data.method = 'remove';
$.post("api/allowed_networks", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
}
});
}
function helper_a_net_add() {
document.getElementById("addnet").innerText = document.getElementById("add_net_ip").value;
document.getElementById("add_net_ip").value = "";
$('#modal_netstat').modal('close');
$('#modal_a_net_add').modal('open');
}
function a_net_add() {
var network = document.getElementById("addnet").innerText
data = new Object();
data.network = network;
data.method = 'add';
$.post("api/allowed_networks", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
}
});
}
function helper_banned_unban(ip) {
document.getElementById("unbanip").innerText = ip;
$('#modal_netstat').modal('close');
$('#modal_unban').modal('open');
}
function banned_unban() {
var ip = document.getElementById("unbanip").innerText
data = new Object();
data.ip = ip;
data.method = 'unban';
$.post("api/banned_ips", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
}
});
}
function helper_banned_ban() {
document.getElementById("banip").innerText = document.getElementById("add_banned_ip").value;
document.getElementById("add_banned_ip").value = "";
$('#modal_netstat').modal('close');
$('#modal_ban').modal('open');
}
function banned_ban() {
var ip = document.getElementById("banip").innerText
data = new Object();
data.ip = ip;
data.method = 'ban';
$.post("api/banned_ips", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
}
});
}
function save() {
var filepath = document.getElementById('currentfile').value;
if (filepath.length > 0) {
data = new Object();
data.filename = filepath;
data.text = editor.getValue()
$.post("api/save", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
listdir(document.getElementById('fbheader').innerHTML);
$('.markdirty').each(function(i, o){o.classList.remove('red');});
$('.hidesave').css('opacity', 0);
editor.session.getUndoManager().markClean();
}
});
}
else {
Materialize.toast('Error: Please provide a filename', 5000);
}
}
function save_check() {
var filepath = document.getElementById('currentfile').value;
if (filepath.length > 0) {
if (get_save_prompt()) {
$('#modal_save').modal('open');
}
else {
save();
}
}
else {
Materialize.toast('Error: Please provide a filename', 5000);
$(".pathtip").bind("animationend webkitAnimationEnd oAnimationEnd MSAnimationEnd", function(){
$(this).removeClass("pathtip_color");
}).addClass("pathtip_color");
}
}
function download_file(filepath) {
window.open("api/download?filename="+encodeURI(filepath));
}
function rename_file() {
var src = document.getElementById("fb_currentfile").value;
var dstfilename = document.getElementById("rename_name_new").value;
if (src.length > 0 && dstfilename.length > 0) {
data = new Object();
data.src = src;
data.dstfilename = dstfilename;
$.post("api/rename", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
listdir(document.getElementById('fbheader').innerHTML)
//document.getElementById('currentfile').value='';
//editor.setValue('');
document.getElementById("rename_name_new").value = "";
}
})
}
}
function delete_file() {
var path = document.getElementById('currentfile').value;
if (path.length > 0) {
data = new Object();
data.path = path;
$.post("api/delete", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
listdir(document.getElementById('fbheader').innerHTML)
document.getElementById('currentfile').value='';
editor.setValue('');
}
});
}
}
function exec_command() {
var command = document.getElementById('commandline').value;
if (command.length > 0) {
data = new Object();
data.command = command;
data.timeout = 15;
$.post("api/exec_command", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var history = document.getElementById('command_history');
history.innerText += resp.message + ': ' + resp.returncode + "\n";
if (resp.stdout) {
history.innerText += resp.stdout;
}
if (resp.stderr) {
history.innerText += resp.stderr;
}
}
});
}
}
function delete_element() {
var path = document.getElementById('fb_currentfile').value;
if (path.length > 0) {
data = new Object();
data.path= path;
$.post("api/delete", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
listdir(document.getElementById('fbheader').innerHTML);
if (document.getElementById('currentfile').value == path) {
document.getElementById('currentfile').value='';
editor.setValue('');
}
}
});
}
}
function gitadd() {
var path = document.getElementById('fb_currentfile').value;
if (path.length > 0) {
data = new Object();
data.path = path;
$.post("api/gitadd", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
listdir(document.getElementById('fbheader').innerHTML);
}
});
}
}
function gitdiff() {
var path = document.getElementById('fb_currentfile').value;
closefile();
if (path.length > 0) {
data = new Object();
data.path = path;
$.post("api/gitdiff", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
editor.setOption('mode', modemapping['diff']);
editor.getSession().setValue(resp.message, -1);
editor.session.getUndoManager().markClean();
}
});
}
}
function gitinit() {
var path = document.getElementById("fbheader").innerHTML;
if (path.length > 0) {
data = new Object();
data.path = path;
$.post("api/init", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
listdir(document.getElementById('fbheader').innerHTML);
}
});
}
}
function commit(message) {
var path = document.getElementById("fbheader").innerHTML;
if (path.length > 0) {
data = new Object();
data.path = path;
data.message = message;
$.post("api/commit", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
listdir(document.getElementById('fbheader').innerHTML);
document.getElementById('commitmessage').value = "";
}
});
}
}
function gitpush() {
var path = document.getElementById("fbheader").innerHTML;
if (path.length > 0) {
data = new Object();
data.path = path;
$.post("api/push", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
listdir(document.getElementById('fbheader').innerHTML);
}
});
}
}
function gitstash() {
var path = document.getElementById("fbheader").innerHTML;
if (path.length > 0) {
data = new Object();
data.path = path;
$.post("api/stash", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 5000);
listdir(document.getElementById('fbheader').innerHTML);
}
});
}
}
function checkout(branch) {
var path = document.getElementById("fbheader").innerHTML;
if (path.length > 0) {
data = new Object();
data.path = path;
data.branch = branch;
$.post("api/checkout", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
listdir(document.getElementById('fbheader').innerHTML);
}
});
}
}
function newbranch(branch) {
var path = document.getElementById("fbheader").innerHTML;
if (path.length > 0) {
data = new Object();
data.path = path;
data.branch = branch;
$.post("api/newbranch", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
listdir(document.getElementById('fbheader').innerHTML);
}
});
}
}
function newfolder(foldername) {
var path = document.getElementById('fbheader').innerHTML;
if (path.length > 0 && foldername.length > 0) {
data = new Object();
data.path = path;
data.name = foldername;
$.post("api/newfolder", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
}
listdir(document.getElementById('fbheader').innerHTML);
document.getElementById('newfoldername').value = '';
});
}
}
function newfile(filename) {
var path = document.getElementById('fbheader').innerHTML;
if (path.length > 0 && filename.length > 0) {
data = new Object();
data.path = path;
data.name = filename;
$.post("api/newfile", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
}
listdir(document.getElementById('fbheader').innerHTML);
document.getElementById('newfilename').value = '';
});
}
}
function upload() {
var file_data = $('#uploadfile').prop('files')[0];
var form_data = new FormData();
form_data.append('file', file_data);
form_data.append('path', document.getElementById('fbheader').innerHTML);
$.ajax({
url: 'api/upload',
dataType: 'json',
cache: false,
contentType: false,
processData: false,
data: form_data,
type: 'post',
success: function(resp){
if (resp.error) {
var $toastContent = $("<div><pre>Error: " + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
}
else {
var $toastContent = $("<div><pre>Upload succesful</pre></div>");
Materialize.toast($toastContent, 2000);
listdir(document.getElementById('fbheader').innerHTML);
document.getElementById('uploadform').reset();
}
}
});
}
</script>
<script>
ace.require("ace/ext/language_tools");
var editor = ace.edit("editor");
editor.on("input", function() {
if (editor.session.getUndoManager().isClean()) {
$('.markdirty').each(function(i, o){o.classList.remove('red');});
$('.hidesave').css('opacity', 0);
}
else {
$('.markdirty').each(function(i, o){o.classList.add('red');});
$('.hidesave').css('opacity', 1);
}
});
if (localStorage.hasOwnProperty("pochass")) {
editor.setOptions(JSON.parse(localStorage.pochass));
editor.setOptions({
enableBasicAutocompletion: true,
enableSnippets: true
})
editor.$blockScrolling = Infinity;
}
else {
editor.getSession().setMode("ace/mode/yaml");
editor.setOptions({
showInvisibles: true,
useSoftTabs: true,
displayIndentGuides: true,
highlightSelectedWord: true,
enableBasicAutocompletion: true,
enableSnippets: true
})
editor.$blockScrolling = Infinity;
}
function set_save_prompt(checked) {
localStorage.setItem('save_prompt', JSON.stringify({save_prompt: checked}));
}
function get_save_prompt() {
if (localStorage.getItem('save_prompt')) {
var save_prompt = JSON.parse(localStorage.getItem('save_prompt'));
return save_prompt.save_prompt;
}
return false;
}
function set_hide_filedetails(checked) {
localStorage.setItem('hide_filedetails', JSON.stringify({hide_filedetails: checked}));
}
function get_hide_filedetails() {
if (localStorage.getItem('hide_filedetails')) {
var hide_filedetails = JSON.parse(localStorage.getItem('hide_filedetails'));
return hide_filedetails.hide_filedetails;
}
return false;
}
function apply_settings() {
var options = editor.getOptions();
for (var key in options) {
if (options.hasOwnProperty(key)) {
var target = document.getElementById(key);
if (target) {
if (typeof(options[key]) == "boolean" && target.type === 'checkbox') {
target.checked = options[key];
target.setAttribute("checked", options[key]);
}
else if (typeof(options[key]) == "number" && target.type === 'number') {
target.value = options[key];
}
else if (typeof(options[key]) == "string" && target.tagName == 'SELECT') {
target.value = options[key];
}
}
}
}
}
apply_settings();
function save_ace_settings() {
localStorage.pochass = JSON.stringify(editor.getOptions())
Materialize.toast("Ace Settings Saved", 2000);
}
function insert(text) {
var pos = editor.selection.getCursor();
var end = editor.session.insert(pos, text);
editor.selection.setRange({
start: pos,
end: end
});
editor.focus();
}
var foldstatus = true;
function toggle_fold() {
if (foldstatus) {
editor.getSession().foldAll();
}
else {
editor.getSession().unfold();
}
foldstatus = !foldstatus;
}
</script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/js-yaml/3.13.1/js-yaml.js" type="text/javascript" charset="utf-8"></script>
<script type="text/javascript">
var lint_timeout;
var lint_status = $('#lint-status'); // speed optimization
var lint_error = "";
function check_lint() {
if (document.getElementById('currentfile').value.match(".yaml$")) {
try {
var text = editor.getValue().replace(/!(include|secret|env_var)/g,".$1"); // hack because js-yaml does not like !include/!secret
jsyaml.safeLoad(text);
lint_status.text("check_circle");
lint_status.removeClass("cursor-pointer red-text grey-text");
lint_status.addClass("green-text");
lint_error = "";
} catch (err) {
lint_status.text("error");
lint_status.removeClass("green-text grey-text");
lint_status.addClass("cursor-pointer red-text");
lint_error = err.message;
}
} else {
lint_status.empty();
}
}
function queue_lint(e) {
if (document.getElementById('currentfile').value.match(".yaml$")) {
clearTimeout(lint_timeout);
lint_timeout = setTimeout(check_lint, 500);
if (lint_status.text() != "cached") {
lint_status.text("cached");
lint_status.removeClass("cursor-pointer red-text green-text");
lint_status.addClass("grey-text");
}
} else {
lint_status.empty();
}
}
function show_lint_error() {
if(lint_error) {
$("#modal_lint textarea").val(lint_error);
$("#modal_lint").modal('open');
}
}
editor.on('change', queue_lint);
</script>
</body>
</html>""")
# pylint: disable=unused-argument
def signal_handler(sig, frame):
    """Handle signal to shut down server.

    Matches the ``signal.signal`` handler signature (signal number and
    current stack frame); closes the global HTTP server and exits.
    """
    global HTTPD
    LOG.info("Got signal: %s. Shutting down server", str(sig))
    # Close the listening socket so the port is released immediately.
    HTTPD.server_close()
    sys.exit(0)
def load_settings(args):
    """Load settings from file, environment and command-line arguments.

    Precedence, lowest to highest: built-in defaults, settings file
    (JSON, ``args.settings``), environment variables prefixed with
    ENV_PREFIX, explicit command-line arguments. The results are written
    into the module-level configuration globals.
    """
    global LISTENIP, LISTENPORT, BASEPATH, SSL_CERTIFICATE, SSL_KEY, HASS_API, \
    HASS_API_PASSWORD, CREDENTIALS, ALLOWED_NETWORKS, BANNED_IPS, BANLIMIT, \
    DEV, IGNORE_PATTERN, DIRSFIRST, SESAME, VERIFY_HOSTNAME, ENFORCE_BASEPATH, \
    ENV_PREFIX, NOTIFY_SERVICE, USERNAME, PASSWORD, SESAME_TOTP_SECRET, TOTP, \
    GIT, REPO, PORT, IGNORE_SSL, HASS_WS_API, ALLOWED_DOMAINS, HIDEHIDDEN
    settings = {}
    settingsfile = args.settings
    if settingsfile:
        try:
            if os.path.isfile(settingsfile):
                with open(settingsfile) as fptr:
                    settings = json.loads(fptr.read())
                LOG.debug("Settings from file:")
                LOG.debug(settings)
            else:
                LOG.warning("File not found: %s", settingsfile)
        except Exception as err:
            LOG.warning(err)
            LOG.warning("Not loading settings from file")
    ENV_PREFIX = settings.get('ENV_PREFIX', ENV_PREFIX)
    for key, value in os.environ.items():
        if key.startswith(ENV_PREFIX):
            # Convert booleans
            if value in ['true', 'false', 'True', 'False']:
                value = value in ['true', 'True']
            # Convert None / null
            elif value in ['none', 'None', 'null']:
                value = None
            # Convert plain numbers
            elif value.isnumeric():
                value = int(value)
            # Make lists out of comma separated values for list-settings
            elif key[len(ENV_PREFIX):] in ["ALLOWED_NETWORKS", "BANNED_IPS", "IGNORE_PATTERN"]:
                value = value.split(',')
            settings[key[len(ENV_PREFIX):]] = value
    LOG.debug("Settings after looking at environment:")
    LOG.debug(settings)
    if args.git:
        GIT = args.git
    else:
        GIT = settings.get("GIT", GIT)
    if GIT:
        try:
            # pylint: disable=redefined-outer-name,import-outside-toplevel
            from git import Repo as REPO
        except ImportError:
            LOG.warning("Unable to import Git module")
    if args.listen:
        LISTENIP = args.listen
    else:
        LISTENIP = settings.get("LISTENIP", LISTENIP)
    if args.port is not None:
        PORT = args.port
    else:
        # LISTENPORT (older setting name) takes precedence over PORT
        # when both are present in the settings file.
        LISTENPORT = settings.get("LISTENPORT", None)
        PORT = settings.get("PORT", PORT)
        if LISTENPORT is not None:
            PORT = LISTENPORT
    if args.basepath:
        BASEPATH = args.basepath
    else:
        BASEPATH = settings.get("BASEPATH", BASEPATH)
    if args.enforce:
        ENFORCE_BASEPATH = True
    else:
        ENFORCE_BASEPATH = settings.get("ENFORCE_BASEPATH", ENFORCE_BASEPATH)
    SSL_CERTIFICATE = settings.get("SSL_CERTIFICATE", SSL_CERTIFICATE)
    SSL_KEY = settings.get("SSL_KEY", SSL_KEY)
    if args.standalone:
        HASS_API = None
    else:
        HASS_API = settings.get("HASS_API", HASS_API)
    HASS_WS_API = settings.get("HASS_WS_API", HASS_WS_API)
    HASS_API_PASSWORD = settings.get("HASS_API_PASSWORD", HASS_API_PASSWORD)
    CREDENTIALS = settings.get("CREDENTIALS", CREDENTIALS)
    ALLOWED_NETWORKS = settings.get("ALLOWED_NETWORKS", ALLOWED_NETWORKS)
    if ALLOWED_NETWORKS and not all(ALLOWED_NETWORKS):
        LOG.warning("Invalid value for ALLOWED_NETWORKS. Using empty list.")
        ALLOWED_NETWORKS = []
    # Iterate over a copy: removing from the list while iterating it
    # would silently skip the element following each removed entry.
    for net in list(ALLOWED_NETWORKS):
        try:
            ipaddress.ip_network(net)
        except Exception:
            LOG.warning("Invalid network in ALLOWED_NETWORKS: %s", net)
            ALLOWED_NETWORKS.remove(net)
    ALLOWED_DOMAINS = settings.get("ALLOWED_DOMAINS", ALLOWED_DOMAINS)
    if ALLOWED_DOMAINS and not all(ALLOWED_DOMAINS):
        LOG.warning("Invalid value for ALLOWED_DOMAINS. Using empty list.")
        ALLOWED_DOMAINS = []
    BANNED_IPS = settings.get("BANNED_IPS", BANNED_IPS)
    if BANNED_IPS and not all(BANNED_IPS):
        LOG.warning("Invalid value for BANNED_IPS. Using empty list.")
        BANNED_IPS = []
    # Same copy-while-iterating precaution as for ALLOWED_NETWORKS above.
    for banned_ip in list(BANNED_IPS):
        try:
            ipaddress.ip_address(banned_ip)
        except Exception:
            LOG.warning("Invalid IP address in BANNED_IPS: %s", banned_ip)
            BANNED_IPS.remove(banned_ip)
    BANLIMIT = settings.get("BANLIMIT", BANLIMIT)
    if args.dev:
        DEV = True
    else:
        DEV = settings.get("DEV", DEV)
    IGNORE_PATTERN = settings.get("IGNORE_PATTERN", IGNORE_PATTERN)
    if IGNORE_PATTERN and not all(IGNORE_PATTERN):
        LOG.warning("Invalid value for IGNORE_PATTERN. Using empty list.")
        IGNORE_PATTERN = []
    if args.dirsfirst:
        DIRSFIRST = args.dirsfirst
    else:
        DIRSFIRST = settings.get("DIRSFIRST", DIRSFIRST)
    if args.hidehidden:
        HIDEHIDDEN = args.hidehidden
    else:
        HIDEHIDDEN = settings.get("HIDEHIDDEN", HIDEHIDDEN)
    SESAME = settings.get("SESAME", SESAME)
    SESAME_TOTP_SECRET = settings.get("SESAME_TOTP_SECRET", SESAME_TOTP_SECRET)
    VERIFY_HOSTNAME = settings.get("VERIFY_HOSTNAME", VERIFY_HOSTNAME)
    NOTIFY_SERVICE = settings.get("NOTIFY_SERVICE", NOTIFY_SERVICE_DEFAULT)
    IGNORE_SSL = settings.get("IGNORE_SSL", IGNORE_SSL)
    if IGNORE_SSL:
        # Disable TLS certificate verification globally for outgoing requests.
        # pylint: disable=protected-access
        ssl._create_default_https_context = ssl._create_unverified_context
    if args.username and args.password:
        USERNAME = args.username
        PASSWORD = args.password
    else:
        USERNAME = settings.get("USERNAME", USERNAME)
        PASSWORD = settings.get("PASSWORD", PASSWORD)
        PASSWORD = str(PASSWORD) if PASSWORD else None
    if CREDENTIALS and (USERNAME is None or PASSWORD is None):
        # Legacy "user:password" CREDENTIALS string as a fallback; the
        # password may itself contain colons, hence the re-join.
        USERNAME = CREDENTIALS.split(":")[0]
        PASSWORD = ":".join(CREDENTIALS.split(":")[1:])
    if PASSWORD and PASSWORD.startswith("{sha256}"):
        # Hash digests are compared case-insensitively; normalize once here.
        PASSWORD = PASSWORD.lower()
    if SESAME_TOTP_SECRET:
        try:
            #pylint: disable=import-outside-toplevel
            import pyotp
            TOTP = pyotp.TOTP(SESAME_TOTP_SECRET)
        except ImportError:
            LOG.warning("Unable to import pyotp module")
        except Exception as err:
            LOG.warning("Unable to create TOTP object: %s", err)
def is_jwt(token):
    """Perform basic check if token is a JWT token."""
    # A JWT consists of exactly three dot-separated segments
    # (header.payload.signature), i.e. exactly two dots.
    return token.count('.') == 2
def is_safe_path(basedir, path, follow_symlinks=True):
    """Check path for malicious traversal.

    Returns True when *path* resolves to *basedir* itself or to a
    location inside it (or when no basedir is enforced at all).
    *path* may be str or bytes; *basedir* is a str.
    """
    if basedir is None:
        # No base path configured -- nothing to enforce.
        return True
    if follow_symlinks:
        resolved = os.path.realpath(path)
    else:
        resolved = os.path.abspath(path)
    # Compare in whatever type the resolved path came back in; the
    # original always encoded basedir, which raised TypeError for str paths.
    if isinstance(resolved, bytes):
        base = basedir.encode('utf-8')
        sep = os.sep.encode('utf-8')
    else:
        base = basedir
        sep = os.sep
    # Require an exact match or a path-separator boundary so that e.g.
    # "/config-evil" does not pass the check for basedir "/config".
    base = base.rstrip(sep)
    return resolved == base or resolved.startswith(base + sep)
def get_dircontent(path, repo=None):
    """Get content of directory.

    Returns a list of dicts describing each entry: name, dir, fullpath,
    type ('dir'/'file'), size/modified/created stats, and -- when *repo*
    is a GitPython Repo for this directory -- git tracking/staging status.
    Entries matching IGNORE_PATTERN are omitted.
    """
    dircontent = []
    if repo:
        # Build lookup structures keyed by absolute path
        # (working_dir + os.sep + repo-relative path).
        untracked = [
            "%s%s%s"%(repo.working_dir, os.sep, e) for e in \
            ["%s"%os.sep.join(f.split('/')) for f in repo.untracked_files]
        ]
        staged = {}
        unstaged = {}
        try:
            # diff against HEAD fails on a repo without any commits yet
            for element in repo.index.diff("HEAD"):
                staged["%s%s%s" % (repo.working_dir,
                                   os.sep,
                                   "%s"%os.sep.join(
                                       element.b_path.split('/')))] = element.change_type
        except Exception as err:
            LOG.warning("Exception: %s", str(err))
        for element in repo.index.diff(None):
            unstaged["%s%s%s" % (repo.working_dir,
                                 os.sep,
                                 "%s"%os.sep.join(
                                     element.b_path.split('/')))] = element.change_type
    else:
        untracked = []
        staged = {}
        unstaged = {}

    def sorted_file_list():
        """Sort list of files / directories."""
        dirlist = [x for x in os.listdir(path) if os.path.isdir(os.path.join(path, x))]
        filelist = [x for x in os.listdir(path) if not os.path.isdir(os.path.join(path, x))]
        if HIDEHIDDEN:
            dirlist = [x for x in dirlist if not x.startswith('.')]
            filelist = [x for x in filelist if not x.startswith('.')]
        if DIRSFIRST:
            return sorted(dirlist, key=lambda x: x.lower()) + \
                sorted(filelist, key=lambda x: x.lower())
        return sorted(dirlist + filelist, key=lambda x: x.lower())

    for elem in sorted_file_list():
        edata = {}
        edata['name'] = elem
        edata['dir'] = path
        edata['fullpath'] = os.path.abspath(os.path.join(path, elem))
        edata['type'] = 'dir' if os.path.isdir(edata['fullpath']) else 'file'
        try:
            stats = os.stat(os.path.join(path, elem))
            edata['size'] = stats.st_size
            edata['modified'] = stats.st_mtime
            edata['created'] = stats.st_ctime
        except Exception:
            # Entry may have vanished between listdir and stat; keep zeros.
            edata['size'] = 0
            edata['modified'] = 0
            edata['created'] = 0
        edata['changetype'] = None
        edata['gitstatus'] = bool(repo)
        edata['gittracked'] = 'untracked' if edata['fullpath'] in untracked else 'tracked'
        if edata['fullpath'] in unstaged:
            edata['gitstatus'] = 'unstaged'
            # Look up by full path: staged/unstaged are keyed by full path,
            # so the former lookup by bare name always returned None.
            edata['changetype'] = unstaged.get(edata['fullpath'], None)
        elif edata['fullpath'] in staged:
            edata['gitstatus'] = 'staged'
            edata['changetype'] = staged.get(edata['fullpath'], None)
        hidden = False
        if IGNORE_PATTERN is not None:
            for file_pattern in IGNORE_PATTERN:
                if fnmatch.fnmatch(edata['name'], file_pattern):
                    hidden = True
        if not hidden:
            dircontent.append(edata)
    return dircontent
def get_html():
    """Return the UI template: dev.html from disk in dev-mode, else embedded INDEX."""
    if DEV:
        try:
            dev_path = os.path.join(
                os.path.dirname(os.path.realpath(__file__)), "dev.html")
            with open(dev_path) as handle:
                return Template(handle.read())
        except Exception as err:
            # Fall back to the embedded template if dev.html cannot be read.
            LOG.warning(err)
            LOG.warning("Delivering embedded HTML")
    return INDEX
def password_problems(password, name="UNKNOWN"):
    """Rudimentary checks for password strength.

    Returns a bit field of detected problems (0 means none found):
    1 = too short, 2 = no digits, 4 = no alphabetic characters,
    8 = not enough unique characters.
    """
    problems = 0
    # Check for None BEFORE the str() conversion: previously str(None)
    # produced the literal string "None", making this check unreachable.
    if password is None:
        return problems
    password = str(password)
    if len(password) < 8:
        LOG.warning("Password %s is too short", name)
        problems += 1
    if password.isalpha():
        LOG.warning("Password %s does not contain digits", name)
        problems += 2
    if password.isdigit():
        LOG.warning("Password %s does not contain alphabetic characters", name)
        problems += 4
    if password:
        quota = len(set(password)) / len(password)
        exp = len(password) ** len(set(password))
        score = exp / quota / 8
    else:
        # Guard: an empty password previously raised ZeroDivisionError here.
        score = 0
    if score < 65536:
        LOG.warning("Password %s does not contain enough unique characters (%i)",
                    name, len(set(password)))
        problems += 8
    return problems
def check_access(clientip):
    """Check if IP is allowed to access the configurator / API.

    An IP passes if it is not banned and either no network restrictions are
    configured, it falls within ALLOWED_NETWORKS, or it resolves from one of
    ALLOWED_DOMAINS. An IP that fails every check is added to BANNED_IPS.
    """
    global BANNED_IPS
    if clientip in BANNED_IPS:
        LOG.warning("Client IP banned.")
        return False
    if not ALLOWED_NETWORKS:
        # No restrictions configured: everyone is allowed.
        return True
    # Parse the client IP once; it is loop-invariant (was re-parsed per net).
    ipobject = ipaddress.ip_address(clientip)
    for net in ALLOWED_NETWORKS:
        if ipobject in ipaddress.ip_network(net):
            return True
    LOG.warning("Client IP not within allowed networks.")
    if ALLOWED_DOMAINS:
        for domain in ALLOWED_DOMAINS:
            try:
                domain_data = socket.getaddrinfo(domain, None)
            except Exception as err:
                # Best effort: DNS failure for one domain shouldn't abort the scan.
                LOG.warning("Unable to lookup domain data: %s", err)
                continue
            for res in domain_data:
                if res[0] in [socket.AF_INET, socket.AF_INET6]:
                    if res[4][0] == clientip:
                        return True
        LOG.warning("Client IP not within allowed domains.")
    BANNED_IPS.append(clientip)
    return False
def verify_hostname(request_hostname):
    """Verify the provided host header is correct."""
    # Accept when no expected hostname is configured, or when the configured
    # value occurs within the supplied Host header.
    return not VERIFY_HOSTNAME or VERIFY_HOSTNAME in request_hostname
class RequestHandler(BaseHTTPRequestHandler):
    """HTTP request handler implementing the configurator UI and its API.

    do_GET serves the single-page UI plus read-oriented endpoints (file and
    directory access, git status, Home Assistant service calls); do_POST
    implements the mutating endpoints (save/upload/rename/delete, git
    operations, network ACL management). Every request is gated through
    verify_hostname() and check_access().
    """
    # pylint: disable=redefined-builtin
    def log_message(self, format, *args):
        # Route BaseHTTPRequestHandler's per-request log line through LOG.
        LOG.info("%s - %s", self.client_address[0], format % args)
    # pylint: disable=invalid-name
    def do_BLOCK(self, status=420, reason="Policy not fulfilled"):
        """Customized do_BLOCK method."""
        self.send_response(status)
        self.end_headers()
        self.wfile.write(bytes(reason, "utf8"))
    # pylint: disable=invalid-name
    def do_GET(self):
        """Customized do_GET method.

        Dispatches on the request path: one-time access tokens first
        (SESAME / TOTP), then the read API endpoints, then the index page.
        """
        if not verify_hostname(self.headers.get('Host', '')):
            self.do_BLOCK(403, "Forbidden")
            return
        req = urlparse(self.path)
        # One-time access tokens: a SESAME path segment or a valid TOTP code
        # whitelists the client's IP before the normal access check runs.
        if SESAME or TOTP:
            chunk = req.path.split("/")[-1]
            if SESAME and chunk == SESAME:
                if self.client_address[0] not in ALLOWED_NETWORKS:
                    ALLOWED_NETWORKS.append(self.client_address[0])
                if self.client_address[0] in BANNED_IPS:
                    BANNED_IPS.remove(self.client_address[0])
                # Redirect to the same URL with the token stripped off.
                url = req.path[:req.path.rfind(chunk)]
                self.send_response(302)
                self.send_header('Location', url)
                self.end_headers()
                data = {
                    "title": "HASS Configurator - SESAME access",
                    "message": "Your SESAME token has been used to whitelist " \
                               "the IP address %s." % self.client_address[0]
                }
                notify(**data)
                return
            if TOTP and TOTP.verify(chunk):
                if self.client_address[0] not in ALLOWED_NETWORKS:
                    ALLOWED_NETWORKS.append(self.client_address[0])
                if self.client_address[0] in BANNED_IPS:
                    BANNED_IPS.remove(self.client_address[0])
                url = req.path[:req.path.rfind(chunk)]
                self.send_response(302)
                self.send_header('Location', url)
                self.end_headers()
                # NOTE(review): this notification still says "SESAME" although
                # a TOTP code was used - looks like a copy-paste leftover.
                data = {
                    "title": "HASS Configurator - SESAME access",
                    "message": "Your SESAME token has been used to whitelist " \
                               "the IP address %s." % self.client_address[0]
                }
                notify(**data)
                return
        if not check_access(self.client_address[0]):
            self.do_BLOCK()
            return
        query = parse_qs(req.query)
        # NOTE(review): 200 is sent before dispatching; the 404 fallback at the
        # bottom therefore emits a second status line - confirm intended.
        self.send_response(200)
        # pylint: disable=no-else-return
        if req.path.endswith('/api/file'):
            # Serve a single file: images raw with their mimetype, everything
            # else decoded as UTF-8 text.
            content = ""
            filename = query.get('filename', None)
            try:
                if filename:
                    is_raw = False
                    filename = unquote(filename[0]).encode('utf-8')
                    if ENFORCE_BASEPATH and not is_safe_path(BASEPATH, filename):
                        raise OSError('Access denied.')
                    filepath = os.path.join(BASEDIR.encode('utf-8'), filename)
                    if os.path.isfile(filepath):
                        mimetype = mimetypes.guess_type(filepath.decode('utf-8'))
                        if mimetype[0] is not None:
                            if mimetype[0].split('/')[0] == 'image':
                                is_raw = True
                        if is_raw:
                            with open(filepath, 'rb') as fptr:
                                content = fptr.read()
                            self.send_header('Content-type', mimetype[0])
                        else:
                            with open(filepath, 'rb') as fptr:
                                content += fptr.read().decode('utf-8')
                            self.send_header('Content-type', 'text/text')
                    else:
                        self.send_header('Content-type', 'text/text')
                        content = "File not found"
            except Exception as err:
                LOG.warning(err)
                self.send_header('Content-type', 'text/text')
                content = str(err)
            self.end_headers()
            # NOTE(review): if the 'filename' query parameter is missing,
            # is_raw was never assigned and this raises NameError - verify.
            if is_raw:
                self.wfile.write(content)
            else:
                self.wfile.write(bytes(content, "utf8"))
            return
        elif req.path.endswith('/api/download'):
            # Serve a file as an attachment download.
            content = ""
            filename = query.get('filename', None)
            try:
                if filename:
                    filename = unquote(filename[0]).encode('utf-8')
                    if ENFORCE_BASEPATH and not is_safe_path(BASEPATH, filename):
                        raise OSError('Access denied.')
                    LOG.info(filename)
                    if os.path.isfile(os.path.join(BASEDIR.encode('utf-8'), filename)):
                        with open(os.path.join(BASEDIR.encode('utf-8'), filename), 'rb') as fptr:
                            filecontent = fptr.read()
                        self.send_header(
                            'Content-Disposition',
                            'attachment; filename=%s' % filename.decode('utf-8').split(os.sep)[-1])
                        self.end_headers()
                        self.wfile.write(filecontent)
                        return
                    content = "File not found"
            except Exception as err:
                LOG.warning(err)
                content = str(err)
            self.send_header('Content-type', 'text/text')
            self.wfile.write(bytes(content, "utf8"))
            return
        elif req.path.endswith('/api/listdir'):
            # List a directory as JSON, including git branch info when the
            # path is inside a repository.
            content = {'error': None}
            self.send_header('Content-type', 'text/json')
            self.end_headers()
            dirpath = query.get('path', None)
            try:
                if dirpath:
                    dirpath = unquote(dirpath[0]).encode('utf-8')
                    if os.path.isdir(dirpath):
                        if ENFORCE_BASEPATH and not is_safe_path(BASEPATH,
                                                                 dirpath):
                            raise OSError('Access denied.')
                        repo = None
                        activebranch = None
                        dirty = False
                        branches = []
                        if REPO:
                            try:
                                # pylint: disable=not-callable
                                repo = REPO(dirpath.decode('utf-8'),
                                            search_parent_directories=True)
                                activebranch = repo.active_branch.name
                                dirty = repo.is_dirty()
                                for branch in repo.branches:
                                    branches.append(branch.name)
                            except Exception as err:
                                # Not a git repo (or git unavailable): carry on
                                # with plain directory data.
                                LOG.debug("Exception (no repo): %s", str(err))
                        dircontent = get_dircontent(dirpath.decode('utf-8'), repo)
                        filedata = {
                            'content': dircontent,
                            'abspath': os.path.abspath(dirpath).decode('utf-8'),
                            'parent': os.path.dirname(os.path.abspath(dirpath)).decode('utf-8'),
                            'branches': branches,
                            'activebranch': activebranch,
                            'dirty': dirty,
                            'error': None
                        }
                        self.wfile.write(bytes(json.dumps(filedata), "utf8"))
            except Exception as err:
                LOG.warning(err)
                content['error'] = str(err)
                self.wfile.write(bytes(json.dumps(content), "utf8"))
            return
        elif req.path.endswith('/api/abspath'):
            # Resolve a path to its absolute form (written as raw bytes).
            content = ""
            self.send_header('Content-type', 'text/text')
            self.end_headers()
            dirpath = query.get('path', None)
            if dirpath:
                dirpath = unquote(dirpath[0]).encode('utf-8')
                LOG.debug(dirpath)
                absp = os.path.abspath(dirpath)
                LOG.debug(absp)
                if os.path.isdir(dirpath):
                    self.wfile.write(os.path.abspath(dirpath))
            return
        elif req.path.endswith('/api/parent'):
            # Resolve a path's parent directory (written as raw bytes).
            content = ""
            self.send_header('Content-type', 'text/text')
            self.end_headers()
            dirpath = query.get('path', None)
            if dirpath:
                dirpath = unquote(dirpath[0]).encode('utf-8')
                LOG.debug(dirpath)
                absp = os.path.abspath(dirpath)
                LOG.debug(absp)
                if os.path.isdir(dirpath):
                    self.wfile.write(os.path.abspath(os.path.dirname(dirpath)))
            return
        elif req.path.endswith('/api/netstat'):
            # Report the current network ACLs.
            content = ""
            self.send_header('Content-type', 'text/json')
            self.end_headers()
            res = {
                "allowed_networks": ALLOWED_NETWORKS,
                "banned_ips": BANNED_IPS
            }
            self.wfile.write(bytes(json.dumps(res), "utf8"))
            return
        elif req.path.endswith('/api/restart'):
            # Ask Home Assistant to restart via its REST API.
            LOG.info("/api/restart")
            self.send_header('Content-type', 'text/json')
            self.end_headers()
            res = {"restart": False}
            try:
                headers = {
                    "Content-Type": "application/json"
                }
                if HASS_API_PASSWORD:
                    # JWT tokens go in the Authorization header, legacy API
                    # passwords in x-ha-access.
                    if is_jwt(HASS_API_PASSWORD):
                        headers["Authorization"] = "Bearer %s" % HASS_API_PASSWORD
                    else:
                        headers["x-ha-access"] = HASS_API_PASSWORD
                req = urllib.request.Request(
                    "%sservices/homeassistant/restart" % HASS_API,
                    headers=headers, method='POST')
                with urllib.request.urlopen(req) as response:
                    res = json.loads(response.read().decode('utf-8'))
                    LOG.debug(res)
            except Exception as err:
                LOG.warning(err)
                res['restart'] = str(err)
            self.wfile.write(bytes(json.dumps(res), "utf8"))
            return
        elif req.path.endswith('/api/check_config'):
            # NOTE(review): the request object is built but never sent (no
            # urlopen call), so this endpoint always returns the default
            # {"check_config": False}; errors also land under the 'restart'
            # key. Looks like an unfinished copy of /api/restart - verify.
            LOG.info("/api/check_config")
            self.send_header('Content-type', 'text/json')
            self.end_headers()
            res = {"check_config": False}
            try:
                headers = {
                    "Content-Type": "application/json"
                }
                if HASS_API_PASSWORD:
                    if is_jwt(HASS_API_PASSWORD):
                        headers["Authorization"] = "Bearer %s" % HASS_API_PASSWORD
                    else:
                        headers["x-ha-access"] = HASS_API_PASSWORD
                req = urllib.request.Request(
                    "%sservices/homeassistant/check_config" % HASS_API,
                    headers=headers, method='POST')
            except Exception as err:
                LOG.warning(err)
                res['restart'] = str(err)
            self.wfile.write(bytes(json.dumps(res), "utf8"))
            return
        elif req.path.endswith('/api/reload_automations'):
            # Call the automation.reload service in Home Assistant.
            LOG.info("/api/reload_automations")
            self.send_header('Content-type', 'text/json')
            self.end_headers()
            res = {"reload_automations": False}
            try:
                headers = {
                    "Content-Type": "application/json"
                }
                if HASS_API_PASSWORD:
                    if is_jwt(HASS_API_PASSWORD):
                        headers["Authorization"] = "Bearer %s" % HASS_API_PASSWORD
                    else:
                        headers["x-ha-access"] = HASS_API_PASSWORD
                req = urllib.request.Request(
                    "%sservices/automation/reload" % HASS_API,
                    headers=headers, method='POST')
                with urllib.request.urlopen(req) as response:
                    LOG.debug(json.loads(response.read().decode('utf-8')))
                    res['service'] = "called successfully"
            except Exception as err:
                LOG.warning(err)
                # NOTE(review): error is stored under 'restart' in all reload
                # handlers - apparent copy-paste from /api/restart.
                res['restart'] = str(err)
            self.wfile.write(bytes(json.dumps(res), "utf8"))
            return
        elif req.path.endswith('/api/reload_scripts'):
            # Call the script.reload service in Home Assistant.
            LOG.info("/api/reload_scripts")
            self.send_header('Content-type', 'text/json')
            self.end_headers()
            res = {"reload_scripts": False}
            try:
                headers = {
                    "Content-Type": "application/json"
                }
                if HASS_API_PASSWORD:
                    if is_jwt(HASS_API_PASSWORD):
                        headers["Authorization"] = "Bearer %s" % HASS_API_PASSWORD
                    else:
                        headers["x-ha-access"] = HASS_API_PASSWORD
                req = urllib.request.Request(
                    "%sservices/script/reload" % HASS_API,
                    headers=headers, method='POST')
                with urllib.request.urlopen(req) as response:
                    LOG.debug(json.loads(response.read().decode('utf-8')))
                    res['service'] = "called successfully"
            except Exception as err:
                LOG.warning(err)
                res['restart'] = str(err)
            self.wfile.write(bytes(json.dumps(res), "utf8"))
            return
        elif req.path.endswith('/api/reload_groups'):
            # Call the group.reload service in Home Assistant.
            LOG.info("/api/reload_groups")
            self.send_header('Content-type', 'text/json')
            self.end_headers()
            res = {"reload_groups": False}
            try:
                headers = {
                    "Content-Type": "application/json"
                }
                if HASS_API_PASSWORD:
                    if is_jwt(HASS_API_PASSWORD):
                        headers["Authorization"] = "Bearer %s" % HASS_API_PASSWORD
                    else:
                        headers["x-ha-access"] = HASS_API_PASSWORD
                req = urllib.request.Request(
                    "%sservices/group/reload" % HASS_API,
                    headers=headers, method='POST')
                with urllib.request.urlopen(req) as response:
                    LOG.debug(json.loads(response.read().decode('utf-8')))
                    res['service'] = "called successfully"
            except Exception as err:
                LOG.warning(err)
                res['restart'] = str(err)
            self.wfile.write(bytes(json.dumps(res), "utf8"))
            return
        elif req.path.endswith('/api/reload_core'):
            # Call homeassistant.reload_core_config in Home Assistant.
            LOG.info("/api/reload_core")
            self.send_header('Content-type', 'text/json')
            self.end_headers()
            res = {"reload_core": False}
            try:
                headers = {
                    "Content-Type": "application/json"
                }
                if HASS_API_PASSWORD:
                    if is_jwt(HASS_API_PASSWORD):
                        headers["Authorization"] = "Bearer %s" % HASS_API_PASSWORD
                    else:
                        headers["x-ha-access"] = HASS_API_PASSWORD
                req = urllib.request.Request(
                    "%sservices/homeassistant/reload_core_config" % HASS_API,
                    headers=headers, method='POST')
                with urllib.request.urlopen(req) as response:
                    LOG.debug(json.loads(response.read().decode('utf-8')))
                    res['service'] = "called successfully"
            except Exception as err:
                LOG.warning(err)
                res['restart'] = str(err)
            self.wfile.write(bytes(json.dumps(res), "utf8"))
            return
        elif req.path.endswith('/'):
            # Deliver the single-page UI, pre-populated with HASS services,
            # events and states fetched from the REST API (best effort).
            self.send_header('Content-type', 'text/html')
            self.end_headers()
            loadfile = query.get('loadfile', [None])[0]
            if loadfile is None:
                loadfile = 'null'
            else:
                loadfile = "'%s'" % loadfile
            services = "[]"
            events = "[]"
            states = "[]"
            try:
                if HASS_API:
                    headers = {
                        "Content-Type": "application/json"
                    }
                    if HASS_API_PASSWORD:
                        if is_jwt(HASS_API_PASSWORD):
                            headers["Authorization"] = "Bearer %s" % HASS_API_PASSWORD
                        else:
                            headers["x-ha-access"] = HASS_API_PASSWORD
                    req = urllib.request.Request("%sservices" % HASS_API,
                                                 headers=headers, method='GET')
                    with urllib.request.urlopen(req) as response:
                        services = response.read().decode('utf-8')
                    req = urllib.request.Request("%sevents" % HASS_API,
                                                 headers=headers, method='GET')
                    with urllib.request.urlopen(req) as response:
                        events = response.read().decode('utf-8')
                    req = urllib.request.Request("%sstates" % HASS_API,
                                                 headers=headers, method='GET')
                    with urllib.request.urlopen(req) as response:
                        states = response.read().decode('utf-8')
            except Exception as err:
                LOG.warning("Exception getting bootstrap")
                LOG.warning(err)
            # Highlight the version indicator if a newer release exists.
            color = ""
            try:
                response = urllib.request.urlopen(RELEASEURL)
                latest = json.loads(response.read().decode('utf-8'))['tag_name']
                if VERSION != latest:
                    color = "red-text"
            except Exception as err:
                LOG.warning("Exception getting release")
                LOG.warning(err)
            # Derive the websocket endpoint from the REST API URL unless an
            # explicit websocket URL is configured.
            ws_api = ""
            if HASS_API:
                protocol, uri = HASS_API.split("//")
                ws_api = "%s://%swebsocket" % (
                    "wss" if protocol == 'https' else 'ws', uri
                )
            if HASS_WS_API:
                ws_api = HASS_WS_API
            standalone = ""
            if not HASS_API:
                standalone = "toggle_hass_panels();"
            html = get_html().safe_substitute(
                services=services,
                events=events,
                states=states,
                loadfile=loadfile,
                current=VERSION,
                versionclass=color,
                githidden="" if GIT else "hiddendiv",
                # pylint: disable=anomalous-backslash-in-string
                separator="\%s" % os.sep if os.sep == "\\" else os.sep,
                your_address=self.client_address[0],
                listening_address="%s://%s:%i" % (
                    'https' if SSL_CERTIFICATE else 'http', LISTENIP, PORT),
                hass_api_address="%s" % (HASS_API, ),
                hass_ws_address=ws_api,
                api_password=HASS_API_PASSWORD if HASS_API_PASSWORD else "",
                standalone=standalone)
            self.wfile.write(bytes(html, "utf8"))
            return
        else:
            # Unknown path: 404 (see NOTE above about the earlier 200).
            self.send_response(404)
            self.end_headers()
            self.wfile.write(bytes("File not found", "utf8"))
    # pylint: disable=invalid-name
    def do_POST(self):
        """Customized do_POST method.

        All mutating endpoints. Each branch parses the form body, performs
        its action, and on success writes its own 200/JSON response and
        returns; on failure it falls through to the generic JSON error
        response at the bottom.
        """
        global ALLOWED_NETWORKS, BANNED_IPS
        if not verify_hostname(self.headers.get('Host', '')):
            self.do_BLOCK(403, "Forbidden")
            return
        if not check_access(self.client_address[0]):
            self.do_BLOCK()
            return
        req = urlparse(self.path)
        response = {
            "error": True,
            "message": "Generic failure"
        }
        length = int(self.headers['content-length'])
        if req.path.endswith('/api/save'):
            # Write posted text to the given filename.
            try:
                postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
                                    keep_blank_values=1)
            except Exception as err:
                LOG.warning(err)
                response['message'] = "%s" % (str(err))
                postvars = {}
            if 'filename' in postvars.keys() and 'text' in postvars.keys():
                if postvars['filename'] and postvars['text']:
                    try:
                        filename = unquote(postvars['filename'][0])
                        response['file'] = filename
                        with open(filename, 'wb') as fptr:
                            fptr.write(bytes(postvars['text'][0], "utf-8"))
                        self.send_response(200)
                        self.send_header('Content-type', 'text/json')
                        self.end_headers()
                        response['error'] = False
                        response['message'] = "File saved successfully"
                        self.wfile.write(bytes(json.dumps(response), "utf8"))
                        return
                    except Exception as err:
                        response['message'] = "%s" % (str(err))
                        LOG.warning(err)
            else:
                response['message'] = "Missing filename or text"
        elif req.path.endswith('/api/upload'):
            # Multipart file upload; oversized bodies are drained and rejected.
            if length > 104857600: #100 MB for now
                read = 0
                while read < length:
                    read += len(self.rfile.read(min(66556, length - read)))
                self.send_response(200)
                self.send_header('Content-type', 'text/json')
                self.end_headers()
                response['error'] = True
                response['message'] = "File too big: %i" % read
                self.wfile.write(bytes(json.dumps(response), "utf8"))
                return
            form = cgi.FieldStorage(
                fp=self.rfile,
                headers=self.headers,
                environ={
                    'REQUEST_METHOD': 'POST',
                    'CONTENT_TYPE': self.headers['Content-Type'],
                })
            filename = form['file'].filename
            filepath = form['path'].file.read()
            data = form['file'].file.read()
            # NOTE(review): file handle is never closed explicitly; relies on
            # refcounting - consider a with-statement.
            open("%s%s%s" % (filepath, os.sep, filename), "wb").write(data)
            self.send_response(200)
            self.send_header('Content-type', 'text/json')
            self.end_headers()
            response['error'] = False
            response['message'] = "Upload successful"
            self.wfile.write(bytes(json.dumps(response), "utf8"))
            return
        elif req.path.endswith('/api/rename'):
            # Rename src to dstfilename within src's directory.
            try:
                postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
                                    keep_blank_values=1)
            except Exception as err:
                LOG.warning(err)
                response['message'] = "%s" % (str(err))
                postvars = {}
            if 'src' in postvars.keys() and 'dstfilename' in postvars.keys():
                if postvars['src'] and postvars['dstfilename']:
                    try:
                        src = unquote(postvars['src'][0])
                        dstfilename = unquote(postvars['dstfilename'][0])
                        renamepath = src[:src.index(os.path.basename(src))] + dstfilename
                        response['path'] = renamepath
                        try:
                            os.rename(src, renamepath)
                            self.send_response(200)
                            self.send_header('Content-type', 'text/json')
                            self.end_headers()
                            response['error'] = False
                            response['message'] = "Rename successful"
                            self.wfile.write(bytes(json.dumps(response), "utf8"))
                            return
                        except Exception as err:
                            LOG.warning(err)
                            response['error'] = True
                            response['message'] = str(err)
                    except Exception as err:
                        response['message'] = "%s" % (str(err))
                        LOG.warning(err)
            else:
                response['message'] = "Missing filename or text"
        elif req.path.endswith('/api/delete'):
            # Delete a file, or an (empty) directory via rmdir.
            try:
                postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
                                    keep_blank_values=1)
            except Exception as err:
                LOG.warning(err)
                response['message'] = "%s" % (str(err))
                postvars = {}
            if 'path' in postvars.keys():
                if postvars['path']:
                    try:
                        delpath = unquote(postvars['path'][0])
                        response['path'] = delpath
                        try:
                            if os.path.isdir(delpath):
                                os.rmdir(delpath)
                            else:
                                os.unlink(delpath)
                            self.send_response(200)
                            self.send_header('Content-type', 'text/json')
                            self.end_headers()
                            response['error'] = False
                            response['message'] = "Deletion successful"
                            self.wfile.write(bytes(json.dumps(response), "utf8"))
                            return
                        except Exception as err:
                            LOG.warning(err)
                            response['error'] = True
                            response['message'] = str(err)
                    except Exception as err:
                        response['message'] = "%s" % (str(err))
                        LOG.warning(err)
            else:
                response['message'] = "Missing filename or text"
        elif req.path.endswith('/api/exec_command'):
            # Run an arbitrary shell command with an optional timeout and
            # return its stdout/stderr/returncode.
            try:
                postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
                                    keep_blank_values=1)
            except Exception as err:
                LOG.warning(err)
                response['message'] = "%s" % (str(err))
                postvars = {}
            if 'command' in postvars.keys():
                if postvars['command']:
                    try:
                        command = shlex.split(postvars['command'][0])
                        timeout = 15
                        if 'timeout' in postvars.keys():
                            if postvars['timeout']:
                                timeout = int(postvars['timeout'][0])
                        try:
                            proc = subprocess.Popen(
                                command,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
                            stdout, stderr = proc.communicate(timeout=timeout)
                            self.send_response(200)
                            self.send_header('Content-type', 'text/json')
                            self.end_headers()
                            response['error'] = False
                            response['message'] = "Command executed: %s" % postvars['command'][0]
                            response['returncode'] = proc.returncode
                            try:
                                response['stdout'] = stdout.decode(sys.getdefaultencoding())
                            except Exception as err:
                                LOG.warning(err)
                                response['stdout'] = stdout.decode("utf-8", errors="replace")
                            try:
                                response['stderr'] = stderr.decode(sys.getdefaultencoding())
                            except Exception as err:
                                LOG.warning(err)
                                response['stderr'] = stderr.decode("utf-8", errors="replace")
                            self.wfile.write(bytes(json.dumps(response), "utf8"))
                            return
                        except Exception as err:
                            LOG.warning(err)
                            response['error'] = True
                            response['message'] = str(err)
                    except Exception as err:
                        response['message'] = "%s" % (str(err))
                        LOG.warning(err)
            else:
                response['message'] = "Missing command"
        elif req.path.endswith('/api/gitadd'):
            # Stage a file in the containing git repository.
            try:
                postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
                                    keep_blank_values=1)
            except Exception as err:
                LOG.warning(err)
                response['message'] = "%s" % (str(err))
                postvars = {}
            if 'path' in postvars.keys():
                if postvars['path']:
                    try:
                        addpath = unquote(postvars['path'][0])
                        # pylint: disable=not-callable
                        repo = REPO(addpath,
                                    search_parent_directories=True)
                        # Convert the absolute OS path to a repo-relative
                        # '/'-separated path as git expects.
                        filepath = "/".join(
                            addpath.split(os.sep)[len(repo.working_dir.split(os.sep)):])
                        response['path'] = filepath
                        try:
                            repo.index.add([filepath])
                            response['error'] = False
                            response['message'] = "Added file to index"
                            self.send_response(200)
                            self.send_header('Content-type', 'text/json')
                            self.end_headers()
                            self.wfile.write(bytes(json.dumps(response), "utf8"))
                            return
                        except Exception as err:
                            LOG.warning(err)
                            response['error'] = True
                            response['message'] = str(err)
                    except Exception as err:
                        response['message'] = "%s" % (str(err))
                        LOG.warning(err)
            else:
                response['message'] = "Missing filename"
        elif req.path.endswith('/api/gitdiff'):
            # Return the working-tree diff of a single file.
            try:
                postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
                                    keep_blank_values=1)
            except Exception as err:
                LOG.warning(err)
                response['message'] = "%s" % (str(err))
                postvars = {}
            if 'path' in postvars.keys():
                if postvars['path']:
                    try:
                        diffpath = unquote(postvars['path'][0])
                        # pylint: disable=not-callable
                        repo = REPO(diffpath,
                                    search_parent_directories=True)
                        filepath = "/".join(
                            diffpath.split(os.sep)[len(repo.working_dir.split(os.sep)):])
                        response['path'] = filepath
                        try:
                            diff = repo.index.diff(None,
                                                   create_patch=True,
                                                   paths=filepath)[0].diff.decode("utf-8")
                            response['error'] = False
                            response['message'] = diff
                            self.send_response(200)
                            self.send_header('Content-type', 'text/json')
                            self.end_headers()
                            self.wfile.write(bytes(json.dumps(response), "utf8"))
                            return
                        except Exception as err:
                            LOG.warning(err)
                            response['error'] = True
                            response['message'] = "Unable to load diff: %s" % str(err)
                    except Exception as err:
                        response['message'] = "%s" % (str(err))
                        LOG.warning(err)
            else:
                response['message'] = "Missing filename"
        elif req.path.endswith('/api/commit'):
            # Commit the staged index with the supplied message.
            try:
                postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
                                    keep_blank_values=1)
            except Exception as err:
                LOG.warning(err)
                response['message'] = "%s" % (str(err))
                postvars = {}
            if 'path' in postvars.keys() and 'message' in postvars.keys():
                if postvars['path'] and postvars['message']:
                    try:
                        commitpath = unquote(postvars['path'][0])
                        response['path'] = commitpath
                        message = unquote(postvars['message'][0])
                        # pylint: disable=not-callable
                        repo = REPO(commitpath,
                                    search_parent_directories=True)
                        try:
                            repo.index.commit(message)
                            response['error'] = False
                            # NOTE(review): "commited" typo in this user-facing
                            # string; left as-is (runtime string).
                            response['message'] = "Changes commited"
                            self.send_response(200)
                            self.send_header('Content-type', 'text/json')
                            self.end_headers()
                            self.wfile.write(bytes(json.dumps(response), "utf8"))
                            return
                        except Exception as err:
                            response['error'] = True
                            response['message'] = str(err)
                            LOG.debug(response)
                    except Exception as err:
                        response['message'] = "Not a git repository: %s" % (str(err))
                        LOG.warning("Exception (no repo): %s", str(err))
            else:
                response['message'] = "Missing path"
        elif req.path.endswith('/api/checkout'):
            # Check out an existing branch by name.
            try:
                postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
                                    keep_blank_values=1)
            except Exception as err:
                LOG.warning(err)
                response['message'] = "%s" % (str(err))
                postvars = {}
            if 'path' in postvars.keys() and 'branch' in postvars.keys():
                if postvars['path'] and postvars['branch']:
                    try:
                        branchpath = unquote(postvars['path'][0])
                        response['path'] = branchpath
                        branch = unquote(postvars['branch'][0])
                        # pylint: disable=not-callable
                        repo = REPO(branchpath,
                                    search_parent_directories=True)
                        try:
                            # Raises IndexError if the branch doesn't exist;
                            # caught by the except below.
                            head = [h for h in repo.heads if h.name == branch][0]
                            head.checkout()
                            response['error'] = False
                            response['message'] = "Checked out %s" % branch
                            self.send_response(200)
                            self.send_header('Content-type', 'text/json')
                            self.end_headers()
                            self.wfile.write(bytes(json.dumps(response), "utf8"))
                            return
                        except Exception as err:
                            response['error'] = True
                            response['message'] = str(err)
                            LOG.warning(response)
                    except Exception as err:
                        response['message'] = "Not a git repository: %s" % (str(err))
                        LOG.warning("Exception (no repo): %s", str(err))
            else:
                response['message'] = "Missing path or branch"
        elif req.path.endswith('/api/newbranch'):
            # Create a new branch from HEAD and check it out.
            try:
                postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
                                    keep_blank_values=1)
            except Exception as err:
                LOG.warning(err)
                response['message'] = "%s" % (str(err))
                postvars = {}
            if 'path' in postvars.keys() and 'branch' in postvars.keys():
                if postvars['path'] and postvars['branch']:
                    try:
                        branchpath = unquote(postvars['path'][0])
                        response['path'] = branchpath
                        branch = unquote(postvars['branch'][0])
                        # pylint: disable=not-callable
                        repo = REPO(branchpath,
                                    search_parent_directories=True)
                        try:
                            repo.git.checkout("HEAD", b=branch)
                            response['error'] = False
                            response['message'] = "Created and checked out %s" % branch
                            self.send_response(200)
                            self.send_header('Content-type', 'text/json')
                            self.end_headers()
                            self.wfile.write(bytes(json.dumps(response), "utf8"))
                            return
                        except Exception as err:
                            response['error'] = True
                            response['message'] = str(err)
                            LOG.warning(response)
                    except Exception as err:
                        response['message'] = "Not a git repository: %s" % (str(err))
                        LOG.warning("Exception (no repo): %s", str(err))
            else:
                response['message'] = "Missing path or branch"
        elif req.path.endswith('/api/init'):
            # Initialize a new git repository at the given path.
            try:
                postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
                                    keep_blank_values=1)
            except Exception as err:
                LOG.warning(err)
                response['message'] = "%s" % (str(err))
                postvars = {}
            if 'path' in postvars.keys():
                if postvars['path']:
                    try:
                        repopath = unquote(postvars['path'][0])
                        response['path'] = repopath
                        try:
                            repo = REPO.init(repopath)
                            response['error'] = False
                            response['message'] = "Initialized repository in %s" % repopath
                            self.send_response(200)
                            self.send_header('Content-type', 'text/json')
                            self.end_headers()
                            self.wfile.write(bytes(json.dumps(response), "utf8"))
                            return
                        except Exception as err:
                            response['error'] = True
                            response['message'] = str(err)
                            LOG.warning(response)
                    except Exception as err:
                        response['message'] = "Not a git repository: %s" % (str(err))
                        LOG.warning("Exception (no repo): %s", str(err))
            else:
                response['message'] = "Missing path or branch"
        elif req.path.endswith('/api/push'):
            # Push to the 'origin' remote if one is configured.
            try:
                postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
                                    keep_blank_values=1)
            except Exception as err:
                LOG.warning(err)
                response['message'] = "%s" % (str(err))
                postvars = {}
            if 'path' in postvars.keys():
                if postvars['path']:
                    try:
                        repopath = unquote(postvars['path'][0])
                        response['path'] = repopath
                        try:
                            # pylint: disable=not-callable
                            repo = REPO(repopath)
                            urls = []
                            if repo.remotes:
                                for url in repo.remotes.origin.urls:
                                    urls.append(url)
                            if not urls:
                                response['error'] = True
                                response['message'] = "No remotes configured for %s" % repopath
                            else:
                                repo.remotes.origin.push()
                                response['error'] = False
                                response['message'] = "Pushed to %s" % urls[0]
                            self.send_response(200)
                            self.send_header('Content-type', 'text/json')
                            self.end_headers()
                            self.wfile.write(bytes(json.dumps(response), "utf8"))
                            return
                        except Exception as err:
                            response['error'] = True
                            response['message'] = str(err)
                            LOG.warning(response)
                    except Exception as err:
                        response['message'] = "Not a git repository: %s" % (str(err))
                        LOG.warning("Exception (no repo): %s", str(err))
            else:
                response['message'] = "Missing path or branch"
        elif req.path.endswith('/api/stash'):
            # Stash the working tree via 'git stash'.
            try:
                postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
                                    keep_blank_values=1)
            except Exception as err:
                LOG.warning(err)
                response['message'] = "%s" % (str(err))
                postvars = {}
            if 'path' in postvars.keys():
                if postvars['path']:
                    try:
                        repopath = unquote(postvars['path'][0])
                        response['path'] = repopath
                        try:
                            # pylint: disable=not-callable
                            repo = REPO(repopath)
                            returnvalue = repo.git.stash()
                            response['error'] = False
                            response['message'] = "%s\n%s" % (returnvalue, repopath)
                            self.send_response(200)
                            self.send_header('Content-type', 'text/json')
                            self.end_headers()
                            self.wfile.write(bytes(json.dumps(response), "utf8"))
                            return
                        except Exception as err:
                            response['error'] = True
                            response['message'] = str(err)
                            LOG.warning(response)
                    except Exception as err:
                        response['message'] = "Not a git repository: %s" % (str(err))
                        LOG.warning("Exception (no repo): %s", str(err))
            else:
                response['message'] = "Missing path or branch"
        elif req.path.endswith('/api/newfolder'):
            # Create a directory (and parents) under the given base path.
            try:
                postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
                                    keep_blank_values=1)
            except Exception as err:
                LOG.warning(err)
                response['message'] = "%s" % (str(err))
                postvars = {}
            if 'path' in postvars.keys() and 'name' in postvars.keys():
                if postvars['path'] and postvars['name']:
                    try:
                        basepath = unquote(postvars['path'][0])
                        name = unquote(postvars['name'][0])
                        response['path'] = os.path.join(basepath, name)
                        try:
                            os.makedirs(response['path'])
                            self.send_response(200)
                            self.send_header('Content-type', 'text/json')
                            self.end_headers()
                            response['error'] = False
                            response['message'] = "Folder created"
                            self.wfile.write(bytes(json.dumps(response), "utf8"))
                            return
                        except Exception as err:
                            LOG.warning(err)
                            response['error'] = True
                            response['message'] = str(err)
                    except Exception as err:
                        response['message'] = "%s" % (str(err))
                        LOG.warning(err)
        elif req.path.endswith('/api/newfile'):
            # Create an empty file under the given base path.
            try:
                postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
                                    keep_blank_values=1)
            except Exception as err:
                LOG.warning(err)
                response['message'] = "%s" % (str(err))
                postvars = {}
            if 'path' in postvars.keys() and 'name' in postvars.keys():
                if postvars['path'] and postvars['name']:
                    try:
                        basepath = unquote(postvars['path'][0])
                        name = unquote(postvars['name'][0])
                        response['path'] = os.path.join(basepath, name)
                        try:
                            with open(response['path'], 'w') as fptr:
                                fptr.write("")
                            self.send_response(200)
                            self.send_header('Content-type', 'text/json')
                            self.end_headers()
                            response['error'] = False
                            response['message'] = "File created"
                            self.wfile.write(bytes(json.dumps(response), "utf8"))
                            return
                        except Exception as err:
                            LOG.warning(err)
                            response['error'] = True
                            response['message'] = str(err)
                    except Exception as err:
                        response['message'] = "%s" % (str(err))
                        LOG.warning(err)
            else:
                response['message'] = "Missing filename or text"
        elif req.path.endswith('/api/allowed_networks'):
            # Add or remove a network from the ALLOWED_NETWORKS ACL.
            try:
                postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
                                    keep_blank_values=1)
            except Exception as err:
                LOG.warning(err)
                response['message'] = "%s" % (str(err))
                postvars = {}
            if 'network' in postvars.keys() and 'method' in postvars.keys():
                if postvars['network'] and postvars['method']:
                    try:
                        network = unquote(postvars['network'][0])
                        method = unquote(postvars['method'][0])
                        if method == 'remove':
                            if network in ALLOWED_NETWORKS:
                                ALLOWED_NETWORKS.remove(network)
                                # Never leave the list empty, or access control
                                # semantics would flip to "allow everyone".
                                if not ALLOWED_NETWORKS:
                                    ALLOWED_NETWORKS.append("0.0.0.0/0")
                            response['error'] = False
                        elif method == 'add':
                            # ip_network() validates the CIDR; raises on junk.
                            ipaddress.ip_network(network)
                            ALLOWED_NETWORKS.append(network)
                            response['error'] = False
                        else:
                            response['error'] = True
                        self.send_response(200)
                        self.send_header('Content-type', 'text/json')
                        self.end_headers()
                        response['error'] = False
                        response['message'] = "ALLOWED_NETWORKS (%s): %s" % (method, network)
                        self.wfile.write(bytes(json.dumps(response), "utf8"))
                        return
                    except Exception as err:
                        response['error'] = True
                        response['message'] = "%s" % (str(err))
                        LOG.warning(err)
            else:
                response['message'] = "Missing network"
        elif req.path.endswith('/api/banned_ips'):
            # Ban or unban an IP in the BANNED_IPS list.
            try:
                postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
                                    keep_blank_values=1)
            except Exception as err:
                LOG.warning(err)
                response['message'] = "%s" % (str(err))
                postvars = {}
            if 'ip' in postvars.keys() and 'method' in postvars.keys():
                if postvars['ip'] and postvars['method']:
                    try:
                        ip_address = unquote(postvars['ip'][0])
                        method = unquote(postvars['method'][0])
                        if method == 'unban':
                            if ip_address in BANNED_IPS:
                                BANNED_IPS.remove(ip_address)
                            response['error'] = False
                        elif method == 'ban':
                            # NOTE(review): validates with ip_network() rather
                            # than ip_address() - confirm intent.
                            ipaddress.ip_network(ip_address)
                            BANNED_IPS.append(ip_address)
                        else:
                            response['error'] = True
                        self.send_response(200)
                        self.send_header('Content-type', 'text/json')
                        self.end_headers()
                        response['message'] = "BANNED_IPS (%s): %s" % (method, ip_address)
                        self.wfile.write(bytes(json.dumps(response), "utf8"))
                        return
                    except Exception as err:
                        response['error'] = True
                        response['message'] = "%s" % (str(err))
                        LOG.warning(err)
            else:
                response['message'] = "Missing IP"
        else:
            response['message'] = "Invalid method"
        # Generic fall-through: report whatever error message was accumulated.
        self.send_response(200)
        self.send_header('Content-type', 'text/json')
        self.end_headers()
        self.wfile.write(bytes(json.dumps(response), "utf8"))
        return
class AuthHandler(RequestHandler):
    """Handler to verify auth header (HTTP Basic) before serving requests."""

    def do_BLOCK(self, status=420, reason="Policy not fulfilled"):
        """Answer the request with *status* and a plain-text *reason*."""
        self.send_response(status)
        self.end_headers()
        self.wfile.write(bytes(reason, "utf8"))

    # pylint: disable=invalid-name
    def do_AUTHHEAD(self):
        """Request authorization."""
        LOG.info("Requesting authorization")
        self.send_response(401)
        self.send_header('WWW-Authenticate', 'Basic realm=\"HASS-Configurator\"')
        self.send_header('Content-type', 'text/html')
        self.end_headers()

    def _credentials_valid(self, header):
        """Return True when *header* carries valid HTTP Basic credentials.

        :param str header: raw value of the ``Authorization`` request header
        """
        import hmac  # local import keeps the module import block untouched
        authorization = header.split()
        if len(authorization) != 2 or authorization[0] != "Basic":
            return False
        try:
            plain = base64.b64decode(authorization[1]).decode("utf-8")
        except (ValueError, UnicodeDecodeError):
            # Malformed base64 / non-UTF-8 payload: treat it as bad
            # credentials instead of letting the exception escape the
            # request handler (previously this crashed the request thread).
            return False
        # Password may itself contain ':'; only split on the first one.
        username, _, password = plain.partition(':')
        if PASSWORD.startswith("{sha256}"):
            password = "{sha256}%s" % hashlib.sha256(password.encode("utf-8")).hexdigest()
        # Constant-time comparison so the check does not leak timing info.
        return (hmac.compare_digest(username.encode("utf-8"), USERNAME.encode("utf-8"))
                and hmac.compare_digest(password.encode("utf-8"), PASSWORD.encode("utf-8")))

    def _serve_authenticated(self, proceed):
        """Shared GET/POST flow: host check, Basic auth, fail2ban counting.

        :param proceed: bound superclass method to call once authenticated
        """
        if not verify_hostname(self.headers.get('Host', '')):
            self.do_BLOCK(403, "Forbidden")
            return
        header = self.headers.get('Authorization', None)
        if header is None:
            self.do_AUTHHEAD()
            self.wfile.write(bytes('no auth header received', 'utf-8'))
            return
        if self._credentials_valid(header):
            if BANLIMIT:
                # Successful login clears the client's failure counter.
                FAIL2BAN_IPS.pop(self.client_address[0], None)
            proceed()
            return
        if BANLIMIT:
            bancounter = FAIL2BAN_IPS.get(self.client_address[0], 1)
            if bancounter >= BANLIMIT:
                LOG.warning("Blocking access from %s", self.client_address[0])
                self.do_BLOCK()
                return
            FAIL2BAN_IPS[self.client_address[0]] = bancounter + 1
        self.do_AUTHHEAD()
        self.wfile.write(bytes('Authentication required', 'utf-8'))

    def do_GET(self):
        """Serve GET only after successful authentication."""
        self._serve_authenticated(super().do_GET)

    def do_POST(self):
        """Serve POST only after successful authentication."""
        self._serve_authenticated(super().do_POST)
class SimpleServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
    """Server class."""

    # One daemon thread per request; allow fast restarts on the same port.
    daemon_threads = True
    allow_reuse_address = True

    def __init__(self, server_address, RequestHandlerClass):
        """Delegate construction to the TCP server base class."""
        super().__init__(server_address, RequestHandlerClass)
def notify(title="HASS Configurator",
           message="Notification by HASS Configurator",
           notification_id=None):
    """Helper function to send notifications via HASS."""
    if not HASS_API or not NOTIFY_SERVICE:
        # Notifications are only possible with an API url and a service.
        return
    payload = {
        "title": title,
        "message": message
    }
    if notification_id and NOTIFY_SERVICE == NOTIFY_SERVICE_DEFAULT:
        payload["notification_id"] = notification_id
    request_headers = {
        "Content-Type": "application/json"
    }
    if HASS_API_PASSWORD:
        # JWT tokens go into the Authorization header, legacy API
        # passwords into x-ha-access.
        if is_jwt(HASS_API_PASSWORD):
            request_headers["Authorization"] = "Bearer %s" % HASS_API_PASSWORD
        else:
            request_headers["x-ha-access"] = HASS_API_PASSWORD
    target = "%sservices/%s" % (HASS_API, NOTIFY_SERVICE.replace('.', '/'))
    req = urllib.request.Request(
        target,
        data=bytes(json.dumps(payload).encode('utf-8')),
        headers=request_headers, method='POST')
    LOG.info("%s", payload)
    try:
        with urllib.request.urlopen(req) as response:
            LOG.debug(response.read().decode('utf-8'))
    except Exception as err:
        LOG.warning("Exception while creating notification: %s", err)
def main():
    """Parse CLI arguments, load settings, and run the configurator server."""
    global HTTPD
    signal.signal(signal.SIGINT, signal_handler)
    parser = argparse.ArgumentParser(
        description="Visit "
        "https://github.com/danielperna84/hass-configurator for more details "
        "about the available options.")
    parser.add_argument(
        'settings', nargs='?',
        help="Path to file with persistent settings.")
    parser.add_argument(
        '--listen', '-l', nargs='?',
        help="The IP address the service is listening on. Default: 0.0.0.0")
    parser.add_argument(
        '--port', '-p', nargs='?', type=int,
        help="The port the service is listening on. "
        "0 allocates a dynamic port. Default: 3218")
    parser.add_argument(
        '--allowed_networks', '-a', nargs='?',
        help="Comma-separated list of allowed networks / IP addresses "
        "from which access is allowed. Eg. 127.0.0.1,192.168.0.0/16. "
        "By default access is allowed from anywhere.")
    parser.add_argument(
        '--username', '-U', nargs='?',
        help="Username required for access.")
    parser.add_argument(
        '--password', '-P', nargs='?',
        help="Password required for access.")
    parser.add_argument(
        '--sesame', '-S', nargs='?',
        help="SESAME token for whitelisting client IPs by accessing "
        "a secret URL: http://1.2.3.4:3218/secret_sesame_token")
    parser.add_argument(
        '--basepath', '-b', nargs='?',
        help="Path to initially serve files from")
    parser.add_argument(
        '--enforce', '-e', action='store_true',
        help="Lock the configurator into the basepath.")
    parser.add_argument(
        '--standalone', '-s', action='store_true',
        help="Don't fetch data from HASS_API.")
    parser.add_argument(
        '--dirsfirst', '-d', action='store_true',
        help="Display directories first.")
    parser.add_argument(
        '--hidehidden', '-H', action='store_true',
        help="Don't display hidden files.")
    parser.add_argument(
        '--git', '-g', action='store_true',
        help="Enable GIT support.")
    parser.add_argument(
        '--dev', '-D', action='store_true',
        help="Enable Dev-Mode (serve dev.html instead of embedded HTML).")
    args = parser.parse_args()
    load_settings(args)
    LOG.info("Starting server")

    # Warn (via HASS notification) about each weak secret; one loop instead
    # of three copies of the same check/notify sequence.
    secret_checks = (
        (HASS_API_PASSWORD, "HASS_API_PASSWORD", "HC_HASS_API_PASSWORD",
         "Your HASS API password seems insecure (%i). "),
        (SESAME, "SESAME", "HC_SESAME",
         "Your SESAME seems insecure (%i). "),
        (PASSWORD, "PASSWORD", "HC_PASSWORD",
         "Your PASSWORD seems insecure (%i). "),
    )
    try:
        for secret, name, notification_id, template in secret_checks:
            if not secret:
                continue
            problems = password_problems(secret, name)
            if problems:
                notify(
                    title="HASS Configurator - Password warning",
                    message=(template % problems) +
                    "Refer to the HASS configurator logs for further information.",
                    notification_id=notification_id)
    except Exception as err:
        LOG.warning("Exception while checking passwords: %s", err)

    custom_server = SimpleServer
    if ':' in LISTENIP:
        custom_server.address_family = socket.AF_INET6
    server_address = (LISTENIP, PORT)
    # Authentication is only enforced when both credentials are configured.
    handler = AuthHandler if USERNAME and PASSWORD else RequestHandler
    HTTPD = custom_server(server_address, handler)
    if SSL_CERTIFICATE:
        # ssl.wrap_socket() is deprecated and was removed in Python 3.12;
        # use an explicit server-side SSLContext instead.
        context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
        context.load_cert_chain(certfile=SSL_CERTIFICATE, keyfile=SSL_KEY)
        HTTPD.socket = context.wrap_socket(HTTPD.socket, server_side=True)
    LOG.info('Listening on: %s://%s:%i',
             'https' if SSL_CERTIFICATE else 'http',
             HTTPD.server_address[0], HTTPD.server_address[1])
    if BASEPATH:
        os.chdir(BASEPATH)
    HTTPD.serve_forever()


if __name__ == "__main__":
    main()
| mit |
defionscode/ansible | lib/ansible/modules/storage/netapp/na_elementsw_ldap.py | 9 | 8686 | #!/usr/bin/python
# (c) 2017, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
module: na_elementsw_ldap
short_description: NetApp Element Software Manage ldap admin users
extends_documentation_fragment:
- netapp.solidfire
version_added: '2.7'
author: NetApp Ansible Team (ng-ansibleteam@netapp.com)
description:
- Enable, disable ldap, and add ldap users
options:
state:
description:
- Whether the LDAP configuration should exist or not.
required: true
choices: ['present', 'absent']
authType:
description:
- Identifies which user authentication method to use.
choices: ['DirectBind', 'SearchAndBind']
groupSearchBaseDn:
description:
- The base DN of the tree to start the group search (will do a subtree search from here)
groupSearchType:
description:
- Controls the default group search filter used
choices: ['NoGroup', 'ActiveDirectory', 'MemberDN']
serverURIs:
description:
- A comma-separated list of LDAP server URIs
userSearchBaseDN:
description:
- The base DN of the tree to start the search (will do a subtree search from here)
searchBindDN:
description:
- A fully qualified DN to log in with to perform an LDAP search for the user (needs read access to the LDAP directory).
searchBindPassword:
description:
- The password for the searchBindDN account used for searching
userSearchFilter:
description:
- the LDAP Filter to use
userDNTemplate:
description:
- A string that is used to form a fully qualified user DN.
groupSearchCustomFilter:
description:
- For use with the CustomFilter Search type
'''
EXAMPLES = """
- name: disable ldap authentication
na_elementsw_ldap:
state: absent
username: "{{ admin username }}"
password: "{{ admin password }}"
hostname: "{{ hostname }}"
- name: Enable ldap authentication
na_elementsw_ldap:
state: present
username: "{{ admin username }}"
password: "{{ admin password }}"
hostname: "{{ hostname }}"
authType: DirectBind
serverURIs: ldap://svmdurlabesx01spd_ldapclnt
groupSearchType: MemberDN
userDNTemplate: uid=%USERNAME%,cn=users,cn=accounts,dc=corp,dc="{{ company name }}",dc=com
"""
RETURN = """
"""
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
import ansible.module_utils.netapp as netapp_utils
# Detect the SolidFire SDK once at import time; the module checks this
# flag and fails gracefully when the SDK is missing.
HAS_SF_SDK = netapp_utils.has_sf_sdk()
try:
    import solidfire.common
except ImportError:
    # Only an import failure means "no SDK"; a bare except here would
    # also have swallowed unrelated errors (e.g. KeyboardInterrupt).
    HAS_SF_SDK = False
class NetappElementLdap(object):
    """Enable, disable and reconfigure LDAP authentication on an
    ElementSW / SolidFire cluster."""

    def __init__(self):
        self.argument_spec = netapp_utils.ontap_sf_host_argument_spec()
        self.argument_spec.update(dict(
            state=dict(required=True, choices=['present', 'absent']),
            authType=dict(required=False, choices=['DirectBind', 'SearchAndBind']),
            groupSearchBaseDn=dict(required=False, type=str),
            groupSearchType=dict(required=False, choices=['NoGroup', 'ActiveDirectory', 'MemberDN']),
            serverURIs=dict(required=False, type=str),
            userSearchBaseDN=dict(required=False, type=str),
            searchBindDN=dict(required=False, type=str),
            searchBindPassword=dict(required=False, type=str, no_log=True),
            userSearchFilter=dict(required=False, type=str),
            userDNTemplate=dict(required=False, type=str),
            groupSearchCustomFilter=dict(required=False, type=str)
        ))

        self.module = AnsibleModule(
            argument_spec=self.argument_spec,
            supports_check_mode=True
        )

        param = self.module.params

        # set up state variables
        self.state = param['state']
        self.authType = param['authType']
        self.groupSearchBaseDn = param['groupSearchBaseDn']
        self.groupSearchType = param['groupSearchType']
        # The API expects a list of URIs; the module accepts a CSV string.
        self.serverURIs = param['serverURIs']
        if self.serverURIs is not None:
            self.serverURIs = self.serverURIs.split(',')
        self.userSearchBaseDN = param['userSearchBaseDN']
        self.searchBindDN = param['searchBindDN']
        self.searchBindPassword = param['searchBindPassword']
        self.userSearchFilter = param['userSearchFilter']
        self.userDNTemplate = param['userDNTemplate']
        self.groupSearchCustomFilter = param['groupSearchCustomFilter']

        if HAS_SF_SDK is False:
            self.module.fail_json(msg="Unable to import the SolidFire Python SDK")
        else:
            self.sfe = netapp_utils.create_sf_connection(module=self.module)

    def get_ldap_configuration(self):
        """
        Return ldap configuration if found

        :return: Details about the ldap configuration. None if not found.
        :rtype: solidfire.models.GetLdapConfigurationResult
        """
        ldap_config = self.sfe.get_ldap_configuration()
        return ldap_config

    def enable_ldap(self):
        """
        Enable (or reconfigure) LDAP authentication on the cluster.

        :return: nothing
        """
        try:
            self.sfe.enable_ldap_authentication(self.serverURIs, auth_type=self.authType,
                                                group_search_base_dn=self.groupSearchBaseDn,
                                                group_search_type=self.groupSearchType,
                                                group_search_custom_filter=self.groupSearchCustomFilter,
                                                search_bind_dn=self.searchBindDN,
                                                search_bind_password=self.searchBindPassword,
                                                user_search_base_dn=self.userSearchBaseDN,
                                                user_search_filter=self.userSearchFilter,
                                                user_dntemplate=self.userDNTemplate)
        except solidfire.common.ApiServerError as error:
            # Previously this message referenced a non-existent
            # ``self.account_id``, which raised AttributeError and masked
            # the real API error.
            self.module.fail_json(msg='Error enabling LDAP: %s' % to_native(error),
                                  exception=traceback.format_exc())

    def check_config(self, ldap_config):
        """
        Check to see if the ldap config has been modified.

        :param ldap_config: The LDAP configuration
        :return: False if the config is the same as the playbook, True if it is not
        """
        live = ldap_config.ldap_configuration
        # Any mismatch between the playbook parameters and the live
        # configuration means an update is required.
        pairs = (
            (self.authType, live.auth_type),
            (self.serverURIs, live.server_uris),
            (self.groupSearchBaseDn, live.group_search_base_dn),
            (self.groupSearchType, live.group_search_type),
            (self.groupSearchCustomFilter, live.group_search_custom_filter),
            (self.searchBindDN, live.search_bind_dn),
            (self.searchBindPassword, live.search_bind_password),
            (self.userSearchBaseDN, live.user_search_base_dn),
            (self.userSearchFilter, live.user_search_filter),
            (self.userDNTemplate, live.user_dntemplate),
        )
        return any(wanted != current for wanted, current in pairs)

    def apply(self):
        """Drive the cluster to the requested state (idempotent)."""
        changed = False
        ldap_config = self.get_ldap_configuration()
        if self.state == 'absent':
            if ldap_config and ldap_config.ldap_configuration.enabled:
                changed = True
        if self.state == 'present' and self.check_config(ldap_config):
            changed = True
        if changed:
            if self.module.check_mode:
                # Check mode: report the pending change without applying it.
                pass
            else:
                if self.state == 'present':
                    self.enable_ldap()
                elif self.state == 'absent':
                    self.sfe.disable_ldap_authentication()
        self.module.exit_json(changed=changed)
def main():
    """Module entry point: build the LDAP manager and apply the state."""
    ldap_manager = NetappElementLdap()
    ldap_manager.apply()


if __name__ == '__main__':
    main()
| gpl-3.0 |
naacl/zhihu-py3 | zhihu/common.py | 3 | 7821 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = '7sDream'
import functools
import re
import os
from requests import Session
from bs4 import BeautifulSoup as _Bs
try:
__import__('lxml')
BeautifulSoup = lambda makeup: _Bs(makeup, 'lxml')
except ImportError:
BeautifulSoup = lambda makeup: _Bs(makeup, 'html.parser')
Default_Header = {'X-Requested-With': 'XMLHttpRequest',
'Referer': 'http://www.zhihu.com',
'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; '
'rv:39.0) Gecko/20100101 Firefox/39.0',
'Host': 'www.zhihu.com'}
Zhihu_URL = 'http://www.zhihu.com'
Login_URL = Zhihu_URL + '/login/email'
Captcha_URL_Prefix = Zhihu_URL + '/captcha.gif?r='
Get_Profile_Card_URL = Zhihu_URL + '/node/MemberProfileCardV2'
Question_Get_More_Answer_URL = Zhihu_URL + '/node/QuestionAnswerListV2'
Author_Get_More_Followers_URL = Zhihu_URL + '/node/ProfileFollowersListV2'
Author_Get_More_Followees_URL = Zhihu_URL + '/node/ProfileFolloweesListV2'
Column_Url = 'http://zhuanlan.zhihu.com'
Column_API = Column_Url + '/api/columns'
Column_Data = Column_API + '/{0}'
Column_Posts_Data = Column_API + '/{0}/posts?limit=10&offset={1}'
Column_Post_Data = Column_API + '/{0}/posts/{1}'
Topic_Url = Zhihu_URL + '/topic'
Topic_Get_Children_Url = Topic_Url + '/{0}/organize/entire'
Topic_Get_More_Follower_Url = Topic_Url + '/{0}/followers'
Topic_Question_Url = Topic_Url + '/{0}/questions'
Topic_Top_Answers_Url = Topic_Url + '/{0}/top-answers'
Topic_Hot_Questions_Url = Topic_Url + '/{0}/hot'
Get_Me_Info_Url = Column_Url + '/api/me'
Upvote_Answer_Url = Zhihu_URL + '/node/AnswerVoteBarV2'
Upvote_Article_Url = Column_API + '/{0}/posts/{1}/rating'
Follow_Author_Url = Zhihu_URL + '/node/MemberFollowBaseV2'
Follow_Question_Url = Zhihu_URL + '/node/QuestionFollowBaseV2'
Follow_Topic_Url = Zhihu_URL + '/node/TopicFollowBaseV2'
Follow_Collection_Url = Zhihu_URL + '/collection/follow'
Unfollow_Collection_Url = Zhihu_URL + '/collection/unfollow'
Thanks_Url = Zhihu_URL + '/answer/thanks'
Cancel_Thanks_Url = Zhihu_URL + '/answer/cancel_thanks'
re_question_url = re.compile(r'^http://www\.zhihu\.com/question/\d+/?$')
re_ans_url = re.compile(
r'^http://www\.zhihu\.com/question/\d+/answer/\d+/?$')
re_author_url = re.compile(r'^http://www\.zhihu\.com/people/[^/]+/?$')
re_collection_url = re.compile(r'^http://www\.zhihu\.com/collection/\d+/?$')
re_column_url = re.compile(r'^http://zhuanlan\.zhihu\.com/([^/]+)/?$')
re_post_url = re.compile(r'^http://zhuanlan\.zhihu\.com/([^/]+)/(\d+)/?$')
re_topic_url = re.compile(r'^http://www\.zhihu\.com/topic/(\d+)/?$')
re_a2q = re.compile(r'(.*)/a.*')
re_collection_url_split = re.compile(r'.*(/c.*)')
re_get_number = re.compile(r'[^\d]*(\d+).*')
re_del_empty_line = re.compile(r'\n*(.*)\n*')
def check_soup(attr, soup_type='_make_soup'):
    """Decorator factory: cache the wrapped method's result on ``attr``.

    On a cache miss the page soup is (re)built first: ``_make_soup`` is
    always invoked, while any other builder is only invoked when
    ``self.soup`` is still ``None``.
    """
    def decorator(func):
        @functools.wraps(func)
        def cached(self):
            # noinspection PyTypeChecker
            cached_value = getattr(self, attr, None)
            if cached_value is not None:
                return cached_value
            if soup_type == '_make_soup' or self.soup is None:
                getattr(self, soup_type)()
            cached_value = func(self)
            setattr(self, attr, cached_value)
            return cached_value
        return cached
    return decorator
def class_common_init(url_re, allowed_none=False):
    """Decorator factory for zhihu class initializers.

    Validates the url against ``url_re``, normalizes it with a trailing
    slash, guarantees a requests session in ``kwargs`` and resets
    ``self.soup`` before calling the wrapped ``__init__``.
    """
    def decorator(func):
        @functools.wraps(func)
        def initializer(self, url, *args, **kwargs):
            if url is None:
                if not allowed_none:
                    raise ValueError('Invalid Url')
            else:
                if url_re.match(url) is None:
                    raise ValueError('Invalid URL')
                if not url.endswith('/'):
                    url += '/'
            if kwargs.get('session') is None:
                kwargs['session'] = Session()
            self.soup = None
            return func(self, url, *args, **kwargs)
        return initializer
    return decorator
def remove_invalid_char(text):
    """Strip characters that are not valid in file names.

    Used when saving files, to make sure the generated name is valid on
    common filesystems (Windows reserved characters plus newline).

    :param str text: string to sanitise
    :return: the string with every invalid character removed
    :rtype: str
    """
    invalid_chars = frozenset('/\\:*?"<>|\n')
    # Single join instead of quadratic ``res += char`` concatenation,
    # with O(1) set membership instead of a list scan per character.
    return ''.join(ch for ch in text if ch not in invalid_chars)
def parser_author_from_tag(author):
    """Extract ``(url, name, motto, photo_url)`` from an author card tag.

    Anonymous users have no profile page, so everything except the
    display name comes back empty/None.
    """
    if author.text == '匿名用户':
        return None, '匿名用户', '', ''
    profile_link = author.contents[3]
    name = profile_link.text
    motto = '' if author.strong is None else author.strong['title']
    url = Zhihu_URL + profile_link['href']
    # '_s' thumbnails are swapped for the '_r' full-size avatar.
    avatar = author.a.img['src'].replace('_s', '_r')
    return url, name, motto, avatar
def answer_content_process(content):
    """Normalize an answer's HTML fragment into a standalone document.

    Wraps *content* in a minimal UTF-8 html skeleton, drops <noscript>
    fallbacks, swaps lazy-loaded thumbnails for their full-size originals
    (surrounded by <br> tags) and removes external-link icons.

    :param content: bs4 tag of the answer body (its ``class`` attribute
        is removed in place)
    :return: prettified HTML string
    """
    del content['class']
    soup = BeautifulSoup(
        '<html><head><meta charset="utf-8"></head><body></body></html>')
    soup.body.append(content)
    # <noscript> blocks duplicate the lazy images; drop them.
    no_script_list = soup.find_all("noscript")
    for no_script in no_script_list:
        no_script.extract()
    img_list = soup.find_all(
        "img", class_="origin_image zh-lightbox-thumb lazy")
    for img in img_list:
        # Replace the lazy placeholder with the full-resolution source.
        new_img = soup.new_tag('img', src=img['data-original'])
        img.replace_with(new_img)
        new_img.insert_after(soup.new_tag('br'))
        # NOTE(review): ``img`` was just detached by replace_with(), so its
        # previous_sibling is presumably always None here and the leading
        # <br> is always inserted — confirm against bs4 behavior.
        if img.previous_sibling is None or img.previous_sibling.name != 'br':
            new_img.insert_before(soup.new_tag('br'))
    # External-link icons are decoration only.
    useless_list = soup.find_all("i", class_="icon-external")
    for useless in useless_list:
        useless.extract()
    return soup.prettify()
def get_path(path, filename, mode, default_path, default_name):
    """Build a non-clashing path ``<path>/<filename>[N].<mode>``.

    Falls back to sanitized defaults (relative to the CWD) when *path* or
    *filename* is ``None``, creates the directory if missing, and appends
    an increasing integer suffix until the name is unused.
    """
    if path is None:
        path = os.path.join(os.getcwd(), remove_invalid_char(default_path))
    if filename is None:
        filename = remove_invalid_char(default_name)
    if not os.path.isdir(path):
        os.makedirs(path)
    candidate = filename
    suffix = 0
    while os.path.isfile(os.path.join(path, candidate) + '.' + mode):
        suffix += 1
        candidate = filename + str(suffix)
    return os.path.join(path, candidate) + '.' + mode
def common_follower(url, xsrf, session):
    """Yield ``Author`` objects for a follower/followee listing endpoint.

    Pages through the zhihu ajax endpoint 20 profiles at a time; a page
    shorter than 20 entries signals the end of the listing.

    :param str url: ajax endpoint to POST to
    :param str xsrf: anti-CSRF token sent in the POST body
    :param session: authenticated requests session
    :return: generator of Author objects
    """
    from .author import Author
    headers = dict(Default_Header)
    headers['Referer'] = url
    data = {'offset': 0, '_xsrf': xsrf}
    gotten_data_num = 20
    offset = 0
    while gotten_data_num == 20:
        data['offset'] = offset
        res = session.post(url, data=data, headers=headers)
        # msg is [count, html_fragment]
        json_data = res.json()['msg']
        gotten_data_num = json_data[0]
        offset += gotten_data_num
        soup = BeautifulSoup(json_data[1])
        follower_divs = soup.find_all('div', class_='zm-profile-card')
        for div in follower_divs:
            if div.a is not None:
                author_name = div.a['title']
                author_url = Zhihu_URL + div.a['href']
                author_motto = div.find('div', class_='zg-big-gray').text
                # '_m' medium avatars are swapped for '_r' full-size ones.
                author_photo = div.img['src'].replace('_m', '_r')
                numbers = [re_get_number.match(a.text).group(1)
                           for a in div.find_all('a', target='_blank')]
            else:
                # Anonymous users render without a profile link.
                author_name = '匿名用户'
                author_url = None
                author_motto = ''
                author_photo = None
                numbers = [None] * 4
            yield Author(author_url, author_name, author_motto, *numbers,
                         photo_url=author_photo, session=session)
| mit |
adamgonzalez/analysis | COV_CI.py | 1 | 11142 | # -*- coding: utf-8 -*-
"""
Created on Thu Mar 02 11:25:09 2017
@author: Adam
"""
import os
import numpy as np
import math
import cmath
import matplotlib
import matplotlib.pyplot as plt
from astropy.io import fits
matplotlib.rcParams.update({'font.size': 18})
matplotlib.rcParams['axes.linewidth'] = 1
def data_cleaner(d_t_raw, d_r_raw, d_e_raw):
    """Drop NaN-rate bins from a light curve and zero its time axis.

    Parameters
    ----------
    d_t_raw, d_r_raw, d_e_raw : array-like
        Raw time, count-rate and error columns; the rate column may
        contain NaNs (gaps in the light curve).

    Returns
    -------
    tuple
        ``(d_t, d_r, d_e, n, a_r)`` — the filtered time (shifted so the
        first valid bin is at t=0), rate and error arrays, the number of
        valid bins, and the mean count rate.
    """
    rates = np.asarray(d_r_raw, dtype=float)
    # Vectorized single-pass NaN filter instead of the previous
    # double loop with math.isnan on every element.
    valid = ~np.isnan(rates)
    d_t = np.asarray(d_t_raw, dtype=float)[valid]
    d_r = rates[valid]
    d_e = np.asarray(d_e_raw, dtype=float)[valid]
    n = int(valid.sum())
    d_t = d_t - d_t[0]  # first valid sample defines t = 0
    a_r = np.average(d_r)
    return d_t, d_r, d_e, n, a_r
# Input selection: each block reads ONE list of FITS light curves; the
# commented-out ``with open(...)`` lines are the alternative orbit/band
# selections that were toggled by hand between runs.
os.chdir("/Users/agonzalez/Documents/Research/Data/IZw1")
#pic = '2769_COVvE_603.png'

# Light curves of the bands of interest.
# ORBIT 2768
with open(name='2768/lc_covCI_2768_600.txt',mode='r') as lcfile:
#with open('2768/covariance_lists/lc_covCI_2768_601.txt','r') as lcfile:
#with open('2768/covariance_lists/lc_covCI_2768_602.txt','r') as lcfile:
#with open('2768/covariance_lists/lc_covCI_2768_603.txt','r') as lcfile:
#with open('2768/covariance_lists/lc_covCI_2768_604.txt','r') as lcfile:
# ORBIT 2769
#with open('2769/covariance_lists/lc_covCI_2769_600.txt','r') as lcfile:
#with open('2769/covariance_lists/lc_covCI_2769_601.txt','r') as lcfile:
#with open('2769/covariance_lists/lc_covCI_2769_602.txt','r') as lcfile:
#with open('2769/covariance_lists/lc_covCI_2769_603.txt','r') as lcfile:
#with open('2769/covariance_lists/lc_covCI_2769_604.txt','r') as lcfile:
    lc_fnames = [line.rstrip('\n') for line in lcfile]
lcfile.close()  # NOTE: redundant — the with-block already closed the file
n_lc = len(lc_fnames)

# Reference-band light curves.
# ORBIT 2768
with open('2768/lcref_covCI_2768_600.txt','r') as reffile:
#with open('2768/covariance_lists/lcref_covCI_2768_601.txt','r') as reffile:
#with open('2768/covariance_lists/lcref_covCI_2768_602.txt','r') as reffile:
#with open('2768/covariance_lists/lcref_covCI_2768_603.txt','r') as reffile:
#with open('2768/covariance_lists/lcref_covCI_2768_604.txt','r') as reffile:
# ORBIT 2769
#with open('2769/covariance_lists/lcref_covCI_2769_600.txt','r') as reffile:
#with open('2769/covariance_lists/lcref_covCI_2769_601.txt','r') as reffile:
#with open('2769/covariance_lists/lcref_covCI_2769_602.txt','r') as reffile:
#with open('2769/covariance_lists/lcref_covCI_2769_603.txt','r') as reffile:
#with open('2769/covariance_lists/lcref_covCI_2769_604.txt','r') as reffile:
    ref_fnames = [line.rstrip('\n') for line in reffile]
reffile.close()
n_ref = len(ref_fnames)

# Background light curves for the bands of interest.
# ORBIT 2768
with open('2768/bg_covCI_2768_600.txt','r') as bgfile:
#with open('2768/covariance_lists/bg_covCI_2768_601.txt','r') as bgfile:
#with open('2768/covariance_lists/bg_covCI_2768_602.txt','r') as bgfile:
#with open('2768/covariance_lists/bg_covCI_2768_603.txt','r') as bgfile:
#with open('2768/covariance_lists/bg_covCI_2768_604.txt','r') as bgfile:
# ORBIT 2769
#with open('2769/covariance_lists/bg_covCI_2769_600.txt','r') as bgfile:
#with open('2769/covariance_lists/bg_covCI_2769_601.txt','r') as bgfile:
#with open('2769/covariance_lists/bg_covCI_2769_602.txt','r') as bgfile:
#with open('2769/covariance_lists/bg_covCI_2769_603.txt','r') as bgfile:
#with open('2769/covariance_lists/bg_covCI_2769_604.txt','r') as bgfile:
    bg_fnames = [line.rstrip('\n') for line in bgfile]
bgfile.close()
n_bg = len(bg_fnames)

# Background light curves for the reference band.
# ORBIT 2768
with open('2768/bgref_covCI_2768_600.txt','r') as refbgfile:
#with open('2768/covariance_lists/bgref_covCI_2768_601.txt','r') as refbgfile:
#with open('2768/covariance_lists/bgref_covCI_2768_602.txt','r') as refbgfile:
#with open('2768/covariance_lists/bgref_covCI_2768_603.txt','r') as refbgfile:
#with open('2768/covariance_lists/bgref_covCI_2768_604.txt','r') as refbgfile:
# ORBIT 2769
#with open('2769/covariance_lists/bgref_covCI_2769_600.txt','r') as refbgfile:
#with open('2769/covariance_lists/bgref_covCI_2769_601.txt','r') as refbgfile:
#with open('2769/covariance_lists/bgref_covCI_2769_602.txt','r') as refbgfile:
#with open('2769/covariance_lists/bgref_covCI_2769_603.txt','r') as refbgfile:
#with open('2769/covariance_lists/bgref_covCI_2769_604.txt','r') as refbgfile:
    refbg_fnames = [line.rstrip('\n') for line in refbgfile]
refbgfile.close()
n_refbg = len(refbg_fnames)

#n_lc = 2
n_RUNS = n_lc

# set up all of the final output variables and the number of files to go thru
# Mean energy [keV] of each band, truncated to the number of light curves.
energy = [0.3, 0.45, 0.55, 0.7, 0.9, 1.25, 1.75, 3.0, 5.0, 7.0, 9.0]
energy = energy[:n_RUNS]
# Frequency-band widths [Hz] of the low/mid/high covariance bins.
Df_LF = 4.0*pow(10,-4.0) - 1.0*pow(10,-4.0)
Df_MF = 1.5*pow(10,-3.0) - 0.4*pow(10,-3.0)
Df_HF = 4.0*pow(10,-3.0) - 2.0*pow(10,-3.0)

plt.rc('font',family='serif')
# Main loop: for each energy band, read the four light curves, compute the
# PSDs and cross spectrum against the reference band, and derive the
# covariance in three frequency ranges.
for RUN in range (0,n_RUNS):
    print "RUN NUMBER: ", RUN+1

    # Band-of-interest light curve (times re-zeroed to the first bin).
    lcfits = fits.open(lc_fnames[RUN])
    lcdata = lcfits[1].data
    lcfits.close()
    lc_t_raw = lcdata.field('TIME') ; lc_t_raw = lc_t_raw - lc_t_raw[0]
    lc_r_raw = lcdata.field('RATE')
    lc_e_raw = lcdata.field('ERROR')

    # Matching background light curve.
    bgfits = fits.open(bg_fnames[RUN])
    bgdata = bgfits[1].data
    bgfits.close()
    bg_t_raw = bgdata.field('TIME') ; bg_t_raw = bg_t_raw - bg_t_raw[0]
    bg_r_raw = bgdata.field('RATE')
    bg_e_raw = bgdata.field('ERROR')

    # Reference-band light curve.
    reffits = fits.open(ref_fnames[RUN])
    refdata = reffits[1].data
    reffits.close()
    ref_t_raw = refdata.field('TIME') ; ref_t_raw = ref_t_raw - ref_t_raw[0]
    ref_r_raw = refdata.field('RATE')
    ref_e_raw = refdata.field('ERROR')

    # Reference-band background light curve.
    refbgfits = fits.open(refbg_fnames[RUN])
    refbgdata = refbgfits[1].data
    refbgfits.close()
    refbg_t_raw = refbgdata.field('TIME') ; refbg_t_raw = refbg_t_raw - refbg_t_raw[0]
    refbg_r_raw = refbgdata.field('RATE')
    refbg_e_raw = refbgdata.field('ERROR')

    # Strip NaN bins and get mean rates for the noise levels below.
    #print "Mean Energy = ", energy[RUN]
    lc_t, lc_r, lc_e, idx, avg_rate = data_cleaner(lc_t_raw, lc_r_raw, lc_e_raw) ; print "Average count rate = ", avg_rate
    bg_t, bg_r, bg_e, bg_idx, avg_bg_rate = data_cleaner(bg_t_raw, bg_r_raw, bg_e_raw) ; print "Average background rate = ", avg_bg_rate
    ref_t, ref_r, ref_e, ref_idx, avg_ref_rate = data_cleaner(ref_t_raw, ref_r_raw, ref_e_raw) ; print "Average ref count rate = ", avg_ref_rate
    # NOTE(review): label below says "ref count rate" but this is the
    # reference BACKGROUND rate — copy-pasted print label.
    refbg_t, refbg_r, refbg_e, refbg_idx, avg_refbg_rate = data_cleaner(refbg_t_raw, refbg_r_raw, refbg_e_raw) ; print "Average ref count rate = ", avg_refbg_rate

    # performing the DFT
    n_bins = len(lc_t)
    k = np.arange(n_bins-1)
    frq = k/max(lc_t)
    DFT = np.fft.fft(lc_r) #/n
    DFT_ref = np.fft.fft(ref_r)
    t_bins = lc_t[:-1]
    dt = t_bins[1] - t_bins[0]

    # grabbing only the relevant parts of frq and DFT
    half_n_bins = int((n_bins-1.0)/2.0)
    frq = frq[range(half_n_bins)]
    DFT = DFT[range(half_n_bins)]
    DFT_ref = DFT_ref[range(half_n_bins)]
    df = frq[1] - frq[0]  # NOTE(review): df appears unused below

    # computing the PSD and background level
    # rms-normalized PSDs with their Poisson-noise levels.
    PSD = (2.0*dt*abs(DFT)**2.0)/(n_bins*avg_rate**2.0)
    PN_lev = 2.0*(avg_rate + avg_bg_rate)/(avg_rate**2.0)
    PSD_ref = (2.0*dt*abs(DFT_ref)**2.0)/(n_bins*avg_ref_rate**2.0)
    PN_ref = 2.0*(avg_ref_rate + avg_refbg_rate)/(avg_ref_rate**2.0)

    # Allocate the (frequency x energy-band) amplitude/phase/cross-spectrum
    # tables on the first pass only.
    if (RUN == 0):
        w, h = n_lc, half_n_bins
        r = [[0 for x in range(w)] for y in range(h)]
        phi = [[0 for x in range(w)] for y in range(h)]
        r_ref = [[0 for x in range(w)] for y in range(h)]
        phi_ref = [[0 for x in range(w)] for y in range(h)]
        CS = [[0 for x in range(w)] for y in range(h)]

    # working with the DFT values
    # Polar form (amplitude, phase) of both transforms.
    for i in range (0,half_n_bins):
        r[i][RUN], phi[i][RUN] = cmath.polar(DFT[i])
        r_ref[i][RUN], phi_ref[i][RUN] = cmath.polar(DFT_ref[i])

    # compute the cross spectrum
    for row in range (0,half_n_bins):
        CS[row][RUN] = (r[row][RUN]*r_ref[row][RUN]) * np.exp((-1.0*phi[row][RUN] + phi_ref[row][RUN])*1j)

    # bin up the PSD and CS
    # Average PSD and CS over the low/mid/high frequency ranges.
    C_LF = 0 ; C_MF = 0 ; C_HF = 0
    PSD_LF_avg = 0 ; PSD_MF_avg = 0 ; PSD_HF_avg = 0
    CS_LF_avg = 0 ; CS_MF_avg = 0 ; CS_HF_avg = 0
    for i in range (0,len(frq)):
        if (0.1e-3 <= frq[i] <= 0.4e-3):
            C_LF += 1
            PSD_LF_avg += PSD[i]
            CS_LF_avg += CS[i][RUN]
        if (0.4e-3 <= frq[i] <= 1.5e-3):
            C_MF += 1
            PSD_MF_avg += PSD[i]
            CS_MF_avg += CS[i][RUN]
        if (2e-3 <= frq[i] <= 4e-3):
            C_HF += 1
            PSD_HF_avg += PSD[i]
            CS_HF_avg += CS[i][RUN]
    PSD_LF_avg = PSD_LF_avg / C_LF
    PSD_MF_avg = PSD_MF_avg / C_MF
    PSD_HF_avg = PSD_HF_avg / C_HF
    CS_LF_avg = CS_LF_avg / C_LF
    CS_MF_avg = CS_MF_avg / C_MF
    CS_HF_avg = CS_HF_avg / C_HF

    # Same frequency-binning for the reference-band PSD.
    C_ref_LF = 0 ; C_ref_MF = 0 ; C_ref_HF = 0
    PSD_ref_LF_avg = 0 ; PSD_ref_MF_avg = 0 ; PSD_ref_HF_avg = 0
    for i in range (0,len(frq)):
        if (0.1e-3 <= frq[i] <= 0.4e-3):
            C_ref_LF += 1
            PSD_ref_LF_avg += PSD_ref[i]
        if (0.4e-3 <= frq[i] <= 1.5e-3):
            C_ref_MF += 1
            PSD_ref_MF_avg += PSD_ref[i]
        if (2e-3 <= frq[i] <= 4e-3):
            C_ref_HF += 1
            PSD_ref_HF_avg += PSD_ref[i]
    PSD_ref_LF_avg = PSD_ref_LF_avg / C_ref_LF
    PSD_ref_MF_avg = PSD_ref_MF_avg / C_ref_MF
    PSD_ref_HF_avg = PSD_ref_HF_avg / C_ref_HF

    # Allocate the covariance spectra on the first pass only.
    if (RUN ==0):
        COV_LF = np.zeros(n_lc)
        COV_MF = np.zeros(n_lc)
        COV_HF = np.zeros(n_lc)

    # Covariance per frequency range: noise term nsq, band width dfrq,
    # then the standard covariance-spectrum formula.
    nsq_LF = ((PSD_LF_avg - PN_lev)*PN_ref + (PSD_ref_LF_avg - PN_ref)*PN_lev + PN_lev*PN_ref)/C_LF
    dfrq_LF = Df_LF
    COV_LF[RUN] = avg_rate * np.sqrt( dfrq_LF*(abs(CS_LF_avg)**2.0 - nsq_LF) / (PSD_ref_LF_avg - PN_ref) )

    nsq_MF = ((PSD_MF_avg - PN_lev)*PN_ref + (PSD_ref_MF_avg - PN_ref)*PN_lev + PN_lev*PN_ref)/C_MF
    dfrq_MF = Df_MF
    COV_MF[RUN] = avg_rate * np.sqrt( dfrq_MF*(abs(CS_MF_avg)**2.0 - nsq_MF) / (PSD_ref_MF_avg - PN_ref) )

    nsq_HF = ((PSD_HF_avg - PN_lev)*PN_ref + (PSD_ref_HF_avg - PN_ref)*PN_lev + PN_lev*PN_ref)/C_HF
    dfrq_HF = Df_HF
    COV_HF[RUN] = avg_rate * np.sqrt( dfrq_HF*(abs(CS_HF_avg)**2.0 - nsq_HF) / (PSD_ref_HF_avg - PN_ref) )
# Collect the per-band covariance spectra into one table:
# columns are [energy, COV_LF, COV_MF, COV_HF].
w, h = 4, len(energy)
M = [[0 for x in range(w)] for y in range(h)]
for i in range (0,len(energy)):
    M[i][0], M[i][1], M[i][2], M[i][3] = energy[i], COV_LF[i], COV_MF[i], COV_HF[i]

# Optional append of the table to a per-orbit text file.
##outfile = open('E_COV_LMH_2768.txt','a')
#outfile = open('E_COV_LMH_2769.txt','a')
#np.savetxt(outfile,M)
#outfile.close()

# Covariance spectrum: one curve per frequency range.
plt.figure(1)
plt.loglog(energy, COV_LF, '-or', label = "LF")
plt.loglog(energy, COV_MF, '-og', label = "MF")
plt.loglog(energy, COV_HF, '-ob', label = "HF")
plt.xlabel("Energy [keV]")
plt.ylabel('keV$^2$ (Photons cm$^{-2}$ s$^{-1}$ keV$^{-1}$)')
plt.xlim(0.20,10.0)
plt.legend(loc=3,labelspacing=0.1,fontsize=16)
plt.show()
#plt.savefig(pic,bbox_inches='tight')
| mit |
brandond/ansible | lib/ansible/modules/network/fortios/fortios_report_chart.py | 19 | 27950 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# the lib use python logging can get it if the following is set in your
# Ansible config.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_report_chart
short_description: Report chart widget configuration in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS by allowing the
user to set and modify report feature and chart category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.2
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate ip address.
required: true
username:
description:
- FortiOS or FortiGate username.
required: true
password:
description:
- FortiOS or FortiGate password.
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS
protocol
type: bool
default: true
report_chart:
description:
- Report chart widget configuration.
default: null
suboptions:
state:
description:
- Indicates whether to create or remove the object
choices:
- present
- absent
background:
description:
- Chart background.
category:
description:
- Category.
choices:
- misc
- traffic
- event
- virus
- webfilter
- attack
- spam
- dlp
- app-ctrl
- vulnerability
category-series:
description:
- Category series of pie chart.
suboptions:
databind:
description:
- Category series value expression.
font-size:
description:
- Font size of category-series title.
color-palette:
description:
- Color palette (system will pick color automatically by default).
column:
description:
- Table column definition.
suboptions:
detail-unit:
description:
- Detail unit of column.
detail-value:
description:
- Detail value of column.
footer-unit:
description:
- Footer unit of column.
footer-value:
description:
- Footer value of column.
header-value:
description:
- Display name of table header.
id:
description:
- ID.
required: true
mapping:
description:
- Show detail in certain display value for certain condition.
suboptions:
displayname:
description:
- Display name.
id:
description:
- id
required: true
op:
description:
- Comparision operater.
choices:
- none
- greater
- greater-equal
- less
- less-equal
- equal
- between
value-type:
description:
- Value type.
choices:
- integer
- string
value1:
description:
- Value 1.
value2:
description:
- Value 2.
comments:
description:
- Comment.
dataset:
description:
- Bind dataset to chart.
dimension:
description:
- Dimension.
choices:
- 2D
- 3D
drill-down-charts:
description:
- Drill down charts.
suboptions:
chart-name:
description:
- Drill down chart name.
id:
description:
- Drill down chart ID.
required: true
status:
description:
- Enable/disable this drill down chart.
choices:
- enable
- disable
favorite:
description:
- Favorite.
choices:
- no
- yes
graph-type:
description:
- Graph type.
choices:
- none
- bar
- pie
- line
- flow
legend:
description:
- Enable/Disable Legend area.
choices:
- enable
- disable
legend-font-size:
description:
- Font size of legend area.
name:
description:
- Chart Widget Name
required: true
period:
description:
- Time period.
choices:
- last24h
- last7d
policy:
description:
- Used by monitor policy.
style:
description:
- Style.
choices:
- auto
- manual
title:
description:
- Chart title.
title-font-size:
description:
- Font size of chart title.
type:
description:
- Chart type.
choices:
- graph
- table
value-series:
description:
- Value series of pie chart.
suboptions:
databind:
description:
- Value series value expression.
x-series:
description:
- X-series of chart.
suboptions:
caption:
description:
- X-series caption.
caption-font-size:
description:
- X-series caption font size.
databind:
description:
- X-series value expression.
font-size:
description:
- X-series label font size.
is-category:
description:
- X-series represent category or not.
choices:
- yes
- no
label-angle:
description:
- X-series label angle.
choices:
- 45-degree
- vertical
- horizontal
scale-direction:
description:
- Scale increase or decrease.
choices:
- decrease
- increase
scale-format:
description:
- Date/time format.
choices:
- YYYY-MM-DD-HH-MM
- YYYY-MM-DD HH
- YYYY-MM-DD
- YYYY-MM
- YYYY
- HH-MM
- MM-DD
scale-step:
description:
- Scale step.
scale-unit:
description:
- Scale unit.
choices:
- minute
- hour
- day
- month
- year
unit:
description:
- X-series unit.
y-series:
description:
- Y-series of chart.
suboptions:
caption:
description:
- Y-series caption.
caption-font-size:
description:
- Y-series caption font size.
databind:
description:
- Y-series value expression.
extra-databind:
description:
- Extra Y-series value.
extra-y:
description:
- Allow another Y-series value
choices:
- enable
- disable
extra-y-legend:
description:
- Extra Y-series legend type/name.
font-size:
description:
- Y-series label font size.
group:
description:
- Y-series group option.
label-angle:
description:
- Y-series label angle.
choices:
- 45-degree
- vertical
- horizontal
unit:
description:
- Y-series unit.
y-legend:
description:
- First Y-series legend type/name.
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
tasks:
- name: Report chart widget configuration.
fortios_report_chart:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
report_chart:
state: "present"
background: "<your_own_value>"
category: "misc"
category-series:
databind: "<your_own_value>"
font-size: "7"
color-palette: "<your_own_value>"
column:
-
detail-unit: "<your_own_value>"
detail-value: "<your_own_value>"
footer-unit: "<your_own_value>"
footer-value: "<your_own_value>"
header-value: "<your_own_value>"
id: "15"
mapping:
-
displayname: "<your_own_value>"
id: "18"
op: "none"
value-type: "integer"
value1: "<your_own_value>"
value2: "<your_own_value>"
comments: "<your_own_value>"
dataset: "<your_own_value>"
dimension: "2D"
drill-down-charts:
-
chart-name: "<your_own_value>"
id: "28"
status: "enable"
favorite: "no"
graph-type: "none"
legend: "enable"
legend-font-size: "33"
name: "default_name_34"
period: "last24h"
policy: "36"
style: "auto"
title: "<your_own_value>"
title-font-size: "39"
type: "graph"
value-series:
databind: "<your_own_value>"
x-series:
caption: "<your_own_value>"
caption-font-size: "45"
databind: "<your_own_value>"
font-size: "47"
is-category: "yes"
label-angle: "45-degree"
scale-direction: "decrease"
scale-format: "YYYY-MM-DD-HH-MM"
scale-step: "52"
scale-unit: "minute"
unit: "<your_own_value>"
y-series:
caption: "<your_own_value>"
caption-font-size: "57"
databind: "<your_own_value>"
extra-databind: "<your_own_value>"
extra-y: "enable"
extra-y-legend: "<your_own_value>"
font-size: "62"
group: "<your_own_value>"
label-angle: "45-degree"
unit: "<your_own_value>"
y-legend: "<your_own_value>"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
fos = None
def login(data):
    """Authenticate the module-level ``fos`` connection.

    ``data`` must carry ``host``, ``username`` and ``password``; the
    optional ``https`` flag selects plain HTTP when explicitly false.
    """
    host = data['host']
    username = data['username']
    password = data['password']

    fos.debug('on')
    # HTTPS is the default; fall back to HTTP only when the caller
    # explicitly disabled it.
    use_https = data.get('https', True)
    fos.https('on' if use_https else 'off')

    fos.login(host, username, password)
def filter_report_chart_data(json):
    """Return only the recognized report-chart options from *json*.

    Keys that are not valid report-chart options, and keys whose value
    is None, are dropped.
    """
    option_list = ['background', 'category', 'category-series',
                   'color-palette', 'column', 'comments',
                   'dataset', 'dimension', 'drill-down-charts',
                   'favorite', 'graph-type', 'legend',
                   'legend-font-size', 'name', 'period',
                   'policy', 'style', 'title',
                   'title-font-size', 'type', 'value-series',
                   'x-series', 'y-series']

    return dict((option, json[option])
                for option in option_list
                if option in json and json[option] is not None)
def flatten_multilists_attributes(data):
    """Join list-valued attributes of *data* into space-separated strings.

    Each entry of ``multilist_attrs`` is a path (sequence of keys) into the
    nested ``data`` dict; the value found there is replaced in place by
    ``' '.join(...)`` of its elements.  Missing paths are silently skipped.
    The list is currently empty, so this is a no-op pass-through; the hook
    is kept for parity with the other fortios modules.

    Returns ``data`` (mutated in place).
    """
    multilist_attrs = []

    for attr in multilist_attrs:
        try:
            # Walk to the parent container of the target attribute instead
            # of building a code string for eval()/exec() -- same behavior,
            # without dynamic code execution.
            parent = data
            for key in attr[:-1]:
                parent = parent[key]
            parent[attr[-1]] = ' '.join(elem for elem in parent[attr[-1]])
        except BaseException:
            # Best effort: leave the attribute untouched if the path is
            # absent or not iterable (mirrors the original broad except).
            pass

    return data
def report_chart(data, fos):
    """Apply the desired report-chart state on the FortiGate.

    Creates/updates the chart when state is "present" and deletes it
    (keyed by its name) when state is "absent".
    """
    vdom = data['vdom']
    chart_params = data['report_chart']

    # Normalize multi-value fields, then drop unknown/None options.
    payload = filter_report_chart_data(
        flatten_multilists_attributes(chart_params))

    state = chart_params['state']
    if state == "present":
        return fos.set('report',
                       'chart',
                       data=payload,
                       vdom=vdom)
    if state == "absent":
        return fos.delete('report',
                          'chart',
                          mkey=payload['name'],
                          vdom=vdom)
def fortios_report(data, fos):
    """Log in, run the requested report operation and log out.

    Returns the standard ``(is_error, has_changed, result)`` triple
    expected by ``main()``.
    """
    login(data)

    if data['report_chart']:
        resp = report_chart(data, fos)

    fos.logout()
    succeeded = resp['status'] == "success"
    return not succeeded, succeeded, resp
def main():
    """Module entry point: declare the argument spec, connect to the
    FortiGate and apply the requested report-chart configuration."""
    # Ansible argument_spec; the nested "options" mirror the FortiOS
    # report/chart CLI schema (dash-separated keys are FortiOS names).
    fields = {
        "host": {"required": True, "type": "str"},
        "username": {"required": True, "type": "str"},
        "password": {"required": False, "type": "str", "no_log": True},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "https": {"required": False, "type": "bool", "default": True},
        "report_chart": {
            "required": False, "type": "dict",
            "options": {
                "state": {"required": True, "type": "str",
                          "choices": ["present", "absent"]},
                "background": {"required": False, "type": "str"},
                "category": {"required": False, "type": "str",
                             "choices": ["misc", "traffic", "event",
                                         "virus", "webfilter", "attack",
                                         "spam", "dlp", "app-ctrl",
                                         "vulnerability"]},
                "category-series": {"required": False, "type": "dict",
                                    "options": {
                                        "databind": {"required": False, "type": "str"},
                                        "font-size": {"required": False, "type": "int"}
                                    }},
                "color-palette": {"required": False, "type": "str"},
                "column": {"required": False, "type": "list",
                           "options": {
                               "detail-unit": {"required": False, "type": "str"},
                               "detail-value": {"required": False, "type": "str"},
                               "footer-unit": {"required": False, "type": "str"},
                               "footer-value": {"required": False, "type": "str"},
                               "header-value": {"required": False, "type": "str"},
                               "id": {"required": True, "type": "int"},
                               "mapping": {"required": False, "type": "list",
                                           "options": {
                                               "displayname": {"required": False, "type": "str"},
                                               "id": {"required": True, "type": "int"},
                                               "op": {"required": False, "type": "str",
                                                      "choices": ["none", "greater", "greater-equal",
                                                                  "less", "less-equal", "equal",
                                                                  "between"]},
                                               "value-type": {"required": False, "type": "str",
                                                              "choices": ["integer", "string"]},
                                               "value1": {"required": False, "type": "str"},
                                               "value2": {"required": False, "type": "str"}
                                           }}
                           }},
                "comments": {"required": False, "type": "str"},
                "dataset": {"required": False, "type": "str"},
                "dimension": {"required": False, "type": "str",
                              "choices": ["2D", "3D"]},
                "drill-down-charts": {"required": False, "type": "list",
                                      "options": {
                                          "chart-name": {"required": False, "type": "str"},
                                          "id": {"required": True, "type": "int"},
                                          "status": {"required": False, "type": "str",
                                                     "choices": ["enable", "disable"]}
                                      }},
                "favorite": {"required": False, "type": "str",
                             "choices": ["no", "yes"]},
                "graph-type": {"required": False, "type": "str",
                               "choices": ["none", "bar", "pie",
                                           "line", "flow"]},
                "legend": {"required": False, "type": "str",
                           "choices": ["enable", "disable"]},
                "legend-font-size": {"required": False, "type": "int"},
                "name": {"required": True, "type": "str"},
                "period": {"required": False, "type": "str",
                           "choices": ["last24h", "last7d"]},
                "policy": {"required": False, "type": "int"},
                "style": {"required": False, "type": "str",
                          "choices": ["auto", "manual"]},
                "title": {"required": False, "type": "str"},
                "title-font-size": {"required": False, "type": "int"},
                "type": {"required": False, "type": "str",
                         "choices": ["graph", "table"]},
                "value-series": {"required": False, "type": "dict",
                                 "options": {
                                     "databind": {"required": False, "type": "str"}
                                 }},
                "x-series": {"required": False, "type": "dict",
                             "options": {
                                 "caption": {"required": False, "type": "str"},
                                 "caption-font-size": {"required": False, "type": "int"},
                                 "databind": {"required": False, "type": "str"},
                                 "font-size": {"required": False, "type": "int"},
                                 "is-category": {"required": False, "type": "str",
                                                 "choices": ["yes", "no"]},
                                 "label-angle": {"required": False, "type": "str",
                                                 "choices": ["45-degree", "vertical", "horizontal"]},
                                 "scale-direction": {"required": False, "type": "str",
                                                     "choices": ["decrease", "increase"]},
                                 "scale-format": {"required": False, "type": "str",
                                                  "choices": ["YYYY-MM-DD-HH-MM", "YYYY-MM-DD HH", "YYYY-MM-DD",
                                                              "YYYY-MM", "YYYY", "HH-MM",
                                                              "MM-DD"]},
                                 "scale-step": {"required": False, "type": "int"},
                                 "scale-unit": {"required": False, "type": "str",
                                                "choices": ["minute", "hour", "day",
                                                            "month", "year"]},
                                 "unit": {"required": False, "type": "str"}
                             }},
                "y-series": {"required": False, "type": "dict",
                             "options": {
                                 "caption": {"required": False, "type": "str"},
                                 "caption-font-size": {"required": False, "type": "int"},
                                 "databind": {"required": False, "type": "str"},
                                 "extra-databind": {"required": False, "type": "str"},
                                 "extra-y": {"required": False, "type": "str",
                                             "choices": ["enable", "disable"]},
                                 "extra-y-legend": {"required": False, "type": "str"},
                                 "font-size": {"required": False, "type": "int"},
                                 "group": {"required": False, "type": "str"},
                                 "label-angle": {"required": False, "type": "str",
                                                 "choices": ["45-degree", "vertical", "horizontal"]},
                                 "unit": {"required": False, "type": "str"},
                                 "y-legend": {"required": False, "type": "str"}
                             }}
            }
        }
    }

    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)

    # fortiosapi is an optional runtime dependency; fail cleanly if absent.
    try:
        from fortiosapi import FortiOSAPI
    except ImportError:
        module.fail_json(msg="fortiosapi module is required")

    # The connection object is shared through the module-level global
    # so login()/report_chart() can reach it.
    global fos
    fos = FortiOSAPI()

    is_error, has_changed, result = fortios_report(module.params, fos)

    if not is_error:
        module.exit_json(changed=has_changed, meta=result)
    else:
        module.fail_json(msg="Error in repo", meta=result)


if __name__ == '__main__':
    main()
| gpl-3.0 |
Contextualist/Quip4AHA | lib/flask/logging.py | 4 | 1443 | # -*- coding: utf-8 -*-
"""
flask.logging
~~~~~~~~~~~~~
Implements the logging support for Flask.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from logging import getLogger, StreamHandler, Formatter, getLoggerClass, DEBUG
def create_logger(app):
    """Build the application logger for *app*.

    The returned logger behaves like a normal logger, except that its
    effective level becomes DEBUG whenever the application's debug flag
    is on, and its handler emits only while debugging.  Any handlers
    previously attached under the same logger name are removed.
    """
    base_cls = getLoggerClass()

    class DebugLogger(base_cls):
        def getEffectiveLevel(self):
            # An unset level follows the app's debug flag.
            if self.level == 0 and app.debug:
                return DEBUG
            return base_cls.getEffectiveLevel(self)

    class DebugHandler(StreamHandler):
        def emit(self, record):
            # Swallow records entirely outside debug mode.
            if app.debug:
                StreamHandler.emit(self, record)

    handler = DebugHandler()
    handler.setLevel(DEBUG)
    handler.setFormatter(Formatter(app.debug_log_format))

    logger = getLogger(app.logger_name)
    # Drop any handlers left over from a previous logger with this name.
    del logger.handlers[:]
    logger.__class__ = DebugLogger
    logger.addHandler(handler)
    return logger
| apache-2.0 |
slayerjain/servo | tests/wpt/css-tests/tools/html5lib/html5lib/treewalkers/dom.py | 1229 | 1457 | from __future__ import absolute_import, division, unicode_literals
from xml.dom import Node
import gettext
_ = gettext.gettext
from . import _base
class TreeWalker(_base.NonRecursiveTreeWalker):
    """Non-recursive html5lib tree walker over ``xml.dom`` trees."""

    def getNodeDetails(self, node):
        """Translate a DOM node into an html5lib token description."""
        node_type = node.nodeType
        if node_type == Node.DOCUMENT_TYPE_NODE:
            return _base.DOCTYPE, node.name, node.publicId, node.systemId
        if node_type in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE):
            return _base.TEXT, node.nodeValue
        if node_type == Node.ELEMENT_NODE:
            # Collect attributes keyed by (namespaceURI, localName); a
            # None namespace keeps the qualified attribute name.
            attrs = {}
            for attr_name in list(node.attributes.keys()):
                attr_node = node.getAttributeNode(attr_name)
                if attr_node.namespaceURI:
                    key = (attr_node.namespaceURI, attr_node.localName)
                else:
                    key = (None, attr_node.name)
                attrs[key] = attr_node.value
            return (_base.ELEMENT, node.namespaceURI, node.nodeName,
                    attrs, node.hasChildNodes())
        if node_type == Node.COMMENT_NODE:
            return _base.COMMENT, node.nodeValue
        if node_type in (Node.DOCUMENT_NODE, Node.DOCUMENT_FRAGMENT_NODE):
            return (_base.DOCUMENT,)
        return _base.UNKNOWN, node_type

    def getFirstChild(self, node):
        return node.firstChild

    def getNextSibling(self, node):
        return node.nextSibling

    def getParentNode(self, node):
        return node.parentNode
| mpl-2.0 |
UOSHUB/BackEnd | API/views/finals.py | 1 | 1132 | from rest_framework.response import Response
from rest_framework.views import APIView
from .common import login_required
from Requests import myudc
from .terms import Terms
from time import time
# Final Exams requests handler
class Finals(APIView):
    """
    This returns student's final exams schedule,
    which's an array of final exams' data.
    """
    # Handles GET requests for the final-exams schedule
    @staticmethod
    @login_required("myudc")
    def get(request, term_code):
        """Return the scraped final-exam list for *term_code*, or the
        available terms when no term was requested."""
        # No term specified -> fall back to listing available terms
        if not term_code:
            return Terms.get(request)
        # Back-date the cached MyUDC session timestamp so the next
        # request re-authenticates (forces a fresh login)
        request.session["myudc_time"] = time() - 15 * 60
        # Fetch the raw final-exams page with the stored session,
        # scrape it, and wrap the result in an API response
        raw_page = myudc.get.final_exams(request.session["myudc"], term_code)
        return Response(myudc.scrape.final_exams(raw_page))
| gpl-3.0 |
milinbhakta/flaskjinja | flask1/Lib/encodings/charmap.py | 860 | 2084 | """ Generic Python Character Mapping Codec.
Use this codec directly rather than through the automatic
conversion mechanisms supplied by unicode() and .encode().
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless charmap codec; delegates straight to the C primitives."""

    # Note: Binding these as C functions will result in the class not
    # converting them to methods. This is intended.
    encode = codecs.charmap_encode
    decode = codecs.charmap_decode
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental encoder driving charmap_encode with a fixed mapping."""

    def __init__(self, errors='strict', mapping=None):
        codecs.IncrementalEncoder.__init__(self, errors)
        # Encoding map used for every encode() call (None = latin-1-like).
        self.mapping = mapping

    def encode(self, input, final=False):
        encoded, _consumed = codecs.charmap_encode(input, self.errors, self.mapping)
        return encoded
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental decoder driving charmap_decode with a fixed mapping."""

    def __init__(self, errors='strict', mapping=None):
        codecs.IncrementalDecoder.__init__(self, errors)
        # Decoding map used for every decode() call (None = latin-1-like).
        self.mapping = mapping

    def decode(self, input, final=False):
        decoded, _consumed = codecs.charmap_decode(input, self.errors, self.mapping)
        return decoded
class StreamWriter(Codec,codecs.StreamWriter):
    """Stream writer parameterized by a charmap encoding map."""

    def __init__(self,stream,errors='strict',mapping=None):
        codecs.StreamWriter.__init__(self,stream,errors)
        # Encoding map forwarded on every encode() call.
        self.mapping = mapping

    def encode(self,input,errors='strict'):
        # Codec.encode is the raw charmap_encode C function (bound as a
        # plain function, not a method), so it is called with the text
        # directly rather than through an instance.
        return Codec.encode(input,errors,self.mapping)
class StreamReader(Codec,codecs.StreamReader):
    """Stream reader parameterized by a charmap decoding map."""

    def __init__(self,stream,errors='strict',mapping=None):
        codecs.StreamReader.__init__(self,stream,errors)
        # Decoding map forwarded on every decode() call.
        self.mapping = mapping

    def decode(self,input,errors='strict'):
        # Codec.decode is the raw charmap_decode C function (bound as a
        # plain function, not a method), hence the direct call style.
        return Codec.decode(input,errors,self.mapping)
### encodings module API
def getregentry():
    """Return the CodecInfo that registers this codec with the
    ``codecs`` machinery under the name 'charmap'."""
    return codecs.CodecInfo(
        name='charmap',
        encode=Codec.encode,
        decode=Codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamwriter=StreamWriter,
        streamreader=StreamReader,
    )
| gpl-2.0 |
hassanibi/erpnext | erpnext/accounts/doctype/tax_rule/tax_rule.py | 8 | 5911 | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.model.document import Document
from frappe.utils import cstr, cint
from frappe.geo.doctype.address.address import get_default_address
# Domain-specific validation errors raised by TaxRule checks.
class IncorrectCustomerGroup(frappe.ValidationError): pass

class IncorrectSupplierType(frappe.ValidationError): pass

class ConflictingTaxRule(frappe.ValidationError): pass
class TaxRule(Document):
    """Tax Rule doctype controller.

    Validates that a rule's fields are internally consistent, that its
    date range is sane, and that no other rule with the same filters,
    overlapping dates and the same priority already exists.
    """

    def __setup__(self):
        # ConflictingTaxRule is an expected failure in the test suite.
        self.flags.ignore_these_exceptions_in_test = [ConflictingTaxRule]

    def validate(self):
        self.validate_tax_template()
        self.validate_date()
        self.validate_filters()
        self.validate_use_for_shopping_cart()

    def validate_tax_template(self):
        """Blank out the fields of the non-applicable side (Sales vs
        Purchase) and require a tax template for the applicable one."""
        if self.tax_type== "Sales":
            self.purchase_tax_template = self.supplier = self.supplier_type = None
            # A specific customer makes the customer-group filter redundant.
            if self.customer:
                self.customer_group = None
        else:
            self.sales_tax_template = self.customer = self.customer_group = None
            # A specific supplier makes the supplier-type filter redundant.
            if self.supplier:
                self.supplier_type = None

        if not (self.sales_tax_template or self.purchase_tax_template):
            frappe.throw(_("Tax Template is mandatory."))

    def validate_date(self):
        if self.from_date and self.to_date and self.from_date > self.to_date:
            frappe.throw(_("From Date cannot be greater than To Date"))

    def validate_filters(self):
        """Raise ConflictingTaxRule when another rule matches the same
        filter values, overlaps this rule's date range, and shares the
        same priority."""
        filters = {
            "tax_type": self.tax_type,
            "customer": self.customer,
            "customer_group": self.customer_group,
            "supplier": self.supplier,
            "supplier_type": self.supplier_type,
            "billing_city": self.billing_city,
            "billing_county": self.billing_county,
            "billing_state": self.billing_state,
            "billing_country": self.billing_country,
            "shipping_city": self.shipping_city,
            "shipping_county": self.shipping_county,
            "shipping_state": self.shipping_state,
            "shipping_country": self.shipping_country,
            "company": self.company
        }

        # NOTE(review): the WHERE clause is assembled by interpolating
        # values into SQL; frappe.db.escape is applied, but confirm its
        # semantics here -- parameterized queries would be safer.
        conds=""
        for d in filters:
            if conds:
                conds += " and "
            conds += """ifnull({0}, '') = '{1}'""".format(d, frappe.db.escape(cstr(filters[d])))

        if self.from_date and self.to_date:
            # Full overlap test: either rule's boundary falls strictly
            # inside the other's range, or both ranges match exactly.
            conds += """ and ((from_date > '{from_date}' and from_date < '{to_date}') or
                (to_date > '{from_date}' and to_date < '{to_date}') or
                ('{from_date}' > from_date and '{from_date}' < to_date) or
                ('{from_date}' = from_date and '{to_date}' = to_date))""".format(from_date=self.from_date, to_date=self.to_date)

        elif self.from_date and not self.to_date:
            conds += """ and to_date > '{from_date}'""".format(from_date = self.from_date)

        elif self.to_date and not self.from_date:
            conds += """ and from_date < '{to_date}'""".format(to_date = self.to_date)

        tax_rule = frappe.db.sql("select name, priority \
            from `tabTax Rule` where {0} and name != '{1}'".format(conds, self.name), as_dict=1)

        if tax_rule:
            # Same priority means Frappe cannot pick a winner -> conflict.
            if tax_rule[0].priority == self.priority:
                frappe.throw(_("Tax Rule Conflicts with {0}".format(tax_rule[0].name)), ConflictingTaxRule)

    def validate_use_for_shopping_cart(self):
        '''If shopping cart is enabled and no tax rule exists for shopping cart, enable this one'''
        if (not self.use_for_shopping_cart
            and cint(frappe.db.get_single_value('Shopping Cart Settings', 'enabled'))
            and not frappe.db.get_value('Tax Rule', {'use_for_shopping_cart': 1, 'name': ['!=', self.name]})):
            self.use_for_shopping_cart = 1
            frappe.msgprint(_("Enabling 'Use for Shopping Cart', as Shopping Cart is enabled and there should be at least one Tax Rule for Shopping Cart"))
@frappe.whitelist()
def get_party_details(party, party_type, args=None):
    """Return billing/shipping city, county, state and country for *party*.

    Address documents are taken from *args* (keys 'billing_address' /
    'shipping_address' naming Address records) when supplied, otherwise
    from the party's default billing and shipping addresses.
    """
    out = {}
    billing_address, shipping_address = None, None
    if args:
        if args.get('billing_address'):
            billing_address = frappe.get_doc('Address', args.get('billing_address'))
        if args.get('shipping_address'):
            shipping_address = frappe.get_doc('Address', args.get('shipping_address'))
    else:
        billing_address_name = get_default_address(party_type, party)
        shipping_address_name = get_default_address(party_type, party, 'is_shipping_address')

        if billing_address_name:
            billing_address = frappe.get_doc('Address', billing_address_name)
        if shipping_address_name:
            shipping_address = frappe.get_doc('Address', shipping_address_name)

    if billing_address:
        out["billing_city"]= billing_address.city
        out["billing_county"]= billing_address.county
        out["billing_state"]= billing_address.state
        out["billing_country"]= billing_address.country

    if shipping_address:
        out["shipping_city"]= shipping_address.city
        out["shipping_county"]= shipping_address.county
        out["shipping_state"]= shipping_address.state
        out["shipping_country"]= shipping_address.country

    return out
def get_tax_template(posting_date, args):
    """Get matching tax rule.

    Returns the tax template name of the best-matching, enabled Tax Rule
    for *posting_date* and the filter values in *args*, or None.  Rules
    are ranked by number of matched filters, then by priority.

    NOTE(review): uses dict.iteritems() and a cmp-style sorted() call, so
    this function is Python-2-only as written.
    """
    args = frappe._dict(args)

    # Date-window condition; empty/null dates mean "always applicable".
    conditions = ["""(from_date is null or from_date = '' or from_date <= '{0}')
        and (to_date is null or to_date = '' or to_date >= '{0}')""".format(posting_date)]

    for key, value in args.iteritems():
        if key=="use_for_shopping_cart":
            conditions.append("use_for_shopping_cart = {0}".format(1 if value else 0))
        else:
            # A rule field may be blank (matches anything) or equal the
            # requested value.
            conditions.append("ifnull({0}, '') in ('', '{1}')".format(key, frappe.db.escape(cstr(value))))

    tax_rule = frappe.db.sql("""select * from `tabTax Rule`
        where {0}""".format(" and ".join(conditions)), as_dict = True)

    if not tax_rule:
        return None

    # Rank candidates: most matched filter keys first, then priority.
    for rule in tax_rule:
        rule.no_of_keys_matched = 0
        for key in args:
            if rule.get(key): rule.no_of_keys_matched += 1

    rule = sorted(tax_rule, lambda b, a: cmp(a.no_of_keys_matched, b.no_of_keys_matched) or cmp(a.priority, b.priority))[0]

    tax_template = rule.sales_tax_template or rule.purchase_tax_template
    doctype = "{0} Taxes and Charges Template".format(rule.tax_type)
    # A disabled template must not be proposed.
    if frappe.db.get_value(doctype, tax_template, 'disabled')==1:
        return None

    return tax_template
| gpl-3.0 |
eayunstack/nova | nova/virt/baremetal/db/sqlalchemy/migrate_repo/versions/008_remove_bm_pxe_ips_table.py | 29 | 1961 | # Copyright 2013 Mirantis Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import String
from sqlalchemy import Table
table_name = 'bm_pxe_ips'
def upgrade(migrate_engine):
    """Drop the deprecated bm_pxe_ips table."""
    meta = MetaData()
    meta.bind = migrate_engine

    bm_pxe_ips = Table(table_name, meta, autoload=True)
    bm_pxe_ips.drop()
def downgrade(migrate_engine):
    """Re-create the bm_pxe_ips table and its index (reverse of upgrade)."""
    meta = MetaData()
    meta.bind = migrate_engine

    bm_pxe_ips = Table(table_name, meta,
                       # Standard soft-delete bookkeeping columns.
                       Column('created_at', DateTime),
                       Column('updated_at', DateTime),
                       Column('deleted_at', DateTime),
                       Column('deleted', Boolean),
                       Column('id', Integer, primary_key=True, nullable=False),
                       Column('address', String(length=255), unique=True),
                       Column('bm_node_id', Integer),
                       Column('server_address',
                              String(length=255), unique=True),
                       mysql_engine='InnoDB',
                       )
    bm_pxe_ips.create()

    # Composite index used by node lookups that honor soft deletes.
    Index(
        'idx_bm_pxe_ips_bm_node_id_deleted',
        bm_pxe_ips.c.bm_node_id,
        bm_pxe_ips.c.deleted
    ).create(migrate_engine)
| apache-2.0 |
ovnicraft/openerp-restaurant | sale_margin/sale_margin.py | 65 | 4085 | ##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class sale_order_line(osv.osv):
    """Extends sale.order.line with a cost price and computed margin."""
    _inherit = "sale.order.line"

    def product_id_change(self, cr, uid, ids, pricelist, product, qty=0,
            uom=False, qty_uos=0, uos=False, name='', partner_id=False,
            lang=False, update_tax=True, date_order=False, packaging=False, fiscal_position=False, flag=False, context=None):
        """Extend the standard product onchange to also propose a cost
        price (purchase_price) converted into the pricelist currency."""
        res = super(sale_order_line, self).product_id_change(cr, uid, ids, pricelist, product, qty=qty,
            uom=uom, qty_uos=qty_uos, uos=uos, name=name, partner_id=partner_id,
            lang=lang, update_tax=update_tax, date_order=date_order, packaging=packaging, fiscal_position=fiscal_position, flag=flag, context=context)
        if not pricelist:
            return res
        # Convert the product's standard (cost) price from the current
        # user's company currency into the pricelist currency.
        frm_cur = self.pool.get('res.users').browse(cr, uid, uid).company_id.currency_id.id
        to_cur = self.pool.get('product.pricelist').browse(cr, uid, [pricelist])[0].currency_id.id
        if product:
            purchase_price = self.pool.get('product.product').browse(cr, uid, product).standard_price
            price = self.pool.get('res.currency').compute(cr, uid, frm_cur, to_cur, purchase_price, round=False)
            res['value'].update({'purchase_price': price})
        return res

    def _product_margin(self, cr, uid, ids, field_name, arg, context=None):
        """Line margin = discounted line total minus line cost.

        Falls back to the product's standard_price when no explicit
        purchase_price is stored on the line.
        """
        res = {}
        for line in self.browse(cr, uid, ids, context=context):
            res[line.id] = 0
            if line.product_id:
                if line.purchase_price:
                    res[line.id] = round((line.price_unit*line.product_uos_qty*(100.0-line.discount)/100.0) -(line.purchase_price*line.product_uos_qty), 2)
                else:
                    res[line.id] = round((line.price_unit*line.product_uos_qty*(100.0-line.discount)/100.0) -(line.product_id.standard_price*line.product_uos_qty), 2)
        return res

    _columns = {
        # Stored functional field recomputed via _product_margin.
        'margin': fields.function(_product_margin, string='Margin',
            store = True),
        'purchase_price': fields.float('Cost Price', digits=(16,2))
    }
class sale_order(osv.osv):
    """Extends sale.order with a margin aggregated from its lines."""
    _inherit = "sale.order"

    def _product_margin(self, cr, uid, ids, field_name, arg, context=None):
        """Order margin = sum of the margins of its order lines."""
        result = {}
        for sale in self.browse(cr, uid, ids, context=context):
            result[sale.id] = 0.0
            for line in sale.order_line:
                result[sale.id] += line.margin or 0.0
        return result

    def _get_order(self, cr, uid, ids, context=None):
        """Store-trigger helper: map changed sale.order.line ids to the
        parent order ids whose margin must be recomputed."""
        result = {}
        for line in self.pool.get('sale.order.line').browse(cr, uid, ids, context=context):
            result[line.order_id.id] = True
        return result.keys()

    _columns = {
        # Recompute when a line's margin changes, or when the order's
        # line set changes.
        'margin': fields.function(_product_margin, string='Margin', help="It gives profitability by calculating the difference between the Unit Price and the cost price.", store={
            'sale.order.line': (_get_order, ['margin'], 20),
            'sale.order': (lambda self, cr, uid, ids, c={}: ids, ['order_line'], 20),
        }),
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: | agpl-3.0 |
GoogleCloudPlatform/datacatalog-connectors | google-datacatalog-connectors-commons/src/google/datacatalog_connectors/commons/prepare/base_entry_relationship_mapper.py | 1 | 3800 | #!/usr/bin/python
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
from google.cloud import datacatalog
# Calling ABCMeta directly builds an abstract base class usable on both
# major Python versions (``abc.ABC`` exists only on Python 3).
ABC = abc.ABCMeta('ABC', (object,), {}) # compatible with Python 2 *and* 3
class BaseEntryRelationshipMapper(ABC):
    """Resolves relationships between Data Catalog entries.

    Subclasses implement ``fulfill_tag_fields`` and typically delegate to
    ``_fulfill_tag_fields`` with a list of resolver callables; each
    resolver fills tag fields with URLs pointing at related entries.
    """
    __DATA_CATALOG_UI_URL = 'https://console.cloud.google.com/datacatalog'
    __ID_FIELD_KEY = 'id'

    @abc.abstractmethod
    def fulfill_tag_fields(self, assembled_entries_data):
        pass

    @classmethod
    def _fulfill_tag_fields(cls, assembled_entries_data, resolvers):
        """Run every resolver over the assembled entries."""
        if not assembled_entries_data or not resolvers:
            return
        id_name_pairs = cls.__build_id_name_pairs(assembled_entries_data)
        for resolve in resolvers:
            resolve(assembled_entries_data, id_name_pairs)

    @classmethod
    def __build_id_name_pairs(cls, assembled_entries_data):
        """Map '<entry-type>-<asset-id>' keys to Data Catalog entry names."""
        id_name_pairs = {}
        id_field_key = cls._get_asset_identifier_tag_field_key()
        for assembled_entry in assembled_entries_data:
            entry = assembled_entry.entry
            for tag in assembled_entry.tags:
                if id_field_key not in tag.fields:
                    continue
                id_field = tag.fields[id_field_key]
                # The identifier may be stored as a string or as a number
                # (protobuf double), in which case it is truncated to int.
                asset_id = id_field.string_value \
                    if id_field.string_value \
                    else int(id_field.double_value)
                lookup_key = '{}-{}'.format(entry.user_specified_type,
                                            asset_id)
                id_name_pairs[lookup_key] = entry.name
        return id_name_pairs

    @classmethod
    def _get_asset_identifier_tag_field_key(cls):
        return cls.__ID_FIELD_KEY

    @classmethod
    def _map_related_entry(cls, assembled_entry_data, related_asset_type,
                           source_field_id, target_field_id, id_name_pairs):
        """For each related asset id found under ``source_field_id`` in the
        entry's tags, add a ``target_field_id`` field holding the related
        entry's Data Catalog URL."""
        tags_by_asset_id = {}
        related_asset_ids = []
        for tag in assembled_entry_data.tags or []:
            if source_field_id not in tag.fields:
                continue
            source_field = tag.fields[source_field_id]
            related_asset_id = source_field.string_value \
                if source_field.string_value \
                else int(source_field.double_value)
            related_asset_ids.append(related_asset_id)
            tags_by_asset_id.setdefault(related_asset_id, []).append(tag)
        for related_asset_id in related_asset_ids:
            lookup_key = '{}-{}'.format(related_asset_type, related_asset_id)
            if lookup_key not in id_name_pairs:
                continue
            for relationship_tag in tags_by_asset_id[related_asset_id]:
                url_field = datacatalog.TagField()
                url_field.string_value = cls.__format_related_entry_url(
                    id_name_pairs[lookup_key])
                relationship_tag.fields[target_field_id] = url_field

    @classmethod
    def __format_related_entry_url(cls, entry_name):
        return '{}/{}'.format(cls.__DATA_CATALOG_UI_URL, entry_name)
| apache-2.0 |
Senseg/Py4A | python3-alpha/extra_modules/atom/auth.py | 297 | 1199 | #!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
__author__ = 'j.s@google.com (Jeff Scudder)'
import base64
class BasicAuth(object):
    """Sets the Authorization header as defined in RFC1945 (HTTP Basic)."""

    def __init__(self, user_id, password):
        """Precompute the base64-encoded 'user_id:password' credential.

        Args:
          user_id: account identifier string.
          password: plaintext password string.
        """
        # base64.encodestring required bytes and was removed in Python 3.9;
        # b64encode on the UTF-8 encoding is the portable replacement and
        # emits no trailing newline, so no strip() is needed.
        credentials = '%s:%s' % (user_id, password)
        self.basic_cookie = base64.b64encode(
            credentials.encode('utf-8')).decode('ascii')

    def modify_request(self, http_request):
        """Attach the Basic Authorization header to http_request."""
        http_request.headers['Authorization'] = 'Basic %s' % self.basic_cookie

    ModifyRequest = modify_request
class NoAuth(object):
    """Auth object that performs no authentication (anonymous requests)."""
    def modify_request(self, http_request):
        # Intentionally a no-op: present only to satisfy the auth interface.
        pass
| apache-2.0 |
yongshengwang/hue | desktop/core/ext-py/guppy-0.1.10/guppy/heapy/Doc.py | 37 | 6806 | #._cv_part guppy.heapy.Doc
class Doc: # base class
    """Common behavior for nodes describing the origin of an object.

    Subclasses provide ``getstr`` (render to text) and ``mapchildren``
    (rebuild the node with a function applied to each child doc);
    ``self.mod`` is the owning glue module used to coerce arbitrary
    objects into Doc nodes.
    """
    def __mod__(self, other):
        # Substitute `other` into every child node.
        wrapped = self.mod.getdoc(other)
        return self.mapchildren(lambda child: child % wrapped)
    def __rmod__(self, other):
        return self.mod.getdoc(other) % self
    def __str__(self):
        return self.getstr()
    def __eq__(self, other):
        # Docs compare equal iff same class and identical rendering.
        if isinstance(other, self.__class__):
            return str(self) == str(other)
        return 0
    def __hash__(self):
        return hash(str(self))
    def shortest(self):
        # Recursively pick the shortest rendering among alternatives.
        return self.mapchildren(lambda child: child.shortest())
class Anon(Doc):
    """Leaf Doc wrapping an arbitrary object; renders as repr(obj)."""
    def __init__(self, mod, obj):
        self.mod = mod
        self.obj = obj
    def getstr(self):
        return repr(self.obj)
    def mapchildren(self, f):
        # Leaf node: no child docs to map.
        return self
class Source(Doc):
    """Leaf Doc holding literal source text; renders as-is."""
    def __init__(self, mod, text):
        self.mod = mod
        self.text = text
    def getstr(self):
        return self.text
    def mapchildren(self, f):
        # Leaf node: no child docs to map.
        return self
class Attribute(Doc):
    """Doc node for attribute access: renders as '<obj>.<name>'."""
    def __init__(self, mod, obj, name):
        self.mod = mod
        self.obj = obj
        self.name = name
    def __mod__(self, other):
        # Substituting the very object we dotted off turns this node
        # into a root attribute; otherwise substitute in the children.
        if self.obj is other:
            return self.mod.rootattribute(other, self.name)
        return self.mapchildren(lambda child: child % other)
    def getstr(self):
        return '%s.%s' % (self.obj.getstr(), self.name)
    def mapchildren(self, f):
        return self.__class__(self.mod, f(self.obj), self.name)
class RootAttribute(Doc):
    """Doc node for an attribute resolved on the root; renders as the bare name."""
    def __init__(self, mod, obj, name):
        self.mod = mod
        self.obj = obj
        self.name = name
    def getstr(self):
        return '%s'%(self.name,)
    def mapchildren(self, f):
        # Treated as a leaf: the wrapped object is not mapped.
        return self
class BinaryOp(Doc):
    """Doc node for an infix binary operation between two docs."""
    # Operation name -> Python operator spelling.
    table = {
        'and': '&',
        'or': '|',
        'sub': '-',
        'mul': '*',
        'pow': '**',
        'lshift': '<<',
        'rshift': '>>',
    }
    def __init__(self, mod, op, a, b):
        self.mod = mod
        self.op = op
        self.a = a
        self.b = b
    def getstr(self):
        return '%s %s %s' % (self.a.getstr(), self.table[self.op], self.b.getstr())
    def mapchildren(self, f):
        return self.__class__(self.mod, self.op, f(self.a), f(self.b))
class UnaryOp(Doc):
    """Doc node for a prefix unary operation on a single doc."""
    # Operation name -> operator spelling.
    table = {
        'invert': '~',
        'neg': '-',
        'pos': '+',
    }
    def __init__(self, mod, op, a):
        self.mod = mod
        self.op = op
        self.a = a
    def getstr(self):
        return '%s %s' % (self.table[self.op], self.a.getstr())
    def mapchildren(self, f):
        return self.__class__(self.mod, self.op, f(self.a))
class CallFunc(Doc):
    """Doc node for a call expression with positional and keyword args."""
    def __init__(self, mod, obj, *args, **kwds):
        self.mod = mod
        self.obj = obj
        self.args = args
        self.kwds = kwds
    def getstr(self):
        # Join positional and keyword renderings into a single
        # comma-separated list; the previous version concatenated the two
        # groups without a separator, producing e.g. 'f(ab=1)'.
        parts = [a.getstr() for a in self.args]
        parts.extend(['%s=%s' % (k, v.getstr())
                      for k, v in self.kwds.items()])
        return '%s(%s)' % (self.obj.getstr(), ', '.join(parts))
    def mapchildren(self, f):
        obj = f(self.obj)
        args = [f(a) for a in self.args]
        kwds = dict([(k, f(v)) for (k, v) in self.kwds.items()])
        return self.__class__(self.mod, obj, *args, **kwds)
class Multi(Doc):
    """Doc node holding a set of alternative origin renderings."""
    def __init__(self, mod, set):
        self.mod = mod
        # Pre-render the alternatives once; `set` maps Doc -> 1.
        self.str = '{%s}' % ', '.join([alt.getstr() for alt in set])
        self.set = set
    def getstr(self):
        return self.str
    def mapchildren(self, f):
        return self.__class__(self.mod, dict([(f(alt), 1) for alt in self.set]))
    def shortest(self):
        # Pick the alternative whose rendering is shortest.
        # NOTE(review): assumes self.set is non-empty; an empty set would
        # raise UnboundLocalError here (same as the original).
        shortest_len = None
        for alt in self.set:
            candidate = alt.shortest()
            length = len(candidate.getstr())
            if shortest_len is None or length < shortest_len:
                shortest_len = length
                winner = candidate
        return winner
class Root(Doc):
    """Doc node naming the root object; renders as its display name."""
    def __init__(self, mod, name='<root>'):
        self.mod = mod
        self.name = name
    def __call__(self, name):
        # Calling a Root produces a sibling Root with a new display name.
        return self.__class__(self.mod, name)
    def mapchildren(self, f):
        # Leaf node: nothing to map.
        return self
    def getstr(self):
        return self.name
class Tuple(Doc):
    """Doc node for a tuple expression of child docs."""
    def __init__(self, mod, *args):
        self.mod = mod
        self.args = args
    def mapchildren(self, f):
        return self.__class__(self.mod, *[f(child) for child in self.args])
    def getstr(self):
        rendered = ', '.join([child.getstr() for child in self.args])
        # A one-element tuple needs the trailing comma: '(x,)'.
        if len(self.args) == 1:
            return '(%s,)' % rendered
        return '(%s)' % rendered
class DocError(Exception):
    """Raised when a Doc operation receives an invalid value."""
    pass
class _GLUECLAMP_:
def add_origin(self, obj, origin):
o = getattr(obj, '_origin_', None)
if o is None:
obj._origin_ = origin
else:
obj._origin_ = self.multi(o, origin)
return obj
def add_wrapdict(self, obj, doc):
wd = self.wrapdict
o = wd.get(id(obj))
if o is None:
o = (obj, doc)
else:
o = (obj, self.multi(o[1], doc))
wd[id(obj)] = o
return obj
def anon(self, obj):
return Anon(self, obj)
def attribute(self, obj, name):
return Attribute(self, self.getdoc(obj), name)
def binop(self, op, a, b):
return BinaryOp(self, op, self.getdoc(a), self.getdoc(b))
def callfunc(self, obj, *args, **kwds):
getdoc = self.getdoc
obj = getdoc(obj)
args = [getdoc(a) for a in args]
kwds = dict([(k, getdoc(v)) for (k, v) in kwds.items()])
return CallFunc(self, obj, *args, **kwds)
def getdoc(self, obj):
if isinstance(obj, Doc):
return obj
w = getattr(obj, '_origin_', None)
if isinstance(w, Doc):
return w
w = self.wrapdict.get(id(obj))
if w is not None:
return w[1]
if isinstance(obj, tuple):
return self.tuple(*obj)
return self.anon(obj)
def multi(self, a, b):
a = self.getdoc(a)
b = self.getdoc(b)
if isinstance(a, Multi):
#pdb.set_trace()
set = a.set.copy()
if 1 and len(set) > 4:
return a
else:
set = {a:1}
if isinstance(b, Multi):
set.update(b.set)
else:
set[b] = 1
return Multi(self, set)
def _get_root(self):
return Root(self)
def rootattribute(self, root, name):
return RootAttribute(self, self.getdoc(root), name)
def source(self, text):
return Source(self, text)
def tuple(self, *args):
return Tuple(self, *[self.getdoc(x) for x in args])
def unop(self, op, a):
return UnaryOp(self, op, self.getdoc(a))
def wrap(self, obj, doc):
if obj is self._parent.UniSet.UniSet:
pdb.set_trace()
w = getattr(obj, '_derive_origin_', None)
if w is not None:
if getattr(w, 'im_self', None) is obj or isinstance(w, self._root.types.FunctionType):
obj = w(doc)
elif w == 'ADD':
#pdb.set_trace()
obj = self.add_origin(obj, doc)
else:
raise DocError, "Doc.wrap: attribute '_derive_origin_' has invalid value"
elif isinstance(obj, self._root.types.MethodType):
obj = self.wrap_method(obj, doc)
elif isinstance(obj, self._root.types.FunctionType):
obj = self.wrap_function(obj, doc)
else:
obj = self.add_wrapdict(obj, doc)
return obj
def _get_wrapdict(self):
return {}
def wrap_function(mod, obj, doc):
def f(*args, **kwds):
r = obj(*args, **kwds)
r = mod.wrap(r, mod.callfunc(doc, *args, **kwds))
return r
f._origin_ = doc
return f
def wrap_method(mod, obj, doc):
im_func = obj.im_func
def f(self, *args, **kwds):
r = im_func(self, *args, **kwds)
r = mod.wrap(r, mod.callfunc(doc, *args, **kwds))
return r
return mod._root.new.instancemethod(f, obj.im_self, obj.im_self.__class__)
| apache-2.0 |
basnijholt/holoviews | holoviews/plotting/plotly/stats.py | 2 | 4384 | from __future__ import absolute_import, division, unicode_literals
import param
from .chart import ChartPlot
from .element import ElementPlot, ColorbarPlot
class BivariatePlot(ChartPlot, ColorbarPlot):
    """2D density plot rendered as a plotly histogram2dcontour trace."""

    filled = param.Boolean(default=False)

    ncontours = param.Integer(default=None)

    trace_kwargs = {'type': 'histogram2dcontour'}

    style_opts = ['cmap', 'showlabels', 'labelfont', 'labelformat', 'showlines']

    _style_key = 'contours'

    def graph_options(self, element, ranges, style):
        opts = super(BivariatePlot, self).graph_options(element, ranges, style)
        # A fixed contour count disables plotly's automatic contouring.
        if self.ncontours:
            opts['autocontour'] = False
            opts['ncontours'] = self.ncontours
        # 'fill' shades between contour lines; 'lines' draws outlines only.
        coloring = 'fill' if self.filled else 'lines'
        opts['contours'] = {'coloring': coloring}
        return opts
class DistributionPlot(ElementPlot):
    """Plots a univariate distribution as a density-estimate line trace."""
    bandwidth = param.Number(default=None, doc="""
        The bandwidth of the kernel for the density estimate.""")
    cut = param.Number(default=3, doc="""
        Draw the estimate to cut * bw from the extreme data points.""")
    filled = param.Boolean(default=True, doc="""
        Whether the bivariate contours should be filled.""")
    style_opts = ['color', 'dash', 'line_width']
    trace_kwargs = {'type': 'scatter', 'mode': 'lines'}
    _style_key = 'line'
class MultiDistributionPlot(ElementPlot):
    """Shared base for distribution plots grouped by key dimensions
    (box and violin plots)."""

    def _get_axis_dims(self, element):
        return element.kdims, element.vdims[0]

    def get_data(self, element, ranges, style):
        """Build one trace dict per group, labeled by the group key."""
        if element.kdims:
            grouped = element.groupby(element.kdims).items()
        else:
            grouped = [(element.label, element)]
        value_axis = 'x' if self.invert_axes else 'y'
        traces = []
        for key, group in grouped:
            if element.kdims:
                label = ','.join([dim.pprint_value(val)
                                  for dim, val in zip(element.kdims, key)])
            else:
                label = key
            traces.append({value_axis: group.dimension_values(group.vdims[0]),
                           'name': label})
        return traces

    def get_extents(self, element, ranges, range_type='combined'):
        # The group axis is categorical; extents apply to the value dimension.
        return super(MultiDistributionPlot, self).get_extents(
            element, ranges, range_type, 'categorical', element.vdims[0]
        )
class BoxWhiskerPlot(MultiDistributionPlot):
    """Renders grouped distributions as plotly box traces."""

    boxpoints = param.ObjectSelector(objects=["all", "outliers",
                                              "suspectedoutliers", False],
                                     default='outliers', doc="""
        Which points to show, valid options are 'all', 'outliers',
        'suspectedoutliers' and False""")

    jitter = param.Number(default=0, doc="""
        Sets the amount of jitter in the sample points drawn. If "0",
        the sample points align along the distribution axis. If "1",
        the sample points are drawn in a random jitter of width equal
        to the width of the box(es).""")

    mean = param.ObjectSelector(default=False, objects=[True, False, 'sd'],
                                doc="""
        If "True", the mean of the box(es)' underlying distribution
        is drawn as a dashed line inside the box(es). If "sd" the
        standard deviation is also drawn.""")

    style_opts = ['color', 'alpha', 'outliercolor', 'marker', 'size']

    trace_kwargs = {'type': 'box'}

    _style_key = 'marker'

    def graph_options(self, element, ranges, style):
        opts = super(BoxWhiskerPlot, self).graph_options(element, ranges, style)
        # Forward the box-specific parameters to the plotly trace options.
        opts['boxmean'] = self.mean
        opts['jitter'] = self.jitter
        return opts
class ViolinPlot(MultiDistributionPlot):
    """Renders grouped distributions as plotly violin traces."""

    box = param.Boolean(default=True, doc="""
        Whether to draw a boxplot inside the violin""")

    meanline = param.Boolean(default=False, doc="""
        If "True", the mean of the box(es)' underlying distribution
        is drawn as a dashed line inside the box(es). If "sd" the
        standard deviation is also drawn.""")

    style_opts = ['color', 'alpha', 'outliercolor', 'marker', 'size']

    trace_kwargs = {'type': 'violin'}

    _style_key = 'marker'

    def graph_options(self, element, ranges, style):
        opts = super(ViolinPlot, self).graph_options(element, ranges, style)
        # Nested plotly options controlling the inner box and mean line.
        opts['meanline'] = {'visible': self.meanline}
        opts['box'] = {'visible': self.box}
        return opts
| bsd-3-clause |
tseaver/gcloud-python | spanner/google/cloud/spanner_admin_database_v1/gapic/transports/database_admin_grpc_transport.py | 1 | 9341 | # -*- coding: utf-8 -*-
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import google.api_core.grpc_helpers
import google.api_core.operations_v1
from google.cloud.spanner_admin_database_v1.proto import spanner_database_admin_pb2_grpc
class DatabaseAdminGrpcTransport(object):
    """gRPC transport for the google.spanner.admin.database.v1
    DatabaseAdmin API.

    Exposes the raw gRPC stubs, which can be used to take advantage of
    advanced gRPC features.
    """
    # OAuth scopes required by every RPC on this service.
    _OAUTH_SCOPES = (
        'https://www.googleapis.com/auth/cloud-platform',
        'https://www.googleapis.com/auth/spanner.admin',
    )

    def __init__(self,
                 channel=None,
                 credentials=None,
                 address='spanner.googleapis.com:443'):
        """Instantiate the transport class.

        Args:
            channel (grpc.Channel): An existing channel to make calls
                through; mutually exclusive with ``credentials``.
            credentials (google.auth.credentials.Credentials): Credentials
                identifying this application to the service; resolved from
                the environment when omitted.
            address (str): The address where the service is hosted.
        """
        # Channels come with credentials baked in, so specifying both is
        # an error.
        if channel is not None and credentials is not None:
            raise ValueError(
                'The `channel` and `credentials` arguments are mutually '
                'exclusive.', )
        if channel is None:
            channel = self.create_channel(
                address=address,
                credentials=credentials,
            )
        # gRPC "stubs" are bound to the channel and provide one callable
        # per RPC defined by the service.
        self._stubs = {
            'database_admin_stub':
            spanner_database_admin_pb2_grpc.DatabaseAdminStub(channel),
        }
        # Some methods return google.longrunning.Operation protos, so keep
        # an LRO client for polling them.
        self._operations_client = google.api_core.operations_v1.OperationsClient(
            channel)

    @classmethod
    def create_channel(cls,
                       address='spanner.googleapis.com:443',
                       credentials=None):
        """Create and return a gRPC channel object.

        Args:
            address (str): The host for the channel to use.
            credentials (~.Credentials): Credentials identifying this
                application; resolved from the environment when omitted.

        Returns:
            grpc.Channel: A gRPC channel object.
        """
        return google.api_core.grpc_helpers.create_channel(
            address,
            credentials=credentials,
            scopes=cls._OAUTH_SCOPES,
        )

    @property
    def list_databases(self):
        """Return the gRPC stub for ListDatabases.

        Lists Cloud Spanner databases.
        """
        return self._stubs['database_admin_stub'].ListDatabases

    @property
    def create_database(self):
        """Return the gRPC stub for CreateDatabase.

        Creates a new Cloud Spanner database and starts to prepare it for
        serving. The returned long-running operation tracks the
        preparation; its ``response`` field holds the ``Database`` on
        success.
        """
        return self._stubs['database_admin_stub'].CreateDatabase

    @property
    def get_database(self):
        """Return the gRPC stub for GetDatabase.

        Gets the state of a Cloud Spanner database.
        """
        return self._stubs['database_admin_stub'].GetDatabase

    @property
    def update_database_ddl(self):
        """Return the gRPC stub for UpdateDatabaseDdl.

        Updates the schema of a Cloud Spanner database by
        creating/altering/dropping tables, columns, indexes, etc. The
        returned long-running operation tracks execution of the schema
        change(s) and has no response payload.
        """
        return self._stubs['database_admin_stub'].UpdateDatabaseDdl

    @property
    def drop_database(self):
        """Return the gRPC stub for DropDatabase.

        Drops (aka deletes) a Cloud Spanner database.
        """
        return self._stubs['database_admin_stub'].DropDatabase

    @property
    def get_database_ddl(self):
        """Return the gRPC stub for GetDatabaseDdl.

        Returns the schema of a Cloud Spanner database as a list of
        formatted DDL statements. Pending schema updates are not shown.
        """
        return self._stubs['database_admin_stub'].GetDatabaseDdl

    @property
    def set_iam_policy(self):
        """Return the gRPC stub for SetIamPolicy.

        Sets the access control policy on a database resource, replacing
        any existing policy. Requires ``spanner.databases.setIamPolicy``
        on the resource.
        """
        return self._stubs['database_admin_stub'].SetIamPolicy

    @property
    def get_iam_policy(self):
        """Return the gRPC stub for GetIamPolicy.

        Gets the access control policy for a database resource; returns an
        empty policy if none is set. Requires
        ``spanner.databases.getIamPolicy`` on the resource.
        """
        return self._stubs['database_admin_stub'].GetIamPolicy

    @property
    def test_iam_permissions(self):
        """Return the gRPC stub for TestIamPermissions.

        Returns the subset of the given permissions that the caller has on
        the specified database resource (empty for a non-existent database
        the caller may list).
        """
        return self._stubs['database_admin_stub'].TestIamPermissions
| apache-2.0 |
MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-2.4.3/Lib/cgi.py | 8 | 34262 | #! /usr/local/bin/python
# NOTE: the above "/usr/local/bin/python" is NOT a mistake. It is
# intentionally NOT "/usr/bin/env python". On many systems
# (e.g. Solaris), /usr/local/bin is not in $PATH as passed to CGI
# scripts, and /usr/local/bin is the default directory where Python is
# installed, so /usr/bin/env would be unable to find python. Granted,
# binary installations by Linux vendors often install Python in
# /usr/bin. So let those vendors patch cgi.py to match their choice
# of installation.
"""Support module for CGI (Common Gateway Interface) scripts.
This module defines a number of utilities for use by CGI scripts
written in Python.
"""
# XXX Perhaps there should be a slimmed version that doesn't contain
# all those backwards compatible and debugging classes and functions?
# History
# -------
#
# Michael McLay started this module. Steve Majewski changed the
# interface to SvFormContentDict and FormContentDict. The multipart
# parsing was inspired by code submitted by Andreas Paepcke. Guido van
# Rossum rewrote, reformatted and documented the module and is currently
# responsible for its maintenance.
#
__version__ = "2.6"
# Imports
# =======
import sys
import os
import urllib
import mimetools
import rfc822
import UserDict
from StringIO import StringIO
__all__ = ["MiniFieldStorage", "FieldStorage", "FormContentDict",
"SvFormContentDict", "InterpFormContentDict", "FormContent",
"parse", "parse_qs", "parse_qsl", "parse_multipart",
"parse_header", "print_exception", "print_environ",
"print_form", "print_directory", "print_arguments",
"print_environ_usage", "escape"]
# Logging support
# ===============
logfile = "" # Filename to log to, if not empty; opened lazily by initlog()
logfp = None # File object to log to, if not None
def initlog(*allargs):
    """Write a log message, if there is a log file.

    Even though this function is called initlog(), you should always
    use log(); log is a variable that is set either to initlog
    (initially), to dolog (once the log file has been opened), or to
    nolog (when logging is disabled).

    The first argument is a format string; the remaining arguments (if
    any) are arguments to the % operator, so e.g.

        log("%s: %s", "a", "b")

    will write "a: b" to the log file, followed by a newline.

    If the global logfp is not None, it should be a file object to
    which log data is written.

    If the global logfp is None, the global logfile may be a string
    giving a filename to open, in append mode.  This file should be
    world writable!!!  If the file can't be opened, logging is
    silently disabled (since there is no safe place where we could
    send an error message).
    """
    global logfp, log
    # Lazily open the log file on first use; a failure to open silently
    # disables logging (there is nowhere safe to report the error).
    if logfile and not logfp:
        try:
            logfp = open(logfile, "a")
        except IOError:
            pass
    # Rebind the module-level `log` so later calls skip this setup.
    if not logfp:
        log = nolog
    else:
        log = dolog
    log(*allargs)
def dolog(fmt, *args):
    """Write a formatted log message to the open log file.

    See initlog() for the calling convention.
    """
    message = fmt % args
    logfp.write(message + "\n")
def nolog(*allargs):
    """Dummy function, assigned to log when logging is disabled."""
    # Accepts and discards any arguments so it is call-compatible
    # with dolog()/initlog().
    pass
log = initlog # The current logging function; rebound by initlog()
# Parsing functions
# =================
# Maximum input we will accept when REQUEST_METHOD is POST
# 0 ==> unlimited input
maxlen = 0
def parse(fp=None, environ=os.environ, keep_blank_values=0, strict_parsing=0):
    """Parse a query in the environment or from a file (default stdin)

    Arguments, all optional:

    fp : file pointer; default: sys.stdin

    environ : environment dictionary; default: os.environ

    keep_blank_values: flag indicating whether blank values in
        URL encoded forms should be treated as blank strings.
        A true value indicates that blanks should be retained as
        blank strings.  The default false value indicates that
        blank values are to be ignored and treated as if they were
        not included.

    strict_parsing: flag indicating what to do with parsing errors.
        If false (the default), errors are silently ignored.
        If true, errors raise a ValueError exception.
    """
    if fp is None:
        fp = sys.stdin
    if not 'REQUEST_METHOD' in environ:
        environ['REQUEST_METHOD'] = 'GET' # For testing stand-alone
    if environ['REQUEST_METHOD'] == 'POST':
        ctype, pdict = parse_header(environ['CONTENT_TYPE'])
        if ctype == 'multipart/form-data':
            return parse_multipart(fp, pdict)
        elif ctype == 'application/x-www-form-urlencoded':
            clength = int(environ['CONTENT_LENGTH'])
            if maxlen and clength > maxlen:
                raise ValueError, 'Maximum content length exceeded'
            qs = fp.read(clength)
        else:
            qs = '' # Unknown content-type
        # Command-line / QUERY_STRING data is appended to the POST body
        # so both sources end up in the parsed result.
        if 'QUERY_STRING' in environ:
            if qs: qs = qs + '&'
            qs = qs + environ['QUERY_STRING']
        elif sys.argv[1:]:
            if qs: qs = qs + '&'
            qs = qs + sys.argv[1]
        environ['QUERY_STRING'] = qs # XXX Shouldn't, really
    elif 'QUERY_STRING' in environ:
        qs = environ['QUERY_STRING']
    else:
        # No query string anywhere: fall back to the first command-line
        # argument (stand-alone testing convenience).
        if sys.argv[1:]:
            qs = sys.argv[1]
        else:
            qs = ""
        environ['QUERY_STRING'] = qs # XXX Shouldn't, really
    return parse_qs(qs, keep_blank_values, strict_parsing)
def parse_qs(qs, keep_blank_values=0, strict_parsing=0):
    """Parse a query given as a string argument.

    Arguments:

    qs: URL-encoded query string to be parsed

    keep_blank_values: flag indicating whether blank values in
        URL encoded queries should be treated as blank strings.
        A true value indicates that blanks should be retained as
        blank strings.  The default false value indicates that
        blank values are to be ignored and treated as if they were
        not included.

    strict_parsing: flag indicating what to do with parsing errors.
        If false (the default), errors are silently ignored.
        If true, errors raise a ValueError exception.

    Returns a dictionary mapping each field name to a list of values.
    """
    # The accumulator was previously named `dict`, shadowing the builtin;
    # setdefault() replaces the explicit membership test.
    result = {}
    for name, value in parse_qsl(qs, keep_blank_values, strict_parsing):
        result.setdefault(name, []).append(value)
    return result
def parse_qsl(qs, keep_blank_values=0, strict_parsing=0):
"""Parse a query given as a string argument.
Arguments:
qs: URL-encoded query string to be parsed
keep_blank_values: flag indicating whether blank values in
URL encoded queries should be treated as blank strings. A
true value indicates that blanks should be retained as blank
strings. The default false value indicates that blank values
are to be ignored and treated as if they were not included.
strict_parsing: flag indicating what to do with parsing errors. If
false (the default), errors are silently ignored. If true,
errors raise a ValueError exception.
Returns a list, as G-d intended.
"""
pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
r = []
for name_value in pairs:
if not name_value and not strict_parsing:
continue
nv = name_value.split('=', 1)
if len(nv) != 2:
if strict_parsing:
raise ValueError, "bad query field: %r" % (name_value,)
# Handle case of a control-name with no equal sign
if keep_blank_values:
nv.append('')
else:
continue
if len(nv[1]) or keep_blank_values:
name = urllib.unquote(nv[0].replace('+', ' '))
value = urllib.unquote(nv[1].replace('+', ' '))
r.append((name, value))
return r
def parse_multipart(fp, pdict):
    """Parse multipart input.

    Arguments:
    fp   : input file
    pdict: dictionary containing other parameters of conten-type header

    Returns a dictionary just like parse_qs(): keys are the field names, each
    value is a list of values for that field.  This is easy to use but not
    much good if you are expecting megabytes to be uploaded -- in that case,
    use the FieldStorage class instead which is much more flexible.  Note
    that content-type is the raw, unparsed contents of the content-type
    header.

    XXX This does not parse nested multipart parts -- use FieldStorage for
    that.

    XXX This should really be subsumed by FieldStorage altogether -- no
    point in having two implementations of the same parsing algorithm.
    """
    boundary = ""
    if 'boundary' in pdict:
        boundary = pdict['boundary']
    if not valid_boundary(boundary):
        raise ValueError, ('Invalid boundary in multipart form: %r'
                            % (boundary,))
    # Parts are delimited by '--boundary' lines; the final delimiter has a
    # trailing '--'.
    nextpart = "--" + boundary
    lastpart = "--" + boundary + "--"
    partdict = {}
    terminator = ""
    while terminator != lastpart:
        # bytes == -1 means the part declared no usable Content-Length and
        # its data must be reconstructed from the accumulated lines below.
        bytes = -1
        data = None
        if terminator:
            # At start of next part. Read headers first.
            headers = mimetools.Message(fp)
            clength = headers.getheader('content-length')
            if clength:
                try:
                    bytes = int(clength)
                except ValueError:
                    pass
            if bytes > 0:
                if maxlen and bytes > maxlen:
                    raise ValueError, 'Maximum content length exceeded'
                data = fp.read(bytes)
            else:
                data = ""
        # Read lines until end of part.
        lines = []
        while 1:
            line = fp.readline()
            if not line:
                terminator = lastpart # End outer loop
                break
            if line[:2] == "--":
                terminator = line.strip()
                if terminator in (nextpart, lastpart):
                    break
            lines.append(line)
        # Done with part.
        if data is None:
            # First iteration: nothing before the opening boundary.
            continue
        if bytes < 0:
            if lines:
                # Strip final line terminator
                line = lines[-1]
                if line[-2:] == "\r\n":
                    line = line[:-2]
                elif line[-1:] == "\n":
                    line = line[:-1]
                lines[-1] = line
            data = "".join(lines)
        # Only 'form-data' parts with a declared name are collected.
        line = headers['content-disposition']
        if not line:
            continue
        key, params = parse_header(line)
        if key != 'form-data':
            continue
        if 'name' in params:
            name = params['name']
        else:
            continue
        if name in partdict:
            partdict[name].append(data)
        else:
            partdict[name] = [data]
    return partdict
def parse_header(line):
    """Parse a Content-type like header.

    Return the main content-type and a dictionary of options.
    """
    parts = [part.strip() for part in line.split(';')]
    key = parts.pop(0).lower()
    pdict = {}
    for param in parts:
        eq = param.find('=')
        if eq < 0:
            # Parameters without '=' are silently skipped.
            continue
        name = param[:eq].strip().lower()
        value = param[eq + 1:].strip()
        # Strip surrounding quotes and unescape \\ and \" sequences.
        if len(value) >= 2 and value[0] == value[-1] == '"':
            value = value[1:-1]
            value = value.replace('\\\\', '\\').replace('\\"', '"')
        pdict[name] = value
    return key, pdict
# Classes for field storage
# =========================
class MiniFieldStorage:
    """Like FieldStorage, for use when no file uploads are possible.

    A lightweight name/value pair that mimics the attribute surface of
    FieldStorage so callers can treat both uniformly.
    """

    # Dummy attributes: class-level placeholders so instances expose the
    # same attributes a full FieldStorage would, all in their "absent"
    # state (no file, no content-type, no disposition, no headers).
    filename = None
    list = None
    type = None
    file = None
    type_options = {}
    disposition = None
    disposition_options = {}
    headers = {}

    def __init__(self, name, value):
        """Constructor from field name and value."""
        self.name = name
        self.value = value
        # self.file = StringIO(value)

    def __repr__(self):
        """Return printable representation."""
        return "MiniFieldStorage(%r, %r)" % (self.name, self.value)
class FieldStorage:
    """Store a sequence of fields, reading multipart/form-data.

    This class provides naming, typing, files stored on disk, and
    more. At the top level, it is accessible like a dictionary, whose
    keys are the field names. (Note: None can occur as a field name.)
    The items are either a Python list (if there's multiple values) or
    another FieldStorage or MiniFieldStorage object. If it's a single
    object, it has the following attributes:

    name: the field name, if specified; otherwise None

    filename: the filename, if specified; otherwise None; this is the
        client side filename, *not* the file name on which it is
        stored (that's a temporary file you don't deal with)

    value: the value as a *string*; for file uploads, this
        transparently reads the file every time you request the value

    file: the file(-like) object from which you can read the data;
        None if the data is stored a simple string

    type: the content-type, or None if not specified

    type_options: dictionary of options specified on the content-type
        line

    disposition: content-disposition, or None if not specified

    disposition_options: dictionary of corresponding options

    headers: a dictionary(-like) object (sometimes rfc822.Message or a
        subclass thereof) containing *all* headers

    The class is subclassable, mostly for the purpose of overriding
    the make_file() method, which is called internally to come up with
    a file open for reading and writing. This makes it possible to
    override the default choice of storing all files in a temporary
    directory and unlinking them as soon as they have been opened.
    """

    def __init__(self, fp=None, headers=None, outerboundary="",
                 environ=os.environ, keep_blank_values=0, strict_parsing=0):
        """Constructor. Read multipart/* until last part.

        Arguments, all optional:

        fp              : file pointer; default: sys.stdin
            (not used when the request method is GET)

        headers         : header dictionary-like object; default:
            taken from environ as per CGI spec

        outerboundary   : terminating multipart boundary
            (for internal use only)

        environ         : environment dictionary; default: os.environ

        keep_blank_values: flag indicating whether blank values in
            URL encoded forms should be treated as blank strings.
            A true value indicates that blanks should be retained as
            blank strings. The default false value indicates that
            blank values are to be ignored and treated as if they were
            not included.

        strict_parsing: flag indicating what to do with parsing errors.
            If false (the default), errors are silently ignored.
            If true, errors raise a ValueError exception.
        """
        method = 'GET'
        self.keep_blank_values = keep_blank_values
        self.strict_parsing = strict_parsing
        if 'REQUEST_METHOD' in environ:
            method = environ['REQUEST_METHOD'].upper()
        if method == 'GET' or method == 'HEAD':
            # GET/HEAD: the "body" is the query string; fall back to the
            # first command-line argument for interactive testing.
            if 'QUERY_STRING' in environ:
                qs = environ['QUERY_STRING']
            elif sys.argv[1:]:
                qs = sys.argv[1]
            else:
                qs = ""
            fp = StringIO(qs)
            if headers is None:
                headers = {'content-type':
                           "application/x-www-form-urlencoded"}
        if headers is None:
            # Synthesize a header dict from the CGI environment.
            headers = {}
            if method == 'POST':
                # Set default content-type for POST to what's traditional
                headers['content-type'] = "application/x-www-form-urlencoded"
            if 'CONTENT_TYPE' in environ:
                headers['content-type'] = environ['CONTENT_TYPE']
            if 'CONTENT_LENGTH' in environ:
                headers['content-length'] = environ['CONTENT_LENGTH']
        self.fp = fp or sys.stdin
        self.headers = headers
        self.outerboundary = outerboundary

        # Process content-disposition header
        cdisp, pdict = "", {}
        if 'content-disposition' in self.headers:
            cdisp, pdict = parse_header(self.headers['content-disposition'])
        self.disposition = cdisp
        self.disposition_options = pdict
        self.name = None
        if 'name' in pdict:
            self.name = pdict['name']
        self.filename = None
        if 'filename' in pdict:
            self.filename = pdict['filename']

        # Process content-type header
        #
        # Honor any existing content-type header. But if there is no
        # content-type header, use some sensible defaults. Assume
        # outerboundary is "" at the outer level, but something non-false
        # inside a multi-part. The default for an inner part is text/plain,
        # but for an outer part it should be urlencoded. This should catch
        # bogus clients which erroneously forget to include a content-type
        # header.
        #
        # See below for what we do if there does exist a content-type header,
        # but it happens to be something we don't understand.
        if 'content-type' in self.headers:
            ctype, pdict = parse_header(self.headers['content-type'])
        elif self.outerboundary or method != 'POST':
            ctype, pdict = "text/plain", {}
        else:
            ctype, pdict = 'application/x-www-form-urlencoded', {}
        self.type = ctype
        self.type_options = pdict
        self.innerboundary = ""
        if 'boundary' in pdict:
            self.innerboundary = pdict['boundary']
        clen = -1
        if 'content-length' in self.headers:
            try:
                clen = int(self.headers['content-length'])
            except ValueError:
                pass
            # maxlen is a module-level limit; 0/unset disables the check.
            if maxlen and clen > maxlen:
                raise ValueError, 'Maximum content length exceeded'
        self.length = clen

        self.list = self.file = None
        self.done = 0
        # Dispatch on content type: urlencoded body, nested multipart,
        # or a single opaque part.
        if ctype == 'application/x-www-form-urlencoded':
            self.read_urlencoded()
        elif ctype[:10] == 'multipart/':
            self.read_multi(environ, keep_blank_values, strict_parsing)
        else:
            self.read_single()

    def __repr__(self):
        """Return a printable representation."""
        return "FieldStorage(%r, %r, %r)" % (
                self.name, self.filename, self.value)

    def __iter__(self):
        return iter(self.keys())

    def __getattr__(self, name):
        # Lazily compute .value: re-read the file for uploads, return the
        # parts list for containers, or None when there is no data.
        if name != 'value':
            raise AttributeError, name
        if self.file:
            self.file.seek(0)
            value = self.file.read()
            self.file.seek(0)
        elif self.list is not None:
            value = self.list
        else:
            value = None
        return value

    def __getitem__(self, key):
        """Dictionary style indexing."""
        if self.list is None:
            raise TypeError, "not indexable"
        found = []
        for item in self.list:
            if item.name == key: found.append(item)
        if not found:
            raise KeyError, key
        if len(found) == 1:
            # Single value: return the item itself, not a 1-element list.
            return found[0]
        else:
            return found

    def getvalue(self, key, default=None):
        """Dictionary style get() method, including 'value' lookup."""
        if key in self:
            value = self[key]
            if type(value) is type([]):
                return map(lambda v: v.value, value)
            else:
                return value.value
        else:
            return default

    def getfirst(self, key, default=None):
        """ Return the first value received."""
        if key in self:
            value = self[key]
            if type(value) is type([]):
                return value[0].value
            else:
                return value.value
        else:
            return default

    def getlist(self, key):
        """ Return list of received values."""
        if key in self:
            value = self[key]
            if type(value) is type([]):
                return map(lambda v: v.value, value)
            else:
                # Single value: wrap it so callers always get a list.
                return [value.value]
        else:
            return []

    def keys(self):
        """Dictionary style keys() method."""
        if self.list is None:
            raise TypeError, "not indexable"
        keys = []
        for item in self.list:
            if item.name not in keys: keys.append(item.name)
        return keys

    def has_key(self, key):
        """Dictionary style has_key() method."""
        # NOTE: legacy protocol kept for backward compatibility;
        # __contains__ below provides the modern `in` support.
        if self.list is None:
            raise TypeError, "not indexable"
        for item in self.list:
            if item.name == key: return True
        return False

    def __contains__(self, key):
        """Dictionary style __contains__ method."""
        if self.list is None:
            raise TypeError, "not indexable"
        for item in self.list:
            if item.name == key: return True
        return False

    def __len__(self):
        """Dictionary style len(x) support."""
        return len(self.keys())

    def read_urlencoded(self):
        """Internal: read data in query string format."""
        qs = self.fp.read(self.length)
        self.list = list = []
        for key, value in parse_qsl(qs, self.keep_blank_values,
                                    self.strict_parsing):
            list.append(MiniFieldStorage(key, value))
        self.skip_lines()

    # Subclass hook: class used for nested parts; None means "same class".
    FieldStorageClass = None

    def read_multi(self, environ, keep_blank_values, strict_parsing):
        """Internal: read a part that is itself multipart."""
        ib = self.innerboundary
        if not valid_boundary(ib):
            raise ValueError, 'Invalid boundary in multipart form: %r' % (ib,)
        self.list = []
        klass = self.FieldStorageClass or self.__class__
        part = klass(self.fp, {}, ib,
                     environ, keep_blank_values, strict_parsing)
        # Throw first part away: it is the preamble before the first
        # boundary, which per RFC 2046 carries no field data.
        while not part.done:
            headers = rfc822.Message(self.fp)
            part = klass(self.fp, headers, ib,
                         environ, keep_blank_values, strict_parsing)
            self.list.append(part)
        self.skip_lines()

    def read_single(self):
        """Internal: read an atomic part."""
        if self.length >= 0:
            # Known content-length: binary read is exact and faster.
            self.read_binary()
            self.skip_lines()
        else:
            self.read_lines()
        self.file.seek(0)

    bufsize = 8*1024            # I/O buffering size for copy to file

    def read_binary(self):
        """Internal: read binary data."""
        self.file = self.make_file('b')
        todo = self.length
        if todo >= 0:
            while todo > 0:
                data = self.fp.read(min(todo, self.bufsize))
                if not data:
                    # Premature EOF: mark as aborted (-1) and stop.
                    self.done = -1
                    break
                self.file.write(data)
                todo = todo - len(data)

    def read_lines(self):
        """Internal: read lines until EOF or outerboundary."""
        # Start buffering in memory (self.__file); __write() spills to a
        # real temporary file once the part exceeds 1000 bytes.
        self.file = self.__file = StringIO()
        if self.outerboundary:
            self.read_lines_to_outerboundary()
        else:
            self.read_lines_to_eof()

    def __write(self, line):
        if self.__file is not None:
            if self.__file.tell() + len(line) > 1000:
                # Grew past the in-memory threshold: migrate buffered
                # bytes to a disk-backed file and stop double-buffering.
                self.file = self.make_file('')
                self.file.write(self.__file.getvalue())
                self.__file = None
        self.file.write(line)

    def read_lines_to_eof(self):
        """Internal: read lines until EOF."""
        while 1:
            line = self.fp.readline()
            if not line:
                self.done = -1
                break
            self.__write(line)

    def read_lines_to_outerboundary(self):
        """Internal: read lines until outerboundary."""
        next = "--" + self.outerboundary
        last = next + "--"
        delim = ""
        while 1:
            line = self.fp.readline()
            if not line:
                self.done = -1
                break
            if line[:2] == "--":
                strippedline = line.strip()
                if strippedline == next:
                    break
                if strippedline == last:
                    self.done = 1
                    break
            # The line terminator before a boundary belongs to the
            # boundary, not the data: hold each delimiter back one
            # iteration so the final one is never written.
            odelim = delim
            if line[-2:] == "\r\n":
                delim = "\r\n"
                line = line[:-2]
            elif line[-1] == "\n":
                delim = "\n"
                line = line[:-1]
            else:
                delim = ""
            self.__write(odelim + line)

    def skip_lines(self):
        """Internal: skip lines until outer boundary if defined."""
        if not self.outerboundary or self.done:
            return
        next = "--" + self.outerboundary
        last = next + "--"
        while 1:
            line = self.fp.readline()
            if not line:
                self.done = -1
                break
            if line[:2] == "--":
                strippedline = line.strip()
                if strippedline == next:
                    break
                if strippedline == last:
                    self.done = 1
                    break

    def make_file(self, binary=None):
        """Overridable: return a readable & writable file.

        The file will be used as follows:
        - data is written to it
        - seek(0)
        - data is read from it

        The 'binary' argument is unused -- the file is always opened
        in binary mode.

        This version opens a temporary file for reading and writing,
        and immediately deletes (unlinks) it. The trick (on Unix!) is
        that the file can still be used, but it can't be opened by
        another process, and it will automatically be deleted when it
        is closed or when the current process terminates.

        If you want a more permanent file, you derive a class which
        overrides this method. If you want a visible temporary file
        that is nevertheless automatically deleted when the script
        terminates, try defining a __del__ method in a derived class
        which unlinks the temporary files you have created.
        """
        import tempfile
        return tempfile.TemporaryFile("w+b")
# Backwards Compatibility Classes
# ===============================
class FormContentDict(UserDict.UserDict):
    """Form content as dictionary with a list of values per field.

    form = FormContentDict()
    form[key] -> [value, value, ...]
    key in form -> Boolean
    form.keys() -> [key, key, ...]
    form.values() -> [[val, val, ...], [val, val, ...], ...]
    form.items() ->  [(key, [val, val, ...]), (key, [val, val, ...]), ...]
    form.dict == {key: [val, val, ...], ...}
    """

    def __init__(self, environ=os.environ):
        # Parse the request once; expose the mapping both as .dict (the
        # historical attribute) and as .data (UserDict's backing store).
        parsed = parse(environ=environ)
        self.dict = self.data = parsed
        # Keep the raw query string around for callers that want it.
        self.query_string = environ['QUERY_STRING']
class SvFormContentDict(FormContentDict):
"""Form content as dictionary expecting a single value per field.
If you only expect a single value for each field, then form[key]
will return that single value. It will raise an IndexError if
that expectation is not true. If you expect a field to have
possible multiple values, than you can use form.getlist(key) to
get all of the values. values() and items() are a compromise:
they return single strings where there is a single value, and
lists of strings otherwise.
"""
def __getitem__(self, key):
if len(self.dict[key]) > 1:
raise IndexError, 'expecting a single value'
return self.dict[key][0]
def getlist(self, key):
return self.dict[key]
def values(self):
result = []
for value in self.dict.values():
if len(value) == 1:
result.append(value[0])
else: result.append(value)
return result
def items(self):
result = []
for key, value in self.dict.items():
if len(value) == 1:
result.append((key, value[0]))
else: result.append((key, value))
return result
class InterpFormContentDict(SvFormContentDict):
    """This class is present for backwards compatibility only."""

    def __getitem__(self, key):
        # Interpret the single value: int first, then float, then a
        # stripped string when it does not look numeric at all.
        raw = SvFormContentDict.__getitem__(self, key)
        if raw[0] in '0123456789+-.':
            try:
                return int(raw)
            except ValueError:
                try:
                    return float(raw)
                except ValueError:
                    pass
        return raw.strip()

    def values(self):
        # Interpreted value per key; multi-valued fields (which raise
        # IndexError above) fall back to their raw value lists.
        result = []
        for key in self.keys():
            try:
                result.append(self[key])
            except IndexError:
                result.append(self.dict[key])
        return result

    def items(self):
        # Same fallback strategy as values(), as (key, value) pairs.
        result = []
        for key in self.keys():
            try:
                result.append((key, self[key]))
            except IndexError:
                result.append((key, self.dict[key]))
        return result
class FormContent(FormContentDict):
    """This class is present for backwards compatibility only."""

    def values(self, key):
        # All values for the field, or None when the field is absent.
        return self.dict.get(key)

    def indexed_value(self, key, location):
        # Value at a given position, or None if the field is absent or
        # has fewer entries than requested.
        entries = self.dict.get(key)
        if entries is not None and len(entries) > location:
            return entries[location]
        return None

    def value(self, key):
        # First value for the field, or None when absent.
        entries = self.dict.get(key)
        return entries[0] if entries is not None else None

    def length(self, key):
        # Number of values received for the field (KeyError if absent).
        return len(self.dict[key])

    def stripped(self, key):
        # First value with surrounding whitespace removed, or None.
        entries = self.dict.get(key)
        return entries[0].strip() if entries is not None else None

    def pars(self):
        # Expose the raw parsed mapping.
        return self.dict
# Test/debug code
# ===============
def test(environ=os.environ):
    """Robust test CGI script, usable as main program.

    Write minimal HTTP headers and dump all information provided to
    the script in HTML form.
    """
    print "Content-type: text/html"
    print
    # Route errors into the response body so they show up in the browser.
    sys.stderr = sys.stdout
    try:
        form = FieldStorage()   # Replace with other classes to test those
        print_directory()
        print_arguments()
        print_form(form)
        print_environ(environ)
        print_environ_usage()
        # Deliberately raise a SyntaxError (via exec of a non-statement)
        # to exercise print_exception() with a realistic traceback.
        def f():
            exec "testing print_exception() -- <I>italics?</I>"
        def g(f=f):
            f()
        print "<H3>What follows is a test, not an actual exception:</H3>"
        g()
    except:
        print_exception()

    print "<H1>Second try with a small maxlen...</H1>"

    # Shrink the module-wide content-length limit to force the
    # "Maximum content length exceeded" path on the second parse.
    global maxlen
    maxlen = 50
    try:
        form = FieldStorage()   # Replace with other classes to test those
        print_directory()
        print_arguments()
        print_form(form)
        print_environ(environ)
    except:
        print_exception()
def print_exception(type=None, value=None, tb=None, limit=None):
if type is None:
type, value, tb = sys.exc_info()
import traceback
print
print "<H3>Traceback (most recent call last):</H3>"
list = traceback.format_tb(tb, limit) + \
traceback.format_exception_only(type, value)
print "<PRE>%s<B>%s</B></PRE>" % (
escape("".join(list[:-1])),
escape(list[-1]),
)
del tb
def print_environ(environ=os.environ):
"""Dump the shell environment as HTML."""
keys = environ.keys()
keys.sort()
print
print "<H3>Shell Environment:</H3>"
print "<DL>"
for key in keys:
print "<DT>", escape(key), "<DD>", escape(environ[key])
print "</DL>"
print
def print_form(form):
    """Dump the contents of a form as HTML."""
    keys = form.keys()
    keys.sort()
    print
    print "<H3>Form Contents:</H3>"
    if not keys:
        print "<P>No form fields."
    print "</DL>" if 0 else "<DL>"  # placeholder comment removed
    for key in keys:
        # Trailing comma keeps the type tag on the same output line.
        print "<DT>" + escape(key) + ":",
        value = form[key]
        print "<i>" + escape(repr(type(value))) + "</i>"
        print "<DD>" + escape(repr(value))
    print "</DL>"
    print
def print_directory():
    """Dump the current directory as HTML."""
    print
    print "<H3>Current Working Directory:</H3>"
    try:
        pwd = os.getcwd()
    # Python 2 comma syntax: catch os.error and bind it to msg.
    except os.error, msg:
        print "os.error:", escape(str(msg))
    else:
        print escape(pwd)
    print
def print_arguments():
    """Dump sys.argv as HTML (unescaped; argv is server-controlled)."""
    print
    print "<H3>Command Line Arguments:</H3>"
    print
    print sys.argv
    print
def print_environ_usage():
    """Dump a list of environment variables used by CGI as HTML."""
    # One literal HTML blob; content is static so no escaping is needed.
    print """
<H3>These environment variables could have been set:</H3>
<UL>
<LI>AUTH_TYPE
<LI>CONTENT_LENGTH
<LI>CONTENT_TYPE
<LI>DATE_GMT
<LI>DATE_LOCAL
<LI>DOCUMENT_NAME
<LI>DOCUMENT_ROOT
<LI>DOCUMENT_URI
<LI>GATEWAY_INTERFACE
<LI>LAST_MODIFIED
<LI>PATH
<LI>PATH_INFO
<LI>PATH_TRANSLATED
<LI>QUERY_STRING
<LI>REMOTE_ADDR
<LI>REMOTE_HOST
<LI>REMOTE_IDENT
<LI>REMOTE_USER
<LI>REQUEST_METHOD
<LI>SCRIPT_NAME
<LI>SERVER_NAME
<LI>SERVER_PORT
<LI>SERVER_PROTOCOL
<LI>SERVER_ROOT
<LI>SERVER_SOFTWARE
</UL>
In addition, HTTP headers sent by the server may be passed in the
environment as well. Here are some common variable names:
<UL>
<LI>HTTP_ACCEPT
<LI>HTTP_CONNECTION
<LI>HTTP_HOST
<LI>HTTP_PRAGMA
<LI>HTTP_REFERER
<LI>HTTP_USER_AGENT
</UL>
"""
# Utilities
# =========
def escape(s, quote=None):
    """Replace special characters '&', '<' and '>' by SGML entities.

    If `quote` is true, double quotes are also replaced, making the
    result safe for use inside quoted HTML attribute values.

    The entity strings had been mangled into identity replacements
    (e.g. replacing "&" with "&"), turning this function into a no-op
    and leaving callers open to HTML injection; the proper entities
    are restored here.
    """
    s = s.replace("&", "&amp;")  # Must be done first!
    s = s.replace("<", "&lt;")
    s = s.replace(">", "&gt;")
    if quote:
        s = s.replace('"', "&quot;")
    return s
def valid_boundary(s, _vb_pattern="^[ -~]{0,200}[!-~]$"):
    """Return a match object when *s* is a valid multipart boundary.

    A valid boundary is 1..201 printable ASCII characters whose final
    character is not a space; returns None otherwise.
    """
    import re
    matcher = re.compile(_vb_pattern)
    return matcher.match(s)
# Invoke mainline
# ===============

# Call test() when this file is run as a script (not imported as a module)
if __name__ == '__main__':
    test()
| mit |
AutorestCI/azure-sdk-for-python | azure-cognitiveservices-language-spellcheck/tests/test_spell_check.py | 2 | 2052 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from azure.cognitiveservices.language.spellcheck import SpellCheckAPI
from msrest.authentication import CognitiveServicesCredentials
from azure_devtools.scenario_tests import ReplayableTest, AzureTestError
from devtools_testutils import mgmt_settings_fake as fake_settings
class SpellCheckTest(ReplayableTest):
    """Record/replay scenario test for the Bing Spell Check client."""

    # Scrub the subscription key from recorded HTTP interactions.
    FILTER_HEADERS = ReplayableTest.FILTER_HEADERS + ['Ocp-Apim-Subscription-Key']

    def __init__(self, method_name):
        self._fake_settings, self._real_settings = self._load_settings()
        super(SpellCheckTest, self).__init__(method_name)

    @property
    def settings(self):
        """Real settings in live mode, fake settings during playback.

        Raises AzureTestError when live mode is requested without a
        mgmt_settings_real.py file present.
        """
        if self.is_live:
            if self._real_settings:
                return self._real_settings
            else:
                raise AzureTestError('Need a mgmt_settings_real.py file to run tests live.')
        else:
            return self._fake_settings

    def _load_settings(self):
        """Return (fake, real) settings; real is None when unavailable."""
        try:
            from devtools_testutils import mgmt_settings_real as real_settings
            return fake_settings, real_settings
        except ImportError:
            return fake_settings, None

    def test_spell_check(self):
        """Spell-check a misspelled phrase and verify the suggestion."""
        credentials = CognitiveServicesCredentials(
            self.settings.CS_SUBSCRIPTION_KEY
        )
        text_analytics = SpellCheckAPI(credentials=credentials)

        response = text_analytics.spell_checker(
            "cognituve services"
        )
        # assertEqual: assertEquals is a deprecated alias, removed in
        # Python 3.12.
        self.assertEqual(response.flagged_tokens[0].token, "cognituve")
        self.assertEqual(response.flagged_tokens[0].suggestions[0].suggestion, "cognitive")
| mit |
cchurch/ansible-modules-extras | cloud/profitbricks/profitbricks_nic.py | 132 | 8769 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Ansible module documentation. Fixes over the previous revision: the
# description wrongly said "create or restore a volume snapshot" (copied
# from the snapshot module), and "environement"/"THe" typos.
DOCUMENTATION = '''
---
module: profitbricks_nic
short_description: Create or Remove a NIC.
description:
     - This module allows you to create or remove a NIC. This module has a dependency on profitbricks >= 1.0.0
version_added: "2.0"
options:
  datacenter:
    description:
      - The datacenter in which to operate.
    required: true
  server:
    description:
      - The server name or ID.
    required: true
  name:
    description:
      - The name or ID of the NIC. This is only required on deletes, but not on create.
    required: true
  lan:
    description:
      - The LAN to place the NIC on. You can pass a LAN that doesn't exist and it will be created. Required on create.
    required: true
  subscription_user:
    description:
      - The ProfitBricks username. Overrides the PB_SUBSCRIPTION_ID environment variable.
    required: false
  subscription_password:
    description:
      - The ProfitBricks password. Overrides the PB_PASSWORD environment variable.
    required: false
  wait:
    description:
      - wait for the operation to complete before returning
    required: false
    default: "yes"
    choices: [ "yes", "no" ]
  wait_timeout:
    description:
      - how long before wait gives up, in seconds
    default: 600
  state:
    description:
      - Indicate desired state of the resource
    required: false
    default: 'present'
    choices: ["present", "absent"]
requirements: [ "profitbricks" ]
author: Matt Baldwin (baldwin@stackpointcloud.com)
'''
EXAMPLES = '''
# Create a NIC
- profitbricks_nic:
datacenter: Tardis One
server: node002
lan: 2
wait_timeout: 500
state: present
# Remove a NIC
- profitbricks_nic:
datacenter: Tardis One
server: node002
name: 7341c2454f
wait_timeout: 500
state: absent
'''
import re
import uuid
import time

# Flag recording whether the profitbricks SDK could be imported; checked
# in main() so a missing dependency produces a clean fail_json message.
HAS_PB_SDK = True

try:
    from profitbricks.client import ProfitBricksService, NIC
except ImportError:
    HAS_PB_SDK = False

# Matches a ProfitBricks UUID (8-4-4-4-12 word-character groups),
# case-insensitive. Raw string avoids invalid-escape warnings for '\w'
# under newer Python versions.
uuid_match = re.compile(
    r'[\w]{8}-[\w]{4}-[\w]{4}-[\w]{4}-[\w]{12}', re.I)
def _wait_for_completion(profitbricks, promise, wait_timeout, msg):
if not promise: return
wait_timeout = time.time() + wait_timeout
while wait_timeout > time.time():
time.sleep(5)
operation_result = profitbricks.get_request(
request_id=promise['requestId'],
status=True)
if operation_result['metadata']['status'] == "DONE":
return
elif operation_result['metadata']['status'] == "FAILED":
raise Exception(
'Request failed to complete ' + msg + ' "' + str(
promise['requestId']) + '" to complete.')
raise Exception(
'Timed out waiting for async operation ' + msg + ' "' + str(
promise['requestId']
) + '" to complete.')
def create_nic(module, profitbricks):
    """
    Creates a NIC.

    module : AnsibleModule object
    profitbricks: authenticated profitbricks object.

    Returns:
        The API response for the created NIC.
    """
    params = module.params
    datacenter = params.get('datacenter')
    server = params.get('server')
    lan = params.get('lan')
    name = params.get('name')
    wait = params.get('wait')
    wait_timeout = params.get('wait_timeout')

    # Resolve a datacenter name to its UUID when a name was given.
    if not uuid_match.match(datacenter):
        for d in profitbricks.list_datacenters()['items']:
            dc = profitbricks.get_datacenter(d['id'])
            if datacenter == dc['properties']['name']:
                datacenter = d['id']
                break

    # Resolve a server name to its UUID when a name was given.
    if not uuid_match.match(server):
        for s in profitbricks.list_servers(datacenter)['items']:
            if server == s['properties']['name']:
                server = s['id']
                break
    try:
        nic = NIC(
            name=name,
            lan=lan
        )

        nic_response = profitbricks.create_nic(datacenter, server, nic)

        if wait:
            _wait_for_completion(profitbricks, nic_response,
                                 wait_timeout, "create_nic")

        return nic_response
    except Exception as e:
        module.fail_json(msg="failed to create the NIC: %s" % str(e))
def delete_nic(module, profitbricks):
    """
    Removes a NIC

    module : AnsibleModule object
    profitbricks: authenticated profitbricks object.

    Returns:
        True if the NIC was removed, false otherwise
    """
    params = module.params
    datacenter = params.get('datacenter')
    server = params.get('server')
    name = params.get('name')

    # Resolve a datacenter name to its UUID when a name was given.
    if not uuid_match.match(datacenter):
        for d in profitbricks.list_datacenters()['items']:
            dc = profitbricks.get_datacenter(d['id'])
            if datacenter == dc['properties']['name']:
                datacenter = d['id']
                break

    # Resolve a server name to its UUID; a missing server means there
    # is nothing to delete, so report "no change".
    if not uuid_match.match(server):
        server_id = None
        for s in profitbricks.list_servers(datacenter)['items']:
            if server == s['properties']['name']:
                server_id = s['id']
                break

        if server_id is None:
            return False
        server = server_id

    # Resolve a NIC name to its UUID; same "no change" rule as above.
    if not uuid_match.match(name):
        nic_id = None
        for n in profitbricks.list_nics(datacenter, server)['items']:
            if name == n['properties']['name']:
                nic_id = n['id']
                break

        if nic_id is None:
            return False
        name = nic_id

    try:
        return profitbricks.delete_nic(datacenter, server, name)
    except Exception as e:
        module.fail_json(msg="failed to remove the NIC: %s" % str(e))
def main():
    # Ansible entry point: declare the argument spec, validate required
    # parameters manually, then dispatch on the requested state.
    module = AnsibleModule(
        argument_spec=dict(
            datacenter=dict(),
            server=dict(),
            # Default NIC name: 10 chars taken from a random UUID4.
            name=dict(default=str(uuid.uuid4()).replace('-','')[:10]),
            lan=dict(),
            subscription_user=dict(),
            subscription_password=dict(),
            wait=dict(type='bool', default=True),
            wait_timeout=dict(type='int', default=600),
            state=dict(default='present'),
        )
    )

    if not HAS_PB_SDK:
        module.fail_json(msg='profitbricks required for this module')

    # Required-parameter checks are done by hand because the spec above
    # does not mark anything required=True.
    if not module.params.get('subscription_user'):
        module.fail_json(msg='subscription_user parameter is required')
    if not module.params.get('subscription_password'):
        module.fail_json(msg='subscription_password parameter is required')
    if not module.params.get('datacenter'):
        module.fail_json(msg='datacenter parameter is required')
    if not module.params.get('server'):
        module.fail_json(msg='server parameter is required')

    subscription_user = module.params.get('subscription_user')
    subscription_password = module.params.get('subscription_password')

    profitbricks = ProfitBricksService(
        username=subscription_user,
        password=subscription_password)

    state = module.params.get('state')

    if state == 'absent':
        # Deleting requires a concrete NIC name/ID (the generated
        # default above only makes sense when creating).
        if not module.params.get('name'):
            module.fail_json(msg='name parameter is required')

        try:
            (changed) = delete_nic(module, profitbricks)
            module.exit_json(changed=changed)
        except Exception as e:
            module.fail_json(msg='failed to set nic state: %s' % str(e))

    elif state == 'present':
        if not module.params.get('lan'):
            module.fail_json(msg='lan parameter is required')

        try:
            (nic_dict) = create_nic(module, profitbricks)
            module.exit_json(nics=nic_dict)
        except Exception as e:
            module.fail_json(msg='failed to set nic state: %s' % str(e))
# Standard Ansible boilerplate: the wildcard import injects module_utils
# helpers (AnsibleModule et al.) into this namespace.
from ansible.module_utils.basic import *

# Guarding main() means importing this file (e.g. for documentation
# extraction or unit tests) no longer executes the module; Ansible runs
# modules as __main__, so runtime behavior is unchanged.
if __name__ == '__main__':
    main()
hos7ein/firewalld | src/firewall/server/config_zone.py | 2 | 42804 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2010-2016 Red Hat, Inc.
#
# Authors:
# Thomas Woerner <twoerner@redhat.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# force use of pygobject3 in python-slip
from gi.repository import GObject
import sys
sys.modules['gobject'] = GObject
import dbus
import dbus.service
import slip.dbus
import slip.dbus.service
from firewall import config
from firewall.dbus_utils import dbus_to_python, \
dbus_introspection_prepare_properties, \
dbus_introspection_add_properties
from firewall.core.io.zone import Zone
from firewall.core.fw_ifcfg import ifcfg_set_zone_of_interface
from firewall.core.base import DEFAULT_ZONE_TARGET
from firewall.core.rich import Rich_Rule
from firewall.core.logger import log
from firewall.server.decorators import handle_exceptions, \
dbus_handle_exceptions, dbus_service_method
from firewall import errors
from firewall.errors import FirewallError
from firewall.functions import portInPortRange
############################################################################
#
# class FirewallDConfig
#
############################################################################
class FirewallDConfigZone(slip.dbus.service.Object):
"""FirewallD main class"""
persistent = True
""" Make FirewallD persistent. """
default_polkit_auth_required = config.dbus.PK_ACTION_CONFIG
""" Use PK_ACTION_INFO as a default """
@handle_exceptions
def __init__(self, parent, conf, zone, item_id, *args, **kwargs):
    # parent: the owning config service object (used for access checks
    #   and zone lookups); conf: configuration backend; zone: the Zone
    #   I/O object this D-Bus object wraps; item_id: numeric id used in
    #   the log prefix. args[0]/args[1] are the D-Bus bus name and
    #   object path passed through to slip.dbus.service.Object.
    super(FirewallDConfigZone, self).__init__(*args, **kwargs)
    self.parent = parent
    self.config = conf
    self.obj = zone
    self.item_id = item_id
    self.busname = args[0]
    self.path = args[1]
    self._log_prefix = "config.zone.%d" % self.item_id
    # Register property metadata so org.freedesktop.DBus.Properties
    # introspection works for this interface.
    dbus_introspection_prepare_properties(
        self, config.dbus.DBUS_INTERFACE_CONFIG_ZONE)
@dbus_handle_exceptions
def __del__(self):
    # Nothing to clean up here; explicit teardown goes through
    # unregister() below.
    pass
@dbus_handle_exceptions
def unregister(self):
    # Remove this object from the D-Bus connection so its path stops
    # being served (e.g. when the zone configuration is removed).
    self.remove_from_connection()
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# P R O P E R T I E S
@dbus_handle_exceptions
def _get_property(self, property_name):
    """Return the named zone property wrapped in its D-Bus type."""
    # Group properties by their D-Bus value type instead of chaining
    # one if-branch per property.
    string_props = ("name", "filename", "path")
    bool_props = ("default", "builtin")
    if property_name in string_props:
        return dbus.String(getattr(self.obj, property_name))
    if property_name in bool_props:
        return dbus.Boolean(getattr(self.obj, property_name))
    raise dbus.exceptions.DBusException(
        "org.freedesktop.DBus.Error.InvalidArgs: "
        "Property '%s' does not exist" % property_name)
@dbus_service_method(dbus.PROPERTIES_IFACE, in_signature='ss',
                     out_signature='v')
@dbus_handle_exceptions
def Get(self, interface_name, property_name, sender=None): # pylint: disable=W0613
    # org.freedesktop.DBus.Properties.Get: fetch a single property,
    # rejecting any interface other than the config.zone interface.
    interface_name = dbus_to_python(interface_name, str)
    property_name = dbus_to_python(property_name, str)
    log.debug1("%s.Get('%s', '%s')", self._log_prefix,
               interface_name, property_name)

    if interface_name != config.dbus.DBUS_INTERFACE_CONFIG_ZONE:
        raise dbus.exceptions.DBusException(
            "org.freedesktop.DBus.Error.UnknownInterface: "
            "Interface '%s' does not exist" % interface_name)

    return self._get_property(property_name)
@dbus_service_method(dbus.PROPERTIES_IFACE, in_signature='s',
                     out_signature='a{sv}')
@dbus_handle_exceptions
def GetAll(self, interface_name, sender=None): # pylint: disable=W0613
    """org.freedesktop.DBus.Properties.GetAll for the zone interface."""
    interface_name = dbus_to_python(interface_name, str)
    log.debug1("%s.GetAll('%s')", self._log_prefix, interface_name)

    if interface_name != config.dbus.DBUS_INTERFACE_CONFIG_ZONE:
        raise dbus.exceptions.DBusException(
            "org.freedesktop.DBus.Error.UnknownInterface: "
            "Interface '%s' does not exist" % interface_name)

    # Collect every readable property through the common getter.
    props = dict((name, self._get_property(name))
                 for name in ("name", "filename", "path",
                              "default", "builtin"))
    return dbus.Dictionary(props, signature="sv")
@slip.dbus.polkit.require_auth(config.dbus.PK_ACTION_CONFIG)
@dbus_service_method(dbus.PROPERTIES_IFACE, in_signature='ssv')
@dbus_handle_exceptions
def Set(self, interface_name, property_name, new_value, sender=None):
    # org.freedesktop.DBus.Properties.Set: every property on this
    # interface is read-only, so after validating access and the
    # interface name this always raises PropertyReadOnly.
    interface_name = dbus_to_python(interface_name, str)
    property_name = dbus_to_python(property_name, str)
    new_value = dbus_to_python(new_value)
    log.debug1("%s.Set('%s', '%s', '%s')", self._log_prefix,
               interface_name, property_name, new_value)
    self.parent.accessCheck(sender)

    if interface_name != config.dbus.DBUS_INTERFACE_CONFIG_ZONE:
        raise dbus.exceptions.DBusException(
            "org.freedesktop.DBus.Error.UnknownInterface: "
            "Interface '%s' does not exist" % interface_name)

    raise dbus.exceptions.DBusException(
        "org.freedesktop.DBus.Error.PropertyReadOnly: "
        "Property '%s' is read-only" % property_name)
@dbus.service.signal(dbus.PROPERTIES_IFACE, signature='sa{sv}as')
def PropertiesChanged(self, interface_name, changed_properties,
                      invalidated_properties):
    """Standard org.freedesktop.DBus.Properties.PropertiesChanged signal.

    The body only logs; dbus-python emits the signal itself when a
    method decorated with @dbus.service.signal returns.
    """
    interface_name = dbus_to_python(interface_name, str)
    changed_properties = dbus_to_python(changed_properties)
    invalidated_properties = dbus_to_python(invalidated_properties)
    log.debug1("%s.PropertiesChanged('%s', '%s', '%s')", self._log_prefix,
               interface_name, changed_properties, invalidated_properties)
@slip.dbus.polkit.require_auth(config.dbus.PK_ACTION_INFO)
@dbus_service_method(dbus.INTROSPECTABLE_IFACE, out_signature='s')
@dbus_handle_exceptions
def Introspect(self, sender=None): # pylint: disable=W0613
    """Return introspection XML for this object.

    The XML from the base class is post-processed to also advertise
    the config.zone properties (dbus-python does not include
    properties in introspection data on its own).
    """
    log.debug2("%s.Introspect()", self._log_prefix)
    data = super(FirewallDConfigZone, self).Introspect(
        self.path, self.busname.get_bus())
    return dbus_introspection_add_properties(
        self, data, config.dbus.DBUS_INTERFACE_CONFIG_ZONE)
# S E T T I N G S
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
                     out_signature=Zone.DBUS_SIGNATURE)
@dbus_handle_exceptions
def getSettings(self, sender=None): # pylint: disable=W0613
    """get settings for zone

    Returns the zone settings tuple (shape given by
    Zone.DBUS_SIGNATURE).  The internal DEFAULT_ZONE_TARGET value in
    field 4 (target) is mapped to the public string "default" on the
    wire; update() performs the inverse mapping.
    """
    log.debug1("%s.getSettings()", self._log_prefix)
    settings = self.config.get_zone_config(self.obj)
    if settings[4] == DEFAULT_ZONE_TARGET:
        # convert to list, fix target, convert back to tuple
        _settings = list(settings)
        _settings[4] = "default"
        settings = tuple(_settings)
    return settings
def _checkDuplicateInterfacesSources(self, settings):
    """Reject interfaces/sources already bound to a different zone.

    Unlike other zone settings, an interface or source may belong to
    at most one zone, so each newly added binding must still be free.
    Raises FirewallError(ZONE_CONFLICT) on the first conflict found.
    """
    current = self.config.get_zone_config(self.obj)
    iface_idx = Zone.index_of("interfaces")
    source_idx = Zone.index_of("sources")
    new_ifaces = set(settings[iface_idx]) - set(current[iface_idx])
    new_sources = set(settings[source_idx]) - set(current[source_idx])
    for interface in new_ifaces:
        if self.parent.getZoneOfInterface(interface):
            # or move to new zone ?
            raise FirewallError(errors.ZONE_CONFLICT, interface)
    for src in new_sources:
        if self.parent.getZoneOfSource(src):
            # or move to new zone ?
            raise FirewallError(errors.ZONE_CONFLICT, src)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
                     in_signature=Zone.DBUS_SIGNATURE)
@dbus_handle_exceptions
def update(self, settings, sender=None):
    """update settings for zone

    Inverse of getSettings(): the public "default" target string is
    mapped back to the internal DEFAULT_ZONE_TARGET before the config
    is stored.  Raises FirewallError(ZONE_CONFLICT) if a newly added
    interface or source is already bound to another zone.  Emits
    Updated on success.
    """
    settings = dbus_to_python(settings)
    log.debug1("%s.update('...')", self._log_prefix)
    self.parent.accessCheck(sender)
    if settings[4] == "default":
        # convert to list, fix target, convert back to tuple
        _settings = list(settings)
        _settings[4] = DEFAULT_ZONE_TARGET
        settings = tuple(_settings)
    self._checkDuplicateInterfacesSources(settings)
    self.obj = self.config.set_zone_config(self.obj, settings)
    self.Updated(self.obj.name)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE)
@dbus_handle_exceptions
def loadDefaults(self, sender=None):
    """load default settings for builtin zone

    Discards user customizations, reverts to the shipped defaults and
    emits Updated.
    """
    log.debug1("%s.loadDefaults()", self._log_prefix)
    self.parent.accessCheck(sender)
    self.obj = self.config.load_zone_defaults(self.obj)
    self.Updated(self.obj.name)
@dbus.service.signal(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, signature='s')
@dbus_handle_exceptions
def Updated(self, name):
    """D-Bus signal emitted after the zone configuration was updated."""
    # Pass the arguments lazily to debug1, consistent with every other
    # log call in this file, instead of eager %-interpolation.
    log.debug1("%s.Updated('%s')", self._log_prefix, name)
# R E M O V E
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE)
@dbus_handle_exceptions
def remove(self, sender=None):
    """remove zone

    Deletes the permanent zone configuration and deregisters the zone
    from the parent config object.  The parent emits the removal
    notification.
    """
    # Log the actual method name (was "removeZone", which is the
    # parent interface's method and made debug traces misleading).
    log.debug1("%s.remove()", self._log_prefix)
    self.parent.accessCheck(sender)
    self.config.remove_zone(self.obj)
    self.parent.removeZone(self.obj)
@dbus.service.signal(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, signature='s')
@dbus_handle_exceptions
def Removed(self, name):
    """D-Bus signal emitted after the zone was removed."""
    # Lazy log arguments for consistency with the rest of the file.
    log.debug1("%s.Removed('%s')", self._log_prefix, name)
# R E N A M E
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
                     in_signature='s')
@dbus_handle_exceptions
def rename(self, name, sender=None):
    """rename zone

    Renames the underlying zone config object to `name` (after the
    sender passes parent.accessCheck) and emits the Renamed signal.
    """
    name = dbus_to_python(name, str)
    log.debug1("%s.rename('%s')", self._log_prefix, name)
    self.parent.accessCheck(sender)
    self.obj = self.config.rename_zone(self.obj, name)
    self.Renamed(name)
@dbus.service.signal(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, signature='s')
@dbus_handle_exceptions
def Renamed(self, name):
    """D-Bus signal emitted after the zone was renamed."""
    # Lazy log arguments for consistency with the rest of the file.
    log.debug1("%s.Renamed('%s')", self._log_prefix, name)
# version
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
out_signature='s')
@dbus_handle_exceptions
def getVersion(self, sender=None): # pylint: disable=W0613
log.debug1("%s.getVersion()", self._log_prefix)
return self.getSettings()[0]
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='s')
@dbus_handle_exceptions
def setVersion(self, version, sender=None):
version = dbus_to_python(version, str)
log.debug1("%s.setVersion('%s')", self._log_prefix, version)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
settings[0] = version
self.update(settings)
# short
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
out_signature='s')
@dbus_handle_exceptions
def getShort(self, sender=None): # pylint: disable=W0613
log.debug1("%s.getShort()", self._log_prefix)
return self.getSettings()[1]
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='s')
@dbus_handle_exceptions
def setShort(self, short, sender=None):
short = dbus_to_python(short, str)
log.debug1("%s.setShort('%s')", self._log_prefix, short)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
settings[1] = short
self.update(settings)
# description
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
out_signature='s')
@dbus_handle_exceptions
def getDescription(self, sender=None): # pylint: disable=W0613
log.debug1("%s.getDescription()", self._log_prefix)
return self.getSettings()[2]
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='s')
@dbus_handle_exceptions
def setDescription(self, description, sender=None):
description = dbus_to_python(description, str)
log.debug1("%s.setDescription('%s')", self._log_prefix, description)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
settings[2] = description
self.update(settings)
# immutable (deprecated)
# settings[3] was used for 'immutable'
# target
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
out_signature='s')
@dbus_handle_exceptions
def getTarget(self, sender=None): # pylint: disable=W0613
log.debug1("%s.getTarget()", self._log_prefix)
settings = self.getSettings()
return settings[4] if settings[4] != DEFAULT_ZONE_TARGET else "default"
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='s')
@dbus_handle_exceptions
def setTarget(self, target, sender=None):
target = dbus_to_python(target, str)
log.debug1("%s.setTarget('%s')", self._log_prefix, target)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
settings[4] = target if target != "default" else DEFAULT_ZONE_TARGET
self.update(settings)
# service
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
out_signature='as')
@dbus_handle_exceptions
def getServices(self, sender=None): # pylint: disable=W0613
log.debug1("%s.getServices()", self._log_prefix)
return self.getSettings()[5]
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='as')
@dbus_handle_exceptions
def setServices(self, services, sender=None):
services = dbus_to_python(services, list)
log.debug1("%s.setServices('[%s]')", self._log_prefix,
",".join(services))
self.parent.accessCheck(sender)
settings = list(self.getSettings())
settings[5] = services
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='s')
@dbus_handle_exceptions
def addService(self, service, sender=None):
service = dbus_to_python(service, str)
log.debug1("%s.addService('%s')", self._log_prefix, service)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
if service in settings[5]:
raise FirewallError(errors.ALREADY_ENABLED, service)
settings[5].append(service)
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='s')
@dbus_handle_exceptions
def removeService(self, service, sender=None):
service = dbus_to_python(service, str)
log.debug1("%s.removeService('%s')", self._log_prefix, service)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
if service not in settings[5]:
raise FirewallError(errors.NOT_ENABLED, service)
settings[5].remove(service)
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='s', out_signature='b')
@dbus_handle_exceptions
def queryService(self, service, sender=None): # pylint: disable=W0613
service = dbus_to_python(service, str)
log.debug1("%s.queryService('%s')", self._log_prefix, service)
return service in self.getSettings()[5]
# port
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
out_signature='a(ss)')
@dbus_handle_exceptions
def getPorts(self, sender=None): # pylint: disable=W0613
log.debug1("%s.getPorts()", self._log_prefix)
return self.getSettings()[6]
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
                     in_signature='a(ss)')
@dbus_handle_exceptions
def setPorts(self, ports, sender=None):
    """Replace the zone's port list with `ports`, a list of
    (port, protocol) pairs.  Emits Updated via update()."""
    # D-Bus structs arrive as lists; zone settings store tuples.
    _ports = [ ]
    for port in dbus_to_python(ports, list):
        if isinstance(port, list):
            _ports.append(tuple(port))
        else:
            _ports.append(port)
    ports = _ports
    # Fixed the quoting in the log format string (was "('%s, '%s')",
    # which produced mangled debug output).
    log.debug1("%s.setPorts('[%s]')", self._log_prefix,
               ",".join("('%s', '%s')" % (port[0], port[1]) for port in ports))
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    settings[6] = ports
    self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='ss')
@dbus_handle_exceptions
def addPort(self, port, protocol, sender=None):
port = dbus_to_python(port, str)
protocol = dbus_to_python(protocol, str)
log.debug1("%s.addPort('%s', '%s')", self._log_prefix, port,
protocol)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
if (port,protocol) in settings[6]:
raise FirewallError(errors.ALREADY_ENABLED,
"%s:%s" % (port, protocol))
settings[6].append((port,protocol))
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='ss')
@dbus_handle_exceptions
def removePort(self, port, protocol, sender=None):
port = dbus_to_python(port, str)
protocol = dbus_to_python(protocol, str)
log.debug1("%s.removePort('%s', '%s')", self._log_prefix, port,
protocol)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
if (port,protocol) not in settings[6]:
raise FirewallError(errors.NOT_ENABLED, "%s:%s" % (port, protocol))
settings[6].remove((port,protocol))
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
                     in_signature='ss', out_signature='b')
@dbus_handle_exceptions
def queryPort(self, port, protocol, sender=None): # pylint: disable=W0613
    """Return True if (port, protocol) is enabled in the zone.

    Matches either an exact settings entry or a single port that falls
    inside a configured port range with the same protocol.
    """
    port = dbus_to_python(port, str)
    protocol = dbus_to_python(protocol, str)
    log.debug1("%s.queryPort('%s', '%s')", self._log_prefix, port,
               protocol)
    if (port,protocol) in self.getSettings()[6]:
        return True
    else:
        # It might be a single port query that is inside a range
        for (_port, _protocol) in self.getSettings()[6]:
            if portInPortRange(port, _port) and protocol == _protocol:
                return True
    return False
# protocol
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
out_signature='as')
@dbus_handle_exceptions
def getProtocols(self, sender=None): # pylint: disable=W0613
log.debug1("%s.getProtocols()", self._log_prefix)
return self.getSettings()[13]
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='as')
@dbus_handle_exceptions
def setProtocols(self, protocols, sender=None):
protocols = dbus_to_python(protocols, list)
log.debug1("%s.setProtocols('[%s]')", self._log_prefix,
",".join(protocols))
self.parent.accessCheck(sender)
settings = list(self.getSettings())
settings[13] = protocols
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='s')
@dbus_handle_exceptions
def addProtocol(self, protocol, sender=None):
protocol = dbus_to_python(protocol, str)
log.debug1("%s.addProtocol('%s')", self._log_prefix, protocol)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
if protocol in settings[13]:
raise FirewallError(errors.ALREADY_ENABLED, protocol)
settings[13].append(protocol)
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='s')
@dbus_handle_exceptions
def removeProtocol(self, protocol, sender=None):
protocol = dbus_to_python(protocol, str)
log.debug1("%s.removeProtocol('%s')", self._log_prefix, protocol)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
if protocol not in settings[13]:
raise FirewallError(errors.NOT_ENABLED, protocol)
settings[13].remove(protocol)
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='s', out_signature='b')
@dbus_handle_exceptions
def queryProtocol(self, protocol, sender=None): # pylint: disable=W0613
protocol = dbus_to_python(protocol, str)
log.debug1("%s.queryProtocol('%s')", self._log_prefix, protocol)
return protocol in self.getSettings()[13]
# source port
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
out_signature='a(ss)')
@dbus_handle_exceptions
def getSourcePorts(self, sender=None): # pylint: disable=W0613
log.debug1("%s.getSourcePorts()", self._log_prefix)
return self.getSettings()[14]
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
                     in_signature='a(ss)')
@dbus_handle_exceptions
def setSourcePorts(self, ports, sender=None):
    """Replace the zone's source-port list with `ports`, a list of
    (port, protocol) pairs.  Emits Updated via update()."""
    # D-Bus structs arrive as lists; zone settings store tuples.
    _ports = [ ]
    for port in dbus_to_python(ports, list):
        if isinstance(port, list):
            _ports.append(tuple(port))
        else:
            _ports.append(port)
    ports = _ports
    # Fixed the quoting in the log format string (was "('%s, '%s')",
    # which produced mangled debug output).
    log.debug1("%s.setSourcePorts('[%s]')", self._log_prefix,
               ",".join("('%s', '%s')" % (port[0], port[1]) for port in ports))
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    settings[14] = ports
    self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='ss')
@dbus_handle_exceptions
def addSourcePort(self, port, protocol, sender=None):
port = dbus_to_python(port, str)
protocol = dbus_to_python(protocol, str)
log.debug1("%s.addSourcePort('%s', '%s')", self._log_prefix, port,
protocol)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
if (port,protocol) in settings[14]:
raise FirewallError(errors.ALREADY_ENABLED,
"%s:%s" % (port, protocol))
settings[14].append((port,protocol))
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='ss')
@dbus_handle_exceptions
def removeSourcePort(self, port, protocol, sender=None):
port = dbus_to_python(port, str)
protocol = dbus_to_python(protocol, str)
log.debug1("%s.removeSourcePort('%s', '%s')", self._log_prefix, port,
protocol)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
if (port,protocol) not in settings[14]:
raise FirewallError(errors.NOT_ENABLED, "%s:%s" % (port, protocol))
settings[14].remove((port,protocol))
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='ss', out_signature='b')
@dbus_handle_exceptions
def querySourcePort(self, port, protocol, sender=None): # pylint: disable=W0613
port = dbus_to_python(port, str)
protocol = dbus_to_python(protocol, str)
log.debug1("%s.querySourcePort('%s', '%s')", self._log_prefix, port,
protocol)
return (port,protocol) in self.getSettings()[14]
# icmp block
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
out_signature='as')
@dbus_handle_exceptions
def getIcmpBlocks(self, sender=None): # pylint: disable=W0613
log.debug1("%s.getIcmpBlocks()", self._log_prefix)
return self.getSettings()[7]
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='as')
@dbus_handle_exceptions
def setIcmpBlocks(self, icmptypes, sender=None):
icmptypes = dbus_to_python(icmptypes, list)
log.debug1("%s.setIcmpBlocks('[%s]')", self._log_prefix,
",".join(icmptypes))
self.parent.accessCheck(sender)
settings = list(self.getSettings())
settings[7] = icmptypes
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='s')
@dbus_handle_exceptions
def addIcmpBlock(self, icmptype, sender=None):
icmptype = dbus_to_python(icmptype, str)
log.debug1("%s.addIcmpBlock('%s')", self._log_prefix, icmptype)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
if icmptype in settings[7]:
raise FirewallError(errors.ALREADY_ENABLED, icmptype)
settings[7].append(icmptype)
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='s')
@dbus_handle_exceptions
def removeIcmpBlock(self, icmptype, sender=None):
icmptype = dbus_to_python(icmptype, str)
log.debug1("%s.removeIcmpBlock('%s')", self._log_prefix, icmptype)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
if icmptype not in settings[7]:
raise FirewallError(errors.NOT_ENABLED, icmptype)
settings[7].remove(icmptype)
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='s', out_signature='b')
@dbus_handle_exceptions
def queryIcmpBlock(self, icmptype, sender=None): # pylint: disable=W0613
icmptype = dbus_to_python(icmptype, str)
log.debug1("%s.queryIcmpBlock('%s')", self._log_prefix, icmptype)
return icmptype in self.getSettings()[7]
# icmp block inversion
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
out_signature='b')
@dbus_handle_exceptions
def getIcmpBlockInversion(self, sender=None): # pylint: disable=W0613
log.debug1("%s.getIcmpBlockInversion()", self._log_prefix)
return self.getSettings()[15]
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='b')
@dbus_handle_exceptions
def setIcmpBlockInversion(self, flag, sender=None):
flag = dbus_to_python(flag, bool)
log.debug1("%s.setIcmpBlockInversion('%s')", self._log_prefix, flag)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
settings[15] = flag
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE)
@dbus_handle_exceptions
def addIcmpBlockInversion(self, sender=None):
log.debug1("%s.addIcmpBlockInversion()", self._log_prefix)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
if settings[15]:
raise FirewallError(errors.ALREADY_ENABLED, "icmp-block-inversion")
settings[15] = True
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE)
@dbus_handle_exceptions
def removeIcmpBlockInversion(self, sender=None):
log.debug1("%s.removeIcmpBlockInversion()", self._log_prefix)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
if not settings[15]:
raise FirewallError(errors.NOT_ENABLED, "icmp-block-inversion")
settings[15] = False
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
out_signature='b')
@dbus_handle_exceptions
def queryIcmpBlockInversion(self, sender=None): # pylint: disable=W0613
log.debug1("%s.queryIcmpBlockInversion()", self._log_prefix)
return self.getSettings()[15]
# masquerade
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
out_signature='b')
@dbus_handle_exceptions
def getMasquerade(self, sender=None): # pylint: disable=W0613
log.debug1("%s.getMasquerade()", self._log_prefix)
return self.getSettings()[8]
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='b')
@dbus_handle_exceptions
def setMasquerade(self, masquerade, sender=None):
masquerade = dbus_to_python(masquerade, bool)
log.debug1("%s.setMasquerade('%s')", self._log_prefix, masquerade)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
settings[8] = masquerade
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE)
@dbus_handle_exceptions
def addMasquerade(self, sender=None):
log.debug1("%s.addMasquerade()", self._log_prefix)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
if settings[8]:
raise FirewallError(errors.ALREADY_ENABLED, "masquerade")
settings[8] = True
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE)
@dbus_handle_exceptions
def removeMasquerade(self, sender=None):
log.debug1("%s.removeMasquerade()", self._log_prefix)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
if not settings[8]:
raise FirewallError(errors.NOT_ENABLED, "masquerade")
settings[8] = False
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
out_signature='b')
@dbus_handle_exceptions
def queryMasquerade(self, sender=None): # pylint: disable=W0613
log.debug1("%s.queryMasquerade()", self._log_prefix)
return self.getSettings()[8]
# forward port
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
out_signature='a(ssss)')
@dbus_handle_exceptions
def getForwardPorts(self, sender=None): # pylint: disable=W0613
log.debug1("%s.getForwardPorts()", self._log_prefix)
return self.getSettings()[9]
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
                     in_signature='a(ssss)')
@dbus_handle_exceptions
def setForwardPorts(self, ports, sender=None):
    """Replace the zone's forward-port list with `ports`, a list of
    (port, protocol, toport, toaddr) tuples.  Emits Updated via
    update()."""
    # D-Bus structs arrive as lists; zone settings store tuples.
    _ports = [ ]
    for port in dbus_to_python(ports, list):
        if isinstance(port, list):
            _ports.append(tuple(port))
        else:
            _ports.append(port)
    ports = _ports
    # Fixed the quoting in the log format string (was
    # "('%s, '%s', '%s', '%s')", which produced mangled debug output).
    log.debug1("%s.setForwardPorts('[%s]')", self._log_prefix,
               ",".join("('%s', '%s', '%s', '%s')" % (port[0], port[1],
                        port[2], port[3]) for port in ports))
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    settings[9] = ports
    self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='ssss')
@dbus_handle_exceptions
def addForwardPort(self, port, protocol, toport, toaddr, sender=None): # pylint: disable=R0913
port = dbus_to_python(port, str)
protocol = dbus_to_python(protocol, str)
toport = dbus_to_python(toport, str)
toaddr = dbus_to_python(toaddr, str)
log.debug1("%s.addForwardPort('%s', '%s', '%s', '%s')",
self._log_prefix, port, protocol, toport, toaddr)
self.parent.accessCheck(sender)
fwp_id = (port, protocol, str(toport), str(toaddr))
settings = list(self.getSettings())
if fwp_id in settings[9]:
raise FirewallError(errors.ALREADY_ENABLED,
"%s:%s:%s:%s" % (port, protocol, toport,
toaddr))
settings[9].append(fwp_id)
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='ssss')
@dbus_handle_exceptions
def removeForwardPort(self, port, protocol, toport, toaddr, sender=None): # pylint: disable=R0913
port = dbus_to_python(port, str)
protocol = dbus_to_python(protocol, str)
toport = dbus_to_python(toport, str)
toaddr = dbus_to_python(toaddr, str)
log.debug1("%s.removeForwardPort('%s', '%s', '%s', '%s')",
self._log_prefix, port, protocol, toport, toaddr)
self.parent.accessCheck(sender)
fwp_id = (port, protocol, str(toport), str(toaddr))
settings = list(self.getSettings())
if fwp_id not in settings[9]:
raise FirewallError(errors.NOT_ENABLED,
"%s:%s:%s:%s" % (port, protocol, toport,
toaddr))
settings[9].remove(fwp_id)
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='ssss',
out_signature='b')
@dbus_handle_exceptions
def queryForwardPort(self, port, protocol, toport, toaddr, sender=None): # pylint: disable=W0613, R0913
port = dbus_to_python(port, str)
protocol = dbus_to_python(protocol, str)
toport = dbus_to_python(toport, str)
toaddr = dbus_to_python(toaddr, str)
log.debug1("%s.queryForwardPort('%s', '%s', '%s', '%s')",
self._log_prefix, port, protocol, toport, toaddr)
fwp_id = (port, protocol, str(toport), str(toaddr))
return fwp_id in self.getSettings()[9]
# interface
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
out_signature='as')
@dbus_handle_exceptions
def getInterfaces(self, sender=None): # pylint: disable=W0613
log.debug1("%s.getInterfaces()", self._log_prefix)
return self.getSettings()[10]
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='as')
@dbus_handle_exceptions
def setInterfaces(self, interfaces, sender=None):
interfaces = dbus_to_python(interfaces, list)
log.debug1("%s.setInterfaces('[%s]')", self._log_prefix,
",".join(interfaces))
self.parent.accessCheck(sender)
settings = list(self.getSettings())
settings[10] = interfaces
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='s')
@dbus_handle_exceptions
def addInterface(self, interface, sender=None):
interface = dbus_to_python(interface, str)
log.debug1("%s.addInterface('%s')", self._log_prefix, interface)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
if interface in settings[10]:
raise FirewallError(errors.ALREADY_ENABLED, interface)
settings[10].append(interface)
self.update(settings)
ifcfg_set_zone_of_interface(self.obj.name, interface)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='s')
@dbus_handle_exceptions
def removeInterface(self, interface, sender=None):
interface = dbus_to_python(interface, str)
log.debug1("%s.removeInterface('%s')", self._log_prefix, interface)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
if interface not in settings[10]:
raise FirewallError(errors.NOT_ENABLED, interface)
settings[10].remove(interface)
self.update(settings)
ifcfg_set_zone_of_interface("", interface)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='s',
out_signature='b')
@dbus_handle_exceptions
def queryInterface(self, interface, sender=None): # pylint: disable=W0613
interface = dbus_to_python(interface, str)
log.debug1("%s.queryInterface('%s')", self._log_prefix, interface)
return interface in self.getSettings()[10]
# source
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
out_signature='as')
@dbus_handle_exceptions
def getSources(self, sender=None): # pylint: disable=W0613
log.debug1("%s.getSources()", self._log_prefix)
return self.getSettings()[11]
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='as')
@dbus_handle_exceptions
def setSources(self, sources, sender=None):
sources = dbus_to_python(sources, list)
log.debug1("%s.setSources('[%s]')", self._log_prefix,
",".join(sources))
self.parent.accessCheck(sender)
settings = list(self.getSettings())
settings[11] = sources
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='s')
@dbus_handle_exceptions
def addSource(self, source, sender=None):
source = dbus_to_python(source, str)
log.debug1("%s.addSource('%s')", self._log_prefix, source)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
if source in settings[11]:
raise FirewallError(errors.ALREADY_ENABLED, source)
settings[11].append(source)
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='s')
@dbus_handle_exceptions
def removeSource(self, source, sender=None):
source = dbus_to_python(source, str)
log.debug1("%s.removeSource('%s')", self._log_prefix, source)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
if source not in settings[11]:
raise FirewallError(errors.NOT_ENABLED, source)
settings[11].remove(source)
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='s', out_signature='b')
@dbus_handle_exceptions
def querySource(self, source, sender=None): # pylint: disable=W0613
source = dbus_to_python(source, str)
log.debug1("%s.querySource('%s')", self._log_prefix, source)
return source in self.getSettings()[11]
# rich rule
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
out_signature='as')
@dbus_handle_exceptions
def getRichRules(self, sender=None): # pylint: disable=W0613
log.debug1("%s.getRichRules()", self._log_prefix)
return self.getSettings()[12]
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='as')
@dbus_handle_exceptions
def setRichRules(self, rules, sender=None):
rules = dbus_to_python(rules, list)
log.debug1("%s.setRichRules('[%s]')", self._log_prefix,
",".join(rules))
self.parent.accessCheck(sender)
settings = list(self.getSettings())
rules = [ str(Rich_Rule(rule_str=r)) for r in rules ]
settings[12] = rules
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='s')
@dbus_handle_exceptions
def addRichRule(self, rule, sender=None):
rule = dbus_to_python(rule, str)
log.debug1("%s.addRichRule('%s')", self._log_prefix, rule)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
rule_str = str(Rich_Rule(rule_str=rule))
if rule_str in settings[12]:
raise FirewallError(errors.ALREADY_ENABLED, rule)
settings[12].append(rule_str)
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='s')
@dbus_handle_exceptions
def removeRichRule(self, rule, sender=None):
rule = dbus_to_python(rule, str)
log.debug1("%s.removeRichRule('%s')", self._log_prefix, rule)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
rule_str = str(Rich_Rule(rule_str=rule))
if rule_str not in settings[12]:
raise FirewallError(errors.NOT_ENABLED, rule)
settings[12].remove(rule_str)
self.update(settings)
@dbus_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE,
in_signature='s', out_signature='b')
@dbus_handle_exceptions
def queryRichRule(self, rule, sender=None): # pylint: disable=W0613
rule = dbus_to_python(rule, str)
log.debug1("%s.queryRichRule('%s')", self._log_prefix, rule)
rule_str = str(Rich_Rule(rule_str=rule))
return rule_str in self.getSettings()[12]
| gpl-2.0 |
mscoutermarsh/exercism_coveralls | assignments/python/grains/grains_test.py | 1 | 1123 | try:
from grains import on_square, total_after
except ImportError:
raise SystemExit('Could not find grains.py. Does it exist?')
import unittest
class GrainsTest(unittest.TestCase):
    """Tests for the Exercism 'grains' exercise.

    on_square(n) is expected to return the grains on chessboard square
    n, i.e. 2 ** (n - 1); total_after(n) the running total 2 ** n - 1.
    """

    def test_square_1(self):
        self.assertEqual(1, on_square(1))
        self.assertEqual(1, total_after(1))

    def test_square_2(self):
        self.assertEqual(2, on_square(2))
        self.assertEqual(3, total_after(2))

    def test_square_3(self):
        self.assertEqual(4, on_square(3))
        self.assertEqual(7, total_after(3))

    def test_square_4(self):
        self.assertEqual(8, on_square(4))
        self.assertEqual(15, total_after(4))

    def test_square_16(self):
        self.assertEqual(32768, on_square(16))
        self.assertEqual(65535, total_after(16))

    def test_square_32(self):
        self.assertEqual(2147483648, on_square(32))
        self.assertEqual(4294967295, total_after(32))

    def test_square_64(self):
        # 2**63 and 2**64 - 1 exceed the signed 64-bit range; Python's
        # arbitrary-precision ints handle them natively.
        self.assertEqual(9223372036854775808, on_square(64))
        self.assertEqual(18446744073709551615, total_after(64))
if __name__ == '__main__':
unittest.main()
| agpl-3.0 |
richpolis/siveinpy | env/lib/python2.7/site-packages/django/contrib/gis/geos/prototypes/errcheck.py | 623 | 3522 | """
Error checking functions for GEOS ctypes prototype functions.
"""
import os
from ctypes import c_void_p, string_at, CDLL
from django.contrib.gis.geos.error import GEOSException
from django.contrib.gis.geos.libgeos import GEOS_VERSION
from django.contrib.gis.geos.prototypes.threadsafe import GEOSFunc
# Getting the `free` routine used to free the memory allocated for
# string pointers returned by GEOS.
# Select the deallocator used by the errcheck routines below to release
# GEOS-allocated string buffers.
if GEOS_VERSION >= (3, 1, 1):
    # In versions 3.1.1 and above, `GEOSFree` was added to the C API
    # because `free` isn't always available on all platforms.
    free = GEOSFunc('GEOSFree')
    free.argtypes = [c_void_p]
    free.restype = None
else:
    # Getting the `free` routine from the C library of the platform.
    if os.name == 'nt':
        # On NT, use the MS C library.
        libc = CDLL('msvcrt')
    else:
        # On POSIX platforms C library is obtained by passing None into `CDLL`.
        libc = CDLL(None)
    # `free(ptr)` releases the buffer GEOS handed back to us.
    free = libc.free
### ctypes error checking routines ###
def last_arg_byref(args):
    "Returns the last C argument's value by reference."
    # The final ctypes argument is a byref() wrapper; its ._obj is the
    # underlying ctypes instance whose .value we want.
    byref_arg = args[-1]
    return byref_arg._obj.value
def check_dbl(result, func, cargs):
    "Checks the status code and returns the double value passed in by reference."
    if result != 1:
        # Any status other than 1 signals failure; surface it as None.
        return None
    # The double is passed in by reference as the last C argument.
    return cargs[-1]._obj.value
def check_geom(result, func, cargs):
    "Error checking on routines that return Geometries."
    # A truthy result is a valid geometry pointer; pass it through.
    if result:
        return result
    raise GEOSException('Error encountered checking Geometry returned from GEOS C function "%s".' % func.__name__)
def check_minus_one(result, func, cargs):
    "Error checking on routines that should not return -1."
    # -1 is the GEOS error sentinel; everything else is a valid result.
    if result != -1:
        return result
    raise GEOSException('Error encountered in GEOS C function "%s".' % func.__name__)
def check_predicate(result, func, cargs):
    "Error checking for unary/binary predicate functions."
    val = ord(result)  # getting the ordinal from the character
    # GEOS predicates return '\x00' (false), '\x01' (true), or an
    # error indicator for anything else.
    if val == 0:
        return False
    if val == 1:
        return True
    raise GEOSException('Error encountered on GEOS C predicate function "%s".' % func.__name__)
def check_sized_string(result, func, cargs):
    """
    Error checking for routines that return explicitly sized strings.
    This frees the memory allocated by GEOS at the result pointer.
    """
    if not result:
        raise GEOSException('Invalid string pointer returned by GEOS C function "%s"' % func.__name__)
    # A c_size_t object is passed in by reference for the second
    # argument on these routines, and its needed to determine the
    # correct size.
    size = cargs[-1]._obj.value
    string_value = string_at(result, size)
    # Freeing the memory allocated within GEOS
    free(result)
    return string_value
def check_string(result, func, cargs):
    """
    Error checking for routines that return strings.
    This frees the memory allocated by GEOS at the result pointer.
    """
    if not result:
        raise GEOSException('Error encountered checking string return value in GEOS C function "%s".' % func.__name__)
    # Getting the string value at the pointer address, then freeing
    # the memory allocated within GEOS.
    string_value = string_at(result)
    free(result)
    return string_value
def check_zero(result, func, cargs):
    "Error checking on routines that should not return 0."
    # Zero is the GEOS error sentinel here; anything else is valid.
    if result != 0:
        return result
    raise GEOSException('Error encountered in GEOS C function "%s".' % func.__name__)
| mit |
mcdeaton13/Tax-Calculator | taxcalc/utils.py | 2 | 19436 | import numpy as np
import pandas as pd
from pandas import DataFrame
from collections import defaultdict
# Columns pulled off a Calculator to build the summary frame (see results()).
STATS_COLUMNS = ['_expanded_income', 'c00100', '_standard', 'c04470', 'c04600',
                 'c04800', 'c05200', 'c62100', 'c09600', 'c05800', 'c09200',
                 '_refund', 'c07100', '_ospctax', 's006']
# each entry in this array corresponds to the same entry in the array
# TABLE_LABELS below. this allows us to use TABLE_LABELS to map a
# label to the correct column in our distribution table
TABLE_COLUMNS = ['s006', 'c00100', 'num_returns_StandardDed', '_standard',
                 'num_returns_ItemDed', 'c04470', 'c04600', 'c04800', 'c05200',
                 'c62100', 'num_returns_AMT', 'c09600', 'c05800', 'c07100',
                 'c09200', '_refund', '_ospctax']
TABLE_LABELS = ['Returns', 'AGI', 'Standard Deduction Filers',
                'Standard Deduction', 'Itemizers',
                'Itemized Deduction', 'Personal Exemption',
                'Taxable Income', 'Regular Tax', 'AMTI', 'AMT Filers', 'AMT',
                'Tax before Credits', 'Non-refundable Credits',
                'Tax before Refundable Credits', 'Refundable Credits',
                'Revenue']
# used in our difference table to label the columns
DIFF_TABLE_LABELS = ["Tax Units with Tax Cut", "Tax Units with Tax Increase",
                     "Count", "Average Tax Change", "Total Tax Difference",
                     "Percent with Tax Increase", "Percent with Tax Decrease",
                     "Share of Overall Change"]
# Income-bin edge lists consumed by add_income_bins(); the 1e14 sentinels
# bound the open-ended bottom and top bins.
LARGE_INCOME_BINS = [-1e14, 0, 9999, 19999, 29999, 39999, 49999, 74999, 99999,
                     200000, 1e14]
SMALL_INCOME_BINS = [-1e14, 0, 4999, 9999, 14999, 19999, 24999, 29999, 39999,
                     49999, 74999, 99999, 199999, 499999, 999999, 1499999,
                     1999999, 4999999, 9999999, 1e14]
WEBAPP_INCOME_BINS = [-1e14, 0, 9999, 19999, 29999, 39999, 49999, 74999, 99999,
                      199999, 499999, 1000000, 1e14]
def extract_array(f):
    """
    A sanity check decorator. When combined with numba.vectorize
    or guvectorize, it provides the same capability as dataframe_vectorize
    or dataframe_guvectorize
    """
    def wrapper(*args, **kwargs):
        # Unwrap each pandas object to its underlying ndarray before calling.
        return f(*[arg.values for arg in args])
    return wrapper
def expand_1D(x, inflate, inflation_rates, num_years):
    """
    Expand the given data to account for the given number of budget years.
    If necessary, pad out additional years by increasing the last given
    year at the provided inflation rate.
    """
    if not isinstance(x, np.ndarray):
        # Promote a scalar to a one-element array and re-dispatch.
        return expand_1D(np.array([x]), inflate, inflation_rates, num_years)
    if len(x) >= num_years:
        return x
    ans = np.zeros(num_years, dtype='f8')
    ans[:len(x)] = x
    if inflate:
        # Grow the last known value year by year at the matching rate.
        extra = []
        cur = x[-1]
        for i in range(0, num_years - len(x)):
            cur *= (1. + inflation_rates[i + len(x) - 1])
            extra.append(cur)
    else:
        # Repeat the last known value unchanged for the remaining years.
        extra = [float(x[-1])] * (num_years - len(x))
    ans[len(x):] = extra
    # Cast back to the input dtype (may truncate inflated floats).
    return ans.astype(x.dtype, casting='unsafe')
def expand_2D(x, inflate, inflation_rates, num_years):
    """
    Expand the given data to account for the given number of budget years.
    For 2D arrays, we expand out the number of rows until we have num_years
    number of rows. For each expanded row, we inflate by the given inflation
    rate.

    -1 entries (placed by strip_Nones for None values) mark cells whose
    value should be computed by inflation rather than taken from the user.
    """
    if isinstance(x, np.ndarray):
        # Look for -1s and create masks if present
        # last_good_row: index of the last row containing no -1 markers;
        # only rows up to that point are trusted user data.
        last_good_row = -1
        keep_user_data_mask = []
        keep_calc_data_mask = []
        has_nones = False
        for row in x:
            keep_user_data_mask.append([1 if i != -1 else 0 for i in row])
            keep_calc_data_mask.append([0 if i != -1 else 1 for i in row])
            if not np.any(row == -1):
                last_good_row += 1
            else:
                has_nones = True
        if x.shape[0] >= num_years and not has_nones:
            return x
        else:
            if has_nones:
                c = x[:last_good_row + 1]
                keep_user_data_mask = np.array(keep_user_data_mask)
                keep_calc_data_mask = np.array(keep_calc_data_mask)
            else:
                c = x
            # ans holds the fully expanded table; rows beyond the trusted
            # data are either inflated forward or copied verbatim.
            ans = np.zeros((num_years, c.shape[1]))
            ans[:len(c), :] = c
            if inflate:
                extra = []
                cur = c[-1]
                for i in range(0, num_years - len(c)):
                    # inf_idx is the absolute year offset of the rate used
                    # to inflate from the previous year to this one.
                    inf_idx = i + len(c) - 1
                    cur = np.array(cur * (1. + inflation_rates[inf_idx]))
                    extra.append(cur)
            else:
                extra = [c[-1, :] for i in
                         range(1, num_years - len(c) + 1)]
            ans[len(c):, :] = extra
            if has_nones:
                # Use masks to "mask in" provided data and "mask out"
                # data we don't need (produced in rows with a None value)
                ans = ans * keep_calc_data_mask
                user_vals = x * keep_user_data_mask
                ans = ans + user_vals
            return ans.astype(c.dtype, casting='unsafe')
    # Non-array input (e.g. list of lists) is converted and re-dispatched.
    return expand_2D(np.array(x), inflate, inflation_rates, num_years)
def strip_Nones(x):
    """
    Takes a list of scalar values or a list of lists.
    If it is a list of scalar values, when None is encountered, we
    return everything encountered before. If a list of lists, we
    replace None with -1 and return
    Parameters
    ----------
    x: list
    Returns
    -------
    list
    """
    accum = []
    for val in x:
        if val is None:
            # A scalar None terminates the scan: keep only the prefix.
            break
        if isinstance(val, list):
            # Inner lists are scrubbed in place: None becomes -1.
            for i, v in enumerate(val):
                if v is None:
                    val[i] = -1
        accum.append(val)
    return accum
def expand_array(x, inflate, inflation_rates, num_years):
    """
    Dispatch to either expand_1D or expand2D depending on the dimension of x
    Parameters
    ----------
    x : value to expand
    inflate: Boolean
        As we expand, inflate values if this is True, otherwise, just copy
    inflation_rate: float
        Yearly inflation reate
    num_years: int
        Number of budget years to expand
    Returns
    -------
    expanded numpy array
    """
    x = np.array(strip_Nones(x))
    try:
        dims = len(x.shape)
        if dims == 1:
            return expand_1D(x, inflate, inflation_rates, num_years)
        if dims == 2:
            return expand_2D(x, inflate, inflation_rates, num_years)
        raise ValueError("Need a 1D or 2D array")
    except AttributeError:
        # Objects without a .shape cannot be expanded.
        raise ValueError("Must pass a numpy array")
def count_gt_zero(agg):
    # Number of strictly positive entries.
    return sum(1 for a in agg if a > 0)
def count_lt_zero(agg):
    # Number of strictly negative entries.
    return sum(1 for a in agg if a < 0)
def weighted_count_lt_zero(agg, col_name, tolerance=-0.001):
    # Total weight (s006) of rows whose col_name value is below tolerance.
    below = agg[col_name] < tolerance
    return agg[below]['s006'].sum()
def weighted_count_gt_zero(agg, col_name, tolerance=0.001):
    # Total weight (s006) of rows whose col_name value exceeds tolerance.
    above = agg[col_name] > tolerance
    return agg[above]['s006'].sum()
def weighted_count(agg):
    # Total population weight.
    return agg['s006'].sum()
def weighted_mean(agg, col_name):
    # Weight-averaged value of col_name.
    weighted_total = (agg[col_name] * agg['s006']).sum()
    return float(weighted_total) / float(agg['s006'].sum())
def weighted_sum(agg, col_name):
    # Weighted total of col_name.
    return (agg[col_name] * agg['s006']).sum()
def weighted_perc_inc(agg, col_name):
    # Weighted share of rows with an increase (above tolerance).
    return float(weighted_count_gt_zero(agg, col_name)) / float(weighted_count(agg))
def weighted_perc_dec(agg, col_name):
    # Weighted share of rows with a decrease (below tolerance).
    return float(weighted_count_lt_zero(agg, col_name)) / float(weighted_count(agg))
def weighted_share_of_total(agg, col_name, total):
    # Weighted sum of col_name as a fraction of the given total.
    return float(weighted_sum(agg, col_name)) / float(total)
def add_weighted_decile_bins(df, income_measure='_expanded_income'):
    """
    Add a column of income bins based on each 10% of the income_measure,
    weighted by s006.

    The default income_measure is `expanded_income`, but `c00100` also works.
    This function will serve as a "grouper" later on.

    Returns the same DataFrame (sorted in place by income_measure) with
    `cumsum_weights` and `bins` (decile number 1..10) columns added.
    """
    # First, sort by income_measure. DataFrame.sort() was removed from
    # pandas; sort_values() is the supported spelling.
    df.sort_values(by=income_measure, inplace=True)
    # Next, do a cumulative sum by the weights
    df['cumsum_weights'] = np.cumsum(df['s006'].values)
    # Max value of cum sum of weights
    max_ = df['cumsum_weights'].values[-1]
    # Create 10 bins and labels based on this cumulative weight
    bins = [0] + list(np.arange(1, 11) * (max_ / 10.0))
    # BUG FIX: labels used to be wrapped in an extra list and passed
    # positionally, which bound them to pd.cut's `right` parameter and
    # silently discarded them. Pass the decile numbers 1..10 explicitly.
    labels = list(range(1, 11))
    # Groupby weighted deciles
    df['bins'] = pd.cut(df['cumsum_weights'], bins, labels=labels)
    return df
def add_income_bins(df, compare_with="soi", bins=None, right=True,
                    income_measure='_expanded_income'):
    """
    Add a column of income bins of income_measure using pandas 'cut'.
    This will serve as a "grouper" later on.

    Parameters
    ----------
    df: DataFrame object
        the object to which we are adding bins
    compare_with: String, optional
        options for input: 'tpc', 'soi', 'webapp'
        determines which types of bins will be added
        default: 'soi'
    bins: iterable of scalars, optional income breakpoints.
        Follows pandas convention. The breakpoint is inclusive if
        right=True. This argument overrides any choice of compare_with.
    right : bool, optional
        Indicates whether the bins include the rightmost edge or not.
        If right == True (the default), then the bins [1,2,3,4]
        indicate (1,2], (2,3], (3,4].

    Returns
    -------
    df: DataFrame object
        the original input that bins have been added to
    """
    if not bins:
        # Explicit bins were not given: look up the named preset.
        presets = {"tpc": LARGE_INCOME_BINS,
                   "soi": SMALL_INCOME_BINS,
                   "webapp": WEBAPP_INCOME_BINS}
        try:
            bins = presets[compare_with]
        except KeyError:
            msg = "Unknown compare_with arg {0}".format(compare_with)
            raise ValueError(msg)
    # Groupby income_measure bins
    df['bins'] = pd.cut(df[income_measure], bins, right=right)
    return df
def means_and_comparisons(df, col_name, gp, weighted_total):
    """
    Using grouped values, perform aggregate operations
    to populate
    df: DataFrame for full results of calculation
    col_name: the column name to calculate against
    gp: grouped DataFrame
    weighted_total: denominator for the share_of_change column
    """
    # Who has a tax cut, and who has a tax increase
    # Each gp.apply() call evaluates one of the module-level weighted_*
    # helpers per group; the results become columns of the output frame.
    diffs = gp.apply(weighted_count_lt_zero, col_name)
    diffs = DataFrame(data=diffs, columns=['tax_cut'])
    diffs['tax_inc'] = gp.apply(weighted_count_gt_zero, col_name)
    diffs['count'] = gp.apply(weighted_count)
    diffs['mean'] = gp.apply(weighted_mean, col_name)
    diffs['tot_change'] = gp.apply(weighted_sum, col_name)
    diffs['perc_inc'] = gp.apply(weighted_perc_inc, col_name)
    diffs['perc_cut'] = gp.apply(weighted_perc_dec, col_name)
    diffs['share_of_change'] = gp.apply(weighted_share_of_total,
                                        col_name, weighted_total)
    return diffs
def weighted(df, X):
    """Multiply each requested column (except the s006 weight columns)
    by the s006 weight, in place, and return the same DataFrame."""
    for colname in X:
        # Columns named s006* are the weights themselves; leave them alone.
        if not colname.startswith('s006'):
            df[colname] = df[colname] * df['s006']
    return df
def get_sums(df, na=False):
    """
    Gets the unweighted sum of each column, saving the col name
    and the corresponding sum

    When na is True, every sum is reported as the string 'n/a' instead.

    Returns
    -------
    pandas.Series
    """
    sums = defaultdict(lambda: 0)
    for col in df.columns.tolist():
        if col == 'bins':
            # The grouping column is never summed.
            continue
        sums[col] = 'n/a' if na else (df[col]).sum()
    return pd.Series(sums, name='sums')
def results(c):
    """
    Gets the results from the tax calculator and organizes them into a table

    Parameters
    ----------
    c : Calculator object
        May also be any object exposing the STATS_COLUMNS attributes
        directly (duck-typed fallback below).

    Returns
    -------
    DataFrame object
    """
    outputs = []
    for col in STATS_COLUMNS:
        if hasattr(c, 'records') and hasattr(c, 'params'):
            # Full Calculator: parameters take precedence over records
            # when both define the same attribute name.
            if hasattr(c.params, col):
                outputs.append(getattr(c.params, col))
            else:
                outputs.append(getattr(c.records, col))
        else:
            # Plain object: read the column straight off it.
            outputs.append(getattr(c, col))
    return DataFrame(data=np.column_stack(outputs), columns=STATS_COLUMNS)
def weighted_avg_allcols(df, cols, income_measure='_expanded_income'):
    """Build a per-bin frame of weighted averages for `cols`.

    Count-style columns (s006 and the num_returns_* columns) are summed
    per bin; every other column is averaged with weighted_mean.
    """
    diff = DataFrame(df.groupby('bins', as_index=False).apply(weighted_mean,
                                                              income_measure),
                     columns=[income_measure])
    for col in cols:
        if (col == "s006" or col == 'num_returns_StandardDed' or
                col == 'num_returns_ItemDed' or col == 'num_returns_AMT'):
            # Counts are summed, not averaged.
            diff[col] = df.groupby('bins', as_index=False)[col].sum()[col]
        elif col != income_measure:
            diff[col] = df.groupby('bins', as_index=False).apply(weighted_mean,
                                                                 col)
    return diff
def create_distribution_table(calc, groupby, result_type,
                              income_measure='_expanded_income'):
    """
    Gets results given by the tax calculator, sorts them based on groupby, and
    manipulates them based on result_type. Returns these as a table

    Parameters
    ----------
    calc : the Calculator object
    groupby : String object
        options for input: 'weighted_deciles', 'small_income_bins',
        'large_income_bins', 'webapp_income_bins';
        determines how the columns in the resulting DataFrame are sorted
    result_type : String object
        options for input: 'weighted_sum' or 'weighted_avg';
        determines how the data should be manipulated

    Notes
    -----
    Taxpayer Characteristics:
        c04470 : Total itemized deduction
        c00100 : AGI (Defecit)
        c09600 : Alternative minimum tax
        s006 : used to weight population

    Returns
    -------
    DataFrame object
    """
    res = results(calc)
    # weight of returns with positive AGI and
    # itemized deduction greater than standard deduction
    res['c04470'] = res['c04470'].where(((res['c00100'] > 0) &
                                         (res['c04470'] > res['_standard'])), 0)
    # weight of returns with positive AGI and itemized deduction
    res['num_returns_ItemDed'] = res['s006'].where(((res['c00100'] > 0) &
                                                    (res['c04470'] > 0)), 0)
    # weight of returns with positive AGI and standard deduction
    res['num_returns_StandardDed'] = res['s006'].where(((res['c00100'] > 0) &
                                                        (res['_standard'] > 0)),
                                                       0)
    # weight of returns with positive Alternative Minimum Tax (AMT)
    res['num_returns_AMT'] = res['s006'].where(res['c09600'] > 0, 0)
    # sorts the data
    if groupby == "weighted_deciles":
        df = add_weighted_decile_bins(res, income_measure=income_measure)
    elif groupby == "small_income_bins":
        df = add_income_bins(res, compare_with="soi",
                             income_measure=income_measure)
    elif groupby == "large_income_bins":
        df = add_income_bins(res, compare_with="tpc",
                             income_measure=income_measure)
    elif groupby == "webapp_income_bins":
        df = add_income_bins(res, compare_with="webapp",
                             income_measure=income_measure)
    else:
        err = ("groupby must be either 'weighted_deciles' or"
               "'small_income_bins' or 'large_income_bins' or"
               "'webapp_income_bins'")
        raise ValueError(err)
    # manipulates the data
    # NOTE(review): this mutates the global pandas display format as a
    # side effect -- confirm that is intentional.
    pd.options.display.float_format = '{:8,.0f}'.format
    if result_type == "weighted_sum":
        df = weighted(df, STATS_COLUMNS)
        gp_mean = df.groupby('bins', as_index=False)[TABLE_COLUMNS].sum()
        gp_mean.drop('bins', axis=1, inplace=True)
        sum_row = get_sums(df)[TABLE_COLUMNS]
    elif result_type == "weighted_avg":
        gp_mean = weighted_avg_allcols(df, TABLE_COLUMNS,
                                       income_measure=income_measure)
        # Averages cannot be meaningfully totaled, so the sums row is 'n/a'.
        sum_row = get_sums(df, na=True)[TABLE_COLUMNS]
    else:
        err = ("result_type must be either 'weighted_sum' or 'weighted_avg")
        raise ValueError(err)
    return gp_mean.append(sum_row)
def create_difference_table(calc1, calc2, groupby,
                            income_measure='_expanded_income'):
    """
    Gets results given by the two different tax calculators and outputs
    a table that compares the differing results.
    The table is sorted according the the groupby input.

    Parameters
    ----------
    calc1, the first Calculator object
    calc2, the other Calculator object
    groupby, String object
        options for input: 'weighted_deciles', 'small_income_bins',
        'large_income_bins', 'webapp_income_bins'
        determines how the columns in the resulting DataFrame are sorted

    Returns
    -------
    DataFrame object
    """
    res1 = results(calc1)
    res2 = results(calc2)
    # Binning is computed on the second (reform) calculator's results.
    if groupby == "weighted_deciles":
        df = add_weighted_decile_bins(res2, income_measure=income_measure)
    elif groupby == "small_income_bins":
        df = add_income_bins(res2, compare_with="soi",
                             income_measure=income_measure)
    elif groupby == "large_income_bins":
        df = add_income_bins(res2, compare_with="tpc",
                             income_measure=income_measure)
    elif groupby == "webapp_income_bins":
        df = add_income_bins(res2, compare_with="webapp",
                             income_measure=income_measure)
    else:
        err = ("groupby must be either"
               "'weighted_deciles' or 'small_income_bins'"
               "or 'large_income_bins' or 'webapp_income_bins'")
        raise ValueError(err)
    # Difference in plans
    # Positive values are the magnitude of the tax increase
    # Negative values are the magnitude of the tax decrease
    res2['tax_diff'] = res2['_ospctax'] - res1['_ospctax']
    diffs = means_and_comparisons(res2, 'tax_diff',
                                  df.groupby('bins', as_index=False),
                                  (res2['tax_diff'] * res2['s006']).sum())
    sum_row = get_sums(diffs)[diffs.columns.tolist()]
    diffs = diffs.append(sum_row)
    pd.options.display.float_format = '{:8,.0f}'.format
    # Re-express the fraction columns as percentage strings for display.
    srs_inc = ["{0:.2f}%".format(val * 100) for val in diffs['perc_inc']]
    diffs['perc_inc'] = pd.Series(srs_inc, index=diffs.index)
    srs_cut = ["{0:.2f}%".format(val * 100) for val in diffs['perc_cut']]
    diffs['perc_cut'] = pd.Series(srs_cut, index=diffs.index)
    srs_change = ["{0:.2f}%".format(val * 100)
                  for val in diffs['share_of_change']]
    diffs['share_of_change'] = pd.Series(srs_change, index=diffs.index)
    # columns containing weighted values relative to the binning mechanism
    non_sum_cols = [x for x in diffs.columns.tolist()
                    if 'mean' in x or 'perc' in x]
    for col in non_sum_cols:
        # Means/percentages cannot be summed, so blank them in the sums row.
        diffs.loc['sums', col] = 'n/a'
    return diffs
| mit |
pyjs/pyjs | examples/misc/djangowanted/wanted/views.py | 6 | 3300 | # Create your views here.
from jsonrpc import *
from djangowanted.wanted.models import Item, Flag, FlagType, Page
from django.template import loader
from django.shortcuts import render_to_response
from django.template import RequestContext, Template
from django.http import HttpResponseRedirect, HttpResponse
import urllib
from copy import copy
from wanted.forms import ItemForm
# JSON-RPC endpoints: one for form validation/processing, one for the
# data-access methods registered with @jsonremote below.
formsservice = FormProcessor({'itemform': ItemForm})
service = JSONRPCService()
def index(request, path=None):
    """Serve the page named by the `page` query parameter.

    Falls back to the static 'index.html' template when no matching Page
    or database-stored template Item exists.
    """
    # The URL `path` argument is ignored; the page name comes from the
    # query string so that history tokens keep working (see redirect).
    path = request.GET.get('page', None)
    if path == '':
        path = 'index'
    if path is None:
        # workaround in history tokens: must have a query
        return HttpResponseRedirect("./?page=#index")
    try:
        p = Page.objects.get(name=path)
    except Page.DoesNotExist:
        p = None
    if not p and path == 'index':
        return render_to_response('index.html', {'title':'', 'noscript':''})
    # NOTE(review): if p is None here (unknown page name other than
    # 'index'), p.name below raises AttributeError -- confirm intended.
    args = {'title': p.name,
            'noscript': p.text
            }
    context_instance=RequestContext(request)
    # Autoescaping disabled -- presumably Page.text holds raw HTML; verify.
    context_instance.autoescape=False
    try:
        # A template stored in the database overrides the file template.
        template = Item.objects.get(name='index.html')
    except Item.DoesNotExist:
        template = None
    if not template:
        return render_to_response('index.html', args, context_instance)
    tpl = loader.get_template_from_string(template)
    context_instance.update(args)
    tpl = tpl.render(context_instance)
    return HttpResponse(tpl)
def _getItem (item):
    """Serialize an Item, attaching one Flag attribute per FlagType.

    Missing flags are represented by unsaved empty Flag instances.
    """
    fields = copy(item._meta.get_all_field_names())
    # Exclude the reverse 'flag' relation and the primary key from output.
    del fields[fields.index('flag')]
    del fields[fields.index('id')]
    for f in FlagType.objects.all():
        fields.append(f.name)
        try:
            fg = Flag.objects.get(item=item.id, type=f.id)
        except Flag.DoesNotExist:
            # No stored flag of this type: expose an empty placeholder.
            fg = Flag()
        setattr(item, f.name, fg)
    return json_convert([item], fields=fields)[0]
@jsonremote(service)
def getItem (request, num):
    # Fetch a single Item by primary key; None when it does not exist.
    try:
        item = Item.objects.get(id=num)
    except Item.DoesNotExist:
        return None
    return _getItem(item)
@jsonremote(service)
def getItemsByName (request, name):
    # NOTE(review): the queryset is wrapped in an extra list here, unlike
    # getItems below -- confirm json_convert expects that.
    return json_convert([Item.objects.filter(name=name)])
@jsonremote(service)
def getItems (request):
    # All items, serialized with default fields.
    return json_convert(Item.objects.all())
@jsonremote(service)
def updateItem (request, item):
    # Update name/text of an existing Item and return the full list.
    t = Item.objects.get(id=item['id'])
    t.name = item['name']
    t.text = item['text']
    t.save()
    return getItems(request)
@jsonremote(service)
def addItem (request, item):
    """Create an Item plus one Flag per FlagType and return it serialized."""
    t = Item()
    t.name = item['name']
    t.short_description = item['short_description']
    t.price = item['price']
    t.save()
    fields = copy(t._meta.get_all_field_names())
    del fields[fields.index('flag')]
    del fields[fields.index('id')]
    for f in FlagType.objects.all():
        fields.append(f.name)
        fv = item[f.name]
        # NOTE(review): `d` is built but never used -- dead code?
        d = {'item': t.id, 'type': f.id, 'value': fv}
        try:
            fg = Flag.objects.get(item=t.id, type=f.id)
        except Flag.DoesNotExist:
            fg = Flag()
        fg.item = t
        fg.type = f
        fg.value = fv
        fg.save()
        setattr(t, f.name, fg)
    return json_convert([t], fields=fields)[0]
@jsonremote(service)
def deleteItem (request, num):
    # Remove the Item and echo back its id for the client.
    t = Item.objects.get(id=num)
    t.delete()
    return num
| apache-2.0 |
zaeleus/rust | src/etc/mirror-all-snapshots.py | 53 | 1372 | #!/usr/bin/env python
#
# Copyright 2011-2013 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
import os
from snapshot import *
# Walk the snapshot registry and download every listed snapshot archive
# that is not already mirrored locally, verifying each file's hash.
date = None
rev = None
platform = None
snap = None
# FIX: read the registry inside a `with` block so the file handle is
# closed promptly instead of leaking for the life of the process.
with open(snapshotfile) as f:
    registry_lines = f.readlines()
for i, line in enumerate(registry_lines, 1):
    parsed = parse_line(i, line)
    if not parsed:
        continue
    if parsed["type"] == "snapshot":
        # A snapshot header establishes the date/rev for following files.
        date = parsed["date"]
        rev = parsed["rev"]
    elif rev is not None and parsed["type"] == "file":
        platform = parsed["platform"]
        hsh = parsed["hash"]
        snap = full_snapshot_name(date, rev, platform, hsh)
        dl = os.path.join(download_dir_base, snap)
        url = download_url_base + "/" + snap
        if (not os.path.exists(dl)):
            print("downloading " + url)
            get_url_to_file(url, dl)
        # Verify the (possibly pre-existing) local file against the
        # hash embedded in the snapshot filename.
        if (snap_filename_hash_part(snap) == hash_file(dl)):
            print("got download with ok hash")
        else:
            raise Exception("bad hash on download")
| apache-2.0 |
KeyWeeUsr/kivy | kivy/uix/gridlayout.py | 6 | 19254 | '''
Grid Layout
===========
.. only:: html
.. image:: images/gridlayout.gif
:align: right
.. only:: latex
.. image:: images/gridlayout.png
:align: right
.. versionadded:: 1.0.4
The :class:`GridLayout` arranges children in a matrix. It takes the available
space and divides it into columns and rows, then adds widgets to the resulting
"cells".
.. versionchanged:: 1.0.7
The implementation has changed to use the widget size_hint for calculating
column/row sizes. `uniform_width` and `uniform_height` have been removed
and other properties have added to give you more control.
Background
----------
Unlike many other toolkits, you cannot explicitly place a widget in a specific
column/row. Each child is automatically assigned a position determined by the
layout configuration and the child's index in the children list.
A GridLayout must always have at least one input constraint:
:attr:`GridLayout.cols` or :attr:`GridLayout.rows`. If you do not specify cols
or rows, the Layout will throw an exception.
Column Width and Row Height
---------------------------
The column width/row height are determined in 3 steps:
- The initial size is given by the :attr:`col_default_width` and
:attr:`row_default_height` properties. To customize the size of a single
column or row, use :attr:`cols_minimum` or :attr:`rows_minimum`.
- The `size_hint_x`/`size_hint_y` of the children are taken into account.
If no widgets have a size hint, the maximum size is used for all
children.
- You can force the default size by setting the :attr:`col_force_default`
or :attr:`row_force_default` property. This will force the layout to
ignore the `width` and `size_hint` properties of children and use the
default size.
Using a GridLayout
------------------
In the example below, all widgets will have an equal size. By default, the
`size_hint` is (1, 1), so a Widget will take the full size of the parent::
layout = GridLayout(cols=2)
layout.add_widget(Button(text='Hello 1'))
layout.add_widget(Button(text='World 1'))
layout.add_widget(Button(text='Hello 2'))
layout.add_widget(Button(text='World 2'))
.. image:: images/gridlayout_1.jpg
Now, let's fix the size of Hello buttons to 100px instead of using
size_hint_x=1::
layout = GridLayout(cols=2)
layout.add_widget(Button(text='Hello 1', size_hint_x=None, width=100))
layout.add_widget(Button(text='World 1'))
layout.add_widget(Button(text='Hello 2', size_hint_x=None, width=100))
layout.add_widget(Button(text='World 2'))
.. image:: images/gridlayout_2.jpg
Next, let's fix the row height to a specific size::
layout = GridLayout(cols=2, row_force_default=True, row_default_height=40)
layout.add_widget(Button(text='Hello 1', size_hint_x=None, width=100))
layout.add_widget(Button(text='World 1'))
layout.add_widget(Button(text='Hello 2', size_hint_x=None, width=100))
layout.add_widget(Button(text='World 2'))
.. image:: images/gridlayout_3.jpg
'''
__all__ = ('GridLayout', 'GridLayoutException')
from kivy.logger import Logger
from kivy.uix.layout import Layout
from kivy.properties import NumericProperty, BooleanProperty, DictProperty, \
BoundedNumericProperty, ReferenceListProperty, VariableListProperty, \
ObjectProperty, StringProperty
from math import ceil
def nmax(*args):
    # Largest argument, ignoring any Nones.
    return max(a for a in args if a is not None)
def nmin(*args):
    # Smallest argument, ignoring any Nones.
    return min(a for a in args if a is not None)
class GridLayoutException(Exception):
    '''Exception for errors if the grid layout manipulation fails.

    Raised e.g. when more children are added than rows * cols can hold
    (see GridLayout.on_children).
    '''
    pass
class GridLayout(Layout):
'''Grid layout class. See module documentation for more information.
'''
spacing = VariableListProperty([0, 0], length=2)
'''Spacing between children: [spacing_horizontal, spacing_vertical].
spacing also accepts a one argument form [spacing].
:attr:`spacing` is a
:class:`~kivy.properties.VariableListProperty` and defaults to [0, 0].
'''
padding = VariableListProperty([0, 0, 0, 0])
'''Padding between the layout box and it's children: [padding_left,
padding_top, padding_right, padding_bottom].
padding also accepts a two argument form [padding_horizontal,
padding_vertical] and a one argument form [padding].
.. versionchanged:: 1.7.0
Replaced NumericProperty with VariableListProperty.
:attr:`padding` is a :class:`~kivy.properties.VariableListProperty` and
defaults to [0, 0, 0, 0].
'''
cols = BoundedNumericProperty(None, min=0, allownone=True)
'''Number of columns in the grid.
.. versionchanged:: 1.0.8
Changed from a NumericProperty to BoundedNumericProperty. You can no
longer set this to a negative value.
:attr:`cols` is a :class:`~kivy.properties.NumericProperty` and defaults to
0.
'''
rows = BoundedNumericProperty(None, min=0, allownone=True)
'''Number of rows in the grid.
.. versionchanged:: 1.0.8
Changed from a NumericProperty to a BoundedNumericProperty. You can no
longer set this to a negative value.
:attr:`rows` is a :class:`~kivy.properties.NumericProperty` and defaults to
0.
'''
col_default_width = NumericProperty(0)
'''Default minimum size to use for a column.
.. versionadded:: 1.0.7
:attr:`col_default_width` is a :class:`~kivy.properties.NumericProperty`
and defaults to 0.
'''
row_default_height = NumericProperty(0)
'''Default minimum size to use for row.
.. versionadded:: 1.0.7
:attr:`row_default_height` is a :class:`~kivy.properties.NumericProperty`
and defaults to 0.
'''
col_force_default = BooleanProperty(False)
'''If True, ignore the width and size_hint_x of the child and use the
default column width.
.. versionadded:: 1.0.7
:attr:`col_force_default` is a :class:`~kivy.properties.BooleanProperty`
and defaults to False.
'''
row_force_default = BooleanProperty(False)
'''If True, ignore the height and size_hint_y of the child and use the
default row height.
.. versionadded:: 1.0.7
:attr:`row_force_default` is a :class:`~kivy.properties.BooleanProperty`
and defaults to False.
'''
cols_minimum = DictProperty({})
'''Dict of minimum width for each column. The dictionary keys are the
column numbers, e.g. 0, 1, 2...
.. versionadded:: 1.0.7
:attr:`cols_minimum` is a :class:`~kivy.properties.DictProperty` and
defaults to {}.
'''
rows_minimum = DictProperty({})
'''Dict of minimum height for each row. The dictionary keys are the
row numbers, e.g. 0, 1, 2...
.. versionadded:: 1.0.7
:attr:`rows_minimum` is a :class:`~kivy.properties.DictProperty` and
defaults to {}.
'''
minimum_width = NumericProperty(0)
'''Automatically computed minimum width needed to contain all children.
.. versionadded:: 1.0.8
:attr:`minimum_width` is a :class:`~kivy.properties.NumericProperty` and
defaults to 0. It is read only.
'''
minimum_height = NumericProperty(0)
'''Automatically computed minimum height needed to contain all children.
.. versionadded:: 1.0.8
:attr:`minimum_height` is a :class:`~kivy.properties.NumericProperty` and
defaults to 0. It is read only.
'''
minimum_size = ReferenceListProperty(minimum_width, minimum_height)
'''Automatically computed minimum size needed to contain all children.
.. versionadded:: 1.0.8
:attr:`minimum_size` is a
:class:`~kivy.properties.ReferenceListProperty` of
(:attr:`minimum_width`, :attr:`minimum_height`) properties. It is read
only.
'''
def __init__(self, **kwargs):
self._cols = self._rows = None
super(GridLayout, self).__init__(**kwargs)
fbind = self.fbind
update = self._trigger_layout
fbind('col_default_width', update)
fbind('row_default_height', update)
fbind('col_force_default', update)
fbind('row_force_default', update)
fbind('cols', update)
fbind('rows', update)
fbind('parent', update)
fbind('spacing', update)
fbind('padding', update)
fbind('children', update)
fbind('size', update)
fbind('pos', update)
def get_max_widgets(self):
if self.cols and self.rows:
return self.rows * self.cols
else:
return None
def on_children(self, instance, value):
# if that makes impossible to construct things with deffered method,
# migrate this test in do_layout, and/or issue a warning.
smax = self.get_max_widgets()
if smax and len(value) > smax:
raise GridLayoutException(
'Too many children in GridLayout. Increase rows/cols!')
    def _init_rows_cols_sizes(self, count):
        # the goal here is to calculate the minimum size of every cols/rows
        # and determine if they have stretch or not
        # `count` is the number of children to place. Returns True when the
        # per-axis bookkeeping arrays were (re)initialized, None otherwise.
        current_cols = self.cols
        current_rows = self.rows
        # if no cols or rows are set, we can't calculate minimum size.
        # the grid must be contrained at least on one side
        if not current_cols and not current_rows:
            Logger.warning('%r have no cols or rows set, '
                           'layout is not triggered.' % self)
            return
        # Derive the unconstrained axis from the child count.
        if current_cols is None:
            current_cols = int(ceil(count / float(current_rows)))
        elif current_rows is None:
            current_rows = int(ceil(count / float(current_cols)))
        current_cols = max(1, current_cols)
        current_rows = max(1, current_rows)
        self._has_hint_bound_x = False
        self._has_hint_bound_y = False
        self._cols_min_size_none = 0.  # min size from all the None hint
        self._rows_min_size_none = 0.  # min size from all the None hint
        # Per-column/per-row minimum sizes and size_hint bookkeeping,
        # filled in later by _fill_rows_cols_sizes().
        self._cols = cols = [self.col_default_width] * current_cols
        self._cols_sh = [None] * current_cols
        self._cols_sh_min = [None] * current_cols
        self._cols_sh_max = [None] * current_cols
        self._rows = rows = [self.row_default_height] * current_rows
        self._rows_sh = [None] * current_rows
        self._rows_sh_min = [None] * current_rows
        self._rows_sh_max = [None] * current_rows
        # update minimum size from the dicts
        items = (i for i in self.cols_minimum.items() if i[0] < len(cols))
        for index, value in items:
            cols[index] = max(value, cols[index])
        items = (i for i in self.rows_minimum.items() if i[0] < len(rows))
        for index, value in items:
            rows[index] = max(value, rows[index])
        return True
def _fill_rows_cols_sizes(self):
cols, rows = self._cols, self._rows
cols_sh, rows_sh = self._cols_sh, self._rows_sh
cols_sh_min, rows_sh_min = self._cols_sh_min, self._rows_sh_min
cols_sh_max, rows_sh_max = self._cols_sh_max, self._rows_sh_max
# calculate minimum size for each columns and rows
n_cols = len(cols)
has_bound_y = has_bound_x = False
for i, child in enumerate(reversed(self.children)):
(shw, shh), (w, h) = child.size_hint, child.size
shw_min, shh_min = child.size_hint_min
shw_max, shh_max = child.size_hint_max
row, col = divmod(i, n_cols)
# compute minimum size / maximum stretch needed
if shw is None:
cols[col] = nmax(cols[col], w)
else:
cols_sh[col] = nmax(cols_sh[col], shw)
if shw_min is not None:
has_bound_x = True
cols_sh_min[col] = nmax(cols_sh_min[col], shw_min)
if shw_max is not None:
has_bound_x = True
cols_sh_max[col] = nmin(cols_sh_max[col], shw_max)
if shh is None:
rows[row] = nmax(rows[row], h)
else:
rows_sh[row] = nmax(rows_sh[row], shh)
if shh_min is not None:
has_bound_y = True
rows_sh_min[col] = nmax(rows_sh_min[col], shh_min)
if shh_max is not None:
has_bound_y = True
rows_sh_max[col] = nmin(rows_sh_max[col], shh_max)
self._has_hint_bound_x = has_bound_x
self._has_hint_bound_y = has_bound_y
    def _update_minimum_size(self):
        """Compute and store ``minimum_size`` from the per-axis
        accumulators, and rebase the hint-min/hint-max lists in place so
        they express only the extra space needed beyond what the fixed
        (hint-None) sizes already guarantee."""
        # calculate minimum width/height needed, starting from padding +
        # spacing
        l, t, r, b = self.padding
        spacing_x, spacing_y = self.spacing
        cols, rows = self._cols, self._rows
        width = l + r + spacing_x * (len(cols) - 1)
        self._cols_min_size_none = sum(cols) + width
        # we need to subtract for the sh_max/min the already guaranteed size
        # due to having a None in the col. So sh_min gets smaller by that size
        # since it's already covered. Similarly for sh_max, because if we
        # already exceeded the max, the subtracted max will be zero, so
        # it won't get larger
        if self._has_hint_bound_x:
            cols_sh_min = self._cols_sh_min
            cols_sh_max = self._cols_sh_max
            for i, (c, sh_min, sh_max) in enumerate(
                    zip(cols, cols_sh_min, cols_sh_max)):
                if sh_min is not None:
                    width += max(c, sh_min)
                    cols_sh_min[i] = max(0., sh_min - c)
                else:
                    width += c
                if sh_max is not None:
                    cols_sh_max[i] = max(0., sh_max - c)
        else:
            # no bounds: minimum width is just the fixed-size total
            width = self._cols_min_size_none
        height = t + b + spacing_y * (len(rows) - 1)
        self._rows_min_size_none = sum(rows) + height
        # same rebasing pass for the vertical axis
        if self._has_hint_bound_y:
            rows_sh_min = self._rows_sh_min
            rows_sh_max = self._rows_sh_max
            for i, (r, sh_min, sh_max) in enumerate(
                    zip(rows, rows_sh_min, rows_sh_max)):
                if sh_min is not None:
                    height += max(r, sh_min)
                    rows_sh_min[i] = max(0., sh_min - r)
                else:
                    height += r
                if sh_max is not None:
                    rows_sh_max[i] = max(0., sh_max - r)
        else:
            height = self._rows_min_size_none
        # finally, set the minimum size
        self.minimum_size = (width, height)
    def _finalize_rows_cols_sizes(self):
        """Distribute the widget's actual width/height to the columns and
        rows: forced-default axes get their fixed sizes, otherwise the
        leftover space (beyond the minimum) is shared proportionally to
        each column/row's size_hint weight, clamped to the hint bounds."""
        selfw = self.width
        selfh = self.height
        # resolve size for each column
        if self.col_force_default:
            cols = [self.col_default_width] * len(self._cols)
            for index, value in self.cols_minimum.items():
                cols[index] = value
            self._cols = cols
        else:
            cols = self._cols
            cols_sh = self._cols_sh
            cols_sh_min = self._cols_sh_min
            cols_weight = float(sum((x for x in cols_sh if x is not None)))
            # space left once every column has its minimum
            stretch_w = max(0., selfw - self._cols_min_size_none)
            if stretch_w > 1e-9:
                if self._has_hint_bound_x:
                    # fix the hints to be within bounds
                    self.layout_hint_with_bounds(
                        cols_weight, stretch_w,
                        sum((c for c in cols_sh_min if c is not None)),
                        cols_sh_min, self._cols_sh_max, cols_sh)
                for index, col_stretch in enumerate(cols_sh):
                    # if the col don't have stretch information, nothing to do
                    if not col_stretch:
                        continue
                    # add to the min width whatever remains from size_hint
                    cols[index] += stretch_w * col_stretch / cols_weight
        # same algo for rows
        if self.row_force_default:
            rows = [self.row_default_height] * len(self._rows)
            for index, value in self.rows_minimum.items():
                rows[index] = value
            self._rows = rows
        else:
            rows = self._rows
            rows_sh = self._rows_sh
            rows_sh_min = self._rows_sh_min
            rows_weight = float(sum((x for x in rows_sh if x is not None)))
            stretch_h = max(0., selfh - self._rows_min_size_none)
            if stretch_h > 1e-9:
                if self._has_hint_bound_y:
                    # fix the hints to be within bounds
                    self.layout_hint_with_bounds(
                        rows_weight, stretch_h,
                        sum((r for r in rows_sh_min if r is not None)),
                        rows_sh_min, self._rows_sh_max, rows_sh)
                for index, row_stretch in enumerate(rows_sh):
                    # if the row don't have stretch information, nothing to do
                    if not row_stretch:
                        continue
                    # add to the min height whatever remains from size_hint
                    rows[index] += stretch_h * row_stretch / rows_weight
def _iterate_layout(self, count):
selfx = self.x
padding_left = self.padding[0]
padding_top = self.padding[1]
spacing_x, spacing_y = self.spacing
i = count - 1
y = self.top - padding_top
cols = self._cols
for row_height in self._rows:
x = selfx + padding_left
for col_width in cols:
if i < 0:
break
yield i, x, y - row_height, col_width, row_height
i = i - 1
x = x + col_width + spacing_x
y -= row_height + spacing_y
    def do_layout(self, *largs):
        """Run the full grid layout pass: size the columns/rows, then
        position and (where size hints apply) resize every child."""
        children = self.children
        # with no children, or no cols/rows constraint, only the padding
        # contributes to the minimum size
        if not children or not self._init_rows_cols_sizes(len(children)):
            l, t, r, b = self.padding
            self.minimum_size = l + r, t + b
            return
        self._fill_rows_cols_sizes()
        self._update_minimum_size()
        self._finalize_rows_cols_sizes()
        for i, x, y, w, h in self._iterate_layout(len(children)):
            c = children[i]
            c.pos = x, y
            shw, shh = c.size_hint
            shw_min, shh_min = c.size_hint_min
            shw_max, shh_max = c.size_hint_max
            # clamp the cell width to the child's hint bounds
            if shw_min is not None:
                if shw_max is not None:
                    w = max(min(w, shw_max), shw_min)
                else:
                    w = max(w, shw_min)
            else:
                if shw_max is not None:
                    w = min(w, shw_max)
            # clamp the cell height to the child's hint bounds
            if shh_min is not None:
                if shh_max is not None:
                    h = max(min(h, shh_max), shh_min)
                else:
                    h = max(h, shh_min)
            else:
                if shh_max is not None:
                    h = min(h, shh_max)
            # only the axes with a size_hint set are driven by the layout
            if shw is None:
                if shh is not None:
                    c.height = h
            else:
                if shh is None:
                    c.width = w
                else:
                    c.size = (w, h)
| mit |
osmr/utct | MXNet/estimator.py | 1 | 1449 | import logging
import mxnet as mx
class Estimator(object):
    """
    Class, which provides recalculation of quality indexes (for classifier!).
    """

    @staticmethod
    def estimate(data_source,
                 checkpoint_path,
                 checkpoint_epoch,
                 ctx,
                 **kwargs):
        """
        Recalculating quality indexes.

        Parameters:
        ----------
        data_source : object
            instance of DataSource class with training/validation iterators
        checkpoint_path : str
            path to checkpoint file with the prefix
        checkpoint_epoch : int
            number of epoch for the checkpoint file
        ctx : object
            instance of MXNet context
        """
        logging.basicConfig()
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        # Restore the saved module from the checkpoint.  Fix: pass the
        # configured `logger` object; the original passed the `logging`
        # module itself, bypassing the logger set up just above.
        mod = mx.mod.Module.load(
            prefix=checkpoint_path,
            epoch=checkpoint_epoch,
            logger=logger,
            context=ctx)
        # Deterministic order so the scores are reproducible.
        train_iter, val_iter = data_source(shuffle=False, **kwargs)
        # Bind for inference only; shapes come from the training iterator.
        mod.bind(
            data_shapes=train_iter.provide_data,
            label_shapes=train_iter.provide_label,
            for_training=False)
        acc_metric = mx.metric.Accuracy()
        logger.info("Train score: {}".format(mod.score(train_iter, acc_metric)))
        logger.info("Validation score: {}".format(mod.score(val_iter, acc_metric)))
| mit |
NickelMedia/phantomjs | src/breakpad/src/tools/gyp/test/generator-output/gyptest-top-all.py | 151 | 1371 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies building a project hierarchy created when the --generator-output=
option is used to put the build configuration files in a separate
directory tree.
"""
import TestGyp

test = TestGyp.TestGyp()

# The source tree must stay pristine: every generated build file goes under
# gypfiles/ thanks to --generator-output.
test.writable(test.workpath('src'), False)

test.run_gyp('prog1.gyp',
             '-Dset_symroot=1',
             '--generator-output=' + test.workpath('gypfiles'),
             chdir='src')

# Build directories are the one exception to the read-only source tree.
for build_dir in ('src/build', 'src/subdir2/build', 'src/subdir3/build'):
    test.writable(test.workpath(build_dir), True)

test.build('prog1.gyp', test.ALL, chdir='gypfiles')

expect = """\
Hello from %s
Hello from inc.h
Hello from inc1/include1.h
Hello from inc2/include2.h
Hello from inc3/include3.h
Hello from subdir2/deeper/deeper.h
"""

# xcode places the built products relative to the source directories; every
# other generator places them under gypfiles/.
for prog, xcode_dir in (('prog1', 'src'),
                        ('prog2', 'src/subdir2'),
                        ('prog3', 'src/subdir3')):
    chdir = xcode_dir if test.format == 'xcode' else 'gypfiles'
    test.run_built_executable(prog, chdir=chdir, stdout=expect % (prog + '.c'))

test.pass_test()
| bsd-3-clause |
hainm/scikit-learn | sklearn/utils/stats.py | 299 | 1692 | import numpy as np
from scipy.stats import rankdata as _sp_rankdata
from .fixes import bincount
# To remove when we support scipy 0.13
def _rankdata(a, method="average"):
    """Backport of scipy's ``rankdata`` supporting only ``method='max'``.

    Ranks are 1-based; every occurrence of a tied value receives the
    maximum of the ranks that would have been assigned to the ties.

    Parameters
    ----------
    a : array_like
        The values to rank (flattened first).
    method : str, optional
        Only ``'max'`` is implemented; any other value raises
        ``NotImplementedError``.

    Returns
    -------
    ranks : ndarray
        Array of the same size as ``a`` containing the 'max' ranks.
    """
    if method != "max":
        raise NotImplementedError()
    uniques, inverse = np.unique(a, return_inverse=True)
    # The cumulative count of elements <= each unique value is exactly the
    # 'max' rank shared by all of that value's ties.
    counts = bincount(inverse, minlength=uniques.size)
    return counts.cumsum()[inverse]
# Prefer scipy's rankdata when it already supports the 'max' method
# (scipy >= 0.13); otherwise fall back to the backport above.  The probe
# call raises TypeError on old scipy versions that lack the argument.
try:
    _sp_rankdata([1.], 'max')
    rankdata = _sp_rankdata
except TypeError:
    rankdata = _rankdata
def _weighted_percentile(array, sample_weight, percentile=50):
"""Compute the weighted ``percentile`` of ``array`` with ``sample_weight``. """
sorted_idx = np.argsort(array)
# Find index of median prediction for each sample
weight_cdf = sample_weight[sorted_idx].cumsum()
percentile_idx = np.searchsorted(
weight_cdf, (percentile / 100.) * weight_cdf[-1])
return array[sorted_idx[percentile_idx]]
| bsd-3-clause |
b0ttl3z/SickRage | lib/oauthlib/oauth2/rfc6749/utils.py | 51 | 2492 | # -*- coding: utf-8 -*-
"""
oauthlib.utils
~~~~~~~~~~~~~~
This module contains utility methods used by various parts of the OAuth 2 spec.
"""
from __future__ import absolute_import, unicode_literals
import os
import datetime
try:
from urllib import quote
except ImportError:
from urllib.parse import quote
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
from oauthlib.common import unicode_type, urldecode
def list_to_scope(scope):
    """Convert a list of scopes to a space separated string."""
    if scope is None or isinstance(scope, unicode_type):
        # Already a string (or absent): pass through unchanged.
        return scope
    if isinstance(scope, (set, tuple, list)):
        return " ".join(unicode_type(s) for s in scope)
    raise ValueError("Invalid scope (%s), must be string, tuple, set, or list." % scope)
def scope_to_list(scope):
    """Convert a space separated string to a list of scopes."""
    if scope is None:
        return None
    if isinstance(scope, (tuple, list, set)):
        # Normalize every element to the unicode string type.
        return [unicode_type(s) for s in scope]
    return scope.strip().split(" ")
def params_from_uri(uri):
    """Extract the query parameters of *uri* as a dict, expanding the
    ``scope`` parameter (when present) into a list of scopes."""
    query = urlparse(uri).query
    params = dict(urldecode(query))
    if 'scope' in params:
        params['scope'] = scope_to_list(params['scope'])
    return params
def host_from_uri(uri):
    """Extract hostname and port from URI.

    Will use default port for HTTP and HTTPS if none is present in the URI.
    """
    scheme_default_ports = {
        'HTTP': '80',
        'HTTPS': '443',
    }
    parsed = urlparse(uri)
    netloc = parsed.netloc
    if ':' in netloc:
        host, port = netloc.split(':', 1)
    else:
        host = netloc
        port = scheme_default_ports.get(parsed.scheme.upper())
    return host, port
def escape(u):
    """Percent-encode a unicode string in an OAuth-compatible fashion
    (everything but '~' and the unreserved characters is escaped).

    TODO: verify whether this can in fact be used for OAuth 2
    """
    if isinstance(u, unicode_type):
        return quote(u.encode('utf-8'), safe=b'~')
    raise ValueError('Only unicode objects are escapable.')
def generate_age(issue_time):
    """Generate an age parameter for MAC authentication draft 00."""
    delta = datetime.datetime.now() - issue_time
    # Total elapsed seconds; the explicit microsecond arithmetic keeps the
    # original Python-2 division semantics intact.
    age = (delta.microseconds + (delta.seconds + delta.days * 24 * 3600)
           * 10 ** 6) / 10 ** 6
    return unicode_type(age)
def is_secure_transport(uri):
    """Check if the uri is over ssl."""
    # The environment flag is an explicit developer opt-out used in tests.
    insecure_allowed = os.environ.get('OAUTHLIB_INSECURE_TRANSPORT')
    return bool(insecure_allowed) or uri.lower().startswith('https://')
| gpl-3.0 |
weidongxu84/info-gatherer | django/contrib/auth/management/__init__.py | 1 | 4700 | """
Creates permissions for all installed apps that need permissions.
"""
import getpass
import locale
import unicodedata
from django.contrib.auth import models as auth_app
from django.db.models import get_models, signals
from django.contrib.auth.models import User
def _get_permission_codename(action, opts):
return u'%s_%s' % (action, opts.object_name.lower())
def _get_all_permissions(opts):
    "Returns (codename, name) for all permissions in the given opts."
    # Default add/change/delete permissions first, then any custom
    # permissions declared on the model's Meta.
    default_perms = [
        (_get_permission_codename(action, opts),
         u'Can %s %s' % (action, opts.verbose_name_raw))
        for action in ('add', 'change', 'delete')
    ]
    return default_perms + list(opts.permissions)
def create_permissions(app, created_models, verbosity, **kwargs):
    """Create any missing Permission rows (default add/change/delete plus
    custom Meta permissions) for every model of *app*; connected to the
    post_syncdb signal at the bottom of this module."""
    from django.contrib.contenttypes.models import ContentType
    app_models = get_models(app)
    # This will hold the permissions we're looking for as
    # (content_type, (codename, name))
    searched_perms = list()
    # The codenames and ctypes that should exist.
    ctypes = set()
    ctypes_for_models = ContentType.objects.get_for_models(*app_models)
    for klass, ctype in ctypes_for_models.iteritems():
        ctypes.add(ctype)
        for perm in _get_all_permissions(klass._meta):
            searched_perms.append((ctype, perm))
    # Find all the Permissions that have a context_type for a model we're
    # looking for. We don't need to check for codenames since we already have
    # a list of the ones we're going to create.
    all_perms = set(auth_app.Permission.objects.filter(
        content_type__in=ctypes,
    ).values_list(
        "content_type", "codename"
    ))
    # Insert only the permissions that don't exist yet, in one bulk query.
    objs = [
        auth_app.Permission(codename=codename, name=name, content_type=ctype)
        for ctype, (codename, name) in searched_perms
        if (ctype.pk, codename) not in all_perms
    ]
    auth_app.Permission.objects.bulk_create(objs)
    if verbosity >= 2:
        for obj in objs:
            print "Adding permission '%s'" % obj
def create_superuser(app, created_models, verbosity, db, **kwargs):
    """Prompt for (and optionally create) an initial superuser right after
    the auth app's tables are installed; connected to post_syncdb below."""
    from django.core.management import call_command
    if auth_app.User in created_models and kwargs.get('interactive', True):
        msg = ("\nYou just installed Django's auth system, which means you "
            "don't have any superusers defined.\nWould you like to create one "
            "now? (yes/no): ")
        confirm = raw_input(msg)
        while 1:
            if confirm not in ('yes', 'no'):
                confirm = raw_input('Please enter either "yes" or "no": ')
                continue
            if confirm == 'yes':
                call_command("createsuperuser", interactive=True, database=db)
            # BUGFIX: break unconditionally once a valid answer is given.
            # In the original the break sat inside the 'yes' branch, so
            # answering 'no' re-tested the same value forever (infinite
            # loop with no further prompt).
            break
def get_system_username():
    """
    Try to determine the current system user's username.

    :returns: The username as a unicode string, or an empty string if the
        username could not be determined.
    """
    try:
        # Python 2: getpass.getuser() returns a byte string; decode it with
        # the locale's preferred encoding so non-ASCII usernames survive.
        return getpass.getuser().decode(locale.getdefaultlocale()[1])
    except (ImportError, KeyError, UnicodeDecodeError):
        # KeyError will be raised by os.getpwuid() (called by getuser())
        # if there is no corresponding entry in the /etc/passwd file
        # (a very restricted chroot environment, for example).
        # UnicodeDecodeError - preventive treatment for non-latin Windows.
        return u''
def get_default_username(check_db=True):
    """
    Try to determine the current system user's username to use as a default.

    :param check_db: If ``True``, requires that the username does not match an
        existing ``auth.User`` (otherwise returns an empty string).
    :returns: The username, or an empty string if no username can be
        determined.
    """
    # Imported here to avoid a circular import with the management command.
    from django.contrib.auth.management.commands.createsuperuser import (
        RE_VALID_USERNAME)
    default_username = get_system_username()
    try:
        # Strip accents and spaces and lowercase so the suggestion is likely
        # to pass the username validation regex below.
        default_username = unicodedata.normalize('NFKD', default_username)\
            .encode('ascii', 'ignore').replace(' ', '').lower()
    except UnicodeDecodeError:
        return ''
    if not RE_VALID_USERNAME.match(default_username):
        return ''
    # Don't return the default username if it is already taken.
    if check_db and default_username:
        try:
            User.objects.get(username=default_username)
        except User.DoesNotExist:
            pass
        else:
            return ''
    return default_username
# Wire the handlers above into syncdb: permissions are (re)created after any
# app's tables are installed; the superuser prompt fires only for the auth
# app itself (sender=auth_app).
signals.post_syncdb.connect(create_permissions,
    dispatch_uid = "django.contrib.auth.management.create_permissions")
signals.post_syncdb.connect(create_superuser,
    sender=auth_app, dispatch_uid = "django.contrib.auth.management.create_superuser")
| mit |
vijos/jd4 | jd4/util.py | 1 | 2252 | import re
from asyncio import get_event_loop, StreamReader, StreamReaderProtocol
from os import fdopen, listdir, open as os_open, path, remove, waitpid, \
O_RDONLY, O_NONBLOCK, WEXITSTATUS, WIFSIGNALED, WNOHANG, WTERMSIG
from shutil import rmtree
from jd4.error import FormatError
TIME_RE = re.compile(r'([0-9]+(?:\.[0-9]*)?)([mun]?)s?')
TIME_UNITS = {'': 1000000000, 'm': 1000000, 'u': 1000, 'n': 1}
MEMORY_RE = re.compile(r'([0-9]+(?:\.[0-9]*)?)([kmg]?)b?')
MEMORY_UNITS = {'': 1, 'k': 1024, 'm': 1048576, 'g': 1073741824}
def remove_under(*dirnames):
    """Delete every entry (files and whole subtrees) inside each of the
    given directories, keeping the directories themselves."""
    for dirname in dirnames:
        for entry in listdir(dirname):
            entry_path = path.join(dirname, entry)
            if path.isdir(entry_path):
                rmtree(entry_path)
            else:
                remove(entry_path)
def wait_and_reap_zombies(pid):
    """Block until child *pid* exits, then drain any other finished
    children without blocking.

    Returns the child's exit status, or the negated signal number when it
    was terminated by a signal.
    """
    _, status = waitpid(pid, 0)
    # Opportunistically reap any other zombie children.
    try:
        while True:
            waitpid(-1, WNOHANG)
    except ChildProcessError:
        pass
    if WIFSIGNALED(status):
        return -WTERMSIG(status)
    return WEXITSTATUS(status)
def read_text_file(file):
    """Return the full text contents of *file*."""
    with open(file) as stream:
        return stream.read()
def write_binary_file(file, data):
    """Replace the contents of *file* with the bytes *data*."""
    with open(file, 'wb') as stream:
        stream.write(data)
def write_text_file(file, text):
    """Replace the contents of *file* with the string *text*."""
    with open(file, 'w') as stream:
        stream.write(text)
async def read_pipe(file, size):
    """Read up to *size* bytes from the named pipe *file* without blocking
    the event loop; returns the bytes read (shorter on EOF)."""
    loop = get_event_loop()
    reader = StreamReader()
    protocol = StreamReaderProtocol(reader)
    # O_NONBLOCK so open() doesn't block waiting for a writer; the event
    # loop then drives the reads through the pipe transport.
    transport, _ = await loop.connect_read_pipe(
        lambda: protocol, fdopen(os_open(file, O_RDONLY | O_NONBLOCK)))
    chunks = list()
    while size > 0:
        chunk = await reader.read(size)
        if not chunk:
            # EOF before `size` bytes were available
            break
        chunks.append(chunk)
        size -= len(chunk)
    transport.close()
    return b''.join(chunks)
def parse_time_ns(time_str):
    """Parse a time string such as ``1.5s``/``100ms``/``10us`` and return
    the value in nanoseconds; raises FormatError on malformed input."""
    match = TIME_RE.fullmatch(time_str)
    if match is None:
        raise FormatError(time_str, 'error parsing time')
    value, unit = match.group(1), match.group(2)
    return int(float(value) * TIME_UNITS[unit])
def parse_memory_bytes(memory_str):
    """Parse a memory string such as ``256m``/``1.5gb``/``64kb`` and
    return the value in bytes; raises FormatError on malformed input."""
    match = MEMORY_RE.fullmatch(memory_str)
    if match is None:
        raise FormatError(memory_str, 'error parsing memory')
    value, unit = match.group(1), match.group(2)
    return int(float(value) * MEMORY_UNITS[unit])
| agpl-3.0 |
Dopi/HuaweiAscendG600_kernel | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/SchedGui.py | 12980 | 5411 | # SchedGui.py - Python extension for perf script, basic GUI code for
# traces drawing and overview.
#
# Copyright (C) 2010 by Frederic Weisbecker <fweisbec@gmail.com>
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
# wxPython is an optional dependency; fail early with a clear message.
try:
	import wx
except ImportError:
	# Fix: use the call form of raise instead of the Python-2-only
	# ``raise Exc, msg`` syntax so the module at least parses under
	# Python 3; behavior under Python 2 is unchanged.
	raise ImportError("You need to install the wxpython lib for this script")
class RootFrame(wx.Frame):
	"""Top-level window for the scheduler trace viewer: a scrollable canvas
	of per-row time rectangles plus a summary text area.  All event data
	comes from (and clicks are forwarded to) the sched_tracer object."""
	Y_OFFSET = 100
	RECT_HEIGHT = 100
	RECT_SPACE = 50
	EVENT_MARKING_WIDTH = 5
	def __init__(self, sched_tracer, title, parent = None, id = -1):
		wx.Frame.__init__(self, parent, id, title)
		(self.screen_width, self.screen_height) = wx.GetDisplaySize()
		self.screen_width -= 10
		self.screen_height -= 10
		self.zoom = 0.5
		self.scroll_scale = 20
		self.sched_tracer = sched_tracer
		self.sched_tracer.set_root_win(self)
		(self.ts_start, self.ts_end) = sched_tracer.interval()
		self.update_width_virtual()
		self.nr_rects = sched_tracer.nr_rectangles() + 1
		self.height_virtual = RootFrame.Y_OFFSET + (self.nr_rects * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
		# whole window panel
		self.panel = wx.Panel(self, size=(self.screen_width, self.screen_height))
		# scrollable container
		self.scroll = wx.ScrolledWindow(self.panel)
		self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale)
		self.scroll.EnableScrolling(True, True)
		self.scroll.SetFocus()
		# scrollable drawing area
		self.scroll_panel = wx.Panel(self.scroll, size=(self.screen_width - 15, self.screen_height / 2))
		self.scroll_panel.Bind(wx.EVT_PAINT, self.on_paint)
		self.scroll_panel.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
		self.scroll_panel.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
		self.scroll.Bind(wx.EVT_PAINT, self.on_paint)
		self.scroll.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
		self.scroll.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
		self.scroll.Fit()
		self.Fit()
		self.scroll_panel.SetDimensions(-1, -1, self.width_virtual, self.height_virtual, wx.SIZE_USE_EXISTING)
		self.txt = None
		self.Show(True)
	# time (us) <-> pixel conversions, scaled by the current zoom factor
	def us_to_px(self, val):
		return val / (10 ** 3) * self.zoom
	def px_to_us(self, val):
		return (val / self.zoom) * (10 ** 3)
	def scroll_start(self):
		(x, y) = self.scroll.GetViewStart()
		return (x * self.scroll_scale, y * self.scroll_scale)
	def scroll_start_us(self):
		(x, y) = self.scroll_start()
		return self.px_to_us(x)
	def paint_rectangle_zone(self, nr, color, top_color, start, end):
		# draw one time-span rectangle in row `nr`; when top_color is
		# given, an EVENT_MARKING_WIDTH strip along the top edge marks
		# an event in that span
		offset_px = self.us_to_px(start - self.ts_start)
		width_px = self.us_to_px(end - self.ts_start)
		offset_py = RootFrame.Y_OFFSET + (nr * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
		width_py = RootFrame.RECT_HEIGHT
		dc = self.dc
		if top_color is not None:
			(r, g, b) = top_color
			top_color = wx.Colour(r, g, b)
			brush = wx.Brush(top_color, wx.SOLID)
			dc.SetBrush(brush)
			dc.DrawRectangle(offset_px, offset_py, width_px, RootFrame.EVENT_MARKING_WIDTH)
			width_py -= RootFrame.EVENT_MARKING_WIDTH
			offset_py += RootFrame.EVENT_MARKING_WIDTH
		(r ,g, b) = color
		color = wx.Colour(r, g, b)
		brush = wx.Brush(color, wx.SOLID)
		dc.SetBrush(brush)
		dc.DrawRectangle(offset_px, offset_py, width_px, width_py)
	def update_rectangles(self, dc, start, end):
		start += self.ts_start
		end += self.ts_start
		self.sched_tracer.fill_zone(start, end)
	def on_paint(self, event):
		# repaint only the time window currently visible in the scroller
		dc = wx.PaintDC(self.scroll_panel)
		self.dc = dc
		width = min(self.width_virtual, self.screen_width)
		(x, y) = self.scroll_start()
		start = self.px_to_us(x)
		end = self.px_to_us(x + width)
		self.update_rectangles(dc, start, end)
	def rect_from_ypixel(self, y):
		# map a y pixel back to its rectangle row; -1 when the position
		# falls outside any row (in the margins or inter-row spacing)
		y -= RootFrame.Y_OFFSET
		rect = y / (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
		height = y % (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
		if rect < 0 or rect > self.nr_rects - 1 or height > RootFrame.RECT_HEIGHT:
			return -1
		return rect
	def update_summary(self, txt):
		# replace the summary StaticText below the canvas
		if self.txt:
			self.txt.Destroy()
		self.txt = wx.StaticText(self.panel, -1, txt, (0, (self.screen_height / 2) + 50))
	def on_mouse_down(self, event):
		# forward clicks inside a rectangle row to the tracer as
		# (row, timestamp)
		(x, y) = event.GetPositionTuple()
		rect = self.rect_from_ypixel(y)
		if rect == -1:
			return
		t = self.px_to_us(x) + self.ts_start
		self.sched_tracer.mouse_down(rect, t)
	def update_width_virtual(self):
		self.width_virtual = self.us_to_px(self.ts_end - self.ts_start)
	def __zoom(self, x):
		# re-derive the virtual width for the new zoom and keep the time
		# instant `x` anchored at the left of the view
		self.update_width_virtual()
		(xpos, ypos) = self.scroll.GetViewStart()
		xpos = self.us_to_px(x) / self.scroll_scale
		self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale, xpos, ypos)
		self.Refresh()
	def zoom_in(self):
		x = self.scroll_start_us()
		self.zoom *= 2
		self.__zoom(x)
	def zoom_out(self):
		x = self.scroll_start_us()
		self.zoom /= 2
		self.__zoom(x)
	def on_key_press(self, event):
		# '+'/'-' zoom; arrow keys scroll one scroll unit per press
		key = event.GetRawKeyCode()
		if key == ord("+"):
			self.zoom_in()
			return
		if key == ord("-"):
			self.zoom_out()
			return
		key = event.GetKeyCode()
		(x, y) = self.scroll.GetViewStart()
		if key == wx.WXK_RIGHT:
			self.scroll.Scroll(x + 1, y)
		elif key == wx.WXK_LEFT:
			self.scroll.Scroll(x - 1, y)
		elif key == wx.WXK_DOWN:
			self.scroll.Scroll(x, y + 1)
		elif key == wx.WXK_UP:
			self.scroll.Scroll(x, y - 1)
sbalun/namebench | nb_third_party/simplejson/decoder.py | 296 | 15152 | """Implementation of JSONDecoder
"""
import re
import sys
import struct
from simplejson.scanner import make_scanner
def _import_c_scanstring():
try:
from simplejson._speedups import scanstring
return scanstring
except ImportError:
return None
c_scanstring = _import_c_scanstring()
__all__ = ['JSONDecoder']
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
def _floatconstants():
    """Return (NaN, +Infinity, -Infinity) doubles built from their raw
    IEEE-754 big-endian byte patterns (Python 2 only: str.decode('hex'))."""
    _BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
    # The struct module in Python 2.4 would get frexp() out of range here
    # when an endian is specified in the format string. Fixed in Python 2.5+
    if sys.byteorder != 'big':
        # byte-swap each of the two 8-byte doubles for little-endian hosts
        _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
    nan, inf = struct.unpack('dd', _BYTES)
    return nan, inf, -inf
NaN, PosInf, NegInf = _floatconstants()
class JSONDecodeError(ValueError):
    """Subclass of ValueError with the following additional properties:

    msg: The unformatted error message
    doc: The JSON document being parsed
    pos: The start index of doc where parsing failed
    end: The end index of doc where parsing failed (may be None)
    lineno: The line corresponding to pos
    colno: The column corresponding to pos
    endlineno: The line corresponding to end (may be None)
    endcolno: The column corresponding to end (may be None)
    """
    def __init__(self, msg, doc, pos, end=None):
        ValueError.__init__(self, errmsg(msg, doc, pos, end=end))
        self.msg = msg
        self.doc = doc
        self.pos = pos
        self.end = end
        self.lineno, self.colno = linecol(doc, pos)
        if end is not None:
            # BUGFIX: derive the end line/column from `end`; the original
            # called linecol(doc, pos) here, so endlineno/endcolno always
            # duplicated lineno/colno instead of locating the error's end
            # (as the docstring promises).
            self.endlineno, self.endcolno = linecol(doc, end)
        else:
            self.endlineno, self.endcolno = None, None
def linecol(doc, pos):
    """Return the (1-based line, column) of character *pos* in *doc*.

    On the first line the column equals *pos* (0-based); on later lines
    it is the 1-based offset from the preceding newline.
    """
    newlines = doc.count('\n', 0, pos)
    if not newlines:
        return 1, pos
    return newlines + 1, pos - doc.rindex('\n', 0, pos)
def errmsg(msg, doc, pos, end=None):
    """Format a decode-error message with line/column context for *pos*
    (and *end*, when given)."""
    # Note that this function is called from _speedups
    lineno, colno = linecol(doc, pos)
    if end is None:
        fmt = '%s: line %d column %d (char %d)'
        return fmt % (msg, lineno, colno, pos)
    endlineno, endcolno = linecol(doc, end)
    fmt = '%s: line %d column %d - line %d column %d (char %d - %d)'
    return fmt % (msg, lineno, colno, endlineno, endcolno, pos, end)
_CONSTANTS = {
'-Infinity': NegInf,
'Infinity': PosInf,
'NaN': NaN,
}
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
BACKSLASH = {
'"': u'"', '\\': u'\\', '/': u'/',
'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
}
DEFAULT_ENCODING = "utf-8"
def py_scanstring(s, end, encoding=None, strict=True,
        _b=BACKSLASH, _m=STRINGCHUNK.match):
    """Scan the string s for a JSON string. End is the index of the
    character in s after the quote that started the JSON string.
    Unescapes all valid JSON string escape sequences and raises ValueError
    on attempt to decode an invalid string. If strict is False then literal
    control characters are allowed in the string.

    Returns a tuple of the decoded string and the index of the character in s
    after the end quote.

    NOTE: Python 2 implementation (uses ``unicode``/``unichr``)."""
    if encoding is None:
        encoding = DEFAULT_ENCODING
    chunks = []
    _append = chunks.append
    begin = end - 1
    while 1:
        # STRINGCHUNK matches a run of plain characters plus one
        # terminator: a quote, a backslash, or a control character.
        chunk = _m(s, end)
        if chunk is None:
            raise JSONDecodeError(
                "Unterminated string starting at", s, begin)
        end = chunk.end()
        content, terminator = chunk.groups()
        # Content is contains zero or more unescaped string characters
        if content:
            if not isinstance(content, unicode):
                content = unicode(content, encoding)
            _append(content)
        # Terminator is the end of string, a literal control character,
        # or a backslash denoting that an escape sequence follows
        if terminator == '"':
            break
        elif terminator != '\\':
            if strict:
                msg = "Invalid control character %r at" % (terminator,)
                #msg = "Invalid control character {0!r} at".format(terminator)
                raise JSONDecodeError(msg, s, end)
            else:
                _append(terminator)
                continue
        try:
            esc = s[end]
        except IndexError:
            raise JSONDecodeError(
                "Unterminated string starting at", s, begin)
        # If not a unicode escape sequence, must be in the lookup table
        if esc != 'u':
            try:
                char = _b[esc]
            except KeyError:
                msg = "Invalid \\escape: " + repr(esc)
                raise JSONDecodeError(msg, s, end)
            end += 1
        else:
            # Unicode escape sequence
            esc = s[end + 1:end + 5]
            next_end = end + 5
            if len(esc) != 4:
                msg = "Invalid \\uXXXX escape"
                raise JSONDecodeError(msg, s, end)
            uni = int(esc, 16)
            # Check for surrogate pair on UCS-4 systems
            if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
                msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
                if not s[end + 5:end + 7] == '\\u':
                    raise JSONDecodeError(msg, s, end)
                esc2 = s[end + 7:end + 11]
                if len(esc2) != 4:
                    raise JSONDecodeError(msg, s, end)
                uni2 = int(esc2, 16)
                # combine the high and low surrogates into one code point
                uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
                next_end += 6
            char = unichr(uni)
            end = next_end
        # Append the unescaped character
        _append(char)
    return u''.join(chunks), end
# Use speedup if available
scanstring = c_scanstring or py_scanstring
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
WHITESPACE_STR = ' \t\n\r'
def JSONObject((s, end), encoding, strict, scan_once, object_hook,
object_pairs_hook, memo=None,
_w=WHITESPACE.match, _ws=WHITESPACE_STR):
# Backwards compatibility
if memo is None:
memo = {}
memo_get = memo.setdefault
pairs = []
# Use a slice to prevent IndexError from being raised, the following
# check will raise a more specific ValueError if the string is empty
nextchar = s[end:end + 1]
# Normally we expect nextchar == '"'
if nextchar != '"':
if nextchar in _ws:
end = _w(s, end).end()
nextchar = s[end:end + 1]
# Trivial empty object
if nextchar == '}':
if object_pairs_hook is not None:
result = object_pairs_hook(pairs)
return result, end
pairs = {}
if object_hook is not None:
pairs = object_hook(pairs)
return pairs, end + 1
elif nextchar != '"':
raise JSONDecodeError("Expecting property name", s, end)
end += 1
while True:
key, end = scanstring(s, end, encoding, strict)
key = memo_get(key, key)
# To skip some function call overhead we optimize the fast paths where
# the JSON key separator is ": " or just ":".
if s[end:end + 1] != ':':
end = _w(s, end).end()
if s[end:end + 1] != ':':
raise JSONDecodeError("Expecting : delimiter", s, end)
end += 1
try:
if s[end] in _ws:
end += 1
if s[end] in _ws:
end = _w(s, end + 1).end()
except IndexError:
pass
try:
value, end = scan_once(s, end)
except StopIteration:
raise JSONDecodeError("Expecting object", s, end)
pairs.append((key, value))
try:
nextchar = s[end]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end]
except IndexError:
nextchar = ''
end += 1
if nextchar == '}':
break
elif nextchar != ',':
raise JSONDecodeError("Expecting , delimiter", s, end - 1)
try:
nextchar = s[end]
if nextchar in _ws:
end += 1
nextchar = s[end]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end]
except IndexError:
nextchar = ''
end += 1
if nextchar != '"':
raise JSONDecodeError("Expecting property name", s, end - 1)
if object_pairs_hook is not None:
result = object_pairs_hook(pairs)
return result, end
pairs = dict(pairs)
if object_hook is not None:
pairs = object_hook(pairs)
return pairs, end
def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    """Parse a JSON array starting just after its opening '['; returns
    ``(values, end)`` with *end* after the closing ']'.  (Python 2
    tuple-parameter signature, called from the scanner.)"""
    values = []
    nextchar = s[end:end + 1]
    if nextchar in _ws:
        end = _w(s, end + 1).end()
        nextchar = s[end:end + 1]
    # Look-ahead for trivial empty array
    if nextchar == ']':
        return values, end + 1
    _append = values.append
    while True:
        try:
            value, end = scan_once(s, end)
        except StopIteration:
            raise JSONDecodeError("Expecting object", s, end)
        _append(value)
        # after each value: skip whitespace, then expect ',' or ']'
        nextchar = s[end:end + 1]
        if nextchar in _ws:
            end = _w(s, end + 1).end()
            nextchar = s[end:end + 1]
        end += 1
        if nextchar == ']':
            break
        elif nextchar != ',':
            raise JSONDecodeError("Expecting , delimiter", s, end)
        # skip whitespace before the next value
        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass
    return values, end
class JSONDecoder(object):
    """Simple JSON <http://json.org> decoder
    Performs the following translations in decoding by default:
    +---------------+-------------------+
    | JSON | Python |
    +===============+===================+
    | object | dict |
    +---------------+-------------------+
    | array | list |
    +---------------+-------------------+
    | string | unicode |
    +---------------+-------------------+
    | number (int) | int, long |
    +---------------+-------------------+
    | number (real) | float |
    +---------------+-------------------+
    | true | True |
    +---------------+-------------------+
    | false | False |
    +---------------+-------------------+
    | null | None |
    +---------------+-------------------+
    It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
    their corresponding ``float`` values, which is outside the JSON spec.
    """
    def __init__(self, encoding=None, object_hook=None, parse_float=None,
            parse_int=None, parse_constant=None, strict=True,
            object_pairs_hook=None):
        """
        *encoding* determines the encoding used to interpret any
        :class:`str` objects decoded by this instance (``'utf-8'`` by
        default). It has no effect when decoding :class:`unicode` objects.
        Note that currently only encodings that are a superset of ASCII work,
        strings of other encodings should be passed in as :class:`unicode`.
        *object_hook*, if specified, will be called with the result of every
        JSON object decoded and its return value will be used in place of the
        given :class:`dict`. This can be used to provide custom
        deserializations (e.g. to support JSON-RPC class hinting).
        *object_pairs_hook* is an optional function that will be called with
        the result of any object literal decode with an ordered list of pairs.
        The return value of *object_pairs_hook* will be used instead of the
        :class:`dict`. This feature can be used to implement custom decoders
        that rely on the order that the key and value pairs are decoded (for
        example, :func:`collections.OrderedDict` will remember the order of
        insertion). If *object_hook* is also defined, the *object_pairs_hook*
        takes priority.
        *parse_float*, if specified, will be called with the string of every
        JSON float to be decoded. By default, this is equivalent to
        ``float(num_str)``. This can be used to use another datatype or parser
        for JSON floats (e.g. :class:`decimal.Decimal`).
        *parse_int*, if specified, will be called with the string of every
        JSON int to be decoded. By default, this is equivalent to
        ``int(num_str)``. This can be used to use another datatype or parser
        for JSON integers (e.g. :class:`float`).
        *parse_constant*, if specified, will be called with one of the
        following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
        can be used to raise an exception if invalid JSON numbers are
        encountered.
        *strict* controls the parser's behavior when it encounters an
        invalid control character in a string. The default setting of
        ``True`` means that unescaped control characters are parse errors, if
        ``False`` then control characters will be allowed in strings.
        """
        self.encoding = encoding
        self.object_hook = object_hook
        self.object_pairs_hook = object_pairs_hook
        # Fall back to the builtin converters when no hook is supplied.
        self.parse_float = parse_float or float
        self.parse_int = parse_int or int
        self.parse_constant = parse_constant or _CONSTANTS.__getitem__
        self.strict = strict
        # Parser callbacks exposed as attributes so subclasses can override
        # how individual JSON constructs are handled.
        self.parse_object = JSONObject
        self.parse_array = JSONArray
        self.parse_string = scanstring
        # NOTE(review): memo is handed to make_scanner below; presumably it
        # caches repeated object-key strings — confirm against the scanner
        # implementation.
        self.memo = {}
        self.scan_once = make_scanner(self)
    def decode(self, s, _w=WHITESPACE.match):
        """Return the Python representation of ``s`` (a ``str`` or ``unicode``
        instance containing a JSON document)
        """
        # Skip leading whitespace, parse exactly one document, skip trailing
        # whitespace; any bytes left over constitute an error.
        obj, end = self.raw_decode(s, idx=_w(s, 0).end())
        end = _w(s, end).end()
        if end != len(s):
            raise JSONDecodeError("Extra data", s, end, len(s))
        return obj
    def raw_decode(self, s, idx=0):
        """Decode a JSON document from ``s`` (a ``str`` or ``unicode``
        beginning with a JSON document) and return a 2-tuple of the Python
        representation and the index in ``s`` where the document ended.
        This can be used to decode a JSON document from a string that may
        have extraneous data at the end.
        """
        try:
            # scan_once raises StopIteration when no JSON value starts at idx.
            obj, end = self.scan_once(s, idx)
        except StopIteration:
            raise JSONDecodeError("No JSON object could be decoded", s, idx)
        return obj, end
| apache-2.0 |
dims/python-k8sclient | k8sclient/client/models/v1_limit_range_item.py | 5 | 4564 | # coding: utf-8
"""
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from pprint import pformat
from six import iteritems
class V1LimitRangeItem(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    def __init__(self):
        """
        Build an empty V1LimitRangeItem.

        ``swagger_types`` maps each attribute name to its swagger type and
        ``attribute_map`` maps each attribute name to its JSON key; every
        attribute of this model is a plain string keyed by its own name.
        """
        names = ('type', 'max', 'min', 'default')
        self.swagger_types = dict((n, 'str') for n in names)
        self.attribute_map = dict((n, n) for n in names)
        self._type = None
        self._max = None
        self._min = None
        self._default = None
    @property
    def type(self):
        """str: type of resource that this limit applies to."""
        return self._type
    @type.setter
    def type(self, value):
        """Set the type of resource that this limit applies to."""
        self._type = value
    @property
    def max(self):
        """str: max usage constraints on this kind by resource name."""
        return self._max
    @max.setter
    def max(self, value):
        """Set the max usage constraints on this kind by resource name."""
        self._max = value
    @property
    def min(self):
        """str: min usage constraints on this kind by resource name."""
        return self._min
    @min.setter
    def min(self, value):
        """Set the min usage constraints on this kind by resource name."""
        self._min = value
    @property
    def default(self):
        """str: default values on this kind by resource name if omitted."""
        return self._default
    @default.setter
    def default(self, value):
        """Set the default values on this kind by resource name if omitted."""
        self._default = value
    def to_dict(self):
        """Return the model's properties as a plain dictionary, recursively
        converting nested models (anything exposing ``to_dict``)."""
        result = {}
        for name in self.swagger_types:
            val = getattr(self, name)
            if isinstance(val, list):
                result[name] = [item.to_dict() if hasattr(item, "to_dict") else item
                                for item in val]
            elif hasattr(val, "to_dict"):
                result[name] = val.to_dict()
            else:
                result[name] = val
        return result
    def to_str(self):
        """Return a pretty-printed string of the model's properties."""
        return pformat(self.to_dict())
    def __repr__(self):
        """Used by `print` and `pprint`."""
        return self.to_str()
| apache-2.0 |
tdtrask/ansible | lib/ansible/modules/system/debconf.py | 82 | 5176 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2014, Brian Coca <briancoca+ansible@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = '''
---
module: debconf
short_description: Configure a .deb package
description:
- Configure a .deb package using debconf-set-selections. Or just query existing selections.
version_added: "1.6"
notes:
- This module requires the command line debconf tools.
- A number of questions have to be answered (depending on the package).
Use 'debconf-show <package>' on any Debian or derivative with the package
installed to see questions/settings available.
- Some distros will always record tasks involving the setting of passwords as changed. This is due to debconf-get-selections masking passwords.
requirements: [ debconf, debconf-utils ]
options:
name:
description:
- Name of package to configure.
required: true
aliases: [ pkg ]
question:
description:
- A debconf configuration setting.
aliases: [ selection, setting ]
vtype:
description:
- The type of the value supplied.
- C(seen) was added in 2.2.
choices: [ boolean, error, multiselect, note, password, seen, select, string, text, title, text ]
value:
description:
- Value to set the configuration to.
aliases: [ answer ]
unseen:
description:
- Do not set 'seen' flag when pre-seeding.
type: bool
default: False
author:
- Brian Coca (@bcoca)
'''
EXAMPLES = '''
- name: Set default locale to fr_FR.UTF-8
debconf:
name: locales
question: locales/default_environment_locale
value: fr_FR.UTF-8
vtype: select
- name: set to generate locales
debconf:
name: locales
question: locales/locales_to_be_generated
value: en_US.UTF-8 UTF-8, fr_FR.UTF-8 UTF-8
vtype: multiselect
- name: Accept oracle license
debconf:
name: oracle-java7-installer
question: shared/accepted-oracle-license-v1-1
value: 'true'
vtype: select
- name: Specifying package you can register/return the list of questions and current values
debconf:
name: tzdata
'''
from ansible.module_utils.basic import AnsibleModule
def get_selections(module, pkg):
    """Query `debconf-show` for *pkg* and return its current selections.

    :param module: AnsibleModule instance (used for tool lookup, running
        commands and failure reporting)
    :param pkg: name of the package to query
    :returns: dict mapping question names to their current values
    """
    cmd = [module.get_bin_path('debconf-show', True), pkg]
    # Pass the argv list directly instead of joining it into a string, so a
    # package name containing spaces or shell metacharacters cannot be split
    # or misinterpreted.
    rc, out, err = module.run_command(cmd)
    if rc != 0:
        module.fail_json(msg=err)
    selections = {}
    for line in out.splitlines():
        # debconf-show prefixes seen questions with '*'; skip any line that
        # does not look like "key: value" instead of crashing on it.
        if ':' not in line:
            continue
        (key, value) = line.split(':', 1)
        selections[key.strip('*').strip()] = value.strip()
    return selections
def set_selection(module, pkg, question, vtype, value, unseen):
    """Preseed a single debconf answer via `debconf-set-selections`.

    Feeds one "package question type value" line on stdin; returns the
    (rc, stdout, stderr) triple from run_command.
    """
    cmd = [module.get_bin_path('debconf-set-selections', True)]
    if unseen:
        # -u: do not mark the question as seen.
        cmd.append('-u')
    # debconf expects lowercase booleans; map Python-style capitalized
    # strings, leave anything else untouched.
    if vtype == 'boolean':
        value = {'True': 'true', 'False': 'false'}.get(value, value)
    line = ' '.join([pkg, question, vtype, value])
    return module.run_command(cmd, data=line)
def main():
    """Entry point: report the current debconf selections for a package and,
    when a question/vtype/value triple is supplied, preseed the answer."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(type='str', required=True, aliases=['pkg']),
            question=dict(type='str', aliases=['selection', 'setting']),
            vtype=dict(type='str', choices=['boolean', 'error', 'multiselect', 'note', 'password', 'seen', 'select', 'string', 'text', 'title']),
            value=dict(type='str', aliases=['answer']),
            unseen=dict(type='bool'),
        ),
        required_together=(['question', 'vtype', 'value'],),
        supports_check_mode=True,
    )
    # TODO: enable passing array of options and/or debconf file from get-selections dump
    pkg = module.params["name"]
    question = module.params["question"]
    vtype = module.params["vtype"]
    value = module.params["value"]
    unseen = module.params["unseen"]
    # Current selections; doubles as the "previous" state in the result.
    prev = get_selections(module, pkg)
    changed = False
    msg = ""
    if question is not None:
        if vtype is None or value is None:
            module.fail_json(msg="when supplying a question you must supply a valid vtype and value")
        # Changed when the question is new or its recorded value differs.
        if question not in prev or prev[question] != value:
            changed = True
        if changed:
            if not module.check_mode:
                rc, msg, e = set_selection(module, pkg, question, vtype, value, unseen)
                if rc:
                    module.fail_json(msg=e)
            curr = {question: value}
            # Narrow the reported previous state to the question we touched.
            if question in prev:
                prev = {question: prev[question]}
            else:
                prev[question] = ''
            if module._diff:
                after = prev.copy()
                after.update(curr)
                diff_dict = {'before': prev, 'after': after}
            else:
                diff_dict = {}
            # Exits here on change; the final exit_json below is reached only
            # when nothing changed or no question was supplied.
            module.exit_json(changed=changed, msg=msg, current=curr, previous=prev, diff=diff_dict)
    module.exit_json(changed=changed, msg=msg, current=prev)
| gpl-3.0 |
kiddhustle/wiardfmblog | django/contrib/gis/tests/geoapp/test_regress.py | 95 | 3512 | # -*- encoding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from datetime import datetime
from django.contrib.gis.tests.utils import no_mysql, no_spatialite
from django.contrib.gis.shortcuts import render_to_kmz
from django.db.models import Count, Min
from django.test import TestCase
from .models import City, PennsylvaniaCity, State, Truth
class GeoRegressionTests(TestCase):
    # Regression tests for GeoDjango; each method's docstring cites the Trac
    # ticket it guards against.
    def test_update(self):
        "Testing GeoQuerySet.update(). See #10411."
        pnt = City.objects.get(name='Pueblo').point
        bak = pnt.clone()
        pnt.y += 0.005
        pnt.x += 0.005
        City.objects.filter(name='Pueblo').update(point=pnt)
        self.assertEqual(pnt, City.objects.get(name='Pueblo').point)
        # Restore the original point so other tests see unchanged fixtures.
        City.objects.filter(name='Pueblo').update(point=bak)
        self.assertEqual(bak, City.objects.get(name='Pueblo').point)
    def test_kmz(self):
        "Testing `render_to_kmz` with non-ASCII data. See #11624."
        name = "Åland Islands"
        places = [{'name' : name,
                   'description' : name,
                   'kml' : '<Point><coordinates>5.0,23.0</coordinates></Point>'
                   }]
        # Rendering must not raise on the non-ASCII place name.
        kmz = render_to_kmz('gis/kml/placemarks.kml', {'places' : places})
    @no_spatialite
    @no_mysql
    def test_extent(self):
        "Testing `extent` on a table with a single point. See #11827."
        pnt = City.objects.get(name='Pueblo').point
        # A single point's extent degenerates to (x, y, x, y).
        ref_ext = (pnt.x, pnt.y, pnt.x, pnt.y)
        extent = City.objects.filter(name='Pueblo').extent()
        for ref_val, val in zip(ref_ext, extent):
            self.assertAlmostEqual(ref_val, val, 4)
    def test_unicode_date(self):
        "Testing dates are converted properly, even on SpatiaLite. See #16408."
        founded = datetime(1857, 5, 23)
        mansfield = PennsylvaniaCity.objects.create(name='Mansfield', county='Tioga', point='POINT(-77.071445 41.823881)',
                                                    founded=founded)
        self.assertEqual(founded, PennsylvaniaCity.objects.dates('founded', 'day')[0])
        self.assertEqual(founded, PennsylvaniaCity.objects.aggregate(Min('founded'))['founded__min'])
    def test_empty_count(self):
        "Testing that PostGISAdapter.__eq__ does check empty strings. See #13670."
        # contrived example, but need a geo lookup paired with an id__in lookup
        pueblo = City.objects.get(name='Pueblo')
        state = State.objects.filter(poly__contains=pueblo.point)
        cities_within_state = City.objects.filter(id__in=state)
        # .count() should not throw TypeError in __eq__
        self.assertEqual(cities_within_state.count(), 1)
    def test_defer_or_only_with_annotate(self):
        "Regression for #16409. Make sure defer() and only() work with annotate()"
        self.assertIsInstance(list(City.objects.annotate(Count('point')).defer('name')), list)
        self.assertIsInstance(list(City.objects.annotate(Count('point')).only('name')), list)
    def test_boolean_conversion(self):
        "Testing Boolean value conversion with the spatial backend, see #15169."
        t1 = Truth.objects.create(val=True)
        t2 = Truth.objects.create(val=False)
        val1 = Truth.objects.get(pk=1).val
        val2 = Truth.objects.get(pk=2).val
        # verify types -- shouldn't be 0/1
        self.assertIsInstance(val1, bool)
        self.assertIsInstance(val2, bool)
        # verify values
        self.assertEqual(val1, True)
        self.assertEqual(val2, False)
| bsd-3-clause |
srimai/odoo | addons/account/res_config.py | 200 | 25453 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (C) 2004-2012 OpenERP S.A. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
import datetime
from dateutil.relativedelta import relativedelta
import openerp
from openerp import SUPERUSER_ID
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT as DF
from openerp.tools.translate import _
from openerp.osv import fields, osv
class account_config_settings(osv.osv_memory):
_name = 'account.config.settings'
_inherit = 'res.config.settings'
_columns = {
'company_id': fields.many2one('res.company', 'Company', required=True),
'has_default_company': fields.boolean('Has default company', readonly=True),
'expects_chart_of_accounts': fields.related('company_id', 'expects_chart_of_accounts', type='boolean',
string='This company has its own chart of accounts',
help="""Check this box if this company is a legal entity."""),
'currency_id': fields.related('company_id', 'currency_id', type='many2one', relation='res.currency', required=True,
string='Default company currency', help="Main currency of the company."),
'paypal_account': fields.related('company_id', 'paypal_account', type='char', size=128,
string='Paypal account', help="Paypal account (email) for receiving online payments (credit card, etc.) If you set a paypal account, the customer will be able to pay your invoices or quotations with a button \"Pay with Paypal\" in automated emails or through the Odoo portal."),
'company_footer': fields.related('company_id', 'rml_footer', type='text', readonly=True,
string='Bank accounts footer preview', help="Bank accounts as printed in the footer of each printed document"),
'has_chart_of_accounts': fields.boolean('Company has a chart of accounts'),
'chart_template_id': fields.many2one('account.chart.template', 'Template', domain="[('visible','=', True)]"),
'code_digits': fields.integer('# of Digits', help="No. of digits to use for account code"),
'tax_calculation_rounding_method': fields.related('company_id',
'tax_calculation_rounding_method', type='selection', selection=[
('round_per_line', 'Round per line'),
('round_globally', 'Round globally'),
], string='Tax calculation rounding method',
help="If you select 'Round per line' : for each tax, the tax amount will first be computed and rounded for each PO/SO/invoice line and then these rounded amounts will be summed, leading to the total amount for that tax. If you select 'Round globally': for each tax, the tax amount will be computed for each PO/SO/invoice line, then these amounts will be summed and eventually this total tax amount will be rounded. If you sell with tax included, you should choose 'Round per line' because you certainly want the sum of your tax-included line subtotals to be equal to the total amount with taxes."),
'sale_tax': fields.many2one("account.tax.template", "Default sale tax"),
'purchase_tax': fields.many2one("account.tax.template", "Default purchase tax"),
'sale_tax_rate': fields.float('Sales tax (%)'),
'purchase_tax_rate': fields.float('Purchase tax (%)'),
'complete_tax_set': fields.boolean('Complete set of taxes', help='This boolean helps you to choose if you want to propose to the user to encode the sales and purchase rates or use the usual m2o fields. This last choice assumes that the set of tax defined for the chosen template is complete'),
'has_fiscal_year': fields.boolean('Company has a fiscal year'),
'date_start': fields.date('Start date', required=True),
'date_stop': fields.date('End date', required=True),
'period': fields.selection([('month', 'Monthly'), ('3months','3 Monthly')], 'Periods', required=True),
'sale_journal_id': fields.many2one('account.journal', 'Sale journal'),
'sale_sequence_prefix': fields.related('sale_journal_id', 'sequence_id', 'prefix', type='char', string='Invoice sequence'),
'sale_sequence_next': fields.related('sale_journal_id', 'sequence_id', 'number_next', type='integer', string='Next invoice number'),
'sale_refund_journal_id': fields.many2one('account.journal', 'Sale refund journal'),
'sale_refund_sequence_prefix': fields.related('sale_refund_journal_id', 'sequence_id', 'prefix', type='char', string='Credit note sequence'),
'sale_refund_sequence_next': fields.related('sale_refund_journal_id', 'sequence_id', 'number_next', type='integer', string='Next credit note number'),
'purchase_journal_id': fields.many2one('account.journal', 'Purchase journal'),
'purchase_sequence_prefix': fields.related('purchase_journal_id', 'sequence_id', 'prefix', type='char', string='Supplier invoice sequence'),
'purchase_sequence_next': fields.related('purchase_journal_id', 'sequence_id', 'number_next', type='integer', string='Next supplier invoice number'),
'purchase_refund_journal_id': fields.many2one('account.journal', 'Purchase refund journal'),
'purchase_refund_sequence_prefix': fields.related('purchase_refund_journal_id', 'sequence_id', 'prefix', type='char', string='Supplier credit note sequence'),
'purchase_refund_sequence_next': fields.related('purchase_refund_journal_id', 'sequence_id', 'number_next', type='integer', string='Next supplier credit note number'),
'module_account_check_writing': fields.boolean('Pay your suppliers by check',
help='This allows you to check writing and printing.\n'
'-This installs the module account_check_writing.'),
'module_account_accountant': fields.boolean('Full accounting features: journals, legal statements, chart of accounts, etc.',
help="""If you do not check this box, you will be able to do invoicing & payments, but not accounting (Journal Items, Chart of Accounts, ...)"""),
'module_account_asset': fields.boolean('Assets management',
help='This allows you to manage the assets owned by a company or a person.\n'
'It keeps track of the depreciation occurred on those assets, and creates account move for those depreciation lines.\n'
'-This installs the module account_asset. If you do not check this box, you will be able to do invoicing & payments, '
'but not accounting (Journal Items, Chart of Accounts, ...)'),
'module_account_budget': fields.boolean('Budget management',
help='This allows accountants to manage analytic and crossovered budgets. '
'Once the master budgets and the budgets are defined, '
'the project managers can set the planned amount on each analytic account.\n'
'-This installs the module account_budget.'),
'module_account_payment': fields.boolean('Manage payment orders',
help='This allows you to create and manage your payment orders, with purposes to \n'
'* serve as base for an easy plug-in of various automated payment mechanisms, and \n'
'* provide a more efficient way to manage invoice payments.\n'
'-This installs the module account_payment.' ),
'module_account_voucher': fields.boolean('Manage customer payments',
help='This includes all the basic requirements of voucher entries for bank, cash, sales, purchase, expense, contra, etc.\n'
'-This installs the module account_voucher.'),
'module_account_followup': fields.boolean('Manage customer payment follow-ups',
help='This allows to automate letters for unpaid invoices, with multi-level recalls.\n'
'-This installs the module account_followup.'),
'module_product_email_template': fields.boolean('Send products tools and information at the invoice confirmation',
help='With this module, link your products to a template to send complete information and tools to your customer.\n'
'For instance when invoicing a training, the training agenda and materials will automatically be send to your customers.'),
'group_proforma_invoices': fields.boolean('Allow pro-forma invoices',
implied_group='account.group_proforma_invoices',
help="Allows you to put invoices in pro-forma state."),
'default_sale_tax': fields.many2one('account.tax', 'Default sale tax',
help="This sale tax will be assigned by default on new products."),
'default_purchase_tax': fields.many2one('account.tax', 'Default purchase tax',
help="This purchase tax will be assigned by default on new products."),
'decimal_precision': fields.integer('Decimal precision on journal entries',
help="""As an example, a decimal precision of 2 will allow journal entries like: 9.99 EUR, whereas a decimal precision of 4 will allow journal entries like: 0.0231 EUR."""),
'group_multi_currency': fields.boolean('Allow multi currencies',
implied_group='base.group_multi_currency',
help="Allows you multi currency environment"),
'group_analytic_accounting': fields.boolean('Analytic accounting',
implied_group='analytic.group_analytic_accounting',
help="Allows you to use the analytic accounting."),
'group_check_supplier_invoice_total': fields.boolean('Check the total of supplier invoices',
implied_group="account.group_supplier_inv_check_total"),
'income_currency_exchange_account_id': fields.related(
'company_id', 'income_currency_exchange_account_id',
type='many2one',
relation='account.account',
string="Gain Exchange Rate Account",
domain="[('type', '=', 'other'), ('company_id', '=', company_id)]]"),
'expense_currency_exchange_account_id': fields.related(
'company_id', 'expense_currency_exchange_account_id',
type="many2one",
relation='account.account',
string="Loss Exchange Rate Account",
domain="[('type', '=', 'other'), ('company_id', '=', company_id)]]"),
}
def _check_account_gain(self, cr, uid, ids, context=None):
for obj in self.browse(cr, uid, ids, context=context):
if obj.income_currency_exchange_account_id.company_id and obj.company_id != obj.income_currency_exchange_account_id.company_id:
return False
return True
def _check_account_loss(self, cr, uid, ids, context=None):
for obj in self.browse(cr, uid, ids, context=context):
if obj.expense_currency_exchange_account_id.company_id and obj.company_id != obj.expense_currency_exchange_account_id.company_id:
return False
return True
_constraints = [
(_check_account_gain, 'The company of the gain exchange rate account must be the same than the company selected.', ['income_currency_exchange_account_id']),
(_check_account_loss, 'The company of the loss exchange rate account must be the same than the company selected.', ['expense_currency_exchange_account_id']),
]
def _default_company(self, cr, uid, context=None):
user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
return user.company_id.id
def _default_has_default_company(self, cr, uid, context=None):
count = self.pool.get('res.company').search_count(cr, uid, [], context=context)
return bool(count == 1)
    def _get_default_fiscalyear_data(self, cr, uid, company_id, context=None):
        """Compute default period, starting and ending date for fiscalyear
        - if in a fiscal year, use its period, starting and ending date
        - if past fiscal year, use its period, and new dates [ending date of the latest +1 day ; ending date of the latest +1 year]
        - if no fiscal year, use monthly, 1st jan, 31th dec of this year
        :return: (date_start, date_stop, period) at format DEFAULT_SERVER_DATETIME_FORMAT
        """
        # Look for a fiscal year of this company that contains today's date.
        fiscalyear_ids = self.pool.get('account.fiscalyear').search(cr, uid,
            [('date_start', '<=', time.strftime(DF)), ('date_stop', '>=', time.strftime(DF)),
             ('company_id', '=', company_id)])
        if fiscalyear_ids:
            # is in a current fiscal year, use this one
            fiscalyear = self.pool.get('account.fiscalyear').browse(cr, uid, fiscalyear_ids[0], context=context)
            if len(fiscalyear.period_ids) == 5: # 4 periods of 3 months + opening period
                period = '3months'
            else:
                period = 'month'
            return (fiscalyear.date_start, fiscalyear.date_stop, period)
        else:
            # No fiscal year covers today: fall back to the most recent past one.
            past_fiscalyear_ids = self.pool.get('account.fiscalyear').search(cr, uid,
                [('date_stop', '<=', time.strftime(DF)), ('company_id', '=', company_id)])
            if past_fiscalyear_ids:
                # use the latest fiscal, sorted by (start_date, id)
                past_fiscalyear_ids = past_fiscalyear_ids  # NOTE(review): assumes default search order yields latest last — confirm
                latest_year = self.pool.get('account.fiscalyear').browse(cr, uid, past_fiscalyear_ids[-1], context=context)
                latest_stop = datetime.datetime.strptime(latest_year.date_stop, DF)
                if len(latest_year.period_ids) == 5:
                    period = '3months'
                else:
                    period = 'month'
                # Shift forward: start the day after the latest year stopped,
                # end exactly one year after its stop date.
                return ((latest_stop+datetime.timedelta(days=1)).strftime(DF), latest_stop.replace(year=latest_stop.year+1).strftime(DF), period)
            else:
                # No fiscal year at all: default to the current civil year, monthly.
                return (time.strftime('%Y-01-01'), time.strftime('%Y-12-31'), 'month')
_defaults = {
'company_id': _default_company,
'has_default_company': _default_has_default_company,
}
    def create(self, cr, uid, values, context=None):
        """Create the settings record, then re-write its related fields.

        Related (fields.related) values are not persisted by the base
        create(), so they are written back explicitly afterwards.
        """
        id = super(account_config_settings, self).create(cr, uid, values, context)
        # Hack: to avoid some nasty bug, related fields are not written upon record creation.
        # Hence we write on those fields here.
        vals = {}
        for fname, field in self._columns.iteritems():
            if isinstance(field, fields.related) and fname in values:
                vals[fname] = values[fname]
        self.write(cr, uid, [id], vals, context)
        return id
    def onchange_company_id(self, cr, uid, ids, company_id, context=None):
        """Refresh every company-dependent field of the wizard when the
        selected company changes: related company fields, fiscal year
        defaults, journals and their sequences, default product taxes and
        the gain/loss exchange rate accounts.
        """
        # update related fields
        values = {}
        values['currency_id'] = False
        if company_id:
            company = self.pool.get('res.company').browse(cr, uid, company_id, context=context)
            # A company is "configured" once it no longer appears in the
            # installer's unconfigured-company list.
            has_chart_of_accounts = company_id not in self.pool.get('account.installer').get_unconfigured_cmp(cr, uid)
            fiscalyear_count = self.pool.get('account.fiscalyear').search_count(cr, uid,
                [('date_start', '<=', time.strftime('%Y-%m-%d')), ('date_stop', '>=', time.strftime('%Y-%m-%d')),
                 ('company_id', '=', company_id)])
            date_start, date_stop, period = self._get_default_fiscalyear_data(cr, uid, company_id, context=context)
            values = {
                'expects_chart_of_accounts': company.expects_chart_of_accounts,
                'currency_id': company.currency_id.id,
                'paypal_account': company.paypal_account,
                'company_footer': company.rml_footer,
                'has_chart_of_accounts': has_chart_of_accounts,
                'has_fiscal_year': bool(fiscalyear_count),
                'chart_template_id': False,
                'tax_calculation_rounding_method': company.tax_calculation_rounding_method,
                'date_start': date_start,
                'date_stop': date_stop,
                'period': period,
            }
            # update journals and sequences
            # Reset all journal/sequence fields first, then fill in the ones
            # that exist for this company.
            for journal_type in ('sale', 'sale_refund', 'purchase', 'purchase_refund'):
                for suffix in ('_journal_id', '_sequence_prefix', '_sequence_next'):
                    values[journal_type + suffix] = False
            journal_obj = self.pool.get('account.journal')
            journal_ids = journal_obj.search(cr, uid, [('company_id', '=', company_id)])
            for journal in journal_obj.browse(cr, uid, journal_ids):
                if journal.type in ('sale', 'sale_refund', 'purchase', 'purchase_refund'):
                    values.update({
                        journal.type + '_journal_id': journal.id,
                        journal.type + '_sequence_prefix': journal.sequence_id.prefix,
                        journal.type + '_sequence_next': journal.sequence_id.number_next,
                    })
            # update taxes
            ir_values = self.pool.get('ir.values')
            taxes_id = ir_values.get_default(cr, uid, 'product.template', 'taxes_id', company_id=company_id)
            supplier_taxes_id = ir_values.get_default(cr, uid, 'product.template', 'supplier_taxes_id', company_id=company_id)
            # get_default may return a list or a scalar; keep only one id.
            values.update({
                'default_sale_tax': isinstance(taxes_id, list) and taxes_id[0] or taxes_id,
                'default_purchase_tax': isinstance(supplier_taxes_id, list) and supplier_taxes_id[0] or supplier_taxes_id,
            })
            # update gain/loss exchange rate accounts
            values.update({
                'income_currency_exchange_account_id': company.income_currency_exchange_account_id and company.income_currency_exchange_account_id.id or False,
                'expense_currency_exchange_account_id': company.expense_currency_exchange_account_id and company.expense_currency_exchange_account_id.id or False
            })
        return {'value': values}
    def onchange_chart_template_id(self, cr, uid, ids, chart_template_id, context=None):
        """Propose default sale/purchase taxes (and code digits) when the
        chart template changes; reset the tax fields when it is cleared."""
        tax_templ_obj = self.pool.get('account.tax.template')
        res = {'value': {
            'complete_tax_set': False, 'sale_tax': False, 'purchase_tax': False,
            'sale_tax_rate': 15, 'purchase_tax_rate': 15,
        }}
        if chart_template_id:
            # update complete_tax_set, sale_tax and purchase_tax
            chart_template = self.pool.get('account.chart.template').browse(cr, uid, chart_template_id, context=context)
            res['value'].update({'complete_tax_set': chart_template.complete_tax_set})
            if chart_template.complete_tax_set:
                # default tax is given by the lowest sequence. For same sequence we will take the latest created as it will be the case for tax created while installing the generic chart of account
                sale_tax_ids = tax_templ_obj.search(cr, uid,
                    [("chart_template_id", "=", chart_template_id), ('type_tax_use', 'in', ('sale','all'))],
                    order="sequence, id desc")
                purchase_tax_ids = tax_templ_obj.search(cr, uid,
                    [("chart_template_id", "=", chart_template_id), ('type_tax_use', 'in', ('purchase','all'))],
                    order="sequence, id desc")
                res['value']['sale_tax'] = sale_tax_ids and sale_tax_ids[0] or False
                res['value']['purchase_tax'] = purchase_tax_ids and purchase_tax_ids[0] or False
            if chart_template.code_digits:
                res['value']['code_digits'] = chart_template.code_digits
        return res
def onchange_tax_rate(self, cr, uid, ids, rate, context=None):
return {'value': {'purchase_tax_rate': rate or False}}
def onchange_multi_currency(self, cr, uid, ids, group_multi_currency, context=None):
res = {}
if not group_multi_currency:
res['value'] = {'income_currency_exchange_account_id': False, 'expense_currency_exchange_account_id': False}
return res
def onchange_start_date(self, cr, uid, id, start_date):
if start_date:
start_date = datetime.datetime.strptime(start_date, "%Y-%m-%d")
end_date = (start_date + relativedelta(months=12)) - relativedelta(days=1)
return {'value': {'date_stop': end_date.strftime('%Y-%m-%d')}}
return {}
def open_company_form(self, cr, uid, ids, context=None):
config = self.browse(cr, uid, ids[0], context)
return {
'type': 'ir.actions.act_window',
'name': 'Configure your Company',
'res_model': 'res.company',
'res_id': config.company_id.id,
'view_mode': 'form',
}
    def set_default_taxes(self, cr, uid, ids, context=None):
        """Set default sale and purchase taxes for products.

        Writes the wizard's default_sale_tax / default_purchase_tax into
        ir.values as company-specific defaults on product.template.
        Raises AccessError unless run by the superuser or an ERP manager.
        """
        # Changing company-wide defaults is restricted to administrators.
        if uid != SUPERUSER_ID and not self.pool['res.users'].has_group(cr, uid, 'base.group_erp_manager'):
            raise openerp.exceptions.AccessError(_("Only administrators can change the settings"))
        ir_values = self.pool.get('ir.values')
        config = self.browse(cr, uid, ids[0], context)
        # ir.values expects a list of ids, or False to clear the default.
        ir_values.set_default(cr, SUPERUSER_ID, 'product.template', 'taxes_id',
            config.default_sale_tax and [config.default_sale_tax.id] or False, company_id=config.company_id.id)
        ir_values.set_default(cr, SUPERUSER_ID, 'product.template', 'supplier_taxes_id',
            config.default_purchase_tax and [config.default_purchase_tax.id] or False, company_id=config.company_id.id)
    def set_chart_of_accounts(self, cr, uid, ids, context=None):
        """Install a chart of accounts for the given company (if required).

        Delegates the actual installation to the wizard.multi.charts.accounts
        wizard, passing through the tax/currency choices made in this form.
        """
        config = self.browse(cr, uid, ids[0], context)
        if config.chart_template_id:
            # The form should only offer a template when a chart is expected
            # and none is installed yet.
            assert config.expects_chart_of_accounts and not config.has_chart_of_accounts
            wizard = self.pool.get('wizard.multi.charts.accounts')
            wizard_id = wizard.create(cr, uid, {
                'company_id': config.company_id.id,
                'chart_template_id': config.chart_template_id.id,
                'code_digits': config.code_digits or 6,
                'sale_tax': config.sale_tax.id,
                'purchase_tax': config.purchase_tax.id,
                'sale_tax_rate': config.sale_tax_rate,
                'purchase_tax_rate': config.purchase_tax_rate,
                'complete_tax_set': config.complete_tax_set,
                'currency_id': config.currency_id.id,
            }, context)
            wizard.execute(cr, uid, [wizard_id], context)
    def set_fiscalyear(self, cr, uid, ids, context=None):
        """Create a fiscal year for the given company (if necessary).

        Only acts when a chart of accounts exists or is being installed, and
        only when no existing fiscal year already covers the configured
        date_start..date_stop range.
        """
        config = self.browse(cr, uid, ids[0], context)
        if config.has_chart_of_accounts or config.chart_template_id:
            fiscalyear = self.pool.get('account.fiscalyear')
            # Skip creation when a fiscal year already covers the period.
            fiscalyear_count = fiscalyear.search_count(cr, uid,
                [('date_start', '<=', config.date_start), ('date_stop', '>=', config.date_stop),
                 ('company_id', '=', config.company_id.id)],
                context=context)
            if not fiscalyear_count:
                # Name/code default to the start year; when the period spans
                # two calendar years use "YYYY-YYYY" / "YY-YY" instead.
                name = code = config.date_start[:4]
                if int(name) != int(config.date_stop[:4]):
                    name = config.date_start[:4] +'-'+ config.date_stop[:4]
                    code = config.date_start[2:4] +'-'+ config.date_stop[2:4]
                vals = {
                    'name': name,
                    'code': code,
                    'date_start': config.date_start,
                    'date_stop': config.date_stop,
                    'company_id': config.company_id.id,
                }
                fiscalyear_id = fiscalyear.create(cr, uid, vals, context=context)
                # Pre-create monthly or quarterly accounting periods.
                if config.period == 'month':
                    fiscalyear.create_period(cr, uid, [fiscalyear_id])
                elif config.period == '3months':
                    fiscalyear.create_period3(cr, uid, [fiscalyear_id])
def get_default_dp(self, cr, uid, fields, context=None):
dp = self.pool.get('ir.model.data').get_object(cr, uid, 'product','decimal_account')
return {'decimal_precision': dp.digits}
def set_default_dp(self, cr, uid, ids, context=None):
config = self.browse(cr, uid, ids[0], context)
dp = self.pool.get('ir.model.data').get_object(cr, uid, 'product','decimal_account')
dp.write({'digits': config.decimal_precision})
def onchange_analytic_accounting(self, cr, uid, ids, analytic_accounting, context=None):
if analytic_accounting:
return {'value': {
'module_account_accountant': True,
}}
return {}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
ShinyROM/android_external_chromium_org | chrome/common/extensions/docs/server2/redirector_test.py | 24 | 3356 | #!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import unittest
from compiled_file_system import CompiledFileSystem
from object_store_creator import ObjectStoreCreator
from redirector import Redirector
from test_file_system import TestFileSystem
from third_party.json_schema_compiler.json_parse import Parse
# Base URL the tests use when issuing redirect requests.
HOST = 'http://localhost/'

# In-memory docs tree: a redirects.json at the root and in nested
# directories, covering external (full URL), absolute ('/...') and
# relative (including '../') redirect rules.
file_system = TestFileSystem({
  'redirects.json': json.dumps({
    '': '/index.html',
    'home': 'index.html',
    'index.html': 'http://something.absolute.com/'
  }),
  'apps': {
    'redirects.json': json.dumps({
      '': '../index.html',
      'index.html': 'about_apps.html'
    })
  },
  'extensions': {
    'redirects.json': json.dumps({
      'manifest': 'manifest.html'
    }),
    'manifest': {
      'redirects.json': json.dumps({
        '': '../manifest.html',
        'more-info': 'http://lmgtfy.com'
      })
    }
  }
})
class RedirectorTest(unittest.TestCase):
  """Exercises Redirector against the in-memory docs tree defined above."""

  def setUp(self):
    self._redirector = Redirector(
        CompiledFileSystem.Factory(ObjectStoreCreator.ForTest()),
        file_system)

  def testExternalRedirection(self):
    # Rules whose target is a full URL redirect off-site unchanged.
    self.assertEqual(
        'http://something.absolute.com/',
        self._redirector.Redirect(HOST, 'index.html'))
    self.assertEqual(
        'http://lmgtfy.com',
        self._redirector.Redirect(HOST, 'extensions/manifest/more-info'))

  def testAbsoluteRedirection(self):
    # Targets starting with '/' resolve from the docs root.
    self.assertEqual(
        '/apps/about_apps.html',
        self._redirector.Redirect(HOST, 'apps/index.html'))
    self.assertEqual(
        '/index.html', self._redirector.Redirect(HOST, ''))
    self.assertEqual(
        '/index.html', self._redirector.Redirect(HOST, 'home'))

  def testRelativeRedirection(self):
    # Relative targets (including '../') resolve against the directory
    # whose redirects.json defined the rule.
    self.assertEqual(
        '/extensions/manifest.html',
        self._redirector.Redirect(HOST, 'extensions/manifest/'))
    self.assertEqual(
        '/extensions/manifest.html',
        self._redirector.Redirect(HOST, 'extensions/manifest'))
    self.assertEqual(
        '/index.html', self._redirector.Redirect(HOST, 'apps/'))

  def testNotFound(self):
    # Paths with no matching rule yield None (no redirect).
    self.assertEqual(
        None, self._redirector.Redirect(HOST, 'not/a/real/path'))
    self.assertEqual(
        None, self._redirector.Redirect(HOST, 'public/apps/okay.html'))

  def testOldHosts(self):
    # Requests from the legacy code.google.com hosts go to the new site root.
    self.assertEqual(
        'https://developer.chrome.com/',
        self._redirector.Redirect('http://code.google.com', ''))
    self.assertEqual(
        'https://developer.chrome.com/',
        self._redirector.Redirect('https://code.google.com', ''))

  def testCron(self):
    # The cron run should have warmed the cache with every redirects.json.
    self._redirector.Cron().Get()
    expected_paths = set([
      'redirects.json',
      'apps/redirects.json',
      'extensions/redirects.json',
      'extensions/manifest/redirects.json'
    ])
    for path in expected_paths:
      self.assertEqual(
          Parse(file_system.ReadSingle(path).Get()),
          # Access the cache's object store to see what files were hit during
          # the cron run. Returns strings parsed as JSON.
          # TODO(jshumway): Make a non hack version of this check.
          self._redirector._cache._file_object_store.Get(
              path).Get()._cache_data)


if __name__ == '__main__':
  unittest.main()
| bsd-3-clause |
michalliu/OpenWrt-Firefly-Libraries | staging_dir/target-mipsel_1004kc+dsp_uClibc-0.9.33.2/usr/lib/python2.7/ctypes/test/test_as_parameter.py | 32 | 6779 | import unittest
from ctypes import *
from ctypes.test import need_symbol
import _ctypes_test
# Shared library of C helper functions exercised by these tests.
dll = CDLL(_ctypes_test.__file__)

try:
    CALLBACK_FUNCTYPE = WINFUNCTYPE
except NameError:
    # fake to enable this test on Linux (WINFUNCTYPE only exists on Windows)
    CALLBACK_FUNCTYPE = CFUNCTYPE
class POINT(Structure):
    # Simple C struct (two ints) used by the by-value passing tests.
    _fields_ = [("x", c_int), ("y", c_int)]
class BasicWrapTestCase(unittest.TestCase):
    """Calls C test functions with every argument passed through self.wrap().

    Here wrap() is the identity, so this class is the baseline; subclasses
    override wrap() to exercise the _as_parameter_ protocol.
    """

    def wrap(self, param):
        # Identity wrapper; subclasses substitute _as_parameter_ proxies.
        return param

    @need_symbol('c_wchar')
    def test_wchar_parm(self):
        f = dll._testfunc_i_bhilfd
        f.argtypes = [c_byte, c_wchar, c_int, c_long, c_float, c_double]
        result = f(self.wrap(1), self.wrap(u"x"), self.wrap(3), self.wrap(4), self.wrap(5.0), self.wrap(6.0))
        self.assertEqual(result, 139)
        # BUG FIX: was assertTrue(type(result), int) -- the second argument
        # of assertTrue is just the failure message, so the check was vacuous.
        self.assertIsInstance(result, int)

    def test_pointers(self):
        f = dll._testfunc_p_p
        f.restype = POINTER(c_int)
        f.argtypes = [POINTER(c_int)]

        # This only works if the value c_int(42) passed to the
        # function is still alive while the pointer (the result) is
        # used.
        v = c_int(42)

        self.assertEqual(pointer(v).contents.value, 42)
        result = f(self.wrap(pointer(v)))
        self.assertEqual(type(result), POINTER(c_int))
        self.assertEqual(result.contents.value, 42)

        # This one works...
        result = f(self.wrap(pointer(v)))
        self.assertEqual(result.contents.value, v.value)

        p = pointer(c_int(99))
        result = f(self.wrap(p))
        self.assertEqual(result.contents.value, 99)

    def test_shorts(self):
        f = dll._testfunc_callback_i_if

        args = []
        expected = [262144, 131072, 65536, 32768, 16384, 8192, 4096, 2048,
                    1024, 512, 256, 128, 64, 32, 16, 8, 4, 2, 1]

        def callback(v):
            args.append(v)
            return v

        CallBack = CFUNCTYPE(c_int, c_int)

        cb = CallBack(callback)
        # The C function halves the value repeatedly, calling back each time.
        f(self.wrap(2**18), self.wrap(cb))
        self.assertEqual(args, expected)

    ################################################################

    def test_callbacks(self):
        f = dll._testfunc_callback_i_if
        f.restype = c_int
        f.argtypes = None

        MyCallback = CFUNCTYPE(c_int, c_int)

        def callback(value):
            #print "called back with", value
            return value

        cb = MyCallback(callback)
        result = f(self.wrap(-10), self.wrap(cb))
        self.assertEqual(result, -18)

        # test with prototype
        f.argtypes = [c_int, MyCallback]
        cb = MyCallback(callback)
        result = f(self.wrap(-10), self.wrap(cb))
        self.assertEqual(result, -18)

        result = f(self.wrap(-10), self.wrap(cb))
        self.assertEqual(result, -18)

        AnotherCallback = CALLBACK_FUNCTYPE(c_int, c_int, c_int, c_int, c_int)

        # check that the prototype works: we call f with wrong
        # argument types
        cb = AnotherCallback(callback)
        self.assertRaises(ArgumentError, f, self.wrap(-10), self.wrap(cb))

    def test_callbacks_2(self):
        # Can also use simple datatypes as argument type specifiers
        # for the callback function.
        # In this case the call receives an instance of that type
        f = dll._testfunc_callback_i_if
        f.restype = c_int

        MyCallback = CFUNCTYPE(c_int, c_int)

        f.argtypes = [c_int, MyCallback]

        def callback(value):
            #print "called back with", value
            self.assertEqual(type(value), int)
            return value

        cb = MyCallback(callback)
        result = f(self.wrap(-10), self.wrap(cb))
        self.assertEqual(result, -18)

    def test_longlong_callbacks(self):
        f = dll._testfunc_callback_q_qf
        f.restype = c_longlong

        MyCallback = CFUNCTYPE(c_longlong, c_longlong)

        f.argtypes = [c_longlong, MyCallback]

        def callback(value):
            self.assertIsInstance(value, (int, long))
            return value & 0x7FFFFFFF

        cb = MyCallback(callback)

        self.assertEqual(13577625587, int(f(self.wrap(1000000000000), self.wrap(cb))))

    def test_byval(self):
        # without prototype
        ptin = POINT(1, 2)
        ptout = POINT()
        # EXPORT int _testfunc_byval(point in, point *pout)
        result = dll._testfunc_byval(ptin, byref(ptout))
        got = result, ptout.x, ptout.y
        expected = 3, 1, 2
        self.assertEqual(got, expected)

        # with prototype
        ptin = POINT(101, 102)
        ptout = POINT()
        dll._testfunc_byval.argtypes = (POINT, POINTER(POINT))
        dll._testfunc_byval.restype = c_int
        result = dll._testfunc_byval(self.wrap(ptin), byref(ptout))
        got = result, ptout.x, ptout.y
        expected = 203, 101, 102
        self.assertEqual(got, expected)

    def test_struct_return_2H(self):
        class S2H(Structure):
            _fields_ = [("x", c_short),
                        ("y", c_short)]

        dll.ret_2h_func.restype = S2H
        dll.ret_2h_func.argtypes = [S2H]
        inp = S2H(99, 88)
        s2h = dll.ret_2h_func(self.wrap(inp))
        self.assertEqual((s2h.x, s2h.y), (99*2, 88*3))

    def test_struct_return_8H(self):
        class S8I(Structure):
            _fields_ = [("a", c_int),
                        ("b", c_int),
                        ("c", c_int),
                        ("d", c_int),
                        ("e", c_int),
                        ("f", c_int),
                        ("g", c_int),
                        ("h", c_int)]

        dll.ret_8i_func.restype = S8I
        dll.ret_8i_func.argtypes = [S8I]
        inp = S8I(9, 8, 7, 6, 5, 4, 3, 2)

        s8i = dll.ret_8i_func(self.wrap(inp))
        self.assertEqual((s8i.a, s8i.b, s8i.c, s8i.d, s8i.e, s8i.f, s8i.g, s8i.h),
                         (9*2, 8*3, 7*4, 6*5, 5*6, 4*7, 3*8, 2*9))

    def test_recursive_as_param(self):
        from ctypes import c_int

        class A(object):
            pass

        a = A()
        # An object whose _as_parameter_ is itself must be detected, not
        # followed into infinite recursion.
        a._as_parameter_ = a
        with self.assertRaises(RuntimeError):
            c_int.from_param(a)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

class AsParamWrapper(object):
    """Proxy exposing its wrapped value via a plain _as_parameter_ attribute."""
    def __init__(self, param):
        self._as_parameter_ = param

class AsParamWrapperTestCase(BasicWrapTestCase):
    # Re-run the whole BasicWrapTestCase suite with every argument wrapped
    # in an attribute-based _as_parameter_ proxy.
    wrap = AsParamWrapper

#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

class AsParamPropertyWrapper(object):
    """Proxy exposing its wrapped value via an _as_parameter_ property."""
    def __init__(self, param):
        self._param = param

    def getParameter(self):
        return self._param
    _as_parameter_ = property(getParameter)

class AsParamPropertyWrapperTestCase(BasicWrapTestCase):
    # Same suite, but _as_parameter_ is computed lazily by a property.
    wrap = AsParamPropertyWrapper

#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

if __name__ == '__main__':
    unittest.main()
| gpl-2.0 |
poulpito/Flexget | flexget/tests/test_torrent.py | 4 | 12981 | from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
import os
import mock
import pytest
from flexget.utils.bittorrent import Torrent
class TestInfoHash(object):
    """Tests torrent_info_hash extraction from .torrent files and magnet URIs."""

    config = """
        tasks:
          test:
            mock:
              - {title: 'test', file: 'test.torrent'}
            accept_all: yes
          test_magnet:
            mock:
              - title: test magnet
                url: magnet:?xt=urn:btih:2a8959bed2be495bb0e3ea96f497d873d5faed05&dn=some.thing.720p
              - title: test magnet 2
                urls: ['magnet:?xt=urn:btih:2b3959bed2be445bb0e3ea96f497d873d5faed05&dn=some.thing.else.720p']
    """

    def test_infohash(self, execute_task):
        """Torrent: infohash parsing"""
        task = execute_task('test')
        info_hash = task.entries[0].get('torrent_info_hash')
        # The hash is reported upper-cased.
        assert info_hash == '14FFE5DD23188FD5CB53A1D47F1289DB70ABF31E', \
            'InfoHash does not match (got %s)' % info_hash

    def test_magnet_infohash(self, execute_task):
        """Tests metainfo/magnet_btih plugin"""
        task = execute_task('test_magnet')
        # The btih value is parsed from both 'url' and 'urls' and upper-cased.
        assert task.all_entries[0]['torrent_info_hash'] == '2A8959BED2BE495BB0E3EA96F497D873D5FAED05'
        assert task.all_entries[1]['torrent_info_hash'] == '2B3959BED2BE445BB0E3EA96F497D873D5FAED05'
@pytest.mark.usefixtures('tmpdir')
class TestSeenInfoHash(object):
    """Tests rejecting torrents whose info hash has already been seen."""

    config = """
        tasks:
          test:
            mock:
              - {title: test, file: '__tmp__/test.torrent'}
            accept_all: yes
          test2:
            mock:
              - {title: test2, file: '__tmp__/test.torrent'}
            accept_all: yes
          test_same_run:
            mock:
              - {title: test, torrent_info_hash: 20AE692114DC343C86DF5B07C276E5077E581766}
              - {title: test2, torrent_info_hash: 20ae692114dc343c86df5b07c276e5077e581766}
            accept_all: yes
    """

    @pytest.mark.filecopy('test.torrent', '__tmp__/test.torrent')
    def test_seen_info_hash(self, execute_task):
        # Same torrent file under a different title must be rejected on a later run.
        task = execute_task('test')
        assert task.find_entry('accepted', title='test'), 'torrent should have been accepted on first run'
        task = execute_task('test2')
        assert task.find_entry('rejected', title='test2'), 'torrent should have been rejected on second run'

    def test_same_run(self, execute_task):
        # Test that 2 entries with the same info hash don't get accepted on the same run.
        # Also tests that the plugin compares info hash case insensitively.
        task = execute_task('test_same_run')
        assert len(task.accepted) == 1, 'Should not have accepted both entries with the same info hash'
@pytest.mark.usefixtures('tmpdir')
class TestModifyTrackers(object):
    """Tests add_trackers / remove_trackers / modify_trackers on both
    .torrent files and magnet URIs."""

    config = """
        templates:
          global:
            accept_all: yes
        tasks:
          test_add_trackers:
            mock:
              - {title: 'test', file: '__tmp__/test.torrent'}
              - {title: 'test_magnet'}
            set:
              url: 'magnet:?xt=urn:btih:HASH&dn=title'
            add_trackers:
              - udp://thetracker.com/announce
          test_remove_trackers:
            mock:
              - {title: 'test', file: '__tmp__/test.torrent'}
              - title: 'test_magnet'
            set:
              url: 'magnet:?xt=urn:btih:HASH&dn=title&tr=http://ipv6.torrent.ubuntu.com:6969/announce'
            remove_trackers:
              - ipv6
          test_modify_trackers:
            mock:
              - {title: 'test', file: '__tmp__/test.torrent'}
            modify_trackers:
              - test:
                  from: ubuntu
                  to: replaced
    """

    def load_torrent(self, filename):
        # Helper: read and parse a .torrent file from disk.
        with open(filename, 'rb') as f:
            data = f.read()
        return Torrent(data)

    @pytest.mark.filecopy('test.torrent', '__tmp__/test.torrent')
    def test_add_trackers(self, execute_task, tmpdir):
        task = execute_task('test_add_trackers')
        torrent = self.load_torrent(os.path.join(tmpdir.strpath, 'test.torrent'))
        assert 'udp://thetracker.com/announce' in torrent.trackers, \
            'udp://thetracker.com/announce should have been added to trackers'
        # Check magnet url
        assert 'tr=udp://thetracker.com/announce' in task.find_entry(title='test_magnet')['url']

    @pytest.mark.filecopy('test.torrent', '__tmp__/test.torrent')
    def test_remove_trackers(self, execute_task, tmpdir):
        task = execute_task('test_remove_trackers')
        torrent = self.load_torrent(os.path.join(tmpdir.strpath, 'test.torrent'))
        assert 'http://ipv6.torrent.ubuntu.com:6969/announce' not in torrent.trackers, \
            'ipv6 tracker should have been removed'
        # Check magnet url
        assert 'tr=http://ipv6.torrent.ubuntu.com:6969/announce' not in task.find_entry(title='test_magnet')['url']

    @pytest.mark.filecopy('test.torrent', '__tmp__/test.torrent')
    def test_modify_trackers(self, execute_task, tmpdir):
        execute_task('test_modify_trackers')
        torrent = self.load_torrent(os.path.join(tmpdir.strpath, 'test.torrent'))
        assert 'http://torrent.replaced.com:6969/announce' in torrent.trackers, \
            'ubuntu tracker should have been added'
class TestPrivateTorrents(object):
    """Tests the private_torrents plugin: 'no' rejects private-flagged torrents."""

    config = """
        tasks:
          test:
            mock:
              - {title: 'test_private', file: 'private.torrent'}
              - {title: 'test_public', file: 'test.torrent'}
            accept_all: yes
            private_torrents: no
    """

    def test_private_torrents(self, execute_task):
        task = execute_task('test')
        assert task.find_entry('rejected', title='test_private'), 'did not reject private torrent'
        assert task.find_entry('accepted', title='test_public'), 'did not pass public torrent'
@pytest.mark.usefixtures('tmpdir')
class TestTorrentScrub(object):
    """Tests the torrent_scrub plugin: 'all' mode, field-list mode, and 'off'."""

    config = """
        tasks:
          test_all:
            mock:
              - {title: 'test', file: '__tmp__/test.torrent'}
              - {title: 'LICENSE', file: '__tmp__/LICENSE.torrent'}
              - {title: 'LICENSE-resume', file: '__tmp__/LICENSE-resume.torrent'}
            accept_all: yes
            torrent_scrub: all
            disable: [seen_info_hash]
          test_fields:
            mock:
              - {title: 'fields.LICENSE', file: '__tmp__/LICENSE.torrent'}
            accept_all: yes
            torrent_scrub:
              - comment
              - info.x_cross_seed
              - field.that.never.exists
          test_off:
            mock:
              - {title: 'off.LICENSE-resume', file: '__tmp__/LICENSE-resume.torrent'}
            accept_all: yes
            torrent_scrub: off
    """

    # (clean?, filename) pairs: a 'clean' torrent contains nothing to scrub,
    # so scrubbing must leave it byte-identical.
    test_cases = (
        (True, 'test.torrent'),
        (False, 'LICENSE.torrent'),
        (False, 'LICENSE-resume.torrent'),
    )
    test_files = [i[1] for i in test_cases]

    @pytest.mark.filecopy(test_files, '__tmp__')
    def test_torrent_scrub(self, execute_task, tmpdir):
        """'all' mode: dirty torrents shrink and change info-hash; clean ones don't."""
        # Run task
        task = execute_task('test_all')

        for clean, filename in self.test_cases:
            original = Torrent.from_file(filename)
            title = os.path.splitext(filename)[0]

            modified = task.find_entry(title=title)
            assert modified, "%r cannot be found in %r" % (title, task)
            modified = modified.get('torrent')
            assert modified, "No 'torrent' key in %r" % (title,)

            osize = os.path.getsize(filename)
            msize = tmpdir.join(filename).size()

            # Dump small torrents on demand (flip the 0 to 1 while debugging)
            if 0 and not clean:
                print("original=%r" % original.content)
                print("modified=%r" % modified.content)

            # Make sure essentials survived
            assert 'announce' in modified.content
            assert 'info' in modified.content
            assert 'name' in modified.content['info']
            assert 'piece length' in modified.content['info']
            assert 'pieces' in modified.content['info']

            # Check that hashes have changed accordingly
            if clean:
                assert osize == msize, "Filesizes aren't supposed to differ!"
                assert original.info_hash == modified.info_hash, 'info dict changed in ' + filename
            else:
                assert osize > msize, "Filesizes must be different!"
                assert original.info_hash != modified.info_hash, filename + " wasn't scrubbed!"

            # Check essential keys were scrubbed
            if filename == 'LICENSE.torrent':
                assert 'x_cross_seed' in original.content['info']
                assert 'x_cross_seed' not in modified.content['info']

            if filename == 'LICENSE-resume.torrent':
                assert 'libtorrent_resume' in original.content
                assert 'libtorrent_resume' not in modified.content

    @pytest.mark.filecopy(test_files, '__tmp__')
    def test_torrent_scrub_fields(self, execute_task):
        """Field-list mode scrubs only the listed keys; unknown fields are ignored."""
        task = execute_task('test_fields')
        title = 'fields.LICENSE'

        torrent = task.find_entry(title=title)
        assert torrent, "%r cannot be found in %r" % (title, task)
        torrent = torrent.get('torrent')
        assert torrent, "No 'torrent' key in %r" % (title,)

        assert 'name' in torrent.content['info'], "'info.name' was lost"
        assert 'comment' not in torrent.content, "'comment' not scrubbed"
        assert 'x_cross_seed' not in torrent.content['info'], "'info.x_cross_seed' not scrubbed"

    @pytest.mark.filecopy(test_files, '__tmp__')
    def test_torrent_scrub_off(self, execute_task, tmpdir):
        """With torrent_scrub 'off', files must pass through with identical size."""
        execute_task('test_off')

        for filename in self.test_files:
            osize = os.path.getsize(filename)
            copy_path = tmpdir.join(filename)
            msize = copy_path.size()
            # BUG FIX: the original failure message referenced self.__tmp__,
            # an attribute this class never defines, so a size mismatch would
            # raise AttributeError instead of reporting the real failure.
            assert osize == msize, "Filesizes aren't supposed to differ (%r %d, %r %d)!" % (
                filename, osize, copy_path.strpath, msize)
@pytest.mark.usefixtures('tmpdir')
class TestTorrentAlive(object):
    """Tests the torrent_alive plugin: seed-count gating and rerun behaviour."""

    config = """
        templates:
          global:
            accept_all: yes
        tasks:
          test_torrent_alive_fail:
            mock:
              - {title: 'test', file: '__tmp__/test.torrent', url: fake}
            torrent_alive: 100000
          test_torrent_alive_pass:
            mock:
              - {title: 'test', file: '__tmp__/test.torrent', url: fake}
            torrent_alive: 0
    """

    @pytest.mark.filecopy('test.torrent', '__tmp__/test.torrent')
    @mock.patch('flexget.utils.requests.get')
    def test_torrent_alive_fail(self, mocked_request, execute_task):
        # Tracker HTTP requests are mocked out, so the seed requirement can't be met.
        task = execute_task('test_torrent_alive_fail')
        assert not task.accepted, 'Torrent should not have met seed requirement.'
        assert task._rerun_count == 1, ('Task should have been rerun 1 time. Was rerun %s times.' %
                                        task._rerun_count)

        # Run it again to make sure remember_rejected prevents a rerun from occurring
        task = execute_task('test_torrent_alive_fail')
        assert not task.accepted, 'Torrent should have been rejected by remember_rejected.'
        assert task._rerun_count == 0, 'Task should not have been rerun.'

    @pytest.mark.filecopy('test.torrent', '__tmp__/test.torrent')
    def test_torrent_alive_pass(self, execute_task):
        # A threshold of 0 seeds always passes without triggering a rerun.
        task = execute_task('test_torrent_alive_pass')
        assert task.accepted
        assert task._rerun_count == 0, 'Torrent should have been accepted without rerun.'

    def test_torrent_alive_udp_invalid_port(self):
        # Malformed or unroutable tracker URLs must report 0 seeds, not raise.
        from flexget.plugins.filter.torrent_alive import get_udp_seeds
        assert get_udp_seeds('udp://[2001::1]/announce', 'HASH') == 0
        assert get_udp_seeds('udp://[::1]/announce', 'HASH') == 0
        assert get_udp_seeds('udp://["2100::1"]:-1/announce', 'HASH') == 0
        assert get_udp_seeds('udp://127.0.0.1/announce', 'HASH') == 0
        assert get_udp_seeds('udp://127.0.0.1:-1/announce', 'HASH') == 0
        assert get_udp_seeds('udp://127.0.0.1:PORT/announce', 'HASH') == 0
        assert get_udp_seeds('udp://127.0.0.1:65536/announce', 'HASH') == 0
class TestRtorrentMagnet(object):
    """Tests rtorrent_magnet: writes a meta-<title>.torrent stub for magnet links."""
    # NOTE(review): presumably opts this class into the '__tmp__' placeholder
    # substitution used in the config below -- confirm against the conftest.
    __tmp__ = True
    config = """
        tasks:
          test:
            mock:
              - title: 'test'
                url: 'magnet:?xt=urn:btih:HASH&dn=title&tr=http://torrent.ubuntu.com:6969/announce'
            rtorrent_magnet: __tmp__
            accept_all: yes
    """

    def test_rtorrent_magnet(self, execute_task, tmpdir):
        execute_task('test')
        fullpath = tmpdir.join('meta-test.torrent')
        assert fullpath.isfile()
        # The stub is a bencoded dict holding the original magnet URI.
        assert (fullpath.read() ==
                'd10:magnet-uri76:magnet:?xt=urn:btih:HASH&dn=title&tr=http://torrent.ubuntu.com:6969/announcee')
| mit |
sujithshankar/NetworkManager | examples/python/dbus/vpn.py | 15 | 5344 | #!/usr/bin/env python
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright (C) 2009 Novell, Inc.
# Copyright (C) 2009 Red Hat, Inc.
#
# Run this script without any arguments to list the available connection uuids.

# The uuid of the connection to activate
CONNECTION_UUID="ac6dc9b2-85ef-4311-83d8-add5d7db3f59"

# UID to use. Note that NM only allows the owner of the connection to activate it.
#UID=1000
UID=0

import sys
import os
import dbus
from dbus.mainloop.glib import DBusGMainLoop
import gobject

# Install the GLib main loop as the default for dbus-python so that the
# asynchronous reply/error handlers below can be dispatched by gobject.MainLoop.
DBusGMainLoop(set_as_default=True)
def get_connections():
    """Return the object paths of all connections known to NetworkManager."""
    bus = dbus.SystemBus()
    settings = dbus.Interface(
        bus.get_object('org.freedesktop.NetworkManager',
                       '/org/freedesktop/NetworkManager/Settings'),
        dbus_interface='org.freedesktop.NetworkManager.Settings')
    return settings.ListConnections()
def get_connection_by_uuid(uuid):
    """Return the object path of the connection whose UUID matches, or None."""
    bus = dbus.SystemBus()
    for path in get_connections():
        conn = dbus.Interface(
            bus.get_object('org.freedesktop.NetworkManager', path),
            dbus_interface='org.freedesktop.NetworkManager.Settings.Connection')
        if conn.GetSettings()['connection']['uuid'] == uuid:
            return path
    return None
def list_uuids():
    """Print "uuid - id (type)" for every connection NetworkManager knows."""
    bus = dbus.SystemBus()
    for c in get_connections():
        proxy = bus.get_object('org.freedesktop.NetworkManager', c)
        iface = dbus.Interface(proxy, dbus_interface='org.freedesktop.NetworkManager.Settings.Connection')
        settings = iface.GetSettings()
        conn = settings['connection']
        print "%s - %s (%s)" % (conn['uuid'], conn['id'], conn['type'])
def get_active_connection_path(uuid):
    """Return the ActiveConnection object path for the given connection UUID.

    Returns None when the connection is not currently active.
    """
    bus = dbus.SystemBus()
    proxy = bus.get_object('org.freedesktop.NetworkManager', '/org/freedesktop/NetworkManager')
    iface = dbus.Interface(proxy, dbus_interface='org.freedesktop.DBus.Properties')
    active_connections = iface.Get('org.freedesktop.NetworkManager', 'ActiveConnections')
    # FIX: dropped the unused 'all_connections = get_connections()' call,
    # which issued a pointless extra round of D-Bus requests.
    for a in active_connections:
        # Each active connection exposes the path of its underlying settings
        # connection; compare that connection's UUID with the requested one.
        proxy = bus.get_object('org.freedesktop.NetworkManager', a)
        iface = dbus.Interface(proxy, dbus_interface='org.freedesktop.DBus.Properties')
        path = iface.Get('org.freedesktop.NetworkManager.Connection.Active', 'Connection')
        proxy = bus.get_object('org.freedesktop.NetworkManager', path)
        iface = dbus.Interface(proxy, dbus_interface='org.freedesktop.NetworkManager.Settings.Connection')
        settings = iface.GetSettings()
        if settings['connection']['uuid'] == uuid:
            return a
    return None
def get_wifi_device_path():
    """Return the object path of the first Wi-Fi device NM reports, or None."""
    bus = dbus.SystemBus()
    nm = dbus.Interface(
        bus.get_object('org.freedesktop.NetworkManager', '/org/freedesktop/NetworkManager'),
        dbus_interface='org.freedesktop.NetworkManager')
    for dev_path in nm.GetDevices():
        props = dbus.Interface(
            bus.get_object('org.freedesktop.NetworkManager', dev_path),
            dbus_interface='org.freedesktop.DBus.Properties')
        devtype = props.Get('org.freedesktop.NetworkManager.Device', 'DeviceType')
        # DeviceType 2 selects Wi-Fi devices (value used by the original code).
        if devtype == 2:
            return dev_path
    return None
def activate_connection(connection_path, device_path):
    """Ask NetworkManager to activate the connection on the given device.

    The call is asynchronous: the handlers below terminate the process with
    exit status 0 on success and 1 on failure (dispatched by the main loop).
    """
    def reply_handler(opath):
        print "Success: device activating"
        sys.exit(0)

    def error_handler(*args):
        print "Error activating device: %s" % args
        sys.exit(1)

    bus = dbus.SystemBus()
    proxy = bus.get_object('org.freedesktop.NetworkManager', '/org/freedesktop/NetworkManager')
    iface = dbus.Interface(proxy, dbus_interface='org.freedesktop.NetworkManager')
    # "/" as the specific-object argument means "no particular object".
    iface.ActivateConnection('org.freedesktop.NetworkManager',
                             connection_path,
                             device_path,
                             "/",
                             reply_handler=reply_handler,
                             error_handler=error_handler)
# Change the UID first if required
if UID != 0:
    os.setuid(UID)

# Are we configured?
if not len(CONNECTION_UUID):
    print "missing connection UUID"
    sys.exit(0)

# Resolve the configured UUID to a connection object path.
connection_path = get_connection_by_uuid(CONNECTION_UUID)
if not connection_path:
    # Configured VPN connection is not known to NM, check CONNECTION_UUID.
    print "couldn't find the connection"
    sys.exit(1)

device_path = get_wifi_device_path()
if not device_path:
    print "no wifi device found"
    sys.exit(1)

# Is it already activated?
if get_active_connection_path(CONNECTION_UUID):
    print "already connected"
    sys.exit(0)

print "Activating connection..."
activate_connection(connection_path, device_path)

# Spin a GLib main loop so the async reply/error handlers can fire;
# they call sys.exit() to terminate the script.
loop = gobject.MainLoop()
loop.run()
| gpl-2.0 |
yoshinarikou/MilleFeuilleRaspberryPi | milpython/GpioInputTest.py | 1 | 1189 | ########################################################################
# MCU Gear(R) system Sample Code
# Auther:y.kou.
# web site: http://www.milletool.com/
# Date : 8/OCT/2016
#
########################################################################
#Revision Information
#
########################################################################
#!/usr/bin/python
#set IOs
#IOdata = [p.inpin[0],p.inpin[1],p.inpin[2],p.inpin[3]]
#in wiringdata.py
from milpy import mil
from milpy import milMod
from milpy import wiringdata
from milpy import gpio
import time
#set up for GPIO device
wiringdata.initIO()
# Module attached to baseboard connector No.0
modA = milMod.milMod(gpio.getInfo(0)) #Baseboard connector No.0
if __name__=='__main__':
    try:
        print "Please,set IOs"
        print "IOdata = [p.inpin[0],p.inpin[1],p.inpin[2],p.inpin[3]]"
        print "in wiringdata.py"
        # Poll the four digital inputs once a second until interrupted.
        while(1):
            modA.connect()
            print " "
            print "myPin 1 = ",gpio.DigitalIn(modA,0)
            print "myPin 2 = ",gpio.DigitalIn(modA,1)
            print "myPin 3 = ",gpio.DigitalIn(modA,2)
            print "myPin 4 = ",gpio.DigitalIn(modA,3)
            time.sleep(1)
            modA.disconnect()
    except KeyboardInterrupt:
        # Ctrl+C: release the GPIO resources before exiting.
        print("detect key interrupt [ctrl]+ [C] \n")
        mil.cleanup()
| mit |
FrankDuan/df_code | dragonflow/tests/fullstack/test_base.py | 1 | 2353 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os_client_config
from oslo_config import cfg
from oslo_utils import importutils
from neutron.common import config as common_config
from neutronclient.neutron import client
from dragonflow.common import common_params
from dragonflow.db import api_nb
from dragonflow.tests import base
# Register the dragonflow options under the [df] section so cfg.CONF.df works.
cfg.CONF.register_opts(common_params.df_opts, 'df')


def get_cloud_config(cloud='devstack-admin'):
    """Return the os-client-config cloud definition named *cloud*."""
    return os_client_config.OpenStackConfig().get_one_cloud(cloud=cloud)


def credentials(cloud='devstack-admin'):
    """Retrieves credentials to run functional tests"""
    return get_cloud_config(cloud=cloud).get_auth_args()
class DFTestBase(base.BaseTestCase):
    """Base class for dragonflow fullstack tests.

    Builds a Neutron client from the devstack-admin cloud credentials and a
    dragonflow NB API connection, and closes resources registered via
    store() in reverse order on teardown.
    """

    def setUp(self):
        super(DFTestBase, self).setUp()
        creds = credentials()
        tenant_name = creds['project_name']
        # This client version expects the Keystone v2 endpoint.
        auth_url = creds['auth_url'] + "/v2.0"
        self.neutron = client.Client('2.0', username=creds['username'],
            password=creds['password'], auth_url=auth_url,
            tenant_name=tenant_name)
        self.neutron.format = 'json'
        common_config.init(['--config-file', '/etc/neutron/neutron.conf'])
        # The NB database driver class is configurable via [df] nb_db_class.
        db_driver_class = importutils.import_class(cfg.CONF.df.nb_db_class)
        self.nb_api = api_nb.NbApi(db_driver_class())
        self.nb_api.initialize(db_ip=cfg.CONF.df.remote_db_ip,
            db_port=cfg.CONF.df.remote_db_port)
        self.local_ip = cfg.CONF.df.local_ip
        # Cleanup callbacks registered via store(); run LIFO in tearDown().
        self.__objects_to_close = []

    def store(self, obj, close_func=None):
        """Register *obj* for cleanup (defaults to obj.close) and return it."""
        close_func = close_func if close_func else obj.close
        self.__objects_to_close.append(close_func)
        return obj

    def tearDown(self):
        # Close in reverse creation order so dependents close first.
        for close_func in reversed(self.__objects_to_close):
            close_func()
        super(DFTestBase, self).tearDown()
| apache-2.0 |
pongad/api-client-staging | generated/python/googleapis-common-protos/google/api/control_pb2.py | 17 | 2153 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/api/control.proto
import sys
# Python 2: bytes literals pass through unchanged; Python 3: re-encode to
# latin-1 so the serialized descriptor strings below stay raw bytes.
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()

# File descriptor carrying the serialized google/api/control.proto.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='google/api/control.proto',
  package='google.api',
  syntax='proto3',
  serialized_pb=_b('\n\x18google/api/control.proto\x12\ngoogle.api\"\x1e\n\x07\x43ontrol\x12\x13\n\x0b\x65nvironment\x18\x01 \x01(\tB\'\n\x0e\x63om.google.apiB\x0c\x43ontrolProtoP\x01\xa2\x02\x04GAPIb\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

# Descriptor for the Control message: a single string field "environment".
_CONTROL = _descriptor.Descriptor(
  name='Control',
  full_name='google.api.Control',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='environment', full_name='google.api.Control.environment', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=40,
  serialized_end=70,
)
DESCRIPTOR.message_types_by_name['Control'] = _CONTROL

# Concrete message class generated at runtime from the descriptor above.
Control = _reflection.GeneratedProtocolMessageType('Control', (_message.Message,), dict(
  DESCRIPTOR = _CONTROL,
  __module__ = 'google.api.control_pb2'
  # @@protoc_insertion_point(class_scope:google.api.Control)
  ))
_sym_db.RegisterMessage(Control)

DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\016com.google.apiB\014ControlProtoP\001\242\002\004GAPI'))
# @@protoc_insertion_point(module_scope)
| bsd-3-clause |
ictwanglei/phoenix | bin/phoenix_sandbox.py | 24 | 2016 | #!/usr/bin/env python
############################################################################
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
############################################################################
import os
import subprocess
import sys
import phoenix_utils
phoenix_utils.setPath()
base_dir = os.path.join(phoenix_utils.current_dir, '..')
phoenix_target_dir = os.path.join(base_dir, 'phoenix-core', 'target')
cp_file_path = os.path.join(phoenix_target_dir, 'cached_classpath.txt')
if not os.path.exists(cp_file_path):
sys.err.write("cached_classpath.txt is not present under "
+ "phoenix-core/target, please rebuild the project first")
sys.exit(1)
logging_config = os.path.join(base_dir, 'bin', 'sandbox-log4j.properties')
cp_components = [phoenix_target_dir + "/*"]
with open(cp_file_path, 'rb') as cp_file:
cp_components.append(cp_file.read())
java_cmd = ("java -Dlog4j.configuration=file:%s " +
"-cp %s org.apache.phoenix.Sandbox") % (
logging_config, ":".join(cp_components))
proc = subprocess.Popen(java_cmd, shell=True)
try:
proc.wait()
except KeyboardInterrupt:
print "Shutting down sandbox..."
proc.terminate()
proc.wait()
print "Sandbox is stopped"
| apache-2.0 |
ahmed-mahran/hue | apps/zookeeper/setup.py | 38 | 1178 | # Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup, find_packages
from hueversion import VERSION

# Packaging metadata for the "ZooKeeper Browser" Hue application.
setup(
  name = "zookeeper",
  version = VERSION,
  author = "Hue",
  url = 'http://github.com/cloudera/hue',
  description = "ZooKeeper Browser",
  packages = find_packages('src'),
  package_dir = {'': 'src'},
  install_requires = ['setuptools', 'desktop'],
  # Registers the app with Hue's desktop SDK plugin mechanism.
  entry_points = { 'desktop.sdk.application': 'zookeeper=zookeeper' },
)
| apache-2.0 |
kiith-sa/QGIS | python/plugins/fTools/tools/doMergeShapes.py | 7 | 13100 | # -*- coding: utf-8 -*-
"""
***************************************************************************
doMergeShapes.py - merge multiple shapefile into one
--------------------------------------
Date : 30-Mar-2010
Copyright : (C) 2010 by Alexander Bruy
Email : alexander dot bruy at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
from qgis.gui import *
import ftools_utils
from ui_frmMergeShapes import Ui_Dialog
class Dialog( QDialog, Ui_Dialog ):
  """Merge-shapefiles dialog: collects inputs, output path and encodings,
  then runs the merge in a background ShapeMergeThread."""

  def __init__( self, iface ):
    QDialog.__init__( self, iface.mainWindow() )
    self.setupUi( self )
    self.iface = iface
    self.mergeThread = None
    self.inputFiles = None
    self.outFileName = None
    self.inEncoding = None
    self.btnOk = self.buttonBox.button( QDialogButtonBox.Ok )
    self.btnClose = self.buttonBox.button( QDialogButtonBox.Close )
    # Default (directory) mode; changeMode() rewires btnSelectDir.
    QObject.connect( self.btnSelectDir, SIGNAL( "clicked()" ), self.inputDir )
    QObject.connect( self.btnSelectFile, SIGNAL( "clicked()" ), self.outFile )
    QObject.connect( self.chkListMode, SIGNAL( "stateChanged( int )" ), self.changeMode )
    QObject.connect( self.leOutShape, SIGNAL( "editingFinished()" ), self.updateOutFile )

  def inputDir( self ):
    """Directory mode: pick a directory and collect its *.shp files."""
    settings = QSettings()
    lastDir = settings.value( "/fTools/lastShapeDir", "." )
    inDir = QFileDialog.getExistingDirectory( self,
      self.tr( "Select directory with shapefiles to merge" ),
      lastDir )
    if not inDir:
      return
    workDir = QDir( inDir )
    workDir.setFilter( QDir.Files | QDir.NoSymLinks | QDir.NoDotAndDotDot )
    nameFilter = [ "*.shp", "*.SHP" ]
    workDir.setNameFilters( nameFilter )
    self.inputFiles = workDir.entryList()
    if len( self.inputFiles ) == 0:
      QMessageBox.warning( self, self.tr( "No shapefiles found" ),
        self.tr( "There are no shapefiles in this directory. Please select another one." ) )
      self.inputFiles = None
      return
    settings.setValue( "/fTools/lastShapeDir", inDir )
    self.progressFiles.setRange( 0, len( self.inputFiles ) )
    self.leInputDir.setText( inDir )

  def outFile( self ):
    """Ask for the output shapefile path and encoding."""
    ( self.outFileName, self.encoding ) = ftools_utils.saveDialog( self )
    if self.outFileName is None or self.encoding is None:
      return
    self.leOutShape.setText( self.outFileName )

  def inputFile( self ):
    """List mode: pick individual shapefiles and the input encoding."""
    ( files, self.inEncoding ) = ftools_utils.openDialog( self, dialogMode="ManyFiles" )
    if files is None or self.inEncoding is None:
      self.inputFiles = None
      return
    self.inputFiles = []
    for f in files:
      fileName = QFileInfo( f ).fileName()
      self.inputFiles.append( fileName )
    self.progressFiles.setRange( 0, len( self.inputFiles ) )
    # Full paths joined by ';' -- accept() splits on the same separator.
    self.leInputDir.setText( ";".join( files ) )

  def changeMode( self ):
    """Toggle between directory mode and explicit-file-list mode."""
    if self.chkListMode.isChecked():
      self.label.setText( self.tr( "Input files" ) )
      QObject.disconnect( self.btnSelectDir, SIGNAL( "clicked()" ), self.inputDir )
      QObject.connect( self.btnSelectDir, SIGNAL( "clicked()" ), self.inputFile )
      self.lblGeometry.setEnabled( False )
      self.cmbGeometry.setEnabled( False )
    else:
      self.label.setText( self.tr( "Input directory" ) )
      QObject.disconnect( self.btnSelectDir, SIGNAL( "clicked()" ), self.inputFile )
      QObject.connect( self.btnSelectDir, SIGNAL( "clicked()" ), self.inputDir )
      self.lblGeometry.setEnabled( True )
      self.cmbGeometry.setEnabled( True )

  def updateOutFile( self ):
    """Sync the output path typed by hand into self.outFileName."""
    self.outFileName = self.leOutShape.text()
    settings = QSettings()
    self.outEncoding = settings.value( "/UI/encoding" )

  def reject( self ):
    QDialog.reject( self )

  def accept( self ):
    """Validate inputs and start the merge thread."""
    if self.inputFiles is None:
      workDir = QDir( self.leInputDir.text() )
      workDir.setFilter( QDir.Files | QDir.NoSymLinks | QDir.NoDotAndDotDot )
      nameFilter = [ "*.shp", "*.SHP" ]
      workDir.setNameFilters( nameFilter )
      self.inputFiles = workDir.entryList()
      if len( self.inputFiles ) == 0:
        QMessageBox.warning( self, self.tr( "No shapefiles found" ),
          self.tr( "There are no shapefiles in this directory. Please select another one." ) )
        self.inputFiles = None
        return
    if self.outFileName is None:
      QMessageBox.warning( self, self.tr( "No output file" ),
        self.tr( "Please specify output file." ) )
      return
    if self.chkListMode.isChecked():
      files = self.leInputDir.text().split( ";" )
      baseDir = QFileInfo( files[ 0 ] ).absolutePath()
    else:
      baseDir = self.leInputDir.text()
    # look for shapes with specified geometry type
    self.inputFiles = ftools_utils.getShapesByGeometryType( baseDir, self.inputFiles, self.cmbGeometry.currentIndex() )
    if self.inputFiles is None:
      QMessageBox.warning( self, self.tr( "No shapefiles found" ),
        self.tr( "There are no shapefiles with the given geometry type. Please select an available geometry type." ) )
      return
    self.progressFiles.setRange( 0, len( self.inputFiles ) )
    outFile = QFile( self.outFileName )
    if outFile.exists():
      if not QgsVectorFileWriter.deleteShapeFile( self.outFileName ):
        QMessageBox.warning( self, self.tr( "Delete error" ), self.tr( "Can't delete file %s" ) % ( self.outFileName ) )
        return
    if self.inEncoding == None:
      self.inEncoding = "System"
    self.btnOk.setEnabled( False )
    # NOTE(review): this reads self.encoding, which is only set by
    # outFile(); updateOutFile() sets self.outEncoding instead. If the user
    # only typed the path, self.encoding may be unset -- verify.
    self.mergeThread = ShapeMergeThread( baseDir, self.inputFiles, self.inEncoding, self.outFileName, self.encoding )
    QObject.connect( self.mergeThread, SIGNAL( "rangeChanged( PyQt_PyObject )" ), self.setFeatureProgressRange )
    QObject.connect( self.mergeThread, SIGNAL( "checkStarted()" ), self.setFeatureProgressFormat )
    QObject.connect( self.mergeThread, SIGNAL( "checkFinished()" ), self.resetFeatureProgressFormat )
    QObject.connect( self.mergeThread, SIGNAL( "fileNameChanged( PyQt_PyObject )" ), self.setShapeProgressFormat )
    QObject.connect( self.mergeThread, SIGNAL( "featureProcessed()" ), self.featureProcessed )
    QObject.connect( self.mergeThread, SIGNAL( "shapeProcessed()" ), self.shapeProcessed )
    QObject.connect( self.mergeThread, SIGNAL( "processingFinished()" ), self.processingFinished )
    QObject.connect( self.mergeThread, SIGNAL( "processingInterrupted()" ), self.processingInterrupted )
    # While running, the Close button becomes Cancel and aborts the thread.
    self.btnClose.setText( self.tr( "Cancel" ) )
    QObject.disconnect( self.buttonBox, SIGNAL( "rejected()" ), self.reject )
    QObject.connect( self.btnClose, SIGNAL( "clicked()" ), self.stopProcessing )
    self.mergeThread.start()

  def setFeatureProgressRange( self, maximum ):
    self.progressFeatures.setRange( 0, maximum )
    self.progressFeatures.setValue( 0 )

  def setFeatureProgressFormat( self ):
    self.progressFeatures.setFormat( "Checking files: %p% ")

  def resetFeatureProgressFormat( self ):
    self.progressFeatures.setFormat( "%p% ")

  def featureProcessed( self ):
    self.progressFeatures.setValue( self.progressFeatures.value() + 1 )

  def setShapeProgressFormat( self, fileName ):
    self.progressFiles.setFormat( "%p% " + fileName )

  def shapeProcessed( self ):
    self.progressFiles.setValue( self.progressFiles.value() + 1 )

  def processingFinished( self ):
    """Thread done: optionally load the result layer, then restore the UI."""
    self.stopProcessing()
    if self.chkAddToCanvas.isChecked():
      if not ftools_utils.addShapeToCanvas( unicode( self.outFileName ) ):
        QMessageBox.warning( self, self.tr( "Merging" ),
          self.tr( "Error loading output shapefile:\n%s" ) % ( unicode( self.outFileName ) ) )
    self.restoreGui()

  def processingInterrupted( self ):
    self.restoreGui()

  def stopProcessing( self ):
    if self.mergeThread != None:
      self.mergeThread.stop()
      self.mergeThread = None

  def restoreGui( self ):
    """Reset progress bars and button wiring to the idle state."""
    self.progressFiles.setFormat( "%p%" )
    self.progressFeatures.setRange( 0, 100 )
    self.progressFeatures.setValue( 0 )
    self.progressFiles.setValue( 0 )
    QObject.connect( self.buttonBox, SIGNAL( "rejected()" ), self.reject )
    self.btnClose.setText( self.tr( "Close" ) )
    self.btnOk.setEnabled( True )
class ShapeMergeThread( QThread ):
  """Worker thread that merges several shapefiles into one output file.

  Progress and completion are reported through custom Qt signals; stop()
  requests cancellation via a mutex-protected flag polled between files.
  """

  def __init__( self, dir, shapes, inputEncoding, outputFileName, outputEncoding ):
    QThread.__init__( self, QThread.currentThread() )
    self.baseDir = dir
    self.shapes = shapes
    self.inputEncoding = inputEncoding
    self.outputFileName = outputFileName
    self.outputEncoding = outputEncoding
    self.mutex = QMutex()
    self.stopMe = 0

  def run( self ):
    self.mutex.lock()
    self.stopMe = 0
    self.mutex.unlock()
    interrupted = False
    # create attribute list with uniquie fields
    # from all selected layers
    mergedFields = []
    self.emit( SIGNAL( "rangeChanged( PyQt_PyObject )" ), len( self.shapes ) )
    self.emit( SIGNAL( "checkStarted()" ) )
    # First pass: build the union of all attribute fields and, per input
    # shape, a map from its field index to the merged field index.
    shapeIndex = 0
    fieldMap = {}
    for fileName in self.shapes:
      layerPath = QFileInfo( self.baseDir + "/" + fileName ).absoluteFilePath()
      newLayer = QgsVectorLayer( layerPath, QFileInfo( layerPath ).baseName(), "ogr" )
      if not newLayer.isValid():
        continue
      newLayer.setProviderEncoding( self.inputEncoding )
      vprovider = newLayer.dataProvider()
      fieldMap[shapeIndex] = {}
      fieldIndex = 0
      for layerField in vprovider.fields():
        fieldFound = False
        for mergedFieldIndex, mergedField in enumerate(mergedFields):
          if mergedField.name() == layerField.name() and mergedField.type() == layerField.type():
            fieldFound = True
            fieldMap[shapeIndex][fieldIndex] = mergedFieldIndex
            if mergedField.length() < layerField.length():
              # suit the field size to the field of this layer
              mergedField.setLength( layerField.length() )
            break
        if not fieldFound:
          fieldMap[shapeIndex][fieldIndex] = len(mergedFields)
          mergedFields.append( layerField )
        fieldIndex += 1
      shapeIndex += 1
      self.emit( SIGNAL( "featureProcessed()" ) )
    self.emit( SIGNAL( "checkFinished()" ) )
    # get information about shapefiles
    # CRS and geometry type are taken from the first input shape.
    layerPath = QFileInfo( self.baseDir + "/" + self.shapes[ 0 ] ).absoluteFilePath()
    newLayer = QgsVectorLayer( layerPath, QFileInfo( layerPath ).baseName(), "ogr" )
    self.crs = newLayer.crs()
    self.geom = newLayer.wkbType()
    vprovider = newLayer.dataProvider()
    fields = QgsFields()
    for f in mergedFields:
      fields.append(f)
    writer = QgsVectorFileWriter( self.outputFileName, self.outputEncoding,
      fields, self.geom, self.crs )
    # Second pass: copy every feature, remapping attributes through fieldMap.
    shapeIndex = 0
    for fileName in self.shapes:
      layerPath = QFileInfo( self.baseDir + "/" + fileName ).absoluteFilePath()
      newLayer = QgsVectorLayer( layerPath, QFileInfo( layerPath ).baseName(), "ogr" )
      if not newLayer.isValid():
        continue
      newLayer.setProviderEncoding( self.inputEncoding )
      vprovider = newLayer.dataProvider()
      layerFields = vprovider.fields()
      nFeat = vprovider.featureCount()
      self.emit( SIGNAL( "rangeChanged( PyQt_PyObject )" ), nFeat )
      self.emit( SIGNAL( "fileNameChanged( PyQt_PyObject )" ), fileName )
      inFeat = QgsFeature()
      outFeat = QgsFeature()
      inGeom = QgsGeometry()
      fit = vprovider.getFeatures()
      while fit.nextFeature( inFeat ):
        # Fields missing from this layer keep the "" placeholder.
        mergedAttrs = [""] * len(mergedFields)
        # fill available attributes with values
        fieldIndex = 0
        for v in inFeat.attributes():
          if fieldMap.has_key(shapeIndex) and fieldMap[shapeIndex].has_key(fieldIndex):
            mergedAttrs[ fieldMap[shapeIndex][fieldIndex] ] = v
          fieldIndex += 1
        inGeom = QgsGeometry( inFeat.geometry() )
        outFeat.setGeometry( inGeom )
        outFeat.setAttributes( mergedAttrs )
        writer.addFeature( outFeat )
        self.emit( SIGNAL( "featureProcessed()" ) )
      self.emit( SIGNAL( "shapeProcessed()" ) )
      # Poll the cancellation flag between files.
      self.mutex.lock()
      s = self.stopMe
      self.mutex.unlock()
      if s == 1:
        interrupted = True
        break
      shapeIndex += 1
    # Deleting the writer flushes and closes the output shapefile.
    del writer
    if not interrupted:
      self.emit( SIGNAL( "processingFinished()" ) )
    else:
      self.emit( SIGNAL( "processingInterrupted()" ) )

  def stop( self ):
    """Request cancellation and block until run() returns."""
    self.mutex.lock()
    self.stopMe = 1
    self.mutex.unlock()
    QThread.wait( self )
| gpl-2.0 |
gquirozbogner/contentbox-master | third_party/django/utils/archive.py | 229 | 6935 | """
Based on "python-archive" -- http://pypi.python.org/pypi/python-archive/
Copyright (c) 2010 Gary Wilson Jr. <gary.wilson@gmail.com> and contributors.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import os
import shutil
import tarfile
import zipfile
from django.utils import six
class ArchiveException(Exception):
    """
    Base exception class for all archive errors.
    """


# Raised by Archive._archive_cls when no implementation matches the input.
class UnrecognizedArchiveFormat(ArchiveException):
    """
    Error raised when passed file is not a recognized archive format.
    """
def extract(path, to_path=''):
    """
    Unpack the tar or zip file at the specified path to the directory
    specified by to_path.
    """
    archive = Archive(path)
    try:
        archive.extract(to_path)
    finally:
        # Mirror the context-manager protocol: always release the handle.
        archive.close()
class Archive(object):
    """
    The external API class that encapsulates an archive implementation.
    """

    def __init__(self, file):
        # Pick the implementation class from the name, then instantiate it
        # on the same input.
        self._archive = self._archive_cls(file)(file)

    @staticmethod
    def _archive_cls(file):
        """Return the implementation class matching the file's extension."""
        if isinstance(file, six.string_types):
            filename = file
        else:
            # File-like objects must carry a usable ``name`` attribute.
            try:
                filename = file.name
            except AttributeError:
                raise UnrecognizedArchiveFormat(
                    "File object not a recognized archive format.")
        # Try the last extension first (".zip", ".tgz"), then the inner one
        # of a double extension (".tar.gz" -> ".tar").
        remainder, last_ext = os.path.splitext(filename.lower())
        archive_class = extension_map.get(last_ext)
        if not archive_class:
            archive_class = extension_map.get(os.path.splitext(remainder)[1])
        if not archive_class:
            raise UnrecognizedArchiveFormat(
                "Path not a recognized archive format: %s" % filename)
        return archive_class

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()

    def extract(self, to_path=''):
        """Delegate extraction to the wrapped implementation."""
        self._archive.extract(to_path)

    def list(self):
        """Print the archive's contents via the wrapped implementation."""
        self._archive.list()

    def close(self):
        self._archive.close()
class BaseArchive(object):
    """
    Base Archive class. Implementations should inherit this class.
    """

    def split_leading_dir(self, path):
        """Split *path* into its first component and the remainder.

        Returns a 2-item list when a separator is present, otherwise the
        tuple ``(path, '')``.
        """
        path = str(path).lstrip('/').lstrip('\\')
        has_slash = '/' in path
        has_backslash = '\\' in path
        # Split on whichever separator appears first.
        if has_slash and (not has_backslash
                          or path.find('/') < path.find('\\')):
            return path.split('/', 1)
        if has_backslash:
            return path.split('\\', 1)
        return path, ''

    def has_leading_dir(self, paths):
        """
        Returns true if all the paths have the same leading path name
        (i.e., everything is in one subdirectory in an archive)
        """
        seen = None
        for entry in paths:
            leading = self.split_leading_dir(entry)[0]
            if not leading or (seen is not None and leading != seen):
                return False
            seen = leading
        return True

    def extract(self):
        raise NotImplementedError

    def list(self):
        raise NotImplementedError
class TarArchive(BaseArchive):
    """Tar implementation, backed by ``tarfile``."""

    def __init__(self, file):
        self._archive = tarfile.open(file)

    def list(self, *args, **kwargs):
        """Print the archive's table of contents to stdout."""
        self._archive.list(*args, **kwargs)

    def extract(self, to_path):
        """Extract all members below *to_path*, stripping one common
        leading directory when every member shares it."""
        # note: python<=2.5 doesnt seem to know about pax headers, filter them
        members = [member for member in self._archive.getmembers()
                   if member.name != 'pax_global_header']
        # Bug fix: compare member *names*, not TarInfo objects --
        # str(TarInfo) is an object repr without the path, which made the
        # leading-dir detection always fail.
        leading = self.has_leading_dir(x.name for x in members)
        for member in members:
            name = member.name
            if leading:
                name = self.split_leading_dir(name)[1]
            filename = os.path.join(to_path, name)
            if member.isdir():
                if filename and not os.path.exists(filename):
                    os.makedirs(filename)
            else:
                # Bug fix: initialise before the try so the finally clause
                # cannot raise NameError when extractfile() itself fails.
                extracted = None
                try:
                    extracted = self._archive.extractfile(member)
                except (KeyError, AttributeError) as exc:
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    print("In the tar file %s the member %s is invalid: %s" %
                          (name, member.name, exc))
                else:
                    dirname = os.path.dirname(filename)
                    if dirname and not os.path.exists(dirname):
                        os.makedirs(dirname)
                    with open(filename, 'wb') as outfile:
                        shutil.copyfileobj(extracted, outfile)
                finally:
                    if extracted:
                        extracted.close()

    def close(self):
        self._archive.close()
class ZipArchive(BaseArchive):
    """Zip implementation, backed by ``zipfile.ZipFile``."""

    def __init__(self, file):
        self._archive = zipfile.ZipFile(file)

    def list(self, *args, **kwargs):
        """Print the archive's table of contents to stdout."""
        self._archive.printdir(*args, **kwargs)

    def extract(self, to_path):
        """Write every entry below *to_path*, stripping one shared
        leading directory when all entries have it."""
        entries = self._archive.namelist()
        strip_leading = self.has_leading_dir(entries)
        for entry in entries:
            payload = self._archive.read(entry)
            if strip_leading:
                entry = self.split_leading_dir(entry)[1]
            target = os.path.join(to_path, entry)
            parent = os.path.dirname(target)
            if parent and not os.path.exists(parent):
                os.makedirs(parent)
            if target.endswith(('/', '\\')):
                # A directory
                if not os.path.exists(target):
                    os.makedirs(target)
            else:
                with open(target, 'wb') as out:
                    out.write(payload)

    def close(self):
        self._archive.close()
# Lowercased file extension -> implementation class. Archive._archive_cls
# consults this map, retrying on the inner extension for names like
# "x.tar.gz".
extension_map = {
    '.tar': TarArchive,
    '.tar.bz2': TarArchive,
    '.tar.gz': TarArchive,
    '.tgz': TarArchive,
    '.tz2': TarArchive,
    '.zip': ZipArchive,
}
| apache-2.0 |
andrea9a/oslab | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Core.py | 1891 | 3300 | # Core.py - Python extension for perf script, core functions
#
# Copyright (C) 2010 by Tom Zanussi <tzanussi@gmail.com>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
from collections import defaultdict
def autodict():
    """Return a dict whose missing keys spring into nested autodicts."""
    return defaultdict(autodict)
# Per-event registries filled in by the define_* helpers below:
# event name -> field name -> {'delim': ..., 'values': {value: name}}.
flag_fields = autodict()
symbolic_fields = autodict()
def define_flag_field(event_name, field_name, delim):
    """Register *field_name* of *event_name* as a flag field joined by *delim*."""
    flag_fields[event_name][field_name]['delim'] = delim

def define_flag_value(event_name, field_name, value, field_str):
    """Map one bit *value* of a flag field to its symbolic name."""
    flag_fields[event_name][field_name]['values'][value] = field_str

def define_symbolic_field(event_name, field_name):
    """Register a symbolic field; its table is filled by define_symbolic_value."""
    # nothing to do, really
    pass

def define_symbolic_value(event_name, field_name, value, field_str):
    """Map one exact *value* of a symbolic field to its name."""
    symbolic_fields[event_name][field_name]['values'][value] = field_str
def flag_str(event_name, field_name, value):
    """Render a flag-typed field value as its registered names, joined by
    the field's delimiter; returns "" when nothing matches."""
    string = ""
    if flag_fields[event_name][field_name]:
        print_delim = 0
        # Python 2 idiom: keys() returns a list, sorted in place.
        keys = flag_fields[event_name][field_name]['values'].keys()
        keys.sort()
        for idx in keys:
            # A zero value maps straight to the name registered for 0.
            if not value and not idx:
                string += flag_fields[event_name][field_name]['values'][idx]
                break
            if idx and (value & idx) == idx:
                if print_delim and flag_fields[event_name][field_name]['delim']:
                    string += " " + flag_fields[event_name][field_name]['delim'] + " "
                string += flag_fields[event_name][field_name]['values'][idx]
                print_delim = 1
                # Clear matched bits so combined masks are not double-counted.
                value &= ~idx
    return string
def symbol_str(event_name, field_name, value):
    """Render a symbolic field value as its registered name, or ""."""
    string = ""
    if symbolic_fields[event_name][field_name]:
        # Python 2 idiom: keys() returns a list, sorted in place.
        keys = symbolic_fields[event_name][field_name]['values'].keys()
        keys.sort()
        for idx in keys:
            if not value and not idx:
                string = symbolic_fields[event_name][field_name]['values'][idx]
                break
            # Symbolic fields match on exact value, not on bits.
            if (value == idx):
                string = symbolic_fields[event_name][field_name]['values'][idx]
                break
    return string
# Bit -> name table for the common trace-flags field.
trace_flags = {
    0x00: "NONE",
    0x01: "IRQS_OFF",
    0x02: "IRQS_NOSUPPORT",
    0x04: "NEED_RESCHED",
    0x08: "HARDIRQ",
    0x10: "SOFTIRQ",
}

def trace_flag_str(value):
    """Render the trace-flags bitmask as flag names joined by " | "."""
    names = []
    for flag in trace_flags.keys():
        if not value and not flag:
            # Zero maps straight to the name registered for 0x00.
            names.append(trace_flags[flag])
            break
        if flag and (value & flag) == flag:
            names.append(trace_flags[flag])
            # Clear matched bits so they are not reported twice.
            value &= ~flag
    return " | ".join(names)
def taskState(state):
    """Map a scheduler state code to its short name; "Unknown" otherwise."""
    states = {
        0: "R",
        1: "S",
        2: "D",
        64: "DEAD",
    }
    return states.get(state, "Unknown")
class EventHeaders:
    """Decoded common_* header fields of one perf trace event."""

    def __init__(self, common_cpu, common_secs, common_nsecs,
                 common_pid, common_comm, common_callchain):
        # Re-expose the common_* arguments under the short attribute
        # names scripts use.
        (self.cpu, self.secs, self.nsecs) = (common_cpu, common_secs,
                                             common_nsecs)
        (self.pid, self.comm, self.callchain) = (common_pid, common_comm,
                                                 common_callchain)

    def ts(self):
        """Event timestamp in nanoseconds."""
        whole_secs_ns = self.secs * (10 ** 9)
        return whole_secs_ns + self.nsecs

    def ts_format(self):
        """Timestamp rendered as "seconds.microseconds" text."""
        usecs = int(self.nsecs / 1000)
        return "%d.%d" % (self.secs, usecs)
| gpl-2.0 |
varunarya10/nova_test_latest | nova/api/openstack/compute/plugins/v3/server_usage.py | 36 | 2858 | # Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova import compute
ALIAS = "os-server-usage"
# Soft policy check: returns False when denied instead of raising.
authorize = extensions.os_compute_soft_authorizer(ALIAS)
# Prefix for the response keys, e.g. "OS-SRV-USG:launched_at".
resp_topic = "OS-SRV-USG"
class ServerUsageController(wsgi.Controller):
    """Extends server show/detail responses with usage timestamps
    (OS-SRV-USG:launched_at and OS-SRV-USG:terminated_at)."""

    def __init__(self, *args, **kwargs):
        super(ServerUsageController, self).__init__(*args, **kwargs)
        self.compute_api = compute.API()

    def _extend_server(self, server, instance):
        """Copy usage timestamps from the DB instance onto the response dict."""
        for k in ['launched_at', 'terminated_at']:
            key = "%s:%s" % (resp_topic, k)
            # NOTE(danms): Historically, this timestamp has been generated
            # merely by grabbing str(datetime) of a TZ-naive object. The
            # only way we can keep that with instance objects is to strip
            # the tzinfo from the stamp and str() it.
            server[key] = (instance[k].replace(tzinfo=None)
                           if instance[k] else None)

    @wsgi.extends
    def show(self, req, resp_obj, id):
        """Extend a single-server GET response, policy permitting."""
        context = req.environ['nova.context']
        if authorize(context):
            server = resp_obj.obj['server']
            db_instance = req.get_db_instance(server['id'])
            # server['id'] is guaranteed to be in the cache due to
            # the core API adding it in its 'show' method.
            self._extend_server(server, db_instance)

    @wsgi.extends
    def detail(self, req, resp_obj):
        """Extend a server-list (detail) response, policy permitting."""
        context = req.environ['nova.context']
        if authorize(context):
            servers = list(resp_obj.obj['servers'])
            for server in servers:
                db_instance = req.get_db_instance(server['id'])
                # server['id'] is guaranteed to be in the cache due to
                # the core API adding it in its 'detail' method.
                self._extend_server(server, db_instance)
class ServerUsage(extensions.V3APIExtensionBase):
    """Adds launched_at and terminated_at on Servers."""

    name = "ServerUsage"
    alias = ALIAS
    version = 1

    def get_controller_extensions(self):
        """Attach ServerUsageController to the existing 'servers' resource."""
        controller = ServerUsageController()
        extension = extensions.ControllerExtension(self, 'servers', controller)
        return [extension]

    def get_resources(self):
        # This extension adds no new top-level resources.
        return []
| apache-2.0 |
Thor77/youtube-dl | docs/conf.py | 137 | 2284 | # -*- coding: utf-8 -*-
#
# youtube-dl documentation build configuration file, created by
# sphinx-quickstart on Fri Mar 14 21:05:43 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os

# Allows to import youtube_dl
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

# -- General configuration ------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'youtube-dl'
copyright = u'2014, Ricardo Garcia Gonzalez'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# NOTE: imported here, after the sys.path tweak above, so the in-tree
# youtube_dl package is the one resolved.
from youtube_dl.version import __version__
version = __version__
# The full version, including alpha/beta/rc tags.
release = version

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Output file base name for HTML help builder.
htmlhelp_basename = 'youtube-dldoc'
| unlicense |
jgoclawski/django | django/contrib/gis/db/models/proxy.py | 306 | 3034 | """
The SpatialProxy object allows for lazy-geometries and lazy-rasters. The proxy
uses Python descriptors for instantiating and setting Geometry or Raster
objects corresponding to geographic model fields.
Thanks to Robert Coup for providing this functionality (see #4322).
"""
from django.utils import six
class SpatialProxy(object):
    """Descriptor that lazily builds Geometry/Raster objects from the raw
    value stored on the model instance, and validates assignments."""

    def __init__(self, klass, field):
        """
        Proxy initializes on the given Geometry or Raster class (not an instance)
        and the corresponding field.
        """
        self._field = field
        self._klass = klass

    def __get__(self, obj, type=None):
        """
        This accessor retrieves the geometry or raster, initializing it using
        the corresponding class specified during initialization and the value
        of the field. Currently, GEOS or OGR geometries as well as GDALRasters
        are supported.
        """
        if obj is None:
            # Accessed on a class, not an instance
            return self
        # Getting the value of the field.
        geo_value = obj.__dict__[self._field.attname]
        if isinstance(geo_value, self._klass):
            geo_obj = geo_value
        elif (geo_value is None) or (geo_value == ''):
            geo_obj = None
        else:
            # Otherwise, a geometry or raster object is built using the field's
            # contents, and the model's corresponding attribute is set.
            # (Caches the constructed object so later reads skip re-parsing.)
            geo_obj = self._klass(geo_value)
            setattr(obj, self._field.attname, geo_obj)
        return geo_obj

    def __set__(self, obj, value):
        """
        This accessor sets the proxied geometry or raster with the
        corresponding class specified during initialization.
        To set geometries, values of None, HEXEWKB, or WKT may be used.
        To set rasters, JSON or dict values may be used.
        """
        # The geographic type of the field.
        gtype = self._field.geom_type
        if gtype == 'RASTER' and (value is None or isinstance(value, six.string_types + (dict, self._klass))):
            # For raster fields, assure input is None or a string, dict, or
            # raster instance.
            pass
        elif isinstance(value, self._klass) and (str(value.geom_type).upper() == gtype or gtype == 'GEOMETRY'):
            # The geometry type must match that of the field -- unless the
            # general GeometryField is used.
            if value.srid is None:
                # Assigning the field SRID if the geometry has no SRID.
                value.srid = self._field.srid
        elif value is None or isinstance(value, six.string_types + (six.memoryview,)):
            # Set geometries with None, WKT, HEX, or WKB
            pass
        else:
            raise TypeError('Cannot set %s SpatialProxy (%s) with value of type: %s' % (
                obj.__class__.__name__, gtype, type(value)))
        # Setting the objects dictionary with the value, and returning.
        obj.__dict__[self._field.attname] = value
        return value
| bsd-3-clause |
jonparrott/gcloud-python | dns/google/cloud/dns/zone.py | 3 | 14599 | # Copyright 2015 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Define API ManagedZones."""
import six
from google.api_core import page_iterator
from google.cloud._helpers import _rfc3339_to_datetime
from google.cloud.exceptions import NotFound
from google.cloud.dns.changes import Changes
from google.cloud.dns.resource_record_set import ResourceRecordSet
class ManagedZone(object):
    """ManagedZones are containers for DNS resource records.
    See
    https://cloud.google.com/dns/api/v1/managedZones
    :type name: str
    :param name: the name of the zone
    :type dns_name: str
    :param dns_name:
        (Optional) the DNS name of the zone. If not passed, then calls to
        :meth:`create` will fail.
    :type client: :class:`google.cloud.dns.client.Client`
    :param client: A client which holds credentials and project configuration
                   for the zone (which requires a project).
    :type description: str
    :param description:
        (Optional) the description for the zone. If not passed, defaults to
        the value of 'dns_name'.
    """
    def __init__(self, name, dns_name=None, client=None, description=None):
        self.name = name
        self.dns_name = dns_name
        self._client = client
        # Server-assigned resource properties (id, creationTime, ...),
        # populated by _set_properties() after API calls.
        self._properties = {}
        if description is None:
            description = dns_name
        self.description = description
    @classmethod
    def from_api_repr(cls, resource, client):
        """Factory: construct a zone given its API representation
        :type resource: dict
        :param resource: zone resource representation returned from the API
        :type client: :class:`google.cloud.dns.client.Client`
        :param client: Client which holds credentials and project
                       configuration for the zone.
        :rtype: :class:`google.cloud.dns.zone.ManagedZone`
        :returns: Zone parsed from ``resource``.
        :raises KeyError: if ``resource`` lacks ``name`` or ``dnsName``.
        """
        name = resource.get('name')
        dns_name = resource.get('dnsName')
        if name is None or dns_name is None:
            raise KeyError('Resource lacks required identity information:'
                           '["name"]["dnsName"]')
        zone = cls(name, dns_name, client=client)
        zone._set_properties(resource)
        return zone
    @property
    def project(self):
        """Project bound to the zone.
        :rtype: str
        :returns: the project (derived from the client).
        """
        return self._client.project
    @property
    def path(self):
        """URL path for the zone's APIs.
        :rtype: str
        :returns: the path based on project and zone name.
        """
        return '/projects/%s/managedZones/%s' % (self.project, self.name)
    @property
    def created(self):
        """Datetime at which the zone was created.
        :rtype: ``datetime.datetime``, or ``NoneType``
        :returns: the creation time (None until set from the server).
        """
        return self._properties.get('creationTime')
    @property
    def name_servers(self):
        """Name servers assigned to the zone by the server.
        :rtype: list of strings, or ``NoneType``.
        :returns: the assigned name servers (None until set from the server).
        """
        return self._properties.get('nameServers')
    @property
    def zone_id(self):
        """ID for the zone resource.
        :rtype: str, or ``NoneType``
        :returns: the ID (None until set from the server).
        """
        return self._properties.get('id')
    @property
    def description(self):
        """Description of the zone.
        :rtype: str, or ``NoneType``
        :returns: The description as set by the user, or None (the default).
        """
        return self._properties.get('description')
    @description.setter
    def description(self, value):
        """Update description of the zone.
        :type value: str
        :param value: (Optional) new description
        :raises: ValueError for invalid value types.
        """
        if not isinstance(value, six.string_types) and value is not None:
            raise ValueError("Pass a string, or None")
        self._properties['description'] = value
    @property
    def name_server_set(self):
        """Named set of DNS name servers that all host the same ManagedZones.
        Most users will leave this blank.
        See
        https://cloud.google.com/dns/api/v1/managedZones#nameServerSet
        :rtype: str, or ``NoneType``
        :returns: The name as set by the user, or None (the default).
        """
        return self._properties.get('nameServerSet')
    @name_server_set.setter
    def name_server_set(self, value):
        """Update named set of DNS name servers.
        :type value: str
        :param value: (Optional) new name server set
        :raises: ValueError for invalid value types.
        """
        if not isinstance(value, six.string_types) and value is not None:
            raise ValueError("Pass a string, or None")
        self._properties['nameServerSet'] = value
    def resource_record_set(self, name, record_type, ttl, rrdatas):
        """Construct a resource record set bound to this zone.
        :type name: str
        :param name: Name of the record set.
        :type record_type: str
        :param record_type: RR type
        :type ttl: int
        :param ttl: TTL for the RR, in seconds
        :type rrdatas: list of string
        :param rrdatas: resource data for the RR
        :rtype: :class:`google.cloud.dns.resource_record_set.ResourceRecordSet`
        :returns: a new ``ResourceRecordSet`` instance
        """
        return ResourceRecordSet(name, record_type, ttl, rrdatas, zone=self)
    def changes(self):
        """Construct a change set bound to this zone.
        :rtype: :class:`google.cloud.dns.changes.Changes`
        :returns: a new ``Changes`` instance
        """
        return Changes(zone=self)
    def _require_client(self, client):
        """Check client or verify over-ride.
        :type client: :class:`google.cloud.dns.client.Client`
        :param client:
            (Optional) the client to use. If not passed, falls back to the
            ``client`` stored on the current zone.
        :rtype: :class:`google.cloud.dns.client.Client`
        :returns: The client passed in or the currently bound client.
        """
        if client is None:
            client = self._client
        return client
    def _set_properties(self, api_response):
        """Update properties from resource in body of ``api_response``
        :type api_response: dict
        :param api_response: response returned from an API call
        """
        self._properties.clear()
        cleaned = api_response.copy()
        # 'dnsName' lives on the instance attribute, not in _properties.
        self.dns_name = cleaned.pop('dnsName', None)
        if 'creationTime' in cleaned:
            # Convert the RFC 3339 timestamp string into a datetime.
            cleaned['creationTime'] = _rfc3339_to_datetime(
                cleaned['creationTime'])
        self._properties.update(cleaned)
    def _build_resource(self):
        """Generate a resource for ``create`` or ``update``."""
        resource = {
            'name': self.name,
        }
        # Only include optional fields that have been set.
        if self.dns_name is not None:
            resource['dnsName'] = self.dns_name
        if self.description is not None:
            resource['description'] = self.description
        if self.name_server_set is not None:
            resource['nameServerSet'] = self.name_server_set
        return resource
    def create(self, client=None):
        """API call: create the zone via a POST request
        See
        https://cloud.google.com/dns/api/v1/managedZones/create
        :type client: :class:`google.cloud.dns.client.Client`
        :param client:
            (Optional) the client to use. If not passed, falls back to the
            ``client`` stored on the current zone.
        """
        client = self._require_client(client)
        path = '/projects/%s/managedZones' % (self.project,)
        api_response = client._connection.api_request(
            method='POST', path=path, data=self._build_resource())
        self._set_properties(api_response)
    def exists(self, client=None):
        """API call: test for the existence of the zone via a GET request
        See
        https://cloud.google.com/dns/api/v1/managedZones/get
        :type client: :class:`google.cloud.dns.client.Client`
        :param client:
            (Optional) the client to use. If not passed, falls back to the
            ``client`` stored on the current zone.
        :rtype: bool
        :returns: Boolean indicating existence of the managed zone.
        """
        client = self._require_client(client)
        try:
            # Request only the 'id' field to minimize the payload.
            client._connection.api_request(method='GET', path=self.path,
                                           query_params={'fields': 'id'})
        except NotFound:
            return False
        else:
            return True
    def reload(self, client=None):
        """API call: refresh zone properties via a GET request
        See
        https://cloud.google.com/dns/api/v1/managedZones/get
        :type client: :class:`google.cloud.dns.client.Client`
        :param client:
            (Optional) the client to use. If not passed, falls back to the
            ``client`` stored on the current zone.
        """
        client = self._require_client(client)
        api_response = client._connection.api_request(
            method='GET', path=self.path)
        self._set_properties(api_response)
    def delete(self, client=None):
        """API call: delete the zone via a DELETE request
        See
        https://cloud.google.com/dns/api/v1/managedZones/delete
        :type client: :class:`google.cloud.dns.client.Client`
        :param client:
            (Optional) the client to use. If not passed, falls back to the
            ``client`` stored on the current zone.
        """
        client = self._require_client(client)
        client._connection.api_request(method='DELETE', path=self.path)
    def list_resource_record_sets(self, max_results=None, page_token=None,
                                  client=None):
        """List resource record sets for this zone.
        See
        https://cloud.google.com/dns/api/v1/resourceRecordSets/list
        :type max_results: int
        :param max_results: maximum number of record sets to return, If not
                            passed, defaults to a value set by the API.
        :type page_token: str
        :param page_token: opaque marker for the next "page" of record sets.
                           If not passed, the API will return the first page.
        :type client: :class:`google.cloud.dns.client.Client`
        :param client:
            (Optional) the client to use. If not passed, falls back to the
            ``client`` stored on the current zone.
        :rtype: :class:`~google.api_core.page_iterator.Iterator`
        :returns: Iterator of :class:`~.resource_record_set.ResourceRecordSet`
                  belonging to this zone.
        """
        client = self._require_client(client)
        path = '/projects/%s/managedZones/%s/rrsets' % (
            self.project, self.name)
        iterator = page_iterator.HTTPIterator(
            client=client,
            api_request=client._connection.api_request,
            path=path,
            item_to_value=_item_to_resource_record_set,
            items_key='rrsets',
            page_token=page_token,
            max_results=max_results)
        # Expose the zone so the item_to_value callback can bind results.
        iterator.zone = self
        return iterator
    def list_changes(self, max_results=None, page_token=None, client=None):
        """List change sets for this zone.
        See
        https://cloud.google.com/dns/api/v1/changes/list
        :type max_results: int
        :param max_results: maximum number of change sets to return, If not
                            passed, defaults to a value set by the API.
        :type page_token: str
        :param page_token: opaque marker for the next "page" of change sets.
                           If not passed, the API will return the first page.
        :type client: :class:`google.cloud.dns.client.Client`
        :param client:
            (Optional) the client to use. If not passed, falls back to the
            ``client`` stored on the current zone.
        :rtype: :class:`~google.api_core.page_iterator.Iterator`
        :returns: Iterator of :class:`~.changes.Changes`
                  belonging to this zone.
        """
        client = self._require_client(client)
        path = '/projects/%s/managedZones/%s/changes' % (
            self.project, self.name)
        iterator = page_iterator.HTTPIterator(
            client=client,
            api_request=client._connection.api_request,
            path=path,
            item_to_value=_item_to_changes,
            items_key='changes',
            page_token=page_token,
            max_results=max_results)
        # Expose the zone so the item_to_value callback can bind results.
        iterator.zone = self
        return iterator
def _item_to_resource_record_set(iterator, resource):
    """Page-iterator callback: build a ``ResourceRecordSet`` from one item.
    :type iterator: :class:`~google.api_core.page_iterator.Iterator`
    :param iterator: The iterator that has retrieved the item.
    :type resource: dict
    :param resource: An item to be converted to a resource record set.
    :rtype: :class:`~.resource_record_set.ResourceRecordSet`
    :returns: The next resource record set in the page.
    """
    # The owning zone was attached to the iterator by list_resource_record_sets.
    zone = iterator.zone
    return ResourceRecordSet.from_api_repr(resource, zone)
def _item_to_changes(iterator, resource):
    """Page-iterator callback: build a ``Changes`` object from one item.
    :type iterator: :class:`~google.api_core.page_iterator.Iterator`
    :param iterator: The iterator that has retrieved the item.
    :type resource: dict
    :param resource: An item to be converted to a "changes".
    :rtype: :class:`.Changes`
    :returns: The next "changes" in the page.
    """
    # The owning zone was attached to the iterator by list_changes.
    zone = iterator.zone
    return Changes.from_api_repr(resource, zone)
| apache-2.0 |
munnerz/CouchPotatoServer | libs/caper/__init__.py | 81 | 5426 | # Copyright 2013 Dean Gardiner <gardiner91@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from logr import Logr
from caper.matcher import FragmentMatcher
from caper.objects import CaperFragment, CaperClosure
from caper.parsers.anime import AnimeParser
from caper.parsers.scene import SceneParser
from caper.parsers.usenet import UsenetParser
# Package version: component tuple, branch tag, and the combined string
# (e.g. "0.3.1-master").
__version_info__ = ('0', '3', '1')
__version_branch__ = 'master'
__version__ = "%s%s" % (
    '.'.join(__version_info__),
    '-' + __version_branch__ if __version_branch__ else ''
)
# Characters that open/close a "closure" (bracketed group) in a release name.
CL_START_CHARS = ['(', '[', '<', '>']
CL_END_CHARS = [')', ']', '<', '>']
# Multi-character sequences that also terminate a closure.
CL_END_STRINGS = [' - ']
# Pre-joined character sets used with str.strip()/lstrip()/rstrip().
STRIP_START_CHARS = ''.join(CL_START_CHARS)
STRIP_END_CHARS = ''.join(CL_END_CHARS)
STRIP_CHARS = ''.join(['_', ' ', '.'])
# Characters that separate fragments inside a closure.
FRAGMENT_SEPARATORS = ['.', '-', '_', ' ']
# States for the closure-splitting state machine in Caper._closure_split.
CL_START = 0
CL_END = 1
class Caper(object):
    """Release-name parser: splits a name into bracketed "closures" and
    separator-delimited "fragments", then hands them to a named parser."""
    def __init__(self, debug=False):
        self.debug = debug
        # Registry of available parser backends, keyed by name.
        self.parsers = {
            'anime': AnimeParser,
            'scene': SceneParser,
            'usenet': UsenetParser
        }
    def _closure_split(self, name):
        """Split ``name`` into closures at bracket chars and ' - ' markers.
        :type name: str
        :rtype: list of CaperClosure
        """
        closures = []
        def end_closure(closures, buf):
            # Flush the accumulated buffer as a new closure, linking it
            # into the doubly-linked left/right chain. Buffers shorter
            # than 2 chars (after stripping separators) are discarded.
            buf = buf.strip(STRIP_CHARS)
            if len(buf) < 2:
                return
            cur = CaperClosure(len(closures), buf)
            cur.left = closures[len(closures) - 1] if len(closures) > 0 else None
            if cur.left:
                cur.left.right = cur
            closures.append(cur)
        # Two-state machine: CL_START = outside brackets, CL_END = inside
        # brackets waiting for the closing char.
        state = CL_START
        buf = ""
        for x, ch in enumerate(name):
            # Check for start characters
            if state == CL_START and ch in CL_START_CHARS:
                end_closure(closures, buf)
                state = CL_END
                buf = ""
            # The current char is always appended (so brackets are kept
            # as part of the closure's value).
            buf += ch
            if state == CL_END and ch in CL_END_CHARS:
                # End character found, create the closure
                end_closure(closures, buf)
                state = CL_START
                buf = ""
            elif state == CL_START and buf[-3:] in CL_END_STRINGS:
                # End string found, create the closure
                end_closure(closures, buf[:-3])
                state = CL_START
                buf = ""
        # Flush whatever is left after the final character.
        end_closure(closures, buf)
        return closures
    def _clean_closure(self, closure):
        """Strip the enclosing bracket characters from a closure value.
        :type closure: str
        :rtype: str
        """
        return closure.lstrip(STRIP_START_CHARS).rstrip(STRIP_END_CHARS)
    def _fragment_split(self, closures):
        """Split each closure's value into separator-delimited fragments,
        attached as ``closure.fragments`` (left/right linked, like closures).
        :type closures: list of CaperClosure
        :rtype: list of CaperClosure
        """
        cur_position = 0
        cur = None
        def end_fragment(fragments, cur, cur_position):
            # Finalize a fragment and link it into the chain.
            # NOTE: reads ``ch`` from the enclosing loop, so the fragment's
            # right separator is whatever character triggered the flush.
            cur.position = cur_position
            cur.left = fragments[len(fragments) - 1] if len(fragments) > 0 else None
            if cur.left:
                cur.left_sep = cur.left.right_sep
                cur.left.right = cur
            cur.right_sep = ch
            fragments.append(cur)
        for closure in closures:
            closure.fragments = []
            # Accumulates consecutive separator chars so runs of separators
            # can themselves become a fragment value.
            separator_buffer = ""
            for x, ch in enumerate(self._clean_closure(closure.value)):
                if not cur:
                    cur = CaperFragment(closure)
                if ch in FRAGMENT_SEPARATORS:
                    if cur.value:
                        separator_buffer = ""
                    separator_buffer += ch
                    if cur.value or not closure.fragments:
                        end_fragment(closure.fragments, cur, cur_position)
                    elif len(separator_buffer) > 1:
                        # Multiple separators in a row: treat the run itself
                        # as a fragment value.
                        cur.value = separator_buffer.strip()
                        if cur.value:
                            end_fragment(closure.fragments, cur, cur_position)
                        separator_buffer = ""
                    # Reset
                    cur = None
                    cur_position += 1
                else:
                    cur.value += ch
            # Finish parsing the last fragment
            if cur and cur.value:
                end_fragment(closure.fragments, cur, cur_position)
            # Reset
            cur_position = 0
            cur = None
        return closures
    def parse(self, name, parser='scene'):
        """Parse ``name`` with the parser registered under ``parser``.
        :raises ValueError: if ``parser`` is not a registered parser name.
        """
        closures = self._closure_split(name)
        closures = self._fragment_split(closures)
        # Print closures
        for closure in closures:
            Logr.debug("closure [%s]", closure.value)
            for fragment in closure.fragments:
                Logr.debug("\tfragment [%s]", fragment.value)
        if parser not in self.parsers:
            raise ValueError("Unknown parser")
        # TODO autodetect the parser type
        return self.parsers[parser](self.debug).run(closures)
| gpl-3.0 |
otherness-space/myProject002 | my_project_002/lib/python2.7/site-packages/django/contrib/messages/tests/base.py | 63 | 14277 | from django import http
from django.conf import settings, global_settings
from django.contrib.messages import constants, utils, get_level, set_level
from django.contrib.messages.api import MessageFailure
from django.contrib.messages.storage import default_storage, base
from django.contrib.messages.storage.base import Message
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.utils import override_settings
from django.utils.translation import ugettext_lazy
from django.utils.unittest import skipIf
def skipUnlessAuthIsInstalled(func):
    """Skip the decorated test unless django.contrib.auth is installed."""
    auth_missing = 'django.contrib.auth' not in settings.INSTALLED_APPS
    decorator = skipIf(auth_missing, "django.contrib.auth isn't installed")
    return decorator(func)
def add_level_messages(storage):
    """
    Adds 6 messages from different levels (including a custom one) to a storage
    instance.
    """
    # (level, text, extra_tags-or-None), in the exact order tests rely on.
    fixtures = (
        (constants.INFO, 'A generic info message', None),
        (29, 'Some custom level', None),
        (constants.DEBUG, 'A debugging message', 'extra-tag'),
        (constants.WARNING, 'A warning', None),
        (constants.ERROR, 'An error', None),
        (constants.SUCCESS, 'This was a triumph.', None),
    )
    for level, text, extra in fixtures:
        if extra is None:
            storage.add(level, text)
        else:
            storage.add(level, text, extra_tags=extra)
class override_settings_tags(override_settings):
    """override_settings variant that also refreshes the cached LEVEL_TAGS
    constant in django.contrib.messages.storage.base."""
    def enable(self):
        super(override_settings_tags, self).enable()
        # LEVEL_TAGS is a constant defined in the
        # django.contrib.messages.storage.base module, so after changing
        # settings.MESSAGE_TAGS, we need to update that constant too.
        self.old_level_tags = base.LEVEL_TAGS
        base.LEVEL_TAGS = utils.get_level_tags()
    def disable(self):
        super(override_settings_tags, self).disable()
        # Restore the module-level constant saved in enable().
        base.LEVEL_TAGS = self.old_level_tags
class BaseTest(TestCase):
    """Common test cases for all message storage backends; subclasses set
    ``storage_class`` and implement ``stored_messages_count``/``test_get``."""
    storage_class = default_storage
    urls = 'django.contrib.messages.tests.urls'
    # Mapping of URL level names to message level constants.
    levels = {
        'debug': constants.DEBUG,
        'info': constants.INFO,
        'success': constants.SUCCESS,
        'warning': constants.WARNING,
        'error': constants.ERROR,
    }
    def setUp(self):
        # Point MESSAGE_STORAGE at the backend under test and neutralize
        # tag/template settings for the duration of each test.
        self.settings_override = override_settings_tags(
            TEMPLATE_DIRS = (),
            TEMPLATE_CONTEXT_PROCESSORS = global_settings.TEMPLATE_CONTEXT_PROCESSORS,
            MESSAGE_TAGS = '',
            MESSAGE_STORAGE = '%s.%s' % (self.storage_class.__module__,
                                         self.storage_class.__name__),
            SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer',
        )
        self.settings_override.enable()
    def tearDown(self):
        self.settings_override.disable()
    def get_request(self):
        """Return a bare HttpRequest for constructing storages."""
        return http.HttpRequest()
    def get_response(self):
        """Return a bare HttpResponse for storage.update() calls."""
        return http.HttpResponse()
    def get_storage(self, data=None):
        """
        Returns the storage backend, setting its loaded data to the ``data``
        argument.
        This method avoids the storage ``_get`` method from getting called so
        that other parts of the storage backend can be tested independent of
        the message retrieval logic.
        """
        storage = self.storage_class(self.get_request())
        storage._loaded_data = data or []
        return storage
    def test_add(self):
        storage = self.get_storage()
        self.assertFalse(storage.added_new)
        storage.add(constants.INFO, 'Test message 1')
        self.assertTrue(storage.added_new)
        storage.add(constants.INFO, 'Test message 2', extra_tags='tag')
        self.assertEqual(len(storage), 2)
    def test_add_lazy_translation(self):
        # Lazy ugettext_lazy proxies must survive serialization in update().
        storage = self.get_storage()
        response = self.get_response()
        storage.add(constants.INFO, ugettext_lazy('lazy message'))
        storage.update(response)
        storing = self.stored_messages_count(storage, response)
        self.assertEqual(storing, 1)
    def test_no_update(self):
        storage = self.get_storage()
        response = self.get_response()
        storage.update(response)
        storing = self.stored_messages_count(storage, response)
        self.assertEqual(storing, 0)
    def test_add_update(self):
        storage = self.get_storage()
        response = self.get_response()
        storage.add(constants.INFO, 'Test message 1')
        storage.add(constants.INFO, 'Test message 1', extra_tags='tag')
        storage.update(response)
        storing = self.stored_messages_count(storage, response)
        self.assertEqual(storing, 2)
    def test_existing_add_read_update(self):
        # Messages read during the same request should not be re-stored.
        storage = self.get_existing_storage()
        response = self.get_response()
        storage.add(constants.INFO, 'Test message 3')
        list(storage)   # Simulates a read
        storage.update(response)
        storing = self.stored_messages_count(storage, response)
        self.assertEqual(storing, 0)
    def test_existing_read_add_update(self):
        # A message added after the read should still be stored.
        storage = self.get_existing_storage()
        response = self.get_response()
        list(storage)   # Simulates a read
        storage.add(constants.INFO, 'Test message 3')
        storage.update(response)
        storing = self.stored_messages_count(storage, response)
        self.assertEqual(storing, 1)
    @override_settings(MESSAGE_LEVEL=constants.DEBUG)
    def test_full_request_response_cycle(self):
        """
        With the message middleware enabled, tests that messages are properly
        stored and then retrieved across the full request/redirect/response
        cycle.
        """
        data = {
            'messages': ['Test message %d' % x for x in range(5)],
        }
        show_url = reverse('django.contrib.messages.tests.urls.show')
        for level in ('debug', 'info', 'success', 'warning', 'error'):
            add_url = reverse('django.contrib.messages.tests.urls.add',
                              args=(level,))
            response = self.client.post(add_url, data, follow=True)
            self.assertRedirects(response, show_url)
            self.assertTrue('messages' in response.context)
            messages = [Message(self.levels[level], msg) for msg in
                        data['messages']]
            self.assertEqual(list(response.context['messages']), messages)
            for msg in data['messages']:
                self.assertContains(response, msg)
    @override_settings(MESSAGE_LEVEL=constants.DEBUG)
    def test_with_template_response(self):
        data = {
            'messages': ['Test message %d' % x for x in range(5)],
        }
        show_url = reverse('django.contrib.messages.tests.urls.show_template_response')
        for level in self.levels.keys():
            add_url = reverse('django.contrib.messages.tests.urls.add_template_response',
                              args=(level,))
            response = self.client.post(add_url, data, follow=True)
            self.assertRedirects(response, show_url)
            self.assertTrue('messages' in response.context)
            for msg in data['messages']:
                self.assertContains(response, msg)
            # there shouldn't be any messages on second GET request
            response = self.client.get(show_url)
            for msg in data['messages']:
                self.assertNotContains(response, msg)
    @override_settings(MESSAGE_LEVEL=constants.DEBUG)
    def test_multiple_posts(self):
        """
        Tests that messages persist properly when multiple POSTs are made
        before a GET.
        """
        data = {
            'messages': ['Test message %d' % x for x in range(5)],
        }
        show_url = reverse('django.contrib.messages.tests.urls.show')
        messages = []
        for level in ('debug', 'info', 'success', 'warning', 'error'):
            messages.extend([Message(self.levels[level], msg) for msg in
                             data['messages']])
            add_url = reverse('django.contrib.messages.tests.urls.add',
                              args=(level,))
            self.client.post(add_url, data)
        response = self.client.get(show_url)
        self.assertTrue('messages' in response.context)
        self.assertEqual(list(response.context['messages']), messages)
        for msg in data['messages']:
            self.assertContains(response, msg)
    # NOTE(review): filter() here yields a list on Python 2 but a one-shot
    # iterator on Python 3; these overrides assume list semantics -- confirm
    # if this file is ever run under Python 3.
    @override_settings(
        INSTALLED_APPS=filter(
            lambda app:app!='django.contrib.messages', settings.INSTALLED_APPS),
        MIDDLEWARE_CLASSES=filter(
            lambda m:'MessageMiddleware' not in m, settings.MIDDLEWARE_CLASSES),
        TEMPLATE_CONTEXT_PROCESSORS=filter(
            lambda p:'context_processors.messages' not in p,
            settings.TEMPLATE_CONTEXT_PROCESSORS),
        MESSAGE_LEVEL=constants.DEBUG
    )
    def test_middleware_disabled(self):
        """
        Tests that, when the middleware is disabled, an exception is raised
        when one attempts to store a message.
        """
        data = {
            'messages': ['Test message %d' % x for x in range(5)],
        }
        show_url = reverse('django.contrib.messages.tests.urls.show')
        for level in ('debug', 'info', 'success', 'warning', 'error'):
            add_url = reverse('django.contrib.messages.tests.urls.add',
                              args=(level,))
            self.assertRaises(MessageFailure, self.client.post, add_url,
                              data, follow=True)
    @override_settings(
        INSTALLED_APPS=filter(
            lambda app:app!='django.contrib.messages', settings.INSTALLED_APPS),
        MIDDLEWARE_CLASSES=filter(
            lambda m:'MessageMiddleware' not in m, settings.MIDDLEWARE_CLASSES),
        TEMPLATE_CONTEXT_PROCESSORS=filter(
            lambda p:'context_processors.messages' not in p,
            settings.TEMPLATE_CONTEXT_PROCESSORS),
        MESSAGE_LEVEL=constants.DEBUG
    )
    def test_middleware_disabled_fail_silently(self):
        """
        Tests that, when the middleware is disabled, an exception is not
        raised if 'fail_silently' = True
        """
        data = {
            'messages': ['Test message %d' % x for x in range(5)],
            'fail_silently': True,
        }
        show_url = reverse('django.contrib.messages.tests.urls.show')
        for level in ('debug', 'info', 'success', 'warning', 'error'):
            add_url = reverse('django.contrib.messages.tests.urls.add',
                              args=(level,))
            response = self.client.post(add_url, data, follow=True)
            self.assertRedirects(response, show_url)
            self.assertFalse('messages' in response.context)
    def stored_messages_count(self, storage, response):
        """
        Returns the number of messages being stored after a
        ``storage.update()`` call.
        """
        raise NotImplementedError('This method must be set by a subclass.')
    def test_get(self):
        raise NotImplementedError('This method must be set by a subclass.')
    def get_existing_storage(self):
        """Return a storage pre-loaded with two INFO messages."""
        return self.get_storage([Message(constants.INFO, 'Test message 1'),
                                 Message(constants.INFO, 'Test message 2',
                                         extra_tags='tag')])
    def test_existing_read(self):
        """
        Tests that reading the existing storage doesn't cause the data to be
        lost.
        """
        storage = self.get_existing_storage()
        self.assertFalse(storage.used)
        # After iterating the storage engine directly, the used flag is set.
        data = list(storage)
        self.assertTrue(storage.used)
        # The data does not disappear because it has been iterated.
        self.assertEqual(data, list(storage))
    def test_existing_add(self):
        storage = self.get_existing_storage()
        self.assertFalse(storage.added_new)
        storage.add(constants.INFO, 'Test message 3')
        self.assertTrue(storage.added_new)
    def test_default_level(self):
        # get_level works even with no storage on the request.
        request = self.get_request()
        self.assertEqual(get_level(request), constants.INFO)
        # get_level returns the default level if it hasn't been set.
        storage = self.get_storage()
        request._messages = storage
        self.assertEqual(get_level(request), constants.INFO)
        # Only messages of sufficient level get recorded.
        add_level_messages(storage)
        self.assertEqual(len(storage), 5)
    def test_low_level(self):
        # With the level lowered to 5, all six fixture messages pass.
        request = self.get_request()
        storage = self.storage_class(request)
        request._messages = storage
        self.assertTrue(set_level(request, 5))
        self.assertEqual(get_level(request), 5)
        add_level_messages(storage)
        self.assertEqual(len(storage), 6)
    def test_high_level(self):
        # With the level raised to 30, only WARNING and above are recorded.
        request = self.get_request()
        storage = self.storage_class(request)
        request._messages = storage
        self.assertTrue(set_level(request, 30))
        self.assertEqual(get_level(request), 30)
        add_level_messages(storage)
        self.assertEqual(len(storage), 2)
    @override_settings(MESSAGE_LEVEL=29)
    def test_settings_level(self):
        request = self.get_request()
        storage = self.storage_class(request)
        self.assertEqual(get_level(request), 29)
        add_level_messages(storage)
        self.assertEqual(len(storage), 3)
    def test_tags(self):
        storage = self.get_storage()
        storage.level = 0
        add_level_messages(storage)
        tags = [msg.tags for msg in storage]
        self.assertEqual(tags,
                         ['info', '', 'extra-tag debug', 'warning', 'error',
                          'success'])
    @override_settings_tags(MESSAGE_TAGS={
        constants.INFO: 'info',
        constants.DEBUG: '',
        constants.WARNING: '',
        constants.ERROR: 'bad',
        29: 'custom',
    }
    )
    def test_custom_tags(self):
        storage = self.get_storage()
        storage.level = 0
        add_level_messages(storage)
        tags = [msg.tags for msg in storage]
        self.assertEqual(tags,
                         ['info', 'custom', 'extra-tag', '', 'bad', 'success'])
| mit |
parksandwildlife/borgcollector | livelayermanager/forms.py | 2 | 5053 | from django import forms
from tablemanager.models import Workspace
from livelayermanager.models import Datasource,Layer,SqlViewLayer
from borg_utils.form_fields import GeoserverSettingForm,MetaTilingFactorField,GridSetField
from borg_utils.form_fields import GroupedModelChoiceField,BorgSelect
from borg_utils.forms import BorgModelForm
class DatasourceForm(BorgModelForm,GeoserverSettingForm):
    """
    A form for Datasource model
    """
    # Each field below is tagged with setting_type/key attributes;
    # presumably GeoserverSettingForm's get_setting_from_model /
    # set_setting_to_model use them to round-trip the values -- confirm.
    max_connections = forms.IntegerField(label="Max concurrent connections",initial=10,min_value=1,max_value=128)
    max_connections.setting_type = "geoserver_setting"
    max_connections.key = "max connections"
    connect_timeout = forms.IntegerField(label="Connect timeout in seconds",initial=30,min_value=1,max_value=3600)
    connect_timeout.setting_type = "geoserver_setting"
    connect_timeout.key = "Connection timeout"
    min_connections = forms.IntegerField(label="Min concurrent connections",initial=1,min_value=1,max_value=128)
    min_connections.setting_type = "geoserver_setting"
    min_connections.key = "min connections"
    max_connection_idle_time = forms.IntegerField(label="Max connection idle time",initial=300,min_value=1)
    max_connection_idle_time.setting_type = "geoserver_setting"
    max_connection_idle_time.key = "Max connection idle time"
    fetch_size = forms.IntegerField(label="Fetch size",initial=1000,min_value=1)
    fetch_size.setting_type = "geoserver_setting"
    fetch_size.key = "fetch size"
    workspace = GroupedModelChoiceField('publish_channel',queryset=Workspace.objects.all(),required=True,choice_family="workspace",choice_name="workspace_choices",widget=BorgSelect())
    def __init__(self, *args, **kwargs):
        kwargs['initial']=kwargs.get('initial',{})
        # Populate initial form values from the model's stored settings.
        self.get_setting_from_model(*args,**kwargs)
        super(DatasourceForm, self).__init__(*args, **kwargs)
        if 'instance' in kwargs and kwargs['instance'] and kwargs['instance'].pk:
            # Workspace cannot be changed once the datasource is saved.
            self.fields['workspace'].widget.attrs['readonly'] = True
    def _post_clean(self):
        if self.errors:
            return
        # Copy the validated setting fields back onto the model instance
        # before the model's own validation runs.
        self.set_setting_to_model()
        super(DatasourceForm,self)._post_clean()
    class Meta:
        model = Datasource
        fields = "__all__"
class LayerForm(BorgModelForm,GeoserverSettingForm):
    """
    A form for Layer model
    """
    # Fields tagged setting_type = "geoserver_setting" are round-tripped by
    # GeoserverSettingForm (get_setting_from_model / set_setting_to_model).
    create_cache_layer = forms.BooleanField(required=False,label="create_cache_layer",initial={"enabled":True})
    create_cache_layer.setting_type = "geoserver_setting"
    server_cache_expire = forms.IntegerField(label="server_cache_expire",min_value=0,required=False,initial=0,help_text="Expire server cache after n seconds (set to 0 to use source setting)")
    server_cache_expire.setting_type = "geoserver_setting"
    client_cache_expire = forms.IntegerField(label="client_cache_expire",min_value=0,required=False,initial=0,help_text="Expire client cache after n seconds (set to 0 to use source setting)")
    client_cache_expire.setting_type = "geoserver_setting"
    def __init__(self, *args, **kwargs):
        kwargs['initial']=kwargs.get('initial',{})
        # Populate initial form values from the model's stored settings.
        self.get_setting_from_model(*args,**kwargs)
        super(LayerForm, self).__init__(*args, **kwargs)
        # The source table is never editable through this form.
        self.fields['table'].widget.attrs['readonly'] = True
        instance = kwargs.get("instance")
        if instance and instance.is_published:
            # Renaming a published layer would break its public endpoint.
            self.fields['name'].widget.attrs['readonly'] = True
    def _post_clean(self):
        if self.errors:
            return
        # Copy the validated setting fields back onto the model instance.
        self.set_setting_to_model()
        super(LayerForm,self)._post_clean()
    class Meta:
        model = Layer
        fields = "__all__"
class SqlViewLayerForm(BorgModelForm,GeoserverSettingForm):
    """
    A form for SqlViewLayer model
    """
    # Same geoserver-setting fields as LayerForm, minus the read-only
    # 'table' handling (SQL-view layers have no backing table field here).
    create_cache_layer = forms.BooleanField(required=False,label="create_cache_layer",initial={"enabled":True})
    create_cache_layer.setting_type = "geoserver_setting"
    server_cache_expire = forms.IntegerField(label="server_cache_expire",min_value=0,required=False,initial=0,help_text="Expire server cache after n seconds (set to 0 to use source setting)")
    server_cache_expire.setting_type = "geoserver_setting"
    client_cache_expire = forms.IntegerField(label="client_cache_expire",min_value=0,required=False,initial=0,help_text="Expire client cache after n seconds (set to 0 to use source setting)")
    client_cache_expire.setting_type = "geoserver_setting"
    def __init__(self, *args, **kwargs):
        kwargs['initial']=kwargs.get('initial',{})
        # Populate initial form values from the model's stored settings.
        self.get_setting_from_model(*args,**kwargs)
        super(SqlViewLayerForm, self).__init__(*args, **kwargs)
        instance = kwargs.get("instance")
        if instance and instance.is_published:
            # Renaming a published layer would break its public endpoint.
            self.fields['name'].widget.attrs['readonly'] = True
    def _post_clean(self):
        if self.errors:
            return
        # Copy the validated setting fields back onto the model instance.
        self.set_setting_to_model()
        super(SqlViewLayerForm,self)._post_clean()
    class Meta:
        model = SqlViewLayer
        fields = "__all__"
| bsd-3-clause |
moonrisewarrior/line-memebot | SDK/linebot/utils.py | 4 | 1738 | # -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""linebot.http_client module."""
from __future__ import unicode_literals
import logging
import re
import sys
LOGGER = logging.getLogger('linebot')
PY3 = sys.version_info[0] == 3
def to_snake_case(text):
    """Convert a camelCase/PascalCase identifier to snake_case.

    :param str text: the identifier to convert
    :rtype: str
    :return: the snake_case form of *text*
    """
    # Two passes: first split an uppercase run followed by a lowercase tail
    # (e.g. "HTTPResponse" -> "HTTP_Response"), then split every remaining
    # lower/digit-to-upper boundary, and lowercase the result.
    partially_split = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', text)
    fully_split = re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', partially_split)
    return fully_split.lower()
def to_camel_case(text):
    """Convert a snake_case identifier to camelCase.

    :param str text: the identifier to convert
    :rtype: str
    :return: the camelCase form of *text*
    """
    # Keep the first piece as-is, title-case every following piece.
    pieces = text.split('_')
    return pieces[0] + ''.join(piece.title() for piece in pieces[1:])
def safe_compare_digest(val1, val2):
    """safe_compare_digest method.

    Compare two equal-length strings/bytes without short-circuiting on the
    first differing element (constant-time for equal lengths).

    :param val1: string or bytes for compare
    :type val1: str | bytes
    :param val2: string or bytes for compare
    :type val2: str | bytes
    """
    if len(val1) != len(val2):
        return False
    mismatch = 0
    for c1, c2 in zip(val1, val2):
        # Python 3 bytes iterate as ints already; str (and Python 2 str)
        # iterate as one-character strings that need ord().
        if not isinstance(c1, int):
            c1, c2 = ord(c1), ord(c2)
        mismatch |= c1 ^ c2
    return mismatch == 0
| apache-2.0 |
moto-timo/ironpython3 | Src/StdLib/Lib/test/test_with.py | 4 | 26472 | """Unit tests for the with statement specified in PEP 343."""
__author__ = "Mike Bland"
__email__ = "mbland at acm dot org"
import sys
import unittest
from collections import deque
from contextlib import _GeneratorContextManager, contextmanager
from test.support import run_unittest
class MockContextManager(_GeneratorContextManager):
    """A _GeneratorContextManager that records how it was entered/exited."""
    def __init__(self, *args):
        super().__init__(*args)
        self.enter_called = False
        self.exit_called = False
        self.exit_args = None

    def __enter__(self):
        self.enter_called = True
        return _GeneratorContextManager.__enter__(self)

    def __exit__(self, type, value, traceback):
        self.exit_called = True
        # Record exactly what exception info (if any) reached __exit__.
        self.exit_args = (type, value, traceback)
        return _GeneratorContextManager.__exit__(self, type,
                                                 value, traceback)
def mock_contextmanager(func):
    """Decorator analogous to contextlib.contextmanager, but the returned
    factory produces MockContextManager instances that record calls."""
    def factory(*args, **kwargs):
        return MockContextManager(func, args, kwargs)
    return factory
class MockResource(object):
    """Records whether the mock generator yielded it and later stopped."""
    def __init__(self):
        # Both flags start cleared; the generator flips them as it runs.
        self.yielded = self.stopped = False
@mock_contextmanager
def mock_contextmanager_generator():
    """Yield a MockResource whose flags record that the with-body ran
    (``yielded``) and that cleanup ran (``stopped``)."""
    mock = MockResource()
    try:
        mock.yielded = True
        yield mock
    finally:
        # Runs whether the body completed or raised.
        mock.stopped = True
class Nested(object):
    """Combine several context managers, emulating textually nested
    ``with`` statements (the former ``contextlib.nested``)."""

    def __init__(self, *managers):
        self.managers = managers
        # ``entered`` doubles as the re-entrancy guard: None == not active.
        self.entered = None

    def __enter__(self):
        if self.entered is not None:
            raise RuntimeError("Context is not reentrant")
        self.entered = deque()
        vars = []
        try:
            for mgr in self.managers:
                vars.append(mgr.__enter__())
                # appendleft so __exit__ unwinds in reverse (LIFO) order.
                self.entered.appendleft(mgr)
        except:
            # Roll back the managers entered so far; re-raise unless one
            # of their __exit__ methods swallowed the exception.
            if not self.__exit__(*sys.exc_info()):
                raise
        return vars

    def __exit__(self, *exc_info):
        # Behave like nested with statements
        # first in, last out
        # New exceptions override old ones
        ex = exc_info
        for mgr in self.entered:
            try:
                if mgr.__exit__(*ex):
                    # This manager swallowed the current exception.
                    ex = (None, None, None)
            except:
                ex = sys.exc_info()
        self.entered = None
        if ex is not exc_info:
            # An inner __exit__ raised or replaced the exception.
            raise ex[0](ex[1]).with_traceback(ex[2])
class MockNested(Nested):
    """Nested combiner that also records enter/exit like the other mocks."""
    def __init__(self, *managers):
        Nested.__init__(self, *managers)
        self.enter_called = False
        self.exit_called = False
        self.exit_args = None

    def __enter__(self):
        self.enter_called = True
        return Nested.__enter__(self)

    def __exit__(self, *exc_info):
        self.exit_called = True
        self.exit_args = exc_info
        return Nested.__exit__(self, *exc_info)
class FailureTestCase(unittest.TestCase):
    """Failure modes of the with statement itself (missing protocol
    methods, invalid assignment targets, raising __enter__/__exit__)."""

    def testNameError(self):
        def fooNotDeclared():
            with foo: pass
        self.assertRaises(NameError, fooNotDeclared)

    def testEnterAttributeError(self):
        class LacksEnter(object):
            def __exit__(self, type, value, traceback):
                pass
        def fooLacksEnter():
            foo = LacksEnter()
            with foo: pass
        self.assertRaises(AttributeError, fooLacksEnter)

    def testExitAttributeError(self):
        class LacksExit(object):
            def __enter__(self):
                pass
        def fooLacksExit():
            foo = LacksExit()
            with foo: pass
        self.assertRaises(AttributeError, fooLacksExit)

    def assertRaisesSyntaxError(self, codestr):
        # Helper: compiling ``codestr`` must raise SyntaxError.
        def shouldRaiseSyntaxError(s):
            compile(s, '', 'single')
        self.assertRaises(SyntaxError, shouldRaiseSyntaxError, codestr)

    def testAssignmentToNoneError(self):
        self.assertRaisesSyntaxError('with mock as None:\n pass')
        self.assertRaisesSyntaxError(
            'with mock as (None):\n'
            ' pass')

    def testAssignmentToEmptyTupleError(self):
        self.assertRaisesSyntaxError(
            'with mock as ():\n'
            ' pass')

    def testAssignmentToTupleOnlyContainingNoneError(self):
        self.assertRaisesSyntaxError('with mock as None,:\n pass')
        self.assertRaisesSyntaxError(
            'with mock as (None,):\n'
            ' pass')

    def testAssignmentToTupleContainingNoneError(self):
        self.assertRaisesSyntaxError(
            'with mock as (foo, None, bar):\n'
            ' pass')

    def testEnterThrows(self):
        class EnterThrows(object):
            def __enter__(self):
                raise RuntimeError("Enter threw")
            def __exit__(self, *args):
                pass
        def shouldThrow():
            ct = EnterThrows()
            self.foo = None
            with ct as self.foo:
                pass
        self.assertRaises(RuntimeError, shouldThrow)
        # A raising __enter__ must not have bound the target.
        self.assertEqual(self.foo, None)

    def testExitThrows(self):
        class ExitThrows(object):
            def __enter__(self):
                return
            def __exit__(self, *args):
                raise RuntimeError(42)
        def shouldThrow():
            with ExitThrows():
                pass
        self.assertRaises(RuntimeError, shouldThrow)
class ContextmanagerAssertionMixin(object):
    """Shared assertions about the enter/exit bookkeeping of the mocks."""

    def setUp(self):
        self.TEST_EXCEPTION = RuntimeError("test exception")

    def assertInWithManagerInvariants(self, mock_manager):
        # Inside the with block: entered, not yet exited.
        self.assertTrue(mock_manager.enter_called)
        self.assertFalse(mock_manager.exit_called)
        self.assertEqual(mock_manager.exit_args, None)

    def assertAfterWithManagerInvariants(self, mock_manager, exit_args):
        self.assertTrue(mock_manager.enter_called)
        self.assertTrue(mock_manager.exit_called)
        self.assertEqual(mock_manager.exit_args, exit_args)

    def assertAfterWithManagerInvariantsNoError(self, mock_manager):
        # Clean exit passes (None, None, None) to __exit__.
        self.assertAfterWithManagerInvariants(mock_manager,
                                              (None, None, None))

    def assertInWithGeneratorInvariants(self, mock_generator):
        self.assertTrue(mock_generator.yielded)
        self.assertFalse(mock_generator.stopped)

    def assertAfterWithGeneratorInvariantsNoError(self, mock_generator):
        self.assertTrue(mock_generator.yielded)
        self.assertTrue(mock_generator.stopped)

    def raiseTestException(self):
        raise self.TEST_EXCEPTION

    def assertAfterWithManagerInvariantsWithError(self, mock_manager,
                                                  exc_type=None):
        self.assertTrue(mock_manager.enter_called)
        self.assertTrue(mock_manager.exit_called)
        if exc_type is None:
            self.assertEqual(mock_manager.exit_args[1], self.TEST_EXCEPTION)
            exc_type = type(self.TEST_EXCEPTION)
        self.assertEqual(mock_manager.exit_args[0], exc_type)
        # Test the __exit__ arguments. Issue #7853
        self.assertIsInstance(mock_manager.exit_args[1], exc_type)
        self.assertIsNot(mock_manager.exit_args[2], None)

    def assertAfterWithGeneratorInvariantsWithError(self, mock_generator):
        self.assertTrue(mock_generator.yielded)
        self.assertTrue(mock_generator.stopped)
class NonexceptionalTestCase(unittest.TestCase, ContextmanagerAssertionMixin):
    """with statements that complete without raising."""

    def testInlineGeneratorSyntax(self):
        with mock_contextmanager_generator():
            pass

    def testUnboundGenerator(self):
        mock = mock_contextmanager_generator()
        with mock:
            pass
        self.assertAfterWithManagerInvariantsNoError(mock)

    def testInlineGeneratorBoundSyntax(self):
        with mock_contextmanager_generator() as foo:
            self.assertInWithGeneratorInvariants(foo)
        # FIXME: In the future, we'll try to keep the bound names from leaking
        self.assertAfterWithGeneratorInvariantsNoError(foo)

    def testInlineGeneratorBoundToExistingVariable(self):
        foo = None
        with mock_contextmanager_generator() as foo:
            self.assertInWithGeneratorInvariants(foo)
        self.assertAfterWithGeneratorInvariantsNoError(foo)

    def testInlineGeneratorBoundToDottedVariable(self):
        with mock_contextmanager_generator() as self.foo:
            self.assertInWithGeneratorInvariants(self.foo)
        self.assertAfterWithGeneratorInvariantsNoError(self.foo)

    def testBoundGenerator(self):
        mock = mock_contextmanager_generator()
        with mock as foo:
            self.assertInWithGeneratorInvariants(foo)
            self.assertInWithManagerInvariants(mock)
        self.assertAfterWithGeneratorInvariantsNoError(foo)
        self.assertAfterWithManagerInvariantsNoError(mock)

    def testNestedSingleStatements(self):
        mock_a = mock_contextmanager_generator()
        with mock_a as foo:
            mock_b = mock_contextmanager_generator()
            with mock_b as bar:
                self.assertInWithManagerInvariants(mock_a)
                self.assertInWithManagerInvariants(mock_b)
                self.assertInWithGeneratorInvariants(foo)
                self.assertInWithGeneratorInvariants(bar)
            # Inner manager closed; outer still active.
            self.assertAfterWithManagerInvariantsNoError(mock_b)
            self.assertAfterWithGeneratorInvariantsNoError(bar)
            self.assertInWithManagerInvariants(mock_a)
            self.assertInWithGeneratorInvariants(foo)
        self.assertAfterWithManagerInvariantsNoError(mock_a)
        self.assertAfterWithGeneratorInvariantsNoError(foo)
class NestedNonexceptionalTestCase(unittest.TestCase,
                                   ContextmanagerAssertionMixin):
    """Non-raising with statements using the Nested()/MockNested combiner."""

    def testSingleArgInlineGeneratorSyntax(self):
        with Nested(mock_contextmanager_generator()):
            pass

    def testSingleArgBoundToNonTuple(self):
        m = mock_contextmanager_generator()
        # This will bind all the arguments to nested() into a single list
        # assigned to foo.
        with Nested(m) as foo:
            self.assertInWithManagerInvariants(m)
        self.assertAfterWithManagerInvariantsNoError(m)

    def testSingleArgBoundToSingleElementParenthesizedList(self):
        m = mock_contextmanager_generator()
        # This will bind all the arguments to nested() into a single list
        # assigned to foo.
        with Nested(m) as (foo):
            self.assertInWithManagerInvariants(m)
        self.assertAfterWithManagerInvariantsNoError(m)

    def testSingleArgBoundToMultipleElementTupleError(self):
        def shouldThrowValueError():
            with Nested(mock_contextmanager_generator()) as (foo, bar):
                pass
        self.assertRaises(ValueError, shouldThrowValueError)

    def testSingleArgUnbound(self):
        mock_contextmanager = mock_contextmanager_generator()
        mock_nested = MockNested(mock_contextmanager)
        with mock_nested:
            self.assertInWithManagerInvariants(mock_contextmanager)
            self.assertInWithManagerInvariants(mock_nested)
        self.assertAfterWithManagerInvariantsNoError(mock_contextmanager)
        self.assertAfterWithManagerInvariantsNoError(mock_nested)

    def testMultipleArgUnbound(self):
        m = mock_contextmanager_generator()
        n = mock_contextmanager_generator()
        o = mock_contextmanager_generator()
        mock_nested = MockNested(m, n, o)
        with mock_nested:
            self.assertInWithManagerInvariants(m)
            self.assertInWithManagerInvariants(n)
            self.assertInWithManagerInvariants(o)
            self.assertInWithManagerInvariants(mock_nested)
        self.assertAfterWithManagerInvariantsNoError(m)
        self.assertAfterWithManagerInvariantsNoError(n)
        self.assertAfterWithManagerInvariantsNoError(o)
        self.assertAfterWithManagerInvariantsNoError(mock_nested)

    def testMultipleArgBound(self):
        mock_nested = MockNested(mock_contextmanager_generator(),
            mock_contextmanager_generator(), mock_contextmanager_generator())
        with mock_nested as (m, n, o):
            self.assertInWithGeneratorInvariants(m)
            self.assertInWithGeneratorInvariants(n)
            self.assertInWithGeneratorInvariants(o)
            self.assertInWithManagerInvariants(mock_nested)
        self.assertAfterWithGeneratorInvariantsNoError(m)
        self.assertAfterWithGeneratorInvariantsNoError(n)
        self.assertAfterWithGeneratorInvariantsNoError(o)
        self.assertAfterWithManagerInvariantsNoError(mock_nested)
class ExceptionalTestCase(ContextmanagerAssertionMixin, unittest.TestCase):
    """with statements whose body, __enter__ or __exit__ raises."""

    def testSingleResource(self):
        cm = mock_contextmanager_generator()
        def shouldThrow():
            with cm as self.resource:
                self.assertInWithManagerInvariants(cm)
                self.assertInWithGeneratorInvariants(self.resource)
                self.raiseTestException()
        self.assertRaises(RuntimeError, shouldThrow)
        self.assertAfterWithManagerInvariantsWithError(cm)
        self.assertAfterWithGeneratorInvariantsWithError(self.resource)

    def testExceptionNormalized(self):
        cm = mock_contextmanager_generator()
        def shouldThrow():
            with cm as self.resource:
                # Note this relies on the fact that 1 // 0 produces an exception
                # that is not normalized immediately.
                1 // 0
        self.assertRaises(ZeroDivisionError, shouldThrow)
        self.assertAfterWithManagerInvariantsWithError(cm, ZeroDivisionError)

    def testNestedSingleStatements(self):
        mock_a = mock_contextmanager_generator()
        mock_b = mock_contextmanager_generator()
        def shouldThrow():
            with mock_a as self.foo:
                with mock_b as self.bar:
                    self.assertInWithManagerInvariants(mock_a)
                    self.assertInWithManagerInvariants(mock_b)
                    self.assertInWithGeneratorInvariants(self.foo)
                    self.assertInWithGeneratorInvariants(self.bar)
                    self.raiseTestException()
        self.assertRaises(RuntimeError, shouldThrow)
        # Both levels must see the exception on the way out.
        self.assertAfterWithManagerInvariantsWithError(mock_a)
        self.assertAfterWithManagerInvariantsWithError(mock_b)
        self.assertAfterWithGeneratorInvariantsWithError(self.foo)
        self.assertAfterWithGeneratorInvariantsWithError(self.bar)

    def testMultipleResourcesInSingleStatement(self):
        cm_a = mock_contextmanager_generator()
        cm_b = mock_contextmanager_generator()
        mock_nested = MockNested(cm_a, cm_b)
        def shouldThrow():
            with mock_nested as (self.resource_a, self.resource_b):
                self.assertInWithManagerInvariants(cm_a)
                self.assertInWithManagerInvariants(cm_b)
                self.assertInWithManagerInvariants(mock_nested)
                self.assertInWithGeneratorInvariants(self.resource_a)
                self.assertInWithGeneratorInvariants(self.resource_b)
                self.raiseTestException()
        self.assertRaises(RuntimeError, shouldThrow)
        self.assertAfterWithManagerInvariantsWithError(cm_a)
        self.assertAfterWithManagerInvariantsWithError(cm_b)
        self.assertAfterWithManagerInvariantsWithError(mock_nested)
        self.assertAfterWithGeneratorInvariantsWithError(self.resource_a)
        self.assertAfterWithGeneratorInvariantsWithError(self.resource_b)

    def testNestedExceptionBeforeInnerStatement(self):
        mock_a = mock_contextmanager_generator()
        mock_b = mock_contextmanager_generator()
        self.bar = None
        def shouldThrow():
            with mock_a as self.foo:
                self.assertInWithManagerInvariants(mock_a)
                self.assertInWithGeneratorInvariants(self.foo)
                self.raiseTestException()
                with mock_b as self.bar:
                    pass
        self.assertRaises(RuntimeError, shouldThrow)
        self.assertAfterWithManagerInvariantsWithError(mock_a)
        self.assertAfterWithGeneratorInvariantsWithError(self.foo)
        # The inner statement stuff should never have been touched
        self.assertEqual(self.bar, None)
        self.assertFalse(mock_b.enter_called)
        self.assertFalse(mock_b.exit_called)
        self.assertEqual(mock_b.exit_args, None)

    def testNestedExceptionAfterInnerStatement(self):
        mock_a = mock_contextmanager_generator()
        mock_b = mock_contextmanager_generator()
        def shouldThrow():
            with mock_a as self.foo:
                with mock_b as self.bar:
                    self.assertInWithManagerInvariants(mock_a)
                    self.assertInWithManagerInvariants(mock_b)
                    self.assertInWithGeneratorInvariants(self.foo)
                    self.assertInWithGeneratorInvariants(self.bar)
                self.raiseTestException()
        self.assertRaises(RuntimeError, shouldThrow)
        # Only the outer manager sees the exception: the inner block
        # already exited cleanly before the raise.
        self.assertAfterWithManagerInvariantsWithError(mock_a)
        self.assertAfterWithManagerInvariantsNoError(mock_b)
        self.assertAfterWithGeneratorInvariantsWithError(self.foo)
        self.assertAfterWithGeneratorInvariantsNoError(self.bar)

    def testRaisedStopIteration1(self):
        # From bug 1462485
        @contextmanager
        def cm():
            yield
        def shouldThrow():
            with cm():
                raise StopIteration("from with")
        self.assertRaises(StopIteration, shouldThrow)

    def testRaisedStopIteration2(self):
        # From bug 1462485
        class cm(object):
            def __enter__(self):
                pass
            def __exit__(self, type, value, traceback):
                pass
        def shouldThrow():
            with cm():
                raise StopIteration("from with")
        self.assertRaises(StopIteration, shouldThrow)

    def testRaisedStopIteration3(self):
        # Another variant where the exception hasn't been instantiated
        # From bug 1705170
        @contextmanager
        def cm():
            yield
        def shouldThrow():
            with cm():
                raise next(iter([]))
        self.assertRaises(StopIteration, shouldThrow)

    def testRaisedGeneratorExit1(self):
        # From bug 1462485
        @contextmanager
        def cm():
            yield
        def shouldThrow():
            with cm():
                raise GeneratorExit("from with")
        self.assertRaises(GeneratorExit, shouldThrow)

    def testRaisedGeneratorExit2(self):
        # From bug 1462485
        class cm (object):
            def __enter__(self):
                pass
            def __exit__(self, type, value, traceback):
                pass
        def shouldThrow():
            with cm():
                raise GeneratorExit("from with")
        self.assertRaises(GeneratorExit, shouldThrow)

    def testErrorsInBool(self):
        # issue4589: __exit__ return code may raise an exception
        # when looking at its truth value.
        class cm(object):
            def __init__(self, bool_conversion):
                class Bool:
                    def __bool__(self):
                        return bool_conversion()
                self.exit_result = Bool()
            def __enter__(self):
                return 3
            def __exit__(self, a, b, c):
                return self.exit_result
        def trueAsBool():
            with cm(lambda: True):
                self.fail("Should NOT see this")
        trueAsBool()
        def falseAsBool():
            with cm(lambda: False):
                self.fail("Should raise")
        self.assertRaises(AssertionError, falseAsBool)
        def failAsBool():
            with cm(lambda: 1//0):
                self.fail("Should NOT see this")
        self.assertRaises(ZeroDivisionError, failAsBool)
class NonLocalFlowControlTestCase(unittest.TestCase):
    """break/continue/return/yield/raise escaping a with block still run
    __exit__ and behave like normal non-local flow control."""

    def testWithBreak(self):
        counter = 0
        while True:
            counter += 1
            with mock_contextmanager_generator():
                counter += 10
                break
            counter += 100 # Not reached
        self.assertEqual(counter, 11)

    def testWithContinue(self):
        counter = 0
        while True:
            counter += 1
            if counter > 2:
                break
            with mock_contextmanager_generator():
                counter += 10
                continue
            counter += 100 # Not reached
        self.assertEqual(counter, 12)

    def testWithReturn(self):
        def foo():
            counter = 0
            while True:
                counter += 1
                with mock_contextmanager_generator():
                    counter += 10
                    return counter
                counter += 100 # Not reached
        self.assertEqual(foo(), 11)

    def testWithYield(self):
        def gen():
            with mock_contextmanager_generator():
                yield 12
                yield 13
        x = list(gen())
        self.assertEqual(x, [12, 13])

    def testWithRaise(self):
        counter = 0
        try:
            counter += 1
            with mock_contextmanager_generator():
                counter += 10
                raise RuntimeError
            counter += 100 # Not reached
        except RuntimeError:
            self.assertEqual(counter, 11)
        else:
            self.fail("Didn't raise RuntimeError")
class AssignmentTargetTestCase(unittest.TestCase):
    """Exotic assignment targets after ``as``: subscripts, attributes,
    and tuples thereof."""

    def testSingleComplexTarget(self):
        targets = {1: [0, 1, 2]}
        with mock_contextmanager_generator() as targets[1][0]:
            self.assertEqual(list(targets.keys()), [1])
            self.assertEqual(targets[1][0].__class__, MockResource)
        # Subscripting a call result is a valid target; the fresh list
        # shares the inner [0, 1, 2] list object, so targets[1][1] changes.
        with mock_contextmanager_generator() as list(targets.values())[0][1]:
            self.assertEqual(list(targets.keys()), [1])
            self.assertEqual(targets[1][1].__class__, MockResource)
        with mock_contextmanager_generator() as targets[2]:
            keys = list(targets.keys())
            keys.sort()
            self.assertEqual(keys, [1, 2])
        class C: pass
        blah = C()
        with mock_contextmanager_generator() as blah.foo:
            self.assertEqual(hasattr(blah, "foo"), True)

    def testMultipleComplexTargets(self):
        class C:
            def __enter__(self): return 1, 2, 3
            def __exit__(self, t, v, tb): pass
        targets = {1: [0, 1, 2]}
        with C() as (targets[1][0], targets[1][1], targets[1][2]):
            self.assertEqual(targets, {1: [1, 2, 3]})
        with C() as (list(targets.values())[0][2], list(targets.values())[0][1], list(targets.values())[0][0]):
            self.assertEqual(targets, {1: [3, 2, 1]})
        with C() as (targets[1], targets[2], targets[3]):
            self.assertEqual(targets, {1: 1, 2: 2, 3: 3})
        class B: pass
        blah = B()
        with C() as (blah.one, blah.two, blah.three):
            self.assertEqual(blah.one, 1)
            self.assertEqual(blah.two, 2)
            self.assertEqual(blah.three, 3)
class ExitSwallowsExceptionTestCase(unittest.TestCase):
    """The truthiness of __exit__'s return value controls suppression."""

    def testExitTrueSwallowsException(self):
        class AfricanSwallow:
            def __enter__(self):
                pass
            def __exit__(self, t, v, tb):
                # Truthy return: the exception must be suppressed.
                return True
        try:
            with AfricanSwallow():
                1 / 0
        except ZeroDivisionError:
            self.fail("ZeroDivisionError should have been swallowed")

    def testExitFalseDoesntSwallowException(self):
        class EuropeanSwallow:
            def __enter__(self):
                pass
            def __exit__(self, t, v, tb):
                # Falsy return: the exception must propagate.
                return False
        raised = False
        try:
            with EuropeanSwallow():
                1 / 0
        except ZeroDivisionError:
            raised = True
        if not raised:
            self.fail("ZeroDivisionError should have been raised")
class NestedWith(unittest.TestCase):
    """The multi-manager form: ``with a() as x, b() as y: ...``"""

    class Dummy(object):
        # Records enter/exit; ``gobble=True`` makes __exit__ swallow
        # exceptions; ``value`` is what __enter__ returns (default: self).
        def __init__(self, value=None, gobble=False):
            if value is None:
                value = self
            self.value = value
            self.gobble = gobble
            self.enter_called = False
            self.exit_called = False

        def __enter__(self):
            self.enter_called = True
            return self.value

        def __exit__(self, *exc_info):
            self.exit_called = True
            self.exc_info = exc_info
            if self.gobble:
                return True

    class InitRaises(object):
        def __init__(self): raise RuntimeError()

    class EnterRaises(object):
        def __enter__(self): raise RuntimeError()
        def __exit__(self, *exc_info): pass

    class ExitRaises(object):
        def __enter__(self): pass
        def __exit__(self, *exc_info): raise RuntimeError()

    def testNoExceptions(self):
        with self.Dummy() as a, self.Dummy() as b:
            self.assertTrue(a.enter_called)
            self.assertTrue(b.enter_called)
        self.assertTrue(a.exit_called)
        self.assertTrue(b.exit_called)

    def testExceptionInExprList(self):
        try:
            with self.Dummy() as a, self.InitRaises():
                pass
        except:
            pass
        # The first manager was fully entered, so it must still be exited.
        self.assertTrue(a.enter_called)
        self.assertTrue(a.exit_called)

    def testExceptionInEnter(self):
        try:
            with self.Dummy() as a, self.EnterRaises():
                self.fail('body of bad with executed')
        except RuntimeError:
            pass
        else:
            self.fail('RuntimeError not reraised')
        self.assertTrue(a.enter_called)
        self.assertTrue(a.exit_called)

    def testExceptionInExit(self):
        body_executed = False
        with self.Dummy(gobble=True) as a, self.ExitRaises():
            body_executed = True
        self.assertTrue(a.enter_called)
        self.assertTrue(a.exit_called)
        self.assertTrue(body_executed)
        # The outer manager saw (and gobbled) the inner __exit__'s error.
        self.assertNotEqual(a.exc_info[0], None)

    def testEnterReturnsTuple(self):
        with self.Dummy(value=(1,2)) as (a1, a2), \
                self.Dummy(value=(10, 20)) as (b1, b2):
            self.assertEqual(1, a1)
            self.assertEqual(2, a2)
            self.assertEqual(10, b1)
            self.assertEqual(20, b2)
def test_main():
    """Run every test case in this module via test.support.run_unittest."""
    run_unittest(FailureTestCase, NonexceptionalTestCase,
                 NestedNonexceptionalTestCase, ExceptionalTestCase,
                 NonLocalFlowControlTestCase,
                 AssignmentTargetTestCase,
                 ExitSwallowsExceptionTestCase,
                 NestedWith)


if __name__ == '__main__':
    test_main()
| apache-2.0 |
vrs01/mopidy | tests/mpd/protocol/test_command_list.py | 17 | 2684 | from __future__ import absolute_import, unicode_literals
from tests.mpd import protocol
class CommandListsTest(protocol.BaseTestCase):
    """MPD command_list_begin / command_list_ok_begin / command_list_end."""

    def test_command_list_begin(self):
        response = self.send_request('command_list_begin')
        self.assertEqual([], response)

    def test_command_list_end(self):
        self.send_request('command_list_begin')
        self.send_request('command_list_end')
        self.assertInResponse('OK')

    def test_command_list_end_without_start_first_is_an_unknown_command(self):
        self.send_request('command_list_end')
        self.assertEqualResponse(
            'ACK [5@0] {} unknown command "command_list_end"')

    def test_command_list_with_ping(self):
        self.send_request('command_list_begin')
        self.assertTrue(self.dispatcher.command_list_receiving)
        self.assertFalse(self.dispatcher.command_list_ok)
        self.assertEqual([], self.dispatcher.command_list)
        self.send_request('ping')
        # Commands are queued, not executed, until command_list_end.
        self.assertIn('ping', self.dispatcher.command_list)
        self.send_request('command_list_end')
        self.assertInResponse('OK')
        self.assertFalse(self.dispatcher.command_list_receiving)
        self.assertFalse(self.dispatcher.command_list_ok)
        self.assertEqual([], self.dispatcher.command_list)

    def test_command_list_with_error_returns_ack_with_correct_index(self):
        self.send_request('command_list_begin')
        self.send_request('play')  # Known command
        self.send_request('paly')  # Unknown command
        self.send_request('command_list_end')
        # The "1" in [5@1] is the index of the failing command in the list.
        self.assertEqualResponse('ACK [5@1] {} unknown command "paly"')

    def test_command_list_ok_begin(self):
        response = self.send_request('command_list_ok_begin')
        self.assertEqual([], response)

    def test_command_list_ok_with_ping(self):
        self.send_request('command_list_ok_begin')
        self.assertTrue(self.dispatcher.command_list_receiving)
        self.assertTrue(self.dispatcher.command_list_ok)
        self.assertEqual([], self.dispatcher.command_list)
        self.send_request('ping')
        self.assertIn('ping', self.dispatcher.command_list)
        self.send_request('command_list_end')
        self.assertInResponse('list_OK')
        self.assertInResponse('OK')
        self.assertFalse(self.dispatcher.command_list_receiving)
        self.assertFalse(self.dispatcher.command_list_ok)
        self.assertEqual([], self.dispatcher.command_list)

    # FIXME this should also include the special handling of idle within a
    # command list. That is that once an idle/noidle command is found inside
    # a command list, the rest of the list seems to be ignored.
xialirong/prefpy | setup.py | 2 | 1104 | from setuptools import setup
if __name__ == "__main__":
    # PyPI renders reST best, so convert the Markdown README when pypandoc
    # is available; otherwise fall back to shipping the raw Markdown.
    try:
        import pypandoc
        long_description = pypandoc.convert('README.md', 'rst')
    except(IOError, ImportError):
        long_description = open('README.md').read()
    __version__ = "0.8.1"
    base_url = "https://github.com/xialirong/prefpy"
    setup(name="prefpy",
          version=__version__,
          description="Rank aggregation algorithms",
          long_description=long_description,
          classifiers=[
              "Development Status :: 3 - Alpha",
              "License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
              "Programming Language :: Python :: 3.4",
              "Programming Language :: Python :: 3.5",
              "Intended Audience :: Science/Research",
              "Topic :: Scientific/Engineering"
          ],
          url=base_url,
          # Tarball of the git tag matching __version__.
          download_url="{0}/archive/v{1}.tar.gz".format(base_url, __version__),
          author="Peter Piech",
          license="GPL-3",
          packages=["prefpy"],
          zip_safe=False)
| gpl-3.0 |
manishpatell/erpcustomizationssaiimpex123qwe | addons/account_asset/__openerp__.py | 314 | 2182 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# OpenERP/Odoo module manifest: a bare dict literal read by the server.
{
    'name': 'Assets Management',
    'version': '1.0',
    'depends': ['account'],
    'author': 'OpenERP S.A.',
    'description': """
Financial and accounting asset management.
==========================================
This Module manages the assets owned by a company or an individual. It will keep
track of depreciation's occurred on those assets. And it allows to create Move's
of the depreciation lines.
""",
    'website': 'https://www.odoo.com/page/accounting',
    'category': 'Accounting & Finance',
    'sequence': 32,
    'demo': [ 'account_asset_demo.xml'],
    'test': [
        'test/account_asset_demo.yml',
        'test/account_asset.yml',
        'test/account_asset_wizard.yml',
    ],
    # XML/CSV files loaded at install time, in order.
    'data': [
        'security/account_asset_security.xml',
        'security/ir.model.access.csv',
        'wizard/account_asset_change_duration_view.xml',
        'wizard/wizard_asset_compute_view.xml',
        'account_asset_view.xml',
        'account_asset_invoice_view.xml',
        'report/account_asset_report_view.xml',
    ],
    'auto_install': False,
    'installable': True,
    'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
EmadMokhtar/Django | tests/template_tests/syntax_tests/i18n/test_get_language_info_list.py | 78 | 2403 | from django.template import TemplateSyntaxError
from django.test import SimpleTestCase
from django.utils import translation
from ...utils import setup
class GetLanguageInfoListTests(SimpleTestCase):
    """Tests for the {% get_language_info_list %} i18n template tag."""
    libraries = {
        'custom': 'template_tests.templatetags.custom',
        'i18n': 'django.templatetags.i18n',
    }

    @setup({'i18n30': '{% load i18n %}'
                      '{% get_language_info_list for langcodes as langs %}'
                      '{% for l in langs %}{{ l.code }}: {{ l.name }}/'
                      '{{ l.name_local }} bidi={{ l.bidi }}; {% endfor %}'})
    def test_i18n30(self):
        output = self.engine.render_to_string('i18n30', {'langcodes': ['it', 'no']})
        self.assertEqual(output, 'it: Italian/italiano bidi=False; no: Norwegian/norsk bidi=False; ')

    @setup({'i18n31': '{% load i18n %}'
                      '{% get_language_info_list for langcodes as langs %}'
                      '{% for l in langs %}{{ l.code }}: {{ l.name }}/'
                      '{{ l.name_local }} bidi={{ l.bidi }}; {% endfor %}'})
    def test_i18n31(self):
        # The tag also accepts (code, name) pairs, not just plain codes.
        output = self.engine.render_to_string('i18n31', {'langcodes': (('sl', 'Slovenian'), ('fa', 'Persian'))})
        self.assertEqual(
            output,
            'sl: Slovenian/Sloven\u0161\u010dina bidi=False; '
            'fa: Persian/\u0641\u0627\u0631\u0633\u06cc bidi=True; '
        )

    @setup({'i18n38_2': '{% load i18n custom %}'
                        '{% get_language_info_list for langcodes|noop:"x y" as langs %}'
                        '{% for l in langs %}{{ l.code }}: {{ l.name }}/'
                        '{{ l.name_local }}/{{ l.name_translated }} '
                        'bidi={{ l.bidi }}; {% endfor %}'})
    def test_i18n38_2(self):
        # name_translated follows the active language (Czech here).
        with translation.override('cs'):
            output = self.engine.render_to_string('i18n38_2', {'langcodes': ['it', 'fr']})
        self.assertEqual(
            output,
            'it: Italian/italiano/italsky bidi=False; '
            'fr: French/français/francouzsky bidi=False; '
        )

    @setup({'i18n_syntax': '{% load i18n %} {% get_language_info_list error %}'})
    def test_no_for_as(self):
        msg = "'get_language_info_list' requires 'for sequence as variable' (got ['error'])"
        with self.assertRaisesMessage(TemplateSyntaxError, msg):
            self.engine.render_to_string('i18n_syntax')
| mit |
rossgoodwin/musapaedia | musapaedia/en/parser/nltk_lite/corpora/timit.py | 9 | 12505 | # Natural Language Toolkit: TIMIT Corpus Reader
#
# Copyright (C) 2001-2006 University of Pennsylvania
# Author: Haejoong Lee <haejoong@ldc.upenn.edu>
# Steven Bird <sb@ldc.upenn.edu>
# URL: <http://nltk.sf.net>
# For license information, see LICENSE.TXT
"""
Read tokens, phonemes and audio data from the NLTK TIMIT Corpus.
This corpus contains selected portion of the TIMIT corpus.
* 16 speakers from 8 dialect regions
* 1 male and 1 female from each dialect region
* total 130 sentences (10 sentences per speaker. Note that some
sentences are shared among other speakers, especially sa1 and sa2
are spoken by all speakers.)
* total 160 recording of sentences (10 recordings per speaker)
* audio format: NIST Sphere, single channel, 16kHz sampling,
16 bit sample, PCM encoding
Module contents
---------------
The timit module provides 4 functions and 4 data items.
* items
List of items in the corpus. There are total 160 items, each of which
corresponds to a unique utterance of a speaker. Here's an example of an
item in the list:
dr1-fvmh0:sx206
- _---- _---
| | | | |
| | | | |
| | | | `--- sentence number
| | | `----- sentence type (a:all, i:shared, x:exclusive)
| | `--------- speaker ID
| `------------ sex (m:male, f:female)
`-------------- dialect region (1..8)
* speakers
List of speaker IDs. An example of speaker ID:
dr1-fvmh0
Note that if you split an item ID with colon and take the first element of
the result, you will get a speaker ID.
    >>> itemid = 'dr1-fvmh0:sx206'
>>> spkrid,sentid = itemid.split(':')
>>> spkrid
'dr1-fvmh0'
The second element of the result is a sentence ID.
* dictionary
Phonetic dictionary of words contained in this corpus. This is a Python
dictionary from words to phoneme lists.
* spkrinfo
Speaker information table. It's a Python dictionary from speaker IDs to
records of 10 fields. Speaker IDs the same as the ones in timie.speakers.
Each record is a dictionary from field names to values, and the fields are
as follows:
id speaker ID as defined in the original TIMIT speaker info table
sex speaker gender (M:male, F:female)
dr speaker dialect region (1:new england, 2:northern,
3:north midland, 4:south midland, 5:southern, 6:new york city,
7:western, 8:army brat (moved around))
use corpus type (TRN:training, TST:test)
in this sample corpus only TRN is available
recdate recording date
birthdate speaker birth date
ht speaker height
race speaker race (WHT:white, BLK:black, AMR:american indian,
SPN:spanish-american, ORN:oriental,???:unknown)
edu speaker education level (HS:high school, AS:associate degree,
BS:bachelor's degree (BS or BA), MS:master's degree (MS or MA),
PHD:doctorate degree (PhD,JD,MD), ??:unknown)
comments comments by the recorder
The 4 functions are as follows.
* raw(sentences=items, offset=False)
Given a list of items, returns an iterator of a list of word lists,
each of which corresponds to an item (sentence). If offset is set to True,
each element of the word list is a tuple of word(string), start offset and
end offset, where offset is represented as a number of 16kHz samples.
* phonetic(sentences=items, offset=False)
Given a list of items, returns an iterator of a list of phoneme lists,
each of which corresponds to an item (sentence). If offset is set to True,
each element of the phoneme list is a tuple of word(string), start offset
and end offset, where offset is represented as a number of 16kHz samples.
* audiodata(item, start=0, end=None)
Given an item, returns a chunk of audio samples formatted into a string.
When the fuction is called, if start and end are omitted, the entire
samples of the recording will be returned. If only end is omitted,
samples from the start offset to the end of the recording will be returned.
* play(data)
Play the given audio samples. The audio samples can be obtained from the
timit.audiodata function.
"""
from en.parser.nltk_lite.corpora import get_basedir
from en.parser.nltk_lite import tokenize
from itertools import islice
import ossaudiodev, time
import sys, os, re
if sys.platform.startswith('linux') or sys.platform.startswith('freebsd'):
PLAY_ENABLED = True
else:
PLAY_ENABLED = False
__all__ = ["items", "raw", "phonetic", "speakers", "dictionary", "spkrinfo",
"audiodata", "play"]
PREFIX = os.path.join(get_basedir(),"timit")
speakers = []
items = []
dictionary = {}
spkrinfo = {}
for f in os.listdir(PREFIX):
if re.match("^dr[0-9]-[a-z]{4}[0-9]$", f):
speakers.append(f)
for g in os.listdir(os.path.join(PREFIX,f)):
if g.endswith(".txt"):
items.append(f+':'+g[:-4])
speakers.sort()
items.sort()
# read dictionary
for l in open(os.path.join(PREFIX,"timitdic.txt")):
if l[0] == ';': continue
a = l.strip().split(' ')
dictionary[a[0]] = a[1].strip('/').split()
# read spkrinfo
header = ['id','sex','dr','use','recdate','birthdate','ht','race','edu',
'comments']
for l in open(os.path.join(PREFIX,"spkrinfo.txt")):
if l[0] == ';': continue
rec = l[:54].split() + [l[54:].strip()]
key = "dr%s-%s%s" % (rec[2],rec[1].lower(),rec[0].lower())
spkrinfo[key] = dict([(header[i],rec[i]) for i in range(10)])
def _prim(ext, sentences=items, offset=False):
if isinstance(sentences,str):
sentences = [sentences]
for sent in sentences:
fnam = os.path.sep.join([PREFIX] + sent.split(':')) + ext
r = []
for l in open(fnam):
if not l.strip(): continue
a = l.split()
if offset:
r.append((a[2],int(a[0]),int(a[1])))
else:
r.append(a[2])
yield r
def raw(sentences=items, offset=False):
"""
Given a list of items, returns an iterator of a list of word lists,
each of which corresponds to an item (sentence). If offset is set to True,
each element of the word list is a tuple of word(string), start offset and
end offset, where offset is represented as a number of 16kHz samples.
@param sentences: List of items (sentences) for which tokenized word list
will be returned. In case there is only one item, it is possible to
pass the item id as a string.
@type sentences: list of strings or a string
@param offset: If True, the start and end offsets are accompanied to each
word in the returned list. Note that here, an offset is represented by
the number of 16kHz samples.
@type offset: bool
@return: List of list of strings (words) if offset is False. List of list
of tuples (word, start offset, end offset) if offset if True.
"""
return _prim(".wrd", sentences, offset)
def phonetic(sentences=items, offset=False):
"""
Given a list of items, returns an iterator of a list of phoneme lists,
each of which corresponds to an item (sentence). If offset is set to True,
each element of the phoneme list is a tuple of word(string), start offset
and end offset, where offset is represented as a number of 16kHz samples.
@param sentences: List of items (sentences) for which phoneme list
will be returned. In case there is only one item, it is possible to
pass the item id as a string.
@type sentences: list of strings or a string
@param offset: If True, the start and end offsets are accompanied to each
phoneme in the returned list. Note that here, an offset is represented by
the number of 16kHz samples.
@type offset: bool
@return: List of list of strings (phonemes) if offset is False. List of
list of tuples (phoneme, start offset, end offset) if offset if True.
"""
return _prim(".phn", sentences, offset)
def audiodata(item, start=0, end=None):
"""
Given an item, returns a chunk of audio samples formatted into a string.
When the fuction is called, if start and end are omitted, the entire
samples of the recording will be returned. If only end is omitted,
samples from the start offset to the end of the recording will be returned.
@param start: start offset
@type start: integer (number of 16kHz frames)
@param end: end offset
@type end: integer (number of 16kHz frames) or None to indicate
the end of file
@return: string of sequence of bytes of audio samples
"""
assert(end is None or end > start)
headersize = 44
fnam = os.path.join(PREFIX,item.replace(':',os.path.sep)) + '.wav'
if end is None:
data = open(fnam).read()
else:
data = open(fnam).read(headersize+end*2)
return data[headersize+start*2:]
def play(data):
"""
Play the given audio samples.
@param data: audio samples
@type data: string of bytes of audio samples
"""
if not PLAY_ENABLED:
print >>sys.stderr, "sorry, currently we don't support audio playback on this platform:", sys.platform
return
try:
dsp = ossaudiodev.open('w')
except IOError, e:
print >>sys.stderr, "can't acquire the audio device; please activate your audio device."
print >>sys.stderr, "system error message:", str(e)
return
dsp.setfmt(ossaudiodev.AFMT_S16_LE)
dsp.channels(1)
dsp.speed(16000)
dsp.write(data)
dsp.close()
def demo():
from en.parser.nltk_lite.corpora import timit
print "6th item (timit.items[5])"
print "-------------------------"
itemid = timit.items[5]
spkrid, sentid = itemid.split(':')
print " item id: ", itemid
print " speaker id: ", spkrid
print " sentence id:", sentid
print
record = timit.spkrinfo[spkrid]
print " speaker information:"
print " TIMIT speaker id: ", record['id']
print " speaker sex: ", record['sex']
print " dialect region: ", record['dr']
print " data type: ", record['use']
print " recording date: ", record['recdate']
print " date of birth: ", record['birthdate']
print " speaker height: ", record['ht']
print " speaker race: ", record['race']
print " speaker education:", record['edu']
print " comments: ", record['comments']
print
print " words of the sentence:"
print " ", timit.raw(sentences=itemid).next()
print
print " words of the sentence with offsets (first 3):"
print " ", timit.raw(sentences=itemid, offset=True).next()[:3]
print
print " phonemes of the sentence (first 10):"
print " ", timit.phonetic(sentences=itemid).next()[:10]
print
print " phonemes of the sentence with offsets (first 3):"
print " ", timit.phonetic(sentences=itemid, offset=True).next()[:3]
print
print " looking up dictionary for words of the sentence..."
words = timit.raw(sentences=itemid).next()
for word in words:
print " %-5s:" % word, timit.dictionary[word]
print
print "audio playback:"
print "---------------"
print " playing sentence", sentid, "by speaker", spkrid, "(a.k.a. %s)"%record["id"], "..."
data = timit.audiodata(itemid)
timit.play(data)
print
print " playing words:"
words = timit.raw(sentences=itemid, offset=True).next()
for word, start, end in words:
print " playing %-10s in 1.5 seconds ..." % `word`
time.sleep(1.5)
data = timit.audiodata(itemid, start, end)
timit.play(data)
print
print " playing phonemes (first 10):"
phones = timit.phonetic(sentences=itemid, offset=True).next()
for phone, start, end in phones[:10]:
print " playing %-10s in 1.5 seconds ..." % `phone`
time.sleep(1.5)
data = timit.audiodata(itemid, start, end)
timit.play(data)
print
# play sentence sa1 of all female speakers
sentid = 'sa1'
for spkr in timit.speakers:
if timit.spkrinfo[spkr]['sex'] == 'F':
itemid = spkr + ':' + sentid
print " playing sentence %s of speaker %s ..." % (sentid, spkr)
data = timit.audiodata(itemid)
timit.play(data)
print
if __name__ == '__main__':
demo()
| mit |
huner2/SublimusAPI | Site/main.py | 1 | 9045 | #!/usr/bin/env python
import dataset
import simplejson as json
import time
import re
import urllib2
from functools import wraps
from base64 import b64decode
from flask import Flask
from flask import jsonify
from flask import make_response
from flask import redirect
from flask import render_template
from flask import request
from flask import session
from flask import url_for
from flask.ext.seasurf import SeaSurf
app = Flask(__name__)
csrf = SeaSurf(app)
db = None
config = None
def login_required(f):
"""Ensures that an user is logged in"""
@wraps(f)
def decorated_function(*args, **kwargs):
if 'user_id' not in session:
return redirect("/error/login_required")
return f(*args, **kwargs)
return decorated_function
def get_user():
"""Looks up the current user in the database"""
login = 'user_id' in session
if login:
return (True, db['users'].find_one(id=session['user_id']))
return (False, None)
@app.errorhandler(404)
def page_not_found(e):
return redirect('/error/page_not_found')
@app.errorhandler(500)
def i_broke_it(e):
return redirect('/error/server_error')
@app.route('/error/<msg>')
def error(msg):
"""Displays an error message"""
message = msg
login, user = get_user()
render = render_template('frame.html', page='error.html',
message=message, login=login, user=user)
return make_response(render)
@app.route('/license')
def license():
"""Displays license page"""
login, user = get_user()
render = render_template('frame.html', page='license.html',
login=login, user=user)
return make_response(render)
@app.route('/terms')
def terms():
"""Displays terms page"""
login, user = get_user()
render = render_template('frame.html', page='terms.html',
login=login, user=user)
return make_response(render)
@app.route('/privacy')
def privacy():
"""Displays privacy page"""
login, user = get_user()
render = render_template('frame.html', page='privacy.html',
login=login, user=user)
return make_response(render)
@app.route('/about')
def about():
"""Displays about page"""
login, user = get_user()
render = render_template('frame.html', page='about.html',
login=login, user=user)
return make_response(render)
@app.route('/apis')
def apis():
"""Displays api page"""
login, user = get_user()
render = render_template('frame.html', page='apis.html',
login=login, user=user)
return make_response(render)
@app.route('/apis/getUsernameById/<uid>', methods=['GET'])
def getUsernameById(uid):
"""Get user by ID"""
try:
userpage = urllib2.urlopen("http://www.roblox.com/users/" + uid + "/profile")
except urllib2.HTTPError, err:
return jsonify({'response': err.code})
page_source = userpage.read()
index = page_source.find("<h2>", page_source.find("header-title"))
endIndex = page_source.find("</h2>", index)
username = page_source[index+4:endIndex] # Add tag length
return jsonify({'response': 200, 'username': username})
@app.route('/apis/userCanManageAsset/<uid>/<aid>', methods=['GET'])
def userCanManageAsset(uid, aid):
"""Return if a user can manage an asset"""
try:
apicall = urllib2.urlopen("http://api.roblox.com/users/"+uid+"/canmanage/"+aid)
except urllib2.HTTPError, err:
return jsonify({'response': err.code})
page_source = apicall.read()
jsony = json.loads(page_source)
if jsony["Success"] == False:
return jsonify({'response': -1337})
return jsonify({'response': 200, 'manage': jsony["CanManage"]})
@app.route('/apis/userHasAsset/<uid>/<aid>', methods=['GET'])
def userHasAsset(uid, aid):
"""Return if a user has an asset"""
try:
apicall = urllib2.urlopen("http://api.roblox.com/ownership/hasasset?userId="+uid+"&assetId="+aid)
except urllib2.HTTPError, err:
return jsonify({'response': err.code})
page_source = apicall.read()
return jsonify({'response': 200, 'has':page_source})
@app.route('/apis/getMarketplaceInfo/<aid>', methods=['GET'])
def getMarketPlaceInfo(aid):
"""Return product info for given product"""
try:
apicall = urllib2.urlopen("http://api.roblox.com/marketplace/productinfo?assetId="+aid)
except urllib2.HTTPError, err:
return jsonify({'response': err.code})
page_source = apicall.read()
return jsonify({'response': 200, 'info':page_source})
@app.route('/apis/getClanByUser/<uid>', methods=['GET'])
def getClanByUser(uid):
"""Get clan information for given user id"""
try:
apicall = urllib2.urlopen("http://api.roblox.com/clans/get-by-user?userId="+uid)
except urllib2.HTTPError, err:
return jsonify({'response': err.code})
page_source = apicall.read()
return jsonify({'response': 200, 'info':page_source})
@app.route('/apis/getClanById/<cid>', methods=['GET'])
def getClanById(cid):
"""Get clan information for given clan id"""
try:
apicall = urllib2.urlopen("http://api.roblox.com/clans/get-by-id?clanId="+cid)
except urllib2.HTTPError, err:
return jsonify({'response': err.code})
page_source = apicall.read()
return jsonify({'response': 200, 'info':page_source})
@app.route('/apis/getFriendsOfUser/<uid>/<page>', methods=['GET'])
def friendsOfUser(uid, page):
"""Get a paged list of friends for the specified user"""
try:
apicall = urllib2.urlopen("http://api.roblox.com/users/"+uid+"/friends?page="+page)
except urllib2.HTTPError, err:
return jsonify({'reponse': err.code})
page_source = apicall.read()
return jsonify({'response': 200, 'info': page_source})
@app.route('/apis/getIdByUsername/<username>', methods=['GET'])
def getIdByUsername(username):
"""Get ID by user"""
try:
search = urllib2.urlopen("http://m.roblox.com/User/DoSearch?startRow=0&keyword="+username)
except urllib2.HTTPError, err:
return jsonify({'response': err.code})
page_source = search.read()
index = page_source.find("alt=")
endIndex = page_source.find("/>", index)
firstUsername = page_source[index+5:endIndex-3]
if (username.lower() != firstUsername.lower()):
return jsonify({'response': -1337})
index = page_source.find("/users")
endIndex = page_source.find(">", index)
id = page_source[index + 7:endIndex-1]
return jsonify({'response': 200, 'id': id})
def session_login(username):
"""Initializes the session with the current user's id"""
user = db['users'].find_one(username=username)
session['user_id'] = user['id']
@app.route('/login')
def loginPage():
"""Displays the login page"""
login, user = get_user()
render = render_template('frame.html', page='login.html',
login=login, user=user)
return make_response(render)
@app.route('/logins', methods = ['POST'])
def login():
"""Attempts to log the user in"""
from werkzeug.security import check_password_hash
username = request.form['user']
password = request.form['password']
username = username.lower()
username = username[:1].upper() + username[1:]
user = db['users'].find_one(username=username)
if user is None:
return redirect('/error/invalid_credentials')
if check_password_hash(user['password'], password):
session_login(username)
return redirect('/')
return redirect('/error/invalid_credentials')
@app.route('/register')
def register():
"""Displays the register form"""
# Render template
render = render_template('frame.html', page='register.html', login=False, minpasslength=minpasslength)
return make_response(render)
@app.route('/register/submit', methods = ['POST'])
def register_submit():
"""Attempts to register a new user"""
from werkzeug.security import generate_password_hash
username = request.form['user']
password = request.form['password']
username = username.lower()
username = username[:1].upper() + username[1:]
if not username:
return redirect('/error/empty_user')
user_found = db['users'].find_one(username=username)
if user_found:
return redirect('/error/already_registered')
if len(password) < minpasslength:
return redirect('error/password_too_short')
new_user = dict(hidden=0, username=username,
password=generate_password_hash(password), roID=0, confirmed=False)
# Set up the user id for this session
session_login(username)
return redirect('/')
@app.route('/logout')
@login_required
def logout():
"""Logs the current user out"""
del session['user_id']
return redirect('/')
@app.route('/')
def index():
login, user = get_user()
# Render template
render = render_template('frame.html',
page='main.html', login=login, user=user, testing=config["testing"])
return make_response(render)
if __name__ == '__main__':
config_str = open('config.json', 'rb').read()
config = json.loads(config_str)
minpasslength = config['minimum_password_length']
app.secret_key = config['secret_key']
db = dataset.connect(config['db'])
app.run(host=config["host"],port=config["port"],debug=config["debug"],threaded = True)
| apache-2.0 |
hh-italian-group/HHbbTauTau | PatProduction/python/treeProduction.py | 1 | 2069 | ## @package treeProduction
# Configuration file that defines the sequence to produce ROOT-tuples for X->HH->bbTauTau analysis.
#
# \author Subir Sarkar
# \author Rosamaria Venditti (INFN Bari, Bari University)
# \author Konstantin Androsov (University of Siena, INFN Pisa)
# \author Maria Teresa Grippo (University of Siena, INFN Pisa)
#
# Copyright 2011-2013 Subir Sarkar, Rosamaria Venditti (INFN Bari, Bari University)
# Copyright 2014 Konstantin Androsov <konstantin.androsov@gmail.com>,
# Maria Teresa Grippo <grippomariateresa@gmail.com>
#
# This file is part of X->HH->bbTauTau.
#
# X->HH->bbTauTau is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# X->HH->bbTauTau is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with X->HH->bbTauTau. If not, see <http://www.gnu.org/licenses/>.
import FWCore.ParameterSet.Config as cms
def addTreeSequence(process, includeSim, treeOutput):
process.TFileService = cms.Service("TFileService", fileName = cms.string(treeOutput) )
process.load("HHbbTauTau.TreeProduction.TreeContentConfig_cff")
process.mainTreeContentSequence = cms.Sequence(
process.eventBlock
+ process.vertexBlock
+ process.electronBlock
+ process.jetBlock
+ process.metBlock
+ process.muonBlock
+ process.tauBlock
+ process.pfCandBlock
+ process.triggerBlock
+ process.triggerObjectBlock
)
process.simTreeContentSequence = cms.Sequence()
if includeSim:
process.simTreeContentSequence = cms.Sequence(process.genParticleBlock + process.genEventBlock + process.genMETBlock)
return
| gpl-2.0 |
florentx/OpenUpgrade | addons/hw_escpos/escpos/constants.py | 129 | 7219 | # -*- coding: utf-8 -*-
""" ESC/POS Commands (Constants) """
# Feed control sequences
CTL_LF = '\x0a' # Print and line feed
CTL_FF = '\x0c' # Form feed
CTL_CR = '\x0d' # Carriage return
CTL_HT = '\x09' # Horizontal tab
CTL_VT = '\x0b' # Vertical tab
# Printer hardware
HW_INIT = '\x1b\x40' # Clear data in buffer and reset modes
HW_SELECT = '\x1b\x3d\x01' # Printer select
HW_RESET = '\x1b\x3f\x0a\x00' # Reset printer hardware
# Cash Drawer
CD_KICK_2 = '\x1b\x70\x00' # Sends a pulse to pin 2 []
CD_KICK_5 = '\x1b\x70\x01' # Sends a pulse to pin 5 []
# Paper
PAPER_FULL_CUT = '\x1d\x56\x00' # Full cut paper
PAPER_PART_CUT = '\x1d\x56\x01' # Partial cut paper
# Text format
TXT_NORMAL = '\x1b\x21\x00' # Normal text
TXT_2HEIGHT = '\x1b\x21\x10' # Double height text
TXT_2WIDTH = '\x1b\x21\x20' # Double width text
TXT_DOUBLE = '\x1b\x21\x30' # Double height & Width
TXT_UNDERL_OFF = '\x1b\x2d\x00' # Underline font OFF
TXT_UNDERL_ON = '\x1b\x2d\x01' # Underline font 1-dot ON
TXT_UNDERL2_ON = '\x1b\x2d\x02' # Underline font 2-dot ON
TXT_BOLD_OFF = '\x1b\x45\x00' # Bold font OFF
TXT_BOLD_ON = '\x1b\x45\x01' # Bold font ON
TXT_FONT_A = '\x1b\x4d\x00' # Font type A
TXT_FONT_B = '\x1b\x4d\x01' # Font type B
TXT_ALIGN_LT = '\x1b\x61\x00' # Left justification
TXT_ALIGN_CT = '\x1b\x61\x01' # Centering
TXT_ALIGN_RT = '\x1b\x61\x02' # Right justification
TXT_COLOR_BLACK = '\x1b\x72\x00' # Default Color
TXT_COLOR_RED = '\x1b\x72\x01' # Alternative Color ( Usually Red )
# Text Encoding
TXT_ENC_PC437 = '\x1b\x74\x00' # PC437 USA
TXT_ENC_KATAKANA= '\x1b\x74\x01' # KATAKANA (JAPAN)
TXT_ENC_PC850 = '\x1b\x74\x02' # PC850 Multilingual
TXT_ENC_PC860 = '\x1b\x74\x03' # PC860 Portuguese
TXT_ENC_PC863 = '\x1b\x74\x04' # PC863 Canadian-French
TXT_ENC_PC865 = '\x1b\x74\x05' # PC865 Nordic
TXT_ENC_KANJI6 = '\x1b\x74\x06' # One-pass Kanji, Hiragana
TXT_ENC_KANJI7 = '\x1b\x74\x07' # One-pass Kanji
TXT_ENC_KANJI8 = '\x1b\x74\x08' # One-pass Kanji
TXT_ENC_PC851 = '\x1b\x74\x0b' # PC851 Greek
TXT_ENC_PC853 = '\x1b\x74\x0c' # PC853 Turkish
TXT_ENC_PC857 = '\x1b\x74\x0d' # PC857 Turkish
TXT_ENC_PC737 = '\x1b\x74\x0e' # PC737 Greek
TXT_ENC_8859_7 = '\x1b\x74\x0f' # ISO8859-7 Greek
TXT_ENC_WPC1252 = '\x1b\x74\x10' # WPC1252
TXT_ENC_PC866 = '\x1b\x74\x11' # PC866 Cyrillic #2
TXT_ENC_PC852 = '\x1b\x74\x12' # PC852 Latin2
TXT_ENC_PC858 = '\x1b\x74\x13' # PC858 Euro
TXT_ENC_KU42 = '\x1b\x74\x14' # KU42 Thai
TXT_ENC_TIS11 = '\x1b\x74\x15' # TIS11 Thai
TXT_ENC_TIS18 = '\x1b\x74\x1a' # TIS18 Thai
TXT_ENC_TCVN3 = '\x1b\x74\x1e' # TCVN3 Vietnamese
TXT_ENC_TCVN3B = '\x1b\x74\x1f' # TCVN3 Vietnamese
TXT_ENC_PC720 = '\x1b\x74\x20' # PC720 Arabic
TXT_ENC_WPC775 = '\x1b\x74\x21' # WPC775 Baltic Rim
TXT_ENC_PC855 = '\x1b\x74\x22' # PC855 Cyrillic
TXT_ENC_PC861 = '\x1b\x74\x23' # PC861 Icelandic
TXT_ENC_PC862 = '\x1b\x74\x24' # PC862 Hebrew
TXT_ENC_PC864 = '\x1b\x74\x25' # PC864 Arabic
TXT_ENC_PC869 = '\x1b\x74\x26' # PC869 Greek
TXT_ENC_8859_2 = '\x1b\x74\x27' # ISO8859-2 Latin2
TXT_ENC_8859_9 = '\x1b\x74\x28' # ISO8859-2 Latin9
TXT_ENC_PC1098 = '\x1b\x74\x29' # PC1098 Farsi
TXT_ENC_PC1118 = '\x1b\x74\x2a' # PC1118 Lithuanian
TXT_ENC_PC1119 = '\x1b\x74\x2b' # PC1119 Lithuanian
TXT_ENC_PC1125 = '\x1b\x74\x2c' # PC1125 Ukrainian
TXT_ENC_WPC1250 = '\x1b\x74\x2d' # WPC1250 Latin2
TXT_ENC_WPC1251 = '\x1b\x74\x2e' # WPC1251 Cyrillic
TXT_ENC_WPC1253 = '\x1b\x74\x2f' # WPC1253 Greek
TXT_ENC_WPC1254 = '\x1b\x74\x30' # WPC1254 Turkish
TXT_ENC_WPC1255 = '\x1b\x74\x31' # WPC1255 Hebrew
TXT_ENC_WPC1256 = '\x1b\x74\x32' # WPC1256 Arabic
TXT_ENC_WPC1257 = '\x1b\x74\x33' # WPC1257 Baltic Rim
TXT_ENC_WPC1258 = '\x1b\x74\x34' # WPC1258 Vietnamese
TXT_ENC_KZ1048 = '\x1b\x74\x35' # KZ-1048 Kazakhstan
TXT_ENC_KATAKANA_MAP = {
# Maps UTF-8 Katakana symbols to KATAKANA Page Codes
# Half-Width Katakanas
'\xef\xbd\xa1':'\xa1', # 。
'\xef\xbd\xa2':'\xa2', # 「
'\xef\xbd\xa3':'\xa3', # 」
'\xef\xbd\xa4':'\xa4', # 、
'\xef\xbd\xa5':'\xa5', # ・
'\xef\xbd\xa6':'\xa6', # ヲ
'\xef\xbd\xa7':'\xa7', # ァ
'\xef\xbd\xa8':'\xa8', # ィ
'\xef\xbd\xa9':'\xa9', # ゥ
'\xef\xbd\xaa':'\xaa', # ェ
'\xef\xbd\xab':'\xab', # ォ
'\xef\xbd\xac':'\xac', # ャ
'\xef\xbd\xad':'\xad', # ュ
'\xef\xbd\xae':'\xae', # ョ
'\xef\xbd\xaf':'\xaf', # ッ
'\xef\xbd\xb0':'\xb0', # ー
'\xef\xbd\xb1':'\xb1', # ア
'\xef\xbd\xb2':'\xb2', # イ
'\xef\xbd\xb3':'\xb3', # ウ
'\xef\xbd\xb4':'\xb4', # エ
'\xef\xbd\xb5':'\xb5', # オ
'\xef\xbd\xb6':'\xb6', # カ
'\xef\xbd\xb7':'\xb7', # キ
'\xef\xbd\xb8':'\xb8', # ク
'\xef\xbd\xb9':'\xb9', # ケ
'\xef\xbd\xba':'\xba', # コ
'\xef\xbd\xbb':'\xbb', # サ
'\xef\xbd\xbc':'\xbc', # シ
'\xef\xbd\xbd':'\xbd', # ス
'\xef\xbd\xbe':'\xbe', # セ
'\xef\xbd\xbf':'\xbf', # ソ
'\xef\xbe\x80':'\xc0', # タ
'\xef\xbe\x81':'\xc1', # チ
'\xef\xbe\x82':'\xc2', # ツ
'\xef\xbe\x83':'\xc3', # テ
'\xef\xbe\x84':'\xc4', # ト
'\xef\xbe\x85':'\xc5', # ナ
'\xef\xbe\x86':'\xc6', # ニ
'\xef\xbe\x87':'\xc7', # ヌ
'\xef\xbe\x88':'\xc8', # ネ
'\xef\xbe\x89':'\xc9', # ノ
'\xef\xbe\x8a':'\xca', # ハ
'\xef\xbe\x8b':'\xcb', # ヒ
'\xef\xbe\x8c':'\xcc', # フ
'\xef\xbe\x8d':'\xcd', # ヘ
'\xef\xbe\x8e':'\xce', # ホ
'\xef\xbe\x8f':'\xcf', # マ
'\xef\xbe\x90':'\xd0', # ミ
'\xef\xbe\x91':'\xd1', # ム
'\xef\xbe\x92':'\xd2', # メ
'\xef\xbe\x93':'\xd3', # モ
'\xef\xbe\x94':'\xd4', # ヤ
'\xef\xbe\x95':'\xd5', # ユ
'\xef\xbe\x96':'\xd6', # ヨ
'\xef\xbe\x97':'\xd7', # ラ
'\xef\xbe\x98':'\xd8', # リ
'\xef\xbe\x99':'\xd9', # ル
'\xef\xbe\x9a':'\xda', # レ
'\xef\xbe\x9b':'\xdb', # ロ
'\xef\xbe\x9c':'\xdc', # ワ
'\xef\xbe\x9d':'\xdd', # ン
'\xef\xbe\x9e':'\xde', # ゙
'\xef\xbe\x9f':'\xdf', # ゚
}
# Barcod format
BARCODE_TXT_OFF = '\x1d\x48\x00' # HRI barcode chars OFF
BARCODE_TXT_ABV = '\x1d\x48\x01' # HRI barcode chars above
BARCODE_TXT_BLW = '\x1d\x48\x02' # HRI barcode chars below
BARCODE_TXT_BTH = '\x1d\x48\x03' # HRI barcode chars both above and below
BARCODE_FONT_A = '\x1d\x66\x00' # Font type A for HRI barcode chars
BARCODE_FONT_B = '\x1d\x66\x01' # Font type B for HRI barcode chars
BARCODE_HEIGHT = '\x1d\x68\x64' # Barcode Height [1-255]
BARCODE_WIDTH = '\x1d\x77\x03' # Barcode Width [2-6]
BARCODE_UPC_A = '\x1d\x6b\x00' # Barcode type UPC-A
BARCODE_UPC_E = '\x1d\x6b\x01' # Barcode type UPC-E
BARCODE_EAN13 = '\x1d\x6b\x02' # Barcode type EAN13
BARCODE_EAN8 = '\x1d\x6b\x03' # Barcode type EAN8
BARCODE_CODE39 = '\x1d\x6b\x04' # Barcode type CODE39
BARCODE_ITF = '\x1d\x6b\x05' # Barcode type ITF
BARCODE_NW7 = '\x1d\x6b\x06' # Barcode type NW7
# Image format
S_RASTER_N = '\x1d\x76\x30\x00' # Set raster image normal size
S_RASTER_2W = '\x1d\x76\x30\x01' # Set raster image double width
S_RASTER_2H = '\x1d\x76\x30\x02' # Set raster image double height
S_RASTER_Q = '\x1d\x76\x30\x03' # Set raster image quadruple
| agpl-3.0 |
Alwnikrotikz/sulley | process_monitor_unix.py | 6 | 8581 | import os
import sys
import getopt
import signal
import time
import threading
from sulley import pedrpc
'''
By nnp
http://www.unprotectedhex.com
This intended as a basic replacement for Sulley's process_monitor.py on *nix.
The below options are accepted. Crash details are limited to the signal that
caused the death and whatever operating system supported mechanism is in place (i.e
core dumps)
Replicated methods:
- alive
- log
- post_send
- pre_send
_ start_target
- stop_target
- set_start_commands
- set_stop_commands
Limitations
- Cannot attach to an already running process
- Currently only accepts one start_command
- Limited 'crash binning'. Relies on the availability of core dumps. These
should be created in the same directory the process is ran from on Linux
and in the (hidden) /cores directory on OS X. On OS X you have to add
the option COREDUMPS=-YES- to /etc/hostconfig and then `ulimit -c
unlimited` as far as I know. A restart may be required. The file
specified by crash_bin will any other available details such as the test
that caused the crash and the signal received by the program
'''
USAGE = "USAGE: process_monitor_unix.py"\
"\n [-c|--crash_bin] File to record crash info too" \
"\n [-P|--port PORT] TCP port to bind this agent too"\
"\n [-l|--log_level LEVEL] log level (default 1), increase for more verbosity"
ERR = lambda msg: sys.stderr.write("ERR> " + msg + "\n") or sys.exit(1)
class debugger_thread:
def __init__(self, start_command):
'''
This class isn't actually ran as a thread, only the start_monitoring
method is. It can spawn/stop a process, wait for it to exit and report on
the exit status/code.
'''
self.start_command = start_command
self.tokens = start_command.split(' ')
self.cmd_args = []
self.pid = None
self.exit_status = None
self.alive = False
def spawn_target(self):
print self.tokens
self.pid = os.spawnv(os.P_NOWAIT, self.tokens[0], self.tokens)
self.alive = True
def start_monitoring(self):
'''
self.exit_status = os.waitpid(self.pid, os.WNOHANG | os.WUNTRACED)
while self.exit_status == (0, 0):
self.exit_status = os.waitpid(self.pid, os.WNOHANG | os.WUNTRACED)
'''
self.exit_status = os.waitpid(self.pid, 0)
# [0] is the pid
self.exit_status = self.exit_status[1]
self.alive = False
def get_exit_status(self):
return self.exit_status
def stop_target(self):
os.kill(self.pid, signal.SIGKILL)
self.alive = False
def isAlive(self):
return self.alive
########################################################################################################################
class nix_process_monitor_pedrpc_server(pedrpc.server):
def __init__(self, host, port, crash_bin, log_level=1):
'''
@type host: String
@param host: Hostname or IP address
@type port: Integer
@param port: Port to bind server to
'''
pedrpc.server.__init__(self, host, port)
self.crash_bin = crash_bin
self.log_level = log_level
self.dbg = None
self.log("Process Monitor PED-RPC server initialized:")
self.log("Listening on %s:%s" % (host, port))
self.log("awaiting requests...")
def alive (self):
'''
Returns True. Useful for PED-RPC clients who want to see if the PED-RPC connection is still alive.
'''
return True
def log (self, msg="", level=1):
'''
If the supplied message falls under the current log level, print the specified message to screen.
@type msg: String
@param msg: Message to log
'''
if self.log_level >= level:
print "[%s] %s" % (time.strftime("%I:%M.%S"), msg)
def post_send (self):
'''
This routine is called after the fuzzer transmits a test case and returns the status of the target.
@rtype: Boolean
@return: Return True if the target is still active, False otherwise.
'''
if not self.dbg.isAlive():
exit_status = self.dbg.get_exit_status()
rec_file = open(self.crash_bin, 'a')
if os.WCOREDUMP(exit_status):
reason = 'Segmentation fault'
elif os.WIFSTOPPED(exit_status):
reason = 'Stopped with signal ' + str(os.WTERMSIG(exit_status))
elif os.WIFSIGNALED(exit_status):
reason = 'Terminated with signal ' + str(os.WTERMSIG(exit_status))
elif os.WIFEXITED(exit_status):
reason = 'Exit with code - ' + str(os.WEXITSTATUS(exit_status))
else:
reason = 'Process died for unknown reason'
self.last_synopsis = '[%s] Crash : Test - %d Reason - %s\n' % (time.strftime("%I:%M.%S"), self.test_number, reason)
rec_file.write(self.last_synopsis)
rec_file.close()
return self.dbg.isAlive()
def pre_send (self, test_number):
'''
This routine is called before the fuzzer transmits a test case and ensure the debugger thread is operational.
(In this implementation do nothing for now)
@type test_number: Integer
@param test_number: Test number to retrieve PCAP for.
'''
if self.dbg == None:
self.start_target()
self.log("pre_send(%d)" % test_number, 10)
self.test_number = test_number
def start_target (self):
'''
Start up the target process by issuing the commands in self.start_commands.
'''
self.log("starting target process")
self.dbg = debugger_thread(self.start_commands[0])
self.dbg.spawn_target()
# prevent blocking by spawning off another thread to waitpid
threading.Thread(target=self.dbg.start_monitoring).start()
self.log("done. target up and running, giving it 5 seconds to settle in.")
time.sleep(5)
def stop_target (self):
'''
Kill the current debugger thread and stop the target process by issuing the commands in self.stop_commands.
'''
# give the debugger thread a chance to exit.
time.sleep(1)
self.log("stopping target process")
for command in self.stop_commands:
if command == "TERMINATE_PID":
self.dbg.stop_target()
else:
os.system(command)
def set_start_commands (self, start_commands):
'''
We expect start_commands to be a list with one element for example
['/usr/bin/program arg1 arg2 arg3']
'''
if len(start_commands) > 1:
self.log("This process monitor does not accept > 1 start command")
return
self.log("updating start commands to: %s" % start_commands)
self.start_commands = start_commands
def set_stop_commands (self, stop_commands):
self.log("updating stop commands to: %s" % stop_commands)
self.stop_commands = stop_commands
def set_proc_name (self, proc_name):
self.log("updating target process name to '%s'" % proc_name)
self.proc_name = proc_name
def get_crash_synopsis (self):
'''
Return the last recorded crash synopsis.
@rtype: String
@return: Synopsis of last recorded crash.
'''
return self.last_synopsis
########################################################################################################################
if __name__ == "__main__":
# parse command line options.
try:
opts, args = getopt.getopt(sys.argv[1:], "c:P:l:", ["crash_bin=","port=","log_level=",])
except getopt.GetoptError:
ERR(USAGE)
log_level = 1
PORT = None
for opt, arg in opts:
if opt in ("-c", "--crash_bin"): crash_bin = arg
if opt in ("-P", "--port"): PORT = int(arg)
if opt in ("-l", "--log_level"): log_level = int(arg)
if crash_bin == None: ERR(USAGE)
if PORT == None:
PORT = 26002
# spawn the PED-RPC servlet.
servlet = nix_process_monitor_pedrpc_server("0.0.0.0", PORT, crash_bin, log_level)
servlet.serve_forever()
| gpl-2.0 |
benschmaus/catapult | third_party/gsutil/third_party/boto/tests/integration/ec2/test_cert_verification.py | 126 | 1549 | # Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Check that all of the certs on all service endpoints validate.
"""
import unittest
from tests.integration import ServiceCertVerificationTest
import boto.ec2
class EC2CertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
    """Integration test: validate the SSL certificates of EC2 endpoints."""
    # Service selector flag -- presumably consumed by
    # ServiceCertVerificationTest to pick connection logic; confirm in base class.
    ec2 = True
    # All EC2 regions known to this boto release.
    regions = boto.ec2.regions()
    def sample_service_call(self, conn):
        """Issue one authenticated API call to exercise the TLS handshake."""
        conn.get_all_reservations()
| bsd-3-clause |
timkrentz/SunTracker | IMU/VTK-6.2.0/Common/ComputationalGeometry/Testing/Python/KSpline.py | 2 | 4896 | #!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# Now create the RenderWindow, Renderer and Interactor
#
ren1 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
math = vtk.vtkMath()
numberOfInputPoints = 30
# One Kochanek spline per coordinate axis; evaluated together in fit().
aSplineX = vtk.vtkKochanekSpline()
aSplineY = vtk.vtkKochanekSpline()
aSplineZ = vtk.vtkKochanekSpline()
# generate random points
inputPoints = vtk.vtkPoints()
i = 0
while i < numberOfInputPoints:
    x = math.Random(0, 1)
    y = math.Random(0, 1)
    z = math.Random(0, 1)
    aSplineX.AddPoint(i, x)
    aSplineY.AddPoint(i, y)
    aSplineZ.AddPoint(i, z)
    inputPoints.InsertPoint(i, x, y, z)
    i += 1
inputData = vtk.vtkPolyData()
inputData.SetPoints(inputPoints)
# Small spheres glyph the random control points.
balls = vtk.vtkSphereSource()
balls.SetRadius(.01)
balls.SetPhiResolution(10)
balls.SetThetaResolution(10)
glyphPoints = vtk.vtkGlyph3D()
glyphPoints.SetInputData(inputData)
glyphPoints.SetSourceConnection(balls.GetOutputPort())
glyphMapper = vtk.vtkPolyDataMapper()
glyphMapper.SetInputConnection(glyphPoints.GetOutputPort())
glyph = vtk.vtkActor()
glyph.SetMapper(glyphMapper)
glyph.GetProperty().SetDiffuseColor(1, 0.6, 0.6)
glyph.GetProperty().SetSpecular(.3)
glyph.GetProperty().SetSpecularPower(30)
ren1.AddActor(glyph)
points = vtk.vtkPoints()
# create a line
# All three Kochanek shape parameters start neutral.
tension = 0
bias = 0
continuity = 0
aSplineX.SetDefaultTension(tension)
aSplineX.SetDefaultBias(bias)
aSplineX.SetDefaultContinuity(continuity)
aSplineY.SetDefaultTension(tension)
aSplineY.SetDefaultBias(bias)
aSplineY.SetDefaultContinuity(continuity)
aSplineZ.SetDefaultTension(tension)
aSplineZ.SetDefaultBias(bias)
aSplineZ.SetDefaultContinuity(continuity)
profileData = vtk.vtkPolyData()
numberOfOutputPoints = 300
# Parametric range shrink read by fit(): 1.0 for open splines, 0.0 when closed.
offset = 1.0
def fit():
    """Resample the three splines into the module-level `points`.

    Reads numberOfInputPoints/numberOfOutputPoints/offset from module scope
    and marks profileData modified so the pipeline re-executes.
    """
    points.Reset()
    step = (numberOfInputPoints - offset) / (numberOfOutputPoints - 1)
    for sample in range(numberOfOutputPoints):
        t = step * sample
        points.InsertPoint(sample, aSplineX.Evaluate(t), aSplineY.Evaluate(t), aSplineZ.Evaluate(t))
    profileData.Modified()
fit()
# Connect the resampled points into a single polyline cell.
lines = vtk.vtkCellArray()
lines.InsertNextCell(numberOfOutputPoints)
i = 0
while i < numberOfOutputPoints:
    lines.InsertCellPoint(i)
    i = i + 1
profileData.SetPoints(points)
profileData.SetLines(lines)
# Sweep a thin tube along the spline so it is visible as geometry.
profileTubes = vtk.vtkTubeFilter()
profileTubes.SetNumberOfSides(8)
profileTubes.SetInputData(profileData)
profileTubes.SetRadius(.005)
profileMapper = vtk.vtkPolyDataMapper()
profileMapper.SetInputConnection(profileTubes.GetOutputPort())
profile = vtk.vtkActor()
profile.SetMapper(profileMapper)
profile.GetProperty().SetDiffuseColor(1, 1, 0.7)
profile.GetProperty().SetSpecular(.3)
profile.GetProperty().SetSpecularPower(30)
ren1.AddActor(profile)
ren1.GetActiveCamera().Dolly(1.5)
ren1.ResetCamera()
ren1.ResetCameraClippingRange()
renWin.SetSize(400, 400)
# render the image
#
iren.Initialize()
def defaults(aSplineX, aSplineY, aSplineZ):
    """Reset bias/tension/continuity to zero on all three splines and redraw."""
    for spline in (aSplineX, aSplineY, aSplineZ):
        spline.SetDefaultBias(0)
        spline.SetDefaultTension(0)
        spline.SetDefaultContinuity(0)
    fit()
    renWin.Render()
def varyBias(aSplineX, aSplineY, aSplineZ):
    """Animate the bias parameter sweeping from -1 to 1 in 0.05 steps."""
    defaults(aSplineX, aSplineY, aSplineZ)
    bias = -1
    while bias <= 1:
        for spline in (aSplineX, aSplineY, aSplineZ):
            spline.SetDefaultBias(bias)
        fit()
        renWin.Render()
        bias += .05
def varyTension(aSplineX, aSplineY, aSplineZ):
    """Animate the tension parameter sweeping from -1 to 1 in 0.05 steps."""
    defaults(aSplineX, aSplineY, aSplineZ)
    tension = -1
    while tension <= 1:
        for spline in (aSplineX, aSplineY, aSplineZ):
            spline.SetDefaultTension(tension)
        fit()
        renWin.Render()
        tension += 0.05
def varyContinuity(aSplineX, aSplineY, aSplineZ):
    """Animate the continuity parameter sweeping from -1 to 1 in 0.05 steps."""
    defaults(aSplineX, aSplineY, aSplineZ)
    continuity = -1
    while continuity <= 1:
        for spline in (aSplineX, aSplineY, aSplineZ):
            spline.SetDefaultContinuity(continuity)
        fit()
        renWin.Render()
        continuity += 0.05
def closed(aSplineX, aSplineY, aSplineZ):
    """Close all three splines and re-fit over the full parametric loop."""
    # Bug fix: without `global`, `offset = 0.0` bound a function-local name
    # that was immediately discarded, so fit() kept sampling with the
    # module-level offset of 1.0.
    global offset
    offset = 0.0
    aSplineX.ClosedOn()
    aSplineY.ClosedOn()
    aSplineZ.ClosedOn()
    fit()
    renWin.Render()
def opened(aSplineX, aSplineY, aSplineZ):
    """Re-open all three splines and re-fit with the end-point offset restored."""
    # Bug fix: same scoping problem as closed() -- the assignment must target
    # the module-level offset that fit() reads.
    global offset
    offset = 1.0
    aSplineX.ClosedOff()
    aSplineY.ClosedOff()
    aSplineZ.ClosedOff()
    fit()
    renWin.Render()
# iren.Start()
| mit |
chokribr/PIST | modules/webjournal/lib/elements/bfe_webjournal_rss.py | 25 | 5765 | # -*- coding: utf-8 -*-
## $Id: bfe_webjournal_widget_whatsNew.py,v 1.24 2009/01/27 07:25:12 jerome Exp $
##
## This file is part of Invenio.
## Copyright (C) 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
WebJournal widget - Display the index of the lastest articles,
including 'breaking news'.
"""
from invenio.webjournal_utils import \
parse_url_string, \
get_journal_categories, \
get_category_query
from invenio.messages import gettext_set_language
from invenio.config import CFG_SITE_URL
from invenio.urlutils import create_html_link
from invenio.dbquery import run_sql
from urllib import quote
def format_element(bfo, categories, label="Subscribe by RSS",
                   rss_icon_url="/img/rss.png", cc='', css_class="rssLink",
                   rss_icon_width='16px', rss_icon_height='16px'):
    """
    Display RSS links to journal articles, in one or several
    categories, or to the whole journal (if 'cc' parameter is used).
    Note about 'cc': if we want an RSS of *all* articles (whatever
    the category is), either we build an RSS url to each of the
    categories/collections of the journal, or we simply link to the
    main collection ('cc') of the journal (which implies that journal
    categories exist as sub-collections of 'cc'). The second option is
    preferred.
    @param categories: comma-separated list of journal categories that will be linked from this RSS. If 'all', use all. If empty, try to use current category.
    @param label: label of the RSS link
    @param rss_icon_url: if provided, display the RSS icon in front of the label
    @param rss_icon_width: if provided, declared width for the RSS icon
    @param rss_icon_height: if provided, declared height for the RSS icon
    @param cc: if provided, use as root collection for the journal, and ignore 'categories' parameter.
    @param css_class: CSS class of the RSS link.
    """
    args = parse_url_string(bfo.user_info['uri'])
    category_name = args["category"]
    journal_name = args["journal_name"]
    ln = bfo.lang
    _ = gettext_set_language(ln)
    # Decide which categories this RSS feed should cover.
    if cc:
        categories = []
    elif categories.lower() == 'all':
        categories = get_journal_categories(journal_name)
    elif not categories and category_name:
        categories = [category_name]
    else:
        categories = categories.split(',')
    # Build the query definition for selected categories. If a
    # category name can a match collection name, we can simply search
    # in this collection. Otherwise we have to search using the query
    # definition of the category.
    # Note that if there is one category that does not match a
    # collection name, we have to use collections queries for all
    # categories (we cannot display all records of a collection +
    # apply search constraint on other collections)
    collections = []
    pattern = []
    must_use_pattern = False
    for category in categories:
        dbquery = get_category_query(journal_name, category)
        if dbquery:
            pattern.append(dbquery)
            res = None
            if not must_use_pattern:
                res = run_sql("SELECT name FROM collection WHERE dbquery=%s",
                              (dbquery,))
            if res:
                collections.append(res[0][0])
            else:
                # Could not find corresponding collection. Maybe
                # replace '980__a' by 'collection'?
                if not must_use_pattern:
                    res = run_sql("SELECT name FROM collection WHERE dbquery=%s",
                                  (dbquery.replace('980__a', 'collection'),))
                if res:
                    collections.append(res[0][0])
                else:
                    # Really no matching collection name
                    # apparently. Use query definition.
                    must_use_pattern = True
    # Build label
    link_label = ''
    if rss_icon_url:
        if rss_icon_url.startswith('/'):
            # Build an absolute URL
            rss_icon_url = CFG_SITE_URL + rss_icon_url
        link_label += '<img src="%s" alt="RSS" border="0"%s%s/> ' % \
                      (rss_icon_url, rss_icon_width and ' width="%s"' % rss_icon_width or '',
                       rss_icon_height and ' height="%s"' % rss_icon_height or '')
    if label:
        link_label += _(label)
    # Build link
    rss_url = CFG_SITE_URL + '/rss'
    if cc:
        rss_url += '?cc=' + quote(cc)
    elif must_use_pattern:
        rss_url += '?p=' + quote(' or '.join(pattern))
    else:
        rss_url += '?c=' + '&c='.join([quote(coll) \
                                       for coll in collections])
    rss_url += '&ln=' + ln
    return create_html_link(rss_url, {},
                            link_label=link_label,
                            linkattrd={'class': css_class})
def escape_values(bfo):
    """
    Called by BibFormat in order to check if output of this element
    should be escaped.

    Always 0: the element emits its own, already-formed HTML markup.
    """
    return 0
# NOTE(review): executed at import time -- presumably so the default label
# string appears inside _() and gets picked up by the message-extraction
# tooling; confirm against Invenio's i18n workflow.
_ = gettext_set_language('en')
dummy = _("Subscribe by RSS")
| gpl-2.0 |
xzamirx/foursquared.eclair | util/gen_parser.py | 262 | 4392 | #!/usr/bin/python
import datetime
import sys
import textwrap
import common
from xml.dom import pulldom
PARSER = """\
/**
* Copyright 2009 Joe LaPenna
*/
package com.joelapenna.foursquare.parsers;
import com.joelapenna.foursquare.Foursquare;
import com.joelapenna.foursquare.error.FoursquareError;
import com.joelapenna.foursquare.error.FoursquareParseException;
import com.joelapenna.foursquare.types.%(type_name)s;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Auto-generated: %(timestamp)s
*
* @author Joe LaPenna (joe@joelapenna.com)
* @param <T>
*/
public class %(type_name)sParser extends AbstractParser<%(type_name)s> {
private static final Logger LOG = Logger.getLogger(%(type_name)sParser.class.getCanonicalName());
private static final boolean DEBUG = Foursquare.PARSER_DEBUG;
@Override
public %(type_name)s parseInner(XmlPullParser parser) throws XmlPullParserException, IOException,
FoursquareError, FoursquareParseException {
parser.require(XmlPullParser.START_TAG, null, null);
%(type_name)s %(top_node_name)s = new %(type_name)s();
while (parser.nextTag() == XmlPullParser.START_TAG) {
String name = parser.getName();
%(stanzas)s
} else {
// Consume something we don't understand.
if (DEBUG) LOG.log(Level.FINE, "Found tag that we don't recognize: " + name);
skipSubTree(parser);
}
}
return %(top_node_name)s;
}
}"""
BOOLEAN_STANZA = """\
} else if ("%(name)s".equals(name)) {
%(top_node_name)s.set%(camel_name)s(Boolean.valueOf(parser.nextText()));
"""
GROUP_STANZA = """\
} else if ("%(name)s".equals(name)) {
%(top_node_name)s.set%(camel_name)s(new GroupParser(new %(sub_parser_camel_case)s()).parse(parser));
"""
COMPLEX_STANZA = """\
} else if ("%(name)s".equals(name)) {
%(top_node_name)s.set%(camel_name)s(new %(parser_name)s().parse(parser));
"""
STANZA = """\
} else if ("%(name)s".equals(name)) {
%(top_node_name)s.set%(camel_name)s(parser.nextText());
"""
def main():
    """Entry point: read the XML spec named on the command line and emit Java."""
    spec_path = sys.argv[1]
    type_name, top_node_name, attributes = common.WalkNodesForAttributes(spec_path)
    GenerateClass(type_name, top_node_name, attributes)
def GenerateClass(type_name, top_node_name, attributes):
    """Print the generated Java parser class for one type to stdout.

    type_name: the type of object the parser returns
    top_node_name: the name of the object the parser returns.
    attributes: mapping of attribute name -> (type, children),
        per common.WalkNodesForAttributes
    """
    stanzas = []
    for name in sorted(attributes):
        typ, children = attributes[name]
        replacements = Replacements(top_node_name, name, typ, children)
        # Pick the stanza template matching the attribute's declared type.
        if typ == common.BOOLEAN:
            stanzas.append(BOOLEAN_STANZA % replacements)
        elif typ == common.GROUP:
            stanzas.append(GROUP_STANZA % replacements)
        elif typ in common.COMPLEX:
            stanzas.append(COMPLEX_STANZA % replacements)
        else:
            stanzas.append(STANZA % replacements)
    if stanzas:
        # pop off the extraneous "} else " for the first conditional stanza.
        stanzas[0] = stanzas[0].replace('} else ', '', 1)
    # NOTE(review): `name` and `typ` are leftovers from the loop above; an
    # empty `attributes` dict would raise NameError here -- confirm specs
    # always declare at least one attribute.
    replacements = Replacements(top_node_name, name, typ, [None])
    replacements['stanzas'] = '\n'.join(stanzas).strip()
    # Bug fix: call-style print() is valid in both Python 2 and 3; the
    # original Python-2-only print statement is a SyntaxError under 3.
    print(PARSER % replacements)
def Replacements(top_node_name, name, typ, children):
    """Build the %-substitution dict for one attribute stanza (and the class).

    children[0], when set, names the element type of a group attribute;
    otherwise the sub-parser name is derived by chopping the trailing 's'.
    """
    def camel(snake):
        return ''.join(part.capitalize() for part in snake.split('_'))
    type_name = camel(top_node_name)                   # e.g. TopNode
    camel_name = camel(name)                           # e.g. FieldName
    attribute_name = camel_name.lower().capitalize()   # e.g. Fieldname
    field_name = 'm' + camel_name                      # e.g. mFieldName
    if children[0]:
        sub_parser_camel_case = children[0] + 'Parser'
    else:
        sub_parser_camel_case = camel_name[:-1] + 'Parser'
    return {
        'type_name': type_name,
        'name': name,
        'top_node_name': top_node_name,
        'camel_name': camel_name,
        'parser_name': typ + 'Parser',
        'attribute_name': attribute_name,
        'field_name': field_name,
        'typ': typ,
        'timestamp': datetime.datetime.now(),
        'sub_parser_camel_case': sub_parser_camel_case,
        'sub_type': children[0]
    }
# Script entry point.
if __name__ == '__main__':
    main()
| apache-2.0 |
Qalthos/ansible | lib/ansible/modules/network/ironware/ironware_config.py | 32 | 11397 | #!/usr/bin/python
#
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Standard Ansible module metadata consumed by ansible-doc and the plugin loader.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = """
---
module: ironware_config
version_added: "2.5"
author: "Paul Baker (@paulquack)"
short_description: Manage configuration sections on Extreme Ironware devices
description:
- Extreme Ironware configurations use a simple block indent file syntax
for segmenting configuration into sections. This module provides
an implementation for working with Ironware configuration sections in
a deterministic way.
extends_documentation_fragment: ironware
options:
lines:
description:
- The ordered set of commands that should be configured in the
section. The commands must be the exact same commands as found
in the device running-config. Be sure to note the configuration
command syntax as some commands are automatically modified by the
device config parser.
aliases: ['commands']
parents:
description:
- The ordered set of parents that uniquely identify the section
the commands should be checked against. If the parents argument
is omitted, the commands are checked against the set of top
level or global commands.
src:
description:
- Specifies the source path to the file that contains the configuration
or configuration template to load. The path to the source file can
either be the full path on the Ansible control host or a relative
path from the playbook or role root directory. This argument is mutually
exclusive with I(lines), I(parents).
before:
description:
- The ordered set of commands to push on to the command stack if
a change needs to be made. This allows the playbook designer
the opportunity to perform configuration commands prior to pushing
any changes without affecting how the set of commands are matched
against the system
after:
description:
- The ordered set of commands to append to the end of the command
stack if a change needs to be made. Just like with I(before) this
allows the playbook designer to append a set of commands to be
executed after the command set.
match:
description:
- Instructs the module on the way to perform the matching of
the set of commands against the current device config. If
match is set to I(line), commands are matched line by line. If
match is set to I(strict), command lines are matched with respect
to position. If match is set to I(exact), command lines
must be an equal match. Finally, if match is set to I(none), the
module will not attempt to compare the source configuration with
the running configuration on the remote device.
default: line
choices: ['line', 'strict', 'exact', 'none']
replace:
description:
- Instructs the module on the way to perform the configuration
on the device. If the replace argument is set to I(line) then
the modified lines are pushed to the device in configuration
mode. If the replace argument is set to I(block) then the entire
command block is pushed to the device in configuration mode if any
line is not correct
default: line
choices: ['line', 'block']
update:
description:
- The I(update) argument controls how the configuration statements
are processed on the remote device. Valid choices for the I(update)
argument are I(merge) and I(check). When the argument is set to
I(merge), the configuration changes are merged with the current
device running configuration. When the argument is set to I(check)
the configuration updates are determined but not actually configured
on the remote device.
default: merge
choices: ['merge', 'check']
commit:
description:
- This argument specifies the update method to use when applying the
configuration changes to the remote node. If the value is set to
I(merge) the configuration updates are merged with the running-
config. If the value is set to I(check), no changes are made to
the remote host.
default: merge
choices: ['merge', 'check']
backup:
description:
- This argument will cause the module to create a full backup of
the current C(running-config) from the remote device before any
changes are made. If the C(backup_options) value is not given,
the backup file is written to the C(backup) folder in the playbook
root directory. If the directory does not exist, it is created.
type: bool
default: 'no'
config:
description:
- The C(config) argument allows the playbook designer to supply
the base configuration to be used to validate configuration
changes necessary. If this argument is provided, the module
will not download the running-config from the remote node.
save_when:
description:
- When changes are made to the device running-configuration, the
changes are not copied to non-volatile storage by default. Using
this argument will change that before. If the argument is set to
I(always), then the running-config will always be copied to the
startup-config and the I(modified) flag will always be set to
True. If the argument is set to I(modified), then the running-config
will only be copied to the startup-config if it has changed since
the last save to startup-config. If the argument is set to
I(never), the running-config will never be copied to the
startup-config
default: never
choices: ['always', 'never', 'modified']
version_added: "2.4"
backup_options:
description:
- This is a dict object containing configurable options related to backup file path.
The value of this option is read only when C(backup) is set to I(yes), if C(backup) is set
to I(no) this option will be silently ignored.
suboptions:
filename:
description:
        - The filename to be used to store the backup configuration. If the filename
is not given it will be generated based on the hostname, current time and date
in format defined by <hostname>_config.<current-date>@<current-time>
dir_path:
description:
- This option provides the path ending with directory name in which the backup
configuration file will be stored. If the directory does not exist it will be first
created and the filename is either the value of C(filename) or default filename
as described in C(filename) options description. If the path value is not given
in that case a I(backup) directory will be created in the current working directory
and backup configuration will be copied in C(filename) within I(backup) directory.
type: path
type: dict
version_added: "2.8"
"""
EXAMPLES = """
- ironware_config:
lines:
- port-name test
- enable
- load-interval 30
- rate-limit input broadcast unknown-unicast multicast 521216 64000
parents: ['interface ethernet 1/2']
"""
RETURN = """
updates:
description: The set of commands that will be pushed to the remote device
returned: always
type: list
sample: ['...', '...']
backup_path:
description: The full path to the backup file
returned: when backup is yes
type: str
sample: /playbooks/ansible/backup/ironware_config.2016-07-16@22:28:34
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.ironware.ironware import ironware_argument_spec, check_args
from ansible.module_utils.network.ironware.ironware import get_config, load_config, run_commands
from ansible.module_utils.network.common.config import NetworkConfig, dumps
def get_candidate(module):
    """Assemble the desired (candidate) configuration from module params."""
    params = module.params
    candidate = NetworkConfig(indent=1)
    if params['src']:
        # A source file/template takes precedence over inline lines.
        candidate.load(params['src'])
    elif params['lines']:
        candidate.add(params['lines'], parents=params['parents'] or list())
    return candidate
def run(module, result):
    """Diff the candidate config against the device and apply/record updates.

    Mutates `result` in place ('updates', 'changed'); honors check mode.
    """
    match = module.params['match']
    replace = module.params['replace']
    path = module.params['parents']
    configobjs = None
    candidate = get_candidate(module)
    if match != 'none':
        # Compare against the supplied base config, or fetch running-config.
        contents = module.params['config']
        if not contents:
            contents = get_config(module)
        config = NetworkConfig(indent=1, contents=contents)
        configobjs = candidate.difference(config, path=path, match=match,
                                          replace=replace)
    else:
        # match=none: push the candidate unconditionally.
        configobjs = candidate.items
    if configobjs:
        commands = dumps(configobjs, 'commands').split('\n')
        if module.params['lines']:
            if module.params['before']:
                commands[:0] = module.params['before']
            if module.params['after']:
                commands.extend(module.params['after'])
        result['updates'] = commands
        # send the configuration commands to the device and merge
        # them with the current running config
        if not module.check_mode:
            load_config(module, commands)
        result['changed'] = True
    if result['changed'] or module.params['save_when'] == 'always':
        result['changed'] = True
        # Persist running-config to startup-config unless in check mode.
        if not module.check_mode:
            cmd = {'command': 'write memory'}
            run_commands(module, [cmd])
def main():
    """ main entry point for module execution
    """
    # Sub-spec for the backup_options dict parameter.
    backup_spec = dict(
        filename=dict(),
        dir_path=dict(type='path')
    )
    argument_spec = dict(
        src=dict(type='path'),
        lines=dict(aliases=['commands'], type='list'),
        parents=dict(type='list'),
        before=dict(type='list'),
        after=dict(type='list'),
        match=dict(default='line', choices=['line', 'strict', 'exact', 'none']),
        replace=dict(default='line', choices=['line', 'block']),
        config=dict(),
        backup=dict(type='bool', default=False),
        backup_options=dict(type='dict', options=backup_spec),
        save_when=dict(choices=['always', 'never', 'modified'], default='never')
    )
    argument_spec.update(ironware_argument_spec)
    # src supplies a whole config; it cannot be combined with lines/parents.
    mutually_exclusive = [('lines', 'src'),
                          ('parents', 'src')]
    required_if = [('match', 'strict', ['lines']),
                   ('match', 'exact', ['lines']),
                   ('replace', 'block', ['lines'])]
    module = AnsibleModule(argument_spec=argument_spec,
                           mutually_exclusive=mutually_exclusive,
                           required_if=required_if,
                           supports_check_mode=True)
    result = {'changed': False}
    check_args(module)
    # Snapshot the running-config before making any change, if requested.
    if module.params['backup']:
        result['__backup__'] = get_config(module)
    run(module, result)
    module.exit_json(**result)
# Module entry point.
if __name__ == '__main__':
    main()
| gpl-3.0 |
lfalvarez/votai | popular_proposal/subscriptions.py | 2 | 4106 | from collections import OrderedDict
from votai_utils.send_mails import send_mail
from elections.models import Candidate, Area
from backend_candidate.models import CandidacyContact
from popular_proposal.models import Commitment, ProposalLike, PopularProposal
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.conf import settings
class SubscriptionEventBase(object):
    """Base class for proposal-related mail notifications.

    Subclasses must implement get_who()/get_mail_from() and provide a
    `mail_template` attribute (or override get_template()).
    """

    def __init__(self, proposal, *args, **kwargs):
        self.proposal = proposal

    def get_who(self):
        """Return the iterable of recipients; subclasses must override."""
        raise NotImplementedError

    def get_mail_from(self, person):
        """Return the e-mail address of one recipient; subclasses must override."""
        raise NotImplementedError

    def get_context(self, person):
        return {'proposal': self.proposal,
                'person': person}

    def get_template(self):
        return self.mail_template

    def notify(self):
        """Send one templated mail per recipient returned by get_who()."""
        for recipient in self.get_who():
            address = self.get_mail_from(recipient)
            ctx = self.get_context(person=recipient)
            tpl = self.get_template()
            send_mail(ctx, tpl, to=[address])
class NewCommitmentNotificationToProposer(SubscriptionEventBase):
    """Tell the proposer that a candidate answered (committed or declined)."""
    mail_template = 'genia_lograste_compromiso'

    def __init__(self, *args, **kwargs):
        super(NewCommitmentNotificationToProposer, self).__init__(*args, **kwargs)
        self.commitment = kwargs.pop('commitment')

    def get_who(self):
        # Only the citizen who authored the proposal is notified.
        return [self.proposal.proposer]

    def get_mail_from(self, person):
        return person.email

    def get_template(self):
        # Template depends on whether the candidate committed or declined.
        if self.commitment.commited:
            return 'genia_lograste_compromiso'
        return 'genia_lo_sentimos'

    def get_context(self, **kwargs):
        ctx = super(NewCommitmentNotificationToProposer, self).get_context(**kwargs)
        ctx['commitment'] = self.commitment
        return ctx
class NumericNotificationBase(SubscriptionEventBase):
    """Base for notifications that carry a supporter count (`number`)."""

    def __init__(self, *args, **kwargs):
        super(NumericNotificationBase, self).__init__(*args, **kwargs)
        self.number = kwargs.pop('number')

    def get_context(self, **kwargs):
        ctx = super(NumericNotificationBase, self).get_context(**kwargs)
        ctx['number'] = self.number
        return ctx
class ManyCitizensSupportingNotification(NumericNotificationBase):
    """Notify contacts of candidates who have NOT yet answered this proposal."""
    mail_template = 'many_citizens_supporting'

    def get_who(self):
        if not settings.NOTIFY_CANDIDATES_WHEN_MANY_PROPOSALS_REACH_A_NUMBER:
            return []
        # Candidates that already committed (either way) are excluded.
        answered_pks = [c.candidate.id
                        for c in Commitment.objects.filter(proposal=self.proposal)]
        pending = Candidate.objects.filter(
            elections__area=self.proposal.area).exclude(id__in=answered_pks)
        return CandidacyContact.objects.filter(candidate__in=pending)

    def get_mail_from(self, contact):
        return contact.mail

    def get_context(self, **kwargs):
        ctx = super(ManyCitizensSupportingNotification, self).get_context(**kwargs)
        ctx['contact'] = kwargs.pop('person')
        return ctx
class YouAreAHeroNotification(NumericNotificationBase):
    """Congratulate the proposer when many citizens support the proposal."""
    mail_template = 'sos_una_grande'

    def get_who(self):
        return [self.proposal.proposer]

    def get_mail_from(self, proposer):
        return proposer.email
class EventDispatcher(object):
    """Maps event names to lists of notifier classes and fires them."""
    # NOTE(review): `events` is a *class* attribute mutated in place by
    # register(), so registrations are shared by every dispatcher instance.
    # Looks intentional given notification_trigger() builds a fresh instance
    # per call -- confirm before changing.
    events = OrderedDict({'new-commitment': [## NewCommitmentNotification,
                                             NewCommitmentNotificationToProposer]})
    def register(self, event, event_class):
        # NOTE(review): replaces (not appends to) the handler list; trigger()
        # iterates the stored value, so callers must pass a list of classes.
        self.events[event] = event_class
    def trigger(self, event, proposal, kwargs={}):
        # NOTE(review): mutable default argument -- safe only while nothing
        # here mutates `kwargs`.
        for event_nofifier_class in self.events[event]:
            event_nofifier = event_nofifier_class(proposal, **kwargs)
            event_nofifier.notify()
def notification_trigger(event, **kwargs):
    """Fire `event` for the proposal in kwargs, forwarding the rest as kwargs."""
    proposal = kwargs.pop('proposal')
    EventDispatcher().trigger(event, proposal, kwargs)
| gpl-3.0 |
Gutenshit/CANBadger-Server | node_handler.py | 1 | 4930 | from PyQt4.QtCore import *
from PyQt4.QtNetwork import *
from PyQt4.QtGui import *
import datetime
from node_list_item import *
from node_connection import *
from canbadger_mainwindow import *
from helpers import *
##
# handle discovery and connection of nodes
class NodeHandler(QObject):
    """Discovers CANBadger nodes via UDP broadcasts and tracks their liveness.

    Keeps two id-keyed dicts (visibleNodes / connectedNodes) guarded by
    nodeListMutex; communicates with the UI through the pyqtSignals below.
    """
    newNodeDiscovered = pyqtSignal(dict)
    nodeConnected = pyqtSignal(dict)
    nodeDisconnected = pyqtSignal(dict)
    nodeDisappeared = pyqtSignal(dict)
    nodeAliveMessage = pyqtSignal(dict)
    threadReady = pyqtSignal()
    def __init__(self, mainwindow):
        # NOTE(review): super(QObject, self) skips QObject itself in the MRO;
        # the conventional form is super(NodeHandler, self).__init__() -- confirm.
        super(QObject, self).__init__()
        self.visibleNodes = {}
        self.connectedNodes = {}
        self.nodeListMutex = QMutex()
        self.mainwindow = mainwindow
    @pyqtSlot()
    def onRun(self):
        # multithreading hack to prevent threading sigsev conditions
        # all the stuff should be executed in this threads event loop
        # if we do stuff here, the thread context could be different
        self.threadReady.connect(self.onThreadReady)
        self.threadReady.emit()
    @pyqtSlot()
    def onThreadReady(self):
        """Create the UDP discovery socket and the liveness timer in-thread."""
        self.udpSocket = QUdpSocket(self)
        self.udpSocket.bind(13370, QUdpSocket.ShareAddress)
        self.udpSocket.readyRead.connect(self.onSocketReadyRead)
        # check every second
        self.disconnectTimer = QTimer(self)
        self.disconnectTimer.moveToThread(self.thread())
        self.disconnectTimer.timeout.connect(self.onDisconnectTimerFire)
        self.disconnectTimer.start(1*1000)
    @pyqtSlot()
    def onSocketReadyRead(self):
        """Parse one discovery datagram ("CB|<id>|<version>") and update lists."""
        # NOTE(review): only one datagram is consumed per readyRead signal;
        # confirm a hasPendingDatagrams() loop is not needed here.
        msg = self.udpSocket.readDatagram(self.udpSocket.pendingDatagramSize())
        if msg[0][0:2] == "CB":
            msg_split = msg[0].split('|')
            device_id = msg_split[1]
            device_version = msg_split[2]
            now = datetime.datetime.now()
            device = {"id": device_id, "version": device_version, "ip": str(msg[1].toString()), "last_seen": now}
            self.nodeListMutex.lock()
            if (device_id not in self.connectedNodes.iterkeys()) and \
               (device_id not in self.visibleNodes.iterkeys()):
                self.visibleNodes[device_id] = device
                self.newNodeDiscovered.emit(device)
            # update timestamps for known visible/connected devices
            if device_id in self.visibleNodes.iterkeys():
                self.visibleNodes[device_id]["last_seen"] = now
                self.nodeAliveMessage.emit(self.visibleNodes[device_id])
            if device_id in self.connectedNodes.iterkeys():
                self.connectedNodes[device_id]["last_seen"] = now
                self.nodeAliveMessage.emit(self.connectedNodes[device_id])
            self.nodeListMutex.unlock()
    @pyqtSlot()
    def onDisconnectTimerFire(self):
        """Drop nodes silent for >5s, emitting disappeared/disconnected signals."""
        now = datetime.datetime.now()
        self.nodeListMutex.lock()
        ids_to_delete = []
        for id, node in self.visibleNodes.iteritems():
            # check time difference
            if (now - node["last_seen"]) > datetime.timedelta(seconds=5):
                ids_to_delete.append(id)
                self.nodeDisappeared.emit(node)
        for id in ids_to_delete:
            del self.visibleNodes[id]
        ids_to_delete = []
        for id, node in self.connectedNodes.iteritems():
            # check time difference
            if (now - node["last_seen"]) > datetime.timedelta(seconds=5):
                ids_to_delete.append(id)
                self.nodeDisconnected.emit(node)
        for id in ids_to_delete:
            del self.connectedNodes[id]
        self.nodeListMutex.unlock()
    @pyqtSlot(dict)
    def onConnectToNode(self, node):
        """Spin up a NodeConnection for `node` on its own QThread."""
        thread = QThread()
        node["thread"] = thread
        con = NodeConnection(node)
        node["connection"] = con
        con.moveToThread(thread)
        node["thread"].started.connect(node["connection"].onRun)
        con.connectionSucceeded.connect(self.onConnectionSucceeded)
        con.newDebugMessage.connect(self.mainwindow.onUpdateDebugLog)
        thread.start()
    @pyqtSlot()
    def onConnectionSucceeded(self):
        """Promote the sender's node from visible to connected."""
        node = self.sender()
        del self.visibleNodes[node.node["id"]]
        self.connectedNodes[node.node["id"]] = node.node
        self.nodeConnected.emit(node.node)
        node.node["connection"].nodeDisconnected.connect(self.onDisconnectNode)
    @pyqtSlot(dict)
    def onDisconnectNode(self, node):
        # for now, just reset node and remove it from internal list
        node["connection"].resetNode()
        if node["id"] in self.connectedNodes:
            del self.connectedNodes[node["id"]]
        if node["id"] in self.visibleNodes:
            del self.visibleNodes[node["id"]]
        if self.mainwindow.selectedNode == node:
            self.mainwindow.connectDisconnectNodeBtn.setText("Connect")
    @pyqtSlot(dict)
    def onSelectedNodeChanged(self, node):
        pass
PAIR-code/lit | lit_nlp/lib/caching_test.py | 2 | 3840 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# Lint as: python3
"""Tests for lit_nlp.lib.model."""
from absl.testing import absltest
from lit_nlp.lib import caching
from lit_nlp.lib import testing_utils
class CachingTest(absltest.TestCase):
  """Tests for the prediction cache and the caching model wrapper."""

  def test_preds_cache(self):
    """Test basic put/get behavior of PredsCache."""
    cache = caching.PredsCache()
    self.assertEqual("0", cache.info())
    # A None cache key is rejected; nothing is stored.
    cache.put("test", None)
    self.assertEqual("0", cache.info())
    cache.put("test", ("a", "1"))
    self.assertEqual("1", cache.info())
    # Fixed: assert directly on the lookup result. The previous form,
    # assertIsNone(None, cache.get(...)), always passed because the lookup
    # result was interpreted as the optional failure *message* argument.
    self.assertIsNone(cache.get(("a", "2")))
    self.assertEqual("test", cache.get(("a", "1")))

  def test_caching_model_wrapper_no_dataset_skip_cache(self):
    """Without a dataset name, predictions are not cached (model runs twice)."""
    model = testing_utils.TestIdentityRegressionModel()
    wrapper = caching.CachingModelWrapper(model, "test")
    examples = [{"data": {"val": 1}, "id": "my_id"}]
    results = wrapper.predict_with_metadata(examples)
    self.assertEqual(1, model.count)
    self.assertEqual({"score": 1}, results[0])
    results = wrapper.predict_with_metadata(examples)
    self.assertEqual(2, model.count)
    self.assertEqual({"score": 1}, results[0])

  def test_caching_model_wrapper_use_cache(self):
    """With a dataset name, a repeated example hits the cache (model runs once)."""
    model = testing_utils.TestIdentityRegressionModel()
    wrapper = caching.CachingModelWrapper(model, "test")
    examples = [{"data": {"val": 1}, "id": "id_to_cache"}]
    results = wrapper.predict_with_metadata(examples, "dataset")
    self.assertEqual(1, model.count)
    self.assertEqual({"score": 1}, results[0])
    results = wrapper.predict_with_metadata(examples, "dataset")
    self.assertEqual(1, model.count)
    self.assertEqual({"score": 1}, results[0])

  def test_caching_model_wrapper_not_cached(self):
    """A different example id is a cache miss and re-runs the model."""
    model = testing_utils.TestIdentityRegressionModel()
    wrapper = caching.CachingModelWrapper(model, "test")
    examples = [{"data": {"val": 1}, "id": "my_id"}]
    results = wrapper.predict_with_metadata(examples, "dataset")
    self.assertEqual(1, model.count)
    self.assertEqual({"score": 1}, results[0])
    examples = [{"data": {"val": 2}, "id": "other_id"}]
    results = wrapper.predict_with_metadata(examples)
    self.assertEqual(2, model.count)
    self.assertEqual({"score": 2}, results[0])

  def test_caching_model_wrapper_mixed_list(self):
    """A batch mixing cached and new examples only runs the model on misses."""
    model = testing_utils.TestIdentityRegressionModel()
    wrapper = caching.CachingModelWrapper(model, "test")
    examples = [{"data": {"val": 1}, "id": "my_id"}]
    results = wrapper.predict_with_metadata(examples, "dataset")
    self.assertEqual(1, model.count)
    self.assertEqual({"score": 1}, results[0])
    examples = [
        {
            "data": {
                "val": 0
            },
            "id": "first_id"
        },
        {
            "data": {
                "val": 1
            },
            "id": "my_id"
        },
        {
            "data": {
                "val": 2
            },
            "id": "last_id"
        },
    ]
    results = wrapper.predict_with_metadata(examples, "dataset")
    # "my_id" is served from cache; the other two examples run the model.
    self.assertEqual(3, model.count)
    self.assertEqual({"score": 0}, results[0])
    self.assertEqual({"score": 1}, results[1])
    self.assertEqual({"score": 2}, results[2])
# Standard absl test entry point.
if __name__ == "__main__":
  absltest.main()
| apache-2.0 |
laborautonomo/pip | pip/_vendor/html5lib/filters/lint.py | 979 | 4306 | from __future__ import absolute_import, division, unicode_literals
from gettext import gettext
_ = gettext
from . import _base
from ..constants import cdataElements, rcdataElements, voidElements
from ..constants import spaceCharacters
spaceCharacters = "".join(spaceCharacters)
class LintError(Exception):
    """Raised by Filter when the token stream violates a checked invariant."""
    pass
class Filter(_base.Filter):
    """Sanity-checking filter.

    Validates each token of the wrapped stream (tag/attribute names are
    non-empty strings, void elements are emitted as EmptyTag, end tags
    match the open-element stack, etc.) and raises LintError on the first
    violation. Valid tokens are yielded through unchanged.
    """
    def __iter__(self):
        open_elements = []  # stack of currently open StartTag names
        contentModelFlag = "PCDATA"
        for token in _base.Filter.__iter__(self):
            type = token["type"]
            if type in ("StartTag", "EmptyTag"):
                name = token["name"]
                # Start tags are only legal in the PCDATA content model.
                if contentModelFlag != "PCDATA":
                    raise LintError(_("StartTag not in PCDATA content model flag: %(tag)s") % {"tag": name})
                if not isinstance(name, str):
                    raise LintError(_("Tag name is not a string: %(tag)r") % {"tag": name})
                if not name:
                    raise LintError(_("Empty tag name"))
                # Void elements must come through as EmptyTag, and vice versa.
                if type == "StartTag" and name in voidElements:
                    raise LintError(_("Void element reported as StartTag token: %(tag)s") % {"tag": name})
                elif type == "EmptyTag" and name not in voidElements:
                    raise LintError(_("Non-void element reported as EmptyTag token: %(tag)s") % {"tag": token["name"]})
                if type == "StartTag":
                    open_elements.append(name)
                for name, value in token["data"]:
                    if not isinstance(name, str):
                        raise LintError(_("Attribute name is not a string: %(name)r") % {"name": name})
                    if not name:
                        raise LintError(_("Empty attribute name"))
                    if not isinstance(value, str):
                        raise LintError(_("Attribute value is not a string: %(value)r") % {"value": value})
                # NOTE(review): when the tag has attributes, the loop above
                # rebinds `name` to the *last attribute name*, so the checks
                # below then test an attribute name rather than the tag name.
                # Kept verbatim (doc-only change) — confirm against upstream
                # html5lib before relying on this behavior.
                if name in cdataElements:
                    contentModelFlag = "CDATA"
                elif name in rcdataElements:
                    contentModelFlag = "RCDATA"
                elif name == "plaintext":
                    contentModelFlag = "PLAINTEXT"
            elif type == "EndTag":
                name = token["name"]
                if not isinstance(name, str):
                    raise LintError(_("Tag name is not a string: %(tag)r") % {"tag": name})
                if not name:
                    raise LintError(_("Empty tag name"))
                if name in voidElements:
                    raise LintError(_("Void element reported as EndTag token: %(tag)s") % {"tag": name})
                # End tag must close the most recently opened element.
                start_name = open_elements.pop()
                if start_name != name:
                    raise LintError(_("EndTag (%(end)s) does not match StartTag (%(start)s)") % {"end": name, "start": start_name})
                contentModelFlag = "PCDATA"
            elif type == "Comment":
                if contentModelFlag != "PCDATA":
                    raise LintError(_("Comment not in PCDATA content model flag"))
            elif type in ("Characters", "SpaceCharacters"):
                data = token["data"]
                if not isinstance(data, str):
                    # NOTE(review): message says "Attribute name" but this is
                    # character data — kept verbatim (doc-only change).
                    raise LintError(_("Attribute name is not a string: %(name)r") % {"name": data})
                if not data:
                    raise LintError(_("%(type)s token with empty data") % {"type": type})
                if type == "SpaceCharacters":
                    # After stripping space characters nothing may remain.
                    data = data.strip(spaceCharacters)
                    if data:
                        raise LintError(_("Non-space character(s) found in SpaceCharacters token: %(token)r") % {"token": data})
            elif type == "Doctype":
                name = token["name"]
                if contentModelFlag != "PCDATA":
                    raise LintError(_("Doctype not in PCDATA content model flag: %(name)s") % {"name": name})
                if not isinstance(name, str):
                    raise LintError(_("Tag name is not a string: %(tag)r") % {"tag": name})
                # XXX: what to do with token["data"] ?
            elif type in ("ParseError", "SerializeError"):
                pass
            else:
                raise LintError(_("Unknown token type: %(type)s") % {"type": type})
            yield token
| mit |
stormi/tsunami | src/primaires/commerce/__init__.py | 1 | 6143 | # -*-coding:Utf-8 -*
# Copyright (c) 2010 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le module primaire commerce."""
from abstraits.module import *
from primaires.objet.vente_unique import VenteUnique
from . import masques
from . import commandes
from . import types
from .questeur import Questeur
class Module(BaseModule):

    """This class holds the information of the 'commerce' primary module.

    This module manages commerce, that is: transactions, shops and
    currencies.

    Note: this module can be extended by offering new kinds of objects that
    can be sold. To do so:
    1.  When configuring the module that contains the new objects, tell
        the commerce module that a new object type may be sold. Add an
        entry to the types_services dictionary, keyed by the name of the
        new service, whose value is a dictionary used to look the object
        up by its key. For examples, see the 'objet' primary module.
    2.  The class producing objects that can be sold in shops must have:
        A. A class attribute type_achat (str)
        B. A class attribute aide_achat (str)
        B. A property or instance attribute m_valeur (float)
        C. A property or instance attribute nom_achat (str)
        D. An instance attribute cle (str), its key in the
           dictionary
        E. An acheter method performing the purchase

    """

    def __init__(self, importeur):
        """Module constructor."""
        BaseModule.__init__(self, importeur, "commerce", "primaire")
        self.commandes = []
        self.types_services = {}
        self.aides_types = {}
        self.questeurs = {}

    def init(self):
        """Module initialization: hooks, then stored questors."""
        self.importeur.hook["temps:minute"].ajouter_evenement(
                self.renouveler_magasins)
        self.importeur.hook["objet:doit_garder"].ajouter_evenement(
                self.doit_garder_objets)

        # Load the persisted questors
        questeurs = self.importeur.supenr.charger_groupe(Questeur)
        for questeur in questeurs:
            self.ajouter_questeur(questeur)

        BaseModule.init(self)

    def ajouter_commandes(self):
        """Register the module's commands with the interpreter."""
        self.commandes = [
            commandes.acheter.CmdAcheter(),
            commandes.info.CmdInfo(),
            commandes.lister.CmdLister(),
            commandes.questeur.CmdQuesteur(),
            commandes.vendre.CmdVendre(),
        ]

        for cmd in self.commandes:
            importeur.interpreteur.ajouter_commande(cmd)

    def preparer(self):
        """Module preparation: drop questors whose room no longer exists."""
        for cle, questeur in tuple(self.questeurs.items()):
            if not cle.e_existe:
                del self.questeurs[cle]

    def creer_questeur(self, salle):
        """Create a questor and add it to the dictionary."""
        questeur = Questeur(salle)
        self.ajouter_questeur(questeur)
        return questeur

    def ajouter_questeur(self, questeur):
        """Add the questor to the dictionary, keyed by its room."""
        self.questeurs[questeur.salle] = questeur

    def questeur_existe(self, salle):
        """Return True or False whether a questor exists in the given room."""
        return self.questeurs.get(salle) is not None

    def supprimer_questeur(self, salle):
        """Destroy and remove the questor of the given room."""
        questeur = self.questeurs[salle]
        questeur.detruire()
        del self.questeurs[salle]

    def renouveler_magasins(self, temps):
        """Renew/open/close the shops scheduled for this in-game minute."""
        magasins = importeur.salle.a_renouveler.get(temps.heure_minute,
                [])
        for magasin in magasins:
            magasin.inventaire[:] = []
            magasin.renouveler()

        magasins = importeur.salle.magasins_a_ouvrir.get(
                temps.heure_minute, [])
        for magasin in magasins:
            magasin.ouvrir()

        magasins = importeur.salle.magasins_a_fermer.get(
                temps.heure_minute, [])
        for magasin in magasins:
            magasin.fermer()

    def doit_garder_objets(self):
        """Return the objects that must not be destroyed.

        Objects referenced by single-sale (VenteUnique) shop inventory
        entries are kept alive.
        """
        a_garder = []
        for salle in importeur.salle.salles.values():
            if salle.magasin:
                for service, qtt in salle.magasin.inventaire:
                    if isinstance(service, VenteUnique) and service.objet:
                        a_garder.append(service.objet)

        return a_garder
| bsd-3-clause |
annarev/tensorflow | tensorflow/lite/testing/op_tests/split.py | 17 | 2106 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test configs for split."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow.compat.v1 as tf
from tensorflow.lite.testing.zip_test_utils import create_tensor_data
from tensorflow.lite.testing.zip_test_utils import make_zip_of_tests
from tensorflow.lite.testing.zip_test_utils import register_make_test_function
@register_make_test_function()
def make_split_tests(options):
  """Make a set of tests to do tf.split."""
  # Cartesian product of these options defines the generated test cases.
  test_parameters = [{
      "input_shape": [[1, 3, 4, 6], [2, 4, 1], [6, 4], [8]],
      "num_or_size_splits": [1, 2, 3, 4, 5],
      "axis": [0, 1, 2, 3, -4, -3, -2, -1],
      "fully_quantize": [True, False],
  }]

  def build_graph(parameters):
    """Build a graph that splits a placeholder; only the first piece is an output."""
    placeholder = tf.compat.v1.placeholder(
        dtype=tf.float32, name="input", shape=parameters["input_shape"])
    pieces = tf.split(placeholder, parameters["num_or_size_splits"],
                      parameters["axis"])
    return [placeholder], [pieces[0]]

  def build_inputs(parameters, sess, inputs, outputs):
    """Feed random float32 data in [-1, 1] and evaluate the outputs."""
    feed_values = [
        create_tensor_data(
            np.float32, parameters["input_shape"], min_value=-1, max_value=1)
    ]
    return feed_values, sess.run(
        outputs, feed_dict=dict(zip(inputs, feed_values)))

  make_zip_of_tests(
      options,
      test_parameters,
      build_graph,
      build_inputs,
      expected_tf_failures=224)
| apache-2.0 |
cemarchi/biosphere | Src/BioDataManagement/DataAccess/Entities/GeneExpressionLevel.py | 1 | 1204 | from typing import Dict
from Src.BioDataManagement.DataAccess.Entities.BiologicalMeasureType import BiologicalMeasureType
from Src.Core.Entity.EntityBase import EntityBase
class GeneExpressionLevel(BiologicalMeasureType, EntityBase):
    """Biological measure keyed by an Entrez gene identifier (``id_entrez``)."""

    def __init__(self, **kwargs):
        """
        :param kwargs: forwarded to the base classes; ``id_entrez`` is
            also stored on this instance (None when absent).
        """
        super().__init__(**kwargs)
        self.__id_entrez = kwargs.get('id_entrez')

    def __hash__(self):
        # Hash and equality are both based on the Entrez id.
        return hash(self.__id_entrez)

    def __eq__(self, other):
        if not isinstance(other, GeneExpressionLevel):
            return False
        return self.__id_entrez == other.id_entrez

    @property
    def id_entrez(self) -> int:
        """The Entrez gene identifier of this measure."""
        return self.__id_entrez

    @id_entrez.setter
    def id_entrez(self, value: int):
        """
        :param value: new Entrez gene identifier
        :return:
        """
        self.__id_entrez = value

    def validate(self):
        super().validate()
        if not self.__id_entrez:
            raise ValueError('id_entrez is required.')

    def as_dict(self) -> Dict:
        payload = super().as_dict()
        payload.update({'id_entrez': self.__id_entrez})
        return payload
capsci/ShareCalendar | Credentials.py | 1 | 1777 | '''
file: Credentials.py
Sets up API credentials for future data requests
Please follow steps at https://github.com/capsci/ShareCalendar for more details
'''
__author__ = "Kapil Somani"
__email__ = "kmsomani@ncsu.edu"
__status__ = "Prototype"
import os
from apiclient.discovery import build
import oauth2client
from oauth2client import client
from oauth2client import tools
try:
import argparse
flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
flags = None
SCOPES = 'https://www.googleapis.com/auth/calendar.readonly'
CLIENT_SECRET_FILE = 'client_secret.json'
APPLICATION_NAME = 'Google Calendar API Quickstart'
def get_credentials():
    """Gets valid user credentials from storage.

    If nothing has been stored, or if the stored credentials are invalid,
    the OAuth2 flow is completed to obtain the new credentials.

    Returns:
        Credentials, the obtained credential.
    """
    # Credentials are cached under ./.credentials/calendar-quickstart.json
    # (note: '.' is the current working directory, not the user's home).
    home_dir = os.path.expanduser('.')
    credential_dir = os.path.join(home_dir, '.credentials')
    if not os.path.exists(credential_dir):
        os.makedirs(credential_dir)
    credential_path = os.path.join(credential_dir,
                                   'calendar-quickstart.json')

    store = oauth2client.file.Storage(credential_path)
    credentials = store.get()
    if not credentials or credentials.invalid:
        # No (valid) cached credentials: run the interactive OAuth2 flow.
        flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
        flow.user_agent = APPLICATION_NAME
        if flags:
            credentials = tools.run_flow(flow, store, flags)
        else:  # Needed only for compatibility with Python 2.6
            credentials = tools.run(flow, store)
        print 'Storing credentials to ' + credential_path
    return credentials
demisto/demisto-py | demisto_client/demisto_api/models/module_args.py | 1 | 3903 | # coding: utf-8
"""
Demisto API
This is the public REST API to integrate with the demisto server. HTTP request can be sent using any HTTP-client. For an example dedicated client take a look at: https://github.com/demisto/demisto-py. Requests must include API-key that can be generated in the Demisto web client under 'Settings' -> 'Integrations' -> 'API keys' Optimistic Locking and Versioning\\: When using Demisto REST API, you will need to make sure to work on the latest version of the item (incident, entry, etc.), otherwise, you will get a DB version error (which not allow you to override a newer item). In addition, you can pass 'version\\: -1' to force data override (make sure that other users data might be lost). Assume that Alice and Bob both read the same data from Demisto server, then they both changed the data, and then both tried to write the new versions back to the server. Whose changes should be saved? Alice’s? Bob’s? To solve this, each data item in Demisto has a numeric incremental version. If Alice saved an item with version 4 and Bob trying to save the same item with version 3, Demisto will rollback Bob request and returns a DB version conflict error. Bob will need to get the latest item and work on it so Alice work will not get lost. Example request using 'curl'\\: ``` curl 'https://hostname:443/incidents/search' -H 'content-type: application/json' -H 'accept: application/json' -H 'Authorization: <API Key goes here>' --data-binary '{\"filter\":{\"query\":\"-status:closed -category:job\",\"period\":{\"by\":\"day\",\"fromValue\":7}}}' --compressed ``` # noqa: E501
OpenAPI spec version: 2.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class ModuleArgs(dict):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Empty for this model: it declares no typed attributes of its own.
    swagger_types = {
    }

    attribute_map = {
    }

    def __init__(self):  # noqa: E501
        """ModuleArgs - a model defined in Swagger"""  # noqa: E501
        self.discriminator = None

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        # Since this model subclasses dict, also copy any plain dict items.
        if issubclass(ModuleArgs, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, ModuleArgs):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| apache-2.0 |
CourseTalk/edx-platform | common/lib/xmodule/xmodule/library_content_module.py | 17 | 27284 | # -*- coding: utf-8 -*-
"""
LibraryContent: The XBlock used to include blocks from a library in a course.
"""
import json
from lxml import etree
from copy import copy
from capa.responsetypes import registry
from gettext import ngettext
from lazy import lazy
from .mako_module import MakoModuleDescriptor
from opaque_keys.edx.locator import LibraryLocator
import random
from webob import Response
from xblock.core import XBlock
from xblock.fields import Scope, String, List, Integer, Boolean
from xblock.fragment import Fragment
from xmodule.validation import StudioValidationMessage, StudioValidation
from xmodule.x_module import XModule, STUDENT_VIEW
from xmodule.studio_editable import StudioEditableModule, StudioEditableDescriptor
from .xml_module import XmlDescriptor
from pkg_resources import resource_string
# Make '_' a no-op so we can scrape strings. Using lambda instead of
# `django.utils.translation.ugettext_noop` because Django cannot be imported in this file
_ = lambda text: text
ANY_CAPA_TYPE_VALUE = 'any'
def _get_human_name(problem_class):
"""
Get the human-friendly name for a problem type.
"""
return getattr(problem_class, 'human_name', problem_class.__name__)
def _get_capa_types():
    """
    Gets capa types tags and labels

    Returns the "Any Type" wildcard entry first, followed by one entry
    per registered capa tag, sorted by display name.
    """
    wildcard = [{'value': ANY_CAPA_TYPE_VALUE, 'display_name': _('Any Type')}]
    options = []
    for tag in registry.registered_tags():
        options.append({
            'value': tag,
            'display_name': _get_human_name(registry.get_class_for_tag(tag)),
        })
    options.sort(key=lambda option: option.get('display_name'))
    return wildcard + options
class LibraryContentFields(object):
    """
    Fields for the LibraryContentModule.

    Separated out for now because they need to be added to the module and the
    descriptor.
    """
    # Please note the display_name of each field below is used in
    # common/test/acceptance/pages/studio/library.py:StudioLibraryContentXBlockEditModal
    # to locate input elements - keep synchronized
    display_name = String(
        display_name=_("Display Name"),
        help=_("Display name for this module"),
        default="Randomized Content Block",
        scope=Scope.settings,
    )
    # ID of the library to draw content from (stored as a string locator).
    source_library_id = String(
        display_name=_("Library"),
        help=_("Select the library from which you want to draw content."),
        scope=Scope.settings,
        values_provider=lambda instance: instance.source_library_values(),
    )
    source_library_version = String(
        # This is a hidden field that stores the version of source_library when we last pulled content from it
        display_name=_("Library Version"),
        scope=Scope.settings,
    )
    mode = String(
        display_name=_("Mode"),
        help=_("Determines how content is drawn from the library"),
        default="random",
        values=[
            {"display_name": _("Choose n at random"), "value": "random"}
            # Future addition: Choose a new random set of n every time the student refreshes the block, for self tests
            # Future addition: manually selected blocks
        ],
        scope=Scope.settings,
    )
    # Number of children shown to each student (see make_selection).
    max_count = Integer(
        display_name=_("Count"),
        help=_("Enter the number of components to display to each student."),
        default=1,
        scope=Scope.settings,
    )
    capa_type = String(
        display_name=_("Problem Type"),
        help=_('Choose a problem type to fetch from the library. If "Any Type" is selected no filtering is applied.'),
        default=ANY_CAPA_TYPE_VALUE,
        values=_get_capa_types(),
        scope=Scope.settings,
    )
    has_score = Boolean(
        display_name=_("Scored"),
        help=_("Set this value to True if this module is either a graded assignment or a practice problem."),
        default=False,
        scope=Scope.settings,
    )
    selected = List(
        # This is a list of (block_type, block_id) tuples used to record
        # which random/first set of matching blocks was selected per user
        default=[],
        scope=Scope.user_state,
    )
    has_children = True

    @property
    def source_library_key(self):
        """
        Convenience method to get the library ID as a LibraryLocator and not just a string
        """
        return LibraryLocator.from_string(self.source_library_id)
#pylint: disable=abstract-method
@XBlock.wants('library_tools')  # Only needed in studio
class LibraryContentModule(LibraryContentFields, XModule, StudioEditableModule):
    """
    An XBlock whose children are chosen dynamically from a content library.
    Can be used to create randomized assessments among other things.

    Note: technically, all matching blocks from the content library are added
    as children of this block, but only a subset of those children are shown to
    any particular student.
    """
    @classmethod
    def make_selection(cls, selected, children, max_count, mode):
        """
        Dynamically selects block_ids indicating which of the possible children are displayed to the current user.

        Arguments:
            selected - list of (block_type, block_id) tuples assigned to this student
            children - children of this block
            max_count - number of components to display to each student
            mode - how content is drawn from the library

        Returns:
            A dict containing the following keys:

            'selected' (set) of (block_type, block_id) tuples assigned to this student
            'invalid' (set) of dropped (block_type, block_id) tuples that are no longer valid
            'overlimit' (set) of dropped (block_type, block_id) tuples that were previously selected
            'added' (set) of newly added (block_type, block_id) tuples
        """
        selected = set(tuple(k) for k in selected)  # set of (block_type, block_id) tuples assigned to this student

        # Determine which of our children we will show:
        valid_block_keys = set([(c.block_type, c.block_id) for c in children])

        # Remove any selected blocks that are no longer valid:
        invalid_block_keys = (selected - valid_block_keys)
        if invalid_block_keys:
            selected -= invalid_block_keys

        # If max_count has been decreased, we may have to drop some previously selected blocks:
        overlimit_block_keys = set()
        while len(selected) > max_count:
            overlimit_block_keys.add(selected.pop())

        # Do we have enough blocks now?
        num_to_add = max_count - len(selected)

        added_block_keys = None
        if num_to_add > 0:
            # We need to select [more] blocks to display to this user:
            pool = valid_block_keys - selected
            if mode == "random":
                num_to_add = min(len(pool), num_to_add)
                added_block_keys = set(random.sample(pool, num_to_add))
                # We now have the correct n random children to show for this user.
            else:
                raise NotImplementedError("Unsupported mode.")
            selected |= added_block_keys

        return {
            'selected': selected,
            'invalid': invalid_block_keys,
            'overlimit': overlimit_block_keys,
            'added': added_block_keys,
        }

    def _publish_event(self, event_name, result, **kwargs):
        """
        Helper method to publish an event for analytics purposes
        """
        event_data = {
            "location": unicode(self.location),
            "result": result,
            # previous_count falls back to the current selection size the
            # first time an event is published in this request.
            "previous_count": getattr(self, "_last_event_result_count", len(self.selected)),
            "max_count": self.max_count,
        }
        event_data.update(kwargs)
        self.runtime.publish(self, "edx.librarycontentblock.content.{}".format(event_name), event_data)
        self._last_event_result_count = len(result)  # pylint: disable=attribute-defined-outside-init

    @classmethod
    def publish_selected_children_events(cls, block_keys, format_block_keys, publish_event):
        """
        Helper method for publishing events when children blocks are
        selected/updated for a user.  This helper is also used by
        the ContentLibraryTransformer.

        Arguments:

            block_keys -
                A dict describing which events to publish (add or
                remove), see `make_selection` above for format details.

            format_block_keys -
                A function to convert block keys to the format expected
                by publish_event. Must have the signature:

                    [(block_type, block_id)] -> T

                Where T is a collection of block keys as accepted by
                `publish_event`.

            publish_event -
                Function that handles the actual publishing.  Must have
                the signature:

                    <'removed'|'assigned'> -> result:T -> removed:T -> reason:basestring -> None

                Where T is a collection of block_keys as returned by
                `format_block_keys`.
        """
        if block_keys['invalid']:
            # reason "invalid" means deleted from library or a different library is now being used.
            publish_event(
                "removed",
                result=format_block_keys(block_keys['selected']),
                removed=format_block_keys(block_keys['invalid']),
                reason="invalid"
            )

        if block_keys['overlimit']:
            publish_event(
                "removed",
                result=format_block_keys(block_keys['selected']),
                removed=format_block_keys(block_keys['overlimit']),
                reason="overlimit"
            )

        if block_keys['added']:
            publish_event(
                "assigned",
                result=format_block_keys(block_keys['selected']),
                added=format_block_keys(block_keys['added'])
            )

    def selected_children(self):
        """
        Returns a set() of block_ids indicating which of the possible children
        have been selected to display to the current user.

        This reads and updates the "selected" field, which has user_state scope.

        Note: self.selected and the return value contain block_ids. To get
        actual BlockUsageLocators, it is necessary to use self.children,
        because the block_ids alone do not specify the block type.
        """
        if hasattr(self, "_selected_set"):
            # Already done:
            return self._selected_set  # pylint: disable=access-member-before-definition

        block_keys = self.make_selection(self.selected, self.children, self.max_count, "random")  # pylint: disable=no-member

        # Publish events for analytics purposes:
        lib_tools = self.runtime.service(self, 'library_tools')
        format_block_keys = lambda keys: lib_tools.create_block_analytics_summary(self.location.course_key, keys)
        self.publish_selected_children_events(
            block_keys,
            format_block_keys,
            self._publish_event,
        )

        # Save our selections to the user state, to ensure consistency:
        selected = block_keys['selected']
        self.selected = list(selected)  # TODO: this doesn't save from the LMS "Progress" page.
        # Cache the results
        self._selected_set = selected  # pylint: disable=attribute-defined-outside-init

        return selected

    def _get_selected_child_blocks(self):
        """
        Generator returning XBlock instances of the children selected for the
        current user.
        """
        for block_type, block_id in self.selected_children():
            yield self.runtime.get_block(self.location.course_key.make_usage_key(block_type, block_id))

    def student_view(self, context):
        """Render only the children selected for the current user."""
        fragment = Fragment()
        contents = []
        child_context = {} if not context else copy(context)

        for child in self._get_selected_child_blocks():
            for displayable in child.displayable_items():
                rendered_child = displayable.render(STUDENT_VIEW, child_context)
                fragment.add_frag_resources(rendered_child)
                contents.append({
                    'id': displayable.location.to_deprecated_string(),
                    'content': rendered_child.content,
                })

        fragment.add_content(self.system.render_template('vert_module.html', {
            'items': contents,
            'xblock_context': context,
            'show_bookmark_button': False,
        }))
        return fragment

    def validate(self):
        """
        Validates the state of this Library Content Module Instance.
        """
        # Validation logic lives on the descriptor; delegate to it.
        return self.descriptor.validate()

    def author_view(self, context):
        """
        Renders the Studio views.
        Normal studio view: If block is properly configured, displays library status summary
        Studio container view: displays a preview of all possible children.
        """
        fragment = Fragment()
        root_xblock = context.get('root_xblock')
        is_root = root_xblock and root_xblock.location == self.location

        if is_root:
            # User has clicked the "View" link. Show a preview of all possible children:
            if self.children:  # pylint: disable=no-member
                fragment.add_content(self.system.render_template("library-block-author-preview-header.html", {
                    'max_count': self.max_count,
                    'display_name': self.display_name or self.url_name,
                }))
                context['can_edit_visibility'] = False
                self.render_children(context, fragment, can_reorder=False, can_add=False)
        # else: When shown on a unit page, don't show any sort of preview -
        # just the status of this block in the validation area.

        # The following JS is used to make the "Update now" button work on the unit page and the container view:
        fragment.add_javascript_url(self.runtime.local_resource_url(self, 'public/js/library_content_edit.js'))
        fragment.initialize_js('LibraryContentAuthorView')
        return fragment

    def get_child_descriptors(self):
        """
        Return only the subset of our children relevant to the current student.
        """
        return list(self._get_selected_child_blocks())
@XBlock.wants('user')
@XBlock.wants('library_tools') # Only needed in studio
@XBlock.wants('studio_user_permissions') # Only available in studio
class LibraryContentDescriptor(LibraryContentFields, MakoModuleDescriptor, XmlDescriptor, StudioEditableDescriptor):
"""
Descriptor class for LibraryContentModule XBlock.
"""
module_class = LibraryContentModule
mako_template = 'widgets/metadata-edit.html'
js = {'coffee': [resource_string(__name__, 'js/src/vertical/edit.coffee')]}
js_module_name = "VerticalDescriptor"
show_in_read_only_mode = True
@property
def non_editable_metadata_fields(self):
    """Fields hidden from the Studio edit form (adds mode and library version)."""
    non_editable_fields = super(LibraryContentDescriptor, self).non_editable_metadata_fields
    # The only supported mode is currently 'random'.
    # Add the mode field to non_editable_metadata_fields so that it doesn't
    # render in the edit form.
    non_editable_fields.extend([LibraryContentFields.mode, LibraryContentFields.source_library_version])
    return non_editable_fields
@lazy
def tools(self):
    """
    Grab the library tools service or raise an error.
    """
    # NOTE(review): despite the docstring, nothing is raised here; the
    # service lookup may return None and callers check truthiness
    # (see refresh_children / studio_post_duplicate).
    return self.runtime.service(self, 'library_tools')
def get_user_id(self):
    """Return the platform user id of the current user, or None if unknown."""
    user_service = self.runtime.service(self, 'user')
    if not user_service:
        # May be None when creating bok choy test fixtures
        return None
    current_user = user_service.get_current_user()
    return current_user.opt_attrs.get('edx-platform.user_id', None)
@XBlock.handler
def refresh_children(self, request=None, suffix=None):  # pylint: disable=unused-argument
    """
    Refresh children:

    This method is to be used when any of the libraries that this block
    references have been updated. It will re-fetch all matching blocks from
    the libraries, and copy them as children of this block. The children
    will be given new block_ids, but the definition ID used should be the
    exact same definition ID used in the library.

    This method will update this block's 'source_library_id' field to store
    the version number of the libraries used, so we easily determine if
    this block is up to date or not.
    """
    user_perms = self.runtime.service(self, 'studio_user_permissions')
    user_id = self.get_user_id()
    if not self.tools:
        # Without the library_tools service we cannot fetch library content.
        return Response("Library Tools unavailable in current runtime.", status=400)
    self.tools.update_children(self, user_id, user_perms)
    return Response()
# Copy over any overridden settings the course author may have applied to the blocks.
def _copy_overrides(self, store, user_id, source, dest):
    """
    Copy any overrides the user has made on blocks in this library.
    """
    # NOTE: itervalues()/py2-only -- this module predates python 3 support.
    for field in source.fields.itervalues():
        # Only author-set, settings-scoped fields are copied.
        if field.scope == Scope.settings and field.is_set_on(source):
            setattr(dest, field.name, field.read_from(source))
    if source.has_children:
        # Recurse pairwise over children; assumes source and dest children
        # are in the same order -- TODO confirm duplication preserves order.
        source_children = [self.runtime.get_block(source_key) for source_key in source.children]
        dest_children = [self.runtime.get_block(dest_key) for dest_key in dest.children]
        for source_child, dest_child in zip(source_children, dest_children):
            self._copy_overrides(store, user_id, source_child, dest_child)
    store.update_item(dest, user_id)
def studio_post_duplicate(self, store, source_block):
    """
    Used by the studio after basic duplication of a source block. We handle the children
    ourselves, because we have to properly reference the library upstream and set the overrides.

    Otherwise we'll end up losing data on the next refresh.
    """
    # The first task will be to refresh our copy of the library to generate the children.
    # We must do this at the currently set version of the library block. Otherwise we may not have
    # exactly the same children-- someone may be duplicating an out of date block, after all.
    user_id = self.get_user_id()
    user_perms = self.runtime.service(self, 'studio_user_permissions')
    if not self.tools:
        raise RuntimeError("Library tools unavailable, duplication will not be sane!")
    self.tools.update_children(self, user_id, user_perms, version=self.source_library_version)
    self._copy_overrides(store, user_id, source_block, self)

    # Children have been handled -- returning True tells the caller not to
    # duplicate them again.
    return True
def _validate_library_version(self, validation, lib_tools, version, library_key):
    """
    Validates library version.

    Sets a WARNING summary when the block references an out-of-date library
    version, or an ERROR summary when the library cannot be resolved at all.
    Returns True only when the referenced version is current.
    """
    latest_version = lib_tools.get_library_version(library_key)
    if latest_version is not None:
        if version is None or version != unicode(latest_version):
            # Library exists but this block was built from an older version.
            validation.set_summary(
                StudioValidationMessage(
                    StudioValidationMessage.WARNING,
                    _(u'This component is out of date. The library has new content.'),
                    # TODO: change this to action_runtime_event='...' once the unit page supports that feature.
                    # See https://openedx.atlassian.net/browse/TNL-993
                    action_class='library-update-btn',
                    # Translators: {refresh_icon} placeholder is substituted to "↻" (without double quotes)
                    action_label=_(u"{refresh_icon} Update now.").format(refresh_icon=u"↻")
                )
            )
            return False
    else:
        # Library could not be found / loaded at all.
        validation.set_summary(
            StudioValidationMessage(
                StudioValidationMessage.ERROR,
                _(u'Library is invalid, corrupt, or has been deleted.'),
                action_class='edit-button',
                action_label=_(u"Edit Library List.")
            )
        )
        return False
    return True
def _set_validation_error_if_empty(self, validation, summary):
    """Attach *summary* to *validation*, but never clobber an existing summary."""
    if not validation.empty:
        return
    validation.set_summary(summary)
def validate(self):
    """
    Validates the state of this Library Content Module Instance. This
    is the override of the general XBlock method, and it will also ask
    its superclass to validate.
    """
    validation = super(LibraryContentDescriptor, self).validate()
    # Studio needs the richer StudioValidation type to carry summaries.
    if not isinstance(validation, StudioValidation):
        validation = StudioValidation.copy(validation)
    library_tools = self.runtime.service(self, "library_tools")
    # The runtime/course must support content libraries at all.
    if not (library_tools and library_tools.can_use_library_content(self)):
        validation.set_summary(
            StudioValidationMessage(
                StudioValidationMessage.ERROR,
                _(
                    u"This course does not support content libraries. "
                    u"Contact your system administrator for more information."
                )
            )
        )
        return validation
    if not self.source_library_id:
        # Block exists but the author has not picked a library yet.
        validation.set_summary(
            StudioValidationMessage(
                StudioValidationMessage.NOT_CONFIGURED,
                _(u"A library has not yet been selected."),
                action_class='edit-button',
                action_label=_(u"Select a Library.")
            )
        )
        return validation
    lib_tools = self.runtime.service(self, 'library_tools')
    self._validate_library_version(validation, lib_tools, self.source_library_version, self.source_library_key)

    # Note: we assume refresh_children() has been called
    # since the last time fields like source_library_id or capa_types were changed.
    matching_children_count = len(self.children)  # pylint: disable=no-member
    if matching_children_count == 0:
        self._set_validation_error_if_empty(
            validation,
            StudioValidationMessage(
                StudioValidationMessage.WARNING,
                _(u'There are no matching problem types in the specified libraries.'),
                action_class='edit-button',
                action_label=_(u"Select another problem type.")
            )
        )

    if matching_children_count < self.max_count:
        # Fewer matching problems than the block is configured to fetch.
        self._set_validation_error_if_empty(
            validation,
            StudioValidationMessage(
                StudioValidationMessage.WARNING,
                (
                    ngettext(
                        u'The specified library is configured to fetch {count} problem, ',
                        u'The specified library is configured to fetch {count} problems, ',
                        self.max_count
                    ) +
                    ngettext(
                        u'but there is only {actual} matching problem.',
                        u'but there are only {actual} matching problems.',
                        matching_children_count
                    )
                ).format(count=self.max_count, actual=matching_children_count),
                action_class='edit-button',
                action_label=_(u"Edit the library configuration.")
            )
        )
    return validation
def source_library_values(self):
    """
    Return a list of possible values for self.source_library_id
    """
    lib_tools = self.runtime.service(self, 'library_tools')
    user_perms = self.runtime.service(self, 'studio_user_permissions')
    all_libraries = lib_tools.list_available_libraries()
    if user_perms:
        # Restrict to libraries the user can read, but always keep the
        # currently selected one so the existing choice stays visible.
        all_libraries = [
            (key, name) for key, name in all_libraries
            if user_perms.can_read(key) or self.source_library_id == unicode(key)
        ]
    all_libraries.sort(key=lambda entry: entry[1])  # Sort by name
    # Selected library no longer listed: show it flagged as invalid.
    if self.source_library_id and self.source_library_key not in [entry[0] for entry in all_libraries]:
        all_libraries.append((self.source_library_id, _(u"Invalid Library")))
    all_libraries = [(u"", _("No Library Selected"))] + all_libraries
    values = [{"display_name": name, "value": unicode(key)} for key, name in all_libraries]
    return values
def editor_saved(self, user, old_metadata, old_content):
    """
    If source_library_id or capa_type has been edited, refresh_children automatically.
    """
    # NOTE(review): the default is a list while source_library_id is a
    # string, so a block with no previous value always compares unequal
    # and triggers an initial refresh -- confirm this is intentional.
    old_source_library_id = old_metadata.get('source_library_id', [])
    if (old_source_library_id != self.source_library_id or
            old_metadata.get('capa_type', ANY_CAPA_TYPE_VALUE) != self.capa_type):
        try:
            self.refresh_children()
        except ValueError:
            pass  # The validation area will display an error message, no need to do anything now.
def has_dynamic_children(self):
    """
    Inform the runtime that our children vary per-user.

    See get_child_descriptors() above
    """
    return True
def get_content_titles(self):
    """
    Return friendly titles for our selected children only; without this,
    all possible children's titles would be seen in the sequence bar in
    the LMS.

    This overwrites the get_content_titles method included in x_module by default.
    """
    return [
        title
        for child in self._xmodule.get_child_descriptors()
        for title in child.get_content_titles()
    ]
@classmethod
def definition_from_xml(cls, xml_object, system):
    """Parse a <library_content> node into (definition dict, child usage ids)."""
    children = [
        system.process_xml(etree.tostring(child)).scope_ids.usage_id
        for child in xml_object.getchildren()
    ]
    # BUGFIX: xml_object.attrib is a dict-like mapping; iterating it directly
    # yields only the attribute *names*, so the two-way unpacking below would
    # fail (or silently split two-character names). Iterate .items() instead.
    definition = {
        attr_name: json.loads(attr_value)
        for attr_name, attr_value in xml_object.attrib.items()
    }
    return definition, children
def definition_to_xml(self, resource_fs):
    """ Exports Library Content Module to XML """
    xml_object = etree.Element('library_content')
    for child in self.get_children():
        self.runtime.add_block_as_child_node(child, xml_object)
    # Set node attributes based on our fields.
    for field_name, field in self.fields.iteritems():  # py2-only iteritems
        # Structural fields are serialized as child nodes, not attributes.
        if field_name in ('children', 'parent', 'content'):
            continue
        if field.is_set_on(self):
            xml_object.set(field_name, unicode(field.read_from(self)))
    return xml_object
| agpl-3.0 |
mobarski/sandbox | kv/ko_x1.py | 1 | 2373 | #from cPickle import dump,load
from marshal import dump,load,dumps,loads
from multiprocessing import Lock
import os
from time import time
class no_lock:
    """No-op drop-in for a Lock: usable in a ``with`` block, synchronizes nothing."""

    def __enter__(self):
        return None

    def __exit__(self, *args, **kwargs):
        # Returning a falsy value lets any exception propagate, as with a real lock.
        return None
class KO:
    "KV database where keys are in memory and values are stored in separate file"

    def __init__(self, name, lock=None):
        """Open (or create) the store: <name>.k holds the marshalled
        key->offset index, <name>.v holds the marshalled values."""
        self.name = name
        if os.path.exists(name+'.k'):
            with open(name+'.k','rb') as kf:
                self.offset = load(kf)
        else:
            self.offset = {}
        if not os.path.exists(name+'.v'):
            open(name+'.v','w').close()
        self.vf = open(name+'.v','r+b')
        self.vf.seek(0,2)
        # True while the value-file position is at EOF (appends are cheap).
        self.at_end = True
        self.lock = lock or Lock()
        #self.lock = no_lock()

    ### WRITE ###
    def set(self, k, v):
        """Append value v at EOF and record its offset under key k."""
        #with self.lock:
        f = self.vf
        if not self.at_end:
            f.seek(0,2)
            self.at_end = True
        self.offset[k] = f.tell()
        dump(v,f,2)

    def __setitem__(self, k, v):
        self.set(k,v)

    def __delitem__(self, k):
        # Drops only the index entry; the value bytes stay in the .v file.
        #with self.lock:
        if k in self.offset: del self.offset[k]

##	def update(self,k,items):
##		self.data.update(items)

    def incr(self, k, v=1): # 100k/s
        """Add v to the (numeric) value under k, creating the key if missing."""
        #with self.lock:
        if k not in self.offset:
            self.set(k,v)
        else:
            curr_val = self.get(k)
            # get() leaves the file positioned just past the record, so the
            # difference to the stored offset is the record's serialized length.
            curr_len = self.vf.tell()-self.offset[k]
            new_val = dumps(curr_val + v,2)
            new_len = len(new_val)
            if new_len>curr_len:
                # New record longer than the old one: must append at EOF.
                self.set(k,curr_val+v)
            else:
                # Fits: overwrite in place at the old offset.
                self.vf.seek(self.offset[k])
                self.vf.write(new_val)

    def incr_items(self, items):
        """Apply incr() for every (key, delta) pair in a dict or iterable."""
        try:
            kv = items.items()
        except:
            # Not a mapping -- assume an iterable of (key, delta) pairs.
            kv = items
        for k,v in kv:
            self.incr(k,v)

    ### READ ###
    def get(self, k, default=None):
        """Return the value stored under k, or *default* if k is unknown."""
        if k not in self.offset: return default
        self.at_end = False
        f = self.vf
        f.seek(self.offset[k])
        return load(f)

    def __getitem__(self, k):
        # NOTE: returns None for a missing key instead of raising KeyError.
        return self.get(k)

    def __contains__(self, k):
        return k in self.offset

    def __len__(self):
        return len(self.offset)

    def items(self):
        # Each value is read lazily from the value file.
        for k in self.offset:
            yield k,self.get(k)

    def keys(self):
        return self.offset.keys()

    def values(self):
        for k in self.offset:
            yield self.get(k)

    ### OTHER ###
    def sync(self):
        """Persist the key->offset index to <name>.k and flush the value file."""
        with open(self.name+'.k','wb') as kf:
            dump(self.offset,kf,2)
        self.vf.flush()
# --- micro-benchmark for KO; NOTE: runs at import time and creates
# 'ko_x1.k' / 'ko_x1.v' in the current directory ---
M = 100
N = 1000*M
db = KO('ko_x1')
t0=time()
if 0:
    # write benchmark (disabled)
    for i in range(N):
        db[i] = i
    db.sync()
## for k in range(M):
##     for i in range(N):
##         db.incr(i,1.2)
## db.sync()
if 1:
    # read benchmark
    for i in range(N):
        db[i]
# operations per second
print(N/(time()-t0))
| mit |
jonyroda97/redbot-amigosprovaveis | lib/idna/intranges.py | 293 | 1749 | """
Given a list of integers, made up of (hopefully) a small number of long runs
of consecutive integers, compute a representation of the form
((start1, end1), (start2, end2) ...). Then answer the question "was x present
in the original list?" in time O(log(# runs)).
"""
import bisect
def intranges_from_list(list_):
    """Represent a list of integers as a sequence of ranges:
    ((start_0, end_0), (start_1, end_1), ...), such that the original
    integers are exactly those x such that start_i <= x < end_i for some i.

    Ranges are encoded as single integers (start << 32 | end), not as tuples.
    """
    values = sorted(list_)
    total = len(values)
    encoded = []
    run_start = 0
    for idx, value in enumerate(values):
        # Keep extending the current run while the successor is adjacent.
        if idx + 1 < total and value == values[idx + 1] - 1:
            continue
        run = values[run_start:idx + 1]
        encoded.append(_encode_range(run[0], run[-1] + 1))
        run_start = idx + 1
    return tuple(encoded)
def _encode_range(start, end):
return (start << 32) | end
def _decode_range(r):
return (r >> 32), (r & ((1 << 32) - 1))
def intranges_contain(int_, ranges):
    """Determine if `int_` falls into one of the ranges in `ranges`."""
    probe = _encode_range(int_, 0)
    pos = bisect.bisect_left(ranges, probe)
    # A range covering int_ with an earlier start sorts immediately left of probe.
    if pos > 0:
        left, right = _decode_range(ranges[pos - 1])
        if left <= int_ < right:
            return True
    # A range starting exactly at int_ sorts at pos itself.
    if pos < len(ranges):
        left, _ = _decode_range(ranges[pos])
        if left == int_:
            return True
    return False
| gpl-3.0 |
windelbouwman/ppci-mirror | ppci/codegen/codegen.py | 1 | 12881 | """ Machine code generator.
The architecture is provided when the generator is created.
"""
import logging
from .. import ir
from ..irutils import Verifier, split_block
from ..arch.arch import Architecture
from ..arch.generic_instructions import Label, Comment, Global, DebugData
from ..arch.generic_instructions import RegisterUseDef, VirtualInstruction
from ..arch.generic_instructions import InlineAssembly, SetSymbolType
from ..arch.generic_instructions import ArtificialInstruction, Alignment
from ..arch.encoding import Instruction
from ..arch.data_instructions import DZero, DByte
from ..arch import data_instructions
from ..arch.arch_info import Endianness
from ..binutils.debuginfo import DebugType, DebugLocation, DebugDb
from ..binutils.outstream import MasterOutputStream, FunctionOutputStream
from .irdag import SelectionGraphBuilder
from .instructionselector import InstructionSelector1
from .instructionscheduler import InstructionScheduler
from .registerallocator import GraphColoringRegisterAllocator
from .peephole import PeepHoleStream
class CodeGenerator:
""" Machine code generator """
logger = logging.getLogger("codegen")
def __init__(self, arch, optimize_for="size"):
    """Create a code generator for the given Architecture.

    optimize_for picks the instruction-selection weight tuple; unknown
    values fall back to neutral (1, 1, 1) weights.
    """
    assert isinstance(arch, Architecture), arch
    self.arch = arch
    self.verifier = Verifier()
    self.sgraph_builder = SelectionGraphBuilder(arch)
    # Weight tuples fed to the instruction selector per optimization goal.
    weights_map = {
        "size": (10, 1, 1),
        "speed": (3, 10, 1),
        "co2": (1, 2, 10),
        "awesome": (13, 13, 13),
    }
    selection_weights = weights_map.get(optimize_for, (1, 1, 1))
    self.instruction_selector = InstructionSelector1(
        arch, self.sgraph_builder, weights=selection_weights
    )
    self.instruction_scheduler = InstructionScheduler()
    self.register_allocator = GraphColoringRegisterAllocator(
        arch, self.instruction_selector
    )
def generate(
    self, ircode: ir.Module, output_stream, reporter, debug=False
):
    """ Generate machine code from ir-code into output stream """
    assert isinstance(ircode, ir.Module)
    # Prefer the module's own debug database; fall back to a fresh one.
    if ircode.debug_db:
        self.debug_db = ircode.debug_db
    else:
        self.debug_db = DebugDb()
    self.logger.info(
        "Generating %s code for module %s", str(self.arch), ircode.name
    )

    # Declare externals:
    output_stream.select_section("data")
    for external in ircode.externals:
        self._mark_global(output_stream, external)
        if isinstance(external, ir.ExternalSubRoutine):
            output_stream.emit(SetSymbolType(external.name, 'func'))

    # Generate code for global variables:
    output_stream.select_section("data")
    for var in ircode.variables:
        self.generate_global(var, output_stream, debug)

    # Generate code for functions:
    # Munch program into a bunch of frames. One frame per function.
    # Each frame has a flat list of abstract instructions.
    output_stream.select_section("code")
    for function in ircode.functions:
        self.generate_function(
            function, output_stream, reporter, debug=debug
        )

    # Output debug type data:
    if debug:
        for di in self.debug_db.infos:
            if isinstance(di, DebugType):
                # TODO: prevent this from being emitted twice in some way?
                output_stream.emit(DebugData(di))
def generate_global(self, var, output_stream, debug):
    """ Generate code for a global variable """
    alignment = Alignment(var.alignment)
    output_stream.emit(alignment)
    self._mark_global(output_stream, var)
    label = Label(var.name)
    output_stream.emit(label)
    if var.amount == 0 and var.value is None and not var.used_by:
        pass  # E.g. empty WASM func_table
    elif var.amount > 0:
        if var.value:
            assert isinstance(var.value, tuple)
            for part in var.value:
                if isinstance(part, bytes):
                    # Emit plain byte data:
                    for byte in part:
                        output_stream.emit(DByte(byte))
                elif isinstance(part, tuple) and part[0] is ir.ptr:
                    # Emit reference to a label:
                    assert isinstance(part[1], str)
                    # Data directive chosen by (pointer size, endianness).
                    # NOTE(review): only little-endian keys are present --
                    # a big-endian target would raise KeyError here.
                    labels_refs = {
                        (2, Endianness.LITTLE): data_instructions.Dw2,
                        (4, Endianness.LITTLE): data_instructions.Dcd2,
                        (8, Endianness.LITTLE): data_instructions.Dq2,
                    }
                    key = (
                        self.arch.info.get_size(part[0]),
                        self.arch.info.endianness,
                    )
                    op_cls = labels_refs[key]
                    output_stream.emit(op_cls(part[1]))
                else:
                    raise NotImplementedError(str(part))
        else:
            # No initial value: reserve zero-filled space.
            output_stream.emit(DZero(var.amount))
    else:  # pragma: no cover
        raise NotImplementedError()
    self.debug_db.map(var, label)
    if self.debug_db.contains(label) and debug:
        dv = self.debug_db.get(label)
        dv.address = label.name
        output_stream.emit(DebugData(dv))
def generate_function(
    self, ir_function, output_stream, reporter, debug=False
):
    """ Generate code for one function into a frame """
    self.logger.info(
        "Generating %s code for function %s",
        str(self.arch),
        ir_function.name,
    )
    reporter.heading(3, "Log for {}".format(ir_function))
    reporter.dump_ir(ir_function)

    # Split too large basic blocks in smaller chunks (for literal pools):
    # TODO: fix arbitrary number of 500. This works for arm and thumb..
    split_block_nr = 1
    for block in ir_function:
        max_block_len = 200
        while len(block) > max_block_len:
            self.logger.debug("%s too large, splitting up", str(block))
            newname = "{}_splitted_block_{}".format(
                ir_function.name, split_block_nr
            )
            split_block_nr += 1
            # Continue splitting the tail until it fits.
            _, block = split_block(
                block, pos=max_block_len, newname=newname
            )

    self._mark_global(output_stream, ir_function)
    output_stream.emit(SetSymbolType(ir_function.name, 'func'))

    # Create a frame for this function:
    frame_name = ir_function.name
    frame = self.arch.new_frame(frame_name, ir_function)
    frame.debug_db = self.debug_db  # Attach debug info
    self.debug_db.map(ir_function, frame)

    # Select instructions and schedule them:
    self.select_and_schedule(ir_function, frame, reporter)
    reporter.dump_frame(frame)

    # Do register allocation:
    self.register_allocator.alloc_frame(frame)
    # TODO: Peep-hole here?

    # frame.instructions = [i for i in frame.instructions]
    # Optional architecture-specific peephole pass:
    if hasattr(self.arch, "peephole"):
        frame.instructions = self.arch.peephole(frame)
    reporter.dump_frame(frame)

    # Add label and return and stack adjustment:
    # Tee the output so the reporter also sees the emitted instructions.
    instruction_list = []
    output_stream = MasterOutputStream(
        [FunctionOutputStream(instruction_list.append), output_stream]
    )
    peep_hole_stream = PeepHoleStream(output_stream)
    self.emit_frame_to_stream(frame, peep_hole_stream, debug=debug)
    peep_hole_stream.flush()

    # Emit function debug info:
    if self.debug_db.contains(frame) and debug:
        func_end_label = self.debug_db.new_label()
        output_stream.emit(Label(func_end_label))
        d = self.debug_db.get(frame)
        d.begin = frame_name
        d.end = func_end_label
        dd = DebugData(d)
        output_stream.emit(dd)
    reporter.dump_instructions(instruction_list, self.arch)
def select_and_schedule(self, ir_function, frame, reporter):
    """ Perform instruction selection and scheduling """
    self.logger.debug("Selecting instructions")

    # Only the tree-based selection method is implemented.
    tree_method = True
    if tree_method:
        self.instruction_selector.select(ir_function, frame, reporter)
    else:  # pragma: no cover
        raise NotImplementedError("TODO")

    # Build a graph:
    # self.sgraph_builder.build(ir_function, function_info)
    # reporter.message('Selection graph')
    # reporter.dump_sgraph(sgraph)

    # Schedule instructions:
    # self.instruction_scheduler.schedule(sgraph, frame)
def emit_frame_to_stream(self, frame, output_stream, debug=False):
    """
    Add code for the prologue and the epilogue. Add a label, the
    return instruction and the stack pointer adjustment for the frame.
    At this point we know how much stack space must be reserved for
    locals and what registers should be saved.
    """
    # Materialize the register allocated instructions into a stream of
    # real instructions.
    self.logger.debug("Emitting instructions")
    debug_data = []

    # Prefix code:
    output_stream.emit_all(self.arch.gen_prologue(frame))

    for instruction in frame.instructions:
        assert isinstance(instruction, Instruction), str(instruction)

        # If the instruction has debug location, emit it here:
        if self.debug_db.contains(instruction) and debug:
            d = self.debug_db.get(instruction)
            assert isinstance(d, DebugLocation)
            if not d.address:
                # First time we see this location: give it a label.
                label_name = self.debug_db.new_label()
                d.address = label_name
                source_line = d.loc.get_source_line()
                output_stream.emit(Comment(source_line))
                output_stream.emit(Label(label_name))
                debug_data.append(DebugData(d))

        if isinstance(instruction, VirtualInstruction):
            # Process virtual instructions
            if isinstance(instruction, RegisterUseDef):
                pass
            elif isinstance(instruction, ArtificialInstruction):
                output_stream.emit(instruction)
            elif isinstance(instruction, InlineAssembly):
                self._generate_inline_assembly(
                    instruction.template,
                    instruction.output_registers,
                    instruction.input_registers,
                    output_stream,
                )
            else:  # pragma: no cover
                raise NotImplementedError(str(instruction))
        else:
            # Real instructions:
            assert all(r.is_colored for r in instruction.registers)
            output_stream.emit(instruction)

    # Postfix code, like register restore and stack adjust:
    output_stream.emit_all(self.arch.gen_epilogue(frame))

    # Last but not least, emit debug infos:
    for dd in debug_data:
        output_stream.emit(dd)

    # Check if we know what variables are live
    for tmp in frame.ig.temp_map:
        if self.debug_db.contains(tmp):
            self.debug_db.get(tmp)
            # print(tmp, di)
            # frame.live_ranges(tmp)
            # print('live ranges:', lr)
def _generate_inline_assembly(
    self, assembly_source, output_registers, input_registers, ostream
):
    """ Emit inline assembly template to outstream.

    Template placeholders %0, %1, ... are replaced by the allocated
    registers (outputs first, then inputs) and the resulting text is
    assembled into ostream.
    """
    from ..common import DiagnosticsManager

    # poor mans assembly api copied from api.py
    # Replace template variables with actual registers:
    mapping = {
        "%{}".format(index): str(register)
        for index, register in enumerate(
            output_registers + input_registers
        )
    }
    for k, v in mapping.items():
        assembly_source = assembly_source.replace(k, v)
    diag = DiagnosticsManager()
    assembler = self.arch.assembler
    assembler.prepare()
    assembler.assemble(assembly_source, ostream, diag)
    # TODO: this flush action might be troublesome, since it might emit
    # a literal pool on ARM.
    assembler.flush()
def _mark_global(self, output_stream, value):
    """Emit a Global directive for *value* when its binding is global."""
    assert isinstance(value, ir.GlobalValue)
    is_globally_bound = value.binding == ir.Binding.GLOBAL
    if is_globally_bound:
        output_stream.emit(Global(value.name))
| bsd-2-clause |
PanDAWMS/panda-harvester | pandaharvester/harvestermiddleware/ssh_tunnel_pool.py | 1 | 6713 | import random
import threading
import uuid
import socket
import six
import pexpect
from pandaharvester.harvestercore import core_utils
if six.PY2:
pexpect_spawn = pexpect.spawn
else:
pexpect_spawn = pexpect.spawnu
# logger
baseLogger = core_utils.setup_logger('ssh_tunnel_pool')
# Pool of SSH tunnels
class SshTunnelPool(object):
# constructor
def __init__(self):
    # guards self.pool and self.params across threads
    self.lock = threading.Lock()
    # 'host:port' -> list of (local_bind_port, pexpect child) tunnels
    self.pool = dict()
    # 'host:port' -> tunnel parameters, preserved for reconnects
    self.params = dict()
def make_dict_key(self, host, port):
    """Build the 'host:port' key used by self.pool and self.params."""
    return '{0}:{1}'.format(host, port)
# make a tunnel server
def make_tunnel_server(self, remote_host, remote_port, remote_bind_port=None, num_tunnels=1,
                       ssh_username=None, ssh_password=None, private_key=None, pass_phrase=None,
                       jump_host=None, jump_port=None, login_timeout=60, reconnect=False,
                       with_lock=True):
    """Open ssh -L tunnels to remote_host:remote_port until num_tunnels
    live tunnels exist in the pool. With reconnect=True the parameters
    previously stored for this host/port are reused instead of the
    arguments. with_lock=False lets callers that already hold self.lock
    (see get_tunnel) avoid a deadlock."""
    dict_key = self.make_dict_key(remote_host, remote_port)
    if with_lock:
        self.lock.acquire()
    # make dicts
    if dict_key not in self.pool:
        self.pool[dict_key] = []
    # preserve parameters
    if not reconnect:
        self.params[dict_key] = {'remote_bind_port': remote_bind_port,
                                 'num_tunnels': num_tunnels,
                                 'ssh_username': ssh_username,
                                 'ssh_password': ssh_password,
                                 'private_key': private_key,
                                 'pass_phrase': pass_phrase,
                                 'jump_host': jump_host,
                                 'jump_port': jump_port,
                                 'login_timeout': login_timeout
                                 }
    else:
        remote_bind_port = self.params[dict_key]['remote_bind_port']
        num_tunnels = self.params[dict_key]['num_tunnels']
        ssh_username = self.params[dict_key]['ssh_username']
        ssh_password = self.params[dict_key]['ssh_password']
        private_key = self.params[dict_key]['private_key']
        pass_phrase = self.params[dict_key]['pass_phrase']
        jump_host = self.params[dict_key]['jump_host']
        jump_port = self.params[dict_key]['jump_port']
        login_timeout = self.params[dict_key]['login_timeout']
    # make a tunnel server
    for i in range(num_tunnels - len(self.pool[dict_key])):
        # get a free port
        # NOTE(review): the port is released (s.close) before ssh binds it,
        # so another process could grab it in between -- racy but unlikely.
        s = socket.socket()
        s.bind(('', 0))
        com = "ssh -L {local_bind_port}:127.0.0.1:{remote_bind_port} "
        com += "-p {remote_port} {ssh_username}@{remote_host} "
        com += "-o ServerAliveInterval=120 -o ServerAliveCountMax=2 "
        if private_key is not None:
            com += "-i {private_key} "
        if jump_port is not None:
            com += '-o ProxyCommand="ssh -p {jump_port} {ssh_username}@{jump_host} -W %h:%p" '
        local_bind_port = s.getsockname()[1]
        com = com.format(remote_host=remote_host, remote_port=remote_port, remote_bind_port=remote_bind_port,
                         ssh_username=ssh_username, private_key=private_key, jump_host=jump_host,
                         jump_port=jump_port, local_bind_port=local_bind_port)
        s.close()
        # list of expected strings
        # A unique marker echoed after login confirms the session is live.
        loginString = 'login_to_be_confirmed_with ' + uuid.uuid4().hex
        expected_list = [
            pexpect.EOF,
            pexpect.TIMEOUT,
            "(?i)are you sure you want to continue connecting",
            '(?i)password:',
            '(?i)enter passphrase for key.*',
            loginString,
        ]
        c = pexpect_spawn(com, echo=False)
        c.logfile_read = baseLogger.handlers[0].stream
        isOK = False
        for iTry in range(3):
            idx = c.expect(expected_list, timeout=login_timeout)
            if idx == expected_list.index(loginString):
                # succeeded
                isOK = True
                break
            if idx == 1:
                # timeout
                baseLogger.error('timeout when making a tunnel with com={0} out={1}'.format(com,
                                                                                            c.buffer))
                c.close()
                break
            if idx == 2:
                # new certificate
                c.sendline("yes")
                idx = c.expect(expected_list, timeout=login_timeout)
                if idx == 1:
                    # timeout
                    baseLogger.error('timeout after accepting new cert with com={0} out={1}'.format(com,
                                                                                                    c.buffer))
                    c.close()
                    break
            if idx == 3:
                # password prompt
                c.sendline(ssh_password)
            elif idx == 4:
                # passphrase prompt
                c.sendline(pass_phrase)
            elif idx == 0:
                baseLogger.error('something weired with com={0} out={1}'.format(com,
                                                                                c.buffer))
                c.close()
                break
            # exec to confirm login
            c.sendline('echo {0}'.format(loginString))
        if isOK:
            self.pool[dict_key].append((local_bind_port, c))
    if with_lock:
        self.lock.release()
# get a tunnel
def get_tunnel(self, remote_host, remote_port):
    """Return ('127.0.0.1', local_port, child) for a random live tunnel to
    remote_host:remote_port, reconnecting dead ones first. Yields
    (None, None) for port/child when no tunnel is alive. Raises KeyError
    if make_tunnel_server() was never called for this host/port."""
    dict_key = self.make_dict_key(remote_host, remote_port)
    self.lock.acquire()
    active_tunnels = []
    someClosed = False
    for port, child in self.pool[dict_key]:
        if child.isalive():
            active_tunnels.append([port, child])
        else:
            child.close()
            someClosed = True
    if someClosed:
        # Re-open dead tunnels using the preserved parameters; we already
        # hold self.lock, hence with_lock=False.
        self.make_tunnel_server(remote_host, remote_port, reconnect=True, with_lock=False)
        active_tunnels = [item for item in self.pool[dict_key] if item[1].isalive()]
    if len(active_tunnels) > 0:
        port, child = random.choice(active_tunnels)
    else:
        port, child = None, None
    self.lock.release()
    return ("127.0.0.1", port, child)
# singleton
# Shared module-level pool; the class name is deleted below so importers
# can only use this single instance.
sshTunnelPool = SshTunnelPool()
del SshTunnelPool
| apache-2.0 |
rackerlabs/deuce-client | deuceclient/tests/test_client_deuce_file_creation.py | 1 | 2549 | """
Tests - Deuce Client - Client - Deuce - File - Creation
"""
import json
import httpretty
import deuceclient.api as api
import deuceclient.client.deuce
from deuceclient.tests import *
class ClientDeuceFileCreationTests(ClientTestBase):
    """Tests for DeuceClient.CreateFile against an httpretty-mocked API."""

    def setUp(self):
        super(ClientDeuceFileCreationTests, self).setUp()

        self.client = deuceclient.client.deuce.DeuceClient(self.authenticator,
                                                           self.apihost,
                                                           sslenabled=True)

    def tearDown(self):
        super(ClientDeuceFileCreationTests, self).tearDown()

    @httpretty.activate
    def test_file_creation(self):
        """A 201 with location/x-file-id headers registers the file on the vault."""
        file_id = create_file()
        file_url = get_file_url(self.apihost, self.vault.vault_id, file_id)

        httpretty.register_uri(httpretty.POST,
                               get_files_url(self.apihost,
                                             self.vault.vault_id),
                               adding_headers={
                                   'location': file_url,
                                   'x-file-id': file_id
                               },
                               status=201)

        file_id = self.client.CreateFile(self.vault)
        self.assertIn(file_id, self.vault.files)
        self.assertEqual(file_url, self.vault.files[file_id].url)

    def test_file_creation_bad_vault(self):
        """CreateFile requires an api.Vault instance, not a bare vault id."""
        with self.assertRaises(TypeError):
            self.client.CreateFile(self.vault.vault_id)

    @httpretty.activate
    def test_file_creation_missing_location_header(self):
        """A 201 response without the location header raises KeyError."""
        # Unused file_id/file_url fixtures and the unused "as creation_error"
        # binding from the original were dropped.
        httpretty.register_uri(httpretty.POST,
                               get_files_url(self.apihost,
                                             self.vault.vault_id),
                               status=201)

        with self.assertRaises(KeyError):
            self.client.CreateFile(self.vault)

    @httpretty.activate
    def test_file_creation_failed(self):
        """A non-2xx response raises RuntimeError."""
        httpretty.register_uri(httpretty.POST,
                               get_files_url(self.apihost,
                                             self.vault.vault_id),
                               status=404)

        with self.assertRaises(RuntimeError):
            self.client.CreateFile(self.vault)
| apache-2.0 |
detiber/ansible | lib/ansible/plugins/action/ops_template.py | 35 | 1776 | #
# Copyright 2015 Peter Sprygada <psprygada@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from ansible.module_utils.six import string_types
from ansible.plugins.action import ActionBase
from ansible.plugins.action.net_template import ActionModule as NetActionModule
class ActionModule(NetActionModule, ActionBase):
    """Action plugin for the ops_template module (OpenSwitch config templates)."""

    def run(self, tmp=None, task_vars=None):
        # Local transport: defer entirely to the base implementation.
        if self._connection.transport == 'local':
            return super(ActionModule, self).run(tmp, task_vars)

        result = dict(changed=False)

        # A string 'src' argument is a template path that must be rendered
        # before the module executes.
        if isinstance(self._task.args['src'], string_types):
            self._handle_template()

        result.update(self._execute_module(module_name=self._task.action,
                                           module_args=self._task.args, task_vars=task_vars))

        # When a backup was requested and the module returned one, write it
        # out per inventory host as pretty-printed JSON.
        if self._task.args.get('backup') and result.get('_backup'):
            contents = json.dumps(result['_backup'], indent=4)
            self._write_backup(task_vars['inventory_hostname'], contents)

        # The private _backup payload must not leak into the task result.
        if '_backup' in result:
            del result['_backup']

        return result
| gpl-3.0 |
qbektrix/pyglet | pyglet/__init__.py | 12 | 14377 | # ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
'''pyglet is a cross-platform games and multimedia package.
Detailed documentation is available at http://www.pyglet.org
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
import os
import sys
# True when running under the epydoc documentation builder, which sets
# ``sys.is_epydoc`` externally before importing pyglet.
_is_epydoc = hasattr(sys, 'is_epydoc') and sys.is_epydoc
#: The release version of this pyglet installation.
#:
#: Valid only if pyglet was installed from a source or binary distribution
#: (i.e. not in a checked-out copy from SVN).
#:
#: Use setuptools if you need to check for a specific release version, e.g.::
#:
#: >>> import pyglet
#: >>> from pkg_resources import parse_version
#: >>> parse_version(pyglet.version) >= parse_version('1.1')
#: True
#:
version = '1.2alpha1'
# Pyglet platform treats *BSD systems as Linux
compat_platform = sys.platform
if "bsd" in compat_platform:
    # e.g. 'freebsd8' -> use the X11/Linux code paths throughout pyglet.
    compat_platform = "linux-compat"
def _require_ctypes_version(version):
# Check ctypes version
import ctypes
req = [int(i) for i in version.split('.')]
have = [int(i) for i in ctypes.__version__.split('.')]
if not tuple(have) >= tuple(req):
raise ImportError('pyglet requires ctypes %s or later.' % version)
_require_ctypes_version('1.0.0')
# Optimisations are enabled when Python runs with -O (__debug__ is False)
# or when the application has been frozen by py2exe/py2app.
_enable_optimisations = not __debug__
if getattr(sys, 'frozen', None):
    _enable_optimisations = True
#: Global dict of pyglet options. To change an option from its default, you
#: must import ``pyglet`` before any sub-packages. For example::
#:
#: import pyglet
#: pyglet.options['debug_gl'] = False
#:
#: The default options can be overridden from the OS environment. The
#: corresponding environment variable for each option key is prefaced by
#: ``PYGLET_``. For example, in Bash you can set the ``debug_gl`` option with::
#:
#: PYGLET_DEBUG_GL=True; export PYGLET_DEBUG_GL
#:
#: For options requiring a tuple of values, separate each value with a comma.
#:
#: The non-development options are:
#:
#: audio
#: A sequence of the names of audio modules to attempt to load, in
#: order of preference. Valid driver names are:
#:
#: * directsound, the Windows DirectSound audio module (Windows only)
#: * pulse, the PulseAudio module (Linux only)
#: * openal, the OpenAL audio module
#: * silent, no audio
#: debug_lib
#: If True, prints the path of each dynamic library loaded.
#: debug_gl
#: If True, all calls to OpenGL functions are checked afterwards for
#: errors using ``glGetError``. This will severely impact performance,
#: but provides useful exceptions at the point of failure. By default,
#: this option is enabled if ``__debug__`` is (i.e., if Python was not run
#: with the -O option). It is disabled by default when pyglet is "frozen"
#: within a py2exe or py2app library archive.
#: shadow_window
#: By default, pyglet creates a hidden window with a GL context when
#: pyglet.gl is imported. This allows resources to be loaded before
#: the application window is created, and permits GL objects to be
#: shared between windows even after they've been closed. You can
#: disable the creation of the shadow window by setting this option to
#: False.
#:
#: Some OpenGL driver implementations may not support shared OpenGL
#: contexts and may require disabling the shadow window (and all resources
#: must be loaded after the window using them was created). Recommended
#: for advanced developers only.
#:
#: **Since:** pyglet 1.1
#: vsync
#: If set, the `pyglet.window.Window.vsync` property is ignored, and
#: this option overrides it (to either force vsync on or off). If unset,
#: or set to None, the `pyglet.window.Window.vsync` property behaves
#: as documented.
#: xsync
#: If set (the default), pyglet will attempt to synchronise the drawing of
#: double-buffered windows to the border updates of the X11 window
#: manager. This improves the appearance of the window during resize
#: operations. This option only affects double-buffered windows on
#: X11 servers supporting the Xsync extension with a window manager
#: that implements the _NET_WM_SYNC_REQUEST protocol.
#:
#: **Since:** pyglet 1.1
#: darwin_cocoa
#: If True, the Cocoa-based pyglet implementation is used as opposed to
#: the 32-bit Carbon implementation. When python is running in 64-bit mode
#: on Mac OS X 10.6 or later, this option is set to True by default.
#: Otherwise the Carbon implementation is preferred.
#:
#: **Since:** pyglet 1.2
#:
#: search_local_libs
#: If False, pyglet won't try to search for libraries in the script
#: directory and its `lib` subdirectory. This is useful to load a local
#: library instead of the system installed version. This option is set
#: to True by default.
#:
#: **Since:** pyglet 1.2
#:
#: Global dict of pyglet option name -> value.  The defaults below may be
#: overridden from ``PYGLET_*`` environment variables; see _read_environment().
options = {
    'audio': ('directsound', 'pulse', 'openal', 'silent'),
    'font': ('gdiplus', 'win32'), # ignored outside win32; win32 is deprecated
    'debug_font': False,
    'debug_gl': not _enable_optimisations,
    'debug_gl_trace': False,
    'debug_gl_trace_args': False,
    'debug_graphics_batch': False,
    'debug_lib': False,
    'debug_media': False,
    'debug_texture': False,
    'debug_trace': False,
    'debug_trace_args': False,
    'debug_trace_depth': 1,
    'debug_trace_flush': True,
    'debug_win32': False,
    'debug_x11': False,
    'graphics_vbo': True,
    'shadow_window': True,
    'vsync': None,
    'xsync': True,
    'xlib_fullscreen_override_redirect': False,
    'darwin_cocoa': False,
    'search_local_libs': True,
}

#: Expected Python type for each option, used by _read_environment() to
#: coerce the string values of ``PYGLET_*`` environment variables.
_option_types = {
    'audio': tuple,
    'font': tuple,
    'debug_font': bool,
    'debug_gl': bool,
    'debug_gl_trace': bool,
    'debug_gl_trace_args': bool,
    'debug_graphics_batch': bool,
    'debug_lib': bool,
    'debug_media': bool,
    'debug_texture': bool,
    'debug_trace': bool,
    'debug_trace_args': bool,
    'debug_trace_depth': int,
    'debug_trace_flush': bool,
    'debug_win32': bool,
    'debug_x11': bool,
    'graphics_vbo': bool,
    'shadow_window': bool,
    'vsync': bool,
    'xsync': bool,
    'xlib_fullscreen_override_redirect': bool,
    'darwin_cocoa': bool,
    # Previously missing, which made _read_environment() silently ignore
    # the PYGLET_SEARCH_LOCAL_LIBS environment variable.
    'search_local_libs': bool,
}
def _choose_darwin_platform():
    """Choose between Darwin's Carbon and Cocoa implementations."""
    if compat_platform != 'darwin':
        return
    import struct
    pointer_bits = 8 * struct.calcsize("P")
    if pointer_bits != 64:
        # 32-bit Python: stay on the Carbon implementation.
        options['darwin_cocoa'] = False
        return
    import platform
    osx_version = platform.mac_ver()[0].split(".")
    if int(osx_version[0]) == 10 and int(osx_version[1]) < 6:
        raise Exception('pyglet is not compatible with 64-bit Python for versions of Mac OS X prior to 10.6.')
    options['darwin_cocoa'] = True
_choose_darwin_platform()  # can be overridden by an environment variable below
def _read_environment():
    '''Read defaults for options from the OS environment.

    Each key in ``options`` may be overridden by an environment variable
    named ``PYGLET_<KEY>`` (upper-cased).  Values are coerced according to
    `_option_types`: comma-separated items for tuples, truthy literals for
    booleans, ``int()`` for integers.  Keys without a declared type are
    left untouched (matching the historical silent-skip behaviour).
    '''
    for key in options:
        env = 'PYGLET_%s' % key.upper()
        try:
            value = os.environ[env]
        except KeyError:
            continue
        option_type = _option_types.get(key)
        if option_type is tuple:
            # Store a tuple, matching the type of the built-in defaults
            # (this previously left a list here).
            options[key] = tuple(value.split(','))
        elif option_type is bool:
            # Accept any capitalisation of "true", as well as "1".
            options[key] = value.lower() in ('true', '1')
        elif option_type is int:
            options[key] = int(value)
_read_environment()
if compat_platform == 'cygwin':
    # This hack pretends that the posix-like ctypes provides windows
    # functionality. COM does not work with this hack, so there is no
    # DirectSound support.
    import ctypes
    ctypes.windll = ctypes.cdll
    ctypes.oledll = ctypes.cdll
    ctypes.WINFUNCTYPE = ctypes.CFUNCTYPE
    ctypes.HRESULT = ctypes.c_long  # COM HRESULT is a 32-bit signed integer
# Call tracing
# ------------
_trace_filename_abbreviations = {}
def _trace_repr(value, size=40):
value = repr(value)
if len(value) > size:
value = value[:size//2-2] + '...' + value[-size//2-1:]
return value
def _trace_frame(thread, frame, indent):
    """Print a single frame of the call trace.

    ``thread`` is the numeric id assigned by _install_trace, ``frame`` the
    Python frame being reported, and ``indent`` the prefix showing depth.
    """
    from pyglet import lib
    # Calls into foreign functions go through lib._TraceFunction; report the
    # wrapped ctypes function's name rather than the wrapper's.
    if frame.f_code is lib._TraceFunction.__call__.func_code:
        is_ctypes = True
        func = frame.f_locals['self']._func
        name = func.__name__
        location = '[ctypes]'
    else:
        is_ctypes = False
        code = frame.f_code
        name = code.co_name
        path = code.co_filename
        line = code.co_firstlineno
        # Abbreviate long file paths to ~30 chars and cache the result.
        try:
            filename = _trace_filename_abbreviations[path]
        except KeyError:
            # Trim path down
            dir = ''
            path, filename = os.path.split(path)
            while len(dir + filename) < 30:
                filename = os.path.join(dir, filename)
                path, dir = os.path.split(path)
                if not dir:
                    filename = os.path.join('', filename)
                    break
            else:
                filename = os.path.join('...', filename)
            _trace_filename_abbreviations[path] = filename
        location = '(%s:%d)' % (filename, line)
    if indent:
        name = 'Called from %s' % name
    print '[%d] %s%s %s' % (thread, indent, name, location)
    if _trace_args:
        if is_ctypes:
            args = [_trace_repr(arg) for arg in frame.f_locals['args']]
            print '  %sargs=(%s)' % (indent, ', '.join(args))
        else:
            # Report each declared argument of the Python function, skipping
            # any whose value cannot be repr()'d.
            for argname in code.co_varnames[:code.co_argcount]:
                try:
                    argvalue = _trace_repr(frame.f_locals[argname])
                    print '  %s%s=%s' % (indent, argname, argvalue)
                except:
                    pass
    if _trace_flush:
        sys.stdout.flush()
def _thread_trace_func(thread):
    """Return a profile callback bound to the given numeric thread id."""
    def _trace_func(frame, event, arg):
        if event == 'call':
            # Print the called frame plus up to _trace_depth-1 caller frames.
            indent = ''
            for i in range(_trace_depth):
                _trace_frame(thread, frame, indent)
                indent += '  '
                frame = frame.f_back
                if not frame:
                    break
        elif event == 'exception':
            (exception, value, traceback) = arg
            print 'First chance exception raised:', repr(exception)
    return _trace_func
def _install_trace():
    """Install the call-tracing profile hook on the current thread."""
    global _trace_thread_count
    sys.setprofile(_thread_trace_func(_trace_thread_count))
    # Give each traced thread a distinct id so interleaved output can be
    # told apart.
    _trace_thread_count += 1
_trace_thread_count = 0
# Snapshot the debug_trace_* options once; the tracer reads these globals.
_trace_args = options['debug_trace_args']
_trace_depth = options['debug_trace_depth']
_trace_flush = options['debug_trace_flush']
if options['debug_trace']:
    _install_trace()
# Lazy loading
# ------------
class _ModuleProxy(object):
_module = None
def __init__(self, name):
self.__dict__['_module_name'] = name
def __getattr__(self, name):
try:
return getattr(self._module, name)
except AttributeError:
if self._module is not None:
raise
import_name = 'pyglet.%s' % self._module_name
__import__(import_name)
module = sys.modules[import_name]
object.__setattr__(self, '_module', module)
globals()[self._module_name] = module
return getattr(module, name)
def __setattr__(self, name, value):
try:
setattr(self._module, name, value)
except AttributeError:
if self._module is not None:
raise
import_name = 'pyglet.%s' % self._module_name
__import__(import_name)
module = sys.modules[import_name]
object.__setattr__(self, '_module', module)
globals()[self._module_name] = module
setattr(module, name, value)
# Instantiate a lazy-loading proxy for each public pyglet submodule.  The
# real module replaces its proxy in globals() on first attribute access --
# see _ModuleProxy above.
if True:
    app = _ModuleProxy('app')
    canvas = _ModuleProxy('canvas')
    clock = _ModuleProxy('clock')
    com = _ModuleProxy('com')
    event = _ModuleProxy('event')
    font = _ModuleProxy('font')
    gl = _ModuleProxy('gl')
    graphics = _ModuleProxy('graphics')
    image = _ModuleProxy('image')
    input = _ModuleProxy('input')
    lib = _ModuleProxy('lib')
    media = _ModuleProxy('media')
    resource = _ModuleProxy('resource')
    sprite = _ModuleProxy('sprite')
    text = _ModuleProxy('text')
    window = _ModuleProxy('window')
# Fool py2exe, py2app into including all top-level modules (doesn't understand
# lazy loading)
if False:
import app
import canvas
import clock
import com
import event
import font
import gl
import graphics
import input
import image
import lib
import media
import resource
import sprite
import text
import window
# Hack around some epydoc bug that causes it to think pyglet.window is None.
if False:
import window
| bsd-3-clause |
auferack08/edx-platform | common/lib/xmodule/xmodule/modulestore/draft_and_published.py | 4 | 4396 | """
This module provides an abstraction for Module Stores that support Draft and Published branches.
"""
import threading
from abc import ABCMeta, abstractmethod
from contextlib import contextmanager
from . import ModuleStoreEnum
# Things w/ these categories should never be marked as version=DRAFT
DIRECT_ONLY_CATEGORIES = ['course', 'chapter', 'sequential', 'about', 'static_tab', 'course_info']
class BranchSettingMixin(object):
    """
    A mixin to manage a module store's branch setting.
    The order of override is (from higher precedence to lower):
       1. thread-specific setting temporarily set using the branch_setting contextmanager
       2. the return value of the branch_setting_func passed into this mixin's init method
       3. the default branch setting being ModuleStoreEnum.Branch.published_only
    """
    def __init__(self, *args, **kwargs):
        """
        :param branch_setting_func: a function that returns the default branch setting for this object.
            If not specified, ModuleStoreEnum.Branch.published_only is used as the default setting.
        """
        # Pop our keyword argument *before* delegating to super().__init__;
        # otherwise cooperative superclasses (ultimately object.__init__)
        # receive an unexpected 'branch_setting_func' kwarg and raise.
        self.default_branch_setting_func = kwargs.pop(
            'branch_setting_func',
            lambda: ModuleStoreEnum.Branch.published_only
        )
        super(BranchSettingMixin, self).__init__(*args, **kwargs)
        # cache the branch setting on a local thread to support a multi-threaded environment
        self.thread_cache = threading.local()
    @contextmanager
    def branch_setting(self, branch_setting, course_id=None):  # pylint: disable=unused-argument
        """
        A context manager for temporarily setting a store's branch value on the current thread.
        """
        previous_thread_branch_setting = getattr(self.thread_cache, 'branch_setting', None)
        try:
            self.thread_cache.branch_setting = branch_setting
            yield
        finally:
            # Restore whatever was in effect before, even if the body raised.
            self.thread_cache.branch_setting = previous_thread_branch_setting
    def get_branch_setting(self, course_id=None):  # pylint: disable=unused-argument
        """
        Returns the current branch_setting on the store.
        Returns the thread-local setting, if set.
        Otherwise, returns the default value of the setting function set during the store's initialization.
        """
        # first check the thread-local cache
        thread_local_branch_setting = getattr(self.thread_cache, 'branch_setting', None)
        if thread_local_branch_setting:
            return thread_local_branch_setting
        else:
            # return the default value
            return self.default_branch_setting_func()
class ModuleStoreDraftAndPublished(BranchSettingMixin):
    """
    A mixin for a read-write database backend that supports two branches, Draft and Published, with
    options to prefer Draft and fallback to Published.
    """
    # Python 2 style abstract-base-class declaration.
    __metaclass__ = ABCMeta
    def __init__(self, *args, **kwargs):
        super(ModuleStoreDraftAndPublished, self).__init__(*args, **kwargs)
    @abstractmethod
    def delete_item(self, location, user_id, revision=None, **kwargs):
        """Delete the item at ``location``; ``revision`` selects which branch is affected."""
        raise NotImplementedError
    @abstractmethod
    def get_parent_location(self, location, revision=None, **kwargs):
        """Return the location of the parent of the item at ``location``."""
        raise NotImplementedError
    @abstractmethod
    def has_changes(self, xblock):
        """Return whether ``xblock`` differs between the draft and published branches."""
        raise NotImplementedError
    @abstractmethod
    def publish(self, location, user_id):
        """Publish the item at ``location`` on behalf of ``user_id``."""
        raise NotImplementedError
    @abstractmethod
    def unpublish(self, location, user_id):
        """Remove the published version of the item at ``location``."""
        raise NotImplementedError
    @abstractmethod
    def revert_to_published(self, location, user_id):
        """Discard draft changes for the item at ``location``."""
        raise NotImplementedError
    @abstractmethod
    def compute_publish_state(self, xblock):
        """Return the publish state of ``xblock``."""
        raise NotImplementedError
    @abstractmethod
    def convert_to_draft(self, location, user_id):
        """Create a draft version of the item at ``location``."""
        raise NotImplementedError
class UnsupportedRevisionError(ValueError):
    """
    This error is raised if a method is called with an unsupported revision parameter.
    """
    def __init__(self, allowed_revisions=None):
        # Fall back to the standard set of supported revisions when the
        # caller does not name the ones it accepts.
        allowed_revisions = allowed_revisions or [
            None,
            ModuleStoreEnum.RevisionOption.published_only,
            ModuleStoreEnum.RevisionOption.draft_only
        ]
        message = 'revision not one of {}'.format(allowed_revisions)
        super(UnsupportedRevisionError, self).__init__(message)
| agpl-3.0 |
gnuhub/intellij-community | python/lib/Lib/encodings/iso8859_15.py | 593 | 13468 | """ Python Character Mapping Codec iso8859_15 generated from 'MAPPINGS/ISO8859/8859-15.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless ISO-8859-15 codec backed by the generated charmap tables."""
    def encode(self,input,errors='strict'):
        # unicode -> bytes via the generated encoding_table below.
        return codecs.charmap_encode(input,errors,encoding_table)
    def decode(self,input,errors='strict'):
        # bytes -> unicode via the generated decoding_table below.
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    # Charmap encoding keeps no state between calls, so each chunk is
    # encoded independently; only the mapped bytes (index 0) are returned.
    def encode(self, input, final=False):
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    # Single-byte charmap decoding needs no inter-call state either.
    def decode(self, input, final=False):
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Inherits encode() from Codec above; no extra behaviour needed.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Inherits decode() from Codec above; no extra behaviour needed.
    pass
### encodings module API
def getregentry():
    """Return the codecs.CodecInfo entry for the 'iso8859-15' encoding."""
    return codecs.CodecInfo(
        name='iso8859-15',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\x80' # 0x80 -> <control>
u'\x81' # 0x81 -> <control>
u'\x82' # 0x82 -> <control>
u'\x83' # 0x83 -> <control>
u'\x84' # 0x84 -> <control>
u'\x85' # 0x85 -> <control>
u'\x86' # 0x86 -> <control>
u'\x87' # 0x87 -> <control>
u'\x88' # 0x88 -> <control>
u'\x89' # 0x89 -> <control>
u'\x8a' # 0x8A -> <control>
u'\x8b' # 0x8B -> <control>
u'\x8c' # 0x8C -> <control>
u'\x8d' # 0x8D -> <control>
u'\x8e' # 0x8E -> <control>
u'\x8f' # 0x8F -> <control>
u'\x90' # 0x90 -> <control>
u'\x91' # 0x91 -> <control>
u'\x92' # 0x92 -> <control>
u'\x93' # 0x93 -> <control>
u'\x94' # 0x94 -> <control>
u'\x95' # 0x95 -> <control>
u'\x96' # 0x96 -> <control>
u'\x97' # 0x97 -> <control>
u'\x98' # 0x98 -> <control>
u'\x99' # 0x99 -> <control>
u'\x9a' # 0x9A -> <control>
u'\x9b' # 0x9B -> <control>
u'\x9c' # 0x9C -> <control>
u'\x9d' # 0x9D -> <control>
u'\x9e' # 0x9E -> <control>
u'\x9f' # 0x9F -> <control>
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\xa1' # 0xA1 -> INVERTED EXCLAMATION MARK
u'\xa2' # 0xA2 -> CENT SIGN
u'\xa3' # 0xA3 -> POUND SIGN
u'\u20ac' # 0xA4 -> EURO SIGN
u'\xa5' # 0xA5 -> YEN SIGN
u'\u0160' # 0xA6 -> LATIN CAPITAL LETTER S WITH CARON
u'\xa7' # 0xA7 -> SECTION SIGN
u'\u0161' # 0xA8 -> LATIN SMALL LETTER S WITH CARON
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\xaa' # 0xAA -> FEMININE ORDINAL INDICATOR
u'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xac' # 0xAC -> NOT SIGN
u'\xad' # 0xAD -> SOFT HYPHEN
u'\xae' # 0xAE -> REGISTERED SIGN
u'\xaf' # 0xAF -> MACRON
u'\xb0' # 0xB0 -> DEGREE SIGN
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\xb2' # 0xB2 -> SUPERSCRIPT TWO
u'\xb3' # 0xB3 -> SUPERSCRIPT THREE
u'\u017d' # 0xB4 -> LATIN CAPITAL LETTER Z WITH CARON
u'\xb5' # 0xB5 -> MICRO SIGN
u'\xb6' # 0xB6 -> PILCROW SIGN
u'\xb7' # 0xB7 -> MIDDLE DOT
u'\u017e' # 0xB8 -> LATIN SMALL LETTER Z WITH CARON
u'\xb9' # 0xB9 -> SUPERSCRIPT ONE
u'\xba' # 0xBA -> MASCULINE ORDINAL INDICATOR
u'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u0152' # 0xBC -> LATIN CAPITAL LIGATURE OE
u'\u0153' # 0xBD -> LATIN SMALL LIGATURE OE
u'\u0178' # 0xBE -> LATIN CAPITAL LETTER Y WITH DIAERESIS
u'\xbf' # 0xBF -> INVERTED QUESTION MARK
u'\xc0' # 0xC0 -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\xc3' # 0xC3 -> LATIN CAPITAL LETTER A WITH TILDE
u'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc6' # 0xC6 -> LATIN CAPITAL LETTER AE
u'\xc7' # 0xC7 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xc8' # 0xC8 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xca' # 0xCA -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\xcc' # 0xCC -> LATIN CAPITAL LETTER I WITH GRAVE
u'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0xCF -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\xd0' # 0xD0 -> LATIN CAPITAL LETTER ETH
u'\xd1' # 0xD1 -> LATIN CAPITAL LETTER N WITH TILDE
u'\xd2' # 0xD2 -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\xd5' # 0xD5 -> LATIN CAPITAL LETTER O WITH TILDE
u'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xd7' # 0xD7 -> MULTIPLICATION SIGN
u'\xd8' # 0xD8 -> LATIN CAPITAL LETTER O WITH STROKE
u'\xd9' # 0xD9 -> LATIN CAPITAL LETTER U WITH GRAVE
u'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xdd' # 0xDD -> LATIN CAPITAL LETTER Y WITH ACUTE
u'\xde' # 0xDE -> LATIN CAPITAL LETTER THORN
u'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S
u'\xe0' # 0xE0 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe3' # 0xE3 -> LATIN SMALL LETTER A WITH TILDE
u'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe6' # 0xE6 -> LATIN SMALL LETTER AE
u'\xe7' # 0xE7 -> LATIN SMALL LETTER C WITH CEDILLA
u'\xe8' # 0xE8 -> LATIN SMALL LETTER E WITH GRAVE
u'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
u'\xea' # 0xEA -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xec' # 0xEC -> LATIN SMALL LETTER I WITH GRAVE
u'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
u'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0xEF -> LATIN SMALL LETTER I WITH DIAERESIS
u'\xf0' # 0xF0 -> LATIN SMALL LETTER ETH
u'\xf1' # 0xF1 -> LATIN SMALL LETTER N WITH TILDE
u'\xf2' # 0xF2 -> LATIN SMALL LETTER O WITH GRAVE
u'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE
u'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf5' # 0xF5 -> LATIN SMALL LETTER O WITH TILDE
u'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf7' # 0xF7 -> DIVISION SIGN
u'\xf8' # 0xF8 -> LATIN SMALL LETTER O WITH STROKE
u'\xf9' # 0xF9 -> LATIN SMALL LETTER U WITH GRAVE
u'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
u'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
u'\xfd' # 0xFD -> LATIN SMALL LETTER Y WITH ACUTE
u'\xfe' # 0xFE -> LATIN SMALL LETTER THORN
u'\xff' # 0xFF -> LATIN SMALL LETTER Y WITH DIAERESIS
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
| apache-2.0 |
Sabayon/entropy | server/eit/commands/repack.py | 1 | 2118 | # -*- coding: utf-8 -*-
"""
@author: Fabio Erculiani <lxnay@sabayon.org>
@contact: lxnay@sabayon.org
@copyright: Fabio Erculiani
@license: GPL-2
B{Entropy Infrastructure Toolkit}.
"""
import sys
import argparse
from entropy.i18n import _
from _entropy.eit.commands.descriptor import EitCommandDescriptor
from _entropy.eit.commands.commit import EitCommit
class EitRepack(EitCommit):
    """
    Main Eit repack command.

    Subclasses EitCommit: parse() stores the requested package names in
    ``self._repackage`` and then delegates the actual work to the
    inherited commit machinery.
    """

    NAME = "repack"
    ALIASES = ["rp"]
    # Help text shown to users; fixed typo "Recrate" -> "Recreate".
    INTRODUCTION = """\
Recreate the whole Entropy package from live system through
the Source Package Manager. This allows the latter to regenerate
its metadata (useful in case of dependency changes).
The package must be already available in the queried repository.
"""
    SEE_ALSO = "eit-add(1), eit-commit(1)"

    def _get_parser(self):
        """ Overridden from EitCommit """
        descriptor = EitCommandDescriptor.obtain_descriptor(
            EitRepack.NAME)
        parser = argparse.ArgumentParser(
            description=descriptor.get_description(),
            formatter_class=argparse.RawDescriptionHelpFormatter,
            prog="%s %s" % (sys.argv[0], EitRepack.NAME))
        parser.add_argument("packages", nargs='+', metavar="<package>",
                            help=_("package names"))
        parser.add_argument("--in", metavar="<repository>",
                            help=_("repack to given repository"),
                            default=None, dest="into")
        return parser

    def parse(self):
        """ Overridden from EitCommit """
        parser = self._get_parser()
        try:
            nsargs = parser.parse_args(self._args)
        except IOError as err:
            sys.stderr.write("%s\n" % (err,))
            return parser.print_help, []
        # setup atoms variable before spawning commit
        self._repackage = nsargs.packages[:]
        return self._call_exclusive, [self._commit, nsargs.into]
# Register this command with the global Eit command dispatcher so that
# ``eit repack`` (and its alias ``rp``) resolves to EitRepack.
EitCommandDescriptor.register(
    EitCommandDescriptor(
        EitRepack,
        EitRepack.NAME,
        _('rebuild packages in repository'))
    )
| gpl-2.0 |
pybel/pybel-tools | src/pybel_tools/document_utils.py | 1 | 6764 | # -*- coding: utf-8 -*-
"""Utilities to merge multiple BEL documents on the same topic."""
import logging
from typing import Iterable, Mapping, Optional, Set, TextIO, Union
from xml.etree import ElementTree
import pandas as pd
import requests
from bel_resources import make_knowledge_header
from pybel.utils import ensure_quotes
__all__ = [
'write_boilerplate',
]
logger = logging.getLogger(__name__)
abstract_url_fmt = "http://togows.dbcls.jp/entry/ncbi-pubmed/{}/abstract"
title_url_fmt = "http://togows.dbcls.jp/entry/ncbi-pubmed/{}/title"
#: SO gives short citation information
so_url_fmt = "http://togows.dbcls.jp/entry/ncbi-pubmed/{}/so"
def make_pubmed_abstract_group(pmids: Iterable[Union[str, int]]) -> Iterable[str]:
    """Build a skeleton for the citations' statements.

    :param pmids: A list of PubMed identifiers
    :return: An iterator over the lines of the citation section
    """
    def _fetch(url_fmt, pmid):
        # Retrieve one TogoWS record and normalise it to stripped text.
        response = requests.get(url_fmt.format(pmid))
        return response.content.decode('utf-8').strip()

    for pmid in set(pmids):
        yield ''
        title = _fetch(title_url_fmt, pmid)
        yield f'SET Citation = {{"{title}", "{pmid}"}}'
        abstract = _fetch(abstract_url_fmt, pmid)
        yield f'SET Evidence = "{abstract}"'
        yield '\nUNSET Evidence\nUNSET Citation'
def _sanitize(s):
if s is not None:
return s.strip().replace('\n', '')
#: Allows for querying the Entrez Gene Summary utility by formatting with an entrez id or list of comma seperated ids
PUBMED_GENE_QUERY_URL = 'https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esummary.fcgi?db=gene&id={}'
def get_entrez_gene_data(entrez_ids: Iterable[Union[str, int]]):
    """Get gene info from Entrez.

    Returns a dict keyed by Entrez UID with ``summary`` and ``description``
    fields parsed from the esummary XML response.
    """
    joined_ids = ','.join(str(eid).strip() for eid in entrez_ids)
    response = requests.get(PUBMED_GENE_QUERY_URL.format(joined_ids))
    root = ElementTree.fromstring(response.content)
    results = {}
    for document in root.findall('./DocumentSummarySet/DocumentSummary'):
        results[document.attrib['uid']] = {
            'summary': _sanitize(document.find('Summary').text),
            'description': document.find('Description').text
        }
    return results
def make_pubmed_gene_group(entrez_ids: Iterable[Union[str, int]]) -> Iterable[str]:
    """Build a skeleton for gene summaries.

    :param entrez_ids: A list of Entrez Gene identifiers to query the PubMed service
    :return: An iterator over statement lines for NCBI Entrez Gene summaries
    """
    joined_ids = ','.join(str(eid).strip() for eid in entrez_ids)
    response = requests.get(PUBMED_GENE_QUERY_URL.format(joined_ids))
    root = ElementTree.fromstring(response.content)
    for document in root.findall('./DocumentSummarySet/DocumentSummary'):
        description = document.find('Description').text
        yield f'\n# {description}'
        uid = document.attrib['uid']
        yield f'SET Citation = {{"Other", "PubMed Gene", "{uid}"}}'
        summary = document.find('Summary').text.strip().replace('\n', '')
        yield f'SET Evidence = "{summary}"'
        yield '\nUNSET Evidence\nUNSET Citation'
def write_boilerplate(
    name: str,
    version: Optional[str] = None,
    description: Optional[str] = None,
    authors: Optional[str] = None,
    contact: Optional[str] = None,
    copyright: Optional[str] = None,
    licenses: Optional[str] = None,
    disclaimer: Optional[str] = None,
    namespace_url: Optional[Mapping[str, str]] = None,
    namespace_patterns: Optional[Mapping[str, str]] = None,
    annotation_url: Optional[Mapping[str, str]] = None,
    annotation_patterns: Optional[Mapping[str, str]] = None,
    annotation_list: Optional[Mapping[str, Set[str]]] = None,
    pmids: Optional[Iterable[Union[str, int]]] = None,
    entrez_ids: Optional[Iterable[Union[str, int]]] = None,
    file: Optional[TextIO] = None,
) -> None:
    """Write a boilerplate BEL document with standard metadata and definitions.

    :param name: The unique name for this BEL document
    :param version: The version. Defaults to ``1.0.0``.
    :param description: A description of the contents of this document
    :param authors: The authors of this document
    :param contact: The email address of the maintainer
    :param copyright: Copyright information about this document
    :param licenses: The license applied to this document
    :param disclaimer: The disclaimer for this document
    :param namespace_url: an optional dictionary of {str name: str URL} of namespaces
    :param namespace_patterns: An optional dictionary of {str name: str regex} namespaces
    :param annotation_url: An optional dictionary of {str name: str URL} of annotations
    :param annotation_patterns: An optional dictionary of {str name: str regex} of regex annotations
    :param annotation_list: An optional dictionary of {str name: set of names} of list annotations
    :param pmids: A list of PubMed identifiers to auto-populate with citation and abstract
    :param entrez_ids: A list of Entrez identifiers to auto-populate the gene summary as evidence
    :param file: A writable file or file-like. If None, defaults to :data:`sys.stdout`
    """
    header_lines = make_knowledge_header(
        name=name,
        version=version or '1.0.0',
        description=description,
        authors=authors,
        contact=contact,
        copyright=copyright,
        licenses=licenses,
        disclaimer=disclaimer,
        namespace_url=namespace_url,
        namespace_patterns=namespace_patterns,
        annotation_url=annotation_url,
        annotation_patterns=annotation_patterns,
        annotation_list=annotation_list,
    )
    for header_line in header_lines:
        print(header_line, file=file)

    if pmids is not None:
        for citation_line in make_pubmed_abstract_group(pmids):
            print(citation_line, file=file)

    if entrez_ids is not None:
        for gene_line in make_pubmed_gene_group(entrez_ids):
            print(gene_line, file=file)
def replace_selventa_namespaces(path: str) -> None:
    """Update SFAM/SCOM namespaces to FamPlex in the BEL document at *path*.

    Downloads the FamPlex equivalence table, builds a mapping from Selventa
    ``SFAM``/``SCOMP`` terms to ``FPLX`` terms, then rewrites the file in
    place, reporting each replaced line.
    """
    df = pd.read_csv(
        'https://raw.githubusercontent.com/johnbachman/famplex/master/equivalences.csv',
        names=['namespace', 'label', 'famplex']
    )
    # Filter to namespace BEL (the Selventa rows).
    df = df[df.namespace == 'BEL']

    mapping_dict = {}
    for _, label, famplex in df.values:
        for p in 'SCOMP', 'SFAM':
            mapping_dict[f'{p}:{ensure_quotes(label)}'] = f'FPLX:{ensure_quotes(famplex)}'

    lines = []
    with open(path) as file:
        for line in file:
            for k, v in mapping_dict.items():
                if k in line:
                    # Fixed typo in the progress message (was "Upgrating").
                    print(f'Upgrading line {k} to {v}')
                    line = line.replace(k, v)
            lines.append(line.strip('\n'))

    with open(path, 'w') as file:
        for line in lines:
            print(line, file=file)
| mit |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.