repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
jokerfr9/DragonsKernel_Kylessopen | tools/perf/scripts/python/syscall-counts-by-pid.py | 11180 | 1927 | # system call counts, by pid
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
# Usage string printed when too many command-line arguments are supplied
usage = "perf script -s syscall-counts-by-pid.py [comm]\n";

# Optional filters: restrict counting to one comm or one pid
for_comm = None
for_pid = None

if len(sys.argv) > 2:
    sys.exit(usage)

if len(sys.argv) > 1:
    try:
        # A numeric argument is treated as a pid filter ...
        for_pid = int(sys.argv[1])
    except:
        # ... otherwise as a command-name (comm) filter
        for_comm = sys.argv[1]

# Nested autodict: syscalls[comm][pid][syscall_id] -> event count
syscalls = autodict()
def trace_begin():
    # Called by perf before any events are processed
    print "Press control+C to stop and show the summary"
def trace_end():
    # Called by perf after the last event: emit the accumulated totals
    print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    id, args):
    # Handler invoked by perf for each raw_syscalls:sys_enter event.
    # Skip events that don't match the requested comm/pid filter (if any).
    if (for_comm and common_comm != for_comm) or \
       (for_pid and common_pid != for_pid ):
        return

    # autodict leaves raise TypeError on first arithmetic access,
    # so initialise the counter to 1 in that case.
    try:
        syscalls[common_comm][common_pid][id] += 1
    except TypeError:
        syscalls[common_comm][common_pid][id] = 1
def print_syscall_totals():
    # Print a table of syscall counts, grouped by comm and pid,
    # sorted by descending count within each pid.
    if for_comm is not None:
        print "\nsyscall events for %s:\n\n" % (for_comm),
    else:
        print "\nsyscall events by comm/pid:\n\n",

    print "%-40s %10s\n" % ("comm [pid]/syscalls", "count"),
    print "%-40s %10s\n" % ("----------------------------------------", \
        "----------"),

    comm_keys = syscalls.keys()
    for comm in comm_keys:
        pid_keys = syscalls[comm].keys()
        for pid in pid_keys:
            print "\n%s [%d]\n" % (comm, pid),
            id_keys = syscalls[comm][pid].keys()
            # Sort this pid's syscalls by count (descending), then by id
            for id, val in sorted(syscalls[comm][pid].iteritems(), \
                key = lambda(k, v): (v, k), reverse = True):
                print " %-38s %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
hogarthj/ansible | test/units/modules/network/f5/test_bigip_hostname.py | 28 | 3226 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import sys
from nose.plugins.skip import SkipTest
if sys.version_info < (2, 7):
raise SkipTest("F5 Ansible modules require Python >= 2.7")
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import Mock
from ansible.compat.tests.mock import patch
from ansible.module_utils.basic import AnsibleModule
try:
from library.bigip_hostname import Parameters
from library.bigip_hostname import ModuleManager
from library.bigip_hostname import ArgumentSpec
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
from test.unit.modules.utils import set_module_args
except ImportError:
try:
from ansible.modules.network.f5.bigip_hostname import Parameters
from ansible.modules.network.f5.bigip_hostname import ModuleManager
from ansible.modules.network.f5.bigip_hostname import ArgumentSpec
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
from units.modules.utils import set_module_args
except ImportError:
raise SkipTest("F5 Ansible modules require the f5-sdk Python library")
# Directory holding the JSON fixtures used by these tests
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
# Per-path cache of loaded (and, where possible, JSON-parsed) fixtures
fixture_data = {}
def load_fixture(name):
    """Load a fixture file by name, caching results per absolute path.

    The content is returned parsed as JSON when possible, otherwise as
    the raw file text.
    """
    path = os.path.join(fixture_path, name)

    # Serve from the module-level cache when available
    try:
        return fixture_data[path]
    except KeyError:
        pass

    with open(path) as f:
        raw = f.read()

    # Fixtures may be JSON or plain text; fall back to the raw string
    try:
        parsed = json.loads(raw)
    except Exception:
        parsed = raw

    fixture_data[path] = parsed
    return parsed
class TestParameters(unittest.TestCase):
    """Unit tests for the bigip_hostname Parameters adapter."""

    def test_module_parameters(self):
        # Raw module arguments should pass straight through to the
        # corresponding Parameters attribute.
        raw = {'hostname': 'foo.internal.com'}
        params = Parameters(params=raw)
        assert params.hostname == 'foo.internal.com'
class TestManager(unittest.TestCase):
    """Tests for the bigip_hostname ModuleManager update flow."""

    def setUp(self):
        self.spec = ArgumentSpec()

    def test_update_hostname(self, *args):
        # Simulate the user supplying a new hostname via module args
        set_module_args(dict(
            hostname='foo2.internal.com',
            password='passsword',
            server='localhost',
            user='admin'
        ))

        # Configure the parameters that would be returned by querying the
        # remote device
        current = Parameters(
            dict(
                hostname='foo.internal.com'
            )
        )

        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods to force specific logic in the module to happen
        # (no live device is contacted in this test)
        mm = ModuleManager(module=module)
        mm.update_on_device = Mock(return_value=True)
        mm.read_current_from_device = Mock(return_value=current)

        results = mm.exec_module()

        # The manager must report a change and echo the new hostname
        assert results['changed'] is True
        assert results['hostname'] == 'foo2.internal.com'
| gpl-3.0 |
flavour/eden | controllers/gis.py | 1 | 154445 | # -*- coding: utf-8 -*-
"""
GIS Controllers
"""
# Current controller & function names (used for method registrations below)
module = request.controller
resourcename = request.function

# Compact JSON encoding
SEPARATORS = (",", ":")
# -----------------------------------------------------------------------------
def index():
    """
        Module's Home Page: Show the Main map

        Returns a dict with the embedded map widget and page title for
        the view.
    """

    module_name = settings.modules[module].get("name_nice")
    response.title = module_name

    # Read user request
    # URL vars can override the deployment's toolbar/collapsed settings
    get_vars_get = get_vars.get
    height = get_vars_get("height", None)
    width = get_vars_get("width", None)
    toolbar = get_vars_get("toolbar", None)
    if toolbar is None:
        toolbar = settings.get_gis_toolbar()
    elif toolbar == "0":
        toolbar = False
    else:
        toolbar = True

    collapsed = get_vars_get("collapsed", False)
    if collapsed:
        collapsed = True

    iframe = get_vars_get("iframe", False)
    if iframe:
        response.view = "gis/iframe.html"
    else:
        # Code to go fullscreen
        # IE (even 9) doesn't like the dynamic full-screen, so simply do a page refresh for now
        # Remove components from embedded Map's containers without destroying their contents
        # Add a full-screen window which will inherit these components
        if s3.debug:
            script = "/%s/static/scripts/S3/s3.gis.fullscreen.js" % appname
        else:
            script = "/%s/static/scripts/S3/s3.gis.fullscreen.min.js" % appname
        s3.scripts.append(script)
        # Allow us to target CSS to make map full-width
        s3.jquery_ready.append('''$('body').addClass('gis')''')

    save = settings.get_gis_save()
    if not save:
        # No server-side save: show screenshot instructions instead
        help = T("To Print or Share the Map you will have to take a screenshot. If you need help taking a screen shot, have a look at these instructions for %(windows)s or %(mac)s") \
            % {"windows": "<a href='http://www.wikihow.com/Take-a-Screenshot-in-Microsoft-Windows' target='_blank'>Windows</a>",
               "mac": "<a href='http://www.wikihow.com/Take-a-Screenshot-in-Mac-OS-X' target='_blank'>Mac</a>",
               }
        script = '''i18n.gis_print_help="%s"''' % help
        s3.js_global.append(script)
        script = "/%s/static/scripts/S3/s3.gis.print_help.js" % appname
        s3.scripts.append(script)

    # Include an embedded Map on the index page
    map = define_map(height = height,
                     width = width,
                     window = False,
                     toolbar = toolbar,
                     collapsed = collapsed,
                     closable = False,
                     maximizable = False,
                     save = save,
                     )

    return {"map": map,
            "title": T("Map"),
            }
# =============================================================================
def map_viewing_client():
    """
        Map Viewing Client.
        UI for a user to view the overall Maps with associated Features
    """

    # Read user request: ?print=... switches to a print-friendly layout
    printing = bool(get_vars.get("print", None))
    if printing:
        # Print layout: strip the interactive chrome from the map
        ui_options = {"collapsed": True,
                      "mouse_position": False,
                      "print_mode": True,
                      "toolbar": False,
                      "zoomcontrol": False,
                      }
    else:
        # Interactive layout; None means "use deployment_settings"
        ui_options = {"collapsed": False,
                      "mouse_position": None,
                      "print_mode": False,
                      "toolbar": True,
                      "zoomcontrol": None,
                      }

    map = define_map(window = True,
                     closable = False,
                     maximizable = False,
                     save = settings.get_gis_save(),
                     **ui_options
                     )

    response.title = T("Map Viewing Client")
    return {"map": map}
# -----------------------------------------------------------------------------
def define_map(height = None,
               width = None,
               window = False,
               toolbar = False,
               closable = True,
               collapsed = False,
               maximizable = True,
               mouse_position = None,
               print_mode = False,
               save = False,
               zoomcontrol = None,
               ):
    """
        Define the main Situation Map
        This is called from both the Index page (embedded)
        & the Map_Viewing_Client (fullscreen)

        @param height: map height in pixels (None = default)
        @param width: map width in pixels (None = default)
        @param window: open the map in a fullscreen window
        @param toolbar: show the map toolbar
        @param closable: whether the map window can be closed
        @param collapsed: start with the side panel collapsed
        @param maximizable: whether the map window can be maximized
        @param mouse_position: show mouse coordinates (None = use deployment settings)
        @param print_mode: render for printing
        @param save: allow the user to save the map config
        @param zoomcontrol: show zoom controls (None = use deployment settings)

        @return: the map widget from gis.show_map()
    """

    # Optional explicit config via ?config=<id>
    config = get_vars.get("config", None)
    if config:
        try:
            config = int(config)
        except:
            config = None
        else:
            config = gis.set_config(config)
    if not config:
        config = gis.get_config()

    legend = settings.get_gis_legend()
    search = settings.get_gis_search_geonames()

    if config.wmsbrowser_url:
        wms_browser = {"name": config.wmsbrowser_name,
                       "url": config.wmsbrowser_url,
                       }
    else:
        wms_browser = None

    # Do we allow creation of PoIs from the main Map?
    add_feature = add_line = add_polygon = False
    poi_resources = settings.get_gis_poi_create_resources()
    if poi_resources:
        layers = []
        # Remove those which this user doesn't have permissions to create
        not_permitted = []
        for res in poi_resources:
            permit = auth.s3_has_permission("create", res["table"], c=res["c"], f=res["f"])
            if permit:
                # Store the layer name
                layers.append(res["layer"])
                # Enable the relevant button
                # @ToDo: Support Menus / Popups
                feature_type = res.get("type", None)
                if feature_type == "line":
                    add_line = True
                elif feature_type == "polygon":
                    add_polygon = True
                else:
                    # Default
                    add_feature = True
            else:
                # Remove from list
                not_permitted.append(res)
        poi_resources = list(poi_resources)
        for res in not_permitted:
            poi_resources.remove(res)

        if poi_resources:
            # Lookup Layer IDs
            ftable = s3db.gis_layer_feature
            rows = db(ftable.name.belongs(layers)).select(ftable.layer_id, ftable.name)
            layers_lookup = {}
            for row in rows:
                layers_lookup[row.name] = row.layer_id

            # Prepare JSON data structure
            pois = []
            s3_unicode = s3base.s3_unicode
            for res in poi_resources:
                poi = {"c": res["c"],
                       "f": res["f"],
                       "l": s3_unicode(res["label"]),
                       #"t": s3_unicode(res["tooltip"]),
                       "i": layers_lookup.get(res["layer"], None),
                       "t": res.get("type", "point"),
                       }
                pois.append(poi)
            # Inject client-side JS
            script = '''S3.gis.poi_resources=%s''' % json.dumps(pois, separators=SEPARATORS)
            s3.js_global.append(script)
            if s3.debug:
                script = "/%s/static/scripts/S3/s3.gis.pois.js" % appname
            else:
                script = "/%s/static/scripts/S3/s3.gis.pois.min.js" % appname
            s3.scripts.append(script)

    # Defaults unless a specific PoI is being focussed on
    # (get_vars checks happen inside s3gis.py)
    lat = None
    lon = None
    feature_resources = None

    # Are we wanting to display a specific PoI Marker?
    poi = get_vars.get("poi", None)
    if poi:
        ptable = s3db.gis_poi
        gtable = db.gis_location
        query = (ptable.id == poi) & \
                (ptable.location_id == gtable.id)
        record = db(query).select(gtable.lat,
                                  gtable.lon,
                                  limitby=(0, 1)
                                  ).first()
        if record:
            lat = record.lat
            lon = record.lon
            filter_url = "~.id=%s" % poi
            # @ToDo: Generalise with feature/tablename?
            # Look the table up locally: previously this relied on ftable
            # having been set in the poi_resources branch above, which
            # raised NameError when no PoI resources were configured
            ftable = s3db.gis_layer_feature
            layer = db(ftable.name == "PoIs").select(ftable.layer_id,
                                                     limitby=(0, 1)
                                                     ).first()
            if layer:
                feature_resources = [{"name": T("PoI"),
                                      "id": "PoI",
                                      "layer_id": layer.layer_id,
                                      "filter": filter_url,
                                      "active": True,
                                      },
                                     ]
            # else: leave feature_resources as None (previously this path
            # left the name unbound and crashed in gis.show_map)

    map = gis.show_map(height = height,
                       width = width,
                       lat = lat,
                       lon = lon,
                       add_feature = add_feature,
                       add_line = add_line,
                       add_polygon = add_polygon,
                       catalogue_layers = True,
                       feature_resources = feature_resources,
                       legend = legend,
                       mouse_position = mouse_position,
                       print_mode = print_mode,
                       save = save,
                       search = search,
                       toolbar = toolbar,
                       wms_browser = wms_browser,
                       collapsed = collapsed,
                       closable = closable,
                       maximizable = maximizable,
                       window = window,
                       zoomcontrol = zoomcontrol,
                       )

    return map
# =============================================================================
def map2():
    """
        Work-in-Progress update of map_viewing_client to OpenLayers 6
    """

    from s3.s3gis import MAP2
    map_widget = MAP2(catalogue_layers = True)
    return {"map": map_widget}
# =============================================================================
def location():
    """
        RESTful CRUD controller for Locations

        Configures filters, custom methods (PoI import/export, parents
        lookup), an optional report virtual field, and an embedded map
        for create/update/read views, then hands off to
        s3_rest_controller().
    """

    tablename = "gis_location"
    table = s3db.gis_location

    # Custom Methods
    set_method = s3db.set_method
    from s3 import S3ExportPOI
    set_method("gis", "location",
               method = "export_poi",
               action = S3ExportPOI())
    from s3 import S3ImportPOI
    set_method("gis", "location",
               method = "import_poi",
               action = S3ImportPOI())
    set_method("gis", "location",
               method = "parents",
               action = s3_gis_location_parents)

    location_hierarchy = gis.get_location_hierarchy()
    from s3 import S3TextFilter, S3OptionsFilter#, S3LocationFilter
    search_fields = ["name",
                     "comments",
                     "tag.value",
                     ]
    if settings.get_L10n_translate_gis_location():
        search_fields.append("name.name_l10n")
    if settings.get_L10n_name_alt_gis_location():
        search_fields.append("name_alt.name_alt")

    # One options filter per hierarchy level; only L0 starts visible
    filter_level_widgets = []
    for level, level_label in location_hierarchy.items():
        search_fields.append(level)
        hidden = False if level == "L0" else True
        filter_level_widgets.append(S3OptionsFilter(level,
                                                    label = level_label,
                                                    #cols = 5,
                                                    hidden = hidden,
                                                    ))

    filter_widgets = [
        S3TextFilter(search_fields,
                     label = T("Search"),
                     comment = T("To search for a location, enter the name. You may use % as wildcard. Press 'Search' without input to list all locations."),
                     #_class = "filter-search",
                     ),
        # @ToDo: Hierarchical filter working on id
        #S3LocationFilter("id",
        #                 label = T("Location"),
        #                 levels = ("L0", "L1", "L2", "L3",),
        #                 #hidden = True,
        #                 ),
        ]
    if get_vars.get("~.level") != "None":
        filter_widgets.append(S3OptionsFilter("level",
                                              label = T("Level"),
                                              options = location_hierarchy,
                                              #hidden = True,
                                              ))
    filter_widgets.extend(filter_level_widgets)

    s3db.configure(tablename,
                   filter_widgets = filter_widgets,
                   # Don't include Bulky Location Selector in List Views
                   listadd = False,
                   )

    if "report" in request.args:
        # @ToDo: Migrate to Field.Method
        class S3LocationVirtualFields:
            def population(self):
                """
                    Used by the Report
                    Looks up the "population" tag for this location
                """
                table = current.s3db.gis_location_tag
                query = (table.location_id == self.gis_location.id) & \
                        (table.tag == "population")
                location = current.db(query).select(table.value,
                                                    limitby=(0, 1)).first()
                if location:
                    return int(location.value)
                else:
                    return None

        table.virtualfields.append(S3LocationVirtualFields())

        s3db.configure(tablename,
                       report_options = Storage(
                           rows = ["name"],
                           cols = [],
                           fact = [(T("Total Population"), "sum(population)")],
                           defaults = Storage(rows="name",
                                              cols=None,
                                              fact="sum(population)",
                                              totals=True
                                              )
                           ),
                       )

    # Pre-processor
    # Allow prep to pass vars back to the controller
    prep_vars = {}
    def prep(r, prep_vars):
        if r.interactive and not r.component:
            # Restrict access to Polygons to just MapAdmins
            if settings.get_security_map() and not auth.s3_has_role("MAP_ADMIN"):
                table.gis_feature_type.writable = table.gis_feature_type.readable = False
                table.wkt.writable = table.wkt.readable = False
            else:
                table.wkt.comment = DIV(_class="stickytip",
                                        _title="WKT|%s %s%s %s%s" % (T("The"),
                                                                     "<a href='http://en.wikipedia.org/wiki/Well-known_text' target=_blank>",
                                                                     T("Well-Known Text"),
                                                                     "</a>",
                                                                     T("representation of the Polygon/Line.")))

            table.level.comment = DIV(_class="tooltip",
                                      _title="%s|%s" % (T("Level"),
                                                        T("If the location is a geographic area, then state at what level here.")))
            parent_comment = DIV(_class="tooltip",
                                 _title="%s|%s" % (T("Parent"),
                                                   T("The Area which this Site is located within.")))
            if r.representation == "popup":
                table.parent.comment = parent_comment
            else:
                # Include 'Create Location' button
                table.parent.comment = DIV(S3PopupLink(c="gis",
                                                       f="location",
                                                       vars=dict(child="parent")),
                                           parent_comment)
            table.inherited.comment = DIV(_class="tooltip",
                                          _title="%s|%s" % (table.inherited.label,
                                                            T("Whether the Latitude & Longitude are inherited from a higher level in the location hierarchy rather than being a separately-entered figure.")))
            table.comments.comment = DIV(_class="tooltip",
                                         _title="%s|%s" % (T("Comments"),
                                                           T("Please use this field to record any additional information, such as Ushahidi instance IDs. Include a history of the record if it is updated.")))

            if r.method in (None, "list") and r.record is None:
                # List
                pass
            elif r.method in ("delete", "import", "profile", "summary"):
                pass
            else:
                if r.method == "report":
                    r.resource.add_filter(table.level != None)
                # Add Map to allow locations to be found this way
                config = gis.get_config()
                lat = config.lat
                lon = config.lon
                zoom = config.zoom
                bbox = {}
                feature_resources = None

                if r.method in ("create", "update"):
                    # Use a custom form appropriate to the kind of location
                    if get_vars.get("~.level") == "None":
                        # Specific Locations
                        from s3 import S3SQLCustomForm
                        crud_fields = ["name",
                                       "parent",
                                       "gis_feature_type",
                                       "lat",
                                       "lon",
                                       "wkt",
                                       "addr_street",
                                       "comments",
                                       ]
                        if settings.get_gis_postcode_selector():
                            crud_fields.insert(-1, "addr_postcode")
                        crud_form = S3SQLCustomForm(*crud_fields)
                        s3db.configure("gis_location",
                                       crud_form = crud_form,
                                       )
                    elif get_vars.get("~.level__ne") == "None":
                        # Administrative Units
                        from s3 import S3SQLCustomForm
                        crud_form = S3SQLCustomForm("name",
                                                    "level",
                                                    "parent",
                                                    "gis_feature_type",
                                                    "lat",
                                                    "lon",
                                                    "wkt",
                                                    "comments",
                                                    )
                        s3db.configure("gis_location",
                                       crud_form = crud_form,
                                       )

                if r.method == "create":
                    # @ToDo: Support Polygons here
                    if s3.debug:
                        script = "/%s/static/scripts/S3/s3.gis.feature_crud.js" % appname
                    else:
                        script = "/%s/static/scripts/S3/s3.gis.feature_crud.min.js" % appname
                    s3.scripts.append(script)
                    add_feature = True
                    add_feature_active = True
                    table.inherited.readable = False
                else:
                    if r.method == "update":
                        if s3.debug:
                            script = "/%s/static/scripts/S3/s3.gis.feature_crud.js" % appname
                        else:
                            script = "/%s/static/scripts/S3/s3.gis.feature_crud.min.js" % appname
                        s3.scripts.append(script)
                        add_feature = True
                        add_feature_active = False
                    else:
                        # Read
                        add_feature = False
                        add_feature_active = False

                    record = r.record
                    if record:
                        # Centre the map on this record (point) or zoom to
                        # its bounds (polygon/line)
                        if record.gis_feature_type == 1 and record.lat is not None and record.lon is not None:
                            lat = record.lat
                            lon = record.lon
                            # Same as a single zoom on a cluster
                            zoom = zoom + 2
                        else:
                            lat = lon = zoom = None
                            bbox = {"lon_min": record.lon_min,
                                    "lat_min": record.lat_min,
                                    "lon_max": record.lon_max,
                                    "lat_max": record.lat_max,
                                    }
                        feature_resources = {"name" : T("Location"),
                                             "id" : "location",
                                             "active": True,
                                             }
                        # Is there a layer defined for Locations?
                        ftable = s3db.gis_layer_feature
                        query = (ftable.controller == "gis") & \
                                (ftable.function == "location")
                        layer = db(query).select(ftable.layer_id,
                                                 limitby=(0, 1)
                                                 ).first()
                        if layer:
                            feature_resources.update(layer_id = layer.layer_id,
                                                     filter = "~.id=%s" % record.id,
                                                     )
                        else:
                            # No layer: build an ad-hoc GeoJSON resource
                            feature_resources.update(tablename = "gis_location",
                                                     url = "/%s/gis/location.geojson?~.id=%s" % (appname, record.id),
                                                     opacity = 0.9,
                                                     # @ToDo: Style isn't taking effect since gis_feature_type isn't in the attributes
                                                     style = '[{"prop":"gis_feature_type","cat":1,"externalGraphic":"img/markers/marker_red.png"},{"prop":"gis_feature_type","cat":3,"fill":"FFFFFF","fillOpacity":0.01,"stroke":"0000FF"},{"prop":"gis_feature_type","cat":6,"fill":"FFFFFF","fillOpacity":0.01,"stroke":"0000FF"}]',
                                                     )
                        feature_resources = (feature_resources,)

                _map = gis.show_map(lat = lat,
                                    lon = lon,
                                    zoom = zoom,
                                    bbox = bbox,
                                    feature_resources = feature_resources,
                                    add_feature = add_feature,
                                    add_feature_active = add_feature_active,
                                    # We want to be able to see a location against Satellite imagery, etc
                                    catalogue_layers = True,
                                    toolbar = True,
                                    collapsed = True)

                # Pass the map back to the main controller
                prep_vars.update(_map=_map)
        elif r.representation == "json":
            # Path field should be visible
            table.path.readable = True
        elif r.representation == "geojson":
            # Don't represent the feature_type, so we can use it for styling
            table.gis_feature_type.represent = None

        return True
    s3.prep = lambda r, prep_vars=prep_vars: prep(r, prep_vars)

    # Options
    _vars = request.vars
    filters = []

    parent = _vars.get("parent_")
    # Don't use 'parent' as the var name as otherwise it conflicts with the form's var of the same name & hence this will be triggered during form submission
    if parent:
        # We want to do case-insensitive searches
        # (default anyway on MySQL/SQLite, but not PostgreSQL)
        _parent = parent.lower()
        # Can't do this using a JOIN in DAL syntax
        # .belongs() not GAE-compatible!
        query = (table.name.lower().like(_parent))
        filters.append((table.parent.belongs(db(query).select(table.id))))
        # ToDo: Make this recursive - want descendants not just direct children!
        # Use new gis.get_children() function

    if filters:
        from operator import __and__
        s3.filter = reduce(__and__, filters)

    caller = _vars.get("caller")
    if caller:
        # We've been called as a Popup
        if "gis_location_parent" in caller:
            # Hide unnecessary rows
            table.addr_street.readable = table.addr_street.writable = False
            table.addr_postcode.readable = table.addr_postcode.writable = False
            table.start_date.readable = table.start_date.writable = False
            table.end_date.readable = table.end_date.writable = False
        elif "project_location_location_id" in caller:
            # Hide unnecessary rows
            table.addr_street.readable = table.addr_street.writable = False
            table.addr_postcode.readable = table.addr_postcode.writable = False
            table.start_date.readable = table.start_date.writable = False
            table.end_date.readable = table.end_date.writable = False

            # Show the options for the currently-active gis_config
            levels = gis.get_relevant_hierarchy_levels(as_dict=True)
            level_keys = list(levels.keys())
            if "L0" in level_keys:
                # Don't add Countries
                levels.popitem(last=False)
            else:
                # Parent can be a Country
                level_keys.insert(0, "L0")
            table.level.requires = IS_IN_SET(levels)

            # Parent is Required & must be above lowest level
            # @ToDo: Don't allow users to add locked Lx levels unless they are MAP_ADMIN
            # @ToDo: Dynamic filtering based on selected level (taking into account strict or not)
            level_keys.pop()
            table.parent.requires = IS_ONE_OF(db, "gis_location.id",
                                              s3db.gis_location_represent,
                                              filterby="level",
                                              filter_opts=level_keys,
                                              orderby="gis_location.name",
                                              )
        else:
            parent = _vars.get("parent_")
            # Don't use 'parent' as the var name as otherwise it conflicts with the form's var of the same name & hence this will be triggered during form submission
            if parent:
                table.parent.default = parent

            # Hide unnecessary rows
            table.level.readable = table.level.writable = False

            level = _vars.get("level")
            if level:
                # We've been called from the Location Selector widget
                table.addr_street.readable = table.addr_street.writable = False

    country = S3ReusableField("country", "string", length=2,
                              label = COUNTRY,
                              requires = IS_EMPTY_OR(IS_IN_SET_LAZY(
                                  lambda: gis.get_countries(key_type="code"),
                                  zero = SELECT_LOCATION)),
                              represent = lambda code: \
                                  gis.get_country(code, key_type="code") or UNKNOWN_OPT)

    output = s3_rest_controller(# CSV column headers, so no T()
                                csv_extra_fields = [{"label": "Country",
                                                     "field": country(),
                                                     }
                                                    ],
                                rheader = s3db.gis_rheader,
                                )

    # If prep built a map (create/update/read views), pass it to the view
    _map = prep_vars.get("_map")
    if _map and isinstance(output, dict):
        output["_map"] = _map
    return output
# -----------------------------------------------------------------------------
def ldata():
    """
        Return JSON of location hierarchy suitable for use by
        S3LocationSelector:
            GET '/eden/gis/ldata/' + id

        If requesting data for a level after a missed level:
            GET '/eden/gis/ldata/' + id + '/' + level

        Response JSON:
            {id : {'n' : name,
                   'l' : level,
                   'f' : parent,
                   'b' : [lon_min, lat_min, lon_max, lat_max]
                   }
             }

        @ToDo: DRY with S3LocationSelector _locations()
    """

    req_args = request.args
    try:
        location_id = req_args[0]
    except:
        raise HTTP(400)

    s3base.s3_keep_messages()
    response.headers["Content-Type"] = "application/json"

    # Optional 2nd arg: the level to output (when skipping a missing level)
    if len(req_args) > 1:
        output_level = int(req_args[1])
    else:
        output_level = None

    # Translate options using gis_location_name?
    language = session.s3.language
    if language in ("en", "en-gb"):
        translate = False
    else:
        translate = settings.get_L10n_translate_gis_location()

    table = s3db.gis_location
    query = (table.deleted == False) & \
            (table.end_date == None) & \
            (table.level != None)
    if output_level:
        # We will be reading all descendants, which is inefficient, but otherwise we cannot support individual locations with missing levels
        # Filter out results from the missing level as otherwise these show up like individual locations with missing levels
        filter_level = output_level - 1
        query &= (table.level != "L%s" % filter_level) & \
                 ((table.path.like(location_id + "/%")) | \
                  (table.path.like("%/" + location_id + "/%")))
    else:
        # Simple case: direct children only
        query &= (table.parent == location_id)

    fields = [table.id,
              table.name,
              table.level,
              table.parent,
              table.lon_min,
              table.lat_min,
              table.lon_max,
              table.lat_max,
              ]
    if translate:
        # Left-join the translated names for the session language
        ntable = s3db.gis_location_name
        fields.append(ntable.name_l10n)
        left = ntable.on((ntable.deleted == False) & \
                         (ntable.language == language) & \
                         (ntable.location_id == table.id))
    else:
        left = None
    # Include the requested location itself so its level can be introspected
    locations = db((table.id == location_id) | query).select(*fields,
                                                             left=left)

    location_id = int(location_id)
    if not output_level:
        # Introspect it
        if translate:
            try:
                id_level = int(locations.as_dict(key="gis_location.id")[location_id]["gis_location"]["level"][1:])
            except:
                return "{}"
        else:
            try:
                id_level = int(locations.as_dict()[location_id]["level"][1:])
            except:
                return "{}"
        output_level = id_level + 1
    search_level = "L%s" % output_level

    location_dict = {}
    if translate:
        # Rows are keyed by table when a join is involved
        for location in locations:
            l = location["gis_location"]
            if l.level == search_level:
                this_level = output_level
                # In case we're using a missing level, use the pseudo-parent
                #f = int(l.parent)
                f = location_id
            else:
                # An individual location with a Missing Level
                this_level = int(l.level[1:])
                parent = l.parent
                if parent:
                    f = int(parent)
                else:
                    f = None
            name = location["gis_location_name.name_l10n"] or l.name
            if l.lon_min is not None:
                # Have a Bounding Box to include
                location_dict[int(l.id)] = {"n": name,
                                            "l": this_level,
                                            "f": f,
                                            "b": [l.lon_min,
                                                  l.lat_min,
                                                  l.lon_max,
                                                  l.lat_max
                                                  ],
                                            }
            else:
                location_dict[int(l.id)] = {"n": name,
                                            "l": this_level,
                                            "f": f,
                                            }
    else:
        for l in locations:
            if l.level == search_level:
                this_level = output_level
                # In case we're using a missing level, use the pseudo-parent
                #f = int(l.parent)
                f = location_id
            else:
                # An individual location with a Missing Level
                this_level = int(l.level[1:])
                parent = l.parent
                if parent:
                    f = int(parent)
                else:
                    f = None
            if l.lon_min is not None:
                # Have a Bounding Box to include
                location_dict[int(l.id)] = {"n": l.name,
                                            "l": this_level,
                                            "f": f,
                                            "b": [l.lon_min,
                                                  l.lat_min,
                                                  l.lon_max,
                                                  l.lat_max
                                                  ],
                                            }
            else:
                location_dict[int(l.id)] = {"n": l.name,
                                            "l": this_level,
                                            "f": f,
                                            }

    return json.dumps(location_dict, separators=SEPARATORS)
# -----------------------------------------------------------------------------
def hdata():
    """
        Return JSON of hierarchy labels suitable for use by
        S3LocationSelector:
            GET '/eden/gis/hdata/' + l0_id

        Response JSON:
            {1 : l1_name,
             2 : l2_name,
             etc,
             }
    """

    req_args = request.args
    if len(req_args) < 1:
        raise HTTP(400)
    location_id = req_args[0]

    response.headers["Content-Type"] = "application/json"

    # @ToDo: Translate options using gis_hierarchy_name?
    #translate = settings.get_L10n_translate_gis_location()
    #if translate:
    #    language = session.s3.language
    #    if language == settings.get_L10n_default_language():
    #        translate = False

    htable = s3db.gis_hierarchy
    row = db((htable.deleted == False) & \
             (htable.location_id == location_id)).select(htable.L1,
                                                         htable.L2,
                                                         htable.L3,
                                                         htable.L4,
                                                         htable.L5,
                                                         limitby = (0, 1),
                                                         ).first()

    # Map level number -> label, skipping undefined levels
    labels = {}
    if row:
        for level in ("L1", "L2", "L3", "L4", "L5"):
            label = row[level]
            if label:
                labels[int(level[1:])] = label

    return json.dumps(labels, separators=SEPARATORS)
# -----------------------------------------------------------------------------
def s3_gis_location_parents(r, **attr):
    """
        Custom S3Method

        Return a list of Parents for a Location
    """

    # Check permission
    if not auth.s3_has_permission("read", r.resource.table):
        r.unauthorised()

    representation = r.representation
    if representation == "html":
        # @ToDo
        output = {}
        #return output
        raise HTTP(501, ERROR.BAD_FORMAT)

    if representation != "json":
        raise HTTP(415, ERROR.BAD_FORMAT)

    if not r.id:
        raise HTTP(404, ERROR.BAD_RECORD)

    # Get the parents for a Location
    parents = gis.get_parents(r.id)
    if not parents:
        raise HTTP(404, ERROR.NO_MATCH)

    # Map level -> parent id
    lookup = {}
    for parent in parents:
        lookup[parent.level] = parent.id
    return json.dumps(lookup, separators=SEPARATORS)
# -----------------------------------------------------------------------------
def l0():
    """
        A specialised controller to return details for an L0 location
        - suitable for use with the LocationSelector

        arg: ID of the L0 location
        returns JSON (flat dict of location fields, ISO2 code and the
        country's location hierarchy labels)
    """

    try:
        record_id = request.args[0]
    except:
        item = current.xml.json_message(False, 400, "Need to specify a record ID!")
        raise HTTP(400, body=item)

    table = s3db.gis_location
    ttable = s3db.gis_location_tag
    query = (table.id == record_id) & \
            (table.deleted == False) & \
            (table.level == "L0") & \
            (ttable.tag == "ISO2") & \
            (ttable.location_id == table.id)
    record = db(query).select(table.id,
                              table.name,
                              # Code for the Geocoder lookup filter
                              ttable.value,
                              # LatLon for Centering the Map
                              table.lon,
                              table.lat,
                              # Bounds for Zooming the Map
                              table.lon_min,
                              table.lon_max,
                              table.lat_min,
                              table.lat_max,
                              cache = s3db.cache,
                              limitby=(0, 1)).first()
    if not record:
        item = current.xml.json_message(False, 400, "Invalid ID!")
        raise HTTP(400, body=item)

    # Flatten the joined Row: hoist gis_location fields to the top level
    result = record.as_dict()
    location_part = result["gis_location"]
    for key in location_part:
        result[key] = location_part[key]
    del result["gis_location"]
    # Expose the ISO2 tag value as "code"
    result["code"] = result["gis_location_tag"]["value"]
    del result["gis_location_tag"]

    # Provide the Location Hierarchy for this country
    location_hierarchy = gis.get_location_hierarchy(location=record_id)
    for key in location_hierarchy:
        result[key] = location_hierarchy[key]

    output = json.dumps(result, separators=SEPARATORS)
    response.headers["Content-Type"] = "application/json"
    return output
# =============================================================================
# Common CRUD strings for all layers
ADD_LAYER = T("Create Layer")
LAYER_DETAILS = T("Layer Details")
LAYERS = T("Layers")
EDIT_LAYER = T("Edit Layer")
ADD_NEW_LAYER = T("Create Layer")
LIST_LAYERS = T("List Layers")
DELETE_LAYER = T("Delete Layer")
LAYER_ADDED = T("Layer added")
LAYER_UPDATED = T("Layer updated")
LAYER_DELETED = T("Layer deleted")
# These may be differentiated per type of layer.
# The %s placeholder is filled with the layer-type name (e.g. "Feature")
TYPE_LAYERS_FMT = "%s Layers"
ADD_NEW_TYPE_LAYER_FMT = "Add New %s Layer"
EDIT_TYPE_LAYER_FMT = "Edit %s Layer"
LIST_TYPE_LAYERS_FMT = "List %s Layers"
NO_TYPE_LAYERS_FMT = "No %s Layers currently defined"
# -----------------------------------------------------------------------------
def catalog():
    """ Custom View to link to different Layers """

    # The view needs no context from the controller
    return {}
# -----------------------------------------------------------------------------
def config_default(r, **attr):
    """
        Set a Config to be the default
        designed to be a custom method called by an action button

        If the config already belongs to the current user, it is simply
        flagged as their default; otherwise it is copied (together with
        its layers & styles) into a new personal config which becomes
        the default.
    """

    id = r.id
    table = s3db.gis_config
    config = db(table.id == id).select(table.id,
                                       table.pe_id,
                                       table.pe_default,
                                       table.name,
                                       table.default_location_id,
                                       table.lat,
                                       table.lon,
                                       table.zoom,
                                       limitby=(0, 1)
                                       ).first()
    if not config:
        session.error = T("Config not found!")
        redirect(URL())

    pe_id = auth.user.pe_id
    if config.pe_id == pe_id:
        if config.pe_default:
            session.confirmation = T("Map is already your Default")
            redirect(URL())
        else:
            # Set this to default
            config.update_record(pe_default = True)
            # Set all others to False
            query = (table.pe_id == pe_id) & \
                    (table.id != id)
            db(query).update(pe_default = False)
            session.confirmation = T("Map has been set as Default")
            redirect(URL())
    else:
        # Copy Config
        # (pe_type 1 = Person)
        new_id = table.insert(pe_id = pe_id,
                              pe_type = 1,
                              pe_default = True,
                              name = config.name,
                              default_location_id = config.default_location_id,
                              lat = config.lat,
                              lon = config.lon,
                              zoom = config.zoom,
                              )
        # Copy Layers
        table = db.gis_layer_config
        query = (table.config_id == id) & \
                (table.deleted == False)
        layers = db(query).select(table.layer_id,
                                  table.enabled,
                                  table.visible,
                                  table.base,
                                  )
        insert = table.insert
        for layer in layers:
            insert(config_id = new_id,
                   layer_id = layer.layer_id,
                   enabled = layer.enabled,
                   visible = layer.visible,
                   base = layer.base,
                   )
        # Copy Styles
        table = db.gis_style
        query = (table.config_id == id) & \
                (table.deleted == False)
        styles = db(query).select(table.layer_id,
                                  table.record_id,
                                  table.marker_id,
                                  table.gps_marker,
                                  table.opacity,
                                  table.popup_format,
                                  table.cluster_distance,
                                  table.cluster_threshold,
                                  table.style,
                                  )
        insert = table.insert
        for style in styles:
            insert(config_id = new_id,
                   layer_id = style.layer_id,
                   record_id = style.record_id,
                   marker_id = style.marker_id,
                   gps_marker = style.gps_marker,
                   opacity = style.opacity,
                   popup_format = style.popup_format,
                   cluster_distance = style.cluster_distance,
                   cluster_threshold = style.cluster_threshold,
                   style = style.style,
                   )
        session.confirmation = T("Map has been copied and set as Default")
        redirect(URL())
# -----------------------------------------------------------------------------
def config():
    """
        RESTful CRUD controller for Map Configurations (gis_config)

        Handles:
        - interactive CRUD, inc. a cut-down "Saved Maps" view for
          non-MAP_ADMIN users
        - the layer_entity component, with per-config layer
          enable/disable action buttons
        - saves POSTed directly from the Map client
          (representation == "url"), inc. the per-layer config &
          style state sent with the save
    """
    # Filter out Temp configs
    FS = s3base.S3FieldSelector
    s3.filter = (FS("config.temp") == False)
    # Custom Methods to set as default
    set_method = s3db.set_method
    set_method(module, resourcename,
               method = "default",
               action = config_default)
    # Custom Methods to enable/disable layers
    set_method(module, resourcename,
               component_name = "layer_entity",
               method = "enable",
               action = enable_layer)
    set_method(module, resourcename,
               component_name = "layer_entity",
               method = "disable",
               action = disable_layer)
    # Pre-process
    def prep(r):
        if r.representation == "url":
            # Save from Map
            if r.method == "create" and \
               auth.is_logged_in():
                # Saved Maps are owned by the logged-in person entity
                table = r.table
                table.pe_id.default = auth.user.pe_id
                table.pe_type.default = 1
                table.temp.writable = True
        elif r.interactive or r.representation == "aadata":
            if not r.component:
                s3db.gis_config_form_setup()
                list_fields = s3db.get_config("gis_config", "list_fields")
                if auth.s3_has_role("MAP_ADMIN"):
                    list_fields += ["region_location_id",
                                    "default_location_id",
                                    ]
                    s3db.configure("gis_config",
                                   subheadings = {"zoom": T("Map Settings"),
                                                  "default_location_id": T("Form Settings"),
                                                  },
                                   )
                else:
                    # Non-admins see only their personal "Saved Maps"
                    s3.crud_strings.gis_config.title_list = T("Saved Maps")
                    # Hide Exports
                    settings.ui.export_formats = []
                    # Filter Region & Default Configs
                    query = (FS("config.temp") == False) & \
                            (FS("config.region_location_id") == None) & \
                            (FS("config.uuid") != "SITE_DEFAULT")
                    r.resource.add_filter(query)
                    list_fields.append("pe_default")
                    CREATED_BY = T("Created By")
                    field = r.table.pe_id
                    field.label = CREATED_BY
                    field.represent = s3db.pr_PersonEntityRepresent(show_label = False,
                                                                    show_type = False,
                                                                    show_link = True,
                                                                    )
                    if auth.is_logged_in():
                        settings.search.filter_manager = False
                        from s3.s3filter import S3OptionsFilter
                        filter_widgets = [
                            S3OptionsFilter("pe_id",
                                            label = "",
                                            options = {"*": T("All"),
                                                       auth.user.pe_id: T("My Maps"),
                                                       },
                                            cols = 2,
                                            multiple = False,
                                            )
                            ]
                        s3db.configure("gis_config",
                                       filter_clear = False,
                                       filter_widgets = filter_widgets,
                                       )
                        # For Create forms
                        field.default = auth.user.pe_id
                        field.readable = field.writable = False
                        fields = ["name",
                                  "pe_default",
                                  "default_location_id",
                                  "zoom",
                                  "lat",
                                  "lon",
                                  #"projection_id",
                                  #"wmsbrowser_url",
                                  #"wmsbrowser_name",
                                  ]
                        # OAuth fields only make sense when an OSM layer exists
                        osm_table = s3db.gis_layer_openstreetmap
                        openstreetmap = db(osm_table.deleted == False).select(osm_table.id,
                                                                              limitby=(0, 1))
                        if openstreetmap:
                            # OpenStreetMap config
                            s3db.add_components("gis_config",
                                                auth_user_options={"joinby": "pe_id",
                                                                   "pkey": "pe_id",
                                                                   "multiple": False,
                                                                   },
                                                )
                            fields += ["user_options.osm_oauth_consumer_key",
                                       "user_options.osm_oauth_consumer_secret",
                                       ]
                            crud_form = s3base.S3SQLCustomForm(*fields)
                        else:
                            crud_form = None
                        s3db.configure("gis_config",
                                       crud_form = crud_form,
                                       insertable = False,
                                       )
            elif r.component_name == "layer_entity":
                s3.crud_strings["gis_layer_config"] = Storage(
                    label_create = T("Add Layer to this Profile"),
                    title_display = LAYER_DETAILS,
                    title_list = LAYERS,
                    title_update = EDIT_LAYER,
                    label_list_button = T("List Layers in Profile"),
                    label_delete_button = T("Remove Layer from Profile"),
                    msg_record_created = LAYER_ADDED,
                    msg_record_modified = LAYER_UPDATED,
                    msg_list_empty = T("No Layers currently configured in this Profile"),
                    )
                table = s3db.gis_layer_entity
                ltable = s3db.gis_layer_config
                if r.method == "update":
                    # Existing records don't need to change the layer pointed to (confusing UI & adds validation overheads)
                    ltable.layer_id.writable = False
                    # Hide irrelevant fields
                    query = (table.layer_id == r.component_id)
                    instance_type = db(query).select(table.instance_type,
                                                     limitby=(0, 1)
                                                     ).first().instance_type
                    if instance_type in ("gis_layer_coordinate",
                                         "gis_layer_georss",
                                         "gis_layer_gpx",
                                         "gis_layer_mgrs",
                                         "gis_layer_openweathermap",
                                         ):
                        ltable.base.readable = ltable.base.writable = False
                    elif instance_type in ("gis_layer_bing",
                                           "gis_layer_google",
                                           "gis_layer_tms",
                                           ):
                        ltable.visible.readable = ltable.visible.writable = False
                    elif instance_type in ("gis_layer_feature",
                                           "gis_layer_geojson",
                                           "gis_layer_kml",
                                           "gis_layer_shapefile",
                                           "gis_layer_theme",
                                           "gis_layer_wfs",
                                           ):
                        ltable.base.readable = ltable.base.writable = False
                else:
                    # Only show Layers not yet in this config
                    # Find the records which are used
                    query = (ltable.layer_id == table.layer_id) & \
                            (ltable.config_id == r.id)
                    rows = db(query).select(table.layer_id)
                    # Filter them out
                    ltable.layer_id.requires = IS_ONE_OF(db, "gis_layer_entity.layer_id",
                                                         s3db.gis_layer_represent,
                                                         not_filterby="layer_id",
                                                         not_filter_opts=[row.layer_id for row in rows]
                                                         )
        return True
    s3.prep = prep
    # Post-processor
    def postp(r, output):
        if r.interactive:
            if r.component_name == "layer_entity":
                s3_action_buttons(r, deletable=False)
                ltable = s3db.gis_layer_config
                query = (ltable.config_id == r.id)
                rows = db(query).select(ltable.layer_id,
                                        ltable.enabled)
                # Show the enable button if the layer is not currently enabled
                restrict = [str(row.layer_id) for row in rows if not row.enabled]
                s3.actions.append({"label": str(T("Enable")),
                                   "_class": "action-btn",
                                   "url": URL(args=[r.id, "layer_entity", "[id]", "enable"]),
                                   "restrict": restrict
                                   })
                # Show the disable button if the layer is not currently disabled
                restrict = [str(row.layer_id) for row in rows if row.enabled]
                s3.actions.append({"label": str(T("Disable")),
                                   "_class": "action-btn",
                                   "url": URL(args=[r.id, "layer_entity", "[id]", "disable"]),
                                   "restrict": restrict
                                   })
            elif not r.component and r.method not in ("datalist", "import"):
                show = {"url": URL(c="gis", f="index",
                                   vars={"config":"[id]"}),
                        "label": str(T("Show")),
                        "_class": "action-btn",
                        }
                if auth.s3_has_role("MAP_ADMIN"):
                    s3_action_buttons(r, copyable=True)
                    s3.actions.append(show)
                else:
                    s3.actions = [show]
                if auth.is_logged_in():
                    default = {"url": URL(args=["[id]", "default"]),
                               "label": str(T("Set as my Default")),
                               "_class": "action-btn",
                               }
                    s3.actions.append(default)
        elif r.representation == "url":
            # Save from Map
            result = json.loads(output["item"])
            if result["status"] == "success":
                config_id = r.id
                post_vars = request.post_vars
                if post_vars.get("temp", False):
                    # This is coming from a Print Screenshot
                    # Hide the message
                    try:
                        del result["message"]
                    except:
                        pass
                    # Add the ID
                    result["id"] = config_id
                    output["item"] = json.dumps(result, separators=SEPARATORS)
                elif post_vars.get("hide", False):
                    # This is coming from Save Panel
                    # Hide the message
                    try:
                        del result["message"]
                    except:
                        pass
                    output["item"] = json.dumps(result, separators=SEPARATORS)
                # Process Layers
                ltable = s3db.gis_layer_config
                layers = json.loads(request.post_vars.layers)
                # Fake FORM: .vars is filled per-layer below so the
                # onaccept routine can run against each upsert
                form = Storage()
                for layer in layers:
                    if "id" in layer and layer["id"] != "search_results":
                        layer_id = layer["id"]
                        form_vars = Storage(config_id = config_id,
                                            layer_id = layer_id,
                                            )
                        if "base" in layer:
                            form_vars.base = layer["base"]
                        if "dir" in layer:
                            form_vars.dir = layer["dir"]
                        form_vars.visible = layer.get("visible", False)
                        # Update or Insert?
                        query = (ltable.config_id == config_id) & \
                                (ltable.layer_id == layer_id)
                        record = db(query).select(ltable.id,
                                                  limitby=(0, 1)).first()
                        if record:
                            record_id = record.id
                            form_vars.id = record_id
                            db(ltable.id == record_id).update(**form_vars)
                        else:
                            # New Saved Map
                            form_vars.id = ltable.insert(**form_vars)
                        # Ensure that Default Base processing happens properly
                        form.vars = form_vars
                        s3db.gis_layer_config_onaccept(form)
                        if "style" in layer:
                            form_vars = Storage(config_id = config_id,
                                                layer_id = layer_id,
                                                )
                            form_vars.style = layer["style"]
                            # Update or Insert?
                            stable = s3db.gis_style
                            query = (stable.config_id == config_id) & \
                                    (stable.layer_id == layer_id)
                            record = db(query).select(stable.id,
                                                      limitby=(0, 1)).first()
                            if record:
                                record.update_record(**form_vars)
                            else:
                                # New Style
                                stable.insert(**form_vars)
        return output
    s3.postp = postp
    output = s3_rest_controller(rheader = s3db.gis_rheader,
                                )
    return output
# -----------------------------------------------------------------------------
def enable_layer(r, **attr):
    """
        Switch a Layer on within a Map Configuration
            custom method, invoked from an action button
        @ToDo: Make this call an API function which can then also be used by CLI scripts (like msg_channel_enable)
    """
    config_id = r.id
    # Only valid when called on the layer_entity component
    if r.component_name != "layer_entity":
        session.error = T("Incorrect parameters")
        redirect(URL(args=[config_id, "layer_entity"]))
    link = s3db.gis_layer_config
    db((link.config_id == config_id) & \
       (link.layer_id == r.component_id)).update(enabled = True)
    session.confirmation = T("Layer has been Enabled")
    redirect(URL(args=[config_id, "layer_entity"]))
# -----------------------------------------------------------------------------
def disable_layer(r, **attr):
    """
        Switch a Layer off within a Map Configuration
            custom method, invoked from an action button in config/layer_entity
        @ToDo: Make this call an API function which can then also be used by CLI scripts (like msg_channel_disable)
    """
    config_id = r.id
    # Only valid when called on the layer_entity component
    if r.component_name != "layer_entity":
        session.error = T("Incorrect parameters")
        redirect(URL(args=[config_id, "layer_entity"]))
    link = s3db.gis_layer_config
    db((link.config_id == config_id) & \
       (link.layer_id == r.component_id)).update(enabled = False)
    session.confirmation = T("Layer has been Disabled")
    redirect(URL(args=[config_id, "layer_entity"]))
# -----------------------------------------------------------------------------
def hierarchy():
    """ RESTful CRUD controller """
    # Apply the custom form configuration before handing over to the REST API
    s3db.gis_hierarchy_form_setup()
    output = s3_rest_controller()
    return output
# -----------------------------------------------------------------------------
def location_tag():
    """ RESTful CRUD controller """
    # Plain pass-through to the REST API: no custom prep/postp needed
    output = s3_rest_controller()
    return output
# -----------------------------------------------------------------------------
def menu():
    """ RESTful CRUD controller """
    # Plain pass-through to the REST API: no custom prep/postp needed
    output = s3_rest_controller()
    return output
# -----------------------------------------------------------------------------
def marker():
    """ RESTful CRUD controller """
    def prep(r):
        # Hide height/width on interactive create forms
        # (not needed as user input at creation time)
        if r.interactive and r.method == "create":
            table = r.table
            table.height.readable = False
            table.width.readable = False
        return True
    s3.prep = prep
    return s3_rest_controller(rheader=s3db.gis_rheader)
# -----------------------------------------------------------------------------
def projection():
    """ RESTful CRUD controller """
    # When map security is on, restrict access to Map Admins
    restricted = settings.get_security_map()
    if restricted and not auth.s3_has_role("MAP_ADMIN"):
        auth.permission.fail()
    output = s3_rest_controller()
    return output
# -----------------------------------------------------------------------------
def style():
    """ RESTful CRUD controller """
    # Expose the layer_id FK so Styles can be managed directly
    layer_id = s3db.gis_style.layer_id
    layer_id.readable = True
    layer_id.writable = True
    layer_id.label = T("Layer")
    layer_id.represent = s3base.S3Represent(lookup = "gis_layer_entity")
    layer_id.requires = IS_ONE_OF(db, "gis_layer_entity.layer_id",
                                  layer_id.represent)
    return s3_rest_controller()
# -----------------------------------------------------------------------------
def waypoint():
    """ RESTful CRUD controller for GPS Waypoints """
    # Plain pass-through to the REST API
    output = s3_rest_controller()
    return output
# -----------------------------------------------------------------------------
def waypoint_upload():
    """
        Custom View
        Temporary: Likely to be refactored into the main waypoint controller
    """
    # No controller data required: the view handles everything
    return {}
# -----------------------------------------------------------------------------
def trackpoint():
    """ RESTful CRUD controller for GPS Track points """
    # Plain pass-through to the REST API
    output = s3_rest_controller()
    return output
# -----------------------------------------------------------------------------
def track():
    """ RESTful CRUD controller for GPS Tracks (uploaded as files) """
    # Plain pass-through to the REST API
    output = s3_rest_controller()
    return output
# =============================================================================
def inject_enable(output):
    """
        Inject an 'Enable in Default Config?' checkbox into the form

        @param output: the controller output dict; modified in-place
                       when it contains a "form"
        @raise NotImplementedError: if the active formstyle is neither
                                    "bootstrap" nor callable
    """
    if "form" in output:
        # id/label/widget/comment are the pieces a formstyle expects
        row_id = "layer_enable"
        label = LABEL("%s:" % T("Enable in Default Config?"),
                      _for="enable")
        widget = INPUT(_name="enable",
                       _type="checkbox",
                       _value="on",
                       _id="layer_enable",
                       _class="boolean",
                       )
        comment = ""
        if s3_formstyle == "bootstrap":
            _controls = DIV(widget, comment, _class="controls")
            row = DIV(label,
                      _controls,
                      _class="control-group",
                      _id="%s__row" % row_id
                      )
        elif callable(s3_formstyle):
            row = s3_formstyle(row_id, label, widget, comment)
        else:
            # Unsupported formstyle: fail loudly with a clear message
            # (was a bare `raise` with no active exception, which itself
            # errors with an unrelated interpreter message)
            raise NotImplementedError("Unsupported formstyle: %s" % s3_formstyle)
        # Splice the row into the form markup
        # (2nd-to-last element of the form's first child)
        output["form"][0][-2].append(row)
# -----------------------------------------------------------------------------
def layer_config():
    """ RESTful CRUD controller """
    # When map security is on, restrict access to Map Admins
    if settings.get_security_map() and not auth.s3_has_role("MAP_ADMIN"):
        auth.permission.fail()
    # CSV imports need a layer-type-specific stylesheet
    layer = get_vars.get("layer", None)
    csv_stylesheet = "layer_%s.xsl" % layer if layer else None
    return s3_rest_controller(csv_stylesheet = csv_stylesheet)
# -----------------------------------------------------------------------------
def layer_entity():
    """
        RESTful CRUD controller for the Layer super-entity

        Hides fields on the config & style components which are
        irrelevant for the record's instance layer type.
    """
    # When map security is on, restrict access to Map Admins
    if settings.get_security_map() and not auth.s3_has_role("MAP_ADMIN"):
        auth.permission.fail()
    # Custom Method
    s3db.set_method(module, resourcename,
                    method = "disable",
                    action = disable_layer)
    def prep(r):
        if r.interactive:
            if r.component_name == "config":
                ltable = s3db.gis_layer_config
                # Hide irrelevant fields
                # (NB "type" shadows the builtin here)
                type = r.record.instance_type
                if type in ("gis_layer_coordinate",
                            "gis_layer_feature",
                            "gis_layer_geojson",
                            "gis_layer_georss",
                            "gis_layer_gpx",
                            "gis_layer_kml",
                            "gis_layer_mgrs",
                            "gis_layer_wfs",
                            ):
                    ltable.base.writable = ltable.base.readable = False
                elif type in ("gis_layer_empty",
                              "gis_layer_bing",
                              "gis_layer_google",
                              "gis_layer_tms",
                              ):
                    ltable.visible.writable = ltable.visible.readable = False
                if r.method =="update":
                    # Existing records don't need to change the config pointed to (confusing UI & adds validation overheads)
                    ltable.config_id.writable = False
                else:
                    # Only show Symbologies not yet defined for this Layer
                    table = s3db.gis_config
                    # Find the records which are used
                    query = (ltable.config_id == table.id) & \
                            (ltable.layer_id == r.id)
                    rows = db(query).select(table.id)
                    # Filter them out
                    ltable.config_id.requires = IS_ONE_OF(db, "gis_config.id",
                                                          "%(name)s",
                                                          not_filterby="id",
                                                          not_filter_opts=[row.id for row in rows]
                                                          )
            elif r.component_name == "style":
                # Hide irrelevant fields
                type = r.record.instance_type
                if type != "gis_layer_feature":
                    field = s3db.gis_style.gps_marker
                    field.writable = field.readable = False
        return True
    s3.prep = prep
    output = s3_rest_controller(rheader = s3db.gis_rheader)
    return output
# -----------------------------------------------------------------------------
def layer_feature():
    """
        RESTful CRUD controller for Feature Layers
    """
    # Custom Method
    s3db.set_method(module, resourcename,
                    method = "disable",
                    action = disable_layer)
    # Pre-processor
    def prep(r):
        if r.interactive:
            if r.component_name == "config":
                ltable = s3db.gis_layer_config
                # Hide the base-layer flag (not used for this layer type)
                ltable.base.writable = ltable.base.readable = False
                if r.method != "update":
                    # Only show Configs with no definition yet for this layer
                    table = r.table
                    # Find the records which are used
                    query = (ltable.layer_id == table.layer_id) & \
                            (table.id == r.id)
                    rows = db(query).select(ltable.config_id)
                    # Filter them out
                    ltable.config_id.requires = IS_ONE_OF(db, "gis_config.id",
                                                          "%(name)s",
                                                          not_filterby="config_id",
                                                          not_filter_opts=[row.config_id for row in rows]
                                                          )
        return True
    s3.prep = prep
    # Post-processor
    def postp(r, output):
        if r.interactive and r.method != "import":
            if not r.component:
                s3_action_buttons(r, copyable=True)
                # Inject checkbox to enable layer in default config
                inject_enable(output)
        return output
    s3.postp = postp
    output = s3_rest_controller(rheader = s3db.gis_rheader)
    return output
# -----------------------------------------------------------------------------
def layer_openstreetmap():
    """
        RESTful CRUD controller for OpenStreetMap layers
    """
    tablename = "%s_%s" % (module, resourcename)
    s3db.table(tablename)
    # CRUD Strings
    type = "OpenStreetMap"
    LIST_LAYERS = T(LIST_TYPE_LAYERS_FMT % type)
    EDIT_LAYER = T(EDIT_TYPE_LAYER_FMT % type)
    NO_LAYERS = T(NO_TYPE_LAYERS_FMT % type)
    # NOTE(review): title_list uses the module-level LAYERS constant
    # (no local override, unlike other layer_* controllers) - confirm intended
    s3.crud_strings[tablename] = Storage(
        label_create=ADD_LAYER,
        title_display=LAYER_DETAILS,
        title_list=LAYERS,
        title_update=EDIT_LAYER,
        label_list_button=LIST_LAYERS,
        label_delete_button = DELETE_LAYER,
        msg_record_created=LAYER_ADDED,
        msg_record_modified=LAYER_UPDATED,
        msg_record_deleted=LAYER_DELETED,
        msg_list_empty=NO_LAYERS)
    # Pre-processor
    def prep(r):
        if r.interactive:
            if r.component_name == "config":
                ltable = s3db.gis_layer_config
                if r.method != "update":
                    # Only show Configs with no definition yet for this layer
                    table = r.table
                    # Find the records which are used
                    query = (ltable.layer_id == table.layer_id) & \
                            (table.id == r.id)
                    rows = db(query).select(ltable.config_id)
                    # Filter them out
                    ltable.config_id.requires = IS_ONE_OF(db, "gis_config.id",
                                                          "%(name)s",
                                                          not_filterby="config_id",
                                                          not_filter_opts=[row.config_id for row in rows]
                                                          )
        return True
    s3.prep = prep
    # Post-processor
    def postp(r, output):
        if r.interactive and r.method != "import":
            if not r.component:
                # Inject checkbox to enable layer in default config
                inject_enable(output)
        return output
    s3.postp = postp
    output = s3_rest_controller(rheader = s3db.gis_rheader)
    return output
# -----------------------------------------------------------------------------
def layer_bing():
    """
        RESTful CRUD controller for the Bing layer
        (records cannot be deleted or list-added: deletable/listadd disabled)
    """
    tablename = "%s_%s" % (module, resourcename)
    s3db.table(tablename)
    # CRUD Strings
    type = "Bing"
    EDIT_LAYER = T(EDIT_TYPE_LAYER_FMT % type)
    s3.crud_strings[tablename] = Storage(
        label_create=ADD_LAYER,
        title_update=EDIT_LAYER,
        msg_record_created=LAYER_ADDED,
        msg_record_modified=LAYER_UPDATED)
    s3db.configure(tablename,
                   deletable = False,
                   listadd = False,
                   )
    # Pre-processor
    def prep(r):
        if r.interactive:
            if r.component_name == "config":
                ltable = s3db.gis_layer_config
                # Hide the visible flag (not used for this layer type)
                ltable.visible.writable = ltable.visible.readable = False
                if r.method != "update":
                    # Only show Configs with no definition yet for this layer
                    table = r.table
                    # Find the records which are used
                    query = (ltable.layer_id == table.layer_id) & \
                            (table.id == r.id)
                    rows = db(query).select(ltable.config_id)
                    # Filter them out
                    ltable.config_id.requires = IS_ONE_OF(db, "gis_config.id",
                                                          "%(name)s",
                                                          not_filterby="config_id",
                                                          not_filter_opts=[row.config_id for row in rows]
                                                          )
        return True
    s3.prep = prep
    # Post-processor
    def postp(r, output):
        if r.interactive and r.method != "import":
            if not r.component:
                # Inject checkbox to enable layer in default config
                inject_enable(output)
        return output
    s3.postp = postp
    output = s3_rest_controller(rheader = s3db.gis_rheader)
    return output
# -----------------------------------------------------------------------------
def layer_empty():
    """
        RESTful CRUD controller for the Empty layer
        (records cannot be deleted or list-added: deletable/listadd disabled)
    """
    tablename = "%s_%s" % (module, resourcename)
    s3db.table(tablename)
    # CRUD Strings
    type = "Empty"
    EDIT_LAYER = T(EDIT_TYPE_LAYER_FMT % type)
    s3.crud_strings[tablename] = Storage(
        label_create=ADD_LAYER,
        title_update=EDIT_LAYER,
        msg_record_created=LAYER_ADDED,
        msg_record_modified=LAYER_UPDATED)
    s3db.configure(tablename,
                   deletable = False,
                   listadd = False,
                   )
    # Pre-processor
    def prep(r):
        if r.interactive:
            if r.component_name == "config":
                ltable = s3db.gis_layer_config
                # Hide the visible flag (not used for this layer type)
                ltable.visible.writable = ltable.visible.readable = False
                if r.method != "update":
                    # Only show Configs with no definition yet for this layer
                    table = r.table
                    # Find the records which are used
                    query = (ltable.layer_id == table.layer_id) & \
                            (table.id == r.id)
                    rows = db(query).select(ltable.config_id)
                    # Filter them out
                    ltable.config_id.requires = IS_ONE_OF(db, "gis_config.id",
                                                          "%(name)s",
                                                          not_filterby="config_id",
                                                          not_filter_opts=[row.config_id for row in rows]
                                                          )
        return True
    s3.prep = prep
    output = s3_rest_controller(rheader = s3db.gis_rheader)
    return output
# -----------------------------------------------------------------------------
def layer_google():
    """
        RESTful CRUD controller for the Google layer
        (records cannot be deleted or list-added: deletable/listadd disabled)
    """
    tablename = "%s_%s" % (module, resourcename)
    s3db.table(tablename)
    # CRUD Strings
    type = "Google"
    EDIT_LAYER = T(EDIT_TYPE_LAYER_FMT % type)
    s3.crud_strings[tablename] = Storage(
        label_create=ADD_LAYER,
        title_update=EDIT_LAYER,
        msg_record_created=LAYER_ADDED,
        msg_record_modified=LAYER_UPDATED)
    s3db.configure(tablename,
                   deletable = False,
                   listadd = False,
                   )
    # Pre-processor
    def prep(r):
        if r.interactive:
            if r.component_name == "config":
                ltable = s3db.gis_layer_config
                # Hide the visible flag (not used for this layer type)
                ltable.visible.writable = ltable.visible.readable = False
                if r.method != "update":
                    # Only show Configs with no definition yet for this layer
                    table = r.table
                    # Find the records which are used
                    query = (ltable.layer_id == table.layer_id) & \
                            (table.id == r.id)
                    rows = db(query).select(ltable.config_id)
                    # Filter them out
                    ltable.config_id.requires = IS_ONE_OF(db, "gis_config.id",
                                                          "%(name)s",
                                                          not_filterby="config_id",
                                                          not_filter_opts=[row.config_id for row in rows]
                                                          )
        return True
    s3.prep = prep
    # Post-processor
    def postp(r, output):
        if r.interactive and r.method != "import":
            if not r.component:
                # Inject checkbox to enable layer in default config
                inject_enable(output)
        return output
    s3.postp = postp
    output = s3_rest_controller(rheader = s3db.gis_rheader)
    return output
# -----------------------------------------------------------------------------
def layer_mgrs():
    """
        RESTful CRUD controller for MGRS layers
        (records cannot be deleted or list-added: deletable/listadd disabled)
    """
    tablename = "%s_%s" % (module, resourcename)
    s3db.table(tablename)
    # CRUD Strings
    type = "MGRS"
    LIST_LAYERS = T(LIST_TYPE_LAYERS_FMT % type)
    EDIT_LAYER = T(EDIT_TYPE_LAYER_FMT % type)
    NO_LAYERS = T(NO_TYPE_LAYERS_FMT % type)
    s3.crud_strings[tablename] = Storage(
        label_create=ADD_LAYER,
        title_display=LAYER_DETAILS,
        title_list=LAYERS,
        title_update=EDIT_LAYER,
        label_list_button=LIST_LAYERS,
        label_delete_button = DELETE_LAYER,
        msg_record_created=LAYER_ADDED,
        msg_record_modified=LAYER_UPDATED,
        msg_record_deleted=LAYER_DELETED,
        msg_list_empty=NO_LAYERS)
    s3db.configure(tablename,
                   deletable = False,
                   listadd = False,
                   )
    # Pre-processor
    def prep(r):
        if r.interactive:
            if r.component_name == "config":
                ltable = s3db.gis_layer_config
                # Hide the base-layer flag (not used for this layer type)
                ltable.base.writable = ltable.base.readable = False
                if r.method != "update":
                    # Only show Configs with no definition yet for this layer
                    table = r.table
                    # Find the records which are used
                    query = (ltable.layer_id == table.layer_id) & \
                            (table.id == r.id)
                    rows = db(query).select(ltable.config_id)
                    # Filter them out
                    ltable.config_id.requires = IS_ONE_OF(db, "gis_config.id",
                                                          "%(name)s",
                                                          not_filterby="config_id",
                                                          not_filter_opts=[row.config_id for row in rows]
                                                          )
        return True
    s3.prep = prep
    output = s3_rest_controller(rheader = s3db.gis_rheader)
    return output
# -----------------------------------------------------------------------------
def layer_arcrest():
    """
        RESTful CRUD controller for ArcGIS REST layers
    """
    tablename = "%s_%s" % (module, resourcename)
    s3db.table(tablename)
    # CRUD Strings
    # (renamed local from "type" to avoid shadowing the builtin;
    #  removed the unused ADD_NEW_LAYER string)
    layer_type = "ArcGIS REST"
    LAYERS = T(TYPE_LAYERS_FMT % layer_type)
    EDIT_LAYER = T(EDIT_TYPE_LAYER_FMT % layer_type)
    LIST_LAYERS = T(LIST_TYPE_LAYERS_FMT % layer_type)
    NO_LAYERS = T(NO_TYPE_LAYERS_FMT % layer_type)
    s3.crud_strings[tablename] = Storage(
        label_create=ADD_LAYER,
        title_display=LAYER_DETAILS,
        title_list=LAYERS,
        title_update=EDIT_LAYER,
        label_list_button=LIST_LAYERS,
        label_delete_button = DELETE_LAYER,
        msg_record_created=LAYER_ADDED,
        msg_record_modified=LAYER_UPDATED,
        msg_record_deleted=LAYER_DELETED,
        msg_list_empty=NO_LAYERS)
    # Custom Method
    s3db.set_method(module, resourcename,
                    method = "enable",
                    action = enable_layer)
    # Pre-processor
    def prep(r):
        if r.interactive:
            if r.component_name == "config":
                ltable = s3db.gis_layer_config
                # Hide the base-layer flag (not used for this layer type)
                ltable.base.writable = ltable.base.readable = False
                if r.method != "update":
                    # Only show Configs with no definition yet for this layer
                    table = r.table
                    # Find the records which are used
                    query = (ltable.layer_id == table.layer_id) & \
                            (table.id == r.id)
                    rows = db(query).select(ltable.config_id)
                    # Filter them out
                    ltable.config_id.requires = IS_ONE_OF(db, "gis_config.id",
                                                          "%(name)s",
                                                          not_filterby="config_id",
                                                          not_filter_opts=[row.config_id for row in rows]
                                                          )
        return True
    s3.prep = prep
    # Post-processor
    def postp(r, output):
        if r.interactive and r.method != "import":
            if not r.component:
                s3_action_buttons(r, copyable=True)
                # Inject checkbox to enable layer in default config
                inject_enable(output)
        return output
    s3.postp = postp
    output = s3_rest_controller(rheader = s3db.gis_rheader)
    return output
# -----------------------------------------------------------------------------
def layer_geojson():
    """
        RESTful CRUD controller for GeoJSON layers
    """
    tablename = "%s_%s" % (module, resourcename)
    s3db.table(tablename)
    # CRUD Strings
    # (renamed local from "type" to avoid shadowing the builtin;
    #  removed the unused ADD_NEW_LAYER string)
    layer_type = "GeoJSON"
    LAYERS = T(TYPE_LAYERS_FMT % layer_type)
    EDIT_LAYER = T(EDIT_TYPE_LAYER_FMT % layer_type)
    LIST_LAYERS = T(LIST_TYPE_LAYERS_FMT % layer_type)
    NO_LAYERS = T(NO_TYPE_LAYERS_FMT % layer_type)
    s3.crud_strings[tablename] = Storage(
        label_create=ADD_LAYER,
        title_display=LAYER_DETAILS,
        title_list=LAYERS,
        title_update=EDIT_LAYER,
        label_list_button=LIST_LAYERS,
        label_delete_button = DELETE_LAYER,
        msg_record_created=LAYER_ADDED,
        msg_record_modified=LAYER_UPDATED,
        msg_record_deleted=LAYER_DELETED,
        msg_list_empty=NO_LAYERS)
    # Pre-processor
    def prep(r):
        if r.interactive:
            if r.component_name == "config":
                ltable = s3db.gis_layer_config
                # Hide the base-layer flag (not used for this layer type)
                ltable.base.writable = ltable.base.readable = False
                if r.method != "update":
                    # Only show Configs with no definition yet for this layer
                    table = r.table
                    # Find the records which are used
                    query = (ltable.layer_id == table.layer_id) & \
                            (table.id == r.id)
                    rows = db(query).select(ltable.config_id)
                    # Filter them out
                    ltable.config_id.requires = IS_ONE_OF(db, "gis_config.id",
                                                          "%(name)s",
                                                          not_filterby="config_id",
                                                          not_filter_opts=[row.config_id for row in rows]
                                                          )
            elif r.component_name == "style":
                # Hide the gps_marker field on the style component
                field = s3db.gis_style.gps_marker
                field.writable = field.readable = False
        return True
    s3.prep = prep
    # Post-processor
    def postp(r, output):
        if r.interactive and r.method != "import":
            if not r.component:
                s3_action_buttons(r, copyable=True)
                # Inject checkbox to enable layer in default config
                inject_enable(output)
        return output
    s3.postp = postp
    output = s3_rest_controller(rheader = s3db.gis_rheader)
    return output
# -----------------------------------------------------------------------------
def layer_georss():
    """
        RESTful CRUD controller for GeoRSS layers
    """
    tablename = "%s_%s" % (module, resourcename)
    s3db.table(tablename)
    # CRUD Strings
    # (renamed local from "type" to avoid shadowing the builtin;
    #  removed the unused ADD_NEW_LAYER string)
    layer_type = "GeoRSS"
    LAYERS = T(TYPE_LAYERS_FMT % layer_type)
    EDIT_LAYER = T(EDIT_TYPE_LAYER_FMT % layer_type)
    LIST_LAYERS = T(LIST_TYPE_LAYERS_FMT % layer_type)
    NO_LAYERS = T(NO_TYPE_LAYERS_FMT % layer_type)
    s3.crud_strings[tablename] = Storage(
        label_create=ADD_LAYER,
        title_display=LAYER_DETAILS,
        title_list=LAYERS,
        title_update=EDIT_LAYER,
        label_list_button=LIST_LAYERS,
        label_delete_button = DELETE_LAYER,
        msg_record_created=LAYER_ADDED,
        msg_record_modified=LAYER_UPDATED,
        msg_record_deleted=LAYER_DELETED,
        msg_list_empty=NO_LAYERS)
    # Custom Method
    s3db.set_method(module, resourcename,
                    method = "enable",
                    action = enable_layer)
    # Pre-processor
    def prep(r):
        if r.interactive:
            if r.component_name == "config":
                ltable = s3db.gis_layer_config
                # Hide the base-layer flag (not used for this layer type)
                ltable.base.writable = ltable.base.readable = False
                if r.method != "update":
                    # Only show Configs with no definition yet for this layer
                    table = r.table
                    # Find the records which are used
                    query = (ltable.layer_id == table.layer_id) & \
                            (table.id == r.id)
                    rows = db(query).select(ltable.config_id)
                    # Filter them out
                    ltable.config_id.requires = IS_ONE_OF(db, "gis_config.id",
                                                          "%(name)s",
                                                          not_filterby="config_id",
                                                          not_filter_opts=[row.config_id for row in rows]
                                                          )
            elif r.component_name == "style":
                # Hide the gps_marker field on the style component
                field = s3db.gis_style.gps_marker
                field.writable = field.readable = False
        return True
    s3.prep = prep
    # Post-processor
    def postp(r, output):
        if r.interactive and r.method != "import":
            if not r.component:
                s3_action_buttons(r, copyable=True)
                # Inject checkbox to enable layer in default config
                inject_enable(output)
        return output
    s3.postp = postp
    output = s3_rest_controller(rheader = s3db.gis_rheader)
    return output
# -----------------------------------------------------------------------------
def layer_gpx():
    """
        RESTful CRUD controller for GPX layers
    """
    tablename = "%s_%s" % (module, resourcename)
    s3db.table(tablename)
    # Model options
    # Needed in multiple controllers, so defined in Model
    # CRUD Strings
    # (renamed local from "type" to avoid shadowing the builtin;
    #  removed the unused ADD_NEW_LAYER string)
    layer_type = "GPX"
    LAYERS = T(TYPE_LAYERS_FMT % layer_type)
    EDIT_LAYER = T(EDIT_TYPE_LAYER_FMT % layer_type)
    LIST_LAYERS = T(LIST_TYPE_LAYERS_FMT % layer_type)
    NO_LAYERS = T(NO_TYPE_LAYERS_FMT % layer_type)
    s3.crud_strings[tablename] = Storage(
        label_create=ADD_LAYER,
        title_display=LAYER_DETAILS,
        title_list=LAYERS,
        title_update=EDIT_LAYER,
        label_list_button=LIST_LAYERS,
        label_delete_button = DELETE_LAYER,
        msg_record_created=LAYER_ADDED,
        msg_record_modified=LAYER_UPDATED,
        msg_record_deleted=LAYER_DELETED,
        msg_list_empty=NO_LAYERS)
    # Pre-processor
    def prep(r):
        if r.interactive:
            if r.component_name == "config":
                ltable = s3db.gis_layer_config
                # Hide the base-layer flag (not used for this layer type)
                ltable.base.writable = ltable.base.readable = False
                if r.method != "update":
                    # Only show Configs with no definition yet for this layer
                    table = r.table
                    # Find the records which are used
                    query = (ltable.layer_id == table.layer_id) & \
                            (table.id == r.id)
                    rows = db(query).select(ltable.config_id)
                    # Filter them out
                    ltable.config_id.requires = IS_ONE_OF(db, "gis_config.id",
                                                          "%(name)s",
                                                          not_filterby="config_id",
                                                          not_filter_opts=[row.config_id for row in rows]
                                                          )
        return True
    s3.prep = prep
    # Post-processor
    def postp(r, output):
        if r.interactive and r.method != "import":
            if not r.component:
                # Inject checkbox to enable layer in default config
                inject_enable(output)
        return output
    s3.postp = postp
    output = s3_rest_controller(rheader = s3db.gis_rheader)
    return output
# -----------------------------------------------------------------------------
def layer_kml():
    """
        RESTful CRUD controller for KML layers
    """
    tablename = "%s_%s" % (module, resourcename)
    s3db.table(tablename)
    # CRUD Strings
    # (renamed local from "type" to avoid shadowing the builtin;
    #  removed the unused ADD_NEW_LAYER string)
    layer_type = "KML"
    LAYERS = T(TYPE_LAYERS_FMT % layer_type)
    EDIT_LAYER = T(EDIT_TYPE_LAYER_FMT % layer_type)
    LIST_LAYERS = T(LIST_TYPE_LAYERS_FMT % layer_type)
    NO_LAYERS = T(NO_TYPE_LAYERS_FMT % layer_type)
    s3.crud_strings[tablename] = Storage(
        label_create=ADD_LAYER,
        title_display=LAYER_DETAILS,
        title_list=LAYERS,
        title_update=EDIT_LAYER,
        label_list_button=LIST_LAYERS,
        label_delete_button = DELETE_LAYER,
        msg_record_created=LAYER_ADDED,
        msg_record_modified=LAYER_UPDATED,
        msg_record_deleted=LAYER_DELETED,
        msg_list_empty=NO_LAYERS)
    # Custom Method
    #s3db.set_method(module, resourcename,
    #                method = "enable",
    #                action = enable_layer)
    # Pre-processor
    def prep(r):
        if r.interactive:
            if r.component_name == "config":
                ltable = s3db.gis_layer_config
                # Hide the base-layer flag (not used for this layer type)
                ltable.base.writable = ltable.base.readable = False
                if r.method != "update":
                    # Only show Configs with no definition yet for this layer
                    table = r.table
                    # Find the records which are used
                    query = (ltable.layer_id == table.layer_id) & \
                            (table.id == r.id)
                    rows = db(query).select(ltable.config_id)
                    # Filter them out
                    ltable.config_id.requires = IS_ONE_OF(db, "gis_config.id",
                                                          "%(name)s",
                                                          not_filterby="config_id",
                                                          not_filter_opts=[row.config_id for row in rows]
                                                          )
        return True
    s3.prep = prep
    # Post-processor
    def postp(r, output):
        if r.interactive and r.method != "import":
            if not r.component:
                s3_action_buttons(r, copyable=True)
                # Inject checkbox to enable layer in default config
                inject_enable(output)
        return output
    s3.postp = postp
    output = s3_rest_controller(rheader = s3db.gis_rheader)
    return output
# -----------------------------------------------------------------------------
def layer_openweathermap():
    """
        RESTful CRUD controller for OpenWeatherMap layers
    """
    tablename = "%s_%s" % (module, resourcename)
    s3db.table(tablename)
    # CRUD Strings
    # (renamed local from "type" to avoid shadowing the builtin;
    #  removed the unused ADD_NEW_LAYER string)
    layer_type = "OpenWeatherMap"
    LAYERS = T(TYPE_LAYERS_FMT % layer_type)
    EDIT_LAYER = T(EDIT_TYPE_LAYER_FMT % layer_type)
    LIST_LAYERS = T(LIST_TYPE_LAYERS_FMT % layer_type)
    NO_LAYERS = T(NO_TYPE_LAYERS_FMT % layer_type)
    s3.crud_strings[tablename] = Storage(
        label_create=ADD_LAYER,
        title_display=LAYER_DETAILS,
        title_list=LAYERS,
        title_update=EDIT_LAYER,
        label_list_button=LIST_LAYERS,
        label_delete_button = DELETE_LAYER,
        msg_record_created=LAYER_ADDED,
        msg_record_modified=LAYER_UPDATED,
        msg_record_deleted=LAYER_DELETED,
        msg_list_empty=NO_LAYERS)
    # Custom Method
    s3db.set_method(module, resourcename,
                    method = "enable",
                    action = enable_layer)
    # Pre-processor
    def prep(r):
        if r.interactive:
            if r.component_name == "config":
                ltable = s3db.gis_layer_config
                # Hide the base-layer flag (not used for this layer type)
                ltable.base.writable = ltable.base.readable = False
                if r.method != "update":
                    # Only show Configs with no definition yet for this layer
                    table = r.table
                    # Find the records which are used
                    query = (ltable.layer_id == table.layer_id) & \
                            (table.id == r.id)
                    rows = db(query).select(ltable.config_id)
                    # Filter them out
                    ltable.config_id.requires = IS_ONE_OF(db, "gis_config.id",
                                                          "%(name)s",
                                                          not_filterby="config_id",
                                                          not_filter_opts=[row.config_id for row in rows]
                                                          )
            elif r.component_name == "style":
                # Hide the gps_marker field on the style component
                field = s3db.gis_style.gps_marker
                field.writable = field.readable = False
        return True
    s3.prep = prep
    # Post-processor
    def postp(r, output):
        if r.interactive and r.method != "import":
            if not r.component:
                s3_action_buttons(r)
                # Inject checkbox to enable layer in default config
                inject_enable(output)
        return output
    s3.postp = postp
    output = s3_rest_controller(rheader = s3db.gis_rheader)
    return output
# -----------------------------------------------------------------------------
def layer_shapefile():
    """
        RESTful CRUD controller for Shapefile layers

        Requests whose 2nd URL arg starts with "data" are redirected to a
        dynamically-defined per-layer table (gis_layer_shapefile_<id>)
        holding the individual features of that layer.
    """

    tablename = "%s_%s" % (module, resourcename)
    table = s3db[tablename]

    # CRUD Strings
    type = "Shapefile"
    LAYERS = T(TYPE_LAYERS_FMT % type)
    ADD_NEW_LAYER = T(ADD_NEW_TYPE_LAYER_FMT % type)
    EDIT_LAYER = T(EDIT_TYPE_LAYER_FMT % type)
    LIST_LAYERS = T(LIST_TYPE_LAYERS_FMT % type)
    NO_LAYERS = T(NO_TYPE_LAYERS_FMT % type)
    s3.crud_strings[tablename] = Storage(
        label_create=ADD_LAYER,
        title_display=LAYER_DETAILS,
        title_list=LAYERS,
        title_update=EDIT_LAYER,
        label_list_button=LIST_LAYERS,
        label_delete_button = DELETE_LAYER,
        msg_record_created=LAYER_ADDED,
        msg_record_modified=LAYER_UPDATED,
        msg_record_deleted=LAYER_DELETED,
        msg_list_empty=NO_LAYERS)

    # Custom Method to enable the layer in the default config
    s3db.set_method(module, resourcename,
                    method = "enable",
                    action = enable_layer)

    args = request.args
    if len(args) > 1:
        test = args[1]
        if test[:4] == "data":
            # This must be a request for the data held within a layer
            # Define the Table on the fly; base fields are always present
            id = args[0]
            _tablename = "gis_layer_shapefile_%s" % id
            Fields = [Field("lat", "float"),
                      Field("lon", "float"),
                      Field("wkt", "text"),
                      Field("layer_id", table),
                      ]
            append = Fields.append
            # The layer record stores its attribute schema as JSON in 'data'
            row = db(table.id == id).select(table.data,
                                            limitby=(0, 1)
                                            ).first()
            if row and row.data:
                fields = json.loads(row.data)
                for field in fields:
                    # Unicode fieldnames not supported
                    append(Field(str(field[0]), field[1]))
                if settings.get_gis_spatialdb():
                    # Add a spatial field
                    append(Field("the_geom", "geometry()"))
            s3db.define_table(_tablename, *Fields)
            # Rewrite request.args in place so the REST controller targets
            # the dynamic data table (preserving any format extension,
            # e.g. "data.geojson")
            new_arg = _tablename[4:]
            extension = test[4:]
            if extension:
                new_arg = "%s%s" % (new_arg, extension)
            args[1] = new_arg
            s3db.add_components("gis_layer_shapefile",
                                **{_tablename: "layer_id"})
            # @ToDo: onaccept to write any modified data back to the attached shapefile
            # If we need to reproject, then we need to write a .prj file out:
            #outSpatialRef.MorphToESRI()
            #file = open(outfilepath + '\\'+ outfileshortname + '.prj', 'w')
            #file.write(outSpatialRef.ExportToWkt())
            #file.close()
        else:
            # 2nd URL arg other than "data*" is not supported
            raise ValueError

    # Pre-processor
    def prep(r):
        if r.interactive:
            if r.component_name == "config":
                ltable = s3db.gis_layer_config
                ltable.base.writable = ltable.base.readable = False
                if r.method != "update":
                    # Only show Configs with no definition yet for this layer
                    table = r.table
                    # Find the records which are used
                    query = (ltable.layer_id == table.layer_id) & \
                            (table.id == r.id)
                    rows = db(query).select(ltable.config_id)
                    # Filter them out
                    ltable.config_id.requires = IS_ONE_OF(db, "gis_config.id",
                                                          "%(name)s",
                                                          not_filterby="config_id",
                                                          not_filter_opts=[row.config_id for row in rows]
                                                          )
        return True
    s3.prep = prep

    # Post-processor
    def postp(r, output):
        if r.interactive and r.method != "import":
            if not r.component:
                s3_action_buttons(r)
                # Inject checkbox to enable layer in default config
                inject_enable(output)
        return output
    s3.postp = postp

    output = s3_rest_controller(rheader = s3db.gis_rheader)
    return output
# -----------------------------------------------------------------------------
def layer_theme():
    """
        RESTful CRUD controller for Theme layers

        NOTE: imports are routed to the gis_layer_config resource
        (see the bottom of this function), which is why the CRUD strings
        are only set inside prep for non-config requests.
    """

    # Pre-processor
    def prep(r):
        if r.interactive:
            if r.component_name == "config":
                ltable = s3db.gis_layer_config
                ltable.base.writable = ltable.base.readable = False
                if r.method != "update":
                    # Only show Configs with no definition yet for this layer
                    table = r.table
                    # Find the records which are used
                    query = (ltable.layer_id == table.layer_id) & \
                            (table.id == r.id)
                    rows = db(query).select(ltable.config_id)
                    # Filter them out
                    ltable.config_id.requires = IS_ONE_OF(db, "gis_config.id",
                                                          "%(name)s",
                                                          not_filterby="config_id",
                                                          not_filter_opts=[row.config_id for row in rows]
                                                          )
            else:
                # CRUD Strings
                type = "Theme"
                LAYERS = T(TYPE_LAYERS_FMT % type)
                ADD_NEW_LAYER = T(ADD_NEW_TYPE_LAYER_FMT % type)
                EDIT_LAYER = T(EDIT_TYPE_LAYER_FMT % type)
                LIST_LAYERS = T(LIST_TYPE_LAYERS_FMT % type)
                NO_LAYERS = T(NO_TYPE_LAYERS_FMT % type)
                s3.crud_strings["gis_layer_theme"] = Storage(
                    label_create=ADD_LAYER,
                    title_display=LAYER_DETAILS,
                    title_list=LAYERS,
                    title_update=EDIT_LAYER,
                    label_list_button=LIST_LAYERS,
                    label_delete_button = DELETE_LAYER,
                    msg_record_created=LAYER_ADDED,
                    msg_record_modified=LAYER_UPDATED,
                    msg_record_deleted=LAYER_DELETED,
                    msg_list_empty=NO_LAYERS)
        return True
    s3.prep = prep

    # Post-processor
    def postp(r, output):
        if r.interactive and r.method != "import":
            if not r.component:
                s3_action_buttons(r, copyable=True)
                # Inject checkbox to enable layer in default config
                inject_enable(output)
            # Inject Import links into the footer
            s3.rfooter = DIV(A(T("Import Layers"),
                               _href=URL(args="import"),
                               _class="action-btn"),
                             A(T("Import Data"),
                               _href=URL(f="theme_data", args="import"),
                               _class="action-btn"),
                             )
        return output
    s3.postp = postp

    if "import" in request.args:
        # Import to 'layer_config' resource instead
        output = s3_rest_controller("gis", "layer_config",
                                    csv_template="layer_theme",
                                    csv_stylesheet="layer_theme.xsl",
                                    )
    else:
        output = s3_rest_controller(rheader = s3db.gis_rheader)
    return output
# -----------------------------------------------------------------------------
def theme_data():
    """ RESTful CRUD controller for Theme Layer data """

    # Make the layer reference optional for CSV imports
    field = s3db.gis_layer_theme_id()
    field.requires = IS_EMPTY_OR(field.requires)

    # CSV column headers, so no T()
    csv_extra = [{"label": "Layer",
                  "field": field,
                  },
                 ]
    return s3_rest_controller(csv_extra_fields = csv_extra)
# -----------------------------------------------------------------------------
def layer_tms():
    """ RESTful CRUD controller for TMS layers """

    tablename = "%s_%s" % (module, resourcename)
    s3db.table(tablename)

    # CRUD Strings for this layer type
    layer_type = "TMS"
    LAYERS = T(TYPE_LAYERS_FMT % layer_type)
    ADD_NEW_LAYER = T(ADD_NEW_TYPE_LAYER_FMT % layer_type)
    EDIT_LAYER = T(EDIT_TYPE_LAYER_FMT % layer_type)
    LIST_LAYERS = T(LIST_TYPE_LAYERS_FMT % layer_type)
    NO_LAYERS = T(NO_TYPE_LAYERS_FMT % layer_type)
    s3.crud_strings[tablename] = Storage(label_create = ADD_LAYER,
                                         title_display = LAYER_DETAILS,
                                         title_list = LAYERS,
                                         title_update = EDIT_LAYER,
                                         label_list_button = LIST_LAYERS,
                                         label_delete_button = DELETE_LAYER,
                                         msg_record_created = LAYER_ADDED,
                                         msg_record_modified = LAYER_UPDATED,
                                         msg_record_deleted = LAYER_DELETED,
                                         msg_list_empty = NO_LAYERS,
                                         )

    # Custom method to enable the layer in the default config
    s3db.set_method(module, resourcename,
                    method = "enable",
                    action = enable_layer)

    def prep(r):
        # Pre-processor: adjust the form on the config tab
        if not r.interactive:
            return True
        if r.component_name == "config":
            ltable = s3db.gis_layer_config
            # Hide the 'visible' flag
            ltable.visible.writable = ltable.visible.readable = False
            if r.method != "update":
                # Only show Configs with no definition yet for this layer
                table = r.table
                used = db((ltable.layer_id == table.layer_id) & \
                          (table.id == r.id)).select(ltable.config_id)
                ltable.config_id.requires = \
                    IS_ONE_OF(db, "gis_config.id",
                              "%(name)s",
                              not_filterby = "config_id",
                              not_filter_opts = [row.config_id for row in used],
                              )
        return True
    s3.prep = prep

    def postp(r, output):
        # Post-processor: action buttons + enable-in-default-config checkbox
        if r.interactive and r.method != "import" and not r.component:
            s3_action_buttons(r, copyable=True)
            inject_enable(output)
        return output
    s3.postp = postp

    return s3_rest_controller(rheader = s3db.gis_rheader)
# -----------------------------------------------------------------------------
def layer_wfs():
    """ RESTful CRUD controller for WFS layers """

    tablename = "%s_%s" % (module, resourcename)
    s3db.table(tablename)

    # CRUD Strings for this layer type
    layer_type = "WFS"
    LAYERS = T(TYPE_LAYERS_FMT % layer_type)
    ADD_NEW_LAYER = T(ADD_NEW_TYPE_LAYER_FMT % layer_type)
    EDIT_LAYER = T(EDIT_TYPE_LAYER_FMT % layer_type)
    LIST_LAYERS = T(LIST_TYPE_LAYERS_FMT % layer_type)
    NO_LAYERS = T(NO_TYPE_LAYERS_FMT % layer_type)
    s3.crud_strings[tablename] = Storage(label_create = ADD_LAYER,
                                         title_display = LAYER_DETAILS,
                                         title_list = LAYERS,
                                         title_update = EDIT_LAYER,
                                         label_list_button = LIST_LAYERS,
                                         label_delete_button = DELETE_LAYER,
                                         msg_record_created = LAYER_ADDED,
                                         msg_record_modified = LAYER_UPDATED,
                                         msg_record_deleted = LAYER_DELETED,
                                         msg_list_empty = NO_LAYERS,
                                         )

    def prep(r):
        # Pre-processor: adjust the form on the config tab
        if not r.interactive:
            return True
        if r.component_name == "config":
            ltable = s3db.gis_layer_config
            # Hide the 'base' flag
            ltable.base.writable = ltable.base.readable = False
            if r.method != "update":
                # Only show Configs with no definition yet for this layer
                table = r.table
                used = db((ltable.layer_id == table.layer_id) & \
                          (table.id == r.id)).select(ltable.config_id)
                ltable.config_id.requires = \
                    IS_ONE_OF(db, "gis_config.id",
                              "%(name)s",
                              not_filterby = "config_id",
                              not_filter_opts = [row.config_id for row in used],
                              )
        return True
    s3.prep = prep

    def postp(r, output):
        # Post-processor: action buttons + enable-in-default-config checkbox
        if r.interactive and r.method != "import" and not r.component:
            s3_action_buttons(r, copyable=True)
            inject_enable(output)
        return output
    s3.postp = postp

    return s3_rest_controller(rheader = s3db.gis_rheader)
# -----------------------------------------------------------------------------
def layer_wms():
    """ RESTful CRUD controller for WMS layers """

    tablename = "%s_%s" % (module, resourcename)
    s3db.table(tablename)

    # CRUD Strings for this layer type
    layer_type = "WMS"
    LAYERS = T(TYPE_LAYERS_FMT % layer_type)
    ADD_NEW_LAYER = T(ADD_NEW_TYPE_LAYER_FMT % layer_type)
    EDIT_LAYER = T(EDIT_TYPE_LAYER_FMT % layer_type)
    LIST_LAYERS = T(LIST_TYPE_LAYERS_FMT % layer_type)
    NO_LAYERS = T(NO_TYPE_LAYERS_FMT % layer_type)
    s3.crud_strings[tablename] = Storage(label_create = ADD_LAYER,
                                         title_display = LAYER_DETAILS,
                                         title_list = LAYERS,
                                         title_update = EDIT_LAYER,
                                         label_list_button = LIST_LAYERS,
                                         label_delete_button = DELETE_LAYER,
                                         msg_record_created = LAYER_ADDED,
                                         msg_record_modified = LAYER_UPDATED,
                                         msg_record_deleted = LAYER_DELETED,
                                         msg_list_empty = NO_LAYERS,
                                         )

    # Custom method to enable the layer in the default config
    s3db.set_method(module, resourcename,
                    method = "enable",
                    action = enable_layer)

    def prep(r):
        # Pre-processor: adjust the form on the config tab
        if not r.interactive:
            return True
        if r.component_name == "config":
            ltable = s3db.gis_layer_config
            if r.method != "update":
                # Only show Configs with no definition yet for this layer
                table = r.table
                used = db((ltable.layer_id == table.layer_id) & \
                          (table.id == r.id)).select(ltable.config_id)
                ltable.config_id.requires = \
                    IS_ONE_OF(db, "gis_config.id",
                              "%(name)s",
                              not_filterby = "config_id",
                              not_filter_opts = [row.config_id for row in used],
                              )
        return True
    s3.prep = prep

    def postp(r, output):
        # Post-processor: action buttons + enable-in-default-config checkbox
        if r.interactive and r.method != "import" and not r.component:
            s3_action_buttons(r, copyable=True)
            inject_enable(output)
        return output
    s3.postp = postp

    return s3_rest_controller(rheader = s3db.gis_rheader)
# -----------------------------------------------------------------------------
def layer_xyz():
    """ RESTful CRUD controller for XYZ layers """

    tablename = "%s_%s" % (module, resourcename)
    s3db.table(tablename)

    # CRUD Strings for this layer type
    layer_type = "XYZ"
    LAYERS = T(TYPE_LAYERS_FMT % layer_type)
    ADD_NEW_LAYER = T(ADD_NEW_TYPE_LAYER_FMT % layer_type)
    EDIT_LAYER = T(EDIT_TYPE_LAYER_FMT % layer_type)
    LIST_LAYERS = T(LIST_TYPE_LAYERS_FMT % layer_type)
    NO_LAYERS = T(NO_TYPE_LAYERS_FMT % layer_type)
    s3.crud_strings[tablename] = Storage(label_create = ADD_LAYER,
                                         title_display = LAYER_DETAILS,
                                         title_list = LAYERS,
                                         title_update = EDIT_LAYER,
                                         label_list_button = LIST_LAYERS,
                                         label_delete_button = DELETE_LAYER,
                                         msg_record_created = LAYER_ADDED,
                                         msg_record_modified = LAYER_UPDATED,
                                         msg_record_deleted = LAYER_DELETED,
                                         msg_list_empty = NO_LAYERS,
                                         )

    # Custom method to enable the layer in the default config
    s3db.set_method(module, resourcename,
                    method = "enable",
                    action = enable_layer)

    def prep(r):
        # Pre-processor: adjust the form on the config tab
        if not r.interactive:
            return True
        if r.component_name == "config":
            ltable = s3db.gis_layer_config
            # Hide the 'visible' flag
            ltable.visible.writable = ltable.visible.readable = False
            if r.method != "update":
                # Only show Configs with no definition yet for this layer
                table = r.table
                used = db((ltable.layer_id == table.layer_id) & \
                          (table.id == r.id)).select(ltable.config_id)
                ltable.config_id.requires = \
                    IS_ONE_OF(db, "gis_config.id",
                              "%(name)s",
                              not_filterby = "config_id",
                              not_filter_opts = [row.config_id for row in used],
                              )
        return True
    s3.prep = prep

    def postp(r, output):
        # Post-processor: action buttons + enable-in-default-config checkbox
        if r.interactive and r.method != "import" and not r.component:
            s3_action_buttons(r, copyable=True)
            inject_enable(output)
        return output
    s3.postp = postp

    return s3_rest_controller(rheader = s3db.gis_rheader)
# -----------------------------------------------------------------------------
def layer_js():
    """ RESTful CRUD controller for JS layers """

    # Restricted to Map Admins when map security is enabled
    if settings.get_security_map() and not auth.s3_has_role("MAP_ADMIN"):
        auth.permission.fail()

    tablename = "%s_%s" % (module, resourcename)
    s3db.table(tablename)

    # CRUD Strings for this layer type
    layer_type = "JS"
    LAYERS = T(TYPE_LAYERS_FMT % layer_type)
    ADD_NEW_LAYER = T(ADD_NEW_TYPE_LAYER_FMT % layer_type)
    EDIT_LAYER = T(EDIT_TYPE_LAYER_FMT % layer_type)
    LIST_LAYERS = T(LIST_TYPE_LAYERS_FMT % layer_type)
    NO_LAYERS = T(NO_TYPE_LAYERS_FMT % layer_type)
    s3.crud_strings[tablename] = Storage(label_create = ADD_LAYER,
                                         title_display = LAYER_DETAILS,
                                         title_list = LAYERS,
                                         title_update = EDIT_LAYER,
                                         label_list_button = LIST_LAYERS,
                                         label_delete_button = DELETE_LAYER,
                                         msg_record_created = LAYER_ADDED,
                                         msg_record_modified = LAYER_UPDATED,
                                         msg_record_deleted = LAYER_DELETED,
                                         msg_list_empty = NO_LAYERS,
                                         )

    def prep(r):
        # Pre-processor: adjust the form on the config tab
        if not r.interactive:
            return True
        if r.component_name == "config":
            ltable = s3db.gis_layer_config
            if r.method != "update":
                # Only show Configs with no definition yet for this layer
                table = r.table
                used = db((ltable.layer_id == table.layer_id) & \
                          (table.id == r.id)).select(ltable.config_id)
                ltable.config_id.requires = \
                    IS_ONE_OF(db, "gis_config.id",
                              "%(name)s",
                              not_filterby = "config_id",
                              not_filter_opts = [row.config_id for row in used],
                              )
        return True
    s3.prep = prep

    def postp(r, output):
        # Post-processor: inject the enable-in-default-config checkbox
        if r.interactive and r.method != "import" and not r.component:
            inject_enable(output)
        return output
    s3.postp = postp

    return s3_rest_controller(rheader = s3db.gis_rheader)
# =============================================================================
def cache_feed():
    """
        RESTful CRUD controller
        - cache GeoRSS/KML feeds & make them available to the Map Viewing
          Client as GeoJSON

        The create.georss/create.kml methods are designed to be called
        asynchronously using S3Task, so that:
        * a feed can be refreshed on a schedule rather than on every client
          request (especially useful when unzipping or following network
          links)
        * a BBOX strategy can be used so clients only download features in
          their area of interest
        * the feed can be parsed via XSLT to extract whatever information
          we want
        * client-side support for Markers & Popups is unified, and
          OpenLayers.js is slightly smaller
        * the filesystem dependency is removed, to support scaling
          (EC2, GAE or clustering)
        * multiple feeds could be clustered together (would require
          rewriting how layers are toggled if they should stay independent)
        * Layer Filtering can be dynamic (change layer.protocol.url & call
          the refresh strategy)

        NB This can't be simply called 'cache' as this conflicts with the
        global cache object
    """

    # Load Models
    #s3db.table("gis_cache")

    #if kml:
        # Unzip & Follow Network Links
        #download_kml.delay(url)

    return s3_rest_controller("gis", "cache")
# =============================================================================
def feature_query():
    """
        RESTful CRUD controller
        - cache Feature Queries & make them available to the Map Viewing
          Client as GeoJSON

        Only the GeoJSON representation is served; any other format is
        redirected to the homepage with an error.

        The create.georss/create.kml methods are designed to be called
        asynchronously using S3Task
    """

    table = s3db.gis_feature_query

    # Exclude any records without Lat/Lon
    s3.filter = (table.lat != None) & (table.lon != None)

    # Parse the Request
    req = s3_request()
    if req.representation != "geojson":
        # Only GeoJSON is supported here
        session.error = ERROR.BAD_FORMAT
        redirect(URL(c="default", f="index", args=None, vars=None))

    # Execute the request
    return req()
# =============================================================================
def poi_type():
    """ RESTful CRUD controller for PoI Types """

    output = s3_rest_controller()
    return output
# -----------------------------------------------------------------------------
def poi():
    """
        RESTful CRUD controller for PoIs

        On create, a Lat/Lon or WKT passed in the GET vars is turned into
        a new gis_location record which becomes the default for the PoI's
        location_id.
    """

    def prep(r):
        if r.http == "GET":
            if r.method in ("create", "create.popup"):
                field = r.table.location_id
                field.label = ""
                # Lat/Lon from Feature?
                # @ToDo: S3PoIWidget() instead to pickup the passed Lat/Lon/WKT
                lat = get_vars.get("lat", None)
                if lat is not None:
                    lon = get_vars.get("lon", None)
                    if lon is not None:
                        # Create a location from the passed Lat/Lon and
                        # pre-select it in the form
                        form_vars = Storage(lat = float(lat),
                                            lon = float(lon),
                                            )
                        form = Storage(vars = form_vars)
                        s3db.gis_location_onvalidation(form)
                        id = s3db.gis_location.insert(**form_vars)
                        field.default = id
                # WKT from Feature?
                wkt = get_vars.get("wkt", None)
                if wkt is not None:
                    # Create a location from the passed WKT and
                    # pre-select it in the form
                    form_vars = Storage(wkt = wkt,
                                        )
                    form = Storage(vars = form_vars)
                    s3db.gis_location_onvalidation(form)
                    id = s3db.gis_location.insert(**form_vars)
                    field.default = id
            elif r.method in ("update", "update.popup"):
                table = r.table
                table.location_id.label = ""
                # Show audit fields on update forms
                table.created_by.readable = True
                table.created_on.readable = True
                table.created_on.represent = lambda d: \
                    s3base.S3DateTime.date_represent(d)
            elif r.representation == "plain":
                # Map Popup
                table = r.table
                table.created_by.readable = True
                table.created_on.readable = True
                table.created_on.represent = lambda d: \
                    s3base.S3DateTime.date_represent(d)
                # Refresh the layer after editing from the popup
                # @ToDo: Allow multiple PoI layers
                ftable = s3db.gis_layer_feature
                layer = db(ftable.name == "PoIs").select(ftable.layer_id,
                                                         limitby=(0, 1)
                                                         ).first()
                if layer:
                    popup_edit_url = r.url(method = "update",
                                           representation = "popup",
                                           vars = {"refresh_layer": layer.layer_id},
                                           )
                else:
                    popup_edit_url = r.url(method = "update",
                                           representation = "popup",
                                           )
                s3db.configure("gis_poi",
                               popup_edit_url = popup_edit_url,
                               )
        return True
    s3.prep = prep

    def postp(r, output):
        if r.interactive:
            # Normal Action Buttons
            s3_action_buttons(r, deletable=False)
            # Custom Action Buttons
            s3.actions += [{"label": s3_str(T("Show on Map")),
                            "_class": "action-btn",
                            "url": URL(f = "index",
                                       vars = {"poi": "[id]"},
                                       ),
                            },
                           ]
        return output
    s3.postp = postp

    dt_bulk_actions = [(T("Delete"), "delete")]
    return s3_rest_controller(dtargs = {"dt_bulk_actions": dt_bulk_actions})
# =============================================================================
def display_feature():
    """
        Cut-down version of the Map Viewing Client.
        Used by gis_LocationRepresent() to show just this feature on the map.

        Called by the s3_viewMap() JavaScript

        URL args: [0] = gis_location id
        URL vars: c/f = controller/function used to find a matching Feature
                  Layer for styling; r = record id to filter that layer;
                  popup=1 = fixed-size window dimensions
    """

    # The Location
    location_id = request.args[0]

    table = s3db.gis_location
    ftable = s3db.gis_layer_feature
    stable = s3db.gis_style
    gtable = s3db.gis_config

    # Check user is authorised to access record
    if not auth.s3_has_permission("read", table, location_id):
        session.error = T("No access to this record!")
        raise HTTP(401, body=current.xml.json_message(False, 401, session.error))

    location = db(table.id == location_id).select(table.id,
                                                  table.parent,
                                                  table.lat,
                                                  table.lon,
                                                  table.wkt,
                                                  limitby=(0, 1)
                                                  ).first()
    if not location:
        session.error = T("Record not found!")
        raise HTTP(404, body=current.xml.json_message(False, 404, session.error))

    # Centre on Location; fall back to parent/own Lat/Lon lookup
    lat = location.lat
    lon = location.lon
    if (lat is None) or (lon is None):
        if location.parent:
            # Skip the current record if we can
            latlon = gis.get_latlon(location.parent)
        elif location.id:
            latlon = gis.get_latlon(location.id)
        if latlon:
            lat = latlon["lat"]
            lon = latlon["lon"]
        else:
            session.error = T("No location information defined!")
            raise HTTP(404, body=current.xml.json_message(False, 404, session.error))

    # Default zoom +2 (same as a single zoom on a cluster)
    # config = gis.get_config()
    # zoom = config.zoom + 2
    bounds = gis.get_bounds(features = [location])

    options = {"lat": lat,
               "lon": lon,
               #"zoom": zoom,
               "bbox": bounds,
               "window": False,
               "closable": False,
               "collapsed": True,
               }

    # Layers: look for a Feature Layer matching the calling controller/function
    controller = get_vars.c
    function = get_vars.f
    query = ((ftable.controller == controller) & \
             (ftable.function == function) & \
             (ftable.layer_id == stable.layer_id) & \
             # Marker not specific to a record
             (stable.record_id == None) & \
             # Marker available to all or 'Default' Profile
             ((stable.config_id == None) | ((stable.config_id == gtable.id) & \
                                            (gtable.name == "Default")))
             )
    row = db(query).select(ftable.layer_id,
                           limitby = (0, 1)
                           ).first()
    if row:
        # Display feature using Layer Styling
        feature_opts = {"name": T("Represent"),
                        "id": "resource_represent",
                        "active": True,
                        "layer_id": row.layer_id,
                        }
        record_id = get_vars.r
        if record_id:
            feature_opts["filter"] = "~.id=%s" % record_id
        options["feature_resources"] = [feature_opts]
    else:
        # Just display feature geometry
        options["features"] = [location.wkt]

    # Add Width & Height if opened in Window
    if get_vars.popup == "1":
        options["width"] = 640
        options["height"] = 480
    else:
        options["height"] = settings.get_gis_map_selector_height()

    response.view = "gis/iframe.html"
    map = gis.show_map(**options)
    return {"map": map}
# -----------------------------------------------------------------------------
def display_features():
    """
        Cut-down version of the Map Viewing Client.
        Used as a link from the RHeader.
            URL generated server-side
        Shows all locations matching a query.

        Required URL vars: module, resource, instance, jresource
        e.g. http://127.0.0.1:8000/eden/gis/display_features&module=pr&resource=person&instance=1&jresource=presence

        @ToDo: Most recent location is marked using a bigger Marker.
        @ToDo: Move to S3Method (will then use AAA etc).
    """

    ltable = s3db.gis_location

    # Parse the URL, check for implicit resources, extract the primary record
    req_vars = request.vars
    if not all(k in req_vars for k in ("module", "resource",
                                       "instance", "jresource")):
        session.error = T("Insufficient vars: Need module, resource, jresource, instance")
        raise HTTP(400, body=current.xml.json_message(False, 400, session.error))
    res_module = req_vars.module
    resource = req_vars.resource
    instance = int(req_vars.instance)
    jresource = req_vars.jresource

    tablename = "%s_%s" % (res_module, resource)
    s3db.table(tablename)
    # Bugfix: was db[table] — 'table' was undefined at this point (NameError)
    table = db[tablename]
    component, pkey, fkey = s3db.get_component(table, jresource)
    jtable = db[str(component.table)]
    query = (jtable[fkey] == table[pkey]) & (table.id == instance)
    # Filter out deleted
    query &= (table.deleted == False)
    # Filter out inaccessible locations
    query2 = (ltable.id == jtable.location_id)
    query2 &= auth.s3_accessible_query("read", ltable)

    features = db(query).select(ltable.wkt,
                                left = [ltable.on(query2)],
                                )

    # Calculate an appropriate BBox
    bounds = gis.get_bounds(features=features)

    map = gis.show_map(features = [f.wkt for f in features],
                       bbox = bounds,
                       window = True,
                       closable = False,
                       collapsed = True,
                       )
    return {"map": map}
# =============================================================================
def geocode():
    """
        Geocode a location
        - designed to be called via AJAX POST

        Looks up Lx in our own database and returns Bounds
        Passes on Street names to 3rd party services and returns a Point

        @param L0: Country (as ID)
        @param L1: State/Province (as ID)
        @param L2: County/District (as ID)
        @param L3: City/Town (as ID)
        @param L4: Village/Neighborhood (as ID)
        @param L5: Village/Census Tract (as ID)
        @param street: Street Address
        @param postcode: Postcode
    """

    # Read the request
    vars = request.post_vars
    street = vars.get("address", None)
    postcode = vars.get("postcode", None)

    # Collect the hierarchy of Lx IDs, skipping levels not supplied
    Lx_ids = []
    for level in ("L0", "L1", "L2", "L3", "L4", "L5"):
        value = vars.get(level, None)
        if value:
            Lx_ids.append(int(value))

    # Is this a Street or Lx?
    if street:
        # Send request to external geocoders to get a Point
        gis.google_geocode_retry = False
        results = gis.geocode(street, postcode, Lx_ids)
    else:
        # Lx: Lookup Bounds in our own database
        # @ToDo
        # Not needed by S3LocationSelector as it downloads bounds with options
        results = "NotImplementedError"

    results = json.dumps(results, separators=SEPARATORS)
    response.headers["Content-Type"] = "application/json"
    return results
# -----------------------------------------------------------------------------
def geocode_r():
    """
        Reverse-Geocode a location
        - designed to be called via AJAX POST

        Looks up Lx in our own database
        @ToDo: if not found then calls out to 3rd party services

        @param lat: float (as string)
        @param lon: float (as string)
    """

    # Read the request
    post_vars = request.post_vars

    # Reverse Geocode & return results as JSON
    results = gis.geocode_r(post_vars.get("lat", None),
                            post_vars.get("lon", None))
    results = json.dumps(results, separators=SEPARATORS)
    response.headers["Content-Type"] = "application/json"
    return results
# -----------------------------------------------------------------------------
def geocode_manual():
    """
        Manually Geocode locations

        Presents a filtered list of specific (non-hierarchy) locations
        missing a Lat, with a map in create/update views so the missing
        coordinates can be set by clicking.

        @ToDo: make this accessible by Anonymous users?
    """

    table = s3db.gis_location

    # Filter: only specific locations (no Lx level) without a Lat
    query = (table.level == None)
    # @ToDo: make this role-dependent
    # - Normal users do the Lat/Lons
    # - Special users do the Codes
    _filter = (table.lat == None)
    s3.filter = (query & _filter)

    # Hide unnecessary fields
    table.level.readable = table.level.writable = False
    table.gis_feature_type.readable = table.gis_feature_type.writable = False
    table.wkt.readable = table.wkt.writable = False
    table.comments.readable = table.comments.writable = False

    # Customise Labels for specific use-cases
    #table.name.label = T("Building Name") # Building Assessments-specific
    #table.parent.label = T("Suburb") # Christchurch-specific

    # Allow prep to pass vars back to the controller
    vars = {}

    # Pre-processor
    def prep(r, vars):
        # Helper: read Lat/Lon/level of the requested record
        def get_location_info():
            table = s3db.gis_location
            return db(table.id == r.id).select(table.lat,
                                               table.lon,
                                               table.level,
                                               limitby=(0, 1)).first()
        if r.method in (None, "list") and r.record is None:
            # List
            pass
        elif r.method in ("delete", "search"):
            pass
        else:
            # Add Map to allow locations to be found this way
            # @ToDo: DRY with one in location()
            config = gis.get_config()
            lat = config.lat
            lon = config.lon
            zoom = config.zoom
            feature_queries = []

            if r.method == "create":
                add_feature = True
                add_feature_active = True
            else:
                if r.method == "update":
                    add_feature = True
                    add_feature_active = False
                else:
                    # Read
                    add_feature = False
                    add_feature_active = False

                # NOTE(review): relies on 'location' possibly already being
                # defined in the surrounding scope; otherwise looked up here
                try:
                    location
                except:
                    location = get_location_info()
                if location and location.lat is not None and location.lon is not None:
                    lat = location.lat
                    lon = location.lon
                # Same as a single zoom on a cluster
                zoom = zoom + 2

            # @ToDo: Does map make sense if the user is updating a group?
            # If not, maybe leave it out. OTOH, might be nice to select
            # admin regions to include in the group by clicking on them in
            # the map. Would involve boundaries...
            _map = gis.show_map(lat = lat,
                                lon = lon,
                                zoom = zoom,
                                feature_queries = feature_queries,
                                add_feature = add_feature,
                                add_feature_active = add_feature_active,
                                toolbar = True,
                                collapsed = True)

            # Pass the map back to the main controller via the shared dict
            vars.update(_map=_map)
        return True
    s3.prep = lambda r, vars=vars: prep(r, vars)

    s3db.configure(table._tablename,
                   listadd=False,
                   list_fields=["id",
                                "name",
                                "address",
                                "parent"
                                ])

    output = s3_rest_controller("gis", "location")

    # Splice the map (if any) into the view's output dict
    _map = vars.get("_map", None)
    if _map and isinstance(output, dict):
        output.update(_map=_map)

    return output
# =============================================================================
def geoexplorer():
    """
        Embedded GeoExplorer: https://github.com/opengeo/GeoExplorer

        This is used as a demo of GXP components which we want to pull into
        the Map Viewing Client. No real attempt to integrate/optimise.

        @ToDo: Get working
            If gxp is loaded in debug mode then it barfs Ext:
                ext-all-debug.js (line 10535)
                    types[config.xtype || defaultType] is not a constructor
                    [Break On This Error] return config.render ? con...config.xtype || defaultType](config);
            In non-debug mode, the suite breaks, but otherwise the UI loads
            fine & is operational. However no tiles are ever visible!
    """

    response.title = "GeoExplorer"

    # @ToDo: Optimise to a single query of table
    return {#"config": gis.get_config(),
            "bing_key": settings.get_gis_api_bing(),
            "google_key": settings.get_gis_api_google(),
            "yahoo_key": settings.get_gis_api_yahoo(),
            # http://eden.sahanafoundation.org/wiki/BluePrintGISPrinting
            "print_service": settings.get_gis_print_service(),
            "geoserver_url": settings.get_gis_geoserver_url(),
            }
# -----------------------------------------------------------------------------
def about():
    """ Custom View for GeoExplorer """

    output = {}
    return output
# -----------------------------------------------------------------------------
def maps():
    """
        Map Save/Publish Handler for GeoExplorer

        NB
         The models for this are currently not enabled in modules/s3db/gis.py
         This hasn't been tested at all with the new version of GeoExplorer
    """

    table = s3db.gis_wmc
    ltable = s3db.gis_wmc_layer

    def read_source():
        """ Read & JSON-decode the request body (POST/PUT payload) """
        source = request.body.read()
        if isinstance(source, basestring):
            from s3compat import StringIO
            source = StringIO(source)
        return json.load(source)

    def layer_ids(source):
        """
            Resolve the layers in a decoded WMC dict to gis_wmc_layer
            record IDs, inserting records for layers not yet in the db.
            (Shared by the POST & PUT handlers, which previously
             duplicated this logic verbatim.)
        """
        layers = []
        for layer in source["map"]["layers"]:
            # Optional attributes default to None when absent
            opacity = layer.get("opacity")
            name = layer.get("name")
            query = (ltable.source == layer["source"]) & \
                    (ltable.name == name) & \
                    (ltable.visibility == layer["visibility"]) & \
                    (ltable.opacity == opacity)
            _layer = db(query).select(ltable.id,
                                      limitby=(0, 1)).first()
            if _layer:
                # This is an existing layer
                layers.append(_layer.id)
            else:
                # This is a new layer:
                # add a new record to the gis_wmc_layer table
                _layer = ltable.insert(source=layer["source"],
                                       name=name,
                                       visibility=layer["visibility"],
                                       opacity=opacity,
                                       type_=layer.get("type"),
                                       title=layer["title"],
                                       group_=layer.get("group"),
                                       fixed=layer.get("fixed"),
                                       transparent=layer.get("transparent"),
                                       img_format=layer.get("format"))
                layers.append(_layer)
        return layers

    method = request.env.request_method
    if method == "GET":
        # This is a request to read the config of a saved map
        # Which map are we reading?
        record_id = request.args[0]
        if not record_id:
            raise HTTP(501)
        # Read the WMC record
        record = db(table.id == record_id).select(limitby=(0, 1)).first()
        # & linked records
        #projection = db(db.gis_projection.id == record.projection).select(limitby=(0, 1)).first()
        # Put details into the correct structure
        map_dict = {}
        output = {"map": map_dict}
        map_dict["center"] = [record.lat, record.lon]
        map_dict["zoom"] = record.zoom
        # @ToDo: Read Projection (we generally use 900913 & no way to edit this yet)
        map_dict["projection"] = "EPSG:900913"
        map_dict["units"] = "m"
        map_dict["maxResolution"] = 156543.0339
        # @ToDo: Read Layers
        #layers.append(dict(source="google", title="Google Terrain", name="TERRAIN", group="background"))
        #layers.append(dict(source="ol", group="background", fixed=True, type="OpenLayers.Layer", args=[ "None", {"visibility":False} ]))
        map_dict["layers"] = layers = []
        for _layer in record.layer_id:
            layer = db(ltable.id == _layer).select(limitby=(0, 1)).first()
            layer_dict = {"source": layer.source,
                          "title": layer.title,
                          "name": layer.name,
                          "group": layer.group_,
                          "type": layer.type_,
                          "format": layer.img_format,
                          "visibility": layer.visibility,
                          "transparent": layer.transparent,
                          "opacity": layer.opacity,
                          "fixed": layer.fixed,
                          }
            if layer.type_ == "OpenLayers.Layer":
                # Add args
                layer_dict["args"] = [ "None", {"visibility":False} ]
            layers.append(layer_dict)
        # @ToDo: Read Metadata (no way of editing this yet)
        # Encode as JSON & output to the browser
        response.headers["Content-Type"] = "application/json"
        return json.dumps(output, separators=SEPARATORS)

    elif method == "POST":
        # This is a request to save/publish a new map
        source = read_source()
        # @ToDo: Projection (we generally use 900913 & no way to edit this yet)
        lat = source["map"]["center"][0]
        lon = source["map"]["center"][1]
        zoom = source["map"]["zoom"]
        layers = layer_ids(source)
        # @ToDo: Metadata (no way of editing this yet)
        # Save a record in the WMC table
        record_id = table.insert(lat=lat, lon=lon, zoom=zoom, layer_id=layers)
        # Return the ID of the saved record for the Bookmark
        return json.dumps({"id": record_id}, separators=SEPARATORS)

    elif method == "PUT":
        # This is a request to save/publish an existing map
        # Which map are we updating?
        record_id = request.args[0]
        if not record_id:
            raise HTTP(501)
        source = read_source()
        # @ToDo: Projection (unlikely to change)
        lat = source["map"]["center"][0]
        lon = source["map"]["center"][1]
        zoom = source["map"]["zoom"]
        layers = layer_ids(source)
        # @ToDo: Metadata (no way of editing this yet)
        # Update the record in the WMC table
        db(table.id == record_id).update(lat=lat, lon=lon, zoom=zoom, layer_id=layers)
        # Return the ID of the saved record for the Bookmark
        return json.dumps({"id": record_id}, separators=SEPARATORS)

    # Unsupported method - abort
    raise HTTP(501)
# =============================================================================
def potlatch2():
    """
        Custom View for the Potlatch2 OpenStreetMap editor
        http://wiki.openstreetmap.org/wiki/Potlatch_2
    """

    config = gis.get_config()
    pe_id = auth.s3_user_pe_id(auth.user.id) if auth.s3_logged_in() else None
    # Look up the user's OSM OAuth credentials
    # (fixed: previously passed auth.user.pe_id, ignoring the pe_id
    #  just computed via s3_user_pe_id)
    opt = s3db.auth_user_options_get_osm(pe_id) if pe_id else None
    if opt:
        osm_oauth_consumer_key, osm_oauth_consumer_secret = opt
        gpx_url = None
        if "gpx_id" in request.vars:
            # Pass in a GPX Track
            # @ToDo: Set the viewport based on the Track, if one is specified
            table = s3db.gis_layer_track
            query = (table.id == request.vars.gpx_id)
            track = db(query).select(table.track,
                                     limitby=(0, 1)).first()
            if track:
                gpx_url = "%s/%s" % (URL(c="default", f="download"),
                                     track.track)
        # Viewport: explicit lat/lon beats the map config's default
        if "lat" in request.vars:
            lat = request.vars.lat
            lon = request.vars.lon
        else:
            lat = config.lat
            lon = config.lon
        if "zoom" in request.vars:
            zoom = request.vars.zoom
        else:
            # This isn't good as it makes for too large an area to edit
            #zoom = config.zoom
            zoom = 14
        site_name = settings.get_system_name_short()
        return {"lat": lat,
                "lon": lon,
                "zoom": zoom,
                "gpx_url": gpx_url,
                "site_name": site_name,
                "key": osm_oauth_consumer_key,
                "secret": osm_oauth_consumer_secret,
                }
    else:
        session.warning = T("To edit OpenStreetMap, you need to edit the OpenStreetMap settings in your Map Config")
        redirect(URL(c="pr", f="person", args=["config"]))
# =============================================================================
def proxy():
    """
        Based on http://trac.openlayers.org/browser/trunk/openlayers/examples/proxy.cgi

        This is a blind proxy that we use to get around browser
        restrictions that prevent the Javascript from loading pages not on the
        same server as the Javascript. This has several problems: it's less
        efficient, it might break some sites, and it's a security risk because
        people can use this proxy to browse the web and possibly do bad stuff
        with it. It only loads pages via http and https, but it can load any
        content type. It supports GET and POST requests.
    """

    import socket
    from s3compat import URLError, urllib2, urlopen
    import cgi

    if auth.is_logged_in():
        # Authenticated users can use our Proxy
        allowedHosts = None
        allowed_content_types = None
    else:
        # @ToDo: Link to map_service_catalogue to prevent Open Proxy abuse
        # (although less-critical since we restrict content type)
        allowedHosts = []
        #append = allowedHosts.append
        #letable = s3db.gis_layer_entity
        #rows = db(letable.deleted == False).select(letable.layer_id, letable.instance_type)
        # @ToDo: Better query (single query by instance_type)
        #for row in rows:
        #    table = db[row.instance_type]
        #    # @ToDo: Check url2/url3 for relevant instance_types
        #    r = db(table.layer_id == row.layer_id).select(table.url, limitby=(0, 1)).first()
        #    if r:
        #        append(r.url)
        allowed_content_types = (
            "application/json", "text/json", "text/x-json",
            "application/xml", "text/xml",
            "application/vnd.ogc.se_xml",           # OGC Service Exception
            "application/vnd.ogc.se+xml",           # OGC Service Exception
            "application/vnd.ogc.success+xml",      # OGC Success (SLD Put)
            "application/vnd.ogc.wms_xml",          # WMS Capabilities
            "application/vnd.ogc.context+xml",      # WMC
            "application/vnd.ogc.gml",              # GML
            "application/vnd.ogc.sld+xml",          # SLD
            "application/vnd.google-earth.kml+xml", # KML
            )

    method = request["wsgi"].environ["REQUEST_METHOD"]
    if method == "POST":
        # This can probably use same call as GET in web2py
        qs = request["wsgi"].environ["QUERY_STRING"]
        # NOTE(review): cgi.parse_qs is long-deprecated & the cgi module is
        # removed in Python 3.13 - migrate to urllib.parse.parse_qs when
        # s3compat provides a py2/3 shim for it
        d = cgi.parse_qs(qs)
        if "url" in d:
            url = d["url"][0]
        else:
            url = "http://www.openlayers.org"
    else:
        # GET
        if "url" in request.vars:
            url = request.vars.url
        else:
            session.error = T("Need a 'url' argument!")
            raise HTTP(400, body=current.xml.json_message(False, 400, session.error))

    # Debian has no default timeout so connection can get stuck with dodgy servers
    socket.setdefaulttimeout(30)
    try:
        host = url.split("/")[2]
        if allowedHosts and host not in allowedHosts:
            raise HTTP(403, "Host not permitted: %s" % host)
        elif url.startswith("http://") or url.startswith("https://"):
            if method == "POST":
                length = int(request["wsgi"].environ["CONTENT_LENGTH"])
                headers = {"Content-Type": request["wsgi"].environ["CONTENT_TYPE"]}
                body = request.body.read(length)
                r = urllib2.Request(url, body, headers)
                try:
                    y = urlopen(r)
                except URLError:
                    # (fixed: message previously interpolated the Request
                    #  object r rather than the URL)
                    raise HTTP(504, "Unable to reach host %s" % url)
            else:
                # GET
                try:
                    y = urlopen(url)
                except URLError:
                    raise HTTP(504, "Unable to reach host %s" % url)
            # Maintain the incoming Content-Type, if any
            i = y.info()
            if "Content-Type" in i:
                ct = i["Content-Type"]
            else:
                ct = None
            if allowed_content_types:
                # Check for allowed content types
                if not ct:
                    raise HTTP(406, "Unknown Content")
                elif not ct.split(";")[0] in allowed_content_types:
                    # @ToDo?: Allow any content type from allowed hosts (any port)
                    #if allowedHosts and not host in allowedHosts:
                    raise HTTP(403, "Content-Type not permitted")
            msg = y.read()
            y.close()
            if ct:
                # Maintain the incoming Content-Type
                response.headers["Content-Type"] = ct
            return msg
        else:
            # Bad Request
            raise HTTP(400)
    except HTTP:
        # Propagate the deliberate HTTP responses raised above (400/403/
        # 406/504) rather than masking them as a 500 below
        raise
    except Exception as e:
        raise HTTP(500, "Some unexpected error occurred. Error text was: %s" % str(e))
# =============================================================================
def screenshot():
    """
        Take a screenshot of a map

        Optional 'size' var selects a standard paper size; otherwise
        explicit 'height'/'width' vars are used, defaulting to A4.
    """

    config_id = request.args(0) or 1

    # (height, width) in pixels for 300ppi output per paper size
    # 72ppi equivalents: Letter 612x792, A4 595x842, A3 842x1191,
    # A2 1191x1684, A1 1684x2384, A0 2384x3375
    PAPER_SIZES = {"Letter": (2550, 3300),
                   "A4": (2480, 3508),
                   "A3": (3508, 4962),
                   "A2": (4962, 7017),
                   "A1": (7017, 9933),
                   "A0": (9933, 14061),
                   }

    size = get_vars.get("size")
    if size in PAPER_SIZES:
        height, width = PAPER_SIZES[size]
    else:
        # Fall back to explicit dimensions (A4 @ 300ppi by default)
        try:
            height = int(get_vars.get("height"))
        except (ValueError, TypeError):
            height = 2480
        try:
            width = int(get_vars.get("width"))
        except (ValueError, TypeError):
            width = 3508

    filename = gis.get_screenshot(config_id, height=height, width=width)
    if filename:
        redirect(URL(c="static", f="cache",
                     args=["jpg", filename]))
    else:
        raise HTTP(500, "Screenshot not taken")
# END =========================================================================
| mit |
MechCoder/sympy | sympy/physics/mechanics/linearize.py | 62 | 15341 | from __future__ import print_function, division
__all__ = ['Linearizer']
from sympy import Matrix, eye, zeros, Dummy
from sympy.utilities.iterables import flatten
from sympy.physics.vector import dynamicsymbols
from sympy.physics.mechanics.functions import msubs
import collections
class Linearizer(object):
    """This object holds the general model form for a dynamic system.
    This model is used for computing the linearized form of the system,
    while properly dealing with constraints leading to dependent
    coordinates and speeds.

    Attributes
    ----------
    f_0, f_1, f_2, f_3, f_4, f_c, f_v, f_a : Matrix
        Matrices holding the general system form.
    q, u, r : Matrix
        Matrices holding the generalized coordinates, speeds, and
        input vectors.
    q_i, u_i : Matrix
        Matrices of the independent generalized coordinates and speeds.
    q_d, u_d : Matrix
        Matrices of the dependent generalized coordinates and speeds.
    perm_mat : Matrix
        Permutation matrix such that [q_ind, u_ind]^T = perm_mat*[q, u]^T
    """

    def __init__(self, f_0, f_1, f_2, f_3, f_4, f_c, f_v, f_a, q, u,
            q_i=None, q_d=None, u_i=None, u_d=None, r=None, lams=None):
        """
        Parameters
        ----------
        f_0, f_1, f_2, f_3, f_4, f_c, f_v, f_a : array_like
            System of equations holding the general system form.
            Supply empty array or Matrix if the parameter
            doesn't exist.
        q : array_like
            The generalized coordinates.
        u : array_like
            The generalized speeds
        q_i, u_i : array_like, optional
            The independent generalized coordinates and speeds.
        q_d, u_d : array_like, optional
            The dependent generalized coordinates and speeds.
        r : array_like, optional
            The input variables.
        lams : array_like, optional
            The lagrange multipliers
        """
        # Generalized equation form
        self.f_0 = Matrix(f_0)
        self.f_1 = Matrix(f_1)
        self.f_2 = Matrix(f_2)
        self.f_3 = Matrix(f_3)
        self.f_4 = Matrix(f_4)
        self.f_c = Matrix(f_c)
        self.f_v = Matrix(f_v)
        self.f_a = Matrix(f_a)

        # Generalized equation variables
        self.q = Matrix(q)
        self.u = Matrix(u)
        none_handler = lambda x: Matrix(x) if x else Matrix()
        self.q_i = none_handler(q_i)
        self.q_d = none_handler(q_d)
        self.u_i = none_handler(u_i)
        self.u_d = none_handler(u_d)
        self.r = none_handler(r)
        self.lams = none_handler(lams)

        # Derivatives of generalized equation variables
        self._qd = self.q.diff(dynamicsymbols._t)
        self._ud = self.u.diff(dynamicsymbols._t)
        # If the user doesn't actually use generalized variables, and the
        # qd and u vectors have any intersecting variables, this can cause
        # problems. We'll fix this with some hackery, and Dummy variables
        dup_vars = set(self._qd).intersection(self.u)
        self._qd_dup = Matrix([var if var not in dup_vars else Dummy()
            for var in self._qd])

        # Derive dimesion terms
        l = len(self.f_c)
        m = len(self.f_v)
        n = len(self.q)
        o = len(self.u)
        s = len(self.r)
        k = len(self.lams)
        dims = collections.namedtuple('dims', ['l', 'm', 'n', 'o', 's', 'k'])
        self._dims = dims(l, m, n, o, s, k)

        self._setup_done = False

    def _setup(self):
        # Calculations here only need to be run once. They are moved out of
        # the __init__ method to increase the speed of Linearizer creation.
        self._form_permutation_matrices()
        self._form_block_matrices()
        self._form_coefficient_matrices()
        self._setup_done = True

    def _form_permutation_matrices(self):
        """Form the permutation matrices Pq and Pu."""

        # Extract dimension variables
        l, m, n, o, s, k = self._dims
        # Compute permutation matrices
        if n != 0:
            self._Pq = permutation_matrix(self.q, Matrix([self.q_i, self.q_d]))
            if l > 0:
                self._Pqi = self._Pq[:, :-l]
                self._Pqd = self._Pq[:, -l:]
            else:
                self._Pqi = self._Pq
                self._Pqd = Matrix()
        if o != 0:
            self._Pu = permutation_matrix(self.u, Matrix([self.u_i, self.u_d]))
            if m > 0:
                self._Pui = self._Pu[:, :-m]
                self._Pud = self._Pu[:, -m:]
            else:
                self._Pui = self._Pu
                self._Pud = Matrix()
        # Compute combination permutation matrix for computing A and B
        P_col1 = Matrix([self._Pqi, zeros(o + k, n - l)])
        P_col2 = Matrix([zeros(n, o - m), self._Pui, zeros(k, o - m)])
        if P_col1:
            if P_col2:
                self.perm_mat = P_col1.row_join(P_col2)
            else:
                self.perm_mat = P_col1
        else:
            self.perm_mat = P_col2

    def _form_coefficient_matrices(self):
        """Form the coefficient matrices C_0, C_1, and C_2."""

        # Extract dimension variables
        l, m, n, o, s, k = self._dims
        # Build up the coefficient matrices C_0, C_1, and C_2
        # If there are configuration constraints (l > 0), form C_0 as normal.
        # If not, C_0 is I_(nxn). Note that this works even if n=0
        if l > 0:
            f_c_jac_q = self.f_c.jacobian(self.q)
            self._C_0 = (eye(n) - self._Pqd * (f_c_jac_q *
                    self._Pqd).LUsolve(f_c_jac_q)) * self._Pqi
        else:
            self._C_0 = eye(n)
        # If there are motion constraints (m > 0), form C_1 and C_2 as normal.
        # If not, C_1 is 0, and C_2 is I_(oxo). Note that this works even if
        # o = 0.
        if m > 0:
            f_v_jac_u = self.f_v.jacobian(self.u)
            temp = f_v_jac_u * self._Pud
            if n != 0:
                f_v_jac_q = self.f_v.jacobian(self.q)
                self._C_1 = -self._Pud * temp.LUsolve(f_v_jac_q)
            else:
                self._C_1 = zeros(o, n)
            self._C_2 = (eye(o) - self._Pud *
                    temp.LUsolve(f_v_jac_u)) * self._Pui
        else:
            self._C_1 = zeros(o, n)
            self._C_2 = eye(o)

    def _form_block_matrices(self):
        """Form the block matrices for composing M, A, and B."""

        # Extract dimension variables
        l, m, n, o, s, k = self._dims
        # Block Matrix Definitions. These are only defined if under certain
        # conditions. If undefined, an empty matrix is used instead
        if n != 0:
            self._M_qq = self.f_0.jacobian(self._qd)
            self._A_qq = -(self.f_0 + self.f_1).jacobian(self.q)
        else:
            self._M_qq = Matrix()
            self._A_qq = Matrix()
        if n != 0 and m != 0:
            self._M_uqc = self.f_a.jacobian(self._qd_dup)
            self._A_uqc = -self.f_a.jacobian(self.q)
        else:
            self._M_uqc = Matrix()
            self._A_uqc = Matrix()
        if n != 0 and o - m + k != 0:
            self._M_uqd = self.f_3.jacobian(self._qd_dup)
            self._A_uqd = -(self.f_2 + self.f_3 + self.f_4).jacobian(self.q)
        else:
            self._M_uqd = Matrix()
            self._A_uqd = Matrix()
        if o != 0 and m != 0:
            self._M_uuc = self.f_a.jacobian(self._ud)
            self._A_uuc = -self.f_a.jacobian(self.u)
        else:
            self._M_uuc = Matrix()
            self._A_uuc = Matrix()
        if o != 0 and o - m + k != 0:
            self._M_uud = self.f_2.jacobian(self._ud)
            self._A_uud = -(self.f_2 + self.f_3).jacobian(self.u)
        else:
            self._M_uud = Matrix()
            self._A_uud = Matrix()
        if o != 0 and n != 0:
            self._A_qu = -self.f_1.jacobian(self.u)
        else:
            self._A_qu = Matrix()
        if k != 0 and o - m + k != 0:
            self._M_uld = self.f_4.jacobian(self.lams)
        else:
            self._M_uld = Matrix()
        if s != 0 and o - m + k != 0:
            self._B_u = -self.f_3.jacobian(self.r)
        else:
            self._B_u = Matrix()

    def linearize(self, op_point=None, A_and_B=False, simplify=False):
        """Linearize the system about the operating point. Note that
        q_op, u_op, qd_op, ud_op must satisfy the equations of motion.
        These may be either symbolic or numeric.

        Parameters
        ----------
        op_point : dict or iterable of dicts, optional
            Dictionary or iterable of dictionaries containing the operating
            point conditions. These will be substituted in to the linearized
            system before the linearization is complete. Leave blank if you
            want a completely symbolic form. Note that any reduction in
            symbols (whether substituted for numbers or expressions with a
            common parameter) will result in faster runtime.
        A_and_B : bool, optional
            If A_and_B=False (default), (M, A, B) is returned for forming
            [M]*[q, u]^T = [A]*[q_ind, u_ind]^T + [B]r. If A_and_B=True,
            (A, B) is returned for forming dx = [A]x + [B]r, where
            x = [q_ind, u_ind]^T.
        simplify : bool, optional
            Determines if returned values are simplified before return.
            For large expressions this may be time consuming. Default is False.

        Potential Issues
        ----------------
            Note that the process of solving with A_and_B=True is
            computationally intensive if there are many symbolic parameters.
            For this reason, it may be more desirable to use the default
            A_and_B=False, returning M, A, and B. More values may then be
            substituted in to these matrices later on. The state space form can
            then be found as A = P.T*M.LUsolve(A), B = P.T*M.LUsolve(B), where
            P = Linearizer.perm_mat.
        """

        # Run the setup if needed:
        if not self._setup_done:
            self._setup()

        # Compose dict of operating conditions
        # NOTE: collections.Iterable moved to collections.abc and was
        # removed from the collections namespace in Python 3.10; fall back
        # to the old location on Python 2
        iterable_cls = getattr(collections, "abc", collections).Iterable
        if isinstance(op_point, dict):
            op_point_dict = op_point
        elif isinstance(op_point, iterable_cls):
            op_point_dict = {}
            for op in op_point:
                op_point_dict.update(op)
        else:
            op_point_dict = {}

        # Extract dimension variables
        l, m, n, o, s, k = self._dims

        # Rename terms to shorten expressions
        M_qq = self._M_qq
        M_uqc = self._M_uqc
        M_uqd = self._M_uqd
        M_uuc = self._M_uuc
        M_uud = self._M_uud
        M_uld = self._M_uld
        A_qq = self._A_qq
        A_uqc = self._A_uqc
        A_uqd = self._A_uqd
        A_qu = self._A_qu
        A_uuc = self._A_uuc
        A_uud = self._A_uud
        B_u = self._B_u
        C_0 = self._C_0
        C_1 = self._C_1
        C_2 = self._C_2

        # Build up Mass Matrix
        #     |M_qq    0_nxo   0_nxk|
        # M = |M_uqc   M_uuc   0_mxk|
        #     |M_uqd   M_uud   M_uld|
        if o != 0:
            col2 = Matrix([zeros(n, o), M_uuc, M_uud])
        if k != 0:
            col3 = Matrix([zeros(n + m, k), M_uld])
        if n != 0:
            col1 = Matrix([M_qq, M_uqc, M_uqd])
            if o != 0 and k != 0:
                M = col1.row_join(col2).row_join(col3)
            elif o != 0:
                M = col1.row_join(col2)
            else:
                M = col1
        elif k != 0:
            M = col2.row_join(col3)
        else:
            M = col2
        M_eq = msubs(M, op_point_dict)

        # Build up state coefficient matrix A
        #     |(A_qq + A_qu*C_1)*C_0       A_qu*C_2|
        # A = |(A_uqc + A_uuc*C_1)*C_0    A_uuc*C_2|
        #     |(A_uqd + A_uud*C_1)*C_0    A_uud*C_2|
        # Col 1 is only defined if n != 0
        if n != 0:
            r1c1 = A_qq
            if o != 0:
                r1c1 += (A_qu * C_1)
            r1c1 = r1c1 * C_0
            if m != 0:
                r2c1 = A_uqc
                if o != 0:
                    r2c1 += (A_uuc * C_1)
                r2c1 = r2c1 * C_0
            else:
                r2c1 = Matrix()
            if o - m + k != 0:
                r3c1 = A_uqd
                if o != 0:
                    r3c1 += (A_uud * C_1)
                r3c1 = r3c1 * C_0
            else:
                r3c1 = Matrix()
            col1 = Matrix([r1c1, r2c1, r3c1])
        else:
            col1 = Matrix()
        # Col 2 is only defined if o != 0
        if o != 0:
            if n != 0:
                r1c2 = A_qu * C_2
            else:
                r1c2 = Matrix()
            if m != 0:
                r2c2 = A_uuc * C_2
            else:
                r2c2 = Matrix()
            if o - m + k != 0:
                r3c2 = A_uud * C_2
            else:
                r3c2 = Matrix()
            col2 = Matrix([r1c2, r2c2, r3c2])
        else:
            col2 = Matrix()
        if col1:
            if col2:
                Amat = col1.row_join(col2)
            else:
                Amat = col1
        else:
            Amat = col2
        Amat_eq = msubs(Amat, op_point_dict)

        # Build up the B matrix if there are forcing variables
        #     |0_(n + m)xs|
        # B = |B_u        |
        if s != 0 and o - m + k != 0:
            Bmat = zeros(n + m, s).col_join(B_u)
            Bmat_eq = msubs(Bmat, op_point_dict)
        else:
            Bmat_eq = Matrix()

        # kwarg A_and_B indicates to return  A, B for forming the equation
        # dx = [A]x + [B]r, where x = [q_indnd, u_indnd]^T,
        if A_and_B:
            A_cont = self.perm_mat.T * M_eq.LUsolve(Amat_eq)
            if Bmat_eq:
                B_cont = self.perm_mat.T * M_eq.LUsolve(Bmat_eq)
            else:
                # Bmat = Matrix([]), so no need to sub
                B_cont = Bmat_eq
            if simplify:
                A_cont.simplify()
                B_cont.simplify()
            return A_cont, B_cont
        # Otherwise return M, A, B for forming the equation
        # [M]dx = [A]x + [B]r, where x = [q, u]^T
        else:
            if simplify:
                M_eq.simplify()
                Amat_eq.simplify()
                Bmat_eq.simplify()
            return M_eq, Amat_eq, Bmat_eq
def permutation_matrix(orig_vec, per_vec):
    """Compute the permutation matrix to change order of
    orig_vec into order of per_vec.

    Parameters
    ----------
    orig_vec : array_like
        Symbols in original ordering.
    per_vec : array_like
        Symbols in new ordering.

    Returns
    -------
    p_matrix : Matrix
        Permutation matrix such that orig_vec == (p_matrix * per_vec).
    """
    if not isinstance(orig_vec, (list, tuple)):
        orig_vec = flatten(orig_vec)
    if not isinstance(per_vec, (list, tuple)):
        per_vec = flatten(per_vec)
    if set(orig_vec) != set(per_vec):
        raise ValueError("orig_vec and per_vec must be the same length, " +
                "and contain the same symbols.")
    # Map each symbol to its first position in orig_vec: O(n) overall,
    # instead of an O(n) list.index call per symbol (O(n**2)).
    # setdefault keeps the *first* occurrence, matching list.index.
    index_of = {}
    for pos, sym in enumerate(orig_vec):
        index_of.setdefault(sym, pos)
    p_matrix = zeros(len(orig_vec))
    for i, sym in enumerate(per_vec):
        p_matrix[i, index_of[sym]] = 1
    return p_matrix
| bsd-3-clause |
yorvic/.vim | bundle/python-mode/pylibs/pylama/checkers/pylint/logilab/astng/as_string.py | 27 | 18381 | # copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of logilab-astng.
#
# logilab-astng is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 2.1 of the License, or (at your
# option) any later version.
#
# logilab-astng is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
"""This module renders ASTNG nodes as string:
* :func:`to_code` function returns an equivalent (hopefully valid) python string
* :func:`dump` function return an internal representation of nodes found
in the tree, useful for debugging or understanding the tree structure
"""
import sys
INDENT = ' ' # 4 spaces ; keep indentation variable
def dump(node, ids=False):
    """return a nice astng tree representation as a single string.

    :param ids: if true, we also print the ids (useful for debugging)
    """
    lines = []
    _repr_tree(node, lines, ids=ids)
    return "\n".join(lines)
def _repr_tree(node, result, indent='', _done=None, ids=False):
    """build a tree representation of a node, appending lines to result"""
    if _done is None:
        _done = set()
    # anything without _astng_fields is not an astng node: nothing to render
    if not hasattr(node, '_astng_fields'):
        return
    # guard against cycles in the tree
    if node in _done:
        result.append('%sloop in tree: %s' % (indent, node))
        return
    _done.add(node)

    label = str(node)
    if ids:
        label += ' . \t%x' % id(node)
    result.append(indent + label)

    indent = indent + INDENT
    for field in node._astng_fields:
        value = getattr(node, field)
        if not isinstance(value, (list, tuple)):
            result.append(indent + field + " = ")
            _repr_tree(value, result, indent, _done, ids)
            continue
        result.append(indent + field + " = [")
        for child in value:
            if isinstance(child, (list, tuple)):
                # special case for Dict # FIXME
                _repr_tree(child[0], result, indent, _done, ids)
                _repr_tree(child[1], result, indent, _done, ids)
                result.append(indent + ',')
            else:
                _repr_tree(child, result, indent, _done, ids)
        result.append(indent + "]")
class AsStringVisitor(object):
"""Visitor to render an ASTNG node as a valid python code string"""
    def __call__(self, node):
        """Makes this visitor behave as a simple function"""
        return node.accept(self)

    def _stmt_list(self, stmts):
        """return a list of nodes to string"""
        # render each statement, drop empty renderings, then indent the
        # whole body by one level for use inside a block statement
        stmts = '\n'.join([nstr for nstr in [n.accept(self) for n in stmts] if nstr])
        return INDENT + stmts.replace('\n', '\n'+INDENT)
    ## visit_<node> methods ###########################################

    def visit_arguments(self, node):
        """return an astng.Arguments node as string"""
        return node.format_args()

    def visit_assattr(self, node):
        """return an astng.AssAttr node as string"""
        # rendered exactly like a plain attribute access
        return self.visit_getattr(node)

    def visit_assert(self, node):
        """return an astng.Assert node as string"""
        if node.fail:
            return 'assert %s, %s' % (node.test.accept(self),
                                      node.fail.accept(self))
        return 'assert %s' % node.test.accept(self)

    def visit_assname(self, node):
        """return an astng.AssName node as string"""
        return node.name

    def visit_assign(self, node):
        """return an astng.Assign node as string"""
        # chained assignment: all targets joined by ' = '
        lhs = ' = '.join([n.accept(self) for n in node.targets])
        return '%s = %s' % (lhs, node.value.accept(self))

    def visit_augassign(self, node):
        """return an astng.AugAssign node as string"""
        return '%s %s %s' % (node.target.accept(self), node.op, node.value.accept(self))

    def visit_backquote(self, node):
        """return an astng.Backquote node as string"""
        return '`%s`' % node.value.accept(self)

    def visit_binop(self, node):
        """return an astng.BinOp node as string"""
        # both operands are parenthesized to preserve evaluation order
        return '(%s) %s (%s)' % (node.left.accept(self), node.op, node.right.accept(self))

    def visit_boolop(self, node):
        """return an astng.BoolOp node as string"""
        return (' %s ' % node.op).join(['(%s)' % n.accept(self)
                                        for n in node.values])

    def visit_break(self, node):
        """return an astng.Break node as string"""
        return 'break'

    def visit_callfunc(self, node):
        """return an astng.CallFunc node as string"""
        expr_str = node.func.accept(self)
        args = [arg.accept(self) for arg in node.args]
        if node.starargs:
            args.append( '*' + node.starargs.accept(self))
        if node.kwargs:
            args.append( '**' + node.kwargs.accept(self))
        return '%s(%s)' % (expr_str, ', '.join(args))

    def visit_class(self, node):
        """return an astng.Class node as string"""
        decorate = node.decorators and node.decorators.accept(self) or ''
        bases = ', '.join([n.accept(self) for n in node.bases])
        bases = bases and '(%s)' % bases or ''
        docs = node.doc and '\n%s"""%s"""' % (INDENT, node.doc) or ''
        return '\n\n%sclass %s%s:%s\n%s\n' % (decorate, node.name, bases, docs,
                                              self._stmt_list( node.body))

    def visit_compare(self, node):
        """return an astng.Compare node as string"""
        rhs_str = ' '.join(['%s %s' % (op, expr.accept(self))
                            for op, expr in node.ops])
        return '%s %s' % (node.left.accept(self), rhs_str)

    def visit_comprehension(self, node):
        """return an astng.Comprehension node as string"""
        ifs = ''.join([ ' if %s' % n.accept(self) for n in node.ifs])
        return 'for %s in %s%s' % (node.target.accept(self),
                                   node.iter.accept(self), ifs )

    def visit_const(self, node):
        """return an astng.Const node as string"""
        return repr(node.value)

    def visit_continue(self, node):
        """return an astng.Continue node as string"""
        return 'continue'
    def visit_delete(self, node): # XXX check if correct
        """return an astng.Delete node as string"""
        return 'del %s' % ', '.join([child.accept(self)
                                     for child in node.targets])

    def visit_delattr(self, node):
        """return an astng.DelAttr node as string"""
        # rendered exactly like a plain attribute access
        return self.visit_getattr(node)

    def visit_delname(self, node):
        """return an astng.DelName node as string"""
        return node.name

    def visit_decorators(self, node):
        """return an astng.Decorators node as string"""
        return '@%s\n' % '\n@'.join([item.accept(self) for item in node.nodes])

    def visit_dict(self, node):
        """return an astng.Dict node as string"""
        return '{%s}' % ', '.join(['%s: %s' % (key.accept(self),
                                               value.accept(self)) for key, value in node.items])

    def visit_dictcomp(self, node):
        """return an astng.DictComp node as string"""
        return '{%s: %s %s}' % (node.key.accept(self), node.value.accept(self),
                                ' '.join([n.accept(self) for n in node.generators]))

    def visit_discard(self, node):
        """return an astng.Discard node as string"""
        return node.value.accept(self)

    def visit_emptynode(self, node):
        """dummy method for visiting an Empty node"""
        return ''

    def visit_excepthandler(self, node):
        """return an astng.ExceptHandler node as string"""
        if node.type:
            if node.name:
                excs = 'except %s, %s' % (node.type.accept(self),
                                          node.name.accept(self))
            else:
                excs = 'except %s' % node.type.accept(self)
        else:
            excs = 'except'
        return '%s:\n%s' % (excs, self._stmt_list(node.body))

    def visit_ellipsis(self, node):
        """return an astng.Ellipsis node as string"""
        return '...'

    def visit_empty(self, node):
        """return an Empty node as string"""
        return ''

    def visit_exec(self, node):
        """return an astng.Exec node as string"""
        if node.locals:
            return 'exec %s in %s, %s' % (node.expr.accept(self),
                                          node.locals.accept(self),
                                          node.globals.accept(self))
        if node.globals:
            return 'exec %s in %s' % (node.expr.accept(self),
                                      node.globals.accept(self))
        return 'exec %s' % node.expr.accept(self)

    def visit_extslice(self, node):
        """return an astng.ExtSlice node as string"""
        return ','.join( [dim.accept(self) for dim in node.dims] )

    def visit_for(self, node):
        """return an astng.For node as string"""
        fors = 'for %s in %s:\n%s' % (node.target.accept(self),
                                      node.iter.accept(self),
                                      self._stmt_list( node.body))
        if node.orelse:
            fors = '%s\nelse:\n%s' % (fors, self._stmt_list(node.orelse))
        return fors

    def visit_from(self, node):
        """return an astng.From node as string"""
        return 'from %s import %s' % ('.' * (node.level or 0) + node.modname,
                                      _import_string(node.names))

    def visit_function(self, node):
        """return an astng.Function node as string"""
        decorate = node.decorators and node.decorators.accept(self) or ''
        docs = node.doc and '\n%s"""%s"""' % (INDENT, node.doc) or ''
        return '\n%sdef %s(%s):%s\n%s' % (decorate, node.name, node.args.accept(self),
                                          docs, self._stmt_list(node.body))
    def visit_genexpr(self, node):
        """return an astng.GenExpr node as string"""
        return '(%s %s)' % (node.elt.accept(self), ' '.join([n.accept(self)
                                                             for n in node.generators]))

    def visit_getattr(self, node):
        """return an astng.Getattr node as string"""
        return '%s.%s' % (node.expr.accept(self), node.attrname)

    def visit_global(self, node):
        """return an astng.Global node as string"""
        return 'global %s' % ', '.join(node.names)

    def visit_if(self, node):
        """return an astng.If node as string"""
        ifs = ['if %s:\n%s' % (node.test.accept(self), self._stmt_list(node.body))]
        if node.orelse:# XXX use elif ???
            ifs.append('else:\n%s' % self._stmt_list(node.orelse))
        return '\n'.join(ifs)

    def visit_ifexp(self, node):
        """return an astng.IfExp node as string"""
        return '%s if %s else %s' % (node.body.accept(self),
                                     node.test.accept(self), node.orelse.accept(self))

    def visit_import(self, node):
        """return an astng.Import node as string"""
        return 'import %s' % _import_string(node.names)

    def visit_keyword(self, node):
        """return an astng.Keyword node as string"""
        return '%s=%s' % (node.arg, node.value.accept(self))

    def visit_lambda(self, node):
        """return an astng.Lambda node as string"""
        return 'lambda %s: %s' % (node.args.accept(self), node.body.accept(self))

    def visit_list(self, node):
        """return an astng.List node as string"""
        return '[%s]' % ', '.join([child.accept(self) for child in node.elts])

    def visit_listcomp(self, node):
        """return an astng.ListComp node as string"""
        return '[%s %s]' % (node.elt.accept(self), ' '.join([n.accept(self)
                                                             for n in node.generators]))

    def visit_module(self, node):
        """return an astng.Module node as string"""
        docs = node.doc and '"""%s"""\n\n' % node.doc or ''
        return docs + '\n'.join([n.accept(self) for n in node.body]) + '\n\n'

    def visit_name(self, node):
        """return an astng.Name node as string"""
        return node.name

    def visit_pass(self, node):
        """return an astng.Pass node as string"""
        return 'pass'

    def visit_print(self, node):
        """return an astng.Print node as string"""
        nodes = ', '.join([n.accept(self) for n in node.values])
        if not node.nl:
            # no trailing newline: keep the trailing comma
            nodes = '%s,' % nodes
        if node.dest:
            return 'print >> %s, %s' % (node.dest.accept(self), nodes)
        return 'print %s' % nodes

    def visit_raise(self, node):
        """return an astng.Raise node as string"""
        if node.exc:
            if node.inst:
                if node.tback:
                    return 'raise %s, %s, %s' % (node.exc.accept(self),
                                                 node.inst.accept(self),
                                                 node.tback.accept(self))
                return 'raise %s, %s' % (node.exc.accept(self),
                                         node.inst.accept(self))
            return 'raise %s' % node.exc.accept(self)
        return 'raise'

    def visit_return(self, node):
        """return an astng.Return node as string"""
        if node.value:
            return 'return %s' % node.value.accept(self)
        else:
            return 'return'

    def visit_index(self, node):
        """return an astng.Index node as string"""
        return node.value.accept(self)

    def visit_set(self, node):
        """return an astng.Set node as string"""
        return '{%s}' % ', '.join([child.accept(self) for child in node.elts])

    def visit_setcomp(self, node):
        """return an astng.SetComp node as string"""
        return '{%s %s}' % (node.elt.accept(self), ' '.join([n.accept(self)
                                                             for n in node.generators]))
def visit_slice(self, node):
"""return a astng.Slice node as string"""
lower = node.lower and node.lower.accept(self) or ''
upper = node.upper and node.upper.accept(self) or ''
step = node.step and node.step.accept(self) or ''
if step:
return '%s:%s:%s' % (lower, upper, step)
return '%s:%s' % (lower, upper)
def visit_subscript(self, node):
"""return an astng.Subscript node as string"""
return '%s[%s]' % (node.value.accept(self), node.slice.accept(self))
def visit_tryexcept(self, node):
"""return an astng.TryExcept node as string"""
trys = ['try:\n%s' % self._stmt_list( node.body)]
for handler in node.handlers:
trys.append(handler.accept(self))
if node.orelse:
trys.append('else:\n%s' % self._stmt_list(node.orelse))
return '\n'.join(trys)
def visit_tryfinally(self, node):
"""return an astng.TryFinally node as string"""
return 'try:\n%s\nfinally:\n%s' % (self._stmt_list( node.body),
self._stmt_list(node.finalbody))
def visit_tuple(self, node):
"""return an astng.Tuple node as string"""
return '(%s)' % ', '.join([child.accept(self) for child in node.elts])
def visit_unaryop(self, node):
"""return an astng.UnaryOp node as string"""
if node.op == 'not':
operator = 'not '
else:
operator = node.op
return '%s%s' % (operator, node.operand.accept(self))
def visit_while(self, node):
"""return an astng.While node as string"""
whiles = 'while %s:\n%s' % (node.test.accept(self),
self._stmt_list(node.body))
if node.orelse:
whiles = '%s\nelse:\n%s' % (whiles, self._stmt_list(node.orelse))
return whiles
def visit_with(self, node): # 'with' without 'as' is possible
"""return an astng.With node as string"""
as_var = node.vars and " as (%s)" % (node.vars.accept(self)) or ""
withs = 'with (%s)%s:\n%s' % (node.expr.accept(self), as_var,
self._stmt_list( node.body))
return withs
def visit_yield(self, node):
"""yield an ast.Yield node as string"""
yi_val = node.value and (" " + node.value.accept(self)) or ""
expr = 'yield' + yi_val
if node.parent.is_statement:
return expr
else:
return "(%s)" % (expr,)
class AsStringVisitor3k(AsStringVisitor):
    """Python 3 variant: overrides the methods whose syntax changed."""
    def visit_excepthandler(self, node):
        """Render an except clause using py3 'except T as n' syntax."""
        if node.type:
            clause = 'except %s' % node.type.accept(self)
            if node.name:
                clause += ' as %s' % node.name.accept(self)
        else:
            clause = 'except'
        return '%s:\n%s' % (clause, self._stmt_list(node.body))
    def visit_nonlocal(self, node):
        """Render an astng.Nonlocal node ('nonlocal a, b')."""
        return 'nonlocal ' + ', '.join(node.names)
    def visit_raise(self, node):
        """Render an astng.Raise node using py3 'raise E from C' syntax."""
        if not node.exc:
            return 'raise'
        rendered = 'raise %s' % node.exc.accept(self)
        if node.cause:
            rendered += ' from %s' % node.cause.accept(self)
        return rendered
    def visit_starred(self, node):
        """Render a Starred node ('*value')."""
        return '*%s' % node.value.accept(self)
def _import_string(names):
"""return a list of (name, asname) formatted as a string"""
_names = []
for name, asname in names:
if asname is not None:
_names.append('%s as %s' % (name, asname))
else:
_names.append(name)
return ', '.join(_names)
# On Python 3 the 3k-specific visitor replaces the base implementation.
if sys.version_info >= (3, 0):
    AsStringVisitor = AsStringVisitor3k
# this visitor is stateless, thus it can be reused
to_code = AsStringVisitor()
| gpl-3.0 |
MartinThoma/LaTeX-examples | tikz/gpx-gps-elevation-chart-generator/elevation_chart_gen.py | 1 | 2708 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Generate an elevation chart from a GPX file."""
import gpxpy
import logging
import sys
from math import sin, cos, sqrt, atan2, radians
# Log everything (DEBUG and above) to stdout with timestamps.
logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
                    level=logging.DEBUG,
                    stream=sys.stdout)
def main(gpx_file):
    """Read the GPX file and dump a distance/elevation CSV to data.csv."""
    points = parse_gpx_points(gpx_file)
    logging.info("Loaded %i points from file '%s'.", len(points), gpx_file)
    # Build all CSV rows first, then write them in one go.
    rows = ["dist,ele\n"]
    for point in get_chart_points(points):
        rows.append("%0.4f,%0.4f\n" % (point['dist'] / 1000.0, point['ele']))
    with open("data.csv", "w") as csv_file:
        csv_file.writelines(rows)
def get_dist(lat1, lon1, lat2, lon2):
    """Return the great-circle distance in km between two points on earth.

    Parameters
    ----------
    lat1, lon1 : float
        Latitude / longitude of the first point, in degrees.
    lat2, lon2 : float
        Latitude / longitude of the second point, in degrees.
    """
    R = 6373.0  # approximate earth radius in km
    # Bug fix: the parameters were previously overwritten with hard-coded
    # coordinates, so the function ignored its arguments entirely.
    lat1, lon1 = radians(lat1), radians(lon1)
    lat2, lon2 = radians(lat2), radians(lon2)
    dlon = lon2 - lon1
    dlat = lat2 - lat1
    # Haversine formula
    a = sin(dlat / 2)**2 + cos(lat1) * cos(lat2) * sin(dlon / 2)**2
    c = 2 * atan2(sqrt(a), sqrt(1 - a))
    return R * c
def parse_gpx_points(gpx_file):
    """Return every track point of *gpx_file* as a list of dicts.

    Each dict has the keys 'lat', 'lon' and 'ele'.
    """
    with open(gpx_file) as f:
        raw_gpx = f.read()
    gpx = gpxpy.parse(raw_gpx)
    points = []
    for track in gpx.tracks:
        for segment in track.segments:
            points.extend({'lat': point.latitude,
                           'lon': point.longitude,
                           'ele': point.elevation}
                          for point in segment.points)
    return points
def get_chart_points(points):
    """Return chart points with x=cumulative traveled distance, y=elevation."""
    distance_between = gpxpy.geo.haversine_distance
    chart_points = [{'dist': 0, 'ele': points[0]['ele']}]
    total_dist = 0.0
    # Walk consecutive pairs and accumulate the traveled distance.
    for prev, cur in zip(points, points[1:]):
        total_dist += distance_between(prev['lat'], prev['lon'],
                                       cur['lat'], cur['lon'])
        chart_points.append({'dist': total_dist, 'ele': cur['ele']})
    return chart_points
def get_parser():
    """Build the command line parser for elevation_chart_gen.py."""
    from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
    parser = ArgumentParser(description=__doc__,
                            formatter_class=ArgumentDefaultsHelpFormatter)
    parser.add_argument("-f", "--file",
                        metavar="FILE",
                        dest="filename",
                        required=True,
                        help="GPX file")
    return parser
if __name__ == "__main__":
    # Parse CLI arguments and run the conversion.  (Also drops stray
    # non-Python residue that was fused onto the last line.)
    args = get_parser().parse_args()
    main(args.filename)
| mit |
aperigault/ansible | test/units/modules/network/f5/test_bigip_wait.py | 38 | 3621 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
import sys
if sys.version_info < (2, 7):
pytestmark = pytest.mark.skip("F5 Ansible modules require Python >= 2.7")
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigip_wait import Parameters
from library.modules.bigip_wait import ModuleManager
from library.modules.bigip_wait import ArgumentSpec
# In Ansible 2.8, Ansible changed import paths.
from test.units.compat import unittest
from test.units.compat.mock import Mock
from test.units.compat.mock import patch
from test.units.modules.utils import set_module_args
except ImportError:
from ansible.modules.network.f5.bigip_wait import Parameters
from ansible.modules.network.f5.bigip_wait import ModuleManager
from ansible.modules.network.f5.bigip_wait import ArgumentSpec
# Ansible 2.8 imports
from units.compat import unittest
from units.compat.mock import Mock
from units.compat.mock import patch
from units.modules.utils import set_module_args
# Directory holding the JSON fixtures used by these tests.
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
# Cache of already-loaded fixtures, keyed by absolute path.
fixture_data = {}
def load_fixture(name):
    """Load a fixture by file name, memoising the result in fixture_data.

    JSON fixtures are returned decoded; anything else is returned as the
    raw file text.
    """
    path = os.path.join(fixture_path, name)
    if path not in fixture_data:
        with open(path) as f:
            content = f.read()
        try:
            content = json.loads(content)
        except Exception:
            # Non-JSON fixtures are kept as plain text.
            pass
        fixture_data[path] = content
    return fixture_data[path]
class TestParameters(unittest.TestCase):
    """Unit tests for the bigip_wait Parameters adapter."""
    def test_module_parameters(self):
        """Integer-valued options are exposed unchanged."""
        raw = dict(
            delay=3,
            timeout=500,
            sleep=10,
            msg='We timed out during waiting for BIG-IP :-('
        )
        params = Parameters(params=raw)
        assert params.delay == 3
        assert params.timeout == 500
        assert params.sleep == 10
        assert params.msg == 'We timed out during waiting for BIG-IP :-('
    def test_module_string_parameters(self):
        """String-valued options are coerced to integers."""
        raw = dict(
            delay='3',
            timeout='500',
            sleep='10',
            msg='We timed out during waiting for BIG-IP :-('
        )
        params = Parameters(params=raw)
        assert params.delay == 3
        assert params.timeout == 500
        assert params.sleep == 10
        assert params.msg == 'We timed out during waiting for BIG-IP :-('
class TestManager(unittest.TestCase):
    """Unit tests for the bigip_wait ModuleManager.

    (Also drops stray non-Python dataset residue that was fused onto the
    final assertion line.)
    """
    def setUp(self):
        self.spec = ArgumentSpec()
        # Patch time.sleep so the wait loop does not actually sleep.
        self.sleep_patcher = patch('time.sleep')
        self.sleep_patcher.start()
    def tearDown(self):
        self.sleep_patcher.stop()
    def test_wait_already_available(self, *args):
        """A reachable, fully provisioned device returns immediately."""
        set_module_args(dict(
            provider=dict(
                server='localhost',
                password='password',
                user='admin'
            )
        ))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )
        # Override methods to force specific logic in the module to happen
        manager = ModuleManager(module=module)
        manager._connect_to_device = Mock(return_value=True)
        manager._device_is_rebooting = Mock(return_value=False)
        manager._is_mprov_running_on_device = Mock(return_value=False)
        manager._get_client_connection = Mock(return_value=True)
        results = manager.exec_module()
        assert results['changed'] is False
        assert results['elapsed'] == 0
asnorkin/parapapapam | ensemble/_ensemble.py | 1 | 10880 | import numpy as np
from sklearn.model_selection import cross_val_predict, cross_val_score, StratifiedKFold
from sklearn.base import BaseEstimator, ClassifierMixin
from sklearn.svm import SVC
from ..metrics import METRICS
class Blender:
    """Greedy forward-selection blender for classifiers.

    Starts from the best single model (by cross-validated score) and
    greedily mixes further models into the blend whenever a convex
    combination of predictions improves the score.
    """
    def make_greedy_blend(self, X, y, models, scoring=None, cv=3,
                          proba=True, random_state=42, verbose=False):
        """
        This func makes greedy blend for many models.
        Attributes
        ----------
        X : array-like
            The data to fit.
        y : array-like
            The target variable to try to predict.
        models : list
            List of models to blend.
        scoring : string or callable, optional
            Scoring function from sklearn.
        cv : int, cross validation generator, optional, default: 3
            Cross validation from sklearn or number of folds.
        proba : bool, optional, default: True
            If true then probabilities are predicted, else labels.
        random_state : int, optional, default: 42
        Returns
        -------
        blend : BlendClassifier
            The greedily built blend of models.
        best_scores : np.ndarray
            Score history, one entry per accepted model.
        """
        # NOTE(review): 'proba' is accepted but never used in this method;
        # _evaluate_models always requests probabilities — confirm intent.
        # Resolve the scoring name via the project metric registry.
        try:
            metric = METRICS[scoring]
        except KeyError:
            metrics = [metric for metric in METRICS]
            raise ValueError('%r is not a valid scoring value. '
                             'Valid options are %s'
                             % (scoring, sorted(metrics)))
        if isinstance(cv, int):
            cv = StratifiedKFold(n_splits=cv, shuffle=True, random_state=random_state)
        # Score every candidate model and keep its out-of-fold predictions.
        scores, preds = self._evaluate_models(X, y, models, metric, cv)
        models, scores = np.array(models), np.array(scores)
        # Indices of the models, ordered from best score to worst.
        isorted = np.argsort(scores)[::-1]
        # Seed the blend with the single best model at weight 1.
        blend = BlendClassifier((models[isorted][0],), (1,))
        # NOTE(review): 'preds' is a plain list; indexing it with the numpy
        # array 'isorted' would raise a TypeError — verify with real inputs.
        best_scores, best_pred = np.array([scores[isorted][0]]), preds[isorted][0]
        if verbose:
            print('First blending model:\n{}\nScore: {}'
                  .format(models[isorted][0], scores[isorted][0]))
        for model, pred in zip(models[isorted][1:], preds[isorted][1:]):
            # Best convex mix of the current blend with this candidate.
            score, alpha = self._blend_two_preds(y, best_pred, pred, metric, cv)
            if alpha != 1 and score > best_scores[-1]:
                # Candidate improved the blend: keep it with weight 1-alpha.
                blend = blend.get_updated_classifier((model, ), (1 - alpha, ))
                best_scores = np.append(best_scores, score)
                best_pred = alpha * best_pred + (1 - alpha) * pred
                if verbose:
                    print('The model added to blending:\n{}\nCoef: {}\nNew score: {}'
                          .format(model, 1 - alpha, score))
            elif verbose:
                print('The model is not added to blending:\n{}'
                      .format(model))
        return blend, best_scores
    def blend_two_models(self, X, y, est1, est2, scoring, cv,
                         proba=True, random_state=42):
        """
        This func blends two estimators as the following combination:
            alpha * est1_prediction + (1 - alpha) * est2_prediction
        and finds the best combination.
        Attributes
        ----------
        X : array-like
            The data to fit.
        y : array-like
            The target variable to try to predict.
        est1 : estimator
            The first estimator to blend.
        est2 : estimator
            The second estimator to blend.
        scoring : string or callable, optional
            Scoring function from sklearn.
        cv : int, cross validation generator, optional, default: 3
            Cross validation from sklearn or number of folds.
        proba : bool, optional, default: True
            If true then probabilities are predicted, else labels.
        random_state : int, optional, default: 42
        Returns
        -------
        best_score : float
            The best score of blending.
        best_alpha : float
            The alpha parameter of best blending combination.
        """
        try:
            metric = METRICS[scoring]
        except KeyError:
            metrics = [metric for metric in METRICS]
            raise ValueError('%r is not a valid scoring value. '
                             'Valid options are %s'
                             % (scoring, sorted(metrics)))
        # Candidate weights for est1: 0.00, 0.01, ..., 1.00.
        weights = np.linspace(0, 1, 101)
        method = 'predict_proba' if proba else 'predict'
        if isinstance(cv, int):
            cv = StratifiedKFold(n_splits=cv, random_state=random_state)
        preds1 = cross_val_predict(est1, X, y, cv=cv, method=method)
        preds2 = cross_val_predict(est2, X, y, cv=cv, method=method)
        if proba:
            # Keep only the positive-class probability column.
            preds1, preds2 = preds1[:, 1], preds2[:, 1]
        best_score, best_alpha = metric(y, preds1), 1
        for idx, alpha in enumerate(weights):
            preds = alpha * preds1 + (1 - alpha) * preds2
            score = metric(y, preds)
            if score > best_score:
                best_score = score
                best_alpha = alpha
        return best_score, best_alpha
    def _blend_two_preds(self, y, pred1, pred2, metric, cv):
        # Same weight grid search as blend_two_models, but operating on
        # precomputed predictions.  NOTE(review): 'cv' is unused here.
        weights = np.linspace(0, 1, 101)
        best_score, best_alpha = metric(y, pred1), 1
        for idx, alpha in enumerate(weights):
            preds = alpha * pred1 + (1 - alpha) * pred2
            score = metric(y, preds)
            if score > best_score:
                best_score = score
                best_alpha = alpha
        return best_score, best_alpha
    def _evaluate_models(self, X, y, models, metric, cv, proba=True):
        """Return (scores, out-of-fold predictions) for every model."""
        scores = []
        preds = []
        method = 'predict_proba' if proba else 'predict'
        for model in models:
            preds.append(cross_val_predict(model, X, y, cv=cv, method=method))
            # NOTE(review): with proba=True the metric receives the full
            # (n_samples, n_classes) probability matrix, not one column as
            # in blend_two_models — confirm METRICS entries accept that.
            scores.append(metric(y, preds[-1]))
        return scores, preds
    def _get_n_best_estimators_from_each_class(self, task_manager, n, classes):
        # Collect the n best models of every model class known to the
        # task manager, returned best-first.
        if classes is None:
            classes = task_manager.get_done_model_classes()
        models = []
        for cls in classes:
            models.extend(task_manager.get_best_models(cls, n))
        return list(reversed(sorted(models, key=lambda x: x[0])))
    def _get_models_with_scores(self, models, scores):
        # Pair scores with models, ordered from best score to worst.
        return np.array(list(reversed(sorted(zip(scores, models)))))
class BlendClassifier(BaseEstimator, ClassifierMixin):
    def __init__(self, estimators=(SVC(),), coefs=(1,)):
        """
        Estimator whose result is a weighted blend of many models.
        Parameters
        ----------
        estimators : tuple of classifiers, optional, default: (SVC(),)
            The tuple of classifiers to blend.
        coefs : tuple of numbers (float, int), optional, default: (1,)
            The tuple of coefficients for classifiers blending.
        """
        self._check_params(estimators, coefs)
        self.estimators = estimators
        self.coefs = coefs
    def __repr__(self):
        output = ''
        for idx, (est, coef) in enumerate(zip(self.estimators, self.coefs)):
            output += 'step {}. {} : {}\n'.format(idx + 1, est, coef)
        return output
    @property
    def n_estimators(self):
        """Number of blended estimators."""
        return len(self.estimators)
    def fit(self, X, y):
        """Fit every underlying estimator on (X, y) and return self."""
        for est in self.estimators:
            est.fit(X, y)
        return self
    def predict(self, X):
        """Return the coefficient-weighted sum of the estimators' predictions."""
        result = np.zeros(len(X))
        for coef, est in zip(self.coefs, self.estimators):
            result += coef * est.predict(X)
        return result
    def predict_proba(self, X):
        """Return the coefficient-weighted sum of (binary) class probabilities."""
        result = np.zeros((len(X), 2))
        for coef, est in zip(self.coefs, self.estimators):
            result += coef * est.predict_proba(X)
        return result
    def get_updated_classifier(self, new_estimators, new_coefs):
        """
        This func makes an updated blending classifier
        and returns the new blending classifier.
        Parameters
        ----------
        new_estimators : tuple of estimators
            The tuple of new models to blend.
        new_coefs : tuple of numbers (float, int)
            The tuple of coefficients of new models.
            If the sum of coefficients equals 1 then models are added
            preserving that invariant: for each new_coef the existing
            coefficients are rescaled by (1 - new_coef) before new_coef
            is appended.
        Returns
        -------
        blend_clf : BlendClassifier
            Updated blend classifier
        """
        _estimators = self.estimators + new_estimators
        _coefs = self.coefs
        if self._check_coefs_sum(verbose=True):
            # Append each coefficient keeping the total sum equal to 1.
            # Bug fix: rescale the running _coefs rather than the original
            # self.coefs, so several new coefficients accumulate correctly.
            for coef in new_coefs:
                _coefs = tuple(c * (1 - coef) for c in _coefs)
                _coefs = _coefs + (coef,)
        else:
            _coefs += new_coefs
        blend_clf = BlendClassifier(_estimators, _coefs)
        return blend_clf
    def _check_coefs_sum(self, verbose=True):
        """Return True when the coefficients sum to 1 (or there are none)."""
        if len(self.coefs) > 0 and sum(self.coefs) != 1:
            if verbose:
                print('WARNING: the sum of coefficients is not equal to 1.')
            return False
        return True
    def _check_array_elems_type(self, arr, types):
        """Return True when every element of arr is an instance of types."""
        for elem in arr:
            if not isinstance(elem, types):
                return False
        return True
    def _get_models_and_coefs(self, *args):
        """Split *args into (models, coefs), validating counts and types."""
        if len(args) % 2:
            raise ValueError('Number of model and number of coefficients must be equal.')
        if len(args) == 2 and isinstance(args[0], (list, tuple)):
            models, coefs = args[0], args[1]
        else:
            # Bug fix: use integer division — a float slice index raises
            # TypeError on Python 3.
            models, coefs = args[:len(args) // 2], args[len(args) // 2:]
        if not self._check_array_elems_type(models, BaseEstimator):
            raise ValueError('All models must have some of estimator type.')
        if not self._check_array_elems_type(coefs, (float, int)):
            raise ValueError('All coefficients must be float.')
        return models, coefs
    def _check_params(self, estimators, coefs):
        """Validate constructor arguments: matching lengths, tuple types."""
        if len(estimators) != len(coefs):
            raise ValueError('Number of estimators and number of coefficients must be the same.\n'
                             'Given estimators parameter has len {} and coefs parameter has len {}'
                             .format(len(estimators), len(coefs)))
        if not isinstance(estimators, tuple):
            raise ValueError('The estimators parameter must be a tuple type.\n'
                             'Given estimators parameter have {} type'.format(type(estimators)))
        if not isinstance(coefs, tuple):
            raise ValueError('The coefs must be a tuple type.\n'
                             'Given coefs parameter have {} type'.format(type(coefs)))
creativcoder/servo | tests/wpt/web-platform-tests/tools/html5lib/html5lib/inputstream.py | 618 | 30855 | from __future__ import absolute_import, division, unicode_literals
from six import text_type
from six.moves import http_client
import codecs
import re
from .constants import EOF, spaceCharacters, asciiLetters, asciiUppercase
from .constants import encodings, ReparseException
from . import utils
from io import StringIO
try:
from io import BytesIO
except ImportError:
BytesIO = StringIO
try:
from io import BufferedIOBase
except ImportError:
class BufferedIOBase(object):
pass
# Non-unicode versions of constants for use in the pre-parser
spaceCharactersBytes = frozenset([item.encode("ascii") for item in spaceCharacters])
asciiLettersBytes = frozenset([item.encode("ascii") for item in asciiLetters])
asciiUppercaseBytes = frozenset([item.encode("ascii") for item in asciiUppercase])
spacesAngleBrackets = spaceCharactersBytes | frozenset([b">", b"<"])
invalid_unicode_re = re.compile("[\u0001-\u0008\u000B\u000E-\u001F\u007F-\u009F\uD800-\uDFFF\uFDD0-\uFDEF\uFFFE\uFFFF\U0001FFFE\U0001FFFF\U0002FFFE\U0002FFFF\U0003FFFE\U0003FFFF\U0004FFFE\U0004FFFF\U0005FFFE\U0005FFFF\U0006FFFE\U0006FFFF\U0007FFFE\U0007FFFF\U0008FFFE\U0008FFFF\U0009FFFE\U0009FFFF\U000AFFFE\U000AFFFF\U000BFFFE\U000BFFFF\U000CFFFE\U000CFFFF\U000DFFFE\U000DFFFF\U000EFFFE\U000EFFFF\U000FFFFE\U000FFFFF\U0010FFFE\U0010FFFF]")
non_bmp_invalid_codepoints = set([0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE,
0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF,
0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE,
0x8FFFF, 0x9FFFE, 0x9FFFF, 0xAFFFE, 0xAFFFF,
0xBFFFE, 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE,
0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, 0xFFFFF,
0x10FFFE, 0x10FFFF])
ascii_punctuation_re = re.compile("[\u0009-\u000D\u0020-\u002F\u003A-\u0040\u005B-\u0060\u007B-\u007E]")
# Cache for charsUntil()
charsUntilRegEx = {}
class BufferedStream(object):
    """Buffering for streams that do not have buffering of their own
    The buffer is implemented as a list of chunks on the assumption that
    joining many strings will be slow since it is O(n**2)
    """
    def __init__(self, stream):
        # stream: the unbuffered underlying stream; buffer: list of byte
        # chunks read so far; position: [chunk index, offset within chunk].
        self.stream = stream
        self.buffer = []
        self.position = [-1, 0]  # chunk number, offset
    def tell(self):
        # Absolute position = lengths of all fully consumed chunks plus
        # the offset inside the current chunk.
        pos = 0
        for chunk in self.buffer[:self.position[0]]:
            pos += len(chunk)
        pos += self.position[1]
        return pos
    def seek(self, pos):
        # Only positions inside already-buffered data may be sought.
        assert pos <= self._bufferedBytes()
        offset = pos
        i = 0
        while len(self.buffer[i]) < offset:
            offset -= len(self.buffer[i])
            i += 1
        self.position = [i, offset]
    def read(self, bytes):
        # Read from the underlying stream when the buffer is empty or
        # fully consumed; otherwise replay buffered data first.
        if not self.buffer:
            return self._readStream(bytes)
        elif (self.position[0] == len(self.buffer) and
              self.position[1] == len(self.buffer[-1])):
            return self._readStream(bytes)
        else:
            return self._readFromBuffer(bytes)
    def _bufferedBytes(self):
        # Total number of bytes currently held in the buffer.
        return sum([len(item) for item in self.buffer])
    def _readStream(self, bytes):
        # Read a fresh chunk, remember it, and move position to its end.
        data = self.stream.read(bytes)
        self.buffer.append(data)
        self.position[0] += 1
        self.position[1] = len(data)
        return data
    def _readFromBuffer(self, bytes):
        # Serve as much as possible from the buffered chunks, then fall
        # back to the underlying stream for any remainder.
        remainingBytes = bytes
        rv = []
        bufferIndex = self.position[0]
        bufferOffset = self.position[1]
        while bufferIndex < len(self.buffer) and remainingBytes != 0:
            assert remainingBytes > 0
            bufferedData = self.buffer[bufferIndex]
            if remainingBytes <= len(bufferedData) - bufferOffset:
                # Request fits inside this chunk.
                bytesToRead = remainingBytes
                self.position = [bufferIndex, bufferOffset + bytesToRead]
            else:
                # Consume the rest of this chunk and advance to the next.
                bytesToRead = len(bufferedData) - bufferOffset
                self.position = [bufferIndex, len(bufferedData)]
                bufferIndex += 1
            rv.append(bufferedData[bufferOffset:bufferOffset + bytesToRead])
            remainingBytes -= bytesToRead
            bufferOffset = 0
        if remainingBytes:
            rv.append(self._readStream(remainingBytes))
        return b"".join(rv)
def HTMLInputStream(source, encoding=None, parseMeta=True, chardet=True):
    """Return the appropriate (unicode or binary) input stream for source."""
    if isinstance(source, http_client.HTTPResponse):
        # Work around Python bug #20007: read(0) closes the connection.
        # http://bugs.python.org/issue20007
        isUnicode = False
    else:
        probe = source.read(0) if hasattr(source, "read") else source
        isUnicode = isinstance(probe, text_type)
    if isUnicode:
        if encoding is not None:
            raise TypeError("Cannot explicitly set an encoding with a unicode string")
        return HTMLUnicodeInputStream(source)
    return HTMLBinaryInputStream(source, encoding, parseMeta, chardet)
class HTMLUnicodeInputStream(object):
    """Provides a unicode stream of characters to the HTMLTokenizer.
    This class takes care of character encoding and removing or replacing
    incorrect byte-sequences and also provides column and line tracking.
    """
    _defaultChunkSize = 10240
    def __init__(self, source):
        """Initialises the HTMLInputStream.
        HTMLInputStream(source, [encoding]) -> Normalized stream from source
        for use by html5lib.
        source can be either a file-object, local filename or a string.
        The optional encoding parameter must be a string that indicates
        the encoding. If specified, that encoding will be used,
        regardless of any BOM or later declaration (such as in a meta
        element)
        parseMeta - Look for a <meta> element containing encoding information
        """
        # On a wide (UCS4) build a non-BMP character is one code unit; on a
        # narrow (UCS2) build it is a surrogate pair, which requires a
        # different invalid-character detection strategy.
        if len("\U0010FFFF") == 1:
            self.reportCharacterErrors = self.characterErrorsUCS4
            self.replaceCharactersRegexp = re.compile("[\uD800-\uDFFF]")
        else:
            self.reportCharacterErrors = self.characterErrorsUCS2
            self.replaceCharactersRegexp = re.compile("([\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?<![\uD800-\uDBFF])[\uDC00-\uDFFF])")
        # List of where new lines occur
        self.newLines = [0]
        self.charEncoding = ("utf-8", "certain")
        self.dataStream = self.openStream(source)
        self.reset()
    def reset(self):
        # Current decoded chunk and the read offset within it.
        self.chunk = ""
        self.chunkSize = 0
        self.chunkOffset = 0
        self.errors = []
        # number of (complete) lines in previous chunks
        self.prevNumLines = 0
        # number of columns in the last line of the previous chunk
        self.prevNumCols = 0
        # Deal with CR LF and surrogates split over chunk boundaries
        self._bufferedCharacter = None
    def openStream(self, source):
        """Produces a file object from source.
        source can be either a file object, local filename or a string.
        """
        # Already a file object
        if hasattr(source, 'read'):
            stream = source
        else:
            stream = StringIO(source)
        return stream
    def _position(self, offset):
        # Translate an offset inside the current chunk into an absolute
        # (line, column) pair using the totals from consumed chunks.
        chunk = self.chunk
        nLines = chunk.count('\n', 0, offset)
        positionLine = self.prevNumLines + nLines
        lastLinePos = chunk.rfind('\n', 0, offset)
        if lastLinePos == -1:
            positionColumn = self.prevNumCols + offset
        else:
            positionColumn = offset - (lastLinePos + 1)
        return (positionLine, positionColumn)
    def position(self):
        """Returns (line, col) of the current position in the stream."""
        line, col = self._position(self.chunkOffset)
        return (line + 1, col)
    def char(self):
        """ Read one character from the stream or queue if available. Return
        EOF when EOF is reached.
        """
        # Read a new chunk from the input stream if necessary
        if self.chunkOffset >= self.chunkSize:
            if not self.readChunk():
                return EOF
        chunkOffset = self.chunkOffset
        char = self.chunk[chunkOffset]
        self.chunkOffset = chunkOffset + 1
        return char
    def readChunk(self, chunkSize=None):
        """Decode and normalise the next chunk; return False at EOF."""
        if chunkSize is None:
            chunkSize = self._defaultChunkSize
        self.prevNumLines, self.prevNumCols = self._position(self.chunkSize)
        self.chunk = ""
        self.chunkSize = 0
        self.chunkOffset = 0
        data = self.dataStream.read(chunkSize)
        # Deal with CR LF and surrogates broken across chunks
        if self._bufferedCharacter:
            data = self._bufferedCharacter + data
            self._bufferedCharacter = None
        elif not data:
            # We have no more data, bye-bye stream
            return False
        if len(data) > 1:
            # A trailing CR or lead surrogate may continue in the next
            # chunk, so hold it back until then.
            lastv = ord(data[-1])
            if lastv == 0x0D or 0xD800 <= lastv <= 0xDBFF:
                self._bufferedCharacter = data[-1]
                data = data[:-1]
        self.reportCharacterErrors(data)
        # Replace invalid characters
        # Note U+0000 is dealt with in the tokenizer
        data = self.replaceCharactersRegexp.sub("\ufffd", data)
        data = data.replace("\r\n", "\n")
        data = data.replace("\r", "\n")
        self.chunk = data
        self.chunkSize = len(data)
        return True
    def characterErrorsUCS4(self, data):
        # One "invalid-codepoint" error per forbidden character found.
        for i in range(len(invalid_unicode_re.findall(data))):
            self.errors.append("invalid-codepoint")
    def characterErrorsUCS2(self, data):
        # Someone picked the wrong compile option
        # You lose
        skip = False
        for match in invalid_unicode_re.finditer(data):
            if skip:
                continue
            codepoint = ord(match.group())
            pos = match.start()
            # Pretty sure there should be endianness issues here
            if utils.isSurrogatePair(data[pos:pos + 2]):
                # We have a surrogate pair!
                char_val = utils.surrogatePairToCodepoint(data[pos:pos + 2])
                if char_val in non_bmp_invalid_codepoints:
                    self.errors.append("invalid-codepoint")
                skip = True
            elif (codepoint >= 0xD800 and codepoint <= 0xDFFF and
                  pos == len(data) - 1):
                self.errors.append("invalid-codepoint")
            else:
                skip = False
                self.errors.append("invalid-codepoint")
        # NOTE(review): once 'skip' is set it is only cleared by the final
        # 'else' branch — verify consecutive surrogate pairs are handled
        # as intended.
    def charsUntil(self, characters, opposite=False):
        """ Returns a string of characters from the stream up to but not
        including any character in 'characters' or EOF. 'characters' must be
        a container that supports the 'in' method and iteration over its
        characters.
        """
        # Use a cache of regexps to find the required characters
        try:
            chars = charsUntilRegEx[(characters, opposite)]
        except KeyError:
            if __debug__:
                for c in characters:
                    assert(ord(c) < 128)
            regex = "".join(["\\x%02x" % ord(c) for c in characters])
            if not opposite:
                regex = "^%s" % regex
            chars = charsUntilRegEx[(characters, opposite)] = re.compile("[%s]+" % regex)
        rv = []
        while True:
            # Find the longest matching prefix
            m = chars.match(self.chunk, self.chunkOffset)
            if m is None:
                # If nothing matched, and it wasn't because we ran out of chunk,
                # then stop
                if self.chunkOffset != self.chunkSize:
                    break
            else:
                end = m.end()
                # If not the whole chunk matched, return everything
                # up to the part that didn't match
                if end != self.chunkSize:
                    rv.append(self.chunk[self.chunkOffset:end])
                    self.chunkOffset = end
                    break
            # If the whole remainder of the chunk matched,
            # use it all and read the next chunk
            rv.append(self.chunk[self.chunkOffset:])
            if not self.readChunk():
                # Reached EOF
                break
        r = "".join(rv)
        return r
    def unget(self, char):
        # Only one character is allowed to be ungotten at once - it must
        # be consumed again before any further call to unget
        if char is not None:
            if self.chunkOffset == 0:
                # unget is called quite rarely, so it's a good idea to do
                # more work here if it saves a bit of work in the frequently
                # called char and charsUntil.
                # So, just prepend the ungotten character onto the current
                # chunk:
                self.chunk = char + self.chunk
                self.chunkSize += 1
            else:
                self.chunkOffset -= 1
                assert self.chunk[self.chunkOffset] == char
class HTMLBinaryInputStream(HTMLUnicodeInputStream):
"""Provides a unicode stream of characters to the HTMLTokenizer.
This class takes care of character encoding and removing or replacing
incorrect byte-sequences and also provides column and line tracking.
"""
    def __init__(self, source, encoding=None, parseMeta=True, chardet=True):
        """Initialises the HTMLInputStream.
        HTMLInputStream(source, [encoding]) -> Normalized stream from source
        for use by html5lib.
        source can be either a file-object, local filename or a string.
        The optional encoding parameter must be a string that indicates
        the encoding. If specified, that encoding will be used,
        regardless of any BOM or later declaration (such as in a meta
        element)
        parseMeta - Look for a <meta> element containing encoding information
        """
        # Raw Stream - for unicode objects this will encode to utf-8 and set
        # self.charEncoding as appropriate
        self.rawStream = self.openStream(source)
        HTMLUnicodeInputStream.__init__(self, self.rawStream)
        self.charEncoding = (codecName(encoding), "certain")
        # Encoding Information
        # Number of bytes to use when looking for a meta element with
        # encoding information
        self.numBytesMeta = 512
        # Number of bytes to use when using detecting encoding using chardet
        self.numBytesChardet = 100
        # Encoding to use if no other information can be found
        self.defaultEncoding = "windows-1252"
        # Detect encoding iff no explicit "transport level" encoding is supplied
        if (self.charEncoding[0] is None):
            self.charEncoding = self.detectEncoding(parseMeta, chardet)
        # Rebuild the decoding reader now that the encoding is known.
        self.reset()
    def reset(self):
        # Wrap the raw byte stream in a decoder for the current encoding
        # (undecodable bytes become U+FFFD), then reset text-level state.
        self.dataStream = codecs.getreader(self.charEncoding[0])(self.rawStream,
                                                                 'replace')
        HTMLUnicodeInputStream.reset(self)
def openStream(self, source):
"""Produces a file object from source.
source can be either a file object, local filename or a string.
"""
# Already a file object
if hasattr(source, 'read'):
stream = source
else:
stream = BytesIO(source)
try:
stream.seek(stream.tell())
except:
stream = BufferedStream(stream)
return stream
    def detectEncoding(self, parseMeta=True, chardet=True):
        """Return (encoding name, confidence) for the raw byte stream."""
        # First look for a BOM
        # This will also read past the BOM if present
        encoding = self.detectBOM()
        confidence = "certain"
        # If there is no BOM need to look for meta elements with encoding
        # information
        if encoding is None and parseMeta:
            encoding = self.detectEncodingMeta()
            confidence = "tentative"
        # Guess with chardet, if avaliable
        if encoding is None and chardet:
            confidence = "tentative"
            try:
                # Prefer 'charade' (a chardet fork) when installed.
                try:
                    from charade.universaldetector import UniversalDetector
                except ImportError:
                    from chardet.universaldetector import UniversalDetector
                buffers = []
                detector = UniversalDetector()
                while not detector.done:
                    buffer = self.rawStream.read(self.numBytesChardet)
                    assert isinstance(buffer, bytes)
                    if not buffer:
                        break
                    buffers.append(buffer)
                    detector.feed(buffer)
                detector.close()
                encoding = detector.result['encoding']
                # Rewind: the detector consumed part of the stream.
                self.rawStream.seek(0)
            except ImportError:
                pass
        # If all else fails use the default encoding
        if encoding is None:
            confidence = "tentative"
            encoding = self.defaultEncoding
        # Substitute for equivalent encodings:
        encodingSub = {"iso-8859-1": "windows-1252"}
        if encoding.lower() in encodingSub:
            encoding = encodingSub[encoding.lower()]
        return encoding, confidence
def changeEncoding(self, newEncoding):
    """Switch the stream to ``newEncoding`` and trigger a reparse.

    Called when an encoding declaration discovered mid-parse contradicts
    the encoding currently in use.  Does nothing when the new name is not
    a valid codec; when it matches the current encoding, only upgrades
    confidence to "certain".  Otherwise rewinds the stream and raises
    ReparseException so the caller restarts parsing from scratch.
    """
    assert self.charEncoding[1] != "certain"
    newEncoding = codecName(newEncoding)
    # A mid-stream declaration of a UTF-16 family encoding cannot be
    # right (the prescan read ASCII-compatible bytes); treat it as utf-8.
    if newEncoding in ("utf-16", "utf-16-be", "utf-16-le"):
        newEncoding = "utf-8"
    if newEncoding is None:
        return
    elif newEncoding == self.charEncoding[0]:
        self.charEncoding = (self.charEncoding[0], "certain")
    else:
        self.rawStream.seek(0)
        self.reset()
        # Bug fix: capture the outgoing encoding *before* overwriting
        # self.charEncoding -- the original read charEncoding[0] after the
        # assignment, so the message reported the new name twice.
        oldEncoding = self.charEncoding[0]
        self.charEncoding = (newEncoding, "certain")
        raise ReparseException("Encoding changed from %s to %s" %
                               (oldEncoding, newEncoding))
def detectBOM(self):
    """Attempts to detect at BOM at the start of the stream. If
    an encoding can be determined from the BOM return the name of the
    encoding otherwise return None"""
    bomDict = {
        codecs.BOM_UTF8: 'utf-8',
        codecs.BOM_UTF16_LE: 'utf-16-le', codecs.BOM_UTF16_BE: 'utf-16-be',
        codecs.BOM_UTF32_LE: 'utf-32-le', codecs.BOM_UTF32_BE: 'utf-32-be'
    }
    # Read the first four bytes -- enough for the longest possible BOM.
    head = self.rawStream.read(4)
    assert isinstance(head, bytes)
    # Probe prefixes in the order UTF-8 (3 bytes), UTF-32 (4 bytes),
    # UTF-16 (2 bytes).  UTF-32 must be tried before UTF-16 because the
    # UTF-32-LE BOM starts with the UTF-16-LE BOM.
    encoding = None
    seek = 0
    for seek in (3, 4, 2):
        encoding = bomDict.get(head[:seek])
        if encoding:
            break
    # Leave the stream positioned just past the BOM, or back at the very
    # start when no BOM was found.
    self.rawStream.seek(seek if encoding else 0)
    return encoding
def detectEncodingMeta(self):
    """Report the encoding declared by a <meta> element, if any.

    Only the first ``numBytesMeta`` bytes of the stream are scanned; the
    stream is rewound to the start afterwards.
    """
    prescan = self.rawStream.read(self.numBytesMeta)
    assert isinstance(prescan, bytes)
    parser = EncodingParser(prescan)
    self.rawStream.seek(0)
    declared = parser.getEncoding()
    # A meta declaration of UTF-16 cannot be correct (the prescan itself
    # read ASCII-compatible bytes), so interpret it as utf-8.
    if declared in ("utf-16", "utf-16-be", "utf-16-le"):
        declared = "utf-8"
    return declared
class EncodingBytes(bytes):
    """String-like object with an associated position and various extra methods
    If the position is ever greater than the string length then an exception is
    raised"""

    def __new__(self, value):
        assert isinstance(value, bytes)
        # Lower-case once at construction so every byte comparison during
        # the encoding prescan is effectively case-insensitive.
        return bytes.__new__(self, value.lower())

    def __init__(self, value):
        # Position starts one byte *before* the data; the first next() call
        # advances to index 0.
        self._position = -1

    def __iter__(self):
        return self

    def __next__(self):
        # Advance and return the byte at the new position as a length-1
        # bytes object (plain indexing would yield an int on Python 3).
        p = self._position = self._position + 1
        if p >= len(self):
            raise StopIteration
        elif p < 0:
            raise TypeError
        return self[p:p + 1]

    def next(self):
        # Py2 compat
        return self.__next__()

    def previous(self):
        # Step one byte backwards, returning the byte that was current
        # *before* the move.
        p = self._position
        if p >= len(self):
            raise StopIteration
        elif p < 0:
            raise TypeError
        self._position = p = p - 1
        return self[p:p + 1]

    def setPosition(self, position):
        # Check the *current* position first so that a scan which already
        # ran off the end keeps raising StopIteration.
        if self._position >= len(self):
            raise StopIteration
        self._position = position

    def getPosition(self):
        if self._position >= len(self):
            raise StopIteration
        if self._position >= 0:
            return self._position
        else:
            # Before the first next() call there is no meaningful position.
            return None

    position = property(getPosition, setPosition)

    def getCurrentByte(self):
        return self[self.position:self.position + 1]

    currentByte = property(getCurrentByte)

    def skip(self, chars=spaceCharactersBytes):
        """Skip past a list of characters"""
        p = self.position  # use property for the error-checking
        while p < len(self):
            c = self[p:p + 1]
            if c not in chars:
                # Stop on the first byte not in ``chars`` and return it.
                self._position = p
                return c
            p += 1
        # Ran off the end; later position reads will raise StopIteration.
        self._position = p
        return None

    def skipUntil(self, chars):
        # Mirror image of skip(): advance until a byte in ``chars`` appears.
        p = self.position
        while p < len(self):
            c = self[p:p + 1]
            if c in chars:
                self._position = p
                return c
            p += 1
        self._position = p
        return None

    def matchBytes(self, bytes):
        """Look for a sequence of bytes at the start of a string. If the bytes
        are found return True and advance the position to the byte after the
        match. Otherwise return False and leave the position alone"""
        p = self.position
        data = self[p:p + len(bytes)]
        rv = data.startswith(bytes)
        if rv:
            self.position += len(bytes)
        return rv

    def jumpTo(self, bytes):
        """Look for the next sequence of bytes matching a given sequence. If
        a match is found advance the position to the last byte of the match

        Raises StopIteration when the sequence is not found.
        """
        newPosition = self[self.position:].find(bytes)
        if newPosition > -1:
            # XXX: This is ugly, but I can't see a nicer way to fix this.
            if self._position == -1:
                self._position = 0
            self._position += (newPosition + len(bytes) - 1)
            return True
        else:
            raise StopIteration
class EncodingParser(object):
    """Mini parser for detecting character encoding from meta elements"""

    def __init__(self, data):
        """string - the data to work on for encoding detection"""
        self.data = EncodingBytes(data)
        self.encoding = None  # set as a side effect of getEncoding()

    def getEncoding(self):
        """Scan the buffer and return the declared encoding, or None."""
        # Dispatch table; more specific byte prefixes come first so that
        # e.g. b"<!--" is tried before b"<!".
        methodDispatch = (
            (b"<!--", self.handleComment),
            (b"<meta", self.handleMeta),
            (b"</", self.handlePossibleEndTag),
            (b"<!", self.handleOther),
            (b"<?", self.handleOther),
            (b"<", self.handlePossibleStartTag))
        for byte in self.data:
            keepParsing = True
            for key, method in methodDispatch:
                if self.data.matchBytes(key):
                    try:
                        # Handlers return False once an encoding was found.
                        keepParsing = method()
                        break
                    except StopIteration:
                        # Ran off the end of the prescan buffer.
                        keepParsing = False
                        break
            if not keepParsing:
                break
        return self.encoding

    def handleComment(self):
        """Skip over comments"""
        return self.data.jumpTo(b"-->")

    def handleMeta(self):
        if self.data.currentByte not in spaceCharactersBytes:
            # if we have <meta not followed by a space so just keep going
            return True
        # We have a valid meta element we want to search for attributes
        hasPragma = False
        pendingEncoding = None
        while True:
            # Try to find the next attribute after the current position
            attr = self.getAttribute()
            if attr is None:
                return True
            else:
                if attr[0] == b"http-equiv":
                    hasPragma = attr[1] == b"content-type"
                    if hasPragma and pendingEncoding is not None:
                        # A content attribute seen earlier becomes valid
                        # now that the pragma has been confirmed.
                        self.encoding = pendingEncoding
                        return False
                elif attr[0] == b"charset":
                    tentativeEncoding = attr[1]
                    codec = codecName(tentativeEncoding)
                    if codec is not None:
                        self.encoding = codec
                        return False
                elif attr[0] == b"content":
                    contentParser = ContentAttrParser(EncodingBytes(attr[1]))
                    tentativeEncoding = contentParser.parse()
                    if tentativeEncoding is not None:
                        codec = codecName(tentativeEncoding)
                        if codec is not None:
                            if hasPragma:
                                self.encoding = codec
                                return False
                            else:
                                # Remember it in case http-equiv shows up
                                # later in the same element.
                                pendingEncoding = codec

    def handlePossibleStartTag(self):
        return self.handlePossibleTag(False)

    def handlePossibleEndTag(self):
        next(self.data)
        return self.handlePossibleTag(True)

    def handlePossibleTag(self, endTag):
        data = self.data
        if data.currentByte not in asciiLettersBytes:
            # If the next byte is not an ascii letter either ignore this
            # fragment (possible start tag case) or treat it according to
            # handleOther
            if endTag:
                data.previous()
                self.handleOther()
            return True
        c = data.skipUntil(spacesAngleBrackets)
        if c == b"<":
            # return to the first step in the overall "two step" algorithm
            # reprocessing the < byte
            data.previous()
        else:
            # Read all attributes
            attr = self.getAttribute()
            while attr is not None:
                attr = self.getAttribute()
        return True

    def handleOther(self):
        return self.data.jumpTo(b">")

    def getAttribute(self):
        """Return a name,value pair for the next attribute in the stream,
        if one is found, or None

        The numbered steps below follow the HTML5 "get an attribute"
        prescan algorithm.
        """
        data = self.data
        # Step 1 (skip chars)
        c = data.skip(spaceCharactersBytes | frozenset([b"/"]))
        assert c is None or len(c) == 1
        # Step 2
        if c in (b">", None):
            return None
        # Step 3
        attrName = []
        attrValue = []
        # Step 4 attribute name
        while True:
            if c == b"=" and attrName:
                break
            elif c in spaceCharactersBytes:
                # Step 6!
                c = data.skip()
                break
            elif c in (b"/", b">"):
                return b"".join(attrName), b""
            elif c in asciiUppercaseBytes:
                attrName.append(c.lower())
            elif c is None:
                return None
            else:
                attrName.append(c)
            # Step 5
            c = next(data)
        # Step 7
        if c != b"=":
            data.previous()
            return b"".join(attrName), b""
        # Step 8
        next(data)
        # Step 9
        c = data.skip()
        # Step 10
        if c in (b"'", b'"'):
            # 10.1
            quoteChar = c
            while True:
                # 10.2
                c = next(data)
                # 10.3
                if c == quoteChar:
                    next(data)
                    return b"".join(attrName), b"".join(attrValue)
                # 10.4
                elif c in asciiUppercaseBytes:
                    attrValue.append(c.lower())
                # 10.5
                else:
                    attrValue.append(c)
        elif c == b">":
            return b"".join(attrName), b""
        elif c in asciiUppercaseBytes:
            attrValue.append(c.lower())
        elif c is None:
            return None
        else:
            attrValue.append(c)
        # Step 11
        while True:
            c = next(data)
            if c in spacesAngleBrackets:
                return b"".join(attrName), b"".join(attrValue)
            elif c in asciiUppercaseBytes:
                attrValue.append(c.lower())
            elif c is None:
                return None
            else:
                attrValue.append(c)
class ContentAttrParser(object):
    """Parse a meta ``content`` attribute value, extracting the encoding
    declared by a ``charset=...`` fragment (or None when absent)."""

    def __init__(self, data):
        assert isinstance(data, bytes)
        self.data = data

    def parse(self):
        try:
            # Locate "charset" and step just past it.  jumpTo raises
            # StopIteration when the marker is absent, which aborts the
            # whole parse via the outer handler below.
            self.data.jumpTo(b"charset")
            self.data.position += 1
            self.data.skip()
            if not self.data.currentByte == b"=":
                # "charset" was not followed by '='; nothing usable here.
                return None
            self.data.position += 1
            self.data.skip()
            # Quoted value: everything up to the matching quote mark.
            if self.data.currentByte in (b'"', b"'"):
                quoteMark = self.data.currentByte
                self.data.position += 1
                start = self.data.position
                if self.data.jumpTo(quoteMark):
                    return self.data[start:self.data.position]
                return None
            # Unquoted value: everything up to the next space character.
            start = self.data.position
            try:
                self.data.skipUntil(spaceCharactersBytes)
                return self.data[start:self.data.position]
            except StopIteration:
                # Value runs to the very end of the data.
                return self.data[start:]
        except StopIteration:
            return None
def codecName(encoding):
    """Return the python codec name corresponding to an encoding or None if the
    string doesn't correspond to a valid encoding."""
    if isinstance(encoding, bytes):
        try:
            encoding = encoding.decode("ascii")
        except UnicodeDecodeError:
            # Encoding names are ASCII by definition; anything else is bogus.
            return None
    if not encoding:
        return None
    # Normalise the name (strip ASCII punctuation, lower-case) before the
    # lookup in the canonical encodings table.
    canonical_name = ascii_punctuation_re.sub("", encoding).lower()
    return encodings.get(canonical_name, None)
| mpl-2.0 |
rtucker/sycamore | Sycamore/support/pytz/zoneinfo/Pacific/Auckland.py | 9 | 6914 | '''tzinfo timezone information for Pacific/Auckland.'''
from pytz.tzinfo import DstTzInfo
from pytz.tzinfo import memorized_datetime as d
from pytz.tzinfo import memorized_ttinfo as i
class Auckland(DstTzInfo):
    '''Pacific/Auckland timezone definition. See datetime.tzinfo for details'''
    # NOTE: this module is generated from the IANA (Olson) tz database;
    # do not edit the transition tables by hand.

    zone = 'Pacific/Auckland'

    # UTC instants at which the zone's offset/DST state changes.  The first
    # entry is a sentinel covering all times before the first known
    # transition; each entry pairs positionally with _transition_info below.
    _utc_transition_times = [
        d(1,1,1,0,0,0),
        d(1927,11,5,14,30,0),
        d(1928,3,3,13,30,0),
        d(1928,10,13,14,30,0),
        d(1929,3,16,14,0,0),
        d(1929,10,12,14,30,0),
        d(1930,3,15,14,0,0),
        d(1930,10,11,14,30,0),
        d(1931,3,14,14,0,0),
        d(1931,10,10,14,30,0),
        d(1932,3,19,14,0,0),
        d(1932,10,8,14,30,0),
        d(1933,3,18,14,0,0),
        d(1933,10,7,14,30,0),
        d(1934,4,28,14,0,0),
        d(1934,9,29,14,30,0),
        d(1935,4,27,14,0,0),
        d(1935,9,28,14,30,0),
        d(1936,4,25,14,0,0),
        d(1936,9,26,14,30,0),
        d(1937,4,24,14,0,0),
        d(1937,9,25,14,30,0),
        d(1938,4,23,14,0,0),
        d(1938,9,24,14,30,0),
        d(1939,4,29,14,0,0),
        d(1939,9,23,14,30,0),
        d(1940,4,27,14,0,0),
        d(1940,9,28,14,30,0),
        d(1945,12,31,12,0,0),
        d(1974,11,2,14,0,0),
        d(1975,2,22,14,0,0),
        d(1975,10,25,14,0,0),
        d(1976,3,6,14,0,0),
        d(1976,10,30,14,0,0),
        d(1977,3,5,14,0,0),
        d(1977,10,29,14,0,0),
        d(1978,3,4,14,0,0),
        d(1978,10,28,14,0,0),
        d(1979,3,3,14,0,0),
        d(1979,10,27,14,0,0),
        d(1980,3,1,14,0,0),
        d(1980,10,25,14,0,0),
        d(1981,2,28,14,0,0),
        d(1981,10,24,14,0,0),
        d(1982,3,6,14,0,0),
        d(1982,10,30,14,0,0),
        d(1983,3,5,14,0,0),
        d(1983,10,29,14,0,0),
        d(1984,3,3,14,0,0),
        d(1984,10,27,14,0,0),
        d(1985,3,2,14,0,0),
        d(1985,10,26,14,0,0),
        d(1986,3,1,14,0,0),
        d(1986,10,25,14,0,0),
        d(1987,2,28,14,0,0),
        d(1987,10,24,14,0,0),
        d(1988,3,5,14,0,0),
        d(1988,10,29,14,0,0),
        d(1989,3,4,14,0,0),
        d(1989,10,7,14,0,0),
        d(1990,3,17,14,0,0),
        d(1990,10,6,14,0,0),
        d(1991,3,16,14,0,0),
        d(1991,10,5,14,0,0),
        d(1992,3,14,14,0,0),
        d(1992,10,3,14,0,0),
        d(1993,3,20,14,0,0),
        d(1993,10,2,14,0,0),
        d(1994,3,19,14,0,0),
        d(1994,10,1,14,0,0),
        d(1995,3,18,14,0,0),
        d(1995,9,30,14,0,0),
        d(1996,3,16,14,0,0),
        d(1996,10,5,14,0,0),
        d(1997,3,15,14,0,0),
        d(1997,10,4,14,0,0),
        d(1998,3,14,14,0,0),
        d(1998,10,3,14,0,0),
        d(1999,3,20,14,0,0),
        d(1999,10,2,14,0,0),
        d(2000,3,18,14,0,0),
        d(2000,9,30,14,0,0),
        d(2001,3,17,14,0,0),
        d(2001,10,6,14,0,0),
        d(2002,3,16,14,0,0),
        d(2002,10,5,14,0,0),
        d(2003,3,15,14,0,0),
        d(2003,10,4,14,0,0),
        d(2004,3,20,14,0,0),
        d(2004,10,2,14,0,0),
        d(2005,3,19,14,0,0),
        d(2005,10,1,14,0,0),
        d(2006,3,18,14,0,0),
        d(2006,9,30,14,0,0),
        d(2007,3,17,14,0,0),
        d(2007,10,6,14,0,0),
        d(2008,3,15,14,0,0),
        d(2008,10,4,14,0,0),
        d(2009,3,14,14,0,0),
        d(2009,10,3,14,0,0),
        d(2010,3,20,14,0,0),
        d(2010,10,2,14,0,0),
        d(2011,3,19,14,0,0),
        d(2011,10,1,14,0,0),
        d(2012,3,17,14,0,0),
        d(2012,10,6,14,0,0),
        d(2013,3,16,14,0,0),
        d(2013,10,5,14,0,0),
        d(2014,3,15,14,0,0),
        d(2014,10,4,14,0,0),
        d(2015,3,14,14,0,0),
        d(2015,10,3,14,0,0),
        d(2016,3,19,14,0,0),
        d(2016,10,1,14,0,0),
        d(2017,3,18,14,0,0),
        d(2017,9,30,14,0,0),
        d(2018,3,17,14,0,0),
        d(2018,10,6,14,0,0),
        d(2019,3,16,14,0,0),
        d(2019,10,5,14,0,0),
        d(2020,3,14,14,0,0),
        d(2020,10,3,14,0,0),
        d(2021,3,20,14,0,0),
        d(2021,10,2,14,0,0),
        d(2022,3,19,14,0,0),
        d(2022,10,1,14,0,0),
        d(2023,3,18,14,0,0),
        d(2023,9,30,14,0,0),
        d(2024,3,16,14,0,0),
        d(2024,10,5,14,0,0),
        d(2025,3,15,14,0,0),
        d(2025,10,4,14,0,0),
        d(2026,3,14,14,0,0),
        d(2026,10,3,14,0,0),
        d(2027,3,20,14,0,0),
        d(2027,10,2,14,0,0),
        d(2028,3,18,14,0,0),
        d(2028,9,30,14,0,0),
        d(2029,3,17,14,0,0),
        d(2029,10,6,14,0,0),
        d(2030,3,16,14,0,0),
        d(2030,10,5,14,0,0),
        d(2031,3,15,14,0,0),
        d(2031,10,4,14,0,0),
        d(2032,3,20,14,0,0),
        d(2032,10,2,14,0,0),
        d(2033,3,19,14,0,0),
        d(2033,10,1,14,0,0),
        d(2034,3,18,14,0,0),
        d(2034,9,30,14,0,0),
        d(2035,3,17,14,0,0),
        d(2035,10,6,14,0,0),
        d(2036,3,15,14,0,0),
        d(2036,10,4,14,0,0),
        d(2037,3,14,14,0,0),
        d(2037,10,3,14,0,0),
    ]

    # (utcoffset seconds, dst seconds, tzname) in effect from the matching
    # transition time above until the next one.  NZMT is the pre-1946 mean
    # time; NZST/NZDT are the modern standard/daylight names.
    _transition_info = [
        i(41400,0,'NZMT'),
        i(45000,3600,'NZST'),
        i(41400,0,'NZMT'),
        i(43200,1800,'NZST'),
        i(41400,0,'NZMT'),
        i(43200,1800,'NZST'),
        i(41400,0,'NZMT'),
        i(43200,1800,'NZST'),
        i(41400,0,'NZMT'),
        i(43200,1800,'NZST'),
        i(41400,0,'NZMT'),
        i(43200,1800,'NZST'),
        i(41400,0,'NZMT'),
        i(43200,1800,'NZST'),
        i(41400,0,'NZMT'),
        i(43200,1800,'NZST'),
        i(41400,0,'NZMT'),
        i(43200,1800,'NZST'),
        i(41400,0,'NZMT'),
        i(43200,1800,'NZST'),
        i(41400,0,'NZMT'),
        i(43200,1800,'NZST'),
        i(41400,0,'NZMT'),
        i(43200,1800,'NZST'),
        i(41400,0,'NZMT'),
        i(43200,1800,'NZST'),
        i(41400,0,'NZMT'),
        i(43200,1800,'NZST'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
        i(43200,0,'NZST'),
        i(46800,3600,'NZDT'),
    ]
# pytz convention: replace the class with a ready-to-use singleton instance.
Auckland = Auckland()
| gpl-2.0 |
GuardianRG/sleepy-puppy | sleepypuppy/admin/user/models.py | 13 | 1368 | # Copyright 2015 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sleepypuppy import db
from sqlalchemy.orm import relationship
from sleepypuppy.admin.models import user_associations
from sleepypuppy.admin.assessment.models import Assessment
class User(db.Model):
    """
    User model contains the following parameters used for email notifications:

    email = email address to send capture notifications to.
    assessments = list of assessments the email address will receive captures for.

    Has an association of assessments with users.
    """
    __tablename__ = 'users'
    # Surrogate primary key.
    id = db.Column(db.Integer, primary_key=True)
    # Notification recipient address (nullable by default).
    email = db.Column(db.String(100))
    # Many-to-many link to assessments via the user_associations table.
    assessments = relationship(Assessment, secondary=user_associations, backref="users")

    def __repr__(self):
        # __repr__ must return a str; returning the raw column value raised
        # TypeError whenever ``email`` was NULL/None.
        return str(self.email)
| apache-2.0 |
moonboots/tensorflow | tensorflow/python/kernel_tests/summary_ops_test.py | 15 | 3767 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for summary ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
class SummaryOpsTest(tf.test.TestCase):
  """Unit tests for the scalar/histogram/merge summary ops."""

  def _AsSummary(self, s):
    # Deserialize the raw bytes produced by a summary op into a Summary
    # protobuf so it can be compared with assertProtoEquals.
    summ = tf.Summary()
    summ.ParseFromString(s)
    return summ

  def testScalarSummary(self):
    # One tag per element of the input tensor, explicit op name.
    with self.test_session() as sess:
      const = tf.constant([10.0, 20.0])
      summ = tf.scalar_summary(["c1", "c2"], const, name="mysumm")
      value = sess.run(summ)
    # Summary ops produce a scalar (serialized proto) tensor.
    self.assertEqual([], summ.get_shape())
    self.assertProtoEquals("""
      value { tag: "c1" simple_value: 10.0 }
      value { tag: "c2" simple_value: 20.0 }
      """, self._AsSummary(value))

  def testScalarSummaryDefaultName(self):
    # Same as above but relying on the default op name.
    with self.test_session() as sess:
      const = tf.constant([10.0, 20.0])
      summ = tf.scalar_summary(["c1", "c2"], const)
      value = sess.run(summ)
    self.assertEqual([], summ.get_shape())
    self.assertProtoEquals("""
      value { tag: "c1" simple_value: 10.0 }
      value { tag: "c2" simple_value: 20.0 }
      """, self._AsSummary(value))

  def testMergeSummary(self):
    # Merging a histogram and a scalar summary yields one proto holding
    # both values.
    with self.test_session() as sess:
      const = tf.constant(10.0)
      summ1 = tf.histogram_summary("h", const, name="histo")
      summ2 = tf.scalar_summary("c", const, name="summ")
      merge = tf.merge_summary([summ1, summ2])
      value = sess.run(merge)
    self.assertEqual([], merge.get_shape())
    self.assertProtoEquals("""
      value {
        tag: "h"
        histo {
          min: 10.0
          max: 10.0
          num: 1.0
          sum: 10.0
          sum_squares: 100.0
          bucket_limit: 9.93809490288
          bucket_limit: 10.9319043932
          bucket_limit: 1.7976931348623157e+308
          bucket: 0.0
          bucket: 1.0
          bucket: 0.0
        }
      }
      value { tag: "c" simple_value: 10.0 }
      """, self._AsSummary(value))

  def testMergeAllSummaries(self):
    # merge_all_summaries() only picks up summaries from the requested
    # collection (default GraphKeys.SUMMARIES); summ2 goes to "foo_key".
    with tf.Graph().as_default():
      const = tf.constant(10.0)
      summ1 = tf.histogram_summary("h", const, name="histo")
      summ2 = tf.scalar_summary("o", const, name="oops",
                                collections=["foo_key"])
      summ3 = tf.scalar_summary("c", const, name="summ")
      merge = tf.merge_all_summaries()
      self.assertEqual("MergeSummary", merge.op.type)
      self.assertEqual(2, len(merge.op.inputs))
      self.assertEqual(summ1, merge.op.inputs[0])
      self.assertEqual(summ3, merge.op.inputs[1])
      merge = tf.merge_all_summaries("foo_key")
      self.assertEqual("MergeSummary", merge.op.type)
      self.assertEqual(1, len(merge.op.inputs))
      self.assertEqual(summ2, merge.op.inputs[0])
      # An empty/unknown collection yields no merge op at all.
      self.assertTrue(tf.merge_all_summaries("bar_key") is None)

  def testHistogramSummaryTypes(self):
    # histogram_summary must accept every common numeric dtype.
    with tf.Graph().as_default():
      for dtype in (tf.int8, tf.uint8, tf.int16, tf.int32,
                    tf.float32, tf.float64):
        const = tf.constant(10, dtype=dtype)
        tf.histogram_summary("h", const, name="histo")
# Standard TensorFlow test entry point.
if __name__ == "__main__":
  tf.test.main()
| apache-2.0 |
chrber/dcache-docker | dcache/deps/.vim/bundle/jedi-vim/jedi/jedi/parser/pgen2/pgen.py | 49 | 13966 | # Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
# Modifications:
# Copyright 2014 David Halter. Integration into Jedi.
# Modifications are dual-licensed: MIT and PSF.
# Pgen imports
from . import grammar
from jedi.parser import token
from jedi.parser import tokenize
class ParserGenerator(object):
    """Builds DFAs (and ultimately a Grammar object) from a grammar file.

    The grammar file uses the EBNF-like notation of CPython's
    ``Grammar/Grammar``; each rule is converted NFA -> DFA -> simplified DFA.
    """

    def __init__(self, filename, stream=None):
        close_stream = None
        if stream is None:
            stream = open(filename)
            close_stream = stream.close
        self.filename = filename
        self.stream = stream
        self.generator = tokenize.generate_tokens(stream.readline)
        self.gettoken()  # Initialize lookahead
        self.dfas, self.startsymbol = self.parse()
        if close_stream is not None:
            close_stream()
        self.first = {}  # map from symbol name to set of tokens
        self.addfirstsets()

    def make_grammar(self):
        """Convert the parsed DFAs into a grammar.Grammar object."""
        c = grammar.Grammar()
        names = list(self.dfas.keys())
        names.sort()
        names.remove(self.startsymbol)
        names.insert(0, self.startsymbol)
        # Assign non-terminal numbers; they start at 256, above the
        # terminal token numbers.
        for name in names:
            i = 256 + len(c.symbol2number)
            c.symbol2number[name] = i
            c.number2symbol[i] = name
        for name in names:
            dfa = self.dfas[name]
            states = []
            for state in dfa:
                arcs = []
                for label, next in state.arcs.items():
                    arcs.append((self.make_label(c, label), dfa.index(next)))
                if state.isfinal:
                    # Arc with label 0 marks an accepting state.
                    arcs.append((0, dfa.index(state)))
                states.append(arcs)
            c.states.append(states)
            c.dfas[c.symbol2number[name]] = (states, self.make_first(c, name))
        c.start = c.symbol2number[self.startsymbol]
        return c

    def make_first(self, c, name):
        """Return the FIRST set of ``name`` keyed by interned label numbers."""
        rawfirst = self.first[name]
        first = {}
        for label in rawfirst:
            ilabel = self.make_label(c, label)
            ##assert ilabel not in first # XXX failed on <> ... !=
            first[ilabel] = 1
        return first

    def make_label(self, c, label):
        """Intern ``label`` (symbol name, token name, keyword or operator)
        into the grammar's label table and return its index."""
        # XXX Maybe this should be a method on a subclass of converter?
        ilabel = len(c.labels)
        if label[0].isalpha():
            # Either a symbol name or a named token
            if label in c.symbol2number:
                # A symbol name (a non-terminal)
                if label in c.symbol2label:
                    return c.symbol2label[label]
                else:
                    c.labels.append((c.symbol2number[label], None))
                    c.symbol2label[label] = ilabel
                    return ilabel
            else:
                # A named token (NAME, NUMBER, STRING)
                itoken = getattr(token, label, None)
                assert isinstance(itoken, int), label
                assert itoken in token.tok_name, label
                if itoken in c.tokens:
                    return c.tokens[itoken]
                else:
                    c.labels.append((itoken, None))
                    c.tokens[itoken] = ilabel
                    return ilabel
        else:
            # Either a keyword or an operator
            assert label[0] in ('"', "'"), label
            value = eval(label)
            if value[0].isalpha():
                # A keyword
                if value in c.keywords:
                    return c.keywords[value]
                else:
                    c.labels.append((token.NAME, value))
                    c.keywords[value] = ilabel
                    return ilabel
            else:
                # An operator (any non-numeric token)
                itoken = token.opmap[value]  # Fails if unknown token
                if itoken in c.tokens:
                    return c.tokens[itoken]
                else:
                    c.labels.append((itoken, None))
                    c.tokens[itoken] = ilabel
                    return ilabel

    def addfirstsets(self):
        """Compute the FIRST set for every rule (symbol)."""
        names = list(self.dfas.keys())
        names.sort()
        for name in names:
            if name not in self.first:
                self.calcfirst(name)
            #print name, self.first[name].keys()

    def calcfirst(self, name):
        """Recursively compute self.first[name], detecting left recursion."""
        dfa = self.dfas[name]
        self.first[name] = None  # dummy to detect left recursion
        state = dfa[0]
        totalset = {}
        overlapcheck = {}
        for label, next in state.arcs.items():
            if label in self.dfas:
                if label in self.first:
                    fset = self.first[label]
                    if fset is None:
                        raise ValueError("recursion for rule %r" % name)
                else:
                    self.calcfirst(label)
                    fset = self.first[label]
                totalset.update(fset)
                overlapcheck[label] = fset
            else:
                totalset[label] = 1
                overlapcheck[label] = {label: 1}
        # Two alternatives whose FIRST sets overlap would make the grammar
        # ambiguous for a single-token-lookahead parser; report that.
        inverse = {}
        for label, itsfirst in overlapcheck.items():
            for symbol in itsfirst:
                if symbol in inverse:
                    raise ValueError("rule %s is ambiguous; %s is in the"
                                     " first sets of %s as well as %s" %
                                     (name, symbol, label, inverse[symbol]))
                inverse[symbol] = label
        self.first[name] = totalset

    def parse(self):
        """Parse the whole grammar file.

        Returns ``({rule name: DFA state list}, start symbol name)``.
        """
        dfas = {}
        startsymbol = None
        # MSTART: (NEWLINE | RULE)* ENDMARKER
        while self.type != token.ENDMARKER:
            while self.type == token.NEWLINE:
                self.gettoken()
            # RULE: NAME ':' RHS NEWLINE
            name = self.expect(token.NAME)
            self.expect(token.OP, ":")
            a, z = self.parse_rhs()
            self.expect(token.NEWLINE)
            #self.dump_nfa(name, a, z)
            dfa = self.make_dfa(a, z)
            #self.dump_dfa(name, dfa)
            # oldlen = len(dfa)
            self.simplify_dfa(dfa)
            # newlen = len(dfa)
            dfas[name] = dfa
            #print name, oldlen, newlen
            if startsymbol is None:
                startsymbol = name
        return dfas, startsymbol

    def make_dfa(self, start, finish):
        """Subset-construct a DFA from the NFA between ``start`` and ``finish``."""
        # To turn an NFA into a DFA, we define the states of the DFA
        # to correspond to *sets* of states of the NFA. Then do some
        # state reduction. Let's represent sets as dicts with 1 for
        # values.
        assert isinstance(start, NFAState)
        assert isinstance(finish, NFAState)

        def closure(state):
            # Epsilon closure: all NFA states reachable via None-labelled arcs.
            base = {}
            addclosure(state, base)
            return base

        def addclosure(state, base):
            assert isinstance(state, NFAState)
            if state in base:
                return
            base[state] = 1
            for label, next in state.arcs:
                if label is None:
                    addclosure(next, base)

        states = [DFAState(closure(start), finish)]
        for state in states:  # NB states grows while we're iterating
            arcs = {}
            # Group every labelled NFA transition by label, collecting the
            # epsilon closures of the targets.
            for nfastate in state.nfaset:
                for label, next in nfastate.arcs:
                    if label is not None:
                        addclosure(next, arcs.setdefault(label, {}))
            for label, nfaset in arcs.items():
                # Reuse an existing DFA state for an identical NFA set.
                for st in states:
                    if st.nfaset == nfaset:
                        break
                else:
                    st = DFAState(nfaset, finish)
                    states.append(st)
                state.addarc(st, label)
        return states  # List of DFAState instances; first one is start

    def dump_nfa(self, name, start, finish):
        """Debug helper: print the NFA for ``name``."""
        print("Dump of NFA for", name)
        todo = [start]
        for i, state in enumerate(todo):
            print(" State", i, state is finish and "(final)" or "")
            for label, next in state.arcs:
                if next in todo:
                    j = todo.index(next)
                else:
                    j = len(todo)
                    todo.append(next)
                if label is None:
                    print(" -> %d" % j)
                else:
                    print(" %s -> %d" % (label, j))

    def dump_dfa(self, name, dfa):
        """Debug helper: print the DFA for ``name``."""
        print("Dump of DFA for", name)
        for i, state in enumerate(dfa):
            print(" State", i, state.isfinal and "(final)" or "")
            for label, next in state.arcs.items():
                print(" %s -> %d" % (label, dfa.index(next)))

    def simplify_dfa(self, dfa):
        # This is not theoretically optimal, but works well enough.
        # Algorithm: repeatedly look for two states that have the same
        # set of arcs (same labels pointing to the same nodes) and
        # unify them, until things stop changing.
        # dfa is a list of DFAState instances
        changes = True
        while changes:
            changes = False
            for i, state_i in enumerate(dfa):
                for j in range(i + 1, len(dfa)):
                    state_j = dfa[j]
                    if state_i == state_j:
                        #print " unify", i, j
                        del dfa[j]
                        for state in dfa:
                            state.unifystate(state_j, state_i)
                        changes = True
                        break

    def parse_rhs(self):
        """Parse RHS: ALT ('|' ALT)* -- returns (start, end) NFA states."""
        # RHS: ALT ('|' ALT)*
        a, z = self.parse_alt()
        if self.value != "|":
            return a, z
        else:
            # Join the alternatives with fresh start/end states linked by
            # epsilon transitions.
            aa = NFAState()
            zz = NFAState()
            aa.addarc(a)
            z.addarc(zz)
            while self.value == "|":
                self.gettoken()
                a, z = self.parse_alt()
                aa.addarc(a)
                z.addarc(zz)
            return aa, zz

    def parse_alt(self):
        """Parse ALT: ITEM+ -- chains items with epsilon arcs."""
        # ALT: ITEM+
        a, b = self.parse_item()
        while (self.value in ("(", "[") or
               self.type in (token.NAME, token.STRING)):
            c, d = self.parse_item()
            b.addarc(c)
            b = d
        return a, b

    def parse_item(self):
        """Parse ITEM: '[' RHS ']' | ATOM ['+' | '*']."""
        # ITEM: '[' RHS ']' | ATOM ['+' | '*']
        if self.value == "[":
            self.gettoken()
            a, z = self.parse_rhs()
            self.expect(token.OP, "]")
            # Optional part: epsilon arc skips it entirely.
            a.addarc(z)
            return a, z
        else:
            a, z = self.parse_atom()
            value = self.value
            if value not in ("+", "*"):
                return a, z
            self.gettoken()
            # Loop back for repetition; '*' also allows zero occurrences
            # (returning (a, a) makes the start state accepting).
            z.addarc(a)
            if value == "+":
                return a, z
            else:
                return a, a

    def parse_atom(self):
        """Parse ATOM: '(' RHS ')' | NAME | STRING."""
        # ATOM: '(' RHS ')' | NAME | STRING
        if self.value == "(":
            self.gettoken()
            a, z = self.parse_rhs()
            self.expect(token.OP, ")")
            return a, z
        elif self.type in (token.NAME, token.STRING):
            a = NFAState()
            z = NFAState()
            a.addarc(z, self.value)
            self.gettoken()
            return a, z
        else:
            self.raise_error("expected (...) or NAME or STRING, got %s/%s",
                             self.type, self.value)

    def expect(self, type, value=None):
        """Consume and return the current token's value; it must match
        ``type`` (and ``value`` when given), else a SyntaxError is raised."""
        if self.type != type or (value is not None and self.value != value):
            self.raise_error("expected %s/%s, got %s/%s",
                             type, value, self.type, self.value)
        value = self.value
        self.gettoken()
        return value

    def gettoken(self):
        """Advance the lookahead token, skipping comments and non-logical
        newlines."""
        tup = next(self.generator)
        while tup[0] in (token.COMMENT, token.NL):
            tup = next(self.generator)
        self.type, self.value, self.begin, prefix = tup
        #print tokenize.tok_name[self.type], repr(self.value)

    def raise_error(self, msg, *args):
        """Raise SyntaxError at the current token with file/line context."""
        if args:
            try:
                msg = msg % args
            except:
                # Formatting failed (mismatched placeholders); fall back to
                # a plain join so the original message is not lost.
                msg = " ".join([msg] + list(map(str, args)))
        line = open(self.filename).readlines()[self.begin[0]]
        raise SyntaxError(msg, (self.filename, self.begin[0],
                                self.begin[1], line))
class NFAState(object):
    """A state in the nondeterministic automaton built from the grammar.

    Arcs are stored as ``(label, NFAState)`` pairs; a label of None denotes
    an epsilon (free) transition.
    """

    def __init__(self):
        self.arcs = []  # list of (label or None, NFAState) pairs

    def addarc(self, next, label=None):
        assert isinstance(next, NFAState)
        assert label is None or isinstance(label, str)
        self.arcs.append((label, next))
class DFAState(object):
    """A deterministic state: a set of NFA states plus labelled arcs.

    ``nfaset`` maps the constituent NFAState objects to 1; ``isfinal`` is
    true when the NFA's final state is among them.
    """

    def __init__(self, nfaset, final):
        assert isinstance(nfaset, dict)
        assert isinstance(next(iter(nfaset)), NFAState)
        assert isinstance(final, NFAState)
        self.nfaset = nfaset
        self.isfinal = final in nfaset
        self.arcs = {}  # map from label to DFAState

    def addarc(self, next, label):
        assert isinstance(label, str)
        assert label not in self.arcs
        assert isinstance(next, DFAState)
        self.arcs[label] = next

    def unifystate(self, old, new):
        # Redirect every arc pointing at ``old`` to ``new``.
        for label, target in list(self.arcs.items()):
            if target is old:
                self.arcs[label] = new

    def __eq__(self, other):
        # Equality test -- ignore the nfaset instance variable.  Comparing
        # arc targets with == would recurse through automaton cycles, so
        # compare them by identity instead.
        assert isinstance(other, DFAState)
        if self.isfinal != other.isfinal:
            return False
        if len(self.arcs) != len(other.arcs):
            return False
        return all(other.arcs.get(label) is target
                   for label, target in self.arcs.items())

    __hash__ = None  # For Py3 compatibility.
def generate_grammar(filename="Grammar.txt"):
    """Parse the grammar description in ``filename`` and return a Grammar."""
    return ParserGenerator(filename).make_grammar()
| gpl-3.0 |
simonwydooghe/ansible | lib/ansible/modules/storage/netapp/na_elementsw_network_interfaces.py | 44 | 10836 | #!/usr/bin/python
# (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or
# https://www.gnu.org/licenses/gpl-3.0.txt)
'''
Element Software Node Network Interfaces - Bond 1G and 10G configuration
'''
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
module: na_elementsw_network_interfaces
short_description: NetApp Element Software Configure Node Network Interfaces
extends_documentation_fragment:
- netapp.solidfire
version_added: '2.7'
author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com>
description:
- Configure Element SW Node Network Interfaces for Bond 1G and 10G IP address.
options:
method:
description:
- Type of Method used to configure the interface.
- method depends on other settings such as the use of a static IP address, which will change the method to static.
- loopback - Used to define the IPv4 loopback interface.
- manual - Used to define interfaces for which no configuration is done by default.
- dhcp - May be used to obtain an IP address via DHCP.
- static - Used to define Ethernet interfaces with statically allocated IPv4 addresses.
choices: ['loopback', 'manual', 'dhcp', 'static']
required: true
ip_address_1g:
description:
- IP address for the 1G network.
required: true
ip_address_10g:
description:
- IP address for the 10G network.
required: true
subnet_1g:
description:
- 1GbE Subnet Mask.
required: true
subnet_10g:
description:
- 10GbE Subnet Mask.
required: true
gateway_address_1g:
description:
- Router network address to send packets out of the local network.
required: true
gateway_address_10g:
description:
- Router network address to send packets out of the local network.
required: true
mtu_1g:
description:
- Maximum Transmission Unit for 1GbE, Largest packet size that a network protocol can transmit.
- Must be greater than or equal to 1500 bytes.
default: '1500'
mtu_10g:
description:
- Maximum Transmission Unit for 10GbE, Largest packet size that a network protocol can transmit.
- Must be greater than or equal to 1500 bytes.
default: '1500'
dns_nameservers:
description:
- List of addresses for domain name servers.
dns_search_domains:
description:
- List of DNS search domains.
bond_mode_1g:
description:
- Bond mode for 1GbE configuration.
choices: ['ActivePassive', 'ALB', 'LACP']
default: 'ActivePassive'
bond_mode_10g:
description:
- Bond mode for 10GbE configuration.
choices: ['ActivePassive', 'ALB', 'LACP']
default: 'ActivePassive'
lacp_1g:
description:
- Link Aggregation Control Protocol useful only if LACP is selected as the Bond Mode.
- Slow - Packets are transmitted at 30 second intervals.
- Fast - Packets are transmitted in 1 second intervals.
choices: ['Fast', 'Slow']
default: 'Slow'
lacp_10g:
description:
- Link Aggregation Control Protocol useful only if LACP is selected as the Bond Mode.
- Slow - Packets are transmitted at 30 second intervals.
- Fast - Packets are transmitted in 1 second intervals.
choices: ['Fast', 'Slow']
default: 'Slow'
virtual_network_tag:
description:
- This is the primary network tag. All nodes in a cluster have the same VLAN tag.
'''
EXAMPLES = """
- name: Set Node network interfaces configuration for Bond 1G and 10G properties
tags:
- elementsw_network_interfaces
na_elementsw_network_interfaces:
hostname: "{{ elementsw_hostname }}"
username: "{{ elementsw_username }}"
password: "{{ elementsw_password }}"
method: static
ip_address_1g: 10.226.109.68
ip_address_10g: 10.226.201.72
subnet_1g: 255.255.255.0
subnet_10g: 255.255.255.0
gateway_address_1g: 10.193.139.1
gateway_address_10g: 10.193.140.1
mtu_1g: 1500
mtu_10g: 9000
bond_mode_1g: ActivePassive
bond_mode_10g: LACP
lacp_10g: Fast
"""
RETURN = """
msg:
description: Success message
returned: success
type: str
"""
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
import ansible.module_utils.netapp as netapp_utils
# Detect whether the SolidFire Python SDK is importable; the module
# fails fast in __init__ when it is not.
HAS_SF_SDK = netapp_utils.has_sf_sdk()
try:
    from solidfire.models import Network, NetworkConfig
    HAS_SF_SDK = True
except Exception:
    HAS_SF_SDK = False
class ElementSWNetworkInterfaces(object):
    """
    Element Software Network Interfaces - Bond 1G and 10G Network configuration
    """

    def __init__(self):
        """Build the argument spec, parse parameters and open the SDK connection."""
        self.argument_spec = netapp_utils.ontap_sf_host_argument_spec()
        self.argument_spec.update(
            method=dict(type='str', required=True, choices=['loopback', 'manual', 'dhcp', 'static']),
            ip_address_1g=dict(type='str', required=True),
            ip_address_10g=dict(type='str', required=True),
            subnet_1g=dict(type='str', required=True),
            subnet_10g=dict(type='str', required=True),
            gateway_address_1g=dict(type='str', required=True),
            gateway_address_10g=dict(type='str', required=True),
            mtu_1g=dict(type='str', default='1500'),
            mtu_10g=dict(type='str', default='1500'),
            dns_nameservers=dict(type='list'),
            dns_search_domains=dict(type='list'),
            bond_mode_1g=dict(type='str', default='ActivePassive', choices=['ActivePassive', 'ALB', 'LACP']),
            bond_mode_10g=dict(type='str', default='ActivePassive', choices=['ActivePassive', 'ALB', 'LACP']),
            lacp_1g=dict(type='str', default='Slow', choices=['Fast', 'Slow']),
            lacp_10g=dict(type='str', default='Slow', choices=['Fast', 'Slow']),
            virtual_network_tag=dict(type='str'),
        )

        self.module = AnsibleModule(
            argument_spec=self.argument_spec,
            supports_check_mode=True,
        )

        input_params = self.module.params

        self.method = input_params['method']
        self.ip_address_1g = input_params['ip_address_1g']
        self.ip_address_10g = input_params['ip_address_10g']
        self.subnet_1g = input_params['subnet_1g']
        self.subnet_10g = input_params['subnet_10g']
        self.gateway_address_1g = input_params['gateway_address_1g']
        self.gateway_address_10g = input_params['gateway_address_10g']
        self.mtu_1g = input_params['mtu_1g']
        self.mtu_10g = input_params['mtu_10g']
        self.dns_nameservers = input_params['dns_nameservers']
        self.dns_search_domains = input_params['dns_search_domains']
        self.bond_mode_1g = input_params['bond_mode_1g']
        self.bond_mode_10g = input_params['bond_mode_10g']
        self.lacp_1g = input_params['lacp_1g']
        self.lacp_10g = input_params['lacp_10g']
        self.virtual_network_tag = input_params['virtual_network_tag']

        if HAS_SF_SDK is False:
            self.module.fail_json(msg="Unable to import the SolidFire Python SDK")
        else:
            # Per-node management API listens on port 442 (the cluster API uses 443).
            self.sfe = netapp_utils.create_sf_connection(module=self.module, port=442)

    def set_network_config(self):
        """
        Push the assembled Network object to the node; fail the module on error.
        """
        try:
            self.sfe.set_network_config(network=self.network_object)
        except Exception as exception_object:
            self.module.fail_json(msg='Error network setting for node %s' % (to_native(exception_object)),
                                  exception=traceback.format_exc())

    def get_network_params_object(self):
        """
        Get Element SW Network object

        :description: build the Bond1G/Bond10G NetworkConfig pair from the
            module parameters

        :return: Network object, or fail_json on error
        :rtype: object(Network object)
        """
        try:
            bond_1g_network = NetworkConfig(method=self.method,
                                            address=self.ip_address_1g,
                                            netmask=self.subnet_1g,
                                            gateway=self.gateway_address_1g,
                                            mtu=self.mtu_1g,
                                            dns_nameservers=self.dns_nameservers,
                                            dns_search=self.dns_search_domains,
                                            bond_mode=self.bond_mode_1g,
                                            bond_lacp_rate=self.lacp_1g,
                                            virtual_network_tag=self.virtual_network_tag)
            bond_10g_network = NetworkConfig(method=self.method,
                                             address=self.ip_address_10g,
                                             netmask=self.subnet_10g,
                                             gateway=self.gateway_address_10g,
                                             mtu=self.mtu_10g,
                                             dns_nameservers=self.dns_nameservers,
                                             dns_search=self.dns_search_domains,
                                             bond_mode=self.bond_mode_10g,
                                             bond_lacp_rate=self.lacp_10g,
                                             virtual_network_tag=self.virtual_network_tag)
            network_object = Network(bond1_g=bond_1g_network,
                                     bond10_g=bond_10g_network)
            return network_object
        except Exception as e:
            # Report the full traceback, consistent with set_network_config()
            # (previously only the stringified exception was passed).
            self.module.fail_json(msg='Error with setting up network object for node 1G and 10G configuration : %s' % to_native(e),
                                  exception=traceback.format_exc())

    def apply(self):
        """
        Check connection and initialize node with cluster ownership
        """
        changed = False
        result_message = None
        self.network_object = self.get_network_params_object()
        if self.network_object is not None:
            # Honor check mode: report the pending change without applying it
            # (the module declares supports_check_mode=True).
            if not self.module.check_mode:
                self.set_network_config()
            changed = True
        else:
            result_message = "Skipping changes, No change requested"
        self.module.exit_json(changed=changed, msg=result_message)
def main():
    """Module entry point: instantiate the worker and apply the configuration."""
    interfaces = ElementSWNetworkInterfaces()
    interfaces.apply()


if __name__ == '__main__':
    main()
| gpl-3.0 |
xNovax/SickRage | lib/github/GitTag.py | 74 | 4322 | # -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2012 Zearin <zearin@gonk.net> #
# Copyright 2013 AKFish <akfish@gmail.com> #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import github.GithubObject
import github.GitAuthor
import github.GitObject
class GitTag(github.GithubObject.CompletableGithubObject):
    """
    This class represents GitTags as returned for example by http://developer.github.com/v3/todo
    """

    # Plain string attributes; "object" and "tagger" are handled separately
    # because they deserialize into dedicated classes.
    _STRING_FIELDS = ("message", "sha", "tag", "url")

    @property
    def message(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._message)
        return self._message.value

    @property
    def object(self):
        """
        :type: :class:`github.GitObject.GitObject`
        """
        self._completeIfNotSet(self._object)
        return self._object.value

    @property
    def sha(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._sha)
        return self._sha.value

    @property
    def tag(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._tag)
        return self._tag.value

    @property
    def tagger(self):
        """
        :type: :class:`github.GitAuthor.GitAuthor`
        """
        self._completeIfNotSet(self._tagger)
        return self._tagger.value

    @property
    def url(self):
        """
        :type: string
        """
        self._completeIfNotSet(self._url)
        return self._url.value

    def _initAttributes(self):
        # Every attribute starts out NotSet until _useAttributes fills it in.
        for field in self._STRING_FIELDS + ("object", "tagger"):
            setattr(self, "_" + field, github.GithubObject.NotSet)

    def _useAttributes(self, attributes):
        for field in self._STRING_FIELDS:
            if field in attributes:  # pragma no branch
                setattr(self, "_" + field,
                        self._makeStringAttribute(attributes[field]))
        if "object" in attributes:  # pragma no branch
            self._object = self._makeClassAttribute(
                github.GitObject.GitObject, attributes["object"])
        if "tagger" in attributes:  # pragma no branch
            self._tagger = self._makeClassAttribute(
                github.GitAuthor.GitAuthor, attributes["tagger"])
| gpl-3.0 |
calfonso/ansible | lib/ansible/inventory/data.py | 23 | 9607 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#############################################
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.inventory.group import Group
from ansible.inventory.host import Host
from ansible.module_utils.six import iteritems
from ansible.utils.vars import combine_vars
from ansible.utils.path import basedir
# Reuse the CLI's Display instance when running under the ansible
# entry point; otherwise create a standalone one (e.g. in tests).
try:
    from __main__ import display
except ImportError:
    from ansible.utils.display import Display
    display = Display()
class InventoryData(object):
    """
    Holds inventory data (host and group objects).
    Using its methods should guarantee expected relationships and data.
    """

    def __init__(self):
        # the inventory object holds a list of groups
        self.groups = {}
        self.hosts = {}

        # provides 'groups' magic var, host object has group_names
        self._groups_dict_cache = {}

        # current localhost, implicit or explicit
        self.localhost = None

        self.current_source = None

        # Always create the 'all' and 'ungrouped' groups,
        for group in ('all', 'ungrouped'):
            self.add_group(group)
        self.add_child('all', 'ungrouped')

    def serialize(self):
        """Return a picklable dict of the inventory state (cache dropped)."""
        self._groups_dict_cache = None
        data = {
            'groups': self.groups,
            'hosts': self.hosts,
            'local': self.localhost,
            'source': self.current_source,
        }
        return data

    def deserialize(self, data):
        """Restore state produced by serialize(); the cache is rebuilt lazily."""
        self._groups_dict_cache = {}
        self.hosts = data.get('hosts')
        self.groups = data.get('groups')
        self.localhost = data.get('local')
        self.current_source = data.get('source')

    def _create_implicit_localhost(self, pattern):
        """Build (or reuse) the implicit localhost Host object for *pattern*."""
        if self.localhost:
            new_host = self.localhost
        else:
            new_host = Host(pattern)
            new_host.address = "127.0.0.1"
            new_host.implicit = True

            # set localhost defaults
            py_interp = sys.executable
            if not py_interp:
                # sys.executable is not set in some cornercases. see issue #13585
                py_interp = '/usr/bin/python'
                display.warning('Unable to determine python interpreter from sys.executable. Using /usr/bin/python default. '
                                'You can correct this by setting ansible_python_interpreter for localhost')
            new_host.set_variable("ansible_python_interpreter", py_interp)
            new_host.set_variable("ansible_connection", 'local')

            self.localhost = new_host

        return new_host

    def reconcile_inventory(self):
        ''' Ensure inventory basic rules, run after updates '''

        display.debug('Reconcile groups and hosts in inventory.')
        self.current_source = None

        group_names = set()
        # set group vars from group_vars/ files and vars plugins
        for g in self.groups:
            group = self.groups[g]
            group_names.add(group.name)

            # ensure all groups inherit from 'all'
            if group.name != 'all' and not group.get_ancestors():
                self.add_child('all', group.name)

        host_names = set()
        # get host vars from host_vars/ files and vars plugins
        for host in self.hosts.values():
            host_names.add(host.name)

            mygroups = host.get_groups()

            if self.groups['ungrouped'] in mygroups:
                # clear ungrouped of any incorrectly stored by parser
                if set(mygroups).difference(set([self.groups['all'], self.groups['ungrouped']])):
                    self.groups['ungrouped'].remove_host(host)

            elif not host.implicit:
                # add ungrouped hosts to ungrouped, except implicit
                length = len(mygroups)
                if length == 0 or (length == 1 and self.groups['all'] in mygroups):
                    self.add_child('ungrouped', host.name)

            # special case for implicit hosts
            if host.implicit:
                host.vars = combine_vars(self.groups['all'].get_vars(), host.vars)

        # warn if overloading identifier as both group and host
        for conflict in group_names.intersection(host_names):
            display.warning("Found both group and host with same name: %s" % conflict)

        self._groups_dict_cache = {}

    def get_host(self, hostname):
        ''' fetch host object using name deal with implicit localhost '''

        matching_host = self.hosts.get(hostname, None)

        # if host is not in hosts dict
        if matching_host is None and hostname in C.LOCALHOST:
            # might need to create implicit localhost
            matching_host = self._create_implicit_localhost(hostname)

        return matching_host

    def add_group(self, group):
        ''' adds a group to inventory if not there already '''

        if group not in self.groups:
            g = Group(group)
            self.groups[group] = g
            self._groups_dict_cache = {}
            display.debug("Added group %s to inventory" % group)
        else:
            display.debug("group %s already in inventory" % group)

    def remove_group(self, group):
        """Drop a group and detach it from every host."""
        if group in self.groups:
            del self.groups[group]
            display.debug("Removed group %s from inventory" % group)
            self._groups_dict_cache = {}

        for host in self.hosts:
            h = self.hosts[host]
            h.remove_group(group)

    def add_host(self, host, group=None, port=None):
        ''' adds a host to inventory and possibly a group if not there already '''

        g = None
        if group:
            if group in self.groups:
                g = self.groups[group]
            else:
                raise AnsibleError("Could not find group %s in inventory" % group)

        if host not in self.hosts:
            h = Host(host, port)
            self.hosts[host] = h
            if self.current_source:  # set to 'first source' in which host was encountered
                self.set_variable(host, 'inventory_file', self.current_source)
                self.set_variable(host, 'inventory_dir', basedir(self.current_source))
            else:
                self.set_variable(host, 'inventory_file', None)
                self.set_variable(host, 'inventory_dir', None)
            display.debug("Added host %s to inventory" % (host))

            # set default localhost from inventory to avoid creating an implicit one. Last localhost defined 'wins'.
            if host in C.LOCALHOST:
                if self.localhost is None:
                    self.localhost = self.hosts[host]
                    display.vvvv("Set default localhost to %s" % h)
                else:
                    display.warning("A duplicate localhost-like entry was found (%s). First found localhost was %s" % (h, self.localhost.name))
        else:
            h = self.hosts[host]

        if g:
            g.add_host(h)
            self._groups_dict_cache = {}
            display.debug("Added host %s to group %s" % (host, group))

    def remove_host(self, host):
        """Drop a host and detach it from every group."""
        if host in self.hosts:
            del self.hosts[host]

        for group in self.groups:
            g = self.groups[group]
            g.remove_host(host)

    def set_variable(self, entity, varname, value):
        ''' sets a variable for an inventory object '''

        if entity in self.groups:
            inv_object = self.groups[entity]
        elif entity in self.hosts:
            inv_object = self.hosts[entity]
        else:
            raise AnsibleError("Could not identify group or host named %s" % entity)

        inv_object.set_variable(varname, value)
        display.debug('set %s for %s' % (varname, entity))

    def add_child(self, group, child):
        ''' Add host or group to group '''

        if group in self.groups:
            g = self.groups[group]
            if child in self.groups:
                g.add_child_group(self.groups[child])
            elif child in self.hosts:
                g.add_host(self.hosts[child])
            else:
                raise AnsibleError("%s is not a known host nor group" % child)
            self._groups_dict_cache = {}
            display.debug('Group %s now contains %s' % (group, child))
        else:
            raise AnsibleError("%s is not a known group" % group)

    def get_groups_dict(self):
        """
        We merge a 'magic' var 'groups' with group name keys and hostname list values into every host variable set. Cache for speed.
        """
        if not self._groups_dict_cache:
            for (group_name, group) in iteritems(self.groups):
                self._groups_dict_cache[group_name] = [h.name for h in group.get_hosts()]

        return self._groups_dict_cache
| gpl-3.0 |
lordB8r/polls | ENV/lib/python2.7/site-packages/django/template/loaders/app_directories.py | 114 | 2362 | """
Wrapper for loading templates from "templates" directories in INSTALLED_APPS
packages.
"""
import os
import sys
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.template.base import TemplateDoesNotExist
from django.template.loader import BaseLoader
from django.utils._os import safe_join
from django.utils.importlib import import_module
from django.utils import six
# At compile time, cache the directories to search.
if not six.PY3:
    fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
app_template_dirs = []
for app in settings.INSTALLED_APPS:
    try:
        mod = import_module(app)
    except ImportError as e:
        raise ImproperlyConfigured('ImportError %s: %s' % (app, e.args[0]))
    # Each installed app may ship templates under <package>/templates.
    template_dir = os.path.join(os.path.dirname(mod.__file__), 'templates')
    if os.path.isdir(template_dir):
        if not six.PY3:
            # On Python 2, normalize byte paths to unicode using the
            # filesystem encoding so later joins don't mix types.
            template_dir = template_dir.decode(fs_encoding)
        app_template_dirs.append(template_dir)

# It won't change, so convert it to a tuple to save memory.
app_template_dirs = tuple(app_template_dirs)
class Loader(BaseLoader):
    is_usable = True

    def get_template_sources(self, template_name, template_dirs=None):
        """
        Returns the absolute paths to "template_name", when appended to each
        directory in "template_dirs". Any paths that don't lie inside one of the
        template dirs are excluded from the result set, for security reasons.
        """
        dirs = template_dirs or app_template_dirs
        for directory in dirs:
            try:
                yield safe_join(directory, template_name)
            except UnicodeDecodeError:
                # The template dir name was a bytestring that wasn't valid UTF-8.
                raise
            except ValueError:
                # The joined path was located outside of template_dir.
                pass

    def load_template_source(self, template_name, template_dirs=None):
        """Return (source, origin) for the first readable candidate path."""
        for candidate in self.get_template_sources(template_name, template_dirs):
            try:
                with open(candidate, 'rb') as handle:
                    return (handle.read().decode(settings.FILE_CHARSET), candidate)
            except IOError:
                # Unreadable or missing file: keep trying the next app dir.
                continue
        raise TemplateDoesNotExist(template_name)
| mit |
rhattersley/cartopy | lib/cartopy/tests/test_img_transform.py | 4 | 3508 | # (C) British Crown Copyright 2014 - 2017, Met Office
#
# This file is part of cartopy.
#
# cartopy is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# cartopy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with cartopy. If not, see <https://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
import numpy as np
from numpy.testing import assert_array_equal
import cartopy.img_transform as img_trans
import cartopy.crs as ccrs
def test_griding_data_std_range():
    # Data which exists inside the standard projection bounds i.e.
    # [-180, 180].
    target_prj = ccrs.PlateCarree()
    # create 3 data points
    lats = np.array([65, 10, -45])
    lons = np.array([-90, 0, 90])
    data = np.array([1, 2, 3])
    data_trans = ccrs.Geodetic()

    # Regrid onto an 8x4 PlateCarree mesh; cells outside the source data's
    # convex hull are extrapolated and therefore masked.
    target_x, target_y, extent = img_trans.mesh_projection(target_prj, 8, 4)
    image = img_trans.regrid(data, lons, lats, data_trans, target_prj,
                             target_x, target_y,
                             mask_extrapolated=True)

    # The expected image. n.b. on a map the data is reversed in the y axis.
    expected = np.array([[3, 3, 3, 3, 3, 3, 3, 3],
                         [3, 1, 2, 2, 2, 3, 3, 3],
                         [1, 1, 1, 2, 2, 2, 3, 1],
                         [1, 1, 1, 1, 1, 1, 1, 1]], dtype=np.float64)

    # Only the border rows/columns are extrapolated, hence masked.
    expected_mask = np.array(
        [[True, True, True, True, True, True, True, True],
         [True, False, False, False, False, False, False, True],
         [True, False, False, False, False, False, False, True],
         [True, True, True, True, True, True, True, True]])

    assert_array_equal([-180, 180, -90, 90], extent)
    assert_array_equal(expected, image)
    assert_array_equal(expected_mask, image.mask)
def test_griding_data_outside_projection():
    # Data which exists outside the standard projection e.g. [0, 360] rather
    # than [-180, 180].
    target_prj = ccrs.PlateCarree()
    # create 3 data points
    lats = np.array([65, 10, -45])
    lons = np.array([120, 180, 240])
    data = np.array([1, 2, 3])
    data_trans = ccrs.Geodetic()

    # Longitudes past 180 should wrap around into the [-180, 180] mesh.
    target_x, target_y, extent = img_trans.mesh_projection(target_prj, 8, 4)
    image = img_trans.regrid(data, lons, lats, data_trans, target_prj,
                             target_x, target_y,
                             mask_extrapolated=True)

    # The expected image. n.b. on a map the data is reversed in the y axis.
    expected = np.array(
        [[3, 3, 3, 3, 3, 3, 3, 3],
         [3, 3, 3, 3, 3, 1, 2, 2],
         [2, 2, 3, 1, 1, 1, 1, 2],
         [1, 1, 1, 1, 1, 1, 1, 1]], dtype=np.float64)

    # The valid (unmasked) region is split across the dateline.
    expected_mask = np.array(
        [[True, True, True, True, True, True, True, True],
         [False, False, True, True, True, True, False, False],
         [False, False, True, True, True, True, False, False],
         [True, True, True, True, True, True, True, True]])

    assert_array_equal([-180, 180, -90, 90], extent)
    assert_array_equal(expected, image)
    assert_array_equal(expected_mask, image.mask)
| lgpl-3.0 |
GodBlessPP/2015cdb | static/Brython3.1.1-20150328-091302/Lib/_collections.py | 603 | 19111 | # "High performance data structures
# "
# copied from pypy repo
#
# Copied and completed from the sandbox of CPython
# (nondist/sandbox/collections/pydeque.py rev 1.1, Raymond Hettinger)
#
# edited for Brython line 558 : catch ImportError instead of AttributeError
import operator
#try:
# from thread import get_ident as _thread_ident
#except ImportError:
def _thread_ident():
    # thread.get_ident is unavailable here (e.g. under Brython), so every
    # caller is treated as running in the same pseudo-thread.
    return -1
n = 30          # number of data cells per deque block
LFTLNK = n      # index of the left-link slot in a block
RGTLNK = n+1    # index of the right-link slot in a block
BLOCKSIZ = n+2  # total slots per block: n data cells + 2 link pointers
# The deque's size limit is d.maxlen. The limit can be zero or positive, or
# None. After an item is added to a deque, we check to see if the size has
# grown past the limit. If it has, we get the size back down to the limit by
# popping an item off of the opposite end. The methods that can trigger this
# are append(), appendleft(), extend(), and extendleft().
#class deque(object):
class deque:
def __new__(cls, iterable=(), *args, **kw):
#fixme
#self = super(deque, cls).__new__(cls, *args, **kw)
self=object.__new__(cls, *args, **kw)
self.clear()
return self
def __init__(self, iterable=(), maxlen=None):
object.__init__(self)
self.clear()
if maxlen is not None:
if maxlen < 0:
raise ValueError("maxlen must be non-negative")
self._maxlen = maxlen
add = self.append
for elem in iterable:
add(elem)
@property
def maxlen(self):
return self._maxlen
def clear(self):
self.right = self.left = [None] * BLOCKSIZ
self.rightndx = n//2 # points to last written element
self.leftndx = n//2+1
self.length = 0
self.state = 0
def append(self, x):
self.state += 1
self.rightndx += 1
if self.rightndx == n:
newblock = [None] * BLOCKSIZ
self.right[RGTLNK] = newblock
newblock[LFTLNK] = self.right
self.right = newblock
self.rightndx = 0
self.length += 1
self.right[self.rightndx] = x
if self.maxlen is not None and self.length > self.maxlen:
self.popleft()
def appendleft(self, x):
self.state += 1
self.leftndx -= 1
if self.leftndx == -1:
newblock = [None] * BLOCKSIZ
self.left[LFTLNK] = newblock
newblock[RGTLNK] = self.left
self.left = newblock
self.leftndx = n-1
self.length += 1
self.left[self.leftndx] = x
if self.maxlen is not None and self.length > self.maxlen:
self.pop()
def extend(self, iterable):
if iterable is self:
iterable = list(iterable)
for elem in iterable:
self.append(elem)
def extendleft(self, iterable):
if iterable is self:
iterable = list(iterable)
for elem in iterable:
self.appendleft(elem)
def pop(self):
if self.left is self.right and self.leftndx > self.rightndx:
#raise IndexError, "pop from an empty deque" # does not work in brython
raise IndexError("pop from an empty deque")
x = self.right[self.rightndx]
self.right[self.rightndx] = None
self.length -= 1
self.rightndx -= 1
self.state += 1
if self.rightndx == -1:
prevblock = self.right[LFTLNK]
if prevblock is None:
# the deque has become empty; recenter instead of freeing block
self.rightndx = n//2
self.leftndx = n//2+1
else:
prevblock[RGTLNK] = None
self.right[LFTLNK] = None
self.right = prevblock
self.rightndx = n-1
return x
def popleft(self):
if self.left is self.right and self.leftndx > self.rightndx:
#raise IndexError, "pop from an empty deque"
raise IndexError("pop from an empty deque")
x = self.left[self.leftndx]
self.left[self.leftndx] = None
self.length -= 1
self.leftndx += 1
self.state += 1
if self.leftndx == n:
prevblock = self.left[RGTLNK]
if prevblock is None:
# the deque has become empty; recenter instead of freeing block
self.rightndx = n//2
self.leftndx = n//2+1
else:
prevblock[LFTLNK] = None
self.left[RGTLNK] = None
self.left = prevblock
self.leftndx = 0
return x
def count(self, value):
c = 0
for item in self:
if item == value:
c += 1
return c
def remove(self, value):
# Need to be defensive for mutating comparisons
for i in range(len(self)):
if self[i] == value:
del self[i]
return
raise ValueError("deque.remove(x): x not in deque")
def rotate(self, n=1):
length = len(self)
if length == 0:
return
halflen = (length+1) >> 1
if n > halflen or n < -halflen:
n %= length
if n > halflen:
n -= length
elif n < -halflen:
n += length
while n > 0:
self.appendleft(self.pop())
n -= 1
while n < 0:
self.append(self.popleft())
n += 1
def reverse(self):
"reverse *IN PLACE*"
leftblock = self.left
rightblock = self.right
leftindex = self.leftndx
rightindex = self.rightndx
for i in range(self.length // 2):
# Validate that pointers haven't met in the middle
assert leftblock != rightblock or leftindex < rightindex
# Swap
(rightblock[rightindex], leftblock[leftindex]) = (
leftblock[leftindex], rightblock[rightindex])
# Advance left block/index pair
leftindex += 1
if leftindex == n:
leftblock = leftblock[RGTLNK]
assert leftblock is not None
leftindex = 0
# Step backwards with the right block/index pair
rightindex -= 1
if rightindex == -1:
rightblock = rightblock[LFTLNK]
assert rightblock is not None
rightindex = n - 1
def __repr__(self):
threadlocalattr = '__repr' + str(_thread_ident())
if threadlocalattr in self.__dict__:
return 'deque([...])'
else:
self.__dict__[threadlocalattr] = True
try:
if self.maxlen is not None:
return 'deque(%r, maxlen=%s)' % (list(self), self.maxlen)
else:
return 'deque(%r)' % (list(self),)
finally:
del self.__dict__[threadlocalattr]
def __iter__(self):
return deque_iterator(self, self._iter_impl)
def _iter_impl(self, original_state, giveup):
if self.state != original_state:
giveup()
block = self.left
while block:
l, r = 0, n
if block is self.left:
l = self.leftndx
if block is self.right:
r = self.rightndx + 1
for elem in block[l:r]:
yield elem
if self.state != original_state:
giveup()
block = block[RGTLNK]
def __reversed__(self):
return deque_iterator(self, self._reversed_impl)
def _reversed_impl(self, original_state, giveup):
if self.state != original_state:
giveup()
block = self.right
while block:
l, r = 0, n
if block is self.left:
l = self.leftndx
if block is self.right:
r = self.rightndx + 1
for elem in reversed(block[l:r]):
yield elem
if self.state != original_state:
giveup()
block = block[LFTLNK]
def __len__(self):
#sum = 0
#block = self.left
#while block:
# sum += n
# block = block[RGTLNK]
#return sum + self.rightndx - self.leftndx + 1 - n
return self.length
def __getref(self, index):
if index >= 0:
block = self.left
while block:
l, r = 0, n
if block is self.left:
l = self.leftndx
if block is self.right:
r = self.rightndx + 1
span = r-l
if index < span:
return block, l+index
index -= span
block = block[RGTLNK]
else:
block = self.right
while block:
l, r = 0, n
if block is self.left:
l = self.leftndx
if block is self.right:
r = self.rightndx + 1
negative_span = l-r
if index >= negative_span:
return block, r+index
index -= negative_span
block = block[LFTLNK]
raise IndexError("deque index out of range")
def __getitem__(self, index):
block, index = self.__getref(index)
return block[index]
def __setitem__(self, index, value):
block, index = self.__getref(index)
block[index] = value
def __delitem__(self, index):
length = len(self)
if index >= 0:
if index >= length:
raise IndexError("deque index out of range")
self.rotate(-index)
self.popleft()
self.rotate(index)
else:
#index = ~index #todo until bit wise operators are in bython
index= index^(2**31)
if index >= length:
raise IndexError("deque index out of range")
self.rotate(index)
self.pop()
self.rotate(-index)
def __reduce_ex__(self, proto):
return type(self), (list(self), self.maxlen)
def __hash__(self):
#raise TypeError, "deque objects are unhashable"
raise TypeError("deque objects are unhashable")
def __copy__(self):
return self.__class__(self, self.maxlen)
# XXX make comparison more efficient
def __eq__(self, other):
if isinstance(other, deque):
return list(self) == list(other)
else:
return NotImplemented
def __ne__(self, other):
    # Only comparable against other deques; compare element-wise.
    if not isinstance(other, deque):
        return NotImplemented
    return list(self) != list(other)
def __lt__(self, other):
    # Lexicographic ordering, only defined against other deques.
    if not isinstance(other, deque):
        return NotImplemented
    return list(self) < list(other)
def __le__(self, other):
    # Lexicographic ordering, only defined against other deques.
    if not isinstance(other, deque):
        return NotImplemented
    return list(self) <= list(other)
def __gt__(self, other):
    # Lexicographic ordering, only defined against other deques.
    if not isinstance(other, deque):
        return NotImplemented
    return list(self) > list(other)
def __ge__(self, other):
    # Lexicographic ordering, only defined against other deques.
    if not isinstance(other, deque):
        return NotImplemented
    return list(self) >= list(other)
def __iadd__(self, other):
    # `d += iterable` appends every element of the iterable in place.
    self.extend(other)
    return self
class deque_iterator(object):
    """Iterator over a deque that detects concurrent mutation.

    `itergen` is a generator factory receiving the deque's current state
    token and a `giveup` callback; the generator is expected to call
    `giveup()` (which raises RuntimeError) whenever the deque changes
    underneath it.
    """

    def __init__(self, deq, itergen):
        # Upper bound on remaining items; zeroed out on mutation.
        self.counter = len(deq)

        def giveup():
            self.counter = 0
            raise RuntimeError("deque mutated during iteration")
        self._gen = itergen(deq.state, giveup)

    def __next__(self):
        # Python 3 iterator protocol.  The old body called
        # self._gen.next(), but Python 3 generators expose no .next()
        # method; the next() builtin works on both.
        res = next(self._gen)
        self.counter -= 1
        return res

    # Backwards-compatible alias for callers using the py2-style name.
    next = __next__

    def __iter__(self):
        return self
class defaultdict(dict):
    """dict subclass that calls a factory to supply missing values."""

    def __init__(self, *args, **kwds):
        """defaultdict([default_factory[, ...]]) -> dict with default factory.

        Any remaining positional and keyword arguments are forwarded to
        the dict constructor, exactly as for plain dict().
        """
        if len(args) > 0:
            default_factory = args[0]
            args = args[1:]
            if not callable(default_factory) and default_factory is not None:
                raise TypeError("first argument must be callable")
        else:
            default_factory = None
        # Forward the remaining arguments properly unpacked.  The old code
        # did dict.__init__(self, args, kwds) -- passing the tuple and the
        # dict as two positional arguments, which CPython's dict rejects --
        # and then redundantly called self.update(args, kwds) the same way.
        dict.__init__(self, *args, **kwds)
        self.default_factory = default_factory

    # fixme.. had to add this function to get defaultdict working with brython correctly
    def __getitem__(self, key):
        # Route missing keys through __missing__ explicitly (brython's
        # dict does not invoke __missing__ automatically).
        if self.__contains__(key):
            return dict.__getitem__(self, key)
        return self.__missing__(key)

    def __missing__(self, key):
        """Create, store and return the default value for *key*."""
        # from defaultdict docs
        if self.default_factory is None:
            raise KeyError(key)
        self[key] = value = self.default_factory()
        return value

    def __repr__(self, recurse=set()):
        # The shared default set is intentional: it tracks object ids
        # across recursive repr calls so self-referencing dicts print as
        # "defaultdict(...)" instead of recursing forever; the finally
        # clause guarantees each id is removed again.
        if id(self) in recurse:
            return "defaultdict(...)"
        try:
            recurse.add(id(self))
            return "defaultdict(%s, %s)" % (repr(self.default_factory), super(defaultdict, self).__repr__())
        finally:
            recurse.remove(id(self))

    def copy(self):
        """Return a shallow copy preserving the default factory."""
        return type(self)(self.default_factory, self)

    def __copy__(self):
        return self.copy()

    def __reduce__(self):
        """Support pickle/copy via the documented 5-tuple protocol.

        (factory function, factory args, state (None), sequence iterator
        (None), iterator of (key, value) pairs).  This API is used by
        pickle.py and copy.py.
        """
        # dict has no iteritems() on Python 3; items() is the equivalent.
        return (type(self), (self.default_factory,), None, None, iter(self.items()))
from operator import itemgetter as _itemgetter
from keyword import iskeyword as _iskeyword
import sys as _sys
def namedtuple(typename, field_names, verbose=False, rename=False):
    """Returns a new subclass of tuple with named fields.

    >>> Point = namedtuple('Point', 'x y')
    >>> Point.__doc__                   # docstring for the new class
    'Point(x, y)'
    >>> p = Point(11, y=22)             # instantiate with positional args or keywords
    >>> p[0] + p[1]                     # indexable like a plain tuple
    33
    >>> x, y = p                        # unpack like a regular tuple
    >>> x, y
    (11, 22)
    >>> p.x + p.y                       # fields also accessible by name
    33
    >>> d = p._asdict()                 # convert to a dictionary
    >>> d['x']
    11
    >>> Point(**d)                      # convert from a dictionary
    Point(x=11, y=22)
    >>> p._replace(x=100)               # _replace() is like str.replace() but targets named fields
    Point(x=100, y=22)

    """

    # Parse and validate the field names.  Validation serves two purposes,
    # generating informative error messages and preventing template injection attacks.
    if isinstance(field_names, str):
        field_names = field_names.replace(',', ' ').split()  # names separated by whitespace and/or commas
    field_names = tuple(map(str, field_names))
    if rename:
        names = list(field_names)
        seen = set()
        for i, name in enumerate(names):
            # all() (rather than the old min()) also handles the empty
            # string, which min() rejected with a ValueError before the
            # `not name` test could run.
            if (not all(c.isalnum() or c == '_' for c in name) or _iskeyword(name)
                    or not name or name[0].isdigit() or name.startswith('_')
                    or name in seen):
                names[i] = '_%d' % i
            seen.add(name)
        field_names = tuple(names)
    for name in (typename,) + field_names:
        if not all(c.isalnum() or c == '_' for c in name):
            raise ValueError('Type names and field names can only contain alphanumeric characters and underscores: %r' % name)
        if _iskeyword(name):
            raise ValueError('Type names and field names cannot be a keyword: %r' % name)
        if name[0].isdigit():
            raise ValueError('Type names and field names cannot start with a number: %r' % name)
    seen_names = set()
    for name in field_names:
        if name.startswith('_') and not rename:
            raise ValueError('Field names cannot start with an underscore: %r' % name)
        if name in seen_names:
            raise ValueError('Encountered duplicate field name: %r' % name)
        seen_names.add(name)

    # Create and fill-in the class template
    numfields = len(field_names)
    argtxt = repr(field_names).replace("'", "")[1:-1]  # tuple repr without parens or quotes
    reprtxt = ', '.join('%s=%%r' % name for name in field_names)
    template = '''class %(typename)s(tuple):
        '%(typename)s(%(argtxt)s)' \n
        __slots__ = () \n
        _fields = %(field_names)r \n
        def __new__(_cls, %(argtxt)s):
            return tuple.__new__(_cls, (%(argtxt)s)) \n
        @classmethod
        def _make(cls, iterable, new=tuple.__new__, len=len):
            'Make a new %(typename)s object from a sequence or iterable'
            result = new(cls, iterable)
            if len(result) != %(numfields)d:
                raise TypeError('Expected %(numfields)d arguments, got %%d' %% len(result))
            return result \n
        def __repr__(self):
            return '%(typename)s(%(reprtxt)s)' %% self \n
        def _asdict(self):
            'Return a new dict which maps field names to their values'
            return dict(zip(self._fields, self)) \n
        def _replace(_self, **kwds):
            'Return a new %(typename)s object replacing specified fields with new values'
            result = _self._make(map(kwds.pop, %(field_names)r, _self))
            if kwds:
                raise ValueError('Got unexpected field names: %%r' %% kwds.keys())
            return result \n
        def __getnewargs__(self):
            return tuple(self) \n\n''' % locals()
    # One read-only property per field, in field order.
    for i, name in enumerate(field_names):
        template += '        %s = _property(_itemgetter(%d))\n' % (name, i)
    if verbose:
        print(template)

    # Execute the template string in a temporary namespace
    namespace = dict(_itemgetter=_itemgetter, __name__='namedtuple_%s' % typename,
                     _property=property, _tuple=tuple)
    try:
        exec(template, namespace)
    except SyntaxError as e:
        # Python 3 SyntaxError has no .message attribute; str(e) carries
        # the same information portably.
        raise SyntaxError(str(e) + ':\n' + template)
    result = namespace[typename]

    # For pickling to work, the __module__ variable needs to be set to the frame
    # where the named tuple is created.  Bypass this step in enviroments where
    # sys._getframe is not defined (Jython for example) or sys._getframe is not
    # defined for arguments greater than 0 (IronPython).
    try:
        result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__')
    except (AttributeError, ValueError):
        pass

    return result
# Quick smoke test / usage demo; runs only when executed as a script.
if __name__ == '__main__':
    Point = namedtuple('Point', ['x', 'y'])
    p = Point(11, y=22)
    # Indexable like a plain tuple.
    print(p[0]+p[1])
    # Unpacks like a regular tuple.
    x,y=p
    print(x,y)
    # Fields accessible by name; repr shows field names.
    print(p.x+p.y)
    print(p)
| gpl-3.0 |
tangentlabs/django-fancypages | tests/unit/blocks/test_two_column_layout_block.py | 1 | 1087 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from django.test import TestCase
from django.test import RequestFactory
from django.template import RequestContext
from fancypages.models import Container
from fancypages.models.blocks import TwoColumnLayoutBlock
from fancypages.test import factories
class TestTwoColumnLayoutBlock(TestCase):
    """Tests for rendering a TwoColumnLayoutBlock."""

    def setUp(self):
        super(TestTwoColumnLayoutBlock, self).setUp()
        # Build (without persisting) a user and attach it to a request
        # context, which the block renderer expects to receive.
        self.user = factories.UserFactory.build()
        self.request = RequestFactory().get('/')
        self.request_context = RequestContext(self.request, {})
        self.request_context['user'] = self.user

    def test_generates_two_empty_containers_when_rendered(self):
        container = Container.objects.create(name='test-container')
        block = TwoColumnLayoutBlock.objects.create(container=container)
        # assertEqual replaces the deprecated assertEquals alias, which
        # was removed from unittest in Python 3.12.
        self.assertEqual(block.containers.count(), 0)

        renderer = block.get_renderer_class()(block, self.request_context)
        # Rendering lazily creates one container per column.
        renderer.render()
        self.assertEqual(block.containers.count(), 2)
| bsd-3-clause |
fredericlepied/ansible | lib/ansible/parsing/dataloader.py | 14 | 20158 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import copy
import os
import json
import re
import tempfile
from yaml import YAMLError
from ansible.module_utils.six import text_type, string_types
from ansible.errors import AnsibleFileNotFound, AnsibleParserError
from ansible.errors.yaml_strings import YAML_SYNTAX_ERROR
from ansible.module_utils.basic import is_executable
from ansible.module_utils.six import binary_type, text_type
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.parsing.vault import VaultLib, b_HEADER, is_encrypted, is_encrypted_file, parse_vaulttext_envelope
from ansible.parsing.quoting import unquote
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleUnicode
from ansible.utils.path import unfrackpath
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
# Tries to determine if a path is inside a role, last dir must be 'tasks'
# this is not perfect but people should really avoid 'tasks' dirs outside roles when using Ansible.
RE_TASKS = re.compile(u'(?:^|%s)+tasks%s?$' % (os.path.sep, os.path.sep))
class DataLoader:

    '''
    The DataLoader class is used to load and parse YAML or JSON content,
    either from a given file name or from a string that was previously
    read in through other means. A Vault password can be specified, and
    any vault-encrypted files will be decrypted.

    Data read from files will also be cached, so the file will never be
    read from disk more than once.

    Usage:

        dl = DataLoader()
        # optionally: dl.set_vault_password('foo')
        ds = dl.load('...')
        ds = dl.load_from_file('/path/to/file')
    '''

    def __init__(self):
        self._basedir = '.'
        self._FILE_CACHE = dict()
        self._tempfiles = set()

        # initialize the vault stuff with an empty password
        # TODO: replace with a ref to something that can get the password
        #       a creds/auth provider
        # self.set_vault_password(None)
        self._vaults = {}
        self._vault = VaultLib()
        self.set_vault_secrets(None)

    # TODO: since we can query vault_secrets late, we could provide this to DataLoader init
    def set_vault_secrets(self, vault_secrets):
        self._vault.secrets = vault_secrets

    def load(self, data, file_name='<string>', show_content=True):
        '''
        Creates a python datastructure from the given data, which can be either
        a JSON or YAML string.
        '''
        new_data = None

        try:
            # we first try to load this data as JSON.  Only ordinary
            # exceptions are caught: the previous bare `except:` also
            # swallowed KeyboardInterrupt and SystemExit.
            new_data = json.loads(data)
        except Exception:
            # must not be JSON, let the YAML parser try.  (A duplicated,
            # never-used computation of in_data that preceded this try
            # block was dead code and has been removed.)
            if isinstance(data, AnsibleUnicode):
                # The PyYAML's libyaml bindings use PyUnicode_CheckExact so
                # they are unable to cope with our subclass.
                # Unwrap and re-wrap the unicode so we can keep track of line
                # numbers
                in_data = text_type(data)
            else:
                in_data = data

            try:
                new_data = self._safe_load(in_data, file_name=file_name)
            except YAMLError as yaml_exc:
                # _handle_error always raises AnsibleParserError.
                self._handle_error(yaml_exc, file_name, show_content)

        if isinstance(data, AnsibleUnicode):
            # Re-wrap so the position information survives the round trip.
            new_data = AnsibleUnicode(new_data)
            new_data.ansible_pos = data.ansible_pos

        return new_data

    def load_from_file(self, file_name, cache=True, unsafe=False):
        ''' Loads data from a file, which can contain either JSON or YAML. '''

        file_name = self.path_dwim(file_name)
        display.debug("Loading data from %s" % file_name)

        # if the file has already been read in and cached, we'll
        # return those results to avoid more file/vault operations
        if cache and file_name in self._FILE_CACHE:
            parsed_data = self._FILE_CACHE[file_name]
        else:
            # read the file contents and load the data structure from them
            (b_file_data, show_content) = self._get_file_contents(file_name)

            file_data = to_text(b_file_data, errors='surrogate_or_strict')
            parsed_data = self.load(data=file_data, file_name=file_name, show_content=show_content)

            # cache the file contents for next time
            self._FILE_CACHE[file_name] = parsed_data

        if unsafe:
            return parsed_data
        else:
            # return a deep copy here, so the cache is not affected
            return copy.deepcopy(parsed_data)

    def path_exists(self, path):
        ''' Whether the (basedir-relative) path exists. '''
        path = self.path_dwim(path)
        return os.path.exists(to_bytes(path, errors='surrogate_or_strict'))

    def is_file(self, path):
        ''' Whether the (basedir-relative) path is a regular file or devnull. '''
        path = self.path_dwim(path)
        return os.path.isfile(to_bytes(path, errors='surrogate_or_strict')) or path == os.devnull

    def is_directory(self, path):
        ''' Whether the (basedir-relative) path is a directory. '''
        path = self.path_dwim(path)
        return os.path.isdir(to_bytes(path, errors='surrogate_or_strict'))

    def list_directory(self, path):
        ''' List entries of the (basedir-relative) directory. '''
        path = self.path_dwim(path)
        return os.listdir(path)

    def is_executable(self, path):
        '''is the given path executable?'''
        path = self.path_dwim(path)
        return is_executable(path)

    def _safe_load(self, stream, file_name=None):
        ''' Implements yaml.safe_load(), except using our custom loader class. '''

        loader = AnsibleLoader(stream, file_name, self._vault.secrets)
        try:
            return loader.get_single_data()
        finally:
            try:
                loader.dispose()
            except AttributeError:
                pass  # older versions of yaml don't have dispose function, ignore

    def _get_file_contents(self, file_name):
        '''
        Reads the file contents from the given file name

        If the contents are vault-encrypted, it will decrypt them and return
        the decrypted data

        :arg file_name: The name of the file to read.  If this is a relative
            path, it will be expanded relative to the basedir
        :raises AnsibleFileNotFound: if the file_name does not refer to a file
        :raises AnsibleParserError: if we were unable to read the file
        :return: Returns a byte string of the file contents
        '''
        if not file_name or not isinstance(file_name, (binary_type, text_type)):
            raise AnsibleParserError("Invalid filename: '%s'" % str(file_name))

        b_file_name = to_bytes(self.path_dwim(file_name))
        # This is what we really want but have to fix unittests to make it pass
        # if not os.path.exists(b_file_name) or not os.path.isfile(b_file_name):
        if not self.path_exists(b_file_name) or not self.is_file(b_file_name):
            raise AnsibleFileNotFound("Unable to retrieve file contents", file_name=file_name)

        show_content = True
        try:
            with open(b_file_name, 'rb') as f:
                data = f.read()
                if is_encrypted(data):
                    # FIXME: plugin vault selector
                    b_ciphertext, b_version, cipher_name, vault_id = parse_vaulttext_envelope(data)
                    data = self._vault.decrypt(data, filename=b_file_name)
                    # Never display decrypted vault contents in errors.
                    show_content = False

            return (data, show_content)

        except (IOError, OSError) as e:
            raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (file_name, str(e)), orig_exc=e)

    def _handle_error(self, yaml_exc, file_name, show_content):
        '''
        Optionally constructs an object (AnsibleBaseYAMLObject) to encapsulate the
        file name/position where a YAML exception occurred, and raises an AnsibleParserError
        to display the syntax exception information.
        '''

        # if the YAML exception contains a problem mark, use it to construct
        # an object the error class can use to display the faulty line
        err_obj = None
        if hasattr(yaml_exc, 'problem_mark'):
            err_obj = AnsibleBaseYAMLObject()
            err_obj.ansible_pos = (file_name, yaml_exc.problem_mark.line + 1, yaml_exc.problem_mark.column + 1)

        raise AnsibleParserError(YAML_SYNTAX_ERROR, obj=err_obj, show_content=show_content, orig_exc=yaml_exc)

    def get_basedir(self):
        ''' returns the current basedir '''
        return self._basedir

    def set_basedir(self, basedir):
        ''' sets the base directory, used to find files when a relative path is given '''

        if basedir is not None:
            self._basedir = to_text(basedir)

    def path_dwim(self, given):
        '''
        make relative paths work like folks expect.
        '''

        given = unquote(given)
        given = to_text(given, errors='surrogate_or_strict')

        if given.startswith(to_text(os.path.sep)) or given.startswith(u'~'):
            path = given
        else:
            basedir = to_text(self._basedir, errors='surrogate_or_strict')
            path = os.path.join(basedir, given)

        return unfrackpath(path, follow=False)

    def _is_role(self, path):
        ''' imperfect role detection, roles are still valid w/o tasks|meta/main.yml|yaml|etc '''

        b_path = to_bytes(path, errors='surrogate_or_strict')
        b_upath = to_bytes(unfrackpath(path, follow=False), errors='surrogate_or_strict')

        for finddir in (b'meta', b'tasks'):
            for suffix in (b'.yml', b'.yaml', b''):
                b_main = b'main%s' % (suffix)
                b_tasked = b'%s/%s' % (finddir, b_main)

                if (
                    RE_TASKS.search(path) and
                    os.path.exists(os.path.join(b_path, b_main)) or
                    os.path.exists(os.path.join(b_upath, b_tasked)) or
                    os.path.exists(os.path.join(os.path.dirname(b_path), b_tasked))
                ):
                    return True
        return False

    def path_dwim_relative(self, path, dirname, source, is_role=False):
        '''
        find one file in either a role or playbook dir with or without
        explicitly named dirname subdirs

        Used in action plugins and lookups to find supplemental files that
        could be in either place.
        '''

        search = []
        source = to_text(source, errors='surrogate_or_strict')

        # I have full path, nothing else needs to be looked at
        if source.startswith(to_text(os.path.sep)) or source.startswith(u'~'):
            search.append(unfrackpath(source, follow=False))
        else:
            # base role/play path + templates/files/vars + relative filename
            search.append(os.path.join(path, dirname, source))
            basedir = unfrackpath(path, follow=False)

            # not told if role, but detect if it is a role and if so make sure you get correct base path
            if not is_role:
                is_role = self._is_role(path)

            if is_role and RE_TASKS.search(path):
                basedir = unfrackpath(os.path.dirname(path), follow=False)

            cur_basedir = self._basedir
            self.set_basedir(basedir)
            # resolved base role/play path + templates/files/vars + relative filename
            search.append(unfrackpath(os.path.join(basedir, dirname, source), follow=False))
            self.set_basedir(cur_basedir)

            if is_role and not source.endswith(dirname):
                # look in role's tasks dir w/o dirname
                search.append(unfrackpath(os.path.join(basedir, 'tasks', source), follow=False))

            # try to create absolute path for loader basedir + templates/files/vars + filename
            search.append(unfrackpath(os.path.join(dirname, source), follow=False))

            # try to create absolute path for loader basedir
            search.append(unfrackpath(os.path.join(basedir, source), follow=False))

            # try to create absolute path for dirname + filename
            search.append(self.path_dwim(os.path.join(dirname, source)))

            # try to create absolute path for filename
            search.append(self.path_dwim(source))

        for candidate in search:
            if os.path.exists(to_bytes(candidate, errors='surrogate_or_strict')):
                break

        return candidate

    def path_dwim_relative_stack(self, paths, dirname, source, is_role=False):
        '''
        find one file in first path in stack taking roles into account and adding play basedir as fallback

        :arg paths: A list of text strings which are the paths to look for the filename in.
        :arg dirname: A text string representing a directory.  The directory
            is prepended to the source to form the path to search for.
        :arg source: A text string which is the filename to search for
        :rtype: A text string
        :returns: An absolute path to the filename ``source`` if found
        :raises: An AnsibleFileNotFound Exception if the file is not found in the search paths
        '''
        b_dirname = to_bytes(dirname)
        b_source = to_bytes(source)

        result = None
        search = []
        if source is None:
            display.warning('Invalid request to find a file that matches a "null" value')
        elif source and (source.startswith('~') or source.startswith(os.path.sep)):
            # path is absolute, no relative needed, check existence and return source
            test_path = unfrackpath(b_source, follow=False)
            if os.path.exists(to_bytes(test_path, errors='surrogate_or_strict')):
                result = test_path
        else:
            display.debug(u'evaluation_path:\n\t%s' % '\n\t'.join(paths))
            for path in paths:
                upath = unfrackpath(path, follow=False)
                b_upath = to_bytes(upath, errors='surrogate_or_strict')
                b_mydir = os.path.dirname(b_upath)

                # if path is in role and 'tasks' not there already, add it into the search
                if is_role or self._is_role(path):
                    if b_mydir.endswith(b'tasks'):
                        search.append(os.path.join(os.path.dirname(b_mydir), b_dirname, b_source))
                        search.append(os.path.join(b_mydir, b_source))
                    else:
                        # don't add dirname if user already is using it in source
                        if b_source.split(b'/')[0] != b_dirname:
                            search.append(os.path.join(b_upath, b_dirname, b_source))
                        search.append(os.path.join(b_upath, b_source))

                elif b_dirname not in b_source.split(b'/'):
                    # don't add dirname if user already is using it in source
                    # (compare against b_dirname: comparing bytes to the text
                    # `dirname` is always unequal on Python 3, which made this
                    # guard a no-op)
                    if b_source.split(b'/')[0] != b_dirname:
                        search.append(os.path.join(b_upath, b_dirname, b_source))
                    search.append(os.path.join(b_upath, b_source))

            # always append basedir as last resort
            # don't add dirname if user already is using it in source
            if b_source.split(b'/')[0] != b_dirname:
                search.append(os.path.join(to_bytes(self.get_basedir()), b_dirname, b_source))
            search.append(os.path.join(to_bytes(self.get_basedir()), b_source))

            display.debug(u'search_path:\n\t%s' % to_text(b'\n\t'.join(search)))
            for b_candidate in search:
                display.vvvvv(u'looking for "%s" at "%s"' % (source, to_text(b_candidate)))
                if os.path.exists(b_candidate):
                    result = to_text(b_candidate)
                    break

        if result is None:
            raise AnsibleFileNotFound(file_name=source, paths=[to_text(p) for p in search])

        return result

    def _create_content_tempfile(self, content):
        ''' Create a tempfile containing defined content '''
        fd, content_tempfile = tempfile.mkstemp()
        f = os.fdopen(fd, 'wb')
        content = to_bytes(content)
        try:
            f.write(content)
        except Exception as err:
            os.remove(content_tempfile)
            raise Exception(err)
        finally:
            f.close()
        return content_tempfile

    def get_real_file(self, file_path, decrypt=True):
        """
        If the file is vault encrypted return a path to a temporary decrypted file
        If the file is not encrypted then the path is returned
        Temporary files are cleanup in the destructor
        """

        if not file_path or not isinstance(file_path, (binary_type, text_type)):
            raise AnsibleParserError("Invalid filename: '%s'" % to_native(file_path))

        b_file_path = to_bytes(file_path, errors='surrogate_or_strict')
        if not self.path_exists(b_file_path) or not self.is_file(b_file_path):
            raise AnsibleFileNotFound(file_name=file_path)

        real_path = self.path_dwim(file_path)

        try:
            if decrypt:
                with open(to_bytes(real_path), 'rb') as f:
                    # Limit how much of the file is read since we do not know
                    # whether this is a vault file and therefore it could be very
                    # large.
                    if is_encrypted_file(f, count=len(b_HEADER)):
                        # if the file is encrypted and no password was specified,
                        # the decrypt call would throw an error, but we check first
                        # since the decrypt function doesn't know the file name
                        data = f.read()
                        if not self._vault.secrets:
                            raise AnsibleParserError("A vault password or secret must be specified to decrypt %s" % to_native(file_path))

                        data = self._vault.decrypt(data, filename=real_path)

                        # Make a temp file
                        real_path = self._create_content_tempfile(data)
                        self._tempfiles.add(real_path)

            return real_path

        except (IOError, OSError) as e:
            raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (to_native(real_path), to_native(e)), orig_exc=e)

    def cleanup_tmp_file(self, file_path):
        """
        Removes any temporary files created from a previous call to
        get_real_file. file_path must be the path returned from a
        previous call to get_real_file.
        """
        if file_path in self._tempfiles:
            os.unlink(file_path)
            self._tempfiles.remove(file_path)

    def cleanup_all_tmp_files(self):
        # Best effort: a failure to remove one temp file must not abort
        # the cleanup of the rest.
        for f in self._tempfiles:
            try:
                self.cleanup_tmp_file(f)
            except Exception as e:
                display.warning("Unable to cleanup temp files: %s" % to_native(e))
| gpl-3.0 |
sameerparekh/pants | tests/python/pants_test/base/test_generator.py | 31 | 1087 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import unittest
from pants.base.generator import TemplateData
class TemplateDataTest(unittest.TestCase):
    """Tests for TemplateData's immutable, attribute-style access."""

    def setUp(self):
        self.data = TemplateData(foo='bar', baz=42)

    def test_member_access(self):
        # Access to undefined template data slots should raise.
        # (assertRaises replaces the older try/self.fail idiom.)
        with self.assertRaises(AttributeError):
            self.data.bip

    def test_member_mutation(self):
        # Mutation of a template data's slots should not be allowed.
        with self.assertRaises(AttributeError):
            self.data.baz = 1 / 137

    def test_extend(self):
        self.assertEqual(self.data.extend(jake=0.3), TemplateData(baz=42, foo='bar', jake=0.3))

    def test_equals(self):
        self.assertEqual(self.data, TemplateData(baz=42).extend(foo='bar'))
| apache-2.0 |
tonyteate/pydocxgae | pydocx/tests/__init__.py | 1 | 5431 | #from unittest import TestCase
import re
from contextlib import contextmanager
from pydocx.parsers.Docx2Html import Docx2Html
from pydocx.utils import (
parse_xml_from_string,
)
from pydocx.tests.document_builder import DocxBuilder as DXB
from unittest import TestCase
# Inline stylesheet matching the CSS classes pydocx's Docx2Html emits.
STYLE = (
    '<style>'
    '.pydocx-insert {color:green;}'
    '.pydocx-delete {color:red;text-decoration:line-through;}'
    '.pydocx-center {text-align:center;}'
    '.pydocx-right {text-align:right;}'
    '.pydocx-left {text-align:left;}'
    '.pydocx-comment {color:blue;}'
    '.pydocx-underline {text-decoration: underline;}'
    '.pydocx-caps {text-transform:uppercase;}'
    '.pydocx-small-caps {font-variant: small-caps;}'
    '.pydocx-strike {text-decoration: line-through;}'
    '.pydocx-hidden {visibility: hidden;}'
    'body {width:612px;margin:0px auto;}'
    '</style>'
)

# Skeleton document wrapped around each test's expected body markup; the
# escaped %%s survives the STYLE interpolation and becomes the body
# placeholder used by the tests.
BASE_HTML = '''
<html>
<head>
%s
</head>
<body>%%s</body>
</html>
''' % STYLE
def assert_html_equal(actual_html, expected_html):
    """Assert two HTML strings match, ignoring insignificant whitespace.

    On failure, the raw actual HTML is shown as the assertion message.
    """
    collapsed_actual = collapse_html(actual_html)
    collapsed_expected = collapse_html(expected_html)
    assert collapsed_actual == collapsed_expected, actual_html
def collapse_html(html):
    """Collapse insignificant whitespace in *html*.

    Each newline (together with surrounding whitespace) is squashed to a
    single space, or to nothing when the line break directly touches a
    tag boundary ('>' before it or '<' after it).  The result is also
    stripped of leading/trailing whitespace, so e.g.
    '<h1>\\n  Heading\\n</h1>\\n' collapses to '<h1>Heading</h1>'.
    """
    def join_lines(match):
        left, right = match.group(1), match.group(2)
        # Insert a space between joined lines, unless exactly one side
        # of the break butts up against a tag.
        separator = '' if left == '>' or right == '<' else ' '
        return left + separator + right

    # Replace newlines and their surrounding whitespace with a single
    # space (or empty string).
    collapsed = re.sub(r'(>?)\s*\n\s*(<?)', join_lines, html)
    return collapsed.strip()
class XMLDocx2Html(Docx2Html):
    """
    Create the object without passing in a path to the document, set them
    manually.
    """

    def __init__(self, *args, **kwargs):
        # Pass in nothing for the path
        super(XMLDocx2Html, self).__init__(path=None, *args, **kwargs)

    def _build_data(
            self,
            path,
            document_xml=None,
            rels_dict=None,
            numbering_dict=None,
            styles_dict=None,
            *args, **kwargs):
        """Populate parser state directly from test fixtures instead of a
        real docx archive."""
        self._test_rels_dict = rels_dict
        if rels_dict:
            # Mirror each relationship target into the image-data map so
            # image lookups resolve during rendering.
            for value in rels_dict.values():
                self._image_data['word/%s' % value] = 'word/%s' % value
        self.numbering_root = None
        if numbering_dict is not None:
            # Build a numbering XML tree from the fixture dict.
            self.numbering_root = parse_xml_from_string(
                DXB.numbering(numbering_dict),
            )
        self.numbering_dict = numbering_dict
        # Intentionally not calling super
        if document_xml is not None:
            self.root = parse_xml_from_string(document_xml)
        self.zip_path = ''
        # This is the standard page width for a word document, Also the page
        # width that we are looking for in the test.
        self.page_width = 612

        self.styles_dict = styles_dict

    def _parse_rels_root(self, *args, **kwargs):
        # Serve the fixture relationships instead of reading a rels file.
        if self._test_rels_dict is None:
            return {}
        return self._test_rels_dict

    def get_list_style(self, num_id, ilvl):
        # Fall back to 'decimal' for any numbering id/level not in the fixture.
        try:
            return self.numbering_dict[num_id][ilvl]
        except KeyError:
            return 'decimal'

    def _parse_styles(self):
        # Serve the fixture styles instead of parsing styles.xml.
        if self.styles_dict is None:
            return {}
        return self.styles_dict
# Numbering styles keyed by numbering id, then by indentation level
# (ilvl), mirroring the docx numbering definitions the tests rely on.
DEFAULT_NUMBERING_DICT = {
    '1': {
        '0': 'decimal',
        '1': 'decimal',
    },
    '2': {
        '0': 'lowerLetter',
        '1': 'lowerLetter',
    },
}
class _TranslationTestCase(TestCase):
    """Base class for docx->HTML translation tests.

    Subclasses override the class attributes below and implement
    get_xml() to supply the document under test.
    """
    expected_output = None
    relationship_dict = None
    styles_dict = None
    numbering_dict = DEFAULT_NUMBERING_DICT
    run_expected_output = True
    parser = XMLDocx2Html
    use_base_html = True
    convert_root_level_upper_roman = False

    def get_xml(self):
        # Subclasses must return the document XML to translate.
        raise NotImplementedError()

    @contextmanager
    def toggle_run_expected_output(self):
        # Temporarily invert run_expected_output for the enclosed block.
        self.run_expected_output = not self.run_expected_output
        yield
        self.run_expected_output = not self.run_expected_output

    def test_expected_output(self):
        if self.expected_output is None:
            raise NotImplementedError('expected_output is not defined')
        if not self.run_expected_output:
            return

        # Create the xml
        tree = self.get_xml()

        # Verify the final output.
        parser = self.parser

        def image_handler(self, src, *args, **kwargs):
            return src
        # NOTE(review): this rebinds image_handler on the parser *class*,
        # so the stub persists for every subsequent use of that class.
        parser.image_handler = image_handler

        html = parser(
            convert_root_level_upper_roman=self.convert_root_level_upper_roman,
            document_xml=tree,
            rels_dict=self.relationship_dict,
            numbering_dict=self.numbering_dict,
            styles_dict=self.styles_dict,
        ).parsed

        if self.use_base_html:
            assert_html_equal(html, BASE_HTML % self.expected_output)
        else:
            assert_html_equal(html, self.expected_output)
| apache-2.0 |
levilucio/SyVOLT | tests/TestModules/kiltera_backward2trace_MDL.py | 1 | 8531 | """
__kiltera_backward2trace_MDL.py_____________________________________________________
Automatically generated AToM3 Model File (Do not modify directly)
Author: levi
Modified: Tue Jan 27 12:00:37 2015
____________________________________________________________________________________
"""
from stickylink import *
from widthXfillXdecoration import *
from MatchModel import *
from ApplyModel import *
from Trigger_S import *
from Trigger_T import *
from paired_with import *
from match_contains import *
from apply_contains import *
from graph_match_contains import *
from graph_MatchModel import *
from graph_apply_contains import *
from graph_Trigger_T import *
from graph_Trigger_S import *
from graph_paired_with import *
from graph_ApplyModel import *
from ATOM3Enum import *
from ATOM3String import *
from ATOM3BottomType import *
from ATOM3Constraint import *
from ATOM3Attribute import *
from ATOM3Float import *
from ATOM3List import *
from ATOM3Link import *
from ATOM3Connection import *
from ATOM3Boolean import *
from ATOM3Appearance import *
from ATOM3Text import *
from ATOM3Action import *
from ATOM3Integer import *
from ATOM3Port import *
from ATOM3MSEnum import *
def kiltera_backward2trace_MDL(self, rootNode, UMLRT2Kiltera_MMRootNode=None):
    """Rebuild the auto-generated 'kiltera_backward2trace' AToM3 model.

    Generated by AToM3 (see the file header: "Do not modify directly").
    Creates seven model objects (obj100..obj106), optionally their
    graphical representations when ``self.genGraphics`` is set, attaches
    each to ``rootNode``, and finally draws the connections between them.

    Args:
        rootNode: model root the created nodes are added to.
        UMLRT2Kiltera_MMRootNode: optional ASG root whose author /
            description / name attributes are initialised when given.
    """
    # --- Generating attributes code for ASG UMLRT2Kiltera_MM ---
    if( UMLRT2Kiltera_MMRootNode ):
        # author
        UMLRT2Kiltera_MMRootNode.author.setValue('Annonymous')
        # description
        UMLRT2Kiltera_MMRootNode.description.setValue('\n')
        UMLRT2Kiltera_MMRootNode.description.setHeight(15)
        # name
        UMLRT2Kiltera_MMRootNode.name.setValue('kiltera_backward2trace')
    # --- ASG attributes over ---

    # obj100: MatchModel node
    self.obj100=MatchModel(self)
    self.obj100.isGraphObjectVisual = True

    if(hasattr(self.obj100, '_setHierarchicalLink')):
        self.obj100._setHierarchicalLink(False)

    self.obj100.graphClass_= graph_MatchModel
    if self.genGraphics:
        new_obj = graph_MatchModel(340.0,60.0,self.obj100)
        new_obj.DrawObject(self.UMLmodel)
        self.UMLmodel.addtag_withtag("MatchModel", new_obj.tag)
        new_obj.layConstraints = dict() # Graphical Layout Constraints
        new_obj.layConstraints['scale'] = [1.0, 1.0]
    else: new_obj = None
    self.obj100.graphObject_ = new_obj

    # Add node to the root: rootNode
    rootNode.addNode(self.obj100)
    self.globalAndLocalPostcondition(self.obj100, rootNode)
    self.obj100.postAction( rootNode.CREATE )

    # obj101: ApplyModel node
    self.obj101=ApplyModel(self)
    self.obj101.isGraphObjectVisual = True

    if(hasattr(self.obj101, '_setHierarchicalLink')):
        self.obj101._setHierarchicalLink(False)

    self.obj101.graphClass_= graph_ApplyModel
    if self.genGraphics:
        new_obj = graph_ApplyModel(340.0,280.0,self.obj101)
        new_obj.DrawObject(self.UMLmodel)
        self.UMLmodel.addtag_withtag("ApplyModel", new_obj.tag)
        new_obj.layConstraints = dict() # Graphical Layout Constraints
        new_obj.layConstraints['scale'] = [1.0, 1.0]
    else: new_obj = None
    self.obj101.graphObject_ = new_obj

    # Add node to the root: rootNode
    rootNode.addNode(self.obj101)
    self.globalAndLocalPostcondition(self.obj101, rootNode)
    self.obj101.postAction( rootNode.CREATE )

    # obj102: Trigger_S node
    self.obj102=Trigger_S(self)
    self.obj102.isGraphObjectVisual = True

    if(hasattr(self.obj102, '_setHierarchicalLink')):
        self.obj102._setHierarchicalLink(False)

    # classtype
    self.obj102.classtype.setValue('t_')
    # cardinality
    self.obj102.cardinality.setValue('1')
    # name
    self.obj102.name.setValue('s_')
    self.obj102.graphClass_= graph_Trigger_S
    if self.genGraphics:
        new_obj = graph_Trigger_S(560.0,120.0,self.obj102)
        new_obj.DrawObject(self.UMLmodel)
        self.UMLmodel.addtag_withtag("Trigger_S", new_obj.tag)
        new_obj.layConstraints = dict() # Graphical Layout Constraints
        new_obj.layConstraints['scale'] = [1.0, 1.0]
    else: new_obj = None
    self.obj102.graphObject_ = new_obj

    # Add node to the root: rootNode
    rootNode.addNode(self.obj102)
    self.globalAndLocalPostcondition(self.obj102, rootNode)
    self.obj102.postAction( rootNode.CREATE )

    # obj103: Trigger_T node
    self.obj103=Trigger_T(self)
    self.obj103.isGraphObjectVisual = True

    if(hasattr(self.obj103, '_setHierarchicalLink')):
        self.obj103._setHierarchicalLink(False)

    # classtype
    self.obj103.classtype.setValue('t_')
    # cardinality
    self.obj103.cardinality.setValue('1')
    # name
    self.obj103.name.setValue('s_')
    self.obj103.graphClass_= graph_Trigger_T
    if self.genGraphics:
        new_obj = graph_Trigger_T(560.0,340.0,self.obj103)
        new_obj.DrawObject(self.UMLmodel)
        self.UMLmodel.addtag_withtag("Trigger_T", new_obj.tag)
        new_obj.layConstraints = dict() # Graphical Layout Constraints
        new_obj.layConstraints['scale'] = [1.0, 1.0]
    else: new_obj = None
    self.obj103.graphObject_ = new_obj

    # Add node to the root: rootNode
    rootNode.addNode(self.obj103)
    self.globalAndLocalPostcondition(self.obj103, rootNode)
    self.obj103.postAction( rootNode.CREATE )

    # obj104: paired_with link node
    self.obj104=paired_with(self)
    self.obj104.isGraphObjectVisual = True

    if(hasattr(self.obj104, '_setHierarchicalLink')):
        self.obj104._setHierarchicalLink(False)

    self.obj104.graphClass_= graph_paired_with
    if self.genGraphics:
        new_obj = graph_paired_with(460.5,202.0,self.obj104)
        new_obj.DrawObject(self.UMLmodel)
        self.UMLmodel.addtag_withtag("paired_with", new_obj.tag)
        new_obj.layConstraints = dict() # Graphical Layout Constraints
    else: new_obj = None
    self.obj104.graphObject_ = new_obj

    # Add node to the root: rootNode
    rootNode.addNode(self.obj104)
    self.globalAndLocalPostcondition(self.obj104, rootNode)
    self.obj104.postAction( rootNode.CREATE )

    # obj105: match_contains link node
    self.obj105=match_contains(self)
    self.obj105.isGraphObjectVisual = True

    if(hasattr(self.obj105, '_setHierarchicalLink')):
        self.obj105._setHierarchicalLink(False)

    self.obj105.graphClass_= graph_match_contains
    if self.genGraphics:
        new_obj = graph_match_contains(594.0,127.0,self.obj105)
        new_obj.DrawObject(self.UMLmodel)
        self.UMLmodel.addtag_withtag("match_contains", new_obj.tag)
        new_obj.layConstraints = dict() # Graphical Layout Constraints
    else: new_obj = None
    self.obj105.graphObject_ = new_obj

    # Add node to the root: rootNode
    rootNode.addNode(self.obj105)
    self.globalAndLocalPostcondition(self.obj105, rootNode)
    self.obj105.postAction( rootNode.CREATE )

    # obj106: apply_contains link node
    self.obj106=apply_contains(self)
    self.obj106.isGraphObjectVisual = True

    if(hasattr(self.obj106, '_setHierarchicalLink')):
        self.obj106._setHierarchicalLink(False)

    self.obj106.graphClass_= graph_apply_contains
    if self.genGraphics:
        new_obj = graph_apply_contains(597.5,352.0,self.obj106)
        new_obj.DrawObject(self.UMLmodel)
        self.UMLmodel.addtag_withtag("apply_contains", new_obj.tag)
        new_obj.layConstraints = dict() # Graphical Layout Constraints
    else: new_obj = None
    self.obj106.graphObject_ = new_obj

    # Add node to the root: rootNode
    rootNode.addNode(self.obj106)
    self.globalAndLocalPostcondition(self.obj106, rootNode)
    self.obj106.postAction( rootNode.CREATE )

    # Connections for obj100 (graphObject_: Obj0) of type MatchModel
    self.drawConnections(
        (self.obj100,self.obj105,[458.0, 91.0, 594.0, 127.0],"true", 2),
        (self.obj100,self.obj104,[458.0, 91.0, 460.5, 202.0],"true", 2) )
    # Connections for obj101 (graphObject_: Obj1) of type ApplyModel
    self.drawConnections(
        (self.obj101,self.obj106,[463.0, 313.0, 597.5, 352.0],"true", 2) )
    # Connections for obj102 (graphObject_: Obj2) named s_
    self.drawConnections(
        )
    # Connections for obj103 (graphObject_: Obj3) named s_
    self.drawConnections(
        )
    # Connections for obj104 (graphObject_: Obj4) of type paired_with
    self.drawConnections(
        (self.obj104,self.obj101,[460.5, 202.0, 463.0, 313.0],"true", 2) )
    # Connections for obj105 (graphObject_: Obj5) of type match_contains
    self.drawConnections(
        (self.obj105,self.obj102,[594.0, 127.0, 730.0, 163.0],"true", 2) )
    # Connections for obj106 (graphObject_: Obj6) of type apply_contains
    self.drawConnections(
        (self.obj106,self.obj103,[597.5, 352.0, 732.0, 391.0],"true", 2) )
# Module-level hooks.  NOTE(review): these names appear to be read by the
# AToM3 model-loading machinery — confirm before renaming.
newfunction = kiltera_backward2trace_MDL
# Metamodel name and AToM3 version this model was generated against.
loadedMMName = 'UMLRT2Kiltera_MM_META'
atom3version = '0.3'
| mit |
loseblue/vim-ycm-windows-64 | python/ycm/client/command_request.py | 10 | 3213 | #!/usr/bin/env python
#
# Copyright (C) 2013 Google Inc.
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
import vim
from ycm.client.base_request import BaseRequest, BuildRequestData, ServerError
from ycm import vimsupport
from ycmd.utils import ToUtf8IfNeeded
def _EnsureBackwardsCompatibility( arguments ):
  """Map deprecated command names to their current equivalents.

  'GoToDefinitionElseDeclaration' was renamed to 'GoTo'; translate it so old
  mappings keep working.  Returns a new list instead of mutating the caller's
  list in place (the original rewrote arguments[ 0 ] as a side effect).
  """
  if arguments and arguments[ 0 ] == 'GoToDefinitionElseDeclaration':
    return [ 'GoTo' ] + arguments[ 1: ]
  return arguments
class CommandRequest( BaseRequest ):
  """Sends a completer command (e.g. GoTo) to ycmd and processes the reply."""

  def __init__( self, arguments, completer_target = None ):
    super( CommandRequest, self ).__init__()
    self._arguments = _EnsureBackwardsCompatibility( arguments )
    if completer_target:
      self._completer_target = completer_target
    else:
      self._completer_target = 'filetype_default'
    self._is_goto_command = (
      self._arguments and self._arguments[ 0 ].startswith( 'GoTo' ) )
    self._response = None


  def Start( self ):
    """Post the command to the server's run_completer_command handler."""
    request_data = BuildRequestData()
    request_data.update( {
      'completer_target': self._completer_target,
      'command_arguments': self._arguments
    } )
    try:
      self._response = self.PostDataToHandler( request_data,
                                               'run_completer_command' )
    except ServerError as e:
      vimsupport.PostVimMessage( e )


  def Response( self ):
    """Return the raw server response (None until Start() has completed)."""
    return self._response


  def RunPostCommandActionsIfNeeded( self ):
    """For GoTo commands, jump to the single result or fill the quickfix
    list when the server returned several locations."""
    if not self._is_goto_command or not self.Done() or not self._response:
      return

    response = self._response
    if not isinstance( response, list ):
      vimsupport.JumpToLocation( response[ 'filepath' ],
                                 response[ 'line_num' ],
                                 response[ 'column_num' ] )
      return

    qf_items = [ _BuildQfListItem( entry ) for entry in response ]
    vim.eval( 'setqflist( %s )' % repr( qf_items ) )
    vim.eval( 'youcompleteme#OpenGoToList()' )
def SendCommandRequest( arguments, completer ):
  """Run a completer command synchronously and return the server response."""
  command_request = CommandRequest( arguments, completer )
  # Start() blocks until the server has replied.
  command_request.Start()
  command_request.RunPostCommandActionsIfNeeded()
  return command_request.Response()
def _BuildQfListItem( goto_data_item ):
  """Translate one GoTo response entry into a Vim quickfix dictionary."""
  qf_item = {}
  # (response key, quickfix key, conversion) triples for the pass-through
  # fields; column handling differs (0-based) so it is done separately.
  conversions = (
    ( 'filepath', 'filename', ToUtf8IfNeeded ),
    ( 'description', 'text', ToUtf8IfNeeded ),
    ( 'line_num', 'lnum', lambda value: value ),
  )
  for source_key, target_key, convert in conversions:
    if source_key in goto_data_item:
      qf_item[ target_key ] = convert( goto_data_item[ source_key ] )
  if 'column_num' in goto_data_item:
    # Vim quickfix columns are 0-based here.
    qf_item[ 'col' ] = goto_data_item[ 'column_num' ] - 1
  return qf_item
| gpl-3.0 |
YosuaMichael/elasticsearch | dev-tools/prepare_release_candidate.py | 48 | 19842 | # Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on
# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
# Prepare a release
#
# 1. Update the Version.java to remove the snapshot bit
# 2. Remove the -SNAPSHOT suffix in all pom.xml files
#
# USAGE:
#
# python3 ./dev-tools/prepare-release.py
#
# Note: Ensure the script is run from the elasticsearch top level directory
#
import fnmatch
import argparse
from prepare_release_update_documentation import update_reference_docs
import subprocess
import tempfile
import re
import os
import shutil
from functools import partial
import sys
# Repo-relative paths of the files that carry the version information.
VERSION_FILE = 'core/src/main/java/org/elasticsearch/Version.java'
POM_FILE = 'pom.xml'

# Announcement mail skeleton; %-formatted with the keys version, hash,
# bucket and major_minor_version before being printed at the end of a run.
MAIL_TEMPLATE = """
Hi all
The new release candidate for %(version)s based on this commit[1] is now available, including the x-plugins, and RPM/deb repos:
- ZIP [2]
- tar.gz [3]
- RPM [4]
- deb [5]
Plugins can be installed as follows,
bin/plugin -Des.plugins.staging=true install cloud-aws
The same goes for the x-plugins:
bin/plugin -Des.plugins.staging=true install license
bin/plugin -Des.plugins.staging=true install shield
bin/plugin -Des.plugins.staging=true install watcher
To install the deb from an APT repo:
APT line sources.list line:
deb http://%(bucket)s/elasticsearch/staging/%(version)s-%(hash)s/repos/%(major_minor_version)s/debian/ stable main
To install the RPM, create a YUM file like:
/etc/yum.repos.d/elasticsearch.repo
containing:
[elasticsearch-2.0]
name=Elasticsearch repository for packages
baseurl=http://%(bucket)s/elasticsearch/staging/%(version)s-%(hash)s/repos/%(major_minor_version)s/centos
gpgcheck=1
gpgkey=http://packages.elastic.co/GPG-KEY-elasticsearch
enabled=1
To smoke-test the release please run:
python3 -B ./dev-tools/smoke_test_rc.py --version %(version)s --hash %(hash)s --plugins license,shield,watcher
NOTE: this script requires JAVA_HOME to point to a Java 7 Runtime
[1] https://github.com/elastic/elasticsearch/commit/%(hash)s
[2] http://%(bucket)s/elasticsearch/staging/%(version)s-%(hash)s/org/elasticsearch/distribution/zip/elasticsearch/%(version)s/elasticsearch-%(version)s.zip
[3] http://%(bucket)s/elasticsearch/staging/%(version)s-%(hash)s/org/elasticsearch/distribution/tar/elasticsearch/%(version)s/elasticsearch-%(version)s.tar.gz
[4] http://%(bucket)s/elasticsearch/staging/%(version)s-%(hash)s/org/elasticsearch/distribution/rpm/elasticsearch/%(version)s/elasticsearch-%(version)s.rpm
[5] http://%(bucket)s/elasticsearch/staging/%(version)s-%(hash)s/org/elasticsearch/distribution/deb/elasticsearch/%(version)s/elasticsearch-%(version)s.deb
"""

# console colors (ANSI escape sequences)
COLOR_OK = '\033[92m'
COLOR_END = '\033[0m'
COLOR_FAIL = '\033[91m'
def run(command, env_vars=None):
    """Run a shell command, echoing it first.

    Args:
        command: shell command line passed to os.system.
        env_vars: optional dict of environment variables to export before
            running the command.

    Raises:
        RuntimeError: if the command exits with a non-zero status.
    """
    if env_vars:
        for key, value in env_vars.items():
            # Assign through os.environ rather than os.putenv(): putenv()
            # changes the process environment but does NOT update
            # os.environ, so later reads of os.environ would miss the value.
            os.environ[key] = value
    print('*** Running: %s%s%s' % (COLOR_OK, command, COLOR_END))
    if os.system(command):
        raise RuntimeError('    FAILED: %s' % (command))
def ensure_checkout_is_clean():
    """Abort (RuntimeError) unless the git checkout is clean and in sync
    with origin: no local modifications, no untracked files, and no
    unpulled or unpushed commits."""
    # Make sure no local mods:
    diff_stat = subprocess.check_output('git diff --shortstat', shell=True).decode('utf-8')
    if diff_stat:
        raise RuntimeError('git diff --shortstat is non-empty got:\n%s' % diff_stat)
    # Make sure no untracked files:
    status = subprocess.check_output('git status', shell=True).decode('utf-8', errors='replace')
    if 'Untracked files:' in status:
        if 'dev-tools/__pycache__/' in status:
            print('*** NOTE: invoke python with -B to prevent __pycache__ directories ***')
        raise RuntimeError('git status shows untracked files got:\n%s' % status)
    # Make sure we have all changes from origin:
    if 'is behind' in status:
        raise RuntimeError('git status shows not all changes pulled from origin; try running "git pull origin" in this branch got:\n%s' % (status))
    # Make sure we have no local unpushed changes (this is supposed to be a clean area):
    if 'is ahead' in status:
        raise RuntimeError('git status shows local commits; try running "git fetch origin", "git checkout ", "git reset --hard origin/" in this branch got:\n%s' % (status))
def process_file(file_path, line_callback):
    """Apply line_callback to every line of file_path.

    A transformed copy is written to a temporary file; if any line changed,
    the original file is replaced with the copy and True is returned,
    otherwise the copy is discarded and False is returned.

    Args:
        file_path: path of the file to rewrite in place.
        line_callback: callable taking one line (str) and returning the
            (possibly modified) line.

    Returns:
        True if the file was modified and replaced, False otherwise.
    """
    fh, abs_path = tempfile.mkstemp()
    modified = False
    try:
        # Reuse the descriptor from mkstemp via os.fdopen instead of
        # opening the path a second time; the with-blocks guarantee both
        # files are closed even if line_callback raises.
        with os.fdopen(fh, 'w', encoding='utf-8') as new_file:
            with open(file_path, encoding='utf-8') as old_file:
                for line in old_file:
                    new_line = line_callback(line)
                    modified = modified or (new_line != line)
                    new_file.write(new_line)
    except Exception:
        # Don't leak the temporary file when the callback or I/O fails.
        os.remove(abs_path)
        raise
    if modified:
        # Remove original file, then move the transformed copy into place.
        os.remove(file_path)
        shutil.move(abs_path, file_path)
        return True
    # Nothing to do - just remove the tmp file.
    os.remove(abs_path)
    return False
def remove_version_snapshot(version_file, release):
    """Move Version.java from a snapshot to a release for `release`.

    Flips the boolean snapshot flag of the matching `new Version(V_..._ID,
    true` constructor call to false.  Raises RuntimeError when no line was
    changed.
    """
    # Version ids use underscores: 1.0.0.Beta1 -> 1_0_0_Beta1
    release_id = release.replace('.', '_').replace('-', '_')
    snapshot_ctor = 'new Version(V_%s_ID, true' % (release_id)
    release_ctor = 'new Version(V_%s_ID, false' % (release_id)
    changed = process_file(
        version_file,
        lambda line: line.replace(snapshot_ctor, release_ctor))
    if not changed:
        raise RuntimeError('failed to remove snapshot version for %s' % (release))
def rename_local_meta_files(path):
    """Strip the '-local' marker from every maven-metadata-local.xml* file
    found anywhere under `path`."""
    for directory, _, file_names in os.walk(path):
        for name in fnmatch.filter(file_names, 'maven-metadata-local.xml*'):
            source = os.path.join(directory, name)
            target = os.path.join(directory, name.replace('-local', ''))
            os.rename(source, target)
def find_release_version(pom_path='pom.xml'):
    """Return the release version declared in the given pom file.

    The pom must contain a '<version>X.Y.Z-SNAPSHOT</version>' entry; the
    version is returned without the -SNAPSHOT suffix.  This intentionally
    fails if the pom has no SNAPSHOT version set, i.e. if the branch is
    already on a release version.

    Args:
        pom_path: path of the pom file to inspect (defaults to 'pom.xml'
            in the current directory, the original hard-coded behavior).

    Returns:
        The next version string, e.g. '0.90.7'.

    Raises:
        RuntimeError: when no SNAPSHOT version is found.
    """
    with open(pom_path, encoding='utf-8') as file:
        for line in file:
            match = re.search(r'<version>(.+)-SNAPSHOT</version>', line)
            if match:
                return match.group(1)
    raise RuntimeError('Could not find release version in branch')
def ensure_rpm_is_signed(rpm, gpg_key):
    """Check that the produced RPM is signed with the supplied GPG key.

    Raises RuntimeError when `rpm -v -K` output does not mention gpg_key.
    """
    verify_cmd = 'rpm -v -K %s | grep -qi %s' % (rpm, gpg_key)
    try:
        subprocess.check_output(verify_cmd, shell=True)
    except:
        raise RuntimeError('Aborting. RPM does not seem to be signed, check with: rpm -v -K %s' % rpm)
def check_command_exists(name, cmd):
    """Check that the external binary `name` is available.

    Runs `cmd` through the shell; raises RuntimeError when it fails,
    which usually means the tool is not installed or not on $PATH.
    """
    error_message = ('Could not run command %s - please make sure it is '
                     'installed and in $PATH' % (name))
    try:
        subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError:
        raise RuntimeError(error_message)
def run_and_print(text, run_function):
    """Print `text`, invoke `run_function`, and report OK / NOT OK.

    Returns True when run_function completed, False when it raised
    RuntimeError.
    """
    print(text, end='')
    try:
        run_function()
    except RuntimeError:
        print(COLOR_FAIL + 'NOT OK' + COLOR_END)
        return False
    print(COLOR_OK + 'OK' + COLOR_END)
    return True
def check_env_var(text, env_var):
    """Print `text` and report whether environment variable `env_var` is
    set; return True when it is, False otherwise."""
    print(text, end='')
    if env_var in os.environ:
        print(COLOR_OK + 'OK' + COLOR_END)
        return True
    print(COLOR_FAIL + 'NOT OK' + COLOR_END)
    return False
def check_environment_and_commandline_tools(check_only):
    """Verify AWS credentials and all required external tools are present.

    Prints one OK / NOT OK line per check.  With check_only=True the script
    always exits (status 0) after reporting; otherwise it exits with a
    non-zero status when any check failed.
    """
    checks = list()
    checks.append(check_env_var('Checking for AWS env configuration AWS_SECRET_KEY... ', 'AWS_SECRET_KEY'))
    checks.append(check_env_var('Checking for AWS env configuration AWS_ACCESS_KEY... ', 'AWS_ACCESS_KEY'))
    checks.append(run_and_print('Checking command: rpm... ', partial(check_command_exists, 'rpm', 'rpm --version')))
    checks.append(run_and_print('Checking command: dpkg... ', partial(check_command_exists, 'dpkg', 'dpkg --version')))
    checks.append(run_and_print('Checking command: gpg... ', partial(check_command_exists, 'gpg', 'gpg --version')))
    checks.append(run_and_print('Checking command: expect... ', partial(check_command_exists, 'expect', 'expect -v')))
    checks.append(run_and_print('Checking command: createrepo... ', partial(check_command_exists, 'createrepo', 'createrepo --version')))
    checks.append(run_and_print('Checking command: s3cmd... ', partial(check_command_exists, 's3cmd', 's3cmd --version')))
    checks.append(run_and_print('Checking command: deb-s3... ', partial(check_command_exists, 'deb-s3', 'deb-s3 -h')))
    checks.append(run_and_print('Checking command: rpm-s3... ', partial(check_command_exists, 'rpm-s3', 'rpm-s3 -h')))

    if check_only:
        sys.exit(0)

    if False in checks:
        print("Exiting due to failing checks")
        # Exit non-zero so calling scripts see the failure; the original
        # exited with status 0 here, which reported success to the shell.
        sys.exit(1)
if __name__ == "__main__":
    # ---- Command-line interface -------------------------------------------
    parser = argparse.ArgumentParser(description='Builds and publishes a Elasticsearch Release')
    parser.add_argument('--deploy-sonatype', dest='deploy_sonatype', action='store_true',
                        help='Installs and Deploys the release on a sonatype staging repository.')
    parser.add_argument('--deploy-s3', dest='deploy_s3', action='store_true',
                        help='Pushes artifacts to the S3 staging area')
    parser.add_argument('--deploy-s3-repos', dest='deploy_s3_repos', action='store_true',
                        help='Creates package repositories in S3 repo')
    parser.add_argument('--no-install', dest='no_install', action='store_true',
                        help='Does not run "mvn install", expects this to be run already and reuses artifacts from local repo, only useful with --deploy-s3/--deploy-s3-repos, after sonatype deplomeny to ensure same artifacts')
    parser.add_argument('--skip-doc-check', dest='skip_doc_check', action='store_false',
                        help='Skips any checks for pending documentation changes')
    parser.add_argument('--skip-tests', dest='skip_tests', action='store_true',
                        help='Skips any test runs')
    parser.add_argument('--gpg-key', dest='gpg_key', default="D88E42B4",
                        help='Allows you to specify a different gpg_key to be used instead of the default release key')
    parser.add_argument('--bucket', '-b', dest='bucket', default="download.elasticsearch.org",
                        help='Allows you to specify a different s3 bucket to upload the artifacts to')
    parser.add_argument('--quiet', dest='quiet', action='store_true',
                        help='Runs the script in quiet mode')
    parser.add_argument('--check', dest='check', action='store_true',
                        help='Checks and reports for all requirements and then exits')
    # by default, we only run mvn install and dont push anything repo
    parser.set_defaults(deploy_sonatype=False)
    parser.set_defaults(deploy_s3=False)
    parser.set_defaults(deploy_s3_repos=False)
    parser.set_defaults(no_install=False)
    # other defaults
    parser.set_defaults(skip_doc_check=False)
    parser.set_defaults(quiet=False)
    parser.set_defaults(skip_tests=False)
    args = parser.parse_args()
    skip_doc_check = args.skip_doc_check
    gpg_key = args.gpg_key
    bucket = args.bucket
    deploy_sonatype = args.deploy_sonatype
    deploy_s3 = args.deploy_s3
    deploy_s3_repos = args.deploy_s3_repos
    run_mvn_install = not args.no_install
    skip_tests = args.skip_tests

    # Verify credentials and external tools up front (exits on --check).
    check_environment_and_commandline_tools(args.check)

    if not run_mvn_install and deploy_sonatype:
        print('Using --no-install and --deploy-sonatype together does not work. Exiting')
        sys.exit(-1)

    print('*** Preparing a release candidate: ', end='')
    print('deploy sonatype: %s%s%s' % (COLOR_OK if deploy_sonatype else COLOR_FAIL, 'yes' if deploy_sonatype else 'no', COLOR_END), end='')
    print(', deploy s3: %s%s%s' % (COLOR_OK if deploy_s3 else COLOR_FAIL, 'yes' if deploy_s3 else 'no', COLOR_END), end='')
    print(', deploy s3 repos: %s%s%s' % (COLOR_OK if deploy_s3_repos else COLOR_FAIL, 'yes' if deploy_s3_repos else 'no', COLOR_END), end='')
    print('')

    # ---- Derive version/commit and the local staging repository paths -----
    shortHash = subprocess.check_output('git log --pretty=format:"%h" -n 1', shell=True).decode('utf-8')
    releaseDirectory = os.getenv('HOME') + '/elastic-releases'
    release_version = find_release_version()
    localRepo = '%s/elasticsearch-%s-%s' % (releaseDirectory, release_version, shortHash)
    localRepoElasticsearch = localRepo + '/org/elasticsearch'

    ensure_checkout_is_clean()
    if not re.match('(\d+\.\d+)\.*',release_version):
        raise RuntimeError('illegal release version format: %s' % (release_version))
    major_minor_version = re.match('(\d+\.\d+)\.*',release_version).group(1)

    print('*** Preparing release version: [%s]' % release_version)

    if not skip_doc_check:
        print('*** Check for pending documentation changes')
        pending_files = update_reference_docs(release_version)
        if pending_files:
            raise RuntimeError('pending coming[%s] documentation changes found in %s' % (release_version, pending_files))

    # ---- Strip -SNAPSHOT from the poms and Version.java -------------------
    run('cd dev-tools && mvn versions:set -DnewVersion=%s -DgenerateBackupPoms=false' % (release_version))
    run('cd rest-api-spec && mvn versions:set -DnewVersion=%s -DgenerateBackupPoms=false' % (release_version))
    run('mvn versions:set -DnewVersion=%s -DgenerateBackupPoms=false' % (release_version))

    remove_version_snapshot(VERSION_FILE, release_version)

    print('*** Done removing snapshot version. DO NOT COMMIT THIS, WHEN CREATING A RELEASE CANDIDATE.')

    if not os.path.exists(releaseDirectory):
        os.mkdir(releaseDirectory)
    if os.path.exists(localRepoElasticsearch) and run_mvn_install:
        print('clean local repository %s' % localRepoElasticsearch)
        shutil.rmtree(localRepoElasticsearch)

    # ---- Build (or reuse) the artifacts -----------------------------------
    mvn_target = 'deploy' if deploy_sonatype else 'install'
    tests = '-DskipTests' if skip_tests else '-Dskip.integ.tests=true'
    install_command = 'mvn clean %s -Prelease %s -Dgpg.key="%s" -Dpackaging.rpm.rpmbuild=/usr/bin/rpmbuild -Drpm.sign=true -Dmaven.repo.local=%s -Dno.commit.pattern="\\bno(n|)commit\\b" -Dforbidden.test.signatures=""' % (mvn_target, tests, gpg_key, localRepo)
    clean_repo_command = 'find %s -name _remote.repositories -exec rm {} \;' % (localRepoElasticsearch)

    if not run_mvn_install:
        print('')
        print('*** By choosing --no-install we assume you ran the following commands successfully:')
        print(' %s' % (install_command))
        print(' 1. Remove all _remote.repositories: %s' % (clean_repo_command))
        rename_metadata_files_command = 'for i in $(find %s -name "maven-metadata-local.xml*") ; do mv "$i" "${i/-local/}" ; done' % (localRepoElasticsearch)
        print(' 2. Rename all maven metadata files: %s' % (rename_metadata_files_command))
    else:
        for cmd in [install_command, clean_repo_command]:
            run(cmd)
        rename_local_meta_files(localRepoElasticsearch)

    rpm = '%s/distribution/rpm/elasticsearch/%s/elasticsearch-%s.rpm' % (localRepoElasticsearch, release_version, release_version)
    print('Ensuring that RPM has been signed')
    ensure_rpm_is_signed(rpm, gpg_key)

    # repository push commands
    s3cmd_sync_to_staging_bucket_cmd = 's3cmd sync -P %s s3://%s/elasticsearch/staging/%s-%s/org/' % (localRepoElasticsearch, bucket, release_version, shortHash)
    s3_bucket_sync_to = '%s/elasticsearch/staging/%s-%s/repos/' % (bucket, release_version, shortHash)
    s3cmd_sync_official_repo_cmd = 's3cmd sync s3://packages.elasticsearch.org/elasticsearch/%s s3://%s' % (major_minor_version, s3_bucket_sync_to)
    debs3_prefix = 'elasticsearch/staging/%s-%s/repos/%s/debian' % (release_version, shortHash, major_minor_version)
    debs3_upload_cmd = 'deb-s3 upload --preserve-versions %s/distribution/deb/elasticsearch/%s/elasticsearch-%s.deb -b %s --prefix %s --sign %s --arch amd64' % (localRepoElasticsearch, release_version, release_version, bucket, debs3_prefix, gpg_key)
    debs3_list_cmd = 'deb-s3 list -b %s --prefix %s' % (bucket, debs3_prefix)
    debs3_verify_cmd = 'deb-s3 verify -b %s --prefix %s' % (bucket, debs3_prefix)
    rpms3_prefix = 'elasticsearch/staging/%s-%s/repos/%s/centos' % (release_version, shortHash, major_minor_version)
    rpms3_upload_cmd = 'rpm-s3 -v -b %s -p %s --sign --visibility public-read -k 0 %s' % (bucket, rpms3_prefix, rpm)

    # ---- Push to S3 (or print the manual instructions) --------------------
    if deploy_s3:
        run(s3cmd_sync_to_staging_bucket_cmd)
    else:
        print('')
        print('*** To push a release candidate to s3 run: ')
        print(' 1. Sync %s into S3 bucket' % (localRepoElasticsearch))
        print(' %s' % (s3cmd_sync_to_staging_bucket_cmd))

    if deploy_s3_repos:
        print('*** Syncing official package repository into staging s3 bucket')
        run(s3cmd_sync_official_repo_cmd)
        print('*** Uploading debian package (you will be prompted for the passphrase!)')
        run(debs3_upload_cmd)
        run(debs3_list_cmd)
        run(debs3_verify_cmd)
        print('*** Uploading rpm package (you will be prompted for the passphrase!)')
        run(rpms3_upload_cmd)
    else:
        print('*** To create repositories on S3 run:')
        print(' 1. Sync existing repo into staging: %s' % s3cmd_sync_official_repo_cmd)
        print(' 2. Upload debian package (and sign it): %s' % debs3_upload_cmd)
        print(' 3. List all debian packages: %s' % debs3_list_cmd)
        print(' 4. Verify debian packages: %s' % debs3_verify_cmd)
        print(' 5. Upload RPM: %s' % rpms3_upload_cmd)

    # ---- Final instructions for the release manager -----------------------
    print('')
    print('NOTE: the above mvn command will promt you several times for the GPG passphrase of the key you specified you can alternatively pass it via -Dgpg.passphrase=yourPassPhrase')
    print(' since RPM signing doesn\'t support gpg-agents the recommended way to set the password is to add a release profile to your settings.xml:')
    print("""
<profiles>
<profile>
<id>release</id>
<properties>
<gpg.passphrase>YourPasswordGoesHere</gpg.passphrase>
</properties>
</profile>
</profiles>
""")
    print('NOTE: Running s3cmd might require you to create a config file with your credentials, if the s3cmd does not support suppliying them via the command line!')
    print('*** Once the release is deployed and published send out the following mail to dev@elastic.co:')
    string_format_dict = {'version' : release_version, 'hash': shortHash, 'major_minor_version' : major_minor_version, 'bucket': bucket}
    print(MAIL_TEMPLATE % string_format_dict)

    print('')
    print('You can verify that pushing to the staging repository pushed all the artifacts by running (log into sonatype to find out the correct id):')
    print(' python -B dev-tools/validate-maven-repository.py %s https://oss.sonatype.org/service/local/repositories/orgelasticsearch-IDTOFINDOUT/content/org/elasticsearch ' %(localRepoElasticsearch))

    print('')
    print('To publish the release and the repo on S3 execute the following commands:')
    print(' s3cmd cp --recursive s3://%(bucket)s/elasticsearch/staging/%(version)s-%(hash)s/repos/%(major_minor_version)s/ s3://packages.elasticsearch.org/elasticsearch/%(major_minor_version)s' % string_format_dict)
    print(' s3cmd cp --recursive s3://%(bucket)s/elasticsearch/staging/%(version)s-%(hash)s/org/ s3://%(bucket)s/elasticsearch/release/org' % string_format_dict)
    print('Now go ahead and tag the release:')
    print(' git tag -a v%(version)s %(hash)s' % string_format_dict)
    print(' git push origin v%(version)s' % string_format_dict )
| apache-2.0 |
yinquan529/platform-external-chromium_org | build/config/win/get_msvc_config_real.py | 53 | 22345 | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This file copies the logic from GYP to find the MSVC configuration. It's not
# currently used because it is too slow. We will probably build this
# functionality into the C++ code in the future.
"""Handle version information related to Visual Stuio."""
import errno
import os
import re
import subprocess
import sys
class VisualStudioVersion(object):
  """Information regarding a version of Visual Studio."""

  def __init__(self, short_name, description,
               solution_version, project_version, flat_sln, uses_vcxproj,
               path, sdk_based, default_toolset=None):
    self.short_name = short_name
    self.description = description
    self.solution_version = solution_version
    self.project_version = project_version
    self.flat_sln = flat_sln
    self.uses_vcxproj = uses_vcxproj
    self.path = path
    self.sdk_based = sdk_based
    self.default_toolset = default_toolset

  def ShortName(self):
    """Get the short name, e.g. '2013'."""
    return self.short_name

  def Description(self):
    """Get the full description of the version."""
    return self.description

  def SolutionVersion(self):
    """Get the version number of the sln files."""
    return self.solution_version

  def ProjectVersion(self):
    """Get the version number of the vcproj or vcxproj files."""
    return self.project_version

  def FlatSolution(self):
    """Whether the solution is laid out flat."""
    return self.flat_sln

  def UsesVcxproj(self):
    """Returns true if this version uses a vcxproj file."""
    return self.uses_vcxproj

  def ProjectExtension(self):
    """Returns the file extension for the project."""
    return '.vcxproj' if self.uses_vcxproj else '.vcproj'

  def Path(self):
    """Returns the path to the Visual Studio installation."""
    return self.path

  def ToolPath(self, tool):
    """Returns the path to a given compiler tool."""
    return os.path.normpath(os.path.join(self.path, "VC/bin", tool))

  def DefaultToolset(self):
    """Returns the msbuild toolset version that will be used in the absence
    of a user override."""
    return self.default_toolset

  def SetupScript(self, target_arch):
    """Returns a command (with arguments) to be used to set up the
    environment.  |target_arch| must be either 'x86' or 'x64'."""
    assert target_arch in ('x86', 'x64')
    sdk_dir = os.environ.get('WindowsSDKDir')
    if self.sdk_based and sdk_dir:
      # Running in the SDK command line environment: use the SDK's own
      # setup script.
      return [os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd')),
              '/' + target_arch]
    if target_arch == 'x86':
      # VC/vcvarsall.bat is not used for x86 because vcvarsall calls
      # vcvars32, which it can only find if VS??COMNTOOLS is set, which it
      # isn't always.
      return [os.path.normpath(
          os.path.join(self.path, 'Common7/Tools/vsvars32.bat'))]
    # x64: use the 64-on-64 compiler when the host OS is 64-bit.
    host_is_amd64 = (
        os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
        os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64')
    arg = 'amd64' if host_is_amd64 else 'x86_amd64'
    return [os.path.normpath(
        os.path.join(self.path, 'VC/vcvarsall.bat')), arg]
def _RegistryQueryBase(sysdir, key, value):
  """Use reg.exe to read a particular key.

  While ideally we might use the win32 module, we would like gyp to be
  python neutral, so for instance cygwin python lacks this module.

  Arguments:
    sysdir: The system subdirectory to attempt to launch reg.exe from.
    key: The registry key to read from.
    value: The particular value to read.
  Return:
    stdout from reg.exe, or None for failure.
  """
  # Skip if not on Windows or Python Win32 setup issue.
  if sys.platform not in ('win32', 'cygwin'):
    return None
  # Build the reg.exe invocation and launch it.
  reg_path = os.path.join(os.environ.get('WINDIR', ''), sysdir, 'reg.exe')
  command = [reg_path, 'query', key]
  if value:
    command += ['/v', value]
  process = subprocess.Popen(command, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
  # Read stdout to completion so returncode is valid.  Note that error
  # text may appear on stdout in some cases.
  stdout = process.communicate()[0]
  # reg.exe: officially 0 == success and 1 == error.
  return None if process.returncode else stdout
def _RegistryQuery(key, value=None):
  """Use reg.exe to read a particular key through _RegistryQueryBase.
  First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
  that fails, it falls back to System32. Sysnative is available on Vista and
  up and available on Windows Server 2003 and XP through KB patch 942589. Note
  that Sysnative will always fail if using 64-bit python due to it being a
  virtual directory and System32 will work correctly in the first place.
  KB 942589 - http://support.microsoft.com/kb/942589/en-us.
  Arguments:
    key: The registry key.
    value: The particular registry value to read (optional).
  Return:
    stdout from reg.exe, or None for failure.
  """
  text = None
  try:
    text = _RegistryQueryBase('Sysnative', key, value)
  except OSError, e:
    if e.errno == errno.ENOENT:
      # Sysnative is missing (e.g. 64-bit Python, per the docstring above);
      # retry through the real System32 directory.
      text = _RegistryQueryBase('System32', key, value)
    else:
      # Any other OS error is unexpected; let it propagate.
      raise
  return text
def _RegistryGetValue(key, value):
  """Use reg.exe to obtain the value of a registry key.
  Args:
    key: The registry key.
    value: The particular registry value to read.
  Return:
    contents of the registry key's value, or None on failure.
  """
  output = _RegistryQuery(key, value)
  if not output:
    return None
  # reg.exe prints lines such as '    <name>    REG_SZ    <data>\r\n';
  # capture the data portion that follows the REG_* type token.
  found = re.search(r'REG_\w+\s+([^\r]+)\r\n', output)
  return found.group(1) if found else None
def _RegistryKeyExists(key):
  """Use reg.exe to see if a key exists.
  Args:
    key: The registry key to check.
  Return:
    True if the key exists
  """
  # _RegistryQuery returns None (or empty output) when the key is absent.
  return bool(_RegistryQuery(key))
def _CreateVersion(name, path, sdk_based=False):
  """Sets up MSVS project generation.
  Setup is based off the GYP_MSVS_VERSION environment variable or whatever is
  autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is
  passed in that doesn't match a value in versions python will throw a error.
  Names with an 'e' suffix are express editions; they differ only by
  flat_sln=True in the table below.
  """
  if path:
    path = os.path.normpath(path)
  # NOTE(review): the 2013/2013e entries reuse default_toolset='v110' (the
  # VS 2012 toolset); VS 2013's native toolset is v120 -- confirm intended.
  versions = {
      '2013': VisualStudioVersion('2013',
                                  'Visual Studio 2013',
                                  solution_version='13.00',
                                  project_version='4.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based,
                                  default_toolset='v110'),
      '2013e': VisualStudioVersion('2013e',
                                   'Visual Studio 2013',
                                   solution_version='13.00',
                                   project_version='4.0',
                                   flat_sln=True,
                                   uses_vcxproj=True,
                                   path=path,
                                   sdk_based=sdk_based,
                                   default_toolset='v110'),
      '2012': VisualStudioVersion('2012',
                                  'Visual Studio 2012',
                                  solution_version='12.00',
                                  project_version='4.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based,
                                  default_toolset='v110'),
      '2012e': VisualStudioVersion('2012e',
                                   'Visual Studio 2012',
                                   solution_version='12.00',
                                   project_version='4.0',
                                   flat_sln=True,
                                   uses_vcxproj=True,
                                   path=path,
                                   sdk_based=sdk_based,
                                   default_toolset='v110'),
      '2010': VisualStudioVersion('2010',
                                  'Visual Studio 2010',
                                  solution_version='11.00',
                                  project_version='4.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based),
      '2010e': VisualStudioVersion('2010e',
                                   'Visual Studio 2010',
                                   solution_version='11.00',
                                   project_version='4.0',
                                   flat_sln=True,
                                   uses_vcxproj=True,
                                   path=path,
                                   sdk_based=sdk_based),
      '2008': VisualStudioVersion('2008',
                                  'Visual Studio 2008',
                                  solution_version='10.00',
                                  project_version='9.00',
                                  flat_sln=False,
                                  uses_vcxproj=False,
                                  path=path,
                                  sdk_based=sdk_based),
      '2008e': VisualStudioVersion('2008e',
                                   'Visual Studio 2008',
                                   solution_version='10.00',
                                   project_version='9.00',
                                   flat_sln=True,
                                   uses_vcxproj=False,
                                   path=path,
                                   sdk_based=sdk_based),
      '2005': VisualStudioVersion('2005',
                                  'Visual Studio 2005',
                                  solution_version='9.00',
                                  project_version='8.00',
                                  flat_sln=False,
                                  uses_vcxproj=False,
                                  path=path,
                                  sdk_based=sdk_based),
      '2005e': VisualStudioVersion('2005e',
                                   'Visual Studio 2005',
                                   solution_version='9.00',
                                   project_version='8.00',
                                   flat_sln=True,
                                   uses_vcxproj=False,
                                   path=path,
                                   sdk_based=sdk_based),
  }
  return versions[str(name)]
def _ConvertToCygpath(path):
"""Convert to cygwin path if we are using cygwin."""
if sys.platform == 'cygwin':
p = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE)
path = p.communicate()[0].strip()
return path
def _DetectVisualStudioVersions(versions_to_check, force_express):
  """Collect the list of installed visual studio versions.
  Returns:
    A list of visual studio versions installed in descending order of
    usage preference.
  Base this on the registry and a quick check if devenv.exe exists.
  Only versions 8-12 are considered.
  Possibilities are:
    2005(e) - Visual Studio 2005 (8)
    2008(e) - Visual Studio 2008 (9)
    2010(e) - Visual Studio 2010 (10)
    2012(e) - Visual Studio 2012 (11)
    2013(e) - Visual Studio 2013 (12)
  Where (e) is e for express editions of MSVS and blank otherwise.
  """
  # Maps registry version strings to the marketing-year names used by
  # _CreateVersion.
  version_to_year = {
      '8.0': '2005',
      '9.0': '2008',
      '10.0': '2010',
      '11.0': '2012',
      '12.0': '2013',
  }
  versions = []
  for version in versions_to_check:
    # Old method of searching for which VS version is installed
    # We don't use the 2010-encouraged-way because we also want to get the
    # path to the binaries, which it doesn't offer.
    keys = [r'HKLM\Software\Microsoft\VisualStudio\%s' % version,
            r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s' % version,
            r'HKLM\Software\Microsoft\VCExpress\%s' % version,
            r'HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s' % version]
    for index in range(len(keys)):
      path = _RegistryGetValue(keys[index], 'InstallDir')
      if not path:
        continue
      path = _ConvertToCygpath(path)
      # Check for full.
      full_path = os.path.join(path, 'devenv.exe')
      express_path = os.path.join(path, 'vcexpress.exe')
      if not force_express and os.path.exists(full_path):
        # Add this one.
        versions.append(_CreateVersion(version_to_year[version],
            os.path.join(path, '..', '..')))
      # Check for express.
      elif os.path.exists(express_path):
        # Add this one.
        versions.append(_CreateVersion(version_to_year[version] + 'e',
            os.path.join(path, '..', '..')))
    # The old method above does not work when only SDK is installed.
    keys = [r'HKLM\Software\Microsoft\VisualStudio\SxS\VC7',
            r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7']
    for index in range(len(keys)):
      path = _RegistryGetValue(keys[index], version)
      if not path:
        continue
      path = _ConvertToCygpath(path)
      # SDK-only installs are always treated as express ('e') and sdk_based.
      versions.append(_CreateVersion(version_to_year[version] + 'e',
          os.path.join(path, '..'), sdk_based=True))
  return versions
def SelectVisualStudioVersion(version='auto'):
  """Select which version of Visual Studio projects to generate.
  Arguments:
    version: Hook to allow caller to force a particular version (vs auto).
  Returns:
    An object representing a visual studio project format version.
  """
  # In auto mode, check environment variable for override.
  if version == 'auto':
    version = os.environ.get('GYP_MSVS_VERSION', 'auto')
  # Maps a requested version to the registry version strings to probe,
  # in order of preference.
  # NOTE(review): 'auto' omits '12.0', so VS 2013 is never auto-detected
  # and must be requested explicitly -- confirm this is intentional.
  version_map = {
    'auto': ('10.0', '9.0', '8.0', '11.0'),
    '2005': ('8.0',),
    '2005e': ('8.0',),
    '2008': ('9.0',),
    '2008e': ('9.0',),
    '2010': ('10.0',),
    '2010e': ('10.0',),
    '2012': ('11.0',),
    '2012e': ('11.0',),
    '2013': ('12.0',),
    '2013e': ('12.0',),
  }
  override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH')
  if override_path:
    msvs_version = os.environ.get('GYP_MSVS_VERSION')
    if not msvs_version or 'e' not in msvs_version:
      raise ValueError('GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be '
                       'set to an "e" version (e.g. 2010e)')
    # An explicit override path implies an SDK-based (express) setup.
    return _CreateVersion(msvs_version, override_path, sdk_based=True)
  version = str(version)
  versions = _DetectVisualStudioVersions(version_map[version], 'e' in version)
  if not versions:
    if version == 'auto':
      # Default to 2005 if we couldn't find anything
      return _CreateVersion('2005', None)
    else:
      return _CreateVersion(version, None)
  return versions[0]
def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, open_out):
  """It's not sufficient to have the absolute path to the compiler, linker,
  etc. on Windows, as those tools rely on .dlls being in the PATH. We also
  need to support both x86 and x64 compilers within the same build (to support
  msvs_target_platform hackery). Different architectures require a different
  compiler binary, and different supporting environment variables (INCLUDE,
  LIB, LIBPATH). So, we extract the environment here, wrap all invocations
  of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
  sets up the environment, and then we do not prefix the compiler with
  an absolute path, instead preferring something like "cl.exe" in the rule
  which will then run whichever the environment setup has put in the path.
  When the following procedure to generate environment files does not
  meet your requirement (e.g. for custom toolchains), you can pass
  "-G ninja_use_custom_environment_files" to the gyp to suppress file
  generation and use custom environment files prepared by yourself.
  Writes environment.<arch> files into toplevel_build_dir via open_out and
  returns {arch: path to cl.exe}.
  NOTE(review): a second GenerateEnvironmentFiles defined later in this
  module (without the open_out parameter) shadows this definition."""
  archs = ('x86', 'x64')
  if generator_flags.get('ninja_use_custom_environment_files', 0):
    # User supplies their own environment files; just report bare 'cl.exe'.
    cl_paths = {}
    for arch in archs:
      cl_paths[arch] = 'cl.exe'
    return cl_paths
  vs = GetVSVersion(generator_flags)
  cl_paths = {}
  for arch in archs:
    # Extract environment variables for subprocesses.
    # Runs '<setup script> && set' through the shell so the script's
    # environment changes are visible to 'set'.
    args = vs.SetupScript(arch)
    args.extend(('&&', 'set'))
    popen = subprocess.Popen(
        args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    variables, _ = popen.communicate()
    env = _ExtractImportantEnvironment(variables)
    env_block = _FormatAsEnvironmentBlock(env)
    f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb')
    f.write(env_block)
    f.close()
    # Find cl.exe location for this architecture.
    args = vs.SetupScript(arch)
    args.extend(('&&',
      'for', '%i', 'in', '(cl.exe)', 'do', '@echo', 'LOC:%~$PATH:i'))
    popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE)
    output, _ = popen.communicate()
    cl_paths[arch] = _ExtractCLPath(output)
  return cl_paths
def OpenOutput(path, mode='w'):
  """Open |path| for writing, creating parent directories if necessary."""
  parent = os.path.dirname(path)
  try:
    os.makedirs(parent)
  except OSError:
    # The directory may already exist; any genuine failure will surface
    # when open() is attempted below.
    pass
  return open(path, mode)
# Module-level cache used by GetVSVersion(); None until the first call.
vs_version = None
def GetVSVersion(generator_flags):
  """Return the (cached) VisualStudioVersion, detecting it on first use."""
  global vs_version
  if not vs_version:
    # First call: resolve the version from generator flags (or 'auto')
    # and memoize it in the module-level cache.
    requested = generator_flags.get('msvs_version', 'auto')
    vs_version = SelectVisualStudioVersion(requested)
  return vs_version
def _ExtractImportantEnvironment(output_of_set):
"""Extracts environment variables required for the toolchain to run from
a textual dump output by the cmd.exe 'set' command."""
envvars_to_save = (
'goma_.*', # TODO(scottmg): This is ugly, but needed for goma.
'include',
'lib',
'libpath',
'path',
'pathext',
'systemroot',
'temp',
'tmp',
)
env = {}
for line in output_of_set.splitlines():
for envvar in envvars_to_save:
if re.match(envvar + '=', line.lower()):
var, setting = line.split('=', 1)
if envvar == 'path':
# Our own rules (for running gyp-win-tool) and other actions in
# Chromium rely on python being in the path. Add the path to this
# python here so that if it's not in the path when ninja is run
# later, python will still be found.
setting = os.path.dirname(sys.executable) + os.pathsep + setting
env[var.upper()] = setting
break
for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
if required not in env:
raise Exception('Environment variable "%s" '
'required to be set to valid path' % required)
return env
def _FormatAsEnvironmentBlock(envvar_dict):
  """Format as an 'environment block' directly suitable for CreateProcess.
  Briefly this is a list of key=value\0, terminated by an additional \0. See
  CreateProcess documentation for more details."""
  block = ''
  nul = '\0'
  for key, value in envvar_dict.iteritems():
    block += key + '=' + value + nul
  # The extra NUL terminates the whole block.
  block += nul
  return block
def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags):
  """It's not sufficient to have the absolute path to the compiler, linker,
  etc. on Windows, as those tools rely on .dlls being in the PATH. We also
  need to support both x86 and x64 compilers within the same build (to support
  msvs_target_platform hackery). Different architectures require a different
  compiler binary, and different supporting environment variables (INCLUDE,
  LIB, LIBPATH). So, we extract the environment here, wrap all invocations
  of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
  sets up the environment, and then we do not prefix the compiler with
  an absolute path, instead preferring something like "cl.exe" in the rule
  which will then run whichever the environment setup has put in the path.
  When the following procedure to generate environment files does not
  meet your requirement (e.g. for custom toolchains), you can pass
  "-G ninja_use_custom_environment_files" to the gyp to suppress file
  generation and use custom environment files prepared by yourself.
  NOTE(review): this duplicates (and, being defined later, shadows) the
  earlier GenerateEnvironmentFiles above; the only difference is that this
  variant uses OpenOutput directly instead of an open_out parameter."""
  archs = ('x86', 'x64')
  if generator_flags.get('ninja_use_custom_environment_files', 0):
    # User supplies their own environment files; just report bare 'cl.exe'.
    cl_paths = {}
    for arch in archs:
      cl_paths[arch] = 'cl.exe'
    return cl_paths
  vs = GetVSVersion(generator_flags)
  cl_paths = {}
  for arch in archs:
    # Extract environment variables for subprocesses.
    # Runs '<setup script> && set' through the shell so the script's
    # environment changes are visible to 'set'.
    args = vs.SetupScript(arch)
    args.extend(('&&', 'set'))
    popen = subprocess.Popen(
        args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    variables, _ = popen.communicate()
    env = _ExtractImportantEnvironment(variables)
    env_block = _FormatAsEnvironmentBlock(env)
    f = OpenOutput(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb')
    f.write(env_block)
    f.close()
    # Find cl.exe location for this architecture.
    args = vs.SetupScript(arch)
    args.extend(('&&',
      'for', '%i', 'in', '(cl.exe)', 'do', '@echo', 'LOC:%~$PATH:i'))
    popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE)
    output, _ = popen.communicate()
    cl_paths[arch] = _ExtractCLPath(output)
  return cl_paths
def _ExtractCLPath(output_of_where):
"""Gets the path to cl.exe based on the output of calling the environment
setup batch file, followed by the equivalent of `where`."""
# Take the first line, as that's the first found in the PATH.
for line in output_of_where.strip().splitlines():
if line.startswith('LOC:'):
return line[len('LOC:'):].strip()
# Ad-hoc manual-test statements left over from development; the one active
# print reports whether the detected VS install is SDK-based.
#print SelectVisualStudioVersion().DefaultToolset()
#GenerateEnvironmentFiles("D:\\src\\src1\\src\\out\\gn\\eraseme", {})
#print '"', GetVSVersion({}).Path(), '"'
print '"', GetVSVersion({}).sdk_based, '"'
#-------------------------------------------------------------------------------
# Extra per-version data; currently only the ATL/MFC include path for VS 2010.
version_info = {
  '2010': {
      'includes': [
        'VC\\atlmfc\\include',
      ],
  },
}
| bsd-3-clause |
mozilla/bedrock | tests/functional/firefox/test_all.py | 4 | 3785 | # -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
from pages.firefox.all import FirefoxAllPage
@pytest.mark.smoke
@pytest.mark.nondestructive
def test_firefox_release(base_url, selenium):
    """Firefox release download link carries the expected product/os/lang."""
    page = FirefoxAllPage(selenium, base_url).open()
    product = page.select_product('Firefox')
    product.select_platform('Windows 64-bit')
    product.select_language('English (US)')
    assert page.is_desktop_download_button_displayed
    assert page.is_desktop_download_link_valid
    # Bug fix: `assert 'a' and 'b' and 'c' in link` only tested the last
    # substring ('a'/'b' were truthy literals). Check each one explicitly.
    link = page.desktop_download_link
    assert 'product=firefox-latest-ssl' in link
    assert 'os=win64' in link
    assert 'lang=en-US' in link
@pytest.mark.smoke
@pytest.mark.nondestructive
def test_firefox_beta(base_url, selenium):
    """Firefox Beta download link carries the expected product/os/lang."""
    page = FirefoxAllPage(selenium, base_url).open()
    product = page.select_product('Firefox Beta')
    product.select_platform('macOS')
    product.select_language(u'German — Deutsch')
    assert page.is_desktop_download_button_displayed
    assert page.is_desktop_download_link_valid
    # Bug fix: `assert 'a' and 'b' and 'c' in link` only tested the last
    # substring ('a'/'b' were truthy literals). Check each one explicitly.
    link = page.desktop_download_link
    assert 'product=firefox-beta-latest-ssl' in link
    assert 'os=osx' in link
    assert 'lang=de' in link
@pytest.mark.smoke
@pytest.mark.nondestructive
def test_firefox_developer(base_url, selenium):
    """Developer Edition download link carries the expected product/os/lang."""
    page = FirefoxAllPage(selenium, base_url).open()
    product = page.select_product('Firefox Developer Edition')
    product.select_platform('Linux 64-bit')
    product.select_language('English (US)')
    assert page.is_desktop_download_button_displayed
    assert page.is_desktop_download_link_valid
    # Bug fix: `assert 'a' and 'b' and 'c' in link` only tested the last
    # substring ('a'/'b' were truthy literals). Check each one explicitly.
    link = page.desktop_download_link
    assert 'product=firefox-devedition-latest-ssl' in link
    assert 'os=linux64' in link
    assert 'lang=en-US' in link
@pytest.mark.smoke
@pytest.mark.nondestructive
def test_firefox_nightly(base_url, selenium):
    """Firefox Nightly download link carries the expected product/os/lang."""
    page = FirefoxAllPage(selenium, base_url).open()
    product = page.select_product('Firefox Nightly')
    product.select_platform('Windows 32-bit')
    product.select_language(u'German — Deutsch')
    assert page.is_desktop_download_button_displayed
    assert page.is_desktop_download_link_valid
    # Bug fix: `assert 'a' and 'b' and 'c' in link` only tested the last
    # substring ('a'/'b' were truthy literals). Check each one explicitly.
    link = page.desktop_download_link
    assert 'product=firefox-nightly-latest-ssl' in link
    assert 'os=win' in link
    assert 'lang=de' in link
@pytest.mark.smoke
@pytest.mark.nondestructive
def test_firefox_esr(base_url, selenium):
    """Firefox ESR download link carries the expected product/os/lang."""
    page = FirefoxAllPage(selenium, base_url).open()
    product = page.select_product('Firefox Extended Support Release')
    product.select_platform('Linux 32-bit')
    product.select_language('English (US)')
    assert page.is_desktop_download_button_displayed
    assert page.is_desktop_download_link_valid
    # Bug fix: `assert 'a' and 'b' and 'c' in link` only tested the last
    # substring ('a'/'b' were truthy literals). Check each one explicitly.
    link = page.desktop_download_link
    assert 'product=firefox-esr-latest-ssl' in link
    assert 'os=linux' in link
    assert 'lang=en-US' in link
@pytest.mark.smoke
@pytest.mark.nondestructive
def test_firefox_android(base_url, selenium):
    """Selecting the Firefox Android product shows its download button."""
    all_page = FirefoxAllPage(selenium, base_url).open()
    all_page.select_product('Firefox Android')
    assert all_page.is_android_download_button_displayed
@pytest.mark.smoke
@pytest.mark.nondestructive
def test_firefox_android_beta(base_url, selenium):
    """Selecting Firefox Android Beta shows its download button."""
    all_page = FirefoxAllPage(selenium, base_url).open()
    all_page.select_product('Firefox Android Beta')
    assert all_page.is_android_beta_download_button_displayed
@pytest.mark.smoke
@pytest.mark.nondestructive
def test_firefox_android_nightly(base_url, selenium):
    """Selecting Firefox Android Nightly shows its download button."""
    all_page = FirefoxAllPage(selenium, base_url).open()
    all_page.select_product('Firefox Android Nightly')
    assert all_page.is_android_nightly_download_button_displayed
@pytest.mark.smoke
@pytest.mark.nondestructive
def test_firefox_ios(base_url, selenium):
    """Selecting Firefox iOS shows its download button."""
    all_page = FirefoxAllPage(selenium, base_url).open()
    all_page.select_product('Firefox iOS')
    assert all_page.is_ios_download_button_displayed
| mpl-2.0 |
jallohm/django | django/core/handlers/base.py | 234 | 13346 | from __future__ import unicode_literals
import logging
import sys
import types
import warnings
from django import http
from django.conf import settings
from django.core import signals, urlresolvers
from django.core.exceptions import (
MiddlewareNotUsed, PermissionDenied, SuspiciousOperation,
)
from django.db import connections, transaction
from django.http.multipartparser import MultiPartParserError
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_text
from django.utils.module_loading import import_string
from django.views import debug
logger = logging.getLogger('django.request')
class BaseHandler(object):
    """Core request machinery shared by Django's handler subclasses:
    loads middleware, dispatches a request through the URL resolver, and
    converts exceptions into HTTP responses."""
    # Changes that are always applied to a response (in this order).
    response_fixes = [
        http.conditional_content_removal,
    ]
    def __init__(self):
        # Middleware method lists start as None; load_middleware() fills
        # them in. _request_middleware doubles as the "initialized" flag.
        self._request_middleware = None
        self._view_middleware = None
        self._template_response_middleware = None
        self._response_middleware = None
        self._exception_middleware = None
    def load_middleware(self):
        """
        Populate middleware lists from settings.MIDDLEWARE_CLASSES.
        Must be called after the environment is fixed (see __call__ in subclasses).
        """
        self._view_middleware = []
        self._template_response_middleware = []
        self._response_middleware = []
        self._exception_middleware = []
        request_middleware = []
        for middleware_path in settings.MIDDLEWARE_CLASSES:
            mw_class = import_string(middleware_path)
            try:
                mw_instance = mw_class()
            except MiddlewareNotUsed as exc:
                # A middleware may opt out at init time; only log in DEBUG.
                if settings.DEBUG:
                    if six.text_type(exc):
                        logger.debug('MiddlewareNotUsed(%r): %s', middleware_path, exc)
                    else:
                        logger.debug('MiddlewareNotUsed: %r', middleware_path)
                continue
            # request/view hooks run in settings order; template_response,
            # response and exception hooks are inserted at the front so they
            # run in reverse (onion) order.
            if hasattr(mw_instance, 'process_request'):
                request_middleware.append(mw_instance.process_request)
            if hasattr(mw_instance, 'process_view'):
                self._view_middleware.append(mw_instance.process_view)
            if hasattr(mw_instance, 'process_template_response'):
                self._template_response_middleware.insert(0, mw_instance.process_template_response)
            if hasattr(mw_instance, 'process_response'):
                self._response_middleware.insert(0, mw_instance.process_response)
            if hasattr(mw_instance, 'process_exception'):
                self._exception_middleware.insert(0, mw_instance.process_exception)
        # We only assign to this when initialization is complete as it is used
        # as a flag for initialization being complete.
        self._request_middleware = request_middleware
    def make_view_atomic(self, view):
        """Wrap *view* in transaction.atomic() for each database whose
        ATOMIC_REQUESTS setting is enabled, unless the view opted out
        (marked via a _non_atomic_requests attribute)."""
        non_atomic_requests = getattr(view, '_non_atomic_requests', set())
        for db in connections.all():
            if (db.settings_dict['ATOMIC_REQUESTS']
                    and db.alias not in non_atomic_requests):
                view = transaction.atomic(using=db.alias)(view)
        return view
    def get_exception_response(self, request, resolver, status_code, exception):
        """Call the configured error handler for *status_code* and return
        its response; falls back to uncaught-exception (500) handling if
        the handler itself raises."""
        try:
            callback, param_dict = resolver.resolve_error_handler(status_code)
            # Unfortunately, inspect.getargspec result is not trustable enough
            # depending on the callback wrapping in decorators (frequent for handlers).
            # Falling back on try/except:
            try:
                response = callback(request, **dict(param_dict, exception=exception))
            except TypeError:
                warnings.warn(
                    "Error handlers should accept an exception parameter. Update "
                    "your code as this parameter will be required in Django 2.0",
                    RemovedInDjango20Warning, stacklevel=2
                )
                response = callback(request, **param_dict)
        except:
            signals.got_request_exception.send(sender=self.__class__, request=request)
            response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
        return response
    def get_response(self, request):
        "Returns an HttpResponse object for the given HttpRequest"
        # Setup default url resolver for this thread, this code is outside
        # the try/except so we don't get a spurious "unbound local
        # variable" exception in the event an exception is raised before
        # resolver is set
        urlconf = settings.ROOT_URLCONF
        urlresolvers.set_urlconf(urlconf)
        resolver = urlresolvers.get_resolver(urlconf)
        # Use a flag to check if the response was rendered to prevent
        # multiple renderings or to force rendering if necessary.
        response_is_rendered = False
        try:
            response = None
            # Apply request middleware
            for middleware_method in self._request_middleware:
                response = middleware_method(request)
                if response:
                    break
            if response is None:
                if hasattr(request, 'urlconf'):
                    # Reset url resolver with a custom urlconf.
                    urlconf = request.urlconf
                    urlresolvers.set_urlconf(urlconf)
                    resolver = urlresolvers.get_resolver(urlconf)
                resolver_match = resolver.resolve(request.path_info)
                callback, callback_args, callback_kwargs = resolver_match
                request.resolver_match = resolver_match
                # Apply view middleware
                for middleware_method in self._view_middleware:
                    response = middleware_method(request, callback, callback_args, callback_kwargs)
                    if response:
                        break
            if response is None:
                wrapped_callback = self.make_view_atomic(callback)
                try:
                    response = wrapped_callback(request, *callback_args, **callback_kwargs)
                except Exception as e:
                    response = self.process_exception_by_middleware(e, request)
            # Complain if the view returned None (a common error).
            if response is None:
                if isinstance(callback, types.FunctionType):    # FBV
                    view_name = callback.__name__
                else:                                           # CBV
                    view_name = callback.__class__.__name__ + '.__call__'
                raise ValueError("The view %s.%s didn't return an HttpResponse object. It returned None instead."
                                 % (callback.__module__, view_name))
            # If the response supports deferred rendering, apply template
            # response middleware and then render the response
            if hasattr(response, 'render') and callable(response.render):
                for middleware_method in self._template_response_middleware:
                    response = middleware_method(request, response)
                    # Complain if the template response middleware returned None (a common error).
                    if response is None:
                        raise ValueError(
                            "%s.process_template_response didn't return an "
                            "HttpResponse object. It returned None instead."
                            % (middleware_method.__self__.__class__.__name__))
                try:
                    response = response.render()
                except Exception as e:
                    response = self.process_exception_by_middleware(e, request)
                response_is_rendered = True
        except http.Http404 as exc:
            logger.warning('Not Found: %s', request.path,
                           extra={
                               'status_code': 404,
                               'request': request
                           })
            if settings.DEBUG:
                response = debug.technical_404_response(request, exc)
            else:
                response = self.get_exception_response(request, resolver, 404, exc)
        except PermissionDenied as exc:
            logger.warning(
                'Forbidden (Permission denied): %s', request.path,
                extra={
                    'status_code': 403,
                    'request': request
                })
            response = self.get_exception_response(request, resolver, 403, exc)
        except MultiPartParserError as exc:
            logger.warning(
                'Bad request (Unable to parse request body): %s', request.path,
                extra={
                    'status_code': 400,
                    'request': request
                })
            response = self.get_exception_response(request, resolver, 400, exc)
        except SuspiciousOperation as exc:
            # The request logger receives events for any problematic request
            # The security logger receives events for all SuspiciousOperations
            security_logger = logging.getLogger('django.security.%s' %
                            exc.__class__.__name__)
            security_logger.error(
                force_text(exc),
                extra={
                    'status_code': 400,
                    'request': request
                })
            if settings.DEBUG:
                return debug.technical_500_response(request, *sys.exc_info(), status_code=400)
            response = self.get_exception_response(request, resolver, 400, exc)
        except SystemExit:
            # Allow sys.exit() to actually exit. See tickets #1023 and #4701
            raise
        except:  # Handle everything else.
            # Get the exception info now, in case another exception is thrown later.
            signals.got_request_exception.send(sender=self.__class__, request=request)
            response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
        try:
            # Apply response middleware, regardless of the response
            for middleware_method in self._response_middleware:
                response = middleware_method(request, response)
                # Complain if the response middleware returned None (a common error).
                if response is None:
                    raise ValueError(
                        "%s.process_response didn't return an "
                        "HttpResponse object. It returned None instead."
                        % (middleware_method.__self__.__class__.__name__))
            response = self.apply_response_fixes(request, response)
        except:  # Any exception should be gathered and handled
            signals.got_request_exception.send(sender=self.__class__, request=request)
            response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
        # Keep the request alive until the response is closed (e.g. for
        # request_finished signals fired from response closing).
        response._closable_objects.append(request)
        # If the exception handler returns a TemplateResponse that has not
        # been rendered, force it to be rendered.
        if not response_is_rendered and callable(getattr(response, 'render', None)):
            response = response.render()
        return response
    def process_exception_by_middleware(self, exception, request):
        """
        Pass the exception to the exception middleware. If no middleware
        return a response for this exception, raise it.
        """
        for middleware_method in self._exception_middleware:
            response = middleware_method(request, exception)
            if response:
                return response
        raise
    def handle_uncaught_exception(self, request, resolver, exc_info):
        """
        Processing for any otherwise uncaught exceptions (those that will
        generate HTTP 500 responses). Can be overridden by subclasses who want
        customised 500 handling.
        Be *very* careful when overriding this because the error could be
        caused by anything, so assuming something like the database is always
        available would be an error.
        """
        if settings.DEBUG_PROPAGATE_EXCEPTIONS:
            raise
        logger.error('Internal Server Error: %s', request.path,
            exc_info=exc_info,
            extra={
                'status_code': 500,
                'request': request
            }
        )
        if settings.DEBUG:
            return debug.technical_500_response(request, *exc_info)
        # If Http500 handler is not installed, re-raise last exception
        if resolver.urlconf_module is None:
            six.reraise(*exc_info)
        # Return an HttpResponse that displays a friendly error message.
        callback, param_dict = resolver.resolve_error_handler(500)
        return callback(request, **param_dict)
    def apply_response_fixes(self, request, response):
        """
        Applies each of the functions in self.response_fixes to the request and
        response, modifying the response in the process. Returns the new
        response.
        """
        for func in self.response_fixes:
            response = func(request, response)
        return response
| bsd-3-clause |
40223210/2015cd_midterm | static/Brython3.1.1-20150328-091302/Lib/logging/config.py | 739 | 35619 | # Copyright 2001-2013 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
Configuration functions for the logging package for Python. The core package
is based on PEP 282 and comments thereto in comp.lang.python, and influenced
by Apache's log4j system.
Copyright (C) 2001-2013 Vinay Sajip. All Rights Reserved.
To use, simply 'import logging' and log away!
"""
import sys, logging, logging.handlers, socket, struct, traceback, re
import io
try:
import _thread as thread
import threading
except ImportError: #pragma: no cover
thread = None
from socketserver import ThreadingTCPServer, StreamRequestHandler
# Default TCP port for the on-the-fly configuration listener below.
DEFAULT_LOGGING_CONFIG_PORT = 9030
# Platform-specific "connection reset by peer" error code.
if sys.platform == "win32":
    RESET_ERROR = 10054   #WSAECONNRESET
else:
    RESET_ERROR = 104   #ECONNRESET
#
#   The following code implements a socket listener for on-the-fly
#   reconfiguration of logging.
#
#   _listener holds the server object doing the listening
_listener = None
def fileConfig(fname, defaults=None, disable_existing_loggers=True):
    """
    Read the logging configuration from a ConfigParser-format file.
    This can be called several times from an application, allowing an end user
    the ability to select from various pre-canned configurations (if the
    developer provides a mechanism to present the choices and load the chosen
    configuration).
    """
    import configparser
    cp = configparser.ConfigParser(defaults)
    # Accept either a file-like object (anything with readline) or a filename.
    if hasattr(fname, 'readline'):
        cp.read_file(fname)
    else:
        cp.read(fname)
    formatters = _create_formatters(cp)
    # critical section: hold the module lock while handlers and loggers are
    # being torn down and rebuilt so concurrent logging calls see a
    # consistent state.
    logging._acquireLock()
    try:
        logging._handlers.clear()
        del logging._handlerList[:]
        # Handlers add themselves to logging._handlers
        handlers = _install_handlers(cp, formatters)
        _install_loggers(cp, handlers, disable_existing_loggers)
    finally:
        logging._releaseLock()
def _resolve(name):
"""Resolve a dotted name to a global object."""
name = name.split('.')
used = name.pop(0)
found = __import__(used)
for n in name:
used = used + '.' + n
try:
found = getattr(found, n)
except AttributeError:
__import__(used)
found = getattr(found, n)
return found
def _strip_spaces(alist):
return map(lambda x: x.strip(), alist)
def _create_formatters(cp):
    """Create and return the formatters named in the [formatters] section,
    keyed by name."""
    keys_value = cp["formatters"]["keys"]
    if not len(keys_value):
        return {}
    formatters = {}
    for name in [part.strip() for part in keys_value.split(",")]:
        section = "formatter_%s" % name
        fmt = cp.get(section, "format", raw=True, fallback=None)
        datefmt = cp.get(section, "datefmt", raw=True, fallback=None)
        factory = logging.Formatter
        class_name = cp[section].get("class")
        if class_name:
            # A custom Formatter subclass was requested; resolve the dotted
            # path to the class object.
            factory = _resolve(class_name)
        formatters[name] = factory(fmt, datefmt)
    return formatters
def _install_handlers(cp, formatters):
    """Install and return handlers named in the [handlers] section, keyed by
    name. MemoryHandler targets are wired up after all handlers exist."""
    hlist = cp["handlers"]["keys"]
    if not len(hlist):
        return {}
    hlist = hlist.split(",")
    hlist = _strip_spaces(hlist)
    handlers = {}
    fixups = [] #for inter-handler references
    for hand in hlist:
        section = cp["handler_%s" % hand]
        klass = section["class"]
        fmt = section.get("formatter", "")
        try:
            # SECURITY: eval() of a config-supplied string -- the config file
            # must be trusted, as arbitrary code can execute here.
            klass = eval(klass, vars(logging))
        except (AttributeError, NameError):
            klass = _resolve(klass)
        args = section["args"]
        # Same trust caveat: the 'args' value is evaluated as Python source.
        args = eval(args, vars(logging))
        h = klass(*args)
        if "level" in section:
            level = section["level"]
            h.setLevel(logging._levelNames[level])
        if len(fmt):
            h.setFormatter(formatters[fmt])
        if issubclass(klass, logging.handlers.MemoryHandler):
            target = section.get("target", "")
            if len(target): #the target handler may not be loaded yet, so keep for later...
                fixups.append((h, target))
        handlers[hand] = h
    #now all handlers are loaded, fixup inter-handler references...
    for h, t in fixups:
        h.setTarget(handlers[t])
    return handlers
def _handle_existing_loggers(existing, child_loggers, disable_existing):
"""
When (re)configuring logging, handle loggers which were in the previous
configuration but are not in the new configuration. There's no point
deleting them as other threads may continue to hold references to them;
and by disabling them, you stop them doing any logging.
However, don't disable children of named loggers, as that's probably not
what was intended by the user. Also, allow existing loggers to NOT be
disabled if disable_existing is false.
"""
root = logging.root
for log in existing:
logger = root.manager.loggerDict[log]
if log in child_loggers:
logger.level = logging.NOTSET
logger.handlers = []
logger.propagate = True
else:
logger.disabled = disable_existing
def _install_loggers(cp, handlers, disable_existing):
    """Create and install loggers.

    *cp* is the ConfigParser-like object, *handlers* the dict produced by
    _install_handlers. The root logger is configured first, then every
    logger named in the "loggers" section's "keys" list. Pre-existing
    loggers not named in the new configuration are handed off to
    _handle_existing_loggers at the end.
    """
    # configure the root first
    llist = cp["loggers"]["keys"]
    llist = llist.split(",")
    llist = list(map(lambda x: x.strip(), llist))
    # "root" is mandatory in the keys list and is handled specially below,
    # so remove it from the list of ordinary loggers.
    llist.remove("root")
    section = cp["logger_root"]
    root = logging.root
    log = root
    if "level" in section:
        level = section["level"]
        log.setLevel(logging._levelNames[level])
    # Drop any handlers previously attached to root (iterate a copy,
    # since removeHandler mutates the list).
    for h in root.handlers[:]:
        root.removeHandler(h)
    hlist = section["handlers"]
    if len(hlist):
        hlist = hlist.split(",")
        hlist = _strip_spaces(hlist)
        for hand in hlist:
            log.addHandler(handlers[hand])
    #and now the others...
    #we don't want to lose the existing loggers,
    #since other threads may have pointers to them.
    #existing is set to contain all existing loggers,
    #and as we go through the new configuration we
    #remove any which are configured. At the end,
    #what's left in existing is the set of loggers
    #which were in the previous configuration but
    #which are not in the new configuration.
    existing = list(root.manager.loggerDict.keys())
    #The list needs to be sorted so that we can
    #avoid disabling child loggers of explicitly
    #named loggers. With a sorted list it is easier
    #to find the child loggers.
    existing.sort()
    #We'll keep the list of existing loggers
    #which are children of named loggers here...
    child_loggers = []
    #now set up the new ones...
    for log in llist:
        section = cp["logger_%s" % log]
        # "qualname" is the dotted name used at runtime via getLogger();
        # the section suffix is only a config-file label.
        qn = section["qualname"]
        propagate = section.getint("propagate", fallback=1)
        logger = logging.getLogger(qn)
        if qn in existing:
            # Because `existing` is sorted, all descendants of qn sit in a
            # contiguous run right after it; collect them as child_loggers
            # so they are reset rather than disabled later.
            i = existing.index(qn) + 1 # start with the entry after qn
            prefixed = qn + "."
            pflen = len(prefixed)
            num_existing = len(existing)
            while i < num_existing:
                if existing[i][:pflen] == prefixed:
                    child_loggers.append(existing[i])
                i += 1
            existing.remove(qn)
        if "level" in section:
            level = section["level"]
            logger.setLevel(logging._levelNames[level])
        for h in logger.handlers[:]:
            logger.removeHandler(h)
        logger.propagate = propagate
        logger.disabled = 0
        hlist = section["handlers"]
        if len(hlist):
            hlist = hlist.split(",")
            hlist = _strip_spaces(hlist)
            for hand in hlist:
                logger.addHandler(handlers[hand])
    #Disable any old loggers. There's no point deleting
    #them as other threads may continue to hold references
    #and by disabling them, you stop them doing any logging.
    #However, don't disable children of named loggers, as that's
    #probably not what was intended by the user.
    #for log in existing:
    #    logger = root.manager.loggerDict[log]
    #    if log in child_loggers:
    #        logger.level = logging.NOTSET
    #        logger.handlers = []
    #        logger.propagate = 1
    #    elif disable_existing_loggers:
    #        logger.disabled = 1
    _handle_existing_loggers(existing, child_loggers, disable_existing)
# ASCII identifier pattern (case-insensitive): letter/underscore followed by
# letters, digits or underscores.
IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)

def valid_ident(s):
    """Return True if *s* is a valid Python identifier; raise ValueError
    otherwise (never returns False)."""
    if IDENTIFIER.match(s) is None:
        raise ValueError('Not a valid Python identifier: %r' % s)
    return True
# The ConvertingXXX classes are wrappers around standard Python containers,
# and they serve to convert any suitable values in the container. The
# conversion converts base dicts, lists and tuples to their wrapped
# equivalents, whereas strings which match a conversion format are converted
# appropriately.
#
# Each wrapper should have a configurator attribute holding the actual
# configurator to use for conversion.
class ConvertingDict(dict):
    """A converting dictionary wrapper.

    Access through __getitem__/get/pop runs values through
    self.configurator.convert(); converted results replace the stored
    value (where possible) so conversion happens at most once, and
    converting containers get back-links (parent/key) to this dict.
    """

    def __getitem__(self, key):
        """Look up *key*, converting (and caching) the stored value."""
        raw = dict.__getitem__(self, key)
        converted = self.configurator.convert(raw)
        if raw is not converted:
            # Store the converted value so we never convert it again.
            self[key] = converted
            if type(converted) in (ConvertingDict, ConvertingList,
                                   ConvertingTuple):
                converted.parent = self
                converted.key = key
        return converted

    def get(self, key, default=None):
        """Like dict.get, but converts (and caches) the returned value."""
        raw = dict.get(self, key, default)
        converted = self.configurator.convert(raw)
        if raw is not converted:
            self[key] = converted
            if type(converted) in (ConvertingDict, ConvertingList,
                                   ConvertingTuple):
                converted.parent = self
                converted.key = key
        return converted

    def pop(self, key, default=None):
        """Like dict.pop, but converts the popped value (nothing to cache,
        since the key is gone)."""
        raw = dict.pop(self, key, default)
        converted = self.configurator.convert(raw)
        if raw is not converted:
            if type(converted) in (ConvertingDict, ConvertingList,
                                   ConvertingTuple):
                converted.parent = self
                converted.key = key
        return converted
class ConvertingList(list):
    """A converting list wrapper.

    Indexing and pop() run values through self.configurator.convert();
    see ConvertingDict for the caching/back-link behaviour.
    """

    def __getitem__(self, key):
        """Index into the list, converting (and caching) the stored value."""
        raw = list.__getitem__(self, key)
        converted = self.configurator.convert(raw)
        if raw is not converted:
            self[key] = converted
            if type(converted) in (ConvertingDict, ConvertingList,
                                   ConvertingTuple):
                converted.parent = self
                converted.key = key
        return converted

    def pop(self, idx=-1):
        """Like list.pop, but converts the popped value (not cached back)."""
        raw = list.pop(self, idx)
        converted = self.configurator.convert(raw)
        if raw is not converted:
            if type(converted) in (ConvertingDict, ConvertingList,
                                   ConvertingTuple):
                converted.parent = self
        return converted
class ConvertingTuple(tuple):
    """A converting tuple wrapper.

    Tuples are immutable, so converted values cannot be cached back;
    conversion is repeated on every access.
    """

    def __getitem__(self, key):
        """Index into the tuple, converting the stored value on the fly."""
        raw = tuple.__getitem__(self, key)
        converted = self.configurator.convert(raw)
        if raw is not converted:
            if type(converted) in (ConvertingDict, ConvertingList,
                                   ConvertingTuple):
                converted.parent = self
                converted.key = key
        return converted
class BaseConfigurator(object):
    """
    The configurator base class which defines some useful defaults.
    """
    # Matches 'prefix://suffix' conversion strings, e.g. 'ext://sys.stderr'.
    CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')

    # Tokens of the cfg:// mini-language: leading word, '.attr' steps and
    # '[index]' steps; DIGIT_PATTERN decides whether an index is numeric.
    WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
    DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
    INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
    DIGIT_PATTERN = re.compile(r'^\d+$')

    # Maps conversion prefixes to the method name that handles them.
    value_converters = {
        'ext' : 'ext_convert',
        'cfg' : 'cfg_convert',
    }

    # We might want to use a different one, e.g. importlib
    importer = staticmethod(__import__)

    def __init__(self, config):
        # Wrap the raw config dict so lookups convert values lazily.
        self.config = ConvertingDict(config)
        self.config.configurator = self

    def resolve(self, s):
        """
        Resolve strings to objects using standard import and attribute
        syntax.
        """
        name = s.split('.')
        used = name.pop(0)
        try:
            found = self.importer(used)
            for frag in name:
                used += '.' + frag
                try:
                    found = getattr(found, frag)
                except AttributeError:
                    # Possibly an unimported submodule: import and retry.
                    self.importer(used)
                    found = getattr(found, frag)
            return found
        except ImportError:
            # Re-raise as ValueError but preserve cause and traceback.
            e, tb = sys.exc_info()[1:]
            v = ValueError('Cannot resolve %r: %s' % (s, e))
            v.__cause__, v.__traceback__ = e, tb
            raise v

    def ext_convert(self, value):
        """Default converter for the ext:// protocol."""
        return self.resolve(value)

    def cfg_convert(self, value):
        """Default converter for the cfg:// protocol.

        Walks the configuration itself using a tiny path language, e.g.
        'handlers[console]' or 'loggers.mylogger[0]'.
        """
        rest = value
        m = self.WORD_PATTERN.match(rest)
        if m is None:
            raise ValueError("Unable to convert %r" % value)
        else:
            rest = rest[m.end():]
            d = self.config[m.groups()[0]]
            #print d, rest
            while rest:
                m = self.DOT_PATTERN.match(rest)
                if m:
                    d = d[m.groups()[0]]
                else:
                    m = self.INDEX_PATTERN.match(rest)
                    if m:
                        idx = m.groups()[0]
                        if not self.DIGIT_PATTERN.match(idx):
                            d = d[idx]
                        else:
                            try:
                                n = int(idx) # try as number first (most likely)
                                d = d[n]
                            except TypeError:
                                # Container is keyed by strings after all.
                                d = d[idx]
                if m:
                    rest = rest[m.end():]
                else:
                    raise ValueError('Unable to convert '
                                     '%r at %r' % (value, rest))
        #rest should be empty
        return d

    def convert(self, value):
        """
        Convert values to an appropriate type. dicts, lists and tuples are
        replaced by their converting alternatives. Strings are checked to
        see if they have a conversion format and are converted if they do.
        """
        if not isinstance(value, ConvertingDict) and isinstance(value, dict):
            value = ConvertingDict(value)
            value.configurator = self
        elif not isinstance(value, ConvertingList) and isinstance(value, list):
            value = ConvertingList(value)
            value.configurator = self
        elif not isinstance(value, ConvertingTuple) and\
                 isinstance(value, tuple):
            value = ConvertingTuple(value)
            value.configurator = self
        elif isinstance(value, str): # str for py3k
            m = self.CONVERT_PATTERN.match(value)
            if m:
                d = m.groupdict()
                prefix = d['prefix']
                converter = self.value_converters.get(prefix, None)
                if converter:
                    suffix = d['suffix']
                    converter = getattr(self, converter)
                    value = converter(suffix)
        return value

    def configure_custom(self, config):
        """Configure an object with a user-supplied factory.

        '()' names the factory (callable or dotted path); '.' holds a dict
        of attributes to set on the result; every other valid-identifier
        key becomes a keyword argument to the factory.
        """
        c = config.pop('()')
        if not callable(c):
            c = self.resolve(c)
        props = config.pop('.', None)
        # Check for valid identifiers
        kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
        result = c(**kwargs)
        if props:
            for name, value in props.items():
                setattr(result, name, value)
        return result

    def as_tuple(self, value):
        """Utility function which converts lists to tuples."""
        if isinstance(value, list):
            value = tuple(value)
        return value
class DictConfigurator(BaseConfigurator):
    """
    Configure logging using a dictionary-like object to describe the
    configuration.
    """

    def configure(self):
        """Do the configuration.

        The whole operation runs under the logging module lock. With
        incremental=True only handler/logger levels and logger settings
        are touched; otherwise the existing handler set is discarded and
        rebuilt from scratch (formatters, then filters, then handlers,
        then loggers, then root).
        """
        config = self.config
        if 'version' not in config:
            raise ValueError("dictionary doesn't specify a version")
        if config['version'] != 1:
            raise ValueError("Unsupported version: %s" % config['version'])
        incremental = config.pop('incremental', False)
        EMPTY_DICT = {}
        logging._acquireLock()
        try:
            if incremental:
                handlers = config.get('handlers', EMPTY_DICT)
                for name in handlers:
                    if name not in logging._handlers:
                        raise ValueError('No handler found with '
                                         'name %r' % name)
                    else:
                        try:
                            handler = logging._handlers[name]
                            handler_config = handlers[name]
                            level = handler_config.get('level', None)
                            if level:
                                handler.setLevel(logging._checkLevel(level))
                        except Exception as e:
                            raise ValueError('Unable to configure handler '
                                             '%r: %s' % (name, e))
                loggers = config.get('loggers', EMPTY_DICT)
                for name in loggers:
                    try:
                        self.configure_logger(name, loggers[name], True)
                    except Exception as e:
                        raise ValueError('Unable to configure logger '
                                         '%r: %s' % (name, e))
                root = config.get('root', None)
                if root:
                    try:
                        self.configure_root(root, True)
                    except Exception as e:
                        raise ValueError('Unable to configure root '
                                         'logger: %s' % e)
            else:
                disable_existing = config.pop('disable_existing_loggers', True)
                # Full reconfiguration: forget every registered handler.
                logging._handlers.clear()
                del logging._handlerList[:]
                # Do formatters first - they don't refer to anything else
                formatters = config.get('formatters', EMPTY_DICT)
                for name in formatters:
                    try:
                        formatters[name] = self.configure_formatter(
                            formatters[name])
                    except Exception as e:
                        raise ValueError('Unable to configure '
                                         'formatter %r: %s' % (name, e))
                # Next, do filters - they don't refer to anything else, either
                filters = config.get('filters', EMPTY_DICT)
                for name in filters:
                    try:
                        filters[name] = self.configure_filter(filters[name])
                    except Exception as e:
                        raise ValueError('Unable to configure '
                                         'filter %r: %s' % (name, e))
                # Next, do handlers - they refer to formatters and filters
                # As handlers can refer to other handlers, sort the keys
                # to allow a deterministic order of configuration
                handlers = config.get('handlers', EMPTY_DICT)
                deferred = []
                for name in sorted(handlers):
                    try:
                        handler = self.configure_handler(handlers[name])
                        handler.name = name
                        handlers[name] = handler
                    except Exception as e:
                        # configure_handler raises this specific message
                        # when a MemoryHandler's target handler has not been
                        # built yet; retry those after the first pass.
                        if 'target not configured yet' in str(e):
                            deferred.append(name)
                        else:
                            raise ValueError('Unable to configure handler '
                                             '%r: %s' % (name, e))
                # Now do any that were deferred
                for name in deferred:
                    try:
                        handler = self.configure_handler(handlers[name])
                        handler.name = name
                        handlers[name] = handler
                    except Exception as e:
                        raise ValueError('Unable to configure handler '
                                         '%r: %s' % (name, e))
                # Next, do loggers - they refer to handlers and filters
                #we don't want to lose the existing loggers,
                #since other threads may have pointers to them.
                #existing is set to contain all existing loggers,
                #and as we go through the new configuration we
                #remove any which are configured. At the end,
                #what's left in existing is the set of loggers
                #which were in the previous configuration but
                #which are not in the new configuration.
                root = logging.root
                existing = list(root.manager.loggerDict.keys())
                #The list needs to be sorted so that we can
                #avoid disabling child loggers of explicitly
                #named loggers. With a sorted list it is easier
                #to find the child loggers.
                existing.sort()
                #We'll keep the list of existing loggers
                #which are children of named loggers here...
                child_loggers = []
                #now set up the new ones...
                loggers = config.get('loggers', EMPTY_DICT)
                for name in loggers:
                    if name in existing:
                        # Descendants of a configured logger form a
                        # contiguous run in the sorted list right after it.
                        i = existing.index(name) + 1 # look after name
                        prefixed = name + "."
                        pflen = len(prefixed)
                        num_existing = len(existing)
                        while i < num_existing:
                            if existing[i][:pflen] == prefixed:
                                child_loggers.append(existing[i])
                            i += 1
                        existing.remove(name)
                    try:
                        self.configure_logger(name, loggers[name])
                    except Exception as e:
                        raise ValueError('Unable to configure logger '
                                         '%r: %s' % (name, e))
                #Disable any old loggers. There's no point deleting
                #them as other threads may continue to hold references
                #and by disabling them, you stop them doing any logging.
                #However, don't disable children of named loggers, as that's
                #probably not what was intended by the user.
                #for log in existing:
                #    logger = root.manager.loggerDict[log]
                #    if log in child_loggers:
                #        logger.level = logging.NOTSET
                #        logger.handlers = []
                #        logger.propagate = True
                #    elif disable_existing:
                #        logger.disabled = True
                _handle_existing_loggers(existing, child_loggers,
                                         disable_existing)
                # And finally, do the root logger
                root = config.get('root', None)
                if root:
                    try:
                        self.configure_root(root)
                    except Exception as e:
                        raise ValueError('Unable to configure root '
                                         'logger: %s' % e)
        finally:
            logging._releaseLock()

    def configure_formatter(self, config):
        """Configure a formatter from a dictionary.

        Either a custom '()' factory, or fmt/datefmt/style passed to
        logging.Formatter.
        """
        if '()' in config:
            factory = config['()'] # for use in exception handler
            try:
                result = self.configure_custom(config)
            except TypeError as te:
                if "'format'" not in str(te):
                    raise
                #Name of parameter changed from fmt to format.
                #Retry with old name.
                #This is so that code can be used with older Python versions
                #(e.g. by Django)
                config['fmt'] = config.pop('format')
                config['()'] = factory
                result = self.configure_custom(config)
        else:
            fmt = config.get('format', None)
            dfmt = config.get('datefmt', None)
            style = config.get('style', '%')
            result = logging.Formatter(fmt, dfmt, style)
        return result

    def configure_filter(self, config):
        """Configure a filter from a dictionary."""
        if '()' in config:
            result = self.configure_custom(config)
        else:
            name = config.get('name', '')
            result = logging.Filter(name)
        return result

    def add_filters(self, filterer, filters):
        """Add filters to a filterer from a list of names."""
        for f in filters:
            try:
                filterer.addFilter(self.config['filters'][f])
            except Exception as e:
                raise ValueError('Unable to add filter %r: %s' % (f, e))

    def configure_handler(self, config):
        """Configure a handler from a dictionary.

        Raises TypeError('target not configured yet') when a MemoryHandler
        refers to a target handler that has not been built; configure()
        catches that message and retries the handler in a second pass.
        """
        config_copy = dict(config) # for restoring in case of error
        formatter = config.pop('formatter', None)
        if formatter:
            try:
                formatter = self.config['formatters'][formatter]
            except Exception as e:
                raise ValueError('Unable to set formatter '
                                 '%r: %s' % (formatter, e))
        level = config.pop('level', None)
        filters = config.pop('filters', None)
        if '()' in config:
            c = config.pop('()')
            if not callable(c):
                c = self.resolve(c)
            factory = c
        else:
            cname = config.pop('class')
            klass = self.resolve(cname)
            #Special case for handler which refers to another handler
            if issubclass(klass, logging.handlers.MemoryHandler) and\
                'target' in config:
                try:
                    th = self.config['handlers'][config['target']]
                    if not isinstance(th, logging.Handler):
                        config.update(config_copy) # restore for deferred cfg
                        raise TypeError('target not configured yet')
                    config['target'] = th
                except Exception as e:
                    raise ValueError('Unable to set target handler '
                                     '%r: %s' % (config['target'], e))
            elif issubclass(klass, logging.handlers.SMTPHandler) and\
                'mailhost' in config:
                config['mailhost'] = self.as_tuple(config['mailhost'])
            elif issubclass(klass, logging.handlers.SysLogHandler) and\
                'address' in config:
                config['address'] = self.as_tuple(config['address'])
            factory = klass
        kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
        try:
            result = factory(**kwargs)
        except TypeError as te:
            if "'stream'" not in str(te):
                raise
            #The argument name changed from strm to stream
            #Retry with old name.
            #This is so that code can be used with older Python versions
            #(e.g. by Django)
            kwargs['strm'] = kwargs.pop('stream')
            result = factory(**kwargs)
        if formatter:
            result.setFormatter(formatter)
        if level is not None:
            result.setLevel(logging._checkLevel(level))
        if filters:
            self.add_filters(result, filters)
        return result

    def add_handlers(self, logger, handlers):
        """Add handlers to a logger from a list of names."""
        for h in handlers:
            try:
                logger.addHandler(self.config['handlers'][h])
            except Exception as e:
                raise ValueError('Unable to add handler %r: %s' % (h, e))

    def common_logger_config(self, logger, config, incremental=False):
        """
        Perform configuration which is common to root and non-root loggers.
        """
        level = config.get('level', None)
        if level is not None:
            logger.setLevel(logging._checkLevel(level))
        if not incremental:
            #Remove any existing handlers
            for h in logger.handlers[:]:
                logger.removeHandler(h)
            handlers = config.get('handlers', None)
            if handlers:
                self.add_handlers(logger, handlers)
            filters = config.get('filters', None)
            if filters:
                self.add_filters(logger, filters)

    def configure_logger(self, name, config, incremental=False):
        """Configure a non-root logger from a dictionary."""
        logger = logging.getLogger(name)
        self.common_logger_config(logger, config, incremental)
        propagate = config.get('propagate', None)
        if propagate is not None:
            logger.propagate = propagate

    def configure_root(self, config, incremental=False):
        """Configure a root logger from a dictionary."""
        root = logging.getLogger()
        self.common_logger_config(root, config, incremental)
# Hook allowing users to substitute their own configurator class.
dictConfigClass = DictConfigurator

def dictConfig(config):
    """Configure logging using a dictionary."""
    configurator = dictConfigClass(config)
    configurator.configure()
def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
    """
    Start up a socket server on the specified port, and listen for new
    configurations.

    These will be sent as a file suitable for processing by fileConfig().
    Returns a Thread object on which you can call start() to start the server,
    and which you can join() when appropriate. To stop the server, call
    stopListening().

    SECURITY NOTE(review): received configurations are applied verbatim via
    dictConfig()/fileConfig(); the fileConfig path eval()s parts of the
    config (see _install_handlers), so this should only be exposed on
    trusted networks.
    """
    if not thread: #pragma: no cover
        raise NotImplementedError("listen() needs threading to work")

    class ConfigStreamHandler(StreamRequestHandler):
        """
        Handler for a logging configuration request.

        It expects a completely new logging configuration and uses fileConfig
        to install it.
        """
        def handle(self):
            """
            Handle a request.

            Each request is expected to be a 4-byte length, packed using
            struct.pack(">L", n), followed by the config file.
            Uses fileConfig() to do the grunt work.
            """
            try:
                conn = self.connection
                chunk = conn.recv(4)
                if len(chunk) == 4:
                    slen = struct.unpack(">L", chunk)[0]
                    chunk = self.connection.recv(slen)
                    # Keep reading until the whole payload has arrived.
                    while len(chunk) < slen:
                        chunk = chunk + conn.recv(slen - len(chunk))
                    chunk = chunk.decode("utf-8")
                    # Try the payload as JSON (dictConfig) first; anything
                    # that fails to parse falls back to fileConfig format.
                    try:
                        import json
                        d =json.loads(chunk)
                        assert isinstance(d, dict)
                        dictConfig(d)
                    except:
                        #Apply new configuration.
                        file = io.StringIO(chunk)
                        try:
                            fileConfig(file)
                        except (KeyboardInterrupt, SystemExit): #pragma: no cover
                            raise
                        except:
                            traceback.print_exc()
                    if self.server.ready:
                        self.server.ready.set()
            except socket.error as e:
                if not isinstance(e.args, tuple):
                    raise
                else:
                    # Ignore connection resets; re-raise anything else.
                    errcode = e.args[0]
                    if errcode != RESET_ERROR:
                        raise

    class ConfigSocketReceiver(ThreadingTCPServer):
        """
        A simple TCP socket-based logging config receiver.
        """

        allow_reuse_address = 1

        def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT,
                     handler=None, ready=None):
            ThreadingTCPServer.__init__(self, (host, port), handler)
            # self.abort is read/written under the logging lock.
            logging._acquireLock()
            self.abort = 0
            logging._releaseLock()
            self.timeout = 1
            self.ready = ready

        def serve_until_stopped(self):
            import select
            abort = 0
            while not abort:
                # Poll with a timeout so the abort flag is re-checked
                # about once a second even when no client connects.
                rd, wr, ex = select.select([self.socket.fileno()],
                                           [], [],
                                           self.timeout)
                if rd:
                    self.handle_request()
                logging._acquireLock()
                abort = self.abort
                logging._releaseLock()
            self.socket.close()

    class Server(threading.Thread):

        def __init__(self, rcvr, hdlr, port):
            super(Server, self).__init__()
            self.rcvr = rcvr
            self.hdlr = hdlr
            self.port = port
            # Set once the server socket is bound and listening.
            self.ready = threading.Event()

        def run(self):
            server = self.rcvr(port=self.port, handler=self.hdlr,
                               ready=self.ready)
            if self.port == 0:
                # Port 0 means "pick a free port"; publish the real one.
                self.port = server.server_address[1]
            self.ready.set()
            global _listener
            logging._acquireLock()
            _listener = server
            logging._releaseLock()
            server.serve_until_stopped()

    return Server(ConfigSocketReceiver, ConfigStreamHandler, port)
def stopListening():
    """
    Stop the listening server which was created with a call to listen().
    """
    global _listener
    logging._acquireLock()
    try:
        server = _listener
        if server:
            # The serve loop polls this flag (under the same lock) and
            # shuts the socket down when it sees it set.
            server.abort = 1
            _listener = None
    finally:
        logging._releaseLock()
| gpl-3.0 |
jamielennox/tempest | tempest/common/ssh.py | 11 | 5939 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import cStringIO
import select
import socket
import time
import warnings
import six
from tempest import exceptions
from tempest.openstack.common import log as logging
with warnings.catch_warnings():
warnings.simplefilter("ignore")
import paramiko
LOG = logging.getLogger(__name__)
class Client(object):
    """Thin SSH client used by tempest tests to run commands on guests.

    Wraps paramiko.SSHClient with retry-until-timeout connection logic and
    a poll-based exec_command that captures stdout/stderr in memory.
    """

    def __init__(self, host, username, password=None, timeout=300, pkey=None,
                 channel_timeout=10, look_for_keys=False, key_filename=None):
        self.host = host
        self.username = username
        self.password = password
        # A private key passed as a string is parsed into an RSAKey object.
        if isinstance(pkey, six.string_types):
            pkey = paramiko.RSAKey.from_private_key(
                cStringIO.StringIO(str(pkey)))
        self.pkey = pkey
        self.look_for_keys = look_for_keys
        self.key_filename = key_filename
        # timeout: overall budget (seconds) for establishing a connection;
        # channel_timeout: per-attempt TCP timeout and the poll interval
        # (milliseconds) used by exec_command.
        self.timeout = int(timeout)
        self.channel_timeout = float(channel_timeout)
        self.buf_size = 1024

    def _get_ssh_connection(self, sleep=1.5, backoff=1):
        """Returns an ssh connection to the specified host."""
        bsleep = sleep
        ssh = paramiko.SSHClient()
        # Accept unknown host keys: test guests are freshly booted VMs.
        ssh.set_missing_host_key_policy(
            paramiko.AutoAddPolicy())
        _start_time = time.time()
        if self.pkey is not None:
            LOG.info("Creating ssh connection to '%s' as '%s'"
                     " with public key authentication",
                     self.host, self.username)
        else:
            LOG.info("Creating ssh connection to '%s' as '%s'"
                     " with password %s",
                     self.host, self.username, str(self.password))
        attempts = 0
        while True:
            try:
                ssh.connect(self.host, username=self.username,
                            password=self.password,
                            look_for_keys=self.look_for_keys,
                            key_filename=self.key_filename,
                            timeout=self.channel_timeout, pkey=self.pkey)
                LOG.info("ssh connection to %s@%s successfuly created",
                         self.username, self.host)
                return ssh
            except (socket.error,
                    paramiko.SSHException) as e:
                # Retry with linearly increasing sleep until the overall
                # self.timeout budget is exhausted.
                if self._is_timed_out(_start_time):
                    LOG.exception("Failed to establish authenticated ssh"
                                  " connection to %s@%s after %d attempts",
                                  self.username, self.host, attempts)
                    raise exceptions.SSHTimeout(host=self.host,
                                                user=self.username,
                                                password=self.password)
                bsleep += backoff
                attempts += 1
                LOG.warning("Failed to establish authenticated ssh"
                            " connection to %s@%s (%s). Number attempts: %s."
                            " Retry after %d seconds.",
                            self.username, self.host, e, attempts, bsleep)
                time.sleep(bsleep)

    def _is_timed_out(self, start_time):
        # True once more than self.timeout seconds have passed since
        # start_time (rearranged as time.time() > start_time + timeout).
        return (time.time() - self.timeout) > start_time

    def exec_command(self, cmd):
        """
        Execute the specified command on the server.

        Note that this method is reading whole command outputs to memory, thus
        shouldn't be used for large outputs.

        :returns: data read from standard output of the command.
        :raises: SSHExecCommandFailed if command returns nonzero
                 status. The exception contains command status stderr content.
        """
        ssh = self._get_ssh_connection()
        transport = ssh.get_transport()
        channel = transport.open_session()
        channel.fileno() # Register event pipe
        channel.exec_command(cmd)
        # No stdin will be sent; let the remote side see EOF immediately.
        channel.shutdown_write()
        out_data = []
        err_data = []
        poll = select.poll()
        poll.register(channel, select.POLLIN)
        start_time = time.time()

        while True:
            ready = poll.poll(self.channel_timeout)
            if not any(ready):
                # Nothing readable within the poll interval: keep waiting
                # until the overall timeout budget runs out.
                if not self._is_timed_out(start_time):
                    continue
                raise exceptions.TimeoutException(
                    "Command: '{0}' executed on host '{1}'.".format(
                        cmd, self.host))
            if not ready[0]: # If there is nothing to read.
                continue
            out_chunk = err_chunk = None
            if channel.recv_ready():
                out_chunk = channel.recv(self.buf_size)
                out_data += out_chunk,
            if channel.recv_stderr_ready():
                err_chunk = channel.recv_stderr(self.buf_size)
                err_data += err_chunk,
            # Done only when the channel is closed AND both streams are
            # drained (a closed channel may still have buffered output).
            if channel.closed and not err_chunk and not out_chunk:
                break
        exit_status = channel.recv_exit_status()
        if 0 != exit_status:
            raise exceptions.SSHExecCommandFailed(
                command=cmd, exit_status=exit_status,
                strerror=''.join(err_data))
        return ''.join(out_data)

    def test_connection_auth(self):
        """Raises an exception when we can not connect to server via ssh."""
        connection = self._get_ssh_connection()
        connection.close()
| apache-2.0 |
miguelparaiso/OdooAccessible | addons/l10n_th/__openerp__.py | 260 | 1453 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Thailand - Accounting',
'version': '1.0',
'category': 'Localization/Account Charts',
'description': """
Chart of Accounts for Thailand.
===============================
Thai accounting chart and localization.
""",
'author': 'Almacom',
'website': 'http://almacom.co.th/',
'depends': ['account_chart'],
'data': [ 'account_data.xml' ],
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
stweil/openlayers | tools/BeautifulSoup.py | 5 | 69569 | """Beautiful Soup
Elixir and Tonic
"The Screen-Scraper's Friend"
http://www.crummy.com/software/BeautifulSoup/
Beautiful Soup parses a (possibly invalid) XML or HTML document into a
tree representation. It provides methods and Pythonic idioms that make
it easy to navigate, search, and modify the tree.
A well-formed XML/HTML document yields a well-formed data
structure. An ill-formed XML/HTML document yields a correspondingly
ill-formed data structure. If your document is only locally
well-formed, you can use this library to find and process the
well-formed part of it. The BeautifulSoup class
Beautiful Soup works with Python 2.2 and up. It has no external
dependencies, but you'll have more success at converting data to UTF-8
if you also install these three packages:
* chardet, for auto-detecting character encodings
http://chardet.feedparser.org/
* cjkcodecs and iconv_codec, which add more encodings to the ones supported
by stock Python.
http://cjkpython.i18n.org/
Beautiful Soup defines classes for two main parsing strategies:
* BeautifulStoneSoup, for parsing XML, SGML, or your domain-specific
language that kind of looks like XML.
* BeautifulSoup, for parsing run-of-the-mill HTML code, be it valid
or invalid. This class has web browser-like heuristics for
obtaining a sensible parse tree in the face of common HTML errors.
Beautiful Soup also defines a class (UnicodeDammit) for autodetecting
the encoding of an HTML or XML document, and converting it to
Unicode. Much of this code is taken from Mark Pilgrim's Universal Feed Parser.
For more than you ever wanted to know about Beautiful Soup, see the
documentation:
http://www.crummy.com/software/BeautifulSoup/documentation.html
"""
from __future__ import generators
__author__ = "Leonard Richardson (leonardr@segfault.org)"
__version__ = "3.0.4"
__copyright__ = "Copyright (c) 2004-2007 Leonard Richardson"
__license__ = "PSF"
from sgmllib import SGMLParser, SGMLParseError
import codecs
import types
import re
import sgmllib
try:
from htmlentitydefs import name2codepoint
except ImportError:
name2codepoint = {}
#This hack makes Beautiful Soup able to parse XML with namespaces
# NOTE: this monkey-patches a module-level regex on sgmllib itself, so it
# affects every user of sgmllib in this process, not just Beautiful Soup.
sgmllib.tagfind = re.compile('[a-zA-Z][-_.:a-zA-Z0-9]*')

# Encoding used by default when serializing parse trees back to text.
DEFAULT_OUTPUT_ENCODING = "utf-8"
# First, the classes that represent markup elements.
class PageElement:
"""Contains the navigational information for some part of the page
(either a tag or a piece of text)"""
def setup(self, parent=None, previous=None):
"""Sets up the initial relations between this element and
other elements."""
self.parent = parent
self.previous = previous
self.next = None
self.previousSibling = None
self.nextSibling = None
if self.parent and self.parent.contents:
self.previousSibling = self.parent.contents[-1]
self.previousSibling.nextSibling = self
    def replaceWith(self, replaceWith):
        # Swap this element out of the tree and put *replaceWith* in its
        # place (same parent, same position).
        oldParent = self.parent
        myIndex = self.parent.contents.index(self)
        if hasattr(replaceWith, 'parent') and replaceWith.parent == self.parent:
            # We're replacing this element with one of its siblings.
            index = self.parent.contents.index(replaceWith)
            if index and index < myIndex:
                # Furthermore, it comes before this element. That
                # means that when we extract it, the index of this
                # element will change.
                myIndex = myIndex - 1
        self.extract()
        oldParent.insert(myIndex, replaceWith)
    def extract(self):
        """Destructively rips this element out of the tree."""
        if self.parent:
            try:
                self.parent.contents.remove(self)
            except ValueError:
                # Already absent from the parent's contents; ignore.
                pass

        #Find the two elements that would be next to each other if
        #this element (and any children) hadn't been parsed. Connect
        #the two.
        lastChild = self._lastRecursiveChild()
        nextElement = lastChild.next

        # Splice this subtree out of the document-order linked list...
        if self.previous:
            self.previous.next = nextElement
        if nextElement:
            nextElement.previous = self.previous
        self.previous = None
        lastChild.next = None

        # ...and out of the sibling linked list.
        self.parent = None
        if self.previousSibling:
            self.previousSibling.nextSibling = self.nextSibling
        if self.nextSibling:
            self.nextSibling.previousSibling = self.previousSibling
        self.previousSibling = self.nextSibling = None
def _lastRecursiveChild(self):
"Finds the last element beneath this object to be parsed."
lastChild = self
while hasattr(lastChild, 'contents') and lastChild.contents:
lastChild = lastChild.contents[-1]
return lastChild
def insert(self, position, newChild):
if (isinstance(newChild, basestring)
or isinstance(newChild, unicode)) \
and not isinstance(newChild, NavigableString):
newChild = NavigableString(newChild)
position = min(position, len(self.contents))
if hasattr(newChild, 'parent') and newChild.parent != None:
# We're 'inserting' an element that's already one
# of this object's children.
if newChild.parent == self:
index = self.find(newChild)
if index and index < position:
# Furthermore we're moving it further down the
# list of this object's children. That means that
# when we extract this element, our target index
# will jump down one.
position = position - 1
newChild.extract()
newChild.parent = self
previousChild = None
if position == 0:
newChild.previousSibling = None
newChild.previous = self
else:
previousChild = self.contents[position-1]
newChild.previousSibling = previousChild
newChild.previousSibling.nextSibling = newChild
newChild.previous = previousChild._lastRecursiveChild()
if newChild.previous:
newChild.previous.next = newChild
newChildsLastElement = newChild._lastRecursiveChild()
if position >= len(self.contents):
newChild.nextSibling = None
parent = self
parentsNextSibling = None
while not parentsNextSibling:
parentsNextSibling = parent.nextSibling
parent = parent.parent
if not parent: # This is the last element in the document.
break
if parentsNextSibling:
newChildsLastElement.next = parentsNextSibling
else:
newChildsLastElement.next = None
else:
nextChild = self.contents[position]
newChild.nextSibling = nextChild
if newChild.nextSibling:
newChild.nextSibling.previousSibling = newChild
newChildsLastElement.next = nextChild
if newChildsLastElement.next:
newChildsLastElement.next.previous = newChildsLastElement
self.contents.insert(position, newChild)
def findNext(self, name=None, attrs={}, text=None, **kwargs):
"""Returns the first item that matches the given criteria and
appears after this Tag in the document."""
return self._findOne(self.findAllNext, name, attrs, text, **kwargs)
def findAllNext(self, name=None, attrs={}, text=None, limit=None,
**kwargs):
"""Returns all items that match the given criteria and appear
before after Tag in the document."""
return self._findAll(name, attrs, text, limit, self.nextGenerator)
def findNextSibling(self, name=None, attrs={}, text=None, **kwargs):
"""Returns the closest sibling to this Tag that matches the
given criteria and appears after this Tag in the document."""
return self._findOne(self.findNextSiblings, name, attrs, text,
**kwargs)
def findNextSiblings(self, name=None, attrs={}, text=None, limit=None,
**kwargs):
"""Returns the siblings of this Tag that match the given
criteria and appear after this Tag in the document."""
return self._findAll(name, attrs, text, limit,
self.nextSiblingGenerator, **kwargs)
fetchNextSiblings = findNextSiblings # Compatibility with pre-3.x
def findPrevious(self, name=None, attrs={}, text=None, **kwargs):
"""Returns the first item that matches the given criteria and
appears before this Tag in the document."""
return self._findOne(self.findAllPrevious, name, attrs, text, **kwargs)
def findAllPrevious(self, name=None, attrs={}, text=None, limit=None,
**kwargs):
"""Returns all items that match the given criteria and appear
before this Tag in the document."""
return self._findAll(name, attrs, text, limit, self.previousGenerator,
**kwargs)
fetchPrevious = findAllPrevious # Compatibility with pre-3.x
def findPreviousSibling(self, name=None, attrs={}, text=None, **kwargs):
"""Returns the closest sibling to this Tag that matches the
given criteria and appears before this Tag in the document."""
return self._findOne(self.findPreviousSiblings, name, attrs, text,
**kwargs)
def findPreviousSiblings(self, name=None, attrs={}, text=None,
limit=None, **kwargs):
"""Returns the siblings of this Tag that match the given
criteria and appear before this Tag in the document."""
return self._findAll(name, attrs, text, limit,
self.previousSiblingGenerator, **kwargs)
fetchPreviousSiblings = findPreviousSiblings # Compatibility with pre-3.x
def findParent(self, name=None, attrs={}, **kwargs):
"""Returns the closest parent of this Tag that matches the given
criteria."""
# NOTE: We can't use _findOne because findParents takes a different
# set of arguments.
r = None
l = self.findParents(name, attrs, 1)
if l:
r = l[0]
return r
def findParents(self, name=None, attrs={}, limit=None, **kwargs):
"""Returns the parents of this Tag that match the given
criteria."""
return self._findAll(name, attrs, None, limit, self.parentGenerator,
**kwargs)
fetchParents = findParents # Compatibility with pre-3.x
#These methods do the real heavy lifting.
def _findOne(self, method, name, attrs, text, **kwargs):
r = None
l = method(name, attrs, text, 1, **kwargs)
if l:
r = l[0]
return r
def _findAll(self, name, attrs, text, limit, generator, **kwargs):
"Iterates over a generator looking for things that match."
if isinstance(name, SoupStrainer):
strainer = name
else:
# Build a SoupStrainer
strainer = SoupStrainer(name, attrs, text, **kwargs)
results = ResultSet(strainer)
g = generator()
while True:
try:
i = g.next()
except StopIteration:
break
if i:
found = strainer.search(i)
if found:
results.append(found)
if limit and len(results) >= limit:
break
return results
#These Generators can be used to navigate starting from both
#NavigableStrings and Tags.
def nextGenerator(self):
i = self
while i:
i = i.next
yield i
def nextSiblingGenerator(self):
i = self
while i:
i = i.nextSibling
yield i
def previousGenerator(self):
i = self
while i:
i = i.previous
yield i
def previousSiblingGenerator(self):
i = self
while i:
i = i.previousSibling
yield i
def parentGenerator(self):
i = self
while i:
i = i.parent
yield i
# Utility methods
def substituteEncoding(self, str, encoding=None):
encoding = encoding or "utf-8"
return str.replace("%SOUP-ENCODING%", encoding)
def toEncoding(self, s, encoding=None):
"""Encodes an object to a string in some encoding, or to Unicode.
."""
if isinstance(s, unicode):
if encoding:
s = s.encode(encoding)
elif isinstance(s, str):
if encoding:
s = s.encode(encoding)
else:
s = unicode(s)
else:
if encoding:
s = self.toEncoding(str(s), encoding)
else:
s = unicode(s)
return s
class NavigableString(unicode, PageElement):
    """A Unicode string that also carries PageElement navigation data."""

    def __getattr__(self, attr):
        """text.string gives you text. This is for backwards
        compatibility for Navigable*String, but for CData* it lets you
        get the string without the CData wrapper."""
        if attr != 'string':
            raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, attr))
        return self

    def __unicode__(self):
        return self.__str__(None)

    def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
        # With no encoding, hand back the Unicode object itself.
        if not encoding:
            return self
        return self.encode(encoding)
class CData(NavigableString):
    """A NavigableString rendered inside a <![CDATA[...]]> wrapper."""

    def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
        return "<![CDATA[%s]]>" % NavigableString.__str__(self, encoding)
class ProcessingInstruction(NavigableString):
    """A NavigableString rendered as an SGML processing instruction
    (<?...?>), with optional %SOUP-ENCODING% substitution."""

    def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
        output = self
        # An XML declaration may carry a %SOUP-ENCODING% placeholder
        # (see handle_pi); substitute the actual output encoding.
        if "%SOUP-ENCODING%" in output:
            output = self.substituteEncoding(output, encoding)
        return "<?%s?>" % self.toEncoding(output, encoding)
class Comment(NavigableString):
    """A NavigableString rendered inside an SGML comment (<!--...-->)."""

    def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
        return "<!--%s-->" % NavigableString.__str__(self, encoding)
class Declaration(NavigableString):
    """A NavigableString rendered as an SGML declaration (<!...>),
    e.g. a DOCTYPE."""

    def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
        return "<!%s>" % NavigableString.__str__(self, encoding)
class Tag(PageElement):
    """Represents a found HTML tag with its attributes and contents."""

    # Characters that must be escaped inside attribute values, mapped to
    # the entity names this library emits.
    # NOTE(review): "squot"/"quote" are not the standard XML entity names
    # ("apos"/"quot" are), but changing them would change rendered output,
    # so they are preserved as-is.
    XML_SPECIAL_CHARS_TO_ENTITIES = { "'" : "squot",
                                      '"' : "quote",
                                      "&" : "amp",
                                      "<" : "lt",
                                      ">" : "gt" }

    def __init__(self, parser, name, attrs=None, parent=None,
                 previous=None):
        "Basic constructor."

        # We don't actually store the parser object: that lets extracted
        # chunks be garbage-collected
        self.parserClass = parser.__class__
        self.isSelfClosing = parser.isSelfClosingTag(name)
        self.name = name
        if attrs == None:
            attrs = []
        # attrs is a list of (key, value) tuples; attrMap (built lazily
        # by _getAttrMap) is its dict form.
        self.attrs = attrs
        self.contents = []
        self.setup(parent, previous)
        self.hidden = False
        self.containsSubstitutions = False

    def get(self, key, default=None):
        """Returns the value of the 'key' attribute for the tag, or
        the value given for 'default' if it doesn't have that
        attribute."""
        return self._getAttrMap().get(key, default)

    def has_key(self, key):
        """Returns true if the tag has an attribute named 'key'."""
        return key in self._getAttrMap()

    def __getitem__(self, key):
        """tag[key] returns the value of the 'key' attribute for the tag,
        and throws an exception if it's not there."""
        return self._getAttrMap()[key]

    def __iter__(self):
        "Iterating over a tag iterates over its contents."
        return iter(self.contents)

    def __len__(self):
        "The length of a tag is the length of its list of contents."
        return len(self.contents)

    def __contains__(self, x):
        return x in self.contents

    def __nonzero__(self):
        "A tag is non-None even if it has no contents."
        return True

    def __setitem__(self, key, value):
        """Setting tag[key] sets the value of the 'key' attribute for the
        tag."""
        # Keep both representations in sync: the dict form...
        self._getAttrMap()
        self.attrMap[key] = value
        # ...and the ordered (key, value) list.  Every occurrence of the
        # key is updated, since bad HTML can repeat an attribute.
        found = False
        for i in range(0, len(self.attrs)):
            if self.attrs[i][0] == key:
                self.attrs[i] = (key, value)
                found = True
        if not found:
            self.attrs.append((key, value))

    def __delitem__(self, key):
        "Deleting tag[key] deletes all 'key' attributes for the tag."
        for item in self.attrs:
            if item[0] == key:
                self.attrs.remove(item)
                #We don't break because bad HTML can define the same
                #attribute multiple times.
            self._getAttrMap()
            if key in self.attrMap:
                del self.attrMap[key]

    def __call__(self, *args, **kwargs):
        """Calling a tag like a function is the same as calling its
        findAll() method. Eg. tag('a') returns a list of all the A tags
        found within this tag."""
        return self.findAll(*args, **kwargs)

    def __getattr__(self, tag):
        #print "Getattr %s.%s" % (self.__class__, tag)
        if len(tag) > 3 and tag.rfind('Tag') == len(tag)-3:
            # soup.fooTag is shorthand for soup.find('foo').
            return self.find(tag[:-3])
        elif tag.find('__') != 0:
            # Any other non-dunder attribute is treated as a tag search.
            return self.find(tag)
        # Bug fix: the original fell off the end here and implicitly
        # returned None for double-underscore names, which confuses
        # protocol lookups (copy, pickle, hasattr checks).  Raise instead.
        raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__, tag))

    def __eq__(self, other):
        """Returns true iff this tag has the same name, the same attributes,
        and the same contents (recursively) as the given tag.

        NOTE: right now this will return false if two tags have the
        same attributes in a different order. Should this be fixed?"""
        if not hasattr(other, 'name') or not hasattr(other, 'attrs') or not hasattr(other, 'contents') or self.name != other.name or self.attrs != other.attrs or len(self) != len(other):
            return False
        for i in range(0, len(self.contents)):
            if self.contents[i] != other.contents[i]:
                return False
        return True

    def __ne__(self, other):
        """Returns true iff this tag is not identical to the other tag,
        as defined in __eq__."""
        return not self == other

    def __repr__(self, encoding=DEFAULT_OUTPUT_ENCODING):
        """Renders this tag as a string."""
        return self.__str__(encoding)

    def __unicode__(self):
        return self.__str__(None)

    def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING,
                prettyPrint=False, indentLevel=0):
        """Returns a string or Unicode representation of this tag and
        its contents. To get Unicode, pass None for encoding.

        NOTE: since Python's HTML parser consumes whitespace, this
        method is not certain to reproduce the whitespace present in
        the original string."""

        encodedName = self.toEncoding(self.name, encoding)

        attrs = []
        if self.attrs:
            for key, val in self.attrs:
                fmt = '%s="%s"'
                if isString(val):
                    if self.containsSubstitutions and '%SOUP-ENCODING%' in val:
                        val = self.substituteEncoding(val, encoding)

                    # The attribute value either:
                    #
                    # * Contains no embedded double quotes or single quotes.
                    #   No problem: we enclose it in double quotes.
                    # * Contains embedded single quotes. No problem:
                    #   double quotes work here too.
                    # * Contains embedded double quotes. No problem:
                    #   we enclose it in single quotes.
                    # * Embeds both single _and_ double quotes. This
                    #   can't happen naturally, but it can happen if
                    #   you modify an attribute value after parsing
                    #   the document. Now we have a bit of a
                    #   problem. We solve it by enclosing the
                    #   attribute in single quotes, and escaping any
                    #   embedded single quotes to XML entities.
                    if '"' in val:
                        fmt = "%s='%s'"
                        # This can't happen naturally, but it can happen
                        # if you modify an attribute value after parsing.
                        if "'" in val:
                            val = val.replace("'", "&squot;")

                    # Now we're okay w/r/t quotes. But the attribute
                    # value might also contain angle brackets, or
                    # ampersands that aren't part of entities. We need
                    # to escape those to XML entities too.
                    val = re.sub("([<>]|&(?![^\s]+;))",
                                 lambda x: "&" + self.XML_SPECIAL_CHARS_TO_ENTITIES[x.group(0)[0]] + ";",
                                 val)

                attrs.append(fmt % (self.toEncoding(key, encoding),
                                    self.toEncoding(val, encoding)))
        close = ''
        closeTag = ''
        if self.isSelfClosing:
            close = ' /'
        else:
            closeTag = '</%s>' % encodedName

        indentTag, indentContents = 0, 0
        if prettyPrint:
            indentTag = indentLevel
            space = (' ' * (indentTag-1))
            indentContents = indentTag + 1
        contents = self.renderContents(encoding, prettyPrint, indentContents)
        if self.hidden:
            # Hidden tags (the document root) render only their contents.
            s = contents
        else:
            s = []
            attributeString = ''
            if attrs:
                attributeString = ' ' + ' '.join(attrs)
            if prettyPrint:
                s.append(space)
            s.append('<%s%s%s>' % (encodedName, attributeString, close))
            if prettyPrint:
                s.append("\n")
            s.append(contents)
            if prettyPrint and contents and contents[-1] != "\n":
                s.append("\n")
            if prettyPrint and closeTag:
                s.append(space)
            s.append(closeTag)
            if prettyPrint and closeTag and self.nextSibling:
                s.append("\n")
            s = ''.join(s)
        return s

    def prettify(self, encoding=DEFAULT_OUTPUT_ENCODING):
        """Renders this tag with indentation showing the tree structure."""
        return self.__str__(encoding, True)

    def renderContents(self, encoding=DEFAULT_OUTPUT_ENCODING,
                       prettyPrint=False, indentLevel=0):
        """Renders the contents of this tag as a string in the given
        encoding. If encoding is None, returns a Unicode string.."""
        s=[]
        for c in self:
            text = None
            if isinstance(c, NavigableString):
                text = c.__str__(encoding)
            elif isinstance(c, Tag):
                s.append(c.__str__(encoding, prettyPrint, indentLevel))
            if text and prettyPrint:
                text = text.strip()
            if text:
                if prettyPrint:
                    s.append(" " * (indentLevel-1))
                s.append(text)
                if prettyPrint:
                    s.append("\n")
        return ''.join(s)

    #Soup methods

    def find(self, name=None, attrs={}, recursive=True, text=None,
             **kwargs):
        """Return only the first child of this Tag matching the given
        criteria."""
        r = None
        l = self.findAll(name, attrs, recursive, text, 1, **kwargs)
        if l:
            r = l[0]
        return r
    findChild = find

    def findAll(self, name=None, attrs={}, recursive=True, text=None,
                limit=None, **kwargs):
        """Extracts a list of Tag objects that match the given
        criteria.  You can specify the name of the Tag and any
        attributes you want the Tag to have.

        The value of a key-value pair in the 'attrs' map can be a
        string, a list of strings, a regular expression object, or a
        callable that takes a string and returns whether or not the
        string matches for some custom definition of 'matches'. The
        same is true of the tag name."""
        generator = self.recursiveChildGenerator
        if not recursive:
            generator = self.childGenerator
        return self._findAll(name, attrs, text, limit, generator, **kwargs)
    findChildren = findAll

    # Pre-3.x compatibility methods
    first = find
    fetch = findAll

    def fetchText(self, text=None, recursive=True, limit=None):
        return self.findAll(text=text, recursive=recursive, limit=limit)

    def firstText(self, text=None, recursive=True):
        return self.find(text=text, recursive=recursive)

    #Utility methods

    def append(self, tag):
        """Appends the given tag to the contents of this tag."""
        self.contents.append(tag)

    #Private methods

    def _getAttrMap(self):
        """Initializes a map representation of this tag's attributes,
        if not already initialized."""
        # Bug fix: getattr() with no default used to fall through to
        # Tag.__getattr__, which performed a full tree search for a child
        # tag literally named 'attrMap' on every call (and would return
        # that tag, skipping initialization, if one existed).
        if not getattr(self, 'attrMap', None):
            self.attrMap = {}
            for (key, value) in self.attrs:
                self.attrMap[key] = value
        return self.attrMap

    #Generator methods
    def childGenerator(self):
        # Yields direct children only, in document order.
        for i in range(0, len(self.contents)):
            yield self.contents[i]

    def recursiveChildGenerator(self):
        # Depth-first, document-order traversal of the whole subtree,
        # implemented with an explicit (tag, resume-index) stack.
        stack = [(self, 0)]
        while stack:
            tag, start = stack.pop()
            if isinstance(tag, Tag):
                for i in range(start, len(tag.contents)):
                    a = tag.contents[i]
                    yield a
                    if isinstance(a, Tag) and tag.contents:
                        if i < len(tag.contents) - 1:
                            # Remember where to resume in this tag before
                            # descending into the child.
                            stack.append((tag, i+1))
                        stack.append((a, 0))
                        break
# Next, a couple classes to represent queries and their results.
class SoupStrainer:
    """Encapsulates a number of ways of matching a markup element (tag or
    text)."""

    def __init__(self, name=None, attrs={}, text=None, **kwargs):
        # name may be a string, regexp, list, callable, or True;
        # attrs maps attribute names to the same kinds of matchers.
        self.name = name
        # A bare string for attrs is shorthand for matching the CSS class.
        if isString(attrs):
            kwargs['class'] = attrs
            attrs = None
        if kwargs:
            if attrs:
                attrs = attrs.copy()
                attrs.update(kwargs)
            else:
                attrs = kwargs
        self.attrs = attrs
        self.text = text

    def __str__(self):
        if self.text:
            return self.text
        else:
            return "%s|%s" % (self.name, self.attrs)

    def searchTag(self, markupName=None, markupAttrs={}):
        """Matches a tag by name and attributes.  markupName may be a Tag
        object or a plain tag-name string (with markupAttrs a list of
        (key, value) pairs); returns the matched object or None."""
        found = None
        markup = None
        if isinstance(markupName, Tag):
            markup = markupName
            markupAttrs = markup
        # A callable name gets the raw (name, attrs) pair, but only when
        # we were handed a parse-time string rather than a built Tag.
        callFunctionWithTagData = callable(self.name) \
                                  and not isinstance(markupName, Tag)

        if (not self.name) \
               or callFunctionWithTagData \
               or (markup and self._matches(markup, self.name)) \
               or (not markup and self._matches(markupName, self.name)):
            if callFunctionWithTagData:
                match = self.name(markupName, markupAttrs)
            else:
                match = True
                # Lazily build a dict view of the attributes the first
                # time an attribute matcher needs it.
                markupAttrMap = None
                for attr, matchAgainst in self.attrs.items():
                    if not markupAttrMap:
                        if hasattr(markupAttrs, 'get'):
                            markupAttrMap = markupAttrs
                        else:
                            markupAttrMap = {}
                            for k,v in markupAttrs:
                                markupAttrMap[k] = v
                    attrValue = markupAttrMap.get(attr)
                    if not self._matches(attrValue, matchAgainst):
                        match = False
                        break
            if match:
                if markup:
                    found = markup
                else:
                    found = markupName
        return found

    def search(self, markup):
        """Matches any kind of markup: a list of elements, a Tag, or a
        piece of text.  Returns the matched object or None."""
        #print 'looking for %s in %s' % (self, markup)
        found = None
        # If given a list of items, scan it for a text element that
        # matches.
        if isList(markup) and not isinstance(markup, Tag):
            for element in markup:
                if isinstance(element, NavigableString) \
                       and self.search(element):
                    found = element
                    break
        # If it's a Tag, make sure its name or attributes match.
        # Don't bother with Tags if we're searching for text.
        elif isinstance(markup, Tag):
            if not self.text:
                found = self.searchTag(markup)
        # If it's text, make sure the text matches.
        elif isinstance(markup, NavigableString) or \
                 isString(markup):
            if self._matches(markup, self.text):
                found = markup
        else:
            raise Exception, "I don't know how to match against a %s" \
                  % markup.__class__
        return found

    def _matches(self, markup, matchAgainst):
        """Core matcher: compares one value (tag name, attribute value or
        text) against one criterion (True, callable, regexp, list, dict,
        or string)."""
        #print "Matching %s against %s" % (markup, matchAgainst)
        result = False
        # The explicit BooleanType check keeps matchAgainst=1 from being
        # treated as "match anything".
        if matchAgainst == True and type(matchAgainst) == types.BooleanType:
            result = markup != None
        elif callable(matchAgainst):
            result = matchAgainst(markup)
        else:
            #Custom match methods take the tag as an argument, but all
            #other ways of matching match the tag name as a string.
            if isinstance(markup, Tag):
                markup = markup.name
            if markup and not isString(markup):
                markup = unicode(markup)
            #Now we know that chunk is either a string, or None.
            if hasattr(matchAgainst, 'match'):
                # It's a regexp object.
                result = markup and matchAgainst.search(markup)
            elif isList(matchAgainst):
                result = markup in matchAgainst
            elif hasattr(matchAgainst, 'items'):
                # NOTE(review): markup is a string or None here, so this
                # has_key call looks wrong (strings have no has_key) and
                # would raise AttributeError; intent unclear -- confirm
                # against upstream before relying on dict criteria.
                result = markup.has_key(matchAgainst)
            elif matchAgainst and isString(markup):
                # Coerce the criterion to the markup's string type before
                # the equality fallback below.
                if isinstance(markup, unicode):
                    matchAgainst = unicode(matchAgainst)
                else:
                    matchAgainst = str(matchAgainst)

            if not result:
                result = matchAgainst == markup
        return result
class ResultSet(list):
    """A ResultSet is just a list that keeps track of the SoupStrainer
    that created it."""

    def __init__(self, source):
        # Bug fix: the original called list.__init__([]) -- initializing
        # a throwaway literal instead of this instance (a silent no-op).
        list.__init__(self)
        # The SoupStrainer that produced these results.
        self.source = source
# Now, some helper functions.
def isList(l):
    """Convenience method that works with all 2.x versions of Python
    to determine whether or not something is listlike."""
    # Anything exposing the iterator protocol counts first...
    if hasattr(l, '__iter__'):
        return True
    # ...otherwise fall back to an exact list/tuple type check.
    return type(l) in (types.ListType, types.TupleType)
def isString(s):
    """Convenience method that works with all 2.x versions of Python
    to determine whether or not something is stringlike."""
    try:
        # Bug fix: the original read "isintance(s, basestring)" -- a
        # misspelling of isinstance whose NameError was accidentally
        # swallowed by the except clause below.  basestring covers both
        # str and unicode on Python 2.
        return isinstance(s, basestring)
    except NameError:
        # No basestring in this Python: fall back to plain str.
        return isinstance(s, str)
def buildTagMap(default, *args):
    """Turns a list of maps, lists, or scalars into a single map.
    Used to build the SELF_CLOSING_TAGS, NESTABLE_TAGS, and
    NESTING_RESET_TAGS maps out of lists and partial maps."""
    combined = {}
    for source in args:
        if hasattr(source, 'items'):
            # A mapping: merge its entries wholesale.
            combined.update(source)
        elif isList(source):
            # A list: every member maps to the default value.
            for key in source:
                combined[key] = default
        else:
            # A scalar: it alone maps to the default value.
            combined[source] = default
    return combined
# Now, the parser classes.
class BeautifulStoneSoup(Tag, SGMLParser):

    """This class contains the basic parser and search code. It defines
    a parser that knows nothing about tag behavior except for the
    following:

      You can't close a tag without closing all the tags it encloses.
      That is, "<foo><bar></foo>" actually means
      "<foo><bar></bar></foo>".

    [Another possible explanation is "<foo><bar /></foo>", but since
    this class defines no SELF_CLOSING_TAGS, it will never use that
    explanation.]

    This class is useful for parsing XML or made-up markup languages,
    or when BeautifulSoup makes an assumption counter to what you were
    expecting."""

    # Entity names that may be converted when convertEntities is
    # XML_ENTITIES; derived from Tag's escape table at class-build time.
    XML_ENTITY_LIST = {}
    for i in Tag.XML_SPECIAL_CHARS_TO_ENTITIES.values():
        XML_ENTITY_LIST[i] = True

    # Subclasses (e.g. BeautifulSoup) override these to encode their
    # knowledge of tag behavior; the base parser assumes nothing.
    SELF_CLOSING_TAGS = {}
    NESTABLE_TAGS = {}
    RESET_NESTING_TAGS = {}
    QUOTE_TAGS = {}

    # Default input-sanitizing regexes: fix "<br/>" (no space before the
    # slash) and "<! --comment-->" (whitespace inside a declaration),
    # the two malformations that most often choke sgmllib.
    MARKUP_MASSAGE = [(re.compile('(<[^<>]*)/>'),
                       lambda x: x.group(1) + ' />'),
                      (re.compile('<!\s+([^<>]*)>'),
                       lambda x: '<!' + x.group(1) + '>')
                      ]

    ROOT_TAG_NAME = u'[document]'

    HTML_ENTITIES = "html"
    XML_ENTITIES = "xml"

    def __init__(self, markup="", parseOnlyThese=None, fromEncoding=None,
                 markupMassage=True, smartQuotesTo=XML_ENTITIES,
                 convertEntities=None, selfClosingTags=None):
        """The Soup object is initialized as the 'root tag', and the
        provided markup (which can be a string or a file-like object)
        is fed into the underlying parser.

        sgmllib will process most bad HTML, and the BeautifulSoup
        class has some tricks for dealing with some HTML that kills
        sgmllib, but Beautiful Soup can nonetheless choke or lose data
        if your data uses self-closing tags or declarations
        incorrectly.

        By default, Beautiful Soup uses regexes to sanitize input,
        avoiding the vast majority of these problems. If the problems
        don't apply to you, pass in False for markupMassage, and
        you'll get better performance.

        The default parser massage techniques fix the two most common
        instances of invalid HTML that choke sgmllib:

         <br/> (No space between name of closing tag and tag close)
         <! --Comment--> (Extraneous whitespace in declaration)

        You can pass in a custom list of (RE object, replace method)
        tuples to get Beautiful Soup to scrub your input the way you
        want."""
        self.parseOnlyThese = parseOnlyThese
        self.fromEncoding = fromEncoding
        self.smartQuotesTo = smartQuotesTo
        self.convertEntities = convertEntities
        if self.convertEntities:
            # It doesn't make sense to convert encoded characters to
            # entities even while you're converting entities to Unicode.
            # Just convert it all to Unicode.
            self.smartQuotesTo = None
        self.instanceSelfClosingTags = buildTagMap(None, selfClosingTags)
        SGMLParser.__init__(self)

        if hasattr(markup, 'read'):        # It's a file-type object.
            markup = markup.read()
        self.markup = markup
        self.markupMassage = markupMassage
        try:
            self._feed()
        except StopParsing:
            # Raised by a SoupStrainer-driven early exit; the partial
            # tree built so far is the result.
            pass
        self.markup = None                 # The markup can now be GCed

    def _feed(self, inDocumentEncoding=None):
        # Convert the document to Unicode.
        markup = self.markup
        if isinstance(markup, unicode):
            if not hasattr(self, 'originalEncoding'):
                self.originalEncoding = None
        else:
            # UnicodeDammit (defined elsewhere in this file) sniffs the
            # encoding, trying fromEncoding and any encoding declared
            # inside the document first.
            dammit = UnicodeDammit\
                     (markup, [self.fromEncoding, inDocumentEncoding],
                      smartQuotesTo=self.smartQuotesTo)
            markup = dammit.unicode
            self.originalEncoding = dammit.originalEncoding
        if markup:
            if self.markupMassage:
                if not isList(self.markupMassage):
                    self.markupMassage = self.MARKUP_MASSAGE
                for fix, m in self.markupMassage:
                    markup = fix.sub(m, markup)
        self.reset()

        SGMLParser.feed(self, markup)
        # Close out any unfinished strings and close all the open tags.
        self.endData()
        while self.currentTag.name != self.ROOT_TAG_NAME:
            self.popTag()

    def __getattr__(self, methodName):
        """This method routes method call requests to either the SGMLParser
        superclass or the Tag superclass, depending on the method name."""
        #print "__getattr__ called on %s.%s" % (self.__class__, methodName)

        if methodName.find('start_') == 0 or methodName.find('end_') == 0 \
               or methodName.find('do_') == 0:
            return SGMLParser.__getattr__(self, methodName)
        elif methodName.find('__') != 0:
            return Tag.__getattr__(self, methodName)
        else:
            raise AttributeError

    def isSelfClosingTag(self, name):
        """Returns true iff the given string is the name of a
        self-closing tag according to this parser."""
        return self.SELF_CLOSING_TAGS.has_key(name) \
               or self.instanceSelfClosingTags.has_key(name)

    def reset(self):
        # Re-initialize the Tag side of this object as the (hidden)
        # document root, then reset the SGMLParser state machine.
        Tag.__init__(self, self, self.ROOT_TAG_NAME)
        self.hidden = 1
        SGMLParser.reset(self)
        self.currentData = []
        self.currentTag = None
        self.tagStack = []
        self.quoteStack = []
        self.pushTag(self)

    def popTag(self):
        tag = self.tagStack.pop()
        # Tags with just one string-owning child get the child as a
        # 'string' property, so that soup.tag.string is shorthand for
        # soup.tag.contents[0]
        if len(self.currentTag.contents) == 1 and \
           isinstance(self.currentTag.contents[0], NavigableString):
            self.currentTag.string = self.currentTag.contents[0]

        #print "Pop", tag.name
        if self.tagStack:
            self.currentTag = self.tagStack[-1]
        return self.currentTag

    def pushTag(self, tag):
        #print "Push", tag.name
        if self.currentTag:
            self.currentTag.append(tag)
        self.tagStack.append(tag)
        self.currentTag = self.tagStack[-1]

    def endData(self, containerClass=NavigableString):
        """Flushes accumulated character data into the tree as an
        instance of containerClass (NavigableString by default)."""
        if self.currentData:
            currentData = ''.join(self.currentData)
            # Collapse whitespace-only runs to a single newline or space.
            if not currentData.strip():
                if '\n' in currentData:
                    currentData = '\n'
                else:
                    currentData = ' '
            self.currentData = []
            # When parsing only selected parts, drop top-level text that
            # the strainer doesn't want.
            if self.parseOnlyThese and len(self.tagStack) <= 1 and \
                   (not self.parseOnlyThese.text or \
                    not self.parseOnlyThese.search(currentData)):
                return
            o = containerClass(currentData)
            o.setup(self.currentTag, self.previous)
            if self.previous:
                self.previous.next = o
            self.previous = o
            self.currentTag.contents.append(o)

    def _popToTag(self, name, inclusivePop=True):
        """Pops the tag stack up to and including the most recent
        instance of the given tag. If inclusivePop is false, pops the tag
        stack up to but *not* including the most recent instqance of
        the given tag."""
        #print "Popping to %s" % name
        if name == self.ROOT_TAG_NAME:
            # The document root is never popped.
            return

        numPops = 0
        mostRecentTag = None
        # Scan from the top of the stack (skipping index 0, the root)
        # for the most recent open tag with this name.
        for i in range(len(self.tagStack)-1, 0, -1):
            if name == self.tagStack[i].name:
                numPops = len(self.tagStack)-i
                break
        if not inclusivePop:
            numPops = numPops - 1

        for i in range(0, numPops):
            mostRecentTag = self.popTag()
        return mostRecentTag

    def _smartPop(self, name):

        """We need to pop up to the previous tag of this type, unless
        one of this tag's nesting reset triggers comes between this
        tag and the previous tag of this type, OR unless this tag is a
        generic nesting trigger and another generic nesting trigger
        comes between this tag and the previous tag of this type.

        Examples:
         <p>Foo<b>Bar<p> should pop to 'p', not 'b'.
         <p>Foo<table>Bar<p> should pop to 'table', not 'p'.
         <p>Foo<table><tr>Bar<p> should pop to 'tr', not 'p'.
         <p>Foo<b>Bar<p> should pop to 'p', not 'b'.

         <li><ul><li> *<li>* should pop to 'ul', not the first 'li'.
         <tr><table><tr> *<tr>* should pop to 'table', not the first 'tr'
         <td><tr><td> *<td>* should pop to 'tr', not the first 'td'
        """

        nestingResetTriggers = self.NESTABLE_TAGS.get(name)
        isNestable = nestingResetTriggers != None
        isResetNesting = self.RESET_NESTING_TAGS.has_key(name)
        popTo = None
        inclusive = True
        # Walk the open-tag stack from the innermost tag outward.
        for i in range(len(self.tagStack)-1, 0, -1):
            p = self.tagStack[i]
            if (not p or p.name == name) and not isNestable:
                #Non-nestable tags get popped to the top or to their
                #last occurrence.
                popTo = name
                break
            if (nestingResetTriggers != None
                and p.name in nestingResetTriggers) \
                or (nestingResetTriggers == None and isResetNesting
                    and self.RESET_NESTING_TAGS.has_key(p.name)):
                #If we encounter one of the nesting reset triggers
                #peculiar to this tag, or we encounter another tag
                #that causes nesting to reset, pop up to but not
                #including that tag.
                popTo = p.name
                inclusive = False
                break
            # NOTE(review): this reassignment is immediately overwritten
            # by the next loop iteration's stack read; looks vestigial --
            # confirm against upstream before removing.
            p = p.parent
        if popTo:
            self._popToTag(popTo, inclusive)

    def unknown_starttag(self, name, attrs, selfClosing=0):
        #print "Start tag %s: %s" % (name, attrs)
        if self.quoteStack:
            #This is not a real tag.
            #print "<%s> is not real!" % name
            attrs = ''.join(map(lambda(x, y): ' %s="%s"' % (x, y), attrs))
            self.handle_data('<%s%s>' % (name, attrs))
            return
        self.endData()

        if not self.isSelfClosingTag(name) and not selfClosing:
            self._smartPop(name)

        # When parsing only selected parts, skip top-level tags the
        # strainer doesn't want (their contents are skipped too, since
        # the tag is never pushed).
        if self.parseOnlyThese and len(self.tagStack) <= 1 \
               and (self.parseOnlyThese.text or not self.parseOnlyThese.searchTag(name, attrs)):
            return

        tag = Tag(self, name, attrs, self.currentTag, self.previous)
        if self.previous:
            self.previous.next = tag
        self.previous = tag
        self.pushTag(tag)
        if selfClosing or self.isSelfClosingTag(name):
            self.popTag()
        if name in self.QUOTE_TAGS:
            #print "Beginning quote (%s)" % name
            # Inside e.g. <script>, nested markup is literal text.
            self.quoteStack.append(name)
            self.literal = 1
        return tag

    def unknown_endtag(self, name):
        #print "End tag %s" % name
        if self.quoteStack and self.quoteStack[-1] != name:
            #This is not a real end tag.
            #print "</%s> is not real!" % name
            self.handle_data('</%s>' % name)
            return
        self.endData()
        self._popToTag(name)
        if self.quoteStack and self.quoteStack[-1] == name:
            self.quoteStack.pop()
            self.literal = (len(self.quoteStack) > 0)

    def handle_data(self, data):
        # Character data is buffered; endData() flushes it into the tree.
        self.currentData.append(data)

    def _toStringSubclass(self, text, subclass):
        """Adds a certain piece of text to the tree as a NavigableString
        subclass."""
        self.endData()
        self.handle_data(text)
        self.endData(subclass)

    def handle_pi(self, text):
        """Handle a processing instruction as a ProcessingInstruction
        object, possibly one with a %SOUP-ENCODING% slot into which an
        encoding will be plugged later."""
        if text[:3] == "xml":
            text = "xml version='1.0' encoding='%SOUP-ENCODING%'"
        self._toStringSubclass(text, ProcessingInstruction)

    def handle_comment(self, text):
        "Handle comments as Comment objects."
        self._toStringSubclass(text, Comment)

    def handle_charref(self, ref):
        "Handle character references as data."
        if self.convertEntities in [self.HTML_ENTITIES,
                                    self.XML_ENTITIES]:
            data = unichr(int(ref))
        else:
            # Not converting: preserve the reference verbatim.
            data = '&#%s;' % ref
        self.handle_data(data)

    def handle_entityref(self, ref):
        """Handle entity references as data, possibly converting known
        HTML entity references to the corresponding Unicode
        characters."""
        data = None
        if self.convertEntities == self.HTML_ENTITIES or \
               (self.convertEntities == self.XML_ENTITIES and \
                self.XML_ENTITY_LIST.get(ref)):
            try:
                # name2codepoint comes from htmlentitydefs (imported at
                # the top of the file).
                data = unichr(name2codepoint[ref])
            except KeyError:
                pass
        if not data:
            # Unknown or unconverted entity: preserve it verbatim.
            data = '&%s;' % ref
        self.handle_data(data)

    def handle_decl(self, data):
        "Handle DOCTYPEs and the like as Declaration objects."
        self._toStringSubclass(data, Declaration)

    def parse_declaration(self, i):
        """Treat a bogus SGML declaration as raw data. Treat a CDATA
        declaration as a CData object."""
        j = None
        if self.rawdata[i:i+9] == '<![CDATA[':
            # Extract the CDATA payload ourselves; sgmllib doesn't
            # understand marked sections.
            k = self.rawdata.find(']]>', i)
            if k == -1:
                k = len(self.rawdata)
            data = self.rawdata[i+9:k]
            j = k+3
            self._toStringSubclass(data, CData)
        else:
            try:
                j = SGMLParser.parse_declaration(self, i)
            except SGMLParseError:
                # Malformed declaration: swallow the rest of the input
                # as plain character data.
                toHandle = self.rawdata[i:]
                self.handle_data(toHandle)
                j = i + len(toHandle)
        return j
class BeautifulSoup(BeautifulStoneSoup):
    """This parser knows the following facts about HTML:

    * Some tags have no closing tag and should be interpreted as being
      closed as soon as they are encountered.

    * The text inside some tags (ie. 'script') may contain tags which
      are not really part of the document and which should be parsed
      as text, not tags. If you want to parse the text as tags, you can
      always fetch it and parse it explicitly.

    * Tag nesting rules:

      Most tags can't be nested at all. For instance, the occurrence of
      a <p> tag should implicitly close the previous <p> tag.

       <p>Para1<p>Para2
        should be transformed into:
       <p>Para1</p><p>Para2

      Some tags can be nested arbitrarily. For instance, the occurrence
      of a <blockquote> tag should _not_ implicitly close the previous
      <blockquote> tag.

       Alice said: <blockquote>Bob said: <blockquote>Blah
        should NOT be transformed into:
       Alice said: <blockquote>Bob said: </blockquote><blockquote>Blah

      Some tags can be nested, but the nesting is reset by the
      interposition of other tags. For instance, a <tr> tag should
      implicitly close the previous <tr> tag within the same <table>,
      but not close a <tr> tag in another table.

       <table><tr>Blah<tr>Blah
        should be transformed into:
       <table><tr>Blah</tr><tr>Blah
        but,
       <tr>Blah<table><tr>Blah
        should NOT be transformed into
       <tr>Blah<table></tr><tr>Blah

    Differing assumptions about tag nesting rules are a major source
    of problems with the BeautifulSoup class. If BeautifulSoup is not
    treating as nestable a tag your page author treats as nestable,
    try ICantBelieveItsBeautifulSoup, MinimalSoup, or
    BeautifulStoneSoup before writing your own subclass."""

    def __init__(self, *args, **kwargs):
        # Default to converting MS smart quotes to HTML entities unless
        # the caller explicitly chose another policy.
        if not kwargs.has_key('smartQuotesTo'):
            kwargs['smartQuotesTo'] = self.HTML_ENTITIES
        BeautifulStoneSoup.__init__(self, *args, **kwargs)

    # Tags that are implicitly closed as soon as they are opened.
    SELF_CLOSING_TAGS = buildTagMap(None,
                                    ['br' , 'hr', 'input', 'img', 'meta',
                                    'spacer', 'link', 'frame', 'base'])

    # Content of these tags is treated as literal text, not markup.
    QUOTE_TAGS = {'script': None}

    #According to the HTML standard, each of these inline tags can
    #contain another tag of the same type. Furthermore, it's common
    #to actually use these tags this way.
    NESTABLE_INLINE_TAGS = ['span', 'font', 'q', 'object', 'bdo', 'sub', 'sup',
                            'center']

    #According to the HTML standard, these block tags can contain
    #another tag of the same type. Furthermore, it's common
    #to actually use these tags this way.
    NESTABLE_BLOCK_TAGS = ['blockquote', 'div', 'fieldset', 'ins', 'del']

    #Lists can contain other lists, but there are restrictions.
    NESTABLE_LIST_TAGS = { 'ol' : [],
                           'ul' : [],
                           'li' : ['ul', 'ol'],
                           'dl' : [],
                           'dd' : ['dl'],
                           'dt' : ['dl'] }

    #Tables can contain other tables, but there are restrictions.
    NESTABLE_TABLE_TAGS = {'table' : [],
                           'tr' : ['table', 'tbody', 'tfoot', 'thead'],
                           'td' : ['tr'],
                           'th' : ['tr'],
                           'thead' : ['table'],
                           'tbody' : ['table'],
                           'tfoot' : ['table'],
                           }

    NON_NESTABLE_BLOCK_TAGS = ['address', 'form', 'p', 'pre']

    #If one of these tags is encountered, all tags up to the next tag of
    #this type are popped.
    RESET_NESTING_TAGS = buildTagMap(None, NESTABLE_BLOCK_TAGS, 'noscript',
                                     NON_NESTABLE_BLOCK_TAGS,
                                     NESTABLE_LIST_TAGS,
                                     NESTABLE_TABLE_TAGS)

    NESTABLE_TAGS = buildTagMap([], NESTABLE_INLINE_TAGS, NESTABLE_BLOCK_TAGS,
                                NESTABLE_LIST_TAGS, NESTABLE_TABLE_TAGS)

    # Used to detect the charset in a META tag; see start_meta
    CHARSET_RE = re.compile("((^|;)\s*charset=)([^;]*)")

    def start_meta(self, attrs):
        """Beautiful Soup can detect a charset included in a META tag,
        try to convert the document to that charset, and re-parse the
        document from the beginning."""
        httpEquiv = None
        contentType = None
        contentTypeIndex = None
        tagNeedsEncodingSubstitution = False

        # Pull out the http-equiv and content attributes, remembering
        # where the content attribute sits so it can be rewritten below.
        for i in range(0, len(attrs)):
            key, value = attrs[i]
            key = key.lower()
            if key == 'http-equiv':
                httpEquiv = value
            elif key == 'content':
                contentType = value
                contentTypeIndex = i

        if httpEquiv and contentType: # It's an interesting meta tag.
            match = self.CHARSET_RE.search(contentType)
            if match:
                # NOTE(review): getattr without a default raises
                # AttributeError if declaredHTMLEncoding was never set;
                # presumably BeautifulStoneSoup initializes it -- confirm.
                if getattr(self, 'declaredHTMLEncoding') or \
                       (self.originalEncoding == self.fromEncoding):
                    # This is our second pass through the document, or
                    # else an encoding was specified explicitly and it
                    # worked. Rewrite the meta tag.
                    newAttr = self.CHARSET_RE.sub\
                              (lambda(match):match.group(1) +
                               "%SOUP-ENCODING%", value)
                    attrs[contentTypeIndex] = (attrs[contentTypeIndex][0],
                                               newAttr)
                    tagNeedsEncodingSubstitution = True
                else:
                    # This is our first pass through the document.
                    # Go through it again with the new information.
                    newCharset = match.group(3)
                    if newCharset and newCharset != self.originalEncoding:
                        self.declaredHTMLEncoding = newCharset
                        self._feed(self.declaredHTMLEncoding)
                        # Abort this pass; _feed has already re-parsed.
                        raise StopParsing
        tag = self.unknown_starttag("meta", attrs)
        if tag and tagNeedsEncodingSubstitution:
            tag.containsSubstitutions = True
class StopParsing(Exception):
    """Control-flow exception used to abort the current parse, e.g.
    when start_meta discovers the document's real encoding and re-feeds
    the markup from scratch."""
    pass
class ICantBelieveItsBeautifulSoup(BeautifulSoup):
    """The BeautifulSoup class is oriented towards skipping over
    common HTML errors like unclosed tags. However, sometimes it makes
    errors of its own. For instance, consider this fragment:

     <b>Foo<b>Bar</b></b>

    This is perfectly valid (if bizarre) HTML. However, the
    BeautifulSoup class will implicitly close the first b tag when it
    encounters the second 'b'. It will think the author wrote
    "<b>Foo<b>Bar", and didn't close the first 'b' tag, because
    there's no real-world reason to bold something that's already
    bold. When it encounters '</b></b>' it will close two more 'b'
    tags, for a grand total of three tags closed instead of two. This
    can throw off the rest of your document structure. The same is
    true of a number of other tags, listed below.

    It's much more common for someone to forget to close a 'b' tag
    than to actually use nested 'b' tags, and the BeautifulSoup class
    handles the common case. This class handles the not-so-common
    case: where you can't believe someone wrote what they did, but
    it's valid HTML and BeautifulSoup screwed up by assuming it
    wouldn't be."""

    # NOTE(review): 'strong' and 'big' appear twice in this list;
    # presumably harmless since buildTagMap builds a mapping -- confirm.
    I_CANT_BELIEVE_THEYRE_NESTABLE_INLINE_TAGS = \
     ['em', 'big', 'i', 'small', 'tt', 'abbr', 'acronym', 'strong',
      'cite', 'code', 'dfn', 'kbd', 'samp', 'strong', 'var', 'b',
      'big']

    I_CANT_BELIEVE_THEYRE_NESTABLE_BLOCK_TAGS = ['noscript']

    # Extend BeautifulSoup's nesting rules with the tags above.
    NESTABLE_TAGS = buildTagMap([], BeautifulSoup.NESTABLE_TAGS,
                                I_CANT_BELIEVE_THEYRE_NESTABLE_BLOCK_TAGS,
                                I_CANT_BELIEVE_THEYRE_NESTABLE_INLINE_TAGS)
class MinimalSoup(BeautifulSoup):
    """The MinimalSoup class is for parsing HTML that contains
    pathologically bad markup. It makes no assumptions about tag
    nesting, but it does know which tags are self-closing, that
    <script> tags contain Javascript and should not be parsed, that
    META tags may contain encoding information, and so on.

    This also makes it better for subclassing than BeautifulStoneSoup
    or BeautifulSoup."""

    # NOTE(review): unlike BeautifulSoup, this passes 'noscript' as
    # buildTagMap's first (default) argument with no tag lists --
    # verify against buildTagMap's signature that this is intended.
    RESET_NESTING_TAGS = buildTagMap('noscript')
    # No nesting assumptions at all.
    NESTABLE_TAGS = {}
class BeautifulSOAP(BeautifulStoneSoup):
    """This class will push a tag with only a single string child into
    the tag's parent as an attribute. The attribute's name is the tag
    name, and the value is the string child. An example should give
    the flavor of the change:

    <foo><bar>baz</bar></foo>
     =>
    <foo bar="baz"><bar>baz</bar></foo>

    You can then access fooTag['bar'] instead of fooTag.barTag.string.

    This is, of course, useful for scraping structures that tend to
    use subelements instead of attributes, such as SOAP messages. Note
    that it modifies its input, so don't print the modified version
    out.

    I'm not sure how many people really want to use this class; let me
    know if you do. Mainly I like the name."""

    def popTag(self):
        # Before the standard pop, mirror a single-string child tag
        # into its parent as an attribute (tag name -> string value),
        # unless the parent already carries an attribute of that name.
        if len(self.tagStack) > 1:
            tag = self.tagStack[-1]
            parent = self.tagStack[-2]
            parent._getAttrMap()
            if (isinstance(tag, Tag) and len(tag.contents) == 1 and
                isinstance(tag.contents[0], NavigableString) and
                not parent.attrMap.has_key(tag.name)):
                parent[tag.name] = tag.contents[0]
        BeautifulStoneSoup.popTag(self)
#Enterprise class names! It has come to our attention that some people
#think the names of the Beautiful Soup parser classes are too silly
#and "unprofessional" for use in enterprise screen-scraping. We feel
#your pain! For such-minded folk, the Beautiful Soup Consortium And
#All-Night Kosher Bakery recommends renaming this file to
#"RobustParser.py" (or, in cases of extreme enterprisness,
#"RobustParserBeanInterface.class") and using the following
#enterprise-friendly class aliases:
# "Enterprise-friendly" aliases for the parser classes above; each one
# is purely a renamed subclass with no behavior of its own.
class RobustXMLParser(BeautifulStoneSoup):
    pass
class RobustHTMLParser(BeautifulSoup):
    pass
class RobustWackAssHTMLParser(ICantBelieveItsBeautifulSoup):
    pass
class RobustInsanelyWackAssHTMLParser(MinimalSoup):
    pass
class SimplifyingSOAPParser(BeautifulSOAP):
    pass
######################################################
#
# Bonus library: Unicode, Dammit
#
# This class forces XML data into a standard format (usually to UTF-8
# or Unicode). It is heavily based on code from Mark Pilgrim's
# Universal Feed Parser. It does not rewrite the XML or HTML to
# reflect a new encoding: that happens in BeautifulStoneSoup.handle_pi
# (XML) and BeautifulSoup.start_meta (HTML).
# Autodetects character encodings.
# Download from http://chardet.feedparser.org/
# chardet is an optional dependency: when present it is used as a
# fallback encoding detector (see UnicodeDammit.__init__); when absent
# the name is bound to None and callers guard on that.
try:
    import chardet
#    import chardet.constants
#    chardet.constants._debug = 1
except ImportError:
    chardet = None
# Fixed: a stray unconditional "chardet = None" after this try/except
# (a debugging leftover) disabled the library even when installed, and
# the bare "except:" could mask unrelated errors during import.
# cjkcodecs and iconv_codec make Python know about more character encodings.
# Both are available from http://cjkpython.i18n.org/
# They're built in if you use Python 2.4.
# Optional codec packages: importing them registers extra character
# encodings as a side effect; nothing else from them is used.
# Narrowed the bare "except:" clauses to ImportError so genuine errors
# inside these packages are not silently swallowed.
try:
    import cjkcodecs.aliases
except ImportError:
    pass
try:
    import iconv_codec
except ImportError:
    pass
class UnicodeDammit:
    """A class for detecting the encoding of a *ML document and
    converting it to a Unicode string. If the source encoding is
    windows-1252, can replace MS smart quotes with their HTML or XML
    equivalents."""

    # This dictionary maps commonly seen values for "charset" in HTML
    # meta tags to the corresponding Python codec names. It only covers
    # values that aren't in Python's aliases and can't be determined
    # by the heuristics in find_codec.
    CHARSET_ALIASES = { "macintosh" : "mac-roman",
                        "x-sjis" : "shift-jis" }

    def __init__(self, markup, overrideEncodings=[],
                 smartQuotesTo='xml'):
        # NOTE(review): the mutable default for overrideEncodings is
        # harmless here (it is only iterated, never mutated), but a
        # tuple default would be safer.
        # _detectEncoding inspects BOMs and any XML declaration.
        self.markup, documentEncoding, sniffedEncoding = \
                     self._detectEncoding(markup)
        self.smartQuotesTo = smartQuotesTo
        self.triedEncodings = []
        # Already-Unicode (or empty) input needs no conversion.
        if markup == '' or isinstance(markup, unicode):
            self.originalEncoding = None
            self.unicode = unicode(markup)
            return

        # Try candidate encodings in decreasing order of trust:
        # caller-supplied overrides, then declared/sniffed encodings,
        # then chardet's guess, then utf-8/windows-1252 as last resorts.
        u = None
        for proposedEncoding in overrideEncodings:
            u = self._convertFrom(proposedEncoding)
            if u: break
        if not u:
            for proposedEncoding in (documentEncoding, sniffedEncoding):
                u = self._convertFrom(proposedEncoding)
                if u: break

        # If no luck and we have auto-detection library, try that:
        if not u and chardet and not isinstance(self.markup, unicode):
            u = self._convertFrom(chardet.detect(self.markup)['encoding'])

        # As a last resort, try utf-8 and windows-1252:
        if not u:
            for proposed_encoding in ("utf-8", "windows-1252"):
                u = self._convertFrom(proposed_encoding)
                if u: break
        self.unicode = u
        # Every attempt failed: record that no source encoding is known.
        if not u: self.originalEncoding = None

    def _subMSChar(self, orig):
        """Changes a MS smart quote character to an XML or HTML
        entity."""
        sub = self.MS_CHARS.get(orig)
        # Tuple entries are (html-entity-name, hex-codepoint); plain
        # string entries are substituted verbatim.
        if type(sub) == types.TupleType:
            if self.smartQuotesTo == 'xml':
                sub = '&#x%s;' % sub[1]
            else:
                sub = '&%s;' % sub[0]
        return sub

    def _convertFrom(self, proposed):
        """Try to decode the markup as *proposed*. Returns the Unicode
        result (also cached on self.markup) or None on failure; each
        codec is attempted at most once per instance."""
        proposed = self.find_codec(proposed)
        if not proposed or proposed in self.triedEncodings:
            return None
        self.triedEncodings.append(proposed)
        markup = self.markup

        # Convert smart quotes to HTML if coming from an encoding
        # that might have them.
        if self.smartQuotesTo and proposed.lower() in("windows-1252",
                                                      "iso-8859-1",
                                                      "iso-8859-2"):
            markup = re.compile("([\x80-\x9f])").sub \
                     (lambda(x): self._subMSChar(x.group(1)),
                      markup)

        try:
            # print "Trying to convert document to %s" % proposed
            u = self._toUnicode(markup, proposed)
            self.markup = u
            self.originalEncoding = proposed
        except Exception, e:
            # print "That didn't work!"
            # print e
            return None
        #print "Correct encoding: %s" % proposed
        return self.markup

    def _toUnicode(self, data, encoding):
        '''Given a string and its encoding, decodes the string into Unicode.
        %encoding is a string recognized by encodings.aliases'''

        # strip Byte Order Mark (if present); the BOM also overrides
        # the caller-supplied encoding, since it is unambiguous.
        if (len(data) >= 4) and (data[:2] == '\xfe\xff') \
               and (data[2:4] != '\x00\x00'):
            encoding = 'utf-16be'
            data = data[2:]
        elif (len(data) >= 4) and (data[:2] == '\xff\xfe') \
                 and (data[2:4] != '\x00\x00'):
            encoding = 'utf-16le'
            data = data[2:]
        elif data[:3] == '\xef\xbb\xbf':
            encoding = 'utf-8'
            data = data[3:]
        elif data[:4] == '\x00\x00\xfe\xff':
            encoding = 'utf-32be'
            data = data[4:]
        elif data[:4] == '\xff\xfe\x00\x00':
            encoding = 'utf-32le'
            data = data[4:]
        newdata = unicode(data, encoding)
        return newdata

    def _detectEncoding(self, xml_data):
        """Given a document, tries to detect its XML encoding."""
        xml_encoding = sniffed_xml_encoding = None
        try:
            # Sniff the byte pattern of the document prefix: BOMs, and
            # the '<?' of an XML declaration encoded in various ways.
            if xml_data[:4] == '\x4c\x6f\xa7\x94':
                # EBCDIC
                xml_data = self._ebcdic_to_ascii(xml_data)
            elif xml_data[:4] == '\x00\x3c\x00\x3f':
                # UTF-16BE
                sniffed_xml_encoding = 'utf-16be'
                xml_data = unicode(xml_data, 'utf-16be').encode('utf-8')
            elif (len(xml_data) >= 4) and (xml_data[:2] == '\xfe\xff') \
                     and (xml_data[2:4] != '\x00\x00'):
                # UTF-16BE with BOM
                sniffed_xml_encoding = 'utf-16be'
                xml_data = unicode(xml_data[2:], 'utf-16be').encode('utf-8')
            elif xml_data[:4] == '\x3c\x00\x3f\x00':
                # UTF-16LE
                sniffed_xml_encoding = 'utf-16le'
                xml_data = unicode(xml_data, 'utf-16le').encode('utf-8')
            elif (len(xml_data) >= 4) and (xml_data[:2] == '\xff\xfe') and \
                     (xml_data[2:4] != '\x00\x00'):
                # UTF-16LE with BOM
                sniffed_xml_encoding = 'utf-16le'
                xml_data = unicode(xml_data[2:], 'utf-16le').encode('utf-8')
            elif xml_data[:4] == '\x00\x00\x00\x3c':
                # UTF-32BE
                sniffed_xml_encoding = 'utf-32be'
                xml_data = unicode(xml_data, 'utf-32be').encode('utf-8')
            elif xml_data[:4] == '\x3c\x00\x00\x00':
                # UTF-32LE
                sniffed_xml_encoding = 'utf-32le'
                xml_data = unicode(xml_data, 'utf-32le').encode('utf-8')
            elif xml_data[:4] == '\x00\x00\xfe\xff':
                # UTF-32BE with BOM
                sniffed_xml_encoding = 'utf-32be'
                xml_data = unicode(xml_data[4:], 'utf-32be').encode('utf-8')
            elif xml_data[:4] == '\xff\xfe\x00\x00':
                # UTF-32LE with BOM
                sniffed_xml_encoding = 'utf-32le'
                xml_data = unicode(xml_data[4:], 'utf-32le').encode('utf-8')
            elif xml_data[:3] == '\xef\xbb\xbf':
                # UTF-8 with BOM
                sniffed_xml_encoding = 'utf-8'
                xml_data = unicode(xml_data[3:], 'utf-8').encode('utf-8')
            else:
                sniffed_xml_encoding = 'ascii'
                pass
            # Look for an explicit encoding in the XML declaration of
            # the (now UTF-8) document.
            xml_encoding_match = re.compile \
                                 ('^<\?.*encoding=[\'"](.*?)[\'"].*\?>')\
                                 .match(xml_data)
        except:
            xml_encoding_match = None
        if xml_encoding_match:
            xml_encoding = xml_encoding_match.groups()[0].lower()
            # A concrete sniffed byte order beats a generic multi-byte
            # family name declared in the prolog.
            if sniffed_xml_encoding and \
               (xml_encoding in ('iso-10646-ucs-2', 'ucs-2', 'csunicode',
                                 'iso-10646-ucs-4', 'ucs-4', 'csucs4',
                                 'utf-16', 'utf-32', 'utf_16', 'utf_32',
                                 'utf16', 'u16')):
                xml_encoding = sniffed_xml_encoding
        return xml_data, xml_encoding, sniffed_xml_encoding

    def find_codec(self, charset):
        """Map *charset* to a usable Python codec name, trying known
        aliases and hyphen/underscore variants; falls back to the
        input string if nothing resolves."""
        return self._codec(self.CHARSET_ALIASES.get(charset, charset)) \
               or (charset and self._codec(charset.replace("-", ""))) \
               or (charset and self._codec(charset.replace("-", "_"))) \
               or charset

    def _codec(self, charset):
        # Return *charset* if Python has a codec for it, else None.
        if not charset: return charset
        codec = None
        try:
            codecs.lookup(charset)
            codec = charset
        except LookupError:
            pass
        return codec

    # Lazily-built translation table; see _ebcdic_to_ascii.
    EBCDIC_TO_ASCII_MAP = None
    def _ebcdic_to_ascii(self, s):
        """Translate an EBCDIC byte string to ASCII, building the
        256-byte translation table once per class."""
        c = self.__class__
        if not c.EBCDIC_TO_ASCII_MAP:
            emap = (0,1,2,3,156,9,134,127,151,141,142,11,12,13,14,15,
                    16,17,18,19,157,133,8,135,24,25,146,143,28,29,30,31,
                    128,129,130,131,132,10,23,27,136,137,138,139,140,5,6,7,
                    144,145,22,147,148,149,150,4,152,153,154,155,20,21,158,26,
                    32,160,161,162,163,164,165,166,167,168,91,46,60,40,43,33,
                    38,169,170,171,172,173,174,175,176,177,93,36,42,41,59,94,
                    45,47,178,179,180,181,182,183,184,185,124,44,37,95,62,63,
                    186,187,188,189,190,191,192,193,194,96,58,35,64,39,61,34,
                    195,97,98,99,100,101,102,103,104,105,196,197,198,199,200,
                    201,202,106,107,108,109,110,111,112,113,114,203,204,205,
                    206,207,208,209,126,115,116,117,118,119,120,121,122,210,
                    211,212,213,214,215,216,217,218,219,220,221,222,223,224,
                    225,226,227,228,229,230,231,123,65,66,67,68,69,70,71,72,
                    73,232,233,234,235,236,237,125,74,75,76,77,78,79,80,81,
                    82,238,239,240,241,242,243,92,159,83,84,85,86,87,88,89,
                    90,244,245,246,247,248,249,48,49,50,51,52,53,54,55,56,57,
                    250,251,252,253,254,255)
            import string
            c.EBCDIC_TO_ASCII_MAP = string.maketrans( \
                ''.join(map(chr, range(256))), ''.join(map(chr, emap)))
        return s.translate(c.EBCDIC_TO_ASCII_MAP)

    # Windows-1252 "smart" punctuation bytes mapped to
    # (html-entity-name, hex-codepoint) pairs, or to a literal
    # replacement string where no sensible mapping exists.
    MS_CHARS = { '\x80' : ('euro', '20AC'),
                 '\x81' : ' ',
                 '\x82' : ('sbquo', '201A'),
                 '\x83' : ('fnof', '192'),
                 '\x84' : ('bdquo', '201E'),
                 '\x85' : ('hellip', '2026'),
                 '\x86' : ('dagger', '2020'),
                 '\x87' : ('Dagger', '2021'),
                 '\x88' : ('circ', '2C6'),
                 '\x89' : ('permil', '2030'),
                 '\x8A' : ('Scaron', '160'),
                 '\x8B' : ('lsaquo', '2039'),
                 '\x8C' : ('OElig', '152'),
                 '\x8D' : '?',
                 '\x8E' : ('#x17D', '17D'),
                 '\x8F' : '?',
                 '\x90' : '?',
                 '\x91' : ('lsquo', '2018'),
                 '\x92' : ('rsquo', '2019'),
                 '\x93' : ('ldquo', '201C'),
                 '\x94' : ('rdquo', '201D'),
                 '\x95' : ('bull', '2022'),
                 '\x96' : ('ndash', '2013'),
                 '\x97' : ('mdash', '2014'),
                 '\x98' : ('tilde', '2DC'),
                 '\x99' : ('trade', '2122'),
                 '\x9a' : ('scaron', '161'),
                 '\x9b' : ('rsaquo', '203A'),
                 '\x9c' : ('oelig', '153'),
                 '\x9d' : '?',
                 '\x9e' : ('#x17E', '17E'),
                 '\x9f' : ('Yuml', ''),}
#######################################################################
#By default, act as an HTML pretty-printer.
if __name__ == '__main__':
    # Pretty-print an HTML document read from standard input.
    import sys
    soup = BeautifulSoup(sys.stdin.read())
    print soup.prettify()
| bsd-2-clause |
jhawkesworth/ansible | test/units/modules/storage/netapp/test_netapp_e_iscsi_interface.py | 45 | 11656 | # (c) 2018, NetApp Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from mock import MagicMock
from ansible.module_utils import basic, netapp
from ansible.modules.storage.netapp import netapp_e_host
from ansible.modules.storage.netapp.netapp_e_host import Host
from ansible.modules.storage.netapp.netapp_e_iscsi_interface import IscsiInterface
from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
__metaclass__ = type
import unittest
import mock
import pytest
import json
from units.compat.mock import patch
from ansible.module_utils._text import to_bytes
class IscsiInterfaceTest(ModuleTestCase):
    """Unit tests for the netapp_e_iscsi_interface module's
    IscsiInterface class: parameter validation, interface-list
    processing, update-body generation, and the update flow (including
    check mode and API failure paths)."""

    # Minimal argument set every test starts from; individual tests
    # override fields via _set_args.
    REQUIRED_PARAMS = {
        'api_username': 'rw',
        'api_password': 'password',
        'api_url': 'http://localhost',
        'ssid': '1',
        'state': 'disabled',
        'name': 1,
        'controller': 'A',
    }

    # Dotted path of the request() function to patch.
    REQ_FUNC = 'ansible.modules.storage.netapp.netapp_e_iscsi_interface.request'

    def _set_args(self, args=None):
        """Install REQUIRED_PARAMS (optionally overridden by *args*)
        as the module arguments."""
        module_args = self.REQUIRED_PARAMS.copy()
        if args is not None:
            module_args.update(args)
        set_module_args(module_args)

    def test_validate_params(self):
        """Ensure we can pass valid parameters to the module"""
        # Provide a range of valid values for each
        for controller in ['A', 'B']:
            for i in range(1, 10):
                for mtu in [1500, 2500, 9000]:
                    self._set_args(dict(
                        state='disabled',
                        name=i,
                        controller=controller,
                        mtu=mtu,
                    ))
                    # Construction must not raise for valid input.
                    iface = IscsiInterface()

    def test_invalid_params(self):
        """Ensure that our input validation catches invalid parameters"""
        # Currently a 'C' controller is invalid
        self._set_args(dict(
            state='disabled',
            name=1,
            controller="C",
        ))
        with self.assertRaises(AnsibleFailJson) as result:
            iface = IscsiInterface()

        # Each of these mtu values are invalid
        for mtu in [500, 1499, 9001]:
            self._set_args({
                'state': 'disabled',
                'name': 1,
                'controller': 'A',
                'mtu': mtu
            })
            with self.assertRaises(AnsibleFailJson) as result:
                iface = IscsiInterface()

    def test_interfaces(self):
        """Validate that we are processing the interface list properly"""
        self._set_args()

        interfaces = [
            dict(interfaceType='iscsi',
                 iscsi=dict()),
            dict(interfaceType='iscsi',
                 iscsi=dict()),
            dict(interfaceType='fc', )
        ]

        # Ensure we filter out anything without an interfaceType of iscsi
        expected = [iface['iscsi'] for iface in interfaces if iface['interfaceType'] == 'iscsi']

        # We expect a single call to the API: retrieve the list of interfaces from the objectGraph.
        with mock.patch(self.REQ_FUNC, return_value=(200, interfaces)):
            iface = IscsiInterface()
            interfaces = iface.interfaces
            self.assertEquals(interfaces, expected)

    def test_interfaces_fail(self):
        """Ensure we fail gracefully on an error to retrieve the interfaces"""
        self._set_args()

        with self.assertRaises(AnsibleFailJson) as result:
            # Simulate a failed call to the API
            with mock.patch(self.REQ_FUNC, side_effect=Exception("Failure")):
                iface = IscsiInterface()
                interfaces = iface.interfaces

    def test_fetch_target_interface_bad_channel(self):
        """Ensure we fail correctly when a bad channel is provided"""
        self._set_args()

        # NOTE(review): list(dict(...)) yields the dict's keys, not a
        # list of dicts -- presumably sufficient for this failure-path
        # test, but confirm the intent.
        interfaces = list(dict(channel=1, controllerId='1'))

        with self.assertRaisesRegexp(AnsibleFailJson, r".*?channels include.*"):
            with mock.patch.object(IscsiInterface, 'interfaces', return_value=interfaces):
                iface = IscsiInterface()
                interfaces = iface.fetch_target_interface()

    def test_make_update_body_dhcp(self):
        """Ensure the update body generates correctly for a transition from static to dhcp"""
        self._set_args(dict(state='enabled',
                            config_method='dhcp')
                       )

        iface = dict(id='1',
                     ipv4Enabled=False,
                     ipv4Data=dict(ipv4AddressData=dict(ipv4Address="0.0.0.0",
                                                        ipv4SubnetMask="0.0.0.0",
                                                        ipv4GatewayAddress="0.0.0.0", ),
                                   ipv4AddressConfigMethod='configStatic', ),
                     interfaceData=dict(ethernetData=dict(maximumFramePayloadSize=1500, ), ),
                     )

        # Test a transition from static to dhcp
        inst = IscsiInterface()
        update, body = inst.make_update_body(iface)
        self.assertTrue(update, msg="An update was expected!")
        self.assertEquals(body['settings']['ipv4Enabled'][0], True)
        self.assertEquals(body['settings']['ipv4AddressConfigMethod'][0], 'configDhcp')

    def test_make_update_body_static(self):
        """Ensure the update body generates correctly for a transition from dhcp to static"""
        iface = dict(id='1',
                     ipv4Enabled=False,
                     ipv4Data=dict(ipv4AddressConfigMethod='configDhcp',
                                   ipv4AddressData=dict(ipv4Address="0.0.0.0",
                                                        ipv4SubnetMask="0.0.0.0",
                                                        ipv4GatewayAddress="0.0.0.0", ), ),
                     interfaceData=dict(ethernetData=dict(maximumFramePayloadSize=1500, ), ), )

        self._set_args(dict(state='enabled',
                            config_method='static',
                            address='10.10.10.10',
                            subnet_mask='255.255.255.0',
                            gateway='1.1.1.1'))

        inst = IscsiInterface()
        update, body = inst.make_update_body(iface)
        self.assertTrue(update, msg="An update was expected!")
        self.assertEquals(body['settings']['ipv4Enabled'][0], True)
        self.assertEquals(body['settings']['ipv4AddressConfigMethod'][0], 'configStatic')
        self.assertEquals(body['settings']['ipv4Address'][0], '10.10.10.10')
        self.assertEquals(body['settings']['ipv4SubnetMask'][0], '255.255.255.0')
        self.assertEquals(body['settings']['ipv4GatewayAddress'][0], '1.1.1.1')

    # Controller-label -> controller-id mapping used by the update tests.
    CONTROLLERS = dict(A='1', B='2')

    def test_update_bad_controller(self):
        """Ensure a bad controller fails gracefully"""
        self._set_args(dict(controller='B'))

        inst = IscsiInterface()
        with self.assertRaises(AnsibleFailJson) as result:
            # Only controller A exists, so requesting B must fail.
            with mock.patch.object(inst, 'get_controllers', return_value=dict(A='1')) as get_controllers:
                inst()

    @mock.patch.object(IscsiInterface, 'get_controllers', return_value=CONTROLLERS)
    def test_update(self, get_controllers):
        """Validate the good path"""
        self._set_args()

        inst = IscsiInterface()
        with self.assertRaises(AnsibleExitJson):
            with mock.patch(self.REQ_FUNC, return_value=(200, "")) as request:
                with mock.patch.object(inst, 'fetch_target_interface', side_effect=[{}, mock.MagicMock()]):
                    with mock.patch.object(inst, 'make_update_body', return_value=(True, {})):
                        inst()
        request.assert_called_once()

    @mock.patch.object(IscsiInterface, 'get_controllers', return_value=CONTROLLERS)
    def test_update_not_required(self, get_controllers):
        """Ensure we don't trigger the update if one isn't required or if check mode is enabled"""
        self._set_args()

        # make_update_body will report that no change is required, so we should see no call to the API.
        inst = IscsiInterface()
        with self.assertRaises(AnsibleExitJson) as result:
            with mock.patch(self.REQ_FUNC, return_value=(200, "")) as request:
                with mock.patch.object(inst, 'fetch_target_interface', side_effect=[{}, mock.MagicMock()]):
                    with mock.patch.object(inst, 'make_update_body', return_value=(False, {})):
                        inst()
        request.assert_not_called()
        self.assertFalse(result.exception.args[0]['changed'], msg="No change was expected.")

        # Since check_mode is enabled, we will run everything normally, but not make a request to the API
        # to perform the actual change.
        inst = IscsiInterface()
        inst.check_mode = True
        with self.assertRaises(AnsibleExitJson) as result:
            with mock.patch(self.REQ_FUNC, return_value=(200, "")) as request:
                with mock.patch.object(inst, 'fetch_target_interface', side_effect=[{}, mock.MagicMock()]):
                    with mock.patch.object(inst, 'make_update_body', return_value=(True, {})):
                        inst()
        request.assert_not_called()
        self.assertTrue(result.exception.args[0]['changed'], msg="A change was expected.")

    @mock.patch.object(IscsiInterface, 'get_controllers', return_value=CONTROLLERS)
    def test_update_fail_busy(self, get_controllers):
        """Ensure we fail correctly on receiving a busy response from the API."""
        self._set_args()

        inst = IscsiInterface()
        # HTTP 422 with retcode "3" signals the controller is busy.
        with self.assertRaisesRegexp(AnsibleFailJson, r".*?busy.*") as result:
            with mock.patch(self.REQ_FUNC, return_value=(422, dict(retcode="3"))) as request:
                with mock.patch.object(inst, 'fetch_target_interface', side_effect=[{}, mock.MagicMock()]):
                    with mock.patch.object(inst, 'make_update_body', return_value=(True, {})):
                        inst()
        request.assert_called_once()

    @mock.patch.object(IscsiInterface, 'get_controllers', return_value=CONTROLLERS)
    @mock.patch.object(IscsiInterface, 'make_update_body', return_value=(True, {}))
    def test_update_fail(self, get_controllers, make_body):
        """Ensure we fail correctly on receiving a normal failure from the API."""
        self._set_args()

        inst = IscsiInterface()
        # Test a 422 error with a non-busy status
        with self.assertRaisesRegexp(AnsibleFailJson, r".*?Failed to modify.*") as result:
            with mock.patch(self.REQ_FUNC, return_value=(422, mock.MagicMock())) as request:
                with mock.patch.object(inst, 'fetch_target_interface', side_effect=[{}, mock.MagicMock()]):
                    inst()
        request.assert_called_once()

        # Test a 401 (authentication) error
        with self.assertRaisesRegexp(AnsibleFailJson, r".*?Failed to modify.*") as result:
            with mock.patch(self.REQ_FUNC, return_value=(401, mock.MagicMock())) as request:
                with mock.patch.object(inst, 'fetch_target_interface', side_effect=[{}, mock.MagicMock()]):
                    inst()
        request.assert_called_once()

        # Test with a connection failure
        with self.assertRaisesRegexp(AnsibleFailJson, r".*?Connection failure.*") as result:
            with mock.patch(self.REQ_FUNC, side_effect=Exception()) as request:
                with mock.patch.object(inst, 'fetch_target_interface', side_effect=[{}, mock.MagicMock()]):
                    inst()
        request.assert_called_once()
| gpl-3.0 |
ArcEye/machinekit-testing | lib/python/drivers/ADS7828.py | 12 | 2774 | #!/usr/bin/python
import smbus
class ADS7828:
    """Minimal driver for the TI ADS7828 12-bit, 8-channel I2C ADC.

    Only single-ended conversions with the internal reference off and
    the A/D converter on are implemented (see readChannel).
    """

    # Command-byte fields: SD (single-ended/differential), C2..C0
    # (channel select) and PD1..PD0 (power-down mode) bits.
    __ADS7828_CONFIG_SD_DIFFERENTIAL = 0b00000000
    __ADS7828_CONFIG_SD_SINGLE = 0b10000000
    __ADS7828_CONFIG_CS_CH0 = 0b00000000
    __ADS7828_CONFIG_CS_CH2 = 0b00010000
    __ADS7828_CONFIG_CS_CH4 = 0b00100000
    __ADS7828_CONFIG_CS_CH6 = 0b00110000
    __ADS7828_CONFIG_CS_CH1 = 0b01000000
    __ADS7828_CONFIG_CS_CH3 = 0b01010000
    __ADS7828_CONFIG_CS_CH5 = 0b01100000
    __ADS7828_CONFIG_CS_CH7 = 0b01110000
    __ADS7828_CONFIG_PD_OFF = 0b00000000
    __ADS7828_CONFIG_PD_REFOFF_ADON = 0b00000100
    __ADS7828_CONFIG_PD_REFON_ADOFF = 0b00001000
    __ADS7828_CONFIG_PD_REFON_ADON = 0b00001100

    def __init__(self, address=0x48, busId=2, debug=False):
        """Open the I2C bus *busId*; *address* is the chip's 7-bit
        I2C address (0x48 is the all-address-pins-low default)."""
        self.i2c = smbus.SMBus(busId)
        self.address = address
        self.debug = debug

    def _config_for_channel(self, ch):
        """Return the command byte selecting single-ended channel *ch*
        (0-7) with the internal reference off and the converter on.

        Raises ValueError for out-of-range channels. (Previously an
        invalid channel silently fell through and read channel 0.)
        """
        if not 0 <= ch <= 7:
            raise ValueError("channel must be in range 0-7, got %r" % (ch,))
        # Channel-select bit patterns indexed by channel number; the
        # C2..C0 encoding is non-sequential, hence the explicit table.
        selectors = (
            self.__ADS7828_CONFIG_CS_CH0,
            self.__ADS7828_CONFIG_CS_CH1,
            self.__ADS7828_CONFIG_CS_CH2,
            self.__ADS7828_CONFIG_CS_CH3,
            self.__ADS7828_CONFIG_CS_CH4,
            self.__ADS7828_CONFIG_CS_CH5,
            self.__ADS7828_CONFIG_CS_CH6,
            self.__ADS7828_CONFIG_CS_CH7,
        )
        return (self.__ADS7828_CONFIG_SD_SINGLE
                | self.__ADS7828_CONFIG_PD_REFOFF_ADON
                | selectors[ch])

    def readChannel(self, ch):
        """Perform one single-ended conversion on channel *ch* (0-7)
        and return the raw 12-bit result (0-4095).

        Raises ValueError if *ch* is outside 0-7."""
        config = self._config_for_channel(ch)
        data = self.i2c.read_i2c_block_data(self.address, config, 2)
        # The conversion result arrives MSB first.
        return (data[0] << 8) + data[1]
#adc = ADS7828(0x48, 2, True)
#test = []
#r2temp = R2Temp("semitec_103GT_2")
#while True:
# #print(("ch0: " + str(adc.readChannel(0))))
# time.sleep(0.3)
# print(("ch0: " + str(adc.readChannel(0))))
# print(("ch1: " + str(adc.readChannel(1))))
# print(("ch2: " + str(adc.readChannel(2))))
# print(("ch3: " + str(adc.readChannel(3))))
# print(("ch4: " + str(adc.readChannel(4))))
# print(("ch5: " + str(adc.readChannel(5))))
# print(("ch6: " + str(adc.readChannel(6))))
# print(("ch7: " + str(adc.readChannel(7))))
# print(("---------------------------"))
# adcValue = float(adc.readChannel(0))
# test.append(adcValue)
# if (len(test) > 20):
# test.pop(0)
# sum = 0.0
# for value in test:
# sum += value
# sum /= len(test)
# print sum
# print 4095.0/sum
# R1=4700.0
# R2=R1/(max(4095.0/sum,0.000001)-1)
# print round(r2temp.r2t(R2)*10.0)/10.0 | lgpl-2.1 |
tedder/ansible | test/units/modules/network/edgeswitch/test_edgeswitch_vlan.py | 29 | 5640 | # (c) 2018 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from units.compat.mock import patch
from ansible.modules.network.edgeswitch import edgeswitch_vlan
from ansible.modules.network.edgeswitch.edgeswitch_vlan import parse_vlan_brief, parse_interfaces_switchport
from units.modules.utils import set_module_args
from .edgeswitch_module import TestEdgeswitchModule, load_fixture
class TestEdgeswitchVlanModule(TestEdgeswitchModule):
    """Unit tests for the edgeswitch_vlan module: command generation
    for VLAN create/rename and interface membership, plus the
    show-output parsers."""

    module = edgeswitch_vlan

    def setUp(self):
        # Patch out device I/O: run_commands (reads) and load_config
        # (writes) are replaced with mocks for every test.
        super(TestEdgeswitchVlanModule, self).setUp()

        self.mock_run_commands = patch('ansible.modules.network.edgeswitch.edgeswitch_vlan.run_commands')
        self.run_commands = self.mock_run_commands.start()

        self.mock_load_config = patch('ansible.modules.network.edgeswitch.edgeswitch_vlan.load_config')
        self.load_config = self.mock_load_config.start()

    def tearDown(self):
        super(TestEdgeswitchVlanModule, self).tearDown()
        self.mock_run_commands.stop()
        self.mock_load_config.stop()

    def load_fixtures(self, commands=None):
        def load_from_file(*args, **kwargs):
            # Answer each issued command from a fixture file named
            # after the command; configuration commands ('vlan ...',
            # 'exit') produce empty output.
            module, commands = args
            output = list()

            for command in commands:
                if command.startswith('vlan ') or command == 'exit':
                    output.append('')
                else:
                    filename = str(command).split(' | ')[0].replace(' ', '_')
                    output.append(load_fixture('edgeswitch_vlan_%s' % filename))
            return output

        self.run_commands.side_effect = load_from_file
        self.load_config.return_value = {}

    def test_edgeswitch_vlan_create(self):
        # Creating a VLAN absent from the fixture emits the full
        # database create + name sequence.
        set_module_args({'vlan_id': '200', 'name': 'video', 'state': 'present'})
        result = self.execute_module(changed=True)
        expected_commands = [
            'vlan database',
            'vlan 200',
            'vlan name 200 \"video\"',
            'exit'
        ]
        self.assertEqual(result['commands'], expected_commands)

    def test_edgeswitch_vlan_id_startwith_100(self):
        # VLAN 100/'voice' already exists in the fixture: no commands.
        set_module_args({'vlan_id': '100', 'name': 'voice', 'state': 'present'})
        result = self.execute_module(changed=False)
        expected_commands = []
        self.assertEqual(result['commands'], expected_commands)

    def test_edgeswitch_vlan_rename(self):
        # Existing VLAN with a different name: only the rename command.
        set_module_args({'vlan_id': '100', 'name': 'video', 'state': 'present'})
        result = self.execute_module(changed=True)
        expected_commands = [
            'vlan database',
            'vlan name 100 \"video\"',
            'exit'
        ]
        self.assertEqual(result['commands'], expected_commands)

    def test_edgeswitch_vlan_with_interfaces_range(self):
        # An interface range ('0/6-0/8') is passed through verbatim.
        set_module_args({'vlan_id': '100', 'name': 'voice', 'state': 'present', 'tagged_interfaces': ['0/6-0/8']})
        result = self.execute_module(changed=True)
        expected_commands = [
            'interface 0/6-0/8',
            'vlan participation include 100',
            'vlan tagging 100',
        ]
        self.assertEqual(result['commands'], expected_commands)

    def test_edgeswitch_vlan_with_interfaces_and_newvlan(self):
        # Individual interfaces are collapsed into a sorted range
        # ('0/8', '0/7' -> '0/7-0/8') after the VLAN is created.
        set_module_args({'vlan_id': '3', 'name': 'vlan3', 'state': 'present', 'untagged_interfaces': ['0/8', '0/7']})
        result = self.execute_module(changed=True)
        expected_commands = [
            'vlan database',
            'vlan 3',
            'vlan name 3 \"vlan3\"',
            'exit',
            'interface 0/7-0/8',
            'vlan participation include 3',
            'vlan pvid 3',
        ]
        self.assertEqual(result['commands'], expected_commands)

    def test_parse_interfaces_switchport(self):
        # Parser fixture covers untagged, tagged and forbidden VLAN
        # membership per interface.
        result = parse_interfaces_switchport(load_fixture('edgeswitch_vlan_show_interfaces_switchport'))
        i1 = {
            'interface': '0/1',
            'pvid_mode': '1',
            'untagged_vlans': ['1'],
            'tagged_vlans': ['100'],
            'forbidden_vlans': [''],
        }
        i3 = {
            'interface': '0/3',
            'pvid_mode': '1',
            'untagged_vlans': [''],
            'tagged_vlans': ['100'],
            'forbidden_vlans': ['1'],
        }
        i5 = {
            'interface': '0/5',
            'pvid_mode': '100',
            'untagged_vlans': ['100'],
            'tagged_vlans': [''],
            'forbidden_vlans': [''],
        }
        self.assertEqual(result['0/1'], i1)
        self.assertEqual(result['0/3'], i3)
        self.assertEqual(result['0/5'], i5)

    def test_parse_vlan_brief(self):
        result = parse_vlan_brief(load_fixture('edgeswitch_vlan_show_vlan_brief'))
        obj = [
            {
                'vlan_id': '1',
                'name': 'default'
            },
            {
                'vlan_id': '100',
                'name': 'voice'
            }
        ]
        self.assertEqual(result, obj)
| gpl-3.0 |
AltSchool/django-allauth | allauth/socialaccount/providers/odnoklassniki/views.py | 10 | 2186 | import requests
from hashlib import md5
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter,
OAuth2CallbackView,
OAuth2LoginView,
)
from .provider import OdnoklassnikiProvider
# Profile fields requested from the Odnoklassniki REST API
# (users.getCurrentUser); joined into the "fields" request parameter.
USER_FIELDS = ['uid',
               'locale',
               'first_name',
               'last_name',
               'name',
               'gender',
               'age',
               'birthday',
               'has_email',
               'current_status',
               'current_status_id',
               'current_status_date',
               'online',
               'photo_id',
               'pic_1',  # aka pic50x50
               'pic_2',  # aka pic128max
               'pic190x190',  # small
               'pic640x480',  # medium
               'pic1024x768',  # big
               'location']
class OdnoklassnikiOAuth2Adapter(OAuth2Adapter):
    """OAuth2 adapter for Odnoklassniki (ok.ru)."""

    provider_id = OdnoklassnikiProvider.id
    access_token_url = 'http://api.odnoklassniki.ru/oauth/token.do'
    authorize_url = 'http://www.odnoklassniki.ru/oauth/authorize'
    profile_url = 'http://api.odnoklassniki.ru/fb.do'
    access_token_method = 'POST'

    def complete_login(self, request, app, token, **kwargs):
        """Fetch the current user's profile and build a SocialLogin.

        The request is signed per the OK REST API rules: ``sig`` is
        md5(<sorted "k=v" pairs, excluding access_token> + md5(token+secret)).
        """
        params = {'method': 'users.getCurrentUser',
                  'access_token': token.token,
                  'fields': ','.join(USER_FIELDS),
                  'format': 'JSON',
                  'application_key': app.key}
        secret_seed = '{0:s}{1:s}'.format(params['access_token'], app.secret)
        suffix = md5(secret_seed.encode('utf-8')).hexdigest()
        pairs = ['{0:s}={1:s}'.format(key, value)
                 for key, value in params.items()
                 if key != 'access_token']
        pairs.sort()
        params['sig'] = md5((''.join(pairs) + suffix).encode('utf-8')).hexdigest()
        extra_data = requests.get(self.profile_url, params=params).json()
        provider = self.get_provider()
        return provider.sociallogin_from_response(request, extra_data)
# Module-level login/callback views wired to the adapter above;
# these are what the provider's urlpatterns reference.
oauth2_login = OAuth2LoginView.adapter_view(OdnoklassnikiOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(OdnoklassnikiOAuth2Adapter)
| mit |
peeringdb/django-peeringdb | django_peeringdb/const.py | 1 | 4145 | from django.utils.translation import gettext_lazy as _
# All constants below are Django "choices" tuples: (stored value,
# translated display label). Several lists deliberately map both ""
# and "Not Disclosed" to the same label for legacy data compatibility.

# Physical media type choices.
MEDIA = (("Ethernet", _("Ethernet")), ("ATM", _("ATM")), ("Multiple", _("Multiple")))

# Point-of-contact role choices.
POC_ROLES = (
    ("Abuse", _("Abuse")),
    ("Maintenance", _("Maintenance")),
    ("Policy", _("Policy")),
    ("Technical", _("Technical")),
    ("NOC", _("NOC")),
    ("Public Relations", _("Public Relations")),
    ("Sales", _("Sales")),
)

# General peering policy choices.
POLICY_GENERAL = (
    ("Open", _("Open")),
    ("Selective", _("Selective")),
    ("Restrictive", _("Restrictive")),
    ("No", _("No")),
)

# Multiple-location requirement choices for peering.
POLICY_LOCATIONS = (
    ("Not Required", _("Not Required")),
    ("Preferred", _("Preferred")),
    ("Required - US", _("Required - US")),
    ("Required - EU", _("Required - EU")),
    ("Required - International", _("Required - International")),
)

# Contract requirement choices for peering.
POLICY_CONTRACTS = (
    ("Not Required", _("Not Required")),
    ("Private Only", _("Private Only")),
    ("Required", _("Required")),
)

# IP protocol choices.
PROTOCOLS = (
    ("IPv4", _("IPv4")),
    ("IPv6", _("IPv6")),
)

# Traffic ratio choices.
RATIOS = (
    ("", _("Not Disclosed")),
    ("Not Disclosed", _("Not Disclosed")),
    ("Heavy Outbound", _("Heavy Outbound")),
    ("Mostly Outbound", _("Mostly Outbound")),
    ("Balanced", _("Balanced")),
    ("Mostly Inbound", _("Mostly Inbound")),
    ("Heavy Inbound", _("Heavy Inbound")),
)

# Geographic region choices.
REGIONS = (
    ("North America", _("North America")),
    ("Asia Pacific", _("Asia Pacific")),
    ("Europe", _("Europe")),
    ("South America", _("South America")),
    ("Africa", _("Africa")),
    ("Australia", _("Australia")),
    ("Middle East", _("Middle East")),
)

# Geographic scope choices (REGIONS plus Regional/Global/undisclosed).
SCOPES = (
    ("", _("Not Disclosed")),
    ("Not Disclosed", _("Not Disclosed")),
    ("Regional", _("Regional")),
    ("North America", _("North America")),
    ("Asia Pacific", _("Asia Pacific")),
    ("Europe", _("Europe")),
    ("South America", _("South America")),
    ("Africa", _("Africa")),
    ("Australia", _("Australia")),
    ("Middle East", _("Middle East")),
    ("Global", _("Global")),
)

# Aggregate traffic level choices (banded ranges).
TRAFFIC = (
    ("", _("Not Disclosed")),
    ("0-20Mbps", _("0-20Mbps")),
    ("20-100Mbps", _("20-100Mbps")),
    ("100-1000Mbps", _("100-1000Mbps")),
    ("1-5Gbps", _("1-5Gbps")),
    ("5-10Gbps", _("5-10Gbps")),
    ("10-20Gbps", _("10-20Gbps")),
    ("20-50Gbps", _("20-50Gbps")),
    ("50-100Gbps", _("50-100Gbps")),
    ("100-200Gbps", _("100-200Gbps")),
    ("200-300Gbps", _("200-300Gbps")),
    ("300-500Gbps", _("300-500Gbps")),
    ("500-1000Gbps", _("500-1000Gbps")),
    ("1-5Tbps", _("1-5Tbps")),
    ("5-10Tbps", _("5-10Tbps")),
    ("10-20Tbps", _("10-20Tbps")),
    ("20-50Tbps", _("20-50Tbps")),
    ("50-100Tbps", _("50-100Tbps")),
    ("100+Tbps", _("100+Tbps")),
)

# Network type choices.
NET_TYPES = (
    ("", _("Not Disclosed")),
    ("Not Disclosed", _("Not Disclosed")),
    ("NSP", _("NSP")),
    ("Content", _("Content")),
    ("Cable/DSL/ISP", _("Cable/DSL/ISP")),
    ("Enterprise", _("Enterprise")),
    ("Educational/Research", _("Educational/Research")),
    ("Non-Profit", _("Non-Profit")),
    ("Route Server", _("Route Server")),
    ("Network Services", _("Network Services")),
    ("Route Collector", _("Route Collector")),
    ("Government", _("Government")),
)

# Visibility choices (e.g. for contact data).
VISIBILITY = (
    ("Private", _("Private")),
    # ('Peers', _('Peers')),
    ("Users", _("Users")),
    ("Public", _("Public")),
)

# Help text shown next to phone-number fields.
PHONE_HELP_TEXT = _(
    "An E.164-formatted phone number starts with a +, "
    "followed by the country code, then the national phone number "
    "(dropping the leading 0 in most countries), without spaces "
    "or dashes between the groups of digits"
)

# Support service-level choices.
SERVICE_LEVEL_TYPES = (
    ("", _("Not Disclosed")),
    ("Not Disclosed", _("Not Disclosed")),
    ("Best Effort (no SLA)", _("Best Effort (no SLA)")),
    ("Normal Business Hours", _("Normal Business Hours")),
    ("24/7 Support", _("24/7 Support")),
)

# Commercial terms choices.
TERMS_TYPES = (
    ("", _("Not Disclosed")),
    ("Not Disclosed", _("Not Disclosed")),
    ("No Commercial Terms", _("No Commercial Terms")),
    ("Bundled With Other Services", _("Bundled With Other Services")),
    ("Non-recurring Fees Only", _("Non-recurring Fees Only")),
    ("Recurring Fees", _("Recurring Fees")),
)
| apache-2.0 |
FrodeSolheim/fs-uae-launcher | amitools/binfmt/hunk/HunkReader.py | 1 | 30202 | """A class for reading Amiga executables and object files in Hunk format"""
import os
import struct
import io
from types import *
from .Hunk import *
class HunkReader:
    """Load Amiga executable Hunk structures"""

    def __init__(self):
        # raw hunk dicts in file order
        self.hunks = []
        # description of the last error (None while everything is ok)
        self.error_string = None
        # file type (set by build_segments)
        self.type = None
        # HUNK_HEADER hunk of a load file
        self.header = None
        # segments built from the hunks (load files)
        self.segments = []
        # HUNK_OVERLAY hunk plus per-overlay headers/segments, if any
        self.overlay = None
        self.overlay_headers = None
        self.overlay_segments = None
        # library structures (HUNK_LIB files)
        self.libs = None
        # unit structures (HUNK_UNIT object files)
        self.units = None
def get_struct_summary(self, obj):
if type(obj) == ListType:
result = []
for a in obj:
v = self.get_struct_summary(a)
if v != None:
result.append(v)
return "[" + ",".join(result) + "]"
elif type(obj) == DictType:
if 'type_name' in obj:
type_name = obj['type_name']
return type_name.replace('HUNK_','')
else:
result = []
for k in list(obj.keys()):
v = self.get_struct_summary(obj[k])
if v != None:
result.append(k + ":" + v)
return '{' + ",".join(result) + '}'
else:
return None
def get_long(self, data):
return struct.unpack(">I",data)[0]
def read_long(self, f):
data = f.read(4)
if len(data) == 0:
return -1
elif len(data) != 4:
return -(len(data)+1)
return struct.unpack(">I",data)[0]
def read_word(self, f):
data = f.read(2)
if len(data) == 0:
return -1
elif len(data) != 2:
return -(len(data)+1)
return struct.unpack(">H",data)[0]
    def read_name(self, f):
        """Read a longword-count-prefixed, NUL-terminated name string.

        Returns (size_in_bytes, name); (-1, None) on a read error and
        (0, "") for an empty name.
        """
        num_longs = self.read_long(f)
        if num_longs < 0:
            return -1, None
        elif num_longs == 0:
            return 0, ""
        else:
            return self.read_name_size(f, num_longs)

    def read_tag(self, f):
        """Read a 4-byte tag; returns bytes, or a negative int on EOF/short read."""
        data = f.read(4)
        if len(data) == 0:
            return -1;
        elif len(data) != 4:
            return -(len(data) + 1)
        return data
def read_name_size(self, f, num_longs):
size = (num_longs & 0xffffff) * 4
data = f.read(size)
if len(data) < size:
return -1,None
endpos = data.find(b'\0')
if endpos == -1:
return size,data.decode('latin-1')
elif endpos == 0:
return 0,""
else:
return size,data[:endpos].decode('latin-1')
def get_index_name(self, strtab, offset):
end = strtab.find(b'\0',offset)
if end == -1:
return strtab[offset:].decode('latin-1')
else:
return strtab[offset:end].decode('latin-1')
def is_valid_first_hunk_type(self, hunk_type):
return hunk_type == HUNK_HEADER or hunk_type == HUNK_LIB or hunk_type == HUNK_UNIT
    def parse_header(self, f, hunk):
        """Parse a HUNK_HEADER: resident library names, hunk table range and
        the per-hunk allocation sizes with their memory flags.

        Returns RESULT_OK or RESULT_INVALID_HUNK_FILE (error_string set).
        """
        names = []
        hunk['names'] = names
        while True:
            l, s = self.read_name(f)
            if l < 0:
                self.error_string = "Error parsing HUNK_HEADER names"
                return RESULT_INVALID_HUNK_FILE
            elif l == 0:
                # an empty name terminates the list
                break
            names.append(s)

        # table size and hunk range
        table_size = self.read_long(f)
        first_hunk = self.read_long(f)
        last_hunk = self.read_long(f)
        if table_size < 0 or first_hunk < 0 or last_hunk < 0:
            self.error_string = "HUNK_HEADER invalid table_size or first_hunk or last_hunk"
            return RESULT_INVALID_HUNK_FILE
        hunk['table_size'] = table_size
        hunk['first_hunk'] = first_hunk
        hunk['last_hunk'] = last_hunk

        # determine number of hunks in size table
        num_hunks = last_hunk - first_hunk + 1
        hunk_table = []
        for a in range(num_hunks):
            hunk_info = {}
            hunk_size = self.read_long(f)
            if hunk_size < 0:
                self.error_string = "HUNK_HEADER contains invalid hunk_size"
                return RESULT_INVALID_HUNK_FILE
            # top bits of the size longword carry memory placement flags
            hunk_bytes = hunk_size & ~HUNKF_ALL
            hunk_bytes *= 4  # longs to bytes
            hunk_info['size'] = hunk_bytes
            self.set_mem_flags(hunk_info, hunk_size & HUNKF_ALL, 30)
            hunk_table.append(hunk_info)
        hunk['hunks'] = hunk_table
        return RESULT_OK
    def parse_code_or_data(self, f, hunk):
        """Parse HUNK_CODE/HUNK_DATA/HUNK_PPC_CODE: size longword plus payload."""
        num_longs = self.read_long(f)
        if num_longs < 0:
            self.error_string = "%s has invalid size" % (hunk['type_name'])
            return RESULT_INVALID_HUNK_FILE

        # read in hunk data; top bits of the size are memory flags
        size = num_longs * 4
        hunk['size'] = size & ~HUNKF_ALL
        flags = size & HUNKF_ALL
        self.set_mem_flags(hunk, flags, 30)
        hunk['data_file_offset'] = f.tell()
        data = f.read(hunk['size'])
        hunk['data'] = data
        return RESULT_OK

    def parse_bss(self, f, hunk):
        """Parse HUNK_BSS: only a size longword, no payload follows in the file."""
        num_longs = self.read_long(f)
        if num_longs < 0:
            self.error_string = "%s has invalid size" % (hunk['type_name'])
            return RESULT_INVALID_HUNK_FILE

        # size with memory flags in the top bits, as for code/data
        size = num_longs * 4
        hunk['size'] = size & ~HUNKF_ALL
        flags = size & HUNKF_ALL
        self.set_mem_flags(hunk, flags, 30)
        return RESULT_OK
    def parse_reloc(self, f, hunk):
        """Parse a 32-bit relocation hunk into hunk['reloc'].

        The map is {target_hunk_number: [offsets...]}; a zero relocation
        count terminates the table.
        """
        num_relocs = 1
        reloc = {}
        hunk['reloc'] = reloc
        while num_relocs != 0:
            num_relocs = self.read_long(f)
            if num_relocs < 0:
                self.error_string = "%s has invalid number of relocations" % (hunk['type_name'])
                return RESULT_INVALID_HUNK_FILE
            elif num_relocs == 0:
                # last relocation found
                break

            # build reloc map
            hunk_num = self.read_long(f)
            if hunk_num < 0:
                self.error_string = "%s has invalid hunk num" % (hunk['type_name'])
                return RESULT_INVALID_HUNK_FILE
            offsets = []
            for a in range(num_relocs & 0xffff):
                offset = self.read_long(f)
                if offset < 0:
                    self.error_string = "%s has invalid relocation #%d offset %d (num_relocs=%d hunk_num=%d, offset=%d)" \
                        % (hunk['type_name'], a, offset, num_relocs, hunk_num, f.tell())
                    return RESULT_INVALID_HUNK_FILE
                offsets.append(offset)
            reloc[hunk_num] = offsets
        return RESULT_OK

    def parse_reloc_short(self, f, hunk):
        """Parse a 16-bit relocation hunk (HUNK_RELOC32SHORT) into hunk['reloc'].

        Same layout as parse_reloc() but with word-sized entries; the total
        word count is tracked so a trailing pad word can be consumed to keep
        the stream longword aligned.
        """
        num_relocs = 1
        reloc = {}
        hunk['reloc'] = reloc
        total_words = 0
        while num_relocs != 0:
            num_relocs = self.read_word(f)
            if num_relocs < 0:
                self.error_string = "%s has invalid number of relocations" % (hunk['type_name'])
                return RESULT_INVALID_HUNK_FILE
            elif num_relocs == 0:
                # last relocation found
                total_words += 1
                break

            # build reloc map
            hunk_num = self.read_word(f)
            if hunk_num < 0:
                self.error_string = "%s has invalid hunk num" % (hunk['type_name'])
                return RESULT_INVALID_HUNK_FILE
            offsets = []
            count = num_relocs & 0xffff
            total_words += count + 2
            for a in range(count):
                offset = self.read_word(f)
                if offset < 0:
                    self.error_string = "%s has invalid relocation #%d offset %d (num_relocs=%d hunk_num=%d, offset=%d)" \
                        % (hunk['type_name'], a, offset, num_relocs, hunk_num, f.tell())
                    return RESULT_INVALID_HUNK_FILE
                offsets.append(offset)
            reloc[hunk_num] = offsets

        # padding word to reach longword alignment
        if total_words & 1 == 1:
            self.read_word(f)
        return RESULT_OK
    def parse_symbol(self, f, hunk):
        """Parse a HUNK_SYMBOL table into hunk['symbols'] as (name, value) pairs.

        An empty name terminates the table.
        """
        name_len = 1
        symbols = []
        hunk['symbols'] = symbols
        while name_len > 0:
            (name_len, name) = self.read_name(f)
            if name_len < 0:
                self.error_string = "%s has invalid symbol name" % (hunk['type_name'])
                return RESULT_INVALID_HUNK_FILE
            elif name_len == 0:
                # last name occurred
                break
            value = self.read_long(f)
            if value < 0:
                self.error_string = "%s has invalid symbol value" % (hunk['type_name'])
                return RESULT_INVALID_HUNK_FILE
            symbols.append((name, value))
        return RESULT_OK
def parse_debug(self, f, hunk):
num_longs = self.read_long(f)
if num_longs < 0:
self.error_string = "%s has invalid size" % (hunk['type_name'])
return RESULT_INVALID_HUNK_FILE
size = num_longs * 4
offset = self.read_long(f)
hunk['debug_offset'] = offset;
tag = self.read_tag(f)
hunk['debug_type'] = tag;
size -= 8
if tag == 'LINE':
# parse LINE: source line -> code offset mapping
l = self.read_long(f)
size -= l * 4 + 4;
l,n = self.read_name_size(f,l)
src_map = []
hunk['src_file'] = n
hunk['src_map'] = src_map
while size > 0:
line_no = self.read_long(f)
offset = self.read_long(f)
size -= 8
src_map.append([line_no,offset])
else:
# read unknown DEBUG hunk
hunk['data'] = f.read(size)
return RESULT_OK
def find_first_code_hunk(self):
for hunk in self.hunks:
if hunk['type'] == HUNK_CODE:
return hunk
return None
    def parse_overlay(self, f, hunk):
        """Parse HUNK_OVERLAY and sanity-check the overlay manager code.

        Stores the raw overlay table in hunk['ov_data'] and flags whether
        the standard AmigaOS overlay manager is used in hunk['ov_std'].
        """
        # read size of overlay hunk
        ov_size = self.read_long(f)
        if ov_size < 0:
            self.error_string = "%s has invalid size" % (hunk['type_name'])
            return RESULT_INVALID_HUNK_FILE

        # read data of overlay (size field counts longs, exclusive of itself)
        byte_size = (ov_size + 1) * 4
        ov_data = f.read(byte_size)
        hunk['ov_data'] = ov_data

        # check: first get header hunk
        hdr_hunk = self.hunks[0]
        if hdr_hunk['type'] != HUNK_HEADER:
            self.error_string = "%s has no header hunk" % (hunk['type_name'])
            return RESULT_INVALID_HUNK_FILE

        # first find the code segment of the overlay manager
        overlay_mgr_hunk = self.find_first_code_hunk()
        if overlay_mgr_hunk == None:
            self.error_string = "%s has no overlay manager hunk" % (hunk['type_name'])
            return RESULT_INVALID_HUNK_FILE

        # check overlay manager magic word
        overlay_mgr_data = overlay_mgr_hunk['data']
        magic = self.get_long(overlay_mgr_data[4:8])
        if magic != 0xabcd:
            self.error_string = "no valid overlay manager magic found"
            return RESULT_INVALID_HUNK_FILE

        # check for standard overlay manager (signature words at fixed offsets)
        magic2 = self.get_long(overlay_mgr_data[24:28])
        magic3 = self.get_long(overlay_mgr_data[28:32])
        magic4 = self.get_long(overlay_mgr_data[32:36])
        std_overlay = (magic2 == 0x5ba0) and (magic3 == 0x074f7665) and (magic4 == 0x726c6179)
        hunk['ov_std'] = std_overlay
        return RESULT_OK
    def parse_lib(self, f, hunk):
        """Parse HUNK_LIB: record its file offset and return (RESULT_OK, size_in_bytes)."""
        lib_size = self.read_long(f)
        hunk['lib_file_offset'] = f.tell()
        return RESULT_OK, lib_size * 4

    def parse_index(self, f, hunk):
        """Parse a HUNK_INDEX: the table of contents of a HUNK_LIB.

        Fills hunk['units'] with, per unit: its name, the offset of its
        first hunk inside the lib, and per-hunk info including references
        and definitions. Names are looked up in the index's string table.
        """
        index_size = self.read_long(f)
        total_size = index_size * 4

        # first read string table
        strtab_size = self.read_word(f)
        strtab = f.read(strtab_size)
        total_size -= strtab_size + 2

        # read units until only the alignment pad can remain
        units = []
        hunk['units'] = units
        unit_no = 0
        while total_size > 2:
            # read name of unit
            name_offset = self.read_word(f)
            total_size -= 2

            unit = {}
            units.append(unit)
            unit['unit_no'] = unit_no
            unit_no += 1

            # generate unit name
            unit['name'] = self.get_index_name(strtab, name_offset)

            # hunks in unit
            hunk_begin = self.read_word(f)
            num_hunks = self.read_word(f)
            total_size -= 4
            unit['hunk_begin_offset'] = hunk_begin

            # for all hunks in unit
            ihunks = []
            unit['hunk_infos'] = ihunks
            for a in range(num_hunks):
                ihunk = {}
                ihunks.append(ihunk)

                # get hunk info
                name_offset = self.read_word(f)
                hunk_size = self.read_word(f)
                hunk_type = self.read_word(f)
                total_size -= 6
                ihunk['name'] = self.get_index_name(strtab, name_offset)
                ihunk['size'] = hunk_size
                # top two bits of the type word are memory flags
                ihunk['type'] = hunk_type & 0x3fff
                self.set_mem_flags(ihunk, hunk_type & 0xc000, 14)
                ihunk['type_name'] = hunk_names[hunk_type & 0x3fff]

                # get references
                num_refs = self.read_word(f)
                total_size -= 2
                if num_refs > 0:
                    refs = []
                    ihunk['refs'] = refs
                    for b in range(num_refs):
                        ref = {}
                        name_offset = self.read_word(f)
                        total_size -= 2
                        name = self.get_index_name(strtab, name_offset)
                        if name == '':
                            # 16 bit refs point to the previous zero byte before the string entry...
                            name = self.get_index_name(strtab, name_offset + 1)
                            ref['bits'] = 16
                        else:
                            ref['bits'] = 32
                        ref['name'] = name
                        refs.append(ref)

                # get definitions
                num_defs = self.read_word(f)
                total_size -= 2
                if num_defs > 0:
                    defs = []
                    ihunk['defs'] = defs
                    for b in range(num_defs):
                        name_offset = self.read_word(f)
                        def_value = self.read_word(f)
                        def_type_flags = self.read_word(f)
                        def_type = def_type_flags & 0x3fff
                        def_flags = def_type_flags & 0xc000
                        total_size -= 6
                        name = self.get_index_name(strtab, name_offset)
                        d = {'name': name, 'value': def_value, 'type': def_type}
                        self.set_mem_flags(d, def_flags, 14)
                        defs.append(d)

        # align hunk to a longword boundary
        if total_size == 2:
            self.read_word(f)
        elif total_size != 0:
            self.error_string = "%s has invalid padding: %d" % (hunk['type_name'], total_size)
            return RESULT_INVALID_HUNK_FILE
        return RESULT_OK
    def parse_ext(self, f, hunk):
        """Parse HUNK_EXT: external definitions, references and common blocks.

        Entries are sorted into hunk['ext_def'], hunk['ext_ref'] and
        hunk['ext_common']; an empty name terminates the hunk.
        """
        ext_def = []
        ext_ref = []
        ext_common = []
        hunk['ext_def'] = ext_def
        hunk['ext_ref'] = ext_ref
        hunk['ext_common'] = ext_common
        ext_type_size = 1
        while ext_type_size > 0:
            # ext type | size (type in the top byte, name size in longs below)
            ext_type_size = self.read_long(f)
            if ext_type_size < 0:
                self.error_string = "%s has invalid size" % (hunk['type_name'])
                return RESULT_INVALID_HUNK_FILE
            ext_type = ext_type_size >> EXT_TYPE_SHIFT
            ext_size = ext_type_size & EXT_TYPE_SIZE_MASK

            # ext name
            l, ext_name = self.read_name_size(f, ext_size)
            if l < 0:
                self.error_string = "%s has invalid name" % (hunk['type_name'])
                return RESULT_INVALID_HUNK_FILE
            elif l == 0:
                break

            # create local ext object
            ext = {'type': ext_type, 'name': ext_name}

            # check and setup type name
            if ext_type not in ext_names:
                self.error_string = "%s has unspported ext entry %d" % (hunk['type_name'], ext_type)
                return RESULT_INVALID_HUNK_FILE
            ext['type_name'] = ext_names[ext_type]

            # ext common: size of the common block follows
            if ext_type == EXT_ABSCOMMON or ext_type == EXT_RELCOMMON:
                ext['common_size'] = self.read_long(f)
                ext_common.append(ext)
            # ext def: a single value follows
            elif ext_type == EXT_DEF or ext_type == EXT_ABS or ext_type == EXT_RES:
                ext['def'] = self.read_long(f)
                ext_def.append(ext)
            # ext ref: a count plus that many reference offsets
            else:
                num_refs = self.read_long(f)
                if num_refs == 0:
                    num_refs = 1
                refs = []
                for a in range(num_refs):
                    ref = self.read_long(f)
                    refs.append(ref)
                ext['refs'] = refs
                ext_ref.append(ext)
        return RESULT_OK
def parse_unit_or_name(self, f, hunk):
l,n = self.read_name(f)
if l < 0:
self.error_string = "%s has invalid name" % (hunk['type_name'])
return RESULT_INVALID_HUNK_FILE
elif l > 0:
hunk['name'] = n
else:
hunk['name'] = ""
return RESULT_OK
def set_mem_flags(self, hunk, flags, shift):
f = flags >> shift
if f & 1 == 1:
hunk['memf'] = 'chip'
elif f & 2 == 2:
hunk['memf'] = 'fast'
else:
hunk['memf'] = ''
    # ----- public functions -----

    """Read a hunk file and build internal hunk structure
    Return status and set self.error_string on failure
    """
    def read_file(self, hfile):
        # binary mode: all parsing below operates on bytes
        with open(hfile, "rb") as f:
            return self.read_file_obj(hfile, f)
"""Read a hunk from memory"""
def read_mem(self, name, data):
fobj = io.StringIO(data)
return self.read_file_obj(name, fobj)
    def read_file_obj(self, hfile, f):
        """Parse all hunks from file object *f* into self.hunks.

        Returns one of the RESULT_* codes; on failure self.error_string
        describes the problem. *hfile* is only used in error messages.
        """
        self.hunks = []
        is_first_hunk = True
        is_exe = False
        was_end = False
        was_overlay = False
        self.error_string = None
        lib_size = 0
        last_file_offset = 0
        while True:
            hunk_file_offset = f.tell()

            # read hunk type
            hunk_raw_type = self.read_long(f)
            if hunk_raw_type == -1 or hunk_raw_type == -2:  # tolerate extra byte at end
                if is_first_hunk:
                    self.error_string = "No valid hunk file: '%s' is empty" % (hfile)
                    return RESULT_NO_HUNK_FILE
                else:
                    # eof
                    break
            elif hunk_raw_type < 0:
                if is_first_hunk:
                    self.error_string = "No valid hunk file: '%s' is too short" % (hfile)
                    return RESULT_NO_HUNK_FILE
                else:
                    self.error_string = "Error reading hunk type @%08x" % (f.tell())
                    return RESULT_INVALID_HUNK_FILE

            # split raw type into type number and memory flags
            hunk_type = hunk_raw_type & HUNK_TYPE_MASK
            hunk_flags = hunk_raw_type & HUNK_FLAGS_MASK

            # check range of hunk type
            if hunk_type not in hunk_names:
                # no hunk file?
                if is_first_hunk:
                    self.error_string = "No hunk file: '%s' type was %d" % (hfile, hunk_type)
                    return RESULT_NO_HUNK_FILE
                elif was_end:
                    # garbage after an end tag is ignored
                    return RESULT_OK
                elif was_overlay:
                    # seems to be a custom overlay -> read to end of file
                    ov_custom_data = f.read()
                    self.hunks[-1]['custom_data'] = ov_custom_data
                    return RESULT_OK
                else:
                    self.error_string = "Invalid hunk type %d/%x found at @%08x" % (hunk_type, hunk_type, f.tell())
                    return RESULT_INVALID_HUNK_FILE
            else:
                # check for valid first hunk type
                if is_first_hunk:
                    if not self.is_valid_first_hunk_type(hunk_type):
                        self.error_string = "No hunk file: '%s' first hunk type was %d" % (hfile, hunk_type)
                        return RESULT_NO_HUNK_FILE
                    else:
                        is_exe = hunk_type == HUNK_HEADER

                is_first_hunk = False
                was_end = False
                was_overlay = False

                # V37 fix: in an executable DREL32 is wrongly assigned and actually is a RELOC32SHORT
                if hunk_type == HUNK_DREL32 and is_exe:
                    hunk_type = HUNK_RELOC32SHORT

                hunk = {'type': hunk_type, 'hunk_file_offset': hunk_file_offset}
                self.hunks.append(hunk)
                hunk['type_name'] = hunk_names[hunk_type]
                self.set_mem_flags(hunk, hunk_flags, 30)

                # account for lib: mark hunks that fall inside a HUNK_LIB body
                last_hunk_size = hunk_file_offset - last_file_offset
                if lib_size > 0:
                    lib_size -= last_hunk_size
                if lib_size > 0:
                    hunk['in_lib'] = True

                # ----- HUNK_HEADER -----
                if hunk_type == HUNK_HEADER:
                    result = self.parse_header(f, hunk)
                # ----- HUNK_CODE/HUNK_DATA ------
                elif hunk_type == HUNK_CODE or hunk_type == HUNK_DATA or hunk_type == HUNK_PPC_CODE:
                    result = self.parse_code_or_data(f, hunk)
                # ---- HUNK_BSS ----
                elif hunk_type == HUNK_BSS:
                    result = self.parse_bss(f, hunk)
                # ----- HUNK_<reloc> -----
                elif hunk_type == HUNK_RELRELOC32 or hunk_type == HUNK_ABSRELOC16 \
                        or hunk_type == HUNK_RELRELOC8 or hunk_type == HUNK_RELRELOC16 or hunk_type == HUNK_ABSRELOC32 \
                        or hunk_type == HUNK_DREL32 or hunk_type == HUNK_DREL16 or hunk_type == HUNK_DREL8 \
                        or hunk_type == HUNK_RELRELOC26:
                    result = self.parse_reloc(f, hunk)
                # ---- HUNK_<reloc short> -----
                elif hunk_type == HUNK_RELOC32SHORT:
                    result = self.parse_reloc_short(f, hunk)
                # ----- HUNK_SYMBOL -----
                elif hunk_type == HUNK_SYMBOL:
                    result = self.parse_symbol(f, hunk)
                # ----- HUNK_DEBUG -----
                elif hunk_type == HUNK_DEBUG:
                    result = self.parse_debug(f, hunk)
                # ----- HUNK_END -----
                elif hunk_type == HUNK_END:
                    was_end = True
                    result = RESULT_OK
                # ----- HUNK_OVERLAY -----
                elif hunk_type == HUNK_OVERLAY:
                    result = self.parse_overlay(f, hunk)
                    was_overlay = True
                # ----- HUNK_BREAK -----
                elif hunk_type == HUNK_BREAK:
                    result = RESULT_OK
                # ----- HUNK_LIB -----
                elif hunk_type == HUNK_LIB:
                    result, lib_size = self.parse_lib(f, hunk)
                    lib_size += 8  # add size of HUNK_LIB itself
                # ----- HUNK_INDEX -----
                elif hunk_type == HUNK_INDEX:
                    result = self.parse_index(f, hunk)
                # ----- HUNK_EXT -----
                elif hunk_type == HUNK_EXT:
                    result = self.parse_ext(f, hunk)
                # ----- HUNK_UNIT -----
                elif hunk_type == HUNK_UNIT or hunk_type == HUNK_NAME:
                    result = self.parse_unit_or_name(f, hunk)
                # ----- oops! unsupported hunk -----
                else:
                    self.error_string = "unsupported hunk %d" % (hunk_type)
                    return RESULT_UNSUPPORTED_HUNKS

                # a parse error occurred
                if result != RESULT_OK:
                    return result

                last_file_offset = hunk_file_offset
        return RESULT_OK
"""Return a list with all the hunk type names that were found
"""
def get_hunk_summary(self):
return self.get_struct_summary(self.hunks)
    # ---------- Build Segments from Hunks ----------

    def build_loadseg(self):
        """Group the parsed hunks of a load file into per-segment lists.

        Fills self.header/self.segments (and the overlay_* fields when a
        HUNK_OVERLAY is present). Returns True on success; on failure
        self.error_string is set and False is returned.
        """
        in_header = True
        seek_begin = False
        segment = None
        segment_list = self.segments
        for e in self.hunks:
            hunk_type = e['type']

            # check for end of header
            if in_header and hunk_type in loadseg_valid_begin_hunks:
                in_header = False
                seek_begin = True

            if in_header:
                if hunk_type == HUNK_HEADER:
                    # we are in an overlay!
                    if self.overlay != None:
                        segment_list = []
                        self.overlay_segments.append(segment_list)
                        self.overlay_headers.append(e)
                    else:
                        # set load_seg() header
                        self.header = e
                    # start a new segment
                    segment = []
                    # setup hunk counter
                    hunk_no = e['first_hunk']
                # we allow a debug hunk in header for SAS compatibility
                elif hunk_type == HUNK_DEBUG:
                    segment.append(e)
                else:
                    self.error_string = "Expected header in loadseg: %s %d/%x" % (e['type_name'], hunk_type, hunk_type)
                    return False
            elif seek_begin:
                # a new hunk shall begin
                if hunk_type in loadseg_valid_begin_hunks:
                    segment = [e]
                    segment_list.append(segment)
                    seek_header = False
                    seek_begin = False
                    e['hunk_no'] = hunk_no
                    e['alloc_size'] = self.header['hunks'][hunk_no]['size']
                    hunk_no += 1
                # add an extra overlay "hunk"
                elif hunk_type == HUNK_OVERLAY:
                    # assume hunk to be empty
                    if self.overlay != None:
                        self.error_string = "Multiple overlay in loadseg: %s %d/%x" % (e['type_name'], hunk_type, hunk_type)
                        return False
                    self.overlay = e
                    self.overlay_headers = []
                    self.overlay_segments = []
                    in_header = True
                # break
                elif hunk_type == HUNK_BREAK:
                    # assume hunk to be empty
                    in_header = True
                # broken hunk: multiple END or other hunks
                elif hunk_type in [HUNK_END, HUNK_NAME, HUNK_DEBUG]:
                    pass
                else:
                    self.error_string = "Expected hunk start in loadseg: %s %d/%x" % (e['type_name'], hunk_type, hunk_type)
                    return False
            else:
                # an extra block in hunk or end is expected
                if hunk_type == HUNK_END:
                    seek_begin = True
                # contents of hunk
                elif hunk_type in loadseg_valid_extra_hunks or hunk_type == HUNK_DREL32:
                    segment.append(e)
                # broken hunk file without END tag
                elif hunk_type in loadseg_valid_begin_hunks:
                    segment = [e]
                    segment_list.append(segment)
                    seek_header = False
                    seek_begin = False
                    e['hunk_no'] = hunk_no
                    e['alloc_size'] = self.header['hunks'][hunk_no]['size']
                    hunk_no += 1
                # unexpected hunk?!
                else:
                    self.error_string = "Unexpected hunk extra in loadseg: %s %d/%x" % (e['type_name'], hunk_type, hunk_type)
                    return False
        return True
    def build_unit(self):
        """Group the hunks of an object file (HUNK_UNIT) into units/segments.

        Fills self.units; each unit holds its name and a list of segments.
        Returns True on success, False with error_string set otherwise.
        """
        force_unit = True
        in_hunk = False
        name = None
        segment = None
        unit = None
        self.units = []
        unit_no = 0
        for e in self.hunks:
            hunk_type = e['type']

            # optional unit as first entry
            if hunk_type == HUNK_UNIT:
                unit = {}
                unit['name'] = e['name']
                unit['unit_no'] = unit_no
                unit['segments'] = []
                unit['unit'] = e
                unit_no += 1
                self.units.append(unit)
                force_unit = False
                hunk_no = 0
            elif force_unit:
                self.error_string = "Expected name hunk in unit: %s %d/%x" % (e['type_name'], hunk_type, hunk_type)
                return False
            elif not in_hunk:
                # begin a named hunk
                if hunk_type == HUNK_NAME:
                    name = e['name']
                # main hunk block
                elif hunk_type in unit_valid_main_hunks:
                    segment = [e]
                    unit['segments'].append(segment)
                    # give main block the NAME
                    if name != None:
                        e['name'] = name
                        name = None
                    e['hunk_no'] = hunk_no
                    hunk_no += 1
                    in_hunk = True
                # broken hunk: ignore multi ENDs
                elif hunk_type == HUNK_END:
                    pass
                else:
                    self.error_string = "Expected main hunk in unit: %s %d/%x" % (e['type_name'], hunk_type, hunk_type)
                    return False
            else:
                # a hunk is finished
                if hunk_type == HUNK_END:
                    in_hunk = False
                # contents of hunk
                elif hunk_type in unit_valid_extra_hunks:
                    segment.append(e)
                # unexpected hunk?!
                else:
                    self.error_string = "Unexpected hunk in unit: %s %d/%x" % (e['type_name'], hunk_type, hunk_type)
                    return False
        return True
    def build_lib(self):
        """Group the hunks of a HUNK_LIB file into libraries.

        Each HUNK_LIB's member hunks are collected until its HUNK_INDEX,
        which is then resolved into per-unit segment lists. Fills
        self.libs; returns True on success.
        """
        self.libs = []
        lib_segments = []
        seek_lib = True
        seek_main = False
        for e in self.hunks:
            hunk_type = e['type']

            # seeking for a LIB hunk
            if seek_lib:
                if hunk_type == HUNK_LIB:
                    segment_list = []
                    lib_segments.append(segment_list)
                    seek_lib = False
                    seek_main = True
                    hunk_no = 0
                    # get start address of lib hunk in file
                    lib_file_offset = e['lib_file_offset']
                else:
                    self.error_string = "Expected lib hunk in lib: %s %d/%x" % (e['type_name'], hunk_type, hunk_type)
                    return False
            elif seek_main:
                # end of lib? -> index!
                if hunk_type == HUNK_INDEX:
                    seek_main = False
                    seek_lib = True
                    lib_units = []
                    if not self.resolve_index_hunks(e, segment_list, lib_units):
                        self.error_string = "Error resolving index hunks!"
                        return False
                    lib = {}
                    lib['units'] = lib_units
                    lib['lib_no'] = len(self.libs)
                    lib['index'] = e
                    self.libs.append(lib)
                # start of a hunk
                elif hunk_type in unit_valid_main_hunks:
                    segment = [e]
                    e['hunk_no'] = hunk_no
                    hunk_no += 1
                    segment_list.append(segment)
                    seek_main = False
                    # calc relative lib address
                    hunk_lib_offset = e['hunk_file_offset'] - lib_file_offset
                    e['hunk_lib_offset'] = hunk_lib_offset
                else:
                    self.error_string = "Expected main hunk in lib: %s %d/%x" % (e['type_name'], hunk_type, hunk_type)
                    return False
            else:
                # end hunk
                if hunk_type == HUNK_END:
                    seek_main = True
                # extra contents
                elif hunk_type in unit_valid_extra_hunks:
                    segment.append(e)
                else:
                    self.error_string = "Unexpected hunk in lib: %s %d/%x" % (e['type_name'], hunk_type, hunk_type)
                    return False
        return True
"""Resolve hunks referenced in the index"""
def resolve_index_hunks(self, index, segment_list, lib_units):
units = index['units']
no = 0
for unit in units:
lib_unit = {}
unit_segments = []
lib_unit['segments'] = unit_segments
lib_unit['name'] = unit['name']
lib_unit['unit_no'] = no
lib_unit['index_unit'] = unit
lib_units.append(lib_unit)
no += 1
# try to find segment with start offset
hunk_offset = unit['hunk_begin_offset']
found = False
for segment in segment_list:
hunk_no = segment[0]['hunk_no']
lib_off = segment[0]['hunk_lib_offset'] // 4 # is in longwords
if lib_off == hunk_offset:
# found segment
num_segs = len(unit['hunk_infos'])
for i in range(num_segs):
info = unit['hunk_infos'][i]
seg = segment_list[hunk_no+i]
unit_segments.append(seg)
# renumber hunk
seg[0]['hunk_no'] = i
seg[0]['name'] = info['name']
seg[0]['index_hunk'] = info
found = True
if not found:
return False
return True
"""From the hunk list build a set of segments that form the actual binary"""
def build_segments(self):
self.segments = []
if len(self.hunks) == 0:
self.type = TYPE_UNKNOWN
return False
# determine type of file from first hunk
first_hunk_type = self.hunks[0]['type']
if first_hunk_type == HUNK_HEADER:
self.type = TYPE_LOADSEG
return self.build_loadseg()
elif first_hunk_type == HUNK_UNIT:
self.type = TYPE_UNIT
return self.build_unit()
elif first_hunk_type == HUNK_LIB:
self.type = TYPE_LIB
return self.build_lib()
else:
self.type = TYPE_UNKNOWN
return False
"""Return a summary of the created segment structure"""
def get_segment_summary(self):
return self.get_struct_summary(self.segments)
def get_overlay_segment_summary(self):
if self.overlay_segments != None:
return self.get_struct_summary(self.overlay_segments)
else:
return None
def get_libs_summary(self):
if self.libs != None:
return self.get_struct_summary(self.libs)
else:
return None
def get_units_summary(self):
if self.units != None:
return self.get_struct_summary(self.units)
else:
return None
| gpl-2.0 |
unifieddigitalmedia/sinopia_inn-M.E.A.N_stack | node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py | 1569 | 23354 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility functions to perform Xcode-style build steps.
These functions are executed via gyp-mac-tool when using the Makefile generator.
"""
import fcntl
import fnmatch
import glob
import json
import os
import plistlib
import re
import shutil
import string
import subprocess
import sys
import tempfile
def main(args):
  """Dispatch |args| to a MacTool command and exit with its return code.

  A None return from Dispatch means "success, nothing to report" and the
  process is left to exit normally.
  """
  tool = MacTool()
  rc = tool.Dispatch(args)
  if rc is not None:
    sys.exit(rc)
class MacTool(object):
"""This class performs all the Mac tooling steps. The methods can either be
executed directly, or dispatched from an argument list."""
  def Dispatch(self, args):
    """Dispatches a string command to a method."""
    if len(args) < 1:
      raise Exception("Not enough arguments")
    # Map a command like 'copy-info-plist' to the method 'ExecCopyInfoPlist'
    # and invoke it with the remaining arguments.
    method = "Exec%s" % self._CommandifyName(args[0])
    return getattr(self, method)(*args[1:])
  def _CommandifyName(self, name_string):
    """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
    # title() capitalizes each hyphen-separated word; removing the hyphens
    # then yields the CamelCase method-name suffix.
    return name_string.title().replace('-', '')
  def ExecCopyBundleResource(self, source, dest, convert_to_binary):
    """Copies a resource file to the bundle/Resources directory, performing any
    necessary compilation on each resource."""
    extension = os.path.splitext(source)[1].lower()
    if os.path.isdir(source):
      # Copy tree.
      # TODO(thakis): This copies file attributes like mtime, while the
      # single-file branch below doesn't. This should probably be changed to
      # be consistent with the single-file branch.
      if os.path.exists(dest):
        shutil.rmtree(dest)
      shutil.copytree(source, dest)
    elif extension == '.xib':
      return self._CopyXIBFile(source, dest)
    elif extension == '.storyboard':
      # Storyboards are compiled with the same ibtool invocation as XIBs.
      return self._CopyXIBFile(source, dest)
    elif extension == '.strings':
      self._CopyStringsFile(source, dest, convert_to_binary)
    else:
      # Any other resource is copied verbatim.
      shutil.copy(source, dest)
  def _CopyXIBFile(self, source, dest):
    """Compiles a XIB file with ibtool into a binary plist in the bundle."""
    # ibtool sometimes crashes with relative paths. See crbug.com/314728.
    base = os.path.dirname(os.path.realpath(__file__))
    # NOTE(review): os.path.relpath() is truthy for virtually any input, so
    # both branches below are effectively always taken; for already-absolute
    # paths os.path.join() discards |base|, so the result is still correct.
    if os.path.relpath(source):
      source = os.path.join(base, source)
    if os.path.relpath(dest):
      dest = os.path.join(base, dest)
    args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
        '--output-format', 'human-readable-text', '--compile', dest, source]
    # Filter ibtool's /* section */ headers and "is clipping its content"
    # notes; a buffered section header is only printed when a kept line
    # follows it.
    ibtool_section_re = re.compile(r'/\*.*\*/')
    ibtool_re = re.compile(r'.*note:.*is clipping its content')
    ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
    current_section_header = None
    for line in ibtoolout.stdout:
      if ibtool_section_re.match(line):
        current_section_header = line
      elif not ibtool_re.match(line):
        if current_section_header:
          sys.stdout.write(current_section_header)
          current_section_header = None
        sys.stdout.write(line)
    # NOTE(review): returncode may still be None here since the process is
    # never wait()ed on explicitly — TODO confirm intended.
    return ibtoolout.returncode
  def _ConvertToBinary(self, dest):
    # Rewrite |dest| in place as a binary1-format property list via plutil.
    subprocess.check_call([
        'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
  def _CopyStringsFile(self, source, dest, convert_to_binary):
    """Copies a .strings file using iconv to reconvert the input into UTF-16."""
    input_code = self._DetectInputEncoding(source) or "UTF-8"
    # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
    # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
    #     CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
    #     semicolon in dictionary.
    # on invalid files. Do the same kind of validation.
    # CoreFoundation is the mac-only PyObjC bridge module; imported lazily so
    # non-mac code paths never need it.
    import CoreFoundation
    s = open(source, 'rb').read()
    d = CoreFoundation.CFDataCreate(None, s, len(s))
    _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
    if error:
      # Invalid plist: silently skip the copy, matching Xcode's behavior.
      return
    fp = open(dest, 'wb')
    fp.write(s.decode(input_code).encode('UTF-16'))
    fp.close()
    if convert_to_binary == 'True':
      self._ConvertToBinary(dest)
def _DetectInputEncoding(self, file_name):
"""Reads the first few bytes from file_name and tries to guess the text
encoding. Returns None as a guess if it can't detect it."""
fp = open(file_name, 'rb')
try:
header = fp.read(3)
except e:
fp.close()
return None
fp.close()
if header.startswith("\xFE\xFF"):
return "UTF-16"
elif header.startswith("\xFF\xFE"):
return "UTF-16"
elif header.startswith("\xEF\xBB\xBF"):
return "UTF-8"
else:
return None
  def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
    """Copies the |source| Info.plist to the destination directory |dest|."""
    # NOTE: string.replace() and plistlib.readPlistFromString/writePlistToString
    # are Python 2 APIs; this tool targets Python 2.
    # Read the source Info.plist into memory.
    fd = open(source, 'r')
    lines = fd.read()
    fd.close()
    # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
    plist = plistlib.readPlistFromString(lines)
    if keys:
      plist = dict(plist.items() + json.loads(keys[0]).items())
    lines = plistlib.writePlistToString(plist)
    # Go through all the environment variables and replace them as variables in
    # the file.
    IDENT_RE = re.compile(r'[/\s]')
    for key in os.environ:
      if key.startswith('_'):
        continue
      evar = '${%s}' % key
      evalue = os.environ[key]
      lines = string.replace(lines, evar, evalue)
      # Xcode supports various suffices on environment variables, which are
      # all undocumented. :rfc1034identifier is used in the standard project
      # template these days, and :identifier was used earlier. They are used to
      # convert non-url characters into things that look like valid urls --
      # except that the replacement character for :identifier, '_' isn't valid
      # in a URL either -- oops, hence :rfc1034identifier was born.
      evar = '${%s:identifier}' % key
      evalue = IDENT_RE.sub('_', os.environ[key])
      lines = string.replace(lines, evar, evalue)
      evar = '${%s:rfc1034identifier}' % key
      evalue = IDENT_RE.sub('-', os.environ[key])
      lines = string.replace(lines, evar, evalue)
    # Remove any keys with values that haven't been replaced.
    # Each unexpanded <string>${...} line is dropped together with the <key>
    # line directly above it.
    lines = lines.split('\n')
    for i in range(len(lines)):
      if lines[i].strip().startswith("<string>${"):
        lines[i] = None
        lines[i - 1] = None
    lines = '\n'.join(filter(lambda x: x is not None, lines))
    # Write out the file with variables replaced.
    fd = open(dest, 'w')
    fd.write(lines)
    fd.close()
    # Now write out PkgInfo file now that the Info.plist file has been
    # "compiled".
    self._WritePkgInfo(dest)
    if convert_to_binary == 'True':
      self._ConvertToBinary(dest)
  def _WritePkgInfo(self, info_plist):
    """This writes the PkgInfo file from the data stored in Info.plist."""
    plist = plistlib.readPlist(info_plist)
    if not plist:
      return
    # Only create PkgInfo for executable types.
    package_type = plist['CFBundlePackageType']
    if package_type != 'APPL':
      return
    # The format of PkgInfo is eight characters, representing the bundle type
    # and bundle signature, each four characters. If that is missing, four
    # '?' characters are used instead.
    signature_code = plist.get('CFBundleSignature', '????')
    if len(signature_code) != 4: # Wrong length resets everything, too.
      signature_code = '?' * 4
    dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
    fp = open(dest, 'w')
    fp.write('%s%s' % (package_type, signature_code))
    fp.close()
  def ExecFlock(self, lockfile, *cmd_list):
    """Emulates the most basic behavior of Linux's flock(1)."""
    # Rely on exception handling to report errors.
    # The fd is never closed explicitly, so the exclusive lock is held for the
    # remaining life of this process (it is released at exit).
    fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
    fcntl.flock(fd, fcntl.LOCK_EX)
    return subprocess.call(cmd_list)
  def ExecFilterLibtool(self, *cmd_list):
    """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
    symbols'."""
    libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
    libtool_re5 = re.compile(
        r'^.*libtool: warning for library: ' +
        r'.* the table of contents is empty ' +
        r'\(no object file members in the library define global symbols\)$')
    env = os.environ.copy()
    # Ref:
    # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
    # The problem with this flag is that it resets the file mtime on the file to
    # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
    env['ZERO_AR_DATE'] = '1'
    libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
    _, err = libtoolout.communicate()
    for line in err.splitlines():
      if not libtool_re.match(line) and not libtool_re5.match(line):
        # Python 2 print-to-stream syntax.
        print >>sys.stderr, line
    # Unconditionally touch the output .a file on the command line if present
    # and the command succeeded. A bit hacky.
    # (Compensates for ZERO_AR_DATE resetting the archive's mtime to epoch.)
    if not libtoolout.returncode:
      for i in range(len(cmd_list) - 1):
        if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
          os.utime(cmd_list[i+1], None)
          break
    return libtoolout.returncode
  def ExecPackageFramework(self, framework, version):
    """Takes a path to Something.framework and the Current version of that and
    sets up all the symlinks."""
    # Find the name of the binary based on the part before the ".framework".
    binary = os.path.basename(framework).split('.')[0]
    CURRENT = 'Current'
    RESOURCES = 'Resources'
    VERSIONS = 'Versions'
    if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
      # Binary-less frameworks don't seem to contain symlinks (see e.g.
      # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
      return
    # Move into the framework directory to set the symlinks correctly.
    pwd = os.getcwd()
    os.chdir(framework)
    # Set up the Current version.
    self._Relink(version, os.path.join(VERSIONS, CURRENT))
    # Set up the root symlinks.
    self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
    self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
    # Back to where we were before!
    os.chdir(pwd)
  def _Relink(self, dest, link):
    """Creates a symlink to |dest| named |link|. If |link| already exists,
    it is overwritten."""
    # lexists() (not exists()) so that dangling symlinks are also replaced.
    if os.path.lexists(link):
      os.remove(link)
    os.symlink(dest, link)
  def ExecCompileXcassets(self, keys, *inputs):
    """Compiles multiple .xcassets files into a single .car file.
    This invokes 'actool' to compile all the inputs .xcassets files. The
    |keys| arguments is a json-encoded dictionary of extra arguments to
    pass to 'actool' when the asset catalogs contains an application icon
    or a launch image.
    Note that 'actool' does not create the Assets.car file if the asset
    catalogs does not contains imageset.
    """
    command_line = [
      'xcrun', 'actool', '--output-format', 'human-readable-text',
      '--compress-pngs', '--notices', '--warnings', '--errors',
    ]
    is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
    if is_iphone_target:
      # The platform name is the suffix of the configuration, e.g.
      # 'Debug-iphoneos' -> 'iphoneos'; default to the simulator otherwise.
      platform = os.environ['CONFIGURATION'].split('-')[-1]
      if platform not in ('iphoneos', 'iphonesimulator'):
        platform = 'iphonesimulator'
      command_line.extend([
          '--platform', platform, '--target-device', 'iphone',
          '--target-device', 'ipad', '--minimum-deployment-target',
          os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
          os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
      ])
    else:
      command_line.extend([
          '--platform', 'macosx', '--target-device', 'mac',
          '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
          '--compile',
          os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
      ])
    if keys:
      # |keys| is JSON; each entry becomes an extra --<key> actool flag
      # (bools toggle the flag, lists repeat it). iteritems() is Python 2.
      keys = json.loads(keys)
      for key, value in keys.iteritems():
        arg_name = '--' + key
        if isinstance(value, bool):
          if value:
            command_line.append(arg_name)
        elif isinstance(value, list):
          for v in value:
            command_line.append(arg_name)
            command_line.append(str(v))
        else:
          command_line.append(arg_name)
          command_line.append(str(value))
    # Note: actool crashes if inputs path are relative, so use os.path.abspath
    # to get absolute path name for inputs.
    command_line.extend(map(os.path.abspath, inputs))
    subprocess.check_call(command_line)
  def ExecMergeInfoPlist(self, output, *inputs):
    """Merge multiple .plist files into a single .plist file."""
    merged_plist = {}
    # Later inputs win over earlier ones; nested dicts are merged recursively
    # by _MergePlist().
    for path in inputs:
      plist = self._LoadPlistMaybeBinary(path)
      self._MergePlist(merged_plist, plist)
    plistlib.writePlist(merged_plist, output)
  def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
    """Code sign a bundle.
    This function tries to code sign an iOS bundle, following the same
    algorithm as Xcode:
      1. copy ResourceRules.plist from the user or the SDK into the bundle,
      2. pick the provisioning profile that best match the bundle identifier,
         and copy it into the bundle as embedded.mobileprovision,
      3. copy Entitlements.plist from user or SDK next to the bundle,
      4. code sign the bundle.
    """
    resource_rules_path = self._InstallResourceRules(resource_rules)
    substitutions, overrides = self._InstallProvisioningProfile(
        provisioning, self._GetCFBundleIdentifier())
    entitlements_path = self._InstallEntitlements(
        entitlements, substitutions, overrides)
    # Sign the final product in the target build directory.
    subprocess.check_call([
        'codesign', '--force', '--sign', key, '--resource-rules',
        resource_rules_path, '--entitlements', entitlements_path,
        os.path.join(
            os.environ['TARGET_BUILD_DIR'],
            os.environ['FULL_PRODUCT_NAME'])])
  def _InstallResourceRules(self, resource_rules):
    """Installs ResourceRules.plist from user or SDK into the bundle.
    Args:
      resource_rules: string, optional, path to the ResourceRules.plist file
        to use, default to "${SDKROOT}/ResourceRules.plist"
    Returns:
      Path to the copy of ResourceRules.plist into the bundle.
    """
    source_path = resource_rules
    target_path = os.path.join(
        os.environ['BUILT_PRODUCTS_DIR'],
        os.environ['CONTENTS_FOLDER_PATH'],
        'ResourceRules.plist')
    # Fall back to the SDK's default rules when the caller passed none.
    if not source_path:
      source_path = os.path.join(
          os.environ['SDKROOT'], 'ResourceRules.plist')
    shutil.copy2(source_path, target_path)
    return target_path
  def _InstallProvisioningProfile(self, profile, bundle_identifier):
    """Installs embedded.mobileprovision into the bundle.
    Args:
      profile: string, optional, short name of the .mobileprovision file
        to use, if empty or the file is missing, the best file installed
        will be used
      bundle_identifier: string, value of CFBundleIdentifier from Info.plist
    Returns:
      A tuple containing two dictionary: variables substitutions and values
      to overrides when generating the entitlements file.
    """
    source_path, provisioning_data, team_id = self._FindProvisioningProfile(
        profile, bundle_identifier)
    target_path = os.path.join(
        os.environ['BUILT_PRODUCTS_DIR'],
        os.environ['CONTENTS_FOLDER_PATH'],
        'embedded.mobileprovision')
    shutil.copy2(source_path, target_path)
    # AppIdentifierPrefix is the team id followed by a '.' separator.
    substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
    return substitutions, provisioning_data['Entitlements']
  def _FindProvisioningProfile(self, profile, bundle_identifier):
    """Finds the .mobileprovision file to use for signing the bundle.
    Checks all the installed provisioning profiles (or if the user specified
    the PROVISIONING_PROFILE variable, only consult it) and select the most
    specific that correspond to the bundle identifier.
    Args:
      profile: string, optional, short name of the .mobileprovision file
        to use, if empty or the file is missing, the best file installed
        will be used
      bundle_identifier: string, value of CFBundleIdentifier from Info.plist
    Returns:
      A tuple of the path to the selected provisioning profile, the data of
      the embedded plist in the provisioning profile and the team identifier
      to use for code signing.
    Raises:
      SystemExit: if no .mobileprovision can be used to sign the bundle.
    """
    profiles_dir = os.path.join(
        os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
    if not os.path.isdir(profiles_dir):
      print >>sys.stderr, (
          'cannot find mobile provisioning for %s' % bundle_identifier)
      sys.exit(1)
    provisioning_profiles = None
    if profile:
      profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
      if os.path.exists(profile_path):
        provisioning_profiles = [profile_path]
    if not provisioning_profiles:
      # No (usable) explicit profile: scan everything installed.
      provisioning_profiles = glob.glob(
          os.path.join(profiles_dir, '*.mobileprovision'))
    valid_provisioning_profiles = {}
    for profile_path in provisioning_profiles:
      profile_data = self._LoadProvisioningProfile(profile_path)
      app_id_pattern = profile_data.get(
          'Entitlements', {}).get('application-identifier', '')
      # A profile is valid when its (possibly wildcarded) application
      # identifier pattern matches '<team-id>.<bundle-identifier>'.
      for team_identifier in profile_data.get('TeamIdentifier', []):
        app_id = '%s.%s' % (team_identifier, bundle_identifier)
        if fnmatch.fnmatch(app_id, app_id_pattern):
          valid_provisioning_profiles[app_id_pattern] = (
              profile_path, profile_data, team_identifier)
    if not valid_provisioning_profiles:
      print >>sys.stderr, (
          'cannot find mobile provisioning for %s' % bundle_identifier)
      sys.exit(1)
    # If the user has multiple provisioning profiles installed that can be
    # used for ${bundle_identifier}, pick the most specific one (ie. the
    # provisioning profile whose pattern is the longest).
    selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
    return valid_provisioning_profiles[selected_key]
  def _LoadProvisioningProfile(self, profile_path):
    """Extracts the plist embedded in a provisioning profile.
    Args:
      profile_path: string, path to the .mobileprovision file
    Returns:
      Content of the plist embedded in the provisioning profile as a dictionary.
    """
    # The profile is a CMS-signed blob; `security cms -D` strips the
    # signature and writes the embedded plist to the temp file.
    with tempfile.NamedTemporaryFile() as temp:
      subprocess.check_call([
          'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
      return self._LoadPlistMaybeBinary(temp.name)
  def _MergePlist(self, merged_plist, plist):
    """Merge |plist| into |merged_plist|."""
    # Dict values are merged recursively; any other value (or a dict
    # replacing a non-dict) simply overwrites. iteritems() is Python 2.
    for key, value in plist.iteritems():
      if isinstance(value, dict):
        merged_value = merged_plist.get(key, {})
        if isinstance(merged_value, dict):
          self._MergePlist(merged_value, value)
          merged_plist[key] = merged_value
        else:
          merged_plist[key] = value
      else:
        merged_plist[key] = value
  def _LoadPlistMaybeBinary(self, plist_path):
    """Loads into a memory a plist possibly encoded in binary format.
    This is a wrapper around plistlib.readPlist that tries to convert the
    plist to the XML format if it can't be parsed (assuming that it is in
    the binary format).
    Args:
      plist_path: string, path to a plist file, in XML or binary format
    Returns:
      Content of the plist as a dictionary.
    """
    try:
      # First, try to read the file using plistlib that only supports XML,
      # and if an exception is raised, convert a temporary copy to XML and
      # load that copy.
      return plistlib.readPlist(plist_path)
    except:
      # Deliberately broad: any parse failure falls through to the binary
      # conversion path below.
      pass
    with tempfile.NamedTemporaryFile() as temp:
      shutil.copy2(plist_path, temp.name)
      subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
      return plistlib.readPlist(temp.name)
  def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
    """Constructs a dictionary of variable substitutions for Entitlements.plist.
    Args:
      bundle_identifier: string, value of CFBundleIdentifier from Info.plist
      app_identifier_prefix: string, value for AppIdentifierPrefix
    Returns:
      Dictionary of substitutions to apply when generating Entitlements.plist.
    """
    # Keys match the "$(variable)" names expanded by _ExpandVariables().
    return {
      'CFBundleIdentifier': bundle_identifier,
      'AppIdentifierPrefix': app_identifier_prefix,
    }
  def _GetCFBundleIdentifier(self):
    """Extracts CFBundleIdentifier value from Info.plist in the bundle.
    Returns:
      Value of CFBundleIdentifier in the Info.plist located in the bundle.
    """
    info_plist_path = os.path.join(
        os.environ['TARGET_BUILD_DIR'],
        os.environ['INFOPLIST_PATH'])
    # The Info.plist may already have been converted to binary; use the
    # format-tolerant loader.
    info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
    return info_plist_data['CFBundleIdentifier']
  def _InstallEntitlements(self, entitlements, substitutions, overrides):
    """Generates and install the ${BundleName}.xcent entitlements file.
    Expands variables "$(variable)" pattern in the source entitlements file,
    add extra entitlements defined in the .mobileprovision file and the copy
    the generated plist to "${BundlePath}.xcent".
    Args:
      entitlements: string, optional, path to the Entitlements.plist template
        to use, defaults to "${SDKROOT}/Entitlements.plist"
      substitutions: dictionary, variable substitutions
      overrides: dictionary, values to add to the entitlements
    Returns:
      Path to the generated entitlements file.
    """
    source_path = entitlements
    target_path = os.path.join(
        os.environ['BUILT_PRODUCTS_DIR'],
        os.environ['PRODUCT_NAME'] + '.xcent')
    if not source_path:
      # No template supplied: use the SDK's default entitlements.
      source_path = os.path.join(
          os.environ['SDKROOT'],
          'Entitlements.plist')
    shutil.copy2(source_path, target_path)
    data = self._LoadPlistMaybeBinary(target_path)
    data = self._ExpandVariables(data, substitutions)
    if overrides:
      # Profile-provided entitlements never clobber template-defined keys.
      for key in overrides:
        if key not in data:
          data[key] = overrides[key]
    plistlib.writePlist(data, target_path)
    return target_path
  def _ExpandVariables(self, data, substitutions):
    """Expands variables "$(variable)" in data.
    Args:
      data: object, can be either string, list or dictionary
      substitutions: dictionary, variable substitutions to perform
    Returns:
      Copy of data where each references to "$(variable)" has been replaced
      by the corresponding value found in substitutions, or left intact if
      the key was not found.
    """
    # Recurses structurally: strings are substituted, containers are
    # rebuilt element by element, anything else passes through unchanged.
    if isinstance(data, str):
      for key, value in substitutions.iteritems():
        data = data.replace('$(%s)' % key, value)
      return data
    if isinstance(data, list):
      return [self._ExpandVariables(v, substitutions) for v in data]
    if isinstance(data, dict):
      return {k: self._ExpandVariables(data[k], substitutions) for k in data}
    return data
# Entry point when invoked as gyp-mac-tool: forward CLI args (minus argv[0]).
if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
| mit |
bretlowery/snakr | lib/django/conf/locale/de_CH/formats.py | 504 | 1445 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
from __future__ import unicode_literals
# Display formats (Django date-format syntax) for the de_CH locale.
DATE_FORMAT = 'j. F Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j. F Y H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y H:i'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
    '%d.%m.%Y', '%d.%m.%y', # '25.10.2006', '25.10.06'
    # '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006'
]
DATETIME_INPUT_FORMATS = [
    '%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
    '%d.%m.%Y %H:%M:%S.%f', # '25.10.2006 14:30:59.000200'
    '%d.%m.%Y %H:%M', # '25.10.2006 14:30'
    '%d.%m.%Y', # '25.10.2006'
]
# these are the separators for non-monetary numbers. For monetary numbers,
# the DECIMAL_SEPARATOR is a . (decimal point) and the THOUSAND_SEPARATOR is a
# ' (single quote).
# For details, please refer to http://www.bk.admin.ch/dokumentation/sprachen/04915/05016/index.html?lang=de
# (in German) and the documentation
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '\xa0' # non-breaking space
NUMBER_GROUPING = 3
| bsd-3-clause |
jikortus/pykickstart | tests/commands/group.py | 1 | 2759 | #
# Chris Lumens <clumens@redhat.com>
#
# Copyright 2013 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use, modify,
# copy, or redistribute it subject to the terms and conditions of the GNU
# General Public License v.2. This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
# trademarks that are incorporated in the source code or documentation are not
# subject to the GNU General Public License and may only be used or replicated
# with the express permission of Red Hat, Inc.
#
import unittest
from tests.baseclass import CommandTest, CommandSequenceTest
from pykickstart.version import F12
class F12_TestCase(CommandTest):
    """Parsing tests for the F12 version of the kickstart `group` command."""
    command = "group"

    def runTest(self):
        # pass
        self.assert_parse("group --name=test", "group --name=test\n")
        self.assert_parse("group --name=test --gid=1000", "group --name=test --gid=1000\n")
        # NOTE: `== None` and `!=`/`==` between parsed objects are deliberate
        # here -- they exercise the __eq__/__ne__ implementations of the
        # command objects, so don't "fix" them to identity checks.
        self.assertFalse(self.assert_parse("group --name=test") == None)
        self.assertTrue(self.assert_parse("group --name=testA") != \
                        self.assert_parse("group --name=testB"))
        self.assertFalse(self.assert_parse("group --name=testA") == \
                         self.assert_parse("group --name=testB"))

        # fail
        # missing required option --name
        self.assert_parse_error("group")
        # --name requires an argument
        self.assert_parse_error("group --name")
        # --gid requires an int argument
        # (bug fix: this previously passed `--uid=id`, which only exercised
        # the unknown-option path below and never tested integer validation)
        self.assert_parse_error("group --name=test --gid=id")
        # unknown option
        self.assert_parse_error("group --name=test --unknown=value")

        # extra test coverage
        cmd = self.handler().commands[self.command]
        cmd.groupList = ["test"]
        self.assertEqual(cmd.__str__(), "test")
        gd = self.handler().GroupData()
        gd.name = ""
        self.assertEqual(gd.__str__(), "group\n")
class F12_Duplicate_TestCase(CommandSequenceTest):
    # Checks duplicate handling: two different group names parse fine, while
    # redefining the same group with a conflicting gid must raise.
    def __init__(self, *args, **kwargs):
        CommandSequenceTest.__init__(self, *args, **kwargs)
        self.version = F12

    def runTest(self):
        # Distinct names: allowed.
        self.assert_parse("""
group --name=test
group --name=othertest""")
        # Same name, conflicting gids: rejected (surfaced as UserWarning).
        self.assert_parse_error("""
group --name=test --gid=1000
group --name=test --gid=1010""", UserWarning)
if __name__ == "__main__":
unittest.main()
| gpl-2.0 |
gberl001/Arduino | arduino-core/src/processing/app/i18n/python/requests/utils.py | 204 | 17497 | # -*- coding: utf-8 -*-
"""
requests.utils
~~~~~~~~~~~~~~
This module provides utility functions that are used within Requests
that are also useful for external consumption.
"""
import cgi
import codecs
import os
import platform
import re
import sys
import zlib
from netrc import netrc, NetrcParseError
from . import __version__
from . import certs
from .compat import parse_http_list as _parse_list_header
from .compat import quote, urlparse, bytes, str, OrderedDict, urlunparse
from .cookies import RequestsCookieJar, cookiejar_from_dict
_hush_pyflakes = (RequestsCookieJar,)
NETRC_FILES = ('.netrc', '_netrc')
DEFAULT_CA_BUNDLE_PATH = certs.where()
def dict_to_sequence(d):
    """Return *d* as a sequence of pairs.

    Mappings become their ``items()`` view; any other object is passed
    through unchanged.
    """
    return d.items() if hasattr(d, 'items') else d
def super_len(o):
    """Return the best available length of *o*.

    Tries, in order: ``len(o)``, an explicit ``o.len`` attribute, and the
    on-disk size of a real file object (via ``fileno``/``fstat``).  Returns
    None when no strategy applies.
    """
    if hasattr(o, '__len__'):
        return len(o)
    if hasattr(o, 'len'):
        return o.len
    if hasattr(o, 'fileno'):
        return os.fstat(o.fileno()).st_size
    return None
def get_netrc_auth(url):
    """Returns the Requests tuple auth for a given url from netrc.

    Returns None (implicitly) when no netrc file exists, no entry matches,
    or the file cannot be read.
    """
    try:
        locations = (os.path.expanduser('~/{0}'.format(f)) for f in NETRC_FILES)
        netrc_path = None
        # First existing candidate (~/.netrc or ~/_netrc) wins.
        for loc in locations:
            if os.path.exists(loc) and not netrc_path:
                netrc_path = loc
        # Abort early if there isn't one.
        if netrc_path is None:
            return netrc_path
        ri = urlparse(url)
        # Strip port numbers from netloc
        host = ri.netloc.split(':')[0]
        try:
            _netrc = netrc(netrc_path).authenticators(host)
            if _netrc:
                # Return with login / password
                # (falls back to the account field when login is empty).
                login_i = (0 if _netrc[0] else 1)
                return (_netrc[login_i], _netrc[2])
        except (NetrcParseError, IOError):
            # If there was a parsing error or a permissions issue reading the file,
            # we'll just skip netrc auth
            pass
    # AppEngine hackiness.
    except (ImportError, AttributeError):
        pass
def guess_filename(obj):
    """Return the basename of ``obj.name``, or None when the attribute is
    missing, empty, or looks like a pseudo-file name such as '<stdin>'."""
    candidate = getattr(obj, 'name', None)
    if not candidate:
        return None
    if candidate.startswith('<') or candidate.endswith('>'):
        return None
    return os.path.basename(candidate)
def from_key_val_list(value):
    """Coerce *value* into an :class:`OrderedDict`.

    ``None`` passes through; scalars (str/bytes/bool/int) cannot represent
    key/value pairs and raise ``ValueError``.

    >>> from_key_val_list([('key', 'val')])
    OrderedDict([('key', 'val')])
    >>> from_key_val_list({'key': 'val'})
    OrderedDict([('key', 'val')])
    """
    if value is None:
        return None
    scalar_types = (str, bytes, bool, int)
    if isinstance(value, scalar_types):
        raise ValueError('cannot encode objects that are not 2-tuples')
    return OrderedDict(value)
def to_key_val_list(value):
    """Coerce *value* into a list of ``(key, value)`` tuples.

    ``None`` passes through; scalars (str/bytes/bool/int) raise
    ``ValueError``; mappings become ``list(value.items())``.

    >>> to_key_val_list({'key': 'val'})
    [('key', 'val')]
    """
    if value is None:
        return None
    if isinstance(value, (str, bytes, bool, int)):
        raise ValueError('cannot encode objects that are not 2-tuples')
    if isinstance(value, dict):
        return list(value.items())
    return list(value)
# From mitsuhiko/werkzeug (used with permission).
def parse_list_header(value):
    """Parse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings.  A quoted-string could
    contain a comma.  A non-quoted string could have quotes in the
    middle.  Quotes are removed automatically after parsing.

    It basically works like :func:`parse_set_header` just that items
    may appear multiple times and case sensitivity is preserved.

    The return value is a standard :class:`list`:

    >>> parse_list_header('token, "quoted value"')
    ['token', 'quoted value']

    To create a header from the :class:`list` again, use the
    :func:`dump_header` function.

    :param value: a string with a list header.
    :return: :class:`list`
    """
    result = []
    # _parse_list_header handles the comma splitting; we only strip the
    # surrounding quotes (and browser-style escaping) from each item here.
    for item in _parse_list_header(value):
        if item[:1] == item[-1:] == '"':
            item = unquote_header_value(item[1:-1])
        result.append(item)
    return result
# From mitsuhiko/werkzeug (used with permission).
def parse_dict_header(value):
    """Parse lists of key, value pairs as described by RFC 2068 Section 2 and
    convert them into a python dict:

    >>> d = parse_dict_header('foo="is a fish", bar="as well"')
    >>> type(d) is dict
    True
    >>> sorted(d.items())
    [('bar', 'as well'), ('foo', 'is a fish')]

    If there is no value for a key it will be `None`:

    >>> parse_dict_header('key_without_value')
    {'key_without_value': None}

    To create a header from the :class:`dict` again, use the
    :func:`dump_header` function.

    :param value: a string with a dict header.
    :return: :class:`dict`
    """
    result = {}
    for item in _parse_list_header(value):
        # Bare tokens (no '=') map to None.
        if '=' not in item:
            result[item] = None
            continue
        name, value = item.split('=', 1)
        # Strip surrounding quotes and browser-style escaping from values.
        if value[:1] == value[-1:] == '"':
            value = unquote_header_value(value[1:-1])
        result[name] = value
    return result
# From mitsuhiko/werkzeug (used with permission).
def unquote_header_value(value, is_filename=False):
    r"""Unquotes a header value. (Reversal of :func:`quote_header_value`).

    This does not use the real unquoting but what browsers are actually
    using for quoting (e.g. IE submits filenames like "C:\foo\bar.txt").

    :param value: the header value to unquote.
    """
    # Only values wrapped in double quotes need work.
    if not (value and value[0] == value[-1] == '"'):
        return value
    inner = value[1:-1]
    # A filename starting with '\\' is a UNC path (\\host\share); collapsing
    # its backslashes would break it, so return it untouched. See #458.
    if is_filename and inner[:2] == '\\\\':
        return inner
    return inner.replace('\\\\', '\\').replace('\\"', '"')
def dict_from_cookiejar(cj):
    """Flatten a CookieJar into a plain name -> value dict.

    :param cj: CookieJar object to extract cookies from.
    """
    # Later cookies with the same name overwrite earlier ones, exactly as a
    # sequential loop would.
    return dict((cookie.name, cookie.value) for cookie in cj)
def add_dict_to_cookiejar(cj, cookie_dict):
    """Returns a CookieJar from a key/value dictionary.

    Mutates |cj| in place by merging the cookies built from |cookie_dict|,
    then returns it.

    :param cj: CookieJar to insert cookies into.
    :param cookie_dict: Dict of key/values to insert into CookieJar.
    """
    cj2 = cookiejar_from_dict(cookie_dict)
    cj.update(cj2)
    return cj
def get_encodings_from_content(content):
    """Scan *content* for ``<meta ... charset=...>`` declarations and return
    the list of charset names found (possibly empty).

    :param content: bytestring to extract encodings from.
    """
    return re.findall(
        r'<meta.*?charset=["\']*(.+?)["\'>]', content, flags=re.I)
def get_encoding_from_headers(headers):
    """Returns encodings from given HTTP Header Dict.

    Looks at the Content-Type header: an explicit charset parameter wins;
    otherwise text/* defaults to ISO-8859-1 per RFC 2616; otherwise None.

    :param headers: dictionary to extract encoding from.
    """
    content_type = headers.get('content-type')
    if not content_type:
        return None
    # NOTE(review): cgi.parse_header relies on the stdlib `cgi` module,
    # which was removed in Python 3.13 — needs a replacement there.
    content_type, params = cgi.parse_header(content_type)
    if 'charset' in params:
        return params['charset'].strip("'\"")
    if 'text' in content_type:
        return 'ISO-8859-1'
def stream_decode_response_unicode(iterator, r):
    """Stream decodes a iterator.

    Yields decoded text chunks for response |r|; when no encoding is
    declared, the raw chunks pass through untouched.
    """
    if r.encoding is None:
        for item in iterator:
            yield item
        return
    # The incremental decoder copes with multi-byte sequences that are split
    # across chunk boundaries.
    decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')
    for chunk in iterator:
        rv = decoder.decode(chunk)
        if rv:
            yield rv
    # Flush anything still buffered inside the decoder.
    rv = decoder.decode('', final=True)
    if rv:
        yield rv
def iter_slices(string, slice_length):
    """Yield successive chunks of *string*, each at most slice_length long."""
    offset = 0
    total = len(string)
    while offset < total:
        yield string[offset:offset + slice_length]
        offset += slice_length
def get_unicode_from_response(r):
    """Return the content of the response as unicode.

    First tries the charset from the content-type header; on a decode
    failure (or when no charset was advertised) falls back to decoding
    with replacement characters, and finally to returning the raw
    content untouched.

    :param r: Response object to get unicode content from.
    """
    encoding = get_encoding_from_headers(r.headers)
    if encoding:
        try:
            return str(r.content, encoding)
        except UnicodeError:
            pass
    # Fall back: decode permissively; if encoding is None the str() call
    # raises TypeError and the raw content is returned as-is.
    try:
        return str(r.content, encoding, errors='replace')
    except TypeError:
        return r.content
def stream_decompress(iterator, mode='gzip'):
    """Stream decodes an iterator over compressed data

    :param iterator: An iterator over compressed data
    :param mode: 'gzip' or 'deflate'
    :return: An iterator over decompressed data
    """
    if mode not in ('gzip', 'deflate'):
        raise ValueError('stream_decompress mode must be gzip or deflate')
    if mode == 'gzip':
        wbits = 16 + zlib.MAX_WBITS
    else:
        wbits = -zlib.MAX_WBITS
    decompressor = zlib.decompressobj(wbits)
    try:
        for chunk in iterator:
            decoded = decompressor.decompress(chunk)
            if decoded:
                yield decoded
    except zlib.error:
        # If there was an error decompressing, just return the raw chunk
        yield chunk
        # Continue to return the rest of the raw data
        for chunk in iterator:
            yield chunk
    else:
        # Make sure everything has been returned from the decompression object
        tail = decompressor.decompress(bytes()) + decompressor.flush()
        if tail:
            yield tail
def stream_untransfer(gen, resp):
    """Wrap *gen* with the decompressor matching the response's
    content-encoding header; pass it through unchanged otherwise."""
    content_encoding = resp.headers.get('content-encoding', '').lower()
    if 'gzip' in content_encoding:
        return stream_decompress(gen, mode='gzip')
    if 'deflate' in content_encoding:
        return stream_decompress(gen, mode='deflate')
    return gen
# The unreserved URI characters (RFC 3986, section 2.3): ALPHA / DIGIT /
# "-" / "." / "_" / "~".  Their percent-escapes are always safe to decode.
UNRESERVED_SET = frozenset(
    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
    + "0123456789-._~")


def unquote_unreserved(uri):
    """Un-escape any percent-escape sequences in a URI that are unreserved
    characters. This leaves all reserved, illegal and non-ASCII bytes encoded.

    Malformed escapes (``%`` followed by fewer than two characters, or by
    a non-hex pair such as ``%zz``) are left untouched instead of raising
    ``ValueError``.
    """
    parts = uri.split('%')
    for i in range(1, len(parts)):
        h = parts[i][0:2]
        if len(h) == 2 and h.isalnum():
            # str.isalnum() also accepts 'g'-'z' (and unicode alnums) which
            # int(h, 16) rejects, so guard the conversion rather than crash.
            try:
                c = chr(int(h, 16))
            except ValueError:
                parts[i] = '%' + parts[i]
                continue
            if c in UNRESERVED_SET:
                parts[i] = c + parts[i][2:]
            else:
                parts[i] = '%' + parts[i]
        else:
            parts[i] = '%' + parts[i]
    return ''.join(parts)
def requote_uri(uri):
    """Re-quote the given URI.

    This function passes the given URI through an unquote/quote cycle to
    ensure that it is fully and consistently quoted.
    """
    # Decode only the unreserved escapes first, then quote only illegal
    # characters (reserved, unreserved and '%' are left alone).
    safe_chars = "!#$%&'()*+,/:;=?@[]~"
    return quote(unquote_unreserved(uri), safe=safe_chars)
def get_environ_proxies(url):
    """Return a dict of environment proxies applicable to *url*."""
    proxy_keys = ['all', 'http', 'https', 'ftp', 'socks']

    def lookup(name):
        # Environment variables may be spelled lower- or upper-case.
        return os.environ.get(name) or os.environ.get(name.upper())

    # Honour no_proxy first: if the URL's netloc matches an entry, with or
    # without the port, no proxies apply to this URL at all.
    no_proxy = lookup('no_proxy')
    if no_proxy:
        netloc = urlparse(url).netloc
        hostname = netloc.split(':')[0]
        for host in no_proxy.split(','):
            if netloc.endswith(host) or hostname.endswith(host):
                return {}
    # Either no_proxy was unset, or the URL is not covered by it.
    proxies = {}
    for key in proxy_keys:
        value = lookup(key + '_proxy')
        if value:
            proxies[key] = value
    return proxies
def default_user_agent():
    """Return a string representing the default user agent."""
    implementation = platform.python_implementation()
    if implementation == 'CPython':
        implementation_version = platform.python_version()
    elif implementation == 'PyPy':
        info = sys.pypy_version_info
        implementation_version = '%s.%s.%s' % (info.major,
                                               info.minor,
                                               info.micro)
        if info.releaselevel != 'final':
            implementation_version += info.releaselevel
    elif implementation in ('Jython', 'IronPython'):
        implementation_version = platform.python_version()  # Complete Guess
    else:
        implementation_version = 'Unknown'
    # platform calls can fail on some systems; degrade gracefully.
    try:
        p_system = platform.system()
        p_release = platform.release()
    except IOError:
        p_system = 'Unknown'
        p_release = 'Unknown'
    return " ".join(['python-requests/%s' % __version__,
                     '%s/%s' % (implementation, implementation_version),
                     '%s/%s' % (p_system, p_release)])
def default_headers():
    """Return the default set of headers sent with every request."""
    headers = dict()
    headers['User-Agent'] = default_user_agent()
    headers['Accept-Encoding'] = ', '.join(('gzip', 'deflate', 'compress'))
    headers['Accept'] = '*/*'
    return headers
def parse_header_links(value):
    """Return a list of dicts parsed from a Link header value.

    i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"

    :param value: the raw Link header value.
    :returns: a list of dicts, each with a "url" key plus any parameters.
    """
    links = []
    replace_chars = " '\""
    for val in value.split(","):
        try:
            url, params = val.split(";", 1)
        except ValueError:
            url, params = val, ''
        link = {}
        link["url"] = url.strip("<> '\"")
        for param in params.split(";"):
            try:
                # Split on the first '=' only, so parameter values that
                # themselves contain '=' (e.g. URLs) survive intact instead
                # of raising ValueError and dropping all remaining params.
                key, param_value = param.split("=", 1)
            except ValueError:
                break
            link[key.strip(replace_chars)] = param_value.strip(replace_chars)
        links.append(link)
    return links
# Null bytes; no need to recreate these on each call to guess_json_utf
_null = '\x00'.encode('ascii')  # encoding to ASCII for Python 3
_null2 = _null * 2
_null3 = _null * 3


def guess_json_utf(data):
    """Guess the unicode flavour of a JSON bytestring.

    JSON always starts with two ASCII characters, so detection is as
    easy as counting the nulls and from their location and count
    determine the encoding. Also detect a BOM, if present.

    :param data: raw JSON bytes.
    :returns: a codec name usable for decoding, or ``None``.
    """
    sample = data[:4]
    # NOTE: this must be BOM_UTF32_BE.  The legacy ``codecs.BOM32_BE`` alias
    # used previously is actually the UTF-16 big-endian BOM (2 bytes), so a
    # UTF-32-BE BOM was never recognised here.
    if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
        return 'utf-32'  # BOM included
    if sample[:3] == codecs.BOM_UTF8:
        return 'utf-8-sig'  # BOM included, MS style (discouraged)
    if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
        return 'utf-16'  # BOM included
    nullcount = sample.count(_null)
    if nullcount == 0:
        return 'utf-8'
    if nullcount == 2:
        if sample[::2] == _null2:  # 1st and 3rd are null
            return 'utf-16-be'
        if sample[1::2] == _null2:  # 2nd and 4th are null
            return 'utf-16-le'
        # Did not detect 2 valid UTF-16 ascii-range characters
    if nullcount == 3:
        if sample[:3] == _null3:
            return 'utf-32-be'
        if sample[1:] == _null3:
            return 'utf-32-le'
        # Did not detect a valid UTF-32 ascii-range character
    return None
def prepend_scheme_if_needed(url, new_scheme):
    '''Given a URL that may or may not have a scheme, prepend the given scheme.
    Does not replace a present scheme with the one provided as an argument.'''
    parsed = urlparse(url, new_scheme)
    scheme, netloc, path, params, query, fragment = parsed
    if not netloc:
        # urlparse is a finicky beast, and sometimes decides that there isn't
        # a netloc present.  Assume it is being over-cautious and swap netloc
        # and path when it decided there was no netloc.
        netloc, path = path, netloc
    return urlunparse((scheme, netloc, path, params, query, fragment))
def get_auth_from_url(url):
    """Given a url with authentication components, extract them into a tuple of
    username,password."""
    if not url:
        return ('', '')
    parsed = urlparse(url)
    return (parsed.username, parsed.password)
| lgpl-2.1 |
dreamhost/teuthology | teuthology/report.py | 6 | 17353 | import os
import yaml
import json
import re
import requests
import logging
import socket
from datetime import datetime
import teuthology
from .config import config
from .job_status import get_status, set_status
report_exceptions = (requests.exceptions.RequestException, socket.error)
def init_logging():
    """
    Set up logging for the module

    :returns: a logger
    """
    return logging.getLogger(__name__)
def main(args):
    """
    Entry point for the results-reporting command.

    Dispatches on the parsed docopt ``args`` dict: mark runs or jobs dead,
    report specific runs/jobs, or report every run in the archive.
    """
    run = args['--run']
    job = args['--job']
    dead = args['--dead']
    refresh = dead or args['--refresh']
    server = args['--server']
    if server:
        config.results_server = server
    if args['--verbose']:
        teuthology.log.setLevel(logging.DEBUG)
    # NOTE(review): assumes --archive is always provided;
    # os.path.expanduser(None) would raise here -- confirm docopt usage.
    archive_base = os.path.abspath(os.path.expanduser(args['--archive'])) or \
        config.archive_base
    save = not args['--no-save']
    log = init_logging()
    reporter = ResultsReporter(archive_base, save=save, refresh=refresh,
                               log=log)
    if dead and not job:
        for run_name in run:
            # Fix: previously marked run[0] dead on every iteration instead
            # of the current run_name.
            try_mark_run_dead(run_name)
    elif dead and len(run) == 1 and job:
        reporter.report_jobs(run[0], job, dead=True)
    elif len(run) == 1 and job:
        reporter.report_jobs(run[0], job)
    elif run and len(run) > 1:
        reporter.report_runs(run)
    elif run:
        reporter.report_run(run[0])
    elif args['--all-runs']:
        reporter.report_all_runs()
class ResultsSerializer(object):
    """
    This class exists to poke around in the archive directory doing things like
    assembling lists of test runs, lists of their jobs, and merging sets of job
    YAML files together to form JSON objects.
    """
    # Default set of YAML files merged together to describe one job.
    yamls = ('orig.config.yaml', 'config.yaml', 'info.yaml', 'summary.yaml')

    def __init__(self, archive_base, log=None):
        self.archive_base = archive_base or config.archive_base
        self.log = log or init_logging()

    def job_info(self, run_name, job_id, pretty=False, simple=False):
        """
        Given a run name and job id, merge the job's YAML files together.

        :param run_name: The name of the run.
        :param job_id: The job's id.
        :param pretty: Unused here; accepted for interface compatibility.
        :param simple(bool): Read less data for speed (only orig.config.yaml/info.yaml)
        :returns: A dict.
        """
        job_archive_dir = os.path.join(self.archive_base,
                                       run_name,
                                       job_id)
        job_info = {}
        # Use a local tuple of YAML names.  The previous code assigned to
        # self.yamls when simple=True, which permanently narrowed the YAML
        # list for every later (non-simple) call on this instance.
        if simple:
            yaml_names = ('orig.config.yaml', 'info.yaml')
        else:
            yaml_names = self.yamls
        for yaml_name in yaml_names:
            yaml_path = os.path.join(job_archive_dir, yaml_name)
            if not os.path.exists(yaml_path):
                continue
            with open(yaml_path) as yaml_file:
                partial_info = yaml.safe_load(yaml_file)
            if partial_info is not None:
                job_info.update(partial_info)
        if 'job_id' not in job_info:
            job_info['job_id'] = job_id
        if simple:
            return job_info
        # Use the teuthology log's mtime as the job's "updated" timestamp.
        log_path = os.path.join(job_archive_dir, 'teuthology.log')
        if os.path.exists(log_path):
            mtime = int(os.path.getmtime(log_path))
            mtime_dt = datetime.fromtimestamp(mtime)
            job_info['updated'] = str(mtime_dt)
        return job_info

    def json_for_job(self, run_name, job_id, pretty=False):
        """
        Given a run name and job id, merge the job's YAML files together to
        create a JSON object.

        :param run_name: The name of the run.
        :param job_id: The job's id.
        :returns: A JSON object (string).
        """
        job_info = self.job_info(run_name, job_id, pretty)
        if pretty:
            job_json = json.dumps(job_info, sort_keys=True, indent=4)
        else:
            job_json = json.dumps(job_info)
        return job_json

    def jobs_for_run(self, run_name):
        """
        Given a run name, look on the filesystem for directories containing job
        information, and return a dict mapping job IDs to job directories.

        :param run_name: The name of the run.
        :returns: A dict like: {'1': '/path/to/1', '2': 'path/to/2'}
        """
        archive_dir = os.path.join(self.archive_base, run_name)
        if not os.path.isdir(archive_dir):
            return {}
        jobs = {}
        for item in os.listdir(archive_dir):
            # Job directories are named by their (all-numeric) job id.
            if not re.match(r'\d+$', item):
                continue
            job_id = item
            job_dir = os.path.join(archive_dir, job_id)
            if os.path.isdir(job_dir):
                jobs[job_id] = job_dir
        return jobs

    def running_jobs_for_run(self, run_name):
        """
        Like jobs_for_run(), but only returns jobs with no summary.yaml

        :param run_name: The name of the run.
        :returns: A dict like: {'1': '/path/to/1', '2': 'path/to/2'}
        """
        jobs = self.jobs_for_run(run_name)
        # Iterate a snapshot of the keys: popping while iterating the live
        # view raises RuntimeError on Python 3.
        for job_id in list(jobs.keys()):
            if os.path.exists(os.path.join(jobs[job_id], 'summary.yaml')):
                jobs.pop(job_id)
        return jobs

    @property
    def all_runs(self):
        """
        Look in the base archive directory for all test runs. Return a list of
        their names.
        """
        archive_base = self.archive_base
        if not os.path.isdir(archive_base):
            return []
        runs = []
        for run_name in os.listdir(archive_base):
            if not os.path.isdir(os.path.join(archive_base, run_name)):
                continue
            runs.append(run_name)
        return runs
class ResultsReporter(object):
    """
    Pushes run/job results from the archive directory to the results server
    over HTTP, and can query or delete runs and jobs on that server.
    """
    # Local file remembering the last run that was successfully reported.
    last_run_file = 'last_successful_run'

    def __init__(self, archive_base=None, base_uri=None, save=False,
                 refresh=False, log=None):
        self.log = log or init_logging()
        self.archive_base = archive_base or config.archive_base
        self.base_uri = base_uri or config.results_server
        if self.base_uri:
            self.base_uri = self.base_uri.rstrip('/')
        self.serializer = ResultsSerializer(archive_base, log=self.log)
        self.save_last_run = save
        self.refresh = refresh
        self.session = self._make_session()
        if not self.base_uri:
            msg = "No results_server set in {yaml}; cannot report results"
            self.log.warn(msg.format(yaml=config.yaml_path))

    def _make_session(self, max_retries=10):
        """Return a requests session that retries failed HTTP connections."""
        session = requests.Session()
        adapter = requests.adapters.HTTPAdapter(max_retries=max_retries)
        session.mount('http://', adapter)
        return session

    def report_all_runs(self):
        """
        Report *all* runs in self.archive_dir to the results server.
        """
        all_runs = self.serializer.all_runs
        last_run = self.last_run
        if self.save_last_run and last_run and last_run in all_runs:
            # Resume after the last run we already reported.
            next_index = all_runs.index(last_run) + 1
            runs = all_runs[next_index:]
        else:
            runs = all_runs
        return self.report_runs(runs)

    def report_runs(self, run_names):
        """
        Report several runs to the results server.

        :param run_names: The names of the runs.
        """
        num_runs = len(run_names)
        num_jobs = 0
        self.log.info("Posting %s runs", num_runs)
        for run in run_names:
            job_count = self.report_run(run)
            num_jobs += job_count
            if self.save_last_run:
                self.last_run = run
        del self.last_run
        self.log.info("Total: %s jobs in %s runs", num_jobs, len(run_names))

    def report_run(self, run_name, dead=False):
        """
        Report a single run to the results server.

        :param run_name: The name of the run.
        :returns: The number of jobs reported.
        """
        jobs = self.serializer.jobs_for_run(run_name)
        self.log.info("{name} {jobs} jobs dead={dead}".format(
            name=run_name,
            jobs=len(jobs),
            dead=str(dead),
        ))
        if jobs:
            if not self.refresh:
                # Skip runs the server already knows about.
                response = self.session.head("{base}/runs/{name}/".format(
                    base=self.base_uri, name=run_name))
                if response.status_code == 200:
                    self.log.info(" already present; skipped")
                    return 0
            self.report_jobs(run_name, jobs.keys(), dead=dead)
        elif not jobs:
            self.log.debug(" no jobs; skipped")
        return len(jobs)

    def report_jobs(self, run_name, job_ids, dead=False):
        """
        Report several jobs to the results server.

        :param run_name: The name of the run.
        :param job_ids: The jobs' ids
        """
        for job_id in job_ids:
            self.report_job(run_name, job_id, dead=dead)

    def report_job(self, run_name, job_id, job_info=None, dead=False):
        """
        Report a single job to the results server.

        :param run_name: The name of the run. The run must already exist.
        :param job_id: The job's id
        :param job_info: The job's info dict. Optional - if not present, we
                         look at the archive.
        """
        if job_info is not None and not isinstance(job_info, dict):
            raise TypeError("job_info must be a dict")
        run_uri = "{base}/runs/{name}/jobs/".format(
            base=self.base_uri, name=run_name,)
        if job_info is None:
            job_info = self.serializer.job_info(run_name, job_id)
        if dead and get_status(job_info) is None:
            set_status(job_info, 'dead')
        job_json = json.dumps(job_info)
        headers = {'content-type': 'application/json'}
        response = self.session.post(run_uri, data=job_json, headers=headers)
        if response.status_code == 200:
            return job_id
        # This call is wrapped in a try/except because of:
        #   http://tracker.ceph.com/issues/8166
        try:
            resp_json = response.json()
        except ValueError:
            resp_json = dict()
        if resp_json:
            msg = resp_json.get('message', '')
        else:
            msg = response.text
        if msg and msg.endswith('already exists'):
            # The job already exists on the server: update it with PUT.
            job_uri = os.path.join(run_uri, job_id, '')
            response = self.session.put(job_uri, data=job_json,
                                        headers=headers)
        elif msg:
            self.log.error(
                "POST to {uri} failed with status {status}: {msg}".format(
                    uri=run_uri,
                    status=response.status_code,
                    msg=msg,
                ))
        response.raise_for_status()
        return job_id

    @property
    def last_run(self):
        """
        The last run to be successfully reported.
        """
        # Cache under a single-underscore attribute.  The previous code
        # stored self.__last_run but tested hasattr(self, '__last_run');
        # name mangling made that check always False, so the cache never
        # took effect and the file was re-read on every access.
        if getattr(self, '_last_run', None) is not None:
            return self._last_run
        if os.path.exists(self.last_run_file):
            with open(self.last_run_file) as f:
                self._last_run = f.read().strip()
            return self._last_run

    @last_run.setter
    def last_run(self, run_name):
        self._last_run = run_name
        with open(self.last_run_file, 'w') as f:
            f.write(run_name)

    @last_run.deleter
    def last_run(self):
        self._last_run = None
        if os.path.exists(self.last_run_file):
            os.remove(self.last_run_file)

    def get_jobs(self, run_name, job_id=None, fields=None):
        """
        Query the results server for jobs in a run

        :param run_name: The name of the run
        :param job_id: Optionally get a single job instead of all
        :param fields: Optional. A list of fields to include in the result.
                       Defaults to returning all fields.
        """
        uri = "{base}/runs/{name}/jobs/".format(base=self.base_uri,
                                                name=run_name)
        if job_id:
            uri = os.path.join(uri, job_id)
        if fields:
            # 'job_id' is always needed to identify the results.
            if 'job_id' not in fields:
                fields.append('job_id')
            uri += "?fields=" + ','.join(fields)
        response = self.session.get(uri)
        response.raise_for_status()
        return response.json()

    def delete_job(self, run_name, job_id):
        """
        Delete a job from the results server.

        :param run_name: The name of the run
        :param job_id: The job's id
        """
        uri = "{base}/runs/{name}/jobs/{job_id}/".format(
            base=self.base_uri, name=run_name, job_id=job_id)
        response = self.session.delete(uri)
        response.raise_for_status()

    def delete_jobs(self, run_name, job_ids):
        """
        Delete multiple jobs from the results server.

        :param run_name: The name of the run
        :param job_ids: A list of job ids
        """
        for job_id in job_ids:
            # Fix: this previously called self.delete_job(self, run_name,
            # job_id), passing self twice and raising TypeError.
            self.delete_job(run_name, job_id)

    def delete_run(self, run_name):
        """
        Delete a run from the results server.

        :param run_name: The name of the run
        """
        uri = "{base}/runs/{name}/".format(
            base=self.base_uri, name=run_name)
        response = self.session.delete(uri)
        response.raise_for_status()
def push_job_info(run_name, job_id, job_info, base_uri=None):
    """
    Push a job's info (example: ctx.config) to the results server.

    :param run_name: The name of the run.
    :param job_id: The job's id
    :param job_info: A dict containing the job's information.
    :param base_uri: The endpoint of the results server. If you leave it out
                     ResultsReporter will ask teuthology.config.
    """
    # Fix: base_uri was accepted but silently ignored; pass it through so
    # the documented behaviour actually happens.
    reporter = ResultsReporter(base_uri=base_uri)
    if not reporter.base_uri:
        return
    reporter.report_job(run_name, job_id, job_info)
def try_push_job_info(job_config, extra_info=None):
    """
    Wrap push_job_info, gracefully doing nothing if:
      Anything inheriting from requests.exceptions.RequestException is raised
      A socket.error is raised
      config.results_server is not set
      config['job_id'] is not present or is None

    :param job_config: The ctx.config object to push
    :param extra_info: Optional second dict to push
    """
    log = init_logging()
    if not config.results_server:
        log.warning('No result_server in config; not reporting results')
        return
    if job_config.get('job_id') is None:
        log.warning('No job_id found; not reporting results')
        return
    run_name = job_config['name']
    job_id = job_config['job_id']
    if extra_info is None:
        job_info = job_config
    else:
        # job_config entries win over extra_info on key collisions.
        job_info = extra_info.copy()
        job_info.update(job_config)
    try:
        log.debug("Pushing job info to %s", config.results_server)
        push_job_info(run_name, job_id, job_info)
    except report_exceptions:
        log.exception("Could not report results to %s",
                      config.results_server)
def try_delete_jobs(run_name, job_ids, delete_empty_run=True):
    """
    Using the same error checking and retry mechanism as try_push_job_info(),
    delete one or more jobs

    :param run_name: The name of the run.
    :param job_ids: Either a single job_id, or a list of job_ids
    :param delete_empty_run: If this would empty the run, delete it.
    """
    log = init_logging()
    # Normalize job_ids to a list of strings (Python 2: basestring covers
    # both str and unicode).
    if isinstance(job_ids, int):
        job_ids = [str(job_ids)]
    elif isinstance(job_ids, basestring):
        job_ids = [job_ids]
    reporter = ResultsReporter()
    if not reporter.base_uri:
        # No results server configured; nothing to delete.
        return
    log.debug("Deleting jobs from {server}: {jobs}".format(
        server=config.results_server, jobs=str(job_ids)))
    if delete_empty_run:
        # If the requested jobs are exactly the run's remaining jobs, delete
        # the whole run instead; on failure fall through to per-job deletes.
        got_jobs = reporter.get_jobs(run_name, fields=['job_id'])
        got_job_ids = [j['job_id'] for j in got_jobs]
        if sorted(got_job_ids) == sorted(job_ids):
            try:
                reporter.delete_run(run_name)
                return
            except report_exceptions:
                log.exception("Run deletion failed")
    def try_delete_job(job_id):
        # Best-effort deletion of a single job; log and continue on failure.
        try:
            reporter.delete_job(run_name, job_id)
            return
        except report_exceptions:
            log.exception("Job deletion failed")
    for job_id in job_ids:
        try_delete_job(job_id)
def try_mark_run_dead(run_name):
    """
    Using the same error checking and retry mechanism as try_push_job_info(),
    mark any unfinished jobs in a run as dead.

    :param run_name: The name of the run.
    """
    log = init_logging()
    reporter = ResultsReporter()
    if not reporter.base_uri:
        return
    log.debug("Marking run as dead: {name}".format(name=run_name))
    finished = ['pass', 'fail', 'dead']
    for job in reporter.get_jobs(run_name, fields=['status']):
        if job['status'] in finished:
            continue
        job_id = job['job_id']
        try:
            log.info("Marking job {job_id} as dead".format(job_id=job_id))
            reporter.report_job(run_name, job['job_id'], dead=True)
        except report_exceptions:
            log.exception("Could not mark job as dead: {job_id}".format(
                job_id=job_id))
| mit |
aldryn/djangocms-cascade | cmsplugin_cascade/south_migrations/0006_auto__add_unique_pluginextrafields_plugin_type_site.py | 6 | 5287 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration for cmsplugin_cascade.

    Adds (forwards) / removes (backwards) a unique constraint on the
    ('plugin_type', 'site') column pair of PluginExtraFields.  The
    ``models`` dict below is South's auto-generated frozen ORM snapshot
    and should not be edited by hand.
    """
    def forwards(self, orm):
        # Adding unique constraint on 'PluginExtraFields', fields ['plugin_type', 'site']
        db.create_unique(u'cmsplugin_cascade_pluginextrafields', ['plugin_type', 'site_id'])
    def backwards(self, orm):
        # Removing unique constraint on 'PluginExtraFields', fields ['plugin_type', 'site']
        db.delete_unique(u'cmsplugin_cascade_pluginextrafields', ['plugin_type', 'site_id'])
    models = {
        'cms.cmsplugin': {
            'Meta': {'object_name': 'CMSPlugin'},
            'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
            'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
            'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
            'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
        },
        'cms.placeholder': {
            'Meta': {'object_name': 'Placeholder'},
            'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slot': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
        },
        'cmsplugin_cascade.cascadeelement': {
            'Meta': {'object_name': 'CascadeElement', 'db_table': "'cmsplugin_cascade_element'"},
            'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "u'+'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['cms.CMSPlugin']"}),
            'glossary': ('jsonfield.fields.JSONField', [], {'default': '{}', 'null': 'True', 'blank': 'True'})
        },
        'cmsplugin_cascade.pluginextrafields': {
            'Meta': {'unique_together': "(('plugin_type', 'site'),)", 'object_name': 'PluginExtraFields'},
            'css_classes': ('jsonfield.fields.JSONField', [], {'default': '{}', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'inline_styles': ('jsonfield.fields.JSONField', [], {'default': '{}', 'null': 'True', 'blank': 'True'}),
            'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']"})
        },
        u'cmsplugin_cascade.sharablecascadeelement': {
            'Meta': {'object_name': 'SharableCascadeElement', 'db_table': "u'cmsplugin_cascade_sharableelement'"},
            'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "u'+'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['cms.CMSPlugin']"}),
            'glossary': ('jsonfield.fields.JSONField', [], {'default': '{}', 'null': 'True', 'blank': 'True'}),
            'shared_glossary': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cmsplugin_cascade.SharedGlossary']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'})
        },
        u'cmsplugin_cascade.sharedglossary': {
            'Meta': {'unique_together': "((u'plugin_type', u'identifier'),)", 'object_name': 'SharedGlossary'},
            'glossary': ('jsonfield.fields.JSONField', [], {'default': '{}', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'identifier': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
            'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
        },
        u'sites.site': {
            'Meta': {'ordering': "(u'domain',)", 'object_name': 'Site', 'db_table': "u'django_site'"},
            'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        }
    }
complete_apps = ['cmsplugin_cascade'] | mit |
Hellowlol/plexpy | lib/unidecode/x057.py | 252 | 4631 | data = (
'Guo ', # 0x00
'Yin ', # 0x01
'Hun ', # 0x02
'Pu ', # 0x03
'Yu ', # 0x04
'Han ', # 0x05
'Yuan ', # 0x06
'Lun ', # 0x07
'Quan ', # 0x08
'Yu ', # 0x09
'Qing ', # 0x0a
'Guo ', # 0x0b
'Chuan ', # 0x0c
'Wei ', # 0x0d
'Yuan ', # 0x0e
'Quan ', # 0x0f
'Ku ', # 0x10
'Fu ', # 0x11
'Yuan ', # 0x12
'Yuan ', # 0x13
'E ', # 0x14
'Tu ', # 0x15
'Tu ', # 0x16
'Tu ', # 0x17
'Tuan ', # 0x18
'Lue ', # 0x19
'Hui ', # 0x1a
'Yi ', # 0x1b
'Yuan ', # 0x1c
'Luan ', # 0x1d
'Luan ', # 0x1e
'Tu ', # 0x1f
'Ya ', # 0x20
'Tu ', # 0x21
'Ting ', # 0x22
'Sheng ', # 0x23
'Pu ', # 0x24
'Lu ', # 0x25
'Iri ', # 0x26
'Ya ', # 0x27
'Zai ', # 0x28
'Wei ', # 0x29
'Ge ', # 0x2a
'Yu ', # 0x2b
'Wu ', # 0x2c
'Gui ', # 0x2d
'Pi ', # 0x2e
'Yi ', # 0x2f
'Di ', # 0x30
'Qian ', # 0x31
'Qian ', # 0x32
'Zhen ', # 0x33
'Zhuo ', # 0x34
'Dang ', # 0x35
'Qia ', # 0x36
'Akutsu ', # 0x37
'Yama ', # 0x38
'Kuang ', # 0x39
'Chang ', # 0x3a
'Qi ', # 0x3b
'Nie ', # 0x3c
'Mo ', # 0x3d
'Ji ', # 0x3e
'Jia ', # 0x3f
'Zhi ', # 0x40
'Zhi ', # 0x41
'Ban ', # 0x42
'Xun ', # 0x43
'Tou ', # 0x44
'Qin ', # 0x45
'Fen ', # 0x46
'Jun ', # 0x47
'Keng ', # 0x48
'Tun ', # 0x49
'Fang ', # 0x4a
'Fen ', # 0x4b
'Ben ', # 0x4c
'Tan ', # 0x4d
'Kan ', # 0x4e
'Pi ', # 0x4f
'Zuo ', # 0x50
'Keng ', # 0x51
'Bi ', # 0x52
'Xing ', # 0x53
'Di ', # 0x54
'Jing ', # 0x55
'Ji ', # 0x56
'Kuai ', # 0x57
'Di ', # 0x58
'Jing ', # 0x59
'Jian ', # 0x5a
'Tan ', # 0x5b
'Li ', # 0x5c
'Ba ', # 0x5d
'Wu ', # 0x5e
'Fen ', # 0x5f
'Zhui ', # 0x60
'Po ', # 0x61
'Pan ', # 0x62
'Tang ', # 0x63
'Kun ', # 0x64
'Qu ', # 0x65
'Tan ', # 0x66
'Zhi ', # 0x67
'Tuo ', # 0x68
'Gan ', # 0x69
'Ping ', # 0x6a
'Dian ', # 0x6b
'Gua ', # 0x6c
'Ni ', # 0x6d
'Tai ', # 0x6e
'Pi ', # 0x6f
'Jiong ', # 0x70
'Yang ', # 0x71
'Fo ', # 0x72
'Ao ', # 0x73
'Liu ', # 0x74
'Qiu ', # 0x75
'Mu ', # 0x76
'Ke ', # 0x77
'Gou ', # 0x78
'Xue ', # 0x79
'Ba ', # 0x7a
'Chi ', # 0x7b
'Che ', # 0x7c
'Ling ', # 0x7d
'Zhu ', # 0x7e
'Fu ', # 0x7f
'Hu ', # 0x80
'Zhi ', # 0x81
'Chui ', # 0x82
'La ', # 0x83
'Long ', # 0x84
'Long ', # 0x85
'Lu ', # 0x86
'Ao ', # 0x87
'Tay ', # 0x88
'Pao ', # 0x89
'[?] ', # 0x8a
'Xing ', # 0x8b
'Dong ', # 0x8c
'Ji ', # 0x8d
'Ke ', # 0x8e
'Lu ', # 0x8f
'Ci ', # 0x90
'Chi ', # 0x91
'Lei ', # 0x92
'Gai ', # 0x93
'Yin ', # 0x94
'Hou ', # 0x95
'Dui ', # 0x96
'Zhao ', # 0x97
'Fu ', # 0x98
'Guang ', # 0x99
'Yao ', # 0x9a
'Duo ', # 0x9b
'Duo ', # 0x9c
'Gui ', # 0x9d
'Cha ', # 0x9e
'Yang ', # 0x9f
'Yin ', # 0xa0
'Fa ', # 0xa1
'Gou ', # 0xa2
'Yuan ', # 0xa3
'Die ', # 0xa4
'Xie ', # 0xa5
'Ken ', # 0xa6
'Jiong ', # 0xa7
'Shou ', # 0xa8
'E ', # 0xa9
'Ha ', # 0xaa
'Dian ', # 0xab
'Hong ', # 0xac
'Wu ', # 0xad
'Kua ', # 0xae
'[?] ', # 0xaf
'Tao ', # 0xb0
'Dang ', # 0xb1
'Kai ', # 0xb2
'Gake ', # 0xb3
'Nao ', # 0xb4
'An ', # 0xb5
'Xing ', # 0xb6
'Xian ', # 0xb7
'Huan ', # 0xb8
'Bang ', # 0xb9
'Pei ', # 0xba
'Ba ', # 0xbb
'Yi ', # 0xbc
'Yin ', # 0xbd
'Han ', # 0xbe
'Xu ', # 0xbf
'Chui ', # 0xc0
'Cen ', # 0xc1
'Geng ', # 0xc2
'Ai ', # 0xc3
'Peng ', # 0xc4
'Fang ', # 0xc5
'Que ', # 0xc6
'Yong ', # 0xc7
'Xun ', # 0xc8
'Jia ', # 0xc9
'Di ', # 0xca
'Mai ', # 0xcb
'Lang ', # 0xcc
'Xuan ', # 0xcd
'Cheng ', # 0xce
'Yan ', # 0xcf
'Jin ', # 0xd0
'Zhe ', # 0xd1
'Lei ', # 0xd2
'Lie ', # 0xd3
'Bu ', # 0xd4
'Cheng ', # 0xd5
'Gomi ', # 0xd6
'Bu ', # 0xd7
'Shi ', # 0xd8
'Xun ', # 0xd9
'Guo ', # 0xda
'Jiong ', # 0xdb
'Ye ', # 0xdc
'Nian ', # 0xdd
'Di ', # 0xde
'Yu ', # 0xdf
'Bu ', # 0xe0
'Ya ', # 0xe1
'Juan ', # 0xe2
'Sui ', # 0xe3
'Pi ', # 0xe4
'Cheng ', # 0xe5
'Wan ', # 0xe6
'Ju ', # 0xe7
'Lun ', # 0xe8
'Zheng ', # 0xe9
'Kong ', # 0xea
'Chong ', # 0xeb
'Dong ', # 0xec
'Dai ', # 0xed
'Tan ', # 0xee
'An ', # 0xef
'Cai ', # 0xf0
'Shu ', # 0xf1
'Beng ', # 0xf2
'Kan ', # 0xf3
'Zhi ', # 0xf4
'Duo ', # 0xf5
'Yi ', # 0xf6
'Zhi ', # 0xf7
'Yi ', # 0xf8
'Pei ', # 0xf9
'Ji ', # 0xfa
'Zhun ', # 0xfb
'Qi ', # 0xfc
'Sao ', # 0xfd
'Ju ', # 0xfe
'Ni ', # 0xff
)
| gpl-3.0 |
cvegaj/ElectriCERT | venv3/lib/python3.6/site-packages/jsonschema/tests/test_validators.py | 20 | 34795 | from collections import deque
from contextlib import contextmanager
import json
from jsonschema import FormatChecker, ValidationError
from jsonschema.tests.compat import mock, unittest
from jsonschema.validators import (
RefResolutionError, UnknownType, Draft3Validator,
Draft4Validator, RefResolver, create, extend, validator_for, validate,
)
class TestCreateAndExtend(unittest.TestCase):
    """Tests for jsonschema.validators.create() and extend()."""
    def setUp(self):
        # Build a brand-new validator class whose single keyword, "smelly",
        # is handled by a MagicMock so its calls can be asserted on.
        self.meta_schema = {u"properties": {u"smelly": {}}}
        self.smelly = mock.MagicMock()
        self.validators = {u"smelly": self.smelly}
        self.types = {u"dict": dict}
        self.Validator = create(
            meta_schema=self.meta_schema,
            validators=self.validators,
            default_types=self.types,
        )
        # A schema that exercises the "smelly" keyword with value 12.
        self.validator_value = 12
        self.schema = {u"smelly": self.validator_value}
        self.validator = self.Validator(self.schema)
    def test_attrs(self):
        # The class-level registries reflect exactly what create() was given.
        self.assertEqual(self.Validator.VALIDATORS, self.validators)
        self.assertEqual(self.Validator.META_SCHEMA, self.meta_schema)
        self.assertEqual(self.Validator.DEFAULT_TYPES, self.types)
    def test_init(self):
        self.assertEqual(self.validator.schema, self.schema)
    def test_iter_errors(self):
        instance = "hello"
        # No errors from the keyword -> no errors from the validator.
        self.smelly.return_value = []
        self.assertEqual(list(self.validator.iter_errors(instance)), [])
        # Errors yielded by the keyword are passed through unchanged.
        error = mock.Mock()
        self.smelly.return_value = [error]
        self.assertEqual(list(self.validator.iter_errors(instance)), [error])
        # The keyword is invoked as (validator, keyword value, instance, schema).
        self.smelly.assert_called_with(
            self.validator, self.validator_value, instance, self.schema,
        )
    def test_if_a_version_is_provided_it_is_registered(self):
        with mock.patch("jsonschema.validators.validates") as validates:
            validates.side_effect = lambda version: lambda cls: cls
            Validator = create(meta_schema={u"id": ""}, version="my version")
        validates.assert_called_once_with("my version")
        self.assertEqual(Validator.__name__, "MyVersionValidator")
    def test_if_a_version_is_not_provided_it_is_not_registered(self):
        with mock.patch("jsonschema.validators.validates") as validates:
            create(meta_schema={u"id": "id"})
        self.assertFalse(validates.called)
    def test_extend(self):
        # extend() returns a new class with a merged validator registry and
        # must leave the original class untouched.
        validators = dict(self.Validator.VALIDATORS)
        new = mock.Mock()
        Extended = extend(self.Validator, validators={u"a new one": new})
        validators.update([(u"a new one", new)])
        self.assertEqual(Extended.VALIDATORS, validators)
        self.assertNotIn(u"a new one", self.Validator.VALIDATORS)
        self.assertEqual(Extended.META_SCHEMA, self.Validator.META_SCHEMA)
        self.assertEqual(Extended.DEFAULT_TYPES, self.Validator.DEFAULT_TYPES)
class TestIterErrors(unittest.TestCase):
    """Behavior of ``iter_errors`` on a real draft 3 validator."""

    def setUp(self):
        self.validator = Draft3Validator({})

    def test_iter_errors(self):
        """Each failing keyword in the schema produces its own error."""
        instance = [1, 2]
        schema = {
            u"disallow": u"array",
            u"enum": [["a", "b", "c"], ["d", "e", "f"]],
            u"minItems": 3,
        }

        got = (e.message for e in self.validator.iter_errors(instance, schema))
        expected = [
            "%r is disallowed for [1, 2]" % (schema["disallow"],),
            "[1, 2] is too short",
            "[1, 2] is not one of %r" % (schema["enum"],),
        ]
        # Compare sorted: the order in which errors are yielded is not
        # part of the contract.
        self.assertEqual(sorted(got), sorted(expected))

    def test_iter_errors_multiple_failures_one_validator(self):
        """A single ``properties`` keyword can yield several errors."""
        instance = {"foo": 2, "bar": [1], "baz": 15, "quux": "spam"}
        schema = {
            u"properties": {
                "foo": {u"type": "string"},
                "bar": {u"minItems": 2},
                "baz": {u"maximum": 10, u"enum": [2, 4, 6, 8]},
            },
        }

        errors = list(self.validator.iter_errors(instance, schema))
        # foo: wrong type; bar: too short; baz: fails maximum AND enum.
        self.assertEqual(len(errors), 4)
class TestValidationErrorMessages(unittest.TestCase):
    """The error messages produced by each validator are human-readable."""

    def message_for(self, instance, schema, *args, **kwargs):
        """Validate ``instance`` against ``schema`` and return the message.

        Defaults to ``Draft3Validator``; pass ``cls=`` to override.
        """
        kwargs.setdefault("cls", Draft3Validator)
        with self.assertRaises(ValidationError) as e:
            validate(instance, schema, *args, **kwargs)
        return e.exception.message

    def test_single_type_failure(self):
        message = self.message_for(instance=1, schema={u"type": u"string"})
        self.assertEqual(message, "1 is not of type %r" % u"string")

    def test_single_type_list_failure(self):
        # A one-element type list reads the same as a bare type.
        message = self.message_for(instance=1, schema={u"type": [u"string"]})
        self.assertEqual(message, "1 is not of type %r" % u"string")

    def test_multiple_type_failure(self):
        types = u"string", u"object"
        message = self.message_for(instance=1, schema={u"type": list(types)})
        self.assertEqual(message, "1 is not of type %r, %r" % types)

    def test_object_without_title_type_failure(self):
        # Draft 3 allows schemas as types; an unnamed one is shown verbatim.
        type = {u"type": [{u"minimum": 3}]}
        message = self.message_for(instance=1, schema={u"type": [type]})
        self.assertEqual(message, "1 is not of type %r" % (type,))

    def test_object_with_name_type_failure(self):
        # ...while a named schema-type is shown by its name.
        name = "Foo"
        schema = {u"type": [{u"name": name, u"minimum": 3}]}
        message = self.message_for(instance=1, schema=schema)
        self.assertEqual(message, "1 is not of type %r" % (name,))

    def test_minimum(self):
        message = self.message_for(instance=1, schema={"minimum": 2})
        self.assertEqual(message, "1 is less than the minimum of 2")

    def test_maximum(self):
        message = self.message_for(instance=1, schema={"maximum": 0})
        self.assertEqual(message, "1 is greater than the maximum of 0")

    def test_dependencies_failure_has_single_element_not_list(self):
        depend, on = "bar", "foo"
        schema = {u"dependencies": {depend: on}}
        message = self.message_for({"bar": 2}, schema)
        self.assertEqual(message, "%r is a dependency of %r" % (on, depend))

    def test_additionalItems_single_failure(self):
        message = self.message_for(
            [2], {u"items": [], u"additionalItems": False},
        )
        self.assertIn("(2 was unexpected)", message)

    def test_additionalItems_multiple_failures(self):
        # The message pluralizes when several items are unexpected.
        message = self.message_for(
            [1, 2, 3], {u"items": [], u"additionalItems": False}
        )
        self.assertIn("(1, 2, 3 were unexpected)", message)

    def test_additionalProperties_single_failure(self):
        additional = "foo"
        schema = {u"additionalProperties": False}
        message = self.message_for({additional: 2}, schema)
        self.assertIn("(%r was unexpected)" % (additional,), message)

    def test_additionalProperties_multiple_failures(self):
        schema = {u"additionalProperties": False}
        message = self.message_for(dict.fromkeys(["foo", "bar"]), schema)

        self.assertIn(repr("foo"), message)
        self.assertIn(repr("bar"), message)
        self.assertIn("were unexpected)", message)

    def test_invalid_format_default_message(self):
        checker = FormatChecker(formats=())
        check_fn = mock.Mock(return_value=False)
        checker.checks(u"thing")(check_fn)

        schema = {u"format": u"thing"}
        message = self.message_for("bla", schema, format_checker=checker)

        self.assertIn(repr("bla"), message)
        self.assertIn(repr("thing"), message)
        self.assertIn("is not a", message)

    def test_additionalProperties_false_patternProperties(self):
        """Unmatched properties list every pattern they failed to match."""
        schema = {u"type": u"object",
                  u"additionalProperties": False,
                  u"patternProperties": {
                      u"^abc$": {u"type": u"string"},
                      u"^def$": {u"type": u"string"}
                  }}
        message = self.message_for({u"zebra": 123}, schema,
                                   cls=Draft4Validator)
        self.assertEqual(
            message,
            "{} does not match any of the regexes: {}, {}".format(
                repr(u"zebra"), repr(u"^abc$"), repr(u"^def$"),
            ),
        )
        message = self.message_for({u"zebra": 123, u"fish": 456}, schema,
                                   cls=Draft4Validator)
        self.assertEqual(
            message,
            "{}, {} do not match any of the regexes: {}, {}".format(
                repr(u"fish"), repr(u"zebra"), repr(u"^abc$"), repr(u"^def$")
            ),
        )
class TestValidationErrorDetails(unittest.TestCase):
    """ValidationError carries full context: paths, parents and sub-errors."""
    # TODO: These really need unit tests for each individual validator, rather
    #       than just these higher level tests.

    def test_anyOf(self):
        """An anyOf failure wraps each branch's failure in ``context``."""
        instance = 5
        schema = {
            "anyOf": [
                {"minimum": 20},
                {"type": "string"},
            ],
        }

        validator = Draft4Validator(schema)
        errors = list(validator.iter_errors(instance))
        self.assertEqual(len(errors), 1)
        e = errors[0]

        # The top-level error describes the anyOf keyword itself.
        self.assertEqual(e.validator, "anyOf")
        self.assertEqual(e.validator_value, schema["anyOf"])
        self.assertEqual(e.instance, instance)
        self.assertEqual(e.schema, schema)
        self.assertIsNone(e.parent)

        self.assertEqual(e.path, deque([]))
        self.assertEqual(e.relative_path, deque([]))
        self.assertEqual(e.absolute_path, deque([]))

        self.assertEqual(e.schema_path, deque(["anyOf"]))
        self.assertEqual(e.relative_schema_path, deque(["anyOf"]))
        self.assertEqual(e.absolute_schema_path, deque(["anyOf"]))

        # Each failed subschema contributes one child error.
        self.assertEqual(len(e.context), 2)

        e1, e2 = sorted_errors(e.context)

        self.assertEqual(e1.validator, "minimum")
        self.assertEqual(e1.validator_value, schema["anyOf"][0]["minimum"])
        self.assertEqual(e1.instance, instance)
        self.assertEqual(e1.schema, schema["anyOf"][0])
        self.assertIs(e1.parent, e)

        self.assertEqual(e1.path, deque([]))
        self.assertEqual(e1.absolute_path, deque([]))
        self.assertEqual(e1.relative_path, deque([]))

        # Relative schema paths are relative to the parent error; absolute
        # ones are rooted at the top of the schema.
        self.assertEqual(e1.schema_path, deque([0, "minimum"]))
        self.assertEqual(e1.relative_schema_path, deque([0, "minimum"]))
        self.assertEqual(
            e1.absolute_schema_path, deque(["anyOf", 0, "minimum"]),
        )

        self.assertFalse(e1.context)

        self.assertEqual(e2.validator, "type")
        self.assertEqual(e2.validator_value, schema["anyOf"][1]["type"])
        self.assertEqual(e2.instance, instance)
        self.assertEqual(e2.schema, schema["anyOf"][1])
        self.assertIs(e2.parent, e)

        self.assertEqual(e2.path, deque([]))
        self.assertEqual(e2.relative_path, deque([]))
        self.assertEqual(e2.absolute_path, deque([]))

        self.assertEqual(e2.schema_path, deque([1, "type"]))
        self.assertEqual(e2.relative_schema_path, deque([1, "type"]))
        self.assertEqual(e2.absolute_schema_path, deque(["anyOf", 1, "type"]))

        self.assertEqual(len(e2.context), 0)

    def test_type(self):
        """Draft 3 schema-valued ``type`` failures also populate context."""
        instance = {"foo": 1}
        schema = {
            "type": [
                {"type": "integer"},
                {
                    "type": "object",
                    "properties": {"foo": {"enum": [2]}},
                },
            ],
        }

        validator = Draft3Validator(schema)
        errors = list(validator.iter_errors(instance))
        self.assertEqual(len(errors), 1)
        e = errors[0]

        self.assertEqual(e.validator, "type")
        self.assertEqual(e.validator_value, schema["type"])
        self.assertEqual(e.instance, instance)
        self.assertEqual(e.schema, schema)
        self.assertIsNone(e.parent)

        self.assertEqual(e.path, deque([]))
        self.assertEqual(e.relative_path, deque([]))
        self.assertEqual(e.absolute_path, deque([]))

        self.assertEqual(e.schema_path, deque(["type"]))
        self.assertEqual(e.relative_schema_path, deque(["type"]))
        self.assertEqual(e.absolute_schema_path, deque(["type"]))

        self.assertEqual(len(e.context), 2)

        e1, e2 = sorted_errors(e.context)

        self.assertEqual(e1.validator, "type")
        self.assertEqual(e1.validator_value, schema["type"][0]["type"])
        self.assertEqual(e1.instance, instance)
        self.assertEqual(e1.schema, schema["type"][0])
        self.assertIs(e1.parent, e)

        self.assertEqual(e1.path, deque([]))
        self.assertEqual(e1.relative_path, deque([]))
        self.assertEqual(e1.absolute_path, deque([]))

        self.assertEqual(e1.schema_path, deque([0, "type"]))
        self.assertEqual(e1.relative_schema_path, deque([0, "type"]))
        self.assertEqual(e1.absolute_schema_path, deque(["type", 0, "type"]))

        self.assertFalse(e1.context)

        # The second branch failed deeper, inside properties/foo, so its
        # instance path points at the offending member.
        self.assertEqual(e2.validator, "enum")
        self.assertEqual(e2.validator_value, [2])
        self.assertEqual(e2.instance, 1)
        self.assertEqual(e2.schema, {u"enum": [2]})
        self.assertIs(e2.parent, e)

        self.assertEqual(e2.path, deque(["foo"]))
        self.assertEqual(e2.relative_path, deque(["foo"]))
        self.assertEqual(e2.absolute_path, deque(["foo"]))

        self.assertEqual(
            e2.schema_path, deque([1, "properties", "foo", "enum"]),
        )
        self.assertEqual(
            e2.relative_schema_path, deque([1, "properties", "foo", "enum"]),
        )
        self.assertEqual(
            e2.absolute_schema_path,
            deque(["type", 1, "properties", "foo", "enum"]),
        )

        self.assertFalse(e2.context)

    def test_single_nesting(self):
        """Errors one level down carry the property name as their path."""
        instance = {"foo": 2, "bar": [1], "baz": 15, "quux": "spam"}
        schema = {
            "properties": {
                "foo": {"type": "string"},
                "bar": {"minItems": 2},
                "baz": {"maximum": 10, "enum": [2, 4, 6, 8]},
            },
        }

        validator = Draft3Validator(schema)
        errors = validator.iter_errors(instance)
        e1, e2, e3, e4 = sorted_errors(errors)

        self.assertEqual(e1.path, deque(["bar"]))
        self.assertEqual(e2.path, deque(["baz"]))
        self.assertEqual(e3.path, deque(["baz"]))
        self.assertEqual(e4.path, deque(["foo"]))

        self.assertEqual(e1.relative_path, deque(["bar"]))
        self.assertEqual(e2.relative_path, deque(["baz"]))
        self.assertEqual(e3.relative_path, deque(["baz"]))
        self.assertEqual(e4.relative_path, deque(["foo"]))

        self.assertEqual(e1.absolute_path, deque(["bar"]))
        self.assertEqual(e2.absolute_path, deque(["baz"]))
        self.assertEqual(e3.absolute_path, deque(["baz"]))
        self.assertEqual(e4.absolute_path, deque(["foo"]))

        self.assertEqual(e1.validator, "minItems")
        self.assertEqual(e2.validator, "enum")
        self.assertEqual(e3.validator, "maximum")
        self.assertEqual(e4.validator, "type")

    def test_multiple_nesting(self):
        """Deeply nested failures accumulate both instance and schema paths."""
        instance = [1, {"foo": 2, "bar": {"baz": [1]}}, "quux"]
        schema = {
            "type": "string",
            "items": {
                "type": ["string", "object"],
                "properties": {
                    "foo": {"enum": [1, 3]},
                    "bar": {
                        "type": "array",
                        "properties": {
                            "bar": {"required": True},
                            "baz": {"minItems": 2},
                        },
                    },
                },
            },
        }

        validator = Draft3Validator(schema)
        errors = validator.iter_errors(instance)
        e1, e2, e3, e4, e5, e6 = sorted_errors(errors)

        self.assertEqual(e1.path, deque([]))
        self.assertEqual(e2.path, deque([0]))
        self.assertEqual(e3.path, deque([1, "bar"]))
        self.assertEqual(e4.path, deque([1, "bar", "bar"]))
        self.assertEqual(e5.path, deque([1, "bar", "baz"]))
        self.assertEqual(e6.path, deque([1, "foo"]))

        self.assertEqual(e1.schema_path, deque(["type"]))
        self.assertEqual(e2.schema_path, deque(["items", "type"]))
        self.assertEqual(
            list(e3.schema_path), ["items", "properties", "bar", "type"],
        )
        self.assertEqual(
            list(e4.schema_path),
            ["items", "properties", "bar", "properties", "bar", "required"],
        )
        self.assertEqual(
            list(e5.schema_path),
            ["items", "properties", "bar", "properties", "baz", "minItems"]
        )
        self.assertEqual(
            list(e6.schema_path), ["items", "properties", "foo", "enum"],
        )

        self.assertEqual(e1.validator, "type")
        self.assertEqual(e2.validator, "type")
        self.assertEqual(e3.validator, "type")
        self.assertEqual(e4.validator, "required")
        self.assertEqual(e5.validator, "minItems")
        self.assertEqual(e6.validator, "enum")

    def test_recursive(self):
        """$ref recursion repeats the ref'd schema path at each level."""
        schema = {
            "definitions": {
                "node": {
                    "anyOf": [{
                        "type": "object",
                        "required": ["name", "children"],
                        "properties": {
                            "name": {
                                "type": "string",
                            },
                            "children": {
                                "type": "object",
                                "patternProperties": {
                                    "^.*$": {
                                        "$ref": "#/definitions/node",
                                    },
                                },
                            },
                        },
                    }],
                },
            },
            "type": "object",
            "required": ["root"],
            "properties": {"root": {"$ref": "#/definitions/node"}},
        }
        instance = {
            "root": {
                "name": "root",
                "children": {
                    "a": {
                        "name": "a",
                        "children": {
                            "ab": {
                                "name": "ab",
                                # missing "children"
                            },
                        },
                    },
                },
            },
        }
        validator = Draft4Validator(schema)

        e, = validator.iter_errors(instance)
        self.assertEqual(e.absolute_path, deque(["root"]))
        self.assertEqual(
            e.absolute_schema_path, deque(["properties", "root", "anyOf"]),
        )

        e1, = e.context
        self.assertEqual(e1.absolute_path, deque(["root", "children", "a"]))
        self.assertEqual(
            e1.absolute_schema_path, deque(
                [
                    "properties",
                    "root",
                    "anyOf",
                    0,
                    "properties",
                    "children",
                    "patternProperties",
                    "^.*$",
                    "anyOf",
                ],
            ),
        )

        e2, = e1.context
        self.assertEqual(
            e2.absolute_path, deque(
                ["root", "children", "a", "children", "ab"],
            ),
        )
        self.assertEqual(
            e2.absolute_schema_path, deque(
                [
                    "properties",
                    "root",
                    "anyOf",
                    0,
                    "properties",
                    "children",
                    "patternProperties",
                    "^.*$",
                    "anyOf",
                    0,
                    "properties",
                    "children",
                    "patternProperties",
                    "^.*$",
                    "anyOf",
                ],
            ),
        )

    def test_additionalProperties(self):
        """additionalProperties errors point at the offending property."""
        instance = {"bar": "bar", "foo": 2}
        schema = {"additionalProperties": {"type": "integer", "minimum": 5}}

        validator = Draft3Validator(schema)
        errors = validator.iter_errors(instance)
        e1, e2 = sorted_errors(errors)

        self.assertEqual(e1.path, deque(["bar"]))
        self.assertEqual(e2.path, deque(["foo"]))

        self.assertEqual(e1.validator, "type")
        self.assertEqual(e2.validator, "minimum")

    def test_patternProperties(self):
        """patternProperties errors point at the matched property."""
        instance = {"bar": 1, "foo": 2}
        schema = {
            "patternProperties": {
                "bar": {"type": "string"},
                "foo": {"minimum": 5},
            },
        }

        validator = Draft3Validator(schema)
        errors = validator.iter_errors(instance)
        e1, e2 = sorted_errors(errors)

        self.assertEqual(e1.path, deque(["bar"]))
        self.assertEqual(e2.path, deque(["foo"]))

        self.assertEqual(e1.validator, "type")
        self.assertEqual(e2.validator, "minimum")

    def test_additionalItems(self):
        """additionalItems errors carry the item's index as the path."""
        instance = ["foo", 1]
        schema = {
            "items": [],
            "additionalItems": {"type": "integer", "minimum": 5},
        }

        validator = Draft3Validator(schema)
        errors = validator.iter_errors(instance)
        e1, e2 = sorted_errors(errors)

        self.assertEqual(e1.path, deque([0]))
        self.assertEqual(e2.path, deque([1]))

        self.assertEqual(e1.validator, "type")
        self.assertEqual(e2.validator, "minimum")

    def test_additionalItems_with_items(self):
        """Indices are absolute: items covered by ``items`` are skipped."""
        instance = ["foo", "bar", 1]
        schema = {
            "items": [{}],
            "additionalItems": {"type": "integer", "minimum": 5},
        }

        validator = Draft3Validator(schema)
        errors = validator.iter_errors(instance)
        e1, e2 = sorted_errors(errors)

        self.assertEqual(e1.path, deque([1]))
        self.assertEqual(e2.path, deque([2]))

        self.assertEqual(e1.validator, "type")
        self.assertEqual(e2.validator, "minimum")
class ValidatorTestMixin(object):
    """Shared tests run against every validator class.

    Subclasses must set ``validator_class`` to the class under test.
    """

    def setUp(self):
        self.instance = mock.Mock()
        self.schema = {}
        self.resolver = mock.Mock()
        self.validator = self.validator_class(self.schema)

    def test_valid_instances_are_valid(self):
        """``is_valid`` is True when ``iter_errors`` yields nothing."""
        errors = iter([])
        with mock.patch.object(
            self.validator, "iter_errors", return_value=errors,
        ):
            self.assertTrue(
                self.validator.is_valid(self.instance, self.schema)
            )

    def test_invalid_instances_are_not_valid(self):
        """``is_valid`` is False as soon as one error is yielded."""
        errors = iter([mock.Mock()])
        with mock.patch.object(
            self.validator, "iter_errors", return_value=errors,
        ):
            self.assertFalse(
                self.validator.is_valid(self.instance, self.schema)
            )

    def test_non_existent_properties_are_ignored(self):
        """Unknown schema keywords must not raise."""
        instance, my_property, my_value = mock.Mock(), mock.Mock(), mock.Mock()
        validate(instance=instance, schema={my_property: my_value})

    def test_it_creates_a_ref_resolver_if_not_provided(self):
        self.assertIsInstance(self.validator.resolver, RefResolver)

    def test_it_delegates_to_a_ref_resolver(self):
        """$ref resolution goes through the resolver's ``resolve`` method."""
        resolver = RefResolver("", {})
        schema = {"$ref": mock.Mock()}

        with mock.patch.object(resolver, "resolve") as resolve:
            resolve.return_value = "url", {"type": "integer"}
            with self.assertRaises(ValidationError):
                self.validator_class(schema, resolver=resolver).validate(None)

        resolve.assert_called_once_with(schema["$ref"])

    def test_it_delegates_to_a_legacy_ref_resolver(self):
        """
        Legacy RefResolvers support only the context manager form of
        resolution.
        """

        class LegacyRefResolver(object):
            @contextmanager
            def resolving(this, ref):
                self.assertEqual(ref, "the ref")
                yield {"type": "integer"}

        resolver = LegacyRefResolver()
        schema = {"$ref": "the ref"}

        with self.assertRaises(ValidationError):
            self.validator_class(schema, resolver=resolver).validate(None)

    def test_is_type_is_true_for_valid_type(self):
        self.assertTrue(self.validator.is_type("foo", "string"))

    def test_is_type_is_false_for_invalid_type(self):
        self.assertFalse(self.validator.is_type("foo", "array"))

    def test_is_type_evades_bool_inheriting_from_int(self):
        # bool is a subclass of int in Python, but JSON distinguishes them.
        self.assertFalse(self.validator.is_type(True, "integer"))
        self.assertFalse(self.validator.is_type(True, "number"))

    def test_is_type_raises_exception_for_unknown_type(self):
        with self.assertRaises(UnknownType):
            self.validator.is_type("foo", object())
class TestDraft3Validator(ValidatorTestMixin, unittest.TestCase):
    """Draft-3-specific behavior on top of the shared mixin tests."""

    validator_class = Draft3Validator

    def test_is_type_is_true_for_any_type(self):
        """Draft 3's "any" type accepts every instance."""
        self.assertTrue(self.validator.is_valid(mock.Mock(), {"type": "any"}))

    def test_is_type_does_not_evade_bool_if_it_is_being_tested(self):
        """Booleans still count as booleans (and as "any")."""
        self.assertTrue(self.validator.is_type(True, "boolean"))
        self.assertTrue(self.validator.is_valid(True, {"type": "any"}))

    def test_non_string_custom_types(self):
        """Custom type names need not be strings (e.g. ``None``)."""
        schema = {'type': [None]}
        cls = self.validator_class(schema, types={None: type(None)})
        cls.validate(None, schema)
class TestDraft4Validator(ValidatorTestMixin, unittest.TestCase):
    # Runs the shared mixin tests against the draft 4 validator.
    validator_class = Draft4Validator
class TestBuiltinFormats(unittest.TestCase):
    """
    The built-in (specification-defined) formats do not raise type errors.

    If an instance or value is not a string, it should be ignored.
    """


# Generate one test method per built-in format. Each checks that a
# non-string instance passes validation, since format checks only apply
# to strings. The loop variable is bound via a default argument so every
# generated test keeps its own format.
for format in FormatChecker.checkers:
    def test(self, format=format):
        v = Draft4Validator({"format": format}, format_checker=FormatChecker())
        v.validate(123)

    name = "test_{0}_ignores_non_strings".format(format)
    test.__name__ = name
    setattr(TestBuiltinFormats, name, test)
    del test  # Ugh py.test. Stop discovering top level tests.
class TestValidatorFor(unittest.TestCase):
    """``validator_for`` picks the right class from a schema's $schema."""

    def test_draft_3(self):
        schema = {"$schema": "http://json-schema.org/draft-03/schema"}
        self.assertIs(validator_for(schema), Draft3Validator)

        # The trailing empty fragment is equivalent.
        schema = {"$schema": "http://json-schema.org/draft-03/schema#"}
        self.assertIs(validator_for(schema), Draft3Validator)

    def test_draft_4(self):
        schema = {"$schema": "http://json-schema.org/draft-04/schema"}
        self.assertIs(validator_for(schema), Draft4Validator)

        schema = {"$schema": "http://json-schema.org/draft-04/schema#"}
        self.assertIs(validator_for(schema), Draft4Validator)

    def test_custom_validator(self):
        """Classes registered via ``create(version=...)`` are found too."""
        Validator = create(meta_schema={"id": "meta schema id"}, version="12")
        schema = {"$schema": "meta schema id"}
        self.assertIs(validator_for(schema), Validator)

    def test_validator_for_jsonschema_default(self):
        # With no $schema, the latest supported draft is used.
        self.assertIs(validator_for({}), Draft4Validator)

    def test_validator_for_custom_default(self):
        self.assertIs(validator_for({}, default=None), None)
class TestValidate(unittest.TestCase):
    """``validate`` dispatches to (and schema-checks with) the right draft."""

    def test_draft3_validator_is_chosen(self):
        schema = {"$schema": "http://json-schema.org/draft-03/schema#"}
        with mock.patch.object(Draft3Validator, "check_schema") as chk_schema:
            validate({}, schema)
            chk_schema.assert_called_once_with(schema)
        # Make sure it works without the empty fragment
        schema = {"$schema": "http://json-schema.org/draft-03/schema"}
        with mock.patch.object(Draft3Validator, "check_schema") as chk_schema:
            validate({}, schema)
            chk_schema.assert_called_once_with(schema)

    def test_draft4_validator_is_chosen(self):
        schema = {"$schema": "http://json-schema.org/draft-04/schema#"}
        with mock.patch.object(Draft4Validator, "check_schema") as chk_schema:
            validate({}, schema)
            chk_schema.assert_called_once_with(schema)

    def test_draft4_validator_is_the_default(self):
        with mock.patch.object(Draft4Validator, "check_schema") as chk_schema:
            validate({}, {})
            chk_schema.assert_called_once_with({})
class TestRefResolver(unittest.TestCase):
    """Resolution behavior of ``RefResolver``: local, stored and remote refs."""

    # Fixed fixtures shared by all tests below.
    base_uri = ""
    stored_uri = "foo://stored"
    stored_schema = {"stored": "schema"}

    def setUp(self):
        self.referrer = {}
        self.store = {self.stored_uri: self.stored_schema}
        self.resolver = RefResolver(self.base_uri, self.referrer, self.store)

    def test_it_does_not_retrieve_schema_urls_from_the_network(self):
        """Known meta-schemas come from the bundled store, not the network."""
        ref = Draft3Validator.META_SCHEMA["id"]
        with mock.patch.object(self.resolver, "resolve_remote") as remote:
            with self.resolver.resolving(ref) as resolved:
                self.assertEqual(resolved, Draft3Validator.META_SCHEMA)
        self.assertFalse(remote.called)

    def test_it_resolves_local_refs(self):
        ref = "#/properties/foo"
        self.referrer["properties"] = {"foo": object()}
        with self.resolver.resolving(ref) as resolved:
            self.assertEqual(resolved, self.referrer["properties"]["foo"])

    def test_it_resolves_local_refs_with_id(self):
        """A schema's own ``id`` is usable as the base for its fragments."""
        schema = {"id": "http://bar/schema#", "a": {"foo": "bar"}}
        resolver = RefResolver.from_schema(schema)
        with resolver.resolving("#/a") as resolved:
            self.assertEqual(resolved, schema["a"])
        with resolver.resolving("http://bar/schema#/a") as resolved:
            self.assertEqual(resolved, schema["a"])

    def test_it_retrieves_stored_refs(self):
        with self.resolver.resolving(self.stored_uri) as resolved:
            self.assertIs(resolved, self.stored_schema)

        # Entries added to the store later are honored too, including
        # fragment resolution within them.
        self.resolver.store["cached_ref"] = {"foo": 12}
        with self.resolver.resolving("cached_ref#/foo") as resolved:
            self.assertEqual(resolved, 12)

    def test_it_retrieves_unstored_refs_via_requests(self):
        """When ``requests`` is importable it is preferred for HTTP refs."""
        ref = "http://bar#baz"
        schema = {"baz": 12}

        with mock.patch("jsonschema.validators.requests") as requests:
            requests.get.return_value.json.return_value = schema
            with self.resolver.resolving(ref) as resolved:
                self.assertEqual(resolved, 12)
        requests.get.assert_called_once_with("http://bar")

    def test_it_retrieves_unstored_refs_via_urlopen(self):
        """Without ``requests``, ``urlopen`` is used as the fallback."""
        ref = "http://bar#baz"
        schema = {"baz": 12}

        with mock.patch("jsonschema.validators.requests", None):
            with mock.patch("jsonschema.validators.urlopen") as urlopen:
                urlopen.return_value.read.return_value = (
                    json.dumps(schema).encode("utf8"))
                with self.resolver.resolving(ref) as resolved:
                    self.assertEqual(resolved, 12)
        urlopen.assert_called_once_with("http://bar")

    def test_it_can_construct_a_base_uri_from_a_schema(self):
        schema = {"id": "foo"}
        resolver = RefResolver.from_schema(schema)
        self.assertEqual(resolver.base_uri, "foo")
        self.assertEqual(resolver.resolution_scope, "foo")
        # Every spelling of "this document" resolves to the schema itself.
        with resolver.resolving("") as resolved:
            self.assertEqual(resolved, schema)
        with resolver.resolving("#") as resolved:
            self.assertEqual(resolved, schema)
        with resolver.resolving("foo") as resolved:
            self.assertEqual(resolved, schema)
        with resolver.resolving("foo#") as resolved:
            self.assertEqual(resolved, schema)

    def test_it_can_construct_a_base_uri_from_a_schema_without_id(self):
        schema = {}
        resolver = RefResolver.from_schema(schema)
        self.assertEqual(resolver.base_uri, "")
        self.assertEqual(resolver.resolution_scope, "")
        with resolver.resolving("") as resolved:
            self.assertEqual(resolved, schema)
        with resolver.resolving("#") as resolved:
            self.assertEqual(resolved, schema)

    def test_custom_uri_scheme_handlers(self):
        """Per-scheme handlers override the default retrieval mechanisms."""
        schema = {"foo": "bar"}
        ref = "foo://bar"
        foo_handler = mock.Mock(return_value=schema)
        resolver = RefResolver("", {}, handlers={"foo": foo_handler})
        with resolver.resolving(ref) as resolved:
            self.assertEqual(resolved, schema)
        foo_handler.assert_called_once_with(ref)

    def test_cache_remote_on(self):
        """With caching enabled, a remote ref is fetched only once."""
        ref = "foo://bar"
        foo_handler = mock.Mock()
        resolver = RefResolver(
            "", {}, cache_remote=True, handlers={"foo": foo_handler},
        )
        with resolver.resolving(ref):
            pass
        with resolver.resolving(ref):
            pass
        foo_handler.assert_called_once_with(ref)

    def test_cache_remote_off(self):
        ref = "foo://bar"
        foo_handler = mock.Mock()
        resolver = RefResolver(
            "", {}, cache_remote=False, handlers={"foo": foo_handler},
        )
        with resolver.resolving(ref):
            pass
        self.assertEqual(foo_handler.call_count, 1)

    def test_if_you_give_it_junk_you_get_a_resolution_error(self):
        """Handler exceptions are wrapped in ``RefResolutionError``."""
        ref = "foo://bar"
        foo_handler = mock.Mock(side_effect=ValueError("Oh no! What's this?"))
        resolver = RefResolver("", {}, handlers={"foo": foo_handler})
        with self.assertRaises(RefResolutionError) as err:
            with resolver.resolving(ref):
                pass
        self.assertEqual(str(err.exception), "Oh no! What's this?")

    def test_helpful_error_message_on_failed_pop_scope(self):
        """Popping past the root scope raises a descriptive error."""
        resolver = RefResolver("", {})
        resolver.pop_scope()
        with self.assertRaises(RefResolutionError) as exc:
            resolver.pop_scope()
        self.assertIn("Failed to pop the scope", str(exc.exception))
class UniqueTupleItemsMixin(object):
    """
    A tuple instance properly formats validation errors for uniqueItems.

    See https://github.com/Julian/jsonschema/pull/224

    Subclasses must set ``validator_class`` to the draft under test.
    """

    def test_it_properly_formats_an_error_message(self):
        # Register tuple as an "array" type so the tuple instance is
        # actually validated by uniqueItems.
        validator = self.validator_class(
            schema={"uniqueItems": True},
            types={"array": (tuple,)},
        )
        with self.assertRaises(ValidationError) as e:
            validator.validate((1, 1))
        self.assertIn("(1, 1) has non-unique elements", str(e.exception))
class TestDraft4UniqueTupleItems(UniqueTupleItemsMixin, unittest.TestCase):
    # Exercise the uniqueItems tuple formatting against draft 4.
    validator_class = Draft4Validator
class TestDraft3UniqueTupleItems(UniqueTupleItemsMixin, unittest.TestCase):
    # Exercise the uniqueItems tuple formatting against draft 3.
    validator_class = Draft3Validator
def sorted_errors(errors):
    """Order validation errors deterministically for stable assertions.

    Errors compare by their stringified instance path first and their
    stringified schema path second; coercing every segment to ``str``
    lets mixed int/str path elements compare cleanly.
    """
    def _as_strings(path):
        return [str(segment) for segment in path]

    return sorted(
        errors,
        key=lambda error: (_as_strings(error.path),
                           _as_strings(error.schema_path)),
    )
| gpl-3.0 |
mwcz/phyton | old_files/src/photos/migrations/0004_auto__add_field_photo_image_hash.py | 1 | 3502 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: add ``Photo.image_hash``.

    The new column stores a 32-character (MD5-style) hash of the image
    file; existing rows get a placeholder default so the column can be
    added NOT NULL.
    """

    def forwards(self, orm):
        """Apply the migration: add the ``image_hash`` column."""
        # Adding field 'Photo.image_hash'
        db.add_column('photos_photo', 'image_hash', self.gf('django.db.models.fields.CharField')(default='01234567890123456789012345678901', max_length=32), keep_default=False)

    def backwards(self, orm):
        """Revert the migration: drop the ``image_hash`` column."""
        # Deleting field 'Photo.image_hash'
        db.delete_column('photos_photo', 'image_hash')

    # Frozen ORM snapshot of the photos app at this migration (generated
    # by South; do not edit by hand).
    models = {
        'photos.photo': {
            'Meta': {'object_name': 'Photo'},
            'caption': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
            'image_hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'mod_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
            'palette0': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
            'palette1': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
            'palette2': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
            'palette3': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
            'palette4': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
            'palette5': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
            'palette6': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
            'palette7': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
            'permalink': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'post_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}),
            'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'shot_date': ('django.db.models.fields.DateField', [], {}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
            'suggest0': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
            'suggest1': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
            'suggest2': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
            'suggest3': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
            'suggest4': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
            'suggest5': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
            'suggest6': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
            'suggest7': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
            'thumbnail': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '1024'})
        }
    }

    complete_apps = ['photos']
| agpl-3.0 |
Tehsmash/networking-cisco | networking_cisco/plugins/cisco/extensions/routerhostingdevice.py | 3 | 2145 | # Copyright 2014 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from networking_cisco import backwards_compatibility as bc
# Extension name and API alias under which this extension is registered.
ROUTERHOSTINGDEVICE = 'routerhost'
ROUTERHOSTINGDEVICE_ALIAS = ROUTERHOSTINGDEVICE
# Name of the attribute this extension adds to the router resource.
HOSTING_DEVICE_ATTR = ROUTERHOSTINGDEVICE + ':hosting_device'

# Attribute map merged into the core 'routers' resource: the attribute
# is read-only through the API (no POST/PUT) but visible in responses.
EXTENDED_ATTRIBUTES_2_0 = {
    'routers': {
        HOSTING_DEVICE_ATTR: {'allow_post': False, 'allow_put': False,
                              'default': bc.constants.ATTR_NOT_SPECIFIED,
                              'is_visible': True},
    }
}
class Routerhostingdevice(bc.extensions.ExtensionDescriptor):
    """Extension class to introduce hosting device information for routers.

    This class is used by Neutron's extension framework to add the
    hosting_device attribute to Neutron routers implemented in
    virtual/physical appliances.
    """

    @classmethod
    def get_name(cls):
        return "Hosting info for routing service"

    @classmethod
    def get_alias(cls):
        return ROUTERHOSTINGDEVICE_ALIAS

    @classmethod
    def get_description(cls):
        return ("Introduces hosting_device attribute for Neutron routers "
                "implemented in virtual/physical appliances")

    @classmethod
    def get_namespace(cls):
        return ("http://docs.openstack.org/ext/" + ROUTERHOSTINGDEVICE +
                "/api/v1.0")

    @classmethod
    def get_updated(cls):
        return "2014-02-07T10:00:00-00:00"

    def get_extended_resources(self, version):
        # Only API version 2.0 carries the extra router attribute.
        return EXTENDED_ATTRIBUTES_2_0 if version == "2.0" else {}
| apache-2.0 |
Black-Cog/Forge | core/ColorScience.py | 1 | 3207 |
import math
class ColorScience():
"""Methodes relative at Color Science"""
@staticmethod
def linearToSrgb(color):
colorOut = []
for i in range(3):
if color[i] <= 0.0031308:
colorTmp = color[i] * 12.92
colorOut.append( round(colorTmp, 6) )
else:
colorTmp = 1.055 * ( pow(color[i], 1 / 2.4) ) - 0.055
colorOut.append( round(colorTmp, 6) )
return colorOut
@staticmethod
def srgbToLinear(color):
colorOut = []
for i in range(3):
if color[i] <= 0.04045:
colorTmp = color[i] / 12.92
colorOut.append( round(colorTmp, 6) )
else:
colorTmp = pow( ( (color[i]+0.055) / 1.055 ), 2.4 )
colorOut.append( round(colorTmp, 6) )
return colorOut
@staticmethod
def floatToBits8(color):
return [ int(i*255) for i in color ]
@staticmethod
def bits8ToFloat(color):
return [ float(i/255) for i in color ]
@staticmethod
def applyLut(color, lut):
return None
@staticmethod
def removeLut(color, lut):
return None
def rgbToHsv(self, color):
# todo : fix forumla to don't have to make this ugly colorspace convertion
# colorSrgb = self.linearToSrgb( color )
colorSrgb = color
r = colorSrgb[0]
g = colorSrgb[1]
b = colorSrgb[2]
mini = min( r, g, b )
maxi = max( r, g, b )
v = maxi
delta = maxi - mini
if maxi:
s = delta / maxi
else:
s = 0
h = -1
return [ h, s, v ]
if delta:
if r == maxi:
h = ( g - b ) / delta
elif g == maxi:
h = 2 + ( b - r ) / delta
else:
h = 4 + ( r - g ) / delta
h *= 60.0
if h < 0 : h += 360
h /= 360.0
else:
h = 0
return [ h, s, v ]
def hsvToRgb(self, color):
h = color[0]
s = color[1]
v = color[2]
step = h / (1.0 / 6.0)
pos = step - math.floor( step )
if math.floor(step) % 2 : m = ( 1.0 - pos ) * v
else : m = pos * v
maximum = 1.0 * v
minimum = (1.0 - s) * v
medium = m + ( (1.0 - s)*(v - m) )
switchValue = math.floor( step )
if switchValue == 0:
r = maximum
g = medium
b = minimum
if switchValue == 1:
r = medium
g = maximum
b = minimum
if switchValue == 2:
r = minimum
g = maximum
b = medium
if switchValue == 3:
r = minimum
g = medium
b = maximum
if switchValue == 4:
r = medium
g = minimum
b = maximum
if switchValue == 5 or switchValue == 6 or switchValue == -6 :
r = maximum
g = minimum
b = medium
rgb = [r, g, b]
# todo : fix forumla to don't have to make this ugly colorspace convertion
# rgb = self.srgbToLinear( rgb )
return rgb
def hslToRgb(self, color):
    """Convert an HSL triplet (hue in [0, 1]) to linear RGB.

    Bug fix: the hue helper was defined as ``hueTorgb`` but invoked as
    ``hue2rgb``, so every call with saturation != 0 raised NameError.
    The helper is now named consistently.
    """
    h = color[0]
    s = color[1]
    l = color[2]
    if s == 0:
        # Achromatic: every channel equals the lightness.
        r = g = b = l
    else:
        def hue2rgb(p, q, t):
            # Map one hue offset onto its channel intensity (CSS algorithm).
            if t < 0.0:
                t += 1.0
            if t > 1.0:
                t -= 1.0
            if t < 1.0 / 6.0:
                return p + (q - p) * 6.0 * t
            if t < 1.0 / 2.0:
                return q
            if t < 2.0 / 3.0:
                return p + (q - p) * (2.0 / 3.0 - t) * 6.0
            return p
        if l < 0.5:
            q = l * (1.0 + s)
        else:
            q = l + s - l * s
        p = 2.0 * l - q
        r = hue2rgb(p, q, h + 1.0 / 3.0)
        g = hue2rgb(p, q, h)
        b = hue2rgb(p, q, h - 1.0 / 3.0)
    rgb = [r, g, b]
    # todo : fix forumla to don't have to make this ugly colorspace convertion
    rgb = self.srgbToLinear(rgb)
    return rgb
| bsd-3-clause |
ptisserand/ansible | test/units/modules/network/ios/ios_module.py | 73 | 2506 | # (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')

fixture_data = {}


def load_fixture(name):
    """Load a fixture file from the fixtures directory, caching the result.

    The file content is returned as parsed JSON when it is valid JSON,
    otherwise as the raw text.  Results are memoised in ``fixture_data``
    keyed by path, so each fixture is read at most once.
    """
    path = os.path.join(fixture_path, name)

    if path in fixture_data:
        return fixture_data[path]

    with open(path) as f:
        data = f.read()

    try:
        data = json.loads(data)
    except ValueError:
        # Not JSON -- keep the raw text.  The previous bare ``except:``
        # also swallowed unrelated errors (KeyboardInterrupt included).
        pass

    fixture_data[path] = data
    return data
class TestIosModule(ModuleTestCase):
    """Base test case for ios_* modules.

    Subclasses set ``self.module`` and override ``load_fixtures`` to stub
    out device communication; ``execute_module`` then runs the module and
    verifies failure status, changed state and the generated commands.
    """

    def execute_module(self, failed=False, changed=False, commands=None, sort=True, defaults=False):
        # Prime connection/fixture stubs before driving the module.
        self.load_fixtures(commands)

        if failed:
            result = self.failed()
            self.assertTrue(result['failed'], result)
        else:
            result = self.changed(changed)
            self.assertEqual(result['changed'], changed, result)

        if commands is not None:
            if sort:
                # Order-insensitive comparison for modules that may emit
                # commands in any order.
                self.assertEqual(sorted(commands), sorted(result['commands']), result['commands'])
            else:
                self.assertEqual(commands, result['commands'], result['commands'])

        return result

    def failed(self):
        # Run the module and assert it exits via fail_json.
        with self.assertRaises(AnsibleFailJson) as exc:
            self.module.main()

        result = exc.exception.args[0]
        self.assertTrue(result['failed'], result)

        return result

    def changed(self, changed=False):
        # Run the module and assert it exits via exit_json with the
        # expected ``changed`` flag.
        with self.assertRaises(AnsibleExitJson) as exc:
            self.module.main()

        result = exc.exception.args[0]
        self.assertEqual(result['changed'], changed, result)

        return result

    def load_fixtures(self, commands=None):
        # Hook for subclasses to register fixture data; no-op by default.
        pass
| gpl-3.0 |
tanmaykm/edx-platform | openedx/core/djangoapps/bookmarks/services.py | 44 | 4441 | """
Bookmarks service.
"""
import logging
from django.core.exceptions import ObjectDoesNotExist
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.exceptions import ItemNotFoundError
from request_cache.middleware import RequestCache
from . import DEFAULT_FIELDS, api
log = logging.getLogger(__name__)

# Request-cache key: "bookmarks.list.<user_id>.<course_key>".
CACHE_KEY_TEMPLATE = u"bookmarks.list.{}.{}"
class BookmarksService(object):
    """
    Per-user access layer over the bookmarks API with request-level caching.

    The first call to bookmarks() or is_bookmarked() fetches every bookmark
    the user has in the relevant course and stores it in the request cache,
    so repeated status checks during a single request (e.g. rendering
    courseware and annotating search results) hit the database only once.
    """

    def __init__(self, user, **kwargs):
        super(BookmarksService, self).__init__(**kwargs)
        self._user = user

    def _bookmarks_cache(self, course_key, fetch=False):
        """
        Return (and optionally populate) the cached bookmark list for a course.

        Arguments:
            course_key (CourseKey): course whose cached bookmarks are wanted.
            fetch (Bool): when True, fetch and cache the bookmarks if they
                are not cached yet.
        """
        course_key = modulestore().fill_in_run(course_key)
        if course_key.run is None:
            return []

        cache_key = CACHE_KEY_TEMPLATE.format(self._user.id, course_key)
        cached = RequestCache.get_request_cache().data.get(cache_key, None)
        if cached is None and fetch is True:
            cached = api.get_bookmarks(
                self._user, course_key=course_key, fields=DEFAULT_FIELDS
            )
            RequestCache.get_request_cache().data[cache_key] = cached

        return cached

    def bookmarks(self, course_key):
        """
        Return all of the current user's bookmarks in the given course.

        Arguments:
            course_key: CourseKey of the course.

        Returns:
            list of dict
        """
        return self._bookmarks_cache(course_key, fetch=True)

    def is_bookmarked(self, usage_key):
        """
        Return whether the current user has bookmarked the given block.

        Arguments:
            usage_key: UsageKey of the block.

        Returns:
            Bool
        """
        usage_id = unicode(usage_key)
        return any(
            entry['usage_id'] == usage_id
            for entry in self._bookmarks_cache(usage_key.course_key, fetch=True)
        )

    def set_bookmarked(self, usage_key):
        """
        Create a bookmark for the block.

        Arguments:
            usage_key: UsageKey of the block.

        Returns:
            Bool indicating whether the bookmark was added.
        """
        try:
            bookmark = api.create_bookmark(user=self._user, usage_key=usage_key)
        except ItemNotFoundError:
            log.error(u'Block with usage_id: %s not found.', usage_key)
            return False

        # Keep the request cache coherent if it is already populated.
        cached = self._bookmarks_cache(usage_key.course_key)
        if cached is not None:
            cached.append(bookmark)

        return True

    def unset_bookmarked(self, usage_key):
        """
        Remove the bookmark for the block.

        Arguments:
            usage_key: UsageKey of the block.

        Returns:
            Bool indicating whether the bookmark was removed.
        """
        try:
            api.delete_bookmark(self._user, usage_key=usage_key)
        except ObjectDoesNotExist:
            log.error(u'Bookmark with usage_id: %s does not exist.', usage_key)
            return False

        # Drop the stale entry from the request cache, if present.
        cached = self._bookmarks_cache(usage_key.course_key)
        if cached is not None:
            usage_id = unicode(usage_key)
            for index, entry in enumerate(cached):
                if entry['usage_id'] == usage_id:
                    del cached[index]
                    break

        return True
| agpl-3.0 |
yashodhank/frappe | frappe/frappeclient.py | 6 | 6590 | import requests
import json
import frappe
class AuthError(Exception):
    # Raised by FrappeClient.login() when the server rejects the credentials.
    pass
class FrappeException(Exception):
    # Raised when a server response carries a serialized exception ("exc").
    pass
class FrappeClient(object):
    """Thin HTTP client for a remote Frappe site's REST and RPC APIs.

    Keeps one logged-in ``requests`` session for the object's lifetime and
    is usable as a context manager (logs out on exit).
    NOTE: Python 2 code (print statements, ``basestring``, ``iteritems``).
    """

    def __init__(self, url, username, password, verify=True):
        # ``verify`` is forwarded to requests for TLS certificate checking.
        self.verify = verify
        self.session = requests.session()
        self.url = url
        self.login(username, password)

    def __enter__(self):
        return self

    def __exit__(self, *args, **kwargs):
        # Always log out when leaving a ``with`` block.
        self.logout()

    def login(self, username, password):
        """Authenticate the session; raises AuthError on bad credentials."""
        r = self.session.post(self.url, data={
            'cmd': 'login',
            'usr': username,
            'pwd': password
        }, verify=self.verify)

        if r.status_code==200 and r.json().get('message') == "Logged In":
            return r.json()
        else:
            raise AuthError

    def logout(self):
        """End the server-side session."""
        self.session.get(self.url, params={
            'cmd': 'logout',
        }, verify=self.verify)

    def get_list(self, doctype, fields='"*"', filters=None, limit_start=0, limit_page_length=0):
        """Returns list of records of a particular type"""
        if not isinstance(fields, basestring):
            fields = json.dumps(fields)
        params = {
            "fields": fields,
        }
        if filters:
            params["filters"] = json.dumps(filters)
        if limit_page_length:
            params["limit_start"] = limit_start
            params["limit_page_length"] = limit_page_length
        res = self.session.get(self.url + "/api/resource/" + doctype, params=params, verify=self.verify)
        return self.post_process(res)

    def insert(self, doc):
        """Create a new document from the dict ``doc`` (must carry "doctype")."""
        res = self.session.post(self.url + "/api/resource/" + doc.get("doctype"),
            data={"data":frappe.as_json(doc)}, verify=self.verify)
        return self.post_process(res)

    def update(self, doc):
        """Save changes to an existing document identified by doctype + name."""
        url = self.url + "/api/resource/" + doc.get("doctype") + "/" + doc.get("name")
        res = self.session.put(url, data={"data":frappe.as_json(doc)}, verify=self.verify)
        return self.post_process(res)

    def bulk_update(self, docs):
        """Update several documents in one RPC call."""
        return self.post_request({
            "cmd": "frappe.client.bulk_update",
            "docs": frappe.as_json(docs)
        })

    def delete(self, doctype, name):
        """Delete the named document."""
        return self.post_request({
            "cmd": "frappe.model.delete_doc",
            "doctype": doctype,
            "name": name
        })

    def submit(self, doclist):
        """Submit a (submittable) document."""
        return self.post_request({
            "cmd": "frappe.client.submit",
            "doclist": frappe.as_json(doclist)
        })

    def get_value(self, doctype, fieldname=None, filters=None):
        """Fetch a single field value (defaults to "name") matching filters."""
        return self.get_request({
            "cmd": "frappe.client.get_value",
            "doctype": doctype,
            "fieldname": fieldname or "name",
            "filters": frappe.as_json(filters)
        })

    def set_value(self, doctype, docname, fieldname, value):
        """Set one field on one document."""
        return self.post_request({
            "cmd": "frappe.client.set_value",
            "doctype": doctype,
            "name": docname,
            "fieldname": fieldname,
            "value": value
        })

    def cancel(self, doctype, name):
        """Cancel a submitted document."""
        return self.post_request({
            "cmd": "frappe.client.cancel",
            "doctype": doctype,
            "name": name
        })

    def get_doc(self, doctype, name="", filters=None, fields=None):
        """Fetch one document (or a filtered/projected listing when name is empty)."""
        params = {}
        if filters:
            params["filters"] = json.dumps(filters)
        if fields:
            params["fields"] = json.dumps(fields)

        res = self.session.get(self.url + "/api/resource/" + doctype + "/" + name,
            params=params, verify=self.verify)

        return self.post_process(res)

    def rename_doc(self, doctype, old_name, new_name):
        """Rename a document from old_name to new_name."""
        params = {
            "cmd": "frappe.client.rename_doc",
            "doctype": doctype,
            "old_name": old_name,
            "new_name": new_name
        }
        return self.post_request(params)

    def migrate_doctype(self, doctype, filters=None, update=None, verbose=1, exclude=None, preprocess=None):
        """Migrate records from another doctype"""
        # Pull child-table rows first so they can be attached to parents.
        meta = frappe.get_meta(doctype)
        tables = {}
        for df in meta.get_table_fields():
            if verbose: print "getting " + df.options
            tables[df.fieldname] = self.get_list(df.options, limit_page_length=999999)

        # get links
        if verbose: print "getting " + doctype
        docs = self.get_list(doctype, limit_page_length=999999, filters=filters)

        # build - attach children to parents
        if tables:
            docs = [frappe._dict(doc) for doc in docs]
            docs_map = dict((doc.name, doc) for doc in docs)

            for fieldname in tables:
                for child in tables[fieldname]:
                    child = frappe._dict(child)
                    if child.parent in docs_map:
                        docs_map[child.parent].setdefault(fieldname, []).append(child)

        if verbose: print "inserting " + doctype
        for doc in docs:
            if exclude and doc["name"] in exclude:
                continue

            if preprocess:
                preprocess(doc)

            if not doc.get("owner"):
                doc["owner"] = "Administrator"

            # Ensure the owner exists locally before inserting on their behalf.
            if doctype != "User" and not frappe.db.exists("User", doc.get("owner")):
                frappe.get_doc({"doctype": "User", "email": doc.get("owner"),
                    "first_name": doc.get("owner").split("@")[0] }).insert()

            if update:
                doc.update(update)

            doc["doctype"] = doctype
            new_doc = frappe.get_doc(doc)
            new_doc.insert()

            # Recursively bring over linked Communications and Files,
            # re-pointing them at the freshly inserted document.
            if not meta.istable:
                if doctype != "Communication":
                    self.migrate_doctype("Communication", {"reference_doctype": doctype, "reference_name": doc["name"]},
                        update={"reference_name": new_doc.name}, verbose=0)

                if doctype != "File":
                    self.migrate_doctype("File", {"attached_to_doctype": doctype,
                        "attached_to_name": doc["name"]}, update={"attached_to_name": new_doc.name}, verbose=0)

    def migrate_single(self, doctype):
        """Migrate a single (singleton) doctype document."""
        doc = self.get_doc(doctype, doctype)
        doc = frappe.get_doc(doc)

        # change modified so that there is no error
        doc.modified = frappe.db.get_single_value(doctype, "modified")
        frappe.get_doc(doc).insert()

    def get_api(self, method, params={}):
        """GET a whitelisted server method under /api/method/."""
        res = self.session.get(self.url + "/api/method/" + method + "/",
            params=params, verify=self.verify)
        return self.post_process(res)

    def post_api(self, method, params={}):
        """POST to a whitelisted server method under /api/method/."""
        res = self.session.post(self.url + "/api/method/" + method + "/",
            params=params, verify=self.verify)
        return self.post_process(res)

    def get_request(self, params):
        # Low-level GET against the site root with JSON-encoded params.
        res = self.session.get(self.url, params=self.preprocess(params), verify=self.verify)
        res = self.post_process(res)
        return res

    def post_request(self, data):
        # Low-level POST against the site root with JSON-encoded form data.
        res = self.session.post(self.url, data=self.preprocess(data), verify=self.verify)
        res = self.post_process(res)
        return res

    def preprocess(self, params):
        """convert dicts, lists to json"""
        for key, value in params.iteritems():
            if isinstance(value, (dict, list)):
                params[key] = json.dumps(value)

        return params

    def post_process(self, response):
        """Unwrap a server response; raise FrappeException on serialized errors."""
        try:
            rjson = response.json()
        except ValueError:
            # Non-JSON body: show it to aid debugging, then re-raise.
            print response.text
            raise

        if rjson and ("exc" in rjson) and rjson["exc"]:
            raise FrappeException(rjson["exc"])
        if 'message' in rjson:
            return rjson['message']
        elif 'data' in rjson:
            return rjson['data']
        else:
            return None
| mit |
BenProjex/ArchProject | Chip.py | 1 | 16126 | from abc import ABCMeta, abstractmethod, abstractproperty
import Wire
###############################################################
# Abstract Chip Class
# AbstractMethod Action: What function the chip performs
# AbstractProperty Name: Debugging tool retrieving the name of the chip
###############################################################
class Chip:
    """Abstract base class for all simulated chips.

    Concrete chips implement Action() (compute output wire values from
    input wires) and expose ``name`` for debugging.
    NOTE: ``__metaclass__`` is Python 2 syntax; under Python 3 it is just
    a class attribute and abstractness is not enforced.
    """
    __metaclass__ = ABCMeta

    @abstractmethod
    def Action(self): pass

    @property
    @abstractmethod
    def name(self):
        # Name property will be provided by the inheriting class
        # This will mostly be used for debugging
        pass
class REGISTERChip(Chip):
    """Register chip: latches its input while selected and the clock is high.

    Inputs: one data wire, one chip-select wire, one clock wire.
    """

    def __init__(self, wire_1, cs_wire, clock_wire, chip_id):
        """Wire up the register.

        Bug fix: the output wire's name used ``self.chip_id`` before the
        attribute was assigned, raising AttributeError on every
        construction; ``chip_id`` is now stored first.
        """
        self.chip_id = chip_id
        self.wire_1 = wire_1
        self.cs_wire = cs_wire
        self.clock_wire = clock_wire
        self.output_wire = Wire.Wire(chip_id + "_OUT_WIRE")
        self.register_value = None

    def Action(self):
        """Latch the input to the register and output when selected + clocked."""
        if self.cs_wire.get_value() == 0:
            # Chip not selected: hold the current value.
            return
        if self.clock_wire.get_value() == 1:
            # Selected and clock high: capture input, drive output.
            self.register_value = self.wire_1.get_value()
            self.output_wire.set_value(self.register_value)

    def name(self):
        # Debug aid: print this chip's identifier.
        print(self.chip_id)

    def get_value(self):
        """Return the last latched value (None until the first latch)."""
        return self.register_value
class XORChip(Chip):
    """Bitwise XOR gate: output wire carries wire_1 XOR wire_2."""

    def __init__(self, wire_1, wire_2, chip_id):
        # Bug fix: self.chip_id was read to name the output wire before it
        # was assigned, raising AttributeError; assign it first.
        self.chip_id = chip_id
        self.wire_1 = wire_1
        self.wire_2 = wire_2
        self.output_wire = Wire.Wire(chip_id + "_OUT_WIRE")

    def Action(self):
        """Drive the output wire with the XOR of the two inputs."""
        self.output_wire.set_value(self.wire_1.get_value() ^ self.wire_2.get_value())

    def name(self):
        # Debug aid: print this chip's identifier.
        print(self.chip_id)
class ANDChip(Chip):
    """Bitwise AND gate: output wire carries in_wire_1 AND in_wire_2."""

    def __init__(self, wire_1, wire_2, chip_id):
        self.in_wire_1 = wire_1
        self.in_wire_2 = wire_2
        self.out_wire = Wire.Wire(chip_id + "_out_wire")
        self.chip_id = chip_id

    def Action(self):
        """Drive the output wire with the AND of the two inputs."""
        self.out_wire.set_value(self.in_wire_1.get_value() & self.in_wire_2.get_value())

    def name(self):
        # Debug aid: print this chip's identifier.
        print("Chip ID: " + self.chip_id)
class NOTChip(Chip):
    """Bitwise NOT gate over a single input wire."""

    def __init__(self, wire_1, chip_id):
        # Bug fix: self.chip_id was read to name the output wire before it
        # was assigned, raising AttributeError; assign it first.
        self.chip_id = chip_id
        self.wire_1 = wire_1
        self.output_wire = Wire.Wire(chip_id + "_OUT_WIRE")

    def Action(self):
        # Bug fix: Python's ~ yields a negative int for nonzero input
        # (e.g. ~1 == -2), which is wrong for a wire value.  Masking to
        # 8 bits subsumes the old special case mapping 0 -> 255, since
        # ~0 & 0xFF == 255.  Assumes 8-bit wires (that special case
        # implies it) -- TODO confirm against the wire width used elsewhere.
        self.output_wire.set_value(~self.wire_1.get_value() & 0xFF)

    def name(self):
        # Debug aid: print this chip's identifier.
        print(self.chip_id)
class ORChip(Chip):
    """Bitwise OR gate: output wire carries wire_1 OR wire_2."""

    def __init__(self, wire_1, wire_2, chip_id):
        # Bug fix: self.chip_id was read to name the output wire before it
        # was assigned, raising AttributeError; assign it first.
        self.chip_id = chip_id
        self.wire_1 = wire_1
        self.wire_2 = wire_2
        self.output_wire = Wire.Wire(chip_id + "_OUT_WIRE")

    def Action(self):
        """Drive the output wire with the OR of the two inputs."""
        self.output_wire.set_value(self.wire_1.get_value() | self.wire_2.get_value())

    def name(self):
        # Debug aid: print this chip's identifier.
        print(self.chip_id)
class MUX2to1Chip(Chip):
    """2-to-1 multiplexer: routes one of two inputs to the output wire."""

    def __init__(self, wire_1, wire_2, select_wire_1, chip_id):
        """Wire up the mux.

        Bug fixes: ``self.chip_id`` was read before it was assigned when
        naming the output wire (AttributeError), and the select wire's
        *value* was sampled once at construction, so later changes on the
        select wire were ignored.  The wire object is now stored and read
        inside Action().
        """
        self.chip_id = chip_id
        self.wire_1 = wire_1
        self.wire_2 = wire_2
        self.select_wire_1 = select_wire_1
        self.output_wire = Wire.Wire(chip_id + "_OUT_WIRE")

    def Action(self):
        # Sample the select wire at action time so updates are honoured.
        if self.select_wire_1.get_value() == 0:
            self.output_wire.set_value(self.wire_1.get_value())
        else:
            self.output_wire.set_value(self.wire_2.get_value())

    def name(self):
        # Debug aid: print this chip's identifier.
        print(self.chip_id)
class MUX4to1Chip(Chip):
    """4-to-1 multiplexer: routes one of four inputs to the output wire."""

    def __init__(self, wire_1, wire_2, wire_3, wire_4, select_wire_1, chip_id):
        """Wire up the mux.

        Bug fixes: ``self.chip_id`` was read before it was assigned when
        naming the output wire (AttributeError), and the select wire's
        value was sampled once at construction, ignoring later changes;
        the wire object is now stored and read inside Action().
        """
        self.chip_id = chip_id
        self.wire_1 = wire_1
        self.wire_2 = wire_2
        self.wire_3 = wire_3
        self.wire_4 = wire_4
        self.select_wire_1 = select_wire_1
        self.output_wire = Wire.Wire(chip_id + "_OUT_WIRE")

    def Action(self):
        # Sample the select wire at action time so updates are honoured.
        select = self.select_wire_1.get_value()
        if select == 0:
            self.output_wire.set_value(self.wire_1.get_value())
        elif select == 1:
            self.output_wire.set_value(self.wire_2.get_value())
        elif select == 2:
            self.output_wire.set_value(self.wire_3.get_value())
        else:
            # Any other select value falls through to the last input,
            # matching the original behaviour.
            self.output_wire.set_value(self.wire_4.get_value())

    def name(self):
        # Debug aid: print this chip's identifier.
        print(self.chip_id)
class MUX8to1Chip(Chip):
    """8-to-1 multiplexer: routes one of eight inputs to the output wire."""

    def __init__(self, wire_1, wire_2, wire_3, wire_4, wire_5, wire_6, wire_7, wire_8, select_wire_1, chip_id):
        """Wire up the mux.

        Bug fixes: ``self.chip_id`` was read before it was assigned when
        naming the output wire (AttributeError), and the select wire's
        value was sampled once at construction, ignoring later changes;
        the wire object is now stored and read inside Action().
        """
        self.chip_id = chip_id
        self.wire_1 = wire_1
        self.wire_2 = wire_2
        self.wire_3 = wire_3
        self.wire_4 = wire_4
        self.wire_5 = wire_5
        self.wire_6 = wire_6
        self.wire_7 = wire_7
        self.wire_8 = wire_8
        self.select_wire_1 = select_wire_1
        self.output_wire = Wire.Wire(chip_id + "_OUT_WIRE")

    def Action(self):
        # Sample the select wire at action time so updates are honoured.
        inputs = (self.wire_1, self.wire_2, self.wire_3, self.wire_4,
                  self.wire_5, self.wire_6, self.wire_7, self.wire_8)
        select = self.select_wire_1.get_value()
        if 0 <= select <= 6:
            self.output_wire.set_value(inputs[select].get_value())
        else:
            # Any other select value falls through to the last input,
            # matching the original behaviour.
            self.output_wire.set_value(self.wire_8.get_value())

    def name(self):
        # Debug aid: print this chip's identifier.
        print(self.chip_id)
class DEMUX2to4Chip(Chip):
    """2-input, 4-output demultiplexer.

    Each *enabled* input wire is routed to one of the four output wires
    chosen by its matching select wire (select values must be 0-3).  When
    both inputs are enabled, the two selects must name different outputs.
    Since wire values are integers rather than bits, an out-of-range or
    colliding select is reported with an error message instead of routing.
    """

    def __init__(self, wire_1, wire_2, enable_wire_1, enable_wire_2, select_wire_1, select_wire_2, chip_id):
        # Bug fix: self.chip_id was read to name the output wires before it
        # was assigned, raising AttributeError; assign it first.
        self.chip_id = chip_id
        self.wire_1 = wire_1
        self.wire_2 = wire_2
        self.enable_wire_1 = enable_wire_1
        self.enable_wire_2 = enable_wire_2
        self.select_wire_1 = select_wire_1  # value 0-3 choosing an output
        self.select_wire_2 = select_wire_2  # value 0-3 choosing an output
        self.output_wire_0 = Wire.Wire(chip_id + "_OUT_WIRE_0")
        self.output_wire_1 = Wire.Wire(chip_id + "_OUT_WIRE_1")
        self.output_wire_2 = Wire.Wire(chip_id + "_OUT_WIRE_2")
        self.output_wire_3 = Wire.Wire(chip_id + "_OUT_WIRE_3")

    def _route(self, value, select):
        """Copy *value* onto the output wire chosen by *select* (0-3).

        Returns True on success; prints an error and returns False for a
        select outside 0-3.
        """
        outputs = (self.output_wire_0, self.output_wire_1,
                   self.output_wire_2, self.output_wire_3)
        if select in (0, 1, 2, 3):
            outputs[select].set_value(value)
            return True
        print("Error, select wire must hold a value between 0 and 3")
        return False

    def Action(self):
        """Route enabled inputs to the outputs chosen by their select wires."""
        enable_1 = self.enable_wire_1.get_value()
        enable_2 = self.enable_wire_2.get_value()

        if enable_1 == 0 and enable_2 == 0:
            print("No Demux Action, both enable wires disabled")
        elif enable_1 == 1 and enable_2 == 0:
            # Only input 1 enabled: steer it by select wire 1.
            self._route(self.wire_1.get_value(), self.select_wire_1.get_value())
        elif enable_1 == 0 and enable_2 == 1:
            # Only input 2 enabled: steer it by select wire 2.
            # Bug fix: this branch previously read select_wire_1; input 2 is
            # paired with select wire 2, as in the both-enabled branch below.
            self._route(self.wire_2.get_value(), self.select_wire_2.get_value())
        elif enable_1 == 1 and enable_2 == 1:
            select_1 = self.select_wire_1.get_value()
            select_2 = self.select_wire_2.get_value()
            if select_1 == select_2:
                print("Error in DEMUX, both select wires have selected the same register")
                return
            # Route input 1 first; abort (as before) if its select is invalid.
            if not self._route(self.wire_1.get_value(), select_1):
                return
            self._route(self.wire_2.get_value(), select_2)

    def name(self):
        # Debug aid: print this chip's identifier.
        print(self.chip_id)
class ADDSUBChip(Chip):
    """Adder/subtractor chip (arithmetic logic not yet implemented)."""

    def __init__(self, wire_1, wire_2, cin_wire_1, chip_id):
        # Bug fix: self.chip_id was read to name the output wires before it
        # was assigned, raising AttributeError; assign it first.
        self.chip_id = chip_id
        self.wire_1 = wire_1
        self.wire_2 = wire_2
        self.cin_wire_1 = cin_wire_1
        self.v_output_wire = Wire.Wire(chip_id + "_V_OUT_WIRE")
        self.c_output_wire = Wire.Wire(chip_id + "_C_OUT_WIRE")

    def Action(self):
        # NOTE(review): stub -- set_value() is called with no argument and
        # will raise TypeError.  The add/subtract result (presumably
        # selected by cin_wire_1) still needs to be computed here; confirm
        # the intended carry (C) / overflow (V) semantics with the design.
        self.v_output_wire.set_value()

    def name(self):
        # Debug aid: print this chip's identifier.
        print(self.chip_id)
class FLAGSChip(Chip):
    """Status-flag chip with Z/V/N/C outputs (logic not yet implemented)."""

    def __init__(self, wire_1, cs_wire, clock_wire, chip_id):
        # Bug fix: self.chip_id was read to name the output wires before it
        # was assigned, raising AttributeError; assign it first.
        self.chip_id = chip_id
        self.wire_1 = wire_1
        self.cs_wire = cs_wire
        self.clock_wire = clock_wire
        self.z_output_wire = Wire.Wire(chip_id + "_Z_OUT_WIRE")
        self.v_output_wire = Wire.Wire(chip_id + "_V_OUT_WIRE")
        self.n_output_wire = Wire.Wire(chip_id + "_N_OUT_WIRE")
        self.c_output_wire = Wire.Wire(chip_id + "_C_OUT_WIRE")

    def Action(self):
        # NOTE(review): stub -- set_value() is called with no argument and
        # will raise TypeError; the flag computation (and the use of the
        # chip-select and clock wires) still needs to be implemented.
        self.z_output_wire.set_value()

    def name(self):
        # Debug aid: print this chip's identifier.
        print(self.chip_id)
| gpl-3.0 |
wearpants/osf.io | api_tests/logs/views/test_log_params.py | 12 | 2793 | # -*- coding: utf-8 -*-
import httplib as http
from framework.auth.core import Auth
from nose.tools import * # noqa
from test_log_detail import LogsTestCase
from tests.factories import (
ProjectFactory,
PrivateLinkFactory
)
from api.base.settings.defaults import API_BASE
# TODO add tests for other log params
class TestLogContributors(LogsTestCase):
    """Tests for the contributor information in a log's ``params`` payload."""

    def test_contributor_added_log_has_contributor_info_in_params(self):
        # Fetch the detail view for the "contributor added" log entry.
        url = self.url + '{}/'.format(self.log_add_contributor._id)
        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        params = res.json['data']['attributes']['params']
        params_node = params['params_node']
        contributors = params['contributors'][0]

        assert_equal(params_node['id'], self.node._id)
        assert_equal(params_node['title'], self.node.title)

        assert_equal(contributors['family_name'], self.user.family_name)
        assert_equal(contributors['full_name'], self.user.fullname)
        assert_equal(contributors['given_name'], self.user.given_name)
        # Registered users carry no unregistered display name.
        assert_equal(contributors['unregistered_name'], None)

    def test_unregistered_contributor_added_has_contributor_info_in_params(self):
        # Add an unregistered contributor and inspect the resulting log.
        project = ProjectFactory(creator=self.user)
        project.add_unregistered_contributor('Robert Jackson', 'robert@gmail.com', auth=Auth(self.user), save=True)

        unregistered_contributor = project.contributors[1]
        relevant_log = project.logs[-1]
        url = '/{}logs/{}/'.format(API_BASE, relevant_log._id)
        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        params = res.json['data']['attributes']['params']
        params_node = params['params_node']
        contributors = params['contributors'][0]

        assert_equal(params_node['id'], project._id)
        assert_equal(params_node['title'], project.title)

        # Name parts are parsed out of the supplied full name.
        assert_equal(contributors['family_name'], 'Jackson')
        assert_equal(contributors['full_name'], 'Robert Jackson')
        assert_equal(contributors['given_name'], 'Robert')
        assert_equal(contributors['unregistered_name'], 'Robert Jackson')

    def test_params_do_not_appear_on_private_project_with_anonymous_view_only_link(self):
        private_link = PrivateLinkFactory(anonymous=True)
        private_link.nodes.append(self.node)
        private_link.save()
        url = self.url + '{}/'.format(self.log_add_contributor._id)
        res = self.app.get(url, {'view_only': private_link.key}, expect_errors=True)

        assert_equal(res.status_code, 200)
        data = res.json['data']
        assert_in('attributes', data)
        # Anonymous view-only links must not leak contributor identities.
        assert_not_in('params', data['attributes'])
        body = res.body
        assert_not_in(self.user._id, body)
| apache-2.0 |
mollstam/UnrealPy | UnrealPyEmbed/Source/Python/Lib/python27/lib-tk/tkCommonDialog.py | 193 | 1418 | # base class for tk common dialogues
#
# this module provides a base class for accessing the common
# dialogues available in Tk 4.2 and newer. use tkFileDialog,
# tkColorChooser, and tkMessageBox to access the individual
# dialogs.
#
# written by Fredrik Lundh, May 1997
#
from Tkinter import *
class Dialog:
    """Base class wrapping one of Tk's native common dialogs.

    Subclasses set ``command`` to the Tk command name (e.g.
    "tk_getOpenFile") and may override the _fix* hooks to massage
    options and results.  NOTE: Python 2 code (old ``raise`` syntax).
    """

    command = None

    def __init__(self, master=None, **options):

        # FIXME: should this be placed on the module level instead?
        if TkVersion < 4.2:
            raise TclError, "this module requires Tk 4.2 or newer"

        self.master = master
        self.options = options
        if not master and options.get('parent'):
            self.master = options['parent']

    def _fixoptions(self):
        # Subclass hook: adjust self.options before the dialog is shown.
        pass # hook

    def _fixresult(self, widget, result):
        # Subclass hook: post-process the raw Tk result before returning it.
        return result # hook

    def show(self, **options):
        """Display the dialog and return its (fixed-up) result."""

        # update instance options
        for k, v in options.items():
            self.options[k] = v

        self._fixoptions()

        # we need a dummy widget to properly process the options
        # (at least as long as we use Tkinter 1.63)
        w = Frame(self.master)

        try:

            s = w.tk.call(self.command, *w._options(self.options))

            s = self._fixresult(w, s)

        finally:

            try:
                # get rid of the widget
                w.destroy()
            except:
                pass

        return s
| mit |
jjx02230808/project0223 | examples/linear_model/plot_lasso_and_elasticnet.py | 73 | 2074 | """
========================================
Lasso and Elastic Net for Sparse Signals
========================================
Estimates Lasso and Elastic-Net regression models on a manually generated
sparse signal corrupted with an additive noise. Estimated coefficients are
compared with the ground-truth.
"""
print(__doc__)

import numpy as np
import matplotlib.pyplot as plt

from sklearn.metrics import r2_score

###############################################################################
# generate some sparse data to play with
np.random.seed(42)

n_samples, n_features = 50, 200
X = np.random.randn(n_samples, n_features)
coef = 3 * np.random.randn(n_features)
inds = np.arange(n_features)
np.random.shuffle(inds)
coef[inds[10:]] = 0  # sparsify coef
y = np.dot(X, coef)

# add noise
# Bug fix: ``np.random.normal((n_samples,))`` passed the tuple as the *loc*
# (mean) argument, yielding a single draw from N(50, 1) instead of one noise
# value per sample; ``size=`` draws n_samples standard-normal values.
y += 0.01 * np.random.normal(size=(n_samples,))

# Split data in train set and test set
# Bug fix: use floor division -- ``n_samples / 2`` is a float under Python 3
# and is invalid as a slice index.
n_samples = X.shape[0]
X_train, y_train = X[:n_samples // 2], y[:n_samples // 2]
X_test, y_test = X[n_samples // 2:], y[n_samples // 2:]

###############################################################################
# Lasso
from sklearn.linear_model import Lasso

alpha = 0.1
lasso = Lasso(alpha=alpha)

y_pred_lasso = lasso.fit(X_train, y_train).predict(X_test)
r2_score_lasso = r2_score(y_test, y_pred_lasso)
print(lasso)
print("r^2 on test data : %f" % r2_score_lasso)

###############################################################################
# ElasticNet
from sklearn.linear_model import ElasticNet

enet = ElasticNet(alpha=alpha, l1_ratio=0.7)

y_pred_enet = enet.fit(X_train, y_train).predict(X_test)
r2_score_enet = r2_score(y_test, y_pred_enet)
print(enet)
print("r^2 on test data : %f" % r2_score_enet)

plt.plot(enet.coef_, color='lightgreen', linewidth=2,
         label='Elastic net coefficients')
plt.plot(lasso.coef_, color='gold', linewidth=2,
         label='Lasso coefficients')
plt.plot(coef, '--', color='navy', label='original coefficients')
plt.legend(loc='best')
plt.title("Lasso R^2: %f, Elastic Net R^2: %f"
          % (r2_score_lasso, r2_score_enet))
plt.show()
| bsd-3-clause |
40223119/2015cda | static/Brython3.1.1-20150328-091302/Lib/stat.py | 765 | 4304 | """Constants/functions for interpreting results of os.stat() and os.lstat().
Suggested usage: from stat import *
"""
# Indices for stat struct members in the tuple returned by os.stat()
ST_MODE = 0    # protection bits / file type
ST_INO = 1     # inode number
ST_DEV = 2     # device
ST_NLINK = 3   # number of hard links
ST_UID = 4     # user id of owner
ST_GID = 5     # group id of owner
ST_SIZE = 6    # size in bytes
ST_ATIME = 7   # time of last access
ST_MTIME = 8   # time of last modification
ST_CTIME = 9   # time of last metadata change (platform dependent)
# Extract bits from the mode
def S_IMODE(mode):
    """Return the portion of the file's mode that can be set by
    os.chmod().
    """
    # The low twelve bits: permission bits plus setuid/setgid/sticky.
    _CHMOD_MASK = 0o7777
    return mode & _CHMOD_MASK
def S_IFMT(mode):
    """Return the portion of the file's mode that describes the
    file type.
    """
    # Keep only the file-type bits (the S_IF* values).
    _TYPE_MASK = 0o170000
    return mode & _TYPE_MASK
# Constants used as S_IFMT() for various file types
# (not all are implemented on all systems)
S_IFDIR = 0o040000 # directory
S_IFCHR = 0o020000 # character device
S_IFBLK = 0o060000 # block device
S_IFREG = 0o100000 # regular file
S_IFIFO = 0o010000 # fifo (named pipe)
S_IFLNK = 0o120000 # symbolic link
S_IFSOCK = 0o140000 # socket file
# Functions to test for each file type
def S_ISDIR(mode):
    """Return True if mode is from a directory."""
    # Inline the type mask rather than calling S_IFMT().
    return (mode & 0o170000) == S_IFDIR
def S_ISCHR(mode):
    """Return True if mode is from a character special device file."""
    # Inline the type mask rather than calling S_IFMT().
    return (mode & 0o170000) == S_IFCHR
def S_ISBLK(mode):
    """Return True if mode is from a block special device file."""
    # Inline the type mask rather than calling S_IFMT().
    return (mode & 0o170000) == S_IFBLK
def S_ISREG(mode):
    """Return True if mode is from a regular file."""
    # Inline the type mask rather than calling S_IFMT().
    return (mode & 0o170000) == S_IFREG
def S_ISFIFO(mode):
    """Return True if mode is from a FIFO (named pipe)."""
    # Inline the type mask rather than calling S_IFMT().
    return (mode & 0o170000) == S_IFIFO
def S_ISLNK(mode):
    """Return True if mode is from a symbolic link."""
    # Inline the type mask rather than calling S_IFMT().
    return (mode & 0o170000) == S_IFLNK
def S_ISSOCK(mode):
    """Return True if mode is from a socket."""
    # Inline the type mask rather than calling S_IFMT().
    return (mode & 0o170000) == S_IFSOCK
# Names for permission bits
S_ISUID = 0o4000 # set UID bit
S_ISGID = 0o2000 # set GID bit
S_ENFMT = S_ISGID # file locking enforcement
S_ISVTX = 0o1000 # sticky bit
S_IREAD = 0o0400 # Unix V7 synonym for S_IRUSR
S_IWRITE = 0o0200 # Unix V7 synonym for S_IWUSR
S_IEXEC = 0o0100 # Unix V7 synonym for S_IXUSR
S_IRWXU = 0o0700 # mask for owner permissions
S_IRUSR = 0o0400 # read by owner
S_IWUSR = 0o0200 # write by owner
S_IXUSR = 0o0100 # execute by owner
S_IRWXG = 0o0070 # mask for group permissions
S_IRGRP = 0o0040 # read by group
S_IWGRP = 0o0020 # write by group
S_IXGRP = 0o0010 # execute by group
S_IRWXO = 0o0007 # mask for others (not in group) permissions
S_IROTH = 0o0004 # read by others
S_IWOTH = 0o0002 # write by others
S_IXOTH = 0o0001 # execute by others
# Names for file flags (BSD/OS X chflags-style; UF_* user-settable,
# SF_* superuser-only)
UF_NODUMP = 0x00000001 # do not dump file
UF_IMMUTABLE = 0x00000002 # file may not be changed
UF_APPEND = 0x00000004 # file may only be appended to
UF_OPAQUE = 0x00000008 # directory is opaque when viewed through a union stack
UF_NOUNLINK = 0x00000010 # file may not be renamed or deleted
UF_COMPRESSED = 0x00000020 # OS X: file is hfs-compressed
UF_HIDDEN = 0x00008000 # OS X: file should not be displayed
SF_ARCHIVED = 0x00010000 # file may be archived
SF_IMMUTABLE = 0x00020000 # file may not be changed
SF_APPEND = 0x00040000 # file may only be appended to
SF_NOUNLINK = 0x00100000 # file may not be renamed or deleted
SF_SNAPSHOT = 0x00200000 # file is a snapshot file
# Lookup table driving filemode(): one tuple of (bit-pattern, char)
# candidates per output column.  The first pattern fully contained in
# the mode wins; if none match, the column shows "-".
_filemode_table = (
    ((S_IFLNK, "l"),
     (S_IFREG, "-"),
     (S_IFBLK, "b"),
     (S_IFDIR, "d"),
     (S_IFCHR, "c"),
     (S_IFIFO, "p")),

    ((S_IRUSR, "r"),),
    ((S_IWUSR, "w"),),
    ((S_IXUSR|S_ISUID, "s"),
     (S_ISUID, "S"),
     (S_IXUSR, "x")),

    ((S_IRGRP, "r"),),
    ((S_IWGRP, "w"),),
    ((S_IXGRP|S_ISGID, "s"),
     (S_ISGID, "S"),
     (S_IXGRP, "x")),

    ((S_IROTH, "r"),),
    ((S_IWOTH, "w"),),
    ((S_IXOTH|S_ISVTX, "t"),
     (S_ISVTX, "T"),
     (S_IXOTH, "x"))
)
def filemode(mode):
    """Convert a file's mode to a string of the form '-rwxrwxrwx'."""
    chars = []
    for candidates in _filemode_table:
        # Pick the first pattern whose bits are all present in the mode,
        # falling back to "-" when none match.
        symbol = "-"
        for bits, char in candidates:
            if mode & bits == bits:
                symbol = char
                break
        chars.append(symbol)
    return "".join(chars)
| gpl-3.0 |
BaxterStockman/ansible-modules-core | cloud/openstack/os_networks_facts.py | 6 | 4383 | #!/usr/bin/python
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
# shade is an optional dependency: record whether it imported so main()
# can fail with a clear message instead of a traceback.
try:
    import shade
    HAS_SHADE = True
except ImportError:
    HAS_SHADE = False
DOCUMENTATION = '''
---
module: os_networks_facts
short_description: Retrieve facts about one or more OpenStack networks.
version_added: "2.0"
author: "Davide Agnello (@dagnello)"
description:
- Retrieve facts about one or more networks from OpenStack.
requirements:
- "python >= 2.6"
- "shade"
options:
name:
description:
- Name or ID of the Network
required: false
filters:
description:
- A dictionary of meta data to use for further filtering. Elements of
this dictionary may be additional dictionaries.
required: false
extends_documentation_fragment: openstack
'''
EXAMPLES = '''
- name: Gather facts about previously created networks
os_networks_facts:
auth:
auth_url: 'https://your_api_url.com:9000/v2.0'
username: user
password: password
project_name: someproject
- name: Show openstack networks
debug:
var: openstack_networks
- name: Gather facts about a previously created network by name
os_networks_facts:
auth:
auth_url: 'https://your_api_url.com:9000/v2.0'
username: user
password: password
project_name: someproject
name: network1
- name: Show openstack networks
debug:
var: openstack_networks
- name: Gather facts about a previously created network with filter
# Note: name and filters parameters are Not mutually exclusive
os_networks_facts:
auth:
auth_url: 'https://your_api_url.com:9000/v2.0'
username: user
password: password
project_name: someproject
filters:
tenant_id: 55e2ce24b2a245b09f181bf025724cbe
subnets:
- 057d4bdf-6d4d-4728-bb0f-5ac45a6f7400
- 443d4dc0-91d4-4998-b21c-357d10433483
- name: Show openstack networks
debug:
var: openstack_networks
'''
RETURN = '''
openstack_networks:
description: has all the openstack facts about the networks
returned: always, but can be null
type: complex
contains:
id:
description: Unique UUID.
returned: success
type: string
name:
description: Name given to the network.
returned: success
type: string
status:
description: Network status.
returned: success
type: string
subnets:
description: Subnet(s) included in this network.
returned: success
type: list of strings
tenant_id:
description: Tenant id associated with this network.
returned: success
type: string
shared:
description: Network shared flag.
returned: success
type: boolean
'''
def main():
    """Entry point: search for matching networks and return them as facts."""
    spec = openstack_full_argument_spec(
        name=dict(required=False, default=None),
        filters=dict(required=False, type='dict', default=None)
    )
    module = AnsibleModule(spec)
    if not HAS_SHADE:
        module.fail_json(msg='shade is required for this module')
    params = module.params
    try:
        cloud = shade.openstack_cloud(**params)
        found = cloud.search_networks(params['name'], params['filters'])
    except shade.OpenStackCloudException as e:
        # Surface cloud-side failures as a clean module failure.
        module.fail_json(msg=str(e))
    else:
        module.exit_json(changed=False,
                         ansible_facts=dict(openstack_networks=found))
# this is magic, see lib/ansible/module_common.py
# The star imports are required: Ansible's framework provides
# AnsibleModule and openstack_full_argument_spec through them.
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *
if __name__ == '__main__':
    main()
| gpl-3.0 |
apporc/cinder | cinder/api/contrib/admin_actions.py | 8 | 16079 | # Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
import oslo_messaging as messaging
import webob
from webob import exc
from cinder.api import extensions
from cinder.api.openstack import wsgi
from cinder import backup
from cinder import db
from cinder import exception
from cinder.i18n import _
from cinder import objects
from cinder import rpc
from cinder import utils
from cinder import volume
# Module-level logger named after this module.
LOG = logging.getLogger(__name__)
class AdminController(wsgi.Controller):
    """Abstract base class for AdminControllers."""

    collection = None  # api collection to extend

    # FIXME(clayg): this will be hard to keep up-to-date
    # Concrete classes can expand or over-ride
    valid_status = set(['creating',
                        'available',
                        'deleting',
                        'error',
                        'error_deleting', ])

    def __init__(self, *args, **kwargs):
        super(AdminController, self).__init__(*args, **kwargs)
        # singular name of the resource
        self.resource_name = self.collection.rstrip('s')
        self.volume_api = volume.API()
        self.backup_api = backup.API()

    def _update(self, *args, **kwargs):
        # Subclass hook: persist a state change on one resource.
        raise NotImplementedError()

    def _get(self, *args, **kwargs):
        # Subclass hook: fetch one resource.
        raise NotImplementedError()

    def _delete(self, *args, **kwargs):
        # Subclass hook: delete one resource.
        raise NotImplementedError()

    def validate_update(self, body):
        """Validate a reset-status body and return the update dict.

        Raises HTTPBadRequest when 'status' is missing or not one of
        valid_status.
        """
        update = {}
        try:
            update['status'] = body['status'].lower()
        except (TypeError, KeyError):
            raise exc.HTTPBadRequest(explanation=_("Must specify 'status'"))
        if update['status'] not in self.valid_status:
            raise exc.HTTPBadRequest(
                explanation=_("Must specify a valid status"))
        return update

    def authorize(self, context, action_name):
        # e.g. "snapshot_admin_actions:reset_status"
        action = '%s_admin_actions:%s' % (self.resource_name, action_name)
        extensions.extension_authorizer('volume', action)(context)

    @wsgi.action('os-reset_status')
    def _reset_status(self, req, id, body):
        """Reset status on the resource."""
        context = req.environ['cinder.context']
        self.authorize(context, 'reset_status')
        update = self.validate_update(body['os-reset_status'])
        msg = "Updating %(resource)s '%(id)s' with '%(update)r'"
        LOG.debug(msg, {'resource': self.resource_name, 'id': id,
                        'update': update})
        # Emit start/end notifications around the actual update.
        notifier_info = dict(id=id, update=update)
        notifier = rpc.get_notifier('volumeStatusUpdate')
        notifier.info(context, self.collection + '.reset_status.start',
                      notifier_info)
        try:
            self._update(context, id, update)
        except exception.VolumeNotFound as e:
            raise exc.HTTPNotFound(explanation=e.msg)
        notifier.info(context, self.collection + '.reset_status.end',
                      notifier_info)
        return webob.Response(status_int=202)

    @wsgi.action('os-force_delete')
    def _force_delete(self, req, id, body):
        """Delete a resource, bypassing the check that it must be available."""
        context = req.environ['cinder.context']
        self.authorize(context, 'force_delete')
        try:
            resource = self._get(context, id)
        except exception.VolumeNotFound as e:
            raise exc.HTTPNotFound(explanation=e.msg)
        self._delete(context, resource, force=True)
        return webob.Response(status_int=202)
class VolumeAdminController(AdminController):
    """AdminController for Volumes."""

    collection = 'volumes'

    # FIXME(jdg): We're appending additional valid status
    # entries to the set we declare in the parent class
    # this doesn't make a ton of sense, we should probably
    # look at the structure of this whole process again
    # Perhaps we don't even want any definitions in the abstract
    # parent class?
    valid_status = AdminController.valid_status.union(
        ('attaching', 'in-use', 'detaching', 'maintenance'))

    valid_attach_status = ('detached', 'attached',)
    valid_migration_status = ('migrating', 'error',
                              'success', 'completing',
                              'none', 'starting',)

    def _update(self, *args, **kwargs):
        db.volume_update(*args, **kwargs)

    def _get(self, *args, **kwargs):
        return self.volume_api.get(*args, **kwargs)

    def _delete(self, *args, **kwargs):
        return self.volume_api.delete(*args, **kwargs)

    def validate_update(self, body):
        """Validate status/attach_status/migration_status updates.

        At least one of the three keys must be present; each value must
        come from the corresponding valid_* collection.
        """
        update = {}
        status = body.get('status', None)
        attach_status = body.get('attach_status', None)
        migration_status = body.get('migration_status', None)

        valid = False
        if status:
            valid = True
            update = super(VolumeAdminController, self).validate_update(body)

        if attach_status:
            valid = True
            update['attach_status'] = attach_status.lower()
            if update['attach_status'] not in self.valid_attach_status:
                raise exc.HTTPBadRequest(
                    explanation=_("Must specify a valid attach status"))

        if migration_status:
            valid = True
            update['migration_status'] = migration_status.lower()
            if update['migration_status'] not in self.valid_migration_status:
                raise exc.HTTPBadRequest(
                    explanation=_("Must specify a valid migration status"))
            if update['migration_status'] == 'none':
                # 'none' is the API spelling for clearing the field.
                update['migration_status'] = None

        if not valid:
            raise exc.HTTPBadRequest(
                explanation=_("Must specify 'status', 'attach_status' "
                              "or 'migration_status' for update."))
        return update

    @wsgi.action('os-force_detach')
    def _force_detach(self, req, id, body):
        """Roll back a bad detach after the volume has been disconnected."""
        context = req.environ['cinder.context']
        self.authorize(context, 'force_detach')
        try:
            volume = self._get(context, id)
        except exception.VolumeNotFound as e:
            raise exc.HTTPNotFound(explanation=e.msg)
        try:
            connector = body['os-force_detach'].get('connector', None)
        except KeyError:
            raise webob.exc.HTTPBadRequest(
                explanation=_("Must specify 'connector'."))
        try:
            self.volume_api.terminate_connection(context, volume, connector)
        except exception.VolumeBackendAPIException as error:
            msg = _("Unable to terminate volume connection from backend.")
            raise webob.exc.HTTPInternalServerError(explanation=msg)

        attachment_id = body['os-force_detach'].get('attachment_id', None)

        try:
            self.volume_api.detach(context, volume, attachment_id)
        except messaging.RemoteError as error:
            if error.exc_type in ['VolumeAttachmentNotFound',
                                  'InvalidVolume']:
                msg = "Error force detaching volume - %(err_type)s: " \
                      "%(err_msg)s" % {'err_type': error.exc_type,
                                       'err_msg': error.value}
                raise webob.exc.HTTPBadRequest(explanation=msg)
            else:
                # There are also few cases where force-detach call could fail
                # due to db or volume driver errors. These errors shouldn't
                # be exposed to the user and in such cases it should raise
                # 500 error.
                raise
        return webob.Response(status_int=202)

    @wsgi.action('os-migrate_volume')
    def _migrate_volume(self, req, id, body):
        """Migrate a volume to the specified host."""
        context = req.environ['cinder.context']
        self.authorize(context, 'migrate_volume')
        try:
            volume = self._get(context, id)
        except exception.VolumeNotFound as e:
            raise exc.HTTPNotFound(explanation=e.msg)
        params = body['os-migrate_volume']
        try:
            host = params['host']
        except KeyError:
            raise exc.HTTPBadRequest(explanation=_("Must specify 'host'."))
        force_host_copy = utils.get_bool_param('force_host_copy', params)
        lock_volume = utils.get_bool_param('lock_volume', params)
        self.volume_api.migrate_volume(context, volume, host, force_host_copy,
                                       lock_volume)
        return webob.Response(status_int=202)

    @wsgi.action('os-migrate_volume_completion')
    def _migrate_volume_completion(self, req, id, body):
        """Complete an in-progress migration."""
        context = req.environ['cinder.context']
        self.authorize(context, 'migrate_volume_completion')
        try:
            volume = self._get(context, id)
        except exception.VolumeNotFound as e:
            raise exc.HTTPNotFound(explanation=e.msg)
        params = body['os-migrate_volume_completion']
        try:
            new_volume_id = params['new_volume']
        except KeyError:
            raise exc.HTTPBadRequest(
                explanation=_("Must specify 'new_volume'"))
        try:
            new_volume = self._get(context, new_volume_id)
        except exception.VolumeNotFound as e:
            raise exc.HTTPNotFound(explanation=e.msg)
        error = params.get('error', False)
        ret = self.volume_api.migrate_volume_completion(context, volume,
                                                        new_volume, error)
        return {'save_volume_id': ret}

    @wsgi.action('os-enable_replication')
    def _enable_replication(self, req, id, body):
        """Enable/Re-enable replication on a replication capable volume.

        Admin only method, used primarily for cases like disable/re-enable
        replication process on a replicated volume for maintenance or testing
        """
        context = req.environ['cinder.context']
        self.authorize(context, 'enable_replication')
        try:
            volume = self._get(context, id)
        except exception.VolumeNotFound as e:
            raise exc.HTTPNotFound(explanation=e.msg)
        self.volume_api.enable_replication(context, volume)
        return webob.Response(status_int=202)

    @wsgi.action('os-disable_replication')
    def _disable_replication(self, req, id, body):
        """Disable replication on a replication capable volume.

        Admin only method, used to instruct a backend to
        disable replication process to a replicated volume.
        """
        context = req.environ['cinder.context']
        self.authorize(context, 'disable_replication')
        try:
            volume = self._get(context, id)
        except exception.VolumeNotFound as e:
            raise exc.HTTPNotFound(explanation=e.msg)
        self.volume_api.disable_replication(context, volume)
        return webob.Response(status_int=202)

    @wsgi.action('os-failover_replication')
    def _failover_replication(self, req, id, body):
        """Failover a replicating volume to its secondary.

        Admin only method, used to force a fail-over to
        a replication target. Optional secondary param to
        indicate what device to promote in case of multiple
        replication targets.
        """
        context = req.environ['cinder.context']
        self.authorize(context, 'failover_replication')
        try:
            volume = self._get(context, id)
        except exception.VolumeNotFound as e:
            raise exc.HTTPNotFound(explanation=e.msg)
        secondary = body['os-failover_replication'].get('secondary', None)
        self.volume_api.failover_replication(context, volume, secondary)
        return webob.Response(status_int=202)

    @wsgi.action('os-list_replication_targets')
    def _list_replication_targets(self, req, id, body):
        """Show replication targets for the specified host.

        Admin only method, used to display configured
        replication target devices for the specified volume.
        """
        # TODO(jdg): We'll want an equivalent type of command
        # to query a backend host (show configuration for a
        # specified backend), but priority here is for
        # a volume as it's likely to be more useful.
        context = req.environ['cinder.context']
        self.authorize(context, 'list_replication_targets')
        try:
            volume = self._get(context, id)
        except exception.VolumeNotFound as e:
            raise exc.HTTPNotFound(explanation=e.msg)

        # Expected response is a dict with unknown keys.
        # Should be of the form:
        # {'volume_id': xx, 'replication_targets':[{k: v, k1: v1...}]}
        return self.volume_api.list_replication_targets(context, volume)
class SnapshotAdminController(AdminController):
    """AdminController for Snapshots."""

    collection = 'snapshots'

    def _update(self, *args, **kwargs):
        # Positional layout matches the base _reset_status call:
        # (context, resource_id, update_dict).
        context, snapshot_id, fields = args[0], args[1], args[2]
        snap = objects.Snapshot.get_by_id(context, snapshot_id)
        snap.update(fields)
        snap.save()

    def _get(self, *args, **kwargs):
        # Delegate to the volume API's snapshot lookup.
        return self.volume_api.get_snapshot(*args, **kwargs)

    def _delete(self, *args, **kwargs):
        # Delegate to the volume API's snapshot delete.
        return self.volume_api.delete_snapshot(*args, **kwargs)
class BackupAdminController(AdminController):
    """AdminController for Backups."""

    collection = 'backups'

    # Backups may only be reset between these two states.
    valid_status = set(['available',
                        'error'
                        ])

    def _get(self, *args, **kwargs):
        return self.backup_api.get(*args, **kwargs)

    def _delete(self, *args, **kwargs):
        return self.backup_api.delete(*args, **kwargs)

    @wsgi.action('os-reset_status')
    def _reset_status(self, req, id, body):
        """Reset status on the resource.

        Overrides the base implementation: backups use their own
        notifier topic and go through the backup API.
        """
        context = req.environ['cinder.context']
        self.authorize(context, 'reset_status')
        update = self.validate_update(body['os-reset_status'])
        msg = "Updating %(resource)s '%(id)s' with '%(update)r'"
        LOG.debug(msg, {'resource': self.resource_name, 'id': id,
                        'update': update})
        notifier_info = {'id': id, 'update': update}
        notifier = rpc.get_notifier('backupStatusUpdate')
        notifier.info(context, self.collection + '.reset_status.start',
                      notifier_info)
        try:
            self.backup_api.reset_status(context=context, backup_id=id,
                                         status=update['status'])
        except exception.BackupNotFound as e:
            raise exc.HTTPNotFound(explanation=e.msg)
        return webob.Response(status_int=202)
class Admin_actions(extensions.ExtensionDescriptor):
    """Enable admin actions."""

    name = "AdminActions"
    alias = "os-admin-actions"
    namespace = "http://docs.openstack.org/volume/ext/admin-actions/api/v1.1"
    updated = "2012-08-25T00:00:00+00:00"

    def get_controller_extensions(self):
        """Build one ControllerExtension per admin controller class."""
        controller_classes = (VolumeAdminController,
                              SnapshotAdminController,
                              BackupAdminController)
        return [extensions.ControllerExtension(self, cls.collection, cls())
                for cls in controller_classes]
| apache-2.0 |
lxsmnv/spark | python/pyspark/mllib/clustering.py | 10 | 36955 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
import array as pyarray
import warnings
# Python 3 compatibility: alias the Python 2 names used further down.
if sys.version > '3':
    xrange = range
    basestring = str
from math import exp, log
from numpy import array, random, tile
from collections import namedtuple
from pyspark import SparkContext, since
from pyspark.rdd import RDD, ignore_unicode_prefix
from pyspark.mllib.common import JavaModelWrapper, callMLlibFunc, callJavaFunc, _py2java, _java2py
from pyspark.mllib.linalg import SparseVector, _convert_to_vector, DenseVector
from pyspark.mllib.regression import LabeledPoint
from pyspark.mllib.stat.distribution import MultivariateGaussian
from pyspark.mllib.util import Saveable, Loader, inherit_doc, JavaLoader, JavaSaveable
from pyspark.streaming import DStream
# Public API of this module.
__all__ = ['BisectingKMeansModel', 'BisectingKMeans', 'KMeansModel', 'KMeans',
           'GaussianMixtureModel', 'GaussianMixture', 'PowerIterationClusteringModel',
           'PowerIterationClustering', 'StreamingKMeans', 'StreamingKMeansModel',
           'LDA', 'LDAModel']
@inherit_doc
class BisectingKMeansModel(JavaModelWrapper):
    """
    A clustering model derived from the bisecting k-means method.

    >>> data = array([0.0,0.0, 1.0,1.0, 9.0,8.0, 8.0,9.0]).reshape(4, 2)
    >>> bskm = BisectingKMeans()
    >>> model = bskm.train(sc.parallelize(data, 2), k=4)
    >>> p = array([0.0, 0.0])
    >>> model.predict(p)
    0
    >>> model.k
    4
    >>> model.computeCost(p)
    0.0

    .. versionadded:: 2.0.0
    """

    def __init__(self, java_model):
        super(BisectingKMeansModel, self).__init__(java_model)
        # Materialize the JVM-side centers as NumPy arrays once, up front.
        self.centers = [c.toArray() for c in self.call("clusterCenters")]

    @property
    @since('2.0.0')
    def clusterCenters(self):
        """Get the cluster centers, represented as a list of NumPy
        arrays."""
        return self.centers

    @property
    @since('2.0.0')
    def k(self):
        """Get the number of clusters"""
        return self.call("k")

    @since('2.0.0')
    def predict(self, x):
        """
        Find the cluster that each of the points belongs to in this
        model.

        :param x:
          A data point (or RDD of points) to determine cluster index.
        :return:
          Predicted cluster index or an RDD of predicted cluster indices
          if the input is an RDD.
        """
        if isinstance(x, RDD):
            # RDD input: predict on the JVM side for the whole dataset.
            vecs = x.map(_convert_to_vector)
            return self.call("predict", vecs)

        x = _convert_to_vector(x)
        return self.call("predict", x)

    @since('2.0.0')
    def computeCost(self, x):
        """
        Return the Bisecting K-means cost (sum of squared distances of
        points to their nearest center) for this model on the given
        data. If provided with an RDD of points returns the sum.

        :param point:
          A data point (or RDD of points) to compute the cost(s).
        """
        if isinstance(x, RDD):
            vecs = x.map(_convert_to_vector)
            return self.call("computeCost", vecs)

        return self.call("computeCost", _convert_to_vector(x))
class BisectingKMeans(object):
    """
    A bisecting k-means algorithm based on the paper "A comparison of
    document clustering techniques" by Steinbach, Karypis, and Kumar,
    with modification to fit Spark.
    The algorithm starts from a single cluster that contains all points.
    Iteratively it finds divisible clusters on the bottom level and
    bisects each of them using k-means, until there are `k` leaf
    clusters in total or no leaf clusters are divisible.
    The bisecting steps of clusters on the same level are grouped
    together to increase parallelism. If bisecting all divisible
    clusters on the bottom level would result more than `k` leaf
    clusters, larger clusters get higher priority.

    Based on
    U{http://glaros.dtc.umn.edu/gkhome/fetch/papers/docclusterKDDTMW00.pdf}
    Steinbach, Karypis, and Kumar, A comparison of document clustering
    techniques, KDD Workshop on Text Mining, 2000.

    .. versionadded:: 2.0.0
    """

    @classmethod
    @since('2.0.0')
    def train(self, rdd, k=4, maxIterations=20, minDivisibleClusterSize=1.0, seed=-1888008604):
        """
        Runs the bisecting k-means algorithm return the model.

        :param rdd:
          Training points as an `RDD` of `Vector` or convertible
          sequence types.
        :param k:
          The desired number of leaf clusters. The actual number could
          be smaller if there are no divisible leaf clusters.
          (default: 4)
        :param maxIterations:
          Maximum number of iterations allowed to split clusters.
          (default: 20)
        :param minDivisibleClusterSize:
          Minimum number of points (if >= 1.0) or the minimum proportion
          of points (if < 1.0) of a divisible cluster.
          (default: 1)
        :param seed:
          Random seed value for cluster initialization.
          (default: -1888008604 from classOf[BisectingKMeans].getName.##)
        """
        # Training runs on the JVM side; wrap the returned Java model.
        java_model = callMLlibFunc(
            "trainBisectingKMeans", rdd.map(_convert_to_vector),
            k, maxIterations, minDivisibleClusterSize, seed)
        return BisectingKMeansModel(java_model)
@inherit_doc
class KMeansModel(Saveable, Loader):
    """A clustering model derived from the k-means method.

    >>> data = array([0.0,0.0, 1.0,1.0, 9.0,8.0, 8.0,9.0]).reshape(4, 2)
    >>> model = KMeans.train(
    ...     sc.parallelize(data), 2, maxIterations=10, initializationMode="random",
    ...     seed=50, initializationSteps=5, epsilon=1e-4)
    >>> model.predict(array([0.0, 0.0])) == model.predict(array([1.0, 1.0]))
    True
    >>> model.predict(array([8.0, 9.0])) == model.predict(array([9.0, 8.0]))
    True
    >>> model.k
    2
    >>> model.computeCost(sc.parallelize(data))
    2.0000000000000004
    >>> model = KMeans.train(sc.parallelize(data), 2)
    >>> sparse_data = [
    ...     SparseVector(3, {1: 1.0}),
    ...     SparseVector(3, {1: 1.1}),
    ...     SparseVector(3, {2: 1.0}),
    ...     SparseVector(3, {2: 1.1})
    ... ]
    >>> model = KMeans.train(sc.parallelize(sparse_data), 2, initializationMode="k-means||",
    ...     seed=50, initializationSteps=5, epsilon=1e-4)
    >>> model.predict(array([0., 1., 0.])) == model.predict(array([0, 1.1, 0.]))
    True
    >>> model.predict(array([0., 0., 1.])) == model.predict(array([0, 0, 1.1]))
    True
    >>> model.predict(sparse_data[0]) == model.predict(sparse_data[1])
    True
    >>> model.predict(sparse_data[2]) == model.predict(sparse_data[3])
    True
    >>> isinstance(model.clusterCenters, list)
    True
    >>> import os, tempfile
    >>> path = tempfile.mkdtemp()
    >>> model.save(sc, path)
    >>> sameModel = KMeansModel.load(sc, path)
    >>> sameModel.predict(sparse_data[0]) == model.predict(sparse_data[0])
    True
    >>> from shutil import rmtree
    >>> try:
    ...     rmtree(path)
    ... except OSError:
    ...     pass
    >>> data = array([-383.1,-382.9, 28.7,31.2, 366.2,367.3]).reshape(3, 2)
    >>> model = KMeans.train(sc.parallelize(data), 3, maxIterations=0,
    ...     initialModel = KMeansModel([(-1000.0,-1000.0),(5.0,5.0),(1000.0,1000.0)]))
    >>> model.clusterCenters
    [array([-1000., -1000.]), array([ 5., 5.]), array([ 1000., 1000.])]

    .. versionadded:: 0.9.0
    """

    def __init__(self, centers):
        # centers: sequence of cluster centers (NumPy arrays or
        # convertible sequences), one per cluster.
        self.centers = centers

    @property
    @since('1.0.0')
    def clusterCenters(self):
        """Get the cluster centers, represented as a list of NumPy arrays."""
        return self.centers

    @property
    @since('1.4.0')
    def k(self):
        """Total number of clusters."""
        return len(self.centers)

    @since('0.9.0')
    def predict(self, x):
        """
        Find the cluster that each of the points belongs to in this
        model.

        :param x:
          A data point (or RDD of points) to determine cluster index.
        :return:
          Predicted cluster index or an RDD of predicted cluster indices
          if the input is an RDD.
        """
        best = 0
        best_distance = float("inf")
        if isinstance(x, RDD):
            # Distribute: each point is predicted with this same method.
            return x.map(self.predict)

        x = _convert_to_vector(x)
        # Linear scan over the centers for the nearest one.
        for i in xrange(len(self.centers)):
            distance = x.squared_distance(self.centers[i])
            if distance < best_distance:
                best = i
                best_distance = distance
        return best

    @since('1.4.0')
    def computeCost(self, rdd):
        """
        Return the K-means cost (sum of squared distances of points to
        their nearest center) for this model on the given
        data.

        :param rdd:
          The RDD of points to compute the cost on.
        """
        cost = callMLlibFunc("computeCostKmeansModel", rdd.map(_convert_to_vector),
                             [_convert_to_vector(c) for c in self.centers])
        return cost

    @since('1.4.0')
    def save(self, sc, path):
        """
        Save this model to the given path.
        """
        java_centers = _py2java(sc, [_convert_to_vector(c) for c in self.centers])
        java_model = sc._jvm.org.apache.spark.mllib.clustering.KMeansModel(java_centers)
        java_model.save(sc._jsc.sc(), path)

    @classmethod
    @since('1.4.0')
    def load(cls, sc, path):
        """
        Load a model from the given path.
        """
        java_model = sc._jvm.org.apache.spark.mllib.clustering.KMeansModel.load(sc._jsc.sc(), path)
        return KMeansModel(_java2py(sc, java_model.clusterCenters()))
class KMeans(object):
    """
    .. versionadded:: 0.9.0
    """

    @classmethod
    @since('0.9.0')
    def train(cls, rdd, k, maxIterations=100, runs=1, initializationMode="k-means||",
              seed=None, initializationSteps=2, epsilon=1e-4, initialModel=None):
        """
        Train a k-means clustering model.

        :param rdd:
          Training points as an `RDD` of `Vector` or convertible
          sequence types.
        :param k:
          Number of clusters to create.
        :param maxIterations:
          Maximum number of iterations allowed.
          (default: 100)
        :param runs:
          This param has no effect since Spark 2.0.0.
        :param initializationMode:
          The initialization algorithm. This can be either "random" or
          "k-means||".
          (default: "k-means||")
        :param seed:
          Random seed value for cluster initialization. Set as None to
          generate seed based on system time.
          (default: None)
        :param initializationSteps:
          Number of steps for the k-means|| initialization mode.
          This is an advanced setting -- the default of 2 is almost
          always enough.
          (default: 2)
        :param epsilon:
          Distance threshold within which a center will be considered to
          have converged. If all centers move less than this Euclidean
          distance, iterations are stopped.
          (default: 1e-4)
        :param initialModel:
          Initial cluster centers can be provided as a KMeansModel object
          rather than using the random or k-means|| initializationModel.
          (default: None)
        """
        if runs != 1:
            warnings.warn("The param `runs` has no effect since Spark 2.0.0.")
        clusterInitialModel = []
        if initialModel is not None:
            if not isinstance(initialModel, KMeansModel):
                raise Exception("initialModel is of "+str(type(initialModel))+". It needs "
                                "to be of <type 'KMeansModel'>")
            clusterInitialModel = [_convert_to_vector(c) for c in initialModel.clusterCenters]
        # Train on the JVM side; an empty clusterInitialModel means use the
        # chosen initializationMode instead.
        model = callMLlibFunc("trainKMeansModel", rdd.map(_convert_to_vector), k, maxIterations,
                              runs, initializationMode, seed, initializationSteps, epsilon,
                              clusterInitialModel)
        centers = callJavaFunc(rdd.context, model.clusterCenters)
        return KMeansModel([c.toArray() for c in centers])
@inherit_doc
class GaussianMixtureModel(JavaModelWrapper, JavaSaveable, JavaLoader):
    """
    A clustering model derived from the Gaussian Mixture Model method.
    >>> from pyspark.mllib.linalg import Vectors, DenseMatrix
    >>> from numpy.testing import assert_equal
    >>> from shutil import rmtree
    >>> import os, tempfile
    >>> clusterdata_1 = sc.parallelize(array([-0.1,-0.05,-0.01,-0.1,
    ...                                        0.9,0.8,0.75,0.935,
    ...                                       -0.83,-0.68,-0.91,-0.76 ]).reshape(6, 2), 2)
    >>> model = GaussianMixture.train(clusterdata_1, 3, convergenceTol=0.0001,
    ...                               maxIterations=50, seed=10)
    >>> labels = model.predict(clusterdata_1).collect()
    >>> labels[0]==labels[1]
    False
    >>> labels[1]==labels[2]
    False
    >>> labels[4]==labels[5]
    True
    >>> model.predict([-0.1,-0.05])
    0
    >>> softPredicted = model.predictSoft([-0.1,-0.05])
    >>> abs(softPredicted[0] - 1.0) < 0.001
    True
    >>> abs(softPredicted[1] - 0.0) < 0.001
    True
    >>> abs(softPredicted[2] - 0.0) < 0.001
    True
    >>> path = tempfile.mkdtemp()
    >>> model.save(sc, path)
    >>> sameModel = GaussianMixtureModel.load(sc, path)
    >>> assert_equal(model.weights, sameModel.weights)
    >>> mus, sigmas = list(
    ...     zip(*[(g.mu, g.sigma) for g in model.gaussians]))
    >>> sameMus, sameSigmas = list(
    ...     zip(*[(g.mu, g.sigma) for g in sameModel.gaussians]))
    >>> mus == sameMus
    True
    >>> sigmas == sameSigmas
    True
    >>> from shutil import rmtree
    >>> try:
    ...     rmtree(path)
    ... except OSError:
    ...     pass
    >>> data = array([-5.1971, -2.5359, -3.8220,
    ...               -5.2211, -5.0602,  4.7118,
    ...                6.8989, 3.4592,  4.6322,
    ...                5.7048,  4.6567, 5.5026,
    ...                4.5605,  5.2043,  6.2734])
    >>> clusterdata_2 = sc.parallelize(data.reshape(5,3))
    >>> model = GaussianMixture.train(clusterdata_2, 2, convergenceTol=0.0001,
    ...                               maxIterations=150, seed=4)
    >>> labels = model.predict(clusterdata_2).collect()
    >>> labels[0]==labels[1]
    True
    >>> labels[2]==labels[3]==labels[4]
    True
    .. versionadded:: 1.3.0
    """
    @property
    @since('1.4.0')
    def weights(self):
        """
        Weights for each Gaussian distribution in the mixture, where weights[i] is
        the weight for Gaussian i, and weights.sum == 1.
        """
        # Fetched from the JVM wrapper on every access; returns a numpy array.
        return array(self.call("weights"))
    @property
    @since('1.4.0')
    def gaussians(self):
        """
        Array of MultivariateGaussian where gaussians[i] represents
        the Multivariate Gaussian (Normal) Distribution for Gaussian i.
        """
        # Each JVM-side gaussian comes back as a (mu, sigma) pair.
        return [
            MultivariateGaussian(gaussian[0], gaussian[1])
            for gaussian in self.call("gaussians")]
    @property
    @since('1.4.0')
    def k(self):
        """Number of gaussians in mixture."""
        return len(self.weights)
    @since('1.3.0')
    def predict(self, x):
        """
        Find the cluster to which the point 'x' or each point in RDD 'x'
        has maximum membership in this model.
        :param x:
          A feature vector or an RDD of vectors representing data points.
        :return:
          Predicted cluster label or an RDD of predicted cluster labels
          if the input is an RDD.
        """
        if isinstance(x, RDD):
            # Label every point with the index of its largest
            # soft-membership score (each z is a plain Python array).
            cluster_labels = self.predictSoft(x).map(lambda z: z.index(max(z)))
            return cluster_labels
        else:
            # Single vector: predictSoft returns a numpy array, so argmax.
            z = self.predictSoft(x)
            return z.argmax()
    @since('1.3.0')
    def predictSoft(self, x):
        """
        Find the membership of point 'x' or each point in RDD 'x' to all mixture components.
        :param x:
          A feature vector or an RDD of vectors representing data points.
        :return:
          The membership value to all mixture components for vector 'x'
          or each vector in RDD 'x'.
        """
        if isinstance(x, RDD):
            # Ship the current mixture parameters (weights, means, covariances)
            # to the JVM and score the whole RDD there; each result row is
            # converted to a plain double array.
            means, sigmas = zip(*[(g.mu, g.sigma) for g in self.gaussians])
            membership_matrix = callMLlibFunc("predictSoftGMM", x.map(_convert_to_vector),
                                              _convert_to_vector(self.weights), means, sigmas)
            return membership_matrix.map(lambda x: pyarray.array('d', x))
        else:
            return self.call("predictSoft", _convert_to_vector(x)).toArray()
    @classmethod
    @since('1.5.0')
    def load(cls, sc, path):
        """Load the GaussianMixtureModel from disk.
        :param sc:
          SparkContext.
        :param path:
          Path to where the model is stored.
        """
        model = cls._load_java(sc, path)
        # Wrap the raw JVM model so Python-friendly accessors are available.
        wrapper = sc._jvm.org.apache.spark.mllib.api.python.GaussianMixtureModelWrapper(model)
        return cls(wrapper)
class GaussianMixture(object):
    """
    Learning algorithm for Gaussian Mixtures using the expectation-maximization algorithm.
    .. versionadded:: 1.3.0
    """
    @classmethod
    @since('1.3.0')
    def train(cls, rdd, k, convergenceTol=1e-3, maxIterations=100, seed=None, initialModel=None):
        """
        Train a Gaussian Mixture clustering model.
        :param rdd:
          Training points as an `RDD` of `Vector` or convertible
          sequence types.
        :param k:
          Number of independent Gaussians in the mixture model.
        :param convergenceTol:
          Maximum change in log-likelihood at which convergence is
          considered to have occurred.
          (default: 1e-3)
        :param maxIterations:
          Maximum number of iterations allowed.
          (default: 100)
        :param seed:
          Random seed for initial Gaussian distribution. Set as None to
          generate seed based on system time.
          (default: None)
        :param initialModel:
          Initial GMM starting point, bypassing the random
          initialization.
          (default: None)
        :raises ValueError: if ``initialModel`` has a different cluster
          count than ``k``.
        """
        initialModelWeights = None
        initialModelMu = None
        initialModelSigma = None
        if initialModel is not None:
            if initialModel.k != k:
                # A starting model with a different cluster count cannot seed
                # the EM iterations. ValueError (an Exception subclass, so
                # old callers keep working) is the appropriate signal for a
                # bad argument value.
                raise ValueError("Mismatched cluster count, initialModel.k = %s, however k = %s"
                                 % (initialModel.k, k))
            # Unpack the starting model into plain lists for the JVM call.
            initialModelWeights = list(initialModel.weights)
            initialModelMu = [initialModel.gaussians[i].mu for i in range(initialModel.k)]
            initialModelSigma = [initialModel.gaussians[i].sigma for i in range(initialModel.k)]
        java_model = callMLlibFunc("trainGaussianMixtureModel", rdd.map(_convert_to_vector),
                                   k, convergenceTol, maxIterations, seed,
                                   initialModelWeights, initialModelMu, initialModelSigma)
        return GaussianMixtureModel(java_model)
class PowerIterationClusteringModel(JavaModelWrapper, JavaSaveable, JavaLoader):
    """
    Model produced by [[PowerIterationClustering]].
    >>> import math
    >>> def genCircle(r, n):
    ...     points = []
    ...     for i in range(0, n):
    ...         theta = 2.0 * math.pi * i / n
    ...         points.append((r * math.cos(theta), r * math.sin(theta)))
    ...     return points
    >>> def sim(x, y):
    ...     dist2 = (x[0] - y[0]) * (x[0] - y[0]) + (x[1] - y[1]) * (x[1] - y[1])
    ...     return math.exp(-dist2 / 2.0)
    >>> r1 = 1.0
    >>> n1 = 10
    >>> r2 = 4.0
    >>> n2 = 40
    >>> n = n1 + n2
    >>> points = genCircle(r1, n1) + genCircle(r2, n2)
    >>> similarities = [(i, j, sim(points[i], points[j])) for i in range(1, n) for j in range(0, i)]
    >>> rdd = sc.parallelize(similarities, 2)
    >>> model = PowerIterationClustering.train(rdd, 2, 40)
    >>> model.k
    2
    >>> result = sorted(model.assignments().collect(), key=lambda x: x.id)
    >>> result[0].cluster == result[1].cluster == result[2].cluster == result[3].cluster
    True
    >>> result[4].cluster == result[5].cluster == result[6].cluster == result[7].cluster
    True
    >>> import os, tempfile
    >>> path = tempfile.mkdtemp()
    >>> model.save(sc, path)
    >>> sameModel = PowerIterationClusteringModel.load(sc, path)
    >>> sameModel.k
    2
    >>> result = sorted(model.assignments().collect(), key=lambda x: x.id)
    >>> result[0].cluster == result[1].cluster == result[2].cluster == result[3].cluster
    True
    >>> result[4].cluster == result[5].cluster == result[6].cluster == result[7].cluster
    True
    >>> from shutil import rmtree
    >>> try:
    ...     rmtree(path)
    ... except OSError:
    ...     pass
    .. versionadded:: 1.5.0
    """
    @property
    @since('1.5.0')
    def k(self):
        """
        Returns the number of clusters.
        """
        # Delegated to the JVM-side model wrapper.
        return self.call("k")
    @since('1.5.0')
    def assignments(self):
        """
        Returns the cluster assignments of this model.
        """
        # Each JVM assignment tuple becomes a named (id, cluster) pair.
        return self.call("getAssignments").map(
            lambda x: (PowerIterationClustering.Assignment(*x)))
    @classmethod
    @since('1.5.0')
    def load(cls, sc, path):
        """
        Load a model from the given path.
        """
        model = cls._load_java(sc, path)
        # Wrap the raw JVM model so Python-friendly accessors are available.
        wrapper =\
            sc._jvm.org.apache.spark.mllib.api.python.PowerIterationClusteringModelWrapper(model)
        return PowerIterationClusteringModel(wrapper)
class PowerIterationClustering(object):
    """
    Power Iteration Clustering (PIC), a scalable graph clustering algorithm
    developed by [[http://www.cs.cmu.edu/~frank/papers/icml2010-pic-final.pdf Lin and Cohen]].
    From the abstract: PIC finds a very low-dimensional embedding of a
    dataset using truncated power iteration on a normalized pair-wise
    similarity matrix of the data.
    .. versionadded:: 1.5.0
    """
    @classmethod
    @since('1.5.0')
    def train(cls, rdd, k, maxIterations=100, initMode="random"):
        """
        Run PIC on an affinity matrix and return the fitted model.
        :param rdd:
          An RDD of (i, j, s\ :sub:`ij`\) tuples representing the
          affinity matrix (matrix A in the PIC paper). Similarities
          s\ :sub:`ij`\ must be nonnegative, and since the matrix is
          symmetric (s\ :sub:`ij`\ = s\ :sub:`ji`\), only one of
          (i, j, s\ :sub:`ij`\) or (j, i, s\ :sub:`ji`\) needs to be
          present for any (i, j) with nonzero similarity. Tuples with
          i = j are ignored, as s\ :sub:`ij`\ is assumed to be 0.0.
        :param k:
          Number of clusters.
        :param maxIterations:
          Maximum number of iterations of the PIC algorithm.
          (default: 100)
        :param initMode:
          Initialization mode, either "random" (random vertex
          properties) or "degree" (normalized sum similarities).
          (default: "random")
        """
        similarities = rdd.map(_convert_to_vector)
        java_model = callMLlibFunc("trainPowerIterationClusteringModel",
                                   similarities, int(k), int(maxIterations), initMode)
        return PowerIterationClusteringModel(java_model)
    class Assignment(namedtuple("Assignment", ["id", "cluster"])):
        """
        Represents an (id, cluster) tuple.
        .. versionadded:: 1.5.0
        """
class StreamingKMeansModel(KMeansModel):
    """
    Clustering model which can perform an online update of the centroids.
    The update formula for each centroid is given by
    * c_t+1 = ((c_t * n_t * a) + (x_t * m_t)) / (n_t + m_t)
    * n_t+1 = n_t * a + m_t
    where
    * c_t: Centroid at the n_th iteration.
    * n_t: Number of samples (or) weights associated with the centroid
      at the n_th iteration.
    * x_t: Centroid of the new data closest to c_t.
    * m_t: Number of samples (or) weights of the new data closest to c_t
    * c_t+1: New centroid.
    * n_t+1: New number of weights.
    * a: Decay Factor, which gives the forgetfulness.
    .. note:: If a is set to 1, it is the weighted mean of the previous
      and new data. If it set to zero, the old centroids are completely
      forgotten.
    :param clusterCenters:
      Initial cluster centers.
    :param clusterWeights:
      List of weights assigned to each cluster.
    >>> initCenters = [[0.0, 0.0], [1.0, 1.0]]
    >>> initWeights = [1.0, 1.0]
    >>> stkm = StreamingKMeansModel(initCenters, initWeights)
    >>> data = sc.parallelize([[-0.1, -0.1], [0.1, 0.1],
    ...                        [0.9, 0.9], [1.1, 1.1]])
    >>> stkm = stkm.update(data, 1.0, u"batches")
    >>> stkm.centers
    array([[ 0.,  0.],
           [ 1.,  1.]])
    >>> stkm.predict([-0.1, -0.1])
    0
    >>> stkm.predict([0.9, 0.9])
    1
    >>> stkm.clusterWeights
    [3.0, 3.0]
    >>> decayFactor = 0.0
    >>> data = sc.parallelize([DenseVector([1.5, 1.5]), DenseVector([0.2, 0.2])])
    >>> stkm = stkm.update(data, 0.0, u"batches")
    >>> stkm.centers
    array([[ 0.2,  0.2],
           [ 1.5,  1.5]])
    >>> stkm.clusterWeights
    [1.0, 1.0]
    >>> stkm.predict([0.2, 0.2])
    0
    >>> stkm.predict([1.5, 1.5])
    1
    .. versionadded:: 1.5.0
    """
    def __init__(self, clusterCenters, clusterWeights):
        super(StreamingKMeansModel, self).__init__(centers=clusterCenters)
        # Copy the weights so later in-place updates don't alias the
        # caller's list.
        self._clusterWeights = list(clusterWeights)
    @property
    @since('1.5.0')
    def clusterWeights(self):
        """Return the cluster weights."""
        return self._clusterWeights
    @ignore_unicode_prefix
    @since('1.5.0')
    def update(self, data, decayFactor, timeUnit):
        """Update the centroids, according to data
        :param data:
          RDD with new data for the model update.
        :param decayFactor:
          Forgetfulness of the previous centroids.
        :param timeUnit:
          Can be "batches" or "points". If points, then the decay factor
          is raised to the power of number of new points and if batches,
          then decay factor will be used as is.
        """
        # Validate and normalize the inputs before handing off to the JVM.
        if not isinstance(data, RDD):
            raise TypeError("Data should be of an RDD, got %s." % type(data))
        data = data.map(_convert_to_vector)
        decayFactor = float(decayFactor)
        if timeUnit not in ["batches", "points"]:
            raise ValueError(
                "timeUnit should be 'batches' or 'points', got %s." % timeUnit)
        vectorCenters = [_convert_to_vector(center) for center in self.centers]
        # The JVM helper returns (new_centers, new_weights); adopt both so
        # this model reflects the online update, then return self for
        # call chaining (matches the doctest usage above).
        updatedModel = callMLlibFunc(
            "updateStreamingKMeansModel", vectorCenters, self._clusterWeights,
            data, decayFactor, timeUnit)
        self.centers = array(updatedModel[0])
        self._clusterWeights = list(updatedModel[1])
        return self
class StreamingKMeans(object):
    """
    Provides methods to set k, decayFactor, timeUnit to configure the
    KMeans algorithm for fitting and predicting on incoming dstreams.
    More details on how the centroids are updated are provided under the
    docs of StreamingKMeansModel.
    :param k:
      Number of clusters.
      (default: 2)
    :param decayFactor:
      Forgetfulness of the previous centroids.
      (default: 1.0)
    :param timeUnit:
      Can be "batches" or "points". If points, then the decay factor is
      raised to the power of number of new points and if batches, then
      decay factor will be used as is.
      (default: "batches")
    .. versionadded:: 1.5.0
    """
    def __init__(self, k=2, decayFactor=1.0, timeUnit="batches"):
        self._k = k
        self._decayFactor = decayFactor
        if timeUnit not in ["batches", "points"]:
            raise ValueError(
                "timeUnit should be 'batches' or 'points', got %s." % timeUnit)
        self._timeUnit = timeUnit
        # No model exists until initial centers are provided (see
        # setInitialCenters / setRandomCenters); _validate enforces this.
        self._model = None
    @since('1.5.0')
    def latestModel(self):
        """Return the latest model"""
        return self._model
    def _validate(self, dstream):
        # Internal guard called before training/prediction: the model must
        # have been initialized and the input must be a DStream.
        if self._model is None:
            raise ValueError(
                "Initial centers should be set either by setInitialCenters "
                "or setRandomCenters.")
        if not isinstance(dstream, DStream):
            raise TypeError(
                "Expected dstream to be of type DStream, "
                "got type %s" % type(dstream))
    @since('1.5.0')
    def setK(self, k):
        """Set number of clusters."""
        self._k = k
        return self
    @since('1.5.0')
    def setDecayFactor(self, decayFactor):
        """Set decay factor."""
        self._decayFactor = decayFactor
        return self
    @since('1.5.0')
    def setHalfLife(self, halfLife, timeUnit):
        """
        Set number of batches after which the centroids of that
        particular batch has half the weightage.
        """
        self._timeUnit = timeUnit
        # decayFactor = exp(ln(0.5) / halfLife), so after halfLife time
        # units a batch's contribution is halved.
        self._decayFactor = exp(log(0.5) / halfLife)
        return self
    @since('1.5.0')
    def setInitialCenters(self, centers, weights):
        """
        Set initial centers. Should be set before calling trainOn.
        """
        self._model = StreamingKMeansModel(centers, weights)
        return self
    @since('1.5.0')
    def setRandomCenters(self, dim, weight, seed):
        """
        Set the initial centres to be random samples from
        a gaussian population with constant weights.
        """
        # NOTE(review): `random` here appears to be numpy.random (module
        # import is outside this view) -- RandomState/randn/tile usage
        # matches NumPy; confirm against the file's imports.
        rng = random.RandomState(seed)
        clusterCenters = rng.randn(self._k, dim)
        clusterWeights = tile(weight, self._k)
        self._model = StreamingKMeansModel(clusterCenters, clusterWeights)
        return self
    @since('1.5.0')
    def trainOn(self, dstream):
        """Train the model on the incoming dstream."""
        self._validate(dstream)
        # Each incoming RDD updates the shared model in place.
        def update(rdd):
            self._model.update(rdd, self._decayFactor, self._timeUnit)
        dstream.foreachRDD(update)
    @since('1.5.0')
    def predictOn(self, dstream):
        """
        Make predictions on a dstream.
        Returns a transformed dstream object
        """
        self._validate(dstream)
        return dstream.map(lambda x: self._model.predict(x))
    @since('1.5.0')
    def predictOnValues(self, dstream):
        """
        Make predictions on a keyed dstream.
        Returns a transformed dstream object.
        """
        self._validate(dstream)
        return dstream.mapValues(lambda x: self._model.predict(x))
class LDAModel(JavaModelWrapper, JavaSaveable, Loader):
    """ A clustering model derived from the LDA method.
    Latent Dirichlet Allocation (LDA), a topic model designed for text documents.
    Terminology
    - "word" = "term": an element of the vocabulary
    - "token": instance of a term appearing in a document
    - "topic": multinomial distribution over words representing some concept
    References:
    - Original LDA paper (journal version):
      Blei, Ng, and Jordan.  "Latent Dirichlet Allocation."  JMLR, 2003.
    >>> from pyspark.mllib.linalg import Vectors
    >>> from numpy.testing import assert_almost_equal, assert_equal
    >>> data = [
    ...     [1, Vectors.dense([0.0, 1.0])],
    ...     [2, SparseVector(2, {0: 1.0})],
    ... ]
    >>> rdd = sc.parallelize(data)
    >>> model = LDA.train(rdd, k=2, seed=1)
    >>> model.vocabSize()
    2
    >>> model.describeTopics()
    [([1, 0], [0.5..., 0.49...]), ([0, 1], [0.5..., 0.49...])]
    >>> model.describeTopics(1)
    [([1], [0.5...]), ([0], [0.5...])]
    >>> topics = model.topicsMatrix()
    >>> topics_expect = array([[0.5, 0.5], [0.5, 0.5]])
    >>> assert_almost_equal(topics, topics_expect, 1)
    >>> import os, tempfile
    >>> from shutil import rmtree
    >>> path = tempfile.mkdtemp()
    >>> model.save(sc, path)
    >>> sameModel = LDAModel.load(sc, path)
    >>> assert_equal(sameModel.topicsMatrix(), model.topicsMatrix())
    >>> sameModel.vocabSize() == model.vocabSize()
    True
    >>> try:
    ...     rmtree(path)
    ... except OSError:
    ...     pass
    .. versionadded:: 1.5.0
    """
    @since('1.5.0')
    def topicsMatrix(self):
        """Inferred topics, where each topic is represented by a distribution over terms."""
        return self.call("topicsMatrix").toArray()
    @since('1.5.0')
    def vocabSize(self):
        """Vocabulary size (number of terms or terms in the vocabulary)"""
        return self.call("vocabSize")
    @since('1.6.0')
    def describeTopics(self, maxTermsPerTopic=None):
        """Return the topics described by weighted terms.
        WARNING: If vocabSize and k are large, this can return a large object!
        :param maxTermsPerTopic:
          Maximum number of terms to collect for each topic.
          (default: vocabulary size)
        :return:
          Array over topics. Each topic is represented as a pair of
          matching arrays: (term indices, term weights in topic).
          Each topic's terms are sorted in order of decreasing weight.
        """
        # The JVM call has distinct arities for "all terms" vs. a limit, so
        # branch instead of passing None through.
        if maxTermsPerTopic is None:
            topics = self.call("describeTopics")
        else:
            topics = self.call("describeTopics", maxTermsPerTopic)
        return topics
    @classmethod
    @since('1.5.0')
    def load(cls, sc, path):
        """Load the LDAModel from disk.
        :param sc:
          SparkContext.
        :param path:
          Path to where the model is stored.
        :raises TypeError: if ``sc`` or ``path`` has the wrong type.
        """
        if not isinstance(sc, SparkContext):
            raise TypeError("sc should be a SparkContext, got type %s" % type(sc))
        # NOTE: `basestring` exists only on Python 2; this module targets the
        # Python 2 runtime used by this Spark version.
        if not isinstance(path, basestring):
            raise TypeError("path should be a basestring, got type %s" % type(path))
        model = callMLlibFunc("loadLDAModel", sc, path)
        return LDAModel(model)
class LDA(object):
    """
    Trainer for Latent Dirichlet Allocation (LDA) topic models.
    .. versionadded:: 1.5.0
    """
    @classmethod
    @since('1.5.0')
    def train(cls, rdd, k=10, maxIterations=20, docConcentration=-1.0,
              topicConcentration=-1.0, seed=None, checkpointInterval=10, optimizer="em"):
        """Train a LDA model.
        :param rdd:
          RDD of documents: (document ID, term count vector) tuples.
          The term count vectors are "bags of words" over a fixed-size
          vocabulary (the vocabulary size is the vector length).
          Document IDs must be unique and >= 0.
        :param k:
          Number of topics to infer, i.e. the number of soft cluster
          centers.
          (default: 10)
        :param maxIterations:
          Maximum number of iterations allowed.
          (default: 20)
        :param docConcentration:
          Concentration parameter (commonly named "alpha") for the prior
          placed on documents' distributions over topics ("theta").
          (default: -1.0)
        :param topicConcentration:
          Concentration parameter (commonly named "beta" or "eta") for
          the prior placed on topics' distributions over terms.
          (default: -1.0)
        :param seed:
          Random seed for cluster initialization; None derives a seed
          from the system time.
          (default: None)
        :param checkpointInterval:
          Period (in iterations) between checkpoints.
          (default: 10)
        :param optimizer:
          LDAOptimizer used to perform the actual calculation; "em" and
          "online" are currently supported.
          (default: "em")
        """
        java_model = callMLlibFunc("trainLDAModel", rdd, k, maxIterations,
                                   docConcentration, topicConcentration, seed,
                                   checkpointInterval, optimizer)
        return LDAModel(java_model)
def _test():
    """Run this module's doctests against a local SparkContext."""
    import doctest
    import pyspark.mllib.clustering
    test_globs = pyspark.mllib.clustering.__dict__.copy()
    test_globs['sc'] = SparkContext('local[4]', 'PythonTest', batchSize=2)
    results = doctest.testmod(globs=test_globs, optionflags=doctest.ELLIPSIS)
    test_globs['sc'].stop()
    if results.failed:
        sys.exit(-1)
# Run the module doctest suite when executed as a script (not on import).
if __name__ == "__main__":
    _test()
| apache-2.0 |
AnhellO/DAS_Sistemas | Ago-Dic-2017/Enrique Castillo/Ordinario/test/Lib/site-packages/pip/_vendor/requests/packages/chardet/gb2312prober.py | 2994 | 1681 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import GB2312DistributionAnalysis
from .mbcssm import GB2312SMModel
class GB2312Prober(MultiByteCharSetProber):
    """Charset prober for the GB2312 (simplified Chinese) encoding."""
    def __init__(self):
        MultiByteCharSetProber.__init__(self)
        # Pair the GB2312 coding state machine with its character
        # distribution analyser; the base class drives both.
        self._mCodingSM = CodingStateMachine(GB2312SMModel)
        self._mDistributionAnalyzer = GB2312DistributionAnalysis()
        self.reset()
    def get_charset_name(self):
        """Return the canonical name of the charset this prober detects."""
        return "GB2312"
| mit |
abridgett/boto | boto/rds/dbsecuritygroup.py | 185 | 6651 | # Copyright (c) 2009 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Represents an DBSecurityGroup
"""
from boto.ec2.securitygroup import SecurityGroup
class DBSecurityGroup(object):
    """
    Represents an RDS database security group.

    Property reference available from the AWS documentation at
    http://docs.amazonwebservices.com/AmazonRDS/latest/APIReference/API_DeleteDBSecurityGroup.html

    :ivar Status: Current status of the security group (e.g. ``active``).
    :ivar connection: The :py:class:`boto.rds.RDSConnection` associated with
        the current object.
    :ivar description: Description of the security group.
    :ivar ec2_groups: List of :py:class:`EC2 Security Group
        <boto.ec2.securitygroup.SecurityGroup>` objects that this security
        group PERMITS.
    :ivar ip_ranges: List of :py:class:`boto.rds.dbsecuritygroup.IPRange`
        objects (containing CIDR addresses) that this security group PERMITS.
    :ivar name: Name of the security group.
    :ivar owner_id: ID of the owner of the security group. Can be 'None'.
    """

    def __init__(self, connection=None, owner_id=None,
                 name=None, description=None):
        self.connection = connection
        self.owner_id = owner_id
        self.name = name
        self.description = description
        self.ec2_groups = []
        self.ip_ranges = []

    def __repr__(self):
        return 'DBSecurityGroup:%s' % self.name

    def startElement(self, name, attrs, connection):
        """Create and track a child parser object for nested XML elements."""
        if name == 'IPRange':
            ip_range = IPRange(self)
            self.ip_ranges.append(ip_range)
            return ip_range
        if name == 'EC2SecurityGroup':
            group = EC2SecurityGroup(self)
            self.ec2_groups.append(group)
            return group
        return None

    def endElement(self, name, value, connection):
        """Store the text of a closed XML element on this object."""
        if name == 'OwnerId':
            self.owner_id = value
        elif name == 'DBSecurityGroupName':
            self.name = value
        elif name == 'DBSecurityGroupDescription':
            self.description = value
        elif name == 'IPRanges':
            pass  # container element; its children are handled in startElement
        else:
            setattr(self, name, value)

    def delete(self):
        """Delete this security group through the associated connection."""
        return self.connection.delete_dbsecurity_group(self.name)

    def authorize(self, cidr_ip=None, ec2_group=None):
        """
        Add a new rule to this DBSecurity group.
        Pass in either a CIDR block to authorize or an EC2 SecurityGroup.

        :type cidr_ip: string
        :param cidr_ip: A valid CIDR IP range to authorize

        :type ec2_group: :class:`boto.ec2.securitygroup.SecurityGroup`
        :param ec2_group: An EC2 security group to authorize

        :rtype: bool
        :return: True if successful.
        """
        group_name = None
        group_owner_id = None
        if isinstance(ec2_group, SecurityGroup):
            group_name = ec2_group.name
            group_owner_id = ec2_group.owner_id
        return self.connection.authorize_dbsecurity_group(self.name,
                                                          cidr_ip,
                                                          group_name,
                                                          group_owner_id)

    def revoke(self, cidr_ip=None, ec2_group=None):
        """
        Revoke access to a CIDR range or EC2 SecurityGroup.
        Pass in either a CIDR block or an EC2 SecurityGroup from which
        to revoke access.

        :type cidr_ip: string
        :param cidr_ip: A valid CIDR IP range to revoke

        :type ec2_group: :class:`boto.ec2.securitygroup.SecurityGroup`
        :param ec2_group: An EC2 security group to revoke

        :rtype: bool
        :return: True if successful.
        """
        if isinstance(ec2_group, SecurityGroup):
            return self.connection.revoke_dbsecurity_group(
                self.name,
                ec2_security_group_name=ec2_group.name,
                ec2_security_group_owner_id=ec2_group.owner_id)
        # Revoking by CIDR IP range
        return self.connection.revoke_dbsecurity_group(
            self.name, cidr_ip=cidr_ip)
class IPRange(object):
    """
    Describes a CIDR address range for use in a DBSecurityGroup.

    :ivar cidr_ip: IP Address range
    """

    # Maps known XML tag names onto the attributes they populate; any other
    # tag is stored verbatim under its own name.
    _TAG_ATTRS = {'CIDRIP': 'cidr_ip', 'Status': 'status'}

    def __init__(self, parent=None):
        self.parent = parent
        self.cidr_ip = None
        self.status = None

    def __repr__(self):
        return 'IPRange:%s' % self.cidr_ip

    def startElement(self, name, attrs, connection):
        pass

    def endElement(self, name, value, connection):
        setattr(self, self._TAG_ATTRS.get(name, name), value)
class EC2SecurityGroup(object):
    """
    Describes an EC2 security group for use in a DBSecurityGroup.
    """

    # Maps known XML tag names onto the attributes they populate; any other
    # tag is stored verbatim under its own name.
    _TAG_ATTRS = {'EC2SecurityGroupName': 'name',
                  'EC2SecurityGroupOwnerId': 'owner_id'}

    def __init__(self, parent=None):
        self.parent = parent
        self.name = None
        self.owner_id = None

    def __repr__(self):
        return 'EC2SecurityGroup:%s' % self.name

    def startElement(self, name, attrs, connection):
        pass

    def endElement(self, name, value, connection):
        setattr(self, self._TAG_ATTRS.get(name, name), value)
| mit |
flacjacket/sympy | sympy/polys/domains/realdomain.py | 2 | 3404 | """Implementation of :class:`RealDomain` class. """
from sympy.polys.domains.characteristiczero import CharacteristicZero
from sympy.polys.domains.simpledomain import SimpleDomain
from sympy.polys.domains.groundtypes import SymPyRealType
from sympy.polys.polyerrors import DomainError, CoercionFailed
import math
class RealDomain(CharacteristicZero, SimpleDomain): # XXX: should be a field
    """Abstract domain for real numbers. """
    rep = 'RR'
    is_Exact = False
    is_Numerical = True
    # Non-finite values that from_sympy refuses to convert.
    _convert_excludes = [
        SymPyRealType('+inf'),
        SymPyRealType('-inf'),
    ]
    def as_integer_ratio(self, a, **args):
        """Convert real number to a (numer, denom) pair. """
        # frexp gives a == v * 2**n with 0.5 <= |v| < 1; double v (at most
        # 300 times) until the mantissa is an integer.
        v, n = math.frexp(a) # XXX: hack, will work only for floats
        for i in xrange(300):
            if v != math.floor(v):
                v, n = 2*v, n-1
            else:
                break
        numer, denom = int(v), 1
        # Apply the remaining power-of-two exponent to the numerator
        # (n > 0) or the denominator (n <= 0).
        m = 1 << abs(n)
        if n > 0:
            numer *= m
        else:
            denom = m
        # Approximate with a bounded denominator; if that collapses a
        # nonzero value to 0, fall back to the exact ratio.
        n, d = self.limit_denom(numer, denom, **args)
        if a and not n:
            return numer, denom
        else:
            return n, d
    def limit_denom(self, n, d, **args):
        """Find closest rational to `n/d` (up to ``max_denom``). """
        max_denom = args.get('max_denom', 1000000)
        if d <= max_denom:
            return n, d
        from sympy.polys.domains import QQ
        self = QQ(n, d)
        # Walk the continued-fraction expansion of n/d, keeping the last two
        # convergents p0/q0 and p1/q1, until the next denominator would
        # exceed the bound (same scheme as Fraction.limit_denominator).
        p0, q0, p1, q1 = 0, 1, 1, 0
        while True:
            a = n//d
            q2 = q0 + a*q1
            if q2 > max_denom:
                break
            p0, q0, p1, q1, n, d = \
                p1, q1, p0 + a*p1, q2, d, n - a*d
        # The best bounded approximation is either the last convergent
        # p1/q1 or the semiconvergent P1/Q1; return whichever is closer.
        k = (max_denom - q0)//q1
        P1, Q1 = p0 + k*p1, q0 + k*q1
        bound1 = QQ(P1, Q1)
        bound2 = QQ(p1, q1)
        if abs(bound2 - self) <= abs(bound1 - self):
            return p1, q1
        else:
            return P1, Q1
    def get_ring(self):
        """Returns a ring associated with ``self``. """
        raise DomainError('there is no ring associated with %s' % self)
    def get_field(self):
        """Returns a field associated with ``self``. """
        raise DomainError('there is no field associated with %s' % self)
    def get_exact(self):
        """Returns an exact domain associated with ``self``. """
        from sympy.polys.domains import QQ
        return QQ
    def exquo(self, a, b):
        """Exact quotient of ``a`` and ``b``. """
        return a / b
    def quo(self, a, b):
        """Quotient of ``a`` and ``b``. """
        return a / b
    def rem(self, a, b):
        """Remainder of ``a`` and ``b``. """
        return a % b
    def div(self, a, b):
        """Division of ``a`` and ``b``. """
        return (a / b, a % b)
    def gcd(self, a, b):
        """Returns GCD of ``a`` and ``b``. """
        # Reals form a field, so every nonzero element divides every other;
        # the GCD is conventionally 1.
        return self.one
    def lcm(self, a, b):
        """Returns LCM of ``a`` and ``b``. """
        return a*b
    def to_sympy(self, a):
        """Convert ``a`` to SymPy number. """
        return SymPyRealType(a)
    def from_sympy(self, a):
        """Convert SymPy's number to ``dtype``. """
        b = a.evalf()
        # Only finite numeric results are convertible (see _convert_excludes).
        if b.is_Number and b not in self._convert_excludes:
            return self.dtype(b)
        else:
            raise CoercionFailed("expected Number object, got %s" % a)
| bsd-3-clause |
samhutchins/tools | video_production/transcode-for-edit.py | 1 | 1479 | #!/usr/bin/env python3.6
import os, argparse, subprocess
def main():
    """Collect all .mov/.mp4 files under the current directory and
    transcode them into ``~/Videos/Mine/WIP/<project>/footage``.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("project")
    args = parser.parse_args()
    home = os.path.join(os.path.expanduser("~"), "Videos", "Mine", "WIP")
    dest_dir = os.path.join(home, args.project, "footage")
    if not os.path.exists(dest_dir):
        os.makedirs(dest_dir)
    videos = []
    for root, _, files in os.walk(os.getcwd()):
        for file in files:
            videos.append(os.path.join(root, file))
    # Keep only QuickTime/MP4 sources; endswith with a tuple is
    # case-insensitive here thanks to lower() and avoids the fragile
    # fixed-width x[-4:] slice.
    videos = [v for v in videos if v.lower().endswith((".mov", ".mp4"))]
    for video in videos:
        transcode(video, dest_dir)
def transcode(video, dest_dir):
    """Transcode one video into a ProRes (-profile:v 3, i.e. ProRes HQ)
    .mov in dest_dir, downscaling when the source is wider than 1920px.
    """
    # NOTE(review): the command is built as a single string with manual
    # double-quoting and run without shell=True -- this works on Windows
    # (CreateProcess takes a command line) but would fail on POSIX; confirm
    # the intended platform before reusing.
    size = subprocess.check_output('ffprobe -v quiet -print_format csv=p=0 -select_streams v:0 -show_entries stream=height,width "' + video + '"').decode('utf-8').rstrip()
    # Assumes the first CSV field ffprobe prints is the width -- TODO confirm
    # ffprobe's field ordering for stream=height,width.
    width = int(size.split(",")[0])
    args = ""
    if 1920 < width:
        # scale=1920:-1 forces a 1920px width; -1 preserves aspect ratio.
        args = " -vf scale=1920:-1"
    destination = get_filename(video, dest_dir)
    command = 'ffmpeg -hide_banner -v error -stats -y -i "' + video + '" -vcodec prores -profile:v 3' + args + " " + destination
    print("Transcoding " + os.path.basename(video))
    subprocess.run(command)
def get_filename(file, dest_dir):
    """Return the double-quoted destination path for *file* inside
    *dest_dir*, with its extension replaced by ``.mov``.
    """
    stem = os.path.splitext(os.path.basename(file))[0]
    return '"%s"' % os.path.join(dest_dir, stem + ".mov")
if __name__ == "__main__":
main() | mit |
javachengwc/hue | apps/spark/src/spark/job_server_api.py | 12 | 4052 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import json
import posixpath
import threading
from desktop.lib.rest.http_client import HttpClient
from desktop.lib.rest.resource import Resource
from spark.conf import get_livy_server_url
LOG = logging.getLogger(__name__)
DEFAULT_USER = 'hue'  # NOTE(review): not referenced in this file; presumably a fallback proxy user
_API_VERSION = 'v1'
# Content types used when POSTing to the Livy REST API.
_JSON_CONTENT_TYPE = 'application/json'
_BINARY_CONTENT_TYPE = 'application/octet-stream'
_TEXT_CONTENT_TYPE = 'text/plain'
# Lazily-created, process-wide JobServerApi singleton; guarded by the lock
# below and initialized in get_api().
_api_cache = None
_api_cache_lock = threading.Lock()
def get_api(user):
    """Return the process-wide JobServerApi singleton, bound to *user*.

    The instance is created lazily on first use (double-checked locking so
    the common path skips the lock entirely) and then shared; only the
    acting user is switched on each call.
    """
    global _api_cache
    if _api_cache is None:
        with _api_cache_lock:
            if _api_cache is None:
                _api_cache = JobServerApi(get_livy_server_url())
    _api_cache.setuser(user)
    return _api_cache
class JobServerApi(object):
    """Thin REST client for the Livy job server (sessions and batches APIs)."""

    def __init__(self, oozie_url):
        """:param oozie_url: base URL of the Livy server."""
        self._url = posixpath.join(oozie_url)
        self._client = HttpClient(self._url, logger=LOG)
        self._root = Resource(self._client)
        self._security_enabled = False
        # The acting user is stored per-thread: the instance is a process-wide
        # singleton (see get_api) shared across request-handling threads.
        self._thread_local = threading.local()

    def __str__(self):
        return "JobServerApi at %s" % (self._url,)

    @property
    def url(self):
        """Base URL this client talks to."""
        return self._url

    @property
    def security_enabled(self):
        """Whether security is enabled (always False here; never toggled in this file)."""
        return self._security_enabled

    @property
    def user(self):
        """Username bound to the current thread via setuser()."""
        return self._thread_local.user

    def setuser(self, user):
        """Bind *user* (a User-like object with .username, or a plain string) to this thread."""
        if hasattr(user, 'username'):
            self._thread_local.user = user.username
        else:
            self._thread_local.user = user

    def get_status(self):
        """List all interactive sessions."""
        return self._root.get('sessions')

    def create_session(self, **properties):
        """Create an interactive session proxied as the current user."""
        properties['proxyUser'] = self.user
        # Use the shared constant for consistency with the other JSON POSTs.
        return self._root.post('sessions', data=json.dumps(properties), contenttype=_JSON_CONTENT_TYPE)

    def get_session(self, uuid):
        """Fetch a single session by id."""
        return self._root.get('sessions/%s' % uuid)

    def submit_statement(self, uuid, statement):
        """Submit a code statement to session *uuid* for execution."""
        data = {'code': statement}
        return self._root.post('sessions/%s/statements' % uuid, data=json.dumps(data), contenttype=_JSON_CONTENT_TYPE)

    def inspect(self, uuid, statement):
        """Inspect a code statement in session *uuid* without executing it."""
        data = {'code': statement}
        return self._root.post('sessions/%s/inspect' % uuid, data=json.dumps(data), contenttype=_JSON_CONTENT_TYPE)

    def fetch_data(self, session, statement):
        """Fetch the result of *statement* within *session*."""
        return self._root.get('sessions/%s/statements/%s' % (session, statement))

    def cancel(self, session):
        """Interrupt the currently running statement of *session*."""
        return self._root.post('sessions/%s/interrupt' % session)

    def close(self, uuid):
        """Delete (close) session *uuid*."""
        return self._root.delete('sessions/%s' % uuid)

    def get_batches(self):
        """List all batch jobs."""
        return self._root.get('batches')

    def submit_batch(self, properties):
        """Submit a batch job proxied as the current user."""
        properties['proxyUser'] = self.user
        return self._root.post('batches', data=json.dumps(properties), contenttype=_JSON_CONTENT_TYPE)

    def get_batch(self, uuid):
        """Fetch a single batch job by id."""
        return self._root.get('batches/%s' % uuid)

    def get_batch_status(self, uuid):
        """Return just the state string of batch *uuid*."""
        response = self._root.get('batches/%s/state' % uuid)
        return response['state']

    def get_batch_log(self, uuid, startFrom=None, size=None):
        """Return the log of batch *uuid* as a single newline-joined string.

        :param startFrom: optional offset of the first log line to fetch
        :param size: optional maximum number of log lines to fetch
        """
        params = {}
        if startFrom is not None:
            params['from'] = startFrom
        if size is not None:
            params['size'] = size
        response = self._root.get('batches/%s/log' % uuid, params=params)
        return '\n'.join(response['log'])

    def close_batch(self, uuid):
        """Delete (close) batch *uuid*."""
        return self._root.delete('batches/%s' % uuid)
| apache-2.0 |
endolith/scikit-image | skimage/io/tests/test_colormixer.py | 40 | 4619 | from numpy.testing import (assert_array_equal,
assert_almost_equal,
assert_equal,
assert_array_almost_equal,
)
import numpy as np
import skimage.io._plugins._colormixer as cm
class ColorMixerTest(object):
    """Shared checks for a channel-wise mixer operation.

    Subclasses supply ``op`` (the implementation under test), ``py_op``
    (the NumPy reference), and the four amounts exercised below.
    """

    def setup(self):
        # 18x33 RGB image with every channel value at 200, plus an output buffer.
        self.state = np.full((18, 33, 3), 200, dtype=np.uint8)
        self.img = np.zeros_like(self.state)

    def test_basic(self):
        # Channel 0 must match the pure-NumPy reference for an in-range amount.
        self.op(self.img, self.state, 0, self.positive)
        expected = self.py_op(self.state[..., 0], self.positive)
        assert_array_equal(self.img[..., 0], expected)

    def test_clip(self):
        # Amounts that overflow uint8 must saturate at 255.
        self.op(self.img, self.state, 0, self.positive_clip)
        assert_array_equal(self.img[..., 0],
                           np.full_like(self.img[..., 0], 255))

    def test_negative(self):
        self.op(self.img, self.state, 0, self.negative)
        expected = self.py_op(self.state[..., 0], self.negative)
        assert_array_equal(self.img[..., 0], expected)

    def test_negative_clip(self):
        # Amounts that underflow must saturate at 0.
        self.op(self.img, self.state, 0, self.negative_clip)
        assert_array_equal(self.img[..., 0],
                           np.zeros_like(self.img[..., 0]))
class TestColorMixerAdd(ColorMixerTest):
    # Additive mixer: cm.add against the np.add reference.
    op = cm.add
    py_op = np.add
    positive = 50
    positive_clip = 56    # 200 + 56 = 256 -> must clip to 255
    negative = -50
    negative_clip = -220  # 200 - 220 = -20 -> must clip to 0
class TestColorMixerMul(ColorMixerTest):
    # Multiplicative mixer: cm.multiply against the np.multiply reference.
    op = cm.multiply
    py_op = np.multiply
    positive = 1.2
    positive_clip = 2     # 200 * 2 = 400 -> must clip to 255
    negative = 0.5
    negative_clip = -0.5  # negative factor -> must clip to 0
class TestColorMixerBright(object):
    """Checks for cm.brightness (multiplicative factor plus additive offset)."""

    def setup(self):
        # 18x33 RGB image with every channel value at 200, plus an output buffer.
        self.state = np.full((18, 33, 3), 200, dtype=np.uint8)
        self.img = np.zeros_like(self.state)

    def test_brightness_pos(self):
        # 200 * 1.25 + 1 = 251 on every channel.
        cm.brightness(self.img, self.state, 1.25, 1)
        assert_array_equal(self.img, np.full_like(self.img, 251))

    def test_brightness_neg(self):
        # 200 * 0.5 - 50 = 50 on every channel.
        cm.brightness(self.img, self.state, 0.5, -50)
        assert_array_equal(self.img, np.full_like(self.img, 50))

    def test_brightness_pos_clip(self):
        # 200 * 2 = 400 must saturate at 255.
        cm.brightness(self.img, self.state, 2, 0)
        assert_array_equal(self.img, np.full_like(self.img, 255))

    def test_brightness_neg_clip(self):
        # Factor 0 drives everything to black.
        cm.brightness(self.img, self.state, 0, 0)
        assert_array_equal(self.img, np.zeros_like(self.img))
class TestColorMixer(object):
    """Checks for the remaining colormixer ops: gamma curves and HSV math."""

    def setup(self):
        # 18x33 RGB image with every channel at 50, plus an output buffer.
        self.state = np.ones((18, 33, 3), dtype=np.uint8) * 50
        self.img = np.zeros_like(self.state)

    def test_sigmoid(self):
        """cm.sigmoid_gamma must match the normalized logistic-curve reference."""
        import math
        alpha = 1.5
        beta = 1.5
        # c1/c2 rescale the raw logistic output so that inputs 0 and 1
        # map back exactly onto outputs 0 and 1.
        c1 = 1 / (1 + math.exp(beta))
        c2 = 1 / (1 + math.exp(beta - alpha)) - c1
        state = self.state / 255.
        cm.sigmoid_gamma(self.img, self.state, alpha, beta)
        img = 1 / (1 + np.exp(beta - state * alpha))
        img = np.asarray((img - c1) / c2 * 255, dtype='uint8')
        assert_almost_equal(img, self.img)

    def test_gamma(self):
        """cm.gamma must match the (v/255)**(1/gamma) reference curve."""
        gamma = 1.5
        cm.gamma(self.img, self.state, gamma)
        img = np.asarray(((self.state / 255.)**(1 / gamma)) * 255,
                         dtype='uint8')
        assert_array_almost_equal(img, self.img)

    def test_rgb_2_hsv(self):
        """Pure red must convert to hue 0, full saturation, full value."""
        r = 255
        g = 0
        b = 0
        h, s, v = cm.py_rgb_2_hsv(r, g, b)
        assert_almost_equal(np.array([h]), np.array([0]))
        assert_almost_equal(np.array([s]), np.array([1]))
        assert_almost_equal(np.array([v]), np.array([1]))

    def test_hsv_2_rgb(self):
        """Hue 0 at full saturation/value must convert back to pure red."""
        h = 0
        s = 1
        v = 1
        r, g, b = cm.py_hsv_2_rgb(h, s, v)
        assert_almost_equal(np.array([r]), np.array([255]))
        assert_almost_equal(np.array([g]), np.array([0]))
        assert_almost_equal(np.array([b]), np.array([0]))

    def test_hsv_add(self):
        """Adding a full 360-degree hue rotation is an identity operation."""
        cm.hsv_add(self.img, self.state, 360, 0, 0)
        assert_almost_equal(self.img, self.state)

    def test_hsv_add_clip_neg(self):
        # A value offset of -1 drives everything to black.
        cm.hsv_add(self.img, self.state, 0, 0, -1)
        assert_equal(self.img, np.zeros_like(self.state))

    def test_hsv_add_clip_pos(self):
        # A value offset of +1 saturates everything to white.
        cm.hsv_add(self.img, self.state, 0, 0, 1)
        assert_equal(self.img, np.ones_like(self.state) * 255)

    def test_hsv_mul(self):
        """Multiplying with neutral s/v factors (1, 1) is an identity operation."""
        cm.hsv_multiply(self.img, self.state, 360, 1, 1)
        assert_almost_equal(self.img, self.state)

    def test_hsv_mul_clip_neg(self):
        # Zero saturation and value factors drive everything to black.
        cm.hsv_multiply(self.img, self.state, 0, 0, 0)
        assert_equal(self.img, np.zeros_like(self.state))
if __name__ == "__main__":
    # Allow running this test module directly via numpy's legacy test runner.
    from numpy.testing import run_module_suite
    run_module_suite()
| bsd-3-clause |
davidrenne/django-allauth | allauth/socialaccount/providers/oauth2/provider.py | 67 | 1156 | try:
from urllib.parse import parse_qsl
except ImportError:
from urlparse import parse_qsl
from django.core.urlresolvers import reverse
from django.utils.http import urlencode
from allauth.socialaccount.providers.base import Provider
class OAuth2Provider(Provider):
    def get_login_url(self, request, **kwargs):
        """Return this provider's login URL, with any kwargs as a query string."""
        url = reverse(self.id + "_login")
        if kwargs:
            url = url + '?' + urlencode(kwargs)
        return url

    def get_auth_params(self, request, action):
        """Return the configured auth params merged with ?auth_params= from the request.

        A copy of the configured dict is returned so per-request additions
        never leak back into the shared app settings.
        """
        settings = self.get_settings()
        # Copy: settings.get() hands back the live configuration dict, and
        # update() below would otherwise mutate it across requests.
        ret = dict(settings.get('AUTH_PARAMS', {}))
        dynamic_auth_params = request.GET.get('auth_params', None)
        if dynamic_auth_params:
            ret.update(dict(parse_qsl(dynamic_auth_params)))
        return ret

    def get_scope(self, request):
        """Return the OAuth2 scope list, extended by any ?scope= request parameter."""
        settings = self.get_settings()
        scope = settings.get('SCOPE')
        if scope is None:
            scope = self.get_default_scope()
        # Copy: extending the configured list in place would make dynamic
        # scopes accumulate in the shared settings across requests.
        scope = list(scope)
        dynamic_scope = request.GET.get('scope', None)
        if dynamic_scope:
            scope.extend(dynamic_scope.split(','))
        return scope

    def get_default_scope(self):
        """Scopes requested when none are configured; providers may override."""
        return []
| mit |
wkschwartz/django | tests/db_functions/math/test_power.py | 69 | 1784 | from decimal import Decimal
from django.db.models.functions import Power
from django.test import TestCase
from ..models import DecimalModel, FloatModel, IntegerModel
class PowerTests(TestCase):
    """Tests for the Power database function across numeric field types."""

    def test_null(self):
        """Power propagates NULL when either operand is NULL."""
        IntegerModel.objects.create(big=100)  # 'small' and 'normal' left NULL
        obj = IntegerModel.objects.annotate(
            null_power_small=Power('small', 'normal'),
            null_power_normal=Power('normal', 'big'),
        ).first()
        self.assertIsNone(obj.null_power_small)
        self.assertIsNone(obj.null_power_normal)

    def test_decimal(self):
        """Power on DecimalFields returns a Decimal matching Python's ** result."""
        DecimalModel.objects.create(n1=Decimal('1.0'), n2=Decimal('-0.6'))
        obj = DecimalModel.objects.annotate(n_power=Power('n1', 'n2')).first()
        self.assertIsInstance(obj.n_power, Decimal)
        self.assertAlmostEqual(obj.n_power, Decimal(obj.n1 ** obj.n2))

    def test_float(self):
        """Power on FloatFields returns a float matching Python's ** result."""
        FloatModel.objects.create(f1=2.3, f2=1.1)
        obj = FloatModel.objects.annotate(f_power=Power('f1', 'f2')).first()
        self.assertIsInstance(obj.f_power, float)
        self.assertAlmostEqual(obj.f_power, obj.f1 ** obj.f2)

    def test_integer(self):
        """Power on IntegerFields returns floats (negative exponents need them)."""
        IntegerModel.objects.create(small=-1, normal=20, big=3)
        obj = IntegerModel.objects.annotate(
            small_power=Power('small', 'normal'),
            normal_power=Power('normal', 'big'),
            big_power=Power('big', 'small'),
        ).first()
        self.assertIsInstance(obj.small_power, float)
        self.assertIsInstance(obj.normal_power, float)
        self.assertIsInstance(obj.big_power, float)
        self.assertAlmostEqual(obj.small_power, obj.small ** obj.normal)
        self.assertAlmostEqual(obj.normal_power, obj.normal ** obj.big)
        self.assertAlmostEqual(obj.big_power, obj.big ** obj.small)
| bsd-3-clause |
archf/ansible | lib/ansible/modules/cloud/amazon/lambda_event.py | 26 | 14552 | #!/usr/bin/python
# (c) 2016, Pierre Jodouin <pjodouin@virtualcomputing.solutions>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: lambda_event
short_description: Creates, updates or deletes AWS Lambda function event mappings.
description:
- This module allows the management of AWS Lambda function event source mappings such as DynamoDB and Kinesis stream
events via the Ansible framework. These event source mappings are relevant only in the AWS Lambda pull model, where
AWS Lambda invokes the function.
It is idempotent and supports "Check" mode. Use module M(lambda) to manage the lambda
function itself and M(lambda_alias) to manage function aliases.
version_added: "2.2"
author: Pierre Jodouin (@pjodouin), Ryan Brown (@ryansb)
options:
lambda_function_arn:
description:
- The name or ARN of the lambda function.
required: true
aliases: ['function_name', 'function_arn']
state:
description:
- Describes the desired state.
required: true
default: "present"
choices: ["present", "absent"]
alias:
description:
- Name of the function alias. Mutually exclusive with C(version).
required: true
version:
description:
- Version of the Lambda function. Mutually exclusive with C(alias).
required: false
event_source:
description:
- Source of the event that triggers the lambda function.
required: false
default: stream
choices: ['stream']
source_params:
description:
- Sub-parameters required for event source.
- I(== stream event source ==)
- C(source_arn) The Amazon Resource Name (ARN) of the Kinesis or DynamoDB stream that is the event source.
- C(enabled) Indicates whether AWS Lambda should begin polling the event source. Default is True.
- C(batch_size) The largest number of records that AWS Lambda will retrieve from your event source at the
time of invoking your function. Default is 100.
- C(starting_position) The position in the stream where AWS Lambda should start reading.
Choices are TRIM_HORIZON or LATEST.
required: true
requirements:
- boto3
extends_documentation_fragment:
- aws
'''
EXAMPLES = '''
---
# Example that creates a lambda event notification for a DynamoDB stream
- hosts: localhost
gather_facts: no
vars:
state: present
tasks:
- name: DynamoDB stream event mapping
lambda_event:
state: "{{ state | default('present') }}"
event_source: stream
function_name: "{{ function_name }}"
alias: Dev
source_params:
source_arn: arn:aws:dynamodb:us-east-1:123456789012:table/tableName/stream/2016-03-19T19:51:37.457
enabled: True
batch_size: 100
starting_position: TRIM_HORIZON
- name: Show source event
debug:
var: lambda_stream_events
'''
RETURN = '''
---
lambda_stream_events:
description: list of dictionaries returned by the API describing stream event mappings
returned: success
type: list
'''
import re
import sys
try:
import boto3
from botocore.exceptions import ClientError, ParamValidationError, MissingParametersError
HAS_BOTO3 = True
except ImportError:
HAS_BOTO3 = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import (HAS_BOTO3, boto3_conn, camel_dict_to_snake_dict, ec2_argument_spec,
get_aws_connection_info)
# ---------------------------------------------------------------------------------------------------
#
# Helper Functions & classes
#
# ---------------------------------------------------------------------------------------------------
class AWSConnection:
    """
    Create the connection object and client objects as required.
    """

    def __init__(self, ansible_obj, resources, use_boto3=True):
        """Open a boto3 client for each requested AWS service plus 'iam'.

        :param ansible_obj: AnsibleModule instance (used for config and fail_json)
        :param resources: list of AWS service names; defaults to ['lambda'] if empty
        :param use_boto3: passed through to get_aws_connection_info
        """
        try:
            self.region, self.endpoint, aws_connect_kwargs = get_aws_connection_info(ansible_obj, boto3=use_boto3)
            self.resource_client = dict()
            if not resources:
                resources = ['lambda']
            # 'iam' is always added: it is needed below to look up the account id.
            resources.append('iam')
            for resource in resources:
                aws_connect_kwargs.update(dict(region=self.region,
                                               endpoint=self.endpoint,
                                               conn_type='client',
                                               resource=resource
                                               ))
                self.resource_client[resource] = boto3_conn(ansible_obj, **aws_connect_kwargs)
            # if region is not provided, then get default profile/session region
            if not self.region:
                self.region = self.resource_client['lambda'].meta.region_name
        except (ClientError, ParamValidationError, MissingParametersError) as e:
            ansible_obj.fail_json(msg="Unable to connect, authorize or access resource: {0}".format(e))
        # set account ID (best effort: parsed from the caller's user ARN;
        # falls back to '' when the identity cannot be determined)
        try:
            self.account_id = self.resource_client['iam'].get_user()['User']['Arn'].split(':')[4]
        except (ClientError, ValueError, KeyError, IndexError):
            self.account_id = ''

    def client(self, resource='lambda'):
        """Return the boto3 client for *resource* (default 'lambda')."""
        return self.resource_client[resource]
def pc(key):
    """Convert a snake_case key into its PascalCase equivalent.

    For example, 'this_function_name' becomes 'ThisFunctionName', matching
    the parameter naming expected by the boto3/AWS APIs.

    :param key: snake_case string
    :return: PascalCase string
    """
    return "".join(token.capitalize() for token in key.split('_'))
def ordered_obj(obj):
    """Recursively normalize nested dicts/lists into sorted structures.

    Used to compare objects irrespective of key order or list ordering.

    :param obj: arbitrarily nested dict/list/scalar
    :return: an order-independent, comparable representation
    """
    if isinstance(obj, dict):
        return sorted((key, ordered_obj(value)) for key, value in obj.items())
    if isinstance(obj, list):
        return sorted(ordered_obj(item) for item in obj)
    return obj
def set_api_sub_params(params):
    """Translate module sub-parameters into their boto3 API (PascalCase) names.

    Entries with falsy values are dropped, mirroring how optional parameters
    are omitted from API calls.

    :param params: dict mapping snake_case parameter names to values
    :return: dict mapping PascalCase names to the truthy values
    """
    return {
        "".join(part.capitalize() for part in name.split('_')): value
        for name, value in params.items()
        if value
    }
def validate_params(module, aws):
    """Validate the lambda function name/ARN and expand it to a qualified ARN.

    Side effect: rewrites module.params['lambda_function_arn'] in place to
    the fully-qualified form (full ARN, plus a version/alias qualifier when
    one is configured). Fails the module on any invalid name.

    :param module: AnsibleModule instance
    :param aws: AWSConnection instance (supplies region and account id)
    :return: None
    """
    function_name = module.params['lambda_function_arn']
    # validate function name (raw string so \w is a regex class, not a
    # Python escape -- avoids a DeprecationWarning on Python 3.6+)
    if not re.search(r'^[\w\-:]+$', function_name):
        module.fail_json(
            msg='Function name {0} is invalid. Names must contain only alphanumeric characters and hyphens.'.format(function_name)
        )
    # AWS limits: plain names to 64 characters, full ARNs to 140.
    if len(function_name) > 64 and not function_name.startswith('arn:aws:lambda:'):
        module.fail_json(msg='Function name "{0}" exceeds 64 character limit'.format(function_name))
    elif len(function_name) > 140 and function_name.startswith('arn:aws:lambda:'):
        module.fail_json(msg='ARN "{0}" exceeds 140 character limit'.format(function_name))
    # check if 'function_name' needs to be expanded in full ARN format
    if not module.params['lambda_function_arn'].startswith('arn:aws:lambda:'):
        function_name = module.params['lambda_function_arn']
        module.params['lambda_function_arn'] = 'arn:aws:lambda:{0}:{1}:function:{2}'.format(aws.region, aws.account_id, function_name)
    # Append the version/alias qualifier, if any, onto the ARN.
    qualifier = get_qualifier(module)
    if qualifier:
        function_arn = module.params['lambda_function_arn']
        module.params['lambda_function_arn'] = '{0}:{1}'.format(function_arn, qualifier)
    return
def get_qualifier(module):
    """Return the lambda function qualifier: version, alias, or None.

    A positive 'version' takes precedence over 'alias'; when neither is
    set the function returns None.

    :param module: AnsibleModule with 'version' and 'alias' params
    :return: qualifier string or None
    """
    version = module.params['version']
    if version > 0:
        return str(version)
    alias = module.params['alias']
    if alias:
        return str(alias)
    return None
# ---------------------------------------------------------------------------------------------------
#
# Lambda Event Handlers
#
# This section defines a lambda_event_X function where X is an AWS service capable of initiating
# the execution of a Lambda function (pull only).
#
# ---------------------------------------------------------------------------------------------------
def lambda_event_stream(module, aws):
    """
    Adds, updates or deletes lambda stream (DynamoDb, Kinesis) event notifications.

    :param module: AnsibleModule instance
    :param aws: AWSConnection instance providing the boto3 'lambda' client
    :return: snake_cased dict with a 'changed' flag and the resulting 'events' facts
    """
    client = aws.client('lambda')
    facts = dict()
    changed = False
    current_state = 'absent'
    state = module.params['state']
    api_params = dict(FunctionName=module.params['lambda_function_arn'])
    # check if required sub-parameters are present and valid
    source_params = module.params['source_params']
    source_arn = source_params.get('source_arn')
    if source_arn:
        api_params.update(EventSourceArn=source_arn)
    else:
        module.fail_json(msg="Source parameter 'source_arn' is required for stream event notification.")
    # check if optional sub-parameters are valid, if present
    batch_size = source_params.get('batch_size')
    if batch_size:
        try:
            source_params['batch_size'] = int(batch_size)
        except ValueError:
            module.fail_json(msg="Source parameter 'batch_size' must be an integer, found: {0}".format(source_params['batch_size']))
    # optional boolean value needs special treatment as not present does not imply False
    source_param_enabled = module.boolean(source_params.get('enabled', 'True'))
    # check if event mapping exist
    try:
        facts = client.list_event_source_mappings(**api_params)['EventSourceMappings']
        if facts:
            current_state = 'present'
    except ClientError as e:
        module.fail_json(msg='Error retrieving stream event notification configuration: {0}'.format(e))
    if state == 'present':
        if current_state == 'absent':
            # create a new mapping; 'starting_position' is mandatory only here
            starting_position = source_params.get('starting_position')
            if starting_position:
                api_params.update(StartingPosition=starting_position)
            else:
                module.fail_json(msg="Source parameter 'starting_position' is required for stream event notification.")
            if source_arn:
                api_params.update(Enabled=source_param_enabled)
            if source_params.get('batch_size'):
                api_params.update(BatchSize=source_params.get('batch_size'))
            try:
                if not module.check_mode:
                    facts = client.create_event_source_mapping(**api_params)
                changed = True
            except (ClientError, ParamValidationError, MissingParametersError) as e:
                module.fail_json(msg='Error creating stream source event mapping: {0}'.format(e))
        else:
            # current_state is 'present': update the existing mapping in place
            api_params = dict(FunctionName=module.params['lambda_function_arn'])
            current_mapping = facts[0]
            api_params.update(UUID=current_mapping['UUID'])
            mapping_changed = False
            # check if anything changed
            if source_params.get('batch_size') and source_params['batch_size'] != current_mapping['BatchSize']:
                api_params.update(BatchSize=source_params['batch_size'])
                mapping_changed = True
            if source_param_enabled is not None:
                if source_param_enabled:
                    # only flip the flag when the mapping is not already enabled/enabling
                    if current_mapping['State'] not in ('Enabled', 'Enabling'):
                        api_params.update(Enabled=True)
                        mapping_changed = True
                else:
                    if current_mapping['State'] not in ('Disabled', 'Disabling'):
                        api_params.update(Enabled=False)
                        mapping_changed = True
            if mapping_changed:
                try:
                    if not module.check_mode:
                        facts = client.update_event_source_mapping(**api_params)
                    changed = True
                except (ClientError, ParamValidationError, MissingParametersError) as e:
                    module.fail_json(msg='Error updating stream source event mapping: {0}'.format(e))
    else:
        if current_state == 'present':
            # remove the stream event mapping
            api_params = dict(UUID=facts[0]['UUID'])
            try:
                if not module.check_mode:
                    facts = client.delete_event_source_mapping(**api_params)
                changed = True
            except (ClientError, ParamValidationError, MissingParametersError) as e:
                module.fail_json(msg='Error removing stream source event mapping: {0}'.format(e))
    # NOTE(review): 'facts' is a list after list_event_source_mappings but a
    # single mapping dict after create/update/delete -- confirm callers
    # tolerate both shapes before relying on it.
    return camel_dict_to_snake_dict(dict(changed=changed, events=facts))
def main():
    """Module entry point: dispatch to the lambda_event_<source> handler.

    Builds the argument spec, validates parameters, then looks up the handler
    function for the requested event source (currently only 'stream') by name
    on this module and returns its result via exit_json.
    """
    this_module = sys.modules[__name__]
    source_choices = ["stream"]
    argument_spec = ec2_argument_spec()
    argument_spec.update(
        dict(
            state=dict(required=False, default='present', choices=['present', 'absent']),
            lambda_function_arn=dict(required=True, default=None, aliases=['function_name', 'function_arn']),
            event_source=dict(required=False, default="stream", choices=source_choices),
            source_params=dict(type='dict', required=True, default=None),
            alias=dict(required=False, default=None),
            version=dict(type='int', required=False, default=0),
        )
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        mutually_exclusive=[['alias', 'version']],
        required_together=[]
    )
    # validate dependencies
    if not HAS_BOTO3:
        module.fail_json(msg='boto3 is required for this module.')
    aws = AWSConnection(module, ['lambda'])
    validate_params(module, aws)
    # e.g. event_source 'stream' dispatches to lambda_event_stream()
    this_module_function = getattr(this_module, 'lambda_event_{0}'.format(module.params['event_source'].lower()))
    results = this_module_function(module, aws)
    module.exit_json(**results)
if __name__ == '__main__':
main()
| gpl-3.0 |
hgranlund/py-chess-engine | docs/source/conf.py | 1 | 8705 | # -*- coding: utf-8 -*-
# This file is based upon the file generated by sphinx-quickstart. However,
# where sphinx-quickstart hardcodes values in this file that you input, this
# file has been changed to pull from your module's metadata module.
#
# This file is execfile()d with the current directory set to its containing
# dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../..'))
# Import project metadata
from pychess_engine import metadata
# -- General configuration ----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx',
'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.viewcode']
# show todos
todo_include_todos = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = metadata.project
copyright = metadata.copyright
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = metadata.version
# The full version, including alpha/beta/rc tags.
release = metadata.version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = metadata.project_no_spaces + 'doc'
# -- Options for LaTeX output -------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author,
# documentclass [howto/manual]).
latex_documents = [
('index', metadata.project_no_spaces + '.tex',
metadata.project + ' Documentation', metadata.authors_string,
'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output -------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', metadata.package, metadata.project + ' Documentation',
metadata.authors_string, 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', metadata.project_no_spaces,
metadata.project + ' Documentation', metadata.authors_string,
metadata.project_no_spaces, metadata.description, 'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'python': ('http://docs.python.org/', None),
}
# Extra local configuration. This is useful for placing the class description
# in the class docstring and the __init__ parameter documentation in the
# __init__ docstring. See
# <http://sphinx-doc.org/ext/autodoc.html#confval-autoclass_content> for more
# information.
autoclass_content = 'both'
| mit |
jeenalee/servo | tests/heartbeats/characterize_android.py | 139 | 4036 | #!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import sys
import os
from os import path
import time
import datetime
import argparse
import subprocess
TOP_DIR = path.join("..", "..")
GUARD_TIME = 20
SUMMARY_OUTPUT = "summary.txt"
def get_command(layout_thread_count, renderer, page, profile):
    """Build the mach command line that runs Servo on Android with the
    heartbeat/script-event profilers enabled.
    """
    mach = path.join(TOP_DIR, "mach")
    options = (" run --android"
               " -p %d -o /sdcard/servo/output.png -y %d %s -Z profile-script-events,profile-heartbeats '%s'"
               % (profile, layout_thread_count, renderer, page))
    return mach + options
def git_rev_hash():
    """Return the current git revision hash (full form)."""
    output = subprocess.check_output(['git', 'rev-parse', 'HEAD'])
    return output.rstrip()
def git_rev_hash_short():
    """Return the current git revision hash (abbreviated form)."""
    output = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD'])
    return output.rstrip()
def execute(base_dir, renderer, page, profile, trial, layout_thread_count):
    """Run a single execution.

    Refuses to overwrite an existing log directory, runs the mach command,
    sleeps GUARD_TIME seconds between runs, then writes a summary file
    describing the execution.
    """
    log_dir = path.join(base_dir, "logs_l" + str(layout_thread_count),
                        "trial_" + str(trial))
    if os.path.exists(log_dir):
        print "Log directory already exists: " + log_dir
        sys.exit(1)
    os.makedirs(log_dir)
    # Execute
    cmd = get_command(layout_thread_count, renderer, page, profile)
    print cmd
    os.system(cmd)
    print 'sleep ' + str(GUARD_TIME)
    time.sleep(GUARD_TIME)
    # Write a file that describes this execution
    with open(path.join(log_dir, SUMMARY_OUTPUT), "w") as f:
        f.write("Datetime (UTC): " + datetime.datetime.utcnow().isoformat())
        f.write("\nPlatform: Android")
        f.write("\nGit hash: " + git_rev_hash())
        f.write("\nGit short hash: " + git_rev_hash_short())
        f.write("\nLayout threads: " + str(layout_thread_count))
        f.write("\nTrial: " + str(trial))
        f.write("\nCommand: " + cmd)
def main():
    """Parse the command line and run one characterization execution.

    For this script to be useful, the following conditions are needed:
    - Build servo for Android in release mode with the "energy-profiling"
      feature enabled.
    """
    # Default number of layout threads
    layout_threads = 1
    # Default benchmark
    benchmark = "https://www.mozilla.org/"
    # Default renderer
    renderer = ""
    # Default output directory
    output_dir = "heartbeat_logs"
    # Default profile interval
    profile = 60

    # Parsing the input of the script
    parser = argparse.ArgumentParser(description="Characterize Servo timing and energy behavior on Android")
    parser.add_argument("-b", "--benchmark",
                        default=benchmark,
                        help="Gets the benchmark, for example \"-b http://www.example.com\"")
    parser.add_argument("-w", "--webrender",
                        action='store_true',
                        help="Use webrender backend")
    parser.add_argument("-l", "--layout_threads",
                        help="Specify the number of threads for layout, for example \"-l 5\"")
    parser.add_argument("-o", "--output",
                        help="Specify the log output directory, for example \"-o heartbeat_logs\"")
    # BUG FIX: without type=int, a user-supplied "-p" value stays a string and
    # get_command's "%d" formatting raises TypeError. The default (60) was
    # already an int, so only CLI-provided values were broken.
    parser.add_argument("-p", "--profile",
                        type=int,
                        default=60,
                        help="Profiler output interval, for example \"-p 60\"")
    args = parser.parse_args()
    if args.benchmark:
        benchmark = args.benchmark
    if args.webrender:
        renderer = "-w"
    if args.layout_threads:
        layout_threads = int(args.layout_threads)
    if args.output:
        output_dir = args.output
    if args.profile:
        profile = args.profile

    # Refuse to overwrite logs from a previous run.
    if os.path.exists(output_dir):
        print("Output directory already exists: " + output_dir)
        sys.exit(1)
    os.makedirs(output_dir)

    execute(output_dir, renderer, benchmark, profile, 1, layout_threads)


if __name__ == "__main__":
    main()
| mpl-2.0 |
rven/odoo | addons/im_livechat/models/im_livechat_channel.py | 1 | 15553 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import base64
import random
import re
from odoo import api, fields, models, modules, _
class ImLivechatChannel(models.Model):
    """ Livechat Channel

    Define a communication channel, which can be accessed with 'script_external' (script tag to put on
    external website), 'script_internal' (code to be integrated with odoo website) or via 'web_page' link.
    It provides rating tools, and access rules for anonymous people.
    """

    _name = 'im_livechat.channel'
    _inherit = ['rating.parent.mixin']
    _description = 'Livechat Channel'
    _rating_satisfaction_days = 7  # include only last 7 days to compute satisfaction

    def _default_image(self):
        # Ship the module's bundled avatar as the default channel image.
        image_path = modules.get_module_resource('im_livechat', 'static/src/img', 'default.png')
        return base64.b64encode(open(image_path, 'rb').read())

    def _default_user_ids(self):
        # The user creating the channel is its first operator ((6, 0, ids)
        # replaces the whole many2many with the given ids).
        return [(6, 0, [self._uid])]

    # attribute fields
    name = fields.Char('Name', required=True, help="The name of the channel")
    button_text = fields.Char('Text of the Button', default='Have a Question? Chat with us.',
                              help="Default text displayed on the Livechat Support Button")
    default_message = fields.Char('Welcome Message', default='How may I help you?',
                                  help="This is an automated 'welcome' message that your visitor will see when they initiate a new conversation.")
    input_placeholder = fields.Char('Chat Input Placeholder', help='Text that prompts the user to initiate the chat.')
    header_background_color = fields.Char(default="#875A7B", help="Default background color of the channel header once open")
    title_color = fields.Char(default="#FFFFFF", help="Default title color of the channel once open")
    button_background_color = fields.Char(default="#878787", help="Default background color of the Livechat button")
    button_text_color = fields.Char(default="#FFFFFF", help="Default text color of the Livechat button")

    # computed fields
    web_page = fields.Char('Web Page', compute='_compute_web_page_link', store=False, readonly=True,
                           help="URL to a static page where you client can discuss with the operator of the channel.")
    are_you_inside = fields.Boolean(string='Are you inside the matrix?',
                                    compute='_are_you_inside', store=False, readonly=True)
    script_external = fields.Text('Script (external)', compute='_compute_script_external', store=False, readonly=True)
    nbr_channel = fields.Integer('Number of conversation', compute='_compute_nbr_channel', store=False, readonly=True)
    image_128 = fields.Image("Image", max_width=128, max_height=128, default=_default_image)

    # relational fields
    user_ids = fields.Many2many('res.users', 'im_livechat_channel_im_user', 'channel_id', 'user_id', string='Operators', default=_default_user_ids)
    channel_ids = fields.One2many('mail.channel', 'livechat_channel_id', 'Sessions')
    rule_ids = fields.One2many('im_livechat.channel.rule', 'channel_id', 'Rules')

    def _are_you_inside(self):
        # The current user is "inside" when they are one of the operators.
        for channel in self:
            channel.are_you_inside = bool(self.env.uid in [u.id for u in channel.user_ids])

    def _compute_script_external(self):
        # Render the loader snippet a third-party website embeds to display
        # this channel; the template needs the server URL and database name.
        view = self.env['ir.model.data'].get_object('im_livechat', 'external_loader')
        values = {
            "url": self.env['ir.config_parameter'].sudo().get_param('web.base.url'),
            "dbname": self._cr.dbname,
        }
        for record in self:
            values["channel_id"] = record.id
            # Unsaved records have no id yet, so no script can be generated.
            record.script_external = view._render(values) if record.id else False

    def _compute_web_page_link(self):
        # Public support-page URL; only meaningful for saved records.
        base_url = self.env['ir.config_parameter'].sudo().get_param('web.base.url')
        for record in self:
            record.web_page = "%s/im_livechat/support/%i" % (base_url, record.id) if record.id else False

    @api.depends('channel_ids')
    def _compute_nbr_channel(self):
        # Count, per livechat channel, the sessions holding at least one message.
        data = self.env['mail.channel'].read_group([
            ('livechat_channel_id', 'in', self._ids),
            ('channel_message_ids', '!=', False)], ['__count'], ['livechat_channel_id'], lazy=False)
        channel_count = {x['livechat_channel_id'][0]: x['__count'] for x in data}
        for record in self:
            record.nbr_channel = channel_count.get(record.id, 0)

    # --------------------------
    # Action Methods
    # --------------------------

    def action_join(self):
        """Add the current user to this channel's operators."""
        self.ensure_one()
        return self.write({'user_ids': [(4, self._uid)]})

    def action_quit(self):
        """Remove the current user from this channel's operators."""
        self.ensure_one()
        return self.write({'user_ids': [(3, self._uid)]})

    def action_view_rating(self):
        """ Action to display the rating relative to the channel, so all rating of the
            sessions of the current channel
            :returns : the ir.action 'action_view_rating' with the correct domain
        """
        self.ensure_one()
        action = self.env['ir.actions.act_window']._for_xml_id('im_livechat.rating_rating_action_view_livechat_rating')
        action['domain'] = [('parent_res_id', '=', self.id), ('parent_res_model', '=', 'im_livechat.channel')]
        return action

    # --------------------------
    # Channel Methods
    # --------------------------

    def _get_available_users(self):
        """ get available user of a given channel
            :returns : the res.users having their im_status online
        """
        self.ensure_one()
        return self.user_ids.filtered(lambda user: user.im_status == 'online')

    def _get_livechat_mail_channel_vals(self, anonymous_name, operator, user_id=None, country_id=None):
        """Build the create() values for the livechat session (mail.channel).

        The operator's partner is always a member; the visitor's partner is
        added only when the visitor is a valid logged-in (active) user.
        """
        # partner to add to the mail.channel
        operator_partner_id = operator.partner_id.id
        channel_partner_to_add = [(4, operator_partner_id)]
        visitor_user = False
        if user_id:
            visitor_user = self.env['res.users'].browse(user_id)
            if visitor_user and visitor_user.active:  # valid session user (not public)
                channel_partner_to_add.append((4, visitor_user.partner_id.id))
        return {
            'channel_partner_ids': channel_partner_to_add,
            'livechat_active': True,
            'livechat_operator_id': operator_partner_id,
            'livechat_channel_id': self.id,
            # anonymous_name is only relevant for non-logged-in visitors
            'anonymous_name': False if user_id else anonymous_name,
            'country_id': country_id,
            'channel_type': 'livechat',
            'name': ' '.join([visitor_user.display_name if visitor_user else anonymous_name, operator.livechat_username if operator.livechat_username else operator.name]),
            'public': 'private',
            'email_send': False,
        }

    def _open_livechat_mail_channel(self, anonymous_name, previous_operator_id=None, user_id=None, country_id=None):
        """ Return a mail.channel given a livechat channel. It creates one with a connected operator, or return false otherwise
            :param anonymous_name : the name of the anonymous person of the channel
            :param previous_operator_id : partner_id.id of the previous operator that this visitor had in the past
            :param user_id : the id of the logged in visitor, if any
            :param country_id : the country of the anonymous person of the channel
            :type anonymous_name : str
            :return : channel header
            :rtype : dict

            If this visitor already had an operator within the last 7 days (information stored with the 'im_livechat_previous_operator_pid' cookie),
            the system will first try to assign that operator if he's available (to improve user experience).
        """
        self.ensure_one()
        operator = False
        if previous_operator_id:
            available_users = self._get_available_users()
            # previous_operator_id is the partner_id of the previous operator, need to convert to user
            if previous_operator_id in available_users.mapped('partner_id').ids:
                operator = next(available_user for available_user in available_users if available_user.partner_id.id == previous_operator_id)
        if not operator:
            operator = self._get_random_operator()
        if not operator:
            # no one available
            return False

        # create the session, and add the link with the given channel
        mail_channel_vals = self._get_livechat_mail_channel_vals(anonymous_name, operator, user_id=user_id, country_id=country_id)
        mail_channel = self.env["mail.channel"].with_context(mail_create_nosubscribe=False).sudo().create(mail_channel_vals)
        mail_channel._broadcast([operator.partner_id.id])
        return mail_channel.sudo().channel_info()[0]

    def _get_random_operator(self):
        """ Return a random operator from the available users of the channel that have the lowest number of active livechats.

        A livechat is considered 'active' if it has at least one message within the last 30 minutes.

        (Some annoying conversions have to be made on the fly because this model holds 'res.users' as available operators
        and the mail_channel model stores the partner_id of the randomly selected operator)

        :return : user
        :rtype : res.users
        """
        operators = self._get_available_users()
        if len(operators) == 0:
            return False

        # Count active sessions per operator partner over the last 30 minutes,
        # least busy first.
        self.env.cr.execute("""SELECT COUNT(DISTINCT c.id), c.livechat_operator_id
            FROM mail_channel c
            LEFT OUTER JOIN mail_message_mail_channel_rel r ON c.id = r.mail_channel_id
            LEFT OUTER JOIN mail_message m ON r.mail_message_id = m.id
            WHERE c.channel_type = 'livechat'
            AND c.livechat_operator_id in %s
            AND m.create_date > ((now() at time zone 'UTC') - interval '30 minutes')
            GROUP BY c.livechat_operator_id
            ORDER BY COUNT(DISTINCT c.id) asc""", (tuple(operators.mapped('partner_id').ids),))
        active_channels = self.env.cr.dictfetchall()

        # If inactive operator(s), return one of them
        active_channel_operator_ids = [active_channel['livechat_operator_id'] for active_channel in active_channels]
        inactive_operators = [operator for operator in operators if operator.partner_id.id not in active_channel_operator_ids]
        if inactive_operators:
            return random.choice(inactive_operators)

        # If no inactive operator, active_channels is not empty as len(operators) > 0 (see above).
        # Get the less active operator using the active_channels first element's count (since they are sorted 'ascending')
        lowest_number_of_conversations = active_channels[0]['count']
        less_active_operator = random.choice([
            active_channel['livechat_operator_id'] for active_channel in active_channels
            if active_channel['count'] == lowest_number_of_conversations])

        # convert the selected 'partner_id' to its corresponding res.users
        return next(operator for operator in operators if operator.partner_id.id == less_active_operator)

    def _get_channel_infos(self):
        """Return the public UI configuration of this channel (colors/texts)."""
        self.ensure_one()
        return {
            'header_background_color': self.header_background_color,
            'button_background_color': self.button_background_color,
            'title_color': self.title_color,
            'button_text_color': self.button_text_color,
            'button_text': self.button_text,
            'input_placeholder': self.input_placeholder,
            'default_message': self.default_message,
            "channel_name": self.name,
            "channel_id": self.id,
        }

    def get_livechat_info(self, username='Visitor'):
        """Return the info the livechat widget needs: availability, server
        URL and, when an operator is online, the channel UI options."""
        self.ensure_one()

        if username == 'Visitor':
            # Translate the default username only; a custom one is kept as-is.
            username = _('Visitor')
        info = {}
        info['available'] = len(self._get_available_users()) > 0
        info['server_url'] = self.env['ir.config_parameter'].sudo().get_param('web.base.url')
        if info['available']:
            info['options'] = self._get_channel_infos()
            info['options']['current_partner_id'] = self.env.user.partner_id.id
            info['options']["default_username"] = username
        return info
class ImLivechatChannelRule(models.Model):
    """ Channel Rules

    Rules defining access to the channel (countries, and url matching). It also provides the 'auto pop'
    option to open automatically the conversation.
    """

    _name = 'im_livechat.channel.rule'
    _description = 'Livechat Channel Rules'
    _order = 'sequence asc'

    regex_url = fields.Char('URL Regex',
                            help="Regular expression specifying the web pages this rule will be applied on.")
    action = fields.Selection([('display_button', 'Display the button'), ('auto_popup', 'Auto popup'), ('hide_button', 'Hide the button')],
                              string='Action', required=True, default='display_button',
                              help="* 'Display the button' displays the chat button on the pages.\n"\
                                   "* 'Auto popup' displays the button and automatically open the conversation pane.\n"\
                                   "* 'Hide the button' hides the chat button on the pages.")
    auto_popup_timer = fields.Integer('Auto popup timer', default=0,
                                      help="Delay (in seconds) to automatically open the conversation window. Note: the selected action must be 'Auto popup' otherwise this parameter will not be taken into account.")
    channel_id = fields.Many2one('im_livechat.channel', 'Channel',
                                 help="The channel of the rule")
    country_ids = fields.Many2many('res.country', 'im_livechat_channel_country_rel', 'channel_id', 'country_id', 'Country',
                                   help="The rule will only be applied for these countries. Example: if you select 'Belgium' and 'United States' and that you set the action to 'Hide Button', the chat button will be hidden on the specified URL from the visitors located in these 2 countries. This feature requires GeoIP installed on your server.")
    sequence = fields.Integer('Matching order', default=10,
                              help="Given the order to find a matching rule. If 2 rules are matching for the given url/country, the one with the lowest sequence will be chosen.")

    def match_rule(self, channel_id, url, country_id=False):
        """ determine if a rule of the given channel matches with the given url
            :param channel_id : the identifier of the channel_id
            :param url : the url to match with a rule
            :param country_id : the identifier of the country
            :returns the rule that matches the given condition. False otherwise.
            :rtype : im_livechat.channel.rule
        """
        def _match(rules):
            # Return the first rule whose regex matches the url; an unset
            # regex_url behaves as a catch-all since '' matches anything.
            for rule in rules:
                # url might not be set because it comes from referer, in that
                # case match the first rule with no regex_url
                if re.search(rule.regex_url or '', url or ''):
                    return rule
            return False
        # first, search the country specific rules (the first match is returned)
        if country_id:  # don't include the country in the search if geoIP is not installed
            domain = [('country_ids', 'in', [country_id]), ('channel_id', '=', channel_id)]
            rule = _match(self.search(domain))
            if rule:
                return rule
        # second, fallback on the rules without country
        domain = [('country_ids', '=', False), ('channel_id', '=', channel_id)]
        return _match(self.search(domain))
| agpl-3.0 |
oshtaier/robottelo | tests/foreman/api/test_hostcollection.py | 3 | 2378 | """Unit tests for host collections."""
from robottelo.common.decorators import skip_if_bug_open
from robottelo import entities
from robottelo.test import APITestCase
# (too-many-public-methods) pylint:disable=R0904
class HostCollectionTestCase(APITestCase):
    """Tests for :class:`robottelo.entities.HostCollection`."""

    @classmethod
    def setUpClass(cls):
        """Create systems that can be shared by tests."""
        cls.org_id = entities.Organization().create_json()['id']
        # Two content hosts registered to the same organization, created once
        # and reused by every test in this case.
        cls.system_uuids = [
            entities.System(organization=cls.org_id).create_json()['id']
            for _
            in range(2)
        ]

    def test_create_with_system(self):
        """@Test: Create a host collection that contains a content host.

        @Feature: HostCollection

        @Assert: The host collection can be read back, and it includes one
        content host.
        """
        hc_id = entities.HostCollection(
            organization=self.org_id,
            system=[self.system_uuids[0]],
        ).create_json()['id']
        self.assertEqual(
            len(entities.HostCollection(id=hc_id).read().system),
            1
        )

    def test_create_with_systems(self):
        """@Test: Create a host collection that contains content hosts.

        @Feature: HostCollection

        @Assert: The host collection can be read back, and it references two
        content hosts.
        """
        hc_id = entities.HostCollection(
            organization=self.org_id,
            system=self.system_uuids,
        ).create_json()['id']
        self.assertEqual(
            len(entities.HostCollection(id=hc_id).read().system),
            len(self.system_uuids),
        )

    @skip_if_bug_open('bugzilla', 1203323)
    def test_read_system_ids(self):
        """@Test: Read a host collection and look at the ``system_ids`` field.

        @Feature: HostCollection

        @Assert: The ``system_ids`` field matches the system IDs passed in when
        creating the host collection.
        """
        hc_id = entities.HostCollection(
            organization=self.org_id,
            system=self.system_uuids,
        ).create_json()['id']
        hc_attrs = entities.HostCollection(id=hc_id).read_json()
        self.assertEqual(
            frozenset(hc_attrs['system_ids']),
            frozenset(self.system_uuids),
        )
| gpl-3.0 |
2014c2g5/2015cda_g3_0421 | static/Brython3.1.1-20150328-091302/Lib/configparser.py | 692 | 50025 | """Configuration file parser.
A configuration file consists of sections, lead by a "[section]" header,
and followed by "name: value" entries, with continuations and such in
the style of RFC 822.
Intrinsic defaults can be specified by passing them into the
ConfigParser constructor as a dictionary.
class:
ConfigParser -- responsible for parsing a list of
configuration files, and managing the parsed database.
methods:
__init__(defaults=None, dict_type=_default_dict, allow_no_value=False,
delimiters=('=', ':'), comment_prefixes=('#', ';'),
inline_comment_prefixes=None, strict=True,
empty_lines_in_values=True):
Create the parser. When `defaults' is given, it is initialized into the
dictionary or intrinsic defaults. The keys must be strings, the values
must be appropriate for %()s string interpolation.
When `dict_type' is given, it will be used to create the dictionary
objects for the list of sections, for the options within a section, and
for the default values.
When `delimiters' is given, it will be used as the set of substrings
that divide keys from values.
When `comment_prefixes' is given, it will be used as the set of
substrings that prefix comments in empty lines. Comments can be
indented.
When `inline_comment_prefixes' is given, it will be used as the set of
substrings that prefix comments in non-empty lines.
When `strict` is True, the parser won't allow for any section or option
duplicates while reading from a single source (file, string or
dictionary). Default is True.
When `empty_lines_in_values' is False (default: True), each empty line
marks the end of an option. Otherwise, internal empty lines of
a multiline option are kept as part of the value.
When `allow_no_value' is True (default: False), options without
values are accepted; the value presented for these is None.
sections()
Return all the configuration section names, sans DEFAULT.
has_section(section)
Return whether the given section exists.
has_option(section, option)
Return whether the given option exists in the given section.
options(section)
Return list of configuration options for the named section.
read(filenames, encoding=None)
Read and parse the list of named configuration files, given by
name. A single filename is also allowed. Non-existing files
are ignored. Return list of successfully read files.
read_file(f, filename=None)
Read and parse one configuration file, given as a file object.
The filename defaults to f.name; it is only used in error
messages (if f has no `name' attribute, the string `<???>' is used).
read_string(string)
Read configuration from a given string.
read_dict(dictionary)
Read configuration from a dictionary. Keys are section names,
values are dictionaries with keys and values that should be present
in the section. If the used dictionary type preserves order, sections
and their keys will be added in order. Values are automatically
converted to strings.
get(section, option, raw=False, vars=None, fallback=_UNSET)
Return a string value for the named option. All % interpolations are
expanded in the return values, based on the defaults passed into the
constructor and the DEFAULT section. Additional substitutions may be
provided using the `vars' argument, which must be a dictionary whose
contents override any pre-existing defaults. If `option' is a key in
`vars', the value from `vars' is used.
getint(section, options, raw=False, vars=None, fallback=_UNSET)
Like get(), but convert value to an integer.
getfloat(section, options, raw=False, vars=None, fallback=_UNSET)
Like get(), but convert value to a float.
getboolean(section, options, raw=False, vars=None, fallback=_UNSET)
Like get(), but convert value to a boolean (currently case
insensitively defined as 0, false, no, off for False, and 1, true,
yes, on for True). Returns False or True.
items(section=_UNSET, raw=False, vars=None)
If section is given, return a list of tuples with (name, value) for
each option in the section. Otherwise, return a list of tuples with
(section_name, section_proxy) for each section, including DEFAULTSECT.
remove_section(section)
Remove the given file section and all its options.
remove_option(section, option)
Remove the given option from the given section.
set(section, option, value)
Set the given option.
write(fp, space_around_delimiters=True)
Write the configuration state in .ini format. If
`space_around_delimiters' is True (the default), delimiters
between keys and values are surrounded by spaces.
"""
from collections.abc import MutableMapping
from collections import OrderedDict as _default_dict, ChainMap as _ChainMap
import functools
import io
import itertools
import re
import sys
import warnings
# Names exported by `from configparser import *`.
__all__ = ["NoSectionError", "DuplicateOptionError", "DuplicateSectionError",
           "NoOptionError", "InterpolationError", "InterpolationDepthError",
           "InterpolationSyntaxError", "ParsingError",
           "MissingSectionHeaderError",
           "ConfigParser", "SafeConfigParser", "RawConfigParser",
           "DEFAULTSECT", "MAX_INTERPOLATION_DEPTH"]

# Name of the implicit section whose options act as defaults for all sections.
DEFAULTSECT = "DEFAULT"

# Hard limit on recursive value interpolation; exceeding it raises
# InterpolationDepthError.
MAX_INTERPOLATION_DEPTH = 10
# exception classes
class Error(Exception):
    """Base class for ConfigParser exceptions."""

    # BaseException.message has been deprecated since Python 2.6. To prevent
    # DeprecationWarning from popping up over this pre-existing attribute,
    # shadow it with a property that takes lookup precedence.
    def _get_message(self):
        """Getter for 'message'; needed only to override deprecation in
        BaseException.
        """
        return self.__message

    def _set_message(self, value):
        """Setter for 'message'; needed only to override deprecation in
        BaseException.
        """
        self.__message = value

    message = property(_get_message, _set_message)

    def __init__(self, msg=''):
        self.message = msg
        Exception.__init__(self, msg)

    def __repr__(self):
        return self.message

    # The string form of the error is simply its message.
    __str__ = __repr__
class NoSectionError(Error):
    """Raised when no section matches a requested option."""

    def __init__(self, section):
        text = 'No section: %r' % (section,)
        Error.__init__(self, text)
        self.section = section
        self.args = (section, )
class DuplicateSectionError(Error):
    """Raised when a section is repeated in an input source.

    Possible repetitions that raise this exception are: multiple creation
    using the API or in strict parsers when a section is found more than once
    in a single input file, string or dictionary.
    """

    def __init__(self, section, source=None, lineno=None):
        # With a source, describe where the duplicate was read from;
        # without one, this was an API-level duplicate.
        tail = repr(section) + " already exists"
        if source is not None:
            text = "While reading from " + source
            if lineno is not None:
                text += " [line {0:2d}]".format(lineno)
            text += ": section " + tail
        else:
            text = "Section " + tail
        Error.__init__(self, text)
        self.section = section
        self.source = source
        self.lineno = lineno
        self.args = (section, source, lineno)
class DuplicateOptionError(Error):
    """Raised by strict parsers when an option is repeated in an input source.

    Current implementation raises this exception only when an option is found
    more than once in a single file, string or dictionary.
    """

    def __init__(self, section, option, source=None, lineno=None):
        # With a source, describe where the duplicate was read from;
        # without one, this was an API-level duplicate.
        tail = (repr(option) + " in section " + repr(section)
                + " already exists")
        if source is not None:
            text = "While reading from " + source
            if lineno is not None:
                text += " [line {0:2d}]".format(lineno)
            text += ": option " + tail
        else:
            text = "Option " + tail
        Error.__init__(self, text)
        self.section = section
        self.option = option
        self.source = source
        self.lineno = lineno
        self.args = (section, option, source, lineno)
class NoOptionError(Error):
    """A requested option was not found."""

    def __init__(self, option, section):
        text = "No option %r in section: %r" % (option, section)
        Error.__init__(self, text)
        self.option = option
        self.section = section
        self.args = (option, section)
class InterpolationError(Error):
    """Base class for interpolation-related exceptions."""

    def __init__(self, option, section, msg):
        # Remember which option/section triggered the failure.
        self.option = option
        self.section = section
        Error.__init__(self, msg)
        self.args = (option, section, msg)
class InterpolationMissingOptionError(InterpolationError):
    """A string substitution required a setting which was not available."""

    def __init__(self, option, section, rawval, reference):
        text = ("Bad value substitution:\n"
                "\tsection: [%s]\n"
                "\toption : %s\n"
                "\tkey : %s\n"
                "\trawval : %s\n") % (section, option, reference, rawval)
        InterpolationError.__init__(self, option, section, text)
        self.reference = reference
        self.args = (option, section, rawval, reference)
class InterpolationSyntaxError(InterpolationError):
    """Raised when the source text contains invalid syntax.

    Current implementation raises this exception when the source text into
    which substitutions are made does not conform to the required syntax.
    """
class InterpolationDepthError(InterpolationError):
    """Raised when substitutions are nested too deeply."""

    def __init__(self, option, section, rawval):
        text = ("Value interpolation too deeply recursive:\n"
                "\tsection: [%s]\n"
                "\toption : %s\n"
                "\trawval : %s\n") % (section, option, rawval)
        InterpolationError.__init__(self, option, section, text)
        self.args = (option, section, rawval)
class ParsingError(Error):
    """Raised when a configuration file does not follow legal syntax."""

    def __init__(self, source=None, filename=None):
        # Exactly one of `source'/`filename' arguments has to be given.
        # `filename' kept for compatibility.
        if filename and source:
            raise ValueError("Cannot specify both `filename' and `source'. "
                             "Use `source'.")
        elif not filename and not source:
            raise ValueError("Required argument `source' not given.")
        elif filename:
            source = filename
        Error.__init__(self, 'Source contains parsing errors: %s' % source)
        self.source = source
        # List of (lineno, line) tuples accumulated via append().
        self.errors = []
        self.args = (source, )

    @property
    def filename(self):
        """Deprecated, use `source'."""
        warnings.warn(
            "The 'filename' attribute will be removed in future versions. "
            "Use 'source' instead.",
            DeprecationWarning, stacklevel=2
        )
        return self.source

    @filename.setter
    def filename(self, value):
        """Deprecated, use `source'."""
        warnings.warn(
            "The 'filename' attribute will be removed in future versions. "
            "Use 'source' instead.",
            DeprecationWarning, stacklevel=2
        )
        self.source = value

    def append(self, lineno, line):
        # Record one offending line and extend the exception message with it.
        self.errors.append((lineno, line))
        self.message += '\n\t[line %2d]: %s' % (lineno, line)
class MissingSectionHeaderError(ParsingError):
    """Raised when a key-value pair is found before any section header."""

    def __init__(self, filename, lineno, line):
        # Calls Error.__init__ directly rather than ParsingError.__init__:
        # the message here is fully pre-formatted.
        template = 'File contains no section headers.\nfile: %s, line: %d\n%r'
        Error.__init__(self, template % (filename, lineno, line))
        self.source = filename
        self.lineno = lineno
        self.line = line
        self.args = (filename, lineno, line)
# Used in parser getters to indicate that the default behaviour, when a
# specific option is not found, is to raise an exception. Created to enable
# `None' as a valid fallback value.
_UNSET = object()
class Interpolation:
    """No-op interpolation: every hook returns the value unchanged."""

    def before_get(self, parser, section, option, value, defaults):
        """Hook applied when a value is read back; identity here."""
        return value

    def before_read(self, parser, section, option, value):
        """Hook applied when a value is parsed from a source; identity here."""
        return value

    def before_set(self, parser, section, option, value):
        """Hook applied when a value is assigned; identity here."""
        return value

    def before_write(self, parser, section, option, value):
        """Hook applied when a value is serialized out; identity here."""
        return value
class BasicInterpolation(Interpolation):
    """Interpolation as implemented in the classic ConfigParser.

    The option values can contain format strings which refer to other values in
    the same section, or values in the special default section.

    For example:

        something: %(dir)s/whatever

    would resolve the "%(dir)s" to the value of dir. All reference
    expansions are done late, on demand. If a user needs to use a bare % in
    a configuration file, she can escape it by writing %%. Other % usage
    is considered a user error and raises `InterpolationSyntaxError'."""

    # Matches a reference of the form %(option)s.
    _KEYCRE = re.compile(r"%\(([^)]+)\)s")

    def before_get(self, parser, section, option, value, defaults):
        # Expand all %(...)s references (recursively) before returning.
        L = []
        self._interpolate_some(parser, option, L, value, section, defaults, 1)
        return ''.join(L)

    def before_set(self, parser, section, option, value):
        # Validate the raw value: after stripping escaped percents and valid
        # references, any remaining '%' is malformed.
        tmp_value = value.replace('%%', '')  # escaped percent signs
        tmp_value = self._KEYCRE.sub('', tmp_value)  # valid syntax
        if '%' in tmp_value:
            raise ValueError("invalid interpolation syntax in %r at "
                             "position %d" % (value, tmp_value.find('%')))
        return value

    def _interpolate_some(self, parser, option, accum, rest, section, map,
                          depth):
        # Recursive worker: append expanded fragments of `rest` onto `accum`.
        if depth > MAX_INTERPOLATION_DEPTH:
            raise InterpolationDepthError(option, section, rest)
        while rest:
            p = rest.find("%")
            if p < 0:
                # No more references; flush the remainder verbatim.
                accum.append(rest)
                return
            if p > 0:
                accum.append(rest[:p])
                rest = rest[p:]
            # p is no longer used
            c = rest[1:2]
            if c == "%":
                # '%%' is an escaped percent sign.
                accum.append("%")
                rest = rest[2:]
            elif c == "(":
                m = self._KEYCRE.match(rest)
                if m is None:
                    raise InterpolationSyntaxError(option, section,
                        "bad interpolation variable reference %r" % rest)
                var = parser.optionxform(m.group(1))
                rest = rest[m.end():]
                try:
                    v = map[var]
                except KeyError:
                    raise InterpolationMissingOptionError(
                        option, section, rest, var)
                if "%" in v:
                    # The referenced value itself contains references; recurse.
                    self._interpolate_some(parser, option, accum, v,
                                           section, map, depth + 1)
                else:
                    accum.append(v)
            else:
                raise InterpolationSyntaxError(
                    option, section,
                    "'%%' must be followed by '%%' or '(', "
                    "found: %r" % (rest,))
class ExtendedInterpolation(Interpolation):
    """Advanced variant of interpolation, supports the syntax used by
    `zc.buildout'. Enables interpolation between sections."""

    # Matches a reference of the form ${option} or ${section:option}.
    _KEYCRE = re.compile(r"\$\{([^}]+)\}")

    def before_get(self, parser, section, option, value, defaults):
        # Expand all ${...} references (recursively) before returning.
        L = []
        self._interpolate_some(parser, option, L, value, section, defaults, 1)
        return ''.join(L)

    def before_set(self, parser, section, option, value):
        # Validate the raw value: after stripping escaped dollars and valid
        # references, any remaining '$' is malformed.
        tmp_value = value.replace('$$', '')  # escaped dollar signs
        tmp_value = self._KEYCRE.sub('', tmp_value)  # valid syntax
        if '$' in tmp_value:
            raise ValueError("invalid interpolation syntax in %r at "
                             "position %d" % (value, tmp_value.find('%')))
        return value

    def _interpolate_some(self, parser, option, accum, rest, section, map,
                          depth):
        # Recursive worker: append expanded fragments of `rest` onto `accum`.
        if depth > MAX_INTERPOLATION_DEPTH:
            raise InterpolationDepthError(option, section, rest)
        while rest:
            p = rest.find("$")
            if p < 0:
                # No more references; flush the remainder verbatim.
                accum.append(rest)
                return
            if p > 0:
                accum.append(rest[:p])
                rest = rest[p:]
            # p is no longer used
            c = rest[1:2]
            if c == "$":
                # '$$' is an escaped dollar sign.
                accum.append("$")
                rest = rest[2:]
            elif c == "{":
                m = self._KEYCRE.match(rest)
                if m is None:
                    raise InterpolationSyntaxError(option, section,
                        "bad interpolation variable reference %r" % rest)
                path = m.group(1).split(':')
                rest = rest[m.end():]
                sect = section
                opt = option
                try:
                    if len(path) == 1:
                        # ${opt}: look up in the current defaults mapping.
                        opt = parser.optionxform(path[0])
                        v = map[opt]
                    elif len(path) == 2:
                        # ${section:opt}: cross-section lookup via the parser.
                        sect = path[0]
                        opt = parser.optionxform(path[1])
                        v = parser.get(sect, opt, raw=True)
                    else:
                        raise InterpolationSyntaxError(
                            option, section,
                            "More than one ':' found: %r" % (rest,))
                except (KeyError, NoSectionError, NoOptionError):
                    raise InterpolationMissingOptionError(
                        option, section, rest, ":".join(path))
                if "$" in v:
                    # Recurse with the referenced section's own raw items as
                    # the lookup map.
                    self._interpolate_some(parser, opt, accum, v, sect,
                                           dict(parser.items(sect, raw=True)),
                                           depth + 1)
                else:
                    accum.append(v)
            else:
                raise InterpolationSyntaxError(
                    option, section,
                    "'$' must be followed by '$' or '{', "
                    "found: %r" % (rest,))
class LegacyInterpolation(Interpolation):
    """Deprecated interpolation used in old versions of ConfigParser.
    Use BasicInterpolation or ExtendedInterpolation instead."""

    # Matches a %(name)s reference, or (via the '|.' alternative) any
    # single other character -- the sub() below rewrites references only.
    _KEYCRE = re.compile(r"%\(([^)]*)\)s|.")

    def before_get(self, parser, section, option, value, vars):
        """Repeatedly expand %(name)s references in `value` until no more
        remain or the interpolation depth limit is hit."""
        original = value
        for _ in range(MAX_INTERPOLATION_DEPTH):
            if not (value and "%(" in value):
                break
            replacer = functools.partial(self._interpolation_replace,
                                         parser=parser)
            value = self._KEYCRE.sub(replacer, value)
            try:
                value = value % vars
            except KeyError as err:
                raise InterpolationMissingOptionError(
                    option, section, original, err.args[0])
        if value and "%(" in value:
            raise InterpolationDepthError(option, section, original)
        return value

    def before_set(self, parser, section, option, value):
        """Values are stored verbatim; no validation on assignment."""
        return value

    @staticmethod
    def _interpolation_replace(match, parser):
        """Normalize a %(name)s reference through optionxform; leave every
        other character untouched."""
        name = match.group(1)
        if name is None:
            return match.group()
        return "%%(%s)s" % parser.optionxform(name)
class RawConfigParser(MutableMapping):
    """ConfigParser that does not do interpolation.

    Implements the MutableMapping protocol: parser[section] returns a
    SectionProxy, and the DEFAULT section is always present.
    """

    # Regular expressions for parsing section headers and options
    _SECT_TMPL = r"""
        \[                                 # [
        (?P<header>[^]]+)                  # very permissive!
        \]                                 # ]
        """
    _OPT_TMPL = r"""
        (?P<option>.*?)                    # very permissive!
        \s*(?P<vi>{delim})\s*              # any number of space/tab,
                                           # followed by any of the
                                           # allowed delimiters,
                                           # followed by any space/tab
        (?P<value>.*)$                     # everything up to eol
        """
    _OPT_NV_TMPL = r"""
        (?P<option>.*?)                    # very permissive!
        \s*(?:                             # any number of space/tab,
        (?P<vi>{delim})\s*                 # optionally followed by
                                           # any of the allowed
                                           # delimiters, followed by any
                                           # space/tab
        (?P<value>.*))?$                   # everything up to eol
        """
    # Interpolation algorithm to be used if the user does not specify another
    _DEFAULT_INTERPOLATION = Interpolation()
    # Compiled regular expression for matching sections
    SECTCRE = re.compile(_SECT_TMPL, re.VERBOSE)
    # Compiled regular expression for matching options with typical separators
    OPTCRE = re.compile(_OPT_TMPL.format(delim="=|:"), re.VERBOSE)
    # Compiled regular expression for matching options with optional values
    # delimited using typical separators
    OPTCRE_NV = re.compile(_OPT_NV_TMPL.format(delim="=|:"), re.VERBOSE)
    # Compiled regular expression for matching leading whitespace in a line
    NONSPACECRE = re.compile(r"\S")
    # Possible boolean values in the configuration.
    BOOLEAN_STATES = {'1': True, 'yes': True, 'true': True, 'on': True,
                      '0': False, 'no': False, 'false': False, 'off': False}

    def __init__(self, defaults=None, dict_type=_default_dict,
                 allow_no_value=False, *, delimiters=('=', ':'),
                 comment_prefixes=('#', ';'), inline_comment_prefixes=None,
                 strict=True, empty_lines_in_values=True,
                 default_section=DEFAULTSECT,
                 interpolation=_UNSET):
        """Initialize the parser.  All behavioral knobs (delimiters, comment
        prefixes, strictness, the default section name, the interpolation
        engine) are keyword-only."""
        self._dict = dict_type
        self._sections = self._dict()
        self._defaults = self._dict()
        # Section name -> SectionProxy; kept in sync with self._sections.
        self._proxies = self._dict()
        self._proxies[default_section] = SectionProxy(self, default_section)
        if defaults:
            for key, value in defaults.items():
                self._defaults[self.optionxform(key)] = value
        self._delimiters = tuple(delimiters)
        if delimiters == ('=', ':'):
            # Common case: reuse the precompiled class-level patterns.
            self._optcre = self.OPTCRE_NV if allow_no_value else self.OPTCRE
        else:
            d = "|".join(re.escape(d) for d in delimiters)
            if allow_no_value:
                self._optcre = re.compile(self._OPT_NV_TMPL.format(delim=d),
                                          re.VERBOSE)
            else:
                self._optcre = re.compile(self._OPT_TMPL.format(delim=d),
                                          re.VERBOSE)
        self._comment_prefixes = tuple(comment_prefixes or ())
        self._inline_comment_prefixes = tuple(inline_comment_prefixes or ())
        self._strict = strict
        self._allow_no_value = allow_no_value
        self._empty_lines_in_values = empty_lines_in_values
        self.default_section=default_section
        self._interpolation = interpolation
        if self._interpolation is _UNSET:
            self._interpolation = self._DEFAULT_INTERPOLATION
        if self._interpolation is None:
            # Explicit None means "no interpolation"; the base Interpolation
            # class passes values through unchanged.
            self._interpolation = Interpolation()

    def defaults(self):
        """Return the mapping holding the DEFAULT section's options."""
        return self._defaults

    def sections(self):
        """Return a list of section names, excluding [DEFAULT]"""
        # self._sections will never have [DEFAULT] in it
        return list(self._sections.keys())

    def add_section(self, section):
        """Create a new section in the configuration.
        Raise DuplicateSectionError if a section by the specified name
        already exists. Raise ValueError if name is DEFAULT.
        """
        if section == self.default_section:
            raise ValueError('Invalid section name: %r' % section)
        if section in self._sections:
            raise DuplicateSectionError(section)
        self._sections[section] = self._dict()
        self._proxies[section] = SectionProxy(self, section)

    def has_section(self, section):
        """Indicate whether the named section is present in the configuration.
        The DEFAULT section is not acknowledged.
        """
        return section in self._sections

    def options(self, section):
        """Return a list of option names for the given section name."""
        try:
            opts = self._sections[section].copy()
        except KeyError:
            raise NoSectionError(section)
        # Defaults are visible in every section.
        opts.update(self._defaults)
        return list(opts.keys())

    def read(self, filenames, encoding=None):
        """Read and parse a filename or a list of filenames.
        Files that cannot be opened are silently ignored; this is
        designed so that you can specify a list of potential
        configuration file locations (e.g. current directory, user's
        home directory, systemwide directory), and all existing
        configuration files in the list will be read. A single
        filename may also be given.
        Return list of successfully read files.
        """
        if isinstance(filenames, str):
            filenames = [filenames]
        read_ok = []
        for filename in filenames:
            try:
                with open(filename, encoding=encoding) as fp:
                    self._read(fp, filename)
            except IOError:
                # Unreadable/missing files are skipped by design (see above).
                continue
            read_ok.append(filename)
        return read_ok

    def read_file(self, f, source=None):
        """Like read() but the argument must be a file-like object.
        The `f' argument must be iterable, returning one line at a time.
        Optional second argument is the `source' specifying the name of the
        file being read. If not given, it is taken from f.name. If `f' has no
        `name' attribute, `<???>' is used.
        """
        if source is None:
            try:
                source = f.name
            except AttributeError:
                source = '<???>'
        self._read(f, source)

    def read_string(self, string, source='<string>'):
        """Read configuration from a given string."""
        sfile = io.StringIO(string)
        self.read_file(sfile, source)

    def read_dict(self, dictionary, source='<dict>'):
        """Read configuration from a dictionary.
        Keys are section names, values are dictionaries with keys and values
        that should be present in the section. If the used dictionary type
        preserves order, sections and their keys will be added in order.
        All types held in the dictionary are converted to strings during
        reading, including section names, option names and keys.
        Optional second argument is the `source' specifying the name of the
        dictionary being read.
        """
        elements_added = set()
        for section, keys in dictionary.items():
            section = str(section)
            try:
                self.add_section(section)
            except (DuplicateSectionError, ValueError):
                # In strict mode a section may only be introduced once per
                # read_dict() call; merging with pre-existing sections is ok.
                if self._strict and section in elements_added:
                    raise
            elements_added.add(section)
            for key, value in keys.items():
                key = self.optionxform(str(key))
                if value is not None:
                    value = str(value)
                if self._strict and (section, key) in elements_added:
                    raise DuplicateOptionError(section, key, source)
                elements_added.add((section, key))
                self.set(section, key, value)

    def readfp(self, fp, filename=None):
        """Deprecated, use read_file instead."""
        warnings.warn(
            "This method will be removed in future versions. "
            "Use 'parser.read_file()' instead.",
            DeprecationWarning, stacklevel=2
        )
        self.read_file(fp, source=filename)

    def get(self, section, option, *, raw=False, vars=None, fallback=_UNSET):
        """Get an option value for a given section.
        If `vars' is provided, it must be a dictionary. The option is looked up
        in `vars' (if provided), `section', and in `DEFAULTSECT' in that order.
        If the key is not found and `fallback' is provided, it is used as
        a fallback value. `None' can be provided as a `fallback' value.
        If interpolation is enabled and the optional argument `raw' is False,
        all interpolations are expanded in the return values.
        Arguments `raw', `vars', and `fallback' are keyword only.
        The section DEFAULT is special.
        """
        try:
            d = self._unify_values(section, vars)
        except NoSectionError:
            if fallback is _UNSET:
                raise
            else:
                return fallback
        option = self.optionxform(option)
        try:
            value = d[option]
        except KeyError:
            if fallback is _UNSET:
                raise NoOptionError(option, section)
            else:
                return fallback
        if raw or value is None:
            return value
        else:
            return self._interpolation.before_get(self, section, option, value,
                                                  d)

    def _get(self, section, conv, option, **kwargs):
        """Fetch an option with get() and pass it through converter `conv`."""
        return conv(self.get(section, option, **kwargs))

    def getint(self, section, option, *, raw=False, vars=None,
               fallback=_UNSET):
        """Like get(), but convert the value to an int; `fallback` is
        returned unconverted when the option or section is missing."""
        try:
            return self._get(section, int, option, raw=raw, vars=vars)
        except (NoSectionError, NoOptionError):
            if fallback is _UNSET:
                raise
            else:
                return fallback

    def getfloat(self, section, option, *, raw=False, vars=None,
                 fallback=_UNSET):
        """Like get(), but convert the value to a float; `fallback` is
        returned unconverted when the option or section is missing."""
        try:
            return self._get(section, float, option, raw=raw, vars=vars)
        except (NoSectionError, NoOptionError):
            if fallback is _UNSET:
                raise
            else:
                return fallback

    def getboolean(self, section, option, *, raw=False, vars=None,
                   fallback=_UNSET):
        """Like get(), but convert the value to a boolean using
        BOOLEAN_STATES; `fallback` is returned unconverted when the option
        or section is missing."""
        try:
            return self._get(section, self._convert_to_boolean, option,
                             raw=raw, vars=vars)
        except (NoSectionError, NoOptionError):
            if fallback is _UNSET:
                raise
            else:
                return fallback

    def items(self, section=_UNSET, raw=False, vars=None):
        """Return a list of (name, value) tuples for each option in a section.
        All % interpolations are expanded in the return values, based on the
        defaults passed into the constructor, unless the optional argument
        `raw' is true. Additional substitutions may be provided using the
        `vars' argument, which must be a dictionary whose contents overrides
        any pre-existing defaults.
        The section DEFAULT is special.
        """
        if section is _UNSET:
            # No section given: behave as the mapping-protocol items() view.
            return super().items()
        d = self._defaults.copy()
        try:
            d.update(self._sections[section])
        except KeyError:
            if section != self.default_section:
                raise NoSectionError(section)
        # Update with the entry specific variables
        if vars:
            for key, value in vars.items():
                d[self.optionxform(key)] = value
        value_getter = lambda option: self._interpolation.before_get(self,
            section, option, d[option], d)
        if raw:
            value_getter = lambda option: d[option]
        return [(option, value_getter(option)) for option in d.keys()]

    def popitem(self):
        """Remove a section from the parser and return it as
        a (section_name, section_proxy) tuple. If no section is present, raise
        KeyError.
        The section DEFAULT is never returned because it cannot be removed.
        """
        for key in self.sections():
            value = self[key]
            del self[key]
            return key, value
        raise KeyError

    def optionxform(self, optionstr):
        """Canonicalize an option name; lowercases by default.  Override to
        make option names case-sensitive or otherwise transformed."""
        return optionstr.lower()

    def has_option(self, section, option):
        """Check for the existence of a given option in a given section.
        If the specified `section' is None or an empty string, DEFAULT is
        assumed. If the specified `section' does not exist, returns False."""
        if not section or section == self.default_section:
            option = self.optionxform(option)
            return option in self._defaults
        elif section not in self._sections:
            return False
        else:
            option = self.optionxform(option)
            return (option in self._sections[section]
                    or option in self._defaults)

    def set(self, section, option, value=None):
        """Set an option."""
        if value:
            # Let the interpolation engine validate/transform the value.
            value = self._interpolation.before_set(self, section, option,
                                                   value)
        if not section or section == self.default_section:
            sectdict = self._defaults
        else:
            try:
                sectdict = self._sections[section]
            except KeyError:
                raise NoSectionError(section)
        sectdict[self.optionxform(option)] = value

    def write(self, fp, space_around_delimiters=True):
        """Write an .ini-format representation of the configuration state.
        If `space_around_delimiters' is True (the default), delimiters
        between keys and values are surrounded by spaces.
        """
        if space_around_delimiters:
            d = " {} ".format(self._delimiters[0])
        else:
            d = self._delimiters[0]
        if self._defaults:
            self._write_section(fp, self.default_section,
                                    self._defaults.items(), d)
        for section in self._sections:
            self._write_section(fp, section,
                                self._sections[section].items(), d)

    def _write_section(self, fp, section_name, section_items, delimiter):
        """Write a single section to the specified `fp'."""
        fp.write("[{}]\n".format(section_name))
        for key, value in section_items:
            value = self._interpolation.before_write(self, section_name, key,
                                                     value)
            if value is not None or not self._allow_no_value:
                # Indent continuation lines so they re-parse as multiline.
                value = delimiter + str(value).replace('\n', '\n\t')
            else:
                value = ""
            fp.write("{}{}\n".format(key, value))
        fp.write("\n")

    def remove_option(self, section, option):
        """Remove an option."""
        if not section or section == self.default_section:
            sectdict = self._defaults
        else:
            try:
                sectdict = self._sections[section]
            except KeyError:
                raise NoSectionError(section)
        option = self.optionxform(option)
        existed = option in sectdict
        if existed:
            del sectdict[option]
        return existed

    def remove_section(self, section):
        """Remove a file section."""
        existed = section in self._sections
        if existed:
            del self._sections[section]
            del self._proxies[section]
        return existed

    def __getitem__(self, key):
        # DEFAULT is always accessible; other sections must exist.
        if key != self.default_section and not self.has_section(key):
            raise KeyError(key)
        return self._proxies[key]

    def __setitem__(self, key, value):
        # To conform with the mapping protocol, overwrites existing values in
        # the section.

        # XXX this is not atomic if read_dict fails at any point. Then again,
        # no update method in configparser is atomic in this implementation.
        if key == self.default_section:
            self._defaults.clear()
        elif key in self._sections:
            self._sections[key].clear()
        self.read_dict({key: value})

    def __delitem__(self, key):
        if key == self.default_section:
            raise ValueError("Cannot remove the default section.")
        if not self.has_section(key):
            raise KeyError(key)
        self.remove_section(key)

    def __contains__(self, key):
        return key == self.default_section or self.has_section(key)

    def __len__(self):
        return len(self._sections) + 1 # the default section

    def __iter__(self):
        # XXX does it break when underlying container state changed?
        return itertools.chain((self.default_section,), self._sections.keys())

    def _read(self, fp, fpname):
        """Parse a sectioned configuration file.
        Each section in a configuration file contains a header, indicated by
        a name in square brackets (`[]'), plus key/value options, indicated by
        `name' and `value' delimited with a specific substring (`=' or `:' by
        default).
        Values can span multiple lines, as long as they are indented deeper
        than the first line of the value. Depending on the parser's mode, blank
        lines may be treated as parts of multiline values or ignored.
        Configuration files may include comments, prefixed by specific
        characters (`#' and `;' by default). Comments may appear on their own
        in an otherwise empty line or may be entered in lines holding values or
        section names.
        """
        elements_added = set()
        cursect = None                        # None, or a dictionary
        sectname = None
        optname = None
        lineno = 0
        indent_level = 0
        e = None                              # None, or an exception
        for lineno, line in enumerate(fp, start=1):
            comment_start = sys.maxsize
            # strip inline comments
            inline_prefixes = {p: -1 for p in self._inline_comment_prefixes}
            while comment_start == sys.maxsize and inline_prefixes:
                next_prefixes = {}
                for prefix, index in inline_prefixes.items():
                    index = line.find(prefix, index+1)
                    if index == -1:
                        continue
                    next_prefixes[prefix] = index
                    # An inline comment must be at the start of the line or
                    # preceded by whitespace.
                    if index == 0 or (index > 0 and line[index-1].isspace()):
                        comment_start = min(comment_start, index)
                inline_prefixes = next_prefixes
            # strip full line comments
            for prefix in self._comment_prefixes:
                if line.strip().startswith(prefix):
                    comment_start = 0
                    break
            if comment_start == sys.maxsize:
                comment_start = None
            value = line[:comment_start].strip()
            if not value:
                if self._empty_lines_in_values:
                    # add empty line to the value, but only if there was no
                    # comment on the line
                    if (comment_start is None and
                            cursect is not None and
                            optname and
                            cursect[optname] is not None):
                        cursect[optname].append('') # newlines added at join
                else:
                    # empty line marks end of value
                    indent_level = sys.maxsize
                continue
            # continuation line?
            first_nonspace = self.NONSPACECRE.search(line)
            cur_indent_level = first_nonspace.start() if first_nonspace else 0
            if (cursect is not None and optname and
                cur_indent_level > indent_level):
                cursect[optname].append(value)
            # a section header or option header?
            else:
                indent_level = cur_indent_level
                # is it a section header?
                mo = self.SECTCRE.match(value)
                if mo:
                    sectname = mo.group('header')
                    if sectname in self._sections:
                        if self._strict and sectname in elements_added:
                            raise DuplicateSectionError(sectname, fpname,
                                                        lineno)
                        cursect = self._sections[sectname]
                        elements_added.add(sectname)
                    elif sectname == self.default_section:
                        cursect = self._defaults
                    else:
                        cursect = self._dict()
                        self._sections[sectname] = cursect
                        self._proxies[sectname] = SectionProxy(self, sectname)
                        elements_added.add(sectname)
                    # So sections can't start with a continuation line
                    optname = None
                # no section header in the file?
                elif cursect is None:
                    raise MissingSectionHeaderError(fpname, lineno, line)
                # an option line?
                else:
                    mo = self._optcre.match(value)
                    if mo:
                        optname, vi, optval = mo.group('option', 'vi', 'value')
                        if not optname:
                            e = self._handle_error(e, fpname, lineno, line)
                        optname = self.optionxform(optname.rstrip())
                        if (self._strict and
                            (sectname, optname) in elements_added):
                            raise DuplicateOptionError(sectname, optname,
                                                       fpname, lineno)
                        elements_added.add((sectname, optname))
                        # This check is fine because the OPTCRE cannot
                        # match if it would set optval to None
                        if optval is not None:
                            optval = optval.strip()
                            cursect[optname] = [optval]
                        else:
                            # valueless option handling
                            cursect[optname] = None
                    else:
                        # a non-fatal parsing error occurred. set up the
                        # exception but keep going. the exception will be
                        # raised at the end of the file and will contain a
                        # list of all bogus lines
                        e = self._handle_error(e, fpname, lineno, line)
        # if any parsing errors occurred, raise an exception
        if e:
            raise e
        self._join_multiline_values()

    def _join_multiline_values(self):
        """Collapse the per-option line lists accumulated by _read() into
        final string values, applying the interpolation engine's
        before_read hook."""
        defaults = self.default_section, self._defaults
        all_sections = itertools.chain((defaults,),
                                       self._sections.items())
        for section, options in all_sections:
            for name, val in options.items():
                if isinstance(val, list):
                    val = '\n'.join(val).rstrip()
                options[name] = self._interpolation.before_read(self,
                                                                section,
                                                                name, val)

    def _handle_error(self, exc, fpname, lineno, line):
        """Accumulate a bogus line into a (possibly new) ParsingError."""
        if not exc:
            exc = ParsingError(fpname)
        exc.append(lineno, repr(line))
        return exc

    def _unify_values(self, section, vars):
        """Create a sequence of lookups with 'vars' taking priority over
        the 'section' which takes priority over the DEFAULTSECT.
        """
        sectiondict = {}
        try:
            sectiondict = self._sections[section]
        except KeyError:
            if section != self.default_section:
                raise NoSectionError(section)
        # Update with the entry specific variables
        vardict = {}
        if vars:
            for key, value in vars.items():
                if value is not None:
                    value = str(value)
                vardict[self.optionxform(key)] = value
        return _ChainMap(vardict, sectiondict, self._defaults)

    def _convert_to_boolean(self, value):
        """Return a boolean value translating from other types if necessary.
        """
        if value.lower() not in self.BOOLEAN_STATES:
            raise ValueError('Not a boolean: %s' % value)
        return self.BOOLEAN_STATES[value.lower()]

    def _validate_value_types(self, *, section="", option="", value=""):
        """Raises a TypeError for non-string values.
        The only legal non-string value if we allow valueless
        options is None, so we need to check if the value is a
        string if:
        - we do not allow valueless options, or
        - we allow valueless options but the value is not None
        For compatibility reasons this method is not used in classic set()
        for RawConfigParsers. It is invoked in every case for mapping protocol
        access and in ConfigParser.set().
        """
        if not isinstance(section, str):
            raise TypeError("section names must be strings")
        if not isinstance(option, str):
            raise TypeError("option keys must be strings")
        if not self._allow_no_value or value:
            if not isinstance(value, str):
                raise TypeError("option values must be strings")
class ConfigParser(RawConfigParser):
    """Configuration parser with value interpolation enabled by default."""

    _DEFAULT_INTERPOLATION = BasicInterpolation()

    def set(self, section, option, value=None):
        """Set an option, validating option/value types and the
        interpolation syntax of the value first."""
        self._validate_value_types(option=option, value=value)
        super().set(section, option, value)

    def add_section(self, section):
        """Create a new section, first checking that its name is a string."""
        self._validate_value_types(section=section)
        super().add_section(section)
class SafeConfigParser(ConfigParser):
    """ConfigParser alias for backwards compatibility purposes."""

    def __init__(self, *args, **kwargs):
        """Construct like ConfigParser, then emit a deprecation warning."""
        super().__init__(*args, **kwargs)
        message = ("The SafeConfigParser class has been renamed to ConfigParser "
                   "in Python 3.2. This alias will be removed in future versions."
                   " Use ConfigParser directly instead.")
        warnings.warn(message, DeprecationWarning, stacklevel=2)
class SectionProxy(MutableMapping):
    """A mutable-mapping view onto one section of a parser."""

    def __init__(self, parser, name):
        """Creates a view on a section of the specified `name` in `parser`."""
        self._parser = parser
        self._name = name

    def __repr__(self):
        return '<Section: %s>' % self._name

    def __getitem__(self, key):
        if self._parser.has_option(self._name, key):
            return self._parser.get(self._name, key)
        raise KeyError(key)

    def __setitem__(self, key, value):
        self._parser._validate_value_types(option=key, value=value)
        return self._parser.set(self._name, key, value)

    def __delitem__(self, key):
        if self._parser.has_option(self._name, key):
            if self._parser.remove_option(self._name, key):
                return
        raise KeyError(key)

    def __contains__(self, key):
        return self._parser.has_option(self._name, key)

    def __len__(self):
        return len(self._options())

    def __iter__(self):
        return iter(self._options())

    def _options(self):
        # parser.options() does not list the DEFAULT section; ask for the
        # defaults mapping directly in that case.
        if self._name == self._parser.default_section:
            return self._parser.defaults()
        return self._parser.options(self._name)

    def get(self, option, fallback=None, *, raw=False, vars=None):
        return self._parser.get(self._name, option, raw=raw, vars=vars,
                                fallback=fallback)

    def getint(self, option, fallback=None, *, raw=False, vars=None):
        return self._parser.getint(self._name, option, raw=raw, vars=vars,
                                   fallback=fallback)

    def getfloat(self, option, fallback=None, *, raw=False, vars=None):
        return self._parser.getfloat(self._name, option, raw=raw, vars=vars,
                                     fallback=fallback)

    def getboolean(self, option, fallback=None, *, raw=False, vars=None):
        return self._parser.getboolean(self._name, option, raw=raw, vars=vars,
                                       fallback=fallback)

    @property
    def parser(self):
        # The parser object of the proxy is read-only.
        return self._parser

    @property
    def name(self):
        # The name of the section on a proxy is read-only.
        return self._name
| gpl-3.0 |
vnpy/vnpy | vnpy/app/algo_trading/algos/stop_algo.py | 4 | 3202 | from vnpy.trader.constant import Offset, Direction
from vnpy.trader.object import TradeData, OrderData, TickData
from vnpy.trader.engine import BaseEngine
from vnpy.app.algo_trading import AlgoTemplate
class StopAlgo(AlgoTemplate):
    """Stop (conditional) order algo.

    Waits for the last traded price to cross a trigger price, then sends a
    single limit order priced `price_add` beyond the trigger (capped at the
    exchange limit price) and stops once that order is no longer active.
    """

    display_name = "Stop 条件委托"

    # User-editable parameters shown in the algo trading UI.
    default_setting = {
        "vt_symbol": "",
        "direction": [Direction.LONG.value, Direction.SHORT.value],
        "stop_price": 0.0,
        "volume": 0.0,
        "price_add": 0.0,
        "offset": [
            Offset.NONE.value,
            Offset.OPEN.value,
            Offset.CLOSE.value,
            Offset.CLOSETODAY.value,
            Offset.CLOSEYESTERDAY.value
        ]
    }

    # Runtime state published to the UI.
    variables = [
        "traded",
        "vt_orderid",
        "order_status",
    ]

    def __init__(
        self,
        algo_engine: BaseEngine,
        algo_name: str,
        setting: dict
    ):
        """Parse user settings, subscribe to market data and publish the
        initial parameter/variable state."""
        super().__init__(algo_engine, algo_name, setting)
        # Parameters
        self.vt_symbol = setting["vt_symbol"]
        self.direction = Direction(setting["direction"])
        self.stop_price = setting["stop_price"]
        self.volume = setting["volume"]
        self.price_add = setting["price_add"]
        self.offset = Offset(setting["offset"])
        # Variables
        self.vt_orderid = ""
        self.traded = 0
        self.order_status = ""
        self.subscribe(self.vt_symbol)
        self.put_parameters_event()
        self.put_variables_event()

    def on_tick(self, tick: TickData):
        """Fire at most once: when price crosses the trigger, send one limit
        order chased by `price_add` and bounded by the daily limit price."""
        # An order was already sent; this algo triggers only once.
        if self.vt_orderid:
            return
        if self.direction == Direction.LONG:
            if tick.last_price >= self.stop_price:
                price = self.stop_price + self.price_add
                # Never price above limit-up (when the feed provides it).
                if tick.limit_up:
                    price = min(price, tick.limit_up)
                self.vt_orderid = self.buy(
                    self.vt_symbol,
                    price,
                    self.volume,
                    offset=self.offset
                )
                self.write_log(
                    f"停止单已触发,代码:{self.vt_symbol},方向:{self.direction}, 价格:{self.stop_price},数量:{self.volume},开平:{self.offset}")
        else:
            if tick.last_price <= self.stop_price:
                price = self.stop_price - self.price_add
                # Never price below limit-down (when the feed provides it).
                if tick.limit_down:
                    price = max(price, tick.limit_down)
                self.vt_orderid = self.sell(
                    self.vt_symbol,
                    price,
                    self.volume,
                    offset=self.offset
                )
                self.write_log(
                    f"停止单已触发,代码:{self.vt_symbol},方向:{self.direction}, 价格:{self.stop_price},数量:{self.volume},开平:{self.offset}")
        self.put_variables_event()

    def on_order(self, order: OrderData):
        """Track fill progress; stop the algo once the order is inactive."""
        self.traded = order.traded
        self.order_status = order.status
        if not order.is_active():
            self.stop()
        self.put_variables_event()

    def on_trade(self, trade: TradeData):
        """No per-trade handling required; state is tracked in on_order."""
        pass
| mit |
gsmartway/odoo | addons/stock_account/res_config.py | 315 | 2277 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (C) 2004-2012 OpenERP S.A. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class stock_config_settings(osv.osv_memory):
    """Stock configuration wizard extended with accounting-related options:
    inventory valuation entries, direct delivery invoicing and landed costs.
    """
    _inherit = 'stock.config.settings'

    # ``module_*`` booleans trigger module (un)installation on apply;
    # ``group_*`` booleans toggle the implied security group.
    _columns = {
        'group_stock_inventory_valuation': fields.boolean("Generate accounting entries per stock movement",
            implied_group='stock_account.group_inventory_valuation',
            help="""Allows to configure inventory valuations on products and product categories."""),
        'module_stock_invoice_directly': fields.boolean("Create and open the invoice when the user finish a delivery order",
            help='This allows to automatically launch the invoicing wizard if the delivery is '
                 'to be invoiced when you send or deliver goods.\n'
                 '-This installs the module stock_invoice_directly.'),
        'module_stock_landed_costs': fields.boolean("Calculate landed costs on products",
            help="""Install the module that allows to affect landed costs on pickings, and split them onto the different products."""),
    }

    def onchange_landed_costs(self, cr, uid, ids, module_landed_costs, context=None):
        """Landed costs require per-move accounting entries, so enabling the
        module forces the inventory valuation group on as well."""
        if module_landed_costs:
            return {'value': {'group_stock_inventory_valuation': True}}
        return {}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
noslenfa/tdjangorest | uw/lib/python2.7/site-packages/IPython/external/simplegeneric/_simplegeneric.py | 17 | 3113 | """This is version 0.7 of Philip J. Eby's simplegeneric module
(http://pypi.python.org/pypi/simplegeneric), patched to work with Python 3,
which doesn't support old-style classes.
"""
#Name: simplegeneric
#Version: 0.7
#Summary: Simple generic functions (similar to Python's own len(), pickle.dump(), etc.)
#Home-page: http://pypi.python.org/pypi/simplegeneric
#Author: Phillip J. Eby
#Author-email: peak@eby-sarna.com
#License: PSF or ZPL
__all__ = ["generic"]
try:
    from types import ClassType, InstanceType
except ImportError:
    # Python 3: classic classes are gone, so ``type`` covers every class.
    classtypes = type
else:
    # Python 2: a class may be a new-style type or an old-style (classic)
    # class, so isinstance checks must accept both.
    classtypes = type, ClassType
def generic(func):
    """Create a simple generic function.

    Returns a dispatcher that calls the implementation registered for the
    first argument's type (via ``when_type``) or identity (via
    ``when_object``), falling back to `func` itself.
    """
    _sentinel = object()  # unique marker: distinguishes "no entry" from None

    def _by_class(*args, **kw):
        # Dispatcher for Python 2 old-style instances: wrap the instance's
        # class in a throwaway new-style subclass so __mro__ exists, then
        # use the first registered implementation found along it.
        cls = args[0].__class__
        for t in type(cls.__name__, (cls,object), {}).__mro__:
            f = _gbt(t, _sentinel)
            if f is not _sentinel:
                return f(*args, **kw)
        else:
            return func(*args, **kw)

    # type -> implementation; `object` maps to the default implementation.
    _by_type = {object: func}
    try:
        _by_type[InstanceType] = _by_class
    except NameError: # Python 3
        pass
    _gbt = _by_type.get

    def when_type(*types):
        """Decorator to add a method that will be called for the given types"""
        for t in types:
            if not isinstance(t, classtypes):
                raise TypeError(
                    "%r is not a type or class" % (t,)
                )
        def decorate(f):
            for t in types:
                if _by_type.setdefault(t,f) is not f:
                    raise TypeError(
                        "%r already has method for type %r" % (func, t)
                    )
            return f
        return decorate

    # id(obj) -> (obj, implementation); the object itself is kept in the
    # tuple so it stays alive and its id cannot be reused.
    _by_object = {}
    _gbo = _by_object.get

    def when_object(*obs):
        """Decorator to add a method to be called for the given object(s)"""
        def decorate(f):
            for o in obs:
                if _by_object.setdefault(id(o), (o,f))[1] is not f:
                    raise TypeError(
                        "%r already has method for object %r" % (func, o)
                    )
            return f
        return decorate

    def dispatch(*args, **kw):
        # Identity registrations take precedence over type registrations.
        f = _gbo(id(args[0]), _sentinel)
        if f is _sentinel:
            for t in type(args[0]).__mro__:
                f = _gbt(t, _sentinel)
                if f is not _sentinel:
                    return f(*args, **kw)
            else:
                return func(*args, **kw)
        else:
            return f[1](*args, **kw)

    # Make the dispatcher masquerade as the wrapped function.
    dispatch.__name__ = func.__name__
    dispatch.__dict__ = func.__dict__.copy()
    dispatch.__doc__ = func.__doc__
    dispatch.__module__ = func.__module__

    # Registration / introspection API exposed on the returned function.
    dispatch.when_type = when_type
    dispatch.when_object = when_object
    dispatch.default = func
    dispatch.has_object = lambda o: id(o) in _by_object
    dispatch.has_type = lambda t: t in _by_type
    return dispatch
def test_suite():
    """Build the doctest-based suite from the package README."""
    import doctest
    flags = doctest.ELLIPSIS | doctest.REPORT_ONLY_FIRST_FAILURE
    return doctest.DocFileSuite('README.txt', optionflags=flags)
| apache-2.0 |
alphaBenj/zipline | zipline/data/session_bars.py | 5 | 1098 | # Copyright 2016 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from abc import abstractproperty
from zipline.data.bar_reader import BarReader
class SessionBarReader(BarReader):
    """
    Reader for OHLCV pricing data at a session (daily) frequency.
    """
    @property
    def data_frequency(self):
        # Fixed for all session readers; subclasses provide the data itself.
        return 'session'
    @abstractproperty
    def sessions(self):
        """
        Returns
        -------
        sessions : DatetimeIndex
            All session labels (unioning the ranges of all assets) which the
            reader can provide.
        """
        pass
| apache-2.0 |
RoyalTS/econ-python-environment | .mywaflib/waflib/Tools/dbus.py | 10 | 2073 | #!/usr/bin/env python
# encoding: utf-8
# Ali Sabil, 2007
"""
Compile dbus files with **dbus-binding-tool**
Typical usage::
def options(opt):
opt.load('compiler_c dbus')
def configure(conf):
conf.load('compiler_c dbus')
def build(bld):
tg = bld.program(
includes = '.',
source = bld.path.ant_glob('*.c'),
target = 'gnome-hello')
tg.add_dbus_file('test.xml', 'test_prefix', 'glib-server')
"""
from waflib import Task, Errors
from waflib.TaskGen import taskgen_method, before_method
@taskgen_method
def add_dbus_file(self, filename, prefix, mode):
	"""
	Register a dbus xml file for later compilation. Entries accumulate on
	the task generator's *dbus_lst* attribute and are consumed by
	:py:func:`waflib.Tools.dbus.process_dbus`.

	:param filename: xml file to compile
	:type filename: string
	:param prefix: dbus binding tool prefix (--prefix=prefix)
	:type prefix: string
	:param mode: dbus binding tool mode (--mode=mode)
	:type mode: string
	"""
	try:
		self.dbus_lst.append([filename, prefix, mode])
	except AttributeError:
		self.dbus_lst = [[filename, prefix, mode]]
	if 'process_dbus' not in self.meths:
		self.meths.append('process_dbus')
@before_method('apply_core')
def process_dbus(self):
	"""
	Create a :py:class:`waflib.Tools.dbus.dbus_binding_tool` task for each
	entry registered through :py:func:`waflib.Tools.dbus.add_dbus_file`.
	"""
	for xml_file, prefix, mode in getattr(self, 'dbus_lst', []):
		node = self.path.find_resource(xml_file)
		if not node:
			raise Errors.WafError('file not found ' + xml_file)
		task = self.create_task('dbus_binding_tool', node, node.change_ext('.h'))
		task.env.DBUS_BINDING_TOOL_PREFIX = prefix
		task.env.DBUS_BINDING_TOOL_MODE = mode
class dbus_binding_tool(Task.Task):
	"""
	Run ``dbus-binding-tool`` on one xml file to generate a C header.
	"""
	# color of the task lines in the build output
	color = 'BLUE'
	# this task produces '.h' files, which later compilation steps consume
	ext_out = ['.h']
	run_str = '${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}'
	shell = True # temporary workaround for #795
def configure(conf):
	"""
	Detect the program dbus-binding-tool and set ``conf.env.DBUS_BINDING_TOOL``.
	"""
	# find_program stores its result in conf.env under var=...; binding the
	# return value to a local was dead code, so the assignment is dropped.
	conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL')
| bsd-3-clause |
rkashapov/buildbot | master/buildbot/test/unit/test_data_buildrequests.py | 2 | 23863 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
import datetime
import mock
from twisted.internet import defer
from twisted.internet import reactor
from twisted.trial import unittest
from buildbot.data import buildrequests
from buildbot.data import resultspec
from buildbot.test.fake import fakedb
from buildbot.test.fake import fakemaster
from buildbot.test.util import endpoint
from buildbot.test.util import interfaces
from buildbot.util import UTC
class TestBuildRequestEndpoint(endpoint.EndpointMixin, unittest.TestCase):
    """Tests for the single-buildrequest data API endpoint
    (/buildrequests/:buildrequestid)."""
    endpointClass = buildrequests.BuildRequestEndpoint
    resourceTypeClass = buildrequests.BuildRequest
    # Fixed timestamps (with their epoch-second equivalents) so the fake build
    # request's claim/submit/complete times are deterministic.
    CLAIMED_AT = datetime.datetime(1978, 6, 15, 12, 31, 15, tzinfo=UTC)
    CLAIMED_AT_EPOCH = 266761875
    SUBMITTED_AT = datetime.datetime(1979, 6, 15, 12, 31, 15, tzinfo=UTC)
    SUBMITTED_AT_EPOCH = 298297875
    COMPLETE_AT = datetime.datetime(1980, 6, 15, 12, 31, 15, tzinfo=UTC)
    COMPLETE_AT_EPOCH = 329920275
    def setUp(self):
        # One build request (id=44) on builder 77 so the endpoint has data.
        self.setUpEndpoint()
        self.db.insertTestData([
            fakedb.Builder(id=77, name='bbb'),
            fakedb.Master(id=fakedb.FakeBuildRequestsComponent.MASTER_ID),
            fakedb.Worker(id=13, name='wrk'),
            fakedb.Buildset(id=8822),
            fakedb.BuildRequest(id=44, buildsetid=8822, builderid=77,
                                priority=7, submitted_at=self.SUBMITTED_AT_EPOCH,
                                waited_for=1),
        ])
    def tearDown(self):
        self.tearDownEndpoint()
    @defer.inlineCallbacks
    def testGetExisting(self):
        """Claim and complete request 44, then verify every returned field,
        including the epoch-to-datetime conversions."""
        self.db.buildrequests.claimBuildRequests(
            [44], claimed_at=self.CLAIMED_AT)
        self.db.buildrequests.completeBuildRequests(
            [44], 75, complete_at=self.COMPLETE_AT)
        buildrequest = yield self.callGet(('buildrequests', 44))
        self.validateData(buildrequest)
        # check data formatting:
        self.assertEqual(buildrequest['buildrequestid'], 44)
        self.assertEqual(buildrequest['complete'], True)
        self.assertEqual(buildrequest['builderid'], 77)
        self.assertEqual(buildrequest['waited_for'], True)
        self.assertEqual(buildrequest['claimed_at'], self.CLAIMED_AT)
        self.assertEqual(buildrequest['results'], 75)
        self.assertEqual(buildrequest['claimed_by_masterid'],
                         fakedb.FakeBuildRequestsComponent.MASTER_ID)
        self.assertEqual(buildrequest['claimed'], True)
        self.assertEqual(buildrequest['submitted_at'], self.SUBMITTED_AT)
        self.assertEqual(buildrequest['complete_at'], self.COMPLETE_AT)
        self.assertEqual(buildrequest['buildsetid'], 8822)
        self.assertEqual(buildrequest['priority'], 7)
    @defer.inlineCallbacks
    def testGetMissing(self):
        """An unknown buildrequest id yields None rather than an error."""
        buildrequest = yield self.callGet(('buildrequests', 9999))
        self.assertEqual(buildrequest, None)
class TestBuildRequestsEndpoint(endpoint.EndpointMixin, unittest.TestCase):
    """Tests for the buildrequest collection endpoints (/buildrequests and
    /builders/:builderid/buildrequests), including filter translation."""
    endpointClass = buildrequests.BuildRequestsEndpoint
    resourceTypeClass = buildrequests.BuildRequest
    # Fixed timestamps (with epoch-second equivalents) for determinism.
    CLAIMED_AT = datetime.datetime(1978, 6, 15, 12, 31, 15, tzinfo=UTC)
    CLAIMED_AT_EPOCH = 266761875
    SUBMITTED_AT = datetime.datetime(1979, 6, 15, 12, 31, 15, tzinfo=UTC)
    SUBMITTED_AT_EPOCH = 298297875
    COMPLETE_AT = datetime.datetime(1980, 6, 15, 12, 31, 15, tzinfo=UTC)
    COMPLETE_AT_EPOCH = 329920275
    def setUp(self):
        # Requests 44/45 live on builder 77, request 46 on builder 78, and
        # builder 79 has none -- exercised by the per-builder tests below.
        self.setUpEndpoint()
        self.db.insertTestData([
            fakedb.Builder(id=77, name='bbb'),
            fakedb.Builder(id=78, name='ccc'),
            fakedb.Builder(id=79, name='ddd'),
            fakedb.Master(id=fakedb.FakeBuildRequestsComponent.MASTER_ID),
            fakedb.Worker(id=13, name='wrk'),
            fakedb.Buildset(id=8822),
            fakedb.BuildRequest(id=44, buildsetid=8822, builderid=77,
                                priority=7, submitted_at=self.SUBMITTED_AT_EPOCH,
                                waited_for=1),
            fakedb.BuildRequest(id=45, buildsetid=8822, builderid=77),
            fakedb.BuildRequest(id=46, buildsetid=8822, builderid=78),
        ])
    def tearDown(self):
        self.tearDownEndpoint()
    @defer.inlineCallbacks
    def testGetAll(self):
        buildrequests = yield self.callGet(('buildrequests',))
        [self.validateData(br) for br in buildrequests]
        self.assertEqual(sorted([br['buildrequestid'] for br in buildrequests]),
                         [44, 45, 46])
    @defer.inlineCallbacks
    def testGetNoBuildRequest(self):
        buildrequests = yield self.callGet(('builders', 79, 'buildrequests'))
        self.assertEqual(buildrequests, [])
    @defer.inlineCallbacks
    def testGetBuilderid(self):
        buildrequests = yield self.callGet(('builders', 78, 'buildrequests'))
        [self.validateData(br) for br in buildrequests]
        self.assertEqual(
            sorted([br['buildrequestid'] for br in buildrequests]), [46])
    @defer.inlineCallbacks
    def testGetUnknownBuilderid(self):
        buildrequests = yield self.callGet(('builders', 79, 'buildrequests'))
        self.assertEqual(buildrequests, [])
    @defer.inlineCallbacks
    def testGetNoFilters(self):
        # With no filters, all db-level filter arguments default to None.
        getBuildRequestsMock = mock.Mock(return_value={})
        self.patch(
            self.master.db.buildrequests, 'getBuildRequests', getBuildRequestsMock)
        yield self.callGet(('buildrequests',))
        getBuildRequestsMock.assert_called_with(
            builderid=None,
            bsid=None,
            complete=None,
            claimed=None,
            resultSpec=resultspec.ResultSpec())
    @defer.inlineCallbacks
    def testGetFilters(self):
        # complete/claimed/buildsetid filters are consumed by the endpoint
        # and translated into db arguments; branch/repository remain in the
        # resultSpec that is passed down.
        getBuildRequestsMock = mock.Mock(return_value={})
        self.patch(
            self.master.db.buildrequests, 'getBuildRequests', getBuildRequestsMock)
        f1 = resultspec.Filter('complete', 'eq', [False])
        f2 = resultspec.Filter('claimed', 'eq', [True])
        f3 = resultspec.Filter('buildsetid', 'eq', [55])
        f4 = resultspec.Filter('branch', 'eq', ['mybranch'])
        f5 = resultspec.Filter('repository', 'eq', ['myrepo'])
        yield self.callGet(
            ('buildrequests',),
            resultSpec=resultspec.ResultSpec(filters=[f1, f2, f3, f4, f5]))
        getBuildRequestsMock.assert_called_with(
            builderid=None,
            bsid=55,
            complete=False,
            claimed=True,
            resultSpec=resultspec.ResultSpec(filters=[f4, f5]))
    @defer.inlineCallbacks
    def testGetClaimedByMasterIdFilters(self):
        # A claimed_by_masterid filter collapses the claimed argument into
        # that master id.
        getBuildRequestsMock = mock.Mock(return_value={})
        self.patch(
            self.master.db.buildrequests, 'getBuildRequests', getBuildRequestsMock)
        f1 = resultspec.Filter('claimed', 'eq', [True])
        f2 = resultspec.Filter('claimed_by_masterid', 'eq',
                               [fakedb.FakeBuildRequestsComponent.MASTER_ID])
        yield self.callGet(
            ('buildrequests',),
            resultSpec=resultspec.ResultSpec(filters=[f1, f2]))
        getBuildRequestsMock.assert_called_with(
            builderid=None,
            bsid=None,
            complete=None,
            claimed=fakedb.FakeBuildRequestsComponent.MASTER_ID,
            resultSpec=resultspec.ResultSpec(filters=[f1]))
    @defer.inlineCallbacks
    def testGetSortedLimit(self):
        # Ordering and limit in the resultSpec are honored both ascending
        # and descending on 'results'.
        yield self.master.db.buildrequests.completeBuildRequests([44], 1)
        res = yield self.callGet(
            ('buildrequests',),
            resultSpec=resultspec.ResultSpec(order=['results'], limit=2))
        self.assertEqual(len(res), 2)
        self.assertEqual(res[0]['results'], -1)
        res = yield self.callGet(
            ('buildrequests',),
            resultSpec=resultspec.ResultSpec(order=['-results'], limit=2))
        self.assertEqual(len(res), 2)
        self.assertEqual(res[0]['results'], 1)
class TestBuildRequest(interfaces.InterfaceTests, unittest.TestCase):
    """Tests for the BuildRequest data resource's update methods (claim,
    unclaim, complete, rebuild): signature parity with the fakes, call-through
    to the db API, and mq message production."""
    CLAIMED_AT = datetime.datetime(1978, 6, 15, 12, 31, 15, tzinfo=UTC)
    COMPLETE_AT = datetime.datetime(1980, 6, 15, 12, 31, 15, tzinfo=UTC)
    class dBLayerException(Exception):
        # Stand-in for an unexpected exception escaping the db layer.
        pass
    def setUp(self):
        self.master = fakemaster.make_master(testcase=self,
                                             wantMq=True, wantDb=True, wantData=True)
        self.rtype = buildrequests.BuildRequest(self.master)
    @defer.inlineCallbacks
    def doTestCallthrough(self, dbMethodName, dbMockedMethod, method,
                          methodargs=None, methodkwargs=None,
                          expectedRes=None, expectedException=None,
                          exp
ectedDbApiCalled=True):
        """Patch db.buildrequests.<dbMethodName> with dbMockedMethod, invoke
        *method* with the given args/kwargs, then assert either the expected
        result or the expected exception, and (optionally) that the db API
        was called with the very same arguments."""
        self.patch(self.master.db.buildrequests, dbMethodName, dbMockedMethod)
        if expectedException is not None:
            try:
                yield method(*methodargs, **methodkwargs)
            except expectedException:
                pass
            except Exception as e:
                self.fail('%s exception should be raised, but got %r' %
                          (expectedException, e))
            else:
                self.fail('%s exception should be raised' %
                          (expectedException,))
        else:
            res = yield method(*methodargs, **methodkwargs)
            self.assertEqual(res, expectedRes)
        if expectedDbApiCalled:
            dbMockedMethod.assert_called_with(*methodargs, **methodkwargs)
    def testSignatureClaimBuildRequests(self):
        # fake and real implementations must expose the same signature
        @self.assertArgSpecMatches(
            self.master.data.updates.claimBuildRequests, # fake
            self.rtype.claimBuildRequests) # real
        def claimBuildRequests(self, brids, claimed_at=None, _reactor=reactor):
            pass
    @defer.inlineCallbacks
    def testFakeDataClaimBuildRequests(self):
        self.master.db.insertTestData([
            fakedb.BuildRequest(id=44, buildsetid=8822),
            fakedb.BuildRequest(id=55, buildsetid=8822),
        ])
        res = yield self.master.data.updates.claimBuildRequests(
            [44, 55],
            claimed_at=self.CLAIMED_AT,
            _reactor=reactor)
        self.assertTrue(res)
    @defer.inlineCallbacks
    def testFakeDataClaimBuildRequestsNoneArgs(self):
        res = yield self.master.data.updates.claimBuildRequests([])
        self.assertTrue(res)
    @defer.inlineCallbacks
    def testClaimBuildRequests(self):
        self.master.db.insertTestData([
            fakedb.Builder(id=123),
            fakedb.BuildRequest(id=44, buildsetid=8822, builderid=123),
            fakedb.BuildRequest(id=55, buildsetid=8822, builderid=123),
        ])
        claimBuildRequestsMock = mock.Mock(return_value=defer.succeed(None))
        yield self.doTestCallthrough('claimBuildRequests', claimBuildRequestsMock,
                                     self.rtype.claimBuildRequests,
                                     methodargs=[[44]],
                                     methodkwargs=dict(claimed_at=self.CLAIMED_AT,
                                                       _reactor=reactor),
                                     expectedRes=True,
                                     expectedException=None)
        # the 'claimed' message must be produced on all three routing paths
        msg = {
            'buildrequestid': 44,
            'complete_at': None,
            'complete': False,
            'builderid': 123,
            'waited_for': False,
            'claimed_at': None,
            'results': -1,
            'priority': 0,
            'submitted_at': datetime.datetime(1970, 5, 23, 21, 21, 18, tzinfo=UTC),
            'claimed': False,
            'claimed_by_masterid': None,
            'buildsetid': 8822,
        }
        self.assertEqual(sorted(self.master.mq.productions), sorted([
            (('buildrequests', '44', 'claimed'), msg),
            (('builders', '123', 'buildrequests', '44', 'claimed'), msg),
            (('buildsets', '8822', 'builders', '123',
              'buildrequests', '44', 'claimed'), msg),
        ]))
    @defer.inlineCallbacks
    def testClaimBuildRequestsNoBrids(self):
        # an empty brid list short-circuits: no db call, no messages
        claimBuildRequestsMock = mock.Mock(return_value=defer.succeed(None))
        yield self.doTestCallthrough('claimBuildRequests', claimBuildRequestsMock,
                                     self.rtype.claimBuildRequests,
                                     methodargs=[[]],
                                     methodkwargs=dict(),
                                     expectedRes=True,
                                     expectedException=None,
                                     expectedDbApiCalled=False)
        self.assertEqual(self.master.mq.productions, [])
    @defer.inlineCallbacks
    def testClaimBuildRequestsAlreadyClaimed(self):
        # AlreadyClaimedError is swallowed and reported as a False result
        claimBuildRequestsMock = mock.Mock(
            side_effect=buildrequests.AlreadyClaimedError('oups ! buildrequest already claimed'))
        yield self.doTestCallthrough('claimBuildRequests', claimBuildRequestsMock,
                                     self.rtype.claimBuildRequests,
                                     methodargs=[[44]],
                                     methodkwargs=dict(claimed_at=self.CLAIMED_AT,
                                                       _reactor=reactor),
                                     expectedRes=False,
                                     expectedException=None)
        self.assertEqual(self.master.mq.productions, [])
    @defer.inlineCallbacks
    def testClaimBuildRequestsUnknownException(self):
        # any other db exception propagates to the caller
        claimBuildRequestsMock = mock.Mock(
            side_effect=self.dBLayerException('oups ! unknown error'))
        yield self.doTestCallthrough('claimBuildRequests', claimBuildRequestsMock,
                                     self.rtype.claimBuildRequests,
                                     methodargs=[[44]],
                                     methodkwargs=dict(claimed_at=self.CLAIMED_AT,
                                                       _reactor=reactor),
                                     expectedRes=None,
                                     expectedException=self.dBLayerException)
        self.assertEqual(self.master.mq.productions, [])
    def testSignatureUnclaimBuildRequests(self):
        @self.assertArgSpecMatches(
            self.master.data.updates.unclaimBuildRequests, # fake
            self.rtype.unclaimBuildRequests) # real
        def unclaimBuildRequests(self, brids):
            pass
    @defer.inlineCallbacks
    def testFakeDataUnclaimBuildRequests(self):
        res = yield self.master.data.updates.unclaimBuildRequests([44, 55])
        self.assertEqual(res, None)
    @defer.inlineCallbacks
    def testFakeDataUnclaimBuildRequestsNoneArgs(self):
        res = yield self.master.data.updates.unclaimBuildRequests([])
        self.assertEqual(res, None)
    @defer.inlineCallbacks
    def testUnclaimBuildRequests(self):
        self.master.db.insertTestData([
            fakedb.Builder(id=123),
            fakedb.BuildRequest(id=44, buildsetid=8822, builderid=123),
        ])
        unclaimBuildRequestsMock = mock.Mock(return_value=defer.succeed(None))
        yield self.doTestCallthrough('unclaimBuildRequests',
                                     unclaimBuildRequestsMock,
                                     self.rtype.unclaimBuildRequests,
                                     methodargs=[[44]],
                                     methodkwargs=dict(),
                                     expectedRes=None,
                                     expectedException=None)
        # the 'unclaimed' message must be produced on all three routing paths
        msg = {
            'buildrequestid': 44,
            'complete_at': None,
            'complete': False,
            'builderid': 123,
            'waited_for': False,
            'claimed_at': None,
            'results': -1,
            'priority': 0,
            'submitted_at': datetime.datetime(1970, 5, 23, 21, 21, 18, tzinfo=UTC),
            'claimed': False,
            'claimed_by_masterid': None,
            'buildsetid': 8822,
        }
        self.assertEqual(sorted(self.master.mq.productions), sorted([
            (('buildrequests', '44', 'unclaimed'), msg),
            (('builders', '123', 'buildrequests', '44', 'unclaimed'), msg),
            (('buildsets', '8822', 'builders', '123',
              'buildrequests', '44', 'unclaimed'), msg),
        ]))
    @defer.inlineCallbacks
    def testUnclaimBuildRequestsNoBrids(self):
        unclaimBuildRequestsMock = mock.Mock(return_value=defer.succeed(None))
        yield self.doTestCallthrough('unclaimBuildRequests',
                                     unclaimBuildRequestsMock,
                                     self.rtype.unclaimBuildRequests,
                                     methodargs=[[]],
                                     methodkwargs=dict(),
                                     expectedRes=None,
                                     expectedException=None,
                                     expectedDbApiCalled=False)
    def testSignatureCompleteBuildRequests(self):
        @self.assertArgSpecMatches(
            self.master.data.updates.completeBuildRequests, # fake
            self.rtype.completeBuildRequests) # real
        def completeBuildRequests(self, brids, results, complete_at=None,
                                  _reactor=reactor):
            pass
    @defer.inlineCallbacks
    def testFakeDataCompleteBuildRequests(self):
        res = yield self.master.data.updates.completeBuildRequests(
            [44, 55],
            12,
            complete_at=self.COMPLETE_AT,
            _reactor=reactor)
        self.assertTrue(res)
    @defer.inlineCallbacks
    def testFakeDataCompleteBuildRequestsNoneArgs(self):
        res = yield self.master.data.updates.completeBuildRequests([], 0)
        self.assertTrue(res)
    @defer.inlineCallbacks
    def testCompleteBuildRequests(self):
        completeBuildRequestsMock = mock.Mock(return_value=defer.succeed(None))
        yield self.doTestCallthrough('completeBuildRequests',
                                     completeBuildRequestsMock,
                                     self.rtype.completeBuildRequests,
                                     methodargs=[[46], 12],
                                     methodkwargs=dict(complete_at=self.COMPLETE_AT,
                                                       _reactor=reactor),
                                     expectedRes=True,
                                     expectedException=None)
    @defer.inlineCallbacks
    def testCompleteBuildRequestsNoBrids(self):
        completeBuildRequestsMock = mock.Mock(return_value=defer.succeed(None))
        yield self.doTestCallthrough('completeBuildRequests',
                                     completeBuildRequestsMock,
                                     self.rtype.completeBuildRequests,
                                     methodargs=[[], 0],
                                     methodkwargs=dict(),
                                     expectedRes=True,
                                     expectedException=None,
                                     expectedDbApiCalled=False)
    @defer.inlineCallbacks
    def testCompleteBuildRequestsNotClaimed(self):
        # NotClaimedError is swallowed and reported as a False result
        completeBuildRequestsMock = mock.Mock(
            side_effect=buildrequests.NotClaimedError('oups ! buildrequest not claimed'))
        yield self.doTestCallthrough('completeBuildRequests',
                                     completeBuildRequestsMock,
                                     self.rtype.completeBuildRequests,
                                     methodargs=[[46], 12],
                                     methodkwargs=dict(complete_at=self.COMPLETE_AT,
                                                       _reactor=reactor),
                                     expectedRes=False,
                                     expectedException=None)
    @defer.inlineCallbacks
    def testCompleteBuildRequestsUnknownException(self):
        completeBuildRequestsMock = mock.Mock(
            side_effect=self.dBLayerException('oups ! unknown error'))
        yield self.doTestCallthrough('completeBuildRequests',
                                     completeBuildRequestsMock,
                                     self.rtype.completeBuildRequests,
                                     methodargs=[[46], 12],
                                     methodkwargs=dict(complete_at=self.COMPLETE_AT,
                                                       _reactor=reactor),
                                     expectedRes=None,
                                     expectedException=self.dBLayerException)
    @defer.inlineCallbacks
    def testRebuildBuildrequest(self):
        """Rebuilding request 82 must create a fresh buildset that reuses the
        original sourcestamps and properties."""
        self.master.db.insertTestData([
            fakedb.Builder(id=77, name='builder'),
            fakedb.Master(id=88),
            fakedb.Worker(id=13, name='wrk'),
            fakedb.Buildset(id=8822),
            fakedb.SourceStamp(id=234),
            fakedb.BuildsetSourceStamp(buildsetid=8822, sourcestampid=234),
            fakedb.BuildRequest(id=82, buildsetid=8822, builderid=77),
            fakedb.BuildsetProperty(buildsetid=8822, property_name='prop1',
                                    property_value='["one", "fake1"]'),
            fakedb.BuildsetProperty(buildsetid=8822, property_name='prop2',
                                    property_value='["two", "fake2"]'),
        ])
        buildrequest = yield self.master.data.get(('buildrequests', 82))
        new_bsid, brid_dict = yield self.rtype.rebuildBuildrequest(buildrequest)
        self.assertEqual(list(brid_dict.keys()), [77])
        buildrequest = yield self.master.data.get(('buildrequests', brid_dict[77]))
        # submitted_at is the time of the test, so better not depend on it
        self.assertTrue(buildrequest['submitted_at'] is not None)
        buildrequest['submitted_at'] = None
        self.assertEqual(buildrequest, {'buildrequestid': 1001, 'complete': False, 'waited_for': False,
                                        'claimed_at': None, 'results': -1, 'claimed': False,
                                        'buildsetid': 200, 'complete_at': None, 'submitted_at': None,
                                        'builderid': 77, 'claimed_by_masterid': None, 'priority': 0})
        buildset = yield self.master.data.get(('buildsets', new_bsid))
        oldbuildset = yield self.master.data.get(('buildsets', 8822))
        # assert same sourcestamp
        self.assertEqual(buildset['sourcestamps'], oldbuildset['sourcestamps'])
        buildset['sourcestamps'] = None
        self.assertTrue(buildset['submitted_at'] is not None)
        buildset['submitted_at'] = None
        self.assertEqual(buildset, {'bsid': 200, 'complete_at': None, 'submitted_at': None,
                                    'sourcestamps': None, 'parent_buildid': None, 'results': -1, 'parent_relationship': None, 'reason': u'rebuild', 'external_idstring': u'extid', 'complete': False})
        properties = yield self.master.data.get(('buildsets', new_bsid, 'properties'))
        self.assertEqual(
            properties, {u'prop1': (u'one', u'fake1'), u'prop2': (u'two', u'fake2')})
| gpl-2.0 |
beiko-lab/gengis | bin/Lib/lib2to3/fixes/fix_itertools.py | 55 | 1592 | """ Fixer for itertools.(imap|ifilter|izip) --> (map|filter|zip) and
itertools.ifilterfalse --> itertools.filterfalse (bugs 2360-2363)
imports from itertools are fixed in fix_itertools_import.py
If itertools is imported as something else (ie: import itertools as it;
it.izip(spam, eggs)) method calls will not get fixed.
"""
# Local imports
from .. import fixer_base
from ..fixer_util import Name
class FixItertools(fixer_base.BaseFix):
    """Rewrite calls to itertools.(imap|ifilter|izip) as the builtins
    (map|filter|zip), and strip the leading 'i' from ifilterfalse and
    izip_longest (their 'itertools.' qualifier, if present, is kept; the
    import statements themselves are handled by fix_itertools_imports)."""
    BM_compatible = True
    it_funcs = "('imap'|'ifilter'|'izip'|'izip_longest'|'ifilterfalse')"
    PATTERN = """
              power< it='itertools'
                  trailer<
                     dot='.' func=%(it_funcs)s > trailer< '(' [any] ')' > >
              |
              power< func=%(it_funcs)s trailer< '(' [any] ')' > >
              """ %(locals())
    # Needs to be run after fix_(map|zip|filter)
    run_order = 6
    def transform(self, node, results):
        prefix = None
        func = results['func'][0]
        if ('it' in results and
            func.value not in (u'ifilterfalse', u'izip_longest')):
            # Only imap/ifilter/izip lose the 'itertools.' qualifier.
            dot, it = (results['dot'], results['it'])
            # Remove the 'itertools'
            prefix = it.prefix
            it.remove()
            # Replace the node which contains ('.', 'function') with the
            # function (to be consistent with the second part of the pattern)
            dot.remove()
            func.parent.replace(func)
        # func.value[1:] drops the leading 'i' (imap -> map, izip -> zip, ...)
        prefix = prefix or func.prefix
        func.replace(Name(func.value[1:], prefix=prefix))
| gpl-3.0 |
sinkuri256/python-for-android | python3-alpha/python3-src/Lib/test/regex_tests.py | 59 | 8992 | # Regex test suite and benchmark suite v1.5a2
# The 3 possible outcomes for each pattern: 0, 1 and 2 respectively.
SUCCEED, FAIL, SYNTAX_ERROR = range(3)
# Benchmark suite (needs expansion)
#
# The benchmark suite does not test correctness, just speed. The
# first element of each tuple is the regex pattern; the second is a
# string to match it against. The benchmarking code will embed the
# second string inside several sizes of padding, to test how regex
# matching performs on large strings.
#
# NOTE(review): the patterns appear to use the old "regex" module's
# emacs-style escaped grouping/alternation syntax (\\(, \\|) rather than
# the "re" module's -- confirm before reusing with "re".
benchmarks = [
    ('Python', 'Python'), # Simple text literal
    ('.*Python', 'Python'), # Bad text literal
    ('.*Python.*', 'Python'), # Worse text literal
    ('.*\\(Python\\)', 'Python'), # Bad text literal with grouping
    ('(Python\\|Perl\\|Tcl', 'Perl'), # Alternation
    ('\\(Python\\|Perl\\|Tcl\\)', 'Perl'), # Grouped alternation
    ('\\(Python\\)\\1', 'PythonPython'), # Backreference
    # ('\\([0a-z][a-z]*,\\)+', 'a5,b7,c9,'), # Disable the fastmap optimization
    ('\\([a-z][a-z0-9]*,\\)+', 'a5,b7,c9,') # A few sets
]
# Test suite (for verifying correctness)
#
# The test suite is a list of 5- or 3-tuples. The 5 parts of a
# complete tuple are:
# element 0: a string containing the pattern
# 1: the string to match against the pattern
# 2: the expected result (SUCCEED, FAIL, SYNTAX_ERROR)
# 3: a string that will be eval()'ed to produce a test string.
# This is an arbitrary Python expression; the available
# variables are "found" (the whole match), and "g1", "g2", ...
# up to "g10" contain the contents of each group, or the
# string 'None' if the group wasn't given a value.
# 4: The expected result of evaluating the expression.
# If the two don't match, an error is reported.
#
# If the regex isn't expected to work, the latter two elements can be omitted.
tests = [
    # -- literals, anchors, '.' and basic repetition operators --
    ('abc', 'abc', SUCCEED,
     'found', 'abc'),
    ('abc', 'xbc', FAIL),
    ('abc', 'axc', FAIL),
    ('abc', 'abx', FAIL),
    ('abc', 'xabcy', SUCCEED,
     'found', 'abc'),
    ('abc', 'ababc', SUCCEED,
     'found', 'abc'),
    ('ab*c', 'abc', SUCCEED,
     'found', 'abc'),
    ('ab*bc', 'abc', SUCCEED,
     'found', 'abc'),
    ('ab*bc', 'abbc', SUCCEED,
     'found', 'abbc'),
    ('ab*bc', 'abbbbc', SUCCEED,
     'found', 'abbbbc'),
    ('ab+bc', 'abbc', SUCCEED,
     'found', 'abbc'),
    ('ab+bc', 'abc', FAIL),
    ('ab+bc', 'abq', FAIL),
    ('ab+bc', 'abbbbc', SUCCEED,
     'found', 'abbbbc'),
    ('ab?bc', 'abbc', SUCCEED,
     'found', 'abbc'),
    ('ab?bc', 'abc', SUCCEED,
     'found', 'abc'),
    ('ab?bc', 'abbbbc', FAIL),
    ('ab?c', 'abc', SUCCEED,
     'found', 'abc'),
    ('^abc$', 'abc', SUCCEED,
     'found', 'abc'),
    ('^abc$', 'abcc', FAIL),
    ('^abc', 'abcc', SUCCEED,
     'found', 'abc'),
    ('^abc$', 'aabc', FAIL),
    ('abc$', 'aabc', SUCCEED,
     'found', 'abc'),
    ('^', 'abc', SUCCEED,
     'found+"-"', '-'),
    ('$', 'abc', SUCCEED,
     'found+"-"', '-'),
    ('a.c', 'abc', SUCCEED,
     'found', 'abc'),
    ('a.c', 'axc', SUCCEED,
     'found', 'axc'),
    ('a.*c', 'axyzc', SUCCEED,
     'found', 'axyzc'),
    ('a.*c', 'axyzd', FAIL),
    # -- character classes, including syntax-error cases --
    ('a[bc]d', 'abc', FAIL),
    ('a[bc]d', 'abd', SUCCEED,
     'found', 'abd'),
    ('a[b-d]e', 'abd', FAIL),
    ('a[b-d]e', 'ace', SUCCEED,
     'found', 'ace'),
    ('a[b-d]', 'aac', SUCCEED,
     'found', 'ac'),
    ('a[-b]', 'a-', SUCCEED,
     'found', 'a-'),
    ('a[b-]', 'a-', SUCCEED,
     'found', 'a-'),
    ('a[]b', '-', SYNTAX_ERROR),
    ('a[', '-', SYNTAX_ERROR),
    ('a\\', '-', SYNTAX_ERROR),
    ('abc\\)', '-', SYNTAX_ERROR),
    ('\\(abc', '-', SYNTAX_ERROR),
    ('a]', 'a]', SUCCEED,
     'found', 'a]'),
    ('a[]]b', 'a]b', SUCCEED,
     'found', 'a]b'),
    ('a[^bc]d', 'aed', SUCCEED,
     'found', 'aed'),
    ('a[^bc]d', 'abd', FAIL),
    ('a[^-b]c', 'adc', SUCCEED,
     'found', 'adc'),
    ('a[^-b]c', 'a-c', FAIL),
    ('a[^]b]c', 'a]c', FAIL),
    ('a[^]b]c', 'adc', SUCCEED,
     'found', 'adc'),
    # -- word boundaries --
    ('\\ba\\b', 'a-', SUCCEED,
     '"-"', '-'),
    ('\\ba\\b', '-a', SUCCEED,
     '"-"', '-'),
    ('\\ba\\b', '-a-', SUCCEED,
     '"-"', '-'),
    ('\\by\\b', 'xy', FAIL),
    ('\\by\\b', 'yz', FAIL),
    ('\\by\\b', 'xyz', FAIL),
    # -- grouping, alternation and backreferences --
    ('ab\\|cd', 'abc', SUCCEED,
     'found', 'ab'),
    ('ab\\|cd', 'abcd', SUCCEED,
     'found', 'ab'),
    ('\\(\\)ef', 'def', SUCCEED,
     'found+"-"+g1', 'ef-'),
    ('$b', 'b', FAIL),
    ('a(b', 'a(b', SUCCEED,
     'found+"-"+g1', 'a(b-None'),
    ('a(*b', 'ab', SUCCEED,
     'found', 'ab'),
    ('a(*b', 'a((b', SUCCEED,
     'found', 'a((b'),
    ('a\\\\b', 'a\\b', SUCCEED,
     'found', 'a\\b'),
    ('\\(\\(a\\)\\)', 'abc', SUCCEED,
     'found+"-"+g1+"-"+g2', 'a-a-a'),
    ('\\(a\\)b\\(c\\)', 'abc', SUCCEED,
     'found+"-"+g1+"-"+g2', 'abc-a-c'),
    ('a+b+c', 'aabbabc', SUCCEED,
     'found', 'abc'),
    ('\\(a+\\|b\\)*', 'ab', SUCCEED,
     'found+"-"+g1', 'ab-b'),
    ('\\(a+\\|b\\)+', 'ab', SUCCEED,
     'found+"-"+g1', 'ab-b'),
    ('\\(a+\\|b\\)?', 'ab', SUCCEED,
     'found+"-"+g1', 'a-a'),
    ('\\)\\(', '-', SYNTAX_ERROR),
    ('[^ab]*', 'cde', SUCCEED,
     'found', 'cde'),
    ('abc', '', FAIL),
    ('a*', '', SUCCEED,
     'found', ''),
    ('a\\|b\\|c\\|d\\|e', 'e', SUCCEED,
     'found', 'e'),
    ('\\(a\\|b\\|c\\|d\\|e\\)f', 'ef', SUCCEED,
     'found+"-"+g1', 'ef-e'),
    ('abcd*efg', 'abcdefg', SUCCEED,
     'found', 'abcdefg'),
    ('ab*', 'xabyabbbz', SUCCEED,
     'found', 'ab'),
    ('ab*', 'xayabbbz', SUCCEED,
     'found', 'a'),
    ('\\(ab\\|cd\\)e', 'abcde', SUCCEED,
     'found+"-"+g1', 'cde-cd'),
    ('[abhgefdc]ij', 'hij', SUCCEED,
     'found', 'hij'),
    ('^\\(ab\\|cd\\)e', 'abcde', FAIL,
     'xg1y', 'xy'),
    ('\\(abc\\|\\)ef', 'abcdef', SUCCEED,
     'found+"-"+g1', 'ef-'),
    ('\\(a\\|b\\)c*d', 'abcd', SUCCEED,
     'found+"-"+g1', 'bcd-b'),
    ('\\(ab\\|ab*\\)bc', 'abc', SUCCEED,
     'found+"-"+g1', 'abc-a'),
    ('a\\([bc]*\\)c*', 'abc', SUCCEED,
     'found+"-"+g1', 'abc-bc'),
    ('a\\([bc]*\\)\\(c*d\\)', 'abcd', SUCCEED,
     'found+"-"+g1+"-"+g2', 'abcd-bc-d'),
    ('a\\([bc]+\\)\\(c*d\\)', 'abcd', SUCCEED,
     'found+"-"+g1+"-"+g2', 'abcd-bc-d'),
    ('a\\([bc]*\\)\\(c+d\\)', 'abcd', SUCCEED,
     'found+"-"+g1+"-"+g2', 'abcd-b-cd'),
    ('a[bcd]*dcdcde', 'adcdcde', SUCCEED,
     'found', 'adcdcde'),
    ('a[bcd]+dcdcde', 'adcdcde', FAIL),
    ('\\(ab\\|a\\)b*c', 'abc', SUCCEED,
     'found+"-"+g1', 'abc-ab'),
    ('\\(\\(a\\)\\(b\\)c\\)\\(d\\)', 'abcd', SUCCEED,
     'g1+"-"+g2+"-"+g3+"-"+g4', 'abc-a-b-d'),
    ('[a-zA-Z_][a-zA-Z0-9_]*', 'alpha', SUCCEED,
     'found', 'alpha'),
    ('^a\\(bc+\\|b[eh]\\)g\\|.h$', 'abh', SUCCEED,
     'found+"-"+g1', 'bh-None'),
    ('\\(bc+d$\\|ef*g.\\|h?i\\(j\\|k\\)\\)', 'effgz', SUCCEED,
     'found+"-"+g1+"-"+g2', 'effgz-effgz-None'),
    ('\\(bc+d$\\|ef*g.\\|h?i\\(j\\|k\\)\\)', 'ij', SUCCEED,
     'found+"-"+g1+"-"+g2', 'ij-ij-j'),
    ('\\(bc+d$\\|ef*g.\\|h?i\\(j\\|k\\)\\)', 'effg', FAIL),
    ('\\(bc+d$\\|ef*g.\\|h?i\\(j\\|k\\)\\)', 'bcdd', FAIL),
    ('\\(bc+d$\\|ef*g.\\|h?i\\(j\\|k\\)\\)', 'reffgz', SUCCEED,
     'found+"-"+g1+"-"+g2', 'effgz-effgz-None'),
    ('\\(\\(\\(\\(\\(\\(\\(\\(\\(a\\)\\)\\)\\)\\)\\)\\)\\)\\)', 'a', SUCCEED,
     'found', 'a'),
    ('multiple words of text', 'uh-uh', FAIL),
    ('multiple words', 'multiple words, yeah', SUCCEED,
     'found', 'multiple words'),
    ('\\(.*\\)c\\(.*\\)', 'abcde', SUCCEED,
     'found+"-"+g1+"-"+g2', 'abcde-ab-de'),
    ('(\\(.*\\), \\(.*\\))', '(a, b)', SUCCEED,
     'g2+"-"+g1', 'b-a'),
    ('[k]', 'ab', FAIL),
    ('a[-]?c', 'ac', SUCCEED,
     'found', 'ac'),
    ('\\(abc\\)\\1', 'abcabc', SUCCEED,
     'g1', 'abc'),
    ('\\([a-c]*\\)\\1', 'abcabc', SUCCEED,
     'g1', 'abc'),
    ('^\\(.+\\)?B', 'AB', SUCCEED,
     'g1', 'A'),
    ('\\(a+\\).\\1$', 'aaaaa', SUCCEED,
     'found+"-"+g1', 'aaaaa-aa'),
    ('^\\(a+\\).\\1$', 'aaaa', FAIL),
    ('\\(abc\\)\\1', 'abcabc', SUCCEED,
     'found+"-"+g1', 'abcabc-abc'),
    ('\\([a-c]+\\)\\1', 'abcabc', SUCCEED,
     'found+"-"+g1', 'abcabc-abc'),
    ('\\(a\\)\\1', 'aa', SUCCEED,
     'found+"-"+g1', 'aa-a'),
    ('\\(a+\\)\\1', 'aa', SUCCEED,
     'found+"-"+g1', 'aa-a'),
    ('\\(a+\\)+\\1', 'aa', SUCCEED,
     'found+"-"+g1', 'aa-a'),
    ('\\(a\\).+\\1', 'aba', SUCCEED,
     'found+"-"+g1', 'aba-a'),
    ('\\(a\\)ba*\\1', 'aba', SUCCEED,
     'found+"-"+g1', 'aba-a'),
    ('\\(aa\\|a\\)a\\1$', 'aaa', SUCCEED,
     'found+"-"+g1', 'aaa-a'),
    ('\\(a\\|aa\\)a\\1$', 'aaa', SUCCEED,
     'found+"-"+g1', 'aaa-a'),
    ('\\(a+\\)a\\1$', 'aaa', SUCCEED,
     'found+"-"+g1', 'aaa-a'),
    ('\\([abc]*\\)\\1', 'abcabc', SUCCEED,
     'found+"-"+g1', 'abcabc-abc'),
    ('\\(a\\)\\(b\\)c\\|ab', 'ab', SUCCEED,
     'found+"-"+g1+"-"+g2', 'ab-None-None'),
    ('\\(a\\)+x', 'aaax', SUCCEED,
     'found+"-"+g1', 'aaax-a'),
    ('\\([ac]\\)+x', 'aacx', SUCCEED,
     'found+"-"+g1', 'aacx-c'),
    ('\\([^/]*/\\)*sub1/', 'd:msgs/tdir/sub1/trial/away.cpp', SUCCEED,
     'found+"-"+g1', 'd:msgs/tdir/sub1/-tdir/'),
    ('\\([^.]*\\)\\.\\([^:]*\\):[T ]+\\(.*\\)', 'track1.title:TBlah blah blah', SUCCEED,
     'found+"-"+g1+"-"+g2+"-"+g3', 'track1.title:TBlah blah blah-track1-title-Blah blah blah'),
    ('\\([^N]*N\\)+', 'abNNxyzN', SUCCEED,
     'found+"-"+g1', 'abNNxyzN-xyzN'),
    ('\\([^N]*N\\)+', 'abNNxyz', SUCCEED,
     'found+"-"+g1', 'abNN-N'),
    ('\\([abc]*\\)x', 'abcx', SUCCEED,
     'found+"-"+g1', 'abcx-abc'),
    ('\\([abc]*\\)x', 'abc', FAIL),
    ('\\([xyz]*\\)x', 'abcx', SUCCEED,
     'found+"-"+g1', 'x-'),
    ('\\(a\\)+b\\|aac', 'aac', SUCCEED,
     'found+"-"+g1', 'aac-None'),
    # -- emacs-style word-start/word-end operators (\< and \>) --
    ('\<a', 'a', SUCCEED, 'found', 'a'),
    ('\<a', '!', FAIL),
    ('a\<b', 'ab', FAIL),
    ('a\>', 'ab', FAIL),
    ('a\>', 'a!', SUCCEED, 'found', 'a'),
    ('a\>', 'a', SUCCEED, 'found', 'a'),
]
| apache-2.0 |
ltilve/chromium | tools/mac/symbolicate_crash.py | 178 | 19493 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This script can take an Apple-style CrashReporter log and symbolicate it. This
is useful for when a user's reports aren't being uploaded, for example.
Only versions 6, 7, 8, and 9 reports are supported. For more information on the
file format, reference this document:
TN2123 <http://developer.apple.com/library/mac/#technotes/tn2004/tn2123.html>
Information on symbolication was gleaned from:
<http://developer.apple.com/tools/xcode/symbolizingcrashdumps.html>
"""
import optparse
import os.path
import re
import subprocess
import sys
# Maps binary image identifiers to binary names (minus the .dSYM portion) found
# in the archive. These are the only objects that will be looked up.
# NOTE(review): the keys look like CFBundleIdentifiers as they appear in the
# report's "Binary Images" section -- confirm against _ParseBinaryImages.
SYMBOL_IMAGE_MAP = {
  'com.google.Chrome': 'Google Chrome.app',
  'com.google.Chrome.framework': 'Google Chrome Framework.framework',
  'com.google.Chrome.helper': 'Google Chrome Helper.app'
}
class CrashReport(object):
"""A parsed representation of an Apple CrashReport text file."""
  def __init__(self, file_name):
    """Reads and parses the CrashReporter log at |file_name|.

    Raises:
      Exception: if the report's version is not one of the supported ones
          (6, 7, 8, 9).
    """
    super(CrashReport, self).__init__()
    # Key/value pairs from the report header (e.g. 'Report Version').
    self.report_info = {}
    # One entry per parsed thread stack.
    self.threads = []
    # Filled in by _ParseBinaryImages().
    self._binary_images = {}
    fd = open(file_name, 'r')
    self._ParseHeader(fd)
    # Try and get the report version. If it's not a version we handle, abort.
    self.report_version = int(self.report_info['Report Version'])
    # Version 6: 10.5 and 10.6 crash report
    # Version 7: 10.6 spindump report
    # Version 8: 10.7 spindump report
    # Version 9: 10.7 crash report
    valid_versions = (6, 7, 8, 9)
    if self.report_version not in valid_versions:
      raise Exception("Only crash reports of versions %s are accepted." %
                      str(valid_versions))
    # If this is a spindump (version 7 or 8 report), use a special parser. The
    # format is undocumented, but is similar to version 6. However, the spindump
    # report contains user and kernel stacks for every process on the system.
    if self.report_version == 7 or self.report_version == 8:
      self._ParseSpindumpStack(fd)
    else:
      self._ParseStack(fd)
    self._ParseBinaryImages(fd)
    fd.close()
  def Symbolicate(self, symbol_path):
    """Symbolicates a crash report stack trace.

    Args:
      symbol_path: Path to the directory holding the dSYM bundles for the
          images listed in SYMBOL_IMAGE_MAP.
    """
    # In order to be efficient, collect all the offsets that will be passed to
    # atos by the image name.
    offsets_by_image = self._CollectAddressesForImages(SYMBOL_IMAGE_MAP.keys())
    # For each image, run atos with the list of addresses.
    for image_name, addresses in offsets_by_image.items():
      # If this image was not loaded or is in no stacks, skip.
      if image_name not in self._binary_images or not len(addresses):
        continue
      # Combine the |image_name| and |symbol_path| into the path of the dSYM.
      dsym_file = self._GetDSymPath(symbol_path, image_name)
      # From the list of 2-Tuples of (frame, address), create a list of just
      # addresses.
      address_list = map(lambda x: x[1], addresses)
      # Look up the load address of the image.
      binary_base = self._binary_images[image_name][0]
      # This returns a list of just symbols. The indices will match up with the
      # list of |addresses|.
      symbol_names = self._RunAtos(binary_base, dsym_file, address_list)
      if not symbol_names:
        print 'Error loading symbols for ' + image_name
        continue
      # Attaches a list of symbol names to stack frames. This assumes that the
      # order of |addresses| has stayed the same as |symbol_names|.
      self._AddSymbolsToFrames(symbol_names, addresses)
def _ParseHeader(self, fd):
"""Parses the header section of a crash report, which contains the OS and
application version information."""
# The header is made up of different sections, depending on the type of
# report and the report version. Almost all have a format of a key and
# value separated by a colon. Accumulate all of these artifacts into a
# dictionary until the first thread stack is reached.
thread_re = re.compile('^[ \t]*Thread ([a-f0-9]+)')
line = ''
while not thread_re.match(line):
# Skip blank lines. There are typically three or four sections separated
# by newlines in the header.
line = line.strip()
if line:
parts = line.split(':', 1)
# Certain lines in different report versions don't follow the key-value
# format, so skip them.
if len(parts) == 2:
# There's a varying amount of space padding after the ':' to align all
# the values; strip that.
self.report_info[parts[0]] = parts[1].lstrip()
line = fd.readline()
# When this loop exits, the header has been read in full. However, the first
# thread stack heading has been read past. Seek backwards from the current
# position by the length of the line so that it is re-read when
# _ParseStack() is entered.
fd.seek(-len(line), os.SEEK_CUR)
def _ParseStack(self, fd):
"""Parses the stack dump of a crash report and creates a list of threads
and their stack traces."""
# Compile a regex that matches the start of a thread stack. Note that this
# must be specific to not include the thread state section, which comes
# right after all the stack traces.
line_re = re.compile('^Thread ([0-9]+)( Crashed)?:(.*)')
# On entry into this function, the fd has been walked up to the "Thread 0"
# line.
line = fd.readline().rstrip()
in_stack = False
thread = None
while line_re.match(line) or in_stack:
# Check for start of the thread stack.
matches = line_re.match(line)
if not line.strip():
# A blank line indicates a break in the thread stack.
in_stack = False
elif matches:
# If this is the start of a thread stack, create the CrashThread.
in_stack = True
thread = CrashThread(matches.group(1))
thread.name = matches.group(3)
thread.did_crash = matches.group(2) != None
self.threads.append(thread)
else:
# All other lines are stack frames.
thread.stack.append(self._ParseStackFrame(line))
# Read the next line.
line = fd.readline()
def _ParseStackFrame(self, line):
"""Takes in a single line of text and transforms it into a StackFrame."""
frame = StackFrame(line)
# A stack frame is in the format of:
# |<frame-number> <binary-image> 0x<address> <symbol> <offset>|.
regex = '^([0-9]+) +(.+)[ \t]+(0x[0-9a-f]+) (.*) \+ ([0-9]+)$'
matches = re.match(regex, line)
if matches is None:
return frame
# Create a stack frame with the information extracted from the regex.
frame.frame_id = matches.group(1)
frame.image = matches.group(2)
frame.address = int(matches.group(3), 0) # Convert HEX to an int.
frame.original_symbol = matches.group(4)
frame.offset = matches.group(5)
frame.line = None
return frame
def _ParseSpindumpStack(self, fd):
"""Parses a spindump stack report. In this format, each thread stack has
both a user and kernel trace. Only the user traces are symbolicated."""
# The stack trace begins with the thread header, which is identified by a
# HEX number. The thread names appear to be incorrect in spindumps.
user_thread_re = re.compile('^ Thread ([0-9a-fx]+)')
# When this method is called, the fd has been walked right up to the first
# line.
line = fd.readline()
in_user_stack = False
in_kernel_stack = False
thread = None
frame_id = 0
while user_thread_re.match(line) or in_user_stack or in_kernel_stack:
# Check for the start of a thread.
matches = user_thread_re.match(line)
if not line.strip():
# A blank line indicates the start of a new thread. The blank line comes
# after the kernel stack before a new thread header.
in_kernel_stack = False
elif matches:
# This is the start of a thread header. The next line is the heading for
# the user stack, followed by the actual trace.
thread = CrashThread(matches.group(1))
frame_id = 0
self.threads.append(thread)
in_user_stack = True
line = fd.readline() # Read past the 'User stack:' header.
elif line.startswith(' Kernel stack:'):
# The kernel stack header comes immediately after the last frame (really
# the top frame) in the user stack, without a blank line.
in_user_stack = False
in_kernel_stack = True
elif in_user_stack:
# If this is a line while in the user stack, parse it as a stack frame.
thread.stack.append(self._ParseSpindumpStackFrame(line))
# Loop with the next line.
line = fd.readline()
# When the loop exits, the file has been read through the 'Binary images:'
# header. Seek backwards so that _ParseBinaryImages() does the right thing.
fd.seek(-len(line), os.SEEK_CUR)
  def _ParseSpindumpStackFrame(self, line):
    """Parses a spindump-style stackframe.

    Returns a StackFrame. If neither known layout matches, the frame keeps
    the raw (stripped) line so __repr__ can still display it.
    """
    frame = StackFrame(line)
    # The format of the frame is either:
    # A: |<space><steps> <symbol> + <offset> (in <image-name>) [<address>]|
    # B: |<space><steps> ??? (in <image-name> + <offset>) [<address>]|
    regex_a = '^([ ]+[0-9]+) (.*) \+ ([0-9]+) \(in (.*)\) \[(0x[0-9a-f]+)\]'
    regex_b = '^([ ]+[0-9]+) \?\?\?( \(in (.*) \+ ([0-9]+)\))? \[(0x[0-9a-f]+)\]'
    # Create the stack frame with the information extracted from the regex.
    matches = re.match(regex_a, line)
    if matches:
      frame.frame_id = matches.group(1)[4:]  # Remove some leading spaces.
      frame.original_symbol = matches.group(2)
      frame.offset = matches.group(3)
      frame.image = matches.group(4)
      frame.address = int(matches.group(5), 0)
      # Raw line no longer needed; symbolicated fields will be displayed.
      frame.line = None
      return frame
    # If pattern A didn't match (which it will most of the time), try B.
    matches = re.match(regex_b, line)
    if matches:
      frame.frame_id = matches.group(1)[4:]  # Remove some leading spaces.
      # Note the different group numbering: group 2 is the whole optional
      # '(in ... + ...)' clause, so image/offset are groups 3 and 4. Both
      # may be None when that clause is absent.
      frame.image = matches.group(3)
      frame.offset = matches.group(4)
      frame.address = int(matches.group(5), 0)
      frame.line = None
      return frame
    # Otherwise, this frame could not be matched and just use the raw input.
    frame.line = frame.line.strip()
    return frame
def _ParseBinaryImages(self, fd):
"""Parses out the binary images section in order to get the load offset."""
# The parser skips some sections, so advance until the "Binary Images"
# header is reached.
while not fd.readline().lstrip().startswith("Binary Images:"): pass
# Create a regex to match the lines of format:
# |0x<start> - 0x<end> <binary-image> <version> (<version>) <<UUID>> <path>|
image_re = re.compile(
'[ ]*(0x[0-9a-f]+) -[ \t]+(0x[0-9a-f]+) [+ ]([a-zA-Z0-9._\-]+)')
# This section is in this format:
# |<start address> - <end address> <image name>|.
while True:
line = fd.readline()
if not line.strip():
# End when a blank line is hit.
return
# Match the line to the regex.
match = image_re.match(line)
if match:
# Store the offsets by image name so it can be referenced during
# symbolication. These are hex numbers with leading '0x', so int() can
# convert them to decimal if base=0.
address_range = (int(match.group(1), 0), int(match.group(2), 0))
self._binary_images[match.group(3)] = address_range
def _CollectAddressesForImages(self, images):
"""Iterates all the threads and stack frames and all the stack frames that
are in a list of binary |images|. The result is a dictionary, keyed by the
image name that maps to a list of tuples. Each is a 2-Tuple of
(stack_frame, address)"""
# Create the collection and initialize it with empty lists for each image.
collection = {}
for image in images:
collection[image] = []
# Perform the iteration.
for thread in self.threads:
for frame in thread.stack:
image_name = self._ImageForAddress(frame.address)
if image_name in images:
# Replace the image name in the frame in case it was elided.
frame.image = image_name
collection[frame.image].append((frame, frame.address))
# Return the result.
return collection
def _ImageForAddress(self, address):
"""Given a PC address, returns the bundle identifier of the image in which
the address resides."""
for image_name, address_range in self._binary_images.items():
if address >= address_range[0] and address <= address_range[1]:
return image_name
return None
def _GetDSymPath(self, base_path, image_name):
"""Takes a base path for the symbols and an image name. It looks the name up
in SYMBOL_IMAGE_MAP and creates a full path to the dSYM in the bundle."""
image_file = SYMBOL_IMAGE_MAP[image_name]
return os.path.join(base_path, image_file + '.dSYM', 'Contents',
'Resources', 'DWARF',
os.path.splitext(image_file)[0]) # Chop off the extension.
  def _RunAtos(self, load_address, dsym_file, addresses):
    """Runs the atos with the provided arguments. |addresses| is used as stdin.
    Returns a list of symbol information in the same order as |addresses|.

    Returns None if atos exits with a non-zero status.
    """
    args = ['atos', '-l', str(load_address), '-o', dsym_file]
    # Get the arch type. This is of the format |X86 (Native)|.
    if 'Code Type' in self.report_info:
      arch = self.report_info['Code Type'].lower().split(' ')
      if len(arch) == 2:
        arch = arch[0]
        if arch == 'x86':
          # The crash report refers to i386 as x86, but atos doesn't know what
          # that is.
          arch = 'i386'
        args.extend(['-arch', arch])
    proc = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    # Feed the hex addresses, space-separated, on atos's stdin.
    addresses = map(hex, addresses)
    # NOTE(review): stderr is not piped, so |stderr| here is always None;
    # atos error output goes straight to this process's stderr.
    (stdout, stderr) = proc.communicate(' '.join(addresses))
    if proc.returncode:
      return None
    # One output line per input address, in the same order.
    return stdout.rstrip().split('\n')
def _AddSymbolsToFrames(self, symbols, address_tuples):
"""Takes a single value (the list) from _CollectAddressesForImages and does
a smart-zip with the data returned by atos in |symbols|. Note that the
indices must match for this to succeed."""
if len(symbols) != len(address_tuples):
print 'symbols do not match'
# Each line of output from atos is in this format:
# |<symbol> (in <image>) (<file>:<line>)|.
line_regex = re.compile('(.+) \(in (.+)\) (\((.+):([0-9]+)\))?')
# Zip the two data sets together.
for i in range(len(symbols)):
symbol_parts = line_regex.match(symbols[i])
if not symbol_parts:
continue # Error.
frame = address_tuples[i][0]
frame.symbol = symbol_parts.group(1)
frame.image = symbol_parts.group(2)
frame.file_name = symbol_parts.group(4)
frame.line_number = symbol_parts.group(5)
class CrashThread(object):
  """Holds the stack trace of one thread from a crash report."""

  def __init__(self, thread_id):
    super(CrashThread, self).__init__()
    self.thread_id = thread_id
    # Optional human-readable thread name, filled in by the parser.
    self.name = None
    # True when the report marked this thread as the crashing one.
    self.did_crash = False
    # List of StackFrame objects, top of the stack first.
    self.stack = []

  def __repr__(self):
    suffix = ': ' + self.name if self.name else ''
    frames = '\n'.join(map(str, self.stack))
    return 'Thread ' + self.thread_id + suffix + '\n' + frames
class StackFrame(object):
  """One frame of a CrashThread's stack."""

  def __init__(self, line):
    super(StackFrame, self).__init__()
    # The raw report line. Cleared (set to None) once the frame has been
    # successfully parsed or symbolicated.
    self.line = line
    self.frame_id = 0
    self.image = None
    self.address = 0x0
    self.original_symbol = None
    self.offset = 0x0
    # The following members are set after symbolication.
    self.symbol = None
    self.file_name = None
    self.line_number = 0

  def __repr__(self):
    if self.line:
      # Parsing/symbolication never happened; fall back to the raw line.
      return ' %s' % self.line
    # Prefer file:line information when the symbolicator provided it.
    if self.file_name:
      location = ' - %s:%s' % (self.file_name, self.line_number)
    else:
      location = ' + %s' % self.offset
    # Likewise prefer the symbolicated name over the report's own symbol.
    symbol = self.symbol if self.symbol else self.original_symbol
    return ' %s\t0x%x\t[%s\t%s]\t%s' % (self.frame_id, self.address,
                                        self.image, location, symbol)
def PrettyPrintReport(report):
  """Takes a crash report and prints it like the crash server would.

  Prints the process/version header, crash metadata when present, and then
  every thread's stack trace to stdout.
  """
  print 'Process : ' + report.report_info['Process']
  print 'Version : ' + report.report_info['Version']
  print 'Date : ' + report.report_info['Date/Time']
  print 'OS Version : ' + report.report_info['OS Version']
  print
  # These two header fields are optional depending on the report type.
  if 'Crashed Thread' in report.report_info:
    print 'Crashed Thread : ' + report.report_info['Crashed Thread']
    print
  if 'Event' in report.report_info:
    print 'Event : ' + report.report_info['Event']
    print
  for thread in report.threads:
    print
    if thread.did_crash:
      # Summarize the exception on the crashing thread's heading line.
      exc_type = report.report_info['Exception Type'].split(' ')[0]
      exc_code = report.report_info['Exception Codes'].replace('at', '@')
      print '*CRASHED* ( ' + exc_type + ' / ' + exc_code + ' )'
    # Version 7 reports have spindump-style output (with a stepped stack trace),
    # so remove the first tab to get better alignment.
    if report.report_version == 7:
      for line in repr(thread).split('\n'):
        print line.replace('\t', ' ', 1)
    else:
      print thread
def Main(args):
  """Program main.

  Parses the command line, loads and parses the crash report, locates the
  symbol path, then symbolicates and pretty-prints the report.

  Args:
    args: sys.argv-style argument list (args[0] is the program name).

  Returns:
    Process exit code: 0 on success, 1 on bad usage, 2 on missing symbols.
  """
  parser = optparse.OptionParser(
      usage='%prog [options] symbol_path crash_report',
      description='This will parse and symbolicate an Apple CrashReporter v6-9 '
          'file.')
  # Help text fixed: previously read 'a dSYM files' and 'verison'.
  parser.add_option('-s', '--std-path', action='store_true', dest='std_path',
                    help='With this flag, the symbol_path is a containing '
                         'directory, in which dSYM files are stored in a '
                         'directory named by the version. Example: '
                         '[symbolicate_crash.py -s ./symbols/ report.crash] will '
                         'look for dSYMs in ./symbols/15.0.666.0/ if the report is '
                         'from that version.')
  (options, args) = parser.parse_args(args[1:])
  # Check that we have something to symbolicate.
  if len(args) != 2:
    parser.print_usage()
    return 1
  report = CrashReport(args[1])
  symbol_path = None
  # If not using the standard layout, this is a full path to the symbols.
  if not options.std_path:
    symbol_path = args[0]
  # Otherwise, use the report version to locate symbols in a directory.
  else:
    # This is in the format of |M.N.B.P (B.P)|. Get just the part before the
    # space.
    chrome_version = report.report_info['Version'].split(' ')[0]
    symbol_path = os.path.join(args[0], chrome_version)
  # Check that the symbols exist.
  if not os.path.isdir(symbol_path):
    print >>sys.stderr, 'Symbol path %s is not a directory' % symbol_path
    return 2
  print >>sys.stderr, 'Using symbols from ' + symbol_path
  print >>sys.stderr, '=' * 80
  report.Symbolicate(symbol_path)
  PrettyPrintReport(report)
  return 0
# Script entry point: propagate Main's return value as the exit status.
if __name__ == '__main__':
  sys.exit(Main(sys.argv))
| bsd-3-clause |
hurricanerix/swift | test/functional/test_object.py | 5 | 61991 | #!/usr/bin/python
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import json
import unittest2
from unittest2 import SkipTest
from uuid import uuid4
import time
from six.moves import range
from test.functional import check_response, retry, requires_acls, \
requires_policies
import test.functional as tf
def setUpModule():
    """Initializes the functional-test package (config, auth) once before any
    test in this module runs."""
    tf.setup_package()
def tearDownModule():
    """Releases the functional-test package resources after all tests in this
    module have run."""
    tf.teardown_package()
class TestObject(unittest2.TestCase):
    def setUp(self):
        """Creates a fresh container in accounts 1 and 2 and uploads one
        object (body 'test') into account 1's container."""
        if tf.skip:
            raise SkipTest
        self.container = uuid4().hex
        # Track every container created so tearDown can remove them all.
        self.containers = []
        self._create_container(self.container)
        self._create_container(self.container, use_account=2)
        self.obj = uuid4().hex
        def put(url, token, parsed, conn):
            conn.request('PUT', '%s/%s/%s' % (
                parsed.path, self.container, self.obj), 'test',
                {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
    def _create_container(self, name=None, headers=None, use_account=1):
        """Creates a container (random name unless given), records it for
        cleanup, and returns its name.

        Args:
            name: Container name; a random hex name is generated if None.
            headers: Extra headers for the PUT (e.g. storage policy or ACLs).
            use_account: Which configured test account performs the PUT.
        """
        if not name:
            name = uuid4().hex
        self.containers.append(name)
        headers = headers or {}
        def put(url, token, parsed, conn, name):
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('PUT', parsed.path + '/' + name, '',
                         new_headers)
            return check_response(conn)
        resp = retry(put, name, use_account=use_account)
        resp.read()
        self.assertEqual(resp.status, 201)
        # With keystoneauth we need the accounts to have had the project
        # domain id persisted as sysmeta prior to testing ACLs. This may
        # not be the case if, for example, the account was created using
        # a request with reseller_admin role, when project domain id may
        # not have been known. So we ensure that the project domain id is
        # in sysmeta by making a POST to the accounts using an admin role.
        def post(url, token, parsed, conn):
            conn.request('POST', parsed.path, '', {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(post, use_account=use_account)
        resp.read()
        self.assertEqual(resp.status, 204)
        return name
    def tearDown(self):
        """Deletes every object in each container created by the test, then
        deletes the containers themselves."""
        if tf.skip:
            raise SkipTest
        # get list of objects in container
        def get(url, token, parsed, conn, container):
            conn.request(
                'GET', parsed.path + '/' + container + '?format=json', '',
                {'X-Auth-Token': token})
            return check_response(conn)
        # delete an object
        def delete(url, token, parsed, conn, container, obj):
            conn.request(
                'DELETE', '/'.join([parsed.path, container, obj['name']]), '',
                {'X-Auth-Token': token})
            return check_response(conn)
        for container in self.containers:
            # Keep listing and deleting until the container is empty (the
            # listing is paginated) or the container itself is gone.
            while True:
                resp = retry(get, container)
                body = resp.read()
                if resp.status == 404:
                    break
                self.assertEqual(resp.status // 100, 2, resp.status)
                objs = json.loads(body)
                if not objs:
                    break
                for obj in objs:
                    resp = retry(delete, container, obj)
                    resp.read()
                    self.assertIn(resp.status, (204, 404))
        # delete the container
        # (this re-binds |delete| with a container-level signature)
        def delete(url, token, parsed, conn, name):
            conn.request('DELETE', parsed.path + '/' + name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        for container in self.containers:
            resp = retry(delete, container)
            resp.read()
            self.assertIn(resp.status, (204, 404))
    def test_metadata(self):
        """Exercises object user metadata via PUT and POST: empty metadata,
        replacement semantics, sysmeta filtering, and non-ASCII values.

        Note: |put| and |post| read |req_metadata| at call time (late
        binding), so re-assigning it between retries changes what is sent.
        """
        obj = 'test_metadata'
        req_metadata = {}
        def put(url, token, parsed, conn):
            headers = {'X-Auth-Token': token}
            headers.update(req_metadata)
            conn.request('PUT', '%s/%s/%s' % (
                parsed.path, self.container, obj
            ), '', headers)
            return check_response(conn)
        def get(url, token, parsed, conn):
            conn.request(
                'GET',
                '%s/%s/%s' % (parsed.path, self.container, obj),
                '',
                {'X-Auth-Token': token})
            return check_response(conn)
        def post(url, token, parsed, conn):
            headers = {'X-Auth-Token': token}
            headers.update(req_metadata)
            conn.request('POST', '%s/%s/%s' % (
                parsed.path, self.container, obj
            ), '', headers)
            return check_response(conn)
        def metadata(resp):
            # Extract only the metadata-bearing headers from a response.
            metadata = {}
            for k, v in resp.headers.items():
                if 'meta' in k.lower():
                    metadata[k] = v
            return metadata
        # empty put
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
        resp = retry(get)
        self.assertEqual('', resp.read())
        self.assertEqual(resp.status, 200)
        self.assertEqual(metadata(resp), {})
        # empty post
        resp = retry(post)
        resp.read()
        self.assertEqual(resp.status, 202)
        resp = retry(get)
        self.assertEqual('', resp.read())
        self.assertEqual(resp.status, 200)
        self.assertEqual(metadata(resp), {})
        # metadata put
        req_metadata = {
            'x-object-meta-Color': 'blUe',
            'X-Object-Meta-food': 'PizZa',
        }
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
        resp = retry(get)
        self.assertEqual('', resp.read())
        self.assertEqual(resp.status, 200)
        # Header names are canonicalized; values keep their case.
        self.assertEqual(metadata(resp), {
            'X-Object-Meta-Color': 'blUe',
            'X-Object-Meta-Food': 'PizZa',
        })
        # metadata post
        req_metadata = {'X-Object-Meta-color': 'oraNge'}
        resp = retry(post)
        resp.read()
        self.assertEqual(resp.status, 202)
        resp = retry(get)
        self.assertEqual('', resp.read())
        self.assertEqual(resp.status, 200)
        # POST replaces all object user metadata, so Food is gone.
        self.assertEqual(metadata(resp), {
            'X-Object-Meta-Color': 'oraNge'
        })
        # sysmeta put
        req_metadata = {
            'X-Object-Meta-Color': 'Red',
            'X-Object-Sysmeta-Color': 'Green',
            'X-Object-Transient-Sysmeta-Color': 'Blue',
        }
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
        resp = retry(get)
        self.assertEqual('', resp.read())
        self.assertEqual(resp.status, 200)
        # Sysmeta and transient-sysmeta are never returned to clients.
        self.assertEqual(metadata(resp), {
            'X-Object-Meta-Color': 'Red',
        })
        # sysmeta post
        req_metadata = {
            'X-Object-Meta-Food': 'Burger',
            'X-Object-Meta-Animal': 'Cat',
            'X-Object-Sysmeta-Animal': 'Cow',
            'X-Object-Transient-Sysmeta-Food': 'Burger',
        }
        resp = retry(post)
        resp.read()
        self.assertEqual(resp.status, 202)
        resp = retry(get)
        self.assertEqual('', resp.read())
        self.assertEqual(resp.status, 200)
        self.assertEqual(metadata(resp), {
            'X-Object-Meta-Food': 'Burger',
            'X-Object-Meta-Animal': 'Cat',
        })
        # non-ascii put
        req_metadata = {
            'X-Object-Meta-Foo': u'B\u00e2r',
        }
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
        resp = retry(get)
        self.assertEqual('', resp.read())
        self.assertEqual(resp.status, 200)
        # Values come back UTF-8 encoded.
        self.assertEqual(metadata(resp), {
            'X-Object-Meta-Foo': 'B\xc3\xa2r',
        })
        # non-ascii post
        req_metadata = {
            'X-Object-Meta-Foo': u'B\u00e5z',
        }
        resp = retry(post)
        resp.read()
        self.assertEqual(resp.status, 202)
        resp = retry(get)
        self.assertEqual('', resp.read())
        self.assertEqual(resp.status, 200)
        self.assertEqual(metadata(resp), {
            'X-Object-Meta-Foo': 'B\xc3\xa5z',
        })
    def test_if_none_match(self):
        """PUT with If-None-Match: '*' succeeds only when the object does not
        already exist; any other If-None-Match value is a 400."""
        def delete(url, token, parsed, conn):
            conn.request('DELETE', '%s/%s/%s' % (
                parsed.path, self.container, 'if_none_match_test'), '',
                {'X-Auth-Token': token})
            return check_response(conn)
        def put(url, token, parsed, conn):
            conn.request('PUT', '%s/%s/%s' % (
                parsed.path, self.container, 'if_none_match_test'), '',
                {'X-Auth-Token': token,
                 'Content-Length': '0',
                 'If-None-Match': '*'})
            return check_response(conn)
        # First PUT creates the object.
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
        # Second PUT is rejected because the object now exists.
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 412)
        # After deletion, the conditional PUT succeeds again.
        resp = retry(delete)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
        def put(url, token, parsed, conn):
            conn.request('PUT', '%s/%s/%s' % (
                parsed.path, self.container, 'if_none_match_test'), '',
                {'X-Auth-Token': token,
                 'Content-Length': '0',
                 'If-None-Match': 'somethingelse'})
            return check_response(conn)
        # Only '*' is supported for If-None-Match on PUT.
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 400)
    def test_too_small_x_timestamp(self):
        """A negative client-supplied X-Timestamp is either rejected (400) or
        shunted aside, in which case the server assigns the current time."""
        def put(url, token, parsed, conn):
            conn.request('PUT', '%s/%s/%s' % (parsed.path, self.container,
                                              'too_small_x_timestamp'),
                         '', {'X-Auth-Token': token,
                              'Content-Length': '0',
                              'X-Timestamp': '-1'})
            return check_response(conn)
        def head(url, token, parsed, conn):
            conn.request('HEAD', '%s/%s/%s' % (parsed.path, self.container,
                                               'too_small_x_timestamp'),
                         '', {'X-Auth-Token': token,
                              'Content-Length': '0'})
            return check_response(conn)
        # Bracket the PUT so the server-assigned timestamp can be checked.
        ts_before = time.time()
        resp = retry(put)
        body = resp.read()
        ts_after = time.time()
        if resp.status == 400:
            # shunt_inbound_x_timestamp must be false
            self.assertIn(
                'X-Timestamp should be a UNIX timestamp float value', body)
        else:
            self.assertEqual(resp.status, 201)
            self.assertEqual(body, '')
            resp = retry(head)
            resp.read()
            self.assertGreater(float(resp.headers['x-timestamp']), ts_before)
            self.assertLess(float(resp.headers['x-timestamp']), ts_after)
    def test_too_big_x_timestamp(self):
        """An out-of-range (huge) client-supplied X-Timestamp is either
        rejected (400) or shunted aside with the server assigning its own."""
        def put(url, token, parsed, conn):
            conn.request('PUT', '%s/%s/%s' % (parsed.path, self.container,
                                              'too_big_x_timestamp'),
                         '', {'X-Auth-Token': token,
                              'Content-Length': '0',
                              'X-Timestamp': '99999999999.9999999999'})
            return check_response(conn)
        def head(url, token, parsed, conn):
            conn.request('HEAD', '%s/%s/%s' % (parsed.path, self.container,
                                               'too_big_x_timestamp'),
                         '', {'X-Auth-Token': token,
                              'Content-Length': '0'})
            return check_response(conn)
        # Bracket the PUT so the server-assigned timestamp can be checked.
        ts_before = time.time()
        resp = retry(put)
        body = resp.read()
        ts_after = time.time()
        if resp.status == 400:
            # shunt_inbound_x_timestamp must be false
            self.assertIn(
                'X-Timestamp should be a UNIX timestamp float value', body)
        else:
            self.assertEqual(resp.status, 201)
            self.assertEqual(body, '')
            resp = retry(head)
            resp.read()
            self.assertGreater(float(resp.headers['x-timestamp']), ts_before)
            self.assertLess(float(resp.headers['x-timestamp']), ts_after)
    def test_x_delete_after(self):
        """An object PUT with X-Delete-After: 1 becomes a 404 within a few
        seconds of its creation."""
        def put(url, token, parsed, conn):
            conn.request('PUT', '%s/%s/%s' % (parsed.path, self.container,
                                              'x_delete_after'),
                         '', {'X-Auth-Token': token,
                              'Content-Length': '0',
                              'X-Delete-After': '1'})
            return check_response(conn)
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
        def get(url, token, parsed, conn):
            conn.request(
                'GET',
                '%s/%s/%s' % (parsed.path, self.container, 'x_delete_after'),
                '',
                {'X-Auth-Token': token})
            return check_response(conn)
        # Poll until the object expires (at most ~10 more attempts).
        resp = retry(get)
        resp.read()
        count = 0
        while resp.status == 200 and count < 10:
            resp = retry(get)
            resp.read()
            count += 1
            time.sleep(1)
        self.assertEqual(resp.status, 404)
        # To avoid an error when the object deletion in tearDown(),
        # the object is added again.
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
def test_x_delete_at(self):
def put(url, token, parsed, conn):
dt = datetime.datetime.now()
epoch = time.mktime(dt.timetuple())
delete_time = str(int(epoch) + 3)
conn.request(
'PUT',
'%s/%s/%s' % (parsed.path, self.container, 'x_delete_at'),
'',
{'X-Auth-Token': token,
'Content-Length': '0',
'X-Delete-At': delete_time})
return check_response(conn)
resp = retry(put)
resp.read()
self.assertEqual(resp.status, 201)
def get(url, token, parsed, conn):
conn.request(
'GET',
'%s/%s/%s' % (parsed.path, self.container, 'x_delete_at'),
'',
{'X-Auth-Token': token})
return check_response(conn)
resp = retry(get)
resp.read()
count = 0
while resp.status == 200 and count < 10:
resp = retry(get)
resp.read()
count += 1
time.sleep(1)
self.assertEqual(resp.status, 404)
# To avoid an error when the object deletion in tearDown(),
# the object is added again.
resp = retry(put)
resp.read()
self.assertEqual(resp.status, 201)
def test_non_integer_x_delete_after(self):
def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/%s' % (parsed.path, self.container,
'non_integer_x_delete_after'),
'', {'X-Auth-Token': token,
'Content-Length': '0',
'X-Delete-After': '*'})
return check_response(conn)
resp = retry(put)
body = resp.read()
self.assertEqual(resp.status, 400)
self.assertEqual(body, 'Non-integer X-Delete-After')
def test_non_integer_x_delete_at(self):
def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/%s' % (parsed.path, self.container,
'non_integer_x_delete_at'),
'', {'X-Auth-Token': token,
'Content-Length': '0',
'X-Delete-At': '*'})
return check_response(conn)
resp = retry(put)
body = resp.read()
self.assertEqual(resp.status, 400)
self.assertEqual(body, 'Non-integer X-Delete-At')
def test_x_delete_at_in_the_past(self):
def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/%s' % (parsed.path, self.container,
'x_delete_at_in_the_past'),
'', {'X-Auth-Token': token,
'Content-Length': '0',
'X-Delete-At': '0'})
return check_response(conn)
resp = retry(put)
body = resp.read()
self.assertEqual(resp.status, 400)
self.assertEqual(body, 'X-Delete-At in past')
    def test_copy_object(self):
        """Copies an object within one account via X-Copy-From on PUT and via
        the COPY verb (with and without a Range), verifying content each
        time."""
        if tf.skip:
            raise SkipTest
        source = '%s/%s' % (self.container, self.obj)
        dest = '%s/%s' % (self.container, 'test_copy')
        # get contents of source
        def get_source(url, token, parsed, conn):
            conn.request('GET',
                         '%s/%s' % (parsed.path, source),
                         '', {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get_source)
        source_contents = resp.read()
        self.assertEqual(resp.status, 200)
        self.assertEqual(source_contents, 'test')
        # copy source to dest with X-Copy-From
        def put(url, token, parsed, conn):
            conn.request('PUT', '%s/%s' % (parsed.path, dest), '',
                         {'X-Auth-Token': token,
                          'Content-Length': '0',
                          'X-Copy-From': source})
            return check_response(conn)
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
        # contents of dest should be the same as source
        def get_dest(url, token, parsed, conn):
            conn.request('GET',
                         '%s/%s' % (parsed.path, dest),
                         '', {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get_dest)
        dest_contents = resp.read()
        self.assertEqual(resp.status, 200)
        self.assertEqual(dest_contents, source_contents)
        # delete the copy
        def delete(url, token, parsed, conn):
            conn.request('DELETE', '%s/%s' % (parsed.path, dest), '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(delete)
        resp.read()
        self.assertIn(resp.status, (204, 404))
        # verify dest does not exist
        resp = retry(get_dest)
        resp.read()
        self.assertEqual(resp.status, 404)
        # copy source to dest with COPY
        def copy(url, token, parsed, conn):
            conn.request('COPY', '%s/%s' % (parsed.path, source), '',
                         {'X-Auth-Token': token,
                          'Destination': dest})
            return check_response(conn)
        resp = retry(copy)
        resp.read()
        self.assertEqual(resp.status, 201)
        # contents of dest should be the same as source
        resp = retry(get_dest)
        dest_contents = resp.read()
        self.assertEqual(resp.status, 200)
        self.assertEqual(dest_contents, source_contents)
        # copy source to dest with COPY and range
        # (re-binds |copy| with a Range header added)
        def copy(url, token, parsed, conn):
            conn.request('COPY', '%s/%s' % (parsed.path, source), '',
                         {'X-Auth-Token': token,
                          'Destination': dest,
                          'Range': 'bytes=1-2'})
            return check_response(conn)
        resp = retry(copy)
        resp.read()
        self.assertEqual(resp.status, 201)
        # contents of dest should be the same as source
        # (here: only the requested byte range of the source)
        resp = retry(get_dest)
        dest_contents = resp.read()
        self.assertEqual(resp.status, 200)
        self.assertEqual(dest_contents, source_contents[1:3])
        # delete the copy
        resp = retry(delete)
        resp.read()
        self.assertIn(resp.status, (204, 404))
def test_copy_between_accounts(self):
if tf.skip:
raise SkipTest
source = '%s/%s' % (self.container, self.obj)
dest = '%s/%s' % (self.container, 'test_copy')
# get contents of source
def get_source(url, token, parsed, conn):
conn.request('GET',
'%s/%s' % (parsed.path, source),
'', {'X-Auth-Token': token})
return check_response(conn)
resp = retry(get_source)
source_contents = resp.read()
self.assertEqual(resp.status, 200)
self.assertEqual(source_contents, 'test')
acct = tf.parsed[0].path.split('/', 2)[2]
# copy source to dest with X-Copy-From-Account
def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s' % (parsed.path, dest), '',
{'X-Auth-Token': token,
'Content-Length': '0',
'X-Copy-From-Account': acct,
'X-Copy-From': source})
return check_response(conn)
# try to put, will not succeed
# user does not have permissions to read from source
resp = retry(put, use_account=2)
self.assertEqual(resp.status, 403)
# add acl to allow reading from source
def post(url, token, parsed, conn):
conn.request('POST', '%s/%s' % (parsed.path, self.container), '',
{'X-Auth-Token': token,
'X-Container-Read': tf.swift_test_perm[1]})
return check_response(conn)
resp = retry(post)
self.assertEqual(resp.status, 204)
# retry previous put, now should succeed
resp = retry(put, use_account=2)
self.assertEqual(resp.status, 201)
# contents of dest should be the same as source
def get_dest(url, token, parsed, conn):
conn.request('GET',
'%s/%s' % (parsed.path, dest),
'', {'X-Auth-Token': token})
return check_response(conn)
resp = retry(get_dest, use_account=2)
dest_contents = resp.read()
self.assertEqual(resp.status, 200)
self.assertEqual(dest_contents, source_contents)
# delete the copy
def delete(url, token, parsed, conn):
conn.request('DELETE', '%s/%s' % (parsed.path, dest), '',
{'X-Auth-Token': token})
return check_response(conn)
resp = retry(delete, use_account=2)
resp.read()
self.assertIn(resp.status, (204, 404))
# verify dest does not exist
resp = retry(get_dest, use_account=2)
resp.read()
self.assertEqual(resp.status, 404)
acct_dest = tf.parsed[1].path.split('/', 2)[2]
# copy source to dest with COPY
def copy(url, token, parsed, conn):
conn.request('COPY', '%s/%s' % (parsed.path, source), '',
{'X-Auth-Token': token,
'Destination-Account': acct_dest,
'Destination': dest})
return check_response(conn)
# try to copy, will not succeed
# user does not have permissions to write to destination
resp = retry(copy)
resp.read()
self.assertEqual(resp.status, 403)
# add acl to allow write to destination
def post(url, token, parsed, conn):
conn.request('POST', '%s/%s' % (parsed.path, self.container), '',
{'X-Auth-Token': token,
'X-Container-Write': tf.swift_test_perm[0]})
return check_response(conn)
resp = retry(post, use_account=2)
self.assertEqual(resp.status, 204)
# now copy will succeed
resp = retry(copy)
resp.read()
self.assertEqual(resp.status, 201)
# contents of dest should be the same as source
resp = retry(get_dest, use_account=2)
dest_contents = resp.read()
self.assertEqual(resp.status, 200)
self.assertEqual(dest_contents, source_contents)
# delete the copy
resp = retry(delete, use_account=2)
resp.read()
self.assertIn(resp.status, (204, 404))
def test_public_object(self):
if tf.skip:
raise SkipTest
def get(url, token, parsed, conn):
conn.request('GET',
'%s/%s/%s' % (parsed.path, self.container, self.obj))
return check_response(conn)
try:
resp = retry(get)
raise Exception('Should not have been able to GET')
except Exception as err:
self.assertTrue(str(err).startswith('No result after '))
def post(url, token, parsed, conn):
conn.request('POST', parsed.path + '/' + self.container, '',
{'X-Auth-Token': token,
'X-Container-Read': '.r:*'})
return check_response(conn)
resp = retry(post)
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(get)
resp.read()
self.assertEqual(resp.status, 200)
def post(url, token, parsed, conn):
conn.request('POST', parsed.path + '/' + self.container, '',
{'X-Auth-Token': token, 'X-Container-Read': ''})
return check_response(conn)
resp = retry(post)
resp.read()
self.assertEqual(resp.status, 204)
try:
resp = retry(get)
raise Exception('Should not have been able to GET')
except Exception as err:
self.assertTrue(str(err).startswith('No result after '))
    def test_private_object(self):
        """Server-side copy honors the source container's read ACL.

        Account 3 is granted read/write on a fresh shared container but has
        no access to ``self.container``; copying out of the private
        container must fail (403) both via X-Copy-From and via COPY, while
        writes and same-container copies in the shared container succeed.
        """
        if tf.skip or tf.skip3:
            raise SkipTest
        # Ensure we can't access the object with the third account
        def get(url, token, parsed, conn):
            conn.request('GET', '%s/%s/%s' % (
                parsed.path, self.container, self.obj), '',
                {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # create a shared container readable and writable by account3
        shared_container = uuid4().hex

        def put(url, token, parsed, conn):
            conn.request('PUT', '%s/%s' % (
                parsed.path, shared_container), '',
                {'X-Auth-Token': token,
                 'X-Container-Read': tf.swift_test_perm[2],
                 'X-Container-Write': tf.swift_test_perm[2]})
            return check_response(conn)
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
        # verify third account can not copy from private container
        # (PUT with X-Copy-From pointing at the private source)
        def copy(url, token, parsed, conn):
            conn.request('PUT', '%s/%s/%s' % (
                parsed.path, shared_container, 'private_object'), '',
                {'X-Auth-Token': token,
                 'Content-Length': '0',
                 'X-Copy-From': '%s/%s' % (self.container, self.obj)})
            return check_response(conn)
        resp = retry(copy, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # verify third account can write "obj1" to shared container
        def put(url, token, parsed, conn):
            conn.request('PUT', '%s/%s/%s' % (
                parsed.path, shared_container, 'obj1'), 'test',
                {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(put, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 201)
        # verify third account can copy "obj1" within the shared container
        def copy2(url, token, parsed, conn):
            conn.request('COPY', '%s/%s/%s' % (
                parsed.path, shared_container, 'obj1'), '',
                {'X-Auth-Token': token,
                 'Destination': '%s/%s' % (shared_container, 'obj1')})
            return check_response(conn)
        resp = retry(copy2, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 201)
        # verify third account STILL can not copy from private container
        # (this time using the COPY verb instead of X-Copy-From)
        def copy3(url, token, parsed, conn):
            conn.request('COPY', '%s/%s/%s' % (
                parsed.path, self.container, self.obj), '',
                {'X-Auth-Token': token,
                 'Destination': '%s/%s' % (shared_container,
                                           'private_object')})
            return check_response(conn)
        resp = retry(copy3, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # clean up "obj1"
        def delete(url, token, parsed, conn):
            conn.request('DELETE', '%s/%s/%s' % (
                parsed.path, shared_container, 'obj1'), '',
                {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(delete)
        resp.read()
        self.assertIn(resp.status, (204, 404))
        # clean up shared_container
        def delete(url, token, parsed, conn):
            conn.request('DELETE',
                         parsed.path + '/' + shared_container, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(delete)
        resp.read()
        self.assertIn(resp.status, (204, 404))
def test_container_write_only(self):
if tf.skip or tf.skip3:
raise SkipTest
# Ensure we can't access the object with the third account
def get(url, token, parsed, conn):
conn.request('GET', '%s/%s/%s' % (
parsed.path, self.container, self.obj), '',
{'X-Auth-Token': token})
return check_response(conn)
resp = retry(get, use_account=3)
resp.read()
self.assertEqual(resp.status, 403)
# create a shared container writable (but not readable) by account3
shared_container = uuid4().hex
def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s' % (
parsed.path, shared_container), '',
{'X-Auth-Token': token,
'X-Container-Write': tf.swift_test_perm[2]})
return check_response(conn)
resp = retry(put)
resp.read()
self.assertEqual(resp.status, 201)
# verify third account can write "obj1" to shared container
def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/%s' % (
parsed.path, shared_container, 'obj1'), 'test',
{'X-Auth-Token': token})
return check_response(conn)
resp = retry(put, use_account=3)
resp.read()
self.assertEqual(resp.status, 201)
# verify third account cannot copy "obj1" to shared container
def copy(url, token, parsed, conn):
conn.request('COPY', '%s/%s/%s' % (
parsed.path, shared_container, 'obj1'), '',
{'X-Auth-Token': token,
'Destination': '%s/%s' % (shared_container, 'obj2')})
return check_response(conn)
resp = retry(copy, use_account=3)
resp.read()
self.assertEqual(resp.status, 403)
# verify third account can POST to "obj1" in shared container
def post(url, token, parsed, conn):
conn.request('POST', '%s/%s/%s' % (
parsed.path, shared_container, 'obj1'), '',
{'X-Auth-Token': token,
'X-Object-Meta-Color': 'blue'})
return check_response(conn)
resp = retry(post, use_account=3)
resp.read()
self.assertEqual(resp.status, 202)
# verify third account can DELETE from shared container
def delete(url, token, parsed, conn):
conn.request('DELETE', '%s/%s/%s' % (
parsed.path, shared_container, 'obj1'), '',
{'X-Auth-Token': token})
return check_response(conn)
resp = retry(delete, use_account=3)
resp.read()
self.assertIn(resp.status, (204, 404))
# clean up shared_container
def delete(url, token, parsed, conn):
conn.request('DELETE',
parsed.path + '/' + shared_container, '',
{'X-Auth-Token': token})
return check_response(conn)
resp = retry(delete)
resp.read()
self.assertIn(resp.status, (204, 404))
@requires_acls
def test_read_only(self):
if tf.skip3:
raise tf.SkipTest
def get_listing(url, token, parsed, conn):
conn.request('GET', '%s/%s' % (parsed.path, self.container), '',
{'X-Auth-Token': token})
return check_response(conn)
def post_account(url, token, parsed, conn, headers):
new_headers = dict({'X-Auth-Token': token}, **headers)
conn.request('POST', parsed.path, '', new_headers)
return check_response(conn)
def get(url, token, parsed, conn, name):
conn.request('GET', '%s/%s/%s' % (
parsed.path, self.container, name), '',
{'X-Auth-Token': token})
return check_response(conn)
def put(url, token, parsed, conn, name):
conn.request('PUT', '%s/%s/%s' % (
parsed.path, self.container, name), 'test',
{'X-Auth-Token': token})
return check_response(conn)
def delete(url, token, parsed, conn, name):
conn.request('PUT', '%s/%s/%s' % (
parsed.path, self.container, name), '',
{'X-Auth-Token': token})
return check_response(conn)
# cannot list objects
resp = retry(get_listing, use_account=3)
resp.read()
self.assertEqual(resp.status, 403)
# cannot get object
resp = retry(get, self.obj, use_account=3)
resp.read()
self.assertEqual(resp.status, 403)
# grant read-only access
acl_user = tf.swift_test_user[2]
acl = {'read-only': [acl_user]}
headers = {'x-account-access-control': json.dumps(acl)}
resp = retry(post_account, headers=headers, use_account=1)
resp.read()
self.assertEqual(resp.status, 204)
# can list objects
resp = retry(get_listing, use_account=3)
listing = resp.read()
self.assertEqual(resp.status, 200)
self.assertIn(self.obj, listing)
# can get object
resp = retry(get, self.obj, use_account=3)
body = resp.read()
self.assertEqual(resp.status, 200)
self.assertEqual(body, 'test')
# can not put an object
obj_name = str(uuid4())
resp = retry(put, obj_name, use_account=3)
body = resp.read()
self.assertEqual(resp.status, 403)
# can not delete an object
resp = retry(delete, self.obj, use_account=3)
body = resp.read()
self.assertEqual(resp.status, 403)
# sanity with account1
resp = retry(get_listing, use_account=3)
listing = resp.read()
self.assertEqual(resp.status, 200)
self.assertNotIn(obj_name, listing)
self.assertIn(self.obj, listing)
@requires_acls
def test_read_write(self):
if tf.skip3:
raise SkipTest
def get_listing(url, token, parsed, conn):
conn.request('GET', '%s/%s' % (parsed.path, self.container), '',
{'X-Auth-Token': token})
return check_response(conn)
def post_account(url, token, parsed, conn, headers):
new_headers = dict({'X-Auth-Token': token}, **headers)
conn.request('POST', parsed.path, '', new_headers)
return check_response(conn)
def get(url, token, parsed, conn, name):
conn.request('GET', '%s/%s/%s' % (
parsed.path, self.container, name), '',
{'X-Auth-Token': token})
return check_response(conn)
def put(url, token, parsed, conn, name):
conn.request('PUT', '%s/%s/%s' % (
parsed.path, self.container, name), 'test',
{'X-Auth-Token': token})
return check_response(conn)
def delete(url, token, parsed, conn, name):
conn.request('DELETE', '%s/%s/%s' % (
parsed.path, self.container, name), '',
{'X-Auth-Token': token})
return check_response(conn)
# cannot list objects
resp = retry(get_listing, use_account=3)
resp.read()
self.assertEqual(resp.status, 403)
# cannot get object
resp = retry(get, self.obj, use_account=3)
resp.read()
self.assertEqual(resp.status, 403)
# grant read-write access
acl_user = tf.swift_test_user[2]
acl = {'read-write': [acl_user]}
headers = {'x-account-access-control': json.dumps(acl)}
resp = retry(post_account, headers=headers, use_account=1)
resp.read()
self.assertEqual(resp.status, 204)
# can list objects
resp = retry(get_listing, use_account=3)
listing = resp.read()
self.assertEqual(resp.status, 200)
self.assertIn(self.obj, listing)
# can get object
resp = retry(get, self.obj, use_account=3)
body = resp.read()
self.assertEqual(resp.status, 200)
self.assertEqual(body, 'test')
# can put an object
obj_name = str(uuid4())
resp = retry(put, obj_name, use_account=3)
body = resp.read()
self.assertEqual(resp.status, 201)
# can delete an object
resp = retry(delete, self.obj, use_account=3)
body = resp.read()
self.assertIn(resp.status, (204, 404))
# sanity with account1
resp = retry(get_listing, use_account=3)
listing = resp.read()
self.assertEqual(resp.status, 200)
self.assertIn(obj_name, listing)
self.assertNotIn(self.obj, listing)
    @requires_acls
    def test_admin(self):
        """Account-level 'admin' ACL grants full read and write access.

        Mirrors test_read_write: before the grant account 3 gets 403 on
        listing and GET; after the grant it can list, GET, PUT and DELETE.
        """
        if tf.skip3:
            raise SkipTest

        def get_listing(url, token, parsed, conn):
            conn.request('GET', '%s/%s' % (parsed.path, self.container), '',
                         {'X-Auth-Token': token})
            return check_response(conn)

        def post_account(url, token, parsed, conn, headers):
            # Merge the caller-supplied headers over the auth token.
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path, '', new_headers)
            return check_response(conn)

        def get(url, token, parsed, conn, name):
            conn.request('GET', '%s/%s/%s' % (
                parsed.path, self.container, name), '',
                {'X-Auth-Token': token})
            return check_response(conn)

        def put(url, token, parsed, conn, name):
            conn.request('PUT', '%s/%s/%s' % (
                parsed.path, self.container, name), 'test',
                {'X-Auth-Token': token})
            return check_response(conn)

        def delete(url, token, parsed, conn, name):
            conn.request('DELETE', '%s/%s/%s' % (
                parsed.path, self.container, name), '',
                {'X-Auth-Token': token})
            return check_response(conn)
        # cannot list objects
        resp = retry(get_listing, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # cannot get object
        resp = retry(get, self.obj, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # grant admin access
        acl_user = tf.swift_test_user[2]
        acl = {'admin': [acl_user]}
        headers = {'x-account-access-control': json.dumps(acl)}
        resp = retry(post_account, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # can list objects
        resp = retry(get_listing, use_account=3)
        listing = resp.read()
        self.assertEqual(resp.status, 200)
        self.assertIn(self.obj, listing)
        # can get object
        resp = retry(get, self.obj, use_account=3)
        body = resp.read()
        self.assertEqual(resp.status, 200)
        self.assertEqual(body, 'test')
        # can put an object
        obj_name = str(uuid4())
        resp = retry(put, obj_name, use_account=3)
        body = resp.read()
        self.assertEqual(resp.status, 201)
        # can delete an object
        resp = retry(delete, self.obj, use_account=3)
        body = resp.read()
        self.assertIn(resp.status, (204, 404))
        # sanity check (with account 3): listing reflects the put and delete
        resp = retry(get_listing, use_account=3)
        listing = resp.read()
        self.assertEqual(resp.status, 200)
        self.assertIn(obj_name, listing)
        self.assertNotIn(self.obj, listing)
    def test_manifest(self):
        """Dynamic large object (X-Object-Manifest) behavior.

        Uploads three sets of segments, points a manifest object at each
        set in turn, and verifies whole-object GETs, Range requests, and
        cross-account access control (segments in a protected container
        make the manifest unreadable even if the manifest is readable).
        """
        if tf.skip:
            raise SkipTest
        # Data for the object segments
        segments1 = ['one', 'two', 'three', 'four', 'five']
        segments2 = ['six', 'seven', 'eight']
        segments3 = ['nine', 'ten', 'eleven']

        # Upload the first set of segments
        def put(url, token, parsed, conn, objnum):
            conn.request('PUT', '%s/%s/segments1/%s' % (
                parsed.path, self.container, str(objnum)), segments1[objnum],
                {'X-Auth-Token': token})
            return check_response(conn)
        for objnum in range(len(segments1)):
            resp = retry(put, objnum)
            resp.read()
            self.assertEqual(resp.status, 201)

        # Upload the manifest
        def put(url, token, parsed, conn):
            conn.request('PUT', '%s/%s/manifest' % (
                parsed.path, self.container), '', {
                    'X-Auth-Token': token,
                    'X-Object-Manifest': '%s/segments1/' % self.container,
                    'Content-Type': 'text/jibberish', 'Content-Length': '0'})
            return check_response(conn)
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
        # Get the manifest (should get all the segments as the body)
        def get(url, token, parsed, conn):
            conn.request('GET', '%s/%s/manifest' % (
                parsed.path, self.container), '', {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get)
        self.assertEqual(resp.read(), ''.join(segments1))
        self.assertEqual(resp.status, 200)
        self.assertEqual(resp.getheader('content-type'), 'text/jibberish')
        # Get with a range at the start of the second segment
        def get(url, token, parsed, conn):
            conn.request('GET', '%s/%s/manifest' % (
                parsed.path, self.container), '', {
                    'X-Auth-Token': token, 'Range': 'bytes=3-'})
            return check_response(conn)
        resp = retry(get)
        self.assertEqual(resp.read(), ''.join(segments1[1:]))
        self.assertEqual(resp.status, 206)
        # Get with a range in the middle of the second segment
        def get(url, token, parsed, conn):
            conn.request('GET', '%s/%s/manifest' % (
                parsed.path, self.container), '', {
                    'X-Auth-Token': token, 'Range': 'bytes=5-'})
            return check_response(conn)
        resp = retry(get)
        self.assertEqual(resp.read(), ''.join(segments1)[5:])
        self.assertEqual(resp.status, 206)
        # Get with a full start and stop range (bytes 5-10 inclusive)
        def get(url, token, parsed, conn):
            conn.request('GET', '%s/%s/manifest' % (
                parsed.path, self.container), '', {
                    'X-Auth-Token': token, 'Range': 'bytes=5-10'})
            return check_response(conn)
        resp = retry(get)
        self.assertEqual(resp.read(), ''.join(segments1)[5:11])
        self.assertEqual(resp.status, 206)
        # Upload the second set of segments
        def put(url, token, parsed, conn, objnum):
            conn.request('PUT', '%s/%s/segments2/%s' % (
                parsed.path, self.container, str(objnum)), segments2[objnum],
                {'X-Auth-Token': token})
            return check_response(conn)
        for objnum in range(len(segments2)):
            resp = retry(put, objnum)
            resp.read()
            self.assertEqual(resp.status, 201)
        # Get the manifest (should still be the first segments of course)
        def get(url, token, parsed, conn):
            conn.request('GET', '%s/%s/manifest' % (
                parsed.path, self.container), '', {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get)
        self.assertEqual(resp.read(), ''.join(segments1))
        self.assertEqual(resp.status, 200)
        # Update the manifest to point at the second segment set
        def put(url, token, parsed, conn):
            conn.request('PUT', '%s/%s/manifest' % (
                parsed.path, self.container), '', {
                    'X-Auth-Token': token,
                    'X-Object-Manifest': '%s/segments2/' % self.container,
                    'Content-Length': '0'})
            return check_response(conn)
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
        # Get the manifest (should be the second set of segments now)
        def get(url, token, parsed, conn):
            conn.request('GET', '%s/%s/manifest' % (
                parsed.path, self.container), '', {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get)
        self.assertEqual(resp.read(), ''.join(segments2))
        self.assertEqual(resp.status, 200)

        if not tf.skip3:
            # Ensure we can't access the manifest with the third account
            def get(url, token, parsed, conn):
                conn.request('GET', '%s/%s/manifest' % (
                    parsed.path, self.container), '', {'X-Auth-Token': token})
                return check_response(conn)
            resp = retry(get, use_account=3)
            resp.read()
            self.assertEqual(resp.status, 403)
            # Grant read access on the container to the third account
            def post(url, token, parsed, conn):
                conn.request('POST', '%s/%s' % (parsed.path, self.container),
                             '', {'X-Auth-Token': token,
                                  'X-Container-Read': tf.swift_test_perm[2]})
                return check_response(conn)
            resp = retry(post)
            resp.read()
            self.assertEqual(resp.status, 204)
            # The third account should be able to get the manifest now
            def get(url, token, parsed, conn):
                conn.request('GET', '%s/%s/manifest' % (
                    parsed.path, self.container), '', {'X-Auth-Token': token})
                return check_response(conn)
            resp = retry(get, use_account=3)
            self.assertEqual(resp.read(), ''.join(segments2))
            self.assertEqual(resp.status, 200)

        # Create another container for the third set of segments
        acontainer = uuid4().hex

        def put(url, token, parsed, conn):
            conn.request('PUT', parsed.path + '/' + acontainer, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
        # Upload the third set of segments in the other container
        def put(url, token, parsed, conn, objnum):
            conn.request('PUT', '%s/%s/segments3/%s' % (
                parsed.path, acontainer, str(objnum)), segments3[objnum],
                {'X-Auth-Token': token})
            return check_response(conn)
        for objnum in range(len(segments3)):
            resp = retry(put, objnum)
            resp.read()
            self.assertEqual(resp.status, 201)
        # Update the manifest to point at the cross-container segments
        def put(url, token, parsed, conn):
            conn.request('PUT', '%s/%s/manifest' % (
                parsed.path, self.container), '',
                {'X-Auth-Token': token,
                 'X-Object-Manifest': '%s/segments3/' % acontainer,
                 'Content-Length': '0'})
            return check_response(conn)
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
        # Get the manifest to ensure it's the third set of segments
        def get(url, token, parsed, conn):
            conn.request('GET', '%s/%s/manifest' % (
                parsed.path, self.container), '', {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get)
        self.assertEqual(resp.read(), ''.join(segments3))
        self.assertEqual(resp.status, 200)

        if not tf.skip3:
            # Ensure we can't access the manifest with the third account
            # (because the segments are in a protected container even if the
            # manifest itself is not).
            def get(url, token, parsed, conn):
                conn.request('GET', '%s/%s/manifest' % (
                    parsed.path, self.container), '', {'X-Auth-Token': token})
                return check_response(conn)
            resp = retry(get, use_account=3)
            resp.read()
            self.assertEqual(resp.status, 403)
            # Grant read access on the segment container to the third account
            def post(url, token, parsed, conn):
                conn.request('POST', '%s/%s' % (parsed.path, acontainer),
                             '', {'X-Auth-Token': token,
                                  'X-Container-Read': tf.swift_test_perm[2]})
                return check_response(conn)
            resp = retry(post)
            resp.read()
            self.assertEqual(resp.status, 204)
            # The third account should be able to get the manifest now
            def get(url, token, parsed, conn):
                conn.request('GET', '%s/%s/manifest' % (
                    parsed.path, self.container), '', {'X-Auth-Token': token})
                return check_response(conn)
            resp = retry(get, use_account=3)
            self.assertEqual(resp.read(), ''.join(segments3))
            self.assertEqual(resp.status, 200)

        # Delete the manifest
        def delete(url, token, parsed, conn, objnum):
            # NOTE(review): objnum is accepted but unused, and the call below
            # passes the leftover loop variable -- looks like copy/paste from
            # the segment-delete helpers; confirm before cleaning up.
            conn.request('DELETE', '%s/%s/manifest' % (
                parsed.path,
                self.container), '', {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(delete, objnum)
        resp.read()
        self.assertIn(resp.status, (204, 404))
        # Delete the third set of segments
        def delete(url, token, parsed, conn, objnum):
            conn.request('DELETE', '%s/%s/segments3/%s' % (
                parsed.path, acontainer, str(objnum)), '',
                {'X-Auth-Token': token})
            return check_response(conn)
        for objnum in range(len(segments3)):
            resp = retry(delete, objnum)
            resp.read()
            self.assertIn(resp.status, (204, 404))
        # Delete the second set of segments
        def delete(url, token, parsed, conn, objnum):
            conn.request('DELETE', '%s/%s/segments2/%s' % (
                parsed.path, self.container, str(objnum)), '',
                {'X-Auth-Token': token})
            return check_response(conn)
        for objnum in range(len(segments2)):
            resp = retry(delete, objnum)
            resp.read()
            self.assertIn(resp.status, (204, 404))
        # Delete the first set of segments
        def delete(url, token, parsed, conn, objnum):
            conn.request('DELETE', '%s/%s/segments1/%s' % (
                parsed.path, self.container, str(objnum)), '',
                {'X-Auth-Token': token})
            return check_response(conn)
        for objnum in range(len(segments1)):
            resp = retry(delete, objnum)
            resp.read()
            self.assertIn(resp.status, (204, 404))
        # Delete the extra container
        def delete(url, token, parsed, conn):
            conn.request('DELETE', '%s/%s' % (parsed.path, acontainer), '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(delete)
        resp.read()
        self.assertIn(resp.status, (204, 404))
def test_delete_content_type(self):
if tf.skip:
raise SkipTest
def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/hi' % (parsed.path, self.container),
'there', {'X-Auth-Token': token})
return check_response(conn)
resp = retry(put)
resp.read()
self.assertEqual(resp.status, 201)
def delete(url, token, parsed, conn):
conn.request('DELETE', '%s/%s/hi' % (parsed.path, self.container),
'', {'X-Auth-Token': token})
return check_response(conn)
resp = retry(delete)
resp.read()
self.assertIn(resp.status, (204, 404))
self.assertEqual(resp.getheader('Content-Type'),
'text/html; charset=UTF-8')
def test_delete_if_delete_at_bad(self):
if tf.skip:
raise SkipTest
def put(url, token, parsed, conn):
conn.request('PUT',
'%s/%s/hi-delete-bad' % (parsed.path, self.container),
'there', {'X-Auth-Token': token})
return check_response(conn)
resp = retry(put)
resp.read()
self.assertEqual(resp.status, 201)
def delete(url, token, parsed, conn):
conn.request('DELETE', '%s/%s/hi' % (parsed.path, self.container),
'', {'X-Auth-Token': token,
'X-If-Delete-At': 'bad'})
return check_response(conn)
resp = retry(delete)
resp.read()
self.assertEqual(resp.status, 400)
def test_null_name(self):
if tf.skip:
raise SkipTest
def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/abc%%00def' % (
parsed.path,
self.container), 'test', {'X-Auth-Token': token})
return check_response(conn)
resp = retry(put)
if (tf.web_front_end == 'apache2'):
self.assertEqual(resp.status, 404)
else:
self.assertEqual(resp.read(), 'Invalid UTF8 or contains NULL')
self.assertEqual(resp.status, 412)
    def test_cors(self):
        """CORS preflight and actual-request behavior for container ACAO.

        First with a wildcard Access-Control-Allow-Origin, then with a
        single allowed origin; the second half differs depending on the
        cluster's strict_cors_mode setting.
        """
        if tf.skip:
            raise SkipTest
        try:
            strict_cors = tf.cluster_info['swift']['strict_cors_mode']
        except KeyError:
            raise SkipTest("cors mode is unknown")

        def put_cors_cont(url, token, parsed, conn, orig):
            conn.request(
                'PUT', '%s/%s' % (parsed.path, self.container),
                '', {'X-Auth-Token': token,
                     'X-Container-Meta-Access-Control-Allow-Origin': orig})
            return check_response(conn)

        def put_obj(url, token, parsed, conn, obj):
            conn.request(
                'PUT', '%s/%s/%s' % (parsed.path, self.container, obj),
                'test', {'X-Auth-Token': token})
            return check_response(conn)

        def check_cors(url, token, parsed, conn,
                       method, obj, headers):
            # OPTIONS preflights are sent unauthenticated by browsers.
            if method != 'OPTIONS':
                headers['X-Auth-Token'] = token
            conn.request(
                method, '%s/%s/%s' % (parsed.path, self.container, obj),
                '', headers)
            return conn.getresponse()

        resp = retry(put_cors_cont, '*')
        resp.read()
        self.assertEqual(resp.status // 100, 2)
        resp = retry(put_obj, 'cat')
        resp.read()
        self.assertEqual(resp.status // 100, 2)
        # Preflight without Access-Control-Request-Method is rejected.
        resp = retry(check_cors,
                     'OPTIONS', 'cat', {'Origin': 'http://m.com'})
        self.assertEqual(resp.status, 401)
        resp = retry(check_cors,
                     'OPTIONS', 'cat',
                     {'Origin': 'http://m.com',
                      'Access-Control-Request-Method': 'GET'})
        self.assertEqual(resp.status, 200)
        resp.read()
        headers = dict((k.lower(), v) for k, v in resp.getheaders())
        self.assertEqual(headers.get('access-control-allow-origin'),
                         '*')
        resp = retry(check_cors,
                     'GET', 'cat', {'Origin': 'http://m.com'})
        self.assertEqual(resp.status, 200)
        headers = dict((k.lower(), v) for k, v in resp.getheaders())
        self.assertEqual(headers.get('access-control-allow-origin'),
                         '*')
        resp = retry(check_cors,
                     'GET', 'cat', {'Origin': 'http://m.com',
                                    'X-Web-Mode': 'True'})
        self.assertEqual(resp.status, 200)
        headers = dict((k.lower(), v) for k, v in resp.getheaders())
        self.assertEqual(headers.get('access-control-allow-origin'),
                         '*')
        # Now restrict the allowed origin to a single site and re-check.
        resp = retry(put_cors_cont, 'http://secret.com')
        resp.read()
        self.assertEqual(resp.status // 100, 2)
        resp = retry(check_cors,
                     'OPTIONS', 'cat',
                     {'Origin': 'http://m.com',
                      'Access-Control-Request-Method': 'GET'})
        resp.read()
        self.assertEqual(resp.status, 401)
        if strict_cors:
            # Strict mode: disallowed origins get no ACAO header at all.
            resp = retry(check_cors,
                         'GET', 'cat', {'Origin': 'http://m.com'})
            resp.read()
            self.assertEqual(resp.status, 200)
            headers = dict((k.lower(), v) for k, v in resp.getheaders())
            self.assertNotIn('access-control-allow-origin', headers)
            resp = retry(check_cors,
                         'GET', 'cat', {'Origin': 'http://secret.com'})
            resp.read()
            self.assertEqual(resp.status, 200)
            headers = dict((k.lower(), v) for k, v in resp.getheaders())
            self.assertEqual(headers.get('access-control-allow-origin'),
                             'http://secret.com')
        else:
            # Non-strict mode: the request origin is echoed back.
            resp = retry(check_cors,
                         'GET', 'cat', {'Origin': 'http://m.com'})
            resp.read()
            self.assertEqual(resp.status, 200)
            headers = dict((k.lower(), v) for k, v in resp.getheaders())
            self.assertEqual(headers.get('access-control-allow-origin'),
                             'http://m.com')
@requires_policies
def test_cross_policy_copy(self):
# create container in first policy
policy = self.policies.select()
container = self._create_container(
headers={'X-Storage-Policy': policy['name']})
obj = uuid4().hex
# create a container in second policy
other_policy = self.policies.exclude(name=policy['name']).select()
other_container = self._create_container(
headers={'X-Storage-Policy': other_policy['name']})
other_obj = uuid4().hex
def put_obj(url, token, parsed, conn, container, obj):
# to keep track of things, use the original path as the body
content = '%s/%s' % (container, obj)
path = '%s/%s' % (parsed.path, content)
conn.request('PUT', path, content, {'X-Auth-Token': token})
return check_response(conn)
# create objects
for c, o in zip((container, other_container), (obj, other_obj)):
resp = retry(put_obj, c, o)
resp.read()
self.assertEqual(resp.status, 201)
def put_copy_from(url, token, parsed, conn, container, obj, source):
dest_path = '%s/%s/%s' % (parsed.path, container, obj)
conn.request('PUT', dest_path, '',
{'X-Auth-Token': token,
'Content-Length': '0',
'X-Copy-From': source})
return check_response(conn)
copy_requests = (
(container, other_obj, '%s/%s' % (other_container, other_obj)),
(other_container, obj, '%s/%s' % (container, obj)),
)
# copy objects
for c, o, source in copy_requests:
resp = retry(put_copy_from, c, o, source)
resp.read()
self.assertEqual(resp.status, 201)
def get_obj(url, token, parsed, conn, container, obj):
path = '%s/%s/%s' % (parsed.path, container, obj)
conn.request('GET', path, '', {'X-Auth-Token': token})
return check_response(conn)
# validate contents, contents should be source
validate_requests = copy_requests
for c, o, body in validate_requests:
resp = retry(get_obj, c, o)
self.assertEqual(resp.status, 200)
self.assertEqual(body, resp.read())
# Allow running the functional tests in this module directly.
if __name__ == '__main__':
    unittest2.main()
| apache-2.0 |
skearnes/color-features | paper/code/analysis.py | 1 | 12160 | """Analyze results.
Use the saved model output to calculate AUC and other metrics.
"""
import collections
import cPickle as pickle
import gflags as flags
import gzip
import logging
import numpy as np
import os
import pandas as pd
from sklearn import metrics
from statsmodels.stats import proportion
import sys
# Command-line flags describing where model output lives and how the
# experiment was structured.
flags.DEFINE_string('root', None, 'Root directory containing model results.')
flags.DEFINE_string('dataset_file', None, 'Filename containing datasets.')
flags.DEFINE_string('prefix', None, 'Dataset prefix.')
flags.DEFINE_boolean('tversky', False, 'If True, use Tversky features.')
flags.DEFINE_integer('num_folds', 5, 'Number of cross-validation folds.')
flags.DEFINE_boolean('cycle', False,
                     'If True, expect multiple query molecules.')
flags.DEFINE_string('reload', None, 'Load previously analyzed results.')
flags.DEFINE_string('subset', None, 'Subset.')
FLAGS = flags.FLAGS

logging.getLogger().setLevel(logging.INFO)  # Show per-dataset progress.
# Map from internal feature-set identifiers to the display names used in
# the paper's tables and figures (e.g. 'rocs' -> 'TanimotoCombo').
FEATURES_MAP = {
    'rocs': 'TanimotoCombo',
    'shape_color': 'ST-CT',
    'shape_color_components': 'ST-CCT',
    'shape_color_overlaps': 'ST-CAO',
    'shape_color_components_overlaps': 'ST-CCT-CAO',
    'rocs_tversky': 'TverskyCombo',
    'shape_color_tversky': 'STv-CTv',
    'shape_color_components_tversky': 'STv-CCTv',
    'shape_color_components_tversky_overlaps': 'STv-CCTv-CAO',
}

# Map from model identifiers to display abbreviations.
MODEL_MAP = {
    'logistic': 'LR',
    'random_forest': 'RF',
    'svm': 'SVM',
}
def roc_enrichment(fpr, tpr, target_fpr):
    """ROC enrichment: TPR at a target FPR, divided by that FPR.

    Requires an ROC curve that starts at FPR 0, ends at FPR 1, and has
    monotonically non-decreasing FPR values.
    """
    assert fpr[0] == 0
    assert fpr[-1] == 1
    assert np.all(np.diff(fpr) >= 0)
    tpr_at_target = np.interp(target_fpr, fpr, tpr)
    return np.true_divide(tpr_at_target, target_fpr)
def get_cv_metrics(y_true, y_pred):
    """Per-fold AUC and ROC enrichment values.

    Args:
        y_true: List (one entry per fold) of true-label arrays.
        y_pred: List (one entry per fold) of prediction arrays.

    Returns:
        Dict mapping metric name ('auc', 'e-0.005', ...) to a list with
        one value per fold.
    """
    assert len(y_true) == len(y_pred)
    fold_metrics = collections.defaultdict(list)
    enrichment_fprs = [0.005, 0.01, 0.02, 0.05, 0.1, 0.2]
    for truth, pred in zip(y_true, y_pred):
        assert len(truth) == len(pred)
        fold_metrics['auc'].append(metrics.roc_auc_score(truth, pred))
        fpr, tpr, _ = metrics.roc_curve(truth, pred)
        for target in enrichment_fprs:
            fold_metrics['e-%g' % target].append(
                roc_enrichment(fpr, tpr, target))
    return fold_metrics
def add_rows(features, scores, rows, dataset, index=None):
    """Append one row per fold plus a fold-averaged summary row.

    Args:
        features: String feature-set identifier.
        scores: Dict mapping metric name to a list of per-fold values.
        rows: List of result rows, extended in place.
        dataset: String dataset name.
        index: Optional reference-molecule index recorded in each row.
    """
    num_folds = len(scores['auc'])
    for fold in range(num_folds):
        row = {'dataset': dataset, 'features': features, 'fold': fold}
        if index is not None:
            row['index'] = index
        for key, values in scores.iteritems():
            row[key] = values[fold]
        rows.append(row)
    # A summary row (fold == 'all') holding the mean over folds.
    summary = {'dataset': dataset, 'features': features, 'fold': 'all'}
    if index is not None:
        summary['index'] = index
    for key, values in scores.iteritems():
        summary[key] = np.mean(values)
    rows.append(summary)
def load_output_and_calculate_metrics(model, subset):
    """Calculate metrics using saved model output.

    Args:
        model: String model type (e.g. logistic).
        subset: String query subset (e.g. omega1).

    Returns:
        DataFrame with per-fold and fold-averaged metrics for every
        dataset (and, when cycling, every reference molecule).
    """
    with open(FLAGS.dataset_file) as f:
        dataset_names = [line.strip() for line in f]
    rows = []
    for dataset in dataset_names:
        ref_idx = 0
        # Walk reference molecules until output is missing; without
        # --cycle, stop right after the first reference.
        while get_ref_rows(model, subset, dataset, ref_idx, rows):
            if not FLAGS.cycle:
                break
            ref_idx += 1
        logging.info('%s\t%d', dataset, ref_idx)
    return pd.DataFrame(rows)
def get_ref_rows(model, subset, dataset, ref_idx, rows):
    """Load per-fold output for one reference molecule and record metrics.

    For every feature set in FEATURES_MAP, reads the pickled per-fold
    predictions, computes cross-validation metrics, and appends them to
    ``rows`` via add_rows.

    Args:
        model: String model type (e.g. logistic).
        subset: String query subset (e.g. omega1).
        dataset: String dataset name.
        ref_idx: Integer reference-molecule index.
        rows: List of result rows, extended in place.

    Returns:
        False if any expected output file is missing (signals the caller
        to stop cycling through reference indices); True otherwise.
    """
    logging.debug('ref_idx %d', ref_idx)
    for features in FEATURES_MAP.keys():
        logging.debug('Features: %s', features)
        fold_y_true = []
        fold_y_pred = []
        for fold_idx in range(FLAGS.num_folds):
            filename = get_output_filename(dataset, model, subset, features,
                                           fold_idx, ref_idx)
            if not os.path.exists(filename):
                # Missing fold output: abort before recording partial rows.
                return False
            logging.debug(filename)
            with gzip.open(filename) as f:
                df = pickle.load(f)
                fold_y_true.append(df['y_true'].values)
                fold_y_pred.append(df['y_pred'].values)
        scores = get_cv_metrics(fold_y_true, fold_y_pred)
        add_rows(features, scores, rows, dataset, index=ref_idx)
    return True
def get_output_filename(dataset, model, subset, features, fold_idx, ref_idx):
    """Build the path to one saved model-output pickle.

    When FLAGS.cycle is set the filename embeds the reference-molecule
    index; otherwise ref_idx must be zero.
    """
    dirname = os.path.join('%s-%s' % (FLAGS.root, subset),
                           dataset,
                           'fold-%d' % fold_idx)
    if FLAGS.cycle:
        basename = '%s-%s-%s-%s-%s-fold-%d-ref-%d-output.pkl.gz' % (
            FLAGS.prefix, dataset, model, subset, features,
            fold_idx, ref_idx)
    else:
        assert ref_idx == 0
        basename = '%s-%s-%s-%s-%s-fold-%d-output.pkl.gz' % (
            FLAGS.prefix, dataset, model, subset, features,
            fold_idx)
    return os.path.join(dirname, basename)
def load_data(model, subset):
  """Load and concatenate the saved DataFrame for every dataset."""
  frames = []
  with open(FLAGS.dataset_file) as handle:
    for line in handle:
      dataset = line.strip()
      path = os.path.join(FLAGS.root, '%s-%s-%s-%s.pkl.gz' % (
          FLAGS.prefix, dataset, model, subset))
      assert os.path.exists(path)
      logging.info(path)
      with gzip.open(path) as compressed:
        frames.append(pickle.load(compressed))
  return pd.concat(frames)
def confidence_interval(delta, metric):
  """Calculate a two-sided 95% confidence interval for differences.

  Args:
    delta: pd.Series of per-dataset differences (model metric minus the ROCS
      baseline metric).
    metric: String metric name; 'auc' is formatted with three decimals, other
      metrics (enrichment counts) as integers.

  Returns:
    Tuple of LaTeX-formatted strings: (median, confidence_interval).
  """
  # Wilson score interval for sign test.
  # NOTE(review): `proportion` is presumably statsmodels.stats.proportion,
  # imported near the top of the file (not visible here) -- confirm.
  num_successes = np.count_nonzero(delta > 0)
  num_trials = np.count_nonzero(delta != 0)  # Exclude zero differences.
  lower, upper = proportion.proportion_confint(
      num_successes, num_trials, alpha=0.05, method='wilson')
  median_delta = delta.median()
  if metric == 'auc':
    median = r'%.3f' % median_delta
    ci = r'(%.2f, %.2f)' % (lower, upper)
  else:
    median = r'%.0f' % median_delta
    ci = r'(%.2f, %.2f)' % (lower, upper)
  # The sign-test success proportion is compared against 0.5: an interval
  # entirely below 0.5 is a significant loss (bold red); entirely above 0.5
  # is a significant win (bold).
  if lower < 0.5 and upper < 0.5:
    median = r'\bfseries \color{red} ' + median
    ci = r'\bfseries \color{red} ' + ci
  elif lower > 0.5 and upper > 0.5:
    median = r'\bfseries ' + median
    ci = r'\bfseries ' + ci
  return median, ci
def data_table(data, subsets, models, kind=None, tversky=False):
  """Get medians and compare everything to ROCS.

  Prints a LaTeX table body (rows joined by ' \\\\') to stdout: one ROCS
  baseline row followed by one row per (model, feature set), each carrying
  the median metric plus median-difference and confidence-interval columns.

  Args:
    data: DataFrame containing model performance.
    subsets: List of query subsets.
    models: List of models to include in the table.
    kind: List of metrics to report. Defaults to ['auc'].
    tversky: Boolean whether to use Tversky features. If False, use Tanimoto
      features.
  """
  if kind is None:
    kind = ['auc']
  if tversky:
    rocs_baseline = 'rocs_tversky'
    features_order = ['shape_color_tversky',
                      'shape_color_components_tversky',
                      'shape_color_overlaps',
                      'shape_color_components_tversky_overlaps']
  else:
    rocs_baseline = 'rocs'
    features_order = ['shape_color', 'shape_color_components',
                      'shape_color_overlaps',
                      'shape_color_components_overlaps']
  table = []
  # Get ROCS row.
  # The baseline is taken from the first model only; ROCS scores do not
  # depend on the downstream model.
  row = [r'\cellcolor{white} ROCS', FEATURES_MAP[rocs_baseline]]
  for subset in subsets:
    rocs_mask = ((data['features'] == rocs_baseline) &
                 (data['subset'] == subset) &
                 (data['model'] == models[0]))
    rocs_df = data[rocs_mask]
    logging.info('Confidence interval N = %d', len(rocs_df))
    logging.info('Number of datasets = %d',
                 len(pd.unique(rocs_df['dataset'])))
    for metric in kind:
      if metric == 'auc':
        number = '%.3f'
      else:
        number = '%.0f'
      # Two trailing empty cells align with the median-diff/CI columns that
      # model rows fill in below.
      row.extend([number % rocs_df[metric].median(), '', ''])
  table.append(' & '.join(row))
  # Get model rows.
  for model in models:
    for features in features_order:
      # The \multirow model label is emitted on the last feature row so the
      # multirow spans upward (negative row count) in the rendered table.
      if features == features_order[-1]:
        row = [r'\multirow{-%d}{*}{\cellcolor{white} %s}' % (
            len(features_order), MODEL_MAP[model])]
      else:
        row = [r'\cellcolor{white}']
      row.append(FEATURES_MAP[features])
      for subset in subsets:
        mask = ((data['features'] == features) &
                (data['subset'] == subset) &
                (data['model'] == model))
        df = data[mask]
        rocs_mask = ((data['features'] == rocs_baseline) &
                     (data['subset'] == subset) &
                     (data['model'] == model))
        rocs_df = data[rocs_mask]
        for metric in kind:
          if metric == 'auc':
            number = '%.3f'
          else:
            number = '%.0f'
          row.append(number % df[metric].median())
          if features == rocs_baseline:
            row.append('')
            row.append('')
          else:
            # Rows must be aligned dataset-by-dataset (and by reference
            # molecule index, when present) before differencing.
            assert np.array_equal(df['dataset'].values,
                                  rocs_df['dataset'].values)
            if 'index' in df.columns:
              assert np.array_equal(df['index'].values,
                                    rocs_df['index'].values)
            delta = df.copy()
            delta[metric] -= rocs_df[metric].values
            row.extend(confidence_interval(delta[metric], metric))
      table.append(' & '.join(row))
  print ' \\\\\n'.join(table)
def main():
  """Load (or recompute) per-model metrics and print the LaTeX tables."""
  # Each benchmark prefix has a fixed set of query subsets; MUV and ChEMBL
  # additionally require cycling over reference molecules.
  if FLAGS.prefix == 'muv':
    subsets = ['omega1']
    assert FLAGS.cycle
  elif FLAGS.prefix == 'dude':
    subsets = ['xtal', 'omega1']
  elif FLAGS.prefix == 'chembl':
    subsets = ['omega1']
    assert FLAGS.cycle
  else:
    raise ValueError(FLAGS.prefix)
  if FLAGS.subset is not None:
    subsets = [FLAGS.subset]
  # Load data from output or previously processed.
  models = ['logistic', 'random_forest', 'svm']
  if FLAGS.reload is not None:
    logging.info('Loading processed data from %s', FLAGS.reload)
    data = pd.read_pickle(FLAGS.reload)
  else:
    data = []
    for model in models:
      for subset in subsets:
        logging.info('%s\t%s', model, subset)
        df = load_output_and_calculate_metrics(model, subset)
        df['model'] = model
        df['subset'] = subset
        data.append(df)
    data = pd.concat(data)
    # Save processed data.
    filename = '%s-processed.pkl.gz' % FLAGS.prefix
    logging.info('Saving processed data to %s', filename)
    with gzip.open(filename, 'wb') as f:
      pickle.dump(data, f, pickle.HIGHEST_PROTOCOL)
  # Only keep 5-fold mean information.
  mask = data['fold'] == 'all'
  data = data[mask]
  # AUC tables.
  # Combine subsets into a single table here.
  logging.info('AUC table')
  data_table(data, subsets, models, kind=['auc'], tversky=FLAGS.tversky)
  # Enrichment tables.
  # One per FPR.
  for metric in ['e-0.005', 'e-0.01', 'e-0.02', 'e-0.05']:
    logging.info('Metric: %s', metric)
    logging.info('Enrichment table')
    data_table(data, subsets, models, kind=[metric], tversky=FLAGS.tversky)
if __name__ == '__main__':
  # These flags have no sensible defaults; fail fast if they are missing.
  flags.MarkFlagAsRequired('root')
  flags.MarkFlagAsRequired('dataset_file')
  flags.MarkFlagAsRequired('prefix')
  FLAGS(sys.argv)  # Parse command-line flags.
  main()
| bsd-3-clause |
TheoRettisch/p2pool-octocoin | SOAPpy/Config.py | 289 | 7622 | """
################################################################################
# Copyright (c) 2003, Pfizer
# Copyright (c) 2001, Cayce Ullman.
# Copyright (c) 2001, Brian Matthews.
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of actzero, inc. nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
################################################################################
"""
ident = '$Id: Config.py 1298 2006-11-07 00:54:15Z sanxiyn $'
from version import __version__
import socket
from types import *
from NS import NS
################################################################################
# Configuration class
################################################################################
class SOAPConfig:
    """Mutable bag of SOAPpy configuration settings.

    Settings are plain instance attributes; __setattr__ intercepts a few
    "linked" names (namespace settings and debug) so that assigning one
    updates its companions consistently. SSL/GSI capability flags are
    read-only and derived from which optional libraries import cleanly.
    """
    # Capability flags that may only be set during __init__ (via __dict__).
    __readonly = ('SSLserver', 'SSLclient', 'GSIserver', 'GSIclient')

    class SSLconfig:
        # Holder for client/server certificate paths.
        __slots__ = ('key_file', 'cert_file')
        key_file = None
        cert_file = None

    def __init__(self, config = None, **kw):
        """Copy settings from another SOAPConfig, or install defaults.

        Keyword arguments are applied last via setattr (so linked settings
        still behave). Names starting with '_' are never copied or set.
        """
        d = self.__dict__
        if config:
            if not isinstance(config, SOAPConfig):
                raise AttributeError, \
                    "initializer must be SOAPConfig instance"
            s = config.__dict__
            for k, v in s.items():
                if k[0] != '_':
                    d[k] = v
        else:
            # Setting debug also sets returnFaultInfo,
            # dumpHeadersIn, dumpHeadersOut, dumpSOAPIn, and dumpSOAPOut
            self.debug = 0
            self.dumpFaultInfo = 1
            # Setting namespaceStyle sets typesNamespace, typesNamespaceURI,
            # schemaNamespace, and schemaNamespaceURI
            self.namespaceStyle = '1999'
            self.strictNamespaces = 0
            self.typed = 1
            self.buildWithNamespacePrefix = 1
            self.returnAllAttrs = 0
            # Strict checking of range for floats and doubles
            self.strict_range = 0
            # Default encoding for dictionary keys
            self.dict_encoding = 'ascii'
            # New argument name handling mechanism.  See
            # README.MethodParameterNaming for details
            self.specialArgs = 1
            # If unwrap_results=1 and there is only element in the struct,
            # SOAPProxy will assume that this element is the result
            # and return it rather than the struct containing it.
            # Otherwise SOAPproxy will return the struct with all the
            # elements as attributes.
            self.unwrap_results = 1
            # Automatically convert SOAP complex types, and
            # (recursively) public contents into the corresponding
            # python types. (Private subobjects have names that start
            # with '_'.)
            #
            # Conversions:
            # - faultType    --> raise python exception
            # - arrayType    --> array
            # - compoundType --> dictionary
            #
            self.simplify_objects = 0
            # Per-class authorization method. If this is set, before
            # calling a any class method, the specified authorization
            # method will be called.  If it returns 1, the method call
            # will proceed, otherwise the call will throw with an
            # authorization error.
            self.authMethod = None
            # Globus Support if pyGlobus.io available
            # Capability flags go straight into __dict__ to bypass the
            # read-only guard in __setattr__.
            try:
                from pyGlobus import io;
                d['GSIserver'] = 1
                d['GSIclient'] = 1
            except:
                d['GSIserver'] = 0
                d['GSIclient'] = 0
            # Server SSL support if M2Crypto.SSL available
            try:
                from M2Crypto import SSL
                d['SSLserver'] = 1
            except:
                d['SSLserver'] = 0
            # Client SSL support if socket.ssl available
            try:
                from socket import ssl
                d['SSLclient'] = 1
            except:
                d['SSLclient'] = 0
            # Cert support
            if d['SSLclient'] or d['SSLserver']:
                d['SSL'] = self.SSLconfig()
        for k, v in kw.items():
            if k[0] != '_':
                setattr(self, k, v)

    def __setattr__(self, name, value):
        """Set a config attribute, keeping linked settings consistent."""
        if name in self.__readonly:
            raise AttributeError, "readonly configuration setting"
        d = self.__dict__
        if name in ('typesNamespace', 'typesNamespaceURI',
            'schemaNamespace', 'schemaNamespaceURI'):
            # Namespace settings come in prefix/URI pairs; accept either a
            # known string (looked up in NS.NSMAP / NS.NSMAP_R) or an
            # explicit (prefix, uri) sequence, then store both halves.
            if name[-3:] == 'URI':
                base, uri = name[:-3], 1
            else:
                base, uri = name, 0
            if type(value) == StringType:
                if NS.NSMAP.has_key(value):
                    n = (value, NS.NSMAP[value])
                elif NS.NSMAP_R.has_key(value):
                    n = (NS.NSMAP_R[value], value)
                else:
                    raise AttributeError, "unknown namespace"
            elif type(value) in (ListType, TupleType):
                if uri:
                    n = (value[1], value[0])
                else:
                    n = (value[0], value[1])
            else:
                raise AttributeError, "unknown namespace type"
            d[base], d[base + 'URI'] = n
            # Re-derive namespaceStyle; blank when the combination does not
            # match a known style.
            try:
                d['namespaceStyle'] = \
                    NS.STMAP_R[(d['typesNamespace'], d['schemaNamespace'])]
            except:
                d['namespaceStyle'] = ''
        elif name == 'namespaceStyle':
            # A style name expands into both namespace prefix/URI pairs.
            value = str(value)
            if not NS.STMAP.has_key(value):
                raise AttributeError, "unknown namespace style"
            d[name] = value
            n = d['typesNamespace'] = NS.STMAP[value][0]
            d['typesNamespaceURI'] = NS.NSMAP[n]
            n = d['schemaNamespace'] = NS.STMAP[value][1]
            d['schemaNamespaceURI'] = NS.NSMAP[n]
        elif name == 'debug':
            # debug fans out to all dump/fault-info switches.
            d[name] = \
                d['returnFaultInfo'] = \
                d['dumpHeadersIn'] = \
                d['dumpHeadersOut'] = \
                d['dumpSOAPIn'] = \
                d['dumpSOAPOut'] = value
        else:
            d[name] = value
Config = SOAPConfig()
| gpl-3.0 |
Zozoz/shadowsocks | shadowsocks/eventloop.py | 949 | 7288 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2013-2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# from ssloop
# https://github.com/clowwindy/ssloop
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import time
import socket
import select
import errno
import logging
from collections import defaultdict
from shadowsocks import shell
__all__ = ['EventLoop', 'POLL_NULL', 'POLL_IN', 'POLL_OUT', 'POLL_ERR',
'POLL_HUP', 'POLL_NVAL', 'EVENT_NAMES']
POLL_NULL = 0x00
POLL_IN = 0x01
POLL_OUT = 0x04
POLL_ERR = 0x08
POLL_HUP = 0x10
POLL_NVAL = 0x20
EVENT_NAMES = {
POLL_NULL: 'POLL_NULL',
POLL_IN: 'POLL_IN',
POLL_OUT: 'POLL_OUT',
POLL_ERR: 'POLL_ERR',
POLL_HUP: 'POLL_HUP',
POLL_NVAL: 'POLL_NVAL',
}
# we check timeouts every TIMEOUT_PRECISION seconds
TIMEOUT_PRECISION = 10
class KqueueLoop(object):
    """Poller backed by BSD kqueue, adapting it to the POLL_* interface."""

    # Maximum number of kevents fetched per control() call.
    MAX_EVENTS = 1024

    def __init__(self):
        self._kqueue = select.kqueue()
        # fd -> registered POLL_* mode; needed to build delete events later.
        self._fds = {}

    def _control(self, fd, mode, flags):
        """Apply KQ_EV_ADD/KQ_EV_DELETE for the filters implied by mode."""
        events = []
        if mode & POLL_IN:
            events.append(select.kevent(fd, select.KQ_FILTER_READ, flags))
        if mode & POLL_OUT:
            events.append(select.kevent(fd, select.KQ_FILTER_WRITE, flags))
        for e in events:
            self._kqueue.control([e], 0)

    def poll(self, timeout):
        """Wait for events; returns iterable of (fd, POLL_* mask) pairs."""
        if timeout < 0:
            timeout = None  # kqueue behaviour
        events = self._kqueue.control(None, KqueueLoop.MAX_EVENTS, timeout)
        results = defaultdict(lambda: POLL_NULL)
        for e in events:
            fd = e.ident
            if e.filter == select.KQ_FILTER_READ:
                results[fd] |= POLL_IN
            elif e.filter == select.KQ_FILTER_WRITE:
                results[fd] |= POLL_OUT
        return results.items()

    def register(self, fd, mode):
        self._fds[fd] = mode
        self._control(fd, mode, select.KQ_EV_ADD)

    def unregister(self, fd):
        self._control(fd, self._fds[fd], select.KQ_EV_DELETE)
        del self._fds[fd]

    def modify(self, fd, mode):
        # kqueue has no single "modify"; drop and re-add the fd.
        self.unregister(fd)
        self.register(fd, mode)

    def close(self):
        self._kqueue.close()
class SelectLoop(object):
    """Portable fallback poller built on select.select()."""

    def __init__(self):
        # One fd set per event class mirrors select()'s three arguments.
        self._r_list = set()
        self._w_list = set()
        self._x_list = set()

    def poll(self, timeout):
        """Wait for events; returns iterable of (fd, POLL_* mask) pairs."""
        readable, writable, errored = select.select(
            self._r_list, self._w_list, self._x_list, timeout)
        results = defaultdict(lambda: POLL_NULL)
        for ready_fds, event in ((readable, POLL_IN),
                                 (writable, POLL_OUT),
                                 (errored, POLL_ERR)):
            for fd in ready_fds:
                results[fd] |= event
        return results.items()

    def register(self, fd, mode):
        if mode & POLL_IN:
            self._r_list.add(fd)
        if mode & POLL_OUT:
            self._w_list.add(fd)
        if mode & POLL_ERR:
            self._x_list.add(fd)

    def unregister(self, fd):
        # discard() is a no-op for fds that were never registered for a set.
        self._r_list.discard(fd)
        self._w_list.discard(fd)
        self._x_list.discard(fd)

    def modify(self, fd, mode):
        self.unregister(fd)
        self.register(fd, mode)

    def close(self):
        pass
class EventLoop(object):
    """I/O event loop selecting the best available poller (epoll > kqueue >
    select) and dispatching events to registered handlers."""

    def __init__(self):
        if hasattr(select, 'epoll'):
            self._impl = select.epoll()
            model = 'epoll'
        elif hasattr(select, 'kqueue'):
            self._impl = KqueueLoop()
            model = 'kqueue'
        elif hasattr(select, 'select'):
            self._impl = SelectLoop()
            model = 'select'
        else:
            raise Exception('can not find any available functions in select '
                            'package')
        self._fdmap = {}  # (f, handler)
        self._last_time = time.time()
        self._periodic_callbacks = []
        self._stopping = False
        logging.debug('using event model: %s', model)

    def poll(self, timeout=None):
        """Poll once; returns a list of (file_object, fd, event) tuples."""
        events = self._impl.poll(timeout)
        return [(self._fdmap[fd][0], fd, event) for fd, event in events]

    def add(self, f, mode, handler):
        """Register file-like object f for the events in mode."""
        fd = f.fileno()
        self._fdmap[fd] = (f, handler)
        self._impl.register(fd, mode)

    def remove(self, f):
        fd = f.fileno()
        del self._fdmap[fd]
        self._impl.unregister(fd)

    def add_periodic(self, callback):
        # Periodic callbacks run roughly every TIMEOUT_PRECISION seconds.
        self._periodic_callbacks.append(callback)

    def remove_periodic(self, callback):
        self._periodic_callbacks.remove(callback)

    def modify(self, f, mode):
        fd = f.fileno()
        self._impl.modify(fd, mode)

    def stop(self):
        # run() checks this flag once per poll iteration.
        self._stopping = True

    def run(self):
        """Dispatch events until stop() is called."""
        events = []
        while not self._stopping:
            asap = False
            try:
                events = self.poll(TIMEOUT_PRECISION)
            except (OSError, IOError) as e:
                if errno_from_exception(e) in (errno.EPIPE, errno.EINTR):
                    # EPIPE: Happens when the client closes the connection
                    # EINTR: Happens when received a signal
                    # handles them as soon as possible
                    asap = True
                    logging.debug('poll:%s', e)
                else:
                    logging.error('poll:%s', e)
                    import traceback
                    traceback.print_exc()
                    continue
            for sock, fd, event in events:
                handler = self._fdmap.get(fd, None)
                if handler is not None:
                    handler = handler[1]
                    try:
                        # Handler errors are logged, not fatal to the loop.
                        handler.handle_event(sock, fd, event)
                    except (OSError, IOError) as e:
                        shell.print_exception(e)
            now = time.time()
            if asap or now - self._last_time >= TIMEOUT_PRECISION:
                for callback in self._periodic_callbacks:
                    callback()
                self._last_time = now

    def __del__(self):
        self._impl.close()
# from tornado
def errno_from_exception(e):
    """Provides the errno from an Exception object.

    Some exceptions never had their errno attribute set, so fall back to
    the first positional argument; returns None when neither is available
    (e.g. an Exception instantiated with no args).
    """
    try:
        return e.errno
    except AttributeError:
        pass
    return e.args[0] if e.args else None
# from tornado
def get_sock_error(sock):
    """Build a socket.error carrying the socket's pending SO_ERROR value."""
    code = sock.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
    return socket.error(code, os.strerror(code))
| apache-2.0 |
techtonik/numpy | numpy/f2py/tests/test_return_integer.py | 69 | 4639 | from __future__ import division, absolute_import, print_function
from numpy.testing import *
from numpy import array
from numpy.compat import long
import util
class TestReturnInteger(util.F2PyTest):
    """Shared checks for f2py-wrapped Fortran routines returning integers.

    Subclasses supply the Fortran source in `code`; check_function() verifies
    argument coercion, error behavior, and (for 8-byte kinds) overflow.
    """

    def check_function(self, t):
        # Scalars, floats, longs, strings and nested sequences/arrays must
        # all coerce to the routine's integer argument.
        assert_( t(123)==123, repr(t(123)))
        assert_( t(123.6)==123)
        assert_( t(long(123))==123)
        assert_( t('123')==123)
        assert_( t(-123)==-123)
        assert_( t([123])==123)
        assert_( t((123,))==123)
        assert_( t(array(123))==123)
        assert_( t(array([123]))==123)
        assert_( t(array([[123]]))==123)
        assert_( t(array([123], 'b'))==123)
        assert_( t(array([123], 'h'))==123)
        assert_( t(array([123], 'i'))==123)
        assert_( t(array([123], 'l'))==123)
        assert_( t(array([123], 'B'))==123)
        assert_( t(array([123], 'f'))==123)
        assert_( t(array([123], 'd'))==123)
        # Non-numeric and empty inputs must raise.
        #assert_raises(ValueError, t, array([123],'S3'))
        assert_raises(ValueError, t, 'abc')
        assert_raises(IndexError, t, [])
        assert_raises(IndexError, t, ())
        assert_raises(Exception, t, t)
        assert_raises(Exception, t, {})
        # Only the 8-byte variants can represent values this large; anything
        # bigger must overflow.
        if t.__doc__.split()[0] in ['t8', 's8']:
            assert_raises(OverflowError, t, 100000000000000000000000)
            assert_raises(OverflowError, t, 10000000011111111111111.23)
class TestF77ReturnInteger(TestReturnInteger):
    """Fixed-form Fortran 77 functions/subroutines for each integer kind."""
    # NOTE(review): whitespace inside this fixed-form source is significant
    # (statements start at column >= 7, `cf2py` directives at column 1); the
    # original indentation was lost in extraction and is restored here to the
    # conventional f2py test layout -- confirm against upstream.
    code = """
       function t0(value)
         integer value
         integer t0
         t0 = value
       end
       function t1(value)
         integer*1 value
         integer*1 t1
         t1 = value
       end
       function t2(value)
         integer*2 value
         integer*2 t2
         t2 = value
       end
       function t4(value)
         integer*4 value
         integer*4 t4
         t4 = value
       end
       function t8(value)
         integer*8 value
         integer*8 t8
         t8 = value
       end
       subroutine s0(t0,value)
         integer value
         integer t0
cf2py    intent(out) t0
         t0 = value
       end
       subroutine s1(t1,value)
         integer*1 value
         integer*1 t1
cf2py    intent(out) t1
         t1 = value
       end
       subroutine s2(t2,value)
         integer*2 value
         integer*2 t2
cf2py    intent(out) t2
         t2 = value
       end
       subroutine s4(t4,value)
         integer*4 value
         integer*4 t4
cf2py    intent(out) t4
         t4 = value
       end
       subroutine s8(t8,value)
         integer*8 value
         integer*8 t8
cf2py    intent(out) t8
         t8 = value
       end
    """

    @dec.slow
    def test_all(self):
        # Run the shared integer checks on every compiled routine.
        for name in "t0,t1,t2,t4,t8,s0,s1,s2,s4,s8".split(","):
            self.check_function(getattr(self.module, name))
class TestF90ReturnInteger(TestReturnInteger):
    """Free-form Fortran 90 module routines for each integer kind."""
    suffix = ".f90"
    # NOTE(review): the original indentation of this free-form source was lost
    # in extraction and is reconstructed here (free-form Fortran is
    # indentation-insensitive) -- confirm against upstream if byte-exactness
    # matters.
    code = """
module f90_return_integer
  contains
       function t0(value)
        integer :: value
        integer :: t0
        t0 = value
       end function t0
       function t1(value)
        integer(kind=1) :: value
        integer(kind=1) :: t1
        t1 = value
       end function t1
       function t2(value)
        integer(kind=2) :: value
        integer(kind=2) :: t2
        t2 = value
       end function t2
       function t4(value)
        integer(kind=4) :: value
        integer(kind=4) :: t4
        t4 = value
       end function t4
       function t8(value)
        integer(kind=8) :: value
        integer(kind=8) :: t8
        t8 = value
       end function t8
       subroutine s0(t0,value)
        integer :: value
        integer :: t0
!f2py   intent(out) t0
        t0 = value
       end subroutine s0
       subroutine s1(t1,value)
        integer(kind=1) :: value
        integer(kind=1) :: t1
!f2py   intent(out) t1
        t1 = value
       end subroutine s1
       subroutine s2(t2,value)
        integer(kind=2) :: value
        integer(kind=2) :: t2
!f2py   intent(out) t2
        t2 = value
       end subroutine s2
       subroutine s4(t4,value)
        integer(kind=4) :: value
        integer(kind=4) :: t4
!f2py   intent(out) t4
        t4 = value
       end subroutine s4
       subroutine s8(t8,value)
        integer(kind=8) :: value
        integer(kind=8) :: t8
!f2py   intent(out) t8
        t8 = value
       end subroutine s8
end module f90_return_integer
    """

    @dec.slow
    def test_all(self):
        # Routines live inside the f90 module namespace, unlike the F77 case.
        for name in "t0,t1,t2,t4,t8,s0,s1,s2,s4,s8".split(","):
            self.check_function(getattr(self.module.f90_return_integer, name))
if __name__ == "__main__":
    # Allow running this test module directly under nose.
    import nose
    nose.runmodule()
| bsd-3-clause |
nkmk/python-snippets | notebook/numpy_split.py | 1 | 1773 | import numpy as np
a = np.arange(16).reshape(4, 4)
print(a)
# [[ 0 1 2 3]
# [ 4 5 6 7]
# [ 8 9 10 11]
# [12 13 14 15]]
a_split = np.split(a, 2)
print(type(a_split))
# <class 'list'>
print(len(a_split))
# 2
print(a_split[0])
# [[0 1 2 3]
# [4 5 6 7]]
print(a_split[1])
# [[ 8 9 10 11]
# [12 13 14 15]]
print(type(a_split[0]))
# <class 'numpy.ndarray'>
print(a)
# [[ 0 1 2 3]
# [ 4 5 6 7]
# [ 8 9 10 11]
# [12 13 14 15]]
a0, a1 = np.split(a, 2)
print(a0)
# [[0 1 2 3]
# [4 5 6 7]]
print(a1)
# [[ 8 9 10 11]
# [12 13 14 15]]
# np.split(a, 3)
# ValueError: array split does not result in an equal division
a0, a1, a2 = np.split(a, [1, 3])
print(a0)
# [[0 1 2 3]]
print(a1)
# [[ 4 5 6 7]
# [ 8 9 10 11]]
print(a2)
# [[12 13 14 15]]
a0, a1 = np.split(a, [1])
print(a0)
# [[0 1 2 3]]
print(a1)
# [[ 4 5 6 7]
# [ 8 9 10 11]
# [12 13 14 15]]
a0, a1 = np.split(a, 2, 0)
print(a0)
# [[0 1 2 3]
# [4 5 6 7]]
print(a1)
# [[ 8 9 10 11]
# [12 13 14 15]]
a0, a1 = np.split(a, 2, 1)
print(a0)
# [[ 0 1]
# [ 4 5]
# [ 8 9]
# [12 13]]
print(a1)
# [[ 2 3]
# [ 6 7]
# [10 11]
# [14 15]]
# np.split(a, 2, 2)
# IndexError: tuple index out of range
a_3d = np.arange(24).reshape(2, 3, 4)
print(a_3d)
# [[[ 0 1 2 3]
# [ 4 5 6 7]
# [ 8 9 10 11]]
#
# [[12 13 14 15]
# [16 17 18 19]
# [20 21 22 23]]]
print(a_3d.shape)
# (2, 3, 4)
a0, a1 = np.split(a_3d, 2, 0)
print(a0)
# [[[ 0 1 2 3]
# [ 4 5 6 7]
# [ 8 9 10 11]]]
print(a1)
# [[[12 13 14 15]
# [16 17 18 19]
# [20 21 22 23]]]
a0, a1 = np.split(a_3d, [1], 2)
print(a0)
# [[[ 0]
# [ 4]
# [ 8]]
#
# [[12]
# [16]
# [20]]]
print(a1)
# [[[ 1 2 3]
# [ 5 6 7]
# [ 9 10 11]]
#
# [[13 14 15]
# [17 18 19]
# [21 22 23]]]
| mit |
vaygr/ansible | lib/ansible/plugins/terminal/ce.py | 122 | 1841 | #
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
from ansible.plugins.terminal import TerminalBase
from ansible.errors import AnsibleConnectionFailure
class TerminalModule(TerminalBase):
    """Terminal plugin for Huawei CloudEngine (ce) network devices."""

    # CLI prompts: "<hostname>" in user view, "[hostname]" in system view.
    terminal_stdout_re = [
        re.compile(r'[\r\n]?<.+>(?:\s*)$'),
        re.compile(r'[\r\n]?\[.+\](?:\s*)$'),
    ]

    # Output matching any of these patterns is treated as a command error.
    terminal_stderr_re = [
        re.compile(r"% ?Error: "),
        re.compile(r"^% \w+", re.M),
        re.compile(r"% ?Bad secret"),
        re.compile(r"invalid input", re.I),
        re.compile(r"(?:incomplete|ambiguous) command", re.I),
        re.compile(r"connection timed out", re.I),
        re.compile(r"[^\r\n]+ not found", re.I),
        re.compile(r"'[^']' +returned error code: ?\d+"),
        re.compile(r"syntax error"),
        re.compile(r"unknown command"),
        re.compile(r"Error\[\d+\]: ", re.I),
        re.compile(r"Error:", re.I)
    ]

    def on_open_shell(self):
        """Disable output paging so multi-page command output is not
        interrupted by --More-- prompts."""
        try:
            self._exec_cli_command('screen-length 0 temporary')
        except AnsibleConnectionFailure:
            raise AnsibleConnectionFailure('unable to set terminal parameters')
| gpl-3.0 |
srh/rethinkdb | test/regression/issue_659.py | 12 | 3384 | #!/usr/bin/env python
# Copyright 2012-2016 RethinkDB, all rights reserved.
'''Lowering the number of replicas results in strange behavior'''
from __future__ import print_function
import os, sys, time
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, 'common')))
import rdb_unittest, utils
class ReplicaChangeRegrssion(rdb_unittest.RdbTestCase):
    """Regression test for issue 659: lowering the number of replicas
    results in strange behavior.

    Raises the replica count, lowers it again (expecting the slave's data
    file to be deleted), then raises it once more and checks a backfill job
    appears.
    """
    shards = 1
    servers = 2
    recordsToGenerate = 20000

    def test_replicaChanges(self):
        print() # solve a formatting issue with unittest reporting
        tableUUID = self.table.info()['id'].run(self.conn)
        utils.print_with_time("Increasing replication factor")
        self.table.reconfigure(shards=1, replicas=2).run(self.conn)
        self.table.wait(wait_for='all_replicas_ready').run(self.conn)
        self.checkCluster()
        utils.print_with_time("Checking that both servers have a data file")
        deadline = time.time() + 5
        lastError = None
        while time.time() < deadline:
            # for/else: break out of the while once every server has its file.
            for server in self.cluster:
                dataPath = os.path.join(server.data_path, tableUUID)
                if not os.path.exists(dataPath):
                    lastError = 'Server %s does not have a data file at %s' % (server.name, dataPath)
                    break
            else:
                break
        else:
            raise Exception(lastError or 'Timed out in a weird way')
        master = self.getPrimaryForShard(0)
        slave = self.getReplicaForShard(0)
        utils.print_with_time("Decreasing replication factor")
        self.table.config().update({'shards':[{'primary_replica':master.name, 'replicas':[master.name]}]}).run(self.conn)
        self.table.wait(wait_for='all_replicas_ready').run(self.conn)
        self.checkCluster()
        utils.print_with_time("Waiting for file deletion on the slave")
        deadline = time.time() + 5
        slaveDataPath = os.path.join(slave.data_path, tableUUID)
        while time.time() < deadline:
            if not os.path.exists(slaveDataPath):
                break
        else:
            raise Exception('File deletion had not happend after 5 seconds, file still exists at: %s' % slaveDataPath)
        utils.print_with_time("Increasing replication factor again")
        self.table.reconfigure(shards=1, replicas=2).run(self.conn, noreply=True)
        utils.print_with_time("Confirming that the progress meter indicates a backfill happening")
        deadline = time.time() + 5
        last_error = None
        # NOTE(review): `r` (the rethinkdb driver module) is not imported in
        # the visible header; presumably provided by rdb_unittest/utils --
        # confirm, otherwise this block would raise NameError.
        while time.time() < deadline:
            try:
                assert r.db("rethinkdb") \
                    .table("jobs") \
                    .filter({"type": "backfill", "info": {"table": self.tableName}}) \
                    .count() \
                    .run(self.conn) == 1, "No backfill job found in `rethinkdb.jobs`."
                break
            except Exception, e:
                last_error = e
                time.sleep(0.02)
        else:
            pass #raise last_error
        utils.print_with_time("Killing the cluster")
        # The large backfill might take time, and for this test we don't care about it succeeding
        for server in self.cluster[:]:
            server.kill()
if __name__ == '__main__':
    # Discover and run the test case above via the project's test harness.
    rdb_unittest.main()
| apache-2.0 |
ewindisch/nova | nova/db/base.py | 26 | 1379 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base class for classes that need modular database access."""
from oslo.config import cfg
from nova.openstack.common import importutils
db_driver_opt = cfg.StrOpt('db_driver',
default='nova.db',
help='The driver to use for database access')
CONF = cfg.CONF
CONF.register_opt(db_driver_opt)
class Base(object):
    """Base class whose DB driver module is injected at construction time."""

    def __init__(self, db_driver=None):
        """Import and attach the DB driver (defaults to CONF.db_driver)."""
        super(Base, self).__init__()
        driver_name = db_driver or CONF.db_driver
        self.db = importutils.import_module(driver_name)  # pylint: disable=C0103
| apache-2.0 |
emedinaa/contentbox | third_party/social/backends/runkeeper.py | 83 | 1807 | """
RunKeeper OAuth2 backend, docs at:
http://psa.matiasaguirre.net/docs/backends/runkeeper.html
"""
from social.backends.oauth import BaseOAuth2
class RunKeeperOAuth2(BaseOAuth2):
    """RunKeeper OAuth2 authentication backend.

    Docs at http://psa.matiasaguirre.net/docs/backends/runkeeper.html
    """
    name = 'runkeeper'
    AUTHORIZATION_URL = 'https://runkeeper.com/apps/authorize'
    ACCESS_TOKEN_URL = 'https://runkeeper.com/apps/token'
    ACCESS_TOKEN_METHOD = 'POST'
    EXTRA_DATA = [
        ('userID', 'id'),
    ]

    def get_user_id(self, details, response):
        # RunKeeper's /user payload exposes the unique account id as userID.
        return response['userID']

    def get_user_details(self, response):
        """Parse username from profile link"""
        username = None
        profile_url = response.get('profile')
        # Bug fix: response.get('profile') returns None when the key is
        # absent; the previous code called len(None) and raised TypeError.
        # A plain truthiness check covers both None and empty string.
        if profile_url:
            profile_url_parts = profile_url.split('http://runkeeper.com/user/')
            if len(profile_url_parts) > 1 and profile_url_parts[1]:
                username = profile_url_parts[1]
        fullname, first_name, last_name = self.get_user_names(
            fullname=response.get('name')
        )
        return {'username': username,
                'email': response.get('email') or '',
                'fullname': fullname,
                'first_name': first_name,
                'last_name': last_name}

    def user_data(self, access_token, *args, **kwargs):
        """Fetch and merge the /user and /profile endpoint payloads."""
        # We need to use the /user endpoint to get the user id, the /profile
        # endpoint contains name, user name, location, gender
        user_data = self._user_data(access_token, '/user')
        profile_data = self._user_data(access_token, '/profile')
        return dict(user_data, **profile_data)

    def _user_data(self, access_token, path):
        # Helper: GET a Health Graph API path with the access token attached.
        url = 'https://api.runkeeper.com{0}'.format(path)
        return self.get_json(url, params={'access_token': access_token})
| apache-2.0 |
ProvincalInovationManagement/provinceManagement | debug_toolbar/utils/__init__.py | 2 | 5594 | import inspect
import os.path
import django
import SocketServer
import sys
from django.conf import settings
from django.views.debug import linebreak_iter
from django.utils.html import escape
from django.utils.safestring import mark_safe
# Figure out some paths
django_path = os.path.realpath(os.path.dirname(django.__file__))
socketserver_path = os.path.realpath(os.path.dirname(SocketServer.__file__))
def ms_from_timedelta(td):
    """
    Given a timedelta object, returns a float representing milliseconds.

    Uses total_seconds() so the days component is included; the previous
    implementation (seconds * 1000 + microseconds / 1000.0) silently
    dropped td.days.
    """
    return td.total_seconds() * 1000.0
hide_django_sql = getattr(settings, 'DEBUG_TOOLBAR_CONFIG', {}).get('HIDE_DJANGO_SQL', True)
def tidy_stacktrace(stack):
    """
    Clean up stacktrace and remove all entries that:
    1. Are part of Django (except contrib apps)
    2. Are part of SocketServer (used by Django's dev server)
    3. Are marked hidden via a __traceback_hide__ local

    ``stack`` should be a list of frame tuples from ``inspect.stack()``.

    Returns a list of (path, line_no, func_name, text) tuples.
    """
    trace = []
    for frame, path, line_no, func_name, text in (f[:5] for f in stack):
        s_path = os.path.realpath(path)
        # Support hiding of frames -- used in various utilities that provide
        # inspection.
        if '__traceback_hide__' in frame.f_locals:
            continue
        # Skip Django internals unless HIDE_DJANGO_SQL is disabled; contrib
        # apps are kept because they are effectively user-facing code.
        if hide_django_sql and django_path in s_path and not 'django/contrib' in s_path:
            continue
        if socketserver_path in s_path:
            continue
        # inspect.stack() returns the context as a list of source lines (or
        # None); flatten it to a single stripped string.
        if not text:
            text = ''
        else:
            text = (''.join(text)).strip()
        trace.append((path, line_no, func_name, text))
    return trace
def render_stacktrace(trace):
    """Render a tidy_stacktrace()-style trace as escaped HTML markup."""
    stacktrace = []
    for frame in trace:
        # params: [dir, filename, line_no, func_name, code_text], all escaped.
        params = map(escape, frame[0].rsplit(os.path.sep, 1) + list(frame[1:]))
        try:
            stacktrace.append(u'<span class="path">{0}/</span><span class="file">{1}</span> in <span class="func">{3}</span>(<span class="lineno">{2}</span>)\n  <span class="code">{4}</span>'.format(*params))
        except IndexError:
            # This frame doesn't have the expected format, so skip it and move on to the next one
            continue
    return mark_safe('\n'.join(stacktrace))
def get_template_info(source, context_lines=3):
    """Build highlight context for a template error.

    Args:
        source: (origin, (start, end)) pair, where origin can reload() the
            template source and (start, end) are character offsets of the
            failing token.
        context_lines: Number of lines of context to show around the
            highlighted line.

    Returns:
        Dict with the template 'name' and a 'context' list of
        {'num', 'content', 'highlight'} dicts.
    """
    line = 0
    upto = 0
    source_lines = []
    # before = during = after = ""
    origin, (start, end) = source
    template_source = origin.reload()
    # Walk line-break offsets, recording each line and noting which line
    # fully contains the [start, end) error span.
    for num, next in enumerate(linebreak_iter(template_source)):
        if start >= upto and end <= next:
            line = num
            # before = template_source[upto:start]
            # during = template_source[start:end]
            # after = template_source[end:next]
        source_lines.append((num, template_source[upto:next]))
        upto = next
    top = max(1, line - context_lines)
    bottom = min(len(source_lines), line + 1 + context_lines)
    context = []
    for num, content in source_lines[top:bottom]:
        context.append({
            'num': num,
            'content': content,
            'highlight': (num == line),
        })
    return {
        'name': origin.name,
        'context': context,
    }
def get_name_from_obj(obj):
if hasattr(obj, '__name__'):
name = obj.__name__
elif hasattr(obj, '__class__') and hasattr(obj.__class__, '__name__'):
name = obj.__class__.__name__
else:
name = '<unknown>'
if hasattr(obj, '__module__'):
module = obj.__module__
name = '%s.%s' % (module, name)
return name
def getframeinfo(frame, context=1):
    """
    Get information about a frame or traceback object.
    A tuple of five things is returned: the filename, the line number of
    the current line, the function name, a list of lines of context from
    the source code, and the index of the current line within that list.
    The optional second argument specifies the number of lines of context
    to return, which are centered around the current line.
    This originally comes from ``inspect`` but is modified to handle issues
    with ``findsource()``.
    """
    # Accept either a traceback (use its frame + tb line number) or a frame.
    if inspect.istraceback(frame):
        lineno = frame.tb_lineno
        frame = frame.tb_frame
    else:
        lineno = frame.f_lineno
    if not inspect.isframe(frame):
        raise TypeError('arg is not a frame or traceback object')
    filename = inspect.getsourcefile(frame) or inspect.getfile(frame)
    if context > 0:
        # Center the context window on the current line.
        start = lineno - 1 - context // 2
        try:
            lines, lnum = inspect.findsource(frame)
        except (IOError, IndexError):
            # Source not available (e.g. compiled/interactive code).
            lines = index = None
        else:
            # Clamp the window to the bounds of the source file; the extra
            # max(start, 1) differs from stock inspect -- kept as-is.
            start = max(start, 1)
            start = max(0, min(start, len(lines) - context))
            lines = lines[start:(start + context)]
            index = lineno - 1 - start
    else:
        lines = index = None
    # Older Pythons lack inspect.Traceback; fall back to a plain tuple.
    if hasattr(inspect, 'Traceback'):
        return inspect.Traceback(filename, lineno, frame.f_code.co_name, lines, index)
    else:
        return (filename, lineno, frame.f_code.co_name, lines, index)
def get_stack(context=1):
    """
    Get a list of records for a frame and all higher (calling) frames.
    Each record contains a frame object, filename, line number, function
    name, a list of lines of context, and index within the context.
    Modified version of ``inspect.stack()`` which calls our own
    ``getframeinfo()``.
    """
    records = []
    # Start at the caller's frame (skip this helper itself).
    current = sys._getframe(1)
    while current is not None:
        records.append((current,) + getframeinfo(current, context))
        current = current.f_back
    return records
| agpl-3.0 |
heenbo/mosquitto-heenbo | test/lib/03-publish-c2b-qos1-timeout.py | 7 | 2881 | #!/usr/bin/env python
# Test whether a client sends a correct PUBLISH to a topic with QoS 1 and responds to a delay.
# The client should connect to port 1888 with keepalive=60, clean session set,
# and client id publish-qos1-test
# The test will send a CONNACK message to the client with rc=0. Upon receiving
# the CONNACK the client should verify that rc==0. If not, it should exit with
# return code=1.
# On a successful CONNACK, the client should send a PUBLISH message with topic
# "pub/qos1/test", payload "message" and QoS=1.
# The test will not respond to the first PUBLISH message, so the client must
# resend the PUBLISH message with dup=1. Note that to keep test durations low, a
# message retry timeout of less than 10 seconds is required for this test.
# On receiving the second PUBLISH message, the test will send the correct
# PUBACK response. On receiving the correct PUBACK response, the client should
# send a DISCONNECT message.
import inspect
import os
import subprocess
import socket
import sys
import time
# From http://stackoverflow.com/questions/279237/python-import-a-module-from-a-folder
cmd_subfolder = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile( inspect.currentframe() ))[0],"..")))
if cmd_subfolder not in sys.path:
sys.path.insert(0, cmd_subfolder)
import mosq_test
# Exit code: 1 = failure until the full handshake below succeeds.
rc = 1
keepalive = 60
# Pre-built wire packets the client is expected to exchange.
connect_packet = mosq_test.gen_connect("publish-qos1-test", keepalive=keepalive)
connack_packet = mosq_test.gen_connack(rc=0)
disconnect_packet = mosq_test.gen_disconnect()
mid = 1
publish_packet = mosq_test.gen_publish("pub/qos1/test", qos=1, mid=mid, payload="message")
# Same PUBLISH but with the DUP flag -- the client must retransmit after
# we deliberately withhold the PUBACK for the first attempt.
publish_packet_dup = mosq_test.gen_publish("pub/qos1/test", qos=1, mid=mid, payload="message", dup=True)
puback_packet = mosq_test.gen_puback(mid)
# Fake broker: listen on port 1888 and drive the protocol by hand.
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.settimeout(10)
sock.bind(('', 1888))
sock.listen(5)
client_args = sys.argv[1:]
env = dict(os.environ)
env['LD_LIBRARY_PATH'] = '../../lib:../../lib/cpp'
try:
    pp = env['PYTHONPATH']
except KeyError:
    pp = ''
env['PYTHONPATH'] = '../../lib/python:'+pp
client = mosq_test.start_client(filename=sys.argv[1].replace('/', '-'), cmd=client_args, env=env)
try:
    (conn, address) = sock.accept()
    conn.settimeout(10)
    # Expected sequence: CONNECT -> CONNACK -> PUBLISH (ignored) ->
    # dup PUBLISH -> PUBACK -> DISCONNECT.
    if mosq_test.expect_packet(conn, "connect", connect_packet):
        conn.send(connack_packet)
        if mosq_test.expect_packet(conn, "publish", publish_packet):
            # Delay for > 3 seconds (message retry time)
            if mosq_test.expect_packet(conn, "dup publish", publish_packet_dup):
                conn.send(puback_packet)
                if mosq_test.expect_packet(conn, "disconnect", disconnect_packet):
                    rc = 0
    conn.close()
finally:
    # Always reap the client process and release the port.
    client.terminate()
    client.wait()
    sock.close()
exit(rc)
| gpl-3.0 |
icereval/raven-python | raven/contrib/django/management/commands/raven.py | 23 | 1140 | """
raven.contrib.django.management.commands.raven
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
from django.core.management.base import BaseCommand
from optparse import make_option
from raven.scripts.runner import store_json, send_test_message
import sys
import time
class Command(BaseCommand):
    # ``manage.py raven test`` -- send a test event through the configured
    # Sentry client to verify DSN/connectivity.
    help = 'Commands to interact with the Sentry client'
    # --data / --tags take a JSON string each; store_json parses the value
    # into the options dict (optparse callback style).
    option_list = BaseCommand.option_list + (
        make_option(
            "--data", action="callback", callback=store_json,
            type="string", nargs=1, dest="data"),
        make_option(
            "--tags", action="callback", callback=store_json,
            type="string", nargs=1, dest="tags"),
    )
    def handle(self, *args, **options):
        """Validate the sub-command and fire a test message at Sentry."""
        if len(args) != 1 or args[0] != 'test':
            print('Usage: manage.py raven test')
            sys.exit(1)
        # Imported lazily so Django settings are fully loaded first.
        from raven.contrib.django.models import client
        send_test_message(client, options)
        # Give the async transport a moment to flush before the process exits.
        time.sleep(3)
| bsd-3-clause |
PIPplware/xbmc | lib/libUPnP/Platinum/Build/Tools/Scripts/GenDocumentation.py | 263 | 2205 | #! /usr/bin/python
#############################################################
# This tool is used to generate the Platinum documentation #
#############################################################
import os
import sys
import subprocess
import shutil
import zipfile
import tarfile
#############################################################
# ZIP support
#############################################################
def ZipDir(top, archive, dir) :
    """
    Recursively add every file under *top* to *archive* (an open
    ``zipfile.ZipFile``), storing entries under the archive path *dir*.
    Directories themselves get no entries -- only their files do.
    """
    for entry in os.listdir(top):
        full_path = os.path.join(top, entry)
        arc_name = os.path.join(dir, entry)
        if os.path.isdir(full_path):
            # Recurse, extending the in-archive prefix.
            ZipDir(full_path, archive, arc_name)
        else:
            archive.write(full_path, arc_name)
def ZipIt(root, dir) :
    """
    Create ``<root>/<dir>.zip`` containing the tree at ``<root>/<dir>``,
    replacing any pre-existing archive of the same name.
    """
    target = root+'/'+dir+'.zip'
    # Start from a clean slate if a previous archive exists.
    if os.path.exists(target):
        os.remove(target)
    archive = zipfile.ZipFile(target, "w", zipfile.ZIP_DEFLATED)
    ZipDir(root+'/'+dir, archive, dir)
    archive.close()
def TarIt(root, dir) :
    """
    Create ``<root>/<dir>.tgz`` (gzipped tar) containing the tree at
    ``<root>/<dir>``, replacing any pre-existing archive.

    The original used ``tarfile.TarFileCompat``, which has been deprecated
    since Python 2.6 and removed in Python 3; this uses the regular TarFile
    API and stores the same entry set (files only, named ``<dir>/<relpath>``,
    matching what ZipDir produced through the compat shim).
    """
    tar_filename = root+'/'+dir+'.tgz'
    if os.path.exists(tar_filename):
        os.remove(tar_filename)
    archive = tarfile.open(tar_filename, "w:gz")
    try:
        top = root+'/'+dir
        for dirpath, dirnames, filenames in os.walk(top):
            for name in filenames:
                full_path = os.path.join(dirpath, name)
                arc_name = os.path.join(dir, os.path.relpath(full_path, top))
                # recursive=False: only explicit file entries, like ZipDir.
                archive.add(full_path, arcname=arc_name, recursive=False)
    finally:
        archive.close()
#############################################################
# Main
#############################################################
# ensure that PLATINUM_KIT_HOME has been set and exists
# Abort early unless PLATINUM_KIT_HOME points at an existing checkout.
if not os.environ.has_key('PLATINUM_KIT_HOME'):
    print 'ERROR: PLATINUM_KIT_HOME not set'
    sys.exit(1)
PLATINUM_KIT_HOME = os.environ['PLATINUM_KIT_HOME']
if not os.path.exists(PLATINUM_KIT_HOME) :
    print 'ERROR: PLATINUM_KIT_HOME ('+PLATINUM_KIT_HOME+') does not exist'
    sys.exit(1)
else :
    print 'PLATINUM_KIT_HOME = ' + PLATINUM_KIT_HOME
# compute paths
SDK_DOC_NAME='Platinum-HTML'
SDK_DOC_ROOT=PLATINUM_KIT_HOME+'/Platinum/Docs/Doxygen'
# Run doxygen with the Doxygen directory as CWD so it picks up the
# local Doxyfile; its HTML output lands in SDK_DOC_ROOT/SDK_DOC_NAME.
retcode = subprocess.call(['doxygen'], cwd=SDK_DOC_ROOT)
if retcode != 0:
    print 'ERROR: doxygen failed'
    sys.exit(1)
# zip documentation
ZipIt(SDK_DOC_ROOT, SDK_DOC_NAME)
# Remove the raw HTML tree -- only the zip archive is kept.
shutil.rmtree(SDK_DOC_ROOT+'/'+SDK_DOC_NAME)
| gpl-2.0 |
museumsvictoria/nodel-recipes | Computer Controller/General/script.py | 2 | 6005 | '''Computer Node'''
### Libraries required by this Node
import java.lang.System
import subprocess
### Parameters used by this Node
system = java.lang.System.getProperty('os.name')
arch = java.lang.System.getProperty('sun.arch.data.model').lower()
windows = [ "Windows 7", "Windows 8", "Windows 10" ]
### Functions used by this Node
def shutdown():
    # Power the host off using the platform-appropriate command.
    # `system`/`windows` are module globals derived from the JVM os.name.
    if system in windows:
        # shutdown WIN
        returncode = subprocess.call('shutdown -s -f -t 0 /c "Nodel is shutting down the machine now"', shell=True)
    elif(system=="Mac OS X"):
        # shutdown OSX
        # nodel process must have sudo rights to shutdown command
        returncode = subprocess.call("sudo shutdown -h -u now", shell=True)
    elif(system=="Linux"):
        # shutdown Linux
        # nodel process must have sudo rights to shutdown command
        returncode = subprocess.call("sudo shutdown -h now", shell=True)
    else:
        print 'unknown system: ' + system
def restart():
    # Reboot the host using the platform-appropriate command.
    if system in windows:
        # restart WIN
        returncode = subprocess.call('shutdown -r -f -t 0 /c "Nodel is restarting the machine now"', shell=True)
    elif(system=="Mac OS X"):
        # restart OSX; requires passwordless sudo for shutdown
        returncode = subprocess.call("sudo shutdown -r now", shell=True)
    elif(system=="Linux"):
        # restart Linux
        # nodel process must have sudo rights to shutdown command
        returncode = subprocess.call("sudo shutdown -r now", shell=True)
    else:
        print 'unknown system: ' + system
def suspend():
    # Put the host to sleep. Note: no Linux branch exists -- suspend is
    # only supported on Windows and OS X here.
    if system in windows:
        # suspend WIN (SetSuspendState args: hibernate=0, force=1, wakeevents=0)
        returncode = subprocess.call("rundll32.exe powrprof.dll,SetSuspendState 0,1,0", shell=True)
    elif(system=="Mac OS X"):
        # suspend OSX; requires passwordless sudo for shutdown
        returncode = subprocess.call("sudo shutdown -s now", shell=True)
    else:
        print 'unknown system: ' + system
def mute():
    # Mute system audio. Windows relies on nircmd32/64.exe being on PATH
    # (`arch` is "32" or "64" from the JVM data model).
    if system in windows:
        returncode = subprocess.call("nircmd"+arch+".exe mutesysvolume 1", shell=True)
    elif(system=="Mac OS X"):
        returncode = subprocess.call("osascript -e 'set volume output muted true'", shell=True)
    elif(system=="Linux"):
        returncode = subprocess.call("amixer -q -D pulse sset Master mute", shell=True)
    else:
        print 'unknown system: ' + system
def unmute():
if system in windows:
returncode = subprocess.call("nircmd"+arch+".exe mutesysvolume 0", shell=True)
print returncode
elif(system=="Mac OS X"):
returncode = subprocess.call("osascript -e 'set volume output muted false'", shell=True)
elif(system=="Linux"):
returncode = subprocess.call("amixer -q -D pulse sset Master unmute", shell=True)
else:
print 'unknown system: ' + system
def set_volume(vol):
if system in windows:
winvol = (65535/100)*vol
returncode = subprocess.call("nircmd"+arch+".exe setsysvolume "+str(winvol), shell=True)
elif(system=="Mac OS X"):
returncode = subprocess.call("osascript -e 'set volume output volume "+str(vol)+"'", shell=True)
elif(system=="Linux"):
returncode = subprocess.call("amixer -q -D pulse sset Master "+str(vol)+"% unmute", shell=True)
# raspberry pi volume: "amixer cset numid=1 -- 20%"
#returncode = subprocess.call("amixer cset numid=1 -- "+str(vol)+"%", shell=True)
else:
print 'unknown system: ' + system
### Local actions this Node provides
# Nodel action entry points. NOTE: the JSON docstrings below are runtime
# metadata read by the Nodel framework (titles, groups, schemas) -- they must
# not be edited as ordinary documentation.
def local_action_PowerOff(arg = None):
    """{"title":"PowerOff","desc":"Turns this computer off.","group":"Power"}"""
    print 'Action PowerOff requested'
    shutdown()
def local_action_Suspend(arg = None):
    """{"title":"Suspend","desc":"Suspends this computer.","group":"Power"}"""
    print 'Action Suspend requested'
    suspend()
def local_action_Restart(arg = None):
    """{"title":"Restart","desc":"Restarts this computer.","group":"Power"}"""
    print 'Action Restart requested'
    restart()
def local_action_Mute(arg = None):
    """{"title":"Mute","group":"Volume","schema":{"type":"string","enum": ['On', 'Off'], "required": True}}"""
    # arg is 'On' or 'Off' per the schema above.
    print 'Action Mute%s requested' % arg
    mute() if arg == 'On' else unmute()
def local_action_MuteOn(arg = None):
    """{"title":"MuteOn","desc":"Mute this computer.","group":"Volume"}"""
    print 'Action MuteOn requested'
    mute()
def local_action_MuteOff(arg = None):
    """{"title":"MuteOff","desc":"Un-mute this computer.","group":"Volume"}"""
    print 'Action MuteOff requested'
    unmute()
def local_action_SetVolume(arg = None):
    """{"title":"SetVolume","desc":"Set volume.","schema":{"title":"Drag slider to adjust level.","type":"integer","format":"range","min": 0, "max": 100,"required":"true"},"group":"Volume"}"""
    # arg is an integer 0-100 from the UI slider.
    print 'Action SetVolume requested - '+str(arg)
    set_volume(arg)
# Disk-space monitoring: threshold in GB (despite the MB in the name).
DEFAULT_FREESPACEMB = 0.5
# Parameter / LocalEvent / Timer are Nodel framework bindings injected into
# this script's namespace -- presumably; verify against the Nodel runtime.
param_FreeSpaceThreshold = Parameter({'title': 'Freespace threshold (GB)', 'schema': {'type': 'integer', 'hint': DEFAULT_FREESPACEMB}})
local_event_Status = LocalEvent({'group': 'Status', 'order': next_seq(), 'schema': {'type': 'object', 'properties': {
        'level': {'type': 'integer', 'order': 1},
        'message': {'type': 'string', 'order': 2}}}})
from java.io import File
def check_status():
    # Emit a Status event: level 0 (OK) or level 2 with a low-disk warning.
    # unfortunately this pulls in removable disk drives
    # roots = list(File.listRoots())
    roots = [File('.')] # so just using current drive instead
    warnings = list()
    roots.sort(lambda x, y: cmp(x.getAbsolutePath(), y.getAbsolutePath()))
    for root in roots:
        path = root.getAbsolutePath()
        total = root.getTotalSpace()
        free = root.getFreeSpace()
        usable = root.getUsableSpace()
        # Threshold parameter (GB) with fallback; 1024L is a Jython/Py2 long.
        if free < (param_FreeSpaceThreshold or DEFAULT_FREESPACEMB)*1024*1024*1024L:
            # NOTE(review): the %0.1f value is the *free* space (integer GB,
            # truncated by integer division), not the threshold -- confirm intent.
            warnings.append('%s has less than %0.1f GB left' % (path, long(free)/1024/1024/1024))
    if len(warnings) > 0:
        local_event_Status.emit({'level': 2, 'message': 'Disk space is low on some drives: %s' % (','.join(warnings))})
    else:
        local_event_Status.emit({'level': 0, 'message': 'OK'})
Timer(check_status, 150, 10) # check status every 2.5 mins (10s first time)
### Main
def main(arg = None):
    # Nodel entry point, invoked once when the node starts.
    print 'Nodel script started.'
| mit |
aimejeux/enigma2 | lib/python/Screens/InfoBarGenerics.py | 2 | 157619 | # -*- coding: utf-8 -*-
from Components.ActionMap import ActionMap, HelpableActionMap, NumberActionMap
from Components.Harddisk import harddiskmanager, findMountPoint
from Components.Input import Input
from Components.Label import Label
from Components.MovieList import AUDIO_EXTENSIONS, MOVIE_EXTENSIONS, DVD_EXTENSIONS
from Components.PluginComponent import plugins
from Components.ServiceEventTracker import ServiceEventTracker
from Components.Sources.Boolean import Boolean
from Components.Sources.List import List
from Components.config import config, configfile, ConfigBoolean, ConfigClock
from Components.SystemInfo import SystemInfo
from Components.UsageConfig import preferredInstantRecordPath, defaultMoviePath, preferredTimerPath, ConfigSelection
from Components.Pixmap import MovingPixmap, MultiPixmap
from Components.Sources.StaticText import StaticText
from Components.ScrollLabel import ScrollLabel
from Components.Timeshift import InfoBarTimeshift
from Plugins.Plugin import PluginDescriptor
from Components.Timeshift import InfoBarTimeshift
from Screens.Screen import Screen
from Screens import ScreenSaver
from Screens.ChannelSelection import ChannelSelection, PiPZapSelection, BouquetSelector, SilentBouquetSelector, EpgBouquetSelector
from Screens.ChoiceBox import ChoiceBox
from Screens.Dish import Dish
from Screens.EventView import EventViewEPGSelect, EventViewSimple
from Screens.EpgSelection import EPGSelection
from Screens.InputBox import InputBox
from Screens.MessageBox import MessageBox
from Screens.MinuteInput import MinuteInput
from Screens.TimerSelection import TimerSelection
from Screens.PictureInPicture import PictureInPicture
from Screens.PVRState import PVRState, TimeshiftState
from Screens.SubtitleDisplay import SubtitleDisplay
from Screens.RdsDisplay import RdsInfoDisplay, RassInteractive
from Screens.Standby import Standby, TryQuitMainloop
from Screens.TimeDateInput import TimeDateInput
from Screens.TimerEdit import TimerEditList
from Screens.UnhandledKey import UnhandledKey
from ServiceReference import ServiceReference, isPlayableForCur
from RecordTimer import RecordTimer, RecordTimerEntry, parseEvent, AFTEREVENT, findSafeRecordPath
from Screens.TimerEntry import TimerEntry as TimerEntry
from Tools import Directories, Notifications
from Tools.Directories import pathExists, fileExists, getRecordingFilename, copyfile, moveFiles, resolveFilename, SCOPE_TIMESHIFT, SCOPE_CURRENT_SKIN
from Tools.KeyBindings import getKeyDescription
from enigma import eTimer, eServiceCenter, eDVBServicePMTHandler, iServiceInformation, iPlayableService, eServiceReference, eEPGCache, eActionMap
from boxbranding import getBoxType, getMachineBrand, getMachineName, getBrandOEM, getDriverDate, getImageVersion, getImageBuild, getMachineProcModel
from time import time, localtime, strftime
from bisect import insort
from sys import maxint
import os, cPickle
# hack alert!
from Screens.Menu import MainMenu, Menu, mdom
from Screens.Setup import Setup
import Screens.Standby
class bcolors:
    # ANSI colour escape sequences used by the boot-time console logging below.
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'
    def disable(self):
        # Blank every code on this *instance* (class attributes keep their
        # values), effectively turning colour output off for this object.
        for attr in ('HEADER', 'OKBLUE', 'OKGREEN', 'WARNING', 'FAIL', 'ENDC'):
            setattr(self, attr, '')
# Import-time logging: dump box identification to the console (coloured)
# and persist the same information to /etc/enigma2/boxinformations.
print bcolors.OKGREEN + "~~~~ read box informations ~~~~~~~~~" + bcolors.ENDC
print bcolors.OKBLUE + "MachineName =", getMachineName() + bcolors.ENDC
print bcolors.OKBLUE + "MachineBrand =", getMachineBrand() + bcolors.ENDC
print bcolors.OKBLUE + "BoxType =", getBoxType() + bcolors.ENDC
print bcolors.OKBLUE + "OEM =", getBrandOEM() + bcolors.ENDC
print bcolors.OKBLUE + "Driverdate =", getDriverDate() + bcolors.ENDC
print bcolors.OKBLUE + "Imageversion =", getImageVersion() + bcolors.ENDC
print bcolors.OKBLUE + "Imagebuild =", getImageBuild() + bcolors.ENDC
print bcolors.OKGREEN + "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" + bcolors.ENDC
# Best effort only -- a read-only or missing /etc/enigma2 must not abort import.
try:
    os.system("echo ~~~ Box Info ~~~~~~~~~~~~~~~~~~~~"" > /etc/enigma2/boxinformations")
    os.system("echo getMachineName = " + getMachineName() + " >> /etc/enigma2/boxinformations")
    os.system("echo getMachineBrand = " + getMachineBrand() + " >> /etc/enigma2/boxinformations")
    os.system("echo getBoxType = " + getBoxType() + " >> /etc/enigma2/boxinformations")
    os.system("echo getBrandOEM = " + getBrandOEM() + " >> /etc/enigma2/boxinformations")
    os.system("echo getDriverDate = " + getDriverDate() + " >> /etc/enigma2/boxinformations")
    os.system("echo getImageVersion = " + getImageVersion() + " >> /etc/enigma2/boxinformations")
    os.system("echo getImageBuild = " + getImageBuild() + " >> /etc/enigma2/boxinformations")
    os.system("echo ~~~ CPU Info ~~~~~~~~~~~~~~~~~~~~"" >> /etc/enigma2/boxinformations")
    os.system("cat /proc/cpuinfo >> /etc/enigma2/boxinformations")
except:
    pass
# Module-level feature flags used elsewhere in this file.
AUDIO = False
# COOLTVGUIDE: present iff the CoolTVGuide plugin is installed.
if fileExists("/usr/lib/enigma2/python/Plugins/Extensions/CoolTVGuide/plugin.pyo"):
    COOLTVGUIDE = True
else:
    COOLTVGUIDE = False
def isStandardInfoBar(self):
    # True only for the plain zap InfoBar screen (class literally named
    # "InfoBar"), never for subclasses/variants such as MoviePlayer.
    return "InfoBar" == self.__class__.__name__
def isMoviePlayerInfoBar(self):
    # True only when the screen's class is literally named "MoviePlayer".
    return "MoviePlayer" == self.__class__.__name__
def setResumePoint(session):
    # Record the current playback position of the running service into the
    # module-level resumePointCache (key = service reference string,
    # value = [lru_timestamp, position, length]) and persist it.
    global resumePointCache, resumePointCacheLast
    service = session.nav.getCurrentService()
    ref = session.nav.getCurrentlyPlayingServiceOrGroup()
    if (service is not None) and (ref is not None): # and (ref.type != 1):
        # ref type 1 has its own memory...
        seek = service.seek()
        if seek:
            pos = seek.getPlayPosition()
            if not pos[0]:  # pos[0] is the error flag; 0 means position valid
                key = ref.toString()
                lru = int(time())
                l = seek.getLength()
                if l:
                    l = l[1]
                else:
                    l = None
                resumePointCache[key] = [lru, pos[1], l]
                # Prune stale entries whose recording file has vanished from a
                # still-mounted filesystem (older LRU stamps than this save).
                for k, v in resumePointCache.items():
                    if v[0] < lru:
                        candidate = k
                        filepath = os.path.realpath(candidate.split(':')[-1])
                        mountpoint = findMountPoint(filepath)
                        if os.path.ismount(mountpoint) and not os.path.exists(filepath):
                            del resumePointCache[candidate]
                saveResumePoints()
def delResumePoint(ref):
    # Forget any stored resume position for *ref* and persist the cache.
    global resumePointCache, resumePointCacheLast
    # pop() with a default is the EAFP-free equivalent of try/del/except KeyError.
    resumePointCache.pop(ref.toString(), None)
    saveResumePoints()
def getResumePoint(session):
    # Return the stored resume position for the currently playing service,
    # refreshing its LRU timestamp, or None when nothing usable is cached.
    global resumePointCache
    ref = session.nav.getCurrentlyPlayingServiceOrGroup()
    if ref is None or ref.type == 1:
        # type 1 services keep their own resume memory.
        return None
    entry = resumePointCache.get(ref.toString())
    if entry is None:
        return None
    entry[0] = int(time()) # update LRU timestamp
    return entry[1]
def saveResumePoints():
global resumePointCache, resumePointCacheLast
try:
f = open('/etc/enigma2/resumepoints.pkl', 'wb')
cPickle.dump(resumePointCache, f, cPickle.HIGHEST_PROTOCOL)
f.close()
except Exception, ex:
print "[InfoBar] Failed to write resumepoints:", ex
resumePointCacheLast = int(time())
def loadResumePoints():
try:
file = open('/etc/enigma2/resumepoints.pkl', 'rb')
PickleFile = cPickle.load(file)
file.close()
return PickleFile
except Exception, ex:
print "[InfoBar] Failed to load resumepoints:", ex
return {}
def updateresumePointCache():
    # Re-read the on-disk resume points into the module-level cache,
    # discarding any unsaved in-memory state.
    global resumePointCache
    resumePointCache = loadResumePoints()
def ToggleVideo():
mode = open("/proc/stb/video/policy").read()[:-1]
print mode
if mode == "letterbox":
f = open("/proc/stb/video/policy", "w")
f.write("panscan")
f.close()
elif mode == "panscan":
f = open("/proc/stb/video/policy", "w")
f.write("letterbox")
f.close()
else:
# if current policy is not panscan or letterbox, set to panscan
f = open("/proc/stb/video/policy", "w")
f.write("panscan")
f.close()
resumePointCache = loadResumePoints()
resumePointCacheLast = int(time())
class InfoBarDish:
    # Mixin: instantiates the dish-movement dialog (shown during rotor turns)
    # for screens that include it.
    def __init__(self):
        self.dishDialog = self.session.instantiateDialog(Dish)
class InfoBarLongKeyDetection:
    # Mixin: tracks whether the current key press is a long press by hooking
    # every key event at highest priority and watching the press flags.
    def __init__(self):
        eActionMap.getInstance().bindAction('', -maxint -1, self.detection) #highest prio
        self.LongButtonPressed = False
    #this function is called on every keypress!
    def detection(self, key, flag):
        # flag 3 = long press begin, flag 0 = make (new press resets the state).
        if flag == 3:
            self.LongButtonPressed = True
        elif flag == 0:
            self.LongButtonPressed = False
class InfoBarUnhandledKey:
    # Mixin: flashes the "unhandled key" symbol when a key press was consumed
    # by nobody. actionA (highest priority) records which press phases
    # occurred; actionB (lowest priority) records which phases went unhandled.
    # If the two bitmasks match after the break event, the key was unused.
    def __init__(self):
        self.unhandledKeyDialog = self.session.instantiateDialog(UnhandledKey)
        self.hideUnhandledKeySymbolTimer = eTimer()
        self.hideUnhandledKeySymbolTimer.callback.append(self.unhandledKeyDialog.hide)
        self.checkUnusedTimer = eTimer()
        self.checkUnusedTimer.callback.append(self.checkUnused)
        self.onLayoutFinish.append(self.unhandledKeyDialog.hide)
        eActionMap.getInstance().bindAction('', -maxint -1, self.actionA) #highest prio
        eActionMap.getInstance().bindAction('', maxint, self.actionB) #lowest prio
        self.flags = (1<<1)
        self.uflags = 0
    #this function is called on every keypress!
    def actionA(self, key, flag):
        # Optional key-press debug logging controlled by a settings flag.
        if config.plisettings.ShowPressedButtons.getValue():
            print "Enable debug mode for every pressed key."
            try:
                print 'Pressed KEY: %s %s' % (key,getKeyDescription(key)[0])
            except:
                print 'Pressed KEY: %s' % key
        self.unhandledKeyDialog.hide()
        # Most keys dismiss the second infobar if it is currently shown.
        if self.closeSIB(key) and self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
            self.secondInfoBarScreen.hide()
            self.secondInfoBarWasShown = False
        if flag != 4:  # flag 4 = repeat; ignored for the bookkeeping
            if self.flags & (1<<1):
                # Previous sequence ended with a break -- start a new one.
                self.flags = self.uflags = 0
            self.flags |= (1<<flag)
            if flag == 1: # break
                self.checkUnusedTimer.start(0, True)
        return 0
    def closeSIB(self, key):
        # Keys below 12 and a few navigation/special key codes keep the
        # second infobar open; everything else closes it.
        if key >= 12 and key != 352 and key != 272 and key != 103 and key != 108 and key != 402 and key != 403 and key != 407 and key != 412 :
            return True
        else:
            return False
    #this function is only called when no other action has handled this key
    def actionB(self, key, flag):
        if flag != 4:
            self.uflags |= (1<<flag)
    def checkUnused(self):
        # Every phase of the press went unhandled -> show the symbol briefly.
        if self.flags == self.uflags:
            self.unhandledKeyDialog.show()
            self.hideUnhandledKeySymbolTimer.start(2000, True)
class InfoBarScreenSaver:
    # Mixin: shows a screensaver after a configurable idle period, but only
    # while paused or while playing radio/audio-only content.
    def __init__(self):
        self.onExecBegin.append(self.__onExecBegin)
        self.onExecEnd.append(self.__onExecEnd)
        self.screenSaverTimer = eTimer()
        self.screenSaverTimer.callback.append(self.screensaverTimeout)
        self.screensaver = self.session.instantiateDialog(ScreenSaver.Screensaver)
        self.onLayoutFinish.append(self.__layoutFinished)
    def __layoutFinished(self):
        self.screensaver.hide()
    def __onExecBegin(self):
        self.ScreenSaverTimerStart()
    def __onExecEnd(self):
        # Leaving the screen: tear down the saver and its key hook.
        if self.screensaver.shown:
            self.screensaver.hide()
            eActionMap.getInstance().unbindAction('', self.keypressScreenSaver)
        self.screenSaverTimer.stop()
    def ScreenSaverTimerStart(self):
        # Arm the timer only when a timeout is configured AND playback is
        # paused (seekstate) or the service is radio/audio (service type "2"
        # or an audio file extension in the reference path).
        time = int(config.usage.screen_saver.value)
        flag = self.seekstate[0]
        if not flag:
            ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
            if ref:
                ref = ref.toString().split(":")
                flag = ref[2] == "2" or os.path.splitext(ref[10])[1].lower() in AUDIO_EXTENSIONS
        if time and flag:
            self.screenSaverTimer.startLongTimer(time)
        else:
            self.screenSaverTimer.stop()
    def screensaverTimeout(self):
        # Don't engage while in standby or during shutdown.
        if self.execing and not Screens.Standby.inStandby and not Screens.Standby.inTryQuitMainloop:
            self.hide()
            if hasattr(self, "pvrStateDialog"):
                try:
                    self.pvrStateDialog.hide()
                except:
                    pass
            self.screensaver.show()
            # Grab all keys so the first press only dismisses the saver.
            eActionMap.getInstance().bindAction('', -maxint - 1, self.keypressScreenSaver)
    def keypressScreenSaver(self, key, flag):
        if flag:
            self.screensaver.hide()
            self.show()
            self.ScreenSaverTimerStart()
            eActionMap.getInstance().unbindAction('', self.keypressScreenSaver)
class SecondInfoBar(Screen):
    # Extended infobar screen: shows the full EPG description of the current
    # event, lets the user page through it, step now/next, add/remove a
    # record timer and open the similar-events list.
    ADD_TIMER = 0
    REMOVE_TIMER = 1
    def __init__(self, session):
        Screen.__init__(self, session)
        self.skinName = "SecondInfoBar"
        self["epg_description"] = ScrollLabel()
        self["channel"] = Label()
        self["key_red"] = Label()
        self["key_green"] = Label()
        self["key_yellow"] = Label()
        self["key_blue"] = Label()
        self["SecondInfoBar"] = ActionMap(["2ndInfobarActions"],
            {
                "prevPage": self.pageUp,
                "nextPage": self.pageDown,
                "prevEvent": self.prevEvent,
                "nextEvent": self.nextEvent,
                "timerAdd": self.timerAdd,
                "openSimilarList": self.openSimilarList,
            }, -1)
        # Refresh the display whenever the running event changes.
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evUpdatedEventInfo: self.getEvent
            })
        self.onShow.append(self.__Show)
        self.onHide.append(self.__Hide)
    def pageUp(self):
        self["epg_description"].pageUp()
    def pageDown(self):
        self["epg_description"].pageDown()
    def __Show(self):
        # (Re)bind the action map and populate labels each time we appear.
        if config.plisettings.ColouredButtons.getValue():
            self["key_yellow"].setText(_("Search"))
        self["key_red"].setText(_("Similar"))
        self["key_blue"].setText(_("Extensions"))
        self["SecondInfoBar"].doBind()
        self.getEvent()
    def __Hide(self):
        if self["SecondInfoBar"].bound:
            self["SecondInfoBar"].doUnbind()
    def getEvent(self):
        # Fetch now/next for the playing service (falling back to the EPG
        # cache) and display the current event.
        self["epg_description"].setText("")
        self["channel"].setText("")
        ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
        self.getNowNext()
        epglist = self.epglist
        if not epglist:
            self.is_now_next = False
            epg = eEPGCache.getInstance()
            ptr = ref and ref.valid() and epg.lookupEventTime(ref, -1)
            if ptr:
                epglist.append(ptr)
                ptr = epg.lookupEventTime(ref, ptr.getBeginTime(), +1)
                if ptr:
                    epglist.append(ptr)
        else:
            self.is_now_next = True
        if epglist:
            Event = self.epglist[0]
            Ref = ServiceReference(ref)
            callback = self.eventViewCallback
            self.cbFunc = callback
            self.currentService = Ref
            # A non-group reference with a path is a local recording.
            self.isRecording = (not Ref.ref.flags & eServiceReference.isGroup) and Ref.ref.getPath()
            self.event = Event
            self.key_green_choice = self.ADD_TIMER
            if self.isRecording:
                # Timers make no sense for an already-recorded file.
                self["key_green"].setText("")
            else:
                self["key_green"].setText(_("Add timer"))
            self.setEvent(self.event)
    def getNowNext(self):
        # Collect the service's own now (index 0) and next (index 1) events.
        epglist = [ ]
        service = self.session.nav.getCurrentService()
        info = service and service.info()
        ptr = info and info.getEvent(0)
        if ptr:
            epglist.append(ptr)
        ptr = info and info.getEvent(1)
        if ptr:
            epglist.append(ptr)
        self.epglist = epglist
    def eventViewCallback(self, setEvent, setService, val): #used for now/next displaying
        # Swap now <-> next and redisplay; called by prevEvent/nextEvent.
        epglist = self.epglist
        if len(epglist) > 1:
            tmp = epglist[0]
            epglist[0] = epglist[1]
            epglist[1] = tmp
            setEvent(epglist[0])
    def prevEvent(self):
        if self.cbFunc is not None:
            self.cbFunc(self.setEvent, self.setService, -1)
    def nextEvent(self):
        if self.cbFunc is not None:
            self.cbFunc(self.setEvent, self.setService, +1)
    def removeTimer(self, timer):
        # Drop the timer without running its after-event action.
        timer.afterEvent = AFTEREVENT.NONE
        self.session.nav.RecordTimer.removeEntry(timer)
        self["key_green"].setText(_("Add timer"))
        self.key_green_choice = self.ADD_TIMER
    def timerAdd(self):
        # Toggle: remove an existing timer for this event (after confirm) or
        # open the timer-entry dialog to create one.
        self.hide()
        self.secondInfoBarWasShown = False
        if self.isRecording:
            return
        event = self.event
        serviceref = self.currentService
        if event is None:
            return
        eventid = event.getEventId()
        refstr = serviceref.ref.toString()
        for timer in self.session.nav.RecordTimer.timer_list:
            if timer.eit == eventid and timer.service_ref.ref.toString() == refstr:
                cb_func = lambda ret : not ret or self.removeTimer(timer)
                self.session.openWithCallback(cb_func, MessageBox, _("Do you really want to delete %s?") % event.getEventName())
                break
        else:
            newEntry = RecordTimerEntry(self.currentService, checkOldTimers = True, dirname = preferredTimerPath(), *parseEvent(self.event))
            self.session.openWithCallback(self.finishedAdd, TimerEntry, newEntry)
    def finishedAdd(self, answer):
        # Callback from the TimerEntry dialog; answer = (confirmed, entry).
        # print "finished add"
        if answer[0]:
            entry = answer[1]
            simulTimerList = self.session.nav.RecordTimer.record(entry)
            if simulTimerList is not None:
                # Conflict: first try auto-increase on the colliding timers.
                for x in simulTimerList:
                    if x.setAutoincreaseEnd(entry):
                        self.session.nav.RecordTimer.timeChanged(x)
                simulTimerList = self.session.nav.RecordTimer.record(entry)
                if simulTimerList is not None:
                    # Then try shaving 30s off back-to-back timers when no
                    # margins are configured.
                    if not entry.repeated and not config.recording.margin_before.getValue() and not config.recording.margin_after.getValue() and len(simulTimerList) > 1:
                        change_time = False
                        conflict_begin = simulTimerList[1].begin
                        conflict_end = simulTimerList[1].end
                        if conflict_begin == entry.end:
                            entry.end -= 30
                            change_time = True
                        elif entry.begin == conflict_end:
                            entry.begin += 30
                            change_time = True
                        if change_time:
                            simulTimerList = self.session.nav.RecordTimer.record(entry)
                    if simulTimerList is not None:
                        # Still conflicting: let the user resolve it.
                        self.session.openWithCallback(self.finishSanityCorrection, TimerSanityConflict, simulTimerList)
            self["key_green"].setText(_("Remove timer"))
            self.key_green_choice = self.REMOVE_TIMER
        else:
            self["key_green"].setText(_("Add timer"))
            self.key_green_choice = self.ADD_TIMER
            # print "Timeredit aborted"
    def finishSanityCorrection(self, answer):
        self.finishedAdd(answer)
    def setService(self, service):
        self.currentService=service
        if self.isRecording:
            self["channel"].setText(_("Recording"))
        else:
            name = self.currentService.getServiceName()
            if name is not None:
                self["channel"].setText(name)
            else:
                self["channel"].setText(_("unknown service"))
    def sort_func(self,x,y):
        # cmp-style comparison on the second tuple element.
        if x[1] < y[1]:
            return -1
        elif x[1] == y[1]:
            return 0
        else:
            return 1
    def setEvent(self, event):
        # Display *event* (title + description) and sync the green button
        # with whether a matching record timer already exists.
        if event is None:
            return
        self.event = event
        try:
            name = event.getEventName()
            self["channel"].setText(name)
        except:
            pass
        description = event.getShortDescription()
        extended = event.getExtendedDescription()
        if description and extended:
            description += '\n'
        text = description + extended
        self.setTitle(event.getEventName())
        self["epg_description"].setText(text)
        serviceref = self.currentService
        eventid = self.event.getEventId()
        refstr = serviceref.ref.toString()
        isRecordEvent = False
        for timer in self.session.nav.RecordTimer.timer_list:
            if timer.eit == eventid and timer.service_ref.ref.toString() == refstr:
                isRecordEvent = True
                break
        if isRecordEvent and self.key_green_choice != self.REMOVE_TIMER:
            self["key_green"].setText(_("Remove timer"))
            self.key_green_choice = self.REMOVE_TIMER
        elif not isRecordEvent and self.key_green_choice != self.ADD_TIMER:
            self["key_green"].setText(_("Add timer"))
            self.key_green_choice = self.ADD_TIMER
    def openSimilarList(self):
        # Open the EPG similar-events list for the current event.
        id = self.event and self.event.getEventId()
        refstr = str(self.currentService)
        if id is not None:
            self.hide()
            self.secondInfoBarWasShown = False
            self.session.open(EPGSelection, refstr, None, id)
class InfoBarShowHide(InfoBarScreenSaver):
    """ InfoBar show/hide control, accepts toggleShow and hide actions, might start
    fancy animations. """
    # State machine values for the (name-mangled) self.__state attribute.
    STATE_HIDDEN = 0
    STATE_HIDING = 1
    STATE_SHOWING = 2
    STATE_SHOWN = 3
    def __init__(self):
        self["ShowHideActions"] = ActionMap( ["InfobarShowHideActions"] ,
            {
                "LongOKPressed": self.toggleShowLong,
                "toggleShow": self.OkPressed,
                "hide": self.keyHide,
            }, 1) # lower prio to make it possible to override ok and cancel..
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evStart: self.serviceStarted,
            })
        InfoBarScreenSaver.__init__(self)
        self.__state = self.STATE_SHOWN
        self.__locked = 0
        # hideTimer auto-hides the infobar; initial 5s single-shot on startup.
        self.hideTimer = eTimer()
        self.hideTimer.callback.append(self.doTimerHide)
        self.hideTimer.start(5000, True)
        self.onShow.append(self.__onShow)
        self.onHide.append(self.__onHide)
        self.onShowHideNotifiers = []
        self.standardInfoBar = False
        self.lastSecondInfoBar = 0
        # "" until instantiated; only the standard infobar gets a second infobar.
        self.secondInfoBarScreen = ""
        if isStandardInfoBar(self):
            self.secondInfoBarScreen = self.session.instantiateDialog(SecondInfoBar)
            self.secondInfoBarScreen.show()
        self.onLayoutFinish.append(self.__layoutFinished)
    def __layoutFinished(self):
        """Post-layout setup: hide the pre-shown second infobar and reset flags."""
        if self.secondInfoBarScreen:
            self.secondInfoBarScreen.hide()
            self.standardInfoBar = True
        self.secondInfoBarWasShown = False
        self.EventViewIsShown = False
        # Ensure pvrStateDialog exists so later hasattr/hide calls are safe.
        try:
            if self.pvrStateDialog:
                pass
        except:
            self.pvrStateDialog = None
    def OkPressed(self):
        """OK key: either toggle the infobar or open the service list (configurable)."""
        if config.usage.okbutton_mode.getValue() == "0":
            self.toggleShow()
        elif config.usage.okbutton_mode.getValue() == "1":
            try:
                self.openServiceList()
            except:
                self.toggleShow()
    def SwitchSecondInfoBarScreen(self):
        """Re-instantiate the second infobar when its config mode changed."""
        if self.lastSecondInfoBar == config.usage.show_second_infobar.getValue():
            return
        self.secondInfoBarScreen = self.session.instantiateDialog(SecondInfoBar)
        self.lastSecondInfoBar = config.usage.show_second_infobar.getValue()
    def LongOKPressed(self):
        """Long OK: open the infobar EPG when configured to do so."""
        if isinstance(self, InfoBarEPG):
            if config.plisettings.InfoBarEpg_mode.getValue() == "1":
                self.openInfoBarEPG()
    def __onShow(self):
        self.__state = self.STATE_SHOWN
        for x in self.onShowHideNotifiers:
            x(True)
        self.startHideTimer()
    def __onHide(self):
        self.__state = self.STATE_HIDDEN
#		if self.secondInfoBarScreen:
#			self.secondInfoBarScreen.hide()
        for x in self.onShowHideNotifiers:
            x(False)
    def keyHide(self):
        """EXIT key: hide infobar / second infobar, optionally hide PiP,
        or open the infobar EPG depending on InfoBarEpg_mode."""
        if self.__state == self.STATE_HIDDEN:
            ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
            if ref:
                ref = ref.toString()
            else:
                ref = " "
            # Skip EPG for stream-like refs (":0:0:..." marker after the type digit).
            if config.plisettings.InfoBarEpg_mode.getValue() == "2" and not ref[1:].startswith(":0:0:0:0:0:0:0:0:0:"):
                try:
                    self.openInfoBarEPG()
                except:
                    pass
            else:
                self.hide()
                if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
                    self.secondInfoBarScreen.hide()
                    self.secondInfoBarWasShown = False
            if self.session.pipshown and "popup" in config.usage.pip_hideOnExit.getValue():
                if config.usage.pip_hideOnExit.getValue() == "popup":
                    self.session.openWithCallback(self.hidePipOnExitCallback, MessageBox, _("Disable Picture in Picture"), simple=True)
                else:
                    self.hidePipOnExitCallback(True)
        else:
            self.hide()
            if hasattr(self, "pvrStateDialog"):
                self.pvrStateDialog.hide()
    def hidePipOnExitCallback(self, answer):
        # showPiP() toggles: called here to remove an active PiP on exit.
        if answer:
            self.showPiP()
    def connectShowHideNotifier(self, fnc):
        """Register a callable invoked with True/False on show/hide."""
        if not fnc in self.onShowHideNotifiers:
            self.onShowHideNotifiers.append(fnc)
    def disconnectShowHideNotifier(self, fnc):
        if fnc in self.onShowHideNotifiers:
            self.onShowHideNotifiers.remove(fnc)
    def serviceStarted(self):
        # Show the infobar on zap when configured (evStart service event).
        if self.execing:
            if config.usage.show_infobar_on_zap.getValue():
                self.doShow()
    def startHideTimer(self):
        """(Re)arm the auto-hide timer with the timeout matching what is shown:
        normal infobar, second infobar / event view, or pvr state dialog."""
        if self.__state == self.STATE_SHOWN and not self.__locked:
            self.hideTimer.stop()
            idx = config.usage.infobar_timeout.index
            if idx:
                self.hideTimer.start(idx*1000, True)
        elif (self.secondInfoBarScreen and self.secondInfoBarScreen.shown) or ((not config.usage.show_second_infobar.getValue() or isMoviePlayerInfoBar(self)) and self.EventViewIsShown):
            self.hideTimer.stop()
            idx = config.usage.second_infobar_timeout.index
            if idx:
                self.hideTimer.start(idx*1000, True)
        elif hasattr(self, "pvrStateDialog"):
            self.hideTimer.stop()
            idx = config.usage.infobar_timeout.index
            if idx:
                self.hideTimer.start(idx*1000, True)
    def doShow(self):
        self.show()
        self.startHideTimer()
    def doTimerHide(self):
        """Auto-hide callback: hide whatever layer is currently visible."""
        self.hideTimer.stop()
        if self.__state == self.STATE_SHOWN:
            self.hide()
            if hasattr(self, "pvrStateDialog"):
                try:
                    self.pvrStateDialog.hide()
                except:
                    pass
        elif self.__state == self.STATE_HIDDEN and self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
            self.secondInfoBarScreen.hide()
            self.secondInfoBarWasShown = False
        elif self.__state == self.STATE_HIDDEN and self.EventViewIsShown:
            try:
                self.eventView.close()
            except:
                pass
            self.EventViewIsShown = False
        elif hasattr(self, "pvrStateDialog"):
            try:
                self.pvrStateDialog.hide()
            except:
                pass
    def toggleShow(self):
        """OK key cycle: hidden -> infobar -> second infobar / event view -> hidden,
        depending on show_second_infobar mode and player type."""
        if not hasattr(self, "LongButtonPressed"):
            self.LongButtonPressed = False
        if not self.LongButtonPressed:
            if self.__state == self.STATE_HIDDEN:
                if not self.secondInfoBarWasShown or (config.usage.show_second_infobar.getValue() == "1" and not self.EventViewIsShown):
                    self.show()
                if self.secondInfoBarScreen:
                    self.secondInfoBarScreen.hide()
                self.secondInfoBarWasShown = False
                self.EventViewIsShown = False
            elif self.secondInfoBarScreen and config.usage.show_second_infobar.getValue() == "2" and not self.secondInfoBarScreen.shown:
                self.SwitchSecondInfoBarScreen()
                self.hide()
                self.secondInfoBarScreen.show()
                self.secondInfoBarWasShown = True
                self.startHideTimer()
            elif (config.usage.show_second_infobar.getValue() == "1" or isMoviePlayerInfoBar(self)) and not self.EventViewIsShown:
                self.hide()
                try:
                    self.openEventView()
                except:
                    pass
                self.EventViewIsShown = True
                self.hideTimer.stop()
            elif isMoviePlayerInfoBar(self) and not self.EventViewIsShown and config.usage.show_second_infobar.getValue():
                self.hide()
                self.openEventView(True)
                self.EventViewIsShown = True
                self.startHideTimer()
            else:
                self.hide()
                if self.secondInfoBarScreen and self.secondInfoBarScreen.shown:
                    self.secondInfoBarScreen.hide()
                elif self.EventViewIsShown:
                    try:
                        self.eventView.close()
                    except:
                        pass
                    self.EventViewIsShown = False
    def toggleShowLong(self):
        if self.LongButtonPressed:
            if isinstance(self, InfoBarEPG):
                if config.plisettings.InfoBarEpg_mode.getValue() == "1":
                    self.openInfoBarEPG()
    def lockShow(self):
        """Pin the infobar visible; nestable (counted), paired with unlockShow()."""
        try:
            self.__locked += 1
        except:
            self.__locked = 0
        if self.execing:
            self.show()
            self.hideTimer.stop()
    def unlockShow(self):
        """Release one lockShow(); restart auto-hide when fully unlocked."""
        try:
            self.__locked -= 1
        except:
            self.__locked = 0
        if self.__locked <0:
            self.__locked = 0
        if self.execing:
            self.startHideTimer()
    def openEventView(self, simple=False):
        """Open the now/next event view; falls back to EPG cache lookups when
        no event info is attached to the running service."""
        try:
            if self.servicelist is None:
                return
        except:
            # No servicelist attribute at all (e.g. movie player): use simple view.
            simple = True
        ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
        self.getNowNext()
        epglist = self.epglist
        if not epglist:
            self.is_now_next = False
            epg = eEPGCache.getInstance()
            ptr = ref and ref.valid() and epg.lookupEventTime(ref, -1)
            if ptr:
                epglist.append(ptr)
                ptr = epg.lookupEventTime(ref, ptr.getBeginTime(), +1)
                if ptr:
                    epglist.append(ptr)
        else:
            self.is_now_next = True
        if epglist:
            if not simple:
                self.eventView = self.session.openWithCallback(self.closed, EventViewEPGSelect, epglist[0], ServiceReference(ref), self.eventViewCallback, self.openSingleServiceEPG, self.openMultiServiceEPG, self.openSimilarList)
                self.dlg_stack.append(self.eventView)
            else:
                self.eventView = self.session.openWithCallback(self.closed, EventViewSimple, epglist[0], ServiceReference(ref))
                # NOTE(review): simple path discards the dialog stack entirely
                # instead of pushing -- confirm this is intentional.
                self.dlg_stack = None
    def getNowNext(self):
        """Fill self.epglist with the running service's now (0) and next (1) events."""
        epglist = [ ]
        service = self.session.nav.getCurrentService()
        info = service and service.info()
        ptr = info and info.getEvent(0)
        if ptr:
            epglist.append(ptr)
        ptr = info and info.getEvent(1)
        if ptr:
            epglist.append(ptr)
        self.epglist = epglist
    def eventViewCallback(self, setEvent, setService, val): #used for now/next displaying
        # Swap now/next so repeated presses toggle between the two events.
        epglist = self.epglist
        if len(epglist) > 1:
            tmp = epglist[0]
            epglist[0] = epglist[1]
            epglist[1] = tmp
            setEvent(epglist[0])
class NumberZap(Screen):
    """Dialog that collects a typed channel number and closes with the
    matching (service, bouquet) pair.

    Digits restart a 1s timeout; 3s of inactivity (or a 4th digit, or OK)
    confirms; EXIT cancels.  *searchNumberFunction* maps a channel number
    to a (service, bouquet) tuple.
    """
    def __init__(self, session, number, searchNumberFunction = None):
        Screen.__init__(self, session)
        self.onChangedEntry = [ ]
        self.numberString = str(number)
        self.searchNumber = searchNumberFunction
        self.startBouquet = None
        self["channel"] = Label(_("Channel:"))
        self["channel_summary"] = StaticText(_("Channel:"))
        self["number"] = Label(self.numberString)
        self["number_summary"] = StaticText(self.numberString)
        self["servicename"] = Label()
        self.handleServiceName()
        # Build the digit bindings in a loop instead of ten literal entries.
        keymap = {
            "cancel": self.quit,
            "ok": self.keyOK,
            "blue": self.keyBlue,
        }
        for digit in "1234567890":
            keymap[digit] = self.keyNumberGlobal
        self["actions"] = NumberActionMap( [ "SetupActions", "ShortcutActions" ], keymap)
        self.Timer = eTimer()
        self.Timer.callback.append(self.keyOK)
        self.Timer.start(3000, True)
    def quit(self):
        """Cancel: stop the timeout and close without a result."""
        self.Timer.stop()
        self.close()
    def keyOK(self):
        """Confirm: close returning the currently resolved service/bouquet."""
        self.Timer.stop()
        self.close(self.service, self.bouquet)
    def handleServiceName(self):
        """Resolve the typed number and show the matching service name."""
        if not self.searchNumber:
            return
        self.service, self.bouquet = self.searchNumber(int(self["number"].getText()))
        self["servicename"].setText(ServiceReference(self.service).getServiceName())
        if not self.startBouquet:
            self.startBouquet = self.bouquet
    def keyBlue(self):
        """Search again; restrict to the first bouquet while still inside the
        bouquet the dialog started in."""
        self.Timer.start(3000, True)
        if not self.searchNumber:
            return
        typed = int(self["number"].getText())
        if self.startBouquet == self.bouquet:
            self.service, self.bouquet = self.searchNumber(typed, firstBouquetOnly = True)
        else:
            self.service, self.bouquet = self.searchNumber(typed)
        self["servicename"].setText(ServiceReference(self.service).getServiceName())
    def keyNumberGlobal(self, number):
        """Append a digit, refresh the preview, auto-confirm at 4 digits."""
        self.Timer.start(1000, True)
        self.numberString += str(number)
        self["number"].setText(self.numberString)
        self["number_summary"].setText(self.numberString)
        self.handleServiceName()
        if len(self.numberString) >= 4:
            self.keyOK()
class InfoBarNumberZap:
    """ Handles an initial number for NumberZapping """
    def __init__(self):
        self["NumberActions"] = NumberActionMap( [ "NumberActions"],
            {
                "1": self.keyNumberGlobal,
                "2": self.keyNumberGlobal,
                "3": self.keyNumberGlobal,
                "4": self.keyNumberGlobal,
                "5": self.keyNumberGlobal,
                "6": self.keyNumberGlobal,
                "7": self.keyNumberGlobal,
                "8": self.keyNumberGlobal,
                "9": self.keyNumberGlobal,
                "0": self.keyNumberGlobal,
            })
    def keyNumberGlobal(self, number):
        """Digit key handler.

        While timeshift is seekable, digits move the PTS seek pointer.
        '0' either runs the PiP 0-action, the panic button (reset history and
        zap to channel 1 of the first bouquet), or recalls the previous
        service.  Any other digit opens the NumberZap dialog.
        """
        if self.pvrStateDialog.has_key("PTSSeekPointer") and self.timeshiftEnabled() and self.isSeekable():
            # Timeshift active and seekable: recenter the seek pointer and resume play.
            InfoBarTimeshiftState._mayShow(self)
            self.pvrStateDialog["PTSSeekPointer"].setPosition((self.pvrStateDialog["PTSSeekBack"].instance.size().width()-4)/2, self.pvrStateDialog["PTSSeekPointer"].position[1])
            if self.seekstate != self.SEEK_STATE_PLAY:
                self.setSeekState(self.SEEK_STATE_PLAY)
            self.ptsSeekPointerOK()
            return
        if self.pts_blockZap_timer.isActive():
            return
        if number == 0:
            if isinstance(self, InfoBarPiP) and self.pipHandles0Action():
                self.pipDoHandle0Action()
            else:
                if config.usage.panicbutton.getValue():
                    # Panic button: kill PiP, wipe both zap histories, then zap
                    # to channel 1 of the first bouquet.
                    if self.session.pipshown:
                        del self.session.pip
                        self.session.pipshown = False
                    self.servicelist.history_tv = []
                    self.servicelist.history_radio = []
                    self.servicelist.history = self.servicelist.history_tv
                    self.servicelist.history_pos = 0
                    self.servicelist2.history_tv = []
                    self.servicelist2.history_radio = []
                    # NOTE(review): this points servicelist2.history at
                    # servicelist.history_tv -- looks like a copy-paste slip
                    # (servicelist2.history_tv expected); confirm before changing.
                    self.servicelist2.history = self.servicelist.history_tv
                    self.servicelist2.history_pos = 0
                    if config.usage.multibouquet.getValue():
                        bqrootstr = '1:7:1:0:0:0:0:0:0:0:FROM BOUQUET "bouquets.tv" ORDER BY bouquet'
                    else:
                        bqrootstr = '%s FROM BOUQUET "userbouquet.favourites.tv" ORDER BY bouquet'% self.service_types
                    serviceHandler = eServiceCenter.getInstance()
                    rootbouquet = eServiceReference(bqrootstr)
                    bouquet = eServiceReference(bqrootstr)
                    bouquetlist = serviceHandler.list(bouquet)
                    if not bouquetlist is None:
                        while True:
                            bouquet = bouquetlist.getNext()
                            if bouquet.flags & eServiceReference.isDirectory:
                                self.servicelist.clearPath()
                                self.servicelist.setRoot(bouquet)
                                servicelist = serviceHandler.list(bouquet)
                                if not servicelist is None:
                                    serviceIterator = servicelist.getNext()
                                    while serviceIterator.valid():
                                        # searchNumber(1) resolves "channel 1".
                                        service, bouquet2 = self.searchNumber(1)
                                        if service == serviceIterator: break
                                        serviceIterator = servicelist.getNext()
                                    if serviceIterator.valid() and service == serviceIterator: break
                        # Found the bouquet holding channel 1: enter it in both lists.
                        self.servicelist.enterPath(rootbouquet)
                        self.servicelist.enterPath(bouquet)
                        self.servicelist.saveRoot()
                        self.servicelist2.enterPath(rootbouquet)
                        self.servicelist2.enterPath(bouquet)
                        self.servicelist2.saveRoot()
                    self.selectAndStartService(service, bouquet)
                else:
                    self.servicelist.recallPrevService()
        else:
            if self.has_key("TimeshiftActions") and self.timeshiftEnabled():
                ts = self.getTimeshift()
                if ts and ts.isTimeshiftActive():
                    return
            self.session.openWithCallback(self.numberEntered, NumberZap, number, self.searchNumber)
    def numberEntered(self, service = None, bouquet = None):
        """NumberZap dialog callback: zap when a service was chosen."""
        if service:
            self.selectAndStartService(service, bouquet)
    def searchNumberHelper(self, serviceHandler, num, bouquet):
        """Return the service with channel number *num* inside *bouquet*, or None."""
        servicelist = serviceHandler.list(bouquet)
        if servicelist:
            serviceIterator = servicelist.getNext()
            while serviceIterator.valid():
                if num == serviceIterator.getChannelNum():
                    return serviceIterator
                serviceIterator = servicelist.getNext()
        return None
    def searchNumber(self, number, firstBouquetOnly = False):
        """Resolve channel *number* to (service, bouquet).

        Searches the current bouquet first (unless *firstBouquetOnly*), then
        walks all bouquets; non-playable hits (markers/directories, except
        numbered markers) are discarded.
        """
        bouquet = self.servicelist.getRoot()
        service = None
        serviceHandler = eServiceCenter.getInstance()
        if not firstBouquetOnly:
            service = self.searchNumberHelper(serviceHandler, number, bouquet)
        if config.usage.multibouquet.value and not service:
            bouquet = self.servicelist.bouquet_root
            bouquetlist = serviceHandler.list(bouquet)
            if bouquetlist:
                bouquet = bouquetlist.getNext()
                while bouquet.valid():
                    if bouquet.flags & eServiceReference.isDirectory:
                        service = self.searchNumberHelper(serviceHandler, number, bouquet)
                        if service:
                            playable = not (service.flags & (eServiceReference.isMarker|eServiceReference.isDirectory)) or (service.flags & eServiceReference.isNumberedMarker)
                            if not playable:
                                service = None
                            break
                        if config.usage.alternative_number_mode.getValue() or firstBouquetOnly:
                            break
                    bouquet = bouquetlist.getNext()
        return service, bouquet
    def selectAndStartService(self, service, bouquet):
        """Select *service* inside *bouquet* in the service list and zap to it."""
        if service:
            if self.servicelist.getRoot() != bouquet: #already in correct bouquet?
                self.servicelist.clearPath()
                if self.servicelist.bouquet_root != bouquet:
                    self.servicelist.enterPath(self.servicelist.bouquet_root)
                self.servicelist.enterPath(bouquet)
            self.servicelist.setCurrentSelection(service) #select the service in servicelist
            self.servicelist.zap(enable_pipzap = True)
            self.servicelist.correctChannelNumber()
            self.servicelist.startRoot = None
    def zapToNumber(self, number):
        """Directly zap to channel *number* without the NumberZap dialog."""
        service, bouquet = self.searchNumber(number)
        self.selectAndStartService(service, bouquet)
# One-shot flag: open the channel list automatically on the very first start
# (cleared and saved by InfoBarChannelSelection.firstRun).
config.misc.initialchannelselection = ConfigBoolean(default = True)
class InfoBarChannelSelection:
    """ ChannelSelection - handles the channelSelection dialog and the initial
    channelChange actions which open the channelSelection dialog """
    def __init__(self):
        #instantiate forever
        self.servicelist = self.session.instantiateDialog(ChannelSelection)
        # servicelist2 is the PiP zap variant used by the *Long handlers below.
        self.servicelist2 = self.session.instantiateDialog(PiPZapSelection)
        self.tscallback = None
        if config.misc.initialchannelselection.value:
            self.onShown.append(self.firstRun)
        self["ChannelSelectActions"] = HelpableActionMap(self, "InfobarChannelSelection",
            {
                "switchChannelUp": (self.UpPressed, _("Open service list and select previous channel")),
                "switchChannelDown": (self.DownPressed, _("Open service list and select next channel")),
                "switchChannelUpLong": (self.switchChannelUp, _("Open service list and select previous channel for PiP")),
                "switchChannelDownLong": (self.switchChannelDown, _("Open service list and select next channel for PiP")),
                "zapUp": (self.zapUp, _("Switch to previous channel")),
                "zapDown": (self.zapDown, _("Switch next channel")),
                "historyBack": (self.historyBack, _("Switch to previous channel in history")),
                "historyNext": (self.historyNext, _("Switch to next channel in history")),
                "openServiceList": (self.openServiceList, _("Open service list")),
                "openSatellites": (self.openSatellites, _("Open satellites list")),
                "LeftPressed": self.LeftPressed,
                "RightPressed": self.RightPressed,
                "ChannelPlusPressed": self.ChannelPlusPressed,
                "ChannelMinusPressed": self.ChannelMinusPressed,
                "ChannelPlusPressedLong": self.ChannelPlusPressed,
                "ChannelMinusPressedLong": self.ChannelMinusPressed,
            })
    def firstRun(self):
        """One-shot on first boot: clear the flag and open the service list."""
        self.onShown.remove(self.firstRun)
        config.misc.initialchannelselection.value = False
        config.misc.initialchannelselection.save()
        self.openServiceList()
    def LeftPressed(self):
        # Mode "3": left/right open the infobar EPG instead of zapping.
        if config.plisettings.InfoBarEpg_mode.getValue() == "3":
            self.openInfoBarEPG()
        else:
            self.zapUp()
    def RightPressed(self):
        if config.plisettings.InfoBarEpg_mode.getValue() == "3":
            self.openInfoBarEPG()
        else:
            self.zapDown()
    def UpPressed(self):
        # "0": up/down zap directly; "1": they open the service list.
        if config.usage.updownbutton_mode.getValue() == "0":
            self.zapDown()
        elif config.usage.updownbutton_mode.getValue() == "1":
            self.switchChannelUp()
    def DownPressed(self):
        if config.usage.updownbutton_mode.getValue() == "0":
            self.zapUp()
        elif config.usage.updownbutton_mode.getValue() == "1":
            self.switchChannelDown()
    def ChannelPlusPressed(self):
        # "0": zap, "1": open service list, "2": open favourites.
        if config.usage.channelbutton_mode.getValue() == "0":
            self.zapDown()
        elif config.usage.channelbutton_mode.getValue() == "1":
            self.openServiceList()
        elif config.usage.channelbutton_mode.getValue() == "2":
            self.serviceListType = "Norm"
            self.servicelist.showFavourites()
            self.session.execDialog(self.servicelist)
    def ChannelMinusPressed(self):
        if config.usage.channelbutton_mode.getValue() == "0":
            self.zapUp()
        elif config.usage.channelbutton_mode.getValue() == "1":
            self.openServiceList()
        elif config.usage.channelbutton_mode.getValue() == "2":
            self.serviceListType = "Norm"
            self.servicelist.showFavourites()
            self.session.execDialog(self.servicelist)
    def showTvChannelList(self, zap=False):
        """Switch the service list to TV mode, optionally zap, then show it."""
        self.servicelist.setModeTv()
        if zap:
            self.servicelist.zap()
        if config.usage.show_servicelist.getValue():
            self.session.execDialog(self.servicelist)
    def showRadioChannelList(self, zap=False):
        """Switch the service list to radio mode, optionally zap, then show it."""
        self.servicelist.setModeRadio()
        if zap:
            self.servicelist.zap()
        if config.usage.show_servicelist.getValue():
            self.session.execDialog(self.servicelist)
    def historyBack(self):
        # historymode "0": browse history, "2": ZapHistoryBrowser plugin if
        # installed; otherwise zap one step back in history.
        if config.usage.historymode.getValue() == "0":
            self.servicelist.historyBack()
        elif config.usage.historymode.getValue() == "2":
            if fileExists("/usr/lib/enigma2/python/Plugins/Extensions/ZapHistoryBrowser/plugin.pyo"):
                self.showZapHistoryBrowser()
        else:
            self.servicelist.historyZap(-1)
    def historyNext(self):
        if config.usage.historymode.getValue() == "0":
            self.servicelist.historyNext()
        elif config.usage.historymode.getValue() == "2":
            if fileExists("/usr/lib/enigma2/python/Plugins/Extensions/ZapHistoryBrowser/plugin.pyo"):
                self.showZapHistoryBrowser()
        else:
            self.servicelist.historyZap(+1)
    def switchChannelUp(self):
        """Open the service list (PiP list on long press) moved one entry up,
        or jump to favourites when show_bouquetalways is set."""
        if not self.LongButtonPressed:
            if not config.usage.show_bouquetalways.getValue():
                if "keep" not in config.usage.servicelist_cursor_behavior.getValue():
                    self.servicelist.moveUp()
                self.session.execDialog(self.servicelist)
            else:
                self.servicelist.showFavourites()
                self.session.execDialog(self.servicelist)
        elif self.LongButtonPressed:
            if not config.usage.show_bouquetalways.getValue():
                if "keep" not in config.usage.servicelist_cursor_behavior.getValue():
                    self.servicelist2.moveUp()
                self.session.execDialog(self.servicelist2)
            else:
                self.servicelist2.showFavourites()
                self.session.execDialog(self.servicelist2)
    def switchChannelDown(self):
        """Mirror of switchChannelUp, moving the cursor down."""
        if not self.LongButtonPressed:
            if not config.usage.show_bouquetalways.getValue():
                if "keep" not in config.usage.servicelist_cursor_behavior.getValue():
                    self.servicelist.moveDown()
                self.session.execDialog(self.servicelist)
            else:
                self.servicelist.showFavourites()
                self.session.execDialog(self.servicelist)
        elif self.LongButtonPressed:
            if not config.usage.show_bouquetalways.getValue():
                if "keep" not in config.usage.servicelist_cursor_behavior.getValue():
                    self.servicelist2.moveDown()
                self.session.execDialog(self.servicelist2)
            else:
                self.servicelist2.showFavourites()
                self.session.execDialog(self.servicelist2)
    def openServiceList(self):
        self.session.execDialog(self.servicelist)
    def openServiceListPiP(self):
        self.session.execDialog(self.servicelist2)
    def openSatellites(self):
        self.servicelist.showSatellites()
        self.session.execDialog(self.servicelist)
    def zapUp(self):
        """Zap one channel up, skipping unplayable entries; long press zaps
        the PiP service list instead."""
        if not self.LongButtonPressed:
            if self.pts_blockZap_timer.isActive():
                return
            if self.servicelist.inBouquet():
                prev = self.servicelist.getCurrentSelection()
                if prev:
                    prev = prev.toString()
                    while True:
                        if config.usage.quickzap_bouquet_change.getValue():
                            if self.servicelist.atBegin():
                                self.servicelist.prevBouquet()
                        self.servicelist.moveUp()
                        cur = self.servicelist.getCurrentSelection()
                        if cur:
                            if self.servicelist.dopipzap:
                                isPlayable = self.session.pip.isPlayableForPipService(cur)
                            else:
                                isPlayable = isPlayableForCur(cur)
                        # Stop once we wrapped back to the start or hit a playable service.
                        if cur and (cur.toString() == prev or isPlayable):
                            break
            else:
                self.servicelist.moveUp()
            self.servicelist.zap(enable_pipzap = True)
        elif self.LongButtonPressed:
            if not hasattr(self.session, 'pip') and not self.session.pipshown:
                self.session.open(MessageBox, _("Please open Picture in Picture first"), MessageBox.TYPE_ERROR)
                return
            from Screens.ChannelSelection import ChannelSelection
            ChannelSelectionInstance = ChannelSelection.instance
            ChannelSelectionInstance.dopipzap = True
            if self.servicelist2.inBouquet():
                prev = self.servicelist2.getCurrentSelection()
                if prev:
                    prev = prev.toString()
                    while True:
                        if config.usage.quickzap_bouquet_change.getValue():
                            if self.servicelist2.atBegin():
                                self.servicelist2.prevBouquet()
                        self.servicelist2.moveUp()
                        cur = self.servicelist2.getCurrentSelection()
                        if cur:
                            if ChannelSelectionInstance.dopipzap:
                                isPlayable = self.session.pip.isPlayableForPipService(cur)
                            else:
                                isPlayable = isPlayableForCur(cur)
                        if cur and (cur.toString() == prev or isPlayable):
                            break
            else:
                self.servicelist2.moveUp()
            self.servicelist2.zap(enable_pipzap = True)
            ChannelSelectionInstance.dopipzap = False
    def zapDown(self):
        """Mirror of zapUp in the downward direction."""
        if not self.LongButtonPressed:
            if self.pts_blockZap_timer.isActive():
                return
            if self.servicelist.inBouquet():
                prev = self.servicelist.getCurrentSelection()
                if prev:
                    prev = prev.toString()
                    while True:
                        if config.usage.quickzap_bouquet_change.value and self.servicelist.atEnd():
                            self.servicelist.nextBouquet()
                        else:
                            self.servicelist.moveDown()
                        cur = self.servicelist.getCurrentSelection()
                        if cur:
                            if self.servicelist.dopipzap:
                                isPlayable = self.session.pip.isPlayableForPipService(cur)
                            else:
                                isPlayable = isPlayableForCur(cur)
                        if cur and (cur.toString() == prev or isPlayable):
                            break
            else:
                self.servicelist.moveDown()
            self.servicelist.zap(enable_pipzap = True)
        elif self.LongButtonPressed:
            if not hasattr(self.session, 'pip') and not self.session.pipshown:
                self.session.open(MessageBox, _("Please open Picture in Picture first"), MessageBox.TYPE_ERROR)
                return
            from Screens.ChannelSelection import ChannelSelection
            ChannelSelectionInstance = ChannelSelection.instance
            ChannelSelectionInstance.dopipzap = True
            if self.servicelist2.inBouquet():
                prev = self.servicelist2.getCurrentSelection()
                if prev:
                    prev = prev.toString()
                    while True:
                        if config.usage.quickzap_bouquet_change.value and self.servicelist2.atEnd():
                            self.servicelist2.nextBouquet()
                        else:
                            self.servicelist2.moveDown()
                        cur = self.servicelist2.getCurrentSelection()
                        if cur:
                            if ChannelSelectionInstance.dopipzap:
                                isPlayable = self.session.pip.isPlayableForPipService(cur)
                            else:
                                isPlayable = isPlayableForCur(cur)
                        if cur and (cur.toString() == prev or isPlayable):
                            break
            else:
                self.servicelist2.moveDown()
            self.servicelist2.zap(enable_pipzap = True)
            ChannelSelectionInstance.dopipzap = False
class InfoBarMenu:
    """ Handles a menu action, to open the (main) menu """
    def __init__(self):
        self["MenuActions"] = HelpableActionMap(self, "InfobarMenuActions",
            {
                "mainMenu": (self.mainMenu, _("Enter main menu...")),
                "showNetworkSetup": (self.showNetworkMounts, _("Show network mounts ...")),
                # NOTE(review): help text below duplicates the network entry;
                # probably meant to describe the system setup menu -- confirm
                # before touching the translation catalogs.
                "showSystemSetup": (self.showSystemMenu, _("Show network mounts ...")),
                "showRFmod": (self.showRFSetup, _("Show RFmod setup...")),
                "toggleAspectRatio": (self.toggleAspectRatio, _("Toggle aspect ratio...")),
            })
        self.session.infobar = None
    def mainMenu(self):
        """Open the main menu built from the menu XML (mdom)."""
        menu = mdom.getroot()
        assert menu.tag == "menu", "root element in menu must be 'menu'!"
        # so we can access the currently active infobar from screens opened from
        # within the mainmenu; at the moment used from the SubserviceSelection
        self.session.infobar = self
        self.session.openWithCallback(self.mainMenuClosed, MainMenu, menu)
    def mainMenuClosed(self, *val):
        """Reset the active-infobar marker when any menu screen closes.

        Fix: this method used to be defined twice with identical bodies; the
        redundant second definition has been removed.
        """
        self.session.infobar = None
    def toggleAspectRatio(self):
        """Cycle the AV aspect setting auto -> 16:9 -> 4:3 and announce it."""
        ASPECT = [ "auto", "16_9", "4_3" ]
        ASPECT_MSG = { "auto":"Auto", "16_9":"16:9", "4_3":"4:3" }
        if config.av.aspect.getValue() in ASPECT:
            index = ASPECT.index(config.av.aspect.getValue())
            config.av.aspect.value = ASPECT[(index+1)%3]
        else:
            config.av.aspect.value = "auto"
        config.av.aspect.save()
        # Fix: translate the template first, then substitute.  The old code did
        # _("AV aspect is %s." % ...), so the formatted string could never match
        # a gettext catalog entry and was shown untranslated.
        self.session.open(MessageBox, _("AV aspect is %s.") % ASPECT_MSG[config.av.aspect.getValue()], MessageBox.TYPE_INFO, timeout=5)
    def showSystemMenu(self):
        """Open the 'system_selection' submenu of the setup menu tree."""
        menulist = mdom.getroot().findall('menu')
        for item in menulist:
            if item.attrib['entryID'] == 'setup_selection':
                menulist = item.findall('menu')
                for item in menulist:
                    if item.attrib['entryID'] == 'system_selection':
                        menu = item
        # NOTE: raises NameError when the entry is absent from the XML (unchanged
        # behavior -- the menu XML is expected to always contain it).
        assert menu.tag == "menu", "root element in menu must be 'menu'!"
        self.session.openWithCallback(self.mainMenuClosed, Menu, menu)
    def showNetworkMounts(self):
        """Open the 'network_menu' submenu nested under system setup."""
        menulist = mdom.getroot().findall('menu')
        for item in menulist:
            if item.attrib['entryID'] == 'setup_selection':
                menulist = item.findall('menu')
                for item in menulist:
                    if item.attrib['entryID'] == 'system_selection':
                        menulist = item.findall('menu')
                        for item in menulist:
                            if item.attrib['entryID'] == 'network_menu':
                                menu = item
        assert menu.tag == "menu", "root element in menu must be 'menu'!"
        self.session.openWithCallback(self.mainMenuClosed, Menu, menu)
    def showRFSetup(self):
        """Open the RF modulator setup when the box has an RF modulator."""
        if SystemInfo["RfModulator"]:
            self.session.openWithCallback(self.mainMenuClosed, Setup, 'RFmod')
class InfoBarSimpleEventView:
    """ Opens the Eventview for now/next """
    def __init__(self):
        self["EPGActions"] = HelpableActionMap(self, "InfobarEPGActions",
            {
                "showEventInfo": (self.openEventView, _("show event details")),
                "InfoPressed": (self.openEventView, _("show event details")),
                "showInfobarOrEpgWhenInfobarAlreadyVisible": self.showEventInfoWhenNotVisible,
            })
    def openEventView(self, simple=False):
        """Open the now/next event view, filling self.epglist from the running
        service or, failing that, from the EPG cache."""
        if self.servicelist is None:
            return
        ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
        self.getNowNext()
        epglist = self.epglist
        if not epglist:
            self.is_now_next = False
            epg = eEPGCache.getInstance()
            ptr = ref and ref.valid() and epg.lookupEventTime(ref, -1)
            if ptr:
                epglist.append(ptr)
                ptr = epg.lookupEventTime(ref, ptr.getBeginTime(), +1)
                if ptr:
                    epglist.append(ptr)
        else:
            self.is_now_next = True
        if epglist:
            if not simple:
                self.eventView = self.session.openWithCallback(self.closed, EventViewEPGSelect, epglist[0], ServiceReference(ref), self.eventViewCallback, self.openSingleServiceEPG, self.openMultiServiceEPG, self.openSimilarList)
            else:
                self.eventView = self.session.openWithCallback(self.closed, EventViewSimple, epglist[0], ServiceReference(ref))
            # NOTE(review): unlike InfoBarShowHide.openEventView, the simple view
            # is pushed onto dlg_stack here too -- confirm this asymmetry is wanted.
            self.dlg_stack.append(self.eventView)
    def eventViewCallback(self, setEvent, setService, val): #used for now/next displaying
        # Swap the now/next entries so repeated presses toggle between them.
        epglist = self.epglist
        if len(epglist) > 1:
            tmp = epglist[0]
            epglist[0] = epglist[1]
            epglist[1] = tmp
            setEvent(epglist[0])
    def showEventInfoWhenNotVisible(self):
        """Open the event view when the infobar is visible, otherwise show it."""
        if self.shown:
            self.openEventView()
        else:
            self.toggleShow()
            return 1
class SimpleServicelist:
    """Cursor over a flat list of service entries (each with a .ref attribute).

    Tracks a current index with wrap-around navigation; selection by service
    reference.  The cursor may legally sit out of range after a failed
    selection (current == length) or on an empty list (current == -1), in
    which case currentService() yields None.
    """
    def __init__(self, services):
        self.services = services
        self.length = len(services)
        self.current = 0
    def selectService(self, service):
        """Point the cursor at the entry whose .ref equals *service*.

        Returns True on success; on failure the cursor is parked out of
        range (-1 for an empty list, length when not found).
        """
        if not self.length:
            self.current = -1
            return False
        for index, entry in enumerate(self.services):
            if entry.ref == service:
                self.current = index
                return True
        self.current = self.length
        return False
    def nextService(self):
        """Advance the cursor one entry, wrapping to the first."""
        if not self.length:
            return
        self.current = self.current + 1 if self.current + 1 < self.length else 0
    def prevService(self):
        """Move the cursor one entry back, wrapping to the last."""
        if not self.length:
            return
        self.current = self.current - 1 if self.current - 1 > -1 else self.length - 1
    def currentService(self):
        """Return the entry under the cursor, or None when out of range."""
        if self.length and self.current < self.length:
            return self.services[self.current]
        return None
class InfoBarEPG:
""" EPG - Opens an EPG list when the showEPGList action fires """
def __init__(self):
self.is_now_next = False
self.dlg_stack = []
self.bouquetSel = None
self.eventView = None
self.isInfo = None
self.epglist = []
self.defaultEPGType = self.getDefaultEPGtype()
self.defaultGuideType = self.getDefaultGuidetype()
self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
{
iPlayableService.evUpdatedEventInfo: self.__evEventInfoChanged,
})
self["EPGActions"] = HelpableActionMap(self, "InfobarEPGActions",
{
"RedPressed": (self.RedPressed, _("Show epg")),
"IPressed": (self.IPressed, _("show program information...")),
"InfoPressed": (self.InfoPressed, _("show program information...")),
"FavPressed": (self.FavPressed, _("show fav information...")),
"showEventInfoPlugin": (self.showEventInfoPlugins, _("List EPG functions...")),
"EPGPressed": (self.showDefaultEPG, _("show EPG...")),
"showEventGuidePlugin": (self.showEventGuidePlugins, _("List EPG functions...")),
"showInfobarOrEpgWhenInfobarAlreadyVisible": self.showEventInfoWhenNotVisible,
})
def getEPGPluginList(self):
pluginlist = [(p.name, boundFunction(self.runPlugin, p)) for p in plugins.getPlugins(where = PluginDescriptor.WHERE_EVENTINFO)]
if pluginlist:
pluginlist.append((_("Event Info"), self.openEventView))
pluginlist.append((_("Graphical EPG"), self.openGraphEPG))
pluginlist.append((_("Infobar EPG"), self.openInfoBarEPG))
pluginlist.append((_("Multi EPG"), self.openMultiServiceEPG))
pluginlist.append((_("Show EPG for current channel..."), self.openSingleServiceEPG))
return pluginlist
def getDefaultEPGtype(self):
pluginlist = self.getEPGPluginList()
config.usage.defaultEPGType=ConfigSelection(default = "None", choices = pluginlist)
for plugin in pluginlist:
if plugin[0] == config.usage.defaultEPGType.getValue():
return plugin[1]
return None
def showEventInfoPlugins(self):
if isMoviePlayerInfoBar(self):
self.openEventView()
else:
pluginlist = self.getEPGPluginList()
if pluginlist:
pluginlist.append((_("Select default EPG type..."), self.SelectDefaultInfoPlugin))
self.session.openWithCallback(self.EventInfoPluginChosen, ChoiceBox, title=_("Please choose an extension..."), list = pluginlist, skin_name = "EPGExtensionsList")
else:
self.openSingleServiceEPG()
def SelectDefaultInfoPlugin(self):
self.session.openWithCallback(self.DefaultInfoPluginChosen, ChoiceBox, title=_("Please select a default EPG type..."), list = self.getEPGPluginList(), skin_name = "EPGExtensionsList")
def DefaultInfoPluginChosen(self, answer):
if answer is not None:
self.defaultEPGType = answer[1]
config.usage.defaultEPGType.value = answer[0]
config.usage.defaultEPGType.save()
configfile.save()
def getDefaultGuidetype(self):
pluginlist = self.getEPGPluginList()
config.usage.defaultGuideType=ConfigSelection(default = "None", choices = pluginlist)
for plugin in pluginlist:
if plugin[0] == config.usage.defaultGuideType.value:
return plugin[1]
return None
def showEventGuidePlugins(self):
if isMoviePlayerInfoBar(self):
self.openEventView()
else:
pluginlist = self.getEPGPluginList()
if pluginlist:
pluginlist.append((_("Select default EPG type..."), self.SelectDefaultGuidePlugin))
self.session.openWithCallback(self.EventGuidePluginChosen, ChoiceBox, title=_("Please choose an extension..."), list = pluginlist, skin_name = "EPGExtensionsList")
else:
self.openSingleServiceEPG()
def SelectDefaultGuidePlugin(self):
self.session.openWithCallback(self.DefaultGuidePluginChosen, ChoiceBox, title=_("Please select a default EPG type..."), list = self.getEPGPluginList(), skin_name = "EPGExtensionsList")
def DefaultGuidePluginChosen(self, answer):
if answer is not None:
self.defaultGuideType = answer[1]
config.usage.defaultGuideType.value = answer[0]
config.usage.defaultGuideType.save()
def EventGuidePluginChosen(self, answer):
if answer is not None:
answer[1]()
	def runPlugin(self, plugin):
		"""Invoke a plugin entry point with the current session and service list."""
		plugin(session = self.session, servicelist=self.servicelist)
def EventInfoPluginChosen(self, answer):
if answer is not None:
answer[1]()
	def RedPressed(self):
		"""Red button: open the graphical EPG unless it (or no type) is already the configured default, else the single-service EPG."""
		if isStandardInfoBar(self) or isMoviePlayerInfoBar(self):
			if config.usage.defaultEPGType.getValue() != _("Graphical EPG") and config.usage.defaultEPGType.getValue() != _("None"):
				self.openGraphEPG()
			else:
				self.openSingleServiceEPG()
	def InfoPressed(self):
		"""Info button: dispatch to the EPG/event view selected by config.plisettings.PLIINFO_mode."""
		if isStandardInfoBar(self) or isMoviePlayerInfoBar(self):
			# Vu+ boxes always use the default EPG, regardless of the configured mode.
			if getBoxType().startswith('vu'):
				self.showDefaultEPG()
			elif config.plisettings.PLIINFO_mode.getValue() == "eventview":
				self.openEventView()
			elif config.plisettings.PLIINFO_mode.getValue() == "epgpress":
				self.showDefaultEPG()
			elif config.plisettings.PLIINFO_mode.getValue() == "single":
				self.openSingleServiceEPG()
			elif config.plisettings.PLIINFO_mode.getValue() == "coolinfoguide" and COOLTVGUIDE:
				self.showCoolInfoGuide()
			elif config.plisettings.PLIINFO_mode.getValue() == "coolsingleguide" and COOLTVGUIDE:
				self.showCoolSingleGuide()
			elif config.plisettings.PLIINFO_mode.getValue() == "cooltvguide" and COOLTVGUIDE:
				# Only react once event info is available (see __evEventInfoChanged).
				if self.isInfo:
					self.showCoolTVGuide()
			elif config.plisettings.PLIINFO_mode.getValue() == "etportal":
				if fileExists("/usr/lib/enigma2/python/Plugins/Extensions/EtPortal/plugin.pyo"):
					self.showETPORTAL()
				else:
					self.session.open(MessageBox, _("The EtPortal plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
			else:
				self.showDefaultEPG()
	def IPressed(self):
		"""Open the event view for the current service (dedicated "i" key)."""
		if isStandardInfoBar(self) or isMoviePlayerInfoBar(self):
			self.openEventView()
	def EPGPressed(self):
		"""EPG button: dispatch to the EPG variant selected by config.plisettings.PLIEPG_mode."""
		if isStandardInfoBar(self) or isMoviePlayerInfoBar(self):
			if config.plisettings.PLIEPG_mode.getValue() == "pliepg":
				self.openGraphEPG()
			elif config.plisettings.PLIEPG_mode.getValue() == "multi":
				self.openMultiServiceEPG()
			elif config.plisettings.PLIEPG_mode.getValue() == "single":
				self.openSingleServiceEPG()
			elif config.plisettings.PLIEPG_mode.getValue() == "merlinepgcenter":
				self.openMerlinEPGCenter()
			elif config.plisettings.PLIEPG_mode.getValue() == "cooltvguide" and COOLTVGUIDE:
				# Only react once event info is available (see __evEventInfoChanged).
				if self.isInfo:
					self.showCoolTVGuide()
			elif config.plisettings.PLIEPG_mode.getValue() == "eventview":
				self.openEventView()
			else:
				self.openSingleServiceEPG()
	def showEventInfoWhenNotVisible(self):
		"""Open the event view if the infobar is already shown; otherwise just bring up the infobar."""
		if self.shown:
			self.openEventView()
		else:
			self.toggleShow()
			# Non-zero return marks the key event as handled by the action map.
			return 1
	def zapToService(self, service, bouquet = None, preview = False, zapback = False):
		"""Zap to (or preview) a service from the EPG, moving the service list into the right bouquet first.

		service  -- eServiceReference to tune to (ignored if None)
		bouquet  -- bouquet that should become the service list root
		preview  -- tune without making the zap permanent
		zapback  -- return to the previously played service
		"""
		# Remember where we started so a later zap-back can restore it.
		if self.servicelist.startServiceRef is None:
			self.servicelist.startServiceRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		self.servicelist.currentServiceRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		if service is not None:
			if self.servicelist.getRoot() != bouquet: #already in correct bouquet?
				self.servicelist.clearPath()
				if self.servicelist.bouquet_root != bouquet:
					self.servicelist.enterPath(self.servicelist.bouquet_root)
				self.servicelist.enterPath(bouquet)
			self.servicelist.setCurrentSelection(service) #select the service in servicelist
		if not zapback or preview:
			self.servicelist.zap(preview_zap = preview)
		if (self.servicelist.dopipzap or zapback) and not preview:
			self.servicelist.zapBack()
		if not preview:
			# A real zap completes the operation; clear the remembered origin.
			self.servicelist.startServiceRef = None
			self.servicelist.startRoot = None
	def getBouquetServices(self, bouquet):
		"""Return the playable services of a bouquet as a list of ServiceReference objects."""
		services = []
		servicelist = eServiceCenter.getInstance().list(bouquet)
		if not servicelist is None:
			while True:
				service = servicelist.getNext()
				if not service.valid(): #check if end of list
					break
				if service.flags & (eServiceReference.isDirectory | eServiceReference.isMarker): #ignore non playable services
					continue
				services.append(ServiceReference(service))
		return services
	def openBouquetEPG(self, bouquet = None, bouquets = None):
		"""Open the EPG for a bouquet and push the dialog onto the dialog stack for later unwinding."""
		if bouquet:
			self.StartBouquet = bouquet
		self.dlg_stack.append(self.session.openWithCallback(self.closed, EPGSelection, zapFunc=self.zapToService, EPGtype=self.EPGtype, StartBouquet=self.StartBouquet, StartRef=self.StartRef, bouquets = bouquets))
	def closed(self, ret=False):
		"""Dialog-close callback: pop the closed screen off the stack and propagate close/reopen requests.

		ret -- False, True, 'close', or one of the 'reopen...' strings handled by reopen().
		"""
		if not self.dlg_stack:
			return
		closedScreen = self.dlg_stack.pop()
		# Forget bookkeeping references to the screen that just went away.
		if self.bouquetSel and closedScreen == self.bouquetSel:
			self.bouquetSel = None
		elif self.eventView and closedScreen == self.eventView:
			self.eventView = None
		if ret == True or ret == 'close':
			# Cascade the close down the remaining stacked dialogs.
			dlgs=len(self.dlg_stack)
			if dlgs > 0:
				self.dlg_stack[dlgs-1].close(dlgs > 1)
		self.reopen(ret)
	def MultiServiceEPG(self):
		"""Open a multi-service EPG, optionally via a bouquet selector when several bouquets exist."""
		bouquets = self.servicelist.getBouquetList()
		if bouquets is None:
			cnt = 0
		else:
			cnt = len(bouquets)
		if (self.EPGtype == "multi" and config.epgselection.multi_showbouquet.getValue()) or (self.EPGtype == "graph" and config.epgselection.graph_showbouquet.getValue()):
			if cnt > 1: # show bouquet list
				self.bouquetSel = self.session.openWithCallback(self.closed, EpgBouquetSelector, bouquets, self.openBouquetEPG, enableWrapAround=True)
				self.dlg_stack.append(self.bouquetSel)
			elif cnt == 1:
				self.openBouquetEPG(bouquets=bouquets)
		else:
			self.openBouquetEPG(bouquets=bouquets)
	def openMultiServiceEPG(self):
		"""Open the multi-service EPG starting from the current bouquet and service."""
		if self.servicelist is None:
			return
		self.EPGtype = "multi"
		self.StartBouquet = self.servicelist.getRoot()
		if isMoviePlayerInfoBar(self):
			# In the movie player the "current" service is the last live service.
			self.StartRef = self.lastservice
		else:
			self.StartRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		self.MultiServiceEPG()
	def openGraphEPG(self, reopen=False):
		"""Open the graphical EPG; when reopening, keep the previous start bouquet/service."""
		if self.servicelist is None:
			return
		self.EPGtype = "graph"
		if not reopen:
			self.StartBouquet = self.servicelist.getRoot()
			self.StartRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		self.MultiServiceEPG()
	def openSingleServiceEPG(self, reopen=False):
		"""Open the enhanced single-service EPG.

		reopen -- accepted for signature parity with the other open* methods; currently unused here.
		"""
		if self.servicelist is None:
			return
		self.EPGtype = "enhanced"
		self.SingleServiceEPG()
	def openInfoBarEPG(self, reopen=False):
		"""Open the infobar EPG in single or graphical form, per config.epgselection.infobar_type_mode."""
		if self.servicelist is None:
			return
		if not reopen:
			self.StartBouquet = self.servicelist.getRoot()
			self.StartRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		if config.epgselection.infobar_type_mode.getValue() == 'single':
			self.EPGtype = "infobar"
			self.SingleServiceEPG()
		else:
			self.EPGtype = "infobargraph"
			self.MultiServiceEPG()
	def showCoolTVGuide(self):
		"""Launch the "Cool TV Guide" plugin if installed, otherwise show an information message."""
		if self.servicelist is None:
			return
		if COOLTVGUIDE:
			for plugin in plugins.getPlugins([PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_EVENTINFO]):
				if plugin.name == _("Cool TV Guide"):
					self.runPlugin(plugin)
					break
		else:
			self.session.open(MessageBox, _("The Cool TV Guide plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
	def showETPORTAL(self):
		"""Open the EtPortal plugin screen; any import/open failure is reported as "not installed"."""
		try:
			from Plugins.Extensions.EtPortal.plugin import *
			from Components.PluginComponent import plugins
			self.session.open(EtPortalScreen)
		except Exception, e:
			self.session.open(MessageBox, _("The EtPortal plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
	def showEMC(self):
		"""Open the Enhanced Movie Center; any import/open failure is reported as "not installed"."""
		try:
			from Plugins.Extensions.EnhancedMovieCenter.plugin import *
			from Components.PluginComponent import plugins
			showMoviesNew()
		except Exception, e:
			self.session.open(MessageBox, _("The Enhanced Movie Center plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
	def showMEDIAPORTAL(self):
		"""Open the Media Portal plugin screen; any import/open failure is reported as "not installed"."""
		try:
			from Plugins.Extensions.MediaPortal.plugin import *
			from Components.PluginComponent import plugins
			self.session.open(haupt_Screen)
		except Exception, e:
			self.session.open(MessageBox, _("The Media Portal plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
	def showZapHistoryBrowser(self):
		"""Launch the Zap-History Browser plugin; any failure is reported as "not installed"."""
		try:
			for plugin in plugins.getPlugins([PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_EVENTINFO]):
				if plugin.name == _("Zap-Historie Browser"):
					self.runPlugin(plugin)
					break
		except Exception, e:
			self.session.open(MessageBox, _("The Zap-History Browser plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
	def SingleServiceEPG(self):
		"""Open the single-service EPG for the currently playing (or last live) service."""
		self.StartBouquet = self.servicelist.getRoot()
		self.StartRef = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		if isMoviePlayerInfoBar(self):
			ref = self.lastservice
		else:
			ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		if ref:
			# Wrap the bouquet in a SimpleServicelist so the EPG can step through its services.
			services = self.getBouquetServices(self.StartBouquet)
			self.serviceSel = SimpleServicelist(services)
			if self.serviceSel.selectService(ref):
				self.session.openWithCallback(self.SingleServiceEPGClosed,EPGSelection, self.servicelist, zapFunc=self.zapToService, serviceChangeCB = self.changeServiceCB, EPGtype=self.EPGtype, StartBouquet=self.StartBouquet, StartRef=self.StartRef)
			else:
				# Current service is not in the bouquet: open a plain EPG for the service alone.
				self.session.openWithCallback(self.SingleServiceEPGClosed, EPGSelection, ref)
def changeServiceCB(self, direction, epg):
if self.serviceSel:
if direction > 0:
self.serviceSel.nextService()
else:
self.serviceSel.prevService()
epg.setService(self.serviceSel.currentService())
	def SingleServiceEPGClosed(self, ret=False):
		"""Callback when the single-service EPG closes: drop the service selector and handle reopen requests."""
		self.serviceSel = None
		self.reopen(ret)
	def reopen(self, answer):
		"""Act on an EPG close code: reopen the requested EPG variant, or close the movie player infobar."""
		if answer == 'reopengraph':
			self.openGraphEPG(True)
		elif answer == 'reopeninfobargraph' or answer == 'reopeninfobar':
			self.openInfoBarEPG(True)
		elif answer == 'close' and isMoviePlayerInfoBar(self):
			self.lastservice = self.session.nav.getCurrentlyPlayingServiceOrGroup()
			self.close()
	def showCoolInfoGuide(self):
		"""Launch the "Cool Info Guide" view of the Cool TV Guide plugin if installed."""
		if self.servicelist is None:
			return
		if COOLTVGUIDE:
			for plugin in plugins.getPlugins([PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_EVENTINFO]):
				if plugin.name == _("Cool Info Guide"):
					self.runPlugin(plugin)
					break
		else:
			# The view ships with the Cool TV Guide plugin, hence that plugin name in the message.
			self.session.open(MessageBox, _("The Cool TV Guide plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
	def showCoolSingleGuide(self):
		"""Launch the "Cool Single Guide" view of the Cool TV Guide plugin if installed."""
		if self.servicelist is None:
			return
		if COOLTVGUIDE:
			for plugin in plugins.getPlugins([PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_EVENTINFO]):
				if plugin.name == _("Cool Single Guide"):
					self.runPlugin(plugin)
					break
		else:
			# The view ships with the Cool TV Guide plugin, hence that plugin name in the message.
			self.session.open(MessageBox, _("The Cool TV Guide plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
	def openSimilarList(self, eventid, refstr):
		"""Open the EPG list of events similar to the given event on the given service."""
		self.session.open(EPGSelection, refstr, eventid=eventid)
	def getNowNext(self):
		"""Refresh self.epglist with the current service's now (index 0) and next (index 1) events."""
		epglist = [ ]
		service = self.session.nav.getCurrentService()
		info = service and service.info()
		ptr = info and info.getEvent(0)
		if ptr:
			epglist.append(ptr)
		ptr = info and info.getEvent(1)
		if ptr:
			epglist.append(ptr)
		self.epglist = epglist
	def __evEventInfoChanged(self):
		"""Service event hook: mark event info as available and refresh an open now/next event view."""
		self.isInfo = True
		# Only auto-refresh when the event view is the sole stacked dialog showing now/next data.
		if self.is_now_next and len(self.dlg_stack) == 1:
			self.getNowNext()
			if self.eventView and self.epglist:
				self.eventView.setEvent(self.epglist[0])
	def showDefaultEPG(self):
		"""Open the user's default EPG type if one is configured, else fall back to the EPG-button behaviour."""
		if self.defaultEPGType is not None:
			self.defaultEPGType()
			return
		self.EPGPressed()
	def openEventView(self, simple=False):
		"""Open the event view for the current service.

		simple -- use the plain EventViewSimple screen instead of the full
		          EventViewEPGSelect with EPG/similar-events buttons.
		"""
		if self.servicelist is None:
			return
		ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		self.getNowNext()
		epglist = self.epglist
		if not epglist:
			# No now/next info from the service itself: fall back to the EPG cache.
			self.is_now_next = False
			epg = eEPGCache.getInstance()
			ptr = ref and ref.valid() and epg.lookupEventTime(ref, -1)
			if ptr:
				epglist.append(ptr)
				ptr = epg.lookupEventTime(ref, ptr.getBeginTime(), +1)
				if ptr:
					epglist.append(ptr)
		else:
			self.is_now_next = True
		if epglist:
			if not simple:
				self.eventView = self.session.openWithCallback(self.closed, EventViewEPGSelect, epglist[0], ServiceReference(ref), self.eventViewCallback, self.openSingleServiceEPG, self.openMultiServiceEPG, self.openSimilarList)
			else:
				self.eventView = self.session.openWithCallback(self.closed, EventViewSimple, epglist[0], ServiceReference(ref))
			self.dlg_stack.append(self.eventView)
def eventViewCallback(self, setEvent, setService, val): #used for now/next displaying
epglist = self.epglist
if len(epglist) > 1:
tmp = epglist[0]
epglist[0]=epglist[1]
epglist[1]=tmp
setEvent(epglist[0])
	def FavPressed(self):
		"""Favourites button: dispatch to the action selected by config.plisettings.PLIFAV_mode."""
		if isStandardInfoBar(self) or isMoviePlayerInfoBar(self):
			# Vu+ boxes always use the default EPG, regardless of the configured mode.
			if getBoxType().startswith('vu'):
				self.showDefaultEPG()
			elif config.plisettings.PLIFAV_mode.getValue() == "eventview":
				self.openEventView()
			elif config.plisettings.PLIFAV_mode.getValue() == "showfavourites":
				self.serviceListType = "Norm"
				self.servicelist.showFavourites()
				self.session.execDialog(self.servicelist)
			elif config.plisettings.PLIFAV_mode.getValue() == "epgpress":
				self.showDefaultEPG()
			elif config.plisettings.PLIFAV_mode.getValue() == "single":
				self.openSingleServiceEPG()
			elif config.plisettings.PLIFAV_mode.getValue() == "coolinfoguide" and COOLTVGUIDE:
				self.showCoolInfoGuide()
			elif config.plisettings.PLIFAV_mode.getValue() == "coolsingleguide" and COOLTVGUIDE:
				self.showCoolSingleGuide()
			elif config.plisettings.PLIFAV_mode.getValue() == "cooltvguide" and COOLTVGUIDE:
				# Only react once event info is available (see __evEventInfoChanged).
				if self.isInfo:
					self.showCoolTVGuide()
			elif config.plisettings.PLIFAV_mode.getValue() == "etportal":
				if fileExists("/usr/lib/enigma2/python/Plugins/Extensions/EtPortal/plugin.pyo"):
					self.showETPORTAL()
				else:
					self.session.open(MessageBox, _("The EtPortal plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
			elif config.plisettings.PLIFAV_mode.getValue() == "emc":
				if fileExists("/usr/lib/enigma2/python/Plugins/Extensions/EnhancedMovieCenter/plugin.pyo"):
					self.showEMC()
				else:
					self.session.open(MessageBox, _("The EnhancedMovieCenter plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
			elif config.plisettings.PLIFAV_mode.getValue() == "mediaportal":
				if fileExists("/usr/lib/enigma2/python/Plugins/Extensions/MediaPortal/plugin.pyo"):
					self.showMEDIAPORTAL()
				else:
					self.session.open(MessageBox, _("The Media Portal plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
			else:
				self.showDefaultEPG()
class InfoBarRdsDecoder:
	"""provides RDS and Rass support/display"""
	def __init__(self):
		# Persistent on-screen display for RDS text; shadowed on the LCD summary.
		self.rds_display = self.session.instantiateDialog(RdsInfoDisplay)
		self.session.instantiateSummaryDialog(self.rds_display)
		self.rass_interactive = None
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evEnd: self.__serviceStopped,
				iPlayableService.evUpdatedRassSlidePic: self.RassSlidePicChanged
			})
		self["RdsActions"] = ActionMap(["InfobarRdsActions"],
		{
			"startRassInteractive": self.startRassInteractive
		},-1)
		# The Rass key stays disabled until the service signals interactive capability.
		self["RdsActions"].setEnabled(False)
		self.onLayoutFinish.append(self.rds_display.show)
		self.rds_display.onRassInteractivePossibilityChanged.append(self.RassInteractivePossibilityChanged)
	def RassInteractivePossibilityChanged(self, state):
		"""Enable/disable the Rass-interactive key according to the service's capability."""
		self["RdsActions"].setEnabled(state)
	def RassSlidePicChanged(self):
		"""Show the new Rass slide picture, unless the interactive screen is handling it."""
		if not self.rass_interactive:
			service = self.session.nav.getCurrentService()
			decoder = service and service.rdsDecoder()
			if decoder:
				decoder.showRassSlidePicture()
	def __serviceStopped(self):
		"""Close the interactive Rass screen when the service ends."""
		if self.rass_interactive is not None:
			# Clear the reference first so the close callback sees it gone.
			rass_interactive = self.rass_interactive
			self.rass_interactive = None
			rass_interactive.close()
	def startRassInteractive(self):
		"""Hide the RDS display and open the interactive Rass screen."""
		self.rds_display.hide()
		self.rass_interactive = self.session.openWithCallback(self.RassInteractiveClosed, RassInteractive)
	def RassInteractiveClosed(self, *val):
		"""Callback when the interactive Rass screen closes: restore slide display and RDS overlay."""
		if self.rass_interactive is not None:
			self.rass_interactive = None
			self.RassSlidePicChanged()
		self.rds_display.show()
class Seekbar(Screen):
	"""Seek-position dialog: left/right move a cursor along the timeline, OK seeks there.

	fwd -- True when opened from a forward-seek key (kept for callers; direction
	       itself is chosen via the left/right keys).
	"""
	def __init__(self, session, fwd):
		Screen.__init__(self, session)
		self.setTitle(_("Seek"))
		self.session = session
		self.fwd = fwd
		# Cursor position as a percentage of the service length.
		self.percent = 0.0
		self.length = None
		service = session.nav.getCurrentService()
		if service:
			self.seek = service.seek()
			if self.seek:
				self.length = self.seek.getLength()
				position = self.seek.getPlayPosition()
				if self.length and position and int(self.length[1]) > 0:
					if int(position[1]) > 0:
						self.percent = float(position[1]) * 100.0 / float(self.length[1])
				else:
					# Service not seekable or has no length: nothing to do.
					self.close()
		self["cursor"] = MovingPixmap()
		self["time"] = Label()
		self["actions"] = ActionMap(["WizardActions", "DirectionActions"], {"back": self.exit, "ok": self.keyOK, "left": self.keyLeft, "right": self.keyRight}, -1)
		# Poll timer keeps the cursor pixmap and time label in sync with self.percent.
		self.cursorTimer = eTimer()
		self.cursorTimer.callback.append(self.updateCursor)
		self.cursorTimer.start(200, False)
	def updateCursor(self):
		"""Move the cursor pixmap and update the h:mm label for the current percentage."""
		if self.length:
			# 145/2.7 are skin-specific pixel offsets of the seekbar graphic.
			x = 145 + int(2.7 * self.percent)
			self["cursor"].moveTo(x, 15, 1)
			self["cursor"].startMoving()
			# Positions are in PTS: 90000 ticks per second.
			pts = int(float(self.length[1]) / 100.0 * self.percent)
			self["time"].setText("%d:%02d" % ((pts/60/90000), ((pts/90000)%60)))
	def exit(self):
		"""Stop the update timer and close without seeking."""
		self.cursorTimer.stop()
		self.close()
	def keyOK(self):
		"""Seek to the selected position and close."""
		if self.length:
			self.seek.seekTo(int(float(self.length[1]) / 100.0 * self.percent))
		self.exit()
	def keyLeft(self):
		"""Move the cursor left by the configured sensitivity step, clamped at 0%."""
		self.percent -= float(config.seek.sensibility.getValue()) / 10.0
		if self.percent < 0.0:
			self.percent = 0.0
	def keyRight(self):
		"""Move the cursor right by the configured sensitivity step, clamped at 100%."""
		self.percent += float(config.seek.sensibility.getValue()) / 10.0
		if self.percent > 100.0:
			self.percent = 100.0
	def keyNumberGlobal(self, number):
		"""Jump to number*10 percent when the position entry is selected.

		NOTE(review): self["config"] and self.positionEntry are never defined in
		this class; this handler appears to expect a ConfigListScreen mixin and
		would raise if wired to number keys as-is -- verify before enabling.
		"""
		sel = self["config"].getCurrent()[1]
		if sel == self.positionEntry:
			self.percent = float(number) * 10.0
		else:
			ConfigListScreen.keyNumberGlobal(self, number)
class InfoBarSeek:
	"""handles actions like seeking, pause"""
	# Seek states are tuples: (paused?, fast-forward/rewind speed, slow-motion factor, OSD string).
	SEEK_STATE_PLAY = (0, 0, 0, ">")
	SEEK_STATE_PAUSE = (1, 0, 0, "||")
	SEEK_STATE_EOF = (1, 0, 0, "END")
	def __init__(self, actionmap = "InfobarSeekActions"):
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evSeekableStatusChanged: self.__seekableStatusChanged,
				iPlayableService.evStart: self.__serviceStarted,
				iPlayableService.evEOF: self.__evEOF,
				iPlayableService.evSOF: self.__evSOF,
			})
		self.fast_winding_hint_message_showed = False
		class InfoBarSeekActionMap(HelpableActionMap):
			"""ActionMap that additionally interprets "seek:<secs>" and "seekdef:<key>" actions."""
			def __init__(self, screen, *args, **kwargs):
				HelpableActionMap.__init__(self, screen, *args, **kwargs)
				self.screen = screen
			def action(self, contexts, action):
				if action[:5] == "seek:":
					time = int(action[5:])
					self.screen.doSeekRelative(time * 90000)
					return 1
				elif action[:8] == "seekdef:":
					# Map keys 1..9 onto the three user-defined skip lengths (back/neutral/forward).
					key = int(action[8:])
					time = (-config.seek.selfdefined_13.getValue(), False, config.seek.selfdefined_13.getValue(),
						-config.seek.selfdefined_46.getValue(), False, config.seek.selfdefined_46.getValue(),
						-config.seek.selfdefined_79.getValue(), False, config.seek.selfdefined_79.getValue())[key-1]
					self.screen.doSeekRelative(time * 90000)
					return 1
				else:
					return HelpableActionMap.action(self, contexts, action)
		self["SeekActions"] = InfoBarSeekActionMap(self, actionmap,
			{
				"playpauseService": self.playpauseService,
				"pauseService": (self.pauseService, _("Pause playback")),
				"pauseServiceYellow": (self.pauseServiceYellow, _("Pause playback")),
				"unPauseService": (self.unPauseService, _("Continue playback")),
				"seekFwd": (self.seekFwd, _("Seek forward")),
				"seekFwdManual": (self.seekFwdManual, _("Seek forward (enter time)")),
				"seekBack": (self.seekBack, _("Seek backward")),
				"seekBackManual": (self.seekBackManual, _("Seek backward (enter time)")),
				"SeekbarFwd": self.seekFwdSeekbar,
				"SeekbarBack": self.seekBackSeekbar
			}, prio=-1) # give them a little more priority to win over color buttons
		self["SeekActions"].setEnabled(False)
		self["SeekActionsPTS"] = InfoBarSeekActionMap(self, "InfobarSeekActionsPTS",
			{
				"playpauseService": self.playpauseService,
				"pauseService": (self.pauseService, _("Pause playback")),
				"pauseServiceYellow": (self.pauseServiceYellow, _("Pause playback")),
				"unPauseService": (self.unPauseService, _("Continue playback")),
				"seekFwd": (self.seekFwd, _("skip forward")),
				"seekFwdManual": (self.seekFwdManual, _("skip forward (enter time)")),
				"seekBack": (self.seekBack, _("skip backward")),
				"seekBackManual": (self.seekBackManual, _("skip backward (enter time)")),
			}, prio=-1) # give them a little more priority to win over color buttons
		self["SeekActionsPTS"].setEnabled(False)
		# Activity counter drives the front-panel HDD/progress symbols while seeking is possible.
		self.activity = 0
		self.activityTimer = eTimer()
		self.activityTimer.callback.append(self.doActivityTimer)
		self.seekstate = self.SEEK_STATE_PLAY
		self.lastseekstate = self.SEEK_STATE_PLAY
		self.onPlayStateChanged = [ ]
		self.lockedBecauseOfSkipping = False
		self.__seekableStatusChanged()
	def makeStateForward(self, n):
		"""Return the seek-state tuple for fast forward at n x."""
		return 0, n, 0, ">> %dx" % n
	def makeStateBackward(self, n):
		"""Return the seek-state tuple for rewind at n x."""
		return 0, -n, 0, "<< %dx" % n
	def makeStateSlowMotion(self, n):
		"""Return the seek-state tuple for slow motion at 1/n x."""
		return 0, 0, n, "/%d" % n
	def isStateForward(self, state):
		"""True if state is a fast-forward state."""
		return state[1] > 1
	def isStateBackward(self, state):
		"""True if state is a rewind state."""
		return state[1] < 0
	def isStateSlowMotion(self, state):
		"""True if state is a slow-motion state."""
		return state[1] == 0 and state[2] > 1
	def getHigher(self, n, lst):
		"""Return the first element of lst greater than n, or False (lst is ascending)."""
		for x in lst:
			if x > n:
				return x
		return False
	def getLower(self, n, lst):
		"""Return the largest element of lst smaller than n, or False (lst is ascending)."""
		lst = lst[:]
		lst.reverse()
		for x in lst:
			if x < n:
				return x
		return False
	def showAfterSeek(self):
		"""Briefly show the infobar after a seek, when this screen supports show/hide."""
		if isinstance(self, InfoBarShowHide):
			self.doShow()
	def up(self):
		pass
	def down(self):
		pass
	def getSeek(self):
		"""Return the service's seek interface, or None if the service is not currently seekable."""
		service = self.session.nav.getCurrentService()
		if service is None:
			return None
		seek = service.seek()
		if seek is None or not seek.isCurrentlySeekable():
			return None
		return seek
	def isSeekable(self):
		"""True when seeking is possible (live TV only counts once timeshift is active)."""
		if self.getSeek() is None or (isStandardInfoBar(self) and not self.timeshiftEnabled()):
			return False
		return True
	def __seekableStatusChanged(self):
		"""Service hook: enable/disable seek actions and front-panel symbols when seekability changes."""
		if isStandardInfoBar(self) and self.timeshiftEnabled():
			pass
		elif not self.isSeekable():
			SystemInfo["SeekStatePlay"] = False
			if os.path.exists("/proc/stb/lcd/symbol_hdd"):
				f = open("/proc/stb/lcd/symbol_hdd", "w")
				f.write("0")
				f.close()
			if os.path.exists("/proc/stb/lcd/symbol_hddprogress"):
				f = open("/proc/stb/lcd/symbol_hddprogress", "w")
				f.write("0")
				f.close()
			self["SeekActions"].setEnabled(False)
			self.setSeekState(self.SEEK_STATE_PLAY)
		else:
			self["SeekActions"].setEnabled(True)
			self.activityTimer.start(200, False)
			for c in self.onPlayStateChanged:
				c(self.seekstate)
	def doActivityTimer(self):
		"""Timer tick: animate the front-panel HDD/progress symbols while the service is seekable."""
		if self.isSeekable():
			self.activity += 16
			hdd = 1
			if self.activity >= 100:
				self.activity = 0
			if SystemInfo["FrontpanelDisplay"] and SystemInfo["Display"]:
				if os.path.exists("/proc/stb/lcd/symbol_hdd"):
					if config.lcd.hdd.getValue() == "1":
						file = open("/proc/stb/lcd/symbol_hdd", "w")
						file.write('%d' % int(hdd))
						file.close()
				if os.path.exists("/proc/stb/lcd/symbol_hddprogress"):
					if config.lcd.hdd.getValue() == "1":
						file = open("/proc/stb/lcd/symbol_hddprogress", "w")
						file.write('%d' % int(self.activity))
						file.close()
		else:
			self.activityTimer.stop()
			self.activity = 0
			hdd = 0
			if os.path.exists("/proc/stb/lcd/symbol_hdd"):
				file = open("/proc/stb/lcd/symbol_hdd", "w")
				file.write('%d' % int(hdd))
				file.close()
			if os.path.exists("/proc/stb/lcd/symbol_hddprogress"):
				file = open("/proc/stb/lcd/symbol_hddprogress", "w")
				file.write('%d' % int(self.activity))
				file.close()
	def __serviceStarted(self):
		"""Service hook: reset winding hint and seek state on every new service."""
		self.fast_winding_hint_message_showed = False
		self.setSeekState(self.SEEK_STATE_PLAY)
		self.__seekableStatusChanged()
	def setSeekState(self, state):
		"""Apply a seek-state tuple to the service (pause/unpause/fast-forward/slow-motion/stop).

		Returns False when no service is playing, True otherwise. Falls back to
		PLAY (or PAUSE for impossible slow motion) when the service rejects the
		requested trick mode.
		"""
		service = self.session.nav.getCurrentService()
		if service is None:
			return False
		if not self.isSeekable():
			# Non-seekable services can only play or pause.
			if state not in (self.SEEK_STATE_PLAY, self.SEEK_STATE_PAUSE):
				state = self.SEEK_STATE_PLAY
		pauseable = service.pause()
		if pauseable is None:
			state = self.SEEK_STATE_PLAY
		self.seekstate = state
		if pauseable is not None:
			if self.seekstate[0] and self.seekstate[3] == '||':
				self.activityTimer.stop()
				pauseable.pause()
			elif self.seekstate[0] and self.seekstate[3] == 'END':
				self.activityTimer.stop()
				service.stop()
			elif self.seekstate[1]:
				if not pauseable.setFastForward(self.seekstate[1]):
					pass
				else:
					# Trick mode rejected by the service: resolve to PLAY.
					self.seekstate = self.SEEK_STATE_PLAY
			elif self.seekstate[2]:
				if not pauseable.setSlowMotion(self.seekstate[2]):
					pass
				else:
					# Slow motion rejected by the service: resolve to PAUSE.
					self.seekstate = self.SEEK_STATE_PAUSE
			else:
				self.activityTimer.start(200, False)
				pauseable.unpause()
			for c in self.onPlayStateChanged:
				c(self.seekstate)
		self.checkSkipShowHideLock()
		if hasattr(self, "ScreenSaverTimerStart"):
			self.ScreenSaverTimerStart()
		return True
	def playpauseService(self):
		"""Toggle between play and pause; leaving pause honours config.seek.on_pause."""
		if self.seekstate == self.SEEK_STATE_PLAY:
			self.pauseService()
		else:
			if self.seekstate == self.SEEK_STATE_PAUSE:
				if config.seek.on_pause.getValue() == "play":
					self.unPauseService()
				elif config.seek.on_pause.getValue() == "step":
					self.doSeekRelative(1)
				elif config.seek.on_pause.getValue() == "last":
					self.setSeekState(self.lastseekstate)
					self.lastseekstate = self.SEEK_STATE_PLAY
			else:
				self.unPauseService()
	def pauseService(self):
		"""Pause playback, remembering the previous seek state for "resume last" handling."""
		if self.seekstate != self.SEEK_STATE_EOF:
			self.lastseekstate = self.seekstate
		self.setSeekState(self.SEEK_STATE_PAUSE)
	def pauseServiceYellow(self):
		"""Yellow-button pause; same behaviour as pauseService."""
		if self.seekstate != self.SEEK_STATE_EOF:
			self.lastseekstate = self.seekstate
		self.setSeekState(self.SEEK_STATE_PAUSE)
	def unPauseService(self):
		"""Resume normal playback; returns 0 (unhandled) when already playing."""
		if self.seekstate == self.SEEK_STATE_PLAY:
			return 0
		self.setSeekState(self.SEEK_STATE_PLAY)
	def doSeek(self, pts):
		"""Seek to an absolute position (in PTS, 90000 ticks per second)."""
		seekable = self.getSeek()
		if seekable is None:
			return
		seekable.seekTo(pts)
	def doSeekRelative(self, pts):
		"""Seek relative to the current position (in PTS, 90000 ticks per second)."""
		seekable = self.getSeek()
		# BUGFIX: the previous guard was "seekable is None and int(self.seek.getLength()[1]) < 1",
		# which dereferenced self.seek (never defined by this mixin) and, when the length test
		# failed, still fell through to seekable.seekRelative() on None. Every non-crashing
		# original path with seekable None ended in return, so this preserves working behaviour.
		if seekable is None:
			return
		prevstate = self.seekstate
		if self.seekstate == self.SEEK_STATE_EOF:
			if prevstate == self.SEEK_STATE_PAUSE:
				self.setSeekState(self.SEEK_STATE_PAUSE)
			else:
				self.setSeekState(self.SEEK_STATE_PLAY)
		seekable.seekRelative(pts<0 and -1 or 1, abs(pts))
		if abs(pts) > 100 and config.usage.show_infobar_on_skip.getValue():
			self.showAfterSeek()
	def seekFwd(self):
		"""Step the forward-seek speed ladder (play -> ffwd speeds; rewind -> slower; slow motion -> faster)."""
		seek = self.getSeek()
		if seek and not (seek.isCurrentlySeekable() & 2):
			if not self.fast_winding_hint_message_showed and (seek.isCurrentlySeekable() & 1):
				self.session.open(MessageBox, _("No fast winding possible yet.. but you can use the number buttons to skip forward/backward!"), MessageBox.TYPE_INFO, timeout=10)
				self.fast_winding_hint_message_showed = True
				return
			return 0 # trade as unhandled action
		if self.seekstate == self.SEEK_STATE_PLAY:
			self.setSeekState(self.makeStateForward(int(config.seek.enter_forward.getValue())))
		elif self.seekstate == self.SEEK_STATE_PAUSE:
			if len(config.seek.speeds_slowmotion.getValue()):
				self.setSeekState(self.makeStateSlowMotion(config.seek.speeds_slowmotion.getValue()[-1]))
			else:
				self.setSeekState(self.makeStateForward(int(config.seek.enter_forward.getValue())))
		elif self.seekstate == self.SEEK_STATE_EOF:
			pass
		elif self.isStateForward(self.seekstate):
			speed = self.seekstate[1]
			if self.seekstate[2]:
				speed /= self.seekstate[2]
			speed = self.getHigher(speed, config.seek.speeds_forward.getValue()) or config.seek.speeds_forward.getValue()[-1]
			self.setSeekState(self.makeStateForward(speed))
		elif self.isStateBackward(self.seekstate):
			speed = -self.seekstate[1]
			if self.seekstate[2]:
				speed /= self.seekstate[2]
			speed = self.getLower(speed, config.seek.speeds_backward.getValue())
			if speed:
				self.setSeekState(self.makeStateBackward(speed))
			else:
				self.setSeekState(self.SEEK_STATE_PLAY)
		elif self.isStateSlowMotion(self.seekstate):
			speed = self.getLower(self.seekstate[2], config.seek.speeds_slowmotion.getValue()) or config.seek.speeds_slowmotion.getValue()[0]
			self.setSeekState(self.makeStateSlowMotion(speed))
	def seekBack(self):
		"""Step the backward-seek speed ladder (play/EOF -> rewind; pause -> frame back; ffwd -> slower)."""
		seek = self.getSeek()
		if seek and not (seek.isCurrentlySeekable() & 2):
			if not self.fast_winding_hint_message_showed and (seek.isCurrentlySeekable() & 1):
				self.session.open(MessageBox, _("No fast winding possible yet.. but you can use the number buttons to skip forward/backward!"), MessageBox.TYPE_INFO, timeout=10)
				self.fast_winding_hint_message_showed = True
				return
			return 0 # trade as unhandled action
		seekstate = self.seekstate
		if seekstate == self.SEEK_STATE_PLAY:
			self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.getValue())))
		elif seekstate == self.SEEK_STATE_EOF:
			self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.getValue())))
			self.doSeekRelative(-6)
		elif seekstate == self.SEEK_STATE_PAUSE:
			self.doSeekRelative(-1)
		elif self.isStateForward(seekstate):
			speed = seekstate[1]
			if seekstate[2]:
				speed /= seekstate[2]
			speed = self.getLower(speed, config.seek.speeds_forward.getValue())
			if speed:
				self.setSeekState(self.makeStateForward(speed))
			else:
				self.setSeekState(self.SEEK_STATE_PLAY)
		elif self.isStateBackward(seekstate):
			speed = -seekstate[1]
			if seekstate[2]:
				speed /= seekstate[2]
			speed = self.getHigher(speed, config.seek.speeds_backward.getValue()) or config.seek.speeds_backward.getValue()[-1]
			self.setSeekState(self.makeStateBackward(speed))
		elif self.isStateSlowMotion(seekstate):
			speed = self.getHigher(seekstate[2], config.seek.speeds_slowmotion.getValue())
			if speed:
				self.setSeekState(self.makeStateSlowMotion(speed))
			else:
				self.setSeekState(self.SEEK_STATE_PAUSE)
		self.pts_lastseekspeed = self.seekstate[1]
	def seekFwdManual(self, fwd=True):
		"""Manual forward seek: Seekbar or minute input, per config.seek.baractivation."""
		if config.seek.baractivation.getValue() == "leftright":
			self.session.open(Seekbar, fwd)
		else:
			self.session.openWithCallback(self.fwdSeekTo, MinuteInput)
	def seekBackManual(self, fwd=False):
		"""Manual backward seek: Seekbar or minute input, per config.seek.baractivation."""
		if config.seek.baractivation.getValue() == "leftright":
			self.session.open(Seekbar, fwd)
		else:
			self.session.openWithCallback(self.rwdSeekTo, MinuteInput)
	def seekFwdSeekbar(self, fwd=True):
		"""Forward seek via the alternate key binding (inverse of seekFwdManual's config test)."""
		if not config.seek.baractivation.getValue() == "leftright":
			self.session.open(Seekbar, fwd)
		else:
			self.session.openWithCallback(self.fwdSeekTo, MinuteInput)
	def fwdSeekTo(self, minutes):
		"""Skip forward by the given number of minutes."""
		self.doSeekRelative(minutes * 60 * 90000)
	def seekBackSeekbar(self, fwd=False):
		"""Backward seek via the alternate key binding (inverse of seekBackManual's config test)."""
		if not config.seek.baractivation.getValue() == "leftright":
			self.session.open(Seekbar, fwd)
		else:
			self.session.openWithCallback(self.rwdSeekTo, MinuteInput)
	def rwdSeekTo(self, minutes):
		"""Skip backward by the given number of minutes."""
		self.doSeekRelative(-minutes * 60 * 90000)
	def checkSkipShowHideLock(self):
		"""Keep the infobar locked visible while winding, per config.usage.show_infobar_on_skip."""
		if self.seekstate == self.SEEK_STATE_PLAY or self.seekstate == self.SEEK_STATE_EOF:
			self.lockedBecauseOfSkipping = False
			self.unlockShow()
		else:
			wantlock = self.seekstate != self.SEEK_STATE_PLAY
			if config.usage.show_infobar_on_skip.getValue():
				if self.lockedBecauseOfSkipping and not wantlock:
					self.unlockShow()
					self.lockedBecauseOfSkipping = False
				if wantlock and not self.lockedBecauseOfSkipping:
					self.lockShow()
					self.lockedBecauseOfSkipping = True
	def calcRemainingTime(self):
		"""Return the remaining time (ms) at the current winding speed, 0 at/after the end, or False if unknown."""
		seekable = self.getSeek()
		if seekable is not None:
			length = seekable.getLength()
			try:
				# Prefer the cut-list end mark over the raw service length when available.
				tmp = self.cueGetEndCutPosition()
				if tmp:
					length = (False, tmp)
			except Exception:
				pass
			pos = seekable.getPlayPosition()
			speednom = self.seekstate[1] or 1
			speedden = self.seekstate[2] or 1
			if not length[0] and not pos[0]:
				if length[1] <= pos[1]:
					return 0
				time = (length[1] - pos[1])*speedden/(90*speednom)
				return time
			return False
	def __evEOF(self):
		"""Service hook: handle end-of-file, pausing just before the end when we were winding."""
		if self.seekstate == self.SEEK_STATE_EOF:
			return
		# if we are seeking forward, we try to end up ~1s before the end, and pause there.
		seekstate = self.seekstate
		if self.seekstate != self.SEEK_STATE_PAUSE:
			self.setSeekState(self.SEEK_STATE_EOF)
		if seekstate not in (self.SEEK_STATE_PLAY, self.SEEK_STATE_PAUSE): # if we are seeking
			seekable = self.getSeek()
			if seekable is not None:
				seekable.seekTo(-1)
				# NOTE(review): doEofInternal is also called below, so a wind-to-end
				# triggers it twice (True here, then False) -- confirm intended.
				self.doEofInternal(True)
		if seekstate == self.SEEK_STATE_PLAY: # regular EOF
			self.doEofInternal(True)
		else:
			self.doEofInternal(False)
	def doEofInternal(self, playing):
		pass # Defined in subclasses
	def __evSOF(self):
		"""Service hook: on start-of-file (rewound to the beginning), resume play from position 0."""
		self.setSeekState(self.SEEK_STATE_PLAY)
		self.doSeek(0)
class InfoBarPVRState:
	"""Shows a small PVR state dialog (play/pause/ff/rw icon plus speed text)
	and mirrors that state into the infobar's own widgets when configured."""
	def __init__(self, screen=PVRState, force_show = False):
		self.onChangedEntry = [ ]
		self.onPlayStateChanged.append(self.__playStateChanged)
		self.pvrStateDialog = self.session.instantiateDialog(screen)
		self.onShow.append(self._mayShow)
		self.onHide.append(self.pvrStateDialog.hide)
		self.force_show = force_show
	def createSummary(self):
		return InfoBarMoviePlayerSummary
	def _mayShow(self):
		# Clear the infobar's own state widgets when the in-infobar pvr state
		# display is disabled (pixmap 6 is the "blank" icon here).
		if self.has_key("state") and not config.usage.movieplayer_pvrstate.getValue():
			self["state"].setText("")
			self["statusicon"].setPixmapNum(6)
			self["speed"].setText("")
		if self.shown and self.seekstate != self.SEEK_STATE_EOF and not config.usage.movieplayer_pvrstate.getValue():
			self.pvrStateDialog.show()
			self.startHideTimer()
	def __playStateChanged(self, state):
		playstateString = state[3]
		state_summary = playstateString
		self.pvrStateDialog["state"].setText(playstateString)
		# Map the play-state string onto a status icon index and a speed label.
		# '>>'/'<<' strings look like ">> 2x" — the speed is the second token.
		if playstateString == '>':
			num, speedtext = 0, ""
		elif playstateString == '||':
			num, speedtext = 1, ""
		elif playstateString == 'END':
			num, speedtext = 2, ""
		elif playstateString.startswith('>>'):
			num, speedtext = 3, playstateString.split()[1]
		elif playstateString.startswith('<<'):
			num, speedtext = 4, playstateString.split()[1]
		elif playstateString.startswith('/'):
			num, speedtext = 5, playstateString
		else:
			# Unrecognized state string: previously this fell through with
			# speed_summary/statusicon_summary unbound and raised NameError
			# at the callback below; now the dialog is simply left untouched.
			num, speedtext = None, ""
		if num is not None:
			self.pvrStateDialog["statusicon"].setPixmapNum(num)
			self.pvrStateDialog["speed"].setText(speedtext)
			# Optionally mirror the state into the infobar's own widgets.
			if self.has_key("state") and config.usage.movieplayer_pvrstate.getValue():
				self["state"].setText(playstateString)
				self["statusicon"].setPixmapNum(num)
				self["speed"].setText(speedtext)
		speed_summary = self.pvrStateDialog["speed"].text
		statusicon_summary = num
		for cb in self.onChangedEntry:
			cb(state_summary, speed_summary, statusicon_summary)
		# if we return into "PLAY" state, ensure that the dialog gets hidden if there will be no infobar displayed
		if not config.usage.show_infobar_on_skip.getValue() and self.seekstate == self.SEEK_STATE_PLAY and not self.force_show:
			self.pvrStateDialog.hide()
		else:
			self._mayShow()
class InfoBarTimeshiftState(InfoBarPVRState):
	"""PVR state dialog variant shown while timeshift is active; also displays
	the event name read from the timeshift live-buffer meta file."""
	def __init__(self):
		InfoBarPVRState.__init__(self, screen=TimeshiftState, force_show = True)
		self.onPlayStateChanged.append(self.__timeshiftEventName)
		self.onHide.append(self.__hideTimeshiftState)
	def _mayShow(self):
		# Only show the dialog while timeshift is active and seekable.
		if self.shown and self.timeshiftEnabled() and self.isSeekable():
			InfoBarTimeshift.ptsSeekPointerSetCurrentPos(self)
			if config.timeshift.showinfobar.getValue():
				self["TimeshiftSeekPointerActions"].setEnabled(True)
			self.pvrStateDialog.show()
			self.startHideTimer()
	def __hideTimeshiftState(self):
		self["TimeshiftSeekPointerActions"].setEnabled(False)
		self.pvrStateDialog.hide()
	def __timeshiftEventName(self,state):
		# The .meta file's first line is the service reference, second the
		# event name (each with a trailing newline stripped).
		if self.timeshiftEnabled() and os.path.exists("%spts_livebuffer_%s.meta" % (config.usage.timeshift_path.getValue(),self.pts_currplaying)):
			readmetafile = open("%spts_livebuffer_%s.meta" % (config.usage.timeshift_path.getValue(),self.pts_currplaying), "r")
			servicerefname = readmetafile.readline()[0:-1]
			eventname = readmetafile.readline()[0:-1]
			readmetafile.close()
			self.pvrStateDialog["eventname"].setText(eventname)
		else:
			self.pvrStateDialog["eventname"].setText("")
class InfoBarShowMovies:
	# i don't really like this class.
	# it calls a not further specified "movie list" on up/down/movieList,
	# so this is not more than an action map
	# NOTE(review): showMovies/up/down are expected to be provided by the
	# class this mixin is combined with — they are not defined here.
	def __init__(self):
		self["MovieListActions"] = HelpableActionMap(self, "InfobarMovieListActions",
			{
				"movieList": (self.showMovies, _("Open the movie list")),
				"up": (self.up, _("Open the movie list")),
				"down": (self.down, _("Open the movie list"))
			})
from Screens.PiPSetup import PiPSetup
class InfoBarExtensions:
EXTENSION_SINGLE = 0
EXTENSION_LIST = 1
def __init__(self):
self.list = []
if config.plisettings.ColouredButtons.getValue():
self["InstantExtensionsActions"] = HelpableActionMap(self, "InfobarExtensions",
{
"extensions": (self.bluekey_ex, _("Show extensions...")),
"showPluginBrowser": (self.showPluginBrowser, _("Show the plugin browser..")),
"showEventInfo": (self.SelectopenEventView, _("Show the infomation on current event.")),
"openTimerList": (self.showTimerList, _("Show the list of timers.")),
"openAutoTimerList": (self.showAutoTimerList, _("Show the list of AutoTimers.")),
"openEPGSearch": (self.showEPGSearch, _("Search the epg for current event.")),
"openIMDB": (self.showIMDB, _("Search IMDb for information about current event.")),
"showMediaPlayer": (self.showMediaPlayer, _("Show the media player...")),
}, 1) # lower priority
else:
self["InstantExtensionsActions"] = HelpableActionMap(self, "InfobarExtensions",
{
"extensions": (self.bluekey_ex, _("view extensions...")),
"showPluginBrowser": (self.showPluginBrowser, _("Show the plugin browser..")),
"showEventInfo": (self.SelectopenEventView, _("Show the infomation on current event.")),
"showMediaPlayer": (self.showMediaPlayer, _("Show the media player...")),
}, 1) # lower priority
self.addExtension(extension = self.getLogManager, type = InfoBarExtensions.EXTENSION_LIST)
self.addExtension(extension = self.getOsd3DSetup, type = InfoBarExtensions.EXTENSION_LIST)
self.addExtension(extension = self.getCCcamInfo, type = InfoBarExtensions.EXTENSION_LIST)
self.addExtension(extension = self.getOScamInfo, type = InfoBarExtensions.EXTENSION_LIST)
#self.addExtension(extension = self.getRestartNetwork, type = InfoBarExtensions.EXTENSION_LIST)
def bluekey_ex(self):
self.showExtensionSelection()
def quickmenuStart(self):
try:
self.showExtensionSelection()
except:
print "[INFOBARGENERICS] QuickMenu: error pipshow, starting Quick Menu"
from Plugins.Extensions.Infopanel.QuickMenu import QuickMenu
self.session.open(QuickMenu)
def SelectopenEventView(self):
try:
self.openEventView()
except:
pass
def getLMname(self):
return _("Log Manager")
def getLogManager(self):
if config.logmanager.showinextensions.getValue():
return [((boundFunction(self.getLMname), boundFunction(self.openLogManager), lambda: True), None)]
else:
return []
def getRestartNetworkname(self):
return _("Restart Network")
def getRestartNetwork(self):
return [((boundFunction(self.getRestartNetworkname), boundFunction(self.openRestartNetwork), lambda: True), None)]
def get3DSetupname(self):
return _("OSD 3D Setup")
def getOsd3DSetup(self):
if config.osd.show3dextensions .getValue():
return [((boundFunction(self.get3DSetupname), boundFunction(self.open3DSetup), lambda: True), None)]
else:
return []
def getCCname(self):
return _("CCcam Info")
def getCCcamInfo(self):
softcams = sorted(filter(lambda x: x.startswith('softcam.'), os.listdir("/etc/init.d/")))
for softcam in softcams:
if "cccam" in os.readlink('/etc/init.d/softcam').lower() and config.cccaminfo.showInExtensions.getValue():
return [((boundFunction(self.getCCname), boundFunction(self.openCCcamInfo), lambda: True), None)] or []
else:
return []
def getOSname(self):
return _("OScam Info")
def getOScamInfo(self):
softcams = sorted(filter(lambda x: x.startswith('softcam.'), os.listdir("/etc/init.d/")))
for softcam in softcams:
if "oscam" in os.readlink('/etc/init.d/softcam') and config.oscaminfo.showInExtensions.getValue():
return [((boundFunction(self.getOSname), boundFunction(self.openOScamInfo), lambda: True), None)] or []
else:
return []
def addExtension(self, extension, key = None, type = EXTENSION_SINGLE):
self.list.append((type, extension, key))
if config.usage.sort_extensionslist.getValue():
self.list.sort()
def updateExtension(self, extension, key = None):
self.extensionsList.append(extension)
if key is not None:
if self.extensionKeys.has_key(key):
key = None
if key is None:
for x in self.availableKeys:
if not self.extensionKeys.has_key(x):
key = x
break
if key is not None:
self.extensionKeys[key] = len(self.extensionsList) - 1
def updateExtensions(self):
self.extensionsList = []
self.availableKeys = [ "1", "2", "3", "4", "5", "6", "7", "8", "9", "0", "red", "green", "yellow", "blue" ]
self.extensionKeys = {}
for x in self.list:
if x[0] == self.EXTENSION_SINGLE:
self.updateExtension(x[1], x[2])
else:
for y in x[1]():
self.updateExtension(y[0], y[1])
def showExtensionSelection(self):
self.updateExtensions()
extensionsList = self.extensionsList[:]
keys = []
list = []
colorlist = []
for x in self.availableKeys:
if self.extensionKeys.has_key(x):
entry = self.extensionKeys[x]
extension = self.extensionsList[entry]
if extension[2]():
name = str(extension[0]())
if self.availableKeys.index(x) < 10:
list.append((extension[0](), extension))
else:
colorlist.append((extension[0](), extension))
keys.append(x)
extensionsList.remove(extension)
else:
extensionsList.remove(extension)
if config.usage.sort_extensionslist.getValue():
list.sort()
for x in colorlist:
list.append(x)
list.extend([(x[0](), x) for x in extensionsList])
keys += [""] * len(extensionsList)
self.session.openWithCallback(self.extensionCallback, ChoiceBox, title=_("Please choose an extension..."), list = list, keys = keys, skin_name = "ExtensionsList")
def extensionCallback(self, answer):
if answer is not None:
answer[1][1]()
def showPluginBrowser(self):
from Screens.PluginBrowser import PluginBrowser
self.session.open(PluginBrowser)
def openCCcamInfo(self):
from Screens.CCcamInfo import CCcamInfoMain
self.session.open(CCcamInfoMain)
def openOScamInfo(self):
from Screens.OScamInfo import OscamInfoMenu
self.session.open(OscamInfoMenu)
def showTimerList(self):
self.session.open(TimerEditList)
def openLogManager(self):
from Screens.LogManager import LogManager
self.session.open(LogManager)
def open3DSetup(self):
from Screens.UserInterfacePositioner import OSD3DSetupScreen
self.session.open(OSD3DSetupScreen)
def openRestartNetwork(self):
try:
from Plugins.Extensions.Infopanel.RestartNetwork import RestartNetwork
self.session.open(RestartNetwork)
except:
print'[INFOBARGENERICS] failed to restart network'
def showAutoTimerList(self):
if os.path.exists("/usr/lib/enigma2/python/Plugins/Extensions/AutoTimer/plugin.pyo"):
from Plugins.Extensions.AutoTimer.plugin import main, autostart
from Plugins.Extensions.AutoTimer.AutoTimer import AutoTimer
from Plugins.Extensions.AutoTimer.AutoPoller import AutoPoller
self.autopoller = AutoPoller()
self.autotimer = AutoTimer()
try:
self.autotimer.readXml()
except SyntaxError as se:
self.session.open(
MessageBox,
_("Your config file is not well-formed:\n%s") % (str(se)),
type = MessageBox.TYPE_ERROR,
timeout = 10
)
return
# Do not run in background while editing, this might screw things up
if self.autopoller is not None:
self.autopoller.stop()
from Plugins.Extensions.AutoTimer.AutoTimerOverview import AutoTimerOverview
self.session.openWithCallback(
self.editCallback,
AutoTimerOverview,
self.autotimer
)
else:
self.session.open(MessageBox, _("The AutoTimer plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
def editCallback(self, session):
# XXX: canceling of GUI (Overview) won't affect config values which might have been changed - is this intended?
# Don't parse EPG if editing was canceled
if session is not None:
# Save xml
self.autotimer.writeXml()
# Poll EPGCache
self.autotimer.parseEPG()
# Start autopoller again if wanted
if config.plugins.autotimer.autopoll.getValue():
if self.autopoller is None:
from Plugins.Extensions.AutoTimer.AutoPoller import AutoPoller
self.autopoller = AutoPoller()
self.autopoller.start()
# Remove instance if not running in background
else:
self.autopoller = None
self.autotimer = None
def showEPGSearch(self):
from Plugins.Extensions.EPGSearch.EPGSearch import EPGSearch
s = self.session.nav.getCurrentService()
if s:
info = s.info()
event = info.getEvent(0) # 0 = now, 1 = next
if event:
name = event and event.getEventName() or ''
else:
name = self.session.nav.getCurrentlyPlayingServiceOrGroup().toString()
name = name.split('/')
name = name[-1]
name = name.replace('.',' ')
name = name.split('-')
name = name[0]
if name.endswith(' '):
name = name[:-1]
if name:
self.session.open(EPGSearch, name, False)
else:
self.session.open(EPGSearch)
else:
self.session.open(EPGSearch)
def showIMDB(self):
if os.path.exists("/usr/lib/enigma2/python/Plugins/Extensions/IMDb/plugin.pyo"):
from Plugins.Extensions.IMDb.plugin import IMDB
s = self.session.nav.getCurrentService()
if s:
info = s.info()
event = info.getEvent(0) # 0 = now, 1 = next
name = event and event.getEventName() or ''
self.session.open(IMDB, name)
else:
self.session.open(MessageBox, _("The IMDb plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
def showMediaPlayer(self):
if isinstance(self, InfoBarExtensions):
if isinstance(self, InfoBar):
try: # falls es nicht installiert ist
from Plugins.Extensions.MediaPlayer.plugin import MediaPlayer
self.session.open(MediaPlayer)
no_plugin = False
except Exception, e:
self.session.open(MessageBox, _("The MediaPlayer plugin is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
from Tools.BoundFunction import boundFunction
import inspect
# depends on InfoBarExtensions
class InfoBarPlugins:
	"""Feeds extension-menu plugins into the extensions list."""
	def __init__(self):
		self.addExtension(extension = self.getPluginList, type = InfoBarExtensions.EXTENSION_LIST)
	def getPluginName(self, name):
		# A plugin entry is displayed under the plugin's own name.
		return name
	def getPluginList(self):
		entries = []
		for plugin in plugins.getPlugins(where = PluginDescriptor.WHERE_EXTENSIONSMENU):
			argcount = len(inspect.getargspec(plugin.__call__)[0])
			# One-argument plugins (session) always fit; two-argument ones
			# (session, servicelist) only when a channel selection is around.
			if argcount == 1 or (argcount == 2 and isinstance(self, InfoBarChannelSelection)):
				entries.append(((boundFunction(self.getPluginName, plugin.name), boundFunction(self.runPlugin, plugin), lambda: True), None, plugin.name))
		entries.sort(key = lambda entry: entry[2]) # sort by name
		return entries
	def runPlugin(self, plugin):
		if not isinstance(self, InfoBarChannelSelection):
			plugin(session = self.session)
		else:
			plugin(session = self.session, servicelist = self.servicelist)
from Components.Task import job_manager
class InfoBarJobman:
	"""Feeds pending background jobs (Task framework) into the extensions list."""
	def __init__(self):
		self.addExtension(extension = self.getJobList, type = InfoBarExtensions.EXTENSION_LIST)
	def getJobList(self):
		if not config.usage.jobtaksextensions.getValue():
			return []
		return [((boundFunction(self.getJobName, job), boundFunction(self.showJobView, job), lambda: True), None) for job in job_manager.getPendingJobs()]
	def getJobName(self, job):
		# Label looks like "Status: name (42%)".
		percentage = int(100*job.progress/float(job.end))
		return "%s: %s (%d%%)" % (job.getStatustext(), job.name, percentage)
	def showJobView(self, job):
		from Screens.TaskView import JobView
		# Bring the job to the foreground while its view is open.
		job_manager.in_background = False
		self.session.openWithCallback(self.JobViewCB, JobView, job)
	def JobViewCB(self, in_background):
		job_manager.in_background = in_background
# depends on InfoBarExtensions
class InfoBarPiP:
	"""Picture-in-Picture handling: toggling the PiP window, swapping it with
	the main service, moving it, and pip-zap focus switching."""
	def __init__(self):
		# session.pipshown survives across infobar instances; create it lazily.
		try:
			self.session.pipshown
		except:
			self.session.pipshown = False
		if SystemInfo.get("NumVideoDecoders", 1) > 1 and isinstance(self, InfoBarEPG):
			self["PiPActions"] = HelpableActionMap(self, "InfobarPiPActions",
				{
					"activatePiP": (self.showPiP, _("Activate PiP")),
				})
			if self.allowPiP:
				self.addExtension((self.getShowHideName, self.showPiP, lambda: True), "blue")
				self.addExtension((self.getMoveName, self.movePiP, self.pipShown), "green")
				self.addExtension((self.getSwapName, self.swapPiP, self.pipShown), "yellow")
#				self.addExtension((self.getTogglePipzapName, self.togglePipzap, self.pipShown), "red")
			else:
				self.addExtension((self.getShowHideName, self.showPiP, self.pipShown), "blue")
				self.addExtension((self.getMoveName, self.movePiP, self.pipShown), "green")
	def pipShown(self):
		# Visibility predicate used by the extension entries above.
		return self.session.pipshown
	def pipHandles0Action(self):
		return self.pipShown() and config.usage.pip_zero_button.getValue() != "standard"
	def getShowHideName(self):
		if self.session.pipshown:
			return _("Disable Picture in Picture")
		else:
			return _("Activate Picture in Picture")
	def getSwapName(self):
		return _("Swap services")
	def getMoveName(self):
		return _("Move Picture in Picture")
	def getTogglePipzapName(self):
		slist = self.servicelist
		if slist and slist.dopipzap:
			return _("Zap focus to main screen")
		return _("Zap focus to Picture in Picture")
	def togglePipzap(self):
		# Switch zap focus between the main screen and the PiP window,
		# exchanging the remembered service paths.
		if not self.session.pipshown:
			self.showPiP()
		slist = self.servicelist
		if slist and self.session.pipshown:
			slist.togglePipzap()
			if slist.dopipzap:
				currentServicePath = slist.getCurrentServicePath()
				self.servicelist.setCurrentServicePath(self.session.pip.servicePath, doZap=False)
				self.session.pip.servicePath = currentServicePath
	def showPiP(self):
		# Toggle: tear down the PiP dialog when shown, otherwise create it and
		# start the current (or selected) service in it. The /proc/stb writes
		# reconfigure the LCD MiniTV mode on supported hardware.
		if self.session.pipshown:
			slist = self.servicelist
			if slist and slist.dopipzap:
				self.togglePipzap()
			if self.session.pipshown:
				del self.session.pip
				if SystemInfo["LCDMiniTV"]:
					if config.lcd.modepip.value >= "1":
						f = open("/proc/stb/lcd/mode", "w")
						f.write(config.lcd.modeminitv.value)
						f.close()
				self.session.pipshown = False
		else:
			self.session.pip = self.session.instantiateDialog(PictureInPicture)
			self.session.pip.show()
			newservice = self.session.nav.getCurrentlyPlayingServiceReference() or self.servicelist.servicelist.getCurrent()
			if self.session.pip.playService(newservice):
				self.session.pipshown = True
				self.session.pip.servicePath = self.servicelist.getCurrentServicePath()
				if SystemInfo["LCDMiniTV"]:
					if config.lcd.modepip.value >= "1":
						f = open("/proc/stb/lcd/mode", "w")
						f.write(config.lcd.modepip.value)
						f.close()
						f = open("/proc/stb/vmpeg/1/dst_width", "w")
						f.write("0")
						f.close()
						f = open("/proc/stb/vmpeg/1/dst_height", "w")
						f.write("0")
						f.close()
						f = open("/proc/stb/vmpeg/1/dst_apply", "w")
						f.write("1")
						f.close()
			else:
				# Service refused to play: roll the dialog back.
				self.session.pipshown = False
				del self.session.pip
	def swapPiP(self):
		# Exchange the main and PiP services (and service paths).
		swapservice = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		pipref = self.session.pip.getCurrentService()
		if swapservice and pipref and pipref.toString() != swapservice.toString():
			currentServicePath = self.servicelist.getCurrentServicePath()
			self.servicelist.setCurrentServicePath(self.session.pip.servicePath, doZap=False)
			self.session.pip.playService(swapservice)
			self.session.nav.stopService() # stop portal
			self.session.nav.playService(pipref, checkParentalControl=False, adjust=False)
			self.session.pip.servicePath = currentServicePath
			if self.servicelist.dopipzap:
				# This unfortunately won't work with subservices
				self.servicelist.setCurrentSelection(self.session.pip.getCurrentService())
	def movePiP(self):
		self.session.open(PiPSetup, pip = self.session.pip)
	def pipDoHandle0Action(self):
		# Behaviour of the "0" key while PiP is shown, per configuration.
		use = config.usage.pip_zero_button.getValue()
		if "swap" == use:
			self.swapPiP()
		elif "swapstop" == use:
			self.swapPiP()
			self.showPiP()
		elif "stop" == use:
			self.showPiP()
class InfoBarINFOpanel:
	"""INFO-Panel - handles the infoPanel action"""
	def __init__(self):
		self["INFOpanelActions"] = HelpableActionMap(self, "InfoBarINFOpanel",
			{
				"infoPanel": (self.selectRedKeytask, _("INFO-Panel...")),
				"softcamPanel": (self.softcamPanel, _("Softcam-Panel...")),
			})
		self.onHBBTVActivation = [ ]
		self.onRedButtonActivation = [ ]
	def _openRedPanel(self):
		# Open the Infopanel "red panel" when enabled, else fall back to
		# instant record. (Was duplicated twice inside selectRedKeytask.)
		if config.plugins.infopanel_redpanel.enabled.getValue() == True:
			try:
				from Plugins.Extensions.Infopanel.plugin import Infopanel
				self.session.open(Infopanel, services = self.servicelist)
			except:
				pass
		else:
			self.instantRecord()
	def selectRedKeytask(self):
		# Red key: prefer HbbTV activation when a browser/HbbTV plugin is
		# installed and the current service carries an HbbTV URL; otherwise
		# fall back to the red panel / instant record.
		isWEBBROWSER = None
		isHBBTV = None
		if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/WebBrowser/browser.pyo"):
			isWEBBROWSER = True
		if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/HbbTV/plugin.pyo"):
			isHBBTV = True
		if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/E3Opera/plugin.pyo"):
			isHBBTV = True
		if isWEBBROWSER or isHBBTV:
			service = self.session.nav.getCurrentService()
			info = service and service.info()
			if info and info.getInfoString(iServiceInformation.sHBBTVUrl) != "":
				for x in self.onHBBTVActivation:
					x()
			else:
				self._openRedPanel()
		else:
			self._openRedPanel()
	def softcamPanel(self):
		# Long red press: open the softcam panel when configured.
		if config.plugins.infopanel_redpanel.enabledlong.getValue() == True:
			try:
				from Plugins.Extensions.Infopanel.SoftcamPanel import SoftcamPanel
				self.session.open(SoftcamPanel)
			except:
				pass
		else:
			pass
class InfoBarQuickMenu:
	"""Binds the quick-menu key; currently it opens the extension selection."""
	def __init__(self):
		self["QuickMenuActions"] = HelpableActionMap(self, "InfoBarQuickMenu",
			{
				"quickmenu": (self.bluekey_qm, _("Quick Menu...")),
			})
	def bluekey_qm(self):
		self.showExtensionSelection()
	def quickmenuStart(self):
		# Fall back to the Infopanel QuickMenu if the extension list fails.
		try:
			self.showExtensionSelection()
		except:
			print "[INFOBARGENERICS] QuickMenu: error pipshow, starting Quick Menu"
			from Plugins.Extensions.Infopanel.QuickMenu import QuickMenu
			self.session.open(QuickMenu)
class InfoBarInstantRecord:
	"""Instant Record - handles the instantRecord action in order to
	start/stop instant records"""
	def __init__(self):
		self["InstantRecordActions"] = HelpableActionMap(self, "InfobarInstantRecord",
			{
				"instantRecord": (self.instantRecord, _("Instant recording...")),
			})
		# Only the standard infobar owns the recording list; other infobars
		# (e.g. the movie player) share the main infobar's list.
		if isStandardInfoBar(self):
			self.recording = []
		else:
			from Screens.InfoBar import InfoBar
			InfoBarInstance = InfoBar.instance
			if InfoBarInstance:
				self.recording = InfoBarInstance.recording
	def stopCurrentRecording(self, entry = -1):
		# entry is an index into self.recording (as selected in TimerSelection).
		if entry is not None and entry != -1:
			self.session.nav.RecordTimer.removeEntry(self.recording[entry])
			self.recording.remove(self.recording[entry])
	def getProgramInfoAndEvent(self, info, name):
		"""Fill *info* with serviceref, event, name, description, eventid and
		(when an EPG event exists) the event end time."""
		info["serviceref"] = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		# try to get event info
		event = None
		try:
			service = self.session.nav.getCurrentService()
			epg = eEPGCache.getInstance()
			event = epg.lookupEventTime(info["serviceref"], -1, 0)
			if event is None:
				event = service.info().getEvent(0)
		except:
			pass
		info["event"] = event
		info["name"] = name
		info["description"] = ""
		info["eventid"] = None
		if event is not None:
			curEvent = parseEvent(event)
			info["name"] = curEvent[2]
			info["description"] = curEvent[3]
			info["eventid"] = curEvent[4]
			info["end"] = curEvent[1]
	def startInstantRecording(self, limitEvent = False):
		"""Create and register a RecordTimerEntry for the current service.
		With limitEvent the recording stops at the current event's end,
		otherwise it autoincreases while no conflicting timer exists."""
		begin = int(time())
		end = begin + 3600 # dummy
		name = "instant record"
		info = { }
		self.getProgramInfoAndEvent(info, name)
		serviceref = info["serviceref"]
		event = info["event"]
		if event is not None:
			if limitEvent:
				end = info["end"]
		else:
			if limitEvent:
				self.session.open(MessageBox, _("No event info found, recording indefinitely."), MessageBox.TYPE_INFO)
		if isinstance(serviceref, eServiceReference):
			serviceref = ServiceReference(serviceref)
		recording = RecordTimerEntry(serviceref, begin, end, info["name"], info["description"], info["eventid"], dirname = preferredInstantRecordPath())
		recording.dontSave = True
		if event is None or limitEvent == False:
			recording.autoincrease = True
			recording.setAutoincreaseEnd()
		simulTimerList = self.session.nav.RecordTimer.record(recording)
		if simulTimerList is None: # no conflict
			recording.autoincrease = False
			self.recording.append(recording)
		else:
			if len(simulTimerList) > 1: # with other recording
				name = simulTimerList[1].name
				name_date = ' '.join((name, strftime('%F %T', localtime(simulTimerList[1].begin))))
				# print "[TIMER] conflicts with", name_date
				recording.autoincrease = True # start with max available length, then increment
				if recording.setAutoincreaseEnd():
					self.session.nav.RecordTimer.record(recording)
					self.recording.append(recording)
					self.session.open(MessageBox, _("Record time limited due to conflicting timer %s") % name_date, MessageBox.TYPE_INFO)
				else:
					self.session.open(MessageBox, _("Could not record due to conflicting timer %s") % name, MessageBox.TYPE_INFO)
			else:
				self.session.open(MessageBox, _("Could not record due to invalid service %s") % serviceref, MessageBox.TYPE_INFO)
			recording.autoincrease = False
	def isInstantRecordRunning(self):
		# True when any of our instant recordings is currently running.
		# print "self.recording:", self.recording
		if self.recording:
			for x in self.recording:
				if x.isRunning():
					return True
		return False
	def recordQuestionCallback(self, answer):
		"""ChoiceBox callback from instantRecord(); answer[1] is the action key."""
		# print 'recordQuestionCallback'
		# print "pre:\n", self.recording
		# print 'test1'
		if answer is None or answer[1] == "no":
			# print 'test2'
			return
		list = []
		recording = self.recording[:]
		# Drop entries no longer known to the RecordTimer; collect the
		# running instant recordings for the selection screens below.
		for x in recording:
			if not x in self.session.nav.RecordTimer.timer_list:
				self.recording.remove(x)
			elif x.dontSave and x.isRunning():
				list.append((x, False))
		if answer[1] == "changeduration":
			if len(self.recording) == 1:
				self.changeDuration(0)
			else:
				self.session.openWithCallback(self.changeDuration, TimerSelection, list)
		elif answer[1] == "changeendtime":
			if len(self.recording) == 1:
				self.setEndtime(0)
			else:
				self.session.openWithCallback(self.setEndtime, TimerSelection, list)
		elif answer[1] == "timer":
			import TimerEdit
			self.session.open(TimerEdit.TimerEditList)
		elif answer[1] == "stop":
			self.session.openWithCallback(self.stopCurrentRecording, TimerSelection, list)
		elif answer[1] in ( "indefinitely" , "manualduration", "manualendtime", "event"):
			self.startInstantRecording(limitEvent = answer[1] in ("event", "manualendtime") or False)
			if answer[1] == "manualduration":
				self.changeDuration(len(self.recording)-1)
			elif answer[1] == "manualendtime":
				self.setEndtime(len(self.recording)-1)
		elif answer[1] == "savetimeshift":
			# print 'test1'
			if self.isSeekable() and self.pts_eventcount != self.pts_currplaying:
				# print 'test2'
				InfoBarTimeshift.SaveTimeshift(self, timeshiftfile="pts_livebuffer_%s" % self.pts_currplaying)
			else:
				# print 'test3'
				Notifications.AddNotification(MessageBox,_("Timeshift will get saved at end of event!"), MessageBox.TYPE_INFO, timeout=5)
				self.save_current_timeshift = True
				config.timeshift.isRecording.value = True
		elif answer[1] == "savetimeshiftEvent":
			# print 'test4'
			InfoBarTimeshift.saveTimeshiftEventPopup(self)
		elif answer[1].startswith("pts_livebuffer") is True:
			# print 'test2'
			InfoBarTimeshift.SaveTimeshift(self, timeshiftfile=answer[1])
	def setEndtime(self, entry):
		# Ask for a new end time for recording number *entry*.
		if entry is not None and entry >= 0:
			self.selectedEntry = entry
			self.endtime=ConfigClock(default = self.recording[self.selectedEntry].end)
			dlg = self.session.openWithCallback(self.TimeDateInputClosed, TimeDateInput, self.endtime)
			dlg.setTitle(_("Please change recording endtime"))
	def TimeDateInputClosed(self, ret):
		# ret is (confirmed, timestamp); cancel stops the recording now.
		if len(ret) > 1:
			if ret[0]:
				# print "stopping recording at", strftime("%F %T", localtime(ret[1]))
				if self.recording[self.selectedEntry].end != ret[1]:
					self.recording[self.selectedEntry].autoincrease = False
				self.recording[self.selectedEntry].end = ret[1]
			else:
				if self.recording[self.selectedEntry].end != int(time()):
					self.recording[self.selectedEntry].autoincrease = False
				self.recording[self.selectedEntry].end = int(time())
			self.session.nav.RecordTimer.timeChanged(self.recording[self.selectedEntry])
	def changeDuration(self, entry):
		# Ask for a recording duration (minutes) for recording number *entry*.
		if entry is not None and entry >= 0:
			self.selectedEntry = entry
			self.session.openWithCallback(self.inputCallback, InputBox, title=_("How many minutes do you want to record?"), text="5", maxSize=False, type=Input.NUMBER)
	def inputCallback(self, value):
		# print "stopping recording after", int(value), "minutes."
		entry = self.recording[self.selectedEntry]
		if value is not None:
			# A value of 0 stops the recording immediately.
			if int(value) != 0:
				entry.autoincrease = False
			entry.end = int(time()) + 60 * int(value)
		else:
			if entry.end != int(time()):
				entry.autoincrease = False
			entry.end = int(time())
		self.session.nav.RecordTimer.timeChanged(entry)
	def isTimerRecordRunning(self):
		# True when a scheduled (non-instant) timer recording is running.
		identical = timers = 0
		for timer in self.session.nav.RecordTimer.timer_list:
			if timer.isRunning() and not timer.justplay:
				timers += 1
				if self.recording:
					for x in self.recording:
						if x.isRunning() and x == timer:
							identical += 1
		return timers > identical
	def instantRecord(self):
		"""Entry point for the record key: build and show the choice box."""
		pirr = preferredInstantRecordPath()
		if not findSafeRecordPath(pirr) and not findSafeRecordPath(defaultMoviePath()):
			if not pirr:
				pirr = ""
			self.session.open(MessageBox, _("Missing ") + "\n" + pirr +
						 "\n" + _("No HDD found or HDD not initialized!"), MessageBox.TYPE_ERROR)
			return
		if isStandardInfoBar(self):
			common = ((_("Add recording (stop after current event)"), "event"),
				(_("Add recording (indefinitely)"), "indefinitely"),
				(_("Add recording (enter recording duration)"), "manualduration"),
				(_("Add recording (enter recording endtime)"), "manualendtime"),)
			timeshiftcommon = ((_("Timeshift save recording (stop after current event)"), "savetimeshift"),
				(_("Timeshift save recording (Select event)"), "savetimeshiftEvent"),)
		else:
			common = ()
			timeshiftcommon = ()
		if self.isInstantRecordRunning():
			title =_("A recording is currently running.\nWhat do you want to do?")
			list = ((_("Stop recording"), "stop"),) + common + \
				((_("Change recording (duration)"), "changeduration"),
				(_("Change recording (endtime)"), "changeendtime"),)
			if self.isTimerRecordRunning():
				list += ((_("Stop timer recording"), "timer"),)
		else:
			title=_("Start recording?")
			list = common
			if self.isTimerRecordRunning():
				list += ((_("Stop timer recording"), "timer"),)
		if isStandardInfoBar(self) and self.timeshiftEnabled():
			list = list + timeshiftcommon
		if isStandardInfoBar(self):
			list = list + ((_("Do not record"), "no"),)
		if list:
			self.session.openWithCallback(self.recordQuestionCallback, ChoiceBox,title=title,list=list)
		else:
			return 0
class InfoBarAudioSelection:
def __init__(self):
self["AudioSelectionAction"] = HelpableActionMap(self, "InfobarAudioSelectionActions",
{
"audioSelection": (self.audioSelection, _("Audio options...")),
"audio_key": (self.audio_key, _("Audio options...")),
"audioSelectionLong": (self.audioSelectionLong, _("Toggle Digital downmix...")),
})
def audioSelection(self):
if not hasattr(self, "LongButtonPressed"):
self.LongButtonPressed = False
if not self.LongButtonPressed:
from Screens.AudioSelection import AudioSelection
self.session.openWithCallback(self.audioSelected, AudioSelection, infobar=self)
def audio_key(self):
from Screens.AudioSelection import AudioSelection
self.session.openWithCallback(self.audioSelected, AudioSelection, infobar=self)
def audioSelected(self, ret=None):
print "[infobar::audioSelected]", ret
def audioSelectionLong(self):
if SystemInfo["CanDownmixAC3"] and self.LongButtonPressed:
if config.av.downmix_ac3.getValue():
message = _("Dobly Digital downmix is now") + " " + _("disabled")
print '[Audio] Dobly Digital downmix is now disabled'
config.av.downmix_ac3.setValue(False)
else:
config.av.downmix_ac3.setValue(True)
message = _("Dobly Digital downmix is now") + " " + _("enabled")
print '[Audio] Dobly Digital downmix is now enabled'
Notifications.AddPopup(text = message, type = MessageBox.TYPE_INFO, timeout = 5, id = "DDdownmixToggle")
class InfoBarSubserviceSelection:
	def __init__(self):
		# Green key / subservice selection bindings.
		self["SubserviceSelectionAction"] = HelpableActionMap(self, "InfobarSubserviceSelectionActions",
			{
				"GreenPressed": (self.GreenPressed),
				"subserviceSelection": (self.subserviceSelection),
			})
		# Quickzap between subservices; disabled until subservices are detected.
		self["SubserviceQuickzapAction"] = HelpableActionMap(self, "InfobarSubserviceQuickzapActions",
			{
				"nextSubservice": (self.nextSubservice, _("Switch to next sub service")),
				"prevSubservice": (self.prevSubservice, _("Switch to previous sub service"))
			}, -1)
		self["SubserviceQuickzapAction"].setEnabled(False)
		# Re-check subservice availability whenever event info updates.
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evUpdatedEventInfo: self.checkSubservicesAvail
			})
		self.onClose.append(self.__removeNotifications)
		# bsel holds the subservice selection ChoiceBox while it is open.
		self.bsel = None
def GreenPressed(self):
if not config.plisettings.Subservice.getValue():
self.openTimerList()
else:
service = self.session.nav.getCurrentService()
subservices = service and service.subServices()
if not subservices or subservices.getNumberOfSubservices() == 0:
if fileExists("/usr/lib/enigma2/python/Plugins/Extensions/CustomSubservices/plugin.pyo"):
serviceRef = self.session.nav.getCurrentlyPlayingServiceReference()
subservices = self.getAvailableSubservices(serviceRef)
if not subservices or len(subservices) == 0:
self.openPluginBrowser()
else:
self.subserviceSelection()
else:
self.openPluginBrowser()
else:
self.subserviceSelection()
def openPluginBrowser(self):
try:
from Screens.PluginBrowser import PluginBrowser
self.session.open(PluginBrowser)
except:
pass
def __removeNotifications(self):
self.session.nav.event.remove(self.checkSubservicesAvail)
def checkSubservicesAvail(self):
service = self.session.nav.getCurrentService()
subservices = service and service.subServices()
if not subservices or subservices.getNumberOfSubservices() == 0:
self["SubserviceQuickzapAction"].setEnabled(False)
def nextSubservice(self):
self.changeSubservice(+1)
def prevSubservice(self):
self.changeSubservice(-1)
def changeSubservice(self, direction):
service = self.session.nav.getCurrentService()
subservices = service and service.subServices()
n = subservices and subservices.getNumberOfSubservices()
if n and n > 0:
selection = -1
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
idx = 0
while idx < n:
if subservices.getSubservice(idx).toString() == ref.toString():
selection = idx
break
idx += 1
if selection != -1:
selection += direction
if selection >= n:
selection=0
elif selection < 0:
selection=n-1
newservice = subservices.getSubservice(selection)
if newservice.valid():
del subservices
del service
self.session.nav.playService(newservice, False)
def subserviceSelection(self):
service = self.session.nav.getCurrentService()
subservices = service and service.subServices()
self.bouquets = self.servicelist.getBouquetList()
n = subservices and subservices.getNumberOfSubservices()
selection = 0
if n and n > 0:
ref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
tlist = []
idx = 0
while idx < n:
i = subservices.getSubservice(idx)
if i.toString() == ref.toString():
selection = idx
tlist.append((i.getName(), i))
idx += 1
if self.bouquets and len(self.bouquets):
keys = ["red", "blue", "", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" ] + [""] * n
if config.usage.multibouquet.getValue():
tlist = [(_("Quick zap"), "quickzap", service.subServices()), (_("Add to bouquet"), "CALLFUNC", self.addSubserviceToBouquetCallback), ("--", "")] + tlist
else:
tlist = [(_("Quick zap"), "quickzap", service.subServices()), (_("Add to favourites"), "CALLFUNC", self.addSubserviceToBouquetCallback), ("--", "")] + tlist
selection += 3
else:
tlist = [(_("Quick zap"), "quickzap", service.subServices()), ("--", "")] + tlist
keys = ["red", "", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" ] + [""] * n
selection += 2
self.session.openWithCallback(self.subserviceSelected, ChoiceBox, title=_("Please select a sub service..."), list = tlist, selection = selection, keys = keys, skin_name = "SubserviceSelection")
def subserviceSelected(self, service):
del self.bouquets
if not service is None:
if isinstance(service[1], str):
if service[1] == "quickzap":
from Screens.SubservicesQuickzap import SubservicesQuickzap
self.session.open(SubservicesQuickzap, service[2])
else:
self["SubserviceQuickzapAction"].setEnabled(True)
self.session.nav.playService(service[1], False)
def addSubserviceToBouquetCallback(self, service):
if len(service) > 1 and isinstance(service[1], eServiceReference):
self.selectedSubservice = service
if self.bouquets is None:
cnt = 0
else:
cnt = len(self.bouquets)
if cnt > 1: # show bouquet list
self.bsel = self.session.openWithCallback(self.bouquetSelClosed, BouquetSelector, self.bouquets, self.addSubserviceToBouquet)
elif cnt == 1: # add to only one existing bouquet
self.addSubserviceToBouquet(self.bouquets[0][1])
self.session.open(MessageBox, _("Service has been added to the favourites."), MessageBox.TYPE_INFO)
def bouquetSelClosed(self, confirmed):
self.bsel = None
del self.selectedSubservice
if confirmed:
self.session.open(MessageBox, _("Service has been added to the selected bouquet."), MessageBox.TYPE_INFO)
def addSubserviceToBouquet(self, dest):
self.servicelist.addServiceToBouquet(dest, self.selectedSubservice[1])
if self.bsel:
self.bsel.close(True)
else:
del self.selectedSubservice
def openTimerList(self):
self.session.open(TimerEditList)
class InfoBarRedButton:
    """Red-button handling: dispatches to HbbTV activation callbacks."""

    def __init__(self):
        self["RedButtonActions"] = HelpableActionMap(self, "InfobarRedButtonActions",
            {
                "activateRedButton": (self.activateRedButton, _("Red button...")),
            })
        self.onHBBTVActivation = []
        self.onRedButtonActivation = []

    def activateRedButton(self):
        """Fire HbbTV callbacks when the current service advertises an URL."""
        service = self.session.nav.getCurrentService()
        info = service and service.info()
        if not info:
            return
        if info.getInfoString(iServiceInformation.sHBBTVUrl) != "":
            for callback in self.onHBBTVActivation:
                callback()
        elif False: # TODO: other red button services
            for callback in self.onRedButtonActivation:
                callback()
class InfoBarTimerButton:
    """Timer-button support: opens the timer overview screen."""

    def __init__(self):
        self["TimerButtonActions"] = HelpableActionMap(self, "InfobarTimerButtonActions",
            {
                "timerSelection": (self.timerSelection, _("Timer selection...")),
            })

    def timerSelection(self):
        # Lazy import: TimerEdit is only needed when the list is opened.
        from Screens.TimerEdit import TimerEditList as _TimerEditList
        self.session.open(_TimerEditList)
class InfoBarAspectSelection:
    """Green-button cycle between an aspect-policy list and a resolution list."""
    # Three-state machine driven by repeated green presses:
    # HIDDEN -> ASPECT -> RESOLUTION -> HIDDEN.
    STATE_HIDDEN = 0
    STATE_ASPECT = 1
    STATE_RESOLUTION = 2

    def __init__(self):
        self["AspectSelectionAction"] = HelpableActionMap(self, "InfobarAspectSelectionActions",
            {
                "aspectSelection": (self.ExGreen_toggleGreen, _("Aspect list...")),
            })
        self.__ExGreen_state = self.STATE_HIDDEN

    def ExGreen_doAspect(self):
        print "do self.STATE_ASPECT"
        self.__ExGreen_state = self.STATE_ASPECT
        self.aspectSelection()

    def ExGreen_doResolution(self):
        print "do self.STATE_RESOLUTION"
        self.__ExGreen_state = self.STATE_RESOLUTION
        self.resolutionSelection()

    def ExGreen_doHide(self):
        print "do self.STATE_HIDDEN"
        self.__ExGreen_state = self.STATE_HIDDEN

    def ExGreen_toggleGreen(self, arg=""):
        # Advance the state machine one step per green press.
        print self.__ExGreen_state
        if self.__ExGreen_state == self.STATE_HIDDEN:
            print "self.STATE_HIDDEN"
            self.ExGreen_doAspect()
        elif self.__ExGreen_state == self.STATE_ASPECT:
            print "self.STATE_ASPECT"
            self.ExGreen_doResolution()
        elif self.__ExGreen_state == self.STATE_RESOLUTION:
            print "self.STATE_RESOLUTION"
            self.ExGreen_doHide()

    def aspectSelection(self):
        """Build the list of policy modes from /proc and let the user choose."""
        selection = 0
        tlist = []
        tlist.append((_("Resolution"), "resolution"))
        tlist.append(("--", ""))
        try:
            policy = open("/proc/stb/video/policy_choices").read()[:-1]
        except IOError:
            print "couldn't read available policymodes."
            policy_available = [ ]
            return
        # The driver reports available modes as a space-separated list.
        policy_available = policy.split(' ')
        for x in policy_available:
            # Capitalise for display; the payload stays the (translated) mode.
            tlist.append((x[0].upper() + x[1:], _(x)))
        # Preselect the currently active policy.
        mode = open("/proc/stb/video/policy").read()[:-1]
        print mode
        for x in range(len(tlist)):
            if tlist[x][1] == mode:
                selection = x
        keys = ["green", "", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" ]
        self.session.openWithCallback(self.aspectSelected, ChoiceBox, title=_("Please select an aspect ratio..."), list = tlist, selection = selection, keys = keys)

    def aspectSelected(self, aspect):
        """ChoiceBox callback: write the chosen policy to the driver."""
        if not aspect is None:
            if isinstance(aspect[1], str):
                if aspect[1] == "":
                    self.ExGreen_doHide()
                elif aspect[1] == "resolution":
                    self.ExGreen_toggleGreen()
                else:
                    # NOTE(review): selecting "letterbox" writes "panscan" and
                    # vice versa - looks inverted; presumably a deliberate
                    # driver naming quirk, confirm before changing.
                    if aspect[1] == "letterbox":
                        f = open("/proc/stb/video/policy", "w")
                        f.write("panscan")
                        f.close()
                    elif aspect[1] == "panscan":
                        f = open("/proc/stb/video/policy", "w")
                        f.write("letterbox")
                        f.close()
                    else:
                        f = open("/proc/stb/video/policy", "w")
                        f.write(aspect[1])
                        f.close()
                    self.ExGreen_doHide()
            else:
                self.ExGreen_doHide()
        return
class InfoBarResolutionSelection:
def __init__(self):
return
def resolutionSelection(self):
f = open("/proc/stb/vmpeg/0/xres", "r")
xresString = f.read()
f.close()
f = open("/proc/stb/vmpeg/0/yres", "r")
yresString = f.read()
f.close()
if getBoxType().startswith('azbox'):
fpsString = '50000'
else:
try:
f = open("/proc/stb/vmpeg/0/framerate", "r")
fpsString = f.read()
f.close()
except:
print"[InfoBarResolutionSelection] Error open /proc/stb/vmpeg/0/framerate !!"
fpsString = '50000'
xres = int(xresString, 16)
yres = int(yresString, 16)
fps = int(fpsString)
fpsFloat = float(fps)
fpsFloat = fpsFloat/1000
selection = 0
tlist = []
tlist.append((_("Exit"), "exit"))
tlist.append((_("Auto(not available)"), "auto"))
tlist.append(("Video: " + str(xres) + "x" + str(yres) + "@" + str(fpsFloat) + "hz", ""))
tlist.append(("--", ""))
tlist.append(("576i", "576i50"))
tlist.append(("576p", "576p50"))
tlist.append(("720p@50hz", "720p50"))
tlist.append(("720p@60hz", "720p60"))
tlist.append(("1080i@50hz", "1080i50"))
tlist.append(("1080i@60hz", "1080i60"))
tlist.append(("1080p@23.976hz", "1080p23"))
tlist.append(("1080p@24hz", "1080p24"))
tlist.append(("1080p@25hz", "1080p25"))
tlist.append(("1080p@29hz", "1080p29"))
tlist.append(("1080p@30hz", "1080p30"))
keys = ["green", "yellow", "blue", "", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" ]
mode = open("/proc/stb/video/videomode").read()[:-1]
print mode
for x in range(len(tlist)):
if tlist[x][1] == mode:
selection = x
self.session.openWithCallback(self.ResolutionSelected, ChoiceBox, title=_("Please select a resolution..."), list = tlist, selection = selection, keys = keys)
def ResolutionSelected(self, Resolution):
if not Resolution is None:
if isinstance(Resolution[1], str):
if Resolution[1] == "exit" or Resolution[1] == "" or Resolution[1] == "auto":
self.ExGreen_toggleGreen()
if Resolution[1] != "auto":
f = open("/proc/stb/video/videomode", "w")
f.write(Resolution[1])
f.close()
#from enigma import gMainDC
#gMainDC.getInstance().setResolution(-1, -1)
self.ExGreen_doHide()
else:
self.ExGreen_doHide()
return
class InfoBarVmodeButton:
    """Vmode key: cycles the letterbox zoom policy via the VideoMode screen."""

    def __init__(self):
        actions = {
            "vmodeSelection": (self.vmodeSelection, _("Letterbox zoom")),
        }
        self["VmodeButtonActions"] = HelpableActionMap(self, "InfobarVmodeButtonActions", actions)

    def vmodeSelection(self):
        self.session.open(VideoMode)
class VideoMode(Screen):
    """Tiny OSD screen that cycles the 4:3 / 16:9 display policy.

    Each press advances the applicable policy; the screen auto-closes
    one second after the last press.
    """

    def __init__(self, session):
        Screen.__init__(self, session)
        self["videomode"] = Label()
        self["actions"] = NumberActionMap(["InfobarVmodeButtonActions"],
            {
                "vmodeSelection": self.selectVMode
            })
        self.Timer = eTimer()
        self.Timer.callback.append(self.quit)
        self.selectVMode()

    def selectVMode(self):
        # Pick the policy matching the current aspect, advance it cyclically.
        policy = config.av.policy_169 if self.isWideScreen() else config.av.policy_43
        choices = policy.choices
        policy.value = choices[(choices.index(policy.value) + 1) % len(choices)]
        self["videomode"].setText(policy.value)
        # Re-arm the single-shot auto-close timer.
        self.Timer.start(1000, True)

    def isWideScreen(self):
        from Components.Converter.ServiceInfo import WIDESCREEN
        service = self.session.nav.getCurrentService()
        info = service and service.info()
        return info.getInfo(iServiceInformation.sAspect) in WIDESCREEN

    def quit(self):
        self.Timer.stop()
        self.close()
class InfoBarAdditionalInfo:
    """Exposes fixed Boolean sources about receiver capabilities to the skin."""

    def __init__(self):
        # Recording (and therefore timeshift) requires at least one hard disk.
        self["RecordingPossible"] = Boolean(fixed=harddiskmanager.HDDCount() > 0)
        self["TimeshiftPossible"] = self["RecordingPossible"]
        self["ExtensionsAvailable"] = Boolean(fixed=1)
        # TODO: these properties should be queried from the input device keymap
        self["ShowTimeshiftOnYellow"] = Boolean(fixed=0)
        self["ShowAudioOnYellow"] = Boolean(fixed=0)
        self["ShowRecordOnRed"] = Boolean(fixed=0)
class InfoBarNotifications:
    """Displays queued Notifications (popups/dialogs) while this screen runs."""

    def __init__(self):
        self.onExecBegin.append(self.checkNotifications)
        Notifications.notificationAdded.append(self.checkNotificationsIfExecing)
        self.onClose.append(self.__removeNotification)

    def __removeNotification(self):
        Notifications.notificationAdded.remove(self.checkNotificationsIfExecing)

    def checkNotificationsIfExecing(self):
        # Only pop notifications while this screen is actually executing.
        if self.execing:
            self.checkNotifications()

    def checkNotifications(self):
        """Pop the first pending notification and show it.

        As used here, a notification is a tuple
        (callback, screen_class, args, kwargs, id).
        """
        notifications = Notifications.notifications
        if notifications:
            n = notifications[0]
            del notifications[0]
            cb = n[0]
            if n[3].has_key("onSessionOpenCallback"):
                n[3]["onSessionOpenCallback"]()
                del n[3]["onSessionOpenCallback"]
            if cb:
                dlg = self.session.openWithCallback(cb, n[1], *n[2], **n[3])
            elif not Notifications.current_notifications and n[4] == "ZapError":
                # Zap errors become a lightweight overlay dialog dismissed by
                # any key press instead of a modal screen with a timeout.
                if n[3].has_key("timeout"):
                    del n[3]["timeout"]
                n[3]["enable_input"] = False
                dlg = self.session.instantiateDialog(n[1], *n[2], **n[3])
                self.hide()
                dlg.show()
                self.notificationDialog = dlg
                # Lowest-priority global key hook to close the overlay.
                eActionMap.getInstance().bindAction('', -maxint - 1, self.keypressNotification)
            else:
                dlg = self.session.open(n[1], *n[2], **n[3])
            # remember that this notification is currently active
            d = (n[4], dlg)
            Notifications.current_notifications.append(d)
            dlg.onClose.append(boundFunction(self.__notificationClosed, d))

    def closeNotificationInstantiateDialog(self):
        # Tear down the overlay dialog and its global key binding, if any.
        if hasattr(self, "notificationDialog"):
            self.session.deleteDialog(self.notificationDialog)
            del self.notificationDialog
            eActionMap.getInstance().unbindAction('', self.keypressNotification)

    def keypressNotification(self, key, flag):
        # A non-zero flag (completed key event) closes the overlay.
        if flag:
            self.closeNotificationInstantiateDialog()

    def __notificationClosed(self, d):
        Notifications.current_notifications.remove(d)
class InfoBarServiceNotifications:
    """Reacts to the end-of-playback event of the current service."""

    def __init__(self):
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evEnd: self.serviceHasEnded
            })

    def serviceHasEnded(self):
        # print "service end!"
        # Best-effort: drop back to normal play speed. The screen may not
        # provide seek state handling at all, so errors are swallowed
        # deliberately.
        try:
            self.setSeekState(self.SEEK_STATE_PLAY)
        except:
            pass
class InfoBarCueSheetSupport:
    """Cut-list (cue sheet) support: marks, in/out cuts and resume points.

    All positions are in PTS ticks (90000 ticks per second).
    """
    CUT_TYPE_IN = 0
    CUT_TYPE_OUT = 1
    CUT_TYPE_MARK = 2
    CUT_TYPE_LAST = 3  # "last play position" entry used for resuming

    ENABLE_RESUME_SUPPORT = False

    def __init__(self, actionmap = "InfobarCueSheetActions"):
        self["CueSheetActions"] = HelpableActionMap(self, actionmap,
            {
                "jumpPreviousMark": (self.jumpPreviousMark, _("Jump to previous marked position")),
                "jumpNextMark": (self.jumpNextMark, _("Jump to next marked position")),
                "toggleMark": (self.toggleMark, _("Toggle a cut mark at the current position"))
            }, prio=1)
        self.cut_list = [ ]
        self.is_closing = False
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evStart: self.__serviceStarted,
                iPlayableService.evCuesheetChanged: self.downloadCuesheet,
            })

    def __serviceStarted(self):
        if self.is_closing:
            return
        # print "new service started! trying to download cuts!"
        self.downloadCuesheet()
        self.resume_point = None
        if self.ENABLE_RESUME_SUPPORT:
            # Prefer an explicit "last position" cut entry; otherwise fall
            # back to the globally stored resume point (for/else: the else
            # runs only when the loop finished without break).
            for (pts, what) in self.cut_list:
                if what == self.CUT_TYPE_LAST:
                    last = pts
                    break
            else:
                last = getResumePoint(self.session)
            if last is None:
                return
            # only resume if at least 10 seconds ahead, or <10 seconds before the end.
            seekable = self.__getSeekable()
            if seekable is None:
                return # Should not happen?
            length = seekable.getLength() or (None,0)
            # print "seekable.getLength() returns:", length
            # Hmm, this implies we don't resume if the length is unknown...
            # 900000 PTS ticks == 10 seconds.
            if (last > 900000) and (not length[1] or (last < length[1] - 900000)):
                self.resume_point = last
                l = last / 90000
                if config.usage.on_movie_start.getValue() == "ask" or not length[1]:
                    Notifications.AddNotificationWithCallback(self.playLastCB, MessageBox, _("Do you want to resume this playback?") + "\n" + (_("Resume position at %s") % ("%d:%02d:%02d" % (l/3600, l%3600/60, l%60))), timeout=10)
                elif config.usage.on_movie_start.getValue() == "resume":
                    Notifications.AddNotificationWithCallback(self.playLastCB, MessageBox, _("Resuming playback"), timeout=2, type=MessageBox.TYPE_INFO)

    def playLastCB(self, answer):
        # MessageBox callback for the resume question.
        if answer == True and self.resume_point:
            self.doSeek(self.resume_point)
        self.hideAfterResume()

    def hideAfterResume(self):
        if isinstance(self, InfoBarShowHide):
            self.hide()

    def __getSeekable(self):
        service = self.session.nav.getCurrentService()
        if service is None:
            return None
        return service.seek()

    def cueGetCurrentPosition(self):
        """Return the current play position in PTS, or None if not seekable."""
        seek = self.__getSeekable()
        if seek is None:
            return None
        r = seek.getPlayPosition()
        if r[0]:
            # Non-zero first element signals an error from getPlayPosition().
            return None
        return long(r[1])

    def cueGetEndCutPosition(self):
        """Return the start PTS of the current trailing OUT cut, or False."""
        ret = False
        isin = True
        for cp in self.cut_list:
            if cp[1] == self.CUT_TYPE_OUT:
                if isin:
                    isin = False
                    ret = cp[0]
            elif cp[1] == self.CUT_TYPE_IN:
                isin = True
        return ret

    def jumpPreviousNextMark(self, cmp, start=False):
        # Seek to the nearest cut point as scored by cmp; returns success.
        current_pos = self.cueGetCurrentPosition()
        if current_pos is None:
            return False
        mark = self.getNearestCutPoint(current_pos, cmp=cmp, start=start)
        if mark is not None:
            pts = mark[0]
        else:
            return False
        self.doSeek(pts)
        return True

    def jumpPreviousMark(self):
        # we add 5 seconds, so if the play position is <5s after
        # the mark, the mark before will be used
        self.jumpPreviousNextMark(lambda x: -x-5*90000, start=True)

    def jumpNextMark(self):
        # No next mark: jump to the end of the recording instead.
        if not self.jumpPreviousNextMark(lambda x: x-90000):
            self.doSeek(-1)

    def getNearestCutPoint(self, pts, cmp=abs, start=False):
        """Return the cut-list entry nearest to pts as scored by cmp.

        cmp maps a signed distance (cut - pts) to a score; negative scores
        mean "wrong direction" and are discarded.  With start=True the
        stream start (position 0) is also a candidate.
        """
        # can be optimized
        beforecut = True
        nearest = None
        bestdiff = -1
        instate = True
        if start:
            bestdiff = cmp(0 - pts)
            if bestdiff >= 0:
                nearest = [0, False]
        for cp in self.cut_list:
            if beforecut and cp[1] in (self.CUT_TYPE_IN, self.CUT_TYPE_OUT):
                beforecut = False
                if cp[1] == self.CUT_TYPE_IN: # Start is here, disregard previous marks
                    diff = cmp(cp[0] - pts)
                    if start and diff >= 0:
                        nearest = cp
                        bestdiff = diff
                    else:
                        nearest = None
                        bestdiff = -1
            if cp[1] == self.CUT_TYPE_IN:
                instate = True
            elif cp[1] == self.CUT_TYPE_OUT:
                instate = False
            elif cp[1] in (self.CUT_TYPE_MARK, self.CUT_TYPE_LAST):
                diff = cmp(cp[0] - pts)
                # Only marks inside an "in" section can win.
                if instate and diff >= 0 and (nearest is None or bestdiff > diff):
                    nearest = cp
                    bestdiff = diff
        return nearest

    def toggleMark(self, onlyremove=False, onlyadd=False, tolerance=5*90000, onlyreturn=False):
        """Toggle a MARK at the current position.

        A nearby existing point (within tolerance, default 5 s) is removed
        (or returned with onlyreturn); otherwise a new mark is added.
        """
        current_pos = self.cueGetCurrentPosition()
        if current_pos is None:
            # print "not seekable"
            return
        nearest_cutpoint = self.getNearestCutPoint(current_pos)
        if nearest_cutpoint is not None and abs(nearest_cutpoint[0] - current_pos) < tolerance:
            if onlyreturn:
                return nearest_cutpoint
            if not onlyadd:
                self.removeMark(nearest_cutpoint)
        elif not onlyremove and not onlyreturn:
            self.addMark((current_pos, self.CUT_TYPE_MARK))
        if onlyreturn:
            return None

    def addMark(self, point):
        # Keep the cut list sorted by position.
        insort(self.cut_list, point)
        self.uploadCuesheet()
        self.showAfterCuesheetOperation()

    def removeMark(self, point):
        self.cut_list.remove(point)
        self.uploadCuesheet()
        self.showAfterCuesheetOperation()

    def showAfterCuesheetOperation(self):
        if isinstance(self, InfoBarShowHide):
            self.doShow()

    def __getCuesheet(self):
        service = self.session.nav.getCurrentService()
        if service is None:
            return None
        return service.cueSheet()

    def uploadCuesheet(self):
        # Push the in-memory cut list to the service's cue sheet.
        cue = self.__getCuesheet()
        if cue is None:
            # print "upload failed, no cuesheet interface"
            return
        cue.setCutList(self.cut_list)

    def downloadCuesheet(self):
        # Replace the in-memory cut list with the service's cue sheet.
        cue = self.__getCuesheet()
        if cue is None:
            # print "download failed, no cuesheet interface"
            self.cut_list = [ ]
        else:
            self.cut_list = cue.getCutList()
class InfoBarSummary(Screen):
    """LCD/front-display summary for the infobar: clock, service name and
    event progress."""
    skin = """
<screen position="0,0" size="132,64">
<widget source="global.CurrentTime" render="Label" position="62,46" size="82,18" font="Regular;16" >
<convert type="ClockToText">WithSeconds</convert>
</widget>
<widget source="session.RecordState" render="FixedLabel" text=" " position="62,46" size="82,18" zPosition="1" >
<convert type="ConfigEntryTest">config.usage.blinking_display_clock_during_recording,True,CheckSourceBoolean</convert>
<convert type="ConditionalShowHide">Blink</convert>
</widget>
<widget source="session.CurrentService" render="Label" position="6,4" size="120,42" font="Regular;18" >
<convert type="ServiceName">Name</convert>
</widget>
<widget source="session.Event_Now" render="Progress" position="6,46" size="46,18" borderWidth="1" >
<convert type="EventTime">Progress</convert>
</widget>
</screen>"""
    # for picon: (path="piconlcd" will use LCD picons)
    # <widget source="session.CurrentService" render="Picon" position="6,0" size="120,64" path="piconlcd" >
    # <convert type="ServiceName">Reference</convert>
    # </widget>
class InfoBarSummarySupport:
    """Mixin that tells the Screen framework which LCD summary to use."""

    def __init__(self):
        pass

    def createSummary(self):
        # Screen framework hook: class of the summary (front display) screen.
        return InfoBarSummary
class InfoBarMoviePlayerSummary(Screen):
    """LCD/front-display summary for the movie player: clock, service name
    and playback position."""
    skin = """
<screen position="0,0" size="132,64">
<widget source="global.CurrentTime" render="Label" position="62,46" size="64,18" font="Regular;16" halign="right" >
<convert type="ClockToText">WithSeconds</convert>
</widget>
<widget source="session.RecordState" render="FixedLabel" text=" " position="62,46" size="64,18" zPosition="1" >
<convert type="ConfigEntryTest">config.usage.blinking_display_clock_during_recording,True,CheckSourceBoolean</convert>
<convert type="ConditionalShowHide">Blink</convert>
</widget>
<widget source="session.CurrentService" render="Label" position="6,4" size="120,42" font="Regular;18" >
<convert type="ServiceName">Name</convert>
</widget>
<widget source="session.CurrentService" render="Progress" position="6,46" size="56,18" borderWidth="1" >
<convert type="ServicePosition">Position</convert>
</widget>
</screen>"""

    def __init__(self, session, parent):
        Screen.__init__(self, session, parent = parent)
        self["state_summary"] = StaticText("")
        self["speed_summary"] = StaticText("")
        self["statusicon_summary"] = MultiPixmap()
        # Follow the parent screen's state only while this summary is shown.
        self.onShow.append(self.addWatcher)
        self.onHide.append(self.removeWatcher)

    def addWatcher(self):
        self.parent.onChangedEntry.append(self.selectionChanged)

    def removeWatcher(self):
        self.parent.onChangedEntry.remove(self.selectionChanged)

    def selectionChanged(self, state_summary, speed_summary, statusicon_summary):
        # Mirror the parent's playback state onto the front display.
        self["state_summary"].setText(state_summary)
        self["speed_summary"].setText(speed_summary)
        self["statusicon_summary"].setPixmapNum(int(statusicon_summary))
class InfoBarMoviePlayerSummarySupport:
    """Mixin that tells the Screen framework which LCD summary to use for
    the movie player."""

    def __init__(self):
        pass

    def createSummary(self):
        # Screen framework hook: class of the summary (front display) screen.
        return InfoBarMoviePlayerSummary
class InfoBarTeletextPlugin:
def __init__(self):
self.teletext_plugin = None
for p in plugins.getPlugins(PluginDescriptor.WHERE_TELETEXT):
self.teletext_plugin = p
if self.teletext_plugin is not None:
self["TeletextActions"] = HelpableActionMap(self, "InfobarTeletextActions",
{
"startTeletext": (self.startTeletext, _("View teletext..."))
})
else:
print "no teletext plugin found!"
def startTeletext(self):
self.teletext_plugin(session=self.session, service=self.session.nav.getCurrentService())
class InfoBarSubtitleSupport(object):
    """Subtitle track selection and on-screen subtitle display handling."""

    def __init__(self):
        object.__init__(self)
        self["SubtitleSelectionAction"] = HelpableActionMap(self, "InfobarSubtitleSelectionActions",
            {
                "subtitleSelection": (self.subtitleSelection, _("Subtitle selection...")),
            })
        self.selected_subtitle = None
        # The standard infobar owns the single subtitle display window;
        # secondary infobars (e.g. the movie player) reuse it.
        if isStandardInfoBar(self):
            self.subtitle_window = self.session.instantiateDialog(SubtitleDisplay)
        else:
            from Screens.InfoBar import InfoBar
            self.subtitle_window = InfoBar.instance.subtitle_window
        self.subtitle_window.hide()
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evStart: self.__serviceChanged,
                iPlayableService.evEnd: self.__serviceChanged,
                iPlayableService.evUpdatedInfo: self.__updatedInfo
            })

    def getCurrentServiceSubtitle(self):
        service = self.session.nav.getCurrentService()
        return service and service.subtitle()

    def subtitleSelection(self):
        """Open the subtitle chooser if any track (or an active one) exists."""
        service = self.session.nav.getCurrentService()
        subtitle = service and service.subtitle()
        subtitlelist = subtitle and subtitle.getSubtitleList()
        if self.selected_subtitle or subtitlelist and len(subtitlelist)>0:
            from Screens.AudioSelection import SubtitleSelection
            self.session.open(SubtitleSelection, self)
        else:
            return 0

    def __serviceChanged(self):
        # Drop the active subtitle when the service starts or ends.
        if self.selected_subtitle:
            self.selected_subtitle = None
            self.subtitle_window.hide()

    def __updatedInfo(self):
        # Re-enable a cached subtitle choice once track info is available.
        if not self.selected_subtitle:
            subtitle = self.getCurrentServiceSubtitle()
            cachedsubtitle = subtitle.getCachedSubtitle()
            if cachedsubtitle:
                self.enableSubtitle(cachedsubtitle)

    def enableSubtitle(self, selectedSubtitle):
        """Activate selectedSubtitle, or disable subtitles when it is falsy."""
        subtitle = self.getCurrentServiceSubtitle()
        self.selected_subtitle = selectedSubtitle
        if subtitle and self.selected_subtitle:
            subtitle.enableSubtitles(self.subtitle_window.instance, self.selected_subtitle)
            self.subtitle_window.show()
        else:
            if subtitle:
                subtitle.disableSubtitles(self.subtitle_window.instance)
            self.subtitle_window.hide()

    def restartSubtitle(self):
        if self.selected_subtitle:
            self.enableSubtitle(self.selected_subtitle)
class InfoBarServiceErrorPopupSupport:
    """Shows (and clears) popup messages for tuning/zap errors."""

    def __init__(self):
        self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
            {
                iPlayableService.evTuneFailed: self.__tuneFailed,
                iPlayableService.evTunedIn: self.__serviceStarted,
                iPlayableService.evStart: self.__serviceStarted
            })
        self.__serviceStarted()

    def __serviceStarted(self):
        # A (re)started or successfully tuned service clears any zap error.
        self.closeNotificationInstantiateDialog()
        self.last_error = None
        Notifications.RemovePopup(id = "ZapError")

    def __tuneFailed(self):
        if not config.usage.hide_zap_errors.getValue():
            service = self.session.nav.getCurrentService()
            info = service and service.info()
            error = info and info.getInfo(iServiceInformation.sDVBState)
            # Suppress immediate repeats of the same error state.
            if error == self.last_error:
                error = None
            else:
                self.last_error = error
            # Map the DVB state to a user message; unknown states map to None.
            error = {
                eDVBServicePMTHandler.eventNoResources: _("No free tuner!"),
                eDVBServicePMTHandler.eventTuneFailed: _("Tune failed!"),
                eDVBServicePMTHandler.eventNoPAT: _("No data on transponder!\n(Timeout reading PAT)"),
                eDVBServicePMTHandler.eventNoPATEntry: _("Service not found!\n(SID not found in PAT)"),
                eDVBServicePMTHandler.eventNoPMT: _("Service invalid!\n(Timeout reading PMT)"),
                eDVBServicePMTHandler.eventNewProgramInfo: None,
                eDVBServicePMTHandler.eventTuned: None,
                eDVBServicePMTHandler.eventSOF: None,
                eDVBServicePMTHandler.eventEOF: None,
                eDVBServicePMTHandler.eventMisconfiguration: _("Service unavailable!\nCheck tuner configuration!"),
            }.get(error) #this returns None when the key not exist in the dict
            if error:
                self.closeNotificationInstantiateDialog()
                # Only pop up while the dish-moving dialog is not active.
                if hasattr(self, "dishDialog") and not self.dishDialog.dishState():
                    Notifications.AddPopup(text = error, type = MessageBox.TYPE_ERROR, timeout = 5, id = "ZapError")
class InfoBarZoom:
    """Zoom the TV picture in steps via /proc/stb/vmpeg/0/zoomrate."""

    def __init__(self):
        self.zoomrate = 0  # current step; negative values mean zoomed out
        self.zoomin = 1    # direction flag: 1 = zooming in, 0 = zooming out
        self["ZoomActions"] = HelpableActionMap(self, "InfobarZoomActions",
            {
                "ZoomInOut":(self.ZoomInOut, _("Zoom In/Out TV...")),
                "ZoomOff":(self.ZoomOff, _("Zoom Off...")),
            }, prio=2)

    def _writeZoomRate(self, zoomval):
        # Write the driver value, always closing the handle (the original
        # leaked the file object if write() raised).
        with open("/proc/stb/vmpeg/0/zoomrate", "w") as f:
            f.write('%d' % int(zoomval))

    def ZoomInOut(self):
        """Advance the zoom one step, bouncing between the limits."""
        zoomval = 0
        # Reverse direction once the limits (+3 .. -9) are exceeded.
        if self.zoomrate > 3:
            self.zoomin = 0
        elif self.zoomrate < -9:
            self.zoomin = 1
        if self.zoomin == 1:
            self.zoomrate += 1
        else:
            self.zoomrate -= 1
        # Negative steps are encoded for the driver as 10 + abs(step).
        if self.zoomrate < 0:
            zoomval = abs(self.zoomrate) + 10
        else:
            zoomval = self.zoomrate
        # print "zoomRate:", self.zoomrate
        # print "zoomval:", zoomval
        self._writeZoomRate(zoomval)

    def ZoomOff(self):
        """Reset the zoom to normal (rate 0, direction in)."""
        self.zoomrate = 0
        self.zoomin = 1
        self._writeZoomRate(0)
class InfoBarHdmi:
    """HDMI-input switching, full screen or as picture-in-picture.

    Service type 8192 denotes the HDMI input (see the .type checks below).
    """

    def __init__(self):
        self.hdmi_enabled = False
        self.hdmi_enabled_full = False
        self.hdmi_enabled_pip = False
        # Only the INI-90 models get extension-menu entries for HDMI-in.
        if getMachineProcModel().startswith('ini-90'):
            if not self.hdmi_enabled_full:
                self.addExtension((self.getHDMIInFullScreen, self.HDMIInFull, lambda: True), "blue")
            if not self.hdmi_enabled_pip:
                self.addExtension((self.getHDMIInPiPScreen, self.HDMIInPiP, lambda: True), "green")
        self["HDMIActions"] = HelpableActionMap(self, "InfobarHDMIActions",
            {
                "HDMIin":(self.HDMIIn, _("Switch to HDMI in mode")),
                "HDMIinLong":(self.HDMIInLong, _("Switch to HDMI in mode")),
            }, prio=2)

    def HDMIInLong(self):
        """Long press: toggle HDMI-in inside a PiP window."""
        if self.LongButtonPressed:
            if not hasattr(self.session, 'pip') and not self.session.pipshown:
                self.session.pip = self.session.instantiateDialog(PictureInPicture)
                self.session.pip.playService(eServiceReference('8192:0:1:0:0:0:0:0:0:0:'))
                self.session.pip.show()
                self.session.pipshown = True
            else:
                # PiP already exists: switch it to HDMI-in, or close it if it
                # is already showing HDMI-in.
                curref = self.session.pip.getCurrentService()
                if curref and curref.type != 8192:
                    self.session.pip.playService(eServiceReference('8192:0:1:0:0:0:0:0:0:0:'))
                else:
                    self.session.pipshown = False
                    del self.session.pip

    def HDMIIn(self):
        """Short press: toggle HDMI-in full screen."""
        if not self.LongButtonPressed:
            slist = self.servicelist
            curref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
            if curref and curref.type != 8192:
                self.session.nav.playService(eServiceReference('8192:0:1:0:0:0:0:0:0:0:'))
            else:
                # Leave HDMI-in: return to the service list's current entry.
                self.session.nav.playService(slist.servicelist.getCurrent())

    def getHDMIInFullScreen(self):
        # Extension-menu label reflecting the current state.
        if not self.hdmi_enabled_full:
            return _("Turn on HDMI-IN Full screen mode")
        else:
            return _("Turn off HDMI-IN Full screen mode")

    def getHDMIInPiPScreen(self):
        # Extension-menu label reflecting the current state.
        if not self.hdmi_enabled_pip:
            return _("Turn on HDMI-IN PiP mode")
        else:
            return _("Turn off HDMI-IN PiP mode")

    def HDMIInPiP(self):
        """Extension menu: toggle HDMI-in PiP, tracking hdmi_enabled_pip."""
        if not hasattr(self.session, 'pip') and not self.session.pipshown:
            self.hdmi_enabled_pip = True
            self.session.pip = self.session.instantiateDialog(PictureInPicture)
            self.session.pip.playService(eServiceReference('8192:0:1:0:0:0:0:0:0:0:'))
            self.session.pip.show()
            self.session.pipshown = True
        else:
            curref = self.session.pip.getCurrentService()
            if curref and curref.type != 8192:
                self.hdmi_enabled_pip = True
                self.session.pip.playService(eServiceReference('8192:0:1:0:0:0:0:0:0:0:'))
            else:
                self.hdmi_enabled_pip = False
                self.session.pipshown = False
                del self.session.pip

    def HDMIInFull(self):
        """Extension menu: toggle HDMI-in full screen, tracking hdmi_enabled_full."""
        slist = self.servicelist
        curref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
        if curref and curref.type != 8192:
            self.hdmi_enabled_full = True
            self.session.nav.playService(eServiceReference('8192:0:1:0:0:0:0:0:0:0:'))
        else:
            self.hdmi_enabled_full = False
            self.session.nav.playService(slist.servicelist.getCurrent())
class InfoBarPowersaver:
def __init__(self):
self.inactivityTimer = eTimer()
self.inactivityTimer.callback.append(self.inactivityTimeout)
self.restartInactiveTimer()
self.sleepTimer = eTimer()
self.sleepTimer.callback.append(self.sleepTimerTimeout)
eActionMap.getInstance().bindAction('', -maxint - 1, self.keypress)
def keypress(self, key, flag):
if flag:
self.restartInactiveTimer()
def restartInactiveTimer(self):
time = abs(int(config.usage.inactivity_timer.value))
if time:
self.inactivityTimer.startLongTimer(time)
else:
self.inactivityTimer.stop()
def inactivityTimeout(self):
if config.usage.inactivity_timer_blocktime.value:
curtime = localtime(time())
if curtime.tm_year != 1970: #check if the current time is valid
curtime = (curtime.tm_hour, curtime.tm_min, curtime.tm_sec)
begintime = tuple(config.usage.inactivity_timer_blocktime_begin.value)
endtime = tuple(config.usage.inactivity_timer_blocktime_end.value)
if begintime <= endtime and (curtime >= begintime and curtime < endtime) or begintime > endtime and (curtime >= begintime or curtime < endtime):
duration = (endtime[0]*3600 + endtime[1]*60) - (curtime[0]*3600 + curtime[1]*60 + curtime[2])
if duration:
if duration < 0:
duration += 24*3600
self.inactivityTimer.startLongTimer(duration)
return
if Screens.Standby.inStandby:
self.inactivityTimeoutCallback(True)
else:
if int(config.usage.inactivity_timer.value) < 0:
message = _("Your receiver will shutdown due to inactivity.")
else:
message = _("Your receiver will got to standby due to inactivity.")
message += "\n" + _("Do you want this?")
self.session.openWithCallback(self.inactivityTimeoutCallback, MessageBox, message, timeout=60, simple = True)
def inactivityTimeoutCallback(self, answer):
if answer:
self.goShutdownOrStandby(int(config.usage.inactivity_timer.value))
else:
print "[InfoBarPowersaver] abort"
def setSleepTimer(self, time):
print "[InfoBarPowersaver] set sleeptimer", time
if time:
if time < 0:
message = _("And will shutdown your receiver over ")
else:
message = _("And will put your receiver in standby over ")
m = abs(time / 60)
message = _("The sleep timer has been activated.") + "\n" + message + ngettext("%d minute", "%d minutes", m) % m
self.sleepTimer.startLongTimer(abs(time))
else:
message = _("The sleep timer has been disabled.")
self.sleepTimer.stop()
Notifications.AddPopup(message, type = MessageBox.TYPE_INFO, timeout = 5)
self.sleepTimerSetting = time
def sleepTimerTimeout(self):
if Screens.Standby.inStandby:
self.sleepTimerTimeoutCallback(True)
else:
list = [ (_("Yes"), True), (_("Extend sleeptimer 15 minutes"), "extend"), (_("No"), False) ]
if self.sleepTimerSetting < 0:
message = _("Your receiver will shutdown due to the sleeptimer.")
elif self.sleepTimerSetting > 0:
message = _("Your receiver will got to stand by due to the sleeptimer.")
message += "\n" + _("Do you want this?")
self.session.openWithCallback(self.sleepTimerTimeoutCallback, MessageBox, message, timeout=60, simple = True, list = list)
def sleepTimerTimeoutCallback(self, answer):
if answer == "extend":
print "[InfoBarPowersaver] extend sleeptimer"
if self.sleepTimerSetting < 0:
self.setSleepTimer(-900)
else:
self.setSleepTimer(900)
elif answer:
self.goShutdownOrStandby(self.sleepTimerSetting)
else:
print "[InfoBarPowersaver] abort"
self.setSleepTimer(0)
def goShutdownOrStandby(self, value):
if value < 0:
if Screens.Standby.inStandby:
print "[InfoBarPowersaver] already in standby now shut down"
RecordTimerEntry.TryQuitMainloop()
elif not Screens.Standby.inTryQuitMainloop:
print "[InfoBarPowersaver] goto shutdown"
self.session.open(Screens.Standby.TryQuitMainloop, 1)
elif not Screens.Standby.inStandby:
print "[InfoBarPowersaver] goto standby"
self.session.open(Screens.Standby.Standby)
| gpl-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.