repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
tavendo/AutobahnTestSuite | autobahntestsuite/autobahntestsuite/case/case4_1_4.py | 2 | 1624 | ###############################################################################
##
## Copyright (c) Crossbar.io Technologies GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
from case import Case
class Case4_1_4(Case):
   """Conformance case: a reserved non-control opcode must fail the connection."""

   DESCRIPTION = """Send small text message, then send frame with reserved non-control <b>Opcode = 6</b> and non-empty payload, then send Ping."""

   EXPECTATION = """Echo for first message is received, but then connection is failed immediately, since reserved opcode frame is used. A Pong is not received."""

   def onOpen(self):
      # Only the echo of the initial text frame may come back.
      hello = "Hello, world!"
      self.expected[Case.OK] = [("message", hello, False)]
      self.expected[Case.NON_STRICT] = []
      # The peer must fail the connection (protocol error, 1002) on its own;
      # the close need not be clean and is not initiated by us.
      self.expectedClose = {
         "closedByMe": False,
         "closeCode": [self.p.CLOSE_STATUS_CODE_PROTOCOL_ERROR],
         "requireClean": False,
      }
      self.p.sendFrame(opcode = 1, payload = hello)   # text frame -> should be echoed
      self.p.sendFrame(opcode = 6, payload = hello)   # reserved opcode -> must trigger failure
      self.p.sendFrame(opcode = 9)                    # ping -> no pong expected
      self.p.killAfter(1)
| apache-2.0 |
arnaudsj/pybrain | pybrain/tests/unittests/structure/networks/test_no_gravity_network.py | 31 | 1686 | """
The library should be able to handle networks without any weight:
>>> n1= buildNonGravityNet(False)
>>> n1.paramdim
0
>>> n1.activate([0.2,0.4])[0]
1.289...
>>> n1.activate([0.2,0.4])[0]
1.289...
Now let's verify the recurrent one as well:
>>> n2= buildNonGravityNet(True)
>>> n2.paramdim
0
>>> n2.activate([0.2,0.4])[0]
1.289...
>>> n2.activate([0.2,0.4])[0]
3.478...
"""
from pybrain.structure import RecurrentNetwork, FeedForwardNetwork, IdentityConnection, LinearLayer, SigmoidLayer
from pybrain.tests.testsuites import runModuleTestSuite
def buildNonGravityNet(recurrent = False):
    """Build a small weight-free network (every connection is an IdentityConnection,
    so paramdim is 0), optionally with recurrent links."""
    net = RecurrentNetwork() if recurrent else FeedForwardNetwork()
    inp = LinearLayer(2)
    hidden = LinearLayer(3)
    squash = SigmoidLayer(2)
    out = LinearLayer(1)
    net.addInputModule(inp)
    net.addModule(hidden)
    net.addModule(squash)
    net.addOutputModule(out)
    # Sliced identity connections: data flows without any trainable weights.
    net.addConnection(IdentityConnection(inp, hidden, outSliceFrom = 1))
    net.addConnection(IdentityConnection(inp, hidden, outSliceTo = 2))
    net.addConnection(IdentityConnection(hidden, out, inSliceFrom = 2))
    net.addConnection(IdentityConnection(hidden, out, inSliceTo = 1))
    net.addConnection(IdentityConnection(inp, squash))
    net.addConnection(IdentityConnection(hidden, squash, inSliceFrom = 1))
    net.addConnection(IdentityConnection(squash, out, inSliceFrom = 1))
    if recurrent:
        net.addRecurrentConnection(IdentityConnection(squash, inp))
        net.addRecurrentConnection(IdentityConnection(hidden, hidden, inSliceFrom = 1, outSliceTo = 2))
    net.sortModules()
    return net
# Run the doctests embedded in this module's docstring when executed directly.
if __name__ == "__main__":
    runModuleTestSuite(__import__('__main__'))
| bsd-3-clause |
binhqnguyen/ln | nsc/scons-local-1.2.0.d20090223/SCons/Tool/fortran.py | 19 | 2056 | """SCons.Tool.fortran
Tool-specific initialization for a generic Posix f77/f90 Fortran compiler.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/fortran.py 4043 2009/02/23 09:06:45 scons"
import re
import string
import SCons.Action
import SCons.Defaults
import SCons.Scanner.Fortran
import SCons.Tool
import SCons.Util
from SCons.Tool.FortranCommon import add_all_to_env, add_fortran_to_env
compilers = ['f95', 'f90', 'f77']
def generate(env):
    """Add Builders and construction variables for a generic Posix
    Fortran compiler to the given Environment."""
    add_all_to_env(env)
    add_fortran_to_env(env)
    # Pick the first compiler found on the system; fall back to f77.
    compiler = env.Detect(compilers) or 'f77'
    env['SHFORTRAN'] = compiler
    env['FORTRAN'] = compiler
def exists(env):
    # Tool is considered available only when one of the known Fortran
    # compilers (f95/f90/f77) is detected on the path.
    return env.Detect(compilers)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| gpl-2.0 |
tkasp/osmose-backend | osmose_config.py | 1 | 103302 | #-*- coding: utf-8 -*-
###########################################################################
## ##
## Copyrights Etienne Chové <chove@crans.org> 2009 ##
## ##
## This program is free software: you can redistribute it and/or modify ##
## it under the terms of the GNU General Public License as published by ##
## the Free Software Foundation, either version 3 of the License, or ##
## (at your option) any later version. ##
## ##
## This program is distributed in the hope that it will be useful, ##
## but WITHOUT ANY WARRANTY; without even the implied warranty of ##
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ##
## GNU General Public License for more details. ##
## ##
## You should have received a copy of the GNU General Public License ##
## along with this program. If not, see <http://www.gnu.org/licenses/>. ##
## ##
###########################################################################
# langue : http://fr.wikipedia.org/wiki/Liste_des_codes_ISO_639-1
# PAYS : http://fr.wikipedia.org/wiki/ISO_3166-1
import os
from collections import OrderedDict
import modules.config
from typing import Dict, Optional
###########################################################################
GEOFABRIK = u"http://download.geofabrik.de/"
OSMFR = u"http://download.openstreetmap.fr/extracts/"
OSMCH = u"https://planet.osm.ch/"
class template_config:
    """Base configuration for one analysed area (country or region).

    Instantiating a (subclass of) template_config registers the instance
    in the module-level `config` dict, keyed by country name.
    """

    # Remove temporary working files once the run completes.
    clean_at_end = True

    updt_url = modules.config.url_frontend_update
    dir_work = modules.config.dir_work
    dir_tmp = modules.config.dir_tmp
    dir_cache = modules.config.dir_cache
    dir_scripts = modules.config.dir_osmose
    bin_osmosis = modules.config.bin_osmosis
    bin_pyosmium_up_to_date = modules.config.bin_pyosmium_up_to_date

    # SQL scripts run before the osmosis import (schema creation).
    osmosis_pre_scripts = [
        dir_scripts + "/osmosis/pgsnapshot_schema_0.6.sql",
#       dir_scripts + "/osmosis/osmosis-0.47.4/script/pgsnapshot_schema_0.6_bbox.sql",
        dir_scripts + "/osmosis/osmosis-0.47.4/script/pgsnapshot_schema_0.6_linestring.sql",
        dir_scripts + "/osmosis/CreateMetainfo.sql",
    ]
    # SQL scripts performing the actual data import.
    osmosis_import_scripts = [
        dir_scripts + "/osmosis/ImportDatabase.sql",
    ]
    # SQL scripts run after the import (indexes, helper functions).
    osmosis_post_scripts = [
        dir_scripts + "/osmosis/CreateTagsIndex.sql",
        dir_scripts + "/osmosis/CreateFunctions.sql",
    ]
    osmosis_change_init_post_scripts = [  # Scripts to run on database initialisation
        dir_scripts + "/osmosis/pgsimple_schema_0.6_action_drop.sql",
        dir_scripts + "/osmosis/osmosis-0.47.4/script/pgsnapshot_schema_0.6_action.sql",
    ]
    osmosis_change_post_scripts = [  # Scripts to run each time the database is updated
        dir_scripts + "/osmosis/CreateTouched.sql",
    ]
    osmosis_resume_init_post_scripts = [  # Scripts to run on database initialisation
        dir_scripts + "/osmosis/pgsimple_schema_0.6_action_drop.sql",
        dir_scripts + "/osmosis/osmosis-0.47.4/script/pgsnapshot_schema_0.6_action.sql",
    ]
    osmosis_resume_post_scripts = [  # Scripts to run each time the database is updated
        dir_scripts + "/osmosis/ActionFromTimestamp.sql",
        dir_scripts + "/osmosis/CreateTouched.sql",
    ]
    dir_results = modules.config.dir_results
    dir_extracts = modules.config.dir_extracts
    dir_diffs = modules.config.dir_diffs

    # Database connection settings; subclasses/instances may override.
    db_base: Optional[str] = 'osmose'
    db_user: Optional[str] = 'osmose'
    db_password: Optional[str] = '-osmose-'
    db_host: Optional[str] = os.environ.get('DB_HOST', None)  # Use socket by default
    db_schema: Optional[str] = None
    db_schema_path: Optional[str] = None
    # When True the database is kept between runs instead of being rebuilt.
    db_persistent = False

    source_url = 'https://github.com/osm-fr/osmose-backend/blob/master'

    def __init__(self, country, polygon_id=None, analyser_options=None, download_repo=GEOFABRIK):
        """Register this configuration under `country` in the global `config` dict.

        country -- unique key naming the analysed area
        polygon_id -- OSM relation id of the area boundary (may be None)
        analyser_options -- options shared by the analysers (country code,
            language, projection, ...)
        download_repo -- base URL of the extract mirror
        """
        config[country] = self
        self.country = country
        self.polygon_id = polygon_id  # ID of a relation for the country boundary
        self.download = {}
        self.download_repo = download_repo
        self.analyser: OrderedDict[str, str] = OrderedDict()
        if analyser_options:
            self.analyser_options = analyser_options
        else:
            self.analyser_options = {}
        self.sql_post_scripts = []  # Scripts to run everytime, just before launching analysers
        self.db_extension_check = []
        self.analyser_updt_url = {}

    def init(self):
        """Derive download destination paths once all settings are filled in."""
        if "diff" in self.download:
            self.download["diff_path"] = os.path.join(self.dir_diffs, self.country)
        if "url" in self.download and not "dst" in self.download:
            # Guess the extract file extension from the URL; default to the
            # plain URL suffix when none of the known ones match.
            ext = os.path.splitext(self.download["url"])[1]
            for e in [".osm.pbf", ".osm.bz2", ".osm.gz"]:
                if self.download["url"].endswith(e):
                    ext = e
                    break
            self.download["dst"] = self.dir_extracts + "/" + self.country + ext
config: Dict[str, template_config] = OrderedDict()
###########################################################################
class default_simple(template_config):
    """Area configuration enabling the standard battery of analysers."""

    def __init__(self, country, polygon_id=None, analyser_options=None, download_url=None, download_repo=None):
        template_config.__init__(self, country, polygon_id, analyser_options, download_repo)
        # PostgreSQL extensions required by the soundex / typo analysers.
        self.db_extension_check += ["fuzzystrmatch", "unaccent"]
        self.download = {
            "url": download_url
        }
        # Register every default analyser; insertion order is preserved by
        # the OrderedDict and matches the historical registration order.
        for name in (
            "sax",
            "osmosis_roundabout_reverse",
            "osmosis_roundabout_level",
            "osmosis_soundex",
            "osmosis_roundabout",
            "osmosis_boundary_hole",
            "osmosis_building_overlaps",
            "osmosis_polygon",
            "osmosis_highway_vs_building",
            "osmosis_orphan_nodes_cluster",
            "osmosis_powerline",
            "osmosis_double_tagging",
            "osmosis_relation_associatedStreet",
            "osmosis_highway_link",
            "osmosis_highway_broken_level_continuity",
            "osmosis_relation_large",
            "osmosis_polygon_overlaps",
            "osmosis_useless",
            "osmosis_relation_multipolygon",
            "osmosis_boundary_intersect",
            "osmosis_node_like_way",
            "osmosis_boundary_administrative",
            "osmosis_tag_typo",
            "osmosis_cycleway_track",
            "osmosis_highway_features",
            "osmosis_building_shapes",
            "osmosis_highway_deadend",
            "osmosis_boundary_relation",
            "osmosis_highway_traffic_signals",
            "osmosis_relation_restriction",
            "osmosis_highway_tunnel_bridge",
            "osmosis_waterway",
            "osmosis_duplicated_geotag",
            "osmosis_highway_noexit",
            "osmosis_parking_highway",
            "osmosis_highway_bad_intersection",
            "osmosis_water",
            "osmosis_relation_public_transport",
            "osmosis_highway_turn_lanes",
            "osmosis_highway_almost_junction",
            "osmosis_highway_without_ref",
            "osmosis_building_3nodes",
            "osmosis_wikipedia",
            "osmosis_highway_name_close",
            "osmosis_relation_route_access",
            "osmosis_highway_floating_islands",
            "merge_traffic_signs",
            "merge_street_objects",
            "merge_street_objects2",
            "osmosis_relation_enforcement",
            "osmosis_addr_interpolation",
            "osmosis_camp_pitch_out_of_camp_site",
            "osmosis_relation_open",
        ):
            self.analyser[name] = "xxx"
class default_country_simple(default_simple):
    """Configuration for a whole-country extract fetched from a public mirror.

    Derives the download URLs (pbf extract, boundary poly, replication diff
    and state file) from the layout of the chosen mirror: Geofabrik,
    download.openstreetmap.fr, or planet.osm.ch.
    """
    def __init__(self, part, country, polygon_id=None, analyser_options=None,
                 download_repo=GEOFABRIK, download_country=None):
        """part -- continent/sub-path on the mirror (or None)
        country -- config key; '-' and '/' are normalised to '_'
        download_country -- path on the mirror when it differs from `country`
        """
        part = part + '/' if part is not None else ''
        if not download_country:
            download_country = country
        country = country.replace("-", "_").replace("/", "_")
        # Bug fix: tolerate analyser_options=None (the declared default);
        # dict(..., **None) would raise TypeError.
        analyser_options = dict({"project": "openstreetmap"}, **(analyser_options or {}))
        default_simple.__init__(self, country, polygon_id, analyser_options, download_repo=download_repo)
        self.download.update({
            "url": self.download_repo + part + download_country + "-latest.osm.pbf",
            "poly": self.download_repo + part + download_country + ".poly",
        })
        if download_repo == GEOFABRIK:
            # Geofabrik publishes replication diffs next to the extract.
            self.download["diff"] = self.download_repo + part + download_country + "-updates/"
            self.download["state.txt"] = self.download["diff"] + "state.txt"
        if download_repo == OSMFR:
            # osm.fr keeps polygons and minutely replication in separate trees.
            self.download["poly"] = self.download["poly"].replace("/extracts/", "/polygons/")
            self.download["diff"] = self.download_repo + "../replication/" + part + download_country + "/minute/"
            self.download["state.txt"] = self.download_repo + part + download_country + ".state.txt"
        if download_repo == OSMCH:
            # planet.osm.ch serves a single padded Switzerland extract with hourly diffs.
            self.download["url"] = self.download_repo + download_country + "-padded.osm.pbf"
            self.download["poly"] = self.download_repo + "switzerland-padded.poly"
            self.download["diff"] = self.download_repo + "replication/hour/"
            self.download["state.txt"] = self.download["diff"] + "state.txt"
class default_country(default_country_simple):
    """Country configuration adding a few extra analysers on top of the
    default set enabled by default_country_simple."""
    def __init__(self, part, country, polygon_id=None, analyser_options=None,
                 download_repo=GEOFABRIK, download_country=None):
        default_country_simple.__init__(self, part, country, polygon_id, analyser_options,
                                        download_repo, download_country)
        # Additional analysers relevant for full-country extracts.
        for extra in (
            "osmosis_highway_cul-de-sac_level",
            "osmosis_way_approximate",
            "osmosis_highway_area_access",
        ):
            self.analyser[extra] = "xxx"
def gen_country(area, path_base=None,
                country_base=None, country_code=None, download_repo=GEOFABRIK, include=(), exclude=(), **analyser_options_default):
    """Build a specialised default_country subclass for one country.

    Returns a class whose constructor only needs the sub-area path (and
    optionally a polygon id); area, mirror, analyser options and the
    include/exclude analyser tweaks are pre-bound here and can be extended
    per instance.

    Fixes over the original: immutable default arguments (the shared
    mutable `include=[]` / `exclude=[]` lists are replaced by tuples and
    copied), and the redundant double assignment to country_base_default.
    """
    area_default = area
    path_base_default = path_base
    # The config-key prefix defaults to the download path base.
    country_base_default = country_base or path_base
    country_code_default = country_code
    download_repo_default = download_repo
    # Copy once so later per-instance `include`/`exclude` cannot mutate
    # factory-level state.
    include_default = list(include)
    exclude_default = list(exclude)

    def init(self, path, polygon_id=None, country_code=country_code_default,
             area=area_default, country=None, path_base=path_base_default, country_base=country_base_default, download_repo=download_repo_default, include=(), exclude=(), **analyser_options):
        # Per-instance analyser options override the factory defaults,
        # which in turn override the bare country code.
        ao = {'country': country_code}
        ao.update(analyser_options_default)
        ao.update(analyser_options)
        path = path if isinstance(path, list) else [path]
        # Config country name defaults to the last path component.
        country = (country or path[-1]).replace('-', '_')
        download_country = '/'.join(filter(lambda a: a is not None, [path_base] + path))
        default_country.__init__(self, area, country_base + '_' + country, polygon_id, ao, download_repo, download_country)
        for analyser in include_default + list(include):
            self.analyser[analyser] = 'xxx'
        for analyser in exclude_default + list(exclude):
            del self.analyser[analyser]

    class gen(default_country):
        __init__ = init
    return gen
france_departement = gen_country('europe', 'france', download_repo=OSMFR, language='fr', proj=2154, municipality_ref='ref:INSEE',
phone_code='33', phone_len=9, phone_format=r'^([+]%s([- ./]*[0-9]){8}[0-9])|[0-9]{4}|[0-9]{6}$', phone_international='00', phone_local_prefix='0',
include=[
'osmosis_building_geodesie_FR',
'osmosis_natural_swimming-pool',
'osmosis_fantoir',
'osmosis_highway_motorway',
'osmosis_highway_zone',
'merge_milestone_FR_metropole',
'merge_shop_FR',
], **{'addr:city-admin_level': '8,9'})
france_departement("alsace/bas_rhin", 7415, "FR-67", include=[
'merge_defibrillators_FR_basrhin',
])
france_departement("alsace/haut_rhin", 7403, "FR-68")
include_aquitaine = [
# Aquitiane
'merge_tourism_FR_aquitaine_camp_caravan',
'merge_tourism_FR_aquitaine_museum',
'merge_sport_FR_aquitaine_equestrian',
'merge_library_FR_aquitaine',
'merge_winery_FR_aquitaine',
]
france_departement("aquitaine/dordogne", 7375, "FR-24", include=include_aquitaine)
france_departement("aquitaine/gironde", 7405, "FR-33", include=include_aquitaine + [
# Bordeaux
'merge_recycling_FR_bm',
'merge_parking_FR_bm',
'merge_bicycle_rental_FR_bm',
'merge_cycle_parking_FR_bm',
'merge_public_equipment_FR_bordeaux_toilets',
'merge_public_transport_FR_tbm',
# Gironde
'merge_public_transport_FR_transgironde',
])
france_departement("aquitaine/landes", 7376, "FR-40", include=include_aquitaine)
france_departement("aquitaine/lot_et_garonne", 1284995, "FR-47", include=include_aquitaine)
france_departement("aquitaine/pyrenees_atlantiques", 7450, "FR-64", include=include_aquitaine + [
# Pau
'merge_recycling_FR_capp_glass',
'merge_recycling_FR_capp_clothes',
'merge_parking_FR_capp',
'merge_bicycle_parking_FR_capp',
])
france_departement("auvergne/allier", 1450201, "FR-03")
france_departement("auvergne/cantal", 7381, "FR-15")
france_departement("auvergne/haute_loire", 7452, "FR-43")
france_departement("auvergne/puy_de_dome", 7406, "FR-63")
france_departement("basse_normandie/calvados", 7453, "FR-14")
france_departement("basse_normandie/manche", 7404, "FR-50")
france_departement("basse_normandie/orne", 7419, "FR-61")
france_departement("bourgogne/cote_d_or", 7424, "FR-21")
france_departement("bourgogne/nievre", 7448, "FR-58")
france_departement("bourgogne/saone_et_loire", 7397, "FR-71", include=[
# Saône-et-Loire
'merge_hydrants_FR',
])
france_departement("bourgogne/yonne", 7392, "FR-89")
france_departement("bretagne/cotes_d_armor", 7398, "FR-22")
france_departement("bretagne/ille_et_vilaine", 7465, "FR-35", include=[
# Rennes
'merge_public_equipment_FR_rennes_toilets',
'merge_public_transport_FR_star',
'merge_defibrillators_FR_montfort',
'merge_defibrillators_FR_saintmalo',
])
france_departement("bretagne/finistere", 102430, "FR-29")
france_departement("bretagne/morbihan", 7447, "FR-56", include=[
'merge_defibrillators_FR_lorient',
])
france_departement("centre/cher", 7456, "FR-18")
france_departement("centre/eure_et_loir", 7374, "FR-28")
france_departement("centre/indre", 7417, "FR-36")
france_departement("centre/indre_et_loire", 7408, "FR-37")
france_departement("centre/loir_et_cher", 7399, "FR-41")
france_departement("centre/loiret", 7440, "FR-45")
france_departement("champagne_ardenne/ardennes", 7395, "FR-08")
france_departement("champagne_ardenne/aube", 7441, "FR-10")
france_departement("champagne_ardenne/marne", 7379, "FR-51")
france_departement("champagne_ardenne/haute_marne", 7396, "FR-52")
france_departement("corse/corse_du_sud", 76932, "FR-2A")
france_departement("corse/haute_corse", 76931, "FR-2B")
france_departement("franche_comte/doubs", 7462, "FR-25")
france_departement("franche_comte/jura", 7460, "FR-39")
france_departement("franche_comte/haute_saone", 7423, "FR-70")
france_departement("franche_comte/territoire_de_belfort", 7410, "FR-90")
france_departement("haute_normandie/eure", 7435, "FR-27")
france_departement("haute_normandie/seine_maritime", 7426, "FR-76", include=[
# Le Havre
'merge_public_equipment_FR_lehavre_toilets',
])
include_ile_de_france = [
# Île-de-france
'merge_public_transport_FR_stif',
'merge_bicycle_rental_FR_IDF',
'merge_parking_FR_IDF',
]
france_departement("ile_de_france/paris", 71525, "FR-75", include=include_ile_de_france + [
# Paris
'merge_bicycle_parking_FR_paris',
'merge_defibrillators_FR_paris',
], exclude=[
'merge_shop_FR',
])
france_departement("ile_de_france/hauts_de_seine", 7449, "FR-92", include=include_ile_de_france + [
# Hauts-de-Seine
'merge_restriction_FR_92',
'merge_defibrillators_FR_issylesmoulineaux',
])
france_departement("ile_de_france/seine_saint_denis", 7389, "FR-93", include=include_ile_de_france)
france_departement("ile_de_france/val_de_marne", 7458, "FR-94", include=include_ile_de_france)
france_departement("ile_de_france/essonne", 7401, "FR-91", include=include_ile_de_france)
france_departement("ile_de_france/seine_et_marne", 7383, "FR-77", include=include_ile_de_france)
france_departement("ile_de_france/val_d_oise", 7433, "FR-95", include=include_ile_de_france)
france_departement("ile_de_france/yvelines", 7457, "FR-78", include=include_ile_de_france)
france_departement("languedoc_roussillon/aude", 7446, "FR-11")
france_departement("languedoc_roussillon/gard", 7461, "FR-30")
france_departement("languedoc_roussillon/herault", 7429, "FR-34", include=[
# Montpellier
'merge_public_equipment_FR_montpellier_toilets',
])
france_departement("languedoc_roussillon/lozere", 7421, "FR-48")
france_departement("languedoc_roussillon/pyrenees_orientales", 7466, "FR-66")
france_departement("limousin/correze", 7464, "FR-19")
france_departement("limousin/creuse", 7459, "FR-23")
france_departement("limousin/haute_vienne", 7418, "FR-87")
france_departement("lorraine/meurthe_et_moselle", 51856, "FR-54", include=[
# Nancy
'merge_public_transport_FR_stan',
])
france_departement("lorraine/meuse", 7382, "FR-55")
france_departement("lorraine/moselle", 51854, "FR-57")
france_departement("lorraine/vosges", 7384, "FR-88")
france_departement("midi_pyrenees/ariege", 7439, "FR-09")
france_departement("midi_pyrenees/aveyron", 7451, "FR-12")
france_departement("midi_pyrenees/haute_garonne", 7413, "FR-31", include=[
# Toulouse
'merge_public_equipment_FR_toulouse_toilets',
'merge_defibrillators_FR_toulouse',
'merge_defibrillators_FR_cugnaux',
])
france_departement("midi_pyrenees/gers", 7422, "FR-32", include=[
'merge_defibrillators_FR_gers',
])
france_departement("midi_pyrenees/lot", 7454, "FR-46")
france_departement("midi_pyrenees/hautes_pyrenees", 7467, "FR-65")
france_departement("midi_pyrenees/tarn", 7442, "FR-81")
france_departement("midi_pyrenees/tarn_et_garonne", 7388, "FR-82")
france_departement("nord_pas_de_calais/nord", 7400, "FR-59")
france_departement("nord_pas_de_calais/pas_de_calais", 7394, "FR-62")
france_departement("pays_de_la_loire/loire_atlantique", 7432, "FR-44", include=[
# Nantes
'merge_recycling_FR_nm_glass',
'merge_public_equipment_FR_nantes_toilets',
'merge_recycling_FR_csma',
'merge_waste_disposal_FR_csma',
])
france_departement("pays_de_la_loire/maine_et_loire", 7409, "FR-49", include=[
# Angers
'merge_public_equipment_FR_angers_toilets',
])
france_departement("pays_de_la_loire/mayenne", 7438, "FR-53")
france_departement("pays_de_la_loire/sarthe", 7443, "FR-72")
france_departement("pays_de_la_loire/vendee", 7402, "FR-85")
france_departement("picardie/aisne", 7411, "FR-02")
france_departement("picardie/oise", 7427, "FR-60")
france_departement("picardie/somme", 7463, "FR-80")
france_departement("poitou_charentes/charente", 7428, "FR-16")
france_departement("poitou_charentes/charente_maritime", 7431, "FR-17")
france_departement("poitou_charentes/deux_sevres", 7455, "FR-79")
france_departement("poitou_charentes/vienne", 7377, "FR-86")
france_departement("provence_alpes_cote_d_azur/alpes_de_haute_provence", 7380, "FR-04")
france_departement("provence_alpes_cote_d_azur/hautes_alpes", 7436, "FR-05", include=[
'merge_defibrillators_FR_hautesalpes',
])
france_departement("provence_alpes_cote_d_azur/alpes_maritimes", 7385, "FR-06")
france_departement("provence_alpes_cote_d_azur/bouches_du_rhone", 7393, "FR-13")
france_departement("provence_alpes_cote_d_azur/var", 7390, "FR-83")
france_departement("provence_alpes_cote_d_azur/vaucluse", 7445, "FR-84")
france_departement("rhone_alpes/ain", 7387, "FR-01")
france_departement("rhone_alpes/ardeche", 7430, "FR-07")
france_departement("rhone_alpes/drome", 7434, "FR-26")
france_departement("rhone_alpes/isere", 7437, "FR-38")
france_departement("rhone_alpes/loire", 7420, "FR-42")
france_departement("rhone_alpes/rhone", 7378, "FR-69", include=[
# Lyon
'merge_public_equipment_FR_lyon_toilets',
])
france_departement("rhone_alpes/savoie", 7425, "FR-73")
france_departement("rhone_alpes/haute_savoie", 7407, "FR-74", include=[
# Annecy
'merge_public_transport_FR_sibra',
])
france_departement_dom = gen_country(None, country_base='france', download_repo=OSMFR, language='fr', municipality_ref='ref:INSEE',
phone_len=9, phone_format=r'^([+]%s([- ./]*[0-9]){8}[0-9])|[0-9]{4}|[0-9]{6}$', phone_international='00', phone_local_prefix='0',
include=[
'osmosis_building_geodesie_FR',
'osmosis_natural_swimming-pool',
'osmosis_fantoir',
'osmosis_highway_motorway',
'osmosis_highway_zone',
'merge_heritage_FR_merimee',
'merge_poste_FR',
'merge_school_FR',
'merge_college_FR',
'merge_service_public_FR',
'merge_pitch_FR',
'merge_police_FR_gn',
'merge_police_FR_pn',
'merge_healthcare_FR_finess',
'merge_postal_code_FR',
'merge_post_box_FR',
'merge_shop_FR',
'merge_wastewater_plant_FR',
'merge_museum_FR',
'merge_radio_support_FR',
'merge_defibrillators_FR',
'merge_defibrillators_FR_aedmap',
'merge_cemetery_FR',
'merge_man_made_FR',
'merge_poi_FR',
'merge_natural_FR',
'merge_reservoir_FR',
'merge_water_FR',
], **{'addr:city-admin_level': '8,9'})
france_departement_dom(["central-america", "guadeloupe"], 1401835, "FR-GP", dep_code=971, proj=32620, phone_code="590")
france_departement_dom(["south-america", "guyane"], 1260551, "FR-GF", dep_code=973, language='fr_GF', proj=2972, phone_code="594")
france_departement_dom(["central-america", "martinique"], 1891495, "FR-MQ", dep_code=972, proj=32620, phone_code="596")
france_departement_dom(["africa", "mayotte"], 1259885, "FR-YT", dep_code=976, proj=32738, phone_code="262")
france_departement_dom(["africa", "reunion"], 1785276, "FR-RE", dep_code=974, proj=2975, phone_code="262")
france_com = gen_country(None, country_base='france', download_repo=OSMFR, language='fr', municipality_ref='ref:INSEE',
phone_len=9, phone_format=r'^([+]%s([- ./]*[0-9]){8}[0-9])|[0-9]{4}|[0-9]{6}$', phone_international='00', phone_local_prefix='0',
include=[
'merge_college_FR',
'merge_service_public_FR',
'merge_pitch_FR',
'merge_police_FR_gn',
'merge_police_FR_pn',
'merge_postal_code_FR',
'merge_radio_support_FR',
], **{'addr:city-admin_level': '8,9'})
france_com(["central-america", "saint_barthelemy"], 537967, "FR-BL", proj=2969, phone_code="590", country="saintbarthelemy")
france_com(["central-america", "saint_martin"], 1891583, "FR-MF", proj=2969, phone_code="590", country="saintmartin")
france_com(["north-america", "saint_pierre_et_miquelon"], 233377, "FR-PM", proj=32621, phone_code="508", country="saintpierreetmiquelon")
france_com(["oceania", "wallis_et_futuna"], 290162, "FR-WF", proj=32701, phone_code="681", country="wallisetfutuna")
france_com(["oceania", "polynesie"], 3412620, "FR-PF", language='fr_PF', proj=32706, phone_code="689", phone_format=None, phone_len=8, phone_len_short=6, phone_local_prefix=None, phone_international='00')
france_com(["australia-oceania", "new-caledonia"], 3407643, "NC", download_repo=GEOFABRIK, proj=3163, country="nouvellecaledonie",
phone_code="687", phone_len=6, phone_format=r"^[+]%s([- ./]*[0-9]){5}[0-9]$", phone_international='00')
default_country("merge", "france_taaf", 6063103, download_repo=OSMFR, analyser_options={"country": "TF", "language": "fr", "proj": 32738})
###########################################################################
# France-wide configuration running on a persistent, externally-maintained
# osmosis database (not rebuilt from an extract on each run).
france_local_db = template_config("france_local_db", 1403916, {"project": "openstreetmap", "country": "FR", "language": "fr", "proj": 2154})
france_local_db.db_persistent = True
france_local_db.db_base = "osm"
france_local_db.db_user = "osmose"
# NOTE(review): credential committed in source control — consider loading it
# from the environment instead.
france_local_db.db_password = "clostAdtoi"
france_local_db.db_schema = "osmosis"
france_local_db.db_schema_path = "\"$user\",osmosis,public"
france_local_db.sql_post_scripts += [
    france_local_db.dir_scripts + "/osmosis/CreateFunctions.sql",
    france_local_db.dir_scripts + "/osmosis/CreateMergeAnalyserCache.sql",
]
france_local_db.download["diff_path"] = "/data/work/osmosis/"  # path to find state.txt
# Only the open-data conflation ("merge_*") analysers run on this shared database.
france_local_db.analyser["merge_heritage_FR_merimee"] = "xxx"
france_local_db.analyser["merge_poste_FR"] = "xxx"
france_local_db.analyser["merge_school_FR"] = "xxx"
france_local_db.analyser["merge_railway_level_crossing_FR"] = "xxx"
france_local_db.analyser["merge_railway_railstation_FR"] = "xxx"
france_local_db.analyser["merge_tmc_point_FR"] = "xxx"
france_local_db.analyser["merge_geodesie"] = "xxx"
france_local_db.analyser["merge_college_FR"] = "xxx"
france_local_db.analyser["merge_service_public_FR"] = "xxx"
france_local_db.analyser["merge_pitch_FR"] = "xxx"
france_local_db.analyser["merge_police_FR_gn"] = "xxx"
france_local_db.analyser["merge_police_FR_pn"] = "xxx"
france_local_db.analyser["merge_fuel_FR"] = "xxx"
france_local_db.analyser["merge_healthcare_FR_finess"] = "xxx"
france_local_db.analyser["merge_postal_code_FR"] = "xxx"
france_local_db.analyser["merge_geodesie_support_FR"] = "xxx"
france_local_db.analyser["merge_post_box_FR"] = "xxx"
france_local_db.analyser["merge_power_plant_FR"] = "xxx"
france_local_db.analyser["merge_power_substation_FR"] = "xxx"
france_local_db.analyser["merge_power_tower_FR"] = "xxx"
france_local_db.analyser["merge_restriction_motorway_FR"] = "xxx"
france_local_db.analyser["merge_power_substation_minor_FR"] = "xxx"
france_local_db.analyser["merge_wastewater_plant_FR"] = "xxx"
france_local_db.analyser["merge_museum_FR"] = "xxx"
france_local_db.analyser["merge_radio_support_FR"] = "xxx"
france_local_db.analyser["merge_carpool_FR"] = "xxx"
france_local_db.analyser["merge_charging_station_FR"] = "xxx"
france_local_db.analyser["merge_parking_FR_BNLS"] = "xxx"
france_local_db.analyser["merge_tourism_FR"] = "xxx"
france_local_db.analyser["merge_cemetery_FR"] = "xxx"
france_local_db.analyser["merge_man_made_FR"] = "xxx"
france_local_db.analyser["merge_poi_FR"] = "xxx"
france_local_db.analyser["merge_natural_FR"] = "xxx"
france_local_db.analyser["merge_reservoir_FR"] = "xxx"
france_local_db.analyser["merge_water_FR"] = "xxx"
france_local_db.analyser["merge_defibrillators_FR"] = "xxx"
france_local_db.analyser["merge_defibrillators_FR_aedmap"] = "xxx"
#########################################################################
# Standalone European countries.
# Arguments: continent, name, OSM boundary relation id, per-country options
# ("country" = ISO 3166-1 alpha-2 code, "language", "proj" = EPSG code, ...).
default_country("europe", "albania", 53292, {"country": "AL", "language": "sq", "proj": 32634})
default_country("europe", "andorra", 9407, {"country": "AD", "language": "ca", "proj": 2154})
default_country("europe", "belarus", 59065, {"country": "BY", "language": ["be", "ru"], "proj": 32635}, download_repo=GEOFABRIK)
default_country("europe", "bosnia-herzegovina", 2528142, {"country": "BA", "language": ["bs", "hr", "sr"], "proj": 32633}, download_repo=GEOFABRIK)
default_country("europe", "bulgaria", 186382, {"country": "BG", "language": "bg", "proj": 32635}, download_repo=GEOFABRIK)
default_country("europe", "croatia", 214885, {"country": "HR", "language": "hr", "proj": 32633}, download_repo=GEOFABRIK)
default_country("europe", "estonia", 79510, {"country": "EE", "language": "et", "proj": 32634}, download_repo=GEOFABRIK)
default_country("europe", "cyprus", 307787, {"country": "CY", "language": ["el", "tr", "en"], "driving_side": "left", "proj": 32636})
default_country("europe", "faroe-islands", 52939, {"country": "FO", "language": "fo", "proj": 2169})
default_country("europe", "greece", 192307, {"country": "GR", "language": "el","proj": 32635}, download_repo=GEOFABRIK)
default_country("europe", "guernesey", 270009, {"country": "GG", "language": "en", "driving_side": "left", "speed_limit_unit": "mph", "proj": 32630}, download_repo=OSMFR)
default_country("europe", "hungary", 21335, {"country": "HU", "language": "hu", "proj": 32633}, download_repo=GEOFABRIK)
default_country("europe", "ireland", 62273, {"country": "IE", "driving_side": "left", "language": ["en", "ga"], "proj": 32629}, download_repo=OSMFR)
default_country("europe", "isle-of-man", 62269, {"country": "IM", "language": "en", "driving_side": "left", "speed_limit_unit": "mph", "proj": 32630})
default_country("europe", "jersey", 367988, {"country": "JE", "language": "en", "driving_side": "left", "speed_limit_unit": "mph", "proj": 32630}, download_repo=OSMFR)
default_country("europe", "kosovo", 2088990, {"country": "XK", "language": ["sq", "sr-Latn"], "proj": 32634, "multilingual-style": "xk"})
default_country("europe", "liechtenstein", 1155955, {"country": "LI", "language": "de", "proj": 32632})
lithuania = default_country("europe", "lithuania", 72596, {"country": "LT", "language": "lt", "proj": 32635, "osmosis_way_approximate": {"highway": ("motorway", "trunk", "primary", "secondary", "tertiary")}}, download_repo=GEOFABRIK)
del(lithuania.analyser["osmosis_highway_cul-de-sac_level"]) # follow official highway classification
del(lithuania.analyser["osmosis_highway_broken_level_continuity"]) # follow official highway classification
default_country("europe", "latvia", 72594, {"country": "LV","language": "lv", "proj": 32634}, download_repo=GEOFABRIK)
luxembourg = default_country("europe", "luxembourg", 2171347, {"country": "LU", "language": "fr_LU", "proj": 2169, "boundary_detail_level": 6})
luxembourg.analyser["merge_emergency_points_LU"] = "xxx"
default_country("europe", "malta", 365307, {"country": "MT", "language": "en", "driving_side": "left", "proj": 32633})
# NOTE(review): language "sq" for macedonia looks copied from the albania entry;
# the primary language of MK is Macedonian ("mk") — confirm intended value.
default_country("europe", "macedonia", 53293, {"country": "MK", "language": "sq", "proj": 32634})
default_country("europe", "moldova", 58974, {"country": "MD", "language": "ro", "proj": 32635}, download_repo=GEOFABRIK)
default_country("europe", "monaco", 1124039, {"country": "MC", "language": "fr", "proj": 2154, "phone_code": '377', "phone_len": 8, "phone_format": r'^[+]%s([- ./]*[469])([- ./]*[0-9]){6}[0-9]$', "phone_international": '00'}, download_repo=OSMFR)
default_country("europe", "montenegro", 53296, {"country": "ME", "proj": 32634})
default_country("europe", "romania", 90689, {"country": "RO", "language": "ro", "proj": 31700})
default_country("europe", "san_marino", 54624, {"country": "SM", "language": "it", "proj": 23032}, download_repo=OSMFR)
default_country("europe", "serbia", 1741311, {"country": "RS", "language": "sr", "proj": 32634}, download_repo=GEOFABRIK)
default_country("europe", "slovenia", 218657, {"country": "SI", "language": ["sl", "hu", "it"], "proj": 32633}, download_repo=GEOFABRIK)
default_country("europe", "turkey", 174737, {"country": "TR", "language": "tr", "proj": 32636}, download_repo=GEOFABRIK)
default_country("europe", "vatican_city", 36989, {"country": "VA", "language": "it", "proj": 23032}, download_repo=OSMFR)
# British Sovereign Base Areas and UK constituent parts handled as separate extracts.
default_country("europe", "united_kingdom_akrotiri_and_dhekelia", 3263728, {"country": "GB", "language": ["en", "he"], "driving_side": "left", "proj": 32636}, download_country="cyprus") # British Sovereign Base in Cyprus
default_country("europe", "united_kingdom_gibraltar", 1278736, {"country": "GI", "language": "en", "proj": 32630}, download_repo=OSMFR, download_country="gibraltar")
default_country("europe", "united_kingdom_northern_ireland", 156393, {"country": "GB-NIR", "language": "en", "driving_side": "left", "speed_limit_unit": "mph", "proj": 32629}, download_repo=OSMFR, download_country="united_kingdom/northern_ireland")
default_country("europe", "united_kingdom_wales", 58437, {"country": "GB-WLS", "language": ["en", "cy"], "driving_side": "left", "speed_limit_unit": "mph", "proj": 32630}, download_repo=GEOFABRIK, download_country="great-britain/wales")
default_country("europe", "united_kingdom_scotland", 58446, {"country": "GB-SCT", "language": "en", "driving_side": "left", "speed_limit_unit": "mph", "proj": 32630}, download_repo=GEOFABRIK, download_country="great-britain/scotland")
iceland = default_country("europe","iceland", 299133, {"country": "IS", "language": "is", "proj": 32627}) # 299133
# Empty URL — presumably disables the automatic extract download for Iceland; confirm.
iceland.download["url"] = ""
default_country("europe", "denmark", 50046, {"country": "DK", "language": "da","proj": 32632, "phone_code": '45', "phone_len": 8, "phone_international": '00'}, download_repo=GEOFABRIK)
#########################################################################
# Belgium, split by region. gen_country returns a partially-applied registrar;
# each be_part(name, relation_id, ISO-3166-2 code, ...) adds one region.
be_part = gen_country('europe', 'belgium', download_repo=OSMFR, proj=32631, municipality_ref='ref:INS',
                      phone_code='32', phone_len=[8, 9], phone_len_short=4, phone_international='00', phone_local_prefix='0')
be_part('brussels_capital_region', 54094, 'BE-BRU', language=['fr', 'nl'], **{'multilingual-style': 'be'})
be_part('flanders', 53134, 'BE-VLG', language='nl')
# Wallonia is split by linguistic community (same ISO code BE-WAL for both parts).
be_part('wallonia_french_community', 2620920, 'BE-WAL', language='fr')
be_part('wallonia_german_community', 2425209, 'BE-WAL', language='de')
#########################################################################
# Sweden, split by county (län); ISO 3166-2:SE codes.
se_part = gen_country('europe', 'sweden', download_repo=OSMFR, proj=32633, language='sv')
se_part('stockholm', 54391, 'SE-AB')
se_part('vasterbotten', 52825, 'SE-AC')
se_part('norrbotten', 52824, 'SE-BD')
se_part('uppsala', 54220, 'SE-C')
se_part('sodermanland', 54386, 'SE-D')
se_part('ostergotland', 940675, 'SE-E')
se_part('jonkoping', 54374, 'SE-F')
se_part('kronoberg', 54412, 'SE-G')
se_part('kalmar', 54417, 'SE-H')
se_part('gotland', 941530, 'SE-I')
se_part('blekinge', 54413, 'SE-K')
se_part('skane', 54409, 'SE-M')
se_part('halland', 54403, 'SE-N')
se_part('vastra_gotaland', 54367, 'SE-O')
se_part('varmland', 54223, 'SE-S')
se_part('orebro', 54222, 'SE-T')
se_part('vastmanland', 54221, 'SE-U')
se_part('dalarna', 52834, 'SE-W')
se_part('gavleborg', 52832, 'SE-X')
se_part('vasternorrland', 52827, 'SE-Y')
se_part('jamtland', 52826, 'SE-Z')
#########################################################################
# Switzerland, split by canton; ISO 3166-2:CH codes, CH1903+/LV95 projection (EPSG:2056).
ch_part = gen_country('europe', 'switzerland', download_repo=OSMFR, proj=2056, municipality_ref=['swisstopo:BFS_NUMMER', 'swisstopo:BEZIRKSNUM'],
                      phone_code='41', phone_len=9, phone_international='00', phone_local_prefix='0')
ch_part('aargau', 1686359, 'CH-AG', language='de')
ch_part('appenzell_ausserrhoden', 1686649, 'CH-AR', language='de')
ch_part('appenzell_innerrhoden', 1686666, 'CH-AI', language='de')
ch_part('basel_landschaft', 1686366, 'CH-BL', language='de')
ch_part('basel_stadt', 1699639, 'CH-BS', language='de')
ch_part('bern', 1686344, 'CH-BE', language=['de', 'fr'])
ch_part('fribourg', 1698314, 'CH-FR', language=['fr', 'de'])
ch_part('geneva', 1702419, 'CH-GE', language='fr')
ch_part('glarus', 1685673, 'CH-GL', language='de')
ch_part('grisons', 1686631, 'CH-GR', language=['de', 'it', 'rm'])
ch_part('jura', 1697347, 'CH-JU', language='fr')
ch_part('lucerne', 1685677, 'CH-LU', language='de')
ch_part('neuchatel', 1702420, 'CH-NE', language='fr')
ch_part('nidwalden', 1686449, 'CH-NW', language='de')
ch_part('obwalden', 1686448, 'CH-OW', language='de')
ch_part('schaffhausen', 1696112, 'CH-SH', language='de')
ch_part('schwyz', 1688583, 'CH-SZ', language='de')
ch_part('solothurn', 1701133, 'CH-SO', language='de')
ch_part('saint_gallen', 1687006, 'CH-SG', language='de')
ch_part('thurgau', 1693811, 'CH-TG', language='de')
ch_part('ticino', 1687730, 'CH-TI', language='it')
ch_part('uri', 1693971, 'CH-UR', language='de')
ch_part('valais', 1686699, 'CH-VS', language=['fr', 'de'])
ch_part('vaud', 1702421, 'CH-VD', language='fr')
ch_part('zug', 1686447, 'CH-ZG', language='de')
ch_part('zurich', 1690227, 'CH-ZH', language='de')
#########################################################################
# Finland, split by region (maakunta); ISO 3166-2:FI codes.
fi_part = gen_country('europe', 'finland', download_repo=OSMFR, proj=32635)
fi_part('lapland', 2541341, 'FI-10', language="fi")
fi_part('north_ostrobothnia', 1724360, 'FI-14', language="fi")
fi_part('kainuu', 1997164, 'FI-05', language="fi")
fi_part('north_karelia', 1999428, 'FI-13', language="fi")
fi_part('north_savo', 918898, 'FI-15', language="fi")
fi_part('south_savo', 918897, 'FI-04', language="fi")
fi_part('south_karelia', 2067231, 'FI-02', language="fi")
fi_part('central_finland', 1701740, 'FI-08', language="fi")
fi_part('south_ostrobothnia', 1702263, 'FI-03', language="fi")
fi_part('ostrobothnia', 2000320, 'FI-12', language=["fi", "sv"])
fi_part('central_ostrobothnia', 1702330, 'FI-07', language=["fi", "sv"])
fi_part('pirkanmaa', 1701741, 'FI-11', language="fi")
fi_part('satakunta', 2000361, 'FI-17', language="fi")
fi_part('paijat_hame', 1703362, 'FI-16', language="fi")
fi_part('kanta_hame', 1473990, 'FI-06', language="fi")
fi_part('kymenlaakso', 2102313, 'FI-09', language="fi")
# Fix: uusimaa was registered as FI-19, duplicating southwest_finland below.
# Per ISO 3166-2:FI, Uusimaa is FI-18; Varsinais-Suomi (Southwest Finland) is FI-19.
fi_part('uusimaa', 37355, 'FI-18', language=["fi", "sv"])
fi_part('southwest_finland', 38092, 'FI-19', language=["fi", "sv"])
# Åland is its own ISO 3166-1 entity (AX), not an FI subdivision code.
fi_part('aland', 1650407, 'AX', language="sv")
#########################################################################
# Mainland Portugal as one country, plus the two autonomous island regions as
# separate extracts (different UTM zones, hence separate proj values).
default_country("europe", "portugal", 295480, {"country": "PT", "language": "pt", "proj": 32629}, download_repo=GEOFABRIK)
pt_part = gen_country('europe', 'portugal', download_repo=OSMFR, language='pt')
pt_part('azores', 6451096, 'PT', proj=32627)
pt_part('madeira', 6451097, 'PT', proj=32628)
#########################################################################
# Ukraine, split by oblast (plus the city of Kiev); ISO 3166-2:UA codes.
ua_oblasts = gen_country('europe', 'ukraine', download_repo=OSMFR, language='uk', proj=32636)
ua_oblasts('cherkasy_oblast', 91278, 'UA-71')
ua_oblasts('chernihiv_oblast', 71249, 'UA-74')
ua_oblasts('chernivtsi_oblast', 72526, 'UA-77')
ua_oblasts('dnipropetrovsk_oblast', 101746, 'UA-12')
ua_oblasts('donetsk_oblast', 71973, 'UA-14')
ua_oblasts('ivano-frankivsk_oblast', 72488, 'UA-26')
ua_oblasts('kharkiv_oblast', 71254, 'UA-63')
ua_oblasts('kherson_oblast', 71022, 'UA-65')
ua_oblasts('khmelnytskyi_oblast', 90742, 'UA-68')
ua_oblasts('kiev_oblast', 71248, 'UA-32')
ua_oblasts('kiev', 421866, 'UA-30')
ua_oblasts('kirovohrad_oblast', 101859, 'UA-35')
ua_oblasts('luhansk_oblast', 71971, 'UA-09')
ua_oblasts('lviv_oblast', 72380, 'UA-46')
ua_oblasts('mykolaiv_oblast', 72635, 'UA-48')
ua_oblasts('odessa_oblast', 72634, 'UA-51')
ua_oblasts('poltava_oblast', 91294, 'UA-53')
ua_oblasts('rivne_oblast', 71236, 'UA-56')
ua_oblasts('sumy_oblast', 71250, 'UA-59')
ua_oblasts('ternopil_oblast', 72525, 'UA-61')
ua_oblasts('vinnytsia_oblast', 90726, 'UA-05')
ua_oblasts('volyn_oblast', 71064, 'UA-07')
ua_oblasts('zakarpattia_oblast', 72489, 'UA-21')
ua_oblasts('zaporizhia_oblast', 71980, 'UA-23')
ua_oblasts('zhytomyr_oblast', 71245, 'UA-18')
#########################################################################
# Norway, split by county; ISO 3166-2:NO codes.
# Svalbard and Jan Mayen both fall under the shared ISO 3166-1 code SJ.
no_county = gen_country('europe', 'norway', download_repo=OSMFR, language='no', proj=32632)
no_county('nordland', 408105, 'NO-18')
no_county('troms', 407717, 'NO-19')
no_county('finnmark', 406389, 'NO-20')
no_county('troendelag', 406567, 'NO-23')
no_county('moere_og_romsdal', 406868, 'NO-15')
no_county('sogn_og_fjordane', 407787, 'NO-14')
no_county('hordaland', 404144, 'NO-12')
no_county('rogaland', 405836, 'NO-11')
no_county('aust-agder', 406015, 'NO-09')
no_county('vest-agder', 405929, 'NO-10')
no_county('oslo', 406091, 'NO-03')
no_county('akershus', 406106, 'NO-02')
no_county('oestfold', 406060, 'NO-01')
no_county('vestfold', 404589, 'NO-07')
no_county('telemark', 405156, 'NO-08')
no_county('buskerud', 412297, 'NO-06')
no_county('oppland', 412377, 'NO-05')
no_county('hedmark', 412436, 'NO-04')
no_county('svalbard', 1337397, 'SJ')
no_county('jan_mayen', 1337126, 'SJ')
#########################################################################
# Antarctica has no country code / boundary relation here; polar stereographic projection.
default_country_simple("", "antarctica", None, {"proj": 3031}, download_repo=GEOFABRIK)
#########################################################################
default_country("north-america", "greenland", 2184073, {"country": "GL", "language": "kl", "proj": 3184})
default_country("north-america", "united_kingdom_bermuda", 1993208, {"country": "BM", "language": "en", "driving_side": "left", "proj": 32620}, download_repo=OSMFR, download_country="bermuda")
#########################################################################
# Mexico, split by state; ISO 3166-2:MX codes.
mexico_state = gen_country('north-america', 'mexico', download_repo=OSMFR, language='es', proj=32614, exclude=[
    "osmosis_highway_name_close" # Complicated Street Numbering
])
mexico_state("aguascalientes", 2610002, "MX-AGU")
mexico_state("baja_california", 2589601, "MX-BCN")
mexico_state("baja_california_sur", 2589611, "MX-BCS")
mexico_state("campeche", 2568834, "MX-CAM")
mexico_state("chiapas", 2556679, "MX-CHP")
mexico_state("chihuahua", 1673425, "MX-CHH")
mexico_state("coahuila", 1661524, "MX-COA")
mexico_state("colima", 2340912, "MX-COL")
mexico_state("durango", 2399740, "MX-DUR")
mexico_state("guanajuato", 2340909, "MX-GUA")
mexico_state("guerrero", 2439316, "MX-GRO")
mexico_state("hidalgo", 1376490, "MX-HID")
mexico_state("jalisco", 2340910, "MX-JAL")
mexico_state("mexico_city", 1376330, "MX-CMX")
mexico_state("michoacan", 2340636, "MX-MIC")
mexico_state("morelos", 1376332, "MX-MOR")
mexico_state("nayarit", 7695827, "MX-NAY")
mexico_state("nuevo_leon", 1661523, "MX-NLE")
mexico_state("oaxaca", 2529822, "MX-OAX")
mexico_state("puebla", 1376491, "MX-PUE")
mexico_state("queretaro", 2340903, "MX-QUE")
mexico_state("quintana_roo", 2614434, "MX-ROO")
mexico_state("san_luis_potosi", 4086617, "MX-SLP")
mexico_state("sinaloa", 2455086, "MX-SIN")
mexico_state("sonora", 1673426, "MX-SON")
mexico_state("state_of_mexico", 1376489, "MX-MEX")
mexico_state("tabasco", 2556680, "MX-TAB")
mexico_state("tamaulipas", 2415518, "MX-TAM")
mexico_state("tlaxcala", 1375274, "MX-TLA")
mexico_state("veracruz", 2415761, "MX-VER")
mexico_state("yucatan", 2614435, "MX-YUC")
mexico_state("zacatecas", 2399704, "MX-ZAC")
#########################################################################
# United States, split by state (ISO 3166-2:US). Each state uses its own
# State Plane / UTM EPSG code. California is further split by county below
# (the county codes look like an internal US-CA-XXX scheme, not ISO — confirm).
us_state = gen_country('north-america/us', country_base='usa', language='en', speed_limit_unit='mph')
us_state("alabama", 161950, "US-AL", proj=26916)
us_state("alaska", 1116270, "US-AK", proj=26905)
us_state("arizona", 162018, "US-AZ", proj=26912)
us_state("arkansas", 161646, "US-AR", proj=26715)
us_ca_county = gen_country('north-america/us-west/california', country_base='usa_california', download_repo=OSMFR, language='en', proj=26910)
us_ca_county("alameda", 396499, "US-CA-ALA")
us_ca_county("alpine", 396497, "US-CA-ALP")
us_ca_county("amador", 396490, "US-CA-AMA")
us_ca_county("butte", 396508, "US-CA-BUT")
us_ca_county("calaveras", 396470, "US-CA-CAL")
us_ca_county("colusa", 396476, "US-CA-COL")
us_ca_county("contra_costa", 396462, "US-CA-CON")
us_ca_county("del_norte", 396503, "US-CA-DEL")
us_ca_county("el_dorado", 396481, "US-CA-ELD")
us_ca_county("fresno", 396492, "US-CA-FRE")
us_ca_county("glenn", 396493, "US-CA-GLE")
us_ca_county("humboldt", 396458, "US-CA-HUM")
us_ca_county("imperial", 396515, "US-CA-IMP")
us_ca_county("inyo", 396491, "US-CA-INY")
us_ca_county("kern", 396494, "US-CA-KER")
us_ca_county("kings", 396480, "US-CA-KIN")
us_ca_county("lake", 396502, "US-CA-LAK")
us_ca_county("lassen", 396469, "US-CA-LAS")
us_ca_county("los_angeles", 396479, "US-CA-LOS")
us_ca_county("madera", 396488, "US-CA-MAD")
us_ca_county("marin", 396461, "US-CA-MRN")
us_ca_county("mariposa", 396465, "US-CA-MP")
us_ca_county("mendocino", 396489, "US-CA-MEN")
us_ca_county("merced", 396504, "US-CA-MER")
us_ca_county("modoc", 396506, "US-CA-MOD")
us_ca_county("mono", 396472, "US-CA-MNO")
us_ca_county("monterey", 396485, "US-CA-MNT")
us_ca_county("napa", 396463, "US-CA-NAP")
us_ca_county("nevada", 396464, "US-CA-NEV")
us_ca_county("orange", 396466, "US-CA-ORA")
us_ca_county("placer", 396511, "US-CA-PLA")
us_ca_county("plumas", 396477, "US-CA-PLU")
us_ca_county("riverside", 396495, "US-CA-RIV")
us_ca_county("sacramento", 396460, "US-CA-SAC")
us_ca_county("san_benito", 396500, "US-CA-SBT")
us_ca_county("san_bernardino", 396509, "US-CA-SBD")
us_ca_county("san_diego", 396482, "US-CA-SDG")
us_ca_county("san_francisco", 396487, "US-CA-SFO")
us_ca_county("san_joaquin", 396467, "US-CA-SJQ")
us_ca_county("san_luis_obispo", 396496, "US-CA-SLO")
us_ca_county("san_mateo", 396498, "US-CA-SMT")
us_ca_county("santa_barbara", 396510, "US-CA-SBA")
us_ca_county("santa_clara", 396501, "US-CA-SCL")
us_ca_county("santa_cruz", 7870163, "US-CA-SCZ")
us_ca_county("shasta", 396512, "US-CA-SHA")
us_ca_county("sierra", 396474, "US-CA-SIE")
us_ca_county("siskiyou", 396483, "US-CA-SIS")
us_ca_county("solano", 396513, "US-CA-SOL")
us_ca_county("sonoma", 396468, "US-CA-SON")
us_ca_county("stanislaus", 396514, "US-CA-STA")
us_ca_county("sutter", 396478, "US-CA-SUT")
us_ca_county("tehama", 396486, "US-CA-TEH")
us_ca_county("trinity", 396484, "US-CA-TRI")
us_ca_county("tulare", 396459, "US-CA-TUL")
us_ca_county("tuolumne", 396471, "US-CA-TUO")
us_ca_county("ventura", 396505, "US-CA-VEN")
us_ca_county("yolo", 396507, "US-CA-YOL")
us_ca_county("yuba", 396475, "US-CA-YUB")
us_state("colorado", 161961, "US-CO", proj=26713)
us_state("connecticut", 165794, "US-CT", proj=3507)
us_state("delaware", 162110, "US-DE", proj=3509)
us_state("district-of-columbia", 162069, "US-DC", proj=3559)
us_state("florida", 162050, "US-FL", proj=3513)
us_state("georgia", 161957, "US-GA", proj=26917)
us_state("hawaii", 166563, "US-HI", proj=2783) # note: projection for hawaii is the one used for center islands, not for the whole
us_state("idaho", 162116, "US-ID", proj=3741)
us_state("illinois", 122586, "US-IL", proj=3746)
us_state("indiana", 161816, "US-IN", proj=3745)
us_state("iowa", 161650, "US-IA", proj=3745)
us_state("kansas", 161644, "US-KS", proj=3744)
us_state("kentucky", 161655, "US-KY", proj=3088)
us_state("louisiana", 224922, "US-LA", proj=3745, exclude=[
    'osmosis_waterway', # Too many swamp, not suitable
])
us_state("maine", 63512, "US-ME", proj=3749)
us_state("maryland", 162112, "US-MD", proj=26985)
us_state("massachusetts", 61315, "US-MA", proj=2805)
us_state("michigan", 165789, "US-MI", proj=3746)
us_state("minnesota", 165471, "US-MN", proj=26992)
us_state("mississippi", 161943, "US-MS", proj=3816)
us_state("missouri", 161638, "US-MO", proj=3601)
us_state("montana", 162115, "US-MT", proj=3604)
us_state("nebraska", 161648, "US-NE", proj=3606)
us_state("nevada", 165473, "US-NV", proj=3607)
us_state("new-hampshire", 67213, "US-NH", proj=3613)
us_state("new-jersey", 224951, "US-NJ", proj=3615)
us_state("new-mexico", 162014, "US-NM", proj=3617)
us_state("new-york", 61320, "US-NY", proj=3623)
us_state("north-carolina", 224045, "US-NC", proj=3631)
us_state("north-dakota", 161653, "US-ND", proj=3633)
us_state("ohio", 162061, "US-OH", proj=26917)
us_state("oklahoma", 161645, "US-OK", proj=3639)
us_state("oregon", 165476, "US-OR", proj=3643)
us_state("pennsylvania", 162109, "US-PA", proj=3651)
us_state("rhode-island", 392915, "US-RI", proj=3653)
us_state("south-carolina", 224040, "US-SC", proj=3655)
us_state("south-dakota", 161652, "US-SD", proj=3659)
us_state("tennessee", 161838, "US-TN", proj=3661)
us_state("texas", 114690, "US-TX", proj=3082)
us_state("utah", 161993, "US-UT", proj=3675)
us_state("vermont", 60759, "US-VT", proj=3684)
us_state("virginia", 224042, "US-VA", proj=3968)
us_state("washington", 165479, "US-WA", proj=3725)
us_state("west-virginia",162068, "US-WV", proj=3747)
us_state("wisconsin", 165466, "US-WI", proj=3695)
us_state("wyoming", 161991, "US-WY", proj=26913)
# US Pacific territories are filed under oceania with their own ISO codes.
default_country("oceania", "usa_guam", 306001, {"country": "GU", "language": "en", "proj": 32654}, download_repo=OSMFR, download_country="guam")
default_country("oceania", "usa_northern_mariana_islands", 306004, {"country": "MP", "language": "en", "proj": 32654}, download_repo=OSMFR, download_country="northern_mariana_islands")
default_country("oceania", "usa_american_samoa", 2177187, {"country": "AS", "language": "en", "proj": 32601}, download_repo=OSMFR, download_country="american_samoa")
#########################################################################
# Canada. Shared options dict reused (via **canada_options) by the province,
# Ontario-region and Quebec-region registrars; the waterway analyser is
# excluded country-wide.
canada_options = {'download_repo': OSMFR, 'addr:street_distance': 2000,
                  'phone_code': '1', 'phone_len': 10, 'phone_format': r"^[+]%s[- ][0-9]{3}[- ][0-9]{3}[- ][0-9]{4}$", 'suffix_separators': "x",
                  'exclude': [
                      'osmosis_waterway',
                  ]}
canada_province = gen_country('north-america', 'canada', language='en', **canada_options)
canada_province("alberta", 391186, "CA-AB", proj=32610)
canada_province("british_columbia", 390867, "CA-BC", proj=32609)
canada_province("manitoba", 390841, "CA-MB", proj=32615)
canada_province("new_brunswick", 68942, "CA-NB", proj=32619)
canada_province("newfoundland_and_labrador", 391196, "CA-NL", proj=32621)
canada_province("northwest_territories", 391220, "CA-NT", proj=32612)
canada_province("nova_scotia", 390558, "CA-NS", proj=32620)
canada_province("nunavut", 390840, "CA-NU", proj=32616)
# Ontario and Quebec are too large for single extracts and are split by region.
canada_ontario_region = gen_country('north-america', 'canada/ontario', proj=32616, country_code='CA-ON', language='en', **canada_options)
canada_ontario_region('central_ontario', 9330364)
canada_ontario_region('eastern_ontario', 9330323)
canada_ontario_region('golden_horseshoe', 9330407)
canada_ontario_region('northeastern_ontario', 9330447)
canada_ontario_region('northwestern_ontario', 9330452)
canada_ontario_region('southwestern_ontario', 9330436)
canada_province("prince_edward_island", 391115, "CA-PE", proj=32620)
canada_quebec_region = gen_country('north-america', 'canada/quebec', proj=2138, country_code='CA-QC', language='fr', **canada_options)
canada_quebec_region('abitibi_temiscamingue', 8107213, 'CA-QC-ABT')
canada_quebec_region('bas_saint_laurent', 8137316, 'CA-QC-BSL')
canada_quebec_region('capitale_nationale', 8114679, 'CA-QC-CAPN')
canada_quebec_region('centre_du_quebec', 8100165, 'CA-QC-CQC')
canada_quebec_region('chaudiere_appalaches', 8138409, 'CA-QC-CHAPP')
canada_quebec_region('cote_nord', 8126390, 'CA-QC-CN')
canada_quebec_region('estrie', 8098121, 'CA-QC-ESTR')
canada_quebec_region('gaspesie_iles_de_la_madeleine', 7485821, 'CA-QC-GIM')
canada_quebec_region('lanaudiere', 8098959, 'CA-QC-LAN')
canada_quebec_region('laurentides', 8098885, 'CA-QC-LAUR')
canada_quebec_region('laval', 3532125, 'CA-QC-LAV')
canada_quebec_region('mauricie', 8098985, 'CA-QC-MAUR')
canada_quebec_region('monteregie', 8093332, 'CA-QC-MGIE')
canada_quebec_region('montreal', 1571328, 'CA-QC-MTL')
canada_quebec_region('nord_du_quebec', 8118159, 'CA-QC-NQC')
canada_quebec_region('outaouais', 8100164, 'CA-QC-OUT')
canada_quebec_region('saguenay_lac_saint_jean', 8120111, 'CA-QC-SLSJ')
canada_province("saskatchewan", 391178, "CA-SK", proj=32613)
canada_province("yukon", 391455, "CA-YT", proj=32608)
#########################################################################
# African countries (standalone extracts).
default_country("africa", "algeria", 192756, {"country": "DZ", "language": ["ar", "fr"], "proj": 32631}, download_repo=OSMFR)
default_country("africa", "angola", 195267, {"country": "AO", "language": "pt", "proj": 32733}, download_repo=OSMFR)
default_country("africa", "benin", 192784, {"country": "BJ", "language": "fr", "proj": 32631, 'phone_code': '229', 'phone_len': 8, 'phone_international': '00'}, download_repo=OSMFR)
default_country("africa", "botswana", 1889339, {"country": "BW", "language": "en", "driving_side": "left", "proj": 32734})
default_country("africa", "burkina_faso", 192783, {"country": "BF", "language": "fr", "proj": 32630}, download_repo=OSMFR)
default_country("africa", "burundi", 195269, {"country": "BI", "language": "fr", "proj": 32735}, download_repo=OSMFR)
default_country("africa", "cameroon", 192830, {"country": "CM", "language": "fr", "proj": 32632}, download_repo=OSMFR)
default_country("africa", "cape_verde", 535774, {"country": "CV", "language": "pt", "proj": 32626}, download_repo=OSMFR)
default_country("africa", "central_african_republic", 192790, {"country": "CF", "language": "fr", "proj": 32634}, download_repo=OSMFR)
default_country("africa", "chad", 2361304, {"country": "TD", "language": ["ar", "fr"], "proj": 32634}, download_repo=OSMFR)
default_country("africa", "comoros", 535790, {"country": "KM", "language": ["ar", "fr"], "proj": 32738}, download_repo=OSMFR)
default_country("africa", "congo_brazzaville", 192794, {"country": "CG", "language": "fr", "proj": 32733}, download_repo=OSMFR)
default_country("africa", "congo_kinshasa", 192795, {"country": "CD", "language": "fr", "proj": 32734}, download_repo=OSMFR)
default_country("africa", "djibouti", 192801, {"country": "DJ", "language": ["fr", "ar"], "proj": 32638, "multilingual-style": "dj"}, download_repo=OSMFR)
default_country("africa", "egypt", 1473947, {"country": "EG", "language": "ar", "proj": 32635})
default_country("africa", "equatorial_guinea", 192791, {"country": "GQ", "language": "es", "proj": 32732}, download_repo=OSMFR)
default_country("africa", "eritrea", 296961, {"country": "ER", "proj": 32637}, download_repo=OSMFR)
default_country("africa", "ethiopia", 192800, {"country": "ET", "proj": 32638})
default_country("africa", "gabon", 192793, {"country": "GA", "language": "fr", "proj": 32732}, download_repo=OSMFR)
default_country("africa", "gambia", 192774, {"country": "GM", "language": "en", "proj": 32628}, download_repo=OSMFR)
default_country("africa", "ghana", 192781, {"country": "GH", "language": "en", "proj": 32630}, download_repo=OSMFR)
default_country("africa", "guinea", 192778, {"country": "GN", "language": "fr", "proj": 32628}, download_repo=OSMFR)
default_country("africa", "guinea-bissau", 192776, {"country": "GW", "language": "pt", "proj": 32628})
default_country("africa", "ivory_coast", 192779, {"country": "CI", "language": "fr", "proj": 32630}, download_repo=OSMFR)
default_country("africa", "kenya", 192798, {"country": "KE", "language": "en", "driving_side": "left", "proj": 32737}, download_repo=OSMFR)
default_country("africa", "lesotho", 2093234, {"country": "LS", "language": "en", "driving_side": "left", "proj": 32735}, download_repo=OSMFR)
default_country("africa", "liberia", 192780, {"country": "LR", "language": "en", "speed_limit_unit": "mph", "proj": 32629})
default_country("africa", "libya", 192758, {"country": "LY", "language": "ar", "proj": 32633})
default_country("africa", "madagascar", 447325, {"country": "MG", "language": ["fr", "mg"], "proj": 32738}, download_repo=GEOFABRIK)
default_country("africa", "malawi", 195290, {"country": "MW", "language": "en", "driving_side": "left", "proj": 32736}, download_repo=OSMFR)
default_country("africa", "mali", 192785, {"country": "ML", "language": "fr", "proj": 32630}, download_repo=OSMFR)
default_country("africa", "mauritania", 192763, {"country": "MR", "language": "ar", "proj": 32628}, download_repo=OSMFR)
default_country("africa", "mauritius", 535828, {"country": "MU", "language": ["en", "fr"], "driving_side": "left", "proj": 32740}, download_repo=OSMFR)
default_country("africa", "morocco", 3630439, {"country": "MA", "language": ["ar", "fr", "zgh", "ber"], "proj": 32629, "multilingual-style": "ma"})
default_country("africa", "mozambique", 195273, {"country": "MZ", "language": "pt", "driving_side": "left", "proj": 32736}, download_repo=OSMFR)
default_country("africa", "namibia", 195266, {"country": "NA", "language": "en", "driving_side": "left", "proj": 32733}, download_repo=OSMFR)
default_country("africa", "niger", 192786, {"country": "NE", "language": "fr", "proj": 32632}, download_repo=OSMFR)
default_country("africa", "nigeria", 192787, {"country": "NG", "language": "en", "proj": 32633})
default_country("africa", "norway_bouvet_island", 2425963, {"country": "BV", "language": "no", "proj": 32729}, download_repo=OSMFR, download_country="bouvet_island")
default_country("africa", "rwanda", 171496, {"country": "RW", "language": ["en", "fr"], "proj": 32735}, download_repo=OSMFR)
default_country("africa", "sao_tome_and_principe", 535880, {"country": "ST", "language": "pt", "proj": 32632}, download_repo=OSMFR)
default_country("africa", "senegal", 192775, {"country": "SN", "language": "fr", "proj": 32628}, download_repo=OSMFR)
default_country("africa", "seychelles", 536765, {"country": "SC", "language": ["en", "fr"], "driving_side": "left", "proj": 32739}, download_repo=OSMFR)
default_country("africa", "sierra-leone", 192777, {"country": "SL", "language": "en", "proj": 32629})
default_country("africa", "somalia", 192799, {"country": "SO", "language": "so", "proj": 32638})
default_country("africa", "south_africa", 87565, {"country": "ZA", "language": "en", "driving_side": "left", "proj": 32735}, download_repo=OSMFR)
default_country("africa", "south_sudan", 1656678, {"country": "SS", "language": "en", "proj": 32635}, download_repo=OSMFR)
default_country("africa", "sudan", 192789, {"country": "SD", "language": ["ar", "en"], "proj": 32636}, download_repo=OSMFR)
default_country("africa", "swaziland", 88210, {"country": "SZ", "language": "en", "driving_side": "left", "proj": 32736}, download_repo=OSMFR)
default_country("africa", "tanzania", 195270, {"country": "TZ", "language": "en", "driving_side": "left", "proj": 32736})
default_country("africa", "togo", 192782, {"country": "TG", "language": "fr", "proj": 32631}, download_repo=OSMFR)
default_country("africa", "tunisia", 192757, {"country": "TN", "language": ["ar", "fr"], "proj": 32632}, download_repo=OSMFR)
default_country("africa", "uganda", 192796, {"country": "UG", "language": "en", "driving_side": "left", "proj": 32636}, download_repo=OSMFR)
default_country("africa", "united_kingdom_saint_helena_ascension_tristan_da_cunha", 1964272, {"country": "SH", "language": "en", "driving_side": "left", "proj": 32729}, download_repo=OSMFR, download_country="saint_helena_ascension_tristan_da_cunha")
default_country("africa", "western_sahara", 2559126, {"country": "EH", "proj": 32629}, download_repo=OSMFR)
default_country("africa", "zambia", 195271, {"country": "ZM", "language": "en", "driving_side": "left", "proj": 32736}, download_repo=OSMFR)
default_country("africa", "zimbabwe", 195272, {"country": "ZW", "language": "en", "driving_side": "left", "proj": 32736}, download_repo=OSMFR)
# Enable the way-approximation analyser for selected African countries.
config["chad"].analyser["osmosis_way_approximate"] = "xxx"
config["djibouti"].analyser["osmosis_way_approximate"] = "xxx"
config["kenya"].analyser["osmosis_way_approximate"] = "xxx"
config["madagascar"].analyser["osmosis_way_approximate"] = "xxx"
config["mali"].analyser["osmosis_way_approximate"] = "xxx"
config["senegal"].analyser["osmosis_way_approximate"] = "xxx"
config["togo"].analyser["osmosis_way_approximate"] = "xxx"
# Drop the building-shapes analyser for every /africa/ extract except the
# French territories Mayotte and Réunion (matched by their download URL).
for country, c in config.items():
    if c.download and "url" in c.download and "/africa/" in c.download["url"] and not ("mayotte" in c.download["url"] or "reunion" in c.download["url"]):
        del(c.analyser["osmosis_building_shapes"])
#########################################################################
default_country("asia", "afghanistan", 303427, {"country": "AF", "proj": 32641}, download_repo=OSMFR)
default_country("asia", "armenia", 364066, {"country": "AM", "language": "hy", "proj": 32641}, download_repo=OSMFR)
default_country("asia", "azerbaijan", 364110, {"country": "AZ", "language": "az", "proj": 32638})
default_country("asia", "bangladesh", 184640, {"country": "BD", "language": "bn", "driving_side": "left", "proj": 32646})
default_country("asia", "bahrain", 378734, {"country": "BH", "language": "ar","proj": 32639}, download_repo=OSMFR)
default_country("asia", "bhutan", 184629, {"country": "BT", "language": ["dz", "en"], "proj": 32646}, download_repo=OSMFR)
default_country("asia", "brunei", 2103120, {"country": "BN", "driving_side": "left", "language": "ms", "proj": 32650}, download_repo=OSMFR)
default_country("asia", "cambodia", 49898, {"country": "KHM", "language": "km", "proj": 32648}, download_repo=OSMFR)
# Asia (continued).
default_country("asia", "east_timor", 305142, {"country": "TL", "language": "pt", "proj": 32651}, download_repo=OSMFR)
default_country("asia", "georgia", 28699, {"country": "GE", "language": "ka", "proj": 32637}, download_repo=OSMFR)
default_country("asia", "israel", 1473946, {"country": "IL", "language": ["he", "ar"], "proj": 32636}, download_repo=OSMFR)
default_country("asia", "iran", 304938, {"country": "IR", "language": "fa","proj": 32640}, download_repo=GEOFABRIK)
default_country("asia", "iraq", 304934, {"country": "IQ", "language": "ar", "proj": 32638})
default_country("asia", "jordan", 184818, {"country": "JO", "language": "ar", "proj": 32637})
default_country("asia", "kazakhstan", 214665, {"country": "KZ", "proj": 32640}, download_repo=GEOFABRIK)
default_country("asia", "kuwait", 305099, {"country": "KW", "language": "ar","proj": 32639}, download_repo=OSMFR)
default_country("asia", "kyrgyzstan", 178009, {"country": "KG", "language": ["ky", "ru"], "proj": 32643})
default_country("asia", "laos", 49903, {"country": "LA", "language": ["lo", "en"], "proj": 32648}, download_repo=OSMFR)
default_country("asia", "lebanon", 184843, {"country": "LB", "language": "ar", "proj": 32636})
default_country("asia", "malaysia", 2108121, {"country": "MY", "language": "ms", "driving_side": "left", "proj": 32649}, download_repo=OSMFR)
default_country("asia", "maldives", 536773, {"country": "MV", "language": "dv", "proj": 32643}, download_repo=OSMFR)
default_country("asia", "mongolia", 161033, {"country": "MN", "language": "mn", "proj": 32648})
default_country("asia", "myanmar", 50371, {"country": "MM", "language": "my", "proj": 32646}, download_repo=OSMFR)
# download_country overrides the slug used for the extract file name.
default_country("asia", "north_korea", 192734, {"country": "KP", "language": "ko", "proj": 32652}, download_country="north-korea")
default_country("asia", "nepal", 184633, {"country": "NP", "language": "ne", "driving_side": "left", "proj": 32645})
default_country("asia", "oman", 305138, {"country": "OM", "language": "ar","proj": 32640}, download_repo=OSMFR)
default_country("asia", "pakistan", 307573, {"country": "PK", "language": ["en", "ur"], "driving_side": "left", "proj": 32642})
default_country("asia", "palestine", 1703814, {"country": "PS", "language": "ar", "proj": 32636}, download_repo=OSMFR)
default_country("asia", "philippines", 2850940, {"country": "PH", "language": "en", "proj": 32651, 'phone_code': '63', 'phone_len': [7, 8], 'phone_international': '00'}, download_repo=GEOFABRIK)
default_country("asia", "qatar", 305095, {"country": "QA", "language": "ar","proj": 32639}, download_repo=OSMFR)
default_country("asia", "saudi_arabia", 307584, {"country": "SA", "language": "ar","proj": 32637}, download_repo=OSMFR)
default_country("asia", "singapore", 536780, {"country": "SG", "language": "en", "driving_side": "left", "proj": 32648}, download_repo=OSMFR)
default_country("asia", "sri-lanka", 536807, {"country": "LK", "language": ["en", "si", "ta"], "driving_side": "left", "proj": 32644})
default_country("asia", "south_korea", 307756, {"country": "KR", "language": "ko", "proj": 32652}, download_country="south-korea")
default_country("asia", "syria", 184840, {"country": "SY", "language": "ar", "proj": 32637})
default_country("asia", "tajikistan", 214626, {"country": "TJ", "language": "tg", "proj": 32642})
default_country("asia", "taiwan", 3777248, {"country": "TW", "language": ["zh_TW", "en"], "proj": 32651}, download_repo=GEOFABRIK)
default_country("asia", "thailand", 2067731, {"country": "TH", "language": "th", "proj": 32647, "driving_side": "left"})
default_country("asia", "turkmenistan", 223026, {"country": "TM", "language": "tk", "proj": 32640})
# Keep a handle on the UAE config so one analyser can be disabled right after.
united_arab_emirates = default_country("asia", "united_arab_emirates", 307763, {"country": "AE", "language": "ar","proj": 32640}, download_repo=OSMFR)
del(united_arab_emirates.analyser["osmosis_highway_name_close"]) # Complicated Street Numbering
default_country("asia", "united_kingdom_british_indian_ocean_territory", 1993867, {"country": "IO", "language": "en", "driving_side": "left", "proj": 32742}, download_repo=OSMFR, download_country="british_indian_ocean_territory")
default_country("asia", "uzbekistan", 196240, {"country": "UZ", "proj": 32640}, download_repo=GEOFABRIK)
default_country("asia", "vietnam", 49915, {"country": "VN", "language": "vi", "proj": 32648}, download_repo=GEOFABRIK)
default_country("asia", "yemen", 305092, {"country": "YE", "language": "ar","proj": 32638}, download_repo=GEOFABRIK)
#########################################################################
# Indonesia, per province. gen_country returns a factory pre-bound with the
# common options; each call registers one province as
# (slug, relation id, ISO 3166-2 subdivision code).
id_province = gen_country('asia', 'indonesia', download_repo=OSMFR, language='id', proj=23837)
id_province("aceh", 2390836, "ID-AC")
id_province("bali", 1615621, "ID-BA")
id_province("bangka_belitung_islands", 3797243, "ID-BB")
id_province("banten", 2388356, "ID-BT")
id_province("bengkulu", 2390837, "ID-BE")
id_province("central_java", 2388357, "ID-JT")
id_province("central_kalimantan", 2388613, "ID-KT")
id_province("central_sulawesi", 2388664, "ID-ST")
id_province("east_java", 3438227, "ID-JI")
id_province("east_kalimantan", 5449459, "ID-KI")
id_province("east_nusa_tenggara", 2396778, "ID-NT")
id_province("gorontalo", 2388665, "ID-GO")
id_province("jakarta", 6362934, "ID-JK")
id_province("jambi", 2390838, "ID-JA")
id_province("lampung", 2390839, "ID-LA")
id_province("maluku", 2396795, "ID-MA")
id_province("north_kalimantan", 5449460, "ID-KU")
id_province("north_maluku", 2396796, "ID-MU")
id_province("north_sulawesi", 2388666, "ID-SA")
id_province("north_sumatra", 2390843, "ID-SU")
id_province("papua", 4521144, "ID-PA")
id_province("riau", 2390840, "ID-RI")
id_province("riau_islands", 3797244, "ID-KR")
id_province("southeast_sulawesi", 2388668, "ID-SG")
id_province("south_kalimantan", 2388615, "ID-KS")
id_province("south_sulawesi", 2388667, "ID-SN")
id_province("south_sumatra", 2390842, "ID-SS")
id_province("west_java", 2388361, "ID-JB")
id_province("west_kalimantan", 2388616, "ID-KB")
id_province("west_nusa_tenggara", 1615622, "ID-NB")
id_province("west_papua", 4521145, "ID-PB")
id_province("west_sulawesi", 2388669, "ID-SR")
id_province("west_sumatra", 2390841, "ID-SB")
id_province("yogyakarta", 5616105, "ID-YO")
#########################################################################
# central america
# Central America mainland.
default_country("central-america", "belize", 287827, {"country": "BZ", "language": "en", "speed_limit_unit": "mph", "proj": 32616})
default_country("central-america", "costa_rica", 287667, {"country": "CR", "language": "es", "proj": 32617}, download_repo=OSMFR)
default_country("central-america", "el_salvador", 1520612, {"country": "SV", "language": "es", "proj": 32616}, download_repo=OSMFR)
default_country("central-america", "guatemala", 1521463, {"country": "GT", "language": "es", "proj": 32616})
default_country("central-america", "honduras", 287670, {"country": "HN", "language": "es", "proj": 32616}, download_repo=OSMFR)
default_country("central-america", "panama", 287668, {"country": "PA", "language": "es", "proj": 32617}, download_repo=OSMFR)
default_country("central-america", "trinidad_and_tobago", 555717, {"country": "TT", "language": "en", "driving_side": "left","proj": 32620}, download_repo=OSMFR)
# caribbean
# Haiti and the Dominican Republic share one Geofabrik extract
# ("haiti-and-domrep"); each is registered separately against it.
default_country("central-america", "haiti", 307829, {"country": "HT", "language": "fr", "proj": 32618},
                download_repo=GEOFABRIK, download_country="haiti-and-domrep")
config["haiti"].analyser["osmosis_way_approximate"] = "xxx"
default_country("central-america", "antigua_and_barbuda", 536900, {"country": "BB", "language": "en", "driving_side": "left", "proj": 32620}, download_repo=OSMFR)
# Caribbean islands (continued).
default_country("central-america", "barbados", 547511, {"country": "BB", "language": "en", "driving_side": "left", "proj": 32621}, download_repo=OSMFR)
default_country("central-america", "bahamas", 547469, {"country": "BS", "language": "en", "driving_side": "left", "speed_limit_unit": "mph", "proj": 32620}, download_repo=OSMFR)
default_country("central-america", "cuba", 307833, {"country": "CU", "language": "es", "proj": 32617, "phone_code": "53", "phone_len": 8, "phone_international": "011", "phone_local_prefix": "0"})
default_country("central-america", "dominica", 307823, {"country": "DM", "language": "en", "driving_side": "left", "proj": 32620}, download_repo=OSMFR)
default_country("central-america", "dominican_republic", 307828, {"country": "DO", "language": "es", "proj": 32619}, download_repo=GEOFABRIK, download_country="haiti-and-domrep")
default_country("central-america", "grenada", 550727, {"country": "GD", "language": "en", "driving_side": "left", "proj": 32620}, download_repo=OSMFR)
default_country("central-america", "jamaica", 555017, {"country": "JM", "language": "en", "driving_side": "left", "proj": 32620}, download_repo=OSMFR)
default_country("central-america", "nicaragua", 287666, {"country": "NI", "language": "es", "proj": 32616}, download_repo=OSMFR)
default_country("central-america", "saint_lucia", 550728, {"country": "LC", "language": "en", "driving_side": "left", "proj": 32620}, download_repo=OSMFR)
default_country("central-america", "saint_vincent_and_the_grenadines", 550725, {"country": "VC", "language": "en", "proj": 32620}, download_repo=OSMFR)
# Caribbean dependencies: slugs are prefixed with the sovereign state
# (united_kingdom_*, usa_*), while download_country keeps the plain name.
default_country("central-america", "saint_kitts_and_nevis", 536899, {"country": "KN", "language": "en", "driving_side": "left", "proj": 2005}, download_repo=OSMFR)
default_country("central-america", "united_kingdom_anguilla", 2177161, {"country": "AI", "language": "en", "driving_side": "left", "proj": 32620}, download_repo=OSMFR, download_country="anguilla")
default_country("central-america", "united_kingdom_cayman_islands", 2185366, {"country": "KY", "language": "en", "driving_side": "left", "proj": 32617}, download_repo=OSMFR, download_country="cayman_islands")
default_country("central-america", "united_kingdom_montserrat", 537257, {"country": "MS", "language": "en", "driving_side": "left", "proj": 2005}, download_repo=OSMFR, download_country="montserrat")
default_country("central-america", "united_kingdom_turks_and_caicos_islands", 547479, {"country": "TC", "language": "en", "driving_side": "left", "proj": 32619}, download_repo=OSMFR, download_country="turks_and_caicos_islands")
default_country("central-america", "united_kingdom_virgin_islands", 285454, {"country": "VG", "language": "en", "driving_side": "left", "proj": 32620}, download_repo=OSMFR, download_country="british_virgin_islands")
default_country("central-america", "usa_puerto_rico", 4422604, {"country": "PR", "language": ["es", "en"], "proj": 32619, "boundary_detail_level": 6}, download_repo=OSMFR, download_country="puerto_rico")
default_country("central-america", "usa_virgin_islands", 286898, {"country": "VI", "language": "en", "driving_side": "left", "proj": 4437}, download_repo=OSMFR, download_country="usa_virgin_islands")
#########################################################################
# Oceania. Note New Zealand uses the "australia-oceania" area while the
# smaller island states use "oceania".
default_country("australia-oceania", "new-zealand", 556706, {"country": "NZ", "language": "en", "proj": 32759, "driving_side": "left", "addr:street_distance": 2000})
default_country("oceania", "cook_islands", 2184233, {"country": "CK", "language": "en", "driving_side": "left", "proj": 32603}, download_repo=OSMFR)
default_country("oceania", "marshall_islands", 571771, {"country": "MH", "language": "en", "proj": 32660}, download_repo=OSMFR)
default_country("oceania", "nauru", 571804, {"country": "NR", "language": "en", "driving_side": "left", "proj": 32659}, download_repo=OSMFR)
default_country("oceania", "niue", 1558556, {"country": "NU", "language": "en", "driving_side": "left", "proj": 32602}, download_repo=OSMFR)
default_country("oceania", "palau", 571805, {"country": "PW", "language": "en", "proj": 32653}, download_repo=OSMFR)
default_country("oceania", "micronesia", 571802, {"country": "FM", "language": "en", "speed_limit_unit": "mph", "proj": 32656}, download_repo=OSMFR)
default_country("oceania", "papua_new_guinea", 307866, {"country": "PG", "language": "en","proj": 32755}, download_repo=OSMFR)
# Oceania (continued).
default_country("oceania", "samoa", 1872673, {"country": "WS", "language": "en", "driving_side": "left", "speed_limit_unit": "mph", "proj": 32602}, download_repo=OSMFR)
default_country("oceania", "solomon_islands", 1857436, {"country": "SB", "language": "en", "driving_side": "left", "proj": 32657}, download_repo=OSMFR)
default_country("oceania", "new_zealand_tokelau", 2186600, {"country": "TK", "language": "en", "driving_side": "left", "proj": 32602}, download_repo=OSMFR, download_country="tokelau")
default_country("oceania", "tonga", 2186665, {"country": "TO", "language": "en", "driving_side": "left", "proj": 32601}, download_repo=OSMFR)
default_country("oceania", "tuvalu", 2177266, {"country": "TV", "language": "en", "driving_side": "left", "proj": 32660}, download_repo=OSMFR)
default_country("oceania", "united_kingdom_pitcairn", 2185375, {"country": "PN", "language": "en", "driving_side": "left", "proj": 32709}, download_repo=OSMFR, download_country="pitcairn")
default_country("oceania", "vanuatu", 2177246, {"country": "VU", "language": ["en", "fr"], "proj": 32658}, download_repo=OSMFR)
#########################################################################
# Countries registered under the special "merge" area.
default_country("merge", "fiji", 571747, {"country": "FJ", "language": "en", "driving_side": "left", "proj": 32660}, download_repo=OSMFR)
# Fix: Kiribati's ISO 3166-1 alpha-2 code is "KI"; "KL" is not an assigned
# country code.
default_country("merge", "kiribati", 571178, {"country": "KI", "language": "en", "driving_side": "left", "proj": 32660}, download_repo=OSMFR)
#########################################################################
# Australia, per state/territory, plus its external territories (which carry
# their own ISO alpha-2 codes: CX, CC, NF, ...). proj is set per call.
au_state = gen_country('oceania', 'australia', download_repo=OSMFR, language='en', driving_side='left')
au_state("australian_capital_territory", 2354197, "AU-ACT", proj=32755)
au_state("new_south_wales", 2316593, "AU-NSW", proj=32755)
au_state("northern_territory", 2316594, "AU-NT", proj=32753)
au_state("western_australia", 2316598, "AU-WA", proj=32750)
au_state("south_australia", 2316596, "AU-SA", proj=32753)
au_state("victoria", 2316741, "AU-VIC", proj=32755)
au_state("queensland", 2316595, "AU-QLD", proj=32755)
au_state("tasmania", 2369652, "AU-TAS", proj=32755)
au_state("christmas_island", 2177207, "CX", proj=32648)
au_state("cocos_islands", 82636, "CC", proj=32646)
au_state("coral_sea_islands", 3225677, "AU", proj=32655)
au_state("norfolk_island", 2574988, "NF", proj=32658)
#########################################################################
# South America.
default_country("south-america", "bolivia", 252645, {"country": "BO", "language": "es", "proj": 32720})
default_country("south-america", "chile", 167454, {"country": "CL", "language": "es", "proj": 32718})
# Keep a handle on Colombia's config to disable one analyser right after.
colombia = default_country("south-america", "colombia", 120027, {"country": "CO", "language": "es", "proj": 32618})
del(colombia.analyser["osmosis_highway_name_close"]) # Complicated Street Numbering
default_country("south-america", "ecuador", 108089, {"country": "EC", "language": "es", "proj": 32727})
default_country("south-america", "guyana", 287083, {"country": "GY", "language": "en", "driving_side": "left", "proj": 32621}, download_repo=OSMFR)
default_country("south-america", "paraguay", 287077, {"country": "PY", "language": "es", "proj": 32721}, download_repo=OSMFR)
default_country("south-america", "peru", 288247, {"country": "PE", "language": "es", "proj": 32718})
default_country("south-america", "suriname", 287082, {"country": "SR", "language": "nl", "driving_side": "left", "proj": 32621}, download_repo=OSMFR)
default_country("south-america", "united_kingdom_falkland", 2185374, {"country": "FK", "language": "en", "driving_side": "left", "proj": 32721}, download_repo=OSMFR, download_country="falkland")
default_country("south-america", "united_kingdom_south_georgia_and_south_sandwich", 1983628, {"country": "GS", "language": "en", "driving_side": "left", "proj": 32725}, download_repo=OSMFR, download_country="south_georgia_and_south_sandwich")
default_country("south-america", "uruguay", 287072, {"country": "UY", "language": "es", "proj": 32721})
default_country("south-america", "venezuela", 272644, {"country": "VE", "language": "es", "proj": 32620}, download_repo=OSMFR)
#########################################################################
# Argentina province factory.
# Fix: phone_international must be the dial-out prefix string '00'. The
# previous value was the bare integer literal 00 (i.e. the int 0), unlike
# every other phone_international in this file, which is the string '00'.
ar_state = gen_country('south-america', 'argentina', download_repo=OSMFR, language='es', proj=32720,
    phone_code='54', phone_local_prefix='0', phone_len=10, phone_international='00', suffix_separators='INT')
# Argentina provinces: (slug, relation id, ISO 3166-2 code).
ar_state('buenos_aires_city', 1224652, 'AR-C')
ar_state('buenos_aires', 1632167, 'AR-B')
ar_state('catamarca', 153545, 'AR-K')
ar_state('chaco', 153554, 'AR-H')
ar_state('chubut', 153548, 'AR-CU')
ar_state('cordoba', 3592494, 'AR-X')
ar_state('corrientes', 153552, 'AR-W')
ar_state('entre_rios', 153551, 'AR-E')
ar_state('formosa', 2849847, 'AR-P')
ar_state('jujuy', 153556, 'AR-Y')
ar_state('la_pampa', 153541, 'AR-L')
ar_state('la_rioja', 153536, 'AR-F')
ar_state('mendoza', 153540, 'AR-M')
ar_state('misiones', 153553, 'AR-N')
ar_state('neuquen', 1606727, 'AR-Q')
ar_state('rio_negro', 153547, 'AR-R')
ar_state('salta', 2405230, 'AR-A')
ar_state('san_juan', 153539, 'AR-J')
ar_state('san_luis', 153538, 'AR-D')
ar_state('santa_cruz', 153549, 'AR-Z')
ar_state('santa_fe', 153543, 'AR-S')
ar_state('santiago_del_estero', 153544, 'AR-G')
ar_state('tierra_del_fuego', 153550, 'AR-V')
ar_state('tucuman', 153558, 'AR-T')
#########################################################################
# Brazil: states grouped by macro-region; the first argument is the
# [region, state] path pair used for the extract layout.
br_region = gen_country('south-america', 'brazil', download_repo=OSMFR, language='pt', proj=32722, exclude=[
    'osmosis_highway_name_close', # Complicated Street Numbering
])
br_region(["north", "acre"], 326266, "BR-AC")
br_region(["northeast", "alagoas"], 303781, "BR-AL")
br_region(["north", "amapa"], 331463, "BR-AP")
br_region(["north", "amazonas"], 332476, "BR-AM")
br_region(["northeast", "bahia"], 362413, "BR-BA")
br_region(["northeast", "ceara"], 302635, "BR-CE")
br_region(["central-west", "distrito-federal"], 421151, "BR-DF")
br_region(["southeast", "espirito-santo"], 54882, "BR-ES")
br_region(["central-west", "goias"], 334443, "BR-GO")
br_region(["northeast", "maranhao"], 332924, "BR-MA")
br_region(["central-west", "mato-grosso"], 333597, "BR-MT")
br_region(["central-west", "mato-grosso-do-sul"], 334051, "BR-MS")
br_region(["southeast", "minas-gerais"], 315173, "BR-MG")
br_region(["north", "para"], 185579, "BR-PA")
br_region(["northeast", "paraiba"], 301464, "BR-PB")
br_region(["south", "parana"], 297640, "BR-PR")
br_region(["northeast", "pernambuco"], 303702, "BR-PE")
br_region(["northeast", "piaui"], 302819, "BR-PI")
br_region(["southeast", "rio-de-janeiro"], 57963, "BR-RJ")
br_region(["northeast", "rio-grande-do-norte"], 301079, "BR-RN")
br_region(["south", "rio-grande-do-sul"], 242620, "BR-RS")
br_region(["north", "rondonia"], 325866, "BR-RO")
br_region(["north", "roraima"], 326287, "BR-RR")
br_region(["south", "santa-catarina"], 296584, "BR-SC")
br_region(["southeast", "sao-paulo"], 298204, "BR-SP")
br_region(["northeast", "sergipe"], 303940, "BR-SE")
br_region(["north", "tocantins"], 336819, "BR-TO")
#########################################################################
# Italy: regions with national phone validation and Italy-specific merge
# analysers enabled via include=[...].
it_region = gen_country('europe', 'italy', download_repo=OSMFR, language='it', proj=23032, municipality_ref='ref:ISTAT',
    phone_code='39', phone_len=[6, 11], phone_len_short=[3, 4], phone_international='00', phone_format=r"^(?:(?:[+]%s[- ]*[03])|[18])[0-9]+(?:[- ][0-9]+)?(?:(?:[- ][0-9]+)|$)$", include=[
    'merge_fuel_IT',
    'merge_pharmacy_IT',
    'merge_parapharmacy_IT',
])
it_region("abruzzo", 53937, "IT-65")
it_region("basilicata", 40137, "IT-77")
it_region("calabria", 1783980, "IT-78")
it_region("campania", 40218, "IT-72")
it_region("emilia_romagna", 42611, "IT-45")
it_region("friuli_venezia_giulia", 179296, "IT-36")
it_region("lazio", 40784, "IT-62")
it_region("liguria", 301482, "IT-42")
it_region("lombardia", 44879, "IT-25")
it_region("marche", 53060, "IT-57")
it_region("molise", 41256, "IT-67")
it_region("piemonte", 44874, "IT-21")
it_region("puglia", 40095, "IT-75")
it_region("sardegna", 7361997, "IT-88")
it_region("sicilia", 39152, "IT-82")
it_region("toscana", 41977, "IT-52")
it_region("trentino_alto_adige", 45757, "IT-32", language=["it","de"])
it_region("umbria", 42004, "IT-55")
it_region("valle_aosta", 45155, "IT-23")
it_region("veneto", 43648, "IT-34")
#########################################################################
# Netherlands: the twelve European provinces, plus the Caribbean
# constituents/municipalities which override area, path_base and proj.
nl_province = gen_country('europe', 'netherlands', download_repo=OSMFR, language='nl', proj=23032)
nl_province("zuid_holland", 47772, "NL-ZH")
nl_province("zeeland", 47806, "NL-ZE")
nl_province("noord_brabant", 47696, "NL-NB")
nl_province("limburg", 47793, "NL-LI")
nl_province("gelderland", 47554, "NL-GE")
nl_province("overijssel", 47608, "NL-OV")
nl_province("drenthe", 47540, "NL-DR")
nl_province("friesland", 47381, "NL-FR", language=["nl", "fy"])
nl_province("groningen", 47826, "NL-GR")
nl_province("flevoland", 47407, "NL-FL")
nl_province("utrecht", 47667, "NL-UT")
nl_province("noord_holland", 47654, "NL-NH")
nl_province("aruba", 1231749, "AW", area="central-america", path_base=None, proj=32620)
nl_province("curacao", 1216719, "CW", area="central-america", path_base=None, proj=32620)
nl_province("sint_maarten", 1231790, "SX", area="central-america", path_base=None, proj=32620)
nl_province("caribbean", 1216720, "NL", area="central-america", path_base=None, proj=32620)
#########################################################################
# Czech Republic, per kraj (region).
cz_kraj = gen_country('europe', 'czech_republic', download_repo=OSMFR, language='cs', proj=32633)
cz_kraj("praha", 435514, "CZ-PR")
cz_kraj("stredocesky", 442397, "CZ-ST")
cz_kraj("jihocesky", 442321, "CZ-JC")
cz_kraj("plzensky", 442466, "CZ-PL")
cz_kraj("karlovarsky", 442314, "CZ-KA")
cz_kraj("ustecky", 442452, "CZ-US")
cz_kraj("liberecky", 442455, "CZ-LI")
cz_kraj("kralovehradecky", 442463, "CZ-KR")
cz_kraj("pardubicky", 442460, "CZ-PA")
cz_kraj("vysocina", 442453, "CZ-VY")
cz_kraj("jihomoravsky", 442311, "CZ-JM")
cz_kraj("olomoucky", 442459, "CZ-OL")
cz_kraj("moravskoslezsky", 442461, "CZ-MO")
cz_kraj("zlinsky", 442449, "CZ-ZL")
#########################################################################
# Poland, per voivodeship, with national phone validation.
pl_province = gen_country('europe', 'poland', download_repo=OSMFR, language='pl', proj=32634,
    phone_code='48', phone_len=9, phone_international='00')
pl_province("dolnoslaskie", 224457, "PL-DS")
pl_province("kujawsko_pomorskie", 223407, "PL-KP")
pl_province("lubelskie", 130919, "PL-LU")
pl_province("lubuskie", 130969, "PL-LB")
pl_province("lodzkie", 224458, "PL-LD")
pl_province("malopolskie", 224459, "PL-MA")
pl_province("mazowieckie", 130935, "PL-MZ")
pl_province("opolskie", 224460, "PL-OP")
pl_province("podkarpackie", 130957, "PL-PK")
pl_province("podlaskie", 224461, "PL-PD")
pl_province("pomorskie", 130975, "PL-PM")
pl_province("slaskie", 224462, "PL-SL")
pl_province("swietokrzyskie", 130914, "PL-SK")
pl_province("warminsko_mazurskie", 223408, "PL-WN")
pl_province("wielkopolskie", 130971, "PL-WP")
pl_province("zachodniopomorskie", 104401, "PL-ZP")
#########################################################################
# Germany, per Bundesland. The three largest states (BW, BY, NW) are too big
# to process whole, so they are registered per Regierungsbezirk instead —
# the commented-out whole-state lines are kept for reference.
de_state = gen_country('europe', 'germany', language='de', proj=32632, municipality_ref='de:regionalschluessel',
    phone_code='49', phone_international='00', phone_local_prefix='0', phone_values_separators=[','],
    include=[
        'osmosis_highway_zone'
    ]
)
#de_state("baden-wuerttemberg", 62611, "DE-BW")
for (name, rel_id) in [("freiburg-regbez", 2106112),
                       ("karlsruhe-regbez", 22027),
                       ("stuttgart-regbez", 22041),
                       ("tuebingen-regbez", 2811874)]:
    de_state("baden-wuerttemberg/" + name, rel_id, "DE-BW", download_repo=GEOFABRIK)
#de_state("bayern", 2145268, "DE-BY")
for (name, rel_id) in [("mittelfranken", 17614),
                       ("niederbayern", 17593),
                       ("oberbayern", 2145274),
                       ("oberfranken", 17592),
                       ("oberpfalz", 17596),
                       ("schwaben", 17657),
                       ("unterfranken", 17585)]:
    de_state("bayern/" + name, rel_id, "DE-BY", download_repo=GEOFABRIK)
de_state("berlin", 62422, "DE-BE")
de_state("brandenburg", 62504, "DE-BB")
de_state("bremen", 62718, "DE-HB")
de_state("hamburg", 62782, "DE-HH")
de_state("hessen", 62650, "DE-HE")
de_state("mecklenburg-vorpommern", 28322, "DE-MV")
de_state("niedersachsen", 454192, "DE-NI")
#de_state("nordrhein-westfalen", 62761, "DE-NW")
for (name, rel_id) in [("arnsberg", 73340),
                       ("detmold", 73347),
                       ("dusseldorf", 63306),
                       ("koln", 72022),
                       ("munster", 63594)]:
    de_state("nordrhein_westfalen/" + name, rel_id, "DE-NW", download_repo=OSMFR)
de_state("rheinland-pfalz", 62341, "DE-RP")
de_state("saarland", 62372, "DE-SL")
de_state("sachsen-anhalt", 62607, "DE-ST")
de_state("sachsen", 62467, "DE-SN")
de_state("schleswig-holstein", 51529, "DE-SH")
de_state("thueringen", 62366, "DE-TH")
#########################################################################
# Austria, per Bundesland.
at_state = gen_country('europe', 'austria', download_repo=OSMFR, language='de', proj=32633)
at_state("niederosterreich", 77189, "AT-3")
at_state("burgenland", 76909, "AT-1")
at_state("karnten", 52345, "AT-2")
at_state("oberosterreich", 102303, "AT-4")
at_state("salzburg", 86539, "AT-5")
at_state("steiermark", 35183, "AT-6")
at_state("tirol", 52343, "AT-7")
at_state("wien", 109166, "AT-9")
at_state("vorarlberg", 74942, "AT-8")
#########################################################################
# Spain, per autonomous community. proj and (co-)official languages are set
# per call; the Canary Islands, Ceuta and Melilla are filed under "africa".
es_comm = gen_country('europe', 'spain', download_repo=OSMFR, language='es', proj=32629, municipality_ref='ine:municipio', phone_code='34', phone_len=9, phone_len_short=[3, 4, 5], phone_international='00')
es_comm("andalucia", 349044, "ES-AN", proj=32629)
es_comm("aragon", 349045, "ES-AR", proj=32630)
es_comm("asturias", 349033, "ES-AS", proj=32629)
es_comm("illes_balears", 348981, "ES-IB", proj=32630, language="ca")
es_comm("cantabria", 349013, "ES-CB", proj=32630)
es_comm("castilla_la_mancha", 349052, "ES-CM", proj=32630)
es_comm("castilla_y_leon", 349041, "ES-CL", proj=32629)
es_comm("catalunya", 349053, "ES-CT", proj=32630, language="ca")
es_comm("comunitat_valenciana", 349043, "ES-VC", proj=32630, language=["es", "ca"])
es_comm("extremadura", 349050, "ES-EX", proj=32629)
es_comm("galicia", 349036, "ES-GA", proj=32629, language=["es", "gl"])
es_comm("la_rioja", 348991, "ES-RI", proj=32630)
es_comm("comunidad_de_madrid", 349055, "ES-MD", proj=32630)
es_comm("comunidad_foral_de_navarra", 349027, "ES-NC", proj=32630)
es_comm("euskadi", 349042, "ES-PV", proj=32630, language=["es", "eu"])
es_comm("region_de_murcia", 349047, "ES-MC", proj=32630)
es_comm("canarias", 349048, "ES-CN", proj=32628, area="africa")
es_comm("ceuta", 1154756, "ES-CE", proj=32630, area="africa")
es_comm("melilla", 1154757, "ES-ML", proj=32628, area="africa")
#########################################################################
# England, per region; the shared GB-ENG country_code is bound in the factory
# so the per-call third argument is omitted.
en_region = gen_country('europe', 'united_kingdom/england', download_repo=OSMFR, country_code='GB-ENG', language='en', proj=32630, driving_side='left', speed_limit_unit='mph')
en_region("east_midlands", 151279)
en_region("east", 151336)
en_region("greater_london", 175342)
en_region("north_east", 151164)
en_region("north_west", 151261)
en_region("south_east", 151304)
en_region("south_west", 151339, language=["en", "kw"])
en_region("west_midlands", 151283)
en_region("yorkshire_and_the_humber", 151012)
#########################################################################
# Slovakia, per kraj (region).
sk_kraj = gen_country('europe', 'slovakia', download_repo=OSMFR, language='sk', proj=32634)
sk_kraj("trnavsky", 388266, "SK-TA")
sk_kraj("trenciansky", 388267, "SK-TC")
sk_kraj("presovsky", 388271, "SK-PV")
sk_kraj("nitriansky", 388268, "SK-NI")
sk_kraj("kosicky", 388272, "SK-KI")
sk_kraj("zilinsky", 388269, "SK-ZI")
sk_kraj("banskobystricky", 388270, "SK-BC")
sk_kraj("bratislavsky", 388265, "SK-BL")
#########################################################################
# India, per state; each call overrides proj with the UTM zone for that state.
india_state = gen_country('asia', 'india', download_repo=OSMFR, language=['hi', 'en'], proj=32644, driving_side='left')
india_state("andhra_pradesh", 2022095, "IN-AP", proj=32644)
india_state("arunachal_pradesh",2027346, "IN-AR", proj=32646)
india_state("assam", 2025886, "IN-AS", proj=32646)
india_state("bihar", 1958982, "IN-BR", proj=32645)
india_state("chhattisgarh", 1972004, "IN-CT", proj=32644)
india_state("goa", 11251493, "IN-GA", proj=32643)
india_state("gujarat", 1949080, "IN-GJ", proj=32643)
india_state("haryana", 1942601, "IN-HR", proj=32643)
india_state("himachal_pradesh", 364186, "IN-HP", proj=32643)
india_state("jammu_and_kashmir", 1943188, "IN-JK", proj=32643)
india_state("jharkhand", 1960191, "IN-JH", proj=32645)
india_state("karnataka", 2019939, "IN-KA", proj=32643)
india_state("kerala", 2018151, "IN-KL", proj=32643)
india_state("madhya_pradesh", 1950071, "IN-MP", proj=32643)
india_state("maharashtra", 1950884, "IN-MH", proj=32643)
india_state("manipur", 2027869, "IN-MN", proj=32646)
india_state("meghalaya", 2027521, "IN-ML", proj=32646)
india_state("mizoram", 2029046, "IN-MZ", proj=32646)
india_state("nagaland", 2027973, "IN-NL", proj=32646)
india_state("odisha", 1984022, "IN-OR", proj=32645)
india_state("punjab", 1942686, "IN-PB", proj=32643)
india_state("rajasthan", 1942920, "IN-RJ", proj=32643)
india_state("sikkim", 1791324, "IN-SK", proj=32645)
india_state("tamil_nadu", 96905, "IN-TN", proj=32644)
# Fix: the UTM projections of these two states were swapped. Telangana
# (~78-81°E) lies in UTM zone 44 (EPSG:32644, like neighbouring Andhra
# Pradesh); Tripura (~91-92°E) lies in zone 46 (EPSG:32646, like Bangladesh).
india_state("telangana", 3250963, "IN-TG", proj=32644)
india_state("tripura", 2026458, "IN-TR", proj=32646)
# India (continued): remaining states and union territories.
india_state("uttar_pradesh", 1942587, "IN-UP", proj=32644)
india_state("uttarakhand", 9987086, "IN-UT", proj=32644)
india_state("west_bengal", 1960177, "IN-WB", proj=32645)
india_state("andaman_and_nicobar_islands", 2025855, "IN-AN", proj=32646)
india_state("chandigarh", 1942809, "IN-CH", proj=32643)
india_state("dadra_and_nagar_haveli_and_daman_and_diu", 1952530, "IN-DH", proj=32643)
india_state("lakshadweep", 2027460, "IN-LD", proj=32643)
india_state("national_capital_territory_of_delhi", 1942586, "IN-DL", proj=32643)
india_state("puducherry", 107001, "IN-PY", proj=32643)
#########################################################################
# Russia, per federal subject grouped by federal district; the first argument
# is the [district, subject] path pair. The factory area is None — presumably
# the path pair supplies the full layout; confirm against gen_country. Each
# call sets the UTM zone for that subject.
russia_region = gen_country(None, 'russia', download_repo=OSMFR, language='ru')
russia_region(["central_federal_district", "belgorod_oblast"], 83184, "RU-BEL", proj=32637)
russia_region(["central_federal_district", "bryansk_oblast"], 81997, "RU-BRY", proj=32636)
russia_region(["central_federal_district", "ivanovo_oblast"], 85617, "RU-IVA", proj=32637)
russia_region(["central_federal_district", "kaluga_oblast"], 81995, "RU-KLU", proj=32636)
russia_region(["central_federal_district", "kostroma_oblast"], 85963, "RU-KOS", proj=32637)
russia_region(["central_federal_district", "kursk_oblast"], 72223, "RU-KRS", proj=32637)
russia_region(["central_federal_district", "lipetsk_oblast"], 72169, "RU-LIP", proj=32637)
russia_region(["central_federal_district", "moscow_oblast"], 51490, "RU-MOS", proj=32637)
russia_region(["central_federal_district", "moscow"], 102269, "RU-MOW", proj=32637)
russia_region(["central_federal_district", "oryol_oblast"], 72224, "RU-ORL", proj=32637)
russia_region(["central_federal_district", "ryazan_oblast"], 71950, "RU-RYA", proj=32637)
russia_region(["central_federal_district", "smolensk_oblast"], 81996, "RU-SMO", proj=32636)
russia_region(["central_federal_district", "tambov_oblast"], 72180, "RU-TAM", proj=32637)
russia_region(["central_federal_district", "tula_oblast"], 81993, "RU-TUL", proj=32637)
russia_region(["central_federal_district", "tver_oblast"], 2095259, "RU-TVE", proj=32637)
russia_region(["central_federal_district", "vladimir_oblast"], 72197, "RU-VLA", proj=32637)
russia_region(["central_federal_district", "voronezh_oblast"], 72181, "RU-VOR", proj=32637)
russia_region(["central_federal_district", "yaroslavl_oblast"], 81994, "RU-YAR", proj=32637)
russia_region(["far_eastern_federal_district", "amur_oblast"], 147166, "RU-AMU", proj=32652)
russia_region(["far_eastern_federal_district", "chukotka_autonomous_okrug"], 151231, "RU-CHU", proj=32659)
russia_region(["far_eastern_federal_district", "jewish_autonomous_oblast"], 147167, "RU-YEV", proj=32653)
russia_region(["far_eastern_federal_district", "kamchatka_krai"], 151233, "RU-KAM", proj=32658)
russia_region(["far_eastern_federal_district", "khabarovsk_krai"], 151223, "RU-KHA", proj=32653)
russia_region(["far_eastern_federal_district", "magadan_oblast"], 151228, "RU-MAG", proj=32656)
russia_region(["far_eastern_federal_district", "primorsky_krai"], 151225, "RU-PRI", proj=32653)
russia_region(["far_eastern_federal_district", "sakha_republic"], 151234, "RU-SA", proj=32652)
russia_region(["far_eastern_federal_district", "sakhalin_oblast"], 394235, "RU-SAK", proj=32654)
russia_region(["north_caucasian_federal_district", "chechen_republic"], 109877, "RU-CE", proj=32638)
russia_region(["north_caucasian_federal_district", "dagestan_republic"], 109876, "RU-DA", proj=32638)
russia_region(["north_caucasian_federal_district", "ingushetia_republic"], 253252, "RU-IN", proj=32638)
russia_region(["north_caucasian_federal_district", "kabardino_balkar_republic"], 109879, "RU-KB", proj=32638)
russia_region(["north_caucasian_federal_district", "karachay_cherkess_republic"], 109878, "RU-KC", proj=32638)
russia_region(["north_caucasian_federal_district", "north_ossetia_alania_republic"], 110032, "RU-SE", proj=32638)
russia_region(["north_caucasian_federal_district", "stavropol_krai"], 108081, "RU-STA", proj=32638)
russia_region(["northwestern_federal_district", "arkhangelsk_oblast"], 140337, "RU-ARK", proj=32638)
russia_region(["northwestern_federal_district", "kaliningrad_oblast"], 103906, "RU-KGD", proj=32634)
russia_region(["northwestern_federal_district", "karelia_republic"], 393980, "RU-KR", proj=32636)
russia_region(["northwestern_federal_district", "komi_republic"], 115136, "RU-KO", proj=32640)
russia_region(["northwestern_federal_district", "leningrad_oblast"], 176095, "RU-LEN", proj=32636)
russia_region(["northwestern_federal_district", "murmansk_oblast"], 2099216, "RU-MUR", proj=32636)
russia_region(["northwestern_federal_district", "nenets_autonomous_okrug"], 274048, "RU-NEN", proj=32639)
russia_region(["northwestern_federal_district", "novgorod_oblast"], 89331, "RU-NGR", proj=32636)
russia_region(["northwestern_federal_district", "pskov_oblast"], 155262, "RU-PSK", proj=32636)
russia_region(["northwestern_federal_district", "saint_petersburg"], 337422, "RU-SPE", proj=32636)
russia_region(["northwestern_federal_district", "vologda_oblast"], 115106, "RU-VLG", proj=32637)
russia_region(["siberian_federal_district", "altai_krai"], 144764, "RU-ALT", proj=32644)
russia_region(["siberian_federal_district", "altai_republic"], 145194, "RU-AL", proj=32645)
russia_region(["siberian_federal_district", "buryatia_republic"], 145729, "RU-BU", proj=32647)
russia_region(["siberian_federal_district", "irkutsk_oblast"], 145454, "RU-IRK", proj=32648)
russia_region(["siberian_federal_district", "kemerovo_oblast"], 144763, "RU-KEM", proj=32645)
russia_region(["siberian_federal_district", "khakassia_republic"], 190911, "RU-KK", proj=32646)
russia_region(["siberian_federal_district", "krasnoyarsk_krai"], 190090, "RU-KYA", proj=32646)
russia_region(["siberian_federal_district", "novosibirsk_oblast"], 140294, "RU-NVS", proj=32644)
russia_region(["siberian_federal_district", "omsk_oblast"], 140292, "RU-OMS", proj=32643)
russia_region(["siberian_federal_district", "tomsk_oblast"], 140295, "RU-TOM", proj=32644)
russia_region(["siberian_federal_district", "tuva_republic"], 145195, "RU-TY", proj=32646)
russia_region(["siberian_federal_district", "zabaykalsky_krai"], 145730, "RU-ZAB", proj=32650)
russia_region(["southern_federal_district", "crimea_republic"], 3795586, "RU-CR", proj=32636)
russia_region(["southern_federal_district", "adygea_republic"], 253256, "RU-AD", proj=32637)
russia_region(["southern_federal_district", "astrakhan_oblast"], 112819, "RU-AST", proj=32638)
russia_region(["southern_federal_district", "kalmykia_republic"], 108083, "RU-KL", proj=32638)
russia_region(["southern_federal_district", "krasnodar_krai"], 108082, "RU-KDA", proj=32637)
russia_region(["southern_federal_district", "rostov_oblast"], 85606, "RU-ROS", proj=32637)
russia_region(["southern_federal_district", "sevastopol"], 1574364, "RU", proj=32636)
russia_region(["southern_federal_district", "volgograd_oblast"], 77665, "RU-VGG", proj=32638)
russia_region(["ural_federal_district", "chelyabinsk_oblast"], 77687, "RU-CHE", proj=32641)
russia_region(["ural_federal_district", "khanty_mansi_autonomous_okrug"], 140296, "RU-KHM", proj=32642)
russia_region(["ural_federal_district", "kurgan_oblast"], 140290, "RU-KGN", proj=32641)
russia_region(["ural_federal_district", "sverdlovsk_oblast"], 79379, "RU-SVE", proj=32641)
russia_region(["ural_federal_district", "tyumen_oblast"], 140291, "RU-TYU", proj=32642)
russia_region(["ural_federal_district", "yamalo_nenets_autonomous_okrug"], 191706, "RU-YAN", proj=32643)
russia_region(["volga_federal_district", "bashkortostan_republic"], 77677, "RU-BA", proj=32640)
russia_region(["volga_federal_district", "chuvash_republic"], 80513, "RU-CU", proj=32639)
russia_region(["volga_federal_district", "kirov_oblast"], 115100, "RU-KIR", proj=32639)
russia_region(["volga_federal_district", "mari_el_republic"], 115114, "RU-ME", proj=32639)
russia_region(["volga_federal_district", "mordovia_republic"], 72196, "RU-MO", proj=32638)
russia_region(["volga_federal_district", "nizhny_novgorod_oblast"], 72195, "RU-NIZ", proj=32638)
russia_region(["volga_federal_district", "orenburg_oblast"], 77669, "RU-ORE", proj=32640)
russia_region(["volga_federal_district", "penza_oblast"], 72182, "RU-PNZ", proj=32638)
russia_region(["volga_federal_district", "perm_krai"], 115135, "RU-PER", proj=32640)
russia_region(["volga_federal_district", "samara_oblast"], 72194, "RU-SAM", proj=32639)
russia_region(["volga_federal_district", "saratov_oblast"], 72193, "RU-SAR", proj=32638)
russia_region(["volga_federal_district", "tatarstan_republic"], 79374, "RU-TA", proj=32639)
russia_region(["volga_federal_district", "udmurt_republic"], 115134, "RU-UD", proj=32639)
russia_region(["volga_federal_district", "ulyanovsk_oblast"], 72192, "RU-ULY", proj=32639)
#########################################################################
# Japan, split into its eight traditional regions; extracts come from the
# OSM-FR download repository. Left-hand traffic; default projection is
# UTM 54N, overridden per region for the western half of the country.
japan_region = gen_country('asia', 'japan', download_repo=OSMFR, country_code='JP', language='ja', proj=32654, driving_side='left')
japan_region("hokkaido", 3795658, proj=32654)
japan_region("tohoku", 1835900, proj=32654)
japan_region("kanto", 1803923, proj=32654)
japan_region("chubu", 532759, proj=32654)
japan_region("kansai", 357113, proj=32653)
japan_region("chugoku", 1842114, proj=32653)
japan_region("shikoku", 1847663, proj=32653)
japan_region("kyushu", 1842245, proj=32652)
#########################################################################
# Chinese first-level divisions (provinces, autonomous regions,
# municipalities and SARs). Arguments: region name, OSM relation id,
# ISO 3166-2 code, and proj — the EPSG code of the local UTM zone.
china_province = gen_country('asia', 'china', download_repo=OSMFR, language='zh')
china_province("anhui", 913011, "CN-34", proj=32650)
china_province("fujian", 553303, "CN-35", proj=32650)
china_province("gansu", 153314, "CN-62", proj=32648)
china_province("guangdong", 911844, "CN-44", proj=32649)
china_province("guizhou", 286937, "CN-52", proj=32648)
china_province("hainan", 2128285, "CN-46", proj=32649)
china_province("hebei", 912998, "CN-13", proj=32650)
china_province("heilongjiang", 199073, "CN-23", proj=32652)
china_province("henan", 407492, "CN-41", proj=32650)
china_province("hubei", 913106, "CN-42", proj=32649)
china_province("hunan", 913073, "CN-43", proj=32649)
china_province("jiangsu", 913012, "CN-32", proj=32650)
china_province("jiangxi", 913109, "CN-36", proj=32650)
china_province("jilin", 198590, "CN-22", proj=32652)
china_province("liaoning", 912942, "CN-21", proj=32651)
china_province("qinghai", 153269, "CN-63", proj=32647)
china_province("shaanxi", 913100, "CN-61", proj=32649)
china_province("shandong", 913006, "CN-37", proj=32650)
china_province("shanxi", 913105, "CN-14", proj=32650)
china_province("sichuan", 913068, "CN-51", proj=32648)
china_province("yunnan", 913094, "CN-53", proj=32648)
china_province("zhejiang", 553302, "CN-33", proj=32651)
china_province("tibet", 153292, "CN-54", proj=32645)
china_province("xinjiang", 153310, "CN-65", proj=32645)
china_province("guangxi", 286342, "CN-45", proj=32649)
china_province("inner_mongolia", 161349, "CN-15", proj=32650)
china_province("ningxia", 913101, "CN-64", proj=32648)
china_province("beijing", 912940, "CN-11", proj=32650)
china_province("tianjin", 912999, "CN-12", proj=32650)
china_province("shanghai", 913067, "CN-31", proj=32651)
china_province("chongqing", 913069, "CN-50", proj=32649)
# Special administrative regions: extra languages, and left-hand traffic
# in Hong Kong. NOTE(review): "CN-91"/"CN-92" are not current ISO 3166-2
# codes (now CN-HK / CN-MO) — presumably a project-internal convention.
china_province("hong_kong", 913110, "CN-91", proj=32650, language=["zh", "en"], driving_side="left")
china_province("macau", 1867188, "CN-92", proj=32649, language=["zh", "pt"])
#########################################################################
# OpenGeofiction: a fictional collaborative map served from its own planet
# dump rather than a geographic extract.
ogf = default_simple("ogf", None, {"project": "opengeofiction"},
                     download_url=u"http://opengeofiction.net/backup/ogf_latest.osm.pbf")
# Drop the soundex analyser — presumably meaningless on fictional place
# names; confirm before re-enabling.
del(ogf.analyser["osmosis_soundex"])
###########################################################################
# Merge analysers are uploaded to a different frontend server; reset every
# country's per-analyser update-URL mapping before (conditionally) filling it.
for country in config.keys():
    config[country].analyser_updt_url = {}
    # NOTE: commented, as opendata.osmose causes timeout issues
#    for k in config[country].analyser.keys():
#      if k.startswith("merge_"):
#        config[country].analyser_updt_url[k] = [modules.config.url_frontend_update, modules.config.url_frontend_opendata_update]
#########################################################################
# Passwords are stored in a separate file, deliberately kept out of the
# git repository; it injects credentials into every country config.
import osmose_config_password
osmose_config_password.set_password(config)
###########################################################################
if __name__ == "__main__":
    # Dump the full per-country configuration as JSON for manual inspection.
    import json
    dump = [dict(cfg.__dict__, **{"country": country})
            for country, cfg in config.items()]
    print(json.dumps(dump, indent=4))
| gpl-3.0 |
sfrehse/googletest | test/gtest_xml_output_unittest.py | 1815 | 14580 | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for the gtest_xml_output module"""
__author__ = 'eefacm@gmail.com (Sean Mcafee)'
import datetime
import errno
import os
import re
import sys
from xml.dom import minidom, Node
import gtest_test_utils
import gtest_xml_test_utils
# Command-line flags understood by Google Test binaries.
GTEST_FILTER_FLAG = '--gtest_filter'
GTEST_LIST_TESTS_FLAG = '--gtest_list_tests'
GTEST_OUTPUT_FLAG = "--gtest_output"
# File name Google Test writes when --gtest_output=xml has no path.
GTEST_DEFAULT_OUTPUT_FILE = "test_detail.xml"
# Name of the helper test binary exercised by this unit test.
GTEST_PROGRAM_NAME = "gtest_xml_output_unittest_"

SUPPORTS_STACK_TRACES = False

# The expected-XML templates embed this where a stack trace would appear.
if SUPPORTS_STACK_TRACES:
  STACK_TRACE_TEMPLATE = '\nStack trace:\n*'
else:
  STACK_TRACE_TEMPLATE = ''
EXPECTED_NON_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="23" failures="4" disabled="2" errors="0" time="*" timestamp="*" name="AllTests" ad_hoc_property="42">
<testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="SuccessfulTest"/>
</testsuite>
<testsuite name="FailedTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="Fails" status="run" time="*" classname="FailedTest">
<failure message="gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="MixedResultTest" tests="3" failures="1" disabled="1" errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="MixedResultTest"/>
<testcase name="Fails" status="run" time="*" classname="MixedResultTest">
<failure message="gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1%(stack)s]]></failure>
<failure message="gtest_xml_output_unittest_.cc:*
Value of: 3
Expected: 2" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 3
Expected: 2%(stack)s]]></failure>
</testcase>
<testcase name="DISABLED_test" status="notrun" time="*" classname="MixedResultTest"/>
</testsuite>
<testsuite name="XmlQuotingTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="OutputsCData" status="run" time="*" classname="XmlQuotingTest">
<failure message="gtest_xml_output_unittest_.cc:*
Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]></top>" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]>]]><![CDATA[</top>%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="InvalidCharactersTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="InvalidCharactersInMessage" status="run" time="*" classname="InvalidCharactersTest">
<failure message="gtest_xml_output_unittest_.cc:*
Failed
Invalid characters in brackets []" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
Invalid characters in brackets []%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="DisabledTest" tests="1" failures="0" disabled="1" errors="0" time="*">
<testcase name="DISABLED_test_not_run" status="notrun" time="*" classname="DisabledTest"/>
</testsuite>
<testsuite name="PropertyRecordingTest" tests="4" failures="0" disabled="0" errors="0" time="*" SetUpTestCase="yes" TearDownTestCase="aye">
<testcase name="OneProperty" status="run" time="*" classname="PropertyRecordingTest" key_1="1"/>
<testcase name="IntValuedProperty" status="run" time="*" classname="PropertyRecordingTest" key_int="1"/>
<testcase name="ThreeProperties" status="run" time="*" classname="PropertyRecordingTest" key_1="1" key_2="2" key_3="3"/>
<testcase name="TwoValuesForOneKeyUsesLastValue" status="run" time="*" classname="PropertyRecordingTest" key_1="2"/>
</testsuite>
<testsuite name="NoFixtureTest" tests="3" failures="0" disabled="0" errors="0" time="*">
<testcase name="RecordProperty" status="run" time="*" classname="NoFixtureTest" key="1"/>
<testcase name="ExternalUtilityThatCallsRecordIntValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_int="1"/>
<testcase name="ExternalUtilityThatCallsRecordStringValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_string="1"/>
</testsuite>
<testsuite name="Single/ValueParamTest" tests="4" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="HasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="AnotherTestThatHasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="AnotherTestThatHasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
</testsuite>
<testsuite name="TypedTest/0" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/0" />
</testsuite>
<testsuite name="TypedTest/1" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/1" />
</testsuite>
<testsuite name="Single/TypeParameterizedTestCase/0" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/0" />
</testsuite>
<testsuite name="Single/TypeParameterizedTestCase/1" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/1" />
</testsuite>
</testsuites>""" % {'stack': STACK_TRACE_TEMPLATE}
EXPECTED_FILTERED_TEST_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*"
timestamp="*" name="AllTests" ad_hoc_property="42">
<testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0"
errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="SuccessfulTest"/>
</testsuite>
</testsuites>"""
EXPECTED_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="0" failures="0" disabled="0" errors="0" time="*"
timestamp="*" name="AllTests">
</testsuites>"""
GTEST_PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath(GTEST_PROGRAM_NAME)

# Probe the helper binary's test list to learn whether this platform was
# built with typed / type-parameterized test support.
SUPPORTS_TYPED_TESTS = 'TypedTest' in gtest_test_utils.Subprocess(
    [GTEST_PROGRAM_PATH, GTEST_LIST_TESTS_FLAG], capture_stderr=False).output
class GTestXMLOutputUnitTest(gtest_xml_test_utils.GTestXMLTestCase):
  """
  Unit test for Google Test's XML output functionality.
  """

  # This test currently breaks on platforms that do not support typed and
  # type-parameterized tests, so we don't run it under them.
  if SUPPORTS_TYPED_TESTS:
    def testNonEmptyXmlOutput(self):
      """
      Runs a test program that generates a non-empty XML output, and
      tests that the XML output is expected.
      """
      self._TestXmlOutput(GTEST_PROGRAM_NAME, EXPECTED_NON_EMPTY_XML, 1)

  def testEmptyXmlOutput(self):
    """Verifies XML output for a Google Test binary without actual tests.

    Runs a test program that generates an empty XML output, and
    tests that the XML output is expected.
    """
    self._TestXmlOutput('gtest_no_test_unittest', EXPECTED_EMPTY_XML, 0)

  def testTimestampValue(self):
    """Checks whether the timestamp attribute in the XML output is valid.

    Runs a test program that generates an empty XML output, and checks if
    the timestamp attribute in the testsuites tag is valid.
    """
    actual = self._GetXmlOutput('gtest_no_test_unittest', [], 0)
    date_time_str = actual.documentElement.getAttributeNode('timestamp').value
    # datetime.strptime() is only available in Python 2.5+ so we have to
    # parse the expected datetime manually.
    match = re.match(r'(\d+)-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)', date_time_str)
    # Bug fix: the original passed the function object `re.match` (always
    # truthy) to assertTrue, so a malformed timestamp was never detected.
    # Assert on the actual match result instead.
    self.assertTrue(
        match,
        'XML datetime string %s has incorrect format' % date_time_str)
    date_time_from_xml = datetime.datetime(
        year=int(match.group(1)), month=int(match.group(2)),
        day=int(match.group(3)), hour=int(match.group(4)),
        minute=int(match.group(5)), second=int(match.group(6)))

    time_delta = abs(datetime.datetime.now() - date_time_from_xml)
    # timestamp value should be near the current local time
    self.assertTrue(time_delta < datetime.timedelta(seconds=600),
                    'time_delta is %s' % time_delta)
    actual.unlink()

  def testDefaultOutputFile(self):
    """
    Confirms that Google Test produces an XML output file with the expected
    default name if no name is explicitly specified.
    """
    output_file = os.path.join(gtest_test_utils.GetTempDir(),
                               GTEST_DEFAULT_OUTPUT_FILE)
    gtest_prog_path = gtest_test_utils.GetTestExecutablePath(
        'gtest_no_test_unittest')
    try:
      os.remove(output_file)
    except OSError as e:  # 'as' form works on Python 2.6+ and 3.x
      # A missing file is fine (nothing to clean up); re-raise anything else.
      if e.errno != errno.ENOENT:
        raise

    p = gtest_test_utils.Subprocess(
        [gtest_prog_path, '%s=xml' % GTEST_OUTPUT_FLAG],
        working_dir=gtest_test_utils.GetTempDir())
    self.assert_(p.exited)
    self.assertEquals(0, p.exit_code)
    self.assert_(os.path.isfile(output_file))

  def testSuppressedXmlOutput(self):
    """
    Tests that no XML file is generated if the default XML listener is
    shut down before RUN_ALL_TESTS is invoked.
    """
    xml_path = os.path.join(gtest_test_utils.GetTempDir(),
                            GTEST_PROGRAM_NAME + 'out.xml')
    if os.path.isfile(xml_path):
      os.remove(xml_path)

    command = [GTEST_PROGRAM_PATH,
               '%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path),
               '--shut_down_xml']
    p = gtest_test_utils.Subprocess(command)
    if p.terminated_by_signal:
      # p.signal is available only if p.terminated_by_signal is True.
      self.assertFalse(
          p.terminated_by_signal,
          '%s was killed by signal %d' % (GTEST_PROGRAM_NAME, p.signal))
    else:
      self.assert_(p.exited)
      self.assertEquals(1, p.exit_code,
                        "'%s' exited with code %s, which doesn't match "
                        'the expected exit code %s.'
                        % (command, p.exit_code, 1))

    self.assert_(not os.path.isfile(xml_path))

  def testFilteredTestXmlOutput(self):
    """Verifies XML output when a filter is applied.

    Runs a test program that executes only some tests and verifies that
    non-selected tests do not show up in the XML output.
    """
    self._TestXmlOutput(GTEST_PROGRAM_NAME, EXPECTED_FILTERED_TEST_XML, 0,
                        extra_args=['%s=SuccessfulTest.*' % GTEST_FILTER_FLAG])

  def _GetXmlOutput(self, gtest_prog_name, extra_args, expected_exit_code):
    """
    Returns the xml output generated by running the program gtest_prog_name.
    Furthermore, the program's exit code must be expected_exit_code.
    """
    xml_path = os.path.join(gtest_test_utils.GetTempDir(),
                            gtest_prog_name + 'out.xml')
    gtest_prog_path = gtest_test_utils.GetTestExecutablePath(gtest_prog_name)

    command = ([gtest_prog_path, '%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path)] +
               extra_args)
    p = gtest_test_utils.Subprocess(command)
    if p.terminated_by_signal:
      self.assert_(False,
                   '%s was killed by signal %d' % (gtest_prog_name, p.signal))
    else:
      self.assert_(p.exited)
      self.assertEquals(expected_exit_code, p.exit_code,
                        "'%s' exited with code %s, which doesn't match "
                        'the expected exit code %s.'
                        % (command, p.exit_code, expected_exit_code))
    actual = minidom.parse(xml_path)
    return actual

  def _TestXmlOutput(self, gtest_prog_name, expected_xml,
                     expected_exit_code, extra_args=None):
    """
    Asserts that the XML document generated by running the program
    gtest_prog_name matches expected_xml, a string containing another
    XML document.  Furthermore, the program's exit code must be
    expected_exit_code.
    """
    actual = self._GetXmlOutput(gtest_prog_name, extra_args or [],
                                expected_exit_code)
    expected = minidom.parseString(expected_xml)
    self.NormalizeXml(actual.documentElement)
    self.AssertEquivalentNodes(expected.documentElement,
                               actual.documentElement)
    expected.unlink()
    actual.unlink()
if __name__ == '__main__':
  # Keep stack traces short so the expected-XML templates stay stable.
  os.environ['GTEST_STACK_TRACE_DEPTH'] = '1'
  gtest_test_utils.Main()
| bsd-3-clause |
jayceyxc/hue | desktop/core/ext-py/tablib-0.10.0/tablib/packages/openpyxl/shared/units.py | 118 | 2029 | # file openpyxl/shared/units.py
# Copyright (c) 2010 openpyxl
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# @license: http://www.opensource.org/licenses/mit-license.php
# @author: Eric Gazoni
import math
def pixels_to_EMU(value):
    """Convert a pixel length into English Metric Units (9525 EMU/pixel)."""
    return int(round(9525 * value))
def EMU_to_pixels(value):
    """Convert English Metric Units to pixels; None/0 maps to 0."""
    if value:
        return round(value / 9525.)
    return 0
def EMU_to_cm(value):
    """Convert English Metric Units to centimetres; None/0 maps to 0.

    Goes via pixels at 96 dpi. NOTE(review): the factor 2.57 is not the
    standard 2.54 cm/inch — presumably a deliberate fudge; confirm before
    changing.
    """
    if not value:
        return 0
    return EMU_to_pixels(value) * 2.57 / 96
def pixels_to_points(value):
    """Convert pixels to points.

    NOTE(review): the factor 0.67777777 is not the inverse of the
    1.333333333 used by points_to_pixels (which would give 0.75) — confirm
    whether this asymmetry is intentional.
    """
    return 0.67777777 * value
def points_to_pixels(value):
    """Convert points to pixels, rounding up; None/0 maps to 0."""
    if value:
        return int(math.ceil(value * 1.333333333))
    return 0
def degrees_to_angle(value):
    """Convert degrees to OOXML angle units (1/60000ths of a degree)."""
    return int(round(60000 * value))
def angle_to_degrees(value):
    """Convert OOXML angle units (1/60000ths of a degree) back to degrees."""
    if value:
        return round(value / 60000.)
    return 0
def short_color(color):
    """Return the 6-character RGB part of a color, dropping any alpha prefix."""
    return color[2:] if len(color) > 6 else color
| apache-2.0 |
kytos/kyco | kytos/core/buffers.py | 2 | 5301 | """Kytos Buffer Classes, based on Python Queue."""
import logging
# from queue import Queue
from janus import Queue
from kytos.core.events import KytosEvent
__all__ = ('KytosBuffers', )
LOG = logging.getLogger(__name__)
class KytosEventBuffer:
    """KytosEventBuffer represents a queue to store a set of KytosEvents.

    The underlying janus queue exposes both a synchronous view (``sync_q``)
    and an asynchronous view (``async_q``) over the same storage, so events
    can be exchanged between threads and the asyncio event loop.
    """

    def __init__(self, name, event_base_class=None, loop=None):
        """Constructor of KytosEventBuffer receive the parameters below.

        Args:
            name (string): name of KytosEventBuffer.
            event_base_class (class): Class of KytosEvent.
            loop: asyncio event loop the janus queue is bound to.
        """
        self.name = name
        self._event_base_class = event_base_class
        self._loop = loop
        self._queue = Queue(loop=self._loop)
        # Set to True once a kytos/core.shutdown event is seen; afterwards
        # all new events are silently dropped.
        self._reject_new_events = False

    def _reject_if_shutdown(self, event):
        """Enter reject mode when a ``kytos/core.shutdown`` event arrives."""
        if event.name == "kytos/core.shutdown":
            LOG.info('[buffer: %s] Stop mode enabled. Rejecting new events.',
                     self.name)
            self._reject_new_events = True

    def put(self, event):
        """Insert an event in KytosEventBuffer if reject_new_events is False.

        Reject new events is True when a kytos/core.shutdown message was
        received.

        Args:
            event (:class:`~kytos.core.events.KytosEvent`):
                KytosEvent sent to queue.
        """
        if not self._reject_new_events:
            self._queue.sync_q.put(event)
            LOG.debug('[buffer: %s] Added: %s', self.name, event.name)

        self._reject_if_shutdown(event)

    async def aput(self, event):
        """Insert an event in KytosEventBuffer if reject_new_events is False.

        Async counterpart of :meth:`put`. Reject new events is True when a
        kytos/core.shutdown message was received.

        Args:
            event (:class:`~kytos.core.events.KytosEvent`):
                KytosEvent sent to queue.
        """
        if not self._reject_new_events:
            await self._queue.async_q.put(event)
            LOG.debug('[buffer: %s] Added: %s', self.name, event.name)

        self._reject_if_shutdown(event)

    def get(self):
        """Remove and return a event from top of queue.

        Returns:
            :class:`~kytos.core.events.KytosEvent`:
                Event removed from top of queue.
        """
        event = self._queue.sync_q.get()
        LOG.debug('[buffer: %s] Removed: %s', self.name, event.name)

        return event

    async def aget(self):
        """Remove and return a event from top of queue (async counterpart).

        Returns:
            :class:`~kytos.core.events.KytosEvent`:
                Event removed from top of queue.
        """
        event = await self._queue.async_q.get()
        LOG.debug('[buffer: %s] Removed: %s', self.name, event.name)

        return event

    def task_done(self):
        """Indicate that a formerly enqueued task is complete.

        If a :func:`~kytos.core.buffers.KytosEventBuffer.join` is currently
        blocking, it will resume if all itens in KytosEventBuffer have been
        processed (meaning that a task_done() call was received for every item
        that had been put() into the KytosEventBuffer).
        """
        self._queue.sync_q.task_done()

    def join(self):
        """Block until all events are gotten and processed.

        A item is processed when the method task_done is called.
        """
        self._queue.sync_q.join()

    def qsize(self):
        """Return the size of KytosEventBuffer."""
        return self._queue.sync_q.qsize()

    def empty(self):
        """Return True if KytosEventBuffer is empty."""
        return self._queue.sync_q.empty()

    def full(self):
        """Return True if KytosEventBuffer is full of KytosEvent."""
        return self._queue.sync_q.full()
class KytosBuffers:
    """Set of KytosEventBuffer used in Kytos."""

    def __init__(self, loop=None):
        """Build four KytosEventBuffers.

        :attr:`raw`: :class:`~kytos.core.buffers.KytosEventBuffer` with events
        received from network.

        :attr:`msg_in`: :class:`~kytos.core.buffers.KytosEventBuffer` with
        events to be received.

        :attr:`msg_out`: :class:`~kytos.core.buffers.KytosEventBuffer` with
        events to be sent.

        :attr:`app`: :class:`~kytos.core.buffers.KytosEventBuffer` with events
        sent to NApps.
        """
        self._loop = loop
        self.raw = KytosEventBuffer('raw_event', loop=self._loop)
        self.msg_in = KytosEventBuffer('msg_in_event', loop=self._loop)
        self.msg_out = KytosEventBuffer('msg_out_event', loop=self._loop)
        self.app = KytosEventBuffer('app_event', loop=self._loop)

    def send_stop_signal(self):
        """Send a ``kytos/core.shutdown`` event to each buffer."""
        LOG.info('Stop signal received by Kytos buffers.')
        LOG.info('Sending KytosShutdownEvent to all apps.')
        event = KytosEvent(name='kytos/core.shutdown')
        # Broadcast the same shutdown event to every buffer in turn.
        for event_buffer in (self.raw, self.msg_in, self.msg_out, self.app):
            event_buffer.put(event)
| mit |
israelbenatar/boto | boto/ec2/elb/securitygroup.py | 152 | 1576 | # Copyright (c) 2010 Reza Lotun http://reza.lotun.name
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
class SecurityGroup(object):
    """A security group associated with a load balancer.

    Instances are populated by boto's SAX response parser through the
    ``startElement`` / ``endElement`` callbacks.
    """

    # Maps XML element names to the instance attributes they populate.
    _FIELD_MAP = {'GroupName': 'name', 'OwnerAlias': 'owner_alias'}

    def __init__(self, connection=None):
        self.name = None
        self.owner_alias = None

    def __repr__(self):
        return 'SecurityGroup({0}, {1})'.format(self.name, self.owner_alias)

    def startElement(self, name, attrs, connection):
        # No nested structures to handle for this element.
        pass

    def endElement(self, name, value, connection):
        attr = self._FIELD_MAP.get(name)
        if attr is not None:
            setattr(self, attr, value)
| mit |
IndraVikas/scikit-learn | examples/linear_model/plot_lasso_model_selection.py | 311 | 5431 | """
===================================================
Lasso model selection: Cross-Validation / AIC / BIC
===================================================
Use the Akaike information criterion (AIC), the Bayes Information
criterion (BIC) and cross-validation to select an optimal value
of the regularization parameter alpha of the :ref:`lasso` estimator.
Results obtained with LassoLarsIC are based on AIC/BIC criteria.
Information-criterion based model selection is very fast, but it
relies on a proper estimation of degrees of freedom. The criteria are
derived for large samples (asymptotic results) and assume the model
is correct, i.e. that the data are actually generated by this model.
They also tend to break when the problem is badly conditioned
(more features than samples).
For cross-validation, we use 20-fold with 2 algorithms to compute the
Lasso path: coordinate descent, as implemented by the LassoCV class, and
Lars (least angle regression) as implemented by the LassoLarsCV class.
Both algorithms give roughly the same results. They differ with regards
to their execution speed and sources of numerical errors.
Lars computes a path solution only for each kink in the path. As a
result, it is very efficient when there are only a few kinks, which is
the case if there are few features or samples. Also, it is able to
compute the full path without setting any meta parameter. By
contrast, coordinate descent computes the path points on a pre-specified
grid (here we use the default). Thus it is more efficient if the number
of grid points is smaller than the number of kinks in the path. Such a
strategy can be interesting if the number of features is really large
and there are enough samples to select a large amount. In terms of
numerical errors, for heavily correlated variables, Lars will accumulate
more errors, while the coordinate descent algorithm will only sample the
path on a grid.
Note how the optimal value of alpha varies for each fold. This
illustrates why nested-cross validation is necessary when trying to
evaluate the performance of a method for which a parameter is chosen by
cross-validation: this choice of parameter may not be optimal for unseen
data.
"""
print(__doc__)
# Author: Olivier Grisel, Gael Varoquaux, Alexandre Gramfort
# License: BSD 3 clause
import time
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LassoCV, LassoLarsCV, LassoLarsIC
from sklearn import datasets
# Load the diabetes regression dataset (442 samples, 10 features).
diabetes = datasets.load_diabetes()
X = diabetes.data
y = diabetes.target
# Fixed seed so the "bad" (pure-noise) features are reproducible.
rng = np.random.RandomState(42)
X = np.c_[X, rng.randn(X.shape[0], 14)]  # add some bad features
# normalize data as done by Lars to allow for comparison
X /= np.sqrt(np.sum(X ** 2, axis=0))
##############################################################################
# LassoLarsIC: least angle regression with BIC/AIC criterion
# Fit is timed so the plot title can report the training cost of the
# information-criterion approach.
model_bic = LassoLarsIC(criterion='bic')
t1 = time.time()
model_bic.fit(X, y)
t_bic = time.time() - t1
alpha_bic_ = model_bic.alpha_
model_aic = LassoLarsIC(criterion='aic')
model_aic.fit(X, y)
alpha_aic_ = model_aic.alpha_
def plot_ic_criterion(model, name, color):
    # Plot the information-criterion value along the regularization path of a
    # fitted LassoLarsIC model, and mark the alpha the criterion selected
    # with a vertical line.  `name` labels the criterion (e.g. 'AIC'),
    # `color` is a matplotlib color code.
    alpha_ = model.alpha_  # alpha chosen by the criterion
    alphas_ = model.alphas_  # alphas along the path
    criterion_ = model.criterion_  # criterion value at each alpha
    plt.plot(-np.log10(alphas_), criterion_, '--', color=color,
             linewidth=3, label='%s criterion' % name)
    plt.axvline(-np.log10(alpha_), color=color, linewidth=3,
                label='alpha: %s estimate' % name)
    plt.xlabel('-log(alpha)')
    plt.ylabel('criterion')
plt.figure()
plot_ic_criterion(model_aic, 'AIC', 'b')
plot_ic_criterion(model_bic, 'BIC', 'r')
plt.legend()
plt.title('Information-criterion for model selection (training time %.3fs)'
          % t_bic)
##############################################################################
# LassoCV: coordinate descent
# Compute paths
print("Computing regularization path using the coordinate descent lasso...")
t1 = time.time()
model = LassoCV(cv=20).fit(X, y)
t_lasso_cv = time.time() - t1
# Display results
m_log_alphas = -np.log10(model.alphas_)
plt.figure()
# Shared y-limits so the two CV plots are directly comparable.
ymin, ymax = 2300, 3800
plt.plot(m_log_alphas, model.mse_path_, ':')
plt.plot(m_log_alphas, model.mse_path_.mean(axis=-1), 'k',
         label='Average across the folds', linewidth=2)
plt.axvline(-np.log10(model.alpha_), linestyle='--', color='k',
            label='alpha: CV estimate')
plt.legend()
plt.xlabel('-log(alpha)')
plt.ylabel('Mean square error')
plt.title('Mean square error on each fold: coordinate descent '
          '(train time: %.2fs)' % t_lasso_cv)
plt.axis('tight')
plt.ylim(ymin, ymax)
##############################################################################
# LassoLarsCV: least angle regression
# Compute paths
print("Computing regularization path using the Lars lasso...")
t1 = time.time()
model = LassoLarsCV(cv=20).fit(X, y)
t_lasso_lars_cv = time.time() - t1
# Display results
# Note: LassoLarsCV exposes its path under cv_alphas_/cv_mse_path_, not
# alphas_/mse_path_ as LassoCV does.
m_log_alphas = -np.log10(model.cv_alphas_)
plt.figure()
plt.plot(m_log_alphas, model.cv_mse_path_, ':')
plt.plot(m_log_alphas, model.cv_mse_path_.mean(axis=-1), 'k',
         label='Average across the folds', linewidth=2)
plt.axvline(-np.log10(model.alpha_), linestyle='--', color='k',
            label='alpha CV')
plt.legend()
plt.xlabel('-log(alpha)')
plt.ylabel('Mean square error')
plt.title('Mean square error on each fold: Lars (train time: %.2fs)'
          % t_lasso_lars_cv)
plt.axis('tight')
plt.ylim(ymin, ymax)
plt.show()
| bsd-3-clause |
devunt/hydrocarbon | board/signals.py | 1 | 1269 | from django.contrib.sessions.models import Session
from django.dispatch import receiver
from django.utils.translation import ugettext as _
from account.models import EmailAddress
from account.signals import email_confirmation_sent, password_changed, user_signed_up
from board.models import Board, Notification
from board.utils import treedict
@receiver(email_confirmation_sent)
def email_confirmation_sent_callback(sender, confirmation, **kwargs):
    """Log the user out of all sessions after a confirmation email is sent.

    Walks every stored session and deletes those belonging to the user whose
    address is being confirmed, forcing a fresh login.
    """
    user = confirmation.email_address.user
    # Plain loop instead of a side-effect-only list comprehension: clearer,
    # and does not build a throwaway list.
    # NOTE(review): newer Django versions store '_auth_user_id' as a string;
    # confirm this integer comparison matches the Django version in use.
    for session in Session.objects.all():
        if session.get_decoded().get('_auth_user_id') == user.id:
            session.delete()
@receiver(user_signed_up)
def user_signed_up_callback(sender, user, form, **kwargs):
    """Activate a freshly signed-up user and greet them with a notification
    pointing at the site announcements board."""
    user.is_active = True
    user.save()

    notification_data = treedict()
    notification_data['type'] = 'SITE_ANNOUNCEMENT'
    notification_data['message'] = _('New site announcement')
    notification_data['text'] = _('Welcome to herocomics! We strongly recommend you read the announcements.')
    notification_data['url'] = Board.objects.get(slug='notice').get_absolute_url()
    Notification.create(None, user, notification_data)
@receiver(password_changed)
def password_changed_callback(sender, user, **kwargs):
    """Mark the user's primary email address verified on password change.

    NOTE(review): assumes a successful password change proves ownership of
    the primary address -- confirm this is the intended policy.
    """
    email = EmailAddress.objects.get_primary(user)
    # get_primary() returns None when the user has no primary address; guard
    # against that instead of raising AttributeError.
    if email is not None and not email.verified:
        email.verified = True
        email.save()
| mit |
SeldonIO/seldon-server | python/seldon/sklearn_estimator.py | 3 | 2924 | from sklearn.feature_extraction import DictVectorizer
from seldon.pipeline.pandas_pipelines import BasePandasEstimator
from collections import OrderedDict
import io
from sklearn.utils import check_X_y
from sklearn.utils import check_array
from sklearn.base import BaseEstimator,ClassifierMixin
import pandas as pd
class SKLearnClassifier(BasePandasEstimator, BaseEstimator, ClassifierMixin):
    """
    Wrapper for an arbitrary scikit-learn classifier with pandas support.
    (The original docstring claimed "XGBoost"; this class wraps any sklearn
    estimator passed as ``clf``.)

    clf : sklearn estimator
        sklearn estimator to run
    target : str
        Target column
    target_readable : str
        More descriptive version of target variable
    included : list str, optional
        columns to include
    excluded : list str, optional
        columns to exclude
    id_map : dict (int,str), optional
        map of class ids to high level names
    vectorizer : optional
        vectorizer used to turn DataFrame rows into feature arrays
    sk_args : str, optional
        extra args for sklearn classifier
    """

    def __init__(self, clf=None, target=None, target_readable=None,
                 included=None, excluded=None, id_map=None, vectorizer=None,
                 **sk_args):
        # Avoid the shared-mutable-default pitfall: every instance gets its
        # own id_map dict. Passing id_map={} explicitly still works.
        if id_map is None:
            id_map = {}
        super(SKLearnClassifier, self).__init__(target, target_readable,
                                                included, excluded, id_map)
        self.vectorizer = vectorizer
        self.clf = clf
        self.sk_args = sk_args

    def _prepare_features(self, X):
        """Return a numpy feature matrix for *X*.

        DataFrames are converted with the stored vectorizer; anything else
        is validated with check_array and passed through unchanged.
        """
        if isinstance(X, pd.DataFrame):
            (X, _, _) = self.convert_numpy(X)
        else:
            check_array(X)
        return X

    def fit(self, X, y=None):
        """
        Fit the wrapped sklearn classifier to data.

        Parameters
        ----------
        X : pandas dataframe or array-like
            training samples
        y : array like, required for array-like X and not used presently for
            pandas dataframe
            class labels

        Returns
        -------
        self: object
        """
        if isinstance(X, pd.DataFrame):
            df = X
            # Fitting also (re)builds the vectorizer and derives y from the
            # target column, so it cannot share _prepare_features.
            (X, y, self.vectorizer) = self.convert_numpy(df)
        else:
            check_X_y(X, y)
        self.clf.fit(X, y)
        return self

    def predict_proba(self, X):
        """
        Returns class probability estimates for the given test data.

        X : pandas dataframe or array-like
            Test samples

        Returns
        -------
        proba : array-like, shape = (n_samples, n_outputs)
            Class probability estimates.
        """
        return self.clf.predict_proba(self._prepare_features(X))

    def predict(self, X):
        """
        Returns class predictions.

        X : pandas dataframe or array-like
            Test samples

        Returns
        -------
        proba : array-like, shape = (n_samples, n_outputs)
            Class predictions
        """
        return self.clf.predict(self._prepare_features(X))
| apache-2.0 |
pforret/python-for-android | python-modules/twisted/twisted/python/text.py | 49 | 6637 | # -*- test-case-name: twisted.test.test_text -*-
#
# Copyright (c) 2001-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Miscellany of text-munging functions.
"""
import string, types
from twisted.python import deprecate, versions
def stringyString(object, indentation=''):
    """
    Expansive string formatting for sequence types.

    list.__str__ and dict.__str__ use repr() to display their
    elements. This function also turns these sequence types
    into strings, but uses str() on their elements instead.

    Sequence elements are also displayed on separate lines,
    and nested sequences have nested indentation.
    """
    # NOTE: this is Python 2 code (types.DictType, the `string` module).
    braces = ''
    sl = []
    if type(object) is types.DictType:
        braces = '{}'
        for key, value in object.items():
            # Recurse with a deeper indent for the value.
            value = stringyString(value, indentation + ' ')
            if isMultiline(value):
                if endsInNewline(value):
                    value = value[:-len('\n')]
                sl.append("%s %s:\n%s" % (indentation, key, value))
            else:
                # Oops. Will have to move that indentation.
                sl.append("%s %s: %s" % (indentation, key,
                                         value[len(indentation) + 3:]))
    elif type(object) in (types.TupleType, types.ListType):
        if type(object) is types.TupleType:
            braces = '()'
        else:
            braces = '[]'
        for element in object:
            element = stringyString(element, indentation + ' ')
            sl.append(string.rstrip(element) + ',')
    else:
        # Scalar fallback: indent every line of str(object).
        sl[:] = map(lambda s, i=indentation: i+s,
                    string.split(str(object),'\n'))
    if not sl:
        sl.append(indentation)
    if braces:
        # Splice the opening brace into the first line and append the
        # closing brace to the last.
        sl[0] = indentation + braces[0] + sl[0][len(indentation) + 1:]
        sl[-1] = sl[-1] + braces[-1]
    s = string.join(sl, "\n")
    if isMultiline(s) and not endsInNewline(s):
        s = s + '\n'
    return s
def isMultiline(s):
    """Returns True if this string has a newline in it."""
    # The `in` operator replaces the deprecated string.find(); it works on
    # both Python 2 and 3 and reads better.
    return '\n' in s
def endsInNewline(s):
    """Returns True if this string ends in a newline."""
    # str.endswith replaces the manual slice comparison; identical result,
    # including for the empty string (False).
    return s.endswith('\n')
# Register docstringLStrip as deprecated since Twisted 10.2.0; accessing it
# emits a DeprecationWarning pointing at inspect.getdoc.
deprecate.deprecatedModuleAttribute(
    versions.Version("Twisted", 10, 2, 0),
    "Please use inspect.getdoc instead.",
    __name__, "docstringLStrip")

def docstringLStrip(docstring):
    """
    Gets rid of unsightly lefthand docstring whitespace residue.

    You'd think someone would have done this already, but apparently
    not in 1.5.2.

    BUT since we're all using Python 2.1 now, use L{inspect.getdoc}
    instead. I{This function should go away soon.}
    """
    if not docstring:
        return docstring
    docstring = string.replace(docstring, '\t', ' ' * 8)
    lines = string.split(docstring,'\n')
    leading = 0
    # Determine the common leading-space count from the first non-blank
    # line after the summary line.
    for l in xrange(1,len(lines)):
        line = lines[l]
        if string.strip(line):
            while 1:
                if line[leading] == ' ':
                    leading = leading + 1
                else:
                    break
            if leading:
                break
    # Keep the first line as-is, strip `leading` columns from the rest.
    outlines = lines[0:1]
    for l in xrange(1,len(lines)):
        outlines.append(lines[l][leading:])
    return string.join(outlines, '\n')
def greedyWrap(inString, width=80):
    """Given a string and a column width, return a list of lines.

    Caveat: I'm use a stupid greedy word-wrapping
    algorythm. I won't put two spaces at the end
    of a sentence. I don't do full justification.
    And no, I've never even *heard* of hypenation.
    """
    # Uses str methods instead of the deprecated `string` module functions;
    # behavior is unchanged.
    outLines = []

    # eww, evil hacks to allow paragraphs delimited by two \ns :(
    if inString.find('\n\n') >= 0:
        paragraphs = inString.split('\n\n')
        for para in paragraphs:
            # Bug fix: the recursive call previously dropped `width` and
            # wrapped every paragraph at the default 80 columns.
            outLines.extend(greedyWrap(para, width) + [''])
        return outLines

    inWords = inString.split()
    column = 0
    ptr_line = 0
    while inWords:
        column = column + len(inWords[ptr_line])
        ptr_line = ptr_line + 1
        if column > width:
            if ptr_line == 1:
                # This single word is too long, it will be the whole line.
                pass
            else:
                # We've gone too far, stop the line one word back.
                ptr_line = ptr_line - 1
            (l, inWords) = (inWords[0:ptr_line], inWords[ptr_line:])
            outLines.append(' '.join(l))
            ptr_line = 0
            column = 0
        elif not (len(inWords) > ptr_line):
            # Clean up the last bit.
            outLines.append(' '.join(inWords))
            del inWords[:]
        else:
            # Account for the space between words.
            column = column + 1
    return outLines

wordWrap = greedyWrap
def removeLeadingBlanks(lines):
    """Return *lines* with blank (whitespace-only) lines dropped from the
    front; everything from the first non-blank line on is kept."""
    for index, line in enumerate(lines):
        if line.strip():
            return lines[index:]
    return []

def removeLeadingTrailingBlanks(s):
    """Strip blank lines from both ends of *s* and terminate the result with
    a single newline."""
    kept = removeLeadingBlanks(s.split('\n'))
    trimmed = removeLeadingBlanks(list(reversed(kept)))
    trimmed.reverse()
    return '\n'.join(trimmed) + '\n'
def splitQuoted(s):
    """Like string.split, but don't break substrings inside quotes.

    >>> splitQuoted('the \"hairy monkey\" likes pie')
    ['the', 'hairy monkey', 'likes', 'pie']

    Quotes may be single or double. This remains the same quick-and-dirty
    implementation it always was.
    """
    result = []
    quoteChar = None
    accumulating = None  # None => not inside a quoted phrase
    for token in s.split():
        if accumulating is None and token and token[0] in ("\"", "'"):
            # Opening quote: remember which quote character to look for.
            quoteChar = token[0]
            token = token[1:]
            accumulating = []
        if accumulating is None:
            result.append(token)
        elif token and token[-1] == quoteChar:
            # Closing quote: flush the accumulated phrase as one word.
            accumulating.append(token[:-1])
            result.append(" ".join(accumulating))
            accumulating = None
        else:
            accumulating.append(token)
    return result
def strFile(p, f, caseSensitive=True):
    """Find whether string p occurs in a read()able object f

    @rtype: C{bool}
    """
    buf = ""
    # Window size: at least 64KiB (2**2**2**2 == 2**16 == 65536), and never
    # smaller than the pattern itself.
    buf_len = max(len(p), 2**2**2**2)
    if not caseSensitive:
        p = p.lower()
    while 1:
        # Read enough to fill the window while keeping len(p) characters of
        # overlap, so matches spanning chunk boundaries are still found.
        r = f.read(buf_len-len(p))
        if not caseSensitive:
            r = r.lower()
        bytes_read = len(r)
        if bytes_read == 0:
            # EOF reached without a match.
            return False
        # Keep only the trailing buf_len characters of buf + new data.
        l = len(buf)+bytes_read-buf_len
        if l <= 0:
            buf = buf + r
        else:
            buf = buf[l:] + r
        if buf.find(p) != -1:
            return True
| apache-2.0 |
blacklin/kbengine | kbe/src/lib/python/Lib/test/test_platform.py | 88 | 12413 | from unittest import mock
import os
import platform
import subprocess
import sys
import tempfile
import unittest
import warnings
from test import support
class PlatformTest(unittest.TestCase):
    """Tests for the stdlib ``platform`` module.

    Many of these are smoke tests: they only call the API and bind the
    result, asserting nothing beyond "does not raise".
    """

    def test_architecture(self):
        res = platform.architecture()

    @support.skip_unless_symlink
    def test_architecture_via_symlink(self): # issue3762
        # On Windows, the EXE needs to know where pythonXY.dll is at so we have
        # to add the directory to the path.
        if sys.platform == "win32":
            os.environ["Path"] = "{};{}".format(
                os.path.dirname(sys.executable), os.environ["Path"])

        def get(python):
            cmd = [python, '-c',
                'import platform; print(platform.architecture())']
            p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
            return p.communicate()
        real = os.path.realpath(sys.executable)
        link = os.path.abspath(support.TESTFN)
        os.symlink(real, link)
        try:
            self.assertEqual(get(real), get(link))
        finally:
            os.remove(link)

    def test_platform(self):
        # Exercise all four aliased/terse combinations.
        for aliased in (False, True):
            for terse in (False, True):
                res = platform.platform(aliased, terse)

    def test_system(self):
        res = platform.system()

    def test_node(self):
        res = platform.node()

    def test_release(self):
        res = platform.release()

    def test_version(self):
        res = platform.version()

    def test_machine(self):
        res = platform.machine()

    def test_processor(self):
        res = platform.processor()

    # setUp/tearDown save and restore the sys attributes that
    # test_sys_version mutates.
    def setUp(self):
        self.save_version = sys.version
        self.save_mercurial = sys._mercurial
        self.save_platform = sys.platform

    def tearDown(self):
        sys.version = self.save_version
        sys._mercurial = self.save_mercurial
        sys.platform = self.save_platform

    def test_sys_version(self):
        # Old test.
        for input, output in (
            ('2.4.3 (#1, Jun 21 2006, 13:54:21) \n[GCC 3.3.4 (pre 3.3.5 20040809)]',
             ('CPython', '2.4.3', '', '', '1', 'Jun 21 2006 13:54:21', 'GCC 3.3.4 (pre 3.3.5 20040809)')),
            ('IronPython 1.0.60816 on .NET 2.0.50727.42',
             ('IronPython', '1.0.60816', '', '', '', '', '.NET 2.0.50727.42')),
            ('IronPython 1.0 (1.0.61005.1977) on .NET 2.0.50727.42',
             ('IronPython', '1.0.0', '', '', '', '', '.NET 2.0.50727.42')),
            ):
            # branch and revision are not "parsed", but fetched
            # from sys._mercurial. Ignore them
            (name, version, branch, revision, buildno, builddate, compiler) \
                = platform._sys_version(input)
            self.assertEqual(
                (name, version, '', '', buildno, builddate, compiler), output)

        # Tests for python_implementation(), python_version(), python_branch(),
        # python_revision(), python_build(), and python_compiler().
        # Keys are (sys.version, sys._mercurial-or-None, sys.platform);
        # values are the expected results of the accessor functions.
        sys_versions = {
            ("2.6.1 (r261:67515, Dec 6 2008, 15:26:00) \n[GCC 4.0.1 (Apple Computer, Inc. build 5370)]",
             ('CPython', 'tags/r261', '67515'), self.save_platform)
            :
                ("CPython", "2.6.1", "tags/r261", "67515",
                 ('r261:67515', 'Dec 6 2008 15:26:00'),
                 'GCC 4.0.1 (Apple Computer, Inc. build 5370)'),
            ("IronPython 2.0 (2.0.0.0) on .NET 2.0.50727.3053", None, "cli")
            :
                ("IronPython", "2.0.0", "", "", ("", ""),
                 ".NET 2.0.50727.3053"),
            ("2.6.1 (IronPython 2.6.1 (2.6.10920.0) on .NET 2.0.50727.1433)", None, "cli")
            :
                ("IronPython", "2.6.1", "", "", ("", ""),
                 ".NET 2.0.50727.1433"),
            ("2.7.4 (IronPython 2.7.4 (2.7.0.40) on Mono 4.0.30319.1 (32-bit))", None, "cli")
            :
                ("IronPython", "2.7.4", "", "", ("", ""),
                 "Mono 4.0.30319.1 (32-bit)"),
            ("2.5 (trunk:6107, Mar 26 2009, 13:02:18) \n[Java HotSpot(TM) Client VM (\"Apple Computer, Inc.\")]",
             ('Jython', 'trunk', '6107'), "java1.5.0_16")
            :
                ("Jython", "2.5.0", "trunk", "6107",
                 ('trunk:6107', 'Mar 26 2009'), "java1.5.0_16"),
            ("2.5.2 (63378, Mar 26 2009, 18:03:29)\n[PyPy 1.0.0]",
             ('PyPy', 'trunk', '63378'), self.save_platform)
            :
                ("PyPy", "2.5.2", "trunk", "63378", ('63378', 'Mar 26 2009'),
                 "")
            }
        for (version_tag, subversion, sys_platform), info in \
                sys_versions.items():
            sys.version = version_tag
            if subversion is None:
                if hasattr(sys, "_mercurial"):
                    del sys._mercurial
            else:
                sys._mercurial = subversion
            if sys_platform is not None:
                sys.platform = sys_platform
            self.assertEqual(platform.python_implementation(), info[0])
            self.assertEqual(platform.python_version(), info[1])
            self.assertEqual(platform.python_branch(), info[2])
            self.assertEqual(platform.python_revision(), info[3])
            self.assertEqual(platform.python_build(), info[4])
            self.assertEqual(platform.python_compiler(), info[5])

    def test_system_alias(self):
        res = platform.system_alias(
            platform.system(),
            platform.release(),
            platform.version(),
        )

    def test_uname(self):
        # uname() is a named tuple; positional and attribute access must agree.
        res = platform.uname()
        self.assertTrue(any(res))
        self.assertEqual(res[0], res.system)
        self.assertEqual(res[1], res.node)
        self.assertEqual(res[2], res.release)
        self.assertEqual(res[3], res.version)
        self.assertEqual(res[4], res.machine)
        self.assertEqual(res[5], res.processor)

    @unittest.skipUnless(sys.platform.startswith('win'), "windows only test")
    def test_uname_win32_ARCHITEW6432(self):
        # Issue 7860: make sure we get architecture from the correct variable
        # on 64 bit Windows: if PROCESSOR_ARCHITEW6432 exists we should be
        # using it, per
        # http://blogs.msdn.com/david.wang/archive/2006/03/26/HOWTO-Detect-Process-Bitness.aspx
        try:
            with support.EnvironmentVarGuard() as environ:
                if 'PROCESSOR_ARCHITEW6432' in environ:
                    del environ['PROCESSOR_ARCHITEW6432']
                environ['PROCESSOR_ARCHITECTURE'] = 'foo'
                platform._uname_cache = None
                system, node, release, version, machine, processor = platform.uname()
                self.assertEqual(machine, 'foo')
                environ['PROCESSOR_ARCHITEW6432'] = 'bar'
                platform._uname_cache = None
                system, node, release, version, machine, processor = platform.uname()
                self.assertEqual(machine, 'bar')
        finally:
            # Always invalidate the cache so later tests recompute uname.
            platform._uname_cache = None

    def test_java_ver(self):
        res = platform.java_ver()
        if sys.platform == 'java':
            self.assertTrue(all(res))

    def test_win32_ver(self):
        res = platform.win32_ver()

    def test_mac_ver(self):
        res = platform.mac_ver()
        if platform.uname().system == 'Darwin':
            # We're on a MacOSX system, check that
            # the right version information is returned
            fd = os.popen('sw_vers', 'r')
            real_ver = None
            for ln in fd:
                if ln.startswith('ProductVersion:'):
                    real_ver = ln.strip().split()[-1]
                    break
            fd.close()
            self.assertFalse(real_ver is None)
            result_list = res[0].split('.')
            expect_list = real_ver.split('.')
            len_diff = len(result_list) - len(expect_list)
            # On Snow Leopard, sw_vers reports 10.6.0 as 10.6
            if len_diff > 0:
                expect_list.extend(['0'] * len_diff)
            self.assertEqual(result_list, expect_list)
            # res[1] claims to contain
            # (version, dev_stage, non_release_version)
            # That information is no longer available
            self.assertEqual(res[1], ('', '', ''))
            if sys.byteorder == 'little':
                self.assertIn(res[2], ('i386', 'x86_64'))
            else:
                self.assertEqual(res[2], 'PowerPC')

    @unittest.skipUnless(sys.platform == 'darwin', "OSX only test")
    def test_mac_ver_with_fork(self):
        # Issue7895: platform.mac_ver() crashes when using fork without exec
        #
        # This test checks that the fix for that issue works.
        #
        pid = os.fork()
        if pid == 0:
            # child
            info = platform.mac_ver()
            os._exit(0)
        else:
            # parent
            cpid, sts = os.waitpid(pid, 0)
            self.assertEqual(cpid, pid)
            self.assertEqual(sts, 0)

    def test_dist(self):
        res = platform.dist()

    def test_libc_ver(self):
        import os
        if os.path.isdir(sys.executable) and \
           os.path.exists(sys.executable+'.exe'):
            # Cygwin horror
            executable = sys.executable + '.exe'
        else:
            executable = sys.executable
        res = platform.libc_ver(executable)

    def test_parse_release_file(self):
        for input, output in (
            # Examples of release file contents:
            ('SuSE Linux 9.3 (x86-64)', ('SuSE Linux ', '9.3', 'x86-64')),
            ('SUSE LINUX 10.1 (X86-64)', ('SUSE LINUX ', '10.1', 'X86-64')),
            ('SUSE LINUX 10.1 (i586)', ('SUSE LINUX ', '10.1', 'i586')),
            ('Fedora Core release 5 (Bordeaux)', ('Fedora Core', '5', 'Bordeaux')),
            ('Red Hat Linux release 8.0 (Psyche)', ('Red Hat Linux', '8.0', 'Psyche')),
            ('Red Hat Linux release 9 (Shrike)', ('Red Hat Linux', '9', 'Shrike')),
            ('Red Hat Enterprise Linux release 4 (Nahant)', ('Red Hat Enterprise Linux', '4', 'Nahant')),
            ('CentOS release 4', ('CentOS', '4', None)),
            ('Rocks release 4.2.1 (Cydonia)', ('Rocks', '4.2.1', 'Cydonia')),
            ('', ('', '', '')), # If there's nothing there.
            ):
            self.assertEqual(platform._parse_release_file(input), output)

    def test_popen(self):
        # platform.popen is deprecated; suppress the warnings it emits.
        mswindows = (sys.platform == "win32")

        if mswindows:
            command = '"{}" -c "print(\'Hello\')"'.format(sys.executable)
        else:
            command = "'{}' -c 'print(\"Hello\")'".format(sys.executable)
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", DeprecationWarning)
            with platform.popen(command) as stdout:
                hello = stdout.read().strip()
                stdout.close()
                self.assertEqual(hello, "Hello")

        data = 'plop'
        if mswindows:
            command = '"{}" -c "import sys; data=sys.stdin.read(); exit(len(data))"'
        else:
            command = "'{}' -c 'import sys; data=sys.stdin.read(); exit(len(data))'"
        command = command.format(sys.executable)
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", DeprecationWarning)
            with platform.popen(command, 'w') as stdin:
                stdout = stdin.write(data)
                ret = stdin.close()
                self.assertIsNotNone(ret)
                if os.name == 'nt':
                    returncode = ret
                else:
                    # POSIX wait status: exit code lives in the high byte.
                    returncode = ret >> 8
                self.assertEqual(returncode, len(data))

    def test_linux_distribution_encoding(self):
        # Issue #17429
        with tempfile.TemporaryDirectory() as tempdir:
            filename = os.path.join(tempdir, 'fedora-release')
            with open(filename, 'w', encoding='utf-8') as f:
                f.write('Fedora release 19 (Schr\xf6dinger\u2019s Cat)\n')

            with mock.patch('platform._UNIXCONFDIR', tempdir):
                distname, version, distid = platform.linux_distribution()

            self.assertEqual(distname, 'Fedora')
            self.assertEqual(version, '19')
            self.assertEqual(distid, 'Schr\xf6dinger\u2019s Cat')
def test_main():
    # Entry point used by the regression-test machinery.
    support.run_unittest(
        PlatformTest
    )

if __name__ == '__main__':
    test_main()
| lgpl-3.0 |
Team-Zeus/okapi | setup.py | 1 | 1584 | #!/usr/bin/env python3
import os
from distutils.core import setup
from setuptools import find_packages
import okapi
def read(fname):
    """Return the text of *fname*, resolved relative to this file's directory.

    Uses a context manager so the file handle is always closed (the original
    leaked it), and reads as UTF-8 explicitly instead of relying on the
    platform's locale encoding.
    """
    path = os.path.join(os.path.dirname(__file__), fname)
    with open(path, encoding='utf-8') as f:
        return f.read()
README = read('README.rst')
CHANGES = read('CHANGES.rst')

setup(
    name='tz-okapi',
    version=okapi.__version__,
    url='https://github.com/Team-Zeus/okapi',
    description=okapi.__doc__.strip(),
    # PyPI long description: README followed by the changelog.
    long_description='\n\n'.join([README, CHANGES]),
    keywords=['api', 'rest', 'restructuredtext', 'rst'],
    classifiers=[
        'Development Status :: 4 - Beta',
        'License :: OSI Approved :: MIT License',
        'Environment :: Console',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 3 :: Only',
        'Programming Language :: Python :: 3.4',
        'Topic :: Documentation',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Software Development :: Documentation',
        'Topic :: Software Development :: Testing',
        'Topic :: Text Processing :: Markup',
        'Topic :: Utilities'
    ],
    include_package_data=True,
    packages=find_packages(),
    # Non-Python assets shipped inside the okapi package.
    package_data={'okapi': [
        'templates/*.html',
        'templates/exports/*.html',
        'static/js/*.js',
        'static/js/*.swf',
        'static/css/*.css'
    ]},
    scripts=['bin/okapi'],
    install_requires=[
        'colorama >= 0.3.3',
        'docopt >= 0.6.1',
        'docutils >= 0.12',
        'Jinja2 >= 2.7.3',
        'Pygments >= 2.0.2',
        'requests >= 2.7.0',
        'urllib3 >= 1.10.4'
    ],
)
| mit |
lsqtongxin/django | django/contrib/auth/management/__init__.py | 348 | 6851 | """
Creates permissions for all installed apps that need permissions.
"""
from __future__ import unicode_literals
import getpass
import unicodedata
from django.apps import apps
from django.contrib.auth import get_permission_codename
from django.core import exceptions
from django.core.management.base import CommandError
from django.db import DEFAULT_DB_ALIAS, router
from django.utils import six
from django.utils.encoding import DEFAULT_LOCALE_ENCODING
def _get_all_permissions(opts, ctype):
    """
    Returns (codename, name) for all permissions in the given opts.

    Builtin (add/change/delete) permissions come first, followed by any
    custom permissions declared in Meta.permissions. Raises CommandError
    (via _check_permission_clashing) on duplicate or clashing codenames.
    """
    builtin = _get_builtin_permissions(opts)
    custom = list(opts.permissions)
    _check_permission_clashing(custom, builtin, ctype)
    return builtin + custom
def _get_builtin_permissions(opts):
    """
    Returns (codename, name) for all autogenerated permissions.

    By default, this is ('add', 'change', 'delete')
    """
    return [
        (get_permission_codename(action, opts),
         'Can %s %s' % (action, opts.verbose_name_raw))
        for action in opts.default_permissions
    ]
def _check_permission_clashing(custom, builtin, ctype):
"""
Check that permissions for a model do not clash. Raises CommandError if
there are duplicate permissions.
"""
pool = set()
builtin_codenames = set(p[0] for p in builtin)
for codename, _name in custom:
if codename in pool:
raise CommandError(
"The permission codename '%s' is duplicated for model '%s.%s'." %
(codename, ctype.app_label, ctype.model_class().__name__))
elif codename in builtin_codenames:
raise CommandError(
"The permission codename '%s' clashes with a builtin permission "
"for model '%s.%s'." %
(codename, ctype.app_label, ctype.model_class().__name__))
pool.add(codename)
def create_permissions(app_config, verbosity=2, interactive=True, using=DEFAULT_DB_ALIAS, **kwargs):
    """Create any missing Permission rows for the models of *app_config*
    in the *using* database. Safe to call repeatedly; existing permissions
    are left untouched."""
    if not app_config.models_module:
        return

    # Bail out quietly if the auth app (and thus Permission) isn't installed,
    # or if the router forbids migrating Permission to this database.
    try:
        Permission = apps.get_model('auth', 'Permission')
    except LookupError:
        return

    if not router.allow_migrate_model(using, Permission):
        return

    from django.contrib.contenttypes.models import ContentType

    permission_name_max_length = Permission._meta.get_field('name').max_length
    verbose_name_max_length = permission_name_max_length - 11  # len('Can change ') prefix

    # This will hold the permissions we're looking for as
    # (content_type, (codename, name))
    searched_perms = list()
    # The codenames and ctypes that should exist.
    ctypes = set()
    for klass in app_config.get_models():
        # Force looking up the content types in the current database
        # before creating foreign keys to them.
        ctype = ContentType.objects.db_manager(using).get_for_model(klass)

        if len(klass._meta.verbose_name) > verbose_name_max_length:
            raise exceptions.ValidationError(
                "The verbose_name of %s.%s is longer than %s characters" % (
                    ctype.app_label,
                    ctype.model,
                    verbose_name_max_length,
                )
            )

        ctypes.add(ctype)
        for perm in _get_all_permissions(klass._meta, ctype):
            searched_perms.append((ctype, perm))

    # Find all the Permissions that have a content_type for a model we're
    # looking for. We don't need to check for codenames since we already have
    # a list of the ones we're going to create.
    all_perms = set(Permission.objects.using(using).filter(
        content_type__in=ctypes,
    ).values_list(
        "content_type", "codename"
    ))

    perms = [
        Permission(codename=codename, name=name, content_type=ct)
        for ct, (codename, name) in searched_perms
        if (ct.pk, codename) not in all_perms
    ]

    # Validate the permissions before bulk_creation to avoid cryptic database
    # error when the name is longer than 255 characters
    for perm in perms:
        if len(perm.name) > permission_name_max_length:
            raise exceptions.ValidationError(
                "The permission name %s of %s.%s is longer than %s characters" % (
                    perm.name,
                    perm.content_type.app_label,
                    perm.content_type.model,
                    permission_name_max_length,
                )
            )

    Permission.objects.using(using).bulk_create(perms)
    if verbosity >= 2:
        for perm in perms:
            print("Adding permission '%s'" % perm)
def get_system_username():
    """
    Try to determine the current system user's username.

    :returns: The username as a unicode string, or an empty string if the
        username could not be determined.
    """
    try:
        result = getpass.getuser()
    except (ImportError, KeyError):
        # KeyError will be raised by os.getpwuid() (called by getuser())
        # if there is no corresponding entry in the /etc/passwd file
        # (a very restricted chroot environment, for example).
        return ''
    if six.PY2:
        # On Python 2 getuser() returns bytes; decode using the locale
        # encoding so the rest of the code always sees unicode.
        try:
            result = result.decode(DEFAULT_LOCALE_ENCODING)
        except UnicodeDecodeError:
            # UnicodeDecodeError - preventive treatment for non-latin Windows.
            return ''
    return result
def get_default_username(check_db=True):
    """
    Try to determine the current system user's username to use as a default.

    :param check_db: If ``True``, requires that the username does not match an
        existing ``auth.User`` (otherwise returns an empty string).
    :returns: The username, or an empty string if no username can be
        determined.
    """
    # This file is used in apps.py, it should not trigger models import.
    from django.contrib.auth import models as auth_app

    # If the User model has been swapped out, we can't make any assumptions
    # about the default user name.
    if auth_app.User._meta.swapped:
        return ''

    default_username = get_system_username()
    try:
        # ASCII-fold the name (strip accents, spaces, uppercase) so it is a
        # plausible username on any system.
        default_username = (unicodedata.normalize('NFKD', default_username)
                            .encode('ascii', 'ignore').decode('ascii')
                            .replace(' ', '').lower())
    except UnicodeDecodeError:
        return ''

    # Run the username validator
    try:
        auth_app.User._meta.get_field('username').run_validators(default_username)
    except exceptions.ValidationError:
        return ''

    # Don't return the default username if it is already taken.
    if check_db and default_username:
        try:
            auth_app.User._default_manager.get(username=default_username)
        except auth_app.User.DoesNotExist:
            pass
        else:
            return ''
    return default_username
| bsd-3-clause |
delfick/harpoon | tests/test_formatter.py | 1 | 2761 | # coding: spec
from harpoon.errors import BadOptionFormat, NoSuchEnvironmentVariable
from harpoon.formatter import MergedOptionStringFormatter
from tests.helpers import HarpoonCase
from delfick_project.errors_pytest import assertRaises
from delfick_project.option_merge import MergedOptions
from delfick_project.norms import sb
import uuid
import os
# NOTE(review): this file uses the nose-of-yeti "spec" codec (see the
# ``# coding: spec`` header): ``describe``/``it`` are DSL keywords that the
# codec compiles into a TestCase subclass and test methods. This is not
# plain Python syntax.
describe HarpoonCase, "MergedOptionStringFormatter":
    def check_formatting(
        self, configuration, value, expected=sb.NotSpecified, **configuration_kwargs
    ):
        # Wrap plain dicts in MergedOptions, format `value` through the
        # formatter and compare against `expected`.
        if not isinstance(configuration, MergedOptions):
            configuration = MergedOptions.using(configuration, **configuration_kwargs)

        formatter = MergedOptionStringFormatter(configuration, value)
        got = formatter.format()

        # Caller must check for exceptions if expected is not specified
        if expected is sb.NotSpecified:
            assert False, "Tester must specify what is expected"

        assert got == expected

    it "formats from the configuration":
        self.check_formatting({"vars": "one"}, "{vars}", expected="one")

    it "returns as is if formatting to just one value that is a dict":

        class dictsub(dict):
            pass

        vrs = dictsub({1: 2, 3: 4})
        self.check_formatting({"vars": vrs}, "{vars}", expected=vrs, dont_prefix=[dictsub])

    it "formats :env as a bash variable":
        self.check_formatting({}, "{blah:env} stuff", expected="${blah} stuff")

    it "formats :from_env as from the environment":
        try:
            value = str(uuid.uuid1())
            assert "WAT_ENV" not in os.environ
            os.environ["WAT_ENV"] = value
            self.check_formatting(
                {}, "{WAT_ENV:from_env} stuff", expected="{0} stuff".format(value)
            )
        finally:
            # Never leak the test variable into later tests.
            if "WAT_ENV" in os.environ:
                del os.environ["WAT_ENV"]

    it "complains if from_env references a variable that doesn't exist":
        assert "WAT_ENV" not in os.environ
        with assertRaises(NoSuchEnvironmentVariable, wanted="WAT_ENV"):
            self.check_formatting({}, "{WAT_ENV:from_env} stuff")

    it "formats formatted values":
        self.check_formatting({"one": "{two}", "two": 2}, "{one}", expected="2")

    it "complains about circular references":
        with assertRaises(BadOptionFormat, "Recursive option", chain=["two", "one", "two"]):
            self.check_formatting(
                {"one": "{two}", "two": "{one}"}, "{two}", expected="circular reference"
            )

    it "can format into nested dictionaries because MergedOptions is awesome":
        self.check_formatting(
            {"one": {"two": {"three": 4, "five": 5}, "six": 6}}, "{one.two.three}", expected="4"
        )
| mit |
MatthewSilverstein/pystach2 | pystache/template_spec.py | 50 | 1725 | # coding: utf-8
"""
Provides a class to customize template information on a per-view basis.
To customize template properties for a particular view, create that view
from a class that subclasses TemplateSpec. The "spec" in TemplateSpec
stands for "special" or "specified" template information.
"""
class TemplateSpec(object):

    """
    A mixin or interface for specifying custom template information.

    The "spec" in TemplateSpec can be taken to mean that the template
    information is either "specified" or "special."

    A view should subclass this class only if customized template loading
    is needed. The following attributes allow one to customize/override
    template information on a per view basis. A None value means to use
    default behavior for that value and perform no customization. All
    attributes are initialized to None.

    Attributes:

      template: the template as a string.

      template_encoding: the encoding used by the template.

      template_extension: the template file extension. Defaults to "mustache".
        Pass False for no extension (i.e. extensionless template files).

      template_name: the name of the template.

      template_path: absolute path to the template.

      template_rel_directory: the directory containing the template file,
        relative to the directory containing the module defining the class.

      template_rel_path: the path to the template file, relative to the
        directory containing the module defining the class.

    """

    # All attributes default to None, meaning "no customization": the
    # renderer/loader decides. Subclasses override only what they need.
    template = None
    template_encoding = None
    template_extension = None
    template_name = None
    template_path = None
    template_rel_directory = None
    template_rel_path = None
| mit |
cn-app-registry/cnr-server | appr/models/kv/models_index_base.py | 2 | 13077 | from __future__ import absolute_import, division, print_function
import datetime
import json
import appr.semver
from appr.exception import (PackageAlreadyExists, PackageNotFound, ResourceNotFound,
raise_channel_not_found, raise_package_not_found)
DEFAULT_LOCK_TIMEOUT = 3
class ModelsIndexBase(object):
    """Backend-agnostic index of packages/releases/channels stored as JSON
    documents in a key/value store.

    Subclasses supply the storage primitives (_fetch_raw_data,
    _write_raw_data, _delete_data) and the locking primitives
    (_get_lock, _release_lock).  All mutating operations take a lock on
    the document they rewrite (read-modify-write), so concurrency safety
    depends entirely on the backend lock implementation.

    NOTE(review): uses dict.iteritems()/.keys()-as-list idioms, so this
    class is Python 2 only as written.
    """

    # Key of the global package-index document.
    packages_key = "packages.json"

    def __init__(self, package=None):
        # Cached parsed documents; lazily loaded by the *_data properties.
        self._packages = None
        self._releases = None
        # Package name "namespace/name" this index instance operates on.
        self.package = package
        # Lock keys currently held by this instance (prevents re-locking).
        self.locks = set()

    @property
    def releases_key(self):
        # Per-package releases document key, e.g. "ns/name/releases.json".
        return self.package + "/" + "releases.json"

    @property
    def releases_data(self):
        # Lazily fetch and cache the releases document for self.package.
        # Raises (via raise_package_not_found) if the document is missing.
        path = self.releases_key
        if self._releases is None:
            try:
                self._releases = self._fetch_data(path)
            except ResourceNotFound:
                raise_package_not_found(self.package)
        return self._releases

    def blob_key(self, digest, mod="sha256"):
        # Storage key for a content-addressed blob of this package.
        return "%s/digests/%s/%s" % (self.package, mod, digest)

    def add_blob(self, b64blob, digest):
        """Store a base64-encoded blob under its digest key."""
        try:
            path = self.blob_key(digest)
            self.get_lock(path)
            self._write_raw_data(path, b64blob)
            return True
        finally:
            self.release_lock(path)

    def delete_blob(self, digest):
        """Delete the blob stored under `digest`."""
        try:
            path = self.blob_key(digest)
            self.get_lock(path)
            self._delete_data(path)
            return True
        finally:
            self.release_lock(path)

    def get_blob(self, digest):
        """Return the raw (base64) blob for `digest`, or raise not-found."""
        try:
            path = self.blob_key(digest)
            return self._fetch_raw_data(path)
        except ResourceNotFound:
            raise_package_not_found(self.package, digest)

    def add_package(self, package_name):
        """Register `namespace/name` in the global package index (no-op if
        it already exists)."""
        try:
            self.get_lock(self.packages_key)
            namespace, name = package_name.split("/")
            if namespace not in self.packages_data['packages']:
                self.packages_data['packages'][namespace] = {}
            if name not in self.packages_data['packages'][namespace]:
                pdata = {
                    "created_at": datetime.datetime.utcnow().isoformat(),
                    'name': name,
                    'namespace': namespace}
                self.packages_data['packages'][namespace][name] = pdata
                self._write_data(self.packages_key, self.packages_data)
        finally:
            self.release_lock(self.packages_key)

    def delete_package(self, package_name):
        """Remove `namespace/name` from the global index.

        Returns the removed package dict, or None if it was not present.
        Also drops the namespace entry when it becomes empty.
        """
        try:
            self.get_lock(self.packages_key)
            namespace, name = package_name.split("/")
            if (namespace not in self.packages_data['packages'] or
                name not in self.packages_data['packages'][namespace]):
                return None
            pdata = self.packages_data['packages'][namespace].pop(name)
            if not self.packages_data['packages'][namespace]:
                self.packages_data['packages'].pop(namespace)
            self._write_data(self.packages_key, self.packages_data)
            return pdata
        finally:
            self.release_lock(self.packages_key)

    def add_release(self, package_data, release, media_type, force=False):
        """Attach a manifest (`package_data`) to `release` under `media_type`.

        Creates the releases document / release entry on first use.
        Raises PackageAlreadyExists when the manifest exists and
        force is False.  Returns the updated releases document.
        """
        try:
            self.get_lock(self.releases_key)
            try:
                data = self.releases_data
            except PackageNotFound:
                # First release for this package: start an empty document.
                data = {'page': 0, 'channels': {}, 'releases': {}}
            if release not in data['releases']:
                data['releases'][release] = {'manifests': {}, 'channels': []}
            if (release in data['releases'] and
                media_type in data['releases'][release]['manifests'] and not force):
                raise PackageAlreadyExists("Package exists already", {
                    "package": self.package,
                    "release": release,
                    "media_type": media_type})
            data['releases'][release]['manifests'][media_type] = package_data
            self._write_data(self.releases_key, data)
            # Ensure the package is listed in the global index too.
            self.add_package(self.package)
            return data
        finally:
            self.release_lock(self.releases_key)

    def delete_release(self, release, media_type):
        """Remove one manifest; cascades to removing the release (and the
        whole package from the global index) when nothing is left."""
        try:
            self.get_lock(self.releases_key)
            data = self.releases_data
            if release not in data['releases'] or media_type not in data['releases'][release][
                    'manifests']:
                raise_package_not_found(self.package)
            data['releases'][release]['manifests'].pop(media_type)
            if not data['releases'][release]['manifests']:
                data['releases'].pop(release)
            if not data['releases']:
                self.delete_package(self.package)
            self._write_data(self.releases_key, data)
            return True
        finally:
            self.release_lock(self.releases_key)

    @property
    def packages_data(self):
        # Lazily fetch the global index; create an empty one on first use.
        if self._packages is None:
            try:
                self._packages = self._fetch_data(self.packages_key)
            except ResourceNotFound:
                try:
                    # timeout=None: block until the creation lock is ours.
                    self.get_lock(self.packages_key, timeout=None)
                    self._packages = {"page": 0, "packages": {}}
                    self._write_data(self.packages_key, self._packages)
                finally:
                    self.release_lock(self.packages_key)
        return self._packages

    def releases(self, media_type=None):
        """Release names for this package, optionally filtered to those
        carrying a manifest of `media_type`."""
        if media_type is not None:
            result = []
            for release_name, release in self.releases_data['releases'].iteritems():
                if media_type in release['manifests']:
                    result.append(release_name)
        else:
            result = self.releases_data['releases'].keys()
        return result

    def release_manifests(self, release):
        """Dict of media_type -> manifest for `release`."""
        try:
            manifests = self.releases_data['releases'][release]['manifests']
            return manifests
        except KeyError:
            raise_package_not_found(self.package, release)

    def release_formats(self, release=None):
        """Media types available for one release, or across all releases."""
        if release:
            return self.release_manifests(release).keys()
        else:
            formats = set()
            for _, release in self.releases_data['releases'].iteritems():
                [formats.add(x) for x in release['manifests'].keys()]
            return list(formats)

    def release(self, release, media_type):
        """Single manifest for (`release`, `media_type`)."""
        try:
            return self.release_manifests(release)[media_type]
        except KeyError:
            raise_package_not_found(self.package, release, media_type)

    def ispackage_exists(self):
        # True when the package has at least one release.
        return (len(self.releases()) > 0)

    def channels(self):
        """All channel dicts for this package ([] when none)."""
        data = self.releases_data['channels']
        if data:
            return data.values()
        else:
            return []

    def channel(self, channel):
        """Channel dict for `channel`, or raise channel-not-found."""
        try:
            return self.releases_data['channels'][channel]
        except KeyError:
            raise_channel_not_found(channel)

    def _set_channel(self, channel, release):
        # Point `channel` at `release` and record the channel on the release.
        try:
            self.get_lock(self.releases_key)
            data = self.releases_data
            data['channels'][channel] = {
                'name': channel,
                'current': release,
                'package': self.package}
            if channel not in data['releases'][release]['channels']:
                data['releases'][release]['channels'].append(channel)
            self._write_data(self.releases_key, data)
            return True
        finally:
            self.release_lock(self.releases_key)

    def add_channel(self, channel, current):
        """Create (or repoint) a channel at release `current`."""
        return self._set_channel(channel, current)

    def delete_channel(self, channel):
        """ Delete the channel from all releases """
        if not self.ischannel_exists(channel):
            raise_channel_not_found(channel)
        try:
            self.get_lock(self.releases_key)
            data = self.releases_data
            for release in self.channel_releases(channel):
                self._releases = self._delete_channel_release(channel, release)
            if channel in data['channels']:
                data['channels'].pop(channel)
            self._write_data(self.releases_key, data)
        finally:
            self.release_lock(self.releases_key)

    def set_channel_default(self, channel, release):
        """Make `release` the channel's current release (both must exist)."""
        self._check_channel_release(channel, release)
        return self._set_channel(channel, release)

    def _check_channel_release(self, channel, release):
        # Validate that both the channel and the release exist.
        if not self.ischannel_exists(channel):
            raise_channel_not_found(channel)
        if release not in self.releases_data['releases']:
            raise_package_not_found(self.package, release)

    def add_channel_release(self, channel, release):
        """Tag `release` with `channel` (without changing the default)."""
        self._check_channel_release(channel, release)
        try:
            self.get_lock(self.releases_key)
            data = self.releases_data
            if channel not in data['releases'][release]['channels']:
                data['releases'][release]['channels'].append(channel)
            self._write_data(self.releases_key, data)
            return True
        finally:
            self.release_lock(self.releases_key)

    def delete_channel_release(self, channel, release):
        """Untag `release` from `channel`; drop the channel when empty,
        otherwise repoint it at the newest remaining release."""
        self._check_channel_release(channel, release)
        try:
            self.get_lock(self.releases_key)
            data = self._delete_channel_release(channel, release)
            releases = self.channel_releases(channel)
            if not releases:
                data['channels'].pop(channel)
            else:
                self.set_channel_default(channel, releases[0])
            self._write_data(self.releases_key, data)
            return True
        finally:
            self.release_lock(self.releases_key)

    def _delete_channel_release(self, channel, release):
        # Remove `channel` from the release's channel list (in memory only;
        # the caller is responsible for persisting `data`).
        data = self.releases_data
        channels = set(data['releases'][release]['channels'])
        if channel in channels:
            channels.discard(channel)
            data['releases'][release]['channels'] = list(channels)
        return data

    def channel_releases(self, channel):
        """Releases tagged with `channel`, newest (semver) first."""
        if not self.ischannel_exists(channel):
            raise_channel_not_found(self.package, channel)
        releases = [
            release for release, x in self.releases_data['releases'].iteritems()
            if channel in x['channels']]
        ordered_releases = [
            str(x) for x in sorted(appr.semver.versions(releases, False), reverse=True)]
        return ordered_releases

    def release_channels(self, release):
        """Channel names tagged on `release`."""
        if release not in self.releases_data['releases']:
            raise_package_not_found(self.package, release)
        return self.releases_data['releases'][release]['channels']

    def package_names(self, namespace=None):
        """All "namespace/name" strings, optionally limited to one namespace."""
        result = []
        if namespace is not None:
            if namespace in self.packages_data['packages']:
                result = [
                    "%s/%s" % (namespace, name)
                    for name in self.packages_data['packages'][namespace].keys()]
        else:
            for namespace, packages in self.packages_data['packages'].iteritems():
                for name in packages.keys():
                    result.append("%s/%s" % (namespace, name))
        return result

    def ischannel_exists(self, channel):
        # True when the channel is present in the releases document.
        return channel in self.releases_data['channels']

    def packages(self, namespace=None):
        """Package dicts, optionally limited to one namespace."""
        result = []
        if namespace is not None:
            if namespace in self.packages_data['packages']:
                result = self.packages_data['packages'][namespace].values()
        else:
            for namespace, packages in self.packages_data['packages'].iteritems():
                for _, data in packages.iteritems():
                    result.append(data)
        return result

    def _lock_key(self, key):
        # Lock documents live next to the data they guard, suffixed ".lock".
        return "%s.lock" % (key)

    def get_lock(self, key, ttl=3, timeout=DEFAULT_LOCK_TIMEOUT):
        """Acquire the backend lock for `key` unless this instance already
        holds it (locks are not re-entrant across instances)."""
        lock_key = self._lock_key(key)
        if lock_key not in self.locks:
            self._get_lock(lock_key, ttl, timeout)
            self.locks.add(lock_key)

    def release_lock(self, key):
        """ Check if owner of the lock """
        lock_key = self._lock_key(key)
        if lock_key in self.locks:
            self.locks.discard(lock_key)
            self._release_lock(lock_key)

    # ------------------------------------------------------------------
    # Storage/locking primitives to be provided by concrete backends.
    # ------------------------------------------------------------------
    def _get_lock(self, key, ttl=3, timeout=DEFAULT_LOCK_TIMEOUT):
        raise NotImplementedError

    def _release_lock(self, key):
        """ Remove the lock """
        raise NotImplementedError

    def _fetch_data(self, key):
        # JSON-decode the raw document stored at `key`.
        return json.loads(self._fetch_raw_data(key))

    def _fetch_raw_data(self, key):
        raise NotImplementedError

    def _write_data(self, key, data):
        # JSON-encode and store `data` at `key`.
        return self._write_raw_data(key, json.dumps(data))

    def _write_raw_data(self, key, data):
        raise NotImplementedError

    def _delete_data(self, key):
        raise NotImplementedError
| apache-2.0 |
talha81/TACTIC-DEV | src/tactic/command/queue.py | 4 | 13922 | ############################################################
#
# Copyright (c) 2010, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
__all__ = ['JobTask', 'Queue']
import tacticenv
from pyasm.security import Batch
from pyasm.common import Common, Config, Container, Environment, jsonloads, jsondumps, TacticException
from pyasm.biz import Project
from pyasm.search import Search, SearchType, DbContainer, Transaction
from pyasm.command import Command
from tactic.command import Scheduler, SchedulerTask
import os
class Queue:
    """Helpers for the sthpw/queue job table: atomically claim the next
    pending job and enqueue new jobs.

    NOTE(review): Python 2 code (TACTIC project conventions).
    """

    def get_next_job(job_search_type="sthpw/queue", queue_type=None, server_code=None):
        """Claim the next pending job, oldest first.

        Scans up to 10 pending rows and flips the first one from
        'pending' to 'locked' with a conditional raw UPDATE so that
        concurrent workers cannot claim the same row.  Returns the locked
        job sobject, or None when nothing could be claimed.
        """
        sql = DbContainer.get("sthpw")
        search_type_obj = SearchType.get(job_search_type)
        table = search_type_obj.get_table()
        # get the entire queue
        search = Search(job_search_type)
        if queue_type:
            search.add_filter("queue", queue_type)
        if server_code:
            search.add_filter("server_code", server_code)
        search.add_filter("state", "pending")
        search.add_order_by("timestamp")
        chunk = 10
        search.add_limit(chunk)
        queues = search.get_sobjects()
        queue_id = 0
        for queue in queues:
            queue_id = queue.get_id()
            # attempt to lock this queue
            # have to do this manually
            # NOTE(review): SQL built by string interpolation; table comes
            # from the search type and queue_id is a DB-generated id, so the
            # values are internal — but parameterized SQL would be safer.
            update = """UPDATE "%s" SET state = 'locked' where id = '%s' and state = 'pending'""" % (table, queue_id)
            sql.do_update(update)
            row_count = sql.get_row_count()
            # row_count == 1 means we won the race for this row.
            if row_count == 1:
                break
            else:
                queue_id = 0
        if queue_id:
            queue = Search.get_by_id(job_search_type, queue_id)
            return queue
        else:
            return None
    get_next_job = staticmethod(get_next_job)

    def add(command, kwargs, queue_type, priority, description, message_code=None):
        """Enqueue `command` (a class path) with JSON-serialized `kwargs`
        as a new pending job and return the created sobject."""
        queue = SearchType.create("sthpw/queue")
        queue.set_value("project_code", Project.get_project_code())
        #queue.set_sobject_value(sobject)
        queue.set_value("queue", queue_type)
        queue.set_value("state", "pending")
        queue.set_value("login", Environment.get_user_name())
        queue.set_value("command", command)
        data = jsondumps(kwargs)
        queue.set_value("data", data)
        if message_code:
            queue.set_value("message_code", message_code)
        queue.set_value("priority", priority)
        queue.set_value("description", description)
        queue.set_user()
        queue.commit()
        return queue
    add = staticmethod(add)
# create a task from the job
class JobTask(SchedulerTask):
    """Scheduler task that polls the job queue and executes claimed jobs.

    The TACTIC convention of `my` (instead of `self`) is used throughout.
    execute() is an infinite polling loop; cleanup() resets any jobs this
    host had locked back to 'pending' so they can be re-claimed.
    """

    def __init__(my, **kwargs):
        #print "JobTask: init"
        my.job = None
        my.jobs = []
        # Seconds between queue polls (defaults to 1).
        my.check_interval = kwargs.get("check_interval")
        if not my.check_interval:
            my.check_interval = 1
        # Restart the process after this many jobs (-1 = never).
        my.jobs_completed = 0
        my.max_jobs_completed = kwargs.get("max_jobs_completed")
        if not my.max_jobs_completed:
            my.max_jobs_completed = -1
        # Maximum number of jobs held concurrently by this task.
        my.max_jobs = 2
        my.queue_type = kwargs.get("queue")
        super(JobTask, my).__init__()

    def get_check_interval(my):
        return my.check_interval

    def set_check_interval(my, interval):
        my.check_interval = interval

    def get_process_key(my):
        # Unique "host:pid" identifier recorded on jobs this worker claims.
        import platform;
        host = platform.uname()[1]
        pid = os.getpid()
        return "%s:%s" % (host, pid)

    def get_job_search_type(my):
        return "sthpw/queue"

    def get_next_job(my, queue_type=None):
        return Queue.get_next_job(queue_type=queue_type)

    def cleanup_db_jobs(my):
        # clean up the jobs that this host previously had
        process_key = my.get_process_key()
        job_search = Search(my.get_job_search_type())
        job_search.add_filter("host", process_key)
        my.jobs = job_search.get_sobjects()
        my.cleanup()

    def cleanup(my, count=0):
        """Reset this worker's 'locked' jobs to 'pending' (retried up to
        3 times on error via recursion)."""
        if count >= 3:
            return
        try:
            for job in my.jobs:
                # reset all none complete jobs to pending
                current_state = job.get_value("state")
                if current_state not in ['locked']:
                    continue
                #print "setting to pending"
                job.set_value("state", "pending")
                job.set_value("host", "")
                job.commit()
            my.jobs = []
        except Exception, e:
            print "Exception: ", e.message
            count += 1
            my.cleanup(count)

    def execute(my):
        """Main polling loop: reconcile held jobs, claim new ones, sleep.
        Restarts the whole process once max_jobs_completed is exceeded."""
        import atexit
        import time
        atexit.register( my.cleanup )
        while 1:
            my.check_existing_jobs()
            my.check_new_job()
            time.sleep(my.check_interval)
            DbContainer.close_thread_sql()
            if my.max_jobs_completed != -1 and my.jobs_completed > my.max_jobs_completed:
                Common.restart()
                # Spin until the restart actually tears this process down.
                while 1:
                    print "Waiting to restart..."
                    time.sleep(1)

    def check_existing_jobs(my):
        """Drop jobs deleted from the DB and honor 'cancel' requests;
        keep everything else in my.jobs."""
        my.keep_jobs = []
        for job in my.jobs:
            job_code = job.get_code()
            # Re-read the job to pick up external state changes.
            search = Search(my.get_job_search_type())
            search.add_filter("code", job_code)
            job = search.get_sobject()
            if not job:
                print "Cancel ...."
                scheduler = Scheduler.get()
                scheduler.cancel_task(job_code)
                continue
            state = job.get_value("state")
            if state == 'cancel':
                print "Cancel task [%s] ...." % job_code
                scheduler = Scheduler.get()
                scheduler.cancel_task(job_code)
                job.set_value("state", "terminated")
                job.commit()
                continue
            my.keep_jobs.append(job)
        my.jobs = my.keep_jobs

    def check_new_job(my, queue_type=None):
        """Claim and run the next job, if any and if below max_jobs.

        NOTE(review): queue_type is forcibly set to 'repeat' below, so the
        'inline' and forked branches are currently unreachable.
        """
        num_jobs = len(my.jobs)
        if num_jobs >= my.max_jobs:
            print "Already at max jobs [%s]" % my.max_jobs
            return
        my.job = my.get_next_job(queue_type)
        if not my.job:
            return
        # set the process key
        process_key = my.get_process_key()
        my.job.set_value("host", process_key)
        my.job.commit()
        my.jobs.append(my.job)
        # get some info from the job
        command = my.job.get_value("command")
        job_code = my.job.get_value("code")
        try:
            kwargs = my.job.get_json_value("data")
        except:
            try:
                # DEPRECATED
                kwargs = my.job.get_json_value("serialized")
            except:
                kwargs = {}
        if not kwargs:
            kwargs = {}
        login = my.job.get_value("login")
        script_path = my.job.get_value("script_path", no_exception=True)
        project_code = my.job.get_value("project_code")
        if script_path:
            # Script-path jobs run the stored custom script through PythonCmd.
            command = 'tactic.command.PythonCmd'
            folder = os.path.dirname(script_path)
            title = os.path.basename(script_path)
            search = Search("config/custom_script")
            search.add_filter("folder", folder)
            search.add_filter("title", title)
            custom_script = search.get_sobject()
            script_code = custom_script.get_value("script")
            kwargs['code'] = script_code
        # add the job to the kwargs
        kwargs['job'] = my.job
        #print "command: ", command
        #print "kwargs: ", kwargs
        # Because we started a new thread, the environment may not
        # yet be initialized
        try:
            from pyasm.common import Environment
            Environment.get_env_object()
        except:
            # it usually is run at the very first transaction
            Batch()
        Project.set_project(project_code)
        queue = my.job.get_value("queue", no_exception=True)
        queue_type = 'repeat'
        stop_on_error = False
        print "Running job: ", my.job.get_value("code")
        if queue_type == 'inline':
            # Run once in this thread; any failure marks the job 'error'.
            cmd = Common.create_from_class_path(command, kwargs=kwargs)
            try:
                Container.put(Command.TOP_CMD_KEY, None)
                Container.put(Transaction.KEY, None)
                Command.execute_cmd(cmd)
                # set job to complete
                my.job.set_value("state", "complete")
            except Exception, e:
                my.job.set_value("state", "error")
            my.job.commit()
            my.jobs.remove(my.job)
            my.job = None
            my.jobs_completed += 1
        elif queue_type == 'repeat':
            # Retry generic failures up to max_attempts; TacticException
            # means a server-side error that retrying will not fix.
            attempts = 0
            max_attempts = 3
            retry_interval = 5
            Container.put(Transaction.KEY, None)
            while 1:
                try:
                    cmd = Common.create_from_class_path(command, kwargs=kwargs)
                    Container.put(Command.TOP_CMD_KEY, None)
                    Command.execute_cmd(cmd)
                    #cmd.execute()
                    # set job to complete
                    my.job.set_value("state", "complete")
                    break
                except TacticException, e:
                    # This is an error on this server, so just exit
                    # and don't bother retrying
                    print "Error: ", e
                    my.job.set_value("state", "error")
                    break
                except Exception, e:
                    if stop_on_error:
                        raise
                    print "WARNING in Queue: ", e
                    import time
                    time.sleep(retry_interval)
                    attempts += 1
                    print "Retrying [%s]...." % attempts
                    if attempts >= max_attempts:
                        print "ERROR: reached max attempts"
                        my.job.set_value("state", "error")
                        break
            my.job.commit()
            my.jobs.remove(my.job)
            my.job = None
            my.jobs_completed += 1
        else:
            # Fallback: run the job in a separate OS process by invoking
            # this module as a script (see run_batch / __main__ below).
            class ForkedTask(SchedulerTask):
                def __init__(my, **kwargs):
                    super(ForkedTask, my).__init__(**kwargs)
                def execute(my):
                    # check to see the status of this job
                    """
                    job = my.kwargs.get('job')
                    job_code = job.get_code()
                    search = Search("sthpw/queue")
                    search.add_filter("code", job_code)
                    my.kwargs['job'] = search.get_sobject()
                    if not job:
                        print "Cancelling ..."
                        return
                    state = job.get_value("state")
                    if state == "cancel":
                        print "Cancelling 2 ...."
                        return
                    """
                    subprocess_kwargs = {
                        'login': login,
                        'project_code': project_code,
                        'command': command,
                        'kwargs': kwargs
                    }
                    subprocess_kwargs_str = jsondumps(subprocess_kwargs)
                    install_dir = Environment.get_install_dir()
                    python = Config.get_value("services", "python")
                    if not python:
                        python = 'python'
                    args = ['%s' % python, '%s/src/tactic/command/queue.py' % install_dir]
                    args.append(subprocess_kwargs_str)
                    import subprocess
                    p = subprocess.Popen(args)
                    DbContainer.close_thread_sql()
                    return
            # can't use a forked task ... need to use a system call
            #Command.execute_cmd(cmd)
            # register this as a forked task
            task = ForkedTask(name=job_code, job=my.job)
            scheduler = Scheduler.get()
            scheduler.start_thread()
            # FIXME: the queue should not be inline
            if queue == 'interval':
                interval = my.job.get_value("interval")
                if not interval:
                    interval = 60
                scheduler.add_interval_task(task, interval=interval,mode='threaded')
            else:
                scheduler.add_single_task(task, mode='threaded')

    def start(**kwargs):
        """Start a JobTask worker on the shared scheduler thread."""
        scheduler = Scheduler.get()
        scheduler.start_thread()
        task = JobTask(**kwargs)
        # Reclaim jobs left locked by a previous run of this host.
        task.cleanup_db_jobs()
        scheduler.add_single_task(task, mode='threaded', delay=1)
    start = staticmethod(start)
def run_batch(kwargs):
    """Run a single queued command in a fresh Batch environment.

    kwargs: dict with keys 'command' (class path), 'kwargs' (dict for the
    command), 'login' and 'project_code' — the payload serialized by
    JobTask's ForkedTask and passed on the command line (see __main__).
    """
    # BUG FIX: the original body read from the undefined name `k` (which
    # only exists as a global when this module is run as a script), so
    # calling run_batch() as a function raised NameError.  Read from the
    # `kwargs` parameter instead.
    command = kwargs.get("command")
    cmd_kwargs = kwargs.get("kwargs")
    login = kwargs.get("login")
    project_code = kwargs.get("project_code")
    from pyasm.security import Batch
    Batch(project_code=project_code, login_code=login)
    cmd = Common.create_from_class_path(command, kwargs=cmd_kwargs)
    Command.execute_cmd(cmd)
__all__.append("QueueTest")
class QueueTest(Command):
    """Test command that fails most of the time, for exercising the
    queue's retry/error handling."""
    def execute(my):
        # this command has only a one in 10 chance of succeeding
        import random
        value = random.randint(0, 10)
        if value != 5:
            # Deliberately undefined name: raises NameError to simulate
            # a failing job.
            sdaffsfda
# Script entry point used by JobTask's ForkedTask: the first command-line
# argument is the JSON-serialized job payload (see subprocess_kwargs above).
if __name__ == '__main__':
    import sys
    args = sys.argv[1:]
    k = args[0]
    k = jsonloads(k)
    run_batch(k)
| epl-1.0 |
nginxxx/ansible | lib/ansible/parsing/splitter.py | 118 | 11100 | # (c) 2014 James Cammarata, <jcammarata@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
import codecs
# Decode escapes adapted from rspeer's answer here:
# http://stackoverflow.com/questions/4020539/process-escape-sequences-in-a-string-in-python
_HEXCHAR = '[a-fA-F0-9]'
_ESCAPE_SEQUENCE_RE = re.compile(r'''
( \\U{0} # 8-digit hex escapes
| \\u{1} # 4-digit hex escapes
| \\x{2} # 2-digit hex escapes
| \\N\{{[^}}]+\}} # Unicode characters by name
| \\[\\'"abfnrtv] # Single-character escapes
)'''.format(_HEXCHAR*8, _HEXCHAR*4, _HEXCHAR*2), re.UNICODE | re.VERBOSE)
def _decode_escapes(s):
def decode_match(match):
return codecs.decode(match.group(0), 'unicode-escape')
return _ESCAPE_SEQUENCE_RE.sub(decode_match, s)
def parse_kv(args, check_raw=False):
    '''
    Convert a string of key/value items to a dict. If any free-form params
    are found and the check_raw option is set to True, they will be added
    to a new parameter called '_raw_params'. If check_raw is not enabled,
    they will simply be ignored.

    :arg args: raw argument string (anything to_unicode accepts)
    :kwarg check_raw: when True, collect non key=value tokens into
        options['_raw_params'] instead of discarding them
    :returns: dict of parsed options
    :raises AnsibleError: on unbalanced quoting, or when a user-supplied
        key starts with '_' (reserved for internal variables)
    '''
    ### FIXME: args should already be a unicode string
    from ansible.utils.unicode import to_unicode
    # BUG FIX: the error paths below referenced `errors.AnsibleError` and a
    # bare `AnsibleError`, neither of which was in scope in this module, so
    # both raised NameError instead of the intended error.  Import the real
    # exception locally (matching this function's existing local-import style).
    from ansible.errors import AnsibleError
    args = to_unicode(args, nonstring='passthru')
    options = {}
    if args is not None:
        try:
            vargs = split_args(args)
        except ValueError as ve:
            if 'no closing quotation' in str(ve).lower():
                raise AnsibleError("error parsing argument string, try quoting the entire line.")
            else:
                raise
        raw_params = []
        for x in vargs:
            x = _decode_escapes(x)
            if "=" in x:
                pos = 0
                try:
                    # find the first unescaped '=': an escaped '\=' does not
                    # separate a key from a value
                    while True:
                        pos = x.index('=', pos + 1)
                        if pos > 0 and x[pos - 1] != '\\':
                            break
                except ValueError:
                    # ran out of string, but we must have some escaped equals,
                    # so replace those and append this to the list of raw params
                    raw_params.append(x.replace('\\=', '='))
                    continue
                k = x[:pos]
                v = x[pos + 1:]
                # only internal variables can start with an underscore, so
                # we don't allow users to set them directly in arguments
                if k.startswith('_'):
                    raise AnsibleError("invalid parameter specified: '%s'" % k)
                # FIXME: make the retrieval of this list of shell/command
                # options a function, so the list is centralized
                if check_raw and k not in ('creates', 'removes', 'chdir', 'executable', 'warn'):
                    raw_params.append(x)
                else:
                    options[k.strip()] = unquote(v.strip())
            else:
                raw_params.append(x)
        # recombine the free-form params, if any were found, and assign
        # them to a special option for use later by the shell/command module
        if len(raw_params) > 0:
            options[u'_raw_params'] = ' '.join(raw_params)
    return options
def _get_quote_state(token, quote_char):
'''
the goal of this block is to determine if the quoted string
is unterminated in which case it needs to be put back together
'''
# the char before the current one, used to see if
# the current character is escaped
prev_char = None
for idx, cur_char in enumerate(token):
if idx > 0:
prev_char = token[idx-1]
if cur_char in '"\'' and prev_char != '\\':
if quote_char:
if cur_char == quote_char:
quote_char = None
else:
quote_char = cur_char
return quote_char
def _count_jinja2_blocks(token, cur_depth, open_token, close_token):
'''
this function counts the number of opening/closing blocks for a
given opening/closing type and adjusts the current depth for that
block based on the difference
'''
num_open = token.count(open_token)
num_close = token.count(close_token)
if num_open != num_close:
cur_depth += (num_open - num_close)
if cur_depth < 0:
cur_depth = 0
return cur_depth
def split_args(args):
    '''
    Splits args on whitespace, but intelligently reassembles
    those that may have been split over a jinja2 block or quotes.
    When used in a remote module, we won't ever have to be concerned about
    jinja2 blocks, however this function is/will be used in the
    core portions as well before the args are templated.
    example input: a=b c="foo bar"
    example output: ['a=b', 'c="foo bar"']
    Basically this is a variation shlex that has some more intelligence for
    how Ansible needs to use it.

    Raises Exception when quoting or jinja2 blocks are left unbalanced
    at the end of the input.
    '''
    # the list of params parsed out of the arg string
    # this is going to be the result value when we are done
    params = []
    # Initial split on white space
    # NOTE(review): args is stripped twice (here and on the next line);
    # the second strip is redundant but harmless.
    args = args.strip()
    items = args.strip().split('\n')
    # iterate over the tokens, and reassemble any that may have been
    # split on a space inside a jinja2 block.
    # ex if tokens are "{{", "foo", "}}" these go together
    # These variables are used
    # to keep track of the state of the parsing, since blocks and quotes
    # may be nested within each other.
    quote_char = None
    inside_quotes = False
    print_depth = 0 # used to count nested jinja2 {{ }} blocks
    block_depth = 0 # used to count nested jinja2 {% %} blocks
    comment_depth = 0 # used to count nested jinja2 {# #} blocks
    # now we loop over each split chunk, coalescing tokens if the white space
    # split occurred within quotes or a jinja2 block of some kind
    for itemidx,item in enumerate(items):
        # we split on spaces and newlines separately, so that we
        # can tell which character we split on for reassembly
        # inside quotation characters
        tokens = item.strip().split(' ')
        line_continuation = False
        for idx,token in enumerate(tokens):
            # if we hit a line continuation character, but
            # we're not inside quotes, ignore it and continue
            # on to the next token while setting a flag
            if token == '\\' and not inside_quotes:
                line_continuation = True
                continue
            # store the previous quoting state for checking later
            was_inside_quotes = inside_quotes
            quote_char = _get_quote_state(token, quote_char)
            inside_quotes = quote_char is not None
            # multiple conditions may append a token to the list of params,
            # so we keep track with this flag to make sure it only happens once
            # append means add to the end of the list, don't append means concatenate
            # it to the end of the last token
            appended = False
            # if we're inside quotes now, but weren't before, append the token
            # to the end of the list, since we'll tack on more to it later
            # otherwise, if we're inside any jinja2 block, inside quotes, or we were
            # inside quotes (but aren't now) concat this token to the last param
            if inside_quotes and not was_inside_quotes:
                params.append(token)
                appended = True
            elif print_depth or block_depth or comment_depth or inside_quotes or was_inside_quotes:
                if idx == 0 and was_inside_quotes:
                    # first token of a new line while a quote was open:
                    # the newline separator was already added below
                    params[-1] = "%s%s" % (params[-1], token)
                elif len(tokens) > 1:
                    spacer = ''
                    if idx > 0:
                        spacer = ' '
                    params[-1] = "%s%s%s" % (params[-1], spacer, token)
                else:
                    params[-1] = "%s\n%s" % (params[-1], token)
                appended = True
            # if the number of paired block tags is not the same, the depth has changed, so we calculate that here
            # and may append the current token to the params (if we haven't previously done so)
            prev_print_depth = print_depth
            print_depth = _count_jinja2_blocks(token, print_depth, "{{", "}}")
            if print_depth != prev_print_depth and not appended:
                params.append(token)
                appended = True
            prev_block_depth = block_depth
            block_depth = _count_jinja2_blocks(token, block_depth, "{%", "%}")
            if block_depth != prev_block_depth and not appended:
                params.append(token)
                appended = True
            prev_comment_depth = comment_depth
            comment_depth = _count_jinja2_blocks(token, comment_depth, "{#", "#}")
            if comment_depth != prev_comment_depth and not appended:
                params.append(token)
                appended = True
            # finally, if we're at zero depth for all blocks and not inside quotes, and have not
            # yet appended anything to the list of params, we do so now
            if not (print_depth or block_depth or comment_depth) and not inside_quotes and not appended and token != '':
                params.append(token)
        # if this was the last token in the list, and we have more than
        # one item (meaning we split on newlines), add a newline back here
        # to preserve the original structure
        if len(items) > 1 and itemidx != len(items) - 1 and not line_continuation:
            params[-1] += '\n'
        # always clear the line continuation flag
        line_continuation = False
    # If we're done and things are not at zero depth or we're still inside quotes,
    # raise an error to indicate that the args were unbalanced
    # NOTE(review): a bare Exception is raised here; parse_kv() above relies
    # on matching the message text of shlex's ValueError, not this one.
    if print_depth or block_depth or comment_depth or inside_quotes:
        raise Exception("error while splitting arguments, either an unbalanced jinja2 block or quotes")
    return params
def is_quoted(data):
    """
    True when *data* begins and ends with the same quote character
    (single or double) and the closing quote is not escaped.
    """
    if len(data) < 2:
        return False
    first, last = data[0], data[-1]
    return first == last and first in ('"', "'") and data[-2] != '\\'
def unquote(data):
    """
    Strip one pair of surrounding quotes from *data* when the string
    starts and ends with the same unescaped quote character; otherwise
    return *data* unchanged.
    """
    # same check as is_quoted(), inlined
    quoted = (len(data) > 1 and data[0] == data[-1]
              and data[0] in ('"', "'") and data[-2] != '\\')
    return data[1:-1] if quoted else data
| gpl-3.0 |
willthames/ansible | test/utils/shippable/tools/download.py | 124 | 10149 | #!/usr/bin/env python
# PYTHON_ARGCOMPLETE_OK
# (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""CLI tool for downloading results from Shippable CI runs."""
from __future__ import print_function
# noinspection PyCompatibility
import argparse
import json
import os
import re
import requests
try:
import argcomplete
except ImportError:
argcomplete = None
def main():
    """Main program body.

    Parses the command line, resolves the given run id/url/name to a
    Shippable run via the REST API, then downloads the selected artifact
    kinds (console logs, test/coverage results, metadata) per job into
    an account/project/run_number directory tree.
    """
    api_key = get_api_key()
    parser = argparse.ArgumentParser(description='Download results from a Shippable run.')
    parser.add_argument('run_id',
                        metavar='RUN',
                        help='shippable run id, run url or run name formatted as: account/project/run_number')
    parser.add_argument('-v', '--verbose',
                        dest='verbose',
                        action='store_true',
                        help='show what is being downloaded')
    parser.add_argument('-t', '--test',
                        dest='test',
                        action='store_true',
                        help='show what would be downloaded without downloading')
    parser.add_argument('--key',
                        dest='api_key',
                        default=api_key,
                        required=api_key is None,
                        help='api key for accessing Shippable')
    parser.add_argument('--console-logs',
                        action='store_true',
                        help='download console logs')
    parser.add_argument('--test-results',
                        action='store_true',
                        help='download test results')
    parser.add_argument('--coverage-results',
                        action='store_true',
                        help='download code coverage results')
    parser.add_argument('--job-metadata',
                        action='store_true',
                        help='download job metadata')
    parser.add_argument('--run-metadata',
                        action='store_true',
                        help='download run metadata')
    parser.add_argument('--all',
                        action='store_true',
                        help='download everything')
    parser.add_argument('--job-number',
                        metavar='N',
                        action='append',
                        type=int,
                        help='limit downloads to the given job number')
    if argcomplete:
        argcomplete.autocomplete(parser)
    args = parser.parse_args()
    # Accept legacy run URLs by stripping the old prefix down to a run id.
    old_runs_prefix = 'https://app.shippable.com/runs/'
    if args.run_id.startswith(old_runs_prefix):
        args.run_id = args.run_id[len(old_runs_prefix):]
    if args.all:
        args.console_logs = True
        args.test_results = True
        args.coverage_results = True
        args.job_metadata = True
        args.run_metadata = True
    selections = (
        args.console_logs,
        args.test_results,
        args.coverage_results,
        args.job_metadata,
        args.run_metadata,
    )
    if not any(selections):
        parser.error('At least one download option is required.')
    headers = dict(
        Authorization='apiToken %s' % args.api_key,
    )
    # Three accepted spellings of the run: new-style URL, account/project/number,
    # or a bare hex run id.
    match = re.search(
        r'^https://app.shippable.com/github/(?P<account>[^/]+)/(?P<project>[^/]+)/runs/(?P<run_number>[0-9]+)(?:/summary|(/(?P<job_number>[0-9]+)))?$',
        args.run_id)
    if not match:
        match = re.search(r'^(?P<account>[^/]+)/(?P<project>[^/]+)/(?P<run_number>[0-9]+)$', args.run_id)
    if match:
        account = match.group('account')
        project = match.group('project')
        run_number = int(match.group('run_number'))
        job_number = int(match.group('job_number')) if match.group('job_number') else None
        if job_number:
            if args.job_number:
                exit('ERROR: job number found in url and specified with --job-number')
            args.job_number = [job_number]
        # Resolve project id, then run number, to the internal run id.
        url = 'https://api.shippable.com/projects'
        response = requests.get(url, dict(projectFullNames='%s/%s' % (account, project)), headers=headers)
        if response.status_code != 200:
            raise Exception(response.content)
        project_id = response.json()[0]['id']
        url = 'https://api.shippable.com/runs?projectIds=%s&runNumbers=%s' % (project_id, run_number)
        response = requests.get(url, headers=headers)
        if response.status_code != 200:
            raise Exception(response.content)
        run = [run for run in response.json() if run['runNumber'] == run_number][0]
        args.run_id = run['id']
    elif re.search('^[a-f0-9]+$', args.run_id):
        # Bare run id: look up the run directly.
        url = 'https://api.shippable.com/runs/%s' % args.run_id
        response = requests.get(url, headers=headers)
        if response.status_code != 200:
            raise Exception(response.content)
        run = response.json()
        account = run['subscriptionOrgName']
        project = run['projectName']
        run_number = run['runNumber']
    else:
        exit('ERROR: invalid run: %s' % args.run_id)
    output_dir = '%s/%s/%s' % (account, project, run_number)
    response = requests.get('https://api.shippable.com/jobs?runIds=%s' % args.run_id, headers=headers)
    if response.status_code != 200:
        raise Exception(response.content)
    jobs = sorted(response.json(), key=lambda job: int(job['jobNumber']))
    if not args.test:
        if not os.path.exists(output_dir):
            os.makedirs(output_dir)
    if args.run_metadata:
        path = os.path.join(output_dir, 'run.json')
        contents = json.dumps(run, sort_keys=True, indent=4)
        if args.verbose or args.test:
            print(path)
        if not args.test:
            with open(path, 'w') as metadata_fd:
                metadata_fd.write(contents)
    for j in jobs:
        job_id = j['id']
        job_number = j['jobNumber']
        # --job-number acts as a whitelist when given.
        if args.job_number and job_number not in args.job_number:
            continue
        if args.job_metadata:
            path = os.path.join(output_dir, '%s/job.json' % job_number)
            contents = json.dumps(j, sort_keys=True, indent=4)
            if args.verbose or args.test:
                print(path)
            if not args.test:
                directory = os.path.dirname(path)
                if not os.path.exists(directory):
                    os.makedirs(directory)
                with open(path, 'w') as metadata_fd:
                    metadata_fd.write(contents)
        if args.console_logs:
            path = os.path.join(output_dir, '%s/console.log' % job_number)
            url = 'https://api.shippable.com/jobs/%s/consoles?download=true' % job_id
            download(args, headers, path, url, is_json=False)
        if args.test_results:
            path = os.path.join(output_dir, '%s/test.json' % job_number)
            url = 'https://api.shippable.com/jobs/%s/jobTestReports' % job_id
            download(args, headers, path, url)
            extract_contents(args, path, os.path.join(output_dir, '%s/test' % job_number))
        if args.coverage_results:
            path = os.path.join(output_dir, '%s/coverage.json' % job_number)
            url = 'https://api.shippable.com/jobs/%s/jobCoverageReports' % job_id
            download(args, headers, path, url)
            extract_contents(args, path, os.path.join(output_dir, '%s/coverage' % job_number))
def extract_contents(args, path, output_dir):
    """
    Expand a downloaded report JSON (a list of {path, contents} items)
    into individual files under *output_dir*. No-op in --test mode or
    when *path* was never downloaded. Existing files are not overwritten.
    :type args: any
    :type path: str
    :type output_dir: str
    """
    if not args.test:
        if not os.path.exists(path):
            return
        with open(path, 'r') as json_fd:
            items = json.load(json_fd)
        for item in items:
            contents = item['contents'].encode('utf-8')
            # Strip any leading slashes so the item path stays inside output_dir.
            path = output_dir + '/' + re.sub('^/*', '', item['path'])
            directory = os.path.dirname(path)
            if not os.path.exists(directory):
                os.makedirs(directory)
            if args.verbose:
                print(path)
            if path.endswith('.json'):
                # Re-serialize JSON payloads in a stable, pretty form.
                contents = json.dumps(json.loads(contents), sort_keys=True, indent=4)
            if not os.path.exists(path):
                with open(path, 'w') as output_fd:
                    output_fd.write(contents)
def download(args, headers, path, url, is_json=True):
    """
    Download *url* to *path* unless it already exists (idempotent); in
    --test mode only the path is printed. Non-200 responses are saved
    with a '.error' suffix instead of the real path.
    :type args: any
    :type headers: dict[str, str]
    :type path: str
    :type url: str
    :type is_json: bool
    """
    if args.verbose or args.test:
        print(path)
    if os.path.exists(path):
        return
    if not args.test:
        response = requests.get(url, headers=headers)
        if response.status_code != 200:
            path += '.error'
        if is_json:
            # Pretty-print JSON payloads so downloaded files diff cleanly.
            content = json.dumps(response.json(), sort_keys=True, indent=4)
        else:
            content = response.content
        directory = os.path.dirname(path)
        if not os.path.exists(directory):
            os.makedirs(directory)
        with open(path, 'w') as content_fd:
            content_fd.write(content)
def get_api_key():
    """Return the Shippable API key.

    Preference order: the SHIPPABLE_KEY environment variable, then the
    ~/.shippable.key file. Returns None when neither source is available.
    rtype: str
    """
    from_env = os.environ.get('SHIPPABLE_KEY', None)
    if from_env:
        return from_env
    key_path = os.path.join(os.environ['HOME'], '.shippable.key')
    try:
        with open(key_path, 'r') as key_fd:
            return key_fd.read().strip()
    except IOError:
        return None
# Run the CLI only when executed as a script, not when imported.
if __name__ == '__main__':
    main()
| gpl-3.0 |
groschovskiy/keyczar | cpp/src/tools/scons/scons-local-1.2.0.d20090223/SCons/Node/Alias.py | 19 | 4271 |
"""scons.Node.Alias
Alias nodes.
This creates a hash of global Aliases (dummy targets).
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Node/Alias.py 4043 2009/02/23 09:06:45 scons"
import string
import UserDict
import SCons.Errors
import SCons.Node
import SCons.Util
class AliasNameSpace(UserDict.UserDict):
    """Dictionary-like namespace mapping alias names to Alias nodes."""
    def Alias(self, name, **kw):
        """Return the Alias node for *name*, creating and caching it on
        first use; an Alias node passed in is returned unchanged."""
        if isinstance(name, SCons.Node.Alias.Alias):
            return name
        try:
            a = self[name]
        except KeyError:
            a = apply(SCons.Node.Alias.Alias, (name,), kw)
            self[name] = a
        return a
    def lookup(self, name, **kw):
        """Return the existing Alias node for *name*, or None."""
        try:
            return self[name]
        except KeyError:
            return None
class AliasNodeInfo(SCons.Node.NodeInfoBase):
    """Serializable signature info for Alias nodes; only the content
    signature (csig) is stored."""
    current_version_id = 1
    field_list = ['csig']
    def str_to_node(self, s):
        # Resolve a stored name back to its (possibly new) Alias node.
        return default_ans.Alias(s)
class AliasBuildInfo(SCons.Node.BuildInfoBase):
    """Build-info record for Alias nodes; adds nothing to the base class."""
    current_version_id = 1
class Alias(SCons.Node.Node):
    """A dummy-target node identified by name only; it lives outside the
    filesystem and its signature is derived from its sources."""

    NodeInfo = AliasNodeInfo
    BuildInfo = AliasBuildInfo
    def __init__(self, name):
        SCons.Node.Node.__init__(self)
        self.name = name
    def str_for_display(self):
        return '"' + self.__str__() + '"'
    def __str__(self):
        return self.name
    def make_ready(self):
        # Compute (and cache) the content signature before building.
        self.get_csig()
    # Aliases build as no-ops until convert() swaps in the real builder.
    really_build = SCons.Node.Node.build
    is_up_to_date = SCons.Node.Node.children_are_up_to_date
    def is_under(self, dir):
        # Make Alias nodes get built regardless of
        # what directory scons was run from. Alias nodes
        # are outside the filesystem:
        return 1
    def get_contents(self):
        """The contents of an alias is the concatenation
        of the content signatures of all its sources."""
        childsigs = map(lambda n: n.get_csig(), self.children())
        return string.join(childsigs, '')
    def sconsign(self):
        """An Alias is not recorded in .sconsign files"""
        pass
    #
    #
    #
    def changed_since_last_build(self, target, prev_ni):
        # No previous csig (first build) counts as changed.
        cur_csig = self.get_csig()
        try:
            return cur_csig != prev_ni.csig
        except AttributeError:
            return 1
    def build(self):
        """A "builder" for aliases."""
        pass
    def convert(self):
        # Turn this node into a really-building alias: drop the dummy
        # builder attribute and restore the base-class build method.
        try: del self.builder
        except AttributeError: pass
        self.reset_executor()
        self.build = self.really_build
    def get_csig(self):
        """
        Generate a node's content signature, the digested signature
        of its content.

        node - the node
        cache - alternate node to use for the signature cache
        returns - the content signature
        """
        try:
            return self.ninfo.csig
        except AttributeError:
            pass
        contents = self.get_contents()
        csig = SCons.Util.MD5signature(contents)
        self.get_ninfo().csig = csig
        return csig
# The single global alias namespace, registered as a lookup hook so SCons
# can resolve strings to Alias nodes in arg2nodes().
default_ans = AliasNameSpace()
SCons.Node.arg2nodes_lookups.append(default_ans.lookup)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| apache-2.0 |
kleinee/jns | scripts/dnld_julia-1.1.0-arm32bit.py | 1 | 1372 | #!/home/pi/.venv/jns/bin/python
#
# last modified 2019/05/26
#
# Python helper script to download Julia 1.1.0 binaries
# not meant to be executed manually
# https://stackoverflow.com/questions/38511444/python-download-files-from-google-drive-using-url
#
FILE_ID = '1fj6pNAJgmUD7bsSXqh8ocC1wESx8jkRh'
DESTINATION = './julia-1.1.0-arm32bit.zip'
import requests
def download_file_from_google_drive(id, destination):
    """Download the Google Drive file *id* to *destination*, handling the
    virus-scan confirmation cookie Google sets for large files."""
    URL = "https://docs.google.com/uc?export=download"
    session = requests.Session()
    response = session.get(URL, params = { 'id' : id }, stream = True)
    token = get_confirm_token(response)
    if token:
        # Large files need a second request carrying the confirm token.
        params = { 'id' : id, 'confirm' : token }
        response = session.get(URL, params = params, stream = True)
    save_response_content(response, destination)
def get_confirm_token(response):
    """Return the value of Google Drive's 'download_warning' cookie, or
    None when no such cookie is present (small files need no confirm)."""
    tokens = (value for key, value in response.cookies.items()
              if key.startswith('download_warning'))
    return next(tokens, None)
def save_response_content(response, destination):
    """Stream the HTTP response body to *destination* in 32 KiB chunks."""
    CHUNK_SIZE = 32768
    with open(destination, "wb") as f:
        for chunk in response.iter_content(CHUNK_SIZE):
            if chunk: # filter out keep-alive new chunks
                f.write(chunk)
# Entry point: download the pinned Julia binary archive.
if __name__ == "__main__":
    file_id = FILE_ID
    destination = DESTINATION
    download_file_from_google_drive(file_id, destination)
| mit |
Hybrid-Rom/kernel_htc_msm8974 | scripts/rt-tester/rt-tester.py | 11005 | 5307 | #!/usr/bin/python
#
# rt-mutex tester
#
# (C) 2006 Thomas Gleixner <tglx@linutronix.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
import os
import sys
import getopt
import shutil
import string
# Globals
quiet = 0
test = 0
comments = 0
sysfsprefix = "/sys/devices/system/rttest/rttest"
statusfile = "/status"
commandfile = "/command"
# Command opcodes
cmd_opcodes = {
"schedother" : "1",
"schedfifo" : "2",
"lock" : "3",
"locknowait" : "4",
"lockint" : "5",
"lockintnowait" : "6",
"lockcont" : "7",
"unlock" : "8",
"signal" : "11",
"resetevent" : "98",
"reset" : "99",
}
test_opcodes = {
"prioeq" : ["P" , "eq" , None],
"priolt" : ["P" , "lt" , None],
"priogt" : ["P" , "gt" , None],
"nprioeq" : ["N" , "eq" , None],
"npriolt" : ["N" , "lt" , None],
"npriogt" : ["N" , "gt" , None],
"unlocked" : ["M" , "eq" , 0],
"trylock" : ["M" , "eq" , 1],
"blocked" : ["M" , "eq" , 2],
"blockedwake" : ["M" , "eq" , 3],
"locked" : ["M" , "eq" , 4],
"opcodeeq" : ["O" , "eq" , None],
"opcodelt" : ["O" , "lt" , None],
"opcodegt" : ["O" , "gt" , None],
"eventeq" : ["E" , "eq" , None],
"eventlt" : ["E" , "lt" , None],
"eventgt" : ["E" , "gt" , None],
}
# Print usage information
def usage():
    # Print command-line help for the rt-mutex tester (Python 2 script).
    print "rt-tester.py <-c -h -q -t> <testfile>"
    print " -c display comments after first command"
    print " -h help"
    print " -q quiet mode"
    print " -t test mode (syntax check)"
    print " testfile: read test specification from testfile"
    print " otherwise from stdin"
    return
# Print progress when not in quiet mode
def progress(str):
    # Print progress output unless running in quiet (-q) mode.
    # NOTE(review): the parameter shadows the builtin 'str'.
    if not quiet:
        print str
# Analyse a status value
def analyse(val, top, arg):
    """Compare a status value against one test opcode.

    val -- status value string read from sysfs
    top -- [field, relation, fixed-arg] triple from test_opcodes
    arg -- argument column from the test line (digit position for "M",
           opcode name/number for "O", plain integer otherwise)
    Returns 1 when the relation (eq/lt/gt) holds, else 0.
    """
    intval = int(val)
    if top[0] == "M":
        # Mutex state: extract the decimal digit at position int(arg).
        # Use floor division explicitly; the original relied on
        # Python-2-only integer '/' semantics.
        intval = intval // (10 ** int(arg))
        intval = intval % 10
        argval = top[2]
    elif top[0] == "O":
        # Opcode: accept a symbolic name or a raw number.
        argval = int(cmd_opcodes.get(arg, arg))
    else:
        argval = int(arg)
    # progress("%d %s %d" %(intval, top[1], argval))
    if top[1] == "eq" and intval == argval:
        return 1
    if top[1] == "lt" and intval < argval:
        return 1
    if top[1] == "gt" and intval > argval:
        return 1
    return 0
# Parse the commandline
try:
    (options, arguments) = getopt.getopt(sys.argv[1:],'chqt')
except getopt.GetoptError, ex:
    usage()
    sys.exit(1)
# Parse commandline options
for option, value in options:
    if option == "-c":
        # Show test-file comments after the first command line.
        comments = 1
    elif option == "-q":
        quiet = 1
    elif option == "-t":
        # Syntax-check only; do not touch the sysfs interface.
        test = 1
    elif option == '-h':
        usage()
        sys.exit(0)
# Select the input source
# Read the test specification from the file argument, or fall back to stdin.
if arguments:
    try:
        fd = open(arguments[0])
    except Exception,ex:
        sys.stderr.write("File not found %s\n" %(arguments[0]))
        sys.exit(1)
else:
    fd = sys.stdin
linenr = 0
# Read the test patterns
while 1:
    linenr = linenr + 1
    line = fd.readline()
    if not len(line):
        break
    line = line.strip()
    parts = line.split(":")
    # Skip blank/malformed lines.
    if not parts or len(parts) < 1:
        continue
    if len(parts[0]) == 0:
        continue
    if parts[0].startswith("#"):
        if comments > 1:
            progress(line)
        continue
    if comments == 1:
        comments = 2
        progress(line)
    # A test line is "cmd:opcode:thread-id:data".
    cmd = parts[0].strip().lower()
    opc = parts[1].strip().lower()
    tid = parts[2].strip()
    dat = parts[3].strip()
    try:
        # Test or wait for a status value
        if cmd == "t" or cmd == "w":
            testop = test_opcodes[opc]
            fname = "%s%s%s" %(sysfsprefix, tid, statusfile)
            if test:
                print fname
                continue
            while 1:
                query = 1
                fsta = open(fname, 'r')
                status = fsta.readline().strip()
                fsta.close()
                stat = status.split(",")
                for s in stat:
                    s = s.strip()
                    if s.startswith(testop[0]):
                        # Separate status value
                        val = s[2:].strip()
                        query = analyse(val, testop, dat)
                        break
                # "t" tests once; "w" keeps polling until the value matches.
                if query or cmd == "t":
                    break
                progress(" " + status)
            if not query:
                sys.stderr.write("Test failed in line %d\n" %(linenr))
                sys.exit(1)
        # Issue a command to the tester
        elif cmd == "c":
            cmdnr = cmd_opcodes[opc]
            # Build command string and sys filename
            cmdstr = "%s:%s" %(cmdnr, dat)
            fname = "%s%s%s" %(sysfsprefix, tid, commandfile)
            if test:
                print fname
                continue
            fcmd = open(fname, 'w')
            fcmd.write(cmdstr)
            fcmd.close()
    except Exception,ex:
        sys.stderr.write(str(ex))
        sys.stderr.write("\nSyntax error in line %d\n" %(linenr))
        # In syntax-check mode keep scanning the rest of the file.
        if not test:
            fd.close()
            sys.exit(1)
# Normal exit pass
print "Pass"
sys.exit(0)
| gpl-2.0 |
jonathanmz34/ztransfert | node_modules/node-gyp/gyp/tools/pretty_vcproj.py | 2637 | 9586 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Make the format of a vcproj really pretty.
This script normalize and sort an xml. It also fetches all the properties
inside linked vsprops and include them explicitly in the vcproj.
It outputs the resulting xml to stdout.
"""
__author__ = 'nsylvain (Nicolas Sylvain)'
import os
import sys
from xml.dom.minidom import parse
from xml.dom.minidom import Node
REPLACEMENTS = dict()
ARGUMENTS = None
class CmpTuple(object):
    """Compare function between 2 tuple."""
    def __call__(self, x, y):
        # Order by the first element only (the attribute name).
        return cmp(x[0], y[0])
class CmpNode(object):
    """Compare function between 2 xml nodes."""
    def __call__(self, x, y):
        def get_string(node):
            # Build a canonical sort key out of the node's name, value
            # and (name-sorted) attributes.
            node_string = "node"
            node_string += node.nodeName
            if node.nodeValue:
                node_string += node.nodeValue
            if node.attributes:
                # We first sort by name, if present.
                node_string += node.getAttribute("Name")
                all_nodes = []
                for (name, value) in node.attributes.items():
                    all_nodes.append((name, value))
                all_nodes.sort(CmpTuple())
                for (name, value) in all_nodes:
                    node_string += name
                    node_string += value
            return node_string
        return cmp(get_string(x), get_string(y))
def PrettyPrintNode(node, indent=0):
    # Recursively print *node* as indented XML to stdout, one attribute
    # per line, with attributes sorted by name.
    if node.nodeType == Node.TEXT_NODE:
        if node.data.strip():
            print '%s%s' % (' '*indent, node.data.strip())
        return
    if node.childNodes:
        node.normalize()
    # Get the number of attributes
    attr_count = 0
    if node.attributes:
        attr_count = node.attributes.length
    # Print the main tag
    if attr_count == 0:
        print '%s<%s>' % (' '*indent, node.nodeName)
    else:
        print '%s<%s' % (' '*indent, node.nodeName)
        all_attributes = []
        for (name, value) in node.attributes.items():
            all_attributes.append((name, value))
        all_attributes.sort(CmpTuple())
        for (name, value) in all_attributes:
            print '%s %s="%s"' % (' '*indent, name, value)
        print '%s>' % (' '*indent)
    if node.nodeValue:
        print '%s %s' % (' '*indent, node.nodeValue)
    for sub_node in node.childNodes:
        PrettyPrintNode(sub_node, indent=indent+2)
    print '%s</%s>' % (' '*indent, node.nodeName)
def FlattenFilter(node):
    """Return a flat list of every non-Filter child node, recursing into
    nested <Filter> elements; the special '_excluded_files' filter (and
    everything inside it) is dropped entirely."""
    if (node.attributes and
        node.getAttribute('Name') == '_excluded_files'):
        # The "_excluded_files" filter is intentionally ignored.
        return []
    flattened = []
    for child in node.childNodes:
        if child.nodeName == 'Filter':
            flattened.extend(FlattenFilter(child))
        else:
            flattened.append(child)
    return flattened
def FixFilenames(filenames, current_directory):
    # Apply the key=value REPLACEMENTS to each filename, strip quoting,
    # and absolutize paths relative to *current_directory*; names starting
    # with '$' (MSVC macros) are kept as-is.
    # NOTE(review): os.chdir() changes the process working directory as a
    # side effect so os.path.abspath resolves against current_directory.
    new_list = []
    for filename in filenames:
        if filename:
            for key in REPLACEMENTS:
                filename = filename.replace(key, REPLACEMENTS[key])
            os.chdir(current_directory)
            filename = filename.strip('"\' ')
            if filename.startswith('$'):
                new_list.append(filename)
            else:
                new_list.append(os.path.abspath(filename))
    return new_list
def AbsoluteNode(node):
    """Makes all the properties we know about in this node absolute."""
    if node.attributes:
        for (name, value) in node.attributes.items():
            if name in ['InheritedPropertySheets', 'RelativePath',
                        'AdditionalIncludeDirectories',
                        'IntermediateDirectory', 'OutputDirectory',
                        'AdditionalLibraryDirectories']:
                # We want to fix up these paths
                path_list = value.split(';')
                new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1]))
                node.setAttribute(name, ';'.join(new_list))
            # Empty attributes are dropped outright.
            if not value:
                node.removeAttribute(name)
def CleanupVcproj(node):
    """For each sub node, we call recursively this function.

    Normalizes the subtree in place: absolutizes paths, strips whitespace
    from text nodes, sorts/dedups semicolon-separated attribute lists,
    flattens Filter elements, and re-inserts children in sorted order.
    """
    for sub_node in node.childNodes:
        AbsoluteNode(sub_node)
        CleanupVcproj(sub_node)
    # Normalize the node, and remove all extranous whitespaces.
    for sub_node in node.childNodes:
        if sub_node.nodeType == Node.TEXT_NODE:
            sub_node.data = sub_node.data.replace("\r", "")
            sub_node.data = sub_node.data.replace("\n", "")
            sub_node.data = sub_node.data.rstrip()
    # Fix all the semicolon separated attributes to be sorted, and we also
    # remove the dups.
    if node.attributes:
        for (name, value) in node.attributes.items():
            sorted_list = sorted(value.split(';'))
            unique_list = []
            for i in sorted_list:
                if not unique_list.count(i):
                    unique_list.append(i)
            node.setAttribute(name, ';'.join(unique_list))
            if not value:
                node.removeAttribute(name)
    if node.childNodes:
        node.normalize()
    # For each node, take a copy, and remove it from the list.
    node_array = []
    while node.childNodes and node.childNodes[0]:
        # Take a copy of the node and remove it from the list.
        current = node.childNodes[0]
        node.removeChild(current)
        # If the child is a filter, we want to append all its children
        # to this same list.
        if current.nodeName == 'Filter':
            node_array.extend(FlattenFilter(current))
        else:
            node_array.append(current)
    # Sort the list.
    node_array.sort(CmpNode())
    # Insert the nodes in the correct order.
    for new_node in node_array:
        # But don't append empty tool node.
        if new_node.nodeName == 'Tool':
            if new_node.attributes and new_node.attributes.length == 1:
                # This one was empty.
                continue
        if new_node.nodeName == 'UserMacro':
            continue
        node.appendChild(new_node)
def GetConfiguationNodes(vcproj):
    """Return the <Configuration> children of every <Configurations>
    element directly under *vcproj*, in document order."""
    #TODO(nsylvain): Find a better way to navigate the xml.
    return [config
            for section in vcproj.childNodes
            if section.nodeName == "Configurations"
            for config in section.childNodes
            if config.nodeName == "Configuration"]
def GetChildrenVsprops(filename):
    # Return the absolutized list of property sheets that *filename*
    # (a .vsprops file) itself inherits from, or [] when it has none.
    dom = parse(filename)
    if dom.documentElement.attributes:
        vsprops = dom.documentElement.getAttribute('InheritedPropertySheets')
        return FixFilenames(vsprops.split(';'), os.path.dirname(filename))
    return []
def SeekToNode(node1, child2):
    """Find the child of *node1* that corresponds to *child2*.

    Two nodes correspond when they share both nodeName and the 'Name'
    attribute. Returns None for text nodes, unnamed nodes, or when no
    match exists among node1's children.
    """
    # Text nodes carry no attributes, so they can never be matched.
    if child2.nodeType == Node.TEXT_NODE:
        return None
    wanted_name = child2.getAttribute("Name")
    if not wanted_name:
        # Without a name there is no way to pair nodes up.
        return None
    for candidate in node1.childNodes:
        if (candidate.nodeName == child2.nodeName and
            candidate.getAttribute("Name") == wanted_name):
            return candidate
    # No match. We give up.
    return None
def MergeAttributes(node1, node2):
    # Merge node2's attributes into node1: equal values are left alone,
    # differing values are concatenated with ';', and missing ones copied.
    # No attributes to merge?
    if not node2.attributes:
        return
    for (name, value2) in node2.attributes.items():
        # Don't merge the 'Name' attribute.
        if name == 'Name':
            continue
        value1 = node1.getAttribute(name)
        if value1:
            # The attribute exists in the main node. If it's equal, we leave it
            # untouched, otherwise we concatenate it.
            if value1 != value2:
                node1.setAttribute(name, ';'.join([value1, value2]))
        else:
            # The attribute does not exist in the main node. We append this one.
            node1.setAttribute(name, value2)
        # If the attribute was a property sheet attribute, we remove it, since
        # they are useless.
        if name == 'InheritedPropertySheets':
            node1.removeAttribute(name)
def MergeProperties(node1, node2):
    # Recursively merge node2's attributes and children into node1;
    # unmatched children are deep-cloned and appended.
    MergeAttributes(node1, node2)
    for child2 in node2.childNodes:
        child1 = SeekToNode(node1, child2)
        if child1:
            MergeProperties(child1, child2)
        else:
            node1.appendChild(child2.cloneNode(True))
def main(argv):
    """Main function of this vcproj prettifier.

    argv[1] is the .vcproj path; remaining args are key=value string
    replacements applied to every path. Returns a process exit code.
    """
    global ARGUMENTS
    ARGUMENTS = argv
    # check if we have exactly 1 parameter.
    if len(argv) < 2:
        print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
               '[key2=value2]' % argv[0])
        return 1
    # Parse the keys
    for i in range(2, len(argv)):
        (key, value) = argv[i].split('=')
        REPLACEMENTS[key] = value
    # Open the vcproj and parse the xml.
    dom = parse(argv[1])
    # First thing we need to do is find the Configuration Node and merge them
    # with the vsprops they include.
    for configuration_node in GetConfiguationNodes(dom.documentElement):
        # Get the property sheets associated with this configuration.
        vsprops = configuration_node.getAttribute('InheritedPropertySheets')
        # Fix the filenames to be absolute.
        vsprops_list = FixFilenames(vsprops.strip().split(';'),
                                    os.path.dirname(argv[1]))
        # Extend the list of vsprops with all vsprops contained in the current
        # vsprops.
        for current_vsprops in vsprops_list:
            vsprops_list.extend(GetChildrenVsprops(current_vsprops))
        # Now that we have all the vsprops, we need to merge them.
        for current_vsprops in vsprops_list:
            MergeProperties(configuration_node,
                            parse(current_vsprops).documentElement)
    # Now that everything is merged, we need to cleanup the xml.
    CleanupVcproj(dom.documentElement)
    # Finally, we use the pretty xml function to print the vcproj back to the
    # user.
    #print dom.toprettyxml(newl="\n")
    PrettyPrintNode(dom.documentElement)
    return 0
# Run the prettifier with the process arguments and propagate its exit code.
if __name__ == '__main__':
    sys.exit(main(sys.argv))
| mit |
sloanyang/android_external_webkit | Tools/Scripts/webkitpy/common/newstringio.py | 18 | 1761 | #!/usr/bin/env python
# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""'with'-compliant StringIO implementation."""
import StringIO
class StringIO(StringIO.StringIO):
    """StringIO subclass usable as a 'with'-statement context manager."""
    def __enter__(self):
        return self
    def __exit__(self, type, value, traceback):
        # NOTE(review): deliberately does not close the buffer, so its
        # contents remain readable after the 'with' block — confirm intended.
        pass
| gpl-2.0 |
cypsun/FreeCAD | src/3rdParty/Pivy-0.5/soqt.py | 38 | 101587 | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 1.3.36
#
# Don't modify this file, modify the SWIG interface instead.
"""
The soqt module is a wrapper for the SoQt library. The module will try
to import the sip module which is used for PyQt. If found the involved
wrapped Qt structures are converted to ones suitable for PyQt,
otherwise it will fall back to regular SWIG structures.
"""
import _soqt
import new
new_instancemethod = new.instancemethod
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
    # SWIG-generated setter: routes writes through the class's
    # __swig_setmethods__ table; when static, refuses to create new
    # attributes on proxy objects. (Generated file — edit the .i instead.)
    if (name == "thisown"): return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'PySwigObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name,None)
    if method: return method(self,value)
    if (not static) or hasattr(self,name):
        self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
    # Dynamic variant: unknown names become new instance attributes.
    return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
    # Generated getter: 'thisown' proxies the SWIG ownership flag; other
    # names go through the class's __swig_getmethods__ table.
    if (name == "thisown"): return self.this.own()
    method = class_type.__swig_getmethods__.get(name,None)
    if method: return method(self)
    raise AttributeError,name
def _swig_repr(self):
    # repr() that shows the proxied C++ pointer when available.
    try: strthis = "proxy of " + self.this.__repr__()
    except: strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
import types
try:
_object = types.ObjectType
_newclass = 1
except AttributeError:
class _object : pass
_newclass = 0
del types
def _swig_setattr_nondynamic_method(set):
    # Wrap a setter so only existing attributes (or 'this') may be assigned.
    def set_attr(self,name,value):
        if (name == "thisown"): return self.this.own(value)
        if hasattr(self,name) or (name == "this"):
            set(self,name,value)
        else:
            raise AttributeError("You cannot add attributes to %s" % self)
    return set_attr
class charp(object):
    """SWIG proxy for a C 'char *' pointer (generated code; change the
    SWIG interface file rather than editing this by hand)."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        this = _soqt.new_charp(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _soqt.delete_charp
    __del__ = lambda self : None;
    def assign(*args): return _soqt.charp_assign(*args)
    def value(*args): return _soqt.charp_value(*args)
    def cast(*args): return _soqt.charp_cast(*args)
    frompointer = staticmethod(_soqt.charp_frompointer)
charp_swigregister = _soqt.charp_swigregister
charp_swigregister(charp)
cast = _soqt.cast
charp_frompointer = _soqt.charp_frompointer
class intp(object):
    """SWIG proxy for a C 'int *' pointer (generated code; change the
    SWIG interface file rather than editing this by hand)."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        this = _soqt.new_intp(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _soqt.delete_intp
    __del__ = lambda self : None;
    def assign(*args): return _soqt.intp_assign(*args)
    def value(*args): return _soqt.intp_value(*args)
    def cast(*args): return _soqt.intp_cast(*args)
    frompointer = staticmethod(_soqt.intp_frompointer)
intp_swigregister = _soqt.intp_swigregister
intp_swigregister(intp)
intp_frompointer = _soqt.intp_frompointer
class longp(object):
    """SWIG proxy for a C 'long *' pointer (generated code; change the
    SWIG interface file rather than editing this by hand)."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        this = _soqt.new_longp(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _soqt.delete_longp
    __del__ = lambda self : None;
    def assign(*args): return _soqt.longp_assign(*args)
    def value(*args): return _soqt.longp_value(*args)
    def cast(*args): return _soqt.longp_cast(*args)
    frompointer = staticmethod(_soqt.longp_frompointer)
longp_swigregister = _soqt.longp_swigregister
longp_swigregister(longp)
longp_frompointer = _soqt.longp_frompointer
class floatp(object):
    """SWIG proxy for a C 'float *' pointer (generated code; change the
    SWIG interface file rather than editing this by hand)."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        this = _soqt.new_floatp(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _soqt.delete_floatp
    __del__ = lambda self : None;
    def assign(*args): return _soqt.floatp_assign(*args)
    def value(*args): return _soqt.floatp_value(*args)
    def cast(*args): return _soqt.floatp_cast(*args)
    frompointer = staticmethod(_soqt.floatp_frompointer)
floatp_swigregister = _soqt.floatp_swigregister
floatp_swigregister(floatp)
floatp_frompointer = _soqt.floatp_frompointer
class doublep(object):
    """SWIG proxy for a C 'double *' pointer (generated code; change the
    SWIG interface file rather than editing this by hand)."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        this = _soqt.new_doublep(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _soqt.delete_doublep
    __del__ = lambda self : None;
    def assign(*args): return _soqt.doublep_assign(*args)
    def value(*args): return _soqt.doublep_value(*args)
    def cast(*args): return _soqt.doublep_cast(*args)
    frompointer = staticmethod(_soqt.doublep_frompointer)
doublep_swigregister = _soqt.doublep_swigregister
doublep_swigregister(doublep)
doublep_frompointer = _soqt.doublep_frompointer
# Boolean constants mirrored from the C headers.
FALSE = _soqt.FALSE
TRUE = _soqt.TRUE
# Feature-test macros exported by the wrapped library's configure step.
HAVE_INTTYPES_H = _soqt.HAVE_INTTYPES_H
HAVE_STDINT_H = _soqt.HAVE_STDINT_H
HAVE_SYS_TYPES_H = _soqt.HAVE_SYS_TYPES_H
HAVE_STDDEF_H = _soqt.HAVE_STDDEF_H
HAVE_INT8_T = _soqt.HAVE_INT8_T
HAVE_UINT8_T = _soqt.HAVE_UINT8_T
HAVE_INT16_T = _soqt.HAVE_INT16_T
HAVE_UINT16_T = _soqt.HAVE_UINT16_T
HAVE_INT32_T = _soqt.HAVE_INT32_T
HAVE_UINT32_T = _soqt.HAVE_UINT32_T
HAVE_INT64_T = _soqt.HAVE_INT64_T
HAVE_UINT64_T = _soqt.HAVE_UINT64_T
HAVE_INTPTR_T = _soqt.HAVE_INTPTR_T
HAVE_UINTPTR_T = _soqt.HAVE_UINTPTR_T
# Math constants re-exported from the C math headers.
M_E = _soqt.M_E
M_LOG2E = _soqt.M_LOG2E
M_LOG10E = _soqt.M_LOG10E
M_LN2 = _soqt.M_LN2
M_LN10 = _soqt.M_LN10
M_PI = _soqt.M_PI
M_TWOPI = _soqt.M_TWOPI
M_PI_2 = _soqt.M_PI_2
M_PI_4 = _soqt.M_PI_4
M_3PI_4 = _soqt.M_3PI_4
M_SQRTPI = _soqt.M_SQRTPI
M_1_PI = _soqt.M_1_PI
M_2_PI = _soqt.M_2_PI
M_2_SQRTPI = _soqt.M_2_SQRTPI
M_SQRT2 = _soqt.M_SQRT2
M_SQRT1_2 = _soqt.M_SQRT1_2
M_LN2LO = _soqt.M_LN2LO
M_LN2HI = _soqt.M_LN2HI
M_SQRT3 = _soqt.M_SQRT3
M_IVLN10 = _soqt.M_IVLN10
M_LOG2_E = _soqt.M_LOG2_E
M_INVLN2 = _soqt.M_INVLN2
# Coin library version identifiers and build-workaround flags.
COIN_MAJOR_VERSION = _soqt.COIN_MAJOR_VERSION
COIN_MINOR_VERSION = _soqt.COIN_MINOR_VERSION
COIN_MICRO_VERSION = _soqt.COIN_MICRO_VERSION
COIN_VERSION = _soqt.COIN_VERSION
HAVE_HASH_QUOTING = _soqt.HAVE_HASH_QUOTING
SUN_CC_4_0_SOTYPE_INIT_BUG = _soqt.SUN_CC_4_0_SOTYPE_INIT_BUG
class SbDict(object):
    """SWIG proxy of the C++ ``SbDict`` hash-dictionary class."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        this = _soqt.new_SbDict(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _soqt.delete_SbDict
    __del__ = lambda self : None;
    def applyToAll(*args): return _soqt.SbDict_applyToAll(*args)
    def clear(*args): return _soqt.SbDict_clear(*args)
    def enter(*args): return _soqt.SbDict_enter(*args)
    def find(*args): return _soqt.SbDict_find(*args)
    def makePList(*args): return _soqt.SbDict_makePList(*args)
    def remove(*args): return _soqt.SbDict_remove(*args)
    def setHashingFunction(*args): return _soqt.SbDict_setHashingFunction(*args)
SbDict_swigregister = _soqt.SbDict_swigregister
SbDict_swigregister(SbDict)
class SoType(object):
    """SWIG proxy of the C++ ``SoType`` run-time type descriptor.

    Supports rich comparison against other SoType instances and
    instantiation of the described type via ``createInstance``.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    fromName = staticmethod(_soqt.SoType_fromName)
    def getName(*args): return _soqt.SoType_getName(*args)
    def getParent(*args): return _soqt.SoType_getParent(*args)
    def isDerivedFrom(*args): return _soqt.SoType_isDerivedFrom(*args)
    getAllDerivedFrom = staticmethod(_soqt.SoType_getAllDerivedFrom)
    def canCreateInstance(*args): return _soqt.SoType_canCreateInstance(*args)
    def getData(*args): return _soqt.SoType_getData(*args)
    def getKey(*args): return _soqt.SoType_getKey(*args)
    def __eq__(*args): return _soqt.SoType___eq__(*args)
    def __ne__(*args): return _soqt.SoType___ne__(*args)
    def __lt__(*args): return _soqt.SoType___lt__(*args)
    def __le__(*args): return _soqt.SoType___le__(*args)
    def __ge__(*args): return _soqt.SoType___ge__(*args)
    def __gt__(*args): return _soqt.SoType___gt__(*args)
    removeType = staticmethod(_soqt.SoType_removeType)
    init = staticmethod(_soqt.SoType_init)
    fromKey = staticmethod(_soqt.SoType_fromKey)
    badType = staticmethod(_soqt.SoType_badType)
    def isBad(*args): return _soqt.SoType_isBad(*args)
    def makeInternal(*args): return _soqt.SoType_makeInternal(*args)
    def isInternal(*args): return _soqt.SoType_isInternal(*args)
    getNumTypes = staticmethod(_soqt.SoType_getNumTypes)
    def getInstantiationMethod(*args): return _soqt.SoType_getInstantiationMethod(*args)
    def createInstance(*args): return _soqt.SoType_createInstance(*args)
    def __init__(self, *args):
        """__init__(self) -> SoType"""
        this = _soqt.new_SoType(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _soqt.delete_SoType
    __del__ = lambda self : None;
SoType_swigregister = _soqt.SoType_swigregister
SoType_swigregister(SoType)
# Module-level aliases for the class's static methods (SWIG convention).
SoType_fromName = _soqt.SoType_fromName
SoType_getAllDerivedFrom = _soqt.SoType_getAllDerivedFrom
SoType_removeType = _soqt.SoType_removeType
SoType_init = _soqt.SoType_init
SoType_fromKey = _soqt.SoType_fromKey
SoType_badType = _soqt.SoType_badType
SoType_getNumTypes = _soqt.SoType_getNumTypes
class SbPList(object):
    """SWIG proxy of the C++ ``SbPList`` pointer list.

    Supports Python indexing via __getitem__/__setitem__ and
    iteration over its getLength() elements.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        this = _soqt.new_SbPList(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _soqt.delete_SbPList
    __del__ = lambda self : None;
    def copy(*args): return _soqt.SbPList_copy(*args)
    def fit(*args): return _soqt.SbPList_fit(*args)
    def append(*args): return _soqt.SbPList_append(*args)
    def find(*args): return _soqt.SbPList_find(*args)
    def insert(*args): return _soqt.SbPList_insert(*args)
    def removeItem(*args): return _soqt.SbPList_removeItem(*args)
    def remove(*args): return _soqt.SbPList_remove(*args)
    def removeFast(*args): return _soqt.SbPList_removeFast(*args)
    def getLength(*args): return _soqt.SbPList_getLength(*args)
    def truncate(*args): return _soqt.SbPList_truncate(*args)
    def getArrayPtr(*args): return _soqt.SbPList_getArrayPtr(*args)
    def __eq__(*args): return _soqt.SbPList___eq__(*args)
    def __ne__(*args): return _soqt.SbPList___ne__(*args)
    def get(*args): return _soqt.SbPList_get(*args)
    def set(*args): return _soqt.SbPList_set(*args)
    def __getitem__(*args): return _soqt.SbPList___getitem__(*args)
    def __setitem__(*args): return _soqt.SbPList___setitem__(*args)
    def __iter__(self):
        # Hand-written extension: yield each element in index order.
        for i in range(self.getLength()):
            yield self[i]
SbPList_swigregister = _soqt.SbPList_swigregister
SbPList_swigregister(SbPList)
class SbIntList(SbPList):
    """SWIG proxy of the C++ ``SbIntList`` (integer specialization of SbPList)."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        this = _soqt.new_SbIntList(*args)
        try: self.this.append(this)
        except: self.this = this
    def append(*args): return _soqt.SbIntList_append(*args)
    def find(*args): return _soqt.SbIntList_find(*args)
    def insert(*args): return _soqt.SbIntList_insert(*args)
    def __setitem__(*args): return _soqt.SbIntList___setitem__(*args)
    def __getitem__(*args): return _soqt.SbIntList___getitem__(*args)
    def get(*args): return _soqt.SbIntList_get(*args)
    __swig_destroy__ = _soqt.delete_SbIntList
    __del__ = lambda self : None;
SbIntList_swigregister = _soqt.SbIntList_swigregister
SbIntList_swigregister(SbIntList)
class SbString(object):
    """SWIG proxy of the C++ ``SbString`` string class.

    Indexable via __getitem__ and iterable character-by-character
    through the Python string returned by ``getString()``.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args):
        this = _soqt.new_SbString(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _soqt.delete_SbString
    __del__ = lambda self : None;
    hash = staticmethod(_soqt.SbString_hash)
    def getLength(*args): return _soqt.SbString_getLength(*args)
    def makeEmpty(*args): return _soqt.SbString_makeEmpty(*args)
    def getString(*args): return _soqt.SbString_getString(*args)
    def getSubString(*args): return _soqt.SbString_getSubString(*args)
    def deleteSubString(*args): return _soqt.SbString_deleteSubString(*args)
    def addIntString(*args): return _soqt.SbString_addIntString(*args)
    def __iadd__(*args): return _soqt.SbString___iadd__(*args)
    def compareSubString(*args): return _soqt.SbString_compareSubString(*args)
    def sprintf(*args): return _soqt.SbString_sprintf(*args)
    def apply(*args): return _soqt.SbString_apply(*args)
    def find(*args): return _soqt.SbString_find(*args)
    def findAll(*args): return _soqt.SbString_findAll(*args)
    def lower(*args): return _soqt.SbString_lower(*args)
    def upper(*args): return _soqt.SbString_upper(*args)
    def __eq__(*args): return _soqt.SbString___eq__(*args)
    def __nq__(*args): return _soqt.SbString___nq__(*args)
    def __getitem__(*args): return _soqt.SbString___getitem__(*args)
    def __iter__(self):
        # BUG FIX: the original called the bare name ``getString()``, which
        # is unbound at module scope and raised NameError on iteration.
        # The method must be looked up on the instance.
        return self.getString().__iter__()
    def __repr__(*args): return _soqt.SbString___repr__(*args)
SbString_swigregister = _soqt.SbString_swigregister
SbString_swigregister(SbString)
SbString_hash = _soqt.SbString_hash
class SbName(object):
    """SWIG proxy of the C++ ``SbName`` interned-name class.

    Indexable via __getitem__ and iterable character-by-character
    through the Python string returned by ``getString()``.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args):
        this = _soqt.new_SbName(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _soqt.delete_SbName
    __del__ = lambda self : None;
    def getString(*args): return _soqt.SbName_getString(*args)
    def getLength(*args): return _soqt.SbName_getLength(*args)
    isIdentStartChar = staticmethod(_soqt.SbName_isIdentStartChar)
    isIdentChar = staticmethod(_soqt.SbName_isIdentChar)
    isBaseNameStartChar = staticmethod(_soqt.SbName_isBaseNameStartChar)
    isBaseNameChar = staticmethod(_soqt.SbName_isBaseNameChar)
    empty = staticmethod(_soqt.SbName_empty)
    def __eq__(*args): return _soqt.SbName___eq__(*args)
    def __nq__(*args): return _soqt.SbName___nq__(*args)
    def __getitem__(*args): return _soqt.SbName___getitem__(*args)
    def __iter__(self):
        # BUG FIX: the original called the bare name ``getString()``, which
        # is unbound at module scope and raised NameError on iteration.
        # The method must be looked up on the instance.
        return self.getString().__iter__()
    def __repr__(*args): return _soqt.SbName___repr__(*args)
SbName_swigregister = _soqt.SbName_swigregister
SbName_swigregister(SbName)
# Module-level aliases for the class's static methods (SWIG convention).
SbName_isIdentStartChar = _soqt.SbName_isIdentStartChar
SbName_isIdentChar = _soqt.SbName_isIdentChar
SbName_isBaseNameStartChar = _soqt.SbName_isBaseNameStartChar
SbName_isBaseNameChar = _soqt.SbName_isBaseNameChar
SbName_empty = _soqt.SbName_empty
class SoError(object):
    """SWIG proxy of the C++ ``SoError`` base error-reporting class."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        this = _soqt.new_SoError(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _soqt.delete_SoError
    __del__ = lambda self : None;
    setHandlerCallback = staticmethod(_soqt.SoError_setHandlerCallback)
    getHandlerCallback = staticmethod(_soqt.SoError_getHandlerCallback)
    getHandlerData = staticmethod(_soqt.SoError_getHandlerData)
    def getDebugString(*args): return _soqt.SoError_getDebugString(*args)
    getClassTypeId = staticmethod(_soqt.SoError_getClassTypeId)
    def getTypeId(*args): return _soqt.SoError_getTypeId(*args)
    def isOfType(*args): return _soqt.SoError_isOfType(*args)
    post = staticmethod(_soqt.SoError_post)
    getString = staticmethod(_soqt.SoError_getString)
    initClass = staticmethod(_soqt.SoError_initClass)
    initClasses = staticmethod(_soqt.SoError_initClasses)
SoError_swigregister = _soqt.SoError_swigregister
SoError_swigregister(SoError)
# Module-level aliases for the class's static methods (SWIG convention).
SoError_setHandlerCallback = _soqt.SoError_setHandlerCallback
SoError_getHandlerCallback = _soqt.SoError_getHandlerCallback
SoError_getHandlerData = _soqt.SoError_getHandlerData
SoError_getClassTypeId = _soqt.SoError_getClassTypeId
SoError_post = _soqt.SoError_post
SoError_getString = _soqt.SoError_getString
SoError_initClass = _soqt.SoError_initClass
SoError_initClasses = _soqt.SoError_initClasses
class SoDebugError(SoError):
    """SWIG proxy of the C++ ``SoDebugError`` class.

    Severity levels ERROR/WARNING/INFO mirror the C++ enum.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    ERROR = _soqt.SoDebugError_ERROR
    WARNING = _soqt.SoDebugError_WARNING
    INFO = _soqt.SoDebugError_INFO
    setHandlerCallback = staticmethod(_soqt.SoDebugError_setHandlerCallback)
    getHandlerCallback = staticmethod(_soqt.SoDebugError_getHandlerCallback)
    getHandlerData = staticmethod(_soqt.SoDebugError_getHandlerData)
    getClassTypeId = staticmethod(_soqt.SoDebugError_getClassTypeId)
    def getTypeId(*args): return _soqt.SoDebugError_getTypeId(*args)
    def getSeverity(*args): return _soqt.SoDebugError_getSeverity(*args)
    post = staticmethod(_soqt.SoDebugError_post)
    postWarning = staticmethod(_soqt.SoDebugError_postWarning)
    postInfo = staticmethod(_soqt.SoDebugError_postInfo)
    initClass = staticmethod(_soqt.SoDebugError_initClass)
    def __init__(self, *args):
        """__init__(self) -> SoDebugError"""
        this = _soqt.new_SoDebugError(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _soqt.delete_SoDebugError
    __del__ = lambda self : None;
SoDebugError_swigregister = _soqt.SoDebugError_swigregister
SoDebugError_swigregister(SoDebugError)
# Module-level aliases for the class's static methods (SWIG convention).
SoDebugError_setHandlerCallback = _soqt.SoDebugError_setHandlerCallback
SoDebugError_getHandlerCallback = _soqt.SoDebugError_getHandlerCallback
SoDebugError_getHandlerData = _soqt.SoDebugError_getHandlerData
SoDebugError_getClassTypeId = _soqt.SoDebugError_getClassTypeId
SoDebugError_post = _soqt.SoDebugError_post
SoDebugError_postWarning = _soqt.SoDebugError_postWarning
SoDebugError_postInfo = _soqt.SoDebugError_postInfo
SoDebugError_initClass = _soqt.SoDebugError_initClass
class SbVec2s(object):
    """SWIG proxy of the C++ ``SbVec2s`` two-component short vector.

    Supports arithmetic operators, indexing, iteration over its two
    components, and has a fixed length of 2.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        this = _soqt.new_SbVec2s(*args)
        try: self.this.append(this)
        except: self.this = this
    def setValue(*args): return _soqt.SbVec2s_setValue(*args)
    def getValue(*args): return _soqt.SbVec2s_getValue(*args)
    def dot(*args): return _soqt.SbVec2s_dot(*args)
    def negate(*args): return _soqt.SbVec2s_negate(*args)
    def __imul__(*args): return _soqt.SbVec2s___imul__(*args)
    def __idiv__(*args): return _soqt.SbVec2s___idiv__(*args)
    def __iadd__(*args): return _soqt.SbVec2s___iadd__(*args)
    def __isub__(*args): return _soqt.SbVec2s___isub__(*args)
    def __neg__(*args): return _soqt.SbVec2s___neg__(*args)
    def output(*args): return _soqt.SbVec2s_output(*args)
    def __add__(*args): return _soqt.SbVec2s___add__(*args)
    def __sub__(*args): return _soqt.SbVec2s___sub__(*args)
    def __mul__(*args): return _soqt.SbVec2s___mul__(*args)
    def __rmul__(*args): return _soqt.SbVec2s___rmul__(*args)
    def __div__(*args): return _soqt.SbVec2s___div__(*args)
    def __eq__(*args): return _soqt.SbVec2s___eq__(*args)
    def __nq__(*args): return _soqt.SbVec2s___nq__(*args)
    def __getitem__(*args): return _soqt.SbVec2s___getitem__(*args)
    def __setitem__(*args): return _soqt.SbVec2s___setitem__(*args)
    def __iter__(self):
        for i in range(2):
            yield self[i]
    def __len__(self):
        return 2
    __swig_destroy__ = _soqt.delete_SbVec2s
    __del__ = lambda self : None;
SbVec2s_swigregister = _soqt.SbVec2s_swigregister
SbVec2s_swigregister(SbVec2s)
class SbTime(object):
    """SWIG proxy of the C++ ``SbTime`` time-value class.

    Supports arithmetic and comparison operators against other SbTime
    instances, plus formatting/parsing helpers.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        this = _soqt.new_SbTime(*args)
        try: self.this.append(this)
        except: self.this = this
    getTimeOfDay = staticmethod(_soqt.SbTime_getTimeOfDay)
    def setToTimeOfDay(*args): return _soqt.SbTime_setToTimeOfDay(*args)
    zero = staticmethod(_soqt.SbTime_zero)
    max = staticmethod(_soqt.SbTime_max)
    maxTime = staticmethod(_soqt.SbTime_maxTime)
    sleep = staticmethod(_soqt.SbTime_sleep)
    def setValue(*args): return _soqt.SbTime_setValue(*args)
    def setMsecValue(*args): return _soqt.SbTime_setMsecValue(*args)
    def getValue(*args): return _soqt.SbTime_getValue(*args)
    def getMsecValue(*args): return _soqt.SbTime_getMsecValue(*args)
    def format(*args): return _soqt.SbTime_format(*args)
    def formatDate(*args): return _soqt.SbTime_formatDate(*args)
    def parsedate(*args): return _soqt.SbTime_parsedate(*args)
    def __iadd__(*args): return _soqt.SbTime___iadd__(*args)
    def __isub__(*args): return _soqt.SbTime___isub__(*args)
    def __neg__(*args): return _soqt.SbTime___neg__(*args)
    def __imul__(*args): return _soqt.SbTime___imul__(*args)
    def __idiv__(*args): return _soqt.SbTime___idiv__(*args)
    def __mod__(*args): return _soqt.SbTime___mod__(*args)
    def __eq__(*args): return _soqt.SbTime___eq__(*args)
    def __ne__(*args): return _soqt.SbTime___ne__(*args)
    def __lt__(*args): return _soqt.SbTime___lt__(*args)
    def __gt__(*args): return _soqt.SbTime___gt__(*args)
    def __le__(*args): return _soqt.SbTime___le__(*args)
    def __ge__(*args): return _soqt.SbTime___ge__(*args)
    def output(*args): return _soqt.SbTime_output(*args)
    def __add__(*args): return _soqt.SbTime___add__(*args)
    def __sub__(*args): return _soqt.SbTime___sub__(*args)
    def __mul__(*args): return _soqt.SbTime___mul__(*args)
    def __rmul__(*args): return _soqt.SbTime___rmul__(*args)
    def __div__(*args): return _soqt.SbTime___div__(*args)
    __swig_destroy__ = _soqt.delete_SbTime
    __del__ = lambda self : None;
SbTime_swigregister = _soqt.SbTime_swigregister
SbTime_swigregister(SbTime)
# Module-level aliases for the class's static methods (SWIG convention).
SbTime_getTimeOfDay = _soqt.SbTime_getTimeOfDay
SbTime_zero = _soqt.SbTime_zero
SbTime_max = _soqt.SbTime_max
SbTime_maxTime = _soqt.SbTime_maxTime
SbTime_sleep = _soqt.SbTime_sleep
class SoEvent(object):
    """SWIG proxy of the C++ ``SoEvent`` base input-event class."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        this = _soqt.new_SoEvent(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _soqt.delete_SoEvent
    __del__ = lambda self : None;
    def isOfType(*args): return _soqt.SoEvent_isOfType(*args)
    getClassTypeId = staticmethod(_soqt.SoEvent_getClassTypeId)
    def getTypeId(*args): return _soqt.SoEvent_getTypeId(*args)
    def setTime(*args): return _soqt.SoEvent_setTime(*args)
    def getTime(*args): return _soqt.SoEvent_getTime(*args)
    def setPosition(*args): return _soqt.SoEvent_setPosition(*args)
    def getPosition(*args): return _soqt.SoEvent_getPosition(*args)
    def getNormalizedPosition(*args): return _soqt.SoEvent_getNormalizedPosition(*args)
    def setShiftDown(*args): return _soqt.SoEvent_setShiftDown(*args)
    def wasShiftDown(*args): return _soqt.SoEvent_wasShiftDown(*args)
    def setCtrlDown(*args): return _soqt.SoEvent_setCtrlDown(*args)
    def wasCtrlDown(*args): return _soqt.SoEvent_wasCtrlDown(*args)
    def setAltDown(*args): return _soqt.SoEvent_setAltDown(*args)
    def wasAltDown(*args): return _soqt.SoEvent_wasAltDown(*args)
    initClass = staticmethod(_soqt.SoEvent_initClass)
SoEvent_swigregister = _soqt.SoEvent_swigregister
SoEvent_swigregister(SoEvent)
SoEvent_getClassTypeId = _soqt.SoEvent_getClassTypeId
SoEvent_initClass = _soqt.SoEvent_initClass
class SoNotRec(object):
    """SWIG proxy of the C++ ``SoNotRec`` notification record.

    The CONTAINER/PARENT/SENSOR/FIELD/ENGINE constants mirror the
    C++ notification-type enum.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    CONTAINER = _soqt.SoNotRec_CONTAINER
    PARENT = _soqt.SoNotRec_PARENT
    SENSOR = _soqt.SoNotRec_SENSOR
    FIELD = _soqt.SoNotRec_FIELD
    ENGINE = _soqt.SoNotRec_ENGINE
    def __init__(self, *args):
        this = _soqt.new_SoNotRec(*args)
        try: self.this.append(this)
        except: self.this = this
    def setType(*args): return _soqt.SoNotRec_setType(*args)
    def getBase(*args): return _soqt.SoNotRec_getBase(*args)
    def getType(*args): return _soqt.SoNotRec_getType(*args)
    def getPrevious(*args): return _soqt.SoNotRec_getPrevious(*args)
    def setPrevious(*args): return _soqt.SoNotRec_setPrevious(*args)
    def output(*args): return _soqt.SoNotRec_output(*args)
    __swig_destroy__ = _soqt.delete_SoNotRec
    __del__ = lambda self : None;
SoNotRec_swigregister = _soqt.SoNotRec_swigregister
SoNotRec_swigregister(SoNotRec)
class SoNotList(object):
    """SWIG proxy of the C++ ``SoNotList`` list of notification records."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        this = _soqt.new_SoNotList(*args)
        try: self.this.append(this)
        except: self.this = this
    def append(*args): return _soqt.SoNotList_append(*args)
    def setLastType(*args): return _soqt.SoNotList_setLastType(*args)
    def getFirstRec(*args): return _soqt.SoNotList_getFirstRec(*args)
    def getLastRec(*args): return _soqt.SoNotList_getLastRec(*args)
    def getFirstRecAtNode(*args): return _soqt.SoNotList_getFirstRecAtNode(*args)
    def getLastField(*args): return _soqt.SoNotList_getLastField(*args)
    def getLastEngineOutput(*args): return _soqt.SoNotList_getLastEngineOutput(*args)
    def getTimeStamp(*args): return _soqt.SoNotList_getTimeStamp(*args)
    def output(*args): return _soqt.SoNotList_output(*args)
    __swig_destroy__ = _soqt.delete_SoNotList
    __del__ = lambda self : None;
SoNotList_swigregister = _soqt.SoNotList_swigregister
SoNotList_swigregister(SoNotList)
class SoField(object):
    """SWIG proxy of the abstract C++ ``SoField`` base class.

    Abstract: constructing it directly raises AttributeError.
    NORMAL_FIELD/EVENTIN_FIELD/EVENTOUT_FIELD/EXPOSED_FIELD mirror
    the C++ field-type enum.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    __swig_destroy__ = _soqt.delete_SoField
    __del__ = lambda self : None;
    initClass = staticmethod(_soqt.SoField_initClass)
    initClasses = staticmethod(_soqt.SoField_initClasses)
    cleanupClass = staticmethod(_soqt.SoField_cleanupClass)
    def setIgnored(*args): return _soqt.SoField_setIgnored(*args)
    def isIgnored(*args): return _soqt.SoField_isIgnored(*args)
    def setDefault(*args): return _soqt.SoField_setDefault(*args)
    def isDefault(*args): return _soqt.SoField_isDefault(*args)
    def getTypeId(*args): return _soqt.SoField_getTypeId(*args)
    getClassTypeId = staticmethod(_soqt.SoField_getClassTypeId)
    def isOfType(*args): return _soqt.SoField_isOfType(*args)
    def enableConnection(*args): return _soqt.SoField_enableConnection(*args)
    def isConnectionEnabled(*args): return _soqt.SoField_isConnectionEnabled(*args)
    def isConnectedFromEngine(*args): return _soqt.SoField_isConnectedFromEngine(*args)
    def getConnectedEngine(*args): return _soqt.SoField_getConnectedEngine(*args)
    def connectFrom(*args): return _soqt.SoField_connectFrom(*args)
    def appendConnection(*args): return _soqt.SoField_appendConnection(*args)
    def isConnectedFromField(*args): return _soqt.SoField_isConnectedFromField(*args)
    def getConnectedField(*args): return _soqt.SoField_getConnectedField(*args)
    def getNumConnections(*args): return _soqt.SoField_getNumConnections(*args)
    def getForwardConnections(*args): return _soqt.SoField_getForwardConnections(*args)
    def getConnections(*args): return _soqt.SoField_getConnections(*args)
    def disconnect(*args): return _soqt.SoField_disconnect(*args)
    def isConnected(*args): return _soqt.SoField_isConnected(*args)
    def setContainer(*args): return _soqt.SoField_setContainer(*args)
    def getContainer(*args): return _soqt.SoField_getContainer(*args)
    def set(*args): return _soqt.SoField_set(*args)
    def shouldWrite(*args): return _soqt.SoField_shouldWrite(*args)
    def touch(*args): return _soqt.SoField_touch(*args)
    def startNotify(*args): return _soqt.SoField_startNotify(*args)
    def notify(*args): return _soqt.SoField_notify(*args)
    def enableNotify(*args): return _soqt.SoField_enableNotify(*args)
    def isNotifyEnabled(*args): return _soqt.SoField_isNotifyEnabled(*args)
    def addAuditor(*args): return _soqt.SoField_addAuditor(*args)
    def removeAuditor(*args): return _soqt.SoField_removeAuditor(*args)
    def __eq__(*args): return _soqt.SoField___eq__(*args)
    def __ne__(*args): return _soqt.SoField___ne__(*args)
    def connectionStatusChanged(*args): return _soqt.SoField_connectionStatusChanged(*args)
    def isReadOnly(*args): return _soqt.SoField_isReadOnly(*args)
    def isSame(*args): return _soqt.SoField_isSame(*args)
    def copyFrom(*args): return _soqt.SoField_copyFrom(*args)
    def fixCopy(*args): return _soqt.SoField_fixCopy(*args)
    def referencesCopy(*args): return _soqt.SoField_referencesCopy(*args)
    def copyConnection(*args): return _soqt.SoField_copyConnection(*args)
    def read(*args): return _soqt.SoField_read(*args)
    def write(*args): return _soqt.SoField_write(*args)
    def countWriteRefs(*args): return _soqt.SoField_countWriteRefs(*args)
    NORMAL_FIELD = _soqt.SoField_NORMAL_FIELD
    EVENTIN_FIELD = _soqt.SoField_EVENTIN_FIELD
    EVENTOUT_FIELD = _soqt.SoField_EVENTOUT_FIELD
    EXPOSED_FIELD = _soqt.SoField_EXPOSED_FIELD
    def setFieldType(*args): return _soqt.SoField_setFieldType(*args)
    def getFieldType(*args): return _soqt.SoField_getFieldType(*args)
    def getDirty(*args): return _soqt.SoField_getDirty(*args)
    def setDirty(*args): return _soqt.SoField_setDirty(*args)
    def evaluate(*args): return _soqt.SoField_evaluate(*args)
    def get(*args): return _soqt.SoField_get(*args)
SoField_swigregister = _soqt.SoField_swigregister
SoField_swigregister(SoField)
# Module-level aliases for the class's static methods (SWIG convention).
SoField_initClass = _soqt.SoField_initClass
SoField_initClasses = _soqt.SoField_initClasses
SoField_cleanupClass = _soqt.SoField_cleanupClass
SoField_getClassTypeId = _soqt.SoField_getClassTypeId
class SoSField(SoField):
    """SWIG proxy of the abstract C++ single-value field base ``SoSField``."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    __swig_destroy__ = _soqt.delete_SoSField
    __del__ = lambda self : None;
    initClass = staticmethod(_soqt.SoSField_initClass)
    getClassTypeId = staticmethod(_soqt.SoSField_getClassTypeId)
    atexit_cleanup = staticmethod(_soqt.SoSField_atexit_cleanup)
SoSField_swigregister = _soqt.SoSField_swigregister
SoSField_swigregister(SoSField)
SoSField_initClass = _soqt.SoSField_initClass
SoSField_getClassTypeId = _soqt.SoSField_getClassTypeId
SoSField_atexit_cleanup = _soqt.SoSField_atexit_cleanup
class SoMField(SoField):
    """SWIG proxy of the abstract C++ multi-value field base ``SoMField``.

    Iterable and sized via the hand-written __iter__/__len__ extensions.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    __swig_destroy__ = _soqt.delete_SoMField
    __del__ = lambda self : None;
    getClassTypeId = staticmethod(_soqt.SoMField_getClassTypeId)
    atexit_cleanup = staticmethod(_soqt.SoMField_atexit_cleanup)
    def getNum(*args): return _soqt.SoMField_getNum(*args)
    def setNum(*args): return _soqt.SoMField_setNum(*args)
    def deleteValues(*args): return _soqt.SoMField_deleteValues(*args)
    def insertSpace(*args): return _soqt.SoMField_insertSpace(*args)
    def set1(*args): return _soqt.SoMField_set1(*args)
    initClass = staticmethod(_soqt.SoMField_initClass)
    def enableDeleteValues(*args): return _soqt.SoMField_enableDeleteValues(*args)
    def isDeleteValuesEnabled(*args): return _soqt.SoMField_isDeleteValuesEnabled(*args)
    def __iter__(self):
        # Yield each contained value in index order.
        i = 0
        while i < self.getNum():
            yield self[i]
            i += 1
    def __len__(*args): return _soqt.SoMField___len__(*args)
    def get1(*args): return _soqt.SoMField_get1(*args)
SoMField_swigregister = _soqt.SoMField_swigregister
SoMField_swigregister(SoMField)
SoMField_getClassTypeId = _soqt.SoMField_getClassTypeId
SoMField_atexit_cleanup = _soqt.SoMField_atexit_cleanup
SoMField_initClass = _soqt.SoMField_initClass
import pivy
class QEvent(object):
    """Opaque SWIG proxy of the C++ ``QEvent`` class (no constructor exposed)."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    __swig_destroy__ = _soqt.delete_QEvent
    __del__ = lambda self : None;
QEvent_swigregister = _soqt.QEvent_swigregister
QEvent_swigregister(QEvent)
class QWidget(object):
    """Opaque SWIG proxy of the C++ ``QWidget`` class (no constructor exposed)."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    __swig_destroy__ = _soqt.delete_QWidget
    __del__ = lambda self : None;
QWidget_swigregister = _soqt.QWidget_swigregister
QWidget_swigregister(QWidget)
# SoQt library version identifiers.
SOQT_MAJOR_VERSION = _soqt.SOQT_MAJOR_VERSION
SOQT_MINOR_VERSION = _soqt.SOQT_MINOR_VERSION
SOQT_MICRO_VERSION = _soqt.SOQT_MICRO_VERSION
SOQT_VERSION = _soqt.SOQT_VERSION
GUI_TOOLKIT_VERSION = _soqt.GUI_TOOLKIT_VERSION
class SoQtObject(object):
    """SWIG proxy of the C++ ``SoQtObject`` base class of the SoQt bindings."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    def initClass(*args):
        """initClass()"""
        return _soqt.SoQtObject_initClass(*args)
    initClass = staticmethod(initClass)
    def getClassTypeId(*args):
        """getClassTypeId() -> SoType"""
        return _soqt.SoQtObject_getClassTypeId(*args)
    getClassTypeId = staticmethod(getClassTypeId)
    def getTypeId(*args):
        """getTypeId(self) -> SoType"""
        return _soqt.SoQtObject_getTypeId(*args)
    def isOfType(*args):
        """isOfType(self, SoType type) -> SbBool"""
        return _soqt.SoQtObject_isOfType(*args)
    def init(*args):
        """init()"""
        return _soqt.SoQtObject_init(*args)
    init = staticmethod(init)
    __swig_destroy__ = _soqt.delete_SoQtObject
    __del__ = lambda self : None;
SoQtObject_swigregister = _soqt.SoQtObject_swigregister
SoQtObject_swigregister(SoQtObject)
# Module-level wrappers for the class's static methods (SWIG convention).
def SoQtObject_initClass(*args):
    """SoQtObject_initClass()"""
    return _soqt.SoQtObject_initClass(*args)
def SoQtObject_getClassTypeId(*args):
    """SoQtObject_getClassTypeId() -> SoType"""
    return _soqt.SoQtObject_getClassTypeId(*args)
def SoQtObject_init(*args):
    """SoQtObject_init()"""
    return _soqt.SoQtObject_init(*args)
SOQT_SUN_CC_4_0_SOTYPE_INIT_BUG = _soqt.SOQT_SUN_CC_4_0_SOTYPE_INIT_BUG
class SoQtDevice(SoQtObject):
    """SWIG proxy of the abstract C++ ``SoQtDevice`` input-device base class.

    Concrete devices (keyboard, mouse) translate native QEvents into
    Coin SoEvents via ``translateEvent``.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    def initClass(*args):
        """initClass()"""
        return _soqt.SoQtDevice_initClass(*args)
    initClass = staticmethod(initClass)
    def getClassTypeId(*args):
        """getClassTypeId() -> SoType"""
        return _soqt.SoQtDevice_getClassTypeId(*args)
    getClassTypeId = staticmethod(getClassTypeId)
    def getTypeId(*args):
        """getTypeId(self) -> SoType"""
        return _soqt.SoQtDevice_getTypeId(*args)
    __swig_destroy__ = _soqt.delete_SoQtDevice
    __del__ = lambda self : None;
    def enable(*args):
        """enable(self, QWidget w, SoQtEventHandler handler, void closure)"""
        return _soqt.SoQtDevice_enable(*args)
    def disable(*args):
        """disable(self, QWidget w, SoQtEventHandler handler, void closure)"""
        return _soqt.SoQtDevice_disable(*args)
    def translateEvent(*args):
        """translateEvent(self, QEvent event) -> SoEvent"""
        return _soqt.SoQtDevice_translateEvent(*args)
    def setWindowSize(*args):
        """setWindowSize(self, SbVec2s size)"""
        return _soqt.SoQtDevice_setWindowSize(*args)
    def getWindowSize(*args):
        """getWindowSize(self) -> SbVec2s"""
        return _soqt.SoQtDevice_getWindowSize(*args)
    def initClasses(*args):
        """initClasses()"""
        return _soqt.SoQtDevice_initClasses(*args)
    initClasses = staticmethod(initClasses)
SoQtDevice_swigregister = _soqt.SoQtDevice_swigregister
SoQtDevice_swigregister(SoQtDevice)
# Module-level wrappers for the class's static methods (SWIG convention).
def SoQtDevice_initClass(*args):
    """SoQtDevice_initClass()"""
    return _soqt.SoQtDevice_initClass(*args)
def SoQtDevice_getClassTypeId(*args):
    """SoQtDevice_getClassTypeId() -> SoType"""
    return _soqt.SoQtDevice_getClassTypeId(*args)
def SoQtDevice_initClasses(*args):
    """SoQtDevice_initClasses()"""
    return _soqt.SoQtDevice_initClasses(*args)
class SoQtKeyboard(SoQtDevice):
    """SWIG proxy of the C++ ``SoQtKeyboard`` device.

    The KEY_PRESS/KEY_RELEASE/ALL_EVENTS constants form the event mask
    accepted by the constructor.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def initClass(*args):
        """initClass()"""
        return _soqt.SoQtKeyboard_initClass(*args)
    initClass = staticmethod(initClass)
    def getClassTypeId(*args):
        """getClassTypeId() -> SoType"""
        return _soqt.SoQtKeyboard_getClassTypeId(*args)
    getClassTypeId = staticmethod(getClassTypeId)
    def getTypeId(*args):
        """getTypeId(self) -> SoType"""
        return _soqt.SoQtKeyboard_getTypeId(*args)
    def createInstance(*args):
        """createInstance() -> void"""
        return _soqt.SoQtKeyboard_createInstance(*args)
    createInstance = staticmethod(createInstance)
    KEY_PRESS = _soqt.SoQtKeyboard_KEY_PRESS
    KEY_RELEASE = _soqt.SoQtKeyboard_KEY_RELEASE
    ALL_EVENTS = _soqt.SoQtKeyboard_ALL_EVENTS
    def __init__(self, *args):
        """
        __init__(self, int eventmask=ALL_EVENTS) -> SoQtKeyboard
        __init__(self) -> SoQtKeyboard
        """
        this = _soqt.new_SoQtKeyboard(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _soqt.delete_SoQtKeyboard
    __del__ = lambda self : None;
    def enable(*args):
        """enable(self, QWidget widget, SoQtEventHandler handler, void closure)"""
        return _soqt.SoQtKeyboard_enable(*args)
    def disable(*args):
        """disable(self, QWidget widget, SoQtEventHandler handler, void closure)"""
        return _soqt.SoQtKeyboard_disable(*args)
    def translateEvent(*args):
        """translateEvent(self, QEvent event) -> SoEvent"""
        return _soqt.SoQtKeyboard_translateEvent(*args)
SoQtKeyboard_swigregister = _soqt.SoQtKeyboard_swigregister
SoQtKeyboard_swigregister(SoQtKeyboard)
# Module-level wrappers for the class's static methods (SWIG convention).
def SoQtKeyboard_initClass(*args):
    """SoQtKeyboard_initClass()"""
    return _soqt.SoQtKeyboard_initClass(*args)
def SoQtKeyboard_getClassTypeId(*args):
    """SoQtKeyboard_getClassTypeId() -> SoType"""
    return _soqt.SoQtKeyboard_getClassTypeId(*args)
def SoQtKeyboard_createInstance(*args):
    """SoQtKeyboard_createInstance() -> void"""
    return _soqt.SoQtKeyboard_createInstance(*args)
class SoQtMouse(SoQtDevice):
    """Proxy of C++ SoQtMouse class.

    SWIG-generated binding: every method forwards to the _soqt extension.
    translateEvent() maps a native QEvent to a Coin SoEvent, per its
    generated signature; see the SoQt C++ documentation for semantics.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def initClass(*args):
        """initClass()"""
        return _soqt.SoQtMouse_initClass(*args)
    initClass = staticmethod(initClass)
    def getClassTypeId(*args):
        """getClassTypeId() -> SoType"""
        return _soqt.SoQtMouse_getClassTypeId(*args)
    getClassTypeId = staticmethod(getClassTypeId)
    def getTypeId(*args):
        """getTypeId(self) -> SoType"""
        return _soqt.SoQtMouse_getTypeId(*args)
    def createInstance(*args):
        """createInstance() -> void"""
        return _soqt.SoQtMouse_createInstance(*args)
    createInstance = staticmethod(createInstance)
    # Event-mask constants mirrored from the C++ enum; used as the
    # eventmask argument of __init__ (ALL_EVENTS is the default).
    BUTTON_PRESS = _soqt.SoQtMouse_BUTTON_PRESS
    BUTTON_RELEASE = _soqt.SoQtMouse_BUTTON_RELEASE
    POINTER_MOTION = _soqt.SoQtMouse_POINTER_MOTION
    BUTTON_MOTION = _soqt.SoQtMouse_BUTTON_MOTION
    ALL_EVENTS = _soqt.SoQtMouse_ALL_EVENTS
    def __init__(self, *args):
        """
        __init__(self, int eventmask=ALL_EVENTS) -> SoQtMouse
        __init__(self) -> SoQtMouse
        """
        # Standard SWIG ownership dance: attach the new C++ pointer to
        # self.this, falling back to direct assignment on first creation.
        this = _soqt.new_SoQtMouse(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _soqt.delete_SoQtMouse
    __del__ = lambda self : None;
    def enable(*args):
        """enable(self, QWidget widget, SoQtEventHandler handler, void closure)"""
        return _soqt.SoQtMouse_enable(*args)
    def disable(*args):
        """disable(self, QWidget widget, SoQtEventHandler handler, void closure)"""
        return _soqt.SoQtMouse_disable(*args)
    def translateEvent(*args):
        """translateEvent(self, QEvent event) -> SoEvent"""
        return _soqt.SoQtMouse_translateEvent(*args)
SoQtMouse_swigregister = _soqt.SoQtMouse_swigregister
SoQtMouse_swigregister(SoQtMouse)
# Flat module-level aliases for the SoQtMouse staticmethods; SWIG emits
# both spellings, and each forwards to the same _soqt C entry point.
def SoQtMouse_initClass(*args):
    """SoQtMouse_initClass()"""
    return _soqt.SoQtMouse_initClass(*args)
def SoQtMouse_getClassTypeId(*args):
    """SoQtMouse_getClassTypeId() -> SoType"""
    return _soqt.SoQtMouse_getClassTypeId(*args)
def SoQtMouse_createInstance(*args):
    """SoQtMouse_createInstance() -> void"""
    return _soqt.SoQtMouse_createInstance(*args)
class SoQt(object):
    """Proxy of C++ SoQt class.

    Static-only toolkit entry point (not instantiable: __init__ raises).
    Wraps SoQt's global lifecycle -- init()/mainLoop()/exitMainLoop()/done()
    -- plus widget helpers, version queries, error handling, and the GL
    lock.  All calls forward to the _soqt extension module.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    def init(*args):
        """
        init(char appname, char classname="SoQt") -> QWidget
        init(char appname) -> QWidget
        init(int argc, char argv, char appname, char classname="SoQt") -> QWidget
        init(int argc, char argv, char appname) -> QWidget
        init(QWidget toplevelwidget)
        """
        return _soqt.SoQt_init(*args)
    init = staticmethod(init)
    def exitMainLoop(*args):
        """exitMainLoop()"""
        return _soqt.SoQt_exitMainLoop(*args)
    exitMainLoop = staticmethod(exitMainLoop)
    def done(*args):
        """done()"""
        return _soqt.SoQt_done(*args)
    done = staticmethod(done)
    def getTopLevelWidget(*args):
        """getTopLevelWidget() -> QWidget"""
        return _soqt.SoQt_getTopLevelWidget(*args)
    getTopLevelWidget = staticmethod(getTopLevelWidget)
    def getShellWidget(*args):
        """getShellWidget(QWidget w) -> QWidget"""
        return _soqt.SoQt_getShellWidget(*args)
    getShellWidget = staticmethod(getShellWidget)
    def show(*args):
        """show(QWidget widget)"""
        return _soqt.SoQt_show(*args)
    show = staticmethod(show)
    def hide(*args):
        """hide(QWidget widget)"""
        return _soqt.SoQt_hide(*args)
    hide = staticmethod(hide)
    def setWidgetSize(*args):
        """setWidgetSize(QWidget widget, SbVec2s size)"""
        return _soqt.SoQt_setWidgetSize(*args)
    setWidgetSize = staticmethod(setWidgetSize)
    def getWidgetSize(*args):
        """getWidgetSize(QWidget widget) -> SbVec2s"""
        return _soqt.SoQt_getWidgetSize(*args)
    getWidgetSize = staticmethod(getWidgetSize)
    def createSimpleErrorDialog(*args):
        """
        createSimpleErrorDialog(QWidget widget, char title, char string1, char string2=None)
        createSimpleErrorDialog(QWidget widget, char title, char string1)
        """
        return _soqt.SoQt_createSimpleErrorDialog(*args)
    createSimpleErrorDialog = staticmethod(createSimpleErrorDialog)
    def getVersionInfo(*args):
        """
        getVersionInfo(int major=None, int minor=None, int micro=None)
        getVersionInfo(int major=None, int minor=None)
        getVersionInfo(int major=None)
        getVersionInfo()
        """
        return _soqt.SoQt_getVersionInfo(*args)
    getVersionInfo = staticmethod(getVersionInfo)
    def getVersionString(*args):
        """getVersionString() -> char"""
        return _soqt.SoQt_getVersionString(*args)
    getVersionString = staticmethod(getVersionString)
    def getVersionToolkitString(*args):
        """getVersionToolkitString() -> char"""
        return _soqt.SoQt_getVersionToolkitString(*args)
    getVersionToolkitString = staticmethod(getVersionToolkitString)
    # FatalErrors enum values (see setFatalErrorHandler).
    UNSPECIFIED_ERROR = _soqt.SoQt_UNSPECIFIED_ERROR
    NO_OPENGL_CANVAS = _soqt.SoQt_NO_OPENGL_CANVAS
    INTERNAL_ASSERT = _soqt.SoQt_INTERNAL_ASSERT
    def setFatalErrorHandler(*args):
        """setFatalErrorHandler(FatalErrorCB cb, void userdata) -> FatalErrorCB"""
        return _soqt.SoQt_setFatalErrorHandler(*args)
    setFatalErrorHandler = staticmethod(setFatalErrorHandler)
    def isDebugLibrary(*args):
        """isDebugLibrary() -> SbBool"""
        return _soqt.SoQt_isDebugLibrary(*args)
    isDebugLibrary = staticmethod(isDebugLibrary)
    def isCompatible(*args):
        """isCompatible(unsigned int major, unsigned int minor) -> SbBool"""
        return _soqt.SoQt_isCompatible(*args)
    isCompatible = staticmethod(isCompatible)
    # ABIType enum values (see getABIType).
    DLL = _soqt.SoQt_DLL
    LIB = _soqt.SoQt_LIB
    UNKNOWN = _soqt.SoQt_UNKNOWN
    def getABIType(*args):
        """getABIType() -> ABIType"""
        return _soqt.SoQt_getABIType(*args)
    getABIType = staticmethod(getABIType)
    def lockGL(*args):
        """lockGL()"""
        return _soqt.SoQt_lockGL(*args)
    lockGL = staticmethod(lockGL)
    def unlockGL(*args):
        """unlockGL()"""
        return _soqt.SoQt_unlockGL(*args)
    unlockGL = staticmethod(unlockGL)
    def mainLoop(*args):
        """mainLoop()"""
        return _soqt.SoQt_mainLoop(*args)
    mainLoop = staticmethod(mainLoop)
SoQt_swigregister = _soqt.SoQt_swigregister
SoQt_swigregister(SoQt)
# Flat module-level aliases for the SoQt staticmethods; SWIG emits both
# spellings, and each forwards to the same _soqt C entry point.
def SoQt_init(*args):
    """
    init(char appname, char classname="SoQt") -> QWidget
    init(char appname) -> QWidget
    init(int argc, char argv, char appname, char classname="SoQt") -> QWidget
    init(int argc, char argv, char appname) -> QWidget
    SoQt_init(QWidget toplevelwidget)
    """
    return _soqt.SoQt_init(*args)
def SoQt_exitMainLoop(*args):
    """SoQt_exitMainLoop()"""
    return _soqt.SoQt_exitMainLoop(*args)
def SoQt_done(*args):
    """SoQt_done()"""
    return _soqt.SoQt_done(*args)
def SoQt_getTopLevelWidget(*args):
    """SoQt_getTopLevelWidget() -> QWidget"""
    return _soqt.SoQt_getTopLevelWidget(*args)
def SoQt_getShellWidget(*args):
    """SoQt_getShellWidget(QWidget w) -> QWidget"""
    return _soqt.SoQt_getShellWidget(*args)
def SoQt_show(*args):
    """SoQt_show(QWidget widget)"""
    return _soqt.SoQt_show(*args)
def SoQt_hide(*args):
    """SoQt_hide(QWidget widget)"""
    return _soqt.SoQt_hide(*args)
def SoQt_setWidgetSize(*args):
    """SoQt_setWidgetSize(QWidget widget, SbVec2s size)"""
    return _soqt.SoQt_setWidgetSize(*args)
def SoQt_getWidgetSize(*args):
    """SoQt_getWidgetSize(QWidget widget) -> SbVec2s"""
    return _soqt.SoQt_getWidgetSize(*args)
def SoQt_createSimpleErrorDialog(*args):
    """
    createSimpleErrorDialog(QWidget widget, char title, char string1, char string2=None)
    SoQt_createSimpleErrorDialog(QWidget widget, char title, char string1)
    """
    return _soqt.SoQt_createSimpleErrorDialog(*args)
def SoQt_getVersionInfo(*args):
    """
    getVersionInfo(int major=None, int minor=None, int micro=None)
    getVersionInfo(int major=None, int minor=None)
    getVersionInfo(int major=None)
    SoQt_getVersionInfo()
    """
    return _soqt.SoQt_getVersionInfo(*args)
def SoQt_getVersionString(*args):
    """SoQt_getVersionString() -> char"""
    return _soqt.SoQt_getVersionString(*args)
def SoQt_getVersionToolkitString(*args):
    """SoQt_getVersionToolkitString() -> char"""
    return _soqt.SoQt_getVersionToolkitString(*args)
def SoQt_setFatalErrorHandler(*args):
    """SoQt_setFatalErrorHandler(FatalErrorCB cb, void userdata) -> FatalErrorCB"""
    return _soqt.SoQt_setFatalErrorHandler(*args)
def SoQt_isDebugLibrary(*args):
    """SoQt_isDebugLibrary() -> SbBool"""
    return _soqt.SoQt_isDebugLibrary(*args)
def SoQt_isCompatible(*args):
    """SoQt_isCompatible(unsigned int major, unsigned int minor) -> SbBool"""
    return _soqt.SoQt_isCompatible(*args)
def SoQt_getABIType(*args):
    """SoQt_getABIType() -> ABIType"""
    return _soqt.SoQt_getABIType(*args)
def SoQt_lockGL(*args):
    """SoQt_lockGL()"""
    return _soqt.SoQt_lockGL(*args)
def SoQt_unlockGL(*args):
    """SoQt_unlockGL()"""
    return _soqt.SoQt_unlockGL(*args)
def SoQt_mainLoop(*args):
    """SoQt_mainLoop()"""
    return _soqt.SoQt_mainLoop(*args)
class SoQtComponent(SoQtObject):
    """Proxy of C++ SoQtComponent class.

    Abstract base for all SoQt GUI components (not instantiable from
    Python: __init__ raises).  Exposes widget access, geometry, titles,
    fullscreen/visibility state, and a window-close callback hook; every
    method forwards to the _soqt extension.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    def initClass(*args):
        """initClass()"""
        return _soqt.SoQtComponent_initClass(*args)
    initClass = staticmethod(initClass)
    def getClassTypeId(*args):
        """getClassTypeId() -> SoType"""
        return _soqt.SoQtComponent_getClassTypeId(*args)
    getClassTypeId = staticmethod(getClassTypeId)
    def getTypeId(*args):
        """getTypeId(self) -> SoType"""
        return _soqt.SoQtComponent_getTypeId(*args)
    __swig_destroy__ = _soqt.delete_SoQtComponent
    __del__ = lambda self : None;
    def show(*args):
        """show(self)"""
        return _soqt.SoQtComponent_show(*args)
    def hide(*args):
        """hide(self)"""
        return _soqt.SoQtComponent_hide(*args)
    def setComponentCursor(*args):
        """setComponentCursor(self, SoQtCursor cursor)"""
        return _soqt.SoQtComponent_setComponentCursor(*args)
    def setWidgetCursor(*args):
        """setWidgetCursor(QWidget w, SoQtCursor cursor)"""
        return _soqt.SoQtComponent_setWidgetCursor(*args)
    setWidgetCursor = staticmethod(setWidgetCursor)
    def isFullScreen(*args):
        """isFullScreen(self) -> SbBool"""
        return _soqt.SoQtComponent_isFullScreen(*args)
    def setFullScreen(*args):
        """setFullScreen(self, SbBool onoff) -> SbBool"""
        return _soqt.SoQtComponent_setFullScreen(*args)
    def isVisible(*args):
        """isVisible(self) -> SbBool"""
        return _soqt.SoQtComponent_isVisible(*args)
    def isTopLevelShell(*args):
        """isTopLevelShell(self) -> SbBool"""
        return _soqt.SoQtComponent_isTopLevelShell(*args)
    def getWidget(*args):
        """getWidget(self) -> QWidget"""
        return _soqt.SoQtComponent_getWidget(*args)
    def getBaseWidget(*args):
        """getBaseWidget(self) -> QWidget"""
        return _soqt.SoQtComponent_getBaseWidget(*args)
    def getShellWidget(*args):
        """getShellWidget(self) -> QWidget"""
        return _soqt.SoQtComponent_getShellWidget(*args)
    def getParentWidget(*args):
        """getParentWidget(self) -> QWidget"""
        return _soqt.SoQtComponent_getParentWidget(*args)
    def setSize(*args):
        """setSize(self, SbVec2s size)"""
        return _soqt.SoQtComponent_setSize(*args)
    def getSize(*args):
        """getSize(self) -> SbVec2s"""
        return _soqt.SoQtComponent_getSize(*args)
    def setTitle(*args):
        """setTitle(self, char title)"""
        return _soqt.SoQtComponent_setTitle(*args)
    def getTitle(*args):
        """getTitle(self) -> char"""
        return _soqt.SoQtComponent_getTitle(*args)
    def setIconTitle(*args):
        """setIconTitle(self, char title)"""
        return _soqt.SoQtComponent_setIconTitle(*args)
    def getIconTitle(*args):
        """getIconTitle(self) -> char"""
        return _soqt.SoQtComponent_getIconTitle(*args)
    def getWidgetName(*args):
        """getWidgetName(self) -> char"""
        return _soqt.SoQtComponent_getWidgetName(*args)
    def getClassName(*args):
        """getClassName(self) -> char"""
        return _soqt.SoQtComponent_getClassName(*args)
    def setWindowCloseCallback(*args):
        """
        setWindowCloseCallback(self, SoQtComponentCB func, void user=None)
        setWindowCloseCallback(self, SoQtComponentCB func)
        """
        return _soqt.SoQtComponent_setWindowCloseCallback(*args)
    def getComponent(*args):
        """getComponent(QWidget widget) -> SoQtComponent"""
        return _soqt.SoQtComponent_getComponent(*args)
    getComponent = staticmethod(getComponent)
    def initClasses(*args):
        """initClasses()"""
        return _soqt.SoQtComponent_initClasses(*args)
    initClasses = staticmethod(initClasses)
SoQtComponent_swigregister = _soqt.SoQtComponent_swigregister
SoQtComponent_swigregister(SoQtComponent)
# Flat module-level aliases for the SoQtComponent staticmethods; SWIG
# emits both spellings, and each forwards to the same _soqt C entry point.
def SoQtComponent_initClass(*args):
    """SoQtComponent_initClass()"""
    return _soqt.SoQtComponent_initClass(*args)
def SoQtComponent_getClassTypeId(*args):
    """SoQtComponent_getClassTypeId() -> SoType"""
    return _soqt.SoQtComponent_getClassTypeId(*args)
def SoQtComponent_setWidgetCursor(*args):
    """SoQtComponent_setWidgetCursor(QWidget w, SoQtCursor cursor)"""
    return _soqt.SoQtComponent_setWidgetCursor(*args)
def SoQtComponent_getComponent(*args):
    """SoQtComponent_getComponent(QWidget widget) -> SoQtComponent"""
    return _soqt.SoQtComponent_getComponent(*args)
def SoQtComponent_initClasses(*args):
    """SoQtComponent_initClasses()"""
    return _soqt.SoQtComponent_initClasses(*args)
# OpenGL visual-attribute flag constants re-exported from the C extension.
# Both SO_GL_* and SO_GLX_* spellings are exposed; presumably the GLX
# names are legacy aliases of the GL ones -- verify in the soqt headers.
SO_GL_RGB = _soqt.SO_GL_RGB
SO_GLX_RGB = _soqt.SO_GLX_RGB
SO_GL_DOUBLE = _soqt.SO_GL_DOUBLE
SO_GLX_DOUBLE = _soqt.SO_GLX_DOUBLE
SO_GL_ZBUFFER = _soqt.SO_GL_ZBUFFER
SO_GLX_ZBUFFER = _soqt.SO_GLX_ZBUFFER
SO_GL_OVERLAY = _soqt.SO_GL_OVERLAY
SO_GLX_OVERLAY = _soqt.SO_GLX_OVERLAY
SO_GL_STEREO = _soqt.SO_GL_STEREO
SO_GLX_STEREO = _soqt.SO_GLX_STEREO
class SoQtGLWidget(SoQtComponent):
    """Proxy of C++ SoQtGLWidget class.

    Abstract OpenGL canvas component (not instantiable from Python:
    __init__ raises).  Exposes toggles for the GL context configuration
    -- double/quad buffering, accumulation/stencil/alpha buffers, overlay
    planes, stereo -- and access to the underlying GL widgets.  All calls
    forward to the _soqt extension.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    def initClass(*args):
        """initClass()"""
        return _soqt.SoQtGLWidget_initClass(*args)
    initClass = staticmethod(initClass)
    def getClassTypeId(*args):
        """getClassTypeId() -> SoType"""
        return _soqt.SoQtGLWidget_getClassTypeId(*args)
    getClassTypeId = staticmethod(getClassTypeId)
    def getTypeId(*args):
        """getTypeId(self) -> SoType"""
        return _soqt.SoQtGLWidget_getTypeId(*args)
    def setBorder(*args):
        """setBorder(self, SbBool enable)"""
        return _soqt.SoQtGLWidget_setBorder(*args)
    def isBorder(*args):
        """isBorder(self) -> SbBool"""
        return _soqt.SoQtGLWidget_isBorder(*args)
    def setDoubleBuffer(*args):
        """setDoubleBuffer(self, SbBool enable)"""
        return _soqt.SoQtGLWidget_setDoubleBuffer(*args)
    def isDoubleBuffer(*args):
        """isDoubleBuffer(self) -> SbBool"""
        return _soqt.SoQtGLWidget_isDoubleBuffer(*args)
    def setDrawToFrontBufferEnable(*args):
        """setDrawToFrontBufferEnable(self, SbBool enable)"""
        return _soqt.SoQtGLWidget_setDrawToFrontBufferEnable(*args)
    def isDrawToFrontBufferEnable(*args):
        """isDrawToFrontBufferEnable(self) -> SbBool"""
        return _soqt.SoQtGLWidget_isDrawToFrontBufferEnable(*args)
    def setQuadBufferStereo(*args):
        """setQuadBufferStereo(self, SbBool enable)"""
        return _soqt.SoQtGLWidget_setQuadBufferStereo(*args)
    def isQuadBufferStereo(*args):
        """isQuadBufferStereo(self) -> SbBool"""
        return _soqt.SoQtGLWidget_isQuadBufferStereo(*args)
    def setAccumulationBuffer(*args):
        """setAccumulationBuffer(self, SbBool enable)"""
        return _soqt.SoQtGLWidget_setAccumulationBuffer(*args)
    def getAccumulationBuffer(*args):
        """getAccumulationBuffer(self) -> SbBool"""
        return _soqt.SoQtGLWidget_getAccumulationBuffer(*args)
    def setStencilBuffer(*args):
        """setStencilBuffer(self, SbBool enable)"""
        return _soqt.SoQtGLWidget_setStencilBuffer(*args)
    def getStencilBuffer(*args):
        """getStencilBuffer(self) -> SbBool"""
        return _soqt.SoQtGLWidget_getStencilBuffer(*args)
    def setAlphaChannel(*args):
        """setAlphaChannel(self, SbBool enable)"""
        return _soqt.SoQtGLWidget_setAlphaChannel(*args)
    def getAlphaChannel(*args):
        """getAlphaChannel(self) -> SbBool"""
        return _soqt.SoQtGLWidget_getAlphaChannel(*args)
    def setOverlayRender(*args):
        """setOverlayRender(self, SbBool onoff)"""
        return _soqt.SoQtGLWidget_setOverlayRender(*args)
    def isOverlayRender(*args):
        """isOverlayRender(self) -> SbBool"""
        return _soqt.SoQtGLWidget_isOverlayRender(*args)
    def setStealFocus(*args):
        """setStealFocus(self, SbBool enable)"""
        return _soqt.SoQtGLWidget_setStealFocus(*args)
    def isStealFocus(*args):
        """isStealFocus(self) -> SbBool"""
        return _soqt.SoQtGLWidget_isStealFocus(*args)
    def getGLWidget(*args):
        """getGLWidget(self) -> QWidget"""
        return _soqt.SoQtGLWidget_getGLWidget(*args)
    def getNormalWidget(*args):
        """getNormalWidget(self) -> QWidget"""
        return _soqt.SoQtGLWidget_getNormalWidget(*args)
    def getOverlayWidget(*args):
        """getOverlayWidget(self) -> QWidget"""
        return _soqt.SoQtGLWidget_getOverlayWidget(*args)
    def hasOverlayGLArea(*args):
        """hasOverlayGLArea(self) -> SbBool"""
        return _soqt.SoQtGLWidget_hasOverlayGLArea(*args)
    def hasNormalGLArea(*args):
        """hasNormalGLArea(self) -> SbBool"""
        return _soqt.SoQtGLWidget_hasNormalGLArea(*args)
    def getOverlayTransparentPixel(*args):
        """getOverlayTransparentPixel(self) -> unsigned long"""
        return _soqt.SoQtGLWidget_getOverlayTransparentPixel(*args)
    def getPointSizeLimits(*args):
        """getPointSizeLimits(self, SbVec2f range, float granularity)"""
        return _soqt.SoQtGLWidget_getPointSizeLimits(*args)
    def getLineWidthLimits(*args):
        """getLineWidthLimits(self, SbVec2f range, float granularity)"""
        return _soqt.SoQtGLWidget_getLineWidthLimits(*args)
SoQtGLWidget_swigregister = _soqt.SoQtGLWidget_swigregister
SoQtGLWidget_swigregister(SoQtGLWidget)
# Flat module-level aliases for the SoQtGLWidget staticmethods; SWIG
# emits both spellings, and each forwards to the same _soqt C entry point.
def SoQtGLWidget_initClass(*args):
    """SoQtGLWidget_initClass()"""
    return _soqt.SoQtGLWidget_initClass(*args)
def SoQtGLWidget_getClassTypeId(*args):
    """SoQtGLWidget_getClassTypeId() -> SoType"""
    return _soqt.SoQtGLWidget_getClassTypeId(*args)
class SoQtRenderArea(SoQtGLWidget):
    """Proxy of C++ SoQtRenderArea class.

    Concrete OpenGL canvas that renders a Coin scene graph.  Manages the
    scene graph and optional overlay scene graph, background colors and
    color maps, transparency/antialiasing settings, redraw scheduling,
    the SoSceneManager and SoGLRenderAction pair, and input-device/event
    routing.  All calls forward to the _soqt extension.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def initClass(*args):
        """initClass()"""
        return _soqt.SoQtRenderArea_initClass(*args)
    initClass = staticmethod(initClass)
    def getClassTypeId(*args):
        """getClassTypeId() -> SoType"""
        return _soqt.SoQtRenderArea_getClassTypeId(*args)
    getClassTypeId = staticmethod(getClassTypeId)
    def getTypeId(*args):
        """getTypeId(self) -> SoType"""
        return _soqt.SoQtRenderArea_getTypeId(*args)
    def createInstance(*args):
        """createInstance() -> void"""
        return _soqt.SoQtRenderArea_createInstance(*args)
    createInstance = staticmethod(createInstance)
    def __init__(self, *args):
        """
        __init__(self, QWidget parent=None, char name=None, SbBool embed=1,
            SbBool mouseInput=1, SbBool keyboardInput=1) -> SoQtRenderArea
        __init__(self, QWidget parent=None, char name=None, SbBool embed=1,
            SbBool mouseInput=1) -> SoQtRenderArea
        __init__(self, QWidget parent=None, char name=None, SbBool embed=1) -> SoQtRenderArea
        __init__(self, QWidget parent=None, char name=None) -> SoQtRenderArea
        __init__(self, QWidget parent=None) -> SoQtRenderArea
        __init__(self) -> SoQtRenderArea
        """
        # Standard SWIG ownership dance: attach the new C++ pointer to
        # self.this, falling back to direct assignment on first creation.
        this = _soqt.new_SoQtRenderArea(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _soqt.delete_SoQtRenderArea
    __del__ = lambda self : None;
    def setSceneGraph(*args):
        """setSceneGraph(self, SoNode scene)"""
        return _soqt.SoQtRenderArea_setSceneGraph(*args)
    def getSceneGraph(*args):
        """getSceneGraph(self) -> SoNode"""
        return _soqt.SoQtRenderArea_getSceneGraph(*args)
    def setOverlaySceneGraph(*args):
        """setOverlaySceneGraph(self, SoNode scene)"""
        return _soqt.SoQtRenderArea_setOverlaySceneGraph(*args)
    def getOverlaySceneGraph(*args):
        """getOverlaySceneGraph(self) -> SoNode"""
        return _soqt.SoQtRenderArea_getOverlaySceneGraph(*args)
    def setBackgroundColor(*args):
        """setBackgroundColor(self, SbColor color)"""
        return _soqt.SoQtRenderArea_setBackgroundColor(*args)
    def getBackgroundColor(*args):
        """getBackgroundColor(self) -> SbColor"""
        return _soqt.SoQtRenderArea_getBackgroundColor(*args)
    def setBackgroundIndex(*args):
        """setBackgroundIndex(self, int idx)"""
        return _soqt.SoQtRenderArea_setBackgroundIndex(*args)
    def getBackgroundIndex(*args):
        """getBackgroundIndex(self) -> int"""
        return _soqt.SoQtRenderArea_getBackgroundIndex(*args)
    def setOverlayBackgroundIndex(*args):
        """setOverlayBackgroundIndex(self, int idx)"""
        return _soqt.SoQtRenderArea_setOverlayBackgroundIndex(*args)
    def getOverlayBackgroundIndex(*args):
        """getOverlayBackgroundIndex(self) -> int"""
        return _soqt.SoQtRenderArea_getOverlayBackgroundIndex(*args)
    def setColorMap(*args):
        """setColorMap(self, int start, int num, SbColor colors)"""
        return _soqt.SoQtRenderArea_setColorMap(*args)
    def setOverlayColorMap(*args):
        """setOverlayColorMap(self, int start, int num, SbColor colors)"""
        return _soqt.SoQtRenderArea_setOverlayColorMap(*args)
    def setViewportRegion(*args):
        """setViewportRegion(self, SbViewportRegion newRegion)"""
        return _soqt.SoQtRenderArea_setViewportRegion(*args)
    def getViewportRegion(*args):
        """getViewportRegion(self) -> SbViewportRegion"""
        return _soqt.SoQtRenderArea_getViewportRegion(*args)
    def setTransparencyType(*args):
        """setTransparencyType(self, TransparencyType type)"""
        return _soqt.SoQtRenderArea_setTransparencyType(*args)
    def getTransparencyType(*args):
        """getTransparencyType(self) -> TransparencyType"""
        return _soqt.SoQtRenderArea_getTransparencyType(*args)
    def setAntialiasing(*args):
        """setAntialiasing(self, SbBool smoothing, int numPasses)"""
        return _soqt.SoQtRenderArea_setAntialiasing(*args)
    def getAntialiasing(*args):
        """getAntialiasing(self, SbBool smoothing, int numPasses)"""
        return _soqt.SoQtRenderArea_getAntialiasing(*args)
    def setClearBeforeRender(*args):
        """
        setClearBeforeRender(self, SbBool enable, SbBool zbEnable=1)
        setClearBeforeRender(self, SbBool enable)
        """
        return _soqt.SoQtRenderArea_setClearBeforeRender(*args)
    def isClearBeforeRender(*args):
        """isClearBeforeRender(self) -> SbBool"""
        return _soqt.SoQtRenderArea_isClearBeforeRender(*args)
    def isClearZBufferBeforeRender(*args):
        """isClearZBufferBeforeRender(self) -> SbBool"""
        return _soqt.SoQtRenderArea_isClearZBufferBeforeRender(*args)
    def setClearBeforeOverlayRender(*args):
        """setClearBeforeOverlayRender(self, SbBool enable)"""
        return _soqt.SoQtRenderArea_setClearBeforeOverlayRender(*args)
    def isClearBeforeOverlayRender(*args):
        """isClearBeforeOverlayRender(self) -> SbBool"""
        return _soqt.SoQtRenderArea_isClearBeforeOverlayRender(*args)
    def setAutoRedraw(*args):
        """setAutoRedraw(self, SbBool enable)"""
        return _soqt.SoQtRenderArea_setAutoRedraw(*args)
    def isAutoRedraw(*args):
        """isAutoRedraw(self) -> SbBool"""
        return _soqt.SoQtRenderArea_isAutoRedraw(*args)
    def setRedrawPriority(*args):
        """setRedrawPriority(self, uint32_t priority)"""
        return _soqt.SoQtRenderArea_setRedrawPriority(*args)
    def getRedrawPriority(*args):
        """getRedrawPriority(self) -> uint32_t"""
        return _soqt.SoQtRenderArea_getRedrawPriority(*args)
    def getDefaultRedrawPriority(*args):
        """getDefaultRedrawPriority() -> uint32_t"""
        return _soqt.SoQtRenderArea_getDefaultRedrawPriority(*args)
    getDefaultRedrawPriority = staticmethod(getDefaultRedrawPriority)
    def render(*args):
        """render(self)"""
        return _soqt.SoQtRenderArea_render(*args)
    def renderOverlay(*args):
        """renderOverlay(self)"""
        return _soqt.SoQtRenderArea_renderOverlay(*args)
    def scheduleRedraw(*args):
        """scheduleRedraw(self)"""
        return _soqt.SoQtRenderArea_scheduleRedraw(*args)
    def scheduleOverlayRedraw(*args):
        """scheduleOverlayRedraw(self)"""
        return _soqt.SoQtRenderArea_scheduleOverlayRedraw(*args)
    def redrawOnSelectionChange(*args):
        """redrawOnSelectionChange(self, SoSelection selection)"""
        return _soqt.SoQtRenderArea_redrawOnSelectionChange(*args)
    def redrawOverlayOnSelectionChange(*args):
        """redrawOverlayOnSelectionChange(self, SoSelection selection)"""
        return _soqt.SoQtRenderArea_redrawOverlayOnSelectionChange(*args)
    def setSceneManager(*args):
        """setSceneManager(self, SoSceneManager manager)"""
        return _soqt.SoQtRenderArea_setSceneManager(*args)
    def getSceneManager(*args):
        """getSceneManager(self) -> SoSceneManager"""
        return _soqt.SoQtRenderArea_getSceneManager(*args)
    def setOverlaySceneManager(*args):
        """setOverlaySceneManager(self, SoSceneManager manager)"""
        return _soqt.SoQtRenderArea_setOverlaySceneManager(*args)
    def getOverlaySceneManager(*args):
        """getOverlaySceneManager(self) -> SoSceneManager"""
        return _soqt.SoQtRenderArea_getOverlaySceneManager(*args)
    def setGLRenderAction(*args):
        """setGLRenderAction(self, SoGLRenderAction action)"""
        return _soqt.SoQtRenderArea_setGLRenderAction(*args)
    def getGLRenderAction(*args):
        """getGLRenderAction(self) -> SoGLRenderAction"""
        return _soqt.SoQtRenderArea_getGLRenderAction(*args)
    def setOverlayGLRenderAction(*args):
        """setOverlayGLRenderAction(self, SoGLRenderAction action)"""
        return _soqt.SoQtRenderArea_setOverlayGLRenderAction(*args)
    def getOverlayGLRenderAction(*args):
        """getOverlayGLRenderAction(self) -> SoGLRenderAction"""
        return _soqt.SoQtRenderArea_getOverlayGLRenderAction(*args)
    def sendSoEvent(*args):
        """sendSoEvent(self, SoEvent event) -> SbBool"""
        return _soqt.SoQtRenderArea_sendSoEvent(*args)
    def registerDevice(*args):
        """registerDevice(self, SoQtDevice device)"""
        return _soqt.SoQtRenderArea_registerDevice(*args)
    def unregisterDevice(*args):
        """unregisterDevice(self, SoQtDevice device)"""
        return _soqt.SoQtRenderArea_unregisterDevice(*args)
    def setEventCallback(*args):
        """
        setEventCallback(self, SoQtRenderAreaEventCB func, void user=None)
        setEventCallback(self, SoQtRenderAreaEventCB func)
        setEventCallback(self, PyObject pyfunc, PyObject user=None)
        setEventCallback(self, PyObject pyfunc)
        """
        return _soqt.SoQtRenderArea_setEventCallback(*args)
SoQtRenderArea_swigregister = _soqt.SoQtRenderArea_swigregister
SoQtRenderArea_swigregister(SoQtRenderArea)
# Flat module-level aliases for the SoQtRenderArea staticmethods; SWIG
# emits both spellings, and each forwards to the same _soqt C entry point.
def SoQtRenderArea_initClass(*args):
    """SoQtRenderArea_initClass()"""
    return _soqt.SoQtRenderArea_initClass(*args)
def SoQtRenderArea_getClassTypeId(*args):
    """SoQtRenderArea_getClassTypeId() -> SoType"""
    return _soqt.SoQtRenderArea_getClassTypeId(*args)
def SoQtRenderArea_createInstance(*args):
    """SoQtRenderArea_createInstance() -> void"""
    return _soqt.SoQtRenderArea_createInstance(*args)
def SoQtRenderArea_getDefaultRedrawPriority(*args):
    """SoQtRenderArea_getDefaultRedrawPriority() -> uint32_t"""
    return _soqt.SoQtRenderArea_getDefaultRedrawPriority(*args)
class SoQtViewer(SoQtRenderArea):
    """Proxy of C++ SoQtViewer class.

    Abstract camera-managing viewer on top of SoQtRenderArea (not
    instantiable from Python: __init__ raises).  Adds camera control,
    headlight, draw styles, buffering modes, auto-clipping, stereo
    viewing, detail seeking, and interaction start/finish callbacks.
    All calls forward to the _soqt extension.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    def initClass(*args):
        """initClass()"""
        return _soqt.SoQtViewer_initClass(*args)
    initClass = staticmethod(initClass)
    def getClassTypeId(*args):
        """getClassTypeId() -> SoType"""
        return _soqt.SoQtViewer_getClassTypeId(*args)
    getClassTypeId = staticmethod(getClassTypeId)
    def getTypeId(*args):
        """getTypeId(self) -> SoType"""
        return _soqt.SoQtViewer_getTypeId(*args)
    # Viewer type constants.
    BROWSER = _soqt.SoQtViewer_BROWSER
    EDITOR = _soqt.SoQtViewer_EDITOR
    # DrawStyle constants (second argument of setDrawStyle).
    VIEW_AS_IS = _soqt.SoQtViewer_VIEW_AS_IS
    VIEW_HIDDEN_LINE = _soqt.SoQtViewer_VIEW_HIDDEN_LINE
    VIEW_NO_TEXTURE = _soqt.SoQtViewer_VIEW_NO_TEXTURE
    VIEW_LOW_COMPLEXITY = _soqt.SoQtViewer_VIEW_LOW_COMPLEXITY
    VIEW_LINE = _soqt.SoQtViewer_VIEW_LINE
    VIEW_POINT = _soqt.SoQtViewer_VIEW_POINT
    VIEW_BBOX = _soqt.SoQtViewer_VIEW_BBOX
    VIEW_LOW_RES_LINE = _soqt.SoQtViewer_VIEW_LOW_RES_LINE
    VIEW_LOW_RES_POINT = _soqt.SoQtViewer_VIEW_LOW_RES_POINT
    VIEW_SAME_AS_STILL = _soqt.SoQtViewer_VIEW_SAME_AS_STILL
    VIEW_WIREFRAME_OVERLAY = _soqt.SoQtViewer_VIEW_WIREFRAME_OVERLAY
    # DrawType constants (first argument of setDrawStyle).
    STILL = _soqt.SoQtViewer_STILL
    INTERACTIVE = _soqt.SoQtViewer_INTERACTIVE
    # BufferType constants (see setBufferingType).
    BUFFER_SINGLE = _soqt.SoQtViewer_BUFFER_SINGLE
    BUFFER_DOUBLE = _soqt.SoQtViewer_BUFFER_DOUBLE
    BUFFER_INTERACTIVE = _soqt.SoQtViewer_BUFFER_INTERACTIVE
    # AutoClippingStrategy constants (see setAutoClippingStrategy).
    VARIABLE_NEAR_PLANE = _soqt.SoQtViewer_VARIABLE_NEAR_PLANE
    CONSTANT_NEAR_PLANE = _soqt.SoQtViewer_CONSTANT_NEAR_PLANE
    def setCamera(*args):
        """setCamera(self, SoCamera camera)"""
        return _soqt.SoQtViewer_setCamera(*args)
    def getCamera(*args):
        """getCamera(self) -> SoCamera"""
        return _soqt.SoQtViewer_getCamera(*args)
    def setCameraType(*args):
        """setCameraType(self, SoType type)"""
        return _soqt.SoQtViewer_setCameraType(*args)
    def getCameraType(*args):
        """getCameraType(self) -> SoType"""
        return _soqt.SoQtViewer_getCameraType(*args)
    def toggleCameraType(*args):
        """toggleCameraType(self)"""
        return _soqt.SoQtViewer_toggleCameraType(*args)
    def viewAll(*args):
        """viewAll(self)"""
        return _soqt.SoQtViewer_viewAll(*args)
    def saveHomePosition(*args):
        """saveHomePosition(self)"""
        return _soqt.SoQtViewer_saveHomePosition(*args)
    def resetToHomePosition(*args):
        """resetToHomePosition(self)"""
        return _soqt.SoQtViewer_resetToHomePosition(*args)
    def setHeadlight(*args):
        """setHeadlight(self, SbBool enable)"""
        return _soqt.SoQtViewer_setHeadlight(*args)
    def isHeadlight(*args):
        """isHeadlight(self) -> SbBool"""
        return _soqt.SoQtViewer_isHeadlight(*args)
    def getHeadlight(*args):
        """getHeadlight(self) -> SoDirectionalLight"""
        return _soqt.SoQtViewer_getHeadlight(*args)
    def setDrawStyle(*args):
        """setDrawStyle(self, DrawType type, DrawStyle style)"""
        return _soqt.SoQtViewer_setDrawStyle(*args)
    def getDrawStyle(*args):
        """getDrawStyle(self, DrawType type) -> DrawStyle"""
        return _soqt.SoQtViewer_getDrawStyle(*args)
    def setBufferingType(*args):
        """setBufferingType(self, BufferType type)"""
        return _soqt.SoQtViewer_setBufferingType(*args)
    def getBufferingType(*args):
        """getBufferingType(self) -> BufferType"""
        return _soqt.SoQtViewer_getBufferingType(*args)
    def setViewing(*args):
        """setViewing(self, SbBool enable)"""
        return _soqt.SoQtViewer_setViewing(*args)
    def isViewing(*args):
        """isViewing(self) -> SbBool"""
        return _soqt.SoQtViewer_isViewing(*args)
    def setCursorEnabled(*args):
        """setCursorEnabled(self, SbBool enable)"""
        return _soqt.SoQtViewer_setCursorEnabled(*args)
    def isCursorEnabled(*args):
        """isCursorEnabled(self) -> SbBool"""
        return _soqt.SoQtViewer_isCursorEnabled(*args)
    def setAutoClipping(*args):
        """setAutoClipping(self, SbBool enable)"""
        return _soqt.SoQtViewer_setAutoClipping(*args)
    def isAutoClipping(*args):
        """isAutoClipping(self) -> SbBool"""
        return _soqt.SoQtViewer_isAutoClipping(*args)
    def setAutoClippingStrategy(*args):
        """
        setAutoClippingStrategy(self, AutoClippingStrategy strategy, float value=0.6, SoQtAutoClippingCB cb=None, 
            void cbuserdata=None)
        setAutoClippingStrategy(self, AutoClippingStrategy strategy, float value=0.6, SoQtAutoClippingCB cb=None)
        setAutoClippingStrategy(self, AutoClippingStrategy strategy, float value=0.6)
        setAutoClippingStrategy(self, AutoClippingStrategy strategy)
        """
        return _soqt.SoQtViewer_setAutoClippingStrategy(*args)
    def setStereoViewing(*args):
        """setStereoViewing(self, SbBool enable)"""
        return _soqt.SoQtViewer_setStereoViewing(*args)
    def isStereoViewing(*args):
        """isStereoViewing(self) -> SbBool"""
        return _soqt.SoQtViewer_isStereoViewing(*args)
    def setStereoOffset(*args):
        """setStereoOffset(self, float dist)"""
        return _soqt.SoQtViewer_setStereoOffset(*args)
    def getStereoOffset(*args):
        """getStereoOffset(self) -> float"""
        return _soqt.SoQtViewer_getStereoOffset(*args)
    # StereoType constants (see setStereoType).
    STEREO_NONE = _soqt.SoQtViewer_STEREO_NONE
    STEREO_ANAGLYPH = _soqt.SoQtViewer_STEREO_ANAGLYPH
    STEREO_QUADBUFFER = _soqt.SoQtViewer_STEREO_QUADBUFFER
    STEREO_INTERLEAVED_ROWS = _soqt.SoQtViewer_STEREO_INTERLEAVED_ROWS
    STEREO_INTERLEAVED_COLUMNS = _soqt.SoQtViewer_STEREO_INTERLEAVED_COLUMNS
    def setStereoType(*args):
        """setStereoType(self, StereoType s) -> SbBool"""
        return _soqt.SoQtViewer_setStereoType(*args)
    def getStereoType(*args):
        """getStereoType(self) -> StereoType"""
        return _soqt.SoQtViewer_getStereoType(*args)
    def setAnaglyphStereoColorMasks(*args):
        """setAnaglyphStereoColorMasks(self, SbBool left, SbBool right)"""
        return _soqt.SoQtViewer_setAnaglyphStereoColorMasks(*args)
    def getAnaglyphStereoColorMasks(*args):
        """getAnaglyphStereoColorMasks(self, SbBool left, SbBool right)"""
        return _soqt.SoQtViewer_getAnaglyphStereoColorMasks(*args)
    def setDetailSeek(*args):
        """setDetailSeek(self, SbBool enable)"""
        return _soqt.SoQtViewer_setDetailSeek(*args)
    def isDetailSeek(*args):
        """isDetailSeek(self) -> SbBool"""
        return _soqt.SoQtViewer_isDetailSeek(*args)
    def setSeekTime(*args):
        """setSeekTime(self, float seconds)"""
        return _soqt.SoQtViewer_setSeekTime(*args)
    def getSeekTime(*args):
        """getSeekTime(self) -> float"""
        return _soqt.SoQtViewer_getSeekTime(*args)
    def addStartCallback(*args):
        """
        addStartCallback(self, SoQtViewerCB func, void data=None)
        addStartCallback(self, SoQtViewerCB func)
        """
        return _soqt.SoQtViewer_addStartCallback(*args)
    def addFinishCallback(*args):
        """
        addFinishCallback(self, SoQtViewerCB func, void data=None)
        addFinishCallback(self, SoQtViewerCB func)
        """
        return _soqt.SoQtViewer_addFinishCallback(*args)
    def removeStartCallback(*args):
        """
        removeStartCallback(self, SoQtViewerCB func, void data=None)
        removeStartCallback(self, SoQtViewerCB func)
        """
        return _soqt.SoQtViewer_removeStartCallback(*args)
    def removeFinishCallback(*args):
        """
        removeFinishCallback(self, SoQtViewerCB func, void data=None)
        removeFinishCallback(self, SoQtViewerCB func)
        """
        return _soqt.SoQtViewer_removeFinishCallback(*args)
    def setWireframeOverlayColor(*args):
        """setWireframeOverlayColor(self, SbColor color)"""
        return _soqt.SoQtViewer_setWireframeOverlayColor(*args)
    def getWireframeOverlayColor(*args):
        """getWireframeOverlayColor(self) -> SbColor"""
        return _soqt.SoQtViewer_getWireframeOverlayColor(*args)
    # The following override the SoQtRenderArea versions of the same names.
    def setDoubleBuffer(*args):
        """setDoubleBuffer(self, SbBool enable)"""
        return _soqt.SoQtViewer_setDoubleBuffer(*args)
    def setSceneGraph(*args):
        """setSceneGraph(self, SoNode root)"""
        return _soqt.SoQtViewer_setSceneGraph(*args)
    def getSceneGraph(*args):
        """getSceneGraph(self) -> SoNode"""
        return _soqt.SoQtViewer_getSceneGraph(*args)
SoQtViewer_swigregister = _soqt.SoQtViewer_swigregister
SoQtViewer_swigregister(SoQtViewer)
def SoQtViewer_initClass(*args):
"""SoQtViewer_initClass()"""
return _soqt.SoQtViewer_initClass(*args)
def SoQtViewer_getClassTypeId(*args):
"""SoQtViewer_getClassTypeId() -> SoType"""
return _soqt.SoQtViewer_getClassTypeId(*args)
class SoQtFullViewer(SoQtViewer):
    """Proxy of C++ SoQtFullViewer class"""
    # SWIG-generated proxy: every method forwards ``*args`` to the compiled
    # ``_soqt`` extension, so the behaviour is that of the C++ class.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    # No constructor is generated -- instantiate one of the concrete
    # viewer subclasses instead.
    def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    def initClass(*args):
        """initClass()"""
        return _soqt.SoQtFullViewer_initClass(*args)
    initClass = staticmethod(initClass)
    def getClassTypeId(*args):
        """getClassTypeId() -> SoType"""
        return _soqt.SoQtFullViewer_getClassTypeId(*args)
    getClassTypeId = staticmethod(getClassTypeId)
    def getTypeId(*args):
        """getTypeId(self) -> SoType"""
        return _soqt.SoQtFullViewer_getTypeId(*args)
    # Decoration/popup build flags -- mirrors the C++ BuildFlag enum.
    BUILD_NONE = _soqt.SoQtFullViewer_BUILD_NONE
    BUILD_DECORATION = _soqt.SoQtFullViewer_BUILD_DECORATION
    BUILD_POPUP = _soqt.SoQtFullViewer_BUILD_POPUP
    BUILD_ALL = _soqt.SoQtFullViewer_BUILD_ALL
    def setDecoration(*args):
        """setDecoration(self, SbBool on)"""
        return _soqt.SoQtFullViewer_setDecoration(*args)
    def isDecoration(*args):
        """isDecoration(self) -> SbBool"""
        return _soqt.SoQtFullViewer_isDecoration(*args)
    def setPopupMenuEnabled(*args):
        """setPopupMenuEnabled(self, SbBool on)"""
        return _soqt.SoQtFullViewer_setPopupMenuEnabled(*args)
    def isPopupMenuEnabled(*args):
        """isPopupMenuEnabled(self) -> SbBool"""
        return _soqt.SoQtFullViewer_isPopupMenuEnabled(*args)
    def getAppPushButtonParent(*args):
        """getAppPushButtonParent(self) -> QWidget"""
        return _soqt.SoQtFullViewer_getAppPushButtonParent(*args)
    def addAppPushButton(*args):
        """addAppPushButton(self, QWidget newButton)"""
        return _soqt.SoQtFullViewer_addAppPushButton(*args)
    def insertAppPushButton(*args):
        """insertAppPushButton(self, QWidget newButton, int index)"""
        return _soqt.SoQtFullViewer_insertAppPushButton(*args)
    def removeAppPushButton(*args):
        """removeAppPushButton(self, QWidget oldButton)"""
        return _soqt.SoQtFullViewer_removeAppPushButton(*args)
    def findAppPushButton(*args):
        """findAppPushButton(self, QWidget oldButton) -> int"""
        return _soqt.SoQtFullViewer_findAppPushButton(*args)
    def lengthAppPushButton(*args):
        """lengthAppPushButton(self) -> int"""
        return _soqt.SoQtFullViewer_lengthAppPushButton(*args)
    def getRenderAreaWidget(*args):
        """getRenderAreaWidget(self) -> QWidget"""
        return _soqt.SoQtFullViewer_getRenderAreaWidget(*args)
    def setViewing(*args):
        """setViewing(self, SbBool on)"""
        return _soqt.SoQtFullViewer_setViewing(*args)
    def setComponentCursor(*args):
        """setComponentCursor(self, SoQtCursor cursor)"""
        return _soqt.SoQtFullViewer_setComponentCursor(*args)
# SWIG runtime registration and module-level static wrappers.
SoQtFullViewer_swigregister = _soqt.SoQtFullViewer_swigregister
SoQtFullViewer_swigregister(SoQtFullViewer)
def SoQtFullViewer_initClass(*args):
    """SoQtFullViewer_initClass()"""
    return _soqt.SoQtFullViewer_initClass(*args)
def SoQtFullViewer_getClassTypeId(*args):
    """SoQtFullViewer_getClassTypeId() -> SoType"""
    return _soqt.SoQtFullViewer_getClassTypeId(*args)
class SoQtPlaneViewer(SoQtFullViewer):
    """Proxy of C++ SoQtPlaneViewer class"""
    # SWIG-generated proxy: methods forward to the compiled ``_soqt`` module.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def initClass(*args):
        """initClass()"""
        return _soqt.SoQtPlaneViewer_initClass(*args)
    initClass = staticmethod(initClass)
    def getClassTypeId(*args):
        """getClassTypeId() -> SoType"""
        return _soqt.SoQtPlaneViewer_getClassTypeId(*args)
    getClassTypeId = staticmethod(getClassTypeId)
    def getTypeId(*args):
        """getTypeId(self) -> SoType"""
        return _soqt.SoQtPlaneViewer_getTypeId(*args)
    def createInstance(*args):
        """createInstance() -> void"""
        return _soqt.SoQtPlaneViewer_createInstance(*args)
    createInstance = staticmethod(createInstance)
    def __init__(self, *args):
        """
        __init__(self, QWidget parent=None, char name=None, SbBool embed=1,
            BuildFlag flag=BUILD_ALL, Type type=BROWSER) -> SoQtPlaneViewer
        __init__(self, QWidget parent=None, char name=None, SbBool embed=1,
            BuildFlag flag=BUILD_ALL) -> SoQtPlaneViewer
        __init__(self, QWidget parent=None, char name=None, SbBool embed=1) -> SoQtPlaneViewer
        __init__(self, QWidget parent=None, char name=None) -> SoQtPlaneViewer
        __init__(self, QWidget parent=None) -> SoQtPlaneViewer
        __init__(self) -> SoQtPlaneViewer
        """
        this = _soqt.new_SoQtPlaneViewer(*args)
        # Standard SWIG ownership bookkeeping for the wrapped C++ pointer.
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _soqt.delete_SoQtPlaneViewer
    __del__ = lambda self : None;
    def setViewing(*args):
        """setViewing(self, SbBool enable)"""
        return _soqt.SoQtPlaneViewer_setViewing(*args)
    def setCamera(*args):
        """setCamera(self, SoCamera camera)"""
        return _soqt.SoQtPlaneViewer_setCamera(*args)
    def setCursorEnabled(*args):
        """setCursorEnabled(self, SbBool enable)"""
        return _soqt.SoQtPlaneViewer_setCursorEnabled(*args)
# SWIG runtime registration and module-level static wrappers.
SoQtPlaneViewer_swigregister = _soqt.SoQtPlaneViewer_swigregister
SoQtPlaneViewer_swigregister(SoQtPlaneViewer)
def SoQtPlaneViewer_initClass(*args):
    """SoQtPlaneViewer_initClass()"""
    return _soqt.SoQtPlaneViewer_initClass(*args)
def SoQtPlaneViewer_getClassTypeId(*args):
    """SoQtPlaneViewer_getClassTypeId() -> SoType"""
    return _soqt.SoQtPlaneViewer_getClassTypeId(*args)
def SoQtPlaneViewer_createInstance(*args):
    """SoQtPlaneViewer_createInstance() -> void"""
    return _soqt.SoQtPlaneViewer_createInstance(*args)
class SoQtExaminerViewer(SoQtFullViewer):
    """Proxy of C++ SoQtExaminerViewer class"""
    # SWIG-generated proxy: methods forward to the compiled ``_soqt`` module.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def initClass(*args):
        """initClass()"""
        return _soqt.SoQtExaminerViewer_initClass(*args)
    initClass = staticmethod(initClass)
    def getClassTypeId(*args):
        """getClassTypeId() -> SoType"""
        return _soqt.SoQtExaminerViewer_getClassTypeId(*args)
    getClassTypeId = staticmethod(getClassTypeId)
    def getTypeId(*args):
        """getTypeId(self) -> SoType"""
        return _soqt.SoQtExaminerViewer_getTypeId(*args)
    def createInstance(*args):
        """createInstance() -> void"""
        return _soqt.SoQtExaminerViewer_createInstance(*args)
    createInstance = staticmethod(createInstance)
    def __init__(self, *args):
        """
        __init__(self, QWidget parent=None, char name=None, SbBool embed=1,
            BuildFlag flag=BUILD_ALL, Type type=BROWSER) -> SoQtExaminerViewer
        __init__(self, QWidget parent=None, char name=None, SbBool embed=1,
            BuildFlag flag=BUILD_ALL) -> SoQtExaminerViewer
        __init__(self, QWidget parent=None, char name=None, SbBool embed=1) -> SoQtExaminerViewer
        __init__(self, QWidget parent=None, char name=None) -> SoQtExaminerViewer
        __init__(self, QWidget parent=None) -> SoQtExaminerViewer
        __init__(self) -> SoQtExaminerViewer
        """
        this = _soqt.new_SoQtExaminerViewer(*args)
        # Standard SWIG ownership bookkeeping for the wrapped C++ pointer.
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _soqt.delete_SoQtExaminerViewer
    __del__ = lambda self : None;
    def setAnimationEnabled(*args):
        """setAnimationEnabled(self, SbBool enable)"""
        return _soqt.SoQtExaminerViewer_setAnimationEnabled(*args)
    def isAnimationEnabled(*args):
        """isAnimationEnabled(self) -> SbBool"""
        return _soqt.SoQtExaminerViewer_isAnimationEnabled(*args)
    def stopAnimating(*args):
        """stopAnimating(self)"""
        return _soqt.SoQtExaminerViewer_stopAnimating(*args)
    def isAnimating(*args):
        """isAnimating(self) -> SbBool"""
        return _soqt.SoQtExaminerViewer_isAnimating(*args)
    def setFeedbackVisibility(*args):
        """setFeedbackVisibility(self, SbBool enable)"""
        return _soqt.SoQtExaminerViewer_setFeedbackVisibility(*args)
    def isFeedbackVisible(*args):
        """isFeedbackVisible(self) -> SbBool"""
        return _soqt.SoQtExaminerViewer_isFeedbackVisible(*args)
    def setFeedbackSize(*args):
        """setFeedbackSize(self, int size)"""
        return _soqt.SoQtExaminerViewer_setFeedbackSize(*args)
    def getFeedbackSize(*args):
        """getFeedbackSize(self) -> int"""
        return _soqt.SoQtExaminerViewer_getFeedbackSize(*args)
    def setViewing(*args):
        """setViewing(self, SbBool enable)"""
        return _soqt.SoQtExaminerViewer_setViewing(*args)
    def setCamera(*args):
        """setCamera(self, SoCamera camera)"""
        return _soqt.SoQtExaminerViewer_setCamera(*args)
    def setCursorEnabled(*args):
        """setCursorEnabled(self, SbBool enable)"""
        return _soqt.SoQtExaminerViewer_setCursorEnabled(*args)
# SWIG runtime registration and module-level static wrappers.
SoQtExaminerViewer_swigregister = _soqt.SoQtExaminerViewer_swigregister
SoQtExaminerViewer_swigregister(SoQtExaminerViewer)
def SoQtExaminerViewer_initClass(*args):
    """SoQtExaminerViewer_initClass()"""
    return _soqt.SoQtExaminerViewer_initClass(*args)
def SoQtExaminerViewer_getClassTypeId(*args):
    """SoQtExaminerViewer_getClassTypeId() -> SoType"""
    return _soqt.SoQtExaminerViewer_getClassTypeId(*args)
def SoQtExaminerViewer_createInstance(*args):
    """SoQtExaminerViewer_createInstance() -> void"""
    return _soqt.SoQtExaminerViewer_createInstance(*args)
class SoQtConstrainedViewer(SoQtFullViewer):
    """Proxy of C++ SoQtConstrainedViewer class"""
    # SWIG-generated proxy: methods forward to the compiled ``_soqt`` module.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    # No constructor is generated -- instantiate a concrete subclass
    # (e.g. SoQtFlyViewer) instead.
    def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    def initClass(*args):
        """initClass()"""
        return _soqt.SoQtConstrainedViewer_initClass(*args)
    initClass = staticmethod(initClass)
    def getClassTypeId(*args):
        """getClassTypeId() -> SoType"""
        return _soqt.SoQtConstrainedViewer_getClassTypeId(*args)
    getClassTypeId = staticmethod(getClassTypeId)
    def getTypeId(*args):
        """getTypeId(self) -> SoType"""
        return _soqt.SoQtConstrainedViewer_getTypeId(*args)
    def setUpDirection(*args):
        """setUpDirection(self, SbVec3f dir)"""
        return _soqt.SoQtConstrainedViewer_setUpDirection(*args)
    def getUpDirection(*args):
        """getUpDirection(self) -> SbVec3f"""
        return _soqt.SoQtConstrainedViewer_getUpDirection(*args)
    def setCamera(*args):
        """setCamera(self, SoCamera newcam)"""
        return _soqt.SoQtConstrainedViewer_setCamera(*args)
    def saveHomePosition(*args):
        """saveHomePosition(self)"""
        return _soqt.SoQtConstrainedViewer_saveHomePosition(*args)
    def resetToHomePosition(*args):
        """resetToHomePosition(self)"""
        return _soqt.SoQtConstrainedViewer_resetToHomePosition(*args)
# SWIG runtime registration and module-level static wrappers.
SoQtConstrainedViewer_swigregister = _soqt.SoQtConstrainedViewer_swigregister
SoQtConstrainedViewer_swigregister(SoQtConstrainedViewer)
def SoQtConstrainedViewer_initClass(*args):
    """SoQtConstrainedViewer_initClass()"""
    return _soqt.SoQtConstrainedViewer_initClass(*args)
def SoQtConstrainedViewer_getClassTypeId(*args):
    """SoQtConstrainedViewer_getClassTypeId() -> SoType"""
    return _soqt.SoQtConstrainedViewer_getClassTypeId(*args)
class SoQtFlyViewer(SoQtConstrainedViewer):
    """Proxy of C++ SoQtFlyViewer class"""
    # SWIG-generated proxy: methods forward to the compiled ``_soqt`` module.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def initClass(*args):
        """initClass()"""
        return _soqt.SoQtFlyViewer_initClass(*args)
    initClass = staticmethod(initClass)
    def getClassTypeId(*args):
        """getClassTypeId() -> SoType"""
        return _soqt.SoQtFlyViewer_getClassTypeId(*args)
    getClassTypeId = staticmethod(getClassTypeId)
    def getTypeId(*args):
        """getTypeId(self) -> SoType"""
        return _soqt.SoQtFlyViewer_getTypeId(*args)
    def createInstance(*args):
        """createInstance() -> void"""
        return _soqt.SoQtFlyViewer_createInstance(*args)
    createInstance = staticmethod(createInstance)
    def __init__(self, *args):
        """
        __init__(self, QWidget parent=None, char name=None, SbBool embed=1,
            BuildFlag flag=BUILD_ALL, Type type=BROWSER) -> SoQtFlyViewer
        __init__(self, QWidget parent=None, char name=None, SbBool embed=1,
            BuildFlag flag=BUILD_ALL) -> SoQtFlyViewer
        __init__(self, QWidget parent=None, char name=None, SbBool embed=1) -> SoQtFlyViewer
        __init__(self, QWidget parent=None, char name=None) -> SoQtFlyViewer
        __init__(self, QWidget parent=None) -> SoQtFlyViewer
        __init__(self) -> SoQtFlyViewer
        """
        this = _soqt.new_SoQtFlyViewer(*args)
        # Standard SWIG ownership bookkeeping for the wrapped C++ pointer.
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _soqt.delete_SoQtFlyViewer
    __del__ = lambda self : None;
    def setViewing(*args):
        """setViewing(self, SbBool enable)"""
        return _soqt.SoQtFlyViewer_setViewing(*args)
    def viewAll(*args):
        """viewAll(self)"""
        return _soqt.SoQtFlyViewer_viewAll(*args)
    def resetToHomePosition(*args):
        """resetToHomePosition(self)"""
        return _soqt.SoQtFlyViewer_resetToHomePosition(*args)
    def setCamera(*args):
        """setCamera(self, SoCamera camera)"""
        return _soqt.SoQtFlyViewer_setCamera(*args)
    def setCursorEnabled(*args):
        """setCursorEnabled(self, SbBool enable)"""
        return _soqt.SoQtFlyViewer_setCursorEnabled(*args)
    def setCameraType(*args):
        """setCameraType(self, SoType type)"""
        return _soqt.SoQtFlyViewer_setCameraType(*args)
# SWIG runtime registration and module-level static wrappers.
SoQtFlyViewer_swigregister = _soqt.SoQtFlyViewer_swigregister
SoQtFlyViewer_swigregister(SoQtFlyViewer)
def SoQtFlyViewer_initClass(*args):
    """SoQtFlyViewer_initClass()"""
    return _soqt.SoQtFlyViewer_initClass(*args)
def SoQtFlyViewer_getClassTypeId(*args):
    """SoQtFlyViewer_getClassTypeId() -> SoType"""
    return _soqt.SoQtFlyViewer_getClassTypeId(*args)
def SoQtFlyViewer_createInstance(*args):
    """SoQtFlyViewer_createInstance() -> void"""
    return _soqt.SoQtFlyViewer_createInstance(*args)
class SoQtPopupMenu(object):
    """Proxy of C++ SoQtPopupMenu class"""
    # SWIG-generated proxy: methods forward to the compiled ``_soqt`` module.
    # Menus, menu items and radio groups are all addressed by integer ids.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    # Instances are obtained via the createInstance() factory, not __init__.
    def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    def createInstance(*args):
        """createInstance() -> SoQtPopupMenu"""
        return _soqt.SoQtPopupMenu_createInstance(*args)
    createInstance = staticmethod(createInstance)
    __swig_destroy__ = _soqt.delete_SoQtPopupMenu
    __del__ = lambda self : None;
    def newMenu(*args):
        """
        newMenu(self, char name, int menuid=-1) -> int
        newMenu(self, char name) -> int
        """
        return _soqt.SoQtPopupMenu_newMenu(*args)
    def getMenu(*args):
        """getMenu(self, char name) -> int"""
        return _soqt.SoQtPopupMenu_getMenu(*args)
    def setMenuTitle(*args):
        """setMenuTitle(self, int id, char title)"""
        return _soqt.SoQtPopupMenu_setMenuTitle(*args)
    def getMenuTitle(*args):
        """getMenuTitle(self, int id) -> char"""
        return _soqt.SoQtPopupMenu_getMenuTitle(*args)
    def newMenuItem(*args):
        """
        newMenuItem(self, char name, int itemid=-1) -> int
        newMenuItem(self, char name) -> int
        """
        return _soqt.SoQtPopupMenu_newMenuItem(*args)
    def getMenuItem(*args):
        """getMenuItem(self, char name) -> int"""
        return _soqt.SoQtPopupMenu_getMenuItem(*args)
    def setMenuItemTitle(*args):
        """setMenuItemTitle(self, int itemid, char title)"""
        return _soqt.SoQtPopupMenu_setMenuItemTitle(*args)
    def getMenuItemTitle(*args):
        """getMenuItemTitle(self, int itemid) -> char"""
        return _soqt.SoQtPopupMenu_getMenuItemTitle(*args)
    def setMenuItemEnabled(*args):
        """setMenuItemEnabled(self, int itemid, SbBool enabled)"""
        return _soqt.SoQtPopupMenu_setMenuItemEnabled(*args)
    def getMenuItemEnabled(*args):
        """getMenuItemEnabled(self, int itemid) -> SbBool"""
        return _soqt.SoQtPopupMenu_getMenuItemEnabled(*args)
    def setMenuItemMarked(*args):
        """setMenuItemMarked(self, int itemid, SbBool marked)"""
        return _soqt.SoQtPopupMenu_setMenuItemMarked(*args)
    def getMenuItemMarked(*args):
        """getMenuItemMarked(self, int itemid) -> SbBool"""
        return _soqt.SoQtPopupMenu_getMenuItemMarked(*args)
    def addMenu(*args):
        """
        addMenu(self, int menuid, int submenuid, int pos=-1)
        addMenu(self, int menuid, int submenuid)
        """
        return _soqt.SoQtPopupMenu_addMenu(*args)
    def addMenuItem(*args):
        """
        addMenuItem(self, int menuid, int itemid, int pos=-1)
        addMenuItem(self, int menuid, int itemid)
        """
        return _soqt.SoQtPopupMenu_addMenuItem(*args)
    def addSeparator(*args):
        """
        addSeparator(self, int menuid, int pos=-1)
        addSeparator(self, int menuid)
        """
        return _soqt.SoQtPopupMenu_addSeparator(*args)
    def removeMenu(*args):
        """removeMenu(self, int menuid)"""
        return _soqt.SoQtPopupMenu_removeMenu(*args)
    def removeMenuItem(*args):
        """removeMenuItem(self, int itemid)"""
        return _soqt.SoQtPopupMenu_removeMenuItem(*args)
    def popUp(*args):
        """popUp(self, QWidget inside, int x, int y)"""
        return _soqt.SoQtPopupMenu_popUp(*args)
    def newRadioGroup(*args):
        """
        newRadioGroup(self, int groupid=-1) -> int
        newRadioGroup(self) -> int
        """
        return _soqt.SoQtPopupMenu_newRadioGroup(*args)
    def getRadioGroup(*args):
        """getRadioGroup(self, int itemid) -> int"""
        return _soqt.SoQtPopupMenu_getRadioGroup(*args)
    def getRadioGroupSize(*args):
        """getRadioGroupSize(self, int groupid) -> int"""
        return _soqt.SoQtPopupMenu_getRadioGroupSize(*args)
    def addRadioGroupItem(*args):
        """addRadioGroupItem(self, int groupid, int itemid)"""
        return _soqt.SoQtPopupMenu_addRadioGroupItem(*args)
    def removeRadioGroupItem(*args):
        """removeRadioGroupItem(self, int itemid)"""
        return _soqt.SoQtPopupMenu_removeRadioGroupItem(*args)
    def setRadioGroupMarkedItem(*args):
        """setRadioGroupMarkedItem(self, int itemid)"""
        return _soqt.SoQtPopupMenu_setRadioGroupMarkedItem(*args)
    def getRadioGroupMarkedItem(*args):
        """getRadioGroupMarkedItem(self, int groupid) -> int"""
        return _soqt.SoQtPopupMenu_getRadioGroupMarkedItem(*args)
    def addMenuSelectionCallback(*args):
        """addMenuSelectionCallback(self, SoQtMenuSelectionCallback callback, void data)"""
        return _soqt.SoQtPopupMenu_addMenuSelectionCallback(*args)
    def removeMenuSelectionCallback(*args):
        """removeMenuSelectionCallback(self, SoQtMenuSelectionCallback callback, void data)"""
        return _soqt.SoQtPopupMenu_removeMenuSelectionCallback(*args)
# SWIG runtime registration and module-level static wrappers.
SoQtPopupMenu_swigregister = _soqt.SoQtPopupMenu_swigregister
SoQtPopupMenu_swigregister(SoQtPopupMenu)
def SoQtPopupMenu_createInstance(*args):
    """SoQtPopupMenu_createInstance() -> SoQtPopupMenu"""
    return _soqt.SoQtPopupMenu_createInstance(*args)
class SoQtCursor(object):
    """Proxy of C++ SoQtCursor class"""
    # SWIG-generated proxy: methods forward to the compiled ``_soqt`` module.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def initClass(*args):
        """initClass()"""
        return _soqt.SoQtCursor_initClass(*args)
    initClass = staticmethod(initClass)
    # Cursor shape constants -- mirrors the C++ SoQtCursor::Shape enum.
    CUSTOM_BITMAP = _soqt.SoQtCursor_CUSTOM_BITMAP
    DEFAULT = _soqt.SoQtCursor_DEFAULT
    BUSY = _soqt.SoQtCursor_BUSY
    CROSSHAIR = _soqt.SoQtCursor_CROSSHAIR
    UPARROW = _soqt.SoQtCursor_UPARROW
    def __init__(self, *args):
        """
        __init__(self) -> SoQtCursor
        __init__(self, Shape shape) -> SoQtCursor
        __init__(self, CustomCursor cc) -> SoQtCursor
        __init__(self, SoQtCursor cursor) -> SoQtCursor
        """
        this = _soqt.new_SoQtCursor(*args)
        # Standard SWIG ownership bookkeeping for the wrapped C++ pointer.
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _soqt.delete_SoQtCursor
    __del__ = lambda self : None;
    def getShape(*args):
        """getShape(self) -> Shape"""
        return _soqt.SoQtCursor_getShape(*args)
    def setShape(*args):
        """setShape(self, Shape shape)"""
        return _soqt.SoQtCursor_setShape(*args)
    def getCustomCursor(*args):
        """getCustomCursor(self) -> CustomCursor"""
        return _soqt.SoQtCursor_getCustomCursor(*args)
    def getZoomCursor(*args):
        """getZoomCursor() -> SoQtCursor"""
        return _soqt.SoQtCursor_getZoomCursor(*args)
    getZoomCursor = staticmethod(getZoomCursor)
    def getPanCursor(*args):
        """getPanCursor() -> SoQtCursor"""
        return _soqt.SoQtCursor_getPanCursor(*args)
    getPanCursor = staticmethod(getPanCursor)
    def getRotateCursor(*args):
        """getRotateCursor() -> SoQtCursor"""
        return _soqt.SoQtCursor_getRotateCursor(*args)
    getRotateCursor = staticmethod(getRotateCursor)
    def getBlankCursor(*args):
        """getBlankCursor() -> SoQtCursor"""
        return _soqt.SoQtCursor_getBlankCursor(*args)
    getBlankCursor = staticmethod(getBlankCursor)
# SWIG runtime registration and module-level static wrappers.
SoQtCursor_swigregister = _soqt.SoQtCursor_swigregister
SoQtCursor_swigregister(SoQtCursor)
def SoQtCursor_initClass(*args):
    """SoQtCursor_initClass()"""
    return _soqt.SoQtCursor_initClass(*args)
def SoQtCursor_getZoomCursor(*args):
    """SoQtCursor_getZoomCursor() -> SoQtCursor"""
    return _soqt.SoQtCursor_getZoomCursor(*args)
def SoQtCursor_getPanCursor(*args):
    """SoQtCursor_getPanCursor() -> SoQtCursor"""
    return _soqt.SoQtCursor_getPanCursor(*args)
def SoQtCursor_getRotateCursor(*args):
    """SoQtCursor_getRotateCursor() -> SoQtCursor"""
    return _soqt.SoQtCursor_getRotateCursor(*args)
def SoQtCursor_getBlankCursor(*args):
    """SoQtCursor_getBlankCursor() -> SoQtCursor"""
    return _soqt.SoQtCursor_getBlankCursor(*args)
| lgpl-2.1 |
indictranstech/phr-frappe | frappe/utils/email_lib/smtp.py | 29 | 3466 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import smtplib
import _socket
from frappe.utils import cint
from frappe import _
def send(email, as_bulk=False):
	"""Deliver *email* through the configured outgoing SMTP server.

	Respects the site-wide "mute_emails" switch.  When the server is
	configured with ``always_use_login_id_as_sender``, the SMTP login
	becomes the envelope sender and the original sender is preserved
	as the reply-to address (unless one is already set).
	"""
	if frappe.flags.mute_emails or frappe.conf.get("mute_emails") or False:
		frappe.msgprint(_("Emails are muted"))
		return
	try:
		smtpserver = SMTPServer()
		force_login_sender = (hasattr(smtpserver, "always_use_login_id_as_sender") and
			cint(smtpserver.always_use_login_id_as_sender) and smtpserver.login)
		if force_login_sender:
			if not email.reply_to:
				email.reply_to = email.sender
			email.sender = smtpserver.login
		all_recipients = email.recipients + (email.cc or [])
		smtpserver.sess.sendmail(email.sender, all_recipients, email.as_string())
	except smtplib.SMTPSenderRefused:
		frappe.msgprint(_("Invalid login or password"))
		raise
	except smtplib.SMTPRecipientsRefused:
		frappe.msgprint(_("Invalid recipient address"))
		raise
class SMTPServer:
	"""Outgoing-mail configuration plus a lazily created SMTP session.

	Settings are resolved in order of precedence: explicit constructor
	arguments, the enabled 'Outgoing Email Settings' document, and
	finally ``frappe.conf`` site-config keys.
	"""
	def __init__(self, login=None, password=None, server=None, port=None, use_ssl=None):
		# get defaults from mail settings
		try:
			self.email_settings = frappe.get_doc('Outgoing Email Settings', 'Outgoing Email Settings')
		except frappe.DoesNotExistError:
			self.email_settings = None
		# cached smtplib.SMTP session, created on first access of ``sess``
		self._sess = None
		if server:
			# explicit arguments win over stored configuration
			self.server = server
			self.port = port
			self.use_ssl = cint(use_ssl)
			self.login = login
			self.password = password
		elif self.email_settings and cint(self.email_settings.enabled):
			self.server = self.email_settings.mail_server
			self.port = self.email_settings.mail_port
			self.use_ssl = cint(self.email_settings.use_ssl)
			self.login = self.email_settings.mail_login
			self.password = self.email_settings.mail_password
			self.always_use_login_id_as_sender = self.email_settings.always_use_login_id_as_sender
		else:
			# fall back to site-config values
			self.server = frappe.conf.get("mail_server") or ""
			self.port = frappe.conf.get("mail_port") or None
			self.use_ssl = cint(frappe.conf.get("use_ssl") or 0)
			self.login = frappe.conf.get("mail_login") or ""
			self.password = frappe.conf.get("mail_password") or ""
	@property
	def sess(self):
		"""get session"""
		if self._sess:
			return self._sess
		# check if email server specified
		if not self.server:
			err_msg = _('Outgoing Mail Server not specified')
			frappe.msgprint(err_msg)
			raise frappe.OutgoingEmailError, err_msg
		try:
			if self.use_ssl and not self.port:
				# NOTE(review): despite the name, "use_ssl" drives STARTTLS
				# on the submission port (587), not implicit SMTPS on 465.
				self.port = 587
			self._sess = smtplib.SMTP((self.server or "").encode('utf-8'),
				cint(self.port) or None)
			if not self._sess:
				err_msg = _('Could not connect to outgoing email server')
				frappe.msgprint(err_msg)
				raise frappe.OutgoingEmailError, err_msg
			if self.use_ssl:
				self._sess.ehlo()
				self._sess.starttls()
				self._sess.ehlo()
			if self.login:
				ret = self._sess.login((self.login or "").encode('utf-8'),
					(self.password or "").encode('utf-8'))
				# check if logged correctly
				# (235 is the SMTP "authentication successful" reply code)
				if ret[0]!=235:
					frappe.msgprint(ret[1])
					raise frappe.OutgoingEmailError, ret[1]
			return self._sess
		except _socket.error:
			# Invalid mail server -- due to refusing connection
			frappe.msgprint(_('Invalid Outgoing Mail Server or Port'))
			raise
		except smtplib.SMTPAuthenticationError:
			frappe.msgprint(_("Invalid login or password"))
			raise
		except smtplib.SMTPException:
			frappe.msgprint(_('Unable to send emails at this time'))
			raise
| mit |
albertomurillo/ansible | lib/ansible/module_utils/rax.py | 38 | 12105 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by
# Ansible still belong to the author of the module, and may assign their own
# license to the complete work.
#
# Copyright (c), Michael DeHaan <michael.dehaan@gmail.com>, 2012-2013
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import os
import re
from uuid import UUID
from ansible.module_utils.six import text_type, binary_type
# Server build states that terminate a wait loop.
FINAL_STATUSES = ('ACTIVE', 'ERROR')
# States reported for Cloud Block Storage volumes.
VOLUME_STATUS = ('available', 'attaching', 'creating', 'deleting', 'in-use',
                 'error', 'error_deleting')
# Algorithms / protocols accepted by the Cloud Load Balancer API.
CLB_ALGORITHMS = ['RANDOM', 'LEAST_CONNECTIONS', 'ROUND_ROBIN',
                  'WEIGHTED_LEAST_CONNECTIONS', 'WEIGHTED_ROUND_ROBIN']
CLB_PROTOCOLS = ['DNS_TCP', 'DNS_UDP', 'FTP', 'HTTP', 'HTTPS', 'IMAPS',
                 'IMAPv4', 'LDAP', 'LDAPS', 'MYSQL', 'POP3', 'POP3S', 'SMTP',
                 'TCP', 'TCP_CLIENT_FIRST', 'UDP', 'UDP_STREAM', 'SFTP']
# "Plain value" types that rax_to_dict copies verbatim; anything else is
# treated as a nested pyrax object (recursed into) or a callable (skipped).
NON_CALLABLES = (text_type, binary_type, bool, dict, int, list, type(None))
# Well-known pseudo-network IDs for Rackspace PublicNet / ServiceNet.
PUBLIC_NET_ID = "00000000-0000-0000-0000-000000000000"
SERVICE_NET_ID = "11111111-1111-1111-1111-111111111111"
def rax_slugify(value):
    """Normalize *value* into a ``rax_``-prefixed attribute key.

    Every character that is not alphanumeric, underscore or hyphen is
    replaced with an underscore; the result is lowercased, stripped of
    leading underscores, and prefixed with ``rax_``.
    """
    normalized = re.sub(r'[^\w-]', '_', value).lower().lstrip('_')
    return 'rax_' + normalized
def rax_clb_node_to_dict(obj):
    """Serialize a Cloud Load Balancer node object into a plain dict.

    A falsy *obj* yields an empty dict; otherwise the node's own
    ``to_dict()`` payload is returned, augmented with its ``id`` and
    ``weight`` attributes.
    """
    if not obj:
        return {}
    as_dict = obj.to_dict()
    as_dict.update(id=obj.id, weight=obj.weight)
    return as_dict
def rax_to_dict(obj, obj_type='standard'):
    """Generic conversion of a pyrax object into a plain dict.

    obj_type values:
        standard -- plain attribute copy (keys slugified only for servers)
        clb      -- load balancer; its ``nodes`` list is expanded through
                    rax_clb_node_to_dict
        server   -- adds ``rax_boot_source`` and re-exposes a few
                    well-known attributes under their original names
    """
    result = {}
    for attr_name in dir(obj):
        attr_value = getattr(obj, attr_name)
        if obj_type == 'clb' and attr_name == 'nodes':
            # Expand CLB nodes into their dict form.
            result[attr_name] = [rax_clb_node_to_dict(node)
                                 for node in attr_value]
        elif (isinstance(attr_value, list) and attr_value and
                not isinstance(attr_value[0], NON_CALLABLES)):
            # A non-empty list of nested pyrax objects: recurse per item.
            result[attr_name] = [rax_to_dict(item) for item in attr_value]
        elif isinstance(attr_value, NON_CALLABLES) and not attr_name.startswith('_'):
            if obj_type == 'server':
                if attr_name == 'image':
                    # An empty image means the server boots from a volume.
                    result['rax_boot_source'] = 'volume' if not attr_value else 'local'
                attr_name = rax_slugify(attr_name)
            result[attr_name] = attr_value
    if obj_type == 'server':
        # Re-expose common attributes under their unslugified names too.
        for attr_name in ('id', 'accessIPv4', 'name', 'status'):
            result[attr_name] = result.get(rax_slugify(attr_name))
    return result
def rax_find_bootable_volume(module, rax_module, server, exit=True):
    """Locate the single bootable volume attached to *server*.

    Fails the module when zero or more than one bootable volume is
    attached; with ``exit=False`` it returns False instead of failing.
    """
    cs = rax_module.cloudservers
    cbs = rax_module.cloud_blockstorage
    server_id = rax_module.utils.get_id(server)
    attached = cs.volumes.get_server_volumes(server_id)
    # Resolve each attachment to its full volume record and keep the
    # ones flagged bootable.
    bootable = [vol for vol in (cbs.get(v) for v in attached)
                if module.boolean(vol.bootable)]
    if not bootable:
        if exit:
            module.fail_json(msg='No bootable volumes could be found for '
                                 'server %s' % server_id)
        else:
            return False
    elif len(bootable) > 1:
        if exit:
            module.fail_json(msg='Multiple bootable volumes found for server '
                                 '%s' % server_id)
        else:
            return False
    return bootable[0]
def rax_find_image(module, rax_module, image, exit=True):
    """Resolve *image* (UUID string, human_id or name) to an image ID.

    Fails the module when nothing matches; with ``exit=False`` it
    returns False instead.
    """
    cs = rax_module.cloudservers
    try:
        UUID(image)
    except ValueError:
        # Not a UUID -- try human_id first, then fall back to name.
        try:
            image = cs.images.find(human_id=image)
        except (cs.exceptions.NotFound,
                cs.exceptions.NoUniqueMatch):
            try:
                image = cs.images.find(name=image)
            except (cs.exceptions.NotFound,
                    cs.exceptions.NoUniqueMatch):
                if not exit:
                    return False
                module.fail_json(msg='No matching image found (%s)' % image)
    return rax_module.utils.get_id(image)
def rax_find_volume(module, rax_module, name):
    """Look up a Cloud Block Storage volume by UUID or by name.

    Returns the volume object, or None when a name lookup finds nothing;
    any other API error fails the module.
    """
    cbs = rax_module.cloud_blockstorage
    try:
        # A valid UUID is fetched directly by ID.
        UUID(name)
        found = cbs.get(name)
    except ValueError:
        # Otherwise fall back to a name-based search.
        try:
            found = cbs.find(name=name)
        except rax_module.exc.NotFound:
            found = None
    except Exception as exc:
        # Covers failures from cbs.get() on the ID path.
        module.fail_json(msg='%s' % exc)
    return found
def rax_find_network(module, rax_module, network):
    """Resolve *network* (UUID, label, 'public' or 'private') to server nets.

    'public' and 'private' map to the well-known PublicNet/ServiceNet
    pseudo-network IDs; an unknown label fails the module.
    """
    cnw = rax_module.cloud_networks
    try:
        UUID(network)
    except ValueError:
        lowered = network.lower()
        if lowered == 'public':
            return cnw.get_server_networks(PUBLIC_NET_ID)
        if lowered == 'private':
            return cnw.get_server_networks(SERVICE_NET_ID)
        try:
            network_obj = cnw.find_network_by_label(network)
        except (rax_module.exceptions.NetworkNotFound,
                rax_module.exceptions.NetworkLabelNotUnique):
            module.fail_json(msg='No matching network found (%s)' %
                                 network)
        else:
            return cnw.get_server_networks(network_obj)
    else:
        return cnw.get_server_networks(network)
def rax_find_server(module, rax_module, server):
    """Resolve *server* (UUID or exact name) to a CloudServer object.

    A name lookup must match exactly one server, otherwise the module
    fails with a hint to use the server ID.
    """
    cs = rax_module.cloudservers
    try:
        UUID(server)
        server = cs.servers.get(server)
    except ValueError:
        # Anchored regex so only exact name matches are returned.
        matches = cs.servers.list(search_opts=dict(name='^%s$' % server))
        if not matches:
            module.fail_json(msg='No Server was matched by name, '
                                 'try using the Server ID instead')
        if len(matches) > 1:
            module.fail_json(msg='Multiple servers matched by name, '
                                 'try using the Server ID instead')
        # Exactly one match remains at this point.
        server = matches[0]
    return server
def rax_find_loadbalancer(module, rax_module, loadbalancer):
    """Resolve *loadbalancer* (ID or name) to a CloudLoadBalancer object.

    Tries a direct ID fetch first; on any failure it falls back to an
    exact-name scan, which must yield exactly one match.
    """
    clb = rax_module.cloud_loadbalancers
    try:
        found = clb.get(loadbalancer)
    except Exception:
        matches = [lb for lb in clb.list() if loadbalancer == lb.name]
        if not matches:
            module.fail_json(msg='No loadbalancer was matched')
        if len(matches) > 1:
            module.fail_json(msg='Multiple loadbalancers matched')
        # Exactly one match remains at this point.
        found = matches[0]
    return found
def rax_argument_spec():
    """Return standard base dictionary used for the argument_spec
    argument in AnsibleModule
    """
    spec = {
        'api_key': dict(type='str', aliases=['password'], no_log=True),
        'auth_endpoint': dict(type='str'),
        'credentials': dict(type='path', aliases=['creds_file']),
        'env': dict(type='str'),
        'identity_type': dict(type='str', default='rackspace'),
        'region': dict(type='str'),
        'tenant_id': dict(type='str'),
        'tenant_name': dict(type='str'),
        'username': dict(type='str'),
        'validate_certs': dict(type='bool', aliases=['verify_ssl']),
    }
    return spec
def rax_required_together():
    """Return the default list used for the required_together argument to
    AnsibleModule"""
    # api_key and username must always be supplied as a pair.
    pair = ['api_key', 'username']
    return [pair]
def setup_rax_module(module, rax_module, region_required=True):
    """Set up pyrax in a standard way for all modules"""
    # Tag the pyrax user agent so Rackspace can attribute API traffic
    # to Ansible.
    rax_module.USER_AGENT = 'ansible/%s %s' % (module.ansible_version,
                                               rax_module.USER_AGENT)

    # Collect credential/configuration inputs from the module parameters.
    api_key = module.params.get('api_key')
    auth_endpoint = module.params.get('auth_endpoint')
    credentials = module.params.get('credentials')
    env = module.params.get('env')
    identity_type = module.params.get('identity_type')
    region = module.params.get('region')
    tenant_id = module.params.get('tenant_id')
    tenant_name = module.params.get('tenant_name')
    username = module.params.get('username')
    verify_ssl = module.params.get('validate_certs')

    # Push optional settings into pyrax only when explicitly provided.
    if env is not None:
        rax_module.set_environment(env)

    rax_module.set_setting('identity_type', identity_type)

    if verify_ssl is not None:
        rax_module.set_setting('verify_ssl', verify_ssl)

    if auth_endpoint is not None:
        rax_module.set_setting('auth_endpoint', auth_endpoint)

    if tenant_id is not None:
        rax_module.set_setting('tenant_id', tenant_id)

    if tenant_name is not None:
        rax_module.set_setting('tenant_name', tenant_name)

    try:
        # Fall back to environment variables and pyrax settings.  A
        # keyring-stored username selects the 'USE_KEYRING' sentinel,
        # which triggers keyring_auth() below.
        username = username or os.environ.get('RAX_USERNAME')
        if not username:
            username = rax_module.get_setting('keyring_username')
            if username:
                api_key = 'USE_KEYRING'
        if not api_key:
            api_key = os.environ.get('RAX_API_KEY')
        credentials = (credentials or os.environ.get('RAX_CREDENTIALS') or
                       os.environ.get('RAX_CREDS_FILE'))
        region = (region or os.environ.get('RAX_REGION') or
                  rax_module.get_setting('region'))
    except KeyError as e:
        # NOTE: e.message is Python 2 specific.
        module.fail_json(msg='Unable to load %s' % e.message)

    try:
        # Prefer explicit username/api_key; otherwise a credentials file.
        if api_key and username:
            if api_key == 'USE_KEYRING':
                rax_module.keyring_auth(username, region=region)
            else:
                rax_module.set_credentials(username, api_key=api_key,
                                           region=region)
        elif credentials:
            credentials = os.path.expanduser(credentials)
            rax_module.set_credential_file(credentials, region=region)
        else:
            raise Exception('No credentials supplied!')
    except Exception as e:
        # Report whichever of message/repr is available (py2 idiom).
        if e.message:
            msg = str(e.message)
        else:
            msg = repr(e)
        module.fail_json(msg=msg)

    # Most modules need a valid region; callers that operate globally
    # pass region_required=False.
    if region_required and region not in rax_module.regions:
        module.fail_json(msg='%s is not a valid region, must be one of: %s' %
                         (region, ','.join(rax_module.regions)))

    return rax_module
| gpl-3.0 |
kralf/morsel | python/lib/morsel/nodes/ode/scene.py | 1 | 3228 | from morsel.panda import *
from solid import Solid
from body import Body
from joint import Joint
from morsel.nodes.iterator import Iterator
from morsel.nodes.scene import Scene as Base
#-------------------------------------------------------------------------------
class Scene(Base):
    """ODE scene that adds keyboard shortcuts for toggling the debug
    visualization of collision solids, rigid bodies, and joints.

    Refactored: the show/hide/toggle logic was triplicated for
    solids/bodies/joints; it is now shared via private helpers.  The
    public interface is unchanged.
    """

    def __init__(self, **kargs):
        super(Scene, self).__init__(**kargs)

        # Debug-visualization shortcuts registered with the framework.
        framework.addShortcut("alt-s", self.toggleShowSolids,
            "Show/hide collision solids in the scene")
        framework.addShortcut("alt-b", self.toggleShowBodies,
            "Show/hide rigid bodies in the scene")
        framework.addShortcut("alt-j", self.toggleShowJoints,
            "Show/hide rigid body joints in the scene")

    def getSolids(self):
        return Iterator(self, Solid).generator

    solids = property(getSolids)

    def getBodies(self):
        return Iterator(self, Body).generator

    bodies = property(getBodies)

    def getJoints(self):
        return Iterator(self, Joint).generator

    joints = property(getJoints)

    def _showNodes(self, nodes, cameraMask):
        # Reveal each node to the cameras selected by cameraMask.
        for node in nodes:
            node.show(panda.BitMask32(cameraMask))

    def _hideNodes(self, nodes):
        # Hide each node from all cameras.
        for node in nodes:
            node.hide(panda.BitMask32.allOn())

    def _toggleNodes(self, nodes, show, hide):
        # If any node is currently visible to the debug camera mask,
        # hide them all; otherwise show them all.
        for node in nodes:
            if not node.isHidden(panda.BitMask32(0x10000000)):
                hide()
                return
        show()

    def showSolids(self, cameraMask = 0x10000000):
        self._showNodes(self.solids, cameraMask)

    def hideSolids(self):
        self._hideNodes(self.solids)

    def showBodies(self, cameraMask = 0x10000000):
        self._showNodes(self.bodies, cameraMask)

    def hideBodies(self):
        self._hideNodes(self.bodies)

    def showJoints(self, cameraMask = 0x10000000):
        self._showNodes(self.joints, cameraMask)

    def hideJoints(self):
        self._hideNodes(self.joints)

    def toggleShowSolids(self):
        self._toggleNodes(self.solids, self.showSolids, self.hideSolids)

    def toggleShowBodies(self):
        self._toggleNodes(self.bodies, self.showBodies, self.hideBodies)

    def toggleShowJoints(self):
        self._toggleNodes(self.joints, self.showJoints, self.hideJoints)
| gpl-2.0 |
markand/duktape | testrunner/run_commit_test.py | 1 | 38382 | #!/usr/bin/env python2
#
# Python commit test run script.
#
# Intended to work on Linux, OS X, and Windows (both Cygwin and command
# prompt). Some notes for portability:
#
# * Use os.path.join() to join paths.
#
# * Use tarfile, zipfile, etc instead of native commands to avoid awkward
# situations on Windows, e.g. we might be running from the command prompt
# and executing 'tar' which is provided by Cygwin. Paths will work very
# badly in such situations. In general, don't mix Cygwin / non-Cygwin
# commands on Windows.
#
# * Avoid Duktape Makefile targets: they sometimes depend on /tmp which
# interferes with parallel runs.
#
# * Avoid mixing Cygwin and non-Cygwin repo snapshots (and git commands),
# as there are issues with mixed use like permissions causing unintended
# diffs.
#
import os
import sys
import re
import optparse
import subprocess
import time
import datetime
import traceback
import tarfile
import zipfile
import md5
#
# Parameters and option parsing, some control globals
#

# Whitelisted repos, limit to main repo for now.
repo_whitelist = [
    'svaarala/duktape'
]

# Strict reponame filter.
re_reponame = re.compile(r'^[a-zA-Z0-9/-]+$')

# Parse arguments.
parser = optparse.OptionParser()
parser.add_option('--repo-full-name', dest='repo_full_name', help='Full name of repository, e.g. "svaarala/duktape"')
parser.add_option('--repo-clone-url', dest='repo_clone_url', help='Repo HTTPS clone URI, e.g. "https://github.com/svaarala/duktape.git"')
parser.add_option('--commit-name', dest='commit_name', help='Commit SHA hash or tag name')
parser.add_option('--fetch-ref', dest='fetch_ref', default=None, help='Ref to fetch before checkout out SHA (e.g. +refs/pull/NNN/head)')
parser.add_option('--context', dest='context', help='Context identifying test type, e.g. "linux-x64-ecmatest"')
parser.add_option('--temp-dir', dest='temp_dir', help='Automatic temp dir created by testclient, automatically deleted (recursively) by testclient when test is done')
parser.add_option('--repo-snapshot-dir', dest='repo_snapshot_dir', help='Directory for repo tar.gz snapshots for faster test init')
(opts, args) = parser.parse_args()

# All of these arguments are required; this is an internal tool, so
# assert-based validation (AssertionError on a missing argument) is
# acceptable here.
repo_full_name = opts.repo_full_name
assert(repo_full_name is not None)
repo_clone_url = opts.repo_clone_url
assert(repo_clone_url is not None)
commit_name = opts.commit_name
assert(commit_name is not None)
context = opts.context
assert(context is not None)
temp_dir = opts.temp_dir
assert(temp_dir is not None)
repo_snapshot_dir = opts.repo_snapshot_dir
assert(repo_snapshot_dir is not None)
#
# Helpers
#
def newenv(**kw):
    # Build a copy of the process environment overlaid with **kw; all
    # values are coerced to plain strings (required by subprocess).
    merged = {}
    for source in (os.environ, kw):
        for key in source.keys():
            merged[key] = str(source[key])
    #print('Final environment: %r' % merged)
    return merged
def execute(cmd, env=None, catch=False, input='', dump_stdout=True, dump_stderr=True):
    """Run 'cmd' (an argument list), feeding 'input' to its stdin.

    Returns a dict with 'returncode', 'stdout', 'stderr', and 'success'.
    With catch=False a non-zero exit raises Exception; with catch=True
    the failure is reported via the 'success' flag instead.
    """
    print(' - ' + repr(cmd))

    success = True

    def dump(x):
        # Echo captured output, guaranteeing a trailing newline.
        if isinstance(x, unicode):  # Python 2 only; py3 has no 'unicode'
            x = x.encode('utf-8')
        if len(x) == 0 or x[-1] != '\n':
            x = x + '\n'
        sys.stdout.write(x)

    # Capture both streams; they are dumped after the process exits.
    proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
    ret = proc.communicate(input=input)
    if ret[0] != '' and dump_stdout:
        dump(ret[0])
    if ret[1] != '' and dump_stderr:
        dump(ret[1])
    if proc.returncode != 0:
        if catch:
            success = False
        else:
            raise Exception('command failed: %r' % cmd)
    return {
        'returncode': proc.returncode,
        'stdout': ret[0],
        'stderr': ret[1],
        'success': success
    }
def unpack_targz(fn):
    """Extract the tar.gz archive 'fn' into the current working directory."""
    print('Extracting %s to %s' % (fn, os.getcwd()))
    # try/finally guarantees the archive handle is closed even if
    # extraction raises (the original leaked it on error).
    t = tarfile.open(fn)
    try:
        t.extractall()
    finally:
        t.close()
def unpack_zip(fn):
    """Extract the zip archive 'fn' into the current working directory."""
    print('Extracting %s to %s' % (fn, os.getcwd()))
    # try/finally guarantees the archive handle is closed even if
    # extraction raises (the original leaked it on error).
    z = zipfile.ZipFile(fn, 'r')
    try:
        z.extractall()
    finally:
        z.close()
def get_binary_size(fn):
    """Return the text/data/bss/total section sizes of binary 'fn' as a
    dict, parsed from the 'size' utility output."""
    # Pattern works for Linux and OS X.
    res = execute([ 'size', fn ])
    pat = re.compile(r'.*?^\s*(\d+)\s+(\d+)\s+(\d+)\s+(\d+).*?', re.MULTILINE | re.DOTALL)
    m = pat.match(res['stdout'])
    if m is None:
        raise Exception('cannot figure out size for binary %r' % fn)
    text, data, bss, total = (int(g) for g in m.groups())
    return {
        'text': text,
        'data': data,
        'bss': bss,
        'total': total
    }
def format_size_diff(newsz, oldsz):
    """Render the new vs old size triplets plus the total delta on one line."""
    fields = (
        newsz['text'], newsz['data'], newsz['bss'],
        oldsz['text'], oldsz['data'], oldsz['bss'],
        newsz['total'] - oldsz['total'],
    )
    return '%d %d %d (%d %d %d): %d' % fields
# Free-form one-line result text picked up by the test runner; None when
# the context produces no extra description.
output_description = None

def set_output_description(desc):
    """Set the global output description reported with the test result."""
    global output_description
    output_description = desc
def prep(options=None, options_yaml=None):
    """Run tools/configure.py to generate prepared sources into ./prep.

    'options' is a list of extra command line arguments; 'options_yaml'
    is YAML text written to prep_options.yaml and passed to configure.py
    via --option-file.
    """
    cwd = os.getcwd()
    # Start from a clean prep directory.
    execute([ 'rm', '-rf', os.path.join(cwd, 'prep') ])
    cmd = [
        'python2', os.path.join(cwd, 'tools', 'configure.py'),
        '--source-directory', os.path.join(cwd, 'src-input'),
        '--output-directory', os.path.join(cwd, 'prep'),
        '--config-metadata', os.path.join(cwd, 'config')
    ]
    cmd += [ '--line-directives' ]
    if options is not None:
        cmd += options
    if options_yaml is not None:
        with open(os.path.join(cwd, 'prep_options.yaml'), 'wb') as f:
            f.write(options_yaml)
        cmd += [ '--option-file', os.path.join(cwd, 'prep_options.yaml') ]
        print('Prep options:')
        execute([ 'cat', os.path.join(cwd, 'prep_options.yaml') ])
    execute(cmd)
    execute([ 'ls', '-l', os.path.join(cwd, 'prep') ])
#
# Test context handlers
#
def genconfig_dist_src(genconfig_opts):
    """Regenerate dist/src/duk_config.h with genconfig using the given
    extra options (overwrites the default header in the dist tree)."""
    cwd = os.getcwd()
    cmd = [
        'python2', os.path.join(cwd, 'tools', 'genconfig.py'),
        '--metadata', os.path.join(cwd, 'config'),
        '--output', os.path.join(cwd, 'dist', 'src', 'duk_config.h')
    ]
    cmd += genconfig_opts
    cmd.append('duk-config-header')
    execute(cmd)
def context_codepolicycheck():
    """Run 'make codepolicycheck' as Travis would; True on success."""
    result = execute([ 'make', 'codepolicycheck' ], env=newenv(TRAVIS=1), catch=True)
    return result['success']
def context_helper_x64_ecmatest(env=None, genconfig_opts=[], valgrind=False):
    """Build duk (with optional genconfig options) and run the Ecmascript
    testcases; returns True if the run succeeded.  valgrind=True runs
    testcases under valgrind."""
    cwd = os.getcwd()
    execute([ 'make', 'dist' ])
    genconfig_dist_src(genconfig_opts)
    execute([ 'make', 'duk', 'runtestsdeps' ])
    opts = []
    if valgrind:
        opts.append('--valgrind')
    # Testcase directory is the final positional argument.
    return execute([
        'node',
        os.path.join(cwd, 'runtests', 'runtests.js'),
        '--prep-test-path', os.path.join(cwd, 'util', 'prep_test.py'),
        '--minify-uglifyjs2', os.path.join(cwd, 'UglifyJS2', 'bin', 'uglifyjs'),
        '--util-include-path', os.path.join(cwd, 'tests', 'ecmascript'),
        '--known-issues', os.path.join(cwd, 'doc', 'testcase-known-issues.yaml'),
        '--run-duk', '--cmd-duk', os.path.join(cwd, 'duk'),
        '--num-threads', '1',
        '--log-file', os.path.join(cwd, 'test.out')
    ] + opts + [
        os.path.join(cwd, 'tests', 'ecmascript')
    ], env=env, catch=True)['success']
def context_linux_x64_ecmatest():
    """Ecmascript testcases, x64, default options."""
    return context_helper_x64_ecmatest(env=newenv())
def context_linux_arm_ecmatest():
    """Ecmascript testcases, ARM."""
    return context_helper_x64_ecmatest(env=newenv()) # no difference to x64 now
def context_linux_x64_ecmatest_assert():
    """Ecmascript testcases with assertions enabled."""
    return context_helper_x64_ecmatest(env=newenv(), genconfig_opts=[ '-DDUK_USE_ASSERTIONS' ])
def context_linux_x64_ecmatest_valgrind():
    """Ecmascript testcases executed under valgrind."""
    return context_helper_x64_ecmatest(env=newenv(), valgrind=True)
def context_helper_x64_apitest(env=None, genconfig_opts=[], valgrind=False):
    """Build duk (with optional genconfig options) and run the API
    testcases; returns True if the run succeeded.  valgrind=True runs
    testcases under valgrind."""
    cwd = os.getcwd()
    execute([ 'make', 'dist' ])
    genconfig_dist_src(genconfig_opts)
    execute([ 'make', 'apiprep' ])
    opts = []
    if valgrind:
        opts.append('--valgrind')
    # Keep argument ordering consistent with context_helper_x64_ecmatest:
    # extra options first, testcase directory as the final positional
    # argument (previously --valgrind was appended after the path and a
    # stray empty list was concatenated).
    return execute([
        'node',
        os.path.join(cwd, 'runtests', 'runtests.js'),
        '--prep-test-path', os.path.join(cwd, 'util', 'prep_test.py'),
        '--minify-uglifyjs2', os.path.join(cwd, 'UglifyJS2', 'bin', 'uglifyjs'),
        '--util-include-path', os.path.join(cwd, 'tests', 'ecmascript'),
        '--known-issues', os.path.join(cwd, 'doc', 'testcase-known-issues.yaml'),
        '--run-duk', '--cmd-duk', os.path.join(cwd, 'duk'),
        '--num-threads', '1',
        '--log-file', os.path.join(cwd, 'test.out')
    ] + opts + [
        os.path.join(cwd, 'tests', 'api')
    ], env=env, catch=True)['success']
def context_linux_x64_apitest():
    """API testcases, x64, default options."""
    return context_helper_x64_apitest(env=newenv())
def context_linux_arm_apitest():
    """API testcases, ARM."""
    return context_helper_x64_apitest(env=newenv()) # no difference to x64 now
def context_linux_x64_apitest_assert():
    """API testcases with assertions enabled."""
    return context_helper_x64_apitest(env=newenv(), genconfig_opts=[ '-DDUK_USE_ASSERTIONS' ])
def context_linux_x64_apitest_valgrind():
    """API testcases executed under valgrind."""
    return context_helper_x64_apitest(env=newenv(), valgrind=True)
def context_linux_x64_v8_bench_pass():
    """Run the Google V8 benchmark once as a functional test; any crash
    or assert fails the context, the score itself is ignored."""
    cwd = os.getcwd()
    print('NOTE: This performance test is executed as a functional')
    print('test because it also stress GC etc; the benchmark score')
    print('is meaningless unless executed on dedicated hardware.')
    print('')
    unpack_targz(os.path.join(repo_snapshot_dir, 'google-v8-benchmark-v7.tar.gz'))
    execute([ 'make', 'duk' ])
    os.chdir(os.path.join(cwd, 'tests', 'google-v8-benchmark-v7'))
    execute([ 'make', 'combined.js' ])
    os.chdir(cwd)
    execute([ os.path.join(cwd, 'duk'), os.path.join('tests', 'google-v8-benchmark-v7', 'combined.js') ])
    return True
def context_linux_x64_octane():
    """Run the Octane benchmark five times and report the average score
    via TESTRUNNER_DESCRIPTION; False if no score could be parsed.

    Fix: the original crashed (min()/ZeroDivisionError on an empty
    'scores' list) if the SCORE line never appeared in the output.
    """
    cwd = os.getcwd()
    execute([ 'make', 'duk.O2' ])
    os.chdir(os.path.join(cwd, 'tests', 'octane'))
    scores = []
    for i in xrange(5):
        res = execute([ 'make', 'test' ])
        m = re.match(r'.*?SCORE (\d+)', res['stdout'], re.DOTALL)
        if m is not None:
            scores.append(float(m.group(1)))
        if scores:
            print('scores so far: min=%f, max=%f, avg=%f: %r' % (min(scores), max(scores), sum(scores) / float(len(scores)), scores))
    if not scores:
        print('could not parse any SCORE lines from octane output')
        return False
    print('TESTRUNNER_DESCRIPTION: %.1f (%d-%d)' % (float(sum(scores) / float(len(scores))), int(min(scores)), int(max(scores))))
    return True
def context_linux_x64_duk_clang():
    """Compile the duk command line tool with clang and smoke test it."""
    cwd = os.getcwd()
    execute([ 'make', 'duk-clang' ])
    result = execute([
        os.path.join(cwd, 'duk-clang'),
        '-e', 'print("hello world!");'
    ])
    expected = 'hello world!\n'
    return result['stdout'] == expected
def context_linux_x64_duk_gxx():
    """Compile the duk command line tool with g++ and smoke test it."""
    cwd = os.getcwd()
    execute([ 'make', 'duk-g++' ])
    result = execute([
        os.path.join(cwd, 'duk-g++'),
        '-e', 'print("hello world!");'
    ])
    expected = 'hello world!\n'
    return result['stdout'] == expected
def context_helper_get_binary_size_diff(compfn):
    """Build with compfn() on the current commit and on master, and record
    the binary size difference in the output description.

    Always returns True; the interesting result is the size diff text.
    (Refactored: the git clean/reset pair was repeated three times.)
    """
    cwd = os.getcwd()

    def clean_checkout():
        # Drop build artifacts and local changes before (re)building.
        execute([ 'git', 'clean', '-f' ])
        execute([ 'git', 'reset', '--quiet', '--hard' ])

    clean_checkout()
    compfn()
    newsz = get_binary_size(os.path.join(cwd, 'duk'))

    clean_checkout()
    execute([ 'git', 'checkout', '--quiet', 'master' ])
    clean_checkout()
    execute([ 'make', 'clean' ])
    compfn()
    oldsz = get_binary_size(os.path.join(cwd, 'duk'))

    set_output_description(format_size_diff(newsz, oldsz))
    return True
def context_linux_x64_gcc_defsize_makeduk():
    """Binary size diff of the default 'make duk' build vs master.

    Fix: removed an unused local ('cwd' was assigned but never read).
    """
    def comp():
        execute([ 'make', 'duk' ])
    return context_helper_get_binary_size_diff(comp)
def context_linux_x64_gcc_defsize_fltoetc():
    """Binary size diff of a default-config -Os/LTO x64 build vs master."""
    cwd = os.getcwd()
    def comp():
        execute([ 'make', 'dist' ])
        # Size-optimized build: -Os, LTO, dead code elimination.
        execute([
            'gcc', '-oduk',
            '-Os', '-fomit-frame-pointer',
            '-fno-stack-protector',
            '-flto', '-fno-asynchronous-unwind-tables',
            '-ffunction-sections', '-Wl,--gc-sections',
            '-I' + os.path.join('dist', 'src'),
            '-I' + os.path.join('dist', 'examples', 'cmdline'),
            os.path.join(cwd, 'dist', 'src', 'duktape.c'),
            os.path.join(cwd, 'dist', 'examples', 'cmdline', 'duk_cmdline.c'),
            '-lm'
        ])
    return context_helper_get_binary_size_diff(comp)
def context_helper_minsize_fltoetc(archopt, strip):
    """Binary size diff helper: configure with the low_memory option
    file(s), compile with -Os/LTO for 'archopt' (-m64/-m32/-mx32), smoke
    test the result, and diff against master."""
    cwd = os.getcwd()
    def comp():
        execute([ 'make', 'clean' ])
        execute([ 'rm', '-rf', os.path.join(cwd, 'prep') ])
        cmd = [
            'python2', os.path.join(cwd, 'tools', 'configure.py'),
            '--source-directory', os.path.join(cwd, 'src-input'),
            '--output-directory', os.path.join(cwd, 'prep'),
            '--config-metadata', os.path.join(cwd, 'config'),
            '--option-file', os.path.join(cwd, 'config', 'examples', 'low_memory.yaml')
        ]
        if strip:
            # Stripped variant also drops to 8-bit Unicode tables.
            cmd += [
                '--option-file', os.path.join(cwd, 'config', 'examples', 'low_memory_strip.yaml'),
                '--unicode-data', os.path.join(cwd, 'src-input', 'UnicodeData-8bit.txt'),
                '--special-casing', os.path.join(cwd, 'src-input', 'SpecialCasing-8bit.txt')
            ]
        execute(cmd)
        execute([
            'gcc', '-oduk', archopt,
            '-Os', '-fomit-frame-pointer',
            '-fno-stack-protector',
            '-flto', '-fno-asynchronous-unwind-tables',
            '-ffunction-sections', '-Wl,--gc-sections',
            '-I' + os.path.join('prep'),
            '-I' + os.path.join('examples', 'cmdline'),
            os.path.join(cwd, 'prep', 'duktape.c'),
            os.path.join(cwd, 'examples', 'cmdline', 'duk_cmdline.c'),
            '-lm'
        ])
        # Smoke test: a stripped build must still evaluate a script.
        res = execute([
            os.path.join(cwd, 'duk')
        ], input='1+2, "hello world!"')
        return 'hello world' in res['stdout']
    return context_helper_get_binary_size_diff(comp)
def context_linux_x64_gcc_minsize_fltoetc():
    """Low-memory size diff, x64."""
    return context_helper_minsize_fltoetc('-m64', False)
def context_linux_x86_gcc_minsize_fltoetc():
    """Low-memory size diff, x86."""
    return context_helper_minsize_fltoetc('-m32', False)
def context_linux_x32_gcc_minsize_fltoetc():
    """Low-memory size diff, x32 ABI."""
    return context_helper_minsize_fltoetc('-mx32', False)
def context_linux_x64_gcc_stripsize_fltoetc():
    """Stripped low-memory size diff, x64."""
    return context_helper_minsize_fltoetc('-m64', True)
def context_linux_x86_gcc_stripsize_fltoetc():
    """Stripped low-memory size diff, x86."""
    return context_helper_minsize_fltoetc('-m32', True)
def context_linux_x32_gcc_stripsize_fltoetc():
    """Stripped low-memory size diff, x32 ABI."""
    return context_helper_minsize_fltoetc('-mx32', True)
def context_linux_x64_cpp_exceptions():
    """Build with DUK_USE_CPP_EXCEPTIONS and verify C++ exception based
    unwinding runs destructors (counted via example output)."""
    # For now rather simple: compile, run, and grep for my_class
    # destruction prints. There are only 3 without C++ exceptions
    # and 15 with them.
    cwd = os.getcwd()
    prep(options=[ '-DDUK_USE_CPP_EXCEPTIONS' ])
    execute([
        'g++', '-oduk-cpp-exc',
        '-I' + os.path.join(cwd, 'prep'),
        os.path.join(cwd, 'prep', 'duktape.c'),
        os.path.join(cwd, 'examples', 'cpp-exceptions', 'cpp_exceptions.cpp'),
        '-lm'
    ])

    res = execute([
        os.path.join(cwd, 'duk-cpp-exc')
    ])
    count = 0
    for line in res['stdout'].split('\n'):
        if 'my_class instance destroyed' in line:
            count += 1
    print('Destruct count: %d' % count)

    # 15+ destructor prints indicate exception-based unwinding works.
    if count >= 15:
        print('C++ exceptions seem to be working')
        return True
    else:
        print('C++ exceptions don\'t seem to be working')
        return False
def context_linux_x86_ajduk():
    """Build the AllJoyn.js style 'ajduk' binary and smoke test it."""
    cwd = os.getcwd()
    execute([ 'make', 'ajduk' ])
    result = execute([
        os.path.join(cwd, 'ajduk'),
        '-e', 'print("hello world!");'
    ])
    # ajduk stdout contains extra diagnostics, so use a substring check.
    return 'hello world!\n' in result['stdout']
def context_linux_x86_ajduk_norefc():
    """Build 'ajduk' without reference counting and smoke test it."""
    cwd = os.getcwd()
    execute([ 'make', 'ajduk-norefc' ])
    result = execute([
        os.path.join(cwd, 'ajduk-norefc'),
        '-e', 'print("hello world!");'
    ])
    # ajduk stdout contains extra diagnostics, so use a substring check.
    return 'hello world!\n' in result['stdout']
def context_linux_x86_ajduk_rombuild():
    """Build ajduk with ROM built-ins and verify both a trivial script
    and a ROM-provided custom binding (StarTrek.ent) work."""
    cwd = os.getcwd()

    execute([ 'make', 'ajduk-rom' ])

    got_hello = False
    got_startrek = False

    res = execute([
        os.path.join(cwd, 'ajduk-rom'),
        '-e', 'print("hello world!");'
    ])
    got_hello = ('hello world!\n' in res['stdout'])  # ajduk stdout has pool dumps etc
    print('Got hello: %r' % got_hello)

    # StarTrek.ent is a custom ROM builtin; its presence proves the ROM
    # object set was actually baked in.
    res = execute([
        os.path.join(cwd, 'ajduk-rom'),
        '-e', 'print("StarTrek.ent:", StarTrek.ent);'
    ])
    got_startrek = ('StarTrek.ent: true\n' in res['stdout'])
    print('Got StarTrek: %r' % got_startrek)

    return got_hello and got_startrek
def context_linux_x64_test262test():
    """Run the test262 ES5 suite against duk and filter known issues;
    True if the filtered log reports success."""
    cwd = os.getcwd()
    execute([ 'make', 'duk' ])

    # Unpack separately, 'make clean' wipes this.
    unpack_targz(os.path.join(repo_snapshot_dir, 'test262-es5-tests.tar.gz'))
    unpack_zip(os.path.join(cwd, 'es5-tests.zip'))

    os.chdir(os.path.join(cwd, 'test262-es5-tests'))
    res = execute([
        'python2',
        os.path.join(cwd, 'test262-es5-tests', 'tools', 'packaging', 'test262.py'),
        '--command', os.path.join(cwd, 'duk') + ' {{path}}'
    ], dump_stdout=False, dump_stderr=True)
    test262_log = res['stdout']
    os.chdir(cwd)

    # Pipe the raw log through the known-issues filter.
    res = execute([
        'python2',
        os.path.join(cwd, 'util', 'filter_test262_log.py'),
        os.path.join(cwd, 'doc', 'test262-known-issues.yaml')
    ], input=test262_log)

    # Test result plumbing a bit awkward but works for now.
    # Known and diagnosed issues are considered a "pass" for
    # Github status.
    return 'TEST262 SUCCESS\n' in res['stdout']
def context_linux_x64_duk_dddprint():
    """Build duk with very verbose debug logging (DUK_USE_DEBUG_LEVEL 2)
    and check a trivial script still evaluates correctly."""
    cwd = os.getcwd()

    prep(options_yaml=r"""
DUK_USE_ASSERTIONS: true
DUK_USE_SELF_TESTS: true
DUK_USE_DEBUG: true
DUK_USE_DEBUG_LEVEL: 2
DUK_USE_DEBUG_WRITE:
  verbatim: "#define DUK_USE_DEBUG_WRITE(level,file,line,func,msg) do {fprintf(stderr, \"%ld %s:%ld (%s): %s\\n\", (long) (level), (file), (long) (line), (func), (msg));} while(0)"
""")

    res = execute([
        'gcc', '-oduk',
        '-DDUK_CMDLINE_PRINTALERT_SUPPORT',
        '-I' + os.path.join(cwd, 'prep'),
        '-I' + os.path.join(cwd, 'extras', 'print-alert'),
        os.path.join(cwd, 'prep', 'duktape.c'),
        os.path.join(cwd, 'examples', 'cmdline', 'duk_cmdline.c'),
        os.path.join(cwd, 'extras', 'print-alert', 'duk_print_alert.c'),
        '-lm'
    ], catch=True)
    if not res['success']:
        print('Compilation failed.')
        return False

    # Stderr is suppressed: debug logging at this level is very noisy.
    res = execute([
        os.path.join(cwd, 'duk'),
        '-e', 'print("Hello world!");'
    ], dump_stderr=False)
    return 'Hello world!\n' in res['stdout']
def context_linux_x64_duk_separate_src():
    """Build duk from the separate-sources distribution (src-separate)
    and smoke test it."""
    cwd = os.getcwd()
    execute([ 'make', 'dist' ])
    os.chdir(os.path.join(cwd, 'dist'))

    # Collect every .c file from the separate source tree plus the
    # cmdline tool and the print-alert extra.
    cfiles =[]
    for fn in os.listdir(os.path.join(cwd, 'dist', 'src-separate')):
        if fn[-2:] == '.c':
            cfiles.append(os.path.join(cwd, 'dist', 'src-separate', fn))
    cfiles.append(os.path.join(cwd, 'dist', 'examples', 'cmdline', 'duk_cmdline.c'))
    cfiles.append(os.path.join(cwd, 'dist', 'extras', 'print-alert', 'duk_print_alert.c'))

    execute([
        'gcc', '-oduk',
        '-DDUK_CMDLINE_PRINTALERT_SUPPORT',
        '-I' + os.path.join(cwd, 'dist', 'src-separate'),
        '-I' + os.path.join(cwd, 'dist', 'extras', 'print-alert')
    ] + cfiles + [
        '-lm'
    ])
    res = execute([
        os.path.join(cwd, 'dist', 'duk'),
        '-e', 'print("Hello world!");'
    ])
    return 'Hello world!\n' in res['stdout']
def context_linux_x86_packed_tval():
    """Build x86 duk and verify via Duktape.info that the packed duk_tval
    representation is actually in use (tag range and object size)."""
    cwd = os.getcwd()
    execute([ 'make', 'dist' ])
    os.chdir(os.path.join(cwd, 'dist'))

    execute([
        'gcc', '-oduk', '-m32',
        '-DDUK_CMDLINE_PRINTALERT_SUPPORT',
        '-I' + os.path.join(cwd, 'dist', 'src'),
        '-I' + os.path.join(cwd, 'dist', 'extras', 'print-alert'),
        os.path.join(cwd, 'dist', 'src', 'duktape.c'),
        os.path.join(cwd, 'dist', 'examples', 'cmdline', 'duk_cmdline.c'),
        os.path.join(cwd, 'dist', 'extras', 'print-alert', 'duk_print_alert.c'),
        '-lm'
    ])

    # Size of a 3-element array is 25 + 3x16 = 73 on x64 and
    # 13 + 3x8 = 37 on x86.
    res = execute([
        os.path.join(cwd, 'dist', 'duk'),
        '-e',
        'var arr = Duktape.compact([1,2,3]); ' +
        'print(Duktape.info(true).itag >= 0xf000); ' + # packed internal tag
        'print(Duktape.info(arr).pbytes <= 40)' # array size (1 element + .length property)
    ]);
    return res['stdout'] == 'true\ntrue\n'
def context_linux_x86_dist_genconfig():
    """Run genconfig from the dist tree with JX/JC disabled and verify
    the resulting build rejects Duktape.enc('jx', ...)."""
    cwd = os.getcwd()
    execute([ 'make', 'dist' ])

    os.chdir(os.path.join(cwd, 'dist'))
    execute([
        'python2', os.path.join(cwd, 'dist', 'tools', 'genconfig.py'),
        '--metadata', os.path.join(cwd, 'dist', 'config'),
        '--output', os.path.join(cwd, 'dist', 'src', 'duk_config.h'),  # overwrite default duk_config.h
        '-DDUK_USE_FASTINT', '-UDUK_USE_JX', '-UDUK_USE_JC',
        'duk-config-header'
    ])

    os.chdir(os.path.join(cwd, 'dist'))
    execute([
        'gcc', '-oduk',
        '-DDUK_CMDLINE_PRINTALERT_SUPPORT',
        '-I' + os.path.join(cwd, 'dist', 'src'),
        '-I' + os.path.join(cwd, 'dist', 'extras', 'print-alert'),
        os.path.join(cwd, 'dist', 'src', 'duktape.c'),
        os.path.join(cwd, 'dist', 'examples', 'cmdline', 'duk_cmdline.c'),
        os.path.join(cwd, 'dist', 'extras', 'print-alert', 'duk_print_alert.c'),
        '-lm'
    ])
    # With JX disabled the encoder must throw a TypeError.
    res = execute([
        os.path.join(cwd, 'dist', 'duk'),
        '-e', 'try { print(Duktape.enc("jx", {})); } catch (e) { print("ERROR: " + e.name); }'
    ])
    return 'ERROR: TypeError\n' in res['stdout']
def context_linux_x64_error_variants():
    """Build and smoke test the three error verbosity configurations."""
    # Test Duktape build using:
    # (1) verbose and non-paranoid errors
    # (2) verbose and paranoid errors
    # (3) non-verbose errors
    cwd = os.getcwd()
    retval = True
    for params in [
        { 'genconfig_opts': [ '-DDUK_USE_VERBOSE_ERRORS', '-UDUK_USE_PARANOID_ERRORS' ],
          'binary_name': 'duk.verbose_nonparanoid' },
        { 'genconfig_opts': [ '-DDUK_USE_VERBOSE_ERRORS', '-DDUK_USE_PARANOID_ERRORS' ],
          'binary_name': 'duk.verbose_paranoid' },
        { 'genconfig_opts': [ '-UDUK_USE_VERBOSE_ERRORS', '-UDUK_USE_PARANOID_ERRORS' ],
          'binary_name': 'duk.nonverbose' },
    ]:
        os.chdir(cwd)
        execute([ 'make', 'clean', 'dist' ])
        os.chdir(os.path.join(cwd, 'dist'))
        execute([
            'python2', os.path.join(cwd, 'dist', 'tools', 'genconfig.py'),
            '--metadata', os.path.join(cwd, 'dist', 'config'),
            '--output', os.path.join(cwd, 'dist', 'src', 'duk_config.h')  # overwrite default duk_config.h
        ] + params['genconfig_opts'] + [
            'duk-config-header'
        ])
        execute([
            'gcc', '-o' + params['binary_name'],
            '-DDUK_CMDLINE_PRINTALERT_SUPPORT',
            '-I' + os.path.join(cwd, 'dist', 'src'),
            '-I' + os.path.join(cwd, 'dist', 'extras', 'print-alert'),
            os.path.join(cwd, 'dist', 'src', 'duktape.c'),
            os.path.join(cwd, 'dist', 'examples', 'cmdline', 'duk_cmdline.c'),
            os.path.join(cwd, 'dist', 'extras', 'print-alert', 'duk_print_alert.c'),
            '-lm'
        ])
        execute([ 'size', params['binary_name'] ])
        # Script that triggers a TypeError and prints the error details.
        with open('test.js', 'wb') as f:
            f.write("""\
try {
    (undefined).foo = 123;
} catch (e) {
    print('ERRORNAME: ' + e.name);
    print('ERRORMESSAGE: ' + e);
    print(e.stack);
}
""")
        res = execute([
            os.path.join(cwd, 'dist', params['binary_name']),
            'test.js'
        ])
        if 'ERRORNAME: TypeError\n' not in res['stdout']:
            print('Cannot find error name in output')
            retval = False
    # For now, just check that the code compiles and error Type is
    # correct. XXX: add check for error message too.
    return retval
def context_helper_hello_ram(archopt):
    """Measure massif peak heap usage (kB) of the hello example for three
    configurations: default, no bufferobjects, and ROM built-ins.  The
    three figures go into the output description; always returns True."""
    cwd = os.getcwd()

    def test(genconfig_opts):
        # Configure + build the hello example with the given options,
        # then run it under valgrind massif and parse the peak kB.
        os.chdir(cwd)
        execute([ 'make', 'clean' ])
        execute([ 'rm', '-rf', os.path.join(cwd, 'prep') ])
        cmd = [
            'python2', os.path.join(cwd, 'tools', 'configure.py'),
            '--source-directory', os.path.join(cwd, 'src-input'),
            '--output-directory', os.path.join(cwd, 'prep'),
            '--config-metadata', os.path.join(cwd, 'config'),
            '--rom-support'
        ] + genconfig_opts
        print(repr(cmd))
        execute(cmd)
        execute([
            'gcc', '-ohello', archopt,
            '-Os', '-fomit-frame-pointer',
            '-fno-stack-protector',
            '-flto', '-fno-asynchronous-unwind-tables',
            '-ffunction-sections', '-Wl,--gc-sections',
            '-I' + os.path.join('prep'),
            os.path.join(cwd, 'prep', 'duktape.c'),
            os.path.join(cwd, 'examples', 'hello', 'hello.c'),
            '-lm'
        ])
        execute([
            'size',
            os.path.join(cwd, 'hello')
        ])
        execute([
            'valgrind', '--tool=massif',
            '--massif-out-file=' + os.path.join(cwd, 'massif.out'),
            '--peak-inaccuracy=0.0',
            os.path.join(cwd, 'hello')
        ])
        res = execute([
            'ms_print',
            os.path.join(cwd, 'massif.out')
        ], dump_stdout=False)
        lines = res['stdout'].split('\n')
        print('\n'.join(lines[0:50]))  # print 50 first lines only

        # Parse the peak kilobyte figure from the ms_print graph, e.g.:
        #    KB
        #107.5^                                                             :
        #     |                                                 @#::::@:: :::@:::
        kb = '???'
        re_kb = re.compile(r'^([0-9\.]+)\^.*?$')
        for line in lines[0:10]:
            m = re_kb.match(line)
            if m is not None:
                kb = m.group(1)
        print(' --> KB: ' + kb)
        return kb

    print('--- Default')
    print('')
    kb_default = test([])

    print('')
    print('--- No bufferobject support')
    print('')
    kb_nobufobj = test([
        '-UDUK_USE_BUFFEROBJECT_SUPPORT'
    ])

    print('')
    print('--- ROM built-ins, global object inherits from ROM global')
    print('--- No other low memory options (fast paths, pointer compression, etc)')
    print('')
    kb_rom = test([
        '-DDUK_USE_ROM_OBJECTS',
        '-DDUK_USE_ROM_STRINGS',
        '-DDUK_USE_ROM_GLOBAL_INHERIT',
        '-UDUK_USE_HSTRING_ARRIDX'
    ])

    set_output_description('%s %s %s (kB)' % (kb_default, kb_nobufobj, kb_rom))
    return True
def context_linux_x64_hello_ram():
    """Massif RAM usage for the hello example, x64."""
    return context_helper_hello_ram('-m64')
def context_linux_x86_hello_ram():
    """Massif RAM usage for the hello example, x86."""
    return context_helper_hello_ram('-m32')
def context_linux_x32_hello_ram():
    """Massif RAM usage for the hello example, x32 ABI."""
    return context_helper_hello_ram('-mx32')
def mandel_test(archopt, genconfig_opts):
    """Build duk for 'archopt' with the given genconfig options and
    verify mandel.js output against a known-good md5 checksum."""
    cwd = os.getcwd()
    execute([ 'make', 'dist' ])
    execute([
        'python2', os.path.join(cwd, 'tools', 'genconfig.py'),
        '--metadata', os.path.join(cwd, 'config'),
        '--output', os.path.join(cwd, 'dist', 'src', 'duk_config.h')
    ] + genconfig_opts + [
        'duk-config-header'
    ])
    execute([
        'gcc', '-oduk', archopt,
        '-Os', '-fomit-frame-pointer',
        '-fno-stack-protector',
        '-flto', '-fno-asynchronous-unwind-tables',
        '-ffunction-sections', '-Wl,--gc-sections',
        '-DDUK_CMDLINE_PRINTALERT_SUPPORT',
        '-I' + os.path.join('dist', 'src'),
        '-I' + os.path.join(cwd, 'dist', 'extras', 'print-alert'),
        '-I' + os.path.join('dist', 'examples', 'cmdline'),
        os.path.join(cwd, 'dist', 'src', 'duktape.c'),
        os.path.join(cwd, 'dist', 'examples', 'cmdline', 'duk_cmdline.c'),
        os.path.join(cwd, 'dist', 'extras', 'print-alert', 'duk_print_alert.c'),
        '-lm'
    ])
    execute([ 'size', os.path.join(cwd, 'duk') ])
    # Sanity print of version/env/Math.PI before the actual test run.
    res = execute([
        os.path.join(cwd, 'duk'),
        '-e', 'print(Duktape.version); print(Duktape.env); print(Math.PI)'
    ])
    res = execute([
        os.path.join(cwd, 'duk'),
        os.path.join('dist', 'mandel.js')
    ])
    # NOTE: md5 module and .encode('hex') are Python 2 specific.
    md5_stdout = md5.md5(res['stdout']).digest().encode('hex')
    md5_expect = '627cd86f0a4255e018c564f86c6d0ab3'
    print(md5_stdout)
    print(md5_expect)
    return md5_stdout == md5_expect
def context_linux_regconst_variants():
    """Run the mandel test with DUK_USE_EXEC_REGCONST_OPTIMIZE enabled
    and disabled on x64 and x86."""
    res = True
    # NOTE: 'and' short circuits, so variants after the first failure
    # are not executed; the context still fails as intended.
    res = res and mandel_test('-m64', [ '-DDUK_USE_EXEC_REGCONST_OPTIMIZE' ])
    res = res and mandel_test('-m64', [ '-UDUK_USE_EXEC_REGCONST_OPTIMIZE' ])
    res = res and mandel_test('-m32', [ '-DDUK_USE_EXEC_REGCONST_OPTIMIZE' ])
    res = res and mandel_test('-m32', [ '-UDUK_USE_EXEC_REGCONST_OPTIMIZE' ])
    return res
def context_linux_tval_variants():
    """Run the mandel test over packed/unpacked duk_tval, 64-bit ops, and
    fastint combinations on x64 and x86."""
    # Cover most duk_tval.h cases, but only for little endian now.
    res = True
    for archopt in [ '-m64', '-m32' ]:
        optsets = []
        optsets.append([ '-UDUK_USE_PACKED_TVAL', '-DDUK_USE_64BIT_OPS', '-DDUK_USE_FASTINT' ])
        optsets.append([ '-UDUK_USE_PACKED_TVAL', '-DDUK_USE_64BIT_OPS', '-UDUK_USE_FASTINT' ])
        optsets.append([ '-UDUK_USE_PACKED_TVAL', '-UDUK_USE_64BIT_OPS', '-UDUK_USE_FASTINT' ])
        if archopt == '-m32':
            # Packed duk_tval variants only apply to 32-bit pointers.
            optsets.append([ '-DDUK_USE_PACKED_TVAL', '-DDUK_USE_64BIT_OPS', '-DDUK_USE_FASTINT' ])
            optsets.append([ '-DDUK_USE_PACKED_TVAL', '-DDUK_USE_64BIT_OPS', '-UDUK_USE_FASTINT' ])
            optsets.append([ '-DDUK_USE_PACKED_TVAL', '-UDUK_USE_64BIT_OPS', '-UDUK_USE_FASTINT' ])
        for optset in optsets:
            # NOTE: short circuits after the first failing variant.
            res = res and mandel_test(archopt, optset)
    return res
def context_linux_x64_minisphere():
    """Compile the minisphere game engine against current Duktape; True
    if the build succeeds."""
    cwd = os.getcwd()

    execute([ 'make', 'dist' ])

    # Unpack minisphere snapshot and copy Duktape files over.
    unpack_targz(os.path.join(repo_snapshot_dir, 'minisphere-20160516.tar.gz'))

    prep(options=[ '--fixup-file', os.path.join(cwd, 'minisphere', 'src', 'engine', 'duk_custom.h') ])
    for i in [ 'duktape.c', 'duktape.h', 'duk_config.h' ]:
        execute([
            'cp',
            os.path.join(cwd, 'prep', i),
            os.path.join(cwd, 'minisphere', 'src', 'shared', i)
        ])

    # sudo apt-get install liballegro5-dev libmng-dev
    os.chdir(os.path.join(cwd, 'minisphere'))
    return execute([ 'make' ], catch=True)['success']
def context_linux_x64_dukluv():
    """Build dukluv against the current Duktape dist and run its test-argv.js
    smoke test.  Returns False on build failure, True otherwise."""
    cwd = os.getcwd()
    execute([ 'make', 'dist' ])

    # Unpack dukluv snapshot and symlink dukluv/lib/duktape to dist.
    unpack_targz(os.path.join(repo_snapshot_dir, 'dukluv-20160528.tar.gz'))

    # Keep the original bundled Duktape around as 'duktape-moved'.
    execute([
        'mv',
        os.path.join(cwd, 'dukluv', 'lib', 'duktape'),
        os.path.join(cwd, 'dukluv', 'lib', 'duktape-moved')
    ])
    execute([
        'ln',
        '-s',
        os.path.join(cwd, 'dist'),
        os.path.join(cwd, 'dukluv', 'lib', 'duktape')
    ])

    # Out-of-tree cmake build.
    os.chdir(os.path.join(cwd, 'dukluv'))
    execute([ 'mkdir', 'build' ])
    os.chdir(os.path.join(cwd, 'dukluv', 'build'))
    execute([ 'cmake', '..' ])
    res = execute([ 'make' ], catch=True)
    if not res['success']:
        print('Build failed!')
        return False

    # Binary is in dukluv/build/dukluv.
    execute([
        os.path.join(cwd, 'dukluv', 'build', 'dukluv'),
        os.path.join(cwd, 'dukluv', 'test-argv.js')
    ])
    return True
# Maps a context name (as given on the command line) to its handler function.
# Each handler returns True on test success, False on failure.
context_handlers = {
    # Linux

    'codepolicycheck': context_codepolicycheck,
    'linux-x64-ecmatest': context_linux_x64_ecmatest,
    'linux-arm-ecmatest': context_linux_arm_ecmatest,
    'linux-x64-ecmatest-assert': context_linux_x64_ecmatest_assert,
    'linux-x64-ecmatest-valgrind': context_linux_x64_ecmatest_valgrind,

    # old names
    'linux-x64-qecmatest': context_linux_x64_ecmatest,
    'linux-arm-qecmatest': context_linux_arm_ecmatest,
    'linux-x64-qecmatest-assert': context_linux_x64_ecmatest_assert,
    'linux-x64-qecmatest-valgrind': context_linux_x64_ecmatest_valgrind,

    # XXX: torture options

    'linux-x64-apitest': context_linux_x64_apitest,
    'linux-arm-apitest': context_linux_x64_apitest,
    'linux-x64-apitest-assert': context_linux_x64_apitest_assert,
    'linux-x64-apitest-valgrind': context_linux_x64_apitest_valgrind,

    'linux-x64-test262test': context_linux_x64_test262test,
    # XXX: torture options

    # XXX: regfuzztest
    # XXX: luajstest
    # XXX: jsinterpretertest
    # XXX: bluebirdtest
    # XXX: emscripteninceptiontest

    'linux-x64-duk-clang': context_linux_x64_duk_clang,
    'linux-x64-duk-gxx': context_linux_x64_duk_gxx,

    'linux-x64-gcc-defsize-makeduk': context_linux_x64_gcc_defsize_makeduk,
    'linux-x64-gcc-defsize-fltoetc': context_linux_x64_gcc_defsize_fltoetc,
    'linux-x64-gcc-minsize-fltoetc': context_linux_x64_gcc_minsize_fltoetc,
    'linux-x86-gcc-minsize-fltoetc': context_linux_x86_gcc_minsize_fltoetc,
    'linux-x32-gcc-minsize-fltoetc': context_linux_x32_gcc_minsize_fltoetc,
    'linux-x64-gcc-stripsize-fltoetc': context_linux_x64_gcc_stripsize_fltoetc,
    'linux-x86-gcc-stripsize-fltoetc': context_linux_x86_gcc_stripsize_fltoetc,
    'linux-x32-gcc-stripsize-fltoetc': context_linux_x32_gcc_stripsize_fltoetc,

    'linux-x64-cpp-exceptions': context_linux_x64_cpp_exceptions,

    'linux-x86-ajduk': context_linux_x86_ajduk,
    'linux-x86-ajduk-norefc': context_linux_x86_ajduk_norefc,
    'linux-x86-ajduk-rombuild': context_linux_x86_ajduk_rombuild,

    'linux-x64-v8-bench-pass': context_linux_x64_v8_bench_pass,
    'linux-x64-octane': context_linux_x64_octane,

    'linux-x64-duk-dddprint': context_linux_x64_duk_dddprint,
    'linux-x64-duk-separate-src': context_linux_x64_duk_separate_src,
    'linux-x86-packed-tval': context_linux_x86_packed_tval,
    'linux-x86-dist-genconfig': context_linux_x86_dist_genconfig,
    'linux-x64-error-variants': context_linux_x64_error_variants,

    'linux-x64-hello-ram': context_linux_x64_hello_ram,
    'linux-x86-hello-ram': context_linux_x86_hello_ram,
    'linux-x32-hello-ram': context_linux_x32_hello_ram,

    'linux-regconst-variants': context_linux_regconst_variants,
    'linux-tval-variants': context_linux_tval_variants,

    'linux-x64-minisphere': context_linux_x64_minisphere,
    'linux-x64-dukluv': context_linux_x64_dukluv,

    # OS X: can currently share Linux handlers

    'osx-x64-ecmatest': context_linux_x64_ecmatest,
    'osx-x64-qecmatest': context_linux_x64_ecmatest,
    'osx-x64-duk-clang': context_linux_x64_duk_clang,
    'osx-x64-duk-gxx': context_linux_x64_duk_gxx,
    'osx-x64-gcc-minsize-makeduk': context_linux_x64_gcc_defsize_makeduk
}
#
# Main
#
def main():
    """Validate the requested repo/context, check out the requested commit in
    a temp dir and run the matching context handler.

    Exits with 0 when the handler returns True, 1 when it returns False, and
    raises for anything else (invalid repo, unknown context, non-bool result).
    """
    print('*** Running test %r on %s' % (context, datetime.datetime.utcnow().isoformat() + 'Z'))
    print('')
    print('repo_full_name: ' + repo_full_name)
    print('repo_clone_url: ' + repo_clone_url)
    if opts.fetch_ref is not None:
        print('fetch_ref: ' + opts.fetch_ref)
    print('commit_name: ' + commit_name)
    print('context: ' + context)

    if not os.path.isdir(temp_dir):
        raise Exception('missing or invalid temporary directory: %r' % temp_dir)

    m = re_reponame.match(repo_full_name)
    if m is None:
        raise Exception('invalid repo name: %r' % repo_full_name)

    if repo_full_name not in repo_whitelist:
        raise Exception('repo name is not whitelisted: %r' % repo_full_name)

    # Replace full repo forward slashes with platform separator.
    # (Fix: was apply(os.path.join, ...); the apply() builtin is Python 2
    # only -- argument unpacking works on both Python 2 and 3.)
    repo_targz = os.path.join(*([ repo_snapshot_dir ] + (repo_full_name + '.tar.gz').split('/')))
    if repo_targz[0:len(repo_snapshot_dir)] != repo_snapshot_dir:
        raise Exception('internal error figuring out repo_targz: %r' % repo_targz)

    repo_dir = os.path.join(temp_dir, 'repo')

    os.chdir(temp_dir)
    os.mkdir(repo_dir)

    print('')
    print('*** GCC and Clang versions')
    print('')
    execute([ 'gcc', '-v' ], catch=True)
    execute([ 'clang', '-v' ], catch=True)

    print('')
    print('*** Unpack repos and helpers')
    print('')
    os.chdir(repo_dir)
    unpack_targz(repo_targz)
    execute([ 'git', 'config', 'core.filemode', 'false' ])  # avoid perm issues on Windows
    for fn in [
        'alljoyn-72930212134129ae0464df93c526a6d110cb82f7.tar.gz',
        'ajtcl-cf47440914f31553a0064f3dabbbf337921ea357.tar.gz',
        'closure-20160317.tar.gz',
        'uglifyjs2-20160317.tar.gz',
        'runtests-node-modules-20160320.tar.gz'
    ]:
        unpack_targz(os.path.join(repo_snapshot_dir, fn))

    # Bring the snapshot checkout to the requested commit from a clean master.
    execute([ 'git', 'clean', '-f' ])
    execute([ 'git', 'reset', '--quiet', '--hard' ])
    execute([ 'git', 'checkout', '--quiet', 'master' ])
    execute([ 'git', 'pull', '--rebase' ])
    execute([ 'git', 'clean', '-f' ])
    execute([ 'git', 'reset', '--quiet', '--hard' ])
    if opts.fetch_ref is not None:
        # For pull requests, fetch pull request head and hope the commit
        # still exists.
        execute([ 'git', 'fetch', '--quiet', '--force', 'origin', opts.fetch_ref ])
    execute([ 'git', 'checkout', '--quiet', commit_name ])
    execute([ 'git', 'describe', '--always', '--dirty' ])

    fn = context_handlers.get(context)
    if fn is None:
        # Fix: the original format string was missing its '% context'
        # argument, so the literal '%s' was printed.
        print('Unknown context %s, supported contexts:' % context)
        for ctx in sorted(context_handlers.keys()):
            print(' ' + ctx)
        raise Exception('context unknown: ' + context)

    print('')
    print('*** Running test for context: ' + context)
    print('')
    success = fn()
    print('')
    print('*** Finished test for context: ' + context + ', success: ' + repr(success))
    print('')

    if output_description is not None:
        print('TESTRUNNER_DESCRIPTION: ' + output_description)

    if success == True:
        # Testcase successful
        print('Test succeeded')
        sys.exit(0)
    elif success == False:
        # Testcase failed, but no test script error (= don't rerun automatically)
        print('Test failed')
        sys.exit(1)
    else:
        raise Exception('context handler returned a non-boolean: %r' % success)
if __name__ == '__main__':
    start_time = time.time()
    try:
        try:
            main()
        except SystemExit:
            # Preserve the exit codes set by main() (0 = pass, 1 = fail).
            raise
        except:
            # Test script failed, automatic retry is useful
            print('')
            print('*** Test script failed')
            print('')
            traceback.print_exc()

            # Exit code 2 marks a script error, distinct from a test failure.
            print('TESTRUNNER_DESCRIPTION: Test script error')
            sys.exit(2)
    finally:
        # Always report elapsed wall-clock time, even on failure/exit.
        end_time = time.time()
        print('')
        print('Test took %.2f minutes' % ((end_time - start_time) / 60.0))
| mit |
keimlink/django-cms | cms/tests/test_apphooks.py | 3 | 36973 | # -*- coding: utf-8 -*-
import sys
from django.contrib.admin.models import CHANGE, LogEntry
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import clear_url_caches, reverse, resolve
from django.test.utils import override_settings
from django.utils import six
from django.utils.timezone import now
from cms.api import create_page, create_title
from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from cms.appresolver import applications_page_check, clear_app_resolvers, get_app_patterns
from cms.cms_toolbars import PlaceholderToolbar
from cms.models import Title, Page
from cms.test_utils.project.placeholderapp.models import Example1
from cms.test_utils.testcases import CMSTestCase
from cms.tests.test_menu_utils import DumbPageLanguageUrl
from cms.toolbar.toolbar import CMSToolbar
from cms.utils.conf import get_cms_setting
from cms.utils.i18n import force_language
from cms.utils.urlutils import admin_reverse
from menus.utils import DefaultLanguageChanger
# Apphook names and the module that registers them, as used by the tests below.
APP_NAME = 'SampleApp'
NS_APP_NAME = 'NamespacedApp'
APP_MODULE = "cms.test_utils.project.sampleapp.cms_apps"
class ApphooksTestCase(CMSTestCase):
def setUp(self):
    """Reset resolver/apphook caches so each test starts from a clean state."""
    clear_app_resolvers()
    clear_url_caches()

    # Force a re-import of the sample cms_apps module on the next apphook load.
    if APP_MODULE in sys.modules:
        del sys.modules[APP_MODULE]

    self.reload_urls()
    self.apphook_clear()
def tearDown(self):
    """Mirror setUp: leave no cached resolvers/apphooks behind for other tests."""
    clear_app_resolvers()
    clear_url_caches()

    if APP_MODULE in sys.modules:
        del sys.modules[APP_MODULE]

    self.reload_urls()
    self.apphook_clear()
def reload_urls(self):
    """Drop cached urlconf modules so apphook URL changes take effect."""
    from django.conf import settings

    url_modules = [
        'cms.urls',
        # TODO: Add here intermediary modules which may
        # include() the 'cms.urls' if it isn't included
        # directly in the root urlconf.
        # '...',
        'cms.test_utils.project.second_cms_urls_for_apphook_tests',
        'cms.test_utils.project.urls_for_apphook_tests',
        settings.ROOT_URLCONF,
    ]

    clear_app_resolvers()
    clear_url_caches()

    # Removing the modules from sys.modules forces a fresh import next time.
    for module in url_modules:
        if module in sys.modules:
            del sys.modules[module]
def _fake_logentry(self, instance_id, user, text, model=Page):
    """Create an admin LogEntry (CHANGE) for ``instance_id`` and store its pk
    in the test client session as 'cms_log_latest', mimicking a recent admin
    change."""
    LogEntry.objects.log_action(
        user_id=user.id,
        content_type_id=ContentType.objects.get_for_model(model).pk,
        object_id=instance_id,
        object_repr=text,
        action_flag=CHANGE,
    )
    entry = LogEntry.objects.filter(user=user, action_flag__in=(CHANGE,))[0]
    session = self.client.session
    session['cms_log_latest'] = entry.pk
    session.save()
def create_base_structure(self, apphook, title_langs, namespace=None):
    """Create home -> child_page -> child_child_page (the apphooked page) in
    'en' and 'de', publish everything and return public Title(s).

    ``title_langs`` may be a single language code (returns one Title) or a
    list of codes (returns a list).  Also stores the created superuser on
    ``self.superuser`` for later use by tests.
    """
    self.apphook_clear()
    superuser = get_user_model().objects.create_superuser('admin', 'admin@admin.com', 'admin')
    self.superuser = superuser
    page = create_page("home", "nav_playground.html", "en",
                       created_by=superuser, published=True)
    create_title('de', page.get_title(), page)
    page.publish('de')
    child_page = create_page("child_page", "nav_playground.html", "en",
                             created_by=superuser, published=True, parent=page)
    create_title('de', child_page.get_title(), child_page)
    child_page.publish('de')
    child_child_page = create_page("child_child_page", "nav_playground.html",
                                   "en", created_by=superuser, published=True, parent=child_page, apphook=apphook,
                                   apphook_namespace=namespace)
    create_title("de", child_child_page.get_title(), child_child_page)
    child_child_page.publish('de')
    # publisher_public is set to draft on publish, issue with onetoone reverse
    child_child_page = self.reload(child_child_page)

    if isinstance(title_langs, six.string_types):
        titles = child_child_page.publisher_public.get_title_obj(title_langs)
    else:
        titles = [child_child_page.publisher_public.get_title_obj(l) for l in title_langs]

    self.reload_urls()

    return titles
@override_settings(CMS_APPHOOKS=['%s.%s' % (APP_MODULE, APP_NAME)])
def test_explicit_apphooks(self):
    """
    Apphooks listed in the CMS_APPHOOKS setting are loaded explicitly,
    and nothing else is registered.
    """
    self.apphook_clear()
    registered = apphook_pool.get_apphooks()
    names = [entry[0] for entry in registered]
    self.assertEqual(len(registered), 1)
    self.assertEqual(names, [APP_NAME])
    self.apphook_clear()
@override_settings(
    INSTALLED_APPS=['cms.test_utils.project.sampleapp'],
    ROOT_URLCONF='cms.test_utils.project.urls_for_apphook_tests',
)
def test_implicit_apphooks(self):
    """
    Test implicit apphook loading with INSTALLED_APPS cms_apps.py
    """
    self.apphook_clear()
    hooks = apphook_pool.get_apphooks()
    app_names = [hook[0] for hook in hooks]
    # The sampleapp cms_apps module registers six apphooks in total.
    self.assertEqual(len(hooks), 6)
    self.assertIn(NS_APP_NAME, app_names)
    self.assertIn(APP_NAME, app_names)
    self.apphook_clear()
def test_apphook_on_root(self):
    """An apphook on the home page serves the app at the site root, while a
    sibling page without an apphook still renders its normal template."""
    self.apphook_clear()
    superuser = get_user_model().objects.create_superuser('admin', 'admin@admin.com', 'admin')
    page = create_page("apphooked-page", "nav_playground.html", "en",
                       created_by=superuser, published=True, apphook="SampleApp")
    blank_page = create_page("not-apphooked-page", "nav_playground.html", "en",
                             created_by=superuser, published=True, apphook="", slug='blankapp')
    english_title = page.title_set.all()[0]
    self.assertEqual(english_title.language, 'en')
    create_title("de", "aphooked-page-de", page)
    self.assertTrue(page.publish('en'))
    self.assertTrue(page.publish('de'))
    self.assertTrue(blank_page.publish('en'))

    # The root URL is handled by the apphook's view/template...
    with force_language("en"):
        response = self.client.get(self.get_pages_root())
        self.assertTemplateUsed(response, 'sampleapp/home.html')
        self.assertContains(response, '<--noplaceholder-->')

        # ...while the non-apphooked page renders the regular cms template.
        response = self.client.get('/en/blankapp/')
        self.assertTemplateUsed(response, 'nav_playground.html')

    self.apphook_clear()
@override_settings(ROOT_URLCONF='cms.test_utils.project.urls_for_apphook_tests')
def test_apphook_on_root_reverse(self):
    """reverse() for an apphook view mounted at the site root must not
    produce a double-slash URL."""
    self.apphook_clear()
    superuser = get_user_model().objects.create_superuser('admin', 'admin@admin.com', 'admin')
    page = create_page("apphooked-page", "nav_playground.html", "en",
                       created_by=superuser, published=True, apphook="SampleApp")
    create_title("de", "aphooked-page-de", page)
    self.assertTrue(page.publish('de'))
    self.assertTrue(page.publish('en'))

    self.reload_urls()

    self.assertFalse(reverse('sample-settings').startswith('//'))
    self.apphook_clear()
@override_settings(ROOT_URLCONF='cms.test_utils.project.second_urls_for_apphook_tests')
def test_get_page_for_apphook(self):
    """applications_page_check() maps an apphook URL back to the page it is
    attached to, for each language."""
    en_title, de_title = self.create_base_structure(APP_NAME, ['en', 'de'])
    with force_language("en"):
        path = reverse('sample-settings')
    request = self.get_request(path)
    request.LANGUAGE_CODE = 'en'
    attached_to_page = applications_page_check(request, path=path[1:])  # strip leading slash
    self.assertEqual(attached_to_page.pk, en_title.page.pk)

    response = self.client.get(path)
    self.assertEqual(response.status_code, 200)

    self.assertTemplateUsed(response, 'sampleapp/home.html')
    self.assertContains(response, en_title.title)
    with force_language("de"):
        path = reverse('sample-settings')
    request = self.get_request(path)
    request.LANGUAGE_CODE = 'de'
    attached_to_page = applications_page_check(request, path=path[1:])  # strip leading slash and language prefix
    self.assertEqual(attached_to_page.pk, de_title.page.pk)

    response = self.client.get(path)
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(response, 'sampleapp/home.html')
    self.assertContains(response, de_title.title)
    self.apphook_clear()
@override_settings(ROOT_URLCONF='cms.test_utils.project.second_urls_for_apphook_tests')
def test_apphook_permissions(self):
    """Marking the apphooked page login_required protects its app URLs:
    anonymous requests go from 200 to a 302 redirect."""
    en_title, de_title = self.create_base_structure(APP_NAME, ['en', 'de'])

    with force_language("en"):
        path = reverse('sample-settings')

    response = self.client.get(path)
    self.assertEqual(response.status_code, 200)

    page = en_title.page.publisher_public
    page.login_required = True
    page.save()
    page.publish('en')

    response = self.client.get(path)
    self.assertEqual(response.status_code, 302)
    self.apphook_clear()
@override_settings(ROOT_URLCONF='cms.test_utils.project.second_urls_for_apphook_tests')
def test_apphook_permissions_preserves_view_name(self):
    """Resolving an apphook URL yields the original view callable, with its
    function/class name intact."""
    self.create_base_structure(APP_NAME, ['en', 'de'])

    expected_names = {
        'sample-settings': 'sample_view',
        'sample-class-view': 'ClassView',
        'sample-class-based-view': 'ClassBasedView',
    }

    with force_language("en"):
        for url_name, view_name in expected_names.items():
            resolved = resolve(reverse(url_name))
            self.assertEqual(resolved.func.__name__, view_name)
def test_apphooks_with_excluded_permissions(self):
    """An apphook can opt out of page permissions: the 'excluded' namespace
    stays public (200) while the other namespace redirects to login (302)."""
    en_title = self.create_base_structure('SampleAppWithExcludedPermissions', 'en')

    with force_language("en"):
        excluded_path = reverse('excluded:example')
        not_excluded_path = reverse('not_excluded:example')

    page = en_title.page.publisher_public
    page.login_required = True
    page.save()
    page.publish('en')

    excluded_response = self.client.get(excluded_path)
    not_excluded_response = self.client.get(not_excluded_path)
    self.assertEqual(excluded_response.status_code, 200)
    self.assertEqual(not_excluded_response.status_code, 302)
    self.apphook_clear()
@override_settings(ROOT_URLCONF='cms.test_utils.project.urls_3')
def test_get_page_for_apphook_on_preview_or_edit(self):
    """Edit-mode requests on apphook URLs still resolve to the attached
    (public) page."""
    if get_user_model().USERNAME_FIELD == 'email':
        superuser = get_user_model().objects.create_superuser('admin', 'admin@admin.com', 'admin@admin.com')
    else:
        superuser = get_user_model().objects.create_superuser('admin', 'admin@admin.com', 'admin')
    page = create_page("home", "nav_playground.html", "en",
                       created_by=superuser, published=True, apphook=APP_NAME)
    create_title('de', page.get_title(), page)
    page.publish('en')
    page.publish('de')
    page.save()
    public_page = page.get_public_object()

    with self.login_user_context(superuser):
        with force_language("en"):
            path = reverse('sample-settings')
            request = self.get_request(path + '?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON'))
            request.LANGUAGE_CODE = 'en'
            # NOTE(review): the return value is unused in this branch -- only
            # the redirect below is asserted; consider asserting on it too.
            attached_to_page = applications_page_check(request, path=path[1:])  # strip leading slash
            response = self.client.get(path+"?edit")
            self.assertContains(response, '?redirect=')
        with force_language("de"):
            path = reverse('sample-settings')
            request = self.get_request(path + '?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON'))
            request.LANGUAGE_CODE = 'de'
            attached_to_page = applications_page_check(request, path=path[1:])  # strip leading slash
            self.assertEqual(attached_to_page.pk, public_page.pk)
@override_settings(ROOT_URLCONF='cms.test_utils.project.second_urls_for_apphook_tests')
def test_get_root_page_for_apphook_with_instance_namespace(self):
    """A namespaced apphook is reversible via both its application and its
    instance namespace, and both map back to the same attached page."""
    en_title = self.create_base_structure(NS_APP_NAME, 'en', 'instance_ns')

    self.reload_urls()
    with force_language("en"):
        # These must simply not raise NoReverseMatch.
        reverse("example_app:example")
        reverse("example1:example")
        reverse("example2:example")
        path = reverse('namespaced_app_ns:sample-root')
        path_instance = reverse('instance_ns:sample-root')
    self.assertEqual(path, path_instance)

    request = self.get_request(path)
    request.LANGUAGE_CODE = 'en'
    attached_to_page = applications_page_check(request, path=path[1:])  # strip leading slash
    self.assertEqual(attached_to_page.pk, en_title.page.pk)
    self.apphook_clear()
@override_settings(ROOT_URLCONF='cms.test_utils.project.second_urls_for_apphook_tests')
def test_get_child_page_for_apphook_with_instance_namespace(self):
    """Sub-URLs reverse identically via app namespace, instance namespace and
    current_app, and resolve back to the apphooked page."""
    en_title = self.create_base_structure(NS_APP_NAME, 'en', 'instance_ns')
    with force_language("en"):
        path = reverse('namespaced_app_ns:sample-settings')
        path_instance1 = reverse('instance_ns:sample-settings')
        path_instance2 = reverse('namespaced_app_ns:sample-settings', current_app='instance_ns')
    self.assertEqual(path, path_instance1)
    self.assertEqual(path, path_instance2)

    request = self.get_request(path)
    request.LANGUAGE_CODE = 'en'
    attached_to_page = applications_page_check(request, path=path[1:])  # strip leading slash
    self.assertEqual(attached_to_page.pk, en_title.page_id)
    self.apphook_clear()
@override_settings(ROOT_URLCONF='cms.test_utils.project.second_urls_for_apphook_tests')
def test_get_sub_page_for_apphook_with_implicit_current_app(self):
    """When the instance namespace equals the app namespace, current-app
    resolution works implicitly."""
    en_title = self.create_base_structure(NS_APP_NAME, 'en', 'namespaced_app_ns')
    with force_language("en"):
        path = reverse('namespaced_app_ns:current-app')
    request = self.get_request(path)
    request.LANGUAGE_CODE = 'en'
    attached_to_page = applications_page_check(request, path=path[1:])  # strip leading slash
    self.assertEqual(attached_to_page.pk, en_title.page.pk)

    response = self.client.get(path)
    self.assertEqual(response.status_code, 200)

    self.assertTemplateUsed(response, 'sampleapp/app.html')
    self.assertContains(response, 'namespaced_app_ns')
    self.assertContains(response, path)
    self.apphook_clear()
@override_settings(ROOT_URLCONF='cms.test_utils.project.second_urls_for_apphook_tests')
def test_default_language_changer_with_implicit_current_app(self):
    """DefaultLanguageChanger maps an apphook URL to its translated counterpart."""
    self.create_base_structure(NS_APP_NAME, ['en', 'de'], 'namespaced_app_ns')
    self.reload_urls()
    with force_language("en"):
        path = reverse('namespaced_app_ns:translated-url')
    request = self.get_request(path)
    request.LANGUAGE_CODE = 'en'
    url = DefaultLanguageChanger(request)('en')
    self.assertEqual(url, path)
    url = DefaultLanguageChanger(request)('de')
    # German variant swaps the '/en' prefix for '/de' and translates the
    # slug ('page' -> 'Seite').
    self.assertEqual(url, '/de%s' % path[3:].replace('/page', '/Seite'))
    self.apphook_clear()
@override_settings(ROOT_URLCONF='cms.test_utils.project.second_urls_for_apphook_tests')
def test_get_i18n_apphook_with_explicit_current_app(self):
    """Two instances of the same namespaced apphook must be reversible per
    instance (and without current_app) in both languages."""
    titles = self.create_base_structure(NS_APP_NAME, ['en', 'de'], 'instance_1')
    public_de_title = titles[1]
    de_title = Title.objects.get(page=public_de_title.page.publisher_draft, language="de")
    de_title.slug = "de"
    de_title.save()
    de_title.page.publish('de')

    page2 = create_page("page2", "nav_playground.html",
                        "en", created_by=self.superuser, published=True, parent=de_title.page.parent,
                        apphook=NS_APP_NAME,
                        apphook_namespace="instance_2")
    create_title("de", "de_title", page2, slug="slug")
    page2.publish('de')

    # Rebuild resolvers so the second apphook instance gets mounted.
    clear_app_resolvers()
    clear_url_caches()

    if APP_MODULE in sys.modules:
        del sys.modules[APP_MODULE]

    self.reload_urls()
    # All of these must simply not raise NoReverseMatch.
    with force_language("de"):
        reverse('namespaced_app_ns:current-app', current_app="instance_1")
        reverse('namespaced_app_ns:current-app', current_app="instance_2")
        reverse('namespaced_app_ns:current-app')
    with force_language("en"):
        reverse('namespaced_app_ns:current-app', current_app="instance_1")
        reverse('namespaced_app_ns:current-app', current_app="instance_2")
        reverse('namespaced_app_ns:current-app')
@override_settings(ROOT_URLCONF='cms.test_utils.project.second_urls_for_apphook_tests')
def test_apphook_include_extra_parameters(self):
    """Extra kwargs passed through the apphook's include() must reach the
    view's rendered output.

    Cleanup: dropped an unused request object (it was built and given a
    LANGUAGE_CODE but never used -- the assertion goes through the client).
    """
    self.create_base_structure(NS_APP_NAME, ['en', 'de'], 'instance_1')
    with force_language("en"):
        path = reverse('namespaced_app_ns:extra_second')
    response = self.client.get(path)
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(response, 'sampleapp/extra.html')
    self.assertContains(response, 'someopts')
@override_settings(ROOT_URLCONF='cms.test_utils.project.second_urls_for_apphook_tests')
def test_get_sub_page_for_apphook_with_explicit_current_app(self):
    """With an explicit instance namespace, the current-app view reports that
    namespace and resolves back to the apphooked page."""
    en_title = self.create_base_structure(NS_APP_NAME, 'en', 'instance_ns')
    with force_language("en"):
        path = reverse('namespaced_app_ns:current-app')
    request = self.get_request(path)
    request.LANGUAGE_CODE = 'en'
    attached_to_page = applications_page_check(request, path=path[1:])  # strip leading slash
    self.assertEqual(attached_to_page.pk, en_title.page.pk)

    response = self.client.get(path)
    self.assertEqual(response.status_code, 200)

    self.assertTemplateUsed(response, 'sampleapp/app.html')
    self.assertContains(response, 'instance_ns')
    self.assertContains(response, path)
    self.apphook_clear()
@override_settings(ROOT_URLCONF='cms.test_utils.project.second_urls_for_apphook_tests')
def test_include_urlconf(self):
    """Both the directly configured and the include()d apphook urlconfs must
    resolve and render, in the default and the 'de' language.

    Refactor: the identical 5-line check was repeated four times; it is now a
    local helper.  The order of the four checks is unchanged.
    """
    self.create_base_structure(APP_NAME, 'en')

    def assert_extra(url_name, marker):
        # Shared check: the named URL renders the extra template with its marker.
        response = self.client.get(reverse(url_name))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'sampleapp/extra.html')
        self.assertContains(response, marker)

    assert_extra('extra_second', "test included urlconf")
    assert_extra('extra_first', "test urlconf")
    with force_language("de"):
        assert_extra('extra_first', "test urlconf")
    with force_language("de"):
        assert_extra('extra_second', "test included urlconf")

    self.apphook_clear()
@override_settings(CMS_PERMISSION=False, ROOT_URLCONF='cms.test_utils.project.urls_2')
def test_apphook_breaking_under_home_with_new_path_caching(self):
    """Regression: a descendant slug containing the home slug must not corrupt
    the cached apphook URL prefix."""
    home = create_page("home", "nav_playground.html", "en", published=True)
    child = create_page("child", "nav_playground.html", "en", published=True, parent=home)
    # not-home is what breaks stuff, because it contains the slug of the home page
    not_home = create_page("not-home", "nav_playground.html", "en", published=True, parent=child)
    create_page("subchild", "nav_playground.html", "en", published=True, parent=not_home, apphook='SampleApp')
    with force_language("en"):
        self.reload_urls()
        urlpatterns = get_app_patterns()
        resolver = urlpatterns[0]
        url = resolver.reverse('sample-root')
        self.assertEqual(url, 'child/not-home/subchild/')
@override_settings(ROOT_URLCONF='cms.test_utils.project.urls')
def test_apphook_urlpattern_order(self):
    """With the real cms.urls included, apphook patterns must be matched
    before the catch-all cms page pattern (which has to stay last).  The
    other testcases replicate the inclusion code, so they do not cover this.
    """
    self.create_base_structure(APP_NAME, 'en')
    url = reverse('extra_second')
    response = self.client.get(url)
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(response, 'sampleapp/extra.html')
    self.assertContains(response, "test included urlconf")
@override_settings(ROOT_URLCONF='cms.test_utils.project.urls')
def test_apphooks_receive_url_params(self):
    """URL keyword arguments must be forwarded into the apphook view context."""
    self.create_base_structure(APP_NAME, 'en')
    param = 'is-my-param-really-in-the-context-QUESTIONMARK'
    url = reverse('sample-params', kwargs={'my_params': param})
    response = self.client.get(url)
    self.assertEqual(response.status_code, 200)
    self.assertTemplateUsed(response, 'sampleapp/home.html')
    self.assertContains(response, 'my_params: %s' % param)
@override_settings(ROOT_URLCONF='cms.test_utils.project.third_urls_for_apphook_tests')
def test_multiple_apphooks(self):
    """Two different apphooks on sibling pages must both be reversible."""
    # test for #1538
    self.apphook_clear()
    superuser = get_user_model().objects.create_superuser('admin', 'admin@admin.com', 'admin')
    create_page("home", "nav_playground.html", "en", created_by=superuser, published=True, )
    create_page("apphook1-page", "nav_playground.html", "en",
                created_by=superuser, published=True, apphook="SampleApp")
    create_page("apphook2-page", "nav_playground.html", "en",
                created_by=superuser, published=True, apphook="SampleApp2")

    # Must not raise NoReverseMatch.
    reverse('sample-root')
    reverse('sample2-root')
    self.apphook_clear()
def test_apphook_pool_register_returns_apphook(self):
    """apphook_pool.register must return the class so it works as a decorator."""
    @apphook_pool.register
    class TestApp(CMSApp):
        name = "Test App"
    self.assertIsNotNone(TestApp)

    # Now test the quick return codepath, when apphooks is not empty
    apphook_pool.apphooks.append("foo")
    @apphook_pool.register
    class TestApp2(CMSApp):
        name = "Test App 2"
    self.assertIsNotNone(TestApp2)
@override_settings(ROOT_URLCONF='cms.test_utils.project.second_urls_for_apphook_tests')
def test_toolbar_current_app_namespace(self):
    """Toolbar classes belonging to the apphook's app report is_current_app;
    toolbars from other apps do not."""
    self.create_base_structure(NS_APP_NAME, 'en', 'instance_ns')
    with force_language("en"):
        path = reverse('namespaced_app_ns:sample-settings')
    request = self.get_request(path)
    toolbar = CMSToolbar(request)
    self.assertTrue(toolbar.toolbars['cms.test_utils.project.sampleapp.cms_toolbars.CategoryToolbar'].is_current_app)
    self.assertFalse(toolbar.toolbars['cms.test_utils.project.extensionapp.cms_toolbars.MyTitleExtensionToolbar'].is_current_app)

    # Testing a decorated view
    with force_language("en"):
        path = reverse('namespaced_app_ns:sample-exempt')
    request = self.get_request(path)
    toolbar = CMSToolbar(request)
    self.assertEqual(toolbar.toolbars['cms.test_utils.project.sampleapp.cms_toolbars.CategoryToolbar'].app_path,
                     'cms.test_utils.project.sampleapp')
    self.assertTrue(toolbar.toolbars['cms.test_utils.project.sampleapp.cms_toolbars.CategoryToolbar'].is_current_app)
    self.assertEqual(toolbar.toolbars['cms.test_utils.project.extensionapp.cms_toolbars.MyTitleExtensionToolbar'].app_path,
                     'cms.test_utils.project.sampleapp')
    self.assertFalse(toolbar.toolbars['cms.test_utils.project.extensionapp.cms_toolbars.MyTitleExtensionToolbar'].is_current_app)
@override_settings(ROOT_URLCONF='cms.test_utils.project.second_urls_for_apphook_tests')
def test_toolbar_current_app_apphook_with_implicit_current_app(self):
    """With an implicit current app, the sampleapp toolbar is current and the
    extensionapp toolbar is not, even though both share the same app_path."""
    self.create_base_structure(NS_APP_NAME, 'en', 'namespaced_app_ns')
    with force_language("en"):
        path = reverse('namespaced_app_ns:current-app')
    request = self.get_request(path)
    toolbar = CMSToolbar(request)
    self.assertEqual(toolbar.toolbars['cms.test_utils.project.sampleapp.cms_toolbars.CategoryToolbar'].app_path,
                     'cms.test_utils.project.sampleapp')
    self.assertTrue(toolbar.toolbars['cms.test_utils.project.sampleapp.cms_toolbars.CategoryToolbar'].is_current_app)
    self.assertEqual(toolbar.toolbars['cms.test_utils.project.extensionapp.cms_toolbars.MyTitleExtensionToolbar'].app_path,
                     'cms.test_utils.project.sampleapp')
    self.assertFalse(toolbar.toolbars['cms.test_utils.project.extensionapp.cms_toolbars.MyTitleExtensionToolbar'].is_current_app)
@override_settings(ROOT_URLCONF='cms.test_utils.project.placeholderapp_urls')
def test_toolbar_no_namespace(self):
    """Without a namespace, only the placeholderapp toolbar is the current app."""
    # Test with a basic application with no defined app_name and no namespace
    self.create_base_structure(APP_NAME, 'en')
    path = reverse('detail', kwargs={'id': 20})
    request = self.get_request(path)
    toolbar = CMSToolbar(request)
    self.assertFalse(toolbar.toolbars['cms.test_utils.project.sampleapp.cms_toolbars.CategoryToolbar'].is_current_app)
    self.assertFalse(toolbar.toolbars['cms.test_utils.project.extensionapp.cms_toolbars.MyTitleExtensionToolbar'].is_current_app)
    self.assertTrue(toolbar.toolbars['cms.test_utils.project.placeholderapp.cms_toolbars.Example1Toolbar'].is_current_app)
@override_settings(ROOT_URLCONF='cms.test_utils.project.placeholderapp_urls')
def test_toolbar_multiple_supported_apps(self):
    """Several toolbars can share the placeholderapp app_path, but only those
    registered for it flag is_current_app."""
    # Test with a basic application with no defined app_name and no namespace
    self.create_base_structure(APP_NAME, 'en')
    path = reverse('detail', kwargs={'id': 20})
    request = self.get_request(path)
    toolbar = CMSToolbar(request)
    self.assertEqual(toolbar.toolbars['cms.test_utils.project.sampleapp.cms_toolbars.CategoryToolbar'].app_path,
                     'cms.test_utils.project.placeholderapp')
    self.assertFalse(toolbar.toolbars['cms.test_utils.project.sampleapp.cms_toolbars.CategoryToolbar'].is_current_app)
    self.assertEqual(toolbar.toolbars['cms.test_utils.project.extensionapp.cms_toolbars.MyTitleExtensionToolbar'].app_path,
                     'cms.test_utils.project.placeholderapp')
    self.assertFalse(toolbar.toolbars['cms.test_utils.project.extensionapp.cms_toolbars.MyTitleExtensionToolbar'].is_current_app)
    self.assertEqual(toolbar.toolbars['cms.test_utils.project.extensionapp.cms_toolbars.MyPageExtensionToolbar'].app_path,
                     'cms.test_utils.project.placeholderapp')
    self.assertTrue(toolbar.toolbars['cms.test_utils.project.extensionapp.cms_toolbars.MyPageExtensionToolbar'].is_current_app)
    self.assertEqual(toolbar.toolbars['cms.test_utils.project.placeholderapp.cms_toolbars.Example1Toolbar'].app_path,
                     'cms.test_utils.project.placeholderapp')
    self.assertTrue(toolbar.toolbars['cms.test_utils.project.placeholderapp.cms_toolbars.Example1Toolbar'].is_current_app)
    @override_settings(
        CMS_APPHOOKS=['cms.test_utils.project.placeholderapp.cms_apps.Example1App'],
        ROOT_URLCONF='cms.test_utils.project.placeholderapp_urls',
    )
    def test_toolbar_staff(self):
        # Test that the toolbar contains edit mode switcher if placeholders are available
        """The structure-mode switcher (a right-side toolbar item) must only be
        offered to users who can both change the object and use structure mode:
        superuser -> 1 item; plain staff -> 0; staff + change_example1 -> 0;
        staff + change_example1 + use_structure -> 1."""
        apphooks = (
            'cms.test_utils.project.placeholderapp.cms_apps.Example1App',
        )
        with self.settings(CMS_APPHOOKS=apphooks, ROOT_URLCONF='cms.test_utils.project.placeholderapp_urls'):
            self.create_base_structure('Example1App', 'en')
            ex1 = Example1.objects.create(char_1='1', char_2='2', char_3='3', char_4='4', date_field=now())
            path = reverse('example_detail', kwargs={'pk': ex1.pk})
            # Superuser: full access, switcher must be present.
            self.user = self._create_user('admin_staff', True, True)
            with self.login_user_context(self.user):
                response = self.client.get(path+"?edit")
                toolbar = CMSToolbar(response.context['request'])
                toolbar.populate()
                placeholder_toolbar = PlaceholderToolbar(response.context['request'], toolbar, True, path)
                placeholder_toolbar.populate()
                placeholder_toolbar.init_placeholders_from_request()
                placeholder_toolbar.add_structure_mode()
                self.assertEqual(len(placeholder_toolbar.toolbar.get_right_items()), 1)
            # Plain staff user without model permissions: no switcher.
            self.user = self._create_user('staff', True, False)
            with self.login_user_context(self.user):
                response = self.client.get(path+"?edit")
                response.context['request'].user = get_user_model().objects.get(pk=self.user.pk)
                toolbar = CMSToolbar(response.context['request'])
                toolbar.populate()
                placeholder_toolbar = PlaceholderToolbar(response.context['request'], toolbar, True, path)
                placeholder_toolbar.populate()
                placeholder_toolbar.init_placeholders_from_request()
                placeholder_toolbar.add_structure_mode()
                self.assertEqual(len(placeholder_toolbar.toolbar.get_right_items()), 0)
            # change_example1 alone is still not enough for structure mode.
            self.user.user_permissions.add(Permission.objects.get(codename='change_example1'))
            with self.login_user_context(self.user):
                response = self.client.get(path+"?edit")
                response.context['request'].user = get_user_model().objects.get(pk=self.user.pk)
                toolbar = CMSToolbar(response.context['request'])
                toolbar.populate()
                placeholder_toolbar = PlaceholderToolbar(response.context['request'], toolbar, True, path)
                placeholder_toolbar.populate()
                placeholder_toolbar.init_placeholders_from_request()
                placeholder_toolbar.add_structure_mode()
                self.assertEqual(len(placeholder_toolbar.toolbar.get_right_items()), 0)
                # Granting use_structure finally enables the switcher.
                permission = Permission.objects.get(codename='use_structure')
                self.user.user_permissions.add(permission)
                response.context['request'].user = get_user_model().objects.get(pk=self.user.pk)
                placeholder_toolbar = PlaceholderToolbar(response.context['request'], toolbar, True, path)
                placeholder_toolbar.populate()
                placeholder_toolbar.init_placeholders_from_request()
                placeholder_toolbar.add_structure_mode()
                self.assertEqual(len(placeholder_toolbar.toolbar.get_right_items()), 1)
            self.user = None
    def test_page_edit_redirect_models(self):
        """The ``cms_page_resolve`` admin endpoint must resolve a non-page
        object (pk + model) to that object's absolute URL."""
        apphooks = (
            'cms.test_utils.project.placeholderapp.cms_apps.Example1App',
        )
        ex1 = Example1.objects.create(char_1="char_1", char_2="char_2",
                                      char_3="char_3", char_4="char_4")
        with self.settings(CMS_APPHOOKS=apphooks, ROOT_URLCONF='cms.test_utils.project.placeholderapp_urls'):
            self.create_base_structure('Example1App', 'en')
            url = admin_reverse('cms_page_resolve')
            self.user = self._create_user('admin_staff', True, True)
            with self.login_user_context(self.user):
                # parameters - non page object
                response = self.client.post(url, {'pk': ex1.pk, 'model': 'placeholderapp.example1'})
                self.assertEqual(response.content.decode('utf-8'), ex1.get_absolute_url())
    def test_nested_apphooks_urls(self):
        # make sure that urlparams actually reach the apphook views
        """A child page with its own apphook nested under a parent apphook page
        must still dispatch URL parameters to the correct (child) app view."""
        with self.settings(ROOT_URLCONF='cms.test_utils.project.urls'):
            self.apphook_clear()
            superuser = get_user_model().objects.create_superuser('admin', 'admin@admin.com', 'admin')
            create_page("home", "nav_playground.html", "en", created_by=superuser, published=True, )
            parent_page = create_page("parent-apphook-page", "nav_playground.html", "en",
                                      created_by=superuser, published=True, apphook="ParentApp")
            create_page("child-apphook-page", "nav_playground.html", "en", parent=parent_page,
                        created_by=superuser, published=True, apphook="ChildApp")
            parent_app_path = reverse('parentapp_view', kwargs={'path': 'parent/path/'})
            child_app_path = reverse('childapp_view', kwargs={'path': 'child-path/'})
            # Ensure the page structure is ok before getting responses
            self.assertEqual(parent_app_path, '/en/parent-apphook-page/parent/path/')
            self.assertEqual(child_app_path, '/en/parent-apphook-page/child-apphook-page/child-path/')
            # Get responses for both paths and ensure that the right view will answer
            response = self.client.get(parent_app_path)
            self.assertContains(response, 'parent app content', status_code=200)
            response = self.client.get(child_app_path)
            self.assertContains(response, 'child app content', status_code=200)
            self.apphook_clear()
class ApphooksPageLanguageUrlTestCase(CMSTestCase):
    """Tests for the ``page_language_url`` template tag on apphooked pages.

    setUp/tearDown aggressively purge URL resolvers, URL caches and cached
    URLconf modules so that apphook URL patterns are re-evaluated per test.
    """

    def setUp(self):
        clear_app_resolvers()
        clear_url_caches()
        if APP_MODULE in sys.modules:
            del sys.modules[APP_MODULE]
        self.reload_urls()

    def tearDown(self):
        clear_app_resolvers()
        clear_url_caches()
        if APP_MODULE in sys.modules:
            del sys.modules[APP_MODULE]
        self.apphook_clear()

    def reload_urls(self):
        # Drop cached URLconf modules so they are re-imported (and thus
        # re-built with the current apphook configuration) on next access.
        from django.conf import settings
        url_modules = [
            'cms.urls',
            'cms.test_utils.project.second_cms_urls_for_apphook_tests',
            settings.ROOT_URLCONF,
        ]
        clear_app_resolvers()
        clear_url_caches()
        for module in url_modules:
            if module in sys.modules:
                del sys.modules[module]

    def test_page_language_url_for_apphook(self):
        """page_language_url must keep the apphook sub-path while switching the
        language prefix, using the translated page slug where one exists."""
        self.apphook_clear()
        superuser = get_user_model().objects.create_superuser('admin', 'admin@admin.com', 'admin')
        page = create_page("home", "nav_playground.html", "en",
                           created_by=superuser)
        create_title('de', page.get_title(), page)
        page.publish('en')
        page.publish('de')
        child_page = create_page("child_page", "nav_playground.html", "en",
                                 created_by=superuser, parent=page)
        create_title('de', child_page.get_title(), child_page)
        child_page.publish('en')
        child_page.publish('de')
        child_child_page = create_page("child_child_page", "nav_playground.html",
            "en", created_by=superuser, parent=child_page, apphook='SampleApp')
        create_title("de", '%s_de' % child_child_page.get_title(), child_child_page)
        child_child_page.publish('en')
        child_child_page.publish('de')
        # publisher_public is set to draft on publish, issue with one to one reverse
        child_child_page = self.reload(child_child_page)
        with force_language("en"):
            path = reverse('extra_first')
        request = self.get_request(path)
        request.LANGUAGE_CODE = 'en'
        request.current_page = child_child_page
        fake_context = {'request': request}
        tag = DumbPageLanguageUrl()
        output = tag.get_context(fake_context, 'en')
        url = output['content']
        self.assertEqual(url, '/en/child_page/child_child_page/extra_1/')
        output = tag.get_context(fake_context, 'de')
        url = output['content']
        # look the extra "_de"
        self.assertEqual(url, '/de/child_page/child_child_page_de/extra_1/')
        output = tag.get_context(fake_context, 'fr')
        url = output['content']
        self.assertEqual(url, '/fr/child_page/child_child_page/extra_1/')
        self.apphook_clear()
| bsd-3-clause |
havt/odoo | addons/sale_crm/__init__.py | 353 | 1076 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import wizard
import sale_crm
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
SerpentCS/odoo | openerp/addons/base/ir/ir_ui_view.py | 44 | 52067 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import collections
import copy
import datetime
import dateutil
from dateutil.relativedelta import relativedelta
import fnmatch
import logging
import os
import time
from operator import itemgetter
import simplejson
import werkzeug
import HTMLParser
from lxml import etree
import openerp
from openerp import tools, api
from openerp.http import request
from openerp.osv import fields, osv, orm
from openerp.tools import graph, SKIPPED_ELEMENT_TYPES, SKIPPED_ELEMENTS
from openerp.tools.parse_version import parse_version
from openerp.tools.safe_eval import safe_eval as eval
from openerp.tools.view_validation import valid_view
from openerp.tools import misc
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
MOVABLE_BRANDING = ['data-oe-model', 'data-oe-id', 'data-oe-field', 'data-oe-xpath', 'data-oe-source-id']
def keep_query(*keep_params, **additional_params):
    """Build a query string from the current request's parameters.

    Keeps every current querystring parameter whose name matches one of the
    ``keep_params`` patterns (``fnmatch``-style wildcards are supported, e.g.
    ``keep_query('search', 'shop_*', page=4)``) and adds the parameters given
    in ``additional_params``; an explicit additional parameter always wins
    over a kept one.  Multi-valued parameters are collapsed into a single
    comma-separated value.  Called with no arguments at all, every current
    parameter is kept.
    """
    if not keep_params and not additional_params:
        keep_params = ('*',)
    result = additional_params.copy()
    current_args = request.httprequest.args
    names = current_args.keys()
    for pattern in keep_params:
        # fnmatch.filter only yields names drawn from ``names``, so every
        # match is guaranteed to exist in the current querystring.
        for name in fnmatch.filter(names, pattern):
            if name not in additional_params:
                result[name] = ','.join(current_args.getlist(name))
    return werkzeug.urls.url_encode(result)
class view_custom(osv.osv):
    """Per-user customization of a view's architecture (e.g. dashboards).

    Each record stores a full replacement ``arch`` for one (user, view) pair;
    the most recent customization wins thanks to ``_order``.
    """
    _name = 'ir.ui.view.custom'
    _order = 'create_date desc'  # search(limit=1) should return the last customization
    _columns = {
        'ref_id': fields.many2one('ir.ui.view', 'Original View', select=True, required=True, ondelete='cascade'),
        'user_id': fields.many2one('res.users', 'User', select=True, required=True, ondelete='cascade'),
        'arch': fields.text('View Architecture', required=True),
    }

    def name_get(self, cr, uid, ids, context=None):
        # Display each customization by the owning user's name.
        return [(rec.id, rec.user_id.name) for rec in self.browse(cr, uid, ids, context=context)]

    def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=100):
        # Search customizations by the name of the user who owns them.
        if args is None:
            args = []
        if name:
            ids = self.search(cr, user, [('user_id', operator, name)] + args, limit=limit)
            return self.name_get(cr, user, ids, context=context)
        return super(view_custom, self).name_search(cr, user, name, args=args, operator=operator, context=context, limit=limit)

    def _auto_init(self, cr, context=None):
        # Create the (user_id, ref_id) lookup index if it does not exist yet.
        super(view_custom, self)._auto_init(cr, context)
        cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'ir_ui_view_custom_user_id_ref_id\'')
        if not cr.fetchone():
            cr.execute('CREATE INDEX ir_ui_view_custom_user_id_ref_id ON ir_ui_view_custom (user_id, ref_id)')
def _hasclass(context, *cls):
    """Return True when the XPath context node carries every class in *cls*.

    The node's ``class`` attribute is treated as a whitespace-separated list
    of class tokens; a missing attribute means "no classes".
    """
    present = context.context_node.attrib.get('class', '').split()
    return set(present).issuperset(cls)
# Register ``hasclass(...)`` in lxml's default (un-prefixed) XPath function
# namespace so view inheritance specs can match nodes by CSS class, e.g.
# //div[hasclass('oe_title')].
xpath_utils = etree.FunctionNamespace(None)
xpath_utils['hasclass'] = _hasclass
class view(osv.osv):
_name = 'ir.ui.view'
def _get_model_data(self, cr, uid, ids, fname, args, context=None):
result = dict.fromkeys(ids, False)
IMD = self.pool['ir.model.data']
data_ids = IMD.search_read(cr, uid, [('res_id', 'in', ids), ('model', '=', 'ir.ui.view')], ['res_id'], context=context)
result.update(map(itemgetter('res_id', 'id'), data_ids))
return result
def _views_from_model_data(self, cr, uid, ids, context=None):
IMD = self.pool['ir.model.data']
data_ids = IMD.search_read(cr, uid, [('id', 'in', ids), ('model', '=', 'ir.ui.view')], ['res_id'], context=context)
return map(itemgetter('res_id'), data_ids)
_columns = {
'name': fields.char('View Name', required=True),
'model': fields.char('Object', select=True),
'priority': fields.integer('Sequence', required=True),
'type': fields.selection([
('tree','Tree'),
('form','Form'),
('graph', 'Graph'),
('calendar', 'Calendar'),
('diagram','Diagram'),
('gantt', 'Gantt'),
('kanban', 'Kanban'),
('search','Search'),
('qweb', 'QWeb')], string='View Type'),
'arch': fields.text('View Architecture', required=True),
'inherit_id': fields.many2one('ir.ui.view', 'Inherited View', ondelete='restrict', select=True),
'inherit_children_ids': fields.one2many('ir.ui.view','inherit_id', 'Inherit Views'),
'field_parent': fields.char('Child Field'),
'model_data_id': fields.function(_get_model_data, type='many2one', relation='ir.model.data', string="Model Data",
store={
_name: (lambda s, c, u, i, ctx=None: i, None, 10),
'ir.model.data': (_views_from_model_data, ['model', 'res_id'], 10),
}),
'xml_id': fields.function(osv.osv.get_xml_id, type='char', size=128, string="External ID",
help="ID of the view defined in xml file"),
'groups_id': fields.many2many('res.groups', 'ir_ui_view_group_rel', 'view_id', 'group_id',
string='Groups', help="If this field is empty, the view applies to all users. Otherwise, the view applies to the users of those groups only."),
'model_ids': fields.one2many('ir.model.data', 'res_id', domain=[('model','=','ir.ui.view')], auto_join=True),
'create_date': fields.datetime('Create Date', readonly=True),
'write_date': fields.datetime('Last Modification Date', readonly=True),
'mode': fields.selection(
[('primary', "Base view"), ('extension', "Extension View")],
string="View inheritance mode", required=True,
help="""Only applies if this view inherits from an other one (inherit_id is not False/Null).
* if extension (default), if this view is requested the closest primary view
is looked up (via inherit_id), then all views inheriting from it with this
view's model are applied
* if primary, the closest primary view is fully resolved (even if it uses a
different model than this one), then this view's inheritance specs
(<xpath/>) are applied, and the result is used as if it were this view's
actual arch.
"""),
'active': fields.boolean("Active",
help="""If this view is inherited,
* if True, the view always extends its parent
* if False, the view currently does not extend its parent but can be enabled
"""),
}
_defaults = {
'mode': 'primary',
'active': True,
'priority': 16,
}
_order = "priority,name"
# Holds the RNG schema
_relaxng_validator = None
def _relaxng(self):
if not self._relaxng_validator:
frng = tools.file_open(os.path.join('base','rng','view.rng'))
try:
relaxng_doc = etree.parse(frng)
self._relaxng_validator = etree.RelaxNG(relaxng_doc)
except Exception:
_logger.exception('Failed to load RelaxNG XML schema for views validation')
finally:
frng.close()
return self._relaxng_validator
    def _check_xml(self, cr, uid, ids, context=None):
        """Constraint: validate each view's fully-resolved architecture.

        The combined arch (inheritance applied) is parsed, its fields are
        checked via postprocess_and_fields, and pre-7.0 archs are additionally
        validated against the RelaxNG schema plus valid_view() rules.
        """
        if context is None:
            context = {}
        # check_view_ids lets get_inheriting_views_arch consider the views
        # being validated even during module initialization.
        context = dict(context, check_view_ids=ids)

        # Sanity checks: the view should not break anything upon rendering!
        # Any exception raised below will cause a transaction rollback.
        for view in self.browse(cr, uid, ids, context):
            view_def = self.read_combined(cr, uid, view.id, ['arch'], context=context)
            view_arch_utf8 = view_def['arch']
            if view.type != 'qweb':
                view_doc = etree.fromstring(view_arch_utf8)
                # verify that all fields used are valid, etc.
                self.postprocess_and_fields(cr, uid, view.model, view_doc, view.id, context=context)
                # RNG-based validation is not possible anymore with 7.0 forms
                view_docs = [view_doc]
                if view_docs[0].tag == 'data':
                    # A <data> element is a wrapper for multiple root nodes
                    view_docs = view_docs[0]
                validator = self._relaxng()
                for view_arch in view_docs:
                    version = view_arch.get('version', '7.0')
                    if parse_version(version) < parse_version('7.0') and validator and not validator.validate(view_arch):
                        for error in validator.error_log:
                            _logger.error(tools.ustr(error))
                        return False
                    if not valid_view(view_arch):
                        return False
        return True
_sql_constraints = [
('inheritance_mode',
"CHECK (mode != 'extension' OR inherit_id IS NOT NULL)",
"Invalid inheritance mode: if the mode is 'extension', the view must"
" extend an other view"),
]
_constraints = [
(_check_xml, 'Invalid view definition', ['arch']),
]
def _auto_init(self, cr, context=None):
super(view, self)._auto_init(cr, context)
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'ir_ui_view_model_type_inherit_id\'')
if not cr.fetchone():
cr.execute('CREATE INDEX ir_ui_view_model_type_inherit_id ON ir_ui_view (model, inherit_id)')
def _compute_defaults(self, cr, uid, values, context=None):
if 'inherit_id' in values:
values.setdefault(
'mode', 'extension' if values['inherit_id'] else 'primary')
return values
    def create(self, cr, uid, values, context=None):
        """Create a view, inferring missing ``type`` (from the parent view or
        the arch's root tag) and ``name`` ("<model> <type>"), and flushing the
        view cache so the new view is taken into account immediately."""
        if not values.get('type'):
            if values.get('inherit_id'):
                # An extension shares its parent's view type.
                values['type'] = self.browse(cr, uid, values['inherit_id'], context).type
            else:
                # The root XML tag names the view type (form, tree, qweb, ...).
                values['type'] = etree.fromstring(values['arch']).tag
        if not values.get('name'):
            values['name'] = "%s %s" % (values.get('model'), values['type'])
        self.clear_cache()
        return super(view, self).create(
            cr, uid,
            self._compute_defaults(cr, uid, values, context=context),
            context=context)
    def write(self, cr, uid, ids, vals, context=None):
        """Write on views, dropping related per-user customizations and
        flushing the view cache so every user sees the updated views."""
        if not isinstance(ids, (list, tuple)):
            ids = [ids]
        if context is None:
            context = {}

        # drop the corresponding view customizations (used for dashboards for example), otherwise
        # not all users would see the updated views
        custom_view_ids = self.pool.get('ir.ui.view.custom').search(cr, uid, [('ref_id', 'in', ids)])
        if custom_view_ids:
            self.pool.get('ir.ui.view.custom').unlink(cr, uid, custom_view_ids)

        self.clear_cache()
        ret = super(view, self).write(
            cr, uid, ids,
            self._compute_defaults(cr, uid, vals, context=context),
            context)
        return ret
def toggle(self, cr, uid, ids, context=None):
""" Switches between enabled and disabled statuses
"""
for view in self.browse(cr, uid, ids, context=dict(context or {}, active_test=False)):
view.write({'active': not view.active})
# default view selection
def default_view(self, cr, uid, model, view_type, context=None):
""" Fetches the default view for the provided (model, view_type) pair:
primary view with the lowest priority.
:param str model:
:param int view_type:
:return: id of the default view of False if none found
:rtype: int
"""
domain = [
['model', '=', model],
['type', '=', view_type],
['mode', '=', 'primary'],
]
ids = self.search(cr, uid, domain, limit=1, context=context)
if not ids:
return False
return ids[0]
#------------------------------------------------------
# Inheritance mecanism
#------------------------------------------------------
    def get_inheriting_views_arch(self, cr, uid, view_id, model, context=None):
        """Retrieves the architecture of views that inherit from the given view, from the sets of
           views that should currently be used in the system. During the module upgrade phase it
           may happen that a view is present in the database but the fields it relies on are not
           fully loaded yet. This method only considers views that belong to modules whose code
           is already loaded. Custom views defined directly in the database are loaded only
           after the module initialization phase is completely finished.

           :param int view_id: id of the view whose inheriting views should be retrieved
           :param str model: model identifier of the inheriting views.
           :rtype: list of tuples
           :return: [(view_arch,view_id), ...]
        """
        if not context:
            context = {}
        # Browse as superuser (uid 1) but filter by THIS user's groups below.
        user = self.pool['res.users'].browse(cr, 1, uid, context=context)
        user_groups = frozenset(user.groups_id or ())
        conditions = [
            ['inherit_id', '=', view_id],
            ['model', '=', model],
            ['mode', '=', 'extension'],
            ['active', '=', True],
        ]
        if self.pool._init and not context.get('load_all_views'):
            # Module init currently in progress, only consider views from
            # modules whose code is already loaded
            conditions.extend([
                '|',
                ['model_ids.module', 'in', tuple(self.pool._init_modules)],
                ['id', 'in', context.get('check_view_ids') or (0,)],
            ])
        view_ids = self.search(cr, uid, conditions, context=context)
        # Exclude views restricted to groups the user does not belong to.
        return [(view.arch, view.id)
                for view in self.browse(cr, 1, view_ids, context)
                if not (view.groups_id and user_groups.isdisjoint(view.groups_id))]
    def raise_view_error(self, cr, uid, message, view_id, context=None):
        """Log and raise an AttributeError for ``view_id``, enriching
        ``message`` with the view's name, ids, model and parent for debugging."""
        view = self.browse(cr, uid, view_id, context)
        not_avail = _('n/a')
        message = ("%(msg)s\n\n" +
                   _("Error context:\nView `%(view_name)s`") +
                   "\n[view_id: %(viewid)s, xml_id: %(xmlid)s, "
                   "model: %(model)s, parent_id: %(parent)s]") % \
                        {
                          'view_name': view.name or not_avail,
                          'viewid': view_id or not_avail,
                          'xmlid': view.xml_id or not_avail,
                          'model': view.model or not_avail,
                          'parent': view.inherit_id.id or not_avail,
                          'msg': message,
                        }
        _logger.error(message)
        raise AttributeError(message)
    def locate_node(self, arch, spec):
        """ Locate a node in a source (parent) architecture.

        Given a complete source (parent) architecture (i.e. the field
        `arch` in a view), and a 'spec' node (a node in an inheriting
        view that specifies the location in the source view of what
        should be changed), return (if it exists) the node in the
        source view matching the specification.

        :param arch: a parent architecture to modify
        :param spec: a modifying node in an inheriting view
        :return: a node in the source matching the spec
        """
        if spec.tag == 'xpath':
            # Arbitrary XPath expression: first match wins.
            nodes = arch.xpath(spec.get('expr'))
            return nodes[0] if nodes else None
        elif spec.tag == 'field':
            # Only compare the field name: a field can be only once in a given view
            # at a given level (and for multilevel expressions, we should use xpath
            # inheritance spec anyway).
            for node in arch.iter('field'):
                if node.get('name') == spec.get('name'):
                    return node
            return None
        # Generic tag match: every attribute of the spec (except the
        # spec-control attributes 'position' and 'version') must match.
        for node in arch.iter(spec.tag):
            if isinstance(node, SKIPPED_ELEMENT_TYPES):
                continue
            if all(node.get(attr) == spec.get(attr) for attr in spec.attrib
                   if attr not in ('position','version')):
                # Version spec should match parent's root element's version
                if spec.get('version') and spec.get('version') != arch.get('version'):
                    return None
                return node
        return None
def inherit_branding(self, specs_tree, view_id, root_id):
for node in specs_tree.iterchildren(tag=etree.Element):
xpath = node.getroottree().getpath(node)
if node.tag == 'data' or node.tag == 'xpath':
self.inherit_branding(node, view_id, root_id)
else:
node.set('data-oe-id', str(view_id))
node.set('data-oe-source-id', str(root_id))
node.set('data-oe-xpath', xpath)
node.set('data-oe-model', 'ir.ui.view')
node.set('data-oe-field', 'arch')
return specs_tree
    def apply_inheritance_specs(self, cr, uid, source, specs_tree, inherit_id, context=None):
        """ Apply an inheriting view (a descendant of the base view)

        Apply to a source architecture all the spec nodes (i.e. nodes
        describing where and what changes to apply to some parent
        architecture) given by an inheriting view.

        :param Element source: a parent architecture to modify
        :param Element specs_tree: a modifying architecture in an inheriting view
        :param inherit_id: the database id of specs_arch
        :return: a modified source where the specs are applied
        :rtype: Element
        """
        # Queue of specification nodes (i.e. nodes describing where and
        # changes to apply to some parent architecture).
        specs = [specs_tree]

        while len(specs):
            spec = specs.pop(0)
            if isinstance(spec, SKIPPED_ELEMENT_TYPES):
                continue
            if spec.tag == 'data':
                # <data> is a transparent container: enqueue its children.
                specs += [c for c in spec]
                continue
            node = self.locate_node(source, spec)
            if node is not None:
                pos = spec.get('position', 'inside')
                if pos == 'replace':
                    if node.getparent() is None:
                        # Replacing the root: the whole arch becomes the spec's
                        # first child.
                        source = copy.deepcopy(spec[0])
                    else:
                        for child in spec:
                            node.addprevious(child)
                        node.getparent().remove(node)
                elif pos == 'attributes':
                    # <attribute name="x">value</attribute>: empty text deletes
                    # the attribute, otherwise it is (re)set.
                    for child in spec.getiterator('attribute'):
                        attribute = (child.get('name'), child.text or None)
                        if attribute[1]:
                            node.set(attribute[0], attribute[1])
                        elif attribute[0] in node.attrib:
                            del node.attrib[attribute[0]]
                else:
                    sib = node.getnext()
                    for child in spec:
                        if pos == 'inside':
                            node.append(child)
                        elif pos == 'after':
                            # Insert in spec order right after the target node.
                            if sib is None:
                                node.addnext(child)
                                node = child
                            else:
                                sib.addprevious(child)
                        elif pos == 'before':
                            node.addprevious(child)
                        else:
                            self.raise_view_error(cr, uid, _("Invalid position attribute: '%s'") % pos, inherit_id, context=context)
            else:
                attrs = ''.join([
                    ' %s="%s"' % (attr, spec.get(attr))
                    for attr in spec.attrib
                    if attr != 'position'
                ])
                tag = "<%s%s>" % (spec.tag, attrs)
                self.raise_view_error(cr, uid, _("Element '%s' cannot be located in parent view") % tag, inherit_id, context=context)

        return source
    def apply_view_inheritance(self, cr, uid, source, source_id, model, root_id=None, context=None):
        """ Apply all the (directly and indirectly) inheriting views.

        :param source: a parent architecture to modify (with parent modifications already applied)
        :param source_id: the database view_id of the parent view
        :param model: the original model for which we create a view (not
            necessarily the same as the source's model); only the inheriting
            views with that specific model will be applied.
        :param root_id: id of the root view, carried through the recursion for
            branding purposes (defaults to ``source_id`` at the top call)
        :return: a modified source where all the modifying architecture are applied
        """
        if context is None: context = {}
        if root_id is None:
            root_id = source_id
        sql_inherit = self.get_inheriting_views_arch(cr, uid, source_id, model, context=context)
        for (specs, view_id) in sql_inherit:
            specs_tree = etree.fromstring(specs.encode('utf-8'))
            if context.get('inherit_branding'):
                self.inherit_branding(specs_tree, view_id, root_id)
            source = self.apply_inheritance_specs(cr, uid, source, specs_tree, view_id, context=context)
            # Depth-first: apply the children of this extension before moving
            # to the next sibling extension.
            source = self.apply_view_inheritance(cr, uid, source, view_id, model, root_id=root_id, context=context)
        return source
    def read_combined(self, cr, uid, view_id, fields=None, context=None):
        """
        Utility function to get a view combined with its inherited views.

        * Gets the top of the view tree if a sub-view is requested
        * Applies all inherited archs on the root view
        * Returns the view with all requested fields

        .. note:: ``arch`` is always added to the fields list even if not
                  requested (similar to ``id``)
        """
        if context is None: context = {}

        # if view_id is not a root view, climb back to the top.
        base = v = self.browse(cr, uid, view_id, context=context)
        while v.mode != 'primary':
            v = v.inherit_id
        root_id = v.id

        # arch and model fields are always returned
        if fields:
            fields = list({'arch', 'model'}.union(fields))

        # read the view arch
        [view] = self.read(cr, uid, [root_id], fields=fields, context=context)
        view_arch = etree.fromstring(view['arch'].encode('utf-8'))
        if not v.inherit_id:
            arch_tree = view_arch
        else:
            # A primary view inheriting from another (primary-mode override):
            # resolve the parent completely, then apply this view's specs.
            parent_view = self.read_combined(
                cr, uid, v.inherit_id.id, fields=fields, context=context)
            arch_tree = etree.fromstring(parent_view['arch'])
            arch_tree = self.apply_inheritance_specs(
                cr, uid, arch_tree, view_arch, parent_view['id'], context=context)

        if context.get('inherit_branding'):
            arch_tree.attrib.update({
                'data-oe-model': 'ir.ui.view',
                'data-oe-id': str(root_id),
                'data-oe-field': 'arch',
            })

        # and apply inheritance
        arch = self.apply_view_inheritance(
            cr, uid, arch_tree, root_id, base.model, context=context)

        return dict(view, arch=etree.tostring(arch, encoding='utf-8'))
#------------------------------------------------------
# Postprocessing: translation, groups and modifiers
#------------------------------------------------------
# TODO:
# - split postprocess so that it can be used instead of translate_qweb
# - remove group processing from ir_qweb
#------------------------------------------------------
    def postprocess(self, cr, user, model, node, view_id, in_tree_view, model_fields, context=None):
        """Return the description of the fields in the node.

        In a normal call to this method, node is a complete view architecture
        but it is actually possible to give some sub-node (this is used so
        that the method can call itself recursively).

        Originally, the field descriptions are drawn from the node itself.
        But there is now some code calling fields_get() in order to merge some
        of those information in the architecture.

        The node tree is mutated in place: group restrictions are applied,
        modifiers are transferred, and view terms are translated.
        """
        if context is None:
            context = {}
        result = False
        fields = {}
        children = True

        modifiers = {}
        Model = self.pool.get(model)
        if Model is None:
            self.raise_view_error(cr, user, _('Model not found: %(model)s') % dict(model=model),
                                  view_id, context)

        def encode(s):
            # lxml wants byte strings in py2; encode unicode on the way in.
            if isinstance(s, unicode):
                return s.encode('utf8')
            return s

        def check_group(node):
            """Apply group restrictions, may be set at view level or model level::
               * at view level this means the element should be made invisible to
                 people who are not members
               * at model level (exclusively for fields, obviously), this means
                 the field should be completely removed from the view, as it is
                 completely unavailable for non-members

               :return: True if field should be included in the result of fields_view_get
            """
            if node.tag == 'field' and node.get('name') in Model._fields:
                field = Model._fields[node.get('name')]
                if field.groups and not self.user_has_groups(
                        cr, user, groups=field.groups, context=context):
                    node.getparent().remove(node)
                    fields.pop(node.get('name'), None)
                    # no point processing view-level ``groups`` anymore, return
                    return False
            if node.get('groups'):
                can_see = self.user_has_groups(
                    cr, user, groups=node.get('groups'), context=context)
                if not can_see:
                    node.set('invisible', '1')
                    modifiers['invisible'] = True
                    if 'attrs' in node.attrib:
                        del(node.attrib['attrs']) #avoid making field visible later
                del(node.attrib['groups'])
            return True

        if node.tag in ('field', 'node', 'arrow'):
            if node.get('object'):
                # Diagram node/arrow pointing to another model: build a pseudo
                # form view out of its <field> children and post-process it.
                attrs = {}
                views = {}
                xml = "<form>"
                for f in node:
                    if f.tag == 'field':
                        xml += etree.tostring(f, encoding="utf-8")
                xml += "</form>"
                new_xml = etree.fromstring(encode(xml))
                ctx = context.copy()
                ctx['base_model_name'] = model
                xarch, xfields = self.postprocess_and_fields(cr, user, node.get('object'), new_xml, view_id, ctx)
                views['form'] = {
                    'arch': xarch,
                    'fields': xfields
                }
                attrs = {'views': views}
                fields = xfields
            if node.get('name'):
                attrs = {}
                field = Model._fields.get(node.get('name'))
                if field:
                    children = False
                    views = {}
                    # Inline sub-views (one2many/many2many embedded views) are
                    # extracted, post-processed against the comodel and moved
                    # into the field's 'views' attribute.
                    for f in node:
                        if f.tag in ('form', 'tree', 'graph', 'kanban', 'calendar'):
                            node.remove(f)
                            ctx = context.copy()
                            ctx['base_model_name'] = model
                            xarch, xfields = self.postprocess_and_fields(cr, user, field.comodel_name, f, view_id, ctx)
                            views[str(f.tag)] = {
                                'arch': xarch,
                                'fields': xfields
                            }
                    attrs = {'views': views}
                fields[node.get('name')] = attrs

                field = model_fields.get(node.get('name'))
                if field:
                    orm.transfer_field_to_modifiers(field, modifiers)

        elif node.tag in ('form', 'tree'):
            result = Model.view_header_get(cr, user, False, node.tag, context=context)
            if result:
                node.set('string', result)
            in_tree_view = node.tag == 'tree'

        elif node.tag == 'calendar':
            # Calendar attributes reference extra fields that must be fetched.
            for additional_field in ('date_start', 'date_delay', 'date_stop', 'color', 'all_day', 'attendee'):
                if node.get(additional_field):
                    fields[node.get(additional_field)] = {}

        if not check_group(node):
            # node must be removed, no need to proceed further with its children
            return fields

        # The view architeture overrides the python model.
        # Get the attrs before they are (possibly) deleted by check_group below
        orm.transfer_node_to_modifiers(node, modifiers, context, in_tree_view)

        # TODO remove attrs counterpart in modifiers when invisible is true ?

        # translate view
        if 'lang' in context:
            Translations = self.pool['ir.translation']
            if node.text and node.text.strip():
                term = node.text.strip()
                trans = Translations._get_source(cr, user, model, 'view', context['lang'], term)
                if trans:
                    node.text = node.text.replace(term, trans)
            if node.tail and node.tail.strip():
                term = node.tail.strip()
                trans = Translations._get_source(cr, user, model, 'view', context['lang'], term)
                if trans:
                    node.tail = node.tail.replace(term, trans)

            if node.get('string') and node.get('string').strip() and not result:
                term = node.get('string').strip()
                trans = Translations._get_source(cr, user, model, 'view', context['lang'], term)
                if trans == term:
                    if 'base_model_name' in context:
                        # If translation is same as source, perhaps we'd have more luck with the alternative model name
                        # (in case we are in a mixed situation, such as an inherited view where parent_view.model != model
                        trans = Translations._get_source(cr, user, context['base_model_name'], 'view', context['lang'], term)
                    else:
                        inherit_model = self.browse(cr, user, view_id, context=context).inherit_id.model or model
                        if inherit_model != model:
                            # parent view has a different model, if the terms belongs to the parent view, the translation
                            # should be checked on the parent model as well
                            trans = Translations._get_source(cr, user, inherit_model, 'view', context['lang'], term)
                if trans:
                    node.set('string', trans)

            for attr_name in ('confirm', 'sum', 'avg', 'help', 'placeholder'):
                attr_value = node.get(attr_name)
                if attr_value and attr_value.strip():
                    trans = Translations._get_source(cr, user, model, 'view', context['lang'], attr_value.strip())
                    if trans:
                        node.set(attr_name, trans)

        for f in node:
            if children or (node.tag == 'field' and f.tag in ('filter','separator')):
                fields.update(self.postprocess(cr, user, model, f, view_id, in_tree_view, model_fields, context))

        orm.transfer_modifiers_to_node(modifiers, node)
        return fields
def add_on_change(self, cr, user, model_name, arch):
""" Add attribute on_change="1" on fields that are dependencies of
computed fields on the same view.
"""
# map each field object to its corresponding nodes in arch
field_nodes = collections.defaultdict(list)
def collect(node, model):
if node.tag == 'field':
field = model._fields.get(node.get('name'))
if field:
field_nodes[field].append(node)
if field.relational:
model = self.pool.get(field.comodel_name)
for child in node:
collect(child, model)
collect(arch, self.pool[model_name])
for field, nodes in field_nodes.iteritems():
# if field should trigger an onchange, add on_change="1" on the
# nodes referring to field
model = self.pool[field.model_name]
if model._has_onchange(field, field_nodes):
for node in nodes:
if not node.get('on_change'):
node.set('on_change', '1')
return arch
def _disable_workflow_buttons(self, cr, user, model, node):
""" Set the buttons in node to readonly if the user can't activate them. """
if model is None or user == 1:
# admin user can always activate workflow buttons
return node
# TODO handle the case of more than one workflow for a model or multiple
# transitions with different groups and same signal
usersobj = self.pool.get('res.users')
buttons = (n for n in node.getiterator('button') if n.get('type') != 'object')
for button in buttons:
user_groups = usersobj.read(cr, user, [user], ['groups_id'])[0]['groups_id']
cr.execute("""SELECT DISTINCT t.group_id
FROM wkf
INNER JOIN wkf_activity a ON a.wkf_id = wkf.id
INNER JOIN wkf_transition t ON (t.act_to = a.id)
WHERE wkf.osv = %s
AND t.signal = %s
AND t.group_id is NOT NULL
""", (model, button.get('name')))
group_ids = [x[0] for x in cr.fetchall() if x[0]]
can_click = not group_ids or bool(set(user_groups).intersection(group_ids))
button.set('readonly', str(int(not can_click)))
return node
def postprocess_and_fields(self, cr, user, model, node, view_id, context=None):
""" Return an architecture and a description of all the fields.
The field description combines the result of fields_get() and
postprocess().
:param node: the architecture as as an etree
:return: a tuple (arch, fields) where arch is the given node as a
string and fields is the description of all the fields.
"""
fields = {}
Model = self.pool.get(model)
if Model is None:
self.raise_view_error(cr, user, _('Model not found: %(model)s') % dict(model=model), view_id, context)
if node.tag == 'diagram':
if node.getchildren()[0].tag == 'node':
node_model = self.pool[node.getchildren()[0].get('object')]
node_fields = node_model.fields_get(cr, user, None, context=context)
fields.update(node_fields)
if not node.get("create") and not node_model.check_access_rights(cr, user, 'create', raise_exception=False):
node.set("create", 'false')
if node.getchildren()[1].tag == 'arrow':
arrow_fields = self.pool[node.getchildren()[1].get('object')].fields_get(cr, user, None, context=context)
fields.update(arrow_fields)
else:
fields = Model.fields_get(cr, user, None, context=context)
node = self.add_on_change(cr, user, model, node)
fields_def = self.postprocess(cr, user, model, node, view_id, False, fields, context=context)
node = self._disable_workflow_buttons(cr, user, model, node)
if node.tag in ('kanban', 'tree', 'form', 'gantt'):
for action, operation in (('create', 'create'), ('delete', 'unlink'), ('edit', 'write')):
if not node.get(action) and not Model.check_access_rights(cr, user, operation, raise_exception=False):
node.set(action, 'false')
if node.tag in ('kanban'):
group_by_name = node.get('default_group_by')
if group_by_name in Model._fields:
group_by_field = Model._fields[group_by_name]
if group_by_field.type == 'many2one':
group_by_model = Model.pool[group_by_field.comodel_name]
for action, operation in (('group_create', 'create'), ('group_delete', 'unlink'), ('group_edit', 'write')):
if not node.get(action) and not group_by_model.check_access_rights(cr, user, operation, raise_exception=False):
node.set(action, 'false')
arch = etree.tostring(node, encoding="utf-8").replace('\t', '')
for k in fields.keys():
if k not in fields_def:
del fields[k]
for field in fields_def:
if field in fields:
fields[field].update(fields_def[field])
else:
message = _("Field `%(field_name)s` does not exist") % \
dict(field_name=field)
self.raise_view_error(cr, user, message, view_id, context)
return arch, fields
#------------------------------------------------------
# QWeb template views
#------------------------------------------------------
@tools.ormcache_context(accepted_keys=('lang','inherit_branding', 'editable', 'translatable'))
def read_template(self, cr, uid, xml_id, context=None):
if isinstance(xml_id, (int, long)):
view_id = xml_id
else:
if '.' not in xml_id:
raise ValueError('Invalid template id: %r' % (xml_id,))
view_id = self.pool['ir.model.data'].xmlid_to_res_id(cr, uid, xml_id, raise_if_not_found=True)
arch = self.read_combined(cr, uid, view_id, fields=['arch'], context=context)['arch']
arch_tree = etree.fromstring(arch)
if 'lang' in context:
arch_tree = self.translate_qweb(cr, uid, view_id, arch_tree, context['lang'], context)
self.distribute_branding(arch_tree)
root = etree.Element('templates')
root.append(arch_tree)
arch = etree.tostring(root, encoding='utf-8', xml_declaration=True)
return arch
def clear_cache(self):
self.read_template.clear_cache(self)
def _contains_branded(self, node):
return node.tag == 't'\
or 't-raw' in node.attrib\
or any(self.is_node_branded(child) for child in node.iterdescendants())
def _pop_view_branding(self, element):
distributed_branding = dict(
(attribute, element.attrib.pop(attribute))
for attribute in MOVABLE_BRANDING
if element.get(attribute))
return distributed_branding
def distribute_branding(self, e, branding=None, parent_xpath='',
index_map=misc.ConstantMapping(1)):
if e.get('t-ignore') or e.tag == 'head':
# remove any view branding possibly injected by inheritance
attrs = set(MOVABLE_BRANDING)
for descendant in e.iterdescendants(tag=etree.Element):
if not attrs.intersection(descendant.attrib): continue
self._pop_view_branding(descendant)
# TODO: find a better name and check if we have a string to boolean helper
return
node_path = e.get('data-oe-xpath')
if node_path is None:
node_path = "%s/%s[%d]" % (parent_xpath, e.tag, index_map[e.tag])
if branding and not (e.get('data-oe-model') or e.get('t-field')):
e.attrib.update(branding)
e.set('data-oe-xpath', node_path)
if not e.get('data-oe-model'): return
if {'t-esc', 't-raw'}.intersection(e.attrib):
# nodes which fully generate their content and have no reason to
# be branded because they can not sensibly be edited
self._pop_view_branding(e)
elif self._contains_branded(e):
# if a branded element contains branded elements distribute own
# branding to children unless it's t-raw, then just remove branding
# on current element
distributed_branding = self._pop_view_branding(e)
if 't-raw' not in e.attrib:
# TODO: collections.Counter if remove p2.6 compat
# running index by tag type, for XPath query generation
indexes = collections.defaultdict(lambda: 0)
for child in e.iterchildren(tag=etree.Element):
if child.get('data-oe-xpath'):
# injected by view inheritance, skip otherwise
# generated xpath is incorrect
self.distribute_branding(child)
else:
indexes[child.tag] += 1
self.distribute_branding(
child, distributed_branding,
parent_xpath=node_path, index_map=indexes)
def is_node_branded(self, node):
""" Finds out whether a node is branded or qweb-active (bears a
@data-oe-model or a @t-* *which is not t-field* as t-field does not
section out views)
:param node: an etree-compatible element to test
:type node: etree._Element
:rtype: boolean
"""
return any(
(attr in ('data-oe-model', 'group') or (attr != 't-field' and attr.startswith('t-')))
for attr in node.attrib
)
def _translate_qweb(self, cr, uid, arch, translate_func, context=None):
# TODO: this should be moved in a place before inheritance is applied
# but process() is only called on fields_view_get()
h = HTMLParser.HTMLParser()
def get_trans(text):
if not text or not text.strip():
return None
text = text.strip()
if len(text) < 2 or (text.startswith('<!') and text.endswith('>')):
return None
return translate_func(text)
if type(arch) not in SKIPPED_ELEMENT_TYPES and arch.tag not in SKIPPED_ELEMENTS:
text = get_trans(arch.text)
if text:
arch.text = arch.text.replace(arch.text.strip(), text)
tail = get_trans(arch.tail)
if tail:
arch.tail = arch.tail.replace(arch.tail.strip(), tail)
for attr_name in ('title', 'alt', 'label', 'placeholder'):
attr = get_trans(arch.get(attr_name))
if attr:
arch.set(attr_name, attr)
for node in arch.iterchildren("*"):
self._translate_qweb(cr, uid, node, translate_func, context)
def translate_qweb(self, cr, uid, id_, arch, lang, context=None):
view_ids = []
view = self.browse(cr, uid, id_, context=context)
if view:
view_ids.append(view.id)
if view.mode == 'primary' and view.inherit_id.mode == 'primary':
# template is `cloned` from parent view
view_ids.append(view.inherit_id.id)
Translations = self.pool['ir.translation']
def translate_func(term):
trans = Translations._get_source(cr, uid, 'website', 'view', lang, term, view_ids)
return trans
self._translate_qweb(cr, uid, arch, translate_func, context=context)
return arch
@openerp.tools.ormcache()
def get_view_xmlid(self, cr, uid, id):
imd = self.pool['ir.model.data']
domain = [('model', '=', 'ir.ui.view'), ('res_id', '=', id)]
xmlid = imd.search_read(cr, uid, domain, ['module', 'name'])[0]
return '%s.%s' % (xmlid['module'], xmlid['name'])
@api.cr_uid_ids_context
def render(self, cr, uid, id_or_xml_id, values=None, engine='ir.qweb', context=None):
if isinstance(id_or_xml_id, list):
id_or_xml_id = id_or_xml_id[0]
if not context:
context = {}
if values is None:
values = dict()
qcontext = dict(
env=api.Environment(cr, uid, context),
keep_query=keep_query,
request=request, # might be unbound if we're not in an httprequest context
debug=request.debug if request else False,
json=simplejson,
quote_plus=werkzeug.url_quote_plus,
time=time,
datetime=datetime,
relativedelta=relativedelta,
)
qcontext.update(values)
# TODO: This helper can be used by any template that wants to embedd the backend.
# It is currently necessary because the ir.ui.view bundle inheritance does not
# match the module dependency graph.
def get_modules_order():
if request:
from openerp.addons.web.controllers.main import module_boot
return simplejson.dumps(module_boot())
return '[]'
qcontext['get_modules_order'] = get_modules_order
def loader(name):
return self.read_template(cr, uid, name, context=context)
return self.pool[engine].render(cr, uid, id_or_xml_id, qcontext, loader=loader, context=context)
#------------------------------------------------------
# Misc
#------------------------------------------------------
def graph_get(self, cr, uid, id, model, node_obj, conn_obj, src_node, des_node, label, scale, context=None):
nodes=[]
nodes_name=[]
transitions=[]
start=[]
tres={}
labels={}
no_ancester=[]
blank_nodes = []
_Model_Obj = self.pool[model]
_Node_Obj = self.pool[node_obj]
_Arrow_Obj = self.pool[conn_obj]
for model_key,model_value in _Model_Obj._columns.items():
if model_value._type=='one2many':
if model_value._obj==node_obj:
_Node_Field=model_key
_Model_Field=model_value._fields_id
for node_key,node_value in _Node_Obj._columns.items():
if node_value._type=='one2many':
if node_value._obj==conn_obj:
# _Source_Field = "Incoming Arrows" (connected via des_node)
if node_value._fields_id == des_node:
_Source_Field=node_key
# _Destination_Field = "Outgoing Arrows" (connected via src_node)
if node_value._fields_id == src_node:
_Destination_Field=node_key
datas = _Model_Obj.read(cr, uid, id, [],context)
for a in _Node_Obj.read(cr,uid,datas[_Node_Field],[]):
if a[_Source_Field] or a[_Destination_Field]:
nodes_name.append((a['id'],a['name']))
nodes.append(a['id'])
else:
blank_nodes.append({'id': a['id'],'name':a['name']})
if a.has_key('flow_start') and a['flow_start']:
start.append(a['id'])
else:
if not a[_Source_Field]:
no_ancester.append(a['id'])
for t in _Arrow_Obj.read(cr,uid, a[_Destination_Field],[]):
transitions.append((a['id'], t[des_node][0]))
tres[str(t['id'])] = (a['id'],t[des_node][0])
label_string = ""
if label:
for lbl in eval(label):
if t.has_key(tools.ustr(lbl)) and tools.ustr(t[lbl])=='False':
label_string += ' '
else:
label_string = label_string + " " + tools.ustr(t[lbl])
labels[str(t['id'])] = (a['id'],label_string)
g = graph(nodes, transitions, no_ancester)
g.process(start)
g.scale(*scale)
result = g.result_get()
results = {}
for node in nodes_name:
results[str(node[0])] = result[node[0]]
results[str(node[0])]['name'] = node[1]
return {'nodes': results,
'transitions': tres,
'label' : labels,
'blank_nodes': blank_nodes,
'node_parent_field': _Model_Field,}
def _validate_custom_views(self, cr, uid, model):
"""Validate architecture of custom views (= without xml id) for a given model.
This method is called at the end of registry update.
"""
cr.execute("""SELECT max(v.id)
FROM ir_ui_view v
LEFT JOIN ir_model_data md ON (md.model = 'ir.ui.view' AND md.res_id = v.id)
WHERE md.module IS NULL
AND v.model = %s
GROUP BY coalesce(v.inherit_id, v.id)
""", (model,))
ids = map(itemgetter(0), cr.fetchall())
context = dict(load_all_views=True)
return self._check_xml(cr, uid, ids, context=context)
def _validate_module_views(self, cr, uid, module):
"""Validate architecture of all the views of a given module"""
assert not self.pool._init or module in self.pool._init_modules
xmlid_filter = ''
params = (module,)
if self.pool._init:
# only validate the views that are still existing...
xmlid_filter = "AND md.name IN %s"
names = tuple(name for (xmod, name), (model, res_id) in self.pool.model_data_reference_ids.items() if xmod == module and model == self._name)
if not names:
# no views for this module, nothing to validate
return
params += (names,)
cr.execute("""SELECT max(v.id)
FROM ir_ui_view v
LEFT JOIN ir_model_data md ON (md.model = 'ir.ui.view' AND md.res_id = v.id)
WHERE md.module = %s
{0}
GROUP BY coalesce(v.inherit_id, v.id)
""".format(xmlid_filter), params)
for vid, in cr.fetchall():
if not self._check_xml(cr, uid, [vid]):
self.raise_view_error(cr, uid, "Can't validate view", vid)
# vim:et:
| agpl-3.0 |
MziRintu/kitsune | kitsune/kbforums/tests/test_templates.py | 13 | 14727 | import time
from nose.tools import eq_
from pyquery import PyQuery as pq
from kitsune.flagit.models import FlaggedObject
from kitsune.kbforums.models import Post, Thread
from kitsune.kbforums.tests import KBForumTestCase, thread, post as post_
from kitsune.sumo.urlresolvers import reverse
from kitsune.sumo.tests import get, post
from kitsune.users.tests import user, add_permission
from kitsune.wiki.tests import document, revision
class PostsTemplateTests(KBForumTestCase):
def test_empty_reply_errors(self):
"""Posting an empty reply shows errors."""
u = user(save=True)
self.client.login(username=u.username, password='testpass')
d = document(save=True)
t = thread(document=d, save=True)
response = post(self.client, 'wiki.discuss.reply', {'content': ''},
args=[d.slug, t.id])
doc = pq(response.content)
error_msg = doc('ul.errorlist li a')[0]
eq_(error_msg.text, 'Please provide a message.')
def test_edit_post_errors(self):
"""Changing post content works."""
u = user(save=True)
self.client.login(username=u.username, password='testpass')
t = thread(creator=u, is_locked=False, save=True)
p = t.new_post(creator=u, content='foo')
response = post(self.client, 'wiki.discuss.edit_post',
{'content': 'wha?'},
args=[t.document.slug, t.id, p.id])
doc = pq(response.content)
errors = doc('ul.errorlist li a')
eq_(errors[0].text,
'Your message is too short (4 characters). ' +
'It must be at least 5 characters.')
def test_edit_thread_template(self):
"""The edit-post template should render."""
u = user(save=True)
self.client.login(username=u.username, password='testpass')
t = thread(creator=u, is_locked=False, save=True)
p = t.new_post(creator=u, content='foo')
res = get(self.client, 'wiki.discuss.edit_post',
args=[p.thread.document.slug, p.thread.id, p.id])
doc = pq(res.content)
eq_(len(doc('form.edit-post')), 1)
def test_edit_post(self):
"""Changing post content works."""
u = user(save=True)
self.client.login(username=u.username, password='testpass')
d = document(save=True)
t = thread(document=d, save=True)
p = t.new_post(creator=u, content='foo')
post(self.client, 'wiki.discuss.edit_post',
{'content': 'Some new content'},
args=[d.slug, t.id, p.id])
edited_p = t.post_set.get(pk=p.id)
eq_('Some new content', edited_p.content)
def test_long_title_truncated_in_crumbs(self):
"""A very long thread title gets truncated in the breadcrumbs"""
d = document(save=True)
t = thread(title='A thread with a very very very very very' * 5, document=d,
save=True)
response = get(self.client, 'wiki.discuss.posts', args=[d.slug, t.id])
doc = pq(response.content)
crumb = doc('#breadcrumbs li:last-child')
eq_(crumb.text(), 'A thread with a very very very very ...')
def test_edit_post_moderator(self):
"""Editing post as a moderator works."""
u = user(save=True)
add_permission(u, Post, 'change_post')
self.client.login(username=u.username, password='testpass')
p = post_(save=True)
t = p.thread
d = t.document
r = post(self.client, 'wiki.discuss.edit_post',
{'content': 'More new content'}, args=[d.slug, t.id, p.id])
eq_(200, r.status_code)
edited_p = Post.objects.get(pk=p.pk)
eq_('More new content', edited_p.content)
def test_preview_reply(self):
"""Preview a reply."""
u = user(save=True)
self.client.login(username=u.username, password='testpass')
d = document(save=True)
t = thread(document=d, save=True)
num_posts = t.post_set.count()
content = 'Full of awesome.'
response = post(self.client, 'wiki.discuss.reply',
{'content': content, 'preview': 'any string'},
args=[d.slug, t.id])
eq_(200, response.status_code)
doc = pq(response.content)
eq_(content, doc('#post-preview div.content').text())
eq_(num_posts, t.post_set.count())
def test_preview_async(self):
"""Preview a reply."""
u = user(save=True)
self.client.login(username=u.username, password='testpass')
d = document(save=True)
content = 'Full of awesome.'
response = post(self.client, 'wiki.discuss.post_preview_async',
{'content': content}, args=[d.slug])
eq_(200, response.status_code)
doc = pq(response.content)
eq_(content, doc('div.content').text())
def test_watch_thread(self):
"""Watch and unwatch a thread."""
u = user(save=True)
self.client.login(username=u.username, password='testpass')
t = thread(save=True)
response = post(self.client, 'wiki.discuss.watch_thread',
{'watch': 'yes'}, args=[t.document.slug, t.id])
self.assertContains(response, 'Stop')
response = post(self.client, 'wiki.discuss.watch_thread',
{'watch': 'no'}, args=[t.document.slug, t.id])
self.assertNotContains(response, 'Stop')
def test_links_nofollow(self):
"""Links posted should have rel=nofollow."""
u = user(save=True)
t = thread(save=True)
t.new_post(creator=u, content='linking http://test.org')
response = get(self.client, 'wiki.discuss.posts',
args=[t.document.slug, t.pk])
doc = pq(response.content)
eq_('nofollow', doc('ol.posts div.content a')[0].attrib['rel'])
class ThreadsTemplateTests(KBForumTestCase):
def test_last_thread_post_link_has_post_id(self):
"""Make sure the last post url links to the last post (#post-<id>)."""
u = user(save=True)
t = thread(save=True)
t.new_post(creator=u, content='foo')
p2 = t.new_post(creator=u, content='bar')
response = get(self.client, 'wiki.discuss.threads',
args=[t.document.slug])
doc = pq(response.content)
last_post_link = doc('ol.threads div.last-post a:not(.username)')[0]
href = last_post_link.attrib['href']
eq_(href.split('#')[1], 'post-%d' % p2.id)
def test_empty_thread_errors(self):
"""Posting an empty thread shows errors."""
u = user(save=True)
self.client.login(username=u.username, password='testpass')
d = document(save=True)
response = post(self.client, 'wiki.discuss.new_thread',
{'title': '', 'content': ''}, args=[d.slug])
doc = pq(response.content)
errors = doc('ul.errorlist li a')
eq_(errors[0].text, 'Please provide a title.')
eq_(errors[1].text, 'Please provide a message.')
def test_new_short_thread_errors(self):
"""Posting a short new thread shows errors."""
u = user(save=True)
self.client.login(username=u.username, password='testpass')
d = document(save=True)
response = post(self.client, 'wiki.discuss.new_thread',
{'title': 'wha?', 'content': 'wha?'}, args=[d.slug])
doc = pq(response.content)
errors = doc('ul.errorlist li a')
eq_(errors[0].text,
'Your title is too short (4 characters). ' +
'It must be at least 5 characters.')
eq_(errors[1].text,
'Your message is too short (4 characters). ' +
'It must be at least 5 characters.')
def test_edit_thread_errors(self):
"""Editing thread with too short of a title shows errors."""
u = user(save=True)
self.client.login(username=u.username, password='testpass')
d = document(save=True)
t = thread(document=d, creator=u, save=True)
response = post(self.client, 'wiki.discuss.edit_thread',
{'title': 'wha?'}, args=[d.slug, t.id])
doc = pq(response.content)
errors = doc('ul.errorlist li a')
eq_(errors[0].text,
'Your title is too short (4 characters). ' +
'It must be at least 5 characters.')
def test_edit_thread_template(self):
"""The edit-thread template should render."""
u = user(save=True)
self.client.login(username=u.username, password='testpass')
t = thread(creator=u, is_locked=False, save=True)
res = get(self.client, 'wiki.discuss.edit_thread',
args=[t.document.slug, t.id])
doc = pq(res.content)
eq_(len(doc('form.edit-thread')), 1)
def test_watch_forum(self):
"""Watch and unwatch a forum."""
u = user(save=True)
self.client.login(username=u.username, password='testpass')
d = document(save=True)
response = post(self.client, 'wiki.discuss.watch_forum',
{'watch': 'yes'}, args=[d.slug])
self.assertContains(response, 'Stop')
response = post(self.client, 'wiki.discuss.watch_forum',
{'watch': 'no'}, args=[d.slug])
self.assertNotContains(response, 'Stop')
def test_watch_locale(self):
"""Watch and unwatch a locale."""
u = user(save=True)
self.client.login(username=u.username, password='testpass')
d = document(save=True)
next_url = reverse('wiki.discuss.threads', args=[d.slug])
response = post(self.client, 'wiki.discuss.watch_locale',
{'watch': 'yes', 'next': next_url})
self.assertContains(response, 'Turn off emails')
response = post(self.client, 'wiki.discuss.watch_locale',
{'watch': 'no', 'next': next_url})
self.assertContains(response,
'Get emailed when there is new discussion')
def test_orphan_non_english(self):
"""Discussing a non-English article with no parent shouldn't crash."""
# Guard against regressions of bug 658045.
r = revision(document=document(locale='de', save=True),
is_approved=True, save=True)
response = self.client.get(
reverse('wiki.discuss.threads', args=[r.document.slug],
locale='de'))
eq_(200, response.status_code)
def test_all_locale_discussions(self):
"""Start or stop watching all discussions in a locale."""
u = user(save=True)
self.client.login(username=u.username, password='testpass')
next_url = reverse('wiki.locale_discussions')
# Watch locale.
response = post(self.client, 'wiki.discuss.watch_locale',
{'watch': 'yes', 'next': next_url})
self.assertContains(response, 'Stop watching this locale')
# Stop watching locale.
response = post(self.client, 'wiki.discuss.watch_locale',
{'watch': 'no', 'next': next_url})
self.assertContains(response, 'Watch this locale')
def test_locale_discussions_ignores_sticky(self):
"""Sticky flag is ignored in locale discussions view"""
u = user(save=True)
d = document(save=True)
t = thread(title='Sticky Thread', is_sticky=True, document=d,
save=True)
t.new_post(creator=u, content='foo')
t2 = thread(title='A thread with a very very long',
is_sticky=False, document=d, save=True)
t2.new_post(creator=u, content='bar')
time.sleep(1)
t2.new_post(creator=u, content='last')
self.client.login(username=u.username, password='testpass')
response = post(self.client, 'wiki.locale_discussions')
eq_(200, response.status_code)
doc = pq(response.content)
title = doc('ol.threads li div.title a:first').text()
assert title.startswith('A thread with a very very long')
class NewThreadTemplateTests(KBForumTestCase):
def test_preview(self):
"""Preview the thread post."""
u = user(save=True)
self.client.login(username=u.username, password='testpass')
d = document(save=True)
num_threads = d.thread_set.count()
content = 'Full of awesome.'
response = post(self.client, 'wiki.discuss.new_thread',
{'title': 'Topic', 'content': content,
'preview': 'any string'}, args=[d.slug])
eq_(200, response.status_code)
doc = pq(response.content)
eq_(content, doc('#post-preview div.content').text())
eq_(num_threads, d.thread_set.count())
class FlaggedPostTests(KBForumTestCase):
def test_flag_kbforum_post(self):
u = user(save=True)
t = thread(save=True)
p = t.new_post(creator=u, content='foo')
f = FlaggedObject(content_object=p, reason='spam', creator_id=u.id)
f.save()
# Make sure flagit queue page works
u2 = user(save=True)
add_permission(u2, FlaggedObject, 'can_moderate')
self.client.login(username=u2.username, password='testpass')
response = get(self.client, 'flagit.queue')
eq_(200, response.status_code)
doc = pq(response.content)
eq_(1, len(doc('#flagged-queue li')))
class TestRatelimiting(KBForumTestCase):
def test_post_ratelimit(self):
"""Verify that rate limiting kicks in after 4 threads or replies."""
d = document(save=True)
u = user(save=True)
self.client.login(username=u.username, password='testpass')
# Create 2 threads:
for i in range(2):
response = post(self.client, 'wiki.discuss.new_thread',
{'title': 'Topic', 'content': 'hellooo'},
args=[d.slug])
eq_(200, response.status_code)
# Now 3 replies (only 2 should save):
t = Thread.objects.all()[0]
for i in range(3):
response = post(self.client, 'wiki.discuss.reply',
{'content': 'hellooo'}, args=[d.slug, t.id])
eq_(200, response.status_code)
# And another thread that shouldn't save:
response = post(self.client, 'wiki.discuss.new_thread',
{'title': 'Topic', 'content': 'hellooo'},
args=[d.slug])
# We should only have 4 posts (each thread and reply creates a post).
eq_(4, Post.objects.count())
| bsd-3-clause |
Limags/MissionPlanner | Lib/site-packages/scipy/fftpack/tests/test_convolve.py | 51 | 4016 | #!"C:\Users\hog\Documents\Visual Studio 2010\Projects\ArdupilotMega\ArdupilotMega\bin\Debug\ipy.exe"
""" Test functions for fftpack.convolve, in particular
behaviour of callbacks.
This is present to assist in switching from f2py to fwrap,
to make sure results are the same. True results are derived
from running the tests with an older SciPy with f2py.
"""
import sys
import traceback
import re
from numpy.testing import *
from scipy.fftpack import convolve
class TestCallback(TestCase):
def test_extra_args(self):
n = 10
def kernel(k, aux1, aux2):
assert aux1 == 10
assert aux2 == 2
return k
omega = convolve.init_convolution_kernel(n, kernel, d=1, zero_nyquist=1,
kernel_func_extra_args=(10, 2))
assert_array_almost_equal(omega, [ 0., 0.1, -0.1, 0.2, -0.2, 0.3, -0.3, 0.4, -0.4, 0.])
def test_nested(self):
n = 4
trace = []
def level3(k):
trace.append('(3)')
return 3 * k
def level2(k):
trace.append('<2')
omega = convolve.init_convolution_kernel(n, level3, d=1, zero_nyquist=1)
trace.append('2>')
assert_array_almost_equal(omega, [ 0, 0.75, -0.75, 0.])
return 2 * k
def level1(k):
trace.append('<1')
omega = convolve.init_convolution_kernel(n, level2, d=1, zero_nyquist=1)
trace.append('1>')
return 1
omega = convolve.init_convolution_kernel(n, level1, d=1, zero_nyquist=1)
assert_equal(' '.join(trace), '<1 <2 (3) (3) 2> <2 (3) (3) 2> 1> '
'<1 <2 (3) (3) 2> <2 (3) (3) 2> 1>')
assert_array_almost_equal(omega, [ 0.25, 0.25, -0.25, 0. ])
@dec.knownfailureif(sys.platform == "cli", "Exception handling is different under .NET")
def test_exception(self):
n = 4
class MyException(Exception):
pass
exc_container = [None]
trace = []
def level3(k):
trace.append('(3)')
if k == 1:
e = exc_container[0] = MyException()
raise e
return 3 * k
def level2(k):
trace.append('<2')
try:
omega = convolve.init_convolution_kernel(n, level3, d=1, zero_nyquist=1)
except:
trace.append('2@')
raise
trace.append('2>')
assert_array_almost_equal(omega, [ 0, 0.75, -0.75, 0.])
return 2 * k
def level1(k):
trace.append('<1')
try:
omega = convolve.init_convolution_kernel(n, level2, d=1, zero_nyquist=1)
except:
trace.append('1@')
raise
trace.append('1>')
return 1
try:
convolve.init_convolution_kernel(n, level1, d=1, zero_nyquist=1)
except MyException, e:
assert_(e is exc_container[0])
_, _, tb = sys.exc_info()
# Check that exception stack contains level1 -> level2 -> level3
lines = traceback.format_tb(tb)
##print ''.join(lines)
curlevel = 1
for line in lines:
if re.match(r'\s*File .*, line \d+, in level%d' % curlevel, line):
curlevel += 1
assert_equal(curlevel, 4)
else:
assert_(False)
assert_equal(' '.join(trace), '<1 <2 (3) (3) 2@ 1@')
def benchmark():
"""
Function that can be called from IPython to do benchmarks
using %timeit with different SciPy implementations.
"""
n = 400
def level2(k):
return 2 * k
def level1(k):
omega = convolve.init_convolution_kernel(n, level2, d=1)
return k
convolve.init_convolution_kernel(n, level1, d=1)
if __name__ == "__main__":
run_module_suite()
| gpl-3.0 |
SUSE-Cloud/nova | nova/db/sqlalchemy/migrate_repo/versions/208_expand_compute_node.py | 11 | 1943 | # Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column, Text
from nova.db.sqlalchemy import api as db
from nova.db.sqlalchemy import types
from nova.db.sqlalchemy import utils
def upgrade(migrate_engine):
compute_nodes = utils.get_table(migrate_engine, 'compute_nodes')
host_ip = Column('host_ip', types.IPAddress())
supported_instances = Column('supported_instances', Text)
compute_nodes.create_column(host_ip)
compute_nodes.create_column(supported_instances)
shadow_compute_nodes = utils.get_table(migrate_engine,
db._SHADOW_TABLE_PREFIX + 'compute_nodes')
host_ip = Column('host_ip', types.IPAddress())
supported_instances = Column('supported_instances', Text)
shadow_compute_nodes.create_column(host_ip)
shadow_compute_nodes.create_column(supported_instances)
# NOTE: don't need to populate the new columns since they will
# automatically be populate by a periodic task
def downgrade(migrate_engine):
    """Drop the host_ip and supported_instances columns again, from
    both the compute_nodes table and its shadow table."""
    for table_name in ('compute_nodes',
                       db._SHADOW_TABLE_PREFIX + 'compute_nodes'):
        table = utils.get_table(migrate_engine, table_name)
        table.drop_column('host_ip')
        table.drop_column('supported_instances')
| apache-2.0 |
matthewoliver/swift | swift/common/middleware/dlo.py | 2 | 19051 | # Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Middleware that will provide Dynamic Large Object (DLO) support.
---------------
Using ``swift``
---------------
The quickest way to try out this feature is use the ``swift`` Swift Tool
included with the `python-swiftclient`_ library. You can use the ``-S``
option to specify the segment size to use when splitting a large file. For
example::
swift upload test_container -S 1073741824 large_file
This would split the large_file into 1G segments and begin uploading those
segments in parallel. Once all the segments have been uploaded, ``swift`` will
then create the manifest file so the segments can be downloaded as one.
So now, the following ``swift`` command would download the entire large
object::
swift download test_container large_file
``swift`` command uses a strict convention for its segmented object
support. In the above example it will upload all the segments into a
second container named test_container_segments. These segments will
have names like large_file/1290206778.25/21474836480/00000000,
large_file/1290206778.25/21474836480/00000001, etc.
The main benefit for using a separate container is that the main container
listings will not be polluted with all the segment names. The reason for using
the segment name format of <name>/<timestamp>/<size>/<segment> is so that an
upload of a new file with the same name won't overwrite the contents of the
first until the last moment when the manifest file is updated.
``swift`` will manage these segment files for you, deleting old segments on
deletes and overwrites, etc. You can override this behavior with the
``--leave-segments`` option if desired; this is useful if you want to have
multiple versions of the same large object available.
.. _`python-swiftclient`: http://github.com/openstack/python-swiftclient
----------
Direct API
----------
You can also work with the segments and manifests directly with HTTP
requests instead of having ``swift`` do that for you. You can just
upload the segments like you would any other object and the manifest
is just a zero-byte (not enforced) file with an extra
``X-Object-Manifest`` header.
All the object segments need to be in the same container, have a common object
name prefix, and sort in the order in which they should be concatenated.
Object names are sorted lexicographically as UTF-8 byte strings.
They don't have to be in the same container as the manifest file will be, which
is useful to keep container listings clean as explained above with ``swift``.
The manifest file is simply a zero-byte (not enforced) file with the extra
``X-Object-Manifest: <container>/<prefix>`` header, where ``<container>`` is
the container the object segments are in and ``<prefix>`` is the common prefix
for all the segments.
It is best to upload all the segments first and then create or update the
manifest. In this way, the full object won't be available for downloading
until the upload is complete. Also, you can upload a new set of segments to
a second location and then update the manifest to point to this new location.
During the upload of the new segments, the original manifest will still be
available to download the first set of segments.
.. note::
When updating a manifest object using a POST request, a
``X-Object-Manifest`` header must be included for the object to
continue to behave as a manifest object.
The manifest file should have no content. However, this is not enforced.
If the manifest path itself conforms to container/prefix specified in
``X-Object-Manifest``, and if manifest has some content/data in it, it
would also be considered as segment and manifest's content will be part of
the concatenated GET response. The order of concatenation follows the usual
DLO logic which is - the order of concatenation adheres to order returned
when segment names are sorted.
Here's an example using ``curl`` with tiny 1-byte segments::
# First, upload the segments
curl -X PUT -H 'X-Auth-Token: <token>' \
http://<storage_url>/container/myobject/00000001 --data-binary '1'
curl -X PUT -H 'X-Auth-Token: <token>' \
http://<storage_url>/container/myobject/00000002 --data-binary '2'
curl -X PUT -H 'X-Auth-Token: <token>' \
http://<storage_url>/container/myobject/00000003 --data-binary '3'
# Next, create the manifest file
curl -X PUT -H 'X-Auth-Token: <token>' \
-H 'X-Object-Manifest: container/myobject/' \
http://<storage_url>/container/myobject --data-binary ''
# And now we can download the segments as a single object
curl -H 'X-Auth-Token: <token>' \
http://<storage_url>/container/myobject
"""
import json
import six
from six.moves.urllib.parse import unquote
from hashlib import md5
from swift.common import constraints
from swift.common.exceptions import ListingIterError, SegmentError
from swift.common.http import is_success
from swift.common.swob import Request, Response, \
HTTPRequestedRangeNotSatisfiable, HTTPBadRequest, HTTPConflict
from swift.common.utils import get_logger, \
RateLimitedIterator, quote, close_if_possible, closing_if_possible
from swift.common.request_helpers import SegmentedIterable
from swift.common.wsgi import WSGIContext, make_subrequest, load_app_config
class GetContext(WSGIContext):
    """WSGI context that assembles a GET/HEAD response for a DLO
    manifest from the listing of its segment container."""
    def __init__(self, dlo, logger):
        super(GetContext, self).__init__(dlo.app)
        self.dlo = dlo
        self.logger = logger
    def _get_container_listing(self, req, version, account, container,
                               prefix, marker=''):
        """Fetch one page of the segment-container listing via a GET
        subrequest.

        Returns a tuple (error_response, listing): on success
        error_response is None and listing is the parsed JSON list of
        segment dicts; on failure the subrequest's response is returned
        as error_response and listing is None.
        """
        con_req = make_subrequest(
            req.environ, path='/'.join(['', version, account, container]),
            method='GET',
            headers={'x-auth-token': req.headers.get('x-auth-token')},
            agent=('%(orig)s ' + 'DLO MultipartGET'), swift_source='DLO')
        con_req.query_string = 'prefix=%s' % quote(prefix)
        if marker:
            con_req.query_string += '&marker=%s' % quote(marker)
        con_resp = con_req.get_response(self.dlo.app)
        if not is_success(con_resp.status_int):
            return con_resp, None
        with closing_if_possible(con_resp.app_iter):
            return None, json.loads(''.join(con_resp.app_iter))
    def _segment_listing_iterator(self, req, version, account, container,
                                  prefix, segments, first_byte=None,
                                  last_byte=None):
        """Yield one dict per segment ({'path', 'first_byte',
        'last_byte'}) covering the requested byte range, fetching
        further listing pages on demand.

        Raises ListingIterError if a later listing page cannot be
        fetched (at that point the response is already streaming).
        """
        # It's sort of hokey that this thing takes in the first page of
        # segments as an argument, but we need to compute the etag and content
        # length from the first page, and it's better to have a hokey
        # interface than to make redundant requests.
        if first_byte is None:
            first_byte = 0
        if last_byte is None:
            last_byte = float("inf")
        marker = ''
        while True:
            for segment in segments:
                seg_length = int(segment['bytes'])
                if first_byte >= seg_length:
                    # don't need any bytes from this segment
                    first_byte = max(first_byte - seg_length, -1)
                    last_byte = max(last_byte - seg_length, -1)
                    continue
                elif last_byte < 0:
                    # no bytes are needed from this or any future segment
                    break
                seg_name = segment['name']
                if isinstance(seg_name, six.text_type):
                    seg_name = seg_name.encode("utf-8")
                # We deliberately omit the etag and size here;
                # SegmentedIterable will check size and etag if
                # specified, but we don't want it to. DLOs only care
                # that the objects' names match the specified prefix.
                # SegmentedIterable will instead check that the data read
                # from each segment matches the response headers.
                _path = "/".join(["", version, account, container, seg_name])
                _first = None if first_byte <= 0 else first_byte
                _last = None if last_byte >= seg_length - 1 else last_byte
                yield {
                    'path': _path,
                    'first_byte': _first,
                    'last_byte': _last
                }
                # Shift the remaining range window past this segment.
                first_byte = max(first_byte - seg_length, -1)
                last_byte = max(last_byte - seg_length, -1)
            if len(segments) < constraints.CONTAINER_LISTING_LIMIT:
                # a short page means that we're done with the listing
                break
            elif last_byte < 0:
                break
            marker = segments[-1]['name']
            error_response, segments = self._get_container_listing(
                req, version, account, container, prefix, marker)
            if error_response:
                # we've already started sending the response body to the
                # client, so all we can do is raise an exception to make the
                # WSGI server close the connection early
                close_if_possible(error_response.app_iter)
                raise ListingIterError(
                    "Got status %d listing container /%s/%s" %
                    (error_response.status_int, account, container))
    def get_or_head_response(self, req, x_object_manifest,
                             response_headers=None):
        """Build the concatenated-object response for a manifest whose
        X-Object-Manifest header is ``container/prefix``.

        Handles single-range requests when the range can be resolved
        from the first listing page, computes Content-Length and Etag
        when the listing is complete, and for GETs wires the segments
        into a rate-limited SegmentedIterable.
        """
        if response_headers is None:
            response_headers = self._response_headers
        container, obj_prefix = x_object_manifest.split('/', 1)
        container = unquote(container)
        obj_prefix = unquote(obj_prefix)
        version, account, _junk = req.split_path(2, 3, True)
        error_response, segments = self._get_container_listing(
            req, version, account, container, obj_prefix)
        if error_response:
            return error_response
        # A short first page means we have seen every segment.
        have_complete_listing = len(segments) < \
            constraints.CONTAINER_LISTING_LIMIT
        first_byte = last_byte = None
        actual_content_length = None
        content_length_for_swob_range = None
        if req.range and len(req.range.ranges) == 1:
            content_length_for_swob_range = sum(o['bytes'] for o in segments)
            # This is a hack to handle suffix byte ranges (e.g. "bytes=-5"),
            # which we can't honor unless we have a complete listing.
            _junk, range_end = req.range.ranges_for_length(float("inf"))[0]
            # If this is all the segments, we know whether or not this
            # range request is satisfiable.
            #
            # Alternately, we may not have all the segments, but this range
            # falls entirely within the first page's segments, so we know
            # that it is satisfiable.
            if (have_complete_listing
                    or range_end < content_length_for_swob_range):
                byteranges = req.range.ranges_for_length(
                    content_length_for_swob_range)
                if not byteranges:
                    headers = {'Accept-Ranges': 'bytes'}
                    if have_complete_listing:
                        headers['Content-Range'] = 'bytes */%d' % (
                            content_length_for_swob_range, )
                    return HTTPRequestedRangeNotSatisfiable(
                        request=req, headers=headers)
                first_byte, last_byte = byteranges[0]
                # For some reason, swob.Range.ranges_for_length adds 1 to the
                # last byte's position.
                last_byte -= 1
                actual_content_length = last_byte - first_byte + 1
            else:
                # The range may or may not be satisfiable, but we can't tell
                # based on just one page of listing, and we're not going to go
                # get more pages because that would use up too many resources,
                # so we ignore the Range header and return the whole object.
                actual_content_length = None
                content_length_for_swob_range = None
                req.range = None
        # Strip length/range headers from the manifest response; they
        # describe the (normally zero-byte) manifest, not the DLO.
        response_headers = [
            (h, v) for h, v in response_headers
            if h.lower() not in ("content-length", "content-range")]
        if content_length_for_swob_range is not None:
            # Here, we have to give swob a big-enough content length so that
            # it can compute the actual content length based on the Range
            # header. This value will not be visible to the client; swob will
            # substitute its own Content-Length.
            #
            # Note: if the manifest points to at least CONTAINER_LISTING_LIMIT
            # segments, this may be less than the sum of all the segments'
            # sizes. However, it'll still be greater than the last byte in the
            # Range header, so it's good enough for swob.
            response_headers.append(('Content-Length',
                                     str(content_length_for_swob_range)))
        elif have_complete_listing:
            actual_content_length = sum(o['bytes'] for o in segments)
            response_headers.append(('Content-Length',
                                     str(actual_content_length)))
        if have_complete_listing:
            # The DLO etag is the md5 of the concatenated segment etags.
            response_headers = [(h, v) for h, v in response_headers
                                if h.lower() != "etag"]
            etag = md5()
            for seg_dict in segments:
                etag.update(seg_dict['hash'].strip('"'))
            response_headers.append(('Etag', '"%s"' % etag.hexdigest()))
        app_iter = None
        if req.method == 'GET':
            listing_iter = RateLimitedIterator(
                self._segment_listing_iterator(
                    req, version, account, container, obj_prefix, segments,
                    first_byte=first_byte, last_byte=last_byte),
                self.dlo.rate_limit_segments_per_sec,
                limit_after=self.dlo.rate_limit_after_segment)
            app_iter = SegmentedIterable(
                req, self.dlo.app, listing_iter, ua_suffix="DLO MultipartGET",
                swift_source="DLO", name=req.path, logger=self.logger,
                max_get_time=self.dlo.max_get_time,
                response_body_length=actual_content_length)
            try:
                # Probe the first segment now so errors can still be
                # reported with a real status code.
                app_iter.validate_first_segment()
            except (SegmentError, ListingIterError):
                return HTTPConflict(request=req)
        resp = Response(request=req, headers=response_headers,
                        conditional_response=True,
                        app_iter=app_iter)
        return resp
    def handle_request(self, req, start_response):
        """
        Take a GET or HEAD request, and if it is for a dynamic large object
        manifest, return an appropriate response.
        Otherwise, simply pass it through.
        """
        resp_iter = self._app_call(req.environ)
        # make sure this response is for a dynamic large object manifest
        for header, value in self._response_headers:
            if (header.lower() == 'x-object-manifest'):
                close_if_possible(resp_iter)
                response = self.get_or_head_response(req, value)
                return response(req.environ, start_response)
        # Not a dynamic large object manifest; just pass it through.
        start_response(self._response_status,
                       self._response_headers,
                       self._response_exc_info)
        return resp_iter
class DynamicLargeObject(object):
    """WSGI middleware providing Dynamic Large Object (DLO) support."""
    def __init__(self, app, conf):
        self.app = app
        self.logger = get_logger(conf, log_route='dlo')
        # DLO functionality used to live in the proxy server, not middleware,
        # so let's try to go find config values in the proxy's config section
        # to ease cluster upgrades.
        self._populate_config_from_old_location(conf)
        # Hard cap on the wall-clock time a single DLO GET may take.
        self.max_get_time = int(conf.get('max_get_time', '86400'))
        self.rate_limit_after_segment = int(conf.get(
            'rate_limit_after_segment', '10'))
        self.rate_limit_segments_per_sec = int(conf.get(
            'rate_limit_segments_per_sec', '1'))
    def _populate_config_from_old_location(self, conf):
        """Copy legacy DLO settings from the proxy-server config section
        into ``conf``, but only if none of them were set on the filter
        itself (explicit filter config always wins)."""
        if ('rate_limit_after_segment' in conf or
                'rate_limit_segments_per_sec' in conf or
                'max_get_time' in conf or
                '__file__' not in conf):
            return
        proxy_conf = load_app_config(conf['__file__'])
        for setting in ('rate_limit_after_segment',
                        'rate_limit_segments_per_sec',
                        'max_get_time'):
            if setting in proxy_conf:
                conf[setting] = proxy_conf[setting]
    def __call__(self, env, start_response):
        """
        WSGI entry point
        """
        req = Request(env)
        try:
            # Only object requests (/version/account/container/object)
            # are of interest; everything else passes straight through.
            vrs, account, container, obj = req.split_path(4, 4, True)
        except ValueError:
            return self.app(env, start_response)
        if ((req.method == 'GET' or req.method == 'HEAD') and
                req.params.get('multipart-manifest') != 'get'):
            return GetContext(self, self.logger).\
                handle_request(req, start_response)
        elif req.method == 'PUT':
            error_response = self._validate_x_object_manifest_header(req)
            if error_response:
                return error_response(env, start_response)
        return self.app(env, start_response)
    def _validate_x_object_manifest_header(self, req):
        """
        Make sure that X-Object-Manifest is valid if present.

        Returns an HTTPBadRequest response when the header is malformed,
        otherwise None.
        """
        if 'X-Object-Manifest' in req.headers:
            value = req.headers['X-Object-Manifest']
            container = prefix = None
            try:
                container, prefix = value.split('/', 1)
            except ValueError:
                pass
            if not container or not prefix or '?' in value or '&' in value or \
                    prefix.startswith('/'):
                return HTTPBadRequest(
                    request=req,
                    body=('X-Object-Manifest must be in the '
                          'format container/prefix'))
def filter_factory(global_conf, **local_conf):
    """Standard paste.deploy filter factory for the DLO middleware.

    Merges the per-filter settings over the global ones and returns a
    callable that wraps a WSGI app in DynamicLargeObject.
    """
    merged_conf = global_conf.copy()
    merged_conf.update(local_conf)

    def dlo_filter(app):
        return DynamicLargeObject(app, merged_conf)

    return dlo_filter
| apache-2.0 |
alexcuellar/odoo | addons/payment_authorize/models/authorize.py | 33 | 7015 | # -*- coding: utf-'8' "-*-"
import hashlib
import hmac
import logging
import time
import urlparse
from openerp import api, fields, models
from openerp.addons.payment.models.payment_acquirer import ValidationError
from openerp.addons.payment_authorize.controllers.main import AuthorizeController
from openerp.tools.float_utils import float_compare
_logger = logging.getLogger(__name__)
class PaymentAcquirerAuthorize(models.Model):
    """Registers Authorize.Net as a payment acquirer provider."""
    _inherit = 'payment.acquirer'
    def _get_authorize_urls(self, environment):
        """ Authorize URLs: the hosted payment form endpoint for the
        production or test gateway, depending on ``environment``. """
        if environment == 'prod':
            return {'authorize_form_url': 'https://secure.authorize.net/gateway/transact.dll'}
        else:
            return {'authorize_form_url': 'https://test.authorize.net/gateway/transact.dll'}
    @api.model
    def _get_providers(self):
        """Add Authorize.Net to the list of selectable providers."""
        providers = super(PaymentAcquirerAuthorize, self)._get_providers()
        providers.append(['authorize', 'Authorize.Net'])
        return providers
    # Gateway credentials, only required when this acquirer is selected.
    authorize_login = fields.Char(string='API Login Id', required_if_provider='authorize', groups='base.group_user')
    authorize_transaction_key = fields.Char(string='API Transaction Key', required_if_provider='authorize', groups='base.group_user')
    def _authorize_generate_hashing(self, values):
        """Compute the Authorize.Net SIM transaction fingerprint:
        HMAC-MD5 over the '^'-joined request fields, keyed with the
        transaction key (format mandated by the legacy SIM API)."""
        data = '^'.join([
            values['x_login'],
            values['x_fp_sequence'],
            values['x_fp_timestamp'],
            values['x_amount'],
            values['x_currency_code']])
        return hmac.new(str(values['x_trans_key']), data, hashlib.md5).hexdigest()
    @api.multi
    def authorize_form_generate_values(self, partner_values, tx_values):
        """Build the hidden-field values for the Authorize.Net redirect
        form, including the x_fp_hash fingerprint.

        Returns the (partner_values, tx_values) pair with tx_values
        extended by the gateway-specific fields.
        """
        self.ensure_one()
        base_url = self.env['ir.config_parameter'].get_param('web.base.url')
        authorize_tx_values = dict(tx_values)
        temp_authorize_tx_values = {
            'x_login': self.authorize_login,
            'x_trans_key': self.authorize_transaction_key,
            'x_amount': str(tx_values['amount']),
            'x_show_form': 'PAYMENT_FORM',
            'x_type': 'AUTH_CAPTURE',
            'x_method': 'CC',
            # Unique-per-request sequence number for the fingerprint.
            'x_fp_sequence': '%s%s' % (self.id, int(time.time())),
            'x_version': '3.1',
            'x_relay_response': 'TRUE',
            'x_fp_timestamp': str(int(time.time())),
            'x_relay_url': '%s' % urlparse.urljoin(base_url, AuthorizeController._return_url),
            'x_cancel_url': '%s' % urlparse.urljoin(base_url, AuthorizeController._cancel_url),
            'x_currency_code': tx_values['currency'] and tx_values['currency'].name or '',
            'address': partner_values['address'],
            'city': partner_values['city'],
            'country': partner_values['country'] and partner_values['country'].name or '',
            'email': partner_values['email'],
            'zip': partner_values['zip'],
            'first_name': partner_values['first_name'],
            'last_name': partner_values['last_name'],
            'phone': partner_values['phone'],
            'state': partner_values.get('state') and partner_values['state'].name or '',
        }
        temp_authorize_tx_values['returndata'] = authorize_tx_values.pop('return_url', '')
        # The fingerprint must be computed over the final field values.
        temp_authorize_tx_values['x_fp_hash'] = self._authorize_generate_hashing(temp_authorize_tx_values)
        authorize_tx_values.update(temp_authorize_tx_values)
        return partner_values, authorize_tx_values
    @api.multi
    def authorize_get_form_action_url(self):
        """Return the gateway form URL matching this acquirer's environment."""
        self.ensure_one()
        return self._get_authorize_urls(self.environment)['authorize_form_url']
class TxAuthorize(models.Model):
    """Payment transaction extension handling Authorize.Net feedback."""
    _inherit = 'payment.transaction'
    # Gateway-assigned transaction identifier (x_trans_id).
    authorize_txnid = fields.Char(string='Transaction ID')
    # Authorize.Net x_response_code values.
    _authorize_valid_tx_status = 1
    _authorize_pending_tx_status = 4
    _authorize_cancel_tx_status = 2
    # --------------------------------------------------
    # FORM RELATED METHODS
    # --------------------------------------------------
    @api.model
    def _authorize_form_get_tx_from_data(self, data):
        """ Given a data dict coming from authorize, verify it and find the related
        transaction record.

        Raises ValidationError when mandatory fields are missing or when
        the reference matches zero or several transactions. """
        reference, trans_id, fingerprint = data.get('x_invoice_num'), data.get('x_trans_id'), data.get('x_MD5_Hash')
        if not reference or not trans_id or not fingerprint:
            error_msg = 'Authorize: received data with missing reference (%s) or trans_id (%s) or fingerprint (%s)' % (reference, trans_id, fingerprint)
            _logger.error(error_msg)
            raise ValidationError(error_msg)
        tx = self.search([('reference', '=', reference)])
        if not tx or len(tx) > 1:
            error_msg = 'Authorize: received data for reference %s' % (reference)
            if not tx:
                error_msg += '; no order found'
            else:
                error_msg += '; multiple order found'
            _logger.error(error_msg)
            raise ValidationError(error_msg)
        return tx[0]
    @api.model
    def _authorize_form_get_invalid_parameters(self, tx, data):
        """Return a list of (name, received, expected) tuples for every
        gateway field that disagrees with the stored transaction."""
        invalid_parameters = []
        if tx.authorize_txnid and data.get('x_trans_id') != tx.authorize_txnid:
            invalid_parameters.append(('Transaction Id', data.get('x_trans_id'), tx.authorize_txnid))
        # check that the paid amount matches the transaction amount
        if float_compare(float(data.get('x_amount', '0.0')), tx.amount, 2) != 0:
            invalid_parameters.append(('Amount', data.get('x_amount'), '%.2f' % tx.amount))
        return invalid_parameters
    @api.model
    def _authorize_form_validate(self, tx, data):
        """Update the transaction state from the gateway response code.

        Returns True when the response maps to done/pending/cancel,
        False when the gateway reported an error."""
        if tx.state == 'done':
            # Idempotency guard: relay responses may be replayed.
            _logger.warning('Authorize: trying to validate an already validated tx (ref %s)' % tx.reference)
            return True
        status_code = int(data.get('x_response_code', '0'))
        if status_code == self._authorize_valid_tx_status:
            tx.write({
                'state': 'done',
                'authorize_txnid': data.get('x_trans_id'),
                'acquirer_reference': data['x_invoice_num'],
            })
            return True
        elif status_code == self._authorize_pending_tx_status:
            tx.write({
                'state': 'pending',
                'authorize_txnid': data.get('x_trans_id'),
                'acquirer_reference': data['x_invoice_num'],
            })
            return True
        elif status_code == self._authorize_cancel_tx_status:
            tx.write({
                'state': 'cancel',
                'authorize_txnid': data.get('x_trans_id'),
                'acquirer_reference': data['x_invoice_num'],
            })
            return True
        else:
            error = data.get('x_response_reason_text')
            _logger.info(error)
            tx.write({
                'state': 'error',
                'state_message': error,
                'authorize_txnid': data.get('x_trans_id'),
                'acquirer_reference': data['x_invoice_num'],
            })
            return False
| agpl-3.0 |
mmohiudd/xhtml2pdf | testrender/testrender.py | 147 | 10887 | #!/usr/bin/env python
import datetime
import os
import shutil
import sys
import glob
from optparse import OptionParser
from subprocess import Popen, PIPE
from xhtml2pdf import pisa
def render_pdf(filename, output_dir, options):
if options.debug:
print 'Rendering %s' % filename
basename = os.path.basename(filename)
outname = '%s.pdf' % os.path.splitext(basename)[0]
outfile = os.path.join(output_dir, outname)
input = open(filename, 'rb')
output = open(outfile, 'wb')
result = pisa.pisaDocument(input, output, path=filename)
input.close()
output.close()
if result.err:
print 'Error rendering %s: %s' % (filename, result.err)
sys.exit(1)
return outfile
def convert_to_png(infile, output_dir, options):
    """Convert each page of a PDF to a PNG via ImageMagick convert.

    Returns the sorted list of per-page PNG file paths
    (<name>.page0.png, <name>.page1.png, ...).
    """
    if options.debug:
        print 'Converting %s to PNG' % infile
    basename = os.path.basename(infile)
    filename = os.path.splitext(basename)[0]
    # convert expands %0d to the page number in the output name.
    outname = '%s.page%%0d.png' % filename
    globname = '%s.page*.png' % filename
    outfile = os.path.join(output_dir, outname)
    # -density 150 sets the rasterization resolution (DPI).
    exec_cmd(options, options.convert_cmd, '-density', '150', infile, outfile)
    outfiles = glob.glob(os.path.join(output_dir, globname))
    outfiles.sort()
    return outfiles
def create_diff_image(srcfile1, srcfile2, output_dir, options):
    """Create a visual difference image of two PNGs with ImageMagick.

    Returns (outfile, diff_value), where diff_value is the 'ae' metric
    (absolute number of differing pixels) reported by ``compare``.
    """
    if options.debug:
        print 'Creating difference image for %s and %s' % (srcfile1, srcfile2)
    outname = '%s.diff%s' % os.path.splitext(srcfile1)
    outfile = os.path.join(output_dir, outname)
    # NOTE(review): ImageMagick's compare prints the metric on stderr and
    # exits non-zero when images differ; exec_cmd aborts on any non-zero
    # exit status, so this presumably depends on the compare build in
    # use returning 0 -- verify against the installed ImageMagick.
    _, result = exec_cmd(options, options.compare_cmd, '-metric', 'ae', srcfile1, srcfile2, '-lowlight-color', 'white', outfile)
    # assumes the metric is printed as a plain integer -- TODO confirm
    diff_value = int(result.strip())
    if diff_value > 0:
        if not options.quiet:
            print 'Image %s differs from reference, value is %i' % (srcfile1, diff_value)
    return outfile, diff_value
def copy_ref_image(srcname, output_dir, options):
    """Copy a reference image into the output directory, renamed with a
    '.ref' marker before the extension; return the new path."""
    if options.debug:
        print 'Copying reference image %s ' % srcname
    dstname = os.path.basename(srcname)
    dstfile = os.path.join(output_dir, '%s.ref%s' % os.path.splitext(dstname))
    shutil.copyfile(srcname, dstfile)
    return dstfile
def create_thumbnail(filename, options):
    """Create a 20%-scale thumbnail ('.thumb' marker in the name) of the
    given image via ImageMagick convert; return the thumbnail path."""
    thumbfile = '%s.thumb%s' % os.path.splitext(filename)
    if options.debug:
        print 'Creating thumbnail of %s' % filename
    exec_cmd(options, options.convert_cmd, '-resize', '20%', filename, thumbfile)
    return thumbfile
def render_file(filename, output_dir, ref_dir, options):
    """Render one HTML source to PDF/PNG and compare against references.

    Returns (pdf_path, pages, diff_count). ``pages`` is a list of dicts
    with the per-page image paths (and, when comparing, the reference
    and diff images plus the diff score). When --create-reference is
    active, only the images are produced and (None, None, 0) is
    returned.
    """
    if not options.quiet:
        print 'Rendering %s' % filename
    pdf = render_pdf(filename, output_dir, options)
    pngs = convert_to_png(pdf, output_dir, options)
    if options.create_reference:
        return None, None, 0
    thumbs = [create_thumbnail(png, options) for png in pngs]
    pages = [{'png': p, 'png_thumb': thumbs[i]}
             for i,p in enumerate(pngs)]
    diff_count = 0
    if not options.no_compare:
        for page in pages:
            refsrc = os.path.join(ref_dir, os.path.basename(page['png']))
            if not os.path.isfile(refsrc):
                # Missing reference: warn but keep processing the rest.
                print 'Reference image for %s not found!' % page['png']
                continue
            page['ref'] = copy_ref_image(refsrc, output_dir, options)
            page['ref_thumb'] = create_thumbnail(page['ref'], options)
            page['diff'], page['diff_value'] = \
                create_diff_image(page['png'], page['ref'],
                                  output_dir, options)
            page['diff_thumb'] = create_thumbnail(page['diff'], options)
            if page['diff_value']:
                diff_count += 1
    return pdf, pages, diff_count
def exec_cmd(options, *args):
    """Run an external command and return its (stdout, stderr) pair.

    Aborts the whole test run via sys.exit(1) if the command exits with
    a non-zero status.
    """
    if options.debug:
        print 'Executing %s' % ' '.join(args)
    proc = Popen(args, stdout=PIPE, stderr=PIPE)
    result = proc.communicate()
    if options.debug:
        print result[0], result[1]
    if proc.returncode:
        print 'exec error (%i): %s' % (proc.returncode, result[1])
        sys.exit(1)
    return result[0], result[1]
def create_html_file(results, template_file, output_dir, options):
    """Build the index.html overview page from the render results.

    Replaces the %%TITLE%% and %%RESULTS%% placeholders in the template
    with a generated heading and per-file/per-page comparison markup;
    returns the path of the written HTML file. With --only-errors, files
    and pages without differences are skipped.
    """
    html = []
    for pdf, pages, diff_count in results:
        if options.only_errors and not diff_count:
            continue
        pdfname = os.path.basename(pdf)
        html.append('<div class="result">\n'
                    '<h2><a href="%(pdf)s" class="pdf-file">%(pdf)s</a></h2>\n'
                    % {'pdf': pdfname})
        for i, page in enumerate(pages):
            # Keep only the file-path entries; diff_value is handled
            # separately because it is an integer, not a path.
            vars = dict(((k, os.path.basename(v)) for k,v in page.items()
                         if k != 'diff_value'))
            vars['page'] = i+1
            if 'diff' in page:
                vars['diff_value'] = page['diff_value']
                if vars['diff_value']:
                    vars['class'] = 'result-page-diff error'
                else:
                    if options.only_errors:
                        continue
                    vars['class'] = 'result-page-diff'
                html.append('<div class="%(class)s">\n'
                            '<h3>Page %(page)i</h3>\n'
                            '<div class="result-img">\n'
                            '<div class="result-type">Difference '
                            '(Score %(diff_value)i)</div>\n'
                            '<a href="%(diff)s" class="diff-file">'
                            '<img src="%(diff_thumb)s"/></a>\n'
                            '</div>\n'
                            '<div class="result-img">\n'
                            '<div class="result-type">Rendered</div>\n'
                            '<a href="%(png)s" class="png-file">'
                            '<img src="%(png_thumb)s"/></a>\n'
                            '</div>\n'
                            '<div class="result-img">\n'
                            '<div class="result-type">Reference</div>\n'
                            '<a href="%(ref)s" class="ref-file">'
                            '<img src="%(ref_thumb)s"/></a>\n'
                            '</div>\n'
                            '</div>\n' % vars)
            else:
                html.append('<div class="result-page">\n'
                            '<h3>Page %(page)i</h3>\n'
                            '<div class="result-img">\n'
                            '<a href="%(png)s" class="png-file">'
                            '<img src="%(png_thumb)s"/></a>\n'
                            '</div>\n'
                            '</div>\n' % vars)
        html.append('</div>\n\n')
    now = datetime.datetime.now()
    title = 'xhtml2pdf Test Rendering Results, %s' % now.strftime('%c')
    template = open(template_file, 'rb').read()
    template = template.replace('%%TITLE%%', title)
    template = template.replace('%%RESULTS%%', '\n'.join(html))
    htmlfile = os.path.join(output_dir, 'index.html')
    outfile = open(htmlfile, 'wb')
    outfile.write(template)
    outfile.close()
    return htmlfile
def main():
    """Command-line entry point: render the sources (all *.html in the
    source dir, or the ones named on the command line), compare them to
    the reference images, write the HTML report, and exit with status 1
    when any page differs."""
    options, args = parser.parse_args()
    base_dir = os.path.abspath(os.path.join(__file__, os.pardir))
    source_dir = os.path.join(base_dir, options.source_dir)
    if options.create_reference is not None:
        # In reference mode the images are written to the given
        # directory instead of the normal output directory.
        output_dir = os.path.join(base_dir, options.create_reference)
    else:
        output_dir = os.path.join(base_dir, options.output_dir)
    template_file = os.path.join(base_dir, options.html_template)
    ref_dir = os.path.join(base_dir, options.ref_dir)
    # CAREFUL: the output directory is wiped before each run.
    if os.path.isdir(output_dir):
        shutil.rmtree(output_dir)
    os.makedirs(output_dir)
    results = []
    diff_count = 0
    if len(args) == 0:
        files = glob.glob(os.path.join(source_dir, '*.html'))
    else:
        files = [os.path.join(source_dir, arg) for arg in args]
    for filename in files:
        pdf, pages, diff = render_file(filename, output_dir, ref_dir, options)
        diff_count += diff
        results.append((pdf, pages, diff))
    num = len(results)
    if options.create_reference is not None:
        print 'Created reference for %i file%s' % (num, '' if num == 1 else 's')
    else:
        htmlfile = create_html_file(results, template_file, output_dir, options)
        if not options.quiet:
            print 'Rendered %i file%s' % (num, '' if num == 1 else 's')
            print '%i file%s differ%s from reference' % \
                  (diff_count, diff_count != 1 and 's' or '',
                   diff_count == 1 and 's' or '')
            print 'Check %s for results' % htmlfile
        if diff_count:
            sys.exit(1)
# Command-line interface definition.  Zero or more positional arguments name
# individual source files (relative to --source-dir); with no arguments every
# *.html file in the source directory is rendered.
parser = OptionParser(
    usage='rendertest.py [options] [source_file] [source_file] ...',
    description='Renders a single html source file or all files in the data '
                'directory, converts them to PNG format and prepares a result '
                'HTML file for comparing the output with an expected result')
# Where the input .html files live.
parser.add_option('-s', '--source-dir', dest='source_dir', default='data/source',
                  help=('Path to directory containing the html source files'))
# Destination for rendered PDFs/PNGs and the report; wiped on every run.
parser.add_option('-o', '--output-dir', dest='output_dir', default='output',
                  help='Path to directory for output files. CAREFUL: this '
                       'directory will be deleted and recreated before rendering!')
# Reference images the fresh renders are compared against.
parser.add_option('-r', '--ref-dir', dest='ref_dir', default='data/reference',
                  help='Path to directory containing the reference images '
                       'to compare the result with')
parser.add_option('-t', '--template', dest='html_template',
                  default='data/template.html', help='Name of HTML template file')
# Report filtering / verbosity.
parser.add_option('-e', '--only-errors', dest='only_errors', action='store_true',
                  default=False, help='Only include images in HTML file which '
                                      'differ from reference')
parser.add_option('-q', '--quiet', dest='quiet', action='store_true',
                  default=False, help='Try to be quiet')
# Skip the comparison step entirely (render-only mode).
parser.add_option('--no-compare', dest='no_compare', action='store_true',
                  default=False, help='Do not compare with reference image, '
                                      'only render to png')
# Reference-creation mode: render into DIR instead of comparing.
parser.add_option('-c', '--create-reference', dest='create_reference',
                  metavar='DIR',
                  default=None, help='Do not output anything, render source to '
                                     'specified directory for reference. CAREFUL: this directory '
                                     'will be deleted and recreated before rendering!')
parser.add_option('--debug', dest='debug', action='store_true',
                  default=False, help='More output for debugging')
# External ImageMagick tools used for PNG conversion and image diffing.
parser.add_option('--convert-cmd', dest='convert_cmd', default='/usr/bin/convert',
                  help='Path to ImageMagick "convert" tool')
parser.add_option('--compare-cmd', dest='compare_cmd', default='/usr/bin/compare',
                  help='Path to ImageMagick "compare" tool')
# Run only when executed as a script, not when imported as a module.
if __name__ == '__main__':
    main()
| apache-2.0 |
yordan-desta/QgisIns | python/ext-libs/pygments/lexers/_mapping.py | 68 | 36995 | # -*- coding: utf-8 -*-
"""
pygments.lexers._mapping
~~~~~~~~~~~~~~~~~~~~~~~~
    Lexer mapping definitions. This file is generated by itself. Every time
    you change something on a builtin lexer definition, run this script from
the lexers folder to update it.
Do not alter the LEXERS dictionary by hand.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
LEXERS = {
'ABAPLexer': ('pygments.lexers.other', 'ABAP', ('abap',), ('*.abap',), ('text/x-abap',)),
'ActionScript3Lexer': ('pygments.lexers.web', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')),
'ActionScriptLexer': ('pygments.lexers.web', 'ActionScript', ('as', 'actionscript'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')),
'AdaLexer': ('pygments.lexers.compiled', 'Ada', ('ada', 'ada95ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)),
'AntlrActionScriptLexer': ('pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-as', 'antlr-actionscript'), ('*.G', '*.g'), ()),
'AntlrCSharpLexer': ('pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()),
'AntlrCppLexer': ('pygments.lexers.parsers', 'ANTLR With CPP Target', ('antlr-cpp',), ('*.G', '*.g'), ()),
'AntlrJavaLexer': ('pygments.lexers.parsers', 'ANTLR With Java Target', ('antlr-java',), ('*.G', '*.g'), ()),
'AntlrLexer': ('pygments.lexers.parsers', 'ANTLR', ('antlr',), (), ()),
'AntlrObjectiveCLexer': ('pygments.lexers.parsers', 'ANTLR With ObjectiveC Target', ('antlr-objc',), ('*.G', '*.g'), ()),
'AntlrPerlLexer': ('pygments.lexers.parsers', 'ANTLR With Perl Target', ('antlr-perl',), ('*.G', '*.g'), ()),
'AntlrPythonLexer': ('pygments.lexers.parsers', 'ANTLR With Python Target', ('antlr-python',), ('*.G', '*.g'), ()),
'AntlrRubyLexer': ('pygments.lexers.parsers', 'ANTLR With Ruby Target', ('antlr-ruby', 'antlr-rb'), ('*.G', '*.g'), ()),
'ApacheConfLexer': ('pygments.lexers.text', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)),
'AppleScriptLexer': ('pygments.lexers.other', 'AppleScript', ('applescript',), ('*.applescript',), ()),
'AspectJLexer': ('pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)),
'AsymptoteLexer': ('pygments.lexers.other', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)),
'AutoItLexer': ('pygments.lexers.other', 'AutoIt', ('autoit', 'Autoit'), ('*.au3',), ('text/x-autoit',)),
'AutohotkeyLexer': ('pygments.lexers.other', 'autohotkey', ('ahk',), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)),
'AwkLexer': ('pygments.lexers.other', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)),
'BBCodeLexer': ('pygments.lexers.text', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
'BaseMakefileLexer': ('pygments.lexers.text', 'Base Makefile', ('basemake',), (), ()),
'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '.bashrc', 'bashrc', '.bash_*', 'bash_*'), ('application/x-sh', 'application/x-shellscript')),
'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console',), ('*.sh-session',), ('application/x-shell-session',)),
'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('bat',), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
'BefungeLexer': ('pygments.lexers.other', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
'BlitzMaxLexer': ('pygments.lexers.compiled', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)),
'BooLexer': ('pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)),
'BrainfuckLexer': ('pygments.lexers.other', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)),
'BroLexer': ('pygments.lexers.other', 'Bro', ('bro',), ('*.bro',), ()),
'BugsLexer': ('pygments.lexers.math', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()),
'CLexer': ('pygments.lexers.compiled', 'C', ('c',), ('*.c', '*.h', '*.idc'), ('text/x-chdr', 'text/x-csrc')),
'CMakeLexer': ('pygments.lexers.text', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)),
'CObjdumpLexer': ('pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)),
'CSharpAspxLexer': ('pygments.lexers.dotnet', 'aspx-cs', ('aspx-cs',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
'CSharpLexer': ('pygments.lexers.dotnet', 'C#', ('csharp', 'c#'), ('*.cs',), ('text/x-csharp',)),
'Ca65Lexer': ('pygments.lexers.asm', 'ca65', ('ca65',), ('*.s',), ()),
'CbmBasicV2Lexer': ('pygments.lexers.other', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()),
'CeylonLexer': ('pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)),
'Cfengine3Lexer': ('pygments.lexers.other', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()),
'CheetahHtmlLexer': ('pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire'), (), ('text/html+cheetah', 'text/html+spitfire')),
'CheetahJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Cheetah', ('js+cheetah', 'javascript+cheetah', 'js+spitfire', 'javascript+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')),
'CheetahLexer': ('pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')),
'CheetahXmlLexer': ('pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')),
'ClojureLexer': ('pygments.lexers.jvm', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')),
'CobolFreeformatLexer': ('pygments.lexers.compiled', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()),
'CobolLexer': ('pygments.lexers.compiled', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)),
'CoffeeScriptLexer': ('pygments.lexers.web', 'CoffeeScript', ('coffee-script', 'coffeescript'), ('*.coffee',), ('text/coffeescript',)),
'ColdfusionHtmlLexer': ('pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml', '*.cfc'), ('application/x-coldfusion',)),
'ColdfusionLexer': ('pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()),
'CommonLispLexer': ('pygments.lexers.functional', 'Common Lisp', ('common-lisp', 'cl'), ('*.cl', '*.lisp', '*.el'), ('text/x-common-lisp',)),
'CoqLexer': ('pygments.lexers.functional', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)),
'CppLexer': ('pygments.lexers.compiled', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx', '*.C', '*.H', '*.cp', '*.CPP'), ('text/x-c++hdr', 'text/x-c++src')),
'CppObjdumpLexer': ('pygments.lexers.asm', 'cpp-objdump', ('cpp-objdump', 'c++-objdumb', 'cxx-objdump'), ('*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump'), ('text/x-cpp-objdump',)),
'CrocLexer': ('pygments.lexers.agile', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)),
'CssDjangoLexer': ('pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), (), ('text/css+django', 'text/css+jinja')),
'CssErbLexer': ('pygments.lexers.templates', 'CSS+Ruby', ('css+erb', 'css+ruby'), (), ('text/css+ruby',)),
'CssGenshiLexer': ('pygments.lexers.templates', 'CSS+Genshi Text', ('css+genshitext', 'css+genshi'), (), ('text/css+genshi',)),
'CssLexer': ('pygments.lexers.web', 'CSS', ('css',), ('*.css',), ('text/css',)),
'CssPhpLexer': ('pygments.lexers.templates', 'CSS+PHP', ('css+php',), (), ('text/css+php',)),
'CssSmartyLexer': ('pygments.lexers.templates', 'CSS+Smarty', ('css+smarty',), (), ('text/css+smarty',)),
'CudaLexer': ('pygments.lexers.compiled', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)),
'CythonLexer': ('pygments.lexers.compiled', 'Cython', ('cython', 'pyx'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')),
'DLexer': ('pygments.lexers.compiled', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)),
'DObjdumpLexer': ('pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)),
'DarcsPatchLexer': ('pygments.lexers.text', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()),
'DartLexer': ('pygments.lexers.web', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)),
'DebianControlLexer': ('pygments.lexers.text', 'Debian Control file', ('control',), ('control',), ()),
'DelphiLexer': ('pygments.lexers.compiled', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas',), ('text/x-pascal',)),
'DgLexer': ('pygments.lexers.agile', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)),
'DiffLexer': ('pygments.lexers.text', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
'DjangoLexer': ('pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')),
'DtdLexer': ('pygments.lexers.web', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)),
'DuelLexer': ('pygments.lexers.web', 'Duel', ('duel', 'Duel Engine', 'Duel View', 'JBST', 'jbst', 'JsonML+BST'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')),
'DylanConsoleLexer': ('pygments.lexers.compiled', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)),
'DylanLexer': ('pygments.lexers.compiled', 'Dylan', ('dylan',), ('*.dylan', '*.dyl', '*.intr'), ('text/x-dylan',)),
'DylanLidLexer': ('pygments.lexers.compiled', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)),
'ECLLexer': ('pygments.lexers.other', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)),
'ECLexer': ('pygments.lexers.compiled', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')),
'ElixirConsoleLexer': ('pygments.lexers.functional', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)),
'ElixirLexer': ('pygments.lexers.functional', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.exs'), ('text/x-elixir',)),
'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)),
'ErlangLexer': ('pygments.lexers.functional', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)),
'ErlangShellLexer': ('pygments.lexers.functional', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)),
'EvoqueHtmlLexer': ('pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)),
'EvoqueLexer': ('pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)),
'EvoqueXmlLexer': ('pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)),
'FSharpLexer': ('pygments.lexers.dotnet', 'FSharp', ('fsharp',), ('*.fs', '*.fsi'), ('text/x-fsharp',)),
'FactorLexer': ('pygments.lexers.agile', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)),
'FancyLexer': ('pygments.lexers.agile', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)),
'FantomLexer': ('pygments.lexers.compiled', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)),
'FelixLexer': ('pygments.lexers.compiled', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)),
'FortranLexer': ('pygments.lexers.compiled', 'Fortran', ('fortran',), ('*.f', '*.f90', '*.F', '*.F90'), ('text/x-fortran',)),
'FoxProLexer': ('pygments.lexers.foxpro', 'FoxPro', ('Clipper', 'XBase'), ('*.PRG', '*.prg'), ()),
'GLShaderLexer': ('pygments.lexers.compiled', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)),
'GasLexer': ('pygments.lexers.asm', 'GAS', ('gas',), ('*.s', '*.S'), ('text/x-gas',)),
'GenshiLexer': ('pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')),
'GenshiTextLexer': ('pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')),
'GettextLexer': ('pygments.lexers.text', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')),
'GherkinLexer': ('pygments.lexers.other', 'Gherkin', ('Cucumber', 'cucumber', 'Gherkin', 'gherkin'), ('*.feature',), ('text/x-gherkin',)),
'GnuplotLexer': ('pygments.lexers.other', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)),
'GoLexer': ('pygments.lexers.compiled', 'Go', ('go',), ('*.go',), ('text/x-gosrc',)),
'GoodDataCLLexer': ('pygments.lexers.other', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)),
'GosuLexer': ('pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)),
'GosuTemplateLexer': ('pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)),
'GroffLexer': ('pygments.lexers.text', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')),
'GroovyLexer': ('pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy',), ('text/x-groovy',)),
'HamlLexer': ('pygments.lexers.web', 'Haml', ('haml', 'HAML'), ('*.haml',), ('text/x-haml',)),
'HaskellLexer': ('pygments.lexers.functional', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)),
'HaxeLexer': ('pygments.lexers.web', 'haXe', ('hx', 'haXe'), ('*.hx',), ('text/haxe',)),
'HtmlDjangoLexer': ('pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja'), (), ('text/html+django', 'text/html+jinja')),
'HtmlGenshiLexer': ('pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)),
'HtmlLexer': ('pygments.lexers.web', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')),
'HtmlPhpLexer': ('pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',), ('application/x-php', 'application/x-httpd-php', 'application/x-httpd-php3', 'application/x-httpd-php4', 'application/x-httpd-php5')),
'HtmlSmartyLexer': ('pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)),
'HttpLexer': ('pygments.lexers.text', 'HTTP', ('http',), (), ()),
'HxmlLexer': ('pygments.lexers.text', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()),
'HybrisLexer': ('pygments.lexers.other', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')),
'IDLLexer': ('pygments.lexers.math', 'IDL', ('idl',), ('*.pro',), ('text/idl',)),
'IniLexer': ('pygments.lexers.text', 'INI', ('ini', 'cfg'), ('*.ini', '*.cfg'), ('text/x-ini',)),
'IoLexer': ('pygments.lexers.agile', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)),
'IokeLexer': ('pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)),
'IrcLogsLexer': ('pygments.lexers.text', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)),
'JadeLexer': ('pygments.lexers.web', 'Jade', ('jade', 'JADE'), ('*.jade',), ('text/x-jade',)),
'JagsLexer': ('pygments.lexers.math', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()),
'JavaLexer': ('pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)),
'JavascriptDjangoLexer': ('pygments.lexers.templates', 'JavaScript+Django/Jinja', ('js+django', 'javascript+django', 'js+jinja', 'javascript+jinja'), (), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')),
'JavascriptErbLexer': ('pygments.lexers.templates', 'JavaScript+Ruby', ('js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')),
'JavascriptGenshiLexer': ('pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')),
'JavascriptLexer': ('pygments.lexers.web', 'JavaScript', ('js', 'javascript'), ('*.js',), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')),
'JavascriptPhpLexer': ('pygments.lexers.templates', 'JavaScript+PHP', ('js+php', 'javascript+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')),
'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')),
'JsonLexer': ('pygments.lexers.web', 'JSON', ('json',), ('*.json',), ('application/json',)),
'JspLexer': ('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)),
'JuliaConsoleLexer': ('pygments.lexers.math', 'Julia console', ('jlcon',), (), ()),
'JuliaLexer': ('pygments.lexers.math', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')),
'KconfigLexer': ('pygments.lexers.other', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)),
'KokaLexer': ('pygments.lexers.functional', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)),
'KotlinLexer': ('pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt',), ('text/x-kotlin',)),
'LassoCssLexer': ('pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)),
'LassoHtmlLexer': ('pygments.lexers.templates', 'HTML+Lasso', ('html+lasso',), (), ('text/html+lasso', 'application/x-httpd-lasso', 'application/x-httpd-lasso[89]')),
'LassoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Lasso', ('js+lasso', 'javascript+lasso'), (), ('application/x-javascript+lasso', 'text/x-javascript+lasso', 'text/javascript+lasso')),
'LassoLexer': ('pygments.lexers.web', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)),
'LassoXmlLexer': ('pygments.lexers.templates', 'XML+Lasso', ('xml+lasso',), (), ('application/xml+lasso',)),
'LighttpdConfLexer': ('pygments.lexers.text', 'Lighttpd configuration file', ('lighty', 'lighttpd'), (), ('text/x-lighttpd-conf',)),
'LiterateHaskellLexer': ('pygments.lexers.functional', 'Literate Haskell', ('lhs', 'literate-haskell'), ('*.lhs',), ('text/x-literate-haskell',)),
'LiveScriptLexer': ('pygments.lexers.web', 'LiveScript', ('live-script', 'livescript'), ('*.ls',), ('text/livescript',)),
'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)),
'LogosLexer': ('pygments.lexers.compiled', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)),
'LogtalkLexer': ('pygments.lexers.other', 'Logtalk', ('logtalk',), ('*.lgt',), ('text/x-logtalk',)),
'LuaLexer': ('pygments.lexers.agile', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')),
'MOOCodeLexer': ('pygments.lexers.other', 'MOOCode', ('moocode',), ('*.moo',), ('text/x-moocode',)),
'MakefileLexer': ('pygments.lexers.text', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)),
'MakoCssLexer': ('pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)),
'MakoHtmlLexer': ('pygments.lexers.templates', 'HTML+Mako', ('html+mako',), (), ('text/html+mako',)),
'MakoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Mako', ('js+mako', 'javascript+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 'text/javascript+mako')),
'MakoLexer': ('pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)),
'MakoXmlLexer': ('pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)),
'MaqlLexer': ('pygments.lexers.other', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')),
'MasonLexer': ('pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)),
'MatlabLexer': ('pygments.lexers.math', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)),
'MatlabSessionLexer': ('pygments.lexers.math', 'Matlab session', ('matlabsession',), (), ()),
'MiniDLexer': ('pygments.lexers.agile', 'MiniD', ('minid',), ('*.md',), ('text/x-minidsrc',)),
'ModelicaLexer': ('pygments.lexers.other', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)),
'Modula2Lexer': ('pygments.lexers.compiled', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)),
'MoinWikiLexer': ('pygments.lexers.text', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)),
'MonkeyLexer': ('pygments.lexers.compiled', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)),
'MoonScriptLexer': ('pygments.lexers.agile', 'MoonScript', ('moon', 'moonscript'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')),
'MscgenLexer': ('pygments.lexers.other', 'Mscgen', ('mscgen', 'msc'), ('*.msc',), ()),
'MuPADLexer': ('pygments.lexers.math', 'MuPAD', ('mupad',), ('*.mu',), ()),
'MxmlLexer': ('pygments.lexers.web', 'MXML', ('mxml',), ('*.mxml',), ()),
'MySqlLexer': ('pygments.lexers.sql', 'MySQL', ('mysql',), (), ('text/x-mysql',)),
'MyghtyCssLexer': ('pygments.lexers.templates', 'CSS+Myghty', ('css+myghty',), (), ('text/css+myghty',)),
'MyghtyHtmlLexer': ('pygments.lexers.templates', 'HTML+Myghty', ('html+myghty',), (), ('text/html+myghty',)),
'MyghtyJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Myghty', ('js+myghty', 'javascript+myghty'), (), ('application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy')),
'MyghtyLexer': ('pygments.lexers.templates', 'Myghty', ('myghty',), ('*.myt', 'autodelegate'), ('application/x-myghty',)),
'MyghtyXmlLexer': ('pygments.lexers.templates', 'XML+Myghty', ('xml+myghty',), (), ('application/xml+myghty',)),
'NSISLexer': ('pygments.lexers.other', 'NSIS', ('nsis', 'nsi', 'nsh'), ('*.nsi', '*.nsh'), ('text/x-nsis',)),
'NasmLexer': ('pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', '*.ASM'), ('text/x-nasm',)),
'NemerleLexer': ('pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)),
'NewLispLexer': ('pygments.lexers.functional', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl'), ('text/x-newlisp', 'application/x-newlisp')),
'NewspeakLexer': ('pygments.lexers.other', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)),
'NginxConfLexer': ('pygments.lexers.text', 'Nginx configuration file', ('nginx',), (), ('text/x-nginx-conf',)),
'NimrodLexer': ('pygments.lexers.compiled', 'Nimrod', ('nimrod', 'nim'), ('*.nim', '*.nimrod'), ('text/x-nimrod',)),
'NumPyLexer': ('pygments.lexers.math', 'NumPy', ('numpy',), (), ()),
'ObjdumpLexer': ('pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)),
'ObjectiveCLexer': ('pygments.lexers.compiled', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m', '*.h'), ('text/x-objective-c',)),
'ObjectiveCppLexer': ('pygments.lexers.compiled', 'Objective-C++', ('objective-c++', 'objectivec++', 'obj-c++', 'objc++'), ('*.mm', '*.hh'), ('text/x-objective-c++',)),
'ObjectiveJLexer': ('pygments.lexers.web', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)),
'OcamlLexer': ('pygments.lexers.functional', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)),
'OctaveLexer': ('pygments.lexers.math', 'Octave', ('octave',), ('*.m',), ('text/octave',)),
'OocLexer': ('pygments.lexers.compiled', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)),
'OpaLexer': ('pygments.lexers.functional', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)),
'OpenEdgeLexer': ('pygments.lexers.other', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')),
'PerlLexer': ('pygments.lexers.agile', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm'), ('text/x-perl', 'application/x-perl')),
'PhpLexer': ('pygments.lexers.web', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)),
'PlPgsqlLexer': ('pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)),
'PostScriptLexer': ('pygments.lexers.other', 'PostScript', ('postscript',), ('*.ps', '*.eps'), ('application/postscript',)),
'PostgresConsoleLexer': ('pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)),
'PostgresLexer': ('pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)),
'PovrayLexer': ('pygments.lexers.other', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)),
'PowerShellLexer': ('pygments.lexers.shell', 'PowerShell', ('powershell', 'posh', 'ps1'), ('*.ps1',), ('text/x-powershell',)),
'PrologLexer': ('pygments.lexers.compiled', 'Prolog', ('prolog',), ('*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
'PropertiesLexer': ('pygments.lexers.text', 'Properties', ('properties',), ('*.properties',), ('text/x-java-properties',)),
'ProtoBufLexer': ('pygments.lexers.other', 'Protocol Buffer', ('protobuf',), ('*.proto',), ()),
'PuppetLexer': ('pygments.lexers.other', 'Puppet', ('puppet',), ('*.pp',), ()),
'PyPyLogLexer': ('pygments.lexers.text', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)),
'Python3Lexer': ('pygments.lexers.agile', 'Python 3', ('python3', 'py3'), (), ('text/x-python3', 'application/x-python3')),
'Python3TracebackLexer': ('pygments.lexers.agile', 'Python 3.0 Traceback', ('py3tb',), ('*.py3tb',), ('text/x-python3-traceback',)),
'PythonConsoleLexer': ('pygments.lexers.agile', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
'PythonLexer': ('pygments.lexers.agile', 'Python', ('python', 'py', 'sage'), ('*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage'), ('text/x-python', 'application/x-python')),
'PythonTracebackLexer': ('pygments.lexers.agile', 'Python Traceback', ('pytb',), ('*.pytb',), ('text/x-python-traceback',)),
'QmlLexer': ('pygments.lexers.web', 'QML', ('qml', 'Qt Meta Language', 'Qt modeling Language'), ('*.qml',), ('application/x-qml',)),
'RConsoleLexer': ('pygments.lexers.math', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()),
'RPMSpecLexer': ('pygments.lexers.other', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)),
'RacketLexer': ('pygments.lexers.functional', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktl'), ('text/x-racket', 'application/x-racket')),
'RagelCLexer': ('pygments.lexers.parsers', 'Ragel in C Host', ('ragel-c',), ('*.rl',), ()),
'RagelCppLexer': ('pygments.lexers.parsers', 'Ragel in CPP Host', ('ragel-cpp',), ('*.rl',), ()),
'RagelDLexer': ('pygments.lexers.parsers', 'Ragel in D Host', ('ragel-d',), ('*.rl',), ()),
'RagelEmbeddedLexer': ('pygments.lexers.parsers', 'Embedded Ragel', ('ragel-em',), ('*.rl',), ()),
'RagelJavaLexer': ('pygments.lexers.parsers', 'Ragel in Java Host', ('ragel-java',), ('*.rl',), ()),
'RagelLexer': ('pygments.lexers.parsers', 'Ragel', ('ragel',), (), ()),
'RagelObjectiveCLexer': ('pygments.lexers.parsers', 'Ragel in Objective C Host', ('ragel-objc',), ('*.rl',), ()),
'RagelRubyLexer': ('pygments.lexers.parsers', 'Ragel in Ruby Host', ('ragel-ruby', 'ragel-rb'), ('*.rl',), ()),
'RawTokenLexer': ('pygments.lexers.special', 'Raw token data', ('raw',), (), ('application/x-pygments-tokens',)),
'RdLexer': ('pygments.lexers.math', 'Rd', ('rd',), ('*.Rd',), ('text/x-r-doc',)),
'RebolLexer': ('pygments.lexers.other', 'REBOL', ('rebol',), ('*.r', '*.r3'), ('text/x-rebol',)),
'RedcodeLexer': ('pygments.lexers.other', 'Redcode', ('redcode',), ('*.cw',), ()),
'RegeditLexer': ('pygments.lexers.text', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)),
'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)),
'RobotFrameworkLexer': ('pygments.lexers.other', 'RobotFramework', ('RobotFramework', 'robotframework'), ('*.txt', '*.robot'), ('text/x-robotframework',)),
'RstLexer': ('pygments.lexers.text', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')),
'RubyConsoleLexer': ('pygments.lexers.agile', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
'RubyLexer': ('pygments.lexers.agile', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby'), ('text/x-ruby', 'application/x-ruby')),
'RustLexer': ('pygments.lexers.compiled', 'Rust', ('rust',), ('*.rs', '*.rc'), ('text/x-rustsrc',)),
'SLexer': ('pygments.lexers.math', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')),
'SMLLexer': ('pygments.lexers.functional', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')),
'SassLexer': ('pygments.lexers.web', 'Sass', ('sass', 'SASS'), ('*.sass',), ('text/x-sass',)),
'ScalaLexer': ('pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)),
'ScamlLexer': ('pygments.lexers.web', 'Scaml', ('scaml', 'SCAML'), ('*.scaml',), ('text/x-scaml',)),
'SchemeLexer': ('pygments.lexers.functional', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')),
'ScilabLexer': ('pygments.lexers.math', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)),
'ScssLexer': ('pygments.lexers.web', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)),
'ShellSessionLexer': ('pygments.lexers.shell', 'Shell Session', ('shell-session',), ('*.shell-session',), ('application/x-sh-session',)),
'SmaliLexer': ('pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)),
'SmalltalkLexer': ('pygments.lexers.other', 'Smalltalk', ('smalltalk', 'squeak'), ('*.st',), ('text/x-smalltalk',)),
'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
'SnobolLexer': ('pygments.lexers.other', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
'SourcePawnLexer': ('pygments.lexers.other', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)),
'SourcesListLexer': ('pygments.lexers.text', 'Debian Sourcelist', ('sourceslist', 'sources.list'), ('sources.list',), ()),
'SqlLexer': ('pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)),
'SqliteConsoleLexer': ('pygments.lexers.sql', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)),
'SquidConfLexer': ('pygments.lexers.text', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)),
'SspLexer': ('pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)),
'StanLexer': ('pygments.lexers.math', 'Stan', ('stan',), ('*.stan',), ()),
'SystemVerilogLexer': ('pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)),
'TclLexer': ('pygments.lexers.agile', 'Tcl', ('tcl',), ('*.tcl',), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')),
'TcshLexer': ('pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)),
'TeaTemplateLexer': ('pygments.lexers.templates', 'Tea', ('tea',), ('*.tea',), ('text/x-tea',)),
'TexLexer': ('pygments.lexers.text', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')),
'TextLexer': ('pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)),
'TreetopLexer': ('pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()),
'TypeScriptLexer': ('pygments.lexers.web', 'TypeScript', ('ts',), ('*.ts',), ('text/x-typescript',)),
'UrbiscriptLexer': ('pygments.lexers.other', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)),
'VGLLexer': ('pygments.lexers.other', 'VGL', ('vgl',), ('*.rpf',), ()),
'ValaLexer': ('pygments.lexers.compiled', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)),
'VbNetAspxLexer': ('pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
'VbNetLexer': ('pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')),
'VelocityHtmlLexer': ('pygments.lexers.templates', 'HTML+Velocity', ('html+velocity',), (), ('text/html+velocity',)),
'VelocityLexer': ('pygments.lexers.templates', 'Velocity', ('velocity',), ('*.vm', '*.fhtml'), ()),
'VelocityXmlLexer': ('pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)),
'VerilogLexer': ('pygments.lexers.hdl', 'verilog', ('verilog', 'v'), ('*.v',), ('text/x-verilog',)),
'VhdlLexer': ('pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)),
'VimLexer': ('pygments.lexers.text', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)),
'XQueryLexer': ('pygments.lexers.web', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')),
'XmlDjangoLexer': ('pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')),
'XmlErbLexer': ('pygments.lexers.templates', 'XML+Ruby', ('xml+erb', 'xml+ruby'), (), ('application/xml+ruby',)),
'XmlLexer': ('pygments.lexers.web', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')),
'XmlPhpLexer': ('pygments.lexers.templates', 'XML+PHP', ('xml+php',), (), ('application/xml+php',)),
'XmlSmartyLexer': ('pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), (), ('application/xml+smarty',)),
'XsltLexer': ('pygments.lexers.web', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 'application/xslt+xml')),
'XtendLexer': ('pygments.lexers.jvm', 'Xtend', ('xtend',), ('*.xtend',), ('text/x-xtend',)),
'YamlLexer': ('pygments.lexers.text', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)),
}
if __name__ == '__main__':
    # Self-regeneration script (Python 2: note the `print` statements below).
    # Scans sibling lexer modules, rebuilds the LEXERS mapping, and rewrites
    # THIS file in place, keeping the header and this __main__ footer intact.
    import sys
    import os
    # lookup lexers
    found_lexers = []
    sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
    for filename in os.listdir('.'):
        if filename.endswith('.py') and not filename.startswith('_'):
            module_name = 'pygments.lexers.%s' % filename[:-3]
            print module_name
            module = __import__(module_name, None, None, [''])
            for lexer_name in module.__all__:
                lexer = getattr(module, lexer_name)
                # Each entry is pre-formatted as "name: (module, name, aliases,
                # filenames, mimetypes)" ready to be joined into source text.
                found_lexers.append(
                    '%r: %r' % (lexer_name,
                                (module_name,
                                 lexer.name,
                                 tuple(lexer.aliases),
                                 tuple(lexer.filenames),
                                 tuple(lexer.mimetypes))))
    # sort them, that should make the diff files for svn smaller
    found_lexers.sort()
    # extract useful sourcecode from this file
    f = open(__file__)
    try:
        content = f.read()
    finally:
        f.close()
    # Everything before "LEXERS = {" and from the __main__ guard onwards is
    # preserved verbatim; only the mapping literal between them is replaced.
    header = content[:content.find('LEXERS = {')]
    footer = content[content.find("if __name__ == '__main__':"):]
    # write new file
    f = open(__file__, 'wb')
    f.write(header)
    f.write('LEXERS = {\n    %s,\n}\n\n' % ',\n    '.join(found_lexers))
    f.write(footer)
    f.close()
| gpl-2.0 |
tjsavage/djangononrel-starter | django/contrib/messages/storage/fallback.py | 627 | 2171 | from django.contrib.messages.storage.base import BaseStorage
from django.contrib.messages.storage.cookie import CookieStorage
from django.contrib.messages.storage.session import SessionStorage
class FallbackStorage(BaseStorage):
    """
    Tries to store all messages in the first backend, storing any unstored
    messages in each subsequent backend.
    """
    # Priority order: cookie first (cheap, no server state), session second.
    storage_classes = (CookieStorage, SessionStorage)
    def __init__(self, *args, **kwargs):
        super(FallbackStorage, self).__init__(*args, **kwargs)
        # One instance of each backend class, in priority order.
        self.storages = [storage_class(*args, **kwargs)
                         for storage_class in self.storage_classes]
        # Backends that actually yielded messages on retrieval; they must be
        # flushed on store even when they receive no new messages.
        self._used_storages = set()
    def _get(self, *args, **kwargs):
        """
        Gets a single list of messages from all storage backends.

        Returns a ``(messages, all_retrieved)`` tuple.
        """
        all_messages = []
        for storage in self.storages:
            messages, all_retrieved = storage._get()
            # If the backend hasn't been used, no more retrieval is necessary.
            if messages is None:
                break
            if messages:
                self._used_storages.add(storage)
            all_messages.extend(messages)
            # If this storage class contained all the messages, no further
            # retrieval is necessary
            if all_retrieved:
                break
        # NOTE(review): assumes `self.storages` is non-empty; with an empty
        # list `all_retrieved` would be unbound here -- confirm invariant.
        return all_messages, all_retrieved
    def _store(self, messages, response, *args, **kwargs):
        """
        Stores the messages, returning any unstored messages after trying all
        backends.
        For each storage backend, any messages not stored are passed on to the
        next backend.
        """
        for storage in self.storages:
            if messages:
                messages = storage._store(messages, response,
                                          remove_oldest=False)
            # Even if there are no more messages, continue iterating to ensure
            # storages which contained messages are flushed.
            elif storage in self._used_storages:
                storage._store([], response)
                self._used_storages.remove(storage)
        return messages
| bsd-3-clause |
wbsavage/shinken | test/test_reactionner_tag_get_notif.py | 2 | 5747 | #!/usr/bin/env python
# Copyright (C) 2009-2010:
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
#
# This file is used to test reading and processing of config files
#
from shinken_test import *
class TestReactionnerTagGetNotifs(ShinkenTest):
    """Verify that scheduler actions carry their configured reactionner_tag
    and that get_to_run_checks filters on those tags (Python 2 test code)."""
    def setUp(self):
        self.setup_with_file('etc/nagios_reactionner_tag_get_notif.cfg')
    # For a service, we generate a notification and a event handler.
    # Each one got a specific reactionner_tag that we will look for.
    def test_good_checks_get_only_tags_with_specific_tags(self):
        """Tag-only filtering: each tag returns only its own action type."""
        now = int(time.time())
        router = self.sched.hosts.find_by_name("test_router_0")
        router.checks_in_progress = []
        router.act_depend_of = [] # ignore the router
        host = self.sched.hosts.find_by_name("test_host_0")
        host.checks_in_progress = []
        host.act_depend_of = [] # ignore the router
        svc = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "test_ok_0")
        svc.checks_in_progress = []
        svc.act_depend_of = [] # no hostchecks on critical checkresults
        # Drive everything UP/OK first, then force the service CRITICAL so
        # a notification and an event handler get scheduled.
        self.scheduler_loop(2, [[host, 0, 'UP | value1=1 value2=2'], [router, 0, 'UP | rtt=10'], [svc, 0, 'BAD | value1=0 value2=0']])
        print "Go bad now"
        self.scheduler_loop(2, [[svc, 2, 'BAD | value1=0 value2=0']])
        for a in self.sched.actions.values():
            # Set them go NOW
            a.t_to_go = now
            # In fact they are already launched, so we-reenabled them :)
            a.status = 'scheduled'
            # And look for good tagging
            if a.command.startswith('plugins/notifier.pl'):
                print a.__dict__
                print a.reactionner_tag
                self.assert_(a.reactionner_tag == 'runonwindows')
            if a.command.startswith('plugins/test_eventhandler.pl'):
                print a.__dict__
                print a.reactionner_tag
                self.assert_(a.reactionner_tag == 'eventtag')
        # Ok the tags are defined as it should, now try to get them as a reactionner :)
        # Now get only tag ones
        taggued_runonwindows_checks = self.sched.get_to_run_checks(False, True, reactionner_tags=['runonwindows'])
        self.assert_(len(taggued_runonwindows_checks) > 0)
        for c in taggued_runonwindows_checks:
            # Should be the host one only
            self.assert_(c.command.startswith('plugins/notifier.pl'))
        taggued_eventtag_checks = self.sched.get_to_run_checks(False, True, reactionner_tags=['eventtag'])
        self.assert_(len(taggued_eventtag_checks) > 0)
        for c in taggued_eventtag_checks:
            # Should be the host one only
            self.assert_(c.command.startswith('plugins/test_eventhandler.pl'))
    # Same that upper, but with modules types
    def test_good_checks_get_only_tags_with_specific_tags_andmodule_types(self):
        """Tag + module_types filtering: a matching type returns actions,
        a bogus type ('myassischicken') must return none."""
        now = int(time.time())
        router = self.sched.hosts.find_by_name("test_router_0")
        router.checks_in_progress = []
        router.act_depend_of = [] # ignore the router
        host = self.sched.hosts.find_by_name("test_host_0")
        host.checks_in_progress = []
        host.act_depend_of = [] # ignore the router
        svc = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "test_ok_0")
        svc.checks_in_progress = []
        svc.act_depend_of = [] # no hostchecks on critical checkresults
        self.scheduler_loop(2, [[host, 0, 'UP | value1=1 value2=2'], [router, 0, 'UP | rtt=10'], [svc, 0, 'BAD | value1=0 value2=0']])
        print "Go bad now"
        self.scheduler_loop(2, [[svc, 2, 'BAD | value1=0 value2=0']])
        for a in self.sched.actions.values():
            # Set them go NOW
            a.t_to_go = now
            # In fact they are already launched, so we-reenabled them :)
            a.status = 'scheduled'
            # And look for good tagging
            if a.command.startswith('plugins/notifier.pl'):
                print a.__dict__
                print a.reactionner_tag
                self.assert_(a.reactionner_tag == 'runonwindows')
            if a.command.startswith('plugins/test_eventhandler.pl'):
                print a.__dict__
                print a.reactionner_tag
                self.assert_(a.reactionner_tag == 'eventtag')
        # Ok the tags are defined as it should, now try to get them as a reactionner :)
        # Now get only tag ones
        taggued_runonwindows_checks = self.sched.get_to_run_checks(False, True, reactionner_tags=['runonwindows'], module_types=['fork'])
        self.assert_(len(taggued_runonwindows_checks) > 0)
        for c in taggued_runonwindows_checks:
            # Should be the host one only
            self.assert_(c.command.startswith('plugins/notifier.pl'))
        taggued_eventtag_checks = self.sched.get_to_run_checks(False, True, reactionner_tags=['eventtag'], module_types=['myassischicken'])
        self.assert_(len(taggued_eventtag_checks) == 0)
if __name__ == '__main__':
unittest.main()
| agpl-3.0 |
jazzmes/ryu | ryu/services/protocols/bgp/api/import_map.py | 52 | 2700 | # Copyright (C) 2014 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Import-map configuration.
"""
import logging
from ryu.services.protocols.bgp.api.base import register
from ryu.services.protocols.bgp.core_manager import CORE_MANAGER
from ryu.services.protocols.bgp.core_managers.import_map_manager\
import ImportMapAlreadyExistsError
from ryu.services.protocols.bgp.rtconf.base import RuntimeConfigError
LOG = logging.getLogger('bgpspeaker.api.import_map')
@register(name='importmap.create')
def create_importmap(type, action, name, value, route_family=None):
    """Create an import-map of the given *type*.

    Only the 'drop' action and the 'prefix_match' / 'rt_match' types are
    supported; anything else raises RuntimeConfigError.
    """
    if action != 'drop':
        raise RuntimeConfigError(
            'Unknown action. For now we only support "drop" action.'
        )
    # Dispatch on the map type; fall through to an error for unknown types.
    if type == 'prefix_match':
        return _create_prefix_match_importmap(name, value, route_family)
    if type == 'rt_match':
        return _create_rt_match_importmap(name, value)
    raise RuntimeConfigError(
        'Unknown type. We support only "prefix_match" and "rt_match".'
    )
def _create_prefix_match_importmap(name, value, route_family):
    """Register a VPNv4/VPNv6 NLRI import-map with the core service."""
    manager = CORE_MANAGER.get_core_service().importmap_manager
    # Validate the address family before touching the manager.
    if route_family not in ('ipv4', 'ipv6'):
        raise RuntimeConfigError(
            'Unknown address family %s. it should be ipv4 or ipv6'
            % route_family
        )
    try:
        if route_family == 'ipv4':
            manager.create_vpnv4_nlri_import_map(name, value)
        else:
            manager.create_vpnv6_nlri_import_map(name, value)
    except ImportMapAlreadyExistsError:
        raise RuntimeConfigError(
            'Map with this name already exists'
        )
    return True
def _create_rt_match_importmap(name, value):
    """Register a route-target import-map with the core service."""
    manager = CORE_MANAGER.get_core_service().importmap_manager
    try:
        manager.create_rt_import_map(name, value)
    except ImportMapAlreadyExistsError:
        raise RuntimeConfigError(
            'Map with this name already exists'
        )
    return True
| apache-2.0 |
piotroxp/scibibscan | scib/lib/python3.5/site-packages/astropy/io/votable/tests/table_test.py | 1 | 5035 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Test the conversion to/from astropy.table
"""
# TEST_UNICODE_LITERALS
import io
import os
import numpy as np
from ....utils.data import get_pkg_data_filename, get_pkg_data_fileobj
from ..table import parse, writeto
from .. import tree
def test_table(tmpdir):
    """Round-trip a VOTable through an astropy Table and back, checking
    masks and per-field datatype/arraysize metadata."""
    # Read the VOTABLE
    votable = parse(
        get_pkg_data_filename('data/regression.xml'),
        pedantic=False)
    table = votable.get_first_table()
    astropy_table = table.to_table()
    # The astropy Table mask must mirror the VOTable array mask column-wise.
    for name in table.array.dtype.names:
        assert np.all(astropy_table.mask[name] == table.array.mask[name])
    votable2 = tree.VOTableFile.from_table(astropy_table)
    t = votable2.get_first_table()
    # Expected (ID, {datatype[, arraysize]}) for every field, in order.
    field_types = [
        ('string_test', {'datatype': 'char', 'arraysize': '*'}),
        ('string_test_2', {'datatype': 'char', 'arraysize': '10'}),
        ('unicode_test', {'datatype': 'unicodeChar', 'arraysize': '*'}),
        ('fixed_unicode_test', {'datatype': 'unicodeChar', 'arraysize': '10'}),
        ('string_array_test', {'datatype': 'char', 'arraysize': '4'}),
        ('unsignedByte', {'datatype': 'unsignedByte'}),
        ('short', {'datatype': 'short'}),
        ('int', {'datatype': 'int'}),
        ('long', {'datatype': 'long'}),
        ('double', {'datatype': 'double'}),
        ('float', {'datatype': 'float'}),
        ('array', {'datatype': 'long', 'arraysize': '2*'}),
        ('bit', {'datatype': 'bit'}),
        ('bitarray', {'datatype': 'bit', 'arraysize': '3x2'}),
        ('bitvararray', {'datatype': 'bit', 'arraysize': '*'}),
        ('bitvararray2', {'datatype': 'bit', 'arraysize': '3x2*'}),
        ('floatComplex', {'datatype': 'floatComplex'}),
        ('doubleComplex', {'datatype': 'doubleComplex'}),
        ('doubleComplexArray', {'datatype': 'doubleComplex', 'arraysize': '*'}),
        ('doubleComplexArrayFixed', {'datatype': 'doubleComplex', 'arraysize': '2'}),
        ('boolean', {'datatype': 'bit'}),
        ('booleanArray', {'datatype': 'bit', 'arraysize': '4'}),
        ('nulls', {'datatype': 'int'}),
        ('nulls_array', {'datatype': 'int', 'arraysize': '2x2'}),
        ('precision1', {'datatype': 'double'}),
        ('precision2', {'datatype': 'double'}),
        ('doublearray', {'datatype': 'double', 'arraysize': '*'}),
        ('bitarray2', {'datatype': 'bit', 'arraysize': '16'})]
    for field, type in zip(t.fields, field_types):
        name, d = type
        assert field.ID == name
        assert field.datatype == d['datatype']
        if 'arraysize' in d:
            assert field.arraysize == d['arraysize']
    # Finally make sure the rebuilt VOTable serializes without error.
    writeto(votable2, os.path.join(str(tmpdir), "through_table.xml"))
def test_read_through_table_interface(tmpdir):
    """Read a named table via Table.read, write it out, and re-read it."""
    from ....table import Table
    with get_pkg_data_fileobj('data/regression.xml', encoding='binary') as fd:
        t = Table.read(fd, format='votable', table_id='main_table')
    assert len(t) == 5
    fn = os.path.join(str(tmpdir), "table_interface.xml")
    t.write(fn, table_id='FOO', format='votable')
    with open(fn, 'rb') as fd:
        t2 = Table.read(fd, format='votable', table_id='FOO')
    assert len(t2) == 5
def test_read_through_table_interface2():
    """An empty named table should read back with zero rows."""
    from ....table import Table
    with get_pkg_data_fileobj('data/regression.xml', encoding='binary') as fd:
        t = Table.read(fd, format='votable', table_id='last_table')
    assert len(t) == 0
def test_names_over_ids():
    """use_names_over_ids=True must yield FIELD name attributes as columns."""
    with get_pkg_data_fileobj('data/names.xml', encoding='binary') as fd:
        votable = parse(fd)
    table = votable.get_first_table().to_table(use_names_over_ids=True)
    assert table.colnames == [
        'Name', 'GLON', 'GLAT', 'RAdeg', 'DEdeg', 'Jmag', 'Hmag', 'Kmag',
        'G3.6mag', 'G4.5mag', 'G5.8mag', 'G8.0mag', '4.5mag', '8.0mag',
        'Emag', '24mag', 'f_Name']
def test_table_read_with_unnamed_tables():
    """
    Issue #927

    Reading without a table_id must still succeed on files whose tables
    carry no names.
    """
    from ....table import Table
    with get_pkg_data_fileobj('data/names.xml', encoding='binary') as fd:
        t = Table.read(fd, format='votable')
    assert len(t) == 1
def test_from_table_without_mask():
    """Writing a plain (unmasked) Table to VOTable must not raise."""
    from ....table import Table, Column
    t = Table()
    c = Column(data=[1, 2, 3], name='a')
    t.add_column(c)
    output = io.BytesIO()
    t.write(output, format='votable')
def test_write_with_format():
    """tabledata_format must select BINARY / BINARY2 serialization."""
    from ....table import Table, Column
    t = Table()
    c = Column(data=[1, 2, 3], name='a')
    t.add_column(c)
    output = io.BytesIO()
    t.write(output, format='votable', tabledata_format="binary")
    assert b'BINARY' in output.getvalue()
    assert b'TABLEDATA' not in output.getvalue()
    output = io.BytesIO()
    t.write(output, format='votable', tabledata_format="binary2")
    assert b'BINARY2' in output.getvalue()
    assert b'TABLEDATA' not in output.getvalue()
def test_empty_table():
    """Converting an empty VOTable to an astropy Table must not raise."""
    votable = parse(
        get_pkg_data_filename('data/empty_table.xml'),
        pedantic=False)
    table = votable.get_first_table()
    astropy_table = table.to_table()
dex4er/django | django/core/cache/__init__.py | 105 | 4876 | """
Caching framework.
This package defines set of cache backends that all conform to a simple API.
In a nutshell, a cache is a set of values -- which can be any object that
may be pickled -- identified by string keys. For the complete API, see
the abstract BaseCache class in django.core.cache.backends.base.
Client code should not access a cache backend directly; instead it should
either use the "cache" variable made available here, or it should use the
get_cache() function made available here. get_cache() takes a backend URI
(e.g. "memcached://127.0.0.1:11211/") and returns an instance of a backend
cache class.
See docs/topics/cache.txt for information on the public API.
"""
from django.conf import settings
from django.core import signals
from django.core.cache.backends.base import (
InvalidCacheBackendError, CacheKeyWarning, BaseCache)
from django.core.exceptions import ImproperlyConfigured
from django.utils import importlib
from django.utils.module_loading import import_by_path
from django.utils.six.moves.urllib.parse import parse_qsl
__all__ = [
'get_cache', 'cache', 'DEFAULT_CACHE_ALIAS'
]
# Name for use in settings file --> name of module in "backends" directory.
# Any backend scheme that is not in this dictionary is treated as a Python
# import path to a custom backend.
BACKENDS = {
'memcached': 'memcached',
'locmem': 'locmem',
'file': 'filebased',
'db': 'db',
'dummy': 'dummy',
}
DEFAULT_CACHE_ALIAS = 'default'
def parse_backend_uri(backend_uri):
    """
    Converts the "backend_uri" into a cache scheme ('db', 'memcached', etc), a
    host and any extra params that are required for the backend. Returns a
    (scheme, host, params) tuple.
    """
    scheme, sep, rest = backend_uri.partition(':')
    # Both a ':' and a leading '//' after it are mandatory.
    if not sep or not rest.startswith('//'):
        raise InvalidCacheBackendError("Backend URI must start with scheme://")
    # Split the remainder into host and an optional query string.
    netloc, qsep, query = rest[2:].partition('?')
    params = dict(parse_qsl(query)) if qsep else {}
    # A single trailing slash on the host is tolerated and stripped.
    if netloc.endswith('/'):
        netloc = netloc[:-1]
    return scheme, netloc, params
# Fail fast at import time when settings do not define the default cache alias.
if DEFAULT_CACHE_ALIAS not in settings.CACHES:
    raise ImproperlyConfigured("You must define a '%s' cache" % DEFAULT_CACHE_ALIAS)
def parse_backend_conf(backend, **kwargs):
    """
    Helper function to parse the backend configuration
    that doesn't use the URI notation.

    Returns a (backend_path, location, params) tuple; kwargs override
    the settings entry when one exists.
    """
    # Try to get the CACHES entry for the given backend name first
    conf = settings.CACHES.get(backend, None)
    if conf is not None:
        # Copy so the settings dict itself is never mutated by the pops below.
        args = conf.copy()
        args.update(kwargs)
        backend = args.pop('BACKEND')
        location = args.pop('LOCATION', '')
        return backend, location, args
    else:
        try:
            # Trying to import the given backend, in case it's a dotted path
            backend_cls = import_by_path(backend)
        except ImproperlyConfigured as e:
            raise InvalidCacheBackendError("Could not find backend '%s': %s" % (
                backend, e))
        location = kwargs.pop('LOCATION', '')
        return backend, location, kwargs
def get_cache(backend, **kwargs):
    """
    Function to load a cache backend dynamically. This is flexible by design
    to allow different use cases:
    To load a backend with the old URI-based notation::
        cache = get_cache('locmem://')
    To load a backend that is pre-defined in the settings::
        cache = get_cache('default')
    To load a backend with its dotted import path,
    including arbitrary options::
        cache = get_cache('django.core.cache.backends.memcached.MemcachedCache', **{
            'LOCATION': '127.0.0.1:11211', 'TIMEOUT': 30,
        })

    Raises InvalidCacheBackendError when the backend cannot be resolved.
    """
    try:
        if '://' in backend:
            # for backwards compatibility
            backend, location, params = parse_backend_uri(backend)
            if backend in BACKENDS:
                backend = 'django.core.cache.backends.%s' % BACKENDS[backend]
            params.update(kwargs)
            # Legacy path expects the module to expose a CacheClass attribute.
            mod = importlib.import_module(backend)
            backend_cls = mod.CacheClass
        else:
            backend, location, params = parse_backend_conf(backend, **kwargs)
            backend_cls = import_by_path(backend)
    except (AttributeError, ImportError, ImproperlyConfigured) as e:
        raise InvalidCacheBackendError(
            "Could not find backend '%s': %s" % (backend, e))
    cache = backend_cls(location, params)
    # Some caches -- python-memcached in particular -- need to do a cleanup at the
    # end of a request cycle. If not implemented in a particular backend
    # cache.close is a no-op
    signals.request_finished.connect(cache.close)
    return cache
| bsd-3-clause |
neutronpy/neutronpy | neutronpy/fileio/data.py | 3 | 7117 | # -*- coding: utf-8 -*-
import copy
import numbers
import numpy as np
from .instrument import save_instrument
from .loaders import DcsMslice, Grasp, Ice, Icp, Mad, Neutronpy, Spice
def load_data(files, filetype='auto', tols=1e-4, build_hkl=True, load_instrument=False):
    r"""Loads one or more files and creates a :class:`Data` object with the
    loaded data.
    Parameters
    ----------
    files : str or tuple of str
        A file or non-keyworded list of files containing data for input.
    filetype : str, optional
        Default: `'auto'`. Specify file type; Currently supported file types
        are `'SPICE'` (HFIR), `'ICE'` and `'ICP'` (NIST), `'MAD'` (ILL),
        `'dcs_mslice'` DAVE exported ascii formats, GRASP exported ascii and
        HDF5 formats, and neutronpy exported formats. By default the function
        will attempt to determine the filetype automatically.
    tols : float or array_like
        Default: `1e-4`. A float or array of shape `(5,)` giving tolerances
        for combining multiple files. If multiple points are within the given
        tolerances then they will be combined into a single point. If a float
        is given, tolerances will all be the same for all variables in **Q**.
        If an array is given tolerances should be in the format
        `[h, k, l, e, temp]`.
    build_hkl : bool, optional
        Default: True. Passed through to the loader's ``load`` method.
    load_instrument : bool, optional
        Default: False. Passed through to the loader's ``load`` method.
    Returns
    -------
    Data : object
        A :class:`Data` object populated with the data from the input file or
        files.
    """
    # Map of supported filetype keys to their loader classes.
    load_filetype = {'dcs_mslice': DcsMslice,
                     'grasp': Grasp,
                     'ice': Ice,
                     'icp': Icp,
                     'mad': Mad,
                     'neutronpy': Neutronpy,
                     'spice': Spice}
    if not isinstance(files, (tuple, list)):
        files = (files,)
    for filename in files:
        if filetype == 'auto':
            try:
                filetype = detect_filetype(filename)
            except ValueError:
                raise
        try:
            _data_object_temp = load_filetype[filetype.lower()]()
            _data_object_temp.load(filename, build_hkl=build_hkl, load_instrument=load_instrument)
        except KeyError:
            raise KeyError('Filetype not supported.')
        # Expand a scalar tolerance into one value per data column.
        if isinstance(tols, numbers.Number):
            tols = [tols for i in range(len(_data_object_temp._data) - len(_data_object_temp.data_keys))]
        # First iteration: _data_object is not yet bound, so the NameError /
        # UnboundLocalError branch seeds it with a copy of the first file.
        try:
            _data_object.combine_data(_data_object_temp, tols=tols)
        except (NameError, UnboundLocalError):
            _data_object = copy.deepcopy(_data_object_temp)
    return _data_object
def save_data(obj, filename, filetype='ascii', save_instr=False, overwrite=False, **kwargs):
    """Saves a given object to a file in a specified format.
    Parameters
    ----------
    obj : :class:`Data`
        A :class:`Data` object to be saved to disk
    filename : str
        Path to file where data will be saved
    filetype : str, optional
        Default: `'ascii'`. Data can either be saved in human-readable
        `'ascii'` format, binary `'hdf5'` format, or binary `'pickle'`
        format (not recommended).
    save_instr : bool, optional
        Default: False.
    overwrite : bool, optional
        Default: False.

    Raises
    ------
    ValueError
        If `filetype` is not one of 'ascii', 'hdf5', or 'pickle'.
    """
    if filetype == 'ascii':
        from datetime import datetime
        if overwrite:
            mode = 'w+'
        else:
            mode = 'r+'
        # NOTE(review): `mode` is computed but never used below (np.savetxt
        # always overwrites), so `overwrite` has no effect in this branch --
        # confirm intended behavior.
        header = '### NeutronPy ::: {0} ###\n\n'.format(datetime.now().isoformat())
        header += 'data_keys = {0}\n'.format(str(obj.data_keys))
        if hasattr(obj, 'Q_keys'):
            header += 'Q_keys = {0}\n'.format(str(obj.Q_keys))
        header += '\n\noriginal_header = \n\t'
        if hasattr(obj, 'file_header'):
            old_header = '\n\t'.join(obj.file_header)
        else:
            old_header = ''
        old_header += '\n\n'
        data_columns = obj.data_columns
        data = obj.data
        # Append the error column alongside the data when present.
        if hasattr(obj, '_err'):
            data_columns.append('error')
            data['error'] = obj._err
        col_header = '\nnpy_col_headers =\n' + '\t'.join(data_columns)
        header += old_header + col_header
        # One column per data key, transposed so rows are data points.
        output = np.vstack((value for value in data.values())).T
        np.savetxt(filename + '.npy', output, header=header, **kwargs)
        if save_instr:
            save_instrument(obj.instrument, filename, filetype='ascii', overwrite=overwrite)
    elif filetype == 'hdf5':
        import h5py
        if overwrite:
            mode = 'w'
        else:
            mode = 'a'
        with h5py.File(filename + '.h5', mode) as f:
            data = f.create_group('data')
            try:
                data.attrs.create('file_header', obj.file_header.encode('utf8'))
            except AttributeError:
                pass
            data_keys = data.create_group('data_keys')
            for key, value in obj.data_keys.items():
                data_keys.attrs.create(key, value.encode('utf8'))
            if hasattr(obj, 'Q_keys'):
                Q_keys = data.create_group('Q_keys')
                for key, value in obj.Q_keys.items():
                    Q_keys.attrs.create(key, value.encode('utf8'))
            for key, value in obj.data.items():
                data.create_dataset(key, data=value)
            if hasattr(obj, '_err'):
                data.create_dataset('error', data=obj._err)
        if save_instr:
            try:
                save_instrument(obj.instrument, filename, filetype='hdf5', overwrite=False)
            except AttributeError:
                pass
    elif filetype == 'pickle':
        # NOTE(review): pickle output is not safe to load from untrusted
        # sources; hence "not recommended" in the docstring.
        import pickle
        with open(filename, 'wb') as f:
            pickle.dump(obj, f)
    else:
        raise ValueError("""Format not supported. Please use 'ascii', 'hdf5', or 'pickle'""")
def detect_filetype(filename):
    r"""Simple method for quickly determining filetype of a given input file.
    Parameters
    ----------
    filename : str
        File path
    Returns
    -------
    filetype : str
        The filetype of the given input file
    """
    # Extension-based detection first: these never open the file.
    if filename[-3:].lower() == 'nxs':
        return 'grasp'
    if (filename[-4:].lower() == 'iexy' or filename[-3:].lower() == 'spe'
            or filename[-3:].lower() == 'xye' or filename[-4:] == 'xyie'):
        return 'dcs_mslice'
    if filename[-2:].lower() == 'h5' or filename[-3:].lower() == 'npy':
        return 'neutronpy'
    # Otherwise sniff the first two lines of the file for format markers.
    with open(filename) as f:
        first_line = f.readline()
        second_line = f.readline()
    if '#ICE' in first_line:
        return 'ice'
    if '# scan' in first_line:
        return 'spice'
    if 'GRASP' in first_line.upper():
        return 'grasp'
    if 'Filename' in second_line:
        return 'icp'
    if 'RRR' in first_line or 'AAA' in first_line or 'VVV' in first_line:
        return 'mad'
    if 'NeutronPy' in first_line:
        return 'neutronpy'
    raise ValueError('Unknown filetype.')
| mit |
bpgc-cte/python2017 | Week 7/django/lib/python3.6/encodings/cp855.py | 272 | 33850 | """ Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP855.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    # Stateless charmap codec for code page 855, backed by the module-level
    # encoding_map / decoding_table.
    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_map)
    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        # Charmap encoding is stateless, so `final` needs no special handling;
        # [0] drops the consumed-length element of the (bytes, length) tuple.
        return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        # Stateless charmap decode; [0] drops the consumed-length element.
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream API inherited from codecs.StreamWriter; Codec supplies encode().
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Stream API inherited from codecs.StreamReader; Codec supplies decode().
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo entry the codec registry uses for 'cp855'."""
    return codecs.CodecInfo(
        name='cp855',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x0080: 0x0452, # CYRILLIC SMALL LETTER DJE
0x0081: 0x0402, # CYRILLIC CAPITAL LETTER DJE
0x0082: 0x0453, # CYRILLIC SMALL LETTER GJE
0x0083: 0x0403, # CYRILLIC CAPITAL LETTER GJE
0x0084: 0x0451, # CYRILLIC SMALL LETTER IO
0x0085: 0x0401, # CYRILLIC CAPITAL LETTER IO
0x0086: 0x0454, # CYRILLIC SMALL LETTER UKRAINIAN IE
0x0087: 0x0404, # CYRILLIC CAPITAL LETTER UKRAINIAN IE
0x0088: 0x0455, # CYRILLIC SMALL LETTER DZE
0x0089: 0x0405, # CYRILLIC CAPITAL LETTER DZE
0x008a: 0x0456, # CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
0x008b: 0x0406, # CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I
0x008c: 0x0457, # CYRILLIC SMALL LETTER YI
0x008d: 0x0407, # CYRILLIC CAPITAL LETTER YI
0x008e: 0x0458, # CYRILLIC SMALL LETTER JE
0x008f: 0x0408, # CYRILLIC CAPITAL LETTER JE
0x0090: 0x0459, # CYRILLIC SMALL LETTER LJE
0x0091: 0x0409, # CYRILLIC CAPITAL LETTER LJE
0x0092: 0x045a, # CYRILLIC SMALL LETTER NJE
0x0093: 0x040a, # CYRILLIC CAPITAL LETTER NJE
0x0094: 0x045b, # CYRILLIC SMALL LETTER TSHE
0x0095: 0x040b, # CYRILLIC CAPITAL LETTER TSHE
0x0096: 0x045c, # CYRILLIC SMALL LETTER KJE
0x0097: 0x040c, # CYRILLIC CAPITAL LETTER KJE
0x0098: 0x045e, # CYRILLIC SMALL LETTER SHORT U
0x0099: 0x040e, # CYRILLIC CAPITAL LETTER SHORT U
0x009a: 0x045f, # CYRILLIC SMALL LETTER DZHE
0x009b: 0x040f, # CYRILLIC CAPITAL LETTER DZHE
0x009c: 0x044e, # CYRILLIC SMALL LETTER YU
0x009d: 0x042e, # CYRILLIC CAPITAL LETTER YU
0x009e: 0x044a, # CYRILLIC SMALL LETTER HARD SIGN
0x009f: 0x042a, # CYRILLIC CAPITAL LETTER HARD SIGN
0x00a0: 0x0430, # CYRILLIC SMALL LETTER A
0x00a1: 0x0410, # CYRILLIC CAPITAL LETTER A
0x00a2: 0x0431, # CYRILLIC SMALL LETTER BE
0x00a3: 0x0411, # CYRILLIC CAPITAL LETTER BE
0x00a4: 0x0446, # CYRILLIC SMALL LETTER TSE
0x00a5: 0x0426, # CYRILLIC CAPITAL LETTER TSE
0x00a6: 0x0434, # CYRILLIC SMALL LETTER DE
0x00a7: 0x0414, # CYRILLIC CAPITAL LETTER DE
0x00a8: 0x0435, # CYRILLIC SMALL LETTER IE
0x00a9: 0x0415, # CYRILLIC CAPITAL LETTER IE
0x00aa: 0x0444, # CYRILLIC SMALL LETTER EF
0x00ab: 0x0424, # CYRILLIC CAPITAL LETTER EF
0x00ac: 0x0433, # CYRILLIC SMALL LETTER GHE
0x00ad: 0x0413, # CYRILLIC CAPITAL LETTER GHE
0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00b0: 0x2591, # LIGHT SHADE
0x00b1: 0x2592, # MEDIUM SHADE
0x00b2: 0x2593, # DARK SHADE
0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x00b5: 0x0445, # CYRILLIC SMALL LETTER HA
0x00b6: 0x0425, # CYRILLIC CAPITAL LETTER HA
0x00b7: 0x0438, # CYRILLIC SMALL LETTER I
0x00b8: 0x0418, # CYRILLIC CAPITAL LETTER I
0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
0x00bd: 0x0439, # CYRILLIC SMALL LETTER SHORT I
0x00be: 0x0419, # CYRILLIC CAPITAL LETTER SHORT I
0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x00c6: 0x043a, # CYRILLIC SMALL LETTER KA
0x00c7: 0x041a, # CYRILLIC CAPITAL LETTER KA
0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x00cf: 0x00a4, # CURRENCY SIGN
0x00d0: 0x043b, # CYRILLIC SMALL LETTER EL
0x00d1: 0x041b, # CYRILLIC CAPITAL LETTER EL
0x00d2: 0x043c, # CYRILLIC SMALL LETTER EM
0x00d3: 0x041c, # CYRILLIC CAPITAL LETTER EM
0x00d4: 0x043d, # CYRILLIC SMALL LETTER EN
0x00d5: 0x041d, # CYRILLIC CAPITAL LETTER EN
0x00d6: 0x043e, # CYRILLIC SMALL LETTER O
0x00d7: 0x041e, # CYRILLIC CAPITAL LETTER O
0x00d8: 0x043f, # CYRILLIC SMALL LETTER PE
0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x00db: 0x2588, # FULL BLOCK
0x00dc: 0x2584, # LOWER HALF BLOCK
0x00dd: 0x041f, # CYRILLIC CAPITAL LETTER PE
0x00de: 0x044f, # CYRILLIC SMALL LETTER YA
0x00df: 0x2580, # UPPER HALF BLOCK
0x00e0: 0x042f, # CYRILLIC CAPITAL LETTER YA
0x00e1: 0x0440, # CYRILLIC SMALL LETTER ER
0x00e2: 0x0420, # CYRILLIC CAPITAL LETTER ER
0x00e3: 0x0441, # CYRILLIC SMALL LETTER ES
0x00e4: 0x0421, # CYRILLIC CAPITAL LETTER ES
0x00e5: 0x0442, # CYRILLIC SMALL LETTER TE
0x00e6: 0x0422, # CYRILLIC CAPITAL LETTER TE
0x00e7: 0x0443, # CYRILLIC SMALL LETTER U
0x00e8: 0x0423, # CYRILLIC CAPITAL LETTER U
0x00e9: 0x0436, # CYRILLIC SMALL LETTER ZHE
0x00ea: 0x0416, # CYRILLIC CAPITAL LETTER ZHE
0x00eb: 0x0432, # CYRILLIC SMALL LETTER VE
0x00ec: 0x0412, # CYRILLIC CAPITAL LETTER VE
0x00ed: 0x044c, # CYRILLIC SMALL LETTER SOFT SIGN
0x00ee: 0x042c, # CYRILLIC CAPITAL LETTER SOFT SIGN
0x00ef: 0x2116, # NUMERO SIGN
0x00f0: 0x00ad, # SOFT HYPHEN
0x00f1: 0x044b, # CYRILLIC SMALL LETTER YERU
0x00f2: 0x042b, # CYRILLIC CAPITAL LETTER YERU
0x00f3: 0x0437, # CYRILLIC SMALL LETTER ZE
0x00f4: 0x0417, # CYRILLIC CAPITAL LETTER ZE
0x00f5: 0x0448, # CYRILLIC SMALL LETTER SHA
0x00f6: 0x0428, # CYRILLIC CAPITAL LETTER SHA
0x00f7: 0x044d, # CYRILLIC SMALL LETTER E
0x00f8: 0x042d, # CYRILLIC CAPITAL LETTER E
0x00f9: 0x0449, # CYRILLIC SMALL LETTER SHCHA
0x00fa: 0x0429, # CYRILLIC CAPITAL LETTER SHCHA
0x00fb: 0x0447, # CYRILLIC SMALL LETTER CHE
0x00fc: 0x0427, # CYRILLIC CAPITAL LETTER CHE
0x00fd: 0x00a7, # SECTION SIGN
0x00fe: 0x25a0, # BLACK SQUARE
0x00ff: 0x00a0, # NO-BREAK SPACE
})
### Decoding Table
decoding_table = (
'\x00' # 0x0000 -> NULL
'\x01' # 0x0001 -> START OF HEADING
'\x02' # 0x0002 -> START OF TEXT
'\x03' # 0x0003 -> END OF TEXT
'\x04' # 0x0004 -> END OF TRANSMISSION
'\x05' # 0x0005 -> ENQUIRY
'\x06' # 0x0006 -> ACKNOWLEDGE
'\x07' # 0x0007 -> BELL
'\x08' # 0x0008 -> BACKSPACE
'\t' # 0x0009 -> HORIZONTAL TABULATION
'\n' # 0x000a -> LINE FEED
'\x0b' # 0x000b -> VERTICAL TABULATION
'\x0c' # 0x000c -> FORM FEED
'\r' # 0x000d -> CARRIAGE RETURN
'\x0e' # 0x000e -> SHIFT OUT
'\x0f' # 0x000f -> SHIFT IN
'\x10' # 0x0010 -> DATA LINK ESCAPE
'\x11' # 0x0011 -> DEVICE CONTROL ONE
'\x12' # 0x0012 -> DEVICE CONTROL TWO
'\x13' # 0x0013 -> DEVICE CONTROL THREE
'\x14' # 0x0014 -> DEVICE CONTROL FOUR
'\x15' # 0x0015 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x0016 -> SYNCHRONOUS IDLE
'\x17' # 0x0017 -> END OF TRANSMISSION BLOCK
'\x18' # 0x0018 -> CANCEL
'\x19' # 0x0019 -> END OF MEDIUM
'\x1a' # 0x001a -> SUBSTITUTE
'\x1b' # 0x001b -> ESCAPE
'\x1c' # 0x001c -> FILE SEPARATOR
'\x1d' # 0x001d -> GROUP SEPARATOR
'\x1e' # 0x001e -> RECORD SEPARATOR
'\x1f' # 0x001f -> UNIT SEPARATOR
' ' # 0x0020 -> SPACE
'!' # 0x0021 -> EXCLAMATION MARK
'"' # 0x0022 -> QUOTATION MARK
'#' # 0x0023 -> NUMBER SIGN
'$' # 0x0024 -> DOLLAR SIGN
'%' # 0x0025 -> PERCENT SIGN
'&' # 0x0026 -> AMPERSAND
"'" # 0x0027 -> APOSTROPHE
'(' # 0x0028 -> LEFT PARENTHESIS
')' # 0x0029 -> RIGHT PARENTHESIS
'*' # 0x002a -> ASTERISK
'+' # 0x002b -> PLUS SIGN
',' # 0x002c -> COMMA
'-' # 0x002d -> HYPHEN-MINUS
'.' # 0x002e -> FULL STOP
'/' # 0x002f -> SOLIDUS
'0' # 0x0030 -> DIGIT ZERO
'1' # 0x0031 -> DIGIT ONE
'2' # 0x0032 -> DIGIT TWO
'3' # 0x0033 -> DIGIT THREE
'4' # 0x0034 -> DIGIT FOUR
'5' # 0x0035 -> DIGIT FIVE
'6' # 0x0036 -> DIGIT SIX
'7' # 0x0037 -> DIGIT SEVEN
'8' # 0x0038 -> DIGIT EIGHT
'9' # 0x0039 -> DIGIT NINE
':' # 0x003a -> COLON
';' # 0x003b -> SEMICOLON
'<' # 0x003c -> LESS-THAN SIGN
'=' # 0x003d -> EQUALS SIGN
'>' # 0x003e -> GREATER-THAN SIGN
'?' # 0x003f -> QUESTION MARK
'@' # 0x0040 -> COMMERCIAL AT
'A' # 0x0041 -> LATIN CAPITAL LETTER A
'B' # 0x0042 -> LATIN CAPITAL LETTER B
'C' # 0x0043 -> LATIN CAPITAL LETTER C
'D' # 0x0044 -> LATIN CAPITAL LETTER D
'E' # 0x0045 -> LATIN CAPITAL LETTER E
'F' # 0x0046 -> LATIN CAPITAL LETTER F
'G' # 0x0047 -> LATIN CAPITAL LETTER G
'H' # 0x0048 -> LATIN CAPITAL LETTER H
'I' # 0x0049 -> LATIN CAPITAL LETTER I
'J' # 0x004a -> LATIN CAPITAL LETTER J
'K' # 0x004b -> LATIN CAPITAL LETTER K
'L' # 0x004c -> LATIN CAPITAL LETTER L
'M' # 0x004d -> LATIN CAPITAL LETTER M
'N' # 0x004e -> LATIN CAPITAL LETTER N
'O' # 0x004f -> LATIN CAPITAL LETTER O
'P' # 0x0050 -> LATIN CAPITAL LETTER P
'Q' # 0x0051 -> LATIN CAPITAL LETTER Q
'R' # 0x0052 -> LATIN CAPITAL LETTER R
'S' # 0x0053 -> LATIN CAPITAL LETTER S
'T' # 0x0054 -> LATIN CAPITAL LETTER T
'U' # 0x0055 -> LATIN CAPITAL LETTER U
'V' # 0x0056 -> LATIN CAPITAL LETTER V
'W' # 0x0057 -> LATIN CAPITAL LETTER W
'X' # 0x0058 -> LATIN CAPITAL LETTER X
'Y' # 0x0059 -> LATIN CAPITAL LETTER Y
'Z' # 0x005a -> LATIN CAPITAL LETTER Z
'[' # 0x005b -> LEFT SQUARE BRACKET
'\\' # 0x005c -> REVERSE SOLIDUS
']' # 0x005d -> RIGHT SQUARE BRACKET
'^' # 0x005e -> CIRCUMFLEX ACCENT
'_' # 0x005f -> LOW LINE
'`' # 0x0060 -> GRAVE ACCENT
'a' # 0x0061 -> LATIN SMALL LETTER A
'b' # 0x0062 -> LATIN SMALL LETTER B
'c' # 0x0063 -> LATIN SMALL LETTER C
'd' # 0x0064 -> LATIN SMALL LETTER D
'e' # 0x0065 -> LATIN SMALL LETTER E
'f' # 0x0066 -> LATIN SMALL LETTER F
'g' # 0x0067 -> LATIN SMALL LETTER G
'h' # 0x0068 -> LATIN SMALL LETTER H
'i' # 0x0069 -> LATIN SMALL LETTER I
'j' # 0x006a -> LATIN SMALL LETTER J
'k' # 0x006b -> LATIN SMALL LETTER K
'l' # 0x006c -> LATIN SMALL LETTER L
'm' # 0x006d -> LATIN SMALL LETTER M
'n' # 0x006e -> LATIN SMALL LETTER N
'o' # 0x006f -> LATIN SMALL LETTER O
'p' # 0x0070 -> LATIN SMALL LETTER P
'q' # 0x0071 -> LATIN SMALL LETTER Q
'r' # 0x0072 -> LATIN SMALL LETTER R
's' # 0x0073 -> LATIN SMALL LETTER S
't' # 0x0074 -> LATIN SMALL LETTER T
'u' # 0x0075 -> LATIN SMALL LETTER U
'v' # 0x0076 -> LATIN SMALL LETTER V
'w' # 0x0077 -> LATIN SMALL LETTER W
'x' # 0x0078 -> LATIN SMALL LETTER X
'y' # 0x0079 -> LATIN SMALL LETTER Y
'z' # 0x007a -> LATIN SMALL LETTER Z
'{' # 0x007b -> LEFT CURLY BRACKET
'|' # 0x007c -> VERTICAL LINE
'}' # 0x007d -> RIGHT CURLY BRACKET
'~' # 0x007e -> TILDE
'\x7f' # 0x007f -> DELETE
'\u0452' # 0x0080 -> CYRILLIC SMALL LETTER DJE
'\u0402' # 0x0081 -> CYRILLIC CAPITAL LETTER DJE
'\u0453' # 0x0082 -> CYRILLIC SMALL LETTER GJE
'\u0403' # 0x0083 -> CYRILLIC CAPITAL LETTER GJE
'\u0451' # 0x0084 -> CYRILLIC SMALL LETTER IO
'\u0401' # 0x0085 -> CYRILLIC CAPITAL LETTER IO
'\u0454' # 0x0086 -> CYRILLIC SMALL LETTER UKRAINIAN IE
'\u0404' # 0x0087 -> CYRILLIC CAPITAL LETTER UKRAINIAN IE
'\u0455' # 0x0088 -> CYRILLIC SMALL LETTER DZE
'\u0405' # 0x0089 -> CYRILLIC CAPITAL LETTER DZE
'\u0456' # 0x008a -> CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
'\u0406' # 0x008b -> CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I
'\u0457' # 0x008c -> CYRILLIC SMALL LETTER YI
'\u0407' # 0x008d -> CYRILLIC CAPITAL LETTER YI
'\u0458' # 0x008e -> CYRILLIC SMALL LETTER JE
'\u0408' # 0x008f -> CYRILLIC CAPITAL LETTER JE
'\u0459' # 0x0090 -> CYRILLIC SMALL LETTER LJE
'\u0409' # 0x0091 -> CYRILLIC CAPITAL LETTER LJE
'\u045a' # 0x0092 -> CYRILLIC SMALL LETTER NJE
'\u040a' # 0x0093 -> CYRILLIC CAPITAL LETTER NJE
'\u045b' # 0x0094 -> CYRILLIC SMALL LETTER TSHE
'\u040b' # 0x0095 -> CYRILLIC CAPITAL LETTER TSHE
'\u045c' # 0x0096 -> CYRILLIC SMALL LETTER KJE
'\u040c' # 0x0097 -> CYRILLIC CAPITAL LETTER KJE
'\u045e' # 0x0098 -> CYRILLIC SMALL LETTER SHORT U
'\u040e' # 0x0099 -> CYRILLIC CAPITAL LETTER SHORT U
'\u045f' # 0x009a -> CYRILLIC SMALL LETTER DZHE
'\u040f' # 0x009b -> CYRILLIC CAPITAL LETTER DZHE
'\u044e' # 0x009c -> CYRILLIC SMALL LETTER YU
'\u042e' # 0x009d -> CYRILLIC CAPITAL LETTER YU
'\u044a' # 0x009e -> CYRILLIC SMALL LETTER HARD SIGN
'\u042a' # 0x009f -> CYRILLIC CAPITAL LETTER HARD SIGN
'\u0430' # 0x00a0 -> CYRILLIC SMALL LETTER A
'\u0410' # 0x00a1 -> CYRILLIC CAPITAL LETTER A
'\u0431' # 0x00a2 -> CYRILLIC SMALL LETTER BE
'\u0411' # 0x00a3 -> CYRILLIC CAPITAL LETTER BE
'\u0446' # 0x00a4 -> CYRILLIC SMALL LETTER TSE
'\u0426' # 0x00a5 -> CYRILLIC CAPITAL LETTER TSE
'\u0434' # 0x00a6 -> CYRILLIC SMALL LETTER DE
'\u0414' # 0x00a7 -> CYRILLIC CAPITAL LETTER DE
'\u0435' # 0x00a8 -> CYRILLIC SMALL LETTER IE
'\u0415' # 0x00a9 -> CYRILLIC CAPITAL LETTER IE
'\u0444' # 0x00aa -> CYRILLIC SMALL LETTER EF
'\u0424' # 0x00ab -> CYRILLIC CAPITAL LETTER EF
'\u0433' # 0x00ac -> CYRILLIC SMALL LETTER GHE
'\u0413' # 0x00ad -> CYRILLIC CAPITAL LETTER GHE
'\xab' # 0x00ae -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xbb' # 0x00af -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\u2591' # 0x00b0 -> LIGHT SHADE
'\u2592' # 0x00b1 -> MEDIUM SHADE
'\u2593' # 0x00b2 -> DARK SHADE
'\u2502' # 0x00b3 -> BOX DRAWINGS LIGHT VERTICAL
'\u2524' # 0x00b4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT
'\u0445' # 0x00b5 -> CYRILLIC SMALL LETTER HA
'\u0425' # 0x00b6 -> CYRILLIC CAPITAL LETTER HA
'\u0438' # 0x00b7 -> CYRILLIC SMALL LETTER I
'\u0418' # 0x00b8 -> CYRILLIC CAPITAL LETTER I
'\u2563' # 0x00b9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT
'\u2551' # 0x00ba -> BOX DRAWINGS DOUBLE VERTICAL
'\u2557' # 0x00bb -> BOX DRAWINGS DOUBLE DOWN AND LEFT
'\u255d' # 0x00bc -> BOX DRAWINGS DOUBLE UP AND LEFT
'\u0439' # 0x00bd -> CYRILLIC SMALL LETTER SHORT I
'\u0419' # 0x00be -> CYRILLIC CAPITAL LETTER SHORT I
'\u2510' # 0x00bf -> BOX DRAWINGS LIGHT DOWN AND LEFT
'\u2514' # 0x00c0 -> BOX DRAWINGS LIGHT UP AND RIGHT
'\u2534' # 0x00c1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL
'\u252c' # 0x00c2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
'\u251c' # 0x00c3 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT
'\u2500' # 0x00c4 -> BOX DRAWINGS LIGHT HORIZONTAL
'\u253c' # 0x00c5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
'\u043a' # 0x00c6 -> CYRILLIC SMALL LETTER KA
'\u041a' # 0x00c7 -> CYRILLIC CAPITAL LETTER KA
'\u255a' # 0x00c8 -> BOX DRAWINGS DOUBLE UP AND RIGHT
'\u2554' # 0x00c9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT
'\u2569' # 0x00ca -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL
'\u2566' # 0x00cb -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
'\u2560' # 0x00cc -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
'\u2550' # 0x00cd -> BOX DRAWINGS DOUBLE HORIZONTAL
'\u256c' # 0x00ce -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
'\xa4' # 0x00cf -> CURRENCY SIGN
'\u043b' # 0x00d0 -> CYRILLIC SMALL LETTER EL
'\u041b' # 0x00d1 -> CYRILLIC CAPITAL LETTER EL
'\u043c' # 0x00d2 -> CYRILLIC SMALL LETTER EM
'\u041c' # 0x00d3 -> CYRILLIC CAPITAL LETTER EM
'\u043d' # 0x00d4 -> CYRILLIC SMALL LETTER EN
'\u041d' # 0x00d5 -> CYRILLIC CAPITAL LETTER EN
'\u043e' # 0x00d6 -> CYRILLIC SMALL LETTER O
'\u041e' # 0x00d7 -> CYRILLIC CAPITAL LETTER O
'\u043f' # 0x00d8 -> CYRILLIC SMALL LETTER PE
'\u2518' # 0x00d9 -> BOX DRAWINGS LIGHT UP AND LEFT
'\u250c' # 0x00da -> BOX DRAWINGS LIGHT DOWN AND RIGHT
'\u2588' # 0x00db -> FULL BLOCK
'\u2584' # 0x00dc -> LOWER HALF BLOCK
'\u041f' # 0x00dd -> CYRILLIC CAPITAL LETTER PE
'\u044f' # 0x00de -> CYRILLIC SMALL LETTER YA
'\u2580' # 0x00df -> UPPER HALF BLOCK
'\u042f' # 0x00e0 -> CYRILLIC CAPITAL LETTER YA
'\u0440' # 0x00e1 -> CYRILLIC SMALL LETTER ER
'\u0420' # 0x00e2 -> CYRILLIC CAPITAL LETTER ER
'\u0441' # 0x00e3 -> CYRILLIC SMALL LETTER ES
'\u0421' # 0x00e4 -> CYRILLIC CAPITAL LETTER ES
'\u0442' # 0x00e5 -> CYRILLIC SMALL LETTER TE
'\u0422' # 0x00e6 -> CYRILLIC CAPITAL LETTER TE
'\u0443' # 0x00e7 -> CYRILLIC SMALL LETTER U
'\u0423' # 0x00e8 -> CYRILLIC CAPITAL LETTER U
'\u0436' # 0x00e9 -> CYRILLIC SMALL LETTER ZHE
'\u0416' # 0x00ea -> CYRILLIC CAPITAL LETTER ZHE
'\u0432' # 0x00eb -> CYRILLIC SMALL LETTER VE
'\u0412' # 0x00ec -> CYRILLIC CAPITAL LETTER VE
'\u044c' # 0x00ed -> CYRILLIC SMALL LETTER SOFT SIGN
'\u042c' # 0x00ee -> CYRILLIC CAPITAL LETTER SOFT SIGN
'\u2116' # 0x00ef -> NUMERO SIGN
'\xad' # 0x00f0 -> SOFT HYPHEN
'\u044b' # 0x00f1 -> CYRILLIC SMALL LETTER YERU
'\u042b' # 0x00f2 -> CYRILLIC CAPITAL LETTER YERU
'\u0437' # 0x00f3 -> CYRILLIC SMALL LETTER ZE
'\u0417' # 0x00f4 -> CYRILLIC CAPITAL LETTER ZE
'\u0448' # 0x00f5 -> CYRILLIC SMALL LETTER SHA
'\u0428' # 0x00f6 -> CYRILLIC CAPITAL LETTER SHA
'\u044d' # 0x00f7 -> CYRILLIC SMALL LETTER E
'\u042d' # 0x00f8 -> CYRILLIC CAPITAL LETTER E
'\u0449' # 0x00f9 -> CYRILLIC SMALL LETTER SHCHA
'\u0429' # 0x00fa -> CYRILLIC CAPITAL LETTER SHCHA
'\u0447' # 0x00fb -> CYRILLIC SMALL LETTER CHE
'\u0427' # 0x00fc -> CYRILLIC CAPITAL LETTER CHE
'\xa7' # 0x00fd -> SECTION SIGN
'\u25a0' # 0x00fe -> BLACK SQUARE
'\xa0' # 0x00ff -> NO-BREAK SPACE
)
### Encoding Map
# Derive the encoding map by inverting decoding_map with the stdlib helper
# instead of maintaining a second hand-written 256-entry literal that can
# drift out of sync.  decoding_map is a bijection here (no Unicode value is
# repeated across the 256 byte positions), so make_encoding_map produces
# exactly the dict that was previously spelled out: every representable
# code point mapped to its cp855 byte value.
encoding_map = codecs.make_encoding_map(decoding_map)
| mit |
pigeonflight/strider-plone | docker/appengine/lib/cherrypy/cherrypy/process/win32.py | 93 | 5870 | """Windows service. Requires pywin32."""
import os
import win32api
import win32con
import win32event
import win32service
import win32serviceutil
from cherrypy.process import wspbus, plugins
class ConsoleCtrlHandler(plugins.SimplePlugin):
    """A WSPBus plugin for handling Win32 console events (like Ctrl-C)."""
    def __init__(self, bus):
        # True while our handler is registered with the Win32 console.
        self.is_set = False
        plugins.SimplePlugin.__init__(self, bus)
    def start(self):
        """Register self.handle as a console control handler (idempotent)."""
        if self.is_set:
            self.bus.log('Handler for console events already set.', level=40)
            return
        # Second argument 1 == add the handler; the call returns 0 on failure.
        result = win32api.SetConsoleCtrlHandler(self.handle, 1)
        if result == 0:
            self.bus.log('Could not SetConsoleCtrlHandler (error %r)' %
                         win32api.GetLastError(), level=40)
        else:
            self.bus.log('Set handler for console events.', level=40)
            self.is_set = True
    def stop(self):
        """Unregister the console control handler (idempotent)."""
        if not self.is_set:
            self.bus.log('Handler for console events already off.', level=40)
            return
        try:
            # Second argument 0 == remove the handler.
            result = win32api.SetConsoleCtrlHandler(self.handle, 0)
        except ValueError:
            # "ValueError: The object has not been registered"
            result = 1
        if result == 0:
            self.bus.log('Could not remove SetConsoleCtrlHandler (error %r)' %
                         win32api.GetLastError(), level=40)
        else:
            self.bus.log('Removed handler for console events.', level=40)
            self.is_set = False
    def handle(self, event):
        """Handle console control events (like Ctrl-C).

        Returns 1 (handled) for shutdown-type events after asking the bus to
        exit, 0 otherwise so the next registered handler runs.
        """
        if event in (win32con.CTRL_C_EVENT, win32con.CTRL_LOGOFF_EVENT,
                     win32con.CTRL_BREAK_EVENT, win32con.CTRL_SHUTDOWN_EVENT,
                     win32con.CTRL_CLOSE_EVENT):
            self.bus.log('Console event %s: shutting down bus' % event)
            # Remove self immediately so repeated Ctrl-C doesn't re-call it.
            try:
                self.stop()
            except ValueError:
                pass
            self.bus.exit()
            # 'First to return True stops the calls'
            return 1
        return 0
class Win32Bus(wspbus.Bus):
    """A Web Site Process Bus implementation for Win32.
    Instead of time.sleep, this bus blocks using native win32event objects.
    """
    def __init__(self):
        # Lazily-populated map of bus state -> win32 event handle.
        self.events = {}
        wspbus.Bus.__init__(self)
    def _get_state_event(self, state):
        """Return a win32event for the given state (creating it if needed)."""
        try:
            return self.events[state]
        except KeyError:
            # Named event so it is identifiable in system tooling; the name
            # embeds the pid to avoid cross-process collisions.
            event = win32event.CreateEvent(None, 0, 0,
                                           "WSPBus %s Event (pid=%r)" %
                                           (state.name, os.getpid()))
            self.events[state] = event
            return event
    def _get_state(self):
        return self._state
    def _set_state(self, value):
        # Pulse the event for the new state so any wait() on it unblocks.
        self._state = value
        event = self._get_state_event(value)
        win32event.PulseEvent(event)
    state = property(_get_state, _set_state)
    def wait(self, state, interval=0.1, channel=None):
        """Wait for the given state(s), KeyboardInterrupt or SystemExit.
        Since this class uses native win32event objects, the interval
        argument is ignored.
        """
        if isinstance(state, (tuple, list)):
            # Don't wait for an event that beat us to the punch ;)
            if self.state not in state:
                events = tuple([self._get_state_event(s) for s in state])
                win32event.WaitForMultipleObjects(events, 0, win32event.INFINITE)
        else:
            # Don't wait for an event that beat us to the punch ;)
            if self.state != state:
                event = self._get_state_event(state)
                win32event.WaitForSingleObject(event, win32event.INFINITE)
class _ControlCodes(dict):
"""Control codes used to "signal" a service via ControlService.
User-defined control codes are in the range 128-255. We generally use
the standard Python value for the Linux signal and add 128. Example:
>>> signal.SIGUSR1
10
control_codes['graceful'] = 128 + 10
"""
def key_for(self, obj):
"""For the given value, return its corresponding key."""
for key, val in self.items():
if val is obj:
return key
raise ValueError("The given object could not be found: %r" % obj)
control_codes = _ControlCodes({'graceful': 138})
def signal_child(service, command):
    """Send *command* ('stop', 'restart', or a custom control name) to *service*."""
    # Guard-clause dispatch: the two lifecycle commands have dedicated
    # pywin32 helpers; anything else is looked up in control_codes and
    # delivered as a raw service control code.
    if command == 'stop':
        win32serviceutil.StopService(service)
        return
    if command == 'restart':
        win32serviceutil.RestartService(service)
        return
    win32serviceutil.ControlService(service, control_codes[command])
class PyWebService(win32serviceutil.ServiceFramework):
    """Python Web Service.

    Windows-service wrapper that drives the CherryPy process bus: the SCM
    start/stop/custom-control requests are translated into bus operations.
    """
    _svc_name_ = "Python Web Service"
    _svc_display_name_ = "Python Web Service"
    _svc_deps_ = None # sequence of service names on which this depends
    _exe_name_ = "pywebsvc"
    _exe_args_ = None # Default to no arguments
    # Only exists on Windows 2000 or later, ignored on windows NT
    _svc_description_ = "Python Web Service"
    def SvcDoRun(self):
        """Start the process bus and block until it exits."""
        from cherrypy import process
        process.bus.start()
        process.bus.block()
    def SvcStop(self):
        """Report STOP_PENDING to the SCM, then shut the bus down."""
        from cherrypy import process
        self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
        process.bus.exit()
    def SvcOther(self, control):
        """Translate a custom service control code into a bus channel message.

        Bug fix: ``process`` was referenced without the local
        ``from cherrypy import process`` that the other Svc* methods perform,
        so any custom control (e.g. 'graceful') raised NameError.
        """
        from cherrypy import process
        process.bus.publish(control_codes.key_for(control))
if __name__ == '__main__':
    # Let pywin32 parse the command line (install/remove/start/stop/...)
    # and apply the requested action to the PyWebService class.
    win32serviceutil.HandleCommandLine(PyWebService)
| mit |
3dfxmadscientist/cbss-server | addons/web/tests/test_serving_base.py | 138 | 1031 | # -*- coding: utf-8 -*-
import random
import unittest2
from ..controllers.main import module_topological_sort as sort
def sample(population):
return random.sample(
population,
random.randint(0, min(len(population), 5)))
class TestModulesLoading(unittest2.TestCase):
    """Checks that module_topological_sort loads dependencies before dependents."""

    def setUp(self):
        # Python 2 map(): a list of 1000 module names "0".."999".
        self.mods = map(str, range(1000))

    def test_topological_sort(self):
        random.shuffle(self.mods)
        # Each module only depends on modules that appear earlier in
        # self.mods, so a valid topological order is guaranteed to exist.
        modules = [
            (k, sample(self.mods[:i]))
            for i, k in enumerate(self.mods)]
        random.shuffle(modules)
        ms = dict(modules)
        seen = set()
        sorted_modules = sort(ms)
        for module in sorted_modules:
            deps = ms[module]
            # Every dependency must already have been "loaded" (seen)
            # by the time its dependent module comes up.
            self.assertGreaterEqual(
                seen, set(deps),
                'Module %s (index %d), ' \
                'missing dependencies %s from loaded modules %s' % (
                    module, sorted_modules.index(module), deps, seen
                ))
            seen.add(module)
| agpl-3.0 |
takeflight/django | tests/view_tests/tests/test_defaults.py | 11 | 3203 | from __future__ import unicode_literals
from django.test import TestCase
from django.test.utils import override_settings
from ..models import UrlArticle
@override_settings(ROOT_URLCONF='view_tests.urls')
class DefaultsTests(TestCase):
    """Test django views in django/views/defaults.py"""
    fixtures = ['testdata.json']
    # The first URL is routed in urls.py (to a missing object); the second
    # is not routed at all. Both must produce a 404.
    non_existing_urls = ['/non_existing_url/',  # this is in urls.py
                         '/other_non_existing_url/']  # this NOT in urls.py

    def test_page_not_found(self):
        "A 404 status is returned by the page_not_found view"
        for url in self.non_existing_urls:
            response = self.client.get(url)
            self.assertEqual(response.status_code, 404)

    def test_csrf_token_in_404(self):
        """
        The 404 page should have the csrf_token available in the context
        """
        # See ticket #14565
        for url in self.non_existing_urls:
            response = self.client.get(url)
            csrf_token = response.context['csrf_token']
            # A real token must be rendered -- neither the 'NOTPROVIDED'
            # placeholder nor an empty string.
            self.assertNotEqual(str(csrf_token), 'NOTPROVIDED')
            self.assertNotEqual(str(csrf_token), '')

    def test_server_error(self):
        "The server_error view raises a 500 status"
        response = self.client.get('/server_error/')
        self.assertEqual(response.status_code, 500)

    def test_custom_templates(self):
        """
        Test that 404.html and 500.html templates are picked by their respective
        handler.
        """
        # Register in-memory templates so the default handlers find custom
        # 404.html / 500.html instead of their built-in fallbacks.
        with override_settings(TEMPLATE_LOADERS=[
            ('django.template.loaders.locmem.Loader', {
                '404.html': 'This is a test template for a 404 error.',
                '500.html': 'This is a test template for a 500 error.',
            }),
        ]):
            for code, url in ((404, '/non_existing_url/'), (500, '/server_error/')):
                response = self.client.get(url)
                self.assertContains(response, "test template for a %d error" % code,
                                    status_code=code)

    def test_get_absolute_url_attributes(self):
        "A model can set attributes on the get_absolute_url method"
        # Checked on both the unbound method and a bound instance method.
        self.assertTrue(getattr(UrlArticle.get_absolute_url, 'purge', False),
                        'The attributes of the original get_absolute_url must be added.')
        article = UrlArticle.objects.get(pk=1)
        self.assertTrue(getattr(article.get_absolute_url, 'purge', False),
                        'The attributes of the original get_absolute_url must be added.')

    @override_settings(DEFAULT_CONTENT_TYPE="text/xml")
    def test_default_content_type_is_text_html(self):
        """
        Content-Type of the default error responses is text/html. Refs #20822.
        """
        # DEFAULT_CONTENT_TYPE is overridden above to prove the error views
        # force text/html regardless of the global default.
        response = self.client.get('/raises400/')
        self.assertEqual(response['Content-Type'], 'text/html')
        response = self.client.get('/raises403/')
        self.assertEqual(response['Content-Type'], 'text/html')
        response = self.client.get('/non_existing_url/')
        self.assertEqual(response['Content-Type'], 'text/html')
        response = self.client.get('/server_error/')
        self.assertEqual(response['Content-Type'], 'text/html')
| bsd-3-clause |
ThCC/postman-client | .eggs/requests-2.11.0-py2.7.egg/requests/packages/urllib3/util/timeout.py | 713 | 9596 | from __future__ import absolute_import
# The default socket timeout, used by httplib to indicate that no timeout was
# specified by the user
from socket import _GLOBAL_DEFAULT_TIMEOUT
import time
from ..exceptions import TimeoutStateError
# A sentinel value to indicate that no timeout was specified by the user in
# urllib3
_Default = object()
def current_time():
    """Return the current wall-clock time in seconds.

    Kept as a module-level indirection so unit tests can mock it out.
    """
    return time.time()
class Timeout(object):
    """ Timeout configuration.

    Timeouts can be defined as a default for a pool::

        timeout = Timeout(connect=2.0, read=7.0)
        http = PoolManager(timeout=timeout)
        response = http.request('GET', 'http://example.com/')

    Or per-request (which overrides the default for the pool)::

        response = http.request('GET', 'http://example.com/', timeout=Timeout(10))

    Timeouts can be disabled by setting all the parameters to ``None``::

        no_timeout = Timeout(connect=None, read=None)
        response = http.request('GET', 'http://example.com/, timeout=no_timeout)

    :param total:
        This combines the connect and read timeouts into one; the read timeout
        will be set to the time leftover from the connect attempt. In the
        event that both a connect timeout and a total are specified, or a read
        timeout and a total are specified, the shorter timeout will be applied.

        Defaults to None.

    :type total: integer, float, or None

    :param connect:
        The maximum amount of time to wait for a connection attempt to a server
        to succeed. Omitting the parameter will default the connect timeout to
        the system default. None will set an infinite timeout for connection
        attempts.

    :type connect: integer, float, or None

    :param read:
        The maximum amount of time to wait between consecutive
        read operations for a response from the server. Omitting
        the parameter will default the read timeout to the system
        default. None will set an infinite timeout.

    :type read: integer, float, or None

    .. note::

        Many factors can affect the total amount of time for urllib3 to return
        an HTTP response. The read and total timeouts only measure the time
        between read operations on the socket connecting the client and the
        server, not the total amount of time for the request to return a
        complete response (DNS resolution, for example, is not covered). If
        your goal is to cut off any request after a set amount of wall clock
        time, consider having a second "watcher" thread to cut off a slow
        request.
    """

    #: A sentinel object representing the default timeout value
    DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT

    def __init__(self, total=None, connect=_Default, read=_Default):
        self._connect = self._validate_timeout(connect, 'connect')
        self._read = self._validate_timeout(read, 'read')
        self.total = self._validate_timeout(total, 'total')
        self._start_connect = None

    def __str__(self):
        return '%s(connect=%r, read=%r, total=%r)' % (
            type(self).__name__, self._connect, self._read, self.total)

    @classmethod
    def _validate_timeout(cls, value, name):
        """ Check that a timeout attribute is valid.

        :param value: The timeout value to validate
        :param name: The name of the timeout attribute to validate. This is
            used to specify in error messages.
        :return: The validated and casted version of the given value.
        :raises ValueError: If it is a numeric value less than zero, a
            boolean, or a type that is not an integer or a float.
        """
        if value is _Default:
            return cls.DEFAULT_TIMEOUT

        if value is None or value is cls.DEFAULT_TIMEOUT:
            return value

        # Bug fix: bool is a subclass of int, so True/False previously passed
        # the float() check below and were silently treated as 1 or 0 second
        # timeouts. A boolean here is virtually always a caller error (e.g. a
        # flag passed in the wrong position), so reject it explicitly.
        if isinstance(value, bool):
            raise ValueError("Timeout cannot be a boolean value. It must "
                             "be an int or float.")

        try:
            float(value)
        except (TypeError, ValueError):
            raise ValueError("Timeout value %s was %s, but it must be an "
                             "int or float." % (name, value))

        try:
            if value < 0:
                raise ValueError("Attempted to set %s timeout to %s, but the "
                                 "timeout cannot be set to a value less "
                                 "than 0." % (name, value))
        except TypeError:  # Python 3: unorderable types raise TypeError
            raise ValueError("Timeout value %s was %s, but it must be an "
                             "int or float." % (name, value))

        return value

    @classmethod
    def from_float(cls, timeout):
        """ Create a new Timeout from a legacy timeout value.

        The timeout value used by httplib.py sets the same timeout on the
        connect(), and recv() socket requests. This creates a :class:`Timeout`
        object that sets the individual timeouts to the ``timeout`` value
        passed to this function.

        :param timeout: The legacy timeout value.
        :type timeout: integer, float, sentinel default object, or None
        :return: Timeout object
        :rtype: :class:`Timeout`
        """
        return Timeout(read=timeout, connect=timeout)

    def clone(self):
        """ Create a copy of the timeout object

        Timeout properties are stored per-pool but each request needs a fresh
        Timeout object to ensure each one has its own start/stop configured.

        :return: a copy of the timeout object
        :rtype: :class:`Timeout`
        """
        # We can't use copy.deepcopy because that will also create a new object
        # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to
        # detect the user default.
        return Timeout(connect=self._connect, read=self._read,
                       total=self.total)

    def start_connect(self):
        """ Start the timeout clock, used during a connect() attempt

        :raises urllib3.exceptions.TimeoutStateError: if you attempt
            to start a timer that has been started already.
        """
        if self._start_connect is not None:
            raise TimeoutStateError("Timeout timer has already been started.")
        self._start_connect = current_time()
        return self._start_connect

    def get_connect_duration(self):
        """ Gets the time elapsed since the call to :meth:`start_connect`.

        :return: Elapsed time.
        :rtype: float
        :raises urllib3.exceptions.TimeoutStateError: if you attempt
            to get duration for a timer that hasn't been started.
        """
        if self._start_connect is None:
            raise TimeoutStateError("Can't get connect duration for timer "
                                    "that has not started.")
        return current_time() - self._start_connect

    @property
    def connect_timeout(self):
        """ Get the value to use when setting a connection timeout.

        This will be a positive float or integer, the value None
        (never timeout), or the default system timeout.

        :return: Connect timeout.
        :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
        """
        if self.total is None:
            return self._connect

        if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
            return self.total

        # Both were specified: the shorter one wins.
        return min(self._connect, self.total)

    @property
    def read_timeout(self):
        """ Get the value for the read timeout.

        This assumes some time has elapsed in the connection timeout and
        computes the read timeout appropriately.

        If self.total is set, the read timeout is dependent on the amount of
        time taken by the connect timeout. If the connection time has not been
        established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be
        raised.

        :return: Value to use for the read timeout.
        :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
        :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect`
            has not yet been called on this object.
        """
        if (self.total is not None and
                self.total is not self.DEFAULT_TIMEOUT and
                self._read is not None and
                self._read is not self.DEFAULT_TIMEOUT):
            # In case the connect timeout has not yet been established.
            if self._start_connect is None:
                return self._read
            # Read gets whatever is left of the total budget, capped by the
            # explicit read timeout and floored at zero.
            return max(0, min(self.total - self.get_connect_duration(),
                              self._read))
        elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT:
            return max(0, self.total - self.get_connect_duration())
        else:
            return self._read
| mit |
mlperf/training_results_v0.6 | NVIDIA/benchmarks/minigo/implementations/tensorflow/minigo/freeze_graph.py | 1 | 1220 | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Freeze a model to a GraphDef proto."""
from absl import app, flags
import dual_net
flags.DEFINE_string('model_path', None, 'Path to model to freeze')
flags.DEFINE_integer('trt_batch', 0, 'Batch to freeze model to TRT graph')
flags.mark_flag_as_required('model_path')
FLAGS = flags.FLAGS
def main(unused_argv):
    """Freeze a model to a GraphDef proto."""
    # NOTE(review): FLAGS.use_tpu is not defined in this module; it appears
    # to be registered as a flag by the dual_net import (or a transitive
    # import) -- confirm before relying on it here.
    if FLAGS.use_tpu:
        dual_net.freeze_graph_tpu(FLAGS.model_path)
    elif FLAGS.trt_batch > 0:
        # A positive batch size requests a TensorRT-optimized frozen graph.
        dual_net.freeze_graph(FLAGS.model_path, True, FLAGS.trt_batch)
    else:
        dual_net.freeze_graph(FLAGS.model_path)


if __name__ == "__main__":
    app.run(main)
| apache-2.0 |
gsmartway/odoo | openerp/addons/test_inherits/tests/test_inherits.py | 266 | 1066 | # -*- coding: utf-8 -*-
from openerp.tests import common
class test_inherits(common.TransactionCase):
    """Tests for _inherits delegation across three model levels
    (unit -> box -> pallet in the test_inherits demo models)."""

    def test_create_3_levels_inherits(self):
        """ Check that we can create an inherits on 3 levels """
        pallet = self.env['test.pallet'].create({
            'name': 'B',
            'field_in_box': 'box',
            'field_in_pallet': 'pallet',
        })
        self.assertTrue(pallet)
        self.assertEqual(pallet.name, 'B')
        self.assertEqual(pallet.field_in_box, 'box')
        self.assertEqual(pallet.field_in_pallet, 'pallet')

    def test_read_3_levels_inherits(self):
        """ Check that we can read an inherited field on 3 levels """
        pallet = self.env.ref('test_inherits.pallet_a')
        self.assertEqual(pallet.read(['name']), [{'id': pallet.id, 'name': 'Unit A'}])

    def test_write_3_levels_inherits(self):
        """ Check that we can write an inherited field on 3 levels """
        # (docstring fixed: it was copy-pasted from the create test)
        pallet = self.env.ref('test_inherits.pallet_a')
        pallet.write({'name': 'C'})
        self.assertEqual(pallet.name, 'C')
| agpl-3.0 |
alexeyum/scikit-learn | sklearn/feature_selection/tests/test_feature_select.py | 43 | 24671 | """
Todo: cross-check the F-value with stats model
"""
from __future__ import division
import itertools
import warnings
import numpy as np
from scipy import stats, sparse
from numpy.testing import run_module_suite
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_not_in
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import assert_warns
from sklearn.utils.testing import ignore_warnings
from sklearn.utils.testing import assert_warns_message
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_greater_equal
from sklearn.utils import safe_mask
from sklearn.datasets.samples_generator import (make_classification,
make_regression)
from sklearn.feature_selection import (
chi2, f_classif, f_oneway, f_regression, mutual_info_classif,
mutual_info_regression, SelectPercentile, SelectKBest, SelectFpr,
SelectFdr, SelectFwe, GenericUnivariateSelect)
##############################################################################
# Test the score functions
def test_f_oneway_vs_scipy_stats():
    # Our f_oneway must agree with scipy.stats.f_oneway on the same samples.
    rng = np.random.RandomState(0)
    sample_a = rng.randn(10, 3)
    sample_b = 1 + rng.randn(10, 3)
    f_ref, pv_ref = stats.f_oneway(sample_a, sample_b)
    f_ours, pv_ours = f_oneway(sample_a, sample_b)
    assert_true(np.allclose(f_ref, f_ours))
    assert_true(np.allclose(pv_ref, pv_ours))
def test_f_oneway_ints():
    # Smoke test f_oneway on integers: that it does raise casting errors
    # with recent numpys
    rng = np.random.RandomState(0)
    X = rng.randint(10, size=(10, 10))
    y = np.arange(10)
    fint, pint = f_oneway(X, y)

    # test that it gives the same result as with float input.
    # (np.float was deprecated in NumPy 1.20 and removed in 1.24; the
    # builtin float is the equivalent dtype alias.)
    f, p = f_oneway(X.astype(float), y)
    assert_array_almost_equal(f, fint, decimal=4)
    assert_array_almost_equal(p, pint, decimal=4)
def test_f_classif():
    # On a well-separated simulated classification problem only the first 5
    # (informative + redundant) features should get small p-values; sparse
    # input must give the same scores as dense.
    X, y = make_classification(n_samples=200, n_features=20,
                               n_informative=3, n_redundant=2,
                               n_repeated=0, n_classes=8,
                               n_clusters_per_class=1, flip_y=0.0,
                               class_sep=10, shuffle=False, random_state=0)

    F_dense, pv_dense = f_classif(X, y)
    F_sparse, pv_sparse = f_classif(sparse.csr_matrix(X), y)
    assert_true((F_dense > 0).all())
    assert_true((pv_dense > 0).all())
    assert_true((pv_dense < 1).all())
    assert_true((pv_dense[:5] < 0.05).all())
    assert_true((pv_dense[5:] > 1.e-4).all())
    assert_array_almost_equal(F_sparse, F_dense)
    assert_array_almost_equal(pv_sparse, pv_dense)
def test_f_regression():
    # The F test should flag only the 5 informative features of a simulated
    # regression problem; without centering, sparse input must match dense.
    X, y = make_regression(n_samples=200, n_features=20, n_informative=5,
                           shuffle=False, random_state=0)

    F, pv = f_regression(X, y)
    assert_true((F > 0).all())
    assert_true((pv > 0).all())
    assert_true((pv < 1).all())
    assert_true((pv[:5] < 0.05).all())
    assert_true((pv[5:] > 1.e-4).all())

    # Again without centering, comparing dense against sparse input.
    F_dense, pv_dense = f_regression(X, y, center=False)
    F_sparse, pv_sparse = f_regression(sparse.csr_matrix(X), y, center=False)
    assert_array_almost_equal(F_sparse, F_dense)
    assert_array_almost_equal(pv_sparse, pv_dense)
def test_f_regression_input_dtype():
    # Test whether f_regression returns the same value
    # for any numeric data_type
    rng = np.random.RandomState(0)
    X = rng.rand(10, 20)
    # np.int / np.float were deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin int / float are the equivalent dtype aliases.
    y = np.arange(10).astype(int)

    F1, pv1 = f_regression(X, y)
    F2, pv2 = f_regression(X, y.astype(float))
    assert_array_almost_equal(F1, F2, 5)
    assert_array_almost_equal(pv1, pv2, 5)
def test_f_regression_center():
    # f_regression must adjust degrees of freedom according to 'center':
    # with zero-mean variates the centered and uncentered F-scores differ
    # only by the (n-1)/(n-2) dof ratio.
    X = np.arange(-5, 6).reshape(-1, 1)  # zero-mean regressor
    n_samples = X.size
    Y = np.ones(n_samples)
    Y[::2] = -1.
    Y[0] = 0.  # force the target mean to zero as well

    F_centered, _ = f_regression(X, Y, center=True)
    F_raw, _ = f_regression(X, Y, center=False)
    dof_ratio = (n_samples - 1.) / (n_samples - 2.)
    assert_array_almost_equal(F_centered * dof_ratio, F_raw)
    assert_almost_equal(F_raw[0], 0.232558139)  # value from statsmodels OLS
def test_f_classif_multi_class():
    # Multi-class variant: the p-values must single out the 5 useful
    # features of the simulated problem.
    X, y = make_classification(n_samples=200, n_features=20,
                               n_informative=3, n_redundant=2,
                               n_repeated=0, n_classes=8,
                               n_clusters_per_class=1, flip_y=0.0,
                               class_sep=10, shuffle=False, random_state=0)

    F, pv = f_classif(X, y)
    assert_true((F > 0).all())
    assert_true((pv > 0).all())
    assert_true((pv < 1).all())
    assert_true((pv[:5] < 0.05).all())
    assert_true((pv[5:] > 1.e-4).all())
def test_select_percentile_classif():
    # SelectPercentile and the equivalent GenericUnivariateSelect must both
    # keep exactly the 5 informative/redundant features (25% of 20).
    X, y = make_classification(n_samples=200, n_features=20,
                               n_informative=3, n_redundant=2,
                               n_repeated=0, n_classes=8,
                               n_clusters_per_class=1, flip_y=0.0,
                               class_sep=10, shuffle=False, random_state=0)

    percentile_filter = SelectPercentile(f_classif, percentile=25)
    X_filtered = percentile_filter.fit(X, y).transform(X)
    X_generic = GenericUnivariateSelect(f_classif, mode='percentile',
                                        param=25).fit(X, y).transform(X)
    assert_array_equal(X_filtered, X_generic)

    expected_support = np.zeros(20)
    expected_support[:5] = 1
    assert_array_equal(percentile_filter.get_support(), expected_support)
def test_select_percentile_classif_sparse():
    # Same as the dense percentile test, but on CSR input; additionally
    # checks that inverse_transform preserves sparsity and the support mask.
    X, y = make_classification(n_samples=200, n_features=20,
                               n_informative=3, n_redundant=2,
                               n_repeated=0, n_classes=8,
                               n_clusters_per_class=1, flip_y=0.0,
                               class_sep=10, shuffle=False, random_state=0)
    X = sparse.csr_matrix(X)

    percentile_filter = SelectPercentile(f_classif, percentile=25)
    X_filtered = percentile_filter.fit(X, y).transform(X)
    X_generic = GenericUnivariateSelect(f_classif, mode='percentile',
                                        param=25).fit(X, y).transform(X)
    assert_array_equal(X_filtered.toarray(), X_generic.toarray())

    support = percentile_filter.get_support()
    expected_support = np.zeros(20)
    expected_support[:5] = 1
    assert_array_equal(support, expected_support)

    X_inverse = percentile_filter.inverse_transform(X_generic)
    assert_true(sparse.issparse(X_inverse))
    support_mask = safe_mask(X_inverse, support)
    assert_equal(X_inverse.shape, X.shape)
    assert_array_equal(X_inverse[:, support_mask].toarray(),
                       X_filtered.toarray())
    # All non-selected columns must be empty.
    assert_equal(X_inverse.getnnz(), X_filtered.getnnz())
##############################################################################
# Test univariate selection in classification settings
def test_select_kbest_classif():
    # SelectKBest(k=5) and GenericUnivariateSelect(mode='k_best') must agree
    # and recover the 5 useful features of the simulated problem.
    X, y = make_classification(n_samples=200, n_features=20,
                               n_informative=3, n_redundant=2,
                               n_repeated=0, n_classes=8,
                               n_clusters_per_class=1, flip_y=0.0,
                               class_sep=10, shuffle=False, random_state=0)

    kbest_filter = SelectKBest(f_classif, k=5)
    X_kbest = kbest_filter.fit(X, y).transform(X)
    X_generic = GenericUnivariateSelect(
        f_classif, mode='k_best', param=5).fit(X, y).transform(X)
    assert_array_equal(X_kbest, X_generic)

    expected_support = np.zeros(20)
    expected_support[:5] = 1
    assert_array_equal(kbest_filter.get_support(), expected_support)
def test_select_kbest_all():
    # k="all" must be a no-op: every feature is kept.
    X, y = make_classification(n_samples=20, n_features=10,
                               shuffle=False, random_state=0)

    selector = SelectKBest(f_classif, k='all')
    X_kept = selector.fit(X, y).transform(X)
    assert_array_equal(X, X_kept)
def test_select_kbest_zero():
    # k=0 must select nothing and warn when transforming.
    X, y = make_classification(n_samples=20, n_features=10,
                               shuffle=False, random_state=0)

    selector = SelectKBest(f_classif, k=0)
    selector.fit(X, y)
    assert_array_equal(selector.get_support(), np.zeros(10, dtype=bool))

    X_selected = assert_warns_message(UserWarning, 'No features were selected',
                                      selector.transform, X)
    assert_equal(X_selected.shape, (20, 0))
def test_select_heuristics_classif():
    # The fdr/fpr/fwe heuristics of GenericUnivariateSelect must all agree
    # with SelectFwe on this well-separated classification problem.
    X, y = make_classification(n_samples=200, n_features=20,
                               n_informative=3, n_redundant=2,
                               n_repeated=0, n_classes=8,
                               n_clusters_per_class=1, flip_y=0.0,
                               class_sep=10, shuffle=False, random_state=0)

    fwe_filter = SelectFwe(f_classif, alpha=0.01)
    X_fwe = fwe_filter.fit(X, y).transform(X)
    expected_support = np.zeros(20)
    expected_support[:5] = 1
    for mode in ['fdr', 'fpr', 'fwe']:
        X_generic = GenericUnivariateSelect(
            f_classif, mode=mode, param=0.01).fit(X, y).transform(X)
        assert_array_equal(X_fwe, X_generic)
        assert_array_almost_equal(fwe_filter.get_support(), expected_support)
##############################################################################
# Test univariate selection in regression settings
def assert_best_scores_kept(score_filter):
    # The scores of the selected features must be exactly the top-|support|
    # scores of the fitted filter.
    scores = score_filter.scores_
    mask = score_filter.get_support()
    n_kept = mask.sum()
    assert_array_equal(np.sort(scores[mask]),
                       np.sort(scores)[-n_kept:])
def test_select_percentile_regression():
    # percentile=25 of 20 features -> exactly the 5 informative ones; also
    # checks inverse_transform zero-fills the dropped columns and keeps dtype.
    X, y = make_regression(n_samples=200, n_features=20,
                           n_informative=5, shuffle=False, random_state=0)

    percentile_filter = SelectPercentile(f_regression, percentile=25)
    X_filtered = percentile_filter.fit(X, y).transform(X)
    assert_best_scores_kept(percentile_filter)
    X_generic = GenericUnivariateSelect(
        f_regression, mode='percentile', param=25).fit(X, y).transform(X)
    assert_array_equal(X_filtered, X_generic)

    support = percentile_filter.get_support()
    expected_support = np.zeros(20)
    expected_support[:5] = 1
    assert_array_equal(support, expected_support)

    X_masked = X.copy()
    X_masked[:, np.logical_not(support)] = 0
    assert_array_equal(X_masked, percentile_filter.inverse_transform(X_filtered))
    # Check inverse_transform respects dtype
    assert_array_equal(X_masked.astype(bool),
                       percentile_filter.inverse_transform(X_filtered.astype(bool)))
def test_select_percentile_regression_full():
    # percentile=100 must keep every feature.
    X, y = make_regression(n_samples=200, n_features=20,
                           n_informative=5, shuffle=False, random_state=0)

    percentile_filter = SelectPercentile(f_regression, percentile=100)
    X_filtered = percentile_filter.fit(X, y).transform(X)
    assert_best_scores_kept(percentile_filter)
    X_generic = GenericUnivariateSelect(
        f_regression, mode='percentile', param=100).fit(X, y).transform(X)
    assert_array_equal(X_filtered, X_generic)
    assert_array_equal(percentile_filter.get_support(), np.ones(20))
def test_invalid_percentile():
    # Percentiles outside [0, 100] must raise ValueError at fit time, for
    # both SelectPercentile and the generic selector.
    X, y = make_regression(n_samples=10, n_features=20,
                           n_informative=2, shuffle=False, random_state=0)

    for bad_percentile in (-1, 101):
        assert_raises(ValueError,
                      SelectPercentile(percentile=bad_percentile).fit, X, y)
        assert_raises(ValueError,
                      GenericUnivariateSelect(mode='percentile',
                                              param=bad_percentile).fit, X, y)
def test_select_kbest_regression():
    # k=5 on a noisy regression problem must recover the 5 informative
    # features, matching the generic k_best selector.
    X, y = make_regression(n_samples=200, n_features=20, n_informative=5,
                           shuffle=False, random_state=0, noise=10)

    kbest_filter = SelectKBest(f_regression, k=5)
    X_kbest = kbest_filter.fit(X, y).transform(X)
    assert_best_scores_kept(kbest_filter)
    X_generic = GenericUnivariateSelect(
        f_regression, mode='k_best', param=5).fit(X, y).transform(X)
    assert_array_equal(X_kbest, X_generic)

    expected_support = np.zeros(20)
    expected_support[:5] = 1
    assert_array_equal(kbest_filter.get_support(), expected_support)
def test_select_heuristics_regression():
    # Test whether the relative univariate feature selection
    # gets the correct items in a simple regression problem
    # with the fpr, fdr or fwe heuristics
    X, y = make_regression(n_samples=200, n_features=20, n_informative=5,
                           shuffle=False, random_state=0, noise=10)

    univariate_filter = SelectFpr(f_regression, alpha=0.01)
    X_r = univariate_filter.fit(X, y).transform(X)
    # (removed an unused 'gtruth' local that was computed but never asserted)
    for mode in ['fdr', 'fpr', 'fwe']:
        X_r2 = GenericUnivariateSelect(
            f_regression, mode=mode, param=0.01).fit(X, y).transform(X)
        assert_array_equal(X_r, X_r2)
        support = univariate_filter.get_support()
        # np.bool was removed in NumPy 1.24; the builtin bool is equivalent.
        assert_array_equal(support[:5], np.ones((5, ), dtype=bool))
        assert_less(np.sum(support[5:] == 1), 3)
def test_select_fdr_regression():
    # Test that fdr heuristic actually has low FDR.
    def single_fdr(alpha, n_informative, random_state):
        # One simulated regression run; returns its empirical false
        # discovery rate FP / (TP + FP), or 0 when nothing false is kept.
        X, y = make_regression(n_samples=150, n_features=20,
                               n_informative=n_informative, shuffle=False,
                               random_state=random_state, noise=10)

        with warnings.catch_warnings(record=True):
            # Warnings can be raised when no features are selected
            # (low alpha or very noisy data)
            univariate_filter = SelectFdr(f_regression, alpha=alpha)
            X_r = univariate_filter.fit(X, y).transform(X)
            X_r2 = GenericUnivariateSelect(
                f_regression, mode='fdr', param=alpha).fit(X, y).transform(X)

        assert_array_equal(X_r, X_r2)
        support = univariate_filter.get_support()
        # Informative features occupy the first n_informative columns
        # (shuffle=False), so anything selected past that index is a FP.
        num_false_positives = np.sum(support[n_informative:] == 1)
        num_true_positives = np.sum(support[:n_informative] == 1)

        if num_false_positives == 0:
            return 0.
        false_discovery_rate = (num_false_positives /
                                (num_true_positives + num_false_positives))
        return false_discovery_rate

    for alpha in [0.001, 0.01, 0.1]:
        for n_informative in [1, 5, 10]:
            # As per Benjamini-Hochberg, the expected false discovery rate
            # should be lower than alpha:
            # FDR = E(FP / (TP + FP)) <= alpha
            false_discovery_rate = np.mean([single_fdr(alpha, n_informative,
                                            random_state) for
                                            random_state in range(30)])
            assert_greater_equal(alpha, false_discovery_rate)

            # Make sure that the empirical false discovery rate increases
            # with alpha:
            if false_discovery_rate != 0:
                assert_greater(false_discovery_rate, alpha / 10)
def test_select_fwe_regression():
    # Test whether the relative univariate feature selection
    # gets the correct items in a simple regression problem
    # with the fwe heuristic
    X, y = make_regression(n_samples=200, n_features=20,
                           n_informative=5, shuffle=False, random_state=0)

    univariate_filter = SelectFwe(f_regression, alpha=0.01)
    X_r = univariate_filter.fit(X, y).transform(X)
    X_r2 = GenericUnivariateSelect(
        f_regression, mode='fwe', param=0.01).fit(X, y).transform(X)
    assert_array_equal(X_r, X_r2)
    support = univariate_filter.get_support()
    # (removed an unused 'gtruth' local that was computed but never asserted)
    # np.bool was removed in NumPy 1.24; the builtin bool is equivalent.
    assert_array_equal(support[:5], np.ones((5, ), dtype=bool))
    assert_less(np.sum(support[5:] == 1), 2)
def test_selectkbest_tiebreaking():
    # Test whether SelectKBest actually selects k features in case of ties.
    # Prior to 0.11, SelectKBest would return more features than requested.
    Xs = [[0, 1, 1], [0, 0, 1], [1, 0, 0], [1, 1, 0]]
    y = [1]

    # PEP 8 (E731): use a def instead of assigning a lambda to a name.
    def dummy_score(X, y):
        return (X[0], X[0])

    for X in Xs:
        sel = SelectKBest(dummy_score, k=1)
        X1 = ignore_warnings(sel.fit_transform)([X], y)
        assert_equal(X1.shape[1], 1)
        assert_best_scores_kept(sel)

        sel = SelectKBest(dummy_score, k=2)
        X2 = ignore_warnings(sel.fit_transform)([X], y)
        assert_equal(X2.shape[1], 2)
        assert_best_scores_kept(sel)
def test_selectpercentile_tiebreaking():
    # Test if SelectPercentile selects the right n_features in case of ties.
    Xs = [[0, 1, 1], [0, 0, 1], [1, 0, 0], [1, 1, 0]]
    y = [1]

    # PEP 8 (E731): use a def instead of assigning a lambda to a name.
    def dummy_score(X, y):
        return (X[0], X[0])

    for X in Xs:
        sel = SelectPercentile(dummy_score, percentile=34)
        X1 = ignore_warnings(sel.fit_transform)([X], y)
        assert_equal(X1.shape[1], 1)
        assert_best_scores_kept(sel)

        sel = SelectPercentile(dummy_score, percentile=67)
        X2 = ignore_warnings(sel.fit_transform)([X], y)
        assert_equal(X2.shape[1], 2)
        assert_best_scores_kept(sel)
def test_tied_pvalues():
    # k-best and percentile selection must cope with tied p-values: chi2
    # yields identical p-values but distinct scores for these features, so
    # the smallest-scored feature (9998) must always be dropped.
    X0 = np.array([[10000, 9999, 9998], [1, 1, 1]])
    y = [0, 1]

    for perm in itertools.permutations((0, 1, 2)):
        X = X0[:, perm]

        for selected in (SelectKBest(chi2, k=2).fit_transform(X, y),
                         SelectPercentile(chi2, percentile=67).fit_transform(X, y)):
            assert_equal(selected.shape, (2, 2))
            assert_not_in(9998, selected)
def test_tied_scores():
    # k-best must sort stably when scores are tied: with equal scores the
    # last n_features columns are the ones kept.
    X_train = np.array([[0, 0, 0], [1, 1, 1]])
    y_train = [0, 1]

    for n_features in (1, 2, 3):
        selector = SelectKBest(chi2, k=n_features).fit(X_train, y_train)
        transformed = selector.transform([[0, 1, 2]])
        assert_array_equal(transformed[0], np.arange(3)[-n_features:])
def test_nans():
    # SelectKBest and SelectPercentile must tolerate NaN scores: the first
    # feature is constant, which makes f_classif (ANOVA) return NaN for it.
    X = [[0, 1, 0], [0, -1, -1], [0, .5, .5]]
    y = [1, 0, 1]

    selectors = (SelectKBest(f_classif, 2),
                 SelectPercentile(f_classif, percentile=67))
    for select in selectors:
        ignore_warnings(select.fit)(X, y)
        assert_array_equal(select.get_support(indices=True), np.array([1, 2]))
def test_score_func_error():
    # A non-callable score_func must raise TypeError at fit time for every
    # selector class.
    X = [[0, 1, 0], [0, -1, -1], [0, .5, .5]]
    y = [1, 0, 1]

    selector_classes = [SelectKBest, SelectPercentile, SelectFwe,
                        SelectFdr, SelectFpr, GenericUnivariateSelect]
    for SelectFeatures in selector_classes:
        assert_raises(TypeError, SelectFeatures(score_func=10).fit, X, y)
def test_invalid_k():
    # k outside [0, n_features] must raise ValueError at fit time, for both
    # SelectKBest and the generic k_best selector.
    X = [[0, 1, 0], [0, -1, -1], [0, .5, .5]]
    y = [1, 0, 1]

    for bad_k in (-1, 4):
        assert_raises(ValueError, SelectKBest(k=bad_k).fit, X, y)
        assert_raises(ValueError,
                      GenericUnivariateSelect(mode='k_best', param=bad_k).fit,
                      X, y)
def test_f_classif_constant_feature():
    # A feature that is constant across samples must make f_classif emit a
    # UserWarning.
    X, y = make_classification(n_samples=10, n_features=5)
    X[:, 0] = 2.0
    assert_warns(UserWarning, f_classif, X, y)
def test_no_feature_selected():
    """When every feature is rejected, transform() warns and returns an
    (n_samples, 0) array while get_support() is all-False."""
    rng = np.random.RandomState(0)
    # Random uncorrelated data: a strict univariate test should reject
    # all of the features.
    X = rng.rand(40, 10)
    y = rng.randint(0, 4, size=40)

    strict_selectors = (SelectFwe(alpha=0.01),
                        SelectFdr(alpha=0.01),
                        SelectFpr(alpha=0.01),
                        SelectPercentile(percentile=0),
                        SelectKBest(k=0))
    for selector in strict_selectors:
        selector.fit(X, y)
        assert_array_equal(selector.get_support(), np.zeros(10))
        reduced = assert_warns_message(
            UserWarning, 'No features were selected', selector.transform, X)
        assert_equal(reduced.shape, (40, 0))
def test_mutual_info_classif():
    """mutual_info_classif must keep the two informative features.

    Verified in both k-best and percentile modes, and the dedicated
    SelectKBest/SelectPercentile estimators must agree with the
    corresponding GenericUnivariateSelect configuration.
    """
    X, y = make_classification(n_samples=100, n_features=5,
                               n_informative=1, n_redundant=1,
                               n_repeated=0, n_classes=2,
                               n_clusters_per_class=1, flip_y=0.0,
                               class_sep=10, shuffle=False, random_state=0)

    # The informative/redundant pair occupies the first two columns
    # (shuffle=False above).
    expected_support = np.zeros(5)
    expected_support[:2] = 1

    cases = (
        (SelectKBest(mutual_info_classif, k=2), 'k_best', 2),
        (SelectPercentile(mutual_info_classif, percentile=40),
         'percentile', 40),
    )
    for univariate_filter, mode, param in cases:
        X_r = univariate_filter.fit(X, y).transform(X)
        X_r2 = GenericUnivariateSelect(
            mutual_info_classif, mode=mode, param=param).fit(X, y).transform(X)
        assert_array_equal(X_r, X_r2)
        assert_array_equal(univariate_filter.get_support(), expected_support)
def test_mutual_info_regression():
    """mutual_info_regression must keep the two informative features.

    Checked in k-best and percentile modes; the dedicated estimators
    must agree with the equivalent GenericUnivariateSelect setup.
    """
    X, y = make_regression(n_samples=100, n_features=10, n_informative=2,
                           shuffle=False, random_state=0, noise=10)

    # With shuffle=False the informative features are the first two columns.
    expected_support = np.zeros(10)
    expected_support[:2] = 1

    # k-best mode; additionally verify the kept features carry the top scores.
    kbest = SelectKBest(mutual_info_regression, k=2)
    reduced = kbest.fit(X, y).transform(X)
    assert_best_scores_kept(kbest)
    reduced_generic = GenericUnivariateSelect(
        mutual_info_regression, mode='k_best', param=2).fit(X, y).transform(X)
    assert_array_equal(reduced, reduced_generic)
    assert_array_equal(kbest.get_support(), expected_support)

    # Percentile mode: top 20% of 10 features, i.e. the same two columns.
    percentile = SelectPercentile(mutual_info_regression, percentile=20)
    reduced = percentile.fit(X, y).transform(X)
    reduced_generic = GenericUnivariateSelect(
        mutual_info_regression, mode='percentile',
        param=20).fit(X, y).transform(X)
    assert_array_equal(reduced, reduced_generic)
    assert_array_equal(percentile.get_support(), expected_support)
if __name__ == '__main__':
    # Allow running this test module directly with the numpy-style runner.
    run_module_suite()
| bsd-3-clause |
BuildingLink/sentry | src/sentry/south_migrations/0097_auto__del_affecteduserbygroup__del_unique_affecteduserbygroup_project_.py | 36 | 25874 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Drop the legacy TrackedUser/AffectedUserByGroup tables and the
        denormalized Group.users_seen column.

        The unique constraints are removed first so the backend can drop
        the underlying indexes before the tables themselves go away.
        """
        # Removing unique constraint on 'TrackedUser', fields ['project', 'ident']
        db.delete_unique('sentry_trackeduser', ['project_id', 'ident'])
        # Removing unique constraint on 'AffectedUserByGroup', fields ['project', 'tuser', 'group']
        db.delete_unique('sentry_affecteduserbygroup', ['project_id', 'tuser_id', 'group_id'])
        # Deleting model 'AffectedUserByGroup'
        db.delete_table('sentry_affecteduserbygroup')
        # Deleting model 'TrackedUser'
        db.delete_table('sentry_trackeduser')
        # Deleting field 'Group.users_seen'
        db.delete_column('sentry_groupedmessage', 'users_seen')
    def backwards(self, orm):
        """Deliberately irreversible: the dropped tables and column (and
        their data) cannot be reconstructed."""
        raise NotImplementedError("This is no time machine bro")
models = {
'sentry.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sentry.accessgroup': {
'Meta': {'unique_together': "(('team', 'name'),)", 'object_name': 'AccessGroup'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.User']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Project']", 'symmetrical': 'False'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'})
},
'sentry.activity': {
'Meta': {'object_name': 'Activity'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Event']", 'null': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'type': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'})
},
'sentry.alert': {
'Meta': {'object_name': 'Alert'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'related_groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_alerts'", 'symmetrical': 'False', 'through': "orm['sentry.AlertRelatedGroup']", 'to': "orm['sentry.Group']"}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.alertrelatedgroup': {
'Meta': {'unique_together': "(('group', 'alert'),)", 'object_name': 'AlertRelatedGroup'},
'alert': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Alert']"}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
},
'sentry.event': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'"},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'event_set'", 'null': 'True', 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'server_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'site': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'time_spent': ('django.db.models.fields.FloatField', [], {'null': 'True'})
},
'sentry.eventmapping': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'EventMapping'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.group': {
'Meta': {'unique_together': "(('project', 'checksum'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'})
},
'sentry.groupbookmark': {
'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': "orm['sentry.User']"})
},
'sentry.groupcountbyminute': {
'Meta': {'unique_together': "(('project', 'group', 'date'),)", 'object_name': 'GroupCountByMinute', 'db_table': "'sentry_messagecountbyminute'"},
'date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.groupmeta': {
'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.grouptag': {
'Meta': {'unique_together': "(('project', 'key', 'value', 'group'),)", 'object_name': 'GroupTag', 'db_table': "'sentry_messagefiltervalue'"},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.grouptagkey': {
'Meta': {'unique_together': "(('project', 'group', 'key'),)", 'object_name': 'GroupTagKey'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.lostpasswordhash': {
'Meta': {'object_name': 'LostPasswordHash'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'unique': 'True'})
},
'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'value': ('picklefield.fields.PickledObjectField', [], {})
},
'sentry.pendingteammember': {
'Meta': {'unique_together': "(('team', 'email'),)", 'object_name': 'PendingTeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'pending_member_set'", 'to': "orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'})
},
'sentry.project': {
'Meta': {'unique_together': "(('team', 'slug'),)", 'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'owner': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_owned_project_set'", 'null': 'True', 'to': "orm['sentry.User']"}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Team']", 'null': 'True'})
},
'sentry.projectcountbyminute': {
'Meta': {'unique_together': "(('project', 'date'),)", 'object_name': 'ProjectCountByMinute'},
'date': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.projectkey': {
'Meta': {'object_name': 'ProjectKey'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'}),
'user_added': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'keys_added_set'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.projectoption': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'value': ('picklefield.fields.PickledObjectField', [], {})
},
'sentry.searchdocument': {
'Meta': {'unique_together': "(('project', 'group'),)", 'object_name': 'SearchDocument'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_changed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'total_events': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
},
'sentry.searchtoken': {
'Meta': {'unique_together': "(('document', 'field', 'token'),)", 'object_name': 'SearchToken'},
'document': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'token_set'", 'to': "orm['sentry.SearchDocument']"}),
'field': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '64'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'sentry.tagkey': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'TagKey', 'db_table': "'sentry_filterkey'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.tagvalue': {
'Meta': {'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'TagValue', 'db_table': "'sentry_filtervalue'"},
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.team': {
'Meta': {'object_name': 'Team'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'team_memberships'", 'symmetrical': 'False', 'through': "orm['sentry.TeamMember']", 'to': "orm['sentry.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'owner': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'})
},
'sentry.teammember': {
'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'TeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'member_set'", 'to': "orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_teammember_set'", 'to': "orm['sentry.User']"})
},
'sentry.useroption': {
'Meta': {'unique_together': "(('user', 'project', 'key'),)", 'object_name': 'UserOption'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'value': ('picklefield.fields.PickledObjectField', [], {})
}
}
complete_apps = ['sentry']
| bsd-3-clause |
computersalat/ansible | test/support/windows-integration/plugins/modules/win_ping.py | 146 | 1451 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>, and others
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = r'''
---
module: win_ping
version_added: "1.7"
short_description: A windows version of the classic ping module
description:
- Checks management connectivity of a windows host.
- This is NOT ICMP ping, this is just a trivial test module.
- For non-Windows targets, use the M(ping) module instead.
- For Network targets, use the M(net_ping) module instead.
options:
data:
description:
- Alternate data to return instead of 'pong'.
- If this parameter is set to C(crash), the module will cause an exception.
type: str
default: pong
seealso:
- module: ping
author:
- Chris Church (@cchurch)
'''
EXAMPLES = r'''
# Test connectivity to a windows host
# ansible winserver -m win_ping
- name: Example from an Ansible Playbook
win_ping:
- name: Induce an exception to see what happens
win_ping:
data: crash
'''
RETURN = r'''
ping:
description: Value provided with the data parameter.
returned: success
type: str
sample: pong
'''
| gpl-3.0 |
javrasya/watchdog | tests/test_skip_repeats_queue.py | 9 | 2322 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <yesudeep@gmail.com>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tests import unittest
from watchdog.utils.bricks import SkipRepeatsQueue
class TestSkipRepeatsQueue(unittest.TestCase):
    """Behavioral checks for SkipRepeatsQueue.

    The queue acts like a plain FIFO except that an item equal to the most
    recently enqueued one is silently dropped while that item is still
    pending.
    """

    def test_basic_queue(self):
        """Distinct items pass through in FIFO order."""
        queue = SkipRepeatsQueue()
        events = [(2, 'fred'), (2, 'george'), (4, 'sally')]
        for event in events:
            queue.put(event)
        for event in events:
            self.assertEqual(event, queue.get())
        self.assertTrue(queue.empty())

    def test_allow_nonconsecutive(self):
        """A repeated item is kept when it is not back-to-back."""
        queue = SkipRepeatsQueue()
        first = (2, 'fred')
        second = (2, 'george')
        for event in (first, second, first):  # repeat, but not consecutive
            queue.put(event)
        self.assertEqual(first, queue.get())
        self.assertEqual(second, queue.get())
        self.assertEqual(first, queue.get())
        self.assertTrue(queue.empty())

    def test_prevent_consecutive(self):
        """An immediate duplicate of the last put is dropped."""
        queue = SkipRepeatsQueue()
        first = (2, 'fred')
        second = (2, 'george')
        queue.put(first)
        queue.put(first)  # consecutive duplicate: should be skipped
        queue.put(second)
        self.assertEqual(first, queue.get())
        self.assertEqual(second, queue.get())
        self.assertTrue(queue.empty())

    def test_consecutives_allowed_across_empties(self):
        """Consuming the pending entry lifts the duplicate suppression."""
        queue = SkipRepeatsQueue()
        event = (2, 'fred')
        queue.put(event)
        queue.put(event)  # duplicate while still queued: skipped
        self.assertEqual(event, queue.get())
        self.assertTrue(queue.empty())
        # The previously queued entry is gone, so the same value may be
        # enqueued again.
        queue.put(event)
        self.assertEqual(event, queue.get())
        self.assertTrue(queue.empty())
| apache-2.0 |
alexgorban/models | research/struct2depth/optimize.py | 4 | 19564 |
# Copyright 2018 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Applies online refinement while running inference.
Instructions: Run static inference first before calling this script. Make sure
to point output_dir to the same folder where static inference results were
saved previously.
For example use, please refer to README.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import datetime
import os
import random
from absl import app
from absl import flags
from absl import logging
import numpy as np
import tensorflow as tf
import model
import nets
import reader
import util
gfile = tf.gfile
SAVE_EVERY = 1 # Defines the interval that predictions should be saved at.
SAVE_PREVIEWS = True # If set, while save image previews of depth predictions.
FIXED_SEED = 8964 # Fixed seed for repeatability.
flags.DEFINE_string('output_dir', None, 'Directory to store predictions. '
'Assumes that regular inference has been executed before '
'and results were stored in this folder.')
flags.DEFINE_string('data_dir', None, 'Folder pointing to preprocessed '
'triplets to fine-tune on.')
flags.DEFINE_string('triplet_list_file', None, 'Text file containing paths to '
'image files to process. Paths should be relative with '
'respect to the list file location. Every line should be '
'of the form [input_folder_name] [input_frame_num] '
'[output_path], where [output_path] is optional to specify '
'a different path to store the prediction.')
flags.DEFINE_string('triplet_list_file_remains', None, 'Optional text file '
'containing relative paths to image files which should not '
'be fine-tuned, e.g. because of missing adjacent frames. '
'For all files listed, the static prediction will be '
'copied instead. File can be empty. If not, every line '
'should be of the form [input_folder_name] '
'[input_frame_num] [output_path], where [output_path] is '
'optional to specify a different path to take and store '
'the unrefined prediction from/to.')
flags.DEFINE_string('model_ckpt', None, 'Model checkpoint to optimize.')
flags.DEFINE_string('ft_name', '', 'Optional prefix for temporary files.')
flags.DEFINE_string('file_extension', 'png', 'Image data file extension.')
flags.DEFINE_float('learning_rate', 0.0001, 'Adam learning rate.')
flags.DEFINE_float('beta1', 0.9, 'Adam momentum.')
flags.DEFINE_float('reconstr_weight', 0.85, 'Frame reconstruction loss weight.')
flags.DEFINE_float('ssim_weight', 0.15, 'SSIM loss weight.')
flags.DEFINE_float('smooth_weight', 0.01, 'Smoothness loss weight.')
flags.DEFINE_float('icp_weight', 0.0, 'ICP loss weight.')
flags.DEFINE_float('size_constraint_weight', 0.0005, 'Weight of the object '
'size constraint loss. Use only with motion handling.')
flags.DEFINE_integer('batch_size', 1, 'The size of a sample batch')
flags.DEFINE_integer('img_height', 128, 'Input frame height.')
flags.DEFINE_integer('img_width', 416, 'Input frame width.')
flags.DEFINE_integer('seq_length', 3, 'Number of frames in sequence.')
flags.DEFINE_enum('architecture', nets.RESNET, nets.ARCHITECTURES,
'Defines the architecture to use for the depth prediction '
'network. Defaults to ResNet-based encoder and accompanying '
'decoder.')
flags.DEFINE_boolean('imagenet_norm', True, 'Whether to normalize the input '
'images channel-wise so that they match the distribution '
'most ImageNet-models were trained on.')
flags.DEFINE_float('weight_reg', 0.05, 'The amount of weight regularization to '
'apply. This has no effect on the ResNet-based encoder '
'architecture.')
flags.DEFINE_boolean('exhaustive_mode', False, 'Whether to exhaustively warp '
'from any frame to any other instead of just considering '
'adjacent frames. Where necessary, multiple egomotion '
'estimates will be applied. Does not have an effect if '
'compute_minimum_loss is enabled.')
flags.DEFINE_boolean('random_scale_crop', False, 'Whether to apply random '
'image scaling and center cropping during training.')
flags.DEFINE_bool('depth_upsampling', True, 'Whether to apply depth '
'upsampling of lower-scale representations before warping to '
'compute reconstruction loss on full-resolution image.')
flags.DEFINE_bool('depth_normalization', True, 'Whether to apply depth '
'normalization, that is, normalizing inverse depth '
'prediction maps by their mean to avoid degeneration towards '
'small values.')
flags.DEFINE_bool('compute_minimum_loss', True, 'Whether to take the '
'element-wise minimum of the reconstruction/SSIM error in '
'order to avoid overly penalizing dis-occlusion effects.')
flags.DEFINE_bool('use_skip', True, 'Whether to use skip connections in the '
'encoder-decoder architecture.')
flags.DEFINE_bool('joint_encoder', False, 'Whether to share parameters '
'between the depth and egomotion networks by using a joint '
'encoder architecture. The egomotion network is then '
'operating only on the hidden representation provided by the '
'joint encoder.')
flags.DEFINE_float('egomotion_threshold', 0.01, 'Minimum egomotion magnitude '
'to apply finetuning. If lower, just forwards the ordinary '
'prediction.')
flags.DEFINE_integer('num_steps', 20, 'Number of optimization steps to run.')
flags.DEFINE_boolean('handle_motion', True, 'Whether the checkpoint was '
'trained with motion handling.')
flags.DEFINE_bool('flip', False, 'Whether images should be flipped as well as '
'resulting predictions (for test-time augmentation). This '
'currently applies to the depth network only.')
FLAGS = flags.FLAGS
flags.mark_flag_as_required('output_dir')
flags.mark_flag_as_required('data_dir')
flags.mark_flag_as_required('model_ckpt')
flags.mark_flag_as_required('triplet_list_file')
def main(_):
  """Runs fine-tuning and inference.
  There are three categories of images.
  1) Images where we have previous and next frame, and that are not filtered
     out by the heuristic. For them, we will use the fine-tuned predictions.
  2) Images where we have previous and next frame, but that were filtered out
     by our heuristic. For them, we will use the ordinary prediction instead.
  3) Images where we have at least one missing adjacent frame. For them, we will
     use the ordinary prediction as indicated by triplet_list_file_remains (if
     provided). They will also not be part of the generated inference list in
     the first place.
  Raises:
    ValueError: Invalid parameters have been passed.
  """
  # --- Flag sanity checks: fail fast, before any graph construction. ---
  if FLAGS.handle_motion and FLAGS.joint_encoder:
    raise ValueError('Using a joint encoder is currently not supported when '
                     'modeling object motion.')
  if FLAGS.handle_motion and FLAGS.seq_length != 3:
    raise ValueError('The current motion model implementation only supports '
                     'using a sequence length of three.')
  if FLAGS.handle_motion and not FLAGS.compute_minimum_loss:
    raise ValueError('Computing the minimum photometric loss is required when '
                     'enabling object motion handling.')
  if FLAGS.size_constraint_weight > 0 and not FLAGS.handle_motion:
    raise ValueError('To enforce object size constraints, enable motion '
                     'handling.')
  if FLAGS.icp_weight > 0.0:
    raise ValueError('ICP is currently not supported.')
  if FLAGS.compute_minimum_loss and FLAGS.seq_length % 2 != 1:
    raise ValueError('Compute minimum loss requires using an odd number of '
                     'images in a sequence.')
  if FLAGS.compute_minimum_loss and FLAGS.exhaustive_mode:
    raise ValueError('Exhaustive mode has no effect when compute_minimum_loss '
                     'is enabled.')
  if FLAGS.img_width % (2 ** 5) != 0 or FLAGS.img_height % (2 ** 5) != 0:
    logging.warn('Image size is not divisible by 2^5. For the architecture '
                 'employed, this could cause artefacts caused by resizing in '
                 'lower dimensions.')
  # Normalize output_dir so string concatenations below ('_ft' suffix,
  # os.path.join) behave consistently.
  if FLAGS.output_dir.endswith('/'):
    FLAGS.output_dir = FLAGS.output_dir[:-1]
  # Create file lists to prepare fine-tuning, save it to unique_file.
  unique_file_name = (str(datetime.datetime.now().date()) + '_' +
                      str(datetime.datetime.now().time()).replace(':', '_'))
  unique_file = os.path.join(FLAGS.data_dir, unique_file_name + '.txt')
  with gfile.FastGFile(FLAGS.triplet_list_file, 'r') as f:
    files_to_process = f.readlines()
    files_to_process = [line.rstrip() for line in files_to_process]
    files_to_process = [line for line in files_to_process if len(line)]
  logging.info('Creating unique file list %s with %s entries.', unique_file,
               len(files_to_process))
  with gfile.FastGFile(unique_file, 'w') as f_out:
    # Each triplet is repeated once per training fetch plus three times per
    # save step -- presumably because every session.run consumes one element
    # from the input queue (train op + three eval calls). TODO confirm.
    fetches_network = FLAGS.num_steps * FLAGS.batch_size
    fetches_saves = FLAGS.batch_size * int(np.floor(FLAGS.num_steps/SAVE_EVERY))
    repetitions = fetches_network + 3 * fetches_saves
    for i in range(len(files_to_process)):
      for _ in range(repetitions):
        f_out.write(files_to_process[i] + '\n')
  # Read remaining files.
  remaining = []
  if gfile.Exists(FLAGS.triplet_list_file_remains):
    with gfile.FastGFile(FLAGS.triplet_list_file_remains, 'r') as f:
      remaining = f.readlines()
      remaining = [line.rstrip() for line in remaining]
      remaining = [line for line in remaining if len(line)]
  logging.info('Running fine-tuning on %s files, %s files are remaining.',
               len(files_to_process), len(remaining))
  # Run fine-tuning process and save predictions in id-folders.
  tf.set_random_seed(FIXED_SEED)
  np.random.seed(FIXED_SEED)
  random.seed(FIXED_SEED)
  flipping_mode = reader.FLIP_ALWAYS if FLAGS.flip else reader.FLIP_NONE
  train_model = model.Model(data_dir=FLAGS.data_dir,
                            file_extension=FLAGS.file_extension,
                            is_training=True,
                            learning_rate=FLAGS.learning_rate,
                            beta1=FLAGS.beta1,
                            reconstr_weight=FLAGS.reconstr_weight,
                            smooth_weight=FLAGS.smooth_weight,
                            ssim_weight=FLAGS.ssim_weight,
                            icp_weight=FLAGS.icp_weight,
                            batch_size=FLAGS.batch_size,
                            img_height=FLAGS.img_height,
                            img_width=FLAGS.img_width,
                            seq_length=FLAGS.seq_length,
                            architecture=FLAGS.architecture,
                            imagenet_norm=FLAGS.imagenet_norm,
                            weight_reg=FLAGS.weight_reg,
                            exhaustive_mode=FLAGS.exhaustive_mode,
                            random_scale_crop=FLAGS.random_scale_crop,
                            flipping_mode=flipping_mode,
                            random_color=False,
                            depth_upsampling=FLAGS.depth_upsampling,
                            depth_normalization=FLAGS.depth_normalization,
                            compute_minimum_loss=FLAGS.compute_minimum_loss,
                            use_skip=FLAGS.use_skip,
                            joint_encoder=FLAGS.joint_encoder,
                            build_sum=False,
                            shuffle=False,
                            input_file=unique_file_name,
                            handle_motion=FLAGS.handle_motion,
                            size_constraint_weight=FLAGS.size_constraint_weight,
                            train_global_scale_var=False)
  failed_heuristic_ids = finetune_inference(train_model, FLAGS.model_ckpt,
                                            FLAGS.output_dir + '_ft')
  logging.info('Fine-tuning completed, %s files were filtered out by '
               'heuristic.', len(failed_heuristic_ids))
  # Files that failed the egomotion heuristic fall back to the unrefined
  # (static-inference) prediction, i.e. they join the 'remaining' set.
  for failed_id in failed_heuristic_ids:
    failed_entry = files_to_process[failed_id]
    remaining.append(failed_entry)
  logging.info('In total, %s images were fine-tuned, while %s were not.',
               len(files_to_process)-len(failed_heuristic_ids), len(remaining))
  # Copy all results to have the same structural output as running ordinary
  # inference.
  for i in range(len(files_to_process)):
    if files_to_process[i] not in remaining:  # Use fine-tuned result.
      elements = files_to_process[i].split(' ')
      source_file = os.path.join(FLAGS.output_dir + '_ft', FLAGS.ft_name +
                                 'id_' + str(i),
                                 str(FLAGS.num_steps).zfill(10) +
                                 ('_flip' if FLAGS.flip else ''))
      if len(elements) == 2:  # No differing mapping defined.
        target_dir = os.path.join(FLAGS.output_dir + '_ft', elements[0])
        target_file = os.path.join(
            target_dir, elements[1] + ('_flip' if FLAGS.flip else ''))
      else:  # Other mapping for file defined, copy to this location instead.
        target_dir = os.path.join(
            FLAGS.output_dir + '_ft', os.path.dirname(elements[2]))
        target_file = os.path.join(
            target_dir,
            os.path.basename(elements[2]) + ('_flip' if FLAGS.flip else ''))
      if not gfile.Exists(target_dir):
        gfile.MakeDirs(target_dir)
      logging.info('Copy refined result %s to %s.', source_file, target_file)
      gfile.Copy(source_file + '.npy', target_file + '.npy', overwrite=True)
      gfile.Copy(source_file + '.txt', target_file + '.txt', overwrite=True)
      gfile.Copy(source_file + '.%s' % FLAGS.file_extension,
                 target_file + '.%s' % FLAGS.file_extension, overwrite=True)
  for j in range(len(remaining)):
    elements = remaining[j].split(' ')
    if len(elements) == 2:  # No differing mapping defined.
      target_dir = os.path.join(FLAGS.output_dir + '_ft', elements[0])
      target_file = os.path.join(
          target_dir, elements[1] + ('_flip' if FLAGS.flip else ''))
    else:  # Other mapping for file defined, copy to this location instead.
      target_dir = os.path.join(
          FLAGS.output_dir + '_ft', os.path.dirname(elements[2]))
      target_file = os.path.join(
          target_dir,
          os.path.basename(elements[2]) + ('_flip' if FLAGS.flip else ''))
    if not gfile.Exists(target_dir):
      gfile.MakeDirs(target_dir)
    # Unrefined predictions were written by the earlier static-inference run
    # into the directory without the '_ft' suffix.
    source_file = target_file.replace('_ft', '')
    logging.info('Copy unrefined result %s to %s.', source_file, target_file)
    gfile.Copy(source_file + '.npy', target_file + '.npy', overwrite=True)
    gfile.Copy(source_file + '.%s' % FLAGS.file_extension,
               target_file + '.%s' % FLAGS.file_extension, overwrite=True)
  logging.info('Done, predictions saved in %s.', FLAGS.output_dir + '_ft')
def finetune_inference(train_model, model_ckpt, output_dir):
  """Runs online refinement: per image, restore weights and fine-tune.

  For every image triplet in the input queue, the pretrained checkpoint is
  re-restored (so refinement never accumulates across images), FLAGS.num_steps
  optimization steps are run, and intermediate predictions (depth .npy,
  egomotion .txt, preview image) are written into a per-image 'id_<n>' folder.

  Returns:
    List of image indices that failed the egomotion-magnitude heuristic at
    the final step; callers fall back to the unrefined prediction for these.
  """
  vars_to_restore = None
  if model_ckpt is not None:
    vars_to_restore = util.get_vars_to_save_and_restore(model_ckpt)
    ckpt_path = model_ckpt
  pretrain_restorer = tf.train.Saver(vars_to_restore)
  # Supervisor with all saving/summaries disabled; it only manages the queue
  # runners and session lifetime.
  sv = tf.train.Supervisor(logdir=None, save_summaries_secs=0, saver=None,
                           summary_op=None)
  config = tf.ConfigProto()
  config.gpu_options.allow_growth = True
  img_nr = 0
  failed_heuristic = []
  with sv.managed_session(config=config) as sess:
    # TODO(casser): Caching the weights would be better to avoid I/O bottleneck.
    while True:  # Loop terminates when all examples have been processed.
      # NOTE(review): termination appears to rely on the input queue running
      # out and the Supervisor ending the session -- confirm.
      if model_ckpt is not None:
        # Reset to pretrained weights before refining each new image.
        logging.info('Restored weights from %s', ckpt_path)
        pretrain_restorer.restore(sess, ckpt_path)
      logging.info('Running fine-tuning, image %s...', img_nr)
      img_pred_folder = os.path.join(
          output_dir, FLAGS.ft_name + 'id_' + str(img_nr))
      if not gfile.Exists(img_pred_folder):
        gfile.MakeDirs(img_pred_folder)
      step = 1
      # Run fine-tuning.
      while step <= FLAGS.num_steps:
        logging.info('Running step %s of %s.', step, FLAGS.num_steps)
        fetches = {
            'train': train_model.train_op,
            'global_step': train_model.global_step,
            'incr_global_step': train_model.incr_global_step
        }
        _ = sess.run(fetches)
        if step % SAVE_EVERY == 0:
          # Get latest prediction for middle frame, highest scale.
          pred = train_model.depth[1][0].eval(session=sess)
          if FLAGS.flip:
            pred = np.flip(pred, axis=2)
          input_img = train_model.image_stack.eval(session=sess)
          # image_stack packs the triplet along channels: 3 channels per frame.
          input_img_prev = input_img[0, :, :, 0:3]
          input_img_center = input_img[0, :, :, 3:6]
          input_img_next = input_img[0, :, :, 6:]
          img_pred_file = os.path.join(
              img_pred_folder,
              str(step).zfill(10) + ('_flip' if FLAGS.flip else '') + '.npy')
          motion = np.squeeze(train_model.egomotion.eval(session=sess))
          # motion of shape (seq_length - 1, 6).
          motion = np.mean(motion, axis=0)  # Average egomotion across frames.
          if SAVE_PREVIEWS or step == FLAGS.num_steps:
            # Also save preview of depth map.
            color_map = util.normalize_depth_for_display(
                np.squeeze(pred[0, :, :]))
            visualization = np.concatenate(
                (input_img_prev, input_img_center, input_img_next, color_map))
            motion_s = [str(m) for m in motion]
            s_rep = ','.join(motion_s)
            with gfile.Open(img_pred_file.replace('.npy', '.txt'), 'w') as f:
              f.write(s_rep)
            util.save_image(
                img_pred_file.replace('.npy', '.%s' % FLAGS.file_extension),
                visualization, FLAGS.file_extension)
          with gfile.Open(img_pred_file, 'wb') as f:
            np.save(f, pred)
        # Apply heuristic to not finetune if egomotion magnitude is too low.
        # NOTE(review): 'motion' is only assigned inside the SAVE_EVERY branch
        # above; this is safe for SAVE_EVERY == 1 but would raise NameError on
        # step 1 if SAVE_EVERY were increased -- confirm before changing it.
        ego_magnitude = np.linalg.norm(motion[:3], ord=2)
        heuristic = ego_magnitude >= FLAGS.egomotion_threshold
        if not heuristic and step == FLAGS.num_steps:
          failed_heuristic.append(img_nr)
        step += 1
      img_nr += 1
  return failed_heuristic
# Entry point: absl parses the flags declared above and invokes main().
if __name__ == '__main__':
  app.run(main)
| apache-2.0 |
sensarliar/paparazzi | sw/lib/python/pprz_msg/messages_xml_map.py | 6 | 6512 | #!/usr/bin/env python
from __future__ import absolute_import, print_function
import os
# if PAPARAZZI_HOME not set, then assume the tree containing this
# file is a reasonable substitute
PPRZ_HOME = os.getenv("PAPARAZZI_HOME", os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                                                      '../../../..')))
# Default location of the message definitions inside the Paparazzi tree.
default_messages_file = '%s/conf/messages.xml' % PPRZ_HOME
# Lazily filled caches, all keyed by msg_class name (see parse_messages()):
#   message_dictionary:          class -> {msg_name: [field name, ...]}
#   message_dictionary_types:    class -> {msg_id: [field type, ...]}
#   message_dictionary_coefs:    class -> {msg_id: [alt_unit_coef, ...]}
#   message_dictionary_id_name:  class -> {msg_id: msg_name}
#   message_dictionary_name_id:  class -> {msg_name: msg_id}
message_dictionary = {}
message_dictionary_types = {}
message_dictionary_coefs = {}
message_dictionary_id_name = {}
message_dictionary_name_id = {}
class MessagesNotFound(Exception):
    """Raised when the messages XML definition file cannot be located."""

    def __init__(self, filename):
        # Path that was searched for and did not exist.
        self.filename = filename

    def __str__(self):
        return "messages file %r not found" % (self.filename,)
def parse_messages(messages_file=''):
    """Parse a Paparazzi messages XML file into the module-level caches.

    Populates message_dictionary, message_dictionary_types,
    message_dictionary_coefs, message_dictionary_id_name and
    message_dictionary_name_id (all keyed by msg_class name).

    :param messages_file: path to messages.xml; falls back to
        default_messages_file when empty.
    :raises MessagesNotFound: if the file does not exist.
    """
    if not messages_file:
        messages_file = default_messages_file
    if not os.path.isfile(messages_file):
        raise MessagesNotFound(messages_file)
    #print("Parsing %s" % messages_file)
    # Imported lazily so the module can be loaded without lxml installed.
    from lxml import etree
    tree = etree.parse(messages_file)
    for the_class in tree.xpath("//msg_class[@name]"):
        class_name = the_class.attrib['name']
        if class_name not in message_dictionary:
            message_dictionary_id_name[class_name] = {}
            message_dictionary_name_id[class_name] = {}
            message_dictionary[class_name] = {}
            message_dictionary_types[class_name] = {}
            message_dictionary_coefs[class_name] = {}
        for the_message in the_class.xpath("message[@name]"):
            message_name = the_message.attrib['name']
            # Both 'id' and legacy 'ID' attribute spellings occur in the wild.
            if 'id' in the_message.attrib:
                message_id = the_message.attrib['id']
            else:
                message_id = the_message.attrib['ID']
            # Ids may be written in hex ("0x2A") or decimal.
            if message_id[0:2] == "0x":
                message_id = int(message_id, 16)
            else:
                message_id = int(message_id)
            message_dictionary_id_name[class_name][message_id] = message_name
            message_dictionary_name_id[class_name][message_name] = message_id
            # insert this message into our dictionary as a list with room for the fields
            message_dictionary[class_name][message_name] = []
            message_dictionary_types[class_name][message_id] = []
            message_dictionary_coefs[class_name][message_id] = []
            for the_field in the_message.xpath('field[@name]'):
                # for now, just save the field names -- in the future maybe expand this to save a struct?
                message_dictionary[class_name][message_name].append(the_field.attrib['name'])
                message_dictionary_types[class_name][message_id].append(the_field.attrib['type'])
                try:
                    message_dictionary_coefs[class_name][message_id].append(float(the_field.attrib['alt_unit_coef']))
                except KeyError:
                    # print("no such key")
                    # Fields without an alt_unit_coef default to a factor of 1.
                    message_dictionary_coefs[class_name][message_id].append(1.)
def find_msg_by_name(name):
    """Return (msg_class, name) for the first class defining *name*.

    Prints an error and returns (None, None) when no class has the message.
    """
    if not message_dictionary:
        parse_messages()
    for msg_class, class_msgs in message_dictionary.items():
        if name in class_msgs:
            #print("found msg name %s in class %s" % (name, msg_class))
            return msg_class, name
    print("Error: msg_name %s not found." % name)
    return None, None
def get_msgs(msg_class):
    """Return the {msg_name: [field names]} mapping for *msg_class*.

    Prints an error and returns [] for an unknown class.
    """
    if not message_dictionary:
        parse_messages()
    try:
        return message_dictionary[msg_class]
    except KeyError:
        print("Error: msg_class %s not found." % msg_class)
        return []
def get_msg_name(msg_class, msg_id):
    """Return the message name for *msg_id* within *msg_class*.

    Prints a diagnostic and returns "" when the class or id is unknown.
    """
    if not message_dictionary:
        parse_messages()
    # Consistency fix: check membership in the dictionary we actually read
    # (message_dictionary_id_name) instead of the sibling message_dictionary,
    # mirroring get_msg_fieldtypes()/get_msg_fieldcoefs().  The old guard
    # risked a KeyError if the two caches ever got out of sync.
    if msg_class in message_dictionary_id_name:
        if msg_id in message_dictionary_id_name[msg_class]:
            return message_dictionary_id_name[msg_class][msg_id]
        else:
            print("Error: msg_id %d not found in msg_class %s." % (msg_id, msg_class))
    else:
        print("Error: msg_class %s not found." % msg_class)
    return ""
def get_msg_fields(msg_class, msg_name):
    """Return the ordered list of field names of *msg_name* in *msg_class*.

    Prints a diagnostic and returns [] when the class or name is unknown.
    """
    if not message_dictionary:
        parse_messages()
    class_msgs = message_dictionary.get(msg_class)
    if class_msgs is None:
        print("Error: msg_class %s not found." % msg_class)
        return []
    if msg_name not in class_msgs:
        print("Error: msg_name %s not found in msg_class %s." % (msg_name, msg_class))
        return []
    return class_msgs[msg_name]
def get_msg_id(msg_class, msg_name):
    """Return the numeric id of *msg_name* in *msg_class* (0 if unknown)."""
    if not message_dictionary:
        parse_messages()
    # An unknown class and an unknown name produce the same diagnostic,
    # matching the single KeyError handler this replaces.
    class_ids = message_dictionary_name_id.get(msg_class, {})
    if msg_name in class_ids:
        return class_ids[msg_name]
    print("Error: msg_name %s not found in msg_class %s." % (msg_name, msg_class))
    return 0
def get_msg_fieldtypes(msg_class, msg_id):
    """Return the list of field type strings for *msg_id* in *msg_class*.

    Prints a diagnostic and returns [] when the class or id is unknown.
    """
    if not message_dictionary:
        parse_messages()
    try:
        per_class = message_dictionary_types[msg_class]
    except KeyError:
        print("Error: msg_class %s not found." % msg_class)
        return []
    try:
        return per_class[msg_id]
    except KeyError:
        print("Error: message with ID %d not found in msg_class %s." % (msg_id, msg_class))
        return []
def get_msg_fieldcoefs(msg_class, msg_id):
    """Return the list of alt_unit_coef floats for *msg_id* in *msg_class*.

    Prints a diagnostic and returns [] when the class or id is unknown.
    """
    if not message_dictionary:
        parse_messages()
    if msg_class not in message_dictionary_coefs:
        print("Error: msg_class %s not found." % msg_class)
        return []
    per_class = message_dictionary_coefs[msg_class]
    if msg_id not in per_class:
        print("Error: message with ID %d not found in msg_class %s." % (msg_id, msg_class))
        return []
    return per_class[msg_id]
def test():
    """Small CLI for exercising the parser (``python messages_xml_map.py -l``).

    Options: -f/--file messages.xml path, -l/--list to print the parsed
    messages of the class given by -c/--class (default: telemetry).
    """
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("-f", "--file", help="path to messages.xml file")
    parser.add_argument("-l", "--list", help="list parsed messages", action="store_true", dest="list_messages")
    parser.add_argument("-c", "--class", help="message class", dest="msg_class", default="telemetry")
    args = parser.parse_args()
    parse_messages(args.file)
    if args.list_messages:
        print("Listing %i messages in '%s' msg_class" % (len(message_dictionary[args.msg_class]), args.msg_class))
        # Bug fix: dict.iteritems() does not exist on Python 3 and crashed
        # here; items() works on both interpreters this file targets (note
        # the __future__ print_function import at the top of the file).
        for msg_name, msg_fields in message_dictionary[args.msg_class].items():
            print(msg_name + ": " + ", ".join(msg_fields))
if __name__ == '__main__':
    test()
| gpl-2.0 |
ilpianista/ansible | lib/ansible/modules/packaging/os/apt_key.py | 27 | 12742 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
# Copyright: (c) 2012, Jayson Vantuyl <jayson@aggressive.ly>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'core'}
DOCUMENTATION = '''
---
module: apt_key
author:
- Jayson Vantuyl (@jvantuyl)
version_added: "1.0"
short_description: Add or remove an apt key
description:
- Add or remove an I(apt) key, optionally downloading it.
notes:
- Doesn't download the key unless it really needs it.
- As a sanity check, downloaded key id must match the one specified.
- "Use full fingerprint (40 characters) key ids to avoid key collisions.
To generate a full-fingerprint imported key: C(apt-key adv --list-public-keys --with-fingerprint --with-colons)."
- If you specify both the key id and the URL with C(state=present), the task can verify or add the key as needed.
- Adding a new key requires an apt cache update (e.g. using the apt module's update_cache option)
requirements:
- gpg
options:
id:
description:
- The identifier of the key.
- Including this allows check mode to correctly report the changed state.
- If specifying a subkey's id be aware that apt-key does not understand how to remove keys via a subkey id. Specify the primary key's id instead.
- This parameter is required when C(state) is set to C(absent).
data:
description:
- The keyfile contents to add to the keyring.
file:
description:
- The path to a keyfile on the remote server to add to the keyring.
keyring:
description:
- The full path to specific keyring file in /etc/apt/trusted.gpg.d/
version_added: "1.3"
url:
description:
- The URL to retrieve key from.
keyserver:
description:
- The keyserver to retrieve key from.
version_added: "1.6"
state:
description:
- Ensures that the key is present (added) or absent (revoked).
choices: [ absent, present ]
default: present
validate_certs:
description:
- If C(no), SSL certificates for the target url will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
type: bool
default: 'yes'
'''
EXAMPLES = '''
- name: Add an apt key by id from a keyserver
apt_key:
keyserver: keyserver.ubuntu.com
id: 36A1D7869245C8950F966E92D8576A8BA88D21E9
- name: Add an Apt signing key, uses whichever key is at the URL
apt_key:
url: https://ftp-master.debian.org/keys/archive-key-6.0.asc
state: present
- name: Add an Apt signing key, will not download if present
apt_key:
id: 9FED2BCBDCD29CDF762678CBAED4B06F473041FA
url: https://ftp-master.debian.org/keys/archive-key-6.0.asc
state: present
- name: Remove a Apt specific signing key, leading 0x is valid
apt_key:
id: 0x9FED2BCBDCD29CDF762678CBAED4B06F473041FA
state: absent
# Use armored file since utf-8 string is expected. Must be of "PGP PUBLIC KEY BLOCK" type.
- name: Add a key from a file on the Ansible server.
apt_key:
data: "{{ lookup('file', 'apt.asc') }}"
state: present
- name: Add an Apt signing key to a specific keyring file
apt_key:
id: 9FED2BCBDCD29CDF762678CBAED4B06F473041FA
url: https://ftp-master.debian.org/keys/archive-key-6.0.asc
keyring: /etc/apt/trusted.gpg.d/debian.gpg
- name: Add Apt signing key on remote server to keyring
apt_key:
id: 9FED2BCBDCD29CDF762678CBAED4B06F473041FA
file: /tmp/apt.gpg
state: present
'''
# FIXME: standardize into module_common
from traceback import format_exc
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
from ansible.module_utils.urls import fetch_url
apt_key_bin = None
def find_needed_binaries(module):
    """Locate apt-key (and sanity-check gpg/grep), failing the run if missing.

    Stores the apt-key path in the module-level ``apt_key_bin`` used by all
    command builders below.
    """
    global apt_key_bin
    apt_key_bin = module.get_bin_path('apt-key', required=True)
    # FIXME: Is there a reason that gpg and grep are checked? Is it just
    # cruft or does the apt .deb package not require them (and if they're not
    # installed, /usr/bin/apt-key fails?)
    module.get_bin_path('gpg', required=True)
    module.get_bin_path('grep', required=True)
def parse_key_id(key_id):
    """Validate *key_id* and split it into apt-key-friendly segments.

    :arg key_id: user-supplied id: 8, 16, or more hexadecimal characters,
        optionally prefixed with ``0x``.
    :returns: ``(short_key_id, fingerprint, key_id)`` -- the last 8 characters
        (apt-key del <= 1.10 mishandles longer ids), the last 16 characters
        (what ``apt-key adv --list-public-keys`` prints), and the full
        normalized id (usable with ``--recv-key``).  For short inputs the
        three values may coincide.
    :raises ValueError: if the id is not hexadecimal or has a bad length.
    """
    # Non-hex input raises ValueError here, before any length handling.
    int(key_id, 16)

    key_id = key_id.upper()
    if key_id.startswith('0X'):
        key_id = key_id[2:]

    key_id_len = len(key_id)
    if key_id_len not in (8, 16) and key_id_len <= 16:
        raise ValueError('key_id must be 8, 16, or 16+ hexadecimal characters in length')

    fingerprint = key_id[-16:] if key_id_len > 16 else key_id
    return key_id[-8:], fingerprint, key_id
def all_keys(module, keyring, short_format):
    """Return the key ids currently known to apt.

    Parses ``apt-key adv --list-public-keys`` output, keeping the id portion
    of every non-expired ``pub``/``sub`` line.  With *short_format* the ids
    are truncated to their last 8 characters.
    """
    keyring_opt = ('--keyring %s ' % keyring) if keyring else ''
    cmd = "%s %sadv --list-public-keys --keyid-format=long" % (apt_key_bin, keyring_opt)
    (rc, out, err) = module.run_command(cmd)
    results = []
    for line in to_native(out).split('\n'):
        if not (line.startswith(("pub", "sub")) and "expired" not in line):
            continue
        # The second token looks like "rsa4096/1234ABCD5678EF90".
        code = line.split()[1]
        (len_type, real_code) = code.split("/")
        results.append(real_code)
    if short_format:
        results = shorten_key_ids(results)
    return results
def shorten_key_ids(key_id_list):
    """
    Takes a list of key ids, and converts them to the 'short' format,
    by reducing them to their last 8 characters.
    """
    # Idiom: a comprehension replaces the manual append loop; output order
    # is preserved and ids shorter than 8 characters pass through unchanged.
    return [key[-8:] for key in key_id_list]
def download_key(module, url):
    """Fetch key material from *url* and return the raw response body.

    Fails the module run on a missing URL, a non-200 response, or any
    transport error.
    """
    # FIXME: move get_url code to common, allow for in-memory D/L, support proxies
    # and reuse here
    if url is None:
        module.fail_json(msg="needed a URL but was not specified")
    try:
        rsp, info = fetch_url(module, url)
        if info['status'] != 200:
            module.fail_json(msg="Failed to download key at %s: %s" % (url, info['msg']))
        return rsp.read()
    except Exception:
        # NOTE(review): assumes fail_json exits via SystemExit (not an
        # Exception subclass, so not re-caught here) -- confirm.
        module.fail_json(msg="error getting key id from url: %s" % url, traceback=format_exc())
def import_key(module, keyring, keyserver, key_id):
    """Fetch *key_id* from *keyserver* via ``apt-key adv --recv``.

    Retries up to five times; fails the module run when all attempts are
    exhausted.  Returns True on success.
    """
    if keyring:
        cmd = "%s --keyring %s adv --no-tty --keyserver %s --recv %s" % (apt_key_bin, keyring, keyserver, key_id)
    else:
        cmd = "%s adv --no-tty --keyserver %s --recv %s" % (apt_key_bin, keyserver, key_id)
    for retry in range(5):
        # Force a C locale so the error-message sniffing below is not broken
        # by translated apt-key output.
        lang_env = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C')
        (rc, out, err) = module.run_command(cmd, environ_update=lang_env)
        if rc == 0:
            break
    else:
        # for/else: only reached when the loop never hit 'break', i.e. all
        # five attempts failed; rc/out still hold the last attempt's result.
        # Out of retries
        if rc == 2 and 'not found on keyserver' in out:
            msg = 'Key %s not found on keyserver %s' % (key_id, keyserver)
            module.fail_json(cmd=cmd, msg=msg)
        else:
            msg = "Error fetching key %s from keyserver: %s" % (key_id, keyserver)
            module.fail_json(cmd=cmd, msg=msg, rc=rc, stdout=out, stderr=err)
    return True
def add_key(module, keyfile, keyring, data=None):
    """Feed a key into apt-key, from *data* (via stdin) or from *keyfile*.

    Always returns True; run_command(check_rc=True) fails the module run on
    a non-zero exit.
    """
    keyring_opt = ('--keyring %s ' % keyring) if keyring else ''
    if data is not None:
        # "add -" reads the key material from stdin.
        module.run_command("%s %sadd -" % (apt_key_bin, keyring_opt),
                           data=data, check_rc=True, binary_data=True)
    else:
        module.run_command("%s %sadd %s" % (apt_key_bin, keyring_opt, keyfile),
                           check_rc=True)
    return True
def remove_key(module, key_id, keyring):
    """Delete *key_id* via ``apt-key del``; returns True on success."""
    # FIXME: use module.run_command, fail at point of error and don't discard useful stdin/stdout
    keyring_opt = ('--keyring %s ' % keyring) if keyring else ''
    module.run_command('%s %sdel %s' % (apt_key_bin, keyring_opt, key_id),
                       check_rc=True)
    return True
def main():
    """Module entry point: reconcile the requested apt key state."""
    module = AnsibleModule(
        argument_spec=dict(
            id=dict(type='str'),
            url=dict(type='str'),
            data=dict(type='str'),
            file=dict(type='path'),
            key=dict(type='str'),
            keyring=dict(type='path'),
            validate_certs=dict(type='bool', default=True),
            keyserver=dict(type='str'),
            state=dict(type='str', default='present', choices=['absent', 'present']),
        ),
        supports_check_mode=True,
        # Bug fix: this group previously listed 'filename', which is not a
        # parameter name (the parameter is 'file'), so the exclusivity of
        # data/file/keyserver/url was silently not enforced for 'file'.
        mutually_exclusive=(('data', 'file', 'keyserver', 'url'),),
    )

    key_id = module.params['id']
    url = module.params['url']
    data = module.params['data']
    filename = module.params['file']
    keyring = module.params['keyring']
    state = module.params['state']
    keyserver = module.params['keyserver']
    changed = False

    fingerprint = short_key_id = key_id
    short_format = False
    if key_id:
        try:
            short_key_id, fingerprint, key_id = parse_key_id(key_id)
        except ValueError:
            module.fail_json(msg='Invalid key_id', id=key_id)
        if len(fingerprint) == 8:
            short_format = True

    find_needed_binaries(module)

    keys = all_keys(module, keyring, short_format)
    return_values = {}

    if state == 'present':
        if fingerprint and fingerprint in keys:
            # Key already installed: nothing to do.
            module.exit_json(changed=False)
        elif fingerprint and fingerprint not in keys and module.check_mode:
            # TODO: Someday we could go further -- write keys out to
            # a temporary file and then extract the key id from there via gpg
            # to decide if the key is installed or not.
            module.exit_json(changed=True)
        else:
            # No key material given at all: fall back to downloading from url.
            if not filename and not data and not keyserver:
                data = download_key(module, url)

            if filename:
                add_key(module, filename, keyring)
            elif keyserver:
                import_key(module, keyring, keyserver, key_id)
            else:
                add_key(module, "-", keyring, data)

            # Re-list keys to detect whether the add actually changed state.
            changed = False
            keys2 = all_keys(module, keyring, short_format)
            if len(keys) != len(keys2):
                changed = True

            if fingerprint and fingerprint not in keys2:
                module.fail_json(msg="key does not seem to have been added", id=key_id)
            module.exit_json(changed=changed)
    elif state == 'absent':
        if not key_id:
            module.fail_json(msg="key is required")
        if fingerprint in keys:
            if module.check_mode:
                module.exit_json(changed=True)

            # we use the "short" id: key_id[-8:], short_format=True
            # it's a workaround for https://bugs.launchpad.net/ubuntu/+source/apt/+bug/1481871
            if remove_key(module, short_key_id, keyring):
                keys = all_keys(module, keyring, short_format)
                if fingerprint in keys:
                    module.fail_json(msg="apt-key del did not return an error but the key was not removed (check that the id is correct and *not* a subkey)",
                                     id=key_id)
                changed = True
            else:
                # FIXME: module.fail_json or exit-json immediately at point of failure
                module.fail_json(msg="error removing key_id", **return_values)

    module.exit_json(changed=changed, **return_values)


if __name__ == '__main__':
    main()
smart-developerr/my-first-blog | Lib/site-packages/django/contrib/staticfiles/finders.py | 478 | 9854 | import os
from collections import OrderedDict
from django.apps import apps
from django.conf import settings
from django.contrib.staticfiles import utils
from django.core.exceptions import ImproperlyConfigured
from django.core.files.storage import (
FileSystemStorage, Storage, default_storage,
)
from django.utils import lru_cache, six
from django.utils._os import safe_join
from django.utils.functional import LazyObject, empty
from django.utils.module_loading import import_string
# To keep track on which directories the finder has searched the static files.
searched_locations = []
class BaseFinder(object):
    """Abstract base class for custom staticfiles finders.

    Concrete subclasses must implement both ``find()`` and ``list()``.
    """

    def find(self, path, all=False):
        """Resolve a relative ``path`` to one or more absolute file paths.

        When ``all`` is ``False`` (the default) only the first found file
        path is returned; when ``True`` a list of every matching path is
        returned.
        """
        raise NotImplementedError('subclasses of BaseFinder must provide a find() method')

    def list(self, ignore_patterns):
        """Yield ``(relative_path, storage)`` pairs, honoring ``ignore_patterns``."""
        raise NotImplementedError('subclasses of BaseFinder must provide a list() method')
class FileSystemFinder(BaseFinder):
    """Finder that serves files from the locations in ``STATICFILES_DIRS``."""

    def __init__(self, app_names=None, *args, **kwargs):
        # Ordered (prefix, root) pairs taken from STATICFILES_DIRS.
        self.locations = []
        # One FileSystemStorage per root directory, keyed by root path.
        self.storages = OrderedDict()
        if not isinstance(settings.STATICFILES_DIRS, (list, tuple)):
            raise ImproperlyConfigured(
                "Your STATICFILES_DIRS setting is not a tuple or list; "
                "perhaps you forgot a trailing comma?")
        for entry in settings.STATICFILES_DIRS:
            # Each entry is either a plain path or a (prefix, path) pair.
            if isinstance(entry, (list, tuple)):
                prefix, root = entry
            else:
                prefix, root = '', entry
            if settings.STATIC_ROOT and os.path.abspath(settings.STATIC_ROOT) == os.path.abspath(root):
                raise ImproperlyConfigured(
                    "The STATICFILES_DIRS setting should "
                    "not contain the STATIC_ROOT setting")
            if (prefix, root) not in self.locations:
                self.locations.append((prefix, root))
        for prefix, root in self.locations:
            storage = FileSystemStorage(location=root)
            storage.prefix = prefix
            self.storages[root] = storage
        super(FileSystemFinder, self).__init__(*args, **kwargs)

    def find(self, path, all=False):
        """Look ``path`` up in every configured extra location.

        Returns the first absolute match unless ``all`` is True, in which
        case a list of all matches is returned.
        """
        matches = []
        for prefix, root in self.locations:
            if root not in searched_locations:
                searched_locations.append(root)
            matched = self.find_location(root, path, prefix)
            if matched:
                if not all:
                    return matched
                matches.append(matched)
        return matches

    def find_location(self, root, path, prefix=None):
        """Return the absolute path of ``path`` under ``root``, or ``None``.

        When ``prefix`` is given, ``path`` must start with it; the prefix is
        stripped before joining with ``root``.
        """
        if prefix:
            prefix = '%s%s' % (prefix, os.sep)
            if not path.startswith(prefix):
                return None
            path = path[len(prefix):]
        path = safe_join(root, path)
        return path if os.path.exists(path) else None

    def list(self, ignore_patterns):
        """Yield ``(relative_path, storage)`` for every file in every location."""
        for prefix, root in self.locations:
            storage = self.storages[root]
            for path in utils.get_files(storage, ignore_patterns):
                yield path, storage
class AppDirectoriesFinder(BaseFinder):
    """Finder that looks in the ``static`` subdirectory of each installed app."""
    storage_class = FileSystemStorage
    source_dir = 'static'

    def __init__(self, app_names=None, *args, **kwargs):
        # App names that actually ship a static directory, in discovery order.
        self.apps = []
        # Maps app name -> storage rooted at that app's static directory.
        self.storages = OrderedDict()
        app_configs = apps.get_app_configs()
        if app_names:
            wanted = set(app_names)
            app_configs = [ac for ac in app_configs if ac.name in wanted]
        for app_config in app_configs:
            storage = self.storage_class(
                os.path.join(app_config.path, self.source_dir))
            # Only register apps whose static directory really exists.
            if os.path.isdir(storage.location):
                self.storages[app_config.name] = storage
                if app_config.name not in self.apps:
                    self.apps.append(app_config.name)
        super(AppDirectoriesFinder, self).__init__(*args, **kwargs)

    def list(self, ignore_patterns):
        """Yield ``(relative_path, storage)`` for the files of every app storage."""
        for storage in six.itervalues(self.storages):
            # Guard against a storage whose directory vanished after startup.
            if storage.exists(''):
                for path in utils.get_files(storage, ignore_patterns):
                    yield path, storage

    def find(self, path, all=False):
        """Search the registered app directories for ``path``."""
        matches = []
        for app in self.apps:
            app_location = self.storages[app].location
            if app_location not in searched_locations:
                searched_locations.append(app_location)
            match = self.find_in_app(app, path)
            if match:
                if not all:
                    return match
                matches.append(match)
        return matches

    def find_in_app(self, app, path):
        """Return the absolute path of ``path`` inside ``app``, or ``None``."""
        storage = self.storages.get(app)
        # Only try to find a file if the source dir actually exists.
        if storage and storage.exists(path):
            matched_path = storage.path(path)
            if matched_path:
                return matched_path
class BaseStorageFinder(BaseFinder):
    """Finder backed by a single storage instance.

    Subclasses (or callers) supply the storage through the ``storage``
    class attribute or the constructor argument.
    """
    storage = None

    def __init__(self, storage=None, *args, **kwargs):
        if storage is not None:
            self.storage = storage
        if self.storage is None:
            raise ImproperlyConfigured("The staticfiles storage finder %r "
                                       "doesn't have a storage class "
                                       "assigned." % self.__class__)
        # Accept either a Storage instance or a class; instantiate classes.
        if not isinstance(self.storage, (Storage, LazyObject)):
            self.storage = self.storage()
        super(BaseStorageFinder, self).__init__(*args, **kwargs)

    def find(self, path, all=False):
        """Look for ``path`` in the storage, but only if the storage is local."""
        try:
            self.storage.path('')
        except NotImplementedError:
            # Remote storages expose no filesystem path; nothing to search.
            pass
        else:
            if self.storage.location not in searched_locations:
                searched_locations.append(self.storage.location)
            if self.storage.exists(path):
                match = self.storage.path(path)
                return [match] if all else match
        return []

    def list(self, ignore_patterns):
        """Yield ``(relative_path, storage)`` for every file of the storage."""
        for path in utils.get_files(self.storage, ignore_patterns):
            yield path, self.storage
class DefaultStorageFinder(BaseStorageFinder):
    """Finder that searches Django's default file storage backend."""
    storage = default_storage

    def __init__(self, *args, **kwargs):
        super(DefaultStorageFinder, self).__init__(*args, **kwargs)
        # The default storage must expose a usable base filesystem location.
        if not getattr(self.storage, 'base_location', empty):
            raise ImproperlyConfigured("The storage backend of the "
                                       "staticfiles finder %r doesn't have "
                                       "a valid location." % self.__class__)
def find(path, all=False):
    """Search every enabled finder for a static file named ``path``.

    With ``all`` False (the default), return the first matching absolute
    path, or ``None`` when nothing matches.  With ``all`` True, return a
    list of all matches (possibly empty).
    """
    del searched_locations[:]
    collected = []
    for finder in get_finders():
        found = finder.find(path, all=all)
        if not all and found:
            return found
        if isinstance(found, (list, tuple)):
            collected.extend(found)
        else:
            collected.append(found)
    if collected:
        return collected
    # Nothing matched anywhere.
    return [] if all else None
def get_finders():
    """Yield an instance of every finder listed in ``STATICFILES_FINDERS``."""
    for import_path in settings.STATICFILES_FINDERS:
        yield get_finder(import_path)
@lru_cache.lru_cache(maxsize=None)
def get_finder(import_path):
    """Import and instantiate the finder class named by ``import_path``.

    Results are memoized, so each finder class is instantiated only once
    per process.
    """
    finder_class = import_string(import_path)
    if not issubclass(finder_class, BaseFinder):
        raise ImproperlyConfigured('Finder "%s" is not a subclass of "%s"' %
                                   (finder_class, BaseFinder))
    return finder_class()
| gpl-3.0 |
XiaosongWei/chromium-crosswalk | third_party/pycoverage/coverage/fullcoverage/encodings.py | 164 | 2386 | """Imposter encodings module that installs a coverage-style tracer.
This is NOT the encodings module; it is an imposter that sets up tracing
instrumentation and then replaces itself with the real encodings module.
If the directory that holds this file is placed first in the PYTHONPATH when
using "coverage" to run Python's tests, then this file will become the very
first module imported by the internals of Python 3. It installs a
coverage-compatible trace function that can watch Standard Library modules
execute from the very earliest stages of Python's own boot process. This fixes
a problem with coverage - that it starts too late to trace the coverage of many
of the most fundamental modules in the Standard Library.
"""
import sys
class FullCoverageTracer(object):
    """Trace-function holder that records every trace event it sees."""

    def __init__(self):
        # Each entry pairs the raw (frame, event, arg) tuple with the line
        # number captured at the moment of the event.  The line number is
        # stored separately because a single frame object is reused for a
        # whole scope: its f_lineno keeps changing as execution proceeds,
        # so all recorded frames for a scope eventually point at the last
        # executed line.
        self.traces = []

    def fullcoverage_trace(self, *args):
        frame, event, arg = args
        self.traces.append(((frame, event, arg), frame.f_lineno))
        # Returning ourselves keeps tracing enabled for nested scopes.
        return self.fullcoverage_trace
# Install the tracer immediately, before any further imports, so even the
# earliest stdlib modules are traced.
sys.settrace(FullCoverageTracer().fullcoverage_trace)
# In coverage/files.py is actual_filename(), which uses glob.glob. I don't
# understand why, but that use of glob borks everything if fullcoverage is in
# effect. So here we make an ugly hail-mary pass to switch off glob.glob over
# there. This means when using fullcoverage, Windows path names will not be
# their actual case.
#sys.fullcoverage = True
# Finally, remove our own directory from sys.path; remove ourselves from
# sys.modules; and re-import "encodings", which will be the real package
# this time. Note that the delete from sys.modules dictionary has to
# happen last, since all of the symbols in this module will become None
# at that exact moment, including "sys".
# The longest sys.path entry that prefixes __file__ is the directory
# holding this imposter module.
parentdir = max(filter(__file__.startswith, sys.path), key=len)
sys.path.remove(parentdir)
del sys.modules['encodings']
import encodings
| bsd-3-clause |
TheComet93/ontology | tests/gmock/gtest/test/gtest_shuffle_test.py | 3023 | 12549 | #!/usr/bin/env python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that test shuffling works."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
# Command to run the gtest_shuffle_test_ program.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_shuffle_test_')
# The environment variables for test sharding.
TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
TEST_FILTER = 'A*.A:A*.B:C*'
ALL_TESTS = []
ACTIVE_TESTS = []
FILTERED_TESTS = []
SHARDED_TESTS = []
SHUFFLED_ALL_TESTS = []
SHUFFLED_ACTIVE_TESTS = []
SHUFFLED_FILTERED_TESTS = []
SHUFFLED_SHARDED_TESTS = []
def AlsoRunDisabledTestsFlag():
  # Command-line flag that makes Google Test also run DISABLED_ tests.
  return '--gtest_also_run_disabled_tests'
def FilterFlag(test_filter):
  """Returns a --gtest_filter flag selecting the given test patterns."""
  return '--gtest_filter=%s' % (test_filter,)
def RepeatFlag(n):
  """Returns a --gtest_repeat flag running the tests n times."""
  return '--gtest_repeat=%s' % (n,)
def ShuffleFlag():
  # Command-line flag that turns on test shuffling.
  return '--gtest_shuffle'
def RandomSeedFlag(n):
  """Returns a --gtest_random_seed flag fixing the shuffle seed to n."""
  return '--gtest_random_seed=%s' % (n,)
def RunAndReturnOutput(extra_env, args):
"""Runs the test program and returns its output."""
environ_copy = os.environ.copy()
environ_copy.update(extra_env)
return gtest_test_utils.Subprocess([COMMAND] + args, env=environ_copy).output
def GetTestsForAllIterations(extra_env, args):
  """Runs the test program and returns a list of test lists.

  Args:
    extra_env: a map from environment variables to their values
    args: command line flags to pass to gtest_shuffle_test_

  Returns:
    A list where the i-th element is the list of tests run in the i-th
    test iteration.
  """
  iterations = []
  for raw_line in RunAndReturnOutput(extra_env, args).split('\n'):
    if raw_line.startswith('----'):
      # A dashed separator line starts a new iteration.
      current = []
      iterations.append(current)
    elif raw_line.strip():
      # Non-blank lines carry one 'TestCaseName.TestName' each.
      current.append(raw_line.strip())
  return iterations
def GetTestCases(tests):
  """Returns the distinct test case names in the given full test names.

  Args:
    tests: a list of full test names, each of the form
        'TestCaseName.TestName'

  Returns:
    The test case names from 'tests', in order of first appearance, with
    duplicates removed.
  """
  seen = []
  for full_name in tests:
    case = full_name.split('.')[0]
    if case not in seen:
      seen.append(case)
  return seen
def CalculateTestLists():
  """Calculates the list of tests run under different flags.

  Populates the module-level test lists lazily: each list is filled only
  once, on the first call, so repeated setUp() invocations reuse the
  cached results instead of re-running the test binary.
  """
  if not ALL_TESTS:
    ALL_TESTS.extend(
        GetTestsForAllIterations({}, [AlsoRunDisabledTestsFlag()])[0])
  if not ACTIVE_TESTS:
    ACTIVE_TESTS.extend(GetTestsForAllIterations({}, [])[0])
  if not FILTERED_TESTS:
    FILTERED_TESTS.extend(
        GetTestsForAllIterations({}, [FilterFlag(TEST_FILTER)])[0])
  if not SHARDED_TESTS:
    SHARDED_TESTS.extend(
        GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                  SHARD_INDEX_ENV_VAR: '1'},
                                 [])[0])
  if not SHUFFLED_ALL_TESTS:
    SHUFFLED_ALL_TESTS.extend(GetTestsForAllIterations(
        {}, [AlsoRunDisabledTestsFlag(), ShuffleFlag(), RandomSeedFlag(1)])[0])
  if not SHUFFLED_ACTIVE_TESTS:
    SHUFFLED_ACTIVE_TESTS.extend(GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(1)])[0])
  if not SHUFFLED_FILTERED_TESTS:
    SHUFFLED_FILTERED_TESTS.extend(GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(1), FilterFlag(TEST_FILTER)])[0])
  if not SHUFFLED_SHARDED_TESTS:
    SHUFFLED_SHARDED_TESTS.extend(
        GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                  SHARD_INDEX_ENV_VAR: '1'},
                                 [ShuffleFlag(), RandomSeedFlag(1)])[0])
class GTestShuffleUnitTest(gtest_test_utils.TestCase):
  """Tests test shuffling."""

  def setUp(self):
    # Populates the module-level test lists (cached after the first call).
    CalculateTestLists()

  def testShufflePreservesNumberOfTests(self):
    """Shuffling must not change how many tests run."""
    self.assertEqual(len(ALL_TESTS), len(SHUFFLED_ALL_TESTS))
    self.assertEqual(len(ACTIVE_TESTS), len(SHUFFLED_ACTIVE_TESTS))
    self.assertEqual(len(FILTERED_TESTS), len(SHUFFLED_FILTERED_TESTS))
    self.assertEqual(len(SHARDED_TESTS), len(SHUFFLED_SHARDED_TESTS))

  def testShuffleChangesTestOrder(self):
    """A shuffled run must not use the unshuffled test order."""
    self.assert_(SHUFFLED_ALL_TESTS != ALL_TESTS, SHUFFLED_ALL_TESTS)
    self.assert_(SHUFFLED_ACTIVE_TESTS != ACTIVE_TESTS, SHUFFLED_ACTIVE_TESTS)
    self.assert_(SHUFFLED_FILTERED_TESTS != FILTERED_TESTS,
                 SHUFFLED_FILTERED_TESTS)
    self.assert_(SHUFFLED_SHARDED_TESTS != SHARDED_TESTS,
                 SHUFFLED_SHARDED_TESTS)

  def testShuffleChangesTestCaseOrder(self):
    """Shuffling must reorder whole test cases, not just tests."""
    self.assert_(GetTestCases(SHUFFLED_ALL_TESTS) != GetTestCases(ALL_TESTS),
                 GetTestCases(SHUFFLED_ALL_TESTS))
    self.assert_(
        GetTestCases(SHUFFLED_ACTIVE_TESTS) != GetTestCases(ACTIVE_TESTS),
        GetTestCases(SHUFFLED_ACTIVE_TESTS))
    self.assert_(
        GetTestCases(SHUFFLED_FILTERED_TESTS) != GetTestCases(FILTERED_TESTS),
        GetTestCases(SHUFFLED_FILTERED_TESTS))
    self.assert_(
        GetTestCases(SHUFFLED_SHARDED_TESTS) != GetTestCases(SHARDED_TESTS),
        GetTestCases(SHUFFLED_SHARDED_TESTS))

  def testShuffleDoesNotRepeatTest(self):
    """No test may appear more than once in a shuffled run."""
    for test in SHUFFLED_ALL_TESTS:
      self.assertEqual(1, SHUFFLED_ALL_TESTS.count(test),
                       '%s appears more than once' % (test,))
    for test in SHUFFLED_ACTIVE_TESTS:
      self.assertEqual(1, SHUFFLED_ACTIVE_TESTS.count(test),
                       '%s appears more than once' % (test,))
    for test in SHUFFLED_FILTERED_TESTS:
      self.assertEqual(1, SHUFFLED_FILTERED_TESTS.count(test),
                       '%s appears more than once' % (test,))
    for test in SHUFFLED_SHARDED_TESTS:
      self.assertEqual(1, SHUFFLED_SHARDED_TESTS.count(test),
                       '%s appears more than once' % (test,))

  def testShuffleDoesNotCreateNewTest(self):
    """Every shuffled test must exist in the corresponding plain run."""
    for test in SHUFFLED_ALL_TESTS:
      self.assert_(test in ALL_TESTS, '%s is an invalid test' % (test,))
    for test in SHUFFLED_ACTIVE_TESTS:
      self.assert_(test in ACTIVE_TESTS, '%s is an invalid test' % (test,))
    for test in SHUFFLED_FILTERED_TESTS:
      self.assert_(test in FILTERED_TESTS, '%s is an invalid test' % (test,))
    for test in SHUFFLED_SHARDED_TESTS:
      self.assert_(test in SHARDED_TESTS, '%s is an invalid test' % (test,))

  def testShuffleIncludesAllTests(self):
    """Every plain-run test must still appear in the shuffled run."""
    for test in ALL_TESTS:
      self.assert_(test in SHUFFLED_ALL_TESTS, '%s is missing' % (test,))
    for test in ACTIVE_TESTS:
      self.assert_(test in SHUFFLED_ACTIVE_TESTS, '%s is missing' % (test,))
    for test in FILTERED_TESTS:
      self.assert_(test in SHUFFLED_FILTERED_TESTS, '%s is missing' % (test,))
    for test in SHARDED_TESTS:
      self.assert_(test in SHUFFLED_SHARDED_TESTS, '%s is missing' % (test,))

  def testShuffleLeavesDeathTestsAtFront(self):
    """Death tests must all run before any non-death test."""
    non_death_test_found = False
    for test in SHUFFLED_ACTIVE_TESTS:
      if 'DeathTest.' in test:
        self.assert_(not non_death_test_found,
                     '%s appears after a non-death test' % (test,))
      else:
        non_death_test_found = True

  def _VerifyTestCasesDoNotInterleave(self, tests):
    """Asserts that each test case's tests appear contiguously in tests."""
    test_cases = []
    for test in tests:
      [test_case, _] = test.split('.')
      if test_cases and test_cases[-1] != test_case:
        test_cases.append(test_case)
        self.assertEqual(1, test_cases.count(test_case),
                         'Test case %s is not grouped together in %s' %
                         (test_case, tests))

  def testShuffleDoesNotInterleaveTestCases(self):
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_ALL_TESTS)
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_ACTIVE_TESTS)
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_FILTERED_TESTS)
    self._VerifyTestCasesDoNotInterleave(SHUFFLED_SHARDED_TESTS)

  def testShuffleRestoresOrderAfterEachIteration(self):
    # Get the test lists in all 3 iterations, using random seed 1, 2,
    # and 3 respectively. Google Test picks a different seed in each
    # iteration, and this test depends on the current implementation
    # picking successive numbers. This dependency is not ideal, but
    # makes the test much easier to write.
    [tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
        GetTestsForAllIterations(
            {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
    # Make sure running the tests with random seed 1 gets the same
    # order as in iteration 1 above.
    [tests_with_seed1] = GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(1)])
    self.assertEqual(tests_in_iteration1, tests_with_seed1)
    # Make sure running the tests with random seed 2 gets the same
    # order as in iteration 2 above. Success means that Google Test
    # correctly restores the test order before re-shuffling at the
    # beginning of iteration 2.
    [tests_with_seed2] = GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(2)])
    self.assertEqual(tests_in_iteration2, tests_with_seed2)
    # Make sure running the tests with random seed 3 gets the same
    # order as in iteration 3 above. Success means that Google Test
    # correctly restores the test order before re-shuffling at the
    # beginning of iteration 3.
    [tests_with_seed3] = GetTestsForAllIterations(
        {}, [ShuffleFlag(), RandomSeedFlag(3)])
    self.assertEqual(tests_in_iteration3, tests_with_seed3)

  def testShuffleGeneratesNewOrderInEachIteration(self):
    """Each repeated iteration must get its own shuffle order."""
    [tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
        GetTestsForAllIterations(
            {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
    self.assert_(tests_in_iteration1 != tests_in_iteration2,
                 tests_in_iteration1)
    self.assert_(tests_in_iteration1 != tests_in_iteration3,
                 tests_in_iteration1)
    self.assert_(tests_in_iteration2 != tests_in_iteration3,
                 tests_in_iteration2)

  def testShuffleShardedTestsPreservesPartition(self):
    # If we run M tests on N shards, the same M tests should be run in
    # total, regardless of the random seeds used by the shards.
    [tests1] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                         SHARD_INDEX_ENV_VAR: '0'},
                                        [ShuffleFlag(), RandomSeedFlag(1)])
    [tests2] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                         SHARD_INDEX_ENV_VAR: '1'},
                                        [ShuffleFlag(), RandomSeedFlag(20)])
    [tests3] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
                                         SHARD_INDEX_ENV_VAR: '2'},
                                        [ShuffleFlag(), RandomSeedFlag(25)])
    sorted_sharded_tests = tests1 + tests2 + tests3
    sorted_sharded_tests.sort()
    sorted_active_tests = []
    sorted_active_tests.extend(ACTIVE_TESTS)
    sorted_active_tests.sort()
    self.assertEqual(sorted_active_tests, sorted_sharded_tests)
# Run the test suite through the shared gtest test-runner helper.
if __name__ == '__main__':
  gtest_test_utils.Main()
| gpl-2.0 |
viki9698/jizhanggroup | django/contrib/databrowse/datastructures.py | 100 | 9090 | """
These classes are light wrappers around Django's database API that provide
convenience functionality and permalink functions for the databrowse app.
"""
from __future__ import unicode_literals
from django.db import models
from django.utils import formats
from django.utils.text import capfirst
from django.utils.encoding import smart_text, force_str, iri_to_uri
from django.db.models.query import QuerySet
from django.utils.encoding import python_2_unicode_compatible
# Placeholder rendered in place of a field whose value is None.
EMPTY_VALUE = '(None)'
# Maximum number of characters of an object's text representation to display.
DISPLAY_SIZE = 100
class EasyModel(object):
    """Convenience wrapper around one model registered with a databrowse site."""

    def __init__(self, site, model):
        self.site, self.model = site, model
        self.model_list = list(site.registry.keys())
        opts = model._meta
        self.verbose_name = opts.verbose_name
        self.verbose_name_plural = opts.verbose_name_plural

    def __repr__(self):
        return force_str('<EasyModel for %s>' % self.model._meta.object_name)

    def model_databrowse(self):
        """Returns the ModelDatabrowse class registered for this model."""
        return self.site.registry[self.model]

    def url(self):
        meta = self.model._meta
        return '%s%s/%s/' % (self.site.root_url, meta.app_label, meta.module_name)

    def objects(self, **kwargs):
        """Shortcut for a filtered version of this model's EasyQuerySet."""
        return self.get_query_set().filter(**kwargs)

    def get_query_set(self):
        # Clone the default queryset into an EasyQuerySet so iteration
        # yields EasyInstance wrappers instead of raw model instances.
        qs = self.model._default_manager.get_query_set()._clone(klass=EasyQuerySet)
        qs._easymodel = self
        return qs

    def object_by_pk(self, pk):
        """Wraps the instance with the given primary key in an EasyInstance."""
        return EasyInstance(self, self.model._default_manager.get(pk=pk))

    def sample_objects(self):
        # A small taste of the data: at most three wrapped instances.
        for instance in self.model._default_manager.all()[:3]:
            yield EasyInstance(self, instance)

    def field(self, name):
        """Returns an EasyField for `name`, or None if no such field exists."""
        try:
            model_field = self.model._meta.get_field(name)
        except models.FieldDoesNotExist:
            return None
        return EasyField(self, model_field)

    def fields(self):
        meta = self.model._meta
        return [EasyField(self, f) for f in (meta.fields + meta.many_to_many)]
class EasyField(object):
    """Wrapper around a single model field for template-friendly access."""

    def __init__(self, easy_model, field):
        self.model = easy_model
        self.field = field

    def __repr__(self):
        return force_str('<EasyField for %s.%s>' % (self.model.model._meta.object_name, self.field.name))

    def choices(self):
        """Yields an EasyChoice for every (value, label) pair of the field."""
        for value, label in self.field.choices:
            yield EasyChoice(self.model, self, value, label)

    def url(self):
        # Fields with choices get their own browse page; relations link to
        # the related model's listing; anything else has no URL (None).
        if self.field.choices:
            return '%s%s/%s/%s/' % (self.model.site.root_url, self.model.model._meta.app_label, self.model.model._meta.module_name, self.field.name)
        elif self.field.rel:
            return '%s%s/%s/' % (self.model.site.root_url, self.model.model._meta.app_label, self.model.model._meta.module_name)
class EasyChoice(object):
    """Wrapper around one (value, label) choice of a model field."""

    def __init__(self, easy_model, field, value, label):
        self.model = easy_model
        self.field = field
        self.value = value
        self.label = label

    def __repr__(self):
        return force_str('<EasyChoice for %s.%s>' % (self.model.model._meta.object_name, self.field.name))

    def url(self):
        return '%s%s/%s/%s/%s/' % (self.model.site.root_url, self.model.model._meta.app_label, self.model.model._meta.module_name, self.field.field.name, iri_to_uri(self.value))
@python_2_unicode_compatible
class EasyInstance(object):
    """Wrapper around a single model instance."""

    def __init__(self, easy_model, instance):
        self.model = easy_model
        self.instance = instance

    def __repr__(self):
        return force_str('<EasyInstance for %s (%s)>' % (self.model.model._meta.object_name, self.instance._get_pk_val()))

    def __str__(self):
        val = smart_text(self.instance)
        # Truncate over-long representations for display purposes.
        if len(val) > DISPLAY_SIZE:
            return val[:DISPLAY_SIZE] + '...'
        return val

    def pk(self):
        return self.instance._get_pk_val()

    def url(self):
        return '%s%s/%s/objects/%s/' % (self.model.site.root_url, self.model.model._meta.app_label, self.model.model._meta.module_name, iri_to_uri(self.pk()))

    def fields(self):
        """
        Generator that yields EasyInstanceFields for each field in this
        EasyInstance's model.
        """
        meta = self.model.model._meta
        for f in meta.fields + meta.many_to_many:
            yield EasyInstanceField(self.model, self, f)

    def related_objects(self):
        """
        Generator that yields dictionaries of all models that have this
        EasyInstance's model as a ForeignKey or ManyToManyField, along with
        lists of related objects.
        """
        meta = self.model.model._meta
        related = meta.get_all_related_objects() + meta.get_all_related_many_to_many_objects()
        for rel_object in related:
            if rel_object.model not in self.model.model_list:
                # Only expose models registered with this databrowse site.
                continue
            em = EasyModel(self.model.site, rel_object.model)
            yield {
                'model': em,
                'related_field': rel_object.field.verbose_name,
                'object_list': [EasyInstance(em, i) for i in getattr(self.instance, rel_object.get_accessor_name()).all()],
            }
class EasyInstanceField(object):
    """Wrapper around one field *value* of a specific model instance."""

    def __init__(self, easy_model, instance, field):
        # Note the argument order differs from the attribute order here.
        self.model, self.field, self.instance = easy_model, field, instance
        self.raw_value = getattr(instance.instance, field.name)

    def __repr__(self):
        return force_str('<EasyInstanceField for %s.%s>' % (self.model.model._meta.object_name, self.field.name))

    def values(self):
        """
        Returns a list of values for this field for this instance. It's a list
        so we can accommodate many-to-many fields.
        """
        # NOTE(review): stale comment below — the function-level import it
        # described has since been removed.
        # This import is deliberately inside the function because it causes
        # some settings to be imported, and we don't want to do that at the
        # module level.
        if self.field.rel:
            if isinstance(self.field.rel, models.ManyToOneRel):
                objs = getattr(self.instance.instance, self.field.name)
            elif isinstance(self.field.rel, models.ManyToManyRel): # ManyToManyRel
                return list(getattr(self.instance.instance, self.field.name).all())
            # NOTE(review): a rel that is neither ManyToOneRel nor
            # ManyToManyRel would leave `objs` unbound and raise NameError
            # at the return below — presumably no such rel type occurs here.
        elif self.field.choices:
            # Map the stored value to its human-readable choice label.
            objs = dict(self.field.choices).get(self.raw_value, EMPTY_VALUE)
        elif isinstance(self.field, models.DateField) or isinstance(self.field, models.TimeField):
            if self.raw_value:
                # DateTimeField is checked first because it subclasses DateField.
                if isinstance(self.field, models.DateTimeField):
                    objs = capfirst(formats.date_format(self.raw_value, 'DATETIME_FORMAT'))
                elif isinstance(self.field, models.TimeField):
                    objs = capfirst(formats.time_format(self.raw_value, 'TIME_FORMAT'))
                else:
                    objs = capfirst(formats.date_format(self.raw_value, 'DATE_FORMAT'))
            else:
                objs = EMPTY_VALUE
        elif isinstance(self.field, models.BooleanField) or isinstance(self.field, models.NullBooleanField):
            objs = {True: 'Yes', False: 'No', None: 'Unknown'}[self.raw_value]
        else:
            objs = self.raw_value
        return [objs]

    def urls(self):
        "Returns a list of (value, URL) tuples; URL is None when there is no page."
        # First, check the urls() method for each plugin.
        # NOTE(review): `plugin_urls` is never used below — dead variable.
        plugin_urls = []
        for plugin_name, plugin in self.model.model_databrowse().plugins.items():
            urls = plugin.urls(plugin_name, self)
            if urls is not None:
                # The first plugin that claims this field wins.
                return zip(self.values(), urls)
        if self.field.rel:
            m = EasyModel(self.model.site, self.field.rel.to)
            if self.field.rel.to in self.model.model_list:
                # Related model is browsable: link each value to its page.
                lst = []
                for value in self.values():
                    if value is None:
                        continue
                    url = '%s%s/%s/objects/%s/' % (self.model.site.root_url, m.model._meta.app_label, m.model._meta.module_name, iri_to_uri(value._get_pk_val()))
                    lst.append((smart_text(value), url))
            else:
                lst = [(value, None) for value in self.values()]
        elif self.field.choices:
            lst = []
            for value in self.values():
                url = '%s%s/%s/fields/%s/%s/' % (self.model.site.root_url, self.model.model._meta.app_label, self.model.model._meta.module_name, self.field.name, iri_to_uri(self.raw_value))
                lst.append((value, url))
        elif isinstance(self.field, models.URLField):
            # URL fields link straight to the stored URL itself.
            val = list(self.values())[0]
            lst = [(val, iri_to_uri(val))]
        else:
            lst = [(list(self.values())[0], None)]
        return lst
class EasyQuerySet(QuerySet):
    """
    QuerySet that yields EasyInstance wrappers instead of raw model
    instances.  When creating (or cloning to) an `EasyQuerySet`, make sure
    to set the `_easymodel` variable to the related `EasyModel`.
    """

    def iterator(self, *args, **kwargs):
        for obj in super(EasyQuerySet, self).iterator(*args, **kwargs):
            yield EasyInstance(self._easymodel, obj)

    def _clone(self, *args, **kwargs):
        # Clones must keep pointing at the same EasyModel.
        clone = super(EasyQuerySet, self)._clone(*args, **kwargs)
        clone._easymodel = self._easymodel
        return clone
| bsd-3-clause |
jsatch/creamas-matricula | CreamasMatricula/Creamas/urls.py | 1 | 1854 | from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'Creamas.views.home', name='home'),
    # url(r'^Creamas/', include('Creamas.foo.urls')),
    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    # NOTE: the 'creamas/' prefix must be removed before deploying to the
    # Apache server on Linux.
    url(r'^admin/', include(admin.site.urls)),
    # Serves collected static files directly (development setups only).
    url(r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_ROOT}),
    # Authentication.
    url(r'^login$', 'matricula.views.loginUsuario'),
    #url(r'^creamas/matricula$', 'matricula.views.listado_matricula'), deprecated
    # Enrollment (matricula) registration and reporting.
    url(r'^matricula-registrar$', 'matricula.views.registro_matricula'),
    url(r'^matricula-registrar/(\d+)$', 'matricula.views.modificacion_matricula'),
    url(r'^matricula-listar-alumnos$', 'matricula.views.listado_alumnos'),
    url(r'^matricula-registrar-alumno$', 'matricula.views.registro_alumno'),
    url(r'^matricula-seguimiento$', 'matricula.views.reporte_seguimiento'),
    url(r'^matricula-reporte$', 'matricula.views.reporte_matricula'),
    url(r'^logout$', 'matricula.views.logout_matricula'),
    url(r'^registrar_matricula$', 'matricula.views.guardar_matricula'),
    # Backend endpoints.
    url(r'^listar_matricula', 'matricula.views.back_listar_matricula'),
    # Enrollment modification.
    #url(r'^creamas/matricula-listar$', 'matricula.views.listado_matricula')
    url(r'^matricula-modificar-listar$', 'matricula.views.listado_matricula'),
    url(r'^matricula-modificar/(\d+)$', 'matricula.views.modificacion_matricula'),
    url(r'^matricula-eliminar/(\d+)$', 'matricula.views.eliminar_matricula'),
)
| apache-2.0 |
shangwuhencc/scikit-learn | sklearn/utils/tests/test_testing.py | 107 | 4210 | import warnings
import unittest
import sys
from nose.tools import assert_raises
from sklearn.utils.testing import (
_assert_less,
_assert_greater,
assert_less_equal,
assert_greater_equal,
assert_warns,
assert_no_warnings,
assert_equal,
set_random_state,
assert_raise_message)
from sklearn.tree import DecisionTreeClassifier
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
# nose may be absent; in that case this comparison test is simply skipped.
try:
    from nose.tools import assert_less

    def test_assert_less():
        # Check that the nose implementation of assert_less gives the
        # same thing as the scikit's
        assert_less(0, 1)
        _assert_less(0, 1)
        assert_raises(AssertionError, assert_less, 1, 0)
        assert_raises(AssertionError, _assert_less, 1, 0)
except ImportError:
    pass
# nose may be absent; in that case this comparison test is simply skipped.
try:
    from nose.tools import assert_greater

    def test_assert_greater():
        # Check that the nose implementation of assert_less gives the
        # same thing as the scikit's
        assert_greater(1, 0)
        _assert_greater(1, 0)
        assert_raises(AssertionError, assert_greater, 0, 1)
        assert_raises(AssertionError, _assert_greater, 0, 1)
except ImportError:
    pass
def test_assert_less_equal():
    # <= must hold for both strict and equal operands; > must raise.
    assert_less_equal(0, 1)
    assert_less_equal(1, 1)
    assert_raises(AssertionError, assert_less_equal, 1, 0)
def test_assert_greater_equal():
    # >= must hold for both strict and equal operands; < must raise.
    assert_greater_equal(1, 0)
    assert_greater_equal(1, 1)
    assert_raises(AssertionError, assert_greater_equal, 0, 1)
def test_set_random_state():
    lda = LinearDiscriminantAnalysis()
    tree = DecisionTreeClassifier()
    # Linear Discriminant Analysis doesn't have random state: smoke test
    set_random_state(lda, 3)
    # For an estimator that does expose random_state, the value must be set.
    set_random_state(tree, 3)
    assert_equal(tree.random_state, 3)
def test_assert_raise_message():
    # Local helpers: one raising ValueError with a given message, one inert.
    def _raise_ValueError(message):
        raise ValueError(message)

    def _no_raise():
        pass

    # Matching exception type and message: passes silently.
    assert_raise_message(ValueError, "test",
                         _raise_ValueError, "test")

    # Right exception type, wrong message: AssertionError.
    assert_raises(AssertionError,
                  assert_raise_message, ValueError, "something else",
                  _raise_ValueError, "test")

    # Wrong expected exception type: the original ValueError propagates.
    assert_raises(ValueError,
                  assert_raise_message, TypeError, "something else",
                  _raise_ValueError, "test")

    # Nothing raised at all: AssertionError.
    assert_raises(AssertionError,
                  assert_raise_message, ValueError, "test",
                  _no_raise)

    # multiple exceptions in a tuple
    assert_raises(AssertionError,
                  assert_raise_message, (ValueError, AttributeError),
                  "test", _no_raise)
# This class is inspired from numpy 1.7 with an alteration to check
# the reset warning filters after calls to assert_warns.
# This assert_warns behavior is specific to scikit-learn because
#`clean_warning_registry()` is called internally by assert_warns
# and clears all previous filters.
class TestWarns(unittest.TestCase):
    """Behavior of assert_warns / assert_no_warnings, including the side
    effect that assert_warns clears the global warning filters."""

    def test_warn(self):
        def f():
            warnings.warn("yo")
            return 3

        # Test that assert_warns is not impacted by externally set
        # filters and is reset internally.
        # This is because `clean_warning_registry()` is called internally by
        # assert_warns and clears all previous filters.
        warnings.simplefilter("ignore", UserWarning)
        assert_equal(assert_warns(UserWarning, f), 3)

        # Test that the warning registry is empty after assert_warns
        assert_equal(sys.modules['warnings'].filters, [])

        assert_raises(AssertionError, assert_no_warnings, f)
        assert_equal(assert_no_warnings(lambda x: x, 1), 1)

    def test_warn_wrong_warning(self):
        def f():
            warnings.warn("yo", DeprecationWarning)

        failed = False
        # Snapshot the filters so we can undo assert_warns' clearing below.
        filters = sys.modules['warnings'].filters[:]
        try:
            try:
                # Should raise an AssertionError
                assert_warns(UserWarning, f)
                failed = True
            except AssertionError:
                pass
        finally:
            # Restore whatever filters were active before the call.
            sys.modules['warnings'].filters = filters
        if failed:
            raise AssertionError("wrong warning caught by assert_warn")
| bsd-3-clause |
sg00dwin/origin | vendor/github.com/google/certificate-transparency/python/ct/client/db/log_db.py | 35 | 2021 | import abc
class LogDB(object):
    """Database interface for storing client-side CT data."""
    __metaclass__ = abc.ABCMeta

    # The largest BSON can handle
    timestamp_max = 2**63-1

    @abc.abstractmethod
    def add_log(self, metadata):
        """Store log metadata. This creates the necessary mappings between
        tables so all logs must be explicitly added.

        Params:
            metadata: a client_pb2.CtLogMetadata proto."""

    @abc.abstractmethod
    def update_log(self, metadata):
        """Add a new log or update existing log metadata. When updating, does
        not verify that the new metadata is consistent with stored values.

        Params:
            metadata: a client_pb2.CtLogMetadata proto."""

    @abc.abstractmethod
    def logs(self):
        """A generator that yields all currently known logs."""

    @abc.abstractmethod
    def store_sth(self, log_server, audited_sth):
        """Store the STH in the database.

        Will store the STH with a unique ID unless an exact copy already exists.

        Params:
            log_server: the server name, i.e., the <log_server> path prefix
            audited_sth: a client_pb2.AuditedSth proto
        """

    @abc.abstractmethod
    def get_latest_sth(self, log_server):
        """Get the AuditedSth with the latest timestamp."""

    @abc.abstractmethod
    def scan_latest_sth_range(self, log_server, start=0, end=timestamp_max,
                              limit=0):
        """Scan STHs by timestamp

        Args:
            logid: CT log to scan
            start: earliest timestamp
            end: latest timestamp
            limit: maximum number of entries to return. Default is no limit.

        Yields:
            the AuditedSth protos in descending order of timestamps

        Note the scan may be keeping the database connection open until the
        generator is exhausted.
        """

    @abc.abstractmethod
    def get_log_id(self, log_server):
        """Get id in database of log_server."""
| apache-2.0 |
angr/cle | cle/backends/symbol.py | 1 | 4158 | from enum import Enum
import logging
from ..address_translator import AT
l = logging.getLogger(name=__name__)
class SymbolType(Enum):
    """
    ABI-agnostic symbol types
    """
    TYPE_OTHER = 0       # anything that does not fit a category below
    TYPE_NONE = 1        # explicitly untyped symbol
    TYPE_FUNCTION = 2    # code / callable
    TYPE_OBJECT = 3      # data object
    TYPE_SECTION = 4     # refers to a section rather than a named entity
    TYPE_TLS_OBJECT = 5  # thread-local data object
class SymbolSubType(Enum):
    """
    Abstract base class for ABI-specific symbol types

    Concrete backends subclass this and override ``to_base_type`` to map
    their native symbol kinds onto the generic :class:`SymbolType`.
    """

    def to_base_type(self) -> SymbolType: # pylint: disable=no-self-use
        """
        A subclass' ABI-specific mapping to :SymbolType:

        The base class has no meaningful mapping, so calling this directly
        is always an error.
        """
        raise ValueError("Abstract base class SymbolSubType has no base_type")
class Symbol:
    """
    Representation of a symbol from a binary file. Smart enough to rebase itself.

    There should never be more than one Symbol instance representing a single symbol. To make sure of this, only use
    the :meth:`cle.backends.Backend.get_symbol()` to create new symbols.

    :ivar owner: The object that contains this symbol
    :vartype owner: cle.backends.Backend
    :ivar str name: The name of this symbol
    :ivar int addr: The un-based address of this symbol, an RVA
    :ivar int size: The size of this symbol
    :ivar SymbolType _type: The ABI-agnostic type of this symbol
    :ivar bool resolved: Whether this import symbol has been resolved to a real symbol
    :ivar resolvedby: The real symbol this import symbol has been resolve to
    :vartype resolvedby: None or cle.backends.Symbol
    :ivar str resolvewith: The name of the library we must use to resolve this symbol, or None if none is required.
    """

    def __init__(self, owner, name, relative_addr, size, sym_type):
        """
        Not documenting this since if you try calling it, you're wrong.
        """
        self.owner = owner
        self.name = name
        self.relative_addr = relative_addr
        self.size = size
        # Normalize whatever was passed in to the SymbolType enum.
        self._type = SymbolType(sym_type)
        self.resolved = False
        self.resolvedby = None

    def __repr__(self):
        if self.is_import:
            return '<Symbol "%s" in %s (import)>' % (self.name, self.owner.binary_basename)
        else:
            return '<Symbol "%s" in %s at %#x>' % (self.name, self.owner.binary_basename, self.rebased_addr)

    def resolve(self, obj):
        # Mark this (import) symbol as resolved by `obj` and record it on the
        # owning backend's list of resolved imports.
        self.resolved = True
        self.resolvedby = obj
        self.owner.resolved_imports.append(self)

    @property
    def type(self) -> SymbolType:
        """
        The ABI-agnostic SymbolType. Must be overridden by derived types.
        """
        return self._type

    @property
    def subtype(self) -> SymbolSubType:
        """
        A subclass' ABI-specific types
        """
        raise ValueError("Base class Symbol has no subtype")

    @property
    def rebased_addr(self):
        """
        The address of this symbol in the global memory space
        """
        return AT.from_rva(self.relative_addr, self.owner).to_mva()

    @property
    def linked_addr(self):
        # Address of the symbol in the object's linked (pre-rebase) view.
        return AT.from_rva(self.relative_addr, self.owner).to_lva()

    @property
    def is_function(self):
        """
        Whether this symbol is a function
        """
        return self.type is SymbolType.TYPE_FUNCTION

    # These may be overridden in subclasses
    is_static = False
    is_common = False
    is_import = False
    is_export = False
    is_local = False
    is_weak = False
    is_extern = False
    is_forward = False

    def resolve_forwarder(self):
        """
        If this symbol is a forwarding export, return the symbol the forwarding refers to, or None if it cannot be found.
        """
        return self

    # compatibility layer
    _complained_owner = False

    @property
    def owner_obj(self):
        # Deprecated alias for .owner; warns exactly once per process.
        if not Symbol._complained_owner:
            Symbol._complained_owner = True
            l.critical("Deprecation warning: use symbol.owner instead of symbol.owner_obj")
        return self.owner

    def __getstate__(self):
        # Exclude 'owner' from pickling: the backend is expected to re-attach
        # itself when the symbol is restored.
        return dict((k, v) for k, v in self.__dict__.items() if k != 'owner')

    def __setstate__(self, state):
        self.__dict__.update(state)
| bsd-2-clause |
ihatevim/aetherbot | plugins/steam_user.py | 33 | 2667 | import requests
from lxml import etree
from cloudbot import hook
from cloudbot.util import formatting
# security: disable entity expansion and network fetches (XXE defence)
parser = etree.XMLParser(resolve_entities=False, no_network=True)

# Steam community profile URL template; '?xml=1' is added per request.
API_URL = "http://steamcommunity.com/id/{}/"
# Offset between a 64-bit SteamID and the 32-bit account number.
ID_BASE = 76561197960265728

# HTTP headers, populated by set_headers() when the plugin starts.
headers = {}
class SteamError(Exception):
    """Raised when Steam profile data cannot be fetched or parsed."""
def convert_id32(id_64):
    """
    Takes a Steam ID_64 formatted ID and returns a ID_32 formatted ID

    :type id_64: int
    :return: str
    """
    # STEAM_0:<parity>:<account/2>, where 'account' is the offset from ID_BASE.
    account = id_64 - ID_BASE
    parity = account % 2
    return "STEAM_0:{}:{}".format(parity, account // 2)
def convert_id3(id_64):
    """
    Takes a Steam ID_64 formatted ID and returns a ID_3 formatted ID

    :type id_64: int
    :return: str
    """
    # The original implementation doubled the offset, had an if/else that
    # added 0 or 1 depending on parity, then halved again.  Since the doubled
    # value is always even, the "+1" branch was unreachable dead code and the
    # whole dance reduced to the plain account-number offset used here.
    return "U:1:{}".format(id_64 - ID_BASE)
def get_data(user):
    """
    Takes a Steam Community ID of a Steam user and returns a dict of data about that user

    :type user: str
    :return: dict with keys name, id_64, id_32, id_3, state
    :raises SteamError: on HTTP/connection failure or unparsable profile
    """
    data = {}

    # form the request
    params = {'xml': 1}

    # get the page
    try:
        request = requests.get(API_URL.format(user), params=params, headers=headers)
        request.raise_for_status()
    except (requests.exceptions.HTTPError, requests.exceptions.ConnectionError) as e:
        raise SteamError("Could not get user info: {}".format(e))

    # parse with the hardened module-level parser (entities/network disabled)
    profile = etree.fromstring(request.content, parser=parser)

    try:
        data["name"] = profile.find('steamID').text
        data["id_64"] = int(profile.find('steamID64').text)
        online_state = profile.find('stateMessage').text
    except AttributeError:
        # .find() returned None -> expected elements missing from the XML
        raise SteamError("Could not get data for this user.")

    online_state = online_state.replace("<br/>", ": ")  # will make this pretty later
    data["state"] = formatting.strip_html(online_state)

    # derive the legacy/compact ID formats from the 64-bit ID
    data["id_32"] = convert_id32(data["id_64"])
    data["id_3"] = convert_id3(data["id_64"])

    return data
@hook.on_start
def set_headers(bot):
    """ Runs on initial plugin load and sets the HTTP headers for this plugin. """
    # Mutates the module-level dict read by get_data() on every request.
    global headers
    headers = {
        'User-Agent': bot.user_agent
    }
@hook.command("steamid", "sid", "steamuser", "su")
def steamid(text):
    """steamid <username> -- gets the steam ID of <username>. Uses steamcommunity.com/id/<nickname>. """
    try:
        data = get_data(text)
    except SteamError as e:
        return str(e)

    reply = ("{name} ({state}): \x02ID64:\x02 {id_64}, "
             "\x02ID32:\x02 {id_32}, \x02ID3:\x02 {id_3}")
    return reply.format(**data)
| gpl-3.0 |
valexandersaulys/prudential_insurance_kaggle | venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/util/connection.py | 679 | 3293 | import socket
try:
from select import poll, POLLIN
except ImportError: # `poll` doesn't exist on OSX and other platforms
poll = False
try:
from select import select
except ImportError: # `select` doesn't exist on AppEngine.
select = False
def is_connection_dropped(conn):  # Platform-specific
    """
    Returns True if the connection is dropped and should be closed.

    :param conn:
        :class:`httplib.HTTPConnection` object.

    Note: For platforms like AppEngine, this will always return ``False`` to
    let the platform handle connection recycling transparently for us.
    """
    sock = getattr(conn, 'sock', False)
    if sock is False:  # Platform-specific: AppEngine
        return False
    if sock is None:  # Connection already closed (such as by httplib).
        return True

    if not poll:
        if not select:  # Platform-specific: AppEngine
            return False

        try:
            # Readable with no request outstanding means EOF/dropped.
            return select([sock], [], [], 0.0)[0]
        except socket.error:
            return True

    # This version is better on platforms that support it.
    p = poll()
    p.register(sock, POLLIN)
    for (fno, ev) in p.poll(0.0):
        if fno == sock.fileno():
            # Either data is buffered (bad), or the connection is dropped.
            return True
# This function is copied from socket.py in the Python 2.7 standard
# library test suite. Added to its signature is only `socket_options`.
def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
                      source_address=None, socket_options=None):
    """Connect to *address* and return the socket object.

    Convenience function.  Connect to *address* (a 2-tuple ``(host,
    port)``) and return the socket object.  Passing the optional
    *timeout* parameter will set the timeout on the socket instance
    before attempting to connect.  If no *timeout* is supplied, the
    global default timeout setting returned by :func:`getdefaulttimeout`
    is used.  If *source_address* is set it must be a tuple of (host, port)
    for the socket to bind as a source address before making the connection.
    An host of '' or port 0 tells the OS to use the default.
    """

    host, port = address
    err = None
    # Try every resolved address family/sockaddr until one connects.
    for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
        af, socktype, proto, canonname, sa = res
        sock = None
        try:
            sock = socket.socket(af, socktype, proto)

            # If provided, set socket level options before connecting.
            # This is the only addition urllib3 makes to this function.
            _set_socket_options(sock, socket_options)

            if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
                sock.settimeout(timeout)
            if source_address:
                sock.bind(source_address)
            sock.connect(sa)
            return sock

        except socket.error as _:
            # Remember the last error; only raised if every candidate fails.
            err = _
            if sock is not None:
                sock.close()
                sock = None

    if err is not None:
        raise err
    else:
        raise socket.error("getaddrinfo returns an empty list")
def _set_socket_options(sock, options):
if options is None:
return
for opt in options:
sock.setsockopt(*opt)
| gpl-2.0 |
TUDelftNAS/SDN-OpenNetMon | forwarding.py | 1 | 13257 | # #Copyright (C) 2013, Delft University of Technology, Faculty of Electrical Engineering, Mathematics and Computer Science, Network Architectures and Services, Niels van Adrichem
#
# This file is part of OpenNetMon.
#
# OpenNetMon is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenNetMon is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with OpenNetMon. If not, see <http://www.gnu.org/licenses/>.
# Special thanks go to James McCauley and all people connected to the POX project, without their work and provided samples OpenNetMon could not have been created in the way it is now.
"""
OpenNetMon.Forwarding
Requires openflow.discovery
"""
from pox.lib.revent.revent import EventMixin, Event
from pox.lib.addresses import IPAddr
from pox.lib.packet.vlan import vlan
from pox.lib.packet.ipv4 import ipv4
import pox.lib.util as util
from pox.core import core
import pox.openflow.libopenflow_01 as of
from collections import defaultdict
import pox.lib.packet as pkt
from collections import namedtuple
log = core.getLogger()

# dpid -> Switch instance for every currently connected switch.
switches = {}
# (dpid, port) -> Link, for ports known to be inter-switch links.
switch_ports = {}
# adj[dpid1][dpid2] -> output port on dpid1 that reaches dpid2 (None if no link).
adj = defaultdict(lambda:defaultdict(lambda:None))
# MAC address -> SwitchPort(dpid, port) where that address was last seen.
mac_learning = {}
class ofp_match_withHash(of.ofp_match):
    ##Our additions to enable indexing by match specifications
    # POX's ofp_match is unhashable; this subclass adds a hash over every
    # match field so matches can be used as dict keys / set members.

    @classmethod
    def from_ofp_match_Superclass(cls, other):
        # Field-by-field copy constructor from a plain ofp_match.
        match = cls()

        match.wildcards = other.wildcards
        match.in_port = other.in_port
        match.dl_src = other.dl_src
        match.dl_dst = other.dl_dst
        match.dl_vlan = other.dl_vlan
        match.dl_vlan_pcp = other.dl_vlan_pcp
        match.dl_type = other.dl_type
        match.nw_tos = other.nw_tos
        match.nw_proto = other.nw_proto
        match.nw_src = other.nw_src
        match.nw_dst = other.nw_dst
        match.tp_src = other.tp_src
        match.tp_dst = other.tp_dst
        return match

    def __hash__(self):
        return hash((self.wildcards, self.in_port, self.dl_src, self.dl_dst, self.dl_vlan, self.dl_vlan_pcp, self.dl_type, self.nw_tos, self.nw_proto, self.nw_src, self.nw_dst, self.tp_src, self.tp_dst))
class Path(object):
    """A route from ``src`` to ``dst`` stored as a destination-rooted
    predecessor map (``prev[node]`` is the hop before ``node``; the source
    maps to ``None``).  ``first_port`` is the egress port on ``src``.

    Equality and hashing consider only the node sequence, not first_port.
    """

    def __init__(self, src, dst, prev, first_port):
        self.src = src
        self.dst = dst
        self.prev = prev
        self.first_port = first_port

    def __repr__(self):
        # Walk dst -> src via the predecessor map, then print src -> dst.
        hops = [util.dpid_to_str(self.dst)]
        node = self.prev[self.dst]
        while node is not None:
            hops.append(util.dpid_to_str(node))
            node = self.prev[node]
        hops.reverse()
        return "->".join(hops)

    def _tuple_me(self):
        # Hashable node sequence (dst first).  The original local was named
        # 'list', shadowing the builtin; renamed here.
        nodes = [self.dst]
        node = self.prev[self.dst]
        while node is not None:
            nodes.append(node)
            node = self.prev[node]
        return tuple(nodes)

    def __hash__(self):
        return hash(self._tuple_me())

    def __eq__(self, other):
        return self._tuple_me() == other._tuple_me()
def _get_path(src, dst):
    """Compute a shortest path (hop count) from switch ``src`` to ``dst``
    over the global ``adj`` map, returning a Path or None if unreachable
    or a negative cycle is detected."""
    #Bellman-Ford algorithm
    keys = switches.keys()
    distance = {}
    previous = {}

    for dpid in keys:
        distance[dpid] = float("+inf")
        previous[dpid] = None

    distance[src] = 0
    # Relax every edge |V|-1 times; all edges have unit weight here.
    for i in range(len(keys)-1):
        for u in adj.keys(): #nested dict
            for v in adj[u].keys():
                w = 1
                if distance[u] + w < distance[v]:
                    distance[v] = distance[u] + w
                    previous[v] = u

    # One extra pass: any further improvement implies a negative cycle
    # (cannot actually happen with w == 1, kept for algorithmic completeness).
    for u in adj.keys(): #nested dict
        for v in adj[u].keys():
            w = 1
            if distance[u] + w < distance[v]:
                log.error("Graph contains a negative-weight cycle")
                return None

    # Recover the egress port on src by walking back from dst.
    first_port = None
    v = dst
    u = previous[v]
    while u is not None:
        if u == src:
            first_port = adj[u][v]
        v = u
        u = previous[v]

    return Path(src, dst, previous, first_port) #path
def _install_path(prev_path, match):
    """Install flow entries for ``match`` along ``prev_path``, walking from
    the destination switch back to the source so downstream rules exist
    before upstream ones forward packets into them."""
    dst_sw = prev_path.dst
    cur_sw = prev_path.dst
    dst_pck = match.dl_dst

    # Last hop: the destination switch outputs on the learned host port.
    msg = of.ofp_flow_mod()
    msg.match = match
    msg.idle_timeout = 10
    msg.flags = of.OFPFF_SEND_FLOW_REM
    msg.actions.append(of.ofp_action_output(port = mac_learning[dst_pck].port))
    log.debug("Installing forward from switch %s to output port %s", util.dpid_to_str(cur_sw), mac_learning[dst_pck].port)
    switches[dst_sw].connection.send(msg)

    # Intermediate hops: each switch outputs on its port towards the next
    # switch on the path (adj lookup).
    next_sw = cur_sw
    cur_sw = prev_path.prev[next_sw]
    while cur_sw is not None: #for switch in path.keys():
        msg = of.ofp_flow_mod()
        msg.match = match
        msg.idle_timeout = 10
        msg.flags = of.OFPFF_SEND_FLOW_REM
        log.debug("Installing forward from switch %s to switch %s output port %s", util.dpid_to_str(cur_sw), util.dpid_to_str(next_sw), adj[cur_sw][next_sw])
        msg.actions.append(of.ofp_action_output(port = adj[cur_sw][next_sw]))
        switches[cur_sw].connection.send(msg)
        next_sw = cur_sw
        cur_sw = prev_path.prev[next_sw]
def _print_rev_path(dst_pck, src, dst, prev_path):
    """Log (debug level) the hop-by-hop reverse path from ``src`` to ``dst``.

    ``prev_path`` is the raw predecessor dict (node -> previous node), and
    ``dst_pck`` the destination MAC used to look up the final host port.
    """
    # 'msg' replaces the original accumulator named 'str', which shadowed
    # the builtin; behavior is otherwise unchanged.
    msg = "Reverse path from %s to %s over: [%s->dst over port %s]" % (
        util.dpid_to_str(src), util.dpid_to_str(dst),
        util.dpid_to_str(dst), mac_learning[dst_pck].port)

    next_sw = dst
    cur_sw = prev_path[next_sw]
    while cur_sw is not None:  # for switch in path.keys():
        msg += "[%s->%s over port %s]" % (
            util.dpid_to_str(cur_sw), util.dpid_to_str(next_sw),
            adj[cur_sw][next_sw])
        next_sw = cur_sw
        cur_sw = prev_path[next_sw]

    log.debug(msg)
class NewFlow(Event):
    """Raised when a flow has been installed along a path; carries the
    installed match, the path's predecessor structure, and the adjacency map."""

    def __init__(self, prev_path, match, adj):
        Event.__init__(self)
        self.prev_path = prev_path
        self.match = match
        self.adj = adj
class Switch(EventMixin):
    """Per-datapath handler: learns MAC locations, floods unknowns, and
    installs shortest-path flows (raising NewFlow) for known unicasts."""

    _eventMixin_events = set([
        NewFlow,
    ])

    def __init__(self, connection, l3_matching=False):
        # l3_matching=True builds matches from L2+L3 fields only (ignoring
        # L4 ports), so e.g. a ping pre-installs the flow a UDP stream uses.
        self.connection = connection
        self.l3_matching = l3_matching
        connection.addListeners(self)
        for p in self.connection.ports.itervalues(): #Enable flooding on all ports until they are classified as links
            self.enable_flooding(p.port_no)

    def __repr__(self):
        return util.dpid_to_str(self.connection.dpid)

    def disable_flooding(self, port):
        # Set the NO_FLOOD bit on the port so OFPP_FLOOD skips it.
        msg = of.ofp_port_mod(port_no = port,
                              hw_addr = self.connection.ports[port].hw_addr,
                              config = of.OFPPC_NO_FLOOD,
                              mask = of.OFPPC_NO_FLOOD)
        self.connection.send(msg)

    def enable_flooding(self, port):
        # Clear the NO_FLOOD bit (mask selects it, config leaves it unset).
        msg = of.ofp_port_mod(port_no = port,
                              hw_addr = self.connection.ports[port].hw_addr,
                              config = 0, # opposite of of.OFPPC_NO_FLOOD,
                              mask = of.OFPPC_NO_FLOOD)
        self.connection.send(msg)

    def _handle_PacketIn(self, event):
        def forward(port):
            """Tell the switch to send the packet out of the given port."""
            msg = of.ofp_packet_out()
            msg.actions.append(of.ofp_action_output(port = port))
            # Reference the buffered packet if the switch kept one, otherwise
            # ship the raw bytes back.
            if event.ofp.buffer_id is not None:
                msg.buffer_id = event.ofp.buffer_id
            else:
                msg.data = event.ofp.data
            msg.in_port = event.port
            self.connection.send(msg)

        def flood():
            """Tell all switches to flood the packet, remember that we disable inter-switch flooding at startup"""
            #forward(of.OFPP_FLOOD)
            # Manually output on every non-inter-switch port of every switch,
            # instead of relying on OFPP_FLOOD.
            for (dpid,switch) in switches.iteritems():
                msg = of.ofp_packet_out()
                if switch == self:
                    # Originating switch may reuse the buffered packet.
                    if event.ofp.buffer_id is not None:
                        msg.buffer_id = event.ofp.buffer_id
                    else:
                        msg.data = event.ofp.data
                    msg.in_port = event.port
                else:
                    msg.data = event.ofp.data
                # Only edge ports: skip ports known to be inter-switch links.
                ports = [p for p in switch.connection.ports if (dpid,p) not in switch_ports]
                if len(ports) > 0:
                    for p in ports:
                        msg.actions.append(of.ofp_action_output(port = p))
                    switches[dpid].connection.send(msg)

        def drop():
            """Tell the switch to drop the packet"""
            if event.ofp.buffer_id is not None: #nothing to drop because the packet is not in the Switch buffer
                msg = of.ofp_packet_out()
                msg.buffer_id = event.ofp.buffer_id
                event.ofp.buffer_id = None # Mark as dead, copied from James McCauley, not sure what it does but it does not work otherwise
                msg.in_port = event.port
                self.connection.send(msg)

        log.debug("Received PacketIn")
        packet = event.parsed

        # NOTE(review): typename 'SwitchPoint' looks like a typo for
        # 'SwitchPort' — left unchanged since it is a runtime string.
        SwitchPort = namedtuple('SwitchPoint', 'dpid port')

        if (event.dpid,event.port) not in switch_ports: # only relearn locations if they arrived from non-interswitch links
            mac_learning[packet.src] = SwitchPort(event.dpid, event.port) #relearn the location of the mac-address

        if packet.effective_ethertype == packet.LLDP_TYPE:
            drop()
            log.debug("Switch %s dropped LLDP packet", self)
        elif packet.dst.is_multicast:
            flood()
            log.debug("Switch %s flooded multicast 0x%0.4X type packet", self, packet.effective_ethertype)
        elif packet.dst not in mac_learning:
            flood() #Let's first learn the location of the recipient before generating and installing any rules for this. We might flood this but that leads to further complications if half way the flood through the network the path has been learned.
            log.debug("Switch %s flooded unicast 0x%0.4X type packet, due to unlearned MAC address", self, packet.effective_ethertype)
        elif packet.effective_ethertype == packet.ARP_TYPE:
            #These packets are sent so not-often that they don't deserve a flow
            #Instead of flooding them, we drop it at the current switch and have it resend by the switch to which the recipient is connected.
            #flood()
            drop()
            dst = mac_learning[packet.dst]
            msg = of.ofp_packet_out()
            msg.data = event.ofp.data
            msg.actions.append(of.ofp_action_output(port = dst.port))
            switches[dst.dpid].connection.send(msg)
            log.debug("Switch %s processed unicast ARP (0x0806) packet, send to recipient by switch %s", self, util.dpid_to_str(dst.dpid))
        else:
            # Known unicast: compute a path, install flows along it, and
            # deliver this first packet directly from the last-hop switch.
            log.debug("Switch %s received PacketIn of type 0x%0.4X, received from %s.%s", self, packet.effective_ethertype, util.dpid_to_str(event.dpid), event.port)
            dst = mac_learning[packet.dst]
            prev_path = _get_path(self.connection.dpid, dst.dpid)
            if prev_path is None:
                flood()
                return
            log.debug("Path from %s to %s over path %s", packet.src, packet.dst, prev_path)
            if self.l3_matching == True: #only match on l2-properties, useful when doing experiments with UDP streams as you can insert a flow using ping and then start sending udp.
                match = ofp_match_withHash()
                match.dl_src = packet.src
                match.dl_dst = packet.dst
                match.dl_type = packet.type
                p = packet.next
                if isinstance(p, vlan):
                    match.dl_type = p.eth_type
                    match.dl_vlan = p.id
                    match.dl_vlan_pcp = p.pcp
                    p = p.next
                if isinstance(p, ipv4):
                    match.nw_src = p.srcip
                    match.nw_dst = p.dstip
                    match.nw_proto = p.protocol
                    match.nw_tos = p.tos
                    p = p.next
                else:
                    match.dl_vlan = of.OFP_VLAN_NONE
                    match.dl_vlan_pcp = 0
            else:
                match = ofp_match_withHash.from_packet(packet)

            _install_path(prev_path, match)

            #forward the packet directly from the last switch, there is no need to have the packet run through the complete network.
            drop()
            dst = mac_learning[packet.dst]
            msg = of.ofp_packet_out()
            msg.data = event.ofp.data
            msg.actions.append(of.ofp_action_output(port = dst.port))
            switches[dst.dpid].connection.send(msg)

            self.raiseEvent(NewFlow(prev_path, match, adj))
            log.debug("Switch %s processed unicast 0x%0.4x type packet, send to recipient by switch %s", self, packet.effective_ethertype, util.dpid_to_str(dst.dpid))

    def _handle_ConnectionDown(self, event):
        log.debug("Switch %s going down", util.dpid_to_str(self.connection.dpid))
        del switches[self.connection.dpid]
        #pprint(switches)
class NewSwitch(Event):
    """Raised by Forwarding whenever a new datapath connects."""

    def __init__(self, switch):
        Event.__init__(self)
        self.switch = switch
class Forwarding(EventMixin):
    """POX component: tracks topology via openflow.discovery and spawns a
    Switch handler per connected datapath."""

    _core_name = "opennetmon_forwarding" # we want to be core.opennetmon_forwarding
    _eventMixin_events = set([NewSwitch,])

    def __init__ (self, l3_matching):
        log.debug("Forwarding coming up")

        def startup():
            # Deferred until both dependencies are registered on core.
            core.openflow.addListeners(self)
            core.openflow_discovery.addListeners(self)
            log.debug("Forwarding started")

        self.l3_matching = l3_matching
        core.call_when_ready(startup, 'openflow', 'openflow_discovery')

    def _handle_LinkEvent(self, event):
        # Maintain the global adjacency/link-port maps on discovery events.
        link = event.link
        if event.added:
            log.debug("Received LinkEvent, Link Added from %s to %s over port %d", util.dpid_to_str(link.dpid1), util.dpid_to_str(link.dpid2), link.port1)
            adj[link.dpid1][link.dpid2] = link.port1
            switch_ports[link.dpid1,link.port1] = link
            #switches[link.dpid1].disable_flooding(link.port1)
            #pprint(adj)
        else:
            log.debug("Received LinkEvent, Link Removed from %s to %s over port %d", util.dpid_to_str(link.dpid1), util.dpid_to_str(link.dpid2), link.port1)
            ##Disabled those two lines to prevent interference with experiment due to falsely identified disconnected links.
            #del adj[link.dpid1][link.dpid2]
            #del switch_ports[link.dpid1,link.port1]
            #switches[link.dpid1].enable_flooding(link.port1)
        self._calc_ForwardingMatrix()

    def _calc_ForwardingMatrix(self):
        # Placeholder: paths are currently computed on demand in _get_path().
        log.debug("Calculating forwarding matrix")

    def _handle_ConnectionUp(self, event):
        log.debug("New switch connection: %s", event.connection)
        sw = Switch(event.connection, l3_matching=self.l3_matching)
        switches[event.dpid] = sw;
        self.raiseEvent(NewSwitch(sw))
def launch (l3_matching=False):
    # POX entry point: register this component as core.opennetmon_forwarding.
    core.registerNew(Forwarding, l3_matching)
| gpl-3.0 |
n0trax/ansible | lib/ansible/modules/cloud/openstack/os_volume.py | 19 | 6392 | #!/usr/bin/python
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: os_volume
short_description: Create/Delete Cinder Volumes
extends_documentation_fragment: openstack
version_added: "2.0"
author: "Monty Taylor (@emonty)"
description:
- Create or Remove cinder block storage volumes
options:
size:
description:
- Size of volume in GB. This parameter is required when the
I(state) parameter is 'present'.
required: false
default: None
display_name:
description:
- Name of volume
required: true
display_description:
description:
- String describing the volume
required: false
default: None
volume_type:
description:
- Volume type for volume
required: false
default: None
image:
description:
- Image name or id for boot from volume
required: false
default: None
snapshot_id:
description:
- Volume snapshot id to create from
required: false
default: None
volume:
description:
- Volume name or id to create from
required: false
default: None
version_added: "2.3"
state:
description:
- Should the resource be present or absent.
choices: [present, absent]
default: present
availability_zone:
description:
- Ignored. Present for backwards compatibility
required: false
scheduler_hints:
description:
- Scheduler hints passed to volume API in form of dict
required: false
default: None
version_added: "2.4"
requirements:
- "python >= 2.6"
- "shade"
'''
EXAMPLES = '''
# Creates a new volume
- name: create a volume
hosts: localhost
tasks:
- name: create 40g test volume
os_volume:
state: present
cloud: mordred
availability_zone: az2
size: 40
display_name: test_volume
scheduler_hints:
same_host: 243e8d3c-8f47-4a61-93d6-7215c344b0c0
'''
try:
import shade
HAS_SHADE = True
except ImportError:
HAS_SHADE = False
from distutils.version import StrictVersion
def _present_volume(module, cloud):
    """Ensure the named volume exists; exits the module with changed=False
    if it already does, otherwise creates it and exits with changed=True."""
    if cloud.volume_exists(module.params['display_name']):
        v = cloud.get_volume(module.params['display_name'])
        module.exit_json(changed=False, id=v['id'], volume=v)

    volume_args = dict(
        size=module.params['size'],
        volume_type=module.params['volume_type'],
        display_name=module.params['display_name'],
        display_description=module.params['display_description'],
        snapshot_id=module.params['snapshot_id'],
        availability_zone=module.params['availability_zone'],
    )
    # Boot-from-image: resolve the image name/id to an id for the API.
    if module.params['image']:
        image_id = cloud.get_image_id(module.params['image'])
        volume_args['imageRef'] = image_id
    # Clone-from-volume: resolve and validate the source volume.
    if module.params['volume']:
        volume_id = cloud.get_volume_id(module.params['volume'])
        if not volume_id:
            module.fail_json(msg="Failed to find volume '%s'" % module.params['volume'])
        volume_args['source_volid'] = volume_id
    if module.params['scheduler_hints']:
        volume_args['scheduler_hints'] = module.params['scheduler_hints']

    volume = cloud.create_volume(
        wait=module.params['wait'], timeout=module.params['timeout'],
        **volume_args)
    module.exit_json(changed=True, id=volume['id'], volume=volume)
def _absent_volume(module, cloud):
    """Ensure the named volume does not exist; a delete timeout is reported
    as changed=False rather than a failure."""
    changed = False
    if cloud.volume_exists(module.params['display_name']):
        try:
            changed = cloud.delete_volume(name_or_id=module.params['display_name'],
                                          wait=module.params['wait'],
                                          timeout=module.params['timeout'])
        except shade.OpenStackCloudTimeout:
            # Deletion may still be in progress; report no change this run.
            module.exit_json(changed=changed)

    module.exit_json(changed=changed)
def main():
    """Ansible module entry point: validate parameters and dispatch to the
    present/absent handlers."""
    argument_spec = openstack_full_argument_spec(
        size=dict(default=None),
        volume_type=dict(default=None),
        display_name=dict(required=True, aliases=['name']),
        display_description=dict(default=None, aliases=['description']),
        image=dict(default=None),
        snapshot_id=dict(default=None),
        volume=dict(default=None),
        state=dict(default='present', choices=['absent', 'present']),
        scheduler_hints=dict(default=None, type='dict')
    )
    # A volume can be created from at most one source.
    module_kwargs = openstack_module_kwargs(
        mutually_exclusive=[
            ['image', 'snapshot_id', 'volume'],
        ],
    )
    module = AnsibleModule(argument_spec=argument_spec, **module_kwargs)

    if not HAS_SHADE:
        module.fail_json(msg='shade is required for this module')

    # scheduler_hints support landed in shade 1.22; refuse older versions.
    if (module.params['scheduler_hints'] and
            StrictVersion(shade.__version__) < StrictVersion('1.22')):
        module.fail_json(msg="To utilize scheduler_hints, the installed version of"
                             "the shade library MUST be >= 1.22")

    state = module.params['state']

    if state == 'present' and not module.params['size']:
        module.fail_json(msg="Size is required when state is 'present'")

    try:
        cloud = shade.openstack_cloud(**module.params)
        if state == 'present':
            _present_volume(module, cloud)
        if state == 'absent':
            _absent_volume(module, cloud)
    except shade.OpenStackCloudException as e:
        module.fail_json(msg=str(e))
# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *
if __name__ == '__main__':
main()
| gpl-3.0 |
putcn/Paddle | python/paddle/fluid/tests/unittests/test_calc_gradient.py | 4 | 1449 | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import paddle.fluid as fluid
import paddle.fluid.layers as layers
import paddle.fluid.framework as framework
import paddle.fluid.optimizer as optimizer
from paddle.fluid.backward import calc_gradient
class TestCalcGradient(unittest.TestCase):
    """Smoke test: calc_gradient of a mean(mul(x, y)) graph w.r.t. an
    intermediate and a parameter both build and execute on CPU."""

    def test_calc_gradient(self):
        x = layers.create_parameter(dtype="float32", shape=[5, 10])
        y = layers.create_parameter(dtype="float32", shape=[10, 8])
        mul_out = layers.mul(x=x, y=y)
        mean_out = layers.mean(mul_out)
        # Gradient w.r.t. an intermediate tensor...
        a = calc_gradient(mean_out, mul_out)
        # ...and w.r.t. a parameter.
        b = calc_gradient(mean_out, x)
        place = fluid.CPUPlace()
        exe = fluid.Executor(place)
        exe.run(fluid.default_startup_program())
        exe.run(fluid.default_main_program(), feed={}, fetch_list=[a, b])
if __name__ == "__main__":
unittest.main()
| apache-2.0 |
knifenomad/django | tests/dispatch/tests.py | 346 | 6627 | import gc
import sys
import time
import unittest
import weakref
from types import TracebackType
from django.dispatch import Signal, receiver
# Platform-specific garbage_collect(): weakref-death tests below need a
# definitive collection, which different interpreters achieve differently.
if sys.platform.startswith('java'):
    def garbage_collect():
        # Some JVM GCs will execute finalizers in a different thread, meaning
        # we need to wait for that to complete before we go on looking for the
        # effects of that.
        gc.collect()
        time.sleep(0.1)
elif hasattr(sys, "pypy_version_info"):
    def garbage_collect():
        # Collecting weakreferences can take two collections on PyPy.
        gc.collect()
        gc.collect()
else:
    def garbage_collect():
        # CPython: a single collection suffices.
        gc.collect()
def receiver_1_arg(val, **kwargs):
    """Plain-function signal receiver: echoes back the received ``val``."""
    return val
class Callable(object):
    """Object receiver usable either directly (via ``__call__``) or through
    its bound method ``a``; both simply echo the ``val`` argument."""

    def __call__(self, val, **kwargs):
        """Callable-instance receiver: return ``val`` unchanged."""
        return val

    def a(self, val, **kwargs):
        """Bound-method receiver: return ``val`` unchanged."""
        return val
# Module-level signals shared by the test cases below.  ``d_signal`` enables
# per-sender receiver caching so that code path is exercised as well.
a_signal = Signal(providing_args=["val"])
b_signal = Signal(providing_args=["val"])
c_signal = Signal(providing_args=["val"])
d_signal = Signal(providing_args=["val"], use_caching=True)
class DispatcherTests(unittest.TestCase):
    """Test suite for dispatcher (barely started)"""
    def assertTestIsClean(self, signal):
        """Assert that everything has been cleaned up automatically"""
        # Note that dead weakref cleanup happens as side effect of using
        # the signal's receivers through the signals API. So, first do a
        # call to an API method to force cleanup.
        self.assertFalse(signal.has_listeners())
        self.assertEqual(signal.receivers, [])
    def test_exact(self):
        # Receiver registered for a specific sender only fires for that sender.
        a_signal.connect(receiver_1_arg, sender=self)
        expected = [(receiver_1_arg, "test")]
        result = a_signal.send(sender=self, val="test")
        self.assertEqual(result, expected)
        a_signal.disconnect(receiver_1_arg, sender=self)
        self.assertTestIsClean(a_signal)
    def test_ignored_sender(self):
        # Receiver registered without a sender fires regardless of sender.
        a_signal.connect(receiver_1_arg)
        expected = [(receiver_1_arg, "test")]
        result = a_signal.send(sender=self, val="test")
        self.assertEqual(result, expected)
        a_signal.disconnect(receiver_1_arg)
        self.assertTestIsClean(a_signal)
    def test_garbage_collected(self):
        # Bound-method receivers are held via weakref: once the instance dies,
        # the receiver silently drops out of the send results.
        a = Callable()
        a_signal.connect(a.a, sender=self)
        expected = []
        del a
        garbage_collect()
        result = a_signal.send(sender=self, val="test")
        self.assertEqual(result, expected)
        self.assertTestIsClean(a_signal)
    def test_cached_garbaged_collected(self):
        """
        Make sure signal caching sender receivers don't prevent garbage
        collection of senders.
        """
        class sender:
            pass
        wref = weakref.ref(sender)
        d_signal.connect(receiver_1_arg)
        d_signal.send(sender, val='garbage')
        del sender
        garbage_collect()
        try:
            self.assertIsNone(wref())
        finally:
            # Disconnect after reference check since it flushes the tested cache.
            d_signal.disconnect(receiver_1_arg)
    def test_multiple_registration(self):
        # Connecting the same receiver repeatedly must register it only once.
        a = Callable()
        a_signal.connect(a)
        a_signal.connect(a)
        a_signal.connect(a)
        a_signal.connect(a)
        a_signal.connect(a)
        a_signal.connect(a)
        result = a_signal.send(sender=self, val="test")
        self.assertEqual(len(result), 1)
        self.assertEqual(len(a_signal.receivers), 1)
        del a
        del result
        garbage_collect()
        self.assertTestIsClean(a_signal)
    def test_uid_registration(self):
        # Two different receivers sharing a dispatch_uid count as one
        # registration; the second connect is a no-op.
        def uid_based_receiver_1(**kwargs):
            pass
        def uid_based_receiver_2(**kwargs):
            pass
        a_signal.connect(uid_based_receiver_1, dispatch_uid="uid")
        a_signal.connect(uid_based_receiver_2, dispatch_uid="uid")
        self.assertEqual(len(a_signal.receivers), 1)
        a_signal.disconnect(dispatch_uid="uid")
        self.assertTestIsClean(a_signal)
    def test_robust(self):
        """Test the send_robust() function"""
        # send_robust() must capture the raised exception (with traceback
        # attached) instead of propagating it.
        def fails(val, **kwargs):
            raise ValueError('this')
        a_signal.connect(fails)
        result = a_signal.send_robust(sender=self, val="test")
        err = result[0][1]
        self.assertIsInstance(err, ValueError)
        self.assertEqual(err.args, ('this',))
        self.assertTrue(hasattr(err, '__traceback__'))
        self.assertIsInstance(err.__traceback__, TracebackType)
        a_signal.disconnect(fails)
        self.assertTestIsClean(a_signal)
    def test_disconnection(self):
        # Mix explicit disconnects with one receiver dying via GC.
        receiver_1 = Callable()
        receiver_2 = Callable()
        receiver_3 = Callable()
        a_signal.connect(receiver_1)
        a_signal.connect(receiver_2)
        a_signal.connect(receiver_3)
        a_signal.disconnect(receiver_1)
        del receiver_2
        garbage_collect()
        a_signal.disconnect(receiver_3)
        self.assertTestIsClean(a_signal)
    def test_values_returned_by_disconnection(self):
        # disconnect() reports whether anything was actually removed.
        receiver_1 = Callable()
        receiver_2 = Callable()
        a_signal.connect(receiver_1)
        receiver_1_disconnected = a_signal.disconnect(receiver_1)
        receiver_2_disconnected = a_signal.disconnect(receiver_2)
        self.assertTrue(receiver_1_disconnected)
        self.assertFalse(receiver_2_disconnected)
        self.assertTestIsClean(a_signal)
    def test_has_listeners(self):
        self.assertFalse(a_signal.has_listeners())
        self.assertFalse(a_signal.has_listeners(sender=object()))
        receiver_1 = Callable()
        a_signal.connect(receiver_1)
        self.assertTrue(a_signal.has_listeners())
        self.assertTrue(a_signal.has_listeners(sender=object()))
        a_signal.disconnect(receiver_1)
        self.assertFalse(a_signal.has_listeners())
        self.assertFalse(a_signal.has_listeners(sender=object()))
class ReceiverTestCase(unittest.TestCase):
    """
    Test suite for receiver.
    """
    def test_receiver_single_signal(self):
        # @receiver with a single signal connects the decorated function.
        @receiver(a_signal)
        def f(val, **kwargs):
            self.state = val
        self.state = False
        a_signal.send(sender=self, val=True)
        self.assertTrue(self.state)
    def test_receiver_signal_list(self):
        # @receiver with a list connects the function to every listed signal.
        @receiver([a_signal, b_signal, c_signal])
        def f(val, **kwargs):
            self.state.append(val)
        self.state = []
        a_signal.send(sender=self, val='a')
        c_signal.send(sender=self, val='c')
        b_signal.send(sender=self, val='b')
        self.assertIn('a', self.state)
        self.assertIn('b', self.state)
        self.assertIn('c', self.state)
| bsd-3-clause |
rouault/mapnik | scons/scons-local-2.3.4/SCons/Tool/dvi.py | 9 | 2335 | """SCons.Tool.dvi
Common DVI Builder definition for various other Tool modules that use it.
"""
#
# Copyright (c) 2001 - 2014 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/dvi.py 2014/09/27 12:51:43 garyo"
import SCons.Builder
import SCons.Tool
DVIBuilder = None
def generate(env):
    """Install the shared skeleton DVI Builder into the environment.

    The Builder starts with empty action/emitter dictionaries; TeX-related
    tools register their actions into it afterwards.  The Builder instance
    is cached in the module-level DVIBuilder so all environments share it.
    """
    try:
        env['BUILDERS']['DVI']
    except KeyError:
        global DVIBuilder
        if DVIBuilder is None:
            # The suffix is hard-coded to '.dvi', not configurable via a
            # construction variable like $DVISUFFIX, because the output
            # file name is hard-coded within TeX.
            DVIBuilder = SCons.Builder.Builder(action = {},
                                               source_scanner = SCons.Tool.LaTeXScanner,
                                               suffix = '.dvi',
                                               emitter = {},
                                               source_ext_match = None)
        env['BUILDERS']['DVI'] = DVIBuilder
def exists(env):
    """Report availability of this tool.

    Only a skeleton Builder is installed here, so the tool is treated as
    always available, regardless of the environment.
    """
    return 1
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| lgpl-2.1 |
wfarner/aurora | src/main/python/apache/thermos/monitoring/detector.py | 10 | 5703 | #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Detect Thermos tasks on disk
This module contains the TaskDetector, used to detect Thermos tasks within a given checkpoint root.
"""
import functools
import glob
import os
import re
from abc import abstractmethod
from twitter.common.lang import Compatibility, Interface
from apache.thermos.common.constants import DEFAULT_CHECKPOINT_ROOT
from apache.thermos.common.path import TaskPath
class PathDetector(Interface):
  """Interface for objects that discover Thermos checkpoint roots on disk."""
  @abstractmethod
  def get_paths(self):
    """Get a list of valid checkpoint roots."""
class FixedPathDetector(PathDetector):
  """Detector that always reports a single preconfigured checkpoint root."""

  def __init__(self, path=DEFAULT_CHECKPOINT_ROOT):
    """:param path: The checkpoint root to report; must be a string."""
    if isinstance(path, Compatibility.string):
      self._paths = [path]
    else:
      raise TypeError('FixedPathDetector path should be a string, got %s' % type(path))

  def get_paths(self):
    """Return a copy of the configured root list."""
    return list(self._paths)
class ChainedPathDetector(PathDetector):
  """Composite detector that unions the roots reported by child detectors."""

  def __init__(self, *detectors):
    """:param detectors: PathDetector instances to be queried in turn."""
    for detector in detectors:
      if not isinstance(detector, PathDetector):
        raise TypeError('Expected detector %r to be a PathDetector, got %s' % (
            detector, type(detector)))
    self._detectors = detectors

  def get_paths(self):
    """Return the de-duplicated union of every child detector's paths."""
    unique_paths = set()
    for detector in self._detectors:
      unique_paths.update(detector.get_paths())
    return list(unique_paths)
def memoized(fn):
  """Decorator memoizing a method's result per instance, keyed on its
  positional arguments.  The cache is stored in an instance attribute so
  every object gets its own independent cache."""
  attr = '__memoized_' + fn.__name__

  @functools.wraps(fn)
  def wrapper(self, *args):
    if not hasattr(self, attr):
      setattr(self, attr, {})
    cache = getattr(self, attr)
    if args not in cache:
      cache[args] = fn(self, *args)
    return cache[args]

  return wrapper
class TaskDetector(object):
  """
  Helper class in front of TaskPath to detect active/finished/running tasks. Performs no
  introspection on the state of a task; merely detects based on file paths on disk.
  """
  class Error(Exception): pass
  class MatchingError(Error): pass

  def __init__(self, root):
    """:param root: Checkpoint root directory to scan for task artifacts."""
    self._root_dir = root
    self._pathspec = TaskPath()

  @memoized
  def __get_task_ids_patterns(self, state):
    """Return (glob, compiled regex) matching task paths for a given state."""
    path_glob = self._pathspec.given(
        root=self._root_dir,
        task_id="*",
        state=state or '*'
    ).getpath('task_path')
    # Raw strings: "(\S+)" is an invalid escape sequence in a non-raw literal
    # (DeprecationWarning since Python 3.6, an error in future versions).
    path_regex = self._pathspec.given(
        root=re.escape(self._root_dir),
        task_id=r"(\S+)",
        state=r'(\S+)'
    ).getpath('task_path')
    return path_glob, re.compile(path_regex)

  def get_task_ids(self, state=None):
    """Yield (state, task_id) tuples for tasks on disk, optionally filtered
    to a single state ('active'/'finished')."""
    path_glob, path_regex = self.__get_task_ids_patterns(state)
    for path in glob.glob(path_glob):
      try:
        task_state, task_id = path_regex.match(path).groups()
      except Exception:
        # Unparseable paths are skipped rather than raising.
        continue
      if state is None or task_state == state:
        yield (task_state, task_id)

  @memoized
  def __get_process_runs_patterns(self, task_id, log_dir):
    """Return (glob, compiled regex) matching per-process run log dirs."""
    path_glob = self._pathspec.given(
        root=self._root_dir,
        task_id=task_id,
        log_dir=log_dir,
        process='*',
        run='*'
    ).getpath('process_logdir')
    path_regex = self._pathspec.given(
        root=re.escape(self._root_dir),
        task_id=re.escape(task_id),
        log_dir=log_dir,
        process=r'(\S+)',
        run=r'(\d+)'
    ).getpath('process_logdir')
    return path_glob, re.compile(path_regex)

  def get_process_runs(self, task_id, log_dir):
    """Yield (process_name, run_number) pairs found under log_dir."""
    path_glob, path_regex = self.__get_process_runs_patterns(task_id, log_dir)
    for path in glob.glob(path_glob):
      try:
        process, run = path_regex.match(path).groups()
      except Exception:
        continue
      yield process, int(run)

  def get_process_logs(self, task_id, log_dir):
    """Yield existing stdout/stderr log file paths for every process run."""
    for process, run in self.get_process_runs(task_id, log_dir):
      for logtype in ('stdout', 'stderr'):
        path = (self._pathspec.with_filename(logtype).given(root=self._root_dir,
                                                           task_id=task_id,
                                                           log_dir=log_dir,
                                                           process=process,
                                                           run=run)
                                                     .getpath('process_logdir'))
        if os.path.exists(path):
          yield path

  def get_checkpoint(self, task_id):
    """Return the path of the runner checkpoint for the given task."""
    return self._pathspec.given(root=self._root_dir, task_id=task_id).getpath('runner_checkpoint')

  @memoized
  def __get_process_checkpoints_patterns(self, task_id):
    """Return (glob, compiled regex) matching per-process checkpoint files."""
    path_glob = self._pathspec.given(
        root=self._root_dir,
        task_id=task_id,
        process='*'
    ).getpath('process_checkpoint')
    path_regex = self._pathspec.given(
        root=re.escape(self._root_dir),
        task_id=re.escape(task_id),
        process=r'(\S+)',
    ).getpath('process_checkpoint')
    return path_glob, re.compile(path_regex)

  def get_process_checkpoints(self, task_id):
    """Yield checkpoint file paths for every process of the given task."""
    path_glob, path_regex = self.__get_process_checkpoints_patterns(task_id)
    for path in glob.glob(path_glob):
      try:
        # The match is used only to filter out non-conforming paths.
        process, = path_regex.match(path).groups()
      except Exception:
        continue
      yield path
| apache-2.0 |
eeshangarg/oh-mainline | vendor/packages/zope.interface/src/zope/interface/tests/test_verify.py | 22 | 4553 | ##############################################################################
#
# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Interface Verify tests
"""
import doctest
import unittest
from zope.interface import Interface, implements, classImplements, Attribute
from zope.interface.verify import verifyClass, verifyObject
from zope.interface.exceptions import DoesNotImplement, BrokenImplementation
from zope.interface.exceptions import BrokenMethodImplementation
class Test(unittest.TestCase):
    """Exercise verifyClass/verifyObject signature- and attribute-checking."""
    def testNotImplemented(self):
        # A class that doesn't declare the interface must fail verification.
        class C(object): pass
        class I(Interface): pass
        self.assertRaises(DoesNotImplement, verifyClass, I, C)
        classImplements(C, I)
        verifyClass(I, C)
    def testMissingAttr(self):
        # Declaring the interface is not enough: the attribute must exist.
        class I(Interface):
            def f(): pass
        class C(object):
            implements(I)
        self.assertRaises(BrokenImplementation, verifyClass, I, C)
        C.f=lambda self: None
        verifyClass(I, C)
    def testMissingAttr_with_Extended_Interface(self):
        # Attributes inherited through a base interface are also required.
        class II(Interface):
            def f():
                pass
        class I(II):
            pass
        class C(object):
            implements(I)
        self.assertRaises(BrokenImplementation, verifyClass, I, C)
        C.f=lambda self: None
        verifyClass(I, C)
    def testWrongArgs(self):
        class I(Interface):
            def f(a): pass
        class C(object):
            def f(self, b): pass
            implements(I)
        # We no longer require names to match.
        #self.assertRaises(BrokenMethodImplementation, verifyClass, I, C)
        C.f=lambda self, a: None
        verifyClass(I, C)
        # keyword-only signature cannot satisfy a positional parameter.
        C.f=lambda self, **kw: None
        self.assertRaises(BrokenMethodImplementation, verifyClass, I, C)
        C.f=lambda self, a, *args: None
        verifyClass(I, C)
        C.f=lambda self, a, *args, **kw: None
        verifyClass(I, C)
        C.f=lambda self, *args: None
        verifyClass(I, C)
    def testExtraArgs(self):
        # An extra *required* parameter breaks the contract; a defaulted
        # extra parameter is fine.
        class I(Interface):
            def f(a): pass
        class C(object):
            def f(self, a, b): pass
            implements(I)
        self.assertRaises(BrokenMethodImplementation, verifyClass, I, C)
        C.f=lambda self, a: None
        verifyClass(I, C)
        C.f=lambda self, a, b=None: None
        verifyClass(I, C)
    def testNoVar(self):
        # Interface requires *args; implementation must accept them.
        class I(Interface):
            def f(a, *args): pass
        class C(object):
            def f(self, a): pass
            implements(I)
        self.assertRaises(BrokenMethodImplementation, verifyClass, I, C)
        C.f=lambda self, a, *foo: None
        verifyClass(I, C)
    def testNoKW(self):
        # Interface requires **kwargs; implementation must accept them.
        class I(Interface):
            def f(a, **args): pass
        class C(object):
            def f(self, a): pass
            implements(I)
        self.assertRaises(BrokenMethodImplementation, verifyClass, I, C)
        C.f=lambda self, a, **foo: None
        verifyClass(I, C)
    def testModule(self):
        # verifyObject also works against a module providing an interface.
        from zope.interface.tests.ifoo import IFoo
        from zope.interface.tests import dummy
        verifyObject(IFoo, dummy)
    def testMethodForAttr(self):
        # A method may satisfy a declared Attribute.
        class IFoo(Interface):
            foo = Attribute("The foo Attribute")
        class Foo:
            implements(IFoo)
            def foo(self):
                pass
        verifyClass(IFoo, Foo)
    def testNonMethodForMethod(self):
        # A plain attribute cannot satisfy a declared method.
        class IBar(Interface):
            def foo():
                pass
        class Bar:
            implements(IBar)
            foo = 1
        self.assertRaises(BrokenMethodImplementation, verifyClass, IBar, Bar)
def test_suite():
    """Assemble the doctest suite and the unit tests for this module."""
    suite = unittest.TestSuite()
    suite.addTest(doctest.DocFileSuite(
        '../verify.txt',
        optionflags=doctest.NORMALIZE_WHITESPACE))
    suite.addTest(unittest.TestLoader().loadTestsFromTestCase(Test))
    return suite
if __name__=='__main__':
unittest.TextTestRunner().run(test_suite())
| agpl-3.0 |
syaiful6/django | tests/gis_tests/geoapp/test_feeds.py | 292 | 4194 | from __future__ import unicode_literals
from xml.dom import minidom
from django.conf import settings
from django.contrib.sites.models import Site
from django.test import (
TestCase, modify_settings, override_settings, skipUnlessDBFeature,
)
from .models import City
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.sites'})
@override_settings(ROOT_URLCONF='gis_tests.geoapp.urls')
@skipUnlessDBFeature("gis_enabled")
class GeoFeedTest(TestCase):
    """Verify GeoRSS/W3C Geo markup emitted by the geographic feed views."""
    fixtures = ['initial']
    def setUp(self):
        # The sites framework must have a Site matching SITE_ID for feeds.
        Site(id=settings.SITE_ID, domain="example.com", name="example.com").save()
    def assertChildNodes(self, elem, expected):
        "Taken from syndication/tests.py."
        actual = set(n.nodeName for n in elem.childNodes)
        expected = set(expected)
        self.assertEqual(actual, expected)
    def test_geofeed_rss(self):
        "Tests geographic feeds using GeoRSS over RSSv2."
        # Uses `GEOSGeometry` in `item_geometry`
        doc1 = minidom.parseString(self.client.get('/feeds/rss1/').content)
        # Uses a 2-tuple in `item_geometry`
        doc2 = minidom.parseString(self.client.get('/feeds/rss2/').content)
        feed1, feed2 = doc1.firstChild, doc2.firstChild
        # Making sure the box got added to the second GeoRSS feed.
        self.assertChildNodes(feed2.getElementsByTagName('channel')[0],
                              ['title', 'link', 'description', 'language',
                               'lastBuildDate', 'item', 'georss:box', 'atom:link']
                              )
        # Incrementing through the feeds.
        for feed in [feed1, feed2]:
            # Ensuring the georss namespace was added to the <rss> element.
            self.assertEqual(feed.getAttribute('xmlns:georss'), 'http://www.georss.org/georss')
            chan = feed.getElementsByTagName('channel')[0]
            items = chan.getElementsByTagName('item')
            self.assertEqual(len(items), City.objects.count())
            # Ensuring the georss element was added to each item in the feed.
            for item in items:
                self.assertChildNodes(item, ['title', 'link', 'description', 'guid', 'georss:point'])
    def test_geofeed_atom(self):
        "Testing geographic feeds using GeoRSS over Atom."
        doc1 = minidom.parseString(self.client.get('/feeds/atom1/').content)
        doc2 = minidom.parseString(self.client.get('/feeds/atom2/').content)
        feed1, feed2 = doc1.firstChild, doc2.firstChild
        # Making sure the box got added to the second GeoRSS feed.
        self.assertChildNodes(feed2, ['title', 'link', 'id', 'updated', 'entry', 'georss:box'])
        for feed in [feed1, feed2]:
            # Ensuring the georss namespace was added to the <feed> element.
            self.assertEqual(feed.getAttribute('xmlns:georss'), 'http://www.georss.org/georss')
            entries = feed.getElementsByTagName('entry')
            self.assertEqual(len(entries), City.objects.count())
            # Ensuring the georss element was added to each entry in the feed.
            for entry in entries:
                self.assertChildNodes(entry, ['title', 'link', 'id', 'summary', 'georss:point'])
    def test_geofeed_w3c(self):
        "Testing geographic feeds using W3C Geo."
        doc = minidom.parseString(self.client.get('/feeds/w3cgeo1/').content)
        feed = doc.firstChild
        # Ensuring the geo namespace was added to the <feed> element.
        self.assertEqual(feed.getAttribute('xmlns:geo'), 'http://www.w3.org/2003/01/geo/wgs84_pos#')
        chan = feed.getElementsByTagName('channel')[0]
        items = chan.getElementsByTagName('item')
        self.assertEqual(len(items), City.objects.count())
        # Ensuring the geo:lat and geo:lon element was added to each item in the feed.
        for item in items:
            self.assertChildNodes(item, ['title', 'link', 'description', 'guid', 'geo:lat', 'geo:lon'])
        # Boxes and Polygons aren't allowed in W3C Geo feeds.
        self.assertRaises(ValueError, self.client.get, '/feeds/w3cgeo2/')  # Box in <channel>
        self.assertRaises(ValueError, self.client.get, '/feeds/w3cgeo3/')  # Polygons in <entry>
ntuecon/server | pyenv/Lib/site-packages/django/utils/translation/trans_real.py | 38 | 28271 | """Translation helper functions."""
from __future__ import unicode_literals
import gettext as gettext_module
import os
import re
import sys
import warnings
from collections import OrderedDict
from threading import local
from django.apps import apps
from django.conf import settings
from django.conf.locale import LANG_INFO
from django.core.exceptions import AppRegistryNotReady
from django.core.signals import setting_changed
from django.dispatch import receiver
from django.utils import lru_cache, six
from django.utils._os import upath
from django.utils.encoding import force_text
from django.utils.safestring import SafeData, mark_safe
from django.utils.six import StringIO
from django.utils.translation import (
LANGUAGE_SESSION_KEY, TranslatorCommentWarning, trim_whitespace,
)
# Translations are cached in a dictionary for every language.
# The active translations are stored by threadid to make them thread local.
_translations = {}
_active = local()
# The default translation is based on the settings file.
_default = None
# magic gettext number to separate context from message
CONTEXT_SEPARATOR = "\x04"
# Format of Accept-Language header values. From RFC 2616, section 14.4 and 3.9
# and RFC 3066, section 2.1
accept_language_re = re.compile(r'''
([A-Za-z]{1,8}(?:-[A-Za-z0-9]{1,8})*|\*) # "en", "en-au", "x-y-z", "es-419", "*"
(?:\s*;\s*q=(0(?:\.\d{,3})?|1(?:.0{,3})?))? # Optional "q=1.00", "q=0.8"
(?:\s*,\s*|$) # Multiple accepts per header.
''', re.VERBOSE)
language_code_re = re.compile(
r'^[a-z]{1,8}(?:-[a-z0-9]{1,8})*(?:@[a-z0-9]{1,20})?$',
re.IGNORECASE
)
language_code_prefix_re = re.compile(r'^/([\w@-]+)(/|$)')
@receiver(setting_changed)
def reset_cache(**kwargs):
    """
    Reset global state when LANGUAGES setting has been changed, as some
    languages should no longer be accepted.
    """
    # The three cleared functions below are lru_cache-wrapped and keyed on
    # language codes, so they go stale when the language settings change.
    if kwargs['setting'] in ('LANGUAGES', 'LANGUAGE_CODE'):
        check_for_language.cache_clear()
        get_languages.cache_clear()
        get_supported_language_variant.cache_clear()
def to_locale(language, to_lower=False):
    """
    Turn a language name (en-us) into a locale name (en_US).  If 'to_lower'
    is True, the last component is lower-cased instead (en_us).
    """
    sep = language.find('-')
    if sep < 0:
        # No country/script suffix: the locale is just the lower-cased code.
        return language.lower()
    lang, tail = language[:sep].lower(), language[sep + 1:]
    if to_lower:
        return lang + '_' + tail.lower()
    if len(tail) > 2:
        # Script suffixes longer than two characters are title-cased,
        # e.g. sr-latn -> sr_Latn.
        return lang + '_' + tail[0].upper() + tail[1:].lower()
    return lang + '_' + tail.upper()
def to_language(locale):
    """Turn a locale name (en_US) into a language name (en-us)."""
    sep = locale.find('_')
    if sep < 0:
        return locale.lower()
    return locale[:sep].lower() + '-' + locale[sep + 1:].lower()
class DjangoTranslation(gettext_module.GNUTranslations):
    """
    This class sets up the GNUTranslations context with regard to output
    charset.
    This translation object will be constructed out of multiple GNUTranslations
    objects by merging their catalogs. It will construct an object for the
    requested language and add a fallback to the default language, if it's
    different from the requested language.
    """
    def __init__(self, language):
        """Create a GNUTranslations() using many locale directories"""
        gettext_module.GNUTranslations.__init__(self)
        self.set_output_charset('utf-8')  # For Python 2 gettext() (#25720)
        self.__language = language
        self.__to_language = to_language(language)
        self.__locale = to_locale(language)
        self._catalog = None
        # Merge order matters: Django's own catalog first, then installed
        # apps, then LOCALE_PATHS -- later merges override earlier entries.
        self._init_translation_catalog()
        self._add_installed_apps_translations()
        self._add_local_translations()
        if self.__language == settings.LANGUAGE_CODE and self._catalog is None:
            # default lang should have at least one translation file available.
            raise IOError("No translation files found for default language %s." % settings.LANGUAGE_CODE)
        self._add_fallback()
        if self._catalog is None:
            # No catalogs found for this language, set an empty catalog.
            self._catalog = {}
    def __repr__(self):
        return "<DjangoTranslation lang:%s>" % self.__language
    def _new_gnu_trans(self, localedir, use_null_fallback=True):
        """
        Returns a mergeable gettext.GNUTranslations instance.
        A convenience wrapper. By default gettext uses 'fallback=False'.
        Using param `use_null_fallback` to avoid confusion with any other
        references to 'fallback'.
        """
        return gettext_module.translation(
            domain='django',
            localedir=localedir,
            languages=[self.__locale],
            codeset='utf-8',
            fallback=use_null_fallback)
    def _init_translation_catalog(self):
        """Creates a base catalog using global django translations."""
        settingsfile = upath(sys.modules[settings.__module__].__file__)
        localedir = os.path.join(os.path.dirname(settingsfile), 'locale')
        translation = self._new_gnu_trans(localedir)
        self.merge(translation)
    def _add_installed_apps_translations(self):
        """Merges translations from each installed app."""
        try:
            # Reversed so that earlier apps in INSTALLED_APPS win on conflict.
            app_configs = reversed(list(apps.get_app_configs()))
        except AppRegistryNotReady:
            raise AppRegistryNotReady(
                "The translation infrastructure cannot be initialized before the "
                "apps registry is ready. Check that you don't make non-lazy "
                "gettext calls at import time.")
        for app_config in app_configs:
            localedir = os.path.join(app_config.path, 'locale')
            translation = self._new_gnu_trans(localedir)
            self.merge(translation)
    def _add_local_translations(self):
        """Merges translations defined in LOCALE_PATHS."""
        for localedir in reversed(settings.LOCALE_PATHS):
            translation = self._new_gnu_trans(localedir)
            self.merge(translation)
    def _add_fallback(self):
        """Sets the GNUTranslations() fallback with the default language."""
        # Don't set a fallback for the default language or any English variant
        # (as it's empty, so it'll ALWAYS fall back to the default language)
        if self.__language == settings.LANGUAGE_CODE or self.__language.startswith('en'):
            return
        default_translation = translation(settings.LANGUAGE_CODE)
        self.add_fallback(default_translation)
    def merge(self, other):
        """Merge another translation into this catalog."""
        if not getattr(other, '_catalog', None):
            return  # NullTranslations() has no _catalog
        if self._catalog is None:
            # Take plural and _info from first catalog found (generally Django's).
            self.plural = other.plural
            self._info = other._info.copy()
            self._catalog = other._catalog.copy()
        else:
            self._catalog.update(other._catalog)
    def language(self):
        """Returns the translation language."""
        return self.__language
    def to_language(self):
        """Returns the translation language name."""
        return self.__to_language
def translation(language):
    """
    Returns a translation object.
    """
    # Translation objects are cached per language for the process lifetime.
    global _translations
    if language not in _translations:
        _translations[language] = DjangoTranslation(language)
    return _translations[language]
def activate(language):
    """
    Fetches the translation object for a given language and installs it as the
    current translation object for the current thread.
    """
    if not language:
        return
    _active.value = translation(language)
def deactivate():
    """
    Deinstalls the currently active translation object so that further _ calls
    will resolve against the default translation object, again.
    """
    if hasattr(_active, "value"):
        del _active.value
def deactivate_all():
    """
    Makes the active translation object a NullTranslations() instance. This is
    useful when we want delayed translations to appear as the original string
    for some reason.
    """
    _active.value = gettext_module.NullTranslations()
    # Patch to_language() so get_language() reports None while deactivated.
    _active.value.to_language = lambda *args: None
def get_language():
    """Returns the currently selected language."""
    t = getattr(_active, "value", None)
    if t is not None:
        try:
            return t.to_language()
        except AttributeError:
            pass
    # If we don't have a real translation object, assume it's the default language.
    return settings.LANGUAGE_CODE
def get_language_bidi():
    """
    Returns selected language's BiDi layout.
    * False = left-to-right layout
    * True = right-to-left layout
    """
    lang = get_language()
    if lang is None:
        return False
    else:
        # Only the base language (before any '-') determines directionality.
        base_lang = get_language().split('-')[0]
        return base_lang in settings.LANGUAGES_BIDI
def catalog():
    """
    Returns the current active catalog for further processing.
    This can be used if you need to modify the catalog or want to access the
    whole message catalog instead of just translating one string.
    """
    global _default
    t = getattr(_active, "value", None)
    if t is not None:
        return t
    if _default is None:
        _default = translation(settings.LANGUAGE_CODE)
    return _default
def do_translate(message, translation_function):
    """
    Translates 'message' using the given 'translation_function' name -- which
    will be either gettext or ugettext. It uses the current thread to find the
    translation object to use. If no current translation is activated, the
    message will be run through the default translation object.
    """
    global _default
    # str() is allowing a bytestring message to remain bytestring on Python 2
    eol_message = message.replace(str('\r\n'), str('\n')).replace(str('\r'), str('\n'))
    if len(eol_message) == 0:
        # Returns an empty value of the corresponding type if an empty message
        # is given, instead of metadata, which is the default gettext behavior.
        result = type(message)("")
    else:
        _default = _default or translation(settings.LANGUAGE_CODE)
        translation_object = getattr(_active, "value", _default)
        result = getattr(translation_object, translation_function)(eol_message)
    # Preserve the safe-string marking of the input on the output.
    if isinstance(message, SafeData):
        return mark_safe(result)
    return result
def gettext(message):
    """
    Returns a string of the translation of the message.
    Returns a string on Python 3 and an UTF-8-encoded bytestring on Python 2.
    """
    return do_translate(message, 'gettext')
if six.PY3:
    ugettext = gettext
else:
    def ugettext(message):
        """Return the translation of 'message' as a unicode string (Python 2)."""
        return do_translate(message, 'ugettext')
def pgettext(context, message):
    """Translate 'message' disambiguated by 'context' (gettext msgctxt)."""
    msg_with_ctxt = "%s%s%s" % (context, CONTEXT_SEPARATOR, message)
    result = ugettext(msg_with_ctxt)
    if CONTEXT_SEPARATOR in result:
        # Translation not found
        # force unicode, because lazy version expects unicode
        result = force_text(message)
    return result
def gettext_noop(message):
    """
    Marks strings for translation but doesn't translate them now. This can be
    used to store strings in global variables that should stay in the base
    language (because they might be used externally) and will be translated
    later.
    """
    return message
def do_ntranslate(singular, plural, number, translation_function):
    """Dispatch a plural translation to the active (or default) catalog."""
    global _default
    t = getattr(_active, "value", None)
    if t is not None:
        return getattr(t, translation_function)(singular, plural, number)
    if _default is None:
        _default = translation(settings.LANGUAGE_CODE)
    return getattr(_default, translation_function)(singular, plural, number)
def ngettext(singular, plural, number):
    """
    Returns a string of the translation of either the singular or plural,
    based on the number.
    Returns a string on Python 3 and an UTF-8-encoded bytestring on Python 2.
    """
    return do_ntranslate(singular, plural, number, 'ngettext')
if six.PY3:
    ungettext = ngettext
else:
    def ungettext(singular, plural, number):
        """
        Returns a unicode strings of the translation of either the singular or
        plural, based on the number.
        """
        return do_ntranslate(singular, plural, number, 'ungettext')
def npgettext(context, singular, plural, number):
    """Plural translation disambiguated by 'context' (gettext msgctxt)."""
    msgs_with_ctxt = ("%s%s%s" % (context, CONTEXT_SEPARATOR, singular),
                      "%s%s%s" % (context, CONTEXT_SEPARATOR, plural),
                      number)
    result = ungettext(*msgs_with_ctxt)
    if CONTEXT_SEPARATOR in result:
        # Translation not found
        result = ungettext(singular, plural, number)
    return result
def all_locale_paths():
    """
    Returns a list of paths to user-provided languages files.
    """
    globalpath = os.path.join(
        os.path.dirname(upath(sys.modules[settings.__module__].__file__)), 'locale')
    return [globalpath] + list(settings.LOCALE_PATHS)
@lru_cache.lru_cache(maxsize=1000)
def check_for_language(lang_code):
    """
    Checks whether there is a global language file for the given language
    code. This is used to decide whether a user-provided language is
    available.
    lru_cache should have a maxsize to prevent from memory exhaustion attacks,
    as the provided language codes are taken from the HTTP request. See also
    <https://www.djangoproject.com/weblog/2007/oct/26/security-fix/>.
    """
    # First, a quick check to make sure lang_code is well-formed (#21458)
    if lang_code is None or not language_code_re.search(lang_code):
        return False
    for path in all_locale_paths():
        if gettext_module.find('django', path, [to_locale(lang_code)]) is not None:
            return True
    return False
@lru_cache.lru_cache()
def get_languages():
    """
    Cache of settings.LANGUAGES in an OrderedDict for easy lookups by key.
    """
    return OrderedDict(settings.LANGUAGES)
@lru_cache.lru_cache(maxsize=1000)
def get_supported_language_variant(lang_code, strict=False):
    """
    Returns the language-code that's listed in supported languages, possibly
    selecting a more generic variant. Raises LookupError if nothing found.

    If `strict` is False (the default), the function will look for an alternative
    country-specific variant when the currently checked is not found.

    lru_cache should have a maxsize to prevent from memory exhaustion attacks,
    as the provided language codes are taken from the HTTP request. See also
    <https://www.djangoproject.com/weblog/2007/oct/26/security-fix/>.
    """
    if lang_code:
        # If 'fr-ca' is not supported, try special fallback or language-only 'fr'.
        # Candidate order matters: exact code, LANG_INFO fallbacks, then the
        # generic (pre-dash) language code.
        possible_lang_codes = [lang_code]
        try:
            possible_lang_codes.extend(LANG_INFO[lang_code]['fallback'])
        except KeyError:
            pass
        generic_lang_code = lang_code.split('-')[0]
        possible_lang_codes.append(generic_lang_code)
        supported_lang_codes = get_languages()
        for code in possible_lang_codes:
            if code in supported_lang_codes and check_for_language(code):
                return code
        if not strict:
            # if fr-fr is not supported, try fr-ca.
            for supported_code in supported_lang_codes:
                if supported_code.startswith(generic_lang_code + '-'):
                    return supported_code
    raise LookupError(lang_code)
def get_language_from_path(path, strict=False):
    """
    Return the supported language code found as a prefix of ``path``, or
    ``None`` when the path carries no usable language prefix.

    If ``strict`` is False (the default), an alternative country-specific
    variant is considered when the checked code itself is not supported.
    """
    match = language_code_prefix_re.match(path)
    if match is None:
        return None
    try:
        return get_supported_language_variant(match.group(1), strict=strict)
    except LookupError:
        return None
def get_language_from_request(request, check_path=False):
    """
    Analyzes the request to find what language the user wants the system to
    show. Only languages listed in settings.LANGUAGES are taken into account.
    If the user requests a sublanguage where we have a main language, we send
    out the main language.

    If check_path is True, the URL path prefix will be checked for a language
    code, otherwise this is skipped for backwards compatibility.

    Lookup order: URL path prefix (optional), session, cookie,
    Accept-Language header, and finally settings.LANGUAGE_CODE.
    """
    if check_path:
        lang_code = get_language_from_path(request.path_info)
        if lang_code is not None:
            return lang_code
    supported_lang_codes = get_languages()
    # 1. Session-stored language, if present and still available.
    if hasattr(request, 'session'):
        lang_code = request.session.get(LANGUAGE_SESSION_KEY)
        if lang_code in supported_lang_codes and lang_code is not None and check_for_language(lang_code):
            return lang_code
    # 2. Language cookie (may be None; LookupError then falls through).
    lang_code = request.COOKIES.get(settings.LANGUAGE_COOKIE_NAME)
    try:
        return get_supported_language_variant(lang_code)
    except LookupError:
        pass
    # 3. Accept-Language header, highest q-value first; '*' stops the scan.
    accept = request.META.get('HTTP_ACCEPT_LANGUAGE', '')
    for accept_lang, unused in parse_accept_lang_header(accept):
        if accept_lang == '*':
            break
        if not language_code_re.search(accept_lang):
            continue
        try:
            return get_supported_language_variant(accept_lang)
        except LookupError:
            continue
    # 4. Project default; returned verbatim even if itself unsupported.
    try:
        return get_supported_language_variant(settings.LANGUAGE_CODE)
    except LookupError:
        return settings.LANGUAGE_CODE
# Matches any single non-whitespace character.
dot_re = re.compile(r'\S')

def blankout(src, char):
    """
    Replace every non-whitespace character of ``src`` with ``char``.

    Whitespace (and therefore line layout) is preserved; templatize() relies
    on this to keep source positions stable while hiding template content.
    """
    blanked = dot_re.sub(char, src)
    return blanked
# Extracts the quoted context argument from a trans/blocktrans tag tail.
context_re = re.compile(r"""^\s+.*context\s+((?:"[^"]*?")|(?:'[^']*?'))\s*""")
# Matches an inline {% trans %} tag: quoted message, optional filters and
# an optional context argument.
inline_re = re.compile(
    # Match the trans 'some text' part
    r"""^\s*trans\s+((?:"[^"]*?")|(?:'[^']*?'))"""
    # Match and ignore optional filters
    r"""(?:\s*\|\s*[^\s:]+(?::(?:[^\s'":]+|(?:"[^"]*?")|(?:'[^']*?')))?)*"""
    # Match the optional context part
    r"""(\s+.*context\s+((?:"[^"]*?")|(?:'[^']*?')))?\s*"""
)
# Opening {% blocktrans %} tag, optionally with a context argument.
block_re = re.compile(r"""^\s*blocktrans(\s+.*context\s+((?:"[^"]*?")|(?:'[^']*?')))?(?:\s+|$)""")
# Closing {% endblocktrans %} tag.
endblock_re = re.compile(r"""^\s*endblocktrans$""")
# {% plural %} separator inside a blocktrans block.
plural_re = re.compile(r"""^\s*plural$""")
# _('...') / _("...") constant-string invocations inside tag contents.
constant_re = re.compile(r"""_\(((?:".*?")|(?:'.*?'))\)""")
def templatize(src, origin=None):
    """
    Turns a Django template into something that is understood by xgettext. It
    does so by translating the Django translation tags into standard gettext
    function invocations.

    Implemented as a state machine over the template's token stream: plain
    content is blanked out (preserving whitespace so xgettext's line numbers
    stay accurate), while trans/blocktrans/comment constructs are rewritten
    into gettext/ngettext/pgettext/npgettext calls written to ``out``.

    ``origin`` is an optional source identifier used only in error and
    warning messages.
    """
    from django.template.base import (Lexer, TOKEN_TEXT, TOKEN_VAR,
        TOKEN_BLOCK, TOKEN_COMMENT, TRANSLATOR_COMMENT_MARK)
    src = force_text(src, settings.FILE_CHARSET)
    out = StringIO('')
    # Parser state: inside {% comment %}, inside {% trans/blocktrans %},
    # inside the {% plural %} half, plus accumulated message parts.
    message_context = None
    intrans = False
    inplural = False
    trimmed = False
    singular = []
    plural = []
    incomment = False
    comment = []
    lineno_comment_map = {}
    comment_lineno_cache = None

    def join_tokens(tokens, trim=False):
        # Concatenate collected message parts, optionally normalizing
        # whitespace for {% blocktrans trimmed %}.
        message = ''.join(tokens)
        if trim:
            message = trim_whitespace(message)
        return message

    for t in Lexer(src).tokenize():
        if incomment:
            # Inside {% comment %}: buffer content until {% endcomment %},
            # then emit any translator-targeted lines as '#' comments.
            if t.token_type == TOKEN_BLOCK and t.contents == 'endcomment':
                content = ''.join(comment)
                translators_comment_start = None
                for lineno, line in enumerate(content.splitlines(True)):
                    if line.lstrip().startswith(TRANSLATOR_COMMENT_MARK):
                        translators_comment_start = lineno
                for lineno, line in enumerate(content.splitlines(True)):
                    if translators_comment_start is not None and lineno >= translators_comment_start:
                        out.write(' # %s' % line)
                    else:
                        out.write(' #\n')
                incomment = False
                comment = []
            else:
                comment.append(t.contents)
        elif intrans:
            # Inside {% trans %} / {% blocktrans %}: collect singular/plural
            # message parts until {% endblocktrans %}.
            if t.token_type == TOKEN_BLOCK:
                endbmatch = endblock_re.match(t.contents)
                pluralmatch = plural_re.match(t.contents)
                if endbmatch:
                    if inplural:
                        if message_context:
                            out.write(' npgettext(%r, %r, %r,count) ' % (
                                message_context,
                                join_tokens(singular, trimmed),
                                join_tokens(plural, trimmed)))
                        else:
                            out.write(' ngettext(%r, %r, count) ' % (
                                join_tokens(singular, trimmed),
                                join_tokens(plural, trimmed)))
                        for part in singular:
                            out.write(blankout(part, 'S'))
                        for part in plural:
                            out.write(blankout(part, 'P'))
                    else:
                        if message_context:
                            out.write(' pgettext(%r, %r) ' % (
                                message_context,
                                join_tokens(singular, trimmed)))
                        else:
                            out.write(' gettext(%r) ' % join_tokens(singular,
                                                                    trimmed))
                        for part in singular:
                            out.write(blankout(part, 'S'))
                    message_context = None
                    intrans = False
                    inplural = False
                    singular = []
                    plural = []
                elif pluralmatch:
                    inplural = True
                else:
                    filemsg = ''
                    if origin:
                        filemsg = 'file %s, ' % origin
                    raise SyntaxError(
                        "Translation blocks must not include other block tags: "
                        "%s (%sline %d)" % (t.contents, filemsg, t.lineno)
                    )
            elif t.token_type == TOKEN_VAR:
                # Template variables become named placeholders in the msgid.
                if inplural:
                    plural.append('%%(%s)s' % t.contents)
                else:
                    singular.append('%%(%s)s' % t.contents)
            elif t.token_type == TOKEN_TEXT:
                contents = t.contents.replace('%', '%%')
                if inplural:
                    plural.append(contents)
                else:
                    singular.append(contents)
        else:
            # Handle comment tokens (`{# ... #}`) plus other constructs on
            # the same line:
            if comment_lineno_cache is not None:
                cur_lineno = t.lineno + t.contents.count('\n')
                if comment_lineno_cache == cur_lineno:
                    if t.token_type != TOKEN_COMMENT:
                        # A translator comment must be the last thing on its
                        # line; otherwise it is dropped with a warning.
                        for c in lineno_comment_map[comment_lineno_cache]:
                            filemsg = ''
                            if origin:
                                filemsg = 'file %s, ' % origin
                            warn_msg = ("The translator-targeted comment '%s' "
                                "(%sline %d) was ignored, because it wasn't the last item "
                                "on the line.") % (c, filemsg, comment_lineno_cache)
                            warnings.warn(warn_msg, TranslatorCommentWarning)
                        lineno_comment_map[comment_lineno_cache] = []
                else:
                    out.write('# %s' % ' | '.join(lineno_comment_map[comment_lineno_cache]))
                comment_lineno_cache = None
            if t.token_type == TOKEN_BLOCK:
                imatch = inline_re.match(t.contents)
                bmatch = block_re.match(t.contents)
                cmatches = constant_re.findall(t.contents)
                if imatch:
                    # Inline {% trans "..." %} -> gettext()/pgettext() call.
                    g = imatch.group(1)
                    if g[0] == '"':
                        g = g.strip('"')
                    elif g[0] == "'":
                        g = g.strip("'")
                    g = g.replace('%', '%%')
                    if imatch.group(2):
                        # A context is provided
                        context_match = context_re.match(imatch.group(2))
                        message_context = context_match.group(1)
                        if message_context[0] == '"':
                            message_context = message_context.strip('"')
                        elif message_context[0] == "'":
                            message_context = message_context.strip("'")
                        out.write(' pgettext(%r, %r) ' % (message_context, g))
                        message_context = None
                    else:
                        out.write(' gettext(%r) ' % g)
                elif bmatch:
                    # Opening {% blocktrans %}: switch into collection mode.
                    for fmatch in constant_re.findall(t.contents):
                        out.write(' _(%s) ' % fmatch)
                    if bmatch.group(1):
                        # A context is provided
                        context_match = context_re.match(bmatch.group(1))
                        message_context = context_match.group(1)
                        if message_context[0] == '"':
                            message_context = message_context.strip('"')
                        elif message_context[0] == "'":
                            message_context = message_context.strip("'")
                    intrans = True
                    inplural = False
                    trimmed = 'trimmed' in t.split_contents()
                    singular = []
                    plural = []
                elif cmatches:
                    for cmatch in cmatches:
                        out.write(' _(%s) ' % cmatch)
                elif t.contents == 'comment':
                    incomment = True
                else:
                    out.write(blankout(t.contents, 'B'))
            elif t.token_type == TOKEN_VAR:
                # {{ var|filter }}: keep _() constants and filter arguments
                # that contain _() calls; blank out the rest.
                parts = t.contents.split('|')
                cmatch = constant_re.match(parts[0])
                if cmatch:
                    out.write(' _(%s) ' % cmatch.group(1))
                for p in parts[1:]:
                    if p.find(':_(') >= 0:
                        out.write(' %s ' % p.split(':', 1)[1])
                    else:
                        out.write(blankout(p, 'F'))
            elif t.token_type == TOKEN_COMMENT:
                if t.contents.lstrip().startswith(TRANSLATOR_COMMENT_MARK):
                    lineno_comment_map.setdefault(t.lineno,
                                                  []).append(t.contents)
                    comment_lineno_cache = t.lineno
            else:
                out.write(blankout(t.contents, 'X'))
    return out.getvalue()
def parse_accept_lang_header(lang_string):
    """
    Parse the body of an HTTP Accept-Language header into a list of
    (lang, q-value) pairs ordered by descending q-value.

    Any format error in ``lang_string`` yields an empty list.
    """
    parsed = []
    bits = accept_language_re.split(lang_string.lower())
    if bits[-1]:
        # Trailing text the regex could not consume: malformed header.
        return []
    # The regex produces groups of three per entry: leading junk (must be
    # empty), the language tag, and its optional priority.
    for i in range(0, len(bits) - 1, 3):
        leading, lang, priority = bits[i:i + 3]
        if leading:
            return []
        if priority:
            try:
                priority = float(priority)
            except ValueError:
                return []
        if not priority:  # if priority is 0.0 at this point make it 1.0
            priority = 1.0
        parsed.append((lang, priority))
    parsed.sort(key=lambda pair: pair[1], reverse=True)
    return parsed
| bsd-3-clause |
Salat-Cx65/python-for-android | python3-alpha/python3-src/Doc/includes/dbpickle.py | 100 | 2863 | # Simple example presenting how persistent ID can be used to pickle
# external objects by reference.
import pickle
import sqlite3
from collections import namedtuple
# Simple record type mirroring one row of the demo database.
MemoRecord = namedtuple("MemoRecord", "key, task")


class DBPickler(pickle.Pickler):
    """Pickler that stores MemoRecord instances by database reference."""

    def persistent_id(self, obj):
        """Return a persistent ID for records stored externally, else None."""
        if not isinstance(obj, MemoRecord):
            # None tells the pickler to serialize obj inline as usual.
            return None
        # A (tag, key) tuple referencing the record's row in the database,
        # emitted instead of the record's own state.
        return ("MemoRecord", obj.key)
class DBUnpickler(pickle.Unpickler):
    """Unpickler that resolves DBPickler's persistent IDs against a database."""

    def __init__(self, file, connection):
        # `connection` is the live DB handle used to resolve persistent IDs.
        super().__init__(file)
        self.connection = connection

    def persistent_load(self, pid):
        # This method is invoked whenever a persistent ID is encountered.
        # Here, pid is the tuple returned by DBPickler.
        cursor = self.connection.cursor()
        type_tag, key_id = pid
        if type_tag == "MemoRecord":
            # Fetch the referenced record from the database and return it.
            cursor.execute("SELECT * FROM memos WHERE key=?", (str(key_id),))
            key, task = cursor.fetchone()
            return MemoRecord(key, task)
        else:
            # Always raises an error if you cannot return the correct object.
            # Otherwise, the unpickler will think None is the object referenced
            # by the persistent ID.
            raise pickle.UnpicklingError("unsupported persistent object")
def main():
    """Demo: pickle records by reference, mutate the DB, then unpickle."""
    import io
    import pprint

    # Initialize and populate our database.
    conn = sqlite3.connect(":memory:")
    cursor = conn.cursor()
    cursor.execute("CREATE TABLE memos(key INTEGER PRIMARY KEY, task TEXT)")
    tasks = (
        'give food to fish',
        'prepare group meeting',
        'fight with a zebra',
    )
    for task in tasks:
        cursor.execute("INSERT INTO memos VALUES(NULL, ?)", (task,))

    # Fetch the records to be pickled.
    cursor.execute("SELECT * FROM memos")
    memos = [MemoRecord(key, task) for key, task in cursor]
    # Save the records using our custom DBPickler.
    file = io.BytesIO()
    DBPickler(file).dump(memos)

    print("Pickled records:")
    pprint.pprint(memos)

    # Update a record, just for good measure.
    # Because records are pickled by reference, unpickling below will see
    # this change rather than the originally dumped value.
    cursor.execute("UPDATE memos SET task='learn italian' WHERE key=1")

    # Load the records from the pickle data stream.
    file.seek(0)
    memos = DBUnpickler(file, conn).load()

    print("Unpickled records:")
    pprint.pprint(memos)


if __name__ == '__main__':
    main()
| apache-2.0 |
ionrock/designate | designate/sqlalchemy/session.py | 6 | 1613 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Session Handling for SQLAlchemy backend."""
from oslo_config import cfg
from oslo_db.sqlalchemy import session
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
_FACADES = {}
def _create_facade_lazily(cfg_group, connection=None, discriminator=None):
    """
    Return the EngineFacade for (cfg_group, discriminator), creating and
    caching it on first use.

    NOTE(review): the cache key does not include the connection string, so a
    later call with a different explicit ``connection`` for the same
    group/discriminator pair returns the originally cached facade.
    """
    if not connection:
        connection = cfg.CONF[cfg_group].connection
    cache_name = "%s:%s" % (cfg_group, discriminator)
    facade = _FACADES.get(cache_name)
    if facade is None:
        facade = session.EngineFacade(
            connection,
            **dict(cfg.CONF[cfg_group].items()))
        _FACADES[cache_name] = facade
    return facade
def get_engine(cfg_group):
    """Return the SQLAlchemy engine for the given config group."""
    return _create_facade_lazily(cfg_group).get_engine()
def get_session(cfg_group, connection=None, discriminator=None, **kwargs):
    """Return a new SQLAlchemy session for the given config group."""
    facade = _create_facade_lazily(cfg_group, connection, discriminator)
    session_ = facade.get_session(**kwargs)
    return session_
| apache-2.0 |
retr0h/saltmeter | tests/test_utils.py | 1 | 1036 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013, John Dewey
# All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest2 as unittest
from saltmeter import utils
class TestUtils(unittest.TestCase):
    """Smoke tests for the Utils.execute() command runner."""

    def test_execute(self):
        # A command that exists should report success (truthy result).
        runner = utils.Utils()
        self.assertTrue(runner.execute('ls'))

    def test_execute_invalid_command(self):
        # An unknown command should report failure (falsy result).
        runner = utils.Utils()
        self.assertFalse(runner.execute('invalid-command'))
| apache-2.0 |
zbanks/musicazoo | musicazoo/volume/__main__.py | 1 | 1864 | import shmooze.lib.service as service
import shmooze.settings as settings
import os
import signal
import math
try:
import alsaaudio
except:
alsaaudio = None
try:
import osax
except:
osax = None
# Approximate perceptual-curve exponent used to map human-facing volume
# percentages onto the mixer's linear scale (and back).
exp = 0.6

def human_to_computer(val):
    """Map a human-facing volume percentage (0-100) to the mixer scale."""
    fraction = float(val) / 100
    return int(100 * fraction ** exp)

def computer_to_human(val):
    """Inverse of human_to_computer: mixer scale back to a percentage."""
    fraction = float(val) / 100
    return int(100 * fraction ** (1.0 / exp))
class Volume(service.JSONCommandProcessor, service.Service):
    """JSON-command service exposing get_vol/set_vol over TCP.

    Uses ALSA on Linux, System Events (osax) on OS X, or a no-op fallback
    when neither backend imported successfully.
    """
    # TCP port this service listens on, taken from shmooze settings.
    port=settings.ports["vol"]

    def __init__(self):
        print "Volume started."
        # Pick whichever mixer backend is available; fall back to fake mode.
        if alsaaudio:
            self.mixer=alsaaudio.Mixer(control='PCM')
        elif osax:
            self.mixer = osax.OSAX()
        else:
            print "Unable to control volume"

        # JSONCommandService handles all of the low-level TCP connection stuff.
        super(Volume, self).__init__()

    @service.coroutine
    def get_vol(self):
        # Read the raw mixer level, then convert to the human 0-100 scale.
        if alsaaudio:
            v=self.mixer.getvolume()[0]
        elif osax:
            v=self.mixer.get_volume_settings()[osax.k.output_volume]
        else:
            v=0
        raise service.Return({'vol': computer_to_human(v)})

    @service.coroutine
    def set_vol(self,vol):
        # Convert the human 0-100 value onto the mixer scale before setting.
        v=human_to_computer(vol)
        if alsaaudio:
            self.mixer.setvolume(v)
        elif osax:
            self.mixer.set_volume(output_volume=v)
        else:
            print "Setting fake volume: ", v
        raise service.Return({})

    def shutdown(self):
        # Stop the shared IO loop; terminates the service's main loop.
        service.ioloop.stop()

    # Dispatch table mapping JSON command names to handler coroutines.
    commands={
        'set_vol': set_vol,
        'get_vol': get_vol
    }
# Instantiate the service and run it until a termination signal arrives.
vol = Volume()

def shutdown_handler(signum,frame):
    # Signal handlers must not touch the loop directly; hand the shutdown
    # over to the IO loop thread-safely.
    print
    print "Received signal, attempting graceful shutdown..."
    service.ioloop.add_callback_from_signal(vol.shutdown)

# Shut down cleanly on SIGTERM and Ctrl-C.
signal.signal(signal.SIGTERM, shutdown_handler)
signal.signal(signal.SIGINT, shutdown_handler)

service.ioloop.start()
| mit |
mancoast/CPythonPyc_test | cpython/222_test_richcmp.py | 10 | 7187 | # Tests for rich comparisons
from test_support import TestFailed, verify, verbose
class Number:
    """Wrapper that delegates all six rich comparisons to the wrapped value.

    __cmp__ raises so the test fails loudly if the interpreter ever falls
    back to three-way comparison instead of the rich-comparison slots.
    """
    def __init__(self, x):
        self.x = x

    def __lt__(self, other):
        return self.x < other

    def __le__(self, other):
        return self.x <= other

    def __eq__(self, other):
        return self.x == other

    def __ne__(self, other):
        return self.x != other

    def __gt__(self, other):
        return self.x > other

    def __ge__(self, other):
        return self.x >= other

    def __cmp__(self, other):
        raise TestFailed, "Number.__cmp__() should not be called"

    def __repr__(self):
        return "Number(%s)" % repr(self.x)
class Vector:
    """Sequence whose rich comparisons work elementwise and return Vectors.

    Hashing and truth-testing raise TypeError on purpose: since comparisons
    return Vectors (not bools), using one in a Boolean context is a test
    error. __cmp__ raises so three-way comparison fallback is detected.
    """
    def __init__(self, data):
        self.data = data

    def __len__(self):
        return len(self.data)

    def __getitem__(self, i):
        return self.data[i]

    def __setitem__(self, i, v):
        self.data[i] = v

    def __hash__(self):
        raise TypeError, "Vectors cannot be hashed"

    def __nonzero__(self):
        raise TypeError, "Vectors cannot be used in Boolean contexts"

    def __cmp__(self, other):
        raise TestFailed, "Vector.__cmp__() should not be called"

    def __repr__(self):
        return "Vector(%s)" % repr(self.data)

    def __lt__(self, other):
        return Vector([a < b for a, b in zip(self.data, self.__cast(other))])

    def __le__(self, other):
        return Vector([a <= b for a, b in zip(self.data, self.__cast(other))])

    def __eq__(self, other):
        return Vector([a == b for a, b in zip(self.data, self.__cast(other))])

    def __ne__(self, other):
        return Vector([a != b for a, b in zip(self.data, self.__cast(other))])

    def __gt__(self, other):
        return Vector([a > b for a, b in zip(self.data, self.__cast(other))])

    def __ge__(self, other):
        return Vector([a >= b for a, b in zip(self.data, self.__cast(other))])

    def __cast(self, other):
        # Accept either another Vector or any same-length sequence.
        if isinstance(other, Vector):
            other = other.data
        if len(self.data) != len(other):
            raise ValueError, "Cannot compare vectors of different length"
        return other
# The six rich-comparison operators under test, and a map from each
# operator's symbol to an equivalent callable.
operators = "<", "<=", "==", "!=", ">", ">="
opmap = {
    "<": lambda a, b: a < b,
    "<=": lambda a, b: a <= b,
    "==": lambda a, b: a == b,
    "!=": lambda a, b: a != b,
    ">": lambda a, b: a > b,
    ">=": lambda a, b: a >= b,
}
def testvector():
    """Exercise elementwise Vector comparisons and their error cases."""
    # Different lengths must raise ValueError for every operator.
    a = Vector(range(2))
    b = Vector(range(3))
    for op in operators:
        try:
            opmap[op](a, b)
        except ValueError:
            pass
        else:
            raise TestFailed, "a %s b for different length should fail" % op
    a = Vector(range(5))
    b = Vector(5 * [2])
    for op in operators:
        print "%23s %-2s %-23s -> %s" % (a, op, b, opmap[op](a, b))
        print "%23s %-2s %-23s -> %s" % (a, op, b.data, opmap[op](a, b.data))
        print "%23s %-2s %-23s -> %s" % (a.data, op, b, opmap[op](a.data, b))
        # A comparison result is a Vector, whose __nonzero__ raises, so
        # using it in an if-statement must raise TypeError before either
        # TestFailed branch is reached.
        try:
            if opmap[op](a, b):
                raise TestFailed, "a %s b shouldn't be true" % op
            else:
                raise TestFailed, "a %s b shouldn't be false" % op
        except TypeError:
            pass
def testop(a, b, op):
    """Check that comparing wrappers agrees with comparing raw values."""
    # Unwrap Number instances; plain ints pass through unchanged.
    try:
        ax = a.x
    except AttributeError:
        ax = a
    try:
        bx = b.x
    except AttributeError:
        bx = b
    opfunc = opmap[op]
    realoutcome = opfunc(ax, bx)
    testoutcome = opfunc(a, b)
    if realoutcome != testoutcome:
        print "Error for", a, op, b, ": expected", realoutcome,
        print "but got", testoutcome
##    else:
##        print a, op, b, "-->", testoutcome # and "true" or "false"
def testit(a, b):
    """Run testop() on a and b for every rich-comparison operator."""
    for op in operators:
        testop(a, b, op)
def basic():
    """Compare Number-vs-Number and mixed Number/int pairs over 0..2."""
    for left in range(3):
        for right in range(3):
            testit(Number(left), Number(right))
            testit(left, Number(right))
            testit(Number(left), right)
def tabulate(c1=Number, c2=Number):
    """Print a comparison table for each operator over 0..2 x 0..2.

    c1/c2 wrap the row/column values; passing int exercises mixed-type
    comparisons.
    """
    for op in operators:
        opfunc = opmap[op]
        print
        print "operator:", op
        print
        print "%9s" % "",
        for b in range(3):
            b = c2(b)
            print "| %9s" % b,
        print "|"
        print '----------+-' * 4
        for a in range(3):
            a = c1(a)
            print "%9s" % a,
            for b in range(3):
                b = c2(b)
                print "| %9s" % opfunc(a, b),
            print "|"
            print '----------+-' * 4
        print
    print '*' * 50
def misbehavin():
    """Verify rich comparisons win over __cmp__, but cmp() still uses it."""
    class Misb:
        # Rich slots return 0; their reflected counterparts raise so we can
        # tell which slot was actually consulted.
        def __lt__(self, other): return 0
        def __gt__(self, other): return 0
        def __eq__(self, other): return 0
        def __le__(self, other): raise TestFailed, "This shouldn't happen"
        def __ge__(self, other): raise TestFailed, "This shouldn't happen"
        def __ne__(self, other): raise TestFailed, "This shouldn't happen"
        def __cmp__(self, other): raise RuntimeError, "expected"
    a = Misb()
    b = Misb()
    verify((a<b) == 0)
    verify((a==b) == 0)
    verify((a>b) == 0)
    # cmp() bypasses the rich slots and must hit __cmp__, which raises.
    try:
        print cmp(a, b)
    except RuntimeError:
        pass
    else:
        raise TestFailed, "cmp(Misb(), Misb()) didn't raise RuntimeError"
def recursion():
    """Compare self-referential containers without infinite recursion."""
    from UserList import UserList
    # a and b each contain themselves.
    a = UserList(); a.append(a)
    b = UserList(); b.append(b)
    def check(s, a=a, b=b):
        # Evaluate the comparison expression; RuntimeError here would mean
        # the interpreter recursed instead of detecting the cycle.
        if verbose:
            print "check", s
        try:
            if not eval(s):
                raise TestFailed, s + " was false but expected to be true"
        except RuntimeError, msg:
            raise TestFailed, str(msg)
    if verbose:
        print "recursion tests: a=%s, b=%s" % (a, b)
    check('a==b')
    check('not a!=b')
    # Making the structures differ must flip the comparison results.
    a.append(1)
    if verbose:
        print "recursion tests: a=%s, b=%s" % (a, b)
    check('a!=b')
    check('not a==b')
    b.append(0)
    if verbose:
        print "recursion tests: a=%s, b=%s" % (a, b)
    check('a!=b')
    check('not a==b')
    a[1] = -1
    if verbose:
        print "recursion tests: a=%s, b=%s" % (a, b)
    check('a!=b')
    check('not a==b')
    if verbose: print "recursion tests ok"
def dicts():
    # Verify that __eq__ and __ne__ work for dicts even if the keys and
    # values don't support anything other than __eq__ and __ne__. Complex
    # numbers are a fine example of that.
    import random
    imag1a = {}
    for i in range(50):
        imag1a[random.randrange(100)*1j] = random.randrange(100)*1j
    # imag1b: same mapping inserted in a shuffled order -- must still
    # compare equal to imag1a.
    items = imag1a.items()
    random.shuffle(items)
    imag1b = {}
    for k, v in items:
        imag1b[k] = v
    # imag2: identical except one perturbed value -- must compare unequal.
    imag2 = imag1b.copy()
    imag2[k] = v + 1.0
    verify(imag1a == imag1a, "imag1a == imag1a should have worked")
    verify(imag1a == imag1b, "imag1a == imag1b should have worked")
    verify(imag2 == imag2, "imag2 == imag2 should have worked")
    verify(imag1a != imag2, "imag1a != imag2 should have worked")
    # Ordering comparisons involve complex values and must raise TypeError.
    for op in "<", "<=", ">", ">=":
        try:
            eval("imag1a %s imag2" % op)
        except TypeError:
            pass
        else:
            raise TestFailed("expected TypeError from imag1a %s imag2" % op)
def main():
    """Run every rich-comparison test group in order."""
    basic()
    tabulate()
    tabulate(c1=int)
    tabulate(c2=int)
    testvector()
    misbehavin()
    recursion()
    dicts()

# Tests run on import, in the pre-unittest style of this test module.
main()
idovear/odoo | addons/stock/tests/test_stock_flow.py | 219 | 74767 | # -*- coding: utf-8 -*-
from openerp.addons.stock.tests.common import TestStockCommon
from openerp.tools import mute_logger, float_round
class TestStockFlow(TestStockCommon):
@mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models')
def test_00_picking_create_and_transfer_quantity(self):
""" Basic stock operation on incoming and outgoing shipment. """
LotObj = self.env['stock.production.lot']
# ----------------------------------------------------------------------
# Create incoming shipment of product A, B, C, D
# ----------------------------------------------------------------------
# Product A ( 1 Unit ) , Product C ( 10 Unit )
# Product B ( 1 Unit ) , Product D ( 10 Unit )
# Product D ( 5 Unit )
# ----------------------------------------------------------------------
picking_in = self.PickingObj.create({
'partner_id': self.partner_delta_id,
'picking_type_id': self.picking_type_in})
self.MoveObj.create({
'name': self.productA.name,
'product_id': self.productA.id,
'product_uom_qty': 1,
'product_uom': self.productA.uom_id.id,
'picking_id': picking_in.id,
'location_id': self.supplier_location,
'location_dest_id': self.stock_location})
self.MoveObj.create({
'name': self.productB.name,
'product_id': self.productB.id,
'product_uom_qty': 1,
'product_uom': self.productB.uom_id.id,
'picking_id': picking_in.id,
'location_id': self.supplier_location,
'location_dest_id': self.stock_location})
self.MoveObj.create({
'name': self.productC.name,
'product_id': self.productC.id,
'product_uom_qty': 10,
'product_uom': self.productC.uom_id.id,
'picking_id': picking_in.id,
'location_id': self.supplier_location,
'location_dest_id': self.stock_location})
self.MoveObj.create({
'name': self.productD.name,
'product_id': self.productD.id,
'product_uom_qty': 10,
'product_uom': self.productD.uom_id.id,
'picking_id': picking_in.id,
'location_id': self.supplier_location,
'location_dest_id': self.stock_location})
self.MoveObj.create({
'name': self.productD.name,
'product_id': self.productD.id,
'product_uom_qty': 5,
'product_uom': self.productD.uom_id.id,
'picking_id': picking_in.id,
'location_id': self.supplier_location,
'location_dest_id': self.stock_location})
# Check incoming shipment move lines state.
for move in picking_in.move_lines:
self.assertEqual(move.state, 'draft', 'Wrong state of move line.')
# Confirm incoming shipment.
picking_in.action_confirm()
# Check incoming shipment move lines state.
for move in picking_in.move_lines:
self.assertEqual(move.state, 'assigned', 'Wrong state of move line.')
# ----------------------------------------------------------------------
# Replace pack operation of incoming shipments.
# ----------------------------------------------------------------------
picking_in.do_prepare_partial()
self.StockPackObj.search([('product_id', '=', self.productA.id), ('picking_id', '=', picking_in.id)]).write({
'product_qty': 4.0})
self.StockPackObj.search([('product_id', '=', self.productB.id), ('picking_id', '=', picking_in.id)]).write({
'product_qty': 5.0})
self.StockPackObj.search([('product_id', '=', self.productC.id), ('picking_id', '=', picking_in.id)]).write({
'product_qty': 5.0})
self.StockPackObj.search([('product_id', '=', self.productD.id), ('picking_id', '=', picking_in.id)]).write({
'product_qty': 5.0})
lot2_productC = LotObj.create({'name': 'C Lot 2', 'product_id': self.productC.id})
self.StockPackObj.create({
'product_id': self.productC.id,
'product_qty': 2,
'product_uom_id': self.productC.uom_id.id,
'location_id': self.supplier_location,
'location_dest_id': self.stock_location,
'picking_id': picking_in.id,
'lot_id': lot2_productC.id})
self.StockPackObj.create({
'product_id': self.productD.id,
'product_qty': 2,
'product_uom_id': self.productD.uom_id.id,
'location_id': self.supplier_location,
'location_dest_id': self.stock_location,
'picking_id': picking_in.id})
# Check incoming shipment total quantity of pack operation
packs = self.StockPackObj.search([('picking_id', '=', picking_in.id)])
total_qty = [pack.product_qty for pack in packs]
self.assertEqual(sum(total_qty), 23, 'Wrong quantity in pack operation (%s found instead of 23)' % (sum(total_qty)))
# Transfer Incoming Shipment.
picking_in.do_transfer()
# ----------------------------------------------------------------------
# Check state, quantity and total moves of incoming shipment.
# ----------------------------------------------------------------------
# Check total no of move lines of incoming shipment.
self.assertEqual(len(picking_in.move_lines), 6, 'Wrong number of move lines.')
# Check incoming shipment state.
self.assertEqual(picking_in.state, 'done', 'Incoming shipment state should be done.')
# Check incoming shipment move lines state.
for move in picking_in.move_lines:
self.assertEqual(move.state, 'done', 'Wrong state of move line.')
# Check product A done quantity must be 3 and 1
moves = self.MoveObj.search([('product_id', '=', self.productA.id), ('picking_id', '=', picking_in.id)])
a_done_qty = [move.product_uom_qty for move in moves]
self.assertEqual(set(a_done_qty), set([1.0, 3.0]), 'Wrong move quantity for product A.')
# Check product B done quantity must be 4 and 1
moves = self.MoveObj.search([('product_id', '=', self.productB.id), ('picking_id', '=', picking_in.id)])
b_done_qty = [move.product_uom_qty for move in moves]
self.assertEqual(set(b_done_qty), set([4.0, 1.0]), 'Wrong move quantity for product B.')
# Check product C done quantity must be 7
c_done_qty = self.MoveObj.search([('product_id', '=', self.productC.id), ('picking_id', '=', picking_in.id)], limit=1).product_uom_qty
self.assertEqual(c_done_qty, 7.0, 'Wrong move quantity of product C (%s found instead of 7)' % (c_done_qty))
# Check product D done quantity must be 7
d_done_qty = self.MoveObj.search([('product_id', '=', self.productD.id), ('picking_id', '=', picking_in.id)], limit=1).product_uom_qty
self.assertEqual(d_done_qty, 7.0, 'Wrong move quantity of product D (%s found instead of 7)' % (d_done_qty))
# ----------------------------------------------------------------------
# Check Back order of Incoming shipment.
# ----------------------------------------------------------------------
# Check back order created or not.
back_order_in = self.PickingObj.search([('backorder_id', '=', picking_in.id)])
self.assertEqual(len(back_order_in), 1, 'Back order should be created.')
# Check total move lines of back order.
self.assertEqual(len(back_order_in.move_lines), 3, 'Wrong number of move lines.')
# Check back order should be created with 3 quantity of product C.
moves = self.MoveObj.search([('product_id', '=', self.productC.id), ('picking_id', '=', back_order_in.id)])
product_c_qty = [move.product_uom_qty for move in moves]
self.assertEqual(sum(product_c_qty), 3.0, 'Wrong move quantity of product C (%s found instead of 3)' % (product_c_qty))
# Check back order should be created with 8 quantity of product D.
moves = self.MoveObj.search([('product_id', '=', self.productD.id), ('picking_id', '=', back_order_in.id)])
product_d_qty = [move.product_uom_qty for move in moves]
self.assertEqual(sum(product_d_qty), 8.0, 'Wrong move quantity of product D (%s found instead of 8)' % (product_d_qty))
# ======================================================================
# Create Outgoing shipment with ...
# product A ( 10 Unit ) , product B ( 5 Unit )
# product C ( 3 unit ) , product D ( 10 Unit )
# ======================================================================
picking_out = self.PickingObj.create({
'partner_id': self.partner_agrolite_id,
'picking_type_id': self.picking_type_out})
self.MoveObj.create({
'name': self.productA.name,
'product_id': self.productA.id,
'product_uom_qty': 10,
'product_uom': self.productA.uom_id.id,
'picking_id': picking_out.id,
'location_id': self.stock_location,
'location_dest_id': self.customer_location})
self.MoveObj.create({
'name': self.productB.name,
'product_id': self.productB.id,
'product_uom_qty': 5,
'product_uom': self.productB.uom_id.id,
'picking_id': picking_out.id,
'location_id': self.stock_location,
'location_dest_id': self.customer_location})
self.MoveObj.create({
'name': self.productC.name,
'product_id': self.productC.id,
'product_uom_qty': 3,
'product_uom': self.productC.uom_id.id,
'picking_id': picking_out.id,
'location_id': self.stock_location,
'location_dest_id': self.customer_location})
self.MoveObj.create({
'name': self.productD.name,
'product_id': self.productD.id,
'product_uom_qty': 10,
'product_uom': self.productD.uom_id.id,
'picking_id': picking_out.id,
'location_id': self.stock_location,
'location_dest_id': self.customer_location})
# Confirm outgoing shipment.
picking_out.action_confirm()
for move in picking_out.move_lines:
self.assertEqual(move.state, 'confirmed', 'Wrong state of move line.')
# Product assign to outgoing shipments
picking_out.action_assign()
for move in picking_out.move_lines:
self.assertEqual(move.state, 'assigned', 'Wrong state of move line.')
# Check availability for product A
aval_a_qty = self.MoveObj.search([('product_id', '=', self.productA.id), ('picking_id', '=', picking_out.id)], limit=1).availability
self.assertEqual(aval_a_qty, 4.0, 'Wrong move quantity availability of product A (%s found instead of 4)' % (aval_a_qty))
# Check availability for product B
aval_b_qty = self.MoveObj.search([('product_id', '=', self.productB.id), ('picking_id', '=', picking_out.id)], limit=1).availability
self.assertEqual(aval_b_qty, 5.0, 'Wrong move quantity availability of product B (%s found instead of 5)' % (aval_b_qty))
# Check availability for product C
aval_c_qty = self.MoveObj.search([('product_id', '=', self.productC.id), ('picking_id', '=', picking_out.id)], limit=1).availability
self.assertEqual(aval_c_qty, 3.0, 'Wrong move quantity availability of product C (%s found instead of 3)' % (aval_c_qty))
# Check availability for product D
aval_d_qty = self.MoveObj.search([('product_id', '=', self.productD.id), ('picking_id', '=', picking_out.id)], limit=1).availability
self.assertEqual(aval_d_qty, 7.0, 'Wrong move quantity availability of product D (%s found instead of 7)' % (aval_d_qty))
# ----------------------------------------------------------------------
# Replace pack operation of outgoing shipment.
# ----------------------------------------------------------------------
picking_out.do_prepare_partial()
self.StockPackObj.search([('product_id', '=', self.productA.id), ('picking_id', '=', picking_out.id)]).write({'product_qty': 2.0})
self.StockPackObj.search([('product_id', '=', self.productB.id), ('picking_id', '=', picking_out.id)]).write({'product_qty': 3.0})
self.StockPackObj.create({
'product_id': self.productB.id,
'product_qty': 2,
'product_uom_id': self.productB.uom_id.id,
'location_id': self.stock_location,
'location_dest_id': self.customer_location,
'picking_id': picking_out.id})
self.StockPackObj.search([('product_id', '=', self.productC.id), ('picking_id', '=', picking_out.id)]).write({
'product_qty': 2.0, 'lot_id': lot2_productC.id})
self.StockPackObj.create({
'product_id': self.productC.id,
'product_qty': 3,
'product_uom_id': self.productC.uom_id.id,
'location_id': self.stock_location,
'location_dest_id': self.customer_location,
'picking_id': picking_out.id})
self.StockPackObj.search([('product_id', '=', self.productD.id), ('picking_id', '=', picking_out.id)]).write({'product_qty': 6.0})
# Transfer picking.
picking_out.do_transfer()
# ----------------------------------------------------------------------
# Check state, quantity and total moves of outgoing shipment.
# ----------------------------------------------------------------------
# check outgoing shipment status.
self.assertEqual(picking_out.state, 'done', 'Wrong state of outgoing shipment.')
# check outgoing shipment total moves and and its state.
self.assertEqual(len(picking_out.move_lines), 5, 'Wrong number of move lines')
for move in picking_out.move_lines:
self.assertEqual(move.state, 'done', 'Wrong state of move line.')
back_order_out = self.PickingObj.search([('backorder_id', '=', picking_out.id)])
#------------------
# Check back order.
# -----------------
self.assertEqual(len(back_order_out), 1, 'Back order should be created.')
# Check total move lines of back order.
self.assertEqual(len(back_order_out.move_lines), 2, 'Wrong number of move lines')
# Check back order should be created with 8 quantity of product A.
product_a_qty = self.MoveObj.search([('product_id', '=', self.productA.id), ('picking_id', '=', back_order_out.id)], limit=1).product_uom_qty
self.assertEqual(product_a_qty, 8.0, 'Wrong move quantity of product A (%s found instead of 8)' % (product_a_qty))
# Check back order should be created with 4 quantity of product D.
product_d_qty = self.MoveObj.search([('product_id', '=', self.productD.id), ('picking_id', '=', back_order_out.id)], limit=1).product_uom_qty
self.assertEqual(product_d_qty, 4.0, 'Wrong move quantity of product D (%s found instead of 4)' % (product_d_qty))
#-----------------------------------------------------------------------
# Check stock location quant quantity and quantity available
# of product A, B, C, D
#-----------------------------------------------------------------------
# Check quants and available quantity for product A
quants = self.StockQuantObj.search([('product_id', '=', self.productA.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
self.assertEqual(sum(total_qty), 2.0, 'Expecting 2.0 Unit , got %.4f Unit on location stock!' % (sum(total_qty)))
self.assertEqual(self.productA.qty_available, 2.0, 'Wrong quantity available (%s found instead of 2.0)' % (self.productA.qty_available))
# Check quants and available quantity for product B
quants = self.StockQuantObj.search([('product_id', '=', self.productB.id), ('location_id', '=', self.stock_location)])
self.assertFalse(quants, 'No quant should found as outgoing shipment took everything out of stock.')
self.assertEqual(self.productB.qty_available, 0.0, 'Product B should have zero quantity available.')
# Check quants and available quantity for product C
quants = self.StockQuantObj.search([('product_id', '=', self.productC.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
self.assertEqual(sum(total_qty), 2.0, 'Expecting 2.0 Unit, got %.4f Unit on location stock!' % (sum(total_qty)))
self.assertEqual(self.productC.qty_available, 2.0, 'Wrong quantity available (%s found instead of 2.0)' % (self.productC.qty_available))
# Check quants and available quantity for product D
quant = self.StockQuantObj.search([('product_id', '=', self.productD.id), ('location_id', '=', self.stock_location)], limit=1)
self.assertEqual(quant.qty, 1.0, 'Expecting 1.0 Unit , got %.4f Unit on location stock!' % (quant.qty))
self.assertEqual(self.productD.qty_available, 1.0, 'Wrong quantity available (%s found instead of 1.0)' % (self.productD.qty_available))
#-----------------------------------------------------------------------
# Back Order of Incoming shipment
#-----------------------------------------------------------------------
lot3_productC = LotObj.create({'name': 'Lot 3', 'product_id': self.productC.id})
lot4_productC = LotObj.create({'name': 'Lot 4', 'product_id': self.productC.id})
lot5_productC = LotObj.create({'name': 'Lot 5', 'product_id': self.productC.id})
lot6_productC = LotObj.create({'name': 'Lot 6', 'product_id': self.productC.id})
lot1_productD = LotObj.create({'name': 'Lot 1', 'product_id': self.productD.id})
lot2_productD = LotObj.create({'name': 'Lot 2', 'product_id': self.productD.id})
# Confirm back order of incoming shipment.
back_order_in.action_confirm()
self.assertEqual(back_order_in.state, 'assigned', 'Wrong state of incoming shipment back order.')
for move in back_order_in.move_lines:
self.assertEqual(move.state, 'assigned', 'Wrong state of move line.')
# ----------------------------------------------------------------------
# Replace pack operation (Back order of Incoming shipment)
# ----------------------------------------------------------------------
back_order_in.do_prepare_partial()
packD = self.StockPackObj.search([('product_id', '=', self.productD.id), ('picking_id', '=', back_order_in.id)])
self.assertEqual(len(packD), 1, 'Wrong number of pack operation.')
packD.write({'product_qty': 4, 'lot_id': lot1_productD.id})
self.StockPackObj.create({
'product_id': self.productD.id,
'product_qty': 4,
'product_uom_id': self.productD.uom_id.id,
'location_id': self.supplier_location,
'location_dest_id': self.stock_location,
'picking_id': back_order_in.id,
'lot_id': lot2_productD.id})
self.StockPackObj.search([('product_id', '=', self.productC.id), ('picking_id', '=', back_order_in.id)], limit=1).write({'product_qty': 1, 'lot_id': lot3_productC.id})
self.StockPackObj.create({
'product_id': self.productC.id,
'product_qty': 1,
'product_uom_id': self.productC.uom_id.id,
'location_id': self.supplier_location,
'location_dest_id': self.stock_location,
'picking_id': back_order_in.id,
'lot_id': lot4_productC.id})
self.StockPackObj.create({
'product_id': self.productC.id,
'product_qty': 2,
'product_uom_id': self.productC.uom_id.id,
'location_id': self.supplier_location,
'location_dest_id': self.stock_location,
'picking_id': back_order_in.id,
'lot_id': lot5_productC.id})
self.StockPackObj.create({
'product_id': self.productC.id,
'product_qty': 2,
'product_uom_id': self.productC.uom_id.id,
'location_id': self.supplier_location,
'location_dest_id': self.stock_location,
'picking_id': back_order_in.id,
'lot_id': lot6_productC.id})
self.StockPackObj.create({
'product_id': self.productA.id,
'product_qty': 10,
'product_uom_id': self.productA.uom_id.id,
'location_id': self.supplier_location,
'location_dest_id': self.stock_location,
'picking_id': back_order_in.id})
back_order_in.do_transfer()
# ----------------------------------------------------------------------
# Check state, quantity and total moves (Back order of Incoming shipment).
# ----------------------------------------------------------------------
# Check total no of move lines.
self.assertEqual(len(back_order_in.move_lines), 6, 'Wrong number of move lines')
# Check incoming shipment state must be 'Done'.
self.assertEqual(back_order_in.state, 'done', 'Wrong state of picking.')
# Check incoming shipment move lines state must be 'Done'.
for move in back_order_in.move_lines:
self.assertEqual(move.state, 'done', 'Wrong state of move lines.')
# Check product A done quantity must be 10
movesA = self.MoveObj.search([('product_id', '=', self.productA.id), ('picking_id', '=', back_order_in.id)])
self.assertEqual(movesA.product_uom_qty, 10, "Wrong move quantity of product A (%s found instead of 10)" % (movesA.product_uom_qty))
# Check product C done quantity must be 3.0, 1.0, 2.0
movesC = self.MoveObj.search([('product_id', '=', self.productC.id), ('picking_id', '=', back_order_in.id)])
c_done_qty = [move.product_uom_qty for move in movesC]
self.assertEqual(set(c_done_qty), set([3.0, 1.0, 2.0]), 'Wrong quantity of moves product C.')
# Check product D done quantity must be 5.0 and 3.0
movesD = self.MoveObj.search([('product_id', '=', self.productD.id), ('picking_id', '=', back_order_in.id)])
d_done_qty = [move.product_uom_qty for move in movesD]
self.assertEqual(set(d_done_qty), set([3.0, 5.0]), 'Wrong quantity of moves product D.')
# Check no back order is created.
self.assertFalse(self.PickingObj.search([('backorder_id', '=', back_order_in.id)]), "Should not create any back order.")
#-----------------------------------------------------------------------
# Check stock location quant quantity and quantity available
# of product A, B, C, D
#-----------------------------------------------------------------------
# Check quants and available quantity for product A.
quants = self.StockQuantObj.search([('product_id', '=', self.productA.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
self.assertEqual(sum(total_qty), 12.0, 'Wrong total stock location quantity (%s found instead of 12)' % (sum(total_qty)))
self.assertEqual(self.productA.qty_available, 12.0, 'Wrong quantity available (%s found instead of 12)' % (self.productA.qty_available))
# Check quants and available quantity for product B.
quants = self.StockQuantObj.search([('product_id', '=', self.productB.id), ('location_id', '=', self.stock_location)])
self.assertFalse(quants, 'No quant should found as outgoing shipment took everything out of stock')
self.assertEqual(self.productB.qty_available, 0.0, 'Total quantity in stock should be 0 as the backorder took everything out of stock')
# Check quants and available quantity for product C.
quants = self.StockQuantObj.search([('product_id', '=', self.productC.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
self.assertEqual(sum(total_qty), 8.0, 'Wrong total stock location quantity (%s found instead of 8)' % (sum(total_qty)))
self.assertEqual(self.productC.qty_available, 8.0, 'Wrong quantity available (%s found instead of 8)' % (self.productC.qty_available))
# Check quants and available quantity for product D.
quants = self.StockQuantObj.search([('product_id', '=', self.productD.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
self.assertEqual(sum(total_qty), 9.0, 'Wrong total stock location quantity (%s found instead of 9)' % (sum(total_qty)))
self.assertEqual(self.productD.qty_available, 9.0, 'Wrong quantity available (%s found instead of 9)' % (self.productD.qty_available))
#-----------------------------------------------------------------------
# Back order of Outgoing shipment
# ----------------------------------------------------------------------
back_order_out.do_prepare_partial()
back_order_out.do_transfer()
# Check stock location quants and available quantity for product A.
quants = self.StockQuantObj.search([('product_id', '=', self.productA.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
self.assertGreaterEqual(float_round(sum(total_qty), precision_rounding=0.0001), 1, 'Total stock location quantity for product A should not be nagative.')
def test_10_pickings_transfer_with_different_uom(self):
""" Picking transfer with diffrent unit of meassure. """
# ----------------------------------------------------------------------
# Create incoming shipment of products DozA, SDozA, SDozARound, kgB, gB
# ----------------------------------------------------------------------
# DozA ( 10 Dozen ) , SDozA ( 10.5 SuperDozen )
# SDozARound ( 10.5 10.5 SuperDozenRound ) , kgB ( 0.020 kg )
# gB ( 525.3 g )
# ----------------------------------------------------------------------
picking_in_A = self.PickingObj.create({
'partner_id': self.partner_delta_id,
'picking_type_id': self.picking_type_in})
self.MoveObj.create({
'name': self.DozA.name,
'product_id': self.DozA.id,
'product_uom_qty': 10,
'product_uom': self.DozA.uom_id.id,
'picking_id': picking_in_A.id,
'location_id': self.supplier_location,
'location_dest_id': self.stock_location})
self.MoveObj.create({
'name': self.SDozA.name,
'product_id': self.SDozA.id,
'product_uom_qty': 10.5,
'product_uom': self.SDozA.uom_id.id,
'picking_id': picking_in_A.id,
'location_id': self.supplier_location,
'location_dest_id': self.stock_location})
self.MoveObj.create({
'name': self.SDozARound.name,
'product_id': self.SDozARound.id,
'product_uom_qty': 10.5,
'product_uom': self.SDozARound.uom_id.id,
'picking_id': picking_in_A.id,
'location_id': self.supplier_location,
'location_dest_id': self.stock_location})
self.MoveObj.create({
'name': self.kgB.name,
'product_id': self.kgB.id,
'product_uom_qty': 0.020,
'product_uom': self.kgB.uom_id.id,
'picking_id': picking_in_A.id,
'location_id': self.supplier_location,
'location_dest_id': self.stock_location})
self.MoveObj.create({
'name': self.gB.name,
'product_id': self.gB.id,
'product_uom_qty': 525.3,
'product_uom': self.gB.uom_id.id,
'picking_id': picking_in_A.id,
'location_id': self.supplier_location,
'location_dest_id': self.stock_location})
# Check incoming shipment move lines state.
for move in picking_in_A.move_lines:
self.assertEqual(move.state, 'draft', 'Move state must be draft.')
# Confirm incoming shipment.
picking_in_A.action_confirm()
# Check incoming shipment move lines state.
for move in picking_in_A.move_lines:
self.assertEqual(move.state, 'assigned', 'Move state must be draft.')
picking_in_A.do_prepare_partial()
# ----------------------------------------------------
# Check pack operation quantity of incoming shipments.
# ----------------------------------------------------
PackSdozAround = self.StockPackObj.search([('product_id', '=', self.SDozARound.id), ('picking_id', '=', picking_in_A.id)], limit=1)
self.assertEqual(PackSdozAround.product_qty, 11, 'Wrong quantity in pack operation (%s found instead of 11)' % (PackSdozAround.product_qty))
picking_in_A.do_transfer()
#-----------------------------------------------------------------------
# Check stock location quant quantity and quantity available
#-----------------------------------------------------------------------
# Check quants and available quantity for product DozA
quants = self.StockQuantObj.search([('product_id', '=', self.DozA.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
self.assertEqual(sum(total_qty), 10, 'Expecting 10 Dozen , got %.4f Dozen on location stock!' % (sum(total_qty)))
self.assertEqual(self.DozA.qty_available, 10, 'Wrong quantity available (%s found instead of 10)' % (self.DozA.qty_available))
# Check quants and available quantity for product SDozA
quants = self.StockQuantObj.search([('product_id', '=', self.SDozA.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
self.assertEqual(sum(total_qty), 10.5, 'Expecting 10.5 SDozen , got %.4f SDozen on location stock!' % (sum(total_qty)))
self.assertEqual(self.SDozA.qty_available, 10.5, 'Wrong quantity available (%s found instead of 10.5)' % (self.SDozA.qty_available))
# Check quants and available quantity for product SDozARound
quants = self.StockQuantObj.search([('product_id', '=', self.SDozARound.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
self.assertEqual(sum(total_qty), 11, 'Expecting 11 SDozenRound , got %.4f SDozenRound on location stock!' % (sum(total_qty)))
self.assertEqual(self.SDozARound.qty_available, 11, 'Wrong quantity available (%s found instead of 11)' % (self.SDozARound.qty_available))
# Check quants and available quantity for product gB
quants = self.StockQuantObj.search([('product_id', '=', self.gB.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
self.assertEqual(sum(total_qty), 525.3, 'Expecting 525.3 gram , got %.4f gram on location stock!' % (sum(total_qty)))
self.assertEqual(self.gB.qty_available, 525.3, 'Wrong quantity available (%s found instead of 525.3' % (self.gB.qty_available))
# Check quants and available quantity for product kgB
quants = self.StockQuantObj.search([('product_id', '=', self.kgB.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
self.assertEqual(sum(total_qty), 0.020, 'Expecting 0.020 kg , got %.4f kg on location stock!' % (sum(total_qty)))
self.assertEqual(self.kgB.qty_available, 0.020, 'Wrong quantity available (%s found instead of 0.020)' % (self.kgB.qty_available))
# ----------------------------------------------------------------------
# Create Incoming Shipment B
# ----------------------------------------------------------------------
picking_in_B = self.PickingObj.create({
'partner_id': self.partner_delta_id,
'picking_type_id': self.picking_type_in})
self.MoveObj.create({
'name': self.DozA.name,
'product_id': self.DozA.id,
'product_uom_qty': 120,
'product_uom': self.uom_unit.id,
'picking_id': picking_in_B.id,
'location_id': self.supplier_location,
'location_dest_id': self.stock_location})
self.MoveObj.create({
'name': self.SDozA.name,
'product_id': self.SDozA.id,
'product_uom_qty': 1512,
'product_uom': self.uom_unit.id,
'picking_id': picking_in_B.id,
'location_id': self.supplier_location,
'location_dest_id': self.stock_location})
self.MoveObj.create({
'name': self.SDozARound.name,
'product_id': self.SDozARound.id,
'product_uom_qty': 1584,
'product_uom': self.uom_unit.id,
'picking_id': picking_in_B.id,
'location_id': self.supplier_location,
'location_dest_id': self.stock_location})
self.MoveObj.create({
'name': self.kgB.name,
'product_id': self.kgB.id,
'product_uom_qty': 20.0,
'product_uom': self.uom_gm.id,
'picking_id': picking_in_B.id,
'location_id': self.supplier_location,
'location_dest_id': self.stock_location})
self.MoveObj.create({
'name': self.gB.name,
'product_id': self.gB.id,
'product_uom_qty': 0.525,
'product_uom': self.uom_kg.id,
'picking_id': picking_in_B.id,
'location_id': self.supplier_location,
'location_dest_id': self.stock_location})
# Check incoming shipment move lines state.
for move in picking_in_B.move_lines:
self.assertEqual(move.state, 'draft', 'Wrong state of move line.')
# Confirm incoming shipment.
picking_in_B.action_confirm()
# Check incoming shipment move lines state.
for move in picking_in_B.move_lines:
self.assertEqual(move.state, 'assigned', 'Wrong state of move line.')
picking_in_B.do_prepare_partial()
# ----------------------------------------------------------------------
# Check product quantity and unit of measure of pack operaation.
# ----------------------------------------------------------------------
# Check pack operation quantity and unit of measure for product DozA.
PackdozA = self.StockPackObj.search([('product_id', '=', self.DozA.id), ('picking_id', '=', picking_in_B.id)], limit=1)
self.assertEqual(PackdozA.product_qty, 120, 'Wrong quantity in pack operation (%s found instead of 120)' % (PackdozA.product_qty))
self.assertEqual(PackdozA.product_uom_id.id, self.uom_unit.id, 'Wrong uom in pack operation for product DozA.')
# Check pack operation quantity and unit of measure for product SDozA.
PackSdozA = self.StockPackObj.search([('product_id', '=', self.SDozA.id), ('picking_id', '=', picking_in_B.id)], limit=1)
self.assertEqual(PackSdozA.product_qty, 1512, 'Wrong quantity in pack operation (%s found instead of 1512)' % (PackSdozA.product_qty))
self.assertEqual(PackSdozA.product_uom_id.id, self.uom_unit.id, 'Wrong uom in pack operation for product SDozA.')
# Check pack operation quantity and unit of measure for product SDozARound.
PackSdozAround = self.StockPackObj.search([('product_id', '=', self.SDozARound.id), ('picking_id', '=', picking_in_B.id)], limit=1)
self.assertEqual(PackSdozAround.product_qty, 1584, 'Wrong quantity in pack operation (%s found instead of 1584)' % (PackSdozAround.product_qty))
self.assertEqual(PackSdozAround.product_uom_id.id, self.uom_unit.id, 'Wrong uom in pack operation for product SDozARound.')
# Check pack operation quantity and unit of measure for product gB.
packgB = self.StockPackObj.search([('product_id', '=', self.gB.id), ('picking_id', '=', picking_in_B.id)], limit=1)
self.assertEqual(packgB.product_qty, 525, 'Wrong quantity in pack operation (%s found instead of 525)' % (packgB.product_qty))
self.assertEqual(packgB.product_uom_id.id, self.uom_gm.id, 'Wrong uom in pack operation for product gB.')
# Check pack operation quantity and unit of measure for product kgB.
packkgB = self.StockPackObj.search([('product_id', '=', self.kgB.id), ('picking_id', '=', picking_in_B.id)], limit=1)
self.assertEqual(packkgB.product_qty, 20.0, 'Wrong quantity in pack operation (%s found instead of 20)' % (packkgB.product_qty))
self.assertEqual(packkgB.product_uom_id.id, self.uom_gm.id, 'Wrong uom in pack operation for product kgB')
# ----------------------------------------------------------------------
# Replace pack operation of incoming shipment.
# ----------------------------------------------------------------------
self.StockPackObj.search([('product_id', '=', self.kgB.id), ('picking_id', '=', picking_in_B.id)]).write({
'product_qty': 0.020, 'product_uom_id': self.uom_kg.id})
self.StockPackObj.search([('product_id', '=', self.gB.id), ('picking_id', '=', picking_in_B.id)]).write({
'product_qty': 525.3, 'product_uom_id': self.uom_gm.id})
self.StockPackObj.search([('product_id', '=', self.DozA.id), ('picking_id', '=', picking_in_B.id)]).write({
'product_qty': 4, 'product_uom_id': self.uom_dozen.id})
self.StockPackObj.create({
'product_id': self.DozA.id,
'product_qty': 48,
'product_uom_id': self.uom_unit.id,
'location_id': self.supplier_location,
'location_dest_id': self.stock_location,
'picking_id': picking_in_B.id})
# Transfer product.
# -----------------
picking_in_B.do_transfer()
#-----------------------------------------------------------------------
# Check incoming shipment
#-----------------------------------------------------------------------
# Check incoming shipment state.
self.assertEqual(picking_in_B.state, 'done', 'Incoming shipment state should be done.')
# Check incoming shipment move lines state.
for move in picking_in_B.move_lines:
self.assertEqual(move.state, 'done', 'Wrong state of move line.')
# Check total done move lines for incoming shipment.
self.assertEqual(len(picking_in_B.move_lines), 6, 'Wrong number of move lines')
# Check product DozA done quantity.
moves_DozA = self.MoveObj.search([('product_id', '=', self.DozA.id), ('picking_id', '=', picking_in_B.id)], limit=1)
self.assertEqual(moves_DozA.product_uom_qty, 96, 'Wrong move quantity (%s found instead of 96)' % (moves_DozA.product_uom_qty))
self.assertEqual(moves_DozA.product_uom.id, self.uom_unit.id, 'Wrong uom in move for product DozA.')
# Check product SDozA done quantity.
moves_SDozA = self.MoveObj.search([('product_id', '=', self.SDozA.id), ('picking_id', '=', picking_in_B.id)], limit=1)
self.assertEqual(moves_SDozA.product_uom_qty, 1512, 'Wrong move quantity (%s found instead of 1512)' % (moves_SDozA.product_uom_qty))
self.assertEqual(moves_SDozA.product_uom.id, self.uom_unit.id, 'Wrong uom in move for product SDozA.')
# Check product SDozARound done quantity.
moves_SDozARound = self.MoveObj.search([('product_id', '=', self.SDozARound.id), ('picking_id', '=', picking_in_B.id)], limit=1)
self.assertEqual(moves_SDozARound.product_uom_qty, 1584, 'Wrong move quantity (%s found instead of 1584)' % (moves_SDozARound.product_uom_qty))
self.assertEqual(moves_SDozARound.product_uom.id, self.uom_unit.id, 'Wrong uom in move for product SDozARound.')
# Check product kgB done quantity.
moves_kgB = self.MoveObj.search([('product_id', '=', self.kgB.id), ('picking_id', '=', picking_in_B.id)], limit=1)
self.assertEqual(moves_kgB.product_uom_qty, 20, 'Wrong quantity in move (%s found instead of 20)' % (moves_kgB.product_uom_qty))
self.assertEqual(moves_kgB.product_uom.id, self.uom_gm.id, 'Wrong uom in move for product kgB.')
# Check two moves created for product gB with quantity (0.525 kg and 0.3 g)
moves_gB_kg = self.MoveObj.search([('product_id', '=', self.gB.id), ('picking_id', '=', picking_in_B.id), ('product_uom', '=', self.uom_kg.id)], limit=1)
self.assertEqual(moves_gB_kg.product_uom_qty, 0.525, 'Wrong move quantity (%s found instead of 0.525)' % (moves_gB_kg.product_uom_qty))
self.assertEqual(moves_gB_kg.product_uom.id, self.uom_kg.id, 'Wrong uom in move for product gB.')
moves_gB_g = self.MoveObj.search([('product_id', '=', self.gB.id), ('picking_id', '=', picking_in_B.id), ('product_uom', '=', self.uom_gm.id)], limit=1)
self.assertEqual(moves_gB_g.product_uom_qty, 0.3, 'Wrong move quantity (%s found instead of 0.3)' % (moves_gB_g.product_uom_qty))
self.assertEqual(moves_gB_g.product_uom.id, self.uom_gm.id, 'Wrong uom in move for product gB.')
# ----------------------------------------------------------------------
# Check Back order of Incoming shipment.
# ----------------------------------------------------------------------
# Check back order created or not.
bo_in_B = self.PickingObj.search([('backorder_id', '=', picking_in_B.id)])
self.assertEqual(len(bo_in_B), 1, 'Back order should be created.')
# Check total move lines of back order.
self.assertEqual(len(bo_in_B.move_lines), 1, 'Wrong number of move lines')
# Check back order created with correct quantity and uom or not.
moves_DozA = self.MoveObj.search([('product_id', '=', self.DozA.id), ('picking_id', '=', bo_in_B.id)], limit=1)
self.assertEqual(moves_DozA.product_uom_qty, 24.0, 'Wrong move quantity (%s found instead of 0.525)' % (moves_DozA.product_uom_qty))
self.assertEqual(moves_DozA.product_uom.id, self.uom_unit.id, 'Wrong uom in move for product DozA.')
# ----------------------------------------------------------------------
# Check product stock location quantity and quantity available.
# ----------------------------------------------------------------------
# Check quants and available quantity for product DozA
quants = self.StockQuantObj.search([('product_id', '=', self.DozA.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
self.assertEqual(sum(total_qty), 18, 'Expecting 18 Dozen , got %.4f Dozen on location stock!' % (sum(total_qty)))
self.assertEqual(self.DozA.qty_available, 18, 'Wrong quantity available (%s found instead of 18)' % (self.DozA.qty_available))
# Check quants and available quantity for product SDozA
quants = self.StockQuantObj.search([('product_id', '=', self.SDozA.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
self.assertEqual(sum(total_qty), 21, 'Expecting 18 SDozen , got %.4f SDozen on location stock!' % (sum(total_qty)))
self.assertEqual(self.SDozA.qty_available, 21, 'Wrong quantity available (%s found instead of 21)' % (self.SDozA.qty_available))
# Check quants and available quantity for product SDozARound
quants = self.StockQuantObj.search([('product_id', '=', self.SDozARound.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
self.assertEqual(sum(total_qty), 22, 'Expecting 22 SDozenRound , got %.4f SDozenRound on location stock!' % (sum(total_qty)))
self.assertEqual(self.SDozARound.qty_available, 22, 'Wrong quantity available (%s found instead of 22)' % (self.SDozARound.qty_available))
# Check quants and available quantity for product gB.
quants = self.StockQuantObj.search([('product_id', '=', self.gB.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
self.assertEqual(sum(total_qty), 1050.6, 'Expecting 1050.6 Gram , got %.4f Gram on location stock!' % (sum(total_qty)))
self.assertEqual(self.gB.qty_available, 1050.6, 'Wrong quantity available (%s found instead of 1050.6)' % (self.gB.qty_available))
# Check quants and available quantity for product kgB.
quants = self.StockQuantObj.search([('product_id', '=', self.kgB.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
self.assertEqual(sum(total_qty), 0.040, 'Expecting 0.040 kg , got %.4f kg on location stock!' % (sum(total_qty)))
self.assertEqual(self.kgB.qty_available, 0.040, 'Wrong quantity available (%s found instead of 0.040)' % (self.kgB.qty_available))
# ----------------------------------------------------------------------
# Create outgoing shipment.
# ----------------------------------------------------------------------
before_out_quantity = self.kgB.qty_available
picking_out = self.PickingObj.create({
'partner_id': self.partner_agrolite_id,
'picking_type_id': self.picking_type_out})
self.MoveObj.create({
'name': self.kgB.name,
'product_id': self.kgB.id,
'product_uom_qty': 0.966,
'product_uom': self.uom_gm.id,
'picking_id': picking_out.id,
'location_id': self.stock_location,
'location_dest_id': self.customer_location})
self.MoveObj.create({
'name': self.kgB.name,
'product_id': self.kgB.id,
'product_uom_qty': 0.034,
'product_uom': self.uom_gm.id,
'picking_id': picking_out.id,
'location_id': self.stock_location,
'location_dest_id': self.customer_location})
picking_out.action_confirm()
picking_out.action_assign()
picking_out.do_prepare_partial()
picking_out.do_transfer()
# Check quantity difference after stock transfer.
quantity_diff = before_out_quantity - self.kgB.qty_available
self.assertEqual(float_round(quantity_diff, precision_rounding=0.0001), 0.001, 'Wrong quantity diffrence.')
self.assertEqual(self.kgB.qty_available, 0.039, 'Wrong quantity available (%s found instead of 0.039)' % (self.kgB.qty_available))
# ======================================================================
# Outgoing shipments.
# ======================================================================
# Create Outgoing shipment with ...
# product DozA ( 54 Unit ) , SDozA ( 288 Unit )
# product SDozRound ( 360 unit ) , product gB ( 0.503 kg )
# product kgB ( 19 g )
# ======================================================================
picking_out = self.PickingObj.create({
'partner_id': self.partner_agrolite_id,
'picking_type_id': self.picking_type_out})
self.MoveObj.create({
'name': self.DozA.name,
'product_id': self.DozA.id,
'product_uom_qty': 54,
'product_uom': self.uom_unit.id,
'picking_id': picking_out.id,
'location_id': self.stock_location,
'location_dest_id': self.customer_location})
self.MoveObj.create({
'name': self.SDozA.name,
'product_id': self.SDozA.id,
'product_uom_qty': 288,
'product_uom': self.uom_unit.id,
'picking_id': picking_out.id,
'location_id': self.stock_location,
'location_dest_id': self.customer_location})
self.MoveObj.create({
'name': self.SDozARound.name,
'product_id': self.SDozARound.id,
'product_uom_qty': 360,
'product_uom': self.uom_unit.id,
'picking_id': picking_out.id,
'location_id': self.stock_location,
'location_dest_id': self.customer_location})
self.MoveObj.create({
'name': self.gB.name,
'product_id': self.gB.id,
'product_uom_qty': 0.503,
'product_uom': self.uom_kg.id,
'picking_id': picking_out.id,
'location_id': self.stock_location,
'location_dest_id': self.customer_location})
self.MoveObj.create({
'name': self.kgB.name,
'product_id': self.kgB.id,
'product_uom_qty': 20,
'product_uom': self.uom_gm.id,
'picking_id': picking_out.id,
'location_id': self.stock_location,
'location_dest_id': self.customer_location})
# Confirm outgoing shipment.
picking_out.action_confirm()
for move in picking_out.move_lines:
self.assertEqual(move.state, 'confirmed', 'Wrong state of move line.')
# Assing product to outgoing shipments
picking_out.action_assign()
for move in picking_out.move_lines:
self.assertEqual(move.state, 'assigned', 'Wrong state of move line.')
# Check product A available quantity
DozA_qty = self.MoveObj.search([('product_id', '=', self.DozA.id), ('picking_id', '=', picking_out.id)], limit=1).availability
self.assertEqual(DozA_qty, 4.5, 'Wrong move quantity availability (%s found instead of 4.5)' % (DozA_qty))
# Check product B available quantity
SDozA_qty = self.MoveObj.search([('product_id', '=', self.SDozA.id), ('picking_id', '=', picking_out.id)], limit=1).availability
self.assertEqual(SDozA_qty, 2, 'Wrong move quantity availability (%s found instead of 2)' % (SDozA_qty))
# Check product C available quantity
SDozARound_qty = self.MoveObj.search([('product_id', '=', self.SDozARound.id), ('picking_id', '=', picking_out.id)], limit=1).availability
self.assertEqual(SDozARound_qty, 3, 'Wrong move quantity availability (%s found instead of 3)' % (SDozARound_qty))
# Check product D available quantity
gB_qty = self.MoveObj.search([('product_id', '=', self.gB.id), ('picking_id', '=', picking_out.id)], limit=1).availability
self.assertEqual(gB_qty, 503, 'Wrong move quantity availability (%s found instead of 503)' % (gB_qty))
# Check product D available quantity
kgB_qty = self.MoveObj.search([('product_id', '=', self.kgB.id), ('picking_id', '=', picking_out.id)], limit=1).availability
self.assertEqual(kgB_qty, 0.020, 'Wrong move quantity availability (%s found instead of 0.020)' % (kgB_qty))
picking_out.action_confirm()
picking_out.action_assign()
picking_out.do_prepare_partial()
picking_out.do_transfer()
# ----------------------------------------------------------------------
# Check product stock location quantity and quantity available.
# ----------------------------------------------------------------------
# Check quants and available quantity for product DozA
quants = self.StockQuantObj.search([('product_id', '=', self.DozA.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
self.assertEqual(sum(total_qty), 13.5, 'Expecting 13.5 Dozen , got %.4f Dozen on location stock!' % (sum(total_qty)))
self.assertEqual(self.DozA.qty_available, 13.5, 'Wrong quantity available (%s found instead of 13.5)' % (self.DozA.qty_available))
# Check quants and available quantity for product SDozA
quants = self.StockQuantObj.search([('product_id', '=', self.SDozA.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
self.assertEqual(sum(total_qty), 19, 'Expecting 19 SDozen , got %.4f SDozen on location stock!' % (sum(total_qty)))
self.assertEqual(self.SDozA.qty_available, 19, 'Wrong quantity available (%s found instead of 19)' % (self.SDozA.qty_available))
# Check quants and available quantity for product SDozARound
quants = self.StockQuantObj.search([('product_id', '=', self.SDozARound.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
self.assertEqual(sum(total_qty), 19, 'Expecting 19 SDozRound , got %.4f SDozRound on location stock!' % (sum(total_qty)))
self.assertEqual(self.SDozARound.qty_available, 19, 'Wrong quantity available (%s found instead of 19)' % (self.SDozARound.qty_available))
# Check quants and available quantity for product gB.
quants = self.StockQuantObj.search([('product_id', '=', self.gB.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
self.assertEqual(float_round(sum(total_qty), precision_rounding=0.0001), 547.6, 'Expecting 547.6 g , got %.4f g on location stock!' % (sum(total_qty)))
self.assertEqual(self.gB.qty_available, 547.6, 'Wrong quantity available (%s found instead of 547.6)' % (self.gB.qty_available))
# Check quants and available quantity for product kgB.
quants = self.StockQuantObj.search([('product_id', '=', self.kgB.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
self.assertEqual(sum(total_qty), 0.019, 'Expecting 0.019 kg , got %.4f kg on location stock!' % (sum(total_qty)))
self.assertEqual(self.kgB.qty_available, 0.019, 'Wrong quantity available (%s found instead of 0.019)' % (self.kgB.qty_available))
# ----------------------------------------------------------------------
# Receipt back order of incoming shipment.
# ----------------------------------------------------------------------
bo_in_B.do_prepare_partial()
bo_in_B.do_transfer()
# Check quants and available quantity for product kgB.
quants = self.StockQuantObj.search([('product_id', '=', self.DozA.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
self.assertEqual(sum(total_qty), 15.5, 'Expecting 15.5 Dozen , got %.4f Dozen on location stock!' % (sum(total_qty)))
self.assertEqual(self.DozA.qty_available, 15.5, 'Wrong quantity available (%s found instead of 15.5)' % (self.DozA.qty_available))
# -----------------------------------------
# Create product in kg and receive in ton.
# -----------------------------------------
productKG = self.ProductObj.create({'name': 'Product KG', 'uom_id': self.uom_kg.id, 'uom_po_id': self.uom_kg.id})
picking_in = self.PickingObj.create({
'partner_id': self.partner_delta_id,
'picking_type_id': self.picking_type_in})
self.MoveObj.create({
'name': productKG.name,
'product_id': productKG.id,
'product_uom_qty': 1.0,
'product_uom': self.uom_tone.id,
'picking_id': picking_in.id,
'location_id': self.supplier_location,
'location_dest_id': self.stock_location})
# Check incoming shipment state.
self.assertEqual(picking_in.state, 'draft', 'Incoming shipment state should be draft.')
# Check incoming shipment move lines state.
for move in picking_in.move_lines:
self.assertEqual(move.state, 'draft', 'Wrong state of move line.')
# Confirm incoming shipment.
picking_in.action_confirm()
# Check incoming shipment move lines state.
for move in picking_in.move_lines:
self.assertEqual(move.state, 'assigned', 'Wrong state of move line.')
picking_in.do_prepare_partial()
# Check pack operation quantity.
packKG = self.StockPackObj.search([('product_id', '=', productKG.id), ('picking_id', '=', picking_in.id)], limit=1)
self.assertEqual(packKG.product_qty, 1000, 'Wrong product quantity in pack operation (%s found instead of 1000)' % (packKG.product_qty))
self.assertEqual(packKG.product_uom_id.id, self.uom_kg.id, 'Wrong product uom in pack operation.')
# Transfer Incoming shipment.
picking_in.do_transfer()
#-----------------------------------------------------------------------
# Check incoming shipment after transfer.
#-----------------------------------------------------------------------
# Check incoming shipment state.
self.assertEqual(picking_in.state, 'done', 'Incoming shipment state should be done.')
# Check incoming shipment move lines state.
for move in picking_in.move_lines:
self.assertEqual(move.state, 'done', 'Wrong state of move lines.')
# Check total done move lines for incoming shipment.
self.assertEqual(len(picking_in.move_lines), 1, 'Wrong number of move lines')
# Check product DozA done quantity.
move = self.MoveObj.search([('product_id', '=', productKG.id), ('picking_id', '=', picking_in.id)], limit=1)
self.assertEqual(move.product_uom_qty, 1, 'Wrong product quantity in done move.')
self.assertEqual(move.product_uom.id, self.uom_tone.id, 'Wrong unit of measure in done move.')
self.assertEqual(productKG.qty_available, 1000, 'Wrong quantity available of product (%s found instead of 1000)' % (productKG.qty_available))
picking_out = self.PickingObj.create({
'partner_id': self.partner_agrolite_id,
'picking_type_id': self.picking_type_out})
self.MoveObj.create({
'name': productKG.name,
'product_id': productKG.id,
'product_uom_qty': 2.5,
'product_uom': self.uom_gm.id,
'picking_id': picking_out.id,
'location_id': self.stock_location,
'location_dest_id': self.customer_location})
picking_out.action_confirm()
picking_out.action_assign()
picking_out.do_prepare_partial()
pack_opt = self.StockPackObj.search([('product_id', '=', productKG.id), ('picking_id', '=', picking_out.id)], limit=1)
pack_opt.write({'product_qty': 0.5})
picking_out.do_transfer()
quants = self.StockQuantObj.search([('product_id', '=', productKG.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
# Check total quantity stock location.
self.assertEqual(sum(total_qty), 999.9995, 'Expecting 999.9995 kg , got %.4f kg on location stock!' % (sum(total_qty)))
# Check Back order created or not.
#---------------------------------
bo_out_1 = self.PickingObj.search([('backorder_id', '=', picking_out.id)])
self.assertEqual(len(bo_out_1), 1, 'Back order should be created.')
# Check total move lines of back order.
self.assertEqual(len(bo_out_1.move_lines), 1, 'Wrong number of move lines')
moves_KG = self.MoveObj.search([('product_id', '=', productKG.id), ('picking_id', '=', bo_out_1.id)], limit=1)
# Check back order created with correct quantity and uom or not.
self.assertEqual(moves_KG.product_uom_qty, 2.0, 'Wrong move quantity (%s found instead of 2.0)' % (moves_KG.product_uom_qty))
self.assertEqual(moves_KG.product_uom.id, self.uom_gm.id, 'Wrong uom in move for product KG.')
bo_out_1.action_assign()
bo_out_1.do_prepare_partial()
pack_opt = self.StockPackObj.search([('product_id', '=', productKG.id), ('picking_id', '=', bo_out_1.id)], limit=1)
pack_opt.write({'product_qty': 0.5})
bo_out_1.do_transfer()
quants = self.StockQuantObj.search([('product_id', '=', productKG.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
# Check total quantity stock location.
self.assertEqual(sum(total_qty), 999.9990, 'Expecting 999.9990 kg , got %.4f kg on location stock!' % (sum(total_qty)))
# Check Back order created or not.
#---------------------------------
bo_out_2 = self.PickingObj.search([('backorder_id', '=', bo_out_1.id)])
self.assertEqual(len(bo_out_2), 1, 'Back order should be created.')
# Check total move lines of back order.
self.assertEqual(len(bo_out_2.move_lines), 1, 'Wrong number of move lines')
# Check back order created with correct move quantity and uom or not.
moves_KG = self.MoveObj.search([('product_id', '=', productKG.id), ('picking_id', '=', bo_out_2.id)], limit=1)
self.assertEqual(moves_KG.product_uom_qty, 1.5, 'Wrong move quantity (%s found instead of 1.5)' % (moves_KG.product_uom_qty))
self.assertEqual(moves_KG.product_uom.id, self.uom_gm.id, 'Wrong uom in move for product KG.')
bo_out_2.action_assign()
bo_out_2.do_prepare_partial()
pack_opt = self.StockPackObj.search([('product_id', '=', productKG.id), ('picking_id', '=', bo_out_2.id)], limit=1)
pack_opt.write({'product_qty': 0.5})
bo_out_2.do_transfer()
# Check total quantity stock location of product KG.
quants = self.StockQuantObj.search([('product_id', '=', productKG.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
self.assertEqual(sum(total_qty), 999.9985, 'Expecting 999.9985 kg , got %.4f kg on location stock!' % (sum(total_qty)))
# Check Back order created or not.
#---------------------------------
bo_out_3 = self.PickingObj.search([('backorder_id', '=', bo_out_2.id)])
self.assertEqual(len(bo_out_3), 1, 'Back order should be created.')
# Check total move lines of back order.
self.assertEqual(len(bo_out_3.move_lines), 1, 'Wrong number of move lines')
# Check back order created with correct quantity and uom or not.
moves_KG = self.MoveObj.search([('product_id', '=', productKG.id), ('picking_id', '=', bo_out_3.id)], limit=1)
self.assertEqual(moves_KG.product_uom_qty, 1, 'Wrong move quantity (%s found instead of 1.0)' % (moves_KG.product_uom_qty))
self.assertEqual(moves_KG.product_uom.id, self.uom_gm.id, 'Wrong uom in move for product KG.')
bo_out_3.action_assign()
bo_out_3.do_prepare_partial()
pack_opt = self.StockPackObj.search([('product_id', '=', productKG.id), ('picking_id', '=', bo_out_3.id)], limit=1)
pack_opt.write({'product_qty': 0.5})
bo_out_3.do_transfer()
quants = self.StockQuantObj.search([('product_id', '=', productKG.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
self.assertEqual(sum(total_qty), 999.9980, 'Expecting 999.9980 kg , got %.4f kg on location stock!' % (sum(total_qty)))
# Check Back order created or not.
#---------------------------------
bo_out_4 = self.PickingObj.search([('backorder_id', '=', bo_out_3.id)])
self.assertEqual(len(bo_out_4), 1, 'Back order should be created.')
# Check total move lines of back order.
self.assertEqual(len(bo_out_4.move_lines), 1, 'Wrong number of move lines')
# Check back order created with correct quantity and uom or not.
moves_KG = self.MoveObj.search([('product_id', '=', productKG.id), ('picking_id', '=', bo_out_4.id)], limit=1)
self.assertEqual(moves_KG.product_uom_qty, 0.5, 'Wrong move quantity (%s found instead of 0.5)' % (moves_KG.product_uom_qty))
self.assertEqual(moves_KG.product_uom.id, self.uom_gm.id, 'Wrong uom in move for product KG.')
bo_out_4.action_assign()
bo_out_4.do_prepare_partial()
pack_opt = self.StockPackObj.search([('product_id', '=', productKG.id), ('picking_id', '=', bo_out_4.id)], limit=1)
pack_opt.write({'product_qty': 0.5})
bo_out_4.do_transfer()
quants = self.StockQuantObj.search([('product_id', '=', productKG.id), ('location_id', '=', self.stock_location)])
total_qty = [quant.qty for quant in quants]
self.assertEqual(sum(total_qty), 999.9975, 'Expecting 999.9975 kg , got %.4f kg on location stock!' % (sum(total_qty)))
def test_20_create_inventory_with_different_uom(self):
    """Create inventory adjustments in a UoM different from the product's
    own UoM and check quantity conversions (dozen->unit, tonne->kg),
    including partial inventories mixing packages and production lots."""
    # ------------------------------------------------
    # Test inventory with product A(Unit).
    # ------------------------------------------------
    inventory = self.InvObj.create({'name': 'Test',
                                    'product_id': self.UnitA.id,
                                    'filter': 'product'})
    inventory.prepare_inventory()
    self.assertFalse(inventory.line_ids, "Inventory line should not created.")
    # Count 10 dozen of a product stored in Units -> expect 120 Units on hand.
    inventory_line = self.InvLineObj.create({
        'inventory_id': inventory.id,
        'product_id': self.UnitA.id,
        'product_uom_id': self.uom_dozen.id,
        'product_qty': 10,
        'location_id': self.stock_location})
    inventory.action_done()
    # Check quantity available of product UnitA.
    quants = self.StockQuantObj.search([('product_id', '=', self.UnitA.id), ('location_id', '=', self.stock_location)])
    total_qty = [quant.qty for quant in quants]
    self.assertEqual(sum(total_qty), 120, 'Expecting 120 Units , got %.4f Units on location stock!' % (sum(total_qty)))
    self.assertEqual(self.UnitA.qty_available, 120, 'Expecting 120 Units , got %.4f Units of quantity available!' % (self.UnitA.qty_available))
    # Create Inventory again for product UnitA.
    inventory = self.InvObj.create({'name': 'Test',
                                    'product_id': self.UnitA.id,
                                    'filter': 'product'})
    inventory.prepare_inventory()
    self.assertEqual(len(inventory.line_ids), 1, "One inventory line should be created.")
    inventory_line = self.InvLineObj.search([('product_id', '=', self.UnitA.id), ('inventory_id', '=', inventory.id)], limit=1)
    self.assertEqual(inventory_line.product_qty, 120, "Wrong product quantity in inventory line.")
    # Modify the inventory line and set the quantity to 144 product on this new inventory.
    inventory_line.write({'product_qty': 144})
    inventory.action_done()
    # The correction move must add the 24-unit difference (144 - 120).
    move = self.MoveObj.search([('product_id', '=', self.UnitA.id), ('inventory_id', '=', inventory.id)], limit=1)
    self.assertEqual(move.product_uom_qty, 24, "Wrong move quantity of product UnitA.")
    # Check quantity available of product UnitA.
    quants = self.StockQuantObj.search([('product_id', '=', self.UnitA.id), ('location_id', '=', self.stock_location)])
    total_qty = [quant.qty for quant in quants]
    self.assertEqual(sum(total_qty), 144, 'Expecting 144 Units , got %.4f Units on location stock!' % (sum(total_qty)))
    self.assertEqual(self.UnitA.qty_available, 144, 'Expecting 144 Units , got %.4f Units of quantity available!' % (self.UnitA.qty_available))
    # ------------------------------------------------
    # Test inventory with product KG.
    # ------------------------------------------------
    productKG = self.ProductObj.create({'name': 'Product KG', 'uom_id': self.uom_kg.id, 'uom_po_id': self.uom_kg.id})
    inventory = self.InvObj.create({'name': 'Inventory Product KG',
                                    'product_id': productKG.id,
                                    'filter': 'product'})
    inventory.prepare_inventory()
    self.assertFalse(inventory.line_ids, "Inventory line should not created.")
    # Count 5 tonnes of a product stored in kg -> expect 5000 kg on hand.
    inventory_line = self.InvLineObj.create({
        'inventory_id': inventory.id,
        'product_id': productKG.id,
        'product_uom_id': self.uom_tone.id,
        'product_qty': 5,
        'location_id': self.stock_location})
    inventory.action_done()
    quants = self.StockQuantObj.search([('product_id', '=', productKG.id), ('location_id', '=', self.stock_location)])
    total_qty = [quant.qty for quant in quants]
    self.assertEqual(sum(total_qty), 5000, 'Expecting 5000 kg , got %.4f kg on location stock!' % (sum(total_qty)))
    self.assertEqual(productKG.qty_available, 5000, 'Expecting 5000 kg , got %.4f kg of quantity available!' % (productKG.qty_available))
    # Create Inventory again.
    inventory = self.InvObj.create({'name': 'Test',
                                    'product_id': productKG.id,
                                    'filter': 'product'})
    inventory.prepare_inventory()
    self.assertEqual(len(inventory.line_ids), 1, "One inventory line should be created.")
    inventory_line = self.InvLineObj.search([('product_id', '=', productKG.id), ('inventory_id', '=', inventory.id)], limit=1)
    self.assertEqual(inventory_line.product_qty, 5000, "Wrong product quantity in inventory line.")
    # Modify the inventory line and set the quantity to 4000 product on this new inventory.
    inventory_line.write({'product_qty': 4000})
    inventory.action_done()
    # Check inventory move quantity of product KG (5000 - 4000 = 1000 kg correction).
    move = self.MoveObj.search([('product_id', '=', productKG.id), ('inventory_id', '=', inventory.id)], limit=1)
    self.assertEqual(move.product_uom_qty, 1000, "Wrong move quantity of product KG.")
    # Check quantity available of product KG.
    quants = self.StockQuantObj.search([('product_id', '=', productKG.id), ('location_id', '=', self.stock_location)])
    total_qty = [quant.qty for quant in quants]
    self.assertEqual(sum(total_qty), 4000, 'Expecting 4000 kg , got %.4f on location stock!' % (sum(total_qty)))
    self.assertEqual(productKG.qty_available, 4000, 'Expecting 4000 kg , got %.4f of quantity available!' % (productKG.qty_available))
    #--------------------------------------------------------
    # TEST PARTIAL INVENTORY WITH PACKS and LOTS
    #---------------------------------------------------------
    packproduct = self.ProductObj.create({'name': 'Pack Product', 'uom_id': self.uom_unit.id, 'uom_po_id': self.uom_unit.id})
    lotproduct = self.ProductObj.create({'name': 'Lot Product', 'uom_id': self.uom_unit.id, 'uom_po_id': self.uom_unit.id})
    inventory = self.InvObj.create({'name': 'Test Partial and Pack',
                                    'filter': 'partial',
                                    'location_id': self.stock_location})
    inventory.prepare_inventory()
    pack_obj = self.env['stock.quant.package']
    lot_obj = self.env['stock.production.lot']
    pack1 = pack_obj.create({'name': 'PACK00TEST1'})
    # NOTE: pack2 is created for the test data set but never referenced below.
    pack2 = pack_obj.create({'name': 'PACK00TEST2'})
    lot1 = lot_obj.create({'name': 'Lot001', 'product_id': lotproduct.id})
    move = self.MoveObj.search([('product_id', '=', productKG.id), ('inventory_id', '=', inventory.id)], limit=1)
    self.assertEqual(len(move), 0, "Partial filter should not create a lines upon prepare")
    line_vals = []
    line_vals += [{'location_id': self.stock_location, 'product_id': packproduct.id, 'product_qty': 10, 'product_uom_id': packproduct.uom_id.id}]
    line_vals += [{'location_id': self.stock_location, 'product_id': packproduct.id, 'product_qty': 20, 'product_uom_id': packproduct.uom_id.id, 'package_id': pack1.id}]
    line_vals += [{'location_id': self.stock_location, 'product_id': lotproduct.id, 'product_qty': 30, 'product_uom_id': lotproduct.uom_id.id, 'prod_lot_id': lot1.id}]
    line_vals += [{'location_id': self.stock_location, 'product_id': lotproduct.id, 'product_qty': 25, 'product_uom_id': lotproduct.uom_id.id, 'prod_lot_id': False}]
    inventory.write({'line_ids': [(0, 0, x) for x in line_vals]})
    inventory.action_done()
    self.assertEqual(packproduct.qty_available, 30, "Wrong qty available for packproduct")
    self.assertEqual(lotproduct.qty_available, 55, "Wrong qty available for lotproduct")
    quants = self.StockQuantObj.search([('product_id', '=', packproduct.id), ('location_id', '=', self.stock_location), ('package_id', '=', pack1.id)])
    total_qty = sum([quant.qty for quant in quants])
    self.assertEqual(total_qty, 20, 'Expecting 20 units on package 1 of packproduct, but we got %.4f on location stock!' % (total_qty))
    # Create an inventory that will put the lots without lot to 0 and check that taking without pack will not take it from the pack
    inventory2 = self.InvObj.create({'name': 'Test Partial Lot and Pack2',
                                     'filter': 'partial',
                                     'location_id': self.stock_location})
    inventory2.prepare_inventory()
    line_vals = []
    line_vals += [{'location_id': self.stock_location, 'product_id': packproduct.id, 'product_qty': 20, 'product_uom_id': packproduct.uom_id.id}]
    line_vals += [{'location_id': self.stock_location, 'product_id': lotproduct.id, 'product_qty': 0, 'product_uom_id': lotproduct.uom_id.id, 'prod_lot_id': False}]
    line_vals += [{'location_id': self.stock_location, 'product_id': lotproduct.id, 'product_qty': 10, 'product_uom_id': lotproduct.uom_id.id, 'prod_lot_id': lot1.id}]
    inventory2.write({'line_ids': [(0, 0, x) for x in line_vals]})
    inventory2.action_done()
    self.assertEqual(packproduct.qty_available, 40, "Wrong qty available for packproduct")
    self.assertEqual(lotproduct.qty_available, 10, "Wrong qty available for lotproduct")
    quants = self.StockQuantObj.search([('product_id', '=', lotproduct.id), ('location_id', '=', self.stock_location), ('lot_id', '=', lot1.id)])
    total_qty = sum([quant.qty for quant in quants])
    # BUGFIX: the failure message previously said "Expecting 0 units"
    # although the assertion expects 10 units of lot1.
    self.assertEqual(total_qty, 10, 'Expecting 10 units lot of lotproduct, but we got %.4f on location stock!' % (total_qty))
    quants = self.StockQuantObj.search([('product_id', '=', lotproduct.id), ('location_id', '=', self.stock_location), ('lot_id', '=', False)])
    total_qty = sum([quant.qty for quant in quants])
    self.assertEqual(total_qty, 0, 'Expecting 0 units lot of lotproduct, but we got %.4f on location stock!' % (total_qty))
lgscofield/odoo | addons/account_analytic_analysis/res_config.py | 426 | 1408 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (C) 2004-2012 OpenERP S.A. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class sale_configuration(osv.osv_memory):
    # Transient extension of the Sales settings wizard: exposes a checkbox
    # that controls membership of the "template required" security group.
    _inherit = 'sale.config.settings'

    _columns = {
        # Enabling this adds users to the implied group, which (per the help
        # text) makes the template field mandatory on analytic accounts and
        # contracts elsewhere in the account_analytic_analysis module.
        'group_template_required': fields.boolean("Mandatory use of templates.",
            implied_group='account_analytic_analysis.group_template_required',
            help="Allows you to set the template field as required when creating an analytic account or a contract."),
    }
| agpl-3.0 |
boxed/CMi | web_frontend/gdata/Crypto/Cipher/__init__.py | 271 | 1145 | """Secret-key encryption algorithms.
Secret-key encryption algorithms transform plaintext in some way that
is dependent on a key, producing ciphertext. This transformation can
easily be reversed, if (and, hopefully, only if) one knows the key.
The encryption modules here all support the interface described in PEP
272, "API for Block Encryption Algorithms".
If you don't know which algorithm to choose, use AES because it's
standard and has undergone a fair bit of examination.
Crypto.Cipher.AES Advanced Encryption Standard
Crypto.Cipher.ARC2 Alleged RC2
Crypto.Cipher.ARC4 Alleged RC4
Crypto.Cipher.Blowfish
Crypto.Cipher.CAST
Crypto.Cipher.DES The Data Encryption Standard. Very commonly used
in the past, but today its 56-bit keys are too small.
Crypto.Cipher.DES3 Triple DES.
Crypto.Cipher.IDEA
Crypto.Cipher.RC5
Crypto.Cipher.XOR The simple XOR cipher.
"""
__all__ = ['AES', 'ARC2', 'ARC4',
'Blowfish', 'CAST', 'DES', 'DES3', 'IDEA', 'RC5',
'XOR'
]
__revision__ = "$Id: __init__.py,v 1.7 2003/02/28 15:28:35 akuchling Exp $"
| mit |
bartslinger/paparazzi | sw/ground_segment/joystick/gb2ivy.py | 3 | 6801 | #!/usr/bin/env python
from __future__ import print_function
import sys
from os import path, getenv
# if PAPARAZZI_SRC not set, then assume the tree containing this
# file is a reasonable substitute
PPRZ_SRC = getenv("PAPARAZZI_SRC", path.normpath(path.join(path.dirname(path.abspath(__file__)), '../../../')))
sys.path.append(PPRZ_SRC + "/sw/lib/python")
sys.path.append(PPRZ_SRC + "/sw/ext/pprzlink/lib/v1.0/python")
from pprzlink.ivy import IvyMessagesInterface
from pprzlink.message import PprzMessage
from settings_xml_parse import PaparazziACSettings
from math import radians
from time import sleep
import threading
import serial
# Game Boy joypad bitmask received over the serial link: one bit per button
# (d-pad, A/B, Select/Start).  NOTE(review): presumably this matches the
# encoding used by the Game Boy side of the link -- confirm against the
# GB firmware that produces these bytes.
J_RIGHT = 1<<0
J_LEFT = 1<<1
J_UP = 1<<2
J_DOWN = 1<<3
J_A = 1<<4
J_B = 1<<5
J_SELECT = 1<<6
J_START = 1<<7
class Guidance(object):
    """Guided-mode commander for one aircraft on the Ivy bus.

    Translates Game Boy joypad bitmasks (the module-level J_* constants)
    into GUIDED-mode velocity setpoints, switches the autopilot between
    NAV and GUIDED via the 'auto2' setting, and lets the caller subscribe
    a display callback for battery telemetry.
    """

    # 'auto2' setting values for the autopilot mode (AP_MODE_* in the
    # original inline comments).
    AP_MODE_NAV = 13
    AP_MODE_GUIDED = 19

    def __init__(self, ac_id, verbose=False):
        """Look up the 'auto2' setting of aircraft *ac_id* and open Ivy.

        If the aircraft settings cannot be loaded, the instance is left
        without an Ivy interface (self._interface stays None).
        """
        self.ac_id = ac_id
        self.verbose = verbose
        self._interface = None
        self.auto2_index = None
        try:
            settings = PaparazziACSettings(self.ac_id)
        except Exception as e:
            print(e)
            return
        try:
            self.auto2_index = settings.name_lookup['auto2'].index
        except Exception as e:
            print(e)
            print("auto2 setting not found, mode change not possible.")
        self._interface = IvyMessagesInterface("gb2ivy")

    def shutdown(self):
        """Close the Ivy interface (safe to call more than once)."""
        if self._interface is not None:
            print("Shutting down ivy interface...")
            self._interface.shutdown()
            self._interface = None

    def __del__(self):
        self.shutdown()

    def bind_flight_param(self, send_cb):
        """Subscribe to ENGINE_STATUS and forward battery voltage to *send_cb*."""
        def bat_cb(ac_id, msg):
            bat = float(msg['bat'])
            # should not be more that 18 characters
            send_cb('bat: '+str(bat)+' V')
        self._interface.subscribe(bat_cb, regex=('(^ground ENGINE_STATUS '+str(self.ac_id)+' .*)'))

    def _set_ap_mode(self, value, label):
        """Send a DL_SETTING changing the 'auto2' mode to *value*.

        No-op when the auto2 setting index was not found at init time.
        *label* is only used for the console message.
        """
        if self.auto2_index is None:
            return
        msg = PprzMessage("ground", "DL_SETTING")
        msg['ac_id'] = self.ac_id
        msg['index'] = self.auto2_index
        msg['value'] = value
        print("Setting mode to %s: %s" % (label, msg))
        self._interface.send(msg)

    def set_guided_mode(self):
        """
        change auto2 mode to GUIDED.
        """
        self._set_ap_mode(self.AP_MODE_GUIDED, "GUIDED")

    def set_nav_mode(self):
        """
        change auto2 mode to NAV.
        """
        self._set_ap_mode(self.AP_MODE_NAV, "NAV")

    def move_at_body_vel(self, forward=0.0, right=0.0, down=0.0, yaw=0.0):
        """
        move at specified velocity in meters/sec with absolute heading (if already in GUIDED mode)
        """
        msg = PprzMessage("datalink", "GUIDED_SETPOINT_NED")
        msg['ac_id'] = self.ac_id
        # NOTE(review): 0xE2 appears to select a body-frame velocity
        # setpoint -- confirm against the GUIDED_SETPOINT_NED flag
        # definition in the pprzlink message spec.
        msg['flags'] = 0xE2
        msg['x'] = forward
        msg['y'] = right
        msg['z'] = down
        msg['yaw'] = yaw
        print("move at vel body: %s" % msg)
        self._interface.send_raw_datalink(msg)

    @staticmethod
    def _decode_command(command):
        """Map a joypad bitmask to (forward, right, down, yaw) velocities.

        The d-pad gives +/-2 m/s horizontally, A/B give -/+1 m/s
        vertically (down is positive), Start/Select give +/-20 degrees
        of yaw (converted to radians).
        """
        right = 0.0
        forward = 0.0
        down = 0.0
        yaw = 0.0
        if command & J_RIGHT:
            right += 2.0
        if command & J_LEFT:
            right -= 2.0
        if command & J_UP:
            forward += 2.0
        if command & J_DOWN:
            forward -= 2.0
        if command & J_A:
            down -= 1.0
        if command & J_B:
            down += 1.0
        if command & J_START:
            yaw += radians(20)
        if command & J_SELECT:
            yaw -= radians(20)
        return forward, right, down, yaw

    def command_callback(self, command):
        """
        convert incoming command into velocity
        """
        forward, right, down, yaw = self._decode_command(int(command))
        self.move_at_body_vel(forward, right, down, yaw)
class SerialInterface(threading.Thread):
    """Background reader thread for the Game Boy serial link.

    Lines read from the serial device are handed to *callback*;
    send() pushes 18-byte status strings (17 chars + NUL) to the device.
    """

    def __init__(self, callback, verbose=False, device='/dev/ttyUSB0', baudrate=9600):
        """Open *device* at *baudrate*; exits the process on failure."""
        threading.Thread.__init__(self)
        self.callback = callback
        self.verbose = verbose
        self.running = True
        try:
            # 1 s read timeout lets the run() loop notice self.running changes.
            self.ser = serial.Serial(device, baudrate, timeout=1.0)
        except serial.SerialException:
            print("Error: unable to open serial port '%s'" % device)
            exit(0)
        #self.trans = PprzTransport(msg_class)

    def stop(self):
        """Stop the read loop and close the serial port."""
        print("End thread and close serial link")
        self.running = False
        self.ser.close()

    def shutdown(self):
        self.stop()

    def __del__(self):
        try:
            self.ser.close()
        except Exception:
            # BUGFIX: was a bare "except:"; only swallow ordinary errors
            # during cleanup, never SystemExit/KeyboardInterrupt.
            pass

    def send(self, msg):
        """
        Send a text message over a serial link
        Max number of char for a GameBoy screen is 18
        Truncate or pad with space when needed
        """
        # 17 visible characters + trailing NUL = 18 bytes per screen line.
        text = msg[:17].ljust(17)
        for c in text:
            # One character at a time, paced so the Game Boy side can keep up.
            self.ser.write(c)
            self.ser.flush()
            sleep(0.01)
        # send 0 for sync
        self.ser.write('\0')

    def run(self):
        """
        Thread running function
        """
        try:
            while self.running:
                # Parse incoming data
                c = self.ser.readline()
                if len(c) > 0:
                    # Callback function on new message
                    self.callback(c)
        except StopIteration:
            pass
def main():
    """Parse CLI options, start the serial reader and switch to GUIDED mode.

    Runs until the reader thread dies or the user interrupts; on exit the
    aircraft is put back into NAV mode and both interfaces are shut down.
    """
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("-i", "--acid", help="aircraft ID", dest='acid', default=1, type=int)
    parser.add_argument("-d", "--device", help="device name", dest='dev', default='/dev/ttyUSB0')
    parser.add_argument("-b", "--baudrate", help="baudrate", dest='baud', default=9600, type=int)
    args = parser.parse_args()

    print("Starting serial interface on %s at %i baud" % (args.dev, args.baud))
    try:
        guidance = Guidance(args.acid)
        serial_interface = SerialInterface(guidance.command_callback, device=args.dev, baudrate=args.baud)
        serial_interface.start()
        guidance.bind_flight_param(serial_interface.send)
        # give the thread some time to properly start
        sleep(0.1)
        guidance.set_guided_mode()
        # BUGFIX: Thread.isAlive() was removed in Python 3.9; is_alive()
        # is the long-standing spelling (available since Python 2.6).
        while serial_interface.is_alive():
            serial_interface.join(1)
    except (KeyboardInterrupt, SystemExit):
        # Put the aircraft back into NAV before quitting so it resumes its plan.
        print('Shutting down...')
        guidance.set_nav_mode()
        serial_interface.stop()
        guidance.shutdown()
        exit()
    except OSError:
        print('Serial port not found')
        guidance.shutdown()
        exit()


if __name__ == '__main__':
    main()
| gpl-2.0 |
houzhenggang/hiwifi-openwrt-HC5661-HC5761 | staging_dir/host/lib/python2.7/test/test_int.py | 32 | 16374 | import sys
import unittest
from test.test_support import run_unittest, have_unicode
import math
# Test vectors for int(string) conversion: (input, expected) pairs, where
# "expected" is either the resulting integer value or the exception type
# that int() is expected to raise for that input (see the try/except loop
# in the test method that consumes this list).
L = [
    ('0', 0),
    ('1', 1),
    ('9', 9),
    ('10', 10),
    ('99', 99),
    ('100', 100),
    ('314', 314),
    (' 314', 314),
    ('314 ', 314),
    (' \t\t 314 \t\t ', 314),
    (repr(sys.maxint), sys.maxint),
    (' 1x', ValueError),
    (' 1 ', 1),
    (' 1\02 ', ValueError),
    ('', ValueError),
    (' ', ValueError),
    (' \t\t ', ValueError)
]
if have_unicode:
    # Same vectors as unicode strings, plus non-ASCII (Arabic-Indic) digits
    # and a non-digit code point that must be rejected.
    L += [
        (unicode('0'), 0),
        (unicode('1'), 1),
        (unicode('9'), 9),
        (unicode('10'), 10),
        (unicode('99'), 99),
        (unicode('100'), 100),
        (unicode('314'), 314),
        (unicode(' 314'), 314),
        (unicode('\u0663\u0661\u0664 ','raw-unicode-escape'), 314),
        (unicode(' \t\t 314 \t\t '), 314),
        (unicode(' 1x'), ValueError),
        (unicode(' 1 '), 1),
        (unicode(' 1\02 '), ValueError),
        (unicode(''), ValueError),
        (unicode(' '), ValueError),
        (unicode(' \t\t '), ValueError),
        (unichr(0x200), ValueError),
    ]
class IntTestCases(unittest.TestCase):
    """Exercise int() construction, string parsing in every base,
    bit_length(), float truncation, and the __int__/__trunc__ protocol
    under Python 2 semantics (separate int/long types, sys.maxint)."""

    def test_basic(self):
        """int() from numbers and from strings in bases 0 and 2-36."""
        self.assertEqual(int(314), 314)
        self.assertEqual(int(3.14), 3)
        self.assertEqual(int(314L), 314)
        # Check that conversion from float truncates towards zero
        self.assertEqual(int(-3.14), -3)
        self.assertEqual(int(3.9), 3)
        self.assertEqual(int(-3.9), -3)
        self.assertEqual(int(3.5), 3)
        self.assertEqual(int(-3.5), -3)
        # Different base:
        self.assertEqual(int("10",16), 16L)
        if have_unicode:
            self.assertEqual(int(unicode("10"),16), 16L)
        # Test conversion from strings and various anomalies
        # (module-level L holds (string, expected-or-exception) pairs;
        # every pair is retried with sign and whitespace prefixes).
        for s, v in L:
            for sign in "", "+", "-":
                for prefix in "", " ", "\t", " \t\t ":
                    ss = prefix + sign + s
                    vv = v
                    if sign == "-" and v is not ValueError:
                        vv = -v
                    try:
                        self.assertEqual(int(ss), vv)
                    except v:
                        pass

        s = repr(-1-sys.maxint)
        x = int(s)
        self.assertEqual(x+1, -sys.maxint)
        self.assertIsInstance(x, int)
        # should return long
        self.assertEqual(int(s[1:]), sys.maxint+1)

        # should return long
        x = int(1e100)
        self.assertIsInstance(x, long)
        x = int(-1e100)
        self.assertIsInstance(x, long)

        # SF bug 434186:  0x80000000/2 != 0x80000000>>1.
        # Worked by accident in Windows release build, but failed in debug build.
        # Failed in all Linux builds.
        x = -1-sys.maxint
        self.assertEqual(x >> 1, x//2)

        self.assertRaises(ValueError, int, '123\0')
        self.assertRaises(ValueError, int, '53', 40)

        # SF bug 1545497: embedded NULs were not detected with
        # explicit base
        self.assertRaises(ValueError, int, '123\0', 10)
        self.assertRaises(ValueError, int, '123\x00 245', 20)

        # A 600-digit literal overflows int and must promote to long.
        x = int('1' * 600)
        self.assertIsInstance(x, long)
        if have_unicode:
            x = int(unichr(0x661) * 600)
            self.assertIsInstance(x, long)

        self.assertRaises(TypeError, int, 1, 12)

        self.assertEqual(int('0123', 0), 83)
        self.assertEqual(int('0x123', 16), 291)

        # Bug 1679: "0x" is not a valid hex literal
        self.assertRaises(ValueError, int, "0x", 16)
        self.assertRaises(ValueError, int, "0x", 0)

        self.assertRaises(ValueError, int, "0o", 8)
        self.assertRaises(ValueError, int, "0o", 0)

        self.assertRaises(ValueError, int, "0b", 2)
        self.assertRaises(ValueError, int, "0b", 0)

        # SF bug 1334662: int(string, base) wrong answers
        # Various representations of 2**32 evaluated to 0
        # rather than 2**32 in previous versions
        self.assertEqual(int('100000000000000000000000000000000', 2), 4294967296L)
        self.assertEqual(int('102002022201221111211', 3), 4294967296L)
        self.assertEqual(int('10000000000000000', 4), 4294967296L)
        self.assertEqual(int('32244002423141', 5), 4294967296L)
        self.assertEqual(int('1550104015504', 6), 4294967296L)
        self.assertEqual(int('211301422354', 7), 4294967296L)
        self.assertEqual(int('40000000000', 8), 4294967296L)
        self.assertEqual(int('12068657454', 9), 4294967296L)
        self.assertEqual(int('4294967296', 10), 4294967296L)
        self.assertEqual(int('1904440554', 11), 4294967296L)
        self.assertEqual(int('9ba461594', 12), 4294967296L)
        self.assertEqual(int('535a79889', 13), 4294967296L)
        self.assertEqual(int('2ca5b7464', 14), 4294967296L)
        self.assertEqual(int('1a20dcd81', 15), 4294967296L)
        self.assertEqual(int('100000000', 16), 4294967296L)
        self.assertEqual(int('a7ffda91', 17), 4294967296L)
        self.assertEqual(int('704he7g4', 18), 4294967296L)
        self.assertEqual(int('4f5aff66', 19), 4294967296L)
        self.assertEqual(int('3723ai4g', 20), 4294967296L)
        self.assertEqual(int('281d55i4', 21), 4294967296L)
        self.assertEqual(int('1fj8b184', 22), 4294967296L)
        self.assertEqual(int('1606k7ic', 23), 4294967296L)
        self.assertEqual(int('mb994ag', 24), 4294967296L)
        self.assertEqual(int('hek2mgl', 25), 4294967296L)
        self.assertEqual(int('dnchbnm', 26), 4294967296L)
        self.assertEqual(int('b28jpdm', 27), 4294967296L)
        self.assertEqual(int('8pfgih4', 28), 4294967296L)
        self.assertEqual(int('76beigg', 29), 4294967296L)
        self.assertEqual(int('5qmcpqg', 30), 4294967296L)
        self.assertEqual(int('4q0jto4', 31), 4294967296L)
        self.assertEqual(int('4000000', 32), 4294967296L)
        self.assertEqual(int('3aokq94', 33), 4294967296L)
        self.assertEqual(int('2qhxjli', 34), 4294967296L)
        self.assertEqual(int('2br45qb', 35), 4294967296L)
        self.assertEqual(int('1z141z4', 36), 4294967296L)

        # tests with base 0
        # this fails on 3.0, but in 2.x the old octal syntax is allowed
        self.assertEqual(int(' 0123  ', 0), 83)
        self.assertEqual(int(' 0123  ', 0), 83)
        self.assertEqual(int('000', 0), 0)
        self.assertEqual(int('0o123', 0), 83)
        self.assertEqual(int('0x123', 0), 291)
        self.assertEqual(int('0b100', 0), 4)
        self.assertEqual(int(' 0O123   ', 0), 83)
        self.assertEqual(int(' 0X123  ', 0), 291)
        self.assertEqual(int(' 0B100 ', 0), 4)
        self.assertEqual(int('0', 0), 0)
        self.assertEqual(int('+0', 0), 0)
        self.assertEqual(int('-0', 0), 0)
        self.assertEqual(int('00', 0), 0)
        self.assertRaises(ValueError, int, '08', 0)
        self.assertRaises(ValueError, int, '-012395', 0)

        # without base still base 10
        self.assertEqual(int('0123'), 123)
        self.assertEqual(int('0123', 10), 123)

        # tests with prefix and base != 0
        self.assertEqual(int('0x123', 16), 291)
        self.assertEqual(int('0o123', 8), 83)
        self.assertEqual(int('0b100', 2), 4)
        self.assertEqual(int('0X123', 16), 291)
        self.assertEqual(int('0O123', 8), 83)
        self.assertEqual(int('0B100', 2), 4)

        # the code has special checks for the first character after the
        # type prefix
        self.assertRaises(ValueError, int, '0b2', 2)
        self.assertRaises(ValueError, int, '0b02', 2)
        self.assertRaises(ValueError, int, '0B2', 2)
        self.assertRaises(ValueError, int, '0B02', 2)
        self.assertRaises(ValueError, int, '0o8', 8)
        self.assertRaises(ValueError, int, '0o08', 8)
        self.assertRaises(ValueError, int, '0O8', 8)
        self.assertRaises(ValueError, int, '0O08', 8)
        self.assertRaises(ValueError, int, '0xg', 16)
        self.assertRaises(ValueError, int, '0x0g', 16)
        self.assertRaises(ValueError, int, '0Xg', 16)
        self.assertRaises(ValueError, int, '0X0g', 16)

        # SF bug 1334662: int(string, base) wrong answers
        # Checks for proper evaluation of 2**32 + 1
        self.assertEqual(int('100000000000000000000000000000001', 2), 4294967297L)
        self.assertEqual(int('102002022201221111212', 3), 4294967297L)
        self.assertEqual(int('10000000000000001', 4), 4294967297L)
        self.assertEqual(int('32244002423142', 5), 4294967297L)
        self.assertEqual(int('1550104015505', 6), 4294967297L)
        self.assertEqual(int('211301422355', 7), 4294967297L)
        self.assertEqual(int('40000000001', 8), 4294967297L)
        self.assertEqual(int('12068657455', 9), 4294967297L)
        self.assertEqual(int('4294967297', 10), 4294967297L)
        self.assertEqual(int('1904440555', 11), 4294967297L)
        self.assertEqual(int('9ba461595', 12), 4294967297L)
        self.assertEqual(int('535a7988a', 13), 4294967297L)
        self.assertEqual(int('2ca5b7465', 14), 4294967297L)
        self.assertEqual(int('1a20dcd82', 15), 4294967297L)
        self.assertEqual(int('100000001', 16), 4294967297L)
        self.assertEqual(int('a7ffda92', 17), 4294967297L)
        self.assertEqual(int('704he7g5', 18), 4294967297L)
        self.assertEqual(int('4f5aff67', 19), 4294967297L)
        self.assertEqual(int('3723ai4h', 20), 4294967297L)
        self.assertEqual(int('281d55i5', 21), 4294967297L)
        self.assertEqual(int('1fj8b185', 22), 4294967297L)
        self.assertEqual(int('1606k7id', 23), 4294967297L)
        self.assertEqual(int('mb994ah', 24), 4294967297L)
        self.assertEqual(int('hek2mgm', 25), 4294967297L)
        self.assertEqual(int('dnchbnn', 26), 4294967297L)
        self.assertEqual(int('b28jpdn', 27), 4294967297L)
        self.assertEqual(int('8pfgih5', 28), 4294967297L)
        self.assertEqual(int('76beigh', 29), 4294967297L)
        self.assertEqual(int('5qmcpqh', 30), 4294967297L)
        self.assertEqual(int('4q0jto5', 31), 4294967297L)
        self.assertEqual(int('4000001', 32), 4294967297L)
        self.assertEqual(int('3aokq95', 33), 4294967297L)
        self.assertEqual(int('2qhxjlj', 34), 4294967297L)
        self.assertEqual(int('2br45qc', 35), 4294967297L)
        self.assertEqual(int('1z141z5', 36), 4294967297L)

    def test_bit_length(self):
        """Check int.bit_length() against three independent definitions."""
        tiny = 1e-10
        for x in xrange(-65000, 65000):
            k = x.bit_length()
            # Check equivalence with Python version
            self.assertEqual(k, len(bin(x).lstrip('-0b')))
            # Behaviour as specified in the docs
            if x != 0:
                self.assertTrue(2**(k-1) <= abs(x) < 2**k)
            else:
                self.assertEqual(k, 0)
            # Alternative definition: x.bit_length() == 1 + floor(log_2(x))
            if x != 0:
                # When x is an exact power of 2, numeric errors can
                # cause floor(log(x)/log(2)) to be one too small; for
                # small x this can be fixed by adding a small quantity
                # to the quotient before taking the floor.
                self.assertEqual(k, 1 + math.floor(
                        math.log(abs(x))/math.log(2) + tiny))

        self.assertEqual((0).bit_length(), 0)
        self.assertEqual((1).bit_length(), 1)
        self.assertEqual((-1).bit_length(), 1)
        self.assertEqual((2).bit_length(), 2)
        self.assertEqual((-2).bit_length(), 2)
        # Values straddling exact powers of two at int/long boundaries.
        for i in [2, 3, 15, 16, 17, 31, 32, 33, 63, 64]:
            a = 2**i
            self.assertEqual((a-1).bit_length(), i)
            self.assertEqual((1-a).bit_length(), i)
            self.assertEqual((a).bit_length(), i+1)
            self.assertEqual((-a).bit_length(), i+1)
            self.assertEqual((a+1).bit_length(), i+1)
            self.assertEqual((-a-1).bit_length(), i+1)

    @unittest.skipUnless(float.__getformat__("double").startswith("IEEE"),
                         "test requires IEEE 754 doubles")
    def test_float_conversion(self):
        """Round-trip and round-half-to-even behaviour near 2**53/2**54,
        the limits of exact float representation of integers."""
        # values exactly representable as floats
        exact_values = [-2, -1, 0, 1, 2, 2**52, 2**53-1, 2**53, 2**53+2,
                        2**53+4, 2**54-4, 2**54-2, 2**63, -2**63, 2**64,
                        -2**64, 10**20, 10**21, 10**22]
        for value in exact_values:
            self.assertEqual(int(float(int(value))), value)

        # test round-half-to-even
        self.assertEqual(int(float(2**53+1)), 2**53)
        self.assertEqual(int(float(2**53+2)), 2**53+2)
        self.assertEqual(int(float(2**53+3)), 2**53+4)
        self.assertEqual(int(float(2**53+5)), 2**53+4)
        self.assertEqual(int(float(2**53+6)), 2**53+6)
        self.assertEqual(int(float(2**53+7)), 2**53+8)

        self.assertEqual(int(float(-2**53-1)), -2**53)
        self.assertEqual(int(float(-2**53-2)), -2**53-2)
        self.assertEqual(int(float(-2**53-3)), -2**53-4)
        self.assertEqual(int(float(-2**53-5)), -2**53-4)
        self.assertEqual(int(float(-2**53-6)), -2**53-6)
        self.assertEqual(int(float(-2**53-7)), -2**53-8)

        self.assertEqual(int(float(2**54-2)), 2**54-2)
        self.assertEqual(int(float(2**54-1)), 2**54)
        self.assertEqual(int(float(2**54+2)), 2**54)
        self.assertEqual(int(float(2**54+3)), 2**54+4)
        self.assertEqual(int(float(2**54+5)), 2**54+4)
        self.assertEqual(int(float(2**54+6)), 2**54+8)
        self.assertEqual(int(float(2**54+10)), 2**54+8)
        self.assertEqual(int(float(2**54+11)), 2**54+12)

    def test_intconversion(self):
        """int(obj) via the __int__ and __trunc__ protocols, for both
        classic and new-style classes."""
        # Test __int__()
        class ClassicMissingMethods:
            pass
        self.assertRaises(AttributeError, int, ClassicMissingMethods())

        class MissingMethods(object):
            pass
        self.assertRaises(TypeError, int, MissingMethods())

        class Foo0:
            def __int__(self):
                return 42

        class Foo1(object):
            def __int__(self):
                return 42

        class Foo2(int):
            def __int__(self):
                return 42

        class Foo3(int):
            def __int__(self):
                return self

        class Foo4(int):
            def __int__(self):
                return 42L

        class Foo5(int):
            def __int__(self):
                return 42.

        self.assertEqual(int(Foo0()), 42)
        self.assertEqual(int(Foo1()), 42)
        self.assertEqual(int(Foo2()), 42)
        # Foo3 is an int subclass whose value is 0; __int__ returning self
        # yields that underlying value.
        self.assertEqual(int(Foo3()), 0)
        self.assertEqual(int(Foo4()), 42L)
        self.assertRaises(TypeError, int, Foo5())

        class Classic:
            pass
        for base in (object, Classic):
            # __int__ wins over __trunc__ when both are defined.
            class IntOverridesTrunc(base):
                def __int__(self):
                    return 42
                def __trunc__(self):
                    return -12
            self.assertEqual(int(IntOverridesTrunc()), 42)

            class JustTrunc(base):
                def __trunc__(self):
                    return 42
            self.assertEqual(int(JustTrunc()), 42)

            for trunc_result_base in (object, Classic):
                class Integral(trunc_result_base):
                    def __int__(self):
                        return 42

                class TruncReturnsNonInt(base):
                    def __trunc__(self):
                        return Integral()
                self.assertEqual(int(TruncReturnsNonInt()), 42)

                class NonIntegral(trunc_result_base):
                    def __trunc__(self):
                        # Check that we avoid infinite recursion.
                        return NonIntegral()

                class TruncReturnsNonIntegral(base):
                    def __trunc__(self):
                        return NonIntegral()
                try:
                    int(TruncReturnsNonIntegral())
                except TypeError as e:
                    self.assertEqual(str(e),
                                     "__trunc__ returned non-Integral"
                                     " (type NonIntegral)")
                else:
                    self.fail("Failed to raise TypeError with %s" %
                              ((base, trunc_result_base),))
def test_main():
    """Entry point used by Lib/test's regrtest machinery."""
    run_unittest(IntTestCases)

if __name__ == "__main__":
    test_main()
| gpl-2.0 |
citrix-openstack-build/nova | nova/tests/api/openstack/compute/contrib/test_used_limits.py | 7 | 9282 | # vim: tabstop=5 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack.compute.contrib import used_limits
from nova.api.openstack.compute import limits
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
import nova.context
from nova import exception
from nova import quota
from nova import test
class FakeRequest(object):
    """Minimal stand-in for a WSGI request carrying a nova context.

    Exposes just the attributes the UsedLimits controller reads:
    ``environ`` (with the context), ``reserved`` and the ``GET`` args.
    """

    def __init__(self, context, reserved=False):
        self.environ = {'nova.context': context}
        self.reserved = reserved
        # Mirror the flag as the query argument the controller inspects.
        if reserved:
            self.GET = {'reserved': 1}
        else:
            self.GET = {}
class UsedLimitsTestCase(test.NoDBTestCase):
    """Unit tests for the os-used-limits extension controller, driven
    through mox-stubbed quota and authorization collaborators."""

    def setUp(self):
        """Run before each test."""
        super(UsedLimitsTestCase, self).setUp()
        self.ext_mgr = self.mox.CreateMock(extensions.ExtensionManager)
        self.controller = used_limits.UsedLimitsController(self.ext_mgr)
        self.fake_context = nova.context.RequestContext('fake', 'fake')
        # Stub the admin authorization hook so tests can script it.
        self.mox.StubOutWithMock(used_limits, 'authorize_for_admin')
        self.authorize_for_admin = used_limits.authorize_for_admin

    def _do_test_used_limits(self, reserved):
        """Drive controller.index() and verify every absolute limit.

        When *reserved* is true the request carries ?reserved=1 and each
        reported usage must include the reserved quota on top of in_use.
        """
        fake_req = FakeRequest(self.fake_context, reserved=reserved)
        obj = {
            "limits": {
                "rate": [],
                "absolute": {},
            },
        }
        res = wsgi.ResponseObject(obj)
        # Response key -> quota resource name handled by the controller.
        quota_map = {
            'totalRAMUsed': 'ram',
            'totalCoresUsed': 'cores',
            'totalInstancesUsed': 'instances',
            'totalFloatingIpsUsed': 'floating_ips',
            'totalSecurityGroupsUsed': 'security_groups',
        }
        # Synthesize distinguishable limit/in_use/reserved values per quota.
        limits = {}
        for display_name, q in quota_map.iteritems():
            limits[q] = {'limit': len(display_name),
                         'in_use': len(display_name) / 2,
                         'reserved': len(display_name) / 3}

        def stub_get_project_quotas(context, project_id, usages=True):
            return limits

        self.stubs.Set(quota.QUOTAS, "get_project_quotas",
                       stub_get_project_quotas)
        self.ext_mgr.is_loaded('os-used-limits-for-admin').AndReturn(False)
        self.mox.ReplayAll()
        self.controller.index(fake_req, res)
        abs_limits = res.obj['limits']['absolute']
        for used_limit, value in abs_limits.iteritems():
            r = limits[quota_map[used_limit]]['reserved'] if reserved else 0
            self.assertEqual(value,
                             limits[quota_map[used_limit]]['in_use'] + r)

    def test_used_limits_basic(self):
        """Usage without the reserved flag reports in_use only."""
        self._do_test_used_limits(False)

    def test_used_limits_with_reserved(self):
        """?reserved=1 adds reserved quota into the reported usage."""
        self._do_test_used_limits(True)

    def test_admin_can_fetch_limits_for_a_given_tenant_id(self):
        """An authorized admin may query another tenant's usage."""
        project_id = "123456"
        user_id = "A1234"
        tenant_id = 'abcd'
        self.fake_context.project_id = project_id
        self.fake_context.user_id = user_id
        obj = {
            "limits": {
                "rate": [],
                "absolute": {},
            },
        }
        target = {
            "project_id": tenant_id,
            "user_id": user_id
        }
        fake_req = FakeRequest(self.fake_context)
        fake_req.GET = {'tenant_id': tenant_id}
        self.ext_mgr.is_loaded('os-used-limits-for-admin').AndReturn(True)
        # Expect the admin policy check against the *target* tenant.
        self.authorize_for_admin(self.fake_context, target=target)
        self.mox.StubOutWithMock(quota.QUOTAS, 'get_project_quotas')
        # Quotas must be fetched for the requested tenant, not the caller.
        quota.QUOTAS.get_project_quotas(self.fake_context, '%s' % tenant_id,
                                        usages=True).AndReturn({})
        self.mox.ReplayAll()
        res = wsgi.ResponseObject(obj)
        self.controller.index(fake_req, res)

    def test_admin_can_fetch_used_limits_for_own_project(self):
        """With no tenant_id argument no admin policy check is needed."""
        project_id = "123456"
        user_id = "A1234"
        self.fake_context.project_id = project_id
        self.fake_context.user_id = user_id
        obj = {
            "limits": {
                "rate": [],
                "absolute": {},
            },
        }
        fake_req = FakeRequest(self.fake_context)
        fake_req.GET = {}
        self.ext_mgr.is_loaded('os-used-limits-for-admin').AndReturn(True)
        self.mox.StubOutWithMock(extensions, 'extension_authorizer')
        self.mox.StubOutWithMock(quota.QUOTAS, 'get_project_quotas')
        quota.QUOTAS.get_project_quotas(self.fake_context, '%s' % project_id,
                                        usages=True).AndReturn({})
        self.mox.ReplayAll()
        res = wsgi.ResponseObject(obj)
        self.controller.index(fake_req, res)

    def test_non_admin_cannot_fetch_used_limits_for_any_other_project(self):
        """A failed policy check must propagate as PolicyNotAuthorized."""
        project_id = "123456"
        user_id = "A1234"
        tenant_id = "abcd"
        self.fake_context.project_id = project_id
        self.fake_context.user_id = user_id
        obj = {
            "limits": {
                "rate": [],
                "absolute": {},
            },
        }
        target = {
            "project_id": tenant_id,
            "user_id": user_id
        }
        fake_req = FakeRequest(self.fake_context)
        fake_req.GET = {'tenant_id': tenant_id}
        self.ext_mgr.is_loaded('os-used-limits-for-admin').AndReturn(True)
        self.authorize_for_admin(self.fake_context, target=target). \
            AndRaise(exception.PolicyNotAuthorized(
                action="compute_extension:used_limits_for_admin"))
        self.mox.ReplayAll()
        res = wsgi.ResponseObject(obj)
        self.assertRaises(exception.PolicyNotAuthorized, self.controller.index,
                          fake_req, res)

    def test_used_limits_fetched_for_context_project_id(self):
        """Without the admin extension, quotas come from the caller's
        own project id taken from the request context."""
        project_id = "123456"
        self.fake_context.project_id = project_id
        obj = {
            "limits": {
                "rate": [],
                "absolute": {},
            },
        }
        fake_req = FakeRequest(self.fake_context)
        self.ext_mgr.is_loaded('os-used-limits-for-admin').AndReturn(False)
        self.mox.StubOutWithMock(quota.QUOTAS, 'get_project_quotas')
        quota.QUOTAS.get_project_quotas(self.fake_context, project_id,
                                        usages=True).AndReturn({})
        self.mox.ReplayAll()
        res = wsgi.ResponseObject(obj)
        self.controller.index(fake_req, res)

    def test_used_ram_added(self):
        """A 'ram' quota usage appears as totalRAMUsed in the response."""
        fake_req = FakeRequest(self.fake_context)
        obj = {
            "limits": {
                "rate": [],
                "absolute": {
                    "maxTotalRAMSize": 512,
                },
            },
        }
        res = wsgi.ResponseObject(obj)

        def stub_get_project_quotas(context, project_id, usages=True):
            return {'ram': {'limit': 512, 'in_use': 256}}

        self.ext_mgr.is_loaded('os-used-limits-for-admin').AndReturn(False)
        self.stubs.Set(quota.QUOTAS, "get_project_quotas",
                       stub_get_project_quotas)
        self.mox.ReplayAll()
        self.controller.index(fake_req, res)
        abs_limits = res.obj['limits']['absolute']
        self.assertTrue('totalRAMUsed' in abs_limits)
        self.assertEqual(abs_limits['totalRAMUsed'], 256)

    def test_no_ram_quota(self):
        """No 'ram' quota means no totalRAMUsed key is emitted."""
        fake_req = FakeRequest(self.fake_context)
        obj = {
            "limits": {
                "rate": [],
                "absolute": {},
            },
        }
        res = wsgi.ResponseObject(obj)

        def stub_get_project_quotas(context, project_id, usages=True):
            return {}

        self.ext_mgr.is_loaded('os-used-limits-for-admin').AndReturn(False)
        self.stubs.Set(quota.QUOTAS, "get_project_quotas",
                       stub_get_project_quotas)
        self.mox.ReplayAll()
        self.controller.index(fake_req, res)
        abs_limits = res.obj['limits']['absolute']
        self.assertFalse('totalRAMUsed' in abs_limits)

    def test_used_limits_xmlns(self):
        """The XML serialization must carry the extension namespace."""
        fake_req = FakeRequest(self.fake_context)
        obj = {
            "limits": {
                "rate": [],
                "absolute": {},
            },
        }
        res = wsgi.ResponseObject(obj, xml=limits.LimitsTemplate)
        res.preserialize('xml')

        def stub_get_project_quotas(context, project_id, usages=True):
            return {}

        self.ext_mgr.is_loaded('os-used-limits-for-admin').AndReturn(False)
        self.stubs.Set(quota.QUOTAS, "get_project_quotas",
                       stub_get_project_quotas)
        self.mox.ReplayAll()
        self.controller.index(fake_req, res)
        response = res.serialize(None, 'xml')
        self.assertTrue(used_limits.XMLNS in response.body)
| apache-2.0 |
percy-g2/Novathor_xperia_u8500 | 6.1.1.B.0.253/external/webkit/Tools/QueueStatusServer/main.py | 15 | 3238 | # Copyright (C) 2009 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Pin App Engine's bundled Django to the 1.1 release before anything
# imports Django (the SDK would otherwise default to an older version).
from google.appengine.dist import use_library
use_library('django', '1.1')
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from handlers.dashboard import Dashboard
from handlers.gc import GC
from handlers.nextpatch import NextPatch
from handlers.patch import Patch
from handlers.patchstatus import PatchStatus
from handlers.queuestatus import QueueStatus
from handlers.recentstatus import QueuesOverview
from handlers.releasepatch import ReleasePatch
from handlers.showresults import ShowResults
from handlers.statusbubble import StatusBubble
from handlers.submittoews import SubmitToEWS
from handlers.svnrevision import SVNRevision
from handlers.updatestatus import UpdateStatus
from handlers.updatesvnrevision import UpdateSVNRevision
from handlers.updateworkitems import UpdateWorkItems
# Make the custom "webkit_extras" template filters available to all
# templates rendered by the handlers below.
webapp.template.register_template_library('filters.webkit_extras')

# URL routing table: request path (plain string or regex with capture
# groups passed to the handler) -> handler class.
routes = [
    ('/', QueuesOverview),
    ('/dashboard', Dashboard),
    ('/gc', GC),
    (r'/patch-status/(.*)/(.*)', PatchStatus),
    (r'/patch/(.*)', Patch),
    (r'/submit-to-ews', SubmitToEWS),
    (r'/results/(.*)', ShowResults),
    (r'/status-bubble/(.*)', StatusBubble),
    (r'/svn-revision/(.*)', SVNRevision),
    # NOTE: the more specific /queue-status/.../bots/... route must come
    # before the general /queue-status/... route to match first.
    (r'/queue-status/(.*)/bots/(.*)', QueueStatus),
    (r'/queue-status/(.*)', QueueStatus),
    (r'/next-patch/(.*)', NextPatch),
    (r'/release-patch', ReleasePatch),
    ('/update-status', UpdateStatus),
    ('/update-work-items', UpdateWorkItems),
    ('/update-svn-revision', UpdateSVNRevision),
]

application = webapp.WSGIApplication(routes, debug=True)

def main():
    """Serve the WSGI application under App Engine's CGI adapter."""
    run_wsgi_app(application)

if __name__ == "__main__":
    main()
| gpl-2.0 |
alangwansui/mtl_ordercenter | openerp/addons/base_status/__init__.py | 61 | 1087 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2012-today OpenERP SA (<http://openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import base_state
import base_stage
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
jmcarp/django | django/core/files/uploadhandler.py | 149 | 6877 | """
Base file upload handler classes, and the built-in concrete subclasses
"""
from __future__ import unicode_literals
from io import BytesIO
from django.conf import settings
from django.core.files.uploadedfile import (
InMemoryUploadedFile, TemporaryUploadedFile,
)
from django.utils.encoding import python_2_unicode_compatible
from django.utils.module_loading import import_string
# Public API of this module; also constrains ``from ... import *``.
__all__ = [
    'UploadFileException', 'StopUpload', 'SkipFile', 'FileUploadHandler',
    'TemporaryFileUploadHandler', 'MemoryFileUploadHandler', 'load_handler',
    'StopFutureHandlers'
]
class UploadFileException(Exception):
    """
    Base class for any error having to do with uploading files.
    """
    pass
@python_2_unicode_compatible
class StopUpload(UploadFileException):
    """
    This exception is raised when an upload must abort.
    """
    def __init__(self, connection_reset=False):
        """
        If ``connection_reset`` is ``True``, Django will halt the upload
        without consuming the rest of the upload data. This will cause the
        browser to show a "connection reset" error.
        """
        self.connection_reset = connection_reset

    def __str__(self):
        # Human-readable description, used in tracebacks and logs.
        if self.connection_reset:
            return 'StopUpload: Halt current upload.'
        else:
            return 'StopUpload: Consume request data, then halt.'
class SkipFile(UploadFileException):
    """
    This exception is raised by an upload handler that wants to skip a given file.
    """
    # The type itself is the signal; no extra state is carried.
    pass
class StopFutureHandlers(UploadFileException):
    """
    Upload handlers that have handled a file and do not want future handlers to
    run should raise this exception instead of returning None.
    """
    pass
class FileUploadHandler(object):
    """
    Base class for streaming upload handlers.

    Subclasses are fed the upload chunk by chunk through
    ``receive_data_chunk()`` and assemble an ``UploadedFile``
    in ``file_complete()``.
    """
    chunk_size = 64 * 2 ** 10  # : The default chunk size is 64 KB.

    def __init__(self, request=None):
        self.request = request
        # Per-file state, (re)populated by new_file() for each upload.
        self.file_name = None
        self.content_type = None
        self.content_length = None
        self.charset = None
        self.content_type_extra = None

    def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None):
        """
        Handle the raw input from the client. No-op in the base class.

        Parameters:

            :input_data:
                An object that supports reading via .read().
            :META:
                ``request.META``.
            :content_length:
                The (integer) value of the Content-Length header from the
                client.
            :boundary: The boundary from the Content-Type header. Be sure to
                prepend two '--'.
        """
        pass

    def new_file(self, field_name, file_name, content_type, content_length, charset=None, content_type_extra=None):
        """
        Signal that a new file has been started.

        Warning: As with any data from the client, you should not trust
        content_length (and sometimes won't even get it).
        """
        (self.field_name, self.file_name,
         self.content_type, self.content_length,
         self.charset, self.content_type_extra) = (
            field_name, file_name,
            content_type, content_length,
            charset, content_type_extra)

    def receive_data_chunk(self, raw_data, start):
        """
        Receive data from the streamed upload parser. ``start`` is the position
        in the file of the chunk.
        """
        raise NotImplementedError('subclasses of FileUploadHandler must provide a receive_data_chunk() method')

    def file_complete(self, file_size):
        """
        Signal that a file has completed. File size corresponds to the actual
        size accumulated by all the chunks.

        Subclasses should return a valid ``UploadedFile`` object.
        """
        raise NotImplementedError('subclasses of FileUploadHandler must provide a file_complete() method')

    def upload_complete(self):
        """
        Signal that the upload is complete. Subclasses should perform cleanup
        that is necessary for this handler.
        """
        pass
class TemporaryFileUploadHandler(FileUploadHandler):
    """
    Upload handler that streams data into a temporary file on disk.
    """
    def __init__(self, *args, **kwargs):
        super(TemporaryFileUploadHandler, self).__init__(*args, **kwargs)

    def new_file(self, *args, **kwargs):
        """
        Create the file object to append to as data is coming in.
        """
        super(TemporaryFileUploadHandler, self).new_file(*args, **kwargs)
        # Size starts at 0 and is fixed up later in file_complete().
        self.file = TemporaryUploadedFile(self.file_name, self.content_type, 0, self.charset, self.content_type_extra)

    def receive_data_chunk(self, raw_data, start):
        # Append each incoming chunk to the backing temporary file.
        self.file.write(raw_data)

    def file_complete(self, file_size):
        # Rewind so the consumer reads from the beginning, and record the
        # actual number of bytes received.
        self.file.seek(0)
        self.file.size = file_size
        return self.file
class MemoryFileUploadHandler(FileUploadHandler):
    """
    File upload handler that buffers small uploads entirely in memory.
    """

    def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None):
        """
        Decide from the declared Content-Length whether this upload is
        small enough to be held in memory.
        """
        # Uploads above FILE_UPLOAD_MAX_MEMORY_SIZE must be left for a
        # disk-based handler further down the chain.
        self.activated = content_length <= settings.FILE_UPLOAD_MAX_MEMORY_SIZE

    def new_file(self, *args, **kwargs):
        super(MemoryFileUploadHandler, self).new_file(*args, **kwargs)
        if self.activated:
            self.file = BytesIO()
            # No later handler needs to see this file.
            raise StopFutureHandlers()

    def receive_data_chunk(self, raw_data, start):
        """
        Buffer the chunk in memory, or pass it through when inactive.
        """
        if not self.activated:
            return raw_data
        self.file.write(raw_data)

    def file_complete(self, file_size):
        """
        Return an in-memory uploaded file if this handler is active.
        """
        if not self.activated:
            return

        buf = self.file
        buf.seek(0)
        return InMemoryUploadedFile(
            file=buf,
            field_name=self.field_name,
            name=self.file_name,
            content_type=self.content_type,
            size=file_size,
            charset=self.charset,
            content_type_extra=self.content_type_extra
        )
def load_handler(path, *args, **kwargs):
    """
    Given a dotted path to an upload handler, return an instance of it.

    E.g.::
        >>> from django.http import HttpRequest
        >>> request = HttpRequest()
        >>> load_handler('django.core.files.uploadhandler.TemporaryFileUploadHandler', request)
        <TemporaryFileUploadHandler object at 0x...>
    """
    handler_class = import_string(path)
    return handler_class(*args, **kwargs)
| bsd-3-clause |
wisdark/Empire | data/agent/stagers/common/get_sysinfo.py | 7 | 2133 | import os
import sys
import pwd
import socket
import subprocess
def get_sysinfo(nonce='00000000'):
    """Collect host/agent facts and pack them into the pipe-delimited
    check-in string expected by the server."""
    # NOTE: requires global variable "server" to be set
    # nonce | listener | domainname | username | hostname | internal_ip | os_details | os_details | high_integrity | process_name | process_id | language | language_version
    # Sentinel recorded for any field whose probe raises.
    __FAILED_FUNCTION = '[FAILED QUERY]'
    try:
        # strip("\\") drops stray backslashes (e.g. a DOMAIN\ prefix).
        username = pwd.getpwuid(os.getuid())[0].strip("\\")
    except Exception as e:
        username = __FAILED_FUNCTION
    try:
        # Shell out rather than os.geteuid() — keeps the same answer a
        # user would see from `id -u`.
        uid = os.popen('id -u').read().strip()
    except Exception as e:
        uid = __FAILED_FUNCTION
    try:
        # NOTE(review): intentionally mixed types — the string "True" or
        # the bool False; both render identically through "%s" below.
        highIntegrity = "True" if (uid == "0") else False
    except Exception as e:
        highIntegrity = __FAILED_FUNCTION
    try:
        osDetails = os.uname()
    except Exception as e:
        osDetails = __FAILED_FUNCTION
    try:
        # uname()[1] is the nodename/hostname.
        hostname = osDetails[1]
    except Exception as e:
        hostname = __FAILED_FUNCTION
    try:
        internalIP = socket.gethostbyname(socket.gethostname())
    except Exception as e:
        # Fallback when the hostname does not resolve: scrape ifconfig
        # for the first non-loopback IPv4 address.
        try:
            internalIP = os.popen("ifconfig|grep inet|grep inet6 -v|grep -v 127.0.0.1|cut -d' ' -f2").read()
        except Exception as e1:
            internalIP = __FAILED_FUNCTION
    try:
        # Flatten the uname tuple into one comma-separated field.
        osDetails = ",".join(osDetails)
    except Exception as e:
        osDetails = __FAILED_FUNCTION
    try:
        processID = os.getpid()
    except Exception as e:
        processID = __FAILED_FUNCTION
    try:
        temp = sys.version_info
        pyVersion = "%s.%s" % (temp[0], temp[1])
    except Exception as e:
        pyVersion = __FAILED_FUNCTION
    language = 'python'
    # `ps <pid>` prints a header line then our process line; columns 5+
    # of that line are the command. Assumes Python 2 text pipes — under
    # Python 3 stdout.read() would return bytes; TODO confirm runtime.
    cmd = 'ps %s' % (os.getpid())
    ps = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
    out = ps.stdout.read()
    parts = out.split("\n")
    ps.stdout.close()
    if len(parts) > 2:
        processName = " ".join(parts[1].split()[4:])
    else:
        processName = 'python'
    # The empty field is the (unpopulated) domain name slot; "server" is
    # expected to be patched into the module globals at staging time.
    return "%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s" % (nonce, server, '', username, hostname, internalIP, osDetails, highIntegrity, processName, processID, language, pyVersion)
| bsd-3-clause |
zooba/PTVS | Python/Product/Miniconda/Miniconda3-x64/Lib/unittest/test/test_break.py | 16 | 9493 | import gc
import io
import os
import sys
import signal
import weakref
import unittest
@unittest.skipUnless(hasattr(os, 'kill'), "Test requires os.kill")
@unittest.skipIf(sys.platform =="win32", "Test cannot run on Windows")
class TestBreak(unittest.TestCase):
    # SIGINT handler installed for the duration of each test; subclasses
    # override this class attribute to simulate a pre-existing handler.
    int_handler = None

    def setUp(self):
        # Remember the handler in place before the test so tearDown can
        # restore it, then install the class-level override (if any).
        self._default_handler = signal.getsignal(signal.SIGINT)
        if self.int_handler is not None:
            signal.signal(signal.SIGINT, self.int_handler)
    def tearDown(self):
        # Restore the original SIGINT handler and wipe the module-global
        # state that unittest.installHandler()/registerResult() mutate,
        # so tests cannot leak state into each other.
        signal.signal(signal.SIGINT, self._default_handler)
        unittest.signals._results = weakref.WeakKeyDictionary()
        unittest.signals._interrupt_handler = None
def testInstallHandler(self):
default_handler = signal.getsignal(signal.SIGINT)
unittest.installHandler()
self.assertNotEqual(signal.getsignal(signal.SIGINT), default_handler)
try:
pid = os.getpid()
os.kill(pid, signal.SIGINT)
except KeyboardInterrupt:
self.fail("KeyboardInterrupt not handled")
self.assertTrue(unittest.signals._interrupt_handler.called)
def testRegisterResult(self):
result = unittest.TestResult()
self.assertNotIn(result, unittest.signals._results)
unittest.registerResult(result)
try:
self.assertIn(result, unittest.signals._results)
finally:
unittest.removeResult(result)
def testInterruptCaught(self):
default_handler = signal.getsignal(signal.SIGINT)
result = unittest.TestResult()
unittest.installHandler()
unittest.registerResult(result)
self.assertNotEqual(signal.getsignal(signal.SIGINT), default_handler)
def test(result):
pid = os.getpid()
os.kill(pid, signal.SIGINT)
result.breakCaught = True
self.assertTrue(result.shouldStop)
try:
test(result)
except KeyboardInterrupt:
self.fail("KeyboardInterrupt not handled")
self.assertTrue(result.breakCaught)
def testSecondInterrupt(self):
# Can't use skipIf decorator because the signal handler may have
# been changed after defining this method.
if signal.getsignal(signal.SIGINT) == signal.SIG_IGN:
self.skipTest("test requires SIGINT to not be ignored")
result = unittest.TestResult()
unittest.installHandler()
unittest.registerResult(result)
def test(result):
pid = os.getpid()
os.kill(pid, signal.SIGINT)
result.breakCaught = True
self.assertTrue(result.shouldStop)
os.kill(pid, signal.SIGINT)
self.fail("Second KeyboardInterrupt not raised")
try:
test(result)
except KeyboardInterrupt:
pass
else:
self.fail("Second KeyboardInterrupt not raised")
self.assertTrue(result.breakCaught)
def testTwoResults(self):
unittest.installHandler()
result = unittest.TestResult()
unittest.registerResult(result)
new_handler = signal.getsignal(signal.SIGINT)
result2 = unittest.TestResult()
unittest.registerResult(result2)
self.assertEqual(signal.getsignal(signal.SIGINT), new_handler)
result3 = unittest.TestResult()
def test(result):
pid = os.getpid()
os.kill(pid, signal.SIGINT)
try:
test(result)
except KeyboardInterrupt:
self.fail("KeyboardInterrupt not handled")
self.assertTrue(result.shouldStop)
self.assertTrue(result2.shouldStop)
self.assertFalse(result3.shouldStop)
def testHandlerReplacedButCalled(self):
# Can't use skipIf decorator because the signal handler may have
# been changed after defining this method.
if signal.getsignal(signal.SIGINT) == signal.SIG_IGN:
self.skipTest("test requires SIGINT to not be ignored")
# If our handler has been replaced (is no longer installed) but is
# called by the *new* handler, then it isn't safe to delay the
# SIGINT and we should immediately delegate to the default handler
unittest.installHandler()
handler = signal.getsignal(signal.SIGINT)
def new_handler(frame, signum):
handler(frame, signum)
signal.signal(signal.SIGINT, new_handler)
try:
pid = os.getpid()
os.kill(pid, signal.SIGINT)
except KeyboardInterrupt:
pass
else:
self.fail("replaced but delegated handler doesn't raise interrupt")
def testRunner(self):
# Creating a TextTestRunner with the appropriate argument should
# register the TextTestResult it creates
runner = unittest.TextTestRunner(stream=io.StringIO())
result = runner.run(unittest.TestSuite())
self.assertIn(result, unittest.signals._results)
def testWeakReferences(self):
# Calling registerResult on a result should not keep it alive
result = unittest.TestResult()
unittest.registerResult(result)
ref = weakref.ref(result)
del result
# For non-reference counting implementations
gc.collect();gc.collect()
self.assertIsNone(ref())
def testRemoveResult(self):
result = unittest.TestResult()
unittest.registerResult(result)
unittest.installHandler()
self.assertTrue(unittest.removeResult(result))
# Should this raise an error instead?
self.assertFalse(unittest.removeResult(unittest.TestResult()))
try:
pid = os.getpid()
os.kill(pid, signal.SIGINT)
except KeyboardInterrupt:
pass
self.assertFalse(result.shouldStop)
def testMainInstallsHandler(self):
failfast = object()
test = object()
verbosity = object()
result = object()
default_handler = signal.getsignal(signal.SIGINT)
class FakeRunner(object):
initArgs = []
runArgs = []
def __init__(self, *args, **kwargs):
self.initArgs.append((args, kwargs))
def run(self, test):
self.runArgs.append(test)
return result
class Program(unittest.TestProgram):
def __init__(self, catchbreak):
self.exit = False
self.verbosity = verbosity
self.failfast = failfast
self.catchbreak = catchbreak
self.tb_locals = False
self.testRunner = FakeRunner
self.test = test
self.result = None
p = Program(False)
p.runTests()
self.assertEqual(FakeRunner.initArgs, [((), {'buffer': None,
'verbosity': verbosity,
'failfast': failfast,
'tb_locals': False,
'warnings': None})])
self.assertEqual(FakeRunner.runArgs, [test])
self.assertEqual(p.result, result)
self.assertEqual(signal.getsignal(signal.SIGINT), default_handler)
FakeRunner.initArgs = []
FakeRunner.runArgs = []
p = Program(True)
p.runTests()
self.assertEqual(FakeRunner.initArgs, [((), {'buffer': None,
'verbosity': verbosity,
'failfast': failfast,
'tb_locals': False,
'warnings': None})])
self.assertEqual(FakeRunner.runArgs, [test])
self.assertEqual(p.result, result)
self.assertNotEqual(signal.getsignal(signal.SIGINT), default_handler)
def testRemoveHandler(self):
default_handler = signal.getsignal(signal.SIGINT)
unittest.installHandler()
unittest.removeHandler()
self.assertEqual(signal.getsignal(signal.SIGINT), default_handler)
# check that calling removeHandler multiple times has no ill-effect
unittest.removeHandler()
self.assertEqual(signal.getsignal(signal.SIGINT), default_handler)
def testRemoveHandlerAsDecorator(self):
default_handler = signal.getsignal(signal.SIGINT)
unittest.installHandler()
@unittest.removeHandler
def test():
self.assertEqual(signal.getsignal(signal.SIGINT), default_handler)
test()
self.assertNotEqual(signal.getsignal(signal.SIGINT), default_handler)
@unittest.skipUnless(hasattr(os, 'kill'), "Test requires os.kill")
@unittest.skipIf(sys.platform == "win32", "Test cannot run on Windows")
class TestBreakDefaultIntHandler(TestBreak):
    # Re-run every TestBreak scenario with Python's stock SIGINT handler
    # (the one that raises KeyboardInterrupt) pre-installed.
    int_handler = signal.default_int_handler
@unittest.skipUnless(hasattr(os, 'kill'), "Test requires os.kill")
@unittest.skipIf(sys.platform == "win32", "Test cannot run on Windows")
class TestBreakSignalIgnored(TestBreak):
    # Re-run every TestBreak scenario with SIGINT ignored beforehand.
    int_handler = signal.SIG_IGN
@unittest.skipUnless(hasattr(os, 'kill'), "Test requires os.kill")
@unittest.skipIf(sys.platform == "win32", "Test cannot run on Windows")
class TestBreakSignalDefault(TestBreak):
    # Re-run every TestBreak scenario with the OS default SIGINT
    # disposition pre-installed.
    int_handler = signal.SIG_DFL
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| apache-2.0 |
StackStorm/mistral | mistral/policies/workbook.py | 2 | 2249 | # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from mistral.policies import base
# Template for workbook policy action names, e.g. 'workbooks:create'.
WORKBOOKS = 'workbooks:%s'

# Access policies for the /v2/workbooks REST endpoints.  Every action
# defaults to the admin-or-owner rule.
rules = [
    policy.DocumentedRuleDefault(
        name=WORKBOOKS % 'create',
        check_str=base.RULE_ADMIN_OR_OWNER,
        description='Create a new workbook.',
        operations=[
            {
                'path': '/v2/workbooks',
                'method': 'POST'
            }
        ]
    ),
    policy.DocumentedRuleDefault(
        name=WORKBOOKS % 'delete',
        check_str=base.RULE_ADMIN_OR_OWNER,
        description='Delete the named workbook.',
        operations=[
            {
                'path': '/v2/workbooks',
                'method': 'DELETE'
            }
        ]
    ),
    policy.DocumentedRuleDefault(
        name=WORKBOOKS % 'get',
        check_str=base.RULE_ADMIN_OR_OWNER,
        description='Return the named workbook.',
        operations=[
            {
                'path': '/v2/workbooks/{workbook_name}',
                'method': 'GET'
            }
        ]
    ),
    policy.DocumentedRuleDefault(
        name=WORKBOOKS % 'list',
        check_str=base.RULE_ADMIN_OR_OWNER,
        description='Return all workbooks.',
        operations=[
            {
                'path': '/v2/workbooks',
                'method': 'GET'
            }
        ]
    ),
    policy.DocumentedRuleDefault(
        name=WORKBOOKS % 'update',
        check_str=base.RULE_ADMIN_OR_OWNER,
        # NOTE(review): "an workbook" is a grammar slip, but it is a
        # runtime/user-visible policy string, so it is left unchanged here.
        description='Update an workbook.',
        operations=[
            {
                'path': '/v2/workbooks',
                'method': 'PUT'
            }
        ]
    )
]
def list_rules():
    """Return the workbook policy rules for registration with oslo.policy."""
    return rules
| apache-2.0 |
airbnb/airflow | airflow/api/common/experimental/get_dag_run_state.py | 4 | 1372 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""DAG run APIs."""
from datetime import datetime
from typing import Dict
from airflow.api.common.experimental import check_and_get_dag, check_and_get_dagrun
def get_dag_run_state(dag_id: str, execution_date: datetime) -> Dict[str, str]:
    """Return the state of the DAG run identified by dag_id and execution_date.

    :param dag_id: DAG id
    :param execution_date: execution date of the DAG run
    :return: Dictionary with a single 'state' key holding the run's state
    """
    # Both helpers raise if the DAG / DAG run cannot be found.
    dag = check_and_get_dag(dag_id=dag_id)
    dagrun = check_and_get_dagrun(dag, execution_date)
    return {'state': dagrun.get_state()}
| apache-2.0 |
bwhite/hadoopy | hadoopy/thirdparty/pyinstaller/PyInstaller/hooks/hook-vtkpython.py | 4 | 1321 | # Copyright (C) 2005, Giovanni Bajo
# Based on previous work under copyright (c) 2001, 2002 McMillan Enterprises, Inc.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
# courtesy of David C. Morrill (4/2/2002)
import os

# VTK's Python wrappers are loaded dynamically at runtime, so PyInstaller
# cannot discover them by static analysis; list them here explicitly.
# The extension modules carry a "lib" prefix on POSIX but not on Windows.
_VTK_MODULE_SUFFIXES = [
    'CommonPython',
    'FilteringPython',
    'IOPython',
    'ImagingPython',
    'GraphicsPython',
    'RenderingPython',
    'HybridPython',
    'ParallelPython',
    'PatentedPython',
]

if os.name == 'posix':
    hiddenimports = ['libvtk' + suffix for suffix in _VTK_MODULE_SUFFIXES]
else:
    hiddenimports = ['vtk' + suffix for suffix in _VTK_MODULE_SUFFIXES]
| gpl-3.0 |
iambibhas/django | django/contrib/gis/gdal/geometries.py | 9 | 24207 | """
The OGRGeometry is a wrapper for using the OGR Geometry class
(see http://www.gdal.org/ogr/classOGRGeometry.html). OGRGeometry
may be instantiated when reading geometries from OGR Data Sources
(e.g. SHP files), or when given OGC WKT (a string).
While the 'full' API is not present yet, the API is "pythonic" unlike
the traditional and "next-generation" OGR Python bindings. One major
advantage OGR Geometries have over their GEOS counterparts is support
for spatial reference systems and their transformation.
Example:
>>> from django.contrib.gis.gdal import OGRGeometry, OGRGeomType, SpatialReference
>>> wkt1, wkt2 = 'POINT(-90 30)', 'POLYGON((0 0, 5 0, 5 5, 0 5)'
>>> pnt = OGRGeometry(wkt1)
>>> print(pnt)
POINT (-90 30)
>>> mpnt = OGRGeometry(OGRGeomType('MultiPoint'), SpatialReference('WGS84'))
>>> mpnt.add(wkt1)
>>> mpnt.add(wkt1)
>>> print(mpnt)
MULTIPOINT (-90 30,-90 30)
>>> print(mpnt.srs.name)
WGS 84
>>> print(mpnt.srs.proj)
+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs
>>> mpnt.transform_to(SpatialReference('NAD27'))
>>> print(mpnt.srs.proj)
+proj=longlat +ellps=clrk66 +datum=NAD27 +no_defs
>>> print(mpnt)
MULTIPOINT (-89.999930378602485 29.999797886557641,-89.999930378602485 29.999797886557641)
The OGRGeomType class is to make it easy to specify an OGR geometry type:
>>> from django.contrib.gis.gdal import OGRGeomType
>>> gt1 = OGRGeomType(3) # Using an integer for the type
>>> gt2 = OGRGeomType('Polygon') # Using a string
>>> gt3 = OGRGeomType('POLYGON') # It's case-insensitive
>>> print(gt1 == 3, gt1 == 'Polygon') # Equivalence works w/non-OGRGeomType objects
True True
"""
# Python library requisites.
import sys
from binascii import a2b_hex, b2a_hex
from ctypes import byref, string_at, c_char_p, c_double, c_ubyte, c_void_p
# Getting GDAL prerequisites
from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.envelope import Envelope, OGREnvelope
from django.contrib.gis.gdal.error import GDALException, OGRIndexError, SRSException
from django.contrib.gis.gdal.geomtype import OGRGeomType
from django.contrib.gis.gdal.srs import SpatialReference, CoordTransform
# Getting the ctypes prototype functions that interface w/the GDAL C library.
from django.contrib.gis.gdal.prototypes import geom as capi, srs as srs_api
# For recognizing geometry input.
from django.contrib.gis.geometry.regex import hex_regex, wkt_regex, json_regex
from django.utils import six
from django.utils.six.moves import range
# For more information, see the OGR C API source code:
# http://www.gdal.org/ogr/ogr__api_8h.html
#
# The OGR_G_* routines are relevant here.
class OGRGeometry(GDALBase):
    """Generally encapsulates an OGR geometry.

    Wraps an OGRGeometryH pointer from the GDAL C API; instances own the
    pointer and free it in ``__del__``.  Construction accepts WKT/EWKT,
    HEX, GeoJSON, WKB (memoryview), an OGRGeomType, or a raw OGR pointer.
    """

    def __init__(self, geom_input, srs=None):
        "Initializes Geometry on either WKT or an OGR pointer as input."
        str_instance = isinstance(geom_input, six.string_types)

        # If HEX, unpack input to a binary buffer.
        if str_instance and hex_regex.match(geom_input):
            geom_input = six.memoryview(a2b_hex(geom_input.upper().encode()))
            str_instance = False

        # Constructing the geometry,
        if str_instance:
            wkt_m = wkt_regex.match(geom_input)
            json_m = json_regex.match(geom_input)
            if wkt_m:
                if wkt_m.group('srid'):
                    # If there's EWKT, set the SRS w/value of the SRID.
                    srs = int(wkt_m.group('srid'))
                if wkt_m.group('type').upper() == 'LINEARRING':
                    # OGR_G_CreateFromWkt doesn't work with LINEARRING WKT.
                    # See http://trac.osgeo.org/gdal/ticket/1992.
                    g = capi.create_geom(OGRGeomType(wkt_m.group('type')).num)
                    capi.import_wkt(g, byref(c_char_p(wkt_m.group('wkt').encode())))
                else:
                    g = capi.from_wkt(byref(c_char_p(wkt_m.group('wkt').encode())), None, byref(c_void_p()))
            elif json_m:
                g = capi.from_json(geom_input.encode())
            else:
                # Seeing if the input is a valid short-hand string
                # (e.g., 'Point', 'POLYGON').
                OGRGeomType(geom_input)
                g = capi.create_geom(OGRGeomType(geom_input).num)
        elif isinstance(geom_input, six.memoryview):
            # WKB was passed in
            g = capi.from_wkb(bytes(geom_input), None, byref(c_void_p()), len(geom_input))
        elif isinstance(geom_input, OGRGeomType):
            # OGRGeomType was passed in, an empty geometry will be created.
            g = capi.create_geom(geom_input.num)
        elif isinstance(geom_input, self.ptr_type):
            # OGR pointer (c_void_p) was the input.
            g = geom_input
        else:
            raise GDALException('Invalid input type for OGR Geometry construction: %s' % type(geom_input))

        # Now checking the Geometry pointer before finishing initialization
        # by setting the pointer for the object.
        if not g:
            raise GDALException('Cannot create OGR Geometry from input: %s' % str(geom_input))
        self.ptr = g

        # Assigning the SpatialReference object to the geometry, if valid.
        if srs:
            self.srs = srs

        # Setting the class depending upon the OGR Geometry Type
        self.__class__ = GEO_CLASSES[self.geom_type.num]

    def __del__(self):
        "Deletes this Geometry."
        # Only free the OGR geometry if the pointer is set and the ctypes
        # interface module is still available (it may be None during
        # interpreter shutdown).
        if self._ptr and capi:
            capi.destroy_geom(self._ptr)

    # Pickle routines: state is (WKB bytes, SRS WKT or None).
    def __getstate__(self):
        srs = self.srs
        if srs:
            srs = srs.wkt
        else:
            srs = None
        return bytes(self.wkb), srs

    def __setstate__(self, state):
        wkb, srs = state
        ptr = capi.from_wkb(wkb, None, byref(c_void_p()), len(wkb))
        if not ptr:
            raise GDALException('Invalid OGRGeometry loaded from pickled state.')
        self.ptr = ptr
        self.srs = srs

    @classmethod
    def from_bbox(cls, bbox):
        "Constructs a Polygon from a bounding box (4-tuple)."
        x0, y0, x1, y1 = bbox
        return OGRGeometry('POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))' % (
            x0, y0, x0, y1, x1, y1, x1, y0, x0, y0))

    ### Geometry set-like operations ###
    # g = g1 | g2
    def __or__(self, other):
        "Returns the union of the two geometries."
        return self.union(other)

    # g = g1 & g2
    def __and__(self, other):
        "Returns the intersection of this Geometry and the other."
        return self.intersection(other)

    # g = g1 - g2
    def __sub__(self, other):
        "Return the difference this Geometry and the other."
        return self.difference(other)

    # g = g1 ^ g2
    def __xor__(self, other):
        "Return the symmetric difference of this Geometry and the other."
        return self.sym_difference(other)

    def __eq__(self, other):
        "Is this Geometry equal to the other?"
        if isinstance(other, OGRGeometry):
            return self.equals(other)
        else:
            return False

    def __ne__(self, other):
        "Tests for inequality."
        return not (self == other)

    def __str__(self):
        "WKT is used for the string representation."
        return self.wkt

    #### Geometry Properties ####
    @property
    def dimension(self):
        "Returns 0 for points, 1 for lines, and 2 for surfaces."
        return capi.get_dims(self.ptr)

    def _get_coord_dim(self):
        "Returns the coordinate dimension of the Geometry."
        return capi.get_coord_dim(self.ptr)

    def _set_coord_dim(self, dim):
        "Sets the coordinate dimension of this Geometry."
        if dim not in (2, 3):
            raise ValueError('Geometry dimension must be either 2 or 3')
        capi.set_coord_dim(self.ptr, dim)

    coord_dim = property(_get_coord_dim, _set_coord_dim)

    @property
    def geom_count(self):
        "The number of elements in this Geometry."
        return capi.get_geom_count(self.ptr)

    @property
    def point_count(self):
        "Returns the number of Points in this Geometry."
        return capi.get_point_count(self.ptr)

    @property
    def num_points(self):
        "Alias for `point_count` (same name method in GEOS API.)"
        return self.point_count

    @property
    def num_coords(self):
        "Alias for `point_count`."
        return self.point_count

    @property
    def geom_type(self):
        "Returns the Type for this Geometry."
        return OGRGeomType(capi.get_geom_type(self.ptr))

    @property
    def geom_name(self):
        "Returns the Name of this Geometry."
        return capi.get_geom_name(self.ptr)

    @property
    def area(self):
        "Returns the area for a LinearRing, Polygon, or MultiPolygon; 0 otherwise."
        return capi.get_area(self.ptr)

    @property
    def envelope(self):
        "Returns the envelope for this Geometry."
        # TODO: Fix Envelope() for Point geometries.
        return Envelope(capi.get_envelope(self.ptr, byref(OGREnvelope())))

    @property
    def extent(self):
        "Returns the envelope as a 4-tuple, instead of as an Envelope object."
        return self.envelope.tuple

    #### SpatialReference-related Properties ####
    # The SRS property
    def _get_srs(self):
        "Returns the Spatial Reference for this Geometry."
        try:
            srs_ptr = capi.get_geom_srs(self.ptr)
            return SpatialReference(srs_api.clone_srs(srs_ptr))
        except SRSException:
            return None

    def _set_srs(self, srs):
        "Sets the SpatialReference for this geometry."
        # Do not have to clone the `SpatialReference` object pointer because
        # when it is assigned to this `OGRGeometry` it's internal OGR
        # reference count is incremented, and will likewise be released
        # (decremented) when this geometry's destructor is called.
        if isinstance(srs, SpatialReference):
            srs_ptr = srs.ptr
        elif isinstance(srs, six.integer_types + six.string_types):
            sr = SpatialReference(srs)
            srs_ptr = sr.ptr
        else:
            raise TypeError('Cannot assign spatial reference with object of type: %s' % type(srs))
        capi.assign_srs(self.ptr, srs_ptr)

    srs = property(_get_srs, _set_srs)

    # The SRID property
    def _get_srid(self):
        srs = self.srs
        if srs:
            return srs.srid
        return None

    def _set_srid(self, srid):
        if isinstance(srid, six.integer_types):
            self.srs = srid
        else:
            raise TypeError('SRID must be set with an integer.')

    srid = property(_get_srid, _set_srid)

    #### Output Methods ####
    @property
    def geos(self):
        "Returns a GEOSGeometry object from this OGRGeometry."
        # Imported here to avoid a circular import at module load time.
        from django.contrib.gis.geos import GEOSGeometry
        return GEOSGeometry(self.wkb, self.srid)

    @property
    def gml(self):
        "Returns the GML representation of the Geometry."
        return capi.to_gml(self.ptr)

    @property
    def hex(self):
        "Returns the hexadecimal representation of the WKB (a string)."
        return b2a_hex(self.wkb).upper()

    @property
    def json(self):
        """
        Returns the GeoJSON representation of this Geometry.
        """
        return capi.to_json(self.ptr)
    geojson = json

    @property
    def kml(self):
        "Returns the KML representation of the Geometry."
        return capi.to_kml(self.ptr, None)

    @property
    def wkb_size(self):
        "Returns the size of the WKB buffer."
        return capi.get_wkbsize(self.ptr)

    @property
    def wkb(self):
        "Returns the WKB representation of the Geometry."
        if sys.byteorder == 'little':
            byteorder = 1  # wkbNDR (from ogr_core.h)
        else:
            byteorder = 0  # wkbXDR
        sz = self.wkb_size
        # Creating the unsigned character buffer, and passing it in by reference.
        buf = (c_ubyte * sz)()
        capi.to_wkb(self.ptr, byteorder, byref(buf))
        # Returning a buffer of the string at the pointer.
        return six.memoryview(string_at(buf, sz))

    @property
    def wkt(self):
        "Returns the WKT representation of the Geometry."
        return capi.to_wkt(self.ptr, byref(c_char_p()))

    @property
    def ewkt(self):
        "Returns the EWKT representation of the Geometry."
        srs = self.srs
        if srs and srs.srid:
            return 'SRID=%s;%s' % (srs.srid, self.wkt)
        else:
            return self.wkt

    #### Geometry Methods ####
    def clone(self):
        "Clones this OGR Geometry."
        return OGRGeometry(capi.clone_geom(self.ptr), self.srs)

    def close_rings(self):
        """
        If there are any rings within this geometry that have not been
        closed, this routine will do so by adding the starting point at the
        end.
        """
        # Closing the open rings.
        capi.geom_close_rings(self.ptr)

    def transform(self, coord_trans, clone=False):
        """
        Transforms this geometry to a different spatial reference system.
        May take a CoordTransform object, a SpatialReference object, string
        WKT or PROJ.4, and/or an integer SRID.  By default nothing is returned
        and the geometry is transformed in-place.  However, if the `clone`
        keyword is set, then a transformed clone of this geometry will be
        returned.
        """
        if clone:
            klone = self.clone()
            klone.transform(coord_trans)
            return klone

        # Depending on the input type, use the appropriate OGR routine
        # to perform the transformation.
        if isinstance(coord_trans, CoordTransform):
            capi.geom_transform(self.ptr, coord_trans.ptr)
        elif isinstance(coord_trans, SpatialReference):
            capi.geom_transform_to(self.ptr, coord_trans.ptr)
        elif isinstance(coord_trans, six.integer_types + six.string_types):
            sr = SpatialReference(coord_trans)
            capi.geom_transform_to(self.ptr, sr.ptr)
        else:
            raise TypeError('Transform only accepts CoordTransform, '
                            'SpatialReference, string, and integer objects.')

    def transform_to(self, srs):
        "For backwards-compatibility."
        self.transform(srs)

    #### Topology Methods ####
    def _topology(self, func, other):
        """A generalized function for topology operations, takes a GDAL function and
        the other geometry to perform the operation on."""
        if not isinstance(other, OGRGeometry):
            raise TypeError('Must use another OGRGeometry object for topology operations!')

        # Returning the output of the given function with the other geometry's
        # pointer.
        return func(self.ptr, other.ptr)

    def intersects(self, other):
        "Returns True if this geometry intersects with the other."
        return self._topology(capi.ogr_intersects, other)

    def equals(self, other):
        "Returns True if this geometry is equivalent to the other."
        return self._topology(capi.ogr_equals, other)

    def disjoint(self, other):
        "Returns True if this geometry and the other are spatially disjoint."
        return self._topology(capi.ogr_disjoint, other)

    def touches(self, other):
        "Returns True if this geometry touches the other."
        return self._topology(capi.ogr_touches, other)

    def crosses(self, other):
        "Returns True if this geometry crosses the other."
        return self._topology(capi.ogr_crosses, other)

    def within(self, other):
        "Returns True if this geometry is within the other."
        return self._topology(capi.ogr_within, other)

    def contains(self, other):
        "Returns True if this geometry contains the other."
        return self._topology(capi.ogr_contains, other)

    def overlaps(self, other):
        "Returns True if this geometry overlaps the other."
        return self._topology(capi.ogr_overlaps, other)

    #### Geometry-generation Methods ####
    def _geomgen(self, gen_func, other=None):
        "A helper routine for the OGR routines that generate geometries."
        if isinstance(other, OGRGeometry):
            return OGRGeometry(gen_func(self.ptr, other.ptr), self.srs)
        else:
            return OGRGeometry(gen_func(self.ptr), self.srs)

    @property
    def boundary(self):
        "Returns the boundary of this geometry."
        return self._geomgen(capi.get_boundary)

    @property
    def convex_hull(self):
        """
        Returns the smallest convex Polygon that contains all the points in
        this Geometry.
        """
        return self._geomgen(capi.geom_convex_hull)

    def difference(self, other):
        """
        Returns a new geometry consisting of the region which is the difference
        of this geometry and the other.
        """
        return self._geomgen(capi.geom_diff, other)

    def intersection(self, other):
        """
        Returns a new geometry consisting of the region of intersection of this
        geometry and the other.
        """
        return self._geomgen(capi.geom_intersection, other)

    def sym_difference(self, other):
        """
        Returns a new geometry which is the symmetric difference of this
        geometry and the other.
        """
        return self._geomgen(capi.geom_sym_diff, other)

    def union(self, other):
        """
        Returns a new geometry consisting of the region which is the union of
        this geometry and the other.
        """
        return self._geomgen(capi.geom_union, other)
# The subclasses for OGR Geometry.
class Point(OGRGeometry):
    "Wraps an OGR Point geometry."

    @property
    def x(self):
        "Returns the X coordinate for this Point."
        return capi.getx(self.ptr, 0)

    @property
    def y(self):
        "Returns the Y coordinate for this Point."
        return capi.gety(self.ptr, 0)

    @property
    def z(self):
        "Returns the Z coordinate for this Point."
        # Only 3D points carry a Z value; 2D points fall through to None.
        if self.coord_dim == 3:
            return capi.getz(self.ptr, 0)

    @property
    def tuple(self):
        "Returns the tuple of this point."
        if self.coord_dim == 2:
            return (self.x, self.y)
        elif self.coord_dim == 3:
            return (self.x, self.y, self.z)
    # Alias matching the GEOS API.
    coords = tuple
class LineString(OGRGeometry):
    "Wraps an OGR LineString geometry as a sequence of coordinate tuples."

    def __getitem__(self, index):
        "Return the point (as a coordinate tuple) at the given index."
        # Guard clause: reject out-of-range indices up front.
        if not (0 <= index < self.point_count):
            raise OGRIndexError('index out of range: %s' % str(index))
        xval, yval, zval = c_double(), c_double(), c_double()
        capi.get_point(self.ptr, index, byref(xval), byref(yval), byref(zval))
        dim = self.coord_dim
        if dim == 1:
            return (xval.value,)
        elif dim == 2:
            return (xval.value, yval.value)
        elif dim == 3:
            return (xval.value, yval.value, zval.value)

    def __iter__(self):
        "Iterate over each point in the LineString."
        return (self[idx] for idx in range(self.point_count))

    def __len__(self):
        "Return the number of points in the LineString."
        return self.point_count

    @property
    def tuple(self):
        "Return the tuple representation of this LineString."
        return tuple(self[idx] for idx in range(len(self)))
    coords = tuple

    def _listarr(self, func):
        """
        Internal helper: apply *func* to every vertex index and collect
        the results in a list.
        """
        return [func(self.ptr, idx) for idx in range(len(self))]

    @property
    def x(self):
        "Return the X coordinates in a list."
        return self._listarr(capi.getx)

    @property
    def y(self):
        "Return the Y coordinates in a list."
        return self._listarr(capi.gety)

    @property
    def z(self):
        "Return the Z coordinates in a list (3D geometries only)."
        if self.coord_dim == 3:
            return self._listarr(capi.getz)
# LinearRings are used in Polygons.
class LinearRing(LineString):
    # A LinearRing is a closed LineString; all behavior is inherited.
    pass
class Polygon(OGRGeometry):
    "Wraps an OGR Polygon geometry (an exterior ring plus interior rings)."

    def __len__(self):
        # NOTE: geom_count includes the exterior ring (ring 0, the shell)
        # as well as any interior rings -- see `shell` below.
        "The number of rings in this Polygon (exterior ring included)."
        return self.geom_count

    def __iter__(self):
        "Iterates through each ring in the Polygon."
        for i in range(self.geom_count):
            yield self[i]

    def __getitem__(self, index):
        "Gets the ring at the specified index."
        if index < 0 or index >= self.geom_count:
            raise OGRIndexError('index out of range: %s' % index)
        else:
            # Clone the ring so the returned geometry owns its own pointer.
            return OGRGeometry(capi.clone_geom(capi.get_geom_ref(self.ptr, index)), self.srs)

    # Polygon Properties
    @property
    def shell(self):
        "Returns the shell of this Polygon."
        return self[0]  # First ring is the shell
    exterior_ring = shell

    @property
    def tuple(self):
        "Returns a tuple of LinearRing coordinate tuples."
        return tuple(self[i].tuple for i in range(self.geom_count))
    coords = tuple

    @property
    def point_count(self):
        "The number of Points in this Polygon."
        # Summing up the number of points in each ring of the Polygon.
        return sum(self[i].point_count for i in range(self.geom_count))

    @property
    def centroid(self):
        "Returns the centroid (a Point) of this Polygon."
        # The centroid is a Point, create a geometry for this.
        p = OGRGeometry(OGRGeomType('Point'))
        capi.get_centroid(self.ptr, p.ptr)
        return p
# Geometry Collection base class.
class GeometryCollection(OGRGeometry):
    "The Geometry Collection class."

    def __getitem__(self, index):
        "Gets the Geometry at the specified index."
        if index < 0 or index >= self.geom_count:
            raise OGRIndexError('index out of range: %s' % index)
        else:
            # Clone the member so the returned geometry owns its own pointer.
            return OGRGeometry(capi.clone_geom(capi.get_geom_ref(self.ptr, index)), self.srs)

    def __iter__(self):
        "Iterates over each Geometry."
        for i in range(self.geom_count):
            yield self[i]

    def __len__(self):
        "The number of geometries in this Geometry Collection."
        return self.geom_count

    def add(self, geom):
        "Add the geometry to this Geometry Collection."
        if isinstance(geom, OGRGeometry):
            if isinstance(geom, self.__class__):
                # Adding a collection of the same type merges its members
                # individually rather than nesting the collection.
                for g in geom:
                    capi.add_geom(self.ptr, g.ptr)
            else:
                capi.add_geom(self.ptr, geom.ptr)
        elif isinstance(geom, six.string_types):
            # Geometry strings (WKT/HEX/GeoJSON) are parsed first.
            tmp = OGRGeometry(geom)
            capi.add_geom(self.ptr, tmp.ptr)
        else:
            raise GDALException('Must add an OGRGeometry.')

    @property
    def point_count(self):
        "The number of Points in this Geometry Collection."
        # Summing up the number of points in each geometry in this collection
        return sum(self[i].point_count for i in range(self.geom_count))

    @property
    def tuple(self):
        "Returns a tuple representation of this Geometry Collection."
        return tuple(self[i].tuple for i in range(self.geom_count))
    coords = tuple
# Multiple Geometry types.
class MultiPoint(GeometryCollection):
    # Collection of Point geometries; behavior comes from GeometryCollection.
    pass
class MultiLineString(GeometryCollection):
    # Collection of LineString geometries; behavior comes from GeometryCollection.
    pass
class MultiPolygon(GeometryCollection):
    # Collection of Polygon geometries; behavior comes from GeometryCollection.
    pass
# Class mapping dictionary (using the OGRwkbGeometryType as the key).
# OGRGeometry.__init__ uses this to re-class instances to the proper
# wrapper subclass after the underlying OGR geometry type is known.
GEO_CLASSES = {1: Point,
               2: LineString,
               3: Polygon,
               4: MultiPoint,
               5: MultiLineString,
               6: MultiPolygon,
               7: GeometryCollection,
               101: LinearRing,
               # 2.5D (Z-coordinate) variants map to the same wrapper
               # classes as their 2D counterparts.
               1 + OGRGeomType.wkb25bit: Point,
               2 + OGRGeomType.wkb25bit: LineString,
               3 + OGRGeomType.wkb25bit: Polygon,
               4 + OGRGeomType.wkb25bit: MultiPoint,
               5 + OGRGeomType.wkb25bit: MultiLineString,
               6 + OGRGeomType.wkb25bit: MultiPolygon,
               7 + OGRGeomType.wkb25bit: GeometryCollection,
               }
| bsd-3-clause |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.