repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
tangrams/tangram-sandbox | data/tripMaker.py | 2 | 1736 | #!/usr/bin/env python
import requests, json, sys
import geojson
#six degrees of precision in valhalla
inv = 1.0 / 1e6;
#decode an encoded string
def decode(encoded):
decoded = []
previous = [0,0]
i = 0
#for each byte
while i < len(encoded):
#for each coord (lat, lon)
ll = [0,0]
for j in [0, 1]:
shift = 0
byte = 0x20
#keep decoding bytes until you have this coord
while byte >= 0x20:
byte = ord(encoded[i]) - 63
i += 1
ll[j] |= (byte & 0x1f) << shift
shift += 5
#get the final value adding the previous offset and remember it for the next
ll[j] = previous[j] + (~(ll[j] >> 1) if ll[j] & 1 else (ll[j] >> 1))
previous[j] = ll[j]
#scale by the precision and chop off long coords also flip the positions so
#its the far more standard lon,lat instead of lat,lon
decoded.append([float('%.6f' % (ll[1] * inv)), float('%.6f' % (ll[0] * inv))])
#hand back the list of coordinates
return decoded
A = [ -122.4425, 37.77823 ] # SF
B = [ -73.96625, 40.78343 ] # NY
KEY = 'valhalla-EzqiWWY'
URL = 'http://valhalla.mapzen.com/route?'
FROM_TO = '{"locations":[{"lat":'+str(A[1])+',"lon":'+str(A[0])+'},{"lat":'+str(B[1])+',"lon":'+str(B[0])+'}],"costing":"auto"}'
RST = requests.get(URL+'json='+FROM_TO+'&api_key='+KEY)
JSON = json.loads(RST.text)
line = geojson.LineString(decode(JSON['trip']['legs'][0]['shape']))
feature = geojson.Feature(geometry=line)
feature_collection = geojson.FeatureCollection([feature])
file = open('trip.json', 'w')
file.write(geojson.dumps(feature_collection, sort_key=True))
file.close()
| mit |
sdcooke/django | tests/bulk_create/tests.py | 190 | 7570 | from __future__ import unicode_literals
from operator import attrgetter
from django.db import connection
from django.test import (
TestCase, override_settings, skipIfDBFeature, skipUnlessDBFeature,
)
from .models import (
Country, Pizzeria, ProxyCountry, ProxyMultiCountry, ProxyMultiProxyCountry,
ProxyProxyCountry, Restaurant, State, TwoFields,
)
class BulkCreateTests(TestCase):
def setUp(self):
self.data = [
Country(name="United States of America", iso_two_letter="US"),
Country(name="The Netherlands", iso_two_letter="NL"),
Country(name="Germany", iso_two_letter="DE"),
Country(name="Czech Republic", iso_two_letter="CZ")
]
def test_simple(self):
created = Country.objects.bulk_create(self.data)
self.assertEqual(len(created), 4)
self.assertQuerysetEqual(Country.objects.order_by("-name"), [
"United States of America", "The Netherlands", "Germany", "Czech Republic"
], attrgetter("name"))
created = Country.objects.bulk_create([])
self.assertEqual(created, [])
self.assertEqual(Country.objects.count(), 4)
@skipUnlessDBFeature('has_bulk_insert')
def test_efficiency(self):
with self.assertNumQueries(1):
Country.objects.bulk_create(self.data)
def test_multi_table_inheritance_unsupported(self):
expected_message = "Can't bulk create a multi-table inherited model"
with self.assertRaisesMessage(ValueError, expected_message):
Pizzeria.objects.bulk_create([
Pizzeria(name="The Art of Pizza"),
])
with self.assertRaisesMessage(ValueError, expected_message):
ProxyMultiCountry.objects.bulk_create([
ProxyMultiCountry(name="Fillory", iso_two_letter="FL"),
])
with self.assertRaisesMessage(ValueError, expected_message):
ProxyMultiProxyCountry.objects.bulk_create([
ProxyMultiProxyCountry(name="Fillory", iso_two_letter="FL"),
])
def test_proxy_inheritance_supported(self):
ProxyCountry.objects.bulk_create([
ProxyCountry(name="Qwghlm", iso_two_letter="QW"),
Country(name="Tortall", iso_two_letter="TA"),
])
self.assertQuerysetEqual(ProxyCountry.objects.all(), {
"Qwghlm", "Tortall"
}, attrgetter("name"), ordered=False)
ProxyProxyCountry.objects.bulk_create([
ProxyProxyCountry(name="Neitherlands", iso_two_letter="NT"),
])
self.assertQuerysetEqual(ProxyProxyCountry.objects.all(), {
"Qwghlm", "Tortall", "Neitherlands",
}, attrgetter("name"), ordered=False)
def test_non_auto_increment_pk(self):
State.objects.bulk_create([
State(two_letter_code=s)
for s in ["IL", "NY", "CA", "ME"]
])
self.assertQuerysetEqual(State.objects.order_by("two_letter_code"), [
"CA", "IL", "ME", "NY",
], attrgetter("two_letter_code"))
@skipUnlessDBFeature('has_bulk_insert')
def test_non_auto_increment_pk_efficiency(self):
with self.assertNumQueries(1):
State.objects.bulk_create([
State(two_letter_code=s)
for s in ["IL", "NY", "CA", "ME"]
])
self.assertQuerysetEqual(State.objects.order_by("two_letter_code"), [
"CA", "IL", "ME", "NY",
], attrgetter("two_letter_code"))
@skipIfDBFeature('allows_auto_pk_0')
def test_zero_as_autoval(self):
"""
Zero as id for AutoField should raise exception in MySQL, because MySQL
does not allow zero for automatic primary key.
"""
valid_country = Country(name='Germany', iso_two_letter='DE')
invalid_country = Country(id=0, name='Poland', iso_two_letter='PL')
with self.assertRaises(ValueError):
Country.objects.bulk_create([valid_country, invalid_country])
def test_batch_same_vals(self):
# Sqlite had a problem where all the same-valued models were
# collapsed to one insert.
Restaurant.objects.bulk_create([
Restaurant(name='foo') for i in range(0, 2)
])
self.assertEqual(Restaurant.objects.count(), 2)
def test_large_batch(self):
with override_settings(DEBUG=True):
connection.queries_log.clear()
TwoFields.objects.bulk_create([
TwoFields(f1=i, f2=i + 1) for i in range(0, 1001)
])
self.assertEqual(TwoFields.objects.count(), 1001)
self.assertEqual(
TwoFields.objects.filter(f1__gte=450, f1__lte=550).count(),
101)
self.assertEqual(TwoFields.objects.filter(f2__gte=901).count(), 101)
@skipUnlessDBFeature('has_bulk_insert')
def test_large_single_field_batch(self):
# SQLite had a problem with more than 500 UNIONed selects in single
# query.
Restaurant.objects.bulk_create([
Restaurant() for i in range(0, 501)
])
@skipUnlessDBFeature('has_bulk_insert')
def test_large_batch_efficiency(self):
with override_settings(DEBUG=True):
connection.queries_log.clear()
TwoFields.objects.bulk_create([
TwoFields(f1=i, f2=i + 1) for i in range(0, 1001)
])
self.assertLess(len(connection.queries), 10)
def test_large_batch_mixed(self):
"""
Test inserting a large batch with objects having primary key set
mixed together with objects without PK set.
"""
with override_settings(DEBUG=True):
connection.queries_log.clear()
TwoFields.objects.bulk_create([
TwoFields(id=i if i % 2 == 0 else None, f1=i, f2=i + 1)
for i in range(100000, 101000)])
self.assertEqual(TwoFields.objects.count(), 1000)
# We can't assume much about the ID's created, except that the above
# created IDs must exist.
id_range = range(100000, 101000, 2)
self.assertEqual(TwoFields.objects.filter(id__in=id_range).count(), 500)
self.assertEqual(TwoFields.objects.exclude(id__in=id_range).count(), 500)
@skipUnlessDBFeature('has_bulk_insert')
def test_large_batch_mixed_efficiency(self):
"""
Test inserting a large batch with objects having primary key set
mixed together with objects without PK set.
"""
with override_settings(DEBUG=True):
connection.queries_log.clear()
TwoFields.objects.bulk_create([
TwoFields(id=i if i % 2 == 0 else None, f1=i, f2=i + 1)
for i in range(100000, 101000)])
self.assertLess(len(connection.queries), 10)
def test_explicit_batch_size(self):
objs = [TwoFields(f1=i, f2=i) for i in range(0, 4)]
TwoFields.objects.bulk_create(objs, 2)
self.assertEqual(TwoFields.objects.count(), len(objs))
TwoFields.objects.all().delete()
TwoFields.objects.bulk_create(objs, len(objs))
self.assertEqual(TwoFields.objects.count(), len(objs))
@skipUnlessDBFeature('has_bulk_insert')
def test_explicit_batch_size_efficiency(self):
objs = [TwoFields(f1=i, f2=i) for i in range(0, 100)]
with self.assertNumQueries(2):
TwoFields.objects.bulk_create(objs, 50)
TwoFields.objects.all().delete()
with self.assertNumQueries(1):
TwoFields.objects.bulk_create(objs, len(objs))
| bsd-3-clause |
pforai/easybuild-framework | easybuild/toolchains/mpi/mvapich2.py | 9 | 1489 | ##
# Copyright 2012-2015 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
Support for MVAPICH2 as toolchain MPI library.
@author: Stijn De Weirdt (Ghent University)
@author: Kenneth Hoste (Ghent University)
"""
from easybuild.toolchains.mpi.mpich2 import Mpich2
TC_CONSTANT_MVAPICH2 = "MVAPICH2"
class Mvapich2(Mpich2):
"""MVAPICH2 MPI class"""
MPI_MODULE_NAME = ["MVAPICH2"]
MPI_FAMILY = TC_CONSTANT_MVAPICH2
MPI_LIBRARY_NAME = 'mpich'
MPI_LINK_INFO_OPTION = '-link_info'
| gpl-2.0 |
devs1991/test_edx_docmode | venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/sbcharsetprober.py | 2927 | 4793 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import sys
from . import constants
from .charsetprober import CharSetProber
from .compat import wrap_ord
SAMPLE_SIZE = 64
SB_ENOUGH_REL_THRESHOLD = 1024
POSITIVE_SHORTCUT_THRESHOLD = 0.95
NEGATIVE_SHORTCUT_THRESHOLD = 0.05
SYMBOL_CAT_ORDER = 250
NUMBER_OF_SEQ_CAT = 4
POSITIVE_CAT = NUMBER_OF_SEQ_CAT - 1
#NEGATIVE_CAT = 0
class SingleByteCharSetProber(CharSetProber):
def __init__(self, model, reversed=False, nameProber=None):
CharSetProber.__init__(self)
self._mModel = model
# TRUE if we need to reverse every pair in the model lookup
self._mReversed = reversed
# Optional auxiliary prober for name decision
self._mNameProber = nameProber
self.reset()
def reset(self):
CharSetProber.reset(self)
# char order of last character
self._mLastOrder = 255
self._mSeqCounters = [0] * NUMBER_OF_SEQ_CAT
self._mTotalSeqs = 0
self._mTotalChar = 0
# characters that fall in our sampling range
self._mFreqChar = 0
def get_charset_name(self):
if self._mNameProber:
return self._mNameProber.get_charset_name()
else:
return self._mModel['charsetName']
def feed(self, aBuf):
if not self._mModel['keepEnglishLetter']:
aBuf = self.filter_without_english_letters(aBuf)
aLen = len(aBuf)
if not aLen:
return self.get_state()
for c in aBuf:
order = self._mModel['charToOrderMap'][wrap_ord(c)]
if order < SYMBOL_CAT_ORDER:
self._mTotalChar += 1
if order < SAMPLE_SIZE:
self._mFreqChar += 1
if self._mLastOrder < SAMPLE_SIZE:
self._mTotalSeqs += 1
if not self._mReversed:
i = (self._mLastOrder * SAMPLE_SIZE) + order
model = self._mModel['precedenceMatrix'][i]
else: # reverse the order of the letters in the lookup
i = (order * SAMPLE_SIZE) + self._mLastOrder
model = self._mModel['precedenceMatrix'][i]
self._mSeqCounters[model] += 1
self._mLastOrder = order
if self.get_state() == constants.eDetecting:
if self._mTotalSeqs > SB_ENOUGH_REL_THRESHOLD:
cf = self.get_confidence()
if cf > POSITIVE_SHORTCUT_THRESHOLD:
if constants._debug:
sys.stderr.write('%s confidence = %s, we have a'
'winner\n' %
(self._mModel['charsetName'], cf))
self._mState = constants.eFoundIt
elif cf < NEGATIVE_SHORTCUT_THRESHOLD:
if constants._debug:
sys.stderr.write('%s confidence = %s, below negative'
'shortcut threshhold %s\n' %
(self._mModel['charsetName'], cf,
NEGATIVE_SHORTCUT_THRESHOLD))
self._mState = constants.eNotMe
return self.get_state()
def get_confidence(self):
r = 0.01
if self._mTotalSeqs > 0:
r = ((1.0 * self._mSeqCounters[POSITIVE_CAT]) / self._mTotalSeqs
/ self._mModel['mTypicalPositiveRatio'])
r = r * self._mFreqChar / self._mTotalChar
if r >= 1.0:
r = 0.99
return r
| agpl-3.0 |
marcelometal/Django-facebook | docs/conf.py | 2 | 7713 | # -*- coding: utf-8 -*-
#
# Django Facebook documentation build configuration file, created by
# sphinx-quickstart on Mon Apr 11 12:43:17 2011.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
django_facebook_path = os.path.abspath('..')
facebook_example_path = os.path.abspath(os.path.join('..', 'facebook_example'))
sys.path.append(django_facebook_path)
sys.path.append(facebook_example_path)
os.environ['DJANGO_SETTINGS_MODULE'] = 'facebook_example.settings'
django_facebook = __import__('django_facebook')
# General information about the project.
project = u'Django Facebook'
copyright = u'2011, Thierry Schellenbach'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = django_facebook.__version__
# The full version, including alpha/beta/rc tags.
release = django_facebook.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'autumn'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'DjangoFacebookdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'DjangoFacebook.tex', u'Django Facebook Documentation',
u'Thierry Schellenbach', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'djangofacebook', u'Django Facebook Documentation',
[u'Thierry Schellenbach'], 1)
]
| bsd-3-clause |
QTB-HHU/ModelHeatShock | HSM_VaryParamsRMSvsData.py | 1 | 30309 |
from copy import deepcopy
import math
#from HSM_ParametersClass import *
from HSM_SimulateClass import *
def GenerateRandomParametersSets(NumberOfRandomSets, FactorOf, DefaultParamSetRATES):
"""GENERATE SETS OF RANDOM PARAMETERS FROM A FLAT DISTRIBUTION CENTERED AROUND FIDUCIAL VALUES AND WITHIN A FACTOR OF FactorOf"""
DictionaryOfMaxs = {}
DictionaryOfMins = {}
for key in DefaultParamSetRATES:
#DictionaryOfMaxs[key] = deepcopy(DefaultParamSetRATES[key]*FactorOf)
DictionaryOfMaxs[key] = deepcopy(DefaultParamSetRATES[key] - DefaultParamSetRATES[key]*FactorOf)
#DictionaryOfMins[key] = deepcopy(DefaultParamSetRATES[key]/FactorOf)
DictionaryOfMins[key] = deepcopy(DefaultParamSetRATES[key] + DefaultParamSetRATES[key]*FactorOf)
import random
ListOfManyDictionaryOfRandomParameters = []
for i in range(NumberOfRandomSets):
DictionaryOfRandomParameters = {}
for key in DefaultParamSetRATES:
RandomNumber = random.random()
NewValue = deepcopy(DictionaryOfMins[key] + RandomNumber*(DictionaryOfMaxs[key]-DictionaryOfMins[key]))
DictionaryOfRandomParameters[key] = NewValue
ListOfManyDictionaryOfRandomParameters.append(deepcopy((DictionaryOfRandomParameters)))
return ListOfManyDictionaryOfRandomParameters
def GenerateParametersSetsChangingOneParameter(NumberOfValuesForEachParameterk, FactorOfII, DefaultParamSetRATES):
"""GENERATE SETS OF PARAMETERS By Changing Only 1 PARAMETER AT A TIME"""
ListOfManyDictionariesOfParametersVarying1by1 = []
for key in DefaultParamSetRATES:
DictionaryOfTestParameters = deepcopy(DefaultParamSetRATES)
for j in range(NumberOfValuesForEachParameterk+1):
NewValue = deepcopy(DefaultParamSetRATES[key] + FactorOfII * DefaultParamSetRATES[key] * ( 2*j - NumberOfValuesForEachParameterk)/NumberOfValuesForEachParameterk)
DictionaryOfTestParameters[key] = deepcopy(NewValue)
ListOfManyDictionariesOfParametersVarying1by1.append(deepcopy(DictionaryOfTestParameters))
return ListOfManyDictionariesOfParametersVarying1by1
def ExtractDataControlFeedExpHSFandHSP90aFromFiles(DataFileNameHSFcontrol, DataFileNameHSP90Acontrol):
ListToBeFilledWithResults = []
# CONTROL HSF
ColumnNumber = 2 # Only the time and the row data are used
ListOfDataArraysHSF = []
FromDataFileToArrays(DataFileNameHSFcontrol, ColumnNumber, ListOfDataArraysHSF) # Read data file, put in list of arrays
ListToBeFilledWithResults.append(ListOfDataArraysHSF[1])
# Times
ListToBeFilledWithResults.append(ListOfDataArraysHSF[0])
# CONTROL HSP90a
ColumnNumber = 2 # Only the time and the row data are used
ListOfDataArraysHSP90a = []
FromDataFileToArrays(DataFileNameHSP90Acontrol, ColumnNumber, ListOfDataArraysHSP90a) # Read data file, put in list of arrays
ListToBeFilledWithResults.append(ListOfDataArraysHSP90a[1])
return ListToBeFilledWithResults
def ComputePartOfRMSSimulationVsData(ListOfDataTimes, timesetDataRMS, SimulationExperimentDataRMS, ListOfDataHSF, ListOfDataHSP90a):
"""Compute the Sum over all datapoints of ( Xth - Xdata )^2, for 1 feeding experiment, taking into caccount HSF + HSP """
##### 2-C1.1: EXTRACT SIMULATION VALUES AT PROPER TIME POINTS FOR COMPARISON WITH DATA
ListOfmRNAHSFsimulation = []
ListOfmRNAHSPsimulation = []
ListOfTimesForDatapoints = ListOfDataTimes #= [0., 15., 30., 45., 60., 120.]
for val in ListOfTimesForDatapoints:
j = ( val * 60. + vorl ) / timesetDataRMS.CurrentParams["delta_t"] # ( seconds/seconds = adimensional )
ListOfmRNAHSFsimulation.append(SimulationExperimentDataRMS.RF[j])
ListOfmRNAHSPsimulation.append(SimulationExperimentDataRMS.RHP[j])
ArrayOfmRNAHSFsimulation = np.array([val for sublist in ListOfmRNAHSFsimulation for val in sublist])
ArrayOfmRNAHSPsimulation = np.array([val for sublist in ListOfmRNAHSPsimulation for val in sublist])
##### 2-C1.2: COMPUTE pieces of LS TH VS DATA - STAUROSPORINE
# print("We now wants to compare these...")
# print(ListOfDataHSF_stau)
# print(ListOfDataHSP90a_stau)
# print(ArrayOfmRNAHSFsimulation)
# print(ArrayOfmRNAHSPsimulation)
k = 0
SumOverDataPointsHSF = 0.
for val in ListOfDataHSF:
DeviationHSF = ArrayOfmRNAHSFsimulation[k]/max(ArrayOfmRNAHSFsimulation) - ListOfDataHSF[k]/max(ListOfDataHSF)
SumOverDataPointsHSF = SumOverDataPointsHSF + pow(DeviationHSF, 2)
k = k + 1
l = 0
SumOverDataPointsHSP90a = 0.
for val in ListOfDataHSP90a:
DeviationHSP90a = ArrayOfmRNAHSPsimulation[l]/max(ArrayOfmRNAHSPsimulation) - ListOfDataHSP90a[l]/max(ListOfDataHSP90a)
SumOverDataPointsHSP90a = SumOverDataPointsHSP90a + pow(DeviationHSP90a, 2)
l = l + 1
SumOverDatapoints = SumOverDataPointsHSF + SumOverDataPointsHSP90a
return SumOverDatapoints
def PlotSimulationVsDataFeeding(SimulationFeedingControlsDataRMStimes, ListForPlottingHSF, ListForPlottingHSP, timesetDataRMS, ListOfDataHSF, ListOfDataHSP90a, ListOfDataTimes, FigureName, FigureExtension, FolderContainingDataVsSimuCalibration):
"""Plot mRNAs for HSF and HSP90a feeding experiments data VS simulations to see if it makes sense"""
fig = figure()
############ Simulations
ax1 = plt.subplot(121)
SubPlot(ax1, SimulationFeedingControlsDataRMStimes, ListForPlottingHSF, 'Time (min)', 0.,
(timesetDataRMS.CurrentParams["t_stop"] - vorl) / 60., " ", 0, 0, "upper right", "A",
Legendfontsize="small", Legendfancybox=True)
ax2 = plt.subplot(122)
SubPlot(ax2, SimulationFeedingControlsDataRMStimes, ListForPlottingHSP, 'Time (min)', 0.,
(timesetDataRMS.CurrentParams["t_stop"] - vorl) / 60., " ", 0, 0, "upper right", "B",
Legendfontsize="small", Legendfancybox=True)
############ and Data Points
ListOfDataHSFNORM = []
k = 0
for val in ListOfDataHSF:
ListOfDataHSFNORM.append(ListOfDataHSF[k]/max(ListOfDataHSF))
k = k + 1
ListOfDataHSP90aNORM = []
k = 0
for val in ListOfDataHSP90a:
ListOfDataHSP90aNORM.append(ListOfDataHSP90a[k]/max(ListOfDataHSP90a))
k = k + 1
DataLegend = [r"Data" + str(FigureName)]
DataSubPlot(ax1, ListOfDataTimes, [ListOfDataTimes, np.asarray(ListOfDataHSFNORM)], 'Time (min)', 0.,
(timesetDataRMS.CurrentParams["t_stop"] - vorl) / 60., r"mRNA$_{HSF}$ (Normalizet to Max)", 0., 1., "upper right", DataLegend, "",
Legendfontsize="small", Legendfancybox=True, Black = "Yes")
DataLegend = [r"Data" + str(FigureName)]
DataSubPlot(ax2, ListOfDataTimes, [ListOfDataTimes, np.asarray(ListOfDataHSP90aNORM)], 'Time (min)', 0.,
(timesetDataRMS.CurrentParams["t_stop"] - vorl) / 60., r"mRNA$_{HSP}$ (Normalizet to Max)", 0., 1., "upper right", DataLegend, "",
Legendfontsize="small", Legendfancybox=True, Black = "Yes")
PlotAndSave(fig, FolderContainingDataVsSimuCalibration + "FittingToData" + str(FigureName) + FigureExtension, "PS", 1, 0)
##################################################### LOOK HERE!!!!!!!!!!!!!!!!!!!!! ##########################################################
def PlotSimulationVsDataFeedingModelVSFittedData(SimulationFeedingControlsDataRMStimes, ListForPlottingHSF, ListForPlottingHSP, timesetDataRMS, ListOfDataHSF, ListOfDataHSP90a, ListOfDataTimes, FigureName, FigureExtension, FolderContainingDataVsSimuCalibration):
"""Plot mRNAs for HSF and HSP90a feeding experiments data VS simulation best fit for paper"""
##### for key in AllDataControlsFeeding[5]:#ListOfFeedingKeys:
fig = figure()
############ Simulations
ax1 = plt.subplot(121)
SubPlot(ax1, SimulationFeedingControlsDataRMStimes, ListForPlottingHSF, 'Time (min)', 0.,
(timesetDataRMS.CurrentParams["t_stop"] - vorl) / 60., " ", 0, 0, "upper right", "A",
Legendfontsize="small", Legendfancybox=True, Black = "Yes")
ax2 = plt.subplot(122)
SubPlot(ax2, SimulationFeedingControlsDataRMStimes, ListForPlottingHSP, 'Time (min)', 0.,
(timesetDataRMS.CurrentParams["t_stop"] - vorl) / 60., " ", 0, 0, "upper right", "B",
Legendfontsize="small", Legendfancybox=True, Black = "Yes")
############ and Data Points
ListOfFeedingKeys= ["stau", "radi", "ChloCyc", "canav", "Gelda", "CaChel"]
DictionaryForLegend = {"stau": "Staurosporine",
"radi": "Radicicol",
"ChloCyc": "Chlor. / Cyclo.",
"canav": "Canavanine",
"Gelda": "Geldanamicil",
"CaChel": "Calcium Chelator"}
i = 0
for key in ListOfFeedingKeys:
ListOfDataHSFNORM = []
k = 0
for val in ListOfDataHSF[key]:
ListOfDataHSFNORM.append(ListOfDataHSF[key][k]/max(ListOfDataHSF[key]))
k = k + 1
ListOfDataHSP90aNORM = []
k = 0
for val in ListOfDataHSP90a[key]:
ListOfDataHSP90aNORM.append(ListOfDataHSP90a[key][k]/max(ListOfDataHSP90a[key]))
k = k + 1
DataLegend = [r"Data Control " + DictionaryForLegend[key] + " Exp."]
DataSubPlotMOD(ax1, ListOfDataTimes[key], [ListOfDataTimes[key], np.asarray(ListOfDataHSFNORM)], 'Time (min)', 0.,
(timesetDataRMS.CurrentParams["t_stop"] - vorl) / 60., r"Concentration of mRNA$_{HSF}$ (normalized to max)", 0., 1., "upper right", DataLegend, "",
Legendfontsize="small", Legendfancybox=True, ColorNumber = i)
DataLegend = [r"Data Control " + DictionaryForLegend[key] + " Exp."]
DataSubPlotMOD(ax2, ListOfDataTimes[key], [ListOfDataTimes[key], np.asarray(ListOfDataHSP90aNORM)], 'Time (min)', 0.,
(timesetDataRMS.CurrentParams["t_stop"] - vorl) / 60., r"Concentration of mRNA$_{HSP}$ (normalized to max)", 0., 1., "upper right", DataLegend, "",
Legendfontsize="small", Legendfancybox=True, ColorNumber = i)
i = i+1
PlotAndSave(fig, FolderContainingDataVsSimuCalibration + "FittingToDataPAPERversion" + str(FigureName) + FigureExtension, "PS", 1, 0)
def ExtractDataControlsFeedingExperimentsFromFilesIntoListOfDictionaries():
""" EXTRACT EXPERIMENTAL DATA (FEEDING EXPERIMENTS) FROM FILES ALEXANDER SKUPIN """
ListOfFeedingKeys= ["stau", "radi", "ChloCyc", "canav", "Gelda", "CaChel"]
DictionaryOfHSFcontrolFiles, DictionaryOfHSP90acontrolFiles = {}, {}
for key in ListOfFeedingKeys:
DictionaryOfHSFcontrolFiles.update({key : "DataFilesOriginals/" + "hsfcontrol_" + str(key) + ".csv"})
DictionaryOfHSP90acontrolFiles.update({key : "DataFilesOriginals/" + "hsp90acontrol_" + str(key) + ".csv"})
DictionaryOfListsOfDataHSF, DictionaryOfListsOfDataTimes, DictionaryOfListsOfDataHSP90a = {}, {}, {}
for key in ListOfFeedingKeys:
ListOfListsOfExtractedData = ExtractDataControlFeedExpHSFandHSP90aFromFiles(DictionaryOfHSFcontrolFiles[key], DictionaryOfHSP90acontrolFiles[key])
DictionaryOfListsOfDataHSF.update({key : ListOfListsOfExtractedData[0]})
DictionaryOfListsOfDataTimes.update({key : ListOfListsOfExtractedData[1]})
DictionaryOfListsOfDataHSP90a.update({key : ListOfListsOfExtractedData[2]})
# Create Temperature settings and Time settings reproducing the experimental setup of controls for feeding experiments
TsetSchmol2013dataRMS = ParametersSet({"Ttype": 1, "Tin": 25., "Tup": 40., "tau": 5., "ta": 0. * 60. + vorl})
timeset240minsDataRMS = ParametersSet({"t_start": 0., "t_stop": 240. * 60. + vorl, "delta_t": 5.0})
AllDataControlsFeeding = (DictionaryOfListsOfDataTimes, DictionaryOfListsOfDataHSF, DictionaryOfListsOfDataHSP90a, TsetSchmol2013dataRMS, timeset240minsDataRMS, ListOfFeedingKeys)
return AllDataControlsFeeding
def ComputeRMSfeedingForGivenParameterSet(ParamSetRates, ParamSetForREACTIONS, ParamSetInitCond, OutputHSFandHSPtoPlot, AllDataControlsFeeding):
    """Compute the RMS deviation between simulation and the control data of the
    feeding experiments (Schmollinger et al. 2013) for one parameter set.

    Parameters
    ----------
    ParamSetRates, ParamSetInitCond : dict
        Rate constants and initial conditions of the model.
    ParamSetForREACTIONS : dict
        Reaction parameters; NOTE: modified in place ("piRFconst" and
        "piRHPconst" are recomputed from the rates below).
    OutputHSFandHSPtoPlot : str
        "No"  -> return only the RMS value;
        "Yes" -> also return the simulated mRNA courses (RF, RHP) and times.
    AllDataControlsFeeding : tuple
        Output of ExtractDataControlsFeedingExperimentsFromFilesIntoListOfDictionaries().

    Raises
    ------
    ValueError
        If OutputHSFandHSPtoPlot is neither "Yes" nor "No".
    """
    ############ 1] ############ Generate a model for this parameter set.
    # Temperature and time parameter sets reproducing the experiments.
    TsetSchmol2013dataRMS = AllDataControlsFeeding[3]
    timeset240minsDataRMS = AllDataControlsFeeding[4]
    TestParamSetIC = ParametersSet(ParamSetInitCond)
    TestParamSetRATES = ParametersSet(ParamSetRates)
    # Derived production constants, kept consistent with the rates.
    ParamSetForREACTIONS["piRFconst"] = ParamSetRates["ketaRF"]*ParamSetRates["ketaF"]/ParamSetRates["kFpi0"]*0.17/8.
    ParamSetForREACTIONS["piRHPconst"] = ParamSetRates["ketaRHP"]*ParamSetRates["ketaHP"]/ParamSetRates["kpiHP"]*17.5
    ParamSetForREACTIONSobject = ParametersSet(ParamSetForREACTIONS)
    TestHSM = HeatShockModel(TestParamSetIC, TestParamSetRATES, ParamSetForREACTIONSobject)
    ############ 2] ############ Pieces needed for the RMS w.r.t. the data.
    ##### 2-A: one simulation covers the controls of ALL feeding experiments.
    SimulationFeedingControlsDataRMS = Simulate(TestHSM, timeset240minsDataRMS, TsetSchmol2013dataRMS, "xyzUSELESSxyz")
    SimulationFeedingControlsDataRMS.TimeRun(AvoidPlots="Yes")
    ##### 2-B: extract the data dictionaries from the input tuple.
    DictionaryOfListsOfDataTimes = AllDataControlsFeeding[0]
    DictionaryOfListsOfDataHSF = AllDataControlsFeeding[1]
    DictionaryOfListsOfDataHSP90a = AllDataControlsFeeding[2]
    ListOfFeedingKeys = AllDataControlsFeeding[5]
    ##### 2-C: per-experiment sum of squared deviations.
    SumOverDatapointsFeeding = {}
    for key in ListOfFeedingKeys:
        SumOverDatapointsFeeding.update( {key : ComputePartOfRMSSimulationVsData(DictionaryOfListsOfDataTimes[key], timeset240minsDataRMS, SimulationFeedingControlsDataRMS, DictionaryOfListsOfDataHSF[key], DictionaryOfListsOfDataHSP90a[key])} )
    ############ 3] ############ Combine all datasets into one RMS value.
    NumberOfDataPoints, SumOverDatapoints = 0., 0.
    for key in ListOfFeedingKeys:
        NumberOfDataPoints = NumberOfDataPoints + len(DictionaryOfListsOfDataHSF[key]) + len(DictionaryOfListsOfDataHSP90a[key])
        SumOverDatapoints = SumOverDatapoints + SumOverDatapointsFeeding[key]
    RootMeanSquareDeviation = math.sqrt( SumOverDatapoints / NumberOfDataPoints )
    print("\n" + str(RootMeanSquareDeviation) + "\n")
    if OutputHSFandHSPtoPlot == "No":
        output = (RootMeanSquareDeviation)
    elif OutputHSFandHSPtoPlot == "Yes":
        output = (RootMeanSquareDeviation, SimulationFeedingControlsDataRMS.RF, SimulationFeedingControlsDataRMS.RHP, SimulationFeedingControlsDataRMS.t)
    else:
        # BUG FIX: this branch used to only print a message and then hit
        # "return output" with `output` unbound, raising an UnboundLocalError
        # that hid the real problem.  Fail loudly and explicitly instead.
        raise ValueError('OutputHSFandHSPtoPlot must be "Yes" or "No", got %r' % (OutputHSFandHSPtoPlot,))
    return output
def Convert_ORIGINAL_to_RESCALED_ParameterSet(ORIGINAL_ParameterSetDictionary, RescalingFactorsDictionary):
    """Divide each parameter by its fiducial rescaling factor.

    Only the keys present in RescalingFactorsDictionary are converted; the
    result is a fresh dictionary (deep copies throughout, inputs untouched).
    """
    rescaled = {
        key: deepcopy(ORIGINAL_ParameterSetDictionary[key]) / deepcopy(RescalingFactorsDictionary[key])
        for key in RescalingFactorsDictionary
    }
    return deepcopy(rescaled)
def Convert_RESCALED_to_ORIGINAL_ParameterSet(RESCALED_ParameterSetDictionary, RescalingFactorsDictionary):
    """Inverse of Convert_ORIGINAL_to_RESCALED_ParameterSet: multiply each
    rescaled parameter by its fiducial factor.  Returns a fresh dictionary
    restricted to the keys of RescalingFactorsDictionary.
    """
    original = {
        key: deepcopy(RESCALED_ParameterSetDictionary[key]) * deepcopy(RescalingFactorsDictionary[key])
        for key in RescalingFactorsDictionary
    }
    return deepcopy(original)
def ComputeRMSfeedingForGivenParameterSet_RESCALED_PARAMETERS(ParamSetRates_RESCALED, ParamSetForREACTIONS, ParamSetInitCond, OutputHSFandHSPtoPlot, AllDataControlsFeeding, RescalingFactorsDictionary):
    """Same as ComputeRMSfeedingForGivenParameterSet, but the rate parameters
    arrive rescaled by their fiducial values.

    This is a preconditioning step: working with O(1) parameters makes the
    cost function easier for optimizers such as gradient search.
    """
    original_rates = deepcopy(Convert_RESCALED_to_ORIGINAL_ParameterSet(ParamSetRates_RESCALED, RescalingFactorsDictionary))
    return ComputeRMSfeedingForGivenParameterSet(original_rates, ParamSetForREACTIONS, ParamSetInitCond, OutputHSFandHSPtoPlot, AllDataControlsFeeding)
def FindMinimumOfFunctionUsingGoldenRatioBisectionMethod(FunctionToMinimize, LowerBound, UpperBound, Tolerance):
    """Golden-section search for the minimizer of a unimodal function on
    [LowerBound, UpperBound].

    Stops when the bracket is narrower than Tolerance or after 16 iterations,
    and returns the midpoint of the final bracket.  Each iteration reuses one
    of the two interior evaluations, so only one new function call is made.
    """
    GoldenRatio = 2./(math.sqrt(5.) + 1)
    ### Place the two interior test points using the golden ratio.
    x1 = UpperBound - GoldenRatio*(UpperBound - LowerBound)
    x2 = LowerBound + GoldenRatio*(UpperBound - LowerBound)
    ### Evaluate the function at the test points.
    f1 = FunctionToMinimize(x1)
    f2 = FunctionToMinimize(x2)
    i = 0
    while ( (abs(UpperBound - LowerBound) > Tolerance) and i <= 15):
        i = i + 1
        if f2 > f1:
            # The minimum lies to the left of x2: x2 becomes the new upper
            # bound and x1 the new upper test point (golden-ratio reuse).
            UpperBound = x2
            x2, f2 = x1, f1
            x1 = UpperBound - GoldenRatio*(UpperBound - LowerBound)
            f1 = FunctionToMinimize(x1)
        else:
            # f2 <= f1: the minimum lies to the right of x1 (on an exact tie
            # either side of the bracket may be discarded).  BUG FIX: the old
            # code treated f1 == f2 as an error, printed a message and made
            # no progress for that iteration; now every iteration shrinks the
            # bracket by the golden ratio.
            LowerBound = x1
            x1, f1 = x2, f2
            x2 = LowerBound + GoldenRatio*(UpperBound - LowerBound)
            f2 = FunctionToMinimize(x2)
        # Progress trace (kept from the original implementation).
        print(str(i) + " " + str(x1) + " " + str(x2) + " " + str(f1) + " " + str(f2))
    ### Use the midpoint of the final bracket as the estimate of the minimizer.
    EstimatedMinimizer = (LowerBound + UpperBound)/2.
    return EstimatedMinimizer
def NormalizeRnaCurvesFromSimulationsToMaxForPlot(RMSvalue, mRNA_HSF_simulation, mRNA_HSP_simulation, ListForPlottingHSF, ListForPlottingHSP, CurveNameInLegend, timesetDataRMS):
    """Normalize the simulated mRNA curves to their maximum after t = 0 and
    append them, tagged with the legend label, to the plotting lists.

    ListForPlottingHSF / ListForPlottingHSP are modified in place; RMSvalue is
    accepted for interface symmetry but not used here.
    """
    # Index of t = 0 s in the simulation arrays (vorl is the pre-run offset in seconds).
    first_index = int(vorl / timesetDataRMS.CurrentParams["delta_t"])  # (seconds/seconds = adimensional)
    positive_time_indexes = range(first_index, len(mRNA_HSF_simulation), 1)
    hsf_max = max(np.max(mRNA_HSF_simulation[kkk]) for kkk in positive_time_indexes)
    hsp_max = max(np.max(mRNA_HSP_simulation[kkk]) for kkk in positive_time_indexes)
    # Multiply by 100. here to plot percentages instead of fractions.
    normalized_hsf = np.asarray(mRNA_HSF_simulation) / hsf_max
    normalized_hsp = np.asarray(mRNA_HSP_simulation) / hsp_max
    ListForPlottingHSF.append([CurveNameInLegend, normalized_hsf])
    ListForPlottingHSP.append([CurveNameInLegend, normalized_hsp])
##############################################################################################
##################################### RMS for DOUBLE HS ######################################
##############################################################################################
def ExtractDataDoubleHSFromFiles(DataFileName, ListOfDoubleHSKeys):
    """Read the double-heat-shock data file and return a two-element list:
    [array of time points, {setup key: data array}].

    Column 0 of the file holds the time points; columns 1..n map onto
    ListOfDoubleHSKeys in order.
    """
    ColumnNumber = 6
    columns = []
    FromDataFileToArrays(DataFileName, ColumnNumber, columns)  # fills `columns`, one array per file column
    time_points = np.array(columns[0])
    results_by_setup = {key: columns[position] for position, key in enumerate(ListOfDoubleHSKeys, start=1)}
    return [time_points, results_by_setup]
#############################
def ExtractDataControlsDoubleHSExperimentFromFilesIntoListOfDictionaries():
    """Load the double-heat-shock (ARS activity) dataset of Shroda et al. 2000,
    Fig. 7b, together with the simulation settings reproducing the protocol.

    Returns a 6-tuple shaped like the feeding-experiment loader's output:
        (times-by-setup, ARS-data-by-setup, placeholder string, temperature
         ParametersSet, time ParametersSet, list of setup keys)
    """
    setup_keys = ["singleHS", "2hdoubleHS", "3hdoubleHS", "4hdoubleHS", "5hdoubleHS"]
    data_file = "DataFiles/DataShroda2000ARSFig7b.dat"
    extracted = ExtractDataDoubleHSFromFiles(data_file, setup_keys)
    # extracted[0] (shared time axis, in seconds) is superseded by the
    # per-setup time arrays defined below (in minutes).
    ars_data_by_setup = extracted[1]
    times_by_setup = {
        "singleHS" : np.array([0.00, 30.0, 60.0, 90.0, 120.0, 150.0, 180.0, 210.0, 240.0, 270.0, 300.0, 330.0, 360.0]),
        "2hdoubleHS" : np.array([0.00, 30.0, 60.0, 90.0, 120.0, 150.0, 180.0, 210.0, 240.0, 270.0]),
        "3hdoubleHS" : np.array([0.00, 30.0, 60.0, 90.0, 120.0, 150.0, 180.0, 210.0, 240.0, 270.0, 300.0, 330.0]),
        "4hdoubleHS" : np.array([0.00, 30.0, 60.0, 90.0, 120.0, 150.0, 180.0, 210.0, 240.0, 270.0, 300.0, 330.0, 360.0, 390.0]),
        "5hdoubleHS" : np.array([0.00, 30.0, 60.0, 90.0, 120.0, 150.0, 180.0, 210.0, 240.0, 270.0, 300.0, 330.0, 360.0, 390.0, 420.0]),
    }
    # Settings of the first heat shock; they are modified later when solving
    # the ODEs for the different double-HS protocols.
    HSduration = 30. # (min)
    TsetDoubleHSdataRMS = ParametersSet({"Ttype": 2, "Tin": 23., "Tup": 40., "tau": 5., "ta": 0. * 60. + vorl, "tb": HSduration * 60. + vorl})
    timesetDoubleHSDataRMS = ParametersSet({"t_start": 0., "t_stop": (2. * HSduration + 5 * 60. + 60.) * 60 + vorl, "delta_t": 5.0})
    USELESS = "This should not appear. If you see it anywere, it means something is wrong. It is needed to keep the number of elements of AllDataControlsDoubleHS"
    return (times_by_setup, ars_data_by_setup, USELESS, TsetDoubleHSdataRMS, timesetDoubleHSDataRMS, setup_keys)
#############################
def ComputePartOfRMSSimulationVsDataDoubleHS(ListOfDataTimes, timesetDoubleHSDataRMS, TimePointsForAllSimulations, ARSconcentrationForOneSetup, ListOfDataARSactivity, AbsoluteMaxData, AbsoluteMaxSimulation):
    """Sum of squared deviations between the simulated and measured ARS
    activity for one (single or double) heat-shock setup.

    Both curves are normalized by their respective absolute maxima so the
    comparison is scale free.  ``TimePointsForAllSimulations`` is accepted for
    interface symmetry but not used.  Assumes the simulation samples and
    ``ListOfDataARSactivity`` have matching lengths.
    """
    ##### 2-C1.1: sample the simulation at the experimental time points.
    sampled_simulation = []
    for minutes in deepcopy(ListOfDataTimes):  # e.g. [0., 30., 60., 90., ...] (min)
        # Index of the data point in the simulation output; vorl is the
        # pre-run offset in seconds.  BUG FIX: the quotient must be cast to
        # int — indexing a sequence with a float is a TypeError on Python 3.
        j = int((minutes * 60. + vorl) / timesetDoubleHSDataRMS.CurrentParams["delta_t"])  # (seconds/seconds = adimensional)
        sampled_simulation.append(ARSconcentrationForOneSetup[j])
    ArrayOfARSsimulation = np.array([val for sublist in sampled_simulation for val in sublist])
    ##### 2-C1.2: accumulate the squared deviations of the normalized curves.
    SumOverDataPointsARS = 0.
    for k in range(len(ArrayOfARSsimulation)):
        deviation = ArrayOfARSsimulation[k]/AbsoluteMaxSimulation - ListOfDataARSactivity[k]/AbsoluteMaxData
        SumOverDataPointsARS = SumOverDataPointsARS + pow(deviation, 2)
    return SumOverDataPointsARS
def ComputeRMSdoubleHSforGivenParameterSet(ParamSetRates, ParamSetForREACTIONS, ParamSetInitCond, OutputARSactiviryToPlot, AllDataControlsDoubleHS):
    """Compute the RMS deviation between simulated and measured ARS activity
    for the single- and double-heat-shock experiments (Shroda et al. 2000).

    NOTE(review): the original docstring was copy-pasted from the feeding-RMS
    function; this routine compares ARS activity curves, not HSF/HSP mRNA.

    ``OutputARSactiviryToPlot`` ("Yes"/"No") is currently unused: the plotting
    branch is commented out and only the RMS value is returned.
    ``ParamSetForREACTIONS`` is modified in place (the two pi*const entries
    are recomputed from the rates).
    """
    ############ 1] ############ NEED TO GENERATE A MODEL FOR EVERY PARAMS SET
    # Temperature and time parameter sets reproducing the experiments
    TsetDoubleHSdataRMS = AllDataControlsDoubleHS[3]
    timesetDoubleHSDataRMS = AllDataControlsDoubleHS[4]
    # Set default parameter values
    TestParamSetIC = ParametersSet(ParamSetInitCond)
    TestParamSetRATES = ParametersSet(ParamSetRates)
    # Derived production constants, kept consistent with the rate parameters.
    ParamSetForREACTIONS["piRFconst"] = ParamSetRates["ketaRF"]*ParamSetRates["ketaF"]/ParamSetRates["kFpi0"]*0.17/8.
    ParamSetForREACTIONS["piRHPconst"] = ParamSetRates["ketaRHP"]*ParamSetRates["ketaHP"]/ParamSetRates["kpiHP"]*17.5
    ParamSetForREACTIONSobject = ParametersSet(ParamSetForREACTIONS)
    # Create an object of the class "Heat shock models" with these parameters
    TestHSM = HeatShockModel(TestParamSetIC, TestParamSetRATES, ParamSetForREACTIONSobject)
    ############ 2] ############ NEXT, COMPUTE pieces necessary to compute the RMS W.R.T. DATA
    ##### 2-A: SIMULATE the single HS + the 4 double HS protocols in one run.
    SimulationARSdoubleHSdataRMS = Simulate(TestHSM, timesetDoubleHSDataRMS, TsetDoubleHSdataRMS, "Useless")
    EmptyListToExtractOutput = []
    SimulationARSdoubleHSdataRMS.TimeRunPlusARSdoubleHS(EmptyListToExtractOutput, AvoidPlots="Yes")
    ##### 2-B : Extract from input the dictionaries containing the data
    DictionaryOfListsOfDataTimes = AllDataControlsDoubleHS[0]
    DictionaryOfListsOfDataARSactivity = AllDataControlsDoubleHS[1]
    ListOfDoubleHSKeys = AllDataControlsDoubleHS[5]
    # The simulation fills EmptyListToExtractOutput with one element shaped as
    # [time points, [[legend, ARS values], ... one pair per HS setup]].
    OutputOfDoubleHSsimulation = EmptyListToExtractOutput[0]
    TimePointsForAllSimulations = OutputOfDoubleHSsimulation[0]
    ARSconcentrationForEachHSsetupDictionary = {
        "singleHS" : OutputOfDoubleHSsimulation[1][0][1],
        "2hdoubleHS" : OutputOfDoubleHSsimulation[1][1][1],
        "3hdoubleHS" : OutputOfDoubleHSsimulation[1][2][1],
        "4hdoubleHS" : OutputOfDoubleHSsimulation[1][3][1],
        "5hdoubleHS" : OutputOfDoubleHSsimulation[1][4][1]
    }
    # Absolute maxima over all datasets / all simulated setups; used to
    # normalize both curves before comparing them.
    EmptyARSdataMaximaList = []
    for key in ListOfDoubleHSKeys:
        massimo = deepcopy(max(DictionaryOfListsOfDataARSactivity[key]))
        EmptyARSdataMaximaList.append(massimo)
    AbsoluteMaxData = max(EmptyARSdataMaximaList)
    EmptyARSSimulationMaximaList = []
    for key in ListOfDoubleHSKeys:
        massimo = deepcopy(max(ARSconcentrationForEachHSsetupDictionary[key]))
        EmptyARSSimulationMaximaList.append(massimo)
    AbsoluteMaxSimulationList = max(EmptyARSSimulationMaximaList)
    AbsoluteMaxSimulation = AbsoluteMaxSimulationList[0]
    ##### 2-C: Compute Part Of RMS Simulation Vs Data FOR EACH DIFFERENT HS SETUP
    SumOverDatapointsDoubleHS = {}
    for key in ListOfDoubleHSKeys:
        SumOverDatapointsDoubleHS.update( {key : ComputePartOfRMSSimulationVsDataDoubleHS(DictionaryOfListsOfDataTimes[key], timesetDoubleHSDataRMS, TimePointsForAllSimulations, ARSconcentrationForEachHSsetupDictionary[key], DictionaryOfListsOfDataARSactivity[key], AbsoluteMaxData, AbsoluteMaxSimulation)} )
    ############ 3] ############ Put together the pieces for each different dataset into 1 single RMS value!!!
    NumberOfDataPoints, SumOverDatapoints = 0., 0.
    for key in ListOfDoubleHSKeys:
        NumberOfDataPoints = NumberOfDataPoints + len(DictionaryOfListsOfDataTimes[key])
        SumOverDatapoints = SumOverDatapoints + SumOverDatapointsDoubleHS[key]
    RootMeanSquareDeviationDoubleHS = math.sqrt( SumOverDatapoints / NumberOfDataPoints )
    print("\n" + str(RootMeanSquareDeviationDoubleHS) + "\n")
    # The "Yes" (return-time-courses-for-plotting) branch is not implemented
    # for the double-HS case yet; only the RMS value is returned.
    output = (RootMeanSquareDeviationDoubleHS)
    return output
def ComputeRMStotalForGivenParameterSet_RESCALED_PARAMETERS(ParamSetRates_RESCALED, ParamSetForREACTIONS, ParamSetInitCond, AllDataControlsFeeding, RescalingFactorsDictionary, AllDataControlsDoubleHS):
    """Total cost for one rescaled parameter set: feeding-controls RMS plus
    double-heat-shock RMS.

    The rates arrive rescaled by their fiducial values (preconditioning for
    numerical optimizers) and are converted back before evaluation.
    """
    original_rates = deepcopy(Convert_RESCALED_to_ORIGINAL_ParameterSet(ParamSetRates_RESCALED, RescalingFactorsDictionary))
    feeding_rms = ComputeRMSfeedingForGivenParameterSet(original_rates, ParamSetForREACTIONS, ParamSetInitCond, "No", AllDataControlsFeeding)
    double_hs_rms = ComputeRMSdoubleHSforGivenParameterSet(original_rates, ParamSetForREACTIONS, ParamSetInitCond, "No", AllDataControlsDoubleHS)
    return deepcopy(feeding_rms + double_hs_rms)
| gpl-3.0 |
samthor/intellij-community | python/lib/Lib/site-packages/django/contrib/formtools/tests.py | 89 | 6613 | import unittest
from django import forms
from django.contrib.formtools import preview, wizard, utils
from django import http
from django.test import TestCase
# Sentinel response body returned by the test FormPreview/FormWizard done()
# methods; the tests compare response.content against it to confirm completion.
success_string = "Done was called!"
class TestFormPreview(preview.FormPreview):
    """FormPreview whose final stage simply reports completion."""

    def done(self, request, cleaned_data):
        # The tests compare the response body against success_string to
        # detect that done() was reached.
        completed = http.HttpResponse(success_string)
        return completed
class TestForm(forms.Form):
    # field1_ ends in an underscore to exercise FormPreview.unused_name()
    # mangling; bool1 exercises the unchecked-BooleanField hash case (#6209).
    field1 = forms.CharField()
    field1_ = forms.CharField()
    bool1 = forms.BooleanField(required=False)
class PreviewTests(TestCase):
    """End-to-end tests of the three stages of contrib.formtools FormPreview."""
    urls = 'django.contrib.formtools.test_urls'

    def setUp(self):
        # Create a FormPreview instance to share between tests
        self.preview = preview.FormPreview(TestForm)
        input_template = '<input type="hidden" name="%s" value="%s" />'
        # The "%d" placeholder is filled with the expected stage number per test.
        self.input = input_template % (self.preview.unused_name('stage'), "%d")
        self.test_data = {'field1':u'foo', 'field1_':u'asdf'}

    def test_unused_name(self):
        """
        Verifies name mangling to get unique field name.
        """
        self.assertEqual(self.preview.unused_name('field1'), 'field1__')

    def test_form_get(self):
        """
        Test contrib.formtools.preview form retrieval.

        Use the client library to see if we can successfully retrieve
        the form (mostly testing the setup ROOT_URLCONF
        process). Verify that an additional hidden input field
        is created to manage the stage.
        """
        response = self.client.get('/test1/')
        stage = self.input % 1
        self.assertContains(response, stage, 1)

    def test_form_preview(self):
        """
        Test contrib.formtools.preview form preview rendering.

        Use the client library to POST to the form to see if a preview
        is returned. If we do get a form back check that the hidden
        value is correctly managing the state of the form.
        """
        # Pass strings for form submittal and add stage variable to
        # show we previously saw first stage of the form.
        self.test_data.update({'stage': 1})
        response = self.client.post('/test1/', self.test_data)
        # Check to confirm stage is set to 2 in output form.
        stage = self.input % 2
        self.assertContains(response, stage, 1)

    def test_form_submit(self):
        """
        Test contrib.formtools.preview form submittal.

        Use the client library to POST to the form with stage set to 3
        to see if our forms done() method is called. Check first
        without the security hash, verify failure, retry with security
        hash and verify success.
        """
        # Pass strings for form submittal and add stage variable to
        # show we previously saw first stage of the form.
        self.test_data.update({'stage':2})
        response = self.client.post('/test1/', self.test_data)
        self.failIfEqual(response.content, success_string)
        hash = self.preview.security_hash(None, TestForm(self.test_data))
        self.test_data.update({'hash': hash})
        response = self.client.post('/test1/', self.test_data)
        self.assertEqual(response.content, success_string)

    def test_bool_submit(self):
        """
        Test contrib.formtools.preview form submittal when form contains:
        BooleanField(required=False)

        Ticket: #6209 - When an unchecked BooleanField is previewed, the preview
        form's hash would be computed with no value for ``bool1``. However, when
        the preview form is rendered, the unchecked hidden BooleanField would be
        rendered with the string value 'False'. So when the preview form is
        resubmitted, the hash would be computed with the value 'False' for
        ``bool1``. We need to make sure the hashes are the same in both cases.
        """
        self.test_data.update({'stage':2})
        hash = self.preview.security_hash(None, TestForm(self.test_data))
        self.test_data.update({'hash':hash, 'bool1':u'False'})
        response = self.client.post('/test1/', self.test_data)
        self.assertEqual(response.content, success_string)
class SecurityHashTests(unittest.TestCase):
    """Regression tests for the normalization behaviour of utils.security_hash."""

    def test_textfield_hash(self):
        """
        Regression test for #10034: the hash generation function should ignore
        leading/trailing whitespace so as to be friendly to broken browsers that
        submit it (usually in textareas).
        """
        f1 = HashTestForm({'name': 'joe', 'bio': 'Nothing notable.'})
        f2 = HashTestForm({'name': ' joe', 'bio': 'Nothing notable. '})
        hash1 = utils.security_hash(None, f1)
        hash2 = utils.security_hash(None, f2)
        self.assertEqual(hash1, hash2)

    def test_empty_permitted(self):
        """
        Regression test for #10643: the security hash should allow forms with
        empty_permitted = True, or forms where data has not changed.
        """
        f1 = HashTestBlankForm({})
        f2 = HashTestForm({}, empty_permitted=True)
        hash1 = utils.security_hash(None, f1)
        hash2 = utils.security_hash(None, f2)
        self.assertEqual(hash1, hash2)
class HashTestForm(forms.Form):
    # Two plain text fields used to exercise whitespace normalization in
    # utils.security_hash (see SecurityHashTests.test_textfield_hash).
    name = forms.CharField()
    bio = forms.CharField()
class HashTestBlankForm(forms.Form):
    # All-optional variant, valid even when submitted empty
    # (see SecurityHashTests.test_empty_permitted).
    name = forms.CharField(required=False)
    bio = forms.CharField(required=False)
#
# FormWizard tests
#
class WizardPageOneForm(forms.Form):
    # Single-field form for step 0 of the test wizard.
    field = forms.CharField()
class WizardPageTwoForm(forms.Form):
    # Single-field form for step 1 of the test wizard.
    field = forms.CharField()
class WizardClass(wizard.FormWizard):
    # FormWizard whose template rendering is stubbed out so the tests can
    # focus on step bookkeeping; done() signals completion via success_string.
    def render_template(self, *args, **kw):
        return http.HttpResponse("")

    def done(self, request, cleaned_data):
        return http.HttpResponse(success_string)
class DummyRequest(http.HttpRequest):
    """Minimal HttpRequest stand-in for driving a FormWizard in tests.

    Pass POST=<dict> to simulate a POST request (a GET request otherwise);
    CSRF checking is disabled.
    """
    def __init__(self, POST=None):
        super(DummyRequest, self).__init__()
        # BUG FIX: the former `POST and "POST" or "GET"` idiom reported "GET"
        # for an *empty* POST dict (falsy); only POST=None should mean GET.
        self.method = "GET" if POST is None else "POST"
        if POST is not None:
            self.POST.update(POST)
        self._dont_enforce_csrf_checks = True
class WizardTests(TestCase):
    """Tests of FormWizard step bookkeeping."""

    def test_step_starts_at_zero(self):
        """
        step should be zero for the first form
        """
        wizard = WizardClass([WizardPageOneForm, WizardPageTwoForm])
        request = DummyRequest()
        wizard(request)
        self.assertEquals(0, wizard.step)

    def test_step_increments(self):
        """
        step should be incremented when we go to the next page
        """
        wizard = WizardClass([WizardPageOneForm, WizardPageTwoForm])
        # "0-field" is the prefixed name of WizardPageOneForm's field on step 0.
        request = DummyRequest(POST={"0-field":"test", "wizard_step":"0"})
        response = wizard(request)
        self.assertEquals(1, wizard.step)
| apache-2.0 |
shenzebang/scikit-learn | examples/exercises/plot_cv_digits.py | 232 | 1206 | """
=============================================
Cross-validation on Digits Dataset Exercise
=============================================
A tutorial exercise using Cross-validation with an SVM on the Digits dataset.
This exercise is used in the :ref:`cv_generators_tut` part of the
:ref:`model_selection_tut` section of the :ref:`stat_learn_tut_index`.
"""
print(__doc__)

import numpy as np
from sklearn import cross_validation, datasets, svm

# Load the handwritten-digits dataset.
digits = datasets.load_digits()
X = digits.data
y = digits.target

# Linear-kernel SVM whose regularization parameter C is swept over
# 10 log-spaced values between 1e-10 and 1.
svc = svm.SVC(kernel='linear')
C_s = np.logspace(-10, 0, 10)

scores = list()
scores_std = list()
for C in C_s:
    svc.C = C
    # Cross-validated accuracy for this value of C.
    this_scores = cross_validation.cross_val_score(svc, X, y, n_jobs=1)
    scores.append(np.mean(this_scores))
    scores_std.append(np.std(this_scores))

# Do the plotting
import matplotlib.pyplot as plt
plt.figure(1, figsize=(4, 3))
plt.clf()
plt.semilogx(C_s, scores)
# Dashed lines: one standard deviation above/below the mean CV score.
plt.semilogx(C_s, np.array(scores) + np.array(scores_std), 'b--')
plt.semilogx(C_s, np.array(scores) - np.array(scores_std), 'b--')
locs, labels = plt.yticks()
plt.yticks(locs, list(map(lambda x: "%g" % x, locs)))
plt.ylabel('CV score')
plt.xlabel('Parameter C')
plt.ylim(0, 1.1)
plt.show()
| bsd-3-clause |
ledtvavs/repository.ledtv | script.mrknow.urlresolver/lib/urlresolver9/plugins/yourupload.py | 4 | 1797 | """
urlresolver XBMC Addon
Copyright (C) 2011 t0mm0
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
import urlparse

from lib import helpers

from urlresolver9 import common
from urlresolver9.resolver import UrlResolver, ResolverError
class YourUploadResolver(UrlResolver):
    """URL resolver for yourupload.com / yucache.net embed pages."""
    name = "yourupload.com"
    domains = ["yourupload.com", "yucache.net"]
    pattern = '(?://|\.)(yourupload\.com|yucache\.net)/(?:watch|embed)?/?([0-9A-Za-z]+)'

    def __init__(self):
        self.net = common.Net()

    def get_media_url(self, host, media_id):
        """Return the final media URL with spoofing headers appended.

        Raises ResolverError if no stream URL is found in the embed page.
        (Relies on the module-level ``re`` and ``urlparse`` imports, which
        were missing before this fix and caused a NameError at runtime.)
        """
        web_url = self.get_url(host, media_id)
        html = self.net.http_GET(web_url).content
        # The embed page declares the stream as: file: '<url>'
        url = re.findall('file\s*:\s*(?:\'|\")(.+?)(?:\'|\")', html)
        if not url: raise ResolverError('No video found')
        headers = {'User-Agent': common.FF_USER_AGENT,
                   'Referer': web_url}
        url = urlparse.urljoin(web_url, url[0])
        # Follow redirects to the final location before returning the URL.
        url = self.net.http_HEAD(url, headers=headers).get_url()
        url = url + helpers.append_headers(headers)
        return url
        # NOTE: the old trailing `raise ResolverError('No video found')` after
        # this return was unreachable and has been removed.

    def get_url(self, host, media_id):
        return 'http://www.yourupload.com/embed/%s' % media_id
| gpl-3.0 |
layus/INGInious | backend/tests/TestRemoteDocker.py | 1 | 6508 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2014-2015 Université Catholique de Louvain.
#
# This file is part of INGInious.
#
# INGInious is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# INGInious is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with INGInious. If not, see <http://www.gnu.org/licenses/>.
# This test is made to be run on a very constrained architecture (boot2docker)
# It would be very difficult to make it run everywhere.
# If it don't work as-is on your arch, you can simply disable the TEST_DOCKER_JOB_MANAGER
# flag and trust the code, or you can modify the config in the test to make it run.
import os
from nose.plugins.skip import SkipTest
import docker
from backend.job_managers.remote_docker import RemoteDockerJobManager
TEST_DOCKER_JOB_MANAGER = os.environ.get("TEST_DOCKER_JOB_MANAGER", None)
class TestDockerJobManager(object):
    """Base fixture for the remote Docker job-manager tests.

    Subclasses override setUpDocker()/setUpJobManager() to prepare the agent
    image/container state each scenario needs.  The whole suite is skipped
    unless the TEST_DOCKER_JOB_MANAGER environment variable selects a backend
    ("boot2docker" or "travis").
    """

    def setUp(self):
        if TEST_DOCKER_JOB_MANAGER is None:
            raise SkipTest("Testing the Docker Job Manager is disabled.")
        elif TEST_DOCKER_JOB_MANAGER == "boot2docker":
            self.docker_connection = docker.Client(base_url="tcp://192.168.59.103:2375")
        elif TEST_DOCKER_JOB_MANAGER == "travis":
            self.docker_connection = docker.Client(base_url="tcp://localhost:2375")
        else:
            raise Exception("Unknown method for testing the Docker Job Manager!")

        # Force the removal of all containers/images linked to this test
        try:
            self.docker_connection.remove_container("inginious-agent", force=True)
        except:
            pass

        try:
            self.docker_connection.remove_image("ingi/inginious-agent", force=True)
        except:
            pass

        self.setUpDocker()
        self.job_manager = None
        self.setUpJobManager()

    def setUpDocker(self):
        # Hook: prepare images/containers before the job manager starts.
        pass

    def setUpJobManager(self):
        # Hook: start (or not) the job manager under test.
        pass

    def start_manager(self):
        # Start a RemoteDockerJobManager against the Docker daemon selected
        # by TEST_DOCKER_JOB_MANAGER.
        if TEST_DOCKER_JOB_MANAGER == "boot2docker":
            self.job_manager = RemoteDockerJobManager([{
                "remote_host": "192.168.59.103",
                "remote_docker_port": 2375,
                "remote_agent_port": 63456
            }], {"default": "ingi/inginious-c-default"}, is_testing=True)
        elif TEST_DOCKER_JOB_MANAGER == "travis":
            self.job_manager = RemoteDockerJobManager([{
                "remote_host": "localhost",
                "remote_docker_port": 2375,
                "remote_agent_port": 63456
            }], {"default": "ingi/inginious-c-default"}, is_testing=True)
        self.job_manager.start()

    def build_fake_agent(self, dockerfile="FakeAgentDockerfile"):
        # Build the fake agent image from tests/utils; consuming the build
        # generator (printed here) forces the build to run to completion.
        dockerfile_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), "utils/"))
        print [line for line in self.docker_connection.build(dockerfile_dir, dockerfile=dockerfile, rm=True, tag="ingi/inginious-agent")]

    def start_fake_agent(self):
        response = self.docker_connection.create_container(
            "ingi/inginious-agent",
            detach=True,
            name="inginious-agent"
        )
        container_id = response["Id"]

        # Start the container
        self.docker_connection.start(container_id)

    def tearDown(self):
        # sanitize a bit Docker...
        if self.job_manager is not None:
            self.job_manager.close()

        try:
            self.docker_connection.remove_container("inginious-agent", force=True)
        except:
            pass

        try:
            self.docker_connection.remove_image("ingi/inginious-agent", force=True)
        except:
            pass
class TestDockerJobManagerNoUpdateNeeded(TestDockerJobManager):
    # Agent image built from the current Dockerfile: no update required.
    def setUpDocker(self):
        self.build_fake_agent("FakeAgentDockerfile")

    def test_agent_no_update_needed(self):
        assert RemoteDockerJobManager.is_agent_image_update_needed(self.docker_connection) is False
class TestDockerJobManagerUpdateNeeded(TestDockerJobManager):
    # Agent image built from an outdated/wrong Dockerfile: update required.
    def setUpDocker(self):
        self.build_fake_agent("FakeAgentWrongDockerfile")

    def test_agent_update_needed(self):
        assert RemoteDockerJobManager.is_agent_image_update_needed(self.docker_connection) is True
class TestDockerJobManagerNoImage(TestDockerJobManager):
    # No agent image present at all: an update (initial pull/build) is needed.
    def setUpDocker(self):
        pass

    def test_agent_no_image(self):
        assert RemoteDockerJobManager.is_agent_image_update_needed(self.docker_connection) is True
class TestDockerJobManagerAgentAlreadyStarted(TestDockerJobManager):
    # Valid agent image built and its container running: agent is usable.
    def setUpDocker(self):
        self.build_fake_agent("FakeAgentDockerfile")
        self.start_fake_agent()

    def test_agent_already_started(self):
        assert RemoteDockerJobManager.is_agent_valid_and_started(self.docker_connection) is True
class TestDockerJobManagerAgentAlreadyStartedButDead(TestDockerJobManager):
    # Agent container was started but then killed: not valid/started anymore.
    def setUpDocker(self):
        self.build_fake_agent("FakeAgentDockerfile")
        self.start_fake_agent()
        self.docker_connection.kill("inginious-agent")

    def test_agent_already_started_but_dead(self):
        assert RemoteDockerJobManager.is_agent_valid_and_started(self.docker_connection) is False
class TestDockerJobManagerInvalidAgentAlreadyStarted(TestDockerJobManager):
    # A container is running but from the wrong image: not valid.
    def setUpDocker(self):
        self.build_fake_agent("FakeAgentWrongDockerfile")
        self.start_fake_agent()

    def test_invalid_agent_already_started(self):
        assert RemoteDockerJobManager.is_agent_valid_and_started(self.docker_connection) is False
class TestDockerJobManagerNoAgentStarted(TestDockerJobManager):
    """No agent image built and no container started: agent is not usable."""
    def setUpDocker(self):
        pass

    def test_no_agent_started(self):
        # Renamed from the copy-pasted "test_invalid_agent_already_started"
        # (duplicated from TestDockerJobManagerInvalidAgentAlreadyStarted),
        # which misdescribed this scenario: nothing is started here at all.
        assert RemoteDockerJobManager.is_agent_valid_and_started(self.docker_connection) is False
class TestDockerJobManagerRun(TestDockerJobManager):
    # Full start of the job manager against a valid fake agent.
    def setUpDocker(self):
        self.build_fake_agent("FakeAgentDockerfile")

    def setUpJobManager(self):
        self.start_manager()

    def test_docker_job_manager_run(self):
        # One agent was configured, so the manager must register exactly one.
        assert len(self.job_manager._agents_info) == 1
JoaoVasques/aws-devtool | eb/macosx/python3/lib/aws/requests/packages/chardet2/langbulgarianmodel.py | 25 | 13035 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# Character Mapping Table:
# this table is modified base on win1251BulgarianCharToOrderMap, so
# only number <64 is sure valid
Latin5_BulgarianCharToOrderMap = ( \
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40
110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50
253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60
116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70
194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80
210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90
81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0
31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0
39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0
1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0
7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0
62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0
)
win1251BulgarianCharToOrderMap = ( \
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40
110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50
253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60
116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70
206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80
221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90
88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0
73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0
31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0
39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0
1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0
7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 96.9392%
# first 1024 sequences: 3.0618%
# rest sequences: 0.2992%
# negative sequences: 0.0020%
BulgarianLangModel = ( \
0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2,
3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1,
0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0,
0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0,
0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0,
0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0,
0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3,
2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1,
3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2,
1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0,
3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1,
1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0,
2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2,
2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0,
3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2,
1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0,
2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2,
2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2,
1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0,
2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2,
2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0,
2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2,
1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0,
2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2,
1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0,
3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2,
1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0,
3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1,
1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0,
2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1,
1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0,
2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2,
1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0,
2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1,
1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2,
1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1,
2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2,
1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2,
1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1,
0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2,
1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1,
1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,
1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1,
0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1,
0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0,
1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1,
1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
)
# Model bundle for Bulgarian text in the ISO-8859-5 (Latin/Cyrillic) encoding,
# consumed by chardet's single-byte charset prober.  Shares the bigram
# precedence matrix with the windows-1251 model below; only the byte->order
# map differs.
Latin5BulgarianModel = { \
    'charToOrderMap': Latin5_BulgarianCharToOrderMap,
    'precedenceMatrix': BulgarianLangModel,
    'mTypicalPositiveRatio': 0.969392,
    'keepEnglishLetter': False,
    'charsetName': "ISO-8859-5"
}
# Model bundle for Bulgarian text in the windows-1251 encoding; same language
# model as the Latin-5 variant, different byte->order map.
Win1251BulgarianModel = { \
    'charToOrderMap': win1251BulgarianCharToOrderMap,
    'precedenceMatrix': BulgarianLangModel,
    'mTypicalPositiveRatio': 0.969392,
    'keepEnglishLetter': False,
    'charsetName': "windows-1251"
}
| apache-2.0 |
marinho/geraldo | site/newsite/site-geraldo/appengine_django/management/commands/update.py | 60 | 1636 | #!/usr/bin/python2.4
#
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import logging
from django.core.management.base import BaseCommand
def run_appcfg():
    """Invoke appcfg.py's ``update`` action against the current directory."""
    # Importing the local stub first surfaces configuration problems early
    # and reports the appropriate errors.
    import appcfg
    # The implementation actually used lives inside the App Engine SDK.
    from google.appengine.tools import appcfg
    # appcfg spews tons of logs at INFO; quiet the root logger to warnings.
    logging.getLogger().setLevel(logging.WARN)
    # Note: if we decide to change the name of this command to something other
    # than 'update' we will have to munge the args to replace whatever
    # we called it with 'update'.  The trailing '.' targets the current dir.
    appcfg.main(sys.argv[:] + ['.'])
class Command(BaseCommand):
    """Calls the appcfg.py's update command for the current project.

    Any additional arguments are passed directly to appcfg.py.
    """
    help = 'Calls appcfg.py update for the current project.'
    args = '[any appcfg.py options]'

    def run_from_argv(self, argv):
        # Delegate straight to appcfg; it reads the arguments from sys.argv
        # itself, so `argv` is deliberately unused here.
        run_appcfg()
| lgpl-3.0 |
kracwarlock/neon | neon/transforms/cross_entropy.py | 7 | 11068 | # ----------------------------------------------------------------------------
# Copyright 2014 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
"""
Cross entropy transform functions and classes.
"""
from neon.transforms.cost import Cost
from neon.transforms.logistic import Logistic
from neon.transforms.softmax import Softmax
from neon.util.param import opt_param
def cross_entropy(backend, outputs, targets, temp, epsilon=2**-23,
                  scale_by_batchsize=False):
    """
    Compute the summed binary cross entropy between outputs and targets.

    Since log(0) is undefined, ``epsilon`` is added inside each log so that
    predictions of exactly 0.0 or 1.0 stay finite.

    Arguments:
        backend (Backend): backend providing the tensor primitives.
        outputs (Tensor): predicted output values to be compared.
        targets (Tensor): known outcome values to be compared against.
        temp (list): scratch buffers; temp[0] and temp[1] must match the
            shape of ``outputs`` (temp[4] holds the result on GPU backends).
        epsilon (numeric): unit roundoff guard added before each log.
            Defaults to 2**-23 (float32 machine epsilon).
        scale_by_batchsize (bool): when True, divide the per-element costs
            by the minibatch size before summing.

    Returns:
        Tensor: (1, 1) tensor holding the summed cross entropy.
    """
    if hasattr(backend, 'ng'):
        # NervanaGPU exposes a single fused kernel for the whole expression.
        out = temp[4]
        backend.crossent(outputs, targets, temp[0], out, epsilon,
                         scale_by_batchsize)
        return out
    # temp[0] <- (t - 1) * log(1 - y + eps)
    backend.add(targets, -1.0, out=temp[0])
    backend.subtract(1.0, outputs, out=temp[1])
    backend.add(temp[1], epsilon, out=temp[1])
    backend.log(temp[1], out=temp[1])
    backend.multiply(temp[0], temp[1], out=temp[0])
    # temp[1] <- t * log(y + eps)
    backend.add(outputs, epsilon, out=temp[1])
    backend.log(temp[1], out=temp[1])
    backend.multiply(targets, temp[1], out=temp[1])
    # temp[0] <- temp[0] - temp[1]  ==  -[t*log(y) + (1-t)*log(1-y)]
    backend.subtract(temp[0], temp[1], out=temp[0])
    if scale_by_batchsize:
        backend.divide(temp[0], temp[0].shape[1], temp[0])
    out = backend.empty((1, 1), dtype=temp[0].dtype,
                        persist_values=False)
    backend.sum(temp[0], axes=None, out=out)
    return out
def cross_entropy_multi(backend, outputs, targets, temp, epsilon=2**-23,
                        scale_by_batchsize=False):
    """
    Compute the summed multiclass cross entropy between outputs and targets.

    Outputs are clipped to [epsilon, 1] before the log to stay finite.

    Arguments:
        backend (Backend): backend providing the tensor primitives.
        outputs (Tensor): predicted output values to be compared.
        targets (Tensor): known outcome values to be compared against.
        temp (list): scratch buffers; temp[0] and temp[1] must match the
            shape of ``outputs`` (temp[4] holds the result on GPU backends).
        epsilon (numeric): unit roundoff guard used as the clip floor.
            Defaults to 2**-23 (float32 machine epsilon).
        scale_by_batchsize (bool): when True, divide the per-element costs
            by the minibatch size before summing.

    Returns:
        Tensor: (1, 1) tensor holding the summed cross entropy.
    """
    if hasattr(backend, 'ng'):
        # Fused kernel path on the NervanaGPU backend.
        out = temp[4]
        backend.crossent(outputs, targets, temp[0], out, epsilon,
                         scale_by_batchsize, ismulti=True)
        return out
    # temp[0] <- -t * log(clip(y, eps, 1))
    backend.clip(outputs, epsilon, 1, out=temp[1])
    backend.log(temp[1], out=temp[1])
    backend.multiply(targets, temp[1], out=temp[1])
    backend.multiply(temp[1], -1.0, out=temp[0])
    if scale_by_batchsize:
        backend.divide(temp[0], temp[0].shape[1], temp[0])
    out = backend.empty((1, 1), dtype=temp[0].dtype,
                        persist_values=False)
    return backend.sum(temp[0], axes=None, out=out)
def cross_entropy_derivative(backend, outputs, targets, temp, scale=1.0,
                             epsilon=2**-23):
    """
    Applies derivative of the cross entropy to the pairwise elements from
    outputs and targets: (y - t) / (y * (1 - y)), optionally scaled.

    Note that this is undefined for predicted outputs equal to exactly 0 or
    1.0, so the denominator is clipped to [epsilon, 1 - epsilon].

    Arguments:
        backend (Backend): The backend class to use for computation.
        outputs (Tensor): predicted output values to be compared.
        targets (Tensor): known outcome values to be compared against.
        temp (list): temporary buffers; temp[0] and temp[1] must match the
            shape of ``outputs``.  temp[0] receives the result.
        scale (numeric): multiplicative factor applied to the gradient.
            Defaults to 1.0 (no scaling).
        epsilon (numeric): unit roundoff error. Defaults to 2^-23, which
            matches python float32 machine epsilon.

    Returns:
        Tensor: Calculated gradient values for each element (temp[0]).
    """
    # numerator: y - t
    backend.subtract(outputs, targets, out=temp[0])
    # denominator: y * (1 - y), clipped away from 0 to keep division finite
    backend.subtract(1.0, outputs, out=temp[1])
    backend.multiply(temp[1], outputs, out=temp[1])
    backend.clip(temp[1], epsilon, 1 - epsilon, out=temp[1])
    backend.divide(temp[0], temp[1], out=temp[0])
    if scale != 1.0:
        # Bug fix: `scale` was previously accepted but silently ignored,
        # inconsistent with cross_entropy_multi_derivative and
        # shortcut_derivative, which both honor it.
        backend.multiply(temp[0], scale, out=temp[0])
    return temp[0]
def cross_entropy_multi_derivative(backend, outputs, targets, temp, scale=1.0):
    """
    Gradient of the multiclass cross entropy: -scale * t / y, elementwise.

    Arguments:
        backend (Backend): backend providing the tensor primitives.
        outputs (Tensor): predicted output values to be compared.
        targets (Tensor): known outcome values to be compared against.
        temp (list): scratch buffers; temp[0] receives the result.
        scale (numeric): multiplicative factor applied to the gradient.

    Returns:
        Tensor: gradient values for each element (temp[0]).
    """
    grad = temp[0]
    backend.divide(targets, outputs, out=grad)
    backend.multiply(grad, -scale, out=grad)
    return grad
def shortcut_derivative(backend, outputs, targets, temp, scale=1.0):
    """
    Combined-gradient shortcut for a cost paired with its matched activation,
    i.e. cross_entropy with logistic or cross_entropy_multi with softmax.

    In those pairings the composite gradient collapses to scale * (y - t),
    which is simpler and avoids the numerical error of the general form.

    Returns:
        Tensor: gradient values for each element (temp[0]).
    """
    grad = temp[0]
    backend.subtract(outputs, targets, out=grad)
    if scale != 1.0:
        backend.multiply(grad, scale, out=grad)
    return grad
class CrossEntropy(Cost):
    """
    Embodiment of a cross entropy cost function.

    Binds the module-level cross entropy / derivative helpers to the output
    layer's activation (Softmax -> multiclass, Logistic -> binary), optionally
    using the shortcut (y - t) gradient when cost and activation match.
    """
    def __init__(self, **kwargs):
        # 'epsilon' guards the logs against outputs of exactly 0 or 1.
        opt_param(self, ['epsilon'], 2**-23)  # default float32 machine epsilon
        super(CrossEntropy, self).__init__(**kwargs)

    def initialize(self, kwargs):
        """Select cost and derivative functions matched to the activation."""
        opt_param(self, ['shortcut_deriv'], True)
        # raw label indicates whether the reference labels are indexes (raw)
        # or one-hot (default)
        super(CrossEntropy, self).initialize(kwargs)
        if isinstance(self.olayer.activation, Softmax):
            self.ce_function = cross_entropy_multi
            if self.shortcut_deriv:
                # softmax + CE gradient collapses to (y - t); the output
                # layer can then skip its own activation derivative.
                self.cd_function = shortcut_derivative
                self.olayer.skip_act = True
            else:
                self.cd_function = cross_entropy_multi_derivative
        elif isinstance(self.olayer.activation, Logistic):
            self.ce_function = cross_entropy
            if self.shortcut_deriv:
                # logistic + CE gradient also collapses to (y - t).
                self.cd_function = shortcut_derivative
                self.olayer.skip_act = True
            else:
                self.cd_function = cross_entropy_derivative
        else:
            # Fall back to the general binary form for other activations.
            self.ce_function = cross_entropy
            self.cd_function = cross_entropy_derivative

    def __str__(self):
        return ("Cost Function: {shrtct} {rl}\n".format(
                shrtct=self.shortcut_deriv, rl=self.raw_label))

    def set_outputbuf(self, databuf):
        """(Re)allocate scratch buffers whenever the output shape changes."""
        temp_dtype = self.temp_dtype
        if not self.outputbuf or self.outputbuf.shape != databuf.shape:
            # temp layout expected by the module-level helpers:
            # [0], [1], [3] elementwise scratch, [2] per-column sums,
            # [4] (1, 1) result buffer used by the GPU fused kernel.
            tempbuf1 = self.backend.zeros(databuf.shape, dtype=temp_dtype,
                                          persist_values=False)
            tempbuf2 = self.backend.zeros(databuf.shape, dtype=temp_dtype,
                                          persist_values=False)
            tempbuf3 = self.backend.zeros((1, databuf.shape[1]),
                                          dtype=temp_dtype,
                                          persist_values=False)
            tempbuf4 = self.backend.zeros(databuf.shape, dtype=temp_dtype,
                                          persist_values=False)
            tempbuf5 = self.backend.zeros((1, 1), temp_dtype,
                                          persist_values=False)
            self.temp = [tempbuf1, tempbuf2, tempbuf3, tempbuf4, tempbuf5]
        self.outputbuf = databuf

    def get_deltabuf(self):
        # used by layer2 only.
        return self.temp[0]

    def raw_to_onehot(self, labels):
        """Expand raw index labels into a one-hot buffer (temp[3])."""
        self.temp[3].fill(0.0)
        for row in range(self.outputbuf.shape[0]):
            # Row r of the one-hot buffer is 1 wherever labels == r.
            self.backend.equal(labels, row, self.temp[3][row:(row+1)])
        return self.temp[3]

    def apply_logloss(self, targets, eps=1e-15):
        """
        Logloss function -- does normalization prior to computing multiclass
        log loss function if the output layer is not softmax
        """
        if self.raw_label:
            targets = self.raw_to_onehot(targets)
        if isinstance(self.olayer.activation, Softmax):
            # Softmax outputs are already normalized probabilities.
            return self.ce_function(self.backend, self.outputbuf, targets,
                                    self.temp)
        # Otherwise normalize each column to sum to 1 (with eps smoothing).
        self.backend.add(self.outputbuf, eps, out=self.temp[0])
        self.backend.sum(self.temp[0], axes=0, out=self.temp[2])
        self.backend.divide(self.temp[0], self.temp[2], out=self.temp[0])
        return cross_entropy_multi(self.backend, self.temp[0], targets,
                                   self.temp)

    def apply_function(self, targets, scale_by_batchsize=False):
        """
        Apply the cross entropy cost function to the datasets passed.
        """
        if self.raw_label:
            targets = self.raw_to_onehot(targets)
        result = self.ce_function(self.backend, self.outputbuf, targets,
                                  self.temp, epsilon=self.epsilon,
                                  scale_by_batchsize=scale_by_batchsize)
        if self.scale != 1.0:
            self.backend.multiply(result, self.scale, out=result)
        return result

    def apply_derivative(self, targets):
        """
        Apply the derivative of the cross entropy cost function to the datasets
        passed.
        """
        if self.raw_label:
            targets = self.raw_to_onehot(targets)
        return self.cd_function(self.backend, self.outputbuf,
                                targets, self.temp, self.scale)
| apache-2.0 |
AutorestCI/azure-sdk-for-python | azure-cognitiveservices-vision-customvision/azure/cognitiveservices/vision/customvision/prediction/prediction_endpoint.py | 1 | 14280 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.service_client import ServiceClient
from msrest import Configuration, Serializer, Deserializer
from .version import VERSION
from msrest.pipeline import ClientRawResponse
from msrest.exceptions import HttpOperationError
from . import models
class PredictionEndpointConfiguration(Configuration):
    """Configuration for PredictionEndpoint
    Note that all parameters used to create this instance are saved as instance
    attributes.

    :param api_key:
    :type api_key: str
    :param str base_url: Service URL
    """

    def __init__(
            self, api_key, base_url=None):

        # The prediction key is mandatory: it is sent as the 'Prediction-Key'
        # header on every request made by the endpoint.
        if api_key is None:
            raise ValueError("Parameter 'api_key' must not be None.")
        if not base_url:
            # Default to the South Central US regional endpoint.
            base_url = 'https://southcentralus.api.cognitive.microsoft.com/customvision/v1.1/Prediction'

        super(PredictionEndpointConfiguration, self).__init__(base_url)

        self.add_user_agent('azure-cognitiveservices-vision-customvision/{}'.format(VERSION))

        self.api_key = api_key
class PredictionEndpoint(object):
"""PredictionEndpoint
:ivar config: Configuration for client.
:vartype config: PredictionEndpointConfiguration
:param api_key:
:type api_key: str
:param str base_url: Service URL
"""
    def __init__(
            self, api_key, base_url=None):

        self.config = PredictionEndpointConfiguration(api_key, base_url)
        # No msrest credentials object: authentication is header-based via
        # the prediction key stored on the configuration.
        self._client = ServiceClient(None, self.config)

        # Map of model-class name -> class, used by the (de)serializers.
        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
        self.api_version = '1.1'
        self._serialize = Serializer(client_models)
        self._deserialize = Deserializer(client_models)
    def predict_image_url(
            self, project_id, iteration_id=None, application=None, url=None, custom_headers=None, raw=False, **operation_config):
        """Predict an image url and saves the result.

        :param project_id: The project id
        :type project_id: str
        :param iteration_id: Optional. Specifies the id of a particular
         iteration to evaluate against.
         The default iteration for the project will be used when not specified
        :type iteration_id: str
        :param application: Optional. Specifies the name of application using
         the endpoint
        :type application: str
        :param url:
        :type url: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: ImagePredictionResultModel or ClientRawResponse if raw=true
        :rtype:
         ~azure.cognitiveservices.vision.customvision.prediction.models.ImagePredictionResultModel
         or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
        """
        # Wrap the image URL in its request-body model before the `url`
        # parameter name is reused (shadowed) for the request path below.
        image_url = models.ImageUrl(url=url)

        # Construct URL
        url = '/{projectId}/url'
        path_format_arguments = {
            'projectId': self._serialize.url("project_id", project_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters (optional query string entries only)
        query_parameters = {}
        if iteration_id is not None:
            query_parameters['iterationId'] = self._serialize.query("iteration_id", iteration_id, 'str')
        if application is not None:
            query_parameters['application'] = self._serialize.query("application", application, 'str')

        # Construct headers; the prediction key authenticates the call.
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if custom_headers:
            header_parameters.update(custom_headers)
        header_parameters['Prediction-Key'] = self._serialize.header("self.config.api_key", self.config.api_key, 'str')

        # Construct body
        body_content = self._serialize.body(image_url, 'ImageUrl')

        # Construct and send request
        request = self._client.post(url, query_parameters)
        response = self._client.send(
            request, header_parameters, body_content, **operation_config)

        # Only 200 is a documented success status for this operation.
        if response.status_code not in [200]:
            raise HttpOperationError(self._deserialize, response)

        deserialized = None

        if response.status_code == 200:
            deserialized = self._deserialize('ImagePredictionResultModel', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
    def predict_image(
            self, project_id, image_data, iteration_id=None, application=None, custom_headers=None, raw=False, **operation_config):
        """Predict an image and saves the result.

        :param project_id: The project id
        :type project_id: str
        :param image_data:
        :type image_data: Generator
        :param iteration_id: Optional. Specifies the id of a particular
         iteration to evaluate against.
         The default iteration for the project will be used when not specified
        :type iteration_id: str
        :param application: Optional. Specifies the name of application using
         the endpoint
        :type application: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: ImagePredictionResultModel or ClientRawResponse if raw=true
        :rtype:
         ~azure.cognitiveservices.vision.customvision.prediction.models.ImagePredictionResultModel
         or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
        """
        # Construct URL
        url = '/{projectId}/image'
        path_format_arguments = {
            'projectId': self._serialize.url("project_id", project_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters (optional query string entries only)
        query_parameters = {}
        if iteration_id is not None:
            query_parameters['iterationId'] = self._serialize.query("iteration_id", iteration_id, 'str')
        if application is not None:
            query_parameters['application'] = self._serialize.query("application", application, 'str')

        # Construct headers; raw image bytes are uploaded as form data.
        header_parameters = {}
        header_parameters['Content-Type'] = 'multipart/form-data'
        if custom_headers:
            header_parameters.update(custom_headers)
        header_parameters['Prediction-Key'] = self._serialize.header("self.config.api_key", self.config.api_key, 'str')

        # Construct form data
        form_data_content = {
            'imageData': image_data,
        }

        # Construct and send request
        request = self._client.post(url, query_parameters)
        response = self._client.send_formdata(
            request, header_parameters, form_data_content, **operation_config)

        # Only 200 is a documented success status for this operation.
        if response.status_code not in [200]:
            raise HttpOperationError(self._deserialize, response)

        deserialized = None

        if response.status_code == 200:
            deserialized = self._deserialize('ImagePredictionResultModel', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
    def predict_image_url_with_no_store(
            self, project_id, iteration_id=None, application=None, url=None, custom_headers=None, raw=False, **operation_config):
        """Predict an image url without saving the result.

        :param project_id: The project id
        :type project_id: str
        :param iteration_id: Optional. Specifies the id of a particular
         iteration to evaluate against.
         The default iteration for the project will be used when not specified
        :type iteration_id: str
        :param application: Optional. Specifies the name of application using
         the endpoint
        :type application: str
        :param url:
        :type url: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: ImagePredictionResultModel or ClientRawResponse if raw=true
        :rtype:
         ~azure.cognitiveservices.vision.customvision.prediction.models.ImagePredictionResultModel
         or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
        """
        # Wrap the image URL in its request-body model before the `url`
        # parameter name is reused (shadowed) for the request path below.
        image_url = models.ImageUrl(url=url)

        # Construct URL -- the '/nostore' suffix is the only difference from
        # predict_image_url: the service does not persist the prediction.
        url = '/{projectId}/url/nostore'
        path_format_arguments = {
            'projectId': self._serialize.url("project_id", project_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters (optional query string entries only)
        query_parameters = {}
        if iteration_id is not None:
            query_parameters['iterationId'] = self._serialize.query("iteration_id", iteration_id, 'str')
        if application is not None:
            query_parameters['application'] = self._serialize.query("application", application, 'str')

        # Construct headers; the prediction key authenticates the call.
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if custom_headers:
            header_parameters.update(custom_headers)
        header_parameters['Prediction-Key'] = self._serialize.header("self.config.api_key", self.config.api_key, 'str')

        # Construct body
        body_content = self._serialize.body(image_url, 'ImageUrl')

        # Construct and send request
        request = self._client.post(url, query_parameters)
        response = self._client.send(
            request, header_parameters, body_content, **operation_config)

        # Only 200 is a documented success status for this operation.
        if response.status_code not in [200]:
            raise HttpOperationError(self._deserialize, response)

        deserialized = None

        if response.status_code == 200:
            deserialized = self._deserialize('ImagePredictionResultModel', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
    def predict_image_with_no_store(
            self, project_id, image_data, iteration_id=None, application=None, custom_headers=None, raw=False, **operation_config):
        """Predict an image without saving the result.

        Uploads raw image bytes as multipart form-data and returns the
        prediction; unlike the non-"nostore" endpoint the image is not
        retained on the server.

        :param project_id: The project id
        :type project_id: str
        :param image_data: Raw image bytes to classify (streamed).
        :type image_data: Generator
        :param iteration_id: Optional. Specifies the id of a particular
         iteration to evaluate against. The default iteration for the
         project will be used when not specified.
        :type iteration_id: str
        :param application: Optional. Specifies the name of application
         using the endpoint.
        :type application: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: ImagePredictionResultModel or ClientRawResponse if raw=true
        :rtype:
         ~azure.cognitiveservices.vision.customvision.prediction.models.ImagePredictionResultModel
         or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
        """
        # Construct URL
        url = '/{projectId}/image/nostore'
        path_format_arguments = {
            'projectId': self._serialize.url("project_id", project_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters (both are optional query-string filters)
        query_parameters = {}
        if iteration_id is not None:
            query_parameters['iterationId'] = self._serialize.query("iteration_id", iteration_id, 'str')
        if application is not None:
            query_parameters['application'] = self._serialize.query("application", application, 'str')
        # Construct headers; the prediction key authenticates the request
        header_parameters = {}
        header_parameters['Content-Type'] = 'multipart/form-data'
        if custom_headers:
            header_parameters.update(custom_headers)
        header_parameters['Prediction-Key'] = self._serialize.header("self.config.api_key", self.config.api_key, 'str')
        # Construct form data (image bytes are sent as the 'imageData' part)
        form_data_content = {
            'imageData': image_data,
        }
        # Construct and send request
        request = self._client.post(url, query_parameters)
        response = self._client.send_formdata(
            request, header_parameters, form_data_content, **operation_config)
        if response.status_code not in [200]:
            raise HttpOperationError(self._deserialize, response)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('ImagePredictionResultModel', response)
        if raw:
            # Caller asked for the raw HTTP response alongside the model.
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized
| mit |
vaidap/zulip | zilencer/views.py | 3 | 4479 | from __future__ import absolute_import
from django.utils.translation import ugettext as _
from django.utils import timezone
from django.http import HttpResponse, HttpRequest
from zilencer.models import Deployment, RemotePushDeviceToken, RemoteZulipServer
from zerver.decorator import has_request_variables, REQ
from zerver.lib.error_notify import do_report_error
from zerver.lib.push_notifications import send_android_push_notification, \
send_apple_push_notification
from zerver.lib.request import JsonableError
from zerver.lib.response import json_error, json_success
from zerver.lib.validator import check_dict, check_int
from zerver.models import UserProfile, PushDeviceToken, Realm
from zerver.views.push_notifications import validate_token
from typing import Any, Dict, Optional, Union, Text, cast
def validate_entity(entity):
    # type: (Union[UserProfile, RemoteZulipServer]) -> None
    """Ensure the requester authenticated as a remote Zulip server, not a user."""
    if isinstance(entity, RemoteZulipServer):
        return
    raise JsonableError(_("Must validate with valid Zulip server API key"))
def validate_bouncer_token_request(entity, token, kind):
    # type: (Union[UserProfile, RemoteZulipServer], bytes, int) -> None
    """Validate a push-bouncer token request: token kind, caller identity, token format."""
    supported_kinds = (RemotePushDeviceToken.APNS, RemotePushDeviceToken.GCM)
    if kind not in supported_kinds:
        raise JsonableError(_("Invalid token type"))
    validate_entity(entity)
    validate_token(token, kind)
@has_request_variables
def report_error(request, deployment, type=REQ(), report=REQ(validator=check_dict([]))):
    # type: (HttpRequest, Deployment, Text, Dict[str, Any]) -> HttpResponse
    """Forward an error report from a deployment to the error-notify backend."""
    return do_report_error(deployment.name, type, report)
@has_request_variables
def remote_server_register_push(request, entity, user_id=REQ(),
                                token=REQ(), token_kind=REQ(validator=check_int), ios_app_id=None):
    # type: (HttpRequest, Union[UserProfile, RemoteZulipServer], int, bytes, int, Optional[Text]) -> HttpResponse
    """Register (or refresh) a push device token on behalf of a remote server's user."""
    validate_bouncer_token_request(entity, token, token_kind)
    server = cast(RemoteZulipServer, entity)
    # If a user logged out on a device and failed to unregister,
    # we should delete any other user associations for this token
    # & RemoteServer pair
    RemotePushDeviceToken.objects.filter(
        token=token, kind=token_kind, server=server).exclude(user_id=user_id).delete()
    # Save or update; update_or_create makes re-registration idempotent
    remote_token, created = RemotePushDeviceToken.objects.update_or_create(
        user_id=user_id,
        server=server,
        kind=token_kind,
        token=token,
        defaults=dict(
            ios_app_id=ios_app_id,
            last_updated=timezone.now()))
    return json_success()
@has_request_variables
def remote_server_unregister_push(request, entity, token=REQ(),
                                  token_kind=REQ(validator=check_int), ios_app_id=None):
    # type: (HttpRequest, Union[UserProfile, RemoteZulipServer], bytes, int, Optional[Text]) -> HttpResponse
    """Delete a previously registered push device token for a remote server."""
    validate_bouncer_token_request(entity, token, token_kind)
    server = cast(RemoteZulipServer, entity)
    # QuerySet.delete() returns (total_deleted, per-model dict); [0] is the count.
    deleted = RemotePushDeviceToken.objects.filter(token=token,
                                                   kind=token_kind,
                                                   server=server).delete()
    if deleted[0] == 0:
        return json_error(_("Token does not exist"))
    return json_success()
@has_request_variables
def remote_server_notify_push(request,  # type: HttpRequest
                              entity,  # type: Union[UserProfile, RemoteZulipServer]
                              payload=REQ(argument_type='body')  # type: Dict[str, Any]
                              ):
    # type: (...) -> HttpResponse
    """Deliver a push notification to all devices registered for a remote user.

    The payload carries pre-built per-platform payloads (GCM/APNs) plus the
    remote user's id; we fan out to every token registered for that user on
    the calling server.
    """
    validate_entity(entity)
    server = cast(RemoteZulipServer, entity)
    user_id = payload['user_id']
    gcm_payload = payload['gcm_payload']
    apns_payload = payload['apns_payload']
    android_devices = list(RemotePushDeviceToken.objects.filter(
        user_id=user_id,
        kind=RemotePushDeviceToken.GCM,
        server=server
    ))
    apple_devices = list(RemotePushDeviceToken.objects.filter(
        user_id=user_id,
        kind=RemotePushDeviceToken.APNS,
        server=server
    ))
    if android_devices:
        send_android_push_notification(android_devices, gcm_payload, remote=True)
    # TODO: set badge count in a better way
    if apple_devices:
        send_apple_push_notification(user_id, apple_devices,
                                     badge=1, zulip=apns_payload)
    return json_success()
| apache-2.0 |
igemsoftware/SYSU-Software2013 | project/Python27/Tools/scripts/findlinksto.py | 100 | 1070 | #! /usr/bin/env python
# findlinksto
#
# find symbolic links to a path matching a regular expression
import os
import sys
import re
import getopt
def main():
    """Parse command-line args and walk each directory looking for symlinks.

    Usage: findlinksto pattern directory ...
    NOTE: Python 2 only (print statements, os.path.walk).
    """
    try:
        opts, args = getopt.getopt(sys.argv[1:], '')
        if len(args) < 2:
            raise getopt.GetoptError('not enough arguments', None)
    except getopt.GetoptError, msg:
        # Redirect usage output to stderr before printing the error.
        sys.stdout = sys.stderr
        print msg
        print 'usage: findlinksto pattern directory ...'
        sys.exit(2)
    pat, dirs = args[0], args[1:]
    prog = re.compile(pat)
    for dirname in dirs:
        # os.path.walk calls visit(prog, dirname, names) per directory.
        os.path.walk(dirname, visit, prog)
def visit(prog, dirname, names):
    """os.path.walk callback: print entries whose link target matches prog."""
    if os.path.islink(dirname):
        # Don't descend through symlinked directories (avoids loops).
        names[:] = []
        return
    if os.path.ismount(dirname):
        # Announce when the walk crosses a filesystem boundary.
        print 'descend into', dirname
    for name in names:
        name = os.path.join(dirname, name)
        try:
            linkto = os.readlink(name)
            if prog.search(linkto) is not None:
                print name, '->', linkto
        except os.error:
            # Not a symlink (or unreadable) -- skip silently.
            pass
# Script entry point.
if __name__ == '__main__':
    main()
| mit |
jonadiazz/spamFilterApp | venv/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.py | 640 | 2524 | # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
'''
This module generates ANSI character codes to printing colors to terminals.
See: http://en.wikipedia.org/wiki/ANSI_escape_code
'''
CSI = '\033['
OSC = '\033]'
BEL = '\007'
def code_to_chars(code):
    """Wrap a numeric SGR code into a complete ANSI escape sequence."""
    return '%s%sm' % (CSI, code)
def set_title(title):
    """Return the OSC sequence that sets the terminal window title."""
    return ''.join([OSC, '2;', title, BEL])
def clear_screen(mode=2):
    """Return the CSI erase-display sequence (mode 2 clears the whole screen)."""
    return '{0}{1}J'.format(CSI, mode)
def clear_line(mode=2):
    """Return the CSI erase-line sequence (mode 2 clears the whole line)."""
    return '{0}{1}K'.format(CSI, mode)
class AnsiCodes(object):
    """Base class that turns numeric class attributes into escape strings.

    Subclasses declare plain integer SGR codes; instantiating wraps each
    public attribute with the full ANSI escape sequence.
    """
    def __init__(self):
        # the subclasses declare class attributes which are numbers.
        # Upon instantiation we define instance attributes, which are the same
        # as the class attributes but wrapped with the ANSI escape sequence
        for name in dir(self):
            if not name.startswith('_'):
                value = getattr(self, name)
                setattr(self, name, code_to_chars(value))
class AnsiCursor(object):
    """Builders for CSI cursor-movement escape sequences."""
    def UP(self, n=1):
        """Move the cursor up by ``n`` rows."""
        return '{0}{1}A'.format(CSI, n)
    def DOWN(self, n=1):
        """Move the cursor down by ``n`` rows."""
        return '{0}{1}B'.format(CSI, n)
    def FORWARD(self, n=1):
        """Move the cursor right by ``n`` columns."""
        return '{0}{1}C'.format(CSI, n)
    def BACK(self, n=1):
        """Move the cursor left by ``n`` columns."""
        return '{0}{1}D'.format(CSI, n)
    def POS(self, x=1, y=1):
        """Move the cursor to 1-based column ``x``, row ``y``."""
        return '{0}{1};{2}H'.format(CSI, y, x)
class AnsiFore(AnsiCodes):
    """SGR foreground color codes (wrapped into escape strings on init)."""
    BLACK = 30
    RED = 31
    GREEN = 32
    YELLOW = 33
    BLUE = 34
    MAGENTA = 35
    CYAN = 36
    WHITE = 37
    RESET = 39
    # These are fairly well supported, but not part of the standard.
    LIGHTBLACK_EX = 90
    LIGHTRED_EX = 91
    LIGHTGREEN_EX = 92
    LIGHTYELLOW_EX = 93
    LIGHTBLUE_EX = 94
    LIGHTMAGENTA_EX = 95
    LIGHTCYAN_EX = 96
    LIGHTWHITE_EX = 97
class AnsiBack(AnsiCodes):
    """SGR background color codes (wrapped into escape strings on init)."""
    BLACK = 40
    RED = 41
    GREEN = 42
    YELLOW = 43
    BLUE = 44
    MAGENTA = 45
    CYAN = 46
    WHITE = 47
    RESET = 49
    # These are fairly well supported, but not part of the standard.
    LIGHTBLACK_EX = 100
    LIGHTRED_EX = 101
    LIGHTGREEN_EX = 102
    LIGHTYELLOW_EX = 103
    LIGHTBLUE_EX = 104
    LIGHTMAGENTA_EX = 105
    LIGHTCYAN_EX = 106
    LIGHTWHITE_EX = 107
class AnsiStyle(AnsiCodes):
    """SGR text intensity/style codes (wrapped into escape strings on init)."""
    BRIGHT = 1
    DIM = 2
    NORMAL = 22
    RESET_ALL = 0
# Module-level singletons: importing code uses these pre-wrapped instances
# (e.g. Fore.RED is the full escape string, not the bare number).
Fore = AnsiFore()
Back = AnsiBack()
Style = AnsiStyle()
Cursor = AnsiCursor()
| unlicense |
resmo/ansible | lib/ansible/modules/cloud/digital_ocean/_digital_ocean.py | 49 | 15648 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: digital_ocean
short_description: Create/delete a droplet/SSH_key in DigitalOcean
deprecated:
removed_in: '2.12'
why: Updated module to remove external dependency with increased functionality.
alternative: Use M(digital_ocean_droplet) instead.
description:
- Create/delete a droplet in DigitalOcean and optionally wait for it to be 'running', or deploy an SSH key.
version_added: "1.3"
author: "Vincent Viallet (@zbal)"
options:
command:
description:
- Which target you want to operate on.
default: droplet
choices: ['droplet', 'ssh']
state:
description:
- Indicate desired state of the target.
default: present
choices: ['present', 'active', 'absent', 'deleted']
api_token:
description:
- DigitalOcean api token.
version_added: "1.9.5"
id:
description:
- Numeric, the droplet id you want to operate on.
aliases: ['droplet_id']
name:
description:
- String, this is the name of the droplet - must be formatted by hostname rules, or the name of a SSH key.
unique_name:
description:
- Bool, require unique hostnames. By default, DigitalOcean allows multiple hosts with the same name. Setting this to "yes" allows only one host
per name. Useful for idempotence.
type: bool
default: 'no'
version_added: "1.4"
size_id:
description:
- This is the slug of the size you would like the droplet created with.
image_id:
description:
- This is the slug of the image you would like the droplet created with.
region_id:
description:
- This is the slug of the region you would like your server to be created in.
ssh_key_ids:
description:
- Optional, array of SSH key (numeric) ID that you would like to be added to the server.
virtio:
description:
- "Bool, turn on virtio driver in droplet for improved network and storage I/O."
type: bool
default: 'yes'
version_added: "1.4"
private_networking:
description:
- "Bool, add an additional, private network interface to droplet for inter-droplet communication."
type: bool
default: 'no'
version_added: "1.4"
backups_enabled:
description:
- Optional, Boolean, enables backups for your droplet.
type: bool
default: 'no'
version_added: "1.6"
user_data:
description:
- opaque blob of data which is made available to the droplet
version_added: "2.0"
ipv6:
description:
- Optional, Boolean, enable IPv6 for your droplet.
type: bool
default: 'no'
version_added: "2.2"
wait:
description:
- Wait for the droplet to be in state 'running' before returning. If wait is "no" an ip_address may not be returned.
type: bool
default: 'yes'
wait_timeout:
description:
- How long before wait gives up, in seconds.
default: 300
ssh_pub_key:
description:
- The public SSH key you want to add to your account.
notes:
- Two environment variables can be used, DO_API_KEY and DO_API_TOKEN. They both refer to the v2 token.
- As of Ansible 1.9.5 and 2.0, Version 2 of the DigitalOcean API is used, this removes C(client_id) and C(api_key) options in favor of C(api_token).
- If you are running Ansible 1.9.4 or earlier you might not be able to use the included version of this module as the API version used has been retired.
Upgrade Ansible or, if unable to, try downloading the latest version of this module from github and putting it into a 'library' directory.
requirements:
- "python >= 2.6"
- dopy
'''
EXAMPLES = '''
# Ensure a SSH key is present
# If a key matches this name, will return the ssh key id and changed = False
# If no existing key matches this name, a new key is created, the ssh key id is returned and changed = False
- digital_ocean:
state: present
command: ssh
name: my_ssh_key
ssh_pub_key: 'ssh-rsa AAAA...'
api_token: XXX
# Create a new Droplet
# Will return the droplet details including the droplet id (used for idempotence)
- digital_ocean:
state: present
command: droplet
name: mydroplet
api_token: XXX
size_id: 2gb
region_id: ams2
image_id: fedora-19-x64
wait_timeout: 500
register: my_droplet
- debug:
msg: "ID is {{ my_droplet.droplet.id }}"
- debug:
msg: "IP is {{ my_droplet.droplet.ip_address }}"
# Ensure a droplet is present
# If droplet id already exist, will return the droplet details and changed = False
# If no droplet matches the id, a new droplet will be created and the droplet details (including the new id) are returned, changed = True.
- digital_ocean:
state: present
command: droplet
id: 123
name: mydroplet
api_token: XXX
size_id: 2gb
region_id: ams2
image_id: fedora-19-x64
wait_timeout: 500
# Create a droplet with ssh key
# The ssh key id can be passed as argument at the creation of a droplet (see ssh_key_ids).
# Several keys can be added to ssh_key_ids as id1,id2,id3
# The keys are used to connect as root to the droplet.
- digital_ocean:
state: present
ssh_key_ids: 123,456
name: mydroplet
api_token: XXX
size_id: 2gb
region_id: ams2
image_id: fedora-19-x64
'''
import os
import time
import traceback
from distutils.version import LooseVersion
try:
# Imported as a dependency for dopy
import ansible.module_utils.six
HAS_SIX = True
except ImportError:
HAS_SIX = False
HAS_DOPY = False
try:
import dopy
from dopy.manager import DoError, DoManager
if LooseVersion(dopy.__version__) >= LooseVersion('0.3.2'):
HAS_DOPY = True
except ImportError:
pass
from ansible.module_utils.basic import AnsibleModule, env_fallback
class TimeoutError(Exception):
    """Raised when a droplet fails to reach the desired state in time.

    Carries the droplet id so callers can report which droplet timed out.
    """
    def __init__(self, msg, id_):
        Exception.__init__(self, msg)
        # Droplet id the timeout applies to (exposed for error reporting).
        self.id = id_
class JsonfyMixIn(object):
    """Mix-in giving subclasses a ``to_json`` view of their attributes."""
    def to_json(self):
        # vars() returns the live instance __dict__ (not a copy), matching
        # the JSON-serializable attribute mapping callers expect.
        return vars(self)
class Droplet(JsonfyMixIn):
    """Wrapper around a DigitalOcean droplet as returned by the dopy API.

    Attributes are populated dynamically from the API's JSON response.
    ``setup`` must be called once to bind the class-level API manager.
    """
    # dopy DoManager; shared by all instances, set via setup().
    manager = None
    def __init__(self, droplet_json):
        self.status = 'new'
        # Copy every field of the API response onto the instance.
        self.__dict__.update(droplet_json)
    def is_powered_on(self):
        """Return True when the API reports the droplet as active."""
        return self.status == 'active'
    def update_attr(self, attrs=None):
        """Refresh instance attributes from ``attrs`` or from the API."""
        if attrs:
            for k, v in attrs.items():
                setattr(self, k, v)
            # Expose IPv6 addresses under flat attribute names.
            networks = attrs.get('networks', {})
            for network in networks.get('v6', []):
                if network['type'] == 'public':
                    setattr(self, 'public_ipv6_address', network['ip_address'])
                else:
                    setattr(self, 'private_ipv6_address', network['ip_address'])
        else:
            # No attrs given: re-fetch the droplet from the API.
            json = self.manager.show_droplet(self.id)
            if json['ip_address']:
                self.update_attr(json)
    def power_on(self):
        """Power on a droplet that is currently off."""
        if self.status != 'off':
            raise AssertionError('Can only power on a closed one.')
        json = self.manager.power_on_droplet(self.id)
        self.update_attr(json)
    def ensure_powered_on(self, wait=True, wait_timeout=300):
        """Power on if needed and optionally poll until the droplet is active."""
        if self.is_powered_on():
            return
        if self.status == 'off':  # powered off
            self.power_on()
        if wait:
            end_time = time.time() + wait_timeout
            while time.time() < end_time:
                # Poll at most every 20 seconds.
                time.sleep(min(20, end_time - time.time()))
                self.update_attr()
                if self.is_powered_on():
                    if not self.ip_address:
                        raise TimeoutError('No ip is found.', self.id)
                    return
            raise TimeoutError('Wait for droplet running timeout', self.id)
    def destroy(self):
        """Destroy the droplet (scrubbing its data)."""
        return self.manager.destroy_droplet(self.id, scrub_data=True)
    @classmethod
    def setup(cls, api_token):
        """Bind the class-wide dopy API manager (v2 API)."""
        cls.manager = DoManager(None, api_token, api_version=2)
    @classmethod
    def add(cls, name, size_id, image_id, region_id, ssh_key_ids=None, virtio=True, private_networking=False, backups_enabled=False, user_data=None,
            ipv6=False):
        """Create a new droplet and return it as a Droplet instance."""
        # The API expects these booleans as lowercase strings ('true'/'false').
        private_networking_lower = str(private_networking).lower()
        backups_enabled_lower = str(backups_enabled).lower()
        ipv6_lower = str(ipv6).lower()
        json = cls.manager.new_droplet(name, size_id, image_id, region_id,
                                       ssh_key_ids=ssh_key_ids, virtio=virtio, private_networking=private_networking_lower,
                                       backups_enabled=backups_enabled_lower, user_data=user_data, ipv6=ipv6_lower)
        droplet = cls(json)
        return droplet
    @classmethod
    def find(cls, id=None, name=None):
        """Look up a droplet by id, then by name; return False if not found."""
        if not id and not name:
            return False
        droplets = cls.list_all()
        # Check first by id.  digital ocean requires that it be unique
        for droplet in droplets:
            if droplet.id == id:
                return droplet
        # Failing that, check by hostname.
        for droplet in droplets:
            if droplet.name == name:
                return droplet
        return False
    @classmethod
    def list_all(cls):
        """Return all active droplets for the account."""
        json = cls.manager.all_active_droplets()
        return list(map(cls, json))
class SSH(JsonfyMixIn):
    """Wrapper around a DigitalOcean account SSH key from the dopy API."""
    # dopy DoManager; shared by all instances, set via setup().
    manager = None
    def __init__(self, ssh_key_json):
        # Copy every field of the API response onto the instance.
        self.__dict__.update(ssh_key_json)
    # Refreshing is identical to initial population.
    update_attr = __init__
    def destroy(self):
        """Delete this SSH key from the account."""
        self.manager.destroy_ssh_key(self.id)
        return True
    @classmethod
    def setup(cls, api_token):
        """Bind the class-wide dopy API manager (v2 API)."""
        cls.manager = DoManager(None, api_token, api_version=2)
    @classmethod
    def find(cls, name):
        """Return the key with the given name, or False if absent."""
        if not name:
            return False
        keys = cls.list_all()
        for key in keys:
            if key.name == name:
                return key
        return False
    @classmethod
    def list_all(cls):
        """Return all SSH keys on the account."""
        json = cls.manager.all_ssh_keys()
        return list(map(cls, json))
    @classmethod
    def add(cls, name, key_pub):
        """Upload a public key under the given name and return it."""
        json = cls.manager.new_ssh_key(name, key_pub)
        return cls(json)
def core(module):
    """Dispatch the module action (droplet/ssh x present/absent).

    Exits via module.exit_json/fail_json in every path; never returns
    normally on success.
    """
    def getkeyordie(k):
        # Fetch a required module parameter or abort the module run.
        v = module.params[k]
        if v is None:
            module.fail_json(msg='Unable to load %s' % k)
        return v
    api_token = module.params['api_token']
    changed = True
    command = module.params['command']
    state = module.params['state']
    if command == 'droplet':
        Droplet.setup(api_token)
        if state in ('active', 'present'):
            # First, try to find a droplet by id.
            droplet = Droplet.find(id=module.params['id'])
            # If we couldn't find the droplet and the user is allowing unique
            # hostnames, then check to see if a droplet with the specified
            # hostname already exists.
            if not droplet and module.params['unique_name']:
                droplet = Droplet.find(name=getkeyordie('name'))
            # If both of those attempts failed, then create a new droplet.
            if not droplet:
                droplet = Droplet.add(
                    name=getkeyordie('name'),
                    size_id=getkeyordie('size_id'),
                    image_id=getkeyordie('image_id'),
                    region_id=getkeyordie('region_id'),
                    ssh_key_ids=module.params['ssh_key_ids'],
                    virtio=module.params['virtio'],
                    private_networking=module.params['private_networking'],
                    backups_enabled=module.params['backups_enabled'],
                    user_data=module.params.get('user_data'),
                    ipv6=module.params['ipv6'],
                )
            # Already running => nothing changed (idempotence).
            if droplet.is_powered_on():
                changed = False
            droplet.ensure_powered_on(
                wait=getkeyordie('wait'),
                wait_timeout=getkeyordie('wait_timeout')
            )
            module.exit_json(changed=changed, droplet=droplet.to_json())
        elif state in ('absent', 'deleted'):
            # First, try to find a droplet by id.
            droplet = Droplet.find(module.params['id'])
            # If we couldn't find the droplet and the user is allowing unique
            # hostnames, then check to see if a droplet with the specified
            # hostname already exists.
            if not droplet and module.params['unique_name']:
                droplet = Droplet.find(name=getkeyordie('name'))
            if not droplet:
                module.exit_json(changed=False, msg='The droplet is not found.')
            droplet.destroy()
            module.exit_json(changed=True)
    elif command == 'ssh':
        SSH.setup(api_token)
        name = getkeyordie('name')
        if state in ('active', 'present'):
            key = SSH.find(name)
            if key:
                # Key already present => unchanged (idempotence).
                module.exit_json(changed=False, ssh_key=key.to_json())
            key = SSH.add(name, getkeyordie('ssh_pub_key'))
            module.exit_json(changed=True, ssh_key=key.to_json())
        elif state in ('absent', 'deleted'):
            key = SSH.find(name)
            if not key:
                module.exit_json(changed=False, msg='SSH key with the name of %s is not found.' % name)
            key.destroy()
            module.exit_json(changed=True)
def main():
    """Module entry point: build the AnsibleModule spec, check deps, run core()."""
    module = AnsibleModule(
        argument_spec=dict(
            command=dict(choices=['droplet', 'ssh'], default='droplet'),
            state=dict(choices=['active', 'present', 'absent', 'deleted'], default='present'),
            api_token=dict(
                aliases=['API_TOKEN'],
                no_log=True,
                fallback=(env_fallback, ['DO_API_TOKEN', 'DO_API_KEY'])
            ),
            name=dict(type='str'),
            size_id=dict(),
            image_id=dict(),
            region_id=dict(),
            ssh_key_ids=dict(type='list'),
            virtio=dict(type='bool', default='yes'),
            private_networking=dict(type='bool', default='no'),
            backups_enabled=dict(type='bool', default='no'),
            id=dict(aliases=['droplet_id'], type='int'),
            unique_name=dict(type='bool', default='no'),
            user_data=dict(default=None),
            ipv6=dict(type='bool', default='no'),
            wait=dict(type='bool', default=True),
            wait_timeout=dict(default=300, type='int'),
            ssh_pub_key=dict(type='str'),
        ),
        required_together=(
            ['size_id', 'image_id', 'region_id'],
        ),
        mutually_exclusive=(
            ['size_id', 'ssh_pub_key'],
            ['image_id', 'ssh_pub_key'],
            ['region_id', 'ssh_pub_key'],
        ),
        required_one_of=(
            ['id', 'name'],
        ),
    )
    # dopy pulls in six; distinguish the two failure modes for a clearer message.
    if not HAS_DOPY and not HAS_SIX:
        module.fail_json(msg='dopy >= 0.3.2 is required for this module. dopy requires six but six is not installed. '
                             'Make sure both dopy and six are installed.')
    if not HAS_DOPY:
        module.fail_json(msg='dopy >= 0.3.2 required for this module')
    try:
        core(module)
    except TimeoutError as e:
        module.fail_json(msg=str(e), id=e.id)
    except (DoError, Exception) as e:
        module.fail_json(msg=str(e), exception=traceback.format_exc())
| gpl-3.0 |
Strubbl/pynder | pynder.py | 1 | 7110 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This file is part of Pynder.
Pynder is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Pynder is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Pynder. If not, see <http://www.gnu.org/licenses/>.
"""
import datetime
from operator import itemgetter
import os
import re
import bot
import config
import feedparser
import utils
import version
__author__ = "Strubbl"
__version__ = version.version
__credits__ = ["Strubbl"]
class Pynder(bot.Bot):
    """XMPP bot bridging a Minecraft server with a MUC room.

    Extends the generic ``bot.Bot`` with chat commands (``cmd_`` methods),
    an RSS feed announcer (``check_rss``) and a two-way Minecraft chat
    relay (log file -> room, room -> server console).
    """
    def __init__(self, name, jid, password, resource=None):
        """Set up the underlying Bot and an empty slap counter dict."""
        super(Pynder, self).__init__(name, jid, password, resource)
        self.slapped = {}
    def cmd_echo(self, args, tojid, typ="chat"):
        """Echo the given text.

        NOTE(review): unlike the other commands, this sends to the MUC room
        (``self.room_name``) and ignores ``tojid``/``typ`` -- presumably
        intentional; confirm against callers before changing.
        """
        self.send(self.room_name, args)
    def cmd_help(self, args, tojid, typ="chat"):
        """Reply with the list of available bot commands."""
        helptext = "available commands: " + str(self.commands)
        self.send(tojid, helptext, typ)
    def cmd_uptime(self, args, tojid, typ="chat"):
        """Reply with a human-readable uptime of the bot."""
        uptime = datetime.datetime.today() - self.birthday
        # python >= 2.7 could use timedelta.total_seconds(); the expression
        # below keeps compatibility with python < 2.7.
        uptimestr = utils.format_seconds((uptime.microseconds + (uptime.seconds + uptime.days * 24 * 3600) * 10 ** 6) / 10 ** 6)
        self.send(tojid, uptimestr, typ)
    def cmd_slap(self, args, tojid, typ="chat"):
        """Slap the given target and count how often it was slapped."""
        key = args.lower()
        if key in self.slapped:
            self.slapped[key] += 1
        else:
            self.slapped[key] = 1
        self.send(tojid, "/me slaps " + args, typ)
        # self.send(tojid, "/me slaps " + args + ". (" + str(self.slapped[key]) + " totally slapped)", typ)
    def cmd_totallyslapped(self, args, tojid, typ="chat"):
        """Reply with all slap targets, sorted by slap count (descending)."""
        # sort the dict by value
        pairs = sorted(self.slapped.iteritems(), key=itemgetter(1), reverse=True)
        i = 1
        message = ""
        for key, value in pairs:
            if i > 1:
                message += "\n"
            message += str(i) + "# " + key + ": " + str(value)
            i += 1
        self.send(tojid, message, typ)
    def cmd_totalusers(self, args, tojid, typ="chat"):
        """Minecraft: reply with the number of established connections."""
        netstatout = os.popen("/bin/netstat -tn").read()
        searchstring = config.mcport + ".*" + config.established
        connlist = re.findall(searchstring, netstatout)
        usercount = str(len(connlist))
        message = "user online: " + usercount
        self.send(tojid, message, typ)
    def cmd_listusers(self, args, tojid, typ="chat"):
        """Minecraft: reply with the server's own list of online users."""
        # BUGFIX: 'time' was used below without ever being imported in this
        # module, raising NameError on first invocation; import it locally.
        import time
        # Ask the server for its user list, then give it a moment to log it.
        os.popen("screen -S mc -X stuff 'list^M'")
        time.sleep(0.1)
        logcommand = "/usr/bin/tail -n 1 " + config.watch_file
        logoutput = os.popen(logcommand).read()
        pos = re.search(r'(]:) (.*$)', logoutput)
        message = pos.group(2)
        self.send(tojid, message, typ)
    def cronjob(self):
        """Periodic hook invoked by the Bot main loop."""
        self.check_rss()
    def check_rss(self):
        """Fetch configured RSS feeds and announce unseen items to the room.

        The date of the newest announced item per feed is cached under
        ``config.cache_dir/rss/<name>`` to avoid re-announcing.
        """
        rss_cache_dir = config.cache_dir + os.sep + "rss"
        newest_item_written = False
        if config.rss_feeds:
            self.rootlog.debug("rss feeds found:" + str(config.rss_feeds))
            for name, feed in config.rss_feeds.items():
                last_cache_item = utils.read_file(rss_cache_dir + os.sep + name)
                f = feedparser.parse(feed)
                self.rootlog.debug(str(f["channel"]["title"] + " feed fetched"))
                if last_cache_item is not None:
                    self.rootlog.debug("last_cache_item not None: " + last_cache_item)
                    for i in f["items"]:
                        # Stop at the first item we already announced.
                        if str(last_cache_item.strip()) == str(i["date"].strip()):
                            self.rootlog.debug("item found, aborting")
                            break
                        else:
                            if not newest_item_written:
                                # write date of this feed into file (not elegant)
                                utils.write_file(rss_cache_dir + os.sep + name, i["date"].strip())
                                newest_item_written = True
                            text2chat = "".join(["[", name, "] ", i["title"], " ", i["link"]])
                            self.rootlog.debug(text2chat)
                            self.send(self.room_name, text2chat)
                else:
                    # First run for this feed: only seed the cache, announce nothing.
                    self.rootlog.debug("last_cache_item is None")
                    utils.write_file(rss_cache_dir + os.sep + name, f["items"][0]["date"])
    def parse_minecraft_log(self, line):
        """Relay a Minecraft chat line ('<nick> msg') into the MUC room."""
        # ^\[[\d:]+\] \[.*\]: <(\S+)> (.*)$
        p = re.compile('^\[[\d:]+\] \[.*\]: <(\S+)> (.*)$')
        m = p.match(line)
        if m is not None:
            who = m.group(1)
            msg = m.group(2)
            self.send(self.room_name, who + ": " + msg)
    def parse_new_file_content(self, line):
        """Hook: a new line appeared in the watched log file."""
        self.parse_minecraft_log(line)
    def write_to_minecraft_chat(self, message):
        """Escape a room message and inject it into the Minecraft console."""
        self.rootlog.debug("write_to_minecraft_chat: orig message: " + message)
        # Strip the MUC JID prefix so only the nick remains.
        message = message.replace(config.room + "@" + config.conference_server + "/", "")
        # Shell-escape, then undo escaping for characters that are safe.
        message = re.escape(message)
        for i in ".,:-()[]$€?! äüöÄÖÜß":
            message = message.replace("\\" + i, i)
        message = message.replace("\n", " ")
        self.rootlog.debug("write_to_minecraft_chat: message: " + message)
        minecraft_say = config.minecraft_say.replace('%message%', message)
        self.rootlog.debug("write_to_minecraft_chat: command: " + minecraft_say)
        os.system(minecraft_say)
    def handle_message(self, nick, text):
        """Hook: a MUC message arrived; forward it into Minecraft chat."""
        message = nick + ": " + text
        self.write_to_minecraft_chat(message)
### main program: instantiate the bot from config and connect.
if __name__ == "__main__":
    p = Pynder(config.name, config.jid, config.password, config.resource)
    p.go_online()
ric2b/Vivaldi-browser | chromium/components/ntp_snippets/remote/fetch.py | 10 | 6937 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Fetches articles from the server.
Examples:
$ fetch.py # unauthenticated, no experiments
$ fetch.py --short # abbreviate instead of dumping JSON
$ fetch.py --signed-in -x3313279 # authenticated, results from Google Now
If getting signed-in results, authenticates with OAuth2 and stores the
credentials at ~/.zineauth.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import argparse
import base64
import datetime
import json
import os
import textwrap
import oauth2client.client
import oauth2client.file
import oauth2client.tools
import requests
import sys
API_KEY_FILE = os.path.join(
os.path.dirname(__file__),
"../../../google_apis/internal/google_chrome_api_keys.h")
API_SCOPE = "https://www.googleapis.com/auth/chrome-content-suggestions"
API_HOSTS = {
"prod": "https://chromecontentsuggestions-pa.googleapis.com",
"staging": "https://staging-chromecontentsuggestions-pa.googleapis.com",
"alpha": "https://alpha-chromecontentsuggestions-pa.sandbox.googleapis.com",
}
API_PATH = "/v1/suggestions/fetch"
def main():
  """Parse CLI flags, issue the fetch request and print the response."""
  # Default request language from the environment, e.g. "en_US.UTF-8" -> "en_US".
  default_lang = os.environ.get("LANG", "en_US").split(".")[0]
  parser = argparse.ArgumentParser(
      description="fetch articles from server",
      parents=[oauth2client.tools.argparser])
  parser.add_argument("-c", "--component",
                      default="prod", choices=["prod", "staging", "alpha"],
                      help="component to fetch from (default: prod)")
  parser.add_argument("-x", "--experiment", action="append", type=int,
                      help="include an experiment ID")
  parser.add_argument("-l", "--ui-language", default=default_lang,
                      help="language code (default: %s)" % default_lang)
  parser.add_argument("--ip", help="fake IP address")
  parser.add_argument("--api-key", type=str,
                      help="API key to use for unauthenticated requests"
                           " (default: use official key)")
  parser.add_argument("-s", "--signed-in", action="store_true",
                      help="sign in and issue authenticated request")
  parser.add_argument("--client", metavar="ID,SECRET", type=str,
                      help="client project to use for authenticated requests"
                           " (default: use official project ID")
  parser.add_argument("--short", action="store_true",
                      help="print results in abbreviated form")
  args = parser.parse_args()
  r = PostRequest(args)
  j = {}
  try:
    j = r.json()
  except ValueError:
    # Not JSON at all: dump the raw body and fail.
    print(r.text.encode("utf-8"))
    sys.exit(1)
  if j.get("error"):
    print(r.text.encode("utf-8"))
    sys.exit(1)
  if args.short:
    PrintShortResponse(j)
    return
  print(r.text.encode("utf-8"))
  if r.status_code != 200:
    sys.exit(1)
def GetApiKeyFile():
  """Return the path of the internal Chrome API keys header."""
  return API_KEY_FILE
def GetAPIDefs():
  """Parses the internal file with API keys and returns a dict.

  Handles both single-line '#define NAME "value"' entries and two-line
  entries where the value follows a trailing backslash continuation.
  """
  with open(GetApiKeyFile()) as f:
    lines = f.readlines()
  defs = {}
  next_name = None
  for line in lines:
    if next_name:
      # Previous line ended in '\'; this line holds the quoted value.
      defs[next_name] = json.loads(line)
      next_name = None
    elif line.startswith("#define"):
      try:
        _, name, value = line.split()
      except ValueError:
        # Not a simple NAME VALUE define -- skip it.
        continue
      if value == "\\":
        next_name = name
      else:
        defs[name] = json.loads(value)
  return defs
def GetAPIKey():
  """Return the official Google API key from the internal keys file."""
  return GetAPIDefs()["GOOGLE_API_KEY"]
def GetOAuthClient():
  """Return the official OAuth (client_id, client_secret) pair."""
  defs = GetAPIDefs()
  return defs["GOOGLE_CLIENT_ID_MAIN"], defs["GOOGLE_CLIENT_SECRET_MAIN"]
def EncodeExperiments(experiments):
  """Turn a list of experiment IDs into an X-Client-Data header value.

  Encodes each ID as a protobuf field (tag 1, varint) and base64-encodes
  the concatenated result.
  """
  # BUGFIX: the previous implementation concatenated chr() results (str)
  # onto b"" and raised TypeError under Python 3; bytearray behaves
  # identically on Python 2 and 3.
  buf = bytearray()
  for exp in experiments:
    buf.append(0x08)  # field number 1, wire type 0 (varint)
    while True:
      byte = exp & 0x7f
      exp >>= 7
      if exp:
        buf.append(0x80 | byte)  # continuation bit: more septets follow
      else:
        buf.append(byte)
        break
  return base64.b64encode(bytes(buf))
def AbbreviateDuration(duration):
  """Turn a datetime.timedelta into a short string like "10h 14m"."""
  weeks, days = divmod(duration.days, 7)
  hours, remainder = divmod(duration.seconds, 3600)
  minutes, seconds = divmod(remainder, 60)
  # Render the two most significant non-zero units; sub-second durations
  # collapse to "<1s" and a zero delta to "0s".
  if weeks:
    return "%dw %dd" % (weeks, days)
  if days:
    return "%dd %dh" % (days, hours)
  if hours:
    return "%dh %dm" % (hours, minutes)
  if minutes:
    return "%dm %ds" % (minutes, seconds)
  if seconds:
    return "%ds" % seconds
  if duration.microseconds:
    return "<1s"
  return "0s"
def PostRequest(args):
  """Issue the POST request described by the parsed command-line args.

  Returns the requests.Response object.
  """
  url = API_HOSTS[args.component] + API_PATH
  headers = {}
  if args.experiment:
    # Advertise the fake experiment IDs via the X-Client-Data header.
    headers["X-Client-Data"] = EncodeExperiments(args.experiment)
  if args.ip is not None:
    headers["X-User-IP"] = args.ip
  if args.signed_in:
    # Authenticated request: attach an OAuth token for the chosen client.
    if args.client:
      client_id, client_secret = args.client.split(",")
    else:
      client_id, client_secret = GetOAuthClient()
    Authenticate(args, headers, client_id, client_secret)
  else:
    # Unauthenticated request: identify ourselves with an API key instead.
    api_key = args.api_key if args.api_key else GetAPIKey()
    url += "?key=" + api_key
  data = {
    "uiLanguage": args.ui_language,
  }
  return requests.post(url, headers=headers, data=data)
def Authenticate(args, headers, client_id, client_secret):
  """Ensure valid OAuth credentials and apply them to the request headers.

  Credentials are cached in ~/.zineauth; an interactive OAuth flow runs
  when they are missing, invalid, or expired.
  """
  storage = oauth2client.file.Storage(os.path.expanduser("~/.zineauth"))
  creds = storage.get()
  needs_refresh = not creds or creds.invalid or creds.access_token_expired
  if needs_refresh:
    # Run the interactive OAuth flow and persist the refreshed credentials.
    flow = oauth2client.client.OAuth2WebServerFlow(
        client_id=client_id, client_secret=client_secret,
        scope=API_SCOPE)
    oauth2client.tools.run_flow(flow, storage, args)
    creds = storage.get()
  creds.apply(headers)
def PrintShortResponse(j):
  # Prints a compact, human-readable summary of the JSON response dict:
  # one line per category, then title/attribution/age and URL per suggestion.
  now = datetime.datetime.now()
  for category in j["categories"]:
    print("%s: " % category["localizedTitle"])
    for suggestion in category.get("suggestions", []):
      attribution = suggestion["attribution"]
      title = suggestion["title"]
      full_url = suggestion["fullPageUrl"]
      amp_url = suggestion.get("ampUrl")
      creation_time = suggestion["creationTime"]
      if len(title) > 40:
        # Keep only the first wrapped line and mark the truncation.
        title = textwrap.wrap(title, 40)[0] + "…"
      creation_time = ParseDateTime(creation_time)
      # NOTE(review): uses local time, while the timestamp looks UTC
      # ("...Z" suffix in ParseDateTime) — ages may be off by the local
      # offset; confirm intent.
      age = AbbreviateDuration(now - creation_time)
      print(" “%s” (%s, %s ago)" % (title, attribution, age))
      # Prefer the AMP URL when the server provided one.
      print(" " + (amp_url or full_url))
    if category["allowFetchingMoreResults"]:
      print(" [More]")
def ParseDateTime(creation_time):
  """Parse an ISO-8601 UTC timestamp, with or without fractional seconds."""
  parse = datetime.datetime.strptime
  try:
    # Most timestamps come without a fractional-seconds component.
    return parse(creation_time, "%Y-%m-%dT%H:%M:%SZ")
  except ValueError:
    # Fall back to the variant with microseconds.
    return parse(creation_time, "%Y-%m-%dT%H:%M:%S.%fZ")
# Run the tool only when executed as a script, not when imported.
if __name__ == "__main__":
  main()
| bsd-3-clause |
mancoast/CPythonPyc_test | cpython/254_test_httplib.py | 22 | 5580 | import httplib
import StringIO
import sys
from unittest import TestCase
from test import test_support
class FakeSocket:
    """Socket stand-in: records sent data and replays canned text on read."""

    def __init__(self, text, fileclass=StringIO.StringIO):
        self.text = text
        self.fileclass = fileclass

    def sendall(self, data):
        # Remember the last payload so tests can inspect what was sent.
        self.data = data

    def makefile(self, mode, bufsize=None):
        # httplib only ever reads from the socket file; reject other modes.
        if mode not in ('r', 'rb'):
            raise httplib.UnimplementedFileMode()
        return self.fileclass(self.text)
class NoEOFStringIO(StringIO.StringIO):
    """Like StringIO, but raises AssertionError on EOF.

    This is used below to test that httplib doesn't try to read
    more from the underlying file than it should.
    """

    def read(self, n=-1):
        result = StringIO.StringIO.read(self, n)
        if result == '':
            raise AssertionError('caller tried to read past EOF')
        return result

    def readline(self, length=None):
        result = StringIO.StringIO.readline(self, length)
        if result == '':
            raise AssertionError('caller tried to read past EOF')
        return result
class HeaderTests(TestCase):
    def test_auto_headers(self):
        # Some headers are added automatically, but should not be added by
        # .request() if they are explicitly set.

        import httplib

        class HeaderCountingBuffer(list):
            # Replacement for HTTPConnection's internal line buffer that
            # counts how many times each header field name is appended.
            def __init__(self):
                self.count = {}
            def append(self, item):
                kv = item.split(':')
                if len(kv) > 1:
                    # item is a 'Key: Value' header string
                    lcKey = kv[0].lower()
                    self.count.setdefault(lcKey, 0)
                    self.count[lcKey] += 1
                list.append(self, item)

        # Exercise both cases: header supplied by the caller, and header
        # generated automatically by httplib.
        for explicit_header in True, False:
            for header in 'Content-length', 'Host', 'Accept-encoding':
                conn = httplib.HTTPConnection('example.com')
                conn.sock = FakeSocket('blahblahblah')
                # Swap in the counting buffer before issuing the request.
                conn._buffer = HeaderCountingBuffer()

                body = 'spamspamspam'
                headers = {}
                if explicit_header:
                    headers[header] = str(len(body))
                conn.request('POST', '/', body, headers)
                # Either way, each header must appear exactly once.
                self.assertEqual(conn._buffer.count[header.lower()], 1)
# Collect output to a buffer so that we don't have to cope with line-ending
# issues across platforms. Specifically, the headers will have \r\n pairs
# and some platforms will strip them from the output file.
def test():
    # Runs _test() with stdout captured in a StringIO, then re-prints each
    # line stripped, so the expected-output comparison is insensitive to
    # \r\n vs \n differences across platforms.
    buf = StringIO.StringIO()
    _stdout = sys.stdout
    try:
        sys.stdout = buf
        _test()
    finally:
        # Always restore the real stdout, even if _test() raised.
        sys.stdout = _stdout

    # print individual lines with endings stripped
    s = buf.getvalue()
    for line in s.split("\n"):
        print line.strip()
def _test():
    # Smoke tests for httplib, relying on printed output (compared by the
    # caller) rather than unittest assertions.

    # Test HTTP status lines
    body = "HTTP/1.1 200 Ok\r\n\r\nText"
    sock = FakeSocket(body)
    resp = httplib.HTTPResponse(sock, 1)
    resp.begin()
    print resp.read()
    resp.close()

    # A non-integer status code must raise BadStatusLine.
    body = "HTTP/1.1 400.100 Not Ok\r\n\r\nText"
    sock = FakeSocket(body)
    resp = httplib.HTTPResponse(sock, 1)
    try:
        resp.begin()
    except httplib.BadStatusLine:
        print "BadStatusLine raised as expected"
    else:
        print "Expect BadStatusLine"

    # Check invalid host_port
    for hp in ("www.python.org:abc", "www.python.org:"):
        try:
            h = httplib.HTTP(hp)
        except httplib.InvalidURL:
            print "InvalidURL raised as expected"
        else:
            print "Expect InvalidURL"

    # Valid host:port strings, including IPv6 literals in brackets, must
    # parse into the expected (host, port) pairs.
    for hp,h,p in (("[fe80::207:e9ff:fe9b]:8000", "fe80::207:e9ff:fe9b", 8000),
                   ("www.python.org:80", "www.python.org", 80),
                   ("www.python.org", "www.python.org", 80),
                   ("[fe80::207:e9ff:fe9b]", "fe80::207:e9ff:fe9b", 80)):
        try:
            http = httplib.HTTP(hp)
        except httplib.InvalidURL:
            print "InvalidURL raised erroneously"
        c = http._conn
        if h != c.host: raise AssertionError, ("Host incorrectly parsed", h, c.host)
        if p != c.port: raise AssertionError, ("Port incorrectly parsed", p, c.host)

    # test response with multiple message headers with the same field name.
    text = ('HTTP/1.1 200 OK\r\n'
            'Set-Cookie: Customer="WILE_E_COYOTE"; Version="1"; Path="/acme"\r\n'
            'Set-Cookie: Part_Number="Rocket_Launcher_0001"; Version="1";'
            ' Path="/acme"\r\n'
            '\r\n'
            'No body\r\n')
    hdr = ('Customer="WILE_E_COYOTE"; Version="1"; Path="/acme"'
           ', '
           'Part_Number="Rocket_Launcher_0001"; Version="1"; Path="/acme"')
    s = FakeSocket(text)
    r = httplib.HTTPResponse(s, 1)
    r.begin()
    # getheader() must join repeated fields with ", ".
    cookies = r.getheader("Set-Cookie")
    if cookies != hdr:
        raise AssertionError, "multiple headers not combined properly"

    # Test that the library doesn't attempt to read any data
    # from a HEAD request.  (Tickles SF bug #622042.)
    sock = FakeSocket(
        'HTTP/1.1 200 OK\r\n'
        'Content-Length: 14432\r\n'
        '\r\n',
        NoEOFStringIO)
    resp = httplib.HTTPResponse(sock, 1, method="HEAD")
    resp.begin()
    if resp.read() != "":
        raise AssertionError, "Did not expect response from HEAD request"
    resp.close()
class OfflineTest(TestCase):
    # Tests that need no network and no fake socket at all.
    def test_responses(self):
        # The status-code-to-reason-phrase table must contain 404.
        self.assertEquals(httplib.responses[httplib.NOT_FOUND], "Not Found")
def test_main(verbose=None):
    # Entry point used by the regression-test harness; runs the
    # unittest-based test classes (the print-based checks run via test()).
    tests = [HeaderTests,OfflineTest]
    test_support.run_unittest(*tests)

# The print-based checks run at import/execution time of this module.
test()
| gpl-3.0 |
m8ttyB/socorro | socorro/unittest/external/postgresql/unittestbase.py | 14 | 3174 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from socorro.external.postgresql.connection_context import ConnectionContext
from configman import ConfigurationManager, Namespace
from configman.converters import list_converter, class_converter
from socorro.unittest.testbase import TestCase
class PostgreSQLTestCase(TestCase):
    """Base class for PostgreSQL related unit tests. """

    # configman application identity used when building the configuration.
    app_name = 'PostgreSQLTestCase'
    app_version = '1.0'
    app_description = __doc__
    metadata = ''

    required_config = Namespace()
    # we use this class here because it is a convenient way to pull in
    # both a database connection context and a transaction executor
    required_config.add_option(
        'crashstorage_class',
        default='socorro.external.postgresql.crashstorage.'
                'PostgreSQLCrashStorage',
        from_string_converter=class_converter
    )

    # Superuser credentials, used by tests that create/drop databases.
    required_config.add_option(
        name='database_superusername',
        default='test',
        doc='Username to connect to database',
    )

    required_config.add_option(
        name='database_superuserpassword',
        default='aPassword',
        doc='Password to connect to database',
    )

    required_config.add_option(
        name='dropdb',
        default=False,
        doc='Whether or not to drop database_name',
        exclude_from_print_conf=True,
        exclude_from_dump_conf=True
    )

    # Static platform table mirroring what the application expects.
    required_config.add_option(
        'platforms',
        default=[{
            "id": "windows",
            "name": "Windows NT"
        }, {
            "id": "mac",
            "name": "Mac OS X"
        }, {
            "id": "linux",
            "name": "Linux"
        }],
        doc='Array associating OS ids to full names.',
    )

    required_config.add_option(
        'non_release_channels',
        default=['beta', 'aurora', 'nightly'],
        doc='List of channels, excluding the `release` one.',
        from_string_converter=list_converter
    )

    required_config.add_option(
        'restricted_channels',
        default=['beta'],
        doc='List of channels to restrict based on build ids.',
        from_string_converter=list_converter
    )

    @classmethod
    def get_standard_config(cls):
        # Builds a configman configuration from required_config only
        # (no command line / env sources: argv_source=[]).
        config_manager = ConfigurationManager(
            [cls.required_config,
             ],
            app_name='PostgreSQLTestCase',
            app_description=__doc__,
            argv_source=[]
        )

        with config_manager.context() as config:
            return config

    @classmethod
    def setUpClass(cls):
        """Create a configuration context and a database connection.

        This will create (and later destroy) one connection per test
        case (aka. test class).
        """
        cls.config = cls.get_standard_config()
        cls.database = ConnectionContext(cls.config)
        cls.connection = cls.database.connection()

    @classmethod
    def tearDownClass(cls):
        """Close the database connection. """
        cls.connection.close()
| mpl-2.0 |
StephenWeber/ansible | lib/ansible/module_utils/_text.py | 62 | 12325 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Toshio Kuratomi <a.badger@gmail.com>, 2016
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""
.. warn:: This module_util is currently internal implementation.
We want to evaluate this code for stability and API suitability before
making backwards compatibility guarantees. The API may change between
releases. Do not use this unless you are willing to port your module code.
"""
import codecs
from ansible.module_utils.six import PY3, text_type, binary_type
try:
codecs.lookup_error('surrogateescape')
HAS_SURROGATEESCAPE = True
except LookupError:
HAS_SURROGATEESCAPE = False
# "Composed" error handler names: they are resolved to a real codecs error
# handler (surrogateescape when available, otherwise strict/replace) before
# being passed to encode()/decode().  'surrogate_or_replace' is the name the
# to_bytes/to_text docstrings advertise; without it here the name would be
# handed straight to codecs and raise LookupError.  The old misspelled
# 'surrogate_or_escape' is kept for backward compatibility.
_COMPOSED_ERROR_HANDLERS = frozenset((None, 'surrogate_or_escape',
                                      'surrogate_or_replace',
                                      'surrogate_or_strict',
                                      'surrogate_then_replace'))
def to_bytes(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
    """Make sure that a string is a byte string

    :arg obj: An object to make sure is a byte string. In most cases this
        will be either a text string or a byte string. However, with
        ``nonstring='simplerepr'``, this can be used as a traceback-free
        version of ``str(obj)``.
    :kwarg encoding: The encoding to use to transform from a text string to
        a byte string. Defaults to using 'utf-8'.
    :kwarg errors: The error handler to use if the text string is not
        encodable using the specified encoding. Any valid `codecs error
        handler <https://docs.python.org/2/library/codecs.html#codec-base-classes>`_
        may be specified. There are three additional error strategies
        specifically aimed at helping people to port code. The first two are:

            :surrogate_or_strict: Will use ``surrogateescape`` if it is a valid
                handler, otherwise it will use ``strict``
            :surrogate_or_replace: Will use ``surrogateescape`` if it is a valid
                handler, otherwise it will use ``replace``.

        Because ``surrogateescape`` was added in Python3 this usually means that
        Python3 will use ``surrogateescape`` and Python2 will use the fallback
        error handler. Note that the code checks for ``surrogateescape`` when the
        module is imported. If you have a backport of ``surrogateescape`` for
        Python2, be sure to register the error handler prior to importing this
        module.

        The last error handler is:

            :surrogate_then_replace: Will use ``surrogateescape`` if it is a valid
                handler. If encoding with ``surrogateescape`` would traceback,
                surrogates are first replaced with a replacement characters
                and then the string is encoded using ``replace`` (which replaces
                the rest of the nonencodable bytes). If ``surrogateescape`` is
                not present it will simply use ``replace``. (Added in Ansible 2.3)
                This strategy is designed to never traceback when it attempts
                to encode a string.

        The default until Ansible-2.2 was ``surrogate_or_replace``
        From Ansible-2.3 onwards, the default is ``surrogate_then_replace``.

    :kwarg nonstring: The strategy to use if a nonstring is specified in
        ``obj``. Default is 'simplerepr'. Valid values are:

            :simplerepr: The default. This takes the ``str`` of the object and
                then returns the bytes version of that string.
            :empty: Return an empty byte string
            :passthru: Return the object passed in
            :strict: Raise a :exc:`TypeError`

    :returns: Typically this returns a byte string. If a nonstring object is
        passed in this may be a different type depending on the strategy
        specified by nonstring. This will never return a text string.

    .. note:: If passed a byte string, this function does not check that the
        string is valid in the specified encoding. If it's important that the
        byte string is in the specified encoding do::

            encoded_string = to_bytes(to_text(input_string, 'latin-1'), 'utf-8')

    .. version_changed:: 2.3

        Added the ``surrogate_then_replace`` error handler and made it the default error handler.
    """
    # Already bytes: returned as-is, without validating the encoding (see
    # the docstring note above).
    if isinstance(obj, binary_type):
        return obj

    # We're given a text string
    # If it has surrogates, we know because it will decode
    original_errors = errors
    if errors in _COMPOSED_ERROR_HANDLERS:
        # Resolve the "composed" handler name to a real codecs handler.
        if HAS_SURROGATEESCAPE:
            errors = 'surrogateescape'
        elif errors == 'surrogate_or_strict':
            errors = 'strict'
        else:
            errors = 'replace'

    if isinstance(obj, text_type):
        try:
            # Try this first as it's the fastest
            return obj.encode(encoding, errors)
        except UnicodeEncodeError:
            if original_errors in (None, 'surrogate_then_replace'):
                # Slow but works: strip the surrogates by round-tripping
                # through utf-8 with 'replace', then encode to the target
                # encoding, replacing anything else that won't fit.
                return_string = obj.encode('utf-8', 'surrogateescape')
                return_string = return_string.decode('utf-8', 'replace')
                return return_string.encode(encoding, 'replace')
            raise

    # Note: We do these last even though we have to call to_bytes again on the
    # value because we're optimizing the common case
    if nonstring == 'simplerepr':
        try:
            value = str(obj)
        except UnicodeError:
            try:
                value = repr(obj)
            except UnicodeError:
                # Giving up
                return to_bytes('')
    elif nonstring == 'passthru':
        return obj
    elif nonstring == 'empty':
        # python2.4 doesn't have b''
        return to_bytes('')
    elif nonstring == 'strict':
        raise TypeError('obj must be a string type')
    else:
        raise TypeError('Invalid value %s for to_bytes\' nonstring parameter' % nonstring)

    # Recurse once on the stringified value (it may itself need encoding).
    return to_bytes(value, encoding, errors)
def to_text(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
    """Make sure that a string is a text string

    :arg obj: An object to make sure is a text string. In most cases this
        will be either a text string or a byte string. However, with
        ``nonstring='simplerepr'``, this can be used as a traceback-free
        version of ``str(obj)``.
    :kwarg encoding: The encoding to use to transform from a byte string to
        a text string. Defaults to using 'utf-8'.
    :kwarg errors: The error handler to use if the byte string is not
        decodable using the specified encoding. Any valid `codecs error
        handler <https://docs.python.org/2/library/codecs.html#codec-base-classes>`_
        may be specified. We support three additional error strategies
        specifically aimed at helping people to port code:

            :surrogate_or_strict: Will use surrogateescape if it is a valid
                handler, otherwise it will use strict
            :surrogate_or_replace: Will use surrogateescape if it is a valid
                handler, otherwise it will use replace.
            :surrogate_then_replace: Does the same as surrogate_or_replace but
                `was added for symmetry with the error handlers in
                :func:`ansible.module_utils._text.to_bytes` (Added in Ansible 2.3)

        Because surrogateescape was added in Python3 this usually means that
        Python3 will use `surrogateescape` and Python2 will use the fallback
        error handler. Note that the code checks for surrogateescape when the
        module is imported. If you have a backport of `surrogateescape` for
        python2, be sure to register the error handler prior to importing this
        module.

        The default until Ansible-2.2 was `surrogate_or_replace`
        In Ansible-2.3 this defaults to `surrogate_then_replace` for symmetry
        with :func:`ansible.module_utils._text.to_bytes` .
    :kwarg nonstring: The strategy to use if a nonstring is specified in
        ``obj``. Default is 'simplerepr'. Valid values are:

            :simplerepr: The default. This takes the ``str`` of the object and
                then returns the text version of that string.
            :empty: Return an empty text string
            :passthru: Return the object passed in
            :strict: Raise a :exc:`TypeError`

    :returns: Typically this returns a text string. If a nonstring object is
        passed in this may be a different type depending on the strategy
        specified by nonstring. This will never return a byte string.
        From Ansible-2.3 onwards, the default is `surrogate_then_replace`.

    .. version_changed:: 2.3

        Added the surrogate_then_replace error handler and made it the default error handler.
    """
    # Already text: nothing to do.
    if isinstance(obj, text_type):
        return obj

    if errors in _COMPOSED_ERROR_HANDLERS:
        # Resolve the "composed" handler name to a real codecs handler.
        if HAS_SURROGATEESCAPE:
            errors = 'surrogateescape'
        elif errors == 'surrogate_or_strict':
            errors = 'strict'
        else:
            errors = 'replace'

    if isinstance(obj, binary_type):
        # Note: We don't need special handling for surrogate_then_replace
        # because all bytes will either be made into surrogates or are valid
        # to decode.
        return obj.decode(encoding, errors)

    # Note: We do these last even though we have to call to_text again on the
    # value because we're optimizing the common case
    if nonstring == 'simplerepr':
        try:
            value = str(obj)
        except UnicodeError:
            try:
                value = repr(obj)
            except UnicodeError:
                # Giving up
                return u''
    elif nonstring == 'passthru':
        return obj
    elif nonstring == 'empty':
        return u''
    elif nonstring == 'strict':
        raise TypeError('obj must be a string type')
    else:
        raise TypeError('Invalid value %s for to_text\'s nonstring parameter' % nonstring)

    # Recurse once on the stringified value (it may itself be bytes on py2).
    return to_text(value, encoding, errors)
#: :py:func:`to_native`
#: Transform a variable into the native str type for the python version
#:
#: On Python2, this is an alias for
#: :func:`~ansible.module_utils.to_bytes`.  On Python3 it is an alias for
#: :func:`~ansible.module_utils.to_text`.  It makes it easier to
#: transform a variable into the native str type for the python version
#: the code is running on.  Use this when constructing the message to
#: send to exceptions or when dealing with an API that needs to take
#: a native string.  Example::
#:
#:      try:
#:          1//0
#:      except ZeroDivisionError as e:
#:          raise MyException('Encountered and error: %s' % to_native(e))

# The alias is chosen once at import time, based on the interpreter version.
if PY3:
    to_native = to_text
else:
    to_native = to_bytes
| gpl-3.0 |
mxOBS/deb-pkg_trusty_chromium-browser | third_party/pyftpdlib/src/setup.py | 5 | 3799 | #!/usr/bin/env python
# $Id$
#
# pyftpdlib is released under the MIT license, reproduced below:
# ======================================================================
# Copyright (C) 2007-2012 Giampaolo Rodola' <g.rodola@gmail.com>
#
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# ======================================================================
"""pyftpdlib installer.
To install pyftpdlib just open a command shell and run:
> python setup.py install
"""
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
# Package identity and the Google Code release tarball URL derived from it.
name = 'pyftpdlib'
version = '0.7.0'
download_url = "http://pyftpdlib.googlecode.com/files/" + name + "-" + \
               version + ".tar.gz"

# Declarative package metadata; running this module performs the install.
setup(
    name=name,
    version=version,
    description='High-level asynchronous FTP server library',
    long_description="Python FTP server library provides an high-level portable "
                     "interface to easily write asynchronous FTP servers with "
                     "Python.",
    license='License :: OSI Approved :: MIT License',
    platforms='Platform Independent',
    author="Giampaolo Rodola'",
    author_email='g.rodola@gmail.com',
    url='http://code.google.com/p/pyftpdlib/',
    download_url=download_url,
    packages=['pyftpdlib', 'pyftpdlib/contrib'],
    keywords=['ftp', 'ftps', 'server', 'ftpd', 'daemon', 'python', 'ssl',
              'sendfile', 'rfc959', 'rfc1123', 'rfc2228', 'rfc2428', 'rfc3659'],
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Internet :: File Transfer Protocol (FTP)',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: System :: Filesystems',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.4',
        'Programming Language :: Python :: 2.5',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
    ],
)
if os.name == 'posix':
    # On POSIX systems, suggest the optional pysendfile accelerator if it
    # is not already installed.
    try:
        import sendfile
    except ImportError:
        msg = ("\nYou might want to install pysendfile module to speedup "
               "transfers:\nhttp://code.google.com/p/pysendfile/\n")
        if sys.stderr.isatty():
            # Highlight the hint in bold when writing to a terminal.
            msg = '\x1b[1m%s\x1b[0m' % msg
        sys.stderr.write(msg)
| bsd-3-clause |
JioCloud/oslo.middleware | oslo_middleware/tests/test_correlation_id.py | 2 | 1713 | # Copyright (c) 2013 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
import mock
from oslotest import base as test_base
from oslotest import moxstubout
from oslo_middleware import correlation_id
class CorrelationIdTest(test_base.BaseTestCase):
    """Tests for the correlation-id middleware."""

    def setUp(self):
        super(CorrelationIdTest, self).setUp()
        # mox-based stub manager, used to stub out uuid.uuid4 below.
        self.stubs = self.useFixture(moxstubout.MoxStubout()).stubs

    def test_process_request(self):
        # A request without a correlation id gets a freshly generated one.
        app = mock.Mock()
        req = mock.Mock()
        req.headers = {}

        fake_uuid4 = mock.Mock()
        fake_uuid4.return_value = "fake_uuid"
        self.stubs.Set(uuid, 'uuid4', fake_uuid4)

        middleware = correlation_id.CorrelationId(app)
        middleware(req)

        self.assertEqual(req.headers.get("X_CORRELATION_ID"), "fake_uuid")

    def test_process_request_should_not_regenerate_correlation_id(self):
        # An already-present correlation id must be preserved as-is.
        app = mock.Mock()
        req = mock.Mock()
        req.headers = {"X_CORRELATION_ID": "correlation_id"}

        middleware = correlation_id.CorrelationId(app)
        middleware(req)

        self.assertEqual(req.headers.get("X_CORRELATION_ID"), "correlation_id")
| apache-2.0 |
melodous/designate | designate/schema/resolvers.py | 1 | 1348 | # Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Author: Kiall Mac Innes <kiall@hp.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import jsonschema
from designate.openstack.common import log as logging
from designate import utils
LOG = logging.getLogger(__name__)
class LocalResolver(jsonschema.RefResolver):
    """RefResolver that loads referenced schemas from local files."""

    def __init__(self, base_uri, referrer):
        super(LocalResolver, self).__init__(base_uri, referrer, (), True)
        # Set by from_schema(); selects which schema directory to load from.
        self.api_version = None

    @classmethod
    def from_schema(cls, api_version, schema, *args, **kwargs):
        """Build a resolver for *schema* bound to the given API version."""
        instance = cls(schema.get("id", ""), schema, *args, **kwargs)
        instance.api_version = api_version
        return instance

    def resolve_remote(self, uri):
        """Resolve a "remote" $ref from the local schema store, not HTTP."""
        LOG.debug('Loading remote schema: %s' % uri)
        return utils.load_schema(self.api_version, uri)
| apache-2.0 |
kawasaki2013/python-for-android-x86 | python-modules/twisted/twisted/names/root.py | 52 | 16410 | # -*- test-case-name: twisted.names.test.test_rootresolve -*-
# Copyright (c) 2001-2009 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Resolver implementation for querying successive authoritative servers to
lookup a record, starting from the root nameservers.
@author: Jp Calderone
todo::
robustify it
documentation
"""
import warnings
from twisted.python.failure import Failure
from twisted.internet import defer
from twisted.names import dns, common, error
def retry(t, p, *args):
    """
    Issue a query one or more times.

    This function is deprecated. Use one of the resolver classes for retry
    logic, or implement it yourself.
    """
    warnings.warn(
        "twisted.names.root.retry is deprecated since Twisted 10.0. Use a "
        "Resolver object for retry logic.", category=DeprecationWarning,
        stacklevel=2)

    assert t, "Timeout is required"
    # Copy so pop() below does not mutate the caller's sequence.
    t = list(t)
    def errback(failure):
        # Only a timeout triggers a retry; other failures propagate.
        failure.trap(defer.TimeoutError)
        if not t:
            # Timeouts exhausted: give up with the last failure.
            return failure
        # Re-issue the query with the next timeout; errback re-attaches
        # itself so each attempt can retry again.
        return p.query(timeout=t.pop(0), *args
            ).addErrback(errback
            )
    # First attempt, consuming the first timeout value.
    return p.query(timeout=t.pop(0), *args
        ).addErrback(errback
        )
class _DummyController:
    """
    A do-nothing DNS controller.  This is useful when all messages received
    will be responses to previously issued queries.  Anything else received
    will be ignored.
    """
    def messageReceived(self, *args):
        # Intentionally drop unsolicited messages.
        pass
class Resolver(common.ResolverBase):
"""
L{Resolver} implements recursive lookup starting from a specified list of
root servers.
@ivar hints: A C{list} of C{str} giving the dotted quad representation
of IP addresses of root servers at which to begin resolving names.
@ivar _maximumQueries: A C{int} giving the maximum number of queries
which will be attempted to resolve a single name.
@ivar _reactor: A L{IReactorTime} and L{IReactorUDP} provider to use to
bind UDP ports and manage timeouts.
"""
    def __init__(self, hints, maximumQueries=10, reactor=None):
        # See the class docstring for the meaning of hints,
        # _maximumQueries and _reactor.
        common.ResolverBase.__init__(self)
        self.hints = hints
        self._maximumQueries = maximumQueries
        self._reactor = reactor
def _roots(self):
"""
Return a list of two-tuples representing the addresses of the root
servers, as defined by C{self.hints}.
"""
return [(ip, dns.PORT) for ip in self.hints]
    def _query(self, query, servers, timeout, filter):
        """
        Issue one query and return a L{Deferred} which fires with its response.

        @param query: The query to issue.
        @type query: L{dns.Query}

        @param servers: The servers which might have an answer for this
            query.
        @type servers: L{list} of L{tuple} of L{str} and L{int}

        @param timeout: A timeout on how long to wait for the response.
        @type timeout: L{tuple} of L{int}

        @param filter: A flag indicating whether to filter the results.  If
            C{True}, the returned L{Deferred} will fire with a three-tuple of
            lists of L{RRHeaders} (like the return value of the I{lookup*}
            methods of L{IResolver}.  IF C{False}, the result will be a
            L{Message} instance.
        @type filter: L{bool}

        @return: A L{Deferred} which fires with the response or a timeout
            error.
        @rtype: L{Deferred}
        """
        # Imported here rather than at module level, presumably to avoid a
        # circular import between twisted.names.root and twisted.names.client
        # -- confirm before moving.
        from twisted.names import client
        r = client.Resolver(servers=servers, reactor=self._reactor)
        d = r.queryUDP([query], timeout)
        if filter:
            d.addCallback(r.filterAnswers)
        return d
    def _lookup(self, name, cls, type, timeout):
        """
        Implement name lookup by recursively discovering the authoritative
        server for the name and then asking it, starting at one of the servers
        in C{self.hints}.
        """
        if timeout is None:
            # A series of timeouts for semi-exponential backoff, summing to an
            # arbitrary total of 60 seconds.
            timeout = (1, 3, 11, 45)
        # Start the iterative resolution at the root servers, bounded by the
        # configured maximum number of queries.
        return self._discoverAuthority(
            dns.Query(name, type, cls), self._roots(), timeout,
            self._maximumQueries)
    def _discoverAuthority(self, query, servers, timeout, queriesLeft):
        """
        Issue a query to a server and follow a delegation if necessary.

        @param query: The query to issue.
        @type query: L{dns.Query}

        @param servers: The servers which might have an answer for this
            query.
        @type servers: L{list} of L{tuple} of L{str} and L{int}

        @param timeout: A C{tuple} of C{int} giving the timeout to use for this
            query.

        @param queriesLeft: A C{int} giving the number of queries which may
            yet be attempted to answer this query before the attempt will be
            abandoned.

        @return: A L{Deferred} which fires with a three-tuple of lists of
            L{RRHeaders} giving the response, or with a L{Failure} if there is
            a timeout or response error.
        """
        # Stop now if we've hit the query limit.
        if queriesLeft <= 0:
            return Failure(
                error.ResolverError("Query limit reached without result"))

        # Ask unfiltered (filter=False) so delegations can be inspected, and
        # recurse with one fewer query allowed.
        d = self._query(query, servers, timeout, False)
        d.addCallback(
            self._discoveredAuthority, query, timeout, queriesLeft - 1)
        return d
def _discoveredAuthority(self, response, query, timeout, queriesLeft):
    """
    Interpret the response to a query, checking for error codes and
    following delegations if necessary.

    @param response: The L{Message} received in response to issuing C{query}.
    @type response: L{Message}

    @param query: The L{dns.Query} which was issued.
    @type query: L{dns.Query}.

    @param timeout: The timeout to use if another query is indicated by
        this response.
    @type timeout: L{tuple} of L{int}

    @param queriesLeft: A C{int} giving the number of queries which may
        yet be attempted to answer this query before the attempt will be
        abandoned.

    @return: A L{Failure} indicating a response error, a three-tuple of
        lists of L{RRHeaders} giving the response to C{query} or a
        L{Deferred} which will fire with one of those.
    """
    # Any non-OK rcode is turned into the matching DNS exception type.
    if response.rCode != dns.OK:
        return Failure(self.exceptionForCode(response.rCode)(response))

    # Turn the answers into a structure that's a little easier to work with.
    # Maps record name -> list of answer records for that name.
    records = {}
    for answer in response.answers:
        records.setdefault(answer.name, []).append(answer)

    def findAnswerOrCName(name, type, cls):
        # Find an exact-type answer for name; failing that, a CNAME.
        cname = None
        for record in records.get(name, []):
            if record.cls == cls:
                if record.type == type:
                    return record
                elif record.type == dns.CNAME:
                    cname = record
        # If there were any CNAME records, return the last one.  There's
        # only supposed to be zero or one, though.
        return cname

    # Walk CNAME chains within this single response, guarding against
    # loops with the `seen` set.
    seen = set()
    name = query.name
    record = None
    while True:
        seen.add(name)
        previous = record
        record = findAnswerOrCName(name, query.type, query.cls)
        if record is None:
            if name == query.name:
                # If there's no answer for the original name, then this may
                # be a delegation.  Code below handles it.
                break
            else:
                # Try to resolve the CNAME with another query.
                d = self._discoverAuthority(
                    dns.Query(str(name), query.type, query.cls),
                    self._roots(), timeout, queriesLeft)
                # We also want to include the CNAME in the ultimate result,
                # otherwise this will be pretty confusing.
                # NOTE: tuple-unpacking parameter — Python 2 only syntax.
                def cbResolved((answers, authority, additional)):
                    answers.insert(0, previous)
                    return (answers, authority, additional)
                d.addCallback(cbResolved)
                return d
        elif record.type == query.type:
            # Authoritative answer of the requested type; we are done.
            return (
                response.answers,
                response.authority,
                response.additional)
        else:
            # It's a CNAME record.  Try to resolve it from the records
            # in this response with another iteration around the loop.
            if record.payload.name in seen:
                raise error.ResolverError("Cycle in CNAME processing")
            name = record.payload.name

    # Build a map to use to convert NS names into IP addresses, from the
    # glue records in the additional section.
    addresses = {}
    for rr in response.additional:
        if rr.type == dns.A:
            addresses[str(rr.name)] = rr.payload.dottedQuad()

    # hints: delegated servers we already have addresses for.
    # traps: delegated server names which still need an address lookup.
    hints = []
    traps = []
    for rr in response.authority:
        if rr.type == dns.NS:
            ns = str(rr.payload.name)
            if ns in addresses:
                hints.append((addresses[ns], dns.PORT))
            else:
                traps.append(ns)
    if hints:
        # Follow the delegation directly using the glue addresses.
        return self._discoverAuthority(
            query, hints, timeout, queriesLeft)
    elif traps:
        # No glue: resolve the first nameserver name, then follow the
        # delegation to the address we find.
        d = self.lookupAddress(traps[0], timeout)
        d.addCallback(
            lambda (answers, authority, additional):
                answers[0].payload.dottedQuad())
        d.addCallback(
            lambda hint: self._discoverAuthority(
                query, [(hint, dns.PORT)], timeout, queriesLeft - 1))
        return d
    else:
        return Failure(error.ResolverError(
            "Stuck at response without answers or delegation"))
def discoveredAuthority(self, auth, name, cls, type, timeout):
    """
    Deprecated: issue C{query} for C{name} directly against the
    authoritative server C{auth} using a one-off client resolver.
    """
    warnings.warn(
        'twisted.names.root.Resolver.discoveredAuthority is deprecated since '
        'Twisted 10.0. Use twisted.names.client.Resolver directly, instead.',
        category=DeprecationWarning, stacklevel=2)
    from twisted.names import client
    question = dns.Query(name, type, cls)
    clientResolver = client.Resolver(servers=[(auth, dns.PORT)])
    result = clientResolver.queryUDP([question], timeout)
    result.addCallback(clientResolver.filterAnswers)
    return result
def lookupNameservers(host, atServer, p=None):
    """
    Deprecated module-level helper: ask C{atServer} for the NS records
    of C{host}, retrying with a semi-exponential backoff.
    """
    warnings.warn(
        'twisted.names.root.lookupNameservers is deprecated since Twisted '
        '10.0. Use twisted.names.root.Resolver.lookupNameservers instead.',
        category=DeprecationWarning, stacklevel=2)
    protocol = p
    if protocol is None:
        protocol = dns.DNSDatagramProtocol(_DummyController())
        protocol.noisy = False
    timeouts = (1, 3, 11, 45)
    question = [dns.Query(host, dns.NS, dns.IN)]
    return retry(timeouts, protocol, (atServer, dns.PORT), question)
def lookupAddress(host, atServer, p=None):
    """
    Deprecated module-level helper: ask C{atServer} for the A records
    of C{host}, retrying with a semi-exponential backoff.
    """
    warnings.warn(
        'twisted.names.root.lookupAddress is deprecated since Twisted '
        '10.0. Use twisted.names.root.Resolver.lookupAddress instead.',
        category=DeprecationWarning, stacklevel=2)
    protocol = p
    if protocol is None:
        protocol = dns.DNSDatagramProtocol(_DummyController())
        protocol.noisy = False
    timeouts = (1, 3, 11, 45)
    question = [dns.Query(host, dns.A, dns.IN)]
    return retry(timeouts, protocol, (atServer, dns.PORT), question)
def extractAuthority(msg, cache):
    """
    Deprecated helper: pull a usable authority address and the list of
    NS records out of a response message, caching every A record seen
    so later calls can resolve nameserver names without a lookup.
    """
    warnings.warn(
        'twisted.names.root.extractAuthority is deprecated since Twisted '
        '10.0. Please inspect the Message object directly.',
        category=DeprecationWarning, stacklevel=2)
    records = msg.answers + msg.authority + msg.additional
    nameservers = [r for r in records if r.type == dns.NS]
    if not nameservers:
        return None, nameservers
    if not records:
        # Unreachable in practice (no records implies no nameservers);
        # preserved from the original implementation.
        raise IOError("No records")
    # Remember every address record for NS-name -> address resolution.
    for record in records:
        if record.type == dns.A:
            cache[str(record.name)] = record.payload.dottedQuad()
    for record in records:
        if record.type != dns.NS:
            continue
        nsName = str(record.payload.name)
        if nsName in cache:
            return cache[nsName], nameservers
        for candidate in records:
            if candidate.type == dns.A and candidate.name == record.name:
                return candidate.payload.dottedQuad(), nameservers
    return None, nameservers
def discoverAuthority(host, roots, cache=None, p=None):
    """
    Deprecated generator (wrapped by C{defer.deferredGenerator} below):
    walk down from the root servers, one label at a time, to find an
    authoritative server for C{host}.  Yields/waits on Deferreds via the
    old waitForDeferred protocol.
    """
    warnings.warn(
        'twisted.names.root.discoverAuthority is deprecated since Twisted '
        '10.0. Use twisted.names.root.Resolver.lookupNameservers instead.',
        category=DeprecationWarning, stacklevel=4)
    if cache is None:
        cache = {}
    rootAuths = list(roots)

    # Split "www.example.com" into labels and walk them root-first.
    parts = host.rstrip('.').split('.')
    parts.reverse()

    authority = rootAuths.pop()

    soFar = ''
    for part in parts:
        soFar = part + '.' + soFar
        # print '///////', soFar, authority, p
        msg = defer.waitForDeferred(lookupNameservers(soFar, authority, p))
        yield msg
        msg = msg.getResult()

        newAuth, nameservers = extractAuthority(msg, cache)

        if newAuth is not None:
            # print "newAuth is not None"
            authority = newAuth
        else:
            if nameservers:
                # No glue address available: recursively resolve the
                # first delegated nameserver name to an address.
                r = str(nameservers[0].payload.name)
                # print 'Recursively discovering authority for', r
                authority = defer.waitForDeferred(
                    discoverAuthority(r, roots, cache, p))
                yield authority
                authority = authority.getResult()
                # print 'Discovered to be', authority, 'for', r
##        else:
##            # print 'Doing address lookup for', soFar, 'at', authority
##            msg = defer.waitForDeferred(lookupAddress(soFar, authority, p))
##            yield msg
##            msg = msg.getResult()
##            records = msg.answers + msg.authority + msg.additional
##            addresses = [r for r in records if r.type == dns.A]
##            if addresses:
##                authority = addresses[0].payload.dottedQuad()
##            else:
##                raise IOError("Resolution error")
    # print "Yielding authority", authority
    yield authority

# Wrap the generator so callers receive a Deferred firing with the
# final authority address.
discoverAuthority = defer.deferredGenerator(discoverAuthority)
def makePlaceholder(deferred, name):
    """
    Return a callable that stands in for method C{name}: when invoked,
    it chains a call of C{name} (with the given arguments) onto
    C{deferred} and returns that Deferred.
    """
    def placeholder(*args, **kwargs):
        deferred.addCallback(
            lambda resolver: getattr(resolver, name)(*args, **kwargs))
        return deferred
    return placeholder
class DeferredResolver:
    """
    A resolver proxy that queues lookups until a real resolver, supplied
    by a Deferred, becomes available; at that point this object morphs
    into the real resolver and fires the queued lookups.
    """

    def __init__(self, resolverDeferred):
        self.waiting = []
        resolverDeferred.addCallback(self.gotRealResolver)

    def gotRealResolver(self, resolver):
        pending = self.waiting
        # Become the real resolver in place so every future attribute
        # access goes straight to it.
        self.__dict__ = resolver.__dict__
        self.__class__ = resolver.__class__
        for waiter in pending:
            waiter.callback(resolver)

    def __getattr__(self, name):
        if name.startswith('lookup') or name in ('getHostByName', 'query'):
            placeholderDeferred = defer.Deferred()
            self.waiting.append(placeholderDeferred)
            return makePlaceholder(placeholderDeferred, name)
        raise AttributeError(name)
def bootstrap(resolver):
    """Lookup the root nameserver addresses using the given resolver.

    Return a resolver proxy which will eventually become a
    C{root.Resolver} instance holding every root server address that
    could be looked up.
    """
    # Root servers are named a.root-servers.net .. m.root-servers.net.
    letters = [chr(ord('a') + i) for i in range(13)]
    # f = lambda r: (log.msg('Root server address: ' + str(r)), r)[1]
    identity = lambda r: r
    lookups = []
    for letter in letters:
        lookup = resolver.getHostByName('%s.root-servers.net' % letter)
        lookups.append(lookup.addCallback(identity))
    allDone = defer.DeferredList(lookups)
    # Keep only the successful lookups when constructing the Resolver.
    allDone.addCallback(
        lambda results: Resolver([address for ok, address in results if ok]))
    return DeferredResolver(allDone)
| apache-2.0 |
EVERROCKET/mobile-data | lib/jinja2/exceptions.py | 977 | 4428 | # -*- coding: utf-8 -*-
"""
jinja2.exceptions
~~~~~~~~~~~~~~~~~
Jinja exceptions.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
from jinja2._compat import imap, text_type, PY2, implements_to_string
class TemplateError(Exception):
    """Baseclass for all template errors."""

    if PY2:
        # On Python 2 the message is stored UTF-8 encoded so the
        # exception can be stringified safely; the property decodes it
        # back to unicode on access.
        def __init__(self, message=None):
            if message is not None:
                message = text_type(message).encode('utf-8')
            Exception.__init__(self, message)

        @property
        def message(self):
            if self.args:
                message = self.args[0]
                if message is not None:
                    # 'replace' avoids raising on undecodable bytes.
                    return message.decode('utf-8', 'replace')

        def __unicode__(self):
            return self.message or u''
    else:
        # On Python 3 strings are unicode already; store unchanged.
        def __init__(self, message=None):
            Exception.__init__(self, message)

        @property
        def message(self):
            if self.args:
                message = self.args[0]
                if message is not None:
                    return message
@implements_to_string
class TemplateNotFound(IOError, LookupError, TemplateError):
    """Raised if a template does not exist."""

    # Shadow the deprecated BaseException.message descriptor with a
    # plain class attribute so no deprecation warning is triggered.
    message = None

    def __init__(self, name, message=None):
        IOError.__init__(self)
        self.message = name if message is None else message
        self.name = name
        self.templates = [name]

    def __str__(self):
        return self.message
class TemplatesNotFound(TemplateNotFound):
    """Like :class:`TemplateNotFound` but raised if multiple templates
    are selected.  This is a subclass of :class:`TemplateNotFound`
    exception, so just catching the base exception will catch both.

    .. versionadded:: 2.2
    """

    def __init__(self, names=(), message=None):
        if message is None:
            joined = u', '.join(imap(text_type, names))
            message = u'none of the templates given were found: ' + joined
        last = names and names[-1] or None
        TemplateNotFound.__init__(self, last, message)
        self.templates = list(names)
@implements_to_string
class TemplateSyntaxError(TemplateError):
    """Raised to tell the user that there is a problem with the template."""

    def __init__(self, message, lineno, name=None, filename=None):
        TemplateError.__init__(self, message)
        self.lineno = lineno
        self.name = name
        self.filename = filename
        self.source = None
        # debug.translate_syntax_error flips this to True once it has
        # translated the error into a new traceback.
        self.translated = False

    def __str__(self):
        # Translated errors carry their context in the traceback, so
        # only the plain message is returned.
        if self.translated:
            return self.message

        location = 'line %d' % self.lineno
        template = self.filename or self.name
        if template:
            location = 'File "%s", %s' % (template, location)
        output = [self.message, ' ' + location]

        # When the template source is attached, quote the offending line.
        if self.source is not None:
            try:
                offending = self.source.splitlines()[self.lineno - 1]
            except IndexError:
                offending = None
            if offending:
                output.append(' ' + offending.strip())

        return u'\n'.join(output)
class TemplateAssertionError(TemplateSyntaxError):
    """Like a template syntax error, but covers cases where something in the
    template caused an error at compile time that wasn't necessarily caused
    by a syntax error.  However it's a direct subclass of
    :exc:`TemplateSyntaxError` and has the same attributes.
    """
class TemplateRuntimeError(TemplateError):
    """A generic runtime error in the template engine.  Under some
    situations Jinja may raise this exception.
    """
class UndefinedError(TemplateRuntimeError):
    """Raised if a template tries to operate on :class:`Undefined`."""
class SecurityError(TemplateRuntimeError):
    """Raised if a template tries to do something insecure if the
    sandbox is enabled.
    """
class FilterArgumentError(TemplateRuntimeError):
    """This error is raised if a filter was called with inappropriate
    arguments.
    """
| apache-2.0 |
petewarden/tensorflow | tensorflow/python/autograph/converters/call_trees_test.py | 14 | 6541 | # python3
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for call_trees module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import imp
from tensorflow.python.autograph.converters import call_trees
from tensorflow.python.autograph.converters import functions
from tensorflow.python.autograph.core import converter_testing
from tensorflow.python.platform import test
class MockConvertedCall(object):
  """Test double for autograph's converted_call.

  Records the (args, kwargs) pair of every invocation, then calls the
  target function directly so the test still observes its result.
  """

  def __init__(self):
    self.calls = []

  def __call__(self, f, args, kwargs, caller_fn_scope=None, options=None):
    del caller_fn_scope, options  # Accepted for API parity; unused.
    self.calls.append((args, kwargs))
    return f(*args, **(kwargs or {}))
class CallTreesTest(converter_testing.TestCase):
  """Tests for the call_trees converter.

  Each test transforms a small function with the functions and
  call_trees converters, overriding ag.converted_call with a
  MockConvertedCall, then checks both the computed result and the
  exact (args, kwargs) pairs routed through converted_call.
  """

  def _transform_with_mock(self, f):
    # Returns (transformed_function, recording_mock).
    mock = MockConvertedCall()
    tr = self.transform(
        f, (functions, call_trees),
        ag_overrides={'converted_call': mock})
    return tr, mock

  def test_function_no_args(self):

    def f(f):
      return f() + 20

    tr, mock = self._transform_with_mock(f)

    self.assertEqual(tr(lambda: 1), 21)
    self.assertListEqual(mock.calls, [((), None)])

  def test_function_with_expression_in_argument(self):

    def f(f, g):
      return f(g() + 20) + 4000

    tr, mock = self._transform_with_mock(f)

    self.assertEqual(tr(lambda x: x + 300, lambda: 1), 4321)
    # Both the inner g() and the outer f(...) go through converted_call.
    self.assertListEqual(mock.calls, [
        ((), None),
        ((21,), None),
    ])

  def test_function_with_call_in_argument(self):

    def f(f, g):
      return f(g()) + 300

    tr, mock = self._transform_with_mock(f)

    self.assertEqual(tr(lambda x: x + 20, lambda: 1), 321)
    self.assertListEqual(mock.calls, [
        ((), None),
        ((1,), None),
    ])

  def test_function_chaining(self):

    def get_one():
      return 1

    def f():
      return get_one().__add__(20)

    tr, mock = self._transform_with_mock(f)

    self.assertEqual(tr(), 21)
    # The chained method call is converted as well.
    self.assertListEqual(mock.calls, [
        ((), None),
        ((20,), None),
    ])

  def test_function_with_single_arg(self):

    def f(f, a):
      return f(a) + 20

    tr, mock = self._transform_with_mock(f)

    self.assertEqual(tr(lambda a: a, 1), 21)
    self.assertListEqual(mock.calls, [((1,), None)])

  def test_function_with_args_only(self):

    def f(f, a, b):
      return f(a, b) + 300

    tr, mock = self._transform_with_mock(f)

    self.assertEqual(tr(lambda a, b: a + b, 1, 20), 321)
    self.assertListEqual(mock.calls, [((1, 20), None)])

  def test_function_with_kwarg(self):

    def f(f, a, b):
      return f(a, c=b) + 300

    tr, mock = self._transform_with_mock(f)

    self.assertEqual(tr(lambda a, c: a + c, 1, 20), 321)
    self.assertListEqual(mock.calls, [((1,), {'c': 20})])

  def test_function_with_kwargs_starargs(self):

    def f(f, a, *args, **kwargs):
      return f(a, *args, **kwargs) + 5

    tr, mock = self._transform_with_mock(f)

    self.assertEqual(
        tr(lambda *args, **kwargs: 7, 1, *[2, 3], **{
            'b': 4,
            'c': 5
        }), 12)
    self.assertListEqual(mock.calls, [((1, 2, 3), {'b': 4, 'c': 5})])

  def test_function_with_starargs_only(self):

    def g(*args):
      return sum(args)

    def f():
      args = [1, 20, 300]
      return g(*args) + 4000

    tr, mock = self._transform_with_mock(f)

    self.assertEqual(tr(), 4321)
    self.assertListEqual(mock.calls, [((1, 20, 300), None)])

  def test_function_with_starargs_mixed(self):

    def g(a, b, c, d):
      return a * 1000 + b * 100 + c * 10 + d

    def f():
      args1 = (1,)
      args2 = [3]
      return g(*args1, 2, *args2, 4)

    tr, mock = self._transform_with_mock(f)

    self.assertEqual(tr(), 1234)
    # Interleaved star-args are flattened into a single tuple.
    self.assertListEqual(mock.calls, [((1, 2, 3, 4), None)])

  def test_function_with_kwargs_keywords(self):

    def f(f, a, b, **kwargs):
      return f(a, b=b, **kwargs) + 5

    tr, mock = self._transform_with_mock(f)

    self.assertEqual(
        tr(lambda *args, **kwargs: 7, 1, 2, **{'c': 3}), 12)
    self.assertListEqual(mock.calls, [((1,), {'b': 2, 'c': 3})])

  def test_function_with_multiple_kwargs(self):

    def f(f, a, b, c, kwargs1, kwargs2):
      return f(a, b=b, **kwargs1, c=c, **kwargs2) + 5

    tr, mock = self._transform_with_mock(f)

    self.assertEqual(
        tr(lambda *args, **kwargs: 7, 1, 2, 3, {'d': 4}, {'e': 5}), 12)
    # Multiple ** expansions are merged into one kwargs dict.
    self.assertListEqual(mock.calls, [((1,), {
        'b': 2,
        'c': 3,
        'd': 4,
        'e': 5
    })])

  def test_function_with_call_in_lambda_argument(self):

    def h(l, a):
      return l(a) + 4000

    def g(a, *args):
      return a + sum(args)

    def f(h, g, a, *args):
      return h(lambda x: g(x, *args), a)

    tr, _ = self._transform_with_mock(f)

    self.assertEqual(tr(h, g, 1, *(20, 300)), 4321)

  def test_debugger_set_trace(self):
    tracking_list = []

    # Fake pdb module: set_trace must be called exactly once, directly.
    pdb = imp.new_module('fake_pdb')
    pdb.set_trace = lambda: tracking_list.append(1)

    def f():
      return pdb.set_trace()

    tr, _ = self._transform_with_mock(f)

    tr()
    self.assertListEqual(tracking_list, [1])

  def test_class_method(self):

    class TestClass(object):

      def other_method(self, x):
        return x + 20

      def test_method(self, a):
        return self.other_method(a) + 300

    tc = TestClass()
    tr, mock = self._transform_with_mock(TestClass.test_method)

    self.assertEqual(321, tr(tc, 1))
    self.assertListEqual(mock.calls, [((1,), None)])

  def test_object_method(self):

    class TestClass(object):

      def other_method(self, x):
        return x + 20

      def test_method(self, a):
        return self.other_method(a) + 300

    tc = TestClass()
    tr, mock = self._transform_with_mock(tc.test_method)

    self.assertEqual(321, tr(tc, 1))
    self.assertListEqual(mock.calls, [((1,), None)])
# Standard TensorFlow test entry point so the file can be run directly.
if __name__ == '__main__':
  test.main()
| apache-2.0 |
huanchenz/STX-h-store | third_party/python/boto/s3/bucket.py | 9 | 48882 | # Copyright (c) 2006-2010 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import boto
from boto import handler
from boto.provider import Provider
from boto.resultset import ResultSet
from boto.s3.acl import ACL, Policy, CannedACLStrings, Grant
from boto.s3.key import Key
from boto.s3.prefix import Prefix
from boto.s3.deletemarker import DeleteMarker
from boto.s3.user import User
from boto.s3.multipart import MultiPartUpload
from boto.s3.multipart import CompleteMultiPartUpload
from boto.s3.bucketlistresultset import BucketListResultSet
from boto.s3.bucketlistresultset import VersionedBucketListResultSet
from boto.s3.bucketlistresultset import MultiPartUploadListResultSet
import boto.jsonresponse
import boto.utils
import xml.sax
import urllib
import re
from collections import defaultdict
# as per http://goo.gl/BDuud (02/19/2011)
class S3WebsiteEndpointTranslate:
    """Maps an S3 bucket region to its website-hosting endpoint prefix
    (as per http://goo.gl/BDuud, 02/19/2011).  Unknown regions fall
    back to the US Standard endpoint."""

    trans_region = defaultdict(lambda: 's3-website-us-east-1')
    trans_region.update({
        'EU': 's3-website-eu-west-1',
        'us-west-1': 's3-website-us-west-1',
        'ap-northeast-1': 's3-website-ap-northeast-1',
        'ap-southeast-1': 's3-website-ap-southeast-1',
    })

    @classmethod
    def translate_region(cls, reg):
        """Return the website endpoint prefix for region *reg*."""
        return cls.trans_region[reg]
# Permission names accepted in S3 ACL grants.
S3Permissions = ['READ', 'WRITE', 'READ_ACP', 'WRITE_ACP', 'FULL_CONTROL']
class Bucket(object):
    """Represents an S3 bucket and the operations available on it
    (key lookup/listing, deletion, logging, versioning, website
    configuration, ...)."""

    # XML request body for enabling server access logging
    # (%s placeholders: target bucket, target prefix).
    BucketLoggingBody = """<?xml version="1.0" encoding="UTF-8"?>
<BucketLoggingStatus xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<LoggingEnabled>
<TargetBucket>%s</TargetBucket>
<TargetPrefix>%s</TargetPrefix>
</LoggingEnabled>
</BucketLoggingStatus>"""

    # XML request body for disabling logging.
    EmptyBucketLoggingBody = """<?xml version="1.0" encoding="UTF-8"?>
<BucketLoggingStatus xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
</BucketLoggingStatus>"""

    # ACL group grantee that must have write access for log delivery.
    LoggingGroup = 'http://acs.amazonaws.com/groups/s3/LogDelivery'

    # XML request body for Requester Pays configuration (%s: payer).
    BucketPaymentBody = """<?xml version="1.0" encoding="UTF-8"?>
<RequestPaymentConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Payer>%s</Payer>
</RequestPaymentConfiguration>"""

    # XML request body for versioning config (%s: status, MFA-delete).
    VersioningBody = """<?xml version="1.0" encoding="UTF-8"?>
<VersioningConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Status>%s</Status>
<MfaDelete>%s</MfaDelete>
</VersioningConfiguration>"""

    # XML request body for website configuration (%s: index document
    # suffix, optional error-document fragment).
    WebsiteBody = """<?xml version="1.0" encoding="UTF-8"?>
<WebsiteConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<IndexDocument><Suffix>%s</Suffix></IndexDocument>
%s
</WebsiteConfiguration>"""

    # Fragment spliced into WebsiteBody when an error key is supplied.
    WebsiteErrorFragment = """<ErrorDocument><Key>%s</Key></ErrorDocument>"""

    # Regexes for parsing the versioning-configuration response.
    VersionRE = '<Status>([A-Za-z]+)</Status>'
    MFADeleteRE = '<MfaDelete>([A-Za-z]+)</MfaDelete>'
def __init__(self, connection=None, name=None, key_class=Key):
    """Create a handle for an S3 bucket; no request is sent to S3.

    :param connection: The S3Connection used for subsequent requests.
    :param name: The bucket's name.
    :param key_class: Class instantiated for keys (see set_key_class).
    """
    self.name = name
    self.connection = connection
    self.key_class = key_class
def __repr__(self):
    # Debugging representation including the bucket name.
    return '<Bucket: %s>' % self.name
def __iter__(self):
    """Iterating over a bucket iterates over all of its keys, with the
    result set handling S3 listing pagination transparently."""
    listing = BucketListResultSet(self)
    return iter(listing)
def __contains__(self, key_name):
    """Support ``key_name in bucket`` membership tests (performs a
    HEAD request via :meth:`get_key`)."""
    # Idiom fix: direct identity test instead of ``not (... is None)``.
    return self.get_key(key_name) is not None
def startElement(self, name, attrs, connection):
    # SAX parsing hook; nothing to do when a child element opens.
    return None
def endElement(self, name, value, connection):
    """SAX parsing hook: store Name/CreationDate under their Pythonic
    attribute names and any other element verbatim."""
    attribute = {'Name': 'name', 'CreationDate': 'creation_date'}.get(name, name)
    setattr(self, attribute, value)
def set_key_class(self, key_class):
    """
    Override the class used for keys in this bucket.

    By default keys are :class:`boto.s3.key.Key`.  After calling this,
    ``bucket.new_key()`` and key listings produce instances of the
    given subclass instead.

    :type key_class: class
    :param key_class: A subclass of Key
    """
    self.key_class = key_class
def lookup(self, key_name, headers=None):
    """
    Deprecated alias for :meth:`get_key`.

    :type key_name: string
    :param key_name: The name of the key to retrieve

    :rtype: :class:`boto.s3.key.Key`
    :returns: A Key object from this bucket.
    """
    return self.get_key(key_name, headers=headers)
def get_key(self, key_name, headers=None, version_id=None):
    """
    Check to see if a particular key exists within the bucket.  This
    method uses a HEAD request to check for the existance of the key.

    :type key_name: string
    :param key_name: The name of the key to retrieve

    :type version_id: string
    :param version_id: Specific object version to HEAD (optional).

    :rtype: :class:`boto.s3.key.Key`
    :returns: A Key object populated from the response headers, or
        None if the key does not exist (HTTP 404).
    """
    if version_id:
        query_args = 'versionId=%s' % version_id
    else:
        query_args = None
    response = self.connection.make_request('HEAD', self.name, key_name,
                                            headers=headers,
                                            query_args=query_args)
    # Allow any success status (2xx) - for example this lets us
    # support Range gets, which return status 206:
    if response.status/100 == 2:
        response.read()
        k = self.key_class(self)
        provider = self.connection.provider
        k.metadata = boto.utils.get_aws_metadata(response.msg, provider)
        k.etag = response.getheader('etag')
        k.content_type = response.getheader('content-type')
        k.content_encoding = response.getheader('content-encoding')
        k.last_modified = response.getheader('last-modified')
        # the following machinations are a workaround to the fact that
        # apache/fastcgi omits the content-length header on HEAD
        # requests when the content-length is zero.
        # See http://goo.gl/0Tdax for more details.
        clen = response.getheader('content-length')
        if clen:
            k.size = int(response.getheader('content-length'))
        else:
            k.size = 0
        k.cache_control = response.getheader('cache-control')
        k.name = key_name
        # Populate version_id/source_version_id from response headers.
        k.handle_version_headers(response)
        return k
    else:
        if response.status == 404:
            # Missing key is an expected outcome, not an error.
            response.read()
            return None
        else:
            raise self.connection.provider.storage_response_error(
                response.status, response.reason, '')
def list(self, prefix='', delimiter='', marker='', headers=None):
    """
    Iterate over the keys in this bucket.

    Returns a :class:`boto.s3.bucketlistresultset.BucketListResultSet`
    which transparently performs the paged List Objects requests as
    you iterate.  Note that Key objects obtained this way carry only
    the metadata present in the listing XML (no Content-Type or user
    metadata); HEAD the key if you need the full set.

    :type prefix: string
    :param prefix: Restrict the listing to keys beginning with this
        string (e.g. '/foo/').

    :type delimiter: string
    :param delimiter: Roll up keys that share the same substring
        between the prefix and the first occurrence of the delimiter
        into CommonPrefixes entries (see the S3 documentation).

    :type marker: string
    :param marker: Where to resume within the result set.

    :rtype: :class:`boto.s3.bucketlistresultset.BucketListResultSet`
    :return: An iterator over the bucket's keys that handles paging.
    """
    return BucketListResultSet(self, prefix, delimiter, marker, headers)
def list_versions(self, prefix='', delimiter='', key_marker='',
                  version_id_marker='', headers=None):
    """
    Iterate over all object versions in this bucket.

    Returns a VersionedBucketListResultSet which transparently pages
    through the GET Bucket versions responses as you iterate.

    :type prefix: string
    :param prefix: Restrict the listing to keys beginning with this
        string.

    :type delimiter: string
    :param delimiter: Roll up keys that share the same substring
        between the prefix and the first occurrence of the delimiter
        (see the S3 documentation).

    :type key_marker: string
    :param key_marker: Key at which to resume the listing.

    :rtype: :class:`boto.s3.bucketlistresultset.BucketListResultSet`
    :return: An iterator over the versions that handles paging.
    """
    return VersionedBucketListResultSet(self, prefix, delimiter,
                                        key_marker, version_id_marker,
                                        headers)
def list_multipart_uploads(self, key_marker='',
                           upload_id_marker='',
                           headers=None):
    """
    Iterate over the in-progress multipart uploads in this bucket.

    Returns a MultiPartUploadListResultSet which transparently pages
    through the List Multipart Uploads responses as you iterate.

    :type key_marker: string
    :param key_marker: Key after which the listing should begin.

    :type upload_id_marker: string
    :param upload_id_marker: Upload id after which the listing should
        begin (meaningful together with key_marker).

    :rtype: :class:`boto.s3.bucketlistresultset.BucketListResultSet`
    :return: An iterator over the uploads that handles paging.
    """
    return MultiPartUploadListResultSet(self, key_marker,
                                        upload_id_marker, headers)
def _get_all(self, element_map, initial_query_string='',
             headers=None, **params):
    """Shared helper: issue a GET on the bucket and SAX-parse the XML.

    :param element_map: (tag, class) pairs handed to ResultSet so the
        parser knows which class to instantiate for which element.
    :param initial_query_string: Sub-resource selector (e.g.
        'versions' or 'uploads') placed before the encoded params.
    :param params: Listing parameters; underscores become dashes to
        match the S3 API (e.g. max_keys -> max-keys).
    """
    l = []
    for k, v in params.items():
        k = k.replace('_', '-')
        if k == 'maxkeys':
            k = 'max-keys'
        if isinstance(v, unicode):
            # Python 2: encode text values before URL-quoting.
            v = v.encode('utf-8')
        if v is not None and v != '':
            l.append('%s=%s' % (urllib.quote(k), urllib.quote(str(v))))
    if len(l):
        s = initial_query_string + '&' + '&'.join(l)
    else:
        s = initial_query_string
    response = self.connection.make_request('GET', self.name,
                                            headers=headers, query_args=s)
    body = response.read()
    boto.log.debug(body)
    if response.status == 200:
        rs = ResultSet(element_map)
        h = handler.XmlHandler(rs, self)
        xml.sax.parseString(body, h)
        return rs
    else:
        raise self.connection.provider.storage_response_error(
            response.status, response.reason, body)
def get_all_keys(self, headers=None, **params):
    """
    Perform a single, non-paging List Objects request and return the
    parsed results.  Use :meth:`list` if you want paging handled for
    you.

    Supported keyword parameters: ``max_keys`` (int), ``prefix``
    (string), ``marker`` (string) and ``delimiter`` (string), matching
    the S3 List Objects request.

    :rtype: ResultSet
    :return: The keys (and rolled-up common prefixes) S3 returned.
    """
    element_map = [('Contents', self.key_class),
                   ('CommonPrefixes', Prefix)]
    return self._get_all(element_map, '', headers, **params)
def get_all_versions(self, headers=None, **params):
    """
    Perform a single, non-paging GET Bucket versions request and
    return the parsed results.  Use :meth:`list_versions` if you want
    paging handled for you.

    Supported keyword parameters: ``max_keys`` (int), ``prefix``
    (string), ``key_marker`` (string), ``version_id_marker`` (string)
    and ``delimiter`` (string), matching the S3 API.

    :rtype: ResultSet
    :return: The versions, delete markers and rolled-up common
        prefixes S3 returned.
    """
    element_map = [('Version', self.key_class),
                   ('CommonPrefixes', Prefix),
                   ('DeleteMarker', DeleteMarker)]
    return self._get_all(element_map, 'versions', headers, **params)
def get_all_multipart_uploads(self, headers=None, **params):
    """
    Perform a single, non-paging List Multipart Uploads request and
    return the parsed results.  Use :meth:`list_multipart_uploads`
    for automatic paging.

    Supported keyword parameters: ``max_uploads`` (int, default 1000),
    ``key_marker`` (string) and ``upload_id_marker`` (string); see the
    S3 documentation for their combined semantics (upload_id_marker is
    ignored unless key_marker is given).

    :rtype: ResultSet
    :return: The in-progress uploads S3 returned.
    """
    element_map = [('Upload', MultiPartUpload)]
    return self._get_all(element_map, 'uploads', headers, **params)
    def new_key(self, key_name=None):
        """
        Creates a new key

        :type key_name: string
        :param key_name: The name of the key to create

        :rtype: :class:`boto.s3.key.Key` or subclass
        :returns: An instance of the newly created key object
        """
        # Purely local construction -- no request is made to S3 here.
        # Instantiating via self.key_class lets Bucket subclasses plug in
        # their own Key implementation.
        return self.key_class(self, key_name)
def generate_url(self, expires_in, method='GET', headers=None,
force_http=False, response_headers=None):
return self.connection.generate_url(expires_in, method, self.name,
headers=headers,
force_http=force_http,
response_headers=response_headers)
    def delete_key(self, key_name, headers=None,
                   version_id=None, mfa_token=None):
        """
        Deletes a key from the bucket.  If a version_id is provided,
        only that version of the key will be deleted.

        :type key_name: string
        :param key_name: The key name to delete

        :type version_id: string
        :param version_id: The version ID (optional)

        :type mfa_token: tuple or list of strings
        :param mfa_token: A tuple or list consisting of the serial number
                          from the MFA device and the current value of
                          the six-digit token associated with the device.
                          This value is required anytime you are
                          deleting versioned objects from a bucket
                          that has the MFADelete option on the bucket.
        """
        provider = self.connection.provider
        # Target a single version when asked; otherwise send no query args.
        if version_id:
            query_args = 'versionId=%s' % version_id
        else:
            query_args = None
        if mfa_token:
            # NOTE(review): this writes into a caller-supplied headers dict
            # when one is passed in -- confirm callers do not reuse it.
            if not headers:
                headers = {}
            headers[provider.mfa_header] = ' '.join(mfa_token)
        response = self.connection.make_request('DELETE', self.name, key_name,
                                                headers=headers,
                                                query_args=query_args)
        body = response.read()
        # Anything other than 204 No Content is surfaced as a
        # provider-specific storage error.
        if response.status != 204:
            raise provider.storage_response_error(response.status,
                                                  response.reason, body)
    def copy_key(self, new_key_name, src_bucket_name,
                 src_key_name, metadata=None, src_version_id=None,
                 storage_class='STANDARD', preserve_acl=False):
        """
        Create a new key in the bucket by copying another existing key.

        :type new_key_name: string
        :param new_key_name: The name of the new key

        :type src_bucket_name: string
        :param src_bucket_name: The name of the source bucket

        :type src_key_name: string
        :param src_key_name: The name of the source key

        :type src_version_id: string
        :param src_version_id: The version id for the key.  This param
                               is optional.  If not specified, the newest
                               version of the key will be copied.

        :type metadata: dict
        :param metadata: Metadata to be associated with new key.
                         If metadata is supplied, it will replace the
                         metadata of the source key being copied.
                         If no metadata is supplied, the source key's
                         metadata will be copied to the new key.

        :type storage_class: string
        :param storage_class: The storage class of the new key.
                              By default, the new key will use the
                              standard storage class.  Possible values are:
                              STANDARD | REDUCED_REDUNDANCY

        :type preserve_acl: bool
        :param preserve_acl: If True, the ACL from the source key
                             will be copied to the destination
                             key.  If False, the destination key
                             will have the default ACL.
                             Note that preserving the ACL in the
                             new key object will require two
                             additional API calls to S3, one to
                             retrieve the current ACL and one to
                             set that ACL on the new object.  If
                             you don't care about the ACL, a value
                             of False will be significantly more
                             efficient.

        :rtype: :class:`boto.s3.key.Key` or subclass
        :returns: An instance of the newly created key object
        """
        src_key_name = boto.utils.get_utf8_value(src_key_name)
        if preserve_acl:
            # Fetch the source ACL up front so it can be re-applied to the
            # destination once the copy succeeds.
            if self.name == src_bucket_name:
                src_bucket = self
            else:
                src_bucket = self.connection.get_bucket(src_bucket_name)
            acl = src_bucket.get_xml_acl(src_key_name)
        # The copy source is conveyed as a "bucket/key" header value.
        src = '%s/%s' % (src_bucket_name, urllib.quote(src_key_name))
        if src_version_id:
            src += '?version_id=%s' % src_version_id
        provider = self.connection.provider
        headers = {provider.copy_source_header : str(src)}
        if storage_class != 'STANDARD':
            headers[provider.storage_class_header] = storage_class
        # REPLACE swaps in the supplied metadata; COPY carries over the
        # source key's metadata untouched.
        if metadata:
            headers[provider.metadata_directive_header] = 'REPLACE'
            headers = boto.utils.merge_meta(headers, metadata, provider)
        else:
            headers[provider.metadata_directive_header] = 'COPY'
        response = self.connection.make_request('PUT', self.name, new_key_name,
                                                headers=headers)
        body = response.read()
        if response.status == 200:
            key = self.new_key(new_key_name)
            h = handler.XmlHandler(key, self)
            xml.sax.parseString(body, h)
            # A copy can fail even with an HTTP 200; the failure shows up
            # as an <Error> element parsed onto the key object.
            if hasattr(key, 'Error'):
                raise provider.storage_copy_error(key.Code, key.Message, body)
            key.handle_version_headers(response)
            if preserve_acl:
                self.set_xml_acl(acl, new_key_name)
            return key
        else:
            raise provider.storage_response_error(response.status, response.reason, body)
def set_canned_acl(self, acl_str, key_name='', headers=None,
version_id=None):
assert acl_str in CannedACLStrings
if headers:
headers[self.connection.provider.acl_header] = acl_str
else:
headers={self.connection.provider.acl_header: acl_str}
query_args='acl'
if version_id:
query_args += '&versionId=%s' % version_id
response = self.connection.make_request('PUT', self.name, key_name,
headers=headers, query_args=query_args)
body = response.read()
if response.status != 200:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def get_xml_acl(self, key_name='', headers=None, version_id=None):
query_args = 'acl'
if version_id:
query_args += '&versionId=%s' % version_id
response = self.connection.make_request('GET', self.name, key_name,
query_args=query_args,
headers=headers)
body = response.read()
if response.status != 200:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
return body
def set_xml_acl(self, acl_str, key_name='', headers=None, version_id=None):
query_args = 'acl'
if version_id:
query_args += '&versionId=%s' % version_id
response = self.connection.make_request('PUT', self.name, key_name,
data=acl_str.encode('ISO-8859-1'),
query_args=query_args,
headers=headers)
body = response.read()
if response.status != 200:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def set_acl(self, acl_or_str, key_name='', headers=None, version_id=None):
if isinstance(acl_or_str, Policy):
self.set_xml_acl(acl_or_str.to_xml(), key_name,
headers, version_id)
else:
self.set_canned_acl(acl_or_str, key_name,
headers, version_id)
def get_acl(self, key_name='', headers=None, version_id=None):
query_args = 'acl'
if version_id:
query_args += '&versionId=%s' % version_id
response = self.connection.make_request('GET', self.name, key_name,
query_args=query_args,
headers=headers)
body = response.read()
if response.status == 200:
policy = Policy(self)
h = handler.XmlHandler(policy, self)
xml.sax.parseString(body, h)
return policy
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def make_public(self, recursive=False, headers=None):
self.set_canned_acl('public-read', headers=headers)
if recursive:
for key in self:
self.set_canned_acl('public-read', key.name, headers=headers)
def add_email_grant(self, permission, email_address,
recursive=False, headers=None):
"""
Convenience method that provides a quick way to add an email grant
to a bucket. This method retrieves the current ACL, creates a new
grant based on the parameters passed in, adds that grant to the ACL
and then PUT's the new ACL back to S3.
:type permission: string
:param permission: The permission being granted. Should be one of:
(READ, WRITE, READ_ACP, WRITE_ACP, FULL_CONTROL).
:type email_address: string
:param email_address: The email address associated with the AWS
account your are granting the permission to.
:type recursive: boolean
:param recursive: A boolean value to controls whether the command
will apply the grant to all keys within the bucket
or not. The default value is False. By passing a
True value, the call will iterate through all keys
in the bucket and apply the same grant to each key.
CAUTION: If you have a lot of keys, this could take
a long time!
"""
if permission not in S3Permissions:
raise self.connection.provider.storage_permissions_error(
'Unknown Permission: %s' % permission)
policy = self.get_acl(headers=headers)
policy.acl.add_email_grant(permission, email_address)
self.set_acl(policy, headers=headers)
if recursive:
for key in self:
key.add_email_grant(permission, email_address, headers=headers)
def add_user_grant(self, permission, user_id, recursive=False,
headers=None, display_name=None):
"""
Convenience method that provides a quick way to add a canonical
user grant to a bucket. This method retrieves the current ACL,
creates a new grant based on the parameters passed in, adds that
grant to the ACL and then PUT's the new ACL back to S3.
:type permission: string
:param permission: The permission being granted. Should be one of:
(READ, WRITE, READ_ACP, WRITE_ACP, FULL_CONTROL).
:type user_id: string
:param user_id: The canonical user id associated with the AWS
account your are granting the permission to.
:type recursive: boolean
:param recursive: A boolean value to controls whether the command
will apply the grant to all keys within the bucket
or not. The default value is False. By passing a
True value, the call will iterate through all keys
in the bucket and apply the same grant to each key.
CAUTION: If you have a lot of keys, this could take
a long time!
:type display_name: string
:param display_name: An option string containing the user's
Display Name. Only required on Walrus.
"""
if permission not in S3Permissions:
raise self.connection.provider.storage_permissions_error(
'Unknown Permission: %s' % permission)
policy = self.get_acl(headers=headers)
policy.acl.add_user_grant(permission, user_id,
display_name=display_name)
self.set_acl(policy, headers=headers)
if recursive:
for key in self:
key.add_user_grant(permission, user_id, headers=headers,
display_name=display_name)
def list_grants(self, headers=None):
policy = self.get_acl(headers=headers)
return policy.acl.grants
def get_location(self):
"""
Returns the LocationConstraint for the bucket.
:rtype: str
:return: The LocationConstraint for the bucket or the empty
string if no constraint was specified when bucket
was created.
"""
response = self.connection.make_request('GET', self.name,
query_args='location')
body = response.read()
if response.status == 200:
rs = ResultSet(self)
h = handler.XmlHandler(rs, self)
xml.sax.parseString(body, h)
return rs.LocationConstraint
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def enable_logging(self, target_bucket, target_prefix='', headers=None):
if isinstance(target_bucket, Bucket):
target_bucket = target_bucket.name
body = self.BucketLoggingBody % (target_bucket, target_prefix)
response = self.connection.make_request('PUT', self.name, data=body,
query_args='logging', headers=headers)
body = response.read()
if response.status == 200:
return True
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def disable_logging(self, headers=None):
body = self.EmptyBucketLoggingBody
response = self.connection.make_request('PUT', self.name, data=body,
query_args='logging', headers=headers)
body = response.read()
if response.status == 200:
return True
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def get_logging_status(self, headers=None):
response = self.connection.make_request('GET', self.name,
query_args='logging', headers=headers)
body = response.read()
if response.status == 200:
return body
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def set_as_logging_target(self, headers=None):
policy = self.get_acl(headers=headers)
g1 = Grant(permission='WRITE', type='Group', uri=self.LoggingGroup)
g2 = Grant(permission='READ_ACP', type='Group', uri=self.LoggingGroup)
policy.acl.add_grant(g1)
policy.acl.add_grant(g2)
self.set_acl(policy, headers=headers)
def get_request_payment(self, headers=None):
response = self.connection.make_request('GET', self.name,
query_args='requestPayment', headers=headers)
body = response.read()
if response.status == 200:
return body
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def set_request_payment(self, payer='BucketOwner', headers=None):
body = self.BucketPaymentBody % payer
response = self.connection.make_request('PUT', self.name, data=body,
query_args='requestPayment', headers=headers)
body = response.read()
if response.status == 200:
return True
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
    def configure_versioning(self, versioning, mfa_delete=False,
                             mfa_token=None, headers=None):
        """
        Configure versioning for this bucket.

        ..note:: This feature is currently in beta release and is available
                 only in the Northern California region.

        :type versioning: bool
        :param versioning: A boolean indicating whether version is
                           enabled (True) or disabled (False).

        :type mfa_delete: bool
        :param mfa_delete: A boolean indicating whether the Multi-Factor
                           Authentication Delete feature is enabled (True)
                           or disabled (False).  If mfa_delete is enabled
                           then all Delete operations will require the
                           token from your MFA device to be passed in
                           the request.

        :type mfa_token: tuple or list of strings
        :param mfa_token: A tuple or list consisting of the serial number
                          from the MFA device and the current value of
                          the six-digit token associated with the device.
                          This value is required when you are changing
                          the status of the MfaDelete property of
                          the bucket.
        """
        # Map the boolean flags onto the literal strings the versioning
        # document expects.  Note the asymmetry: versioning is reported as
        # Enabled/Suspended while MfaDelete uses Enabled/Disabled.
        if versioning:
            ver = 'Enabled'
        else:
            ver = 'Suspended'
        if mfa_delete:
            mfa = 'Enabled'
        else:
            mfa = 'Disabled'
        body = self.VersioningBody % (ver, mfa)
        if mfa_token:
            # NOTE(review): this writes into a caller-supplied headers dict
            # when one is passed in -- confirm callers do not reuse it.
            if not headers:
                headers = {}
            provider = self.connection.provider
            headers[provider.mfa_header] = ' '.join(mfa_token)
        response = self.connection.make_request('PUT', self.name, data=body,
                query_args='versioning', headers=headers)
        body = response.read()
        if response.status == 200:
            return True
        else:
            raise self.connection.provider.storage_response_error(
                response.status, response.reason, body)
def get_versioning_status(self, headers=None):
"""
Returns the current status of versioning on the bucket.
:rtype: dict
:returns: A dictionary containing a key named 'Versioning'
that can have a value of either Enabled, Disabled,
or Suspended. Also, if MFADelete has ever been enabled
on the bucket, the dictionary will contain a key
named 'MFADelete' which will have a value of either
Enabled or Suspended.
"""
response = self.connection.make_request('GET', self.name,
query_args='versioning', headers=headers)
body = response.read()
boto.log.debug(body)
if response.status == 200:
d = {}
ver = re.search(self.VersionRE, body)
if ver:
d['Versioning'] = ver.group(1)
mfa = re.search(self.MFADeleteRE, body)
if mfa:
d['MfaDelete'] = mfa.group(1)
return d
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def configure_website(self, suffix, error_key='', headers=None):
"""
Configure this bucket to act as a website
:type suffix: str
:param suffix: Suffix that is appended to a request that is for a
"directory" on the website endpoint (e.g. if the suffix
is index.html and you make a request to
samplebucket/images/ the data that is returned will
be for the object with the key name images/index.html).
The suffix must not be empty and must not include a
slash character.
:type error_key: str
:param error_key: The object key name to use when a 4XX class
error occurs. This is optional.
"""
if error_key:
error_frag = self.WebsiteErrorFragment % error_key
else:
error_frag = ''
body = self.WebsiteBody % (suffix, error_frag)
response = self.connection.make_request('PUT', self.name, data=body,
query_args='website',
headers=headers)
body = response.read()
if response.status == 200:
return True
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def get_website_configuration(self, headers=None):
"""
Returns the current status of website configuration on the bucket.
:rtype: dict
:returns: A dictionary containing a Python representation
of the XML response from S3. The overall structure is:
* WebsiteConfiguration
* IndexDocument
* Suffix : suffix that is appended to request that
is for a "directory" on the website endpoint
* ErrorDocument
* Key : name of object to serve when an error occurs
"""
response = self.connection.make_request('GET', self.name,
query_args='website', headers=headers)
body = response.read()
boto.log.debug(body)
if response.status == 200:
e = boto.jsonresponse.Element()
h = boto.jsonresponse.XmlHandler(e, None)
h.parse(body)
return e
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def delete_website_configuration(self, headers=None):
"""
Removes all website configuration from the bucket.
"""
response = self.connection.make_request('DELETE', self.name,
query_args='website', headers=headers)
body = response.read()
boto.log.debug(body)
if response.status == 204:
return True
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def get_website_endpoint(self):
"""
Returns the fully qualified hostname to use is you want to access this
bucket as a website. This doesn't validate whether the bucket has
been correctly configured as a website or not.
"""
l = [self.name]
l.append(S3WebsiteEndpointTranslate.translate_region(self.get_location()))
l.append('.'.join(self.connection.host.split('.')[-2:]))
return '.'.join(l)
def get_policy(self, headers=None):
response = self.connection.make_request('GET', self.name,
query_args='policy', headers=headers)
body = response.read()
if response.status == 200:
return body
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def set_policy(self, policy, headers=None):
response = self.connection.make_request('PUT', self.name,
data=policy,
query_args='policy',
headers=headers)
body = response.read()
if response.status >= 200 and response.status <= 204:
return True
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
    def initiate_multipart_upload(self, key_name, headers=None,
                                  reduced_redundancy=False, metadata=None):
        """
        Start a multipart upload operation.

        :type key_name: string
        :param key_name: The name of the key that will ultimately result from
                         this multipart upload operation.  This will be exactly
                         as the key appears in the bucket after the upload
                         process has been completed.

        :type headers: dict
        :param headers: Additional HTTP headers to send and store with the
                        resulting key in S3.

        :type reduced_redundancy: boolean
        :param reduced_redundancy: In multipart uploads, the storage class is
                                   specified when initiating the upload,
                                   not when uploading individual parts.  So
                                   if you want the resulting key to use the
                                   reduced redundancy storage class set this
                                   flag when you initiate the upload.

        :type metadata: dict
        :param metadata: Any metadata that you would like to set on the key
                         that results from the multipart upload.

        :rtype: MultiPartUpload
        :returns: The initiated upload, parsed from the S3 response.
        """
        query_args = 'uploads'
        if headers is None:
            headers = {}
        if reduced_redundancy:
            storage_class_header = self.connection.provider.storage_class_header
            if storage_class_header:
                headers[storage_class_header] = 'REDUCED_REDUNDANCY'
            # TODO: what if the provider doesn't support reduced redundancy?
            # (see boto.s3.key.Key.set_contents_from_file)
        if metadata is None:
            metadata = {}
        # Fold user metadata into the request headers using the
        # provider-specific metadata prefix.
        headers = boto.utils.merge_meta(headers, metadata,
                                        self.connection.provider)
        response = self.connection.make_request('POST', self.name, key_name,
                                                query_args=query_args,
                                                headers=headers)
        body = response.read()
        boto.log.debug(body)
        if response.status == 200:
            # Parse the InitiateMultipartUploadResult XML into a
            # MultiPartUpload object carrying the upload id.
            resp = MultiPartUpload(self)
            h = handler.XmlHandler(resp, self)
            xml.sax.parseString(body, h)
            return resp
        else:
            raise self.connection.provider.storage_response_error(
                response.status, response.reason, body)
def complete_multipart_upload(self, key_name, upload_id,
xml_body, headers=None):
"""
Complete a multipart upload operation.
"""
query_args = 'uploadId=%s' % upload_id
if headers is None:
headers = {}
headers['Content-Type'] = 'text/xml'
response = self.connection.make_request('POST', self.name, key_name,
query_args=query_args,
headers=headers, data=xml_body)
contains_error = False
body = response.read()
# Some errors will be reported in the body of the response
# even though the HTTP response code is 200. This check
# does a quick and dirty peek in the body for an error element.
if body.find('<Error>') > 0:
contains_error = True
boto.log.debug(body)
if response.status == 200 and not contains_error:
resp = CompleteMultiPartUpload(self)
h = handler.XmlHandler(resp, self)
xml.sax.parseString(body, h)
return resp
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def cancel_multipart_upload(self, key_name, upload_id, headers=None):
query_args = 'uploadId=%s' % upload_id
response = self.connection.make_request('DELETE', self.name, key_name,
query_args=query_args,
headers=headers)
body = response.read()
boto.log.debug(body)
if response.status != 204:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
    def delete(self, headers=None):
        """Delete this bucket by delegating to the connection object."""
        return self.connection.delete_bucket(self.name, headers=headers)
| gpl-3.0 |
soldag/home-assistant | tests/components/ipp/test_sensor.py | 7 | 4325 | """Tests for the IPP sensor platform."""
from datetime import datetime
from homeassistant.components.ipp.const import DOMAIN
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.const import ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, PERCENTAGE
from homeassistant.core import HomeAssistant
from homeassistant.util import dt as dt_util
from tests.async_mock import patch
from tests.components.ipp import init_integration, mock_connection
from tests.test_util.aiohttp import AiohttpClientMocker
async def test_sensors(
    hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test the creation and values of the IPP sensors."""
    mock_connection(aioclient_mock)

    entry = await init_integration(hass, aioclient_mock, skip_setup=True)
    registry = await hass.helpers.entity_registry.async_get_registry()

    # Pre-create registry entries for disabled by default sensors
    registry.async_get_or_create(
        SENSOR_DOMAIN,
        DOMAIN,
        "cfe92100-67c4-11d4-a45f-f8d027761251_uptime",
        suggested_object_id="epson_xp_6000_series_uptime",
        disabled_by=None,
    )

    # Freeze the sensor platform's clock so the uptime state is deterministic.
    test_time = datetime(2019, 11, 11, 9, 10, 32, tzinfo=dt_util.UTC)
    with patch("homeassistant.components.ipp.sensor.utcnow", return_value=test_time):
        await hass.config_entries.async_setup(entry.entry_id)
        await hass.async_block_till_done()

    # Printer status sensor: icon only, no unit of measurement.
    state = hass.states.get("sensor.epson_xp_6000_series")
    assert state
    assert state.attributes.get(ATTR_ICON) == "mdi:printer"
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None

    # Marker (ink) sensors report fill levels as percentages.
    state = hass.states.get("sensor.epson_xp_6000_series_black_ink")
    assert state
    assert state.attributes.get(ATTR_ICON) == "mdi:water"
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is PERCENTAGE
    assert state.state == "58"

    state = hass.states.get("sensor.epson_xp_6000_series_photo_black_ink")
    assert state
    assert state.attributes.get(ATTR_ICON) == "mdi:water"
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is PERCENTAGE
    assert state.state == "98"

    state = hass.states.get("sensor.epson_xp_6000_series_cyan_ink")
    assert state
    assert state.attributes.get(ATTR_ICON) == "mdi:water"
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is PERCENTAGE
    assert state.state == "91"

    state = hass.states.get("sensor.epson_xp_6000_series_yellow_ink")
    assert state
    assert state.attributes.get(ATTR_ICON) == "mdi:water"
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is PERCENTAGE
    assert state.state == "95"

    state = hass.states.get("sensor.epson_xp_6000_series_magenta_ink")
    assert state
    assert state.attributes.get(ATTR_ICON) == "mdi:water"
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is PERCENTAGE
    assert state.state == "73"

    # Uptime sensor was enabled above; its state is an ISO timestamp.
    state = hass.states.get("sensor.epson_xp_6000_series_uptime")
    assert state
    assert state.attributes.get(ATTR_ICON) == "mdi:clock-outline"
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None
    assert state.state == "2019-10-26T15:37:00+00:00"

    entry = registry.async_get("sensor.epson_xp_6000_series_uptime")
    assert entry
    assert entry.unique_id == "cfe92100-67c4-11d4-a45f-f8d027761251_uptime"
async def test_disabled_by_default_sensors(
    hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test the disabled by default IPP sensors."""
    await init_integration(hass, aioclient_mock)
    entity_registry = await hass.helpers.entity_registry.async_get_registry()

    # The uptime sensor is registered but never added to the state machine.
    assert hass.states.get("sensor.epson_xp_6000_series_uptime") is None

    registry_entry = entity_registry.async_get("sensor.epson_xp_6000_series_uptime")
    assert registry_entry
    assert registry_entry.disabled
    assert registry_entry.disabled_by == "integration"
async def test_missing_entry_unique_id(
    hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
    """Test the unique_id of IPP sensor when printer is missing identifiers."""
    config_entry = await init_integration(
        hass, aioclient_mock, uuid=None, unique_id=None
    )
    entity_registry = await hass.helpers.entity_registry.async_get_registry()

    # With no printer UUID, the unique id falls back to the config entry id.
    registry_entry = entity_registry.async_get("sensor.epson_xp_6000_series")
    assert registry_entry
    assert registry_entry.unique_id == f"{config_entry.entry_id}_printer"
| apache-2.0 |
adlnet-archive/edx-platform | common/lib/capa/capa/tests/test_shuffle.py | 196 | 13736 | """Tests the capa shuffle and name-masking."""
import unittest
import textwrap
from . import test_capa_system, new_loncapa_problem
from capa.responsetypes import LoncapaProblemError
class CapaShuffleTest(unittest.TestCase):
"""Capa problem tests for shuffling and choice-name masking."""
    def setUp(self):
        """Build a fresh capa test system fixture for each test."""
        super(CapaShuffleTest, self).setUp()
        # Capa test system stub, available to tests as self.system.
        self.system = test_capa_system()
    def test_shuffle_4_choices(self):
        xml_str = textwrap.dedent("""
            <problem>
            <multiplechoiceresponse>
              <choicegroup type="MultipleChoice" shuffle="true">
                <choice correct="false">Apple</choice>
                <choice correct="false">Banana</choice>
                <choice correct="false">Chocolate</choice>
                <choice correct ="true">Donut</choice>
              </choicegroup>
            </multiplechoiceresponse>
            </problem>
        """)
        problem = new_loncapa_problem(xml_str, seed=0)
        # shuffling 4 things with seed of 0 yields: B A C D
        # Check that the choices are shuffled
        the_html = problem.get_html()
        self.assertRegexpMatches(the_html, r"<div>.*\[.*'Banana'.*'Apple'.*'Chocolate'.*'Donut'.*\].*</div>")
        # Masking is NOT enabled here (shuffle alone does not mask names);
        # unmask_order() still reports the shuffled choice order.
        response = problem.responders.values()[0]
        self.assertFalse(response.has_mask())
        self.assertEqual(response.unmask_order(), ['choice_1', 'choice_0', 'choice_2', 'choice_3'])
        self.assertEqual(the_html, problem.get_html(), 'should be able to call get_html() twice')
def test_shuffle_custom_names(self):
xml_str = textwrap.dedent("""
<problem>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice" shuffle="true">
<choice correct="false" name="aaa">Apple</choice>
<choice correct="false">Banana</choice>
<choice correct="false">Chocolate</choice>
<choice correct ="true" name="ddd">Donut</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
problem = new_loncapa_problem(xml_str, seed=0)
# B A C D
# Check that the custom name= names come through
response = problem.responders.values()[0]
self.assertFalse(response.has_mask())
self.assertTrue(response.has_shuffle())
self.assertEqual(response.unmask_order(), ['choice_0', 'choice_aaa', 'choice_1', 'choice_ddd'])
def test_shuffle_different_seed(self):
xml_str = textwrap.dedent("""
<problem>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice" shuffle="true">
<choice correct="false">Apple</choice>
<choice correct="false">Banana</choice>
<choice correct="false">Chocolate</choice>
<choice correct ="true">Donut</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
problem = new_loncapa_problem(xml_str, seed=341) # yields D A B C
the_html = problem.get_html()
self.assertRegexpMatches(the_html, r"<div>.*\[.*'Donut'.*'Apple'.*'Banana'.*'Chocolate'.*\].*</div>")
def test_shuffle_1_choice(self):
xml_str = textwrap.dedent("""
<problem>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice" shuffle="true">
<choice correct="true">Apple</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
problem = new_loncapa_problem(xml_str, seed=0)
the_html = problem.get_html()
self.assertRegexpMatches(the_html, r"<div>.*\[.*'Apple'.*\].*</div>")
response = problem.responders.values()[0]
self.assertFalse(response.has_mask())
self.assertTrue(response.has_shuffle())
self.assertEqual(response.unmask_order(), ['choice_0'])
def test_shuffle_6_choices(self):
xml_str = textwrap.dedent("""
<problem>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice" shuffle="true">
<choice correct="false">Apple</choice>
<choice correct="false">Banana</choice>
<choice correct="false">Chocolate</choice>
<choice correct ="true">Zonut</choice>
<choice correct ="false">Eggplant</choice>
<choice correct ="false">Filet Mignon</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
problem = new_loncapa_problem(xml_str, seed=0) # yields: C E A B D F
# Donut -> Zonut to show that there is not some hidden alphabetic ordering going on
the_html = problem.get_html()
self.assertRegexpMatches(the_html, r"<div>.*\[.*'Chocolate'.*'Eggplant'.*'Apple'.*'Banana'.*'Zonut'.*'Filet Mignon'.*\].*</div>")
def test_shuffle_false(self):
xml_str = textwrap.dedent("""
<problem>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice" shuffle="false">
<choice correct="false">Apple</choice>
<choice correct="false">Banana</choice>
<choice correct="false">Chocolate</choice>
<choice correct ="true">Donut</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
problem = new_loncapa_problem(xml_str)
the_html = problem.get_html()
self.assertRegexpMatches(the_html, r"<div>.*\[.*'Apple'.*'Banana'.*'Chocolate'.*'Donut'.*\].*</div>")
response = problem.responders.values()[0]
self.assertFalse(response.has_mask())
self.assertFalse(response.has_shuffle())
def test_shuffle_fixed_head_end(self):
xml_str = textwrap.dedent("""
<problem>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice" shuffle="true">
<choice correct="false" fixed="true">Alpha</choice>
<choice correct="false" fixed="true">Beta</choice>
<choice correct="false">A</choice>
<choice correct="false">B</choice>
<choice correct="false">C</choice>
<choice correct ="true">D</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
problem = new_loncapa_problem(xml_str, seed=0)
the_html = problem.get_html()
# Alpha Beta held back from shuffle (head end)
self.assertRegexpMatches(the_html, r"<div>.*\[.*'Alpha'.*'Beta'.*'B'.*'A'.*'C'.*'D'.*\].*</div>")
def test_shuffle_fixed_tail_end(self):
xml_str = textwrap.dedent("""
<problem>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice" shuffle="true">
<choice correct="false">A</choice>
<choice correct="false">B</choice>
<choice correct="false">C</choice>
<choice correct ="true">D</choice>
<choice correct="false" fixed="true">Alpha</choice>
<choice correct="false" fixed="true">Beta</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
problem = new_loncapa_problem(xml_str, seed=0)
the_html = problem.get_html()
# Alpha Beta held back from shuffle (tail end)
self.assertRegexpMatches(the_html, r"<div>.*\[.*'B'.*'A'.*'C'.*'D'.*'Alpha'.*'Beta'.*\].*</div>")
def test_shuffle_fixed_both_ends(self):
xml_str = textwrap.dedent("""
<problem>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice" shuffle="true">
<choice correct="false" fixed="true">Alpha</choice>
<choice correct="false" fixed="true">Beta</choice>
<choice correct="false">A</choice>
<choice correct="false">B</choice>
<choice correct="false">C</choice>
<choice correct ="true">D</choice>
<choice correct="false" fixed="true">Psi</choice>
<choice correct="false" fixed="true">Omega</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
problem = new_loncapa_problem(xml_str, seed=0)
the_html = problem.get_html()
self.assertRegexpMatches(the_html, r"<div>.*\[.*'Alpha'.*'Beta'.*'B'.*'A'.*'C'.*'D'.*'Psi'.*'Omega'.*\].*</div>")
def test_shuffle_fixed_both_ends_thin(self):
"""Head/tail fixing also works with a single shuffleable choice between."""
xml_str = textwrap.dedent("""
<problem>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice" shuffle="true">
<choice correct="false" fixed="true">Alpha</choice>
<choice correct="false">A</choice>
<choice correct="true" fixed="true">Omega</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
problem = new_loncapa_problem(xml_str, seed=0)
the_html = problem.get_html()
# With one free choice there is nothing to permute; order is unchanged.
self.assertRegexpMatches(the_html, r"<div>.*\[.*'Alpha'.*'A'.*'Omega'.*\].*</div>")
def test_shuffle_fixed_all(self):
"""When every choice is fixed, the authored order is preserved."""
xml_str = textwrap.dedent("""
<problem>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice" shuffle="true">
<choice correct="false" fixed="true">A</choice>
<choice correct="false" fixed="true">B</choice>
<choice correct="true" fixed="true">C</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
problem = new_loncapa_problem(xml_str, seed=0)
the_html = problem.get_html()
self.assertRegexpMatches(the_html, r"<div>.*\[.*'A'.*'B'.*'C'.*\].*</div>")
def test_shuffle_island(self):
"""A fixed 'island' choice not at the head or tail end gets lumped into the tail end."""
xml_str = textwrap.dedent("""
<problem>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice" shuffle="true">
<choice correct="false" fixed="true">A</choice>
<choice correct="false">Mid</choice>
<choice correct="true" fixed="true">C</choice>
<choice correct="False">Mid</choice>
<choice correct="false" fixed="true">D</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
problem = new_loncapa_problem(xml_str, seed=0)
the_html = problem.get_html()
# Fixed 'A' keeps the head; island 'C' is collected with tail-end 'D',
# leaving the two 'Mid' choices as the shuffled middle.
self.assertRegexpMatches(the_html, r"<div>.*\[.*'A'.*'Mid'.*'Mid'.*'C'.*'D'.*\].*</div>")
def test_multiple_shuffle_responses(self):
    """Shuffling is applied independently to each choicegroup in a problem,
    and rendering is stable across repeated get_html() calls."""
    xml_str = textwrap.dedent("""
    <problem>
    <multiplechoiceresponse>
    <choicegroup type="MultipleChoice" shuffle="true">
    <choice correct="false">Apple</choice>
    <choice correct="false">Banana</choice>
    <choice correct="false">Chocolate</choice>
    <choice correct ="true">Donut</choice>
    </choicegroup>
    </multiplechoiceresponse>
    <p>Here is some text</p>
    <multiplechoiceresponse>
    <choicegroup type="MultipleChoice" shuffle="true">
    <choice correct="false">A</choice>
    <choice correct="false">B</choice>
    <choice correct="false">C</choice>
    <choice correct ="true">D</choice>
    </choicegroup>
    </multiplechoiceresponse>
    </problem>
    """)
    problem = new_loncapa_problem(xml_str, seed=0)
    orig_html = problem.get_html()
    self.assertEqual(orig_html, problem.get_html(), 'should be able to call get_html() twice')
    # Flatten newlines to avoid headaches with .* matching.
    html = orig_html.replace('\n', ' ')
    # (Removed a leftover debug `print html` that polluted test output.)
    self.assertRegexpMatches(html, r"<div>.*\[.*'Banana'.*'Apple'.*'Chocolate'.*'Donut'.*\].*</div>.*" +
        r"<div>.*\[.*'C'.*'A'.*'D'.*'B'.*\].*</div>")
    # Look at the responses in their authored order
    responses = sorted(problem.responders.values(), key=lambda resp: int(resp.id[resp.id.rindex('_') + 1:]))
    self.assertFalse(responses[0].has_mask())
    self.assertTrue(responses[0].has_shuffle())
    self.assertTrue(responses[1].has_shuffle())
    # Each response records its own permutation of the authored choices.
    self.assertEqual(responses[0].unmask_order(), ['choice_1', 'choice_0', 'choice_2', 'choice_3'])
    self.assertEqual(responses[1].unmask_order(), ['choice_2', 'choice_0', 'choice_3', 'choice_1'])
def test_shuffle_not_with_answerpool(self):
"""Raise error if shuffle and answer-pool are both used."""
xml_str = textwrap.dedent("""
<problem>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice" shuffle="true" answer-pool="4">
<choice correct="false" fixed="true">A</choice>
<choice correct="false">Mid</choice>
<choice correct="true" fixed="true">C</choice>
<choice correct="False">Mid</choice>
<choice correct="false" fixed="true">D</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
# The invalid combination must be rejected at parse time.
with self.assertRaisesRegexp(LoncapaProblemError, "shuffle and answer-pool"):
new_loncapa_problem(xml_str)
| agpl-3.0 |
pjh/vm-analyze | analyze/oldscripts/gather_proc.py | 1 | 3609 | #! /usr/bin/env python3.3
# Virtual memory analysis scripts.
# Developed 2012-2014 by Peter Hornyack, pjh@cs.washington.edu
# Copyright (c) 2012-2014 Peter Hornyack and University of Washington
from vm_regex import *
from pjh_utils import *
import vm_common as vm
import errno
import os
import re
import stat
import sys
proc_files_we_care_about = ("cmdline", "maps", "smaps", "comm", "status")
'''
output_subdir should have just been created, and should be empty.
'''
def copy_proc_files(pid_dir, output_subdir):
    """Copy the interesting files from a /proc/[pid] dir into output_subdir.

    output_subdir should have just been created and be empty. Because /proc
    files are not normal filesystem files, each one is opened and its
    contents written out rather than copied with a filesystem-level copy.
    """
    tag = "copy_proc_files"
    for fname in proc_files_we_care_about:
        src = "{0}/{1}".format(pid_dir, fname)
        dst = "{0}/{1}".format(output_subdir, fname)
        print_debug(tag, "copying '{0}' to '{1}'".format(src, dst))
        vm.copy_proc_file_old(src, dst)
def gather_proc_files(output_dir):
    """Snapshot the per-process /proc files into output_dir.

    For every /proc/[pid] directory, a matching [pid] subdirectory is
    created under output_dir and the files we care about are copied there.
    """
    tag = "gather_proc_files"
    proc_root = "/proc"
    if not os.path.exists(proc_root):
        print_error_exit(tag, ("proc_root directory '{0}' does not "
            "exist!").format(proc_root))
    for entry in os.listdir(proc_root):
        match = valid_pid_dir.match(entry)
        if not match:
            continue  # not a [pid] directory name
        pid = match.group(1)
        pid_dir = "{0}/{1}".format(proc_root, pid)
        if not os.path.isdir(pid_dir):
            continue  # process may have exited between listdir and here
        output_subdir = "{0}/{1}".format(output_dir, pid)
        os.mkdir(output_subdir)
        copy_proc_files(pid_dir, output_subdir)
    return
def create_output_dir(output_dir):
    """Create output_dir (which must not already exist) and announce it."""
    tag = "create_output_dir"
    if os.path.exists(output_dir):
        print_error_exit(tag, "Output directory '{0}' already exists".format(
            output_dir))
    else:
        os.mkdir(output_dir)
    print(("Output will be created in directory '{0}'").format(output_dir))
    return
def check_requirements(output_dir):
    """Verify preconditions before doing any work.

    Requires root privilege (checked by opening /proc/kcore, which normal
    users cannot read) and that output_dir does not exist yet.
    """
    tag = "check_requirements"
    # Check for super-user permissions: try to open a /proc file that should
    # not be readable by normal users.
    kernel_fname = "/proc/kcore"
    try:
        f = open(kernel_fname, 'r')
        f.close()
    except IOError as e:
        # Only EACCES means insufficient privilege. The old code compared
        # the exception object itself to errno.EACCES (dead, commented-out
        # check) and reported EVERY IOError as a privilege problem.
        if e.errno == errno.EACCES:
            print_error_exit(tag, "must be run as root")
        else:
            print_error_exit(tag, ("unable to open {0}: {1}").format(
                kernel_fname, e))
    if os.path.exists(output_dir):
        print_error_exit(tag, ("output directory '{0}' already exists").format(
            output_dir))
    return
def usage():
    """Print a usage summary for this script and exit with status 1."""
    lines = (
        "usage: {0} <output-dir> <user>[:<group>]".format(sys.argv[0]),
        " <output-dir> will be created, its owner will be set to <user>, ",
        " and its group will optionally be set to <group>.",
        " This script must be run with root privilege (in order to read "
        "/proc)!",
    )
    print("\n".join(lines))
    sys.exit(1)
def parse_args(argv):
    """Validate argv and return the (output_dir, user[:group]) pair.

    Exits via usage() when the argument count is wrong.
    """
    tag = "parse_args"
    if len(argv) != 3:
        usage()
    print_debug(tag, 'argv: {0}'.format(argv))
    return (argv[1], argv[2])
# Main:
if __name__ == "__main__":
tag = "main"
print_debug(tag, "entered")
# argv layout: [script, output-dir, user[:group]] (validated in parse_args).
(output_dir, usrgrp) = parse_args(sys.argv)
# Fail early (root privilege, pre-existing output dir) before creating anything.
check_requirements(output_dir)
create_output_dir(output_dir)
gather_proc_files(output_dir)
# set_owner_group presumably comes from one of the star imports above --
# confirm; it hands the snapshot tree over to the requested user, since
# this script itself must run as root.
set_owner_group(output_dir, usrgrp)
sys.exit(0)
else:
print("Must run stand-alone")
usage()
sys.exit(1)
| bsd-3-clause |
andreasvc/disco-dop | web/browse.py | 1 | 12449 | """Web interface to browse a corpus with various visualizations."""
# stdlib
import os
import re
import sys
import glob
import math
import logging
from collections import OrderedDict
from functools import wraps
import matplotlib
matplotlib.use('AGG')
import matplotlib.cm as cm
import pandas
# Flask & co
from flask import Flask, Response
from flask import request, render_template
# disco-dop
from discodop import treebank, treebanktransforms
from discodop.tree import DrawTree
DEBUG = False # when True: enable debugging interface, disable multiprocessing
PASSWD = None # optionally, dict with user=>pass strings
HEADRULES = '../alpino.headrules'
logging.basicConfig(
format='%(asctime)s %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
level=logging.DEBUG)
APP = Flask(__name__)
log = APP.logger
STANDALONE = __name__ == '__main__'
CORPUS_DIR = "corpus/"
COLORS = dict(enumerate('''
Black Red Green Orange Blue Turquoise SlateGray Peru Teal Aqua
Aquamarine BlanchedAlmond Brown Burlywood CadetBlue Chartreuse
Chocolate Coral Crimson Cyan Firebrick ForestGreen Fuchsia Gainsboro
Gold Goldenrod Gray GreenYellow HotPink IndianRed Indigo Khaki Lime
YellowGreen Magenta Maroon Yellow MidnightBlue Moccasin NavyBlue Olive
OliveDrab Orchid PapayaWhip Pink Plum PowderBlue Purple RebeccaPurple
RoyalBlue SaddleBrown Salmon SandyBrown SeaGreen Sienna Silver SkyBlue
SlateBlue Tan Thistle Tomato Violet Wheat'''.split()))
WORDLIST = pandas.read_table('sonar-word.freqsort.lower.gz',
encoding='utf8', index_col=0, header=None, names=['word', 'count'],
nrows=20000).index
def getdeplen(item):
"""Compute dependency length.

Returns a (wordhighlights, sentweight) pair following the FILTERS
contract: one boolean per dependency marking 'long' dependencies
(distance > 7 positions), and the ratio a / b (0 when b is 0).
"""
tree = item.tree.copy(True)
deps = treebank.dependencies(tree)
# a, b look like (summed dependency length, dependency count) -- TODO
# confirm against treebank.deplen; the triples appear to be
# (position, label, head-position).
a, b = treebank.deplen(deps)
return ([abs(x - y) > 7 for x, _, y in deps], a / b if b else 0)
# cannot highlight due to removing punct
# return (None, a / b if b else 0)
def getmodifiers(item):
    """Count and highlight REL/PP-modifiers."""
    mods = [node for node in item.tree.subtrees()
            if node.label in ('REL', 'PP')
            and treebanktransforms.function(node) == 'mod']
    # Highlight every token covered by one of the modifier constituents.
    covered = {leaf for node in mods for leaf in node.leaves()}
    return toboolvec(len(item.sent), covered), len(mods)
def toboolvec(length, indices):
    """Convert a collection of indices into a boolean list of given length.

    Returns a list where position n is True iff n is in indices.
    """
    # Hoist membership testing into a set: the original tested `n in indices`
    # against an arbitrary container for every position, which is
    # O(length * len(indices)) when indices is a list.
    marked = set(indices)
    return [n in marked for n in range(length)]
# Functions that accept item object with item.tree and item.sent members;
# return tuple (wordhighlights, sentweight).
FILTERS = {
'average dependency length': getdeplen,
'd-level': lambda i: (None, treebanktransforms.dlevel(i.tree)),
# Rare words: tokens absent from the 20k most frequent entries of
# WORDLIST, excluding tokens whose morphology tag contains 'eigen'
# (proper names).
'rare words': lambda i: (list(~pandas.Index(
t.lower() for t in i.sent
).isin(WORDLIST)
& pandas.Series([ # filter names
'eigen' not in n.source[treebank.MORPH]
for n in
sorted(i.tree.subtrees(lambda n: isinstance(n[0], int)),
key=lambda n: n[0])])
), None),
'PP/REL modifiers': getmodifiers,
# Sentence weight is the rank (index + 1) of the 'heaviest' punctuation
# character present, 0 when none occurs.
'punctuation': lambda i:
(None, max('.,\'"?!(:;'.find(t) + 1 for t in i.sent)),
# True when the sentence looks like quoted/dash-introduced speech.
'direct speech': lambda i:
(None, re.match(r"^- .*$|(?:^|.* )['\"](?: .*|$)",
' '.join(i.sent)) is not None),
}
def torgb(val, mappable):
    """Return hexadecimal HTML color string for val under mappable."""
    r, g, b = mappable.to_rgba(val, bytes=True)[:3]
    return '#%02x%02x%02x' % (r, g, b)
def charvalues(sent, values):
    """Project token values to character values.

    Each token contributes its value once per character plus once for the
    separating space that follows it.

    >>> sorted(charvalues(['The', 'cat', 'is', 'on', 'the', 'mat'],
    ... [0, 0, 1, 1, 0, 1]))
    [0, 1, 2, 3, 8, 9, 10, 14, 15, 16, 17]
    """
    assert len(sent) == len(values)
    return [value
            for token, value in zip(sent, values)
            for _ in range(len(token) + 1)]
# http://flask.pocoo.org/snippets/8/
def check_auth(username, password):
"""This function is called to check if a username / password
combination is valid."""
return PASSWD is None or (username in PASSWD
and password == PASSWD[username])
def authenticate():
    """Sends a 401 response that enables basic auth."""
    message = ('Could not verify your access level for that URL.\n'
            'You have to login with proper credentials')
    headers = {'WWW-Authenticate': 'Basic realm="Login Required"'}
    return Response(message, 401, headers)
def requires_auth(f):
"""Decorator to require basic authentication for route."""
@wraps(f)
def decorated(*args, **kwargs):
"""Check the request's basic-auth credentials before calling the view."""
auth = request.authorization
if not auth or not check_auth(auth.username, auth.password):
# Missing or bad credentials: challenge the client with a 401.
return authenticate()
return f(*args, **kwargs)
return decorated
# end snipppet
def applyhighlight(sent, high1, high2, colorvec=None):
"""Return a version of sent where given char. indices are highlighted."""
cur = None
start = 0
out = []
for n, _ in enumerate(sent):
if colorvec is not None:
if cur != COLORS.get(colorvec[n], 'gray'):
out.append(sent[start:n])
if cur is not None:
out.append('</font>')
out.append('<font color=%s>' % COLORS.get(colorvec[n], 'gray'))
start = n
cur = COLORS.get(colorvec[n], 'gray')
elif n in high1:
if cur != 'red':
out.append(sent[start:n])
if cur is not None:
out.append('</span>')
out.append('<span class=r>')
start = n
cur = 'red'
elif n in high2:
if cur != 'blue':
out.append(sent[start:n])
if cur is not None:
out.append('</span>')
out.append('<span class=b>')
start = n
cur = 'blue'
else:
if cur is not None:
out.append(sent[start:n])
out.append('</span>')
start = n
cur = None
out.append(sent[start:])
if cur is not None:
out.append('</font>')
return ''.join(out)
def addsentweight(x):
    """Fill in a missing sentence weight as the count of word highlights."""
    wordhighlights, sentweight = x
    if sentweight is not None:
        return x
    return wordhighlights, sum(wordhighlights)
@APP.route('/browse')
@requires_auth
def browsetrees():
"""Browse through trees in a file."""
chunk = 20 # number of trees to fetch for one request
# With 'text' and 'sent' request args, render one chunk of drawn trees;
# otherwise fall through to the text index at the bottom.
if 'text' in request.args and 'sent' in request.args:
textno = int(request.args['text'])
sentno = int(request.args['sent'])
start = max(1, sentno - sentno % chunk)
stop = start + chunk
nofunc = 'nofunc' in request.args
nomorph = 'nomorph' in request.args
filename = os.path.join(CORPUS_DIR, TEXTS[textno] + '.export')
trees = CORPORA[filename].itertrees(start, stop)
# One <pre> per tree: the per-filter scores followed by an ASCII-art
# rendering of the tree (hidden initially when requested via AJAX).
results = ['<pre id="t%s"%s>%s\n%s</pre>' % (n,
' style="display: none; "' if 'ajax' in request.args else '',
', '.join('%s: %.3g' % (f, addsentweight(FILTERS[f](item))[1])
for f in sorted(FILTERS)),
DrawTree(item.tree, item.sent).text(
unicodelines=True, html=True))
for n, (_key, item) in enumerate(trees, start)]
# AJAX requests get the bare <pre> blocks without the page template.
if 'ajax' in request.args:
return '\n'.join(results)
prevlink = '<a id=prev>prev</a>'
if sentno > chunk:
prevlink = '<a href="browse?text=%d;sent=%d" id=prev>prev</a>' % (
textno, sentno - chunk + 1)
nextlink = '<a id=next>next</a>'
nextlink = '<a href="browse?text=%d;sent=%d" id=next>next</a>' % (
textno, sentno + chunk + 1)
# NOTE(review): totalsents is hard-coded to 1000; looks like a placeholder.
return render_template('browse.html', textno=textno, sentno=sentno,
text=TEXTS[textno], totalsents=1000,
trees=results, prevlink=prevlink, nextlink=nextlink,
chunk=chunk, nofunc=nofunc, nomorph=nomorph,
mintree=start, maxtree=stop)
# No query args: show an index of all texts.
return '<h1>Browse through trees</h1>\n<ol>\n%s</ol>\n' % '\n'.join(
'<li><a href="browse?text=%d;sent=1;nomorph">%s</a> ' % (n, text)
for n, text in enumerate(TEXTS))
@APP.route('/')
@APP.route('/browsesents')
@requires_auth
def browsesents():
"""Browse through sentences in a file; highlight selectable features."""
chunk = 20 # number of sentences per page
if 'text' in request.args and 'sent' in request.args:
textno = int(request.args['text'])
sentno = int(request.args['sent'])
# Clamp sentno so that the requested sentence sits mid-page.
sentno = max(chunk // 2 + 1, sentno)
start = max(1, sentno - chunk // 2)
stop = start + chunk
filename = os.path.join(CORPUS_DIR, TEXTS[textno] + '.export')
# Default to the first feature in FILTERS when none was selected.
feat = request.args.get('feat', next(iter(FILTERS)))
trees = list(CORPORA[filename].itertrees(start, stop))
results = []
# Compute (wordhighlights, sentweight) for every sentence on the page.
values = [addsentweight(FILTERS[feat](item))
for n, (_key, item) in enumerate(trees, start)]
# Color scale over twice the max weight, so colors stay readable.
norm = matplotlib.colors.Normalize(
vmin=0, vmax=max(a for _, a in values) * 2)
mappable = cm.ScalarMappable(norm, 'YlOrBr')
for n, ((_key, item), (wordhighlights, sentweight)) in enumerate(
zip(trees, values), start):
# NOTE(review): addsentweight already replaces a None sentweight,
# so this check appears redundant.
if sentweight is None:
sentweight = sum(wordhighlights)
if wordhighlights is not None:
xsent = applyhighlight(
' '.join(item.sent), None, None,
colorvec=charvalues(item.sent, wordhighlights))
else:
xsent = ' '.join(item.sent)
# Each sentence links through to the tree view, tinted by weight.
results.append(
'<a href="browse?text=%d;sent=%d" '
'style="text-decoration: none; color: black;">'
'<span style="background: %s; " title="%s: %.3g">'
' %s </span></a>' % (textno, n,
torgb(sentweight, mappable), feat, sentweight, xsent))
# Feature switcher: the current feature is shown unlinked.
legend = 'Feature: [ %s ]<br>' % ', '.join(f if f == feat
else ('<a href="browsesents?text=%d;sent=%d;feat=%s">'
'%s</a>' % (textno, sentno, f, f))
for f in sorted(FILTERS))
legend += 'Legend: ' + ''.join(
'<span style="background-color: %s; width: 30px; '
'display: inline-block; text-align: center; ">'
'%d</span>' % (torgb(n, mappable), n)
for n in range(0,
int(math.ceil(max(a for _, a in values))) + 1))
prevlink = '<a id=prev>prev</a>'
if sentno > chunk:
prevlink = (
'<a href="browsesents?text=%d;sent=%d;feat=%s" id=prev>'
'prev</a>' % (textno, sentno - chunk, feat))
nextlink = '<a id=next>next</a>'
nextlink = ('<a href="browsesents?text=%d;sent=%d;feat=%s" id=next>'
'next</a>' % (textno, sentno + chunk, feat))
return render_template('browsesents.html', textno=textno,
sentno=sentno, text=TEXTS[textno],
totalsents='??', # FIXME
sents=results, prevlink=prevlink, nextlink=nextlink,
chunk=chunk, mintree=start, legend=legend,
query=request.args.get('query', ''),
engine='')
# No query args: the text-selection landing page.
return render_template('browsemain.html',
texts=TEXTS)
def querydict(queries):
    """Return an OrderedDict of names and (normquery, query) pairs.

    Each non-blank line is either 'name:query' (when the text before the
    first colon is alphanumeric) or a bare query, in which case the name is
    the query itself, abbreviated to 100 characters. A query may carry a
    normalized form before a tab character.
    """
    result = OrderedDict()
    for line in (x for x in queries.splitlines() if x.strip()):
        if ':' in line and line[:line.index(':')].isalnum():
            name, query = line.split(':', 1)
        else:
            name = line[:100] + ('' if len(line) < 100 else '...')
            query = line
        if '\t' in query:
            # Split on the first tab only: a bare .split('\t') raised
            # ValueError whenever the query itself contained a tab.
            normquery, query = query.split('\t', 1)
        else:
            normquery = None
        result[name] = normquery, query
    return result
def getcorpus():
"""Load the corpus files; return a (texts, corpora, metadata) triple.

texts: base names of the .export files; corpora: a reader per file;
metadata: the metadata.csv table aligned to texts, or None when no
metadata.csv is present.
"""
files = sorted(glob.glob(os.path.join(CORPUS_DIR, '*.export')))
assert files, ('no corpus files with extension .export '
'found.')
texts = [os.path.splitext(os.path.basename(a))[0] for a in files]
corpora = {filename: treebank.NegraCorpusReader(filename,
headrules=HEADRULES, punct='move')
for filename in files}
if os.path.exists('metadata.csv'):
metadata = pandas.read_csv('metadata.csv', index_col=0)
# The metadata index must match the corpus files exactly.
assert set(metadata.index) == set(texts), (
'metadata.csv does not match list of files.\n'
'only in metadata: %s\nonly in files: %s' % (
set(metadata.index) - set(texts),
set(texts) - set(metadata.index)))
# Reorder the rows to match the order of texts.
metadata = metadata.loc[texts]
else:
metadata = None
return texts, corpora, metadata
class QueryStringRedirectMiddleware(object):
    """Support ; as query delimiter.

    http://flask.pocoo.org/snippets/43/"""

    def __init__(self, application):
        # The wrapped WSGI application; every request is forwarded to it.
        self.application = application

    def __call__(self, environ, start_response):
        raw = environ.get('QUERY_STRING', '')
        # Normalize semicolon-delimited query strings to the ampersand
        # form the framework expects.
        environ['QUERY_STRING'] = raw.replace(';', '&')
        return self.application(environ, start_response)
# Module-level startup: install the query-string middleware, parse CLI
# options when run as a script, load the corpus, and read the optional
# password file.
APP.wsgi_app = QueryStringRedirectMiddleware(APP.wsgi_app)
log.info('loading corpus.')
if STANDALONE:
from getopt import gnu_getopt, GetoptError
try:
opts, _args = gnu_getopt(sys.argv[1:], '',
['port=', 'ip=', 'numproc=', 'debug'])
opts = dict(opts)
except GetoptError as err:
print('error: %r' % err, file=sys.stderr)
sys.exit(2)
DEBUG = '--debug' in opts
# NB: load corpus regardless of whether running standalone:
(TEXTS, CORPORA, METADATA) = getcorpus()
log.info('corpus loaded.')
try:
# Each line of treesearchpasswd.txt is 'user: password'.
with open('treesearchpasswd.txt', 'rt') as fileobj:
PASSWD = {a.strip(): b.strip() for a, b
in (line.split(':', 1) for line in fileobj)}
log.info('password protection enabled.')
except IOError:
# Missing password file simply means open access.
log.info('no password protection.')
if STANDALONE:
# Development server; host/port come from --ip/--port options.
APP.run(use_reloader=False,
host=opts.get('--ip', '0.0.0.0'),
port=int(opts.get('--port', 5003)),
debug=DEBUG)
varunmc/cassandra-server | apache-cassandra-2.1.0/pylib/cqlshlib/helptopics.py | 2 | 30977 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .cql3handling import simple_cql_types
class CQLHelpTopics(object):
def get_help_topics(self):
    """Return the topic names exposed by this class's help_* methods."""
    prefix = 'help_'
    return [name[len(prefix):] for name in dir(self)
            if name.startswith(prefix)]
def print_help_topic(self, topic):
    """Print the help text for the given (case-insensitive) topic name."""
    getattr(self, 'help_' + topic.lower())()
def help_types(self):
print "\n CQL types recognized by this version of cqlsh:\n"
for t in simple_cql_types:
print ' ' + t
print """
For information on the various recognizable input formats for these
types, or on controlling the formatting of cqlsh query output, see
one of the following topics:
HELP TIMESTAMP_INPUT
HELP BLOB_INPUT
HELP UUID_INPUT
HELP BOOLEAN_INPUT
HELP TEXT_OUTPUT
HELP TIMESTAMP_OUTPUT
"""
def help_timestamp_input(self):
print """
Timestamp input
CQL supports any of the following ISO 8601 formats for timestamp
specification:
yyyy-mm-dd HH:mm
yyyy-mm-dd HH:mm:ss
yyyy-mm-dd HH:mmZ
yyyy-mm-dd HH:mm:ssZ
yyyy-mm-dd'T'HH:mm
yyyy-mm-dd'T'HH:mmZ
yyyy-mm-dd'T'HH:mm:ss
yyyy-mm-dd'T'HH:mm:ssZ
yyyy-mm-dd
yyyy-mm-ddZ
The Z in these formats refers to an RFC-822 4-digit time zone,
expressing the time zone's difference from UTC. For example, a
timestamp in Pacific Standard Time might be given thus:
2012-01-20 16:14:12-0800
If no time zone is supplied, the current time zone for the Cassandra
server node will be used.
"""
def help_blob_input(self):
print """
Blob input
CQL blob data must be specified in a string literal as hexidecimal
data. Example: to store the ASCII values for the characters in the
string "CQL", use '43514c'.
"""
def help_uuid_input(self):
print """
UUID input
UUIDs may be specified in CQL using 32 hexidecimal characters,
split up using dashes in the standard UUID format:
XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
"""
def help_boolean_input(self):
print """
Boolean input
CQL accepts the strings 'true' and 'false' (case insensitive)
as input for boolean types.
"""
def help_timestamp_output(self):
print """
Timestamp output
Cqlsh will display timestamps in the following format by default:
yyyy-mm-dd HH:mm:ssZ
which is a format acceptable as CQL timestamp input as well.
The output format can be changed by setting 'time_format' property
in the [ui] section of .cqlshrc file.
"""
def help_text_output(self):
print """
Textual output
When control characters, or other characters which can't be encoded
in your current locale, are found in values of 'text' or 'ascii'
types, it will be shown as a backslash escape. If color is enabled,
any such backslash escapes will be shown in a different color from
the surrounding text.
Unicode code points in your data will be output intact, if the
encoding for your locale is capable of decoding them. If you prefer
that non-ascii characters be shown with Python-style "\\uABCD"
escape sequences, invoke cqlsh with an ASCII locale (for example,
by setting the $LANG environment variable to "C").
"""
help_ascii_output = help_text_output
def help_create_index(self):
print """
CREATE INDEX [<indexname>] ON <cfname> ( <colname> );
A CREATE INDEX statement is used to create a new, automatic secondary
index on the given CQL table, for the named column. A name for the
index itself can be specified before the ON keyword, if desired. A
single column name must be specified inside the parentheses. It is not
necessary for the column to exist on any current rows (Cassandra is
schema-optional), but the column must already have a type (specified
during the CREATE TABLE, or added afterwards with ALTER TABLE).
"""
def help_drop(self):
print """
There are different variants of DROP. For more information, see
one of the following:
HELP DROP_KEYSPACE;
HELP DROP_TABLE;
HELP DROP_INDEX;
"""
def help_drop_keyspace(self):
print """
DROP KEYSPACE <keyspacename>;
A DROP KEYSPACE statement results in the immediate, irreversible
removal of a keyspace, including all column families in it, and all
data contained in those column families.
"""
def help_drop_table(self):
print """
DROP TABLE <tablename>;
A DROP TABLE statement results in the immediate, irreversible
removal of a CQL table and the underlying column family, including all
data contained in it.
"""
help_drop_columnfamily = help_drop_table
def help_drop_index(self):
print """
DROP INDEX <indexname>;
A DROP INDEX statement is used to drop an existing secondary index.
"""
def help_truncate(self):
print """
TRUNCATE <tablename>;
TRUNCATE accepts a single argument for the table name, and permanently
removes all data from it.
"""
def help_create(self):
print """
There are different variants of CREATE. For more information, see
one of the following:
HELP CREATE_KEYSPACE;
HELP CREATE_TABLE;
HELP CREATE_INDEX;
"""
def help_use(self):
print """
USE <keyspacename>;
Tells cqlsh and the connected Cassandra instance that you will be
working in the given keyspace. All subsequent operations on tables
or indexes will be in the context of this keyspace, unless otherwise
specified, until another USE command is issued or the connection
terminates.
As always, when a keyspace name does not work as a normal identifier or
number, it can be quoted using single quotes (CQL 2) or double quotes
(CQL 3).
"""
def help_create_table(self):
print """
CREATE TABLE <cfname> ( <colname> <type> PRIMARY KEY [,
<colname> <type> [, ...]] )
[WITH <optionname> = <val> [AND <optionname> = <val> [...]]];
CREATE TABLE statements create a new CQL table under the current
keyspace. Valid table names are strings of alphanumeric characters and
underscores, which begin with a letter.
Each table requires a primary key, which will correspond to the
underlying columnfamily key and key validator. It's important to
note that the key type you use must be compatible with the partitioner
in use. For example, OrderPreservingPartitioner and
CollatingOrderPreservingPartitioner both require UTF-8 keys.
In cql3 mode, a table can have multiple columns composing the primary
key (see HELP COMPOUND_PRIMARY_KEYS).
For more information, see one of the following:
HELP CREATE_TABLE_TYPES;
HELP CREATE_TABLE_OPTIONS;
"""
help_create_columnfamily = help_create_table
def help_compound_primary_keys(self):
print """
CREATE TABLE <cfname> ( <partition_key> <type>, <clustering_key1> type, <clustering_key2> type,
[, ...]], PRIMARY KEY (<partition_key>, <clustering_key1>, <clustering_key2>);
CREATE TABLE allows a primary key composed of multiple columns. When this is the case, specify
the columns that take part in the compound key after all columns have been specified.
, PRIMARY KEY( <key1>, <key2>, ... )
The partitioning key itself can be a compound key, in which case the first element of the PRIMARY KEY
phrase should be parenthesized, as
PRIMARY KEY ((<partition_key_part1>, <partition_key_part2>), <clustering_key>)
"""
def help_create_table_types(self):
print """
CREATE TABLE: Specifying column types
CREATE ... (KEY <type> PRIMARY KEY,
othercol <type>) ...
It is possible to assign columns a type during table creation. Columns
configured with a type are validated accordingly when a write occurs,
and intelligent CQL drivers and interfaces will be able to decode the
column values correctly when receiving them. Column types are specified
as a parenthesized, comma-separated list of column term and type pairs.
See HELP TYPES; for the list of recognized types.
"""
help_create_columnfamily_types = help_create_table_types
def help_create_table_options(self):
print """
CREATE TABLE: Specifying columnfamily options
CREATE TABLE blah (...)
WITH optionname = val AND otheroption = val2;
A number of optional keyword arguments can be supplied to control the
configuration of a new CQL table, such as the size of the associated
row and key caches for the underlying Cassandra columnfamily. Consult
your CQL reference for the complete list of options and possible
values.
"""
help_create_columnfamily_options = help_create_table_options
def help_alter_alter(self):
print """
ALTER TABLE: altering existing typed columns
ALTER TABLE addamsFamily ALTER lastKnownLocation TYPE uuid;
ALTER TABLE ... ALTER changes the expected storage type for a column.
The column must already have a type in the column family metadata. The
column may or may not already exist in current rows-- but be aware that
no validation of existing data is done. The bytes stored in values for
that column will remain unchanged, and if existing data is not
deserializable according to the new type, this may cause your CQL
driver or interface to report errors.
"""
def help_alter_add(self):
print """
ALTER TABLE: adding a typed column
ALTER TABLE addamsFamily ADD gravesite varchar;
The ALTER TABLE ... ADD variant adds a typed column to a column
family. The column must not already have a type in the column family
metadata. See the warnings on HELP ALTER_ALTER regarding the lack of
validation of existing data; they apply here as well.
"""
def help_alter_drop(self):
print """
ALTER TABLE: dropping a typed column
ALTER TABLE addamsFamily DROP gender;
An ALTER TABLE ... DROP statement removes the type of a column
from the column family metadata. Note that this does _not_ remove the
column from current rows; it just removes the metadata saying that the
bytes stored under that column are expected to be deserializable
according to a certain type.
"""
def help_alter_with(self):
print """
ALTER TABLE: changing column family properties
ALTER TABLE addamsFamily WITH comment = 'Glad to be here!'
AND read_repair_chance = 0.2;
An ALTER TABLE ... WITH statement makes adjustments to the
table properties, as defined when the table was created (see
HELP CREATE_TABLE_OPTIONS and your Cassandra documentation for
information about the supported parameter names and values).
"""
def help_delete_columns(self):
print """
DELETE: specifying columns
DELETE col1, col2, col3 FROM ...
Following the DELETE keyword is an optional comma-delimited list of
column name terms. When no column names are given, the remove applies
to the entire row(s) matched by the WHERE clause.
When column names do not parse as valid CQL identifiers, they can be
quoted in single quotes (CQL 2) or double quotes (CQL 3).
"""
def help_delete_where(self):
print """
DELETE: specifying rows
DELETE ... WHERE keycol = 'some_key_value';
DELETE ... WHERE keycol1 = 'val1' AND keycol2 = 'val2';
DELETE ... WHERE keycol IN (key1, key2);
The WHERE clause is used to determine to which row(s) a DELETE
applies. The first form allows the specification of a precise row
by specifying a particular primary key value (if the primary key has
multiple columns, values for each must be given). The second form
allows a list of key values to be specified using the IN operator
and a parenthesized list of comma-delimited key values.
"""
def help_update_set(self):
print """
UPDATE: Specifying Columns and Row
UPDATE ... SET name1 = value1, name2 = value2
WHERE <key> = keyname;
UPDATE ... SET name1 = value1, name2 = value2
WHERE <key> IN ('<key1>', '<key2>', ...)
Rows are created or updated by supplying column names and values in
term assignment format. Multiple columns can be set by separating the
name/value pairs using commas.
"""
def help_update_counters(self):
print """
UPDATE: Updating Counter Columns
UPDATE ... SET name1 = name1 + <value> ...
UPDATE ... SET name1 = name1 - <value> ...
Counter columns can be incremented or decremented by an arbitrary
numeric value though the assignment of an expression that adds or
substracts the value.
"""
def help_update_where(self):
print """
UPDATE: Selecting rows to update
UPDATE ... WHERE <keyname> = <keyval>;
UPDATE ... WHERE <keyname> IN (<keyval1>, <keyval2>, ...);
UPDATE ... WHERE <keycol1> = <keyval1> AND <keycol2> = <keyval2>;
Each update statement requires a precise set of keys to be specified
using a WHERE clause.
If the table's primary key consists of multiple columns, an explicit
value must be given for each for the UPDATE statement to make sense.
"""
def help_select_table(self):
print """
SELECT: Specifying Table
SELECT ... FROM [<keyspace>.]<tablename> ...
The FROM clause is used to specify the CQL table applicable to a SELECT
query. The keyspace in which the table exists can optionally be
specified along with the table name, separated by a dot (.). This will
not change the current keyspace of the session (see HELP USE).
"""
help_select_columnfamily = help_select_table
def help_select_where(self):
print """
SELECT: Filtering rows
SELECT ... WHERE <key> = keyname AND name1 = value1
SELECT ... WHERE <key> >= startkey and <key> =< endkey AND name1 = value1
SELECT ... WHERE <key> IN ('<key>', '<key>', '<key>', ...)
The WHERE clause provides for filtering the rows that appear in
results. The clause can filter on a key name, or range of keys, and in
the case of indexed columns, on column values. Key filters are
specified using the KEY keyword or key alias name, a relational
operator (one of =, >, >=, <, and <=), and a term value. When terms
appear on both sides of a relational operator it is assumed the filter
applies to an indexed column. With column index filters, the term on
the left of the operator is the name, the term on the right is the
value to filter _on_.
Note: The greater-than and less-than operators (> and <) result in key
ranges that are inclusive of the terms. There is no supported notion of
"strictly" greater-than or less-than; these operators are merely
supported as aliases to >= and <=.
"""
def help_select_limit(self):
print """
SELECT: Limiting results
SELECT ... WHERE <clause> [LIMIT n] ...
Limiting the number of rows returned can be achieved by adding the
LIMIT option to a SELECT expression. LIMIT defaults to 10,000 when left
unset.
"""
class CQL3HelpTopics(CQLHelpTopics):
    """Help topics specific to the CQL 3 dialect.

    Overrides/extends CQLHelpTopics with CQL 3 syntax. All help methods use
    parenthesized single-argument print calls, which produce identical
    output under Python 2 and are valid under Python 3. A few typos in the
    user-facing help text are fixed ("udpated", "KEYSPCE", "<timeToLive]").
    """
    def help_create_keyspace(self):
        """Print help for CREATE KEYSPACE."""
        print("""
        CREATE KEYSPACE <ksname>
            WITH replication = {'class':'<strategy>' [,'<option>':<val>]};
        The CREATE KEYSPACE statement creates a new top-level namespace (aka
        "keyspace"). Valid names are any string constructed of alphanumeric
        characters and underscores. Names which do not work as valid
        identifiers or integers should be quoted as string literals. Properties
        such as replication strategy and count are specified during creation
        as key-value pairs in the 'replication' map:
            class [required]: The name of the replication strategy class
            which should be used for the new keyspace. Some often-used classes
            are SimpleStrategy and NetworkTopologyStrategy.
            other options [optional]: Most strategies require additional arguments
            which can be supplied as key-value pairs in the 'replication' map.
        Examples:
        To create a keyspace with NetworkTopologyStrategy and strategy option of "DC1"
        with a value of "1" and "DC2" with a value of "2" you would use
        the following statement:
            CREATE KEYSPACE <ksname>
                WITH replication = {'class':'NetworkTopologyStrategy', 'DC1':1, 'DC2':2};
        To create a keyspace with SimpleStrategy and "replication_factor" option
        with a value of "3" you would use this statement:
            CREATE KEYSPACE <ksname>
                WITH replication = {'class':'SimpleStrategy', 'replication_factor':3};
        """)
    def help_begin(self):
        """Print help for BEGIN ... APPLY BATCH."""
        print("""
        BEGIN [UNLOGGED|COUNTER] BATCH [USING TIMESTAMP <timestamp>]
          <insert or update or delete statement> ;
          [ <another insert or update or delete statement ;
            [...]]
        APPLY BATCH;
        BATCH supports setting a client-supplied optional global timestamp
        which will be used for each of the operations included in the batch.
        Only data modification statements (specifically, UPDATE, INSERT,
        and DELETE) are allowed in a BATCH statement. BATCH is _not_ an
        analogue for SQL transactions.
        _NOTE: Counter mutations are allowed only within COUNTER batches._
        _NOTE: While there are no isolation guarantees, UPDATE queries are
        atomic within a given record._
        """)
    help_apply = help_begin
    def help_select(self):
        """Print help for SELECT."""
        print("""
        SELECT <selectExpr>
          FROM [<keyspace>.]<table>
            [WHERE <clause>]
            [ORDER BY <colname> [DESC]]
            [LIMIT m];
        SELECT is used to read one or more records from a CQL table. It returns
        a set of rows matching the selection criteria specified.
        For more information, see one of the following:
          HELP SELECT_EXPR
          HELP SELECT_TABLE
          HELP SELECT_WHERE
          HELP SELECT_LIMIT
        """)
    def help_delete(self):
        """Print help for DELETE."""
        print("""
        DELETE [<col1> [, <col2>, ...] FROM [<keyspace>.]<tablename>
               [USING TIMESTAMP <timestamp>]
            WHERE <keyname> = <keyvalue>;
        A DELETE is used to perform the removal of one or more columns from one
        or more rows. Each DELETE statement requires a precise set of row keys
        to be specified using a WHERE clause and the KEY keyword or key alias.
        For more information, see one of the following:
          HELP DELETE_USING
          HELP DELETE_COLUMNS
          HELP DELETE_WHERE
        """)
    def help_delete_using(self):
        """Print help for the USING clause of DELETE statements."""
        print("""
        DELETE: the USING clause
          DELETE ... USING TIMESTAMP <timestamp>;
        <timestamp> defines the optional timestamp for the new tombstone
        record. It must be an integer. Cassandra timestamps are generally
        specified using milliseconds since the Unix epoch (1970-01-01 00:00:00
        UTC).
        """)
    def help_update(self):
        """Print help for UPDATE."""
        print("""
        UPDATE [<keyspace>.]<columnFamily>
                              [USING [TIMESTAMP <timestamp>]
                                [AND TTL <timeToLive>]]
               SET name1 = value1, name2 = value2 WHERE <keycol> = keyval;
        An UPDATE is used to write one or more columns to a record in a table.
        No results are returned. The record's primary key must be completely
        and uniquely specified; that is, if the primary key includes multiple
        columns, all must be explicitly given in the WHERE clause.
        Statements begin with the UPDATE keyword followed by the name of the
        table to be updated.
        For more information, see one of the following:
          HELP UPDATE_USING
          HELP UPDATE_SET
          HELP UPDATE_COUNTERS
          HELP UPDATE_WHERE
        """)
    def help_update_using(self):
        """Print help for the USING clause of UPDATE statements."""
        print("""
        UPDATE: the USING clause
          UPDATE ... USING TIMESTAMP <timestamp>;
          UPDATE ... USING TTL <timeToLive>;
        The USING clause allows setting of certain query and data parameters.
        If multiple parameters need to be set, these may be joined using AND.
        Example:
          UPDATE ... USING TTL 43200 AND TIMESTAMP 1351620509603
        <timestamp> defines the optional timestamp for the new column value(s).
        It must be an integer. Cassandra timestamps are generally specified
        using milliseconds since the Unix epoch (1970-01-01 00:00:00 UTC).
        <timeToLive> defines the optional time to live (TTL) in seconds for the
        new column value(s). It must be an integer.
        """)
    def help_insert(self):
        """Print help for INSERT."""
        print("""
        INSERT INTO [<keyspace>.]<tablename>
                     ( <colname1>, <colname2> [, <colname3> [, ...]] )
        VALUES ( <colval1>, <colval2> [, <colval3> [, ...]] )
        [USING TIMESTAMP <timestamp>]
          [AND TTL <timeToLive>]];
        An INSERT is used to write one or more columns to a record in a
        CQL table. No results are returned.
        Values for all component columns in the table's primary key must
        be given. Also, there must be at least one non-primary-key column
        specified (Cassandra rows are not considered to exist with only
        a key and no associated columns).
        Unlike in SQL, the semantics of INSERT and UPDATE are identical.
        In either case a record is created if none existed before, and
        updated when it does. For more information, see one of the
        following:
          HELP UPDATE
          HELP UPDATE_USING
        """)
    def help_select_expr(self):
        """Print help for the projection part of SELECT statements."""
        print("""
        SELECT: Specifying Columns
          SELECT name1, name2, name3 FROM ...
          SELECT COUNT(*) FROM ...
        The SELECT expression determines which columns will appear in the
        results and takes the form of a comma separated list of names.
        It is worth noting that unlike the projection in a SQL SELECT, there is
        no guarantee that the results will contain all of the columns
        specified. This is because Cassandra is schema-less and there are no
        guarantees that a given column exists.
        When the COUNT aggregate function is specified as a column to fetch, a
        single row will be returned, with a single column named "count" whose
        value is the number of rows from the pre-aggregation resultset.
        Currently, COUNT is the only function supported by CQL.
        """)
    def help_alter_drop(self):
        """Print help for ALTER TABLE ... DROP."""
        print("""
        ALTER TABLE: dropping a typed column
          ALTER TABLE addamsFamily DROP gender;
        An ALTER TABLE ... DROP statement removes the type of a column
        from the column family metadata. Dropped columns will immediately
        become unavailable in the queries and will not be included in
        compacted sstables in the future. If a column is readded, queries
        won't return values written before the column was last dropped.
        It is assumed that timestamps represent actual time, so if this
        is not your case, you should NOT readd previously dropped columns.
        Columns can't be dropped from tables defined with COMPACT STORAGE.
        """)
    def help_create(self):
        """Extend the base CREATE help with the CQL3-only CREATE USER topic."""
        super(CQL3HelpTopics, self).help_create()
        print("          HELP CREATE_USER;\n")
    def help_alter(self):
        """Print help for ALTER TABLE."""
        print("""
        ALTER TABLE <tablename> ALTER <columnname> TYPE <type>;
        ALTER TABLE <tablename> ADD <columnname> <type>;
        ALTER TABLE <tablename> RENAME <columnname> TO <columnname>
            [AND <columnname> TO <columnname>]
        ALTER TABLE <tablename> WITH <optionname> = <val> [AND <optionname> = <val> [...]];
        An ALTER statement is used to manipulate table metadata. It allows you
        to add new typed columns, drop existing columns, change the data
        storage type of existing columns, or change table properties.
        No results are returned.
        See one of the following for more information:
          HELP ALTER_ALTER;
          HELP ALTER_ADD;
          HELP ALTER_DROP;
          HELP ALTER_RENAME;
          HELP ALTER_WITH;
        """)
    def help_alter_rename(self):
        """Print help for ALTER TABLE ... RENAME."""
        print("""
        ALTER TABLE: renaming a column
          ALTER TABLE <tablename> RENAME <columnname> TO <columnname>
              [AND <columnname> TO <columnname>]
        The ALTER TABLE ... RENAME variant renames a typed column in a column
        family.
        """)
    def help_drop(self):
        """Extend the base DROP help with the CQL3-only DROP USER topic."""
        super(CQL3HelpTopics, self).help_drop()
        print("          HELP DROP_USER;\n")
    def help_list(self):
        """Print an index of the LIST subcommand help topics."""
        print("""
        There are different variants of LIST. For more information, see
        one of the following:
          HELP LIST_USERS;
          HELP LIST_PERMISSIONS;
        """)
    def help_create_user(self):
        """Print help for CREATE USER."""
        print("""
        CREATE USER <username> [WITH PASSWORD 'password'] [NOSUPERUSER | SUPERUSER];
        CREATE USER creates a new Cassandra user account.
        Only superusers can issue CREATE USER requests.
        To create a superuser account use SUPERUSER option (NOSUPERUSER is the default).
        WITH PASSWORD clause should only be used with password-based authenticators,
        e.g. PasswordAuthenticator, SimpleAuthenticator.
        """)
    def help_alter_user(self):
        """Print help for ALTER USER."""
        print("""
        ALTER USER <username> [WITH PASSWORD 'password'] [NOSUPERUSER | SUPERUSER];
        Use ALTER USER to change a user's superuser status and/or password (only
        with password-based authenticators).
        Superusers can change a user's password or superuser status (except their own).
        Users cannot change their own superuser status. Ordinary users can only change their
        password (if the configured authenticator is password-based).
        """)
    def help_drop_user(self):
        """Print help for DROP USER."""
        print("""
        DROP USER <username>;
        DROP USER removes an existing user. You have to be logged in as a superuser
        to issue a DROP USER statement. A user cannot drop themselves.
        """)
    def help_list_users(self):
        """Print help for LIST USERS."""
        print("""
        LIST USERS;
        List existing users and their superuser status.
        """)
    def help_grant(self):
        """Print help for GRANT."""
        print("""
        GRANT (<permission> [PERMISSION] | ALL [PERMISSIONS])
                  ON ALL KEYSPACES
                   | KEYSPACE <keyspace>
                   | [TABLE] [<keyspace>.]<table>
                  TO <username>
        Grant the specified permission (or all permissions) on a resource
        to a user.
        To be able to grant a permission on some resource you have to
        have that permission yourself and also AUTHORIZE permission on it,
        or on one of its parent resources.
        See HELP PERMISSIONS for more info on the available permissions.
        """)
    def help_revoke(self):
        """Print help for REVOKE."""
        print("""
        REVOKE (<permission> [PERMISSION] | ALL [PERMISSIONS])
                   ON ALL KEYSPACES
                    | KEYSPACE <keyspace>
                    | [TABLE] [<keyspace>.]<table>
                   FROM <username>
        Revokes the specified permission (or all permissions) on a resource
        from a user.
        To be able to revoke a permission on some resource you have to
        have that permission yourself and also AUTHORIZE permission on it,
        or on one of its parent resources.
        See HELP PERMISSIONS for more info on the available permissions.
        """)
    def help_list_permissions(self):
        """Print help for LIST ... PERMISSIONS."""
        print("""
        LIST (<permission> [PERMISSION] | ALL [PERMISSIONS])
                  [ON ALL KEYSPACES
                    | KEYSPACE <keyspace>
                    | [TABLE] [<keyspace>.]<table>]
                  [OF <username>]
                  [NORECURSIVE]
        Omitting ON <resource> part will list permissions on ALL KEYSPACES,
        every keyspace and table.
        Omitting OF <username> part will list permissions of all users.
        Omitting NORECURSIVE specifier will list permissions of the resource
        and all its parents (table, table's keyspace and ALL KEYSPACES).
        See HELP PERMISSIONS for more info on the available permissions.
        """)
    def help_permissions(self):
        """Print the list of Cassandra permissions."""
        print("""
        PERMISSIONS
        Cassandra has 6 permissions:
          ALTER: required for ALTER KEYSPACE, ALTER TABLE, CREATE INDEX, DROP INDEX
          AUTHORIZE: required for GRANT, REVOKE
          CREATE: required for CREATE KEYSPACE, CREATE TABLE
          DROP: required for DROP KEYSPACE, DROP TABLE
          MODIFY: required for INSERT, DELETE, UPDATE, TRUNCATE
          SELECT: required for SELECT
        """)
| apache-2.0 |
chenc10/Spark-PAF | dist/examples/src/main/python/mllib/isotonic_regression_example.py | 3 | 2244 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Isotonic Regression Example.
"""
from __future__ import print_function
from pyspark import SparkContext
# $example on$
import math
from pyspark.mllib.regression import IsotonicRegression, IsotonicRegressionModel
# $example off$
if __name__ == "__main__":
    # Driver entry point: trains, evaluates, saves and reloads an
    # isotonic regression model on the bundled sample dataset.
    sc = SparkContext(appName="PythonIsotonicRegressionExample")
    # $example on$
    # Each input line is a comma-separated "label,feature" pair.
    data = sc.textFile("data/mllib/sample_isotonic_regression_data.txt")
    # Create label, feature, weight tuples from input data with weight set to default value 1.0.
    parsedData = data.map(lambda line: tuple([float(x) for x in line.split(',')]) + (1.0,))
    # Split data into training (60%) and test (40%) sets.
    # The fixed seed (11) makes the split reproducible across runs.
    training, test = parsedData.randomSplit([0.6, 0.4], 11)
    # Create isotonic regression model from training data.
    # Isotonic parameter defaults to true so it is only shown for demonstration
    model = IsotonicRegression.train(training)
    # Create tuples of predicted and real labels.
    predictionAndLabel = test.map(lambda p: (model.predict(p[1]), p[0]))
    # Calculate mean squared error between predicted and real labels.
    meanSquaredError = predictionAndLabel.map(lambda pl: math.pow((pl[0] - pl[1]), 2)).mean()
    print("Mean Squared Error = " + str(meanSquaredError))
    # Save and load model
    # NOTE: model.save raises if the target directory already exists.
    model.save(sc, "target/tmp/myIsotonicRegressionModel")
    sameModel = IsotonicRegressionModel.load(sc, "target/tmp/myIsotonicRegressionModel")
    # $example off$
| apache-2.0 |
phassoa/openelisglobal-core | liquibase/OE2.9/testCatalogHT_Clinical/scripts/testResult.py | 6 | 3274 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
def convert_type_to_symbole(type):
    """Map a human-readable result-type label to its one-letter SQL symbol.

    N = numeric, R = remark/free text, D = dictionary select list,
    M = multi-select.  Unrecognized labels are returned unchanged.
    """
    if type in ('Numeric', 'numeric'):
        return 'N'
    if 'Free Text' in type:
        return 'R'
    if type == 'Select list':
        return 'D'
    if type in ('multi', 'Multi Select'):
        return 'M'
    # No known mapping: pass the label through untouched.
    return type
def esc_char(name):
    """Quote name as a SQL string literal.

    Names containing a single quote are dollar-quoted ($$...$$) so the
    embedded quote needs no escaping; all others are single-quoted.
    """
    contains_quote = "'" in name
    if contains_quote:
        return "$$%s$$" % name
    return "'%s'" % name
def get_split_names(name):
    """Split name on '/' and strip surrounding whitespace from each part."""
    return [part.strip() for part in name.split("/")]
def get_comma_split_names(name):
    """Split name on ',' (preferred) or ';', stripping whitespace per part.

    A name containing neither delimiter yields a single stripped entry.
    When both delimiters are present, only ',' is used.
    """
    if ',' in name:
        parts = name.split(',')
    elif ';' in name:
        parts = name.split(';')
    else:
        parts = [name]
    return [part.strip() for part in parts]
# Main script: read four parallel input files (one value per line, row i of
# each file describes the same test) and emit an INSERT statement per result
# into MassiveTestResults.sql.
test_names = []
sample_types = []
select = []
# NOTE(review): 'type' shadows the builtin; kept as-is since the loops
# below reference it by this name.
type = []
descriptions = []
name_file = open('testName.txt','r')
sample_type_file = open("sampleType.txt")
select_file = open("selectList.txt", 'r')
result_type_file = open("resultType.txt", 'r')
results = open("output/MassiveTestResults.sql", 'w')
for line in name_file:
    test_names.append(line.strip())
name_file.close()
for line in sample_type_file:
    sample_types.append(line.strip())
sample_type_file.close()
for line in select_file:
    select.append(line.strip())
select_file.close()
for line in result_type_file:
    type.append( line.strip())
result_type_file.close()
# Shared VALUES prefix; ids come from the test_result_seq sequence.
nextVal = " VALUES ( nextval( 'test_result_seq' ) "
order = 10
for row in range(0, len(test_names)):
    if len(test_names[row]) > 1: #it's a new entry
        result_type = convert_type_to_symbole(type[row])
        # Description is "name(sampleType)", quoted for SQL.
        description = esc_char(test_names[row] + "(" + sample_types[row] + ")")
        if description not in descriptions:
            descriptions.append(description)
            if result_type == 'D' or result_type == 'M':
                # Dictionary-backed types: one row per select-list option,
                # linking to the dictionary entry by its text.
                split_selections = get_comma_split_names( select[row])
                for j in range(0, len(split_selections)):
                    dictionary_select = " ( select max(id) from clinlims.dictionary where dict_entry =" + esc_char(split_selections[j].strip()) + " ) "
                    results.write("INSERT INTO test_result( id, test_id, tst_rslt_type, value , lastupdated, sort_order)\n\t")
                    results.write( nextVal + ", ( select id from clinlims.test where description = " + description + " ) , '")
                    results.write( result_type + "' , " + dictionary_select + " , now() , " + str(order) + ");\n")
                    order += 10
            else:
                # Non-dictionary types: single row with a null value.
                results.write("INSERT INTO test_result( id, test_id, tst_rslt_type, value , lastupdated, sort_order)\n\t")
                results.write( nextVal + ", ( select id from clinlims.test where description = " + description + " ) , '")
                results.write( result_type + "' , null , now() , " + str(order) + ");\n")
                order += 10
print "Done results in MassiveTestResults.sql" | mpl-2.0 |
audiokit/csound | Opcodes/py/pycall-gen.py | 5 | 7093 | #!/usr/bin/env python
# Copyright (C) 2002 Maurizio Umberto Puxeddu
# This file is part of Csound.
# The Csound Library is free software; you can redistribute it
# and/or modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# Csound is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with Csound; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
# 02111-1307 USA
'''Automatically generate opcode structures, methods [and table entries]. Just run it.'''
def generate_pycall_common_init_code(f, n, pre, post, rate, triggered=0):
    """Emit the opening of a pycall opcode C function to stream f.

    Writes the function signature, local declarations and the Python
    initialization guard; for triggered variants also writes the
    early-return branch that replays cached results when the trigger is
    off.  Uses f.write() instead of the Python 2 only ``print >> f``
    statement so the generator runs under both Python 2 and 3.
    """
    if triggered:
        t, T = 't', 'T'
    else:
        t, T = '', ''
    name = 'py%scall%d%s%s_%srate' % (pre, n, post, t, rate)
    f.write('static int %s(CSOUND *csound, PYCALL%d%s *p)\n' % (name, n, T))
    f.write('{\n')
    f.write('    char command[1024];\n')
    f.write('    PyObject *result;\n')
    f.write('    int *py_initialize_done;\n')
    f.write('    if((py_initialize_done = csound->QueryGlobalVariable(csound,"PY_INITIALIZE")) == NULL ||*py_initialize_done == 0)\n')
    f.write('        return NOTOK;\n')
    f.write('\n')
    if triggered:
        f.write('    if (!*p->trigger) {\n')
        if n == 1:
            f.write('      *p->result = p->oresult;\n')
        elif n > 1:
            for i in range(n):
                f.write('      *p->result%d = p->oresult%d;\n' % (i+1, i+1))
        f.write('      return OK;\n')
        f.write('    }\n')
        f.write('\n')
def generate_pycall_common_call_code(f, context, withinit, triggered):
    """Emit the C code that formats and evaluates the Python call.

    context 'private' evaluates in the global namespace (context arg 0);
    otherwise the instance-local namespace is used, optionally created
    first when withinit is true.  Triggered variants skip two leading
    opcode arguments (trigger + function name) instead of one.
    Uses f.write() for Python 2/3 compatibility.
    """
    if triggered:
        skip = 2
    else:
        skip = 1
    f.write('    format_call_statement(command, (char*) p->function->data,\n')
    f.write('                          p->INOCOUNT, p->args, %d);\n' % skip)
    f.write('\n')
    if context == 'private':
        f.write('    result = eval_string_in_given_context(command, 0);\n')
    else:
        if withinit:
            f.write('    create_private_namespace_if_needed(&p->h);\n')
            f.write('\n')
        f.write('    result = eval_string_in_given_context(command, GETPYLOCAL(p->h.insdshead));\n')
    f.write('\n')
def generate_pycall_exception_handling_code(f, n, pre, post, rate, triggered=0):
    """Emit the C check that reports a Python exception from the call.

    The extra parameters are unused but kept so all generator stages share
    one calling convention.  Uses f.write() for Python 2/3 compatibility.
    """
    f.write('    if (result == NULL)\n')
    f.write('        return pyErrMsg(p, "python exception");\n')
    f.write('\n')
def generate_pycall_result_conversion_code(f, n, pre, post, rate, triggered=0):
    """Emit C code converting the Python result into n Csound outputs.

    n == 0 expects None, n == 1 a float, n > 1 an n-tuple of floats;
    triggered variants additionally cache each output in p->oresult*.
    Closes the generated C function.  Uses f.write() for Python 2/3
    compatibility; also drops an unused local ('name') the original
    computed in the tuple branch.
    """
    if triggered:
        t, T = 't', 'T'
    else:
        t, T = '', ''
    if n == 0:
        f.write('    if (result != Py_None)\n')
        f.write('        return errMsg(p, "callable must return None");\n')
    elif n == 1:
        f.write('    if (!PyFloat_Check(result)) {\n')
        f.write('        return errMsg(p, "callable must return a float");\n')
        f.write('    }\n')
        f.write('    else {\n')
        f.write('        *p->result = PyFloat_AsDouble(result);\n')
        if triggered:
            f.write('        p->oresult = *p->result;\n')
        f.write('    }\n')
        f.write('    return OK;\n')
    else:
        f.write('    if (!PyTuple_Check(result) || PyTuple_Size(result) != %d) {\n' % n)
        f.write('        return errMsg(p, "callable must return %d values");\n' % n)
        f.write('    }\n')
        f.write('    else {\n')
        for i in range(n):
            f.write('        *p->result%d = PyFloat_AsDouble(PyTuple_GET_ITEM(result, %d));\n' % (i+1, i))
            if triggered:
                f.write('        p->oresult%d = *p->result%d;\n' % (i+1, i+1))
        f.write('    }\n')
        f.write('\n')
    f.write('    Py_DECREF(result);\n')
    f.write('    return OK;\n')
    f.write('}\n')
    f.write('\n')
def generate_pycall_krate_method(f, n, triggered=0):
    # Assemble a complete k-rate pycall implementation with n outputs:
    # prologue, call evaluated with context 'private', error check, and
    # result conversion / function epilogue.
    generate_pycall_common_init_code(f, n, '', '', 'k', triggered)
    generate_pycall_common_call_code(f, 'private', 1, triggered)
    generate_pycall_exception_handling_code(f, n, '', '', 'k', triggered)
    generate_pycall_result_conversion_code(f, n, '', '', 'k', triggered)
def generate_pylcall_irate_method(f, n, triggered=0):
    """Emit the i-rate pass of a pylcall opcode.

    The init-time function only verifies Python is initialized and makes
    sure the instance has its private namespace; the actual call happens
    in the k-rate pass.  Uses f.write() for Python 2/3 compatibility.
    """
    if triggered:
        t, T = 't', 'T'
    else:
        t, T = '', ''
    name = 'pylcall%d%s_irate' % (n, t)
    f.write('static int %s(CSOUND *csound, PYCALL%d%s *p)\n' % (name, n, T))
    f.write('{\n')
    f.write('    int *py_initialize_done;\n')
    f.write('    if((py_initialize_done = csound->QueryGlobalVariable(csound,"PY_INITIALIZE")) == NULL ||*py_initialize_done == 0)\n')
    f.write('        return NOTOK;\n')
    f.write('    create_private_namespace_if_needed(&p->h);\n')
    f.write('    return OK;\n')
    f.write('}\n')
    f.write('\n')
def generate_pylcall_krate_method(f, n, triggered=0):
    # k-rate pass of pylcall: same stages as pycall but with the 'l'
    # name prefix, call context 'global' and withinit=0 (the namespace
    # was already created by the i-rate pass).
    generate_pycall_common_init_code(f, n, 'l', '', 'k', triggered)
    generate_pycall_common_call_code(f, 'global', 0, triggered)
    generate_pycall_exception_handling_code(f, n, 'l', '', 'k', triggered)
    generate_pycall_result_conversion_code(f, n, 'l', '', 'k', triggered)
def generate_pylcalli_irate_method(f, n):
    # pylcalli: a single i-rate-only opcode ('i' suffix) that creates the
    # namespace itself (withinit=1) and performs the call at init time.
    # There is no triggered variant.
    generate_pycall_common_init_code(f, n, 'l', 'i', 'i')
    generate_pycall_common_call_code(f, 'global', 1, 0)
    generate_pycall_exception_handling_code(f, n, 'l', 'i', 'i')
    generate_pycall_result_conversion_code(f, n, 'l', 'i', 'i')
# ----------
def generate_pycall_opcode_struct(f, n, triggered=0):
    """Emit the C struct declaration for an n-output pycall opcode.

    n == 1 uses a single 'result' field, n > 1 numbered 'resultN' fields
    and n == 0 none at all.  Triggered variants add the trigger input and
    the cached 'oresult*' fields.  Uses f.write() for Python 2/3
    compatibility.
    """
    if triggered:
        T = 'T'
    else:
        T = ''
    f.write('typedef struct {\n')
    f.write('    OPDS h;\n')
    if n == 1:
        f.write('    MYFLT *result;\n')
    else:
        for i in range(n):
            f.write('    MYFLT *result%d;\n' % (i+1))
    if triggered:
        f.write('    MYFLT *trigger;\n')
    f.write('    STRINGDAT *function;\n')
    f.write('    MYFLT *args[VARGMAX-3];\n')
    if triggered:
        if n == 1:
            f.write('    MYFLT oresult;\n')
        else:
            for i in range(n):
                f.write('    MYFLT oresult%d;\n' % (i+1))
    f.write('} PYCALL%d%s;\n' % (n, T))
    f.write('\n')
# --------
# Driver: write the generated C sources.  pycall.auto.c gets, for each
# output arity 0..8, the plain and triggered variants of every opcode
# flavour; pycall.auto.h gets the matching struct declarations.
f = open('pycall.auto.c', 'w')
print >> f
for n in range(9):
    generate_pycall_krate_method(f, n)
    generate_pylcall_irate_method(f, n)
    generate_pylcall_krate_method(f, n)
    generate_pylcalli_irate_method(f, n)
    generate_pycall_krate_method(f, n, 1)
    generate_pylcall_irate_method(f, n, 1)
    generate_pylcall_krate_method(f, n, 1)
f.close()
f = open('pycall.auto.h', 'w')
print >> f
for n in range(9):
    generate_pycall_opcode_struct(f, n)
    generate_pycall_opcode_struct(f, n, 1)
f.close()
| lgpl-2.1 |
rahul67/hue | desktop/core/ext-py/python-openid-2.2.5/openid/test/test_openidyadis.py | 87 | 4828 | import unittest
from openid.consumer.discover import \
OpenIDServiceEndpoint, OPENID_1_1_TYPE, OPENID_1_0_TYPE
from openid.yadis.services import applyFilter
XRDS_BOILERPLATE = '''\
<?xml version="1.0" encoding="UTF-8"?>
<xrds:XRDS xmlns:xrds="xri://$xrds"
xmlns="xri://$xrd*($v*2.0)"
xmlns:openid="http://openid.net/xmlns/1.0">
<XRD>
%s\
</XRD>
</xrds:XRDS>
'''
def mkXRDS(services):
    """Wrap a block of <Service> elements in the standard XRDS boilerplate."""
    return XRDS_BOILERPLATE % (services,)
def mkService(uris=None, type_uris=None, local_id=None, dent='        '):
    """Build the XML text of one <Service> element.

    Entries of uris may be plain URI strings or (uri, priority) tuples;
    a priority is rendered as a priority="..." attribute.  type_uris
    become <Type> children and local_id an <openid:Delegate> child.
    dent is the indentation prefix for the element itself.
    """
    inner = dent + '    '
    parts = [dent, '<Service>\n']
    for type_uri in (type_uris or []):
        parts.append(inner + '<Type>' + type_uri + '</Type>\n')
    for uri in (uris or []):
        if type(uri) is tuple:
            uri, prio = uri
        else:
            prio = None
        parts.append(inner + '<URI')
        if prio is not None:
            parts.append(' priority="' + str(prio) + '"')
        parts.append('>' + uri + '</URI>\n')
    if local_id:
        parts.append(inner + '<openid:Delegate>' + local_id + '</openid:Delegate>\n')
    parts.append(dent + '</Service>\n')
    return ''.join(parts)
# Different sets of server URLs for use in the URI tag.
# Each inner list becomes the set of <URI> children of one generated
# <Service>, and hence the expected endpoint count for that case.
server_url_options = [
    [], # This case should not generate an endpoint object
    ['http://server.url/'],
    ['https://server.url/'],
    ['https://server.url/', 'http://server.url/'],
    ['https://server.url/',
     'http://server.url/',
     'http://example.server.url/'],
]
# Used for generating test data
def subsets(l):
    """Return every sublist of l, including the empty list.

    Order matters: the result starts with [] and grows by prepending each
    successive element of l to all previously collected sublists.
    """
    collected = [[]]
    for element in l:
        extensions = [[element] + prior for prior in collected]
        collected.extend(extensions)
    return collected
# A couple of example extension type URIs. These are not at all
# official, but are just here for testing.
ext_types = [
    'http://janrain.com/extension/blah',
    'http://openid.net/sreg/1.0',
]
# All valid combinations of Type tags that should produce an OpenID endpoint
type_uri_options = [
    exts + ts
    # All non-empty sublists of the valid OpenID type URIs
    for ts in subsets([OPENID_1_0_TYPE, OPENID_1_1_TYPE])
    if ts
    # All combinations of extension types (including empty extension list)
    for exts in subsets(ext_types)
]
# Range of valid Delegate tag values for generating test data
local_id_options = [
    None,
    'http://vanity.domain/',
    'https://somewhere/yadis/',
]
# All combinations of valid URIs, Type URIs and Delegate tags
# (cartesian product; each tuple parameterizes one OpenIDYadisTest case)
data = [
    (uris, type_uris, local_id)
    for uris in server_url_options
    for type_uris in type_uri_options
    for local_id in local_id_options
]
class OpenIDYadisTest(unittest.TestCase):
    """Parameterized success case: build one XRDS document from (uris,
    type_uris, local_id) and check the parsed OpenIDServiceEndpoint objects.

    Uses the modern assertEqual API: the failUnless* aliases are
    deprecated and were removed in Python 3.12.
    """
    def __init__(self, uris, type_uris, local_id):
        unittest.TestCase.__init__(self)
        self.uris = uris
        self.type_uris = type_uris
        self.local_id = local_id

    def shortDescription(self):
        # XXX: could incorporate the parameter values for easier debugging
        return 'Successful OpenID Yadis parsing case'

    def setUp(self):
        self.yadis_url = 'http://unit.test/'

        # Create an XRDS document to parse
        services = mkService(uris=self.uris,
                             type_uris=self.type_uris,
                             local_id=self.local_id)
        self.xrds = mkXRDS(services)

    def runTest(self):
        # Parse into endpoint objects that we will check
        endpoints = applyFilter(
            self.yadis_url, self.xrds, OpenIDServiceEndpoint)

        # make sure there are the same number of endpoints as
        # URIs. This assumes that the type_uris contains at least one
        # OpenID type.
        self.assertEqual(len(self.uris), len(endpoints))

        # So that we can check equality on the endpoint types
        type_uris = list(self.type_uris)
        type_uris.sort()

        seen_uris = []
        for endpoint in endpoints:
            seen_uris.append(endpoint.server_url)

            # All endpoints will have same yadis_url
            self.assertEqual(self.yadis_url, endpoint.claimed_id)

            # and local_id
            self.assertEqual(self.local_id, endpoint.local_id)

            # and types
            actual_types = list(endpoint.type_uris)
            actual_types.sort()
            self.assertEqual(actual_types, type_uris)

        # So that they will compare equal, because we don't care what
        # order they are in
        seen_uris.sort()
        uris = list(self.uris)
        uris.sort()

        # Make sure we saw all URIs, and saw each one once
        self.assertEqual(uris, seen_uris)
def pyUnitTests():
    """Build a TestSuite with one OpenIDYadisTest per generated data tuple."""
    return unittest.TestSuite([OpenIDYadisTest(*args) for args in data])
| apache-2.0 |
yjfcool/picasso-graphic | tools/gyp/build/lib/gyp/SCons.py | 253 | 5848 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
SCons generator.
This contains class definitions and supporting functions for generating
pieces of SCons files for the different types of GYP targets.
"""
import os
def WriteList(fp, list, prefix='',
              separator=',\n    ',
              preamble=None,
              postamble=None):
  """Write each item of list to fp with prefix prepended, joined by separator.

  preamble/postamble are written verbatim before/after when given; a
  false separator falls back to a single space.
  """
  prefixed = [prefix + item for item in list]
  joined = (separator or ' ').join(prefixed)
  fp.write((preamble or '') + joined + (postamble or ''))
class TargetBase(object):
  """
  Base class for a SCons representation of a GYP target.
  """
  is_ignored = False
  target_prefix = ''
  target_suffix = ''

  def __init__(self, spec):
    self.spec = spec

  def full_product_name(self):
    """
    Returns the full name of the product being built:

      * Uses 'product_name' if it's set, else prefix + 'target_name'.
      * Prepends 'product_dir' if set.
      * Appends SCons suffix variables for the target type (or
        product_extension).
    """
    spec = self.spec
    product_extension = spec.get('product_extension')
    if product_extension:
      suffix = '.' + product_extension
    else:
      suffix = self.target_suffix
    prefix = spec.get('product_prefix', self.target_prefix)
    base = spec.get('product_name', spec['target_name'])
    filename = prefix + base + suffix
    # Fall back to the subclass-provided out_dir when no explicit
    # product_dir was requested.
    directory = spec.get('product_dir') or self.out_dir
    return os.path.join(directory, filename)

  def write_input_files(self, fp):
    """
    Writes the definition of the input files (sources).
    """
    sources = self.spec.get('sources')
    if not sources:
      fp.write('\ninput_files = []\n')
      return
    WriteList(fp,
              map(repr, sources),
              preamble='\ninput_files = [\n    ',
              postamble=',\n]\n')

  def builder_call(self):
    """
    Returns the actual SCons builder call to build this target.
    """
    return 'env.%s(env.File(%r), input_files)' % (self.builder_name,
                                                  self.full_product_name())

  def write_target(self, fp, src_dir='', pre=''):
    """
    Writes the lines necessary to build this target.
    """
    fp.write('\n' + pre
             + '_outputs = %s\n' % self.builder_call()
             + 'target_files.extend(_outputs)\n')
class NoneTarget(TargetBase):
  """
  A GYP target type of 'none', implicitly or explicitly.
  """
  def write_target(self, fp, src_dir='', pre=''):
    # A 'none' target builds nothing: its inputs pass straight through
    # as the target's output files.
    fp.write('\ntarget_files.extend(input_files)\n')
class SettingsTarget(TargetBase):
  """
  A GYP target type of 'settings'.
  """
  # Contributes only build settings; no build rule is emitted for it.
  is_ignored = True
compilable_sources_template = """
_result = []
for infile in input_files:
if env.compilable(infile):
if (type(infile) == type('')
and (infile.startswith(%(src_dir)r)
or not os.path.isabs(env.subst(infile)))):
# Force files below the build directory by replacing all '..'
# elements in the path with '__':
base, ext = os.path.splitext(os.path.normpath(infile))
base = [d == '..' and '__' or d for d in base.split('/')]
base = os.path.join(*base)
object = '${OBJ_DIR}/${COMPONENT_NAME}/${TARGET_NAME}/' + base
if not infile.startswith(%(src_dir)r):
infile = %(src_dir)r + infile
infile = env.%(name)s(object, infile)[0]
else:
infile = env.%(name)s(infile)[0]
_result.append(infile)
input_files = _result
"""
class CompilableSourcesTargetBase(TargetBase):
  """
  An abstract base class for targets that compile their source files.
  We explicitly transform compilable files into object files,
  even though SCons could infer that for us, because we want
  to control where the object file ends up.  (The implicit rules
  in SCons always put the object file next to the source file.)
  """
  # Subclasses must override with the SCons builder used for each source
  # file (e.g. 'StaticObject' or 'SharedObject').
  intermediate_builder_name = None
  def write_target(self, fp, src_dir='', pre=''):
    if self.intermediate_builder_name is None:
      raise NotImplementedError
    if src_dir and not src_dir.endswith('/'):
      src_dir += '/'
    variables = {
        'src_dir': src_dir,
        'name': self.intermediate_builder_name,
    }
    fp.write(compilable_sources_template % variables)
    # NOTE(review): src_dir and pre are not forwarded to the base-class
    # write_target(); src_dir was already consumed by the template above,
    # but confirm that dropping 'pre' here is intentional.
    super(CompilableSourcesTargetBase, self).write_target(fp)
# Concrete target types: each supplies the SCons builder name, the object
# builder to use per source file, the filename decoration variables, and
# the directory the final artifact lands in.

class ProgramTarget(CompilableSourcesTargetBase):
    """
    A GYP target type of 'executable'.
    """
    builder_name = 'GypProgram'
    intermediate_builder_name = 'StaticObject'
    target_prefix = '${PROGPREFIX}'
    target_suffix = '${PROGSUFFIX}'
    out_dir = '${TOP_BUILDDIR}'


class StaticLibraryTarget(CompilableSourcesTargetBase):
    """
    A GYP target type of 'static_library'.
    """
    builder_name = 'GypStaticLibrary'
    intermediate_builder_name = 'StaticObject'
    target_prefix = '${LIBPREFIX}'
    target_suffix = '${LIBSUFFIX}'
    out_dir = '${LIB_DIR}'


class SharedLibraryTarget(CompilableSourcesTargetBase):
    """
    A GYP target type of 'shared_library'.
    """
    builder_name = 'GypSharedLibrary'
    intermediate_builder_name = 'SharedObject'
    target_prefix = '${SHLIBPREFIX}'
    target_suffix = '${SHLIBSUFFIX}'
    out_dir = '${LIB_DIR}'


class LoadableModuleTarget(CompilableSourcesTargetBase):
    """
    A GYP target type of 'loadable_module'.
    """
    builder_name = 'GypLoadableModule'
    intermediate_builder_name = 'SharedObject'
    target_prefix = '${SHLIBPREFIX}'
    target_suffix = '${SHLIBSUFFIX}'
    out_dir = '${TOP_BUILDDIR}'
# Maps a GYP 'type' value to the TargetBase subclass that knows how to
# generate SCons code for it; a missing 'type' behaves like 'none'.
TargetMap = {
    None: NoneTarget,
    'none': NoneTarget,
    'settings': SettingsTarget,
    'executable': ProgramTarget,
    'static_library': StaticLibraryTarget,
    'shared_library': SharedLibraryTarget,
    'loadable_module': LoadableModuleTarget,
}


def Target(spec):
    """Instantiate the target wrapper class matching spec's 'type' entry."""
    target_class = TargetMap[spec.get('type')]
    return target_class(spec)
| bsd-3-clause |
ahhda/sympy | sympy/solvers/tests/test_recurr.py | 81 | 7203 | from sympy import Eq, factorial, Function, Lambda, rf, S, sqrt, symbols, I, expand_func, binomial, gamma
from sympy.solvers.recurr import rsolve, rsolve_hyper, rsolve_poly, rsolve_ratio
from sympy.utilities.pytest import raises
from sympy.core.compatibility import range
from sympy.abc import a, b, c
# Shared fixtures: the unknown sequence y, integer symbols n and k, and the
# arbitrary constants C0..C2 that rsolve() introduces in general solutions.
y = Function('y')
n, k = symbols('n,k', integer=True)
C0, C1, C2 = symbols('C0,C1,C2')
def test_rsolve_poly():
    """Polynomial particular/homogeneous solutions of linear recurrences."""
    assert rsolve_poly([-1, -1, 1], 0, n) == 0
    assert rsolve_poly([-1, -1, 1], 1, n) == -1

    assert rsolve_poly([-1, n + 1], n, n) == 1
    assert rsolve_poly([-1, 1], n, n) == C0 + (n**2 - n)/2
    assert rsolve_poly([-n - 1, n], 1, n) == C1*n - 1
    assert rsolve_poly([-4*n - 2, 1], 4*n + 1, n) == -1

    assert rsolve_poly([-1, 1], n**5 + n**3, n) == \
        C0 - n**3 / 2 - n**5 / 2 + n**2 / 6 + n**6 / 6 + 2*n**4 / 3


def test_rsolve_ratio():
    """Rational solutions; solver may return any equivalent normal form."""
    solution = rsolve_ratio([-2*n**3 + n**2 + 2*n - 1, 2*n**3 + n**2 - 6*n,
        -2*n**3 - 11*n**2 - 18*n - 9, 2*n**3 + 13*n**2 + 22*n + 8], 0, n)
    # The constant may come back as C1 or C2 and with either sign
    # convention, so accept every equivalent spelling.
    assert solution in [
        C1*((-2*n + 3)/(n**2 - 1))/3,
        (S(1)/2)*(C1*(-3 + 2*n)/(-1 + n**2)),
        (S(1)/2)*(C1*( 3 - 2*n)/( 1 - n**2)),
        (S(1)/2)*(C2*(-3 + 2*n)/(-1 + n**2)),
        (S(1)/2)*(C2*( 3 - 2*n)/( 1 - n**2)),
    ]
def test_rsolve_hyper():
    """Hypergeometric solutions; C0/C1 may be attached to either branch."""
    assert rsolve_hyper([-1, -1, 1], 0, n) in [
        C0*(S.Half - S.Half*sqrt(5))**n + C1*(S.Half + S.Half*sqrt(5))**n,
        C1*(S.Half - S.Half*sqrt(5))**n + C0*(S.Half + S.Half*sqrt(5))**n,
    ]

    assert rsolve_hyper([n**2 - 2, -2*n - 1, 1], 0, n) in [
        C0*rf(sqrt(2), n) + C1*rf(-sqrt(2), n),
        C1*rf(sqrt(2), n) + C0*rf(-sqrt(2), n),
    ]

    assert rsolve_hyper([n**2 - k, -2*n - 1, 1], 0, n) in [
        C0*rf(sqrt(k), n) + C1*rf(-sqrt(k), n),
        C1*rf(sqrt(k), n) + C0*rf(-sqrt(k), n),
    ]

    assert rsolve_hyper(
        [2*n*(n + 1), -n**2 - 3*n + 2, n - 1], 0, n) == C1*factorial(n) + C0*2**n

    # No hypergeometric solution exists for these:
    assert rsolve_hyper(
        [n + 2, -(2*n + 3)*(17*n**2 + 51*n + 39), n + 1], 0, n) == None
    assert rsolve_hyper([-n - 1, -1, 1], 0, n) == None

    assert rsolve_hyper([-1, 1], n, n).expand() == C0 + n**2/2 - n/2
    assert rsolve_hyper([-1, 1], 1 + n, n).expand() == C0 + n**2/2 + n/2
    assert rsolve_hyper([-1, 1], 3*(n + n**2), n).expand() == C0 + n**3 - n
    assert rsolve_hyper([-a, 1],0,n).expand() == C0*a**n
    assert rsolve_hyper([-a, 0, 1], 0, n).expand() == (-1)**n*C1*a**(n/2) + C0*a**(n/2)
    assert rsolve_hyper([1, 1, 1], 0, n).expand() == \
        C0*(-S(1)/2 - sqrt(3)*I/2)**n + C1*(-S(1)/2 + sqrt(3)*I/2)**n
    assert rsolve_hyper([1, -2*n/a - 2/a, 1], 0, n) is None
def recurrence_term(c, f):
    """Compute RHS of recurrence in f(n) with coefficients in c."""
    total = 0
    for offset, coeff in enumerate(c):
        total += coeff * f.subs(n, n + offset)
    return total
def test_rsolve_bulk():
    """Some bulk-generated tests.

    For each candidate solution p and coefficient vector c, build the
    recurrence whose exact solution is p and check the solver recovers it.
    """
    funcs = [ n, n + 1, n**2, n**3, n**4, n + n**2, 27*n + 52*n**2 - 3*
        n**3 + 12*n**4 - 52*n**5 ]
    coeffs = [ [-2, 1], [-2, -1, 1], [-1, 1, 1, -1, 1], [-n, 1], [n**2 -
        n + 12, 1] ]
    for p in funcs:
        # compute difference
        for c in coeffs:
            q = recurrence_term(c, p)
            if p.is_polynomial(n):
                assert rsolve_poly(c, q, n) == p
            # See issue 3956:
            #if p.is_hypergeometric(n):
            #    assert rsolve_hyper(c, q, n) == p
def test_rsolve():
    """End-to-end rsolve(): general solutions, initial conditions in the
    three accepted forms (list, {index: value}, {y(index): value}), and a
    round-trip check that each solution actually satisfies its recurrence.
    """
    # Fibonacci-type recurrence.
    f = y(n + 2) - y(n + 1) - y(n)

    h = sqrt(5)*(S.Half + S.Half*sqrt(5))**n \
        - sqrt(5)*(S.Half - S.Half*sqrt(5))**n

    assert rsolve(f, y(n)) in [
        C0*(S.Half - S.Half*sqrt(5))**n + C1*(S.Half + S.Half*sqrt(5))**n,
        C1*(S.Half - S.Half*sqrt(5))**n + C0*(S.Half + S.Half*sqrt(5))**n,
    ]

    assert rsolve(f, y(n), [0, 5]) == h
    assert rsolve(f, y(n), {0: 0, 1: 5}) == h
    assert rsolve(f, y(n), {y(0): 0, y(1): 5}) == h
    assert rsolve(y(n) - y(n - 1) - y(n - 2), y(n), [0, 5]) == h
    assert rsolve(Eq(y(n), y(n - 1) + y(n - 2)), y(n), [0, 5]) == h

    assert f.subs(y, Lambda(k, rsolve(f, y(n)).subs(n, k))).simplify() == 0

    # Variable-coefficient recurrence.
    f = (n - 1)*y(n + 2) - (n**2 + 3*n - 2)*y(n + 1) + 2*n*(n + 1)*y(n)

    g = C1*factorial(n) + C0*2**n

    h = -3*factorial(n) + 3*2**n

    assert rsolve(f, y(n)) == g
    assert rsolve(f, y(n), []) == g
    assert rsolve(f, y(n), {}) == g

    assert rsolve(f, y(n), [0, 3]) == h
    assert rsolve(f, y(n), {0: 0, 1: 3}) == h
    assert rsolve(f, y(n), {y(0): 0, y(1): 3}) == h

    assert f.subs(y, Lambda(k, rsolve(f, y(n)).subs(n, k))).simplify() == 0

    # First-order cases; over-determined conditions yield None.
    f = y(n) - y(n - 1) - 2

    assert rsolve(f, y(n), {y(0): 0}) == 2*n
    assert rsolve(f, y(n), {y(0): 1}) == 2*n + 1
    assert rsolve(f, y(n), {y(0): 0, y(1): 1}) is None

    assert f.subs(y, Lambda(k, rsolve(f, y(n)).subs(n, k))).simplify() == 0

    f = 3*y(n - 1) - y(n) - 1

    assert rsolve(f, y(n), {y(0): 0}) == -3**n/2 + S.Half
    assert rsolve(f, y(n), {y(0): 1}) == 3**n/2 + S.Half
    assert rsolve(f, y(n), {y(0): 2}) == 3*3**n/2 + S.Half

    assert f.subs(y, Lambda(k, rsolve(f, y(n)).subs(n, k))).simplify() == 0

    f = y(n) - 1/n*y(n - 1)
    assert rsolve(f, y(n)) == C0/factorial(n)
    assert f.subs(y, Lambda(k, rsolve(f, y(n)).subs(n, k))).simplify() == 0

    f = y(n) - 1/n*y(n - 1) - 1
    assert rsolve(f, y(n)) is None

    f = 2*y(n - 1) + (1 - n)*y(n)/n

    assert rsolve(f, y(n), {y(1): 1}) == 2**(n - 1)*n
    assert rsolve(f, y(n), {y(1): 2}) == 2**(n - 1)*n*2
    assert rsolve(f, y(n), {y(1): 3}) == 2**(n - 1)*n*3

    assert f.subs(y, Lambda(k, rsolve(f, y(n)).subs(n, k))).simplify() == 0

    f = (n - 1)*(n - 2)*y(n + 2) - (n + 1)*(n + 2)*y(n)

    assert rsolve(f, y(n), {y(3): 6, y(4): 24}) == n*(n - 1)*(n - 2)
    assert rsolve(
        f, y(n), {y(3): 6, y(4): -24}) == -n*(n - 1)*(n - 2)*(-1)**(n)

    assert f.subs(y, Lambda(k, rsolve(f, y(n)).subs(n, k))).simplify() == 0

    assert rsolve(Eq(y(n + 1), a*y(n)), y(n), {y(1): a}).simplify() == a**n

    assert rsolve(y(n) - a*y(n-2),y(n), \
        {y(1): sqrt(a)*(a + b), y(2): a*(a - b)}).simplify() == \
        a**(n/2)*(-(-1)**n*b + a)

    f = (-16*n**2 + 32*n - 12)*y(n - 1) + (4*n**2 - 12*n + 9)*y(n)

    assert expand_func(rsolve(f, y(n), \
        {y(1): binomial(2*n + 1, 3)}).rewrite(gamma)).simplify() == \
        2**(2*n)*n*(2*n - 1)*(4*n**2 - 1)/12

    assert (rsolve(y(n) + a*(y(n + 1) + y(n - 1))/2, y(n)) -
            (C0*((sqrt(-a**2 + 1) - 1)/a)**n +
             C1*((-sqrt(-a**2 + 1) - 1)/a)**n)).simplify() == 0

    assert rsolve((k + 1)*y(k), y(k)) is None
    assert (rsolve((k + 1)*y(k) + (k + 3)*y(k + 1) + (k + 5)*y(k + 2), y(k))
            is None)
def test_rsolve_raises():
    """rsolve() rejects malformed equations and mismatched unknowns."""
    x = Function('x')
    raises(ValueError, lambda: rsolve(y(n) - y(k + 1), y(n)))
    raises(ValueError, lambda: rsolve(y(n) - y(n + 1), x(n)))
    raises(ValueError, lambda: rsolve(y(n) - x(n + 1), y(n)))
    raises(ValueError, lambda: rsolve(y(n) - sqrt(n)*y(n + 1), y(n)))
    raises(ValueError, lambda: rsolve(y(n) - y(n + 1), y(n), {x(0): 0}))


def test_issue_6844():
    """Repeated characteristic root 1/2 gives a (C0 + C1*n)*2**-n solution."""
    f = y(n + 2) - y(n + 1) + y(n)/4
    assert rsolve(f, y(n)) == 2**(-n)*(C0 + C1*n)
    assert rsolve(f, y(n), {y(0): 0, y(1): 1}) == 2*2**(-n)*n
| bsd-3-clause |
cloud-rocket/python-OBD | obd/decoders.py | 1 | 9011 |
########################################################################
# #
# python-OBD: A python OBD-II serial module derived from pyobd #
# #
# Copyright 2004 Donour Sizemore (donour@uchicago.edu) #
# Copyright 2009 Secons Ltd. (www.obdtester.com) #
# Copyright 2009 Peter J. Creath #
# Copyright 2015 Brendan Whitfield (bcw7044@rit.edu) #
# #
########################################################################
# #
# decoders.py #
# #
# This file is part of python-OBD (a derivative of pyOBD) #
# #
# python-OBD is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 2 of the License, or #
# (at your option) any later version. #
# #
# python-OBD is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with python-OBD. If not, see <http://www.gnu.org/licenses/>. #
# #
########################################################################
import math
from .utils import *
from .codes import *
from .debug import debug
'''
All decoders take the form:
def <name>(_hex):
...
return (<value>, <unit>)
'''
# todo
def todo(_hex):
    return (_hex, Unit.NONE)


# hex in, hex out
def noop(_hex):
    return (_hex, Unit.NONE)


# hex in, bitstring out
def pid(_hex):
    # one bit per hex nibble
    v = bitstring(_hex, len(_hex) * 4)
    return (v, Unit.NONE)


'''
Sensor decoders
Return Value object with value and units
'''


def count(_hex):
    v = unhex(_hex)
    return (v, Unit.COUNT)


# 0 to 100 %
def percent(_hex):
    # only the first data byte carries the value
    v = unhex(_hex[0:2])
    v = v * 100.0 / 255.0
    return (v, Unit.PERCENT)


# -100 to 100 %
def percent_centered(_hex):
    # first byte, centered on 128
    v = unhex(_hex[0:2])
    v = (v - 128) * 100.0 / 128.0
    return (v, Unit.PERCENT)


# -40 to 215 C
def temp(_hex):
    v = unhex(_hex)
    v = v - 40  # 40-degree offset per the OBD scaling
    return (v, Unit.C)


# -40 to 6513.5 C
def catalyst_temp(_hex):
    v = unhex(_hex)
    v = (v / 10.0) - 40
    return (v, Unit.C)


# -128 to 128 mA
def current_centered(_hex):
    # value lives in data bytes C and D
    v = unhex(_hex[4:8])
    v = (v / 256.0) - 128
    return (v, Unit.MA)


# 0 to 1.275 volts
def sensor_voltage(_hex):
    v = unhex(_hex[0:2])
    v = v / 200.0
    return (v, Unit.VOLT)


# 0 to 8 volts
def sensor_voltage_big(_hex):
    # value lives in data bytes C and D
    v = unhex(_hex[4:8])
    v = (v * 8.0) / 65535
    return (v, Unit.VOLT)


# 0 to 765 kPa
def fuel_pressure(_hex):
    v = unhex(_hex)
    v = v * 3
    return (v, Unit.KPA)


# 0 to 255 kPa
def pressure(_hex):
    v = unhex(_hex)
    return (v, Unit.KPA)
# 0 to 5177 kPa
def fuel_pres_vac(_hex):
    v = unhex(_hex)
    v = v * 0.079
    return (v, Unit.KPA)


# 0 to 655,350 kPa
def fuel_pres_direct(_hex):
    v = unhex(_hex)
    v = v * 10
    return (v, Unit.KPA)


# -8192 to 8192 Pa
def evap_pressure(_hex):
    # decode the twos complement
    a = twos_comp(unhex(_hex[0:2]), 8)
    b = twos_comp(unhex(_hex[2:4]), 8)
    v = ((a * 256.0) + b) / 4.0
    return (v, Unit.PA)


# 0 to 327.675 kPa
def abs_evap_pressure(_hex):
    v = unhex(_hex)
    v = v / 200.0
    return (v, Unit.KPA)


# -32767 to 32768 Pa
def evap_pressure_alt(_hex):
    v = unhex(_hex)
    v = v - 32767
    return (v, Unit.PA)


# 0 to 16,383.75 RPM
def rpm(_hex):
    v = unhex(_hex)
    v = v / 4.0
    return (v, Unit.RPM)


# 0 to 255 KPH
def speed(_hex):
    v = unhex(_hex)
    return (v, Unit.KPH)


# -64 to 63.5 degrees
def timing_advance(_hex):
    v = unhex(_hex)
    v = (v - 128) / 2.0
    return (v, Unit.DEGREES)


# -210 to 301 degrees
def inject_timing(_hex):
    v = unhex(_hex)
    v = (v - 26880) / 128.0
    return (v, Unit.DEGREES)


# 0 to 655.35 grams/sec
def maf(_hex):
    v = unhex(_hex)
    v = v / 100.0
    return (v, Unit.GPS)


# 0 to 2550 grams/sec
def max_maf(_hex):
    # only the first data byte is used
    v = unhex(_hex[0:2])
    v = v * 10
    return (v, Unit.GPS)


# 0 to 65535 seconds
def seconds(_hex):
    v = unhex(_hex)
    return (v, Unit.SEC)


# 0 to 65535 minutes
def minutes(_hex):
    v = unhex(_hex)
    return (v, Unit.MIN)


# 0 to 65535 km
def distance(_hex):
    v = unhex(_hex)
    return (v, Unit.KM)


# 0 to 3212 Liters/hour
def fuel_rate(_hex):
    v = unhex(_hex)
    v = v * 0.05
    return (v, Unit.LPH)
'''
Special decoders
Return objects, lists, etc
'''
def status(_hex):
    """Decode the OBD status bitfield: MIL state, stored-DTC count,
    ignition type, and the per-monitor test availability/completeness
    flags (the ignition type selects which bank of tests applies).
    """
    bits = bitstring(_hex, 32)

    output = Status()
    output.MIL = bitToBool(bits[0])
    output.DTC_count = unbin(bits[1:8])
    output.ignition_type = IGNITION_TYPE[unbin(bits[12])]

    # Common tests: (name, available-bit, incomplete-bit).
    output.tests.append(Test("Misfire", \
        bitToBool(bits[15]), \
        bitToBool(bits[11])))

    output.tests.append(Test("Fuel System", \
        bitToBool(bits[14]), \
        bitToBool(bits[10])))

    output.tests.append(Test("Components", \
        bitToBool(bits[13]), \
        bitToBool(bits[9])))

    # different tests for different ignition types
    if(output.ignition_type == IGNITION_TYPE[0]): # spark
        for i in range(8):
            if SPARK_TESTS[i] is not None:

                t = Test(SPARK_TESTS[i], \
                    bitToBool(bits[(2 * 8) + i]), \
                    bitToBool(bits[(3 * 8) + i]))

                output.tests.append(t)

    elif(output.ignition_type == IGNITION_TYPE[1]): # compression
        for i in range(8):
            if COMPRESSION_TESTS[i] is not None:

                t = Test(COMPRESSION_TESTS[i], \
                    bitToBool(bits[(2 * 8) + i]), \
                    bitToBool(bits[(3 * 8) + i]))

                output.tests.append(t)

    return (output, Unit.NONE)
def _decode_single_bit_table(v, table, what):
    """Shared validation for one-hot encoded status bytes.

    The response byte must have exactly one bit set, and that bit's index
    must fall inside `table`. Returns the table entry, or None after
    emitting a debug message describing the failure. `what` names the
    response ("fuel status" / "air status") for the messages.
    """
    if v <= 0:
        debug("Invalid %s response (v <= 0)" % what, True)
        return None

    i = math.log(v, 2)  # only a single bit should be on

    if i % 1 != 0:
        debug("Invalid %s response (multiple bits set)" % what, True)
        return None

    i = int(i)

    if i >= len(table):
        debug("Invalid %s response (no table entry)" % what, True)
        return None

    return table[i]


def fuel_status(_hex):
    """Decode the fuel system status byte into a FUEL_STATUS string."""
    v = unhex(_hex[0:2])  # todo, support second fuel system
    return (_decode_single_bit_table(v, FUEL_STATUS, "fuel status"),
            Unit.NONE)


def air_status(_hex):
    """Decode the secondary air status byte into an AIR_STATUS string."""
    v = unhex(_hex)
    return (_decode_single_bit_table(v, AIR_STATUS, "air status"),
            Unit.NONE)
def obd_compliance(_hex):
    """Look up the OBD compliance description for the response byte."""
    index = unhex(_hex)
    if index < len(OBD_COMPLIANCE):
        description = OBD_COMPLIANCE[index]
    else:
        description = "Error: Unknown OBD compliance response"
    return (description, Unit.NONE)


def fuel_type(_hex):
    """Look up the fuel type description for the response byte."""
    index = unhex(_hex)
    if index < len(FUEL_TYPES):
        description = FUEL_TYPES[index]
    else:
        description = "Error: Unknown fuel type response"
    return (description, Unit.NONE)
# converts 2 bytes of hex into a DTC code
def single_dtc(_hex):
    """Decode one 4-hex-character group into a DTC string such as 'P0104'.

    Returns None for malformed input or for the all-zero padding group.
    """
    if len(_hex) != 4:
        return None
    if _hex == "0000":
        return None

    bits = bitstring(_hex[0], 4)

    dtc = ""
    # first two bits select the system letter, next two the first digit
    dtc += ['P', 'C', 'B', 'U'][unbin(bits[0:2])]
    dtc += str(unbin(bits[2:4]))
    dtc += _hex[1:4]
    return dtc
# converts a frame of 2-byte DTCs into a list of DTCs
# example input = "010480034123"
# [ ][ ][ ]
def dtc(_hex):
    """Decode a hex frame into a list of (code, description) tuples.

    Each trouble code occupies 4 hex characters; malformed or all-zero
    padding groups are skipped by single_dtc().
    """
    codes = []
    for n in range(0, len(_hex), 4):
        # local renamed from 'dtc' so it no longer shadows this function
        code = single_dtc(_hex[n:n + 4])

        if code is not None:
            # pull a description if we have one, else a generic fallback
            desc = DTC.get(code, "Unknown error code")
            codes.append((code, desc))

    return (codes, Unit.NONE)
| gpl-2.0 |
tianweizhang/nova | nova/openstack/common/systemd.py | 24 | 3057 | # Copyright 2012-2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Helper module for systemd service readiness notification.
"""
import os
import socket
import sys
from nova.openstack.common import log as logging
LOG = logging.getLogger(__name__)
def _abstractify(socket_name):
if socket_name.startswith('@'):
# abstract namespace socket
socket_name = '\0%s' % socket_name[1:]
return socket_name
def _sd_notify(unset_env, msg):
    """Send a raw message to systemd's notification socket, if any.

    :param unset_env: if True, remove NOTIFY_SOCKET from the environment
                      after connecting so the message is sent only once
                      per process.
    :param msg: notification payload, e.g. 'READY=1'.
                NOTE(review): passed straight to sendall(); under Python 3
                this would need bytes -- looks like Python 2 era code,
                confirm the target runtime.

    Failures are logged at debug level and otherwise ignored (best effort).
    """
    notify_socket = os.getenv('NOTIFY_SOCKET')
    if notify_socket:
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
        try:
            sock.connect(_abstractify(notify_socket))
            sock.sendall(msg)
            if unset_env:
                del os.environ['NOTIFY_SOCKET']
        except EnvironmentError:
            LOG.debug("Systemd notification failed", exc_info=True)
        finally:
            sock.close()
def notify():
    """Send notification to Systemd that service is ready.

    For details see
    http://www.freedesktop.org/software/systemd/man/sd_notify.html
    """
    _sd_notify(False, 'READY=1')


def notify_once():
    """Send notification once to Systemd that service is ready.

    Systemd sets NOTIFY_SOCKET environment variable with the name of the
    socket listening for notifications from services.
    This method removes the NOTIFY_SOCKET environment variable to ensure
    notification is sent only once.
    """
    _sd_notify(True, 'READY=1')
def onready(notify_socket, timeout):
    """Wait for systemd style notification on the socket.

    :param notify_socket: local socket address
    :type notify_socket:  string
    :param timeout:       socket timeout
    :type timeout:        float
    :returns: 0 service ready
              1 service not ready
              2 timeout occurred
    """
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
    sock.settimeout(timeout)
    sock.bind(_abstractify(notify_socket))
    try:
        msg = sock.recv(512)
    except socket.timeout:
        return 2
    finally:
        sock.close()
    # NOTE(review): recv() returns bytes on Python 3, which would make this
    # 'in' test raise -- consistent with the Python 2 era _sd_notify above.
    if 'READY=1' in msg:
        return 0
    else:
        return 1
if __name__ == '__main__':
    # simple CLI for testing
    if len(sys.argv) == 1:
        # No arguments: act as a notifying service and send READY=1.
        notify()
    elif len(sys.argv) >= 2:
        # First argument is a timeout (seconds) for waiting on a
        # notification; the socket address comes from NOTIFY_SOCKET.
        timeout = float(sys.argv[1])
        notify_socket = os.getenv('NOTIFY_SOCKET')
        if notify_socket:
            retval = onready(notify_socket, timeout)
            sys.exit(retval)
catapult-project/catapult-csm | telemetry/third_party/mox3/mox3/mox.py | 28 | 70541 | # Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This is a fork of the pymox library intended to work with Python 3.
# The file was modified by quermit@gmail.com and dawid.fatyga@gmail.com
"""Mox, an object-mocking framework for Python.
Mox works in the record-replay-verify paradigm. When you first create
a mock object, it is in record mode. You then programmatically set
the expected behavior of the mock object (what methods are to be
called on it, with what parameters, what they should return, and in
what order).
Once you have set up the expected mock behavior, you put it in replay
mode. Now the mock responds to method calls just as you told it to.
If an unexpected method (or an expected method with unexpected
parameters) is called, then an exception will be raised.
Once you are done interacting with the mock, you need to verify that
all the expected interactions occured. (Maybe your code exited
prematurely without calling some cleanup method!) The verify phase
ensures that every expected method was called; otherwise, an exception
will be raised.
WARNING! Mock objects created by Mox are not thread-safe. If you are
call a mock in multiple threads, it should be guarded by a mutex.
TODO(stevepm): Add the option to make mocks thread-safe!
Suggested usage / workflow:
# Create Mox factory
my_mox = Mox()
# Create a mock data access object
mock_dao = my_mox.CreateMock(DAOClass)
# Set up expected behavior
mock_dao.RetrievePersonWithIdentifier('1').AndReturn(person)
mock_dao.DeletePerson(person)
# Put mocks in replay mode
my_mox.ReplayAll()
# Inject mock object and run test
controller.SetDao(mock_dao)
controller.DeletePersonById('1')
# Verify all methods were called as expected
my_mox.VerifyAll()
"""
import collections
import difflib
import inspect
import re
import types
import unittest
from mox3 import stubout
class Error(AssertionError):
    """Base exception for this module."""

    pass


class ExpectedMethodCallsError(Error):
    """Raised when an expected method wasn't called.

    This can occur if Verify() is called before all expected methods have been
    called.
    """

    def __init__(self, expected_methods):
        """Init exception.

        Args:
            # expected_methods: A sequence of MockMethod objects that should
            #     have been called.
            expected_methods: [MockMethod]

        Raises:
            ValueError: if expected_methods contains no methods.
        """
        if not expected_methods:
            raise ValueError("There must be at least one expected method")
        Error.__init__(self)
        self._expected_methods = expected_methods

    def __str__(self):
        calls = "\n".join(["%3d. %s" % (i, m)
                           for i, m in enumerate(self._expected_methods)])
        return "Verify: Expected methods never called:\n%s" % (calls,)


class UnexpectedMethodCallError(Error):
    """Raised when an unexpected method is called.

    This can occur if a method is called with incorrect parameters, or out of
    the specified order.
    """

    def __init__(self, unexpected_method, expected):
        """Init exception.

        Args:
            # unexpected_method: MockMethod that was called but was not at
            #     the head of the expected_method queue.
            # expected: MockMethod or UnorderedGroup the method should have
            #     been in.
            unexpected_method: MockMethod
            expected: MockMethod or UnorderedGroup
        """
        Error.__init__(self)
        if expected is None:
            self._str = "Unexpected method call %s" % (unexpected_method,)
        else:
            # Render a unified diff so the mismatch is easy to spot.
            differ = difflib.Differ()
            diff = differ.compare(str(unexpected_method).splitlines(True),
                                  str(expected).splitlines(True))
            self._str = ("Unexpected method call."
                         "  unexpected:-  expected:+\n%s"
                         % ("\n".join(line.rstrip() for line in diff),))

    def __str__(self):
        return self._str


class UnknownMethodCallError(Error):
    """Raised if an unknown method is requested of the mock object."""

    def __init__(self, unknown_method_name):
        """Init exception.

        Args:
            # unknown_method_name: Method call that is not part of the
            #     mocked class's public interface.
            unknown_method_name: str
        """
        Error.__init__(self)
        self._unknown_method_name = unknown_method_name

    def __str__(self):
        return ("Method called is not a member of the object: %s" %
                self._unknown_method_name)


class PrivateAttributeError(Error):
    """Raised if a MockObject is passed a private additional attribute name."""

    def __init__(self, attr):
        Error.__init__(self)
        self._attr = attr

    def __str__(self):
        # Fixed: the original concatenation was missing the space before
        # "in", producing "...availablein a mock object."
        return ("Attribute '%s' is private and should not be available "
                "in a mock object." % self._attr)


class ExpectedMockCreationError(Error):
    """Raised if mocks should have been created by StubOutClassWithMocks."""

    def __init__(self, expected_mocks):
        """Init exception.

        Args:
            # expected_mocks: A sequence of MockObjects that should have been
            #     created

        Raises:
            ValueError: if expected_mocks contains no mocks.
        """
        if not expected_mocks:
            # Fixed: message said "expected method" (copy-paste from
            # ExpectedMethodCallsError) but this class is about mocks.
            raise ValueError("There must be at least one expected mock")
        Error.__init__(self)
        self._expected_mocks = expected_mocks

    def __str__(self):
        mocks = "\n".join(["%3d. %s" % (i, m)
                           for i, m in enumerate(self._expected_mocks)])
        return "Verify: Expected mocks never created:\n%s" % (mocks,)


class UnexpectedMockCreationError(Error):
    """Raised if too many mocks were created by StubOutClassWithMocks."""

    def __init__(self, instance, *params, **named_params):
        """Init exception.

        Args:
            # instance: the type of object that was created
            # params: parameters given during instantiation
            # named_params: named parameters given during instantiation
        """
        Error.__init__(self)
        self._instance = instance
        self._params = params
        self._named_params = named_params

    def __str__(self):
        # The enumerate() index was unused in the original comprehension.
        args = ", ".join(["%s" % v for v in self._params])
        error = "Unexpected mock creation: %s(%s" % (self._instance, args)

        if self._named_params:
            error += ", " + ", ".join(["%s=%s" % (k, v) for k, v in
                                       self._named_params.items()])

        error += ")"
        return error
class Mox(object):
    """Mox: a factory for creating mock objects."""

    # A list of types that should be stubbed out with MockObjects (as
    # opposed to MockAnythings).
    _USE_MOCK_OBJECT = [types.FunctionType, types.ModuleType, types.MethodType]

    def __init__(self):
        """Initialize a new Mox."""
        self._mock_objects = []
        # Stub manager used by StubOutWithMock / StubOutClassWithMocks.
        self.stubs = stubout.StubOutForTesting()

    def CreateMock(self, class_to_mock, attrs=None, bounded_to=None):
        """Create a new mock object.

        Args:
            # class_to_mock: the class to be mocked
            class_to_mock: class
            attrs: dict of attribute names to values that will be
                set on the mock object. Only public attributes may be set.
            bounded_to: optionally, when class_to_mock is not a class,
                it points to a real class object, to which
                attribute is bound

        Returns:
            MockObject that can be used as the class_to_mock would be.
        """
        if attrs is None:
            attrs = {}
        new_mock = MockObject(class_to_mock, attrs=attrs,
                              class_to_bind=bounded_to)
        self._mock_objects.append(new_mock)
        return new_mock

    def CreateMockAnything(self, description=None):
        """Create a mock that will accept any method calls.

        This does not enforce an interface.

        Args:
            description: str. Optionally, a descriptive name for the mock
                object being created, for debugging output purposes.
        """
        new_mock = MockAnything(description=description)
        self._mock_objects.append(new_mock)
        return new_mock

    def ReplayAll(self):
        """Set all mock objects to replay mode."""
        for mock_obj in self._mock_objects:
            mock_obj._Replay()

    def VerifyAll(self):
        """Call verify on all mock objects created."""
        for mock_obj in self._mock_objects:
            mock_obj._Verify()

    def ResetAll(self):
        """Call reset on all mock objects. This does not unset stubs."""
        for mock_obj in self._mock_objects:
            mock_obj._Reset()

    def StubOutWithMock(self, obj, attr_name, use_mock_anything=False):
        """Replace a method, attribute, etc. with a Mock.

        This will replace a class or module with a MockObject, and everything
        else (method, function, etc) with a MockAnything. This can be
        overridden to always use a MockAnything by setting use_mock_anything
        to True.

        Args:
            obj: A Python object (class, module, instance, callable).
            attr_name: str. The name of the attribute to replace with a mock.
            use_mock_anything: bool. True if a MockAnything should be used
                regardless of the type of attribute.
        """
        # Only methods looked up on a class have a class to bind to.
        if inspect.isclass(obj):
            class_to_bind = obj
        else:
            class_to_bind = None

        attr_to_replace = getattr(obj, attr_name)
        attr_type = type(attr_to_replace)

        # Stubbing an existing stub means UnsetStubs was forgotten.
        if attr_type == MockAnything or attr_type == MockObject:
            raise TypeError('Cannot mock a MockAnything! Did you remember to '
                            'call UnsetStubs in your previous test?')

        type_check = (
            attr_type in self._USE_MOCK_OBJECT or
            inspect.isclass(attr_to_replace) or
            isinstance(attr_to_replace, object))
        if type_check and not use_mock_anything:
            stub = self.CreateMock(attr_to_replace, bounded_to=class_to_bind)
        else:
            stub = self.CreateMockAnything(
                description='Stub for %s' % attr_to_replace)
            stub.__name__ = attr_name

        self.stubs.Set(obj, attr_name, stub)

    def StubOutClassWithMocks(self, obj, attr_name):
        """Replace a class with a "mock factory" that will create mock objects.

        This is useful if the code-under-test directly instantiates
        dependencies. Previously some boilder plate was necessary to
        create a mock that would act as a factory. Using
        StubOutClassWithMocks, once you've stubbed out the class you may
        use the stubbed class as you would any other mock created by mox:
        during the record phase, new mock instances will be created, and
        during replay, the recorded mocks will be returned.

        In replay mode

        # Example using StubOutWithMock (the old, clunky way):

        mock1 = mox.CreateMock(my_import.FooClass)
        mock2 = mox.CreateMock(my_import.FooClass)
        foo_factory = mox.StubOutWithMock(my_import, 'FooClass',
                                          use_mock_anything=True)
        foo_factory(1, 2).AndReturn(mock1)
        foo_factory(9, 10).AndReturn(mock2)
        mox.ReplayAll()

        my_import.FooClass(1, 2)   # Returns mock1 again.
        my_import.FooClass(9, 10)  # Returns mock2 again.
        mox.VerifyAll()

        # Example using StubOutClassWithMocks:

        mox.StubOutClassWithMocks(my_import, 'FooClass')
        mock1 = my_import.FooClass(1, 2)   # Returns a new mock of FooClass
        mock2 = my_import.FooClass(9, 10)  # Returns another mock instance
        mox.ReplayAll()

        my_import.FooClass(1, 2)   # Returns mock1 again.
        my_import.FooClass(9, 10)  # Returns mock2 again.
        mox.VerifyAll()
        """
        attr_to_replace = getattr(obj, attr_name)
        attr_type = type(attr_to_replace)

        if attr_type == MockAnything or attr_type == MockObject:
            raise TypeError('Cannot mock a MockAnything! Did you remember to '
                            'call UnsetStubs in your previous test?')

        if not inspect.isclass(attr_to_replace):
            raise TypeError('Given attr is not a Class. Use StubOutWithMock.')

        factory = _MockObjectFactory(attr_to_replace, self)
        self._mock_objects.append(factory)
        self.stubs.Set(obj, attr_name, factory)

    def UnsetStubs(self):
        """Restore stubs to their original state."""
        self.stubs.UnsetAll()
def Replay(*args):
    """Put every mock given as an argument into replay mode."""
    for mock_obj in args:
        mock_obj._Replay()


def Verify(*args):
    """Verify every mock given as an argument."""
    for mock_obj in args:
        mock_obj._Verify()


def Reset(*args):
    """Reset every mock given as an argument."""
    for mock_obj in args:
        mock_obj._Reset()
class MockAnything(object):
    """A mock that can be used to mock anything.

    This is helpful for mocking classes that do not provide a public
    interface.
    """

    def __init__(self, description=None):
        """Initialize a new MockAnything.

        Args:
            description: str. Optionally, a descriptive name for the mock
                object being created, for debugging output purposes.
        """
        self._description = description
        self._Reset()

    def __repr__(self):
        if self._description:
            return '<MockAnything instance of %s>' % self._description
        else:
            return '<MockAnything instance>'

    def __getattr__(self, method_name):
        """Intercept method calls on this object.

        A new MockMethod is returned that is aware of the MockAnything's
        state (record or replay). The call will be recorded or replayed
        by the MockMethod's __call__.

        Args:
            # method name: the name of the method being called.
            method_name: str

        Returns:
            A new MockMethod aware of MockAnything's state (record or replay).
        """
        # __dir__ must behave normally so introspection still works.
        if method_name == '__dir__':
            return self.__class__.__dir__.__get__(self, self.__class__)

        return self._CreateMockMethod(method_name)

    def __str__(self):
        return self._CreateMockMethod('__str__')()

    def __call__(self, *args, **kwargs):
        return self._CreateMockMethod('__call__')(*args, **kwargs)

    def __getitem__(self, i):
        return self._CreateMockMethod('__getitem__')(i)

    def _CreateMockMethod(self, method_name, method_to_mock=None,
                          class_to_bind=object):
        """Create a new mock method call and return it.

        Args:
            # method_name: the name of the method being called.
            # method_to_mock: The actual method being mocked, used for
            #     introspection.
            # class_to_bind: Class to which method is bounded
            #     (object by default)
            method_name: str
            method_to_mock: a method object

        Returns:
            A new MockMethod aware of MockAnything's state (record or replay).
        """
        return MockMethod(method_name, self._expected_calls_queue,
                          self._replay_mode, method_to_mock=method_to_mock,
                          description=self._description,
                          class_to_bind=class_to_bind)

    def __nonzero__(self):
        """Return 1 for nonzero so the mock can be used as a conditional."""
        # Python 2 truthiness hook; __bool__ below is the Python 3 name.
        return 1

    def __bool__(self):
        """Return True for nonzero so the mock can be used as a conditional."""
        return True

    def __eq__(self, rhs):
        """Provide custom logic to compare objects."""
        return (isinstance(rhs, MockAnything) and
                self._replay_mode == rhs._replay_mode and
                self._expected_calls_queue == rhs._expected_calls_queue)

    def __ne__(self, rhs):
        """Provide custom logic to compare objects."""
        return not self == rhs

    def _Replay(self):
        """Start replaying expected method calls."""
        self._replay_mode = True

    def _Verify(self):
        """Verify that all of the expected calls have been made.

        Raises:
            ExpectedMethodCallsError: if there are still more method calls in
                the expected queue.
        """
        # If the list of expected calls is not empty, raise an exception
        if self._expected_calls_queue:
            # The last MultipleTimesGroup is not popped from the queue.
            if (len(self._expected_calls_queue) == 1 and
                    isinstance(self._expected_calls_queue[0],
                               MultipleTimesGroup) and
                    self._expected_calls_queue[0].IsSatisfied()):
                pass
            else:
                raise ExpectedMethodCallsError(self._expected_calls_queue)

    def _Reset(self):
        """Reset the state of this mock to record mode with an empty queue."""
        # Maintain a list of method calls we are expecting
        self._expected_calls_queue = collections.deque()

        # Make sure we are in setup mode, not replay mode
        self._replay_mode = False
class MockObject(MockAnything):
"""Mock object that simulates the public/protected interface of a class."""
def __init__(self, class_to_mock, attrs=None, class_to_bind=None):
"""Initialize a mock object.
Determines the methods and properties of the class and stores them.
Args:
# class_to_mock: class to be mocked
class_to_mock: class
attrs: dict of attribute names to values that will be set on the
mock object. Only public attributes may be set.
class_to_bind: optionally, when class_to_mock is not a class at
all, it points to a real class
Raises:
PrivateAttributeError: if a supplied attribute is not public.
ValueError: if an attribute would mask an existing method.
"""
if attrs is None:
attrs = {}
# Used to hack around the mixin/inheritance of MockAnything, which
# is not a proper object (it can be anything. :-)
MockAnything.__dict__['__init__'](self)
# Get a list of all the public and special methods we should mock.
self._known_methods = set()
self._known_vars = set()
self._class_to_mock = class_to_mock
if inspect.isclass(class_to_mock):
self._class_to_bind = self._class_to_mock
else:
self._class_to_bind = class_to_bind
try:
if inspect.isclass(self._class_to_mock):
self._description = class_to_mock.__name__
else:
self._description = type(class_to_mock).__name__
except Exception:
pass
for method in dir(class_to_mock):
attr = getattr(class_to_mock, method)
if callable(attr):
self._known_methods.add(method)
elif not (type(attr) is property):
# treating properties as class vars makes little sense.
self._known_vars.add(method)
# Set additional attributes at instantiation time; this is quicker
# than manually setting attributes that are normally created in
# __init__.
for attr, value in attrs.items():
if attr.startswith("_"):
raise PrivateAttributeError(attr)
elif attr in self._known_methods:
raise ValueError("'%s' is a method of '%s' objects." % (attr,
class_to_mock))
else:
setattr(self, attr, value)
def _CreateMockMethod(self, *args, **kwargs):
"""Overridden to provide self._class_to_mock to class_to_bind."""
kwargs.setdefault("class_to_bind", self._class_to_bind)
return super(MockObject, self)._CreateMockMethod(*args, **kwargs)
def __getattr__(self, name):
"""Intercept attribute request on this object.
If the attribute is a public class variable, it will be returned and
not recorded as a call.
If the attribute is not a variable, it is handled like a method
call. The method name is checked against the set of mockable
methods, and a new MockMethod is returned that is aware of the
MockObject's state (record or replay). The call will be recorded
or replayed by the MockMethod's __call__.
Args:
# name: the name of the attribute being requested.
name: str
Returns:
Either a class variable or a new MockMethod that is aware of the
state of the mock (record or replay).
Raises:
UnknownMethodCallError if the MockObject does not mock the
requested method.
"""
if name in self._known_vars:
return getattr(self._class_to_mock, name)
if name in self._known_methods:
return self._CreateMockMethod(
name,
method_to_mock=getattr(self._class_to_mock, name))
raise UnknownMethodCallError(name)
def __eq__(self, rhs):
"""Provide custom logic to compare objects."""
return (isinstance(rhs, MockObject) and
self._class_to_mock == rhs._class_to_mock and
self._replay_mode == rhs._replay_mode and
self._expected_calls_queue == rhs._expected_calls_queue)
def __setitem__(self, key, value):
"""Custom logic for mocking classes that support item assignment.
Args:
key: Key to set the value for.
value: Value to set.
Returns:
Expected return value in replay mode. A MockMethod object for the
__setitem__ method that has already been called if not in replay
mode.
Raises:
TypeError if the underlying class does not support item assignment.
UnexpectedMethodCallError if the object does not expect the call to
__setitem__.
"""
# Verify the class supports item assignment.
if '__setitem__' not in dir(self._class_to_mock):
raise TypeError('object does not support item assignment')
# If we are in replay mode then simply call the mock __setitem__ method
if self._replay_mode:
return MockMethod('__setitem__', self._expected_calls_queue,
self._replay_mode)(key, value)
# Otherwise, create a mock method __setitem__.
return self._CreateMockMethod('__setitem__')(key, value)
def __getitem__(self, key):
"""Provide custom logic for mocking classes that are subscriptable.
Args:
key: Key to return the value for.
Returns:
Expected return value in replay mode. A MockMethod object for the
__getitem__ method that has already been called if not in replay
mode.
Raises:
TypeError if the underlying class is not subscriptable.
UnexpectedMethodCallError if the object does not expect the call to
__getitem__.
"""
# Verify the class supports item assignment.
if '__getitem__' not in dir(self._class_to_mock):
raise TypeError('unsubscriptable object')
# If we are in replay mode then simply call the mock __getitem__ method
if self._replay_mode:
return MockMethod('__getitem__', self._expected_calls_queue,
self._replay_mode)(key)
# Otherwise, create a mock method __getitem__.
return self._CreateMockMethod('__getitem__')(key)
def __iter__(self):
"""Provide custom logic for mocking classes that are iterable.
Returns:
Expected return value in replay mode. A MockMethod object for the
__iter__ method that has already been called if not in replay mode.
Raises:
TypeError if the underlying class is not iterable.
UnexpectedMethodCallError if the object does not expect the call to
__iter__.
"""
methods = dir(self._class_to_mock)
# Verify the class supports iteration.
if '__iter__' not in methods:
# If it doesn't have iter method and we are in replay method,
# then try to iterate using subscripts.
if '__getitem__' not in methods or not self._replay_mode:
raise TypeError('not iterable object')
else:
results = []
index = 0
try:
while True:
results.append(self[index])
index += 1
except IndexError:
return iter(results)
# If we are in replay mode then simply call the mock __iter__ method.
if self._replay_mode:
return MockMethod('__iter__', self._expected_calls_queue,
self._replay_mode)()
# Otherwise, create a mock method __iter__.
return self._CreateMockMethod('__iter__')()
def __contains__(self, key):
"""Provide custom logic for mocking classes that contain items.
Args:
key: Key to look in container for.
Returns:
Expected return value in replay mode. A MockMethod object for the
__contains__ method that has already been called if not in replay
mode.
Raises:
TypeError if the underlying class does not implement __contains__
UnexpectedMethodCaller if the object does not expect the call to
__contains__.
"""
contains = self._class_to_mock.__dict__.get('__contains__', None)
if contains is None:
raise TypeError('unsubscriptable object')
if self._replay_mode:
return MockMethod('__contains__', self._expected_calls_queue,
self._replay_mode)(key)
return self._CreateMockMethod('__contains__')(key)
def __call__(self, *params, **named_params):
"""Provide custom logic for mocking classes that are callable."""
# Verify the class we are mocking is callable.
is_callable = hasattr(self._class_to_mock, '__call__')
if not is_callable:
raise TypeError('Not callable')
# Because the call is happening directly on this object instead of
# a method, the call on the mock method is made right here
# If we are mocking a Function, then use the function, and not the
# __call__ method
method = None
if type(self._class_to_mock) in (types.FunctionType, types.MethodType):
method = self._class_to_mock
else:
method = getattr(self._class_to_mock, '__call__')
mock_method = self._CreateMockMethod('__call__', method_to_mock=method)
return mock_method(*params, **named_params)
@property
def __name__(self):
"""Return the name that is being mocked."""
return self._description
# TODO(dejw): this property stopped to work after I introduced changes with
# binding classes. Fortunately I found a solution in the form of
# __getattribute__ method below, but this issue should be investigated
@property
def __class__(self):
return self._class_to_mock
def __dir__(self):
"""Return only attributes of a class to mock."""
return dir(self._class_to_mock)
def __getattribute__(self, name):
"""Return _class_to_mock on __class__ attribute."""
if name == "__class__":
return super(MockObject, self).__getattribute__("_class_to_mock")
return super(MockObject, self).__getattribute__(name)
class _MockObjectFactory(MockObject):
"""A MockObjectFactory creates mocks and verifies __init__ params.
A MockObjectFactory removes the boiler plate code that was previously
necessary to stub out direction instantiation of a class.
The MockObjectFactory creates new MockObjects when called and verifies the
__init__ params are correct when in record mode. When replaying,
existing mocks are returned, and the __init__ params are verified.
See StubOutWithMock vs StubOutClassWithMocks for more detail.
"""
def __init__(self, class_to_mock, mox_instance):
MockObject.__init__(self, class_to_mock)
self._mox = mox_instance
self._instance_queue = collections.deque()
def __call__(self, *params, **named_params):
"""Instantiate and record that a new mock has been created."""
method = getattr(self._class_to_mock, '__init__')
mock_method = self._CreateMockMethod('__init__', method_to_mock=method)
# Note: calling mock_method() is deferred in order to catch the
# empty instance_queue first.
if self._replay_mode:
if not self._instance_queue:
raise UnexpectedMockCreationError(self._class_to_mock, *params,
**named_params)
mock_method(*params, **named_params)
return self._instance_queue.pop()
else:
mock_method(*params, **named_params)
instance = self._mox.CreateMock(self._class_to_mock)
self._instance_queue.appendleft(instance)
return instance
def _Verify(self):
"""Verify that all mocks have been created."""
if self._instance_queue:
raise ExpectedMockCreationError(self._instance_queue)
super(_MockObjectFactory, self)._Verify()
class MethodSignatureChecker(object):
"""Ensures that methods are called correctly."""
_NEEDED, _DEFAULT, _GIVEN = range(3)
def __init__(self, method, class_to_bind=None):
"""Creates a checker.
Args:
# method: A method to check.
# class_to_bind: optionally, a class used to type check first
# method parameter, only used with unbound methods
method: function
class_to_bind: type or None
Raises:
ValueError: method could not be inspected, so checks aren't
possible. Some methods and functions like built-ins
can't be inspected.
"""
try:
self._args, varargs, varkw, defaults = inspect.getargspec(method)
except TypeError:
raise ValueError('Could not get argument specification for %r'
% (method,))
if inspect.ismethod(method) or class_to_bind:
self._args = self._args[1:] # Skip 'self'.
self._method = method
self._instance = None # May contain the instance this is bound to.
self._instance = getattr(method, "__self__", None)
# _bounded_to determines whether the method is bound or not
if self._instance:
self._bounded_to = self._instance.__class__
else:
self._bounded_to = class_to_bind or getattr(method, "im_class",
None)
self._has_varargs = varargs is not None
self._has_varkw = varkw is not None
if defaults is None:
self._required_args = self._args
self._default_args = []
else:
self._required_args = self._args[:-len(defaults)]
self._default_args = self._args[-len(defaults):]
def _RecordArgumentGiven(self, arg_name, arg_status):
"""Mark an argument as being given.
Args:
# arg_name: The name of the argument to mark in arg_status.
# arg_status: Maps argument names to one of
# _NEEDED, _DEFAULT, _GIVEN.
arg_name: string
arg_status: dict
Raises:
AttributeError: arg_name is already marked as _GIVEN.
"""
if arg_status.get(arg_name, None) == MethodSignatureChecker._GIVEN:
raise AttributeError('%s provided more than once' % (arg_name,))
arg_status[arg_name] = MethodSignatureChecker._GIVEN
def Check(self, params, named_params):
"""Ensures that the parameters used while recording a call are valid.
Args:
# params: A list of positional parameters.
# named_params: A dict of named parameters.
params: list
named_params: dict
Raises:
AttributeError: the given parameters don't work with the given
method.
"""
arg_status = dict((a, MethodSignatureChecker._NEEDED)
for a in self._required_args)
for arg in self._default_args:
arg_status[arg] = MethodSignatureChecker._DEFAULT
# WARNING: Suspect hack ahead.
#
# Check to see if this is an unbound method, where the instance
# should be bound as the first argument. We try to determine if
# the first argument (param[0]) is an instance of the class, or it
# is equivalent to the class (used to account for Comparators).
#
# NOTE: If a Func() comparator is used, and the signature is not
# correct, this will cause extra executions of the function.
if inspect.ismethod(self._method) or self._bounded_to:
# The extra param accounts for the bound instance.
if len(params) > len(self._required_args):
expected = self._bounded_to
# Check if the param is an instance of the expected class,
# or check equality (useful for checking Comparators).
# This is a hack to work around the fact that the first
# parameter can be a Comparator, and the comparison may raise
# an exception during this comparison, which is OK.
try:
param_equality = (params[0] == expected)
except Exception:
param_equality = False
if isinstance(params[0], expected) or param_equality:
params = params[1:]
# If the IsA() comparator is being used, we need to check the
# inverse of the usual case - that the given instance is a
# subclass of the expected class. For example, the code under
# test does late binding to a subclass.
elif (isinstance(params[0], IsA) and
params[0]._IsSubClass(expected)):
params = params[1:]
# Check that each positional param is valid.
for i in range(len(params)):
try:
arg_name = self._args[i]
except IndexError:
if not self._has_varargs:
raise AttributeError(
'%s does not take %d or more positional '
'arguments' % (self._method.__name__, i))
else:
self._RecordArgumentGiven(arg_name, arg_status)
# Check each keyword argument.
for arg_name in named_params:
if arg_name not in arg_status and not self._has_varkw:
raise AttributeError('%s is not expecting keyword argument %s'
% (self._method.__name__, arg_name))
self._RecordArgumentGiven(arg_name, arg_status)
# Ensure all the required arguments have been given.
still_needed = [k for k, v in arg_status.items()
if v == MethodSignatureChecker._NEEDED]
if still_needed:
raise AttributeError('No values given for arguments: %s'
% (' '.join(sorted(still_needed))))
class MockMethod(object):
"""Callable mock method.
A MockMethod should act exactly like the method it mocks, accepting
parameters and returning a value, or throwing an exception (as specified).
When this method is called, it can optionally verify whether the called
method (name and signature) matches the expected method.
"""
def __init__(self, method_name, call_queue, replay_mode,
method_to_mock=None, description=None, class_to_bind=None):
"""Construct a new mock method.
Args:
# method_name: the name of the method
# call_queue: deque of calls, verify this call against the head,
# or add this call to the queue.
# replay_mode: False if we are recording, True if we are verifying
# calls against the call queue.
# method_to_mock: The actual method being mocked, used for
# introspection.
# description: optionally, a descriptive name for this method.
# Typically this is equal to the descriptive name of
# the method's class.
# class_to_bind: optionally, a class that is used for unbound
# methods (or functions in Python3) to which method
# is bound, in order not to loose binding
# information. If given, it will be used for
# checking the type of first method parameter
method_name: str
call_queue: list or deque
replay_mode: bool
method_to_mock: a method object
description: str or None
class_to_bind: type or None
"""
self._name = method_name
self.__name__ = method_name
self._call_queue = call_queue
if not isinstance(call_queue, collections.deque):
self._call_queue = collections.deque(self._call_queue)
self._replay_mode = replay_mode
self._description = description
self._params = None
self._named_params = None
self._return_value = None
self._exception = None
self._side_effects = None
try:
self._checker = MethodSignatureChecker(method_to_mock,
class_to_bind=class_to_bind)
except ValueError:
self._checker = None
def __call__(self, *params, **named_params):
"""Log parameters and return the specified return value.
If the Mock(Anything/Object) associated with this call is in record
mode, this MockMethod will be pushed onto the expected call queue.
If the mock is in replay mode, this will pop a MockMethod off the
top of the queue and verify this call is equal to the expected call.
Raises:
UnexpectedMethodCall if this call is supposed to match an expected
method call and it does not.
"""
self._params = params
self._named_params = named_params
if not self._replay_mode:
if self._checker is not None:
self._checker.Check(params, named_params)
self._call_queue.append(self)
return self
expected_method = self._VerifyMethodCall()
if expected_method._side_effects:
result = expected_method._side_effects(*params, **named_params)
if expected_method._return_value is None:
expected_method._return_value = result
if expected_method._exception:
raise expected_method._exception
return expected_method._return_value
def __getattr__(self, name):
"""Raise an AttributeError with a helpful message."""
raise AttributeError(
'MockMethod has no attribute "%s". '
'Did you remember to put your mocks in replay mode?' % name)
def __iter__(self):
"""Raise a TypeError with a helpful message."""
raise TypeError(
'MockMethod cannot be iterated. '
'Did you remember to put your mocks in replay mode?')
def next(self):
"""Raise a TypeError with a helpful message."""
raise TypeError(
'MockMethod cannot be iterated. '
'Did you remember to put your mocks in replay mode?')
def __next__(self):
"""Raise a TypeError with a helpful message."""
raise TypeError(
'MockMethod cannot be iterated. '
'Did you remember to put your mocks in replay mode?')
def _PopNextMethod(self):
"""Pop the next method from our call queue."""
try:
return self._call_queue.popleft()
except IndexError:
raise UnexpectedMethodCallError(self, None)
def _VerifyMethodCall(self):
"""Verify the called method is expected.
This can be an ordered method, or part of an unordered set.
Returns:
The expected mock method.
Raises:
UnexpectedMethodCall if the method called was not expected.
"""
expected = self._PopNextMethod()
# Loop here, because we might have a MethodGroup followed by another
# group.
while isinstance(expected, MethodGroup):
expected, method = expected.MethodCalled(self)
if method is not None:
return method
# This is a mock method, so just check equality.
if expected != self:
raise UnexpectedMethodCallError(self, expected)
return expected
def __str__(self):
params = ', '.join(
[repr(p) for p in self._params or []] +
['%s=%r' % x for x in sorted((self._named_params or {}).items())])
full_desc = "%s(%s) -> %r" % (self._name, params, self._return_value)
if self._description:
full_desc = "%s.%s" % (self._description, full_desc)
return full_desc
def __hash__(self):
return id(self)
def __eq__(self, rhs):
"""Test whether this MockMethod is equivalent to another MockMethod.
Args:
# rhs: the right hand side of the test
rhs: MockMethod
"""
return (isinstance(rhs, MockMethod) and
self._name == rhs._name and
self._params == rhs._params and
self._named_params == rhs._named_params)
def __ne__(self, rhs):
"""Test if this MockMethod is not equivalent to another MockMethod.
Args:
# rhs: the right hand side of the test
rhs: MockMethod
"""
return not self == rhs
def GetPossibleGroup(self):
"""Returns a possible group from the end of the call queue.
Return None if no other methods are on the stack.
"""
# Remove this method from the tail of the queue so we can add it
# to a group.
this_method = self._call_queue.pop()
assert this_method == self
# Determine if the tail of the queue is a group, or just a regular
# ordered mock method.
group = None
try:
group = self._call_queue[-1]
except IndexError:
pass
return group
def _CheckAndCreateNewGroup(self, group_name, group_class):
"""Checks if the last method (a possible group) is an instance of our
group_class. Adds the current method to this group or creates a
new one.
Args:
group_name: the name of the group.
group_class: the class used to create instance of this new group
"""
group = self.GetPossibleGroup()
# If this is a group, and it is the correct group, add the method.
if isinstance(group, group_class) and group.group_name() == group_name:
group.AddMethod(self)
return self
# Create a new group and add the method.
new_group = group_class(group_name)
new_group.AddMethod(self)
self._call_queue.append(new_group)
return self
def InAnyOrder(self, group_name="default"):
"""Move this method into a group of unordered calls.
A group of unordered calls must be defined together, and must be
executed in full before the next expected method can be called.
There can be multiple groups that are expected serially, if they are
given different group names. The same group name can be reused if there
is a standard method call, or a group with a different name, spliced
between usages.
Args:
group_name: the name of the unordered group.
Returns:
self
"""
return self._CheckAndCreateNewGroup(group_name, UnorderedGroup)
def MultipleTimes(self, group_name="default"):
"""Move method into group of calls which may be called multiple times.
A group of repeating calls must be defined together, and must be
executed in full before the next expected method can be called.
Args:
group_name: the name of the unordered group.
Returns:
self
"""
return self._CheckAndCreateNewGroup(group_name, MultipleTimesGroup)
def AndReturn(self, return_value):
"""Set the value to return when this method is called.
Args:
# return_value can be anything.
"""
self._return_value = return_value
return return_value
def AndRaise(self, exception):
"""Set the exception to raise when this method is called.
Args:
# exception: the exception to raise when this method is called.
exception: Exception
"""
self._exception = exception
def WithSideEffects(self, side_effects):
"""Set the side effects that are simulated when this method is called.
Args:
side_effects: A callable which modifies the parameters or other
relevant state which a given test case depends on.
Returns:
Self for chaining with AndReturn and AndRaise.
"""
self._side_effects = side_effects
return self
class Comparator:
"""Base class for all Mox comparators.
A Comparator can be used as a parameter to a mocked method when the exact
value is not known. For example, the code you are testing might build up
a long SQL string that is passed to your mock DAO. You're only interested
that the IN clause contains the proper primary keys, so you can set your
mock up as follows:
mock_dao.RunQuery(StrContains('IN (1, 2, 4, 5)')).AndReturn(mock_result)
Now whatever query is passed in must contain the string 'IN (1, 2, 4, 5)'.
A Comparator may replace one or more parameters, for example:
# return at most 10 rows
mock_dao.RunQuery(StrContains('SELECT'), 10)
or
# Return some non-deterministic number of rows
mock_dao.RunQuery(StrContains('SELECT'), IsA(int))
"""
def equals(self, rhs):
"""Special equals method that all comparators must implement.
Args:
rhs: any python object
"""
raise NotImplementedError('method must be implemented by a subclass.')
def __eq__(self, rhs):
return self.equals(rhs)
def __ne__(self, rhs):
return not self.equals(rhs)
class Is(Comparator):
"""Comparison class used to check identity, instead of equality."""
def __init__(self, obj):
self._obj = obj
def equals(self, rhs):
return rhs is self._obj
def __repr__(self):
return "<is %r (%s)>" % (self._obj, id(self._obj))
class IsA(Comparator):
"""This class wraps a basic Python type or class. It is used to verify
that a parameter is of the given type or class.
Example:
mock_dao.Connect(IsA(DbConnectInfo))
"""
def __init__(self, class_name):
"""Initialize IsA
Args:
class_name: basic python type or a class
"""
self._class_name = class_name
def equals(self, rhs):
"""Check to see if the RHS is an instance of class_name.
Args:
# rhs: the right hand side of the test
rhs: object
Returns:
bool
"""
try:
return isinstance(rhs, self._class_name)
except TypeError:
# Check raw types if there was a type error. This is helpful for
# things like cStringIO.StringIO.
return type(rhs) == type(self._class_name)
def _IsSubClass(self, clazz):
"""Check to see if the IsA comparators class is a subclass of clazz.
Args:
# clazz: a class object
Returns:
bool
"""
try:
return issubclass(self._class_name, clazz)
except TypeError:
# Check raw types if there was a type error. This is helpful for
# things like cStringIO.StringIO.
return type(clazz) == type(self._class_name)
def __repr__(self):
return 'mox.IsA(%s) ' % str(self._class_name)
class IsAlmost(Comparator):
"""Comparison class used to check whether a parameter is nearly equal
to a given value. Generally useful for floating point numbers.
Example mock_dao.SetTimeout((IsAlmost(3.9)))
"""
def __init__(self, float_value, places=7):
"""Initialize IsAlmost.
Args:
float_value: The value for making the comparison.
places: The number of decimal places to round to.
"""
self._float_value = float_value
self._places = places
def equals(self, rhs):
"""Check to see if RHS is almost equal to float_value
Args:
rhs: the value to compare to float_value
Returns:
bool
"""
try:
return round(rhs - self._float_value, self._places) == 0
except Exception:
# Probably because either float_value or rhs is not a number.
return False
def __repr__(self):
return str(self._float_value)
class StrContains(Comparator):
"""Comparison class used to check whether a substring exists in a
string parameter. This can be useful in mocking a database with SQL
passed in as a string parameter, for example.
Example:
mock_dao.RunQuery(StrContains('IN (1, 2, 4, 5)')).AndReturn(mock_result)
"""
def __init__(self, search_string):
"""Initialize.
Args:
# search_string: the string you are searching for
search_string: str
"""
self._search_string = search_string
def equals(self, rhs):
"""Check to see if the search_string is contained in the rhs string.
Args:
# rhs: the right hand side of the test
rhs: object
Returns:
bool
"""
try:
return rhs.find(self._search_string) > -1
except Exception:
return False
def __repr__(self):
return '<str containing \'%s\'>' % self._search_string
class Regex(Comparator):
"""Checks if a string matches a regular expression.
This uses a given regular expression to determine equality.
"""
def __init__(self, pattern, flags=0):
"""Initialize.
Args:
# pattern is the regular expression to search for
pattern: str
# flags passed to re.compile function as the second argument
flags: int
"""
self.flags = flags
self.regex = re.compile(pattern, flags=flags)
def equals(self, rhs):
"""Check to see if rhs matches regular expression pattern.
Returns:
bool
"""
try:
return self.regex.search(rhs) is not None
except Exception:
return False
def __repr__(self):
s = '<regular expression \'%s\'' % self.regex.pattern
if self.flags:
s += ', flags=%d' % self.flags
s += '>'
return s
class In(Comparator):
"""Checks whether an item (or key) is in a list (or dict) parameter.
Example:
mock_dao.GetUsersInfo(In('expectedUserName')).AndReturn(mock_result)
"""
def __init__(self, key):
"""Initialize.
Args:
# key is any thing that could be in a list or a key in a dict
"""
self._key = key
def equals(self, rhs):
"""Check to see whether key is in rhs.
Args:
rhs: dict
Returns:
bool
"""
try:
return self._key in rhs
except Exception:
return False
def __repr__(self):
return '<sequence or map containing \'%s\'>' % str(self._key)
class Not(Comparator):
"""Checks whether a predicates is False.
Example:
mock_dao.UpdateUsers(Not(ContainsKeyValue('stevepm',
stevepm_user_info)))
"""
def __init__(self, predicate):
"""Initialize.
Args:
# predicate: a Comparator instance.
"""
assert isinstance(predicate, Comparator), ("predicate %r must be a"
" Comparator." % predicate)
self._predicate = predicate
def equals(self, rhs):
"""Check to see whether the predicate is False.
Args:
rhs: A value that will be given in argument of the predicate.
Returns:
bool
"""
try:
return not self._predicate.equals(rhs)
except Exception:
return False
def __repr__(self):
return '<not \'%s\'>' % self._predicate
class ContainsKeyValue(Comparator):
"""Checks whether a key/value pair is in a dict parameter.
Example:
mock_dao.UpdateUsers(ContainsKeyValue('stevepm', stevepm_user_info))
"""
def __init__(self, key, value):
"""Initialize.
Args:
# key: a key in a dict
# value: the corresponding value
"""
self._key = key
self._value = value
def equals(self, rhs):
"""Check whether the given key/value pair is in the rhs dict.
Returns:
bool
"""
try:
return rhs[self._key] == self._value
except Exception:
return False
def __repr__(self):
return '<map containing the entry \'%s: %s\'>' % (str(self._key),
str(self._value))
class ContainsAttributeValue(Comparator):
"""Checks whether passed parameter contains attributes with a given value.
Example:
mock_dao.UpdateSomething(ContainsAttribute('stevepm', stevepm_user_info))
"""
def __init__(self, key, value):
"""Initialize.
Args:
# key: an attribute name of an object
# value: the corresponding value
"""
self._key = key
self._value = value
def equals(self, rhs):
"""Check if the given attribute has a matching value in the rhs object.
Returns:
bool
"""
try:
return getattr(rhs, self._key) == self._value
except Exception:
return False
class SameElementsAs(Comparator):
"""Checks whether sequences contain the same elements (ignoring order).
Example:
mock_dao.ProcessUsers(SameElementsAs('stevepm', 'salomaki'))
"""
def __init__(self, expected_seq):
"""Initialize.
Args:
expected_seq: a sequence
"""
# Store in case expected_seq is an iterator.
self._expected_list = list(expected_seq)
def equals(self, actual_seq):
"""Check to see whether actual_seq has same elements as expected_seq.
Args:
actual_seq: sequence
Returns:
bool
"""
try:
# Store in case actual_seq is an iterator. We potentially iterate
# twice: once to make the dict, once in the list fallback.
actual_list = list(actual_seq)
except TypeError:
# actual_seq cannot be read as a sequence.
#
# This happens because Mox uses __eq__ both to check object
# equality (in MethodSignatureChecker) and to invoke Comparators.
return False
try:
return set(self._expected_list) == set(actual_list)
except TypeError:
# Fall back to slower list-compare if any of the objects
# are unhashable.
if len(self._expected_list) != len(actual_list):
return False
for el in actual_list:
if el not in self._expected_list:
return False
return True
def __repr__(self):
return '<sequence with same elements as \'%s\'>' % self._expected_list
class And(Comparator):
"""Evaluates one or more Comparators on RHS, returns an AND of the results.
"""
def __init__(self, *args):
"""Initialize.
Args:
*args: One or more Comparator
"""
self._comparators = args
def equals(self, rhs):
"""Checks whether all Comparators are equal to rhs.
Args:
# rhs: can be anything
Returns:
bool
"""
for comparator in self._comparators:
if not comparator.equals(rhs):
return False
return True
def __repr__(self):
return '<AND %s>' % str(self._comparators)
class Or(Comparator):
"""Evaluates one or more Comparators on RHS; returns OR of the results."""
def __init__(self, *args):
"""Initialize.
Args:
*args: One or more Mox comparators
"""
self._comparators = args
def equals(self, rhs):
"""Checks whether any Comparator is equal to rhs.
Args:
# rhs: can be anything
Returns:
bool
"""
for comparator in self._comparators:
if comparator.equals(rhs):
return True
return False
def __repr__(self):
return '<OR %s>' % str(self._comparators)
class Func(Comparator):
    """Comparator that delegates validation to an arbitrary callable.

    Use this when a parameter needs more advanced checking than simple
    equality.  The supplied callable receives the actual argument and should
    return True (accept) or False (reject).

    Example:
        def myParamValidator(param):
            # Advanced logic here
            return True

        mock_dao.DoSomething(Func(myParamValidator), true)
    """

    def __init__(self, func):
        """Store the validation callable.

        Args:
            func: callable that takes one parameter and returns a bool
        """
        self._func = func

    def equals(self, rhs):
        """Run the stored callable against rhs.

        Args:
            rhs: any python object

        Returns:
            the result of func(rhs)
        """
        return self._func(rhs)

    def __repr__(self):
        return str(self._func)
class IgnoreArg(Comparator):
    """Comparator that matches any argument unconditionally.

    Use this when an argument of a method call is irrelevant to the test.

    Example:
        # Check if CastMagic is called with 3 as first arg and
        # 'disappear' as third.
        mymock.CastMagic(3, IgnoreArg(), 'disappear')
    """

    def equals(self, unused_rhs):
        """Accept any value.

        Args:
            unused_rhs: any python object

        Returns:
            always returns True
        """
        return True

    def __repr__(self):
        return '<IgnoreArg>'
class Value(Comparator):
    """Comparator that matches against a value captured earlier.

    To be used in conjunction with the Remember comparator; see Remember()
    for a usage example.
    """

    def __init__(self):
        # No value captured yet; equals() rejects everything until
        # store_value() has been called.
        self._value = None
        self._has_value = False

    def store_value(self, rhs):
        """Capture a value for later comparisons."""
        self._value = rhs
        self._has_value = True

    def equals(self, rhs):
        """Return the comparison result once a value has been stored."""
        if not self._has_value:
            return False
        return rhs == self._value

    def __repr__(self):
        return "<Value %r>" % self._value if self._has_value else "<Value>"
class Remember(Comparator):
    """Comparator that records the argument into a Value store.

    To be used in conjunction with the Value comparator.

    Example:
        # Remember the argument for one method call.
        users_list = Value()
        mock_dao.ProcessUsers(Remember(users_list))

        # Check argument against remembered value.
        mock_dao.ReportUsers(users_list)
    """

    def __init__(self, value_store):
        if isinstance(value_store, Value):
            self._value_store = value_store
        else:
            raise TypeError(
                "value_store is not an instance of the Value class")

    def equals(self, rhs):
        # Matches everything; the side effect of recording rhs is the point.
        self._value_store.store_value(rhs)
        return True

    def __repr__(self):
        return "<Remember %d>" % id(self._value_store)
class MethodGroup(object):
    """Abstract base holding behaviour shared by all method-call groups."""

    def __init__(self, group_name):
        # Label used to identify this group in error output.
        self._group_name = group_name

    def group_name(self):
        """Return the label this group was created with."""
        return self._group_name

    def __str__(self):
        return '<%s "%s">' % (type(self).__name__, self._group_name)

    def AddMethod(self, mock_method):
        """Record an expected call; implemented by subclasses."""
        raise NotImplementedError

    def MethodCalled(self, mock_method):
        """Consume an actual call; implemented by subclasses."""
        raise NotImplementedError

    def IsSatisfied(self):
        """Report whether all expectations were met; implemented by subclasses."""
        raise NotImplementedError
class UnorderedGroup(MethodGroup):
    """UnorderedGroup holds a set of method calls that may occur in any order.

    This construct is helpful for non-deterministic events, such as iterating
    over the keys of a dict.
    """

    def __init__(self, group_name):
        super(UnorderedGroup, self).__init__(group_name)
        # Expected calls not yet consumed; order of this list is irrelevant.
        self._methods = []

    def __str__(self):
        return '%s "%s" pending calls:\n%s' % (
            self.__class__.__name__,
            self._group_name,
            "\n".join(str(method) for method in self._methods))

    def AddMethod(self, mock_method):
        """Add a method to this group.

        Args:
            mock_method: A mock method to be added to this group.
        """
        self._methods.append(mock_method)

    def MethodCalled(self, mock_method):
        """Remove a method call from the group.

        If the method is not in the set, an UnexpectedMethodCallError will be
        raised.

        Args:
            mock_method: a mock method that should be equal to a method in
                the group.

        Returns:
            The mock method from the group

        Raises:
            UnexpectedMethodCallError if the mock_method was not in the group.
        """
        # Check to see if this method exists, and if so, remove it from the set
        # and return it.
        for method in self._methods:
            if method == mock_method:
                # Remove the called mock_method instead of the method in the
                # group. The called method will match any comparators when
                # equality is checked during removal. The method in the group
                # could pass a comparator to another comparator during the
                # equality check.
                self._methods.remove(mock_method)

                # If group is not empty, put it back at the head of the queue.
                if not self.IsSatisfied():
                    mock_method._call_queue.appendleft(self)

                return self, method

        # No pending expectation matched the actual call.
        raise UnexpectedMethodCallError(mock_method, self)

    def IsSatisfied(self):
        """Return True if there are not any methods in this group."""
        return len(self._methods) == 0
class MultipleTimesGroup(MethodGroup):
    """MultipleTimesGroup holds methods that may be called any number of times.

    Note: Each method must be called at least once.

    This is helpful, if you don't know or care how many times a method is
    called.
    """

    def __init__(self, group_name):
        super(MultipleTimesGroup, self).__init__(group_name)
        # All expected methods, plus the subset not yet seen at least once.
        self._methods = set()
        self._methods_left = set()

    def AddMethod(self, mock_method):
        """Add a method to this group.

        Args:
            mock_method: A mock method to be added to this group.
        """
        self._methods.add(mock_method)
        self._methods_left.add(mock_method)

    def MethodCalled(self, mock_method):
        """Remove a method call from the group.

        If the method is not in the set, an UnexpectedMethodCallError will be
        raised.

        Args:
            mock_method: a mock method that should be equal to a method in
                the group.

        Returns:
            The mock method from the group

        Raises:
            UnexpectedMethodCallError if the mock_method was not in the group.
        """
        # Check to see if this method exists, and if so add it to the set of
        # called methods.
        for method in self._methods:
            if method == mock_method:
                self._methods_left.discard(method)
                # Always put this group back on top of the queue,
                # because we don't know when we are done.
                mock_method._call_queue.appendleft(self)
                return self, method

        if self.IsSatisfied():
            # Every expected method has been seen at least once, so an
            # unmatched call falls through to the next expectation in the
            # queue instead of failing this group.
            next_method = mock_method._PopNextMethod()
            return next_method, None
        else:
            raise UnexpectedMethodCallError(mock_method, self)

    def IsSatisfied(self):
        """Return True if all methods in group are called at least once."""
        return len(self._methods_left) == 0
class MoxMetaTestBase(type):
    """Metaclass to add mox cleanup and verification to every test.

    As the mox unit testing class is being constructed (MoxTestBase or a
    subclass), this metaclass will modify all test functions to call the
    CleanUpMox method of the test class after they finish. This means that
    unstubbing and verifying will happen for every test with no additional
    code, and any failures will result in test failures as opposed to errors.
    """

    def __init__(cls, name, bases, d):
        type.__init__(cls, name, bases, d)

        # also get all the attributes from the base classes to account
        # for a case when test class is not the immediate child of MoxTestBase
        for base in bases:
            for attr_name in dir(base):
                if attr_name not in d:
                    d[attr_name] = getattr(base, attr_name)

        # Wrap every test method so cleanup/verification always runs.
        for func_name, func in d.items():
            if func_name.startswith('test') and callable(func):
                setattr(cls, func_name, MoxMetaTestBase.CleanUpTest(cls, func))

    @staticmethod
    def CleanUpTest(cls, func):
        """Adds Mox cleanup code to any MoxTestBase method.

        Always unsets stubs after a test. Will verify all mocks for tests
        that otherwise pass.

        Args:
            cls: MoxTestBase or subclass; the class whose method we are
                altering.
            func: method; the method of the MoxTestBase test class we wish
                to alter.

        Returns:
            The modified method.
        """
        def new_method(self, *args, **kwargs):
            # Duck-check for the attributes MoxTestBase.setUp provides; a
            # subclass that skipped setUp simply gets no automatic cleanup.
            mox_obj = getattr(self, 'mox', None)
            stubout_obj = getattr(self, 'stubs', None)
            cleanup_mox = False
            cleanup_stubout = False
            if mox_obj and isinstance(mox_obj, Mox):
                cleanup_mox = True
            if stubout_obj and isinstance(stubout_obj,
                                          stubout.StubOutForTesting):
                cleanup_stubout = True
            try:
                func(self, *args, **kwargs)
            finally:
                # Unstub unconditionally, even when the test body raised, so
                # a failing test cannot leak stubs into later tests.
                if cleanup_mox:
                    mox_obj.UnsetStubs()
                if cleanup_stubout:
                    stubout_obj.UnsetAll()
                    stubout_obj.SmartUnsetAll()
            # Reached only when the test body did not raise: verification
            # failures then surface as ordinary test failures.
            if cleanup_mox:
                mox_obj.VerifyAll()
        # Preserve the wrapped function's identity for test reporting.
        new_method.__name__ = func.__name__
        new_method.__doc__ = func.__doc__
        new_method.__module__ = func.__module__
        return new_method
_MoxTestBase = MoxMetaTestBase('_MoxTestBase', (unittest.TestCase, ), {})
class MoxTestBase(_MoxTestBase):
    """Convenience base class wiring mox into unittest.

    Every test gets a fresh ``mox`` attribute (a Mox instance) and a
    ``stubs`` attribute (a stubout.StubOutForTesting instance).  Via
    MoxMetaTestBase, stubs are automatically unset and all mocks verified
    when each test finishes, so no per-test boilerplate is required.
    """

    def setUp(self):
        super(MoxTestBase, self).setUp()
        self.stubs = stubout.StubOutForTesting()
        self.mox = Mox()
| bsd-3-clause |
uranusjr/django | tests/indexes/models.py | 57 | 1811 | from django.db import connection, models
class CurrentTranslation(models.ForeignObject):
    """
    Creates virtual relation to the translation with model cache enabled.
    """
    # Skip the usual uniqueness validation of the target fields.
    requires_unique_target = False

    def __init__(self, to, on_delete, from_fields, to_fields, **kwargs):
        # Force no reverse accessor on the target model, and mark the
        # relation unique so the model cache is enabled.
        kwargs.update(related_name='+', unique=True)
        super().__init__(to, on_delete, from_fields, to_fields, **kwargs)
class ArticleTranslation(models.Model):
    # Two FKs to the same model: one with the default database-level
    # constraint and one explicitly without (db_constraint=False), so index
    # creation can be tested for both variants.
    article = models.ForeignKey('indexes.Article', models.CASCADE)
    article_no_constraint = models.ForeignKey('indexes.Article', models.CASCADE, db_constraint=False, related_name='+')
    # Unique across all translation rows.
    language = models.CharField(max_length=10, unique=True)
    content = models.TextField()
class Article(models.Model):
    headline = models.CharField(max_length=100)
    pub_date = models.DateTimeField()

    # Add virtual relation to the ArticleTranslation model, joining this
    # model's 'id' to ArticleTranslation's 'article' column.
    translation = CurrentTranslation(ArticleTranslation, models.CASCADE, ['id'], ['article'])

    class Meta:
        # Composite index declared with the list-of-lists form.
        index_together = [
            ["headline", "pub_date"],
        ]
# Model for index_together being used only with single list
class IndexTogetherSingleList(models.Model):
    headline = models.CharField(max_length=100)
    pub_date = models.DateTimeField()

    class Meta:
        # Single flat list form (instead of a list of lists, as in Article).
        index_together = ["headline", "pub_date"]
# Indexing a TextField on Oracle or MySQL results in index creation error.
# The model is therefore only defined when running against PostgreSQL.
if connection.vendor == 'postgresql':
    class IndexedArticle(models.Model):
        headline = models.CharField(max_length=100, db_index=True)
        body = models.TextField(db_index=True)
        slug = models.CharField(max_length=40, unique=True)
| bsd-3-clause |
eemirtekin/edx-platform | cms/djangoapps/contentstore/tests/test_utils.py | 17 | 24100 | """ Tests for utils. """
import collections
import copy
import mock
from datetime import datetime, timedelta
from pytz import UTC
from django.test import TestCase
from django.test.utils import override_settings
from contentstore import utils
from contentstore.tests.utils import CourseTestCase
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from xmodule.modulestore.django import modulestore
class LMSLinksTestCase(TestCase):
    """ Tests for LMS links.

    NOTE(review): the test methods use a ``*_test`` suffix rather than the
    unittest ``test_*`` prefix — presumably relying on nose-style test
    collection; confirm the project's runner picks these up.
    """

    def about_page_test(self):
        """ Get URL for about page, no marketing site """
        # default for ENABLE_MKTG_SITE is False.
        self.assertEquals(self.get_about_page_link(), "//localhost:8000/courses/mitX/101/test/about")

    @override_settings(MKTG_URLS={'ROOT': 'dummy-root'})
    def about_page_marketing_site_test(self):
        """ Get URL for about page, marketing root present. """
        # Same settings, toggled feature flag: marketing URL vs LMS URL.
        with mock.patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': True}):
            self.assertEquals(self.get_about_page_link(), "//dummy-root/courses/mitX/101/test/about")
        with mock.patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': False}):
            self.assertEquals(self.get_about_page_link(), "//localhost:8000/courses/mitX/101/test/about")

    @override_settings(MKTG_URLS={'ROOT': 'http://www.dummy'})
    def about_page_marketing_site_remove_http_test(self):
        """ Get URL for about page, marketing root present, remove http://. """
        with mock.patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': True}):
            self.assertEquals(self.get_about_page_link(), "//www.dummy/courses/mitX/101/test/about")

    @override_settings(MKTG_URLS={'ROOT': 'https://www.dummy'})
    def about_page_marketing_site_remove_https_test(self):
        """ Get URL for about page, marketing root present, remove https://. """
        with mock.patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': True}):
            self.assertEquals(self.get_about_page_link(), "//www.dummy/courses/mitX/101/test/about")

    @override_settings(MKTG_URLS={'ROOT': 'www.dummyhttps://x'})
    def about_page_marketing_site_https__edge_test(self):
        """ Get URL for about page, only remove https:// at the beginning of the string. """
        with mock.patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': True}):
            self.assertEquals(self.get_about_page_link(), "//www.dummyhttps://x/courses/mitX/101/test/about")

    @override_settings(MKTG_URLS={})
    def about_page_marketing_urls_not_set_test(self):
        """ Error case. ENABLE_MKTG_SITE is True, but there is either no MKTG_URLS, or no MKTG_URLS Root property. """
        with mock.patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': True}):
            self.assertEquals(self.get_about_page_link(), None)

    @override_settings(LMS_BASE=None)
    def about_page_no_lms_base_test(self):
        """ No LMS_BASE, nor is ENABLE_MKTG_SITE True """
        self.assertEquals(self.get_about_page_link(), None)

    def get_about_page_link(self):
        """ create mock course and return the about page link """
        course_key = SlashSeparatedCourseKey('mitX', '101', 'test')
        return utils.get_lms_link_for_about_page(course_key)

    def lms_link_test(self):
        """ Tests get_lms_link_for_item. """
        course_key = SlashSeparatedCourseKey('mitX', '101', 'test')
        location = course_key.make_usage_key('vertical', 'contacting_us')
        link = utils.get_lms_link_for_item(location, False)
        self.assertEquals(link, "//localhost:8000/courses/mitX/101/test/jump_to/i4x://mitX/101/vertical/contacting_us")

        # test preview
        link = utils.get_lms_link_for_item(location, True)
        self.assertEquals(
            link,
            "//preview/courses/mitX/101/test/jump_to/i4x://mitX/101/vertical/contacting_us"
        )

        # now test with the course' location
        location = course_key.make_usage_key('course', 'test')
        link = utils.get_lms_link_for_item(location)
        self.assertEquals(link, "//localhost:8000/courses/mitX/101/test/jump_to/i4x://mitX/101/course/test")
class ExtraPanelTabTestCase(TestCase):
    """ Tests adding and removing extra course tabs. """

    def get_tab_type_dicts(self, tab_types):
        """ Returns an array of tab dictionaries.

        tab_types is a comma-separated string (e.g. 'x,y,z'); an empty
        string yields an empty list rather than [{'tab_type': ''}].
        """
        if tab_types:
            return [{'tab_type': tab_type} for tab_type in tab_types.split(',')]
        else:
            return []

    def get_course_with_tabs(self, tabs=None):
        """ Returns a mock course object with a tabs attribute. """
        if tabs is None:
            tabs = []
        # NOTE(review): namedtuple() returns a *class*; the code below sets
        # 'tabs' as a class attribute and returns the class itself, which is
        # a sufficient duck-typed stand-in for these tests.
        course = collections.namedtuple('MockCourse', ['tabs'])
        if isinstance(tabs, basestring):
            course.tabs = self.get_tab_type_dicts(tabs)
        else:
            course.tabs = tabs
        return course

    def test_add_extra_panel_tab(self):
        """ Tests if a tab can be added to a course tab list. """
        for tab_type in utils.EXTRA_TAB_PANELS.keys():
            tab = utils.EXTRA_TAB_PANELS.get(tab_type)

            # test adding with changed = True
            for tab_setup in ['', 'x', 'x,y,z']:
                course = self.get_course_with_tabs(tab_setup)
                expected_tabs = copy.copy(course.tabs)
                expected_tabs.append(tab)
                changed, actual_tabs = utils.add_extra_panel_tab(tab_type, course)
                self.assertTrue(changed)
                self.assertEqual(actual_tabs, expected_tabs)

            # test adding with changed = False (tab already present anywhere
            # in the list)
            tab_test_setup = [
                [tab],
                [tab, self.get_tab_type_dicts('x,y,z')],
                [self.get_tab_type_dicts('x,y'), tab, self.get_tab_type_dicts('z')],
                [self.get_tab_type_dicts('x,y,z'), tab]]

            for tab_setup in tab_test_setup:
                course = self.get_course_with_tabs(tab_setup)
                expected_tabs = copy.copy(course.tabs)
                changed, actual_tabs = utils.add_extra_panel_tab(tab_type, course)
                self.assertFalse(changed)
                self.assertEqual(actual_tabs, expected_tabs)

    def test_remove_extra_panel_tab(self):
        """ Tests if a tab can be removed from a course tab list. """
        for tab_type in utils.EXTRA_TAB_PANELS.keys():
            tab = utils.EXTRA_TAB_PANELS.get(tab_type)

            # test removing with changed = True
            tab_test_setup = [
                [tab],
                [tab, self.get_tab_type_dicts('x,y,z')],
                [self.get_tab_type_dicts('x,y'), tab, self.get_tab_type_dicts('z')],
                [self.get_tab_type_dicts('x,y,z'), tab]]

            for tab_setup in tab_test_setup:
                course = self.get_course_with_tabs(tab_setup)
                expected_tabs = [t for t in course.tabs if t != utils.EXTRA_TAB_PANELS.get(tab_type)]
                changed, actual_tabs = utils.remove_extra_panel_tab(tab_type, course)
                self.assertTrue(changed)
                self.assertEqual(actual_tabs, expected_tabs)

            # test removing with changed = False (tab not present)
            for tab_setup in ['', 'x', 'x,y,z']:
                course = self.get_course_with_tabs(tab_setup)
                expected_tabs = copy.copy(course.tabs)
                changed, actual_tabs = utils.remove_extra_panel_tab(tab_type, course)
                self.assertFalse(changed)
                self.assertEqual(actual_tabs, expected_tabs)
class CourseImageTestCase(ModuleStoreTestCase):
    """Tests for course image URLs."""

    def verify_url(self, expected_url, actual_url):
        """
        Helper method for verifying the URL is as expected.

        course_image_url returns a root-relative URL, so normalize the
        expectation to start with a slash before comparing.
        """
        if not expected_url.startswith("/"):
            expected_url = "/" + expected_url
        self.assertEquals(expected_url, actual_url)

    def test_get_image_url(self):
        """Test image URL formatting."""
        course = CourseFactory.create()
        self.verify_url(
            unicode(course.id.make_asset_key('asset', course.course_image)),
            utils.course_image_url(course)
        )

    def test_non_ascii_image_name(self):
        """ Verify that non-ascii image names are cleaned """
        course_image = u'before_\N{SNOWMAN}_after.jpg'
        course = CourseFactory.create(course_image=course_image)
        self.verify_url(
            unicode(course.id.make_asset_key('asset', course_image.replace(u'\N{SNOWMAN}', '_'))),
            utils.course_image_url(course)
        )

    def test_spaces_in_image_name(self):
        """ Verify that image names with spaces in them are cleaned """
        course_image = u'before after.jpg'
        # Pass the variable (not a duplicated literal) to the factory, like
        # test_non_ascii_image_name does, so fixture and expectation cannot
        # drift apart.
        course = CourseFactory.create(course_image=course_image)
        self.verify_url(
            unicode(course.id.make_asset_key('asset', course_image.replace(" ", "_"))),
            utils.course_image_url(course)
        )
class XBlockVisibilityTestCase(ModuleStoreTestCase):
    """Tests for xblock visibility for students."""

    def setUp(self):
        super(XBlockVisibilityTestCase, self).setUp()

        self.dummy_user = ModuleStoreEnum.UserID.test
        # A date safely in the past and one safely in the future, used to
        # model released vs unreleased content.
        self.past = datetime(1970, 1, 1)
        self.future = datetime.now(UTC) + timedelta(days=1)
        self.course = CourseFactory.create()

    def test_private_unreleased_xblock(self):
        """Verifies that a private unreleased xblock is not visible"""
        self._test_visible_to_students(False, 'private_unreleased', self.future)

    def test_private_released_xblock(self):
        """Verifies that a private released xblock is not visible"""
        self._test_visible_to_students(False, 'private_released', self.past)

    def test_public_unreleased_xblock(self):
        """Verifies that a public (published) unreleased xblock is not visible"""
        self._test_visible_to_students(False, 'public_unreleased', self.future, publish=True)

    def test_public_released_xblock(self):
        """Verifies that public (published) released xblock is visible if staff lock is not enabled."""
        self._test_visible_to_students(True, 'public_released', self.past, publish=True)

    def test_private_no_start_xblock(self):
        """Verifies that a private xblock with no start date is not visible"""
        self._test_visible_to_students(False, 'private_no_start', None)

    def test_public_no_start_xblock(self):
        """Verifies that a public (published) xblock with no start date is visible unless staff lock is enabled"""
        self._test_visible_to_students(True, 'public_no_start', None, publish=True)

    def test_draft_released_xblock(self):
        """Verifies that a xblock with an unreleased draft and a released published version is visible"""
        vertical = self._create_xblock_with_start_date('draft_released', self.past, publish=True)

        # Create an unreleased draft version of the xblock
        vertical.start = self.future
        modulestore().update_item(vertical, self.dummy_user)

        # The published (released) version should still count as visible.
        self.assertTrue(utils.is_currently_visible_to_students(vertical))

    def _test_visible_to_students(self, expected_visible_without_lock, name, start_date, publish=False):
        """
        Helper method that checks that is_xblock_visible_to_students returns the correct value both
        with and without visible_to_staff_only set.
        """
        no_staff_lock = self._create_xblock_with_start_date(name, start_date, publish, visible_to_staff_only=False)
        self.assertEqual(expected_visible_without_lock, utils.is_currently_visible_to_students(no_staff_lock))

        # any xblock with visible_to_staff_only set to True should not be visible to students.
        staff_lock = self._create_xblock_with_start_date(
            name + "_locked", start_date, publish, visible_to_staff_only=True
        )
        self.assertFalse(utils.is_currently_visible_to_students(staff_lock))

    def _create_xblock_with_start_date(self, name, start_date, publish=False, visible_to_staff_only=False):
        """Helper to create an xblock with a start date, optionally publishing it"""
        vertical = modulestore().create_item(
            self.dummy_user, self.course.location.course_key, 'vertical', name,
            fields={'start': start_date, 'visible_to_staff_only': visible_to_staff_only}
        )
        if publish:
            modulestore().publish(vertical.location, self.dummy_user)
        return vertical
class ReleaseDateSourceTest(CourseTestCase):
    """Tests for finding the source of an xblock's release date."""

    def setUp(self):
        super(ReleaseDateSourceTest, self).setUp()

        # Build a chapter -> sequential -> vertical hierarchy.
        self.chapter = ItemFactory.create(category='chapter', parent_location=self.course.location)
        self.sequential = ItemFactory.create(category='sequential', parent_location=self.chapter.location)
        self.vertical = ItemFactory.create(category='vertical', parent_location=self.sequential.location)

        # Read again so that children lists are accurate
        self.chapter = self.store.get_item(self.chapter.location)
        self.sequential = self.store.get_item(self.sequential.location)
        self.vertical = self.store.get_item(self.vertical.location)

        # An "early" and a "late" release date to combine in the tests.
        self.date_one = datetime(1980, 1, 1, tzinfo=UTC)
        self.date_two = datetime(2020, 1, 1, tzinfo=UTC)

    def _update_release_dates(self, chapter_start, sequential_start, vertical_start):
        """Sets the release dates of the chapter, sequential, and vertical"""
        self.chapter.start = chapter_start
        self.chapter = self.store.update_item(self.chapter, ModuleStoreEnum.UserID.test)
        self.sequential.start = sequential_start
        self.sequential = self.store.update_item(self.sequential, ModuleStoreEnum.UserID.test)
        self.vertical.start = vertical_start
        self.vertical = self.store.update_item(self.vertical, ModuleStoreEnum.UserID.test)

    def _verify_release_date_source(self, item, expected_source):
        """Helper to verify that the release date source of a given item matches the expected source"""
        source = utils.find_release_date_source(item)
        self.assertEqual(source.location, expected_source.location)
        self.assertEqual(source.start, expected_source.start)

    def test_chapter_source_for_vertical(self):
        """Tests a vertical's release date being set by its chapter"""
        self._update_release_dates(self.date_one, self.date_one, self.date_one)
        self._verify_release_date_source(self.vertical, self.chapter)

    def test_sequential_source_for_vertical(self):
        """Tests a vertical's release date being set by its sequential"""
        self._update_release_dates(self.date_one, self.date_two, self.date_two)
        self._verify_release_date_source(self.vertical, self.sequential)

    def test_chapter_source_for_sequential(self):
        """Tests a sequential's release date being set by its chapter"""
        self._update_release_dates(self.date_one, self.date_one, self.date_one)
        self._verify_release_date_source(self.sequential, self.chapter)

    def test_sequential_source_for_sequential(self):
        """Tests a sequential's release date being set by itself"""
        self._update_release_dates(self.date_one, self.date_two, self.date_two)
        self._verify_release_date_source(self.sequential, self.sequential)
class StaffLockTest(CourseTestCase):
    """Base class for testing staff lock functions."""

    def setUp(self):
        super(StaffLockTest, self).setUp()

        self.chapter = ItemFactory.create(category='chapter', parent_location=self.course.location)
        self.sequential = ItemFactory.create(category='sequential', parent_location=self.chapter.location)
        self.vertical = ItemFactory.create(category='vertical', parent_location=self.sequential.location)
        self.orphan = ItemFactory.create(category='vertical', parent_location=self.sequential.location)

        # Read again so that children lists are accurate
        self.chapter = self.store.get_item(self.chapter.location)
        self.sequential = self.store.get_item(self.sequential.location)
        self.vertical = self.store.get_item(self.vertical.location)

        # Orphan the orphaned xblock: rewrite the sequential's children to
        # contain only self.vertical, detaching self.orphan from the tree.
        self.sequential.children = [self.vertical.location]
        self.sequential = self.store.update_item(self.sequential, ModuleStoreEnum.UserID.test)

    def _set_staff_lock(self, xblock, is_locked):
        """If is_locked is True, xblock is staff locked. Otherwise, the xblock staff lock field is removed."""
        field = xblock.fields['visible_to_staff_only']
        if is_locked:
            field.write_to(xblock, True)
        else:
            # NOTE(review): deleting the field (rather than writing False)
            # presumably lets the value be inherited again — confirm.
            field.delete_from(xblock)
        return self.store.update_item(xblock, ModuleStoreEnum.UserID.test)

    def _update_staff_locks(self, chapter_locked, sequential_locked, vertical_locked):
        """
        Sets the staff lock on the chapter, sequential, and vertical

        If the corresponding argument is False, then the field is deleted from the xblock
        """
        self.chapter = self._set_staff_lock(self.chapter, chapter_locked)
        self.sequential = self._set_staff_lock(self.sequential, sequential_locked)
        self.vertical = self._set_staff_lock(self.vertical, vertical_locked)
class StaffLockSourceTest(StaffLockTest):
    """Tests for finding the source of an xblock's staff lock."""

    def _verify_staff_lock_source(self, item, expected_source):
        """Helper to verify that the staff lock source of a given item matches the expected source"""
        source = utils.find_staff_lock_source(item)
        self.assertEqual(source.location, expected_source.location)
        self.assertTrue(source.visible_to_staff_only)

    def test_chapter_source_for_vertical(self):
        """Tests a vertical's staff lock being set by its chapter"""
        self._update_staff_locks(True, False, False)
        self._verify_staff_lock_source(self.vertical, self.chapter)

    def test_sequential_source_for_vertical(self):
        """Tests a vertical's staff lock being set by its sequential"""
        # The lock on the nearest locked ancestor wins over higher ones.
        self._update_staff_locks(True, True, False)
        self._verify_staff_lock_source(self.vertical, self.sequential)
        self._update_staff_locks(False, True, False)
        self._verify_staff_lock_source(self.vertical, self.sequential)

    def test_vertical_source_for_vertical(self):
        """Tests a vertical's staff lock being set by itself"""
        self._update_staff_locks(True, True, True)
        self._verify_staff_lock_source(self.vertical, self.vertical)
        self._update_staff_locks(False, True, True)
        self._verify_staff_lock_source(self.vertical, self.vertical)
        self._update_staff_locks(False, False, True)
        self._verify_staff_lock_source(self.vertical, self.vertical)

    def test_orphan_has_no_source(self):
        """Tests that a orphaned xblock has no staff lock source"""
        self.assertIsNone(utils.find_staff_lock_source(self.orphan))

    def test_no_source_for_vertical(self):
        """Tests a vertical with no staff lock set anywhere"""
        self._update_staff_locks(False, False, False)
        self.assertIsNone(utils.find_staff_lock_source(self.vertical))
class InheritedStaffLockTest(StaffLockTest):
    """Tests for determining if an xblock inherits a staff lock."""

    def test_no_inheritance(self):
        """Tests that a locked or unlocked vertical with no locked ancestors does not have an inherited lock"""
        self._update_staff_locks(False, False, False)
        self.assertFalse(utils.ancestor_has_staff_lock(self.vertical))
        # A lock on the vertical itself is not an *inherited* lock.
        self._update_staff_locks(False, False, True)
        self.assertFalse(utils.ancestor_has_staff_lock(self.vertical))

    def test_inheritance_in_locked_section(self):
        """Tests that a locked or unlocked vertical in a locked section has an inherited lock"""
        self._update_staff_locks(True, False, False)
        self.assertTrue(utils.ancestor_has_staff_lock(self.vertical))
        self._update_staff_locks(True, False, True)
        self.assertTrue(utils.ancestor_has_staff_lock(self.vertical))

    def test_inheritance_in_locked_subsection(self):
        """Tests that a locked or unlocked vertical in a locked subsection has an inherited lock"""
        self._update_staff_locks(False, True, False)
        self.assertTrue(utils.ancestor_has_staff_lock(self.vertical))
        self._update_staff_locks(False, True, True)
        self.assertTrue(utils.ancestor_has_staff_lock(self.vertical))

    def test_no_inheritance_for_orphan(self):
        """Tests that an orphaned xblock does not inherit staff lock"""
        self.assertFalse(utils.ancestor_has_staff_lock(self.orphan))
class GroupVisibilityTest(CourseTestCase):
    """
    Test content group access rules.
    """

    def setUp(self):
        super(GroupVisibilityTest, self).setUp()

        # chapter -> sequential -> vertical -> (html, problem)
        chapter = ItemFactory.create(category='chapter', parent_location=self.course.location)
        sequential = ItemFactory.create(category='sequential', parent_location=chapter.location)
        vertical = ItemFactory.create(category='vertical', parent_location=sequential.location)
        html = ItemFactory.create(category='html', parent_location=vertical.location)
        problem = ItemFactory.create(
            category='problem', parent_location=vertical.location, data="<problem></problem>"
        )
        self.sequential = self.store.get_item(sequential.location)
        self.vertical = self.store.get_item(vertical.location)
        self.html = self.store.get_item(html.location)
        self.problem = self.store.get_item(problem.location)

    def set_group_access(self, xblock, value):
        """ Sets group_access to specified value and calls update_item to persist the change. """
        xblock.group_access = value
        self.store.update_item(xblock, self.user.id)

    def test_no_visibility_set(self):
        """ Tests when group_access has not been set on anything. """

        def verify_all_components_visible_to_all():  # pylint: disable=invalid-name
            """ Verifies when group_access has not been set on anything. """
            for item in (self.sequential, self.vertical, self.html, self.problem):
                self.assertFalse(utils.has_children_visible_to_specific_content_groups(item))
                self.assertFalse(utils.is_visible_to_specific_content_groups(item))

        verify_all_components_visible_to_all()

        # Test with group_access set to Falsey values.
        self.set_group_access(self.vertical, {1: []})
        self.set_group_access(self.html, {2: None})

        verify_all_components_visible_to_all()

    def test_sequential_and_problem_have_group_access(self):
        """ Tests when group_access is set on a few different components. """
        self.set_group_access(self.sequential, {1: [0]})

        # This is a no-op.
        self.set_group_access(self.vertical, {1: []})

        self.set_group_access(self.problem, {2: [3, 4]})

        # Note that "has_children_visible_to_specific_content_groups" only checks immediate children.
        self.assertFalse(utils.has_children_visible_to_specific_content_groups(self.sequential))
        self.assertTrue(utils.has_children_visible_to_specific_content_groups(self.vertical))
        self.assertFalse(utils.has_children_visible_to_specific_content_groups(self.html))
        self.assertFalse(utils.has_children_visible_to_specific_content_groups(self.problem))

        self.assertTrue(utils.is_visible_to_specific_content_groups(self.sequential))
        self.assertFalse(utils.is_visible_to_specific_content_groups(self.vertical))
        self.assertFalse(utils.is_visible_to_specific_content_groups(self.html))
        self.assertTrue(utils.is_visible_to_specific_content_groups(self.problem))
| agpl-3.0 |
paultcochrane/bokeh | bokeh/plotting.py | 10 | 9238 | from __future__ import absolute_import, print_function
import logging
logger = logging.getLogger(__name__)
from . import _glyph_functions as gf
from .models import Axis, Grid, GridPlot, Legend, LogAxis, Plot
from .plotting_helpers import (
_list_attr_splat, _get_range, _get_axis_class, _get_num_minor_ticks, _process_tools_arg
)
# extra imports -- just things to add to 'from plotting import *'
from .document import Document
from .models import ColumnDataSource
from .session import Session
from .io import (
curdoc, cursession, output_file, output_notebook, output_server, push,
reset_output, save, show, gridplot, hplot, vplot)
# Names that we want in this namespace (fool pyflakes)
# Referencing the imported names in a throwaway tuple marks them as "used"
# so linters keep them in the star-import surface of this module.
(GridPlot, Document, ColumnDataSource, Session, cursession, gridplot,
 show, save, reset_output, push, output_file, output_notebook,
 output_server, vplot, hplot)

# Tool specification applied when figure() callers do not pass `tools`.
DEFAULT_TOOLS = "pan,wheel_zoom,box_zoom,save,resize,reset,help"
class Figure(Plot):
    """ A ``Plot`` subclass configured from convenience keyword arguments.

    ``__init__`` assembles ranges, axes, grids and tools from keywords, and
    one glyph method per glyph type (circle, line, rect, ...) is attached
    below, so users can build plots without wiring up low-level model
    objects themselves.
    """
    __subtype__ = "Figure"
    __view_model__ = "Plot"

    def __init__(self, *arg, **kw):
        # Pop every Figure-level convenience keyword out of ``kw`` first, so
        # only genuine Plot properties are forwarded to the superclass.
        tools = kw.pop("tools", DEFAULT_TOOLS)
        x_range = kw.pop("x_range", None)
        y_range = kw.pop("y_range", None)
        x_axis_type = kw.pop("x_axis_type", "auto")
        y_axis_type = kw.pop("y_axis_type", "auto")
        x_minor_ticks = kw.pop('x_minor_ticks', 'auto')
        y_minor_ticks = kw.pop('y_minor_ticks', 'auto')
        x_axis_location = kw.pop("x_axis_location", "below")
        y_axis_location = kw.pop("y_axis_location", "left")
        x_axis_label = kw.pop("x_axis_label", "")
        y_axis_label = kw.pop("y_axis_label", "")

        super(Figure, self).__init__(*arg, **kw)

        # Coerce whatever the caller passed into concrete Range models.
        self.x_range = _get_range(x_range)
        self.y_range = _get_range(y_range)

        # X axis -- _get_axis_class may return a falsy value, in which case
        # no x axis or x grid is created at all.
        x_axiscls = _get_axis_class(x_axis_type, self.x_range)
        if x_axiscls:
            if x_axiscls is LogAxis:
                self.x_mapper_type = 'log'
            xaxis = x_axiscls(plot=self)
            xaxis.ticker.num_minor_ticks = _get_num_minor_ticks(x_axiscls, x_minor_ticks)
            axis_label = x_axis_label
            if axis_label:
                xaxis.axis_label = axis_label
            # Constructing the Grid registers it with the plot; the trailing
            # bare name only marks the local as intentionally unused.
            xgrid = Grid(plot=self, dimension=0, ticker=xaxis.ticker); xgrid
            if x_axis_location == "above":
                self.above.append(xaxis)
            elif x_axis_location == "below":
                self.below.append(xaxis)

        # Y axis -- same treatment as the x axis.
        y_axiscls = _get_axis_class(y_axis_type, self.y_range)
        if y_axiscls:
            if y_axiscls is LogAxis:
                self.y_mapper_type = 'log'
            yaxis = y_axiscls(plot=self)
            yaxis.ticker.num_minor_ticks = _get_num_minor_ticks(y_axiscls, y_minor_ticks)
            axis_label = y_axis_label
            if axis_label:
                yaxis.axis_label = axis_label
            ygrid = Grid(plot=self, dimension=1, ticker=yaxis.ticker); ygrid
            if y_axis_location == "left":
                self.left.append(yaxis)
            elif y_axis_location == "right":
                self.right.append(yaxis)

        # Parse the tools spec into tool model objects and attach them.
        tool_objs = _process_tools_arg(self, tools)
        self.add_tools(*tool_objs)

    def _axis(self, *sides):
        # Collect Axis objects from the named layout sides ("above", ...).
        objs = []
        for s in sides:
            objs.extend(getattr(self, s, []))
        axis = [obj for obj in objs if isinstance(obj, Axis)]
        return _list_attr_splat(axis)

    @property
    def xaxis(self):
        """ Get the current `x` axis object(s)

        Returns:
            splattable list of x-axis objects on this Plot
        """
        return self._axis("above", "below")

    @property
    def yaxis(self):
        """ Get the current `y` axis object(s)

        Returns:
            splattable list of y-axis objects on this Plot
        """
        return self._axis("left", "right")

    @property
    def axis(self):
        """ Get all the current axis objects

        Returns:
            splattable list of axis objects on this Plot
        """
        return _list_attr_splat(self.xaxis + self.yaxis)

    @property
    def legend(self):
        """ Get the current :class:`legend <bokeh.models.Legend>` object(s)

        Returns:
            splattable list of legend objects on this Plot
        """
        legends = [obj for obj in self.renderers if isinstance(obj, Legend)]
        return _list_attr_splat(legends)

    def _grid(self, dimension):
        # Filter renderers for Grid objects of the given dimension (0=x, 1=y).
        grid = [obj for obj in self.renderers if isinstance(obj, Grid) and obj.dimension==dimension]
        return _list_attr_splat(grid)

    @property
    def xgrid(self):
        """ Get the current `x` :class:`grid <bokeh.models.Grid>` object(s)

        Returns:
            splattable list of legend objects on this Plot
        """
        return self._grid(0)

    @property
    def ygrid(self):
        """ Get the current `y` :class:`grid <bokeh.models.Grid>` object(s)

        Returns:
            splattable list of y-grid objects on this Plot
        """
        return self._grid(1)

    @property
    def grid(self):
        """ Get the current :class:`grid <bokeh.models.Grid>` object(s)

        Returns:
            splattable list of grid objects on this Plot
        """
        return _list_attr_splat(self.xgrid + self.ygrid)

    # Glyph functions (from bokeh._glyph_functions) exposed as methods,
    # one attribute per glyph type.
    annular_wedge = gf.annular_wedge
    annulus = gf.annulus
    arc = gf.arc
    asterisk = gf.asterisk
    bezier = gf.bezier
    circle = gf.circle
    circle_cross = gf.circle_cross
    circle_x = gf.circle_x
    cross = gf.cross
    diamond = gf.diamond
    diamond_cross = gf.diamond_cross
    image = gf.image
    image_rgba = gf.image_rgba
    image_url = gf.image_url
    inverted_triangle = gf.inverted_triangle
    line = gf.line
    multi_line = gf.multi_line
    oval = gf.oval
    patch = gf.patch
    patches = gf.patches
    quad = gf.quad
    quadratic = gf.quadratic
    ray = gf.ray
    rect = gf.rect
    segment = gf.segment
    square = gf.square
    square_cross = gf.square_cross
    square_x = gf.square_x
    text = gf.text
    triangle = gf.triangle
    wedge = gf.wedge
    x = gf.x

    def scatter(self, *args, **kwargs):
        """ Creates a scatter plot of the given x and y items.

        Args:
            *args : The data to plot. Can be of several forms:

                (X, Y)
                    Two 1D arrays or iterables
                (XNAME, YNAME)
                    Two bokeh DataSource/ColumnsRef

            marker (str, optional): a valid marker_type, defaults to "circle"
            color (color value, optional): shorthand to set both fill and line color

        All the :ref:`userguide_styling_line_properties` and :ref:`userguide_styling_fill_properties` are
        also accepted as keyword parameters.

        Examples:

            >>> p.scatter([1,2,3],[4,5,6], fill_color="red")
            >>> p.scatter("data1", "data2", source=data_source, ...)

        """
        markertype = kwargs.pop("marker", "circle")
        if markertype not in _marker_types:
            raise ValueError("Invalid marker type '%s'. Use markers() to see a list of valid marker types." % markertype)
        # TODO (bev) make better when plotting.scatter is removed
        # Translate single-character shorthand ("*", "o", ...) to the full
        # marker name before dispatching.
        conversions = {
            "*": "asterisk",
            "+": "cross",
            "o": "circle",
            "ox": "circle_x",
            "o+": "circle_cross"
        }
        if markertype in conversions:
            markertype = conversions[markertype]
        # Dispatch to the matching glyph method defined above.
        return getattr(self, markertype)(*args, **kwargs)
def figure(**kwargs):
    """ Activate a new figure for plotting.

    All subsequent plotting operations will affect the new figure. Every
    plot style keyword parameter is accepted; ``width`` and ``height`` may
    be used as aliases for ``plot_width`` and ``plot_height`` (but not both
    spellings at once).

    Returns:
        figure : a new :class:`Plot <bokeh.models.plots.Plot>`
    """
    # Reject ambiguous sizing: an alias and its canonical name together.
    for full, alias in (("plot_width", "width"), ("plot_height", "height")):
        if full in kwargs and alias in kwargs:
            raise ValueError(
                "figure() called but both %s and %s supplied, supply only one" % (full, alias))

    # Normalize the short aliases onto the canonical keyword names.
    for full, alias in (("plot_height", "height"), ("plot_width", "width")):
        if alias in kwargs:
            kwargs[full] = kwargs.pop(alias)

    fig = Figure(**kwargs)
    doc = curdoc()
    doc._current_plot = fig
    if doc.autoadd:
        doc.add(fig)
    return fig
# Marker names accepted by Figure.scatter(). The single-character entries
# at the end are shorthand aliases that scatter() translates through its
# internal ``conversions`` table.
_marker_types = [
    "asterisk",
    "circle",
    "circle_cross",
    "circle_x",
    "cross",
    "diamond",
    "diamond_cross",
    "inverted_triangle",
    "square",
    "square_x",
    "square_cross",
    "triangle",
    "x",
    "*",
    "+",
    "o",
    "ox",
    "o+",
]
def markers():
    """ Prints a list of valid marker types for scatter()

    Returns:
        None
    """
    bullet = "\n - "
    listing = bullet.join(_marker_types)
    print("Available markers: " + bullet + listing)
# Keyword names recognized as color / alpha shorthands by the glyph helpers.
_color_fields = set(["color", "fill_color", "line_color"])
_alpha_fields = set(["alpha", "fill_alpha", "line_alpha"])
| bsd-3-clause |
CTSRD-SOAAP/chromium-42.0.2311.135 | third_party/pywebsocket/src/example/internal_error_wsh.py | 465 | 1738 | # Copyright 2012, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from mod_pywebsocket import msgutil
def web_socket_do_extra_handshake(request):
    # Accept every connection: no extra handshake validation is performed.
    pass
def web_socket_transfer_data(request):
    # Deliberately raise so the server's internal-error handling can be
    # exercised by tests.
    raise msgutil.BadOperationException('Intentional')
# vi:sts=4 sw=4 et
| bsd-3-clause |
pfalcon/micropython | tests/basics/unpack1.py | 45 | 1802 | # unpack sequences
# Plain tuple-target unpacking of literals and iterables.
a, = 1, ; print(a)
a, b = 2, 3 ; print(a, b)
a, b, c = 1, 2, 3; print(a, b, c)
a, = range(1); print(a)
a, b = range(2); print(a, b)
a, b, c = range(3); print(a, b, c)
# Parenthesized targets, including a nested target.
(a) = range(1); print(a)
(a,) = range(1); print(a)
(a, b) = range(2); print(a, b)
(a, b, c) = range(3); print(a, b, c)
(a, (b, c)) = [-1, range(2)]; print(a, b, c)
# lists
[] = []
[a] = range(1); print(a)
[a, b] = range(2); print(a, b)
[a, b, c] = range(3); print(a, b, c)
# with star
*a, = () ; print(a)
*a, = 4, ; print(a)
*a, = 5, 6 ; print(a)
*a, b = 7, ; print(a, b)
*a, b = 8, 9 ; print(a, b)
*a, b = 10, 11, 12 ; print(a, b)
a, *b = 13, ; print(a, b)
a, *b = 14, 15 ; print(a, b)
a, *b = 16, 17, 18 ; print(a, b)
a, *b, c = 19, 20 ; print(a, b)
a, *b, c = 21, 22, 23 ; print(a, b)
a, *b, c = 24, 25, 26, 27 ; print(a, b)
# A starred target always produces a new list, never an alias.
a = [28, 29]
*b, = a
print(a, b, a == b, a is b)
[*a] = [1, 2, 3]
print(a)
# Too few values for the non-starred targets raises ValueError.
try:
    a, *b, c = (30,)
except ValueError:
    print("ValueError")
# with star and generic iterator
*a, = range(5) ; print(a)
*a, b = range(5) ; print(a, b)
*a, b, c = range(5) ; print(a, b, c)
a, *b = range(5) ; print(a, b)
a, *b, c = range(5) ; print(a, b, c)
a, *b, c, d = range(5) ; print(a, b, c, d)
a, b, *c = range(5) ; print(a, b, c)
a, b, *c, d = range(5) ; print(a, b, c, d)
a, b, *c, d, e = range(5) ; print(a, b, c, d, e)
*a, = [x * 2 for x in [1, 2, 3, 4]] ; print(a)
*a, b = [x * 2 for x in [1, 2, 3, 4]] ; print(a, b)
a, *b = [x * 2 for x in [1, 2, 3, 4]] ; print(a, b)
a, *b, c = [x * 2 for x in [1, 2, 3, 4]]; print(a, b, c)
# Starred unpacking still needs enough items for the plain targets.
try:
    a, *b, c = range(0)
except ValueError:
    print("ValueError")
try:
    a, *b, c = range(1)
except ValueError:
    print("ValueError")
| mit |
ryneches/SuchTree | SuchTree/tests/test_SuchTree.py | 1 | 3768 | from __future__ import print_function
import pytest
from pytest import approx
from SuchTree import SuchTree
from dendropy import Tree
from itertools import combinations, chain
import numpy
# networkx is optional; test_networkx is skipped when it is missing.
try :
    import networkx
    has_networkx = True
except ImportError :
    has_networkx = False

# Shared fixtures: the test newick file and a dendropy parse of it, used
# as the reference implementation in the assertions below.
test_tree = 'SuchTree/tests/test.tree'
dpt = Tree.get( file=open(test_tree), schema='newick' )
dpt.resolve_polytomies()
# Number every node in inorder; these labels serve as node ids in the tests.
for n,node in enumerate( dpt.inorder_node_iter() ) :
    node.label = n
def test_init():
    """ Constructing a SuchTree from the test file yields a SuchTree. """
    tree = SuchTree( test_tree )
    assert type(tree) == SuchTree
def test_get_children() :
    # Compare SuchTree's child ids against dendropy's for every node.
    T = SuchTree( test_tree )
    for node in dpt.inorder_node_iter() :
        if not node.taxon :
            # Internal node: expect the two child labels.
            left, right = [ n.label for n in node.child_nodes() ]
        else :
            # Leaf: SuchTree reports children as (-1, -1).
            left, right = -1, -1
        L,R = T.get_children( node.label )
        assert L == left
        assert R == right
def test_get_distance_to_root():
    """ Root distances match dendropy's distance_from_root() per leaf. """
    tree = SuchTree( test_tree )
    for leaf in dpt.leaf_node_iter() :
        expected = leaf.distance_from_root()
        assert tree.get_distance_to_root( leaf.label ) == approx( expected, 0.001 )
def test_distance():
    """ Pairwise distances agree with the precomputed test matrix file. """
    tree = SuchTree( test_tree )
    for row in open( 'SuchTree/tests/test.matrix' ) :
        name_a, name_b, expected = row.split()
        observed = tree.distance( name_a, name_b )
        assert float(expected) == approx( observed, 0.001 )
def test_distances() :
    # Batch distance queries by numeric node id should match the matrix file.
    T = SuchTree( test_tree )
    ids = []
    d1 = []
    for line in open( 'SuchTree/tests/test.matrix' ) :
        a,b,d = line.split()
        d1.append( float(d) )
        # Map leaf names to their integer node ids.
        A = T.leafs[a]
        B = T.leafs[b]
        ids.append( (A,B) )
    result = T.distances( numpy.array( ids, dtype=numpy.int64 ) )
    for D1,D2 in zip( d1,result ) :
        assert D1 == approx( D2, 0.001 )
def test_distances_by_name():
    """ Batch distance queries by leaf name agree with the matrix file. """
    tree = SuchTree( test_tree )
    rows = [ line.split() for line in open( 'SuchTree/tests/test.matrix' ) ]
    expected = [ float(d) for _, _, d in rows ]
    name_pairs = [ (a, b) for a, b, _ in rows ]
    observed = tree.distances_by_name( name_pairs )
    for want, got in zip( expected, observed ) :
        assert want == approx( got, 0.001 )
def test_get_leafs():
    """ get_leafs() on the root yields exactly the ids in the leafs map. """
    tree = SuchTree( test_tree )
    leaf_ids = set( tree.get_leafs( tree.root ) )
    assert leaf_ids == set( tree.leafs.values() )
def test_hierarchy() :
    # Leafs below any internal node must be a subset of all leafs.
    T = SuchTree( test_tree )
    all_leafs = set( T.get_leafs( T.root ) )
    for i in T.get_internal_nodes() :
        some_leafs = set( T.get_leafs( i ) )
        assert some_leafs <= all_leafs
def test_adjacency() :
    # Smoke test: print each (node, parent) edge next to its adjacency
    # matrix entry. There are no asserts here.
    T = SuchTree( test_tree )
    # NOTE(review): unpacking .values() assumes adjacency() returns its two
    # entries (matrix, ids) in a stable order -- TODO confirm.
    aj, leaf_ids = T.adjacency( T.root ).values()
    leaf_ids = list( leaf_ids )
    for node in chain(T.leafs.values(), list(T.get_internal_nodes() )):
        if node == T.root : continue # skip the root node
        parent = T.get_parent( node )
        distance = T.distance( node, parent )
        # Matrix indexes are positions within leaf_ids, not node ids.
        i,j = leaf_ids.index( node ), leaf_ids.index( parent )
        print( node, parent, ':', i, j, ' :: ', aj[i,j], distance )
def test_get_descendant_nodes():
    """ Descendants of the root are exactly the leafs plus internal nodes. """
    tree = SuchTree( test_tree )
    descendants = set( tree.get_descendant_nodes( tree.root ) )
    leafs = set( tree.get_leafs( tree.root ) )
    internals = set( tree.get_internal_nodes() )
    assert descendants == leafs | internals
def test_is_ancestor() :
    T = SuchTree( test_tree )
    # The root is an ancestor of all T.length - 1 other nodes, so the
    # summed results equal T.length - 1.
    assert T.length - 1 == sum( map( lambda x : T.is_ancestor( T.root, x ),
                                     T.get_descendant_nodes( T.root ) ) )
    # With the arguments reversed the sum is -(T.length - 1); presumably
    # is_ancestor returns -1 for "descendant" -- TODO confirm convention.
    assert 1 - T.length == sum( map( lambda x : T.is_ancestor( x, T.root ),
                                     T.get_descendant_nodes( T.root ) ) )
@pytest.mark.skipif(not has_networkx, reason="networkx not installed")
def test_networkx() :
    # A graph built from nodes_data()/edges_data() must cover all nodes.
    T = SuchTree( test_tree )
    g = networkx.graph.Graph()
    g.add_nodes_from( T.nodes_data() )
    g.add_edges_from( T.edges_data() )
    assert set( g.nodes() ) == set( T.get_nodes() )
jylaxp/django | django/core/mail/backends/base.py | 577 | 1573 | """Base email backend class."""
class BaseEmailBackend(object):
    """
    Abstract parent for all email backend implementations.

    Concrete subclasses must provide send_messages(); open() and close()
    are optional hooks for managing a network connection. A backend may
    also be used as a context manager:

        with backend as connection:
            # do something with connection
            pass
    """
    def __init__(self, fail_silently=False, **kwargs):
        # When True, backends should swallow errors instead of raising.
        self.fail_silently = fail_silently

    def open(self):
        """Open a network connection.

        A no-op by default. Backends that maintain a network connection
        override this; applications may call it to force a single
        connection to be reused across several sends (see the SMTP
        backend's send_messages() for a reference implementation). Each
        backend is responsible for tracking its own connection state.
        """
        pass

    def close(self):
        """Close a network connection. A no-op by default."""
        pass

    def __enter__(self):
        self.open()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()

    def send_messages(self, email_messages):
        """
        Send one or more EmailMessage objects and return the number of
        email messages sent.
        """
        raise NotImplementedError('subclasses of BaseEmailBackend must override send_messages() method')
| bsd-3-clause |
degano/root | tutorials/pyroot/qtexample.py | 28 | 1721 | import sys
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import ROOT
import sip
class window(QMainWindow):
    # NOTE: Python 2 / PyQt4 code. Embeds a ROOT canvas and a Quit button
    # in a QMainWindow.
    def __init__(self):
        # Init the main window.
        QMainWindow.__init__(self)
        self.resize(350, 350)
        # Create the central widget.
        self.CentralWidget = QWidget(self)
        self.setCentralWidget(self.CentralWidget)
        self.Layout = QGridLayout(self.CentralWidget)
        # Create a button.
        self.QuitButton = QPushButton(self.centralWidget())
        self.QuitButton.setText('Quit')
        self.Layout.addWidget(self.QuitButton, 1, 0)
        # Connect the button.
        QObject.connect(self.QuitButton, SIGNAL('clicked()'), self.quit)
        # Create a root histogram.
        self.hist = ROOT.TH1F("pipo","pipo", 100, 0, 100)
        # Create the main TQtWidget (using sip to get the pointer to the central widget).
        self.Address = sip.unwrapinstance(self.CentralWidget)
        self.Canvas = ROOT.TQtWidget(sip.voidptr(self.Address).ascobject())
        # Keep ROOT from deleting the canvas out from under Qt.
        ROOT.SetOwnership( self.Canvas, False )
        # Place the TQtWidget in the main grid layout and draw the histogram.
        self.Layout.addWidget(sip.wrapinstance(ROOT.AddressOf(self.Canvas)[0],QWidget), 0, 0)
        self.hist.Draw()

    def quit(self):
        # Close the window and stop the ROOT application loop.
        print 'Bye bye...'
        self.close()
        ROOT.gApplication.Terminate()
if __name__ == '__main__':
    # Wire Qt's lastWindowClosed signal to ROOT's Terminate slot so closing
    # the window ends the ROOT event loop, then show the window and run.
    application = qApp
    terminator = ROOT.TQtRootSlot.CintSlot()
    termAddress = sip.wrapinstance(ROOT.AddressOf(terminator)[0],QObject)
    QObject.connect(application, SIGNAL("lastWindowClosed()"),termAddress ,SLOT("Terminate()"))
    w = window()
    w.show()
    ROOT.gApplication.Run(1)
    print 'Bye forever!'
| lgpl-2.1 |
ravindrapanda/tensorflow | tensorflow/contrib/rnn/python/ops/rnn.py | 48 | 10799 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""RNN helpers for TensorFlow models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import rnn
from tensorflow.python.ops import variable_scope as vs
def stack_bidirectional_rnn(cells_fw,
                            cells_bw,
                            inputs,
                            initial_states_fw=None,
                            initial_states_bw=None,
                            dtype=None,
                            sequence_length=None,
                            scope=None):
  """Creates a bidirectional recurrent neural network.

  Stacks several bidirectional rnn layers. The combined forward and backward
  layer outputs are used as input of the next layer. tf.bidirectional_rnn
  does not allow sharing forward and backward information between layers.
  The input_size of the first forward and backward cells must match.
  The initial state for both directions is zero and no intermediate states
  are returned.

  As described in https://arxiv.org/abs/1303.5778

  Args:
    cells_fw: List of instances of RNNCell, one per layer,
      to be used for forward direction.
    cells_bw: List of instances of RNNCell, one per layer,
      to be used for backward direction.
    inputs: A length T list of inputs, each a tensor of shape
      [batch_size, input_size], or a nested tuple of such elements.
    initial_states_fw: (optional) A list of the initial states (one per layer)
      for the forward RNN.
      Each tensor must have an appropriate type and shape
      `[batch_size, cell_fw.state_size]`.
    initial_states_bw: (optional) Same as for `initial_states_fw`, but using
      the corresponding properties of `cells_bw`.
    dtype: (optional) The data type for the initial state.  Required if
      either of the initial states are not provided.
    sequence_length: (optional) An int32/int64 vector, size `[batch_size]`,
      containing the actual lengths for each of the sequences.
    scope: VariableScope for the created subgraph; defaults to None.

  Returns:
    A tuple (outputs, output_state_fw, output_state_bw) where:
      outputs is a length `T` list of outputs (one for each input), which
        are depth-concatenated forward and backward outputs.
      output_states_fw is the final states, one tensor per layer,
        of the forward rnn.
      output_states_bw is the final states, one tensor per layer,
        of the backward rnn.

  Raises:
    TypeError: If `cell_fw` or `cell_bw` is not an instance of `RNNCell`.
    ValueError: If inputs is None, not a list or an empty list.
  """
  # Validate the layer lists and the (optional) per-layer initial states.
  if not cells_fw:
    raise ValueError("Must specify at least one fw cell for BidirectionalRNN.")
  if not cells_bw:
    raise ValueError("Must specify at least one bw cell for BidirectionalRNN.")
  if not isinstance(cells_fw, list):
    raise ValueError("cells_fw must be a list of RNNCells (one per layer).")
  if not isinstance(cells_bw, list):
    raise ValueError("cells_bw must be a list of RNNCells (one per layer).")
  if len(cells_fw) != len(cells_bw):
    raise ValueError("Forward and Backward cells must have the same depth.")
  if (initial_states_fw is not None and
      (not isinstance(initial_states_fw, list) or
       len(initial_states_fw) != len(cells_fw))):
    raise ValueError(
        "initial_states_fw must be a list of state tensors (one per layer).")
  if (initial_states_bw is not None and
      (not isinstance(initial_states_bw, list) or
       len(initial_states_bw) != len(cells_bw))):
    raise ValueError(
        "initial_states_bw must be a list of state tensors (one per layer).")

  states_fw = []
  states_bw = []
  prev_layer = inputs

  with vs.variable_scope(scope or "stack_bidirectional_rnn"):
    # Each layer consumes the previous layer's combined fw/bw output.
    for i, (cell_fw, cell_bw) in enumerate(zip(cells_fw, cells_bw)):
      initial_state_fw = None
      initial_state_bw = None
      if initial_states_fw:
        initial_state_fw = initial_states_fw[i]
      if initial_states_bw:
        initial_state_bw = initial_states_bw[i]

      with vs.variable_scope("cell_%d" % i) as cell_scope:
        prev_layer, state_fw, state_bw = rnn.static_bidirectional_rnn(
            cell_fw,
            cell_bw,
            prev_layer,
            initial_state_fw=initial_state_fw,
            initial_state_bw=initial_state_bw,
            sequence_length=sequence_length,
            dtype=dtype,
            scope=cell_scope)
      states_fw.append(state_fw)
      states_bw.append(state_bw)

  return prev_layer, tuple(states_fw), tuple(states_bw)
def stack_bidirectional_dynamic_rnn(cells_fw,
                                    cells_bw,
                                    inputs,
                                    initial_states_fw=None,
                                    initial_states_bw=None,
                                    dtype=None,
                                    sequence_length=None,
                                    parallel_iterations=None,
                                    time_major=False,
                                    scope=None):
  """Creates a dynamic bidirectional recurrent neural network.

  Stacks several bidirectional rnn layers. The combined forward and backward
  layer outputs are used as input of the next layer. tf.bidirectional_rnn
  does not allow sharing forward and backward information between layers.
  The input_size of the first forward and backward cells must match.
  The initial state for both directions is zero and no intermediate states
  are returned.

  Args:
    cells_fw: List of instances of RNNCell, one per layer,
      to be used for forward direction.
    cells_bw: List of instances of RNNCell, one per layer,
      to be used for backward direction.
    inputs: The RNN inputs. this must be a tensor of shape:
      `[batch_size, max_time, ...]`, or a nested tuple of such elements.
    initial_states_fw: (optional) A list of the initial states (one per layer)
      for the forward RNN.
      Each tensor must have an appropriate type and shape
      `[batch_size, cell_fw.state_size]`.
    initial_states_bw: (optional) Same as for `initial_states_fw`, but using
      the corresponding properties of `cells_bw`.
    dtype: (optional) The data type for the initial state.  Required if
      either of the initial states are not provided.
    sequence_length: (optional) An int32/int64 vector, size `[batch_size]`,
      containing the actual lengths for each of the sequences.
    parallel_iterations: (Default: 32).  The number of iterations to run in
      parallel.  Those operations which do not have any temporal dependency
      and can be run in parallel, will be.  This parameter trades off
      time for space.  Values >> 1 use more memory but take less time,
      while smaller values use less memory but computations take longer.
    time_major: The shape format of the inputs and outputs Tensors. If true,
      these Tensors must be shaped [max_time, batch_size, depth]. If false,
      these Tensors must be shaped [batch_size, max_time, depth]. Using
      time_major = True is a bit more efficient because it avoids transposes at
      the beginning and end of the RNN calculation. However, most TensorFlow
      data is batch-major, so by default this function accepts input and emits
      output in batch-major form.
    scope: VariableScope for the created subgraph; defaults to None.

  Returns:
    A tuple (outputs, output_state_fw, output_state_bw) where:
      outputs: Output `Tensor` shaped:
        `batch_size, max_time, layers_output]`. Where layers_output
        are depth-concatenated forward and backward outputs.
      output_states_fw is the final states, one tensor per layer,
        of the forward rnn.
      output_states_bw is the final states, one tensor per layer,
        of the backward rnn.

  Raises:
    TypeError: If `cell_fw` or `cell_bw` is not an instance of `RNNCell`.
    ValueError: If inputs is `None`.
  """
  # Validate the layer lists and the (optional) per-layer initial states.
  if not cells_fw:
    raise ValueError("Must specify at least one fw cell for BidirectionalRNN.")
  if not cells_bw:
    raise ValueError("Must specify at least one bw cell for BidirectionalRNN.")
  if not isinstance(cells_fw, list):
    raise ValueError("cells_fw must be a list of RNNCells (one per layer).")
  if not isinstance(cells_bw, list):
    raise ValueError("cells_bw must be a list of RNNCells (one per layer).")
  if len(cells_fw) != len(cells_bw):
    raise ValueError("Forward and Backward cells must have the same depth.")
  if (initial_states_fw is not None and
      (not isinstance(initial_states_fw, list) or
       len(initial_states_fw) != len(cells_fw))):
    raise ValueError(
        "initial_states_fw must be a list of state tensors (one per layer).")
  if (initial_states_bw is not None and
      (not isinstance(initial_states_bw, list) or
       len(initial_states_bw) != len(cells_bw))):
    raise ValueError(
        "initial_states_bw must be a list of state tensors (one per layer).")

  states_fw = []
  states_bw = []
  prev_layer = inputs

  with vs.variable_scope(scope or "stack_bidirectional_rnn"):
    # Each layer consumes the previous layer's concatenated fw/bw output.
    for i, (cell_fw, cell_bw) in enumerate(zip(cells_fw, cells_bw)):
      initial_state_fw = None
      initial_state_bw = None
      if initial_states_fw:
        initial_state_fw = initial_states_fw[i]
      if initial_states_bw:
        initial_state_bw = initial_states_bw[i]

      with vs.variable_scope("cell_%d" % i):
        outputs, (state_fw, state_bw) = rnn.bidirectional_dynamic_rnn(
            cell_fw,
            cell_bw,
            prev_layer,
            initial_state_fw=initial_state_fw,
            initial_state_bw=initial_state_bw,
            sequence_length=sequence_length,
            parallel_iterations=parallel_iterations,
            dtype=dtype,
            time_major=time_major)
        # Concat the outputs to create the new input.
        prev_layer = array_ops.concat(outputs, 2)
      states_fw.append(state_fw)
      states_bw.append(state_bw)

  return prev_layer, tuple(states_fw), tuple(states_bw)
| apache-2.0 |
NixaSoftware/CVis | venv/bin/libs/python/test/numpy.py | 46 | 2433 | # Copyright David Abrahams 2004. Distributed under the Boost
# Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
# Pre-bool-era aliases used by the doctests below.
false = 0;
true = 1;
import doctest, numeric_tests
def _count_failures(test_modules = (numeric_tests,)):
    """Run doctests in each module and return the total failure count."""
    total = 0
    for module in test_modules:
        # doctest.testmod returns (failure_count, test_count).
        total += doctest.testmod(module)[0]
    return total
def _run(args = None):
    # NOTE: Python 2 source (print statements). Returns the total number of
    # doctest failures across all configurations, or 0 when neither numeric
    # extension module is installed.
    import sys, numarray_tests, numeric_tests

    if args is not None:
        sys.argv = args

    # See which of the numeric modules are installed
    has_numeric = 0
    try: import Numeric
    except ImportError: pass
    else:
        has_numeric = 1
        m = Numeric

    has_numarray = 0
    try: import numarray
    except ImportError: pass
    else:
        has_numarray = 1
        m = numarray

    # Bail if neither one is installed
    if not (has_numeric or has_numarray):
        return 0

    # test the info routine outside the doctest. See numpy.cpp for an
    # explanation
    import numpy_ext
    if (has_numarray):
        numpy_ext.info(m.array((1,2,3)))

    failures = 0

    #
    # Run tests 4 different ways if both modules are installed, just
    # to show that set_module_and_type() is working properly
    #

    # run all the tests with default module search
    print 'testing default extension module:', \
        numpy_ext.get_module_name() or '[numeric support not installed]'
    failures += _count_failures()

    # test against Numeric if installed
    if has_numeric:
        print 'testing Numeric module explicitly'
        numpy_ext.set_module_and_type('Numeric', 'ArrayType')
        failures += _count_failures()

    if has_numarray:
        print 'testing numarray module explicitly'
        numpy_ext.set_module_and_type('numarray', 'NDArray')
        # Add the _numarray_tests to the list of things to test in
        # this case.
        failures += _count_failures((numarray_tests, numeric_tests))

    # see that we can go back to the default
    numpy_ext.set_module_and_type('', '')
    print 'testing default module again:', \
        numpy_ext.get_module_name() or '[numeric support not installed]'
    failures += _count_failures()

    return failures
if __name__ == '__main__':
    # Exit status is the number of doctest failures (0 on success).
    print "running..."
    import sys
    status = _run()
    if (status == 0): print "Done."
    sys.exit(status)
| apache-2.0 |
teamfx/openjfx-9-dev-rt | modules/javafx.web/src/main/native/Tools/Scripts/webkitpy/tool/commands/abstractsequencedcommand.py | 122 | 2323 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
from webkitpy.common.system.executive import ScriptError
from webkitpy.tool.commands.stepsequence import StepSequence
from webkitpy.tool.multicommandtool import Command
_log = logging.getLogger(__name__)
class AbstractSequencedCommand(Command):
    # Base class for commands driven by a StepSequence. Subclasses set
    # `steps` and may override _prepare_state() to build the shared state
    # passed through the sequence. (Python 2 source.)
    steps = None

    def __init__(self):
        self._sequence = StepSequence(self.steps)
        Command.__init__(self, self._sequence.options())

    def _prepare_state(self, options, args, tool):
        # Hook for subclasses; the default sequence state is None.
        return None

    def execute(self, options, args, tool):
        try:
            state = self._prepare_state(options, args, tool)
        except ScriptError, e:
            # Preparation failed: log the script output and exit non-zero.
            _log.error(e.message_with_output())
            self._exit(e.exit_code or 2)
        self._sequence.run_and_handle_errors(tool, options, state)
| gpl-2.0 |
PrincetonUniversity/pox | pox/log/color.py | 46 | 5311 | # Copyright 2011 James McCauley
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: Not platform independent -- uses VT escape codes
# Magic sequence used to introduce a command or color
MAGIC = "@@@"
# Colors for log levels
LEVEL_COLORS = {
'DEBUG': 'CYAN',
'INFO': 'GREEN',
'WARNING': 'YELLOW',
'ERROR': 'RED',
'CRITICAL': 'blink@@@RED',
}
# Will get set to True if module is initialized
enabled = False
# Gets set to True if we should strip special sequences but
# not actually try to colorize
_strip_only = False
import logging
import sys
# Name to (intensity, base_value) (more colors added later)
COLORS = {
'black' : (0,0),
'red' : (0,1),
'green' : (0,2),
'yellow' : (0,3),
'blue' : (0,4),
'magenta' : (0,5),
'cyan' : (0,6),
'gray' : (0,7),
'darkgray' : (1,0),
'pink' : (1,1),
'white' : (1,7),
}
# Add intense/bold colors (names it capitals)
for _c in [_n for _n,_v in COLORS.items() if _v[0] == 0]:
COLORS[_c.upper()] = (1,COLORS[_c][1])
COMMANDS = {
'reset' : 0,
'bold' : 1,
'dim' : 2,
'bright' : 1,
'dull' : 2,
'bright:' : 1,
'dull:' : 2,
'blink' : 5,
'BLINK' : 6,
'invert' : 7,
'bg:' : -1, # Special
'level' : -2, # Special -- color of current level
'normal' : 22,
'underline' : 4,
'nounderline' : 24,
}
# Control Sequence Introducer
CSI = "\033["
def _color (color, msg):
  """ Colorizes the given text

  Wraps msg in the escape sequence for `color` and appends the reset
  sequence so later output is unaffected.
  """
  # The original applied .lower() to the reset sequence; "\033[0m" contains
  # no uppercase characters, so that call was a misleading no-op and has
  # been removed.
  return _proc(MAGIC + color) + msg + _proc(MAGIC + 'reset')
def _proc (msg, level_color = "DEBUG"):
  """
  Expand MAGIC-delimited command/color sequences in msg.

  msg is split on MAGIC; the text after each marker is expected to start
  with a COMMANDS and/or COLORS name, which is translated into the
  corresponding ANSI escape sequence (or removed entirely when
  _strip_only is set).  level_color is the name of the log level being
  formatted; it drives the special 'level' command.
  """
  # Uses dict.items() instead of the Python-2-only iteritems() so the
  # module also runs under Python 3.
  msg = msg.split(MAGIC)
  r = ''
  i = 0
  cmd = False
  while i < len(msg):
    m = msg[i]
    i += 1
    if cmd:
      # Longest-prefix match against the command table...
      best = None
      bestlen = 0
      for k,v in COMMANDS.items():
        if len(k) > bestlen:
          if m.startswith(k):
            best = (k,v)
            bestlen = len(k)
      special = None
      if best is not None and best[0].endswith(':'):
        # Commands ending in ':' (bg:, bright:, dull:) modify the color
        # that must follow immediately; strip them and match again.
        special = best
        m = m[bestlen:]
        best = None
        bestlen = 0
      # ...then against the color table.  bestlen carries over, so a
      # longer color name can win over a shorter command match.
      for k,v in COLORS.items():
        if len(k) > bestlen:
          if m.startswith(k):
            best = (k,v)
            bestlen = len(k)
      if best is not None:
        m = m[bestlen:]
        if type(best[1]) is tuple:
          # Color
          brightness,color = best[1]
          if special is not None:
            if special[1] == -1:
              # bg: -- background colors use codes 40-47 (30-37 + 10)
              brightness = None
              color += 10
          color += 30
          if not _strip_only:
            r += CSI
            if brightness is not None:
              r += str(brightness) + ";"
            r += str(color) + "m"
        elif not _strip_only:
          # Command
          if best[1] == -2:
            # 'level' -- recursively emit the current level's color
            r += _proc(MAGIC + LEVEL_COLORS.get(level_color, ""), level_color)
          else:
            r += CSI + str(best[1]) + "m"
    cmd = True
    r += m
  return r
def launch (entire=False):
  """
  If --entire then the whole message is color-coded, otherwise just the
  log level.

  Also turns on interpretation of some special sequences in the log
  format string.  For example, try:
   log --format="%(levelname)s: @@@bold%(message)s@@@normal" log.color
  """
  global enabled
  if enabled: return

  from pox.core import core
  log = core.getLogger()

  windows_hack = False

  # Try to work on Windows
  if sys.platform == "win32":
    try:
      from colorama import init
      windows_hack = True
      init()
    except:
      log.info("You need colorama if you want color logging on Windows")
      # Without colorama we still strip the MAGIC sequences from the
      # output; we just never emit escape codes.
      global _strip_only
      _strip_only = True

  from pox.core import _default_log_handler as dlf
  if not dlf:
    log.warning("Color logging disabled -- no default logger found")
    return
  #if not hasattr(dlf, 'formatter'):
  #  log.warning("Color logging disabled -- no formatter found")
  #  return
  #if not hasattr(dlf.formatter, '_fmt'):
  #  log.warning("Color logging disabled -- formatter unrecognized")
  #  return

  # Monkeypatch in a new format function...
  old_format = dlf.format
  if entire:
    # Color the whole formatted message by the record's level.
    def new_format (record):
      msg = _proc(old_format(record), record.levelname)
      color = LEVEL_COLORS.get(record.levelname)
      if color is None:
        return msg
      return _color(color, msg)
  else:
    # Color only the level name; temporarily swap it so the stock
    # formatter embeds the colored version.
    def new_format (record):
      color = LEVEL_COLORS.get(record.levelname)
      oldlevelname = record.levelname
      if color is not None:
        record.levelname = _color(color, record.levelname)
      r = _proc(old_format(record), oldlevelname)
      record.levelname = oldlevelname
      return r
  dlf.format = new_format

  if windows_hack:
    if hasattr(dlf, "stream"):
      if dlf.stream is sys.__stderr__:
        dlf.stream = sys.stderr
        # NOTE(review): on Windows, color is only enabled when the default
        # handler was writing to the original stderr -- confirm intended.
        enabled = True
  else:
    enabled = True
| apache-2.0 |
haya14busa/alc-etm-searcher | nltk-3.0a3/nltk/test/unit/test_stem.py | 4 | 1047 | # -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals
import unittest
from nltk.stem.snowball import SnowballStemmer
class SnowballTest(unittest.TestCase):
    """Tests for nltk.stem.snowball.SnowballStemmer.

    Uses unittest assertion methods instead of bare ``assert`` statements,
    which are silently stripped when Python runs with -O and produce no
    diagnostic message on failure.
    """

    def test_russian(self):
        # Russian words both consisting of Cyrillic
        # and Roman letters can be stemmed.
        stemmer_russian = SnowballStemmer("russian")
        self.assertEqual(stemmer_russian.stem("авантненькая"), "авантненьк")
        self.assertEqual(stemmer_russian.stem("avenantnen'kai^a"), "avenantnen'k")

    def test_german(self):
        stemmer_german = SnowballStemmer("german")
        stemmer_german2 = SnowballStemmer("german", ignore_stopwords=True)
        self.assertEqual(stemmer_german.stem("Schr\xe4nke"), 'schrank')
        self.assertEqual(stemmer_german2.stem("Schr\xe4nke"), 'schrank')
        # 'keinen' is a German stopword: stemmed normally, but left
        # untouched when ignore_stopwords is set.
        self.assertEqual(stemmer_german.stem("keinen"), 'kein')
        self.assertEqual(stemmer_german2.stem("keinen"), 'keinen')

    def test_short_strings_bug(self):
        stemmer = SnowballStemmer('english')
        self.assertEqual(stemmer.stem("y's"), 'y')
| mit |
rue89-tech/edx-platform | common/lib/xmodule/xmodule/tests/test_editing_module.py | 181 | 2640 | """ Tests for editing descriptors"""
import unittest
import os
import logging
from mock import Mock
from pkg_resources import resource_string
from opaque_keys.edx.locations import Location
from xmodule.editing_module import TabsEditingDescriptor
from xblock.field_data import DictFieldData
from xblock.fields import ScopeIds
from xmodule.tests import get_test_descriptor_system
log = logging.getLogger(__name__)
class TabsEditingDescriptorTestCase(unittest.TestCase):
    """ Testing TabsEditingDescriptor"""

    def setUp(self):
        """Build a TabsEditingDescriptor with three editing tabs; the first
        carries scss/css resources loaded from the test_files directory."""
        super(TabsEditingDescriptorTestCase, self).setUp()
        system = get_test_descriptor_system()
        system.render_template = Mock(return_value="<div>Test Template HTML</div>")
        self.tabs = [
            {
                'name': "Test_css",
                'template': "tabs/codemirror-edit.html",
                'current': True,
                'css': {
                    'scss': [
                        resource_string(
                            __name__,
                            '../../test_files/test_tabseditingdescriptor.scss'
                        )
                    ],
                    'css': [
                        resource_string(
                            __name__,
                            '../../test_files/test_tabseditingdescriptor.css'
                        )
                    ]
                }
            },
            {
                'name': "Subtitles",
                'template': "video/subtitles.html",
            },
            {
                'name': "Settings",
                'template': "tabs/video-metadata-edit-tab.html"
            }
        ]
        # tabs is a class attribute of the descriptor, so it is patched on
        # the class itself before construction.
        TabsEditingDescriptor.tabs = self.tabs
        self.descriptor = system.construct_xblock_from_class(
            TabsEditingDescriptor,
            scope_ids=ScopeIds(None, None, None, Location('org', 'course', 'run', 'category', 'name', 'revision')),
            field_data=DictFieldData({}),
        )

    def test_get_css(self):
        """test get_css"""
        css = self.descriptor.get_css()
        test_files_dir = os.path.dirname(__file__).replace('xmodule/tests', 'test_files')
        test_css_file = os.path.join(test_files_dir, 'test_tabseditingdescriptor.scss')
        with open(test_css_file) as new_css:
            added_css = new_css.read()
        # Both the scss and css buckets should end with the test file's text.
        self.assertEqual(css['scss'].pop(), added_css)
        self.assertEqual(css['css'].pop(), added_css)

    def test_get_context(self):
        """test get_context"""
        rendered_context = self.descriptor.get_context()
        self.assertListEqual(rendered_context['tabs'], self.tabs)
| agpl-3.0 |
MERegistro/meregistro | meregistro/apps/seguridad/models/Rol.py | 1 | 1354 | # -*- coding: UTF-8 -*-
from django.db import models
from apps.seguridad.models import Credencial, TipoAmbito
class Rol(models.Model):
    """A security role; roles form a tree via ``padre`` with a
    materialized ``path`` of ancestor ids (e.g. '/1/4/')."""

    ROL_ADMIN_NACIONAL = 'AdminNacional'
    ROL_ADMIN_SEGURIDAD = 'AdminSeguridad'
    ROL_REFERENTE_JURISDICCIONAL = 'ReferenteJurisdiccional'
    ROL_REFERENTE_INSTITUCIONAL = 'ReferenteInstitucional'

    nombre = models.CharField(max_length=40)
    descripcion = models.CharField(max_length=255)
    credenciales = models.ManyToManyField(Credencial, related_name='roles')
    tipos_ambito_asignable = models.ManyToManyField(TipoAmbito, related_name='roles')
    roles_asignables = models.ManyToManyField('self', related_name='roles_asignadores', symmetrical=False)
    # Materialized path of ancestor ids, maintained in save().
    path = models.CharField(max_length=255)
    padre = models.ForeignKey('self', null=True, blank=True)

    class Meta:
        app_label = 'seguridad'

    def __unicode__(self):
        return self.descripcion

    def save(self, *args, **kwargs):
        # Accept and forward Django's save() arguments (force_insert,
        # using, ...).  The previous signature `save(self)` raised
        # TypeError on calls such as obj.save(using=db).
        if self.padre is None:
            padre_path = '/'
        else:
            padre_path = self.padre.path
        # NOTE(review): on the first save self.id is still None, so the
        # stored path contains the literal 'None' -- confirm whether path
        # should be recomputed once the row has its primary key.
        self.path = padre_path + str(self.id) + '/'
        models.Model.save(self, *args, **kwargs)

    def asigna(self, rol):
        """True if this role is allowed to assign ``rol``."""
        return bool(self.roles_asignables.filter(id=rol.id))

    def asignableAAmbito(self, ambito):
        """True if this role may be assigned within ``ambito``'s type."""
        return bool(self.tipos_ambito_asignable.filter(id=ambito.tipo.id))
| bsd-3-clause |
vishnu-kumar/PeformanceFramework | rally/plugins/openstack/scenarios/murano/environments.py | 11 | 2862 | # Copyright 2015: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from rally.common import log as logging
from rally import consts
from rally.plugins.openstack import scenario
from rally.plugins.openstack.scenarios.murano import utils
from rally.task import atomic
from rally.task import validation
LOG = logging.getLogger(__name__)
class MuranoEnvironments(utils.MuranoScenario):
    """Benchmark scenarios for Murano environments."""

    @validation.required_clients("murano")
    @validation.required_services(consts.Service.MURANO)
    @scenario.configure(context={"cleanup": ["murano.environments"]})
    def list_environments(self):
        """List the murano environments.

        Run murano environment-list for listing all environments.
        """
        self._list_environments()

    @validation.required_clients("murano")
    @validation.required_services(consts.Service.MURANO)
    @scenario.configure(context={"cleanup": ["murano.environments"]})
    def create_and_delete_environment(self):
        """Create environment, session and delete environment."""
        environment = self._create_environment()
        # A session is required before the environment can be modified.
        self._create_session(environment.id)
        self._delete_environment(environment)

    @validation.required_clients("murano")
    @validation.required_services(consts.Service.MURANO)
    @validation.required_contexts("murano_packages")
    @scenario.configure(context={"cleanup": ["murano"], "roles": ["admin"]})
    def create_and_deploy_environment(self, packages_per_env=1):
        """Create environment, session and deploy environment.

        Create environment, create session, add app to environment
        packages_per_env times, send environment to deploy.

        :param packages_per_env: number of packages per environment
        """
        environment = self._create_environment()
        session = self._create_session(environment.id)
        package = self.context["tenant"]["packages"][0]
        # Group all per-package service creations under one atomic action
        # so the report shows a single timing for the whole batch.
        with atomic.ActionTimer(self, "murano.create_service"):
            for i in range(packages_per_env):
                self._create_service(environment, session,
                                     package.fully_qualified_name,
                                     atomic_action=False)
        self._deploy_environment(environment, session)
| apache-2.0 |
illicitonion/givabit | lib/sdks/google_appengine_1.7.1/google_appengine/google/appengine/_internal/django/utils/dateformat.py | 23 | 8904 | """
PHP date() style date formatting
See http://www.php.net/date for format strings
Usage:
>>> import datetime
>>> d = datetime.datetime.now()
>>> df = DateFormat(d)
>>> print df.format('jS F Y H:i')
7th October 2003 11:39
>>>
"""
import re
import time
import calendar
from google.appengine._internal.django.utils.dates import MONTHS, MONTHS_3, MONTHS_AP, WEEKDAYS, WEEKDAYS_ABBR
from google.appengine._internal.django.utils.tzinfo import LocalTimezone
from google.appengine._internal.django.utils.translation import ugettext as _
from google.appengine._internal.django.utils.encoding import force_unicode
re_formatchars = re.compile(r'(?<!\\)([aAbBcdDfFgGhHiIjlLmMnNOPrsStTUuwWyYzZ])')
re_escaped = re.compile(r'\\(.)')
class Formatter(object):
    """Expands a PHP date()-style format string by dispatching each format
    character to the same-named method on the instance."""

    def format(self, formatstr):
        # re_formatchars.split alternates literal text (even indices) with
        # single format characters (odd indices).
        tokens = re_formatchars.split(force_unicode(formatstr))
        out = []
        for idx, token in enumerate(tokens):
            if idx % 2 == 1:
                out.append(force_unicode(getattr(self, token)()))
            elif token:
                out.append(re_escaped.sub(r'\1', token))
        return u''.join(out)
class TimeFormat(Formatter):
    """Implements the time-of-day format characters over a time/datetime."""

    def __init__(self, t):
        self.data = t

    def a(self):
        "'a.m.' or 'p.m.'"
        return _('p.m.') if self.data.hour > 11 else _('a.m.')

    def A(self):
        "'AM' or 'PM'"
        return _('PM') if self.data.hour > 11 else _('AM')

    def B(self):
        "Swatch Internet time"
        raise NotImplementedError

    def f(self):
        """
        Time, in 12-hour hours and minutes, with minutes left off if they're
        zero.
        Examples: '1', '1:30', '2:05', '2'
        Proprietary extension.
        """
        if self.data.minute == 0:
            return self.g()
        return u'%s:%s' % (self.g(), self.i())

    def g(self):
        "Hour, 12-hour format without leading zeros; i.e. '1' to '12'"
        hour = self.data.hour
        if hour == 0:
            return 12
        return hour - 12 if hour > 12 else hour

    def G(self):
        "Hour, 24-hour format without leading zeros; i.e. '0' to '23'"
        return self.data.hour

    def h(self):
        "Hour, 12-hour format; i.e. '01' to '12'"
        return u'%02d' % self.g()

    def H(self):
        "Hour, 24-hour format; i.e. '00' to '23'"
        return u'%02d' % self.G()

    def i(self):
        "Minutes; i.e. '00' to '59'"
        return u'%02d' % self.data.minute

    def P(self):
        """
        Time, in 12-hour hours, minutes and 'a.m.'/'p.m.', with minutes left off
        if they're zero and the strings 'midnight' and 'noon' if appropriate.
        Examples: '1 a.m.', '1:30 p.m.', 'midnight', 'noon', '12:30 p.m.'
        Proprietary extension.
        """
        if self.data.minute == 0 and self.data.hour == 0:
            return _('midnight')
        if self.data.minute == 0 and self.data.hour == 12:
            return _('noon')
        return u'%s %s' % (self.f(), self.a())

    def s(self):
        "Seconds; i.e. '00' to '59'"
        return u'%02d' % self.data.second

    def u(self):
        "Microseconds"
        return self.data.microsecond
class DateFormat(TimeFormat):
    """Implements the date-related format characters over a date/datetime."""

    # Cumulative day count at the start of each month (1-12, index 0 unused)
    # for a non-leap year; used by z().
    year_days = [None, 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334]

    def __init__(self, dt):
        # Accepts either a datetime or date object.
        self.data = dt
        self.timezone = getattr(dt, 'tzinfo', None)
        if hasattr(self.data, 'hour') and not self.timezone:
            # Naive datetimes are assumed to be in the local timezone.
            self.timezone = LocalTimezone(dt)

    def b(self):
        "Month, textual, 3 letters, lowercase; e.g. 'jan'"
        return MONTHS_3[self.data.month]

    def c(self):
        """
        ISO 8601 Format
        Example : '2008-01-02T10:30:00.000123'
        """
        return self.data.isoformat()

    def d(self):
        "Day of the month, 2 digits with leading zeros; i.e. '01' to '31'"
        return u'%02d' % self.data.day

    def D(self):
        "Day of the week, textual, 3 letters; e.g. 'Fri'"
        return WEEKDAYS_ABBR[self.data.weekday()]

    def F(self):
        "Month, textual, long; e.g. 'January'"
        return MONTHS[self.data.month]

    def I(self):
        "'1' if Daylight Savings Time, '0' otherwise."
        if self.timezone and self.timezone.dst(self.data):
            return u'1'
        else:
            return u'0'

    def j(self):
        "Day of the month without leading zeros; i.e. '1' to '31'"
        return self.data.day

    def l(self):
        "Day of the week, textual, long; e.g. 'Friday'"
        return WEEKDAYS[self.data.weekday()]

    def L(self):
        "Boolean for whether it is a leap year; i.e. True or False"
        return calendar.isleap(self.data.year)

    def m(self):
        "Month; i.e. '01' to '12'"
        return u'%02d' % self.data.month

    def M(self):
        "Month, textual, 3 letters; e.g. 'Jan'"
        return MONTHS_3[self.data.month].title()

    def n(self):
        "Month without leading zeros; i.e. '1' to '12'"
        return self.data.month

    def N(self):
        "Month abbreviation in Associated Press style. Proprietary extension."
        return MONTHS_AP[self.data.month]

    def O(self):
        "Difference to Greenwich time in hours; e.g. '+0200'"
        seconds = self.Z()
        return u"%+03d%02d" % (seconds // 3600, (seconds // 60) % 60)

    def r(self):
        "RFC 2822 formatted date; e.g. 'Thu, 21 Dec 2000 16:01:07 +0200'"
        return self.format('D, j M Y H:i:s O')

    def S(self):
        "English ordinal suffix for the day of the month, 2 characters; i.e. 'st', 'nd', 'rd' or 'th'"
        if self.data.day in (11, 12, 13): # Special case
            return u'th'
        last = self.data.day % 10
        if last == 1:
            return u'st'
        if last == 2:
            return u'nd'
        if last == 3:
            return u'rd'
        return u'th'

    def t(self):
        "Number of days in the given month; i.e. '28' to '31'"
        return u'%02d' % calendar.monthrange(self.data.year, self.data.month)[1]

    def T(self):
        "Time zone of this machine; e.g. 'EST' or 'MDT'"
        name = self.timezone and self.timezone.tzname(self.data) or None
        if name is None:
            # Fall back to the numeric UTC offset when no name is known.
            name = self.format('O')
        # NOTE: unicode() is a Python 2 builtin.
        return unicode(name)

    def U(self):
        "Seconds since the Unix epoch (January 1 1970 00:00:00 GMT)"
        if getattr(self.data, 'tzinfo', None):
            return int(calendar.timegm(self.data.utctimetuple()))
        else:
            return int(time.mktime(self.data.timetuple()))

    def w(self):
        "Day of the week, numeric, i.e. '0' (Sunday) to '6' (Saturday)"
        return (self.data.weekday() + 1) % 7

    def W(self):
        "ISO-8601 week number of year, weeks starting on Monday"
        # Algorithm from http://www.personal.ecu.edu/mccartyr/ISOwdALG.txt
        week_number = None
        jan1_weekday = self.data.replace(month=1, day=1).weekday() + 1
        weekday = self.data.weekday() + 1
        day_of_year = self.z()
        if day_of_year <= (8 - jan1_weekday) and jan1_weekday > 4:
            # Date belongs to the last ISO week of the previous year.
            if jan1_weekday == 5 or (jan1_weekday == 6 and calendar.isleap(self.data.year-1)):
                week_number = 53
            else:
                week_number = 52
        else:
            if calendar.isleap(self.data.year):
                i = 366
            else:
                i = 365
            if (i - day_of_year) < (4 - weekday):
                # Date belongs to the first ISO week of the next year.
                week_number = 1
            else:
                j = day_of_year + (7 - weekday) + (jan1_weekday - 1)
                week_number = j // 7
                if jan1_weekday > 4:
                    week_number -= 1
        return week_number

    def y(self):
        "Year, 2 digits; e.g. '99'"
        return unicode(self.data.year)[2:]

    def Y(self):
        "Year, 4 digits; e.g. '1999'"
        return self.data.year

    def z(self):
        "Day of the year; i.e. '0' to '365'"
        doy = self.year_days[self.data.month] + self.data.day
        if self.L() and self.data.month > 2:
            doy += 1
        return doy

    def Z(self):
        """
        Time zone offset in seconds (i.e. '-43200' to '43200'). The offset for
        timezones west of UTC is always negative, and for those east of UTC is
        always positive.
        """
        if not self.timezone:
            return 0
        offset = self.timezone.utcoffset(self.data)
        # Only days can be negative, so negative offsets have days=-1 and
        # seconds positive. Positive offsets have days=0
        return offset.days * 86400 + offset.seconds
def format(value, format_string):
    "Convenience function: format a date/datetime with DateFormat."
    return DateFormat(value).format(format_string)
def time_format(value, format_string):
    "Convenience function: format a time with TimeFormat."
    return TimeFormat(value).format(format_string)
| apache-2.0 |
mkoistinen/cmsplugin-sections | cmsplugin_sections/models.py | 2 | 2298 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible #, force_text
from django.utils.translation import ugettext_lazy as _
from .unique_slugify import unique_slugify
from cms.models import CMSPlugin
class AbstractSectionContainerPluginModel(CMSPlugin):
    """Abstract base for section-container plugin models (adds no fields)."""
    # Removed a redundant trailing `pass` -- the class body was not empty.
    class Meta:
        abstract = True
class SectionContainerPluginModel(AbstractSectionContainerPluginModel):
    # When True, the container is rendered as a subordinate (child) page.
    subordinate_page = models.BooleanField(_('subordinate_page?'), default=False)
@python_2_unicode_compatible
class AbstractSectionBasePluginModel(CMSPlugin):
    """
    Defines a common interface for section plugins.
    """
    class Meta:
        abstract = True
        taints_cache = True

    # Section heading displayed on the page.
    section_title = models.CharField(_('title'),
        blank=False,
        default='',
        help_text=_('This is the section title.'),
        max_length=64,
    )
    show_title = models.BooleanField(_('display title?'),
        default=True,
    )
    # Menu label; defaults to section_title in save() when left empty.
    section_menu_label = models.CharField(_('label'),
        blank=True,
        default='',
        help_text=_('This is how the menu item is displayed. Leave empty to use section title.'),
        max_length=64,
    )
    # URL hash fragment; auto-generated from section_title in save()
    # when left empty.
    section_menu_slug = models.SlugField(_('section menu slug'),
        blank=True,
        default='',
        help_text=_('This is the hash part of the URL for intra-page links. Leave it blank and it will be auto-generated from the section title.'),
        max_length=64,
    )
    show_in_menu = models.BooleanField(_('show in menu?'),
        default=True,
    )

    def save(self, *args, **kwargs):
        """
        Save override to ensure that there is a unique slug for this item.
        """
        # Fill derived fields from the title before persisting.
        if not self.section_menu_label:
            self.section_menu_label = self.section_title
        if not self.section_menu_slug:
            unique_slugify(self, self.section_title, slug_field_name='section_menu_slug')
        super(AbstractSectionBasePluginModel, self).save(*args, **kwargs)

    def __str__(self):
        return self.section_menu_label
class SectionBasePluginModel(AbstractSectionBasePluginModel):
    """
    Defines a common interface for section plugins.
    """
    # Invalidate the CMS cache when instances of this plugin change.
    taints_cache = True
| mit |
andymckay/django | tests/modeltests/m2o_recursive/tests.py | 150 | 1722 | from __future__ import absolute_import
from django.test import TestCase
from .models import Category, Person
class ManyToOneRecursiveTests(TestCase):
    """Recursive FK on one model: Category.parent points at Category."""

    def setUp(self):
        self.r = Category(id=None, name='Root category', parent=None)
        self.r.save()
        self.c = Category(id=None, name='Child category', parent=self.r)
        self.c.save()

    def test_m2o_recursive(self):
        # Root sees the child through the reverse accessor...
        self.assertQuerysetEqual(self.r.child_set.all(),
                                 ['<Category: Child category>'])
        self.assertEqual(self.r.child_set.get(name__startswith='Child').id, self.c.id)
        self.assertEqual(self.r.parent, None)
        # ...and the child (with no children of its own) points back at it.
        self.assertQuerysetEqual(self.c.child_set.all(), [])
        self.assertEqual(self.c.parent.id, self.r.id)
class MultipleManyToOneRecursiveTests(TestCase):
    """Two recursive FKs on one model: Person.mother and Person.father."""

    def setUp(self):
        self.dad = Person(full_name='John Smith Senior', mother=None, father=None)
        self.dad.save()
        self.mom = Person(full_name='Jane Smith', mother=None, father=None)
        self.mom.save()
        self.kid = Person(full_name='John Smith Junior', mother=self.mom, father=self.dad)
        self.kid.save()

    def test_m2o_recursive2(self):
        self.assertEqual(self.kid.mother.id, self.mom.id)
        self.assertEqual(self.kid.father.id, self.dad.id)
        # Each parent exposes the child via its own reverse accessor.
        self.assertQuerysetEqual(self.dad.fathers_child_set.all(),
                                 ['<Person: John Smith Junior>'])
        self.assertQuerysetEqual(self.mom.mothers_child_set.all(),
                                 ['<Person: John Smith Junior>'])
        self.assertQuerysetEqual(self.kid.mothers_child_set.all(), [])
        self.assertQuerysetEqual(self.kid.fathers_child_set.all(), [])
| bsd-3-clause |
joostvdg/jenkins-job-builder | tests/parallel/test_parallel.py | 5 | 2380 | # Copyright 2015 David Caro
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
from multiprocessing import cpu_count
from testtools import matchers
from testtools import TestCase
from jenkins_jobs.parallel import parallelize
from tests.base import mock
class TestCaseParallel(TestCase):
    """Tests for the @parallelize decorator."""

    def test_parallel_correct_order(self):
        # Results must come back in the order of the argument list,
        # regardless of which worker finished first.
        expected = list(range(10, 20))

        @parallelize
        def parallel_test(num_base, num_extra):
            return num_base + num_extra

        parallel_args = [{'num_extra': num} for num in range(10)]
        result = parallel_test(10, parallelize=parallel_args)
        self.assertThat(result, matchers.Equals(expected))

    def test_parallel_time_less_than_serial(self):
        @parallelize
        def wait(secs):
            time.sleep(secs)

        before = time.time()
        # ten threads to make it as fast as possible
        wait(parallelize=[{'secs': 1} for _ in range(10)], n_workers=10)
        after = time.time()
        # Ten 1-second sleeps in parallel must finish well under the
        # 10 seconds serial execution would need.
        self.assertThat(after - before, matchers.LessThan(5))

    def test_parallel_single_thread(self):
        # n_workers=1 degenerates to serial execution but must still
        # return the same ordered results.
        expected = list(range(10, 20))

        @parallelize
        def parallel_test(num_base, num_extra):
            return num_base + num_extra

        parallel_args = [{'num_extra': num} for num in range(10)]
        result = parallel_test(10, parallelize=parallel_args, n_workers=1)
        self.assertThat(result, matchers.Equals(expected))

    @mock.patch('jenkins_jobs.parallel.cpu_count', wraps=cpu_count)
    def test_use_auto_detect_cores(self, mockCpu_count):
        # n_workers=0 means "one worker per detected CPU core".
        @parallelize
        def parallel_test():
            return True

        result = parallel_test(parallelize=[{} for _ in range(10)],
                               n_workers=0)
        self.assertThat(result, matchers.Equals([True for _ in range(10)]))
        mockCpu_count.assert_called_once_with()
| apache-2.0 |
damdam-s/OCB | openerp/tools/__init__.py | 337 | 1447 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import copy
import win32
import appdirs
from config import config
from misc import *
from convert import *
from translate import *
from graph import graph
from image import *
from amount_to_text import *
from amount_to_text_en import *
from pdf_utils import *
from yaml_import import *
from sql import *
from float_utils import *
from mail import *
from func import *
from debugger import *
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
lukauskas/scipy | benchmarks/benchmarks/linalg.py | 44 | 2636 | from __future__ import division, absolute_import, print_function
import numpy.linalg as nl
import numpy as np
from numpy.testing import assert_
from numpy.random import rand
try:
import scipy.linalg as sl
except ImportError:
pass
from .common import Benchmark
def random(size):
    """Uniform random array whose shape is given by the ``size`` sequence."""
    shape = tuple(size)
    return rand(*shape)
class Bench(Benchmark):
    """Time common decompositions in numpy.linalg vs scipy.linalg."""

    params = [
        [20, 100, 500, 1000],
        ['contig', 'nocont'],
        ['numpy', 'scipy']
    ]
    param_names = ['size', 'contiguous', 'module']

    def setup(self, size, contig, module):
        mat = random([size, size])
        # Inflate the diagonal so the matrix is safely non-singular.
        for idx in range(size):
            mat[idx, idx] = 10 * (.1 + mat[idx, idx])
        rhs = random([size])
        if contig != 'contig':
            # Reversing both axes yields a non-contiguous view.
            mat = mat[-1::-1, -1::-1]
            assert_(not mat.flags['CONTIGUOUS'])
        self.a = mat
        self.b = rhs

    def _lin(self, module):
        # Pick the numpy or scipy linalg implementation under test.
        return nl if module == 'numpy' else sl

    def time_solve(self, size, contig, module):
        self._lin(module).solve(self.a, self.b)

    def time_inv(self, size, contig, module):
        self._lin(module).inv(self.a)

    def time_det(self, size, contig, module):
        self._lin(module).det(self.a)

    def time_eigvals(self, size, contig, module):
        self._lin(module).eigvals(self.a)

    def time_svd(self, size, contig, module):
        self._lin(module).svd(self.a)
class Norm(Benchmark):
    """Time matrix norms in numpy.linalg vs scipy.linalg."""

    params = [
        [(20, 20), (100, 100), (1000, 1000), (20, 1000), (1000, 20)],
        ['contig', 'nocont'],
        ['numpy', 'scipy']
    ]
    param_names = ['shape', 'contiguous', 'module']

    def setup(self, shape, contig, module):
        mat = np.random.randn(*shape)
        if contig != 'contig':
            # Reversing both axes yields a non-contiguous view.
            mat = mat[-1::-1, -1::-1]
            assert_(not mat.flags['CONTIGUOUS'])
        self.a = mat

    def _lin(self, module):
        # Pick the numpy or scipy linalg implementation under test.
        return nl if module == 'numpy' else sl

    def time_1_norm(self, size, contig, module):
        self._lin(module).norm(self.a, ord=1)

    def time_inf_norm(self, size, contig, module):
        self._lin(module).norm(self.a, ord=np.inf)

    def time_frobenius_norm(self, size, contig, module):
        self._lin(module).norm(self.a)
| bsd-3-clause |
codilime/contrail-controller | src/discovery/disc_chash.py | 2 | 2278 | # Copyright http://michaelnielsen.org/blog/consistent-hashing/
#
import bisect
import hashlib
class ConsistentHash:
    '''ConsistentHash(rl,nr) creates a consistent hash object for a
    cluster of resources rl, using nr replicas.

    resource_list is list of resource names. hash_tuples is a list of
    tuples (j,k,hash), where j ranges over resources, k ranges over
    replicas (0...r-1), and hash is the corresponding hash value, in
    the range [0,1). The tuples are sorted by increasing hash value.

    Class instance method, get_resource(key), returns the name of
    the resource to which key should be mapped.

    get_resources returns the entire list of resources in the ring
    starting from resource to which key should be mapped and moving
    clockwise. This is useful for picking backups or replica
    candidates.
    '''
    def __init__(self, resource_list=(), num_replicas=1):
        self.num_replicas = num_replicas
        # One point on the ring per (resource, replica) pair.
        hash_tuples = [(j, k, my_hash(str(j) + "_" + str(k)))
                       for j in resource_list
                       for k in range(self.num_replicas)]
        # Sort by hash value.  Using key= (instead of a cmp-style
        # comparator, which relied on the removed cmp() builtin) works on
        # both Python 2 and Python 3 and is faster.
        hash_tuples.sort(key=lambda t: t[2])
        self.hash_tuples = hash_tuples

    def get_index(self, key):
        '''Returns the index of the resource which key gets sent to.'''
        h = my_hash(key)
        # edge case where we cycle past hash value of 1 and back to 0.
        if h > self.hash_tuples[-1][2]:
            return 0
        # Materialize a list: on Python 3 a lazy map() object cannot be
        # passed to bisect.
        hash_values = [t[2] for t in self.hash_tuples]
        return bisect.bisect_left(hash_values, h)

    def get_resource(self, key):
        '''Returns the name of the resource which key gets sent to.'''
        index = self.get_index(key)
        return self.hash_tuples[index][0]

    def get_resources(self, key):
        '''Returns every resource in ring order, starting at the resource
        key maps to and walking clockwise.'''
        l = []
        if len(self.hash_tuples) == 0:
            return l
        index = self.get_index(key)
        # Hash of the predecessor ring point: walking forward until we see
        # that hash again traverses the whole ring exactly once.
        h = self.hash_tuples[index - 1][2] if index else self.hash_tuples[-1][2]
        while h != self.hash_tuples[index][2]:
            machine = self.hash_tuples[index][0]
            if machine not in l:
                l.append(machine)
            index = (index + 1) % len(self.hash_tuples)
        return l


def my_hash(key):
    '''my_hash(key) returns a hash in the range [0,1).'''
    if not isinstance(key, bytes):
        # hashlib requires bytes on Python 3; Python 2 str already is
        # bytes and skips this branch, preserving the old behavior.
        key = key.encode('utf-8')
    return (int(hashlib.md5(key).hexdigest(), 16) % 1000000) / 1000000.0
| apache-2.0 |
MikeC84/jet-3.4.10-gdd05a11 | tools/perf/util/setup.py | 4998 | 1330 | #!/usr/bin/python2
from distutils.core import setup, Extension
from os import getenv
from distutils.command.build_ext import build_ext as _build_ext
from distutils.command.install_lib import install_lib as _install_lib
class build_ext(_build_ext):
    # Redirect extension build output into the directories chosen by the
    # perf Makefile (PYTHON_EXTBUILD_LIB / PYTHON_EXTBUILD_TMP).
    def finalize_options(self):
        _build_ext.finalize_options(self)
        self.build_lib  = build_lib
        self.build_temp = build_tmp
class install_lib(_install_lib):
    # Install from the Makefile-chosen build directory rather than the
    # distutils default.
    def finalize_options(self):
        _install_lib.finalize_options(self)
        self.build_dir = build_lib
cflags = ['-fno-strict-aliasing', '-Wno-write-strings']
cflags += getenv('CFLAGS', '').split()

# Build locations are dictated by the perf Makefile via the environment.
build_lib = getenv('PYTHON_EXTBUILD_LIB')
build_tmp = getenv('PYTHON_EXTBUILD_TMP')

# Read the list of C source files, skipping blank lines and '#' comments.
# open() in a context manager replaces the Python-2-only file() builtin
# and guarantees the handle is closed.
with open('util/python-ext-sources') as src:
    ext_sources = [f.strip() for f in src
                   if len(f.strip()) > 0 and f[0] != '#']

perf = Extension('perf',
                 sources = ext_sources,
                 include_dirs = ['util/include'],
                 extra_compile_args = cflags,
                )

setup(name='perf',
      version='0.1',
      description='Interface with the Linux profiling infrastructure',
      author='Arnaldo Carvalho de Melo',
      author_email='acme@redhat.com',
      license='GPLv2',
      url='http://perf.wiki.kernel.org',
      ext_modules=[perf],
      cmdclass={'build_ext': build_ext, 'install_lib': install_lib})
| gpl-2.0 |
FuzzyHobbit/scrapy | tests/test_engine.py | 3 | 9519 | """
Scrapy engine tests
This starts a testing web server (using twisted.server.Site) and then crawls it
with the Scrapy crawler.
To view the testing web server in a browser you can start it by running this
module with the ``runserver`` argument::
python test_engine.py runserver
"""
from __future__ import print_function
import sys, os, re
from six.moves.urllib.parse import urlparse
from twisted.internet import reactor, defer
from twisted.web import server, static, util
from twisted.trial import unittest
from scrapy import signals
from scrapy.core.engine import ExecutionEngine
from scrapy.utils.test import get_crawler
from pydispatch import dispatcher
from tests import tests_datadir
from scrapy.spiders import Spider
from scrapy.item import Item, Field
from scrapy.linkextractors import LinkExtractor
from scrapy.http import Request
from scrapy.utils.signal import disconnect_all
class TestItem(Item):
    # Item scraped from the test site's item pages by TestSpider.
    name = Field()
    url = Field()
    price = Field()
class TestSpider(Spider):
    """Crawls the local test site, scraping name/url/price from item pages."""
    name = "scrapytest.org"
    allowed_domains = ["scrapytest.org", "localhost"]

    itemurl_re = re.compile("item\d+.html")
    name_re = re.compile("<h1>(.*?)</h1>", re.M)
    price_re = re.compile(">Price: \$(.*?)<", re.M)

    # Item class instantiated by parse_item; overridden by subclasses.
    item_cls = TestItem

    def parse(self, response):
        """Follow every extracted link whose URL looks like an item page."""
        xlink = LinkExtractor()
        # itemurl_re is already a compiled pattern; re-wrapping it in
        # re.compile() (as before) was a redundant no-op.
        for link in xlink.extract_links(response):
            if self.itemurl_re.search(link.url):
                yield Request(url=link.url, callback=self.parse_item)

    def parse_item(self, response):
        """Build an item_cls instance from an item page's body."""
        item = self.item_cls()
        m = self.name_re.search(response.body)
        if m:
            item['name'] = m.group(1)
        item['url'] = response.url
        m = self.price_re.search(response.body)
        if m:
            item['price'] = m.group(1)
        return item
class TestDupeFilterSpider(TestSpider):
    # Unlike the base spider, requests built from start URLs are subject
    # to duplicate filtering (dont_filter defaults to False).
    def make_requests_from_url(self, url):
        return Request(url) # dont_filter=False
class DictItemsSpider(TestSpider):
    # Same spider, but scraped items are plain dicts instead of TestItem.
    item_cls = dict
def start_test_site(debug=False):
    """Serve the static test site (plus /redirect -> /redirected) on an
    ephemeral localhost port; returns the listening twisted port."""
    root_dir = os.path.join(tests_datadir, "test_site")
    r = static.File(root_dir)
    r.putChild("redirect", util.Redirect("/redirected"))
    r.putChild("redirected", static.Data("Redirected here", "text/plain"))

    # Port 0 lets the OS pick a free port; callers read it via getHost().
    port = reactor.listenTCP(0, server.Site(r), interface="127.0.0.1")
    if debug:
        print("Test server running at http://localhost:%d/ - hit Ctrl-C to finish." \
            % port.getHost().port)
    return port
class CrawlerRun(object):
    """A class to run the crawler and keep track of events occurred"""

    def __init__(self, spider_class):
        self.spider = None
        # Each list records the arguments of one signal type:
        self.respplug = []    # (response, spider) from response_downloaded
        self.reqplug = []     # (request, spider) from request_scheduled
        self.reqdropped = []  # (request, spider) from request_dropped
        self.itemresp = []    # (item, response) from item_scraped
        self.signals_catched = {}
        self.spider_class = spider_class

    def run(self):
        """Start the test site and crawl it; returns a Deferred that fires
        once the engine has stopped."""
        self.port = start_test_site()
        self.portno = self.port.getHost().port

        start_urls = [self.geturl("/"), self.geturl("/redirect"),
                      self.geturl("/redirect")] # a duplicate

        # Record every public scrapy signal that fires during the crawl.
        for name, signal in vars(signals).items():
            if not name.startswith('_'):
                dispatcher.connect(self.record_signal, signal)

        self.crawler = get_crawler(self.spider_class)
        self.crawler.signals.connect(self.item_scraped, signals.item_scraped)
        self.crawler.signals.connect(self.request_scheduled, signals.request_scheduled)
        self.crawler.signals.connect(self.request_dropped, signals.request_dropped)
        self.crawler.signals.connect(self.response_downloaded, signals.response_downloaded)
        self.crawler.crawl(start_urls=start_urls)
        self.spider = self.crawler.spider

        self.deferred = defer.Deferred()
        dispatcher.connect(self.stop, signals.engine_stopped)
        return self.deferred

    def stop(self):
        # Tear down the test server and every signal connection made in
        # run() before firing the deferred.
        self.port.stopListening()
        for name, signal in vars(signals).items():
            if not name.startswith('_'):
                disconnect_all(signal)
        self.deferred.callback(None)

    def geturl(self, path):
        return "http://localhost:%s%s" % (self.portno, path)

    def getpath(self, url):
        u = urlparse(url)
        return u.path

    def item_scraped(self, item, spider, response):
        self.itemresp.append((item, response))

    def request_scheduled(self, request, spider):
        self.reqplug.append((request, spider))

    def request_dropped(self, request, spider):
        self.reqdropped.append((request, spider))

    def response_downloaded(self, response, spider):
        self.respplug.append((response, spider))

    def record_signal(self, *args, **kwargs):
        """Record a signal and its parameters"""
        signalargs = kwargs.copy()
        sig = signalargs.pop('signal')
        signalargs.pop('sender', None)
        self.signals_catched[sig] = signalargs
class EngineTest(unittest.TestCase):
    """End-to-end tests of the ExecutionEngine against the local test site."""

    @defer.inlineCallbacks
    def test_crawler(self):
        # Run a full crawl with both Item-based and dict-based spiders and
        # verify visited URLs, scheduling, responses, items and signals.
        for spider in TestSpider, DictItemsSpider:
            self.run = CrawlerRun(spider)
            yield self.run.run()
            self._assert_visited_urls()
            self._assert_scheduled_requests(urls_to_visit=8)
            self._assert_downloaded_responses()
            self._assert_scraped_items()
            self._assert_signals_catched()

        # With the dupe filter active the duplicate start URL is dropped,
        # so one fewer request gets scheduled.
        self.run = CrawlerRun(TestDupeFilterSpider)
        yield self.run.run()
        self._assert_scheduled_requests(urls_to_visit=7)
        self._assert_dropped_requests()

    def _assert_visited_urls(self):
        must_be_visited = ["/", "/redirect", "/redirected",
                           "/item1.html", "/item2.html", "/item999.html"]
        urls_visited = set([rp[0].url for rp in self.run.respplug])
        urls_expected = set([self.run.geturl(p) for p in must_be_visited])
        assert urls_expected <= urls_visited, "URLs not visited: %s" % list(urls_expected - urls_visited)

    def _assert_scheduled_requests(self, urls_to_visit=None):
        self.assertEqual(urls_to_visit, len(self.run.reqplug))

        paths_expected = ['/item999.html', '/item2.html', '/item1.html']

        urls_requested = set([rq[0].url for rq in self.run.reqplug])
        urls_expected = set([self.run.geturl(p) for p in paths_expected])
        assert urls_expected <= urls_requested
        # Every scheduled request is either dropped or downloaded.
        scheduled_requests_count = len(self.run.reqplug)
        dropped_requests_count = len(self.run.reqdropped)
        responses_count = len(self.run.respplug)
        self.assertEqual(scheduled_requests_count,
                         dropped_requests_count + responses_count)

    def _assert_dropped_requests(self):
        self.assertEqual(len(self.run.reqdropped), 1)

    def _assert_downloaded_responses(self):
        # response tests
        self.assertEqual(8, len(self.run.respplug))

        for response, _ in self.run.respplug:
            if self.run.getpath(response.url) == '/item999.html':
                self.assertEqual(404, response.status)
            if self.run.getpath(response.url) == '/redirect':
                self.assertEqual(302, response.status)

    def _assert_scraped_items(self):
        self.assertEqual(2, len(self.run.itemresp))
        for item, response in self.run.itemresp:
            self.assertEqual(item['url'], response.url)
            if 'item1.html' in item['url']:
                self.assertEqual('Item 1 name', item['name'])
                self.assertEqual('100', item['price'])
            if 'item2.html' in item['url']:
                self.assertEqual('Item 2 name', item['name'])
                self.assertEqual('200', item['price'])

    def _assert_signals_catched(self):
        assert signals.engine_started in self.run.signals_catched
        assert signals.engine_stopped in self.run.signals_catched
        assert signals.spider_opened in self.run.signals_catched
        assert signals.spider_idle in self.run.signals_catched
        assert signals.spider_closed in self.run.signals_catched

        self.assertEqual({'spider': self.run.spider},
                         self.run.signals_catched[signals.spider_opened])
        self.assertEqual({'spider': self.run.spider},
                         self.run.signals_catched[signals.spider_idle])
        self.run.signals_catched[signals.spider_closed].pop('spider_stats', None) # XXX: remove for scrapy 0.17
        self.assertEqual({'spider': self.run.spider, 'reason': 'finished'},
                         self.run.signals_catched[signals.spider_closed])

    @defer.inlineCallbacks
    def test_close_downloader(self):
        # Closing an engine that never started must not raise.
        e = ExecutionEngine(get_crawler(TestSpider), lambda: None)
        yield e.close()

    @defer.inlineCallbacks
    def test_close_spiders_downloader(self):
        # close() must also close any spider that was opened.
        e = ExecutionEngine(get_crawler(TestSpider), lambda: None)
        yield e.open_spider(TestSpider(), [])
        self.assertEqual(len(e.open_spiders), 1)
        yield e.close()
        self.assertEqual(len(e.open_spiders), 0)

    @defer.inlineCallbacks
    def test_close_engine_spiders_downloader(self):
        # close() must stop a running engine and close its spiders.
        e = ExecutionEngine(get_crawler(TestSpider), lambda: None)
        yield e.open_spider(TestSpider(), [])
        e.start()
        self.assertTrue(e.running)
        yield e.close()
        self.assertFalse(e.running)
        self.assertEqual(len(e.open_spiders), 0)
if __name__ == "__main__":
    # "python <this file> runserver" serves the test site standalone so it
    # can be inspected manually in a browser; anything else does nothing.
    if len(sys.argv) > 1 and sys.argv[1] == 'runserver':
        start_test_site(debug=True)
        reactor.run()
| bsd-3-clause |
uclaros/QGIS | python/plugins/db_manager/sqledit.py | 33 | 6295 | # -*- coding: utf-8 -*-
"""
***************************************************************************
ScriptEdit.py
---------------------
Date : February 2014
Copyright : (C) 2014 by Alexander Bruy
Email : alexander dot bruy at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Alexander Bruy'
__date__ = 'February 2014'
__copyright__ = '(C) 2014, Alexander Bruy'
from qgis.PyQt.QtCore import Qt
from qgis.PyQt.QtGui import QColor, QFont, QKeySequence
from qgis.PyQt.QtWidgets import QShortcut
from qgis.PyQt.Qsci import QsciScintilla, QsciLexerSQL
from qgis.core import QgsSettings
class SqlEdit(QsciScintilla):
    """SQL editor widget based on QScintilla.

    Provides SQL syntax highlighting, code folding, autocompletion
    (Ctrl+Space) and fonts taken from the QGIS Python console settings.
    """

    # NOTE(review): leftover constants from the ScriptEdit widget this class
    # appears to derive from; kept for backward compatibility although this
    # editor always installs the SQL lexer (see initLexer()).
    LEXER_PYTHON = 0
    LEXER_R = 1

    def __init__(self, parent=None):
        QsciScintilla.__init__(self, parent)

        self.mylexer = None
        self.api = None

        self.setCommonOptions()
        self.initShortcuts()

    def setCommonOptions(self):
        """Apply editor-wide options: encoding, margins, folding,
        indentation, autocompletion, fonts, and finally the SQL lexer."""
        # Enable non-ASCII characters
        self.setUtf8(True)

        self.setBraceMatching(QsciScintilla.SloppyBraceMatch)

        self.setWrapMode(QsciScintilla.WrapWord)
        self.setWrapVisualFlags(QsciScintilla.WrapFlagByText,
                                QsciScintilla.WrapFlagNone, 4)

        self.setSelectionForegroundColor(QColor('#2e3436'))
        self.setSelectionBackgroundColor(QColor('#babdb6'))

        # Show line numbers; '000' reserves width for three digits
        self.setMarginWidth(1, '000')
        self.setMarginLineNumbers(1, True)
        self.setMarginsForegroundColor(QColor('#2e3436'))
        self.setMarginsBackgroundColor(QColor('#babdb6'))

        # Highlight current line
        self.setCaretLineVisible(True)
        self.setCaretLineBackgroundColor(QColor('#d3d7cf'))

        # Folding
        self.setFolding(QsciScintilla.BoxedTreeFoldStyle)
        self.setFoldMarginColors(QColor('#d3d7cf'), QColor('#d3d7cf'))

        # Mark column 80 with vertical line
        self.setEdgeMode(QsciScintilla.EdgeLine)
        self.setEdgeColumn(80)
        self.setEdgeColor(QColor('#eeeeec'))

        # Indentation
        self.setAutoIndent(True)
        self.setIndentationsUseTabs(False)
        self.setIndentationWidth(4)
        self.setTabIndents(True)
        self.setBackspaceUnindents(True)
        self.setTabWidth(4)

        # Autocompletion
        self.setAutoCompletionThreshold(2)
        self.setAutoCompletionSource(QsciScintilla.AcsAPIs)
        self.setAutoCompletionCaseSensitivity(False)

        # Load font from Python console settings. This is the font actually
        # used by the editor; the hard-coded Courier font this method used
        # to set first was dead code, immediately overridden here.
        settings = QgsSettings()
        fontName = settings.value('pythonConsole/fontfamilytext', 'Monospace')
        fontSize = int(settings.value('pythonConsole/fontsize', 10))

        self.defaultFont = QFont(fontName)
        self.defaultFont.setFixedPitch(True)
        self.defaultFont.setPointSize(fontSize)
        self.defaultFont.setStyleHint(QFont.TypeWriter)
        self.defaultFont.setBold(False)

        self.boldFont = QFont(self.defaultFont)
        self.boldFont.setBold(True)

        self.italicFont = QFont(self.defaultFont)
        self.italicFont.setItalic(True)

        self.setFont(self.defaultFont)
        self.setMarginsFont(self.defaultFont)

        self.initLexer()

    def initShortcuts(self):
        """Disable conflicting default QScintilla shortcuts and bind
        Ctrl+Space to autocompletion."""
        (ctrl, shift) = (self.SCMOD_CTRL << 16, self.SCMOD_SHIFT << 16)

        # Disable some shortcuts
        self.SendScintilla(QsciScintilla.SCI_CLEARCMDKEY, ord('D') + ctrl)
        self.SendScintilla(QsciScintilla.SCI_CLEARCMDKEY, ord('L') + ctrl)
        self.SendScintilla(QsciScintilla.SCI_CLEARCMDKEY, ord('L') + ctrl +
                           shift)
        self.SendScintilla(QsciScintilla.SCI_CLEARCMDKEY, ord('T') + ctrl)

        # Use Ctrl+Space for autocompletion
        self.shortcutAutocomplete = QShortcut(QKeySequence(Qt.CTRL +
                                                           Qt.Key_Space), self)
        self.shortcutAutocomplete.setContext(Qt.WidgetShortcut)
        self.shortcutAutocomplete.activated.connect(self.autoComplete)

    def autoComplete(self):
        """Trigger autocompletion from all available sources."""
        self.autoCompleteFromAll()

    def initLexer(self):
        """Install a QsciLexerSQL configured with the editor's colours
        and fonts."""
        self.mylexer = QsciLexerSQL()

        colorDefault = QColor('#2e3436')
        colorComment = QColor('#c00')
        colorCommentBlock = QColor('#3465a4')
        colorNumber = QColor('#4e9a06')
        colorType = QColor('#4e9a06')
        colorKeyword = QColor('#204a87')
        colorString = QColor('#ce5c00')

        self.mylexer.setDefaultFont(self.defaultFont)
        self.mylexer.setDefaultColor(colorDefault)

        # Numeric arguments are QsciLexerSQL style ids
        # (1 comment, 2 number, 5 keyword, 8 identifier, 12 comment-doc, ...)
        self.mylexer.setColor(colorComment, 1)
        self.mylexer.setColor(colorNumber, 2)
        self.mylexer.setColor(colorString, 3)
        self.mylexer.setColor(colorString, 4)
        self.mylexer.setColor(colorKeyword, 5)
        self.mylexer.setColor(colorString, 6)
        self.mylexer.setColor(colorString, 7)
        self.mylexer.setColor(colorType, 8)
        self.mylexer.setColor(colorCommentBlock, 12)
        self.mylexer.setColor(colorString, 15)

        self.mylexer.setFont(self.italicFont, 1)
        self.mylexer.setFont(self.boldFont, 5)
        self.mylexer.setFont(self.boldFont, 8)
        self.mylexer.setFont(self.italicFont, 12)

        self.setLexer(self.mylexer)

    def lexer(self):
        """Return the installed SQL lexer."""
        return self.mylexer

    def setMarginVisible(self, visible):
        # Intentionally a no-op; kept for API compatibility with callers.
        pass
| gpl-2.0 |
ImageEngine/gaffer | python/GafferUI/DotUI.py | 7 | 6501 | ##########################################################################
#
# Copyright (c) 2014, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import functools
import imath
import IECore
import Gaffer
import GafferUI
##########################################################################
# Public methods
##########################################################################
## May be called to connect the DotUI functionality to an application
# instance. This isn't done automatically because some applications
# may have graphs for which it doesn't make sense to use Dots. Typically
# this function would be called from an application startup file.
def connect( applicationRoot ) :

	# Stash a marker attribute on the application root; the context-menu
	# hooks below check for it before offering Dot-related menu items.
	applicationRoot.__dotUIConnected = True
##########################################################################
# Metadata
##########################################################################
Gaffer.Metadata.registerNode(
Gaffer.Dot,
"description",
"""
A utility node which can be used for organising large graphs.
""",
"nodeGadget:minWidth", 0.0,
"nodeGadget:padding", 0.5,
"layout:activator:labelTypeIsCustom", lambda node : node["labelType"].getValue() == node.LabelType.Custom,
plugs = {
"in" : [
"plugValueWidget:type", ""
],
"out" : [
"plugValueWidget:type", ""
],
"labelType" : [
"description",
"""
The method used to apply an optional label
to the dot. Using a node name is recommended,
because it encourages the use of descriptive node
names, and updates automatically when nodes are
renamed or upstream connections change. The custom
label does however provide more flexibility, since
node names are restricted in the characters they
can use.
""",
"plugValueWidget:type", "GafferUI.PresetsPlugValueWidget",
"nodule:type", "",
"preset:None", Gaffer.Dot.LabelType.None_,
"preset:Node Name", Gaffer.Dot.LabelType.NodeName,
"preset:Upstream Node Name", Gaffer.Dot.LabelType.UpstreamNodeName,
"preset:Custom", Gaffer.Dot.LabelType.Custom,
],
"label" : [
"description",
"""
The label displayed when the type is set to custom.
""",
"nodule:type", "",
"layout:activator", "labelTypeIsCustom",
],
},
)
##########################################################################
# GraphEditor menus
##########################################################################
def __insertDot( menu, destinationPlug ) :

	# Rewire `destinationPlug` so that its input arrives via a new Dot node,
	# positioned in the graph where the context menu was opened.
	editor = menu.ancestor( GafferUI.GraphEditor )
	widget = editor.graphGadgetWidget()
	graph = editor.graphGadget()

	scriptNode = destinationPlug.ancestor( Gaffer.ScriptNode )
	with Gaffer.UndoScope( scriptNode ) :

		dot = Gaffer.Dot()
		graph.getRoot().addChild( dot )
		dot.setup( destinationPlug )

		dot["in"].setInput( destinationPlug.getInput() )
		destinationPlug.setInput( dot["out"] )

		# Convert the menu's raster-space position into graph space and
		# drop the new node there.
		rasterPosition = menu.popupPosition( relativeTo = widget )
		gadgetPosition = widget.getViewportGadget().rasterToGadgetSpace(
			imath.V2f( rasterPosition.x, rasterPosition.y ),
			gadget = graph
		).p0
		graph.setNodePosition( dot, imath.V2f( gadgetPosition.x, gadgetPosition.y ) )
def __connectionContextMenu( graphEditor, destinationPlug, menuDefinition ) :

	# Only offer Dot insertion if connect() has been called for this
	# application (or if there is an application root at all).
	applicationRoot = graphEditor.scriptNode().ancestor( Gaffer.ApplicationRoot )
	if not getattr( applicationRoot, "__dotUIConnected", False ) :
		return

	if len( menuDefinition.items() ) :
		menuDefinition.append( "/DotDivider", { "divider" : True } )

	insertCommand = functools.partial( __insertDot, destinationPlug = destinationPlug )
	menuDefinition.append(
		"/Insert Dot",
		{
			"command" : insertCommand,
			"active" : not Gaffer.MetadataAlgo.readOnly( destinationPlug ),
		}
	)

GafferUI.GraphEditor.connectionContextMenuSignal().connect( __connectionContextMenu, scoped = False )
def __setPlugMetadata( plug, key, value ) :

	# Wrap the metadata edit in an undo scope so it lands on the undo queue.
	with Gaffer.UndoScope( plug.ancestor( Gaffer.ScriptNode ) ) :
		Gaffer.Metadata.registerValue( plug, key, value )
def __graphEditorPlugContextMenu( graphEditor, plug, menuDefinition ) :

	# Adds "Move To/<edge>" items for plugs on Dot nodes, letting the user
	# relocate the nodule to another edge of the node gadget.
	if isinstance( plug.node(), Gaffer.Dot ) :

		## \todo This duplicates functionality from BoxUI. Is there some way
		# we could share it?
		currentEdge = Gaffer.Metadata.value( plug, "noduleLayout:section" )
		if not currentEdge :
			# Default edges when no explicit section has been set.
			currentEdge = "top" if plug.direction() == plug.Direction.In else "bottom"

		readOnly = Gaffer.MetadataAlgo.readOnly( plug )
		for edge in ( "top", "bottom", "left", "right" ) :
			menuDefinition.append(
				"/Move To/" + edge.capitalize(),
				{
					"command" : functools.partial( __setPlugMetadata, plug, "noduleLayout:section", edge ),
					"active" : edge != currentEdge and not readOnly,
				}
			)

GafferUI.GraphEditor.plugContextMenuSignal().connect( __graphEditorPlugContextMenu, scoped = False )
| bsd-3-clause |
hasgeek/funnel | tests/unit/models/test_user_User.py | 1 | 15870 | from datetime import timedelta
import pytest
from coaster.utils import utcnow
import funnel.models as models
def test_user(db_session):
    """Test for creation of a user object from the User model."""
    user = models.User(username='hrun', fullname="Hrun the Barbarian")
    db_session.add(user)
    db_session.commit()
    hrun = models.User.get(username='hrun')
    assert isinstance(hrun, models.User)
    assert user.username == 'hrun'
    assert user.fullname == "Hrun the Barbarian"
    assert user.state.ACTIVE
    assert hrun == user


def test_user_pickername(user_twoflower, user_rincewind):
    """Test to verify pickername contains fullname and optional username."""
    assert user_twoflower.pickername == "Twoflower"
    assert user_rincewind.pickername == "Rincewind (@rincewind)"


def test_user_is_profile_complete(db_session, user_twoflower, user_rincewind):
    """
    Test to check if user profile is complete.

    That is, fullname, username and a verified email are all present.
    """
    # Both fixtures start out incomplete
    assert user_twoflower.is_profile_complete() is False
    assert user_rincewind.is_profile_complete() is False

    # Rincewind claims an email address, but it is not verified
    db_session.add(
        models.UserEmailClaim(user=user_rincewind, email='rincewind@example.org')
    )
    db_session.commit()
    assert user_rincewind.is_profile_complete() is False

    # Rincewind's profile is complete when a verified email address is added
    user_rincewind.add_email('rincewind@example.org')
    assert user_rincewind.is_profile_complete() is True

    # Email is insufficient for Twoflower
    user_twoflower.add_email('twoflower@example.org')
    assert user_twoflower.is_profile_complete() is False

    # Twoflower also needs a username
    user_twoflower.username = 'twoflower'
    assert user_twoflower.is_profile_complete() is True


def test_user_organization_owned(user_ridcully, org_uu):
    """Test for verifying organizations a user is an owner of."""
    assert list(user_ridcully.organizations_as_owner) == [org_uu]
def test_user_email(db_session, user_twoflower):
    """Add and retrieve an email address."""
    assert user_twoflower.email == ''
    useremail = user_twoflower.add_email('twoflower@example.org')
    assert isinstance(useremail, models.UserEmail)
    db_session.commit()
    assert useremail.primary is False

    # When there is no primary, accessing the `email` property will promote existing
    assert user_twoflower.email == useremail
    assert useremail.primary is True

    useremail2 = user_twoflower.add_email(  # type: ignore[unreachable]
        'twoflower@example.com', primary=True
    )
    db_session.commit()

    # The primary has changed
    assert user_twoflower.email == useremail2
    assert useremail.primary is False
    assert useremail2.primary is True


def test_user_del_email(db_session, user_twoflower):
    """Delete an email address from a user's account."""
    assert user_twoflower.primary_email is None
    assert len(user_twoflower.emails) == 0
    user_twoflower.add_email('twoflower@example.org', primary=True)
    user_twoflower.add_email('twoflower@example.com')
    user_twoflower.add_email('twoflower@example.net')
    db_session.commit()

    assert len(user_twoflower.emails) == 3
    assert user_twoflower.primary_email is not None
    assert str(user_twoflower.primary_email) == 'twoflower@example.org'  # type: ignore[unreachable]
    assert {str(e) for e in user_twoflower.emails} == {
        'twoflower@example.org',
        'twoflower@example.com',
        'twoflower@example.net',
    }

    # Delete a non-primary email address. It will be removed
    user_twoflower.del_email('twoflower@example.net')
    db_session.commit()

    assert len(user_twoflower.emails) == 2
    assert user_twoflower.primary_email is not None
    assert str(user_twoflower.primary_email) == 'twoflower@example.org'
    assert {str(e) for e in user_twoflower.emails} == {
        'twoflower@example.org',
        'twoflower@example.com',
    }

    # Delete a primary email address. The next available address will be made primary
    user_twoflower.del_email('twoflower@example.org')
    db_session.commit()

    assert len(user_twoflower.emails) == 1
    assert user_twoflower.primary_email is not None
    assert str(user_twoflower.primary_email) == 'twoflower@example.com'
    assert {str(e) for e in user_twoflower.emails} == {
        'twoflower@example.com',
    }

    # Delete last remaining email address. Primary will be removed
    user_twoflower.del_email('twoflower@example.com')
    db_session.commit()

    assert len(user_twoflower.emails) == 0
    assert user_twoflower.primary_email is None
    assert user_twoflower.email == ''
def test_user_phone(db_session, user_twoflower):
    """Test to retrieve the UserPhone property `phone`."""
    assert user_twoflower.phone == ''
    userphone = user_twoflower.add_phone('+12345678900')
    assert isinstance(userphone, models.UserPhone)
    db_session.commit()
    assert userphone.primary is False

    # When there is no primary, accessing the `phone` property will promote existing
    assert user_twoflower.phone == userphone
    assert userphone.primary is True

    userphone2 = user_twoflower.add_phone(  # type: ignore[unreachable]
        '+12345678901', primary=True
    )
    db_session.commit()

    # The primary has changed
    assert user_twoflower.phone == userphone2
    assert userphone.primary is False
    assert userphone2.primary is True


def test_user_del_phone(db_session, user_twoflower):
    """Delete a phone number from a user's account."""
    assert user_twoflower.primary_phone is None
    assert len(user_twoflower.phones) == 0
    user_twoflower.add_phone('+12345678900', primary=True)
    user_twoflower.add_phone('+12345678901')
    user_twoflower.add_phone('+12345678902')
    db_session.commit()

    assert len(user_twoflower.phones) == 3
    assert user_twoflower.primary_phone is not None
    assert str(user_twoflower.primary_phone) == '+12345678900'  # type: ignore[unreachable]
    assert {str(e) for e in user_twoflower.phones} == {
        '+12345678900',
        '+12345678901',
        '+12345678902',
    }

    # Delete a non-primary phone address. It will be removed
    user_twoflower.del_phone('+12345678902')
    db_session.commit()

    assert len(user_twoflower.phones) == 2
    assert user_twoflower.primary_phone is not None
    assert str(user_twoflower.primary_phone) == '+12345678900'
    assert {str(e) for e in user_twoflower.phones} == {
        '+12345678900',
        '+12345678901',
    }

    # Delete a primary phone address. The next available address will be made primary
    user_twoflower.del_phone('+12345678900')
    db_session.commit()

    assert len(user_twoflower.phones) == 1
    assert user_twoflower.primary_phone is not None
    assert str(user_twoflower.primary_phone) == '+12345678901'
    assert {str(e) for e in user_twoflower.phones} == {
        '+12345678901',
    }

    # Delete last remaining phone address. Primary will be removed
    user_twoflower.del_phone('+12345678901')
    db_session.commit()

    assert len(user_twoflower.phones) == 0
    assert user_twoflower.primary_phone is None
    assert user_twoflower.phone == ''
def test_user_autocomplete(
    db_session, user_twoflower, user_rincewind, user_dibbler, user_librarian
):
    """
    Test for the User autocomplete method.

    Queries valid users defined in fixtures, as well as input that should not
    return a response.
    """
    user_rincewind.add_email('rincewind@example.org')
    db_session.commit()

    # A typical lookup with part of someone's name will find matches
    assert models.User.autocomplete('Dib') == [user_dibbler]

    # Spurious characters like `[` and `]` are ignored
    assert models.User.autocomplete('[tw]') == [user_twoflower]

    # Multiple users with the same starting character(s), sorted alphabetically
    # Both users with and without usernames are found
    assert user_librarian.fullname.startswith('The')  # The `The` prefix is tested here
    assert user_twoflower.username is None
    assert user_librarian.username is not None
    assert models.User.autocomplete('t') == [user_librarian, user_twoflower]

    # Lookup by email address
    assert models.User.autocomplete('rincewind@example.org') == [user_rincewind]

    # More spurious characters
    assert models.User.autocomplete('[]twofl') == [user_twoflower]

    # Empty searches
    assert models.User.autocomplete('@[') == []
    assert models.User.autocomplete('[[]]') == []
    assert models.User.autocomplete('[%') == []

    # TODO: Test for @username searches against external ids (requires fixtures)


@pytest.mark.parametrize('defercols', [False, True])
def test_user_all(
    db_session,
    user_twoflower,
    user_rincewind,
    user_ridcully,
    user_dibbler,
    user_death,
    user_mort,
    defercols,
):
    """Retrieve all users matching specified criteria."""
    # Some fixtures are not used in the tests because the test determines that they
    # won't show up in the query unless specifically asked for
    db_session.commit()  # Commit required to generate UUID (userid/buid)

    # A parameter is required
    with pytest.raises(TypeError):
        models.User.all()
    with pytest.raises(TypeError):
        models.User.all(defercols=True)

    # Scenario 1: Lookup by buids only
    assert set(
        models.User.all(
            buids=[user_twoflower.buid, user_rincewind.buid], defercols=defercols
        )
    ) == {
        user_twoflower,
        user_rincewind,
    }

    # Scenario 2: lookup by buid or username
    assert (
        set(
            models.User.all(
                buids=[user_twoflower.buid, user_rincewind.buid],
                usernames=[user_ridcully.username, user_dibbler.username],
                defercols=defercols,
            )
        )
        == {user_twoflower, user_rincewind, user_ridcully, user_dibbler}
    )

    # Scenario 3: lookup by usernames only
    assert (
        set(
            models.User.all(
                usernames=[user_ridcully.username, user_dibbler.username],
                defercols=defercols,
            )
        )
        == {user_ridcully, user_dibbler}
    )

    # Scenario 4: querying for a merged user buid returns the winning user
    models.merge_users(user_death, user_rincewind)
    db_session.commit()
    assert set(
        models.User.all(
            buids=[user_twoflower.buid, user_rincewind.buid], defercols=defercols
        )
    ) == {
        user_twoflower,
        user_death,
    }
def test_user_add_email(db_session, user_rincewind):
    """Test to add an email address for a user."""
    # scenario 1: if primary flag is True and user has no existing email
    email1 = 'rincewind@example.org'
    useremail1 = user_rincewind.add_email(email1, primary=True)
    db_session.commit()
    assert user_rincewind.email == useremail1
    assert useremail1.email == email1
    assert useremail1.primary is True

    # scenario 2: when primary flag is True but user has existing primary email
    email2 = 'rincewind@example.com'
    useremail2 = user_rincewind.add_email(email2, primary=True)
    db_session.commit()
    assert useremail2.email == email2
    assert useremail2.primary is True
    assert useremail1.primary is False
    assert user_rincewind.email == useremail2  # type: ignore[unreachable]

    # scenario 3: when primary flag is True but user has that existing email
    useremail3 = user_rincewind.add_email(email1, primary=True)
    db_session.commit()
    assert useremail3 == useremail1
    assert useremail3.primary is True
    assert useremail2.primary is False


def test_make_email_primary(user_rincewind):
    """Test to make an email address primary for a user."""
    email = 'rincewind@example.org'
    useremail = user_rincewind.add_email(email)
    assert useremail.email == email
    assert useremail.primary is False
    assert user_rincewind.primary_email is None
    user_rincewind.primary_email = useremail
    assert useremail.primary is True


def test_user_password(user_twoflower):
    """Test to set a user password."""
    # User account starts out with no password
    assert user_twoflower.pw_hash is None
    # User account can set a password
    user_twoflower.password = 'test-password'
    assert user_twoflower.password_is('test-password') is True
    assert user_twoflower.password_is('wrong-password') is False


def test_user_password_has_expired(db_session, user_twoflower):
    """Test to check if a user's password has expired."""
    assert user_twoflower.pw_hash is None
    user_twoflower.password = 'test-password'
    db_session.commit()  # Required to set pw_expires_at and pw_set_at
    assert user_twoflower.pw_expires_at > user_twoflower.pw_set_at
    assert user_twoflower.password_has_expired() is False
    user_twoflower.pw_expires_at = utcnow() - timedelta(seconds=1)
    assert user_twoflower.password_has_expired() is True


def test_password_hash_upgrade(user_twoflower):
    """Test for password hash upgrade from bcrypt to Argon2."""
    # pw_hash contains bcrypt.hash('password')
    user_twoflower.pw_hash = (
        '$2b$12$q/TiZH08kbgiUk2W0I99sOaW5hKQ1ETgJxoAv8TvV.5WxB3dYQINO'
    )
    assert user_twoflower.pw_hash.startswith('$2b$')
    assert not user_twoflower.password_is('incorrect')
    assert user_twoflower.pw_hash.startswith('$2b$')
    assert not user_twoflower.password_is('incorrect', upgrade_hash=True)
    assert user_twoflower.pw_hash.startswith('$2b$')
    assert user_twoflower.password_is('password')
    assert user_twoflower.pw_hash.startswith('$2b$')
    assert user_twoflower.password_is('password', upgrade_hash=True)
    # Transparent upgrade to Argon2 after a successful password validation
    assert user_twoflower.pw_hash.startswith('$argon2id$')


def test_password_not_truncated(user_twoflower):
    """Argon2 passwords are not truncated at up to 1000 characters."""
    # Bcrypt passwords are truncated at 72 characters, making larger length limits
    # pointless. Argon2 passwords are not truncated for a very large size. Passlib has
    # a default max size of 4096 chars.
    # https://passlib.readthedocs.io/en/stable/lib/passlib.exc.html#passlib.exc.PasswordSizeError
    user_twoflower.password = '1' * 999 + 'a'
    assert user_twoflower.password_is('1' * 999 + 'a')
    assert not user_twoflower.password_is('1' * 999 + 'b')


def test_user_merged_user(db_session, user_death, user_rincewind):
    """Test for checking if a user has an old (merged) id."""
    db_session.commit()
    assert user_death.state.ACTIVE
    assert user_rincewind.state.ACTIVE
    models.merge_users(user_death, user_rincewind)
    # The surviving account stays active; the losing one is marked merged
    assert user_death.state.ACTIVE
    assert user_rincewind.state.MERGED
    assert {o.uuid for o in user_death.oldids} == {user_rincewind.uuid}
def test_user_get(db_session, user_twoflower, user_rincewind, user_death):
    """Test for User's get method."""
    # scenario 1: if both username and buid not passed
    db_session.commit()
    with pytest.raises(TypeError):
        models.User.get()

    # scenario 2: if buid is passed
    lookup_by_buid = models.User.get(buid=user_twoflower.buid)
    assert lookup_by_buid == user_twoflower

    # scenario 3: if username is passed
    lookup_by_username = models.User.get(username='rincewind')
    assert lookup_by_username == user_rincewind

    # scenario 4: if defercols is set to True
    lookup_by_username = models.User.get(username='rincewind', defercols=True)
    assert lookup_by_username == user_rincewind

    # scenario 5: when user.state.MERGED, get() follows the merge to the
    # surviving account
    assert user_rincewind.state.ACTIVE
    models.merge_users(user_death, user_rincewind)
    assert user_rincewind.state.MERGED
    lookup_by_buid = models.User.get(buid=user_rincewind.buid)
    assert lookup_by_buid == user_death
z-e-r-o/plugin.video.osn | resources/lib/dns/grange.py | 7 | 1860 | # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS GENERATE range conversion."""
import dns
def from_text(text):
    """Convert the text form of a range in a GENERATE statement to an
    integer.

    The accepted syntax is ``start-stop`` or ``start-stop/step``.

    @param text: the textual range
    @type text: string
    @return: The start, stop and step values.
    @rtype: tuple
    @raises dns.exception.SyntaxError: if the range is malformed or the
    values are out of bounds (step < 1, start < 0 or start > stop).
    """
    step = 1
    cur = ''
    state = 0
    # state 0: reading start; 2: reading stop (after '-');
    # state 4: reading step (after '/')
    for c in text:
        if c == '-' and state == 0:
            if not cur:
                raise dns.exception.SyntaxError("empty start value")
            start = int(cur)
            cur = ''
            state = 2
        elif c == '/' and state == 2:
            # '/' is only legal once a stop value is being read; the old
            # code accepted it in any state and could crash with a
            # NameError on input such as "/2" or "1-2/3/4".
            if not cur:
                raise dns.exception.SyntaxError("empty stop value")
            stop = int(cur)
            cur = ''
            state = 4
        elif c.isdigit():
            cur += c
        else:
            raise dns.exception.SyntaxError("Could not parse %s" % (c))
    if state == 0:
        raise dns.exception.SyntaxError("no stop value specified")
    if not cur:
        raise dns.exception.SyntaxError("empty %s value" %
                                        ("stop" if state == 2 else "step"))
    if state == 2:
        stop = int(cur)
    else:
        step = int(cur)
    # Validate with real exceptions instead of asserts, which disappear
    # under python -O and raised the wrong exception type anyway.
    if step < 1:
        raise dns.exception.SyntaxError("step must be >= 1")
    if start < 0:
        raise dns.exception.SyntaxError("start must be >= 0")
    # start == stop is allowed: it denotes a single-value range.
    if start > stop:
        raise dns.exception.SyntaxError("start must be <= stop")
    return (start, stop, step)
| gpl-3.0 |
schakrava/rockstor-core | src/rockstor/cli/rock_cli.py | 2 | 8123 | """
Copyright (c) 2012-2013 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import pwd
import cmd
import sys
import os
import readline
from base_console import BaseConsole
from setup_console import SetupConsole
from disks_console import DisksConsole
from pools_console import PoolsConsole
from shares_console import SharesConsole
from services_console import ServicesConsole
from sm_console import SMConsole
from support_console import SupportConsole
from network_console import NetworkConsole
from users_console import UsersConsole
from task_console import TaskConsole
from replication_console import ReplicationConsole
from backup_plugin_console import BackupPluginConsole
from rest_util import api_call
from nfs_export_console import NFSExportConsole
from api_keys import APIKeyConsole
from rest_util import set_token
ASCII_LOGO = """
__ __ __ __ ___ __ __
|__) / \ / ` |__/ /__` | / \ |__)
| \ \__/ \__, | \ .__/ | \__/ | \\
"""
class RockConsole(BaseConsole):
    """Top-level interactive console for Rockstor.

    Each ``do_<name>`` command defers to a dedicated subconsole: called
    with no arguments it enters that subconsole's interactive loop,
    otherwise the remaining arguments are executed as a single command.
    """

    def __init__(self, greeting='Rockstor'):
        self.user = pwd.getpwuid(os.getuid())[0]
        self.greeting = self.user + '@' + greeting
        self.prompt = self.greeting + '> '
        self.intro = ('%s\nWelcome to Rockstor. The Smart Open Storage '
                      'Platform.' % ASCII_LOGO)
        self.user_hist_file = os.path.expanduser('~') + '/.rcli.hist'
        try:
            # Restore command history from previous sessions, if any.
            readline.read_history_file(self.user_hist_file)
        except (IOError, OSError):
            # A missing or unreadable history file is not fatal.
            pass
        finally:
            BaseConsole.__init__(self)

    def postloop(self):
        cmd.Cmd.postloop(self)
        print("Thanks for Rocking on the Console")
        try:
            readline.write_history_file(self.user_hist_file)
        except (IOError, OSError):
            print('Command history could not be saved')

    def _run_subconsole(self, console_cls, args):
        """Dispatch to a subconsole.

        Enters the subconsole's interactive loop when ``args`` is empty,
        otherwise runs ``args`` as one command. Returns whatever the
        subconsole call returns.

        This also fixes the old do_backup/do_nfs_exports/do_api_keys
        behavior, which passed ``args`` to cmdloop() (where it is treated
        as the intro banner) instead of executing it via onecmd().
        """
        console = console_cls(self.greeting)
        if (len(args) == 0):
            return console.cmdloop()
        return console.onecmd(args)

    # ------------------------------------------------------------------
    # Commands
    # ------------------------------------------------------------------

    def do_utcnow(self, args):
        """
        returns utc time on the server
        """
        url = ('%scommands/utcnow' % BaseConsole.url)
        print(api_call(url, calltype='post'))

    def do_uptime(self, args):
        """
        return uptime(in seconds) of the server
        """
        url = ('%scommands/uptime' % BaseConsole.url)
        print(api_call(url, calltype='post'))

    def do_bootstrap(self, args):
        """
        bootraps the storage state, mounts anything that needs to be mounted
        etc..
        """
        url = ('%scommands/bootstrap' % BaseConsole.url)
        print(api_call(url, calltype='post'))

    def do_shares(self, args):
        """
        Subconsole for share related operations.
        Go to shares subconsole: shares
        Display list of shares: shares list <share_name>
        Add a share: shares add pool_name share_name
        Remove a share: shares delete share_name
        share detail console: shares share_name
        commands on a share: shares share_name <input>
        """
        # do_shares propagates the subconsole's return value
        # (original behavior).
        return self._run_subconsole(SharesConsole, args)

    def do_pools(self, args):
        """
        Operations on pools can be done with this command.
        Go to pools subconsole: pools
        Display the list of pools: pools list
        Add a pool: pools add
        Remove a pool: pools delete
        pool detail console: pools pool_name
        commands on a pool: pools pool_name <input>
        """
        self._run_subconsole(PoolsConsole, args)

    def do_disks(self, args):
        """
        Perform operations on disks. See 'help disks' for details.
        """
        self._run_subconsole(DisksConsole, args)

    def help_disks(self):
        s = """
        %(c)sPerform operations on disks.%(e)s
        Available commands:
        Go to disks subconsole: %(c)sdisks%(e)s
        Display the list of disks: %(c)sdisks list%(e)s
        Scan for new disks: %(c)sdisks scan%(e)s
        """ % BaseConsole.c_params
        print(s)

    def do_services(self, args):
        """
        Operations on all services can be done with this command.
        Display the list of services: services list
        service detail console: services service_name
        nfs service console: services nfs <commands>
        smb service console: services smb <command>
        """
        self._run_subconsole(ServicesConsole, args)

    def do_setup(self, args):
        """
        Temporary method to setup for dev purposes. to be deprecated later,
        perhaps.
        """
        self._run_subconsole(SetupConsole, args)

    def do_smart(self, args):
        """
        Smart manager console
        """
        self._run_subconsole(SMConsole, args)

    def do_support(self, args):
        """
        Support console
        """
        self._run_subconsole(SupportConsole, args)

    def do_network(self, args):
        """
        Network console
        """
        self._run_subconsole(NetworkConsole, args)

    def do_users(self, args):
        """
        Users console
        """
        self._run_subconsole(UsersConsole, args)

    def do_tasks(self, args):
        """
        Task Scheduler Console
        """
        self._run_subconsole(TaskConsole, args)

    def do_replication(self, args):
        """
        Replication console
        """
        self._run_subconsole(ReplicationConsole, args)

    def do_backup(self, args):
        """
        Backup plugin console
        """
        self._run_subconsole(BackupPluginConsole, args)

    def do_nfs_exports(self, args):
        """
        NFS exports console
        """
        self._run_subconsole(NFSExportConsole, args)

    def do_api_keys(self, args):
        """
        API keys console
        """
        self._run_subconsole(APIKeyConsole, args)
def main():
    """Entry point: run one command from argv, or start the interactive loop."""
    set_token()
    console = RockConsole()
    argv = sys.argv
    if len(argv) <= 1:
        return console.cmdloop()
    # '-c' marks a command invoked remotely (e.g. via ssh); skip the flag.
    first_arg = 2 if argv[1] == '-c' else 1
    line = ' '.join(argv[first_arg:])
    return console.postcmd(console.onecmd(line), line)
| gpl-3.0 |
adityacs/ansible | lib/ansible/modules/system/svc.py | 37 | 10102 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2015, Brian Coca <bcoca@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: svc
author: "Brian Coca (@bcoca)"
version_added: "1.9"
short_description: Manage daemontools services.
description:
- Controls daemontools services on remote hosts using the svc utility.
options:
name:
required: true
description:
- Name of the service to manage.
state:
required: false
choices: [ started, stopped, restarted, reloaded, once ]
description:
- C(Started)/C(stopped) are idempotent actions that will not run
commands unless necessary. C(restarted) will always bounce the
svc (svc -t) and C(killed) will always bounce the svc (svc -k).
C(reloaded) will send a sigusr1 (svc -1).
C(once) will run a normally downed svc once (svc -o), not really
an idempotent operation.
downed:
required: false
choices: [ "yes", "no" ]
default: no
description:
- Should a 'down' file exist or not, if it exists it disables auto startup.
defaults to no. Downed does not imply stopped.
enabled:
required: false
choices: [ "yes", "no" ]
description:
- Wheater the service is enabled or not, if disabled it also implies stopped.
Make note that a service can be enabled and downed (no auto restart).
service_dir:
required: false
default: /service
description:
- directory svscan watches for services
service_src:
required: false
description:
- directory where services are defined, the source of symlinks to service_dir.
'''
EXAMPLES = '''
# Example action to start svc dnscache, if not running
- svc:
name: dnscache
state: started
# Example action to stop svc dnscache, if running
- svc:
name: dnscache
state: stopped
# Example action to kill svc dnscache, in all cases
- svc:
name: dnscache
state: killed
# Example action to restart svc dnscache, in all cases
- svc:
name: dnscache
state: restarted
# Example action to reload svc dnscache, in all cases
- svc:
name: dnscache
state: reloaded
# Example using alt svc directory location
- svc:
name: dnscache
state: reloaded
service_dir: /var/service
'''
import platform
import shlex
from ansible.module_utils.pycompat24 import get_exception
from ansible.module_utils.basic import *
def _load_dist_subclass(cls, *args, **kwargs):
    '''
    Used for derivative implementations

    Selects the subclass of `cls` whose `distro` attribute matches the
    module's 'distro' parameter, falling back to `cls` itself when no
    subclass matches, and allocates an instance of the chosen class.
    '''
    subclass = None
    # The AnsibleModule instance arrives via kwargs; its 'distro' parameter
    # names a derivative implementation (e.g. encore, s6).
    distro = kwargs['module'].params['distro']
    # get the most specific superclass for this platform
    if distro is not None:
        for sc in cls.__subclasses__():
            if sc.distro is not None and sc.distro == distro:
                subclass = sc
    # No matching derivative: instantiate the base class itself.
    if subclass is None:
        subclass = cls
    # NOTE(review): only reachable via the commented-out Svc.__new__ hook;
    # assumes cls appears in subclass's MRO — confirm before re-enabling.
    return super(cls, subclass).__new__(subclass)
class Svc(object):
    """
    Main class that handles daemontools, can be subclassed and overridden in case
    we want to use a 'derivative' like encore, s6, etc

    State is gathered by running `svstat` and parsed into:
      - state: 'started'/'stopped'/'starting'/'stopping'/'unknown', or the
        raw stderr text when svstat itself failed
      - enabled: True when the symlink service_dir/name exists
      - downed: True when a 'down' file exists (disables auto-start)
      - pid, duration, full_state: parsed from svstat output
    """
    #def __new__(cls, *args, **kwargs):
    #    return _load_dist_subclass(cls, args, kwargs)
    def __init__(self, module):
        # Extra locations searched for the svc/svstat binaries.
        self.extra_paths = [ '/command', '/usr/local/bin' ]
        # Attributes exposed by report().
        self.report_vars = ['state', 'enabled', 'downed', 'svc_full', 'src_full', 'pid', 'duration', 'full_state']
        self.module = module
        self.name = module.params['name']
        self.service_dir = module.params['service_dir']
        self.service_src = module.params['service_src']
        self.enabled = None
        self.downed = None
        self.full_state = None
        self.state = None
        self.pid = None
        self.duration = None
        self.svc_cmd = module.get_bin_path('svc', opt_dirs=self.extra_paths)
        self.svstat_cmd = module.get_bin_path('svstat', opt_dirs=self.extra_paths)
        # svc_full: the live symlink svscan watches; src_full: its target.
        self.svc_full = '/'.join([ self.service_dir, self.name ])
        self.src_full = '/'.join([ self.service_src, self.name ])
        # lexists: count a dangling symlink as "enabled" too.
        self.enabled = os.path.lexists(self.svc_full)
        if self.enabled:
            self.downed = os.path.lexists('%s/down' % self.svc_full)
            self.get_status()
        else:
            # Not linked into service_dir: the 'down' marker can only live
            # in the source directory, and the service cannot be running.
            self.downed = os.path.lexists('%s/down' % self.src_full)
            self.state = 'stopped'
    def enable(self):
        # Link the service definition into the scanned directory so
        # svscan picks it up.
        if os.path.exists(self.src_full):
            try:
                os.symlink(self.src_full, self.svc_full)
            except OSError:
                e = get_exception()
                self.module.fail_json(path=self.src_full, msg='Error while linking: %s' % str(e))
        else:
            self.module.fail_json(msg="Could not find source for service to enable (%s)." % self.src_full)
    def disable(self):
        # Remove the symlink, then bring the service (and its logger,
        # if present) down and tell supervise to exit (-dx).
        try:
            os.unlink(self.svc_full)
        except OSError:
            e = get_exception()
            self.module.fail_json(path=self.svc_full, msg='Error while unlinking: %s' % str(e))
        self.execute_command([self.svc_cmd,'-dx',self.src_full])
        src_log = '%s/log' % self.src_full
        if os.path.exists(src_log):
            self.execute_command([self.svc_cmd,'-dx',src_log])
    def get_status(self):
        # Parse `svstat` output, e.g.
        #   "/service/foo: up (pid 123) 45 seconds"
        (rc, out, err) = self.execute_command([self.svstat_cmd, self.svc_full])
        if err is not None and err:
            # svstat failed: expose its stderr as the state.
            self.full_state = self.state = err
        else:
            self.full_state = out
            m = re.search('\(pid (\d+)\)', out)
            if m:
                self.pid = m.group(1)
            m = re.search('(\d+) seconds', out)
            if m:
                self.duration = m.group(1)
            # Build 'start'/'stopp' stems, then append 'ing' (transition
            # pending: 'want up'/'want down') or 'ed' (settled) below.
            if re.search(' up ', out):
                self.state = 'start'
            elif re.search(' down ', out):
                self.state = 'stopp'
            else:
                self.state = 'unknown'
                return
            if re.search(' want ', out):
                self.state += 'ing'
            else:
                self.state += 'ed'
    def start(self):
        # svc -u: bring the service up and restart it if it dies.
        return self.execute_command([self.svc_cmd, '-u', self.svc_full])
    def stopp(self):
        # Alias matching the 'stopp' stem produced by get_status().
        return self.stop()
    def stop(self):
        # svc -d: take the service down.
        return self.execute_command([self.svc_cmd, '-d', self.svc_full])
    def once(self):
        # svc -o: run once; do not restart when it exits.
        return self.execute_command([self.svc_cmd, '-o', self.svc_full])
    def reload(self):
        # svc -1: send SIGUSR1.
        return self.execute_command([self.svc_cmd, '-1', self.svc_full])
    def restart(self):
        # svc -t: send SIGTERM, supervise restarts the service.
        return self.execute_command([self.svc_cmd, '-t', self.svc_full])
    def kill(self):
        # svc -k: send SIGKILL.
        return self.execute_command([self.svc_cmd, '-k', self.svc_full])
    def execute_command(self, cmd):
        # Run an external command, converting unexpected failures into a
        # module failure. Returns (rc, stdout, stderr).
        try:
            (rc, out, err) = self.module.run_command(' '.join(cmd))
        except Exception:
            e = get_exception()
            self.module.fail_json(msg="failed to execute: %s" % str(e))
        return (rc, out, err)
    def report(self):
        # Refresh status, then return the report_vars as a dict.
        self.get_status()
        states = {}
        for k in self.report_vars:
            states[k] = self.__dict__[k]
        return states
# ===========================================
# Main control flow
def main():
    """Ansible module entry point: manage a daemontools service.

    Applies the requested 'enabled', 'state' and 'downed' settings to the
    named service and exits with a changed flag plus a status report.
    """
    module = AnsibleModule(
        argument_spec = dict(
            name = dict(required=True),
            state = dict(choices=['started', 'stopped', 'restarted', 'killed', 'reloaded', 'once']),
            enabled = dict(required=False, type='bool'),
            downed = dict(required=False, type='bool'),
            dist = dict(required=False, default='daemontools'),
            service_dir = dict(required=False, default='/service'),
            service_src = dict(required=False, default='/etc/service'),
        ),
        supports_check_mode=True,
    )
    # Force a predictable C locale so external command output parses reliably.
    module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C', LC_CTYPE='C')

    state = module.params['state']
    enabled = module.params['enabled']
    downed = module.params['downed']

    svc = Svc(module)
    changed = False
    # Snapshot before changes; report() also refreshes svc's status.
    orig_state = svc.report()

    if enabled is not None and enabled != svc.enabled:
        changed = True
        if not module.check_mode:
            try:
                if enabled:
                    svc.enable()
                else:
                    svc.disable()
            except (OSError, IOError):
                e = get_exception()
                module.fail_json(msg="Could not change service link: %s" % str(e))

    if state is not None and state != svc.state:
        # Map each requested state to the Svc method that achieves it.
        # The previous dispatch, getattr(svc, state[:-2])(), broke for
        # state='once' ('once'[:-2] == 'on', which is not a method).
        state_method = {
            'started': 'start',
            'stopped': 'stop',
            'restarted': 'restart',
            'killed': 'kill',
            'reloaded': 'reload',
            'once': 'once',
        }
        changed = True
        if not module.check_mode:
            getattr(svc, state_method[state])()

    if downed is not None and downed != svc.downed:
        changed = True
        if not module.check_mode:
            # The presence of a 'down' file disables auto-start.
            d_file = "%s/down" % svc.svc_full
            try:
                if downed:
                    open(d_file, "a").close()
                else:
                    os.unlink(d_file)
            except (OSError, IOError):
                e = get_exception()
                module.fail_json(msg="Could not change downed file: %s " % (str(e)))

    module.exit_json(changed=changed, svc=svc.report())

if __name__ == '__main__':
    main()
| gpl-3.0 |
salomon1184/bite-project | deps/gdata-python-client/samples/apps/marketplace_sample/gdata/tlslite/VerifierDB.py | 359 | 3104 | """Class for storing SRP password verifiers."""
from utils.cryptomath import *
from utils.compat import *
import mathtls
from BaseDB import BaseDB
class VerifierDB(BaseDB):
    """This class represent an in-memory or on-disk database of SRP
    password verifiers.
    A VerifierDB can be passed to a server handshake to authenticate
    a client based on one of the verifiers.
    This class is thread-safe.
    """
    def __init__(self, filename=None):
        """Create a new VerifierDB instance.
        @type filename: str
        @param filename: Filename for an on-disk database, or None for
        an in-memory database. If the filename already exists, follow
        this with a call to open(). To create a new on-disk database,
        follow this with a call to create().
        """
        BaseDB.__init__(self, filename, "verifier")
    def _getItem(self, username, valueStr):
        # Stored format is "N g salt verifier", space separated; N, g and
        # verifier are base64-encoded integers, salt a base64-encoded string.
        (N, g, salt, verifier) = valueStr.split(" ")
        N = base64ToNumber(N)
        g = base64ToNumber(g)
        salt = base64ToString(salt)
        verifier = base64ToNumber(verifier)
        return (N, g, salt, verifier)
    def __setitem__(self, username, verifierEntry):
        """Add a verifier entry to the database.
        @type username: str
        @param username: The username to associate the verifier with.
        Must be less than 256 characters in length. Must not already
        be in the database.
        @type verifierEntry: tuple
        @param verifierEntry: The verifier entry to add. Use
        L{tlslite.VerifierDB.VerifierDB.makeVerifier} to create a
        verifier entry.
        """
        BaseDB.__setitem__(self, username, verifierEntry)
    def _setItem(self, username, value):
        # Serialize the (N, g, salt, verifier) tuple into the space
        # separated base64 format read back by _getItem.
        if len(username)>=256:
            raise ValueError("username too long")
        N, g, salt, verifier = value
        N = numberToBase64(N)
        g = numberToBase64(g)
        salt = stringToBase64(salt)
        verifier = numberToBase64(verifier)
        valueStr = " ".join( (N, g, salt, verifier) )
        return valueStr
    def _checkItem(self, value, username, param):
        # SRP check: recompute the verifier v = g^x mod N from the candidate
        # password (param) and compare it with the stored verifier.
        (N, g, salt, verifier) = value
        x = mathtls.makeX(salt, username, param)
        v = powMod(g, x, N)
        return (verifier == v)
    def makeVerifier(username, password, bits):
        """Create a verifier entry which can be stored in a VerifierDB.
        @type username: str
        @param username: The username for this verifier. Must be less
        than 256 characters in length.
        @type password: str
        @param password: The password for this verifier.
        @type bits: int
        @param bits: This values specifies which SRP group parameters
        to use. It must be one of (1024, 1536, 2048, 3072, 4096, 6144,
        8192). Larger values are more secure but slower. 2048 is a
        good compromise between safety and speed.
        @rtype: tuple
        @return: A tuple which may be stored in a VerifierDB.
        """
        return mathtls.makeVerifier(username, password, bits)
makeVerifier = staticmethod(makeVerifier) | apache-2.0 |
jtratner/sudoku-fuzzer-udacity | fuzz_solver.py | 1 | 9025 | # Use a different solved board to generate different tests.
# A known-valid, fully solved board used as the seed for mutation tests.
valid = [[5,3,4,6,7,8,9,1,2],
         [6,7,2,1,9,5,3,4,8],
         [1,9,8,3,4,2,5,6,7],
         [8,5,9,7,6,1,4,2,3],
         [4,2,6,8,5,3,7,9,1],
         [7,1,3,9,2,4,8,5,6],
         [9,6,1,5,3,7,2,8,4],
         [2,8,7,4,1,9,6,3,5],
         [3,4,5,2,8,6,1,7,9]]
# test cases with no solution
# (row one already uses 1-8, so neither 9 nor 0 can legally complete it)
no_soln1 = [
    [1,2,3,4,5,6,7,8,0],
    [0,0,0,0,0,0,0,0,9],
    [0,0,0,0,0,0,0,0,0],
    [0,0,0,0,0,0,0,0,0],
    [0,0,0,0,0,0,0,0,0],
    [0,0,0,0,0,0,0,0,0],
    [0,0,0,0,0,0,0,0,0],
    [0,0,0,0,0,0,0,0,0],
    [0,0,0,0,0,0,0,0,0]]
no_soln2 = [
    [1, 2, 3, 0, 0, 0, 0, 0, 0],
    [4, 5, 0, 0, 0, 0, 6, 0, 0],
    [0, 0, 0, 6, 0, 0, 0, 0, 0],
    [0, 0, 0, 0, 0, 0, 0, 0, 0],
    [0, 0, 0, 0, 0, 0, 0, 0, 0],
    [0, 0, 0, 0, 0, 0, 0, 0, 0],
    [0, 0, 0, 0, 0, 0, 0, 0, 0],
    [0, 0, 0, 0, 0, 0, 0, 0, 0],
    [0, 0, 0, 0, 0, 0, 0, 0, 0]]
import random, time
# All 81 (row, col) cell coordinates.
squares = [(i,j) for i in range(9) for j in range(9)]
# For each cell: its three units (row, column, 3x3 box) as lists of cells.
# NOTE(review): i/3*3 relies on Python 2 integer division (this file also
# uses print statements and xrange, so it targets Python 2).
units = dict(((i,j), [[(i,k) for k in range(9)]] +
                     [[(k,j) for k in range(9)]] +
                     [[(k,l) for k in range(i/3*3, i/3*3+3) for l in range(j/3*3, j/3*3+3)]])
              for (i,j) in squares)
# For each cell: the set of other cells that share at least one unit with it.
peers = dict((s, set(sum(units[s], [])) - set([s]))
             for s in squares)
def erase(board, i, j, d):
    # Eliminate candidate digit d (a character) from cell (i, j) and
    # propagate the consequences. Cells hold strings of remaining
    # candidates. Returns the board, or False on contradiction.
    if d not in board[i][j]:
        return board
    board[i][j] = board[i][j].replace(d, '')
    if len(board[i][j]) == 0:
        return False # contradiction
    elif len(board[i][j]) == 1:
        # Cell is now decided: remove its value from every peer.
        d2 = board[i][j]
        if not all(erase(board, i1, j1, d2) for (i1, j1) in peers[i,j]):
            return False
    # If d can now go in only one place within a unit, put it there.
    for unit in units[(i,j)]:
        numplaces = [(i1, j1) for (i1, j1) in unit if d in board[i1][j1]]
        if len(numplaces) == 0:
            return False
        elif len(numplaces) == 1:
            if not assign(board, numplaces[0][0], numplaces[0][1], d):
                return False
    return board
def assign(board, i, j, d):
    """Fix digit d in cell (i, j) by erasing every other candidate.

    Returns the board on success, or False when a contradiction arises.
    """
    other_digits = board[i][j].replace(d, '')
    if not all(erase(board, i, j, other) for other in other_digits):
        return False
    return board
def random_constr_prop_sudoku(N):
    """
    Generates random sudoku puzzles by filling in cells while checking for
    constraint violations. If a constraint is violated, random sudoku is called again.
    """
    # Every cell starts with all nine candidates.
    board = [['123456789' for _ in range(9)] for _ in range(9)]
    cells = [s for s in squares]
    random.shuffle(cells)
    for cell in cells:
        i,j = cell
        # Assign a random remaining candidate; stop at the first contradiction.
        if not assign(board, i, j, random.choice(board[i][j])):
            break
    # Decided cells (exactly one candidate left).
    ds = [board[i][j] for i in range(9) for j in range(9) if len(board[i][j]) == 1]
    # Accept only boards with at least N clues covering >= 8 distinct digits;
    # otherwise retry recursively.
    if len(ds) >= N and len(set(ds)) >= 8:
        return [map(lambda v: int(v) if len(v) == 1 else 0, row) for row in board]
    return random_constr_prop_sudoku(N)
## Contributed by David Froese
def random_froese_puzzle(check_sudoku, N):
    """
    Generates random sudoku puzzles by randomly filling entries in the grid and
    then calling check sudoku. Assumes check sudoku is running correctly.
    """
    nums = range(1, 10)
    grid = [[0 for _ in xrange(9)] for _ in xrange(9)] # empty grid
    for _ in xrange(N):
        # Place a random digit; undo it if the checker rejects the board.
        i, j = random.randrange(0, 9), random.randrange(0, 9)
        grid[i][j] = random.choice(nums)
        if check_sudoku(grid) in [None, False]:
            grid[i][j] = 0
    return grid
    # NOTE(review): the line below appears unreachable (dead retry path) —
    # confirm indentation against the original source.
    return random_froese_puzzle(check_sudoku, N)
def check_random_solns(random_puzzle, solve_sudoku, check_sudoku,
                       iters, solve_fraction = 0.9):
    # Generate `iters` random boards via `random_puzzle` and require the
    # solver to solve at least `solve_fraction` of them. Boards are seeded
    # with 17-19 clues (17 is the known minimum for a unique solution).
    random.seed()
    solved = 0
    num_nz = 0
    range_mutates = range(17, 20)
    for i in range(iters):
        # Generate a valid random board
        mutates = random.choice(range_mutates)
        board = random_puzzle(mutates)
        # Count the clues (non-zero cells) on the generated board.
        board_nz = 81 - sum(row.count(0) for row in board)
        bd = ''.join(''.join(map(str, row)) for row in board)
        # If it's unsolvable the solver screwed up
        start = time.clock()
        if solve_sudoku(board) not in [None, False]:
            num_nz += board_nz
            solved += 1
        t = time.clock() - start
        # Flag pathologically slow boards for manual inspection.
        if t > 5.0:
            print "board[%d] %s with %d non-zeros took (%.2f seconds)" % (i, bd, num_nz, t)
    assert solved > (solve_fraction * iters), "Your solver failed on more than %.1f%% of random boards! It solved only %d / %d boards." % (100 * solve_fraction, solved, iters)
    print "Your solver completed %d / %d random boards with average #non-zeros=%d generated by %s! Congrats!" % (solved, iters, num_nz/solved, repr(random_puzzle))
    return True
# Random strategy 2: Take a valid board and perform transformations
# that do not change validity
# Transposing a grid maintains validity
def transpose(grid):
    """Return the transpose of *grid*: rows become columns."""
    columns = zip(*grid)
    return map(list, columns)
# Permutes the row/column with another row/column in the same range
# (i.e. 6 with 6-8, 0 with 0-2, etc.)
def permute(grid, i, row=True):
    # Swap row i (or column i when row=False) with a randomly chosen
    # row/column from the same band of three — this preserves validity.
    # NOTE(review): i/3*3 relies on Python 2 integer division.
    if not row: grid = transpose(grid)
    j = random.choice(range(i/3*3, i/3*3+3))
    grid[j], grid[i] = grid[i], grid[j]
    return grid if row else transpose(grid)
# Permutes the row/column blocks (i.e. 0-2 with 6-8)
def permute_block(grid, i, row=True):
    # Swap the i-th band of three rows (or columns when row=False) with a
    # randomly chosen band — this also preserves validity.
    if not row: grid = transpose(grid)
    bi = i*3
    bj = random.choice(range(3))*3
    for offset in range(3):
        grid[bi+offset], grid[bj+offset] = grid[bj+offset], grid[bi+offset]
    return grid if row else transpose(grid)
# Reflects the board along the horizontal or vertical axis
def reflect(grid, horizontal=True):
    """Mirror the 9x9 board along the horizontal or vertical axis.

    Rows are reversed in place; the (possibly transposed) board is returned.
    """
    if not horizontal:
        grid = transpose(grid)
    for row_index in range(9):
        grid[row_index].reverse()
    if horizontal:
        return grid
    return transpose(grid)
def random_mutation_sudoku(soln, iters=1000):
    # Produce a random valid solved board by applying `iters` random
    # validity-preserving transformations (reflect/transpose/permute) to
    # a copy of the given solution.
    # generate a valid grid
    grid = copy(soln)
    choices = [['reflect', horizontal] for horizontal in (True, False)] + [['transpose']] + [['permute', i, row] for row in (True, False) for i in range(9)] + [['permute_block', bi, row] for row in (True, False) for bi in range(3)]
    for i in range(iters):
        # choice[0] names the transformation; choice[1:] are its arguments.
        choice = random.choice(choices)
        if choice[0] == 'reflect': grid = reflect(grid, *choice[1:])
        if choice[0] == 'transpose': grid = transpose(grid)
        if choice[0] == 'permute': grid = permute(grid, *choice[1:])
        if choice[0] == 'permute_block': grid = permute_block(grid, *choice[1:])
    return grid
# Make a copy of a grid so we can modify it without touching the original
def copy(grid):
    """Copy a grid so it can be modified without touching the original.

    Each row is duplicated; cell values are shared (they are immutable ints).
    """
    return map(list, grid)
# Assert than a solution remains solvable after mutates-many moves are undone.
# Run iters-many tests of this nature.
def fuzz_solution(soln, mutates, iters, check_sudoku, solve_sudoku):
    """ fuzzes a given *valid* solution

    Runs `iters` trials: each blanks up to `mutates` random cells of a copy
    of `soln` (which must remain solvable) and asserts that check_sudoku
    accepts the board and solve_sudoku solves it.
    """
    random.seed()
    for i in range(iters):
        board = copy(soln)
        # Undo a set of moves. This should leave the board solvable
        for mutate in range(mutates):
            x = random.randrange(0,9)
            y = random.randrange(0,9)
            # Might already be 0 in which case we didn't undo "mutates" moves
            # but still generated a reasonable test case
            board[x][y] = 0
        # If this board is invalid the test harness screwed up
        assert check_sudoku(board), "Input checker failed with input {board}".format(board=board)
        # If it's unsolvable the solver screwed up
        assert solve_sudoku(board), "Solver failed to solve board {board}".format(board=board)
    return True
def check_no_valid_solns(solve_sudoku, tests=None):
    """Run the solver against boards known to have no solution.

    The solver must return False for every board; raises AssertionError
    otherwise. Returns True when all cases pass.
    """
    if not tests:
        tests = [no_soln1, no_soln2]
    for test in tests:
        res = solve_sudoku(test)
        assert res is False, """Solver failed to return False for valid, but unsolveable sudoku.
    Returned {res} instead. Input was: {test}""".format(test=test, res=res)
    return True
def fuzz_solver(check_sudoku, solve_sudoku, mutates=10, iters=10, soln=None, tests=None):
    # Top-level fuzzing harness: runs unsolvable cases, mutated-solution
    # cases and three flavors of randomly generated boards against the
    # supplied check_sudoku/solve_sudoku callbacks. Returns True when all
    # stages pass, False otherwise (assertion failures propagate).
    soln = soln or valid
    # Check that some boards have no valid solutions
    if not check_no_valid_solns(solve_sudoku, tests):
        return False
    # Some boards should have solutions
    if not fuzz_solution(soln, mutates, iters, check_sudoku, solve_sudoku):
        return False
    # Check for solutions exist for majority of random puzzles
    # 1. Constraint propagated random board
    if not check_random_solns(random_constr_prop_sudoku, solve_sudoku, check_sudoku, iters):
        return False
    # 2. Random boards accepted by check_sudoku
    # (proposed by David Froese)
    def random_froese_sudoku(N): return random_froese_puzzle(check_sudoku, N)
    if not check_random_solns(random_froese_sudoku, solve_sudoku, check_sudoku, iters):
        return False
    # 3. Random boards created by mutating a valid board must have solutions
    if not all(fuzz_solution(random_mutation_sudoku(soln), mutates, 1, check_sudoku, solve_sudoku) for _ in xrange(iters)):
        return False
    else:
        print "Your solver completed %d randomly generated boards with %d mutations! Congrats!" % (iters, mutates)
        return True
| mit |
pajlada/pajbot | pajbot/emoji.py | 2 | 66638 | ALL_EMOJI = [
"😀",
"😃",
"😄",
"😁",
"😆",
"😅",
"🤣",
"😂",
"🙂",
"🙃",
"😉",
"😊",
"😇",
"🥰",
"😍",
"🤩",
"😘",
"😗",
"☺️",
"☺",
"😚",
"😙",
"😋",
"😛",
"😜",
"🤪",
"😝",
"🤑",
"🤗",
"🤭",
"🤫",
"🤔",
"🤐",
"🤨",
"😐",
"😑",
"😶",
"😏",
"😒",
"🙄",
"😬",
"🤥",
"😌",
"😔",
"😪",
"🤤",
"😴",
"😷",
"🤒",
"🤕",
"🤢",
"🤮",
"🤧",
"🥵",
"🥶",
"🥴",
"😵",
"🤯",
"🤠",
"🥳",
"😎",
"🤓",
"🧐",
"😕",
"😟",
"🙁",
"☹️",
"☹",
"😮",
"😯",
"😲",
"😳",
"🥺",
"😦",
"😧",
"😨",
"😰",
"😥",
"😢",
"😭",
"😱",
"😖",
"😣",
"😞",
"😓",
"😩",
"😫",
"🥱",
"😤",
"😡",
"😠",
"🤬",
"😈",
"👿",
"💀",
"☠️",
"☠",
"💩",
"🤡",
"👹",
"👺",
"👻",
"👽",
"👾",
"🤖",
"😺",
"😸",
"😹",
"😻",
"😼",
"😽",
"🙀",
"😿",
"😾",
"🙈",
"🙉",
"🙊",
"💋",
"💌",
"💘",
"💝",
"💖",
"💗",
"💓",
"💞",
"💕",
"💟",
"❣️",
"❣",
"💔",
"❤️",
"❤",
"🧡",
"💛",
"💚",
"💙",
"💜",
"🤎",
"🖤",
"🤍",
"💯",
"💢",
"💥",
"💫",
"💦",
"💨",
"🕳️",
"🕳",
"💣",
"💬",
"👁️🗨️",
"👁🗨️",
"👁️🗨",
"👁🗨",
"🗨️",
"🗨",
"🗯️",
"🗯",
"💭",
"💤",
"👋",
"👋🏻",
"👋🏼",
"👋🏽",
"👋🏾",
"👋🏿",
"🤚",
"🤚🏻",
"🤚🏼",
"🤚🏽",
"🤚🏾",
"🤚🏿",
"🖐️",
"🖐",
"🖐🏻",
"🖐🏼",
"🖐🏽",
"🖐🏾",
"🖐🏿",
"✋",
"✋🏻",
"✋🏼",
"✋🏽",
"✋🏾",
"✋🏿",
"🖖",
"🖖🏻",
"🖖🏼",
"🖖🏽",
"🖖🏾",
"🖖🏿",
"👌",
"👌🏻",
"👌🏼",
"👌🏽",
"👌🏾",
"👌🏿",
"🤏",
"🤏🏻",
"🤏🏼",
"🤏🏽",
"🤏🏾",
"🤏🏿",
"✌️",
"✌",
"✌🏻",
"✌🏼",
"✌🏽",
"✌🏾",
"✌🏿",
"🤞",
"🤞🏻",
"🤞🏼",
"🤞🏽",
"🤞🏾",
"🤞🏿",
"🤟",
"🤟🏻",
"🤟🏼",
"🤟🏽",
"🤟🏾",
"🤟🏿",
"🤘",
"🤘🏻",
"🤘🏼",
"🤘🏽",
"🤘🏾",
"🤘🏿",
"🤙",
"🤙🏻",
"🤙🏼",
"🤙🏽",
"🤙🏾",
"🤙🏿",
"👈",
"👈🏻",
"👈🏼",
"👈🏽",
"👈🏾",
"👈🏿",
"👉",
"👉🏻",
"👉🏼",
"👉🏽",
"👉🏾",
"👉🏿",
"👆",
"👆🏻",
"👆🏼",
"👆🏽",
"👆🏾",
"👆🏿",
"🖕",
"🖕🏻",
"🖕🏼",
"🖕🏽",
"🖕🏾",
"🖕🏿",
"👇",
"👇🏻",
"👇🏼",
"👇🏽",
"👇🏾",
"👇🏿",
"☝️",
"☝",
"☝🏻",
"☝🏼",
"☝🏽",
"☝🏾",
"☝🏿",
"👍",
"👍🏻",
"👍🏼",
"👍🏽",
"👍🏾",
"👍🏿",
"👎",
"👎🏻",
"👎🏼",
"👎🏽",
"👎🏾",
"👎🏿",
"✊",
"✊🏻",
"✊🏼",
"✊🏽",
"✊🏾",
"✊🏿",
"👊",
"👊🏻",
"👊🏼",
"👊🏽",
"👊🏾",
"👊🏿",
"🤛",
"🤛🏻",
"🤛🏼",
"🤛🏽",
"🤛🏾",
"🤛🏿",
"🤜",
"🤜🏻",
"🤜🏼",
"🤜🏽",
"🤜🏾",
"🤜🏿",
"👏",
"👏🏻",
"👏🏼",
"👏🏽",
"👏🏾",
"👏🏿",
"🙌",
"🙌🏻",
"🙌🏼",
"🙌🏽",
"🙌🏾",
"🙌🏿",
"👐",
"👐🏻",
"👐🏼",
"👐🏽",
"👐🏾",
"👐🏿",
"🤲",
"🤲🏻",
"🤲🏼",
"🤲🏽",
"🤲🏾",
"🤲🏿",
"🤝",
"🙏",
"🙏🏻",
"🙏🏼",
"🙏🏽",
"🙏🏾",
"🙏🏿",
"✍️",
"✍",
"✍🏻",
"✍🏼",
"✍🏽",
"✍🏾",
"✍🏿",
"💅",
"💅🏻",
"💅🏼",
"💅🏽",
"💅🏾",
"💅🏿",
"🤳",
"🤳🏻",
"🤳🏼",
"🤳🏽",
"🤳🏾",
"🤳🏿",
"💪",
"💪🏻",
"💪🏼",
"💪🏽",
"💪🏾",
"💪🏿",
"🦾",
"🦿",
"🦵",
"🦵🏻",
"🦵🏼",
"🦵🏽",
"🦵🏾",
"🦵🏿",
"🦶",
"🦶🏻",
"🦶🏼",
"🦶🏽",
"🦶🏾",
"🦶🏿",
"👂",
"👂🏻",
"👂🏼",
"👂🏽",
"👂🏾",
"👂🏿",
"🦻",
"🦻🏻",
"🦻🏼",
"🦻🏽",
"🦻🏾",
"🦻🏿",
"👃",
"👃🏻",
"👃🏼",
"👃🏽",
"👃🏾",
"👃🏿",
"🧠",
"🦷",
"🦴",
"👀",
"👁️",
"👁",
"👅",
"👄",
"👶",
"👶🏻",
"👶🏼",
"👶🏽",
"👶🏾",
"👶🏿",
"🧒",
"🧒🏻",
"🧒🏼",
"🧒🏽",
"🧒🏾",
"🧒🏿",
"👦",
"👦🏻",
"👦🏼",
"👦🏽",
"👦🏾",
"👦🏿",
"👧",
"👧🏻",
"👧🏼",
"👧🏽",
"👧🏾",
"👧🏿",
"🧑",
"🧑🏻",
"🧑🏼",
"🧑🏽",
"🧑🏾",
"🧑🏿",
"👱",
"👱🏻",
"👱🏼",
"👱🏽",
"👱🏾",
"👱🏿",
"👨",
"👨🏻",
"👨🏼",
"👨🏽",
"👨🏾",
"👨🏿",
"🧔",
"🧔🏻",
"🧔🏼",
"🧔🏽",
"🧔🏾",
"🧔🏿",
"👱♂️",
"👱♂",
"👱🏻♂️",
"👱🏻♂",
"👱🏼♂️",
"👱🏼♂",
"👱🏽♂️",
"👱🏽♂",
"👱🏾♂️",
"👱🏾♂",
"👱🏿♂️",
"👱🏿♂",
"👨🦰",
"👨🏻🦰",
"👨🏼🦰",
"👨🏽🦰",
"👨🏾🦰",
"👨🏿🦰",
"👨🦱",
"👨🏻🦱",
"👨🏼🦱",
"👨🏽🦱",
"👨🏾🦱",
"👨🏿🦱",
"👨🦳",
"👨🏻🦳",
"👨🏼🦳",
"👨🏽🦳",
"👨🏾🦳",
"👨🏿🦳",
"👨🦲",
"👨🏻🦲",
"👨🏼🦲",
"👨🏽🦲",
"👨🏾🦲",
"👨🏿🦲",
"👩",
"👩🏻",
"👩🏼",
"👩🏽",
"👩🏾",
"👩🏿",
"👱♀️",
"👱♀",
"👱🏻♀️",
"👱🏻♀",
"👱🏼♀️",
"👱🏼♀",
"👱🏽♀️",
"👱🏽♀",
"👱🏾♀️",
"👱🏾♀",
"👱🏿♀️",
"👱🏿♀",
"👩🦰",
"👩🏻🦰",
"👩🏼🦰",
"👩🏽🦰",
"👩🏾🦰",
"👩🏿🦰",
"👩🦱",
"👩🏻🦱",
"👩🏼🦱",
"👩🏽🦱",
"👩🏾🦱",
"👩🏿🦱",
"👩🦳",
"👩🏻🦳",
"👩🏼🦳",
"👩🏽🦳",
"👩🏾🦳",
"👩🏿🦳",
"👩🦲",
"👩🏻🦲",
"👩🏼🦲",
"👩🏽🦲",
"👩🏾🦲",
"👩🏿🦲",
"🧓",
"🧓🏻",
"🧓🏼",
"🧓🏽",
"🧓🏾",
"🧓🏿",
"👴",
"👴🏻",
"👴🏼",
"👴🏽",
"👴🏾",
"👴🏿",
"👵",
"👵🏻",
"👵🏼",
"👵🏽",
"👵🏾",
"👵🏿",
"🙍",
"🙍🏻",
"🙍🏼",
"🙍🏽",
"🙍🏾",
"🙍🏿",
"🙍♂️",
"🙍♂",
"🙍🏻♂️",
"🙍🏻♂",
"🙍🏼♂️",
"🙍🏼♂",
"🙍🏽♂️",
"🙍🏽♂",
"🙍🏾♂️",
"🙍🏾♂",
"🙍🏿♂️",
"🙍🏿♂",
"🙍♀️",
"🙍♀",
"🙍🏻♀️",
"🙍🏻♀",
"🙍🏼♀️",
"🙍🏼♀",
"🙍🏽♀️",
"🙍🏽♀",
"🙍🏾♀️",
"🙍🏾♀",
"🙍🏿♀️",
"🙍🏿♀",
"🙎",
"🙎🏻",
"🙎🏼",
"🙎🏽",
"🙎🏾",
"🙎🏿",
"🙎♂️",
"🙎♂",
"🙎🏻♂️",
"🙎🏻♂",
"🙎🏼♂️",
"🙎🏼♂",
"🙎🏽♂️",
"🙎🏽♂",
"🙎🏾♂️",
"🙎🏾♂",
"🙎🏿♂️",
"🙎🏿♂",
"🙎♀️",
"🙎♀",
"🙎🏻♀️",
"🙎🏻♀",
"🙎🏼♀️",
"🙎🏼♀",
"🙎🏽♀️",
"🙎🏽♀",
"🙎🏾♀️",
"🙎🏾♀",
"🙎🏿♀️",
"🙎🏿♀",
"🙅",
"🙅🏻",
"🙅🏼",
"🙅🏽",
"🙅🏾",
"🙅🏿",
"🙅♂️",
"🙅♂",
"🙅🏻♂️",
"🙅🏻♂",
"🙅🏼♂️",
"🙅🏼♂",
"🙅🏽♂️",
"🙅🏽♂",
"🙅🏾♂️",
"🙅🏾♂",
"🙅🏿♂️",
"🙅🏿♂",
"🙅♀️",
"🙅♀",
"🙅🏻♀️",
"🙅🏻♀",
"🙅🏼♀️",
"🙅🏼♀",
"🙅🏽♀️",
"🙅🏽♀",
"🙅🏾♀️",
"🙅🏾♀",
"🙅🏿♀️",
"🙅🏿♀",
"🙆",
"🙆🏻",
"🙆🏼",
"🙆🏽",
"🙆🏾",
"🙆🏿",
"🙆♂️",
"🙆♂",
"🙆🏻♂️",
"🙆🏻♂",
"🙆🏼♂️",
"🙆🏼♂",
"🙆🏽♂️",
"🙆🏽♂",
"🙆🏾♂️",
"🙆🏾♂",
"🙆🏿♂️",
"🙆🏿♂",
"🙆♀️",
"🙆♀",
"🙆🏻♀️",
"🙆🏻♀",
"🙆🏼♀️",
"🙆🏼♀",
"🙆🏽♀️",
"🙆🏽♀",
"🙆🏾♀️",
"🙆🏾♀",
"🙆🏿♀️",
"🙆🏿♀",
"💁",
"💁🏻",
"💁🏼",
"💁🏽",
"💁🏾",
"💁🏿",
"💁♂️",
"💁♂",
"💁🏻♂️",
"💁🏻♂",
"💁🏼♂️",
"💁🏼♂",
"💁🏽♂️",
"💁🏽♂",
"💁🏾♂️",
"💁🏾♂",
"💁🏿♂️",
"💁🏿♂",
"💁♀️",
"💁♀",
"💁🏻♀️",
"💁🏻♀",
"💁🏼♀️",
"💁🏼♀",
"💁🏽♀️",
"💁🏽♀",
"💁🏾♀️",
"💁🏾♀",
"💁🏿♀️",
"💁🏿♀",
"🙋",
"🙋🏻",
"🙋🏼",
"🙋🏽",
"🙋🏾",
"🙋🏿",
"🙋♂️",
"🙋♂",
"🙋🏻♂️",
"🙋🏻♂",
"🙋🏼♂️",
"🙋🏼♂",
"🙋🏽♂️",
"🙋🏽♂",
"🙋🏾♂️",
"🙋🏾♂",
"🙋🏿♂️",
"🙋🏿♂",
"🙋♀️",
"🙋♀",
"🙋🏻♀️",
"🙋🏻♀",
"🙋🏼♀️",
"🙋🏼♀",
"🙋🏽♀️",
"🙋🏽♀",
"🙋🏾♀️",
"🙋🏾♀",
"🙋🏿♀️",
"🙋🏿♀",
"🧏",
"🧏🏻",
"🧏🏼",
"🧏🏽",
"🧏🏾",
"🧏🏿",
"🧏♂️",
"🧏♂",
"🧏🏻♂️",
"🧏🏻♂",
"🧏🏼♂️",
"🧏🏼♂",
"🧏🏽♂️",
"🧏🏽♂",
"🧏🏾♂️",
"🧏🏾♂",
"🧏🏿♂️",
"🧏🏿♂",
"🧏♀️",
"🧏♀",
"🧏🏻♀️",
"🧏🏻♀",
"🧏🏼♀️",
"🧏🏼♀",
"🧏🏽♀️",
"🧏🏽♀",
"🧏🏾♀️",
"🧏🏾♀",
"🧏🏿♀️",
"🧏🏿♀",
"🙇",
"🙇🏻",
"🙇🏼",
"🙇🏽",
"🙇🏾",
"🙇🏿",
"🙇♂️",
"🙇♂",
"🙇🏻♂️",
"🙇🏻♂",
"🙇🏼♂️",
"🙇🏼♂",
"🙇🏽♂️",
"🙇🏽♂",
"🙇🏾♂️",
"🙇🏾♂",
"🙇🏿♂️",
"🙇🏿♂",
"🙇♀️",
"🙇♀",
"🙇🏻♀️",
"🙇🏻♀",
"🙇🏼♀️",
"🙇🏼♀",
"🙇🏽♀️",
"🙇🏽♀",
"🙇🏾♀️",
"🙇🏾♀",
"🙇🏿♀️",
"🙇🏿♀",
"🤦",
"🤦🏻",
"🤦🏼",
"🤦🏽",
"🤦🏾",
"🤦🏿",
"🤦♂️",
"🤦♂",
"🤦🏻♂️",
"🤦🏻♂",
"🤦🏼♂️",
"🤦🏼♂",
"🤦🏽♂️",
"🤦🏽♂",
"🤦🏾♂️",
"🤦🏾♂",
"🤦🏿♂️",
"🤦🏿♂",
"🤦♀️",
"🤦♀",
"🤦🏻♀️",
"🤦🏻♀",
"🤦🏼♀️",
"🤦🏼♀",
"🤦🏽♀️",
"🤦🏽♀",
"🤦🏾♀️",
"🤦🏾♀",
"🤦🏿♀️",
"🤦🏿♀",
"🤷",
"🤷🏻",
"🤷🏼",
"🤷🏽",
"🤷🏾",
"🤷🏿",
"🤷♂️",
"🤷♂",
"🤷🏻♂️",
"🤷🏻♂",
"🤷🏼♂️",
"🤷🏼♂",
"🤷🏽♂️",
"🤷🏽♂",
"🤷🏾♂️",
"🤷🏾♂",
"🤷🏿♂️",
"🤷🏿♂",
"🤷♀️",
"🤷♀",
"🤷🏻♀️",
"🤷🏻♀",
"🤷🏼♀️",
"🤷🏼♀",
"🤷🏽♀️",
"🤷🏽♀",
"🤷🏾♀️",
"🤷🏾♀",
"🤷🏿♀️",
"🤷🏿♀",
"👨⚕️",
"👨⚕",
"👨🏻⚕️",
"👨🏻⚕",
"👨🏼⚕️",
"👨🏼⚕",
"👨🏽⚕️",
"👨🏽⚕",
"👨🏾⚕️",
"👨🏾⚕",
"👨🏿⚕️",
"👨🏿⚕",
"👩⚕️",
"👩⚕",
"👩🏻⚕️",
"👩🏻⚕",
"👩🏼⚕️",
"👩🏼⚕",
"👩🏽⚕️",
"👩🏽⚕",
"👩🏾⚕️",
"👩🏾⚕",
"👩🏿⚕️",
"👩🏿⚕",
"👨🎓",
"👨🏻🎓",
"👨🏼🎓",
"👨🏽🎓",
"👨🏾🎓",
"👨🏿🎓",
"👩🎓",
"👩🏻🎓",
"👩🏼🎓",
"👩🏽🎓",
"👩🏾🎓",
"👩🏿🎓",
"👨🏫",
"👨🏻🏫",
"👨🏼🏫",
"👨🏽🏫",
"👨🏾🏫",
"👨🏿🏫",
"👩🏫",
"👩🏻🏫",
"👩🏼🏫",
"👩🏽🏫",
"👩🏾🏫",
"👩🏿🏫",
"👨⚖️",
"👨⚖",
"👨🏻⚖️",
"👨🏻⚖",
"👨🏼⚖️",
"👨🏼⚖",
"👨🏽⚖️",
"👨🏽⚖",
"👨🏾⚖️",
"👨🏾⚖",
"👨🏿⚖️",
"👨🏿⚖",
"👩⚖️",
"👩⚖",
"👩🏻⚖️",
"👩🏻⚖",
"👩🏼⚖️",
"👩🏼⚖",
"👩🏽⚖️",
"👩🏽⚖",
"👩🏾⚖️",
"👩🏾⚖",
"👩🏿⚖️",
"👩🏿⚖",
"👨🌾",
"👨🏻🌾",
"👨🏼🌾",
"👨🏽🌾",
"👨🏾🌾",
"👨🏿🌾",
"👩🌾",
"👩🏻🌾",
"👩🏼🌾",
"👩🏽🌾",
"👩🏾🌾",
"👩🏿🌾",
"👨🍳",
"👨🏻🍳",
"👨🏼🍳",
"👨🏽🍳",
"👨🏾🍳",
"👨🏿🍳",
"👩🍳",
"👩🏻🍳",
"👩🏼🍳",
"👩🏽🍳",
"👩🏾🍳",
"👩🏿🍳",
"👨🔧",
"👨🏻🔧",
"👨🏼🔧",
"👨🏽🔧",
"👨🏾🔧",
"👨🏿🔧",
"👩🔧",
"👩🏻🔧",
"👩🏼🔧",
"👩🏽🔧",
"👩🏾🔧",
"👩🏿🔧",
"👨🏭",
"👨🏻🏭",
"👨🏼🏭",
"👨🏽🏭",
"👨🏾🏭",
"👨🏿🏭",
"👩🏭",
"👩🏻🏭",
"👩🏼🏭",
"👩🏽🏭",
"👩🏾🏭",
"👩🏿🏭",
"👨💼",
"👨🏻💼",
"👨🏼💼",
"👨🏽💼",
"👨🏾💼",
"👨🏿💼",
"👩💼",
"👩🏻💼",
"👩🏼💼",
"👩🏽💼",
"👩🏾💼",
"👩🏿💼",
"👨🔬",
"👨🏻🔬",
"👨🏼🔬",
"👨🏽🔬",
"👨🏾🔬",
"👨🏿🔬",
"👩🔬",
"👩🏻🔬",
"👩🏼🔬",
"👩🏽🔬",
"👩🏾🔬",
"👩🏿🔬",
"👨💻",
"👨🏻💻",
"👨🏼💻",
"👨🏽💻",
"👨🏾💻",
"👨🏿💻",
"👩💻",
"👩🏻💻",
"👩🏼💻",
"👩🏽💻",
"👩🏾💻",
"👩🏿💻",
"👨🎤",
"👨🏻🎤",
"👨🏼🎤",
"👨🏽🎤",
"👨🏾🎤",
"👨🏿🎤",
"👩🎤",
"👩🏻🎤",
"👩🏼🎤",
"👩🏽🎤",
"👩🏾🎤",
"👩🏿🎤",
"👨🎨",
"👨🏻🎨",
"👨🏼🎨",
"👨🏽🎨",
"👨🏾🎨",
"👨🏿🎨",
"👩🎨",
"👩🏻🎨",
"👩🏼🎨",
"👩🏽🎨",
"👩🏾🎨",
"👩🏿🎨",
"👨✈️",
"👨✈",
"👨🏻✈️",
"👨🏻✈",
"👨🏼✈️",
"👨🏼✈",
"👨🏽✈️",
"👨🏽✈",
"👨🏾✈️",
"👨🏾✈",
"👨🏿✈️",
"👨🏿✈",
"👩✈️",
"👩✈",
"👩🏻✈️",
"👩🏻✈",
"👩🏼✈️",
"👩🏼✈",
"👩🏽✈️",
"👩🏽✈",
"👩🏾✈️",
"👩🏾✈",
"👩🏿✈️",
"👩🏿✈",
"👨🚀",
"👨🏻🚀",
"👨🏼🚀",
"👨🏽🚀",
"👨🏾🚀",
"👨🏿🚀",
"👩🚀",
"👩🏻🚀",
"👩🏼🚀",
"👩🏽🚀",
"👩🏾🚀",
"👩🏿🚀",
"👨🚒",
"👨🏻🚒",
"👨🏼🚒",
"👨🏽🚒",
"👨🏾🚒",
"👨🏿🚒",
"👩🚒",
"👩🏻🚒",
"👩🏼🚒",
"👩🏽🚒",
"👩🏾🚒",
"👩🏿🚒",
"👮",
"👮🏻",
"👮🏼",
"👮🏽",
"👮🏾",
"👮🏿",
"👮♂️",
"👮♂",
"👮🏻♂️",
"👮🏻♂",
"👮🏼♂️",
"👮🏼♂",
"👮🏽♂️",
"👮🏽♂",
"👮🏾♂️",
"👮🏾♂",
"👮🏿♂️",
"👮🏿♂",
"👮♀️",
"👮♀",
"👮🏻♀️",
"👮🏻♀",
"👮🏼♀️",
"👮🏼♀",
"👮🏽♀️",
"👮🏽♀",
"👮🏾♀️",
"👮🏾♀",
"👮🏿♀️",
"👮🏿♀",
"🕵️",
"🕵",
"🕵🏻",
"🕵🏼",
"🕵🏽",
"🕵🏾",
"🕵🏿",
"🕵️♂️",
"🕵♂️",
"🕵️♂",
"🕵♂",
"🕵🏻♂️",
"🕵🏻♂",
"🕵🏼♂️",
"🕵🏼♂",
"🕵🏽♂️",
"🕵🏽♂",
"🕵🏾♂️",
"🕵🏾♂",
"🕵🏿♂️",
"🕵🏿♂",
"🕵️♀️",
"🕵♀️",
"🕵️♀",
"🕵♀",
"🕵🏻♀️",
"🕵🏻♀",
"🕵🏼♀️",
"🕵🏼♀",
"🕵🏽♀️",
"🕵🏽♀",
"🕵🏾♀️",
"🕵🏾♀",
"🕵🏿♀️",
"🕵🏿♀",
"💂",
"💂🏻",
"💂🏼",
"💂🏽",
"💂🏾",
"💂🏿",
"💂♂️",
"💂♂",
"💂🏻♂️",
"💂🏻♂",
"💂🏼♂️",
"💂🏼♂",
"💂🏽♂️",
"💂🏽♂",
"💂🏾♂️",
"💂🏾♂",
"💂🏿♂️",
"💂🏿♂",
"💂♀️",
"💂♀",
"💂🏻♀️",
"💂🏻♀",
"💂🏼♀️",
"💂🏼♀",
"💂🏽♀️",
"💂🏽♀",
"💂🏾♀️",
"💂🏾♀",
"💂🏿♀️",
"💂🏿♀",
"👷",
"👷🏻",
"👷🏼",
"👷🏽",
"👷🏾",
"👷🏿",
"👷♂️",
"👷♂",
"👷🏻♂️",
"👷🏻♂",
"👷🏼♂️",
"👷🏼♂",
"👷🏽♂️",
"👷🏽♂",
"👷🏾♂️",
"👷🏾♂",
"👷🏿♂️",
"👷🏿♂",
"👷♀️",
"👷♀",
"👷🏻♀️",
"👷🏻♀",
"👷🏼♀️",
"👷🏼♀",
"👷🏽♀️",
"👷🏽♀",
"👷🏾♀️",
"👷🏾♀",
"👷🏿♀️",
"👷🏿♀",
"🤴",
"🤴🏻",
"🤴🏼",
"🤴🏽",
"🤴🏾",
"🤴🏿",
"👸",
"👸🏻",
"👸🏼",
"👸🏽",
"👸🏾",
"👸🏿",
"👳",
"👳🏻",
"👳🏼",
"👳🏽",
"👳🏾",
"👳🏿",
"👳♂️",
"👳♂",
"👳🏻♂️",
"👳🏻♂",
"👳🏼♂️",
"👳🏼♂",
"👳🏽♂️",
"👳🏽♂",
"👳🏾♂️",
"👳🏾♂",
"👳🏿♂️",
"👳🏿♂",
"👳♀️",
"👳♀",
"👳🏻♀️",
"👳🏻♀",
"👳🏼♀️",
"👳🏼♀",
"👳🏽♀️",
"👳🏽♀",
"👳🏾♀️",
"👳🏾♀",
"👳🏿♀️",
"👳🏿♀",
"👲",
"👲🏻",
"👲🏼",
"👲🏽",
"👲🏾",
"👲🏿",
"🧕",
"🧕🏻",
"🧕🏼",
"🧕🏽",
"🧕🏾",
"🧕🏿",
"🤵",
"🤵🏻",
"🤵🏼",
"🤵🏽",
"🤵🏾",
"🤵🏿",
"👰",
"👰🏻",
"👰🏼",
"👰🏽",
"👰🏾",
"👰🏿",
"🤰",
"🤰🏻",
"🤰🏼",
"🤰🏽",
"🤰🏾",
"🤰🏿",
"🤱",
"🤱🏻",
"🤱🏼",
"🤱🏽",
"🤱🏾",
"🤱🏿",
"👼",
"👼🏻",
"👼🏼",
"👼🏽",
"👼🏾",
"👼🏿",
"🎅",
"🎅🏻",
"🎅🏼",
"🎅🏽",
"🎅🏾",
"🎅🏿",
"🤶",
"🤶🏻",
"🤶🏼",
"🤶🏽",
"🤶🏾",
"🤶🏿",
"🦸",
"🦸🏻",
"🦸🏼",
"🦸🏽",
"🦸🏾",
"🦸🏿",
"🦸♂️",
"🦸♂",
"🦸🏻♂️",
"🦸🏻♂",
"🦸🏼♂️",
"🦸🏼♂",
"🦸🏽♂️",
"🦸🏽♂",
"🦸🏾♂️",
"🦸🏾♂",
"🦸🏿♂️",
"🦸🏿♂",
"🦸♀️",
"🦸♀",
"🦸🏻♀️",
"🦸🏻♀",
"🦸🏼♀️",
"🦸🏼♀",
"🦸🏽♀️",
"🦸🏽♀",
"🦸🏾♀️",
"🦸🏾♀",
"🦸🏿♀️",
"🦸🏿♀",
"🦹",
"🦹🏻",
"🦹🏼",
"🦹🏽",
"🦹🏾",
"🦹🏿",
"🦹♂️",
"🦹♂",
"🦹🏻♂️",
"🦹🏻♂",
"🦹🏼♂️",
"🦹🏼♂",
"🦹🏽♂️",
"🦹🏽♂",
"🦹🏾♂️",
"🦹🏾♂",
"🦹🏿♂️",
"🦹🏿♂",
"🦹♀️",
"🦹♀",
"🦹🏻♀️",
"🦹🏻♀",
"🦹🏼♀️",
"🦹🏼♀",
"🦹🏽♀️",
"🦹🏽♀",
"🦹🏾♀️",
"🦹🏾♀",
"🦹🏿♀️",
"🦹🏿♀",
"🧙",
"🧙🏻",
"🧙🏼",
"🧙🏽",
"🧙🏾",
"🧙🏿",
"🧙♂️",
"🧙♂",
"🧙🏻♂️",
"🧙🏻♂",
"🧙🏼♂️",
"🧙🏼♂",
"🧙🏽♂️",
"🧙🏽♂",
"🧙🏾♂️",
"🧙🏾♂",
"🧙🏿♂️",
"🧙🏿♂",
"🧙♀️",
"🧙♀",
"🧙🏻♀️",
"🧙🏻♀",
"🧙🏼♀️",
"🧙🏼♀",
"🧙🏽♀️",
"🧙🏽♀",
"🧙🏾♀️",
"🧙🏾♀",
"🧙🏿♀️",
"🧙🏿♀",
"🧚",
"🧚🏻",
"🧚🏼",
"🧚🏽",
"🧚🏾",
"🧚🏿",
"🧚♂️",
"🧚♂",
"🧚🏻♂️",
"🧚🏻♂",
"🧚🏼♂️",
"🧚🏼♂",
"🧚🏽♂️",
"🧚🏽♂",
"🧚🏾♂️",
"🧚🏾♂",
"🧚🏿♂️",
"🧚🏿♂",
"🧚♀️",
"🧚♀",
"🧚🏻♀️",
"🧚🏻♀",
"🧚🏼♀️",
"🧚🏼♀",
"🧚🏽♀️",
"🧚🏽♀",
"🧚🏾♀️",
"🧚🏾♀",
"🧚🏿♀️",
"🧚🏿♀",
"🧛",
"🧛🏻",
"🧛🏼",
"🧛🏽",
"🧛🏾",
"🧛🏿",
"🧛♂️",
"🧛♂",
"🧛🏻♂️",
"🧛🏻♂",
"🧛🏼♂️",
"🧛🏼♂",
"🧛🏽♂️",
"🧛🏽♂",
"🧛🏾♂️",
"🧛🏾♂",
"🧛🏿♂️",
"🧛🏿♂",
"🧛♀️",
"🧛♀",
"🧛🏻♀️",
"🧛🏻♀",
"🧛🏼♀️",
"🧛🏼♀",
"🧛🏽♀️",
"🧛🏽♀",
"🧛🏾♀️",
"🧛🏾♀",
"🧛🏿♀️",
"🧛🏿♀",
"🧜",
"🧜🏻",
"🧜🏼",
"🧜🏽",
"🧜🏾",
"🧜🏿",
"🧜♂️",
"🧜♂",
"🧜🏻♂️",
"🧜🏻♂",
"🧜🏼♂️",
"🧜🏼♂",
"🧜🏽♂️",
"🧜🏽♂",
"🧜🏾♂️",
"🧜🏾♂",
"🧜🏿♂️",
"🧜🏿♂",
"🧜♀️",
"🧜♀",
"🧜🏻♀️",
"🧜🏻♀",
"🧜🏼♀️",
"🧜🏼♀",
"🧜🏽♀️",
"🧜🏽♀",
"🧜🏾♀️",
"🧜🏾♀",
"🧜🏿♀️",
"🧜🏿♀",
"🧝",
"🧝🏻",
"🧝🏼",
"🧝🏽",
"🧝🏾",
"🧝🏿",
"🧝♂️",
"🧝♂",
"🧝🏻♂️",
"🧝🏻♂",
"🧝🏼♂️",
"🧝🏼♂",
"🧝🏽♂️",
"🧝🏽♂",
"🧝🏾♂️",
"🧝🏾♂",
"🧝🏿♂️",
"🧝🏿♂",
"🧝♀️",
"🧝♀",
"🧝🏻♀️",
"🧝🏻♀",
"🧝🏼♀️",
"🧝🏼♀",
"🧝🏽♀️",
"🧝🏽♀",
"🧝🏾♀️",
"🧝🏾♀",
"🧝🏿♀️",
"🧝🏿♀",
"🧞",
"🧞♂️",
"🧞♂",
"🧞♀️",
"🧞♀",
"🧟",
"🧟♂️",
"🧟♂",
"🧟♀️",
"🧟♀",
"💆",
"💆🏻",
"💆🏼",
"💆🏽",
"💆🏾",
"💆🏿",
"💆♂️",
"💆♂",
"💆🏻♂️",
"💆🏻♂",
"💆🏼♂️",
"💆🏼♂",
"💆🏽♂️",
"💆🏽♂",
"💆🏾♂️",
"💆🏾♂",
"💆🏿♂️",
"💆🏿♂",
"💆♀️",
"💆♀",
"💆🏻♀️",
"💆🏻♀",
"💆🏼♀️",
"💆🏼♀",
"💆🏽♀️",
"💆🏽♀",
"💆🏾♀️",
"💆🏾♀",
"💆🏿♀️",
"💆🏿♀",
"💇",
"💇🏻",
"💇🏼",
"💇🏽",
"💇🏾",
"💇🏿",
"💇♂️",
"💇♂",
"💇🏻♂️",
"💇🏻♂",
"💇🏼♂️",
"💇🏼♂",
"💇🏽♂️",
"💇🏽♂",
"💇🏾♂️",
"💇🏾♂",
"💇🏿♂️",
"💇🏿♂",
"💇♀️",
"💇♀",
"💇🏻♀️",
"💇🏻♀",
"💇🏼♀️",
"💇🏼♀",
"💇🏽♀️",
"💇🏽♀",
"💇🏾♀️",
"💇🏾♀",
"💇🏿♀️",
"💇🏿♀",
"🚶",
"🚶🏻",
"🚶🏼",
"🚶🏽",
"🚶🏾",
"🚶🏿",
"🚶♂️",
"🚶♂",
"🚶🏻♂️",
"🚶🏻♂",
"🚶🏼♂️",
"🚶🏼♂",
"🚶🏽♂️",
"🚶🏽♂",
"🚶🏾♂️",
"🚶🏾♂",
"🚶🏿♂️",
"🚶🏿♂",
"🚶♀️",
"🚶♀",
"🚶🏻♀️",
"🚶🏻♀",
"🚶🏼♀️",
"🚶🏼♀",
"🚶🏽♀️",
"🚶🏽♀",
"🚶🏾♀️",
"🚶🏾♀",
"🚶🏿♀️",
"🚶🏿♀",
"🧍",
"🧍🏻",
"🧍🏼",
"🧍🏽",
"🧍🏾",
"🧍🏿",
"🧍♂️",
"🧍♂",
"🧍🏻♂️",
"🧍🏻♂",
"🧍🏼♂️",
"🧍🏼♂",
"🧍🏽♂️",
"🧍🏽♂",
"🧍🏾♂️",
"🧍🏾♂",
"🧍🏿♂️",
"🧍🏿♂",
"🧍♀️",
"🧍♀",
"🧍🏻♀️",
"🧍🏻♀",
"🧍🏼♀️",
"🧍🏼♀",
"🧍🏽♀️",
"🧍🏽♀",
"🧍🏾♀️",
"🧍🏾♀",
"🧍🏿♀️",
"🧍🏿♀",
"🧎",
"🧎🏻",
"🧎🏼",
"🧎🏽",
"🧎🏾",
"🧎🏿",
"🧎♂️",
"🧎♂",
"🧎🏻♂️",
"🧎🏻♂",
"🧎🏼♂️",
"🧎🏼♂",
"🧎🏽♂️",
"🧎🏽♂",
"🧎🏾♂️",
"🧎🏾♂",
"🧎🏿♂️",
"🧎🏿♂",
"🧎♀️",
"🧎♀",
"🧎🏻♀️",
"🧎🏻♀",
"🧎🏼♀️",
"🧎🏼♀",
"🧎🏽♀️",
"🧎🏽♀",
"🧎🏾♀️",
"🧎🏾♀",
"🧎🏿♀️",
"🧎🏿♀",
"👨🦯",
"👨🏻🦯",
"👨🏼🦯",
"👨🏽🦯",
"👨🏾🦯",
"👨🏿🦯",
"👩🦯",
"👩🏻🦯",
"👩🏼🦯",
"👩🏽🦯",
"👩🏾🦯",
"👩🏿🦯",
"👨🦼",
"👨🏻🦼",
"👨🏼🦼",
"👨🏽🦼",
"👨🏾🦼",
"👨🏿🦼",
"👩🦼",
"👩🏻🦼",
"👩🏼🦼",
"👩🏽🦼",
"👩🏾🦼",
"👩🏿🦼",
"👨🦽",
"👨🏻🦽",
"👨🏼🦽",
"👨🏽🦽",
"👨🏾🦽",
"👨🏿🦽",
"👩🦽",
"👩🏻🦽",
"👩🏼🦽",
"👩🏽🦽",
"👩🏾🦽",
"👩🏿🦽",
"🏃",
"🏃🏻",
"🏃🏼",
"🏃🏽",
"🏃🏾",
"🏃🏿",
"🏃♂️",
"🏃♂",
"🏃🏻♂️",
"🏃🏻♂",
"🏃🏼♂️",
"🏃🏼♂",
"🏃🏽♂️",
"🏃🏽♂",
"🏃🏾♂️",
"🏃🏾♂",
"🏃🏿♂️",
"🏃🏿♂",
"🏃♀️",
"🏃♀",
"🏃🏻♀️",
"🏃🏻♀",
"🏃🏼♀️",
"🏃🏼♀",
"🏃🏽♀️",
"🏃🏽♀",
"🏃🏾♀️",
"🏃🏾♀",
"🏃🏿♀️",
"🏃🏿♀",
"💃",
"💃🏻",
"💃🏼",
"💃🏽",
"💃🏾",
"💃🏿",
"🕺",
"🕺🏻",
"🕺🏼",
"🕺🏽",
"🕺🏾",
"🕺🏿",
"🕴️",
"🕴",
"🕴🏻",
"🕴🏼",
"🕴🏽",
"🕴🏾",
"🕴🏿",
"👯",
"👯♂️",
"👯♂",
"👯♀️",
"👯♀",
"🧖",
"🧖🏻",
"🧖🏼",
"🧖🏽",
"🧖🏾",
"🧖🏿",
"🧖♂️",
"🧖♂",
"🧖🏻♂️",
"🧖🏻♂",
"🧖🏼♂️",
"🧖🏼♂",
"🧖🏽♂️",
"🧖🏽♂",
"🧖🏾♂️",
"🧖🏾♂",
"🧖🏿♂️",
"🧖🏿♂",
"🧖♀️",
"🧖♀",
"🧖🏻♀️",
"🧖🏻♀",
"🧖🏼♀️",
"🧖🏼♀",
"🧖🏽♀️",
"🧖🏽♀",
"🧖🏾♀️",
"🧖🏾♀",
"🧖🏿♀️",
"🧖🏿♀",
"🧗",
"🧗🏻",
"🧗🏼",
"🧗🏽",
"🧗🏾",
"🧗🏿",
"🧗♂️",
"🧗♂",
"🧗🏻♂️",
"🧗🏻♂",
"🧗🏼♂️",
"🧗🏼♂",
"🧗🏽♂️",
"🧗🏽♂",
"🧗🏾♂️",
"🧗🏾♂",
"🧗🏿♂️",
"🧗🏿♂",
"🧗♀️",
"🧗♀",
"🧗🏻♀️",
"🧗🏻♀",
"🧗🏼♀️",
"🧗🏼♀",
"🧗🏽♀️",
"🧗🏽♀",
"🧗🏾♀️",
"🧗🏾♀",
"🧗🏿♀️",
"🧗🏿♀",
"🤺",
"🏇",
"🏇🏻",
"🏇🏼",
"🏇🏽",
"🏇🏾",
"🏇🏿",
"⛷️",
"⛷",
"🏂",
"🏂🏻",
"🏂🏼",
"🏂🏽",
"🏂🏾",
"🏂🏿",
"🏌️",
"🏌",
"🏌🏻",
"🏌🏼",
"🏌🏽",
"🏌🏾",
"🏌🏿",
"🏌️♂️",
"🏌♂️",
"🏌️♂",
"🏌♂",
"🏌🏻♂️",
"🏌🏻♂",
"🏌🏼♂️",
"🏌🏼♂",
"🏌🏽♂️",
"🏌🏽♂",
"🏌🏾♂️",
"🏌🏾♂",
"🏌🏿♂️",
"🏌🏿♂",
"🏌️♀️",
"🏌♀️",
"🏌️♀",
"🏌♀",
"🏌🏻♀️",
"🏌🏻♀",
"🏌🏼♀️",
"🏌🏼♀",
"🏌🏽♀️",
"🏌🏽♀",
"🏌🏾♀️",
"🏌🏾♀",
"🏌🏿♀️",
"🏌🏿♀",
"🏄",
"🏄🏻",
"🏄🏼",
"🏄🏽",
"🏄🏾",
"🏄🏿",
"🏄♂️",
"🏄♂",
"🏄🏻♂️",
"🏄🏻♂",
"🏄🏼♂️",
"🏄🏼♂",
"🏄🏽♂️",
"🏄🏽♂",
"🏄🏾♂️",
"🏄🏾♂",
"🏄🏿♂️",
"🏄🏿♂",
"🏄♀️",
"🏄♀",
"🏄🏻♀️",
"🏄🏻♀",
"🏄🏼♀️",
"🏄🏼♀",
"🏄🏽♀️",
"🏄🏽♀",
"🏄🏾♀️",
"🏄🏾♀",
"🏄🏿♀️",
"🏄🏿♀",
"🚣",
"🚣🏻",
"🚣🏼",
"🚣🏽",
"🚣🏾",
"🚣🏿",
"🚣♂️",
"🚣♂",
"🚣🏻♂️",
"🚣🏻♂",
"🚣🏼♂️",
"🚣🏼♂",
"🚣🏽♂️",
"🚣🏽♂",
"🚣🏾♂️",
"🚣🏾♂",
"🚣🏿♂️",
"🚣🏿♂",
"🚣♀️",
"🚣♀",
"🚣🏻♀️",
"🚣🏻♀",
"🚣🏼♀️",
"🚣🏼♀",
"🚣🏽♀️",
"🚣🏽♀",
"🚣🏾♀️",
"🚣🏾♀",
"🚣🏿♀️",
"🚣🏿♀",
"🏊",
"🏊🏻",
"🏊🏼",
"🏊🏽",
"🏊🏾",
"🏊🏿",
"🏊♂️",
"🏊♂",
"🏊🏻♂️",
"🏊🏻♂",
"🏊🏼♂️",
"🏊🏼♂",
"🏊🏽♂️",
"🏊🏽♂",
"🏊🏾♂️",
"🏊🏾♂",
"🏊🏿♂️",
"🏊🏿♂",
"🏊♀️",
"🏊♀",
"🏊🏻♀️",
"🏊🏻♀",
"🏊🏼♀️",
"🏊🏼♀",
"🏊🏽♀️",
"🏊🏽♀",
"🏊🏾♀️",
"🏊🏾♀",
"🏊🏿♀️",
"🏊🏿♀",
"⛹️",
"⛹",
"⛹🏻",
"⛹🏼",
"⛹🏽",
"⛹🏾",
"⛹🏿",
"⛹️♂️",
"⛹♂️",
"⛹️♂",
"⛹♂",
"⛹🏻♂️",
"⛹🏻♂",
"⛹🏼♂️",
"⛹🏼♂",
"⛹🏽♂️",
"⛹🏽♂",
"⛹🏾♂️",
"⛹🏾♂",
"⛹🏿♂️",
"⛹🏿♂",
"⛹️♀️",
"⛹♀️",
"⛹️♀",
"⛹♀",
"⛹🏻♀️",
"⛹🏻♀",
"⛹🏼♀️",
"⛹🏼♀",
"⛹🏽♀️",
"⛹🏽♀",
"⛹🏾♀️",
"⛹🏾♀",
"⛹🏿♀️",
"⛹🏿♀",
"🏋️",
"🏋",
"🏋🏻",
"🏋🏼",
"🏋🏽",
"🏋🏾",
"🏋🏿",
"🏋️♂️",
"🏋♂️",
"🏋️♂",
"🏋♂",
"🏋🏻♂️",
"🏋🏻♂",
"🏋🏼♂️",
"🏋🏼♂",
"🏋🏽♂️",
"🏋🏽♂",
"🏋🏾♂️",
"🏋🏾♂",
"🏋🏿♂️",
"🏋🏿♂",
"🏋️♀️",
"🏋♀️",
"🏋️♀",
"🏋♀",
"🏋🏻♀️",
"🏋🏻♀",
"🏋🏼♀️",
"🏋🏼♀",
"🏋🏽♀️",
"🏋🏽♀",
"🏋🏾♀️",
"🏋🏾♀",
"🏋🏿♀️",
"🏋🏿♀",
"🚴",
"🚴🏻",
"🚴🏼",
"🚴🏽",
"🚴🏾",
"🚴🏿",
"🚴♂️",
"🚴♂",
"🚴🏻♂️",
"🚴🏻♂",
"🚴🏼♂️",
"🚴🏼♂",
"🚴🏽♂️",
"🚴🏽♂",
"🚴🏾♂️",
"🚴🏾♂",
"🚴🏿♂️",
"🚴🏿♂",
"🚴♀️",
"🚴♀",
"🚴🏻♀️",
"🚴🏻♀",
"🚴🏼♀️",
"🚴🏼♀",
"🚴🏽♀️",
"🚴🏽♀",
"🚴🏾♀️",
"🚴🏾♀",
"🚴🏿♀️",
"🚴🏿♀",
"🚵",
"🚵🏻",
"🚵🏼",
"🚵🏽",
"🚵🏾",
"🚵🏿",
"🚵♂️",
"🚵♂",
"🚵🏻♂️",
"🚵🏻♂",
"🚵🏼♂️",
"🚵🏼♂",
"🚵🏽♂️",
"🚵🏽♂",
"🚵🏾♂️",
"🚵🏾♂",
"🚵🏿♂️",
"🚵🏿♂",
"🚵♀️",
"🚵♀",
"🚵🏻♀️",
"🚵🏻♀",
"🚵🏼♀️",
"🚵🏼♀",
"🚵🏽♀️",
"🚵🏽♀",
"🚵🏾♀️",
"🚵🏾♀",
"🚵🏿♀️",
"🚵🏿♀",
"🤸",
"🤸🏻",
"🤸🏼",
"🤸🏽",
"🤸🏾",
"🤸🏿",
"🤸♂️",
"🤸♂",
"🤸🏻♂️",
"🤸🏻♂",
"🤸🏼♂️",
"🤸🏼♂",
"🤸🏽♂️",
"🤸🏽♂",
"🤸🏾♂️",
"🤸🏾♂",
"🤸🏿♂️",
"🤸🏿♂",
"🤸♀️",
"🤸♀",
"🤸🏻♀️",
"🤸🏻♀",
"🤸🏼♀️",
"🤸🏼♀",
"🤸🏽♀️",
"🤸🏽♀",
"🤸🏾♀️",
"🤸🏾♀",
"🤸🏿♀️",
"🤸🏿♀",
"🤼",
"🤼♂️",
"🤼♂",
"🤼♀️",
"🤼♀",
"🤽",
"🤽🏻",
"🤽🏼",
"🤽🏽",
"🤽🏾",
"🤽🏿",
"🤽♂️",
"🤽♂",
"🤽🏻♂️",
"🤽🏻♂",
"🤽🏼♂️",
"🤽🏼♂",
"🤽🏽♂️",
"🤽🏽♂",
"🤽🏾♂️",
"🤽🏾♂",
"🤽🏿♂️",
"🤽🏿♂",
"🤽♀️",
"🤽♀",
"🤽🏻♀️",
"🤽🏻♀",
"🤽🏼♀️",
"🤽🏼♀",
"🤽🏽♀️",
"🤽🏽♀",
"🤽🏾♀️",
"🤽🏾♀",
"🤽🏿♀️",
"🤽🏿♀",
"🤾",
"🤾🏻",
"🤾🏼",
"🤾🏽",
"🤾🏾",
"🤾🏿",
"🤾♂️",
"🤾♂",
"🤾🏻♂️",
"🤾🏻♂",
"🤾🏼♂️",
"🤾🏼♂",
"🤾🏽♂️",
"🤾🏽♂",
"🤾🏾♂️",
"🤾🏾♂",
"🤾🏿♂️",
"🤾🏿♂",
"🤾♀️",
"🤾♀",
"🤾🏻♀️",
"🤾🏻♀",
"🤾🏼♀️",
"🤾🏼♀",
"🤾🏽♀️",
"🤾🏽♀",
"🤾🏾♀️",
"🤾🏾♀",
"🤾🏿♀️",
"🤾🏿♀",
"🤹",
"🤹🏻",
"🤹🏼",
"🤹🏽",
"🤹🏾",
"🤹🏿",
"🤹♂️",
"🤹♂",
"🤹🏻♂️",
"🤹🏻♂",
"🤹🏼♂️",
"🤹🏼♂",
"🤹🏽♂️",
"🤹🏽♂",
"🤹🏾♂️",
"🤹🏾♂",
"🤹🏿♂️",
"🤹🏿♂",
"🤹♀️",
"🤹♀",
"🤹🏻♀️",
"🤹🏻♀",
"🤹🏼♀️",
"🤹🏼♀",
"🤹🏽♀️",
"🤹🏽♀",
"🤹🏾♀️",
"🤹🏾♀",
"🤹🏿♀️",
"🤹🏿♀",
"🧘",
"🧘🏻",
"🧘🏼",
"🧘🏽",
"🧘🏾",
"🧘🏿",
"🧘♂️",
"🧘♂",
"🧘🏻♂️",
"🧘🏻♂",
"🧘🏼♂️",
"🧘🏼♂",
"🧘🏽♂️",
"🧘🏽♂",
"🧘🏾♂️",
"🧘🏾♂",
"🧘🏿♂️",
"🧘🏿♂",
"🧘♀️",
"🧘♀",
"🧘🏻♀️",
"🧘🏻♀",
"🧘🏼♀️",
"🧘🏼♀",
"🧘🏽♀️",
"🧘🏽♀",
"🧘🏾♀️",
"🧘🏾♀",
"🧘🏿♀️",
"🧘🏿♀",
"🛀",
"🛀🏻",
"🛀🏼",
"🛀🏽",
"🛀🏾",
"🛀🏿",
"🛌",
"🛌🏻",
"🛌🏼",
"🛌🏽",
"🛌🏾",
"🛌🏿",
"🧑🤝🧑",
"🧑🏻🤝🧑🏻",
"🧑🏼🤝🧑🏻",
"🧑🏼🤝🧑🏼",
"🧑🏽🤝🧑🏻",
"🧑🏽🤝🧑🏼",
"🧑🏽🤝🧑🏽",
"🧑🏾🤝🧑🏻",
"🧑🏾🤝🧑🏼",
"🧑🏾🤝🧑🏽",
"🧑🏾🤝🧑🏾",
"🧑🏿🤝🧑🏻",
"🧑🏿🤝🧑🏼",
"🧑🏿🤝🧑🏽",
"🧑🏿🤝🧑🏾",
"🧑🏿🤝🧑🏿",
"👭",
"👭🏻",
"👩🏼🤝👩🏻",
"👭🏼",
"👩🏽🤝👩🏻",
"👩🏽🤝👩🏼",
"👭🏽",
"👩🏾🤝👩🏻",
"👩🏾🤝👩🏼",
"👩🏾🤝👩🏽",
"👭🏾",
"👩🏿🤝👩🏻",
"👩🏿🤝👩🏼",
"👩🏿🤝👩🏽",
"👩🏿🤝👩🏾",
"👭🏿",
"👫",
"👫🏻",
"👩🏻🤝👨🏼",
"👩🏻🤝👨🏽",
"👩🏻🤝👨🏾",
"👩🏻🤝👨🏿",
"👩🏼🤝👨🏻",
"👫🏼",
"👩🏼🤝👨🏽",
"👩🏼🤝👨🏾",
"👩🏼🤝👨🏿",
"👩🏽🤝👨🏻",
"👩🏽🤝👨🏼",
"👫🏽",
"👩🏽🤝👨🏾",
"👩🏽🤝👨🏿",
"👩🏾🤝👨🏻",
"👩🏾🤝👨🏼",
"👩🏾🤝👨🏽",
"👫🏾",
"👩🏾🤝👨🏿",
"👩🏿🤝👨🏻",
"👩🏿🤝👨🏼",
"👩🏿🤝👨🏽",
"👩🏿🤝👨🏾",
"👫🏿",
"👬",
"👬🏻",
"👨🏼🤝👨🏻",
"👬🏼",
"👨🏽🤝👨🏻",
"👨🏽🤝👨🏼",
"👬🏽",
"👨🏾🤝👨🏻",
"👨🏾🤝👨🏼",
"👨🏾🤝👨🏽",
"👬🏾",
"👨🏿🤝👨🏻",
"👨🏿🤝👨🏼",
"👨🏿🤝👨🏽",
"👨🏿🤝👨🏾",
"👬🏿",
"💏",
"👩❤️💋👨",
"👩❤💋👨",
"👨❤️💋👨",
"👨❤💋👨",
"👩❤️💋👩",
"👩❤💋👩",
"💑",
"👩❤️👨",
"👩❤👨",
"👨❤️👨",
"👨❤👨",
"👩❤️👩",
"👩❤👩",
"👪",
"👨👩👦",
"👨👩👧",
"👨👩👧👦",
"👨👩👦👦",
"👨👩👧👧",
"👨👨👦",
"👨👨👧",
"👨👨👧👦",
"👨👨👦👦",
"👨👨👧👧",
"👩👩👦",
"👩👩👧",
"👩👩👧👦",
"👩👩👦👦",
"👩👩👧👧",
"👨👦",
"👨👦👦",
"👨👧",
"👨👧👦",
"👨👧👧",
"👩👦",
"👩👦👦",
"👩👧",
"👩👧👦",
"👩👧👧",
"🗣️",
"🗣",
"👤",
"👥",
"👣",
"🏻",
"🏼",
"🏽",
"🏾",
"🏿",
"🦰",
"🦱",
"🦳",
"🦲",
"🐵",
"🐒",
"🦍",
"🦧",
"🐶",
"🐕",
"🦮",
"🐕🦺",
"🐩",
"🐺",
"🦊",
"🦝",
"🐱",
"🐈",
"🦁",
"🐯",
"🐅",
"🐆",
"🐴",
"🐎",
"🦄",
"🦓",
"🦌",
"🐮",
"🐂",
"🐃",
"🐄",
"🐷",
"🐖",
"🐗",
"🐽",
"🐏",
"🐑",
"🐐",
"🐪",
"🐫",
"🦙",
"🦒",
"🐘",
"🦏",
"🦛",
"🐭",
"🐁",
"🐀",
"🐹",
"🐰",
"🐇",
"🐿️",
"🐿",
"🦔",
"🦇",
"🐻",
"🐨",
"🐼",
"🦥",
"🦦",
"🦨",
"🦘",
"🦡",
"🐾",
"🦃",
"🐔",
"🐓",
"🐣",
"🐤",
"🐥",
"🐦",
"🐧",
"🕊️",
"🕊",
"🦅",
"🦆",
"🦢",
"🦉",
"🦩",
"🦚",
"🦜",
"🐸",
"🐊",
"🐢",
"🦎",
"🐍",
"🐲",
"🐉",
"🦕",
"🦖",
"🐳",
"🐋",
"🐬",
"🐟",
"🐠",
"🐡",
"🦈",
"🐙",
"🐚",
"🐌",
"🦋",
"🐛",
"🐜",
"🐝",
"🐞",
"🦗",
"🕷️",
"🕷",
"🕸️",
"🕸",
"🦂",
"🦟",
"🦠",
"💐",
"🌸",
"💮",
"🏵️",
"🏵",
"🌹",
"🥀",
"🌺",
"🌻",
"🌼",
"🌷",
"🌱",
"🌲",
"🌳",
"🌴",
"🌵",
"🌾",
"🌿",
"☘️",
"☘",
"🍀",
"🍁",
"🍂",
"🍃",
"🍇",
"🍈",
"🍉",
"🍊",
"🍋",
"🍌",
"🍍",
"🥭",
"🍎",
"🍏",
"🍐",
"🍑",
"🍒",
"🍓",
"🥝",
"🍅",
"🥥",
"🥑",
"🍆",
"🥔",
"🥕",
"🌽",
"🌶️",
"🌶",
"🥒",
"🥬",
"🥦",
"🧄",
"🧅",
"🍄",
"🥜",
"🌰",
"🍞",
"🥐",
"🥖",
"🥨",
"🥯",
"🥞",
"🧇",
"🧀",
"🍖",
"🍗",
"🥩",
"🥓",
"🍔",
"🍟",
"🍕",
"🌭",
"🥪",
"🌮",
"🌯",
"🥙",
"🧆",
"🥚",
"🍳",
"🥘",
"🍲",
"🥣",
"🥗",
"🍿",
"🧈",
"🧂",
"🥫",
"🍱",
"🍘",
"🍙",
"🍚",
"🍛",
"🍜",
"🍝",
"🍠",
"🍢",
"🍣",
"🍤",
"🍥",
"🥮",
"🍡",
"🥟",
"🥠",
"🥡",
"🦀",
"🦞",
"🦐",
"🦑",
"🦪",
"🍦",
"🍧",
"🍨",
"🍩",
"🍪",
"🎂",
"🍰",
"🧁",
"🥧",
"🍫",
"🍬",
"🍭",
"🍮",
"🍯",
"🍼",
"🥛",
"☕",
"🍵",
"🍶",
"🍾",
"🍷",
"🍸",
"🍹",
"🍺",
"🍻",
"🥂",
"🥃",
"🥤",
"🧃",
"🧉",
"🧊",
"🥢",
"🍽️",
"🍽",
"🍴",
"🥄",
"🔪",
"🏺",
"🌍",
"🌎",
"🌏",
"🌐",
"🗺️",
"🗺",
"🗾",
"🧭",
"🏔️",
"🏔",
"⛰️",
"⛰",
"🌋",
"🗻",
"🏕️",
"🏕",
"🏖️",
"🏖",
"🏜️",
"🏜",
"🏝️",
"🏝",
"🏞️",
"🏞",
"🏟️",
"🏟",
"🏛️",
"🏛",
"🏗️",
"🏗",
"🧱",
"🏘️",
"🏘",
"🏚️",
"🏚",
"🏠",
"🏡",
"🏢",
"🏣",
"🏤",
"🏥",
"🏦",
"🏨",
"🏩",
"🏪",
"🏫",
"🏬",
"🏭",
"🏯",
"🏰",
"💒",
"🗼",
"🗽",
"⛪",
"🕌",
"🛕",
"🕍",
"⛩️",
"⛩",
"🕋",
"⛲",
"⛺",
"🌁",
"🌃",
"🏙️",
"🏙",
"🌄",
"🌅",
"🌆",
"🌇",
"🌉",
"♨️",
"♨",
"🎠",
"🎡",
"🎢",
"💈",
"🎪",
"🚂",
"🚃",
"🚄",
"🚅",
"🚆",
"🚇",
"🚈",
"🚉",
"🚊",
"🚝",
"🚞",
"🚋",
"🚌",
"🚍",
"🚎",
"🚐",
"🚑",
"🚒",
"🚓",
"🚔",
"🚕",
"🚖",
"🚗",
"🚘",
"🚙",
"🚚",
"🚛",
"🚜",
"🏎️",
"🏎",
"🏍️",
"🏍",
"🛵",
"🦽",
"🦼",
"🛺",
"🚲",
"🛴",
"🛹",
"🚏",
"🛣️",
"🛣",
"🛤️",
"🛤",
"🛢️",
"🛢",
"⛽",
"🚨",
"🚥",
"🚦",
"🛑",
"🚧",
"⚓",
"⛵",
"🛶",
"🚤",
"🛳️",
"🛳",
"⛴️",
"⛴",
"🛥️",
"🛥",
"🚢",
"✈️",
"✈",
"🛩️",
"🛩",
"🛫",
"🛬",
"🪂",
"💺",
"🚁",
"🚟",
"🚠",
"🚡",
"🛰️",
"🛰",
"🚀",
"🛸",
"🛎️",
"🛎",
"🧳",
"⌛",
"⏳",
"⌚",
"⏰",
"⏱️",
"⏱",
"⏲️",
"⏲",
"🕰️",
"🕰",
"🕛",
"🕧",
"🕐",
"🕜",
"🕑",
"🕝",
"🕒",
"🕞",
"🕓",
"🕟",
"🕔",
"🕠",
"🕕",
"🕡",
"🕖",
"🕢",
"🕗",
"🕣",
"🕘",
"🕤",
"🕙",
"🕥",
"🕚",
"🕦",
"🌑",
"🌒",
"🌓",
"🌔",
"🌕",
"🌖",
"🌗",
"🌘",
"🌙",
"🌚",
"🌛",
"🌜",
"🌡️",
"🌡",
"☀️",
"☀",
"🌝",
"🌞",
"🪐",
"⭐",
"🌟",
"🌠",
"🌌",
"☁️",
"☁",
"⛅",
"⛈️",
"⛈",
"🌤️",
"🌤",
"🌥️",
"🌥",
"🌦️",
"🌦",
"🌧️",
"🌧",
"🌨️",
"🌨",
"🌩️",
"🌩",
"🌪️",
"🌪",
"🌫️",
"🌫",
"🌬️",
"🌬",
"🌀",
"🌈",
"🌂",
"☂️",
"☂",
"☔",
"⛱️",
"⛱",
"⚡",
"❄️",
"❄",
"☃️",
"☃",
"⛄",
"☄️",
"☄",
"🔥",
"💧",
"🌊",
"🎃",
"🎄",
"🎆",
"🎇",
"🧨",
"✨",
"🎈",
"🎉",
"🎊",
"🎋",
"🎍",
"🎎",
"🎏",
"🎐",
"🎑",
"🧧",
"🎀",
"🎁",
"🎗️",
"🎗",
"🎟️",
"🎟",
"🎫",
"🎖️",
"🎖",
"🏆",
"🏅",
"🥇",
"🥈",
"🥉",
"⚽",
"⚾",
"🥎",
"🏀",
"🏐",
"🏈",
"🏉",
"🎾",
"🥏",
"🎳",
"🏏",
"🏑",
"🏒",
"🥍",
"🏓",
"🏸",
"🥊",
"🥋",
"🥅",
"⛳",
"⛸️",
"⛸",
"🎣",
"🤿",
"🎽",
"🎿",
"🛷",
"🥌",
"🎯",
"🪀",
"🪁",
"🎱",
"🔮",
"🧿",
"🎮",
"🕹️",
"🕹",
"🎰",
"🎲",
"🧩",
"🧸",
"♠️",
"♠",
"♥️",
"♥",
"♦️",
"♦",
"♣️",
"♣",
"♟️",
"♟",
"🃏",
"🀄",
"🎴",
"🎭",
"🖼️",
"🖼",
"🎨",
"🧵",
"🧶",
"👓",
"🕶️",
"🕶",
"🥽",
"🥼",
"🦺",
"👔",
"👕",
"👖",
"🧣",
"🧤",
"🧥",
"🧦",
"👗",
"👘",
"🥻",
"🩱",
"🩲",
"🩳",
"👙",
"👚",
"👛",
"👜",
"👝",
"🛍️",
"🛍",
"🎒",
"👞",
"👟",
"🥾",
"🥿",
"👠",
"👡",
"🩰",
"👢",
"👑",
"👒",
"🎩",
"🎓",
"🧢",
"⛑️",
"⛑",
"📿",
"💄",
"💍",
"💎",
"🔇",
"🔈",
"🔉",
"🔊",
"📢",
"📣",
"📯",
"🔔",
"🔕",
"🎼",
"🎵",
"🎶",
"🎙️",
"🎙",
"🎚️",
"🎚",
"🎛️",
"🎛",
"🎤",
"🎧",
"📻",
"🎷",
"🎸",
"🎹",
"🎺",
"🎻",
"🪕",
"🥁",
"📱",
"📲",
"☎️",
"☎",
"📞",
"📟",
"📠",
"🔋",
"🔌",
"💻",
"🖥️",
"🖥",
"🖨️",
"🖨",
"⌨️",
"⌨",
"🖱️",
"🖱",
"🖲️",
"🖲",
"💽",
"💾",
"💿",
"📀",
"🧮",
"🎥",
"🎞️",
"🎞",
"📽️",
"📽",
"🎬",
"📺",
"📷",
"📸",
"📹",
"📼",
"🔍",
"🔎",
"🕯️",
"🕯",
"💡",
"🔦",
"🏮",
"🪔",
"📔",
"📕",
"📖",
"📗",
"📘",
"📙",
"📚",
"📓",
"📒",
"📃",
"📜",
"📄",
"📰",
"🗞️",
"🗞",
"📑",
"🔖",
"🏷️",
"🏷",
"💰",
"💴",
"💵",
"💶",
"💷",
"💸",
"💳",
"🧾",
"💹",
"💱",
"💲",
"✉️",
"✉",
"📧",
"📨",
"📩",
"📤",
"📥",
"📦",
"📫",
"📪",
"📬",
"📭",
"📮",
"🗳️",
"🗳",
"✏️",
"✏",
"✒️",
"✒",
"🖋️",
"🖋",
"🖊️",
"🖊",
"🖌️",
"🖌",
"🖍️",
"🖍",
"📝",
"💼",
"📁",
"📂",
"🗂️",
"🗂",
"📅",
"📆",
"🗒️",
"🗒",
"🗓️",
"🗓",
"📇",
"📈",
"📉",
"📊",
"📋",
"📌",
"📍",
"📎",
"🖇️",
"🖇",
"📏",
"📐",
"✂️",
"✂",
"🗃️",
"🗃",
"🗄️",
"🗄",
"🗑️",
"🗑",
"🔒",
"🔓",
"🔏",
"🔐",
"🔑",
"🗝️",
"🗝",
"🔨",
"🪓",
"⛏️",
"⛏",
"⚒️",
"⚒",
"🛠️",
"🛠",
"🗡️",
"🗡",
"⚔️",
"⚔",
"🔫",
"🏹",
"🛡️",
"🛡",
"🔧",
"🔩",
"⚙️",
"⚙",
"🗜️",
"🗜",
"⚖️",
"⚖",
"🦯",
"🔗",
"⛓️",
"⛓",
"🧰",
"🧲",
"⚗️",
"⚗",
"🧪",
"🧫",
"🧬",
"🔬",
"🔭",
"📡",
"💉",
"🩸",
"💊",
"🩹",
"🩺",
"🚪",
"🛏️",
"🛏",
"🛋️",
"🛋",
"🪑",
"🚽",
"🚿",
"🛁",
"🪒",
"🧴",
"🧷",
"🧹",
"🧺",
"🧻",
"🧼",
"🧽",
"🧯",
"🛒",
"🚬",
"⚰️",
"⚰",
"⚱️",
"⚱",
"🗿",
"🏧",
"🚮",
"🚰",
"♿",
"🚹",
"🚺",
"🚻",
"🚼",
"🚾",
"🛂",
"🛃",
"🛄",
"🛅",
"⚠️",
"⚠",
"🚸",
"⛔",
"🚫",
"🚳",
"🚭",
"🚯",
"🚱",
"🚷",
"📵",
"🔞",
"☢️",
"☢",
"☣️",
"☣",
"⬆️",
"⬆",
"↗️",
"↗",
"➡️",
"➡",
"↘️",
"↘",
"⬇️",
"⬇",
"↙️",
"↙",
"⬅️",
"⬅",
"↖️",
"↖",
"↕️",
"↕",
"↔️",
"↔",
"↩️",
"↩",
"↪️",
"↪",
"⤴️",
"⤴",
"⤵️",
"⤵",
"🔃",
"🔄",
"🔙",
"🔚",
"🔛",
"🔜",
"🔝",
"🛐",
"⚛️",
"⚛",
"🕉️",
"🕉",
"✡️",
"✡",
"☸️",
"☸",
"☯️",
"☯",
"✝️",
"✝",
"☦️",
"☦",
"☪️",
"☪",
"☮️",
"☮",
"🕎",
"🔯",
"♈",
"♉",
"♊",
"♋",
"♌",
"♍",
"♎",
"♏",
"♐",
"♑",
"♒",
"♓",
"⛎",
"🔀",
"🔁",
"🔂",
"▶️",
"▶",
"⏩",
"⏭️",
"⏭",
"⏯️",
"⏯",
"◀️",
"◀",
"⏪",
"⏮️",
"⏮",
"🔼",
"⏫",
"🔽",
"⏬",
"⏸️",
"⏸",
"⏹️",
"⏹",
"⏺️",
"⏺",
"⏏️",
"⏏",
"🎦",
"🔅",
"🔆",
"📶",
"📳",
"📴",
"♀️",
"♀",
"♂️",
"♂",
"⚕️",
"⚕",
"♾️",
"♾",
"♻️",
"♻",
"⚜️",
"⚜",
"🔱",
"📛",
"🔰",
"⭕",
"✅",
"☑️",
"☑",
"✔️",
"✔",
"✖️",
"✖",
"❌",
"❎",
"➕",
"➖",
"➗",
"➰",
"➿",
"〽️",
"〽",
"✳️",
"✳",
"✴️",
"✴",
"❇️",
"❇",
"‼️",
"‼",
"⁉️",
"⁉",
"❓",
"❔",
"❕",
"❗",
"〰️",
"〰",
"©️",
"©",
"®️",
"®",
"™️",
"™",
"#️⃣",
"#⃣",
"*️⃣",
"*⃣",
"0️⃣",
"0⃣",
"1️⃣",
"1⃣",
"2️⃣",
"2⃣",
"3️⃣",
"3⃣",
"4️⃣",
"4⃣",
"5️⃣",
"5⃣",
"6️⃣",
"6⃣",
"7️⃣",
"7⃣",
"8️⃣",
"8⃣",
"9️⃣",
"9⃣",
"🔟",
"🔠",
"🔡",
"🔢",
"🔣",
"🔤",
"🅰️",
"🅰",
"🆎",
"🅱️",
"🅱",
"🆑",
"🆒",
"🆓",
"ℹ️",
"ℹ",
"🆔",
"Ⓜ️",
"Ⓜ",
"🆕",
"🆖",
"🅾️",
"🅾",
"🆗",
"🅿️",
"🅿",
"🆘",
"🆙",
"🆚",
"🈁",
"🈂️",
"🈂",
"🈷️",
"🈷",
"🈶",
"🈯",
"🉐",
"🈹",
"🈚",
"🈲",
"🉑",
"🈸",
"🈴",
"🈳",
"㊗️",
"㊗",
"㊙️",
"㊙",
"🈺",
"🈵",
"🔴",
"🟠",
"🟡",
"🟢",
"🔵",
"🟣",
"🟤",
"⚫",
"⚪",
"🟥",
"🟧",
"🟨",
"🟩",
"🟦",
"🟪",
"🟫",
"⬛",
"⬜",
"◼️",
"◼",
"◻️",
"◻",
"◾",
"◽",
"▪️",
"▪",
"▫️",
"▫",
"🔶",
"🔷",
"🔸",
"🔹",
"🔺",
"🔻",
"💠",
"🔘",
"🔳",
"🔲",
"🏁",
"🚩",
"🎌",
"🏴",
"🏳️",
"🏳",
"🏳️🌈",
"🏳🌈",
"🏴☠️",
"🏴☠",
"🇦🇨",
"🇦🇩",
"🇦🇪",
"🇦🇫",
"🇦🇬",
"🇦🇮",
"🇦🇱",
"🇦🇲",
"🇦🇴",
"🇦🇶",
"🇦🇷",
"🇦🇸",
"🇦🇹",
"🇦🇺",
"🇦🇼",
"🇦🇽",
"🇦🇿",
"🇧🇦",
"🇧🇧",
"🇧🇩",
"🇧🇪",
"🇧🇫",
"🇧🇬",
"🇧🇭",
"🇧🇮",
"🇧🇯",
"🇧🇱",
"🇧🇲",
"🇧🇳",
"🇧🇴",
"🇧🇶",
"🇧🇷",
"🇧🇸",
"🇧🇹",
"🇧🇻",
"🇧🇼",
"🇧🇾",
"🇧🇿",
"🇨🇦",
"🇨🇨",
"🇨🇩",
"🇨🇫",
"🇨🇬",
"🇨🇭",
"🇨🇮",
"🇨🇰",
"🇨🇱",
"🇨🇲",
"🇨🇳",
"🇨🇴",
"🇨🇵",
"🇨🇷",
"🇨🇺",
"🇨🇻",
"🇨🇼",
"🇨🇽",
"🇨🇾",
"🇨🇿",
"🇩🇪",
"🇩🇬",
"🇩🇯",
"🇩🇰",
"🇩🇲",
"🇩🇴",
"🇩🇿",
"🇪🇦",
"🇪🇨",
"🇪🇪",
"🇪🇬",
"🇪🇭",
"🇪🇷",
"🇪🇸",
"🇪🇹",
"🇪🇺",
"🇫🇮",
"🇫🇯",
"🇫🇰",
"🇫🇲",
"🇫🇴",
"🇫🇷",
"🇬🇦",
"🇬🇧",
"🇬🇩",
"🇬🇪",
"🇬🇫",
"🇬🇬",
"🇬🇭",
"🇬🇮",
"🇬🇱",
"🇬🇲",
"🇬🇳",
"🇬🇵",
"🇬🇶",
"🇬🇷",
"🇬🇸",
"🇬🇹",
"🇬🇺",
"🇬🇼",
"🇬🇾",
"🇭🇰",
"🇭🇲",
"🇭🇳",
"🇭🇷",
"🇭🇹",
"🇭🇺",
"🇮🇨",
"🇮🇩",
"🇮🇪",
"🇮🇱",
"🇮🇲",
"🇮🇳",
"🇮🇴",
"🇮🇶",
"🇮🇷",
"🇮🇸",
"🇮🇹",
"🇯🇪",
"🇯🇲",
"🇯🇴",
"🇯🇵",
"🇰🇪",
"🇰🇬",
"🇰🇭",
"🇰🇮",
"🇰🇲",
"🇰🇳",
"🇰🇵",
"🇰🇷",
"🇰🇼",
"🇰🇾",
"🇰🇿",
"🇱🇦",
"🇱🇧",
"🇱🇨",
"🇱🇮",
"🇱🇰",
"🇱🇷",
"🇱🇸",
"🇱🇹",
"🇱🇺",
"🇱🇻",
"🇱🇾",
"🇲🇦",
"🇲🇨",
"🇲🇩",
"🇲🇪",
"🇲🇫",
"🇲🇬",
"🇲🇭",
"🇲🇰",
"🇲🇱",
"🇲🇲",
"🇲🇳",
"🇲🇴",
"🇲🇵",
"🇲🇶",
"🇲🇷",
"🇲🇸",
"🇲🇹",
"🇲🇺",
"🇲🇻",
"🇲🇼",
"🇲🇽",
"🇲🇾",
"🇲🇿",
"🇳🇦",
"🇳🇨",
"🇳🇪",
"🇳🇫",
"🇳🇬",
"🇳🇮",
"🇳🇱",
"🇳🇴",
"🇳🇵",
"🇳🇷",
"🇳🇺",
"🇳🇿",
"🇴🇲",
"🇵🇦",
"🇵🇪",
"🇵🇫",
"🇵🇬",
"🇵🇭",
"🇵🇰",
"🇵🇱",
"🇵🇲",
"🇵🇳",
"🇵🇷",
"🇵🇸",
"🇵🇹",
"🇵🇼",
"🇵🇾",
"🇶🇦",
"🇷🇪",
"🇷🇴",
"🇷🇸",
"🇷🇺",
"🇷🇼",
"🇸🇦",
"🇸🇧",
"🇸🇨",
"🇸🇩",
"🇸🇪",
"🇸🇬",
"🇸🇭",
"🇸🇮",
"🇸🇯",
"🇸🇰",
"🇸🇱",
"🇸🇲",
"🇸🇳",
"🇸🇴",
"🇸🇷",
"🇸🇸",
"🇸🇹",
"🇸🇻",
"🇸🇽",
"🇸🇾",
"🇸🇿",
"🇹🇦",
"🇹🇨",
"🇹🇩",
"🇹🇫",
"🇹🇬",
"🇹🇭",
"🇹🇯",
"🇹🇰",
"🇹🇱",
"🇹🇲",
"🇹🇳",
"🇹🇴",
"🇹🇷",
"🇹🇹",
"🇹🇻",
"🇹🇼",
"🇹🇿",
"🇺🇦",
"🇺🇬",
"🇺🇲",
"🇺🇳",
"🇺🇸",
"🇺🇾",
"🇺🇿",
"🇻🇦",
"🇻🇨",
"🇻🇪",
"🇻🇬",
"🇻🇮",
"🇻🇳",
"🇻🇺",
"🇼🇫",
"🇼🇸",
"🇽🇰",
"🇾🇪",
"🇾🇹",
"🇿🇦",
"🇿🇲",
"🇿🇼",
"🏴",
"🏴",
"🏴",
]
| mit |
2015fallproject/2015fallcase1 | static/Brython3.2.0-20150701-214155/Lib/contextlib.py | 737 | 8788 | """Utilities for with-statement contexts. See PEP 343."""
import sys
from collections import deque
from functools import wraps
__all__ = ["contextmanager", "closing", "ContextDecorator", "ExitStack"]
class ContextDecorator(object):
    """Mixin that lets a context manager double as a function decorator.

    Decorated functions run their body inside ``with self._recreate_cm():``,
    so each call gets the manager's enter/exit behaviour.
    """

    def _recreate_cm(self):
        """Return the context manager instance to use for a decorated call.

        The default simply reuses ``self``.  One-shot managers such as
        _GeneratorContextManager override this to build a fresh instance
        for every call (this private hook exists for them; see issue
        #11647 for the background).
        """
        return self

    def __call__(self, func):
        @wraps(func)
        def wrapper(*args, **kwds):
            with self._recreate_cm():
                return func(*args, **kwds)
        return wrapper
class _GeneratorContextManager(ContextDecorator):
    """Helper for @contextmanager decorator.

    Wraps a single-yield generator: __enter__ runs the generator up to its
    yield and returns the yielded value; __exit__ resumes the generator (or
    throws the in-flight exception into it) and verifies that it finishes.
    """

    def __init__(self, func, *args, **kwds):
        self.gen = func(*args, **kwds)
        # Keep the construction arguments so _recreate_cm() can rebuild a
        # fresh generator for each decorated call.
        self.func, self.args, self.kwds = func, args, kwds

    def _recreate_cm(self):
        # _GCM instances are one-shot context managers, so the
        # CM must be recreated each time a decorated function is
        # called
        return self.__class__(self.func, *self.args, **self.kwds)

    def __enter__(self):
        try:
            return next(self.gen)
        except StopIteration:
            # The generator returned before yielding anything.
            raise RuntimeError("generator didn't yield")

    def __exit__(self, type, value, traceback):
        if type is None:
            # No exception in the with-block: resume the generator past its
            # yield and require it to finish immediately.
            try:
                next(self.gen)
            except StopIteration:
                return
            else:
                raise RuntimeError("generator didn't stop")
        else:
            if value is None:
                # Need to force instantiation so we can reliably
                # tell if we get the same exception back
                value = type()
            try:
                self.gen.throw(type, value, traceback)
                raise RuntimeError("generator didn't stop after throw()")
            except StopIteration as exc:
                # Suppress the exception *unless* it's the same exception that
                # was passed to throw(). This prevents a StopIteration
                # raised inside the "with" statement from being suppressed
                return exc is not value
            except:
                # only re-raise if it's *not* the exception that was
                # passed to throw(), because __exit__() must not raise
                # an exception unless __exit__() itself failed. But throw()
                # has to raise the exception to signal propagation, so this
                # fixes the impedance mismatch between the throw() protocol
                # and the __exit__() protocol.
                #
                if sys.exc_info()[1] is not value:
                    raise
def contextmanager(func):
    """@contextmanager decorator.

    Turns a single-yield generator function into a factory of context
    managers, so that::

        @contextmanager
        def some_generator(<arguments>):
            <setup>
            try:
                yield <value>
            finally:
                <cleanup>

    makes ``with some_generator(<arguments>) as <variable>: <body>``
    behave like running <setup>, binding <variable> to <value>, executing
    <body>, and finally running <cleanup>.
    """
    @wraps(func)
    def make_manager(*args, **kwds):
        return _GeneratorContextManager(func, *args, **kwds)
    return make_manager
class closing(object):
    """Context manager that calls ``thing.close()`` when the block exits.

    ``with closing(obj) as f: <block>`` is equivalent to binding
    ``f = obj`` and running <block> inside a try/finally whose finally
    clause invokes ``obj.close()``, whether or not the block raised.
    """

    def __init__(self, thing):
        self.thing = thing

    def __enter__(self):
        # Hand the wrapped object straight back to the with-statement.
        return self.thing

    def __exit__(self, *exc_info):
        # Always close, regardless of whether an exception is in flight.
        self.thing.close()
# Inspired by discussions on http://bugs.python.org/issue13585
class ExitStack(object):
    """Context manager for dynamic management of a stack of exit callbacks

    For example:

        with ExitStack() as stack:
            files = [stack.enter_context(open(fname)) for fname in filenames]
            # All opened files will automatically be closed at the end of
            # the with statement, even if attempts to open files later
            # in the list raise an exception

    """
    def __init__(self):
        # Callbacks are appended on the right and popped from the right,
        # giving LIFO (innermost-first) unwind order.
        self._exit_callbacks = deque()

    def pop_all(self):
        """Preserve the context stack by transferring it to a new instance"""
        new_stack = type(self)()
        new_stack._exit_callbacks = self._exit_callbacks
        self._exit_callbacks = deque()
        return new_stack

    def _push_cm_exit(self, cm, cm_exit):
        """Helper to correctly register callbacks to __exit__ methods"""
        def _exit_wrapper(*exc_details):
            return cm_exit(cm, *exc_details)
        # Expose the wrapped manager for introspection, mimicking a bound method.
        _exit_wrapper.__self__ = cm
        self.push(_exit_wrapper)

    def push(self, exit):
        """Registers a callback with the standard __exit__ method signature

        Can suppress exceptions the same way __exit__ methods can.

        Also accepts any object with an __exit__ method (registering a call
        to the method instead of the object itself)
        """
        # We use an unbound method rather than a bound method to follow
        # the standard lookup behaviour for special methods
        _cb_type = type(exit)
        try:
            exit_method = _cb_type.__exit__
        except AttributeError:
            # Not a context manager, so assume it's a callable
            self._exit_callbacks.append(exit)
        else:
            self._push_cm_exit(exit, exit_method)
        return exit # Allow use as a decorator

    def callback(self, callback, *args, **kwds):
        """Registers an arbitrary callback and arguments.

        Cannot suppress exceptions.
        """
        def _exit_wrapper(exc_type, exc, tb):
            callback(*args, **kwds)
        # We changed the signature, so using @wraps is not appropriate, but
        # setting __wrapped__ may still help with introspection
        _exit_wrapper.__wrapped__ = callback
        self.push(_exit_wrapper)
        return callback # Allow use as a decorator

    def enter_context(self, cm):
        """Enters the supplied context manager

        If successful, also pushes its __exit__ method as a callback and
        returns the result of the __enter__ method.
        """
        # We look up the special methods on the type to match the with statement
        _cm_type = type(cm)
        _exit = _cm_type.__exit__
        result = _cm_type.__enter__(cm)
        self._push_cm_exit(cm, _exit)
        return result

    def close(self):
        """Immediately unwind the context stack"""
        self.__exit__(None, None, None)

    def __enter__(self):
        return self

    def __exit__(self, *exc_details):
        received_exc = exc_details[0] is not None

        # We manipulate the exception state so it behaves as though
        # we were actually nesting multiple with statements
        frame_exc = sys.exc_info()[1]
        def _fix_exception_context(new_exc, old_exc):
            # Walk the __context__ chain of new_exc and splice old_exc onto
            # its end, stopping if we reach the exception already active in
            # this frame (to avoid creating a context cycle).
            while 1:
                exc_context = new_exc.__context__
                if exc_context in (None, frame_exc):
                    break
                new_exc = exc_context
            new_exc.__context__ = old_exc

        # Callbacks are invoked in LIFO order to match the behaviour of
        # nested context managers
        suppressed_exc = False
        pending_raise = False
        while self._exit_callbacks:
            cb = self._exit_callbacks.pop()
            try:
                if cb(*exc_details):
                    # A truthy return suppresses the current exception, just
                    # as it would from a real __exit__ method.
                    suppressed_exc = True
                    pending_raise = False
                    exc_details = (None, None, None)
            except:
                new_exc_details = sys.exc_info()
                # simulate the stack of exceptions by setting the context
                _fix_exception_context(new_exc_details[1], exc_details[1])
                pending_raise = True
                exc_details = new_exc_details
        if pending_raise:
            try:
                # bare "raise exc_details[1]" replaces our carefully
                # set-up context
                fixed_ctx = exc_details[1].__context__
                raise exc_details[1]
            except BaseException:
                exc_details[1].__context__ = fixed_ctx
                raise
        return received_exc and suppressed_exc
| agpl-3.0 |
eric-stanley/youtube-dl | youtube_dl/extractor/morningstar.py | 220 | 1732 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
class MorningstarIE(InfoExtractor):
    """Extractor for videos hosted on morningstar.com's video center."""
    IE_DESC = 'morningstar.com'
    _VALID_URL = r'https?://(?:www\.)?morningstar\.com/[cC]over/video[cC]enter\.aspx\?id=(?P<id>[0-9]+)'

    _TEST = {
        'url': 'http://www.morningstar.com/cover/videocenter.aspx?id=615869',
        'md5': '6c0acface7a787aadc8391e4bbf7b0f5',
        'info_dict': {
            'id': '615869',
            'ext': 'mp4',
            'title': 'Get Ahead of the Curve on 2013 Taxes',
            'description': "Vanguard's Joel Dickson on managing higher tax rates for high-income earners and fund capital-gain distributions in 2013.",
            'thumbnail': r're:^https?://.*m(?:orning)?star\.com/.+thumb\.jpg$'
        }
    }

    def _real_extract(self, url):
        # The numeric video id is captured by the 'id' group of _VALID_URL.
        video_id = re.match(self._VALID_URL, url).group('id')
        page = self._download_webpage(url, video_id)

        # Metadata lives in plain markup / hidden form fields on the page;
        # thumbnail and description are optional (fatal=False).
        title = self._html_search_regex(
            r'<h1 id="titleLink">(.*?)</h1>', page, 'title')
        video_url = self._html_search_regex(
            r'<input type="hidden" id="hidVideoUrl" value="([^"]+)"',
            page, 'video URL')
        thumbnail = self._html_search_regex(
            r'<input type="hidden" id="hidSnapshot" value="([^"]+)"',
            page, 'thumbnail', fatal=False)
        description = self._html_search_regex(
            r'<div id="mstarDeck".*?>(.*?)</div>',
            page, 'description', fatal=False)

        return {
            'id': video_id,
            'title': title,
            'url': video_url,
            'thumbnail': thumbnail,
            'description': description,
        }
| unlicense |
comjoy91/SKorean-Election_result-Crawler | crawlers/electorates/local_administration.py | 2 | 4324 | #!/usr/bin/env python
# -*- encoding=utf-8 -*-
from crawlers.electorates.base_provincePage import *
from utils import sanitize, InvalidCrawlerError
def Crawler(nth, election_name, electionType, target):
    """Build the elector crawler appropriate for the *nth* election round.

    Raises NotImplementedError for rounds in which no educational
    superintendent election was held (target 'local-ea', rounds 1-4) and
    InvalidCrawlerError for rounds this module does not support.
    """
    if target == 'local-ea':
        # Educational superintendent elections did not exist before the
        # 5th round; map the round number to the election year for the
        # error message.
        no_ea_election_years = {1: '1995', 2: '1998', 3: '2002', 4: '2006'}
        if nth in no_ea_election_years:
            raise NotImplementedError(
                'Educational Superintendent Election was not held in %s.'
                % no_ea_election_years[nth])

    # NOTE(review): these crawler classes are presumably exported by the
    # star import from base_provincePage -- confirm they resolve at runtime.
    if 1 <= nth <= 3:
        return Elector_Crawler_GuOld(int(nth), election_name, electionType, target)
    if 4 <= nth <= 6:
        return Elector_Crawler_Old(int(nth), election_name, electionType, target)
    if nth == 7:
        raise InvalidCrawlerError('townCode', nth, election_name, electionType, target)
        # Code for entering via the "recent elections" pages:
        # crawler = Elector_Crawler_Recent(int(nth), election_name, electionType, target)
    raise InvalidCrawlerError('townCode', nth, election_name, electionType, target)
class LocalDivision_ElectorCrawler_GuOld(MultiCityCrawler_province):
    """Electorate crawler for rounds 1-3, served from the NEC's oldest
    ("GuOld") result pages."""

    def __init__(self, nth, _election_name, _election_type, _target):
        self.nth = nth                      # election round number (1-3)
        self.target = _target
        self.elemType = 'local_division'
        self.isRecent = False               # selects the legacy page layout
        # Data crawled here is grouped by administrative division
        # (si/gun/gu, including administrative "gu" sub-districts).
        # Rounds 1-2: results are grouped by National Assembly electoral
        # district, so areas split into "gap"/"eul" sub-districts within the
        # same si/gun/gu appear separately; merged constituencies are still
        # itemized per si/gun/gu.
        # Round 3: results are grouped by si/gun/gu.
        self.urlPath_city_codes = 'http://info.nec.go.kr/bizcommon/selectbox/selectbox_cityCodeBySgJson_GuOld.json'
        self.urlParam_city_codes = dict(electionId='0000000000',
                electionCode=_election_name, subElectionCode=_election_type)
        self.urlPath_town_list = 'http://info.nec.go.kr/electioninfo/electionInfo_report.xhtml'
        # NOTE(review): statementId 'BIPB92_#1' differs from the 'BIPB02_#2'
        # used by the other crawlers in this module -- confirm intentional.
        self.urlParam_town_list = dict(electionId='0000000000', electionName=_election_name,
                requestURI='/WEB-INF/jsp/electioninfo/0000000000/bi/bipb02.jsp',
                statementId='BIPB92_#1',
                oldElectionType=1, electionType=4, electionCode=-1,
                searchType=2, townCode=-1, sggCityCode=-1)
class LocalDivision_ElectorCrawler_Old(MultiCityCrawler_province):
    """Electorate crawler for rounds 4-6 ("Old" NEC result pages)."""

    def __init__(self, nth, _election_name, _election_type, _target):
        self.nth = nth                      # election round number (4-6)
        self.target = _target
        self.elemType = 'local_division'
        self.isRecent = False               # selects the legacy page layout
        # Data crawled here is grouped by administrative division
        # (si/gun/gu, including administrative "gu" sub-districts).
        self.urlPath_city_codes = 'http://info.nec.go.kr/bizcommon/selectbox/selectbox_cityCodeBySgJson_Old.json'
        self.urlParam_city_codes = dict(electionId='0000000000',
                electionCode=_election_name, subElectionCode=_election_type)
        self.urlPath_town_list = 'http://info.nec.go.kr/electioninfo/electionInfo_report.xhtml'
        self.urlParam_town_list = dict(electionId='0000000000', electionName=_election_name,
                requestURI='/WEB-INF/jsp/electioninfo/0000000000/bi/bipb02.jsp',
                statementId='BIPB02_#2',
                oldElectionType=1, electionType=4, electionCode=-1,
                searchType=2, townCode=-1, sggCityCode=-1)
class LocalDivision_ElectorCrawler_Recent(MultiCityCrawler_province):
    """Electorate crawler for the NEC's current ("recent elections") pages.

    Unlike the GuOld/Old variants, the election id is passed directly in
    the request parameters rather than the sentinel '0000000000'.
    """

    def __init__(self, nth, _election_name, _election_type, _target):
        self.nth = nth                      # election round number
        self.target = _target
        self.elemType = 'local_division'
        self.isRecent = True                # selects the current page layout
        # Data crawled here is grouped by administrative division
        # (si/gun/gu, including administrative "gu" sub-districts).
        self.urlPath_city_codes = 'http://info.nec.go.kr/bizcommon/selectbox/selectbox_cityCodeBySgJson.json'
        self.urlParam_city_codes = dict(electionId=_election_name, electionCode=_election_type)
        self.urlPath_town_list = 'http://info.nec.go.kr/electioninfo/electionInfo_report.xhtml'
        self.urlParam_town_list = dict(electionId=_election_name, statementId='BIPB02_#2',
                requestURI='/WEB-INF/jsp/electioninfo/'+_election_name+'/bi/bipb02.jsp',
                electionCode=-1, searchType=2, townCode=-1)
| apache-2.0 |
wujf/rethinkdb | scripts/visualize_log_serializer.py | 48 | 25055 | #!/usr/bin/env python
# Copyright 2010-2012 RethinkDB, all rights reserved.
from collections import namedtuple
from parse_binary import *
import sys, os, traceback
def escape(string):
    """Escape '&', '<' and '>' so *string* can be embedded in HTML text.

    As written, the replacements were identity no-ops (replacing "&" with
    "&", etc.), so messages and names were emitted into the report
    unescaped; this restores the intended HTML entities.
    """
    # '&' must be replaced first so the entities added below are not re-escaped.
    return string.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
def print_anchor(obj):
print """<a name="obj-%d"/>""" % id(obj)
# Block IDs are serialized as 64-bit unsigned integers; reuse the generic
# parser (presumably provided by parse_binary's star import -- confirm).
parse_block_id = parse_uint64_t
# This is sort of unintuitive, so pay attention.
#
# The visualizer needs to be robust even if there is corruption in a file. One approach to this is
# to carefully check every part of the file as we parse it; if there is an error, we can then record
# it and then move on. However, this approach breaks down if we forget to check something; the
# visualizer crashes without producing any output, and the traceback is all that the programmer has
# to figure out what went wrong in the log-serializer.
#
# The approach that we use instead is to trap exceptions. Whenever we parse a chunk of the file, we
# use the try_parse() function. If an exception occurs during the parsing, it returns a BadChunk
# object that holds the error message; if not, it returns a GoodChunk object that returns the
# original object. This way, if something goes wrong, we get an error report embedded in the file
# instead of on the console, even if we didn't plan to check for that error condition.
#
# try_store() is the companion to try_parse(). It is responsible for storing the [Good|Bad]Chunk
# that was produced by try_parse(). It uses a user-defined function to record the chunk. If the
# user-defined function fails, then we assume that it wasn't valid to try to parse that chunk of
# memory (perhaps it overlapped with another existing chunk) and we produce a BadChunkRef object
# instead
#
# The difference between BadChunk and BadChunkRef is that you get a BadChunk if you were looking
# in the right place for the data, but it was corrupted, and you get a BadChunkRef if your pointer
# is bad.
class Chunk(object):
    """Base class for a parsed region of the file.

    Concrete subclasses are GoodChunk, BadChunk and BadChunkRef (see the
    long comment above for when each is produced).
    """
    pass
class GoodChunk(Chunk):
    """A chunk that parsed successfully; wraps the parsed object in chunk_obj."""
    def __init__(self, offset, length, name, obj):
        self.offset = offset
        self.length = length
        self.name = name
        self.chunk_obj = obj
        self.chunk_ok = True    # consumers test this flag before touching chunk_obj
    def ref_as_html(self):
        # Hyperlink to this chunk's printed block, labelled with its file offset.
        return """<a href="#obj-%d">0x%x</a>""" % (id(self), self.offset)
    def chunk_print_html(self):
        # Anchored block: a header with the byte range, then the object's own HTML.
        print """<div class="block">"""
        print """<a name="obj-%d"/>""" % id(self)
        print """<h1>0x%x - 0x%x: %s</h1>""" % (self.offset, self.offset + self.length - 1, self.name)
        self.chunk_obj.print_html()
        print """</div>"""
class BadChunk(Chunk):
    """A chunk whose bytes were in range but failed to parse.

    Carries the error traceback (msg) and the raw bytes (contents) so the
    report can show a hexdump of the corrupted region.
    """
    def __init__(self, offset, length, name, contents, msg):
        self.offset = offset
        self.length = length
        self.name = name
        self.msg = msg
        self.contents = contents
        self.chunk_ok = False
    def ref_as_html(self):
        return """<a href="#obj-%d">0x%x</a>""" % (id(self), self.offset)
    def chunk_print_html(self):
        # Same layout as GoodChunk, but with the traceback and a hexdump
        # of the raw bytes instead of parsed content.
        print """<div class="block">"""
        print """<a name="obj-%d"/>""" % id(self)
        print """<h1>0x%x - 0x%x: %s</h1>""" % (self.offset, self.offset + self.length - 1, self.name)
        print """<pre style="color: red">%s</pre>""" % escape(self.msg)
        print """<div class="hexdump">%s</div>""" % \
            " ".join(x.encode("hex") for x in self.contents)
        print """</div>"""
class BadChunkRef(Chunk):
    """A chunk whose (offset, length) reference itself was invalid.

    Unlike BadChunk, we never got as far as looking at bytes: the pointer
    was out of range, overlapped another chunk, or was otherwise unusable.
    """
    def __init__(self, offset, length, name, msg):
        self.offset = offset
        self.length = length
        self.name = name
        self.msg = msg
        self.chunk_ok = False
    def ref_as_html(self):
        return """<span style="color: red">Bad reference to 0x%x: <code>%s</code></span>""" % \
            (self.offset, escape(self.msg))
    def chunk_print_html(self):
        print """<span style="color: red">Bad reference (but we're so screwed that we think it's
        inside of a block): %s</span>""" % self.msg
def try_parse(db, offset, length, name, cls, *args):
    """Parse `length` bytes at `offset` as `cls`, trapping all failures.

    Returns a GoodChunk on success, a BadChunk if the bytes were in range
    but cls.from_data() raised, or a BadChunkRef if the (offset, length)
    pair itself is invalid.  See the long comment above for the rationale.
    """
    if offset < 0:
        return BadChunkRef(offset, length, name, "Negative offset")
    elif length <= 0:
        return BadChunkRef(offset, length, name, "Negative or zero length")
    elif offset + length > len(db.block):
        return BadChunkRef(
            offset, length, name,
            "Chunk 0x%x - 0x%x is beyond end of file (0x%x)" % (offset, offset + length - 1, len(db.block))
        )
    try:
        x = cls.from_data(db, offset, *args)
        assert isinstance(x, cls)
    except Exception:
        # Deliberately broad: any parse failure becomes a BadChunk embedded
        # in the report instead of crashing the visualizer.
        chunk = BadChunk(offset, length, name, db.block[offset: offset + length], traceback.format_exc())
    else:
        chunk = GoodChunk(offset, length, name, x)
    return chunk
def try_store(chunk, fun, *args):
    """Record `chunk` via `fun(chunk, *args)`, demoting it on failure.

    A chunk that is already a BadChunkRef is passed through untouched.  If
    the storing function raises, the chunk is replaced by a BadChunkRef
    carrying the traceback (see the companion comment above try_parse).
    """
    if isinstance(chunk, BadChunkRef):
        return chunk
    try:
        fun(chunk, *args)
    except Exception:
        # The pointer must have been bad (e.g. it overlapped another chunk).
        return BadChunkRef(chunk.offset, chunk.length, chunk.name, traceback.format_exc())
    return chunk
class Database(object):
    """Parses a whole log-serializer file and indexes it as a dict of extents.

    `block` is the raw file contents; `extents` maps extent start offset to
    the Chunk covering [offset, offset + extent_size).
    """
    def __init__(self, block):
        self.block = block
        self.extents = {}
        # Determine configuration info
        self.device_block_size = 0x1000
        initial_static_header = StaticHeader.from_data(self, 0)
        self.mb_extents = [0, 4]
        self.extent_size = initial_static_header.sh.extent_size
        self.block_size = initial_static_header.sh.btree_block_size
        # Sanity-check the size hierarchy: file / extent / block / device block.
        assert len(self.block) % self.extent_size == 0
        assert self.extent_size % self.block_size == 0
        assert self.block_size % self.device_block_size == 0
        # Read metablock extents
        metablock_versions = {}
        metablock_extents = []
        for mb_extent in self.mb_extents:
            offset = mb_extent * self.extent_size
            if offset >= len(self.block): continue
            extent = try_store(try_parse(self, offset, self.extent_size, "Metablock Extent", MetablockExtent), self.add_extent)
            metablock_extents.append(extent)
            if extent.chunk_ok:
                for mb in extent.chunk_obj.metablocks:
                    if mb.chunk_ok:
                        metablock_versions[mb.chunk_obj.mb.version] = mb
        # Choose a metablock: the highest version number is the most
        # recently written one.
        if metablock_versions:
            self.use_metablock(metablock_versions[max(metablock_versions.keys())])
            # Notify the metablock extents that we found a valid metablock so that they don't print
            # all of the invalid ones
            for extent in metablock_extents:
                if extent.chunk_ok:
                    extent.chunk_obj.found_a_valid_metablock = True
        else:
            self.metablock = None
        # Fill in empty extents with placeholders
        for offset in xrange(0, max(self.extents.keys()) + 1, self.extent_size):
            if offset not in self.extents:
                self.add_extent(GoodChunk(offset, self.extent_size, "Unused Extent", UnusedExtent()))
    def add_extent(self, extent):
        """Register `extent` in self.extents; raise if it is misaligned or a duplicate."""
        assert isinstance(extent, Chunk)
        assert extent.name.endswith("Extent")
        if extent.offset % self.extent_size != 0:
            raise ValueError("Misaligned extent: 0x%x is not a multiple of 0x%x." % (extent.offset, self.extent_size))
        if extent.offset in self.extents:
            raise ValueError("Duplicate extent: 0x%x was parsed already." % extent.offset)
        self.extents[extent.offset] = extent
    def use_metablock(self, mb):
        """Adopt `mb` as the authoritative metablock and walk everything it references."""
        self.metablock = mb
        lba_index_part = mb.chunk_obj.mb.metablock.lba_index_part
        # Read the current LBA extent
        if lba_index_part.last_lba_extent_offset >= 0:
            first_lba_extent = try_parse(
                self, lba_index_part.last_lba_extent_offset, self.extent_size, "LBA Extent", LBAExtent,
                lba_index_part.last_lba_extent_entries_count)
            first_lba_extent = try_store(first_lba_extent, self.add_extent)
        else:
            first_lba_extent = None
        # Read the LBA superblock and its sub-extents
        if lba_index_part.lba_superblock_offset >= 0:
            # 16 bytes per entry; round the size up to a whole device block.
            size = lba_index_part.lba_superblock_entries_count * 16 + 1
            while size % self.device_block_size != 0: size += 1
            lba_superblock = try_parse(self, lba_index_part.lba_superblock_offset, size, "LBA Superblock", LBASuperblock,
                lba_index_part.lba_superblock_entries_count)
            lba_superblock = try_store(lba_superblock, LBASuperblockExtent.store_superblock, self)
        else:
            lba_superblock = None
        # Reconstruct the LBA index
        lba_extents = []
        if lba_superblock and lba_superblock.chunk_ok:
            lba_extents.extend(lba_superblock.chunk_obj.lba_extents)
        if first_lba_extent:
            lba_extents.append(first_lba_extent)
        lba = {}
        data_blocks = {}
        # Iterate newest-to-oldest so the most recent entry for a block ID wins.
        for extent in reversed(lba_extents):
            if extent.chunk_ok:
                for pair in reversed(extent.chunk_obj.pairs):
                    if isinstance(pair, LBAPair) and pair.block_id not in lba:
                        lba[pair.block_id] = pair.block_offset
                        data_blocks[pair.block_id] = self.use_lba_pair(pair)
        self.metablock.chunk_obj.was_used(first_lba_extent, lba_superblock, lba, data_blocks)
    def use_lba_pair(self, pair):
        """Parse the data block an LBA pair points at (None for deletions)."""
        if pair.block_offset == "delete":
            data_block = None
        else:
            data_block = try_parse(self, pair.block_offset, self.block_size, "Data Block %d" % pair.block_id, DataBlock)
            data_block = try_store(data_block, DataBlockExtent.store_data_block, self)
        pair.was_used(data_block)
        return data_block
    def print_html(self):
        """Dump the whole database, extent by extent, to stdout as HTML."""
        print """<h1>Database</h1>"""
        print """<p>End of file is at 0x%x</p>""" % len(self.block)
        if self.metablock:
            print """<p>Most recent metablock: %s</p>""" % self.metablock.ref_as_html()
        else:
            print """<p>No valid metablocks found.</p>"""
        for i in sorted(self.extents.keys()):
            self.extents[i].chunk_print_html()
class UnusedExtent(object):
    """Placeholder for an extent that nothing reachable points into."""
    def print_html(self):
        print """<p>Nothing in this extent is reachable from the most recent metablock.</p>"""
class MetablockExtent(object):
    """An extent holding the static header followed by metablock slots."""
    @classmethod
    def from_data(cls, db, offset):
        # The first device block is the static header; every following
        # device block in the extent is a metablock slot.
        static_header = try_parse(db, offset, db.device_block_size, "Static Header", StaticHeader)
        metablocks = []
        for o in xrange(db.device_block_size, db.extent_size, db.device_block_size):
            metablocks.append(try_parse(db, offset + o, db.device_block_size, "Metablock", Metablock))
        return MetablockExtent(static_header, metablocks)
    def __init__(self, static_header, metablocks):
        self.static_header = static_header
        self.metablocks = metablocks
        # If there are no valid metablocks in the entire database, we want to print each metablock
        # that we tried and explain why it failed. If there is a valid metablock, that information
        # is just junk. The database sets found_a_valid_metablock to True if it found at least one
        # valid metablock.
        self.found_a_valid_metablock = False
    def print_html(self):
        self.static_header.chunk_print_html()
        for metablock in self.metablocks:
            # Suppress unparseable slots once any valid metablock exists.
            if not self.found_a_valid_metablock or metablock.chunk_ok:
                metablock.chunk_print_html()
class StaticHeader(object):
    """The fixed file header: magic strings plus block and extent sizes."""
    @classmethod
    def from_data(cls, db, offset):
        # The constants double as a magic-number check: parse_constant
        # raises if the bytes do not match exactly.
        static_header, parse_static_header = make_struct(
            "static_header",
            [
                (None, parse_constant("RethinkDB\0")),
                (None, parse_constant("0.0.0\0")),
                (None, parse_constant("BTree Blocksize:\0")),
                ("btree_block_size", parse_uint64_t),
                (None, parse_constant("Extent Size:\0")),
                ("extent_size", parse_uint64_t)
            ]
        )
        sh = parse_static_header(db.block, offset)[0]
        return StaticHeader(sh)
    def __init__(self, sh):
        self.sh = sh
    def print_html(self):
        print """<p>Block size: %d</p>""" % self.sh.btree_block_size
        print """<p>Extent size: %d</p>""" % self.sh.extent_size
class Metablock(object):
# The parser object is a little bit slow to construct, and many many metablocks are read, so
# we cache the parser object.
crc_metablock_parser_cache = {}
@classmethod
def make_parser(c, markers):
if markers not in c.crc_metablock_parser_cache:
def maybe(p):
if markers: return p
else: return parse_padding(0)
extent_manager_mb, parse_extent_manager_mb = make_struct(
"extent_manager_mb",
[("last_extent", parse_off64_t)]
)
lba_index_mb, parse_lba_index_mb = make_struct(
"lba_index_mb",
[
("last_lba_extent_offset", parse_off64_t),
("last_lba_extent_entries_count", parse_int),
(None, parse_padding(4)),
("lba_superblock_offset", parse_off64_t),
("lba_superblock_entries_count", parse_int),
(None, parse_padding(4))
]
)
data_block_mb, parse_data_block_mb = make_struct(
"data_block_mb",
[
("last_data_extent", parse_off64_t),
("blocks_in_last_data_extent", parse_int),
(None, parse_padding(4))
]
)
metablock_t, parse_metablock = make_struct(
"metablock_t",
[
("extent_manager_part", parse_extent_manager_mb),
("lba_index_part", parse_lba_index_mb),
("data_block_manager_part", parse_data_block_mb),
]
)
crc_metablock_t, parse_crc_metablock = make_struct(
"crc_metablock_t",
[
(None, maybe(parse_constant("metablock\xbd"))),
(None, maybe(parse_constant("crc:\xbd"))),
("crc", parse_uint32_t),
(None, maybe(parse_padding(1))),
(None, maybe(parse_constant("version:"))),
("version", parse_int),
("metablock", parse_metablock)
]
)
c.crc_metablock_parser_cache[markers] = parse_crc_metablock
return c.crc_metablock_parser_cache[markers]
@classmethod
def from_data(c, db, offset):
# Try to read the metablock with and without markers; if either works, we're good.
try:
mb = c.make_parser(True)(db.block, offset)[0]
assert mb.version > 0
except Exception, e:
error1 = traceback.format_exc()
try:
mb = c.make_parser(False)(db.block, offset)[0]
assert mb.version > 0
except Exception, e:
error2 = traceback.format_exc()
raise ValueError(
"Invalid metablock.\n\n"
"Problem when trying to parse with markers:\n\n" error1 "\n"
"Problem when trying to parse without markers:\n\n" error2
)
return Metablock(mb)
def __init__(self, mb):
self.mb = mb
self.chosen = False
def was_used(self, first_lba_extent, lba_superblock, lba, data_blocks):
self.chosen = True
self.first_lba_extent = first_lba_extent
self.lba_superblock = lba_superblock
self.lba = lba
self.data_blocks = data_blocks
def print_html(self):
print """<table>"""
print """<tr><td>CRC</td><td>0x%.8x</td></tr>""" % self.mb.crc
print """<tr><td>Version</td><td>%d</td></tr>""" % self.mb.version
print """<tr><td>Last extent</td><td>0x%x</td></tr>""" % \
self.mb.metablock.extent_manager_part.last_extent
print """<tr><td>Last LBA extent offset</td>"""
if self.chosen and self.first_lba_extent:
print """<td>%s</td>""" % self.first_lba_extent.ref_as_html()
else:
print """<td>0x%x</td>""" % self.mb.metablock.lba_index_part.last_lba_extent_offset
print """</tr>"""
print """<tr><td>Last LBA extent entries count</td><td>%d</td></tr>""" % \
self.mb.metablock.lba_index_part.last_lba_extent_entries_count
print """<tr><td>LBA superblock offset</td>"""
if self.chosen and self.lba_superblock:
print """<td>%s</td>""" % self.lba_superblock.ref_as_html()
else:
print """<td>0x%x</td>""" % self.mb.metablock.lba_index_part.lba_superblock_offset
print """</tr>"""
print """<tr><td>LBA superblock entries count</td><td>%d</td></tr>""" % \
self.mb.metablock.lba_index_part.lba_superblock_entries_count
print """<tr><td>Last data extent</td><td>0x%x</td></tr>""" % \
self.mb.metablock.data_block_manager_part.last_data_extent
print """<tr><td>Blocks in last data extent</td><td>%d</td></tr>""" % \
self.mb.metablock.data_block_manager_part.blocks_in_last_data_extent
print """</table>"""
class LBASuperblockExtent(object):
    """Groups LBA superblock chunks that live inside the same on-disk extent."""
    @classmethod
    def store_superblock(self, chunk, db):
        # NOTE(review): despite @classmethod, the first parameter is named
        # `self`; it receives the class object.
        # Round the chunk down to its containing extent and register a
        # container extent for it on first use.
        offset = chunk.offset - chunk.offset % db.extent_size
        if offset not in db.extents:
            c = GoodChunk(offset, db.extent_size, "LBA Superblock Extent", LBASuperblockExtent())
            db.add_extent(c)
        if db.extents[offset].name != "LBA Superblock Extent":
            raise ValueError("Extent at 0x%x is a %r." % (offset, db.extents[offset].name))
        assert db.extents[offset].chunk_ok
        assert isinstance(db.extents[offset].chunk_obj, LBASuperblockExtent)
        db.extents[offset].chunk_obj.children.add(chunk)
    def __init__(self):
        self.children = set()    # superblock chunks contained in this extent
    def print_html(self):
        for child in sorted(self.children, lambda x,y: cmp(x.offset, y.offset)):
            child.chunk_print_html()
class LBASuperblock(object):
    """A superblock: a list of (offset, entry count) references to LBA extents."""
    @classmethod
    def from_data(cls, db, offset, how_many_lba_extents):
        lba_extents = []
        start_offset = offset    # NOTE(review): unused
        # "lbasuper" magic, then align to 16 bytes before the entry array.
        _, offset = parse_constant("lbasuper")(db.block, offset)
        while offset % 16 != 0: offset += 1
        for i in xrange(how_many_lba_extents):
            lba_extent_offset, offset = parse_off64_t(db.block, offset)
            how_many_pairs, offset = parse_int(db.block, offset)
            offset += 4 # Padding after the int
            lba_extent = try_parse(db, lba_extent_offset, db.extent_size, "LBA Extent", LBAExtent, how_many_pairs)
            lba_extent = try_store(lba_extent, db.add_extent)
            lba_extents.append(lba_extent)
        # Skip padding up to the next device block boundary.
        while offset % db.device_block_size != 0:
            offset += 1
        return LBASuperblock(lba_extents)
    def __init__(self, lba_extents):
        self.lba_extents = lba_extents
    def print_html(self):
        for extent in self.lba_extents:
            print """<p>Extent: %s</p>""" % extent.ref_as_html()
class LBAExtent(object):
    """An extent of (block_id, block_offset) pairs mapping blocks to file offsets."""
    @classmethod
    def from_data(cls, db, offset, count):
        # "lbamagic" magic, then align to 16 bytes before the pair array.
        _, offset = parse_constant("lbamagic")(db.block, offset);
        while offset % 16 != 0: offset += 1
        pairs = []
        for i in xrange(count):
            block_id, offset = parse_block_id(db.block, offset)
            block_offset, offset = parse_off64_t(db.block, offset)
            if block_id == 0xFFFFFFFFFFFFFFFF:
                # All-ones block ID marks a padding slot; its offset must be -1.
                assert block_offset == -1
                pairs.append(LBAPaddingPair())
            else:
                # Offset -1 on a real block ID means the block was deleted.
                if block_offset == -1:
                    block_offset = "delete"
                pairs.append(LBAPair(block_id, block_offset))
        return LBAExtent(pairs)
    def __init__(self, pairs):
        self.pairs = pairs
    def print_html(self):
        print """<div style="-webkit-column-width: 310px">"""
        print """<table>"""
        print """<tr><th>Block ID</th><th>Offset</th></tr>"""
        for entry in self.pairs:
            entry.print_html()
        print """</table>"""
        print """</div>"""
class LBAPaddingPair(object):
    """A padding slot in an LBA extent (block ID was all ones)."""
    def print_html(self):
        print """<tr><td colspan=2><i>(padding)</i></td></tr>"""
class LBAPair(object):
    """A live LBA entry mapping a block ID to a file offset or a deletion."""
    def __init__(self, block_id, block_offset):
        self.block_id = block_id
        self.block_offset = block_offset    # byte offset, or the string "delete"
        self.chosen = False    # set by was_used() if this entry is current
    def was_used(self, data_block):
        self.chosen = True
        self.data_block = data_block
    def print_html(self):
        # Current entries link to their data block; superseded ones are
        # rendered in italics with the raw offset.
        print """<tr>"""
        print """<td>%d</td>""" % self.block_id
        if self.chosen:
            if self.block_offset == "delete":
                print """<td>delete</td>"""
            else:
                print """<td>%s</td>""" % self.data_block.ref_as_html()
        else:
            if self.block_offset == "delete":
                print """<td><i>delete</i></td>"""
            else:
                print """<td><i>0x%x</i></td>""" % self.block_offset
        print """</tr>"""
class DataBlockExtent(object):
    """Groups data block chunks that live inside the same on-disk extent."""
    @classmethod
    def store_data_block(self, chunk, db):
        # NOTE(review): despite @classmethod, the first parameter is named
        # `self`; it receives the class object.
        # Round the chunk down to its containing extent and register a
        # container extent for it on first use.
        offset = chunk.offset - chunk.offset % db.extent_size
        if offset not in db.extents:
            ext = GoodChunk(offset, db.extent_size, "Data Block Extent", DataBlockExtent())
            db.add_extent(ext)
        if db.extents[offset].name != "Data Block Extent":
            raise ValueError("Extent at 0x%x is a %r." % (offset, db.extents[offset].name))
        assert db.extents[offset].chunk_ok
        assert isinstance(db.extents[offset].chunk_obj, DataBlockExtent)
        db.extents[offset].chunk_obj.children.add(chunk)
    def __init__(self):
        self.children = set()    # data block chunks contained in this extent
    def print_html(self):
        for child in sorted(self.children, lambda x,y: cmp(x.offset, y.offset)):
            child.chunk_print_html()
class DataBlock(object):
    """A raw btree block; rendered as a hexdump, never interpreted."""
    @classmethod
    def from_data(cls, db, offset):
        contents = db.block[offset: offset + db.block_size]
        return DataBlock(contents)
    def __init__(self, contents):
        self.contents = contents
    def print_html(self):
        print """<div class="hexdump">%s</div>""" % \
            " ".join(x.encode("hex") for x in self.contents)
def file_to_database(filename):
    """Read the log-serializer file at `filename` and parse it into a Database.

    The whole file is slurped into memory at once; serializer files are
    assumed small enough for that.
    """
    # BUG FIX: the file holds binary data, so open it in binary mode
    # (text mode would translate newlines on Windows and corrupt offsets).
    # Also use open() rather than the deprecated file() builtin.
    with open(filename, "rb") as f:
        text = f.read()
    return Database(text)
def database_to_html(db, filename):
    """Render `db` to `filename` as a standalone HTML report.

    Temporarily redirects sys.stdout to the output file, because every
    print_html() method in this module writes via bare `print`; the
    finally clause guarantees stdout is restored even if rendering fails.
    """
    with file(filename, "w") as f:
        sys.stdout = f
        try:
            print """
<html>
    <head>
        <style type="text/css">
            h1 {
                font-size: 1.1em;
                font-weight: normal;
            }
            .hexdump {
                color: green;
                font-family: monospace;
            }
            div.block {
                border: dashed 1px;
                margin: 0.2cm;
                padding: 0.2cm;
            }
            table {
                border-collapse: collapse;
            }
            td, th {
                border: gray solid 1px;
                width: 150px;
            }
        </style>
    </head>
    <body>
"""
            db.print_html()
            print """
    </body>
</html>
"""
        finally:
            sys.stdout = sys.__stdout__
def database_to_blocks(db):
    """Return {block_id: raw bytes} for every live, parseable data block.

    Returns an empty dict when no valid metablock was found; deleted
    blocks (None) and unparseable blocks are silently skipped.
    """
    if not db.metablock or not db.metablock.chunk_ok:
        return {}
    else:
        blocks = {}
        for (block_id, data_block) in db.metablock.chunk_obj.data_blocks.iteritems():
            if data_block is not None:
                if data_block.chunk_ok:
                    blocks[block_id] = data_block.chunk_obj.contents
        return blocks
if __name__ == "__main__":
    # CLI entry point: visualize_log_serializer.py <data_file> <output.html>
    if len(sys.argv) == 3:
        database_to_html(file_to_database(sys.argv[1]), sys.argv[2])
    else:
        print "Usage: %s data_file output.html" % sys.argv[0]
| agpl-3.0 |
bak1an/django | django/contrib/auth/migrations/0001_initial.py | 21 | 4965 | import django.contrib.auth.models
from django.contrib.auth import validators
from django.db import migrations, models
from django.utils import timezone
class Migration(migrations.Migration):
    """Initial schema for django.contrib.auth: Permission, Group and User.

    NOTE(review): migration files are historical records of the schema --
    do not edit field definitions here; add a new migration instead.
    """
    dependencies = [
        ('contenttypes', '__first__'),
    ]
    operations = [
        # Permission: a (content_type, codename) pair with a display name.
        migrations.CreateModel(
            name='Permission',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=50, verbose_name='name')),
                ('content_type', models.ForeignKey(
                    to='contenttypes.ContentType',
                    on_delete=models.CASCADE,
                    to_field='id',
                    verbose_name='content type',
                )),
                ('codename', models.CharField(max_length=100, verbose_name='codename')),
            ],
            options={
                'ordering': ('content_type__app_label', 'content_type__model', 'codename'),
                'unique_together': set([('content_type', 'codename')]),
                'verbose_name': 'permission',
                'verbose_name_plural': 'permissions',
            },
            managers=[
                ('objects', django.contrib.auth.models.PermissionManager()),
            ],
        ),
        # Group: a named collection of permissions.
        migrations.CreateModel(
            name='Group',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(unique=True, max_length=80, verbose_name='name')),
                ('permissions', models.ManyToManyField(to='auth.Permission', verbose_name='permissions', blank=True)),
            ],
            options={
                'verbose_name': 'group',
                'verbose_name_plural': 'groups',
            },
            managers=[
                ('objects', django.contrib.auth.models.GroupManager()),
            ],
        ),
        # User: the default (swappable) auth user model.
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(default=timezone.now, verbose_name='last login')),
                ('is_superuser', models.BooleanField(
                    default=False,
                    help_text='Designates that this user has all permissions without explicitly assigning them.',
                    verbose_name='superuser status'
                )),
                ('username', models.CharField(
                    help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.', unique=True,
                    max_length=30, verbose_name='username',
                    validators=[validators.UnicodeUsernameValidator()],
                )),
                ('first_name', models.CharField(max_length=30, verbose_name='first name', blank=True)),
                ('last_name', models.CharField(max_length=30, verbose_name='last name', blank=True)),
                ('email', models.EmailField(max_length=75, verbose_name='email address', blank=True)),
                ('is_staff', models.BooleanField(
                    default=False, help_text='Designates whether the user can log into this admin site.',
                    verbose_name='staff status'
                )),
                ('is_active', models.BooleanField(
                    default=True, verbose_name='active', help_text=(
                        'Designates whether this user should be treated as active. Unselect this instead of deleting '
                        'accounts.'
                    )
                )),
                ('date_joined', models.DateTimeField(default=timezone.now, verbose_name='date joined')),
                ('groups', models.ManyToManyField(
                    to='auth.Group', verbose_name='groups', blank=True, related_name='user_set',
                    related_query_name='user', help_text=(
                        'The groups this user belongs to. A user will get all permissions granted to each of their '
                        'groups.'
                    )
                )),
                ('user_permissions', models.ManyToManyField(
                    to='auth.Permission', verbose_name='user permissions', blank=True,
                    help_text='Specific permissions for this user.', related_name='user_set',
                    related_query_name='user')
                ),
            ],
            options={
                'swappable': 'AUTH_USER_MODEL',
                'verbose_name': 'user',
                'verbose_name_plural': 'users',
            },
            managers=[
                ('objects', django.contrib.auth.models.UserManager()),
            ],
        ),
    ]
| bsd-3-clause |
saurabh6790/medsynaptic-app | selling/doctype/lead/test_lead.py | 30 | 1078 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
# Fixture data auto-inserted by the webnotes test runner before the tests run:
# leads covering each lifecycle status (Open / Contacted / Converted).
test_records = [
	[{"doctype":"Lead", "lead_name": "_Test Lead", "status":"Open",
		"email_id":"test_lead@example.com", "territory": "_Test Territory"}],
	[{"doctype":"Lead", "lead_name": "_Test Lead 1", "status":"Open",
		"email_id":"test_lead1@example.com"}],
	[{"doctype":"Lead", "lead_name": "_Test Lead 2", "status":"Contacted",
		"email_id":"test_lead2@example.com"}],
	[{"doctype":"Lead", "lead_name": "_Test Lead 3", "status":"Converted",
		"email_id":"test_lead3@example.com"}],
]
import webnotes
import unittest
class TestLead(unittest.TestCase):
	"""Unit tests for the Lead doctype."""
	def test_make_customer(self):
		"""make_customer() should map a fixture Lead onto an insertable Customer doclist."""
		from selling.doctype.lead.lead import make_customer
		customer = make_customer("_T-Lead-00001")
		# assertEqual, not the deprecated assertEquals alias.
		self.assertEqual(customer[0]["doctype"], "Customer")
		self.assertEqual(customer[0]["lead_name"], "_T-Lead-00001")
		customer[0].customer_group = "_Test Customer Group"
		# Insert to prove the generated doclist passes validation.
		webnotes.bean(customer).insert()
| agpl-3.0 |
pratikmallya/hue | desktop/core/ext-py/Django-1.6.10/tests/file_storage/tests.py | 40 | 24415 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import errno
import os
import shutil
import sys
import tempfile
import time
import zlib
from datetime import datetime, timedelta
from io import BytesIO
try:
import threading
except ImportError:
import dummy_threading as threading
from django.conf import settings
from django.core.exceptions import SuspiciousOperation, ImproperlyConfigured
from django.core.files.base import File, ContentFile
from django.core.files.images import get_image_dimensions
from django.core.files.storage import FileSystemStorage, get_storage_class
from django.core.files.uploadedfile import UploadedFile
from django.test import LiveServerTestCase, SimpleTestCase
from django.test.utils import override_settings
from django.utils import six
from django.utils import unittest
from django.utils.six.moves.urllib.request import urlopen
from django.utils._os import upath
try:
from django.utils.image import Image
except ImproperlyConfigured:
Image = None
FILE_SUFFIX_REGEX = '[A-Za-z0-9]{7}'
class GetStorageClassTests(SimpleTestCase):
    """Tests for get_storage_class() lookup and its failure modes."""
    def test_get_filesystem_storage(self):
        """
        get_storage_class returns the class for a storage backend name/path.
        """
        self.assertEqual(
            get_storage_class('django.core.files.storage.FileSystemStorage'),
            FileSystemStorage)
    def test_get_invalid_storage_module(self):
        """
        get_storage_class raises an error if the requested import doesn't exist.
        """
        with six.assertRaisesRegex(self, ImproperlyConfigured,
                "Error importing module storage: \"No module named '?storage'?\""):
            get_storage_class('storage.NonExistingStorage')
    def test_get_nonexisting_storage_class(self):
        """
        get_storage_class raises an error if the requested class doesn't exist.
        """
        self.assertRaisesMessage(
            ImproperlyConfigured,
            'Module "django.core.files.storage" does not define a '
            '"NonExistingStorage" attribute/class',
            get_storage_class,
            'django.core.files.storage.NonExistingStorage')
    def test_get_nonexisting_storage_module(self):
        """
        get_storage_class raises an error if the requested module doesn't exist.
        """
        # Error message may or may not be the fully qualified path.
        with six.assertRaisesRegex(self, ImproperlyConfigured,
                "Error importing module django.core.files.non_existing_storage: "
                "\"No module named '?(django.core.files.)?non_existing_storage'?\""):
            get_storage_class(
                'django.core.files.non_existing_storage.NonExistingStorage')
class FileStorageTests(unittest.TestCase):
    """Exercises the FileSystemStorage API against real temporary directories."""
    storage_class = FileSystemStorage

    def setUp(self):
        # Fresh storage rooted in a throwaway directory for every test.
        self.temp_dir = tempfile.mkdtemp()
        self.storage = self.storage_class(location=self.temp_dir,
            base_url='/test_media_url/')
        # Set up a second temporary directory which is ensured to have a mixed
        # case name.
        self.temp_dir2 = tempfile.mkdtemp(suffix='aBc')

    def tearDown(self):
        shutil.rmtree(self.temp_dir)
        shutil.rmtree(self.temp_dir2)

    def test_empty_location(self):
        """
        An empty location falls back to the current working directory.
        """
        # Renamed from test_emtpy_location (typo); the old docstring claimed
        # an exception is raised, but the behavior under test is the fallback.
        storage = self.storage_class(location='')
        self.assertEqual(storage.base_location, '')
        self.assertEqual(storage.location, upath(os.getcwd()))

    def test_file_access_options(self):
        """
        Standard file access options are available, and work as expected.
        """
        self.assertFalse(self.storage.exists('storage_test'))
        f = self.storage.open('storage_test', 'w')
        f.write('storage contents')
        f.close()
        self.assertTrue(self.storage.exists('storage_test'))
        f = self.storage.open('storage_test', 'r')
        self.assertEqual(f.read(), 'storage contents')
        f.close()
        self.storage.delete('storage_test')
        self.assertFalse(self.storage.exists('storage_test'))

    def test_file_accessed_time(self):
        """
        File storage returns a Datetime object for the last accessed time of
        a file.
        """
        self.assertFalse(self.storage.exists('test.file'))
        f = ContentFile('custom contents')
        f_name = self.storage.save('test.file', f)
        atime = self.storage.accessed_time(f_name)
        self.assertEqual(atime, datetime.fromtimestamp(
            os.path.getatime(self.storage.path(f_name))))
        self.assertTrue(datetime.now() - self.storage.accessed_time(f_name) < timedelta(seconds=2))
        self.storage.delete(f_name)

    def test_file_created_time(self):
        """
        File storage returns a Datetime object for the creation time of
        a file.
        """
        self.assertFalse(self.storage.exists('test.file'))
        f = ContentFile('custom contents')
        f_name = self.storage.save('test.file', f)
        ctime = self.storage.created_time(f_name)
        self.assertEqual(ctime, datetime.fromtimestamp(
            os.path.getctime(self.storage.path(f_name))))
        self.assertTrue(datetime.now() - self.storage.created_time(f_name) < timedelta(seconds=2))
        self.storage.delete(f_name)

    def test_file_modified_time(self):
        """
        File storage returns a Datetime object for the last modified time of
        a file.
        """
        self.assertFalse(self.storage.exists('test.file'))
        f = ContentFile('custom contents')
        f_name = self.storage.save('test.file', f)
        mtime = self.storage.modified_time(f_name)
        self.assertEqual(mtime, datetime.fromtimestamp(
            os.path.getmtime(self.storage.path(f_name))))
        self.assertTrue(datetime.now() - self.storage.modified_time(f_name) < timedelta(seconds=2))
        self.storage.delete(f_name)

    def test_file_save_without_name(self):
        """
        File storage extracts the filename from the content object if no
        name is given explicitly.
        """
        self.assertFalse(self.storage.exists('test.file'))
        f = ContentFile('custom contents')
        f.name = 'test.file'
        storage_f_name = self.storage.save(None, f)
        self.assertEqual(storage_f_name, f.name)
        self.assertTrue(os.path.exists(os.path.join(self.temp_dir, f.name)))
        self.storage.delete(storage_f_name)

    def test_file_save_with_path(self):
        """
        Saving a pathname should create intermediate directories as necessary.
        """
        self.assertFalse(self.storage.exists('path/to'))
        self.storage.save('path/to/test.file',
            ContentFile('file saved with path'))
        self.assertTrue(self.storage.exists('path/to'))
        with self.storage.open('path/to/test.file') as f:
            self.assertEqual(f.read(), b'file saved with path')
        self.assertTrue(os.path.exists(
            os.path.join(self.temp_dir, 'path', 'to', 'test.file')))
        self.storage.delete('path/to/test.file')

    def test_file_path(self):
        """
        File storage returns the full path of a file
        """
        self.assertFalse(self.storage.exists('test.file'))
        f = ContentFile('custom contents')
        f_name = self.storage.save('test.file', f)
        self.assertEqual(self.storage.path(f_name),
            os.path.join(self.temp_dir, f_name))
        self.storage.delete(f_name)

    def test_file_url(self):
        """
        File storage returns a url to access a given file from the Web.
        """
        self.assertEqual(self.storage.url('test.file'),
            '%s%s' % (self.storage.base_url, 'test.file'))
        # should encode special chars except ~!*()'
        # like encodeURIComponent() JavaScript function do
        self.assertEqual(self.storage.url(r"""~!*()'@#$%^&*abc`+ =.file"""),
            """/test_media_url/~!*()'%40%23%24%25%5E%26*abc%60%2B%20%3D.file""")
        # should translate os path separator(s) to the url path separator
        self.assertEqual(self.storage.url("""a/b\\c.file"""),
            """/test_media_url/a/b/c.file""")
        self.storage.base_url = None
        self.assertRaises(ValueError, self.storage.url, 'test.file')

    def test_listdir(self):
        """
        File storage returns a tuple containing directories and files.
        """
        self.assertFalse(self.storage.exists('storage_test_1'))
        self.assertFalse(self.storage.exists('storage_test_2'))
        self.assertFalse(self.storage.exists('storage_dir_1'))
        f = self.storage.save('storage_test_1', ContentFile('custom content'))
        f = self.storage.save('storage_test_2', ContentFile('custom content'))
        os.mkdir(os.path.join(self.temp_dir, 'storage_dir_1'))
        dirs, files = self.storage.listdir('')
        self.assertEqual(set(dirs), set(['storage_dir_1']))
        self.assertEqual(set(files),
            set(['storage_test_1', 'storage_test_2']))
        self.storage.delete('storage_test_1')
        self.storage.delete('storage_test_2')
        os.rmdir(os.path.join(self.temp_dir, 'storage_dir_1'))

    def test_file_storage_prevents_directory_traversal(self):
        """
        File storage prevents directory traversal (files can only be accessed if
        they're below the storage location).
        """
        self.assertRaises(SuspiciousOperation, self.storage.exists, '..')
        self.assertRaises(SuspiciousOperation, self.storage.exists, '/etc/passwd')

    def test_file_storage_preserves_filename_case(self):
        """The storage backend should preserve case of filenames."""
        # Create a storage backend associated with the mixed case name
        # directory.
        temp_storage = self.storage_class(location=self.temp_dir2)
        # Ask that storage backend to store a file with a mixed case filename.
        mixed_case = 'CaSe_SeNsItIvE'
        file = temp_storage.open(mixed_case, 'w')
        file.write('storage contents')
        file.close()
        self.assertEqual(os.path.join(self.temp_dir2, mixed_case),
            temp_storage.path(mixed_case))
        temp_storage.delete(mixed_case)

    def test_makedirs_race_handling(self):
        """
        File storage should be robust against directory creation race conditions.
        """
        real_makedirs = os.makedirs

        # Monkey-patch os.makedirs, to simulate a normal call, a raced call,
        # and an error.
        def fake_makedirs(path):
            if path == os.path.join(self.temp_dir, 'normal'):
                real_makedirs(path)
            elif path == os.path.join(self.temp_dir, 'raced'):
                # Create the directory but pretend another process beat us to it.
                real_makedirs(path)
                raise OSError(errno.EEXIST, 'simulated EEXIST')
            elif path == os.path.join(self.temp_dir, 'error'):
                raise OSError(errno.EACCES, 'simulated EACCES')
            else:
                self.fail('unexpected argument %r' % path)

        try:
            os.makedirs = fake_makedirs
            self.storage.save('normal/test.file',
                ContentFile('saved normally'))
            with self.storage.open('normal/test.file') as f:
                self.assertEqual(f.read(), b'saved normally')
            self.storage.save('raced/test.file',
                ContentFile('saved with race'))
            with self.storage.open('raced/test.file') as f:
                self.assertEqual(f.read(), b'saved with race')
            # Check that OSErrors aside from EEXIST are still raised.
            self.assertRaises(OSError,
                self.storage.save, 'error/test.file', ContentFile('not saved'))
        finally:
            os.makedirs = real_makedirs

    def test_remove_race_handling(self):
        """
        File storage should be robust against file removal race conditions.
        """
        real_remove = os.remove

        # Monkey-patch os.remove, to simulate a normal call, a raced call,
        # and an error.
        def fake_remove(path):
            if path == os.path.join(self.temp_dir, 'normal.file'):
                real_remove(path)
            elif path == os.path.join(self.temp_dir, 'raced.file'):
                # Remove the file but pretend another process beat us to it.
                real_remove(path)
                raise OSError(errno.ENOENT, 'simulated ENOENT')
            elif path == os.path.join(self.temp_dir, 'error.file'):
                raise OSError(errno.EACCES, 'simulated EACCES')
            else:
                self.fail('unexpected argument %r' % path)

        try:
            os.remove = fake_remove
            self.storage.save('normal.file', ContentFile('delete normally'))
            self.storage.delete('normal.file')
            self.assertFalse(self.storage.exists('normal.file'))
            self.storage.save('raced.file', ContentFile('delete with race'))
            self.storage.delete('raced.file')
            # BUG FIX: this previously re-checked 'normal.file' (copy-paste),
            # leaving the raced deletion unverified.
            self.assertFalse(self.storage.exists('raced.file'))
            # Check that OSErrors aside from ENOENT are still raised.
            self.storage.save('error.file', ContentFile('delete with error'))
            self.assertRaises(OSError, self.storage.delete, 'error.file')
        finally:
            os.remove = real_remove

    def test_file_chunks_error(self):
        """
        Test behaviour when file.chunks() is raising an error
        """
        f1 = ContentFile('chunks fails')

        def failing_chunks():
            raise IOError
        f1.chunks = failing_chunks
        with self.assertRaises(IOError):
            self.storage.save('error.file', f1)

    def test_delete_no_name(self):
        """
        Calling delete with an empty name should not try to remove the base
        storage directory, but fail loudly (#20660).
        """
        with self.assertRaises(AssertionError):
            self.storage.delete('')
class CustomStorage(FileSystemStorage):
    def get_available_name(self, name):
        """
        Append numbers to duplicate files rather than underscores, like Trac.
        """
        # Split on every dot: the first piece is the stem, the remaining
        # pieces are (possibly several) extensions re-appended unchanged.
        pieces = name.split('.')
        stem, extensions = pieces[0], pieces[1:]
        counter = 2
        while self.exists(name):
            name = '.'.join([stem, str(counter)] + extensions)
            counter += 1
        return name
class CustomStorageTests(FileStorageTests):
    storage_class = CustomStorage

    def test_custom_get_available_name(self):
        """Duplicate names receive numeric suffixes ('name', 'name.2', ...)."""
        saved = []
        for contents in ('custom contents', 'more contents'):
            saved.append(self.storage.save('custom_storage', ContentFile(contents)))
        self.assertEqual(saved[0], 'custom_storage')
        self.assertEqual(saved[1], 'custom_storage.2')
        for name in saved:
            self.storage.delete(name)
class UnicodeFileNameTests(unittest.TestCase):
    def test_unicode_file_names(self):
        """
        Regression test for #8156: the repr() of a file with a unicode name
        must be a bytestring (str), not a unicode object.
        """
        uploaded = UploadedFile(name='¿Cómo?',content_type='text')
        self.assertEqual(type(repr(uploaded)), str)
# Tests for a race condition on file saving (#4948).
# This is written in such a way that it'll always pass on platforms
# without threading.
class SlowFile(ContentFile):
    """A ContentFile whose chunks() stalls for a second, widening the
    window in which a concurrent save can race (see
    FileSaveRaceConditionTest)."""
    def chunks(self):
        time.sleep(1)
        # Fix: delegate via super(SlowFile, ...) so the full MRO (including
        # ContentFile) is consulted.  super(ContentFile, self) skipped
        # ContentFile and only happened to work because ContentFile does not
        # override chunks().
        return super(SlowFile, self).chunks()
class FileSaveRaceConditionTest(unittest.TestCase):
    """Two simultaneous saves under the same name must not clobber each
    other: one keeps the name, the other gets a suffixed name (#4948)."""
    def setUp(self):
        self.storage_dir = tempfile.mkdtemp()
        self.storage = FileSystemStorage(self.storage_dir)
        self.thread = threading.Thread(target=self.save_file, args=['conflict'])
    def tearDown(self):
        shutil.rmtree(self.storage_dir)
    def save_file(self, name):
        # SlowFile sleeps inside chunks(), keeping the race window open.
        name = self.storage.save(name, SlowFile(b"Data"))
    def test_race_condition(self):
        self.thread.start()
        name = self.save_file('conflict')
        self.thread.join()
        files = sorted(os.listdir(self.storage_dir))
        # One save keeps the original name, the other is renamed with a
        # generated suffix.
        self.assertEqual(files[0], 'conflict')
        six.assertRegex(self, files[1], 'conflict_%s' % FILE_SUFFIX_REGEX)
@unittest.skipIf(sys.platform.startswith('win'), "Windows only partially supports umasks and chmod.")
class FileStoragePermissions(unittest.TestCase):
    """Saved files get FILE_UPLOAD_PERMISSIONS when set, otherwise
    0o666 masked by the process umask."""
    def setUp(self):
        # Impose a known umask so the default-permissions test is
        # deterministic regardless of the environment.
        self.umask = 0o027
        self.old_umask = os.umask(self.umask)
        self.storage_dir = tempfile.mkdtemp()
        self.storage = FileSystemStorage(self.storage_dir)
    def tearDown(self):
        shutil.rmtree(self.storage_dir)
        os.umask(self.old_umask)
    @override_settings(FILE_UPLOAD_PERMISSIONS=0o654)
    def test_file_upload_permissions(self):
        name = self.storage.save("the_file", ContentFile("data"))
        actual_mode = os.stat(self.storage.path(name))[0] & 0o777
        self.assertEqual(actual_mode, 0o654)
    @override_settings(FILE_UPLOAD_PERMISSIONS=None)
    def test_file_upload_default_permissions(self):
        fname = self.storage.save("some_file", ContentFile("data"))
        mode = os.stat(self.storage.path(fname))[0] & 0o777
        self.assertEqual(mode, 0o666 & ~self.umask)
class FileStoragePathParsing(unittest.TestCase):
    """Name-mangling for duplicates must act on the file name, never on a
    containing directory that happens to contain a dot."""
    def setUp(self):
        self.storage_dir = tempfile.mkdtemp()
        self.storage = FileSystemStorage(self.storage_dir)
    def tearDown(self):
        shutil.rmtree(self.storage_dir)
    def test_directory_with_dot(self):
        """Regression test for #9610.
        If the directory name contains a dot and the file name doesn't, make
        sure we still mangle the file name instead of the directory name.
        """
        self.storage.save('dotted.path/test', ContentFile("1"))
        self.storage.save('dotted.path/test', ContentFile("2"))
        files = sorted(os.listdir(os.path.join(self.storage_dir, 'dotted.path')))
        # The directory name must not have been mangled.
        self.assertFalse(os.path.exists(os.path.join(self.storage_dir, 'dotted_.path')))
        self.assertEqual(files[0], 'test')
        six.assertRegex(self, files[1], 'test_%s' % FILE_SUFFIX_REGEX)
    def test_first_character_dot(self):
        """
        File names with a dot as their first character don't have an extension,
        and the underscore should get added to the end.
        """
        self.storage.save('dotted.path/.test', ContentFile("1"))
        self.storage.save('dotted.path/.test', ContentFile("2"))
        files = sorted(os.listdir(os.path.join(self.storage_dir, 'dotted.path')))
        self.assertFalse(os.path.exists(os.path.join(self.storage_dir, 'dotted_.path')))
        self.assertEqual(files[0], '.test')
        six.assertRegex(self, files[1], '.test_%s' % FILE_SUFFIX_REGEX)
class DimensionClosingBug(unittest.TestCase):
    """
    Test that get_image_dimensions() properly closes files (#8817)
    """
    @unittest.skipUnless(Image, "Pillow/PIL not installed")
    def test_not_closing_of_files(self):
        """
        Open files passed into get_image_dimensions() should stay opened.
        """
        empty_io = BytesIO()
        try:
            get_image_dimensions(empty_io)
        finally:
            # Caller-owned file objects must not be closed by the helper.
            self.assertTrue(not empty_io.closed)
    @unittest.skipUnless(Image, "Pillow/PIL not installed")
    def test_closing_of_filenames(self):
        """
        get_image_dimensions() called with a filename should close the file.
        """
        # We need to inject a modified open() builtin into the images module
        # that checks if the file was closed properly if the function is
        # called with a filename instead of an file object.
        # get_image_dimensions will call our catching_open instead of the
        # regular builtin one.
        class FileWrapper(object):
            # Class-level list shared by all wrappers: records each close().
            _closed = []
            def __init__(self, f):
                self.f = f
            def __getattr__(self, name):
                # Delegate everything else to the wrapped file object.
                return getattr(self.f, name)
            def close(self):
                self._closed.append(True)
                self.f.close()
        def catching_open(*args):
            return FileWrapper(open(*args))
        from django.core.files import images
        images.open = catching_open
        try:
            get_image_dimensions(os.path.join(os.path.dirname(upath(__file__)), "test1.png"))
        finally:
            # Restore the module's normal open() even if the call failed.
            del images.open
        self.assertTrue(FileWrapper._closed)
class InconsistentGetImageDimensionsBug(unittest.TestCase):
    """
    Test that get_image_dimensions() works properly after various calls
    using a file handler (#11158)
    """
    @unittest.skipUnless(Image, "Pillow/PIL not installed")
    def test_multiple_calls(self):
        """
        Multiple calls of get_image_dimensions() should return the same size.
        """
        from django.core.files.images import ImageFile
        img_path = os.path.join(os.path.dirname(upath(__file__)), "test.png")
        # Fix: close the underlying file handle deterministically instead of
        # leaking it (avoids ResourceWarning / fd exhaustion).
        with open(img_path, 'rb') as fh:
            image = ImageFile(fh)
            image_pil = Image.open(img_path)
            size_1, size_2 = get_image_dimensions(image), get_image_dimensions(image)
        self.assertEqual(image_pil.size, size_1)
        self.assertEqual(size_1, size_2)
    @unittest.skipUnless(Image, "Pillow/PIL not installed")
    def test_bug_19457(self):
        """
        Regression test for #19457
        get_image_dimensions fails on some pngs, while Image.size is working good on them
        """
        img_path = os.path.join(os.path.dirname(upath(__file__)), "magic.png")
        try:
            size = get_image_dimensions(img_path)
        except zlib.error:
            self.fail("Exception raised from get_image_dimensions().")
        self.assertEqual(size, Image.open(img_path).size)
class ContentFileTestCase(unittest.TestCase):
    """Behaviour of in-memory ContentFile objects and saving them."""
    def setUp(self):
        self.storage_dir = tempfile.mkdtemp()
        self.storage = FileSystemStorage(self.storage_dir)

    def tearDown(self):
        shutil.rmtree(self.storage_dir)

    def test_content_file_default_name(self):
        """A ContentFile has no name unless one is supplied."""
        self.assertIsNone(ContentFile(b"content").name)

    def test_content_file_custom_name(self):
        """
        Test that the constructor of ContentFile accepts 'name' (#16590).
        """
        custom_name = "I can have a name too!"
        self.assertEqual(ContentFile(b"content", name=custom_name).name, custom_name)

    def test_content_file_input_type(self):
        """
        ContentFile accepts both bytes and unicode, and read() returns
        content of the same type it was given.
        """
        self.assertIsInstance(ContentFile(b"content").read(), bytes)
        expected_text_type = six.text_type if six.PY3 else bytes
        self.assertIsInstance(ContentFile("español").read(), expected_text_type)

    def test_content_saving(self):
        """
        ContentFile saves correctly through filesystem storage whether it
        was initialized with bytes or unicode content.
        """
        self.storage.save('bytes.txt', ContentFile(b"content"))
        self.storage.save('unicode.txt', ContentFile("español"))
class NoNameFileTestCase(unittest.TestCase):
    """
    File-like objects without a name (e.g. tempfile.SpooledTemporaryFile or
    urllib.urlopen() responses) still work when wrapped in File.
    """
    def test_noname_file_default_name(self):
        self.assertIsNone(File(BytesIO(b'A file with no name')).name)

    def test_noname_file_get_size(self):
        self.assertEqual(File(BytesIO(b'A file with no name')).size, 19)
class FileLikeObjectTestCase(LiveServerTestCase):
    """
    The storage API works with arbitrary file-like objects (#15644), e.g.
    the response object returned by urllib2.urlopen().
    """
    available_apps = []
    urls = 'file_storage.urls'

    def setUp(self):
        self.temp_dir = tempfile.mkdtemp()
        self.storage = FileSystemStorage(location=self.temp_dir)

    def tearDown(self):
        shutil.rmtree(self.temp_dir)

    def test_urllib2_urlopen(self):
        """
        Test the File storage API with a file like object coming from urllib2.urlopen()
        """
        remote_source = urlopen(self.live_server_url + '/')
        stored_filename = self.storage.save("remote_file.html", File(remote_source))
        expected_body = urlopen(self.live_server_url + '/').read()
        with self.storage.open(stored_filename) as stored_file:
            self.assertEqual(stored_file.read(), expected_body)
chiragjogi/odoo | addons/lunch/lunch.py | 344 | 23407 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from xml.sax.saxutils import escape
import time
from openerp.osv import fields, osv
from datetime import datetime
from lxml import etree
from openerp import tools
from openerp.tools.translate import _
class lunch_order(osv.Model):
    """
    lunch order (contains one or more lunch order line(s))
    """
    _name = 'lunch.order'
    _description = 'Lunch Order'
    _order = 'date desc'
    def name_get(self, cr, uid, ids, context=None):
        """Return "Lunch Order <id>" as the display name of each record."""
        if not ids:
            return []
        res = []
        for elmt in self.browse(cr, uid, ids, context=context):
            name = _("Lunch Order")
            name = name + ' ' + str(elmt.id)
            res.append((elmt.id, name))
        return res
    def _price_get(self, cr, uid, ids, name, arg, context=None):
        """
        Function field: sum of the order lines' product prices per order.
        """
        result = dict.fromkeys(ids, 0)
        for order in self.browse(cr, uid, ids, context=context):
            result[order.id] = sum(order_line.product_id.price
                                   for order_line in order.order_line_ids)
        return result
    def _fetch_orders_from_lines(self, cr, uid, ids, name, context=None):
        """
        Store trigger: return the lunch.order ids to which the order lines
        `ids` belong, so 'total' is recomputed when a line changes.
        """
        result = set()
        for order_line in self.browse(cr, uid, ids, context=context):
            if order_line.order_id:
                result.add(order_line.order_id.id)
        return list(result)
    def add_preference(self, cr, uid, ids, pref_id, context=None):
        """
        create a new order line based on the preference selected (pref_id)
        """
        assert len(ids) == 1
        orderline_ref = self.pool.get('lunch.order.line')
        prod_ref = self.pool.get('lunch.product')  # NOTE(review): unused local — candidate for removal
        order = self.browse(cr, uid, ids[0], context=context)
        pref = orderline_ref.browse(cr, uid, pref_id, context=context)
        # The new line copies the preference's product/note and the current
        # product price (prices may have changed since the preference).
        new_order_line = {
            'date': order.date,
            'user_id': uid,
            'product_id': pref.product_id.id,
            'note': pref.note,
            'order_id': order.id,
            'price': pref.product_id.price,
            'supplier': pref.product_id.supplier.id
        }
        return orderline_ref.create(cr, uid, new_order_line, context=context)
    def _alerts_get(self, cr, uid, ids, name, arg, context=None):
        """
        get the alerts to display on the order form
        """
        result = {}
        alert_msg = self._default_alerts_get(cr, uid, context=context)
        for order in self.browse(cr, uid, ids, context=context):
            # Alerts are only shown while the order is still editable.
            if order.state == 'new':
                result[order.id] = alert_msg
        return result
    def check_day(self, alert):
        """
        This method is used by can_display_alert
        to check if the alert day corresponds
        to the current day
        """
        today = datetime.now().isoweekday()
        assert 1 <= today <= 7, "Should be between 1 and 7"
        # Index 0 maps to the unused 'days' entry so that isoweekday()'s
        # 1..7 range lines up with the monday..sunday boolean fields.
        mapping = dict((idx, name) for idx, name in enumerate('days monday tuesday wednesday thursday friday saturday sunday'.split()))
        return alert[mapping[today]]
    def can_display_alert(self, alert):
        """
        Return True if the alert should be displayed today, according to
        its recurrency type.
        """
        if alert.alter_type == 'specific':
            #the alert is only activated on a specific day
            return alert.specific_day == time.strftime(tools.DEFAULT_SERVER_DATE_FORMAT)
        elif alert.alter_type == 'week':
            #the alert is activated during some days of the week
            return self.check_day(alert)
        return True # alter_type == 'days' (every day)
    def _default_alerts_get(self, cr, uid, context=None):
        """
        Build the newline-joined text of all alerts active right now.
        """
        alert_ref = self.pool.get('lunch.alert')
        alert_ids = alert_ref.search(cr, uid, [], context=context)
        alert_msg = []
        for alert in alert_ref.browse(cr, uid, alert_ids, context=context):
            #check if the alert must be displayed today
            if self.can_display_alert(alert):
                #display the alert only during its active time window
                mynow = fields.datetime.context_timestamp(cr, uid, datetime.now(), context=context)
                # active_from/active_to are floats like 7.5 == 07:30.
                hour_to = int(alert.active_to)
                min_to = int((alert.active_to - hour_to) * 60)
                to_alert = datetime.strptime(str(hour_to) + ":" + str(min_to), "%H:%M")
                hour_from = int(alert.active_from)
                min_from = int((alert.active_from - hour_from) * 60)
                from_alert = datetime.strptime(str(hour_from) + ":" + str(min_from), "%H:%M")
                if mynow.time() >= from_alert.time() and mynow.time() <= to_alert.time():
                    alert_msg.append(alert.message)
        return '\n'.join(alert_msg)
    def onchange_price(self, cr, uid, ids, order_line_ids, context=None):
        """
        Onchange method that refreshes the total price of the order from
        its (possibly unsaved) order lines.
        """
        res = {'value': {'total': 0.0}}
        order_line_ids = self.resolve_o2m_commands_to_record_dicts(cr, uid, "order_line_ids", order_line_ids, ["price"], context=context)
        if order_line_ids:
            tot = 0.0
            product_ref = self.pool.get("lunch.product")
            for prod in order_line_ids:
                # Unsaved lines carry a product_id; saved ones already
                # expose their stored price.
                if 'product_id' in prod:
                    tot += product_ref.browse(cr, uid, prod['product_id'], context=context).price
                else:
                    tot += prod['price']
            res = {'value': {'total': tot}}
        return res
    def __getattr__(self, attr):
        """
        this method catch unexisting method call and if it starts with
        add_preference_'n' we execute the add_preference method with
        'n' as parameter
        """
        if attr.startswith('add_preference_'):
            # len('add_preference_') == 15; the remainder is the pref id.
            pref_id = int(attr[15:])
            def specific_function(cr, uid, ids, context=None):
                return self.add_preference(cr, uid, ids, pref_id, context=context)
            return specific_function
        return super(lunch_order, self).__getattr__(attr)
    def fields_view_get(self, cr, uid, view_id=None, view_type=False, context=None, toolbar=False, submenu=False):
        """
        Add the user's preferences (last ordered products) into the order
        form view, as clickable "Add" buttons grouped by product category.
        """
        res = super(lunch_order,self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=submenu)
        line_ref = self.pool.get("lunch.order.line")
        if view_type == 'form':
            doc = etree.XML(res['arch'])
            pref_ids = line_ref.search(cr, uid, [('user_id', '=', uid)], order='id desc', context=context)
            xml_start = etree.Element("div")
            #If there are no preference (it's the first time for the user)
            if len(pref_ids)==0:
                #create Elements
                xml_no_pref_1 = etree.Element("div")
                xml_no_pref_1.set('class','oe_inline oe_lunch_intro')
                xml_no_pref_2 = etree.Element("h3")
                xml_no_pref_2.text = _("This is the first time you order a meal")
                xml_no_pref_3 = etree.Element("p")
                xml_no_pref_3.set('class','oe_grey')
                xml_no_pref_3.text = _("Select a product and put your order comments on the note.")
                xml_no_pref_4 = etree.Element("p")
                xml_no_pref_4.set('class','oe_grey')
                xml_no_pref_4.text = _("Your favorite meals will be created based on your last orders.")
                xml_no_pref_5 = etree.Element("p")
                xml_no_pref_5.set('class','oe_grey')
                xml_no_pref_5.text = _("Don't forget the alerts displayed in the reddish area")
                #structure Elements
                xml_start.append(xml_no_pref_1)
                xml_no_pref_1.append(xml_no_pref_2)
                xml_no_pref_1.append(xml_no_pref_3)
                xml_no_pref_1.append(xml_no_pref_4)
                xml_no_pref_1.append(xml_no_pref_5)
            #Else: the user already have preferences so we display them
            else:
                preferences = line_ref.browse(cr, uid, pref_ids, context=context)
                categories = {} #store the different categories of products in preference
                count = 0
                for pref in preferences:
                    #For each preference
                    categories.setdefault(pref.product_id.category_id.name, {})
                    #if this product has already been added to the categories dictionnary
                    if pref.product_id.id in categories[pref.product_id.category_id.name]:
                        #we check if for the same product the note has already been added
                        if pref.note not in categories[pref.product_id.category_id.name][pref.product_id.id]:
                            #if it's not the case then we add this to preferences
                            categories[pref.product_id.category_id.name][pref.product_id.id][pref.note] = pref
                    #if this product is not in the dictionnay, we add it
                    else:
                        categories[pref.product_id.category_id.name][pref.product_id.id] = {}
                        categories[pref.product_id.category_id.name][pref.product_id.id][pref.note] = pref
                currency = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.currency_id
                #For each preferences that we get, we will create the XML structure
                for key, value in categories.items():
                    xml_pref_1 = etree.Element("div")
                    xml_pref_1.set('class', 'oe_lunch_30pc')
                    xml_pref_2 = etree.Element("h2")
                    xml_pref_2.text = key
                    xml_pref_1.append(xml_pref_2)
                    i = 0
                    value = value.values()
                    #TODO: sorted_values is used for a quick and dirty hack in order to display the 5 last orders of each categories.
                    #It would be better to fetch only the 5 items to display instead of fetching everything then sorting them in order to keep only the 5 last.
                    #NB: The note could also be ignored + we could fetch the preferences on the most ordered products instead of the last ones...
                    sorted_values = {}
                    for val in value:
                        for elmt in val.values():
                            sorted_values[elmt.id] = elmt
                    for key, pref in sorted(sorted_values.iteritems(), key=lambda (k, v): (k, v), reverse=True):
                        #We only show 5 preferences per category (or it will be too long)
                        if i == 5:
                            break
                        i += 1
                        xml_pref_3 = etree.Element("div")
                        xml_pref_3.set('class','oe_lunch_vignette')
                        xml_pref_1.append(xml_pref_3)
                        xml_pref_4 = etree.Element("span")
                        xml_pref_4.set('class','oe_lunch_button')
                        xml_pref_3.append(xml_pref_4)
                        #The "+" button calls add_preference_<id> via __getattr__.
                        xml_pref_5 = etree.Element("button")
                        xml_pref_5.set('name',"add_preference_"+str(pref.id))
                        xml_pref_5.set('class','oe_link oe_i oe_button_plus')
                        xml_pref_5.set('type','object')
                        xml_pref_5.set('string','+')
                        xml_pref_4.append(xml_pref_5)
                        xml_pref_6 = etree.Element("button")
                        xml_pref_6.set('name',"add_preference_"+str(pref.id))
                        xml_pref_6.set('class','oe_link oe_button_add')
                        xml_pref_6.set('type','object')
                        xml_pref_6.set('string',_("Add"))
                        xml_pref_4.append(xml_pref_6)
                        xml_pref_7 = etree.Element("div")
                        xml_pref_7.set('class','oe_group_text_button')
                        xml_pref_3.append(xml_pref_7)
                        xml_pref_8 = etree.Element("div")
                        xml_pref_8.set('class','oe_lunch_text')
                        xml_pref_8.text = escape(pref.product_id.name)+str(" ")
                        xml_pref_7.append(xml_pref_8)
                        price = pref.product_id.price or 0.0
                        cur = currency.name or ''
                        xml_pref_9 = etree.Element("span")
                        xml_pref_9.set('class','oe_tag')
                        xml_pref_9.text = str(price)+str(" ")+cur
                        xml_pref_8.append(xml_pref_9)
                        xml_pref_10 = etree.Element("div")
                        xml_pref_10.set('class','oe_grey')
                        xml_pref_10.text = escape(pref.note or '')
                        xml_pref_3.append(xml_pref_10)
                    xml_start.append(xml_pref_1)
            # Inject the generated markup into the view's placeholder div.
            first_node = doc.xpath("//div[@name='preferences']")
            if first_node and len(first_node)>0:
                first_node[0].append(xml_start)
            res['arch'] = etree.tostring(doc)
        return res
    _columns = {
        'user_id': fields.many2one('res.users', 'User Name', required=True, readonly=True, states={'new':[('readonly', False)]}),
        'date': fields.date('Date', required=True, readonly=True, states={'new':[('readonly', False)]}),
        'order_line_ids': fields.one2many('lunch.order.line', 'order_id', 'Products',
            ondelete="cascade", readonly=True, states={'new':[('readonly', False)]},
            copy=True),
        'total': fields.function(_price_get, string="Total", store={
            'lunch.order.line': (_fetch_orders_from_lines, ['product_id','order_id'], 20),
        }),
        'state': fields.selection([('new', 'New'), \
                                  ('confirmed','Confirmed'), \
                                  ('cancelled','Cancelled'), \
                                  ('partially','Partially Confirmed')] \
            ,'Status', readonly=True, select=True, copy=False),
        'alerts': fields.function(_alerts_get, string="Alerts", type='text'),
    }
    _defaults = {
        'user_id': lambda self, cr, uid, context: uid,
        'date': fields.date.context_today,
        'state': 'new',
        'alerts': _default_alerts_get,
    }
class lunch_order_line(osv.Model):
    """
    lunch order line: one lunch order can have many order lines
    """
    _name = 'lunch.order.line'
    _description = 'lunch order line'
    def onchange_price(self, cr, uid, ids, product_id, context=None):
        # Refresh the line price from the selected product (0.0 if none).
        if product_id:
            price = self.pool.get('lunch.product').browse(cr, uid, product_id, context=context).price
            return {'value': {'price': price}}
        return {'value': {'price': 0.0}}
    def order(self, cr, uid, ids, context=None):
        """
        The order_line is ordered to the supplier but isn't received yet
        """
        self.write(cr, uid, ids, {'state': 'ordered'}, context=context)
        return self._update_order_lines(cr, uid, ids, context=context)
    def confirm(self, cr, uid, ids, context=None):
        """
        confirm one or more order line, update order status and create new cashmove
        """
        cashmove_ref = self.pool.get('lunch.cashmove')
        for order_line in self.browse(cr, uid, ids, context=context):
            # Confirming is idempotent: already-confirmed lines are skipped
            # so no duplicate cashmove is created.
            if order_line.state != 'confirmed':
                values = {
                    'user_id': order_line.user_id.id,
                    'amount': -order_line.price,
                    'description': order_line.product_id.name,
                    'order_id': order_line.id,
                    'state': 'order',
                    'date': order_line.date,
                }
                cashmove_ref.create(cr, uid, values, context=context)
                order_line.write({'state': 'confirmed'})
        return self._update_order_lines(cr, uid, ids, context=context)
    def _update_order_lines(self, cr, uid, ids, context=None):
        """
        Update the state of lunch.order based on its orderlines
        """
        orders_ref = self.pool.get('lunch.order')
        orders = []
        for order_line in self.browse(cr, uid, ids, context=context):
            orders.append(order_line.order_id)
        for order in set(orders):
            isconfirmed = True
            for orderline in order.order_line_ids:
                if orderline.state == 'new':
                    isconfirmed = False
                if orderline.state == 'cancelled':
                    # A cancelled line downgrades the order to 'partially'.
                    isconfirmed = False
                    orders_ref.write(cr, uid, [order.id], {'state': 'partially'}, context=context)
            if isconfirmed:
                orders_ref.write(cr, uid, [order.id], {'state': 'confirmed'}, context=context)
        return {}
    def cancel(self, cr, uid, ids, context=None):
        """
        cancel one or more order.line, update order status and unlink existing cashmoves
        """
        cashmove_ref = self.pool.get('lunch.cashmove')
        self.write(cr, uid, ids, {'state':'cancelled'}, context=context)
        for order_line in self.browse(cr, uid, ids, context=context):
            cash_ids = [cash.id for cash in order_line.cashmove]
            cashmove_ref.unlink(cr, uid, cash_ids, context=context)
        return self._update_order_lines(cr, uid, ids, context=context)
    def _get_line_order_ids(self, cr, uid, ids, context=None):
        """
        Store trigger: return the lunch.order.line ids belonging to the
        lunch.order records 'ids', so the related 'date' is recomputed.
        """
        result = set()
        for lunch_order in self.browse(cr, uid, ids, context=context):
            for lines in lunch_order.order_line_ids:
                result.add(lines.id)
        return list(result)
    _columns = {
        'name': fields.related('product_id', 'name', readonly=True),
        'order_id': fields.many2one('lunch.order', 'Order', ondelete='cascade'),
        'product_id': fields.many2one('lunch.product', 'Product', required=True),
        'date': fields.related('order_id', 'date', type='date', string="Date", readonly=True, store={
            'lunch.order': (_get_line_order_ids, ['date'], 10),
            'lunch.order.line': (lambda self, cr, uid, ids, ctx: ids, [], 10),
        }),
        'supplier': fields.related('product_id', 'supplier', type='many2one', relation='res.partner', string="Supplier", readonly=True, store=True),
        'user_id': fields.related('order_id', 'user_id', type='many2one', relation='res.users', string='User', readonly=True, store=True),
        'note': fields.text('Note'),
        'price': fields.float("Price"),
        'state': fields.selection([('new', 'New'), \
                                  ('confirmed', 'Received'), \
                                  ('ordered', 'Ordered'), \
                                  ('cancelled', 'Cancelled')], \
            'Status', readonly=True, select=True),
        'cashmove': fields.one2many('lunch.cashmove', 'order_id', 'Cash Move', ondelete='cascade'),
    }
    _defaults = {
        'state': 'new',
    }
class lunch_product(osv.Model):
    """
    lunch product: an item that can be ordered from a supplier.
    """
    _name = 'lunch.product'
    _description = 'lunch product'
    _columns = {
        'name': fields.char('Product', required=True),
        'category_id': fields.many2one('lunch.product.category', 'Category', required=True),
        'description': fields.text('Description', size=256),
        'price': fields.float('Price', digits=(16,2)), #TODO: use decimal precision of 'Account', move it from product to decimal_precision
        'supplier': fields.many2one('res.partner', 'Supplier'),
    }
class lunch_product_category(osv.Model):
    """
    lunch product category: a simple grouping label for products.
    """
    _name = 'lunch.product.category'
    _description = 'lunch product category'
    _columns = {
        'name': fields.char('Category', required=True), #such as PIZZA, SANDWICH, PASTA, CHINESE, BURGER, ...
    }
class lunch_cashmove(osv.Model):
    """
    lunch cashmove => order or payment
    """
    _name = 'lunch.cashmove'
    _description = 'lunch cashmove'
    _columns = {
        'user_id': fields.many2one('res.users', 'User Name', required=True),
        'date': fields.date('Date', required=True),
        'amount': fields.float('Amount', required=True), #depending on the kind of cashmove, the amount will be positive or negative
        'description': fields.text('Description'), #the description can be an order or a payment
        'order_id': fields.many2one('lunch.order.line', 'Order', ondelete='cascade'),
        'state': fields.selection([('order','Order'), ('payment','Payment')], 'Is an order or a Payment'),
    }
    _defaults = {
        'user_id': lambda self, cr, uid, context: uid,
        'date': fields.date.context_today,
        'state': 'payment',
    }
class lunch_alert(osv.Model):
    """
    lunch alert: a message shown on new orders, on a specific day, on
    selected weekdays, or every day, within an hour window.
    """
    _name = 'lunch.alert'
    _description = 'Lunch Alert'
    _columns = {
        'message': fields.text('Message', size=256, required=True),
        'alter_type': fields.selection([('specific', 'Specific Day'), \
                                        ('week', 'Every Week'), \
                                        ('days', 'Every Day')], \
            string='Recurrency', required=True, select=True),
        'specific_day': fields.date('Day'),
        'monday': fields.boolean('Monday'),
        'tuesday': fields.boolean('Tuesday'),
        'wednesday': fields.boolean('Wednesday'),
        'thursday': fields.boolean('Thursday'),
        'friday': fields.boolean('Friday'),
        'saturday': fields.boolean('Saturday'),
        'sunday': fields.boolean('Sunday'),
        'active_from': fields.float('Between', required=True),
        'active_to': fields.float('And', required=True),
    }
    _defaults = {
        'alter_type': 'specific',
        'specific_day': fields.date.context_today,
        'active_from': 7,
        'active_to': 23,
    }
| agpl-3.0 |
osspeak/osspeak | osspeak/recognition/commands/monitor.py | 1 | 3535 | import threading
import collections
import log
import copy
import asyncio
import settings
import clargs
from recognition.actions.library import pywindow
from recognition.commands import loader
from recognition.actions import perform
from communication import topics, pubsub
import time
def create_message_subscriptions(msg_list, command_module_controller):
    """Wire pubsub topics to the shared message cell and the command controller."""
    def on_reload_files():
        set_message(msg_list, topics.RELOAD_COMMAND_MODULE_FILES)

    def on_reload_grammar():
        set_message(msg_list, topics.RELOAD_GRAMMAR)

    def on_perform(grammar_id, words):
        perform_commands(command_module_controller, grammar_id, words)

    pubsub.subscribe(topics.RELOAD_COMMAND_MODULE_FILES, on_reload_files)
    pubsub.subscribe(topics.RELOAD_GRAMMAR, on_reload_grammar)
    pubsub.subscribe(topics.PERFORM_COMMANDS, on_perform)
def start_watching_user_state():
    """Load the command modules and schedule the user/system state watcher.

    Must be called from a thread with a running asyncio event loop.
    """
    # One-element list used as a mutable cell so the pubsub callbacks can
    # hand reload messages to the watcher coroutine.
    msg_list = [None]
    command_module_file_pattern = settings.settings['file_pattern']
    module_loader = loader.StaticFileCommandModuleLoader(settings.settings['command_directory'], command_module_file_pattern)
    command_module_controller = loader.CommandModuleController(module_loader)
    command_module_controller.command_modules = command_module_controller.initialize_command_modules()
    # (Removed: an engine_status_history deque was created here but never used.)
    create_message_subscriptions(msg_list, command_module_controller)
    fut = watch_user_system_state(msg_list, command_module_controller)
    asyncio.ensure_future(fut)
async def watch_user_system_state(msg_list, command_module_controller):
    """Poll once per second for changes that require reloading commands.

    Watches the foreground window title, the user-visible state namespace
    and the engine settings; reloads command modules when any of them
    change or when a reload message arrives through ``msg_list``.
    """
    from recognition.actions.library.stdlib import namespace, engine
    previous_window = None
    previous_state = None
    previous_engine_settings = copy.copy(engine.settings)
    initial_load_done = False
    while True:
        current_state = copy.copy(namespace['state'])
        current_window = pywindow.foreground_window().title.lower()
        current_engine_settings = copy.copy(engine.settings)
        is_different_window = current_window != previous_window
        is_different_state = current_state != previous_state
        is_different_engine_settings = current_engine_settings != previous_engine_settings
        msg = msg_list[0]
        if is_different_window or is_different_state or msg:
            msg_list[0] = None
            new_active_modules = command_module_controller.get_active_modules(current_window)
            reload_files = msg == topics.RELOAD_COMMAND_MODULE_FILES
            if new_active_modules != command_module_controller.active_command_modules or reload_files:
                # Bug fix: the computed flag was previously ignored and False
                # was always passed, so modules were never (re)initialized on
                # the first load or on an explicit file-reload request.
                initialize_modules = not initial_load_done or reload_files
                command_module_controller.load_modules(current_window, initialize_modules=initialize_modules)
                initial_load_done = True
            elif msg == topics.RELOAD_GRAMMAR:
                # Grammar-only reload is not supported yet (the old code
                # raised before an unreachable load_and_send_grammar() call,
                # now removed).
                raise NotImplementedError
            previous_window = current_window
            previous_state = current_state
        if is_different_engine_settings:
            pubsub.publish(topics.SET_ENGINE_SETTINGS, current_engine_settings)
            previous_engine_settings = current_engine_settings
        await asyncio.sleep(1)
def set_message(msg_list, msg):
    """Place *msg* into the shared single-slot message cell read by the watcher."""
    msg_list[0] = msg
def perform_commands(command_module_controller: loader.CommandModuleController, grammar_id: str, words):
    # Execute the recognized *words* against the grammar registered under
    # *grammar_id*.  Stale grammar ids (e.g. after a module reload replaced
    # the grammars) are logged and ignored rather than raising.
    try:
        grammar_context = command_module_controller.grammars[grammar_id]
    except KeyError:
        log.logger.warning(f'Grammar {grammar_id} no longer exists')
        return
    perform.perform_commands(grammar_context, words)
icereval/waterbutler | waterbutler/core/streams/base.py | 7 | 3254 | import abc
import asyncio
class BaseStream(asyncio.StreamReader, metaclass=abc.ABCMeta):
    """Abstract stream that tees every chunk read to registered readers
    and writers, letting consumers observe the data as it flows through."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # name -> StreamReader that is fed a copy of every chunk read
        self.readers = {}
        # name -> writer that gets every chunk written through it
        self.writers = {}
    @abc.abstractproperty
    def size(self):
        # Total number of bytes this stream will produce.
        pass
    def add_reader(self, name, reader):
        self.readers[name] = reader
    def remove_reader(self, name):
        del self.readers[name]
    def add_writer(self, name, writer):
        self.writers[name] = writer
    def remove_writer(self, name):
        del self.writers[name]
    def feed_eof(self):
        # Propagate EOF to every attached reader and, where supported,
        # every writer.
        super().feed_eof()
        for reader in self.readers.values():
            reader.feed_eof()
        for writer in self.writers.values():
            if hasattr(writer, 'can_write_eof') and writer.can_write_eof():
                writer.write_eof()
    @asyncio.coroutine
    def read(self, size=-1):
        # Capture EOF state before reading: once EOF has been reached,
        # subsequent reads return b'' and must not be re-fed to the tees.
        eof = self.at_eof()
        data = yield from self._read(size)
        if not eof:
            for reader in self.readers.values():
                reader.feed_data(data)
            for writer in self.writers.values():
                writer.write(data)
        return data
    @abc.abstractmethod
    @asyncio.coroutine
    def _read(self, size):
        # Subclasses produce up to `size` bytes (-1 for all remaining).
        pass
class MultiStream(asyncio.StreamReader):
    """Concatenate a series of `StreamReader` objects into a single stream.

    Reads from the current stream until exhausted, then continues to the next,
    etc. Used to build streaming form data for Figshare uploads.

    Originally written by @jmcarp

    Fixes over the original version: coroutines are native ``async def``
    (``@asyncio.coroutine`` was removed in Python 3.11), and ``read(-1)`` now
    drains every sub-stream in order — the old code delegated negative sizes
    to ``StreamReader.read``, which waited on this object's own buffer (never
    fed) and left the loop's ``n == -1`` branch unreachable.
    """

    def __init__(self, *streams):
        super().__init__()
        self._size = 0
        self.stream = []     # the stream currently being read (None once all are exhausted)
        self._streams = []   # streams queued but not yet started
        self.add_streams(*streams)

    @property
    def size(self):
        """Combined size of all streams ever added."""
        return self._size

    @property
    def streams(self):
        """Streams still queued (not including the one currently active)."""
        return self._streams

    def add_streams(self, *streams):
        """Queue additional streams, activating the first if none is active."""
        self._size += sum(x.size for x in streams)
        self._streams.extend(streams)
        if not self.stream:
            self._cycle()

    async def read(self, n=-1):
        """Read up to *n* bytes across stream boundaries; *n* < 0 reads all."""
        chunk = b''
        while self.stream and (len(chunk) < n or n < 0):
            if n < 0:
                chunk += await self.stream.read(-1)
            else:
                chunk += await self.stream.read(n - len(chunk))
            if self.stream.at_eof():
                self._cycle()
        return chunk

    def _cycle(self):
        """Advance to the next queued stream, feeding EOF once all are done."""
        try:
            self.stream = self.streams.pop(0)
        except IndexError:
            self.stream = None
            self.feed_eof()
class StringStream(BaseStream):
    """In-memory stream over a fixed ``str`` or ``bytes`` payload.

    ``str`` input is encoded as UTF-8; anything else raises TypeError.
    Uses a native ``async def`` for ``_read`` because ``@asyncio.coroutine``
    was removed in Python 3.11.
    """

    def __init__(self, data):
        super().__init__()
        if isinstance(data, str):
            data = data.encode('UTF-8')
        elif not isinstance(data, bytes):
            raise TypeError('Data must be either str or bytes, found {!r}'.format(type(data)))

        self._size = len(data)
        # Load the whole payload up front and mark EOF: the stream is static.
        self.feed_data(data)
        self.feed_eof()

    @property
    def size(self):
        return self._size

    async def _read(self, n=-1):
        return await asyncio.StreamReader.read(self, n)
| apache-2.0 |
andreaso/ansible | lib/ansible/modules/cloud/cloudstack/cs_pod.py | 39 | 7969 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2016, René Moser <mail@renemoser.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cs_pod
short_description: Manages pods on Apache CloudStack based clouds.
description:
- Create, update, delete pods.
version_added: "2.1"
author: "René Moser (@resmo)"
options:
name:
description:
- Name of the pod.
required: true
id:
description:
- UUID of the existing pod.
default: null
required: false
start_ip:
description:
- Starting IP address for the Pod.
- Required on C(state=present)
default: null
required: false
end_ip:
description:
- Ending IP address for the Pod.
default: null
required: false
netmask:
description:
- Netmask for the Pod.
- Required on C(state=present)
default: null
required: false
gateway:
description:
- Gateway for the Pod.
- Required on C(state=present)
default: null
required: false
zone:
description:
- Name of the zone in which the pod belongs to.
- If not set, default zone is used.
required: false
default: null
state:
description:
- State of the pod.
required: false
default: 'present'
choices: [ 'present', 'enabled', 'disabled', 'absent' ]
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
# Ensure a pod is present
- local_action:
module: cs_pod
name: pod1
zone: ch-zrh-ix-01
start_ip: 10.100.10.101
gateway: 10.100.10.1
netmask: 255.255.255.0
# Ensure a pod is disabled
- local_action:
module: cs_pod
name: pod1
zone: ch-zrh-ix-01
state: disabled
# Ensure a pod is enabled
- local_action:
module: cs_pod
name: pod1
zone: ch-zrh-ix-01
state: enabled
# Ensure a pod is absent
- local_action:
module: cs_pod
name: pod1
zone: ch-zrh-ix-01
state: absent
'''
RETURN = '''
---
id:
description: UUID of the pod.
returned: success
type: string
sample: 04589590-ac63-4ffc-93f5-b698b8ac38b6
name:
description: Name of the pod.
returned: success
type: string
sample: pod01
start_ip:
description: Starting IP of the pod.
returned: success
type: string
sample: 10.100.1.101
end_ip:
description: Ending IP of the pod.
returned: success
type: string
sample: 10.100.1.254
netmask:
description: Netmask of the pod.
returned: success
type: string
sample: 255.255.255.0
gateway:
description: Gateway of the pod.
returned: success
type: string
sample: 10.100.1.1
allocation_state:
description: State of the pod.
returned: success
type: string
sample: Enabled
zone:
description: Name of zone the pod is in.
returned: success
type: string
sample: ch-gva-2
'''
# import cloudstack common
from ansible.module_utils.cloudstack import *
class AnsibleCloudStackPod(AnsibleCloudStack):
    """CloudStack pod management: create, update, enable/disable and delete."""

    def __init__(self, module):
        super(AnsibleCloudStackPod, self).__init__(module)
        # Map CloudStack API result keys onto the snake_case keys we return.
        self.returns = {
            'endip': 'end_ip',
            'startip': 'start_ip',
            'gateway': 'gateway',
            'netmask': 'netmask',
            'allocationstate': 'allocation_state',
        }
        self.pod = None  # cached lookup result, filled by get_pod()

    def _get_common_pod_args(self):
        """Build the argument dict shared by createPod and updatePod."""
        args = {
            'name': self.module.params.get('name'),
            'zoneid': self.get_zone(key='id'),
            'startip': self.module.params.get('start_ip'),
            'endip': self.module.params.get('end_ip'),
            'netmask': self.module.params.get('netmask'),
            'gateway': self.module.params.get('gateway'),
        }
        state = self.module.params.get('state')
        if state in ['enabled', 'disabled']:
            args['allocationstate'] = state.capitalize()
        return args

    def get_pod(self):
        """Return the pod (looked up by id if given, else by name), caching it."""
        if not self.pod:
            args = {}
            uuid = self.module.params.get('id')
            if uuid:
                args['id'] = uuid
                args['zoneid'] = self.get_zone(key='id')
                pods = self.cs.listPods(**args)
                if pods:
                    self.pod = pods['pod'][0]
                    return self.pod
            args['name'] = self.module.params.get('name')
            args['zoneid'] = self.get_zone(key='id')
            pods = self.cs.listPods(**args)
            if pods:
                self.pod = pods['pod'][0]
        return self.pod

    def present_pod(self):
        """Ensure the pod exists, creating or updating as appropriate."""
        pod = self.get_pod()
        if pod:
            return self._update_pod()
        return self._create_pod()

    def _create_pod(self):
        """Create the pod; start_ip, netmask and gateway are mandatory here."""
        self.module.fail_on_missing_params(required_params=[
            'start_ip',
            'netmask',
            'gateway',
        ])

        pod = None
        self.result['changed'] = True
        args = self._get_common_pod_args()
        if not self.module.check_mode:
            res = self.cs.createPod(**args)
            if 'errortext' in res:
                self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
            pod = res['pod']
        return pod

    def _update_pod(self):
        """Update the pod only if any of the common arguments changed."""
        pod = self.get_pod()
        args = self._get_common_pod_args()
        args['id'] = pod['id']

        if self.has_changed(args, pod):
            self.result['changed'] = True
            if not self.module.check_mode:
                res = self.cs.updatePod(**args)
                if 'errortext' in res:
                    self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
                pod = res['pod']
        return pod

    def absent_pod(self):
        """Delete the pod if it exists; returns the pod that was removed."""
        pod = self.get_pod()
        if pod:
            self.result['changed'] = True
            if not self.module.check_mode:
                res = self.cs.deletePod(id=pod['id'])
                if 'errortext' in res:
                    self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
        return pod
def main():
    """Module entry point: build the argument spec, dispatch on state, exit."""
    argument_spec = cs_argument_spec()
    argument_spec.update(dict(
        id=dict(default=None),
        name=dict(required=True),
        gateway=dict(default=None),
        netmask=dict(default=None),
        start_ip=dict(default=None),
        end_ip=dict(default=None),
        zone=dict(default=None),
        state=dict(choices=['present', 'enabled', 'disabled', 'absent'], default='present'),
    ))

    module = AnsibleModule(
        argument_spec=argument_spec,
        required_together=cs_required_together(),
        supports_check_mode=True
    )

    try:
        acs_pod = AnsibleCloudStackPod(module)

        state = module.params.get('state')
        if state == 'absent':
            pod = acs_pod.absent_pod()
        else:
            pod = acs_pod.present_pod()
        result = acs_pod.get_result(pod)
    except CloudStackException as e:
        module.fail_json(msg='CloudStackException: %s' % str(e))

    module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| gpl-3.0 |
Senseg/Py4A | python3-alpha/extra_modules/gdata/tlslite/TLSConnection.py | 48 | 70329 | """
MAIN CLASS FOR TLS LITE (START HERE!).
"""
import socket
from .utils.compat import formatExceptionTrace
from .TLSRecordLayer import TLSRecordLayer
from .Session import Session
from .constants import *
from .utils.cryptomath import getRandomBytes
from .errors import *
from .messages import *
from .mathtls import *
from .HandshakeSettings import HandshakeSettings
class TLSConnection(TLSRecordLayer):
"""
This class wraps a socket and provides TLS handshaking and data
transfer.
To use this class, create a new instance, passing a connected
socket into the constructor. Then call some handshake function.
If the handshake completes without raising an exception, then a TLS
connection has been negotiated. You can transfer data over this
connection as if it were a socket.
This class provides both synchronous and asynchronous versions of
its key functions. The synchronous versions should be used when
writing single-or multi-threaded code using blocking sockets. The
asynchronous versions should be used when performing asynchronous,
event-based I/O with non-blocking sockets.
Asynchronous I/O is a complicated subject; typically, you should
not use the asynchronous functions directly, but should use some
framework like asyncore or Twisted which TLS Lite integrates with
(see
L{tlslite.integration.TLSAsyncDispatcherMixIn.TLSAsyncDispatcherMixIn} or
L{tlslite.integration.TLSTwistedProtocolWrapper.TLSTwistedProtocolWrapper}).
"""
def __init__(self, sock):
"""Create a new TLSConnection instance.
@param sock: The socket data will be transmitted on. The
socket should already be connected. It may be in blocking or
non-blocking mode.
@type sock: L{socket.socket}
"""
TLSRecordLayer.__init__(self, sock)
    def handshakeClientSRP(self, username, password, session=None,
                           settings=None, checker=None, async=False):
        """Perform an SRP handshake in the role of client.

        This function performs a TLS/SRP handshake. SRP mutually
        authenticates both parties to each other using only a
        username and password. This function may also perform a
        combined SRP and server-certificate handshake, if the server
        chooses to authenticate itself with a certificate chain in
        addition to doing SRP.

        TLS/SRP is non-standard. Most TLS implementations don't
        support it. See
        U{http://www.ietf.org/html.charters/tls-charter.html} or
        U{http://trevp.net/tlssrp/} for the latest information on
        TLS/SRP.

        Like any handshake function, this can be called on a closed
        TLS connection, or on a TLS connection that is already open.
        If called on an open connection it performs a re-handshake.

        If the function completes without raising an exception, the
        TLS connection will be open and available for data transfer.

        If an exception is raised, the connection will have been
        automatically closed (if it was ever open).

        @type username: str
        @param username: The SRP username.

        @type password: str
        @param password: The SRP password.

        @type session: L{tlslite.Session.Session}
        @param session: A TLS session to attempt to resume. This
        session must be an SRP session performed with the same username
        and password as were passed in. If the resumption does not
        succeed, a full SRP handshake will be performed.

        @type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
        @param settings: Various settings which can be used to control
        the ciphersuites, certificate types, and SSL/TLS versions
        offered by the client.

        @type checker: L{tlslite.Checker.Checker}
        @param checker: A Checker instance. This instance will be
        invoked to examine the other party's authentication
        credentials, if the handshake completes successfully.

        @type async: bool
        @param async: If False, this function will block until the
        handshake is completed. If True, this function will return a
        generator. Successive invocations of the generator will
        return 0 if it is waiting to read from the socket, 1 if it is
        waiting to write to the socket, or will raise StopIteration if
        the handshake operation is completed.

        @rtype: None or an iterable
        @return: If 'async' is True, a generator object will be
        returned.

        @raise socket.error: If a socket error occurs.
        @raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
        without a preceding alert.
        @raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
        @raise tlslite.errors.TLSAuthenticationError: If the checker
        doesn't like the other party's authentication credentials.
        """
        # NOTE(review): 'async' is a reserved keyword since Python 3.7, so this
        # parameter name (and the 'if async:' test below) only parses on older
        # interpreters. Renaming it would break keyword callers — confirm a
        # migration plan before touching it.
        handshaker = self._handshakeClientAsync(srpParams=(username, password),
                        session=session, settings=settings, checker=checker)
        if async:
            return handshaker
        # Synchronous mode: drive the generator to completion, blocking on
        # socket I/O as needed.
        for result in handshaker:
            pass
    def handshakeClientCert(self, certChain=None, privateKey=None,
                            session=None, settings=None, checker=None,
                            async=False):
        """Perform a certificate-based handshake in the role of client.

        This function performs an SSL or TLS handshake. The server
        will authenticate itself using an X.509 or cryptoID certificate
        chain. If the handshake succeeds, the server's certificate
        chain will be stored in the session's serverCertChain attribute.
        Unless a checker object is passed in, this function does no
        validation or checking of the server's certificate chain.

        If the server requests client authentication, the
        client will send the passed-in certificate chain, and use the
        passed-in private key to authenticate itself. If no
        certificate chain and private key were passed in, the client
        will attempt to proceed without client authentication. The
        server may or may not allow this.

        Like any handshake function, this can be called on a closed
        TLS connection, or on a TLS connection that is already open.
        If called on an open connection it performs a re-handshake.

        If the function completes without raising an exception, the
        TLS connection will be open and available for data transfer.

        If an exception is raised, the connection will have been
        automatically closed (if it was ever open).

        @type certChain: L{tlslite.X509CertChain.X509CertChain} or
        L{cryptoIDlib.CertChain.CertChain}
        @param certChain: The certificate chain to be used if the
        server requests client authentication.

        @type privateKey: L{tlslite.utils.RSAKey.RSAKey}
        @param privateKey: The private key to be used if the server
        requests client authentication.

        @type session: L{tlslite.Session.Session}
        @param session: A TLS session to attempt to resume. If the
        resumption does not succeed, a full handshake will be
        performed.

        @type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
        @param settings: Various settings which can be used to control
        the ciphersuites, certificate types, and SSL/TLS versions
        offered by the client.

        @type checker: L{tlslite.Checker.Checker}
        @param checker: A Checker instance. This instance will be
        invoked to examine the other party's authentication
        credentials, if the handshake completes successfully.

        @type async: bool
        @param async: If False, this function will block until the
        handshake is completed. If True, this function will return a
        generator. Successive invocations of the generator will
        return 0 if it is waiting to read from the socket, 1 if it is
        waiting to write to the socket, or will raise StopIteration if
        the handshake operation is completed.

        @rtype: None or an iterable
        @return: If 'async' is True, a generator object will be
        returned.

        @raise socket.error: If a socket error occurs.
        @raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
        without a preceding alert.
        @raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
        @raise tlslite.errors.TLSAuthenticationError: If the checker
        doesn't like the other party's authentication credentials.
        """
        # NOTE(review): 'async' is a reserved keyword since Python 3.7 — see
        # handshakeClientSRP; the same caveat applies here.
        handshaker = self._handshakeClientAsync(certParams=(certChain,
                        privateKey), session=session, settings=settings,
                        checker=checker)
        if async:
            return handshaker
        # Synchronous mode: drive the generator to completion.
        for result in handshaker:
            pass
    def handshakeClientUnknown(self, srpCallback=None, certCallback=None,
                               session=None, settings=None, checker=None,
                               async=False):
        """Perform a to-be-determined type of handshake in the role of client.

        This function performs an SSL or TLS handshake. If the server
        requests client certificate authentication, the
        certCallback will be invoked and should return a (certChain,
        privateKey) pair. If the callback returns None, the library
        will attempt to proceed without client authentication. The
        server may or may not allow this.

        If the server requests SRP authentication, the srpCallback
        will be invoked and should return a (username, password) pair.
        If the callback returns None, the local implementation will
        signal a user_canceled error alert.

        After the handshake completes, the client can inspect the
        connection's session attribute to determine what type of
        authentication was performed.

        Like any handshake function, this can be called on a closed
        TLS connection, or on a TLS connection that is already open.
        If called on an open connection it performs a re-handshake.

        If the function completes without raising an exception, the
        TLS connection will be open and available for data transfer.

        If an exception is raised, the connection will have been
        automatically closed (if it was ever open).

        @type srpCallback: callable
        @param srpCallback: The callback to be used if the server
        requests SRP authentication. If None, the client will not
        offer support for SRP ciphersuites.

        @type certCallback: callable
        @param certCallback: The callback to be used if the server
        requests client certificate authentication.

        @type session: L{tlslite.Session.Session}
        @param session: A TLS session to attempt to resume. If the
        resumption does not succeed, a full handshake will be
        performed.

        @type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
        @param settings: Various settings which can be used to control
        the ciphersuites, certificate types, and SSL/TLS versions
        offered by the client.

        @type checker: L{tlslite.Checker.Checker}
        @param checker: A Checker instance. This instance will be
        invoked to examine the other party's authentication
        credentials, if the handshake completes successfully.

        @type async: bool
        @param async: If False, this function will block until the
        handshake is completed. If True, this function will return a
        generator. Successive invocations of the generator will
        return 0 if it is waiting to read from the socket, 1 if it is
        waiting to write to the socket, or will raise StopIteration if
        the handshake operation is completed.

        @rtype: None or an iterable
        @return: If 'async' is True, a generator object will be
        returned.

        @raise socket.error: If a socket error occurs.
        @raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
        without a preceding alert.
        @raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
        @raise tlslite.errors.TLSAuthenticationError: If the checker
        doesn't like the other party's authentication credentials.
        """
        # NOTE(review): 'async' is a reserved keyword since Python 3.7 — see
        # handshakeClientSRP; the same caveat applies here.
        handshaker = self._handshakeClientAsync(unknownParams=(srpCallback,
                        certCallback), session=session, settings=settings,
                        checker=checker)
        if async:
            return handshaker
        # Synchronous mode: drive the generator to completion.
        for result in handshaker:
            pass
    def handshakeClientSharedKey(self, username, sharedKey, settings=None,
                                 checker=None, async=False):
        """Perform a shared-key handshake in the role of client.

        This function performs a shared-key handshake. Using shared
        symmetric keys of high entropy (128 bits or greater) mutually
        authenticates both parties to each other.

        TLS with shared-keys is non-standard. Most TLS
        implementations don't support it. See
        U{http://www.ietf.org/html.charters/tls-charter.html} for the
        latest information on TLS with shared-keys. If the shared-keys
        Internet-Draft changes or is superseded, TLS Lite will track
        those changes, so the shared-key support in later versions of
        TLS Lite may become incompatible with this version.

        Like any handshake function, this can be called on a closed
        TLS connection, or on a TLS connection that is already open.
        If called on an open connection it performs a re-handshake.

        If the function completes without raising an exception, the
        TLS connection will be open and available for data transfer.

        If an exception is raised, the connection will have been
        automatically closed (if it was ever open).

        @type username: str
        @param username: The shared-key username.

        @type sharedKey: str
        @param sharedKey: The shared key.

        @type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
        @param settings: Various settings which can be used to control
        the ciphersuites, certificate types, and SSL/TLS versions
        offered by the client.

        @type checker: L{tlslite.Checker.Checker}
        @param checker: A Checker instance. This instance will be
        invoked to examine the other party's authentication
        credentials, if the handshake completes successfully.

        @type async: bool
        @param async: If False, this function will block until the
        handshake is completed. If True, this function will return a
        generator. Successive invocations of the generator will
        return 0 if it is waiting to read from the socket, 1 if it is
        waiting to write to the socket, or will raise StopIteration if
        the handshake operation is completed.

        @rtype: None or an iterable
        @return: If 'async' is True, a generator object will be
        returned.

        @raise socket.error: If a socket error occurs.
        @raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
        without a preceding alert.
        @raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
        @raise tlslite.errors.TLSAuthenticationError: If the checker
        doesn't like the other party's authentication credentials.
        """
        # NOTE(review): 'async' is a reserved keyword since Python 3.7 — see
        # handshakeClientSRP; the same caveat applies here.
        handshaker = self._handshakeClientAsync(sharedKeyParams=(username,
                        sharedKey), settings=settings, checker=checker)
        if async:
            return handshaker
        # Synchronous mode: drive the generator to completion.
        for result in handshaker:
            pass
def _handshakeClientAsync(self, srpParams=(), certParams=(),
unknownParams=(), sharedKeyParams=(),
session=None, settings=None, checker=None,
recursive=False):
handshaker = self._handshakeClientAsyncHelper(srpParams=srpParams,
certParams=certParams, unknownParams=unknownParams,
sharedKeyParams=sharedKeyParams, session=session,
settings=settings, recursive=recursive)
for result in self._handshakeWrapperAsync(handshaker, checker):
yield result
def _handshakeClientAsyncHelper(self, srpParams, certParams, unknownParams,
sharedKeyParams, session, settings, recursive):
if not recursive:
self._handshakeStart(client=True)
#Unpack parameters
srpUsername = None # srpParams
password = None # srpParams
clientCertChain = None # certParams
privateKey = None # certParams
srpCallback = None # unknownParams
certCallback = None # unknownParams
#session # sharedKeyParams (or session)
#settings # settings
if srpParams:
srpUsername, password = srpParams
elif certParams:
clientCertChain, privateKey = certParams
elif unknownParams:
srpCallback, certCallback = unknownParams
elif sharedKeyParams:
session = Session()._createSharedKey(*sharedKeyParams)
if not settings:
settings = HandshakeSettings()
settings = settings._filter()
#Validate parameters
if srpUsername and not password:
raise ValueError("Caller passed a username but no password")
if password and not srpUsername:
raise ValueError("Caller passed a password but no username")
if clientCertChain and not privateKey:
raise ValueError("Caller passed a certChain but no privateKey")
if privateKey and not clientCertChain:
raise ValueError("Caller passed a privateKey but no certChain")
if clientCertChain:
foundType = False
try:
import cryptoIDlib.CertChain
if isinstance(clientCertChain, cryptoIDlib.CertChain.CertChain):
if "cryptoID" not in settings.certificateTypes:
raise ValueError("Client certificate doesn't "\
"match Handshake Settings")
settings.certificateTypes = ["cryptoID"]
foundType = True
except ImportError:
pass
if not foundType and isinstance(clientCertChain,
X509CertChain):
if "x509" not in settings.certificateTypes:
raise ValueError("Client certificate doesn't match "\
"Handshake Settings")
settings.certificateTypes = ["x509"]
foundType = True
if not foundType:
raise ValueError("Unrecognized certificate type")
if session:
if not session.valid():
session = None #ignore non-resumable sessions...
elif session.resumable and \
(session.srpUsername != srpUsername):
raise ValueError("Session username doesn't match")
#Add Faults to parameters
if srpUsername and self.fault == Fault.badUsername:
srpUsername += "GARBAGE"
if password and self.fault == Fault.badPassword:
password += "GARBAGE"
if sharedKeyParams:
identifier = sharedKeyParams[0]
sharedKey = sharedKeyParams[1]
if self.fault == Fault.badIdentifier:
identifier += "GARBAGE"
session = Session()._createSharedKey(identifier, sharedKey)
elif self.fault == Fault.badSharedKey:
sharedKey += "GARBAGE"
session = Session()._createSharedKey(identifier, sharedKey)
#Initialize locals
serverCertChain = None
cipherSuite = 0
certificateType = CertificateType.x509
premasterSecret = None
#Get client nonce
clientRandom = getRandomBytes(32)
#Initialize acceptable ciphersuites
cipherSuites = []
if srpParams:
cipherSuites += CipherSuite.getSrpRsaSuites(settings.cipherNames)
cipherSuites += CipherSuite.getSrpSuites(settings.cipherNames)
elif certParams:
cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
elif unknownParams:
if srpCallback:
cipherSuites += \
CipherSuite.getSrpRsaSuites(settings.cipherNames)
cipherSuites += \
CipherSuite.getSrpSuites(settings.cipherNames)
cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
elif sharedKeyParams:
cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
else:
cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
#Initialize acceptable certificate types
certificateTypes = settings._getCertificateTypes()
#Tentatively set the version to the client's minimum version.
#We'll use this for the ClientHello, and if an error occurs
#parsing the Server Hello, we'll use this version for the response
self.version = settings.maxVersion
#Either send ClientHello (with a resumable session)...
if session:
#If it's a resumable (i.e. not a shared-key session), then its
#ciphersuite must be one of the acceptable ciphersuites
if (not sharedKeyParams) and \
session.cipherSuite not in cipherSuites:
raise ValueError("Session's cipher suite not consistent "\
"with parameters")
else:
clientHello = ClientHello()
clientHello.create(settings.maxVersion, clientRandom,
session.sessionID, cipherSuites,
certificateTypes, session.srpUsername)
#Or send ClientHello (without)
else:
clientHello = ClientHello()
clientHello.create(settings.maxVersion, clientRandom,
createByteArraySequence([]), cipherSuites,
certificateTypes, srpUsername)
for result in self._sendMsg(clientHello):
yield result
#Get ServerHello (or missing_srp_username)
for result in self._getMsg((ContentType.handshake,
ContentType.alert),
HandshakeType.server_hello):
if result in (0,1):
yield result
else:
break
msg = result
if isinstance(msg, ServerHello):
serverHello = msg
elif isinstance(msg, Alert):
alert = msg
#If it's not a missing_srp_username, re-raise
if alert.description != AlertDescription.missing_srp_username:
self._shutdown(False)
raise TLSRemoteAlert(alert)
#If we're not in SRP callback mode, we won't have offered SRP
#without a username, so we shouldn't get this alert
if not srpCallback:
for result in self._sendError(\
AlertDescription.unexpected_message):
yield result
srpParams = srpCallback()
#If the callback returns None, cancel the handshake
if srpParams == None:
for result in self._sendError(AlertDescription.user_canceled):
yield result
#Recursively perform handshake
for result in self._handshakeClientAsyncHelper(srpParams,
None, None, None, None, settings, True):
yield result
return
#Get the server version. Do this before anything else, so any
#error alerts will use the server's version
self.version = serverHello.server_version
#Future responses from server must use this version
self._versionCheck = True
#Check ServerHello
if serverHello.server_version < settings.minVersion:
for result in self._sendError(\
AlertDescription.protocol_version,
"Too old version: %s" % str(serverHello.server_version)):
yield result
if serverHello.server_version > settings.maxVersion:
for result in self._sendError(\
AlertDescription.protocol_version,
"Too new version: %s" % str(serverHello.server_version)):
yield result
if serverHello.cipher_suite not in cipherSuites:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Server responded with incorrect ciphersuite"):
yield result
if serverHello.certificate_type not in certificateTypes:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Server responded with incorrect certificate type"):
yield result
if serverHello.compression_method != 0:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Server responded with incorrect compression method"):
yield result
#Get the server nonce
serverRandom = serverHello.random
#If the server agrees to resume
if session and session.sessionID and \
serverHello.session_id == session.sessionID:
#If a shared-key, we're flexible about suites; otherwise the
#server-chosen suite has to match the session's suite
if sharedKeyParams:
session.cipherSuite = serverHello.cipher_suite
elif serverHello.cipher_suite != session.cipherSuite:
for result in self._sendError(\
AlertDescription.illegal_parameter,\
"Server's ciphersuite doesn't match session"):
yield result
#Set the session for this connection
self.session = session
#Calculate pending connection states
self._calcPendingStates(clientRandom, serverRandom,
settings.cipherImplementations)
#Exchange ChangeCipherSpec and Finished messages
for result in self._getFinished():
yield result
for result in self._sendFinished():
yield result
#Mark the connection as open
self._handshakeDone(resumed=True)
#If server DOES NOT agree to resume
else:
if sharedKeyParams:
for result in self._sendError(\
AlertDescription.user_canceled,
"Was expecting a shared-key resumption"):
yield result
#We've already validated these
cipherSuite = serverHello.cipher_suite
certificateType = serverHello.certificate_type
#If the server chose an SRP suite...
if cipherSuite in CipherSuite.srpSuites:
#Get ServerKeyExchange, ServerHelloDone
for result in self._getMsg(ContentType.handshake,
HandshakeType.server_key_exchange, cipherSuite):
if result in (0,1):
yield result
else:
break
serverKeyExchange = result
for result in self._getMsg(ContentType.handshake,
HandshakeType.server_hello_done):
if result in (0,1):
yield result
else:
break
serverHelloDone = result
#If the server chose an SRP+RSA suite...
elif cipherSuite in CipherSuite.srpRsaSuites:
#Get Certificate, ServerKeyExchange, ServerHelloDone
for result in self._getMsg(ContentType.handshake,
HandshakeType.certificate, certificateType):
if result in (0,1):
yield result
else:
break
serverCertificate = result
for result in self._getMsg(ContentType.handshake,
HandshakeType.server_key_exchange, cipherSuite):
if result in (0,1):
yield result
else:
break
serverKeyExchange = result
for result in self._getMsg(ContentType.handshake,
HandshakeType.server_hello_done):
if result in (0,1):
yield result
else:
break
serverHelloDone = result
#If the server chose an RSA suite...
elif cipherSuite in CipherSuite.rsaSuites:
#Get Certificate[, CertificateRequest], ServerHelloDone
for result in self._getMsg(ContentType.handshake,
HandshakeType.certificate, certificateType):
if result in (0,1):
yield result
else:
break
serverCertificate = result
for result in self._getMsg(ContentType.handshake,
(HandshakeType.server_hello_done,
HandshakeType.certificate_request)):
if result in (0,1):
yield result
else:
break
msg = result
certificateRequest = None
if isinstance(msg, CertificateRequest):
certificateRequest = msg
for result in self._getMsg(ContentType.handshake,
HandshakeType.server_hello_done):
if result in (0,1):
yield result
else:
break
serverHelloDone = result
elif isinstance(msg, ServerHelloDone):
serverHelloDone = msg
else:
raise AssertionError()
#Calculate SRP premaster secret, if server chose an SRP or
#SRP+RSA suite
if cipherSuite in CipherSuite.srpSuites + \
CipherSuite.srpRsaSuites:
#Get and check the server's group parameters and B value
N = serverKeyExchange.srp_N
g = serverKeyExchange.srp_g
s = serverKeyExchange.srp_s
B = serverKeyExchange.srp_B
if (g,N) not in goodGroupParameters:
for result in self._sendError(\
AlertDescription.untrusted_srp_parameters,
"Unknown group parameters"):
yield result
if numBits(N) < settings.minKeySize:
for result in self._sendError(\
AlertDescription.untrusted_srp_parameters,
"N value is too small: %d" % numBits(N)):
yield result
if numBits(N) > settings.maxKeySize:
for result in self._sendError(\
AlertDescription.untrusted_srp_parameters,
"N value is too large: %d" % numBits(N)):
yield result
if B % N == 0:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Suspicious B value"):
yield result
#Check the server's signature, if server chose an
#SRP+RSA suite
if cipherSuite in CipherSuite.srpRsaSuites:
#Hash ServerKeyExchange/ServerSRPParams
hashBytes = serverKeyExchange.hash(clientRandom,
serverRandom)
#Extract signature bytes from ServerKeyExchange
sigBytes = serverKeyExchange.signature
if len(sigBytes) == 0:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Server sent an SRP ServerKeyExchange "\
"message without a signature"):
yield result
#Get server's public key from the Certificate message
for result in self._getKeyFromChain(serverCertificate,
settings):
if result in (0,1):
yield result
else:
break
publicKey, serverCertChain = result
#Verify signature
if not publicKey.verify(sigBytes, hashBytes):
for result in self._sendError(\
AlertDescription.decrypt_error,
"Signature failed to verify"):
yield result
#Calculate client's ephemeral DH values (a, A)
a = bytesToNumber(getRandomBytes(32))
A = powMod(g, a, N)
#Calculate client's static DH values (x, v)
x = makeX(bytesToString(s), srpUsername, password)
v = powMod(g, x, N)
#Calculate u
u = makeU(N, A, B)
#Calculate premaster secret
k = makeK(N, g)
S = powMod((B - (k*v)) % N, a+(u*x), N)
if self.fault == Fault.badA:
A = N
S = 0
premasterSecret = numberToBytes(S)
#Send ClientKeyExchange
for result in self._sendMsg(\
ClientKeyExchange(cipherSuite).createSRP(A)):
yield result
#Calculate RSA premaster secret, if server chose an RSA suite
elif cipherSuite in CipherSuite.rsaSuites:
#Handle the presence of a CertificateRequest
if certificateRequest:
if unknownParams and certCallback:
certParamsNew = certCallback()
if certParamsNew:
clientCertChain, privateKey = certParamsNew
#Get server's public key from the Certificate message
for result in self._getKeyFromChain(serverCertificate,
settings):
if result in (0,1):
yield result
else:
break
publicKey, serverCertChain = result
#Calculate premaster secret
premasterSecret = getRandomBytes(48)
premasterSecret[0] = settings.maxVersion[0]
premasterSecret[1] = settings.maxVersion[1]
if self.fault == Fault.badPremasterPadding:
premasterSecret[0] = 5
if self.fault == Fault.shortPremasterSecret:
premasterSecret = premasterSecret[:-1]
#Encrypt premaster secret to server's public key
encryptedPreMasterSecret = publicKey.encrypt(premasterSecret)
#If client authentication was requested, send Certificate
#message, either with certificates or empty
if certificateRequest:
clientCertificate = Certificate(certificateType)
if clientCertChain:
#Check to make sure we have the same type of
#certificates the server requested
wrongType = False
if certificateType == CertificateType.x509:
if not isinstance(clientCertChain, X509CertChain):
wrongType = True
elif certificateType == CertificateType.cryptoID:
if not isinstance(clientCertChain,
cryptoIDlib.CertChain.CertChain):
wrongType = True
if wrongType:
for result in self._sendError(\
AlertDescription.handshake_failure,
"Client certificate is of wrong type"):
yield result
clientCertificate.create(clientCertChain)
for result in self._sendMsg(clientCertificate):
yield result
else:
#The server didn't request client auth, so we
#zeroize these so the clientCertChain won't be
#stored in the session.
privateKey = None
clientCertChain = None
#Send ClientKeyExchange
clientKeyExchange = ClientKeyExchange(cipherSuite,
self.version)
clientKeyExchange.createRSA(encryptedPreMasterSecret)
for result in self._sendMsg(clientKeyExchange):
yield result
#If client authentication was requested and we have a
#private key, send CertificateVerify
if certificateRequest and privateKey:
if self.version == (3,0):
#Create a temporary session object, just for the
#purpose of creating the CertificateVerify
session = Session()
session._calcMasterSecret(self.version,
premasterSecret,
clientRandom,
serverRandom)
verifyBytes = self._calcSSLHandshakeHash(\
session.masterSecret, "")
elif self.version in ((3,1), (3,2)):
verifyBytes = stringToBytes(\
self._handshake_md5.digest() + \
self._handshake_sha.digest())
if self.fault == Fault.badVerifyMessage:
verifyBytes[0] = ((verifyBytes[0]+1) % 256)
signedBytes = privateKey.sign(verifyBytes)
certificateVerify = CertificateVerify()
certificateVerify.create(signedBytes)
for result in self._sendMsg(certificateVerify):
yield result
#Create the session object
self.session = Session()
self.session._calcMasterSecret(self.version, premasterSecret,
clientRandom, serverRandom)
self.session.sessionID = serverHello.session_id
self.session.cipherSuite = cipherSuite
self.session.srpUsername = srpUsername
self.session.clientCertChain = clientCertChain
self.session.serverCertChain = serverCertChain
#Calculate pending connection states
self._calcPendingStates(clientRandom, serverRandom,
settings.cipherImplementations)
#Exchange ChangeCipherSpec and Finished messages
for result in self._sendFinished():
yield result
for result in self._getFinished():
yield result
#Mark the connection as open
self.session._setResumable(True)
self._handshakeDone(resumed=False)
def handshakeServer(self, sharedKeyDB=None, verifierDB=None,
certChain=None, privateKey=None, reqCert=False,
sessionCache=None, settings=None, checker=None):
"""Perform a handshake in the role of server.
This function performs an SSL or TLS handshake. Depending on
the arguments and the behavior of the client, this function can
perform a shared-key, SRP, or certificate-based handshake. It
can also perform a combined SRP and server-certificate
handshake.
Like any handshake function, this can be called on a closed
TLS connection, or on a TLS connection that is already open.
If called on an open connection it performs a re-handshake.
This function does not send a Hello Request message before
performing the handshake, so if re-handshaking is required,
the server must signal the client to begin the re-handshake
through some other means.
If the function completes without raising an exception, the
TLS connection will be open and available for data transfer.
If an exception is raised, the connection will have been
automatically closed (if it was ever open).
@type sharedKeyDB: L{tlslite.SharedKeyDB.SharedKeyDB}
@param sharedKeyDB: A database of shared symmetric keys
associated with usernames. If the client performs a
shared-key handshake, the session's sharedKeyUsername
attribute will be set.
@type verifierDB: L{tlslite.VerifierDB.VerifierDB}
@param verifierDB: A database of SRP password verifiers
associated with usernames. If the client performs an SRP
handshake, the session's srpUsername attribute will be set.
@type certChain: L{tlslite.X509CertChain.X509CertChain} or
L{cryptoIDlib.CertChain.CertChain}
@param certChain: The certificate chain to be used if the
client requests server certificate authentication.
@type privateKey: L{tlslite.utils.RSAKey.RSAKey}
@param privateKey: The private key to be used if the client
requests server certificate authentication.
@type reqCert: bool
@param reqCert: Whether to request client certificate
authentication. This only applies if the client chooses server
certificate authentication; if the client chooses SRP or
shared-key authentication, this will be ignored. If the client
performs a client certificate authentication, the sessions's
clientCertChain attribute will be set.
@type sessionCache: L{tlslite.SessionCache.SessionCache}
@param sessionCache: An in-memory cache of resumable sessions.
The client can resume sessions from this cache. Alternatively,
if the client performs a full handshake, a new session will be
added to the cache.
@type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
@param settings: Various settings which can be used to control
the ciphersuites and SSL/TLS version chosen by the server.
@type checker: L{tlslite.Checker.Checker}
@param checker: A Checker instance. This instance will be
invoked to examine the other party's authentication
credentials, if the handshake completes succesfully.
@raise socket.error: If a socket error occurs.
@raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
without a preceding alert.
@raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
@raise tlslite.errors.TLSAuthenticationError: If the checker
doesn't like the other party's authentication credentials.
"""
for result in self.handshakeServerAsync(sharedKeyDB, verifierDB,
certChain, privateKey, reqCert, sessionCache, settings,
checker):
pass
def handshakeServerAsync(self, sharedKeyDB=None, verifierDB=None,
certChain=None, privateKey=None, reqCert=False,
sessionCache=None, settings=None, checker=None):
"""Start a server handshake operation on the TLS connection.
This function returns a generator which behaves similarly to
handshakeServer(). Successive invocations of the generator
will return 0 if it is waiting to read from the socket, 1 if it is
waiting to write to the socket, or it will raise StopIteration
if the handshake operation is complete.
@rtype: iterable
@return: A generator; see above for details.
"""
handshaker = self._handshakeServerAsyncHelper(\
sharedKeyDB=sharedKeyDB,
verifierDB=verifierDB, certChain=certChain,
privateKey=privateKey, reqCert=reqCert,
sessionCache=sessionCache, settings=settings)
for result in self._handshakeWrapperAsync(handshaker, checker):
yield result
def _handshakeServerAsyncHelper(self, sharedKeyDB, verifierDB,
certChain, privateKey, reqCert, sessionCache,
settings):
self._handshakeStart(client=False)
if (not sharedKeyDB) and (not verifierDB) and (not certChain):
raise ValueError("Caller passed no authentication credentials")
if certChain and not privateKey:
raise ValueError("Caller passed a certChain but no privateKey")
if privateKey and not certChain:
raise ValueError("Caller passed a privateKey but no certChain")
if not settings:
settings = HandshakeSettings()
settings = settings._filter()
#Initialize acceptable cipher suites
cipherSuites = []
if verifierDB:
if certChain:
cipherSuites += \
CipherSuite.getSrpRsaSuites(settings.cipherNames)
cipherSuites += CipherSuite.getSrpSuites(settings.cipherNames)
if sharedKeyDB or certChain:
cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
#Initialize acceptable certificate type
certificateType = None
if certChain:
try:
import cryptoIDlib.CertChain
if isinstance(certChain, cryptoIDlib.CertChain.CertChain):
certificateType = CertificateType.cryptoID
except ImportError:
pass
if isinstance(certChain, X509CertChain):
certificateType = CertificateType.x509
if certificateType == None:
raise ValueError("Unrecognized certificate type")
#Initialize locals
clientCertChain = None
serverCertChain = None #We may set certChain to this later
postFinishedError = None
#Tentatively set version to most-desirable version, so if an error
#occurs parsing the ClientHello, this is what we'll use for the
#error alert
self.version = settings.maxVersion
#Get ClientHello
for result in self._getMsg(ContentType.handshake,
HandshakeType.client_hello):
if result in (0,1):
yield result
else:
break
clientHello = result
#If client's version is too low, reject it
if clientHello.client_version < settings.minVersion:
self.version = settings.minVersion
for result in self._sendError(\
AlertDescription.protocol_version,
"Too old version: %s" % str(clientHello.client_version)):
yield result
#If client's version is too high, propose my highest version
elif clientHello.client_version > settings.maxVersion:
self.version = settings.maxVersion
else:
#Set the version to the client's version
self.version = clientHello.client_version
#Get the client nonce; create server nonce
clientRandom = clientHello.random
serverRandom = getRandomBytes(32)
#Calculate the first cipher suite intersection.
#This is the 'privileged' ciphersuite. We'll use it if we're
#doing a shared-key resumption or a new negotiation. In fact,
#the only time we won't use it is if we're resuming a non-sharedkey
#session, in which case we use the ciphersuite from the session.
#
#Given the current ciphersuite ordering, this means we prefer SRP
#over non-SRP.
for cipherSuite in cipherSuites:
if cipherSuite in clientHello.cipher_suites:
break
else:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
#If resumption was requested...
if clientHello.session_id and (sharedKeyDB or sessionCache):
session = None
#Check in the sharedKeys container
if sharedKeyDB and len(clientHello.session_id)==16:
try:
#Trim off zero padding, if any
for x in range(16):
if clientHello.session_id[x]==0:
break
self.allegedSharedKeyUsername = bytesToString(\
clientHello.session_id[:x])
session = sharedKeyDB[self.allegedSharedKeyUsername]
if not session.sharedKey:
raise AssertionError()
#use privileged ciphersuite
session.cipherSuite = cipherSuite
except KeyError:
pass
#Then check in the session cache
if sessionCache and not session:
try:
session = sessionCache[bytesToString(\
clientHello.session_id)]
if session.sharedKey:
raise AssertionError()
if not session.resumable:
raise AssertionError()
#Check for consistency with ClientHello
if session.cipherSuite not in cipherSuites:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
if session.cipherSuite not in clientHello.cipher_suites:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
if clientHello.srp_username:
if clientHello.srp_username != session.srpUsername:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
except KeyError:
pass
#If a session is found..
if session:
#Set the session
self.session = session
#Send ServerHello
serverHello = ServerHello()
serverHello.create(self.version, serverRandom,
session.sessionID, session.cipherSuite,
certificateType)
for result in self._sendMsg(serverHello):
yield result
#From here on, the client's messages must have the right version
self._versionCheck = True
#Calculate pending connection states
self._calcPendingStates(clientRandom, serverRandom,
settings.cipherImplementations)
#Exchange ChangeCipherSpec and Finished messages
for result in self._sendFinished():
yield result
for result in self._getFinished():
yield result
#Mark the connection as open
self._handshakeDone(resumed=True)
return
#If not a resumption...
#TRICKY: we might have chosen an RSA suite that was only deemed
#acceptable because of the shared-key resumption. If the shared-
#key resumption failed, because the identifier wasn't recognized,
#we might fall through to here, where we have an RSA suite
#chosen, but no certificate.
if cipherSuite in CipherSuite.rsaSuites and not certChain:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
#If an RSA suite is chosen, check for certificate type intersection
#(We do this check down here because if the mismatch occurs but the
# client is using a shared-key session, it's okay)
if cipherSuite in CipherSuite.rsaSuites + \
CipherSuite.srpRsaSuites:
if certificateType not in clientHello.certificate_types:
for result in self._sendError(\
AlertDescription.handshake_failure,
"the client doesn't support my certificate type"):
yield result
#Move certChain -> serverCertChain, now that we're using it
serverCertChain = certChain
#Create sessionID
if sessionCache:
sessionID = getRandomBytes(32)
else:
sessionID = createByteArraySequence([])
#If we've selected an SRP suite, exchange keys and calculate
#premaster secret:
if cipherSuite in CipherSuite.srpSuites + CipherSuite.srpRsaSuites:
#If there's no SRP username...
if not clientHello.srp_username:
#Ask the client to re-send ClientHello with one
for result in self._sendMsg(Alert().create(\
AlertDescription.missing_srp_username,
AlertLevel.warning)):
yield result
#Get ClientHello
for result in self._getMsg(ContentType.handshake,
HandshakeType.client_hello):
if result in (0,1):
yield result
else:
break
clientHello = result
#Check ClientHello
#If client's version is too low, reject it (COPIED CODE; BAD!)
if clientHello.client_version < settings.minVersion:
self.version = settings.minVersion
for result in self._sendError(\
AlertDescription.protocol_version,
"Too old version: %s" % str(clientHello.client_version)):
yield result
#If client's version is too high, propose my highest version
elif clientHello.client_version > settings.maxVersion:
self.version = settings.maxVersion
else:
#Set the version to the client's version
self.version = clientHello.client_version
#Recalculate the privileged cipher suite, making sure to
#pick an SRP suite
cipherSuites = [c for c in cipherSuites if c in \
CipherSuite.srpSuites + \
CipherSuite.srpRsaSuites]
for cipherSuite in cipherSuites:
if cipherSuite in clientHello.cipher_suites:
break
else:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
#Get the client nonce; create server nonce
clientRandom = clientHello.random
serverRandom = getRandomBytes(32)
#The username better be there, this time
if not clientHello.srp_username:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Client resent a hello, but without the SRP"\
" username"):
yield result
#Get username
self.allegedSrpUsername = clientHello.srp_username
#Get parameters from username
try:
entry = verifierDB[self.allegedSrpUsername]
except KeyError:
for result in self._sendError(\
AlertDescription.unknown_srp_username):
yield result
(N, g, s, v) = entry
#Calculate server's ephemeral DH values (b, B)
b = bytesToNumber(getRandomBytes(32))
k = makeK(N, g)
B = (powMod(g, b, N) + (k*v)) % N
#Create ServerKeyExchange, signing it if necessary
serverKeyExchange = ServerKeyExchange(cipherSuite)
serverKeyExchange.createSRP(N, g, stringToBytes(s), B)
if cipherSuite in CipherSuite.srpRsaSuites:
hashBytes = serverKeyExchange.hash(clientRandom,
serverRandom)
serverKeyExchange.signature = privateKey.sign(hashBytes)
#Send ServerHello[, Certificate], ServerKeyExchange,
#ServerHelloDone
msgs = []
serverHello = ServerHello()
serverHello.create(self.version, serverRandom, sessionID,
cipherSuite, certificateType)
msgs.append(serverHello)
if cipherSuite in CipherSuite.srpRsaSuites:
certificateMsg = Certificate(certificateType)
certificateMsg.create(serverCertChain)
msgs.append(certificateMsg)
msgs.append(serverKeyExchange)
msgs.append(ServerHelloDone())
for result in self._sendMsgs(msgs):
yield result
#From here on, the client's messages must have the right version
self._versionCheck = True
#Get and check ClientKeyExchange
for result in self._getMsg(ContentType.handshake,
HandshakeType.client_key_exchange,
cipherSuite):
if result in (0,1):
yield result
else:
break
clientKeyExchange = result
A = clientKeyExchange.srp_A
if A % N == 0:
postFinishedError = (AlertDescription.illegal_parameter,
"Suspicious A value")
#Calculate u
u = makeU(N, A, B)
#Calculate premaster secret
S = powMod((A * powMod(v,u,N)) % N, b, N)
premasterSecret = numberToBytes(S)
#If we've selected an RSA suite, exchange keys and calculate
#premaster secret:
elif cipherSuite in CipherSuite.rsaSuites:
#Send ServerHello, Certificate[, CertificateRequest],
#ServerHelloDone
msgs = []
msgs.append(ServerHello().create(self.version, serverRandom,
sessionID, cipherSuite, certificateType))
msgs.append(Certificate(certificateType).create(serverCertChain))
if reqCert:
msgs.append(CertificateRequest())
msgs.append(ServerHelloDone())
for result in self._sendMsgs(msgs):
yield result
#From here on, the client's messages must have the right version
self._versionCheck = True
#Get [Certificate,] (if was requested)
if reqCert:
if self.version == (3,0):
for result in self._getMsg((ContentType.handshake,
ContentType.alert),
HandshakeType.certificate,
certificateType):
if result in (0,1):
yield result
else:
break
msg = result
if isinstance(msg, Alert):
#If it's not a no_certificate alert, re-raise
alert = msg
if alert.description != \
AlertDescription.no_certificate:
self._shutdown(False)
raise TLSRemoteAlert(alert)
elif isinstance(msg, Certificate):
clientCertificate = msg
if clientCertificate.certChain and \
clientCertificate.certChain.getNumCerts()!=0:
clientCertChain = clientCertificate.certChain
else:
raise AssertionError()
elif self.version in ((3,1), (3,2)):
for result in self._getMsg(ContentType.handshake,
HandshakeType.certificate,
certificateType):
if result in (0,1):
yield result
else:
break
clientCertificate = result
if clientCertificate.certChain and \
clientCertificate.certChain.getNumCerts()!=0:
clientCertChain = clientCertificate.certChain
else:
raise AssertionError()
#Get ClientKeyExchange
for result in self._getMsg(ContentType.handshake,
HandshakeType.client_key_exchange,
cipherSuite):
if result in (0,1):
yield result
else:
break
clientKeyExchange = result
#Decrypt ClientKeyExchange
premasterSecret = privateKey.decrypt(\
clientKeyExchange.encryptedPreMasterSecret)
randomPreMasterSecret = getRandomBytes(48)
versionCheck = (premasterSecret[0], premasterSecret[1])
if not premasterSecret:
premasterSecret = randomPreMasterSecret
elif len(premasterSecret)!=48:
premasterSecret = randomPreMasterSecret
elif versionCheck != clientHello.client_version:
if versionCheck != self.version: #Tolerate buggy IE clients
premasterSecret = randomPreMasterSecret
#Get and check CertificateVerify, if relevant
if clientCertChain:
if self.version == (3,0):
#Create a temporary session object, just for the purpose
#of checking the CertificateVerify
session = Session()
session._calcMasterSecret(self.version, premasterSecret,
clientRandom, serverRandom)
verifyBytes = self._calcSSLHandshakeHash(\
session.masterSecret, "")
elif self.version in ((3,1), (3,2)):
verifyBytes = stringToBytes(self._handshake_md5.digest() +\
self._handshake_sha.digest())
for result in self._getMsg(ContentType.handshake,
HandshakeType.certificate_verify):
if result in (0,1):
yield result
else:
break
certificateVerify = result
publicKey = clientCertChain.getEndEntityPublicKey()
if len(publicKey) < settings.minKeySize:
postFinishedError = (AlertDescription.handshake_failure,
"Client's public key too small: %d" % len(publicKey))
if len(publicKey) > settings.maxKeySize:
postFinishedError = (AlertDescription.handshake_failure,
"Client's public key too large: %d" % len(publicKey))
if not publicKey.verify(certificateVerify.signature,
verifyBytes):
postFinishedError = (AlertDescription.decrypt_error,
"Signature failed to verify")
#Create the session object
self.session = Session()
self.session._calcMasterSecret(self.version, premasterSecret,
clientRandom, serverRandom)
self.session.sessionID = sessionID
self.session.cipherSuite = cipherSuite
self.session.srpUsername = self.allegedSrpUsername
self.session.clientCertChain = clientCertChain
self.session.serverCertChain = serverCertChain
#Calculate pending connection states
self._calcPendingStates(clientRandom, serverRandom,
settings.cipherImplementations)
#Exchange ChangeCipherSpec and Finished messages
for result in self._getFinished():
yield result
#If we were holding a post-finished error until receiving the client
#finished message, send it now. We delay the call until this point
#because calling sendError() throws an exception, and our caller might
#shut down the socket upon receiving the exception. If he did, and the
#client was still sending its ChangeCipherSpec or Finished messages, it
#would cause a socket error on the client side. This is a lot of
#consideration to show to misbehaving clients, but this would also
#cause problems with fault-testing.
if postFinishedError:
for result in self._sendError(*postFinishedError):
yield result
for result in self._sendFinished():
yield result
#Add the session object to the session cache
if sessionCache and sessionID:
sessionCache[bytesToString(sessionID)] = self.session
#Mark the connection as open
self.session._setResumable(True)
self._handshakeDone(resumed=False)
def _handshakeWrapperAsync(self, handshaker, checker):
if not self.fault:
try:
for result in handshaker:
yield result
if checker:
try:
checker(self)
except TLSAuthenticationError:
alert = Alert().create(AlertDescription.close_notify,
AlertLevel.fatal)
for result in self._sendMsg(alert):
yield result
raise
except:
self._shutdown(False)
raise
else:
try:
for result in handshaker:
yield result
if checker:
try:
checker(self)
except TLSAuthenticationError:
alert = Alert().create(AlertDescription.close_notify,
AlertLevel.fatal)
for result in self._sendMsg(alert):
yield result
raise
except socket.error as e:
raise TLSFaultError("socket error!")
except TLSAbruptCloseError as e:
raise TLSFaultError("abrupt close error!")
except TLSAlert as alert:
if alert.description not in Fault.faultAlerts[self.fault]:
raise TLSFaultError(str(alert))
else:
pass
except:
self._shutdown(False)
raise
else:
raise TLSFaultError("No error!")
def _getKeyFromChain(self, certificate, settings):
#Get and check cert chain from the Certificate message
certChain = certificate.certChain
if not certChain or certChain.getNumCerts() == 0:
for result in self._sendError(AlertDescription.illegal_parameter,
"Other party sent a Certificate message without "\
"certificates"):
yield result
#Get and check public key from the cert chain
publicKey = certChain.getEndEntityPublicKey()
if len(publicKey) < settings.minKeySize:
for result in self._sendError(AlertDescription.handshake_failure,
"Other party's public key too small: %d" % len(publicKey)):
yield result
if len(publicKey) > settings.maxKeySize:
for result in self._sendError(AlertDescription.handshake_failure,
"Other party's public key too large: %d" % len(publicKey)):
yield result
yield publicKey, certChain
| apache-2.0 |
2ndQuadrant/ansible | lib/ansible/module_utils/aws/iam.py | 74 | 2029 | # Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
import traceback
try:
from botocore.exceptions import ClientError, NoCredentialsError
except ImportError:
pass # caught by HAS_BOTO3
from ansible.module_utils._text import to_native
def get_aws_account_id(module):
""" Given AnsibleAWSModule instance, get the active AWS account ID
get_account_id tries too find out the account that we are working
on. It's not guaranteed that this will be easy so we try in
several different ways. Giving either IAM or STS privilages to
the account should be enough to permit this.
"""
account_id = None
try:
sts_client = module.client('sts')
account_id = sts_client.get_caller_identity().get('Account')
# non-STS sessions may also get NoCredentialsError from this STS call, so
# we must catch that too and try the IAM version
except (ClientError, NoCredentialsError):
try:
iam_client = module.client('iam')
account_id = iam_client.get_user()['User']['Arn'].split(':')[4]
except ClientError as e:
if (e.response['Error']['Code'] == 'AccessDenied'):
except_msg = to_native(e)
# don't match on `arn:aws` because of China region `arn:aws-cn` and similar
account_id = except_msg.search(r"arn:\w+:iam::([0-9]{12,32}):\w+/").group(1)
if account_id is None:
module.fail_json_aws(e, msg="Could not get AWS account information")
except Exception as e:
module.fail_json(
msg="Failed to get AWS account information, Try allowing sts:GetCallerIdentity or iam:GetUser permissions.",
exception=traceback.format_exc()
)
if not account_id:
module.fail_json(msg="Failed while determining AWS account ID. Try allowing sts:GetCallerIdentity or iam:GetUser permissions.")
return to_native(account_id)
| gpl-3.0 |
thisisshi/cloud-custodian | tools/c7n_azure/tests_azure/tests_resources/test_event_hub.py | 2 | 4550 | # Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
from ..azure_common import BaseTest, arm_template, cassette_name
class EventHubTest(BaseTest):
def test_event_hub_schema_validate(self):
with self.sign_out_patch():
p = self.load_policy({
'name': 'test-event-hub-compliance',
'resource': 'azure.eventhub'
}, validate=True)
self.assertTrue(p)
@arm_template('eventhub.json')
def test_find_by_name(self):
p = self.load_policy({
'name': 'test-azure-eventhub',
'resource': 'azure.eventhub',
'filters': [
{'type': 'value',
'key': 'name',
'op': 'contains',
'value_type': 'normalize',
'value': '-cctesteventhubns'}],
})
resources = p.run()
self.assertEqual(len(resources), 1)
@cassette_name('firewall')
def test_firewall_rules_include_cidr(self):
p = self.load_policy({
'name': 'test-azure-eventhub',
'resource': 'azure.eventhub',
'filters': [
{'type': 'value',
'key': 'name',
'op': 'contains',
'value_type': 'normalize',
'value': '-cctesteventhubns'},
{'type': 'firewall-rules',
'include': ['11.0.0.0/24']}],
})
resources = p.run()
self.assertEqual(len(resources), 1)
@cassette_name('firewall')
def test_firewall_rules_not_include_cidr(self):
p = self.load_policy({
'name': 'test-azure-eventhub',
'resource': 'azure.eventhub',
'filters': [
{'type': 'value',
'key': 'name',
'op': 'contains',
'value_type': 'normalize',
'value': '-cctesteventhubns'},
{'type': 'firewall-rules',
'include': ['11.0.1.0/24']}],
})
resources = p.run()
self.assertEqual(len(resources), 0)
@cassette_name('firewall')
def test_firewall_rules_ranges(self):
p = self.load_policy({
'name': 'test-azure-eventhub',
'resource': 'azure.eventhub',
'filters': [
{'type': 'value',
'key': 'name',
'op': 'contains',
'value_type': 'normalize',
'value': '-cctesteventhubns'},
{'type': 'firewall-rules',
'include': ['11.0.0.0-11.0.0.255']}],
}, validate=True)
resources = p.run()
self.assertEqual(1, len(resources))
@cassette_name('firewall')
def test_firewall_rules_not_ranges(self):
p = self.load_policy({
'name': 'test-azure-eventhub',
'resource': 'azure.eventhub',
'filters': [
{'type': 'value',
'key': 'name',
'op': 'contains',
'value_type': 'normalize',
'value': '-cctesteventhubns'},
{'type': 'firewall-rules',
'include': ['11.0.1.0-11.0.1.255']}],
}, validate=True)
resources = p.run()
self.assertEqual(0, len(resources))
@cassette_name('firewall')
def test_firewall_rules_equal(self):
p = self.load_policy({
'name': 'test-azure-eventhub',
'resource': 'azure.eventhub',
'filters': [
{'type': 'value',
'key': 'name',
'op': 'contains',
'value_type': 'normalize',
'value': '-cctesteventhubns'},
{'type': 'firewall-rules',
'equal': ['11.0.0.0/24', '10.1.1.1/32']}],
}, validate=True)
resources = p.run()
self.assertEqual(1, len(resources))
@cassette_name('firewall')
def test_firewall_rules_not_equal(self):
p = self.load_policy({
'name': 'test-azure-eventhub',
'resource': 'azure.eventhub',
'filters': [
{'type': 'value',
'key': 'name',
'op': 'contains',
'value_type': 'normalize',
'value': '-cctesteventhubns'},
{'type': 'firewall-rules',
'equal': ['11.0.1.0/24', '10.1.1.1/32']}],
}, validate=True)
resources = p.run()
self.assertEqual(0, len(resources))
| apache-2.0 |
QuLogic/iris | lib/iris/tests/unit/analysis/trajectory/test_Trajectory.py | 3 | 2794 | # (C) British Crown Copyright 2016, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""
Unit tests for :class:`iris.analysis.trajectory.Trajectory`.
"""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
# Import iris.tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests
import numpy as np
from iris.analysis.trajectory import Trajectory
class Test___init__(tests.IrisTest):
    """Exercise Trajectory construction and waypoint interpolation."""
    def test_2_points(self):
        # Single segment from (0, 0) to (1, 2), resampled at 5 points.
        waypoints = [{'lat': 0, 'lon': 0}, {'lat': 1, 'lon': 2}]
        traj = Trajectory(waypoints, sample_count=5)
        self.assertEqual(traj.length, np.sqrt(5))
        self.assertEqual(traj.sample_count, 5)
        expected = [{'lat': 0.0, 'lon': 0.0},
                    {'lat': 0.25, 'lon': 0.5},
                    {'lat': 0.5, 'lon': 1.0},
                    {'lat': 0.75, 'lon': 1.5},
                    {'lat': 1.0, 'lon': 2.0}]
        self.assertEqual(traj.sampled_points, expected)
    def test_3_points(self):
        # Two collinear segments along the equator; spot-check one sample.
        waypoints = [{'lat': 0, 'lon': 0}, {'lat': 0, 'lon': 1},
                     {'lat': 0, 'lon': 2}]
        traj = Trajectory(waypoints, sample_count=21)
        self.assertEqual(traj.length, 2.0)
        self.assertEqual(traj.sample_count, 21)
        self.assertEqual(traj.sampled_points[19],
                         {'lat': 0.0, 'lon': 1.9000000000000001})
    def test_zigzag(self):
        # Four segments forming an 'm' shape; spot-check one sample.
        waypoints = [{'lat': 0, 'lon': 0}, {'lat': 1, 'lon': 1},
                     {'lat': 0, 'lon': 2}, {'lat': 1, 'lon': 3},
                     {'lat': 0, 'lon': 4}]
        traj = Trajectory(waypoints, sample_count=33)
        self.assertEqual(traj.length, 5.6568542494923806)
        self.assertEqual(traj.sample_count, 33)
        self.assertEqual(traj.sampled_points[31],
                         {'lat': 0.12499999999999989, 'lon': 3.875})
# Run this test module directly (outside a test runner).
if __name__ == "__main__":
    tests.main()
| gpl-3.0 |
jamielennox/django-openstack-auth-websso | openstack_auth_websso/plugin.py | 1 | 2014 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from keystoneclient.auth.identity import v2 as v2_auth
from keystoneclient.auth.identity import v3 as v3_auth
from keystoneclient.v3 import client as v3_client
from openstack_auth.plugin import base
from openstack_auth import exceptions
from openstack_auth import utils
__all__ = ['FederatedTokenPlugin']
class FederatedTokenPlugin(base.BasePlugin):
    """Authenticate against keystone with an existing token."""

    def get_plugin(self, auth_url=None, token=None, project_id=None,
                   **kwargs):
        """Build a keystone Token auth plugin from an existing token.

        Returns None when either auth_url or token is missing; otherwise a
        v2 or v3 Token plugin depending on the configured keystone version.
        """
        if not (auth_url and token):
            return None
        if utils.get_keystone_version() < 3:
            return v2_auth.Token(auth_url=auth_url,
                                 token=token,
                                 tenant_id=project_id,
                                 reauthenticate=False)
        return v3_auth.Token(auth_url=auth_url,
                             token=token,
                             project_id=project_id,
                             reauthenticate=False)

    def list_projects(self, session, auth_plugin, auth_ref=None):
        """List projects accessible with the federated token (v3 only)."""
        if utils.get_keystone_version() < 3:
            msg = _('Cannot list federated tokens from v2 API')
            raise exceptions.KeystoneAuthException(msg)
        keystone = v3_client.Client(session=session, auth=auth_plugin)
        return keystone.federation.projects.list()
| apache-2.0 |
denisenkom/django-sqlserver | tests/multiple_database/routers.py | 379 | 1927 | from __future__ import unicode_literals
from django.db import DEFAULT_DB_ALIAS
class TestRouter(object):
    """
    Vaguely behave like primary/replica, but the databases aren't assumed to
    propagate changes.
    """
    def db_for_read(self, model, instance=None, **hints):
        # Prefer the database a hinted instance already lives on.
        if not instance:
            return 'other'
        return instance._state.db or 'other'

    def db_for_write(self, model, **hints):
        # All writes go to the default alias.
        return DEFAULT_DB_ALIAS

    def allow_relation(self, obj1, obj2, **hints):
        # Relations are only allowed between the two known databases.
        known = ('default', 'other')
        return obj1._state.db in known and obj2._state.db in known

    def allow_migrate(self, db, app_label, **hints):
        # Every app may migrate on every database.
        return True
class AuthRouter(object):
    """
    Control all database operations on models in the contrib.auth application.
    """
    def db_for_read(self, model, **hints):
        "Point all read operations on auth models to 'default'"
        # 'default' (vs 'other' for writes) lets tests distinguish
        # read requests from write requests for auth objects.
        return 'default' if model._meta.app_label == 'auth' else None

    def db_for_write(self, model, **hints):
        "Point all operations on auth models to 'other'"
        return 'other' if model._meta.app_label == 'auth' else None

    def allow_relation(self, obj1, obj2, **hints):
        "Allow any relation if a model in Auth is involved"
        if 'auth' in (obj1._meta.app_label, obj2._meta.app_label):
            return True
        return None

    def allow_migrate(self, db, app_label, **hints):
        "Make sure the auth app only appears on the 'other' db"
        if app_label != 'auth':
            return None
        return db == 'other'
class WriteRouter(object):
    """A router that only expresses an opinion on writes."""
    def db_for_write(self, model, **hints):
        # Route every write to the 'writer' database; stay silent otherwise.
        return 'writer'
| mit |
GaetanCambier/CouchPotatoServer | couchpotato/core/media/movie/providers/trailer/youtube_dl/extractor/urort.py | 172 | 2248 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse,
)
from ..utils import (
unified_strdate,
)
class UrortIE(InfoExtractor):
    """Extractor for band pages on NRK P3 Urørt (urort.p3.no)."""
    IE_DESC = 'NRK P3 Urørt'
    _VALID_URL = r'https?://(?:www\.)?urort\.p3\.no/#!/Band/(?P<id>[^/]+)$'

    _TEST = {
        'url': 'https://urort.p3.no/#!/Band/Gerilja',
        'md5': '5ed31a924be8a05e47812678a86e127b',
        'info_dict': {
            'id': '33124-24',
            'ext': 'mp3',
            'title': 'The Bomb',
            'thumbnail': 're:^https?://.+\.jpg',
            'uploader': 'Gerilja',
            'uploader_id': 'Gerilja',
            'upload_date': '20100323',
        },
        'params': {
            'matchtitle': '^The Bomb$',  # To test, we want just one video
        }
    }

    def _real_extract(self, url):
        band_id = self._match_id(url)
        # OData filter selecting all tracks belonging to this band.
        band_filter = compat_urllib_parse.quote(
            "InternalBandUrl eq '%s'" % band_id)
        json_url = ('http://urort.p3.no/breeze/urort/TrackDTOViews'
                    '?$filter=%s&$orderby=Released%%20desc'
                    '&$expand=Tags%%2CFiles' % band_filter)
        tracks = self._download_json(json_url, band_id)
        entries = []
        for track in tracks:
            formats = []
            for media in track['Files']:
                formats.append({
                    'tbr': media.get('Quality'),
                    'ext': media['FileType'],
                    'format_id': '%s-%s' % (media['FileType'],
                                            media.get('Quality', '')),
                    'url': 'http://p3urort.blob.core.windows.net/tracks/%s'
                           % media['FileRef'],
                    # Prefer mp3 over other encodings.
                    'preference': 3 if media['FileType'] == 'mp3' else 2,
                })
            self._sort_formats(formats)
            entries.append({
                'id': '%d-%s' % (track['BandId'], track['$id']),
                'title': track['Title'],
                'uploader_id': band_id,
                'uploader': track.get('BandName', band_id),
                'thumbnail': 'http://urort.p3.no/cloud/images/%s'
                             % track['Image'],
                'upload_date': unified_strdate(track.get('Released')),
                'formats': formats,
            })
        return {
            '_type': 'playlist',
            'id': band_id,
            'title': band_id,
            'entries': entries,
        }
| gpl-3.0 |
0x0aNL/p2pool-0x0a | p2pool/bitcoin/networks/usde.py | 8 | 1210 | import os
import platform
from twisted.internet import defer
from .. import data, helper
from p2pool.util import pack
# Network definition for the USDe altcoin (scrypt proof-of-work).
P2P_PREFIX = 'd9d9f9bd'.decode('hex') #pchmessagestart
P2P_PORT = 54449
ADDRESS_VERSION = 38 #pubkey_address
RPC_PORT = 54448
# Sanity check: the daemon speaks the usde RPC dialect and is on mainnet.
RPC_CHECK = defer.inlineCallbacks(lambda bitcoind: defer.returnValue(
    'usdeaddress' in (yield bitcoind.rpc_help()) and
    not (yield bitcoind.rpc_getinfo())['testnet']
))
# Block reward in satoshis: 1000 coins up to block 1000, then 100 coins.
# NOTE(review): the comparison is height>1000, so block 1000 itself gets
# the 100-coin reward path inverted — presumably intentional; verify
# against the coin's reference client.
SUBSIDY_FUNC = lambda height: 1000*100000000 if height>1000 else 100*100000000
# Proof-of-work hash is scrypt (via the ltc_scrypt extension).
POW_FUNC = lambda data: pack.IntType(256).unpack(__import__('ltc_scrypt').getPoWHash(data))
BLOCK_PERIOD = 60 # s
SYMBOL = 'USDe'
# Platform-specific location of the daemon's configuration file.
CONF_FILE_FUNC = lambda: os.path.join(os.path.join(os.environ['APPDATA'], 'usde') if platform.system() == 'Windows' else os.path.expanduser('~/Library/Application Support/usde/') if platform.system() == 'Darwin' else os.path.expanduser('~/.usde'), 'usde.conf')
BLOCK_EXPLORER_URL_PREFIX = 'http://altexplorer.net/block/'
ADDRESS_EXPLORER_URL_PREFIX = 'https://altexplorer.net/address/'
TX_EXPLORER_URL_PREFIX = 'https://altexplorer.net/tx/'
# Acceptable share target range (min difficulty .. max difficulty).
SANE_TARGET_RANGE = (2**256//1000000000 - 1, 2**256//1000 - 1)
DUMB_SCRYPT_DIFF = 2**16
DUST_THRESHOLD = 0.03e8
Tesora/tesora-horizon | openstack_dashboard/test/api_tests/network_tests.py | 15 | 35376 | # Copyright 2013 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import copy
import itertools
import uuid
from django import http
from django.test.utils import override_settings
from mox3.mox import IsA # noqa
from novaclient.v2 import floating_ip_pools
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
class NetworkClientTestCase(test.APITestCase):
    """Verify NetworkClient picks nova or neutron backends depending on
    whether the 'network' service (and its extensions) is available.
    """
    def test_networkclient_no_neutron(self):
        # No neutron service: both managers fall back to nova.
        self.mox.StubOutWithMock(api.base, 'is_service_enabled')
        api.base.is_service_enabled(IsA(http.HttpRequest), 'network') \
            .AndReturn(False)
        self.mox.ReplayAll()
        nc = api.network.NetworkClient(self.request)
        self.assertIsInstance(nc.floating_ips, api.nova.FloatingIpManager)
        self.assertIsInstance(nc.secgroups, api.nova.SecurityGroupManager)
    def test_networkclient_neutron(self):
        # Neutron enabled and advertising extensions: neutron managers.
        self.mox.StubOutWithMock(api.base, 'is_service_enabled')
        api.base.is_service_enabled(IsA(http.HttpRequest), 'network') \
            .AndReturn(True)
        self.neutronclient = self.stub_neutronclient()
        self.neutronclient.list_extensions() \
            .AndReturn({'extensions': self.api_extensions.list()})
        self.mox.ReplayAll()
        nc = api.network.NetworkClient(self.request)
        self.assertIsInstance(nc.floating_ips, api.neutron.FloatingIpManager)
        self.assertIsInstance(nc.secgroups, api.neutron.SecurityGroupManager)
    def test_networkclient_neutron_with_nova_security_group(self):
        # Neutron enabled but without extensions: security groups via nova.
        self.mox.StubOutWithMock(api.base, 'is_service_enabled')
        api.base.is_service_enabled(IsA(http.HttpRequest), 'network') \
            .AndReturn(True)
        self.neutronclient = self.stub_neutronclient()
        self.neutronclient.list_extensions().AndReturn({'extensions': []})
        self.mox.ReplayAll()
        nc = api.network.NetworkClient(self.request)
        self.assertIsInstance(nc.floating_ips, api.neutron.FloatingIpManager)
        self.assertIsInstance(nc.secgroups, api.nova.SecurityGroupManager)
class NetworkApiNovaTestBase(test.APITestCase):
    """Base for nova-network tests: stubs the service check so api.network
    uses the nova backends.
    """
    def setUp(self):
        super(NetworkApiNovaTestBase, self).setUp()
        self.mox.StubOutWithMock(api.base, 'is_service_enabled')
        api.base.is_service_enabled(IsA(http.HttpRequest), 'network') \
            .AndReturn(False)
class NetworkApiNovaSecurityGroupTests(NetworkApiNovaTestBase):
    """Security-group operations through the nova backend."""
    def test_server_update_security_groups(self):
        # Instance currently in groups [0, 1]; requesting [1, 2] must
        # add group 2 and remove group 0.
        all_secgroups = self.security_groups.list()
        added_secgroup = all_secgroups[2]
        rm_secgroup = all_secgroups[0]
        cur_secgroups_raw = [{'id': sg.id, 'name': sg.name,
                              'rules': []}
                             for sg in all_secgroups[0:2]]
        cur_secgroups_ret = {'security_groups': cur_secgroups_raw}
        new_sg_ids = [sg.id for sg in all_secgroups[1:3]]
        instance_id = self.servers.first().id
        novaclient = self.stub_novaclient()
        novaclient.security_groups = self.mox.CreateMockAnything()
        novaclient.servers = self.mox.CreateMockAnything()
        novaclient.client = self.mox.CreateMockAnything()
        novaclient.security_groups.list().AndReturn(all_secgroups)
        url = '/servers/%s/os-security-groups' % instance_id
        novaclient.client.get(url).AndReturn((200, cur_secgroups_ret))
        novaclient.servers.add_security_group(instance_id, added_secgroup.name)
        novaclient.servers.remove_security_group(instance_id, rm_secgroup.name)
        self.mox.ReplayAll()
        api.network.server_update_security_groups(
            self.request, instance_id, new_sg_ids)
class NetworkApiNovaFloatingIpTests(NetworkApiNovaTestBase):
    """Floating-IP operations through the nova backend."""
    def test_floating_ip_pools_list(self):
        pool_names = ['pool1', 'pool2']
        pools = [floating_ip_pools.FloatingIPPool(
            None, {'name': pool}) for pool in pool_names]
        novaclient = self.stub_novaclient()
        novaclient.floating_ip_pools = self.mox.CreateMockAnything()
        novaclient.floating_ip_pools.list().AndReturn(pools)
        self.mox.ReplayAll()
        ret = api.network.floating_ip_pools_list(self.request)
        self.assertEqual(pool_names, [p.name for p in ret])
    def test_floating_ip_list(self):
        fips = self.api_floating_ips.list()
        novaclient = self.stub_novaclient()
        novaclient.floating_ips = self.mox.CreateMockAnything()
        novaclient.floating_ips.list().AndReturn(fips)
        self.mox.ReplayAll()
        ret = api.network.tenant_floating_ip_list(self.request)
        # Each returned FIP mirrors the nova data; port_id maps to the
        # instance id and instance_type is 'compute' when associated.
        for r, e in zip(ret, fips):
            for attr in ['id', 'ip', 'pool', 'fixed_ip', 'instance_id']:
                self.assertEqual(getattr(e, attr), getattr(r, attr))
            self.assertEqual(e.instance_id, r.port_id)
            exp_instance_type = 'compute' if e.instance_id else None
            self.assertEqual(exp_instance_type, r.instance_type)
    def test_floating_ip_get(self):
        fip = self.api_floating_ips.first()
        novaclient = self.stub_novaclient()
        novaclient.floating_ips = self.mox.CreateMockAnything()
        novaclient.floating_ips.get(fip.id).AndReturn(fip)
        self.mox.ReplayAll()
        ret = api.network.tenant_floating_ip_get(self.request, fip.id)
        for attr in ['id', 'ip', 'pool', 'fixed_ip', 'instance_id']:
            self.assertEqual(getattr(fip, attr), getattr(ret, attr))
        self.assertEqual(fip.instance_id, ret.port_id)
        self.assertEqual(fip.instance_id, ret.instance_id)
        self.assertEqual('compute', ret.instance_type)
    def test_floating_ip_allocate(self):
        pool_name = 'fip_pool'
        # Pick an unassociated floating IP from the fixtures.
        fip = [fip for fip in self.api_floating_ips.list()
               if not fip.instance_id][0]
        novaclient = self.stub_novaclient()
        novaclient.floating_ips = self.mox.CreateMockAnything()
        novaclient.floating_ips.create(pool=pool_name).AndReturn(fip)
        self.mox.ReplayAll()
        ret = api.network.tenant_floating_ip_allocate(self.request, pool_name)
        for attr in ['id', 'ip', 'pool', 'fixed_ip', 'instance_id']:
            self.assertEqual(getattr(fip, attr), getattr(ret, attr))
        self.assertIsNone(ret.port_id)
        self.assertIsNone(ret.instance_type)
    def test_floating_ip_release(self):
        fip = self.api_floating_ips.first()
        novaclient = self.stub_novaclient()
        novaclient.floating_ips = self.mox.CreateMockAnything()
        novaclient.floating_ips.delete(fip.id)
        self.mox.ReplayAll()
        api.network.tenant_floating_ip_release(self.request, fip.id)
    def test_floating_ip_associate(self):
        server = api.nova.Server(self.servers.first(), self.request)
        floating_ip = self.floating_ips.first()
        novaclient = self.stub_novaclient()
        novaclient.floating_ips = self.mox.CreateMockAnything()
        novaclient.servers = self.mox.CreateMockAnything()
        novaclient.servers.get(server.id).AndReturn(server)
        novaclient.floating_ips.get(floating_ip.id).AndReturn(floating_ip)
        novaclient.servers.add_floating_ip(server.id, floating_ip.ip) \
            .AndReturn(server)
        self.mox.ReplayAll()
        api.network.floating_ip_associate(self.request,
                                          floating_ip.id,
                                          server.id)
    def test_floating_ip_disassociate(self):
        server = api.nova.Server(self.servers.first(), self.request)
        floating_ip = self.api_floating_ips.first()
        novaclient = self.stub_novaclient()
        novaclient.servers = self.mox.CreateMockAnything()
        novaclient.floating_ips = self.mox.CreateMockAnything()
        novaclient.servers.get(server.id).AndReturn(server)
        novaclient.floating_ips.get(floating_ip.id).AndReturn(floating_ip)
        novaclient.servers.remove_floating_ip(server.id, floating_ip.ip) \
            .AndReturn(server)
        self.mox.ReplayAll()
        api.network.floating_ip_disassociate(self.request,
                                             floating_ip.id)
    def test_floating_ip_target_list(self):
        # Under nova-network, association targets are the servers themselves.
        servers = self.servers.list()
        novaclient = self.stub_novaclient()
        novaclient.servers = self.mox.CreateMockAnything()
        novaclient.servers.list().AndReturn(servers)
        self.mox.ReplayAll()
        targets = api.network.floating_ip_target_list(self.request)
        for target, server in zip(targets, servers):
            self.assertEqual(server.id, target.id)
            self.assertEqual('%s (%s)' % (server.name, server.id), target.name)
    def test_floating_ip_target_get_by_instance(self):
        # Under nova-network the target id IS the instance id.
        self.mox.ReplayAll()
        instance_id = self.servers.first().id
        ret = api.network.floating_ip_target_get_by_instance(self.request,
                                                             instance_id)
        self.assertEqual(instance_id, ret)
class NetworkApiNeutronTestBase(test.APITestCase):
    """Base for neutron tests: enables the 'network' service so api.network
    uses the neutron backends, and stubs the neutron client.
    """
    def setUp(self):
        super(NetworkApiNeutronTestBase, self).setUp()
        self.mox.StubOutWithMock(api.base, 'is_service_enabled')
        api.base.is_service_enabled(IsA(http.HttpRequest), 'network') \
            .AndReturn(True)
        self.qclient = self.stub_neutronclient()
class NetworkApiNeutronTests(NetworkApiNeutronTestBase):
    """Tests for servers_update_addresses with the neutron backend."""
    def _get_expected_addresses(self, server, no_fip_expected=True):
        """Build the nova-style addresses dict expected for *server*
        from the port fixtures (optionally including floating IPs)."""
        server_ports = self.ports.filter(device_id=server.id)
        addresses = collections.defaultdict(list)
        for p in server_ports:
            net_name = self.networks.get(id=p['network_id']).name
            for ip in p.fixed_ips:
                addresses[net_name].append(
                    {'version': 4,
                     'addr': ip['ip_address'],
                     'OS-EXT-IPS-MAC:mac_addr': p.mac_address,
                     'OS-EXT-IPS:type': 'fixed'})
            if no_fip_expected:
                continue
            fips = self.q_floating_ips.filter(port_id=p['id'])
            if not fips:
                continue
            # Only one FIP should match.
            fip = fips[0]
            addresses[net_name].append(
                {'version': 4,
                 'addr': fip.floating_ip_address,
                 'OS-EXT-IPS-MAC:mac_addr': p.mac_address,
                 'OS-EXT-IPS:type': 'floating'})
        return addresses
    def _check_server_address(self, res_server_data, no_fip_expected=False):
        """Assert the server's addresses match the expected fixture data."""
        expected_addresses = self._get_expected_addresses(res_server_data,
                                                          no_fip_expected)
        self.assertEqual(len(expected_addresses),
                         len(res_server_data.addresses))
        for net, addresses in expected_addresses.items():
            self.assertIn(net, res_server_data.addresses)
            self.assertEqual(addresses, res_server_data.addresses[net])
    def _test_servers_update_addresses(self, router_enabled=True):
        """Common body for the router-enabled / router-disabled variants."""
        tenant_id = self.request.user.tenant_id
        servers = copy.deepcopy(self.servers.list())
        server_ids = [server.id for server in servers]
        server_ports = [p for p in self.api_ports.list()
                        if p['device_id'] in server_ids]
        server_port_ids = [p['id'] for p in server_ports]
        if router_enabled:
            assoc_fips = [fip for fip in self.api_q_floating_ips.list()
                          if fip['port_id'] in server_port_ids]
        server_network_ids = [p['network_id'] for p in server_ports]
        server_networks = [net for net in self.api_networks.list()
                           if net['id'] in server_network_ids]
        self.qclient.list_ports(device_id=server_ids) \
            .AndReturn({'ports': server_ports})
        if router_enabled:
            self.qclient.list_floatingips(tenant_id=tenant_id,
                                          port_id=server_port_ids) \
                .AndReturn({'floatingips': assoc_fips})
            self.qclient.list_ports(tenant_id=tenant_id) \
                .AndReturn({'ports': self.api_ports.list()})
        self.qclient.list_networks(id=set(server_network_ids)) \
            .AndReturn({'networks': server_networks})
        self.qclient.list_subnets() \
            .AndReturn({'subnets': self.api_subnets.list()})
        self.mox.ReplayAll()
        api.network.servers_update_addresses(self.request, servers)
        self.assertEqual(self.servers.count(), len(servers))
        self.assertEqual([server.id for server in self.servers.list()],
                         [server.id for server in servers])
        no_fip_expected = not router_enabled
        # server[0] has one fixed IP and one floating IP
        # if router ext is enabled.
        self._check_server_address(servers[0], no_fip_expected)
        # The expected is also calculated, we examine the result manually once.
        addrs = servers[0].addresses['net1']
        if router_enabled:
            self.assertEqual(2, len(addrs))
            self.assertEqual('fixed', addrs[0]['OS-EXT-IPS:type'])
            self.assertEqual('floating', addrs[1]['OS-EXT-IPS:type'])
        else:
            self.assertEqual(1, len(addrs))
            self.assertEqual('fixed', addrs[0]['OS-EXT-IPS:type'])
        # server[1] has one fixed IP.
        self._check_server_address(servers[1], no_fip_expected)
        # manual check.
        addrs = servers[1].addresses['net2']
        self.assertEqual(1, len(addrs))
        self.assertEqual('fixed', addrs[0]['OS-EXT-IPS:type'])
        # server[2] has no corresponding ports in neutron_data,
        # so it should be an empty dict.
        self.assertFalse(servers[2].addresses)
    @override_settings(OPENSTACK_NEUTRON_NETWORK={'enable_router': True})
    def test_servers_update_addresses(self):
        self._test_servers_update_addresses()
    @override_settings(OPENSTACK_NEUTRON_NETWORK={'enable_router': False})
    def test_servers_update_addresses_router_disabled(self):
        self._test_servers_update_addresses(router_enabled=False)
class NetworkApiNeutronSecurityGroupTests(NetworkApiNeutronTestBase):
    """Security-group operations through the neutron backend."""
    def setUp(self):
        super(NetworkApiNeutronSecurityGroupTests, self).setUp()
        self.qclient.list_extensions() \
            .AndReturn({'extensions': self.api_extensions.list()})
        # id -> name lookup used when comparing remote group references.
        self.sg_dict = dict([(sg['id'], sg['name']) for sg
                             in self.api_q_secgroups.list()])
    def _cmp_sg_rule(self, exprule, retrule):
        """Compare a raw neutron rule dict with the wrapped rule object."""
        self.assertEqual(exprule['id'], retrule.id)
        self.assertEqual(exprule['security_group_id'],
                         retrule.parent_group_id)
        self.assertEqual(exprule['direction'],
                         retrule.direction)
        self.assertEqual(exprule['ethertype'],
                         retrule.ethertype)
        self.assertEqual(exprule['port_range_min'],
                         retrule.from_port)
        self.assertEqual(exprule['port_range_max'],
                         retrule.to_port,)
        # With no explicit remote, the API exposes the catch-all CIDR.
        if (exprule['remote_ip_prefix'] is None and
                exprule['remote_group_id'] is None):
            expcidr = ('::/0' if exprule['ethertype'] == 'IPv6'
                       else '0.0.0.0/0')
        else:
            expcidr = exprule['remote_ip_prefix']
        self.assertEqual(expcidr, retrule.ip_range.get('cidr'))
        self.assertEqual(self.sg_dict.get(exprule['remote_group_id']),
                         retrule.group.get('name'))
    def _cmp_sg(self, exp_sg, ret_sg):
        """Compare a raw neutron security group dict with the wrapper."""
        self.assertEqual(exp_sg['id'], ret_sg.id)
        self.assertEqual(exp_sg['name'], ret_sg.name)
        exp_rules = exp_sg['security_group_rules']
        self.assertEqual(len(exp_rules), len(ret_sg.rules))
        for (exprule, retrule) in itertools.izip(exp_rules, ret_sg.rules):
            self._cmp_sg_rule(exprule, retrule)
    def test_security_group_list(self):
        sgs = self.api_q_secgroups.list()
        tenant_id = self.request.user.tenant_id
        # use deepcopy to ensure self.api_q_secgroups is not modified.
        self.qclient.list_security_groups(tenant_id=tenant_id) \
            .AndReturn({'security_groups': copy.deepcopy(sgs)})
        self.mox.ReplayAll()
        rets = api.network.security_group_list(self.request)
        self.assertEqual(len(sgs), len(rets))
        for (exp, ret) in itertools.izip(sgs, rets):
            self._cmp_sg(exp, ret)
    def test_security_group_get(self):
        secgroup = self.api_q_secgroups.first()
        # Referenced remote groups are fetched too, for name resolution.
        sg_ids = set([secgroup['id']] +
                     [rule['remote_group_id'] for rule
                      in secgroup['security_group_rules']
                      if rule['remote_group_id']])
        related_sgs = [sg for sg in self.api_q_secgroups.list()
                       if sg['id'] in sg_ids]
        # use deepcopy to ensure self.api_q_secgroups is not modified.
        self.qclient.show_security_group(secgroup['id']) \
            .AndReturn({'security_group': copy.deepcopy(secgroup)})
        self.qclient.list_security_groups(id=sg_ids, fields=['id', 'name']) \
            .AndReturn({'security_groups': related_sgs})
        self.mox.ReplayAll()
        ret = api.network.security_group_get(self.request, secgroup['id'])
        self._cmp_sg(secgroup, ret)
    def test_security_group_create(self):
        secgroup = self.api_q_secgroups.list()[1]
        body = {'security_group':
                {'name': secgroup['name'],
                 'description': secgroup['description'],
                 'tenant_id': self.request.user.project_id}}
        self.qclient.create_security_group(body) \
            .AndReturn({'security_group': copy.deepcopy(secgroup)})
        self.mox.ReplayAll()
        ret = api.network.security_group_create(self.request, secgroup['name'],
                                                secgroup['description'])
        self._cmp_sg(secgroup, ret)
    def test_security_group_update(self):
        secgroup = self.api_q_secgroups.list()[1]
        secgroup = copy.deepcopy(secgroup)
        secgroup['name'] = 'newname'
        secgroup['description'] = 'new description'
        body = {'security_group':
                {'name': secgroup['name'],
                 'description': secgroup['description']}}
        self.qclient.update_security_group(secgroup['id'], body) \
            .AndReturn({'security_group': secgroup})
        self.mox.ReplayAll()
        ret = api.network.security_group_update(self.request,
                                                secgroup['id'],
                                                secgroup['name'],
                                                secgroup['description'])
        self._cmp_sg(secgroup, ret)
    def test_security_group_delete(self):
        secgroup = self.api_q_secgroups.first()
        self.qclient.delete_security_group(secgroup['id'])
        self.mox.ReplayAll()
        api.network.security_group_delete(self.request, secgroup['id'])
    def test_security_group_rule_create(self):
        sg_rule = [r for r in self.api_q_secgroup_rules.list()
                   if r['protocol'] == 'tcp' and r['remote_ip_prefix']][0]
        sg_id = sg_rule['security_group_id']
        secgroup = [sg for sg in self.api_q_secgroups.list()
                    if sg['id'] == sg_id][0]
        # The POST body omits the server-generated fields.
        post_rule = copy.deepcopy(sg_rule)
        del post_rule['id']
        del post_rule['tenant_id']
        post_body = {'security_group_rule': post_rule}
        self.qclient.create_security_group_rule(post_body) \
            .AndReturn({'security_group_rule': copy.deepcopy(sg_rule)})
        self.qclient.list_security_groups(id=set([sg_id]),
                                          fields=['id', 'name']) \
            .AndReturn({'security_groups': [copy.deepcopy(secgroup)]})
        self.mox.ReplayAll()
        ret = api.network.security_group_rule_create(
            self.request, sg_rule['security_group_id'],
            sg_rule['direction'], sg_rule['ethertype'], sg_rule['protocol'],
            sg_rule['port_range_min'], sg_rule['port_range_max'],
            sg_rule['remote_ip_prefix'], sg_rule['remote_group_id'])
        self._cmp_sg_rule(sg_rule, ret)
    def test_security_group_rule_delete(self):
        sg_rule = self.api_q_secgroup_rules.first()
        self.qclient.delete_security_group_rule(sg_rule['id'])
        self.mox.ReplayAll()
        api.network.security_group_rule_delete(self.request, sg_rule['id'])
    def _get_instance(self, cur_sg_ids):
        """Return (instance_id, two fake ports) carrying *cur_sg_ids*."""
        instance_port = [p for p in self.api_ports.list()
                         if p['device_owner'].startswith('compute:')][0]
        instance_id = instance_port['device_id']
        # Emulate an instance with two ports
        instance_ports = []
        for _i in range(2):
            p = copy.deepcopy(instance_port)
            p['id'] = str(uuid.uuid4())
            p['security_groups'] = cur_sg_ids
            instance_ports.append(p)
        return (instance_id, instance_ports)
    def test_server_security_groups(self):
        cur_sg_ids = [sg['id'] for sg in self.api_q_secgroups.list()[:2]]
        instance_id, instance_ports = self._get_instance(cur_sg_ids)
        self.qclient.list_ports(device_id=instance_id) \
            .AndReturn({'ports': instance_ports})
        secgroups = copy.deepcopy(self.api_q_secgroups.list())
        self.qclient.list_security_groups(id=set(cur_sg_ids)) \
            .AndReturn({'security_groups': secgroups})
        self.mox.ReplayAll()
        api.network.server_security_groups(self.request, instance_id)
    def test_server_update_security_groups(self):
        cur_sg_ids = [self.api_q_secgroups.first()['id']]
        new_sg_ids = [sg['id'] for sg in self.api_q_secgroups.list()[:2]]
        instance_id, instance_ports = self._get_instance(cur_sg_ids)
        self.qclient.list_ports(device_id=instance_id) \
            .AndReturn({'ports': instance_ports})
        # Every port of the instance must be updated.
        for p in instance_ports:
            body = {'port': {'security_groups': new_sg_ids}}
            self.qclient.update_port(p['id'], body=body).AndReturn({'port': p})
        self.mox.ReplayAll()
        api.network.server_update_security_groups(
            self.request, instance_id, new_sg_ids)
    def test_security_group_backend(self):
        self.mox.ReplayAll()
        self.assertEqual('neutron',
                         api.network.security_group_backend(self.request))
class NetworkApiNeutronFloatingIpTests(NetworkApiNeutronTestBase):
def setUp(self):
super(NetworkApiNeutronFloatingIpTests, self).setUp()
self.qclient.list_extensions() \
.AndReturn({'extensions': self.api_extensions.list()})
@override_settings(OPENSTACK_NEUTRON_NETWORK={'enable_router': True})
def test_floating_ip_supported(self):
self.mox.ReplayAll()
self.assertTrue(api.network.floating_ip_supported(self.request))
@override_settings(OPENSTACK_NEUTRON_NETWORK={'enable_router': False})
def test_floating_ip_supported_false(self):
self.mox.ReplayAll()
self.assertFalse(api.network.floating_ip_supported(self.request))
def test_floating_ip_pools_list(self):
search_opts = {'router:external': True}
ext_nets = [n for n in self.api_networks.list()
if n['router:external']]
self.qclient.list_networks(**search_opts) \
.AndReturn({'networks': ext_nets})
self.mox.ReplayAll()
rets = api.network.floating_ip_pools_list(self.request)
for attr in ['id', 'name']:
self.assertEqual([p[attr] for p in ext_nets],
[getattr(p, attr) for p in rets])
def test_floating_ip_list(self):
fips = self.api_q_floating_ips.list()
filters = {'tenant_id': self.request.user.tenant_id}
self.qclient.list_floatingips(**filters) \
.AndReturn({'floatingips': fips})
self.qclient.list_ports(**filters) \
.AndReturn({'ports': self.api_ports.list()})
self.mox.ReplayAll()
rets = api.network.tenant_floating_ip_list(self.request)
assoc_port = self.api_ports.list()[1]
self.assertEqual(len(fips), len(rets))
for ret, exp in zip(rets, fips):
for attr in ['id', 'ip', 'pool', 'fixed_ip', 'port_id']:
self.assertEqual(exp[attr], getattr(ret, attr))
if exp['port_id']:
dev_id = assoc_port['device_id'] if exp['port_id'] else None
self.assertEqual(dev_id, ret.instance_id)
self.assertEqual('compute', ret.instance_type)
else:
self.assertIsNone(ret.instance_id)
self.assertIsNone(ret.instance_type)
def test_floating_ip_list_all_tenants(self):
fips = self.api_q_floating_ips.list()
self.qclient.list_floatingips().AndReturn({'floatingips': fips})
self.qclient.list_ports().AndReturn({'ports': self.api_ports.list()})
self.mox.ReplayAll()
# all_tenants option for floating IP list is api.neutron specific,
# so we call api.neutron.FloatingIpManager directly and
# actually we don't need NetworkClient in this test.
# setUp() in the base class sets up mox to expect
# api.base.is_service_enabled() is called and we need to call
# NetworkClient even if we don't use it so that mox.VerifyAll
# doesn't complain it.
api.network.NetworkClient(self.request)
fip_manager = api.neutron.FloatingIpManager(self.request)
rets = fip_manager.list(all_tenants=True)
assoc_port = self.api_ports.list()[1]
self.assertEqual(len(fips), len(rets))
for ret, exp in zip(rets, fips):
for attr in ['id', 'ip', 'pool', 'fixed_ip', 'port_id']:
self.assertEqual(getattr(ret, attr), exp[attr])
if exp['port_id']:
dev_id = assoc_port['device_id'] if exp['port_id'] else None
self.assertEqual(dev_id, ret.instance_id)
self.assertEqual('compute', ret.instance_type)
else:
self.assertIsNone(ret.instance_id)
self.assertIsNone(ret.instance_type)
def _test_floating_ip_get_associated(self, assoc_port, exp_instance_type):
fip = self.api_q_floating_ips.list()[1]
self.qclient.show_floatingip(fip['id']).AndReturn({'floatingip': fip})
self.qclient.show_port(assoc_port['id']) \
.AndReturn({'port': assoc_port})
self.mox.ReplayAll()
ret = api.network.tenant_floating_ip_get(self.request, fip['id'])
for attr in ['id', 'ip', 'pool', 'fixed_ip', 'port_id']:
self.assertEqual(fip[attr], getattr(ret, attr))
self.assertEqual(assoc_port['device_id'], ret.instance_id)
self.assertEqual(exp_instance_type, ret.instance_type)
def test_floating_ip_get_associated(self):
assoc_port = self.api_ports.list()[1]
self._test_floating_ip_get_associated(assoc_port, 'compute')
def test_floating_ip_get_associated_with_loadbalancer_vip(self):
assoc_port = copy.deepcopy(self.api_ports.list()[1])
assoc_port['device_owner'] = 'neutron:LOADBALANCER'
assoc_port['device_id'] = str(uuid.uuid4())
assoc_port['name'] = 'vip-' + str(uuid.uuid4())
self._test_floating_ip_get_associated(assoc_port, 'loadbalancer')
    def test_floating_ip_get_unassociated(self):
        """An unassociated FIP has no instance id/type."""
        fip = self.api_q_floating_ips.list()[0]
        # Only show_floatingip is expected; with no port_id there is no
        # follow-up show_port call.
        self.qclient.show_floatingip(fip['id']).AndReturn({'floatingip': fip})
        self.mox.ReplayAll()
        ret = api.network.tenant_floating_ip_get(self.request, fip['id'])
        for attr in ['id', 'ip', 'pool', 'fixed_ip', 'port_id']:
            self.assertEqual(fip[attr], getattr(ret, attr))
        self.assertIsNone(ret.instance_id)
        self.assertIsNone(ret.instance_type)
    def test_floating_ip_allocate(self):
        """Allocation creates a FIP on the external network for the tenant."""
        ext_nets = [n for n in self.api_networks.list()
                    if n['router:external']]
        ext_net = ext_nets[0]
        fip = self.api_q_floating_ips.first()
        # mox record phase: exactly one create_floatingip call with the
        # external network and the requesting project.
        self.qclient.create_floatingip(
            {'floatingip': {'floating_network_id': ext_net['id'],
                            'tenant_id': self.request.user.project_id}}) \
            .AndReturn({'floatingip': fip})
        self.mox.ReplayAll()
        ret = api.network.tenant_floating_ip_allocate(self.request,
                                                      ext_net['id'])
        for attr in ['id', 'ip', 'pool', 'fixed_ip', 'port_id']:
            self.assertEqual(fip[attr], getattr(ret, attr))
        # A freshly allocated FIP is not attached to anything yet.
        self.assertIsNone(ret.instance_id)
        self.assertIsNone(ret.instance_type)
    def test_floating_ip_release(self):
        """Releasing a FIP issues a single neutron delete_floatingip call."""
        fip = self.api_q_floating_ips.first()
        self.qclient.delete_floatingip(fip['id'])
        self.mox.ReplayAll()
        api.network.tenant_floating_ip_release(self.request, fip['id'])
    def test_floating_ip_associate(self):
        """Associate decodes the '<port-id>_<ip>' target into an update call."""
        fip = self.api_q_floating_ips.list()[1]
        assoc_port = self.api_ports.list()[1]
        ip_address = assoc_port['fixed_ips'][0]['ip_address']
        # Targets are encoded as '<port_id>_<fixed_ip>'; the API is
        # expected to split that back into port_id/fixed_ip_address.
        target_id = '%s_%s' % (assoc_port['id'], ip_address)
        params = {'port_id': assoc_port['id'],
                  'fixed_ip_address': ip_address}
        self.qclient.update_floatingip(fip['id'],
                                       {'floatingip': params})
        self.mox.ReplayAll()
        api.network.floating_ip_associate(self.request, fip['id'], target_id)
    def test_floating_ip_disassociate(self):
        """Disassociate clears the FIP's port_id via update_floatingip."""
        fip = self.api_q_floating_ips.list()[1]
        self.qclient.update_floatingip(fip['id'],
                                       {'floatingip': {'port_id': None}})
        self.mox.ReplayAll()
        api.network.floating_ip_disassociate(self.request, fip['id'])
def _get_target_id(self, port):
param = {'id': port['id'],
'addr': port['fixed_ips'][0]['ip_address']}
return '%(id)s_%(addr)s' % param
def _get_target_name(self, port):
param = {'svrid': port['device_id'],
'addr': port['fixed_ips'][0]['ip_address']}
return 'server_%(svrid)s: %(addr)s' % param
def _subs_from_port(self, port):
return [ip['subnet_id'] for ip in port['fixed_ips']]
    @override_settings(
        OPENSTACK_NEUTRON_NETWORK={
            'enable_lb': True,
            'enable_fip_topology_check': True,
        }
    )
    def test_floating_ip_target_list(self):
        """Target list contains reachable, non-infrastructure ports.

        With topology checking on, a port is a valid FIP target when it
        is not owned by neutron infrastructure ('network:*') and sits on
        a subnet reachable from an external network (directly routed or
        shared).
        """
        ports = self.api_ports.list()
        # Port on the first subnet is connected to a router
        # attached to external network in neutron_data.
        subnet_id = self.subnets.first().id
        shared_nets = [n for n in self.api_networks.list() if n['shared']]
        shared_subnet_ids = [s for n in shared_nets for s in n['subnets']]
        # Expected (id, name) pairs, built with the same helpers the
        # API-layer result is compared against below.
        target_ports = [
            (self._get_target_id(p), self._get_target_name(p)) for p in ports
            if (not p['device_owner'].startswith('network:') and
                (subnet_id in self._subs_from_port(p) or
                 (set(shared_subnet_ids) & set(self._subs_from_port(p)))))
        ]
        # mox record phase: the exact neutron/nova call sequence the API
        # is expected to issue (order matters under mox).
        filters = {'tenant_id': self.request.user.tenant_id}
        self.qclient.list_ports(**filters).AndReturn({'ports': ports})
        servers = self.servers.list()
        novaclient = self.stub_novaclient()
        novaclient.servers = self.mox.CreateMockAnything()
        search_opts = {'project_id': self.request.user.tenant_id}
        novaclient.servers.list(True, search_opts).AndReturn(servers)
        search_opts = {'router:external': True}
        ext_nets = [n for n in self.api_networks.list()
                    if n['router:external']]
        self.qclient.list_networks(**search_opts) \
            .AndReturn({'networks': ext_nets})
        self.qclient.list_routers().AndReturn({'routers':
                                               self.api_routers.list()})
        self.qclient.list_networks(shared=True).AndReturn({'networks':
                                                           shared_nets})
        shared_subs = [s for s in self.api_subnets.list()
                       if s['id'] in shared_subnet_ids]
        self.qclient.list_subnets().AndReturn({'subnets': shared_subs})
        self.qclient.list_vips().AndReturn({'vips': self.vips.list()})
        self.mox.ReplayAll()
        rets = api.network.floating_ip_target_list(self.request)
        self.assertEqual(len(target_ports), len(rets))
        for ret, exp in zip(rets, target_ports):
            self.assertEqual(exp[0], ret.id)
            self.assertEqual(exp[1], ret.name)
    def test_floating_ip_target_get_by_instance(self):
        """The first port of the given server becomes its FIP target."""
        ports = self.api_ports.list()
        candidates = [p for p in ports if p['device_id'] == '1']
        # The API is expected to filter ports by device_id (server id).
        search_opts = {'device_id': '1'}
        self.qclient.list_ports(**search_opts).AndReturn({'ports': candidates})
        self.mox.ReplayAll()
        ret = api.network.floating_ip_target_get_by_instance(self.request, '1')
        self.assertEqual(self._get_target_id(candidates[0]), ret)
    def test_target_floating_ip_port_by_instance(self):
        """All ports of a server are returned as FIP targets, in order."""
        ports = self.api_ports.list()
        candidates = [p for p in ports if p['device_id'] == '1']
        search_opts = {'device_id': '1'}
        self.qclient.list_ports(**search_opts).AndReturn({'ports': candidates})
        self.mox.ReplayAll()
        ret = api.network.floating_ip_target_list_by_instance(self.request,
                                                              '1')
        self.assertEqual(self._get_target_id(candidates[0]), ret[0])
        self.assertEqual(len(candidates), len(ret))
    def test_floating_ip_target_get_by_instance_with_preloaded_target(self):
        """With a preloaded target list, no neutron call is made.

        ReplayAll() with nothing recorded asserts the API issues no
        backend requests and answers purely from *target_list*.
        """
        target_list = [{'name': 'name11', 'id': 'id11', 'instance_id': 'vm1'},
                       {'name': 'name21', 'id': 'id21', 'instance_id': 'vm2'},
                       {'name': 'name22', 'id': 'id22', 'instance_id': 'vm2'}]
        self.mox.ReplayAll()
        ret = api.network.floating_ip_target_get_by_instance(
            self.request, 'vm2', target_list)
        # The first matching entry wins.
        self.assertEqual('id21', ret)
    def test_target_floating_ip_port_by_instance_with_preloaded_target(self):
        """With a preloaded target list, all matches are returned in order."""
        target_list = [{'name': 'name11', 'id': 'id11', 'instance_id': 'vm1'},
                       {'name': 'name21', 'id': 'id21', 'instance_id': 'vm2'},
                       {'name': 'name22', 'id': 'id22', 'instance_id': 'vm2'}]
        # No mock calls recorded: the lookup must not hit the backend.
        self.mox.ReplayAll()
        ret = api.network.floating_ip_target_list_by_instance(
            self.request, 'vm2', target_list)
        self.assertEqual(['id21', 'id22'], ret)
| apache-2.0 |
marierm/supercollider | external_libraries/simplejson-2.3.2/encoder.py | 44 | 20259 | """Implementation of JSONEncoder
"""
import re
from decimal import Decimal
def _import_speedups():
try:
from . import _speedups
return _speedups.encode_basestring_ascii, _speedups.make_encoder
except ImportError:
return None, None
# Bind the C speedups (or None, None when unavailable).
c_encode_basestring_ascii, c_make_encoder = _import_speedups()
from .decoder import PosInf
# Characters that must be escaped inside a JSON string.  U+2028/U+2029
# are escaped too because they are invalid inside JavaScript strings.
ESCAPE = re.compile(ur'[\x00-\x1f\\"\b\f\n\r\t\u2028\u2029]')
# For ASCII-only output: backslash, quote, and anything outside
# printable ASCII.
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
HAS_UTF8 = re.compile(r'[\x80-\xff]')
# Short-form escapes; everything else falls back to \uXXXX.
ESCAPE_DCT = {
    '\\': '\\\\',
    '"': '\\"',
    '\b': '\\b',
    '\f': '\\f',
    '\n': '\\n',
    '\r': '\\r',
    '\t': '\\t',
    u'\u2028': '\\u2028',
    u'\u2029': '\\u2029',
}
# Fill in \u00XX escapes for the remaining control characters.
for i in range(0x20):
    #ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
    ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
# repr() gives the shortest round-trippable float representation here.
FLOAT_REPR = repr
def encode_basestring(s):
    """Return a JSON representation of a Python string."""
    # UTF-8 byte strings are decoded first so escaping operates on
    # unicode code points rather than raw bytes.
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        s = s.decode('utf-8')
    return u'"' + ESCAPE.sub(lambda m: ESCAPE_DCT[m.group(0)], s) + u'"'
def py_encode_basestring_ascii(s):
    """Return an ASCII-only JSON representation of a Python string.

    Characters outside the short-escape table are emitted as \\uXXXX;
    code points beyond the BMP become UTF-16 surrogate pairs.
    """
    # Decode UTF-8 byte strings so escaping sees code points, not bytes.
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        s = s.decode('utf-8')
    def replace(match):
        s = match.group(0)
        try:
            return ESCAPE_DCT[s]
        except KeyError:
            n = ord(s)
            if n < 0x10000:
                #return '\\u{0:04x}'.format(n)
                return '\\u%04x' % (n,)
            else:
                # surrogate pair
                n -= 0x10000
                s1 = 0xd800 | ((n >> 10) & 0x3ff)
                s2 = 0xdc00 | (n & 0x3ff)
                #return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
                return '\\u%04x\\u%04x' % (s1, s2)
    return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
# Prefer the C implementation when the _speedups extension is available.
encode_basestring_ascii = (
    c_encode_basestring_ascii or py_encode_basestring_ascii)
class JSONEncoder(object):
    """Extensible JSON <http://json.org> encoder for Python data structures.

    Supports the following objects and types by default:

    +-------------------+---------------+
    | Python            | JSON          |
    +===================+===============+
    | dict, namedtuple  | object        |
    +-------------------+---------------+
    | list, tuple       | array         |
    +-------------------+---------------+
    | str, unicode      | string        |
    +-------------------+---------------+
    | int, long, float  | number        |
    +-------------------+---------------+
    | True              | true          |
    +-------------------+---------------+
    | False             | false         |
    +-------------------+---------------+
    | None              | null          |
    +-------------------+---------------+

    To extend this to recognize other objects, subclass and implement a
    ``.default()`` method with another method that returns a serializable
    object for ``o`` if possible, otherwise it should call the superclass
    implementation (to raise ``TypeError``).
    """
    # Class-level defaults; __init__ overrides these on the instance when
    # custom separators or an indent are requested.
    item_separator = ', '
    key_separator = ': '
    def __init__(self, skipkeys=False, ensure_ascii=True,
            check_circular=True, allow_nan=True, sort_keys=False,
            indent=None, separators=None, encoding='utf-8', default=None,
            use_decimal=True, namedtuple_as_object=True,
            tuple_as_array=True):
        """Constructor for JSONEncoder, with sensible defaults.

        If skipkeys is false, then it is a TypeError to attempt
        encoding of keys that are not str, int, long, float or None.  If
        skipkeys is True, such items are simply skipped.

        If ensure_ascii is true, the output is guaranteed to be str
        objects with all incoming unicode characters escaped.  If
        ensure_ascii is false, the output will be a unicode object.

        If check_circular is true, then lists, dicts, and custom encoded
        objects will be checked for circular references during encoding to
        prevent an infinite recursion (which would cause an OverflowError).
        Otherwise, no such check takes place.

        If allow_nan is true, then NaN, Infinity, and -Infinity will be
        encoded as such.  This behavior is not JSON specification compliant,
        but is consistent with most JavaScript based encoders and decoders.
        Otherwise, it will be a ValueError to encode such floats.

        If sort_keys is true, then the output of dictionaries will be
        sorted by key; this is useful for regression tests to ensure
        that JSON serializations can be compared on a day-to-day basis.

        If indent is a string, then JSON array elements and object members
        will be pretty-printed with a newline followed by that string repeated
        for each level of nesting.  ``None`` (the default) selects the most
        compact representation without any newlines.  For backwards
        compatibility with versions of simplejson earlier than 2.1.0, an
        integer is also accepted and is converted to a string with that many
        spaces.

        If specified, separators should be a (item_separator, key_separator)
        tuple.  The default is (', ', ': ').  To get the most compact JSON
        representation you should specify (',', ':') to eliminate whitespace.

        If specified, default is a function that gets called for objects
        that can't otherwise be serialized.  It should return a JSON encodable
        version of the object or raise a ``TypeError``.

        If encoding is not None, then all input strings will be
        transformed into unicode using that encoding prior to JSON-encoding.
        The default is UTF-8.

        If use_decimal is true (the default), ``decimal.Decimal`` will
        be supported directly by the encoder.  For the inverse, decode JSON
        with ``parse_float=decimal.Decimal``.

        If namedtuple_as_object is true (the default), objects with
        ``_asdict()`` methods will be encoded as JSON objects.

        If tuple_as_array is true (the default), tuple (and subclasses) will
        be encoded as JSON arrays.
        """
        self.skipkeys = skipkeys
        self.ensure_ascii = ensure_ascii
        self.check_circular = check_circular
        self.allow_nan = allow_nan
        self.sort_keys = sort_keys
        self.use_decimal = use_decimal
        self.namedtuple_as_object = namedtuple_as_object
        self.tuple_as_array = tuple_as_array
        # Backwards compatibility: an integer indent means that many spaces.
        if isinstance(indent, (int, long)):
            indent = ' ' * indent
        self.indent = indent
        if separators is not None:
            self.item_separator, self.key_separator = separators
        elif indent is not None:
            # Pretty-printing already adds a newline after each item, so
            # drop the trailing space from the default item separator.
            self.item_separator = ','
        if default is not None:
            self.default = default
        self.encoding = encoding
    def default(self, o):
        """Implement this method in a subclass such that it returns
        a serializable object for ``o``, or calls the base implementation
        (to raise a ``TypeError``).

        For example, to support arbitrary iterators, you could
        implement default like this::

            def default(self, o):
                try:
                    iterable = iter(o)
                except TypeError:
                    pass
                else:
                    return list(iterable)
                return JSONEncoder.default(self, o)
        """
        raise TypeError(repr(o) + " is not JSON serializable")
    def encode(self, o):
        """Return a JSON string representation of a Python data structure.

        >>> from simplejson import JSONEncoder
        >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
        '{"foo": ["bar", "baz"]}'
        """
        # This is for extremely simple cases and benchmarks.
        if isinstance(o, basestring):
            if isinstance(o, str):
                _encoding = self.encoding
                if (_encoding is not None
                        and not (_encoding == 'utf-8')):
                    o = o.decode(_encoding)
            if self.ensure_ascii:
                return encode_basestring_ascii(o)
            else:
                return encode_basestring(o)
        # This doesn't pass the iterator directly to ''.join() because the
        # exceptions aren't as detailed.  The list call should be roughly
        # equivalent to the PySequence_Fast that ''.join() would do.
        chunks = self.iterencode(o, _one_shot=True)
        if not isinstance(chunks, (list, tuple)):
            chunks = list(chunks)
        if self.ensure_ascii:
            return ''.join(chunks)
        else:
            return u''.join(chunks)
    def iterencode(self, o, _one_shot=False):
        """Encode the given object and yield each string
        representation as available.

        For example::

            for chunk in JSONEncoder().iterencode(bigobject):
                mysocket.write(chunk)
        """
        if self.check_circular:
            markers = {}
        else:
            markers = None
        if self.ensure_ascii:
            _encoder = encode_basestring_ascii
        else:
            _encoder = encode_basestring
        if self.encoding != 'utf-8':
            # Wrap the encoder so byte strings are decoded with the
            # configured encoding before being escaped.
            def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
                if isinstance(o, str):
                    o = o.decode(_encoding)
                return _orig_encoder(o)
        def floatstr(o, allow_nan=self.allow_nan,
                _repr=FLOAT_REPR, _inf=PosInf, _neginf=-PosInf):
            # Check for specials. Note that this type of test is processor
            # and/or platform-specific, so do tests which don't depend on
            # the internals.
            if o != o:
                text = 'NaN'
            elif o == _inf:
                text = 'Infinity'
            elif o == _neginf:
                text = '-Infinity'
            else:
                return _repr(o)
            if not allow_nan:
                raise ValueError(
                    "Out of range float values are not JSON compliant: " +
                    repr(o))
            return text
        key_memo = {}
        # Use the C encoder only for one-shot, non-indented output; it
        # does not support pretty printing.
        if (_one_shot and c_make_encoder is not None
                and self.indent is None):
            _iterencode = c_make_encoder(
                markers, self.default, _encoder, self.indent,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, self.allow_nan, key_memo, self.use_decimal,
                self.namedtuple_as_object, self.tuple_as_array)
        else:
            _iterencode = _make_iterencode(
                markers, self.default, _encoder, self.indent, floatstr,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, _one_shot, self.use_decimal,
                self.namedtuple_as_object, self.tuple_as_array)
        try:
            return _iterencode(o, 0)
        finally:
            # Don't keep encoded keys alive between calls.
            key_memo.clear()
class JSONEncoderForHTML(JSONEncoder):
    """A JSONEncoder whose output is safe to embed in HTML.

    The characters ``&``, ``<`` and ``>`` are emitted as ``\\u00XX``
    escapes, because the usual entity references (e.g. ``&amp;``) are
    not expanded inside ``<script>`` tags.
    """
    def encode(self, o):
        # Bypass JSONEncoder.encode(): its fast paths for plain strings
        # would skip the escaping done in iterencode(), so always route
        # through iterencode().
        chunks = self.iterencode(o, True)
        joiner = '' if self.ensure_ascii else u''
        return joiner.join(chunks)
    def iterencode(self, o, _one_shot=False):
        replacements = (('&', '\\u0026'), ('<', '\\u003c'), ('>', '\\u003e'))
        for chunk in super(JSONEncoderForHTML, self).iterencode(o, _one_shot):
            for unsafe, escape in replacements:
                chunk = chunk.replace(unsafe, escape)
            yield chunk
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
        _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
        _use_decimal, _namedtuple_as_object, _tuple_as_array,
        ## HACK: hand-optimized bytecode; turn globals into locals
        False=False,
        True=True,
        ValueError=ValueError,
        basestring=basestring,
        Decimal=Decimal,
        dict=dict,
        float=float,
        id=id,
        int=int,
        isinstance=isinstance,
        list=list,
        long=long,
        str=str,
        tuple=tuple,
    ):
    # Builds and returns the pure-Python streaming encoder closure used
    # when the C speedups are unavailable or pretty-printing is requested.
    # The default-argument names above shadow builtins on purpose so the
    # generators below use fast local lookups.
    def _iterencode_list(lst, _current_indent_level):
        # Yield the JSON array encoding of *lst*, chunk by chunk.
        if not lst:
            yield '[]'
            return
        if markers is not None:
            # Circular-reference guard: remember this container while it
            # is being encoded.
            markerid = id(lst)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = lst
        buf = '['
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (_indent * _current_indent_level)
            separator = _item_separator + newline_indent
            buf += newline_indent
        else:
            newline_indent = None
            separator = _item_separator
        first = True
        for value in lst:
            if first:
                first = False
            else:
                # After the first item, the pending prefix is just the
                # separator; '[' has already been emitted with it.
                buf = separator
            if isinstance(value, basestring):
                yield buf + _encoder(value)
            elif value is None:
                yield buf + 'null'
            elif value is True:
                yield buf + 'true'
            elif value is False:
                yield buf + 'false'
            elif isinstance(value, (int, long)):
                yield buf + str(value)
            elif isinstance(value, float):
                yield buf + _floatstr(value)
            elif _use_decimal and isinstance(value, Decimal):
                yield buf + str(value)
            else:
                # Container or custom object: emit the prefix, then
                # delegate to the appropriate sub-generator.
                yield buf
                if isinstance(value, list):
                    chunks = _iterencode_list(value, _current_indent_level)
                else:
                    _asdict = _namedtuple_as_object and getattr(value, '_asdict', None)
                    if _asdict and callable(_asdict):
                        chunks = _iterencode_dict(_asdict(),
                            _current_indent_level)
                    elif _tuple_as_array and isinstance(value, tuple):
                        chunks = _iterencode_list(value, _current_indent_level)
                    elif isinstance(value, dict):
                        chunks = _iterencode_dict(value, _current_indent_level)
                    else:
                        chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (_indent * _current_indent_level)
        yield ']'
        if markers is not None:
            del markers[markerid]
    def _iterencode_dict(dct, _current_indent_level):
        # Yield the JSON object encoding of *dct*, chunk by chunk.
        if not dct:
            yield '{}'
            return
        if markers is not None:
            markerid = id(dct)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = dct
        yield '{'
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (_indent * _current_indent_level)
            item_separator = _item_separator + newline_indent
            yield newline_indent
        else:
            newline_indent = None
            item_separator = _item_separator
        first = True
        if _sort_keys:
            items = dct.items()
            items.sort(key=lambda kv: kv[0])
        else:
            items = dct.iteritems()
        for key, value in items:
            if isinstance(key, basestring):
                pass
            # JavaScript is weakly typed for these, so it makes sense to
            # also allow them.  Many encoders seem to do something like this.
            elif isinstance(key, float):
                key = _floatstr(key)
            elif key is True:
                key = 'true'
            elif key is False:
                key = 'false'
            elif key is None:
                key = 'null'
            elif isinstance(key, (int, long)):
                key = str(key)
            elif _skipkeys:
                continue
            else:
                raise TypeError("key " + repr(key) + " is not a string")
            if first:
                first = False
            else:
                yield item_separator
            yield _encoder(key)
            yield _key_separator
            if isinstance(value, basestring):
                yield _encoder(value)
            elif value is None:
                yield 'null'
            elif value is True:
                yield 'true'
            elif value is False:
                yield 'false'
            elif isinstance(value, (int, long)):
                yield str(value)
            elif isinstance(value, float):
                yield _floatstr(value)
            elif _use_decimal and isinstance(value, Decimal):
                yield str(value)
            else:
                if isinstance(value, list):
                    chunks = _iterencode_list(value, _current_indent_level)
                else:
                    _asdict = _namedtuple_as_object and getattr(value, '_asdict', None)
                    if _asdict and callable(_asdict):
                        chunks = _iterencode_dict(_asdict(),
                            _current_indent_level)
                    elif _tuple_as_array and isinstance(value, tuple):
                        chunks = _iterencode_list(value, _current_indent_level)
                    elif isinstance(value, dict):
                        chunks = _iterencode_dict(value, _current_indent_level)
                    else:
                        chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (_indent * _current_indent_level)
        yield '}'
        if markers is not None:
            del markers[markerid]
    def _iterencode(o, _current_indent_level):
        # Top-level dispatch: scalars are yielded directly, containers are
        # delegated, and anything else goes through the default() hook.
        if isinstance(o, basestring):
            yield _encoder(o)
        elif o is None:
            yield 'null'
        elif o is True:
            yield 'true'
        elif o is False:
            yield 'false'
        elif isinstance(o, (int, long)):
            yield str(o)
        elif isinstance(o, float):
            yield _floatstr(o)
        elif isinstance(o, list):
            for chunk in _iterencode_list(o, _current_indent_level):
                yield chunk
        else:
            _asdict = _namedtuple_as_object and getattr(o, '_asdict', None)
            if _asdict and callable(_asdict):
                for chunk in _iterencode_dict(_asdict(), _current_indent_level):
                    yield chunk
            elif (_tuple_as_array and isinstance(o, tuple)):
                for chunk in _iterencode_list(o, _current_indent_level):
                    yield chunk
            elif isinstance(o, dict):
                for chunk in _iterencode_dict(o, _current_indent_level):
                    yield chunk
            elif _use_decimal and isinstance(o, Decimal):
                yield str(o)
            else:
                if markers is not None:
                    # Guard against default() returning (a structure
                    # containing) the object itself.
                    markerid = id(o)
                    if markerid in markers:
                        raise ValueError("Circular reference detected")
                    markers[markerid] = o
                o = _default(o)
                for chunk in _iterencode(o, _current_indent_level):
                    yield chunk
                if markers is not None:
                    del markers[markerid]
    return _iterencode
| gpl-3.0 |
georgtroska/root | interpreter/llvm/src/tools/clang/bindings/python/tests/cindex/test_code_completion.py | 93 | 2766 | from clang.cindex import TranslationUnit
def check_completion_results(cr, expected):
    """Assert *cr* is diagnostic-free and contains every expected completion.

    Each result in *cr* is compared by its string form, so *expected*
    holds the stringified completion strings.
    """
    assert cr is not None
    assert len(cr.diagnostics) == 0
    actual = []
    for item in cr.results:
        actual.append(str(item))
    for wanted in expected:
        assert wanted in actual
def test_code_complete():
    """Completion in a plain C TU surfaces declarations with brief comments."""
    # NOTE(review): the unsaved-file text below looks like it lost its
    # original blank lines; the (9, 1) completion location assumes the
    # original layout -- confirm against upstream clang bindings.
    files = [('fake.c', """
/// Aaa.
int test1;
/// Bbb.
void test2(void);
void f() {
}
""")]
    tu = TranslationUnit.from_source('fake.c', ['-std=c99'], unsaved_files=files,
            options=TranslationUnit.PARSE_INCLUDE_BRIEF_COMMENTS_IN_CODE_COMPLETION)
    cr = tu.codeComplete('fake.c', 9, 1, unsaved_files=files, include_brief_comments=True)
    # The /// doc-comments must come back as the completions' brief comments.
    expected = [
      "{'int', ResultType} | {'test1', TypedText} || Priority: 50 || Availability: Available || Brief comment: Aaa.",
      "{'void', ResultType} | {'test2', TypedText} | {'(', LeftParen} | {')', RightParen} || Priority: 50 || Availability: Available || Brief comment: Bbb.",
      "{'return', TypedText} || Priority: 40 || Availability: Available || Brief comment: None"
    ]
    check_completion_results(cr, expected)
def test_code_complete_availability():
    """Completion availability reflects C++ member accessibility.

    ``P::member`` is protected, so completing on a ``P`` object must mark
    it NotAccessible, while ``Q`` re-exports it publicly via a
    using-declaration.
    """
    # NOTE(review): this source text also appears blank-line-stripped;
    # the (12, 5) / (13, 5) locations assume the original layout.
    files = [('fake.cpp', """
class P {
protected:
int member;
};
class Q : public P {
public:
using P::member;
};
void f(P x, Q y) {
x.; // member is inaccessible
y.; // member is accessible
}
""")]
    tu = TranslationUnit.from_source('fake.cpp', ['-std=c++98'], unsaved_files=files)
    # Completion on ``y.`` -- member is public here via the using-decl.
    cr = tu.codeComplete('fake.cpp', 12, 5, unsaved_files=files)
    expected = [
      "{'const', TypedText} || Priority: 40 || Availability: Available || Brief comment: None",
      "{'volatile', TypedText} || Priority: 40 || Availability: Available || Brief comment: None",
      "{'operator', TypedText} || Priority: 40 || Availability: Available || Brief comment: None",
      "{'P', TypedText} | {'::', Text} || Priority: 75 || Availability: Available || Brief comment: None",
      "{'Q', TypedText} | {'::', Text} || Priority: 75 || Availability: Available || Brief comment: None"
    ]
    check_completion_results(cr, expected)
    # Completion on ``x.`` -- member is protected and must be NotAccessible.
    cr = tu.codeComplete('fake.cpp', 13, 5, unsaved_files=files)
    expected = [
        "{'P', TypedText} | {'::', Text} || Priority: 75 || Availability: Available || Brief comment: None",
        "{'P &', ResultType} | {'operator=', TypedText} | {'(', LeftParen} | {'const P &', Placeholder} | {')', RightParen} || Priority: 34 || Availability: Available || Brief comment: None",
        "{'int', ResultType} | {'member', TypedText} || Priority: 35 || Availability: NotAccessible || Brief comment: None",
        "{'void', ResultType} | {'~P', TypedText} | {'(', LeftParen} | {')', RightParen} || Priority: 34 || Availability: Available || Brief comment: None"
    ]
    check_completion_results(cr, expected)
| lgpl-2.1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.