repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990
values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15
values |
|---|---|---|---|---|---|
statico/friend-rescue | backend/simplejson/decoder.py | 65 | 11225 | """
Implementation of JSONDecoder
"""
import re
import sys
from simplejson.scanner import Scanner, pattern
try:
from simplejson._speedups import scanstring as c_scanstring
except ImportError:
pass
# Shared regex flags for all scanner patterns: VERBOSE layout, with '^'/'$'
# matching at line boundaries and '.' also matching newlines.
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
def _floatconstants():
import struct
import sys
_BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
if sys.byteorder != 'big':
_BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
nan, inf = struct.unpack('dd', _BYTES)
return nan, inf, -inf
NaN, PosInf, NegInf = _floatconstants()
def linecol(doc, pos):
    """Translate character offset *pos* in *doc* into (line, column).

    Lines are 1-based.  The column is the distance from the most recent
    newline, or from the start of the document while on the first line.
    """
    lineno = doc.count('\n', 0, pos) + 1
    last_nl = doc.rfind('\n', 0, pos)
    colno = pos if last_nl < 0 else pos - last_nl
    return lineno, colno
def errmsg(msg, doc, pos, end=None):
    """Format a parse-error message locating *pos* (and optionally *end*)
    in *doc* as human-readable line/column coordinates."""
    start_line, start_col = linecol(doc, pos)
    if end is None:
        return '%s: line %d column %d (char %d)' % (msg, start_line, start_col, pos)
    end_line, end_col = linecol(doc, end)
    return ('%s: line %d column %d - line %d column %d (char %d - %d)'
            % (msg, start_line, start_col, end_line, end_col, pos, end))
# Literal JSON tokens mapped to their default Python values.  NaN and the
# infinities are extensions beyond strict JSON.
_CONSTANTS = {
    '-Infinity': NegInf,
    'Infinity': PosInf,
    'NaN': NaN,
    'true': True,
    'false': False,
    'null': None,
}
def JSONConstant(match, context, c=_CONSTANTS):
    """Scanner action for literal constants (true/false/null/NaN/Infinity).

    Uses ``context.parse_constant`` when the decoder provides one,
    otherwise the default value table; returns ``(value, None)`` per the
    scanner protocol.
    """
    s = match.group(0)
    fn = getattr(context, 'parse_constant', None)
    if fn is None:
        rval = c[s]
    else:
        rval = fn(s)
    return rval, None
pattern('(-?Infinity|NaN|true|false|null)')(JSONConstant)
def JSONNumber(match, context):
    """Scanner action for JSON numbers.

    Re-matches to recover the integer/fraction/exponent groups
    (``JSONNumber.regex`` is attached by the ``pattern`` decorator below),
    then dispatches to ``context.parse_float`` / ``context.parse_int``,
    defaulting to the builtin ``float`` / ``int``.
    """
    match = JSONNumber.regex.match(match.string, *match.span())
    integer, frac, exp = match.groups()
    if frac or exp:
        # Any fractional or exponent part makes this a float.
        fn = getattr(context, 'parse_float', None) or float
        res = fn(integer + (frac or '') + (exp or ''))
    else:
        fn = getattr(context, 'parse_int', None) or int
        res = fn(integer)
    return res, None
pattern(r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?')(JSONNumber)
# Matches a run of ordinary characters followed by a terminator: closing
# quote, backslash, or a raw control character (invalid in strict mode).
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
# Single-character escape sequences and their unicode translations.
BACKSLASH = {
    '"': u'"', '\\': u'\\', '/': u'/',
    'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
}
# Encoding assumed for str input when the caller does not supply one.
DEFAULT_ENCODING = "utf-8"
def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match):
    """Scan a JSON string literal in ``s``, starting just after the opening
    quote at index ``end - 1``.

    Returns ``(decoded_unicode, end)`` where ``end`` is one past the
    closing quote.  ``strict`` rejects literal control characters inside
    the string.  This is the pure-Python fallback for the C
    ``scanstring`` speedup.
    """
    if encoding is None:
        encoding = DEFAULT_ENCODING
    chunks = []
    _append = chunks.append
    begin = end - 1
    while 1:
        chunk = _m(s, end)
        if chunk is None:
            raise ValueError(
                errmsg("Unterminated string starting at", s, begin))
        end = chunk.end()
        # Group 1: plain characters; group 2: the terminator that stopped us.
        content, terminator = chunk.groups()
        if content:
            if not isinstance(content, unicode):
                content = unicode(content, encoding)
            _append(content)
        if terminator == '"':
            # End of the string literal.
            break
        elif terminator != '\\':
            # A raw control character: an error in strict mode, kept
            # verbatim otherwise.
            if strict:
                raise ValueError(errmsg("Invalid control character %r at", s, end))
            else:
                _append(terminator)
                continue
        # We are positioned just after a backslash.
        try:
            esc = s[end]
        except IndexError:
            raise ValueError(
                errmsg("Unterminated string starting at", s, begin))
        if esc != 'u':
            # Single-character escape; must be in the lookup table.
            try:
                m = _b[esc]
            except KeyError:
                raise ValueError(
                    errmsg("Invalid \\escape: %r" % (esc,), s, end))
            end += 1
        else:
            # \uXXXX escape (possibly a surrogate pair on wide builds).
            esc = s[end + 1:end + 5]
            next_end = end + 5
            msg = "Invalid \\uXXXX escape"
            try:
                if len(esc) != 4:
                    raise ValueError
                uni = int(esc, 16)
                # A high surrogate must be followed by a \uXXXX low
                # surrogate; combine the pair into one code point.
                if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
                    msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
                    if not s[end + 5:end + 7] == '\\u':
                        raise ValueError
                    esc2 = s[end + 7:end + 11]
                    if len(esc2) != 4:
                        raise ValueError
                    uni2 = int(esc2, 16)
                    uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
                    next_end += 6
                m = unichr(uni)
            except ValueError:
                raise ValueError(errmsg(msg, s, end))
            end = next_end
        _append(m)
    return u''.join(chunks), end
# Use speedup: bind the C implementation when the optional extension
# imported successfully; NameError means c_scanstring was never defined,
# so fall back to the pure-Python version.
try:
    scanstring = c_scanstring
except NameError:
    scanstring = py_scanstring
def JSONString(match, context):
    """Scanner action for a string literal: delegate to ``scanstring``
    starting just past the opening quote, honouring the decoder context's
    encoding and strictness settings."""
    encoding = getattr(context, 'encoding', None)
    strict = getattr(context, 'strict', True)
    return scanstring(match.string, match.end(), encoding, strict)
pattern(r'"')(JSONString)

# Matches any run of whitespace; used to skip between tokens.
WHITESPACE = re.compile(r'\s*', FLAGS)
def JSONObject(match, context, _w=WHITESPACE.match):
    """Scanner action for a JSON object.

    Parses ``"key": value`` pairs after the opening ``{`` and returns
    ``(pairs, end)`` where ``pairs`` is a dict (or the result of the
    decoder's ``object_hook``) and ``end`` is the index past ``}``.
    """
    pairs = {}
    s = match.string
    end = _w(s, match.end()).end()
    nextchar = s[end:end + 1]
    # Trivial empty object
    if nextchar == '}':
        return pairs, end + 1
    if nextchar != '"':
        raise ValueError(errmsg("Expecting property name", s, end))
    end += 1
    encoding = getattr(context, 'encoding', None)
    strict = getattr(context, 'strict', True)
    # Recursive scan of member values via the module-level scanner.
    iterscan = JSONScanner.iterscan
    while True:
        key, end = scanstring(s, end, encoding, strict)
        end = _w(s, end).end()
        if s[end:end + 1] != ':':
            raise ValueError(errmsg("Expecting : delimiter", s, end))
        end = _w(s, end + 1).end()
        try:
            value, end = iterscan(s, idx=end, context=context).next()
        except StopIteration:
            raise ValueError(errmsg("Expecting object", s, end))
        pairs[key] = value
        end = _w(s, end).end()
        nextchar = s[end:end + 1]
        end += 1
        if nextchar == '}':
            break
        if nextchar != ',':
            raise ValueError(errmsg("Expecting , delimiter", s, end - 1))
        end = _w(s, end).end()
        nextchar = s[end:end + 1]
        end += 1
        if nextchar != '"':
            raise ValueError(errmsg("Expecting property name", s, end - 1))
    # Let the caller transform each decoded object (e.g. JSON-RPC hinting).
    object_hook = getattr(context, 'object_hook', None)
    if object_hook is not None:
        pairs = object_hook(pairs)
    return pairs, end
pattern(r'{')(JSONObject)
def JSONArray(match, context, _w=WHITESPACE.match):
    """Scanner action for a JSON array.

    Parses comma-separated values after the opening ``[`` and returns
    ``(values, end)`` where ``end`` is the index past ``]``.
    """
    values = []
    s = match.string
    end = _w(s, match.end()).end()
    # Look-ahead for trivial empty array
    nextchar = s[end:end + 1]
    if nextchar == ']':
        return values, end + 1
    # Recursive scan of element values via the module-level scanner.
    iterscan = JSONScanner.iterscan
    while True:
        try:
            value, end = iterscan(s, idx=end, context=context).next()
        except StopIteration:
            raise ValueError(errmsg("Expecting object", s, end))
        values.append(value)
        end = _w(s, end).end()
        nextchar = s[end:end + 1]
        end += 1
        if nextchar == ']':
            break
        if nextchar != ',':
            raise ValueError(errmsg("Expecting , delimiter", s, end))
        end = _w(s, end).end()
    return values, end
pattern(r'\[')(JSONArray)
# Every kind of top-level JSON value, tried in this order by the scanner.
ANYTHING = [
    JSONObject,
    JSONArray,
    JSONString,
    JSONConstant,
    JSONNumber,
]

# Module-level scanner used recursively by the object/array actions above.
JSONScanner = Scanner(ANYTHING)
class JSONDecoder(object):
    """
    Simple JSON <http://json.org> decoder

    Performs the following translations in decoding by default:

    +---------------+-------------------+
    | JSON          | Python            |
    +===============+===================+
    | object        | dict              |
    +---------------+-------------------+
    | array         | list              |
    +---------------+-------------------+
    | string        | unicode           |
    +---------------+-------------------+
    | number (int)  | int, long         |
    +---------------+-------------------+
    | number (real) | float             |
    +---------------+-------------------+
    | true          | True              |
    +---------------+-------------------+
    | false         | False             |
    +---------------+-------------------+
    | null          | None              |
    +---------------+-------------------+

    It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
    their corresponding ``float`` values, which is outside the JSON spec.
    """

    _scanner = Scanner(ANYTHING)
    # NOTE(review): a class-level __all__ is unusual and has no effect on
    # module imports; kept for backward compatibility.
    __all__ = ['__init__', 'decode', 'raw_decode']

    def __init__(self, encoding=None, object_hook=None, parse_float=None,
                 parse_int=None, parse_constant=None, strict=True):
        """
        ``encoding`` determines the encoding used to interpret any ``str``
        objects decoded by this instance (utf-8 by default).  It has no
        effect when decoding ``unicode`` objects.

        Note that currently only encodings that are a superset of ASCII work,
        strings of other encodings should be passed in as ``unicode``.

        ``object_hook``, if specified, will be called with the result
        of every JSON object decoded and its return value will be used in
        place of the given ``dict``.  This can be used to provide custom
        deserializations (e.g. to support JSON-RPC class hinting).

        ``parse_float``, if specified, will be called with the string
        of every JSON float to be decoded.  By default this is equivalent to
        float(num_str).  This can be used to use another datatype or parser
        for JSON floats (e.g. decimal.Decimal).

        ``parse_int``, if specified, will be called with the string
        of every JSON int to be decoded.  By default this is equivalent to
        int(num_str).  This can be used to use another datatype or parser
        for JSON integers (e.g. float).

        ``parse_constant``, if specified, will be called with one of the
        following strings: -Infinity, Infinity, NaN, null, true, false.
        This can be used to raise an exception if invalid JSON numbers
        are encountered.

        ``strict`` controls whether literal control characters are allowed
        inside decoded strings.
        """
        self.encoding = encoding
        self.object_hook = object_hook
        self.parse_float = parse_float
        self.parse_int = parse_int
        self.parse_constant = parse_constant
        self.strict = strict

    def decode(self, s, _w=WHITESPACE.match):
        """
        Return the Python representation of ``s`` (a ``str`` or ``unicode``
        instance containing a JSON document).

        Raises ``ValueError`` if the document has trailing non-whitespace
        data.
        """
        obj, end = self.raw_decode(s, idx=_w(s, 0).end())
        end = _w(s, end).end()
        if end != len(s):
            raise ValueError(errmsg("Extra data", s, end, len(s)))
        return obj

    def raw_decode(self, s, **kw):
        """
        Decode a JSON document from ``s`` (a ``str`` or ``unicode`` beginning
        with a JSON document) and return a 2-tuple of the Python
        representation and the index in ``s`` where the document ended.

        This can be used to decode a JSON document from a string that may
        have extraneous data at the end.
        """
        # The decoder itself serves as the scanner "context" so the
        # actions can see encoding/strict/hooks as attributes.
        kw.setdefault('context', self)
        try:
            obj, end = self._scanner.iterscan(s, **kw).next()
        except StopIteration:
            raise ValueError("No JSON object could be decoded")
        return obj, end

# Public API of this module.
__all__ = ['JSONDecoder']
| mit |
BinRoot/TensorFlow-Book | ch04_classification/logistic_2d.py | 1 | 1737 | import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
# Gradient-descent hyperparameters for the logistic-regression demo below.
learning_rate = 0.1
training_epochs = 2000
def sigmoid(x):
    """NumPy logistic function: 1 / (1 + e^-x)."""
    return np.reciprocal(1. + np.exp(-x))
# Two Gaussian clusters in 2-D: label 0 centred near (3, 2), label 1 near (7, 6).
x1_label1 = np.random.normal(3, 1, 1000)
x2_label1 = np.random.normal(2, 1, 1000)
x1_label2 = np.random.normal(7, 1, 1000)
x2_label2 = np.random.normal(6, 1, 1000)
x1s = np.append(x1_label1, x1_label2)
x2s = np.append(x2_label1, x2_label2)
ys = np.asarray([0.] * len(x1_label1) + [1.] * len(x1_label2))

# Placeholders for the two features and the binary target.
X1 = tf.placeholder(tf.float32, shape=(None,), name="x1")
X2 = tf.placeholder(tf.float32, shape=(None,), name="x2")
Y = tf.placeholder(tf.float32, shape=(None,), name="y")
# w = [bias, weight_x1, weight_x2]
w = tf.Variable([0., 0., 0.], name="w", trainable=True)

# NOTE(review): the linear term is negated inside the sigmoid; the
# boundary scan below uses the same convention, so they stay consistent.
y_model = tf.sigmoid(-(w[2] * X2 + w[1] * X1 + w[0]))
# Binary cross-entropy loss.
cost = tf.reduce_mean(-tf.log(y_model) * Y -tf.log(1 - y_model) * (1 - Y))
train_op = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)

with tf.Session() as sess:
    sess.run(tf.initialize_all_variables())
    prev_err = 0
    for epoch in range(training_epochs):
        err, _ = sess.run([cost, train_op], {X1: x1s, X2: x2s, Y: ys})
        print(epoch, err)
        # Stop early once the loss change falls below tolerance.
        if abs(prev_err - err) < 0.0001:
            break
        prev_err = err
    w_val = sess.run(w)

# Collect grid points where the model output is ~0.5 (the decision boundary).
x1_boundary, x2_boundary = [], []
for x1_test in np.linspace(0, 10, 100):
    for x2_test in np.linspace(0, 10, 100):
        z = sigmoid(-x2_test*w_val[2] - x1_test*w_val[1] - w_val[0])
        if abs(z - 0.5) < 0.01:
            x1_boundary.append(x1_test)
            x2_boundary.append(x2_test)

# Boundary in blue; the two labelled clusters in red/green.
plt.scatter(x1_boundary, x2_boundary, c='b', marker='o', s=20)
plt.scatter(x1_label1, x2_label1, c='r', marker='x', s=20)
plt.scatter(x1_label2, x2_label2, c='g', marker='1', s=20)
plt.show()
| mit |
hazrpg/calibre | src/calibre/gui2/store/stores/amazon_uk_plugin.py | 3 | 5362 | #!/usr/bin/env python2
# vim:fileencoding=utf-8
# License: GPLv3 Copyright: 2015, Kovid Goyal <kovid at kovidgoyal.net>
from __future__ import (unicode_literals, division, absolute_import,
print_function)
store_version = 11 # Needed for dynamic plugin loading
from contextlib import closing
import urllib
from lxml import html
from PyQt5.Qt import QUrl
from calibre import browser
from calibre.gui2 import open_url
from calibre.gui2.store import StorePlugin
from calibre.gui2.store.search_result import SearchResult
# Kindle-store search endpoint and its fixed query parameters.
SEARCH_BASE_URL = 'http://www.amazon.co.uk/s/'
SEARCH_BASE_QUERY = {'url': 'search-alias=digital-text'}
# Product-detail prefix (ASIN appended) and storefront link.
DETAILS_URL = 'http://amazon.co.uk/dp/'
STORE_LINK = 'http://www.amazon.co.uk'
# Marker strings used to detect DRM status on a product page.
DRM_SEARCH_TEXT = 'Simultaneous Device Usage'
DRM_FREE_TEXT = 'Unlimited'
def search_amazon(query, max_results=10, timeout=60,
                  write_html_to=None,
                  base_url=SEARCH_BASE_URL,
                  base_query=SEARCH_BASE_QUERY,
                  field_keywords='field-keywords'
                  ):
    """Search the Amazon UK Kindle store and yield SearchResult objects.

    ``query`` is the user search string; at most ``max_results`` Kindle
    editions are yielded.  ``write_html_to`` optionally dumps the raw
    results page for debugging.  Returns early (yielding nothing) when
    the results container cannot be located in the page.
    """
    uquery = base_query.copy()
    uquery[field_keywords] = query

    def asbytes(x):
        # urlencode on Python 2 wants bytes; encode unicode values as UTF-8.
        if isinstance(x, type('')):
            x = x.encode('utf-8')
        return x
    uquery = {asbytes(k):asbytes(v) for k, v in uquery.iteritems()}
    url = base_url + '?' + urllib.urlencode(uquery).decode('ascii')
    br = browser()

    counter = max_results
    with closing(br.open(url, timeout=timeout)) as f:
        raw = f.read()
        if write_html_to is not None:
            # Renamed from 'f' to avoid shadowing the open response handle.
            with open(write_html_to, 'wb') as out:
                out.write(raw)
        doc = html.fromstring(raw)
        try:
            results = doc.xpath('//div[@id="atfResults" and @class]')[0]
        except IndexError:
            return

        # Only the current results-page layout is supported; bail out on
        # anything else rather than scrape with wrong selectors.
        if 's-result-list-parent-container' in results.get('class', ''):
            data_xpath = "descendant-or-self::li[@class and contains(concat(' ', normalize-space(@class), ' '), ' s-result-item ')]"
            format_xpath = './/a[contains(text(), "Kindle Edition")]//text()'
            asin_xpath = '@data-asin'
            cover_xpath = "descendant-or-self::img[@class and contains(concat(' ', normalize-space(@class), ' '), ' s-access-image ')]/@src"
            title_xpath = "descendant-or-self::h2[@class and contains(concat(' ', normalize-space(@class), ' '), ' s-access-title ')]//text()"
            author_xpath = './/span[starts-with(text(), "by ")]/following-sibling::span//text()'
            price_xpath = '(.//span[contains(@class, " s-price ")])[last()]//text()'
        else:
            return

        for data in doc.xpath(data_xpath):
            if counter <= 0:
                break

            # Even though we are searching digital-text only Amazon will still
            # put in results for non Kindle books (author pages). So we need
            # to explicitly check if the item is a Kindle book and ignore it
            # if it isn't.
            # (Renamed from 'format' to avoid shadowing the builtin.)
            kindle_format = ''.join(data.xpath(format_xpath))
            if 'kindle' not in kindle_format.lower():
                continue

            # We must have an asin otherwise we can't easily reference the
            # book later.
            asin = data.xpath(asin_xpath)
            if asin:
                asin = asin[0]
            else:
                continue
            cover_url = ''.join(data.xpath(cover_xpath))
            title = ''.join(data.xpath(title_xpath))
            author = ''.join(data.xpath(author_xpath))
            try:
                author = author.split('by ', 1)[1].split(" (")[0]
            except Exception:
                # Author text may lack the expected "by " prefix; keep the
                # raw string.  (Was a bare except:, which also swallowed
                # KeyboardInterrupt/SystemExit.)
                pass
            price = ''.join(data.xpath(price_xpath))

            counter -= 1

            s = SearchResult()
            s.cover_url = cover_url.strip()
            s.title = title.strip()
            s.author = author.strip()
            s.price = price.strip()
            s.detail_item = asin.strip()
            s.formats = 'Kindle'

            yield s
class AmazonKindleStore(StorePlugin):
    """Calibre store plugin for the Amazon UK Kindle store."""

    def open(self, parent=None, detail_item=None, external=False):
        # Always open in the external browser: the product page when an
        # ASIN is given, otherwise the storefront.
        store_link = (DETAILS_URL + detail_item) if detail_item else STORE_LINK
        open_url(QUrl(store_link))

    def search(self, query, max_results=10, timeout=60):
        # Thin wrapper around the module-level search generator.
        for result in search_amazon(query, max_results=max_results, timeout=timeout):
            yield result

    def get_details(self, search_result, timeout):
        """Fetch the product page and classify its DRM status by looking
        for marker strings in the details list."""
        url = DETAILS_URL
        br = browser()
        with closing(br.open(url + search_result.detail_item, timeout=timeout)) as nf:
            idata = html.fromstring(nf.read())
            if idata.xpath('boolean(//div[@class="content"]//li/b[contains(text(), "' +
                           DRM_SEARCH_TEXT + '")])'):
                # The device-usage row exists; "Unlimited" there means DRM-free.
                if idata.xpath('boolean(//div[@class="content"]//li[contains(., "' +
                               DRM_FREE_TEXT + '") and contains(b, "' +
                               DRM_SEARCH_TEXT + '")])'):
                    search_result.drm = SearchResult.DRM_UNLOCKED
                else:
                    search_result.drm = SearchResult.DRM_UNKNOWN
            else:
                search_result.drm = SearchResult.DRM_LOCKED
        return True
if __name__ == '__main__':
    # Ad-hoc CLI: search for the given terms and dump the raw results HTML
    # to a fixed debug path.
    import sys
    for result in search_amazon(' '.join(sys.argv[1:]), write_html_to='/t/amazon.html'):
        print (result)
| gpl-3.0 |
saiwing-yeung/scikit-learn | examples/decomposition/plot_pca_vs_fa_model_selection.py | 70 | 4523 | """
===============================================================
Model selection with Probabilistic PCA and Factor Analysis (FA)
===============================================================
Probabilistic PCA and Factor Analysis are probabilistic models.
The consequence is that the likelihood of new data can be used
for model selection and covariance estimation.
Here we compare PCA and FA with cross-validation on low rank data corrupted
with homoscedastic noise (noise variance
is the same for each feature) or heteroscedastic noise (noise variance
is the different for each feature). In a second step we compare the model
likelihood to the likelihoods obtained from shrinkage covariance estimators.
One can observe that with homoscedastic noise both FA and PCA succeed
in recovering the size of the low rank subspace. The likelihood with PCA
is higher than FA in this case. However PCA fails and overestimates
the rank when heteroscedastic noise is present. Under appropriate
circumstances the low rank models are more likely than shrinkage models.
The automatic estimation from
Automatic Choice of Dimensionality for PCA. NIPS 2000: 598-604
by Thomas P. Minka is also compared.
"""
# Authors: Alexandre Gramfort
# Denis A. Engemann
# License: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from scipy import linalg
from sklearn.decomposition import PCA, FactorAnalysis
from sklearn.covariance import ShrunkCovariance, LedoitWolf
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import GridSearchCV
print(__doc__)
###############################################################################
# Create the data: a rank-10 signal embedded in 50 features.

n_samples, n_features, rank = 1000, 50, 10
sigma = 1.
rng = np.random.RandomState(42)
U, _, _ = linalg.svd(rng.randn(n_features, n_features))
X = np.dot(rng.randn(n_samples, rank), U[:, :rank].T)

# Adding homoscedastic noise (same variance for every feature)
X_homo = X + sigma * rng.randn(n_samples, n_features)

# Adding heteroscedastic noise (per-feature variance)
sigmas = sigma * rng.rand(n_features) + sigma / 2.
X_hetero = X + rng.randn(n_samples, n_features) * sigmas

###############################################################################
# Fit the models

n_components = np.arange(0, n_features, 5)  # options for n_components
def compute_scores(X):
    """Return cross-validated log-likelihood scores for PCA and FA at each
    value of the module-level ``n_components`` grid."""
    def mean_cv(estimator, n):
        # Mutate the estimator's dimensionality, then score it by CV.
        estimator.n_components = n
        return np.mean(cross_val_score(estimator, X))

    pca = PCA(svd_solver='full')
    fa = FactorAnalysis()
    pca_scores = [mean_cv(pca, n) for n in n_components]
    fa_scores = [mean_cv(fa, n) for n in n_components]
    return pca_scores, fa_scores
def shrunk_cov_score(X):
    """Mean CV log-likelihood of a ShrunkCovariance model whose shrinkage
    is chosen by grid search."""
    grid = GridSearchCV(ShrunkCovariance(),
                        {'shrinkage': np.logspace(-2, 0, 30)})
    best = grid.fit(X).best_estimator_
    return np.mean(cross_val_score(best, X))
def lw_score(X):
    """Mean cross-validated log-likelihood of a Ledoit-Wolf covariance fit."""
    return cross_val_score(LedoitWolf(), X).mean()
# For each noise regime: pick the best rank by CV for PCA and FA, compare
# with PCA's MLE dimensionality estimate and with shrinkage covariance
# baselines, and plot everything.
for X, title in [(X_homo, 'Homoscedastic Noise'),
                 (X_hetero, 'Heteroscedastic Noise')]:
    pca_scores, fa_scores = compute_scores(X)
    n_components_pca = n_components[np.argmax(pca_scores)]
    n_components_fa = n_components[np.argmax(fa_scores)]

    pca = PCA(svd_solver='full', n_components='mle')
    pca.fit(X)
    n_components_pca_mle = pca.n_components_

    print("best n_components by PCA CV = %d" % n_components_pca)
    print("best n_components by FactorAnalysis CV = %d" % n_components_fa)
    print("best n_components by PCA MLE = %d" % n_components_pca_mle)

    plt.figure()
    plt.plot(n_components, pca_scores, 'b', label='PCA scores')
    plt.plot(n_components, fa_scores, 'r', label='FA scores')
    plt.axvline(rank, color='g', label='TRUTH: %d' % rank, linestyle='-')
    plt.axvline(n_components_pca, color='b',
                label='PCA CV: %d' % n_components_pca, linestyle='--')
    plt.axvline(n_components_fa, color='r',
                label='FactorAnalysis CV: %d' % n_components_fa,
                linestyle='--')
    plt.axvline(n_components_pca_mle, color='k',
                label='PCA MLE: %d' % n_components_pca_mle, linestyle='--')

    # compare with other covariance estimators
    plt.axhline(shrunk_cov_score(X), color='violet',
                label='Shrunk Covariance MLE', linestyle='-.')
    # Fixed: the label previously did "'LedoitWolf MLE' % n_components_pca_mle",
    # applying % to a format string with no placeholder, which raises
    # TypeError at runtime.
    plt.axhline(lw_score(X), color='orange',
                label='LedoitWolf MLE', linestyle='-.')
    plt.xlabel('nb of components')
    plt.ylabel('CV scores')
    plt.legend(loc='lower right')
    plt.title(title)

plt.show()
| bsd-3-clause |
Balachan27/django | tests/swappable_models/tests.py | 339 | 2130 | from __future__ import unicode_literals
from swappable_models.models import Article
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.core import management
from django.test import TestCase, override_settings
from django.utils.six import StringIO
class SwappableModelTests(TestCase):
    """Tests that swapped-out models do not receive generated metadata and
    that swappable model references are case-insensitive."""

    available_apps = [
        'swappable_models',
        'django.contrib.auth',
        'django.contrib.contenttypes',
    ]

    @override_settings(TEST_ARTICLE_MODEL='swappable_models.AlternateArticle')
    def test_generated_data(self):
        "Permissions and content types are not created for a swapped model"

        # Delete all permissions and content_types
        Permission.objects.filter(content_type__app_label='swappable_models').delete()
        ContentType.objects.filter(app_label='swappable_models').delete()

        # Re-run migrate. This will re-build the permissions and content types.
        new_io = StringIO()
        management.call_command('migrate', interactive=False, stdout=new_io)

        # Check that content types and permissions exist for the swapped model,
        # but not for the swappable model.
        apps_models = [(p.content_type.app_label, p.content_type.model)
                       for p in Permission.objects.all()]
        self.assertIn(('swappable_models', 'alternatearticle'), apps_models)
        self.assertNotIn(('swappable_models', 'article'), apps_models)

        apps_models = [(ct.app_label, ct.model)
                       for ct in ContentType.objects.all()]
        self.assertIn(('swappable_models', 'alternatearticle'), apps_models)
        self.assertNotIn(('swappable_models', 'article'), apps_models)

    @override_settings(TEST_ARTICLE_MODEL='swappable_models.article')
    def test_case_insensitive(self):
        "Model names are case insensitive. Check that model swapping honors this."
        try:
            Article.objects.all()
        except AttributeError:
            self.fail('Swappable model names should be case insensitive.')

        self.assertIsNone(Article._meta.swapped)
| bsd-3-clause |
andante20/volatility | volatility/plugins/vboxinfo.py | 45 | 1725 | # Volatility
# Copyright (C) 2009-2012 Volatility Foundation
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
import volatility.plugins.crashinfo as crashinfo
class VBoxInfo(crashinfo.CrashInfo):
    """Dump virtualbox information"""

    # Only applicable to VirtualBox ELF64 core dump address spaces.
    target_as = ['VirtualBoxCoreDumpElf64']

    def render_text(self, outfd, data):
        """Write the core-dump header fields, then a table of memory runs
        (file offset, guest memory offset, length)."""
        header = data.get_header()

        outfd.write("Magic: {0:#x}\n".format(header.u32Magic))
        outfd.write("Format: {0:#x}\n".format(header.u32FmtVersion))
        outfd.write("VirtualBox {0}.{1}.{2} (revision {3})\n".format(
            header.Major,
            header.Minor, header.Build,
            header.u32VBoxRevision))
        outfd.write("CPUs: {0}\n\n".format(header.cCpus))

        self.table_header(outfd, [("File Offset", "[addrpad]"),
                                  ("Memory Offset", "[addrpad]"),
                                  ("Size", "[addrpad]")])

        for memory_offset, file_offset, length in data.get_runs():
            self.table_row(outfd, file_offset, memory_offset, length)
| gpl-2.0 |
chinmaygarde/mojo | third_party/cython/src/Cython/Plex/Lexicons.py | 102 | 6291 | #=======================================================================
#
# Python Lexical Analyser
#
# Lexical Analyser Specification
#
#=======================================================================
import types
import Actions
import DFA
import Errors
import Machines
import Regexps
# debug_flags for Lexicon constructor: bit flags selecting which machine
# dumps are written to the debug stream.
DUMP_NFA = 1
DUMP_DFA = 2
class State(object):
    """
    Used as part of a Plex.Lexicon specification to introduce a
    user-defined scanner state.

    Constructor:

       State(name, token_specifications)
    """

    name = None    # string naming the state
    tokens = None  # token specifications active in this state

    def __init__(self, name, tokens):
        self.name, self.tokens = name, tokens
class Lexicon(object):
    """
    Lexicon(specification) builds a lexical analyser from the given
    |specification|. The specification consists of a list of
    specification items. Each specification item may be either:

    1) A token definition, which is a tuple:

       (pattern, action)

       The |pattern| is a regular axpression built using the
       constructors defined in the Plex module.

       The |action| is the action to be performed when this pattern
       is recognised (see below).

    2) A state definition:

       State(name, tokens)

       where |name| is a character string naming the state,
       and |tokens| is a list of token definitions as
       above. The meaning and usage of states is described
       below.

    Actions
    -------

    The |action| in a token specication may be one of three things:

    1) A function, which is called as follows:

       function(scanner, text)

       where |scanner| is the relevant Scanner instance, and |text|
       is the matched text. If the function returns anything
       other than None, that value is returned as the value of the
       token. If it returns None, scanning continues as if the IGNORE
       action were specified (see below).

    2) One of the following special actions:

       IGNORE means that the recognised characters will be treated as
       white space and ignored. Scanning will continue until
       the next non-ignored token is recognised before returning.

       TEXT causes the scanned text itself to be returned as the
       value of the token.

    3) Any other value, which is returned as the value of the token.

    States
    ------

    At any given time, the scanner is in one of a number of states.
    Associated with each state is a set of possible tokens. When scanning,
    only tokens associated with the current state are recognised.

    There is a default state, whose name is the empty string. Token
    definitions which are not inside any State definition belong to
    the default state.

    The initial state of the scanner is the default state. The state can
    be changed in one of two ways:

    1) Using Begin(state_name) as the action of a token.

    2) Calling the begin(state_name) method of the Scanner.

    To change back to the default state, use '' as the state name.
    """

    machine = None  # Machine (the compiled DFA)
    tables = None   # StateTableMachine

    def __init__(self, specifications, debug = None, debug_flags = 7, timings = None):
        # Build an NFA from the specification, optionally dumping it and
        # timing the construction, then convert it to a DFA.
        if type(specifications) != types.ListType:
            raise Errors.InvalidScanner("Scanner definition is not a list")
        if timings:
            from Timing import time
            total_time = 0.0
            time1 = time()
        nfa = Machines.Machine()
        default_initial_state = nfa.new_initial_state('')
        token_number = 1
        # Tokens inside a State() get their own initial state; bare tuples
        # belong to the default (unnamed) state.
        for spec in specifications:
            if isinstance(spec, State):
                user_initial_state = nfa.new_initial_state(spec.name)
                for token in spec.tokens:
                    self.add_token_to_machine(
                        nfa, user_initial_state, token, token_number)
                    token_number = token_number + 1
            elif type(spec) == types.TupleType:
                self.add_token_to_machine(
                    nfa, default_initial_state, spec, token_number)
                token_number = token_number + 1
            else:
                raise Errors.InvalidToken(
                    token_number,
                    "Expected a token definition (tuple) or State instance")
        if timings:
            time2 = time()
            total_time = total_time + (time2 - time1)
            time3 = time()
        if debug and (debug_flags & 1):
            debug.write("\n============= NFA ===========\n")
            nfa.dump(debug)
        dfa = DFA.nfa_to_dfa(nfa, debug = (debug_flags & 3) == 3 and debug)
        if timings:
            time4 = time()
            total_time = total_time + (time4 - time3)
        if debug and (debug_flags & 2):
            debug.write("\n============= DFA ===========\n")
            dfa.dump(debug)
        if timings:
            timings.write("Constructing NFA : %5.2f\n" % (time2 - time1))
            timings.write("Converting to DFA: %5.2f\n" % (time4 - time3))
            timings.write("TOTAL : %5.2f\n" % total_time)
        self.machine = dfa

    def add_token_to_machine(self, machine, initial_state, token_spec, token_number):
        """Compile one (pattern, action) token into NFA states reachable
        from *initial_state*; later tokens get lower priority."""
        try:
            (re, action_spec) = self.parse_token_definition(token_spec)
            # Disabled this -- matching empty strings can be useful
            #if re.nullable:
            #    raise Errors.InvalidToken(
            #        token_number, "Pattern can match 0 input symbols")
            # Wrap plain values in Return and callables in Call so every
            # action is an Actions.Action instance.
            if isinstance(action_spec, Actions.Action):
                action = action_spec
            else:
                try:
                    action_spec.__call__
                except AttributeError:
                    action = Actions.Return(action_spec)
                else:
                    action = Actions.Call(action_spec)
            final_state = machine.new_state()
            re.build_machine(machine, initial_state, final_state,
                             match_bol = 1, nocase = 0)
            final_state.set_action(action, priority = -token_number)
        except Errors.PlexError, e:
            raise e.__class__("Token number %d: %s" % (token_number, e))

    def parse_token_definition(self, token_spec):
        """Validate a token spec and return its (pattern, action) pair."""
        if type(token_spec) != types.TupleType:
            raise Errors.InvalidToken("Token definition is not a tuple")
        if len(token_spec) != 2:
            raise Errors.InvalidToken("Wrong number of items in token definition")
        pattern, action = token_spec
        if not isinstance(pattern, Regexps.RE):
            raise Errors.InvalidToken("Pattern is not an RE instance")
        return (pattern, action)

    def get_initial_state(self, name):
        # Delegate to the compiled machine; *name* is the state name ('' for default).
        return self.machine.get_initial_state(name)
| bsd-3-clause |
strint/tensorflow | tensorflow/contrib/tensor_forest/python/ops/data_ops.py | 36 | 6976 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ops for preprocessing data."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.tensor_forest.python.ops import tensor_forest_ops
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import sparse_ops
from tensorflow.python.platform import tf_logging as logging
# Data column types for indicating categorical or other non-float values.
DATA_FLOAT = 0
DATA_CATEGORICAL = 1

# Maps a tensor dtype to the corresponding feature type above.
DTYPE_TO_FTYPE = {
    dtypes.string: DATA_CATEGORICAL,
    dtypes.int32: DATA_CATEGORICAL,
    dtypes.int64: DATA_CATEGORICAL,
    dtypes.float32: DATA_FLOAT,
    dtypes.float64: DATA_FLOAT
}
def CastToFloat(tensor):
    """Return *tensor* with float values: string tensors are reinterpreted
    via the tensor_forest op, integer tensors are cast, and float tensors
    pass through unchanged."""
    if tensor.dtype == dtypes.string:
        return tensor_forest_ops.reinterpret_string_to_float(tensor)
    if tensor.dtype.is_integer:
        return math_ops.to_float(tensor)
    return tensor
# TODO(gilberth): If protos are ever allowed in dynamically loaded custom
# op libraries, convert this to a proto like a sane person.
class TensorForestDataSpec(object):
    """Ad-hoc stand-in for a proto describing dense and sparse columns."""

    def __init__(self):
        self.sparse = DataColumnCollection()
        self.dense = DataColumnCollection()
        self.dense_features_size = 0

    def SerializeToString(self):
        dense_str = self.dense.SerializeToString()
        sparse_str = self.sparse.SerializeToString()
        return 'dense_features_size: %d dense: [%s] sparse: [%s]' % (
            self.dense_features_size, dense_str, sparse_str)
class DataColumnCollection(object):
  """Collection of DataColumns, meant to mimic a proto repeated field."""

  def __init__(self):
    self.cols = []

  def add(self):  # pylint: disable=invalid-name
    # Append a fresh column and hand it back for the caller to fill in.
    col = DataColumn()
    self.cols.append(col)
    return col

  def size(self):  # pylint: disable=invalid-name
    return len(self.cols)

  def SerializeToString(self):
    return ''.join('{%s}' % col.SerializeToString() for col in self.cols)
class DataColumn(object):
  """Lightweight record describing one input column: name, original
  feature type, and width in features."""

  def __init__(self):
    # Defaults mirror an unset proto field.
    self.name = ''
    self.original_type = ''
    self.size = 0

  def SerializeToString(self):
    return 'name: {0} original_type: {1} size: {2}'.format(
        self.name, self.original_type, self.size)
def ParseDataTensorOrDict(data):
  """Return a tensor to use for input data.

  The incoming features can be a dict where keys are the string names of the
  columns, which we turn into a single 2-D tensor.

  Args:
    data: `Tensor` or `dict` of `Tensor` objects.

  Returns:
    A 2-D tensor for input to tensor_forest, a keys tensor for the
    tf.Examples if they exist, and a list of the type of each column
    (e.g. continuous float, categorical).
  """
  data_spec = TensorForestDataSpec()
  if isinstance(data, dict):
    dense_features_size = 0
    dense_features = []
    sparse_features = []
    # Iterate in sorted key order so the resulting column layout is
    # deterministic across runs.
    for k in sorted(data.keys()):
      is_sparse = isinstance(data[k], sparse_tensor.SparseTensor)
      if is_sparse:
        # TODO(gilberth): support sparse categorical.
        if data[k].dtype == dtypes.string:
          logging.info('TensorForest does not support sparse categorical. '
                       'Transform it into a number with hash buckets.')
          continue
        elif data_spec.sparse.size() == 0:
          # All sparse inputs share one spec entry ('all_sparse') with
          # unknown width (-1); it is created only for the first sparse
          # column encountered.
          col_spec = data_spec.sparse.add()
          col_spec.original_type = DATA_FLOAT
          col_spec.name = 'all_sparse'
          col_spec.size = -1
        sparse_features.append(
            sparse_tensor.SparseTensor(data[
                k].indices, CastToFloat(data[k].values), data[k].dense_shape))
      else:
        # Dense column: record its original type and width in the spec.
        col_spec = data_spec.dense.add()
        col_spec.original_type = DTYPE_TO_FTYPE[data[k].dtype]
        col_spec.name = k
        # the second dimension of get_shape should always be known.
        shape = data[k].get_shape()
        if len(shape) == 1:
          col_spec.size = 1
        else:
          col_spec.size = shape[1].value
        dense_features_size += col_spec.size
        dense_features.append(CastToFloat(data[k]))
    processed_dense_features = None
    processed_sparse_features = None
    if dense_features:
      # Concatenate all dense columns along axis 1 into one 2-D tensor.
      processed_dense_features = array_ops.concat(dense_features, 1)
      data_spec.dense_features_size = dense_features_size
    if sparse_features:
      # Likewise merge all sparse columns into a single SparseTensor.
      processed_sparse_features = sparse_ops.sparse_concat(1, sparse_features)
    logging.info(data_spec.SerializeToString())
    return processed_dense_features, processed_sparse_features, data_spec
  elif isinstance(data, sparse_tensor.SparseTensor):
    # A single SparseTensor passes through unchanged; only the spec is built.
    col_spec = data_spec.sparse.add()
    col_spec.name = 'sparse_features'
    col_spec.original_type = DTYPE_TO_FTYPE[data.dtype]
    col_spec.size = -1
    data_spec.dense_features_size = 0
    return None, data, data_spec
  else:
    # Plain dense input: anything convertible to a Tensor.
    data = ops.convert_to_tensor(data)
    col_spec = data_spec.dense.add()
    col_spec.name = 'dense_features'
    col_spec.original_type = DTYPE_TO_FTYPE[data.dtype]
    # NOTE(review): unlike the dict branch this stores the Dimension object
    # itself rather than .value — presumably equivalent downstream; confirm.
    col_spec.size = data.get_shape()[1]
    data_spec.dense_features_size = col_spec.size
    return data, None, data_spec
def ParseLabelTensorOrDict(labels):
  """Return a tensor to use for input labels to tensor_forest.

  The incoming targets can be a dict where keys are the string names of the
  columns, which we turn into a single 1-D tensor for classification or
  2-D tensor for regression.

  Converts sparse tensors to dense ones.

  Args:
    labels: `Tensor` or `dict` of `Tensor` objects.

  Returns:
    A 2-D tensor for labels/outputs.
  """
  if isinstance(labels, dict):
    # BUG FIX: the per-column sparsity check previously tested the dict
    # itself (`isinstance(labels, ...)`), which is never a SparseTensor,
    # so sparse label columns were concatenated without densification.
    # Check each column `labels[k]` instead.
    return math_ops.to_float(
        array_ops.concat(
            [
                sparse_ops.sparse_tensor_to_dense(
                    labels[k], default_value=-1) if isinstance(
                        labels[k], sparse_tensor.SparseTensor) else labels[k]
                for k in sorted(labels.keys())
            ],
            1))
  else:
    if isinstance(labels, sparse_tensor.SparseTensor):
      # Densify, marking missing entries with -1.
      return math_ops.to_float(sparse_ops.sparse_tensor_to_dense(
          labels, default_value=-1))
    else:
      return math_ops.to_float(labels)
| apache-2.0 |
ProfessorX/Config | .PyCharm30/system/python_stubs/-1247971765/PyQt4/QtNetwork/QNetworkSession.py | 1 | 4868 | # encoding: utf-8
# module PyQt4.QtNetwork
# from /usr/lib/python3/dist-packages/PyQt4/QtNetwork.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
# no doc
# imports
import PyQt4.QtCore as __PyQt4_QtCore
# NOTE: auto-generated PyQt4 binding stub ("by generator 1.135"); method
# bodies are placeholders and signatures were recovered from __doc__ strings.
# Keep code unchanged so it matches regenerated stubs.
class QNetworkSession(__PyQt4_QtCore.QObject):
    """ QNetworkSession(QNetworkConfiguration, QObject parent=None) """
    def accept(self): # real signature unknown; restored from __doc__
        """ QNetworkSession.accept() """
        pass
    def activeTime(self): # real signature unknown; restored from __doc__
        """ QNetworkSession.activeTime() -> int """
        return 0
    def bytesReceived(self): # real signature unknown; restored from __doc__
        """ QNetworkSession.bytesReceived() -> int """
        return 0
    def bytesWritten(self): # real signature unknown; restored from __doc__
        """ QNetworkSession.bytesWritten() -> int """
        return 0
    def close(self): # real signature unknown; restored from __doc__
        """ QNetworkSession.close() """
        pass
    def closed(self, *args, **kwargs): # real signature unknown
        """ QNetworkSession.closed [signal] """
        pass
    # NOTE(review): the bare name below refers to a class defined elsewhere in
    # the generated package; calling this stub directly would raise NameError.
    def configuration(self): # real signature unknown; restored from __doc__
        """ QNetworkSession.configuration() -> QNetworkConfiguration """
        return QNetworkConfiguration
    def connectNotify(self, SIGNAL, *args, **kwargs): # real signature unknown; NOTE: unreliably restored from __doc__
        """ QNetworkSession.connectNotify(SIGNAL()) """
        pass
    def disconnectNotify(self, SIGNAL, *args, **kwargs): # real signature unknown; NOTE: unreliably restored from __doc__
        """ QNetworkSession.disconnectNotify(SIGNAL()) """
        pass
    # In Qt, error is both an accessor and an overloaded signal; the stub
    # generator collapses both into one placeholder method.
    def error(self): # real signature unknown; restored from __doc__
        """
        QNetworkSession.error() -> QNetworkSession.SessionError
        QNetworkSession.error[QNetworkSession.SessionError] [signal]
        """
        pass
    def errorString(self): # real signature unknown; restored from __doc__
        """ QNetworkSession.errorString() -> str """
        return ""
    def ignore(self): # real signature unknown; restored from __doc__
        """ QNetworkSession.ignore() """
        pass
    # NOTE(review): QNetworkInterface is also an unresolved sibling-class name.
    def interface(self): # real signature unknown; restored from __doc__
        """ QNetworkSession.interface() -> QNetworkInterface """
        return QNetworkInterface
    def isOpen(self): # real signature unknown; restored from __doc__
        """ QNetworkSession.isOpen() -> bool """
        return False
    def migrate(self): # real signature unknown; restored from __doc__
        """ QNetworkSession.migrate() """
        pass
    def newConfigurationActivated(self, *args, **kwargs): # real signature unknown
        """ QNetworkSession.newConfigurationActivated [signal] """
        pass
    def open(self): # real signature unknown; restored from __doc__
        """ QNetworkSession.open() """
        pass
    def opened(self, *args, **kwargs): # real signature unknown
        """ QNetworkSession.opened [signal] """
        pass
    def preferredConfigurationChanged(self, *args, **kwargs): # real signature unknown
        """ QNetworkSession.preferredConfigurationChanged[QNetworkConfiguration, bool] [signal] """
        pass
    def reject(self): # real signature unknown; restored from __doc__
        """ QNetworkSession.reject() """
        pass
    def sessionProperty(self, p_str): # real signature unknown; restored from __doc__
        """ QNetworkSession.sessionProperty(str) -> object """
        return object()
    def setSessionProperty(self, p_str, p_object): # real signature unknown; restored from __doc__
        """ QNetworkSession.setSessionProperty(str, object) """
        pass
    def state(self): # real signature unknown; restored from __doc__
        """ QNetworkSession.state() -> QNetworkSession.State """
        pass
    def stateChanged(self, *args, **kwargs): # real signature unknown
        """ QNetworkSession.stateChanged[QNetworkSession.State] [signal] """
        pass
    def stop(self): # real signature unknown; restored from __doc__
        """ QNetworkSession.stop() """
        pass
    def waitForOpened(self, int_msecs=30000): # real signature unknown; restored from __doc__
        """ QNetworkSession.waitForOpened(int msecs=30000) -> bool """
        return False
    def __init__(self, QNetworkConfiguration, QObject_parent=None): # real signature unknown; restored from __doc__
        pass
    # Enum-like class attributes, flattened by the stub generator: values of
    # QNetworkSession.State (Invalid..Roaming) and
    # QNetworkSession.SessionError (UnknownSessionError..InvalidConfigurationError)
    # are interleaved alphabetically below.
    Closing = 4
    Connected = 3
    Connecting = 2
    Disconnected = 5
    Invalid = 0
    InvalidConfigurationError = 4
    NotAvailable = 1
    OperationNotSupportedError = 3
    Roaming = 6
    RoamingError = 2
    SessionAbortedError = 1
    SessionError = None # (!) real value is ''
    State = None # (!) real value is ''
    UnknownSessionError = 0
| gpl-2.0 |
mozilla/kitsune | kitsune/sumo/tests/test_views.py | 1 | 2202 | from django.contrib.sites.models import Site
from django.http import HttpResponsePermanentRedirect, HttpResponseRedirect
from django.test import override_settings
from django.test.client import RequestFactory
from unittest import mock
from nose.tools import eq_
from pyquery import PyQuery as pq
from kitsune.sumo.middleware import LocaleURLMiddleware
from kitsune.sumo.tests import TestCase
from kitsune.sumo.urlresolvers import reverse
from kitsune.sumo.views import deprecated_redirect, redirect_to
class RedirectTests(TestCase):
    """Exercise the redirect_to and deprecated_redirect view helpers."""

    rf = RequestFactory()

    def test_redirect_to(self):
        """permanent=False yields a temporary redirect to home."""
        response = redirect_to(self.rf.get("/"), url="home", permanent=False)
        assert isinstance(response, HttpResponseRedirect)
        eq_(reverse("home"), response["location"])

    def test_redirect_permanent(self):
        """The default is a permanent (301) redirect."""
        response = redirect_to(self.rf.get("/"), url="home")
        assert isinstance(response, HttpResponsePermanentRedirect)
        eq_(reverse("home"), response["location"])

    @mock.patch.object(Site.objects, "get_current")
    def test_deprecated_redirect(self, get_current):
        """deprecated_redirect serves a 200 meta-refresh page to home."""
        get_current.return_value.domain = "su.mo.com"
        request = self.rf.get("/en-US/")
        # Since we're rendering a template we need this to run.
        LocaleURLMiddleware().process_request(request)
        response = deprecated_redirect(request, url="home")
        eq_(200, response.status_code)
        parsed = pq(response.content)
        assert parsed("meta[http-equiv=refresh]")
        refresh = parsed("meta[http-equiv=refresh]")
        timeout, url = refresh.attr("content").split(";url=")
        eq_("10", timeout)
        eq_(reverse("home"), url)
class RobotsTestCase(TestCase):
    """Check robots.txt behavior under the ENGAGE_ROBOTS setting."""

    # Use the hard-coded URL because it's well-known.
    @override_settings(ENGAGE_ROBOTS=False)
    def test_disengaged(self):
        """Disengaged robots means a blanket disallow."""
        res = self.client.get("/robots.txt")
        eq_(b"User-Agent: *\nDisallow: /", res.content)
        eq_("text/plain", res["content-type"])

    @override_settings(ENGAGE_ROBOTS=True)
    def test_engaged(self):
        """Engaged robots serves a real file, longer than a blanket disallow."""
        res = self.client.get("/robots.txt")
        eq_("text/plain", res["content-type"])
        assert len(res.content) > len("User-agent: *\nDisallow: /")
| bsd-3-clause |
111pontes/ydk-py | cisco-ios-xr/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_policy_repository_cfg.py | 1 | 68395 |
import re
import collections
from enum import Enum
from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk._core._dm_meta_info import ATTRIBUTE, REFERENCE_CLASS, REFERENCE_LIST, REFERENCE_LEAFLIST, REFERENCE_IDENTITY_CLASS, REFERENCE_ENUM_CLASS, REFERENCE_BITS, REFERENCE_UNION, ANYXML_CLASS
from ydk.errors import YPYError, YPYModelError
from ydk.providers._importer import _yang_ns
_meta_table = {
'RoutingPolicy.RoutePolicies.RoutePolicy' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.RoutePolicies.RoutePolicy',
False,
[
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Route policy name
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-cfg', True),
_MetaInfoClassMember('rpl-route-policy', ATTRIBUTE, 'str' , None, None,
[], [],
''' policy statements
''',
'rpl_route_policy',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'route-policy',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.RoutePolicies' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.RoutePolicies',
False,
[
_MetaInfoClassMember('route-policy', REFERENCE_LIST, 'RoutePolicy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.RoutePolicies.RoutePolicy',
[], [],
''' Information about an individual policy
''',
'route_policy',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'route-policies',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.PrependEtagSets.PrependEtagSet' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.PrependEtagSets.PrependEtagSet',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-cfg', True),
_MetaInfoClassMember('etag-set-as-text', ATTRIBUTE, 'str' , None, None,
[], [],
''' Etag Set
''',
'etag_set_as_text',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'prepend-etag-set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.PrependEtagSets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.PrependEtagSets',
False,
[
_MetaInfoClassMember('prepend-etag-set', REFERENCE_LIST, 'PrependEtagSet' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.PrependEtagSets.PrependEtagSet',
[], [],
''' Prepend the entries to the existing set
''',
'prepend_etag_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'prepend-etag-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.PrefixSets.PrefixSet' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.PrefixSets.PrefixSet',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-cfg', True),
_MetaInfoClassMember('rpl-prefix-set', ATTRIBUTE, 'str' , None, None,
[], [],
''' prefix statements
''',
'rpl_prefix_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'prefix-set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.PrefixSets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.PrefixSets',
False,
[
_MetaInfoClassMember('prefix-set', REFERENCE_LIST, 'PrefixSet' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.PrefixSets.PrefixSet',
[], [],
''' Information about an individual set
''',
'prefix_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'prefix-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.AppendEtagSets.AppendEtagSet' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.AppendEtagSets.AppendEtagSet',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-cfg', True),
_MetaInfoClassMember('etag-set-as-text', ATTRIBUTE, 'str' , None, None,
[], [],
''' Etag Set
''',
'etag_set_as_text',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'append-etag-set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.AppendEtagSets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.AppendEtagSets',
False,
[
_MetaInfoClassMember('append-etag-set', REFERENCE_LIST, 'AppendEtagSet' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.AppendEtagSets.AppendEtagSet',
[], [],
''' Append the entries to the existing set
''',
'append_etag_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'append-etag-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.RemoveEtagSets.RemoveEtagSet' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.RemoveEtagSets.RemoveEtagSet',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-cfg', True),
_MetaInfoClassMember('etag-set-as-text', ATTRIBUTE, 'str' , None, None,
[], [],
''' Etag Set
''',
'etag_set_as_text',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'remove-etag-set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.RemoveEtagSets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.RemoveEtagSets',
False,
[
_MetaInfoClassMember('remove-etag-set', REFERENCE_LIST, 'RemoveEtagSet' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.RemoveEtagSets.RemoveEtagSet',
[], [],
''' Remove the entries from the existing set
''',
'remove_etag_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'remove-etag-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.MacSets.MacSet' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.MacSets.MacSet',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-cfg', True),
_MetaInfoClassMember('mac-set-as-text', ATTRIBUTE, 'str' , None, None,
[], [],
''' Mac Set
''',
'mac_set_as_text',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'mac-set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.MacSets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.MacSets',
False,
[
_MetaInfoClassMember('mac-set', REFERENCE_LIST, 'MacSet' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.MacSets.MacSet',
[], [],
''' Information about an individual set
''',
'mac_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'mac-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.ExtendedCommunityOpaqueSets.ExtendedCommunityOpaqueSet' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityOpaqueSets.ExtendedCommunityOpaqueSet',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-cfg', True),
_MetaInfoClassMember('rpl-extended-community-opaque-set', ATTRIBUTE, 'str' , None, None,
[], [],
''' Extended Community Opaque Set
''',
'rpl_extended_community_opaque_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'extended-community-opaque-set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.ExtendedCommunityOpaqueSets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityOpaqueSets',
False,
[
_MetaInfoClassMember('extended-community-opaque-set', REFERENCE_LIST, 'ExtendedCommunityOpaqueSet' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.ExtendedCommunityOpaqueSets.ExtendedCommunityOpaqueSet',
[], [],
''' Information about an individual set
''',
'extended_community_opaque_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'extended-community-opaque-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.PrependMacSets.PrependMacSet' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.PrependMacSets.PrependMacSet',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-cfg', True),
_MetaInfoClassMember('mac-set-as-text', ATTRIBUTE, 'str' , None, None,
[], [],
''' Mac Set
''',
'mac_set_as_text',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'prepend-mac-set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.PrependMacSets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.PrependMacSets',
False,
[
_MetaInfoClassMember('prepend-mac-set', REFERENCE_LIST, 'PrependMacSet' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.PrependMacSets.PrependMacSet',
[], [],
''' Prepend the entries to the existing set
''',
'prepend_mac_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'prepend-mac-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.OspfAreaSets.OspfAreaSet' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.OspfAreaSets.OspfAreaSet',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-cfg', True),
_MetaInfoClassMember('rplospf-area-set', ATTRIBUTE, 'str' , None, None,
[], [],
''' OSPF Area Set
''',
'rplospf_area_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'ospf-area-set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.OspfAreaSets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.OspfAreaSets',
False,
[
_MetaInfoClassMember('ospf-area-set', REFERENCE_LIST, 'OspfAreaSet' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.OspfAreaSets.OspfAreaSet',
[], [],
''' Information about an individual OSPF area set.
Usage: OSPF area set allows to define named
set of area numbers which can be
referenced in the route-policy. Area sets
may be used during redistribution of the ospf
protocol. Example: ospf-area-set EXAMPLE
1,
192.168.1.255
end-set
Syntax: OSPF area number can be entered as 32
bit number or in the ip address
format. See example.
Semantic: Area numbers listed in the set will
be searched for a match. In the
example these are areas 1 and
192.168.1.255.
''',
'ospf_area_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'ospf-area-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.AppendMacSets.AppendMacSet' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.AppendMacSets.AppendMacSet',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-cfg', True),
_MetaInfoClassMember('mac-set-as-text', ATTRIBUTE, 'str' , None, None,
[], [],
''' Mac Set
''',
'mac_set_as_text',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'append-mac-set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.AppendMacSets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.AppendMacSets',
False,
[
_MetaInfoClassMember('append-mac-set', REFERENCE_LIST, 'AppendMacSet' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.AppendMacSets.AppendMacSet',
[], [],
''' Append the entries to the existing set
''',
'append_mac_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'append-mac-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.ExtendedCommunityCostSets.ExtendedCommunityCostSet' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityCostSets.ExtendedCommunityCostSet',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-cfg', True),
_MetaInfoClassMember('rpl-extended-community-cost-set', ATTRIBUTE, 'str' , None, None,
[], [],
''' Extended Community Cost Set
''',
'rpl_extended_community_cost_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'extended-community-cost-set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.ExtendedCommunityCostSets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityCostSets',
False,
[
_MetaInfoClassMember('extended-community-cost-set', REFERENCE_LIST, 'ExtendedCommunityCostSet' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.ExtendedCommunityCostSets.ExtendedCommunityCostSet',
[], [],
''' Information about an individual set
''',
'extended_community_cost_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'extended-community-cost-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.RemoveMacSets.RemoveMacSet' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.RemoveMacSets.RemoveMacSet',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-cfg', True),
_MetaInfoClassMember('mac-set-as-text', ATTRIBUTE, 'str' , None, None,
[], [],
''' Mac Set
''',
'mac_set_as_text',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'remove-mac-set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.RemoveMacSets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.RemoveMacSets',
False,
[
_MetaInfoClassMember('remove-mac-set', REFERENCE_LIST, 'RemoveMacSet' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.RemoveMacSets.RemoveMacSet',
[], [],
''' Remove the entries from the existing set
''',
'remove_mac_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'remove-mac-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.ExtendedCommunitySooSets.ExtendedCommunitySooSet' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunitySooSets.ExtendedCommunitySooSet',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-cfg', True),
_MetaInfoClassMember('rpl-extended-community-soo-set', ATTRIBUTE, 'str' , None, None,
[], [],
''' Extended Community SOO Set
''',
'rpl_extended_community_soo_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'extended-community-soo-set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.ExtendedCommunitySooSets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunitySooSets',
False,
[
_MetaInfoClassMember('extended-community-soo-set', REFERENCE_LIST, 'ExtendedCommunitySooSet' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.ExtendedCommunitySooSets.ExtendedCommunitySooSet',
[], [],
''' Information about an individual set
''',
'extended_community_soo_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'extended-community-soo-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.EsiSets.EsiSet' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.EsiSets.EsiSet',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-cfg', True),
_MetaInfoClassMember('esi-set-as-text', ATTRIBUTE, 'str' , None, None,
[], [],
''' Esi Set
''',
'esi_set_as_text',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'esi-set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.EsiSets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.EsiSets',
False,
[
_MetaInfoClassMember('esi-set', REFERENCE_LIST, 'EsiSet' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.EsiSets.EsiSet',
[], [],
''' Information about an individual set
''',
'esi_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'esi-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.PrependEsiSets.PrependEsiSet' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.PrependEsiSets.PrependEsiSet',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-cfg', True),
_MetaInfoClassMember('esi-set-as-text', ATTRIBUTE, 'str' , None, None,
[], [],
''' Esi Set
''',
'esi_set_as_text',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'prepend-esi-set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.PrependEsiSets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.PrependEsiSets',
False,
[
_MetaInfoClassMember('prepend-esi-set', REFERENCE_LIST, 'PrependEsiSet' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.PrependEsiSets.PrependEsiSet',
[], [],
''' Prepend the entries to the existing set
''',
'prepend_esi_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'prepend-esi-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.AppendEsiSets.AppendEsiSet' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.AppendEsiSets.AppendEsiSet',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-cfg', True),
_MetaInfoClassMember('esi-set-as-text', ATTRIBUTE, 'str' , None, None,
[], [],
''' Esi Set
''',
'esi_set_as_text',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'append-esi-set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.AppendEsiSets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.AppendEsiSets',
False,
[
_MetaInfoClassMember('append-esi-set', REFERENCE_LIST, 'AppendEsiSet' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.AppendEsiSets.AppendEsiSet',
[], [],
''' Append the entries to the existing set
''',
'append_esi_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'append-esi-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.RemoveEsiSets.RemoveEsiSet' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.RemoveEsiSets.RemoveEsiSet',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-cfg', True),
_MetaInfoClassMember('esi-set-as-text', ATTRIBUTE, 'str' , None, None,
[], [],
''' Esi Set
''',
'esi_set_as_text',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'remove-esi-set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.RemoveEsiSets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.RemoveEsiSets',
False,
[
_MetaInfoClassMember('remove-esi-set', REFERENCE_LIST, 'RemoveEsiSet' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.RemoveEsiSets.RemoveEsiSet',
[], [],
''' Remove the entries from the existing set
''',
'remove_esi_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'remove-esi-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.ExtendedCommunitySegNhSets.ExtendedCommunitySegNhSet' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunitySegNhSets.ExtendedCommunitySegNhSet',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-cfg', True),
_MetaInfoClassMember('rpl-extended-community-seg-nh-set', ATTRIBUTE, 'str' , None, None,
[], [],
''' Extended Community SegNH Set
''',
'rpl_extended_community_seg_nh_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'extended-community-seg-nh-set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.ExtendedCommunitySegNhSets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunitySegNhSets',
False,
[
_MetaInfoClassMember('extended-community-seg-nh-set', REFERENCE_LIST, 'ExtendedCommunitySegNhSet' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.ExtendedCommunitySegNhSets.ExtendedCommunitySegNhSet',
[], [],
''' Information about an individual set
''',
'extended_community_seg_nh_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'extended-community-seg-nh-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.RdSets.RdSet' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.RdSets.RdSet',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-cfg', True),
_MetaInfoClassMember('rplrd-set', ATTRIBUTE, 'str' , None, None,
[], [],
''' RD Set
''',
'rplrd_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'rd-set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.RdSets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.RdSets',
False,
[
_MetaInfoClassMember('rd-set', REFERENCE_LIST, 'RdSet' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.RdSets.RdSet',
[], [],
''' Information about an individual set
''',
'rd_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'rd-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.PolicyGlobalSetTable' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.PolicyGlobalSetTable',
False,
[
_MetaInfoClassMember('policy-global-set', ATTRIBUTE, 'str' , None, None,
[], [],
''' Information about an individual set
''',
'policy_global_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'policy-global-set-table',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.ExtendedCommunityBandwidthSets.ExtendedCommunityBandwidthSet' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityBandwidthSets.ExtendedCommunityBandwidthSet',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-cfg', True),
_MetaInfoClassMember('rpl-extended-community-bandwidth-set', ATTRIBUTE, 'str' , None, None,
[], [],
''' Extended Community Bandwidth Set
''',
'rpl_extended_community_bandwidth_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'extended-community-bandwidth-set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.ExtendedCommunityBandwidthSets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityBandwidthSets',
False,
[
_MetaInfoClassMember('extended-community-bandwidth-set', REFERENCE_LIST, 'ExtendedCommunityBandwidthSet' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.ExtendedCommunityBandwidthSets.ExtendedCommunityBandwidthSet',
[], [],
''' Information about an individual set
''',
'extended_community_bandwidth_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'extended-community-bandwidth-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.CommunitySets.CommunitySet' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.CommunitySets.CommunitySet',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-cfg', True),
_MetaInfoClassMember('rpl-community-set', ATTRIBUTE, 'str' , None, None,
[], [],
''' Community Set
''',
'rpl_community_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'community-set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.CommunitySets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.CommunitySets',
False,
[
_MetaInfoClassMember('community-set', REFERENCE_LIST, 'CommunitySet' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.CommunitySets.CommunitySet',
[], [],
''' Information about an individual set
''',
'community_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'community-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.AsPathSets.AsPathSet' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.AsPathSets.AsPathSet',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-cfg', True),
_MetaInfoClassMember('rplas-path-set', ATTRIBUTE, 'str' , None, None,
[], [],
''' ASPath Set
''',
'rplas_path_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'as-path-set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.AsPathSets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.AsPathSets',
False,
[
_MetaInfoClassMember('as-path-set', REFERENCE_LIST, 'AsPathSet' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.AsPathSets.AsPathSet',
[], [],
''' Information about an individual set
''',
'as_path_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'as-path-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.TagSets.TagSet' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.TagSets.TagSet',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-cfg', True),
_MetaInfoClassMember('rpl-tag-set', ATTRIBUTE, 'str' , None, None,
[], [],
''' Tag Set
''',
'rpl_tag_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'tag-set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.TagSets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.TagSets',
False,
[
_MetaInfoClassMember('tag-set', REFERENCE_LIST, 'TagSet' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.TagSets.TagSet',
[], [],
''' Information about an individual set
''',
'tag_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'tag-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.EtagSets.EtagSet' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.EtagSets.EtagSet',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-cfg', True),
_MetaInfoClassMember('etag-set-as-text', ATTRIBUTE, 'str' , None, None,
[], [],
''' Etag Set
''',
'etag_set_as_text',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'etag-set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.EtagSets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.EtagSets',
False,
[
_MetaInfoClassMember('etag-set', REFERENCE_LIST, 'EtagSet' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.EtagSets.EtagSet',
[], [],
''' Information about an individual set
''',
'etag_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'etag-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.ExtendedCommunityRtSets.ExtendedCommunityRtSet' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityRtSets.ExtendedCommunityRtSet',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], [b'[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-cfg', True),
_MetaInfoClassMember('rpl-extended-community-rt-set', ATTRIBUTE, 'str' , None, None,
[], [],
''' Extended Community RT Set
''',
'rpl_extended_community_rt_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'extended-community-rt-set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets.ExtendedCommunityRtSets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityRtSets',
False,
[
_MetaInfoClassMember('extended-community-rt-set', REFERENCE_LIST, 'ExtendedCommunityRtSet' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.ExtendedCommunityRtSets.ExtendedCommunityRtSet',
[], [],
''' Information about an individual set
''',
'extended_community_rt_set',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'extended-community-rt-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Sets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets',
False,
[
_MetaInfoClassMember('append-esi-sets', REFERENCE_CLASS, 'AppendEsiSets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.AppendEsiSets',
[], [],
''' Information about Esi sets
''',
'append_esi_sets',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('append-etag-sets', REFERENCE_CLASS, 'AppendEtagSets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.AppendEtagSets',
[], [],
''' Information about Etag sets
''',
'append_etag_sets',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('append-mac-sets', REFERENCE_CLASS, 'AppendMacSets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.AppendMacSets',
[], [],
''' Information about Mac sets
''',
'append_mac_sets',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('as-path-sets', REFERENCE_CLASS, 'AsPathSets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.AsPathSets',
[], [],
''' Information about AS Path sets
''',
'as_path_sets',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('community-sets', REFERENCE_CLASS, 'CommunitySets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.CommunitySets',
[], [],
''' Information about Community sets
''',
'community_sets',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('esi-sets', REFERENCE_CLASS, 'EsiSets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.EsiSets',
[], [],
''' Information about Esi sets
''',
'esi_sets',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('etag-sets', REFERENCE_CLASS, 'EtagSets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.EtagSets',
[], [],
''' Information about Etag sets
''',
'etag_sets',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('extended-community-bandwidth-sets', REFERENCE_CLASS, 'ExtendedCommunityBandwidthSets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.ExtendedCommunityBandwidthSets',
[], [],
''' Information about Bandwidth sets
''',
'extended_community_bandwidth_sets',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('extended-community-cost-sets', REFERENCE_CLASS, 'ExtendedCommunityCostSets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.ExtendedCommunityCostSets',
[], [],
''' Information about Cost sets
''',
'extended_community_cost_sets',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('extended-community-opaque-sets', REFERENCE_CLASS, 'ExtendedCommunityOpaqueSets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.ExtendedCommunityOpaqueSets',
[], [],
''' Information about Opaque sets
''',
'extended_community_opaque_sets',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('extended-community-rt-sets', REFERENCE_CLASS, 'ExtendedCommunityRtSets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.ExtendedCommunityRtSets',
[], [],
''' Information about RT sets
''',
'extended_community_rt_sets',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('extended-community-seg-nh-sets', REFERENCE_CLASS, 'ExtendedCommunitySegNhSets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.ExtendedCommunitySegNhSets',
[], [],
''' Information about SegNH sets
''',
'extended_community_seg_nh_sets',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('extended-community-soo-sets', REFERENCE_CLASS, 'ExtendedCommunitySooSets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.ExtendedCommunitySooSets',
[], [],
''' Information about SOO sets
''',
'extended_community_soo_sets',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('mac-sets', REFERENCE_CLASS, 'MacSets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.MacSets',
[], [],
''' Information about Mac sets
''',
'mac_sets',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('ospf-area-sets', REFERENCE_CLASS, 'OspfAreaSets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.OspfAreaSets',
[], [],
''' Information about OSPF Area sets
''',
'ospf_area_sets',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('policy-global-set-table', REFERENCE_CLASS, 'PolicyGlobalSetTable' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.PolicyGlobalSetTable',
[], [],
''' Information about PolicyGlobal sets
''',
'policy_global_set_table',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('prefix-sets', REFERENCE_CLASS, 'PrefixSets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.PrefixSets',
[], [],
''' Information about Prefix sets
''',
'prefix_sets',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('prepend-esi-sets', REFERENCE_CLASS, 'PrependEsiSets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.PrependEsiSets',
[], [],
''' Information about Esi sets
''',
'prepend_esi_sets',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('prepend-etag-sets', REFERENCE_CLASS, 'PrependEtagSets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.PrependEtagSets',
[], [],
''' Information about Etag sets
''',
'prepend_etag_sets',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('prepend-mac-sets', REFERENCE_CLASS, 'PrependMacSets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.PrependMacSets',
[], [],
''' Information about Mac sets
''',
'prepend_mac_sets',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('rd-sets', REFERENCE_CLASS, 'RdSets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.RdSets',
[], [],
''' Information about RD sets
''',
'rd_sets',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('remove-esi-sets', REFERENCE_CLASS, 'RemoveEsiSets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.RemoveEsiSets',
[], [],
''' Information about Esi sets
''',
'remove_esi_sets',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('remove-etag-sets', REFERENCE_CLASS, 'RemoveEtagSets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.RemoveEtagSets',
[], [],
''' Information about Etag sets
''',
'remove_etag_sets',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('remove-mac-sets', REFERENCE_CLASS, 'RemoveMacSets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.RemoveMacSets',
[], [],
''' Information about Mac sets
''',
'remove_mac_sets',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('tag-sets', REFERENCE_CLASS, 'TagSets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets.TagSets',
[], [],
''' Information about Tag sets
''',
'tag_sets',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy.Limits' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Limits',
False,
[
_MetaInfoClassMember('maximum-lines-of-policy', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Maximum number of lines of policy configuration
that may be configured in total
''',
'maximum_lines_of_policy',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('maximum-number-of-policies', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' Maximum number of policies that may be
configured
''',
'maximum_number_of_policies',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'limits',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
'RoutingPolicy' : {
'meta_info' : _MetaInfoClass('RoutingPolicy',
False,
[
_MetaInfoClassMember('editor', ATTRIBUTE, 'str' , None, None,
[], [],
''' 'emacs' or 'vim' or 'nano'
''',
'editor',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('limits', REFERENCE_CLASS, 'Limits' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Limits',
[], [],
''' Limits for Routing Policy
''',
'limits',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('route-policies', REFERENCE_CLASS, 'RoutePolicies' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.RoutePolicies',
[], [],
''' All configured policies
''',
'route_policies',
'Cisco-IOS-XR-policy-repository-cfg', False),
_MetaInfoClassMember('sets', REFERENCE_CLASS, 'Sets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg', 'RoutingPolicy.Sets',
[], [],
''' All configured sets
''',
'sets',
'Cisco-IOS-XR-policy-repository-cfg', False),
],
'Cisco-IOS-XR-policy-repository-cfg',
'routing-policy',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-cfg'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_cfg'
),
},
}
# Generated wiring: link each nested container's _MetaInfoClass to the
# _MetaInfoClass of its enclosing container (the parent key is always the
# child key minus its last dotted segment). Auto-generated — do not edit.
_meta_table['RoutingPolicy.RoutePolicies.RoutePolicy']['meta_info'].parent =_meta_table['RoutingPolicy.RoutePolicies']['meta_info']
_meta_table['RoutingPolicy.Sets.PrependEtagSets.PrependEtagSet']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.PrependEtagSets']['meta_info']
_meta_table['RoutingPolicy.Sets.PrefixSets.PrefixSet']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.PrefixSets']['meta_info']
_meta_table['RoutingPolicy.Sets.AppendEtagSets.AppendEtagSet']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.AppendEtagSets']['meta_info']
_meta_table['RoutingPolicy.Sets.RemoveEtagSets.RemoveEtagSet']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.RemoveEtagSets']['meta_info']
_meta_table['RoutingPolicy.Sets.MacSets.MacSet']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.MacSets']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunityOpaqueSets.ExtendedCommunityOpaqueSet']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.ExtendedCommunityOpaqueSets']['meta_info']
_meta_table['RoutingPolicy.Sets.PrependMacSets.PrependMacSet']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.PrependMacSets']['meta_info']
_meta_table['RoutingPolicy.Sets.OspfAreaSets.OspfAreaSet']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.OspfAreaSets']['meta_info']
_meta_table['RoutingPolicy.Sets.AppendMacSets.AppendMacSet']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.AppendMacSets']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunityCostSets.ExtendedCommunityCostSet']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.ExtendedCommunityCostSets']['meta_info']
_meta_table['RoutingPolicy.Sets.RemoveMacSets.RemoveMacSet']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.RemoveMacSets']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunitySooSets.ExtendedCommunitySooSet']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.ExtendedCommunitySooSets']['meta_info']
_meta_table['RoutingPolicy.Sets.EsiSets.EsiSet']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.EsiSets']['meta_info']
_meta_table['RoutingPolicy.Sets.PrependEsiSets.PrependEsiSet']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.PrependEsiSets']['meta_info']
_meta_table['RoutingPolicy.Sets.AppendEsiSets.AppendEsiSet']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.AppendEsiSets']['meta_info']
_meta_table['RoutingPolicy.Sets.RemoveEsiSets.RemoveEsiSet']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.RemoveEsiSets']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunitySegNhSets.ExtendedCommunitySegNhSet']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.ExtendedCommunitySegNhSets']['meta_info']
_meta_table['RoutingPolicy.Sets.RdSets.RdSet']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.RdSets']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunityBandwidthSets.ExtendedCommunityBandwidthSet']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.ExtendedCommunityBandwidthSets']['meta_info']
_meta_table['RoutingPolicy.Sets.CommunitySets.CommunitySet']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.CommunitySets']['meta_info']
_meta_table['RoutingPolicy.Sets.AsPathSets.AsPathSet']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.AsPathSets']['meta_info']
_meta_table['RoutingPolicy.Sets.TagSets.TagSet']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.TagSets']['meta_info']
_meta_table['RoutingPolicy.Sets.EtagSets.EtagSet']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.EtagSets']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunityRtSets.ExtendedCommunityRtSet']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.ExtendedCommunityRtSets']['meta_info']
_meta_table['RoutingPolicy.Sets.PrependEtagSets']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.PrefixSets']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.AppendEtagSets']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.RemoveEtagSets']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.MacSets']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunityOpaqueSets']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.PrependMacSets']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.OspfAreaSets']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.AppendMacSets']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunityCostSets']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.RemoveMacSets']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunitySooSets']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.EsiSets']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.PrependEsiSets']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.AppendEsiSets']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.RemoveEsiSets']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunitySegNhSets']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.RdSets']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.PolicyGlobalSetTable']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunityBandwidthSets']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.CommunitySets']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.AsPathSets']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.TagSets']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.EtagSets']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunityRtSets']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.RoutePolicies']['meta_info'].parent =_meta_table['RoutingPolicy']['meta_info']
_meta_table['RoutingPolicy.Sets']['meta_info'].parent =_meta_table['RoutingPolicy']['meta_info']
_meta_table['RoutingPolicy.Limits']['meta_info'].parent =_meta_table['RoutingPolicy']['meta_info']
###############################################################################
# Universal Analytics for Python
# Copyright (c) 2013, Analytics Pros
#
# This project is free software, distributed under the BSD license.
# Analytics Pros offers consulting and integration services if your firm needs
# assistance in strategy, implementation, or auditing existing work.
###############################################################################
from urllib2 import urlopen, build_opener, install_opener
from urllib2 import Request, HTTPSHandler
from urllib2 import URLError, HTTPError
from urllib import urlencode
import random
import datetime
import time
import uuid
import hashlib
import socket
def generate_uuid(basedata = None):
    """ Return a random UUID4 string when called with no input, or a
    UUID4-formatted MD5 checksum of the given string data.

    Note: inputs that are neither None nor a string fall through and
    yield None, matching the original behavior.
    """
    if basedata is None:
        return str(uuid.uuid4())
    if isinstance(basedata, basestring):
        digest = hashlib.md5(basedata).hexdigest()
        # Slice the 32-char hex digest into the 8-4-4-4-12 UUID layout
        return '-'.join([ digest[0:8], digest[8:12], digest[12:16], digest[16:20], digest[20:32] ])
class Time(datetime.datetime):
    """ Wrappers and convenience methods for processing various time representations """

    @classmethod
    def from_unix(cls, seconds, milliseconds = 0):
        """ Produce a full |datetime.datetime| object from a Unix timestamp.

        `seconds` is interpreted as UTC (via time.gmtime); `milliseconds`
        becomes the microsecond component of the result.
        """
        base = list(time.gmtime(seconds))[0:6]
        base.append(milliseconds * 1000) # microseconds
        return cls(* base)

    @classmethod
    def to_unix(cls, timestamp):
        """ Wrapper over time module to produce Unix epoch time as a float.

        Raises TypeError if `timestamp` is not a datetime object.
        NOTE: uses time.mktime, so the conversion is local-time based.
        """
        if not isinstance(timestamp, datetime.datetime):
            # Parenthesized raise form is valid on both Python 2 and 3
            # (the original `raise TypeError, '...'` is Python-2-only syntax).
            raise TypeError('Time.to_unix expects a datetime object')
        base = time.mktime(timestamp.timetuple())
        return base

    @classmethod
    def milliseconds_offset(cls, timestamp, now = None):
        """ Offset time (in milliseconds) from a |datetime.datetime| object to now.

        `timestamp` may also be a numeric Unix time, which is used as-is.
        `now` defaults to the current time (time.time()).
        """
        if isinstance(timestamp, (int, float)):
            base = timestamp
        else:
            base = cls.to_unix(timestamp)
            # Divide by 1000000.0 to force true division: under Python 2,
            # `microsecond / 1000000` is integer division and always yields 0,
            # silently discarding the sub-second fraction.
            base = base + (timestamp.microsecond / 1000000.0)
        if now is None:
            now = time.time()
        return (now - base) * 1000
class HTTPRequest(object):
    """ URL construction and request-handling abstraction.
    This is not intended to be used outside this module.
    Maps persistent state (i.e. query parameters) onto transient
    datasets for each query; hits are sent as GET requests.
    """

    endpoint = 'https://www.google-analytics.com/collect'

    @staticmethod
    def debug():
        """ Activate debugging on urllib2 """
        install_opener(build_opener(HTTPSHandler(debuglevel = 1)))

    def __init__(self, user_agent = None, *args, **opts):
        # Store properties for all requests; fall back to a default UA string.
        default_agent = 'Analytics Pros - Universal Analytics (Python)'
        self.user_agent = user_agent or default_agent

    @classmethod
    def fixUTF8(cls, data):
        """ Convert all string values to UTF-8 (in place) for UA's servers. """
        for name, value in data.items():
            if isinstance(value, basestring):
                data[ name ] = value.encode('utf-8')
        return data

    def send(self, data):
        """ Encode the dataset into the query string and dispatch it. """
        query = urlencode(self.fixUTF8(data))
        self.open(Request(
            self.endpoint + '?' + query,
            headers = { 'User-Agent': self.user_agent }
        ))

    def open(self, request):
        """ Issue the request; returns False (best-effort) on any HTTP/URL error. """
        try:
            return urlopen(request)
        except HTTPError:
            return False
        except URLError:
            # Network-level failure: remember the hit for possible re-send.
            self.cache_request(request)
            return False

    def cache_request(self, request):
        # TODO: implement a proper caching mechanism here for re-transmitting hits
        # record = (Time.now(), request.get_full_url(), request.get_data(), request.headers)
        pass
class HTTPPost(HTTPRequest):
    """ HTTPRequest variant that transmits the hit data in the POST body
    instead of the query string.
    """

    def send(self, data):
        """ Encode the dataset and POST it to the configured endpoint. """
        payload = urlencode(self.fixUTF8(data))
        self.open(Request(
            self.endpoint,
            data = payload,
            headers = { 'User-Agent': self.user_agent }
        ))
class Tracker(object):
""" Primary tracking interface for Universal Analytics """
params = None
parameter_alias = {}
valid_hittypes = ('pageview', 'event', 'social', 'screenview', 'transaction', 'item', 'exception', 'timing')
@classmethod
def alias(cls, typemap, base, *names):
""" Declare an alternate (humane) name for a measurement protocol parameter """
cls.parameter_alias[ base ] = (typemap, base)
for i in names:
cls.parameter_alias[ i ] = (typemap, base)
@classmethod
def coerceParameter(cls, name, value = None):
if isinstance(name, basestring) and name[0] == '&':
return name[1:], str(value)
elif name in cls.parameter_alias:
typecast, param_name = cls.parameter_alias.get(name)
return param_name, typecast(value)
else:
raise KeyError, 'Parameter "{0}" is not recognized'.format(name)
def payload(self, data):
for key, value in data.iteritems():
try:
yield self.coerceParameter(key, value)
except KeyError:
continue
option_sequence = {
'pageview': [ (basestring, 'dp') ],
'event': [ (basestring, 'ec'), (basestring, 'ea'), (basestring, 'el'), (int, 'ev') ],
'social': [ (basestring, 'sn'), (basestring, 'sa'), (basestring, 'st') ],
'timing': [ (basestring, 'utc'), (basestring, 'utv'), (basestring, 'utt'), (basestring, 'utl') ]
}
@classmethod
def consume_options(cls, data, hittype, args):
""" Interpret sequential arguments related to known hittypes based on declared structures """
opt_position = 0
data[ 't' ] = hittype # integrate hit type parameter
if hittype in cls.option_sequence:
for expected_type, optname in cls.option_sequence[ hittype ]:
if opt_position < len(args) and isinstance(args[opt_position], expected_type):
data[ optname ] = args[ opt_position ]
opt_position += 1
@classmethod
def hittime(cls, timestamp = None, age = None, milliseconds = None):
""" Returns an integer represeting the milliseconds offset for a given hit (relative to now) """
if isinstance(timestamp, (int, float)):
return int(Time.milliseconds_offset(Time.from_unix(timestamp, milliseconds = milliseconds)))
if isinstance(timestamp, datetime.datetime):
return int(Time.milliseconds_offset(timestamp))
if isinstance(age, (int, float)):
return int(age * 1000) + (milliseconds or 0)
@property
def account(self):
return self.params.get('tid', None)
def __init__(self, account, name = None, client_id = None, hash_client_id = False, user_id = None, user_agent = None, use_post = True):
if use_post is False:
self.http = HTTPRequest(user_agent = user_agent)
else:
self.http = HTTPPost(user_agent = user_agent)
self.params = { 'v': 1, 'tid': account }
if client_id is None:
client_id = generate_uuid()
self.params[ 'cid' ] = client_id
self.hash_client_id = hash_client_id
if user_id is not None:
self.params[ 'uid' ] = user_id
def set_timestamp(self, data):
""" Interpret time-related options, apply queue-time parameter as needed """
if 'hittime' in data: # an absolute timestamp
data['qt'] = self.hittime(timestamp = data.pop('hittime', None))
if 'hitage' in data: # a relative age (in seconds)
data['qt'] = self.hittime(age = data.pop('hitage', None))
def send(self, hittype, *args, **data):
""" Transmit HTTP requests to Google Analytics using the measurement protocol """
if hittype not in self.valid_hittypes:
raise KeyError('Unsupported Universal Analytics Hit Type: {0}'.format(repr(hittype)))
self.set_timestamp(data)
self.consume_options(data, hittype, args)
for item in args: # process dictionary-object arguments of transcient data
if isinstance(item, dict):
for key, val in self.payload(item):
data[ key ] = val
for k, v in self.params.iteritems(): # update only absent parameters
if k not in data:
data[ k ] = v
data = dict(self.payload(data))
if self.hash_client_id:
data[ 'cid' ] = generate_uuid(data[ 'cid' ])
# Transmit the hit to Google...
self.http.send(data)
# Setting persistent attibutes of the session/hit/etc (inc. custom dimensions/metrics)
def set(self, name, value = None):
if isinstance(name, dict):
for key, value in name.iteritems():
try:
param, value = self.coerceParameter(key, value)
self.params[param] = value
except KeyError:
pass
elif isinstance(name, basestring):
try:
param, value = self.coerceParameter(name, value)
self.params[param] = value
except KeyError:
pass
def __getitem__(self, name):
param, value = self.coerceParameter(name, None)
return self.params.get(param, None)
def __setitem__(self, name, value):
param, value = self.coerceParameter(name, value)
self.params[param] = value
def __delitem__(self, name):
param, value = self.coerceParameter(name, None)
if param in self.params:
del self.params[param]
def safe_unicode(obj):
""" Safe convertion to the Unicode string version of the object """
try:
return unicode(obj)
except UnicodeDecodeError:
return obj.decode('utf-8')
# Declaring name mappings for Measurement Protocol parameters
MAX_CUSTOM_DEFINITIONS = 200
MAX_EC_LISTS = 11 # 1-based index
MAX_EC_PRODUCTS = 11 # 1-based index
MAX_EC_PROMOTIONS = 11 # 1-based index
Tracker.alias(int, 'v', 'protocol-version')
Tracker.alias(safe_unicode, 'cid', 'client-id', 'clientId', 'clientid')
Tracker.alias(safe_unicode, 'tid', 'trackingId', 'account')
Tracker.alias(safe_unicode, 'uid', 'user-id', 'userId', 'userid')
Tracker.alias(safe_unicode, 'uip', 'user-ip', 'userIp', 'ipaddr')
Tracker.alias(safe_unicode, 'ua', 'userAgent', 'userAgentOverride', 'user-agent')
Tracker.alias(safe_unicode, 'dp', 'page', 'path')
Tracker.alias(safe_unicode, 'dt', 'title', 'pagetitle', 'pageTitle' 'page-title')
Tracker.alias(safe_unicode, 'dl', 'location')
Tracker.alias(safe_unicode, 'dh', 'hostname')
Tracker.alias(safe_unicode, 'sc', 'sessioncontrol', 'session-control', 'sessionControl')
Tracker.alias(safe_unicode, 'dr', 'referrer', 'referer')
Tracker.alias(int, 'qt', 'queueTime', 'queue-time')
Tracker.alias(safe_unicode, 't', 'hitType', 'hittype')
Tracker.alias(int, 'aip', 'anonymizeIp', 'anonIp', 'anonymize-ip')
# Campaign attribution
Tracker.alias(safe_unicode, 'cn', 'campaign', 'campaignName', 'campaign-name')
Tracker.alias(safe_unicode, 'cs', 'source', 'campaignSource', 'campaign-source')
Tracker.alias(safe_unicode, 'cm', 'medium', 'campaignMedium', 'campaign-medium')
Tracker.alias(safe_unicode, 'ck', 'keyword', 'campaignKeyword', 'campaign-keyword')
Tracker.alias(safe_unicode, 'cc', 'content', 'campaignContent', 'campaign-content')
Tracker.alias(safe_unicode, 'ci', 'campaignId', 'campaignID', 'campaign-id')
# Technical specs
Tracker.alias(safe_unicode, 'sr', 'screenResolution', 'screen-resolution', 'resolution')
Tracker.alias(safe_unicode, 'vp', 'viewport', 'viewportSize', 'viewport-size')
Tracker.alias(safe_unicode, 'de', 'encoding', 'documentEncoding', 'document-encoding')
Tracker.alias(int, 'sd', 'colors', 'screenColors', 'screen-colors')
Tracker.alias(safe_unicode, 'ul', 'language', 'user-language', 'userLanguage')
# Mobile app
Tracker.alias(safe_unicode, 'an', 'appName', 'app-name', 'app')
Tracker.alias(safe_unicode, 'cd', 'contentDescription', 'screenName', 'screen-name', 'content-description')
Tracker.alias(safe_unicode, 'av', 'appVersion', 'app-version', 'version')
Tracker.alias(safe_unicode, 'aid', 'appID', 'appId', 'application-id', 'app-id', 'applicationId')
Tracker.alias(safe_unicode, 'aiid', 'appInstallerId', 'app-installer-id')
# Ecommerce
Tracker.alias(safe_unicode, 'ta', 'affiliation', 'transactionAffiliation', 'transaction-affiliation')
Tracker.alias(safe_unicode, 'ti', 'transaction', 'transactionId', 'transaction-id')
Tracker.alias(float, 'tr', 'revenue', 'transactionRevenue', 'transaction-revenue')
Tracker.alias(float, 'ts', 'shipping', 'transactionShipping', 'transaction-shipping')
Tracker.alias(float, 'tt', 'tax', 'transactionTax', 'transaction-tax')
Tracker.alias(safe_unicode, 'cu', 'currency', 'transactionCurrency', 'transaction-currency') # Currency code, e.g. USD, EUR
Tracker.alias(safe_unicode, 'in', 'item-name', 'itemName')
Tracker.alias(float, 'ip', 'item-price', 'itemPrice')
Tracker.alias(float, 'iq', 'item-quantity', 'itemQuantity')
Tracker.alias(safe_unicode, 'ic', 'item-code', 'sku', 'itemCode')
Tracker.alias(safe_unicode, 'iv', 'item-variation', 'item-category', 'itemCategory', 'itemVariation')
# Events
Tracker.alias(safe_unicode, 'ec', 'event-category', 'eventCategory', 'category')
Tracker.alias(safe_unicode, 'ea', 'event-action', 'eventAction', 'action')
Tracker.alias(safe_unicode, 'el', 'event-label', 'eventLabel', 'label')
Tracker.alias(int, 'ev', 'event-value', 'eventValue', 'value')
Tracker.alias(int, 'ni', 'noninteractive', 'nonInteractive', 'noninteraction', 'nonInteraction')
# Social
Tracker.alias(safe_unicode, 'sa', 'social-action', 'socialAction')
Tracker.alias(safe_unicode, 'sn', 'social-network', 'socialNetwork')
Tracker.alias(safe_unicode, 'st', 'social-target', 'socialTarget')
# Exceptions
Tracker.alias(safe_unicode, 'exd', 'exception-description', 'exceptionDescription', 'exDescription')
Tracker.alias(int, 'exf', 'exception-fatal', 'exceptionFatal', 'exFatal')
# User Timing
Tracker.alias(safe_unicode, 'utc', 'timingCategory', 'timing-category')
Tracker.alias(safe_unicode, 'utv', 'timingVariable', 'timing-variable')
Tracker.alias(float, 'utt', 'time', 'timingTime', 'timing-time')
Tracker.alias(safe_unicode, 'utl', 'timingLabel', 'timing-label')
Tracker.alias(float, 'dns', 'timingDNS', 'timing-dns')
Tracker.alias(float, 'pdt', 'timingPageLoad', 'timing-page-load')
Tracker.alias(float, 'rrt', 'timingRedirect', 'timing-redirect')
Tracker.alias(safe_unicode, 'tcp', 'timingTCPConnect', 'timing-tcp-connect')
Tracker.alias(safe_unicode, 'srt', 'timingServerResponse', 'timing-server-response')
# Custom dimensions and metrics
for i in range(0,200):
Tracker.alias(safe_unicode, 'cd{0}'.format(i), 'dimension{0}'.format(i))
Tracker.alias(int, 'cm{0}'.format(i), 'metric{0}'.format(i))
# Enhanced Ecommerce
Tracker.alias(str, 'pa') # Product action
Tracker.alias(str, 'tcc') # Coupon code
Tracker.alias(unicode, 'pal') # Product action list
Tracker.alias(int, 'cos') # Checkout step
Tracker.alias(str, 'col') # Checkout step option
Tracker.alias(str, 'promoa') # Promotion action
for product_index in range(1, MAX_EC_PRODUCTS):
Tracker.alias(str, 'pr{0}id'.format(product_index)) # Product SKU
Tracker.alias(unicode, 'pr{0}nm'.format(product_index)) # Product name
Tracker.alias(unicode, 'pr{0}br'.format(product_index)) # Product brand
Tracker.alias(unicode, 'pr{0}ca'.format(product_index)) # Product category
Tracker.alias(unicode, 'pr{0}va'.format(product_index)) # Product variant
Tracker.alias(str, 'pr{0}pr'.format(product_index)) # Product price
Tracker.alias(int, 'pr{0}qt'.format(product_index)) # Product quantity
Tracker.alias(str, 'pr{0}cc'.format(product_index)) # Product coupon code
Tracker.alias(int, 'pr{0}ps'.format(product_index)) # Product position
for custom_index in range(MAX_CUSTOM_DEFINITIONS):
Tracker.alias(str, 'pr{0}cd{1}'.format(product_index, custom_index)) # Product custom dimension
Tracker.alias(int, 'pr{0}cm{1}'.format(product_index, custom_index)) # Product custom metric
for list_index in range(1, MAX_EC_LISTS):
Tracker.alias(str, 'il{0}pi{1}id'.format(list_index, product_index)) # Product impression SKU
Tracker.alias(unicode, 'il{0}pi{1}nm'.format(list_index, product_index)) # Product impression name
Tracker.alias(unicode, 'il{0}pi{1}br'.format(list_index, product_index)) # Product impression brand
Tracker.alias(unicode, 'il{0}pi{1}ca'.format(list_index, product_index)) # Product impression category
Tracker.alias(unicode, 'il{0}pi{1}va'.format(list_index, product_index)) # Product impression variant
Tracker.alias(int, 'il{0}pi{1}ps'.format(list_index, product_index)) # Product impression position
Tracker.alias(int, 'il{0}pi{1}pr'.format(list_index, product_index)) # Product impression price
for custom_index in range(MAX_CUSTOM_DEFINITIONS):
Tracker.alias(str, 'il{0}pi{1}cd{2}'.format(list_index, product_index, custom_index)) # Product impression custom dimension
Tracker.alias(int, 'il{0}pi{1}cm{2}'.format(list_index, product_index, custom_index)) # Product impression custom metric
for list_index in range(1, MAX_EC_LISTS):
Tracker.alias(unicode, 'il{0}nm'.format(list_index)) # Product impression list name
for promotion_index in range(1, MAX_EC_PROMOTIONS):
Tracker.alias(str, 'promo{0}id'.format(promotion_index)) # Promotion ID
Tracker.alias(unicode, 'promo{0}nm'.format(promotion_index)) # Promotion name
Tracker.alias(str, 'promo{0}cr'.format(promotion_index)) # Promotion creative
Tracker.alias(str, 'promo{0}ps'.format(promotion_index)) # Promotion position
# Shortcut for creating trackers
def create(account, *args, **kwargs):
return Tracker(account, *args, **kwargs)
# vim: set nowrap tabstop=4 shiftwidth=4 softtabstop=0 expandtab textwidth=0 filetype=python foldmethod=indent foldcolumn=4
| gpl-2.0 |
ybellavance/python-for-android | python3-alpha/extra_modules/gdata/exif/__init__.py | 45 | 6980 | # -*-*- encoding: utf-8 -*-*-
#
# This is gdata.photos.exif, implementing the exif namespace in gdata
#
# $Id: __init__.py 81 2007-10-03 14:41:42Z havard.gulldahl $
#
# Copyright 2007 Håvard Gulldahl
# Portions copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module maps elements from the {EXIF} namespace[1] to GData objects.
These elements describe image data, using exif attributes[2].
Picasa Web Albums uses the exif namespace to represent Exif data encoded
in a photo [3].
Picasa Web Albums uses the following exif elements:
exif:distance
exif:exposure
exif:flash
exif:focallength
exif:fstop
exif:imageUniqueID
exif:iso
exif:make
exif:model
exif:tags
exif:time
[1]: http://schemas.google.com/photos/exif/2007.
[2]: http://en.wikipedia.org/wiki/Exif
[3]: http://code.google.com/apis/picasaweb/reference.html#exif_reference
"""
__author__ = 'havard@gulldahl.no'# (Håvard Gulldahl)' #BUG: pydoc chokes on non-ascii chars in __author__
__license__ = 'Apache License v2'
import atom
import gdata
EXIF_NAMESPACE = 'http://schemas.google.com/photos/exif/2007'
class ExifBaseElement(atom.AtomBase):
"""Base class for elements in the EXIF_NAMESPACE (%s). To add new elements, you only need to add the element tag name to self._tag
""" % EXIF_NAMESPACE
_tag = ''
_namespace = EXIF_NAMESPACE
_children = atom.AtomBase._children.copy()
_attributes = atom.AtomBase._attributes.copy()
def __init__(self, name=None, extension_elements=None,
extension_attributes=None, text=None):
self.name = name
self.text = text
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
class Distance(ExifBaseElement):
"(float) The distance to the subject, e.g. 0.0"
_tag = 'distance'
def DistanceFromString(xml_string):
return atom.CreateClassFromXMLString(Distance, xml_string)
class Exposure(ExifBaseElement):
"(float) The exposure time used, e.g. 0.025 or 8.0E4"
_tag = 'exposure'
def ExposureFromString(xml_string):
return atom.CreateClassFromXMLString(Exposure, xml_string)
class Flash(ExifBaseElement):
"""(string) Boolean value indicating whether the flash was used.
The .text attribute will either be `true' or `false'
As a convenience, this object's .bool method will return what you want,
so you can say:
flash_used = bool(Flash)
"""
_tag = 'flash'
def __bool__(self):
if self.text.lower() in ('true','false'):
return self.text.lower() == 'true'
def FlashFromString(xml_string):
return atom.CreateClassFromXMLString(Flash, xml_string)
class Focallength(ExifBaseElement):
"(float) The focal length used, e.g. 23.7"
_tag = 'focallength'
def FocallengthFromString(xml_string):
return atom.CreateClassFromXMLString(Focallength, xml_string)
class Fstop(ExifBaseElement):
"(float) The fstop value used, e.g. 5.0"
_tag = 'fstop'
def FstopFromString(xml_string):
return atom.CreateClassFromXMLString(Fstop, xml_string)
class ImageUniqueID(ExifBaseElement):
"(string) The unique image ID for the photo. Generated by Google Photo servers"
_tag = 'imageUniqueID'
def ImageUniqueIDFromString(xml_string):
return atom.CreateClassFromXMLString(ImageUniqueID, xml_string)
class Iso(ExifBaseElement):
"(int) The iso equivalent value used, e.g. 200"
_tag = 'iso'
def IsoFromString(xml_string):
return atom.CreateClassFromXMLString(Iso, xml_string)
class Make(ExifBaseElement):
"(string) The make of the camera used, e.g. Fictitious Camera Company"
_tag = 'make'
def MakeFromString(xml_string):
return atom.CreateClassFromXMLString(Make, xml_string)
class Model(ExifBaseElement):
"(string) The model of the camera used,e.g AMAZING-100D"
_tag = 'model'
def ModelFromString(xml_string):
return atom.CreateClassFromXMLString(Model, xml_string)
class Time(ExifBaseElement):
"""(int) The date/time the photo was taken, e.g. 1180294337000.
Represented as the number of milliseconds since January 1st, 1970.
The value of this element will always be identical to the value
of the <gphoto:timestamp>.
Look at this object's .isoformat() for a human friendly datetime string:
photo_epoch = Time.text # 1180294337000
photo_isostring = Time.isoformat() # '2007-05-27T19:32:17.000Z'
Alternatively:
photo_datetime = Time.datetime() # (requires python >= 2.3)
"""
_tag = 'time'
def isoformat(self):
"""(string) Return the timestamp as a ISO 8601 formatted string,
e.g. '2007-05-27T19:32:17.000Z'
"""
import time
epoch = float(self.text)/1000
return time.strftime('%Y-%m-%dT%H:%M:%S.000Z', time.gmtime(epoch))
def datetime(self):
"""(datetime.datetime) Return the timestamp as a datetime.datetime object
Requires python 2.3
"""
import datetime
epoch = float(self.text)/1000
return datetime.datetime.fromtimestamp(epoch)
def TimeFromString(xml_string):
return atom.CreateClassFromXMLString(Time, xml_string)
class Tags(ExifBaseElement):
"""The container for all exif elements.
The <exif:tags> element can appear as a child of a photo entry.
"""
_tag = 'tags'
_children = atom.AtomBase._children.copy()
_children['{%s}fstop' % EXIF_NAMESPACE] = ('fstop', Fstop)
_children['{%s}make' % EXIF_NAMESPACE] = ('make', Make)
_children['{%s}model' % EXIF_NAMESPACE] = ('model', Model)
_children['{%s}distance' % EXIF_NAMESPACE] = ('distance', Distance)
_children['{%s}exposure' % EXIF_NAMESPACE] = ('exposure', Exposure)
_children['{%s}flash' % EXIF_NAMESPACE] = ('flash', Flash)
_children['{%s}focallength' % EXIF_NAMESPACE] = ('focallength', Focallength)
_children['{%s}iso' % EXIF_NAMESPACE] = ('iso', Iso)
_children['{%s}time' % EXIF_NAMESPACE] = ('time', Time)
_children['{%s}imageUniqueID' % EXIF_NAMESPACE] = ('imageUniqueID', ImageUniqueID)
def __init__(self, extension_elements=None, extension_attributes=None, text=None):
ExifBaseElement.__init__(self, extension_elements=extension_elements,
extension_attributes=extension_attributes,
text=text)
self.fstop=None
self.make=None
self.model=None
self.distance=None
self.exposure=None
self.flash=None
self.focallength=None
self.iso=None
self.time=None
self.imageUniqueID=None
def TagsFromString(xml_string):
return atom.CreateClassFromXMLString(Tags, xml_string)
| apache-2.0 |
aronparsons/spacewalk | backend/server/rhnSQL/sql_lib.py | 7 | 2062 | #
# Copyright (c) 2008--2013 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
# A collection of classes and functions for handy data manipulation
# This file includes common classes and functions that are used by
# misc parts of the RHN backend
#
# Before changing any of this stuff, please grep through the sources to
# check how the function/class you are about to modify is used first.
# Or ask gafton.
#
import string
def build_sql_insert(table, hash_name, items):
""" This statement builds a sql statement for an insert
of 'items' into "table" indexed by "hash_name"
"""
sql = "insert into %s ( %s, %s ) values ( :p0, %s )" % (
table, hash_name,
string.join(map(lambda a: a[0], items), ", "),
string.join(map(lambda a: ":p_%s" % a[0], items), ", "))
pdict = {"p0": None} # This must be reset after we return from this call
map(pdict.update, map(lambda a: {"p_%s" % a[0]: a[1]}, items))
return sql, pdict
def build_sql_update(table, hash_name, items):
""" This statement builds a sql statement for an update
of 'items' into "table" indexed by "hash_name"
"""
sql = "update %s set %s where %s = :p0" % (
table,
string.join(map(lambda a: "%s = :p_%s" % (a, a),
map(lambda a: a[0], items)),
", "),
hash_name)
pdict = {"p0": None} # This must be reset after we return from this call
map(pdict.update, map(lambda a: {"p_%s" % a[0]: a[1]}, items))
return sql, pdict
| gpl-2.0 |
alon/servo | tests/wpt/web-platform-tests/tools/pywebsocket/src/example/echo_wsh.py | 494 | 2197 | # Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
_GOODBYE_MESSAGE = u'Goodbye'
def web_socket_do_extra_handshake(request):
# This example handler accepts any request. See origin_check_wsh.py for how
# to reject access from untrusted scripts based on origin value.
pass # Always accept.
def web_socket_transfer_data(request):
while True:
line = request.ws_stream.receive_message()
if line is None:
return
if isinstance(line, unicode):
request.ws_stream.send_message(line, binary=False)
if line == _GOODBYE_MESSAGE:
return
else:
request.ws_stream.send_message(line, binary=True)
# vi:sts=4 sw=4 et
| mpl-2.0 |
goulu/networkx | networkx/algorithms/hierarchy.py | 10 | 1777 | # -*- coding: utf-8 -*-
"""
Flow Hierarchy.
"""
# Copyright (C) 2004-2016 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
import networkx as nx
__authors__ = "\n".join(['Ben Edwards (bedwards@cs.unm.edu)'])
__all__ = ['flow_hierarchy']
def flow_hierarchy(G, weight=None):
"""Returns the flow hierarchy of a directed network.
Flow hierarchy is defined as the fraction of edges not participating
in cycles in a directed graph [1]_.
Parameters
----------
G : DiGraph or MultiDiGraph
A directed graph
weight : key,optional (default=None)
Attribute to use for node weights. If None the weight defaults to 1.
Returns
-------
h : float
Flow heirarchy value
Notes
-----
The algorithm described in [1]_ computes the flow hierarchy through
exponentiation of the adjacency matrix. This function implements an
alternative approach that finds strongly connected components.
An edge is in a cycle if and only if it is in a strongly connected
component, which can be found in `O(m)` time using Tarjan's algorithm.
References
----------
.. [1] Luo, J.; Magee, C.L. (2011),
Detecting evolving patterns of self-organizing networks by flow
hierarchy measurement, Complexity, Volume 16 Issue 6 53-61.
DOI: 10.1002/cplx.20368
http://web.mit.edu/~cmagee/www/documents/28-DetectingEvolvingPatterns_FlowHierarchy.pdf
"""
if not G.is_directed():
raise nx.NetworkXError("G must be a digraph in flow_heirarchy")
scc = nx.strongly_connected_components(G)
return 1.-sum(G.subgraph(c).size(weight) for c in scc)/float(G.size(weight))
| bsd-3-clause |
bitesofcode/projexui | projexui/widgets/xorbschemabox.py | 2 | 4667 | """ Defines a combo box for selecting orb schemas """
# define authorship information
__authors__ = ['Eric Hulser']
__author__ = ','.join(__authors__)
__credits__ = []
__copyright__ = 'Copyright (c) 2011, Projex Software'
__license__ = 'LGPL'
# maintenance information
__maintainer__ = 'Projex Software'
__email__ = 'team@projexsoftware.com'
#------------------------------------------------------------------------------
from projexui.qt import Signal, PyObject
from projexui.widgets.xcombobox import XComboBox
import projex
projex.requires('orb')
try:
from orb import Orb
except ImportError:
logger.warning('Orb is required for the XOrbSchemaBox')
Orb = None
class XOrbSchemaBox(XComboBox):
""" Defines a combo box that contains schemas from the ORB system. """
__designer_group__ = 'ProjexUI - ORB'
currentSchemaChanged = Signal(PyObject)
currentTableChanged = Signal(PyObject)
def __init__( self, parent = None ):
super(XOrbSchemaBox, self).__init__( parent )
# define custom properties
self._schemas = []
if ( Orb ):
self.setSchemas(Orb.instance().schemas())
# create connections
self.currentIndexChanged.connect( self.emitCurrentChanged )
def currentSchema( self ):
"""
Returns the schema found at the current index for this combo box.
:return <orb.TableSchema> || None
"""
index = self.currentIndex()
if ( 0 <= index and index < len(self._schemas) ):
return self._schemas[index]
return None
def emitCurrentChanged( self ):
"""
Emits the current schema changed signal for this combobox, provided \
the signals aren't blocked.
"""
if ( not self.signalsBlocked() ):
schema = self.currentSchema()
self.currentSchemaChanged.emit(schema)
if ( schema ):
self.currentTableChanged.emit(schema.model())
else:
self.currentTableChanged.emit(None)
def iconMapper( self ):
"""
Returns the icon mapping method to be used for this combobox.
:return <method> || None
"""
return self._iconMapper
def labelMapper( self ):
"""
Returns the label mapping method to be used for this combobox.
:return <method> || None
"""
return self._labelMapper
def schemas( self ):
"""
Returns the schema list that ist linked with this combo box.
:return [<orb.Table>, ..]
"""
return self._schemas
def refresh( self ):
"""
Refreshs the current user interface to match the latest settings.
"""
schemas = self.schemas()
self.blockSignals(True)
self.clear()
self.addItems([schema.name() for schema in schemas])
self.blockSignals(False)
def setCurrentSchema( self, schema ):
"""
Sets the index for this combobox to the inputed schema instance.
:param schema <orb.TableSchema>
:return <bool> success
"""
if ( not schema in self._schemas ):
return False
index = self._schemas.index(schema)
self.setCurrentIndex(index)
return True
def setIconMapper( self, mapper ):
"""
Sets the icon mapping method for this combobox to the inputed mapper. \
The inputed mapper method should take a orb.Table instance as input \
and return a QIcon as output.
:param mapper | <method> || None
"""
self._iconMapper = mapper
def setLabelMapper( self, mapper ):
"""
Sets the label mapping method for this combobox to the inputed mapper.\
The inputed mapper method should take a orb.Table instance as input \
and return a string as output.
:param mapper | <method>
"""
self._labelMapper = mapper
def setSchemas( self, schemas ):
"""
Sets the schemas on this combobox to the inputed schema list.
:param schemas | [<orb.Table>, ..]
"""
self._schemas = schemas
self.refresh()
__designer_plugins__ = [XOrbSchemaBox] | lgpl-3.0 |
erkrishna9/odoo | addons/sale_stock/company.py | 384 | 1524 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class company(osv.osv):
_inherit = 'res.company'
_columns = {
'security_lead': fields.float(
'Security Days', required=True,
help="Margin of error for dates promised to customers. "\
"Products will be scheduled for procurement and delivery "\
"that many days earlier than the actual promised date, to "\
"cope with unexpected delays in the supply chain."),
}
_defaults = {
'security_lead': 0.0,
}
| agpl-3.0 |
nelsonsar/ansible | lib/ansible/galaxy/role.py | 33 | 12340 | ########################################################################
#
# (C) 2015, Brian Coca <bcoca@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
########################################################################
import datetime
import os
import subprocess
import tarfile
import tempfile
import yaml
from shutil import rmtree
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.module_utils.urls import open_url
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class GalaxyRole(object):
    """Represent an Ansible role on disk and/or on Galaxy.

    Implements the operations needed by the ``ansible-galaxy`` CLI:
    reading role metadata, downloading role archives (GitHub tarball or
    SCM archive), installing them under a roles path and removing them.
    """
    # SCM backends supported by scm_archive_role().
    SUPPORTED_SCMS = set(['git', 'hg'])
    # Relative path of the role's metadata file inside the role directory.
    META_MAIN = os.path.join('meta', 'main.yml')
    # Relative path of the install-info file written after installation.
    META_INSTALL = os.path.join('meta', '.galaxy_install_info')
    # Standard skeleton directories of a role.
    ROLE_DIRS = ('defaults','files','handlers','meta','tasks','templates','vars')
    def __init__(self, galaxy, name, src=None, version=None, scm=None, role_path=None):
        """
        :param galaxy: Galaxy context object; provides ``options`` and
            ``roles_paths`` (list of directories searched for roles).
        :param name: role name.
        :param src: role source (name, URL or archive path); defaults to name.
        :param version: requested role version, if any.
        :param scm: 'git' or 'hg' when the role comes from an SCM repository.
        :param role_path: explicit install path; when omitted, the first
            existing checkout in galaxy.roles_paths is used, else the first
            configured path.
        """
        self._metadata = None
        self._install_info = None
        self.options = galaxy.options
        self.name = name
        self.version = version
        self.src = src or name
        self.scm = scm
        if role_path is not None:
            self.path = role_path
        else:
            # Prefer an already-existing copy of this role in any of the
            # configured roles paths.
            for path in galaxy.roles_paths:
                role_path = os.path.join(path, self.name)
                if os.path.exists(role_path):
                    self.path = role_path
                    break
            else:
                # use the first path by default
                self.path = os.path.join(galaxy.roles_paths[0], self.name)
    def __eq__(self, other):
        # Roles compare by name only.
        # NOTE(review): no matching __hash__/__ne__ is defined — confirm
        # instances are never used in hashed collections.
        return self.name == other.name
    @property
    def metadata(self):
        """
        Returns role metadata

        Parsed contents of meta/main.yml, cached after the first read.
        Returns False on a read/parse error, None when the file is absent.
        """
        if self._metadata is None:
            meta_path = os.path.join(self.path, self.META_MAIN)
            if os.path.isfile(meta_path):
                try:
                    f = open(meta_path, 'r')
                    self._metadata = yaml.safe_load(f)
                except:
                    display.vvvvv("Unable to load metadata for %s" % self.name)
                    return False
                finally:
                    # NOTE(review): if open() itself raised, `f` is unbound
                    # here and close() raises NameError — confirm this path
                    # cannot be hit (isfile() is checked just above).
                    f.close()
        return self._metadata
    @property
    def install_info(self):
        """
        Returns role install info

        Parsed contents of meta/.galaxy_install_info (written by
        _write_galaxy_install_info), cached after the first read.
        Returns False on a read/parse error, None when the file is absent.
        """
        if self._install_info is None:
            info_path = os.path.join(self.path, self.META_INSTALL)
            if os.path.isfile(info_path):
                try:
                    f = open(info_path, 'r')
                    self._install_info = yaml.safe_load(f)
                except:
                    display.vvvvv("Unable to load Galaxy install info for %s" % self.name)
                    return False
                finally:
                    # NOTE(review): same unbound-`f` concern as in metadata.
                    f.close()
        return self._install_info
    def _write_galaxy_install_info(self):
        """
        Writes a YAML-formatted file to the role's meta/ directory
        (named .galaxy_install_info) which contains some information
        we can use later for commands like 'list' and 'info'.

        Returns True on success, False on any write/serialization error.
        """
        info = dict(
            version=self.version,
            # "%c" = locale-dependent timestamp string.
            install_date=datetime.datetime.utcnow().strftime("%c"),
        )
        info_path = os.path.join(self.path, self.META_INSTALL)
        try:
            f = open(info_path, 'w+')
            self._install_info = yaml.safe_dump(info, f)
        except:
            return False
        finally:
            f.close()
        return True
    def remove(self):
        """
        Removes the specified role from the roles path. There is a
        sanity check to make sure there's a meta/main.yml file at this
        path so the user doesn't blow away random directories

        Returns True when the directory was removed, False otherwise.
        """
        if self.metadata:
            try:
                rmtree(self.path)
                return True
            except:
                # Best-effort: removal failure is reported via the False
                # return value rather than an exception.
                pass
        return False
    def fetch(self, role_data):
        """
        Downloads the archived role from github to a temp location

        :param role_data: dict from the Galaxy API; when it carries
            github_user/github_repo the tarball URL is derived from them,
            otherwise self.src is used verbatim as the URL.
        :return: path of the downloaded temp file, or False on failure.
        """
        if role_data:
            # first grab the file and save it to a temp location
            if "github_user" in role_data and "github_repo" in role_data:
                archive_url = 'https://github.com/%s/%s/archive/%s.tar.gz' % (role_data["github_user"], role_data["github_repo"], self.version)
            else:
                archive_url = self.src
            display.display("- downloading role from %s" % archive_url)
            try:
                url_file = open_url(archive_url)
                # delete=False: the caller consumes the file by name later.
                temp_file = tempfile.NamedTemporaryFile(delete=False)
                data = url_file.read()
                # Stream the download chunk by chunk until EOF.
                while data:
                    temp_file.write(data)
                    data = url_file.read()
                temp_file.close()
                return temp_file.name
            except:
                # TODO: better urllib2 error handling for error
                # messages that are more exact
                display.error("failed to download the file.")
        return False
    def install(self, role_filename):
        """Extract a downloaded role archive into self.path.

        Verifies the archive is a tarball containing meta/main.yml,
        strips the top-level directory, sanitizes member paths and
        writes the install-info file. Returns True on success.
        """
        # the file is a tar, so open it that way and extract it
        # to the specified (or default) roles directory
        if not tarfile.is_tarfile(role_filename):
            display.error("the file downloaded was not a tar.gz")
            return False
        else:
            if role_filename.endswith('.gz'):
                role_tar_file = tarfile.open(role_filename, "r:gz")
            else:
                role_tar_file = tarfile.open(role_filename, "r")
            # verify the role's meta file
            meta_file = None
            members = role_tar_file.getmembers()
            # next find the metadata file
            for member in members:
                if self.META_MAIN in member.name:
                    meta_file = member
                    break
            if not meta_file:
                display.error("this role does not appear to have a meta/main.yml file.")
                return False
            else:
                try:
                    self._metadata = yaml.safe_load(role_tar_file.extractfile(meta_file))
                except:
                    display.error("this role does not appear to have a valid meta/main.yml file.")
                    return False
            # we strip off the top-level directory for all of the files contained within
            # the tar file here, since the default is 'github_repo-target', and change it
            # to the specified role's name
            display.display("- extracting %s to %s" % (self.name, self.path))
            try:
                if os.path.exists(self.path):
                    if not os.path.isdir(self.path):
                        display.error("the specified roles path exists and is not a directory.")
                        return False
                    elif not getattr(self.options, "force", False):
                        display.error("the specified role %s appears to already exist. Use --force to replace it." % self.name)
                        return False
                    else:
                        # using --force, remove the old path
                        if not self.remove():
                            display.error("%s doesn't appear to contain a role." % self.path)
                            display.error(" please remove this directory manually if you really want to put the role here." )
                            return False
                else:
                    os.makedirs(self.path)
                # now we do the actual extraction to the path
                for member in members:
                    # we only extract files, and remove any relative path
                    # bits that might be in the file for security purposes
                    # and drop the leading directory, as mentioned above
                    if member.isreg() or member.issym():
                        parts = member.name.split(os.sep)[1:]
                        final_parts = []
                        for part in parts:
                            # Drop '..', '~' and '$' components to prevent
                            # path traversal / expansion tricks.
                            if part != '..' and '~' not in part and '$' not in part:
                                final_parts.append(part)
                        member.name = os.path.join(*final_parts)
                        role_tar_file.extract(member, self.path)
                # write out the install info file for later use
                self._write_galaxy_install_info()
            except OSError as e:
                display.error("Could not update files in %s: %s" % (self.path, str(e)))
                return False
            # return the parsed yaml metadata
            display.display("- %s was installed successfully" % self.name)
            return True
    @property
    def spec(self):
        """
        Returns role spec info
        {
           'scm': 'git',
           'src': 'http://git.example.com/repos/repo.git',
           'version': 'v1.0',
           'name': 'repo'
        }
        """
        return dict(scm=self.scm, src=self.src, version=self.version, name=self.name)
    @staticmethod
    def url_to_spec(roleurl):
        """Derive a role name from a repository URL.

        Returns the input unchanged when it does not look like a URL;
        otherwise strips the trailing .git/.tar.gz suffix and anything
        after a comma.
        """
        # gets the role name out of a repo like
        # http://git.example.com/repos/repo.git" => "repo"
        if '://' not in roleurl and '@' not in roleurl:
            return roleurl
        trailing_path = roleurl.split('/')[-1]
        if trailing_path.endswith('.git'):
            trailing_path = trailing_path[:-4]
        if trailing_path.endswith('.tar.gz'):
            trailing_path = trailing_path[:-7]
        if ',' in trailing_path:
            trailing_path = trailing_path.split(',')[0]
        return trailing_path
    @staticmethod
    def scm_archive_role(scm, role_url, role_version, role_name):
        """Clone a role repository and archive it to a tar file.

        :param scm: 'git' or 'hg' (anything else is rejected).
        :param role_url: repository URL to clone.
        :param role_version: revision/tag to archive; HEAD when empty (git).
        :param role_name: prefix directory used inside the archive.
        :return: path of the tar file, or False on failure.

        NOTE(review): uses '/dev/null' to silence subprocess output, so
        this is POSIX-only — confirm that is acceptable here.
        """
        if scm not in ['hg', 'git']:
            display.display("- scm %s is not currently supported" % scm)
            return False
        tempdir = tempfile.mkdtemp()
        clone_cmd = [scm, 'clone', role_url, role_name]
        with open('/dev/null', 'w') as devnull:
            try:
                display.display("- executing: %s" % " ".join(clone_cmd))
                popen = subprocess.Popen(clone_cmd, cwd=tempdir, stdout=devnull, stderr=devnull)
            except:
                raise AnsibleError("error executing: %s" % " ".join(clone_cmd))
            rc = popen.wait()
        if rc != 0:
            display.display("- command %s failed" % ' '.join(clone_cmd))
            display.display(" in directory %s" % tempdir)
            return False
        # delete=False: the caller consumes the archive by name later.
        temp_file = tempfile.NamedTemporaryFile(delete=False, suffix='.tar')
        if scm == 'hg':
            archive_cmd = ['hg', 'archive', '--prefix', "%s/" % role_name]
            if role_version:
                archive_cmd.extend(['-r', role_version])
            archive_cmd.append(temp_file.name)
        if scm == 'git':
            archive_cmd = ['git', 'archive', '--prefix=%s/' % role_name, '--output=%s' % temp_file.name]
            if role_version:
                archive_cmd.append(role_version)
            else:
                archive_cmd.append('HEAD')
        with open('/dev/null', 'w') as devnull:
            display.display("- executing: %s" % " ".join(archive_cmd))
            popen = subprocess.Popen(archive_cmd, cwd=os.path.join(tempdir, role_name),
                                     stderr=devnull, stdout=devnull)
            rc = popen.wait()
        if rc != 0:
            display.display("- command %s failed" % ' '.join(archive_cmd))
            display.display(" in directory %s" % tempdir)
            return False
        # Clean up the clone; only the tar file is kept.
        rmtree(tempdir, ignore_errors=True)
        return temp_file.name
| gpl-3.0 |
nitzmahone/ansible | lib/ansible/modules/cloud/cloudstack/cs_portforward.py | 5 | 12346 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2015, René Moser <mail@renemoser.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cs_portforward
short_description: Manages port forwarding rules on Apache CloudStack based clouds.
description:
- Create, update and remove port forwarding rules.
version_added: '2.0'
author: "René Moser (@resmo)"
options:
ip_address:
description:
- Public IP address the rule is assigned to.
required: true
vm:
description:
- Name of virtual machine which we make the port forwarding rule for.
- Required if C(state=present).
state:
description:
- State of the port forwarding rule.
default: present
choices: [ present, absent ]
protocol:
description:
- Protocol of the port forwarding rule.
default: tcp
choices: [ tcp, udp ]
public_port:
description:
- Start public port for this rule.
required: true
public_end_port:
description:
- End public port for this rule.
- If not specified equal C(public_port).
private_port:
description:
- Start private port for this rule.
required: true
private_end_port:
description:
- End private port for this rule.
- If not specified equal C(private_port).
open_firewall:
description:
- Whether the firewall rule for public port should be created, while creating the new rule.
- Use M(cs_firewall) for managing firewall rules.
default: false
type: bool
vm_guest_ip:
description:
- VM guest NIC secondary IP address for the port forwarding rule.
default: false
network:
description:
- Name of the network.
version_added: "2.3"
vpc:
description:
- Name of the VPC.
version_added: "2.3"
domain:
description:
- Domain the C(vm) is related to.
account:
description:
- Account the C(vm) is related to.
project:
description:
- Name of the project the C(vm) is located in.
zone:
description:
- Name of the zone in which the virtual machine is in.
- If not set, default zone is used.
poll_async:
description:
- Poll async jobs until job has finished.
default: true
type: bool
tags:
description:
- List of tags. Tags are a list of dictionaries having keys C(key) and C(value).
      - "To delete all tags, set an empty list e.g. C(tags: [])."
aliases: [ tag ]
version_added: "2.4"
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
- name: 1.2.3.4:80 -> web01:8080
local_action:
module: cs_portforward
ip_address: 1.2.3.4
vm: web01
public_port: 80
private_port: 8080
- name: forward SSH and open firewall
local_action:
module: cs_portforward
ip_address: '{{ public_ip }}'
vm: '{{ inventory_hostname }}'
public_port: '{{ ansible_ssh_port }}'
private_port: 22
open_firewall: true
- name: forward DNS traffic, but do not open firewall
local_action:
module: cs_portforward
ip_address: 1.2.3.4
vm: '{{ inventory_hostname }}'
public_port: 53
private_port: 53
protocol: udp
- name: remove ssh port forwarding
local_action:
module: cs_portforward
ip_address: 1.2.3.4
public_port: 22
private_port: 22
state: absent
'''
RETURN = '''
---
id:
description: UUID of the public IP address.
returned: success
type: string
sample: a6f7a5fc-43f8-11e5-a151-feff819cdc9f
ip_address:
description: Public IP address.
returned: success
type: string
sample: 1.2.3.4
protocol:
description: Protocol.
returned: success
type: string
sample: tcp
private_port:
description: Start port on the virtual machine's IP address.
returned: success
type: int
sample: 80
private_end_port:
description: End port on the virtual machine's IP address.
returned: success
type: int
public_port:
description: Start port on the public IP address.
returned: success
type: int
sample: 80
public_end_port:
description: End port on the public IP address.
returned: success
type: int
sample: 80
tags:
description: Tags related to the port forwarding.
returned: success
type: list
sample: []
vm_name:
description: Name of the virtual machine.
returned: success
type: string
sample: web-01
vm_display_name:
description: Display name of the virtual machine.
returned: success
type: string
sample: web-01
vm_guest_ip:
description: IP of the virtual machine.
returned: success
type: string
sample: 10.101.65.152
vpc:
description: Name of the VPC.
returned: success
type: string
sample: my_vpc
network:
description: Name of the network.
returned: success
type: string
sample: dmz
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.cloudstack import AnsibleCloudStack, cs_argument_spec, cs_required_together
class AnsibleCloudStackPortforwarding(AnsibleCloudStack):
    """Create, update and remove CloudStack port forwarding rules."""
    def __init__(self, module):
        super(AnsibleCloudStackPortforwarding, self).__init__(module)
        # Map CloudStack API result keys to the module's return keys.
        self.returns = {
            'virtualmachinedisplayname': 'vm_display_name',
            'virtualmachinename': 'vm_name',
            'ipaddress': 'ip_address',
            'vmguestip': 'vm_guest_ip',
            'publicip': 'public_ip',
            'protocol': 'protocol',
        }
        # these values will be casted to int
        self.returns_to_int = {
            'publicport': 'public_port',
            'publicendport': 'public_end_port',
            'privateport': 'private_port',
            'privateendport': 'private_end_port',
        }
        # Cached rule matching (ip address, protocol, public port).
        self.portforwarding_rule = None
    def get_portforwarding_rule(self):
        """Return the existing rule matching protocol and public start port
        on the requested IP address, or None; the lookup is cached."""
        if not self.portforwarding_rule:
            protocol = self.module.params.get('protocol')
            public_port = self.module.params.get('public_port')
            args = {
                'ipaddressid': self.get_ip_address(key='id'),
                'account': self.get_account(key='name'),
                'domainid': self.get_domain(key='id'),
                'projectid': self.get_project(key='id'),
            }
            portforwarding_rules = self.query_api('listPortForwardingRules', **args)
            if portforwarding_rules and 'portforwardingrule' in portforwarding_rules:
                for rule in portforwarding_rules['portforwardingrule']:
                    # A rule is identified by protocol + public start port.
                    if (protocol == rule['protocol'] and
                            public_port == int(rule['publicport'])):
                        self.portforwarding_rule = rule
                        break
        return self.portforwarding_rule
    def present_portforwarding_rule(self):
        """Ensure the rule exists (create or update) and its tags match."""
        portforwarding_rule = self.get_portforwarding_rule()
        if portforwarding_rule:
            portforwarding_rule = self.update_portforwarding_rule(portforwarding_rule)
        else:
            portforwarding_rule = self.create_portforwarding_rule()
        if portforwarding_rule:
            portforwarding_rule = self.ensure_tags(resource=portforwarding_rule, resource_type='PortForwardingRule')
            self.portforwarding_rule = portforwarding_rule
        return portforwarding_rule
    def create_portforwarding_rule(self):
        """Create a new rule; honors check mode and async polling."""
        args = {
            'protocol': self.module.params.get('protocol'),
            'publicport': self.module.params.get('public_port'),
            'publicendport': self.get_or_fallback('public_end_port', 'public_port'),
            'privateport': self.module.params.get('private_port'),
            'privateendport': self.get_or_fallback('private_end_port', 'private_port'),
            'openfirewall': self.module.params.get('open_firewall'),
            'vmguestip': self.get_vm_guest_ip(),
            'ipaddressid': self.get_ip_address(key='id'),
            'virtualmachineid': self.get_vm(key='id'),
            'account': self.get_account(key='name'),
            'domainid': self.get_domain(key='id'),
            'networkid': self.get_network(key='id'),
        }
        portforwarding_rule = None
        self.result['changed'] = True
        if not self.module.check_mode:
            portforwarding_rule = self.query_api('createPortForwardingRule', **args)
            poll_async = self.module.params.get('poll_async')
            if poll_async:
                portforwarding_rule = self.poll_job(portforwarding_rule, 'portforwardingrule')
        return portforwarding_rule
    def update_portforwarding_rule(self, portforwarding_rule):
        """Re-create the rule when any argument diverges from the API state."""
        args = {
            'protocol': self.module.params.get('protocol'),
            'publicport': self.module.params.get('public_port'),
            'publicendport': self.get_or_fallback('public_end_port', 'public_port'),
            'privateport': self.module.params.get('private_port'),
            'privateendport': self.get_or_fallback('private_end_port', 'private_port'),
            'vmguestip': self.get_vm_guest_ip(),
            'ipaddressid': self.get_ip_address(key='id'),
            'virtualmachineid': self.get_vm(key='id'),
            'networkid': self.get_network(key='id'),
        }
        if self.has_changed(args, portforwarding_rule):
            self.result['changed'] = True
            if not self.module.check_mode:
                # API broken in 4.2.1?, workaround using remove/create instead of update
                # portforwarding_rule = self.query_api('updatePortForwardingRule', **args)
                self.absent_portforwarding_rule()
                portforwarding_rule = self.query_api('createPortForwardingRule', **args)
                poll_async = self.module.params.get('poll_async')
                if poll_async:
                    portforwarding_rule = self.poll_job(portforwarding_rule, 'portforwardingrule')
        return portforwarding_rule
    def absent_portforwarding_rule(self):
        """Delete the rule if it exists; honors check mode and async polling."""
        portforwarding_rule = self.get_portforwarding_rule()
        if portforwarding_rule:
            self.result['changed'] = True
            args = {
                'id': portforwarding_rule['id'],
            }
            if not self.module.check_mode:
                res = self.query_api('deletePortForwardingRule', **args)
                poll_async = self.module.params.get('poll_async')
                if poll_async:
                    self.poll_job(res, 'portforwardingrule')
        return portforwarding_rule
    def get_result(self, portforwarding_rule):
        """Build the module result; port fields are cast to int on top of
        the base-class key mapping."""
        super(AnsibleCloudStackPortforwarding, self).get_result(portforwarding_rule)
        if portforwarding_rule:
            for search_key, return_key in self.returns_to_int.items():
                if search_key in portforwarding_rule:
                    self.result[return_key] = int(portforwarding_rule[search_key])
        return self.result
def main():
    """Module entry point: parse arguments and apply the requested state."""
    argument_spec = cs_argument_spec()
    # Module-specific options layered on top of the common CloudStack spec.
    module_options = dict(
        ip_address=dict(required=True),
        protocol=dict(choices=['tcp', 'udp'], default='tcp'),
        public_port=dict(type='int', required=True),
        public_end_port=dict(type='int'),
        private_port=dict(type='int', required=True),
        private_end_port=dict(type='int'),
        state=dict(choices=['present', 'absent'], default='present'),
        open_firewall=dict(type='bool', default=False),
        vm_guest_ip=dict(),
        vm=dict(),
        vpc=dict(),
        network=dict(),
        zone=dict(),
        domain=dict(),
        account=dict(),
        project=dict(),
        poll_async=dict(type='bool', default=True),
        tags=dict(type='list', aliases=['tag']),
    )
    argument_spec.update(module_options)
    module = AnsibleModule(
        argument_spec=argument_spec,
        required_together=cs_required_together(),
        supports_check_mode=True
    )
    acs_pf = AnsibleCloudStackPortforwarding(module)
    # Dispatch on the requested state.
    if module.params.get('state') == 'absent':
        pf_rule = acs_pf.absent_portforwarding_rule()
    else:
        pf_rule = acs_pf.present_portforwarding_rule()
    module.exit_json(**acs_pf.get_result(pf_rule))
if __name__ == '__main__':
    main()
| gpl-3.0 |
bh107/bohrium | bridge/npbackend/bohrium/concatenate.py | 5 | 13321 | """
Array concatenate functions
~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
from . import array_create
def atleast_1d(*arys):
    """Convert inputs to arrays with at least one dimension.

    Scalar inputs become 1-dimensional arrays; higher-dimensional inputs
    are passed through unchanged.

    Parameters
    ----------
    arys1, arys2, ... : array_like
        One or more input arrays.

    Returns
    -------
    ret : ndarray or list of ndarray
        An array, or list of arrays, each with ``a.ndim >= 1``.
        Copies are made only if necessary.

    See Also
    --------
    atleast_2d, atleast_3d
    """
    promoted = [
        a.reshape(1) if len(a.shape) == 0 else a
        for a in (array_create.array(ary) for ary in arys)
    ]
    # A single input yields the bare array rather than a one-element list.
    return promoted[0] if len(promoted) == 1 else promoted
def atleast_2d(*arys):
    """View inputs as arrays with at least two dimensions.

    Parameters
    ----------
    arys1, arys2, ... : array_like
        One or more array-like sequences. Non-array inputs are converted
        to arrays; inputs with two or more dimensions are preserved.

    Returns
    -------
    res, res2, ... : ndarray or list of ndarray
        An array, or list of arrays, each with ``a.ndim >= 2``.
        Views are returned where possible.

    See Also
    --------
    atleast_1d, atleast_3d
    """
    def promote(a):
        # 0-d -> (1, 1); 1-d -> (1, n) via a new leading axis.
        ndim = len(a.shape)
        if ndim == 0:
            return a.reshape(1, 1)
        if ndim == 1:
            return a[None, :]
        return a

    promoted = [promote(array_create.array(ary)) for ary in arys]
    # A single input yields the bare array rather than a one-element list.
    return promoted[0] if len(promoted) == 1 else promoted
def atleast_3d(*arys):
    """View inputs as arrays with at least three dimensions.

    Parameters
    ----------
    arys1, arys2, ... : array_like
        One or more array-like sequences. Non-array inputs are converted
        to arrays; inputs with three or more dimensions are preserved.

    Returns
    -------
    res1, res2, ... : ndarray or list of ndarray
        An array, or list of arrays, each with ``a.ndim >= 3``. A 1-D
        array of shape ``(N,)`` becomes a view of shape ``(1, N, 1)``;
        a 2-D array of shape ``(M, N)`` becomes ``(M, N, 1)``.

    See Also
    --------
    atleast_1d, atleast_2d
    """
    def promote(a):
        # 0-d -> (1, 1, 1); 1-d -> (1, n, 1); 2-d -> (m, n, 1).
        ndim = len(a.shape)
        if ndim == 0:
            return a.reshape(1, 1, 1)
        if ndim == 1:
            return a[None, :, None]
        if ndim == 2:
            return a[:, :, None]
        return a

    promoted = [promote(array_create.array(ary)) for ary in arys]
    # A single input yields the bare array rather than a one-element list.
    return promoted[0] if len(promoted) == 1 else promoted
def concatenate(array_list, axis=0):
    """Join a sequence of arrays along an existing axis.

    Parameters
    ----------
    array_list : sequence of array_like
        The arrays must have the same shape, except in the dimension
        corresponding to `axis` (the first, by default).
    axis : int, optional
        The axis along which the arrays will be joined. Default is 0.

    Returns
    -------
    res : ndarray or None
        The concatenated array, or None when `array_list` is empty
        (preserved legacy behavior).

    See Also
    --------
    vstack, hstack, stack

    Examples
    --------
    >>> a = np.array([[1, 2], [3, 4]])
    >>> b = np.array([[5, 6]])
    >>> np.concatenate((a, b), axis=0)
    array([[1, 2],
           [3, 4],
           [5, 6]])
    """
    if len(array_list) == 0:
        return None
    # Allocate the result: same shape as the first input except along
    # `axis`, where the extents of all inputs are summed.
    ret_shape = list(array_list[0].shape)
    ret_shape[axis] = sum(ary.shape[axis] for ary in array_list)
    ret = array_create.empty(ret_shape, dtype=array_list[0].dtype)
    # Copy each input into its slot along `axis` using an index tuple.
    # This replaces the previous approach of building a slicing
    # expression as a string and exec()-ing it, which was fragile and
    # unnecessarily unsafe.
    offset = 0
    for ary in array_list:
        index = [slice(None)] * ret.ndim
        index[axis] = slice(offset, offset + ary.shape[axis])
        ret[tuple(index)] = ary
        offset += ary.shape[axis]
    return ret
def vstack(tup):
    """Stack arrays in sequence vertically (row wise).

    Equivalent to ``concatenate(tup, axis=0)`` after promoting each
    input to at least two dimensions. Rebuilds arrays divided by
    `vsplit`.

    Parameters
    ----------
    tup : sequence of ndarrays
        Arrays to be stacked; they must have the same shape along all
        but the first axis.

    Returns
    -------
    stacked : ndarray
        The array formed by stacking the given arrays.

    See Also
    --------
    stack, hstack, concatenate
    """
    # Promote every input to 2-D, then join along the first axis.
    promoted = [atleast_2d(piece) for piece in tup]
    return concatenate(promoted, 0)
def hstack(tup):
    """Stack arrays in sequence horizontally (column wise).

    Rebuilds arrays divided by `hsplit`. Equivalent to
    ``concatenate(tup, axis=1)`` except for 1-D inputs, whose axis 0 is
    treated as "horizontal".

    Parameters
    ----------
    tup : sequence of ndarrays
        All arrays must have the same shape along all but the second
        axis.

    Returns
    -------
    stacked : ndarray
        The array formed by stacking the given arrays.

    See Also
    --------
    stack, vstack, concatenate
    """
    promoted = [atleast_1d(piece) for piece in tup]
    # For 1-D inputs, dimension 0 is the "horizontal" one.
    join_axis = 0 if promoted[0].ndim == 1 else 1
    return concatenate(promoted, join_axis)
def stack(arrays, axis=0):
    """Join a sequence of arrays along a new axis.

    The `axis` parameter gives the index of the new axis in the result:
    ``axis=0`` makes it the first dimension, ``axis=-1`` the last.

    Parameters
    ----------
    arrays : sequence of array_like
        Each array must have the same shape.
    axis : int, optional
        The axis in the result array along which the input arrays are
        stacked.

    Returns
    -------
    stacked : ndarray
        The stacked array has one more dimension than the input arrays.

    Raises
    ------
    ValueError
        If `arrays` is empty or the shapes differ.
    IndexError
        If `axis` is out of bounds for the result.

    See Also
    --------
    concatenate
    """
    converted = [array_create.array(arr) for arr in arrays]
    if not converted:
        raise ValueError('need at least one array to stack')
    if len(set(arr.shape for arr in converted)) != 1:
        raise ValueError('all input arrays must have the same shape')
    result_ndim = converted[0].ndim + 1
    # Validate the axis against the result's dimensionality, then
    # normalize negative values.
    if not -result_ndim <= axis < result_ndim:
        raise IndexError(
            'axis {0} out of bounds [-{1}, {1})'.format(axis, result_ndim))
    if axis < 0:
        axis += result_ndim
    # Insert a new unit axis at `axis` in each input, then concatenate
    # along that axis.
    expander = (slice(None),) * axis + (None,)
    return concatenate([arr[expander] for arr in converted], axis=axis)
| apache-2.0 |
837468220/python-for-android | python3-alpha/python3-src/Lib/lib2to3/tests/test_pytree.py | 131 | 17346 | # Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Unit tests for pytree.py.
NOTE: Please *don't* add doc strings to individual test methods!
In verbose mode, printing of the module, class and method name is much
more helpful than printing of (the first line of) the docstring,
especially when debugging a test.
"""
from __future__ import with_statement
import sys
import warnings
# Testing imports
from . import support
from lib2to3 import pytree
# Compatibility shim: provide a minimal sorted() when the builtin is
# missing (pre-2.4 Pythons); the bare name probe raises NameError there.
try:
    sorted
except NameError:
    def sorted(lst):
        # Copy first so the caller's sequence is not mutated in place.
        l = list(lst)
        l.sort()
        return l
class TestNodes(support.TestCase):
"""Unit tests for nodes (Base, Leaf, Node)."""
if sys.version_info >= (2,6):
# warnings.catch_warnings is new in 2.6.
def test_deprecated_prefix_methods(self):
l = pytree.Leaf(100, "foo")
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always", DeprecationWarning)
self.assertEqual(l.get_prefix(), "")
l.set_prefix("hi")
self.assertEqual(l.prefix, "hi")
self.assertEqual(len(w), 2)
for warning in w:
self.assertTrue(warning.category is DeprecationWarning)
self.assertEqual(str(w[0].message), "get_prefix() is deprecated; " \
"use the prefix property")
self.assertEqual(str(w[1].message), "set_prefix() is deprecated; " \
"use the prefix property")
def test_instantiate_base(self):
if __debug__:
# Test that instantiating Base() raises an AssertionError
self.assertRaises(AssertionError, pytree.Base)
def test_leaf(self):
l1 = pytree.Leaf(100, "foo")
self.assertEqual(l1.type, 100)
self.assertEqual(l1.value, "foo")
def test_leaf_repr(self):
l1 = pytree.Leaf(100, "foo")
self.assertEqual(repr(l1), "Leaf(100, 'foo')")
def test_leaf_str(self):
l1 = pytree.Leaf(100, "foo")
self.assertEqual(str(l1), "foo")
l2 = pytree.Leaf(100, "foo", context=(" ", (10, 1)))
self.assertEqual(str(l2), " foo")
def test_leaf_str_numeric_value(self):
# Make sure that the Leaf's value is stringified. Failing to
# do this can cause a TypeError in certain situations.
l1 = pytree.Leaf(2, 5)
l1.prefix = "foo_"
self.assertEqual(str(l1), "foo_5")
def test_leaf_equality(self):
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "foo", context=(" ", (1, 0)))
self.assertEqual(l1, l2)
l3 = pytree.Leaf(101, "foo")
l4 = pytree.Leaf(100, "bar")
self.assertNotEqual(l1, l3)
self.assertNotEqual(l1, l4)
def test_leaf_prefix(self):
l1 = pytree.Leaf(100, "foo")
self.assertEqual(l1.prefix, "")
self.assertFalse(l1.was_changed)
l1.prefix = " ##\n\n"
self.assertEqual(l1.prefix, " ##\n\n")
self.assertTrue(l1.was_changed)
def test_node(self):
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(200, "bar")
n1 = pytree.Node(1000, [l1, l2])
self.assertEqual(n1.type, 1000)
self.assertEqual(n1.children, [l1, l2])
def test_node_repr(self):
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "bar", context=(" ", (1, 0)))
n1 = pytree.Node(1000, [l1, l2])
self.assertEqual(repr(n1),
"Node(1000, [%s, %s])" % (repr(l1), repr(l2)))
def test_node_str(self):
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "bar", context=(" ", (1, 0)))
n1 = pytree.Node(1000, [l1, l2])
self.assertEqual(str(n1), "foo bar")
def test_node_prefix(self):
l1 = pytree.Leaf(100, "foo")
self.assertEqual(l1.prefix, "")
n1 = pytree.Node(1000, [l1])
self.assertEqual(n1.prefix, "")
n1.prefix = " "
self.assertEqual(n1.prefix, " ")
self.assertEqual(l1.prefix, " ")
def test_get_suffix(self):
l1 = pytree.Leaf(100, "foo", prefix="a")
l2 = pytree.Leaf(100, "bar", prefix="b")
n1 = pytree.Node(1000, [l1, l2])
self.assertEqual(l1.get_suffix(), l2.prefix)
self.assertEqual(l2.get_suffix(), "")
self.assertEqual(n1.get_suffix(), "")
l3 = pytree.Leaf(100, "bar", prefix="c")
n2 = pytree.Node(1000, [n1, l3])
self.assertEqual(n1.get_suffix(), l3.prefix)
self.assertEqual(l3.get_suffix(), "")
self.assertEqual(n2.get_suffix(), "")
def test_node_equality(self):
n1 = pytree.Node(1000, ())
n2 = pytree.Node(1000, [], context=(" ", (1, 0)))
self.assertEqual(n1, n2)
n3 = pytree.Node(1001, ())
self.assertNotEqual(n1, n3)
def test_node_recursive_equality(self):
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "foo")
n1 = pytree.Node(1000, [l1])
n2 = pytree.Node(1000, [l2])
self.assertEqual(n1, n2)
l3 = pytree.Leaf(100, "bar")
n3 = pytree.Node(1000, [l3])
self.assertNotEqual(n1, n3)
def test_replace(self):
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "+")
l3 = pytree.Leaf(100, "bar")
n1 = pytree.Node(1000, [l1, l2, l3])
self.assertEqual(n1.children, [l1, l2, l3])
self.assertTrue(isinstance(n1.children, list))
self.assertFalse(n1.was_changed)
l2new = pytree.Leaf(100, "-")
l2.replace(l2new)
self.assertEqual(n1.children, [l1, l2new, l3])
self.assertTrue(isinstance(n1.children, list))
self.assertTrue(n1.was_changed)
def test_replace_with_list(self):
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "+")
l3 = pytree.Leaf(100, "bar")
n1 = pytree.Node(1000, [l1, l2, l3])
l2.replace([pytree.Leaf(100, "*"), pytree.Leaf(100, "*")])
self.assertEqual(str(n1), "foo**bar")
self.assertTrue(isinstance(n1.children, list))
def test_leaves(self):
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "bar")
l3 = pytree.Leaf(100, "fooey")
n2 = pytree.Node(1000, [l1, l2])
n3 = pytree.Node(1000, [l3])
n1 = pytree.Node(1000, [n2, n3])
self.assertEqual(list(n1.leaves()), [l1, l2, l3])
def test_depth(self):
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "bar")
n2 = pytree.Node(1000, [l1, l2])
n3 = pytree.Node(1000, [])
n1 = pytree.Node(1000, [n2, n3])
self.assertEqual(l1.depth(), 2)
self.assertEqual(n3.depth(), 1)
self.assertEqual(n1.depth(), 0)
def test_post_order(self):
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "bar")
l3 = pytree.Leaf(100, "fooey")
c1 = pytree.Node(1000, [l1, l2])
n1 = pytree.Node(1000, [c1, l3])
self.assertEqual(list(n1.post_order()), [l1, l2, c1, l3, n1])
def test_pre_order(self):
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "bar")
l3 = pytree.Leaf(100, "fooey")
c1 = pytree.Node(1000, [l1, l2])
n1 = pytree.Node(1000, [c1, l3])
self.assertEqual(list(n1.pre_order()), [n1, c1, l1, l2, l3])
def test_changed(self):
l1 = pytree.Leaf(100, "f")
self.assertFalse(l1.was_changed)
l1.changed()
self.assertTrue(l1.was_changed)
l1 = pytree.Leaf(100, "f")
n1 = pytree.Node(1000, [l1])
self.assertFalse(n1.was_changed)
n1.changed()
self.assertTrue(n1.was_changed)
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "+")
l3 = pytree.Leaf(100, "bar")
n1 = pytree.Node(1000, [l1, l2, l3])
n2 = pytree.Node(1000, [n1])
self.assertFalse(l1.was_changed)
self.assertFalse(n1.was_changed)
self.assertFalse(n2.was_changed)
n1.changed()
self.assertTrue(n1.was_changed)
self.assertTrue(n2.was_changed)
self.assertFalse(l1.was_changed)
def test_leaf_constructor_prefix(self):
for prefix in ("xyz_", ""):
l1 = pytree.Leaf(100, "self", prefix=prefix)
self.assertTrue(str(l1), prefix + "self")
self.assertEqual(l1.prefix, prefix)
def test_node_constructor_prefix(self):
for prefix in ("xyz_", ""):
l1 = pytree.Leaf(100, "self")
l2 = pytree.Leaf(100, "foo", prefix="_")
n1 = pytree.Node(1000, [l1, l2], prefix=prefix)
self.assertTrue(str(n1), prefix + "self_foo")
self.assertEqual(n1.prefix, prefix)
self.assertEqual(l1.prefix, prefix)
self.assertEqual(l2.prefix, "_")
    def test_remove(self):
        # remove() detaches a node from its parent and returns the index the
        # node occupied in the parent's children list.
        l1 = pytree.Leaf(100, "foo")
        l2 = pytree.Leaf(100, "foo")
        n1 = pytree.Node(1000, [l1, l2])
        n2 = pytree.Node(1000, [n1])
        self.assertEqual(n1.remove(), 0)
        self.assertEqual(n2.children, [])
        # Children of the removed subtree keep their parent pointer; only the
        # removed node itself is orphaned.
        self.assertEqual(l1.parent, n1)
        self.assertEqual(n1.parent, None)
        self.assertEqual(n2.parent, None)
        # Only the old parent (n2) is marked changed, not the removed subtree.
        self.assertFalse(n1.was_changed)
        self.assertTrue(n2.was_changed)
        self.assertEqual(l2.remove(), 1)
        self.assertEqual(l1.remove(), 0)
        self.assertEqual(n1.children, [])
        self.assertEqual(l1.parent, None)
        self.assertEqual(n1.parent, None)
        self.assertEqual(n2.parent, None)
        # Removing the leaves marks their parent (n1) changed as well.
        self.assertTrue(n1.was_changed)
        self.assertTrue(n2.was_changed)
def test_remove_parentless(self):
n1 = pytree.Node(1000, [])
n1.remove()
self.assertEqual(n1.parent, None)
l1 = pytree.Leaf(100, "foo")
l1.remove()
self.assertEqual(l1.parent, None)
def test_node_set_child(self):
l1 = pytree.Leaf(100, "foo")
n1 = pytree.Node(1000, [l1])
l2 = pytree.Leaf(100, "bar")
n1.set_child(0, l2)
self.assertEqual(l1.parent, None)
self.assertEqual(l2.parent, n1)
self.assertEqual(n1.children, [l2])
n2 = pytree.Node(1000, [l1])
n2.set_child(0, n1)
self.assertEqual(l1.parent, None)
self.assertEqual(n1.parent, n2)
self.assertEqual(n2.parent, None)
self.assertEqual(n2.children, [n1])
self.assertRaises(IndexError, n1.set_child, 4, l2)
# I don't care what it raises, so long as it's an exception
self.assertRaises(Exception, n1.set_child, 0, list)
def test_node_insert_child(self):
l1 = pytree.Leaf(100, "foo")
n1 = pytree.Node(1000, [l1])
l2 = pytree.Leaf(100, "bar")
n1.insert_child(0, l2)
self.assertEqual(l2.parent, n1)
self.assertEqual(n1.children, [l2, l1])
l3 = pytree.Leaf(100, "abc")
n1.insert_child(2, l3)
self.assertEqual(n1.children, [l2, l1, l3])
# I don't care what it raises, so long as it's an exception
self.assertRaises(Exception, n1.insert_child, 0, list)
def test_node_append_child(self):
n1 = pytree.Node(1000, [])
l1 = pytree.Leaf(100, "foo")
n1.append_child(l1)
self.assertEqual(l1.parent, n1)
self.assertEqual(n1.children, [l1])
l2 = pytree.Leaf(100, "bar")
n1.append_child(l2)
self.assertEqual(l2.parent, n1)
self.assertEqual(n1.children, [l1, l2])
# I don't care what it raises, so long as it's an exception
self.assertRaises(Exception, n1.append_child, list)
def test_node_next_sibling(self):
n1 = pytree.Node(1000, [])
n2 = pytree.Node(1000, [])
p1 = pytree.Node(1000, [n1, n2])
self.assertTrue(n1.next_sibling is n2)
self.assertEqual(n2.next_sibling, None)
self.assertEqual(p1.next_sibling, None)
def test_leaf_next_sibling(self):
l1 = pytree.Leaf(100, "a")
l2 = pytree.Leaf(100, "b")
p1 = pytree.Node(1000, [l1, l2])
self.assertTrue(l1.next_sibling is l2)
self.assertEqual(l2.next_sibling, None)
self.assertEqual(p1.next_sibling, None)
def test_node_prev_sibling(self):
n1 = pytree.Node(1000, [])
n2 = pytree.Node(1000, [])
p1 = pytree.Node(1000, [n1, n2])
self.assertTrue(n2.prev_sibling is n1)
self.assertEqual(n1.prev_sibling, None)
self.assertEqual(p1.prev_sibling, None)
def test_leaf_prev_sibling(self):
l1 = pytree.Leaf(100, "a")
l2 = pytree.Leaf(100, "b")
p1 = pytree.Node(1000, [l1, l2])
self.assertTrue(l2.prev_sibling is l1)
self.assertEqual(l1.prev_sibling, None)
self.assertEqual(p1.prev_sibling, None)
class TestPatterns(support.TestCase):
    """Unit tests for tree matching patterns."""
    def test_basic_patterns(self):
        """LeafPattern/NodePattern match single nodes and bind their names."""
        # Build a tree
        l1 = pytree.Leaf(100, "foo")
        l2 = pytree.Leaf(100, "bar")
        l3 = pytree.Leaf(100, "foo")
        n1 = pytree.Node(1000, [l1, l2])
        n2 = pytree.Node(1000, [l3])
        root = pytree.Node(1000, [n1, n2])
        # Build a pattern matching a leaf
        pl = pytree.LeafPattern(100, "foo", name="pl")
        r = {}
        # A leaf pattern does not match interior nodes, and a failed match
        # must leave the results dict untouched.
        self.assertFalse(pl.match(root, results=r))
        self.assertEqual(r, {})
        self.assertFalse(pl.match(n1, results=r))
        self.assertEqual(r, {})
        self.assertFalse(pl.match(n2, results=r))
        self.assertEqual(r, {})
        # A successful match records the node under the pattern's name.
        self.assertTrue(pl.match(l1, results=r))
        self.assertEqual(r, {"pl": l1})
        r = {}
        self.assertFalse(pl.match(l2, results=r))
        self.assertEqual(r, {})
        # Build a pattern matching a node
        pn = pytree.NodePattern(1000, [pl], name="pn")
        self.assertFalse(pn.match(root, results=r))
        self.assertEqual(r, {})
        self.assertFalse(pn.match(n1, results=r))
        self.assertEqual(r, {})
        # Matching the node also binds the nested leaf pattern.
        self.assertTrue(pn.match(n2, results=r))
        self.assertEqual(r, {"pn": n2, "pl": l3})
        r = {}
        self.assertFalse(pn.match(l1, results=r))
        self.assertEqual(r, {})
        self.assertFalse(pn.match(l2, results=r))
        self.assertEqual(r, {})
    def test_wildcard(self):
        """WildcardPattern tries each of its alternative subsequences."""
        # Build a tree for testing
        l1 = pytree.Leaf(100, "foo")
        l2 = pytree.Leaf(100, "bar")
        l3 = pytree.Leaf(100, "foo")
        n1 = pytree.Node(1000, [l1, l2])
        n2 = pytree.Node(1000, [l3])
        root = pytree.Node(1000, [n1, n2])
        # Build a pattern
        pl = pytree.LeafPattern(100, "foo", name="pl")
        pn = pytree.NodePattern(1000, [pl], name="pn")
        pw = pytree.WildcardPattern([[pn], [pl, pl]], name="pw")
        r = {}
        self.assertFalse(pw.match_seq([root], r))
        self.assertEqual(r, {})
        self.assertFalse(pw.match_seq([n1], r))
        self.assertEqual(r, {})
        self.assertTrue(pw.match_seq([n2], r))
        # These are easier to debug
        self.assertEqual(sorted(r.keys()), ["pl", "pn", "pw"])
        # NOTE: leaves compare by (type, value), so the bound leaf (the one
        # inside n2) also compares equal to l1 here.
        self.assertEqual(r["pl"], l1)
        self.assertEqual(r["pn"], n2)
        self.assertEqual(r["pw"], [n2])
        # But this is equivalent
        self.assertEqual(r, {"pl": l1, "pn": n2, "pw": [n2]})
        r = {}
        self.assertTrue(pw.match_seq([l1, l3], r))
        self.assertEqual(r, {"pl": l3, "pw": [l1, l3]})
        # The [pl, pl] alternative binds "pl" to the *last* leaf matched.
        self.assertTrue(r["pl"] is l3)
        r = {}
    def test_generate_matches(self):
        """generate_matches() yields (consumed-count, results) pairs."""
        la = pytree.Leaf(1, "a")
        lb = pytree.Leaf(1, "b")
        lc = pytree.Leaf(1, "c")
        ld = pytree.Leaf(1, "d")
        le = pytree.Leaf(1, "e")
        lf = pytree.Leaf(1, "f")
        leaves = [la, lb, lc, ld, le, lf]
        root = pytree.Node(1000, leaves)
        pa = pytree.LeafPattern(1, "a", "pa")
        pb = pytree.LeafPattern(1, "b", "pb")
        pc = pytree.LeafPattern(1, "c", "pc")
        pd = pytree.LeafPattern(1, "d", "pd")
        pe = pytree.LeafPattern(1, "e", "pe")
        pf = pytree.LeafPattern(1, "f", "pf")
        pw = pytree.WildcardPattern([[pa, pb, pc], [pd, pe],
                                     [pa, pb], [pc, pd], [pe, pf]],
                                    min=1, max=4, name="pw")
        # Each entry is how many leaves the corresponding match consumed.
        self.assertEqual([x[0] for x in pw.generate_matches(leaves)],
                         [3, 5, 2, 4, 6])
        pr = pytree.NodePattern(type=1000, content=[pw], name="pr")
        matches = list(pytree.generate_matches([pr], [root]))
        self.assertEqual(len(matches), 1)
        c, r = matches[0]
        self.assertEqual(c, 1)
        self.assertEqual(str(r["pr"]), "abcdef")
        self.assertEqual(r["pw"], [la, lb, lc, ld, le, lf])
        for c in "abcdef":
            self.assertEqual(r["p" + c], pytree.Leaf(1, c))
    def test_has_key_example(self):
        """A (LeafPattern, Wildcard, LeafPattern) triple matches call args."""
        pattern = pytree.NodePattern(331,
                                     (pytree.LeafPattern(7),
                                      pytree.WildcardPattern(name="args"),
                                      pytree.LeafPattern(8)))
        l1 = pytree.Leaf(7, "(")
        l2 = pytree.Leaf(3, "x")
        l3 = pytree.Leaf(8, ")")
        node = pytree.Node(331, [l1, l2, l3])
        r = {}
        self.assertTrue(pattern.match(node, r))
        self.assertEqual(r["args"], [l2])
| apache-2.0 |
SCSSG/Odoo-SCS | addons/account/wizard/account_report_print_journal.py | 378 | 3440 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from lxml import etree
class account_print_journal(osv.osv_memory):
    # Transient wizard model that gathers the options for printing the
    # account journal report, extending the common journal report wizard.
    _inherit = "account.common.journal.report"
    _name = 'account.print.journal'
    _description = 'Account Print Journal'
    _columns = {
        # Sort key passed through to the report's SQL ('l.date' or 'am.name').
        'sort_selection': fields.selection([('l.date', 'Date'),
                                            ('am.name', 'Journal Entry Number'),],
                                            'Entries Sorted by', required=True),
        'journal_ids': fields.many2many('account.journal', 'account_print_journal_journal_rel', 'account_id', 'journal_id', 'Journals', required=True),
    }
    _defaults = {
        'sort_selection': 'am.name',
        'filter': 'filter_period',
        'journal_ids': False,
    }
    def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
        '''
        used to set the domain on 'journal_ids' field: we exclude or only propose the journals of type
        sale/purchase (+refund) accordingly to the presence of the key 'sale_purchase_only' in the context.
        '''
        if context is None:
            context = {}
        res = super(account_print_journal, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=submenu)
        # Rewrite the view's XML to inject the computed domain.
        doc = etree.XML(res['arch'])
        if context.get('sale_purchase_only'):
            domain ="[('type', 'in', ('sale','purchase','sale_refund','purchase_refund'))]"
        else:
            domain ="[('type', 'not in', ('sale','purchase','sale_refund','purchase_refund'))]"
        nodes = doc.xpath("//field[@name='journal_ids']")
        for node in nodes:
            node.set('domain', domain)
        res['arch'] = etree.tostring(doc)
        return res
    def _print_report(self, cr, uid, ids, data, context=None):
        # Collect the wizard values and dispatch to the matching QWeb report.
        if context is None:
            context = {}
        data = self.pre_print_report(cr, uid, ids, data, context=context)
        data['form'].update(self.read(cr, uid, ids, ['sort_selection'], context=context)[0])
        if context.get('sale_purchase_only'):
            return self.pool['report'].get_action(cr, uid, [], 'account.report_salepurchasejournal', data=data, context=context)
        else:
            return self.pool['report'].get_action(cr, uid, [], 'account.report_journal', data=data, context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
shamangeorge/beets | beetsplug/unimported.py | 6 | 2147 | # -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2019, Joris Jensen
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""
List all files in the library folder which are not listed in the
beets library database, including art files
"""
from __future__ import absolute_import, division, print_function
import os
from beets import util
from beets.plugins import BeetsPlugin
from beets.ui import Subcommand, print_
__author__ = 'https://github.com/MrNuggelz'
class Unimported(BeetsPlugin):
    """Provide the ``beet unimported`` command: list files under the library
    directory that are not tracked in the beets database (album art files
    are excluded as well)."""
    def __init__(self):
        super(Unimported, self).__init__()
        # Extensions (configured without the leading dot) to skip entirely.
        self.config.add(
            {
                'ignore_extensions': []
            }
        )
    def commands(self):
        def print_unimported(lib, opts, args):
            # Paths from os.walk over lib.directory are bytes, so encode the
            # configured extensions to match.
            ignore_exts = tuple(
                ('.' + x).encode()
                for x in self.config['ignore_extensions'].as_str_seq())
            # endswith() accepts a tuple of suffixes: one C-level call per
            # file instead of building a list with any([...]).  An empty
            # tuple matches nothing, which preserves the old behavior when
            # no extensions are configured.
            in_folder = set(
                os.path.join(r, file) for r, d, f in os.walk(lib.directory)
                for file in f if not file.endswith(ignore_exts))
            in_library = set(x.path for x in lib.items())
            art_files = set(x.artpath for x in lib.albums())
            for f in in_folder - in_library - art_files:
                print_(util.displayable_path(f))
        unimported = Subcommand(
            'unimported',
            help='list all files in the library folder which are not listed'
                 ' in the beets library database')
        unimported.func = print_unimported
        return [unimported]
| mit |
gundalow/ansible | test/units/modules/test_yum.py | 35 | 9941 | # -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat import unittest
from ansible.modules.yum import YumModule
yum_plugin_load_error = """
Plugin "product-id" can't be imported
Plugin "search-disabled-repos" can't be imported
Plugin "subscription-manager" can't be imported
Plugin "product-id" can't be imported
Plugin "search-disabled-repos" can't be imported
Plugin "subscription-manager" can't be imported
"""
# from https://github.com/ansible/ansible/issues/20608#issuecomment-276106505
wrapped_output_1 = """
Загружены модули: fastestmirror
Loading mirror speeds from cached hostfile
* base: mirror.h1host.ru
* extras: mirror.h1host.ru
* updates: mirror.h1host.ru
vms-agent.x86_64 0.0-9 dev
"""
# from https://github.com/ansible/ansible/issues/20608#issuecomment-276971275
wrapped_output_2 = """
Загружены модули: fastestmirror
Loading mirror speeds from cached hostfile
* base: mirror.corbina.net
* extras: mirror.corbina.net
* updates: mirror.corbina.net
empty-empty-empty-empty-empty-empty-empty-empty-empty-empty-empty-empty-empty-empty-empty-empty-empty-empty-empty-empty.x86_64
0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.1-0
addons
libtiff.x86_64 4.0.3-27.el7_3 updates
"""
# From https://github.com/ansible/ansible/issues/20608#issuecomment-276698431
wrapped_output_3 = """
Loaded plugins: fastestmirror, langpacks
Loading mirror speeds from cached hostfile
ceph.x86_64 1:11.2.0-0.el7 ceph
ceph-base.x86_64 1:11.2.0-0.el7 ceph
ceph-common.x86_64 1:11.2.0-0.el7 ceph
ceph-mds.x86_64 1:11.2.0-0.el7 ceph
ceph-mon.x86_64 1:11.2.0-0.el7 ceph
ceph-osd.x86_64 1:11.2.0-0.el7 ceph
ceph-selinux.x86_64 1:11.2.0-0.el7 ceph
libcephfs1.x86_64 1:11.0.2-0.el7 ceph
librados2.x86_64 1:11.2.0-0.el7 ceph
libradosstriper1.x86_64 1:11.2.0-0.el7 ceph
librbd1.x86_64 1:11.2.0-0.el7 ceph
librgw2.x86_64 1:11.2.0-0.el7 ceph
python-cephfs.x86_64 1:11.2.0-0.el7 ceph
python-rados.x86_64 1:11.2.0-0.el7 ceph
python-rbd.x86_64 1:11.2.0-0.el7 ceph
"""
# from https://github.com/ansible/ansible-modules-core/issues/4318#issuecomment-251416661
wrapped_output_4 = """
ipxe-roms-qemu.noarch 20160127-1.git6366fa7a.el7
rhelosp-9.0-director-puddle
quota.x86_64 1:4.01-11.el7_2.1 rhelosp-rhel-7.2-z
quota-nls.noarch 1:4.01-11.el7_2.1 rhelosp-rhel-7.2-z
rdma.noarch 7.2_4.1_rc6-2.el7 rhelosp-rhel-7.2-z
screen.x86_64 4.1.0-0.23.20120314git3c2946.el7_2
rhelosp-rhel-7.2-z
sos.noarch 3.2-36.el7ost.2 rhelosp-9.0-puddle
sssd-client.x86_64 1.13.0-40.el7_2.12 rhelosp-rhel-7.2-z
"""
# A 'normal-ish' yum check-update output, without any wrapped lines
unwrapped_output_rhel7 = """
Loaded plugins: etckeeper, product-id, search-disabled-repos, subscription-
: manager
This system is not registered to Red Hat Subscription Management. You can use subscription-manager to register.
NetworkManager-openvpn.x86_64 1:1.2.6-1.el7 epel
NetworkManager-openvpn-gnome.x86_64 1:1.2.6-1.el7 epel
cabal-install.x86_64 1.16.1.0-2.el7 epel
cgit.x86_64 1.1-1.el7 epel
python34-libs.x86_64 3.4.5-3.el7 epel
python34-test.x86_64 3.4.5-3.el7 epel
python34-tkinter.x86_64 3.4.5-3.el7 epel
python34-tools.x86_64 3.4.5-3.el7 epel
qgit.x86_64 2.6-4.el7 epel
rdiff-backup.x86_64 1.2.8-12.el7 epel
stoken-libs.x86_64 0.91-1.el7 epel
xlockmore.x86_64 5.49-2.el7 epel
"""
# Some wrapped obsoletes for prepending to output for testing both
wrapped_output_rhel7_obsoletes_postfix = """
Obsoleting Packages
ddashboard.x86_64 0.2.0.1-1.el7_3 mhlavink-developerdashboard
developerdashboard.x86_64 0.1.12.2-1.el7_2 @mhlavink-developerdashboard
python-bugzilla.noarch 1.2.2-3.el7_2.1 mhlavink-developerdashboard
python-bugzilla-develdashboardfixes.noarch
1.2.2-3.el7 @mhlavink-developerdashboard
python2-futures.noarch 3.0.5-1.el7 epel
python-futures.noarch 3.0.3-1.el7 @epel
python2-pip.noarch 8.1.2-5.el7 epel
python-pip.noarch 7.1.0-1.el7 @epel
python2-pyxdg.noarch 0.25-6.el7 epel
pyxdg.noarch 0.25-5.el7 @epel
python2-simplejson.x86_64 3.10.0-1.el7 epel
python-simplejson.x86_64 3.3.3-1.el7 @epel
Security: kernel-3.10.0-327.28.2.el7.x86_64 is an installed security update
Security: kernel-3.10.0-327.22.2.el7.x86_64 is the currently running version
"""
longname = """
Loaded plugins: fastestmirror, priorities, rhnplugin
This system is receiving updates from RHN Classic or Red Hat Satellite.
Loading mirror speeds from cached hostfile
xxxxxxxxxxxxxxxxxxxxxxxxxx.noarch
1.16-1 xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
glibc.x86_64 2.17-157.el7_3.1 xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"""
# Full fixture: the plain check-update output followed by an obsoletes block.
unwrapped_output_rhel7_obsoletes = unwrapped_output_rhel7 + wrapped_output_rhel7_obsoletes_postfix
# Names parse_check_update() is expected to extract from the fixtures above,
# split into new obsoleting packages, the packages they obsolete, and plain
# updates.
unwrapped_output_rhel7_expected_new_obsoletes_pkgs = [
    "ddashboard", "python-bugzilla", "python2-futures", "python2-pip",
    "python2-pyxdg", "python2-simplejson"
]
unwrapped_output_rhel7_expected_old_obsoletes_pkgs = [
    "developerdashboard", "python-bugzilla-develdashboardfixes",
    "python-futures", "python-pip", "pyxdg", "python-simplejson"
]
unwrapped_output_rhel7_expected_updated_pkgs = [
    "NetworkManager-openvpn", "NetworkManager-openvpn-gnome", "cabal-install",
    "cgit", "python34-libs", "python34-test", "python34-tkinter",
    "python34-tools", "qgit", "rdiff-backup", "stoken-libs", "xlockmore"
]
class TestYumUpdateCheckParse(unittest.TestCase):
    """Exercise YumModule.parse_check_update() against captured yum output,
    including locale-wrapped lines and obsoletes sections."""
    def _assert_expected(self, expected_pkgs, result):
        # Helper: result must be a dict containing exactly expected_pkgs.
        for expected_pkg in expected_pkgs:
            self.assertIn(expected_pkg, result)
        self.assertEqual(len(result), len(expected_pkgs))
        self.assertIsInstance(result, dict)
    def test_empty_output(self):
        res, obs = YumModule.parse_check_update("")
        expected_pkgs = []
        self._assert_expected(expected_pkgs, res)
    def test_longname(self):
        # Package names long enough to force yum to wrap the line.
        res, obs = YumModule.parse_check_update(longname)
        expected_pkgs = ['xxxxxxxxxxxxxxxxxxxxxxxxxx', 'glibc']
        self._assert_expected(expected_pkgs, res)
    def test_plugin_load_error(self):
        # Plugin load errors must be ignored, not parsed as packages.
        res, obs = YumModule.parse_check_update(yum_plugin_load_error)
        expected_pkgs = []
        self._assert_expected(expected_pkgs, res)
    def test_wrapped_output_1(self):
        res, obs = YumModule.parse_check_update(wrapped_output_1)
        expected_pkgs = ["vms-agent"]
        self._assert_expected(expected_pkgs, res)
    def test_wrapped_output_2(self):
        res, obs = YumModule.parse_check_update(wrapped_output_2)
        expected_pkgs = ["empty-empty-empty-empty-empty-empty-empty-empty-empty-empty-empty-empty-empty-empty-empty-empty-empty-empty-empty-empty",
                         "libtiff"]
        self._assert_expected(expected_pkgs, res)
    def test_wrapped_output_3(self):
        res, obs = YumModule.parse_check_update(wrapped_output_3)
        expected_pkgs = ["ceph", "ceph-base", "ceph-common", "ceph-mds",
                         "ceph-mon", "ceph-osd", "ceph-selinux", "libcephfs1",
                         "librados2", "libradosstriper1", "librbd1", "librgw2",
                         "python-cephfs", "python-rados", "python-rbd"]
        self._assert_expected(expected_pkgs, res)
    def test_wrapped_output_4(self):
        res, obs = YumModule.parse_check_update(wrapped_output_4)
        expected_pkgs = ["ipxe-roms-qemu", "quota", "quota-nls", "rdma", "screen",
                         "sos", "sssd-client"]
        self._assert_expected(expected_pkgs, res)
    def test_wrapped_output_rhel7(self):
        res, obs = YumModule.parse_check_update(unwrapped_output_rhel7)
        self._assert_expected(unwrapped_output_rhel7_expected_updated_pkgs, res)
    def test_wrapped_output_rhel7_obsoletes(self):
        # Obsoleting packages are reported as updates; the packages they
        # obsolete come back in the second (obsoletes) mapping.
        res, obs = YumModule.parse_check_update(unwrapped_output_rhel7_obsoletes)
        self._assert_expected(
            unwrapped_output_rhel7_expected_updated_pkgs + unwrapped_output_rhel7_expected_new_obsoletes_pkgs,
            res
        )
        self._assert_expected(unwrapped_output_rhel7_expected_old_obsoletes_pkgs, obs)
| gpl-3.0 |
unofficial-opensource-apple/llvmgcc42 | llvmCore/utils/lit/lit/Test.py | 32 | 2299 | import os
# Test results.
class TestResult:
    """A named test outcome; isFailure marks outcomes that should make the
    overall run be reported as failing."""
    def __init__(self, name, isFailure):
        self.name = name
        self.isFailure = isFailure
# Singleton result codes shared by all tests.  The second argument is
# isFailure: XFAIL (expected failure) passes, XPASS (unexpected pass) fails.
PASS = TestResult('PASS', False)
XFAIL = TestResult('XFAIL', False)
FAIL = TestResult('FAIL', True)
XPASS = TestResult('XPASS', True)
UNRESOLVED = TestResult('UNRESOLVED', True)
UNSUPPORTED = TestResult('UNSUPPORTED', False)
# Test classes.
class TestFormat:
    """TestFormat - Test information provider."""
    def __init__(self, name):
        # Human-readable name of this format.
        self.name = name
class TestSuite:
    """TestSuite - Information on a group of tests.
    A test suite groups together a set of logically related tests.
    """
    def __init__(self, name, source_root, exec_root, config):
        self.name = name
        # Root of the suite's test sources on disk.
        self.source_root = source_root
        # Root where the suite's tests execute (build tree).
        self.exec_root = exec_root
        # The test suite configuration.
        self.config = config
    def getSourcePath(self, components):
        """Join *components* under the suite's source root."""
        return os.path.join(self.source_root, *components)
    def getExecPath(self, components):
        """Join *components* under the suite's execution root."""
        return os.path.join(self.exec_root, *components)
class Test:
    """Test - Information on a single test instance."""
    def __init__(self, suite, path_in_suite, config):
        self.suite = suite
        self.path_in_suite = path_in_suite
        self.config = config
        # Outcome fields, populated by setResult() once the test has run.
        self.result = None
        self.output = None
        self.elapsed = None
        # The repeat index of this test, or None.
        self.index = None
    def copyWithIndex(self, index):
        """Return a shallow copy of this test tagged with a repeat index."""
        import copy
        duplicate = copy.copy(self)
        duplicate.index = index
        return duplicate
    def setResult(self, result, output, elapsed):
        """Record the outcome; a test's result may only be set once."""
        assert self.result is None, "Test result already set!"
        self.result = result
        self.output = output
        self.elapsed = elapsed
    def getFullName(self):
        """Qualified name: '<suite config name> :: <path/in/suite>'."""
        return self.suite.config.name + ' :: ' + '/'.join(self.path_in_suite)
    def getSourcePath(self):
        return self.suite.getSourcePath(self.path_in_suite)
    def getExecPath(self):
        return self.suite.getExecPath(self.path_in_suite)
| gpl-2.0 |
piotroxp/scibibscan | scib/lib/python3.5/site-packages/setuptools/sandbox.py | 259 | 13925 | import os
import sys
import tempfile
import operator
import functools
import itertools
import re
import contextlib
import pickle
import pkg_resources
if sys.platform.startswith('java'):
import org.python.modules.posix.PosixModule as _os
else:
_os = sys.modules[os.name]
try:
_file = file
except NameError:
_file = None
_open = open
from distutils.errors import DistutilsError
from pkg_resources import working_set
from setuptools import compat
from setuptools.compat import builtins
__all__ = [
"AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup",
]
def _execfile(filename, globals, locals=None):
"""
Python 3 implementation of execfile.
"""
mode = 'rb'
with open(filename, mode) as stream:
script = stream.read()
# compile() function in Python 2.6 and 3.1 requires LF line endings.
if sys.version_info[:2] < (2, 7) or sys.version_info[:2] >= (3, 0) and sys.version_info[:2] < (3, 2):
script = script.replace(b'\r\n', b'\n')
script = script.replace(b'\r', b'\n')
if locals is None:
locals = globals
code = compile(script, filename, 'exec')
exec(code, globals, locals)
@contextlib.contextmanager
def save_argv(repl=None):
    """Snapshot sys.argv, optionally replacing it with *repl* for the
    duration of the context; the snapshot is yielded and restored on exit."""
    snapshot = sys.argv[:]
    if repl is not None:
        sys.argv[:] = repl
    try:
        yield snapshot
    finally:
        sys.argv[:] = snapshot
@contextlib.contextmanager
def save_path():
    """Snapshot sys.path and restore it when the context exits, discarding
    any entries added inside the block."""
    snapshot = sys.path[:]
    try:
        yield snapshot
    finally:
        sys.path[:] = snapshot
@contextlib.contextmanager
def override_temp(replacement):
    """
    Monkey-patch tempfile.tempdir with replacement, ensuring it exists
    """
    # Create the directory up front so tempfile never falls back elsewhere.
    if not os.path.isdir(replacement):
        os.makedirs(replacement)
    previous = tempfile.tempdir
    tempfile.tempdir = replacement
    try:
        yield
    finally:
        tempfile.tempdir = previous
@contextlib.contextmanager
def pushd(target):
    """chdir into *target* for the duration of the context, yielding the
    previous working directory and returning to it afterwards."""
    previous = os.getcwd()
    os.chdir(target)
    try:
        yield previous
    finally:
        os.chdir(previous)
class UnpickleableException(Exception):
    """
    An exception representing another Exception that could not be pickled.
    """
    @classmethod
    def dump(cls, type, exc):
        """
        Always return a dumped (pickled) type and exc. If exc can't be pickled,
        wrap it in UnpickleableException first.
        """
        try:
            return pickle.dumps(type), pickle.dumps(exc)
        except Exception:
            # Recurse exactly once more with a picklable stand-in that
            # carries repr(exc); cls itself always pickles.
            return cls.dump(cls, cls(repr(exc)))
class ExceptionSaver:
    """
    A Context Manager that will save an exception, serialized, and restore it
    later.
    """
    def __enter__(self):
        return self
    def __exit__(self, type, exc, tb):
        if not exc:
            # Nothing raised inside the block; nothing to capture.
            return
        # dump the exception
        self._saved = UnpickleableException.dump(type, exc)
        self._tb = tb
        # suppress the exception
        return True
    def resume(self):
        "restore and re-raise any exception"
        if '_saved' not in vars(self):
            # __exit__ never captured anything.
            return
        type, exc = map(pickle.loads, self._saved)
        compat.reraise(type, exc, self._tb)
@contextlib.contextmanager
def save_modules():
    """
    Context in which imported modules are saved.
    Translates exceptions internal to the context into the equivalent exception
    outside the context.
    """
    saved = sys.modules.copy()
    # Capture (don't propagate) any exception from the block so sys.modules
    # can be restored first; it is re-raised by saved_exc.resume() below.
    with ExceptionSaver() as saved_exc:
        yield saved
    sys.modules.update(saved)
    # remove any modules imported since
    del_modules = (
        mod_name for mod_name in sys.modules
        if mod_name not in saved
        # exclude any encodings modules. See #285
        and not mod_name.startswith('encodings.')
    )
    _clear_modules(del_modules)
    saved_exc.resume()
def _clear_modules(module_names):
for mod_name in list(module_names):
del sys.modules[mod_name]
@contextlib.contextmanager
def save_pkg_resources_state():
    # Snapshot pkg_resources' global state and restore it on exit, even if
    # the body raises.
    saved = pkg_resources.__getstate__()
    try:
        yield saved
    finally:
        pkg_resources.__setstate__(saved)
@contextlib.contextmanager
def setup_context(setup_dir):
    # Compose every piece of interpreter state that running a setup script
    # can disturb; each is saved here and restored (in LIFO order) on exit.
    temp_dir = os.path.join(setup_dir, 'temp')
    with save_pkg_resources_state():
        with save_modules():
            hide_setuptools()
            with save_path():
                with save_argv():
                    with override_temp(temp_dir):
                        with pushd(setup_dir):
                            # ensure setuptools commands are available
                            __import__('setuptools')
                            yield
def _needs_hiding(mod_name):
"""
>>> _needs_hiding('setuptools')
True
>>> _needs_hiding('pkg_resources')
True
>>> _needs_hiding('setuptools_plugin')
False
>>> _needs_hiding('setuptools.__init__')
True
>>> _needs_hiding('distutils')
True
"""
pattern = re.compile('(setuptools|pkg_resources|distutils)(\.|$)')
return bool(pattern.match(mod_name))
def hide_setuptools():
    """
    Remove references to setuptools' modules from sys.modules to allow the
    invocation to import the most appropriate setuptools. This technique is
    necessary to avoid issues such as #315 where setuptools upgrading itself
    would fail to find a function declared in the metadata.
    """
    doomed = [name for name in sys.modules if _needs_hiding(name)]
    _clear_modules(doomed)
def run_setup(setup_script, args):
    """Run a distutils setup script, sandboxed in its directory"""
    setup_dir = os.path.abspath(os.path.dirname(setup_script))
    with setup_context(setup_dir):
        try:
            # Make the script believe it was invoked as 'python setup.py args'.
            sys.argv[:] = [setup_script]+list(args)
            sys.path.insert(0, setup_dir)
            # reset to include setup dir, w/clean callback list
            working_set.__init__()
            working_set.callbacks.append(lambda dist:dist.activate())
            def runner():
                ns = dict(__file__=setup_script, __name__='__main__')
                _execfile(setup_script, ns)
            # Restrict the script's filesystem writes to setup_dir.
            DirectorySandbox(setup_dir).run(runner)
        except SystemExit as v:
            # Only propagate SystemExit carrying a truthy (error) status.
            if v.args and v.args[0]:
                raise
            # Normal exit, just return
class AbstractSandbox:
    """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts"""
    # NOTE: the _mk_* helpers below run at class-definition time; they
    # generate wrapper methods for the os functions available on this
    # platform and install them via locals().
    _active = False
    def __init__(self):
        # All public os attributes this sandbox provides a wrapper for.
        self._attrs = [
            name for name in dir(_os)
            if not name.startswith('_') and hasattr(self,name)
        ]
    def _copy(self, source):
        # Point the real 'os' module's functions at source's versions.
        for name in self._attrs:
            setattr(os, name, getattr(source,name))
    def run(self, func):
        """Run 'func' under os sandboxing"""
        try:
            self._copy(self)
            if _file:
                builtins.file = self._file
            builtins.open = self._open
            self._active = True
            return func()
        finally:
            # Always restore the pristine os/open, even if func() raised.
            self._active = False
            if _file:
                builtins.file = _file
            builtins.open = _open
            self._copy(_os)
    def _mk_dual_path_wrapper(name):
        # Wrapper factory for os functions taking two paths (src, dst).
        original = getattr(_os,name)
        def wrap(self,src,dst,*args,**kw):
            if self._active:
                src,dst = self._remap_pair(name,src,dst,*args,**kw)
            return original(src,dst,*args,**kw)
        return wrap
    for name in ["rename", "link", "symlink"]:
        if hasattr(_os,name): locals()[name] = _mk_dual_path_wrapper(name)
    def _mk_single_path_wrapper(name, original=None):
        # Wrapper factory for os functions taking a single leading path.
        original = original or getattr(_os,name)
        def wrap(self,path,*args,**kw):
            if self._active:
                path = self._remap_input(name,path,*args,**kw)
            return original(path,*args,**kw)
        return wrap
    if _file:
        _file = _mk_single_path_wrapper('file', _file)
    _open = _mk_single_path_wrapper('open', _open)
    for name in [
        "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir",
        "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat",
        "startfile", "mkfifo", "mknod", "pathconf", "access"
    ]:
        if hasattr(_os,name): locals()[name] = _mk_single_path_wrapper(name)
    def _mk_single_with_return(name):
        # Like _mk_single_path_wrapper, but the return value is also a path
        # and therefore gets remapped while the sandbox is active.
        original = getattr(_os,name)
        def wrap(self,path,*args,**kw):
            if self._active:
                path = self._remap_input(name,path,*args,**kw)
                return self._remap_output(name, original(path,*args,**kw))
            return original(path,*args,**kw)
        return wrap
    for name in ['readlink', 'tempnam']:
        if hasattr(_os,name): locals()[name] = _mk_single_with_return(name)
    def _mk_query(name):
        # Wrapper factory for no-path-argument functions whose result is a
        # path (only the output is remapped).
        original = getattr(_os,name)
        def wrap(self,*args,**kw):
            retval = original(*args,**kw)
            if self._active:
                return self._remap_output(name, retval)
            return retval
        return wrap
    for name in ['getcwd', 'tmpnam']:
        if hasattr(_os,name): locals()[name] = _mk_query(name)
    def _validate_path(self,path):
        """Called to remap or validate any path, whether input or output"""
        return path
    def _remap_input(self,operation,path,*args,**kw):
        """Called for path inputs"""
        return self._validate_path(path)
    def _remap_output(self,operation,path):
        """Called for path outputs"""
        return self._validate_path(path)
    def _remap_pair(self,operation,src,dst,*args,**kw):
        """Called for path pairs like rename, link, and symlink operations"""
        return (
            self._remap_input(operation+'-from',src,*args,**kw),
            self._remap_input(operation+'-to',dst,*args,**kw)
        )
# Paths that remain writable even inside the sandbox.
if hasattr(os, 'devnull'):
    _EXCEPTIONS = [os.devnull,]
else:
    _EXCEPTIONS = []
try:
    # pywin32 generates COM wrapper caches at runtime; exempt its output
    # directory so sandboxed scripts can still use win32com.
    from win32com.client.gencache import GetGeneratePath
    _EXCEPTIONS.append(GetGeneratePath())
    del GetGeneratePath
except ImportError:
    # it appears pywin32 is not installed, so no need to exclude.
    pass
class DirectorySandbox(AbstractSandbox):
    """Restrict operations to a single subdirectory - pseudo-chroot"""

    # os operations that may create or modify filesystem state; only
    # these are checked against the sandbox boundary.
    write_ops = dict.fromkeys([
        "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir",
        "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam",
    ])

    _exception_patterns = [
        # Allow lib2to3 to attempt to save a pickled grammar object (#121)
        # (raw string: '\.' in a plain literal is an invalid escape and
        # raises a SyntaxWarning on modern Pythons)
        r'.*lib2to3.*\.pickle$',
    ]
    "exempt writing to paths that match the pattern"

    def __init__(self, sandbox, exceptions=_EXCEPTIONS):
        # Normalize the sandbox root once; _ok() compares realpaths
        # against self._sandbox / self._prefix.
        self._sandbox = os.path.normcase(os.path.realpath(sandbox))
        self._prefix = os.path.join(self._sandbox,'')
        self._exceptions = [
            os.path.normcase(os.path.realpath(path))
            for path in exceptions
        ]
        AbstractSandbox.__init__(self)

    def _violation(self, operation, *args, **kw):
        """Raise SandboxViolation describing the attempted operation."""
        raise SandboxViolation(operation, args, kw)

    if _file:
        def _file(self, path, mode='r', *args, **kw):
            # Any writing mode must stay inside the sandbox.
            if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
                self._violation("file", path, mode, *args, **kw)
            return _file(path,mode,*args,**kw)

    def _open(self, path, mode='r', *args, **kw):
        # Same write-mode check for the open() builtin.
        if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
            self._violation("open", path, mode, *args, **kw)
        return _open(path,mode,*args,**kw)

    def tmpnam(self):
        # tmpnam produces names outside the sandbox; always forbidden.
        self._violation("tmpnam")

    def _ok(self, path):
        """Return True when 'path' resolves inside the sandbox or an exemption."""
        active = self._active
        try:
            # Temporarily deactivate remapping so realpath() runs against
            # the real os functions.
            self._active = False
            realpath = os.path.normcase(os.path.realpath(path))
            return (
                self._exempted(realpath)
                or realpath == self._sandbox
                or realpath.startswith(self._prefix)
            )
        finally:
            self._active = active

    def _exempted(self, filepath):
        # Exempt when the path starts with a known-writable root or
        # matches one of the regex exception patterns.
        start_matches = (
            filepath.startswith(exception)
            for exception in self._exceptions
        )
        pattern_matches = (
            re.match(pattern, filepath)
            for pattern in self._exception_patterns
        )
        candidates = itertools.chain(start_matches, pattern_matches)
        return any(candidates)

    def _remap_input(self, operation, path, *args, **kw):
        """Called for path inputs"""
        if operation in self.write_ops and not self._ok(path):
            self._violation(operation, os.path.realpath(path), *args, **kw)
        return path

    def _remap_pair(self, operation, src, dst, *args, **kw):
        """Called for path pairs like rename, link, and symlink operations"""
        if not self._ok(src) or not self._ok(dst):
            self._violation(operation, src, dst, *args, **kw)
        return (src,dst)

    def open(self, file, flags, mode=0o777, *args, **kw):
        """Called for low-level os.open()"""
        if flags & WRITE_FLAGS and not self._ok(file):
            self._violation("os.open", file, flags, mode, *args, **kw)
        return _os.open(file,flags,mode, *args, **kw)
# Bitmask of the os.open flags that imply writing; flags that are absent
# on this platform contribute 0.
WRITE_FLAGS = functools.reduce(
    operator.or_, [getattr(_os, a, 0) for a in
        "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()]
)
class SandboxViolation(DistutilsError):
    """A setup script attempted to modify the filesystem outside the sandbox"""
    def __str__(self):
        # self.args is (operation, args, kw) as raised by
        # DirectorySandbox._violation().
        return """SandboxViolation: %s%r %s
The package setup script has attempted to modify files on your system
that are not within the EasyInstall build area, and has been aborted.
This package cannot be safely installed by EasyInstall, and may not
support alternate installation locations even if you run its setup
script by hand. Please inform the package's author and the EasyInstall
maintainers to find out if a fix or workaround is available.""" % self.args
#
| mit |
JacobCallahan/robottelo | tests/foreman/ui/test_computeresource_ec2.py | 2 | 6885 | """Test for Compute Resource UI
:Requirement: Computeresource RHV
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: ComputeResources-EC2
:Assignee: lhellebr
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
import pytest
from fauxfactory import gen_string
from nailgun import entities
from robottelo.config import settings
from robottelo.constants import AWS_EC2_FLAVOR_T2_MICRO
from robottelo.constants import COMPUTE_PROFILE_LARGE
from robottelo.constants import DEFAULT_LOC
from robottelo.constants import EC2_REGION_CA_CENTRAL_1
from robottelo.constants import FOREMAN_PROVIDERS
# Skip every test in this module unless the 'ec2' settings section is set.
pytestmark = [pytest.mark.skip_if_not_set('ec2')]
@pytest.fixture(scope='module')
def module_org():
    """Create one Organization shared by all tests in this module."""
    return entities.Organization().create()
@pytest.fixture(scope='module')
def module_loc():
    """Look up the default Location by name and return it fully read."""
    search_results = entities.Location().search(query={'search': f'name="{DEFAULT_LOC}"'})
    return entities.Location(id=search_results[0].id).read()
@pytest.fixture(scope='module')
def module_ec2_settings():
    """Collect the configured EC2 connection settings into a plain dict."""
    ec2 = settings.ec2
    return {
        'access_key': ec2.access_key,
        'secret_key': ec2.secret_key,
        'region': ec2.region,
        'image': ec2.image,
        'availability_zone': ec2.availability_zone,
        'subnet': ec2.subnet,
        'security_groups': ec2.security_groups,
        'managed_ip': ec2.managed_ip,
    }
@pytest.mark.tier2
@pytest.mark.skip_if_not_set('http_proxy')
def test_positive_default_end_to_end_with_custom_profile(
    session, module_org, module_loc, module_ec2_settings
):
    """Create EC2 compute resource with default properties and apply it's basic functionality.
    :id: 33f80a8f-2ecf-4f15-b0c3-aab5fe0ac8d3
    :Steps:
        1. Create an EC2 compute resource with default properties and taxonomies.
        2. Update the compute resource name and add new taxonomies.
        3. Associate compute profile with custom properties to ec2 compute resource
        4. Delete the compute resource.
    :expectedresults: The EC2 compute resource is created, updated, compute profile associated and
        deleted.
    :CaseLevel: Integration
    :BZ: 1451626
    :CaseImportance: High
    """
    cr_name = gen_string('alpha')
    new_cr_name = gen_string('alpha')
    cr_description = gen_string('alpha')
    new_org = entities.Organization().create()
    new_loc = entities.Location().create()
    # HTTP proxy entity attached to the compute resource under test.
    http_proxy = entities.HTTPProxy(
        name=gen_string('alpha', 15),
        url=settings.http_proxy.auth_proxy_url,
        username=settings.http_proxy.username,
        password=settings.http_proxy.password,
        organization=[module_org.id],
        location=[module_loc.id],
    ).create()
    with session:
        # Step 1: create the compute resource with default properties.
        session.computeresource.create(
            {
                'name': cr_name,
                'description': cr_description,
                'provider': FOREMAN_PROVIDERS['ec2'],
                'provider_content.http_proxy.value': http_proxy.name,
                'provider_content.access_key': module_ec2_settings['access_key'],
                'provider_content.secret_key': module_ec2_settings['secret_key'],
                'provider_content.region.value': module_ec2_settings['region'],
                'organizations.resources.assigned': [module_org.name],
                'locations.resources.assigned': [module_loc.name],
            }
        )
        cr_values = session.computeresource.read(cr_name)
        assert cr_values['name'] == cr_name
        assert cr_values['description'] == cr_description
        assert cr_values['provider_content']['http_proxy']['value'] == http_proxy.name
        assert cr_values['organizations']['resources']['assigned'] == [module_org.name]
        assert cr_values['locations']['resources']['assigned'] == [module_loc.name]
        # Step 2: rename the resource and attach additional taxonomies.
        session.computeresource.edit(
            cr_name,
            {
                'name': new_cr_name,
                'organizations.resources.assigned': [new_org.name],
                'locations.resources.assigned': [new_loc.name],
            },
        )
        # The old name must no longer be searchable after the rename.
        assert not session.computeresource.search(cr_name)
        cr_values = session.computeresource.read(new_cr_name)
        assert cr_values['name'] == new_cr_name
        assert set(cr_values['organizations']['resources']['assigned']) == {
            module_org.name,
            new_org.name,
        }
        assert set(cr_values['locations']['resources']['assigned']) == {
            module_loc.name,
            new_loc.name,
        }
        # Step 3: associate a compute profile with custom properties.
        session.computeresource.update_computeprofile(
            new_cr_name,
            COMPUTE_PROFILE_LARGE,
            {
                'provider_content.flavor': AWS_EC2_FLAVOR_T2_MICRO,
                'provider_content.availability_zone': module_ec2_settings['availability_zone'],
                'provider_content.subnet': module_ec2_settings['subnet'],
                'provider_content.security_groups.assigned': module_ec2_settings['security_groups'],
                'provider_content.managed_ip': module_ec2_settings['managed_ip'],
            },
        )
        cr_profile_values = session.computeresource.read_computeprofile(
            new_cr_name, COMPUTE_PROFILE_LARGE
        )
        assert cr_profile_values['breadcrumb'] == f'Edit {COMPUTE_PROFILE_LARGE}'
        assert cr_profile_values['compute_profile'] == COMPUTE_PROFILE_LARGE
        assert cr_profile_values['compute_resource'] == '{} ({}-{})'.format(
            new_cr_name, module_ec2_settings['region'], FOREMAN_PROVIDERS['ec2']
        )
        assert (
            cr_profile_values['provider_content']['managed_ip'] == module_ec2_settings['managed_ip']
        )
        assert cr_profile_values['provider_content']['flavor'] == AWS_EC2_FLAVOR_T2_MICRO
        # Step 4: delete the compute resource and confirm it is gone.
        session.computeresource.delete(new_cr_name)
        assert not session.computeresource.search(new_cr_name)
@pytest.mark.tier2
def test_positive_create_ec2_with_custom_region(session, module_ec2_settings):
    """Create a new ec2 compute resource with custom region
    :id: aeb0c52e-34dd-4574-af34-a6d8721724a7
    :customerscenario: true
    :expectedresults: An ec2 compute resource is created
        successfully.
    :BZ: 1456942
    :CaseLevel: Integration
    :CaseImportance: Critical
    """
    cr_name = gen_string('alpha')
    with session:
        # Create with a region constant instead of the settings-provided one.
        session.computeresource.create(
            {
                'name': cr_name,
                'provider': FOREMAN_PROVIDERS['ec2'],
                'provider_content.access_key': module_ec2_settings['access_key'],
                'provider_content.secret_key': module_ec2_settings['secret_key'],
                'provider_content.region.value': EC2_REGION_CA_CENTRAL_1,
            }
        )
        # Read only the 'name' widget to avoid loading the whole form.
        cr_values = session.computeresource.read(cr_name, widget_names='name')
        assert cr_values['name'] == cr_name
| gpl-3.0 |
x10an14/overtime-calculator | overtime_calculator/__init__.py | 1 | 1295 | from functools import wraps
import logging
import jwt
# Default strptime/strftime format used for date-time strings.
default_parse_fmt = "%d-%m-%Y %H:%M:%S"
def get_secret():
    """Return the JWT signing secret."""
    # NOTE(review): hard-coded signing secret checked into source control;
    # this should come from configuration/environment in production.
    return 'sflkjsdjkfd'
def token_verify(token):
    """Decode *token* with the shared secret.

    Returns the decoded claims dict on success, or ``False`` when the
    token cannot be decoded.
    """
    secret = get_secret()
    try:
        # PyJWT's decode() takes the *plural* ``algorithms`` whitelist;
        # ``algorithm=`` is only an encode() parameter, so passing it here
        # left the accepted-algorithm list empty (an error on PyJWT >= 2).
        return jwt.decode(token, secret, algorithms=['HS256'])
    except jwt.DecodeError:
        return False
def log_function_entry_and_exit(decorated_function):
    '''
    Function decorator logging time spent.
    Logging entry + exit (as logging.info),
    and parameters (as logging.debug) of functions.
    '''
    @wraps(decorated_function)
    def wrapper(*dec_fn_args, **dec_fn_kwargs):
        # Log function entry.  Use lazy %-style arguments so the message
        # is only formatted when the level is actually enabled.
        func_name = decorated_function.__name__
        logging.info("Entering %s()...", func_name)
        # get function params (args and kwargs); co_varnames starts with
        # the positional parameter names, so zip() pairs them with args.
        arg_names = decorated_function.__code__.co_varnames
        params = dict(
            args=dict(zip(arg_names, dec_fn_args)),
            kwargs=dec_fn_kwargs,
        )
        logging.debug(
            "\t%s",
            ', '.join(f"{k}={v}" for k, v in params.items()),
        )
        # Execute wrapped (decorated) function:
        out = decorated_function(*dec_fn_args, **dec_fn_kwargs)
        logging.info("Done running %s()!", func_name)
        return out
    return wrapper
| mit |
akostrikov/hadoop-1.2.1-in-action | src/contrib/hod/hodlib/GridServices/mapred.py | 182 | 8167 | #Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
"""define MapReduce as subclass of Service"""
# -*- python -*-
import os, copy, time
from service import *
from hodlib.Hod.nodePool import *
from hodlib.Common.desc import CommandDesc
from hodlib.Common.util import get_exception_string, parseEquals
class MapReduceExternal(MasterSlave):
  """dummy proxy to external MapReduce instance"""
  def __init__(self, serviceDesc, workDirs, version):
    MasterSlave.__init__(self, serviceDesc, workDirs,None)
    # The external jobtracker already exists, so the "master" counts as
    # launched and initialized from the start.
    self.launchedMaster = True
    self.masterInitialized = True
    self.version = version
  def getMasterRequest(self):
    # No node needs to be provisioned for an external jobtracker.
    return None
  def getMasterCommands(self, serviceDict):
    # Nothing to launch; the service is managed externally.
    return []
  def getAdminCommands(self, serviceDict):
    return []
  def getWorkerCommands(self, serviceDict):
    return []
  def getMasterAddrs(self):
    # host:port of the external jobtracker, taken from the service config.
    attrs = self.serviceDesc.getfinalAttrs()
    addr = attrs['mapred.job.tracker']
    return [addr]
  def needsMore(self):
    return 0
  def needsLess(self):
    return 0
  def setMasterParams(self, dict):
    # Record the jobtracker endpoint reported at launch time.
    # NOTE(review): this branch writes via self.serviceDesc[...] while the
    # version<16 branch writes via self.serviceDesc.dict[...] -- verify
    # both spellings reach the same mapping.
    self.serviceDesc['final-attrs']['mapred.job.tracker'] = "%s:%s" % (dict['host'],
                                                                      dict['tracker_port'])
    if self.version < 16:
      self.serviceDesc.dict['final-attrs']['mapred.job.tracker.info.port'] = \
                                      str(self.serviceDesc.dict['info_port'])
    else:
      # After Hadoop-2185
      self.serviceDesc['final-attrs']['mapred.job.tracker.http.address'] = \
        "%s:%s" %(dict['host'], dict['info_port'])
  def getInfoAddrs(self):
    # Address of the jobtracker's web UI, version-dependent.
    attrs = self.serviceDesc.getfinalAttrs()
    if self.version < 16:
      addr = attrs['mapred.job.tracker']
      k,v = addr.split( ":")
      infoaddr = k + ':' + attrs['mapred.job.tracker.info.port']
    else:
      # After Hadoop-2185
      # Note: earlier,we never respected mapred.job.tracker.http.address
      infoaddr = attrs['mapred.job.tracker.http.address']
    return [infoaddr]
class MapReduce(MasterSlave):
  """HOD-provisioned MapReduce service (one jobtracker + tasktrackers)."""
  def __init__(self, serviceDesc, workDirs,required_node, version,
               workers_per_ring = 1):
    MasterSlave.__init__(self, serviceDesc, workDirs,required_node)
    # Filled in by setMasterNodes()/setMasterParams() after launch.
    self.masterNode = None
    self.masterAddr = None
    self.infoAddr = None
    self.workers = []
    self.required_node = required_node
    self.version = version
    # Number of tasktracker processes launched per hodring.
    self.workers_per_ring = workers_per_ring
  def isLaunchable(self, serviceDict):
    # The jobtracker can only start once HDFS's master is initialized.
    hdfs = serviceDict['hdfs']
    if (hdfs.isMasterInitialized()):
      return True
    return False
  def getMasterRequest(self):
    # One node, no attribute constraints, non-exclusive.
    req = NodeRequest(1, [], False)
    return req
  def getMasterCommands(self, serviceDict):
    hdfs = serviceDict['hdfs']
    cmdDesc = self._getJobTrackerCommand(hdfs)
    return [cmdDesc]
  def getAdminCommands(self, serviceDict):
    return []
  def getWorkerCommands(self, serviceDict):
    # One tasktracker command per worker slot on the ring.
    hdfs = serviceDict['hdfs']
    workerCmds = []
    for id in range(1, self.workers_per_ring + 1):
      workerCmds.append(self._getTaskTrackerCommand(str(id), hdfs))
    return workerCmds
  def setMasterNodes(self, list):
    node = list[0]
    self.masterNode = node
  def getMasterAddrs(self):
    return [self.masterAddr]
  def getInfoAddrs(self):
    return [self.infoAddr]
  def getWorkers(self):
    return self.workers
  def requiredNode(self):
    # NOTE(review): returns self.required_host while __init__ stores
    # self.required_node -- verify the base class defines required_host.
    return self.required_host
  def setMasterParams(self, list):
    # Parse the 'key=value' strings reported by the launched jobtracker
    # and derive the master/info addresses from them.
    dict = self._parseEquals(list)
    self.masterAddr = dict['mapred.job.tracker']
    k,v = self.masterAddr.split(":")
    self.masterNode = k
    if self.version < 16:
      self.infoAddr = self.masterNode + ':' + dict['mapred.job.tracker.info.port']
    else:
      # After Hadoop-2185
      self.infoAddr = dict['mapred.job.tracker.http.address']
  def _parseEquals(self, list):
    return parseEquals(list)
  def _setWorkDirs(self, workDirs, envs, attrs, parentDirs, subDir):
    # Build per-node local/system/temp directories under every parent dir
    # and record them in the hadoop attribute/environment dicts.
    local = []
    system = None
    temp = None
    hadooptmpdir = None
    dfsclient = []
    for p in parentDirs:
      workDirs.append(p)
      workDirs.append(os.path.join(p, subDir))
      dir = os.path.join(p, subDir, 'mapred-local')
      local.append(dir)
      if not system:
        system = os.path.join(p, subDir, 'mapred-system')
      if not temp:
        temp = os.path.join(p, subDir, 'mapred-temp')
      if not hadooptmpdir:
        # Not used currently, generating hadooptmpdir just in case
        hadooptmpdir = os.path.join(p, subDir, 'hadoop-tmp')
      dfsclientdir = os.path.join(p, subDir, 'dfs-client')
      dfsclient.append(dfsclientdir)
      workDirs.append(dfsclientdir)
    # FIXME!! use csv
    attrs['mapred.local.dir'] = ','.join(local)
    # 'fillindir' is a placeholder -- presumably substituted later in the
    # launch flow; the computed 'system' path above is not used here.
    attrs['mapred.system.dir'] = 'fillindir'
    attrs['mapred.temp.dir'] = temp
    attrs['hadoop.tmp.dir'] = hadooptmpdir
    envs['HADOOP_ROOT_LOGGER'] = "INFO,DRFA"
  def _getJobTrackerCommand(self, hdfs):
    """Build the CommandDesc used to launch the jobtracker daemon."""
    sd = self.serviceDesc
    parentDirs = self.workDirs
    workDirs = []
    attrs = sd.getfinalAttrs().copy()
    envs = sd.getEnvs().copy()
    # 'fillinhostport'/'fillinport' are placeholders resolved at launch.
    if 'mapred.job.tracker' not in attrs:
      attrs['mapred.job.tracker'] = 'fillinhostport'
    if self.version < 16:
      if 'mapred.job.tracker.info.port' not in attrs:
        attrs['mapred.job.tracker.info.port'] = 'fillinport'
    else:
      # Addressing Hadoop-2185,
      if 'mapred.job.tracker.http.address' not in attrs:
        attrs['mapred.job.tracker.http.address'] = 'fillinhostport'
    attrs['fs.default.name'] = hdfs.getMasterAddrs()[0]
    self._setWorkDirs(workDirs, envs, attrs, parentDirs, 'mapred-jt')
    dict = { 'name' : 'jobtracker' }
    dict['version'] = self.version
    dict['program'] = os.path.join('bin', 'hadoop')
    dict['argv'] = ['jobtracker']
    dict['envs'] = envs
    dict['pkgdirs'] = sd.getPkgDirs()
    dict['workdirs'] = workDirs
    dict['final-attrs'] = attrs
    dict['attrs'] = sd.getAttrs()
    cmd = CommandDesc(dict)
    return cmd
  def _getTaskTrackerCommand(self, id, hdfs):
    """Build the CommandDesc for tasktracker number 'id'."""
    sd = self.serviceDesc
    parentDirs = self.workDirs
    workDirs = []
    attrs = sd.getfinalAttrs().copy()
    envs = sd.getEnvs().copy()
    jt = self.masterAddr
    if jt == None:
      raise ValueError, "Can't get job tracker address"
    attrs['mapred.job.tracker'] = jt
    attrs['fs.default.name'] = hdfs.getMasterAddrs()[0]
    if self.version < 16:
      if 'tasktracker.http.port' not in attrs:
        attrs['tasktracker.http.port'] = 'fillinport'
      # earlier to 16, tasktrackers always took ephemeral port 0 for
      # tasktracker.report.bindAddress
    else:
      # Adding the following. Hadoop-2185
      if 'mapred.task.tracker.report.address' not in attrs:
        attrs['mapred.task.tracker.report.address'] = 'fillinhostport'
      if 'mapred.task.tracker.http.address' not in attrs:
        attrs['mapred.task.tracker.http.address'] = 'fillinhostport'
    # unique parentDirs in case of multiple tasktrackers per hodring
    pd = []
    for dir in parentDirs:
      dir = dir + "-" + id
      pd.append(dir)
    parentDirs = pd
    # end of unique workdirs
    self._setWorkDirs(workDirs, envs, attrs, parentDirs, 'mapred-tt')
    dict = { 'name' : 'tasktracker' }
    dict['program'] = os.path.join('bin', 'hadoop')
    dict['argv'] = ['tasktracker']
    dict['envs'] = envs
    dict['pkgdirs'] = sd.getPkgDirs()
    dict['workdirs'] = workDirs
    dict['final-attrs'] = attrs
    dict['attrs'] = sd.getAttrs()
    cmd = CommandDesc(dict)
    return cmd
| apache-2.0 |
gauribhoite/personfinder | env/google_appengine/lib/django-1.5/django/contrib/markup/tests.py | 102 | 4306 | # Quick tests for the markup templatetags (django.contrib.markup)
import re
import warnings
from django.template import Template, Context
from django import test
from django.utils import unittest
from django.utils.html import escape
try:
import textile
except ImportError:
textile = None
try:
import markdown
markdown_version = getattr(markdown, "version_info", 0)
except ImportError:
markdown = None
try:
import docutils
except ImportError:
docutils = None
class Templates(test.TestCase):
    """Render each contrib.markup filter and verify its output, or the
    escaped/unchanged fallback when the library is not installed."""
    textile_content = """Paragraph 1
Paragraph 2 with "quotes" and @code@"""
    markdown_content = """Paragraph 1
## An h2"""
    rest_content = """Paragraph 1
Paragraph 2 with a link_
.. _link: http://www.example.com/"""
    def setUp(self):
        # django.contrib.markup is deprecated; silence its warnings for
        # the duration of each test.
        self.save_warnings_state()
        warnings.filterwarnings('ignore', category=DeprecationWarning, module='django.contrib.markup')
    def tearDown(self):
        self.restore_warnings_state()
    @unittest.skipUnless(textile, 'textile not installed')
    def test_textile(self):
        t = Template("{% load markup %}{{ textile_content|textile }}")
        rendered = t.render(Context({'textile_content':self.textile_content})).strip()
        self.assertEqual(rendered.replace('\t', ''), """<p>Paragraph 1</p>
<p>Paragraph 2 with “quotes” and <code>code</code></p>""")
    @unittest.skipIf(textile, 'textile is installed')
    def test_no_textile(self):
        # Without textile the filter must fall back to escaping the input.
        t = Template("{% load markup %}{{ textile_content|textile }}")
        rendered = t.render(Context({'textile_content':self.textile_content})).strip()
        self.assertEqual(rendered, escape(self.textile_content))
    @unittest.skipUnless(markdown and markdown_version >= (2,1), 'markdown >= 2.1 not installed')
    def test_markdown(self):
        t = Template("{% load markup %}{{ markdown_content|markdown }}")
        rendered = t.render(Context({'markdown_content':self.markdown_content})).strip()
        # NOTE(review): non-raw pattern; '\s' only survives as an
        # unrecognised escape (SyntaxWarning on newer Pythons) -- a raw
        # string would be safer.
        pattern = re.compile("""<p>Paragraph 1\s*</p>\s*<h2>\s*An h2</h2>""")
        self.assertTrue(pattern.match(rendered))
    @unittest.skipUnless(markdown and markdown_version >= (2,1), 'markdown >= 2.1 not installed')
    def test_markdown_attribute_disable(self):
        # With the 'safe' argument, attribute-injection syntax is inert.
        t = Template("{% load markup %}{{ markdown_content|markdown:'safe' }}")
        markdown_content = "{@onclick=alert('hi')}some paragraph"
        rendered = t.render(Context({'markdown_content':markdown_content})).strip()
        self.assertTrue('@' in rendered)
    @unittest.skipUnless(markdown and markdown_version >= (2,1), 'markdown >= 2.1 not installed')
    def test_markdown_attribute_enable(self):
        # Without 'safe', the attribute syntax is consumed by markdown.
        t = Template("{% load markup %}{{ markdown_content|markdown }}")
        markdown_content = "{@onclick=alert('hi')}some paragraph"
        rendered = t.render(Context({'markdown_content':markdown_content})).strip()
        self.assertFalse('@' in rendered)
    @unittest.skipIf(markdown, 'markdown is installed')
    def test_no_markdown(self):
        # Without markdown the content passes through unchanged.
        t = Template("{% load markup %}{{ markdown_content|markdown }}")
        rendered = t.render(Context({'markdown_content':self.markdown_content})).strip()
        self.assertEqual(rendered, self.markdown_content)
    @unittest.skipUnless(docutils, 'docutils not installed')
    def test_docutils(self):
        t = Template("{% load markup %}{{ rest_content|restructuredtext }}")
        rendered = t.render(Context({'rest_content':self.rest_content})).strip()
        # Different versions of docutils return slightly different HTML
        try:
            # Docutils v0.4 and earlier
            self.assertEqual(rendered, """<p>Paragraph 1</p>
<p>Paragraph 2 with a <a class="reference" href="http://www.example.com/">link</a></p>""")
        except AssertionError:
            # Docutils from SVN (which will become 0.5)
            self.assertEqual(rendered, """<p>Paragraph 1</p>
<p>Paragraph 2 with a <a class="reference external" href="http://www.example.com/">link</a></p>""")
    @unittest.skipIf(docutils, 'docutils is installed')
    def test_no_docutils(self):
        # Without docutils the content passes through unchanged.
        t = Template("{% load markup %}{{ rest_content|restructuredtext }}")
        rendered = t.render(Context({'rest_content':self.rest_content})).strip()
        self.assertEqual(rendered, self.rest_content)
| apache-2.0 |
philanthropy-u/edx-platform | cms/djangoapps/contentstore/views/tests/test_import_export.py | 4 | 38740 | """
Unit tests for course import and export
"""
import copy
import json
import logging
import os
import shutil
import tarfile
import tempfile
from uuid import uuid4
import ddt
import lxml
from django.conf import settings
from django.core.files.storage import FileSystemStorage
from django.test.utils import override_settings
from milestones.tests.utils import MilestonesTestCaseMixin
from mock import Mock, patch
from opaque_keys.edx.locator import LibraryLocator
from path import Path as path
from storages.backends.s3boto import S3BotoStorage
from user_tasks.models import UserTaskStatus
from contentstore.tests.test_libraries import LibraryTestCase
from contentstore.tests.utils import CourseTestCase
from contentstore.utils import reverse_course_url
from models.settings.course_metadata import CourseMetadata
from openedx.core.lib.extract_tar import safetar_extractall
from student import auth
from student.roles import CourseInstructorRole, CourseStaffRole
from util import milestones_helpers
from xmodule.contentstore.django import contentstore
from xmodule.modulestore import LIBRARY_ROOT, ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory, LibraryFactory
from xmodule.modulestore.tests.utils import SPLIT_MODULESTORE_SETUP, TEST_DATA_DIR, MongoContentstoreBuilder
from xmodule.modulestore.xml_exporter import export_course_to_xml, export_library_to_xml
from xmodule.modulestore.xml_importer import import_course_from_xml, import_library_from_xml
# Copy of the contentstore settings pointing at a per-run Mongo database,
# so parallel test runs do not collide.
TEST_DATA_CONTENTSTORE = copy.deepcopy(settings.CONTENTSTORE)
TEST_DATA_CONTENTSTORE['DOC_STORE_CONFIG']['db'] = 'test_xcontent_%s' % uuid4().hex
# NOTE(review): shadows TEST_DATA_DIR imported above from
# xmodule.modulestore.tests.utils -- verify which value is intended.
TEST_DATA_DIR = settings.COMMON_TEST_DATA_ROOT
log = logging.getLogger(__name__)
@override_settings(CONTENTSTORE=TEST_DATA_CONTENTSTORE)
class ImportEntranceExamTestCase(CourseTestCase, MilestonesTestCaseMixin):
    """
    Unit tests for importing a course with entrance exam
    """
    def setUp(self):
        # Build a tar.gz export of a minimal course that declares an
        # entrance exam; each test imports it into self.course.
        super(ImportEntranceExamTestCase, self).setUp()
        self.url = reverse_course_url('import_handler', self.course.id)
        self.content_dir = path(tempfile.mkdtemp())
        self.addCleanup(shutil.rmtree, self.content_dir)
        # Create tar test file -----------------------------------------------
        # OK course with entrance exam section:
        entrance_exam_dir = tempfile.mkdtemp(dir=self.content_dir)
        # test course being deeper down than top of tar file
        embedded_exam_dir = os.path.join(entrance_exam_dir, "grandparent", "parent")
        os.makedirs(os.path.join(embedded_exam_dir, "course"))
        os.makedirs(os.path.join(embedded_exam_dir, "chapter"))
        with open(os.path.join(embedded_exam_dir, "course.xml"), "w+") as f:
            f.write('<course url_name="2013_Spring" org="EDx" course="0.00x"/>')
        with open(os.path.join(embedded_exam_dir, "course", "2013_Spring.xml"), "w+") as f:
            f.write(
                '<course '
                'entrance_exam_enabled="true" entrance_exam_id="xyz" entrance_exam_minimum_score_pct="0.7">'
                '<chapter url_name="2015_chapter_entrance_exam"/></course>'
            )
        with open(os.path.join(embedded_exam_dir, "chapter", "2015_chapter_entrance_exam.xml"), "w+") as f:
            f.write('<chapter display_name="Entrance Exam" in_entrance_exam="true" is_entrance_exam="true"></chapter>')
        self.entrance_exam_tar = os.path.join(self.content_dir, "entrance_exam.tar.gz")
        with tarfile.open(self.entrance_exam_tar, "w:gz") as gtar:
            gtar.add(entrance_exam_dir)
    def test_import_existing_entrance_exam_course(self):
        """
        Check that course is imported successfully as an entrance exam.
        """
        course = self.store.get_course(self.course.id)
        self.assertIsNotNone(course)
        self.assertEquals(course.entrance_exam_enabled, False)
        with open(self.entrance_exam_tar) as gtar:
            args = {"name": self.entrance_exam_tar, "course-data": [gtar]}
            resp = self.client.post(self.url, args)
        self.assertEquals(resp.status_code, 200)
        # After import the exam settings from the archive must be in effect.
        course = self.store.get_course(self.course.id)
        self.assertIsNotNone(course)
        self.assertEquals(course.entrance_exam_enabled, True)
        self.assertEquals(course.entrance_exam_minimum_score_pct, 0.7)
    def test_import_delete_pre_exiting_entrance_exam(self):
        """
        Check that pre existed entrance exam content should be overwrite with the imported course.
        """
        # First create an entrance exam directly via the exam endpoint.
        exam_url = '/course/{}/entrance_exam/'.format(unicode(self.course.id))
        resp = self.client.post(exam_url, {'entrance_exam_minimum_score_pct': 0.5}, http_accept='application/json')
        self.assertEqual(resp.status_code, 201)
        # Reload the test course now that the exam module has been added
        self.course = modulestore().get_course(self.course.id)
        metadata = CourseMetadata.fetch_all(self.course)
        self.assertTrue(metadata['entrance_exam_enabled'])
        self.assertIsNotNone(metadata['entrance_exam_minimum_score_pct'])
        self.assertEqual(metadata['entrance_exam_minimum_score_pct']['value'], 0.5)
        self.assertTrue(len(milestones_helpers.get_course_milestones(unicode(self.course.id))))
        content_milestones = milestones_helpers.get_course_content_milestones(
            unicode(self.course.id),
            metadata['entrance_exam_id']['value'],
            milestones_helpers.get_milestone_relationship_types()['FULFILLS']
        )
        self.assertTrue(len(content_milestones))
        # Now import entrance exam course
        with open(self.entrance_exam_tar) as gtar:
            args = {"name": self.entrance_exam_tar, "course-data": [gtar]}
            resp = self.client.post(self.url, args)
        self.assertEquals(resp.status_code, 200)
        # The imported exam (0.7) must replace the pre-existing one (0.5).
        course = self.store.get_course(self.course.id)
        self.assertIsNotNone(course)
        self.assertEquals(course.entrance_exam_enabled, True)
        self.assertEquals(course.entrance_exam_minimum_score_pct, 0.7)
@ddt.ddt
@override_settings(CONTENTSTORE=TEST_DATA_CONTENTSTORE)
class ImportTestCase(CourseTestCase):
    """
    Unit tests for importing a course or Library
    """
    # Consumed by CourseTestCase's setup -- presumably requests creation
    # of a logged-in test user; verify against CourseTestCase.
    CREATE_USER = True
    def setUp(self):
        # Prepare one well-formed and one malformed course archive that
        # the individual tests POST to the import handler.
        super(ImportTestCase, self).setUp()
        self.url = reverse_course_url('import_handler', self.course.id)
        self.content_dir = path(tempfile.mkdtemp())
        self.addCleanup(shutil.rmtree, self.content_dir)
        def touch(name):
            """ Equivalent to shell's 'touch'"""
            # 'file' is the Python 2 builtin; append mode creates the
            # file without truncating an existing one.
            with file(name, 'a'):
                os.utime(name, None)
        # Create tar test files -----------------------------------------------
        # OK course:
        good_dir = tempfile.mkdtemp(dir=self.content_dir)
        # test course being deeper down than top of tar file
        embedded_dir = os.path.join(good_dir, "grandparent", "parent")
        os.makedirs(os.path.join(embedded_dir, "course"))
        with open(os.path.join(embedded_dir, "course.xml"), "w+") as f:
            f.write('<course url_name="2013_Spring" org="EDx" course="0.00x"/>')
        with open(os.path.join(embedded_dir, "course", "2013_Spring.xml"), "w+") as f:
            f.write('<course></course>')
        self.good_tar = os.path.join(self.content_dir, "good.tar.gz")
        with tarfile.open(self.good_tar, "w:gz") as gtar:
            gtar.add(good_dir)
        # Bad course (no 'course.xml' file):
        bad_dir = tempfile.mkdtemp(dir=self.content_dir)
        touch(os.path.join(bad_dir, "bad.xml"))
        self.bad_tar = os.path.join(self.content_dir, "bad.tar.gz")
        with tarfile.open(self.bad_tar, "w:gz") as btar:
            btar.add(bad_dir)
        # Scratch directory used by the unsafe-tar helper methods below.
        self.unsafe_common_dir = path(tempfile.mkdtemp(dir=self.content_dir))
    def test_no_coursexml(self):
        """
        Check that the response for a tar.gz import without a course.xml is
        correct.
        """
        with open(self.bad_tar) as btar:
            resp = self.client.post(
                self.url,
                {
                    "name": self.bad_tar,
                    "course-data": [btar]
                })
        self.assertEquals(resp.status_code, 200)
        # Check that `import_status` returns the appropriate stage (i.e., the
        # stage at which import failed).
        resp_status = self.client.get(
            reverse_course_url(
                'import_status_handler',
                self.course.id,
                kwargs={'filename': os.path.split(self.bad_tar)[1]}
            )
        )
        # -2 appears to mark failure in the second import stage -- verify
        # against import_status_handler's status encoding.
        self.assertEquals(json.loads(resp_status.content)["ImportStatus"], -2)
    def test_with_coursexml(self):
        """
        Check that the response for a tar.gz import with a course.xml is
        correct.
        """
        # The well-formed archive built in setUp() should import cleanly.
        with open(self.good_tar) as gtar:
            args = {"name": self.good_tar, "course-data": [gtar]}
            resp = self.client.post(self.url, args)
        self.assertEquals(resp.status_code, 200)
def test_import_in_existing_course(self):
"""
Check that course is imported successfully in existing course and users have their access roles
"""
# Create a non_staff user and add it to course staff only
__, nonstaff_user = self.create_non_staff_authed_user_client()
auth.add_users(self.user, CourseStaffRole(self.course.id), nonstaff_user)
course = self.store.get_course(self.course.id)
self.assertIsNotNone(course)
display_name_before_import = course.display_name
# Check that global staff user can import course
with open(self.good_tar) as gtar:
args = {"name": self.good_tar, "course-data": [gtar]}
resp = self.client.post(self.url, args)
self.assertEquals(resp.status_code, 200)
course = self.store.get_course(self.course.id)
self.assertIsNotNone(course)
display_name_after_import = course.display_name
# Check that course display name have changed after import
self.assertNotEqual(display_name_before_import, display_name_after_import)
# Now check that non_staff user has his same role
self.assertFalse(CourseInstructorRole(self.course.id).has_user(nonstaff_user))
self.assertTrue(CourseStaffRole(self.course.id).has_user(nonstaff_user))
# Now course staff user can also successfully import course
self.client.login(username=nonstaff_user.username, password='foo')
with open(self.good_tar) as gtar:
args = {"name": self.good_tar, "course-data": [gtar]}
resp = self.client.post(self.url, args)
self.assertEquals(resp.status_code, 200)
# Now check that non_staff user has his same role
self.assertFalse(CourseInstructorRole(self.course.id).has_user(nonstaff_user))
self.assertTrue(CourseStaffRole(self.course.id).has_user(nonstaff_user))
## Unsafe tar methods #####################################################
# Each of these methods creates a tarfile with a single type of unsafe
# content.
def _fifo_tar(self):
"""
Tar file with FIFO
"""
fifop = self.unsafe_common_dir / "fifo.file"
fifo_tar = self.unsafe_common_dir / "fifo.tar.gz"
os.mkfifo(fifop)
with tarfile.open(fifo_tar, "w:gz") as tar:
tar.add(fifop)
return fifo_tar
    def _symlink_tar(self):
        """
        Tarfile with symlink to path outside directory.
        """
        # Link target; it is never created, so the archived symlink dangles.
        outsidep = self.unsafe_common_dir / "unsafe_file.txt"
        symlinkp = self.unsafe_common_dir / "symlink.txt"
        symlink_tar = self.unsafe_common_dir / "symlink.tar.gz"
        # NOTE(review): with path.py, a.symlink(b) appears to create the link
        # at `b` pointing to `a` — verify against the path library in use.
        outsidep.symlink(symlinkp)
        with tarfile.open(symlink_tar, "w:gz") as tar:
            # Only the symlink itself is added, not its (missing) target.
            tar.add(symlinkp)
        return symlink_tar
def _outside_tar(self):
"""
Tarfile with file that extracts to outside directory.
Extracting this tarfile in directory <dir> will put its contents
directly in <dir> (rather than <dir/tarname>).
"""
outside_tar = self.unsafe_common_dir / "unsafe_file.tar.gz"
with tarfile.open(outside_tar, "w:gz") as tar:
tar.addfile(tarfile.TarInfo(str(self.content_dir / "a_file")))
return outside_tar
    def _outside_tar2(self):
        """
        Tarfile with file that extracts to outside directory.
        The path here matches the basename (`self.unsafe_common_dir`), but
        then "cd's out". E.g. "/usr/../etc" == "/etc", but the naive basename
        of the first (but not the second) is "/usr"
        Extracting this tarfile in directory <dir> will also put its contents
        directly in <dir> (rather than <dir/tarname>).
        """
        outside_tar = self.unsafe_common_dir / "unsafe_file.tar.gz"
        with tarfile.open(outside_tar, "w:gz") as tar:
            # Member path contains "..": a naive prefix/basename check passes
            # while extraction would escape the target directory.
            tar.addfile(tarfile.TarInfo(str(self.unsafe_common_dir / "../a_file")))
        return outside_tar
def _edx_platform_tar(self):
"""
Tarfile with file that extracts to edx-platform directory.
Extracting this tarfile in directory <dir> will also put its contents
directly in <dir> (rather than <dir/tarname>).
"""
outside_tar = self.unsafe_common_dir / "unsafe_file.tar.gz"
with tarfile.open(outside_tar, "w:gz") as tar:
tar.addfile(tarfile.TarInfo(os.path.join(os.path.abspath("."), "a_file")))
return outside_tar
def test_unsafe_tar(self):
"""
Check that safety measure work.
This includes:
'tarbombs' which include files or symlinks with paths
outside or directly in the working directory,
'special files' (character device, block device or FIFOs),
all raise exceptions/400s.
"""
def try_tar(tarpath):
""" Attempt to tar an unacceptable file """
with open(tarpath) as tar:
args = {"name": tarpath, "course-data": [tar]}
resp = self.client.post(self.url, args)
self.assertEquals(resp.status_code, 200)
resp = self.client.get(
reverse_course_url(
'import_status_handler',
self.course.id,
kwargs={'filename': os.path.split(tarpath)[1]}
)
)
status = json.loads(resp.content)["ImportStatus"]
self.assertEqual(status, -1)
try_tar(self._fifo_tar())
try_tar(self._symlink_tar())
try_tar(self._outside_tar())
try_tar(self._outside_tar2())
try_tar(self._edx_platform_tar())
# test trying to open a tar outside of the normal data directory
with self.settings(DATA_DIR='/not/the/data/dir'):
try_tar(self._edx_platform_tar())
# Check that `import_status` returns the appropriate stage (i.e.,
# either 3, indicating all previous steps are completed, or 0,
# indicating no upload in progress)
resp_status = self.client.get(
reverse_course_url(
'import_status_handler',
self.course.id,
kwargs={'filename': os.path.split(self.good_tar)[1]}
)
)
import_status = json.loads(resp_status.content)["ImportStatus"]
self.assertIn(import_status, (0, 3))
    def test_library_import(self):
        """
        Try importing a known good library archive, and verify that the
        contents of the library have completely replaced the old contents.
        """
        # Create some blocks to overwrite
        library = LibraryFactory.create(modulestore=self.store)
        lib_key = library.location.library_key
        test_block = ItemFactory.create(
            category="vertical",
            parent_location=library.location,
            user_id=self.user.id,
            publish_item=False,
        )
        test_block2 = ItemFactory.create(
            category="vertical",
            parent_location=library.location,
            user_id=self.user.id,
            publish_item=False
        )
        # Create a library and blocks that should remain unmolested.
        unchanged_lib = LibraryFactory.create()
        unchanged_key = unchanged_lib.location.library_key
        test_block3 = ItemFactory.create(
            category="vertical",
            parent_location=unchanged_lib.location,
            user_id=self.user.id,
            publish_item=False
        )
        test_block4 = ItemFactory.create(
            category="vertical",
            parent_location=unchanged_lib.location,
            user_id=self.user.id,
            publish_item=False
        )
        # Refresh library.
        library = self.store.get_library(lib_key)
        # Sanity-check the pre-import state: each library holds exactly the
        # two blocks created above.
        children = [self.store.get_item(child).url_name for child in library.children]
        self.assertEqual(len(children), 2)
        self.assertIn(test_block.url_name, children)
        self.assertIn(test_block2.url_name, children)
        unchanged_lib = self.store.get_library(unchanged_key)
        children = [self.store.get_item(child).url_name for child in unchanged_lib.children]
        self.assertEqual(len(children), 2)
        self.assertIn(test_block3.url_name, children)
        self.assertIn(test_block4.url_name, children)
        extract_dir = path(tempfile.mkdtemp(dir=settings.DATA_DIR))
        # the extract_dir needs to be passed as a relative dir to
        # import_library_from_xml
        extract_dir_relative = path.relpath(extract_dir, settings.DATA_DIR)
        try:
            with tarfile.open(path(TEST_DATA_DIR) / 'imports' / 'library.HhJfPD.tar.gz') as tar:
                safetar_extractall(tar, extract_dir)
            library_items = import_library_from_xml(
                self.store,
                self.user.id,
                settings.GITHUB_REPO_ROOT,
                [extract_dir_relative / 'library'],
                load_error_modules=False,
                static_content_store=contentstore(),
                target_id=lib_key
            )
        finally:
            shutil.rmtree(extract_dir)
        self.assertEqual(lib_key, library_items[0].location.library_key)
        library = self.store.get_library(lib_key)
        # The import fully replaces the target library's contents...
        children = [self.store.get_item(child).url_name for child in library.children]
        self.assertEqual(len(children), 3)
        self.assertNotIn(test_block.url_name, children)
        self.assertNotIn(test_block2.url_name, children)
        # ...while the other library is untouched.
        unchanged_lib = self.store.get_library(unchanged_key)
        children = [self.store.get_item(child).url_name for child in unchanged_lib.children]
        self.assertEqual(len(children), 2)
        self.assertIn(test_block3.url_name, children)
        self.assertIn(test_block4.url_name, children)
    @ddt.data(
        ModuleStoreEnum.Branch.draft_preferred,
        ModuleStoreEnum.Branch.published_only,
    )
    def test_library_import_branch_settings(self, branch_setting):
        """
        Try importing a known good library archive under either branch setting.
        The branch setting should have no effect on library import.
        """
        with self.store.branch_setting(branch_setting):
            library = LibraryFactory.create(modulestore=self.store)
            lib_key = library.location.library_key
            extract_dir = path(tempfile.mkdtemp(dir=settings.DATA_DIR))
            # the extract_dir needs to be passed as a relative dir to
            # import_library_from_xml
            extract_dir_relative = path.relpath(extract_dir, settings.DATA_DIR)
            try:
                with tarfile.open(path(TEST_DATA_DIR) / 'imports' / 'library.HhJfPD.tar.gz') as tar:
                    safetar_extractall(tar, extract_dir)
                # Completing without an exception is the assertion here.
                import_library_from_xml(
                    self.store,
                    self.user.id,
                    settings.GITHUB_REPO_ROOT,
                    [extract_dir_relative / 'library'],
                    load_error_modules=False,
                    static_content_store=contentstore(),
                    target_id=lib_key
                )
            finally:
                shutil.rmtree(extract_dir)
    @ddt.data(
        ModuleStoreEnum.Branch.draft_preferred,
        ModuleStoreEnum.Branch.published_only,
    )
    def test_library_import_branch_settings_again(self, branch_setting):
        """
        Import a library archive into a freshly-built split modulestore under
        either branch setting; the import must complete without error.
        """
        # Construct the contentstore for storing the import
        with MongoContentstoreBuilder().build() as source_content:
            # Construct the modulestore for storing the import (using the previously created contentstore)
            with SPLIT_MODULESTORE_SETUP.build(contentstore=source_content) as source_store:
                # Use the test branch setting.
                with source_store.branch_setting(branch_setting):
                    source_library_key = LibraryLocator(org='TestOrg', library='TestProbs')
                    extract_dir = path(tempfile.mkdtemp(dir=settings.DATA_DIR))
                    # the extract_dir needs to be passed as a relative dir to
                    # import_library_from_xml
                    extract_dir_relative = path.relpath(extract_dir, settings.DATA_DIR)
                    try:
                        with tarfile.open(path(TEST_DATA_DIR) / 'imports' / 'library.HhJfPD.tar.gz') as tar:
                            safetar_extractall(tar, extract_dir)
                        import_library_from_xml(
                            source_store,
                            self.user.id,
                            settings.GITHUB_REPO_ROOT,
                            [extract_dir_relative / 'library'],
                            static_content_store=source_content,
                            target_id=source_library_key,
                            load_error_modules=False,
                            raise_on_failure=True,
                            create_if_not_present=True,
                        )
                    finally:
                        shutil.rmtree(extract_dir)
@override_settings(CONTENTSTORE=TEST_DATA_CONTENTSTORE)
@ddt.ddt
class ExportTestCase(CourseTestCase):
    """
    Tests for export_handler.
    """
    def setUp(self):
        """
        Sets up the test course and the export/status URLs used by the tests.
        """
        super(ExportTestCase, self).setUp()
        self.url = reverse_course_url('export_handler', self.course.id)
        self.status_url = reverse_course_url('export_status_handler', self.course.id)

    def test_export_html(self):
        """
        Get the HTML for the page.
        """
        resp = self.client.get_html(self.url)
        self.assertEqual(resp.status_code, 200)
        self.assertContains(resp, "Export My Course Content")

    def test_export_json_unsupported(self):
        """
        JSON is unsupported.
        """
        resp = self.client.get(self.url, HTTP_ACCEPT='application/json')
        self.assertEqual(resp.status_code, 406)

    def test_export_async(self):
        """
        Get tar.gz file, using asynchronous background task
        """
        resp = self.client.post(self.url)
        self.assertEqual(resp.status_code, 200)
        resp = self.client.get(self.status_url)
        result = json.loads(resp.content)
        status = result['ExportStatus']
        self.assertEqual(status, 3)
        self.assertIn('ExportOutput', result)
        output_url = result['ExportOutput']
        resp = self.client.get(output_url)
        self._verify_export_succeeded(resp)

    def _verify_export_succeeded(self, resp):
        """ Export success helper method. """
        self.assertEqual(resp.status_code, 200)
        self.assertTrue(resp.get('Content-Disposition').startswith('attachment'))

    def test_export_failure_top_level(self):
        """
        Export failure.
        """
        # A block with an unknown category cannot be serialized to XML.
        fake_xblock = ItemFactory.create(parent_location=self.course.location, category='aawefawef')
        self.store.publish(fake_xblock.location, self.user.id)
        self._verify_export_failure(u'/container/{}'.format(self.course.location))

    def test_export_failure_subsection_level(self):
        """
        Slightly different export failure.
        """
        vertical = ItemFactory.create(parent_location=self.course.location, category='vertical', display_name='foo')
        ItemFactory.create(
            parent_location=vertical.location,
            category='aawefawef'
        )
        self._verify_export_failure(u'/container/{}'.format(vertical.location))

    def _verify_export_failure(self, expected_text):
        """ Export failure helper method. """
        resp = self.client.post(self.url)
        self.assertEqual(resp.status_code, 200)
        resp = self.client.get(self.status_url)
        self.assertEqual(resp.status_code, 200)
        result = json.loads(resp.content)
        self.assertNotIn('ExportOutput', result)
        self.assertIn('ExportError', result)
        error = result['ExportError']
        self.assertIn('Unable to create xml for module', error['raw_error_msg'])
        self.assertIn(expected_text, error['edit_unit_url'])

    def test_library_export(self):
        """
        Verify that usable library data can be exported.
        """
        youtube_id = "qS4NO9MNC6w"
        library = LibraryFactory.create(modulestore=self.store)
        video_block = ItemFactory.create(
            category="video",
            parent_location=library.location,
            user_id=self.user.id,
            publish_item=False,
            youtube_id_1_0=youtube_id
        )
        name = library.url_name
        lib_key = library.location.library_key
        root_dir = path(tempfile.mkdtemp())
        try:
            export_library_to_xml(self.store, contentstore(), lib_key, root_dir, name)
            # Use context managers so the exported files are always closed.
            with open(root_dir / name / LIBRARY_ROOT) as lib_file:
                lib_xml = lxml.etree.XML(lib_file.read())
            self.assertEqual(lib_xml.get('org'), lib_key.org)
            self.assertEqual(lib_xml.get('library'), lib_key.library)
            block = lib_xml.find('video')
            self.assertIsNotNone(block)
            self.assertEqual(block.get('url_name'), video_block.url_name)
            with open(root_dir / name / 'video' / video_block.url_name + '.xml') as video_file:
                video_xml = lxml.etree.XML(video_file.read())
            self.assertEqual(video_xml.tag, 'video')
            self.assertEqual(video_xml.get('youtube_id_1_0'), youtube_id)
        finally:
            shutil.rmtree(root_dir / name)

    def test_export_success_with_custom_tag(self):
        """
        Verify that course export succeeds when the course contains a
        customtag block.
        """
        xml_string = '<impl>slides</impl>'
        vertical = ItemFactory.create(
            parent_location=self.course.location, category='vertical', display_name='foo'
        )
        ItemFactory.create(
            parent_location=vertical.location,
            category='customtag',
            display_name='custom_tag_foo',
            data=xml_string
        )
        self.test_export_async()

    @ddt.data(
        '/export/non.1/existence_1/Run_1',  # For mongo
        '/export/course-v1:non1+existence1+Run1',  # For split
    )
    def test_export_course_does_not_exist(self, url):
        """
        Export failure if course does not exist
        """
        resp = self.client.get_html(url)
        self.assertEqual(resp.status_code, 404)

    def test_non_course_author(self):
        """
        Verify that users who aren't authors of the course are unable to export it
        """
        client, _ = self.create_non_staff_authed_user_client()
        resp = client.get(self.url)
        self.assertEqual(resp.status_code, 403)

    def test_status_non_course_author(self):
        """
        Verify that users who aren't authors of the course are unable to see the status of export tasks
        """
        client, _ = self.create_non_staff_authed_user_client()
        resp = client.get(self.status_url)
        self.assertEqual(resp.status_code, 403)

    def test_status_missing_record(self):
        """
        Attempting to get the status of an export task which isn't currently
        represented in the database should yield a useful result
        """
        resp = self.client.get(self.status_url)
        self.assertEqual(resp.status_code, 200)
        result = json.loads(resp.content)
        self.assertEqual(result['ExportStatus'], 0)

    def test_output_non_course_author(self):
        """
        Verify that users who aren't authors of the course are unable to see the output of export tasks
        """
        client, _ = self.create_non_staff_authed_user_client()
        resp = client.get(reverse_course_url('export_output_handler', self.course.id))
        self.assertEqual(resp.status_code, 403)

    def _mock_artifact(self, spec=None, file_url=None):
        """
        Creates a Mock of the UserTaskArtifact model for testing exports handler
        code without touching the database.
        """
        mock_artifact = Mock()
        mock_artifact.file.name = 'testfile.tar.gz'
        mock_artifact.file.storage = Mock(spec=spec)
        mock_artifact.file.storage.url.return_value = file_url
        return mock_artifact

    @patch('contentstore.views.import_export._latest_task_status')
    @patch('user_tasks.models.UserTaskArtifact.objects.get')
    def test_export_status_handler_other(
            self,
            mock_get_user_task_artifact,
            mock_latest_task_status,
    ):
        """
        Verify that the export status handler generates the correct export path
        for storage providers other than ``FileSystemStorage`` and
        ``S3BotoStorage``
        """
        mock_latest_task_status.return_value = Mock(state=UserTaskStatus.SUCCEEDED)
        mock_get_user_task_artifact.return_value = self._mock_artifact(
            file_url='/path/to/testfile.tar.gz',
        )
        resp = self.client.get(self.status_url)
        result = json.loads(resp.content)
        self.assertEqual(result['ExportOutput'], '/path/to/testfile.tar.gz')

    @patch('contentstore.views.import_export._latest_task_status')
    @patch('user_tasks.models.UserTaskArtifact.objects.get')
    def test_export_status_handler_s3(
            self,
            mock_get_user_task_artifact,
            mock_latest_task_status,
    ):
        """
        Verify that the export status handler generates the correct export path
        for the ``S3BotoStorage`` storage provider
        """
        mock_latest_task_status.return_value = Mock(state=UserTaskStatus.SUCCEEDED)
        mock_get_user_task_artifact.return_value = self._mock_artifact(
            spec=S3BotoStorage,
            file_url='/s3/file/path/testfile.tar.gz',
        )
        resp = self.client.get(self.status_url)
        result = json.loads(resp.content)
        self.assertEqual(result['ExportOutput'], '/s3/file/path/testfile.tar.gz')

    @patch('contentstore.views.import_export._latest_task_status')
    @patch('user_tasks.models.UserTaskArtifact.objects.get')
    def test_export_status_handler_filesystem(
            self,
            mock_get_user_task_artifact,
            mock_latest_task_status,
    ):
        """
        Verify that the export status handler generates the correct export path
        for the ``FileSystemStorage`` storage provider
        """
        mock_latest_task_status.return_value = Mock(state=UserTaskStatus.SUCCEEDED)
        mock_get_user_task_artifact.return_value = self._mock_artifact(spec=FileSystemStorage)
        resp = self.client.get(self.status_url)
        result = json.loads(resp.content)
        file_export_output_url = reverse_course_url('export_output_handler', self.course.id)
        self.assertEqual(result['ExportOutput'], file_export_output_url)
@override_settings(CONTENTSTORE=TEST_DATA_CONTENTSTORE)
class TestLibraryImportExport(CourseTestCase):
    """
    Tests for importing content libraries from XML and exporting them to XML.
    """
    def setUp(self):
        super(TestLibraryImportExport, self).setUp()
        # Scratch directory for the exported library; removed after the test.
        self.export_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.export_dir, ignore_errors=True)
    def test_content_library_export_import(self):
        """
        Import a fixture library, export it, re-import the export into a
        second library, and verify the two libraries are equal.
        """
        library1 = LibraryFactory.create(modulestore=self.store)
        source_library1_key = library1.location.library_key
        library2 = LibraryFactory.create(modulestore=self.store)
        source_library2_key = library2.location.library_key
        import_library_from_xml(
            self.store,
            'test_user',
            TEST_DATA_DIR,
            ['library_empty_problem'],
            static_content_store=contentstore(),
            target_id=source_library1_key,
            load_error_modules=False,
            raise_on_failure=True,
            create_if_not_present=True,
        )
        export_library_to_xml(
            self.store,
            contentstore(),
            source_library1_key,
            self.export_dir,
            'exported_source_library',
        )
        source_library = self.store.get_library(source_library1_key)
        self.assertEqual(source_library.url_name, 'library')
        # Import the exported library into a different content library.
        import_library_from_xml(
            self.store,
            'test_user',
            self.export_dir,
            ['exported_source_library'],
            static_content_store=contentstore(),
            target_id=source_library2_key,
            load_error_modules=False,
            raise_on_failure=True,
            create_if_not_present=True,
        )
        # Compare the two content libraries for equality.
        self.assertCoursesEqual(source_library1_key, source_library2_key)
@ddt.ddt
@override_settings(CONTENTSTORE=TEST_DATA_CONTENTSTORE)
class TestCourseExportImport(LibraryTestCase):
    """
    Tests for importing after exporting the course containing content libraries from XML.
    """
    def setUp(self):
        super(TestCourseExportImport, self).setUp()
        self.export_dir = tempfile.mkdtemp()
        # Create a problem in library
        ItemFactory.create(
            category="problem",
            parent_location=self.library.location,
            user_id=self.user.id,
            publish_item=False,
            display_name='Test Problem',
            data="<problem><multiplechoiceresponse></multiplechoiceresponse></problem>",
        )
        # Create a source course.
        self.source_course = CourseFactory.create(default_store=ModuleStoreEnum.Type.split)
        self.addCleanup(shutil.rmtree, self.export_dir, ignore_errors=True)

    def _setup_source_course_with_library_content(self, publish=False):
        """
        Sets up course with library content.
        """
        chapter = ItemFactory.create(
            parent_location=self.source_course.location,
            category='chapter',
            display_name='Test Section'
        )
        sequential = ItemFactory.create(
            parent_location=chapter.location,
            category='sequential',
            display_name='Test Sequential'
        )
        vertical = ItemFactory.create(
            category='vertical',
            parent_location=sequential.location,
            display_name='Test Unit'
        )
        lc_block = self._add_library_content_block(vertical, self.lib_key, publish_item=publish)
        self._refresh_children(lc_block)

    def get_lib_content_block_children(self, block_location):
        """
        Search for library content block to return its immediate children
        """
        # Walk down the first child at each level until the library_content
        # block is reached.
        if block_location.block_type == 'library_content':
            return self.store.get_item(block_location).children
        return self.get_lib_content_block_children(self.store.get_item(block_location).children[0])

    def assert_problem_display_names(self, source_course_location, dest_course_location, is_published):
        """
        Asserts that problems' display names in both source and destination courses are same.
        """
        source_course_lib_children = self.get_lib_content_block_children(source_course_location)
        dest_course_lib_children = self.get_lib_content_block_children(dest_course_location)
        self.assertEqual(len(source_course_lib_children), len(dest_course_lib_children))
        for source_child_location, dest_child_location in zip(source_course_lib_children, dest_course_lib_children):
            # Assert problem names on draft branch.
            with self.store.branch_setting(branch_setting=ModuleStoreEnum.Branch.draft_preferred):
                self.assert_names(source_child_location, dest_child_location)
            if is_published:
                # Assert problem names on publish branch.
                with self.store.branch_setting(branch_setting=ModuleStoreEnum.Branch.published_only):
                    self.assert_names(source_child_location, dest_child_location)

    def assert_names(self, source_child_location, dest_child_location):
        """
        Check if blocks have same display_name.
        """
        source_child = self.store.get_item(source_child_location)
        dest_child = self.store.get_item(dest_child_location)
        self.assertEqual(source_child.display_name, dest_child.display_name)

    @ddt.data(True, False)
    def test_library_content_on_course_export_import(self, publish_item):
        """
        Verify that library contents in destination and source courses are same after importing
        the source course into destination course.
        """
        self._setup_source_course_with_library_content(publish=publish_item)
        # Create a course to import source course.
        dest_course = CourseFactory.create(default_store=ModuleStoreEnum.Type.split)
        # Export the source course.
        export_course_to_xml(
            self.store,
            contentstore(),
            self.source_course.location.course_key,
            self.export_dir,
            'exported_source_course',
        )
        # Now, import it back to dest_course.
        import_course_from_xml(
            self.store,
            self.user.id,
            self.export_dir,
            ['exported_source_course'],
            static_content_store=contentstore(),
            target_id=dest_course.location.course_key,
            load_error_modules=False,
            raise_on_failure=True,
            create_if_not_present=True,
        )
        self.assert_problem_display_names(
            self.source_course.location,
            dest_course.location,
            publish_item
        )
| agpl-3.0 |
kawamon/hue | desktop/core/ext-py/SQLAlchemy-1.3.17/test/ext/declarative/test_basic.py | 2 | 71852 | import sqlalchemy as sa
from sqlalchemy import CheckConstraint
from sqlalchemy import event
from sqlalchemy import exc
from sqlalchemy import ForeignKey
from sqlalchemy import ForeignKeyConstraint
from sqlalchemy import Index
from sqlalchemy import inspect
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import String
from sqlalchemy import testing
from sqlalchemy import UniqueConstraint
from sqlalchemy import util
from sqlalchemy.ext import declarative as decl
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.ext.declarative import synonym_for
from sqlalchemy.ext.declarative.base import _DeferredMapperConfig
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import backref
from sqlalchemy.orm import class_mapper
from sqlalchemy.orm import clear_mappers
from sqlalchemy.orm import close_all_sessions
from sqlalchemy.orm import column_property
from sqlalchemy.orm import composite
from sqlalchemy.orm import configure_mappers
from sqlalchemy.orm import create_session
from sqlalchemy.orm import deferred
from sqlalchemy.orm import exc as orm_exc
from sqlalchemy.orm import joinedload
from sqlalchemy.orm import mapper
from sqlalchemy.orm import properties
from sqlalchemy.orm import relationship
from sqlalchemy.orm import Session
from sqlalchemy.orm.events import MapperEvents
from sqlalchemy.testing import assert_raises
from sqlalchemy.testing import assert_raises_message
from sqlalchemy.testing import assertions
from sqlalchemy.testing import eq_
from sqlalchemy.testing import expect_warnings
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import is_
from sqlalchemy.testing import mock
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
from sqlalchemy.util import with_metaclass
# Module-level handles; ``Base`` is rebuilt for every test in
# ``DeclarativeTestBase.setup()`` and mapped classes are declared per-test.
Base = None
User = Address = None
class DeclarativeTestBase(
    fixtures.TestBase,
    testing.AssertsExecutionResults,
    testing.AssertsCompiledSQL,
):
    # Compile assertions render against the generic default dialect.
    __dialect__ = "default"
    def setup(self):
        # Build a fresh declarative Base bound to the test engine so every
        # test starts with an empty metadata / class registry.
        global Base
        Base = decl.declarative_base(testing.db)
    def teardown(self):
        # Order matters: close sessions first, un-map classes, then drop
        # the tables created during the test.
        close_all_sessions()
        clear_mappers()
        Base.metadata.drop_all()
class DeclarativeTest(DeclarativeTestBase):
    def test_basic(self):
        """Round trip of a two-class declarative mapping with a backref."""
        class User(Base, fixtures.ComparableEntity):
            __tablename__ = "users"
            id = Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            )
            name = Column("name", String(50))
            addresses = relationship("Address", backref="user")
        class Address(Base, fixtures.ComparableEntity):
            __tablename__ = "addresses"
            id = Column(
                Integer, primary_key=True, test_needs_autoincrement=True
            )
            email = Column(String(50), key="_email")
            user_id = Column(
                "user_id", Integer, ForeignKey("users.id"), key="_user_id"
            )
        Base.metadata.create_all()
        # ``key`` renames the metadata/attribute key, not the DB column name.
        eq_(Address.__table__.c["id"].name, "id")
        eq_(Address.__table__.c["_email"].name, "email")
        eq_(Address.__table__.c["_user_id"].name, "user_id")
        u1 = User(
            name="u1", addresses=[Address(email="one"), Address(email="two")]
        )
        sess = create_session()
        sess.add(u1)
        sess.flush()
        sess.expunge_all()
        # Reload from the database and compare via ComparableEntity equality.
        eq_(
            sess.query(User).all(),
            [
                User(
                    name="u1",
                    addresses=[Address(email="one"), Address(email="two")],
                )
            ],
        )
        a1 = sess.query(Address).filter(Address.email == "two").one()
        eq_(a1, Address(email="two"))
        eq_(a1.user, User(name="u1"))
    def test_deferred_reflection_default_error(self):
        """A class with a deferred mapping is unusable until prepare()."""
        class MyExt(object):
            @classmethod
            def prepare(cls):
                "sample prepare method"
                to_map = _DeferredMapperConfig.classes_for_base(cls)
                for thingy in to_map:
                    thingy.map()
            @classmethod
            def _sa_decl_prepare(cls):
                # The mere presence of this hook defers mapper configuration
                # for subclasses until prepare() is called.
                pass
        class User(MyExt, Base):
            __tablename__ = "user"
            id = Column(Integer, primary_key=True)
        assert_raises_message(
            orm_exc.UnmappedClassError,
            "Class test.ext.declarative.test_basic.User has a deferred "
            "mapping on it. It is not yet usable as a mapped class.",
            Session().query,
            User,
        )
        User.prepare()
        # After prepare(), the class maps and queries normally.
        self.assert_compile(
            Session().query(User), 'SELECT "user".id AS user_id FROM "user"'
        )
    def test_unicode_string_resolve(self):
        """relationship() resolves a unicode class-name string argument."""
        class User(Base, fixtures.ComparableEntity):
            __tablename__ = "users"
            id = Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            )
            name = Column("name", String(50))
            addresses = relationship(util.u("Address"), backref="user")
        class Address(Base, fixtures.ComparableEntity):
            __tablename__ = "addresses"
            id = Column(
                Integer, primary_key=True, test_needs_autoincrement=True
            )
            email = Column(String(50), key="_email")
            user_id = Column(
                "user_id", Integer, ForeignKey("users.id"), key="_user_id"
            )
        assert User.addresses.property.mapper.class_ is Address
    def test_unicode_string_resolve_backref(self):
        """A unicode string inside backref(order_by=...) also resolves."""
        class User(Base, fixtures.ComparableEntity):
            __tablename__ = "users"
            id = Column(
                "id", Integer, primary_key=True, test_needs_autoincrement=True
            )
            name = Column("name", String(50))
        class Address(Base, fixtures.ComparableEntity):
            __tablename__ = "addresses"
            id = Column(
                Integer, primary_key=True, test_needs_autoincrement=True
            )
            email = Column(String(50), key="_email")
            user_id = Column(
                "user_id", Integer, ForeignKey("users.id"), key="_user_id"
            )
            user = relationship(
                User,
                backref=backref("addresses", order_by=util.u("Address.email")),
            )
        assert Address.user.property.mapper.class_ is User
def test_no_table(self):
def go():
class User(Base):
id = Column("id", Integer, primary_key=True)
assert_raises_message(
sa.exc.InvalidRequestError, "does not have a __table__", go
)
def test_table_args_empty_dict(self):
class MyModel(Base):
__tablename__ = "test"
id = Column(Integer, primary_key=True)
__table_args__ = {}
def test_table_args_empty_tuple(self):
class MyModel(Base):
__tablename__ = "test"
id = Column(Integer, primary_key=True)
__table_args__ = ()
    def test_cant_add_columns(self):
        """New Column attributes may not be added when __table__ is given."""
        t = Table(
            "t",
            Base.metadata,
            Column("id", Integer, primary_key=True),
            Column("data", String),
        )
        def go():
            class User(Base):
                __table__ = t
                foo = Column(Integer, primary_key=True)
        # can't specify new columns not already in the table
        assert_raises_message(
            sa.exc.ArgumentError,
            "Can't add additional column 'foo' when " "specifying __table__",
            go,
        )
        # regular re-mapping works tho
        class Bar(Base):
            __table__ = t
            some_data = t.c.data
        assert (
            class_mapper(Bar).get_property("some_data").columns[0] is t.c.data
        )
    def test_lower_case_c_column_warning(self):
        """Using sql.column() (not schema Column) as a mapped attribute warns,
        both directly, via a mixin, and via @declared_attr."""
        with assertions.expect_warnings(
            r"Attribute 'x' on class <class .*Foo.* appears to be a "
            r"non-schema 'sqlalchemy.sql.column\(\)' object; "
        ):
            class Foo(Base):
                __tablename__ = "foo"
                id = Column(Integer, primary_key=True)
                x = sa.sql.expression.column(Integer)
                y = Column(Integer)
        class MyMixin(object):
            x = sa.sql.expression.column(Integer)
            y = Column(Integer)
        with assertions.expect_warnings(
            r"Attribute 'x' on class <class .*MyMixin.* appears to be a "
            r"non-schema 'sqlalchemy.sql.column\(\)' object; "
        ):
            class Foo2(MyMixin, Base):
                __tablename__ = "foo2"
                id = Column(Integer, primary_key=True)
        with assertions.expect_warnings(
            r"Attribute 'x' on class <class .*Foo3.* appears to be a "
            r"non-schema 'sqlalchemy.sql.column\(\)' object; "
        ):
            class Foo3(Base):
                __tablename__ = "foo3"
                id = Column(Integer, primary_key=True)
                @declared_attr
                def x(cls):
                    return sa.sql.expression.column(Integer)
                y = Column(Integer)
        with assertions.expect_warnings(
            r"Attribute 'x' on class <class .*Foo4.* appears to be a "
            r"non-schema 'sqlalchemy.sql.column\(\)' object; "
        ):
            class MyMixin2(object):
                @declared_attr
                def x(cls):
                    return sa.sql.expression.column(Integer)
                y = Column(Integer)
            class Foo4(MyMixin2, Base):
                __tablename__ = "foo4"
                id = Column(Integer, primary_key=True)
    def test_column_named_twice(self):
        """Two attributes naming the same column 'x' directly emit a warning."""
        def go():
            class Foo(Base):
                __tablename__ = "foo"
                id = Column(Integer, primary_key=True)
                x = Column("x", Integer)
                y = Column("x", Integer)
        assert_raises_message(
            sa.exc.SAWarning,
            "On class 'Foo', Column object 'x' named directly multiple times, "
            "only one will be used: x, y",
            go,
        )
    def test_column_repeated_under_prop(self):
        """The duplicate-column warning also fires through column_property()."""
        def go():
            class Foo(Base):
                __tablename__ = "foo"
                id = Column(Integer, primary_key=True)
                x = Column("x", Integer)
                y = column_property(x)
                z = Column("x", Integer)
        assert_raises_message(
            sa.exc.SAWarning,
            "On class 'Foo', Column object 'x' named directly multiple times, "
            "only one will be used: x, y, z",
            go,
        )
    def test_using_explicit_prop_in_schema_objects(self):
        """column_property()/deferred() objects resolve inside constraints."""
        class Foo(Base):
            __tablename__ = "foo"
            id = Column(Integer, primary_key=True)
            cprop = column_property(Column(Integer))
            __table_args__ = (UniqueConstraint(cprop),)
        uq = [
            c
            for c in Foo.__table__.constraints
            if isinstance(c, UniqueConstraint)
        ][0]
        # The constraint resolved to the underlying Column object.
        is_(uq.columns.cprop, Foo.__table__.c.cprop)
        class Bar(Base):
            __tablename__ = "bar"
            id = Column(Integer, primary_key=True)
            cprop = deferred(Column(Integer))
            __table_args__ = (CheckConstraint(cprop > sa.func.foo()),)
        ck = [
            c
            for c in Bar.__table__.constraints
            if isinstance(c, CheckConstraint)
        ][0]
        is_(ck.columns.cprop, Bar.__table__.c.cprop)
        if testing.requires.python3.enabled:
            # test the existing failure case in case something changes
            def go():
                class Bat(Base):
                    __tablename__ = "bat"
                    id = Column(Integer, primary_key=True)
                    cprop = deferred(Column(Integer))
                    # we still can't do an expression like
                    # "cprop > 5" because the column property isn't
                    # a full blown column
                    __table_args__ = (CheckConstraint(cprop > 5),)
            assert_raises(TypeError, go)
def test_relationship_level_msg_for_invalid_callable(self):
    """A relationship() string that resolves to a Table (here the
    tablename "a") raises an informative ArgumentError."""

    class A(Base):
        __tablename__ = "a"
        id = Column(Integer, primary_key=True)

    class B(Base):
        __tablename__ = "b"
        id = Column(Integer, primary_key=True)
        a_id = Column(Integer, ForeignKey("a.id"))
        # "a" is a table name, not a mapped class name
        a = relationship("a")

    assert_raises_message(
        sa.exc.ArgumentError,
        "relationship 'a' expects a class or a mapper "
        "argument .received: .*Table",
        configure_mappers,
    )
def test_relationship_level_msg_for_invalid_object(self):
    """Passing a Table object directly to relationship() raises an
    informative ArgumentError at configure time."""

    class A(Base):
        __tablename__ = "a"
        id = Column(Integer, primary_key=True)

    class B(Base):
        __tablename__ = "b"
        id = Column(Integer, primary_key=True)
        a_id = Column(Integer, ForeignKey("a.id"))
        # a Table is not a class or mapper
        a = relationship(A.__table__)

    assert_raises_message(
        sa.exc.ArgumentError,
        "relationship 'a' expects a class or a mapper "
        "argument .received: .*Table",
        configure_mappers,
    )
def test_difficult_class(self):
    """test no getattr() errors with a customized class"""

    # metaclass to mock the way zope.interface breaks getattr()
    class BrokenMeta(type):
        def __getattribute__(self, attr):
            if attr == "xyzzy":
                raise AttributeError("xyzzy")
            else:
                return object.__getattribute__(self, attr)

    # even though this class has an xyzzy attribute, getattr(cls,"xyzzy")
    # fails
    class BrokenParent(with_metaclass(BrokenMeta)):
        xyzzy = "magic"

    # _as_declarative() inspects obj.__class__.__bases__
    class User(BrokenParent, fixtures.ComparableEntity):
        __tablename__ = "users"
        id = Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        )
        name = Column("name", String(50))

    # must not blow up on the AttributeError raised by BrokenMeta
    decl.instrument_declarative(User, {}, Base.metadata)
def test_reserved_identifiers(self):
    """'metadata' is reserved and may not be a mapped attribute name,
    whether a Column or a relationship."""

    def go1():
        class User1(Base):
            __tablename__ = "user1"
            id = Column(Integer, primary_key=True)
            metadata = Column(Integer)

    def go2():
        class User2(Base):
            __tablename__ = "user2"
            id = Column(Integer, primary_key=True)
            metadata = relationship("Address")

    for go in (go1, go2):
        assert_raises_message(
            exc.InvalidRequestError,
            "Attribute name 'metadata' is reserved "
            "for the MetaData instance when using a "
            "declarative base class.",
            go,
        )
def test_undefer_column_name(self):
    """_undefer_column_name() assigns both name and key to an
    anonymous Column."""
    # TODO: not sure if there was an explicit
    # test for this elsewhere
    col = Column(Integer)
    # before: the column is anonymous
    eq_(str(col), "(no name)")
    eq_(col.key, None)
    eq_(col.name, None)
    decl.base._undefer_column_name("foo", col)
    # after: name and key both take the given string
    eq_(str(col), "foo")
    eq_(col.key, "foo")
    eq_(col.name, "foo")
def test_recompile_on_othermapper(self):
    """declarative version of the same test in mappers.py"""

    from sqlalchemy.orm import mapperlib

    class User(Base):
        __tablename__ = "users"
        id = Column("id", Integer, primary_key=True)
        name = Column("name", String(50))

    class Address(Base):
        __tablename__ = "addresses"
        id = Column("id", Integer, primary_key=True)
        email = Column("email", String(50))
        user_id = Column("user_id", Integer, ForeignKey("users.id"))
        user = relationship(
            "User", primaryjoin=user_id == User.id, backref="addresses"
        )

    # defining Address leaves new mappers pending configuration
    assert mapperlib.Mapper._new_mappers is True
    u = User()  # noqa
    # accessing the backref-created attribute triggers configuration
    assert User.addresses
    assert mapperlib.Mapper._new_mappers is False
def test_string_dependency_resolution(self):
    """String-based relationship() arguments (order_by, primaryjoin,
    foreign_keys, backref) resolve at mapper-configuration time."""

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"
        id = Column(
            Integer, primary_key=True, test_needs_autoincrement=True
        )
        name = Column(String(50))
        addresses = relationship(
            "Address",
            order_by="desc(Address.email)",
            primaryjoin="User.id==Address.user_id",
            foreign_keys="[Address.user_id]",
            backref=backref(
                "user",
                primaryjoin="User.id==Address.user_id",
                foreign_keys="[Address.user_id]",
            ),
        )

    class Address(Base, fixtures.ComparableEntity):
        __tablename__ = "addresses"
        id = Column(
            Integer, primary_key=True, test_needs_autoincrement=True
        )
        email = Column(String(50))
        user_id = Column(Integer)  # note no foreign key

    Base.metadata.create_all()
    sess = create_session()
    u1 = User(
        name="ed",
        addresses=[
            Address(email="abc"),
            Address(email="def"),
            Address(email="xyz"),
        ],
    )
    sess.add(u1)
    sess.flush()
    sess.expunge_all()
    # order_by="desc(Address.email)" took effect: reverse-alpha order
    eq_(
        sess.query(User).filter(User.name == "ed").one(),
        User(
            name="ed",
            addresses=[
                Address(email="xyz"),
                Address(email="def"),
                Address(email="abc"),
            ],
        ),
    )

    class Foo(Base, fixtures.ComparableEntity):
        __tablename__ = "foo"
        id = Column(Integer, primary_key=True)
        # "User.addresses" names a relationship, not a column
        rel = relationship("User", primaryjoin="User.addresses==Foo.id")

    assert_raises_message(
        exc.InvalidRequestError,
        "'addresses' is not an instance of " "ColumnProperty",
        configure_mappers,
    )
def test_string_dependency_resolution_synonym(self):
    """A string primaryjoin may reference a synonym attribute
    ("Foo.user_id" backed by _user_id)."""

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"
        id = Column(
            Integer, primary_key=True, test_needs_autoincrement=True
        )
        name = Column(String(50))

    Base.metadata.create_all()
    sess = create_session()
    u1 = User(name="ed")
    sess.add(u1)
    sess.flush()
    sess.expunge_all()
    eq_(sess.query(User).filter(User.name == "ed").one(), User(name="ed"))

    class Foo(Base, fixtures.ComparableEntity):
        __tablename__ = "foo"
        id = Column(Integer, primary_key=True)
        _user_id = Column(Integer)
        rel = relationship(
            "User",
            uselist=False,
            foreign_keys=[User.id],
            # "Foo.user_id" resolves through the synonym below
            primaryjoin="Foo.user_id==User.id",
        )

        @synonym_for("_user_id")
        @property
        def user_id(self):
            return self._user_id

    foo = Foo()
    foo.rel = u1
    assert foo.rel == u1
def test_string_dependency_resolution_orm_descriptor(self):
    """A string order_by may reference a hybrid_property; it renders as
    the hybrid's SQL expression."""

    from sqlalchemy.ext.hybrid import hybrid_property

    class User(Base):
        __tablename__ = "user"
        id = Column(Integer, primary_key=True)
        firstname = Column(String(50))
        lastname = Column(String(50))
        game_id = Column(Integer, ForeignKey("game.id"))

        @hybrid_property
        def fullname(self):
            return self.firstname + " " + self.lastname

    class Game(Base):
        __tablename__ = "game"
        id = Column(Integer, primary_key=True)
        name = Column(String(50))
        users = relationship("User", order_by="User.fullname")

    s = Session()
    # the hybrid expands into firstname || ' ' || lastname in ORDER BY
    self.assert_compile(
        s.query(Game).options(joinedload(Game.users)),
        "SELECT game.id AS game_id, game.name AS game_name, "
        "user_1.id AS user_1_id, user_1.firstname AS user_1_firstname, "
        "user_1.lastname AS user_1_lastname, "
        "user_1.game_id AS user_1_game_id "
        'FROM game LEFT OUTER JOIN "user" AS user_1 ON game.id = '
        "user_1.game_id ORDER BY "
        "user_1.firstname || :firstname_1 || user_1.lastname",
    )
def test_string_dependency_resolution_asselectable(self):
    """secondary= may be a string join() expression evaluated at
    configuration time."""

    class A(Base):
        __tablename__ = "a"
        id = Column(Integer, primary_key=True)
        b_id = Column(ForeignKey("b.id"))
        d = relationship(
            "D",
            secondary="join(B, D, B.d_id == D.id)."
            "join(C, C.d_id == D.id)",
            primaryjoin="and_(A.b_id == B.id, A.id == C.a_id)",
            secondaryjoin="D.id == B.d_id",
        )

    class B(Base):
        __tablename__ = "b"
        id = Column(Integer, primary_key=True)
        d_id = Column(ForeignKey("d.id"))

    class C(Base):
        __tablename__ = "c"
        id = Column(Integer, primary_key=True)
        a_id = Column(ForeignKey("a.id"))
        d_id = Column(ForeignKey("d.id"))

    class D(Base):
        __tablename__ = "d"
        id = Column(Integer, primary_key=True)

    s = Session()
    # the string join() renders as an aliased compound secondary
    self.assert_compile(
        s.query(A).join(A.d),
        "SELECT a.id AS a_id, a.b_id AS a_b_id FROM a JOIN "
        "(b AS b_1 JOIN d AS d_1 ON b_1.d_id = d_1.id "
        "JOIN c AS c_1 ON c_1.d_id = d_1.id) ON a.b_id = b_1.id "
        "AND a.id = c_1.a_id JOIN d ON d.id = b_1.d_id",
    )
def test_string_dependency_resolution_no_table(self):
    """Referencing "__table__" inside a string primaryjoin fails with an
    informative error rather than evaluating the dunder."""

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"
        id = Column(
            Integer, primary_key=True, test_needs_autoincrement=True
        )
        name = Column(String(50))

    class Bar(Base, fixtures.ComparableEntity):
        __tablename__ = "bar"
        id = Column(Integer, primary_key=True)
        rel = relationship("User", primaryjoin="User.id==Bar.__table__.id")

    assert_raises_message(
        exc.InvalidRequestError,
        "does not have a mapped column named " "'__table__'",
        configure_mappers,
    )
def test_string_w_pj_annotations(self):
    """remote()/foreign() annotations inside a string primaryjoin
    establish the local/remote column pairs."""

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"
        id = Column(
            Integer, primary_key=True, test_needs_autoincrement=True
        )
        name = Column(String(50))

    class Address(Base, fixtures.ComparableEntity):
        __tablename__ = "addresses"
        id = Column(
            Integer, primary_key=True, test_needs_autoincrement=True
        )
        email = Column(String(50))
        user_id = Column(Integer)
        user = relationship(
            "User", primaryjoin="remote(User.id)==foreign(Address.user_id)"
        )

    eq_(
        Address.user.property._join_condition.local_remote_pairs,
        [(Address.__table__.c.user_id, User.__table__.c.id)],
    )
def test_string_dependency_resolution_no_magic(self):
    """test that full tinkery expressions work as written"""

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"
        id = Column(Integer, primary_key=True)
        addresses = relationship(
            "Address",
            # drill through the prop to the raw Column explicitly
            primaryjoin="User.id==Address.user_id.prop.columns[0]",
        )

    class Address(Base, fixtures.ComparableEntity):
        __tablename__ = "addresses"
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey("users.id"))

    configure_mappers()
    eq_(
        str(User.addresses.prop.primaryjoin),
        "users.id = addresses.user_id",
    )
def test_string_dependency_resolution_module_qualified(self):
    """Class names in relationship strings may be fully module-qualified."""

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"
        id = Column(Integer, primary_key=True)
        addresses = relationship(
            "%s.Address" % __name__,
            primaryjoin="%s.User.id==%s.Address.user_id.prop.columns[0]"
            % (__name__, __name__),
        )

    class Address(Base, fixtures.ComparableEntity):
        __tablename__ = "addresses"
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey("users.id"))

    configure_mappers()
    eq_(
        str(User.addresses.prop.primaryjoin),
        "users.id = addresses.user_id",
    )
def test_string_dependency_resolution_in_backref(self):
    """A string primaryjoin is shared with the backref-generated
    reverse relationship."""

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"
        id = Column(Integer, primary_key=True)
        name = Column(String(50))
        addresses = relationship(
            "Address",
            primaryjoin="User.id==Address.user_id",
            backref="user",
        )

    class Address(Base, fixtures.ComparableEntity):
        __tablename__ = "addresses"
        id = Column(Integer, primary_key=True)
        email = Column(String(50))
        user_id = Column(Integer, ForeignKey("users.id"))

    configure_mappers()
    eq_(
        str(User.addresses.property.primaryjoin),
        str(Address.user.property.primaryjoin),
    )
def test_string_dependency_resolution_tables(self):
    """secondary= given as a table-name string resolves to the Table
    object, even when the Table is defined afterwards."""

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"
        id = Column(Integer, primary_key=True)
        name = Column(String(50))
        props = relationship(
            "Prop",
            secondary="user_to_prop",
            primaryjoin="User.id==user_to_prop.c.u" "ser_id",
            secondaryjoin="user_to_prop.c.prop_id=" "=Prop.id",
            backref="users",
        )

    class Prop(Base, fixtures.ComparableEntity):
        __tablename__ = "props"
        id = Column(Integer, primary_key=True)
        name = Column(String(50))

    # association table created after the relationship references it
    user_to_prop = Table(
        "user_to_prop",
        Base.metadata,
        Column("user_id", Integer, ForeignKey("users.id")),
        Column("prop_id", Integer, ForeignKey("props.id")),
    )

    configure_mappers()
    assert (
        class_mapper(User).get_property("props").secondary is user_to_prop
    )
def test_string_dependency_resolution_schemas(self):
    """Schema-qualified table names ("fooschema.user_to_prop") resolve
    inside relationship strings."""

    Base = decl.declarative_base()

    class User(Base):
        __tablename__ = "users"
        __table_args__ = {"schema": "fooschema"}

        id = Column(Integer, primary_key=True)
        name = Column(String(50))
        props = relationship(
            "Prop",
            secondary="fooschema.user_to_prop",
            primaryjoin="User.id==fooschema.user_to_prop.c.user_id",
            secondaryjoin="fooschema.user_to_prop.c.prop_id==Prop.id",
            backref="users",
        )

    class Prop(Base):
        __tablename__ = "props"
        __table_args__ = {"schema": "fooschema"}

        id = Column(Integer, primary_key=True)
        name = Column(String(50))

    user_to_prop = Table(
        "user_to_prop",
        Base.metadata,
        Column("user_id", Integer, ForeignKey("fooschema.users.id")),
        Column("prop_id", Integer, ForeignKey("fooschema.props.id")),
        schema="fooschema",
    )

    configure_mappers()
    assert (
        class_mapper(User).get_property("props").secondary is user_to_prop
    )
def test_string_dependency_resolution_annotations(self):
    """remote()/foreign() may wrap a SQL-function expression inside a
    string primaryjoin; the foreign keys are still computed."""

    Base = decl.declarative_base()

    class Parent(Base):
        __tablename__ = "parent"
        id = Column(Integer, primary_key=True)
        name = Column(String)
        children = relationship(
            "Child",
            primaryjoin="Parent.name=="
            "remote(foreign(func.lower(Child.name_upper)))",
        )

    class Child(Base):
        __tablename__ = "child"
        id = Column(Integer, primary_key=True)
        name_upper = Column(String)

    configure_mappers()
    eq_(
        Parent.children.property._calculated_foreign_keys,
        set([Child.name_upper.property.columns[0]]),
    )
def test_shared_class_registry(self):
    """Two declarative bases sharing one class_registry can resolve
    each other's class names in relationship strings."""

    reg = {}
    Base1 = decl.declarative_base(testing.db, class_registry=reg)
    Base2 = decl.declarative_base(testing.db, class_registry=reg)

    class A(Base1):
        __tablename__ = "a"
        id = Column(Integer, primary_key=True)

    class B(Base2):
        __tablename__ = "b"
        id = Column(Integer, primary_key=True)
        aid = Column(Integer, ForeignKey(A.id))
        # "A" lives on Base1 but is found via the shared registry
        as_ = relationship("A")

    assert B.as_.property.mapper.class_ is A
def test_uncompiled_attributes_in_relationship(self):
    """relationship() arguments given as not-yet-configured class
    attributes still resolve correctly."""

    class Address(Base, fixtures.ComparableEntity):
        __tablename__ = "addresses"
        id = Column(
            Integer, primary_key=True, test_needs_autoincrement=True
        )
        email = Column(String(50))
        user_id = Column(Integer, ForeignKey("users.id"))

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"
        id = Column(
            Integer, primary_key=True, test_needs_autoincrement=True
        )
        name = Column(String(50))
        addresses = relationship(
            "Address",
            order_by=Address.email,
            foreign_keys=Address.user_id,
            remote_side=Address.user_id,
        )

    # get the mapper for User. User mapper will compile,
    # "addresses" relationship will call upon Address.user_id for
    # its clause element. Address.user_id is a _CompileOnAttr,
    # which then calls class_mapper(Address). But ! We're already
    # "in compilation", but class_mapper(Address) needs to
    # initialize regardless, or COA's assertion fails and things
    # generally go downhill from there.
    class_mapper(User)
    Base.metadata.create_all()
    sess = create_session()
    u1 = User(
        name="ed",
        addresses=[
            Address(email="abc"),
            Address(email="xyz"),
            Address(email="def"),
        ],
    )
    sess.add(u1)
    sess.flush()
    sess.expunge_all()
    # addresses come back ordered by email per order_by=Address.email
    eq_(
        sess.query(User).filter(User.name == "ed").one(),
        User(
            name="ed",
            addresses=[
                Address(email="abc"),
                Address(email="def"),
                Address(email="xyz"),
            ],
        ),
    )
def test_nice_dependency_error(self):
    """A column_property built against another class's column raises
    ArgumentError at configure time."""

    class User(Base):
        __tablename__ = "users"
        id = Column("id", Integer, primary_key=True)
        addresses = relationship("Address")

    class Address(Base):
        __tablename__ = "addresses"
        id = Column(Integer, primary_key=True)
        # User.id == 5 references the *users* table, not addresses
        foo = sa.orm.column_property(User.id == 5)

    # this used to raise an error when accessing User.id but that's
    # no longer the case since we got rid of _CompileOnAttr.
    assert_raises(sa.exc.ArgumentError, configure_mappers)
def test_nice_dependency_error_works_with_hasattr(self):
    """The mapper-initialization failure is preserved across repeated
    configure_mappers() calls, even after a hasattr() probe."""

    class User(Base):
        __tablename__ = "users"
        id = Column("id", Integer, primary_key=True)
        # "Address" is never defined, so configuration fails
        addresses = relationship("Address")

    # hasattr() on a compile-loaded attribute
    try:
        hasattr(User.addresses, "property")
    except exc.InvalidRequestError:
        assert sa.util.compat.py32

    # the exception is preserved.  Remains the
    # same through repeated calls.
    for i in range(3):
        assert_raises_message(
            sa.exc.InvalidRequestError,
            "^One or more mappers failed to initialize"
            " - can't proceed with initialization of other mappers. "
            r"Triggering mapper: 'mapped class User->users'. "
            "Original exception was: When initializing.*",
            configure_mappers,
        )
def test_custom_base(self):
    """declarative_base(cls=...) mixes the given class into the
    generated base."""

    class MyBase(object):
        def foobar(self):
            return "foobar"

    custom_base = decl.declarative_base(cls=MyBase)
    # the base carries both declarative machinery and MyBase behavior
    assert hasattr(custom_base, "metadata")
    assert custom_base().foobar() == "foobar"
def test_uses_get_on_class_col_fk(self):
    # test [ticket:1492]
    """A many-to-one over a class-bound FK uses the identity-map 'get'
    strategy, so loading the parent emits no SQL when it's cached."""

    class Master(Base):
        __tablename__ = "master"
        id = Column(
            Integer, primary_key=True, test_needs_autoincrement=True
        )

    class Detail(Base):
        __tablename__ = "detail"
        id = Column(
            Integer, primary_key=True, test_needs_autoincrement=True
        )
        master_id = Column(None, ForeignKey(Master.id))
        master = relationship(Master)

    Base.metadata.create_all()
    configure_mappers()
    assert class_mapper(Detail).get_property("master").strategy.use_get
    m1 = Master()
    d1 = Detail(master=m1)
    sess = create_session()
    sess.add(d1)
    sess.flush()
    sess.expunge_all()
    d1 = sess.query(Detail).first()
    m1 = sess.query(Master).first()

    def go():
        assert d1.master

    # master is already in the identity map: zero queries expected
    self.assert_sql_count(testing.db, go, 0)
def test_index_doesnt_compile(self):
    """An Index built from a class-bound column works even when mapper
    configuration fails for an unrelated reason."""

    class User(Base):
        __tablename__ = "users"
        id = Column("id", Integer, primary_key=True)
        name = Column("name", String(50))
        error = relationship("Address")

    i = Index("my_index", User.name)

    # compile fails due to the nonexistent Addresses relationship
    assert_raises(sa.exc.InvalidRequestError, configure_mappers)

    # index configured
    assert i in User.__table__.indexes
    assert User.__table__.c.id not in set(i.columns)
    assert User.__table__.c.name in set(i.columns)

    # tables create fine
    Base.metadata.create_all()
def test_add_prop(self):
    """Columns and relationships assigned onto a mapped class after
    class creation are picked up, including alternate keys."""

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"
        id = Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        )

    # attributes added post-hoc, outside the class body
    User.name = Column("name", String(50))
    User.addresses = relationship("Address", backref="user")

    class Address(Base, fixtures.ComparableEntity):
        __tablename__ = "addresses"
        id = Column(
            Integer, primary_key=True, test_needs_autoincrement=True
        )

    Address.email = Column(String(50), key="_email")
    Address.user_id = Column(
        "user_id", Integer, ForeignKey("users.id"), key="_user_id"
    )

    Base.metadata.create_all()
    # table columns are keyed by the explicit key=, named by name
    eq_(Address.__table__.c["id"].name, "id")
    eq_(Address.__table__.c["_email"].name, "email")
    eq_(Address.__table__.c["_user_id"].name, "user_id")
    u1 = User(
        name="u1", addresses=[Address(email="one"), Address(email="two")]
    )
    sess = create_session()
    sess.add(u1)
    sess.flush()
    sess.expunge_all()
    eq_(
        sess.query(User).all(),
        [
            User(
                name="u1",
                addresses=[Address(email="one"), Address(email="two")],
            )
        ],
    )
    a1 = sess.query(Address).filter(Address.email == "two").one()
    eq_(a1, Address(email="two"))
    eq_(a1.user, User(name="u1"))
def test_alt_name_attr_subclass_column_inline(self):
    # [ticket:2900]
    """Assigning a parent's mapped column under a new name in a subclass
    body produces a synonym."""

    class A(Base):
        __tablename__ = "a"
        id = Column("id", Integer, primary_key=True)
        data = Column("data")

    class ASub(A):
        # re-map A.data under a different attribute name
        brap = A.data

    assert ASub.brap.property is A.data.property
    assert isinstance(
        ASub.brap.original_property, properties.SynonymProperty
    )
def test_alt_name_attr_subclass_relationship_inline(self):
    # [ticket:2900]
    """Assigning a parent's relationship under a new name in a subclass
    body produces a synonym usable in the constructor."""

    class A(Base):
        __tablename__ = "a"
        id = Column("id", Integer, primary_key=True)
        b_id = Column(Integer, ForeignKey("b.id"))
        b = relationship("B", backref="as_")

    class B(Base):
        __tablename__ = "b"
        id = Column("id", Integer, primary_key=True)

    configure_mappers()

    class ASub(A):
        brap = A.b

    assert ASub.brap.property is A.b.property
    assert isinstance(
        ASub.brap.original_property, properties.SynonymProperty
    )
    # synonym accepted as a constructor keyword
    ASub(brap=B())
def test_alt_name_attr_subclass_column_attrset(self):
    # [ticket:2900]
    """Setting a mapped column under a new attribute name after class
    creation produces a synonym."""

    class A(Base):
        __tablename__ = "a"
        id = Column("id", Integer, primary_key=True)
        data = Column("data")

    A.brap = A.data
    assert A.brap.property is A.data.property
    assert isinstance(A.brap.original_property, properties.SynonymProperty)
def test_alt_name_attr_subclass_relationship_attrset(self):
    # [ticket:2900]
    """Setting a relationship under a new attribute name after class
    creation produces a synonym usable in the constructor."""

    class A(Base):
        __tablename__ = "a"
        id = Column("id", Integer, primary_key=True)
        b_id = Column(Integer, ForeignKey("b.id"))
        b = relationship("B", backref="as_")

    A.brap = A.b

    class B(Base):
        __tablename__ = "b"
        id = Column("id", Integer, primary_key=True)

    assert A.brap.property is A.b.property
    assert isinstance(A.brap.original_property, properties.SynonymProperty)
    A(brap=B())
def test_eager_order_by(self):
    """order_by= given as a class-bound column applies to joined
    eager loads."""

    class Address(Base, fixtures.ComparableEntity):
        __tablename__ = "addresses"
        id = Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        )
        email = Column("email", String(50))
        user_id = Column("user_id", Integer, ForeignKey("users.id"))

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"
        id = Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        )
        name = Column("name", String(50))
        addresses = relationship("Address", order_by=Address.email)

    Base.metadata.create_all()
    # inserted out of order on purpose
    u1 = User(
        name="u1", addresses=[Address(email="two"), Address(email="one")]
    )
    sess = create_session()
    sess.add(u1)
    sess.flush()
    sess.expunge_all()
    eq_(
        sess.query(User).options(joinedload(User.addresses)).all(),
        [
            User(
                name="u1",
                addresses=[Address(email="one"), Address(email="two")],
            )
        ],
    )
def test_order_by_multi(self):
    """order_by= accepts a tuple of class-bound columns."""

    class Address(Base, fixtures.ComparableEntity):
        __tablename__ = "addresses"
        id = Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        )
        email = Column("email", String(50))
        user_id = Column("user_id", Integer, ForeignKey("users.id"))

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"
        id = Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        )
        name = Column("name", String(50))
        addresses = relationship(
            "Address", order_by=(Address.email, Address.id)
        )

    Base.metadata.create_all()
    u1 = User(
        name="u1", addresses=[Address(email="two"), Address(email="one")]
    )
    sess = create_session()
    sess.add(u1)
    sess.flush()
    sess.expunge_all()
    u = sess.query(User).filter(User.name == "u1").one()
    # just exercise the loader; the tuple order_by must not blow up
    u.addresses
def test_as_declarative(self):
    """instrument_declarative() maps plain classes without a declarative
    base, sharing one registry for string resolution."""

    class User(fixtures.ComparableEntity):
        __tablename__ = "users"
        id = Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        )
        name = Column("name", String(50))
        addresses = relationship("Address", backref="user")

    class Address(fixtures.ComparableEntity):
        __tablename__ = "addresses"
        id = Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        )
        email = Column("email", String(50))
        user_id = Column("user_id", Integer, ForeignKey("users.id"))

    reg = {}
    decl.instrument_declarative(User, reg, Base.metadata)
    decl.instrument_declarative(Address, reg, Base.metadata)
    Base.metadata.create_all()
    u1 = User(
        name="u1", addresses=[Address(email="one"), Address(email="two")]
    )
    sess = create_session()
    sess.add(u1)
    sess.flush()
    sess.expunge_all()
    eq_(
        sess.query(User).all(),
        [
            User(
                name="u1",
                addresses=[Address(email="one"), Address(email="two")],
            )
        ],
    )
def test_custom_mapper_attribute(self):
    """A __mapper_cls__ class attribute supplies a custom mapper
    factory."""

    def mymapper(cls, tbl, **kwargs):
        m = sa.orm.mapper(cls, tbl, **kwargs)
        m.CHECK = True
        return m

    base = decl.declarative_base()

    class Foo(base):
        __tablename__ = "foo"
        __mapper_cls__ = mymapper
        id = Column(Integer, primary_key=True)

    # the custom factory stamped the mapper
    eq_(Foo.__mapper__.CHECK, True)
def test_custom_mapper_argument(self):
    """declarative_base(mapper=...) supplies a custom mapper factory."""

    def mymapper(cls, tbl, **kwargs):
        m = sa.orm.mapper(cls, tbl, **kwargs)
        m.CHECK = True
        return m

    base = decl.declarative_base(mapper=mymapper)

    class Foo(base):
        __tablename__ = "foo"
        id = Column(Integer, primary_key=True)

    eq_(Foo.__mapper__.CHECK, True)
def test_no_change_to_all_descriptors(self):
    """A minimal mapping exposes only its own attribute in
    all_orm_descriptors."""

    base = decl.declarative_base()

    class Foo(base):
        __tablename__ = "foo"
        id = Column(Integer, primary_key=True)

    eq_(Foo.__mapper__.all_orm_descriptors.keys(), ["id"])
def test_oops(self):
    """A trailing comma that turns a Column into a tuple produces a
    warning instead of a silent mis-mapping."""

    with testing.expect_warnings(
        "Ignoring declarative-like tuple value of " "attribute 'name'"
    ):

        class User(Base, fixtures.ComparableEntity):
            __tablename__ = "users"
            id = Column("id", Integer, primary_key=True)
            # deliberate trailing comma -> (Column(...),)
            name = (Column("name", String(50)),)
def test_table_args_no_dict(self):
    """__table_args__ as a tuple of constraints only (no kwargs dict)."""

    class Foo1(Base):
        __tablename__ = "foo"
        __table_args__ = (ForeignKeyConstraint(["id"], ["foo.bar"]),)
        id = Column("id", Integer, primary_key=True)
        bar = Column("bar", Integer)

    # the self-referential FK constraint was applied
    assert Foo1.__table__.c.id.references(Foo1.__table__.c.bar)
def test_table_args_type(self):
    """A bare (non-tuple) constraint as __table_args__ raises
    ArgumentError."""

    def err():
        class Foo1(Base):
            __tablename__ = "foo"
            __table_args__ = ForeignKeyConstraint(["id"], ["foo.id"])
            id = Column("id", Integer, primary_key=True)

    assert_raises_message(
        sa.exc.ArgumentError, "__table_args__ value must be a tuple, ", err
    )
def test_table_args_none(self):
    """__table_args__ = None is accepted and contributes nothing."""

    class Foo2(Base):
        __tablename__ = "foo"
        __table_args__ = None
        id = Column("id", Integer, primary_key=True)

    assert Foo2.__table__.kwargs == {}
def test_table_args_dict_format(self):
    """__table_args__ as a plain dict becomes Table keyword arguments."""

    class Foo2(Base):
        __tablename__ = "foo"
        __table_args__ = {"mysql_engine": "InnoDB"}
        id = Column("id", Integer, primary_key=True)

    assert Foo2.__table__.kwargs["mysql_engine"] == "InnoDB"
def test_table_args_tuple_format(self):
    """__table_args__ as (constraints..., kwargs-dict) applies both."""

    class Foo2(Base):
        __tablename__ = "foo"
        __table_args__ = {"mysql_engine": "InnoDB"}
        id = Column("id", Integer, primary_key=True)

    class Bar(Base):
        __tablename__ = "bar"
        __table_args__ = (
            ForeignKeyConstraint(["id"], ["foo.id"]),
            {"mysql_engine": "InnoDB"},
        )
        id = Column("id", Integer, primary_key=True)

    assert Bar.__table__.c.id.references(Foo2.__table__.c.id)
    assert Bar.__table__.kwargs["mysql_engine"] == "InnoDB"
def test_table_cls_attribute(self):
    """__table_cls__ can substitute a custom Table factory, here one
    that renames the table."""

    class Foo(Base):
        __tablename__ = "foo"

        @classmethod
        def __table_cls__(cls, *arg, **kw):
            name = arg[0]
            return Table(name + "bat", *arg[1:], **kw)

        id = Column(Integer, primary_key=True)

    eq_(Foo.__table__.name, "foobat")
def test_table_cls_attribute_return_none(self):
    """A __table_cls__ that returns None makes the class single-table
    inheritance against its parent's table."""

    from sqlalchemy.schema import Column, PrimaryKeyConstraint

    class AutoTable(object):
        @declared_attr
        def __tablename__(cls):
            return cls.__name__

        @classmethod
        def __table_cls__(cls, *arg, **kw):
            # only build a Table if a primary key is present
            for obj in arg[1:]:
                if (
                    isinstance(obj, Column) and obj.primary_key
                ) or isinstance(obj, PrimaryKeyConstraint):
                    return Table(*arg, **kw)

            return None

    class Person(AutoTable, Base):
        id = Column(Integer, primary_key=True)

    class Employee(Person):
        # no primary key -> __table_cls__ returns None
        employee_name = Column(String)

    is_(inspect(Employee).local_table, Person.__table__)
def test_expression(self):
    """A scalar-select column_property added to a mapped class after
    creation loads with the entity."""

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"
        id = Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        )
        name = Column("name", String(50))
        addresses = relationship("Address", backref="user")

    class Address(Base, fixtures.ComparableEntity):
        __tablename__ = "addresses"
        id = Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        )
        email = Column("email", String(50))
        user_id = Column("user_id", Integer, ForeignKey("users.id"))

    # correlated count of addresses, attached post-hoc
    User.address_count = sa.orm.column_property(
        sa.select([sa.func.count(Address.id)])
        .where(Address.user_id == User.id)
        .as_scalar()
    )
    Base.metadata.create_all()
    u1 = User(
        name="u1", addresses=[Address(email="one"), Address(email="two")]
    )
    sess = create_session()
    sess.add(u1)
    sess.flush()
    sess.expunge_all()
    eq_(
        sess.query(User).all(),
        [
            User(
                name="u1",
                address_count=2,
                addresses=[Address(email="one"), Address(email="two")],
            )
        ],
    )
def test_useless_declared_attr(self):
    """A declared_attr returning a column_property on a plain mapped
    class (not a mixin) still works as expected."""

    class Address(Base, fixtures.ComparableEntity):
        __tablename__ = "addresses"
        id = Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        )
        email = Column("email", String(50))
        user_id = Column("user_id", Integer, ForeignKey("users.id"))

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"
        id = Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        )
        name = Column("name", String(50))
        addresses = relationship("Address", backref="user")

        @declared_attr
        def address_count(cls):
            # this doesn't really gain us anything.  but if
            # one is used, lets have it function as expected...
            return sa.orm.column_property(
                sa.select([sa.func.count(Address.id)]).where(
                    Address.user_id == cls.id
                )
            )

    Base.metadata.create_all()
    u1 = User(
        name="u1", addresses=[Address(email="one"), Address(email="two")]
    )
    sess = create_session()
    sess.add(u1)
    sess.flush()
    sess.expunge_all()
    eq_(
        sess.query(User).all(),
        [
            User(
                name="u1",
                address_count=2,
                addresses=[Address(email="one"), Address(email="two")],
            )
        ],
    )
def test_declared_on_base_class(self):
    """A declared_attr on a mapped base class maps into the base's own
    table, not the subclass's."""

    class MyBase(Base):
        __tablename__ = "foo"
        id = Column(Integer, primary_key=True)

        @declared_attr
        def somecol(cls):
            return Column(Integer)

    class MyClass(MyBase):
        __tablename__ = "bar"
        id = Column(Integer, ForeignKey("foo.id"), primary_key=True)

    # previously, the "somecol" declared_attr would be ignored
    # by the mapping and would remain unused.  now we take
    # it as part of MyBase.
    assert "somecol" in MyBase.__table__.c
    assert "somecol" not in MyClass.__table__.c
def test_decl_cascading_warns_non_mixin(self):
    """@declared_attr.cascading on a directly-mapped (non-mixin) class
    emits a warning."""

    with expect_warnings(
        "Use of @declared_attr.cascading only applies to "
        "Declarative 'mixin' and 'abstract' classes.  "
        "Currently, this flag is ignored on mapped class "
        "<class '.*.MyBase'>"
    ):

        class MyBase(Base):
            __tablename__ = "foo"
            id = Column(Integer, primary_key=True)

            @declared_attr.cascading
            def somecol(cls):
                return Column(Integer)
def test_column(self):
    """Columns assigned after class creation, both named and unnamed,
    become fully instrumented attributes."""

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"
        id = Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        )
        name = Column("name", String(50))

    User.a = Column("a", String(10))
    User.b = Column(String(10))  # name inferred from the attribute
    Base.metadata.create_all()
    u1 = User(name="u1", a="a", b="b")
    eq_(u1.a, "a")
    # history tracking works for the post-hoc attribute
    eq_(User.a.get_history(u1), (["a"], (), ()))
    sess = create_session()
    sess.add(u1)
    sess.flush()
    sess.expunge_all()
    eq_(sess.query(User).all(), [User(name="u1", a="a", b="b")])
def test_column_properties(self):
    """A correlated scalar-select column_property declared inline in
    the class body loads with the entity."""

    class Address(Base, fixtures.ComparableEntity):
        __tablename__ = "addresses"
        id = Column(
            Integer, primary_key=True, test_needs_autoincrement=True
        )
        email = Column(String(50))
        user_id = Column(Integer, ForeignKey("users.id"))

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"
        id = Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        )
        name = Column("name", String(50))

        # "id" here is the class-body Column defined just above
        adr_count = sa.orm.column_property(
            sa.select(
                [sa.func.count(Address.id)], Address.user_id == id
            ).as_scalar()
        )
        addresses = relationship(Address)

    Base.metadata.create_all()
    u1 = User(
        name="u1", addresses=[Address(email="one"), Address(email="two")]
    )
    sess = create_session()
    sess.add(u1)
    sess.flush()
    sess.expunge_all()
    eq_(
        sess.query(User).all(),
        [
            User(
                name="u1",
                adr_count=2,
                addresses=[Address(email="one"), Address(email="two")],
            )
        ],
    )
def test_column_properties_2(self):
    """Referencing another class's column attribute directly does not
    copy that column into this class's table."""

    class Address(Base, fixtures.ComparableEntity):
        __tablename__ = "addresses"
        id = Column(Integer, primary_key=True)
        email = Column(String(50))
        user_id = Column(Integer, ForeignKey("users.id"))

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"
        id = Column("id", Integer, primary_key=True)
        name = Column("name", String(50))

        # this is not "valid" but we want to test that Address.id
        # doesn't get stuck into user's table
        adr_count = Address.id

    eq_(set(User.__table__.c.keys()), set(["id", "name"]))
    eq_(set(Address.__table__.c.keys()), set(["id", "email", "user_id"]))
def test_deferred(self):
    """A deferred() column is absent from the loaded instance and emits
    exactly one query when first accessed."""

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"
        id = Column(
            Integer, primary_key=True, test_needs_autoincrement=True
        )
        name = sa.orm.deferred(Column(String(50)))

    Base.metadata.create_all()
    sess = create_session()
    sess.add(User(name="u1"))
    sess.flush()
    sess.expunge_all()
    u1 = sess.query(User).filter(User.name == "u1").one()
    # deferred column not loaded yet
    assert "name" not in u1.__dict__

    def go():
        eq_(u1.name, "u1")

    # exactly one lazy-load query
    self.assert_sql_count(testing.db, go, 1)
def test_composite_inline(self):
    """composite() with inline Column arguments creates the columns and
    round-trips the composite value."""

    class AddressComposite(fixtures.ComparableEntity):
        def __init__(self, street, state):
            self.street = street
            self.state = state

        def __composite_values__(self):
            return [self.street, self.state]

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "user"
        id = Column(
            Integer, primary_key=True, test_needs_autoincrement=True
        )
        # columns are declared inside the composite() call itself
        address = composite(
            AddressComposite,
            Column("street", String(50)),
            Column("state", String(2)),
        )

    Base.metadata.create_all()
    sess = Session()
    sess.add(User(address=AddressComposite("123 anywhere street", "MD")))
    sess.commit()
    eq_(
        sess.query(User).all(),
        [User(address=AddressComposite("123 anywhere street", "MD"))],
    )
def test_composite_separate(self):
    """composite() referencing separately-declared Column attributes
    round-trips the composite value."""

    class AddressComposite(fixtures.ComparableEntity):
        def __init__(self, street, state):
            self.street = street
            self.state = state

        def __composite_values__(self):
            return [self.street, self.state]

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "user"
        id = Column(
            Integer, primary_key=True, test_needs_autoincrement=True
        )
        street = Column(String(50))
        state = Column(String(2))
        # composite built from the class-level columns above
        address = composite(AddressComposite, street, state)

    Base.metadata.create_all()
    sess = Session()
    sess.add(User(address=AddressComposite("123 anywhere street", "MD")))
    sess.commit()
    eq_(
        sess.query(User).all(),
        [User(address=AddressComposite("123 anywhere street", "MD"))],
    )
def test_mapping_to_join(self):
    """A declarative class can map directly to a join of two tables,
    merging the equated id columns into one property with
    ``column_property``."""
    users = Table(
        "users", Base.metadata, Column("id", Integer, primary_key=True)
    )
    addresses = Table(
        "addresses",
        Base.metadata,
        Column("id", Integer, primary_key=True),
        Column("user_id", Integer, ForeignKey("users.id")),
    )
    usersaddresses = sa.join(
        users, addresses, users.c.id == addresses.c.user_id
    )

    class User(Base):
        __table__ = usersaddresses
        __table_args__ = {"primary_key": [users.c.id]}

        # need to use column_property for now
        user_id = column_property(users.c.id, addresses.c.user_id)
        address_id = addresses.c.id

    # "user_id" carries both sides of the join condition, in order
    assert User.__mapper__.get_property("user_id").columns[0] is users.c.id
    assert (
        User.__mapper__.get_property("user_id").columns[1]
        is addresses.c.user_id
    )
def test_synonym_inline(self):
    """A synonym with an inline property descriptor routes attribute
    reads/writes through the descriptor while remaining queryable."""

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"

        id = Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        )
        _name = Column("name", String(50))

        def _set_name(self, name):
            # setter mangles the value so we can observe it was used
            self._name = "SOMENAME " + name

        def _get_name(self):
            return self._name

        name = sa.orm.synonym(
            "_name", descriptor=property(_get_name, _set_name)
        )

    Base.metadata.create_all()
    sess = create_session()
    u1 = User(name="someuser")
    eq_(u1.name, "SOMENAME someuser")
    sess.add(u1)
    sess.flush()
    eq_(
        sess.query(User).filter(User.name == "SOMENAME someuser").one(), u1
    )
def test_synonym_no_descriptor(self):
    """A synonym without a descriptor can still supply a custom
    ``comparator_factory`` that alters query-level comparisons."""
    from sqlalchemy.orm.properties import ColumnProperty

    class CustomCompare(ColumnProperty.Comparator):
        __hash__ = None

        def __eq__(self, other):
            # compare against the stored value with " FOO" appended
            return self.__clause_element__() == other + " FOO"

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"

        id = Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        )
        _name = Column("name", String(50))
        name = sa.orm.synonym("_name", comparator_factory=CustomCompare)

    Base.metadata.create_all()
    sess = create_session()
    u1 = User(name="someuser FOO")
    sess.add(u1)
    sess.flush()
    # "someuser" matches because the comparator appends " FOO"
    eq_(sess.query(User).filter(User.name == "someuser").one(), u1)
def test_synonym_added(self):
    """A synonym can be attached to a mapped class after the class
    definition completes."""

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"

        id = Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        )
        _name = Column("name", String(50))

        def _set_name(self, name):
            self._name = "SOMENAME " + name

        def _get_name(self):
            return self._name

        name = property(_get_name, _set_name)

    # replace the plain property with a synonym after the fact
    User.name = sa.orm.synonym("_name", descriptor=User.name)
    Base.metadata.create_all()
    sess = create_session()
    u1 = User(name="someuser")
    eq_(u1.name, "SOMENAME someuser")
    sess.add(u1)
    sess.flush()
    eq_(
        sess.query(User).filter(User.name == "SOMENAME someuser").one(), u1
    )
def test_reentrant_compile_via_foreignkey(self):
    """Using ``ForeignKey(User.id)`` (a class-bound attribute) in a
    second class must not trigger a re-entrant mapper compile; mappers
    configure cleanly and the FK resolves to ``users.id``."""

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"

        id = Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        )
        name = Column("name", String(50))
        addresses = relationship("Address", backref="user")

    class Address(Base, fixtures.ComparableEntity):
        __tablename__ = "addresses"

        id = Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        )
        email = Column("email", String(50))
        user_id = Column("user_id", Integer, ForeignKey(User.id))

    # previous versions would force a re-entrant mapper compile via
    # the User.id inside the ForeignKey but this is no longer the
    # case
    sa.orm.configure_mappers()
    eq_(
        list(Address.user_id.property.columns[0].foreign_keys)[0].column,
        User.__table__.c.id,
    )
    Base.metadata.create_all()
    u1 = User(
        name="u1", addresses=[Address(email="one"), Address(email="two")]
    )
    sess = create_session()
    sess.add(u1)
    sess.flush()
    sess.expunge_all()
    eq_(
        sess.query(User).all(),
        [
            User(
                name="u1",
                addresses=[Address(email="one"), Address(email="two")],
            )
        ],
    )
def test_relationship_reference(self):
    """A relationship with an explicit ``primaryjoin`` referencing
    another declarative class works, and a correlated-scalar
    ``column_property`` can be attached after class definition."""

    class Address(Base, fixtures.ComparableEntity):
        __tablename__ = "addresses"

        id = Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        )
        email = Column("email", String(50))
        user_id = Column("user_id", Integer, ForeignKey("users.id"))

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"

        id = Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        )
        name = Column("name", String(50))
        addresses = relationship(
            "Address", backref="user", primaryjoin=id == Address.user_id
        )

    # attach a correlated count-of-addresses scalar after the fact
    User.address_count = sa.orm.column_property(
        sa.select([sa.func.count(Address.id)])
        .where(Address.user_id == User.id)
        .as_scalar()
    )
    Base.metadata.create_all()
    u1 = User(
        name="u1", addresses=[Address(email="one"), Address(email="two")]
    )
    sess = create_session()
    sess.add(u1)
    sess.flush()
    sess.expunge_all()
    eq_(
        sess.query(User).all(),
        [
            User(
                name="u1",
                address_count=2,
                addresses=[Address(email="one"), Address(email="two")],
            )
        ],
    )
def test_pk_with_fk_init(self):
    """A composite primary key that includes a ForeignKey column keeps
    the declared column order in ``mapper.primary_key``."""

    class Bar(Base):
        __tablename__ = "bar"

        id = sa.Column(
            sa.Integer, sa.ForeignKey("foo.id"), primary_key=True
        )
        ex = sa.Column(sa.Integer, primary_key=True)

    class Foo(Base):
        __tablename__ = "foo"

        id = sa.Column(sa.Integer, primary_key=True)
        bars = sa.orm.relationship(Bar)

    assert Bar.__mapper__.primary_key[0] is Bar.__table__.c.id
    assert Bar.__mapper__.primary_key[1] is Bar.__table__.c.ex
def test_with_explicit_autoloaded(self):
    """A declarative class may use an explicit ``__table__`` that is
    reflected (``autoload=True``) from a pre-existing table."""
    meta = MetaData(testing.db)
    t1 = Table(
        "t1",
        meta,
        Column("id", String(50), primary_key=True),
        Column("data", String(50)),
    )
    meta.create_all()
    try:

        class MyObj(Base):
            # reflect the just-created table into Base's metadata
            __table__ = Table("t1", Base.metadata, autoload=True)

        sess = create_session()
        m = MyObj(id="someid", data="somedata")
        sess.add(m)
        sess.flush()
        eq_(t1.select().execute().fetchall(), [("someid", "somedata")])
    finally:
        # always drop the reflected table, even on failure
        meta.drop_all()
def test_synonym_for(self):
    """``@synonym_for`` maps a plain property as a synonym that is
    usable both at instance level and inside queries."""

    class User(Base, fixtures.ComparableEntity):
        __tablename__ = "users"

        id = Column(
            "id", Integer, primary_key=True, test_needs_autoincrement=True
        )
        name = Column("name", String(50))

        @decl.synonym_for("name")
        @property
        def namesyn(self):
            return self.name

    Base.metadata.create_all()
    sess = create_session()
    u1 = User(name="someuser")
    eq_(u1.name, "someuser")
    eq_(u1.namesyn, "someuser")
    sess.add(u1)
    sess.flush()
    rt = sess.query(User).filter(User.namesyn == "someuser").one()
    eq_(rt, u1)
def test_duplicate_classes_in_base(self):
    """Declaring a second class with the same name in one declarative
    base emits a SAWarning."""

    class Test(Base):
        __tablename__ = "a"
        id = Column(Integer, primary_key=True)

    # building another "Test" class on the same base must warn
    assert_raises_message(
        sa.exc.SAWarning,
        "This declarative base already contains a class with ",
        lambda: type(Base)(
            "Test",
            (Base,),
            dict(__tablename__="b", id=Column(Integer, primary_key=True)),
        ),
    )
@testing.teardown_events(MapperEvents)
def test_instrument_class_before_instrumentation(self):
    """The mapper "instrument_class" event fires before the
    "class_instrument" instrumentation event."""
    # test #3388
    canary = mock.Mock()

    @event.listens_for(mapper, "instrument_class")
    def instrument_class(mp, cls):
        canary.instrument_class(mp, cls)

    @event.listens_for(object, "class_instrument")
    def class_instrument(cls):
        canary.class_instrument(cls)

    class Test(Base):
        __tablename__ = "test"
        id = Column(Integer, primary_key=True)

    # event ordering is asserted via the recorded mock calls
    eq_(
        canary.mock_calls,
        [
            mock.call.instrument_class(Test.__mapper__, Test),
            mock.call.class_instrument(Test),
        ],
    )
def test_cls_docstring(self):
    """``declarative_base(cls=...)`` propagates the docstring of the
    supplied base class onto the generated Base."""

    class MyBase(object):
        """MyBase Docstring"""

    Base = decl.declarative_base(cls=MyBase)
    eq_(Base.__doc__, MyBase.__doc__)
def test_delattr_mapped_raises(self):
    """Deleting a mapped Column attribute from a declarative class is
    unsupported and raises NotImplementedError."""
    Base = decl.declarative_base()

    class Foo(Base):
        __tablename__ = "foo"

        id = Column(Integer, primary_key=True)
        data = Column(String)

    def go():
        del Foo.data

    assert_raises_message(
        NotImplementedError,
        "Can't un-map individual mapped attributes on a mapped class.",
        go,
    )
def test_delattr_hybrid_fine(self):
    """Unlike mapped columns, a hybrid_property may be deleted from a
    declarative class; it disappears from ``all_orm_descriptors``."""
    Base = decl.declarative_base()

    class Foo(Base):
        __tablename__ = "foo"

        id = Column(Integer, primary_key=True)
        data = Column(String)

        @hybrid_property
        def data_hybrid(self):
            return self.data

    assert "data_hybrid" in Foo.__mapper__.all_orm_descriptors.keys()

    del Foo.data_hybrid

    assert "data_hybrid" not in Foo.__mapper__.all_orm_descriptors.keys()
    assert not hasattr(Foo, "data_hybrid")
def test_setattr_hybrid_updates_descriptors(self):
    """Assigning a hybrid_property to a mapped class after definition
    registers it in ``all_orm_descriptors``; deleting removes it."""
    Base = decl.declarative_base()

    class Foo(Base):
        __tablename__ = "foo"

        id = Column(Integer, primary_key=True)
        data = Column(String)

    assert "data_hybrid" not in Foo.__mapper__.all_orm_descriptors.keys()

    @hybrid_property
    def data_hybrid(self):
        return self.data

    Foo.data_hybrid = data_hybrid
    assert "data_hybrid" in Foo.__mapper__.all_orm_descriptors.keys()

    del Foo.data_hybrid

    assert "data_hybrid" not in Foo.__mapper__.all_orm_descriptors.keys()
    assert not hasattr(Foo, "data_hybrid")
def _produce_test(inline, stringbased):
    """Build an ExplicitJoinTest class variant.

    ``inline`` selects whether the Address->User relationship is declared
    inside the class body or attached after ``configure_mappers()``;
    ``stringbased`` selects a string-based vs. literal ``primaryjoin``.
    Returns the generated test class with a distinguishing __name__.
    """

    class ExplicitJoinTest(fixtures.MappedTest):
        @classmethod
        def define_tables(cls, metadata):
            # exposed as globals so test methods can reference them
            global User, Address
            Base = decl.declarative_base(metadata=metadata)

            class User(Base, fixtures.ComparableEntity):
                __tablename__ = "users"

                id = Column(
                    Integer, primary_key=True, test_needs_autoincrement=True
                )
                name = Column(String(50))

            class Address(Base, fixtures.ComparableEntity):
                __tablename__ = "addresses"

                id = Column(
                    Integer, primary_key=True, test_needs_autoincrement=True
                )
                email = Column(String(50))
                user_id = Column(Integer, ForeignKey("users.id"))
                if inline:
                    if stringbased:
                        user = relationship(
                            "User",
                            primaryjoin="User.id==Address.user_id",
                            backref="addresses",
                        )
                    else:
                        user = relationship(
                            User,
                            primaryjoin=User.id == user_id,
                            backref="addresses",
                        )

            if not inline:
                # attach the relationship only after mappers configured
                configure_mappers()
                if stringbased:
                    Address.user = relationship(
                        "User",
                        primaryjoin="User.id==Address.user_id",
                        backref="addresses",
                    )
                else:
                    Address.user = relationship(
                        User,
                        primaryjoin=User.id == Address.user_id,
                        backref="addresses",
                    )

        @classmethod
        def insert_data(cls, connection):
            params = [
                dict(list(zip(("id", "name"), column_values)))
                for column_values in [
                    (7, "jack"),
                    (8, "ed"),
                    (9, "fred"),
                    (10, "chuck"),
                ]
            ]
            connection.execute(User.__table__.insert(), params)
            connection.execute(
                Address.__table__.insert(),
                [
                    dict(list(zip(("id", "user_id", "email"), column_values)))
                    for column_values in [
                        (1, 7, "jack@bean.com"),
                        (2, 8, "ed@wood.com"),
                        (3, 8, "ed@bettyboop.com"),
                        (4, 8, "ed@lala.com"),
                        (5, 9, "fred@fred.com"),
                    ]
                ],
            )

        def test_aliased_join(self):
            # this query will screw up if the aliasing enabled in
            # query.join() gets applied to the right half of the join
            # condition inside the any(). the join condition inside of
            # any() comes from the "primaryjoin" of the relationship,
            # and should not be annotated with _orm_adapt.
            # PropertyLoader.Comparator will annotate the left side with
            # _orm_adapt, though.
            sess = create_session()
            eq_(
                sess.query(User)
                .join(User.addresses, aliased=True)
                .filter(Address.email == "ed@wood.com")
                .filter(User.addresses.any(Address.email == "jack@bean.com"))
                .all(),
                [],
            )

    ExplicitJoinTest.__name__ = "ExplicitJoinTest%s%s" % (
        inline and "Inline" or "Separate",
        stringbased and "String" or "Literal",
    )
    return ExplicitJoinTest
# Generate all four ExplicitJoinTest permutations (inline/separate
# relationship setup x string-based/literal primaryjoin) and bind each
# under its distinct name at module level so the test runner finds them.
for inline in True, False:
    for stringbased in True, False:
        testclass = _produce_test(inline, stringbased)
        exec("%s = testclass" % testclass.__name__)
        del testclass
| apache-2.0 |
tiagocardosos/stoq | stoqlib/gui/test/test_sellablecategoryeditor.py | 3 | 1701 | # -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
##
## Copyright (C) 2012 Async Open Source <http://www.async.com.br>
## All rights reserved
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., or visit: http://www.gnu.org/.
##
## Author(s): Stoq Team <stoq-devel@async.com.br>
##
from stoqlib.gui.editors.categoryeditor import SellableCategoryEditor
from stoqlib.gui.test.uitestutils import GUITest
class TestSellableCategoryEditor(GUITest):
    """GUI tests for SellableCategoryEditor."""

    def test_create(self):
        # a freshly created editor must match the recorded UI snapshot
        editor = SellableCategoryEditor(self.store)
        self.check_editor(editor, 'editor-sellablecategory-create')

    def test_description_validation(self):
        """Description must be non-empty and unique among categories."""
        # Just create an existing category to check unique value above
        self.create_sellable_category(u'Existing category')

        editor = SellableCategoryEditor(self.store)
        # no description entered yet -> invalid
        self.assertInvalid(editor, ['description'])
        editor.description.update('Non-existing category')
        self.assertValid(editor, ['description'])
        # duplicating an existing description -> invalid again
        editor.description.update('Existing category')
        self.assertInvalid(editor, ['description'])
| gpl-2.0 |
PaulPetring/zulip | zerver/lib/avatar.py | 124 | 1765 | from __future__ import absolute_import
from django.conf import settings
import hashlib
from zerver.lib.utils import make_safe_digest
def gravatar_hash(email):
    """Compute the Gravatar hash for an email address."""
    # Non-ASCII characters aren't permitted by the currently active e-mail
    # RFCs. However, the IETF has published https://tools.ietf.org/html/rfc4952,
    # outlining internationalization of email addresses, and regardless if we
    # typo an address or someone manages to give us a non-ASCII address, let's
    # not error out on it.
    normalized_email = email.lower()
    return make_safe_digest(normalized_email, hashlib.md5)
def user_avatar_hash(email):
    """Compute the salted hash used to address a user-uploaded avatar."""
    # Salting the user_key may be overkill, but it prevents us from
    # basically mimicking Gravatar's hashing scheme, which could lead
    # to some abuse scenarios like folks using us as a free Gravatar
    # replacement.
    salted_key = email.lower() + settings.AVATAR_SALT
    return make_safe_digest(salted_key, hashlib.sha1)
def avatar_url(user_profile):
    """Return the avatar URL for the given user profile object."""
    return get_avatar_url(user_profile.avatar_source, user_profile.email)
def get_avatar_url(avatar_source, email):
    """Map an avatar source code plus email to a servable avatar URL.

    'U' means a user-uploaded avatar (served locally or from S3 depending
    on settings); otherwise Gravatar is used when enabled, falling back to
    the configured default avatar URI.
    """
    if avatar_source == 'U':
        hash_key = user_avatar_hash(email)
        if settings.LOCAL_UPLOADS_DIR is not None:
            # ?x=x allows templates to append additional parameters with &s
            return "/user_avatars/%s.png?x=x" % (hash_key)
        else:
            bucket = settings.S3_AVATAR_BUCKET
            return "https://%s.s3.amazonaws.com/%s?x=x" % (bucket, hash_key)
    elif settings.ENABLE_GRAVATAR:
        hash_key = gravatar_hash(email)
        return "https://secure.gravatar.com/avatar/%s?d=identicon" % (hash_key,)
    else:
        return settings.DEFAULT_AVATAR_URI+'?x=x'
| apache-2.0 |
thaumos/ansible | lib/ansible/plugins/action/cli_config.py | 42 | 1287 | #
# Copyright 2018 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action.network import ActionModule as ActionNetworkModule
class ActionModule(ActionNetworkModule):
    """Action plugin for the cli_config module.

    Marks itself as a config module and rejects any connection type
    other than ``network_cli`` before delegating to the network base.
    """

    def run(self, tmp=None, task_vars=None):
        del tmp  # tmp no longer has any effect

        self._config_module = True
        if self._play_context.connection != 'network_cli':
            return {'failed': True, 'msg': 'Connection type %s is not valid for cli_config module' % self._play_context.connection}

        return super(ActionModule, self).run(task_vars=task_vars)
| gpl-3.0 |
coala/coala | coalib/testing/LocalBearTestHelper.py | 1 | 19188 | import logging
import queue
import unittest
from contextlib import contextmanager, ExitStack
from unittest.mock import patch
from coalib.bearlib.abstractions.LinterClass import LinterClass
from coalib.testing.BearTestHelper import generate_skip_decorator
from coalib.bears.LocalBear import LocalBear
from coala_utils.Comparable import Comparable
from coala_utils.ContextManagers import prepare_file
from coalib.settings.Section import Section
from coalib.settings.Setting import Setting
@contextmanager
def execute_bear(bear, *args, **kwargs):
    """Run ``bear.execute(*args, **kwargs)`` and yield its results.

    Any exception raised inside the block is re-raised as an
    AssertionError augmented with the bear's queued messages and, for
    linter bears, the program arguments and captured output.
    """
    try:
        console_output = []
        # For linters provide additional information, such as
        # stdout and stderr.
        with ExitStack() as stack:
            if isinstance(bear, LinterClass):
                old_process_output = bear.process_output
                old_create_arguments = bear.create_arguments

                def new_create_arguments(filename, file, config_file,
                                         *args, **kwargs):
                    # record the arguments passed to the external program
                    arguments = old_create_arguments(filename, file,
                                                     config_file, *args,
                                                     **kwargs)
                    console_output.append(
                        'Program arguments:\n' + repr(arguments))
                    return arguments

                def new_process_output(output, filename=None, file=None,
                                       **process_output_kwargs):
                    # record stdout/stderr (or combined output) for the
                    # failure message before delegating
                    console_output.append('The program yielded '
                                          'the following output:\n')
                    if isinstance(output, tuple):
                        stdout, stderr = output
                        console_output.append('Stdout:\n' + stdout)
                        console_output.append('Stderr:\n' + stderr)
                    else:
                        console_output.append(output)
                    return old_process_output(output, filename, file,
                                              **process_output_kwargs)

                stack.enter_context(patch.object(
                    bear, 'process_output', wraps=new_process_output))
                stack.enter_context(patch.object(
                    bear, 'create_arguments', wraps=new_create_arguments))

            bear_output_generator = bear.execute(*args, **kwargs)
            assert bear_output_generator is not None, (
                'Bear returned None on execution\n')
            yield bear_output_generator
    except Exception as err:
        # collect queued bear messages plus any captured linter output
        msg = []
        while not bear.message_queue.empty():
            msg.append(bear.message_queue.get().message)
        msg += console_output
        raise AssertionError(str(err) + ''.join('\n' + m for m in msg))
def get_results(local_bear,
                lines,
                filename=None,
                force_linebreaks=True,
                create_tempfile=True,
                tempfile_kwargs={},
                settings={},
                aspects=None,
                ):
    """Run ``local_bear`` over ``lines`` and return its results,
    recursively running and collecting the results of its dependency
    bears first."""
    if local_bear.BEAR_DEPS:
        # Get results of bear's dependencies first
        deps_results = dict()
        for bear in local_bear.BEAR_DEPS:
            uut = bear(local_bear.section, queue.Queue())
            deps_results[bear.name] = get_results(uut,
                                                  lines,
                                                  filename,
                                                  force_linebreaks,
                                                  create_tempfile,
                                                  tempfile_kwargs,
                                                  settings,
                                                  aspects,
                                                  )
    else:
        deps_results = None
    with prepare_file(lines, filename,
                      force_linebreaks=force_linebreaks,
                      create_tempfile=create_tempfile,
                      tempfile_kwargs=tempfile_kwargs) as (file, fname):
        with execute_bear(local_bear, fname, file,
                          dependency_results=deps_results,
                          **local_bear.get_metadata().filter_parameters(
                              settings)) as bear_output:
            return bear_output
class LocalBearTestHelper(unittest.TestCase):
    """
    This is a helper class for simplification of testing of local bears.

    Please note that all abstraction will prepare the lines so you don't need
    to do that if you use them.

    If you miss some methods, get in contact with us, we'll be happy to help!
    """

    def assertComparableObjectsEqual(self, observed_result, expected_result):
        """
        Assert that two result sequences are equal, comparing
        ``Comparable`` instances field by field so every mismatching
        attribute is reported at once.
        """
        if len(observed_result) == len(expected_result):
            messages = ''
            for observed, expected in zip(observed_result, expected_result):
                if (isinstance(observed, Comparable)
                        and isinstance(expected, Comparable)) and (
                        type(observed) is type(expected)):
                    for attribute in type(observed).__compare_fields__:
                        try:
                            self.assertEqual(
                                getattr(observed, attribute),
                                getattr(expected, attribute),
                                msg=f'{attribute} mismatch.')
                        except AssertionError as ex:
                            # accumulate, don't stop at the first mismatch
                            messages += (str(ex) + '\n\n')
                else:
                    self.assertEqual(observed_result, expected_result)
            if messages:
                raise AssertionError(messages)
        else:
            self.assertEqual(observed_result, expected_result)

    def check_validity(self,
                       local_bear,
                       lines,
                       filename=None,
                       valid=True,
                       force_linebreaks=True,
                       create_tempfile=True,
                       tempfile_kwargs={},
                       settings={},
                       aspects=None,
                       ):
        """
        Asserts that a check of the given lines with the given local bear
        either yields or does not yield any results.

        :param local_bear:       The local bear to check with.
        :param lines:            The lines to check. (List of strings)
        :param filename:         The filename, if it matters.
        :param valid:            Whether the lines are valid or not.
        :param force_linebreaks: Whether to append newlines at each line
                                 if needed. (Bears expect a \\n for every line)
        :param create_tempfile:  Whether to save lines in tempfile if needed.
        :param tempfile_kwargs:  Kwargs passed to tempfile.mkstemp().
        :param aspects:          A list of aspect objects along with the name
                                 and value of their respective tastes.
        """
        if valid:
            self.check_results(local_bear, lines,
                               results=[], filename=filename,
                               check_order=True,
                               force_linebreaks=force_linebreaks,
                               create_tempfile=create_tempfile,
                               tempfile_kwargs=tempfile_kwargs,
                               settings=settings,
                               aspects=aspects,
                               )
        else:
            return self.check_invalidity(local_bear, lines,
                                         filename=filename,
                                         force_linebreaks=force_linebreaks,
                                         create_tempfile=create_tempfile,
                                         tempfile_kwargs=tempfile_kwargs,
                                         settings=settings,
                                         aspects=aspects,
                                         )

    def check_invalidity(self,
                         local_bear,
                         lines,
                         filename=None,
                         force_linebreaks=True,
                         create_tempfile=True,
                         tempfile_kwargs={},
                         settings={},
                         aspects=None,
                         ):
        """
        Asserts that a check of the given lines with the given local bear
        yields results.

        :param local_bear:       The local bear to check with.
        :param lines:            The lines to check. (List of strings)
        :param filename:         The filename, if it matters.
        :param force_linebreaks: Whether to append newlines at each line
                                 if needed. (Bears expect a \\n for every line)
        :param create_tempfile:  Whether to save lines in tempfile if needed.
        :param tempfile_kwargs:  Kwargs passed to tempfile.mkstemp().
        :param aspects:          A list of aspect objects along with the name
                                 and value of their respective tastes.
        """
        assert isinstance(self, unittest.TestCase)
        self.assertIsInstance(local_bear,
                              LocalBear,
                              msg='The given bear is not a local bear.')
        self.assertIsInstance(lines,
                              (list, tuple),
                              msg='The given lines are not a list.')

        bear_output = get_results(local_bear, lines,
                                  filename=filename,
                                  force_linebreaks=force_linebreaks,
                                  create_tempfile=create_tempfile,
                                  tempfile_kwargs=tempfile_kwargs,
                                  settings=settings,
                                  aspects=aspects,
                                  )
        # BUGFIX: a space was missing between the quoted bear name and
        # "yields", producing e.g. "bear 'Foo'yields no result".
        msg = (f"The local bear '{local_bear.__class__.__name__}' "
               'yields no result although it '
               'should.')
        self.assertNotEqual(len(bear_output), 0, msg=msg)
        return bear_output

    def check_results(self,
                      local_bear,
                      lines,
                      results,
                      filename=None,
                      check_order=False,
                      force_linebreaks=True,
                      create_tempfile=True,
                      tempfile_kwargs={},
                      settings={},
                      aspects=None,
                      ):
        """
        Asserts that a check of the given lines with the given local bear does
        yield exactly the given results.

        :param local_bear:       The local bear to check with.
        :param lines:            The lines to check. (List of strings)
        :param results:          The expected list of results.
        :param filename:         The filename, if it matters.
        :param check_order:      Whether to check that the elements of
                                 ``results`` and that of the actual list
                                 generated are in the same order or not.
        :param force_linebreaks: Whether to append newlines at each line
                                 if needed. (Bears expect a \\n for every line)
        :param create_tempfile:  Whether to save lines in tempfile if needed.
        :param tempfile_kwargs:  Kwargs passed to tempfile.mkstemp().
        :param settings:         A dictionary of keys and values (both strings)
                                 from which settings will be created that will
                                 be made available for the tested bear.
        :param aspects:          A list of aspect objects along with the name
                                 and value of their respective tastes.
        """
        assert isinstance(self, unittest.TestCase)
        self.assertIsInstance(local_bear,
                              LocalBear,
                              msg='The given bear is not a local bear.')
        self.assertIsInstance(lines,
                              (list, tuple),
                              msg='The given lines are not a list.')
        self.assertIsInstance(results,
                              list,
                              msg='The given results are not a list.')

        bear_output = get_results(local_bear, lines,
                                  filename=filename,
                                  force_linebreaks=force_linebreaks,
                                  create_tempfile=create_tempfile,
                                  tempfile_kwargs=tempfile_kwargs,
                                  settings=settings,
                                  aspects=aspects,
                                  )
        if not check_order:
            self.assertComparableObjectsEqual(
                sorted(bear_output), sorted(results))
        else:
            self.assertComparableObjectsEqual(bear_output, results)
        return bear_output

    def check_line_result_count(self,
                                local_bear,
                                lines,
                                results_num,
                                filename=None,
                                force_linebreaks=True,
                                create_tempfile=True,
                                tempfile_kwargs={},
                                settings={},
                                aspects=None,
                                ):
        """
        Check many results for each line.

        :param local_bear:       The local bear to check with.
        :param lines:            The lines to check. (List of strings)
        :param results_num:      The expected list of many results each line.
        :param filename:         The filename, if it matters.
        :param force_linebreaks: Whether to append newlines at each line
                                 if needed. (Bears expect a \\n for every line)
        :param create_tempfile:  Whether to save lines in tempfile if needed.
        :param tempfile_kwargs:  Kwargs passed to tempfile.mkstemp().
        :param settings:         A dictionary of keys and values (both strings)
                                 from which settings will be created that will
                                 be made available for the tested bear.
        :param aspects:          A list of aspect objects along with the name
                                 and value of their respective tastes.
        """
        # skip blank lines and comment lines; they yield no results
        modified_lines = []
        for line in lines:
            stripped_line = line.strip()
            if stripped_line == '' or stripped_line.startswith('#'):
                continue
            modified_lines.append(line)
        for line, num in zip(modified_lines, results_num):
            bear_output = get_results(local_bear, [line],
                                      filename=filename,
                                      force_linebreaks=force_linebreaks,
                                      create_tempfile=create_tempfile,
                                      tempfile_kwargs=tempfile_kwargs,
                                      settings=settings,
                                      aspects=aspects,
                                      )
            self.assertEqual(num, len(bear_output))
def verify_local_bear(bear,
                      valid_files,
                      invalid_files,
                      filename=None,
                      settings={},
                      aspects=None,
                      force_linebreaks=True,
                      create_tempfile=True,
                      timeout=None,
                      tempfile_kwargs={}):
    """
    Generates a test for a local bear by checking the given valid and invalid
    file contents. Simply use it on your module level like:

    YourTestName = verify_local_bear(YourBear, (['valid line'],),
                                     (['invalid line'],))

    :param bear:             The Bear class to test.
    :param valid_files:      An iterable of files as a string list that won't
                             yield results.
    :param invalid_files:    An iterable of files as a string list that must
                             yield results.
    :param filename:         The filename to use for valid and invalid files.
    :param settings:         A dictionary of keys and values (both string) from
                             which settings will be created that will be made
                             available for the tested bear.
    :param aspects:          A list of aspect objects along with the name
                             and value of their respective tastes.
    :param force_linebreaks: Whether to append newlines at each line
                             if needed. (Bears expect a \\n for every line)
    :param create_tempfile:  Whether to save lines in tempfile if needed.
    :param timeout:          Unused. Use pytest-timeout or similar.
    :param tempfile_kwargs:  Kwargs passed to tempfile.mkstemp() if tempfile
                             needs to be created.
    :return:                 A unittest.TestCase object.
    """
    if timeout:
        # kept only for backwards compatibility of the signature
        logging.warning('timeout is ignored as the timeout set in the repo '
                        'configuration will be sufficient. Use pytest-timeout '
                        'or similar to achieve same result.')

    @generate_skip_decorator(bear)
    class LocalBearTest(LocalBearTestHelper):

        def setUp(self):
            self.section = Section('name')
            self.uut = bear(self.section,
                            queue.Queue())
            for name, value in settings.items():
                self.section.append(Setting(name, value))
            if aspects:
                self.section.aspects = aspects

        def test_valid_files(self):
            self.assertIsInstance(valid_files, (list, tuple))
            for file in valid_files:
                self.check_validity(self.uut,
                                    file.splitlines(keepends=True),
                                    filename,
                                    valid=True,
                                    force_linebreaks=force_linebreaks,
                                    create_tempfile=create_tempfile,
                                    tempfile_kwargs=tempfile_kwargs)

        def test_invalid_files(self):
            self.assertIsInstance(invalid_files, (list, tuple))
            for file in invalid_files:
                self.check_validity(self.uut,
                                    file.splitlines(keepends=True),
                                    filename,
                                    valid=False,
                                    force_linebreaks=force_linebreaks,
                                    create_tempfile=create_tempfile,
                                    tempfile_kwargs=tempfile_kwargs)

    return LocalBearTest
| agpl-3.0 |
dmitrijus/hltd | lib/urllib3-1.10/urllib3_hltd/__init__.py | 2 | 1867 | """
urllib3_hltd - Thread-safe connection pooling and re-using.
"""
__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
__license__ = 'MIT'
__version__ = '1.10'
from .connectionpool import (
HTTPConnectionPool,
HTTPSConnectionPool,
connection_from_url
)
from . import exceptions
from .filepost import encode_multipart_formdata
from .poolmanager import PoolManager, ProxyManager, proxy_from_url
from .response import HTTPResponse
from .util.request import make_headers
from .util.url import get_host
from .util.timeout import Timeout
from .util.retry import Retry
# Set default logging handler to avoid "No handler found" warnings.
import logging
try:  # Python 2.7+
    from logging import NullHandler
except ImportError:
    # Python < 2.7 fallback: minimal no-op handler equivalent
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass

logging.getLogger(__name__).addHandler(NullHandler())
def add_stderr_logger(level=logging.DEBUG):
    """
    Helper for quickly adding a StreamHandler to the logger. Useful for
    debugging.

    Returns the handler after adding it.
    """
    # This method needs to be in this __init__.py to get the __name__ correct
    # even if urllib3 is vendored within another package.
    logger = logging.getLogger(__name__)
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
    logger.addHandler(handler)
    logger.setLevel(level)
    # Lazy %-style arguments: the message is only formatted if the record
    # is actually emitted (previously an eager '%' interpolation).
    logger.debug('Added a stderr logging handler to logger: %s', __name__)
    return handler
# ... Clean up.
del NullHandler


# Set security warning to always go off by default.
import warnings
warnings.simplefilter('always', exceptions.SecurityWarning)


def disable_warnings(category=exceptions.HTTPWarning):
    """
    Helper for quickly disabling all urllib3 warnings.
    """
    warnings.simplefilter('ignore', category)
| lgpl-3.0 |
x2Ident/x2Ident_test | mitmproxy/mitmproxy/protocol/http1.py | 2 | 2732 | from __future__ import absolute_import, print_function, division
from mitmproxy import models
from mitmproxy.protocol import http
from netlib.http import http1
class Http1Layer(http._HttpTransmissionLayer):
    """HTTP/1.x transmission layer.

    Implements reading/writing of HTTP/1 requests and responses on the
    client and server connections for the generic HttpLayer, which it
    instantiates and runs in __call__.
    """

    def __init__(self, ctx, mode):
        super(Http1Layer, self).__init__(ctx)
        self.mode = mode

    def read_request(self):
        req = http1.read_request(
            self.client_conn.rfile, body_size_limit=self.config.options.body_size_limit
        )
        return models.HTTPRequest.wrap(req)

    def read_request_body(self, request):
        expected_size = http1.expected_http_body_size(request)
        return http1.read_body(
            self.client_conn.rfile,
            expected_size,
            self.config.options.body_size_limit
        )

    def send_request(self, request):
        self.server_conn.wfile.write(http1.assemble_request(request))
        self.server_conn.wfile.flush()

    def read_response_headers(self):
        resp = http1.read_response_head(self.server_conn.rfile)
        return models.HTTPResponse.wrap(resp)

    def read_response_body(self, request, response):
        expected_size = http1.expected_http_body_size(request, response)
        return http1.read_body(
            self.server_conn.rfile,
            expected_size,
            self.config.options.body_size_limit
        )

    def send_response_headers(self, response):
        raw = http1.assemble_response_head(response)
        self.client_conn.wfile.write(raw)
        self.client_conn.wfile.flush()

    def send_response_body(self, response, chunks):
        for chunk in http1.assemble_body(response.headers, chunks):
            self.client_conn.wfile.write(chunk)
            self.client_conn.wfile.flush()

    def check_close_connection(self, flow):
        # close if either side requested it, or if the body length is
        # only delimited by EOF (expected size of -1)
        request_close = http1.connection_close(
            flow.request.http_version,
            flow.request.headers
        )
        response_close = http1.connection_close(
            flow.response.http_version,
            flow.response.headers
        )
        read_until_eof = http1.expected_http_body_size(flow.request, flow.response) == -1
        close_connection = request_close or response_close or read_until_eof
        if flow.request.first_line_format == "authority" and flow.response.status_code == 200:
            # Workaround for https://github.com/mitmproxy/mitmproxy/issues/313:
            # Charles Proxy sends a CONNECT response with HTTP/1.0
            # and no Content-Length header
            return False
        return close_connection

    def __call__(self):
        layer = http.HttpLayer(self, self.mode)
        layer()
| gpl-3.0 |
lidavidm/mathics-heroku | venv/lib/python2.7/site-packages/sympy/physics/quantum/qft.py | 3 | 6205 | """An implementation of qubits and gates acting on them.
Todo:
* Update docstrings.
* Update tests.
* Implement apply using decompose.
* Implement represent using decompose or something smarter. For this to
work we first have to implement represent for SWAP.
* Decide if we want upper index to be inclusive in the constructor.
* Fix the printing of Rk gates in plotting.
"""
from sympy import Expr, Matrix, exp, I, pi, Integer, Symbol
from sympy.functions import sqrt
from sympy.physics.quantum.qapply import qapply
from sympy.physics.quantum.qexpr import QuantumError, QExpr
from sympy.matrices import eye
from sympy.physics.quantum.tensorproduct import matrix_tensor_product
from sympy.physics.quantum.gate import (
Gate, HadamardGate, SwapGate, OneQubitGate, CGate, PhaseGate, TGate, ZGate
)
__all__ = [
'QFT',
'IQFT',
'RkGate',
'Rk'
]
#-----------------------------------------------------------------------------
# Fourier stuff
#-----------------------------------------------------------------------------
class RkGate(OneQubitGate):
    """The R_k phase-rotation gate of the quantum Fourier transform (QFT).

    ``RkGate(target, k)`` applies a phase of exp(2*pi*I/2**k) to the |1>
    component of the target qubit.
    """
    gate_name = u'Rk'
    gate_name_latex = u'R'
    def __new__(cls, *args):
        if len(args) != 2:
            raise QuantumError(
                'Rk gates only take two arguments, got: %r' % args
            )
        # For small k, Rk gates simplify to other gates, using these
        # substitutions give us familiar results for the QFT for small numbers
        # of qubits.
        target = args[0]
        k = args[1]
        if k == 1:
            return ZGate(target)
        elif k == 2:
            return PhaseGate(target)
        elif k == 3:
            return TGate(target)
        args = cls._eval_args(args)
        inst = Expr.__new__(cls, *args)
        inst.hilbert_space = cls._eval_hilbert_space(args)
        return inst
    @classmethod
    def _eval_args(cls, args):
        # Fall back to this, because Gate._eval_args assumes that args is
        # all targets and can't contain duplicates.
        return QExpr._eval_args(args)
    @property
    def k(self):
        # Rotation order k: second element of the label (target, k).
        return self.label[1]
    @property
    def targets(self):
        # Single target qubit: first element of the label, as a tuple.
        return self.label[:1]
    @property
    def gate_name_plot(self):
        # LaTeX name used in circuit plots, e.g. $R_2$.
        return r'$%s_%s$' % (self.gate_name_latex, str(self.k))
    def get_target_matrix(self, format='sympy'):
        """Return the 2x2 target matrix diag(1, exp(2*pi*I/2**k))."""
        if format == 'sympy':
            return Matrix([[1, 0], [0, exp(Integer(2)*pi*I/(Integer(2)**self.k))]])
        raise NotImplementedError(
            'Invalid format for the R_k gate: %r' % format)
# Short alias for RkGate.
Rk = RkGate
class Fourier(Gate):
    """Superclass of Quantum Fourier and Inverse Quantum Fourier Gates.

    The two label arguments (start, finish) select the contiguous qubit
    range [start, finish) the transform acts on.
    """
    @classmethod
    def _eval_args(self, args):
        # NOTE(review): first parameter of this classmethod is named
        # ``self`` rather than the conventional ``cls``.
        if len(args) != 2:
            raise QuantumError(
                'QFT/IQFT only takes two arguments, got: %r' % args
            )
        if args[0] >= args[1]:
            raise QuantumError("Start must be smaller than finish")
        return Gate._eval_args(args)
    def _represent_default_basis(self, **options):
        # The computational (Z) basis is the default representation basis.
        return self._represent_ZGate(None, **options)
    def _represent_ZGate(self, basis, **options):
        """
        Represents the (I)QFT In the Z Basis

        Requires ``nqubits`` in *options*; raises QuantumError when it is
        missing or smaller than the minimum this gate needs.
        """
        nqubits = options.get('nqubits', 0)
        if nqubits == 0:
            raise QuantumError(
                'The number of qubits must be given as nqubits.')
        if nqubits < self.min_qubits:
            raise QuantumError(
                'The number of qubits %r is too small for the gate.' % nqubits
            )
        size = self.size
        omega = self.omega
        #Make a matrix that has the basic Fourier Transform Matrix
        arrayFT = [[omega**(
            i*j % size)/sqrt(size) for i in range(size)] for j in range(size)]
        matrixFT = Matrix(arrayFT)
        #Embed the FT Matrix in a higher space, if necessary
        if self.label[0] != 0:
            # Identity on the qubits below the transformed range.
            matrixFT = matrix_tensor_product(eye(2**self.label[0]), matrixFT)
        if self.min_qubits < nqubits:
            # Identity on the qubits above the transformed range.
            matrixFT = matrix_tensor_product(
                matrixFT, eye(2**(nqubits - self.min_qubits)))
        return matrixFT
    @property
    def targets(self):
        # All qubit indices the transform acts on: [start, finish).
        return range(self.label[0], self.label[1])
    @property
    def min_qubits(self):
        # Smallest register size this gate fits into (the 'finish' label).
        return self.label[1]
    @property
    def size(self):
        """Size is the size of the QFT matrix"""
        return 2**(self.label[1] - self.label[0])
    @property
    def omega(self):
        # Abstract root of unity; QFT/IQFT override with concrete values.
        return Symbol('omega')
class QFT(Fourier):
    """The forward quantum Fourier transform."""
    gate_name = u'QFT'
    gate_name_latex = u'QFT'
    def decompose(self):
        """Decomposes QFT into elementary gates.

        From the highest qubit down: a Hadamard followed by controlled R_k
        rotations from the lower qubits, then the final qubit-reversal swaps.
        The gates are left-multiplied, so the rightmost factor acts first.
        """
        start = self.label[0]
        finish = self.label[1]
        circuit = 1
        for level in reversed(range(start, finish)):
            circuit = HadamardGate(level)*circuit
            for i in range(level - start):
                circuit = CGate(level - i - 1, RkGate(level, i + 2))*circuit
        for i in range((finish - start)//2):
            # Reverse the qubit order with swaps.
            circuit = SwapGate(i + start, finish - i - 1)*circuit
        return circuit
    def _apply_operator_Qubit(self, qubits, **options):
        # Apply the QFT by applying its elementary-gate decomposition.
        return qapply(self.decompose()*qubits)
    def _eval_inverse(self):
        return IQFT(*self.args)
    @property
    def omega(self):
        # Primitive size-th root of unity used in the transform matrix.
        return exp(2*pi*I/self.size)
class IQFT(Fourier):
    """The inverse quantum Fourier transform."""
    gate_name = u'IQFT'
    gate_name_latex = u'{QFT^{-1}}'
    def decompose(self):
        """Decomposes IQFT into elementary gates.

        Mirror of QFT.decompose: swaps first, then Hadamards and controlled
        R_k gates with negated k (inverse rotations), in reversed order.
        """
        start = self.args[0]
        finish = self.args[1]
        circuit = 1
        for i in range((finish - start)//2):
            circuit = SwapGate(i + start, finish - i - 1)*circuit
        for level in range(start, finish):
            for i in reversed(range(level - start)):
                # Negative k yields the inverse (conjugate) rotation.
                circuit = CGate(level - i - 1, RkGate(level, -i - 2))*circuit
            circuit = HadamardGate(level)*circuit
        return circuit
    def _eval_inverse(self):
        return QFT(*self.args)
    @property
    def omega(self):
        # Conjugate root of unity: inverse transform phases.
        return exp(-2*pi*I/self.size)
| gpl-3.0 |
fxfitz/ansible | lib/ansible/modules/network/onyx/onyx_l3_interface.py | 41 | 9721 | #!/usr/bin/python
#
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: onyx_l3_interface
version_added: "2.5"
author: "Samer Deeb (@samerd)"
short_description: Manage L3 interfaces on Mellanox ONYX network devices
description:
- This module provides declarative management of L3 interfaces
on Mellanox ONYX network devices.
options:
name:
description:
- Name of the L3 interface.
ipv4:
description:
- IPv4 of the L3 interface.
ipv6:
description:
- IPv6 of the L3 interface (not supported for now).
aggregate:
description: List of L3 interfaces definitions
purge:
description:
- Purge L3 interfaces not defined in the I(aggregate) parameter.
default: false
type: bool
state:
description:
- State of the L3 interface configuration.
default: present
choices: ['present', 'absent']
"""
EXAMPLES = """
- name: Set Eth1/1 IPv4 address
onyx_l3_interface:
name: Eth1/1
ipv4: 192.168.0.1/24
- name: Remove Eth1/1 IPv4 address
onyx_l3_interface:
name: Eth1/1
state: absent
- name: Set IP addresses on aggregate
onyx_l3_interface:
aggregate:
- { name: Eth1/1, ipv4: 192.168.2.10/24 }
- { name: Eth1/2, ipv4: 192.168.3.10/24 }
- name: Remove IP addresses on aggregate
onyx_l3_interface:
aggregate:
- { name: Eth1/1, ipv4: 192.168.2.10/24 }
- { name: Eth1/2, ipv4: 192.168.3.10/24 }
state: absent
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device
returned: always.
type: list
sample:
- interfaces ethernet 1/1 ip address 192.168.0.1 /24
"""
import re
from copy import deepcopy
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import iteritems
from ansible.module_utils.network.common.utils import remove_default_spec
from ansible.module_utils.network.onyx.onyx import BaseOnyxModule
from ansible.module_utils.network.onyx.onyx import get_interfaces_config
class OnyxL3InterfaceModule(BaseOnyxModule):
    """Declaratively manage L3 (IP address) configuration of ONYX interfaces.

    Builds the list of device commands needed to move the switch from its
    current L3 interface state to the requested one.
    """

    IF_ETH_REGEX = re.compile(r"^Eth(\d+\/\d+|Eth\d+\/\d+\d+)$")
    IF_VLAN_REGEX = re.compile(r"^Vlan (\d+)$")
    IF_LOOPBACK_REGEX = re.compile(r"^Loopback (\d+)$")
    IF_TYPE_ETH = "ethernet"
    IF_TYPE_LOOPBACK = "loopback"
    IF_TYPE_VLAN = "vlan"
    # Maps an interface type to the regex that recognizes its name and
    # extracts the interface id (group 1).
    IF_TYPE_MAP = {
        IF_TYPE_ETH: IF_ETH_REGEX,
        IF_TYPE_VLAN: IF_VLAN_REGEX,
        IF_TYPE_LOOPBACK: IF_LOOPBACK_REGEX,
    }
    # Attribute name under which each interface type reports its IPv4
    # address in the device's interface configuration output.
    IP_ADDR_ATTR_MAP = {
        IF_TYPE_ETH: 'IP Address',
        IF_TYPE_VLAN: 'Internet Address',
        IF_TYPE_LOOPBACK: 'Internet Address',
    }
    # Whether interfaces absent from the aggregate should be purged.
    _purge = False

    @classmethod
    def _get_element_spec(cls):
        """Argument spec for a single interface item."""
        return dict(
            name=dict(type='str'),
            ipv4=dict(type='str'),
            ipv6=dict(type='str'),
            state=dict(default='present',
                       choices=['present', 'absent', 'enabled', 'disabled']),
        )

    @classmethod
    def _get_aggregate_spec(cls, element_spec):
        """Argument spec for items inside the ``aggregate`` list."""
        aggregate_spec = deepcopy(element_spec)
        aggregate_spec['name'] = dict(required=True)
        # remove default in aggregate spec, to handle common arguments
        remove_default_spec(aggregate_spec)
        return aggregate_spec

    def init_module(self):
        """Module initialization: build the argument spec and AnsibleModule."""
        element_spec = self._get_element_spec()
        aggregate_spec = self._get_aggregate_spec(element_spec)
        argument_spec = dict(
            aggregate=dict(type='list', elements='dict',
                           options=aggregate_spec),
            purge=dict(default=False, type='bool'),
        )
        argument_spec.update(element_spec)
        required_one_of = [['name', 'aggregate']]
        mutually_exclusive = [['name', 'aggregate']]
        self._module = AnsibleModule(
            argument_spec=argument_spec,
            required_one_of=required_one_of,
            mutually_exclusive=mutually_exclusive,
            supports_check_mode=True)

    def _get_interface_type(self, if_name):
        """Return ``(if_type, if_id)`` parsed from *if_name*.

        Returns ``(None, None)`` when the name matches no known type.
        """
        if_type = None
        if_id = None
        for interface_type, interface_regex in iteritems(self.IF_TYPE_MAP):
            match = interface_regex.match(if_name)
            if match:
                if_type = interface_type
                if_id = match.group(1)
                break
        return if_type, if_id

    def _set_if_type(self, params):
        """Annotate *params* with 'if_type'/'if_id'; fail on unknown names."""
        if_name = params['name']
        if_type, if_id = self._get_interface_type(if_name)
        if not if_id:
            self._module.fail_json(
                msg='unsupported interface: %s' % if_name)
        params['if_type'] = if_type
        params['if_id'] = if_id

    def get_required_config(self):
        """Build ``self._required_config`` from the module parameters."""
        self._required_config = list()
        module_params = self._module.params
        aggregate = module_params.get('aggregate')
        self._purge = module_params.get('purge', False)
        if aggregate:
            for item in aggregate:
                # Fill unset per-item keys from the top-level parameters.
                for key in item:
                    if item.get(key) is None:
                        item[key] = module_params[key]
                self.validate_param_values(item, item)
                req_item = item.copy()
                self._set_if_type(req_item)
                self._required_config.append(req_item)
        else:
            params = {
                'name': module_params['name'],
                'ipv4': module_params['ipv4'],
                'ipv6': module_params['ipv6'],
                'state': module_params['state'],
            }
            self.validate_param_values(params)
            self._set_if_type(params)
            self._required_config.append(params)

    def _get_interfaces_config(self, interface_type):
        """Fetch the device configuration for all interfaces of a type."""
        return get_interfaces_config(self._module, interface_type)

    def _parse_interfaces_config(self, if_type, if_config):
        """Extract current L3 state for all interfaces of *if_type*."""
        ipaddr_attr = self.IP_ADDR_ATTR_MAP[if_type]
        for if_data in if_config:
            if_name = self.get_config_attr(if_data, 'header')
            regex = self.IF_TYPE_MAP[if_type]
            match = regex.match(if_name)
            if not match:
                continue
            ipv4 = self.get_config_attr(if_data, ipaddr_attr)
            if ipv4:
                # Device reports "addr /prefix"; normalize to "addr/prefix".
                ipv4 = ipv4.replace(' ', '')
            ipv6 = self.get_config_attr(if_data, 'IPv6 address(es)')
            if ipv6:
                ipv6 = ipv6.replace('[primary]', '')
                ipv6 = ipv6.strip()
            if_id = match.group(1)
            switchport = self.get_config_attr(if_data, 'Switchport mode')
            if_obj = {
                'name': if_name,
                'if_id': if_id,
                'if_type': if_type,
                'ipv4': ipv4,
                'ipv6': ipv6,
                'switchport': switchport,
            }
            self._current_config[if_name] = if_obj

    def load_current_config(self):
        """Load current device state for every required interface type."""
        # called in base class in run function
        self._current_config = dict()
        if_types = set([if_obj['if_type'] for if_obj in self._required_config])
        for if_type in if_types:
            if_config = self._get_interfaces_config(if_type)
            if not if_config:
                continue
            self._parse_interfaces_config(if_type, if_config)

    def _generate_no_ip_commands(self, req_conf, curr_conf):
        """Queue commands removing the current IPv4/IPv6 addresses.

        ``req_conf`` supplies the interface type/id the commands target;
        ``curr_conf`` supplies the addresses currently configured.
        """
        curr_ip = curr_conf.get('ipv4')
        if_type = req_conf['if_type']
        if_id = req_conf['if_id']
        if curr_ip:
            cmd = "interface %s %s no ip address" % (if_type, if_id)
            self._commands.append(cmd)
        curr_ipv6 = curr_conf.get('ipv6')
        if curr_ipv6:
            cmd = "interface %s %s no ipv6 address %s" % (
                if_type, if_id, curr_ipv6)
            self._commands.append(cmd)

    def _generate_ip_commands(self, req_conf, curr_conf):
        """Queue commands bringing an interface to the requested addresses."""
        curr_ipv4 = curr_conf.get('ipv4')
        req_ipv4 = req_conf.get('ipv4')
        curr_ipv6 = curr_conf.get('ipv6')
        req_ipv6 = req_conf.get('ipv6')
        if_type = req_conf['if_type']
        if_id = req_conf['if_id']
        switchport = curr_conf.get('switchport')
        if switchport:
            # An L2 switchport cannot carry an IP address; force L3 first.
            cmd = "interface %s %s no switchport force" % (if_type, if_id)
            self._commands.append(cmd)
        if curr_ipv4 != req_ipv4:
            cmd = "interface %s %s ip address %s" % (if_type, if_id, req_ipv4)
            self._commands.append(cmd)
        if curr_ipv6 != req_ipv6:
            cmd = "interface %s %s ipv6 address %s" % (
                if_type, if_id, req_ipv6)
            self._commands.append(cmd)

    def generate_commands(self):
        """Diff required vs. current config and queue device commands."""
        req_interfaces = set()
        for req_conf in self._required_config:
            state = req_conf['state']
            if_name = req_conf['name']
            curr_conf = self._current_config.get(if_name, {})
            if state == 'absent':
                self._generate_no_ip_commands(req_conf, curr_conf)
            else:
                req_interfaces.add(if_name)
                self._generate_ip_commands(req_conf, curr_conf)
        if self._purge:
            for if_name, curr_conf in iteritems(self._current_config):
                if if_name not in req_interfaces:
                    # Bug fix: use the purged interface's own data here.
                    # The original code passed ``req_conf`` — the leftover
                    # loop variable from the loop above — so the generated
                    # "no ip" commands targeted the wrong interface (and
                    # raised NameError when no interfaces were requested).
                    self._generate_no_ip_commands(curr_conf, curr_conf)
def main():
    """ main entry point for module execution
    """
    # Delegates to the base-class runner (argument parsing, diff, exit).
    OnyxL3InterfaceModule.main()
if __name__ == '__main__':
    main()
| gpl-3.0 |
lnielsen/zenodo | tests/unit/deposit/test_api_metadata.py | 3 | 10518 | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test validation in Zenodo deposit REST API."""
from __future__ import absolute_import, print_function
import json
from datetime import datetime, timedelta
from flask import url_for
from invenio_search import current_search
from six import BytesIO
def test_invalid_create(api_client, es, json_auth_headers, deposit_url,
                        get_json):
    """Test invalid deposit creation.

    Payloads with an unknown top-level key or a misspelled ``metadata``
    key must be rejected with HTTP 400 and must not create a deposit.
    """
    client = api_client
    headers = json_auth_headers
    # Invalid deposits.
    cases = [
        dict(unknownkey='data', metadata={}),
        dict(metadat={}),
    ]
    for case in cases:
        res = client.post(deposit_url, data=json.dumps(case), headers=headers)
        assert res.status_code == 400, case
    # No deposits were created
    assert 0 == len(
        get_json(client.get(deposit_url, headers=headers), code=200))
def test_input_output(api_client, es, json_auth_headers, deposit_url, get_json,
                      license_record, grant_record, location):
    """Rough validation of input against output data.

    Posts a deposit exercising (almost) every supported metadata field and
    asserts the serialized deposit equals the input, modulo the
    pre-reserved DOI/recid filled in by the server.
    """
    client = api_client
    headers = json_auth_headers
    test_data = dict(
        metadata=dict(
            access_right='embargoed',
            communities=[{'identifier': 'cfa'}],
            conference_acronym='Some acronym',
            conference_dates='Some dates',
            conference_place='Some place',
            conference_title='Some title',
            conference_url='http://someurl.com',
            conference_session='VI',
            conference_session_part='1',
            creators=[
                dict(name="Doe, John", affiliation="Atlantis",
                     orcid="0000-0002-1825-0097", gnd="170118215"),
                dict(name="Smith, Jane", affiliation="Atlantis")
            ],
            description="Some description",
            doi="10.1234/foo.bar",
            embargo_date=(
                datetime.utcnow().date() + timedelta(days=1)).isoformat(),
            grants=[dict(id="282896"), ],
            imprint_isbn="Some isbn",
            imprint_place="Some place",
            imprint_publisher="Some publisher",
            journal_issue="Some issue",
            journal_pages="Some pages",
            journal_title="Some journal name",
            journal_volume="Some volume",
            keywords=["Keyword 1", "keyword 2"],
            subjects=[
                dict(scheme="gnd", identifier="gnd:1234567899",
                     term="Astronaut"),
                dict(scheme="gnd", identifier="gnd:1234567898", term="Amish"),
            ],
            license="CC0-1.0",
            notes="Some notes",
            partof_pages="SOme part of",
            partof_title="Some part of title",
            prereserve_doi=True,
            publication_date="2013-09-12",
            publication_type="book",
            references=[
                "Reference 1",
                "Reference 2",
            ],
            related_identifiers=[
                dict(identifier='10.1234/foo.bar2', relation='isCitedBy',
                     scheme='doi'),
                dict(identifier='10.1234/foo.bar3', relation='cites',
                     scheme='doi'),
                dict(
                    identifier='2011ApJS..192...18K',
                    relation='isAlternateIdentifier',
                    scheme='ads'),
            ],
            thesis_supervisors=[
                dict(name="Doe Sr., John", affiliation="Atlantis"),
                dict(name="Smith Sr., Jane", affiliation="Atlantis",
                     orcid="0000-0002-1825-0097",
                     gnd="170118215")
            ],
            thesis_university="Some thesis_university",
            contributors=[
                dict(name="Doe Sr., Jochen", affiliation="Atlantis",
                     type="Other"),
                dict(name="Smith Sr., Marco", affiliation="Atlantis",
                     orcid="0000-0002-1825-0097",
                     gnd="170118215",
                     type="DataCurator")
            ],
            title="Test title",
            upload_type="publication",
        )
    )
    # Create
    res = client.post(deposit_url, data=json.dumps(test_data), headers=headers)
    links = get_json(res, code=201)['links']
    current_search.flush_and_refresh(index='deposits')
    # Get serialization.
    data = get_json(client.get(links['self'], headers=headers), code=200)
    # - fix known differences.
    # DOI and recid have 2 as control number, since Concept DOI/recid are
    # registered first
    test_data['metadata'].update({
        'prereserve_doi': {'doi': '10.5072/zenodo.2', 'recid': 2}
    })
    assert data['metadata'] == test_data['metadata']
def test_unicode(api_client, es, location, json_auth_headers, deposit_url,
                 get_json, license_record, grant_record, auth_headers,
                 communities):
    """Rough validation of input against output data.

    Uses non-ASCII metadata in many scripts to exercise the full unicode
    round trip: create deposit, upload a file, publish, fetch the record.
    """
    client = api_client
    headers = json_auth_headers
    test_data = dict(
        metadata=dict(
            access_right='open',
            access_conditions='Αυτή είναι μια δοκιμή',
            communities=[{'identifier': 'c1'}],
            conference_acronym='Αυτή είναι μια δοκιμή',
            conference_dates='هذا هو اختبار',
            conference_place='Սա փորձություն',
            conference_title='Гэта тэст',
            conference_url='http://someurl.com',
            conference_session='5',
            conference_session_part='a',
            creators=[
                dict(name="Doe, John", affiliation="Това е тест"),
                dict(name="Smith, Jane", affiliation="Tio ĉi estas testo")
            ],
            description="这是一个测试",
            doi="10.1234/foo.bar",
            embargo_date="2010-12-09",
            grants=[dict(id="282896"), ],
            imprint_isbn="Some isbn",
            imprint_place="這是一個測試",
            imprint_publisher="ეს არის გამოცდა",
            journal_issue="આ એક કસોટી છે",
            journal_pages="זהו מבחן",
            journal_title="यह एक परीक्षण है",
            journal_volume="Þetta er prófun",
            keywords=["これはテストです", "ಇದು ಪರೀಕ್ಷೆ"],
            subjects=[
                dict(scheme="gnd", identifier="1234567899", term="これはです"),
                dict(scheme="gnd", identifier="1234567898", term="ಇ"),
            ],
            license="CC0-1.0",
            notes="이것은 테스트입니다",
            partof_pages="ນີ້ແມ່ນການທົດສອບ",
            partof_title="ही चाचणी आहे",
            prereserve_doi=True,
            publication_date="2013-09-12",
            publication_type="book",
            related_identifiers=[
                dict(
                    identifier='2011ApJS..192...18K',
                    relation='isAlternativeIdentifier'),
                dict(identifier='10.1234/foo.bar2', relation='isCitedBy'),
                dict(identifier='10.1234/foo.bar3', relation='cites'),
            ],
            thesis_supervisors=[
                dict(name="Doe Sr., این یک تست است", affiliation="Atlantis"),
                dict(name="Это Sr., Jane", affiliation="Atlantis")
            ],
            thesis_university="இந்த ஒரு சோதனை",
            contributors=[
                dict(name="Doe Sr., ن یک تست", affiliation="Atlantis",
                     type="Other"),
                dict(name="SmЭтith Sr., Marco", affiliation="Atlantis",
                     type="DataCurator")
            ],
            title="Đây là một thử nghiệm",
            upload_type="publication",
        )
    )
    # Create
    res = client.post(deposit_url, data=json.dumps(test_data), headers=headers)
    links = get_json(res, code=201)['links']
    current_search.flush_and_refresh(index='deposits')
    # Upload file
    assert client.post(
        links['files'],
        data=dict(file=(BytesIO(b'test'), 'test.txt'), name='test.txt'),
        headers=auth_headers,
    ).status_code == 201
    # Publish deposition
    response = client.post(links['publish'], headers=auth_headers)
    record_id = get_json(response, code=202)['record_id']
    # Get record.
    current_search.flush_and_refresh(index='records')
    response = client.get(
        url_for('invenio_records_rest.recid_item', pid_value=record_id))
def test_validation(api_client, es, json_auth_headers, deposit_url, get_json,
                    license_record, grant_record, auth_headers):
    """Test validation.

    Every metadata field below carries an invalid value; the API must
    respond 400 and report an error for each field.
    """
    client = api_client
    headers = json_auth_headers
    test_data = dict(metadata=dict(
        access_right='notvalid',
        conference_url='not_a_url',
        doi='not a doi',
        publication_date='not a date',
        title='',
        upload_type='notvalid'
    ))
    data = get_json(
        client.post(deposit_url, data=json.dumps(test_data), headers=headers),
        code=400)
    field_errors = {e['field'] for e in data['errors']}
    expected_field_errors = set([
        'metadata.access_right',
        'metadata.conference_url',
        'metadata.doi',
        'metadata.publication_date',
        'metadata.title',
        'metadata.upload_type',
    ])
    for e in expected_field_errors:
        assert e in field_errors
| gpl-2.0 |
joopert/home-assistant | homeassistant/components/deconz/__init__.py | 4 | 2685 | """Support for deCONZ devices."""
import voluptuous as vol
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from .config_flow import get_master_gateway
from .const import CONF_BRIDGEID, CONF_MASTER_GATEWAY, CONF_UUID, DOMAIN
from .gateway import DeconzGateway, get_gateway_from_config_entry
from .services import async_setup_services, async_unload_services
# Accept (and ignore) any YAML configuration under the deconz domain;
# setup is driven entirely by config entries.
CONFIG_SCHEMA = vol.Schema(
    {DOMAIN: vol.Schema({}, extra=vol.ALLOW_EXTRA)}, extra=vol.ALLOW_EXTRA
)
async def async_setup(hass, config):
    """Old way of setting up deCONZ integrations (YAML); now a no-op."""
    return True
async def async_setup_entry(hass, config_entry):
    """Set up a deCONZ bridge for a config entry.

    Load config, group, light and sensor data for server information.
    Start websocket for push notification of state changes from deCONZ.
    """
    if DOMAIN not in hass.data:
        hass.data[DOMAIN] = {}
    if not config_entry.options:
        # First setup of this entry: decide whether it becomes the master.
        await async_update_master_gateway(hass, config_entry)
    gateway = DeconzGateway(hass, config_entry)
    if not await gateway.async_setup():
        return False
    hass.data[DOMAIN][gateway.bridgeid] = gateway
    await gateway.async_update_device_registry()
    if CONF_UUID not in config_entry.data:
        # Migrate pre-UUID entries so discovery can identify them later.
        await async_add_uuid_to_config_entry(hass, config_entry)
    await async_setup_services(hass)
    # Close the gateway connection cleanly when Home Assistant stops.
    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, gateway.shutdown)
    return True
async def async_unload_entry(hass, config_entry):
    """Unload deCONZ config entry."""
    gateway = hass.data[DOMAIN].pop(config_entry.data[CONF_BRIDGEID])
    if not hass.data[DOMAIN]:
        # Last gateway removed: the domain services are no longer needed.
        await async_unload_services(hass)
    elif gateway.master:
        # The master gateway is going away; promote another gateway.
        await async_update_master_gateway(hass, config_entry)
        new_master_gateway = next(iter(hass.data[DOMAIN].values()))
        await async_update_master_gateway(hass, new_master_gateway.config_entry)
    return await gateway.async_reset()
async def async_update_master_gateway(hass, config_entry):
    """Update master gateway boolean.

    Called by setup_entry and unload_entry.
    Makes sure there is always one master available.
    """
    # This entry becomes master only when no other master currently exists.
    master = not get_master_gateway(hass)
    options = {**config_entry.options, CONF_MASTER_GATEWAY: master}
    hass.config_entries.async_update_entry(config_entry, options=options)
async def async_add_uuid_to_config_entry(hass, config_entry):
    """Add UUID to config entry to help discovery identify entries."""
    gateway = get_gateway_from_config_entry(hass, config_entry)
    # Persist the gateway-reported UUID into the entry's stored data.
    config = {**config_entry.data, CONF_UUID: gateway.api.config.uuid}
    hass.config_entries.async_update_entry(config_entry, data=config)
| apache-2.0 |
chauhanhardik/populo_2 | common/lib/xmodule/xmodule/assetstore/tests/test_asset_xml.py | 113 | 3631 | """
Test for asset XML generation / parsing.
"""
from path import Path as path
from lxml import etree
from contracts import ContractNotRespected
import unittest
from opaque_keys.edx.locator import CourseLocator
from xmodule.assetstore import AssetMetadata
from xmodule.modulestore.tests.test_assetstore import AssetStoreTestData
class TestAssetXml(unittest.TestCase):
    """
    Tests for storing/querying course asset metadata.

    Round-trips AssetMetadata objects through XML and validates the output
    against the assets.xsd schema.
    """
    def setUp(self):
        super(TestAssetXml, self).setUp()
        xsd_filename = "assets.xsd"
        self.course_id = CourseLocator('org1', 'course1', 'run1')
        self.course_assets = []
        for asset in AssetStoreTestData.all_asset_data:
            # First field is the asset name; the rest map onto metadata fields.
            asset_dict = dict(zip(AssetStoreTestData.asset_fields[1:], asset[1:]))
            asset_md = AssetMetadata(self.course_id.make_asset_key('asset', asset[0]), **asset_dict)
            self.course_assets.append(asset_md)
        # Read in the XML schema definition and make a validator.
        xsd_path = path(__file__).realpath().parent / xsd_filename
        with open(xsd_path, 'r') as f:
            schema_root = etree.XML(f.read())
        schema = etree.XMLSchema(schema_root)
        # Parser that validates any parsed XML against the assets schema.
        self.xmlparser = etree.XMLParser(schema=schema)
    def test_export_single_asset_to_from_xml(self):
        """
        Export a single AssetMetadata to XML and verify the structure and fields.
        """
        asset_md = self.course_assets[0]
        root = etree.Element("assets")
        asset = etree.SubElement(root, "asset")
        asset_md.to_xml(asset)
        # If this line does *not* raise, the XML is valid.
        etree.fromstring(etree.tostring(root), self.xmlparser)
        new_asset_key = self.course_id.make_asset_key('tmp', 'tmp')
        new_asset_md = AssetMetadata(new_asset_key)
        new_asset_md.from_xml(asset)
        # Compare asset_md to new_asset_md.
        for attr in AssetMetadata.XML_ATTRS:
            if attr in AssetMetadata.XML_ONLY_ATTRS:
                # These attributes only exist in XML; skip the comparison.
                continue
            orig_value = getattr(asset_md, attr)
            new_value = getattr(new_asset_md, attr)
            self.assertEqual(orig_value, new_value)
    def test_export_with_None_value(self):
        """
        Export and import a single AssetMetadata to XML with a None created_by field, without causing an exception.
        """
        asset_md = AssetMetadata(
            self.course_id.make_asset_key('asset', 'none_value'),
            created_by=None,
        )
        asset = etree.Element("asset")
        asset_md.to_xml(asset)
        asset_md.from_xml(asset)
    def test_export_all_assets_to_xml(self):
        """
        Export all AssetMetadatas to XML and verify the structure and fields.
        """
        root = etree.Element("assets")
        AssetMetadata.add_all_assets_as_xml(root, self.course_assets)
        # If this line does *not* raise, the XML is valid.
        etree.fromstring(etree.tostring(root), self.xmlparser)
    def test_wrong_node_type_all(self):
        """
        Ensure full asset sections with the wrong tag are detected.
        """
        root = etree.Element("glassets")
        with self.assertRaises(ContractNotRespected):
            AssetMetadata.add_all_assets_as_xml(root, self.course_assets)
    def test_wrong_node_type_single(self):
        """
        Ensure single asset blocks with the wrong tag are detected.
        """
        asset_md = self.course_assets[0]
        root = etree.Element("assets")
        asset = etree.SubElement(root, "smashset")
        with self.assertRaises(ContractNotRespected):
            asset_md.to_xml(asset)
| agpl-3.0 |
marqueedev/django | django/conf/locale/id/formats.py | 504 | 2135 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j N Y'
DATETIME_FORMAT = "j N Y, G.i"
TIME_FORMAT = 'G.i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j F'
SHORT_DATE_FORMAT = 'd-m-Y'
SHORT_DATETIME_FORMAT = 'd-m-Y G.i'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
    '%d-%m-%y', '%d/%m/%y', # '25-10-09', '25/10/09'
    '%d-%m-%Y', '%d/%m/%Y', # '25-10-2009', '25/10/2009'
    '%d %b %Y', # '25 Oct 2006',
    '%d %B %Y', # '25 October 2006'
]
TIME_INPUT_FORMATS = [
    '%H.%M.%S', # '14.30.59'
    '%H.%M', # '14.30'
]
DATETIME_INPUT_FORMATS = [
    '%d-%m-%Y %H.%M.%S', # '25-10-2009 14.30.59'
    '%d-%m-%Y %H.%M.%S.%f', # '25-10-2009 14.30.59.000200'
    '%d-%m-%Y %H.%M', # '25-10-2009 14.30'
    '%d-%m-%Y', # '25-10-2009'
    '%d-%m-%y %H.%M.%S', # '25-10-09 14.30.59'
    '%d-%m-%y %H.%M.%S.%f', # '25-10-09 14.30.59.000200'
    '%d-%m-%y %H.%M', # '25-10-09 14.30'
    '%d-%m-%y', # '25-10-09'
    '%m/%d/%y %H.%M.%S', # '10/25/06 14.30.59'
    '%m/%d/%y %H.%M.%S.%f', # '10/25/06 14.30.59.000200'
    '%m/%d/%y %H.%M', # '10/25/06 14.30'
    '%m/%d/%y', # '10/25/06'
    '%m/%d/%Y %H.%M.%S', # '10/25/2009 14.30.59'
    '%m/%d/%Y %H.%M.%S.%f', # '10/25/2009 14.30.59.000200'
    '%m/%d/%Y %H.%M', # '10/25/2009 14.30'
    '%m/%d/%Y', # '10/25/2009'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
| bsd-3-clause |
chiefspace/udemy-rest-api | udemy_rest_api_section5/env/lib/python3.4/site-packages/setuptools/compat.py | 331 | 2556 | import sys
import itertools
# Python 2/3 compatibility shims: expose one consistent set of names
# regardless of interpreter major version.
if sys.version_info[0] < 3:
    PY3 = False
    basestring = basestring
    import __builtin__ as builtins
    import ConfigParser
    from StringIO import StringIO
    BytesIO = StringIO
    execfile = execfile
    func_code = lambda o: o.func_code
    func_globals = lambda o: o.func_globals
    im_func = lambda o: o.im_func
    from htmlentitydefs import name2codepoint
    import httplib
    from BaseHTTPServer import HTTPServer
    from SimpleHTTPServer import SimpleHTTPRequestHandler
    from BaseHTTPServer import BaseHTTPRequestHandler
    iteritems = lambda o: o.iteritems()
    long_type = long
    maxsize = sys.maxint
    next = lambda o: o.next()
    numeric_types = (int, long, float)
    unichr = unichr
    unicode = unicode
    bytes = str
    from urllib import url2pathname, splittag, pathname2url
    import urllib2
    from urllib2 import urlopen, HTTPError, URLError, unquote, splituser
    from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit
    filterfalse = itertools.ifilterfalse
    # ``raise tp, value, tb`` is a syntax error on Python 3, so it is hidden
    # inside exec() to keep this module byte-compilable there.
    exec("""def reraise(tp, value, tb=None):
    raise tp, value, tb""")
else:
    PY3 = True
    basestring = str
    import builtins
    import configparser as ConfigParser
    from io import StringIO, BytesIO
    func_code = lambda o: o.__code__
    func_globals = lambda o: o.__globals__
    im_func = lambda o: o.__func__
    from html.entities import name2codepoint
    import http.client as httplib
    from http.server import HTTPServer, SimpleHTTPRequestHandler
    from http.server import BaseHTTPRequestHandler
    iteritems = lambda o: o.items()
    long_type = int
    maxsize = sys.maxsize
    next = next
    numeric_types = (int, float)
    unichr = chr
    unicode = str
    bytes = bytes
    from urllib.error import HTTPError, URLError
    import urllib.request as urllib2
    from urllib.request import urlopen, url2pathname, pathname2url
    from urllib.parse import (
        urlparse, urlunparse, unquote, splituser, urljoin, urlsplit,
        urlunsplit, splittag,
    )
    filterfalse = itertools.filterfalse
    # Re-implementation of the Python 2 builtin for Python 3.
    def execfile(fn, globs=None, locs=None):
        if globs is None:
            globs = globals()
        if locs is None:
            locs = globs
        f = open(fn, 'rb')
        try:
            source = f.read()
        finally:
            f.close()
        exec(compile(source, fn, 'exec'), globs, locs)
    # Python 3 spelling of "re-raise with an explicit traceback".
    def reraise(tp, value, tb=None):
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value
| gpl-2.0 |
aesteve/vertx-web | vertx-web/src/test/sockjs-protocol/unittest2/case.py | 4 | 55983 | """Test case implementation"""
import sys
import collections
import contextlib
import difflib
import logging
import pprint
import re
import traceback2 as traceback
import types
import unittest
import warnings
import six
from six.moves import range
from unittest2 import result
from unittest2.util import (
safe_repr, safe_str, strclass,
unorderable_list_difference, _common_shorten_repr
)
from unittest2.compatibility import (
wraps, with_context, catch_warnings, raise_from
)
# Marker conventionally used by unittest to hide this module's frames from
# failure tracebacks — TODO confirm the traceback machinery honors it here.
__unittest = True
# Template appended when a diff is suppressed for exceeding maxDiff.
DIFF_OMITTED = ('\nDiff is %s characters long. '
                'Set self.maxDiff to None to see it.')
class SkipTest(Exception):
    """
    Raise this exception in a test to skip it.

    Usually you can use TestCase.skipTest() or one of the skipping decorators
    instead of raising this directly.
    """
class _ShouldStop(Exception):
    """
    Internal signal that the test should stop (e.g. subtest aborting).
    """
class _UnexpectedSuccess(Exception):
    """
    The test was supposed to fail, but it didn't!
    """
class _Outcome(object):
    """Accumulates the results (success flag, skips, errors, expected
    failure) of executing the parts of a single test case."""
    def __init__(self, result=None):
        self.expecting_failure = False
        self.result = result
        # Older result objects may lack subtest support.
        self.result_supports_subtests = hasattr(result, "addSubTest")
        self.success = True
        self.skipped = []
        self.expectedFailure = None
        self.errors = []
    @contextlib.contextmanager
    def testPartExecutor(self, test_case, isTest=False):
        """Context manager running one part of a test (setUp/test/tearDown).

        Exceptions raised in the body are recorded on this outcome instead
        of propagating, except KeyboardInterrupt which is re-raised.
        """
        old_success = self.success
        self.success = True
        try:
            yield
        except KeyboardInterrupt:
            raise
        except SkipTest as e:
            self.success = False
            self.skipped.append((test_case, str(e)))
        except _ShouldStop:
            pass
        except:
            exc_info = sys.exc_info()
            if self.expecting_failure:
                self.expectedFailure = exc_info
            else:
                self.success = False
                self.errors.append((test_case, exc_info))
            # explicitly break a reference cycle:
            # exc_info -> frame -> exc_info
            exc_info = None
        else:
            if self.result_supports_subtests and self.success:
                # Record a successful part with no exception info.
                self.errors.append((test_case, None))
        finally:
            # A failed part makes the whole outcome unsuccessful.
            self.success = self.success and old_success
def _id(obj):
return obj
# Types that class objects can have: just ``type`` on Python 3, plus the
# old-style ``types.ClassType`` when it exists (Python 2).
class_types = [type]
if getattr(types, 'ClassType', None):
    class_types.append(types.ClassType)
class_types = tuple(class_types)
def skip(reason):
    """Decorator factory that unconditionally skips the decorated test."""
    def mark(test_item):
        # Classes are flagged in place; plain callables are replaced by a
        # wrapper that raises SkipTest when invoked.
        if not isinstance(test_item, class_types):
            @wraps(test_item)
            def raise_skip(*args, **kwargs):
                raise SkipTest(reason)
            test_item = raise_skip
        test_item.__unittest_skip__ = True
        test_item.__unittest_skip_why__ = reason
        return test_item
    return mark
def skipIf(condition, reason):
    """Skip the decorated test when *condition* is true."""
    return skip(reason) if condition else _id
def skipUnless(condition, reason):
    """Skip the decorated test unless *condition* is true."""
    return _id if condition else skip(reason)
def expectedFailure(test_item):
    """Mark *test_item* so the runner treats its failure as expected."""
    test_item.__unittest_expecting_failure__ = True
    return test_item
def _is_subtype(expected, basetype):
if isinstance(expected, tuple):
return all(_is_subtype(e, basetype) for e in expected)
return isinstance(expected, type) and issubclass(expected, basetype)
class _BaseTestCaseContext:
def __init__(self, test_case):
self.test_case = test_case
def _raiseFailure(self, standardMsg):
msg = self.test_case._formatMessage(self.msg, standardMsg)
raise self.test_case.failureException(msg)
class _AssertRaisesBaseContext(_BaseTestCaseContext):
    """Common argument handling for assertRaises/assertWarns contexts."""
    def __init__(self, expected, test_case, expected_regex=None):
        _BaseTestCaseContext.__init__(self, test_case)
        self.expected = expected
        self.failureException = test_case.failureException
        # A regex (string or pattern) narrows which exception/warning
        # messages count as a match.
        if expected_regex is not None:
            expected_regex = re.compile(expected_regex)
        self.expected_regex = expected_regex
        self.obj_name = None
        self.msg = None
    def handle(self, name, args, kwargs):
        """
        If args is empty, assertRaises/Warns is being used as a
        context manager, so check for a 'msg' kwarg and return self.
        If args is not empty, call a callable passing positional and keyword
        arguments.
        """
        if not _is_subtype(self.expected, self._base_type):
            raise TypeError('%s() arg 1 must be %s' %
                            (name, self._base_type_str))
        # Passing None as the callable is deprecated; treat it as the
        # context-manager form.
        if args and args[0] is None:
            warnings.warn("callable is None",
                          DeprecationWarning, 3)
            args = ()
        if not args:
            self.msg = kwargs.pop('msg', None)
            if kwargs:
                warnings.warn('%r is an invalid keyword argument for '
                              'this function' % next(iter(kwargs)),
                              DeprecationWarning, 3)
            return self
        callable_obj = args[0]
        args = args[1:]
        try:
            self.obj_name = callable_obj.__name__
        except AttributeError:
            self.obj_name = str(callable_obj)
        # Inline form: run the callable under this context manager.
        with self:
            callable_obj(*args, **kwargs)
class _AssertRaisesContext(_AssertRaisesBaseContext):
    """A context manager used to implement TestCase.assertRaises* methods."""
    _base_type = BaseException
    _base_type_str = 'an exception type or tuple of exception types'
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_value, tb):
        # No exception at all: the assertion failed.
        if exc_type is None:
            try:
                exc_name = self.expected.__name__
            except AttributeError:
                exc_name = str(self.expected)
            if self.obj_name:
                self._raiseFailure("{0} not raised by {1}".format(exc_name,
                                                                  self.obj_name))
            else:
                self._raiseFailure("{0} not raised".format(exc_name))
        else:
            # Drop frame references so large tracebacks are not kept alive.
            traceback.clear_frames(tb)
        if not issubclass(exc_type, self.expected):
            # let unexpected exceptions pass through
            return False
        self.exception = exc_value # store for later retrieval
        if self.expected_regex is None:
            return True
        expected_regex = self.expected_regex
        if not expected_regex.search(str(exc_value)):
            raise self.failureException('"%s" does not match "%s"' %
                     (expected_regex.pattern, str(exc_value)))
        return True
class _AssertWarnsContext(_AssertRaisesBaseContext):
    """A context manager used to implement TestCase.assertWarns* methods."""
    _base_type = Warning
    _base_type_str = 'a warning type or tuple of warning types'
    def __enter__(self):
        # The __warningregistry__'s need to be in a pristine state for tests
        # to work properly.
        for v in sys.modules.values():
            if getattr(v, '__warningregistry__', None):
                v.__warningregistry__ = {}
        # Record every warning of the expected category while active.
        self.warnings_manager = catch_warnings(record=True)
        self.warnings = self.warnings_manager.__enter__()
        warnings.simplefilter("always", self.expected)
        return self
    def __exit__(self, exc_type, exc_value, tb):
        self.warnings_manager.__exit__(exc_type, exc_value, tb)
        if exc_type is not None:
            # let unexpected exceptions pass through
            return
        try:
            exc_name = self.expected.__name__
        except AttributeError:
            exc_name = str(self.expected)
        first_matching = None
        for m in self.warnings:
            w = m.message
            if not isinstance(w, self.expected):
                continue
            if first_matching is None:
                first_matching = w
            # When a regex was supplied, the message must also match it.
            if (self.expected_regex is not None and
                not self.expected_regex.search(str(w))):
                continue
            # store warning for later retrieval
            self.warning = w
            self.filename = m.filename
            self.lineno = m.lineno
            return
        # Now we simply try to choose a helpful failure message
        if first_matching is not None:
            raise self.failureException('%r does not match %r' %
                     (self.expected_regex.pattern, str(first_matching)))
        if self.obj_name:
            raise self.failureException("%s not triggered by %s"
                % (exc_name, self.obj_name))
        else:
            raise self.failureException("%s not triggered"
                % exc_name )
class _TypeEqualityDict(object):
    """Mapping of type -> comparison function for assertEqual.

    Values stored as strings are resolved lazily via getattr on the
    owning test case, so subclasses can override the named method.
    """

    def __init__(self, testcase):
        self.testcase = testcase
        self._store = {}

    def __setitem__(self, key, value):
        self._store[key] = value

    def __getitem__(self, key):
        entry = self._store[key]
        if not isinstance(entry, six.string_types):
            return entry
        # A string names a method on the test case; resolve it now.
        return getattr(self.testcase, entry)

    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default
# Record returned by assertLogs: captured LogRecord objects plus their
# formatted string forms.
_LoggingWatcher = collections.namedtuple("_LoggingWatcher",
                                         ["records", "output"])
class _CapturingHandler(logging.Handler):
    """Logging handler that stores every record and its formatted text."""

    def __init__(self):
        logging.Handler.__init__(self)
        self.watcher = _LoggingWatcher([], [])

    def flush(self):
        # Everything lives in the watcher lists; nothing to flush.
        pass

    def emit(self, record):
        watcher = self.watcher
        watcher.records.append(record)
        watcher.output.append(self.format(record))
class _AssertLogsContext(_BaseTestCaseContext):
    """A context manager used to implement TestCase.assertLogs()."""
    LOGGING_FORMAT = "%(levelname)s:%(name)s:%(message)s"
    def __init__(self, test_case, logger_name, level):
        _BaseTestCaseContext.__init__(self, test_case)
        self.logger_name = logger_name
        if level:
            # Accept either a numeric level or a level name like 'INFO'.
            self.level = getattr(logging, str(level), level)
        else:
            self.level = logging.INFO
        self.msg = None
    def __enter__(self):
        # *logger_name* may be a Logger object or a name to look up.
        if isinstance(self.logger_name, logging.Logger):
            logger = self.logger = self.logger_name
        else:
            logger = self.logger = logging.getLogger(self.logger_name)
        formatter = logging.Formatter(self.LOGGING_FORMAT)
        handler = _CapturingHandler()
        handler.setFormatter(formatter)
        self.watcher = handler.watcher
        # Save handler/level/propagate state so __exit__ can restore it.
        self.old_handlers = logger.handlers[:]
        self.old_level = logger.level
        self.old_propagate = logger.propagate
        logger.handlers = [handler]
        logger.setLevel(self.level)
        logger.propagate = False
        return handler.watcher
    def __exit__(self, exc_type, exc_value, tb):
        self.logger.handlers = self.old_handlers
        self.logger.propagate = self.old_propagate
        self.logger.setLevel(self.old_level)
        if exc_type is not None:
            # let unexpected exceptions pass through
            return False
        if len(self.watcher.records) == 0:
            self._raiseFailure(
                "no logs of level {0} or higher triggered on {1}"
                .format(logging.getLevelName(self.level), self.logger.name))
class TestCase(unittest.TestCase):
"""A class whose instances are single test cases.
By default, the test code itself should be placed in a method named
'runTest'.
If the fixture may be used for many test cases, create as
many test methods as are needed. When instantiating such a TestCase
subclass, specify in the constructor arguments the name of the test method
that the instance is to execute.
Test authors should subclass TestCase for their own tests. Construction
and deconstruction of the test's environment ('fixture') can be
implemented by overriding the 'setUp' and 'tearDown' methods respectively.
If it is necessary to override the __init__ method, the base class
__init__ method must always be called. It is important that subclasses
should not change the signature of their __init__ method, since instances
of the classes are instantiated automatically by parts of the framework
in order to be run.
When subclassing TestCase, you can set these attributes:
* failureException: determines which exception will be raised when
the instance's assertion methods fail; test methods raising this
exception will be deemed to have 'failed' rather than 'errored'.
* longMessage: determines whether long messages (including repr of
objects used in assert methods) will be printed on failure in *addition*
to any explicit message passed.
* maxDiff: sets the maximum length of a diff in failure messages
by assert methods using difflib. It is looked up as an instance
attribute so can be configured by individual tests if required.
"""
failureException = AssertionError
longMessage = True
maxDiff = 80*8
# If a string is longer than _diffThreshold, use normal comparison instead
# of difflib. See #11763.
_diffThreshold = 2**16
# Attribute used by TestSuite for classSetUp
_classSetupFailed = False
    def __init__(self, methodName='runTest'):
        """Create an instance of the class that will use the named test
        method when executed. Raises a ValueError if the instance does
        not have a method with the specified name.
        """
        self._testMethodName = methodName
        self._outcome = None
        try:
            testMethod = getattr(self, methodName)
        except AttributeError:
            raise ValueError("no such test method in %s: %s" % \
                  (self.__class__, methodName))
        self._testMethodDoc = testMethod.__doc__
        self._cleanups = []
        self._subtest = None
        # Map types to custom assertEqual functions that will compare
        # instances of said type in more detail to generate a more useful
        # error message.
        self._type_equality_funcs = _TypeEqualityDict(self)
        self.addTypeEqualityFunc(dict, 'assertDictEqual')
        self.addTypeEqualityFunc(list, 'assertListEqual')
        self.addTypeEqualityFunc(tuple, 'assertTupleEqual')
        self.addTypeEqualityFunc(set, 'assertSetEqual')
        self.addTypeEqualityFunc(frozenset, 'assertSetEqual')
        # On Python 2 both str and unicode get the multi-line comparison;
        # on Python 3 only six.text_type (str) does.
        if six.PY2:
            self.addTypeEqualityFunc(str, 'assertMultiLineEqual')
        self.addTypeEqualityFunc(six.text_type, 'assertMultiLineEqual')
def addTypeEqualityFunc(self, typeobj, function):
"""Add a type specific assertEqual style function to compare a type.
This method is for use by TestCase subclasses that need to register
their own type equality functions to provide nicer error messages.
Args:
typeobj: The data type to call this function on when both values
are of the same type in assertEqual().
function: The callable taking two arguments and an optional
msg= argument that raises self.failureException with a
useful error message when the two arguments are not equal.
"""
self._type_equality_funcs[typeobj] = function
def addCleanup(self, function, *args, **kwargs):
"""Add a function, with arguments, to be called when the test is
completed. Functions added are called on a LIFO basis and are
called after tearDown on test failure or success.
Cleanup items are called even if setUp fails (unlike tearDown)."""
self._cleanups.append((function, args, kwargs))
    @classmethod
    def setUpClass(cls):
        """Hook method for setting up class fixture before running tests in the class."""
        # Default implementation does nothing; subclasses override as needed.
    @classmethod
    def tearDownClass(cls):
        """Hook method for deconstructing the class fixture after running all tests in the class."""
        # Default implementation does nothing; subclasses override as needed.
    def defaultTestResult(self):
        """Return a fresh TestResult, used when run() is given none."""
        return result.TestResult()
def shortDescription(self):
"""Returns a one-line description of the test, or None if no
description has been provided.
The default implementation of this method returns the first line of
the specified test method's docstring.
"""
doc = self._testMethodDoc
return doc and doc.split("\n")[0].strip() or None
def id(self):
return "%s.%s" % (strclass(self.__class__), self._testMethodName)
def __eq__(self, other):
if type(self) is not type(other):
return NotImplemented
return self._testMethodName == other._testMethodName
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((type(self), self._testMethodName))
def __str__(self):
return "%s (%s)" % (self._testMethodName, strclass(self.__class__))
def __repr__(self):
return "<%s testMethod=%s>" % \
(strclass(self.__class__), self._testMethodName)
def _addSkip(self, result, test_case, reason):
addSkip = getattr(result, 'addSkip', None)
if addSkip is not None:
addSkip(test_case, reason)
else:
warnings.warn("TestResult has no addSkip method, skips not reported",
RuntimeWarning, 2)
result.addSuccess(test_case)
    @contextlib.contextmanager
    def subTest(self, msg=None, **params):
        """Return a context manager that will return the enclosed block
        of code in a subtest identified by the optional message and
        keyword parameters. A failure in the subtest marks the test
        case as failed but resumes execution at the end of the enclosed
        block, allowing further test code to be executed.
        """
        # Without addSubTest support there is nothing to report into.
        if not self._outcome.result_supports_subtests:
            yield
            return
        parent = self._subtest
        # Nested subTest calls chain their parameters via ChainMap.
        if parent is None:
            params_map = collections.ChainMap(params)
        else:
            params_map = parent.params.new_child(params)
        self._subtest = _SubTest(self, msg, params_map)
        try:
            with self._outcome.testPartExecutor(self._subtest, isTest=True):
                yield
            if not self._outcome.success:
                result = self._outcome.result
                if result is not None and result.failfast:
                    raise _ShouldStop
            elif self._outcome.expectedFailure:
                # If the test is expecting a failure, we really want to
                # stop now and register the expected failure.
                raise _ShouldStop
        finally:
            self._subtest = parent
def _feedErrorsToResult(self, result, errors):
for test, exc_info in errors:
if isinstance(test, _SubTest):
result.addSubTest(test.test_case, test, exc_info)
elif exc_info is not None:
if issubclass(exc_info[0], self.failureException):
result.addFailure(test, exc_info)
else:
result.addError(test, exc_info)
    def _addExpectedFailure(self, result, exc_info):
        """Report an expected failure, tolerating legacy result objects."""
        try:
            addExpectedFailure = result.addExpectedFailure
        except AttributeError:
            # Legacy TestResult: warn and count the test as a success.
            warnings.warn("TestResult has no addExpectedFailure method, reporting as passes",
                          RuntimeWarning)
            result.addSuccess(self)
        else:
            addExpectedFailure(self, exc_info)
    def _addUnexpectedSuccess(self, result):
        """Report an unexpected success, tolerating legacy result objects."""
        try:
            addUnexpectedSuccess = result.addUnexpectedSuccess
        except AttributeError:
            warnings.warn("TestResult has no addUnexpectedSuccess method, reporting as failure",
                          RuntimeWarning)
            # We need to pass an actual exception and traceback to addFailure,
            # otherwise the legacy result can choke.
            try:
                raise_from(_UnexpectedSuccess, None)
            except _UnexpectedSuccess:
                result.addFailure(self, sys.exc_info())
        else:
            addUnexpectedSuccess(self)
    def run(self, result=None):
        """Run the test, reporting into *result* (a default one is
        created, and started/stopped, when none is supplied)."""
        orig_result = result
        if result is None:
            result = self.defaultTestResult()
            startTestRun = getattr(result, 'startTestRun', None)
            if startTestRun is not None:
                startTestRun()
        result.startTest(self)
        testMethod = getattr(self, self._testMethodName)
        if (getattr(self.__class__, "__unittest_skip__", False) or
            getattr(testMethod, "__unittest_skip__", False)):
            # If the class or method was skipped.
            try:
                skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
                            or getattr(testMethod, '__unittest_skip_why__', ''))
                self._addSkip(result, self, skip_why)
            finally:
                result.stopTest(self)
            return
        expecting_failure = getattr(testMethod,
                                    "__unittest_expecting_failure__", False)
        outcome = _Outcome(result)
        try:
            self._outcome = outcome
            with outcome.testPartExecutor(self):
                self.setUp()
            # tearDown runs only when setUp succeeded, mirroring unittest.
            if outcome.success:
                outcome.expecting_failure = expecting_failure
                with outcome.testPartExecutor(self, isTest=True):
                    testMethod()
                outcome.expecting_failure = False
                with outcome.testPartExecutor(self):
                    self.tearDown()
            self.doCleanups()
            for test, reason in outcome.skipped:
                self._addSkip(result, test, reason)
            self._feedErrorsToResult(result, outcome.errors)
            if outcome.success:
                if expecting_failure:
                    if outcome.expectedFailure:
                        self._addExpectedFailure(result, outcome.expectedFailure)
                    else:
                        self._addUnexpectedSuccess(result)
                else:
                    result.addSuccess(self)
            return result
        finally:
            result.stopTest(self)
            if orig_result is None:
                stopTestRun = getattr(result, 'stopTestRun', None)
                if stopTestRun is not None:
                    stopTestRun()
            # explicitly break reference cycles:
            # outcome.errors -> frame -> outcome -> outcome.errors
            # outcome.expectedFailure -> frame -> outcome -> outcome.expectedFailure
            del outcome.errors[:]
            outcome.expectedFailure = None
            # clear the outcome, no more needed
            self._outcome = None
    def doCleanups(self):
        """Execute all cleanup functions. Normally called for you after
        tearDown."""
        outcome = self._outcome or _Outcome()
        # Pop so cleanups run LIFO, and so a re-entrant addCleanup during
        # cleanup is still picked up by this loop.
        while self._cleanups:
            function, args, kwargs = self._cleanups.pop()
            with outcome.testPartExecutor(self):
                function(*args, **kwargs)
        # return this for backwards compatibility
        # even though we no longer use it internally
        return outcome.success
    def __call__(self, *args, **kwds):
        """Calling a TestCase runs it, mirroring unittest behaviour."""
        return self.run(*args, **kwds)
    def debug(self):
        """Run the test without collecting errors in a TestResult"""
        # Exceptions propagate to the caller; no outcome bookkeeping here.
        self.setUp()
        getattr(self, self._testMethodName)()
        self.tearDown()
        # Cleanups still run LIFO, but outside any testPartExecutor.
        while self._cleanups:
            function, args, kwargs = self._cleanups.pop(-1)
            function(*args, **kwargs)
    def skipTest(self, reason):
        """Skip this test, reporting *reason* via SkipTest."""
        raise SkipTest(reason)
    def fail(self, msg=None):
        """Fail immediately, raising failureException with *msg*."""
        raise self.failureException(msg)
def assertFalse(self, expr, msg=None):
"Fail the test if the expression is true."
if expr:
msg = self._formatMessage(msg, "%s is not false" % safe_repr(expr))
raise self.failureException(msg)
def assertTrue(self, expr, msg=None):
"""Fail the test unless the expression is true."""
if not expr:
msg = self._formatMessage(msg, "%s is not true" % safe_repr(expr))
raise self.failureException(msg)
def _formatMessage(self, msg, standardMsg):
"""Honour the longMessage attribute when generating failure messages.
If longMessage is False this means:
* Use only an explicit message if it is provided
* Otherwise use the standard message for the assert
If longMessage is True:
* Use the standard message
* If an explicit message is provided, plus ' : ' and the explicit message
"""
if not self.longMessage:
return msg or standardMsg
if msg is None:
return standardMsg
try:
return '%s : %s' % (standardMsg, msg)
except UnicodeDecodeError:
return '%s : %s' % (safe_str(standardMsg), safe_str(msg))
    def assertRaises(self, expected_exception, *args, **kwargs):
        """Fail unless an exception of class expected_exception is raised
        by the callable when invoked with specified positional and
        keyword arguments. If a different type of exception is
        raised, it will not be caught, and the test case will be
        deemed to have suffered an error, exactly as for an
        unexpected exception.
        If called with the callable and arguments omitted, will return a
        context object used like this::
            with self.assertRaises(SomeException):
                do_something()
        The context manager keeps a reference to the exception as
        the 'exception' attribute. This allows you to inspect the
        exception after the assertion::
            with self.assertRaises(SomeException) as cm:
                do_something()
            the_exception = cm.exception
            self.assertEqual(the_exception.error_code, 3)
        """
        # handle() either returns the context (with-statement form) or
        # invokes the supplied callable immediately (inline form).
        context = _AssertRaisesContext(expected_exception, self)
        return context.handle('assertRaises', args, kwargs)
    def assertWarns(self, expected_warning, *args, **kwargs):
        """Fail unless a warning of class warnClass is triggered
        by the callable when invoked with specified positional and
        keyword arguments. If a different type of warning is
        triggered, it will not be handled: depending on the other
        warning filtering rules in effect, it might be silenced, printed
        out, or raised as an exception.
        If called with the callable and arguments omitted, will return a
        context object used like this::
            with self.assertWarns(SomeWarning):
                do_something()
        The context manager keeps a reference to the first matching
        warning as the 'warning' attribute; similarly, the 'filename'
        and 'lineno' attributes give you information about the line
        of Python code from which the warning was triggered.
        This allows you to inspect the warning after the assertion::
            with self.assertWarns(SomeWarning) as cm:
                do_something()
            the_warning = cm.warning
            self.assertEqual(the_warning.some_attribute, 147)
        """
        # handle() either returns the context (with-statement form) or
        # invokes the supplied callable immediately (inline form).
        context = _AssertWarnsContext(expected_warning, self)
        return context.handle('assertWarns', args, kwargs)
    def assertLogs(self, logger=None, level=None):
        """Fail unless a log message of level *level* or higher is emitted
        on *logger_name* or its children. If omitted, *level* defaults to
        INFO and *logger* defaults to the root logger.
        This method must be used as a context manager, and will yield
        a recording object with two attributes: `output` and `records`.
        At the end of the context manager, the `output` attribute will
        be a list of the matching formatted log messages and the
        `records` attribute will be a list of the corresponding LogRecord
        objects.
        Example::
            with self.assertLogs('foo', level='INFO') as cm:
                logging.getLogger('foo').info('first message')
                logging.getLogger('foo.bar').error('second message')
            self.assertEqual(cm.output, ['INFO:foo:first message',
                                         'ERROR:foo.bar:second message'])
        """
        # The heavy lifting (handler swap, level save/restore, failure
        # check) lives in _AssertLogsContext.
        return _AssertLogsContext(self, logger, level)
def _getAssertEqualityFunc(self, first, second):
"""Get a detailed comparison function for the types of the two args.
Returns: A callable accepting (first, second, msg=None) that will
raise a failure exception if first != second with a useful human
readable error message for those types.
"""
#
# NOTE(gregory.p.smith): I considered isinstance(first, type(second))
# and vice versa. I opted for the conservative approach in case
# subclasses are not intended to be compared in detail to their super
# class instances using a type equality func. This means testing
# subtypes won't automagically use the detailed comparison. Callers
# should use their type specific assertSpamEqual method to compare
# subclasses if the detailed comparison is desired and appropriate.
# See the discussion in http://bugs.python.org/issue2578.
#
if type(first) is type(second):
asserter = self._type_equality_funcs.get(type(first))
if asserter is not None:
return asserter
return self._baseAssertEqual
def _baseAssertEqual(self, first, second, msg=None):
"""The default assertEqual implementation, not type specific."""
if not first == second:
standardMsg = '%s != %s' % _common_shorten_repr(first, second)
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
def assertEqual(self, first, second, msg=None):
"""Fail if the two objects are unequal as determined by the '=='
operator.
"""
assertion_func = self._getAssertEqualityFunc(first, second)
assertion_func(first, second, msg=msg)
def assertNotEqual(self, first, second, msg=None):
"""Fail if the two objects are equal as determined by the '!='
operator.
"""
if not first != second:
msg = self._formatMessage(msg, '%s == %s' % (safe_repr(first),
safe_repr(second)))
raise self.failureException(msg)
    def assertAlmostEqual(self, first, second, places=None, msg=None, delta=None):
        """Fail if the two objects are unequal as determined by their
        difference rounded to the given number of decimal places
        (default 7) and comparing to zero, or by comparing that the
        difference between the two objects is more than the given delta.
        Note that decimal places (from zero) are usually not the same
        as significant digits (measured from the most significant digit).
        If the two objects compare equal then they will automatically
        compare almost equal.
        """
        if first == second:
            # shortcut
            return
        if delta is not None and places is not None:
            raise TypeError("specify delta or places not both")
        if delta is not None:
            if abs(first - second) <= delta:
                return
            standardMsg = '%s != %s within %s delta' % (safe_repr(first),
                                                        safe_repr(second),
                                                        safe_repr(delta))
        else:
            if places is None:
                places = 7
            if round(abs(second-first), places) == 0:
                return
            standardMsg = '%s != %s within %r places' % (safe_repr(first),
                                                         safe_repr(second),
                                                         places)
        msg = self._formatMessage(msg, standardMsg)
        raise self.failureException(msg)
    def assertNotAlmostEqual(self, first, second, places=None, msg=None, delta=None):
        """Fail if the two objects are equal as determined by their
        difference rounded to the given number of decimal places
        (default 7) and comparing to zero, or by comparing that the
        difference between the two objects is less than the given delta.
        Note that decimal places (from zero) are usually not the same
        as significant digits (measured from the most significant digit).
        Objects that are equal automatically fail.
        """
        if delta is not None and places is not None:
            raise TypeError("specify delta or places not both")
        if delta is not None:
            if not (first == second) and abs(first - second) > delta:
                return
            standardMsg = '%s == %s within %s delta' % (safe_repr(first),
                                                        safe_repr(second),
                                                        safe_repr(delta))
        else:
            if places is None:
                places = 7
            if not (first == second) and round(abs(second-first), places) != 0:
                return
            standardMsg = '%s == %s within %r places' % (safe_repr(first),
                                                         safe_repr(second),
                                                         places)
        msg = self._formatMessage(msg, standardMsg)
        raise self.failureException(msg)
    def assertSequenceEqual(self, seq1, seq2, msg=None, seq_type=None):
        """An equality assertion for ordered sequences (like lists and tuples).
        For the purposes of this function, a valid ordered sequence type is one
        which can be indexed, has a length, and has an equality operator.
        Args:
            seq1: The first sequence to compare.
            seq2: The second sequence to compare.
            seq_type: The expected datatype of the sequences, or None if no
                datatype should be enforced.
            msg: Optional message to use on failure instead of a list of
                differences.
        """
        # Enforce the concrete type first when one was requested.
        if seq_type is not None:
            seq_type_name = seq_type.__name__
            if not isinstance(seq1, seq_type):
                raise self.failureException('First sequence is not a %s: %s'
                                        % (seq_type_name, safe_repr(seq1)))
            if not isinstance(seq2, seq_type):
                raise self.failureException('Second sequence is not a %s: %s'
                                        % (seq_type_name, safe_repr(seq2)))
        else:
            seq_type_name = "sequence"
        differing = None
        try:
            len1 = len(seq1)
        except (TypeError, NotImplementedError):
            differing = 'First %s has no length. Non-sequence?' % (
                    seq_type_name)
        if differing is None:
            try:
                len2 = len(seq2)
            except (TypeError, NotImplementedError):
                differing = 'Second %s has no length. Non-sequence?' % (
                        seq_type_name)
        if differing is None:
            if seq1 == seq2:
                return
            differing = '%ss differ: %s != %s\n' % (
                    (seq_type_name.capitalize(),) +
                    _common_shorten_repr(seq1, seq2))
            # Scan element-by-element to name the first difference.
            for i in range(min(len1, len2)):
                try:
                    item1 = seq1[i]
                except (TypeError, IndexError, NotImplementedError):
                    differing += ('\nUnable to index element %d of first %s\n' %
                                 (i, seq_type_name))
                    break
                try:
                    item2 = seq2[i]
                except (TypeError, IndexError, NotImplementedError):
                    differing += ('\nUnable to index element %d of second %s\n' %
                                 (i, seq_type_name))
                    break
                if item1 != item2:
                    differing += ('\nFirst differing element %d:\n%s\n%s\n' %
                                 (i, item1, item2))
                    break
            else:
                # No element differed within the common prefix.
                if (len1 == len2 and seq_type is None and
                    type(seq1) != type(seq2)):
                    # The sequences are the same, but have differing types.
                    return
                if len1 > len2:
                    differing += ('\nFirst %s contains %d additional '
                                 'elements.\n' % (seq_type_name, len1 - len2))
                    try:
                        differing += ('First extra element %d:\n%s\n' %
                                      (len2, seq1[len2]))
                    except (TypeError, IndexError, NotImplementedError):
                        differing += ('Unable to index element %d '
                                      'of first %s\n' % (len2, seq_type_name))
                elif len1 < len2:
                    differing += ('\nSecond %s contains %d additional '
                                 'elements.\n' % (seq_type_name, len2 - len1))
                    try:
                        differing += ('First extra element %d:\n%s\n' %
                                      (len1, seq2[len1]))
                    except (TypeError, IndexError, NotImplementedError):
                        differing += ('Unable to index element %d '
                                      'of second %s\n' % (len1, seq_type_name))
        standardMsg = differing
        diffMsg = '\n' + '\n'.join(
            difflib.ndiff(pprint.pformat(seq1).splitlines(),
                          pprint.pformat(seq2).splitlines()))
        standardMsg = self._truncateMessage(standardMsg, diffMsg)
        msg = self._formatMessage(msg, standardMsg)
        self.fail(msg)
def _truncateMessage(self, message, diff):
max_diff = self.maxDiff
if max_diff is None or len(diff) <= max_diff:
return message + diff
return message + (DIFF_OMITTED % len(diff))
def assertListEqual(self, list1, list2, msg=None):
"""A list-specific equality assertion.
Args:
list1: The first list to compare.
list2: The second list to compare.
msg: Optional message to use on failure instead of a list of
differences.
"""
self.assertSequenceEqual(list1, list2, msg, seq_type=list)
def assertTupleEqual(self, tuple1, tuple2, msg=None):
"""A tuple-specific equality assertion.
Args:
tuple1: The first tuple to compare.
tuple2: The second tuple to compare.
msg: Optional message to use on failure instead of a list of
differences.
"""
self.assertSequenceEqual(tuple1, tuple2, msg, seq_type=tuple)
    def assertSetEqual(self, set1, set2, msg=None):
        """A set-specific equality assertion.
        Args:
            set1: The first set to compare.
            set2: The second set to compare.
            msg: Optional message to use on failure instead of a list of
                differences.
        assertSetEqual uses ducktyping to support
        different types of sets, and is optimized for sets specifically
        (parameters must support a difference method).
        """
        # Each direction's difference is computed separately so the
        # failure message can report both missing and extra items.
        try:
            difference1 = set1.difference(set2)
        except TypeError:
            e = sys.exc_info()[1]
            self.fail('invalid type when attempting set difference: %s' % e)
        except AttributeError:
            e = sys.exc_info()[1]
            self.fail('first argument does not support set difference: %s' % e)
        try:
            difference2 = set2.difference(set1)
        except TypeError:
            e = sys.exc_info()[1]
            self.fail('invalid type when attempting set difference: %s' % e)
        except AttributeError:
            e = sys.exc_info()[1]
            self.fail('second argument does not support set difference: %s' % e)
        if not (difference1 or difference2):
            return
        lines = []
        if difference1:
            lines.append('Items in the first set but not the second:')
            for item in difference1:
                lines.append(repr(item))
        if difference2:
            lines.append('Items in the second set but not the first:')
            for item in difference2:
                lines.append(repr(item))
        standardMsg = '\n'.join(lines)
        self.fail(self._formatMessage(msg, standardMsg))
def assertIn(self, member, container, msg=None):
"""Just like self.assertTrue(a in b), but with a nicer default message."""
if member not in container:
standardMsg = '%s not found in %s' % (safe_repr(member),
safe_repr(container))
self.fail(self._formatMessage(msg, standardMsg))
def assertNotIn(self, member, container, msg=None):
"""Just like self.assertTrue(a not in b), but with a nicer default message."""
if member in container:
standardMsg = '%s unexpectedly found in %s' % (safe_repr(member),
safe_repr(container))
self.fail(self._formatMessage(msg, standardMsg))
def assertIs(self, expr1, expr2, msg=None):
"""Just like self.assertTrue(a is b), but with a nicer default message."""
if expr1 is not expr2:
standardMsg = '%s is not %s' % (safe_repr(expr1), safe_repr(expr2))
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNot(self, expr1, expr2, msg=None):
"""Just like self.assertTrue(a is not b), but with a nicer default message."""
if expr1 is expr2:
standardMsg = 'unexpectedly identical: %s' % (safe_repr(expr1),)
self.fail(self._formatMessage(msg, standardMsg))
def assertDictEqual(self, d1, d2, msg=None):
self.assertIsInstance(d1, dict, 'First argument is not a dictionary')
self.assertIsInstance(d2, dict, 'Second argument is not a dictionary')
if d1 != d2:
standardMsg = '%s != %s' % _common_shorten_repr(d1, d2)
diff = ('\n' + '\n'.join(difflib.ndiff(
pprint.pformat(d1).splitlines(),
pprint.pformat(d2).splitlines())))
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertDictContainsSubset(self, expected, actual, msg=None):
"""Checks whether actual is a superset of expected."""
missing = []
mismatched = []
for key, value in expected.items():
if key not in actual:
missing.append(key)
elif value != actual[key]:
mismatched.append('%s, expected: %s, actual: %s' %
(safe_repr(key), safe_repr(value),
safe_repr(actual[key])))
if not (missing or mismatched):
return
standardMsg = ''
if missing:
standardMsg = 'Missing: %s' % ','.join(safe_repr(m) for m in
missing)
if mismatched:
if standardMsg:
standardMsg += '; '
standardMsg += 'Mismatched values: %s' % ','.join(mismatched)
self.fail(self._formatMessage(msg, standardMsg))
def assertItemsEqual(self, expected_seq, actual_seq, msg=None):
"""An unordered sequence specific comparison. It asserts that
expected_seq and actual_seq contain the same elements. It is
the equivalent of::
self.assertEqual(sorted(expected_seq), sorted(actual_seq))
Raises with an error message listing which elements of expected_seq
are missing from actual_seq and vice versa if any.
Asserts that each element has the same count in both sequences.
Example:
- [0, 1, 1] and [1, 0, 1] compare equal.
- [0, 0, 1] and [0, 1] compare unequal.
"""
try:
expected = sorted(expected_seq)
actual = sorted(actual_seq)
except TypeError:
# Unsortable items (example: set(), complex(), ...)
expected = list(expected_seq)
actual = list(actual_seq)
missing, unexpected = unorderable_list_difference(
expected, actual, ignore_duplicate=False
)
else:
return self.assertSequenceEqual(expected, actual, msg=msg)
errors = []
if missing:
errors.append('Expected, but missing:\n %s' %
safe_repr(missing))
if unexpected:
errors.append('Unexpected, but present:\n %s' %
safe_repr(unexpected))
if errors:
standardMsg = '\n'.join(errors)
self.fail(self._formatMessage(msg, standardMsg))
def assertMultiLineEqual(self, first, second, msg=None):
"""Assert that two multi-line strings are equal."""
self.assertIsInstance(first, six.string_types, (
'First argument is not a string'))
self.assertIsInstance(second, six.string_types, (
'Second argument is not a string'))
if first != second:
# don't use difflib if the strings are too long
if (len(first) > self._diffThreshold or
len(second) > self._diffThreshold):
self._baseAssertEqual(first, second, msg)
firstlines = first.splitlines(True)
secondlines = second.splitlines(True)
if len(firstlines) == 1 and first.strip('\r\n') == first:
firstlines = [first + '\n']
secondlines = [second + '\n']
standardMsg = '%s != %s' % _common_shorten_repr(first, second)
diff = '\n' + ''.join(difflib.ndiff(firstlines, secondlines))
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertLess(self, a, b, msg=None):
"""Just like self.assertTrue(a < b), but with a nicer default message."""
if not a < b:
standardMsg = '%s not less than %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertLessEqual(self, a, b, msg=None):
"""Just like self.assertTrue(a <= b), but with a nicer default message."""
if not a <= b:
standardMsg = '%s not less than or equal to %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertGreater(self, a, b, msg=None):
"""Just like self.assertTrue(a > b), but with a nicer default message."""
if not a > b:
standardMsg = '%s not greater than %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertGreaterEqual(self, a, b, msg=None):
"""Just like self.assertTrue(a >= b), but with a nicer default message."""
if not a >= b:
standardMsg = '%s not greater than or equal to %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNone(self, obj, msg=None):
"""Same as self.assertTrue(obj is None), with a nicer default message."""
if obj is not None:
standardMsg = '%s is not None' % (safe_repr(obj),)
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNotNone(self, obj, msg=None):
"""Included for symmetry with assertIsNone."""
if obj is None:
standardMsg = 'unexpectedly None'
self.fail(self._formatMessage(msg, standardMsg))
def assertIsInstance(self, obj, cls, msg=None):
"""Same as self.assertTrue(isinstance(obj, cls)), with a nicer
default message."""
if not isinstance(obj, cls):
standardMsg = '%s is not an instance of %r' % (safe_repr(obj), cls)
self.fail(self._formatMessage(msg, standardMsg))
def assertNotIsInstance(self, obj, cls, msg=None):
"""Included for symmetry with assertIsInstance."""
if isinstance(obj, cls):
standardMsg = '%s is an instance of %r' % (safe_repr(obj), cls)
self.fail(self._formatMessage(msg, standardMsg))
def assertRaisesRegex(self, expected_exception, expected_regex,
*args, **kwargs):
"""Asserts that the message in a raised exception matches a regex.
Args:
expected_exception: Exception class expected to be raised.
expected_regex: Regex (re pattern object or string) expected
to be found in error message.
args: Function to be called and extra positional args.
kwargs: Extra kwargs.
"""
context = _AssertRaisesContext(expected_exception, self, expected_regex)
return context.handle('assertRaisesRegex', args, kwargs)
def assertWarnsRegex(self, expected_warning, expected_regex,
*args, **kwargs):
"""Asserts that the message in a triggered warning matches a regex.
Basic functioning is similar to assertWarns() with the addition
that only warnings whose messages also match the regular expression
are considered successful matches.
Args:
expected_warning: Warning class expected to be triggered.
expected_regex: Regex (re pattern object or string) expected
to be found in error message.
args: Function to be called and extra positional args.
kwargs: Extra kwargs.
"""
context = _AssertWarnsContext(expected_warning, self, expected_regex)
return context.handle('assertWarnsRegex', args, kwargs)
def assertRegex(self, text, expected_regex, msg=None):
"""Fail the test unless the text matches the regular expression."""
if isinstance(expected_regex, six.string_types):
expected_regex = re.compile(expected_regex)
if not expected_regex.search(text):
msg = msg or "Regex didn't match"
msg = '%s: %r not found in %r' % (msg, expected_regex.pattern, text)
raise self.failureException(msg)
def assertNotRegex(self, text, unexpected_regex, msg=None):
"""Fail the test if the text matches the regular expression."""
if isinstance(unexpected_regex, six.string_types):
unexpected_regex = re.compile(unexpected_regex)
match = unexpected_regex.search(text)
if match:
msg = msg or "Regex matched"
msg = '%s: %r matches %r in %r' % (msg,
text[match.start():match.end()],
unexpected_regex.pattern,
text)
raise self.failureException(msg)
def _deprecate(original_func):
def deprecated_func(*args, **kwargs):
warnings.warn(
('Please use %s instead.' % original_func.__name__),
PendingDeprecationWarning, 2)
return original_func(*args, **kwargs)
return deprecated_func
failUnlessEqual = assertEquals = _deprecate(assertEqual)
failIfEqual = assertNotEquals = _deprecate(assertNotEqual)
failUnlessAlmostEqual = assertAlmostEquals = _deprecate(assertAlmostEqual)
failIfAlmostEqual = assertNotAlmostEquals = _deprecate(assertNotAlmostEqual)
failUnless = assert_ = _deprecate(assertTrue)
failUnlessRaises = _deprecate(assertRaises)
failIf = _deprecate(assertFalse)
assertRaisesRegexp = _deprecate(assertRaisesRegex)
assertRegexpMatches = _deprecate(assertRegex)
assertNotRegexpMatches = _deprecate(assertNotRegex)
class FunctionTestCase(TestCase):
    """A TestCase that wraps a plain test function.

    Useful for slipping pre-existing test functions into the unittest
    framework.  Optional set-up and tidy-up callables may be supplied; as
    with TestCase, the tidy-up ('tearDown') function is always called when
    the set-up ('setUp') function ran successfully.
    """
    def __init__(self, testFunc, setUp=None, tearDown=None, description=None):
        super(FunctionTestCase, self).__init__()
        self._setUpFunc = setUp
        self._tearDownFunc = tearDown
        self._testFunc = testFunc
        self._description = description
    def setUp(self):
        fn = self._setUpFunc
        if fn is not None:
            fn()
    def tearDown(self):
        fn = self._tearDownFunc
        if fn is not None:
            fn()
    def runTest(self):
        self._testFunc()
    def id(self):
        # The wrapped function's name doubles as the test id.
        return self._testFunc.__name__
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return ((self._setUpFunc, self._tearDownFunc,
                 self._testFunc, self._description) ==
                (other._setUpFunc, other._tearDownFunc,
                 other._testFunc, other._description))
    def __ne__(self, other):
        return not self == other
    def __hash__(self):
        return hash((type(self), self._setUpFunc, self._tearDownFunc,
                     self._testFunc, self._description))
    def __str__(self):
        return "%s (%s)" % (strclass(self.__class__),
                            self._testFunc.__name__)
    def __repr__(self):
        return "<%s testFunc=%s>" % (strclass(self.__class__),
                                     self._testFunc)
    def shortDescription(self):
        """Return the explicit description, else the first line of the
        wrapped function's docstring, else None."""
        if self._description is not None:
            return self._description
        doc = self._testFunc.__doc__
        if doc:
            first_line = doc.split("\n")[0].strip()
            if first_line:
                return first_line
        return None
class _SubTest(TestCase):
    """Pseudo test case describing a single subTest() invocation.

    Instances are reported alongside their parent test case and cannot be
    run on their own.
    """
    def __init__(self, test_case, message, params):
        super(_SubTest, self).__init__()
        self._message = message
        self.test_case = test_case
        self.params = params
        self.failureException = test_case.failureException
    def runTest(self):
        raise NotImplementedError("subtests cannot be run directly")
    def _subDescription(self):
        # Render "[message] (k1=v1, k2=v2)" from whatever was supplied.
        pieces = []
        if self._message:
            pieces.append("[{0}]".format(self._message))
        if self.params:
            rendered = ("{0}={1!r}".format(key, value)
                        for key, value in sorted(self.params.items()))
            pieces.append("({0})".format(', '.join(rendered)))
        return " ".join(pieces) or '(<subtest>)'
    def id(self):
        return "{0} {1}".format(self.test_case.id(), self._subDescription())
    def shortDescription(self):
        """Return the parent test case's one-line description, or None if
        none was provided.
        """
        return self.test_case.shortDescription()
    def __str__(self):
        return "{0} {1}".format(self.test_case, self._subDescription())
| apache-2.0 |
sstoma/CellProfiler | cellprofiler/cpmath/tests/test_threshold.py | 1 | 7992 | '''test_threshold - test the threshold module
CellProfiler is distributed under the GNU General Public License,
but this file is licensed under the more permissive BSD license.
See the accompanying file LICENSE for details.
Copyright (c) 2003-2009 Massachusetts Institute of Technology
Copyright (c) 2009-2015 Broad Institute
All rights reserved.
Please see the AUTHORS file for credits.
Website: http://www.cellprofiler.org
'''
import numpy as np
from scipy.ndimage import convolve1d
import scipy.stats
import unittest
import cellprofiler.cpmath.threshold as T
class TestThreshold(unittest.TestCase):
    """Tests for cellprofiler.cpmath.threshold: Otsu, maximum-correlation
    (MCT), adaptive and robust-background thresholding.

    Many numeric expectations below are tied to fixed RandomState seeds, so
    the order of RNG calls within each test must not change.
    """
    def test_01_00_nothing(self):
        # Smoke test: an all-negative image must not crash Otsu.
        result = T.get_otsu_threshold(-np.ones((10,10)))
    def test_01_01_negative_log_otsu(self):
        '''regression test of img-1466'''
        r = np.random.RandomState()
        r.seed(11)
        img = r.uniform(size=(10,10))
        img[0,0] = -1
        unmasked = T.get_otsu_threshold(img)
        masked = T.get_otsu_threshold(img, img >= 0)
        # The negative pixel should be ignored automatically, matching an
        # explicit mask of the non-negative pixels.
        self.assertEqual(unmasked, masked)
    def test_02_00_mct_zeros(self):
        # Edge cases: empty input, fully-masked input, constant image.
        result = T.get_maximum_correlation_threshold(np.zeros(0))
        r = np.random.RandomState()
        r.seed(11)
        result = T.get_maximum_correlation_threshold(r.uniform(size=(10,10)),
                                                     mask=np.zeros((10,10), bool))
        result = T.get_maximum_correlation_threshold(np.ones((10,10)) * .5)
        self.assertEqual(result, .5)
    def test_02_01_mct_matches_reference_implementation(self):
        # Fixed pixel data with a known threshold from the reference MCT code.
        image = np.array([0,255,231,161,58,218,95,17,136,56,179,196,1,70,173,113,192,101,223,65,127,27,234,224,205,61,74,168,63,209,120,41,218,22,66,135,244,178,193,238,140,215,96,194,158,20,169,61,55,1,130,17,240,237,15,228,136,207,65,90,191,253,63,101,206,91,154,76,43,89,213,26,17,107,251,164,206,191,73,32,51,191,80,48,61,57,4,152,74,174,103,91,106,217,194,161,248,59,198,24,22,36], float)
        self.assertEqual(127, T.get_maximum_correlation_threshold(image))
    def test_03_01_adaptive_threshold_same(self):
        # An image that tiles the same 10x10 block should get an adaptive
        # threshold (window 10) equal to the global one everywhere.
        r = np.random.RandomState()
        r.seed(31)
        block = r.uniform(size=(10,10))
        i,j = np.mgrid[0:10:2,0:10:2]
        block[i,j] *= .5
        i,j = np.mgrid[0:50,0:50]
        img = block[i%10, j%10]
        global_threshold = T.get_global_threshold(T.TM_OTSU, block)
        adaptive_threshold = T.get_adaptive_threshold(
            T.TM_OTSU, img, global_threshold,
            adaptive_window_size = 10)
        np.testing.assert_almost_equal(adaptive_threshold, global_threshold)
    def test_03_02_adaptive_threshold_different(self):
        r = np.random.RandomState()
        r.seed(31)
        block = r.uniform(size=(10,10))
        i,j = np.mgrid[0:10:2,0:10:2]
        block[i,j] *= .5
        i,j = np.mgrid[0:50,0:50]
        img = block[i%10, j%10] * .5
        #
        # Make the middle higher in intensity
        #
        img[20:30, 20:30] *= 2
        global_threshold = T.get_global_threshold(T.TM_OTSU, block)
        adaptive_threshold = T.get_adaptive_threshold(
            T.TM_OTSU, img, global_threshold,
            adaptive_window_size = 10)
        #
        # Check that the gradients are positive for i,j<15 and negative
        # for i,j>=15
        #
        gradient = convolve1d(adaptive_threshold, [-1, 0, 1], 0)
        self.assertTrue(np.all(gradient[20:25, 20:30] < 0))
        self.assertTrue(np.all(gradient[25:30, 20:30] > 0))
        gradient = convolve1d(adaptive_threshold, [-1, 0, 1], 1)
        self.assertTrue(np.all(gradient[20:30, 20:25] < 0))
        self.assertTrue(np.all(gradient[20:30, 25:30] > 0))
    def make_mog_image(self, loc1, sigma1, loc2, sigma2, frac1, size):
        '''Make an image that is a mixture of gaussians

        loc{1,2} - mean of distribution # 1 and 2
        sigma{1,2} - standard deviation of distribution # 1 and 2
        frac1 - the fraction of pixels that are in distribution 1
        size - the shape of the image.

        The seed is derived from the parameters themselves so every call
        with the same arguments is reproducible.  Values are clipped to
        [0, 1] and shuffled spatially via a random permutation.
        '''
        r = np.random.RandomState()
        r.seed(np.frombuffer(np.array(
            [loc1, sigma1, loc2, sigma2, frac1] + list(size)).data, np.int32))
        n_pixels = np.prod(size)
        p = r.permutation(n_pixels).reshape(size)
        s1 = int(n_pixels * frac1)
        s2 = n_pixels - s1
        d = np.hstack([
            r.normal(loc=loc1, scale=sigma1, size=s1),
            r.normal(loc=loc2, scale=sigma2, size=s2)])
        d[d<0] = 0
        d[d>1] = 1
        return d[p]
    def test_04_01_robust_background(self):
        img = self.make_mog_image(.1, .05, .5, .2, .975, (45, 35))
        t = T.get_global_threshold(T.TM_ROBUST_BACKGROUND, img)
        self.assertLess(abs(t-.2), .025)
    def test_04_02_robust_background_lower_outliers(self):
        # Changing the lower outlier fraction should change the threshold.
        img = self.make_mog_image(.1, .05, .5, .2, .5, (45, 35))
        t0 = T.get_global_threshold(T.TM_ROBUST_BACKGROUND, img,
                                    lower_outlier_fraction=0)
        t05 = T.get_global_threshold(T.TM_ROBUST_BACKGROUND, img,
                                     lower_outlier_fraction=0.05)
        self.assertNotEqual(t0, t05)
    def test_04_03_robust_background_upper_outliers(self):
        # Changing the upper outlier fraction should change the threshold.
        img = self.make_mog_image(.1, .05, .5, .2, .9, (45, 35))
        t0 = T.get_global_threshold(T.TM_ROBUST_BACKGROUND, img,
                                    upper_outlier_fraction=0)
        t05 = T.get_global_threshold(T.TM_ROBUST_BACKGROUND, img,
                                     upper_outlier_fraction=0.05)
        self.assertNotEqual(t0, t05)
    def test_04_04_robust_background_sd(self):
        # mean .5, sd .1: default 2 deviations gives ~.7, 2.5 gives ~.75.
        img = self.make_mog_image(.5, .1, .8, .01, .99, (45, 35))
        t2 = T.get_global_threshold(T.TM_ROBUST_BACKGROUND, img,
                                    lower_outlier_fraction = 0,
                                    upper_outlier_fraction = 0)
        self.assertLess(abs(t2 - .7), .02)
        t3 = T.get_global_threshold(T.TM_ROBUST_BACKGROUND, img,
                                    lower_outlier_fraction = 0,
                                    upper_outlier_fraction = 0,
                                    deviations_above_average = 2.5)
        self.assertLess(abs(t3 - .75), .02)
    def test_04_05_robust_background_median(self):
        # With median averaging and zero deviations, the threshold should sit
        # at the background mean (.3).
        img = self.make_mog_image(.3, .05, .5, .2, .9, (45, 35))
        t = T.get_global_threshold(T.TM_ROBUST_BACKGROUND, img,
                                   average_fn = np.median,
                                   deviations_above_average = 0,
                                   lower_outlier_fraction = 0,
                                   upper_outlier_fraction = 0)
        self.assertLess(abs(t - .3), .01)
    def test_04_06_robust_background_mode(self):
        # Force a dominant value (.304) so the binned mode lands exactly there.
        img = self.make_mog_image(.3, .05, .5, .2, .9, (45, 35))
        img[(img > .25) & (img < .35)] = .304
        t = T.get_global_threshold(T.TM_ROBUST_BACKGROUND, img,
                                   average_fn = T.binned_mode,
                                   deviations_above_average = 0,
                                   lower_outlier_fraction = 0,
                                   upper_outlier_fraction = 0)
        self.assertAlmostEqual(t, .304)
    def test_04_08_mad(self):
        img = self.make_mog_image(.3, .05, .5, .2, .95, (45, 35))
        t = T.get_global_threshold(T.TM_ROBUST_BACKGROUND, img,
                                   variance_fn = T.mad,
                                   deviations_above_average = 2,
                                   lower_outlier_fraction = 0,
                                   upper_outlier_fraction = 0)
        norm = scipy.stats.norm(0, .05)
        # the MAD should be the expected value at the 75th percentile
        expected = .3 + 2 * norm.ppf(.75)
        self.assertLess(np.abs(t - expected), .02)
# Allow running this test module directly from the command line.
if __name__=="__main__":
    unittest.main()
| gpl-2.0 |
paulot/NodeVector | node-0.12.0/deps/v8/test/webkit/testcfg.py | 53 | 6231 | # Copyright 2013 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import itertools
import os
import re
from testrunner.local import testsuite
from testrunner.objects import testcase
# Directives that may be embedded in a test's JS source:
#   "// Flags: ..."            extra command-line flags for this test
#   "// Files: ..."            additional script files to load before the test
#   "// Env: TEST_FILE_NAME"   test wants its own path passed as TEST_FILE_NAME
FLAGS_PATTERN = re.compile(r"//\s+Flags:(.*)")
FILES_PATTERN = re.compile(r"//\s+Files:(.*)")
SELF_SCRIPT_PATTERN = re.compile(r"//\s+Env: TEST_FILE_NAME")
# TODO (machenbach): Share commonalities with mjstest.
class WebkitTestSuite(testsuite.TestSuite):
  """Test suite for the imported WebKit tests.

  Discovers *.js tests under the suite root, assembles the flags/files
  needed to run each one, and compares actual output against a checked-in
  "<test>-expected.txt" file.

  NOTE(review): this module is Python-2-only as written — it uses the
  `file` builtin and itertools.izip_longest below.
  """
  def __init__(self, name, root):
    super(WebkitTestSuite, self).__init__(name, root)
  def ListTests(self, context):
    """Walk the suite root and return a TestCase for every .js file,
    skipping dot-directories and 'resources' folders."""
    tests = []
    for dirname, dirs, files in os.walk(self.root):
      for dotted in [x for x in dirs if x.startswith('.')]:
        dirs.remove(dotted)
      if 'resources' in dirs:
        dirs.remove('resources')
      # Sort in place so os.walk recurses (and tests run) in stable order.
      dirs.sort()
      files.sort()
      for filename in files:
        if filename.endswith(".js"):
          testname = os.path.join(dirname[len(self.root) + 1:], filename[:-3])
          test = testcase.TestCase(self, testname)
          tests.append(test)
    return tests
  def GetFlagsForTestCase(self, testcase, context):
    """Build the d8 command line for a test: mode flags, "// Flags:"
    directives, "// Files:" prerequisites, the standalone pre/post harness
    scripts and the test file itself."""
    source = self.GetSourceForTest(testcase)
    flags = [] + context.mode_flags
    flags_match = re.findall(FLAGS_PATTERN, source)
    for match in flags_match:
      flags += match.strip().split()
    files_list = [] # List of file names to append to command arguments.
    files_match = FILES_PATTERN.search(source);
    # Accept several lines of 'Files:'.
    while True:
      if files_match:
        files_list += files_match.group(1).strip().split()
        files_match = FILES_PATTERN.search(source, files_match.end())
      else:
        break
    # "// Files:" paths are relative to two levels above the suite root.
    files = [ os.path.normpath(os.path.join(self.root, '..', '..', f))
              for f in files_list ]
    testfilename = os.path.join(self.root, testcase.path + self.suffix())
    if SELF_SCRIPT_PATTERN.search(source):
      # The test asked for its own path; pass it via -e as TEST_FILE_NAME.
      env = ["-e", "TEST_FILE_NAME=\"%s\"" % testfilename.replace("\\", "\\\\")]
      files = env + files
    files.append(os.path.join(self.root, "resources/standalone-pre.js"))
    files.append(testfilename)
    files.append(os.path.join(self.root, "resources/standalone-post.js"))
    flags += files
    if context.isolates:
      # Isolate runs execute the same file list a second time.
      flags.append("--isolate")
      flags += files
    return testcase.flags + flags
  def GetSourceForTest(self, testcase):
    """Return the JS source text of the given test case."""
    filename = os.path.join(self.root, testcase.path + self.suffix())
    with open(filename) as f:
      return f.read()
  # TODO(machenbach): Share with test/message/testcfg.py
  def _IgnoreLine(self, string):
    """Ignore empty lines, valgrind output and Android output."""
    if not string: return True
    return (string.startswith("==") or string.startswith("**") or
            string.startswith("ANDROID") or
            # These five patterns appear in normal Native Client output.
            string.startswith("DEBUG MODE ENABLED") or
            string.startswith("tools/nacl-run.py") or
            string.find("BYPASSING ALL ACL CHECKS") > 0 or
            string.find("Native Client module will be loaded") > 0 or
            string.find("NaClHostDescOpen:") > 0)
  def IsFailureOutput(self, output, testpath):
    """Return True if the run failed generically, or if any block of actual
    output differs line-by-line from the "-expected.txt" file."""
    if super(WebkitTestSuite, self).IsFailureOutput(output, testpath):
      return True
    file_name = os.path.join(self.root, testpath) + "-expected.txt"
    # NOTE(review): `file` is the Python 2 builtin; Python 3 needs open().
    with file(file_name, "r") as expected:
      expected_lines = expected.readlines()
    def ExpIterator():
      # Expected lines, skipping comments and blanks.
      for line in expected_lines:
        if line.startswith("#") or not line.strip(): continue
        yield line.strip()
    def ActIterator(lines):
      # Actual lines, skipping tool noise recognized by _IgnoreLine.
      for line in lines:
        if self._IgnoreLine(line.strip()): continue
        yield line.strip()
    def ActBlockIterator():
      """Iterates over blocks of actual output lines."""
      lines = output.stdout.splitlines()
      start_index = 0
      found_eqeq = False
      for index, line in enumerate(lines):
        # If a stress test separator is found:
        if line.startswith("=="):
          # Iterate over all lines before a separator except the first.
          if not found_eqeq:
            found_eqeq = True
          else:
            yield ActIterator(lines[start_index:index])
          # The next block of ouput lines starts after the separator.
          start_index = index + 1
      # Iterate over complete output if no separator was found.
      if not found_eqeq:
        yield ActIterator(lines)
    for act_iterator in ActBlockIterator():
      for (expected, actual) in itertools.izip_longest(
          ExpIterator(), act_iterator, fillvalue=''):
        if expected != actual:
          return True
    return False
def GetSuite(name, root):
  """Module-level factory used by the test runner to instantiate this suite."""
  return WebkitTestSuite(name, root)
| mit |
scrollback/kuma | vendor/packages/GitPython/lib/git/blob.py | 32 | 5305 | # blob.py
# Copyright (C) 2008-2010 Michael Trier (mtrier@gmail.com) and contributors
#
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
import mimetypes
import os
import re
import time
from actor import Actor
from commit import Commit
class Blob(object):
    """A Blob encapsulates a git blob object"""
    DEFAULT_MIME_TYPE = "text/plain"

    def __init__(self, repo, id, mode=None, name=None):
        """
        Create an unbaked Blob containing just the specified attributes

        ``repo``
            is the Repo

        ``id``
            is the git object id

        ``mode``
            is the file mode

        ``name``
            is the file name

        Returns
            git.Blob
        """
        self.repo = repo
        self.id = id
        self.mode = mode
        self.name = name
        self._size = None
        self.data_stored = None

    @property
    def size(self):
        """
        The size of this blob in bytes

        Returns
            int

        NOTE
            The size will be cached after the first access
        """
        if self._size is None:
            self._size = int(self.repo.git.cat_file(self.id, s=True).rstrip())
        return self._size

    @property
    def data(self):
        """
        The binary contents of this blob.

        Returns
            str

        NOTE
            The data will be cached after the first access.
        """
        # Compare against None explicitly: an empty blob ('') is falsy, so
        # the previous "self.data_stored or ..." idiom defeated the cache
        # and re-ran `git cat-file` on every access to an empty blob.
        if self.data_stored is None:
            self.data_stored = self.repo.git.cat_file(
                self.id, p=True, with_raw_output=True)
        return self.data_stored

    @property
    def mime_type(self):
        """
        The mime type of this file (based on the filename)

        Returns
            str

        NOTE
            Defaults to 'text/plain' in case the actual file type is unknown.
        """
        guesses = None
        if self.name:
            guesses = mimetypes.guess_type(self.name)
        return guesses and guesses[0] or self.DEFAULT_MIME_TYPE

    @property
    def basename(self):
        """
        Returns
            The basename of the Blobs file name
        """
        return os.path.basename(self.name)

    @classmethod
    def blame(cls, repo, commit, file):
        """
        The blame information for the given file at the given commit

        Returns
            list: [git.Commit, list: [<line>]]
            A list of tuples associating a Commit object with a list of lines that
            changed within the given commit. The Commit objects will be given in order
            of appearance.
        """
        # Parse `git blame -p` (porcelain) output line by line.  Each regex
        # is now evaluated once and its match object reused; the original
        # searched every pattern twice (once to test, once for the groups).
        data = repo.git.blame(commit, '--', file, p=True)
        commits = {}
        blames = []
        info = None
        for line in data.splitlines():
            parts = re.split(r'\s+', line, 1)
            if re.search(r'^[0-9A-Fa-f]{40}$', parts[0]):
                # Hunk header: "<sha> <orig-line> <final-line> [<num-lines>]".
                m = re.search(r'^([0-9A-Fa-f]{40}) (\d+) (\d+) (\d+)$', line)
                if m:
                    # First line of a new hunk: start a new (commit, lines) group.
                    id, origin_line, final_line, group_lines = m.groups()
                    info = {'id': id}
                    blames.append([None, []])
                else:
                    m = re.search(r'^([0-9A-Fa-f]{40}) (\d+) (\d+)$', line)
                    if m:
                        id, origin_line, final_line = m.groups()
                        info = {'id': id}
            elif re.search(r'^(author|committer)', parts[0]):
                # Author/committer metadata: name, -mail and -time variants.
                m = re.search(r'^(.+)-mail$', parts[0])
                if m:
                    info["%s_email" % m.groups()[0]] = parts[-1]
                else:
                    m = re.search(r'^(.+)-time$', parts[0])
                    if m:
                        info["%s_date" % m.groups()[0]] = time.gmtime(int(parts[-1]))
                    else:
                        m = re.search(r'^(author|committer)$', parts[0])
                        if m:
                            info[m.groups()[0]] = parts[-1]
            elif re.search(r'^filename', parts[0]):
                info['filename'] = parts[-1]
            elif re.search(r'^summary', parts[0]):
                info['summary'] = parts[-1]
            elif parts[0] == '':
                # A line starting with TAB carries the actual source text.
                if info:
                    # dict.get replaces the Python-2-only has_key()/and idiom.
                    c = commits.get(info['id'])
                    if not c:
                        c = Commit(repo, id=info['id'],
                                   author=Actor.from_string(info['author'] + ' ' + info['author_email']),
                                   authored_date=info['author_date'],
                                   committer=Actor.from_string(info['committer'] + ' ' + info['committer_email']),
                                   committed_date=info['committer_date'],
                                   message=info['summary'])
                        commits[info['id']] = c
                    m = re.search(r'^\t(.*)$', line)
                    text, = m.groups()
                    blames[-1][0] = c
                    blames[-1][1].append( text )
                    info = None
        return blames

    def __repr__(self):
        return '<git.Blob "%s">' % self.id
| mpl-2.0 |
DanielSBrown/osf.io | admin_tests/metrics/test_utils.py | 11 | 6284 | from nose import tools as nt
from datetime import timedelta, datetime
from tests.base import AdminTestCase
from tests.factories import (
AuthUserFactory, NodeFactory, ProjectFactory, RegistrationFactory
)
from website.project.model import Node, User
from framework.auth import Auth
from admin.metrics.utils import (
get_projects,
get_osf_statistics,
get_list_of_dates,
get_previous_midnight,
get_days_statistics,
DAY_LEEWAY,
get_active_user_count,
get_unregistered_users,
)
from admin.metrics.models import OSFWebsiteStatistics
class TestMetricsGetProjects(AdminTestCase):
    """Tests for get_projects() counts over top-level nodes.

    setUp wipes all nodes, then creates: one public project, one private
    project, a NodeFactory node (which also creates its parent project)
    and one registration of the public project.
    """
    def setUp(self):
        super(TestMetricsGetProjects, self).setUp()
        Node.remove()
        self.public_node = ProjectFactory(is_public=True)
        self.private_node = ProjectFactory(is_public=False)
        self.node_2 = NodeFactory() # creates parent project + node
        self.reg = RegistrationFactory(project=self.public_node)
    def test_get_all_top_level_nodes(self):
        # public + private + NodeFactory's parent + registration = 4
        count = get_projects()
        nt.assert_equal(count, 4)
    def test_get_public_top_level_nodes(self):
        count = get_projects(public=True)
        nt.assert_equal(count, 1)
    def test_get_registrations(self):
        count = get_projects(registered=True)
        nt.assert_equal(count, 1)
    def test_date_created_filter_returns_no_results(self):
        # A cutoff one week before creation should exclude every node.
        time = self.public_node.date_created - timedelta(weeks=1)
        count = get_projects(time=time)
        nt.assert_equal(count, 0)
class TestMetricsGetDaysStatistics(AdminTestCase):
    """Tests for get_days_statistics(), which stores an OSFWebsiteStatistics
    row for a given point in time."""
    def setUp(self):
        super(TestMetricsGetDaysStatistics, self).setUp()
        Node.remove()
        NodeFactory(category='project') # makes Node, plus parent
        NodeFactory(category='data')
    def test_time_now(self):
        # One call stores exactly one row counting both projects.
        get_days_statistics(datetime.utcnow())
        nt.assert_equal(OSFWebsiteStatistics.objects.count(), 1)
        nt.assert_equal(OSFWebsiteStatistics.objects.latest('date').projects, 2)
    def test_delta(self):
        # When the previous row is passed in, delta_projects should count
        # only the projects created since that row (two here).
        get_days_statistics(datetime.utcnow())
        ProjectFactory()
        ProjectFactory()
        latest = OSFWebsiteStatistics.objects.latest('date')
        get_days_statistics(datetime.utcnow(), latest)
        even_later = OSFWebsiteStatistics.objects.latest('date')
        nt.assert_equal(even_later.delta_projects, 2)
class TestMetricsGetOSFStatistics(AdminTestCase):
    """Tests for get_osf_statistics(), which back-fills one statistics row
    per day since the latest stored row."""
    def setUp(self):
        super(TestMetricsGetOSFStatistics, self).setUp()
        Node.remove()
        time_now = get_previous_midnight()
        # Projects spread over today, yesterday and two days ago.
        NodeFactory(category='project', date_created=time_now)
        NodeFactory(category='project',
                    date_created=time_now - timedelta(days=1))
        last_time = time_now - timedelta(days=2)
        NodeFactory(category='project', date_created=last_time)
        NodeFactory(category='project', date_created=last_time)
        # Seed a single statistics row two days back, leaving two days
        # uncovered for get_osf_statistics() to fill in.
        get_days_statistics(last_time + timedelta(seconds=1))
        self.time = time_now + timedelta(seconds=1)
    def test_get_two_more_days(self):
        nt.assert_equal(OSFWebsiteStatistics.objects.count(), 1)
        get_osf_statistics()
        nt.assert_equal(OSFWebsiteStatistics.objects.count(), 3)
    def test_dont_add_another(self):
        # A second call must not duplicate rows for already-covered days.
        nt.assert_equal(OSFWebsiteStatistics.objects.count(), 1)
        get_osf_statistics()
        nt.assert_equal(OSFWebsiteStatistics.objects.count(), 3)
        get_osf_statistics()
        nt.assert_equal(OSFWebsiteStatistics.objects.count(), 3)
class TestMetricListDays(AdminTestCase):
    """Tests for get_list_of_dates(): day spans, month boundaries and the
    DAY_LEEWAY cutoff in seconds."""
    def test_five_days(self):
        end = datetime.utcnow()
        start = end - timedelta(days=5)
        dates = get_list_of_dates(start, end)
        nt.assert_equal(len(dates), 5)
        nt.assert_in(end, dates)
    def test_month_transition(self):
        now = datetime.utcnow()
        # Pick an end date near the start of the month so the five-day span
        # crosses the month boundary.
        end = now - timedelta(days=(now.day - 2))
        start = end - timedelta(days=5)
        dates = get_list_of_dates(start, end)
        nt.assert_equal(len(dates), 5)
    def test_off_by_seconds(self):
        # One second beyond the leeway still counts as a full day.
        end = datetime.utcnow()
        start = end - timedelta(seconds=DAY_LEEWAY + 1)
        dates = get_list_of_dates(start, end)
        nt.assert_equal(len(dates), 1)
    def test_on_exact_time(self):
        # Exactly the leeway yields no days.
        end = datetime.utcnow()
        start = end - timedelta(seconds=DAY_LEEWAY)
        dates = get_list_of_dates(start, end)
        nt.assert_equal(len(dates), 0)
    def test_just_missed_time(self):
        # One second inside the leeway also yields no days.
        end = datetime.utcnow()
        start = end - timedelta(seconds=DAY_LEEWAY - 1)
        dates = get_list_of_dates(start, end)
        nt.assert_equal(len(dates), 0)
class TestMetricPreviousMidnight(AdminTestCase):
    """Tests for get_previous_midnight()."""
    def test_midnight(self):
        now = datetime.utcnow()
        midnight = get_previous_midnight(now)
        nt.assert_equal(midnight.date(), now.date())
        # Time-of-day is zeroed except for a one-microsecond offset.
        nt.assert_equal(midnight.hour, 0)
        nt.assert_equal(midnight.minute, 0)
        nt.assert_equal(midnight.second, 0)
        nt.assert_equal(midnight.microsecond, 1)
    def test_no_time_given(self):
        # With no argument, defaults to "now".
        today = datetime.utcnow().date()
        midnight = get_previous_midnight()
        nt.assert_equal(midnight.date(), today)
class TestUserGet(AdminTestCase):
    """Tests for user-counting helpers.

    setUp leaves four users: user_1 (confirmed), an unregistered
    contributor added to user_1's project, user_3 (date_confirmed cleared)
    and user_4 (confirmed).
    """
    def setUp(self):
        super(TestUserGet, self).setUp()
        User.remove()
        self.user_1 = AuthUserFactory()
        self.auth = Auth(user=self.user_1)
        self.project = ProjectFactory(creator=self.user_1)
        self.project.add_unregistered_contributor(
            email='foo@bar.com',
            fullname='Weezy F. Baby',
            auth=self.auth
        )
        self.user_3 = AuthUserFactory()
        self.user_3.date_confirmed = None
        self.user_3.save()
        self.user_4 = AuthUserFactory()
    def test_get_all_user_count(self):
        time_now = datetime.utcnow()
        count = get_active_user_count(time_now)
        # Only user_1 and user_4 count; the unconfirmed and unregistered
        # users are excluded.
        nt.assert_equal(count, 2)
    def test_get_unregistered_users(self):
        count = get_unregistered_users()
        nt.assert_equal(count, 1)
| apache-2.0 |
defionscode/ansible | lib/ansible/modules/network/onyx/onyx_bgp.py | 66 | 8971 | #!/usr/bin/python
#
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: onyx_bgp
version_added: "2.5"
author: "Samer Deeb (@samerd)"
short_description: Configures BGP on Mellanox ONYX network devices
description:
- This module provides declarative management of BGP router and neighbors
on Mellanox ONYX network devices.
notes:
- Tested on ONYX 3.6.4000
options:
as_number:
description:
- Local AS number.
required: true
router_id:
description:
- Router IP address. Required if I(state=present).
neighbors:
description:
- List of neighbors. Required if I(state=present).
suboptions:
remote_as:
description:
- Remote AS number.
required: true
neighbor:
description:
- Neighbor IP address.
required: true
networks:
description:
- List of advertised networks.
state:
description:
- BGP state.
default: present
choices: ['present', 'absent']
"""
EXAMPLES = """
- name: configure bgp
onyx_bgp:
as_number: 320
router_id: 10.3.3.3
neighbors:
- remote_as: 321
neighbor: 10.3.3.4
state: present
networks:
- 172.16.1.0/24
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device.
returned: always
type: list
sample:
- router bgp 172
- exit
- router bgp 172 router-id 2.3.4.5 force
- router bgp 172 neighbor 2.3.4.6 remote-as 173
- router bgp 172 network 172.16.1.0 /24
"""
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.onyx.onyx import get_bgp_summary
from ansible.module_utils.network.onyx.onyx import BaseOnyxModule
class OnyxBgpModule(BaseOnyxModule):
    """Declarative BGP management for Mellanox ONYX devices.

    Parses the running BGP configuration, diffs it against the requested
    state, and emits the minimal list of CLI commands to reconcile them.
    """

    # Patterns matched against lines of the device BGP summary.
    LOCAL_AS_REGEX = re.compile(r'^\s+router bgp\s+(\d+).*')
    ROUTER_ID_REGEX = re.compile(
        r'^\s+router bgp\s+(\d+).*router-id\s+(\S+)\s+.*')
    NEIGHBOR_REGEX = re.compile(
        r'^\s+router bgp\s+(\d+).*neighbor\s+(\S+)\s+remote\-as\s+(\S+).*')
    NETWORK_REGEX = re.compile(
        r'^\s+router bgp\s+(\d+).*network\s+(\S+)\s+(\S+).*')

    def init_module(self):
        """Initialize the AnsibleModule with the BGP argument spec."""
        neighbor_spec = dict(
            remote_as=dict(type='int', required=True),
            neighbor=dict(required=True),
        )
        element_spec = dict(
            as_number=dict(type='int', required=True),
            router_id=dict(),
            neighbors=dict(type='list', elements='dict',
                           options=neighbor_spec),
            networks=dict(type='list', elements='str'),
            state=dict(choices=['present', 'absent'], default='present'),
        )
        argument_spec = dict()
        argument_spec.update(element_spec)
        self._module = AnsibleModule(
            argument_spec=argument_spec,
            supports_check_mode=True)

    def get_required_config(self):
        """Normalize module parameters into self._required_config.

        Neighbors are stored as (ip, remote_as) tuples so they can be
        compared directly with the tuples parsed from the device.
        """
        module_params = self._module.params
        req_neighbors = list()
        self._required_config = dict(
            as_number=module_params['as_number'],
            router_id=module_params['router_id'],
            state=module_params['state'],
            neighbors=req_neighbors,
            networks=module_params['networks'])
        neighbors = module_params['neighbors'] or list()
        for neighbor_data in neighbors:
            req_neighbors.append(
                (neighbor_data['neighbor'], neighbor_data['remote_as']))
        self.validate_param_values(self._required_config)

    def _set_bgp_config(self, bgp_config):
        """Parse the device BGP summary text into self._current_config."""
        lines = bgp_config.split('\n')
        self._current_config['router_id'] = None
        self._current_config['as_number'] = None
        neighbors = self._current_config['neighbors'] = []
        networks = self._current_config['networks'] = []
        for line in lines:
            # Skip comment/banner lines.
            if line.startswith('#'):
                continue
            if not self._current_config['as_number']:
                match = self.LOCAL_AS_REGEX.match(line)
                if match:
                    self._current_config['as_number'] = int(match.group(1))
                    continue
            if not self._current_config['router_id']:
                match = self.ROUTER_ID_REGEX.match(line)
                if match:
                    self._current_config['router_id'] = match.group(2)
                    continue
            match = self.NEIGHBOR_REGEX.match(line)
            if match:
                neighbors.append((match.group(2), int(match.group(3))))
                continue
            match = self.NETWORK_REGEX.match(line)
            if match:
                # group(3) already carries the leading '/', e.g. "/24",
                # so the concatenation yields "a.b.c.d/len".
                network = match.group(2) + match.group(3)
                networks.append(network)
                continue

    def _get_bgp_summary(self):
        """Fetch the BGP summary from the device (thin wrapper)."""
        return get_bgp_summary(self._module)

    def load_current_config(self):
        """Load and parse the current BGP configuration of the device."""
        self._current_config = dict()
        bgp_config = self._get_bgp_summary()
        if bgp_config:
            self._set_bgp_config(bgp_config)

    def generate_commands(self):
        """Dispatch command generation according to the requested state."""
        state = self._required_config['state']
        if state == 'present':
            self._generate_bgp_cmds()
        else:
            self._generate_no_bgp_cmds()

    def _generate_bgp_cmds(self):
        """Generate commands for the BGP process, router-id, neighbors and
        networks."""
        as_number = self._required_config['as_number']
        curr_as_num = self._current_config.get('as_number')
        bgp_removed = False
        if curr_as_num != as_number:
            # A different AS requires tearing down the old process first.
            if curr_as_num:
                self._commands.append('no router bgp %d' % curr_as_num)
                bgp_removed = True
            self._commands.append('router bgp %d' % as_number)
            self._commands.append('exit')
        curr_route_id = self._current_config.get('router_id')
        req_router_id = self._required_config['router_id']
        # Re-send the router-id when it changed, or when the BGP process was
        # recreated above (the old router-id was lost with it).  The explicit
        # parentheses matter: the previous form
        # `req_router_id and a != b or bgp_removed` emitted
        # "router-id None force" whenever the process was recreated and no
        # router-id was requested.
        if req_router_id and (req_router_id != curr_route_id or bgp_removed):
            self._commands.append('router bgp %d router-id %s force' %
                                  (as_number, req_router_id))
        self._generate_neighbors_cmds(as_number, bgp_removed)
        self._generate_networks_cmds(as_number, bgp_removed)

    def _generate_neighbors_cmds(self, as_number, bgp_removed):
        """Remove stale neighbors, then add missing ones.

        When the BGP process was recreated, the device has no neighbors
        left, so removal is skipped and every requested neighbor is added.
        """
        req_neighbors = self._required_config['neighbors']
        curr_neighbors = self._current_config.get('neighbors', [])
        if not bgp_removed:
            for neighbor_data in curr_neighbors:
                if neighbor_data not in req_neighbors:
                    (neighbor, remote_as) = neighbor_data
                    self._commands.append(
                        'router bgp %s no neighbor %s remote-as %s' %
                        (as_number, neighbor, remote_as))
        for neighbor_data in req_neighbors:
            if bgp_removed or neighbor_data not in curr_neighbors:
                (neighbor, remote_as) = neighbor_data
                self._commands.append(
                    'router bgp %s neighbor %s remote-as %s' %
                    (as_number, neighbor, remote_as))

    def _generate_networks_cmds(self, as_number, bgp_removed):
        """Remove stale advertised networks, then add missing ones.

        Networks are given as "address/prefixlen"; anything else aborts
        the module with fail_json.
        """
        req_networks = self._required_config['networks'] or []
        curr_networks = self._current_config.get('networks', [])
        if not bgp_removed:
            for network in curr_networks:
                if network not in req_networks:
                    net_attrs = network.split('/')
                    if len(net_attrs) != 2:
                        self._module.fail_json(
                            msg='Invalid network %s' % network)
                    net_address, netmask = net_attrs
                    cmd = 'router bgp %s no network %s /%s' % (
                        as_number, net_address, netmask)
                    self._commands.append(cmd)
        for network in req_networks:
            if bgp_removed or network not in curr_networks:
                net_attrs = network.split('/')
                if len(net_attrs) != 2:
                    self._module.fail_json(
                        msg='Invalid network %s' % network)
                net_address, netmask = net_attrs
                cmd = 'router bgp %s network %s /%s' % (
                    as_number, net_address, netmask)
                self._commands.append(cmd)

    def _generate_no_bgp_cmds(self):
        """Generate the teardown command when state == 'absent'."""
        as_number = self._required_config['as_number']
        curr_as_num = self._current_config.get('as_number')
        if curr_as_num and curr_as_num == as_number:
            self._commands.append('no router bgp %d' % as_number)
def main():
    """ main entry point for module execution
    """
    # Delegate to the shared BaseOnyxModule driver, which instantiates the
    # module and runs the check/apply cycle.
    OnyxBgpModule.main()
if __name__ == '__main__':
main()
| gpl-3.0 |
Curious72/sympy | sympy/polys/domains/pythonrationalfield.py | 117 | 2234 | """Implementation of :class:`PythonRationalField` class. """
from __future__ import print_function, division
from sympy.polys.domains.rationalfield import RationalField
from sympy.polys.domains.groundtypes import PythonInteger, PythonRational, SymPyRational
from sympy.polys.polyerrors import CoercionFailed
from sympy.utilities import public
@public
class PythonRationalField(RationalField):
    """Rational field based on Python rational number type. """

    # Ground type and cached identity elements of the field.
    dtype = PythonRational
    zero = dtype(0)
    one = dtype(1)
    alias = 'QQ_python'

    def __init__(self):
        # Stateless domain: nothing to initialize.
        pass

    def get_ring(self):
        """Returns ring associated with ``self``. """
        from sympy.polys.domains import PythonIntegerRing
        return PythonIntegerRing()

    def to_sympy(self, a):
        """Convert `a` to a SymPy object. """
        return SymPyRational(a.numerator, a.denominator)

    def from_sympy(self, a):
        """Convert SymPy's Rational to `dtype`.

        Floats are first converted to an exact rational via RR.to_rational.
        Raises CoercionFailed for anything else.
        """
        if a.is_Rational:
            return PythonRational(a.p, a.q)
        elif a.is_Float:
            from sympy.polys.domains import RR
            p, q = RR.to_rational(a)
            return PythonRational(int(p), int(q))
        else:
            raise CoercionFailed("expected `Rational` object, got %s" % a)

    # NOTE: in the from_* conversion protocol, K1 plays the role of self
    # (target domain) and K0 is the source domain (sympy convention).
    def from_ZZ_python(K1, a, K0):
        """Convert a Python `int` object to `dtype`. """
        return PythonRational(a)

    def from_QQ_python(K1, a, K0):
        """Convert a Python `Fraction` object to `dtype`. """
        return a

    def from_ZZ_gmpy(K1, a, K0):
        """Convert a GMPY `mpz` object to `dtype`. """
        return PythonRational(PythonInteger(a))

    def from_QQ_gmpy(K1, a, K0):
        """Convert a GMPY `mpq` object to `dtype`. """
        return PythonRational(PythonInteger(a.numer()),
                              PythonInteger(a.denom()))

    def from_RealField(K1, a, K0):
        """Convert a mpmath `mpf` object to `dtype`. """
        p, q = K0.to_rational(a)
        return PythonRational(int(p), int(q))

    def numer(self, a):
        """Returns numerator of `a`. """
        return a.numerator

    def denom(self, a):
        """Returns denominator of `a`. """
        return a.denominator
| bsd-3-clause |
pmghalvorsen/gramps_branch | gramps/plugins/quickview/reporef.py | 3 | 2581 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2006-2007 Alex Roitman
# Copyright (C) 2007-2009 Jerome Rapinat
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# plugins/quickview/Reporef.py
# -------------------------------------------------------------------------
#
# gramps modules
#
# -------------------------------------------------------------------------
"""
Display RepoRef for sources related to active repository
"""
from gramps.gen.simple import SimpleAccess, SimpleDoc
from gramps.gui.plug.quick import QuickTable
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
def run(database, document, repo):
    """
    Display back-references (sources) for this repository.
    """
    # Simple-access helpers for the database and the output document.
    access = SimpleAccess(database)
    writer = SimpleDoc(document)
    table = QuickTable(access)

    # Title the report with the repository name.
    writer.title('%s\n' % repo.get_name())

    repo_handle = repo.handle
    table.columns(_("Source"), _("Type of media"), _("Call number"))
    document.has_data = False

    # Walk every source object that links back to this repository.
    for _obj_class, source_handle in database.find_backlink_handles(
            repo_handle, ['Source']):
        source = database.get_source_from_handle(source_handle)
        # A source may carry several repository references; keep only
        # those pointing at this repository.
        for reporef in source.get_reporef_list():
            if reporef.ref != repo_handle:
                continue
            table.row(source.get_title(),
                      str(reporef.get_media_type()),
                      reporef.get_call_number())
            document.has_data = True
    table.write(writer)
| gpl-2.0 |
garrettcap/Bulletproof-Backup | Python2.7/lib/python2.7/site-packages/setuptools/tests/test_develop.py | 286 | 3605 | """develop tests
"""
import sys
import os, shutil, tempfile, unittest
import tempfile
import site
from distutils.errors import DistutilsError
from setuptools.command.develop import develop
from setuptools.command import easy_install as easy_install_pkg
from setuptools.compat import StringIO
from setuptools.dist import Distribution
SETUP_PY = """\
from setuptools import setup
setup(name='foo',
packages=['foo'],
use_2to3=True,
)
"""
INIT_PY = """print "foo"
"""
class TestDevelopTest(unittest.TestCase):
    """Functional test for the setuptools ``develop`` command.

    Builds a throw-away package in a temp directory and installs it in
    --user mode into redirected per-user site directories.
    """

    def setUp(self):
        # develop --user needs per-user site support: skip on Python < 2.6
        # and inside virtualenvs (which set sys.real_prefix).
        if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
            return
        # Directory structure
        self.dir = tempfile.mkdtemp()
        os.mkdir(os.path.join(self.dir, 'foo'))
        # setup.py
        setup = os.path.join(self.dir, 'setup.py')
        f = open(setup, 'w')
        f.write(SETUP_PY)
        f.close()
        self.old_cwd = os.getcwd()
        # foo/__init__.py
        init = os.path.join(self.dir, 'foo', '__init__.py')
        f = open(init, 'w')
        f.write(INIT_PY)
        f.close()
        os.chdir(self.dir)
        # Redirect the per-user site dirs to temp dirs so the test does not
        # pollute the real user site-packages.
        self.old_base = site.USER_BASE
        site.USER_BASE = tempfile.mkdtemp()
        self.old_site = site.USER_SITE
        site.USER_SITE = tempfile.mkdtemp()

    def tearDown(self):
        # Mirror the setUp guard (also covers venv's sys.base_prefix).
        if sys.version < "2.6" or hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix):
            return
        os.chdir(self.old_cwd)
        shutil.rmtree(self.dir)
        shutil.rmtree(site.USER_BASE)
        shutil.rmtree(site.USER_SITE)
        site.USER_BASE = self.old_base
        site.USER_SITE = self.old_site

    def test_develop(self):
        """Run ``develop --user`` and check the generated egg-link."""
        if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
            return
        dist = Distribution(
            dict(name='foo',
                 packages=['foo'],
                 use_2to3=True,
                 version='0.0',
                 ))
        dist.script_name = 'setup.py'
        cmd = develop(dist)
        cmd.user = 1
        cmd.ensure_finalized()
        cmd.install_dir = site.USER_SITE
        cmd.user = 1
        old_stdout = sys.stdout
        #sys.stdout = StringIO()
        try:
            cmd.run()
        finally:
            sys.stdout = old_stdout
        # let's see if we got our egg link at the right place
        content = os.listdir(site.USER_SITE)
        content.sort()
        self.assertEqual(content, ['easy-install.pth', 'foo.egg-link'])
        # Check that we are using the right code.
        egg_link_file = open(os.path.join(site.USER_SITE, 'foo.egg-link'), 'rt')
        try:
            path = egg_link_file.read().split()[0].strip()
        finally:
            egg_link_file.close()
        init_file = open(os.path.join(path, 'foo', '__init__.py'), 'rt')
        try:
            init = init_file.read().strip()
        finally:
            init_file.close()
        # Under use_2to3 the print statement is converted on Python 3.
        if sys.version < "3":
            self.assertEqual(init, 'print "foo"')
        else:
            self.assertEqual(init, 'print("foo")')

    def notest_develop_with_setup_requires(self):
        # Disabled test (the ``notest`` prefix keeps unittest from
        # collecting it).
        wanted = ("Could not find suitable distribution for "
                  "Requirement.parse('I-DONT-EXIST')")
        old_dir = os.getcwd()
        os.chdir(self.dir)
        try:
            try:
                dist = Distribution({'setup_requires': ['I_DONT_EXIST']})
            except DistutilsError:
                e = sys.exc_info()[1]
                error = str(e)
                if error == wanted:
                    pass
        finally:
            os.chdir(old_dir)
| gpl-2.0 |
bdoner/SickRage | lib/github/tests/GitTree.py | 39 | 3310 | # -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2012 Zearin <zearin@gonk.net> #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import Framework
class GitTree(Framework.TestCase):
    """Replay-based tests for GitTree / tree-element attributes."""

    def setUp(self):
        Framework.TestCase.setUp(self)
        # Fetch a known tree of the PyGithub repository (replayed fixture,
        # no live network traffic).
        self.tree = self.g.get_user().get_repo("PyGithub").get_git_tree("f492784d8ca837779650d1fb406a1a3587a764ad")

    def testAttributes(self):
        self.assertEqual(self.tree.sha, "f492784d8ca837779650d1fb406a1a3587a764ad")
        self.assertEqual(len(self.tree.tree), 11)
        # Element 0: a regular file (blob) entry.
        self.assertEqual(self.tree.tree[0].mode, "100644")
        self.assertEqual(self.tree.tree[0].path, ".gitignore")
        self.assertEqual(self.tree.tree[0].sha, "8a9af1462c3f4e3358315c2d2e6ef1e7334c59dd")
        self.assertEqual(self.tree.tree[0].size, 53)
        self.assertEqual(self.tree.tree[0].type, "blob")
        self.assertEqual(self.tree.tree[0].url, "https://api.github.com/repos/jacquev6/PyGithub/git/blobs/8a9af1462c3f4e3358315c2d2e6ef1e7334c59dd")
        # Element 6: a directory (tree) entry — note size is None for trees.
        self.assertEqual(self.tree.tree[6].mode, "040000")
        self.assertEqual(self.tree.tree[6].path, "ReplayDataForIntegrationTest")
        self.assertEqual(self.tree.tree[6].sha, "60b4602b2c2070246c5df078fb7a5150b45815eb")
        self.assertEqual(self.tree.tree[6].size, None)
        self.assertEqual(self.tree.tree[6].type, "tree")
        self.assertEqual(self.tree.tree[6].url, "https://api.github.com/repos/jacquev6/PyGithub/git/trees/60b4602b2c2070246c5df078fb7a5150b45815eb")
        self.assertEqual(self.tree.url, "https://api.github.com/repos/jacquev6/PyGithub/git/trees/f492784d8ca837779650d1fb406a1a3587a764ad")
| gpl-3.0 |
mrquim/mrquimrepo | repo/script.module.youtube.dl/lib/youtube_dl/extractor/playwire.py | 64 | 2408 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
dict_get,
float_or_none,
)
class PlaywireIE(InfoExtractor):
    """Extractor for Playwire-hosted videos (config/cdn .playwire.com URLs)."""

    _VALID_URL = r'https?://(?:config|cdn)\.playwire\.com(?:/v2)?/(?P<publisher_id>\d+)/(?:videos/v2|embed|config)/(?P<id>\d+)'
    _TESTS = [{
        'url': 'http://config.playwire.com/14907/videos/v2/3353705/player.json',
        'md5': 'e6398701e3595888125729eaa2329ed9',
        'info_dict': {
            'id': '3353705',
            'ext': 'mp4',
            'title': 'S04_RM_UCL_Rus',
            'thumbnail': r're:^https?://.*\.png$',
            'duration': 145.94,
        },
    }, {
        # m3u8 in f4m
        'url': 'http://config.playwire.com/21772/videos/v2/4840492/zeus.json',
        'info_dict': {
            'id': '4840492',
            'ext': 'mp4',
            'title': 'ITV EL SHOW FULL',
        },
        'params': {
            # m3u8 download
            'skip_download': True,
        },
    }, {
        # Multiple resolutions while bitrates missing
        'url': 'http://cdn.playwire.com/11625/embed/85228.html',
        'only_matching': True,
    }, {
        'url': 'http://config.playwire.com/12421/videos/v2/3389892/zeus.json',
        'only_matching': True,
    }, {
        'url': 'http://cdn.playwire.com/v2/12342/config/1532636.json',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        """Download the zeus.json player config and build the info dict."""
        mobj = re.match(self._VALID_URL, url)
        publisher_id, video_id = mobj.group('publisher_id'), mobj.group('id')
        # The zeus.json endpoint serves metadata for every supported URL form.
        player = self._download_json(
            'http://config.playwire.com/%s/videos/v2/%s/zeus.json' % (publisher_id, video_id),
            video_id)
        title = player['settings']['title']
        # Duration is reported in milliseconds.
        duration = float_or_none(player.get('duration'), 1000)
        content = player['content']
        thumbnail = content.get('poster')
        src = content['media']['f4m']
        formats = self._extract_f4m_formats(src, video_id, m3u8_id='hls')
        for a_format in formats:
            if not dict_get(a_format, ['tbr', 'width', 'height']):
                # No bitrate/resolution info available: rank "-hd." variants
                # above the rest.
                a_format['quality'] = 1 if '-hd.' in a_format['url'] else 0
        self._sort_formats(formats)
        return {
            'id': video_id,
            'title': title,
            'thumbnail': thumbnail,
            'duration': duration,
            'formats': formats,
        }
| gpl-2.0 |
humanoid-path-planner/hpp-corbaserver | src/hpp/corbaserver/robot.py | 1 | 22688 | #!/usr/bin/env python
# Copyright (c) 2014 CNRS
# Author: Florent Lamiraux
#
# This file is part of hpp-corbaserver.
# hpp-corbaserver is free software: you can redistribute it
# and/or modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either version
# 3 of the License, or (at your option) any later version.
#
# hpp-corbaserver is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Lesser Public License for more details. You should have
# received a copy of the GNU Lesser General Public License along with
# hpp-corbaserver. If not, see
# <http://www.gnu.org/licenses/>.
from hpp import Transform
from hpp.corbaserver.client import Client
##
# Helper class to load a robot model in hpp::core::ProblemSolver.
#
# This class is also a wrapper of idl methods defined by
# hpp::corbaserver::Robot. Most methods call idl methods.
class Robot (object):
    ## Constructor
    # \param robotName name of the robot; if None, wrap the robot already
    #        loaded in the problem solver,
    # \param rootJointType type of the root joint,
    # \param load whether to load the urdf/srdf model into hpp,
    # \param client corba client (a new one is created when None),
    # \param hppcorbaClient corba client used for hpp requests
    #        (defaults to \c client).
    def __init__ (self, robotName = None, rootJointType = None, load = True,
                  client = None, hppcorbaClient = None):
        if client is None: client = Client ()
        self.client = client
        self.hppcorba = client if hppcorbaClient is None else hppcorbaClient
        if robotName is None:
            # Wrap the robot already present in the problem solver.
            self.name = self.hppcorba.robot.getRobotName()
            self.rootJointType = rootJointType
            self.displayName = self.name
            self.rebuildRanks()
        else:
            self.name = robotName
            self.displayName = robotName
            self.rootJointType = rootJointType
            if load:
                self.loadModel (robotName, rootJointType)
            else:
                self.rebuildRanks()

    ## Rebuild inner variables rankInConfiguration and rankInVelocity
    #
    # compute the kinematic tree.
    def rebuildRanks (self):
        try:
            self.jointNames = self.getJointNames ()
        except Exception:
            # No robot loaded yet: nothing to index.
            return
        self.allJointNames = self.getAllJointNames ()
        self.rankInConfiguration = dict ()
        self.rankInVelocity = dict ()
        rankInConfiguration = rankInVelocity = 0
        for j in self.jointNames:
            self.rankInConfiguration [j] = rankInConfiguration
            rankInConfiguration += self.hppcorba.robot.getJointConfigSize (j)
            self.rankInVelocity [j] = rankInVelocity
            rankInVelocity += self.hppcorba.robot.getJointNumberDof (j)
        # The kinematic tree is computed lazily (see _computeKinematicChain).
        self.childFrames = None

    ## Compute the frame tree and the joint tree (anchor joints skipped).
    def _computeKinematicChain(self):
        # compute the tree of frames
        self.childFrames = {'universe' : list()}
        for j in self.getAllJointNames():
            self.childFrames[j] = list()
            parent = self.getParentFrame(j)
            if parent in self.childFrames:
                self.childFrames[parent].append(j)
            else:
                self.childFrames[parent] = [j]
        # Compute parent dictionary, skipping anchor joints (which have a
        # configuration size of 0).
        self.parentJoint = dict()
        for joint in self.jointNames:
            j = joint
            found = False
            while not found:
                parent = self.getParentFrame(j)
                if self.getJointConfigSize(parent) > 0 or parent == 'universe':
                    found = True
                j = parent
            self.parentJoint[joint] = parent
        # Compute kinematic chain without anchor joints.  All entries are
        # created first so the result does not depend on joint ordering.
        self.childJoints = {'universe' : list()}
        for j in self.jointNames:
            self.childJoints[j] = list()
        for j in self.jointNames:
            parent = self.parentJoint[j]
            self.childJoints[parent].append(j)

    ## Return urdf and srdf filenames (or XML strings)
    def urdfSrdfFilenames (self):
        # Three ways of specifying the model, checked in this order:
        # (urdfString, srdfString), (packageName, urdfName, urdfSuffix,
        # srdfSuffix), (urdfFilename, srdfFilename).
        if self.urdfSrdfString():
            return self.urdfString, self.srdfString
        elif hasattr (self, 'packageName') and hasattr (self, 'urdfName') and \
             hasattr (self, 'urdfSuffix') and hasattr (self, 'srdfSuffix') :
            urdfFilename = self.urdfPath ()
            srdfFilename = self.srdfPath ()
        elif hasattr (self, 'urdfFilename') and hasattr (self, 'srdfFilename') :
            urdfFilename = self.urdfFilename
            srdfFilename = self.srdfFilename
        else :
            raise RuntimeError (
                """instance should have one of the following sets of members
  - (packageName, urdfName, urdfSuffix, srdfSuffix),
  - (urdfFilename, srdfFilename)""")
        return urdfFilename, srdfFilename

    ## Whether the model is given as urdf/srdf XML strings.
    def urdfSrdfString (self):
        return hasattr (self, 'urdfString') and hasattr (self, 'srdfString')

    ## Load the robot model into the problem solver and rebuild ranks.
    def loadModel (self, robotName, rootJointType):
        urdfFilename, srdfFilename = self.urdfSrdfFilenames ()
        if self.urdfSrdfString():
            self.hppcorba.robot.loadRobotModelFromString \
                (robotName, rootJointType, urdfFilename, srdfFilename)
        else:
            self.hppcorba.robot.loadRobotModel \
                (robotName, rootJointType, urdfFilename, srdfFilename)
        self.rebuildRanks()

    ## ROS-style package URI of the urdf file.
    def urdfPath (self):
        return "package://" + self.packageName + '/urdf/' + self.urdfName + self.urdfSuffix + '.urdf'

    ## ROS-style package URI of the srdf file.
    def srdfPath (self):
        return "package://" + self.packageName + '/srdf/' + self.urdfName + self.srdfSuffix + '.srdf'

    ## \name Degrees of freedom
    #  \{

    ## Get size of configuration
    #  \return size of configuration
    def getConfigSize (self):
        return self.hppcorba.robot.getConfigSize ()

    ## Get size of velocity
    #  \return size of velocity
    def getNumberDof (self):
        return self.hppcorba.robot.getNumberDof ()
    ## \}

    ## \name Joints
    # \{

    ## Get joint names in the same order as in the configuration.
    def getJointNames (self):
        return self.hppcorba.robot.getJointNames ()

    ## Get joint types in the same order as in the configuration.
    def getJointTypes (self):
        return self.hppcorba.robot.getJointTypes ()

    ## Get all joint names including anchor joints.
    def getAllJointNames (self):
        return self.hppcorba.robot.getAllJointNames ()

    ## Get parent frame of a frame
    #  \param frameName name of the frame
    def getParentFrame (self, frameName):
        return self.hppcorba.robot.getParentJointName (frameName)

    ## Get child frames of a frame
    #  \param frameName name of the frame
    #  \param recursive whether to include grand-children recursively
    def getChildFrames (self, frameName, recursive = False):
        if not self.childFrames:
            self._computeKinematicChain()
        # Fix: the original indexed with an undefined name 'jointName'
        # (NameError).  Also copy the cached list so that the recursive
        # extension below does not corrupt the cache in place.
        childFrames = list(self.childFrames[frameName])
        if recursive:
            i = 0
            while i < len(childFrames):
                childFrames += self.childFrames[childFrames[i]]
                i += 1
        return childFrames

    ## Get parent joint of a joint (excluding anchor joints)
    #  \param jointName name of the joint
    def getParentJoint (self, jointName):
        if not self.childFrames:
            self._computeKinematicChain()
        return self.parentJoint[jointName]

    ## Get child joints of a joint (excluding anchor joints)
    #  \param jointName name of the joint
    #  \param recursive whether to include grand-children recursively
    def getChildJoints (self, jointName, recursive = False):
        if not self.childFrames:
            self._computeKinematicChain()
        # Copy the cached list: the recursive extension below must not
        # mutate the kinematic-chain cache in place.
        childJoints = list(self.childJoints[jointName])
        if recursive:
            i = 0
            while i < len(childJoints):
                childJoints += self.childJoints[childJoints[i]]
                i += 1
        return childJoints

    ## Get joint position.
    def getJointPosition (self, jointName):
        return self.hppcorba.robot.getJointPosition (jointName)

    ## Get constant position of root joint in world frame in initial position
    def getRootJointPosition (self):
        return self.hppcorba.robot.getRootJointPosition ()

    ## Set position of root joint in world frame in initial configuration
    def setRootJointPosition (self, position):
        return self.hppcorba.robot.setRootJointPosition (position)

    ## Set the static position of joint WRT its parent
    def setJointPosition (self, jointName, position):
        return self.hppcorba.robot.setJointPositionInParentFrame (jointName, position)

    ## Get joint transformation in world frame for current configuration.
    def getCurrentTransformation (self, jointName):
        return self.hppcorba.robot.getCurrentTransformation (jointName)

    ## Get joint number degrees of freedom.
    def getJointNumberDof (self, jointName):
        return self.hppcorba.robot.getJointNumberDof (jointName)

    ## Get joint number config size.
    def getJointConfigSize (self, jointName):
        return self.hppcorba.robot.getJointConfigSize (jointName)

    ## set bounds for the joint
    def setJointBounds (self, jointName, inJointBound):
        return self.hppcorba.robot.setJointBounds (jointName, inJointBound)

    ## Get bounds for a joint
    #
    #  \param jointName name of the joint
    #  \return sequence of bounds in order [v0_min,v0_max,v1_min,v1_max,...]
    #          where vi_min, vi_max are the bounds of the i-th degree of
    #          freedom of the joint if the degree of freedom is bounded,
    #          1, 0 otherwise.
    def getJointBounds (self, jointName):
        return self.hppcorba.robot.getJointBounds (jointName)

    ## Get joints that are saturated for a given configuration
    #
    #  \param q configuration
    #  \return list of triples (joint name, dof id, value)
    def getSaturated (self, q):
        saturated = []
        for j in self.jointNames:
            b = self.getJointBounds (j)
            r = self.rankInConfiguration [j]
            # Bounds come interleaved: [min0, max0, min1, max1, ...].
            for i, (m, M) in enumerate (zip (b [::2], b [1::2])):
                if q [r+i] == m or q [r+i] == M:
                    saturated.append ((j, i, q [r+i]))
        return saturated

    ## Get link position in world frame
    #
    # Joints are oriented in a different way as in urdf standard since
    # rotation and uni-dimensional translation joints act around or along
    # their x-axis. This method returns the position of the urdf link in
    # world frame.
    #
    # \param linkName name of the link
    # \return position of the link in world frame.
    def getLinkPosition (self, linkName):
        return self.hppcorba.robot.getLinkPosition (linkName)

    ## Get link names attached to a joint
    #
    # \param jointName name of the joint,
    # \return names of the links.
    def getLinkNames (self, jointName):
        return self.hppcorba.robot.getLinkNames (jointName)
    ## \}

    ## \name Configurations
    # \{

    ## Set current configuration of composite robot
    #
    #  \param q configuration of the composite robot
    def setCurrentConfig (self, q):
        self.hppcorba.robot.setCurrentConfig (q)

    ## Get current configuration of composite robot
    #
    #  \return configuration of the composite robot
    def getCurrentConfig (self):
        return self.hppcorba.robot.getCurrentConfig ()

    ## Set current velocity of composite robot
    #
    #  \param v velocity of the composite robot
    def setCurrentVelocity (self, v):
        self.hppcorba.robot.setCurrentVelocity (v)

    ## Get current velocity of composite robot
    #
    #  \return velocity of the composite robot
    def getCurrentVelocity (self):
        return self.hppcorba.robot.getCurrentVelocity ()

    ## Shoot random configuration
    #  \return dofArray Array of degrees of freedom.
    def shootRandomConfig (self):
        return self.hppcorba.robot.shootRandomConfig ()
    ## \}

    ## \name Bodies
    #  \{

    ## Get the list of objects attached to a joint.
    #  \param jointName name of the joint.
    #  \return list of names of CollisionObject attached to the body.
    def getJointInnerObjects (self, jointName):
        return self.hppcorba.robot.getJointInnerObjects (jointName)

    ## Get list of collision objects tested with the body attached to a joint
    #  \param jointName name of the joint.
    #  \return list of names of CollisionObject
    def getJointOuterObjects (self, jointName):
        return self.hppcorba.robot.getJointOuterObjects (jointName)

    ## Get position of robot object
    #  \param objectName name of the object.
    #  \return transformation as a hpp.Transform object
    def getObjectPosition (self, objectName):
        return Transform (self.hppcorba.robot.getObjectPosition (objectName))

    ## \brief Remove an obstacle from outer objects of a joint body
    #
    #  \param objectName name of the object to remove,
    #  \param jointName name of the joint owning the body.
    def removeObstacleFromJoint (self, objectName, jointName):
        return self.hppcorba.obstacle.removeObstacleFromJoint \
            (objectName, jointName, True, False)
    ## \}

    ## \name Collision checking and distance computation
    # \{

    ## Check the validity of a configuration and return an informative message
    #
    # \param cfg a configuration
    # \return (boolean, str): whether configuration is valid and a message
    #         explaining what is not valid.
    def isConfigValid (self, cfg):
        # Dead duplicate statements after the return were removed.
        return self.hppcorba.robot.isConfigValid (cfg)

    ## Check the validity of a configuration.
    #
    # \param cfg a configuration
    # \return whether configuration is valid
    def configIsValid (self, cfg):
        return self.isConfigValid (cfg)[0]

    ## Compute distances between bodies and obstacles
    #
    # \return list of distances,
    # \return names of the objects belonging to a body
    # \return names of the objects tested with inner objects,
    # \return closest points on the body,
    # \return closest points on the obstacles
    # \note outer objects for a body can also be inner objects of another
    # body.
    def distancesToCollision (self):
        return self.hppcorba.robot.distancesToCollision ()

    ## Get the aligned axes bounding box around the robot.
    # \return a vector of 6 floats. The 3 first are one corner of the box
    #         (lowest in all dimensions), the 3 last are the opposite corner.
    def getRobotAABB (self):
        return self.hppcorba.robot.getRobotAABB ()
    ## \}

    ## \name Mass and inertia
    # \{

    ## Get mass of robot
    def getMass (self):
        return self.hppcorba.robot.getMass ()

    ## Get position of center of mass
    def getCenterOfMass (self):
        return self.hppcorba.robot.getCenterOfMass ()

    ## Get Jacobian of the center of mass
    def getJacobianCenterOfMass (self):
        return self.hppcorba.robot.getJacobianCenterOfMass ()
    ##\}
## This class provides tools to create static stability constraints
class StaticStabilityConstraintsFactory:
def _getCOM (self, com):
    """Return the requested center of mass as a numpy array.

    An empty name selects the full center of mass; otherwise the named
    partial center of mass is queried from the server.
    """
    from numpy import array
    if com == "":
        values = self.getCenterOfMass ()
    else:
        values = self.hppcorba.robot.getPartialCom (com)
    return array(values)
## Create static stability constraints where the robot slides on the ground,
## and store them into ProblemSolver
## \param prefix prefix of the names of the constraint as stored in
## core::ProblemSolver,
## \param comName name of the PartialCOM in the problem solver map. Put "" for
## a full COM computations.
## \param leftAnkle, rightAnkle: names of the ankle joints.
## \param q0 input configuration for computing constraint reference,
## \return a list of the names of the created constraints
##
## The constraints are stored in the core::ProblemSolver constraints map
## and are accessible through the method
## hpp::core::ProblemSolver::addNumericalConstraint:
def createSlidingStabilityConstraint (self, prefix, comName, leftAnkle, rightAnkle, q0):
robot = self.hppcorba.robot
problem = self.hppcorba.problem
_tfs = robot.getJointsPosition (q0, (leftAnkle, rightAnkle))
Ml = Transform(_tfs[0])
Mr = Transform(_tfs[1])
self.setCurrentConfig (q0)
x = self._getCOM (comName)
result = []
# COM wrt left ankle frame
xloc = Ml.inverse().transform(x)
result.append (prefix + "relative-com")
problem.createRelativeComConstraint (result[-1], comName, leftAnkle, xloc.tolist(), (True,)*3)
# Relative pose of the feet
result.append (prefix + "relative-pose")
problem.createTransformationConstraint2 (result[-1],
leftAnkle, rightAnkle, (0,0,0,0,0,0,1), (Mr.inverse()*Ml).toTuple(), (True,)*6)
# Pose of the left foot
result.append (prefix + "pose-left-foot")
problem.createTransformationConstraint2 (result[-1],
"", leftAnkle, Ml.toTuple(), (0,0,0,0,0,0,1), (False,False,True,True,True,False))
# Complement left foot
result.append (prefix + "pose-left-foot-complement")
problem.createTransformationConstraint2 (result[-1],
"", leftAnkle, Ml.toTuple(), (0,0,0,0,0,0,1), (True,True,False,False,False,True))
problem.setConstantRightHandSide (result[-1], False)
return result
## Create static stability constraints where the feet are fixed on the ground,
## and store them into ProblemSolver
## \param prefix prefix of the names of the constraint as stored in
## core::ProblemSolver,
## \param comName name of the PartialCOM in the problem solver map. Put "" for
## a full COM computations.
## \param leftAnkle, rightAnkle: names of the ankle joints.
## \param q0 input configuration for computing constraint reference,
## \return a list of the names of the created constraints
##
## The constraints are stored in the core::ProblemSolver constraints map
## and are accessible through the method
## hpp::core::ProblemSolver::addNumericalConstraint:
def createStaticStabilityConstraint (self, prefix, comName, leftAnkle, rightAnkle, q0):
robot = self.hppcorba.robot
problem = self.hppcorba.problem
_tfs = robot.getJointsPosition (q0, (leftAnkle, rightAnkle))
Ml = Transform(_tfs[0])
Mr = Transform(_tfs[1])
self.setCurrentConfig (q0)
x = self._getCOM (comName)
result = []
# COM wrt left ankle frame
xloc = Ml.inverse().transform(x)
result.append (prefix + "relative-com")
problem.createRelativeComConstraint (result[-1], comName, leftAnkle, xloc.tolist(), (True,)*3)
# Pose of the left foot
result.append (prefix + "pose-left-foot")
problem.createTransformationConstraint2 (result[-1],
"", leftAnkle, Ml.toTuple(), (0,0,0,0,0,0,1), (True,True,True,True,True,True))
# Pose of the right foot
result.append (prefix + "pose-right-foot")
problem.createTransformationConstraint2 (result[-1],
"", rightAnkle, Mr.toTuple(), (0,0,0,0,0,0,1), (True,True,True,True,True,True))
return result
## Create static stability constraints where the COM is vertically projected
## on the line between the two ankles, and the feet slide (or are fixed) on the ground.
## The constraints are stored into ProblemSolver
## \param prefix prefix of the names of the constraint as stored in
## core::ProblemSolver,
## \param comName name of the PartialCOM in the problem solver map. Put "" for
## a full COM computations.
## \param leftAnkle, rightAnkle: names of the ankle joints.
## \param q0 input configuration for computing constraint reference,
## \param sliding whether the feet slide or are fixed.
## \return a list of the names of the created constraints
##
## The constraints are stored in the core::ProblemSolver constraints map
## and are accessible through the method
## hpp::core::ProblemSolver::addNumericalConstraint:
def createAlignedCOMStabilityConstraint (self, prefix, comName, leftAnkle, rightAnkle, q0, sliding):
robot = self.hppcorba.robot
problem = self.hppcorba.problem
_tfs = robot.getJointsPosition (q0, (leftAnkle, rightAnkle))
Ml = Transform(_tfs[0])
Mr = Transform(_tfs[1])
robot.setCurrentConfig (q0)
x = self._getCOM (robot, comName)
result = []
# COM between feet
result.append (prefix + "com-between-feet")
problem.createComBetweenFeet (result[-1], comName, leftAnkle, rightAnkle,
(0,0,0), (0,0,0), "", x.tolist(), (True,)*4)
if sliding:
mask = ( False, False, True, True, True, False )
else:
mask = ( True, ) * 6
# Pose of the right foot
result.append (prefix + "pose-right-foot")
problem.createTransformationConstraint2 (result[-1],
"", rightAnkle, Mr.toTuple(), (0,0,0,0,0,0,1), mask)
# Pose of the left foot
result.append (prefix + "pose-left-foot")
problem.createTransformationConstraint2 (result[-1],
"", leftAnkle, Ml.toTuple(), (0,0,0,0,0,0,1), mask)
return result;
## Humanoid robot
#
# Method loadModel builds a humanoid robot.
## Humanoid robot
#
# Method loadModel builds a humanoid robot.
class HumanoidRobot (Robot, StaticStabilityConstraintsFactory):
    def __init__ (self, robotName = None, rootJointType = None, load = True, client = None, hppcorbaClient = None):
        # Fixed: forward hppcorbaClient to Robot.__init__ instead of silently
        # dropping it (RobotXML.__init__ passes the same 5 arguments).
        Robot.__init__ (self, robotName, rootJointType, load, client, hppcorbaClient)
    def loadModel (self, robotName, rootJointType):
        # Load the humanoid-specific model (adds gaze/extremity information),
        # then refresh the cached joint rank tables.
        urdfFilename, srdfFilename = self.urdfSrdfFilenames ()
        self.hppcorba.robot.loadHumanoidModel \
            (robotName, rootJointType, urdfFilename, srdfFilename)
        self.rebuildRanks()
## Robot whose model is given as URDF/SRDF strings rather than files.
class RobotXML (Robot):
    def __init__ (self, robotName, rootJointType, urdfString, srdfString = "",
            load = True, client = None, hppcorbaClient = None,):
        # Stash the XML strings before Robot.__init__ triggers loadModel.
        self.load = load
        self.urdfString = urdfString
        self.srdfString = srdfString
        Robot.__init__ (self, robotName, rootJointType, load, client, hppcorbaClient)
    def loadModel (self, robotName, rootJointType):
        # Build the model from in-memory XML (skipped when load is False,
        # e.g. when attaching to an already-loaded server-side robot).
        if self.load:
            self.hppcorba.robot.loadRobotModelFromString (
                robotName, rootJointType, self.urdfString, self.srdfString)
        self.rebuildRanks()
    def urdfPath (self):
        # No file path exists for a string-defined model; return the string
        # itself so callers that expect "the URDF source" still work.
        return self.urdfString
| lgpl-3.0 |
dmnfarrell/peat | Protool/analyse_structure.py | 1 | 5888 | #!/usr/bin/env python
#
# # Protool - Python class for manipulating protein structuress
# Copyright (C) 2010 Jens Erik Nielsen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Contact information:
# Email: Jens.Nielsen_at_gmail.com
# Normal mail:
# Jens Nielsen
# SBBS, Conway Institute
# University College Dublin
# Dublin 4, Ireland
from errors import *
import string
class find_pattern:
def acid_amide_Gly(self):
residues=self.residues.keys()
residues.sort()
prevtype=''
sites=[]
for residue in residues:
resname=self.resname(residue)
if prevtype!='':
if string.strip(string.upper(resname))=='GLY':
if string.strip(string.upper(prevtype))=='ASP' or string.strip(string.upper(prevtype))=='ASN':
print 'Asp/Asn-Gly site found:',residue
sites.append(residue)
prevtype=resname
return sites
#
# =====================================================
#
def cis_Pro(self):
import phipsi
residues=self.residues.keys()
residues.sort()
for residue in residues:
#
# Get the next residue
#
skip=None
try:
nextres=self.NextResidue(residue)
except Cterm:
skip=1
#
# Is nextres OK?
#
if not self.isaa(nextres):
skip=1
if not skip:
if string.strip(string.upper(self.resname(nextres)))=='PRO':
if phipsi.GetPhiPsi(self,residue)[2]:
if abs(phipsi.GetPhiPsi(self,residue)[2])<30.0:
print 'Cis proline peptide bond: ',residue,nextres
#print phipsi.GetPhiPsi(self,residue)[2],residue,self.resname(residue)
return
#
# ==========================================================
#
def find_sequence(self,sequence):
#
# Find a sequence of amino acids in the present PDB
# sequence holds a one-letter sequence
#
import string
sequence=string.upper(sequence)
residues=self.residues.keys()
residues.sort()
results=[]
for residue in residues:
if self.three_to_one.has_key(self.resname(residue)):
resnam=self.three_to_one[self.resname(residue)]
else:
print 'Residue not found: %s' %residue
#resnam='?'
if resnam==sequence[0]:
found=1
lastres=residue
stretch=[residue]
for testres in sequence[1:]:
try:
nextres=self.NextResidue(lastres)
except Cterm:
found=None
break
if self.three_to_one.has_key(self.resname(nextres)):
resnam=self.three_to_one[self.resname(nextres)]
lastres=self.NextResidue(lastres)
if resnam!=testres:
found=None
break
stretch.append(lastres)
if found:
#
# Add this stretch to the results
#
results.append(stretch)
return results
#
# ---------------------------------------
#
class distances:
    """Mixin computing inter-atomic distances (Python 2).

    Expects the host class to provide self.atoms, self.residues,
    self.atname and self.dist."""
    def calc_distance_matrix(self):
        """Write pairwise CA-CA distance bounds to 'dg.data' and print a
        summary (average and maximum distance)."""
        # Number the CA atoms 1..N in self.atoms iteration order.
        CA={}
        epsilon=0.01
        count=1
        for atom in self.atoms.keys():
            if self.atname(atom)=="CA":
                CA[atom]=count
                count=count+1
        dists={}
        CAs=CA.keys()
        for pos1 in range(len(CAs)):
            for atom2 in CAs[pos1+1:]:
                atom1=CAs[pos1]
                dist=self.dist(atom1,atom2)
                # NOTE(review): 920.0 looks like a large-cutoff sentinel that
                # effectively keeps all pairs -- confirm the intended value.
                if dist<920.0:
                    ID1=CA[atom1]
                    ID2=CA[atom2]
                    if not dists.has_key(ID1):
                        dists[ID1]={}
                    # Store [lower, upper] bounds: dist * (1 +/- epsilon).
                    dists[ID1][ID2]=[self.dist(atom1,atom2)*(1-epsilon),self.dist(atom1,atom2)*(1+epsilon)]
        #
        # Write file
        #
        done={}
        # NOTE(review): 'done' is never used, and 'max' shadows the builtin.
        fd=open('dg.data','w')
        distsum=0.0
        count=0
        max=-9.9
        for ID1 in dists.keys():
            for ID2 in dists[ID1].keys():
                fd.write('%4d\t%4d\t%7.3f\t%7.3f\n' %(ID1,ID2,dists[ID1][ID2][0],dists[ID1][ID2][1]))
                distsum=distsum+dists[ID1][ID2][1]
                count=count+1
                if dists[ID1][ID2][1]>max:
                    max=dists[ID1][ID2][1]
        fd.close()
        print 'Average CA-CA distance: %5.3f. Max CA-CA dist: %5.3f ' %(distsum/float(count),max)
        return
    #
    # ----
    #
    def get_min_distance(self,residue1,residue2):
        """Get the minimum distance between residue1 and residue2
        (minimum over all atom pairs)."""
        min_dist=99999.9
        for atom1 in self.residues[residue1]:
            for atom2 in self.residues[residue2]:
                dist=self.dist(atom1,atom2)
                if dist<min_dist:
                    min_dist=dist
        return min_dist
| mit |
auready/django | tests/template_tests/filter_tests/test_rjust.py | 521 | 1030 | from django.template.defaultfilters import rjust
from django.test import SimpleTestCase
from django.utils.safestring import mark_safe
from ..utils import setup
class RjustTests(SimpleTestCase):
    """Template-engine tests for the |rjust filter and autoescaping."""
    @setup({'rjust01': '{% autoescape off %}.{{ a|rjust:"5" }}. .{{ b|rjust:"5" }}.{% endautoescape %}'})
    def test_rjust01(self):
        # Autoescape off: the '&' passes through for both safe and unsafe input.
        output = self.engine.render_to_string('rjust01', {"a": "a&b", "b": mark_safe("a&b")})
        self.assertEqual(output, ". a&b. . a&b.")
    @setup({'rjust02': '.{{ a|rjust:"5" }}. .{{ b|rjust:"5" }}.'})
    def test_rjust02(self):
        # Autoescape on: only the unsafe value is escaped to '&amp;'.
        output = self.engine.render_to_string('rjust02', {"a": "a&b", "b": mark_safe("a&b")})
        self.assertEqual(output, ". a&amp;b. . a&b.")
class FunctionTests(SimpleTestCase):
    """Direct tests of the rjust() filter function."""
    def test_rjust(self):
        self.assertEqual(rjust('test', 10), '      test')
    def test_less_than_string_length(self):
        # Width smaller than the string: value is returned unchanged.
        self.assertEqual(rjust('test', 3), 'test')
    def test_non_string_input(self):
        # Non-string input is coerced to str before padding.
        self.assertEqual(rjust(123, 4), ' 123')
| bsd-3-clause |
umlsynco/umlsync | django_server/easynav/settings.py | 1 | 5589 | # Django settings for easynav project.
# NOTE(review): DEBUG must be False in production -- Django leaks settings
# and tracebacks to visitors when it is on.  Presumably this is a dev-only
# config; confirm before deploying.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
    # ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',       # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'gt3',                      # Or path to database file if using sqlite3.
        'USER': 'gt3',                      # Not used with sqlite3.
        # NOTE(review): empty database password -- verify real credentials
        # are injected elsewhere before this config reaches production.
        'PASSWORD': '',                  # Not used with sqlite3.
        'HOST': '',                      # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '',                      # Set to empty string for default. Not used with sqlite3.
    }
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = 'http://localhost:8000/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'
# Additional locations of static files
STATICFILES_DIRS = (
'/home/evgeny/Projects/easynav/htdocs/media/',
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
# NOTE(review): a real secret key is committed to source control here; it
# should be rotated and loaded from the environment or a secrets store.
SECRET_KEY = '!9h5pmypr@3et52cyz)9^#l856425%mtyg!dwrd(dmb%&tpn$n'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'easynav.urls'
TEMPLATE_DIRS = (
'/home/evgeny/Projects/easynav/templates',
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.request',
'djblets.util.context_processors.settingsVars',
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
'django.contrib.admindocs',
'djblets.siteconfig',
'djblets.util',
'easynav.codeprojects',
'easynav.diagramsview',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
| gpl-2.0 |
Vajnar/linux-stable-hx4700 | tools/perf/scripts/python/futex-contention.py | 11261 | 1486 | # futex contention
# (c) 2010, Arnaldo Carvalho de Melo <acme@redhat.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Translation of:
#
# http://sourceware.org/systemtap/wiki/WSFutexContention
#
# to perf python scripting.
#
# Measures futex contention
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Util import *
# Per-thread state while a FUTEX_WAIT is in flight, keyed by tid.
# (The redundant duplicate definition of process_names was removed.)
thread_thislock = {}
thread_blocktime = {}
lock_waits = {} # long-lived stats on (tid,lock) blockage elapsed time
process_names = {} # long-lived pid-to-execname mapping
def syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm,
			      nr, uaddr, op, val, utime, uaddr2, val3):
	# Record the start of a FUTEX_WAIT so the matching exit handler can
	# compute how long this thread stayed blocked on the lock.
	if (op & FUTEX_CMD_MASK) != FUTEX_WAIT:
		# WAKE and other non-blocking operations are irrelevant here.
		return
	process_names[tid] = comm
	thread_thislock[tid] = uaddr
	thread_blocktime[tid] = nsecs(s, ns)
def syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm,
			     nr, ret):
	# Pair with the enter handler: if this tid had a pending FUTEX_WAIT,
	# fold the elapsed blocked time into the per-(tid, lock) statistics.
	start = thread_blocktime.get(tid)
	if start is None:
		return
	add_stats(lock_waits, (tid, thread_thislock[tid]), nsecs(s, ns) - start)
	del thread_blocktime[tid]
	del thread_thislock[tid]
def trace_begin():
	# perf-script hook invoked before any event is processed.
	print "Press control+C to stop and show the summary"
def trace_end():
	# perf-script hook invoked after the last event: dump per-(tid, lock)
	# contention counts and average blocked time in nanoseconds.
	for (tid, lock) in lock_waits:
		min, max, avg, count = lock_waits[tid, lock]
		print "%s[%d] lock %x contended %d times, %d avg ns" % \
		      (process_names[tid], tid, lock, count, avg)
| gpl-2.0 |
aragilar/h5preserve | tests/test_roundtrip.py | 2 | 4749 | import pytest
import h5py
from h5preserve import open as hp_open, H5PreserveFile
@pytest.mark.roundtrip
def test_roundtrip(tmpdir, obj_registry):
    """Dump an object via hp_open and read it back unchanged."""
    tmpfile = str(tmpdir.join("test_roundtrip.h5"))
    with hp_open(tmpfile, registries=obj_registry["registries"], mode='x') as f:
        f["first"] = obj_registry["dumpable_object"]
    with hp_open(tmpfile, registries=obj_registry["registries"], mode='r') as f:
        roundtripped = f["first"]
    assert roundtripped == obj_registry["dumpable_object"]
@pytest.mark.roundtrip
def test_roundtrip_without_open(tmpdir, obj_registry):
    """Same round trip, wrapping an h5py.File directly in H5PreserveFile."""
    tmpfile = str(tmpdir.join("test_roundtrip.h5"))
    with H5PreserveFile(
        h5py.File(tmpfile, 'x'), registries=obj_registry["registries"]
    ) as f:
        f["first"] = obj_registry["dumpable_object"]
    with H5PreserveFile(
        h5py.File(tmpfile, 'r'), registries=obj_registry["registries"]
    ) as f:
        roundtripped = f["first"]
    assert roundtripped == obj_registry["dumpable_object"]
@pytest.mark.roundtrip
def test_roundtrip_with_defaults(tmpdir, obj_registry_with_defaults):
    """Round trip via hp_open using a registry that defines defaults."""
    obj_registry = obj_registry_with_defaults
    tmpfile = str(tmpdir.join("test_roundtrip.h5"))
    with hp_open(tmpfile, registries=obj_registry["registries"], mode='x') as f:
        f["first"] = obj_registry["dumpable_object"]
    with hp_open(tmpfile, registries=obj_registry["registries"], mode='r') as f:
        roundtripped = f["first"]
    assert roundtripped == obj_registry["dumpable_object"]
@pytest.mark.roundtrip
def test_roundtrip_without_open_with_defaults(
    tmpdir, obj_registry_with_defaults
):
    """Round trip via H5PreserveFile using a registry with defaults."""
    obj_registry = obj_registry_with_defaults
    tmpfile = str(tmpdir.join("test_roundtrip.h5"))
    with H5PreserveFile(
        h5py.File(tmpfile, mode='x'), registries=obj_registry["registries"]
    ) as f:
        f["first"] = obj_registry["dumpable_object"]
    with H5PreserveFile(
        h5py.File(tmpfile, mode='r'), registries=obj_registry["registries"]
    ) as f:
        roundtripped = f["first"]
    assert roundtripped == obj_registry["dumpable_object"]
# The None round-trip tests rely on h5py's Empty dataset support, which only
# exists in newer h5py versions -- define them conditionally.
if hasattr(h5py, "Empty"):
    @pytest.mark.roundtrip
    def test_roundtrip_with_none(tmpdir, obj_registry_with_none):
        """Round trip (hp_open) of an object containing None values."""
        tmpfile = str(tmpdir.join("test_roundtrip.h5"))
        with hp_open(
            tmpfile, registries=obj_registry_with_none["registries"], mode='x'
        ) as f:
            f["first"] = obj_registry_with_none["dumpable_object"]
        with hp_open(
            tmpfile, registries=obj_registry_with_none["registries"], mode='r'
        ) as f:
            roundtripped = f["first"]
        assert roundtripped == obj_registry_with_none["dumpable_object"]
    @pytest.mark.roundtrip
    def test_roundtrip_without_open_with_none(tmpdir, obj_registry_with_none):
        """Round trip (H5PreserveFile) of an object containing None values."""
        tmpfile = str(tmpdir.join("test_roundtrip.h5"))
        with H5PreserveFile(
            h5py.File(tmpfile, 'x'),
            registries=obj_registry_with_none["registries"]
        ) as f:
            f["first"] = obj_registry_with_none["dumpable_object"]
        with H5PreserveFile(
            h5py.File(tmpfile, 'r'),
            registries=obj_registry_with_none["registries"]
        ) as f:
            roundtripped = f["first"]
        assert roundtripped == obj_registry_with_none["dumpable_object"]
    @pytest.mark.roundtrip
    def test_roundtrip_with_defaults_with_none(
        tmpdir, obj_registry_with_none_with_defaults
    ):
        """Round trip (hp_open) with None values and registry defaults."""
        obj_registry_with_none = obj_registry_with_none_with_defaults
        tmpfile = str(tmpdir.join("test_roundtrip.h5"))
        with hp_open(
            tmpfile, registries=obj_registry_with_none["registries"], mode='x'
        ) as f:
            f["first"] = obj_registry_with_none["dumpable_object"]
        with hp_open(
            tmpfile, registries=obj_registry_with_none["registries"], mode='r'
        ) as f:
            roundtripped = f["first"]
        assert roundtripped == obj_registry_with_none["dumpable_object"]
    @pytest.mark.roundtrip
    def test_roundtrip_without_open_with_defaults_with_none(
        tmpdir, obj_registry_with_none_with_defaults
    ):
        """Round trip (H5PreserveFile) with None values and registry defaults."""
        obj_registry_with_none = obj_registry_with_none_with_defaults
        tmpfile = str(tmpdir.join("test_roundtrip.h5"))
        with H5PreserveFile(
            h5py.File(tmpfile, 'x'),
            registries=obj_registry_with_none["registries"]
        ) as f:
            f["first"] = obj_registry_with_none["dumpable_object"]
        with H5PreserveFile(
            h5py.File(tmpfile, 'r'),
            registries=obj_registry_with_none["registries"]
        ) as f:
            roundtripped = f["first"]
        assert roundtripped == obj_registry_with_none["dumpable_object"]
| bsd-3-clause |
kenshay/ImageScript | ProgramData/SystemFiles/Python/Lib/site-packages/docutils/examples.py | 180 | 3959 | # $Id: examples.py 7320 2012-01-19 22:33:02Z milde $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
"""
This module contains practical examples of Docutils client code.
Importing this module from client code is not recommended; its contents are
subject to change in future Docutils releases. Instead, it is recommended
that you copy and paste the parts you need into your own code, modifying as
necessary.
"""
from docutils import core, io
def html_parts(input_string, source_path=None, destination_path=None,
               input_encoding='unicode', doctitle=True,
               initial_header_level=1):
    """Translate `input_string` (reStructuredText) into HTML document parts.

    Returns the dictionary produced by `docutils.core.publish_parts`:
    part names map to Unicode strings; encoding is left to the caller.

    Parameters:

    - `input_string`: A multi-line text string; required.
    - `source_path`: Path of the source file/object; optional, used only to
      make diagnostic output (system messages) more informative.
    - `destination_path`: Path of the output file/object; optional, used
      when computing relative paths (stylesheets, source links, etc.).
    - `input_encoding`: Encoding of `input_string`.  Keep the default
      "unicode" when passing a Unicode string; otherwise give the correct
      8-bit encoding.
    - `doctitle`: Promote a lone top-level section title to document title
      (and the next one to subtitle); enabled by default.
    - `initial_header_level`: Level of the first header element
      (1 means "<h1>").
    """
    settings = {
        'input_encoding': input_encoding,
        'doctitle_xform': doctitle,
        'initial_header_level': initial_header_level,
    }
    return core.publish_parts(source=input_string,
                              source_path=source_path,
                              destination_path=destination_path,
                              writer_name='html',
                              settings_overrides=settings)
def html_body(input_string, source_path=None, destination_path=None,
              input_encoding='unicode', output_encoding='unicode',
              doctitle=True, initial_header_level=1):
    """Return the contents of the HTML <body> element for `input_string`.

    Parameters match `html_parts()`; additionally `output_encoding` selects
    the encoding of the returned fragment -- the default "unicode" returns
    a Unicode string.
    """
    body = html_parts(input_string=input_string,
                      source_path=source_path,
                      destination_path=destination_path,
                      input_encoding=input_encoding,
                      doctitle=doctitle,
                      initial_header_level=initial_header_level)['html_body']
    if output_encoding == 'unicode':
        return body
    return body.encode(output_encoding)
def internals(input_string, source_path=None, destination_path=None,
              input_encoding='unicode', settings_overrides=None):
    """Return (document tree, publisher) for exploring Docutils internals.

    Parameters: see `html_parts()`.  `settings_overrides` is copied, never
    mutated in place.
    """
    overrides = dict(settings_overrides) if settings_overrides else {}
    overrides['input_encoding'] = input_encoding
    _output, publisher = core.publish_programmatically(
        source_class=io.StringInput, source=input_string,
        source_path=source_path,
        destination_class=io.NullOutput, destination=None,
        destination_path=destination_path,
        reader=None, reader_name='standalone',
        parser=None, parser_name='restructuredtext',
        writer=None, writer_name='null',
        settings=None, settings_spec=None, settings_overrides=overrides,
        config_section=None, enable_exit_status=None)
    return publisher.writer.document, publisher
| gpl-3.0 |
aperigault/ansible | lib/ansible/modules/cloud/azure/azure_rm_servicebus.py | 24 | 6521 | #!/usr/bin/python
#
# Copyright (c) 2018 Yuwei Zhou, <yuwzho@microsoft.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_servicebus
version_added: "2.8"
short_description: Manage Azure Service Bus
description:
- Create, update or delete an Azure Service Bus namespaces.
options:
resource_group:
description:
- Name of resource group.
required: true
name:
description:
- Name of the servicebus namespace.
required: true
state:
description:
- Assert the state of the servicebus. Use C(present) to create or update and use C(absen) to delete.
default: present
choices:
- absent
- present
location:
description:
- The servicebus's location.
sku:
description:
- Namespace SKU.
choices:
- standard
- basic
- premium
default: standard
extends_documentation_fragment:
- azure
- azure_tags
author:
- Yuwei Zhou (@yuwzho)
'''
EXAMPLES = '''
- name: Create a namespace
azure_rm_servicebus:
name: deadbeef
location: eastus
'''
RETURN = '''
id:
description:
- Current state of the service bus.
returned: success
type: str
sample: "/subscriptions/xxx...xxx/resourceGroups/myResourceGroup/providers/Microsoft.ServiceBus/namespaces/myServicebus"
'''
try:
from msrestazure.azure_exceptions import CloudError
except ImportError:
# This is handled in azure_rm_common
pass
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
from ansible.module_utils.common.dict_transformations import _snake_to_camel, _camel_to_snake
from ansible.module_utils._text import to_native
from datetime import datetime, timedelta
class AzureRMServiceBus(AzureRMModuleBase):
    """Ansible module: create, update or delete an Azure Service Bus
    namespace (see DOCUMENTATION above for the accepted options)."""
    def __init__(self):
        # Declarative option schema consumed by AzureRMModuleBase.
        self.module_arg_spec = dict(
            resource_group=dict(type='str', required=True),
            name=dict(type='str', required=True),
            location=dict(type='str'),
            state=dict(type='str', default='present', choices=['present', 'absent']),
            sku=dict(type='str', choices=['basic', 'standard', 'premium'], default='standard')
        )
        self.resource_group = None
        self.name = None
        self.state = None
        self.sku = None
        self.location = None
        self.results = dict(
            changed=False,
            id=None
        )
        super(AzureRMServiceBus, self).__init__(self.module_arg_spec,
                                                supports_check_mode=True)
    def exec_module(self, **kwargs):
        """Reconcile the namespace with the requested state and return the
        Ansible result dict (includes 'changed')."""
        for key in list(self.module_arg_spec.keys()):
            setattr(self, key, kwargs[key])
        changed = False
        if not self.location:
            # Default to the resource group's location when none is given.
            resource_group = self.get_resource_group(self.resource_group)
            self.location = resource_group.location
        original = self.get()
        if self.state == 'present' and not original:
            self.check_name()
            changed = True
            if not self.check_mode:
                original = self.create()
        elif self.state == 'absent' and original:
            changed = True
            original = None
            if not self.check_mode:
                self.delete()
                self.results['deleted'] = True
        if original:
            self.results = self.to_dict(original)
        self.results['changed'] = changed
        return self.results
    def check_name(self):
        """Fail the module if the namespace name is globally unavailable."""
        try:
            check_name = self.servicebus_client.namespaces.check_name_availability_method(self.name)
            if not check_name or not check_name.name_available:
                self.fail("Error creating namespace {0} - {1}".format(self.name, check_name.message or str(check_name)))
        except Exception as exc:
            self.fail("Error creating namespace {0} - {1}".format(self.name, exc.message or str(exc)))
    def create(self):
        """Create the namespace and block until the operation completes."""
        self.log('Cannot find namespace, creating a one')
        try:
            # Azure SKU names are capitalized ('Standard', ...).
            sku = self.servicebus_models.SBSku(name=str.capitalize(self.sku))
            poller = self.servicebus_client.namespaces.create_or_update(self.resource_group,
                                                                        self.name,
                                                                        self.servicebus_models.SBNamespace(location=self.location,
                                                                                                           sku=sku))
            ns = self.get_poller_result(poller)
        except Exception as exc:
            self.fail('Error creating namespace {0} - {1}'.format(self.name, str(exc.inner_exception) or str(exc)))
        return ns
    def delete(self):
        """Delete the namespace; fail the module on any SDK error."""
        try:
            self.servicebus_client.namespaces.delete(self.resource_group, self.name)
            return True
        except Exception as exc:
            self.fail("Error deleting route {0} - {1}".format(self.name, str(exc)))
    def get(self):
        """Return the existing namespace, or None when it does not exist."""
        try:
            return self.servicebus_client.namespaces.get(self.resource_group, self.name)
        except Exception:
            return None
    def to_dict(self, instance):
        """Serialize an SBNamespace into a plain result dict, skipping
        falsy attributes and normalizing SKU / datetime values."""
        result = dict()
        attribute_map = self.servicebus_models.SBNamespace._attribute_map
        for attribute in attribute_map.keys():
            value = getattr(instance, attribute)
            if not value:
                continue
            if isinstance(value, self.servicebus_models.SBSku):
                result[attribute] = value.name.lower()
            elif isinstance(value, datetime):
                result[attribute] = str(value)
            elif isinstance(value, str):
                result[attribute] = to_native(value)
            elif attribute == 'max_size_in_megabytes':
                # Rename to the key the module documents.
                result['max_size_in_mb'] = value
            else:
                result[attribute] = value
        return result
def is_valid_timedelta(value):
    """Map Azure's 'never expires' sentinel (TimeSpan.MaxValue, i.e.
    timedelta(10675199, 10085, 477581)) to None; pass any other value
    through unchanged."""
    _AZURE_TIMESPAN_MAX = timedelta(10675199, 10085, 477581)
    return None if value == _AZURE_TIMESPAN_MAX else value
def main():
    # Instantiating the module runs it (AzureRMModuleBase drives exec_module).
    AzureRMServiceBus()
if __name__ == '__main__':
main()
| gpl-3.0 |
blackzw/openwrt_sdk_dev1 | staging_dir/target-mips_r2_uClibc-0.9.33.2/usr/lib/python2.7/test/test_xdrlib.py | 94 | 1597 | from test import test_support
import unittest
import xdrlib
class XDRTest(unittest.TestCase):
    """Round-trip test for the xdrlib Packer/Unpacker pair (Python 2)."""
    def test_xdr(self):
        # Pack one value of each supported XDR type...
        p = xdrlib.Packer()
        s = 'hello world'
        a = ['what', 'is', 'hapnin', 'doctor']
        p.pack_int(42)
        p.pack_int(-17)
        p.pack_uint(9)
        p.pack_bool(True)
        p.pack_bool(False)
        p.pack_uhyper(45L)
        p.pack_float(1.9)
        p.pack_double(1.9)
        p.pack_string(s)
        p.pack_list(range(5), p.pack_uint)
        p.pack_array(a, p.pack_string)
        # ...then unpack the buffer and verify each value in order.
        data = p.get_buffer()
        up = xdrlib.Unpacker(data)
        self.assertEqual(up.get_position(), 0)
        self.assertEqual(up.unpack_int(), 42)
        self.assertEqual(up.unpack_int(), -17)
        self.assertEqual(up.unpack_uint(), 9)
        self.assertTrue(up.unpack_bool() is True)
        # remember position
        pos = up.get_position()
        self.assertTrue(up.unpack_bool() is False)
        # rewind and unpack again: set_position must make reads repeatable
        up.set_position(pos)
        self.assertTrue(up.unpack_bool() is False)
        self.assertEqual(up.unpack_uhyper(), 45L)
        # float/double round trips are approximate by nature
        self.assertAlmostEqual(up.unpack_float(), 1.9)
        self.assertAlmostEqual(up.unpack_double(), 1.9)
        self.assertEqual(up.unpack_string(), s)
        self.assertEqual(up.unpack_list(up.unpack_uint), range(5))
        self.assertEqual(up.unpack_array(up.unpack_string), a)
        up.done()
        # reading past done() must raise
        self.assertRaises(EOFError, up.unpack_uint)
def test_main():
    # Delegate to test_support so the test integrates with CPython 2's
    # regrtest driver.
    test_support.run_unittest(XDRTest)
if __name__ == "__main__":
    test_main()
| gpl-2.0 |
nolanliou/tensorflow | tensorflow/contrib/tensor_forest/python/tensor_forest_test.py | 10 | 5506 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tf.contrib.tensor_forest.ops.tensor_forest."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.tensor_forest.python import tensor_forest
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import test_util
from tensorflow.python.platform import googletest
class TensorForestTest(test_util.TensorFlowTestCase):
  """Tests for ForestHParams.fill() and RandomForestGraphs construction.

  Covers hyperparameter derivation (dense/string params) and the types
  returned by training/inference graph builders for dense and sparse
  inputs, classification and regression.

  Note: the deprecated ``assertEquals`` alias (removed in Python 3.12)
  has been replaced with ``assertEqual``, and
  ``assertTrue(isinstance(...))`` with ``assertIsInstance`` for better
  failure messages; test behavior is unchanged.
  """

  def testForestHParams(self):
    """fill() derives output columns, split count, and defaults."""
    hparams = tensor_forest.ForestHParams(
        num_classes=2,
        num_trees=100,
        max_nodes=1000,
        split_after_samples=25,
        num_features=60).fill()
    self.assertEqual(2, hparams.num_classes)
    self.assertEqual(3, hparams.num_output_columns)
    self.assertEqual(10, hparams.num_splits_to_consider)
    # Default value of valid_leaf_threshold
    self.assertEqual(1, hparams.valid_leaf_threshold)
    self.assertEqual(0, hparams.base_random_seed)

  def testForestHParamsBigTree(self):
    """Larger trees/feature counts widen the candidate-split count."""
    hparams = tensor_forest.ForestHParams(
        num_classes=2,
        num_trees=100,
        max_nodes=1000000,
        split_after_samples=25,
        num_features=1000).fill()
    self.assertEqual(31, hparams.num_splits_to_consider)

  def testForestHParamsStringParams(self):
    """String-valued hyperparameters pass through fill() unconverted."""
    hparams = tensor_forest.ForestHParams(
        num_classes=2,
        num_trees=100,
        max_nodes=1000000,
        split_after_samples="25",
        num_splits_to_consider="1000000",
        num_features=1000).fill()
    self.assertEqual("1000000", hparams.num_splits_to_consider)

  def testTrainingConstructionClassification(self):
    """training_graph() on dense classification data yields an Operation."""
    input_data = [[-1., 0.], [-1., 2.],  # node 1
                  [1., 0.], [1., -2.]]  # node 2
    input_labels = [0, 1, 2, 3]
    params = tensor_forest.ForestHParams(
        num_classes=4,
        num_features=2,
        num_trees=10,
        max_nodes=1000,
        split_after_samples=25).fill()
    graph_builder = tensor_forest.RandomForestGraphs(params)
    graph = graph_builder.training_graph(input_data, input_labels)
    self.assertIsInstance(graph, ops.Operation)

  def testTrainingConstructionRegression(self):
    """training_graph() with regression=True also yields an Operation."""
    input_data = [[-1., 0.], [-1., 2.],  # node 1
                  [1., 0.], [1., -2.]]  # node 2
    input_labels = [0, 1, 2, 3]
    params = tensor_forest.ForestHParams(
        num_classes=4,
        num_features=2,
        num_trees=10,
        max_nodes=1000,
        split_after_samples=25,
        regression=True).fill()
    graph_builder = tensor_forest.RandomForestGraphs(params)
    graph = graph_builder.training_graph(input_data, input_labels)
    self.assertIsInstance(graph, ops.Operation)

  def testInferenceConstruction(self):
    """inference_graph() returns (probabilities, paths, variance) tensors."""
    input_data = [[-1., 0.], [-1., 2.],  # node 1
                  [1., 0.], [1., -2.]]  # node 2
    params = tensor_forest.ForestHParams(
        num_classes=4,
        num_features=2,
        num_trees=10,
        max_nodes=1000,
        split_after_samples=25).fill()
    graph_builder = tensor_forest.RandomForestGraphs(params)
    probs, paths, var = graph_builder.inference_graph(input_data)
    self.assertIsInstance(probs, ops.Tensor)
    self.assertIsInstance(paths, ops.Tensor)
    self.assertIsInstance(var, ops.Tensor)

  def testTrainingConstructionClassificationSparse(self):
    """training_graph() accepts SparseTensor input for classification."""
    input_data = sparse_tensor.SparseTensor(
        indices=[[0, 0], [0, 3], [1, 0], [1, 7], [2, 1], [3, 9]],
        values=[-1.0, 0.0, -1., 2., 1., -2.0],
        dense_shape=[4, 10])
    input_labels = [0, 1, 2, 3]
    params = tensor_forest.ForestHParams(
        num_classes=4,
        num_features=10,
        num_trees=10,
        max_nodes=1000,
        split_after_samples=25).fill()
    graph_builder = tensor_forest.RandomForestGraphs(params)
    graph = graph_builder.training_graph(input_data, input_labels)
    self.assertIsInstance(graph, ops.Operation)

  def testInferenceConstructionSparse(self):
    """inference_graph() accepts SparseTensor input (regression mode)."""
    input_data = sparse_tensor.SparseTensor(
        indices=[[0, 0], [0, 3],
                 [1, 0], [1, 7],
                 [2, 1],
                 [3, 9]],
        values=[-1.0, 0.0,
                -1., 2.,
                1.,
                -2.0],
        dense_shape=[4, 10])
    params = tensor_forest.ForestHParams(
        num_classes=4,
        num_features=10,
        num_trees=10,
        max_nodes=1000,
        regression=True,
        split_after_samples=25).fill()
    graph_builder = tensor_forest.RandomForestGraphs(params)
    probs, paths, var = graph_builder.inference_graph(input_data)
    self.assertIsInstance(probs, ops.Tensor)
    self.assertIsInstance(paths, ops.Tensor)
    self.assertIsInstance(var, ops.Tensor)
# Standard TensorFlow-test entry point.
if __name__ == "__main__":
  googletest.main()
| apache-2.0 |
mabotech/mabozen | mabozen/conf/logging_config.py | 3 | 2630 | # -*- coding: utf-8 -*-
# logging.config.dictConfig schema, version 1.
# Layout: one rotating file per severity level (10 MB x 7 backups) plus a
# console handler; a dedicated 'performance' logger writes to its own file.
LOGGING = {
    'version': 1,
    #'disable_existing_loggers': True,
    'formatters': {
        # Full context: time, level, pid/tid, logger, module:line, message.
        'verbose': {
            'format': '%(asctime)s [%(levelname)s] %(process)d, %(thread)d, %(name)s, %(module)s:%(lineno)s:%(message)s'
        },
        # Lighter format for performance measurements (no module/line).
        'performance': {
            'format': '%(asctime)s [%(levelname)s] %(process)d, %(thread)d, %(message)s'
        },
        'simple': {
            'format': '%(levelname)s %(message)s'
        },
    },
    'handlers': {
        'console': {
            'level':'DEBUG',
            'class':'logging.StreamHandler',
            'formatter':'verbose'
        },
        'performance': {
            'level':'DEBUG',
            'class':'logging.handlers.RotatingFileHandler',
            'formatter':'performance',
            'filename':'performance.log',
            'maxBytes':10240000, # 10M
            'backupCount':7 # total
        },
        'debug': {
            'level':'DEBUG',
            'class':'logging.handlers.RotatingFileHandler',
            'formatter':'verbose',
            'filename':'debug.log',
            'maxBytes':10240000,
            'backupCount':7
        },
        'info': {
            'level':'INFO',
            'class':'logging.handlers.RotatingFileHandler',
            'formatter':'verbose',
            'filename':'info.log',
            'maxBytes':10240000,
            'backupCount':7
        },
        'warning': {
            'level':'WARNING',
            'class':'logging.handlers.RotatingFileHandler',
            'formatter':'verbose',
            'filename':'warning.log',
            'maxBytes':10240000,
            'backupCount':7
        },
        'error': {
            'level':'ERROR',
            'class':'logging.handlers.RotatingFileHandler',
            'formatter':'verbose',
            'filename':'error.log',
            'maxBytes':10240000,
            'backupCount':7
        },
    },
    'loggers': {
        # Root logger: everything DEBUG+ goes to console and the
        # per-level files (each file filters by its own handler level).
        '': {
            'handlers': ['console', 'info', 'warning', 'error', 'debug'],
            'level': 'DEBUG',
            'propagate': True
        },
        'performance': {
            'handlers': ['console', 'performance'],
            'level': 'DEBUG',
            'propagate': True
        }
    }
}
| mit |
imtapps/django-admin-ext | example/sample/tests.py | 1 | 9590 | from functools import wraps
from django.contrib.auth.models import User
from django.test.testcases import TestCase
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium.common.exceptions import TimeoutException, NoSuchElementException, StaleElementReferenceException
from selenium import webdriver
from selenium.webdriver.support.wait import WebDriverWait
def retry(func): # noqa C901
    """Decorator that re-runs a flaky test after rebuilding its fixtures.

    On any exception the test-case and Django-internal fixtures are torn
    down and set up again, then the test is retried recursively; after
    ``countdown`` retries the last exception propagates.

    NOTE(review): ``countdown`` is declared before *args, so an extra
    positional argument would silently override it — callers here invoke
    tests with no extra positionals.
    """
    @wraps(func)
    def retry_test(self, countdown=30, *args, **kwargs):
        try:
            result = func(self, *args, **kwargs)
        except Exception:
            if countdown <= 0:
                raise
            # Rebuild both the TestCase-level and Django-internal
            # fixtures so the retry starts from a clean state.
            self.tearDown()
            self._post_teardown()
            self._pre_setup()
            self.setUp()
            result = retry_test(self, countdown=countdown - 1, *args, **kwargs)
        return result
    return retry_test
class Retry(type):
    """Metaclass that wraps every ``test_*`` attribute with the ``retry``
    decorator before the class object is created."""

    def __new__(cls, name, bases, attrs):
        test_names = [attr for attr in attrs if attr.startswith('test_')]
        for test_name in test_names:
            attrs[test_name] = retry(attrs[test_name])
        return super(Retry, cls).__new__(cls, name, bases, attrs)
class AjaxAdminTests(TestCase, StaticLiveServerTestCase):
    """Selenium tests for the AJAX-driven dynamic fields of the admin
    "meal" form (food_type -> main_ingredient -> ingredient_details).

    NOTE(review): ``__metaclass__`` is the Python 2 spelling; under
    Python 3 it is ignored and the Retry wrapping never applies — if this
    suite runs on Python 3 (``assertCountEqual`` suggests so) it should
    be ``class AjaxAdminTests(..., metaclass=Retry)``. Confirm target
    interpreter before changing.
    """
    __metaclass__ = Retry
    fixtures = ['initial_data.json']
    @classmethod
    def setUpClass(cls):
        super(AjaxAdminTests, cls).setUpClass()
        # Remote browser on Sauce Labs: Chrome 31 / Windows XP.
        caps = webdriver.DesiredCapabilities.CHROME
        caps['platform'] = 'Windows XP'
        caps['version'] = '31'
        caps['name'] = 'django-admin-ext'
        # NOTE(review): Sauce Labs credentials are hardcoded in this URL;
        # they belong in environment variables / CI secrets, not source.
        cls.driver = webdriver.Remote(
            desired_capabilities=caps,
            command_executor=(
                "http://imtappswebadmin:841f95a0-c21d-4cb4-a7f4-288ed88a4b18@ondemand.saucelabs.com:80/wd/hub"
            )
        )
        cls.driver.implicitly_wait(30)
    @classmethod
    def tearDownClass(cls):
        print("Link to your job: https://saucelabs.com/jobs/%s" % cls.driver.session_id)
        cls.driver.quit()
    def setUp(self):
        # Touch the User table (forces fixture load), then authenticate.
        list(User.objects.all())
        self.login()
    def _get_element(self, context, method, argument):
        # Dispatch to one of context's find_element_by_* methods by name.
        return getattr(context, method)(argument)
    def find_element(self, context=None, name=None, selector=None, tag=None): # noqa C901
        """Locate element(s) by exactly one of name/selector/tag, waiting
        up to 60s (polling every 1s) for a match to appear."""
        argument = name or selector or tag
        context = context or self.driver
        if name:
            method = 'find_element_by_name'
        elif selector:
            method = 'find_element_by_css_selector'
        elif tag:
            # Note: plural form — returns a list of elements.
            method = 'find_elements_by_tag_name'
        else:
            raise Exception("No Selector")
        WebDriverWait(context, 60, 1).until(lambda d: self._get_element(d, method, argument))
        return self._get_element(context, method, argument)
    def click_element(self, **kwargs):
        element = self.find_element(**kwargs)
        element.click()
    def login(self):
        """Log in to the admin with the fixture's admin/test credentials."""
        self.driver.get("%s/admin/" % self.live_server_url)
        # new_user = User.objects.create_user(username='admin', is_superuser=True, is_staff=True)
        # new_user.set_password('test')
        # new_user.save()
        user = self.find_element(selector='#id_username')
        user.send_keys("admin")
        pswd = self.find_element(selector='#id_password')
        pswd.send_keys("test")
        self.click_element(selector=".submit-row>[type='submit']")
    def assert_selected_option(self, element_id, value):
        # The <select>'s currently selected option must show `value`.
        option = self.find_element(selector='#' + element_id + ' option[selected]')
        self.assertEqual(value, option.text)
    def assert_select_has_options(self, element_id, expected_ingredients):
        # The <select> must contain exactly the expected option labels.
        details = self.find_element(selector='#' + element_id)
        options = self.find_element(context=details, tag='option')
        self.assertCountEqual(expected_ingredients, [o.text for o in options])
    def change_value_for_element(self, element_id, value):
        """Type `value` into the element, then blur it so the change
        event fires and the AJAX update runs."""
        element = self.find_element(selector='#' + element_id)
        element.send_keys(value)
        # click off of the element to trigger the change event
        try:
            self.click_element(selector='label[for="' + element_id + '"]')
        except Exception:
            pass
    def test_main_ingredient_element_not_present_initially(self):
        self.driver.get("%s/admin/sample/meal/add/" % self.live_server_url)
        self.find_element(selector='#id_food_type')
        # The dependent field must not exist before a food type is chosen.
        with self.assertRaises(TimeoutException):
            self.find_element(selector='#id_main_ingredient')
    def test_main_ingredient_element_shows_when_pizza_food_type_is_selected(self):
        self.driver.get("%s/admin/sample/meal/add/" % self.live_server_url)
        self.change_value_for_element('id_food_type', 'pizza')
        self.assert_select_has_options(
            'id_main_ingredient', [u'---------', u'pepperoni', u'mushrooms', u'beef', u'anchovies']
        )
    def test_main_ingredient_element_shows_when_burger_food_type_is_selected(self):
        self.driver.get("%s/admin/sample/meal/add/" % self.live_server_url)
        self.change_value_for_element('id_food_type', 'burger')
        self.assert_select_has_options('id_main_ingredient', [u'---------', u'mushrooms', u'beef', u'lettuce'])
    def test_ingredient_details_is_shown_when_beef_is_selected(self):
        self.driver.get("%s/admin/sample/meal/add/" % self.live_server_url)
        self.change_value_for_element('id_food_type', 'burger')
        self.change_value_for_element('id_main_ingredient', 'beef')
        self.assert_select_has_options('id_ingredient_details', [u'---------', u'Grass Fed', u'Cardboard Fed'])
    def test_ingredient_details_is_reset_when_main_ingredient_changes(self):
        self.driver.get("%s/admin/sample/meal/add/" % self.live_server_url)
        self.change_value_for_element('id_food_type', 'burger')
        self.change_value_for_element('id_main_ingredient', 'beef')
        details = self.find_element(selector='#id_ingredient_details')
        self.assertTrue(details.is_displayed())
        # Switching to an ingredient with no details removes the field.
        self.change_value_for_element('id_main_ingredient', 'lettuce')
        try:
            self.find_element(selector='#id_ingredient_details')
        except (NoSuchElementException, TimeoutException, StaleElementReferenceException):
            pass
        else:
            self.fail("Expected not to find #id_ingredient_details")
    def test_ingredient_details_change_when_main_ingredient_changes(self):
        self.driver.get("%s/admin/sample/meal/add/" % self.live_server_url)
        self.change_value_for_element('id_food_type', 'pizza')
        self.change_value_for_element('id_main_ingredient', 'beef')
        self.assert_select_has_options('id_ingredient_details', [u'---------', u'Grass Fed', u'Cardboard Fed'])
        self.change_value_for_element('id_main_ingredient', 'pepperoni')
        self.assert_select_has_options(
            'id_ingredient_details', [u'---------', u'Grass Fed Goodness', u'Cardboard Not So Goodness']
        )
    def test_main_ingredient_does_not_change_when_food_type_changes_if_valid_option(self):
        self.driver.get("%s/admin/sample/meal/add/" % self.live_server_url)
        self.change_value_for_element('id_food_type', 'pizza')
        self.change_value_for_element('id_main_ingredient', 'beef')
        self.assert_selected_option('id_main_ingredient', 'beef')
        # 'beef' is valid for burgers too, so the selection must survive.
        self.change_value_for_element('id_food_type', 'burger')
        self.assert_selected_option('id_main_ingredient', 'beef')
    def test_shows_dynamic_field_on_existing_instance(self):
        self.driver.get("%s/admin/sample/meal/1/" % self.live_server_url)
        self.assert_selected_option('id_main_ingredient', 'anchovies')
    def test_sets_ingredient_details_when_available(self):
        self.driver.get("%s/admin/sample/meal/add/" % self.live_server_url)
        self.change_value_for_element('id_food_type', 'burger')
        self.change_value_for_element('id_main_ingredient', 'beef')
        self.change_value_for_element('id_ingredient_details', 'Grass Fed')
        self.click_element(name='_continue')
        self.assert_selected_option('id_ingredient_details', 'Grass Fed')
    def test_allows_changing_dynamic_field_on_existing_instance(self):
        self.driver.get("%s/admin/sample/meal/add/" % self.live_server_url)
        self.change_value_for_element('id_food_type', 'burger')
        # create new meal
        main_ingredient = self.find_element(selector='#id_main_ingredient')
        main_ingredient.send_keys('mushrooms')
        self.click_element(name='_continue')
        # change main_ingredient for new meal
        main_ingredient2 = self.find_element(selector='#id_main_ingredient')
        main_ingredient2.send_keys('lettuce')
        self.click_element(name='_continue')
        # make sure there are no errors
        with self.assertRaises(TimeoutException):
            self.find_element(selector=".errors")
        # make sure our new main_ingredient was saved
        self.assert_selected_option('id_main_ingredient', 'lettuce')
        # delete our meal when we're done
        self.click_element(selector='.deletelink')
        self.click_element(selector='[type="submit"]')
    def test_gives_field_required_error_when_dynamic_field_not_chosen(self):
        self.driver.get("%s/admin/sample/meal/add/" % self.live_server_url)
        food_type = self.find_element(selector='#id_food_type')
        food_type.send_keys('burger')
        self.click_element(name='_save')
        error_item = self.find_element(selector=".errors.field-main_ingredient li")
        self.assertEqual("This field is required.", error_item.text)
| bsd-2-clause |
lhellebr/spacewalk | backend/satellite_tools/xmlDiskSource.py | 6 | 9005 | #
# Abstraction for an XML importer with a disk base
#
# Copyright (c) 2008--2016 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
import os
import gzip
from spacewalk.common.fileutils import createPath
from spacewalk.common.rhnLib import hash_object_id
class MissingXmlDiskSourceFileError(Exception):
    """Raised when the expected XML file (plain or .gz) is not on disk."""
    pass
class MissingXmlDiskSourceDirError(Exception):
    """Raised when a source path exists but is not a directory."""
    pass
class DiskSource:
    """Base class for loading XML documents from a mount point on disk.

    Subclasses set ``subdir`` (and override ``_getFile``) to locate
    their concrete file; ``load`` returns an open stream, preferring a
    gzip-compressed variant when one exists.
    """
    subdir = None
    # Compressed (.gz) variants are honored unless a subclass opts out.
    allow_compressed_files = 1

    def __init__(self, mountPoint):
        self.mountPoint = mountPoint

    def load(self):
        """Open and return a readable stream for this source's file."""
        return self._loadFile(self._getFile())

    def _getFile(self, create=0):
        # Virtual
        # pylint: disable=W0613,R0201
        return None

    def _loadFile(self, filename):
        """Open *filename*; a gzip variant wins when allowed."""
        if self.allow_compressed_files:
            # A name already ending in .gz wins, then a sibling <name>.gz.
            if filename[-3:] == '.gz' and os.path.exists(filename):
                return gzip.open(filename, "rb")
            gz_name = filename + '.gz'
            if os.path.exists(gz_name):
                return gzip.open(gz_name, "rb")
        if os.path.exists(filename):
            return open(filename, "r")
        raise MissingXmlDiskSourceFileError("unable to process file %s" % filename)

    def _getDir(self, create=0):
        """Return this source's directory; optionally create it."""
        dirname = "%s/%s" % (self.mountPoint, self.subdir)
        if not create:
            return dirname
        if not os.path.exists(dirname):
            createPath(dirname)
        if not os.path.isdir(dirname):
            raise MissingXmlDiskSourceDirError("%s is not a directory" % dirname)
        return dirname
class ArchesDiskSource(DiskSource):
    """Source for the arches.xml document under <mount>/arches."""
    subdir = 'arches'
    filename = 'arches.xml'

    def _getFile(self, create=0):
        """Return the arches file path, creating the directory on demand."""
        directory = self._getDir(create)
        if create and not os.path.isdir(directory):
            createPath(directory)
        return os.path.join(directory, self.filename)
class ArchesExtraDiskSource(ArchesDiskSource):
    # Same layout as ArchesDiskSource, backed by arches-extra.xml instead.
    filename = "arches-extra.xml"
class ProductnamesDiskSource(DiskSource):
    """Source for the product_names.xml document."""
    subdir = 'product_names'

    def _getFile(self, create=0):
        """Return the product_names.xml path under this source's dir."""
        directory = self._getDir(create)
        if create and not os.path.isdir(directory):
            createPath(directory)
        return "%s/product_names.xml" % directory
class ChannelFamilyDiskSource(DiskSource):
    """Source for the channel_families.xml document."""
    subdir = 'channel_families'

    def _getFile(self, create=0):
        """Return the channel_families.xml path, creating dirs on demand."""
        directory = self._getDir(create)
        if create and not os.path.isdir(directory):
            createPath(directory)
        return "%s/channel_families.xml" % directory
class OrgsDiskSource(DiskSource):
    """Source for the orgs.xml document."""
    subdir = 'orgs'

    def _getFile(self, create=0):
        """Return the orgs.xml path, creating the directory on demand."""
        directory = self._getDir(create)
        if create and not os.path.isdir(directory):
            createPath(directory)
        return "%s/orgs.xml" % directory
class ChannelDiskSource(DiskSource):
    """Per-channel XML source.

    The caller selects a channel label via setChannel(); the file then
    lives at <mount>/channels/<label>/channel.xml.
    """
    subdir = 'channels'

    def __init__(self, mountPoint):
        DiskSource.__init__(self, mountPoint)
        self.channel = None

    def setChannel(self, channel):
        self.channel = channel

    def list(self):
        """Return the labels of all channels present on disk."""
        channels_dir = self._getDir(create=0)
        if not os.path.isdir(channels_dir):
            # No channels available at all.
            return []
        return os.listdir(channels_dir)

    def _getFile(self, create=0):
        channel_dir = "%s/%s" % (self._getDir(create), self.channel)
        if create and not os.path.isdir(channel_dir):
            createPath(channel_dir)
        return os.path.join(channel_dir, self._file_name())

    @staticmethod
    def _file_name():
        return "channel.xml"
class ChannelCompsDiskSource(ChannelDiskSource):
    # Same per-channel layout, backed by comps.xml instead of channel.xml.
    @staticmethod
    def _file_name():
        return "comps.xml"
class ShortPackageDiskSource(DiskSource):
    """Abbreviated package metadata, one XML file per package ID.

    Files are sharded into subdirectories derived from a hash of the ID
    (see ``_hashID``).
    """
    subdir = "packages_short"

    def __init__(self, mountPoint):
        DiskSource.__init__(self, mountPoint)
        # Current package ID; set via setID() before use.
        self.id = None
        self._file_suffix = ".xml"

    def setID(self, pid):
        self.id = pid

    def has_key(self, pid):
        """Dict-style membership: is there a file (or .gz) for *pid*?"""
        saved_id = self.id
        self.id = pid
        path = self._getFile()
        self.id = saved_id
        if os.path.exists(path + '.gz') or os.path.exists(path):
            return 1
        return 0

    def _getFile(self, create=0):
        shard_dir = "%s/%s" % (self._getDir(create), self._hashID())
        if create and not os.path.exists(shard_dir):
            # Build the hashed shard directory on demand.
            createPath(shard_dir)
        return "%s/%s%s" % (shard_dir, self.id, self._file_suffix)

    def _hashID(self):
        # Shard path component derived from the package ID (depth 2).
        return hash_object_id(self.id, 2)
class PackageDiskSource(ShortPackageDiskSource):
    # Full package metadata; identical sharded layout, different subdir.
    subdir = "packages"
class SourcePackageDiskSource(ShortPackageDiskSource):
    # Source-package metadata; identical sharded layout, different subdir.
    subdir = "source_packages"
class ErrataDiskSource(ShortPackageDiskSource):
    # Erratum documents; uses hash depth 1 where packages use 2
    # (see hash_object_id).
    subdir = "errata"
    def _hashID(self):
        # Hashes the erratum name
        return hash_object_id(self.id, 1)
class BlacklistsDiskSource(DiskSource):
    """Source for the blacklists.xml document."""
    subdir = "blacklists"

    def _getFile(self, create=0):
        """Return the blacklists.xml path, creating dirs on demand."""
        directory = self._getDir(create)
        if create and not os.path.isdir(directory):
            createPath(directory)
        return "%s/blacklists.xml" % directory
class BinaryRPMDiskSource(ShortPackageDiskSource):
    # Actual .rpm payloads rather than XML metadata; same sharded layout.
    subdir = "rpms"
    def __init__(self, mountPoint):
        ShortPackageDiskSource.__init__(self, mountPoint)
        # Files end in .rpm instead of the inherited .xml suffix.
        self._file_suffix = '.rpm'
class SourceRPMDiskSource(BinaryRPMDiskSource):
    # Source RPM payloads; same .rpm suffix, different subdir.
    subdir = "srpms"
class KickstartDataDiskSource(DiskSource):
    """Kickstart-tree metadata: one <label>.xml file per kickstart label."""
    subdir = "kickstart_trees"

    def __init__(self, mountPoint):
        DiskSource.__init__(self, mountPoint)
        # Kickstart label; set via setID() before use.
        self.id = None

    def setID(self, ks_label):
        self.id = ks_label

    def _getFile(self, create=0):
        base_dir = self._getDir(create)
        if create and not os.path.isdir(base_dir):
            createPath(base_dir)
        return os.path.join(base_dir, self.id) + '.xml'
class KickstartFileDiskSource(KickstartDataDiskSource):
    """Raw kickstart files addressed by label + relative path.

    Served verbatim, so compressed (.gz) variants are not consulted.
    """
    subdir = "kickstart_files"
    allow_compressed_files = 0
    def __init__(self, mountPoint):
        KickstartDataDiskSource.__init__(self, mountPoint)
        # the file's relative path
        self.relative_path = None
    def set_relative_path(self, relative_path):
        self.relative_path = relative_path
    def _getFile(self, create=0):
        # Layout: <mount>/<subdir>/<label>/<relative_path>
        path = os.path.join(self._getDir(create), self.id,
                            self.relative_path)
        dirname = os.path.dirname(path)
        if create and not os.path.isdir(dirname):
            createPath(dirname)
        return path
class MetadataDiskSource:
    """Facade exposing one loader per metadata document type, all rooted
    at the same mount point."""
    def __init__(self, mountpoint):
        self.mountpoint = mountpoint
    @staticmethod
    def is_disk_loader():
        # presumably distinguishes this disk-backed loader from other
        # implementations — confirm at call sites.
        return True
    def getArchesXmlStream(self):
        return ArchesDiskSource(self.mountpoint).load()
    def getArchesExtraXmlStream(self):
        return ArchesExtraDiskSource(self.mountpoint).load()
    def getChannelFamilyXmlStream(self):
        return ChannelFamilyDiskSource(self.mountpoint).load()
    def getOrgsXmlStream(self):
        return OrgsDiskSource(self.mountpoint).load()
    def getProductNamesXmlStream(self):
        return ProductnamesDiskSource(self.mountpoint).load()
    def getComps(self, label):
        # comps document for one specific channel label.
        sourcer = ChannelCompsDiskSource(self.mountpoint)
        sourcer.setChannel(label)
        return sourcer.load()
    def getChannelXmlStream(self):
        # Load every channel found on disk; one open stream per channel.
        sourcer = ChannelDiskSource(self.mountpoint)
        channels = sourcer.list()
        stream_list = []
        for c in channels:
            sourcer.setChannel(c)
            stream_list.append(sourcer.load())
        return stream_list
    # The remaining accessors return source objects (not open streams);
    # callers set the ID and call load() themselves.
    def getChannelShortPackagesXmlStream(self):
        return ShortPackageDiskSource(self.mountpoint)
    def getPackageXmlStream(self):
        return PackageDiskSource(self.mountpoint)
    def getSourcePackageXmlStream(self):
        return SourcePackageDiskSource(self.mountpoint)
    def getKickstartsXmlStream(self):
        return KickstartDataDiskSource(self.mountpoint)
    def getErrataXmlStream(self):
        return ErrataDiskSource(self.mountpoint)
if __name__ == '__main__':
    # TEST CODE
    # Ad-hoc smoke test: list channels under /tmp and load one of them.
    s = ChannelDiskSource("/tmp")
    print(s.list())
    s.setChannel("redhat-linux-i386-7.2")
    print(s.load())
| gpl-2.0 |
noironetworks/nova | nova/tests/functional/api_sample_tests/test_hypervisors.py | 17 | 6546 | # Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
from nova.cells import utils as cells_utils
from nova.compute import api as compute_api
from nova.compute import cells_api as cells_api
from nova import objects
from nova.tests.functional.api_sample_tests import api_sample_base
CONF = cfg.CONF
CONF.import_opt('osapi_compute_extension',
'nova.api.openstack.compute.legacy_v2.extensions')
class HypervisorsSampleJsonTests(api_sample_base.ApiSampleTestBaseV21):
    """API-sample tests for the os-hypervisors admin endpoints: each test
    issues a GET and verifies the response against a stored sample."""
    ADMIN_API = True
    extension_name = "os-hypervisors"
    def _get_flags(self):
        # Enable the legacy-v2 hypervisor extensions needed by the samples.
        f = super(HypervisorsSampleJsonTests, self)._get_flags()
        f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
        f['osapi_compute_extension'].append(
            'nova.api.openstack.compute.contrib.hypervisors.Hypervisors')
        f['osapi_compute_extension'].append(
            'nova.api.openstack.compute.contrib.extended_hypervisors.'
            'Extended_hypervisors')
        f['osapi_compute_extension'].append(
            'nova.api.openstack.compute.contrib.hypervisor_status.'
            'Hypervisor_status')
        return f
    def test_hypervisors_list(self):
        response = self._do_get('os-hypervisors')
        self._verify_response('hypervisors-list-resp', {}, response, 200)
    def test_hypervisors_search(self):
        response = self._do_get('os-hypervisors/fake/search')
        self._verify_response('hypervisors-search-resp', {}, response, 200)
    def test_hypervisors_without_servers(self):
        response = self._do_get('os-hypervisors/fake/servers')
        self._verify_response('hypervisors-without-servers-resp',
                              {}, response, 200)
    @mock.patch("nova.compute.api.HostAPI.instance_get_all_by_host")
    def test_hypervisors_with_servers(self, mock_instance_get):
        # Stub two instances on the host so the 'servers' key is populated.
        instance = [
            {
                "deleted": None,
                "name": "test_server1",
                "uuid": "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa"
            },
            {
                "deleted": None,
                "name": "test_server2",
                "uuid": "bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb"
            }]
        mock_instance_get.return_value = instance
        response = self._do_get('os-hypervisors/fake/servers')
        self._verify_response('hypervisors-with-servers-resp', {},
                              response, 200)
    def test_hypervisors_detail(self):
        hypervisor_id = 1
        subs = {
            'hypervisor_id': hypervisor_id
        }
        response = self._do_get('os-hypervisors/detail')
        subs.update(self._get_regexes())
        self._verify_response('hypervisors-detail-resp', subs, response, 200)
    def test_hypervisors_show(self):
        hypervisor_id = 1
        subs = {
            'hypervisor_id': hypervisor_id
        }
        response = self._do_get('os-hypervisors/%s' % hypervisor_id)
        subs.update(self._get_regexes())
        self._verify_response('hypervisors-show-resp', subs, response, 200)
    def test_hypervisors_statistics(self):
        response = self._do_get('os-hypervisors/statistics')
        self._verify_response('hypervisors-statistics-resp', {}, response, 200)
    def test_hypervisors_uptime(self):
        # Stub the host uptime so the sample output is deterministic.
        def fake_get_host_uptime(self, context, hyp):
            return (" 08:32:11 up 93 days, 18:25, 12 users, load average:"
                    " 0.20, 0.12, 0.14")
        self.stubs.Set(compute_api.HostAPI,
                       'get_host_uptime', fake_get_host_uptime)
        hypervisor_id = 1
        response = self._do_get('os-hypervisors/%s/uptime' % hypervisor_id)
        subs = {
            'hypervisor_id': hypervisor_id,
        }
        self._verify_response('hypervisors-uptime-resp', subs, response, 200)
# Class-level patch: every service reports as 'up' in all tests below.
@mock.patch("nova.servicegroup.API.service_is_up", return_value=True)
class HypervisorsCellsSampleJsonTests(api_sample_base.ApiSampleTestBaseV21):
    """os-hypervisors API samples with cells v1 enabled (see setUp)."""
    ADMIN_API = True
    extension_name = "os-hypervisors"
    def _get_flags(self):
        # Enable the legacy-v2 hypervisor extensions needed by the samples.
        f = super(HypervisorsCellsSampleJsonTests, self)._get_flags()
        f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
        f['osapi_compute_extension'].append(
            'nova.api.openstack.compute.contrib.hypervisors.Hypervisors')
        f['osapi_compute_extension'].append(
            'nova.api.openstack.compute.contrib.hypervisor_status.'
            'Hypervisor_status')
        return f
    def setUp(self):
        # Run as an API cell so cells_api.HostAPI handles the requests.
        self.flags(enable=True, cell_type='api', group='cells')
        super(HypervisorsCellsSampleJsonTests, self).setUp()
    def test_hypervisor_uptime(self, mocks):
        fake_hypervisor = objects.ComputeNode(id=1, host='fake-mini',
                                              hypervisor_hostname='fake-mini')
        # Stub uptime, node lookup, and service lookup so the request can
        # be served without a real cells deployment.
        def fake_get_host_uptime(self, context, hyp):
            return (" 08:32:11 up 93 days, 18:25, 12 users, load average:"
                    " 0.20, 0.12, 0.14")
        def fake_compute_node_get(self, context, hyp):
            return fake_hypervisor
        def fake_service_get_by_compute_host(self, context, host):
            return cells_utils.ServiceProxy(
                objects.Service(id=1, host='fake-mini', disabled=False,
                                disabled_reason=None),
                'cell1')
        self.stubs.Set(cells_api.HostAPI, 'compute_node_get',
                       fake_compute_node_get)
        self.stubs.Set(cells_api.HostAPI, 'service_get_by_compute_host',
                       fake_service_get_by_compute_host)
        self.stubs.Set(cells_api.HostAPI,
                       'get_host_uptime', fake_get_host_uptime)
        hypervisor_id = fake_hypervisor['id']
        response = self._do_get('os-hypervisors/%s/uptime' % hypervisor_id)
        subs = {'hypervisor_id': hypervisor_id}
        self._verify_response('hypervisors-uptime-resp', subs, response, 200)
| apache-2.0 |
d-nox/iHummeln | iHummeln/www/js/OpenLayers/tools/BeautifulSoup.py | 1 | 69013 | """Beautiful Soup
Elixir and Tonic
"The Screen-Scraper's Friend"
http://www.crummy.com/software/BeautifulSoup/
Beautiful Soup parses a (possibly invalid) XML or HTML document into a
tree representation. It provides methods and Pythonic idioms that make
it easy to navigate, search, and modify the tree.
A well-formed XML/HTML document yields a well-formed data
structure. An ill-formed XML/HTML document yields a correspondingly
ill-formed data structure. If your document is only locally
well-formed, you can use this library to find and process the
well-formed part of it. The BeautifulSoup class
Beautiful Soup works with Python 2.2 and up. It has no external
dependencies, but you'll have more success at converting data to UTF-8
if you also install these three packages:
* chardet, for auto-detecting character encodings
http://chardet.feedparser.org/
* cjkcodecs and iconv_codec, which add more encodings to the ones supported
by stock Python.
http://cjkpython.i18n.org/
Beautiful Soup defines classes for two main parsing strategies:
* BeautifulStoneSoup, for parsing XML, SGML, or your domain-specific
language that kind of looks like XML.
* BeautifulSoup, for parsing run-of-the-mill HTML code, be it valid
or invalid. This class has web browser-like heuristics for
obtaining a sensible parse tree in the face of common HTML errors.
Beautiful Soup also defines a class (UnicodeDammit) for autodetecting
the encoding of an HTML or XML document, and converting it to
Unicode. Much of this code is taken from Mark Pilgrim's Universal Feed Parser.
For more than you ever wanted to know about Beautiful Soup, see the
documentation:
http://www.crummy.com/software/BeautifulSoup/documentation.html
"""
from __future__ import generators
__author__ = "Leonard Richardson (leonardr@segfault.org)"
__version__ = "3.0.4"
__copyright__ = "Copyright (c) 2004-2007 Leonard Richardson"
__license__ = "PSF"
from sgmllib import SGMLParser, SGMLParseError
import codecs
import types
import re
import sgmllib
try:
from htmlentitydefs import name2codepoint
except ImportError:
name2codepoint = {}
#This hack makes Beautiful Soup able to parse XML with namespaces
sgmllib.tagfind = re.compile('[a-zA-Z][-_.:a-zA-Z0-9]*')
DEFAULT_OUTPUT_ENCODING = "utf-8"
# First, the classes that represent markup elements.
class PageElement:
"""Contains the navigational information for some part of the page
(either a tag or a piece of text)"""
def setup(self, parent=None, previous=None):
"""Sets up the initial relations between this element and
other elements."""
self.parent = parent
self.previous = previous
self.next = None
self.previousSibling = None
self.nextSibling = None
if self.parent and self.parent.contents:
self.previousSibling = self.parent.contents[-1]
self.previousSibling.nextSibling = self
    def replaceWith(self, replaceWith):
        """Replace this element in the tree with *replaceWith*, keeping
        it at the same index within the parent's contents."""
        oldParent = self.parent
        myIndex = self.parent.contents.index(self)
        if hasattr(replaceWith, 'parent') and replaceWith.parent == self.parent:
            # We're replacing this element with one of its siblings.
            index = self.parent.contents.index(replaceWith)
            if index and index < myIndex:
                # Furthermore, it comes before this element. That
                # means that when we extract it, the index of this
                # element will change.
                # NOTE(review): the truthiness test skips index 0; the
                # same `if index` check exists in insert(), so the two
                # appear consistent — confirm for a sibling at index 0.
                myIndex = myIndex - 1
        self.extract()
        oldParent.insert(myIndex, replaceWith)
    def extract(self):
        """Destructively rips this element out of the tree."""
        if self.parent:
            try:
                self.parent.contents.remove(self)
            except ValueError:
                pass
        #Find the two elements that would be next to each other if
        #this element (and any children) hadn't been parsed. Connect
        #the two.
        lastChild = self._lastRecursiveChild()
        nextElement = lastChild.next
        # Splice this subtree out of the parse-order chain...
        if self.previous:
            self.previous.next = nextElement
        if nextElement:
            nextElement.previous = self.previous
        self.previous = None
        lastChild.next = None
        self.parent = None
        # ...and out of the sibling chain.
        if self.previousSibling:
            self.previousSibling.nextSibling = self.nextSibling
        if self.nextSibling:
            self.nextSibling.previousSibling = self.previousSibling
        self.previousSibling = self.nextSibling = None
def _lastRecursiveChild(self):
"Finds the last element beneath this object to be parsed."
lastChild = self
while hasattr(lastChild, 'contents') and lastChild.contents:
lastChild = lastChild.contents[-1]
return lastChild
def insert(self, position, newChild):
if (isinstance(newChild, basestring)
or isinstance(newChild, unicode)) \
and not isinstance(newChild, NavigableString):
newChild = NavigableString(newChild)
position = min(position, len(self.contents))
if hasattr(newChild, 'parent') and newChild.parent != None:
# We're 'inserting' an element that's already one
# of this object's children.
if newChild.parent == self:
index = self.find(newChild)
if index and index < position:
# Furthermore we're moving it further down the
# list of this object's children. That means that
# when we extract this element, our target index
# will jump down one.
position = position - 1
newChild.extract()
newChild.parent = self
previousChild = None
if position == 0:
newChild.previousSibling = None
newChild.previous = self
else:
previousChild = self.contents[position-1]
newChild.previousSibling = previousChild
newChild.previousSibling.nextSibling = newChild
newChild.previous = previousChild._lastRecursiveChild()
if newChild.previous:
newChild.previous.next = newChild
newChildsLastElement = newChild._lastRecursiveChild()
if position >= len(self.contents):
newChild.nextSibling = None
parent = self
parentsNextSibling = None
while not parentsNextSibling:
parentsNextSibling = parent.nextSibling
parent = parent.parent
if not parent: # This is the last element in the document.
break
if parentsNextSibling:
newChildsLastElement.next = parentsNextSibling
else:
newChildsLastElement.next = None
else:
nextChild = self.contents[position]
newChild.nextSibling = nextChild
if newChild.nextSibling:
newChild.nextSibling.previousSibling = newChild
newChildsLastElement.next = nextChild
if newChildsLastElement.next:
newChildsLastElement.next.previous = newChildsLastElement
self.contents.insert(position, newChild)
def findNext(self, name=None, attrs={}, text=None, **kwargs):
"""Returns the first item that matches the given criteria and
appears after this Tag in the document."""
return self._findOne(self.findAllNext, name, attrs, text, **kwargs)
def findAllNext(self, name=None, attrs={}, text=None, limit=None,
**kwargs):
"""Returns all items that match the given criteria and appear
before after Tag in the document."""
return self._findAll(name, attrs, text, limit, self.nextGenerator)
def findNextSibling(self, name=None, attrs={}, text=None, **kwargs):
"""Returns the closest sibling to this Tag that matches the
given criteria and appears after this Tag in the document."""
return self._findOne(self.findNextSiblings, name, attrs, text,
**kwargs)
def findNextSiblings(self, name=None, attrs={}, text=None, limit=None,
**kwargs):
"""Returns the siblings of this Tag that match the given
criteria and appear after this Tag in the document."""
return self._findAll(name, attrs, text, limit,
self.nextSiblingGenerator, **kwargs)
fetchNextSiblings = findNextSiblings # Compatibility with pre-3.x
def findPrevious(self, name=None, attrs={}, text=None, **kwargs):
"""Returns the first item that matches the given criteria and
appears before this Tag in the document."""
return self._findOne(self.findAllPrevious, name, attrs, text, **kwargs)
def findAllPrevious(self, name=None, attrs={}, text=None, limit=None,
**kwargs):
"""Returns all items that match the given criteria and appear
before this Tag in the document."""
return self._findAll(name, attrs, text, limit, self.previousGenerator,
**kwargs)
fetchPrevious = findAllPrevious # Compatibility with pre-3.x
def findPreviousSibling(self, name=None, attrs={}, text=None, **kwargs):
"""Returns the closest sibling to this Tag that matches the
given criteria and appears before this Tag in the document."""
return self._findOne(self.findPreviousSiblings, name, attrs, text,
**kwargs)
def findPreviousSiblings(self, name=None, attrs={}, text=None,
limit=None, **kwargs):
"""Returns the siblings of this Tag that match the given
criteria and appear before this Tag in the document."""
return self._findAll(name, attrs, text, limit,
self.previousSiblingGenerator, **kwargs)
fetchPreviousSiblings = findPreviousSiblings # Compatibility with pre-3.x
def findParent(self, name=None, attrs={}, **kwargs):
"""Returns the closest parent of this Tag that matches the given
criteria."""
# NOTE: We can't use _findOne because findParents takes a different
# set of arguments.
r = None
l = self.findParents(name, attrs, 1)
if l:
r = l[0]
return r
def findParents(self, name=None, attrs={}, limit=None, **kwargs):
"""Returns the parents of this Tag that match the given
criteria."""
return self._findAll(name, attrs, None, limit, self.parentGenerator,
**kwargs)
fetchParents = findParents # Compatibility with pre-3.x
#These methods do the real heavy lifting.
def _findOne(self, method, name, attrs, text, **kwargs):
r = None
l = method(name, attrs, text, 1, **kwargs)
if l:
r = l[0]
return r
def _findAll(self, name, attrs, text, limit, generator, **kwargs):
"Iterates over a generator looking for things that match."
if isinstance(name, SoupStrainer):
strainer = name
else:
# Build a SoupStrainer
strainer = SoupStrainer(name, attrs, text, **kwargs)
results = ResultSet(strainer)
g = generator()
while True:
try:
i = g.next()
except StopIteration:
break
if i:
found = strainer.search(i)
if found:
results.append(found)
if limit and len(results) >= limit:
break
return results
#These Generators can be used to navigate starting from both
#NavigableStrings and Tags.
def nextGenerator(self):
i = self
while i:
i = i.next
yield i
def nextSiblingGenerator(self):
i = self
while i:
i = i.nextSibling
yield i
def previousGenerator(self):
i = self
while i:
i = i.previous
yield i
def previousSiblingGenerator(self):
i = self
while i:
i = i.previousSibling
yield i
def parentGenerator(self):
i = self
while i:
i = i.parent
yield i
# Utility methods
def substituteEncoding(self, str, encoding=None):
encoding = encoding or "utf-8"
return str.replace("%SOUP-ENCODING%", encoding)
def toEncoding(self, s, encoding=None):
"""Encodes an object to a string in some encoding, or to Unicode.
."""
if isinstance(s, unicode):
if encoding:
s = s.encode(encoding)
elif isinstance(s, str):
if encoding:
s = s.encode(encoding)
else:
s = unicode(s)
else:
if encoding:
s = self.toEncoding(str(s), encoding)
else:
s = unicode(s)
return s
class NavigableString(unicode, PageElement):
    """A Unicode string that also carries PageElement navigation data.
    (Subclasses the Python 2 'unicode' type, so it can be used anywhere
    a plain string can.)"""
    def __getattr__(self, attr):
        """text.string gives you text. This is for backwards
        compatibility for Navigable*String, but for CData* it lets you
        get the string without the CData wrapper."""
        if attr == 'string':
            return self
        else:
            raise AttributeError, "'%s' object has no attribute '%s'" % (self.__class__.__name__, attr)
    def __unicode__(self):
        # Passing None for the encoding makes __str__ return Unicode.
        return self.__str__(None)
    def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
        """Returns this string encoded to the given encoding, or as
        Unicode if encoding is None."""
        if encoding:
            return self.encode(encoding)
        else:
            return self
class CData(NavigableString):
    """A NavigableString that renders wrapped in a CDATA section."""
    def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
        # Encode the raw text first, then add the CDATA delimiters.
        inner = NavigableString.__str__(self, encoding)
        return "<![CDATA[%s]]>" % inner
class ProcessingInstruction(NavigableString):
    """A NavigableString that renders as a processing instruction,
    e.g. <?xml ...?>."""
    def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
        text = self
        # A %SOUP-ENCODING% placeholder gets the real output encoding
        # substituted in at render time.
        if "%SOUP-ENCODING%" in text:
            text = self.substituteEncoding(text, encoding)
        return "<?%s?>" % self.toEncoding(text, encoding)
class Comment(NavigableString):
    """A NavigableString that renders as an HTML/XML comment."""
    def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
        rendered = NavigableString.__str__(self, encoding)
        return "<!--" + rendered + "-->"
class Declaration(NavigableString):
    """A NavigableString that renders as an SGML declaration,
    e.g. a DOCTYPE."""
    def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING):
        return "<!" + NavigableString.__str__(self, encoding) + ">"
class Tag(PageElement):
    """Represents a found HTML tag with its attributes and contents."""
    # NOTE(review): "squot" and "quote" are not standard XML entities
    # (the predefined names are "apos" and "quot"); kept as-is since the
    # rendering code and XML_ENTITY_LIST both depend on these values.
    XML_SPECIAL_CHARS_TO_ENTITIES = { "'" : "squot",
                                      '"' : "quote",
                                      "&" : "amp",
                                      "<" : "lt",
                                      ">" : "gt" }
    def __init__(self, parser, name, attrs=None, parent=None,
                 previous=None):
        "Basic constructor."
        # We don't actually store the parser object: that lets extracted
        # chunks be garbage-collected
        self.parserClass = parser.__class__
        self.isSelfClosing = parser.isSelfClosingTag(name)
        self.name = name
        if attrs == None:
            attrs = []
        # Attributes live in a list of (key, value) pairs so duplicates
        # and source order survive; _getAttrMap builds a dict on demand.
        self.attrs = attrs
        self.contents = []
        self.setup(parent, previous)
        self.hidden = False
        self.containsSubstitutions = False
    def get(self, key, default=None):
        """Returns the value of the 'key' attribute for the tag, or
        the value given for 'default' if it doesn't have that
        attribute."""
        return self._getAttrMap().get(key, default)
    def has_key(self, key):
        # True if the tag has an attribute named 'key'.
        return self._getAttrMap().has_key(key)
    def __getitem__(self, key):
        """tag[key] returns the value of the 'key' attribute for the tag,
        and throws an exception if it's not there."""
        return self._getAttrMap()[key]
    def __iter__(self):
        "Iterating over a tag iterates over its contents."
        return iter(self.contents)
    def __len__(self):
        "The length of a tag is the length of its list of contents."
        return len(self.contents)
    def __contains__(self, x):
        # 'x in tag' tests direct children only, not descendants.
        return x in self.contents
    def __nonzero__(self):
        "A tag is non-None even if it has no contents."
        return True
    def __setitem__(self, key, value):
        """Setting tag[key] sets the value of the 'key' attribute for the
        tag."""
        # Both the attrs pair-list and the attrMap cache must be kept
        # in sync.
        self._getAttrMap()
        self.attrMap[key] = value
        found = False
        for i in range(0, len(self.attrs)):
            if self.attrs[i][0] == key:
                self.attrs[i] = (key, value)
                found = True
        if not found:
            self.attrs.append((key, value))
        self._getAttrMap()[key] = value
    def __delitem__(self, key):
        "Deleting tag[key] deletes all 'key' attributes for the tag."
        for item in self.attrs:
            if item[0] == key:
                self.attrs.remove(item)
                #We don't break because bad HTML can define the same
                #attribute multiple times.
            self._getAttrMap()
            if self.attrMap.has_key(key):
                del self.attrMap[key]
    def __call__(self, *args, **kwargs):
        """Calling a tag like a function is the same as calling its
        findAll() method. Eg. tag('a') returns a list of all the A tags
        found within this tag."""
        # apply() is the Python 2 spelling of self.findAll(*args, **kwargs).
        return apply(self.findAll, args, kwargs)
    def __getattr__(self, tag):
        """tag.fooTag is shorthand for tag.find('foo'); tag.foo works
        too for non-dunder names."""
        #print "Getattr %s.%s" % (self.__class__, tag)
        if len(tag) > 3 and tag.rfind('Tag') == len(tag)-3:
            return self.find(tag[:-3])
        elif tag.find('__') != 0:
            return self.find(tag)
    def __eq__(self, other):
        """Returns true iff this tag has the same name, the same attributes,
        and the same contents (recursively) as the given tag.
        NOTE: right now this will return false if two tags have the
        same attributes in a different order. Should this be fixed?"""
        if not hasattr(other, 'name') or not hasattr(other, 'attrs') or not hasattr(other, 'contents') or self.name != other.name or self.attrs != other.attrs or len(self) != len(other):
            return False
        for i in range(0, len(self.contents)):
            if self.contents[i] != other.contents[i]:
                return False
        return True
    def __ne__(self, other):
        """Returns true iff this tag is not identical to the other tag,
        as defined in __eq__."""
        return not self == other
    def __repr__(self, encoding=DEFAULT_OUTPUT_ENCODING):
        """Renders this tag as a string."""
        return self.__str__(encoding)
    def __unicode__(self):
        # Passing None for the encoding makes __str__ return Unicode.
        return self.__str__(None)
    def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING,
                prettyPrint=False, indentLevel=0):
        """Returns a string or Unicode representation of this tag and
        its contents. To get Unicode, pass None for encoding.
        NOTE: since Python's HTML parser consumes whitespace, this
        method is not certain to reproduce the whitespace present in
        the original string."""
        encodedName = self.toEncoding(self.name, encoding)
        attrs = []
        if self.attrs:
            for key, val in self.attrs:
                fmt = '%s="%s"'
                if isString(val):
                    if self.containsSubstitutions and '%SOUP-ENCODING%' in val:
                        val = self.substituteEncoding(val, encoding)
                    # The attribute value either:
                    #
                    # * Contains no embedded double quotes or single quotes.
                    #   No problem: we enclose it in double quotes.
                    # * Contains embedded single quotes. No problem:
                    #   double quotes work here too.
                    # * Contains embedded double quotes. No problem:
                    #   we enclose it in single quotes.
                    # * Embeds both single _and_ double quotes. This
                    #   can't happen naturally, but it can happen if
                    #   you modify an attribute value after parsing
                    #   the document. Now we have a bit of a
                    #   problem. We solve it by enclosing the
                    #   attribute in single quotes, and escaping any
                    #   embedded single quotes to XML entities.
                    if '"' in val:
                        fmt = "%s='%s'"
                        # This can't happen naturally, but it can happen
                        # if you modify an attribute value after parsing.
                        if "'" in val:
                            val = val.replace("'", "&squot;")
                    # Now we're okay w/r/t quotes. But the attribute
                    # value might also contain angle brackets, or
                    # ampersands that aren't part of entities. We need
                    # to escape those to XML entities too.
                    val = re.sub("([<>]|&(?![^\s]+;))",
                                 lambda x: "&" + self.XML_SPECIAL_CHARS_TO_ENTITIES[x.group(0)[0]] + ";",
                                 val)
                attrs.append(fmt % (self.toEncoding(key, encoding),
                                    self.toEncoding(val, encoding)))
        close = ''
        closeTag = ''
        if self.isSelfClosing:
            close = ' /'
        else:
            closeTag = '</%s>' % encodedName
        indentTag, indentContents = 0, 0
        if prettyPrint:
            indentTag = indentLevel
            space = (' ' * (indentTag-1))
            indentContents = indentTag + 1
        contents = self.renderContents(encoding, prettyPrint, indentContents)
        if self.hidden:
            # Hidden tags (e.g. the root [document] tag) render only
            # their contents, with no open/close markup.
            s = contents
        else:
            s = []
            attributeString = ''
            if attrs:
                attributeString = ' ' + ' '.join(attrs)
            if prettyPrint:
                s.append(space)
            s.append('<%s%s%s>' % (encodedName, attributeString, close))
            if prettyPrint:
                s.append("\n")
            s.append(contents)
            if prettyPrint and contents and contents[-1] != "\n":
                s.append("\n")
            if prettyPrint and closeTag:
                s.append(space)
            s.append(closeTag)
            if prettyPrint and closeTag and self.nextSibling:
                s.append("\n")
            s = ''.join(s)
        return s
    def prettify(self, encoding=DEFAULT_OUTPUT_ENCODING):
        """Renders this tag with one child per line, indented by depth."""
        return self.__str__(encoding, True)
    def renderContents(self, encoding=DEFAULT_OUTPUT_ENCODING,
                       prettyPrint=False, indentLevel=0):
        """Renders the contents of this tag as a string in the given
        encoding. If encoding is None, returns a Unicode string.."""
        s=[]
        for c in self:
            text = None
            if isinstance(c, NavigableString):
                text = c.__str__(encoding)
            elif isinstance(c, Tag):
                s.append(c.__str__(encoding, prettyPrint, indentLevel))
            if text and prettyPrint:
                text = text.strip()
            if text:
                if prettyPrint:
                    s.append(" " * (indentLevel-1))
                s.append(text)
                if prettyPrint:
                    s.append("\n")
        return ''.join(s)
    #Soup methods
    def find(self, name=None, attrs={}, recursive=True, text=None,
             **kwargs):
        """Return only the first child of this Tag matching the given
        criteria."""
        r = None
        l = self.findAll(name, attrs, recursive, text, 1, **kwargs)
        if l:
            r = l[0]
        return r
    findChild = find
    def findAll(self, name=None, attrs={}, recursive=True, text=None,
                limit=None, **kwargs):
        """Extracts a list of Tag objects that match the given
        criteria.  You can specify the name of the Tag and any
        attributes you want the Tag to have.
        The value of a key-value pair in the 'attrs' map can be a
        string, a list of strings, a regular expression object, or a
        callable that takes a string and returns whether or not the
        string matches for some custom definition of 'matches'. The
        same is true of the tag name."""
        generator = self.recursiveChildGenerator
        if not recursive:
            generator = self.childGenerator
        return self._findAll(name, attrs, text, limit, generator, **kwargs)
    findChildren = findAll
    # Pre-3.x compatibility methods
    first = find
    fetch = findAll
    def fetchText(self, text=None, recursive=True, limit=None):
        # Pre-3.x spelling of findAll(text=...).
        return self.findAll(text=text, recursive=recursive, limit=limit)
    def firstText(self, text=None, recursive=True):
        # Pre-3.x spelling of find(text=...).
        return self.find(text=text, recursive=recursive)
    #Utility methods
    def append(self, tag):
        """Appends the given tag to the contents of this tag."""
        self.contents.append(tag)
    #Private methods
    def _getAttrMap(self):
        """Initializes a map representation of this tag's attributes,
        if not already initialized."""
        # NOTE(review): getattr() without a default only works here
        # because __getattr__ returns a false value for 'attrMap' when
        # the attribute is unset; a child tag actually named 'attrmap'
        # would defeat the check.  getattr(self, 'attrMap', None) or a
        # __dict__ test would be safer -- confirm before changing.
        if not getattr(self, 'attrMap'):
            self.attrMap = {}
            for (key, value) in self.attrs:
                self.attrMap[key] = value
        return self.attrMap
    #Generator methods
    def childGenerator(self):
        """Yields each direct child of this tag in order."""
        for i in range(0, len(self.contents)):
            yield self.contents[i]
        # NOTE(review): 'raise StopIteration' inside a generator is
        # redundant in Python 2 and becomes a RuntimeError under
        # PEP 479 (Python 3.7+).
        raise StopIteration
    def recursiveChildGenerator(self):
        """Yields every descendant of this tag in document order,
        using an explicit (tag, resume-index) stack instead of
        recursion."""
        stack = [(self, 0)]
        while stack:
            tag, start = stack.pop()
            if isinstance(tag, Tag):
                for i in range(start, len(tag.contents)):
                    a = tag.contents[i]
                    yield a
                    if isinstance(a, Tag) and tag.contents:
                        # Descend into 'a'; remember where to resume
                        # in the current tag (if anywhere).
                        if i < len(tag.contents) - 1:
                            stack.append((tag, i+1))
                        stack.append((a, 0))
                        break
        # NOTE(review): see childGenerator -- PEP 479 hazard.
        raise StopIteration
# Next, a couple classes to represent queries and their results.
class SoupStrainer:
    """Encapsulates a number of ways of matching a markup element (tag or
    text)."""
    def __init__(self, name=None, attrs={}, text=None, **kwargs):
        self.name = name
        # A bare string for attrs is shorthand for matching the
        # 'class' attribute.
        if isString(attrs):
            kwargs['class'] = attrs
            attrs = None
        if kwargs:
            if attrs:
                # Copy before merging so the caller's dict isn't mutated.
                attrs = attrs.copy()
                attrs.update(kwargs)
            else:
                attrs = kwargs
        self.attrs = attrs
        self.text = text
    def __str__(self):
        if self.text:
            return self.text
        else:
            return "%s|%s" % (self.name, self.attrs)
    def searchTag(self, markupName=None, markupAttrs={}):
        """Matches a tag by name and attributes.  markupName may be a
        Tag object (whose attributes are then read via .get) or a
        plain tag-name string with a separate attrs mapping/list."""
        found = None
        markup = None
        if isinstance(markupName, Tag):
            markup = markupName
            markupAttrs = markup
        # A callable name is given the raw (name, attrs) pair instead
        # of being matched against the name string.
        callFunctionWithTagData = callable(self.name) \
                                and not isinstance(markupName, Tag)
        if (not self.name) \
               or callFunctionWithTagData \
               or (markup and self._matches(markup, self.name)) \
               or (not markup and self._matches(markupName, self.name)):
            if callFunctionWithTagData:
                match = self.name(markupName, markupAttrs)
            else:
                match = True
                markupAttrMap = None
                for attr, matchAgainst in self.attrs.items():
                    # Lazily build a dict view of the attributes (they
                    # may arrive as a list of pairs from sgmllib).
                    if not markupAttrMap:
                         if hasattr(markupAttrs, 'get'):
                            markupAttrMap = markupAttrs
                         else:
                            markupAttrMap = {}
                            for k,v in markupAttrs:
                                markupAttrMap[k] = v
                    attrValue = markupAttrMap.get(attr)
                    if not self._matches(attrValue, matchAgainst):
                        match = False
                        break
            if match:
                if markup:
                    found = markup
                else:
                    found = markupName
        return found
    def search(self, markup):
        """Dispatches on the markup's type: lists are scanned for a
        matching text element, Tags go through searchTag, and strings
        are matched against self.text."""
        #print 'looking for %s in %s' % (self, markup)
        found = None
        # If given a list of items, scan it for a text element that
        # matches.
        if isList(markup) and not isinstance(markup, Tag):
            for element in markup:
                if isinstance(element, NavigableString) \
                       and self.search(element):
                    found = element
                    break
        # If it's a Tag, make sure its name or attributes match.
        # Don't bother with Tags if we're searching for text.
        elif isinstance(markup, Tag):
            if not self.text:
                found = self.searchTag(markup)
        # If it's text, make sure the text matches.
        elif isinstance(markup, NavigableString) or \
                 isString(markup):
            if self._matches(markup, self.text):
                found = markup
        else:
            raise Exception, "I don't know how to match against a %s" \
                  % markup.__class__
        return found
    def _matches(self, markup, matchAgainst):
        """Core matcher: matchAgainst may be True (match anything
        non-None), a callable, a regexp object, a list, a dict, or a
        string."""
        #print "Matching %s against %s" % (markup, matchAgainst)
        result = False
        if matchAgainst == True and type(matchAgainst) == types.BooleanType:
            result = markup != None
        elif callable(matchAgainst):
            result = matchAgainst(markup)
        else:
            #Custom match methods take the tag as an argument, but all
            #other ways of matching match the tag name as a string.
            if isinstance(markup, Tag):
                markup = markup.name
            if markup and not isString(markup):
                markup = unicode(markup)
            #Now we know that chunk is either a string, or None.
            if hasattr(matchAgainst, 'match'):
                # It's a regexp object.
                result = markup and matchAgainst.search(markup)
            elif isList(matchAgainst):
                result = markup in matchAgainst
            elif hasattr(matchAgainst, 'items'):
                # NOTE(review): markup is a string (or None) here, so
                # markup.has_key() would raise AttributeError; this
                # looks like it was meant to be
                # matchAgainst.has_key(markup) -- confirm before
                # relying on dict-valued matches.
                result = markup.has_key(matchAgainst)
            elif matchAgainst and isString(markup):
                # Compare in matching string types to avoid spurious
                # str/unicode mismatches.
                if isinstance(markup, unicode):
                    matchAgainst = unicode(matchAgainst)
                else:
                    matchAgainst = str(matchAgainst)
            if not result:
                result = matchAgainst == markup
        return result
class ResultSet(list):
    """A ResultSet is just a list that keeps track of the SoupStrainer
    that created it."""
    def __init__(self, source):
        # Bug fix: the original called list.__init__([]), which
        # initialized a throwaway literal instead of this instance.
        # Harmless in practice (self starts empty anyway), but wrong.
        list.__init__(self)
        # The SoupStrainer that produced these results.
        self.source = source
# Now, some helper functions.
def isList(l):
    """Convenience method that works with all 2.x versions of Python
    to determine whether or not something is listlike."""
    # Anything iterable counts as listlike.
    if hasattr(l, '__iter__'):
        return True
    # Fall back to the concrete Python 2 list/tuple types.
    return type(l) in (types.ListType, types.TupleType)
def isString(s):
    """Convenience method that works with all 2.x versions of Python
    to determine whether or not something is stringlike."""
    try:
        # Bug fix: the original misspelled isinstance as 'isintance',
        # which raised NameError and was silently swallowed by the
        # handler below, reducing the check to str-only.
        return isinstance(s, unicode) or isinstance(s, basestring)
    except NameError:
        # Python builds without unicode/basestring: plain str only.
        return isinstance(s, str)
def buildTagMap(default, *args):
    """Turns a list of maps, lists, or scalars into a single map.
    Used to build the SELF_CLOSING_TAGS, NESTABLE_TAGS, and
    NESTING_RESET_TAGS maps out of lists and partial maps."""
    built = {}
    for portion in args:
        if hasattr(portion, 'items'):
            # A map: merge its entries verbatim.
            for key, value in portion.items():
                built[key] = value
        elif isList(portion):
            # A list: each item maps to the default value.
            for key in portion:
                built[key] = default
        else:
            # A scalar: map it straight to the default.
            built[portion] = default
    return built
# Now, the parser classes.
class BeautifulStoneSoup(Tag, SGMLParser):
    """This class contains the basic parser and search code. It defines
    a parser that knows nothing about tag behavior except for the
    following:
      You can't close a tag without closing all the tags it encloses.
      That is, "<foo><bar></foo>" actually means
      "<foo><bar></bar></foo>".
    [Another possible explanation is "<foo><bar /></foo>", but since
    this class defines no SELF_CLOSING_TAGS, it will never use that
    explanation.]
    This class is useful for parsing XML or made-up markup languages,
    or when BeautifulSoup makes an assumption counter to what you were
    expecting."""
    # The set of entity names (from XML_SPECIAL_CHARS_TO_ENTITIES
    # values) that get converted when convertEntities=XML_ENTITIES.
    XML_ENTITY_LIST = {}
    for i in Tag.XML_SPECIAL_CHARS_TO_ENTITIES.values():
        XML_ENTITY_LIST[i] = True
    SELF_CLOSING_TAGS = {}
    NESTABLE_TAGS = {}
    RESET_NESTING_TAGS = {}
    QUOTE_TAGS = {}
    # Regex fixups applied before feeding markup to sgmllib: add the
    # space sgmllib needs in "<br/>", and strip the stray whitespace
    # in "<! --comment-->".
    MARKUP_MASSAGE = [(re.compile('(<[^<>]*)/>'),
                       lambda x: x.group(1) + ' />'),
                      (re.compile('<!\s+([^<>]*)>'),
                       lambda x: '<!' + x.group(1) + '>')
                      ]
    # Name of the invisible root tag that holds the whole document.
    ROOT_TAG_NAME = u'[document]'
    HTML_ENTITIES = "html"
    XML_ENTITIES = "xml"
    def __init__(self, markup="", parseOnlyThese=None, fromEncoding=None,
                 markupMassage=True, smartQuotesTo=XML_ENTITIES,
                 convertEntities=None, selfClosingTags=None):
        """The Soup object is initialized as the 'root tag', and the
        provided markup (which can be a string or a file-like object)
        is fed into the underlying parser.
        sgmllib will process most bad HTML, and the BeautifulSoup
        class has some tricks for dealing with some HTML that kills
        sgmllib, but Beautiful Soup can nonetheless choke or lose data
        if your data uses self-closing tags or declarations
        incorrectly.
        By default, Beautiful Soup uses regexes to sanitize input,
        avoiding the vast majority of these problems. If the problems
        don't apply to you, pass in False for markupMassage, and
        you'll get better performance.
        The default parser massage techniques fix the two most common
        instances of invalid HTML that choke sgmllib:
         <br/> (No space between name of closing tag and tag close)
         <! --Comment--> (Extraneous whitespace in declaration)
        You can pass in a custom list of (RE object, replace method)
        tuples to get Beautiful Soup to scrub your input the way you
        want."""
        self.parseOnlyThese = parseOnlyThese
        self.fromEncoding = fromEncoding
        self.smartQuotesTo = smartQuotesTo
        self.convertEntities = convertEntities
        if self.convertEntities:
            # It doesn't make sense to convert encoded characters to
            # entities even while you're converting entities to Unicode.
            # Just convert it all to Unicode.
            self.smartQuotesTo = None
        self.instanceSelfClosingTags = buildTagMap(None, selfClosingTags)
        SGMLParser.__init__(self)
        if hasattr(markup, 'read'):        # It's a file-type object.
            markup = markup.read()
        self.markup = markup
        self.markupMassage = markupMassage
        try:
            self._feed()
        except StopParsing:
            pass
        self.markup = None                 # The markup can now be GCed
    def _feed(self, inDocumentEncoding=None):
        """Converts the stored markup to Unicode, applies the massage
        regexes, and runs it through the SGML parser."""
        # Convert the document to Unicode.
        markup = self.markup
        if isinstance(markup, unicode):
            if not hasattr(self, 'originalEncoding'):
                self.originalEncoding = None
        else:
            dammit = UnicodeDammit\
                     (markup, [self.fromEncoding, inDocumentEncoding],
                      smartQuotesTo=self.smartQuotesTo)
            markup = dammit.unicode
            self.originalEncoding = dammit.originalEncoding
        if markup:
            if self.markupMassage:
                # markupMassage=True means "use the default fixups";
                # a list means "use these fixups".
                if not isList(self.markupMassage):
                    self.markupMassage = self.MARKUP_MASSAGE
                for fix, m in self.markupMassage:
                    markup = fix.sub(m, markup)
        self.reset()
        SGMLParser.feed(self, markup)
        # Close out any unfinished strings and close all the open tags.
        self.endData()
        while self.currentTag.name != self.ROOT_TAG_NAME:
            self.popTag()
    def __getattr__(self, methodName):
        """This method routes method call requests to either the SGMLParser
        superclass or the Tag superclass, depending on the method name."""
        #print "__getattr__ called on %s.%s" % (self.__class__, methodName)
        if methodName.find('start_') == 0 or methodName.find('end_') == 0 \
               or methodName.find('do_') == 0:
            return SGMLParser.__getattr__(self, methodName)
        elif methodName.find('__') != 0:
            return Tag.__getattr__(self, methodName)
        else:
            raise AttributeError
    def isSelfClosingTag(self, name):
        """Returns true iff the given string is the name of a
        self-closing tag according to this parser."""
        return self.SELF_CLOSING_TAGS.has_key(name) \
               or self.instanceSelfClosingTags.has_key(name)
    def reset(self):
        """Re-initializes both superclasses so this object can parse a
        fresh document; the soup object itself becomes the root tag."""
        Tag.__init__(self, self, self.ROOT_TAG_NAME)
        self.hidden = 1
        SGMLParser.reset(self)
        self.currentData = []
        self.currentTag = None
        self.tagStack = []
        self.quoteStack = []
        self.pushTag(self)
    def popTag(self):
        """Pops the top tag off the open-tag stack and makes its parent
        the current tag."""
        tag = self.tagStack.pop()
        # Tags with just one string-owning child get the child as a
        # 'string' property, so that soup.tag.string is shorthand for
        # soup.tag.contents[0]
        if len(self.currentTag.contents) == 1 and \
           isinstance(self.currentTag.contents[0], NavigableString):
            self.currentTag.string = self.currentTag.contents[0]
        #print "Pop", tag.name
        if self.tagStack:
            self.currentTag = self.tagStack[-1]
        return self.currentTag
    def pushTag(self, tag):
        """Makes the given tag the current tag and a child of the
        previous current tag."""
        #print "Push", tag.name
        if self.currentTag:
            self.currentTag.append(tag)
        self.tagStack.append(tag)
        self.currentTag = self.tagStack[-1]
    def endData(self, containerClass=NavigableString):
        """Flushes the accumulated character data into the tree as an
        instance of containerClass, collapsing all-whitespace runs."""
        if self.currentData:
            currentData = ''.join(self.currentData)
            # Collapse pure whitespace to a single newline or space.
            if not currentData.strip():
                if '\n' in currentData:
                    currentData = '\n'
                else:
                    currentData = ' '
            self.currentData = []
            # With a parseOnlyThese strainer, top-level text that the
            # strainer rejects is discarded entirely.
            if self.parseOnlyThese and len(self.tagStack) <= 1 and \
                   (not self.parseOnlyThese.text or \
                    not self.parseOnlyThese.search(currentData)):
                return
            o = containerClass(currentData)
            o.setup(self.currentTag, self.previous)
            if self.previous:
                self.previous.next = o
            self.previous = o
            self.currentTag.contents.append(o)
    def _popToTag(self, name, inclusivePop=True):
        """Pops the tag stack up to and including the most recent
        instance of the given tag. If inclusivePop is false, pops the tag
        stack up to but *not* including the most recent instqance of
        the given tag."""
        #print "Popping to %s" % name
        if name == self.ROOT_TAG_NAME:
            # The root tag is never popped.
            return
        numPops = 0
        mostRecentTag = None
        # Scan from the top of the stack for the nearest matching tag
        # (index 0 is the root and is excluded).
        for i in range(len(self.tagStack)-1, 0, -1):
            if name == self.tagStack[i].name:
                numPops = len(self.tagStack)-i
                break
        if not inclusivePop:
            numPops = numPops - 1
        for i in range(0, numPops):
            mostRecentTag = self.popTag()
        return mostRecentTag
    def _smartPop(self, name):
        """We need to pop up to the previous tag of this type, unless
        one of this tag's nesting reset triggers comes between this
        tag and the previous tag of this type, OR unless this tag is a
        generic nesting trigger and another generic nesting trigger
        comes between this tag and the previous tag of this type.
        Examples:
         <p>Foo<b>Bar<p> should pop to 'p', not 'b'.
         <p>Foo<table>Bar<p> should pop to 'table', not 'p'.
         <p>Foo<table><tr>Bar<p> should pop to 'tr', not 'p'.
         <p>Foo<b>Bar<p> should pop to 'p', not 'b'.
         <li><ul><li> *<li>* should pop to 'ul', not the first 'li'.
         <tr><table><tr> *<tr>* should pop to 'table', not the first 'tr'
         <td><tr><td> *<td>* should pop to 'tr', not the first 'td'
        """
        nestingResetTriggers = self.NESTABLE_TAGS.get(name)
        isNestable = nestingResetTriggers != None
        isResetNesting = self.RESET_NESTING_TAGS.has_key(name)
        popTo = None
        inclusive = True
        for i in range(len(self.tagStack)-1, 0, -1):
            p = self.tagStack[i]
            if (not p or p.name == name) and not isNestable:
                #Non-nestable tags get popped to the top or to their
                #last occurance.
                popTo = name
                break
            if (nestingResetTriggers != None
                and p.name in nestingResetTriggers) \
                or (nestingResetTriggers == None and isResetNesting
                    and self.RESET_NESTING_TAGS.has_key(p.name)):
                #If we encounter one of the nesting reset triggers
                #peculiar to this tag, or we encounter another tag
                #that causes nesting to reset, pop up to but not
                #including that tag.
                popTo = p.name
                inclusive = False
                break
            # NOTE(review): this assignment has no effect -- p is
            # reassigned from the stack at the top of each iteration.
            p = p.parent
        if popTo:
            self._popToTag(popTo, inclusive)
    def unknown_starttag(self, name, attrs, selfClosing=0):
        """sgmllib callback for an open tag: builds a Tag node, after
        smart-popping any implicitly-closed ancestors."""
        #print "Start tag %s: %s" % (name, attrs)
        if self.quoteStack:
            #This is not a real tag.
            #print "<%s> is not real!" % name
            # NOTE: lambda(x, y) tuple-parameter unpacking is Python
            # 2-only syntax.
            attrs = ''.join(map(lambda(x, y): ' %s="%s"' % (x, y), attrs))
            self.handle_data('<%s%s>' % (name, attrs))
            return
        self.endData()
        if not self.isSelfClosingTag(name) and not selfClosing:
            self._smartPop(name)
        # With a parseOnlyThese strainer, top-level tags the strainer
        # rejects are skipped (their contents still get parsed).
        if self.parseOnlyThese and len(self.tagStack) <= 1 \
               and (self.parseOnlyThese.text or not self.parseOnlyThese.searchTag(name, attrs)):
            return
        tag = Tag(self, name, attrs, self.currentTag, self.previous)
        if self.previous:
            self.previous.next = tag
        self.previous = tag
        self.pushTag(tag)
        if selfClosing or self.isSelfClosingTag(name):
            self.popTag()
        if name in self.QUOTE_TAGS:
            #print "Beginning quote (%s)" % name
            # Inside a quote tag (e.g. <script>) nested markup is
            # treated as literal text until the matching close tag.
            self.quoteStack.append(name)
            self.literal = 1
        return tag
    def unknown_endtag(self, name):
        """sgmllib callback for a close tag: pops the stack back to the
        matching open tag."""
        #print "End tag %s" % name
        if self.quoteStack and self.quoteStack[-1] != name:
            #This is not a real end tag.
            #print "</%s> is not real!" % name
            self.handle_data('</%s>' % name)
            return
        self.endData()
        self._popToTag(name)
        if self.quoteStack and self.quoteStack[-1] == name:
            self.quoteStack.pop()
            self.literal = (len(self.quoteStack) > 0)
    def handle_data(self, data):
        # Character data is buffered until endData() flushes it.
        self.currentData.append(data)
    def _toStringSubclass(self, text, subclass):
        """Adds a certain piece of text to the tree as a NavigableString
        subclass."""
        self.endData()
        self.handle_data(text)
        self.endData(subclass)
    def handle_pi(self, text):
        """Handle a processing instruction as a ProcessingInstruction
        object, possibly one with a %SOUP-ENCODING% slot into which an
        encoding will be plugged later."""
        if text[:3] == "xml":
            text = "xml version='1.0' encoding='%SOUP-ENCODING%'"
        self._toStringSubclass(text, ProcessingInstruction)
    def handle_comment(self, text):
        "Handle comments as Comment objects."
        self._toStringSubclass(text, Comment)
    def handle_charref(self, ref):
        "Handle character references as data."
        if self.convertEntities in [self.HTML_ENTITIES,
                                    self.XML_ENTITIES]:
            data = unichr(int(ref))
        else:
            # Leave the reference as literal text.
            data = '&#%s;' % ref
        self.handle_data(data)
    def handle_entityref(self, ref):
        """Handle entity references as data, possibly converting known
        HTML entity references to the corresponding Unicode
        characters."""
        data = None
        if self.convertEntities == self.HTML_ENTITIES or \
               (self.convertEntities == self.XML_ENTITIES and \
                self.XML_ENTITY_LIST.get(ref)):
            try:
                data = unichr(name2codepoint[ref])
            except KeyError:
                # Unknown entity name: fall through and keep it literal.
                pass
        if not data:
            data = '&%s;' % ref
        self.handle_data(data)
    def handle_decl(self, data):
        "Handle DOCTYPEs and the like as Declaration objects."
        self._toStringSubclass(data, Declaration)
    def parse_declaration(self, i):
        """Treat a bogus SGML declaration as raw data. Treat a CDATA
        declaration as a CData object."""
        j = None
        if self.rawdata[i:i+9] == '<![CDATA[':
            # Extract the CDATA payload ourselves; sgmllib would choke.
            k = self.rawdata.find(']]>', i)
            if k == -1:
                k = len(self.rawdata)
            data = self.rawdata[i+9:k]
            j = k+3
            self._toStringSubclass(data, CData)
        else:
            try:
                j = SGMLParser.parse_declaration(self, i)
            except SGMLParseError:
                # Malformed declaration: swallow the rest as raw text.
                toHandle = self.rawdata[i:]
                self.handle_data(toHandle)
                j = i + len(toHandle)
        return j
class BeautifulSoup(BeautifulStoneSoup):
    """This parser knows the following facts about HTML:

    * Some tags have no closing tag and should be interpreted as being
      closed as soon as they are encountered.

    * The text inside some tags (ie. 'script') may contain tags which
      are not really part of the document and which should be parsed
      as text, not tags. If you want to parse the text as tags, you can
      always fetch it and parse it explicitly.

    * Tag nesting rules:

      Most tags can't be nested at all. For instance, the occurance of
      a <p> tag should implicitly close the previous <p> tag.

       <p>Para1<p>Para2
        should be transformed into:
       <p>Para1</p><p>Para2

      Some tags can be nested arbitrarily. For instance, the occurance
      of a <blockquote> tag should _not_ implicitly close the previous
      <blockquote> tag.

       Alice said: <blockquote>Bob said: <blockquote>Blah
        should NOT be transformed into:
       Alice said: <blockquote>Bob said: </blockquote><blockquote>Blah

      Some tags can be nested, but the nesting is reset by the
      interposition of other tags. For instance, a <tr> tag should
      implicitly close the previous <tr> tag within the same <table>,
      but not close a <tr> tag in another table.

       <table><tr>Blah<tr>Blah
        should be transformed into:
       <table><tr>Blah</tr><tr>Blah
        but,
       <tr>Blah<table><tr>Blah
        should NOT be transformed into
       <tr>Blah<table></tr><tr>Blah

    Differing assumptions about tag nesting rules are a major source
    of problems with the BeautifulSoup class. If BeautifulSoup is not
    treating as nestable a tag your page author treats as nestable,
    try ICantBelieveItsBeautifulSoup, MinimalSoup, or
    BeautifulStoneSoup before writing your own subclass."""

    def __init__(self, *args, **kwargs):
        # Default smart-quote handling to HTML entity conversion unless
        # the caller chose otherwise. (dict.has_key is Python 2 only.)
        if not kwargs.has_key('smartQuotesTo'):
            kwargs['smartQuotesTo'] = self.HTML_ENTITIES
        BeautifulStoneSoup.__init__(self, *args, **kwargs)

    # Tags that never take a closing tag.
    SELF_CLOSING_TAGS = buildTagMap(None,
                                    ['br' , 'hr', 'input', 'img', 'meta',
                                    'spacer', 'link', 'frame', 'base'])

    # The text inside these tags is treated literally (not parsed as HTML).
    QUOTE_TAGS = {'script': None}

    #According to the HTML standard, each of these inline tags can
    #contain another tag of the same type. Furthermore, it's common
    #to actually use these tags this way.
    NESTABLE_INLINE_TAGS = ['span', 'font', 'q', 'object', 'bdo', 'sub', 'sup',
                            'center']

    #According to the HTML standard, these block tags can contain
    #another tag of the same type. Furthermore, it's common
    #to actually use these tags this way.
    NESTABLE_BLOCK_TAGS = ['blockquote', 'div', 'fieldset', 'ins', 'del']

    #Lists can contain other lists, but there are restrictions.
    NESTABLE_LIST_TAGS = { 'ol' : [],
                           'ul' : [],
                           'li' : ['ul', 'ol'],
                           'dl' : [],
                           'dd' : ['dl'],
                           'dt' : ['dl'] }

    #Tables can contain other tables, but there are restrictions.
    NESTABLE_TABLE_TAGS = {'table' : [],
                           'tr' : ['table', 'tbody', 'tfoot', 'thead'],
                           'td' : ['tr'],
                           'th' : ['tr'],
                           'thead' : ['table'],
                           'tbody' : ['table'],
                           'tfoot' : ['table'],
                           }

    NON_NESTABLE_BLOCK_TAGS = ['address', 'form', 'p', 'pre']

    #If one of these tags is encountered, all tags up to the next tag of
    #this type are popped.
    RESET_NESTING_TAGS = buildTagMap(None, NESTABLE_BLOCK_TAGS, 'noscript',
                                     NON_NESTABLE_BLOCK_TAGS,
                                     NESTABLE_LIST_TAGS,
                                     NESTABLE_TABLE_TAGS)

    NESTABLE_TAGS = buildTagMap([], NESTABLE_INLINE_TAGS, NESTABLE_BLOCK_TAGS,
                                NESTABLE_LIST_TAGS, NESTABLE_TABLE_TAGS)

    # Used to detect the charset in a META tag; see start_meta
    CHARSET_RE = re.compile("((^|;)\s*charset=)([^;]*)")

    def start_meta(self, attrs):
        """Beautiful Soup can detect a charset included in a META tag,
        try to convert the document to that charset, and re-parse the
        document from the beginning."""
        httpEquiv = None
        contentType = None
        contentTypeIndex = None
        tagNeedsEncodingSubstitution = False

        # Pull out http-equiv and content, remembering where content was
        # so the attribute can be rewritten in place below.
        for i in range(0, len(attrs)):
            key, value = attrs[i]
            key = key.lower()
            if key == 'http-equiv':
                httpEquiv = value
            elif key == 'content':
                contentType = value
                contentTypeIndex = i

        if httpEquiv and contentType: # It's an interesting meta tag.
            match = self.CHARSET_RE.search(contentType)
            if match:
                # NOTE(review): getattr() here has no default, so an
                # AttributeError would escape if declaredHTMLEncoding were
                # never set -- presumably the base class initializes it;
                # confirm against BeautifulStoneSoup.
                if getattr(self, 'declaredHTMLEncoding') or \
                       (self.originalEncoding == self.fromEncoding):
                    # This is our second pass through the document, or
                    # else an encoding was specified explicitly and it
                    # worked. Rewrite the meta tag.
                    # (Parenthesized lambda argument: Python 2 only syntax.)
                    newAttr = self.CHARSET_RE.sub\
                              (lambda(match):match.group(1) +
                               "%SOUP-ENCODING%", value)
                    attrs[contentTypeIndex] = (attrs[contentTypeIndex][0],
                                               newAttr)
                    tagNeedsEncodingSubstitution = True
                else:
                    # This is our first pass through the document.
                    # Go through it again with the new information.
                    newCharset = match.group(3)
                    if newCharset and newCharset != self.originalEncoding:
                        self.declaredHTMLEncoding = newCharset
                        self._feed(self.declaredHTMLEncoding)
                        raise StopParsing
        tag = self.unknown_starttag("meta", attrs)
        if tag and tagNeedsEncodingSubstitution:
            tag.containsSubstitutions = True
class StopParsing(Exception):
    """Raised to abort the current parse, e.g. when a META tag reveals a
    new document encoding and the input must be re-fed from the start."""
class ICantBelieveItsBeautifulSoup(BeautifulSoup):
    """The BeautifulSoup class is oriented towards skipping over
    common HTML errors like unclosed tags. However, sometimes it makes
    errors of its own. For instance, consider this fragment:

     <b>Foo<b>Bar</b></b>

    This is perfectly valid (if bizarre) HTML. However, the
    BeautifulSoup class will implicitly close the first b tag when it
    encounters the second 'b'. It will think the author wrote
    "<b>Foo<b>Bar", and didn't close the first 'b' tag, because
    there's no real-world reason to bold something that's already
    bold. When it encounters '</b></b>' it will close two more 'b'
    tags, for a grand total of three tags closed instead of two. This
    can throw off the rest of your document structure. The same is
    true of a number of other tags, listed below.

    It's much more common for someone to forget to close a 'b' tag
    than to actually use nested 'b' tags, and the BeautifulSoup class
    handles the common case. This class handles the not-co-common
    case: where you can't believe someone wrote what they did, but
    it's valid HTML and BeautifulSoup screwed up by assuming it
    wouldn't be."""

    # Fix: 'strong' and 'big' were each listed twice; the duplicates were
    # redundant because buildTagMap keys on the tag name, so the resulting
    # map is unchanged.
    I_CANT_BELIEVE_THEYRE_NESTABLE_INLINE_TAGS = \
     ['em', 'big', 'i', 'small', 'tt', 'abbr', 'acronym', 'strong',
      'cite', 'code', 'dfn', 'kbd', 'samp', 'var', 'b']

    I_CANT_BELIEVE_THEYRE_NESTABLE_BLOCK_TAGS = ['noscript']

    # Everything BeautifulSoup already treats as nestable, plus the
    # hard-to-believe tags above.
    NESTABLE_TAGS = buildTagMap([], BeautifulSoup.NESTABLE_TAGS,
                                I_CANT_BELIEVE_THEYRE_NESTABLE_BLOCK_TAGS,
                                I_CANT_BELIEVE_THEYRE_NESTABLE_INLINE_TAGS)
class MinimalSoup(BeautifulSoup):
    """The MinimalSoup class is for parsing HTML that contains
    pathologically bad markup. It makes no assumptions about tag
    nesting, but it does know which tags are self-closing, that
    <script> tags contain Javascript and should not be parsed, that
    META tags may contain encoding information, and so on.

    This also makes it better for subclassing than BeautifulStoneSoup
    or BeautifulSoup."""

    # Only <noscript> resets nesting; no tags are treated as nestable.
    RESET_NESTING_TAGS = buildTagMap('noscript')
    NESTABLE_TAGS = {}
class BeautifulSOAP(BeautifulStoneSoup):
    """This class will push a tag with only a single string child into
    the tag's parent as an attribute. The attribute's name is the tag
    name, and the value is the string child. An example should give
    the flavor of the change:

    <foo><bar>baz</bar></foo>
     =>
    <foo bar="baz"><bar>baz</bar></foo>

    You can then access fooTag['bar'] instead of fooTag.barTag.string.

    This is, of course, useful for scraping structures that tend to
    use subelements instead of attributes, such as SOAP messages. Note
    that it modifies its input, so don't print the modified version
    out.

    I'm not sure how many people really want to use this class; let me
    know if you do. Mainly I like the name."""

    def popTag(self):
        # Before the normal pop, promote a tag whose only child is a
        # NavigableString into an attribute on its parent.
        if len(self.tagStack) > 1:
            tag = self.tagStack[-1]
            parent = self.tagStack[-2]
            parent._getAttrMap()
            # Fix: dict.has_key() is deprecated (removed in Python 3);
            # the `in` operator is equivalent and works on Python 2 too.
            if (isinstance(tag, Tag) and len(tag.contents) == 1 and
                isinstance(tag.contents[0], NavigableString) and
                tag.name not in parent.attrMap):
                parent[tag.name] = tag.contents[0]
        BeautifulStoneSoup.popTag(self)
#Enterprise class names! It has come to our attention that some people
#think the names of the Beautiful Soup parser classes are too silly
#and "unprofessional" for use in enterprise screen-scraping. We feel
#your pain! For such-minded folk, the Beautiful Soup Consortium And
#All-Night Kosher Bakery recommends renaming this file to
#"RobustParser.py" (or, in cases of extreme enterprisness,
#"RobustParserBeanInterface.class") and using the following
#enterprise-friendly class aliases:
class RobustXMLParser(BeautifulStoneSoup):
    """Enterprise-friendly alias for BeautifulStoneSoup."""
    pass
class RobustHTMLParser(BeautifulSoup):
    """Enterprise-friendly alias for BeautifulSoup."""
    pass
class RobustWackAssHTMLParser(ICantBelieveItsBeautifulSoup):
    """Enterprise-friendly alias for ICantBelieveItsBeautifulSoup."""
    pass
class RobustInsanelyWackAssHTMLParser(MinimalSoup):
    """Enterprise-friendly alias for MinimalSoup."""
    pass
class SimplifyingSOAPParser(BeautifulSOAP):
    """Enterprise-friendly alias for BeautifulSOAP."""
    pass
######################################################
#
# Bonus library: Unicode, Dammit
#
# This class forces XML data into a standard format (usually to UTF-8
# or Unicode). It is heavily based on code from Mark Pilgrim's
# Universal Feed Parser. It does not rewrite the XML or HTML to
# reflect a new encoding: that happens in BeautifulStoneSoup.handle_pi
# (XML) and BeautifulSoup.start_meta (HTML).
# Autodetects character encodings.
# Download from http://chardet.feedparser.org/
try:
    import chardet
    # import chardet.constants
    # chardet.constants._debug = 1
except:
    chardet = None
# NOTE(review): this unconditional assignment forces chardet off even when
# the import above succeeded -- presumably a deliberate local change to
# disable encoding autodetection in this deployment; confirm before removing.
chardet = None
# cjkcodecs and iconv_codec make Python know about more character encodings.
# Both are available from http://cjkpython.i18n.org/
# They're built in if you use Python 2.4.
# Both imports are best-effort: their absence only reduces codec coverage.
try:
    import cjkcodecs.aliases
except:
    pass
try:
    import iconv_codec
except:
    pass
class UnicodeDammit:
    """A class for detecting the encoding of a *ML document and
    converting it to a Unicode string. If the source encoding is
    windows-1252, can replace MS smart quotes with their HTML or XML
    equivalents."""

    # This dictionary maps commonly seen values for "charset" in HTML
    # meta tags to the corresponding Python codec names. It only covers
    # values that aren't in Python's aliases and can't be determined
    # by the heuristics in find_codec.
    CHARSET_ALIASES = { "macintosh" : "mac-roman",
                        "x-sjis" : "shift-jis" }

    def __init__(self, markup, overrideEncodings=[],
                 smartQuotesTo='xml'):
        # NOTE(review): overrideEncodings=[] is a mutable default; it is
        # only iterated, never mutated, so it is harmless here.
        self.markup, documentEncoding, sniffedEncoding = \
                     self._detectEncoding(markup)
        self.smartQuotesTo = smartQuotesTo
        self.triedEncodings = []
        if markup == '' or isinstance(markup, unicode):
            # Already Unicode (or empty): nothing to convert.
            self.originalEncoding = None
            self.unicode = unicode(markup)
            return

        u = None
        # Try caller-supplied encodings first, then the encoding declared
        # in the document, then the encoding sniffed from the byte stream.
        for proposedEncoding in overrideEncodings:
            u = self._convertFrom(proposedEncoding)
            if u: break
        if not u:
            for proposedEncoding in (documentEncoding, sniffedEncoding):
                u = self._convertFrom(proposedEncoding)
                if u: break

        # If no luck and we have auto-detection library, try that:
        # (chardet is force-disabled earlier in this file, so this branch
        # is currently dead -- see the module-level `chardet = None`.)
        if not u and chardet and not isinstance(self.markup, unicode):
            u = self._convertFrom(chardet.detect(self.markup)['encoding'])

        # As a last resort, try utf-8 and windows-1252:
        if not u:
            for proposed_encoding in ("utf-8", "windows-1252"):
                u = self._convertFrom(proposed_encoding)
                if u: break
        # May remain None when every candidate encoding failed.
        self.unicode = u
        if not u: self.originalEncoding = None

    def _subMSChar(self, orig):
        """Changes a MS smart quote character to an XML or HTML
        entity."""
        sub = self.MS_CHARS.get(orig)
        # Tuple entries are (entity-name, hex-codepoint); plain strings
        # (e.g. '?') are used verbatim.
        if type(sub) == types.TupleType:
            if self.smartQuotesTo == 'xml':
                sub = '&#x%s;' % sub[1]
            else:
                sub = '&%s;' % sub[0]
        return sub

    def _convertFrom(self, proposed):
        # Resolve the name to a real codec; skip codecs already tried.
        proposed = self.find_codec(proposed)
        if not proposed or proposed in self.triedEncodings:
            return None
        self.triedEncodings.append(proposed)
        markup = self.markup

        # Convert smart quotes to HTML if coming from an encoding
        # that might have them.
        # (Parenthesized lambda argument: Python 2 only syntax.)
        if self.smartQuotesTo and proposed.lower() in("windows-1252",
                                                      "iso-8859-1",
                                                      "iso-8859-2"):
            markup = re.compile("([\x80-\x9f])").sub \
                     (lambda(x): self._subMSChar(x.group(1)),
                      markup)

        try:
            # print "Trying to convert document to %s" % proposed
            u = self._toUnicode(markup, proposed)
            self.markup = u
            self.originalEncoding = proposed
        except Exception, e:
            # Decoding failed; caller moves on to the next candidate.
            # print "That didn't work!"
            # print e
            return None
        #print "Correct encoding: %s" % proposed
        return self.markup

    def _toUnicode(self, data, encoding):
        '''Given a string and its encoding, decodes the string into Unicode.
        %encoding is a string recognized by encodings.aliases'''

        # strip Byte Order Mark (if present)
        # A leading BOM overrides the proposed encoding.
        if (len(data) >= 4) and (data[:2] == '\xfe\xff') \
               and (data[2:4] != '\x00\x00'):
            encoding = 'utf-16be'
            data = data[2:]
        elif (len(data) >= 4) and (data[:2] == '\xff\xfe') \
                 and (data[2:4] != '\x00\x00'):
            encoding = 'utf-16le'
            data = data[2:]
        elif data[:3] == '\xef\xbb\xbf':
            encoding = 'utf-8'
            data = data[3:]
        elif data[:4] == '\x00\x00\xfe\xff':
            encoding = 'utf-32be'
            data = data[4:]
        elif data[:4] == '\xff\xfe\x00\x00':
            encoding = 'utf-32le'
            data = data[4:]
        newdata = unicode(data, encoding)
        return newdata

    def _detectEncoding(self, xml_data):
        """Given a document, tries to detect its XML encoding."""
        xml_encoding = sniffed_xml_encoding = None
        try:
            # Sniff the encoding from the first bytes of the document,
            # re-encoding the data to UTF-8 where a BOM or a recognizable
            # '<?' pattern identifies the byte order.
            if xml_data[:4] == '\x4c\x6f\xa7\x94':
                # EBCDIC
                xml_data = self._ebcdic_to_ascii(xml_data)
            elif xml_data[:4] == '\x00\x3c\x00\x3f':
                # UTF-16BE
                sniffed_xml_encoding = 'utf-16be'
                xml_data = unicode(xml_data, 'utf-16be').encode('utf-8')
            elif (len(xml_data) >= 4) and (xml_data[:2] == '\xfe\xff') \
                     and (xml_data[2:4] != '\x00\x00'):
                # UTF-16BE with BOM
                sniffed_xml_encoding = 'utf-16be'
                xml_data = unicode(xml_data[2:], 'utf-16be').encode('utf-8')
            elif xml_data[:4] == '\x3c\x00\x3f\x00':
                # UTF-16LE
                sniffed_xml_encoding = 'utf-16le'
                xml_data = unicode(xml_data, 'utf-16le').encode('utf-8')
            elif (len(xml_data) >= 4) and (xml_data[:2] == '\xff\xfe') and \
                     (xml_data[2:4] != '\x00\x00'):
                # UTF-16LE with BOM
                sniffed_xml_encoding = 'utf-16le'
                xml_data = unicode(xml_data[2:], 'utf-16le').encode('utf-8')
            elif xml_data[:4] == '\x00\x00\x00\x3c':
                # UTF-32BE
                sniffed_xml_encoding = 'utf-32be'
                xml_data = unicode(xml_data, 'utf-32be').encode('utf-8')
            elif xml_data[:4] == '\x3c\x00\x00\x00':
                # UTF-32LE
                sniffed_xml_encoding = 'utf-32le'
                xml_data = unicode(xml_data, 'utf-32le').encode('utf-8')
            elif xml_data[:4] == '\x00\x00\xfe\xff':
                # UTF-32BE with BOM
                sniffed_xml_encoding = 'utf-32be'
                xml_data = unicode(xml_data[4:], 'utf-32be').encode('utf-8')
            elif xml_data[:4] == '\xff\xfe\x00\x00':
                # UTF-32LE with BOM
                sniffed_xml_encoding = 'utf-32le'
                xml_data = unicode(xml_data[4:], 'utf-32le').encode('utf-8')
            elif xml_data[:3] == '\xef\xbb\xbf':
                # UTF-8 with BOM
                sniffed_xml_encoding = 'utf-8'
                xml_data = unicode(xml_data[3:], 'utf-8').encode('utf-8')
            else:
                sniffed_xml_encoding = 'ascii'
                pass
            # Look for an explicit encoding in the XML declaration.
            xml_encoding_match = re.compile \
                                 ('^<\?.*encoding=[\'"](.*?)[\'"].*\?>')\
                                 .match(xml_data)
        except:
            # Any failure above means no declared encoding was found.
            xml_encoding_match = None
        if xml_encoding_match:
            xml_encoding = xml_encoding_match.groups()[0].lower()
            # A declared multi-byte encoding without byte order is
            # superseded by whatever byte order we actually sniffed.
            if sniffed_xml_encoding and \
               (xml_encoding in ('iso-10646-ucs-2', 'ucs-2', 'csunicode',
                                 'iso-10646-ucs-4', 'ucs-4', 'csucs4',
                                 'utf-16', 'utf-32', 'utf_16', 'utf_32',
                                 'utf16', 'u16')):
                xml_encoding = sniffed_xml_encoding
        return xml_data, xml_encoding, sniffed_xml_encoding

    def find_codec(self, charset):
        # Try the alias table, then the charset with dashes removed or
        # converted to underscores, before giving up and returning the
        # charset unchanged.
        return self._codec(self.CHARSET_ALIASES.get(charset, charset)) \
               or (charset and self._codec(charset.replace("-", ""))) \
               or (charset and self._codec(charset.replace("-", "_"))) \
               or charset

    def _codec(self, charset):
        # Return the charset name if Python has a codec for it, else None.
        if not charset: return charset
        codec = None
        try:
            codecs.lookup(charset)
            codec = charset
        except LookupError:
            pass
        return codec

    # Lazily-built translation table; shared across instances.
    EBCDIC_TO_ASCII_MAP = None
    def _ebcdic_to_ascii(self, s):
        c = self.__class__
        if not c.EBCDIC_TO_ASCII_MAP:
            emap = (0,1,2,3,156,9,134,127,151,141,142,11,12,13,14,15,
                    16,17,18,19,157,133,8,135,24,25,146,143,28,29,30,31,
                    128,129,130,131,132,10,23,27,136,137,138,139,140,5,6,7,
                    144,145,22,147,148,149,150,4,152,153,154,155,20,21,158,26,
                    32,160,161,162,163,164,165,166,167,168,91,46,60,40,43,33,
                    38,169,170,171,172,173,174,175,176,177,93,36,42,41,59,94,
                    45,47,178,179,180,181,182,183,184,185,124,44,37,95,62,63,
                    186,187,188,189,190,191,192,193,194,96,58,35,64,39,61,34,
                    195,97,98,99,100,101,102,103,104,105,196,197,198,199,200,
                    201,202,106,107,108,109,110,111,112,113,114,203,204,205,
                    206,207,208,209,126,115,116,117,118,119,120,121,122,210,
                    211,212,213,214,215,216,217,218,219,220,221,222,223,224,
                    225,226,227,228,229,230,231,123,65,66,67,68,69,70,71,72,
                    73,232,233,234,235,236,237,125,74,75,76,77,78,79,80,81,
                    82,238,239,240,241,242,243,92,159,83,84,85,86,87,88,89,
                    90,244,245,246,247,248,249,48,49,50,51,52,53,54,55,56,57,
                    250,251,252,253,254,255)
            import string
            c.EBCDIC_TO_ASCII_MAP = string.maketrans( \
                ''.join(map(chr, range(256))), ''.join(map(chr, emap)))
        return s.translate(c.EBCDIC_TO_ASCII_MAP)

    # windows-1252 "smart" characters mapped to (entity-name, hex
    # codepoint) pairs, or to a plain replacement string where no
    # entity exists.
    MS_CHARS = { '\x80' : ('euro', '20AC'),
                 '\x81' : ' ',
                 '\x82' : ('sbquo', '201A'),
                 '\x83' : ('fnof', '192'),
                 '\x84' : ('bdquo', '201E'),
                 '\x85' : ('hellip', '2026'),
                 '\x86' : ('dagger', '2020'),
                 '\x87' : ('Dagger', '2021'),
                 '\x88' : ('circ', '2C6'),
                 '\x89' : ('permil', '2030'),
                 '\x8A' : ('Scaron', '160'),
                 '\x8B' : ('lsaquo', '2039'),
                 '\x8C' : ('OElig', '152'),
                 '\x8D' : '?',
                 '\x8E' : ('#x17D', '17D'),
                 '\x8F' : '?',
                 '\x90' : '?',
                 '\x91' : ('lsquo', '2018'),
                 '\x92' : ('rsquo', '2019'),
                 '\x93' : ('ldquo', '201C'),
                 '\x94' : ('rdquo', '201D'),
                 '\x95' : ('bull', '2022'),
                 '\x96' : ('ndash', '2013'),
                 '\x97' : ('mdash', '2014'),
                 '\x98' : ('tilde', '2DC'),
                 '\x99' : ('trade', '2122'),
                 '\x9a' : ('scaron', '161'),
                 '\x9b' : ('rsaquo', '203A'),
                 '\x9c' : ('oelig', '153'),
                 '\x9d' : '?',
                 '\x9e' : ('#x17E', '17E'),
                 '\x9f' : ('Yuml', ''),}
#######################################################################
#By default, act as an HTML pretty-printer.
if __name__ == '__main__':
    import sys
    # Read an HTML document from stdin and pretty-print it.
    # (Python 2 print statement.)
    soup = BeautifulSoup(sys.stdin.read())
    print soup.prettify()
| mit |
freedomtan/tensorflow | tensorflow/python/data/experimental/kernel_tests/optimization/choose_fastest_dataset_test.py | 11 | 4211 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for `tf.data.experimental._ChooseFastestDataset`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
from tensorflow.python.data.experimental.ops import optimization
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.eager import context
from tensorflow.python.framework import combinations
from tensorflow.python.framework import errors
from tensorflow.python.platform import test
class ChooseFastestDatasetTest(test_base.DatasetTestBase,
                               parameterized.TestCase):

  @combinations.generate(test_base.default_test_combinations())
  def testChooseFastestSimple(self):
    # Merging a dataset with itself must produce the same elements.
    dataset = dataset_ops.Dataset.from_tensor_slices([0, 1, 2, 3, 4])
    merge = optimization._ChooseFastestDataset([dataset, dataset])
    self.assertDatasetProduces(
        merge,
        expected_output=[0, 1, 2, 3, 4],
        expected_shapes=dataset_ops.get_legacy_output_shapes(dataset))

  @combinations.generate(test_base.default_test_combinations())
  def testChooseFastestManyInputs(self):
    # Same as above, but with five identical branches.
    dataset = dataset_ops.Dataset.from_tensor_slices([0, 1, 2, 3, 4])
    merge = optimization._ChooseFastestDataset([dataset for _ in range(5)])
    self.assertDatasetProduces(
        merge,
        expected_output=[0, 1, 2, 3, 4],
        expected_shapes=dataset_ops.get_legacy_output_shapes(dataset))

  @combinations.generate(test_base.default_test_combinations())
  def testChooseFastest(self):
    # batch-then-map and map-then-batch produce the same elements here,
    # so whichever branch wins, the output is fixed.
    dataset = dataset_ops.Dataset.range(600)
    f = lambda x: 2 * x
    dataset_a = dataset.batch(50).map(f)
    dataset_b = dataset.map(f).batch(50)
    merge = optimization._ChooseFastestDataset([dataset_a, dataset_b])
    self.assertDatasetProduces(
        merge,
        expected_output=[
            [i * 2 for i in range(j * 50, (j + 1) * 50)] for j in range(12)
        ],
        expected_shapes=dataset_ops.get_legacy_output_shapes(dataset_a))

  # Each combine() clause below pairs incompatible inputs with the error
  # message fragment the op is expected to raise.
  @combinations.generate(
      combinations.times(
          test_base.default_test_combinations(),
          combinations.combine(
              slices_a=[[0]],
              slices_b=[[[1, 2, 3]]],
              error_msg="must have compatible output shapes.") +
          combinations.combine(
              slices_a=[[0]],
              slices_b=[[0.0]],
              error_msg="must have the same output types.") +
          combinations.combine(
              slices_a=[[0]],
              slices_b=[([0], [1])],
              error_msg="must have the same output types.") +
          combinations.combine(
              slices_a=[[1, 2, 3]],
              slices_b=[[0]],
              error_msg="must have compatible cardinalities.")))
  def testChooseFastestErrorWithIncompatibleInput(self, slices_a, slices_b,
                                                  error_msg):
    dataset_a = dataset_ops.Dataset.from_tensor_slices(slices_a)
    dataset_b = dataset_ops.Dataset.from_tensor_slices(slices_b)
    # The error is raised at dataset creation time.
    if context.executing_eagerly():
      with self.assertRaises(errors.InvalidArgumentError):
        merge = optimization._ChooseFastestDataset([dataset_a, dataset_b])
    else:
      # In graph mode the error surfaces when the dataset is evaluated.
      merge = optimization._ChooseFastestDataset([dataset_a, dataset_b])
      self.assertDatasetProduces(
          merge, expected_error=(errors.InvalidArgumentError, error_msg))
if __name__ == "__main__":
  # Delegate to the TensorFlow test runner.
  test.main()
| apache-2.0 |
alephu5/Soundbyte | environment/lib/python3.3/site-packages/numpy/distutils/mingw32ccompiler.py | 57 | 22435 | """
Support code for building Python extensions on Windows.
# NT stuff
# 1. Make sure libpython<version>.a exists for gcc. If not, build it.
# 2. Force windows to use gcc (we're struggling with MSVC and g77 support)
# 3. Force windows to use g77
"""
from __future__ import division, absolute_import, print_function
import os
import sys
import subprocess
import re
# Overwrite certain distutils.ccompiler functions:
import numpy.distutils.ccompiler
if sys.version_info[0] < 3:
from . import log
else:
from numpy.distutils import log
# NT stuff
# 1. Make sure libpython<version>.a exists for gcc. If not, build it.
# 2. Force windows to use gcc (we're struggling with MSVC and g77 support)
# --> this is done in numpy/distutils/ccompiler.py
# 3. Force windows to use g77
import distutils.cygwinccompiler
from distutils.version import StrictVersion
from numpy.distutils.ccompiler import gen_preprocess_options, gen_lib_options
from distutils.errors import DistutilsExecError, CompileError, UnknownFileError
from distutils.unixccompiler import UnixCCompiler
from distutils.msvccompiler import get_build_version as get_build_msvc_version
from numpy.distutils.misc_util import msvc_runtime_library, get_build_architecture
# Useful to generate table of symbols from a dll
_START = re.compile(r'\[Ordinal/Name Pointer\] Table')
# NOTE(review): the capture group below starts with the character class
# [\s*[0-9] (whitespace, '*', '[', digits) -- likely intended as
# \[\s*([0-9]*)\]. It works in practice because the ordinal field is
# digits surrounded by whitespace; confirm before changing, since
# generate_def() feeds group(1) to int().
_TABLE = re.compile(r'^\s+\[([\s*[0-9]*)\] ([a-zA-Z0-9_]*)')
# the same as cygwin plus some additional parameters
class Mingw32CCompiler(distutils.cygwinccompiler.CygwinCCompiler):
    """ A modified MingW32 compiler compatible with an MSVC built Python.

    """

    compiler_type = 'mingw32'

    def __init__ (self,
                  verbose=0,
                  dry_run=0,
                  force=0):

        distutils.cygwinccompiler.CygwinCCompiler.__init__ (self,
                                                            verbose, dry_run, force)

        # we need to support 3.2 which doesn't match the standard
        # get_versions methods regex
        if self.gcc_version is None:
            # NOTE(review): shadows the module-level `import re`; harmless
            # but redundant.
            import re
            p = subprocess.Popen(['gcc', '-dumpversion'], shell=True,
                                 stdout=subprocess.PIPE)
            out_string = p.stdout.read()
            p.stdout.close()
            # NOTE(review): on Python 3, out_string is bytes while the
            # pattern is str, so re.search would raise TypeError; confirm
            # which interpreters this path still needs to support.
            result = re.search('(\d+\.\d+)', out_string)
            if result:
                self.gcc_version = StrictVersion(result.group(1))

        # A real mingw32 doesn't need to specify a different entry point,
        # but cygwin 2.91.57 in no-cygwin-mode needs it.
        if self.gcc_version <= "2.91.57":
            entry_point = '--entry _DllMain@12'
        else:
            entry_point = ''

        if self.linker_dll == 'dllwrap':
            # Commented out '--driver-name g++' part that fixes weird
            # g++.exe: g++: No such file or directory
            # error (mingw 1.0 in Enthon24 tree, gcc-3.4.5).
            # If the --driver-name part is required for some environment
            # then make the inclusion of this part specific to that environment.
            self.linker = 'dllwrap' #  --driver-name g++'
        elif self.linker_dll == 'gcc':
            self.linker = 'g++'

        # **changes: eric jones 4/11/01
        # 1. Check for import library on Windows.  Build if it doesn't exist.
        build_import_library()

        # Check for custom msvc runtime library on Windows. Build if it doesn't exist.
        msvcr_success = build_msvcr_library()
        msvcr_dbg_success = build_msvcr_library(debug=True)
        if msvcr_success or msvcr_dbg_success:
            # add preprocessor statement for using customized msvcr lib
            self.define_macro('NPY_MINGW_USE_CUSTOM_MSVCR')

        # Define the MSVC version as hint for MinGW
        msvcr_version = '0x%03i0' % int(msvc_runtime_library().lstrip('msvcr'))
        self.define_macro('__MSVCRT_VERSION__', msvcr_version)

        # **changes: eric jones 4/11/01
        # 2. increased optimization and turned off all warnings
        # 3. also added --driver-name g++
        #self.set_executables(compiler='gcc -mno-cygwin -O2 -w',
        #                     compiler_so='gcc -mno-cygwin -mdll -O2 -w',
        #                     linker_exe='gcc -mno-cygwin',
        #                     linker_so='%s --driver-name g++ -mno-cygwin -mdll -static %s'
        #                     % (self.linker, entry_point))

        # MS_WIN64 should be defined when building for amd64 on windows, but
        # python headers define it only for MS compilers, which has all kind of
        # bad consequences, like using Py_ModuleInit4 instead of
        # Py_ModuleInit4_64, etc... So we add it here
        if get_build_architecture() == 'AMD64':
            if self.gcc_version < "4.0":
                self.set_executables(
                    compiler='gcc -g -DDEBUG -DMS_WIN64 -mno-cygwin -O0 -Wall',
                    compiler_so='gcc -g -DDEBUG -DMS_WIN64 -mno-cygwin -O0 -Wall -Wstrict-prototypes',
                    linker_exe='gcc -g -mno-cygwin',
                    linker_so='gcc -g -mno-cygwin -shared')
            else:
                # gcc-4 series releases do not support -mno-cygwin option
                self.set_executables(
                    compiler='gcc -g -DDEBUG -DMS_WIN64 -O0 -Wall',
                    compiler_so='gcc -g -DDEBUG -DMS_WIN64 -O0 -Wall -Wstrict-prototypes',
                    linker_exe='gcc -g',
                    linker_so='gcc -g -shared')
        else:
            if self.gcc_version <= "3.0.0":
                self.set_executables(compiler='gcc -mno-cygwin -O2 -w',
                                     compiler_so='gcc -mno-cygwin -mdll -O2 -w -Wstrict-prototypes',
                                     linker_exe='g++ -mno-cygwin',
                                     linker_so='%s -mno-cygwin -mdll -static %s'
                                     % (self.linker, entry_point))
            elif self.gcc_version < "4.0":
                self.set_executables(compiler='gcc -mno-cygwin -O2 -Wall',
                                     compiler_so='gcc -mno-cygwin -O2 -Wall -Wstrict-prototypes',
                                     linker_exe='g++ -mno-cygwin',
                                     linker_so='g++ -mno-cygwin -shared')
            else:
                # gcc-4 series releases do not support -mno-cygwin option
                self.set_executables(compiler='gcc -O2 -Wall',
                                     compiler_so='gcc -O2 -Wall -Wstrict-prototypes',
                                     linker_exe='g++ ',
                                     linker_so='g++ -shared')
        # added for python2.3 support
        # we can't pass it through set_executables because pre 2.2 would fail
        self.compiler_cxx = ['g++']

        # Maybe we should also append -mthreads, but then the finished
        # dlls need another dll (mingwm10.dll see Mingw32 docs)
        # (-mthreads: Support thread-safe exception handling on `Mingw32')

        # no additional libraries needed
        #self.dll_libraries=[]
        return

    # __init__ ()

    def link(self,
             target_desc,
             objects,
             output_filename,
             output_dir,
             libraries,
             library_dirs,
             runtime_library_dirs,
             export_symbols = None,
             debug=0,
             extra_preargs=None,
             extra_postargs=None,
             build_temp=None,
             target_lang=None):
        # Include the appropiate MSVC runtime library if Python was built
        # with MSVC >= 7.0 (MinGW standard is msvcrt)
        runtime_library = msvc_runtime_library()
        if runtime_library:
            if not libraries:
                libraries = []
            libraries.append(runtime_library)
        args = (self,
                target_desc,
                objects,
                output_filename,
                output_dir,
                libraries,
                library_dirs,
                runtime_library_dirs,
                None, #export_symbols, we do this in our def-file
                debug,
                extra_preargs,
                extra_postargs,
                build_temp,
                target_lang)
        if self.gcc_version < "3.0.0":
            func = distutils.cygwinccompiler.CygwinCCompiler.link
        else:
            func = UnixCCompiler.link
        # Only pass as many positional arguments as the chosen link()
        # implementation accepts.
        func(*args[:func.__code__.co_argcount])
        return

    def object_filenames (self,
                          source_filenames,
                          strip_dir=0,
                          output_dir=''):
        if output_dir is None: output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
            (base, ext) = os.path.splitext (os.path.normcase(src_name))

            # added these lines to strip off windows drive letters
            # without it, .o files are placed next to .c files
            # instead of the build directory
            drv, base = os.path.splitdrive(base)
            if drv:
                base = base[1:]

            if ext not in (self.src_extensions + ['.rc', '.res']):
                raise UnknownFileError(
                      "unknown file type '%s' (from '%s')" % \
                      (ext, src_name))
            if strip_dir:
                base = os.path.basename (base)
            if ext == '.res' or ext == '.rc':
                # these need to be compiled to object files
                obj_names.append (os.path.join (output_dir,
                                                base + ext + self.obj_extension))
            else:
                obj_names.append (os.path.join (output_dir,
                                                base + self.obj_extension))
        return obj_names

    # object_filenames ()
def find_python_dll():
    """Locate pythonXY.dll for the running interpreter.

    Searches sys.prefix, sys.prefix/lib and (when SYSTEMROOT is set)
    the Windows system32 directory. Returns the full path, or raises
    ValueError when the dll cannot be found.
    """
    major, minor = sys.version_info[0], sys.version_info[1]
    dllname = 'python%d%d.dll' % (major, minor)
    print("Looking for %s" % dllname)

    # We can't do much here:
    # - find it in python main dir
    # - in system32,
    # - ortherwise (Sxs), I don't know how to get it.
    lib_dirs = [sys.prefix, os.path.join(sys.prefix, 'lib')]
    if 'SYSTEMROOT' in os.environ:
        lib_dirs.append(os.path.join(os.environ['SYSTEMROOT'], 'system32'))

    for directory in lib_dirs:
        candidate = os.path.join(directory, dllname)
        if os.path.exists(candidate):
            return candidate

    raise ValueError("%s not found in %s" % (dllname, lib_dirs))
def dump_table(dll):
    """Return the `objdump -p` output for *dll* as a list of raw lines."""
    proc = subprocess.Popen(["objdump.exe", "-p", dll], stdout=subprocess.PIPE)
    return proc.stdout.readlines()
def generate_def(dll, dfile):
    """Given a dll file location, get all its exported symbols and dump them
    into the given def file.

    The .def file will be overwritten"""
    dump = dump_table(dll)
    # Locate the start of the export table in the objdump output.
    for i in range(len(dump)):
        if _START.match(dump[i].decode()):
            break
    else:
        raise ValueError("Symbol table not found")

    # Collect (ordinal, name) pairs until the table ends.
    syms = []
    for j in range(i+1, len(dump)):
        m = _TABLE.match(dump[j].decode())
        if m:
            syms.append((int(m.group(1).strip()), m.group(2)))
        else:
            break

    if len(syms) == 0:
        log.warn('No symbols found in %s' % dll)

    # Fix: use a context manager so the handle is closed even if a
    # write fails (the original leaked the handle on error).
    with open(dfile, 'w') as d:
        d.write('LIBRARY %s\n' % os.path.basename(dll))
        d.write(';CODE          PRELOAD MOVEABLE DISCARDABLE\n')
        d.write(';DATA          PRELOAD SINGLE\n')
        d.write('\nEXPORTS\n')
        for s in syms:
            #d.write('@%d   %s\n' % (s[0], s[1]))
            d.write('%s\n' % s[1])
def find_dll(dll_name):
    """Find *dll_name* in WinSxS or on the search path; None if absent."""
    arch = {'AMD64' : 'amd64',
            'Intel' : 'x86'}[get_build_architecture()]

    def search_winsxs(name):
        # Walk through the WinSxS directory to find the dll.
        winsxs_path = os.path.join(os.environ['WINDIR'], 'winsxs')
        if not os.path.exists(winsxs_path):
            return None
        for root, dirs, files in os.walk(winsxs_path):
            if name in files and arch in root:
                return os.path.join(root, name)
        return None

    def search_path(name):
        # First, look in the Python directory, then scan PATH for
        # the given dll name.
        for directory in [sys.prefix] + os.environ['PATH'].split(';'):
            candidate = os.path.join(directory, name)
            if os.path.exists(candidate):
                return os.path.abspath(candidate)

    return search_winsxs(dll_name) or search_path(dll_name)
def build_msvcr_library(debug=False):
    """Build a mingw import library for the MSVC runtime, if needed.

    Returns True when the custom library already exists or was built,
    False when it is not applicable or could not be built. Only
    meaningful on Windows.
    """
    if os.name != 'nt':
        return False

    msvcr_name = msvc_runtime_library()

    # Skip using a custom library for versions < MSVC 8.0
    if int(msvcr_name.lstrip('msvcr')) < 80:
        log.debug('Skip building msvcr library: custom functionality not present')
        return False

    if debug:
        # Debug runtimes carry a 'd' suffix (e.g. msvcr90d).
        msvcr_name += 'd'

    # Skip if custom library already exists
    out_name = "lib%s.a" % msvcr_name
    out_file = os.path.join(sys.prefix, 'libs', out_name)
    if os.path.isfile(out_file):
        log.debug('Skip building msvcr library: "%s" exists' % (out_file))
        return True

    # Find the msvcr dll
    msvcr_dll_name = msvcr_name + '.dll'
    dll_file = find_dll(msvcr_dll_name)
    if not dll_file:
        log.warn('Cannot build msvcr library: "%s" not found' % msvcr_dll_name)
        return False

    def_name = "lib%s.def" % msvcr_name
    def_file = os.path.join(sys.prefix, 'libs', def_name)

    log.info('Building msvcr library: "%s" (from %s)' \
             % (out_file, dll_file))

    # Generate a symbol definition file from the msvcr dll
    generate_def(dll_file, def_file)

    # Create a custom mingw library for the given symbol definitions
    cmd = ['dlltool', '-d', def_file, '-l', out_file]
    retcode = subprocess.call(cmd)

    # Clean up symbol definitions
    os.remove(def_file)

    return (not retcode)
def build_import_library():
    """Build the mingw import library for the current Python, dispatching
    on the build architecture.  No-op on non-Windows platforms."""
    if os.name != 'nt':
        return
    arch = get_build_architecture()
    builders = {'AMD64': _build_import_library_amd64,
                'Intel': _build_import_library_x86}
    try:
        builder = builders[arch]
    except KeyError:
        raise ValueError("Unhandled arch %s" % arch)
    return builder()
def _build_import_library_amd64():
    """Build libpythonXY.a for mingw-w64 (AMD64) from the python DLL."""
    dll_file = find_python_dll()
    out_name = "libpython%d%d.a" % tuple(sys.version_info[:2])
    out_file = os.path.join(sys.prefix, 'libs', out_name)
    if os.path.isfile(out_file):
        log.debug('Skip building import library: "%s" exists' % (out_file))
        return

    def_name = "python%d%d.def" % tuple(sys.version_info[:2])
    def_file = os.path.join(sys.prefix, 'libs', def_name)
    log.info('Building import library (arch=AMD64): "%s" (from %s)' \
             % (out_file, dll_file))

    # Generate a symbol definition file from the python dll.
    generate_def(dll_file, def_file)

    cmd = ['dlltool', '-d', def_file, '-l', out_file]
    # BUG FIX: subprocess.Popen() returned immediately without waiting, so
    # the build could proceed before dlltool finished and failures were
    # silently dropped.  Wait for completion and report a failure, matching
    # the behaviour of the x86 variant.
    if subprocess.call(cmd):
        log.warn('Failed to build import library for gcc. Linking will fail.')
def _build_import_library_x86():
    """ Build the import libraries for Mingw32-gcc on Windows
    """
    lib_name = "python%d%d.lib" % tuple(sys.version_info[:2])
    lib_file = os.path.join(sys.prefix, 'libs', lib_name)
    out_name = "libpython%d%d.a" % tuple(sys.version_info[:2])
    out_file = os.path.join(sys.prefix, 'libs', out_name)
    if not os.path.isfile(lib_file):
        log.warn('Cannot build import library: "%s" not found' % (lib_file))
        return
    if os.path.isfile(out_file):
        log.debug('Skip building import library: "%s" exists' % (out_file))
        return
    log.info('Building import library (ARCH=x86): "%s"' % (out_file))

    from numpy.distutils import lib2def

    def_name = "python%d%d.def" % tuple(sys.version_info[:2])
    def_file = os.path.join(sys.prefix, 'libs', def_name)
    nm_cmd = '%s %s' % (lib2def.DEFAULT_NM, lib_file)
    nm_output = lib2def.getnm(nm_cmd)
    dlist, flist = lib2def.parse_nm(nm_output)
    # BUG FIX: the file object handed to output_def was never closed,
    # leaking the handle and potentially keeping the .def file locked for
    # the dlltool invocation below.
    fid = open(def_file, 'w')
    try:
        lib2def.output_def(dlist, flist, lib2def.DEF_HEADER, fid)
    finally:
        fid.close()

    dll_name = "python%d%d.dll" % tuple(sys.version_info[:2])
    args = (dll_name, def_file, out_file)
    cmd = 'dlltool --dllname %s --def %s --output-lib %s' % args
    status = os.system(cmd)
    # for now, fail silently
    if status:
        log.warn('Failed to build import library for gcc. Linking will fail.')
    return
#=====================================
# Dealing with Visual Studio MANIFESTS
#=====================================
# Functions to deal with Visual Studio manifests. Manifests are a mechanism to
# enforce strong DLL versioning on windows, and have nothing to do with
# distutils MANIFEST. Manifests are XML files with version info, and used by
# the OS loader; they are necessary when linking against a DLL not in the
# system path; in particular, the official python 2.6 binary is built against
# the MS runtime 9 (the one from VS 2008), which is not available on most
# windows systems; the python 2.6 installer does install it in the Win SxS
# (Side by side) directory, but this requires the manifest for this to work.
# This is a big mess, thanks MS for a wonderful system.

# XXX: ideally, we should use exactly the same version as used by python. I
# submitted a patch to get this version, but it was only included for python
# 2.6.1 and above. So for versions below, we use a "best guess".
# Maps the short runtime version ('80', '90', ...) to the full assembly
# version string that must appear in the manifest.
_MSVCRVER_TO_FULLVER = {}
if sys.platform == 'win32':
    try:
        import msvcrt
        # I took one version in my SxS directory: no idea if it is the good
        # one, and we can't retrieve it from python
        _MSVCRVER_TO_FULLVER['80'] = "8.0.50727.42"
        _MSVCRVER_TO_FULLVER['90'] = "9.0.21022.8"
        # Value from msvcrt.CRT_ASSEMBLY_VERSION under Python 3.3.0 on Windows XP:
        _MSVCRVER_TO_FULLVER['100'] = "10.0.30319.460"
        # When the runtime exposes its own assembly version, prefer it over
        # the hard-coded guesses above.
        if hasattr(msvcrt, "CRT_ASSEMBLY_VERSION"):
            major, minor, rest = msvcrt.CRT_ASSEMBLY_VERSION.split(".", 2)
            _MSVCRVER_TO_FULLVER[major + minor] = msvcrt.CRT_ASSEMBLY_VERSION
            del major, minor, rest
    except ImportError:
        # If we are here, means python was not built with MSVC. Not sure what
        # to do in that case: manifest building will fail, but it should not
        # be used in that case anyway
        log.warn('Cannot import msvcrt: using manifest will not be possible')
def msvc_manifest_xml(maj, min):
    """Given a major and minor version of the MSVCR, returns the
    corresponding XML file."""
    key = str(maj * 10 + min)
    if key not in _MSVCRVER_TO_FULLVER:
        raise ValueError("Version %d,%d of MSVCRT not supported yet" \
            % (maj, min))
    fullver = _MSVCRVER_TO_FULLVER[key]
    # Don't be fooled, it looks like an XML, but it is not. In particular, it
    # should not have any space before starting, and its size should be
    # divisible by 4, most likely for alignment constraints when the xml is
    # embedded in the binary...
    # This template was copied directly from the python 2.6 binary (using
    # strings.exe from mingw on python.exe).
    template = """\
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
  <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
    <security>
      <requestedPrivileges>
        <requestedExecutionLevel level="asInvoker" uiAccess="false"></requestedExecutionLevel>
      </requestedPrivileges>
    </security>
  </trustInfo>
  <dependency>
    <dependentAssembly>
      <assemblyIdentity type="win32" name="Microsoft.VC%(maj)d%(min)d.CRT" version="%(fullver)s" processorArchitecture="*" publicKeyToken="1fc8b3b9a1e18e3b"></assemblyIdentity>
    </dependentAssembly>
  </dependency>
</assembly>"""
    return template % {'fullver': fullver, 'maj': maj, 'min': min}
def manifest_rc(name, type='dll'):
    """Return the rc file used to generate the res file which will be embedded
    as manifest for given manifest file name, of given type ('dll' or
    'exe').

    Parameters
    ----------
    name : str
        name of the manifest file to embed
    type : str {'dll', 'exe'}
        type of the binary which will embed the manifest

    """
    # Resource type codes defined by the Windows resource compiler:
    # 2 for a DLL manifest, 1 for an EXE manifest.
    rc_type_codes = {'dll': 2, 'exe': 1}
    if type not in rc_type_codes:
        raise ValueError("Type %s not supported" % type)
    return """\
#include "winuser.h"
%d RT_MANIFEST %s""" % (rc_type_codes[type], name)
def check_embedded_msvcr_match_linked(msver):
    """msver is the ms runtime version used for the MANIFEST."""
    # The runtime we link against and the one named in the manifest must
    # agree on the major version, or the binary would load a different CRT
    # than the one it was linked with.
    msvcv = msvc_runtime_library()
    if not msvcv:
        return
    assert msvcv.startswith("msvcr"), msvcv
    # "msvcr90" -> 9, "msvcr100" -> 10: drop the trailing minor digit.
    maj = int(msvcv[5:-1])
    if maj != int(msver):
        raise ValueError(
            "Discrepancy between linked msvcr " \
            "(%d) and the one about to be embedded " \
            "(%d)" % (int(msver), maj))
def configtest_name(config):
base = os.path.basename(config._gen_temp_sourcefile("yo", [], "c"))
return os.path.splitext(base)[0]
def manifest_name(config):
    """Return the manifest file name for the config-test binary.

    The manifest sits next to the built executable: <name><exe-ext>.manifest
    """
    # Get configtest name (including suffix)
    return "".join([configtest_name(config),
                    config.compiler.exe_extension,
                    ".manifest"])
def rc_name(config):
    """Return the resource (.rc) file name for the config-test binary."""
    # Get configtest name (including suffix)
    return "{}.rc".format(configtest_name(config))
def generate_manifest(config):
    """Generate and write the MSVC runtime manifest for a config test.

    No-op unless the build uses MSVC >= 8, whose runtime requires a
    side-by-side manifest to be embedded in the test executable.  The
    generated file is registered in config.temp_files for cleanup.
    """
    msver = get_build_msvc_version()
    if msver is None or msver < 8:
        return
    check_embedded_msvcr_match_linked(msver)
    ma = int(msver)
    mi = int((msver - ma) * 10)
    # Write the manifest file
    manxml = msvc_manifest_xml(ma, mi)
    filename = manifest_name(config)
    config.temp_files.append(filename)
    # BUG FIX: the handle was previously opened/closed manually and leaked
    # if the write raised; "with" guarantees it is closed.
    with open(filename, "w") as man:
        man.write(manxml)
| gpl-3.0 |
NeuralEnsemble/neuroConstruct | lib/jython/Lib/test/test_zlib_jy.py | 23 | 1039 | """Misc zlib tests
Made for Jython.
"""
import unittest
import zlib
from array import array
from test import test_support
class ArrayTestCase(unittest.TestCase):
    """Check that zlib compression accepts array.array inputs.

    NOTE: Python 2 / Jython specific -- the 'c' typecode and
    array.tostring() used below do not exist on Python 3.
    """

    def test_array(self):
        # One-shot module-level API.
        self._test_array(zlib.compress, zlib.decompress)

    def test_array_compressobj(self):
        # Streaming API wrapped to look like the one-shot API.
        def compress(value):
            co = zlib.compressobj()
            return co.compress(value) + co.flush()
        def decompress(value):
            dco = zlib.decompressobj()
            return dco.decompress(value) + dco.flush()
        self._test_array(compress, decompress)

    def _test_array(self, compress, decompress):
        # Compressing an array must give the same bytes as compressing the
        # equivalent str, and arrays must round-trip through decompress.
        self.assertEqual(compress(array('c', 'jython')), compress('jython'))
        intarray = array('i', range(5))
        self.assertEqual(compress(intarray), compress(intarray.tostring()))
        compressed = array('c', compress('jython'))
        self.assertEqual('jython', decompress(compressed))
def test_main():
    """Entry point used by the regrtest machinery (test.test_support)."""
    test_support.run_unittest(ArrayTestCase)

if __name__ == '__main__':
    test_main()
| gpl-2.0 |
akarol/cfme_tests | cfme/common/__init__.py | 1 | 21569 | # -*- coding: utf-8 -*-
from navmazing import NavigateToSibling
from widgetastic.exceptions import NoSuchElementException, RowNotFound
from widgetastic_patternfly import BootstrapSelect, Button, CheckableBootstrapTreeview
from widgetastic.widget import Table, Text, View
from cfme.base.login import BaseLoggedInPage
from cfme.modeling.base import BaseCollection, BaseEntity
from cfme.configure.configuration.region_settings import Category, Tag
from cfme.utils.appliance.implementations.ui import navigate_to, navigator, CFMENavigateStep
from cfme.utils.wait import wait_for
from widgetastic_manageiq import BaseNonInteractiveEntitiesView, BreadCrumb
class ManagePoliciesView(BaseLoggedInPage):
    """
    Manage policies page
    """
    # Tree of available policy profiles; nodes are checked/unchecked to
    # assign/unassign profiles.
    policy_profiles = CheckableBootstrapTreeview(tree_id='protectbox')
    breadcrumb = BreadCrumb()  # some views have breadcrumb, some not
    entities = View.nested(BaseNonInteractiveEntitiesView)
    save = Button('Save')
    reset = Button('Reset')
    cancel = Button('Cancel')

    @property
    def is_displayed(self):
        # Deliberately False: this view is shared by many object types, so
        # it is never auto-detected as the current view by the navigator.
        return False
class PolicyProfileAssignable(object):
    """This class can be inherited by anything that provides the load_details method.

    It provides functionality to assign and unassign Policy Profiles.
    """

    @property
    def assigned_policy_profiles(self):
        # Lazily created so inheriting classes need no cooperation from
        # their __init__; tracks which profiles this object has assigned.
        try:
            return self._assigned_policy_profiles
        except AttributeError:
            self._assigned_policy_profiles = set()
            return self._assigned_policy_profiles

    def assign_policy_profiles(self, *policy_profile_names):
        """ Assign Policy Profiles to this object.

        Args:
            policy_profile_names: :py:class:`str` with Policy Profile names. After Control/Explorer
                coverage goes in, PolicyProfile objects will be also passable.
        """
        # set.update() replaces map(set.add, ...): identical on Python 2
        # and, unlike the lazy map() iterator, it also works on Python 3.
        self.assigned_policy_profiles.update(policy_profile_names)
        self._assign_unassign_policy_profiles(True, *policy_profile_names)

    def unassign_policy_profiles(self, *policy_profile_names):
        """ Unassign Policy Profiles from this object.

        Args:
            policy_profile_names: :py:class:`str` with Policy Profile names. After Control/Explorer
                coverage goes in, PolicyProfile objects will be also passable.
        """
        for pp_name in policy_profile_names:
            # discard() ignores unknown names, matching the old
            # try/remove/except KeyError behaviour.
            self.assigned_policy_profiles.discard(pp_name)
        self._assign_unassign_policy_profiles(False, *policy_profile_names)

    @staticmethod
    def _fill_policy_profiles(view, assign, policy_profile_names):
        """Check (assign=True) or uncheck the given profile nodes in the tree.

        Returns:
            True when at least one node actually changed state.
        """
        tree = view.policy_profiles
        node = tree.CheckNode if assign else tree.UncheckNode
        changed = False
        for policy_profile in policy_profile_names:
            changed = tree.fill(node([policy_profile])) or changed
        return changed

    def _assign_unassign_policy_profiles(self, assign, *policy_profile_names):
        """DRY function for managing policy profiles.

        See :py:func:`assign_policy_profiles` and :py:func:`unassign_policy_profiles`

        Args:
            assign: Whether to assign or unassign.
            policy_profile_names: :py:class:`str` with Policy Profile names.
        """
        view = navigate_to(self, 'ManagePoliciesFromDetails')
        policy_changed = self._fill_policy_profiles(view, assign, policy_profile_names)
        # Save only when something actually changed; otherwise cancel out.
        if policy_changed:
            view.save.click()
        else:
            view.cancel.click()
        details_view = self.create_view(navigator.get_class(self, 'Details').VIEW)
        details_view.flash.assert_no_error()

    def assign_policy_profiles_multiple_entities(self, entities, conditions, *policy_profile_names):
        """ Assign Policy Profiles to selected entities on the collection All view.

        Args:
            entities: list of entities from the collection table
            policy_profile_names: :py:class:`str` with Policy Profile names. After Control/Explorer
                coverage goes in, PolicyProfile objects will be also passable.
            conditions: entities should match to

        Ex:
            collection = appliance.collections.container_images
            # assign OpenSCAP policy
            collection.assign_policy_profiles_multiple_entities(random_image_instances,
                conditions=[{'name': 'dotnet/dotnet-20-rhel7'},
                            {'name': 'dotnet/dotnet-20-runtime-rhel7'}],
                'OpenSCAP profile')
        """
        self.assigned_policy_profiles.update(policy_profile_names)
        self._assign_or_unassign_policy_profiles_multiple_entities(
            entities, True, conditions, *policy_profile_names)

    def unassign_policy_profiles_multiple_entities(self, entities, conditions,
                                                   *policy_profile_names):
        """ Unassign Policy Profiles from selected entities on the collection All view.

        Args:
            entities: list of entities from the collection table
            policy_profile_names: :py:class:`str` with Policy Profile names. After Control/Explorer
                coverage goes in, PolicyProfile objects will be also passable.
            conditions: entities should match to

        Ex:
            collection = appliance.collections.container_images
            # unassign OpenSCAP policy
            collection.unassign_policy_profiles_multiple_entities(random_image_instances,
                conditions=[{'name': 'dotnet/dotnet-20-rhel7'},
                            {'name': 'dotnet/dotnet-20-runtime-rhel7'}],
                'OpenSCAP profile')
        """
        for pp_name in policy_profile_names:
            self.assigned_policy_profiles.discard(pp_name)
        self._assign_or_unassign_policy_profiles_multiple_entities(
            entities, False, conditions, *policy_profile_names)

    def _assign_or_unassign_policy_profiles_multiple_entities(
            self, entities, assign, conditions, *policy_profile_names):
        """DRY function for managing policy profiles on multiple entities.

        See :py:func:`assign_policy_profiles_multiple_entities`
        and :py:func:`unassign_policy_profiles_multiple_entities`

        Args:
            entities: list of entities from the collection table
            assign: Whether to assign or unassign.
            policy_profile_names: :py:class:`str` with Policy Profile names.
            conditions: entities should match to
        """
        view = navigate_to(self, 'All')
        # Use the maximum page size so paging does not drop the check marks
        # already placed on entities.
        items_per_page = view.paginator.items_per_page
        view.paginator.set_items_per_page(1000)
        # Check the matching entities on the collection All view.
        view.entities.apply(func=lambda e: e.check(), conditions=conditions)
        wait_for(lambda: view.toolbar.policy.is_enabled, num_sec=5,
                 message='Policy drop down menu is disabled after checking some entities')
        view.toolbar.policy.item_select('Manage Policies')
        manage_policies_view = self.create_view(navigator.get_class(self, 'ManagePolicies').VIEW)
        policy_changed = self._fill_policy_profiles(
            manage_policies_view, assign, policy_profile_names)
        if policy_changed:
            manage_policies_view.save.click()
        else:
            manage_policies_view.cancel.click()
        view.flash.assert_no_error()
        # Restore the previous number of items per page.
        view.paginator.set_items_per_page(items_per_page)
@navigator.register(PolicyProfileAssignable, 'ManagePoliciesFromDetails')
class ManagePoliciesFromDetails(CFMENavigateStep):
    # Reach the Manage Policies page from an object's Details page.
    VIEW = ManagePoliciesView
    prerequisite = NavigateToSibling('Details')

    def step(self):
        self.prerequisite_view.toolbar.policy.item_select('Manage Policies')
@navigator.register(PolicyProfileAssignable, 'ManagePolicies')
class ManagePolicies(CFMENavigateStep):
    # Reach the Manage Policies page from the collection All page by
    # checking the single entity first.
    VIEW = ManagePoliciesView
    prerequisite = NavigateToSibling('All')

    def step(self):
        self.prerequisite_view.entities.get_entity(name=self.obj.name, surf_pages=True).check()
        self.prerequisite_view.toolbar.policy.item_select('Manage Policies')
class TagPageView(BaseLoggedInPage):
    """Class represents common tag page in CFME UI"""
    title = Text('#explorer_title_text')
    table_title = Text('//div[@id="tab_div"]/h3')

    @View.nested
    class form(View):  # noqa
        # Table of currently assigned tags (category / assigned value rows).
        tags = Table("//div[@id='assignments_div']//table")
        tag_category = BootstrapSelect(id='tag_cat')
        tag_name = BootstrapSelect(id='tag_add')
        entities = View.nested(BaseNonInteractiveEntitiesView)
        save = Button('Save')
        reset = Button('Reset')
        cancel = Button('Cancel')

    @property
    def is_displayed(self):
        # The page has no unique title; identify it by the "Tag Assignment"
        # table header plus the presence of the assignments table.
        return (
            self.table_title.text == 'Tag Assignment' and
            self.form.tags.is_displayed
        )
class WidgetasticTaggable(object):
    """
    This class can be inherited by any class that honors tagging.
    Class should have following

    * 'Details' navigation
    * 'Details' view should have entities.smart_management SummaryTable widget
    * 'EditTags' navigation
    * 'EditTags' view should have nested 'form' view with 'tags' table widget
    * Suggest using class cfme.common.TagPageView as view for 'EditTags' nav

    This class provides functionality to assign and unassigned tags for page models with
    standardized widgetastic views
    """

    def add_tag(self, category=None, tag=None, cancel=False, reset=False, details=True):
        """ Add tag to tested item

        Args:
            category: category(str)
            tag: tag(str) or Tag object
            cancel: set True to cancel tag assigment
            reset: set True to reset already set up tag
            details (bool): set False if tag should be added for list selection,
                default is details page
        """
        if details:
            view = navigate_to(self, 'EditTagsFromDetails')
        else:
            view = navigate_to(self, 'EditTags')
        # A Tag object carries both the category and the tag display names.
        if isinstance(tag, Tag):
            category = tag.category.display_name
            tag = tag.display_name
        # Handle nested view.form and where the view contains form widgets.
        # Some pages decorate the category with a trailing ' *'; try that
        # first and fall back to the plain category name.
        try:
            updated = view.form.fill({
                "tag_category": '{} *'.format(category),
                "tag_name": tag
            })
        except NoSuchElementException:
            updated = view.form.fill({
                "tag_category": category,
                "tag_name": tag
            })
        # In case if field is not updated cancel the edition
        if not updated:
            cancel = True
        self._tags_action(view, cancel, reset)

    def add_tags(self, tags):
        """Add multiple tags

        Args:
            tags: pass dict with category name as key, and tag as value,
                 or pass list with tag objects
        """
        if isinstance(tags, dict):
            for category, tag in tags.items():
                self.add_tag(category=category, tag=tag)
        elif isinstance(tags, (list, tuple)):
            for tag in tags:
                self.add_tag(tag=tag)

    def remove_tag(self, category=None, tag=None, cancel=False, reset=False, details=True):
        """ Remove tag of tested item

        Args:
            category: category(str)
            tag: tag(str) or Tag object
            cancel: set True to cancel tag deletion
            reset: set True to reset tag changes
            details (bool): set False if tag should be added for list selection,
                default is details page
        """
        if details:
            view = navigate_to(self, 'EditTagsFromDetails')
        else:
            view = navigate_to(self, 'EditTags')
        if isinstance(tag, Tag):
            category = tag.category.display_name
            tag = tag.display_name
        # Same ' *' decoration fallback as in add_tag, but applied to the
        # assignments table row lookup.
        try:
            row = view.form.tags.row(category="{} *".format(category), assigned_value=tag)
        except RowNotFound:
            row = view.form.tags.row(category=category, assigned_value=tag)
        # Clicking the first cell of the row unassigns the tag.
        row[0].click()
        self._tags_action(view, cancel, reset)

    def remove_tags(self, tags):
        """Remove multiple of tags

        Args:
            tags: pass dict with category name as key, and tag as value,
                 or pass list with tag objects
        """
        if isinstance(tags, dict):
            for category, tag in tags.items():
                self.remove_tag(category=category, tag=tag)
        elif isinstance(tags, (list, tuple)):
            for tag in tags:
                self.remove_tag(tag=tag)

    def get_tags(self, tenant="My Company Tags"):
        """ Get list of tags assigned to item.

        Details entities should have smart_management widget
        For vm, providers, and other like pages 'SummaryTable' widget should be used,
        for user, group like pages(no tables on details page) use 'SummaryForm'

        Args:
            tenant: string, tags tenant, default is "My Company Tags"

        Returns: :py:class:`list` List of Tag objects
        """
        view = navigate_to(self, 'Details')
        tags = []
        entities = view.entities
        if hasattr(entities, 'smart_management'):
            tag_table = entities.smart_management
        else:
            tag_table = entities.summary('Smart Management')
        tags_text = tag_table.get_text_of(tenant)
        if tags_text != 'No {} have been assigned'.format(tenant):
            # NOTE(review): assumes get_text_of() returns a list of
            # 'category: tag' strings here; if it ever returns a single
            # plain string, list() would iterate its characters -- confirm
            # against the widgetastic SummaryTable/SummaryForm behavior.
            for tag in list(tags_text):
                tag_category, tag_name = tag.split(':')
                tags.append(Tag(category=Category(display_name=tag_category),
                                display_name=tag_name.strip()))
        return tags

    def _tags_action(self, view, cancel, reset):
        """ Actions on edit tags page

        Args:
            view: View to use these actions(tag view)
            cancel: Set True to cancel all changes, will redirect to details page
            reset: Set True to reset all changes, edit tag page should be opened
        """
        if reset:
            view.form.reset.click()
            view.flash.assert_message('All changes have been reset')
        if cancel:
            view.form.cancel.click()
            view.flash.assert_success_message('Tag Edit was cancelled by the user')
        # Save only when neither reset nor cancel was requested.
        if not reset and not cancel:
            view.form.save.click()
            view.flash.assert_success_message('Tag edits were successfully saved')
@navigator.register(WidgetasticTaggable, 'EditTagsFromDetails')
class EditTagsFromDetails(CFMENavigateStep):
    # Reach the Edit Tags page from an object's Details page.
    VIEW = TagPageView
    prerequisite = NavigateToSibling('Details')

    def step(self):
        self.prerequisite_view.toolbar.policy.item_select('Edit Tags')
@navigator.register(WidgetasticTaggable, 'EditTags')
class EditTagsFromListCollection(CFMENavigateStep):
    # Reach the Edit Tags page from a collection All page by checking one
    # or more entities first.
    VIEW = TagPageView

    def prerequisite(self):
        # Collections navigate to their own 'All' page; entities navigate
        # to their parent collection's 'All' page.
        if isinstance(self.obj, BaseCollection) or not isinstance(self.obj, BaseEntity):
            return navigate_to(self.obj, 'All')
        else:
            return navigate_to(self.obj.parent, 'All')

    def step(self, **kwargs):
        """
        kwargs: pass an entities objects or entities names

        Return: navigation step
        """
        if kwargs:
            for _, entity in kwargs.items():
                # Accept either an entity object or a plain name string.
                name = entity.name if isinstance(entity, BaseEntity) else entity
                self.prerequisite_view.entities.get_entity(
                    surf_pages=True, name=name).check()
        else:
            self.prerequisite_view.entities.get_entity(surf_pages=True, name=self.obj.name).check()
        self.prerequisite_view.toolbar.policy.item_select('Edit Tags')
class Validatable(object):
    """Mixin for various validations. Requires the class to be also :py:class:`Taggable`.

    :var :py:attr:`property_tuples`: Tuples which first value is the provider class's attribute
        name, the second value is provider's UI summary page field key. Should have values in
        child classes.
    """
    # Overridden by child classes: (attribute_name, ui_field_key) pairs.
    property_tuples = []

    def validate_properties(self):
        """Validation method which checks whether class attributes, which were used during creation
        of provider, are correctly displayed in Properties section of provider UI.

        The maps between class attribute and UI property is done via 'property_tuples' variable.

        Fails if some property does not match.
        """
        self.load_details(refresh=False)
        for property_tuple in self.property_tuples:
            expected_value = str(getattr(self, property_tuple[0], ''))
            shown_value = self.get_detail("Properties", property_tuple[1])
            # assert (rather than raise) is intentional: these validators
            # are called from test code.
            assert expected_value == shown_value,\
                ("Property '{}' has wrong value, expected '{}' but was '{}'"
                 .format(property_tuple, expected_value, shown_value))

    def validate_tags(self, reference_tags):
        """Validation method which check tagging between UI and provided reference_tags.

        To use this method, `self`/`caller` should be extended with `Taggable` class

        Args:
            reference_tags: If you want to compare user input with UI, pass user input
                as `reference_tags`
        """
        # NOTE(review): raising KeyError for a bad argument type is unusual
        # (TypeError would be conventional) -- kept for compatibility.
        if reference_tags and not isinstance(reference_tags, list):
            raise KeyError("'reference_tags' should be an instance of list")
        tags = self.get_tags()
        # Verify tags
        assert len(tags) == len(reference_tags), \
            ("Tags count between Provided and UI mismatch, expected '{}' but was '{}'"
             .format(reference_tags, tags))
        for ref_tag in reference_tags:
            found = False
            for tag in tags:
                # Match on both category and tag display names.
                if ref_tag.category.display_name == tag.category.display_name \
                        and ref_tag.display_name == tag.display_name:
                    found = True
            assert found, ("Tag '{}' not found in UI".format(ref_tag))
class TopologyMixin(object):
    """Use this mixin to have simple access to the Topology page.

    To use this `TopologyMixin` you have to implement the
    `load_topology_page` function, which should navigate to the topology
    page.

    Sample usage:

    .. code-block:: python

        # You can retrieve the elements details as it is in the UI
        topology.elements  # => 'hostname'
        # You can do actions on topology page
        topology.display_names.enable()
        topology.display_names.disable()
        topology.display_names.is_enabled
        # You can do actions on topology search box
        topology.search_box.text(text='hello')
        topology.search_box.text(text='hello', submit=False)
        topology.search_box.submit()
        topology.search_box.clear()
        # You can get legends and can perform actions
        topology.legends
        topology.pod.name
        topology.pod.is_active
        topology.pod.set_active()
        # You can get elements, element parents and children
        topology.elements
        topology.elements[0].parents
        topology.elements[0].children
        topology.elements[0].double_click()
        topology.elements[0].is_displayed()
    """
    # Kept as a placeholder until the widgetastic Topology implementation
    # lands; the mixin currently only documents the intended interface.
    # @cached_property
    # def topology(self):
    #     return Topology(self)
class UtilizationMixin(object):
    """Use this mixin to have simple access to the Utilization information of an object.

    Requires that the class(page) has ``load_details(refresh)`` method
    and ``taggable_type`` should be defined.

    All the chart names from the UI are "attributized".

    Sample usage:

    .. code-block:: python

        # You can list available charts
        page.utilization.charts  # => '[ 'jvm_heap_usage_bytes','web_sessions','transactions']'
        # You can get the data from chart
        page.utilization.jvm_heap_usage_bytes.list_data_chart()  # => returns data as list
        # You can get the data from table
        provider.utilization.jvm_heap_usage_bytes.list_data_table()  # => returns data as list
        # You can get the data from wrapanapi
        page.utilization.jvm_heap_usage_bytes.list_data_mgmt()  # => returns data as list
        # You can change chart option
        page.utilization.jvm_non_heap_usage_bytes.option.set_by_visible_text(op_interval='Daily')
        # You can list available legends
        page.utilization.jvm_non_heap_usage_bytes.legends
        # You can enable/disable legends
        page.utilization.jvm_non_heap_usage_bytes.committed.set_active(active=False)  # => Disables
        page.utilization.jvm_non_heap_usage_bytes.committed.set_active(active=True)  # => Enables
    """
    # Kept as a placeholder until the widgetastic Utilization implementation
    # lands; the mixin currently only documents the intended interface.
    # @cached_property
    # def utilization(self):
    #     return Utilization(self)
| gpl-2.0 |
guillaume-philippon/aquilon | lib/aquilon/worker/commands/update_parameter_definition_feature.py | 1 | 2498 | # -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2009,2010,2011,2012,2013,2014,2015,2016,2017 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from aquilon.aqdb.model import Feature, ParamDefinition
from aquilon.worker.broker import BrokerCommand
from aquilon.worker.dbwrappers.change_management import validate_prod_feature
from aquilon.worker.dbwrappers.parameter import (add_feature_paramdef_plenaries,
lookup_paramdef,
update_paramdef_schema)
class CommandUpdParameterDefintionFeature(BrokerCommand):
    """Update a parameter definition attached to a feature.

    Updates the schema, default value, required flag and/or description of
    a parameter definition belonging to a host/hardware/interface feature.
    """

    requires_plenaries = True
    required_parameters = ["feature", "type", "path"]

    def render(self, session, logger, plenaries, feature, type, path, schema,
               clear_schema, required, default, clear_default, description,
               user, justification, reason, **_):
        # Resolve the concrete Feature subclass for the requested type, then
        # look up the feature and the parameter definition inside it.
        cls = Feature.polymorphic_subclass(type, "Unknown feature type")
        dbfeature = cls.get_unique(session, name=feature, compel=True)

        path = ParamDefinition.normalize_path(path)
        db_paramdef, _ = lookup_paramdef(dbfeature, path)

        # Changing the default value impacts all personalities which do not
        # override it, so more scrutiny is needed
        if default is not None or clear_default:
            validate_prod_feature(dbfeature, user, justification, reason, logger)
            add_feature_paramdef_plenaries(session, dbfeature, plenaries)
            # default is None when --clear_default was passed.
            db_paramdef.default = default

        if required is not None:
            db_paramdef.required = required
        if description is not None:
            db_paramdef.description = description

        if schema:
            update_paramdef_schema(session, db_paramdef, schema)
        elif clear_schema:
            db_paramdef.schema = None

        session.flush()

        plenaries.write(verbose=True)

        return
| apache-2.0 |
abadger/ansible | test/lib/ansible_test/_internal/commands/sanity/import.py | 13 | 9115 | """Sanity test for proper import exception handling."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ... import types as t
from . import (
SanityMultipleVersion,
SanityMessage,
SanityFailure,
SanitySuccess,
SanitySkipped,
SANITY_ROOT,
)
from ...target import (
TestTarget,
)
from ...util import (
ANSIBLE_TEST_DATA_ROOT,
SubprocessError,
remove_tree,
display,
parse_to_list_of_dict,
is_subdir,
generate_pip_command,
find_python,
get_hash,
REMOTE_ONLY_PYTHON_VERSIONS,
)
from ...util_common import (
intercept_command,
run_command,
ResultType,
)
from ...ansible_util import (
ansible_environment,
)
from ...executor import (
generate_pip_install,
install_cryptography,
)
from ...config import (
SanityConfig,
)
from ...coverage_util import (
coverage_context,
)
from ...venv import (
create_virtual_environment,
)
from ...data import (
data_context,
)
def _get_module_test(module_restrictions):  # type: (bool) -> t.Callable[[str], bool]
    """Create a predicate which tests whether a path can be used by modules or not."""
    # Capture the module directories once so the returned predicate does not
    # re-query the data context on every call.
    content = data_context().content
    module_dirs = (content.module_path, content.module_utils_path)

    def in_module_tree(path):  # type: (str) -> bool
        return any(is_subdir(path, directory) for directory in module_dirs)

    if module_restrictions:
        return in_module_tree

    return lambda path: not in_module_tree(path)
class ImportTest(SanityMultipleVersion):
    """Sanity test for proper import exception handling."""
    def filter_targets(self, targets):  # type: (t.List[TestTarget]) -> t.List[TestTarget]
        """Return the given list of test targets, filtered to include only those relevant for the test."""
        # Only Python files that live under one of the content's plugin paths are tested.
        return [target for target in targets if os.path.splitext(target.path)[1] == '.py' and
                any(is_subdir(target.path, path) for path in data_context().content.plugin_paths.values())]

    def test(self, args, targets, python_version):
        """
        Run the import sanity test for the given targets under one Python version.

        :type args: SanityConfig
        :type targets: SanityTargets
        :type python_version: str
        :rtype: TestResult
        """
        settings = self.load_processor(args, python_version)

        paths = [target.path for target in targets.include]

        # Suppress pip output unless verbosity has been raised (-vv or higher).
        capture_pip = args.verbosity < 2

        python = find_python(python_version)

        if python_version.startswith('2.') and args.requirements:
            # hack to make sure that virtualenv is available under Python 2.x
            # on Python 3.x we can use the built-in venv
            pip = generate_pip_command(python)
            run_command(args, generate_pip_install(pip, '', packages=['virtualenv']), capture=capture_pip)

        env = ansible_environment(args, color=False)

        temp_root = os.path.join(ResultType.TMP.path, 'sanity', 'import')

        messages = []

        # Modules are tested without the ansible requirements installed while
        # plugins are tested with them (add_ansible_requirements flag below).
        for import_type, test, add_ansible_requirements in (
                ('module', _get_module_test(True), False),
                ('plugin', _get_module_test(False), True),
        ):
            if import_type == 'plugin' and python_version in REMOTE_ONLY_PYTHON_VERSIONS:
                # Plugins are not tested against remote-only Python versions.
                continue

            # Newline-delimited list of paths which is passed to the importer on stdin.
            data = '\n'.join([path for path in paths if test(path)])

            if not data:
                continue

            requirements_file = None

            # create a clean virtual environment to minimize the available imports beyond the python standard library
            virtual_environment_dirname = 'minimal-py%s' % python_version.replace('.', '')

            if add_ansible_requirements:
                requirements_file = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', 'sanity.import-plugins.txt')
                # Embed the requirements hash in the directory name so a requirements
                # change invalidates any previously cached environment.
                virtual_environment_dirname += '-requirements-%s' % get_hash(requirements_file)

            virtual_environment_path = os.path.join(temp_root, virtual_environment_dirname)
            virtual_environment_bin = os.path.join(virtual_environment_path, 'bin')

            remove_tree(virtual_environment_path)

            if not create_virtual_environment(args, python_version, virtual_environment_path):
                display.warning("Skipping sanity test '%s' on Python %s due to missing virtual environment support." % (self.name, python_version))
                return SanitySkipped(self.name, python_version)

            # add the importer to our virtual environment so it can be accessed through the coverage injector
            importer_path = os.path.join(virtual_environment_bin, 'importer.py')
            yaml_to_json_path = os.path.join(virtual_environment_bin, 'yaml_to_json.py')
            if not args.explain:
                os.symlink(os.path.abspath(os.path.join(SANITY_ROOT, 'import', 'importer.py')), importer_path)
                os.symlink(os.path.abspath(os.path.join(SANITY_ROOT, 'import', 'yaml_to_json.py')), yaml_to_json_path)

            # activate the virtual environment
            env['PATH'] = '%s:%s' % (virtual_environment_bin, env['PATH'])

            env.update(
                SANITY_TEMP_PATH=ResultType.TMP.path,
                SANITY_IMPORTER_TYPE=import_type,
            )

            if data_context().content.collection:
                env.update(
                    SANITY_COLLECTION_FULL_NAME=data_context().content.collection.full_name,
                    SANITY_EXTERNAL_PYTHON=python,
                )

            virtualenv_python = os.path.join(virtual_environment_bin, 'python')
            virtualenv_pip = generate_pip_command(virtualenv_python)

            # make sure requirements are installed if needed
            if requirements_file:
                install_cryptography(args, virtualenv_python, python_version, virtualenv_pip)
                run_command(args, generate_pip_install(virtualenv_pip, 'sanity', context='import-plugins'), env=env, capture=capture_pip)

            # make sure coverage is available in the virtual environment if needed
            if args.coverage:
                run_command(args, generate_pip_install(virtualenv_pip, '', packages=['setuptools']), env=env, capture=capture_pip)
                run_command(args, generate_pip_install(virtualenv_pip, '', packages=['coverage']), env=env, capture=capture_pip)

            try:
                # In some environments pkg_resources is installed as a separate pip package which needs to be removed.
                # For example, using Python 3.8 on Ubuntu 18.04 a virtualenv is created with only pip and setuptools.
                # However, a venv is created with an additional pkg-resources package which is independent of setuptools.
                # Making sure pkg-resources is removed preserves the import test consistency between venv and virtualenv.
                # Additionally, in the above example, the pyparsing package vendored with pkg-resources is out-of-date and generates deprecation warnings.
                # Thus it is important to remove pkg-resources to prevent system installed packages from generating deprecation warnings.
                run_command(args, virtualenv_pip + ['uninstall', '--disable-pip-version-check', '-y', 'pkg-resources'], env=env, capture=capture_pip)
            except SubprocessError:
                # pkg-resources may not be present; removal is best-effort.
                pass

            # Strip setuptools and pip from the environment so the importer only
            # sees the standard library plus what was deliberately installed above.
            run_command(args, virtualenv_pip + ['uninstall', '--disable-pip-version-check', '-y', 'setuptools'], env=env, capture=capture_pip)
            run_command(args, virtualenv_pip + ['uninstall', '--disable-pip-version-check', '-y', 'pip'], env=env, capture=capture_pip)

            display.info(import_type + ': ' + data, verbosity=4)

            cmd = ['importer.py']

            try:
                with coverage_context(args):
                    stdout, stderr = intercept_command(args, cmd, self.name, env, capture=True, data=data, python_version=python_version,
                                                       virtualenv=virtualenv_python)

                if stdout or stderr:
                    raise SubprocessError(cmd, stdout=stdout, stderr=stderr)
            except SubprocessError as ex:
                # Status 10 with stdout and no stderr is treated as the importer's
                # structured error report; anything else is re-raised as a failure.
                if ex.status != 10 or ex.stderr or not ex.stdout:
                    raise

                pattern = r'^(?P<path>[^:]*):(?P<line>[0-9]+):(?P<column>[0-9]+): (?P<message>.*)$'

                parsed = parse_to_list_of_dict(pattern, ex.stdout)

                relative_temp_root = os.path.relpath(temp_root, data_context().content.root) + os.path.sep

                messages += [SanityMessage(
                    message=r['message'],
                    path=os.path.relpath(r['path'], relative_temp_root) if r['path'].startswith(relative_temp_root) else r['path'],
                    line=int(r['line']),
                    column=int(r['column']),
                ) for r in parsed]

        results = settings.process_errors(messages, paths)

        if results:
            return SanityFailure(self.name, messages=results, python_version=python_version)

        return SanitySuccess(self.name, python_version=python_version)
| gpl-3.0 |
lu-ci/apex-sigma-plugins | minigames/professions/upgrades.py | 3 | 1781 | import discord
from .nodes.upgrades import upgrade_list
def calculate_upgrade(up_id, level):
    """Return the display stats ({'amount', 'end'}) for upgrade *up_id* at *level*."""

    def cooldown_cut(base, scaled_level):
        # Seconds shaved off a base cooldown; negative so it reads as a reduction.
        # Operation order matches the original formula exactly (int truncation included).
        return -(base - (int(base - ((base / 100) * scaled_level))))

    up_table = {
        'stamina': {
            'amount': cooldown_cut(60, level * 0.5),
            'end': 'Seconds'
        },
        'luck': {
            'amount': level * 250,
            'end': 'Bonus'
        },
        'storage': {
            'amount': 64 + (level * 8),
            'end': 'Spaces'
        },
        'oven': {
            'amount': cooldown_cut(3600, level * 0.2),
            'end': 'Seconds'
        },
        'casino': {
            'amount': cooldown_cut(60, level * 0.5),
            'end': 'Seconds'
        }
    }
    return up_table[up_id]
async def upgrades(cmd, message, args):
    """Show the Sigma upgrade levels for the message author or the first mentioned user.

    Looks up the target's Upgrades document, creating an empty one on first
    use, and replies with an embed listing every upgrade's level and effect.
    """
    if message.mentions:
        target = message.mentions[0]
    else:
        target = message.author
    upgrade_file = cmd.db[cmd.db.db_cfg.database].Upgrades.find_one({'UserID': target.id})
    if upgrade_file is None:
        # First lookup for this user: create an empty record and treat all levels as 0.
        cmd.db[cmd.db.db_cfg.database].Upgrades.insert_one({'UserID': target.id})
        upgrade_file = {}
    upgrade_text = ''
    for upgrade in upgrade_list:
        upgrade_id = upgrade['id']
        # A missing key means the upgrade was never leveled.
        upgrade_level = upgrade_file.get(upgrade_id, 0)
        up_data = calculate_upgrade(upgrade_id, upgrade_level)
        upgrade_text += f'\n**Level {upgrade_level}** {upgrade["name"]}: **{up_data["amount"]} {up_data["end"]}**'
    upgrade_list_embed = discord.Embed(color=0xF9F9F9, title=f'🛍 {target.display_name}\'s Sigma Upgrades')
    upgrade_list_embed.description = upgrade_text
    await message.channel.send(embed=upgrade_list_embed)
| gpl-3.0 |
nugget/home-assistant | tests/components/sensor/test_moon.py | 4 | 1666 | """The test for the moon sensor platform."""
import unittest
from datetime import datetime
from unittest.mock import patch
import homeassistant.util.dt as dt_util
from homeassistant.setup import setup_component
from tests.common import get_test_home_assistant
# Fixed UTC timestamps used to pin the mocked "now" in the tests below:
# 2017-01-01 is asserted as waxing_crescent and 2017-01-18 as waning_gibbous.
DAY1 = datetime(2017, 1, 1, 1, tzinfo=dt_util.UTC)
DAY2 = datetime(2017, 1, 18, 1, tzinfo=dt_util.UTC)
class TestMoonSensor(unittest.TestCase):
    """Test the Moon sensor."""

    def setup_method(self, method):
        """Set up things to be run when tests are started."""
        self.hass = get_test_home_assistant()

    def teardown_method(self, method):
        """Stop everything that was started."""
        self.hass.stop()

    def _setup_moon_sensor(self, name):
        """Set up a moon sensor platform with the given entity name."""
        config = {
            'sensor': {
                'platform': 'moon',
                'name': name,
            }
        }
        assert setup_component(self.hass, 'sensor', config)

    @patch('homeassistant.components.sensor.moon.dt_util.utcnow',
           return_value=DAY1)
    def test_moon_day1(self, mock_request):
        """Test the Moon sensor."""
        self._setup_moon_sensor('moon_day1')
        state = self.hass.states.get('sensor.moon_day1')
        assert state.state == 'waxing_crescent'

    @patch('homeassistant.components.sensor.moon.dt_util.utcnow',
           return_value=DAY2)
    def test_moon_day2(self, mock_request):
        """Test the Moon sensor."""
        self._setup_moon_sensor('moon_day2')
        state = self.hass.states.get('sensor.moon_day2')
        assert state.state == 'waning_gibbous'
| apache-2.0 |
kwade00/xbmc | lib/libUPnP/Neptune/Extras/Scripts/CodeScanner.py | 264 | 4286 | #! /usr/bin/python
import os
import os.path
import re
import sys
# Matches "XXX_ERROR_NAME = <value>" definitions; group 1 is the error name,
# group 2 the value expression (possibly of the form "SOME_BASE - offset").
ErrorPattern = re.compile('([A-Z]{3}_ERROR_[A-Z_0-9]+)\s+=?\s*\(?([A-Z_0-9-][A-Z_0-9-+ ]+[A-Z_0-9])')
# Matches NPT_SET_LOCAL_LOGGER("name") declarations; group 1 is the logger name.
LoggerPattern = re.compile('NPT_SET_LOCAL_LOGGER\s*\([ "]*(\S+)[ "]*\)')
# Matches "return ..._FAILURE" style statements (naked error returns).
NakedErrorPattern = re.compile('return.*[ \(]..._FAILURE')
# File name filters for headers and C/C++ sources.
FilePatternH = re.compile('^.*\.h$')
FilePatternC = re.compile('^.*\.(c|cpp)$')
# Module-level accumulators shared by the scanning functions below:
# Errors maps error name -> value expression (resolved to an int in place),
# Codes maps resolved numeric code -> error name, Loggers collects logger names.
Errors = {}
Codes = {}
Loggers = []
def ResolveErrors():
    """Iteratively resolve string entries in Errors ("BASE - offset") to ints.

    Repeats until a full pass makes no progress, so chains of definitions
    that reference other error bases resolve in any order. (Python 2 code:
    uses dict.has_key.)
    """
    keep_going = True
    while keep_going:
        keep_going = False
        for key in Errors.keys():
            value = Errors[key]
            if type(value) is str:
                # Split "BASE - offset"; an empty first element means "-offset".
                elements = [x.strip() for x in value.split('-')]
                if len(elements[0]) == 0:
                    first = 0
                else:
                    first = elements[0]
                # Substitute a previously resolved base value if available.
                if Errors.has_key(first):
                    first = Errors[first]
                if not type(first) is str:
                    # Base is numeric now: compute and store the final code.
                    second = int(elements[1])
                    Errors[key] = first-second
                    keep_going = True
def AnalyzeErrorCodes(file):
    """Record every error-code definition found in *file* into the Errors map."""
    source = open(file)
    for line in source.readlines():
        found = ErrorPattern.search(line)
        if found:
            name, value = found.group(1), found.group(2)
            Errors[name] = value
    source.close()
def ScanErrorCodes(top):
    """Scan all headers under *top*, resolve the codes and print them sorted.

    Raises if two error names resolve to the same numeric code. A blank line
    is printed between non-contiguous code runs. (Python 2 code: uses
    dict.has_key, list.sort() on dict.keys() and print statements.)
    """
    for root, dirs, files in os.walk(top):
        for file in files:
            if FilePatternH.match(file):
                AnalyzeErrorCodes(os.path.join(root, file))
    ResolveErrors()
    for key in Errors:
        #print key,"==>",Errors[key]
        # Skip the *_ERROR_BASE anchors; only leaf codes must be unique.
        if (key.find("ERROR_BASE") > 1): continue
        if Codes.has_key(Errors[key]):
            raise Exception("duplicate error code: "+ str(key) +" --> " + str(Errors[key]) + "=" + Codes[Errors[key]])
        Codes[Errors[key]] = key
    sorted_keys = Codes.keys()
    sorted_keys.sort()
    sorted_keys.reverse()
    last = 0
    for code in sorted_keys:
        # Unresolved (non-int) entries are silently skipped.
        if type(code) != int:
            continue
        # Separate non-consecutive code ranges with a blank line.
        if code != last-1:
            print
        print code,"==>", Codes[code]
        last = code
def AnalyzeLoggers(file):
    """Collect every NPT_SET_LOCAL_LOGGER name declared in *file* (no duplicates)."""
    source = open(file)
    for line in source.readlines():
        found = LoggerPattern.search(line)
        if found:
            logger_name = found.group(1)
            if logger_name not in Loggers:
                Loggers.append(logger_name)
    source.close()
def ScanLoggers(top):
    """Scan all C/C++ sources under *top* and print the logger names sorted.

    (Python 2 code: uses the print statement.)
    """
    for root, dirs, files in os.walk(top):
        for file in files:
            if FilePatternC.match(file):
                AnalyzeLoggers(os.path.join(root, file))
    Loggers.sort()
    for logger in Loggers:
        print logger
def AnalyzeNakedErrors(file, prefix):
    """Print every naked "return ..._FAILURE" in *file* with its line number.

    *prefix* is stripped from the front of the printed path. (Python 2 code:
    the trailing comma on the print suppresses the extra newline since *line*
    already ends with one.)
    """
    line_number = 0
    input = open(file)
    for line in input.readlines():
        line_number += 1
        m = NakedErrorPattern.search(line)
        if m:
            print file[len(prefix):],line_number," --> ", line,
    input.close()
def ScanNakedErrors(top):
    """Walk *top* and report naked error returns in every C/C++ source file."""
    for root, dirs, files in os.walk(top):
        matching = [name for name in files if FilePatternC.match(name)]
        for name in matching:
            AnalyzeNakedErrors(os.path.join(root, name), top)
def FindTabsInFile(file):
    """Print a notice and return as soon as *file* contains a TAB character.

    (Python 2 code: uses the print statement.)
    """
    input = open(file)
    for line in input.readlines():
        if line.find('\t') >= 0:
            print "TAB found in", file
            # One notice per file is enough; close and stop scanning.
            input.close()
            return
    input.close()
def FindTabs(top):
    """Walk *top* and flag any C/C++ source or header containing a TAB."""
    for root, dirs, files in os.walk(top):
        for name in files:
            is_source = FilePatternC.match(name) or FilePatternH.match(name)
            if is_source:
                FindTabsInFile(os.path.join(root, name))
####################################################
# main
####################################################
sys.argv.reverse()
sys.argv.pop()
action = None
top = None
while len(sys.argv):
arg = sys.argv.pop()
if arg == '--list-error-codes':
action = ScanErrorCodes
elif arg == '--list-loggers':
action = ScanLoggers
elif arg == '--list-naked-errors':
action = ScanNakedErrors
elif arg == '--find-tabs':
action = FindTabs
elif top == None:
top = arg
else:
raise "unexpected argument " + arg
if not action or not top:
print "CodeScanner {--list-error-codes | --list-loggers | --find-tabs} <directory-root>"
sys.exit(1)
action(top)
| gpl-2.0 |
theflofly/tensorflow | tensorflow/python/data/experimental/kernel_tests/prefetch_to_device_test.py | 3 | 8640 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for `tf.data.experimental.prefetch_to_device()`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.data.experimental.ops import prefetching_ops
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import test_util
from tensorflow.python.platform import test
# TODO(b/117581999): add eager coverage when supported.
class PrefetchToDeviceTest(test_base.DatasetTestBase):
  """Tests for the `tf.data.experimental.prefetch_to_device()` transformation."""

  @test_util.deprecated_graph_mode_only
  def testPrefetchToDevice(self):
    """Prefetching to a second CPU device yields the same elements in order."""
    host_dataset = dataset_ops.Dataset.range(10)
    device_dataset = host_dataset.apply(
        prefetching_ops.prefetch_to_device("/cpu:1"))

    # The iterator is created on the same device the dataset was prefetched to.
    with ops.device("/cpu:1"):
      iterator = dataset_ops.make_one_shot_iterator(device_dataset)
      next_element = iterator.get_next()

    # Prefetching must not change the dataset structure.
    self.assertTrue(dataset_ops.get_structure(host_dataset).is_compatible_with(
        dataset_ops.get_structure(device_dataset)))
    self.assertTrue(dataset_ops.get_structure(host_dataset).is_compatible_with(
        dataset_ops.get_structure(iterator)))

    self.assertEqual(dtypes.int64, next_element.dtype)
    self.assertEqual([], next_element.shape)

    # Run with two CPU devices so "/cpu:1" exists in the session.
    worker_config = config_pb2.ConfigProto(device_count={"CPU": 2})
    with self.test_session(config=worker_config):
      for i in range(10):
        self.assertEqual(i, self.evaluate(next_element))
      with self.assertRaises(errors.OutOfRangeError):
        self.evaluate(next_element)

  @test_util.deprecated_graph_mode_only
  def testPrefetchToSameDevice(self):
    """Prefetching to the dataset's own device still works correctly."""
    host_dataset = dataset_ops.Dataset.range(10)
    device_dataset = host_dataset.apply(
        prefetching_ops.prefetch_to_device(
            "/job:localhost/replica:0/task:0/device:CPU:0"))

    with ops.device("/cpu:1"):
      iterator = dataset_ops.make_one_shot_iterator(device_dataset)
      next_element = iterator.get_next()

    self.assertTrue(dataset_ops.get_structure(host_dataset).is_compatible_with(
        dataset_ops.get_structure(device_dataset)))
    self.assertTrue(dataset_ops.get_structure(host_dataset).is_compatible_with(
        dataset_ops.get_structure(iterator)))

    self.assertEqual(dtypes.int64, next_element.dtype)
    self.assertEqual([], next_element.shape)

    with self.cached_session():
      for i in range(10):
        self.assertEqual(i, self.evaluate(next_element))
      with self.assertRaises(errors.OutOfRangeError):
        self.evaluate(next_element)

  @test_util.deprecated_graph_mode_only
  def testPrefetchDictToDevice(self):
    """Dict-structured elements survive prefetching to another device."""
    host_dataset = dataset_ops.Dataset.range(10).map(lambda x: {"a": x})
    device_dataset = host_dataset.apply(
        prefetching_ops.prefetch_to_device("/cpu:1"))

    with ops.device("/cpu:1"):
      iterator = dataset_ops.make_one_shot_iterator(device_dataset)
      next_element = iterator.get_next()

    self.assertTrue(dataset_ops.get_structure(host_dataset).is_compatible_with(
        dataset_ops.get_structure(device_dataset)))
    self.assertTrue(dataset_ops.get_structure(host_dataset).is_compatible_with(
        dataset_ops.get_structure(iterator)))

    self.assertEqual(dtypes.int64, next_element["a"].dtype)
    self.assertEqual([], next_element["a"].shape)

    worker_config = config_pb2.ConfigProto(device_count={"CPU": 2})
    with self.test_session(config=worker_config):
      for i in range(10):
        self.assertEqual({"a": i}, self.evaluate(next_element))
      with self.assertRaises(errors.OutOfRangeError):
        self.evaluate(next_element)

  @test_util.deprecated_graph_mode_only
  def testPrefetchSparseTensorsToDevice(self):
    """Sparse elements keep values, indices and dense_shape after prefetching."""
    def make_tensor(i):
      return sparse_tensor.SparseTensorValue(
          indices=[[0, 0]], values=(i*[1]), dense_shape=[2, 2])
    host_dataset = dataset_ops.Dataset.range(10).map(make_tensor)

    device_dataset = host_dataset.apply(
        prefetching_ops.prefetch_to_device("/cpu:1"))

    with ops.device("/cpu:1"):
      iterator = dataset_ops.make_one_shot_iterator(device_dataset)
      next_element = iterator.get_next()

    self.assertTrue(dataset_ops.get_structure(host_dataset).is_compatible_with(
        dataset_ops.get_structure(device_dataset)))
    self.assertTrue(dataset_ops.get_structure(host_dataset).is_compatible_with(
        dataset_ops.get_structure(iterator)))

    self.assertEqual(dtypes.int64, next_element.dtype)

    worker_config = config_pb2.ConfigProto(device_count={"CPU": 2})
    with self.test_session(config=worker_config):
      for i in range(10):
        actual = self.evaluate(next_element)
        self.assertAllEqual([i], actual.values)
        self.assertAllEqual([[0, 0]], actual.indices)
        self.assertAllEqual([2, 2], actual.dense_shape)
      with self.assertRaises(errors.OutOfRangeError):
        self.evaluate(next_element)

  @test_util.deprecated_graph_mode_only
  def testPrefetchToDeviceGpu(self):
    """Prefetching to a GPU device yields the same elements in order."""
    if not test_util.is_gpu_available():
      self.skipTest("No GPU available")

    host_dataset = dataset_ops.Dataset.range(10)
    device_dataset = host_dataset.apply(
        prefetching_ops.prefetch_to_device("/gpu:0"))

    iterator = dataset_ops.make_initializable_iterator(device_dataset)
    next_element = iterator.get_next()

    # allow_soft_placement=False ensures the ops really run on the GPU.
    with self.cached_session(
        config=config_pb2.ConfigProto(allow_soft_placement=False)):
      self.evaluate(iterator.initializer)
      for i in range(10):
        self.assertEqual(i, self.evaluate(next_element))
      with self.assertRaises(errors.OutOfRangeError):
        self.evaluate(next_element)

  @test_util.deprecated_graph_mode_only
  def testPrefetchToDeviceWithReInit(self):
    """Re-initializing the iterator restarts iteration from the beginning."""
    host_dataset = dataset_ops.Dataset.range(10)
    device_dataset = host_dataset.apply(
        prefetching_ops.prefetch_to_device("/cpu:1"))

    with ops.device("/cpu:1"):
      iterator = dataset_ops.make_initializable_iterator(device_dataset)
      next_element = iterator.get_next()

    self.assertTrue(dataset_ops.get_structure(host_dataset).is_compatible_with(
        dataset_ops.get_structure(device_dataset)))
    self.assertTrue(dataset_ops.get_structure(host_dataset).is_compatible_with(
        dataset_ops.get_structure(iterator)))

    self.assertEqual(dtypes.int64, next_element.dtype)
    self.assertEqual([], next_element.shape)

    worker_config = config_pb2.ConfigProto(device_count={"CPU": 2})
    with self.test_session(config=worker_config):
      self.evaluate(iterator.initializer)
      # Consume only half the dataset, then re-initialize mid-stream.
      for i in range(5):
        self.assertEqual(i, self.evaluate(next_element))
      self.evaluate(iterator.initializer)
      for i in range(10):
        self.assertEqual(i, self.evaluate(next_element))
      with self.assertRaises(errors.OutOfRangeError):
        self.evaluate(next_element)

  @test_util.deprecated_graph_mode_only
  def testPrefetchToDeviceGpuWithReInit(self):
    """Re-initialization also works when prefetching to a GPU device."""
    if not test_util.is_gpu_available():
      self.skipTest("No GPU available")

    host_dataset = dataset_ops.Dataset.range(10)
    device_dataset = host_dataset.apply(
        prefetching_ops.prefetch_to_device("/gpu:0"))

    iterator = dataset_ops.make_initializable_iterator(device_dataset)
    next_element = iterator.get_next()

    with self.cached_session(
        config=config_pb2.ConfigProto(allow_soft_placement=False)):
      self.evaluate(iterator.initializer)
      for i in range(5):
        self.assertEqual(i, self.evaluate(next_element))
      self.evaluate(iterator.initializer)
      for i in range(10):
        self.assertEqual(i, self.evaluate(next_element))
      with self.assertRaises(errors.OutOfRangeError):
        self.evaluate(next_element)
# Run the test suite when executed as a script.
if __name__ == "__main__":
  test.main()
| apache-2.0 |
liangjiaxing/sympy | sympy/core/tests/test_eval_power.py | 45 | 12209 | from sympy.core import (
Rational, Symbol, S, Float, Integer, Number, Pow,
Basic, I, nan, pi, symbols, oo, zoo)
from sympy.core.tests.test_evalf import NS
from sympy.functions.elementary.miscellaneous import sqrt, cbrt
from sympy.functions.elementary.exponential import exp, log
from sympy.functions.elementary.trigonometric import sin, cos
from sympy.series.order import O
def test_rational():
    """Powers of a Rational combine and factor out as expected."""
    a = Rational(1, 5)

    r = sqrt(5)/5
    assert sqrt(a) == r
    assert 2*sqrt(a) == 2*r

    r = a*a**Rational(1, 2)
    assert a**Rational(3, 2) == r
    assert 2*a**Rational(3, 2) == 2*r

    r = a**5*a**Rational(2, 3)
    assert a**Rational(17, 3) == r
    assert 2 * a**Rational(17, 3) == 2*r


def test_large_rational():
    """Cube roots of very large rationals extract the perfect-cube factor."""
    e = (Rational(123712**12 - 1, 7) + Rational(1, 7))**Rational(1, 3)
    assert e == 234232585392159195136 * (Rational(1, 7)**Rational(1, 3))


def test_negative_real():
    """Dividing by a negative Float gives the expected numeric result."""
    def feq(a, b):
        # Approximate float equality for the numeric comparison below.
        return abs(a - b) < 1E-10

    assert feq(S.One / Float(-0.5), -Integer(2))


def test_expand():
    """expand() pulls constant factors out of a power of 2."""
    x = Symbol('x')
    assert (2**(-1 - x)).expand() == Rational(1, 2)*2**(-x)
def test_issue_3449():
    #test if powers are simplified correctly
    #see also issue 3995
    x = Symbol('x')
    assert ((x**Rational(1, 3))**Rational(2)) == x**Rational(2, 3)
    assert (
        (x**Rational(3))**Rational(2, 5)) == (x**Rational(3))**Rational(2, 5)

    # For real (possibly negative) symbols, (b**e)**f may not combine.
    a = Symbol('a', real=True)
    b = Symbol('b', real=True)
    assert (a**2)**b == (abs(a)**b)**2
    assert sqrt(1/a) != 1/sqrt(a)  # e.g. for a = -1
    assert (a**3)**Rational(1, 3) != a
    assert (x**a)**b != x**(a*b)  # e.g. x = -1, a=2, b=1/2
    assert (x**.5)**b == x**(.5*b)
    assert (x**.5)**.5 == x**.25
    assert (x**2.5)**.5 != x**1.25  # e.g. for x = 5*I

    # Integer exponents always combine.
    k = Symbol('k', integer=True)
    m = Symbol('m', integer=True)
    assert (x**k)**m == x**(k*m)
    assert Number(5)**Rational(2, 3) == Number(25)**Rational(1, 3)

    assert (x**.5)**2 == x**1.0
    assert (x**2)**k == (x**k)**2 == x**(2*k)

    # Positive bases allow full combination of rational exponents.
    a = Symbol('a', positive=True)
    assert (a**3)**Rational(2, 5) == a**Rational(6, 5)
    assert (a**2)**b == (a**b)**2
    assert (a**Rational(2, 3))**x == (a**(2*x/3)) != (a**x)**Rational(2, 3)


def test_issue_3866():
    """Double unary minus cancels on an unevaluated sqrt expression."""
    assert --sqrt(sqrt(5) - 1) == sqrt(sqrt(5) - 1)


def test_negative_one():
    """1/x**y rewrites to x**(-y) for complex symbols."""
    x = Symbol('x', complex=True)
    y = Symbol('y', complex=True)
    assert 1/x**y == x**(-y)
def test_issue_4362():
    """as_numer_denom() handles sign assumptions on base and exponent correctly."""
    neg = Symbol('neg', negative=True)
    nonneg = Symbol('nonneg', nonnegative=True)
    any = Symbol('any')
    num, den = sqrt(1/neg).as_numer_denom()
    assert num == sqrt(-1)
    assert den == sqrt(-neg)
    num, den = sqrt(1/nonneg).as_numer_denom()
    assert num == 1
    assert den == sqrt(nonneg)
    num, den = sqrt(1/any).as_numer_denom()
    assert num == sqrt(1/any)
    assert den == 1

    def eqn(num, den, pow):
        # Helper: build (num/den)**pow for the sign-combination checks below.
        return (num/den)**pow

    # Numerators/denominators with known signs used in all combinations.
    npos = 1
    nneg = -1
    dpos = 2 - sqrt(3)
    dneg = 1 - sqrt(3)
    assert dpos > 0 and dneg < 0 and npos > 0 and nneg < 0
    # pos or neg integer
    eq = eqn(npos, dpos, 2)
    assert eq.is_Pow and eq.as_numer_denom() == (1, dpos**2)
    eq = eqn(npos, dneg, 2)
    assert eq.is_Pow and eq.as_numer_denom() == (1, dneg**2)
    eq = eqn(nneg, dpos, 2)
    assert eq.is_Pow and eq.as_numer_denom() == (1, dpos**2)
    eq = eqn(nneg, dneg, 2)
    assert eq.is_Pow and eq.as_numer_denom() == (1, dneg**2)
    eq = eqn(npos, dpos, -2)
    assert eq.is_Pow and eq.as_numer_denom() == (dpos**2, 1)
    eq = eqn(npos, dneg, -2)
    assert eq.is_Pow and eq.as_numer_denom() == (dneg**2, 1)
    eq = eqn(nneg, dpos, -2)
    assert eq.is_Pow and eq.as_numer_denom() == (dpos**2, 1)
    eq = eqn(nneg, dneg, -2)
    assert eq.is_Pow and eq.as_numer_denom() == (dneg**2, 1)
    # pos or neg rational
    pow = S.Half
    eq = eqn(npos, dpos, pow)
    assert eq.is_Pow and eq.as_numer_denom() == (npos**pow, dpos**pow)
    eq = eqn(npos, dneg, pow)
    assert eq.is_Pow is False and eq.as_numer_denom() == ((-npos)**pow, (-dneg)**pow)
    eq = eqn(nneg, dpos, pow)
    assert not eq.is_Pow or eq.as_numer_denom() == (nneg**pow, dpos**pow)
    eq = eqn(nneg, dneg, pow)
    assert eq.is_Pow and eq.as_numer_denom() == ((-nneg)**pow, (-dneg)**pow)
    eq = eqn(npos, dpos, -pow)
    assert eq.is_Pow and eq.as_numer_denom() == (dpos**pow, npos**pow)
    eq = eqn(npos, dneg, -pow)
    assert eq.is_Pow is False and eq.as_numer_denom() == (-(-npos)**pow*(-dneg)**pow, npos)
    eq = eqn(nneg, dpos, -pow)
    assert not eq.is_Pow or eq.as_numer_denom() == (dpos**pow, nneg**pow)
    eq = eqn(nneg, dneg, -pow)
    assert eq.is_Pow and eq.as_numer_denom() == ((-dneg)**pow, (-nneg)**pow)
    # unknown exponent
    pow = 2*any
    eq = eqn(npos, dpos, pow)
    assert eq.is_Pow and eq.as_numer_denom() == (npos**pow, dpos**pow)
    eq = eqn(npos, dneg, pow)
    assert eq.is_Pow and eq.as_numer_denom() == ((-npos)**pow, (-dneg)**pow)
    eq = eqn(nneg, dpos, pow)
    assert eq.is_Pow and eq.as_numer_denom() == (nneg**pow, dpos**pow)
    eq = eqn(nneg, dneg, pow)
    assert eq.is_Pow and eq.as_numer_denom() == ((-nneg)**pow, (-dneg)**pow)
    eq = eqn(npos, dpos, -pow)
    assert eq.as_numer_denom() == (dpos**pow, npos**pow)
    eq = eqn(npos, dneg, -pow)
    assert eq.is_Pow and eq.as_numer_denom() == ((-dneg)**pow, (-npos)**pow)
    eq = eqn(nneg, dpos, -pow)
    assert eq.is_Pow and eq.as_numer_denom() == (dpos**pow, nneg**pow)
    eq = eqn(nneg, dneg, -pow)
    assert eq.is_Pow and eq.as_numer_denom() == ((-dneg)**pow, (-nneg)**pow)

    x = Symbol('x')
    y = Symbol('y')
    assert ((1/(1 + x/3))**(-S.One)).as_numer_denom() == (3 + x, 3)
    notp = Symbol('notp', positive=False)  # not positive does not imply real
    b = ((1 + x/notp)**-2)
    assert (b**(-y)).as_numer_denom() == (1, b**y)
    assert (b**(-S.One)).as_numer_denom() == ((notp + x)**2, notp**2)
    nonp = Symbol('nonp', nonpositive=True)
    assert (((1 + x/nonp)**-2)**(-S.One)).as_numer_denom() == ((-nonp -
            x)**2, nonp**2)

    n = Symbol('n', negative=True)
    assert (x**n).as_numer_denom() == (1, x**-n)
    assert sqrt(1/n).as_numer_denom() == (S.ImaginaryUnit, sqrt(-n))
    n = Symbol('0 or neg', nonpositive=True)
    # if x and n are split up without negating each term and n is negative
    # then the answer might be wrong; if n is 0 it won't matter since
    # 1/oo and 1/zoo are both zero as is sqrt(0)/sqrt(-x) unless x is also
    # zero (in which case the negative sign doesn't matter):
    # 1/sqrt(1/-1) = -I but sqrt(-1)/sqrt(1) = I
    assert (1/sqrt(x/n)).as_numer_denom() == (sqrt(-n), sqrt(-x))
    c = Symbol('c', complex=True)
    e = sqrt(1/c)
    assert e.as_numer_denom() == (e, 1)
    i = Symbol('i', integer=True)
    assert (((1 + x/y)**i)).as_numer_denom() == ((x + y)**i, y**i)
def test_Pow_signs():
    """Cf. issues 4595 and 5250"""
    x = Symbol('x')
    y = Symbol('y')
    n = Symbol('n', even=True)
    # Powers of negated bases must not be collapsed without sign knowledge.
    assert (3 - y)**2 != (y - 3)**2
    assert (3 - y)**n != (y - 3)**n
    assert (-3 + y - x)**2 != (3 - y + x)**2
    assert (y - 3)**3 != -(3 - y)**3


def test_power_with_noncommutative_mul_as_base():
    """Powers of noncommutative products must not distribute over the factors."""
    x = Symbol('x', commutative=False)
    y = Symbol('y', commutative=False)
    assert not (x*y)**3 == x**3*y**3
    # Commutative scalar factors may still be extracted.
    assert (2*x*y)**3 == 8*(x*y)**3


def test_zero():
    """0**e simplification respects the sign/realness of the exponent."""
    x = Symbol('x')
    y = Symbol('y')
    assert 0**x != 0
    assert 0**(2*x) == 0**x
    assert 0**(1.0*x) == 0**x
    assert 0**(2.0*x) == 0**x
    assert (0**(2 - x)).as_base_exp() == (0, 2 - x)
    assert 0**(x - 2) != S.Infinity**(2 - x)
    assert 0**(2*x*y) == 0**(x*y)
    assert 0**(-2*x*y) == S.ComplexInfinity**(x*y)
    # 0 to an imaginary power is undefined.
    assert 0**I == nan
    i = Symbol('i', imaginary=True)
    assert 0**i == nan
def test_pow_as_base_exp():
    """as_base_exp() for powers of oo and for (1/2)**x."""
    x = Symbol('x')
    assert (S.Infinity**(2 - x)).as_base_exp() == (S.Infinity, 2 - x)
    assert (S.Infinity**(x - 2)).as_base_exp() == (S.Infinity, x - 2)
    p = S.Half**x
    # BUG FIX: this was "assert p.base, p.exp == p.as_base_exp() == (S(2), -x)",
    # which parses as "assert p.base" with the comparison as the (unevaluated)
    # assertion message, so the intended check never ran. as_base_exp() rewrites
    # a base of 1/q as base q with negated exponent.
    assert p.as_base_exp() == (S(2), -x)
    # issue 8344:
    assert Pow(1, 2, evaluate=False).as_base_exp() == (S(1), S(2))
def test_issue_6100():
    """x**1.0 compares equal to x but remains an (unevaluated) Pow."""
    x = Symbol('x')
    y = Symbol('y')
    assert x**1.0 == x
    assert x == x**1.0
    assert True != x**1.0
    assert x**1.0 is not True
    assert x is not True
    assert x*y == (x*y)**1.0
    assert (x**1.0)**1.0 == x
    assert (x**1.0)**2.0 == x**2
    b = Basic()
    assert Pow(b, 1.0, evaluate=False) == b
    # if the following gets distributed as a Mul (x**1.0*y**1.0 then
    # __eq__ methods could be added to Symbol and Pow to detect the
    # power-of-1.0 case.
    assert ((x*y)**1.0).func is Pow


def test_issue_6208():
    """Roots of powers with imaginary exponents pick the principal branch."""
    from sympy import root, Rational
    I = S.ImaginaryUnit
    assert sqrt(33**(9*I/10)) == -33**(9*I/20)
    assert root((6*I)**(2*I), 3).as_base_exp()[1] == Rational(1, 3)  # != 2*I/3
    assert root((6*I)**(I/3), 3).as_base_exp()[1] == I/9
    assert sqrt(exp(3*I)) == exp(3*I/2)
    assert sqrt(-sqrt(3)*(1 + 2*I)) == sqrt(sqrt(3))*sqrt(-1 - 2*I)
    assert sqrt(exp(5*I)) == -exp(5*I/2)
    assert root(exp(5*I), 3).exp == Rational(1, 3)
def test_issue_6990():
    """Series expansion of sqrt(a + b*x + x**2) around x=0."""
    x = Symbol('x')
    a = Symbol('a')
    b = Symbol('b')
    assert (sqrt(a + b*x + x**2)).series(x, 0, 3).removeO() == \
        b*x/(2*sqrt(a)) + x**2*(1/(2*sqrt(a)) - \
        b**2/(8*a**(S(3)/2))) + sqrt(a)


def test_issue_6068():
    """Series expansions of sqrt(sin(...)) include fractional powers of x."""
    x = Symbol('x')
    assert sqrt(sin(x)).series(x, 0, 7) == \
        sqrt(x) - x**(S(5)/2)/12 + x**(S(9)/2)/1440 - \
        x**(S(13)/2)/24192 + O(x**7)
    assert sqrt(sin(x)).series(x, 0, 9) == \
        sqrt(x) - x**(S(5)/2)/12 + x**(S(9)/2)/1440 - \
        x**(S(13)/2)/24192 - 67*x**(S(17)/2)/29030400 + O(x**9)
    assert sqrt(sin(x**3)).series(x, 0, 19) == \
        x**(S(3)/2) - x**(S(15)/2)/12 + x**(S(27)/2)/1440 + O(x**19)
    assert sqrt(sin(x**3)).series(x, 0, 20) == \
        x**(S(3)/2) - x**(S(15)/2)/12 + x**(S(27)/2)/1440 - \
        x**(S(39)/2)/24192 + O(x**20)


def test_issue_6782():
    """Short series of sqrt(sin(x**n)) truncate at the right order."""
    x = Symbol('x')
    assert sqrt(sin(x**3)).series(x, 0, 7) == x**(S(3)/2) + O(x**7)
    assert sqrt(sin(x**4)).series(x, 0, 3) == x**2 + O(x**3)


def test_issue_6653():
    """Series of 1/sqrt(1 + sin(x**2)) around x=0."""
    x = Symbol('x')
    assert (1 / sqrt(1 + sin(x**2))).series(x, 0, 3) == 1 - x**2/2 + O(x**3)


def test_issue_6429():
    """series() and taylor_term() agree for a power with a float exponent."""
    x = Symbol('x')
    c = Symbol('c')
    f = (c**2 + x)**(0.5)
    assert f.series(x, x0=0, n=1) == (c**2)**0.5 + O(x)
    assert f.taylor_term(0, x) == (c**2)**0.5
    assert f.taylor_term(1, x) == 0.5*x*(c**2)**(-0.5)
    assert f.taylor_term(2, x) == -0.125*x**2*(c**2)**(-1.5)
def test_issue_7638():
    """Branch-cut handling when nesting powers with complex/float exponents."""
    f = pi/log(sqrt(2))
    assert ((1 + I)**(I*f/2))**0.3 == (1 + I)**(0.15*I*f)
    # if 1/3 -> 1.0/3 this should fail since it cannot be shown that the
    # sign will be +/-1; for the previous "small arg" case, it didn't matter
    # that this could not be proved
    assert (1 + I)**(4*I*f) == ((1 + I)**(12*I*f))**(S(1)/3)
    assert (((1 + I)**(I*(1 + 7*f)))**(S(1)/3)).exp == S(1)/3
    r = symbols('r', real=True)
    assert sqrt(r**2) == abs(r)
    assert cbrt(r**3) != r
    assert sqrt(Pow(2*I, 5*S.Half)) != (2*I)**(5/S(4))
    p = symbols('p', positive=True)
    assert cbrt(p**2) == p**(2/S(3))
    assert NS(((0.2 + 0.7*I)**(0.7 + 1.0*I))**(0.5 - 0.1*I), 1) == '0.4 + 0.2*I'
    assert sqrt(1/(1 + I)) == sqrt((1 - I)/2)  # or 1/sqrt(1 + I)
    e = 1/(1 - sqrt(2))
    assert sqrt(e) == I/sqrt(-1 + sqrt(2))
    assert e**-S.Half == -I*sqrt(-1 + sqrt(2))
    assert sqrt((cos(1)**2 + sin(1)**2 - 1)**(3 + I)).exp == S.Half
    assert sqrt(r**(4/S(3))) != r**(2/S(3))
    assert sqrt((p + I)**(4/S(3))) == (p + I)**(2/S(3))
    assert sqrt((p - p**2*I)**2) == p - p**2*I
    assert sqrt((p + r*I)**2) != p + r*I
    e = (1 + I/5)
    assert sqrt(e**5) == e**(5*S.Half)
    assert sqrt(e**6) == e**3
    assert sqrt((1 + I*r)**6) != (1 + I*r)**3


def test_issue_8582():
    """1 raised to any infinite exponent is nan."""
    assert 1**oo is nan
    assert 1**(-oo) is nan
    assert 1**zoo is nan
    assert 1**(oo + I) is nan
    assert 1**(1 + I*oo) is nan
    assert 1**(oo + I*oo) is nan


def test_issue_8650():
    """n**n is known positive for nonnegative integer n."""
    n = Symbol('n', integer=True, nonnegative=True)
    assert (n**n).is_positive is True
    x = 5*n+5
    assert (x**(5*(n+1))).is_positive is True
| bsd-3-clause |
hkawasaki/kawasaki-aio8-2 | lms/envs/aws.py | 3 | 18142 | """
This is the default template for our main set of AWS servers. This does NOT
cover the content machines, which use content.py
Common traits:
* Use memcached, and cache-backed sessions
* Use a MySQL 5.1 database
"""
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=W0401, W0614
import json
from .common import *
from logsettings import get_logger_config
import os
from path import path
# SERVICE_VARIANT specifies name of the variant used, which decides what JSON
# configuration files are read during startup.
SERVICE_VARIANT = os.environ.get('SERVICE_VARIANT', None)
# CONFIG_ROOT specifies the directory where the JSON configuration
# files are expected to be found. If not specified, use the project
# directory (ENV_ROOT presumably comes from the star import of .common
# — TODO confirm).
CONFIG_ROOT = path(os.environ.get('CONFIG_ROOT', ENV_ROOT))
# CONFIG_PREFIX specifies the prefix of the JSON configuration files,
# based on the service variant. If no variant is used, don't use a
# prefix (e.g. "lms." -> "lms.env.json", "" -> "env.json").
CONFIG_PREFIX = SERVICE_VARIANT + "." if SERVICE_VARIANT else ""
################################ ALWAYS THE SAME ##############################
# Hard-coded production invariants: never DEBUG, SES mail, cache-backed
# sessions, S3-backed file storage.
DEBUG = False
TEMPLATE_DEBUG = False
EMAIL_BACKEND = 'django_ses.SESBackend'
SESSION_ENGINE = 'django.contrib.sessions.backends.cache'
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
# IMPORTANT: With this enabled, the server must always be behind a proxy that
# strips the header HTTP_X_FORWARDED_PROTO from client requests. Otherwise,
# a user can fool our server into thinking it was an https connection.
# See
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-proxy-ssl-header
# for other warnings.
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
###################################### CELERY ################################

# Don't use a connection pool, since connections are dropped by ELB.
BROKER_POOL_LIMIT = 0
BROKER_CONNECTION_TIMEOUT = 1

# For the Result Store, use the django cache named 'celery'
CELERY_RESULT_BACKEND = 'cache'
CELERY_CACHE_BACKEND = 'celery'

# When the broker is behind an ELB, use a heartbeat to refresh the
# connection and to detect if it has been dropped.
BROKER_HEARTBEAT = 10.0
BROKER_HEARTBEAT_CHECKRATE = 2

# Each worker should only fetch one message at a time
CELERYD_PREFETCH_MULTIPLIER = 1

# Skip djcelery migrations, since we don't use the database as the broker
SOUTH_MIGRATION_MODULES = {
    'djcelery': 'ignore',
}

# Rename the exchange and queues for each variant, so that multiple
# variants sharing one broker do not consume each other's messages.
QUEUE_VARIANT = CONFIG_PREFIX.lower()

CELERY_DEFAULT_EXCHANGE = 'edx.{0}core'.format(QUEUE_VARIANT)

HIGH_PRIORITY_QUEUE = 'edx.{0}core.high'.format(QUEUE_VARIANT)
DEFAULT_PRIORITY_QUEUE = 'edx.{0}core.default'.format(QUEUE_VARIANT)
LOW_PRIORITY_QUEUE = 'edx.{0}core.low'.format(QUEUE_VARIANT)
HIGH_MEM_QUEUE = 'edx.{0}core.high_mem'.format(QUEUE_VARIANT)

CELERY_DEFAULT_QUEUE = DEFAULT_PRIORITY_QUEUE
CELERY_DEFAULT_ROUTING_KEY = DEFAULT_PRIORITY_QUEUE

CELERY_QUEUES = {
    HIGH_PRIORITY_QUEUE: {},
    LOW_PRIORITY_QUEUE: {},
    DEFAULT_PRIORITY_QUEUE: {},
    HIGH_MEM_QUEUE: {},
}

# If we're a worker on the high_mem queue, set ourselves to die after processing
# one request to avoid having memory leaks take down the worker server. This env
# var is set in /etc/init/edx-workers.conf -- this should probably be replaced
# with some celery API call to see what queue we started listening to, but I
# don't know what that call is or if it's active at this point in the code.
if os.environ.get('QUEUE') == 'high_mem':
    CELERYD_MAX_TASKS_PER_CHILD = 1
########################## NON-SECURE ENV CONFIG ##############################
# Things like server locations, ports, etc.
with open(CONFIG_ROOT / CONFIG_PREFIX + "env.json") as env_file:
ENV_TOKENS = json.load(env_file)
# STATIC_ROOT specifies the directory where static files are
# collected
STATIC_ROOT_BASE = ENV_TOKENS.get('STATIC_ROOT_BASE', None)
if STATIC_ROOT_BASE:
STATIC_ROOT = path(STATIC_ROOT_BASE)
# STATIC_URL_BASE specifies the base url to use for static files
STATIC_URL_BASE = ENV_TOKENS.get('STATIC_URL_BASE', None)
if STATIC_URL_BASE:
# collectstatic will fail if STATIC_URL is a unicode string
STATIC_URL = STATIC_URL_BASE.encode('ascii')
if not STATIC_URL.endswith("/"):
STATIC_URL += "/"
PLATFORM_NAME = ENV_TOKENS.get('PLATFORM_NAME', PLATFORM_NAME)
# For displaying on the receipt. At Stanford PLATFORM_NAME != MERCHANT_NAME, but PLATFORM_NAME is a fine default
CC_MERCHANT_NAME = ENV_TOKENS.get('CC_MERCHANT_NAME', PLATFORM_NAME)
EMAIL_BACKEND = ENV_TOKENS.get('EMAIL_BACKEND', EMAIL_BACKEND)
EMAIL_FILE_PATH = ENV_TOKENS.get('EMAIL_FILE_PATH', None)
EMAIL_HOST = ENV_TOKENS.get('EMAIL_HOST', 'localhost') # django default is localhost
EMAIL_PORT = ENV_TOKENS.get('EMAIL_PORT', 25) # django default is 25
EMAIL_USE_TLS = ENV_TOKENS.get('EMAIL_USE_TLS', False) # django default is False
SITE_NAME = ENV_TOKENS['SITE_NAME']
HTTPS = ENV_TOKENS.get('HTTPS', HTTPS)
SESSION_ENGINE = ENV_TOKENS.get('SESSION_ENGINE', SESSION_ENGINE)
SESSION_COOKIE_DOMAIN = ENV_TOKENS.get('SESSION_COOKIE_DOMAIN')
REGISTRATION_EXTRA_FIELDS = ENV_TOKENS.get('REGISTRATION_EXTRA_FIELDS', REGISTRATION_EXTRA_FIELDS)
CMS_BASE = ENV_TOKENS.get('CMS_BASE', 'studio.edx.org')
# allow for environments to specify what cookie name our login subsystem should use
# this is to fix a bug regarding simultaneous logins between edx.org and edge.edx.org which can
# happen with some browsers (e.g. Firefox)
if ENV_TOKENS.get('SESSION_COOKIE_NAME', None):
# NOTE, there's a bug in Django (http://bugs.python.org/issue18012) which necessitates this being a str()
SESSION_COOKIE_NAME = str(ENV_TOKENS.get('SESSION_COOKIE_NAME'))
BOOK_URL = ENV_TOKENS['BOOK_URL']
MEDIA_URL = ENV_TOKENS['MEDIA_URL']
LOG_DIR = ENV_TOKENS['LOG_DIR']
CACHES = ENV_TOKENS['CACHES']
# Cache used for location mapping -- called many times with the same key/value
# in a given request.
if 'loc_cache' not in CACHES:
CACHES['loc_cache'] = {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'edx_location_mem_cache',
}
# Email overrides — each falls back to the value inherited from .common.
DEFAULT_FROM_EMAIL = ENV_TOKENS.get('DEFAULT_FROM_EMAIL', DEFAULT_FROM_EMAIL)
DEFAULT_FEEDBACK_EMAIL = ENV_TOKENS.get('DEFAULT_FEEDBACK_EMAIL', DEFAULT_FEEDBACK_EMAIL)
ADMINS = ENV_TOKENS.get('ADMINS', ADMINS)
SERVER_EMAIL = ENV_TOKENS.get('SERVER_EMAIL', SERVER_EMAIL)
TECH_SUPPORT_EMAIL = ENV_TOKENS.get('TECH_SUPPORT_EMAIL', TECH_SUPPORT_EMAIL)
CONTACT_EMAIL = ENV_TOKENS.get('CONTACT_EMAIL', CONTACT_EMAIL)
BUGS_EMAIL = ENV_TOKENS.get('BUGS_EMAIL', BUGS_EMAIL)
PAYMENT_SUPPORT_EMAIL = ENV_TOKENS.get('PAYMENT_SUPPORT_EMAIL', PAYMENT_SUPPORT_EMAIL)
PAID_COURSE_REGISTRATION_CURRENCY = ENV_TOKENS.get('PAID_COURSE_REGISTRATION_CURRENCY',
                                                   PAID_COURSE_REGISTRATION_CURRENCY)

# Payment Report Settings
PAYMENT_REPORT_GENERATOR_GROUP = ENV_TOKENS.get('PAYMENT_REPORT_GENERATOR_GROUP', PAYMENT_REPORT_GENERATOR_GROUP)

# Bulk Email overrides
BULK_EMAIL_DEFAULT_FROM_EMAIL = ENV_TOKENS.get('BULK_EMAIL_DEFAULT_FROM_EMAIL', BULK_EMAIL_DEFAULT_FROM_EMAIL)
BULK_EMAIL_EMAILS_PER_TASK = ENV_TOKENS.get('BULK_EMAIL_EMAILS_PER_TASK', BULK_EMAIL_EMAILS_PER_TASK)
BULK_EMAIL_EMAILS_PER_QUERY = ENV_TOKENS.get('BULK_EMAIL_EMAILS_PER_QUERY', BULK_EMAIL_EMAILS_PER_QUERY)
BULK_EMAIL_DEFAULT_RETRY_DELAY = ENV_TOKENS.get('BULK_EMAIL_DEFAULT_RETRY_DELAY', BULK_EMAIL_DEFAULT_RETRY_DELAY)
BULK_EMAIL_MAX_RETRIES = ENV_TOKENS.get('BULK_EMAIL_MAX_RETRIES', BULK_EMAIL_MAX_RETRIES)
BULK_EMAIL_INFINITE_RETRY_CAP = ENV_TOKENS.get('BULK_EMAIL_INFINITE_RETRY_CAP', BULK_EMAIL_INFINITE_RETRY_CAP)
BULK_EMAIL_LOG_SENT_EMAILS = ENV_TOKENS.get('BULK_EMAIL_LOG_SENT_EMAILS', BULK_EMAIL_LOG_SENT_EMAILS)
BULK_EMAIL_RETRY_DELAY_BETWEEN_SENDS = ENV_TOKENS.get('BULK_EMAIL_RETRY_DELAY_BETWEEN_SENDS', BULK_EMAIL_RETRY_DELAY_BETWEEN_SENDS)
# We want Bulk Email running on the high-priority queue, so we define the
# routing key that points to it. At the moment, the name is the same.
# We have to reset the value here, since we have changed the value of the queue name.
BULK_EMAIL_ROUTING_KEY = HIGH_PRIORITY_QUEUE
# Theme overrides
THEME_NAME = ENV_TOKENS.get('THEME_NAME', None)

# Marketing link overrides
MKTG_URL_LINK_MAP.update(ENV_TOKENS.get('MKTG_URL_LINK_MAP', {}))

# Timezone overrides
TIME_ZONE = ENV_TOKENS.get('TIME_ZONE', TIME_ZONE)

# Translation overrides
LANGUAGES = ENV_TOKENS.get('LANGUAGES', LANGUAGES)
LANGUAGE_CODE = ENV_TOKENS.get('LANGUAGE_CODE', LANGUAGE_CODE)
USE_I18N = ENV_TOKENS.get('USE_I18N', USE_I18N)

# Additional installed apps
for app in ENV_TOKENS.get('ADDL_INSTALLED_APPS', []):
    INSTALLED_APPS += (app,)

# Feature flags: 'FEATURES' is preferred; 'MITX_FEATURES' is the legacy key.
ENV_FEATURES = ENV_TOKENS.get('FEATURES', ENV_TOKENS.get('MITX_FEATURES', {}))
for feature, value in ENV_FEATURES.items():
    FEATURES[feature] = value

WIKI_ENABLED = ENV_TOKENS.get('WIKI_ENABLED', WIKI_ENABLED)
local_loglevel = ENV_TOKENS.get('LOCAL_LOGLEVEL', 'INFO')

# 514 is the standard syslog UDP port.
LOGGING = get_logger_config(LOG_DIR,
                            logging_env=ENV_TOKENS['LOGGING_ENV'],
                            syslog_addr=(ENV_TOKENS['SYSLOG_SERVER'], 514),
                            local_loglevel=local_loglevel,
                            debug=False,
                            service_variant=SERVICE_VARIANT)

COURSE_LISTINGS = ENV_TOKENS.get('COURSE_LISTINGS', {})
SUBDOMAIN_BRANDING = ENV_TOKENS.get('SUBDOMAIN_BRANDING', {})
VIRTUAL_UNIVERSITIES = ENV_TOKENS.get('VIRTUAL_UNIVERSITIES', [])
META_UNIVERSITIES = ENV_TOKENS.get('META_UNIVERSITIES', {})
COMMENTS_SERVICE_URL = ENV_TOKENS.get("COMMENTS_SERVICE_URL", '')
COMMENTS_SERVICE_KEY = ENV_TOKENS.get("COMMENTS_SERVICE_KEY", '')
CERT_QUEUE = ENV_TOKENS.get("CERT_QUEUE", 'test-pull')
ZENDESK_URL = ENV_TOKENS.get("ZENDESK_URL")
FEEDBACK_SUBMISSION_EMAIL = ENV_TOKENS.get("FEEDBACK_SUBMISSION_EMAIL")
MKTG_URLS = ENV_TOKENS.get('MKTG_URLS', MKTG_URLS)

# git repo loading environment
GIT_REPO_DIR = ENV_TOKENS.get('GIT_REPO_DIR', '/edx/var/edxapp/course_repos')
GIT_IMPORT_STATIC = ENV_TOKENS.get('GIT_IMPORT_STATIC', True)

# Merge CODE_JAIL overrides one level deep: dict values are updated
# key-by-key, anything else replaces the inherited value outright.
for name, value in ENV_TOKENS.get("CODE_JAIL", {}).items():
    oldvalue = CODE_JAIL.get(name)
    if isinstance(oldvalue, dict):
        for subname, subvalue in value.items():
            oldvalue[subname] = subvalue
    else:
        CODE_JAIL[name] = value

COURSES_WITH_UNSAFE_CODE = ENV_TOKENS.get("COURSES_WITH_UNSAFE_CODE", [])

# Event Tracking
if "TRACKING_IGNORE_URL_PATTERNS" in ENV_TOKENS:
    TRACKING_IGNORE_URL_PATTERNS = ENV_TOKENS.get("TRACKING_IGNORE_URL_PATTERNS")

# SSL external authentication settings
SSL_AUTH_EMAIL_DOMAIN = ENV_TOKENS.get("SSL_AUTH_EMAIL_DOMAIN", "MIT.EDU")
SSL_AUTH_DN_FORMAT_STRING = ENV_TOKENS.get("SSL_AUTH_DN_FORMAT_STRING",
                                           "/C=US/ST=Massachusetts/O=Massachusetts Institute of Technology/OU=Client CA v1/CN={0}/emailAddress={1}")

# Django CAS external authentication settings
CAS_EXTRA_LOGIN_PARAMS = ENV_TOKENS.get("CAS_EXTRA_LOGIN_PARAMS", None)
if FEATURES.get('AUTH_USE_CAS'):
    CAS_SERVER_URL = ENV_TOKENS.get("CAS_SERVER_URL", None)
    AUTHENTICATION_BACKENDS = (
        'django.contrib.auth.backends.ModelBackend',
        'django_cas.backends.CASBackend',
    )
    INSTALLED_APPS += ('django_cas',)
    MIDDLEWARE_CLASSES += ('django_cas.middleware.CASMiddleware',)
    # Optional hook resolving extra user details from CAS attributes;
    # configured as {'module': ..., 'function': ...}.
    CAS_ATTRIBUTE_CALLBACK = ENV_TOKENS.get('CAS_ATTRIBUTE_CALLBACK', None)
    if CAS_ATTRIBUTE_CALLBACK:
        import importlib
        CAS_USER_DETAILS_RESOLVER = getattr(
            importlib.import_module(CAS_ATTRIBUTE_CALLBACK['module']),
            CAS_ATTRIBUTE_CALLBACK['function']
        )

HOSTNAME_MODULESTORE_DEFAULT_MAPPINGS = ENV_TOKENS.get('HOSTNAME_MODULESTORE_DEFAULT_MAPPINGS',{})
############################## SECURE AUTH ITEMS ###############
# Secret things: passwords, access keys, etc.
with open(CONFIG_ROOT / CONFIG_PREFIX + "auth.json") as auth_file:
    AUTH_TOKENS = json.load(auth_file)

############### Mixed Related(Secure/Not-Secure) Items ##########
# If Segment.io key specified, load it and enable Segment.io if the feature flag is set
SEGMENT_IO_LMS_KEY = AUTH_TOKENS.get('SEGMENT_IO_LMS_KEY')
if SEGMENT_IO_LMS_KEY:
    FEATURES['SEGMENT_IO_LMS'] = ENV_TOKENS.get('SEGMENT_IO_LMS', False)

CC_PROCESSOR = AUTH_TOKENS.get('CC_PROCESSOR', CC_PROCESSOR)

SECRET_KEY = AUTH_TOKENS['SECRET_KEY']

# Empty-string AWS credentials are normalized to None so boto falls back
# to its own credential resolution instead of sending blank keys.
AWS_ACCESS_KEY_ID = AUTH_TOKENS["AWS_ACCESS_KEY_ID"]
if AWS_ACCESS_KEY_ID == "":
    AWS_ACCESS_KEY_ID = None
AWS_SECRET_ACCESS_KEY = AUTH_TOKENS["AWS_SECRET_ACCESS_KEY"]
if AWS_SECRET_ACCESS_KEY == "":
    AWS_SECRET_ACCESS_KEY = None
AWS_STORAGE_BUCKET_NAME = AUTH_TOKENS.get('AWS_STORAGE_BUCKET_NAME', 'edxuploads')

# If there is a database called 'read_replica', you can use the use_read_replica_if_available
# function in util/query.py, which is useful for very large database reads
DATABASES = AUTH_TOKENS['DATABASES']
XQUEUE_INTERFACE = AUTH_TOKENS['XQUEUE_INTERFACE']

# Get the MODULESTORE from auth.json, but if it doesn't exist,
# use the one from common.py
MODULESTORE = AUTH_TOKENS.get('MODULESTORE', MODULESTORE)
CONTENTSTORE = AUTH_TOKENS.get('CONTENTSTORE', CONTENTSTORE)
DOC_STORE_CONFIG = AUTH_TOKENS.get('DOC_STORE_CONFIG',DOC_STORE_CONFIG)
MONGODB_LOG = AUTH_TOKENS.get('MONGODB_LOG', {})
OPEN_ENDED_GRADING_INTERFACE = AUTH_TOKENS.get('OPEN_ENDED_GRADING_INTERFACE',
                                               OPEN_ENDED_GRADING_INTERFACE)

EMAIL_HOST_USER = AUTH_TOKENS.get('EMAIL_HOST_USER', '')  # django default is ''
EMAIL_HOST_PASSWORD = AUTH_TOKENS.get('EMAIL_HOST_PASSWORD', '')  # django default is ''

# Datadog for events!  ENV values override AUTH values key-by-key.
DATADOG = AUTH_TOKENS.get("DATADOG", {})
DATADOG.update(ENV_TOKENS.get("DATADOG", {}))

# TODO: deprecated (compatibility with previous settings)
if 'DATADOG_API' in AUTH_TOKENS:
    DATADOG['api_key'] = AUTH_TOKENS['DATADOG_API']
# Analytics dashboard server
ANALYTICS_SERVER_URL = ENV_TOKENS.get("ANALYTICS_SERVER_URL")
ANALYTICS_API_KEY = AUTH_TOKENS.get("ANALYTICS_API_KEY", "")

# Zendesk
ZENDESK_USER = AUTH_TOKENS.get("ZENDESK_USER")
ZENDESK_API_KEY = AUTH_TOKENS.get("ZENDESK_API_KEY")

# API Key for inbound requests from Notifier service
EDX_API_KEY = AUTH_TOKENS.get("EDX_API_KEY")

# Celery Broker — non-secret parts come from env.json, credentials from auth.json.
CELERY_BROKER_TRANSPORT = ENV_TOKENS.get("CELERY_BROKER_TRANSPORT", "")
CELERY_BROKER_HOSTNAME = ENV_TOKENS.get("CELERY_BROKER_HOSTNAME", "")
CELERY_BROKER_VHOST = ENV_TOKENS.get("CELERY_BROKER_VHOST", "")
CELERY_BROKER_USER = AUTH_TOKENS.get("CELERY_BROKER_USER", "")
CELERY_BROKER_PASSWORD = AUTH_TOKENS.get("CELERY_BROKER_PASSWORD", "")

BROKER_URL = "{0}://{1}:{2}@{3}/{4}".format(CELERY_BROKER_TRANSPORT,
                                            CELERY_BROKER_USER,
                                            CELERY_BROKER_PASSWORD,
                                            CELERY_BROKER_HOSTNAME,
                                            CELERY_BROKER_VHOST)

# upload limits
STUDENT_FILEUPLOAD_MAX_SIZE = ENV_TOKENS.get("STUDENT_FILEUPLOAD_MAX_SIZE", STUDENT_FILEUPLOAD_MAX_SIZE)

# Event tracking
TRACKING_BACKENDS.update(AUTH_TOKENS.get("TRACKING_BACKENDS", {}))
EVENT_TRACKING_BACKENDS.update(AUTH_TOKENS.get("EVENT_TRACKING_BACKENDS", {}))

# Student identity verification settings
VERIFY_STUDENT = AUTH_TOKENS.get("VERIFY_STUDENT", VERIFY_STUDENT)

# Grades download — runs on the high-memory queue defined above.
GRADES_DOWNLOAD_ROUTING_KEY = HIGH_MEM_QUEUE

GRADES_DOWNLOAD = ENV_TOKENS.get("GRADES_DOWNLOAD", GRADES_DOWNLOAD)

##### ACCOUNT LOCKOUT DEFAULT PARAMETERS #####
MAX_FAILED_LOGIN_ATTEMPTS_ALLOWED = ENV_TOKENS.get("MAX_FAILED_LOGIN_ATTEMPTS_ALLOWED", 5)
MAX_FAILED_LOGIN_ATTEMPTS_LOCKOUT_PERIOD_SECS = ENV_TOKENS.get("MAX_FAILED_LOGIN_ATTEMPTS_LOCKOUT_PERIOD_SECS", 15 * 60)

MICROSITE_CONFIGURATION = ENV_TOKENS.get('MICROSITE_CONFIGURATION', {})
MICROSITE_ROOT_DIR = path(ENV_TOKENS.get('MICROSITE_ROOT_DIR', ''))

#### PASSWORD POLICY SETTINGS #####
PASSWORD_MIN_LENGTH = ENV_TOKENS.get("PASSWORD_MIN_LENGTH")
PASSWORD_MAX_LENGTH = ENV_TOKENS.get("PASSWORD_MAX_LENGTH")
PASSWORD_COMPLEXITY = ENV_TOKENS.get("PASSWORD_COMPLEXITY", {})
PASSWORD_DICTIONARY_EDIT_DISTANCE_THRESHOLD = ENV_TOKENS.get("PASSWORD_DICTIONARY_EDIT_DISTANCE_THRESHOLD")
PASSWORD_DICTIONARY = ENV_TOKENS.get("PASSWORD_DICTIONARY", [])

### INACTIVITY SETTINGS ####
SESSION_INACTIVITY_TIMEOUT_IN_SECONDS = AUTH_TOKENS.get("SESSION_INACTIVITY_TIMEOUT_IN_SECONDS")

##### LMS DEADLINE DISPLAY TIME_ZONE #######
TIME_ZONE_DISPLAYED_FOR_DEADLINES = ENV_TOKENS.get("TIME_ZONE_DISPLAYED_FOR_DEADLINES",
                                                   TIME_ZONE_DISPLAYED_FOR_DEADLINES)

##### X-Frame-Options response header settings #####
X_FRAME_OPTIONS = ENV_TOKENS.get('X_FRAME_OPTIONS', X_FRAME_OPTIONS)

##### Third-party auth options ################################################
THIRD_PARTY_AUTH = AUTH_TOKENS.get('THIRD_PARTY_AUTH', THIRD_PARTY_AUTH)

##### ADVANCED_SECURITY_CONFIG #####
ADVANCED_SECURITY_CONFIG = ENV_TOKENS.get('ADVANCED_SECURITY_CONFIG', {})

##### GOOGLE ANALYTICS IDS #####
GOOGLE_ANALYTICS_ACCOUNT = AUTH_TOKENS.get('GOOGLE_ANALYTICS_ACCOUNT')
GOOGLE_ANALYTICS_LINKEDIN = AUTH_TOKENS.get('GOOGLE_ANALYTICS_LINKEDIN')

# PDF certificate generation; "BUCKENT" (sic) is the historical key name
# and must stay misspelled to keep existing env.json files working.
PDFGEN_BUCKENT_NAME = ENV_TOKENS.get('PDFGEN_BUCKENT_NAME')
PDFGEN_ACCESS_KEY_ID = ENV_TOKENS.get('PDFGEN_ACCESS_KEY_ID')
PDFGEN_SECRET_ACCESS_KEY = ENV_TOKENS.get('PDFGEN_SECRET_ACCESS_KEY')
PDFGEN_BASE_IMG_DIR = ENV_TOKENS.get('PDFGEN_BASE_IMG_DIR', CONFIG_ROOT)
PDFGEN_BASE_PDF_DIR = ENV_TOKENS.get('PDFGEN_BASE_PDF_DIR', CONFIG_ROOT)
PDFGEN_CERT_AUTHOR = ENV_TOKENS.get('PDFGEN_CERT_AUTHOR', 'gacco')
PDFGEN_CERT_TITLE = ENV_TOKENS.get('PDFGEN_CERT_TITLE', 'gacco Certificate')
# Social-media links and Open-Graph-style page metadata.
# FIX: FB_URL used to be assigned twice — first with default "#" and then,
# a few lines below, with default "" — so the first assignment was dead
# code; only the effective (last) assignment is kept.
TW_URL = ENV_TOKENS.get('TW_URL',"#")
FB_APPID = ENV_TOKENS.get('FB_APPID', "")
FB_SITE = ENV_TOKENS.get('FB_SITE', "")
FB_TITLE = ENV_TOKENS.get('FB_TITLE', "")
FB_TYPE = ENV_TOKENS.get('FB_TYPE', "")
FB_DESC = ENV_TOKENS.get('FB_DESC', "")
FB_URL = ENV_TOKENS.get('FB_URL', "")
FB_IMG = ENV_TOKENS.get('FB_IMG', "")
FB_ACTION_TYPE = ENV_TOKENS.get('FB_ACTION_TYPE', "") | agpl-3.0 |
polyaxon/polyaxon | core/polyaxon/polypod/compiler/converters/service.py | 1 | 3141 | #!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Dict, Iterable, Optional
from polyaxon.polyflow import V1CompiledOperation, V1Plugins, V1Service
from polyaxon.polypod.compiler.converters.base import (
BaseConverter,
PlatformConverterMixin,
)
from polyaxon.polypod.custom_resources import get_service_custom_resource
from polyaxon.polypod.mixins import ServiceMixin
from polyaxon.polypod.specs.contexts import PluginsContextsSpec
from polyaxon.schemas.types import V1ConnectionType, V1K8sResourceType
class ServiceConverter(ServiceMixin, BaseConverter):
    """Converts a compiled service operation into a k8s service custom resource."""

    def get_resource(
        self,
        compiled_operation: V1CompiledOperation,
        artifacts_store: V1ConnectionType,
        connection_by_names: Dict[str, V1ConnectionType],
        secrets: Optional[Iterable[V1K8sResourceType]],
        config_maps: Optional[Iterable[V1K8sResourceType]],
        default_sa: str = None,
        default_auth: bool = False,
    ) -> Dict:
        """Build the custom-resource dict for *compiled_operation*.

        Resolves the plugin/context configuration, assembles the replica
        spec (containers, volumes, connections, secrets, config maps) and
        wraps it into the service custom resource.
        """
        svc: V1Service = compiled_operation.run
        plugin_spec = compiled_operation.plugins or V1Plugins()
        contexts_spec = PluginsContextsSpec.from_config(
            plugin_spec, default_auth=default_auth
        )
        # Replica spec: main container plus sidecars/init containers and
        # all mounted resources for this service run.
        replica = self.get_replica_resource(
            plugins=plugin_spec,
            contexts=contexts_spec,
            environment=svc.environment,
            volumes=svc.volumes,
            init=svc.init,
            sidecars=svc.sidecars,
            container=svc.container,
            artifacts_store=artifacts_store,
            connections=svc.connections,
            connection_by_names=connection_by_names,
            secrets=secrets,
            config_maps=config_maps,
            default_sa=default_sa,
            ports=svc.ports,
        )
        return get_service_custom_resource(
            namespace=self.namespace,
            main_container=replica.main_container,
            sidecar_containers=replica.sidecar_containers,
            init_containers=replica.init_containers,
            resource_name=self.resource_name,
            volumes=replica.volumes,
            environment=replica.environment,
            termination=compiled_operation.termination,
            collect_logs=contexts_spec.collect_logs,
            sync_statuses=contexts_spec.sync_statuses,
            notifications=plugin_spec.notifications,
            labels=replica.labels,
            annotations=replica.annotations,
            ports=svc.ports,
        )
class PlatformServiceConverter(PlatformConverterMixin, ServiceConverter):
    # Service converter variant that adds platform resolution behavior via
    # PlatformConverterMixin; all conversion logic comes from ServiceConverter.
    pass
| apache-2.0 |
bdcht/amoco | amoco/arch/z80/spec_gb.py | 2 | 11709 | # -*- coding: utf-8 -*-
# This code is part of Amoco
# Copyright (C) 2012 Axel Tillequin (bdcht3@gmail.com)
# published under GPLv2 license
# spec_xxx files are providers for instruction objects.
# These objects are wrapped and created by disasm.py.
from amoco.logger import Log

logger = Log(__name__)
logger.debug("loading module")

from amoco.arch.core import *
from amoco.arch.z80 import env

# This module adapts the z80 (spec_mostek) specs to the Game Boy CPU:
# all DD/FD-prefixed (IX/IY) specs are dropped, and the env module is
# trimmed below to match the GB register/flag set.

# remove unused registers:
del env.ix, env.iy
del env.ir
del env.i, env.r
del env.ixh, env.ixl
del env.iyh, env.iyl

# remove unused flags & conditions (only NZ/Z/NC/C remain):
del env.pf
del env.xf
del env.yf
del env.sf
del env.CONDITION[0b100]
del env.CONDITION[0b101]
del env.CONDITION[0b110]
del env.CONDITION[0b111]

# update flags to the GB F-register layout: Z, N, H, C in bits 7..4.
env.cf.pos = 4
env.hf.pos = 5
env.nf.pos = 6
env.zf.pos = 7

# Prefixes are removed (obj.misc['pfx'] is always None)
# simplified getreg8/getreg16
def getreg8(obj, x):
    # 3-bit register selector; index 0b110 maps to the (HL) memory operand.
    r = env.reg8[x]
    return r

def getreg16(obj, x):
    # 2-bit register-pair selector; slot 0b11 means SP everywhere except
    # PUSH/POP, where the raw reg16 table entry is used (remapped to AF
    # by the PUSH/POP spec itself).
    if x == 0b11 and obj.mnemonic not in ("PUSH", "POP"):
        r = env.sp
    else:
        r = env.reg16[x]
    return r

# Instruction specs registered by the @ispec decorators below.
ISPECS = []
# ----------------
# 8-bit load group
# ----------------

# LD r,r'
@ispec("8<[ 01 rd(3) rs(3) ]", mnemonic="LD")
def mostek_ld(obj, rd, rs):
    dst, src = getreg8(obj, rd), getreg8(obj, rs)
    if dst._is_mem or src._is_mem:
        # 01 110 110 (0x76) would be LD (HL),(HL), but that encoding is
        # HALT; reject it here (pfx is always None on the GB).
        if rd == rs or (obj.misc["pfx"] is not None):
            raise InstructionError(obj)
    obj.operands = [dst, src]
    obj.type = type_data_processing

# LD r,n
@ispec("16<[ n(8) 00 r(3) 110 ]", mnemonic="LD")
def mostek_ld(obj, r, n):
    dst = getreg8(obj, r)
    if r == 0b110 and obj.misc["pfx"] is not None:
        raise InstructionError(obj)
    obj.operands = [dst, env.cst(n, 8)]
    obj.type = type_data_processing

# LD (BC/DE), A  /  LD A, (BC/DE): b selects BC/DE, rev the direction.
@ispec("8<[ 000 b rev 010 ]", mnemonic="LD")
def mostek_ld(obj, b, rev):
    base = env.reg16[b]
    obj.operands = [env.mem(base, 8), env.a]
    if rev:
        obj.operands.reverse()
    obj.type = type_data_processing

# GB-specific auto-inc/dec loads: LDD/LDI a,(hl)
@ispec("8<[ {3a} ]", mnemonic="LDD")
@ispec("8<[ {2a} ]", mnemonic="LDI")
def mostek_ld(obj):
    # reg8[0b110] is the (HL) memory operand.
    obj.operands = [env.a, env.reg8[0b110]]
    obj.type = type_data_processing

# LDD/LDI (hl),a
@ispec("8<[ {32} ]", mnemonic="LDD")
@ispec("8<[ {22} ]", mnemonic="LDI")
def mostek_ld(obj):
    obj.operands = [env.reg8[0b110], env.a]
    obj.type = type_data_processing

# LD A,(0xFF00+C): I/O-page load, base = C with 0xFF in the high byte.
@ispec("8<[ {f2} ]", mnemonic="LD")
def mostek_ld(obj):
    base = env.composer([env.c, env.cst(0xFF, 8)])
    obj.operands = [env.a, env.mem(base, 8)]
    obj.type = type_data_processing

# LD (0xFF00+C),A
@ispec("8<[ {e2} ]", mnemonic="LD")
def mostek_ld(obj):
    base = env.composer([env.c, env.cst(0xFF, 8)])
    obj.operands = [env.mem(base, 8), env.a]
    obj.type = type_data_processing

# LDH-style I/O-page loads with an 8-bit immediate offset:
@ispec("16<[ n(8) {f0} ]", mnemonic="LD")
def mostek_ld(obj, n):
    base = env.cst(0xFF00, 16) + n
    obj.operands = [env.a, env.mem(base, 8)]
    obj.type = type_data_processing

@ispec("16<[ n(8) {e0} ]", mnemonic="LD")
def mostek_ld(obj, n):
    base = env.cst(0xFF00, 16) + n
    obj.operands = [env.mem(base, 8), env.a]
    obj.type = type_data_processing

# LD A,(nn) / LD (nn),A with a 16-bit absolute address:
@ispec("24<[ n(16) {fa} ]", mnemonic="LD")
def mostek_ld(obj, n):
    base = env.cst(n, 16)
    obj.operands = [env.a, env.mem(base, 8)]
    obj.type = type_data_processing

@ispec("24<[ n(16) {ea} ]", mnemonic="LD")
def mostek_ld(obj, n):
    base = env.cst(n, 16)
    obj.operands = [env.mem(base, 8), env.a]
    obj.type = type_data_processing
# -----------------
# 16-bit load group
# -----------------

# LD dd,nn
@ispec("24<[ nn(16) 00 dd(2) 0001 ]", mnemonic="LD")
def mostek_ld(obj, dd, nn):
    dst = getreg16(obj, dd)
    obj.operands = [dst, env.cst(nn, 16)]
    obj.type = type_data_processing

# LD hl,(nn) / LD (nn),hl
# NOTE(review): this z80 spec encodes to opcodes 0x22 (rev=0) and 0x2A
# (rev=1), which on the GB are the LDI specs registered above — these
# instructions do not exist on the GB; confirm which registration the
# ispec framework prefers before relying on either.
@ispec("24<[ nn(16) 00 10 rev 010 ]", mnemonic="LD")
def mostek_ld(obj, rev, nn):
    dst = getreg16(obj, 0b10)
    obj.operands = [dst, env.mem(env.cst(nn, 16), 16)]
    if not rev:
        obj.operands.reverse()
    obj.type = type_data_processing

# LD SP,HL
@ispec("8<[ 1111 1001 ]", mnemonic="LD")
def mostek_ld(obj):
    dst = getreg16(obj, 0b10)
    obj.operands = [env.sp, dst]
    obj.type = type_data_processing

# PUSH qq / POP qq: reg16 slot 0b11 (SP in the table) means AF here.
@ispec("8<[ 11 qq(2) 0101 ]", mnemonic="PUSH")
@ispec("8<[ 11 qq(2) 0001 ]", mnemonic="POP")
def mostek_ld(obj, qq):
    src = getreg16(obj, qq)
    if src == env.sp:
        src = env.af
    obj.operands = [src]
    obj.type = type_data_processing

# LDHL SP,n (a.k.a. LD HL,SP+n): n is a signed 8-bit displacement.
@ispec("16<[ n(8) {f8} ]", mnemonic="LDHL")
def mostek_ld(obj, n):
    disp = env.cst(n, 8).signextend(16)
    obj.operands = [env.sp, disp]
    obj.type = type_data_processing

# LD (nn), SP
@ispec("24<[ nn(16) {08} ]", mnemonic="LD")
def mostek_ld(obj, nn):
    obj.operands = [env.mem(env.cst(nn, 16), 16), env.sp]
    obj.type = type_data_processing
# ----------------------
# 8-bit Arithmetic Group
# ----------------------

# <op> a,r for the full ALU set; INC/DEC r reuse the same decoder.
@ispec("8<[ 1000 0 r(3) ]", mnemonic="ADD")
@ispec("8<[ 1000 1 r(3) ]", mnemonic="ADC")
@ispec("8<[ 1001 0 r(3) ]", mnemonic="SUB")
@ispec("8<[ 1001 1 r(3) ]", mnemonic="SBC")
@ispec("8<[ 1010 0 r(3) ]", mnemonic="AND")
@ispec("8<[ 1011 0 r(3) ]", mnemonic="OR")
@ispec("8<[ 1010 1 r(3) ]", mnemonic="XOR")
@ispec("8<[ 1011 1 r(3) ]", mnemonic="CP")
@ispec("8<[ 00 r(3) 100 ]", mnemonic="INC")
@ispec("8<[ 00 r(3) 101 ]", mnemonic="DEC")
def mostek_arithmetic(obj, r):
    if r == 0b110 and obj.misc["pfx"] is not None:
        raise InstructionError(obj)
    src = getreg8(obj, r)
    obj.operands = [env.a, src]
    # INC/DEC are unary: drop the implicit accumulator operand.
    if obj.mnemonic in ("INC", "DEC"):
        obj.operands.pop(0)
    obj.type = type_data_processing

# <op> a,n — same ALU operations with an 8-bit immediate.
@ispec("16<[ n(8) 1100 0110 ]", mnemonic="ADD")
@ispec("16<[ n(8) 1100 1110 ]", mnemonic="ADC")
@ispec("16<[ n(8) 1101 0110 ]", mnemonic="SUB")
@ispec("16<[ n(8) 1101 1110 ]", mnemonic="SBC")
@ispec("16<[ n(8) 1110 0110 ]", mnemonic="AND")
@ispec("16<[ n(8) 1111 0110 ]", mnemonic="OR")
@ispec("16<[ n(8) 1110 1110 ]", mnemonic="XOR")
@ispec("16<[ n(8) 1111 1110 ]", mnemonic="CP")
def mostek_arithmetic(obj, n):
    obj.operands = [env.a, env.cst(n, 8)]
    obj.type = type_data_processing

# ADD SP,n: n is a signed 8-bit displacement (GB-specific).
@ispec("16<[ n(8) {e8} ]", mnemonic="ADD")
def mostek_ld(obj, n):
    disp = env.cst(n, 8).signextend(16)
    obj.operands = [env.sp, disp]
    obj.type = type_data_processing
# ------------------------------------------------
# General Purpose Arithmetic and CPU Control Group
# ------------------------------------------------

@ispec("8<[ {76} ]", mnemonic="HALT")
@ispec("8<[ {f3} ]", mnemonic="DI")
@ispec("8<[ {fb} ]", mnemonic="EI")
@ispec("8<[ {10} ]", mnemonic="STOP")
def mostek_gpa_cpuc(obj):
    # NOTE(review): opcode {10} is also matched by the 16-bit DJNZ spec in
    # the jump group below (a z80 leftover — DJNZ does not exist on the
    # GB); confirm which registration the ispec scanner prefers.
    obj.operands = []
    obj.type = type_cpu_state

@ispec("8<[ {27} ]", mnemonic="DAA")
@ispec("8<[ {2f} ]", mnemonic="CPL")
@ispec("8<[ {3f} ]", mnemonic="CCF")
@ispec("8<[ {37} ]", mnemonic="SCF")
@ispec("8<[ {00} ]", mnemonic="NOP")
def mostek_arithmetic(obj):
    obj.operands = []
    obj.type = type_data_processing

# RETI: return from interrupt (control flow, unlike the group above).
@ispec("8<[ {d9} ]", mnemonic="RETI")
def mostek_arithmetic(obj):
    obj.operands = []
    obj.type = type_control_flow
# -----------------------
# 16-bit Arithmetic Group
# -----------------------

# ADD HL,ss / INC ss / DEC ss
@ispec(" 8<[ 00 ss(2) 1001 ]", mnemonic="ADD")
@ispec(" 8<[ 00 ss(2) 0011 ]", mnemonic="INC")
@ispec(" 8<[ 00 ss(2) 1011 ]", mnemonic="DEC")
def mostek_arithmetic(obj, ss):
    dst = getreg16(obj, 0b10)  # hl (no ix/iy on the GB)
    src = getreg16(obj, ss)
    obj.operands = [dst, src]
    # INC/DEC are unary: drop the implicit HL destination.
    if obj.mnemonic in ("INC", "DEC"):
        obj.operands.pop(0)
    obj.type = type_data_processing

# NOTE(review): the specs below duplicate the 8-bit immediate ALU group
# already registered above (same bit patterns, same decoder body) — in the
# z80 original these also covered DD/FD-prefixed forms; here the prefix is
# gone, so this second registration looks redundant. Confirm it is harmless
# in the ispec framework before removing.
@ispec("16<[ n(8) 1100 0110 ]", mnemonic="ADD")
@ispec("16<[ n(8) 1100 1110 ]", mnemonic="ADC")
@ispec("16<[ n(8) 1101 0110 ]", mnemonic="SUB")
@ispec("16<[ n(8) 1101 1110 ]", mnemonic="SBC")
@ispec("16<[ n(8) 1110 0110 ]", mnemonic="AND")
@ispec("16<[ n(8) 1111 0110 ]", mnemonic="OR")
@ispec("16<[ n(8) 1110 1110 ]", mnemonic="XOR")
@ispec("16<[ n(8) 1111 1110 ]", mnemonic="CP")
def mostek_arithmetic(obj, n):
    # DD/FD prefix are ignored
    obj.operands = [env.a, env.cst(n, 8)]
    obj.type = type_data_processing
# ----------------------
# Rotate and Shift Group
# ----------------------

# Accumulator rotates (implicit operand A).
@ispec("8<[ {07} ]", mnemonic="RLCA")
@ispec("8<[ {17} ]", mnemonic="RLA")
@ispec("8<[ {0f} ]", mnemonic="RRCA")
@ispec("8<[ {1f} ]", mnemonic="RRA")
def mostek_rotshift(obj):
    obj.operands = []
    obj.type = type_data_processing

# CB-prefixed rotates/shifts on r or (HL).
@ispec("16<[ 00000 r(3) {cb} ]", mnemonic="RLC")
@ispec("16<[ 00010 r(3) {cb} ]", mnemonic="RL")
@ispec("16<[ 00001 r(3) {cb} ]", mnemonic="RRC")
@ispec("16<[ 00011 r(3) {cb} ]", mnemonic="RR")
@ispec("16<[ 00100 r(3) {cb} ]", mnemonic="SLA")
@ispec("16<[ 00110 r(3) {cb} ]", mnemonic="SWAP")  # GB SWAP in the slot of the undocumented z80 SLL
@ispec("16<[ 00101 r(3) {cb} ]", mnemonic="SRA")
@ispec("16<[ 00111 r(3) {cb} ]", mnemonic="SRL")
def mostek_rotshift(obj, r):
    if obj.misc["pfx"] is not None:
        raise InstructionError(obj)
    op1 = getreg8(obj, r)
    obj.operands = [op1]
    obj.type = type_data_processing
# -----------------------------
# Bit Set, Reset and Test Group
# -----------------------------

# CB-prefixed bit operations: BIT (01), RES (10), SET (11).
# FIX: the RES b,r spec (CB 0x80-0xBF) was missing, leaving those GB
# opcodes undecodable while BIT and SET were handled.
@ispec("16<[ 01 b(3) r(3) {cb} ]", mnemonic="BIT")
@ispec("16<[ 10 b(3) r(3) {cb} ]", mnemonic="RES")
@ispec("16<[ 11 b(3) r(3) {cb} ]", mnemonic="SET")
def mostek_bitset(obj, b, r):
    # operands: bit number (0-7) then the 8-bit register or (HL) operand.
    if obj.misc["pfx"] is not None:
        raise InstructionError(obj)
    op1 = env.cst(b, 3)
    op2 = getreg8(obj, r)
    obj.operands = [op1, op2]
    obj.type = type_data_processing
# ----------
# Jump Group
# ----------

# JP nn
@ispec("24<[ nn(16) 11 000 011 ]", mnemonic="JP")
def mostek_jump(obj, nn):
    obj.operands = [env.cst(nn, 16)]
    obj.type = type_control_flow

# JP cc,nn — only NZ/Z/NC/C exist on the GB (upper CONDITION entries were
# deleted at module load), so condition codes >= 0b100 are rejected.
@ispec("24<[ nn(16) 11 cc(3) 010 ]", mnemonic="JPcc")
def mostek_jump(obj, cc, nn):
    if cc >= 0b100:
        raise InstructionError(obj)
    obj.cond = env.CONDITION[cc]
    obj.operands = [obj.cond[0], env.cst(nn, 16)]
    obj.type = type_control_flow

# Relative jumps; e is a signed 8-bit displacement.
# NOTE(review): DJNZ ({10}) is a z80 leftover — the GB has no DJNZ, and
# {10} is also registered as STOP in the CPU-control group above; confirm
# which spec wins before relying on either decoding.
@ispec("16<[ e(8) {18} ]", mnemonic="JR")
@ispec("16<[ e(8) {10} ]", mnemonic="DJNZ")
@ispec("16<[ e(8) {38} ]", mnemonic="JRcc", cond=("c", env.cf == 1))
@ispec("16<[ e(8) {30} ]", mnemonic="JRcc", cond=("nc", env.cf == 0))
@ispec("16<[ e(8) {28} ]", mnemonic="JRcc", cond=("z", env.zf == 1))
@ispec("16<[ e(8) {20} ]", mnemonic="JRcc", cond=("nz", env.zf == 0))
def mostek_jump(obj, e):
    disp = env.cst(e, 8).signextend(16)
    obj.operands = [disp]
    if hasattr(obj, "cond"):
        obj.operands.insert(0, obj.cond[0])
    obj.type = type_control_flow

# JP (HL): jumps to the address held in HL (the register value itself is
# the target, hence no mem() wrapper).
@ispec("8<[ {e9} ]", mnemonic="JP")
def mostek_jump(obj):
    r = getreg16(obj, 0b10)
    # is it mem(r,16) ??
    obj.operands = [r]
    obj.type = type_control_flow
# ---------------------
# Call and Return Group
# ---------------------

# CALL nn
@ispec("24<[ nn(16) 1100 1101 ]", mnemonic="CALL")
def mostek_call(obj, nn):
    obj.operands = [env.cst(nn, 16)]
    obj.type = type_control_flow

# CALL cc,nn — same restricted GB condition set as JPcc.
@ispec("24<[ nn(16) 11 cc(3) 100 ]", mnemonic="CALLcc")
def mostek_call(obj, cc, nn):
    if cc >= 0b100:
        raise InstructionError(obj)
    obj.cond = env.CONDITION[cc]
    obj.operands = [obj.cond[0], env.cst(nn, 16)]
    obj.type = type_control_flow

# RET
@ispec("8<[ {c9} ]", mnemonic="RET")
def mostek_ret(obj):
    obj.operands = []
    obj.type = type_control_flow

# RET cc
@ispec("8<[ 11 cc(3) 000 ]", mnemonic="RETcc")
def mostek_ret(obj, cc):
    if cc >= 0b100:
        raise InstructionError(obj)
    obj.cond = env.CONDITION[cc]
    obj.operands = [obj.cond[0]]
    obj.type = type_control_flow

# RST t: call to one of the eight fixed page-0 restart vectors.
@ispec("8<[ 11 t(3) 111 ]", mnemonic="RST")
def mostek_rst(obj, t):
    p = (0x00, 0x08, 0x10, 0x18, 0x20, 0x28, 0x30, 0x38)[t]
    obj.operands = [env.cst(p, 8)]
    obj.type = type_control_flow
| gpl-2.0 |
bparees/kubernetes | hack/verify-publishing-bot.py | 32 | 4816 | #!/usr/bin/env python
# Copyright 2019 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import fnmatch
import os
import sys
import json
def get_gomod_dependencies(rootdir, components):
    """Map each staging component to the sibling k8s.io components its
    go.mod depends on (detected via `k8s.io/<dep> =>` replace lines).

    Prints each component followed by its detected dependencies, and
    returns a dict of component name -> list of dependency names.
    """
    deps_by_component = {}
    for component in components:
        gomod_path = os.path.join(rootdir, component, "go.mod")
        with open(gomod_path) as gomod:
            print(component + " dependencies")
            found = []
            deps_by_component[component] = found
            # dedupe, then scan in sorted order for deterministic output
            unique_lines = sorted(set(gomod))
        for line in unique_lines:
            for dep in components:
                if dep == component:
                    continue
                if ("k8s.io/" + dep + " =>") not in line:
                    continue
                print("\t" + dep)
                if dep not in found:
                    found.append(dep)
    return deps_by_component
def get_rules_dependencies(rules_file):
    """Parse the publishing rules YAML file and return its contents.

    FIX: uses yaml.safe_load instead of yaml.load — the rules file is plain
    data, calling yaml.load without an explicit Loader is unsafe on
    untrusted input, and PyYAML >= 6 makes the Loader argument mandatory.
    """
    import yaml
    with open(rules_file) as f:
        data = yaml.safe_load(f)
    return data
def main():
    """Verify staging/publishing/rules.yaml against the staging go.mod files.

    Checks that every staging component has a rule publishing from/to
    master, that each rule's dependency list exactly matches the staging
    dependencies found in the component's go.mod, and that no component is
    missing a rule.  Raises Exception with a descriptive message on the
    first inconsistency found.
    """
    rootdir = os.path.dirname(__file__) + "/../"
    rootdir = os.path.abspath(rootdir)
    components = []
    for component in os.listdir(rootdir + '/staging/src/k8s.io/'):
        components.append(component)
    components.sort()
    rules_file = "/staging/publishing/rules.yaml"
    try:
        import yaml
    except ImportError:
        print("Please install missing pyyaml module and re-run %s" % sys.argv[0])
        sys.exit(1)
    rules_dependencies = get_rules_dependencies(rootdir + rules_file)
    gomod_dependencies = get_gomod_dependencies(rootdir + '/staging/src/k8s.io/', components)
    processed_repos = []
    for rule in rules_dependencies["rules"]:
        branch = rule["branches"][0]
        # If this no longer exists in master
        if rule["destination"] not in gomod_dependencies:
            # Make sure we don't include a rule to publish it from master
            for branch in rule["branches"]:
                if branch["name"] == "master":
                    raise Exception("cannot find master branch for destination %s" % rule["destination"])
            # And skip validation of publishing rules for it
            continue
        if branch["name"] != "master":
            raise Exception("cannot find master branch for destination %s" % rule["destination"])
        if branch["source"]["branch"] != "master":
            raise Exception("cannot find master source branch for destination %s" % rule["destination"])
        print("processing : %s" % rule["destination"])
        if rule["destination"] not in gomod_dependencies:
            raise Exception("missing go.mod for %s" % rule["destination"])
        processed_repos.append(rule["destination"])
        processed_deps = []
        for dep in set(gomod_dependencies[rule["destination"]]):
            found = False
            if "dependencies" in branch:
                for dep2 in branch["dependencies"]:
                    processed_deps.append(dep2["repository"])
                    if dep2["branch"] != "master":
                        # Fixed: the original passed dep2 and the destination
                        # as extra Exception args instead of %-formatting
                        # them into the message.
                        raise Exception("Looking for master branch and found : %s for destination %s" % (
                            dep2, rule["destination"]))
                    if dep2["repository"] == dep:
                        found = True
            else:
                raise Exception(
                    "Please add %s as dependencies under destination %s in %s" % (gomod_dependencies[rule["destination"]], rule["destination"], rules_file))
            if not found:
                raise Exception("Please add %s as a dependency under destination %s in %s" % (dep, rule["destination"], rules_file))
            else:
                print(" found dependency %s" % dep)
        # Rules must not list dependencies that go.mod does not have.
        extraDeps = set(processed_deps) - set(gomod_dependencies[rule["destination"]])
        if len(extraDeps) > 0:
            raise Exception("extra dependencies in rules for %s: %s" % (rule["destination"], ','.join(str(s) for s in extraDeps)))
    # Every component found in staging must have been covered by a rule.
    items = set(gomod_dependencies.keys()) - set(processed_repos)
    if len(items) > 0:
        raise Exception("missing rules for %s" % ','.join(str(s) for s in items))
    print("Done.")
if __name__ == "__main__":
sys.exit(main())
| apache-2.0 |
JakeBrand/CMPUT410-E4 | lab4/lib/python2.7/site-packages/jinja2/exceptions.py | 977 | 4428 | # -*- coding: utf-8 -*-
"""
jinja2.exceptions
~~~~~~~~~~~~~~~~~
Jinja exceptions.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
from jinja2._compat import imap, text_type, PY2, implements_to_string
class TemplateError(Exception):
    """Baseclass for all template errors."""

    if PY2:
        def __init__(self, message=None):
            # On Python 2 store the message utf-8 encoded so the bytes-based
            # Exception machinery can carry non-ASCII text.
            if message is not None:
                message = text_type(message).encode('utf-8')
            Exception.__init__(self, message)

        @property
        def message(self):
            # Recover the message from Exception.args and decode it back to
            # unicode; 'replace' avoids raising on undecodable bytes.
            if self.args:
                message = self.args[0]
                if message is not None:
                    return message.decode('utf-8', 'replace')

        def __unicode__(self):
            return self.message or u''
    else:
        def __init__(self, message=None):
            Exception.__init__(self, message)

        @property
        def message(self):
            # On Python 3 the message is stored as-is in Exception.args.
            if self.args:
                message = self.args[0]
                if message is not None:
                    return message
@implements_to_string
class TemplateNotFound(IOError, LookupError, TemplateError):
    """Raised if a template does not exist."""

    # looks weird, but removes the warning descriptor that just
    # bogusly warns us about message being deprecated
    message = None

    def __init__(self, name, message=None):
        IOError.__init__(self)
        # The human-readable message defaults to the template name itself.
        self.message = name if message is None else message
        self.name = name
        self.templates = [name]

    def __str__(self):
        return self.message
class TemplatesNotFound(TemplateNotFound):
    """Like :class:`TemplateNotFound` but raised if multiple templates
    are selected.  This is a subclass of :class:`TemplateNotFound`
    exception, so just catching the base exception will catch both.

    .. versionadded:: 2.2
    """

    def __init__(self, names=(), message=None):
        if message is None:
            joined = u', '.join(imap(text_type, names))
            message = u'none of the templates given were found: ' + joined
        # Report the last (falsy-filtered) name as the primary one.
        last_name = (names[-1] or None) if names else None
        TemplateNotFound.__init__(self, last_name, message)
        self.templates = list(names)
@implements_to_string
class TemplateSyntaxError(TemplateError):
    """Raised to tell the user that there is a problem with the template."""

    def __init__(self, message, lineno, name=None, filename=None):
        TemplateError.__init__(self, message)
        self.lineno = lineno
        self.name = name
        self.filename = filename
        self.source = None
        # this is set to True if the debug.translate_syntax_error
        # function translated the syntax error into a new traceback
        self.translated = False

    def __str__(self):
        # Translated errors carry a rebuilt traceback, so the bare message
        # is all that should be shown.
        if self.translated:
            return self.message

        # Otherwise build a location description, preferring the filename.
        where = 'line %d' % self.lineno
        origin = self.filename or self.name
        if origin:
            where = 'File "%s", %s' % (origin, where)
        out = [self.message, '  ' + where]

        # If the template source is available, show the offending line.
        if self.source is not None:
            try:
                src_line = self.source.splitlines()[self.lineno - 1]
            except IndexError:
                src_line = None
            if src_line:
                out.append('    ' + src_line.strip())

        return u'\n'.join(out)
# --- Marker exception classes -------------------------------------------
# The classes below add no behavior of their own; they exist so that
# callers can catch progressively narrower categories of template errors.

class TemplateAssertionError(TemplateSyntaxError):
    """Like a template syntax error, but covers cases where something in the
    template caused an error at compile time that wasn't necessarily caused
    by a syntax error.  However it's a direct subclass of
    :exc:`TemplateSyntaxError` and has the same attributes.
    """


class TemplateRuntimeError(TemplateError):
    """A generic runtime error in the template engine.  Under some situations
    Jinja may raise this exception.
    """


class UndefinedError(TemplateRuntimeError):
    """Raised if a template tries to operate on :class:`Undefined`."""


class SecurityError(TemplateRuntimeError):
    """Raised if a template tries to do something insecure if the
    sandbox is enabled.
    """


class FilterArgumentError(TemplateRuntimeError):
    """This error is raised if a filter was called with inappropriate
    arguments
    """
| apache-2.0 |
impowski/servo | tests/wpt/web-platform-tests/tools/pytest/testing/test_pdb.py | 170 | 9594 | import sys
import _pytest._code
def runpdb_and_get_report(testdir, source):
    """Run *source* in-process under ``--pdb`` and return the call report."""
    testfile = testdir.makepyfile(source)
    result = testdir.runpytest_inprocess("--pdb", testfile)
    logged = result.reprec.getreports("pytest_runtest_logreport")
    assert len(logged) == 3, logged  # setup/call/teardown
    return logged[1]
class TestPDB:
    """Exercise pytest's ``--pdb`` option and ``pytest.set_trace()``.

    Most tests spawn a child pytest process (``testdir.spawn_pytest``) and
    script the interactive ``(Pdb)`` prompt via expect/send calls; the
    ``pdblist`` fixture tests instead monkeypatch the pdb plugin to record
    post-mortem invocations without actually entering the debugger.
    """

    def pytest_funcarg__pdblist(self, request):
        # Fixture: replace the pdb plugin's post_mortem with a recorder so
        # tests can assert whether, and with what args, it was invoked.
        monkeypatch = request.getfuncargvalue("monkeypatch")
        pdblist = []
        def mypdb(*args):
            pdblist.append(args)
        plugin = request.config.pluginmanager.getplugin('pdb')
        monkeypatch.setattr(plugin, 'post_mortem', mypdb)
        return pdblist

    def test_pdb_on_fail(self, testdir, pdblist):
        # A plain failure enters post-mortem exactly once, with a traceback
        # ending in the failing test function.
        rep = runpdb_and_get_report(testdir, """
            def test_func():
                assert 0
        """)
        assert rep.failed
        assert len(pdblist) == 1
        tb = _pytest._code.Traceback(pdblist[0][0])
        assert tb[-1].name == "test_func"

    def test_pdb_on_xfail(self, testdir, pdblist):
        # Expected failures must not enter the debugger.
        rep = runpdb_and_get_report(testdir, """
            import pytest
            @pytest.mark.xfail
            def test_func():
                assert 0
        """)
        assert "xfail" in rep.keywords
        assert not pdblist

    def test_pdb_on_skip(self, testdir, pdblist):
        # Skips must not enter the debugger either.
        rep = runpdb_and_get_report(testdir, """
            import pytest
            def test_func():
                pytest.skip("hello")
        """)
        assert rep.skipped
        assert len(pdblist) == 0

    def test_pdb_on_BdbQuit(self, testdir, pdblist):
        # Quitting the debugger fails the test without re-entering
        # post-mortem.
        rep = runpdb_and_get_report(testdir, """
            import bdb
            def test_func():
                raise bdb.BdbQuit
        """)
        assert rep.failed
        assert len(pdblist) == 0

    def test_pdb_interaction(self, testdir):
        # On failure the source and locals are shown once, before the
        # prompt; after EOF they are not repeated.
        p1 = testdir.makepyfile("""
            def test_1():
                i = 0
                assert i == 1
        """)
        child = testdir.spawn_pytest("--pdb %s" % p1)
        child.expect(".*def test_1")
        child.expect(".*i = 0")
        child.expect("(Pdb)")
        child.sendeof()
        rest = child.read().decode("utf8")
        assert "1 failed" in rest
        assert "def test_1" not in rest
        if child.isalive():
            child.wait()

    def test_pdb_interaction_capture(self, testdir):
        # Captured stdout is shown when entering pdb but not repeated
        # after EOF.
        p1 = testdir.makepyfile("""
            def test_1():
                print("getrekt")
                assert False
        """)
        child = testdir.spawn_pytest("--pdb %s" % p1)
        child.expect("getrekt")
        child.expect("(Pdb)")
        child.sendeof()
        rest = child.read().decode("utf8")
        assert "1 failed" in rest
        assert "getrekt" not in rest
        if child.isalive():
            child.wait()

    def test_pdb_interaction_exception(self, testdir):
        # Inside post-mortem the test module's globals are accessible.
        p1 = testdir.makepyfile("""
            import pytest
            def globalfunc():
                pass
            def test_1():
                pytest.raises(ValueError, globalfunc)
        """)
        child = testdir.spawn_pytest("--pdb %s" % p1)
        child.expect(".*def test_1")
        child.expect(".*pytest.raises.*globalfunc")
        child.expect("(Pdb)")
        child.sendline("globalfunc")
        child.expect(".*function")
        child.sendeof()
        child.expect("1 failed")
        if child.isalive():
            child.wait()

    def test_pdb_interaction_on_collection_issue181(self, testdir):
        # A collection error (NameError at import time) also drops into pdb.
        p1 = testdir.makepyfile("""
            import pytest
            xxx
        """)
        child = testdir.spawn_pytest("--pdb %s" % p1)
        #child.expect(".*import pytest.*")
        child.expect("(Pdb)")
        child.sendeof()
        child.expect("1 error")
        if child.isalive():
            child.wait()

    def test_pdb_interaction_on_internal_error(self, testdir):
        # An INTERNALERROR (exception inside a hook) enters pdb as well.
        testdir.makeconftest("""
            def pytest_runtest_protocol():
                0/0
        """)
        p1 = testdir.makepyfile("def test_func(): pass")
        child = testdir.spawn_pytest("--pdb %s" % p1)
        #child.expect(".*import pytest.*")
        child.expect("(Pdb)")
        child.sendeof()
        if child.isalive():
            child.wait()

    def test_pdb_interaction_capturing_simple(self, testdir):
        # pytest.set_trace() suspends capturing so prior output is visible.
        p1 = testdir.makepyfile("""
            import pytest
            def test_1():
                i = 0
                print ("hello17")
                pytest.set_trace()
                x = 3
        """)
        child = testdir.spawn_pytest(str(p1))
        child.expect("test_1")
        child.expect("x = 3")
        child.expect("(Pdb)")
        child.sendeof()
        rest = child.read().decode("utf-8")
        assert "1 failed" in rest
        assert "def test_1" in rest
        assert "hello17" in rest  # out is captured
        if child.isalive():
            child.wait()

    def test_pdb_set_trace_interception(self, testdir):
        # Plain pdb.set_trace() is intercepted too; EOF fails the test
        # without a "reading from stdin while output is captured" error.
        p1 = testdir.makepyfile("""
            import pdb
            def test_1():
                pdb.set_trace()
        """)
        child = testdir.spawn_pytest(str(p1))
        child.expect("test_1")
        child.expect("(Pdb)")
        child.sendeof()
        rest = child.read().decode("utf8")
        assert "1 failed" in rest
        assert "reading from stdin while output" not in rest
        if child.isalive():
            child.wait()

    def test_pdb_and_capsys(self, testdir):
        # The capsys fixture keeps working from inside the prompt.
        p1 = testdir.makepyfile("""
            import pytest
            def test_1(capsys):
                print ("hello1")
                pytest.set_trace()
        """)
        child = testdir.spawn_pytest(str(p1))
        child.expect("test_1")
        child.send("capsys.readouterr()\n")
        child.expect("hello1")
        child.sendeof()
        child.read()
        if child.isalive():
            child.wait()

    def test_set_trace_capturing_afterwards(self, testdir):
        # Capturing must resume for later tests after a set_trace() session.
        p1 = testdir.makepyfile("""
            import pdb
            def test_1():
                pdb.set_trace()
            def test_2():
                print ("hello")
                assert 0
        """)
        child = testdir.spawn_pytest(str(p1))
        child.expect("test_1")
        child.send("c\n")
        child.expect("test_2")
        child.expect("Captured")
        child.expect("hello")
        child.sendeof()
        child.read()
        if child.isalive():
            child.wait()

    def test_pdb_interaction_doctest(self, testdir):
        # Doctest failures under --pdb expose the doctest's namespace.
        p1 = testdir.makepyfile("""
            import pytest
            def function_1():
                '''
                >>> i = 0
                >>> assert i == 1
                '''
        """)
        child = testdir.spawn_pytest("--doctest-modules --pdb %s" % p1)
        child.expect("(Pdb)")
        child.sendline('i')
        child.expect("0")
        child.expect("(Pdb)")
        child.sendeof()
        rest = child.read().decode("utf8")
        assert "1 failed" in rest
        if child.isalive():
            child.wait()

    def test_pdb_interaction_capturing_twice(self, testdir):
        # Two consecutive set_trace() calls both show their captured output.
        p1 = testdir.makepyfile("""
            import pytest
            def test_1():
                i = 0
                print ("hello17")
                pytest.set_trace()
                x = 3
                print ("hello18")
                pytest.set_trace()
                x = 4
        """)
        child = testdir.spawn_pytest(str(p1))
        child.expect("test_1")
        child.expect("x = 3")
        child.expect("(Pdb)")
        child.sendline('c')
        child.expect("x = 4")
        child.sendeof()
        rest = child.read().decode("utf8")
        assert "1 failed" in rest
        assert "def test_1" in rest
        assert "hello17" in rest  # out is captured
        assert "hello18" in rest  # out is captured
        if child.isalive():
            child.wait()

    def test_pdb_used_outside_test(self, testdir):
        # set_trace() also works when the file is run with plain python.
        p1 = testdir.makepyfile("""
            import pytest
            pytest.set_trace()
            x = 5
        """)
        child = testdir.spawn("%s %s" %(sys.executable, p1))
        child.expect("x = 5")
        child.sendeof()
        child.wait()

    def test_pdb_used_in_generate_tests(self, testdir):
        # set_trace() works during collection (pytest_generate_tests).
        p1 = testdir.makepyfile("""
            import pytest
            def pytest_generate_tests(metafunc):
                pytest.set_trace()
                x = 5
            def test_foo(a):
                pass
        """)
        child = testdir.spawn_pytest(str(p1))
        child.expect("x = 5")
        child.sendeof()
        child.wait()

    def test_pdb_collection_failure_is_shown(self, testdir):
        # The collection error itself must appear in the output.
        p1 = testdir.makepyfile("""xxx """)
        result = testdir.runpytest_subprocess("--pdb", p1)
        result.stdout.fnmatch_lines([
            "*NameError*xxx*",
            "*1 error*",
        ])

    def test_enter_pdb_hook_is_called(self, testdir):
        # The pytest_enter_pdb hook fires (with config) before the prompt.
        # NOTE: the conftest source below uses a Python 2 print statement.
        testdir.makeconftest("""
            def pytest_enter_pdb(config):
                assert config.testing_verification == 'configured'
                print 'enter_pdb_hook'
            def pytest_configure(config):
                config.testing_verification = 'configured'
        """)
        p1 = testdir.makepyfile("""
            import pytest
            def test_foo():
                pytest.set_trace()
        """)
        child = testdir.spawn_pytest(str(p1))
        child.expect("enter_pdb_hook")
        child.send('c\n')
        child.sendeof()
        if child.isalive():
            child.wait()
| mpl-2.0 |
mbox/django | tests/file_storage/tests.py | 3 | 25486 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import errno
import os
import shutil
import sys
import tempfile
import time
import unittest
from datetime import datetime, timedelta
try:
import threading
except ImportError:
import dummy_threading as threading
from django.core.cache import cache
from django.core.exceptions import SuspiciousOperation
from django.core.files.base import File, ContentFile
from django.core.files.storage import FileSystemStorage, get_storage_class
from django.core.files.uploadedfile import SimpleUploadedFile
from django.test import LiveServerTestCase, SimpleTestCase
from django.test import override_settings
from django.utils import six
from django.utils.six.moves.urllib.request import urlopen
from django.utils._os import upath
from .models import Storage, temp_storage, temp_storage_location
class GetStorageClassTests(SimpleTestCase):
    """Tests for django.core.files.storage.get_storage_class."""

    def test_get_filesystem_storage(self):
        """
        get_storage_class returns the class for a storage backend name/path.
        """
        self.assertEqual(
            get_storage_class('django.core.files.storage.FileSystemStorage'),
            FileSystemStorage)

    def test_get_invalid_storage_module(self):
        """
        get_storage_class raises an error if the requested import doesn't exist.
        """
        with six.assertRaisesRegex(self, ImportError, "No module named '?storage'?"):
            get_storage_class('storage.NonExistingStorage')

    def test_get_nonexisting_storage_class(self):
        """
        get_storage_class raises an error if the requested class doesn't exist.
        """
        self.assertRaises(ImportError, get_storage_class,
                          'django.core.files.storage.NonExistingStorage')

    def test_get_nonexisting_storage_module(self):
        """
        get_storage_class raises an error if the requested module doesn't exist.
        """
        # Error message may or may not be the fully qualified path.
        with six.assertRaisesRegex(self, ImportError,
                "No module named '?(django.core.files.)?non_existing_storage'?"):
            get_storage_class(
                'django.core.files.non_existing_storage.NonExistingStorage')
class FileStorageTests(unittest.TestCase):
    """End-to-end tests for FileSystemStorage against a real temp directory.

    Subclasses (e.g. CustomStorageTests) re-run the whole suite against a
    different backend by overriding storage_class.
    """
    storage_class = FileSystemStorage

    def setUp(self):
        self.temp_dir = tempfile.mkdtemp()
        self.storage = self.storage_class(location=self.temp_dir,
                                          base_url='/test_media_url/')
        # Set up a second temporary directory which is ensured to have a mixed
        # case name.
        self.temp_dir2 = tempfile.mkdtemp(suffix='aBc')

    def tearDown(self):
        shutil.rmtree(self.temp_dir)
        shutil.rmtree(self.temp_dir2)

    # Renamed from test_emtpy_location (typo in the method name).
    def test_empty_location(self):
        """
        An empty location falls back to the current working directory;
        no exception is raised.
        """
        storage = self.storage_class(location='')
        self.assertEqual(storage.base_location, '')
        self.assertEqual(storage.location, upath(os.getcwd()))

    def test_file_access_options(self):
        """
        Standard file access options are available, and work as expected.
        """
        self.assertFalse(self.storage.exists('storage_test'))
        f = self.storage.open('storage_test', 'w')
        f.write('storage contents')
        f.close()
        self.assertTrue(self.storage.exists('storage_test'))
        f = self.storage.open('storage_test', 'r')
        self.assertEqual(f.read(), 'storage contents')
        f.close()
        self.storage.delete('storage_test')
        self.assertFalse(self.storage.exists('storage_test'))

    def test_file_accessed_time(self):
        """
        File storage returns a Datetime object for the last accessed time of
        a file.
        """
        self.assertFalse(self.storage.exists('test.file'))
        f = ContentFile('custom contents')
        f_name = self.storage.save('test.file', f)
        atime = self.storage.accessed_time(f_name)
        self.assertEqual(atime, datetime.fromtimestamp(
            os.path.getatime(self.storage.path(f_name))))
        self.assertTrue(datetime.now() - self.storage.accessed_time(f_name) < timedelta(seconds=2))
        self.storage.delete(f_name)

    def test_file_created_time(self):
        """
        File storage returns a Datetime object for the creation time of
        a file.
        """
        self.assertFalse(self.storage.exists('test.file'))
        f = ContentFile('custom contents')
        f_name = self.storage.save('test.file', f)
        ctime = self.storage.created_time(f_name)
        self.assertEqual(ctime, datetime.fromtimestamp(
            os.path.getctime(self.storage.path(f_name))))
        self.assertTrue(datetime.now() - self.storage.created_time(f_name) < timedelta(seconds=2))
        self.storage.delete(f_name)

    def test_file_modified_time(self):
        """
        File storage returns a Datetime object for the last modified time of
        a file.
        """
        self.assertFalse(self.storage.exists('test.file'))
        f = ContentFile('custom contents')
        f_name = self.storage.save('test.file', f)
        mtime = self.storage.modified_time(f_name)
        self.assertEqual(mtime, datetime.fromtimestamp(
            os.path.getmtime(self.storage.path(f_name))))
        self.assertTrue(datetime.now() - self.storage.modified_time(f_name) < timedelta(seconds=2))
        self.storage.delete(f_name)

    def test_file_save_without_name(self):
        """
        File storage extracts the filename from the content object if no
        name is given explicitly.
        """
        self.assertFalse(self.storage.exists('test.file'))
        f = ContentFile('custom contents')
        f.name = 'test.file'
        storage_f_name = self.storage.save(None, f)
        self.assertEqual(storage_f_name, f.name)
        self.assertTrue(os.path.exists(os.path.join(self.temp_dir, f.name)))
        self.storage.delete(storage_f_name)

    def test_file_save_with_path(self):
        """
        Saving a pathname should create intermediate directories as necessary.
        """
        self.assertFalse(self.storage.exists('path/to'))
        self.storage.save('path/to/test.file',
                          ContentFile('file saved with path'))
        self.assertTrue(self.storage.exists('path/to'))
        with self.storage.open('path/to/test.file') as f:
            self.assertEqual(f.read(), b'file saved with path')
        self.assertTrue(os.path.exists(
            os.path.join(self.temp_dir, 'path', 'to', 'test.file')))
        self.storage.delete('path/to/test.file')

    def test_file_path(self):
        """
        File storage returns the full path of a file
        """
        self.assertFalse(self.storage.exists('test.file'))
        f = ContentFile('custom contents')
        f_name = self.storage.save('test.file', f)
        self.assertEqual(self.storage.path(f_name),
                         os.path.join(self.temp_dir, f_name))
        self.storage.delete(f_name)

    def test_file_url(self):
        """
        File storage returns a url to access a given file from the Web.
        """
        self.assertEqual(self.storage.url('test.file'),
                         '%s%s' % (self.storage.base_url, 'test.file'))
        # should encode special chars except ~!*()'
        # like encodeURIComponent() JavaScript function do
        self.assertEqual(self.storage.url(r"""~!*()'@#$%^&*abc`+ =.file"""),
                         """/test_media_url/~!*()'%40%23%24%25%5E%26*abc%60%2B%20%3D.file""")
        # should translate os path separator(s) to the url path separator
        self.assertEqual(self.storage.url("""a/b\\c.file"""),
                         """/test_media_url/a/b/c.file""")
        self.storage.base_url = None
        self.assertRaises(ValueError, self.storage.url, 'test.file')

    def test_listdir(self):
        """
        File storage returns a tuple containing directories and files.
        """
        self.assertFalse(self.storage.exists('storage_test_1'))
        self.assertFalse(self.storage.exists('storage_test_2'))
        self.assertFalse(self.storage.exists('storage_dir_1'))
        self.storage.save('storage_test_1', ContentFile('custom content'))
        self.storage.save('storage_test_2', ContentFile('custom content'))
        os.mkdir(os.path.join(self.temp_dir, 'storage_dir_1'))
        dirs, files = self.storage.listdir('')
        self.assertEqual(set(dirs), set(['storage_dir_1']))
        self.assertEqual(set(files),
                         set(['storage_test_1', 'storage_test_2']))
        self.storage.delete('storage_test_1')
        self.storage.delete('storage_test_2')
        os.rmdir(os.path.join(self.temp_dir, 'storage_dir_1'))

    def test_file_storage_prevents_directory_traversal(self):
        """
        File storage prevents directory traversal (files can only be accessed if
        they're below the storage location).
        """
        self.assertRaises(SuspiciousOperation, self.storage.exists, '..')
        self.assertRaises(SuspiciousOperation, self.storage.exists, '/etc/passwd')

    def test_file_storage_preserves_filename_case(self):
        """The storage backend should preserve case of filenames."""
        # Create a storage backend associated with the mixed case name
        # directory.
        other_temp_storage = self.storage_class(location=self.temp_dir2)
        # Ask that storage backend to store a file with a mixed case filename.
        mixed_case = 'CaSe_SeNsItIvE'
        file = other_temp_storage.open(mixed_case, 'w')
        file.write('storage contents')
        file.close()
        self.assertEqual(os.path.join(self.temp_dir2, mixed_case),
                         other_temp_storage.path(mixed_case))
        other_temp_storage.delete(mixed_case)

    def test_makedirs_race_handling(self):
        """
        File storage should be robust against directory creation race conditions.
        """
        real_makedirs = os.makedirs

        # Monkey-patch os.makedirs, to simulate a normal call, a raced call,
        # and an error.
        def fake_makedirs(path):
            if path == os.path.join(self.temp_dir, 'normal'):
                real_makedirs(path)
            elif path == os.path.join(self.temp_dir, 'raced'):
                real_makedirs(path)
                raise OSError(errno.EEXIST, 'simulated EEXIST')
            elif path == os.path.join(self.temp_dir, 'error'):
                raise OSError(errno.EACCES, 'simulated EACCES')
            else:
                self.fail('unexpected argument %r' % path)

        try:
            os.makedirs = fake_makedirs
            self.storage.save('normal/test.file',
                              ContentFile('saved normally'))
            with self.storage.open('normal/test.file') as f:
                self.assertEqual(f.read(), b'saved normally')
            self.storage.save('raced/test.file',
                              ContentFile('saved with race'))
            with self.storage.open('raced/test.file') as f:
                self.assertEqual(f.read(), b'saved with race')
            # Check that OSErrors aside from EEXIST are still raised.
            self.assertRaises(OSError,
                self.storage.save, 'error/test.file', ContentFile('not saved'))
        finally:
            os.makedirs = real_makedirs

    def test_remove_race_handling(self):
        """
        File storage should be robust against file removal race conditions.
        """
        real_remove = os.remove

        # Monkey-patch os.remove, to simulate a normal call, a raced call,
        # and an error.
        def fake_remove(path):
            if path == os.path.join(self.temp_dir, 'normal.file'):
                real_remove(path)
            elif path == os.path.join(self.temp_dir, 'raced.file'):
                real_remove(path)
                raise OSError(errno.ENOENT, 'simulated ENOENT')
            elif path == os.path.join(self.temp_dir, 'error.file'):
                raise OSError(errno.EACCES, 'simulated EACCES')
            else:
                self.fail('unexpected argument %r' % path)

        try:
            os.remove = fake_remove
            self.storage.save('normal.file', ContentFile('delete normally'))
            self.storage.delete('normal.file')
            self.assertFalse(self.storage.exists('normal.file'))
            self.storage.save('raced.file', ContentFile('delete with race'))
            self.storage.delete('raced.file')
            self.assertFalse(self.storage.exists('normal.file'))
            # Check that OSErrors aside from ENOENT are still raised.
            self.storage.save('error.file', ContentFile('delete with error'))
            self.assertRaises(OSError, self.storage.delete, 'error.file')
        finally:
            os.remove = real_remove

    def test_file_chunks_error(self):
        """
        Test behavior when file.chunks() is raising an error
        """
        f1 = ContentFile('chunks fails')

        def failing_chunks():
            raise IOError
        f1.chunks = failing_chunks
        with self.assertRaises(IOError):
            self.storage.save('error.file', f1)

    def test_delete_no_name(self):
        """
        Calling delete with an empty name should not try to remove the base
        storage directory, but fail loudly (#20660).
        """
        with self.assertRaises(AssertionError):
            self.storage.delete('')
class CustomStorage(FileSystemStorage):
    def get_available_name(self, name):
        """
        Append numbers to duplicate files rather than underscores, like Trac.
        """
        pieces = name.split('.')
        stem, suffixes = pieces[0], pieces[1:]
        counter = 2
        while self.exists(name):
            name = '.'.join([stem, str(counter)] + suffixes)
            counter += 1
        return name
class CustomStorageTests(FileStorageTests):
    # Re-runs the whole FileStorageTests suite against CustomStorage, and
    # additionally checks its Trac-style duplicate-name numbering.
    storage_class = CustomStorage

    def test_custom_get_available_name(self):
        # First save keeps the name; the duplicate gets a ".2" suffix.
        first = self.storage.save('custom_storage', ContentFile('custom contents'))
        self.assertEqual(first, 'custom_storage')
        second = self.storage.save('custom_storage', ContentFile('more contents'))
        self.assertEqual(second, 'custom_storage.2')
        self.storage.delete(first)
        self.storage.delete(second)
class FileFieldStorageTests(unittest.TestCase):
    """Tests for FileField behavior through the Storage test model, which
    uses the module-level temp_storage backend."""

    def tearDown(self):
        shutil.rmtree(temp_storage_location)

    def test_files(self):
        # Attempting to access a FileField from the class raises a descriptive
        # error
        self.assertRaises(AttributeError, lambda: Storage.normal)

        # An object without a file has limited functionality.
        obj1 = Storage()
        self.assertEqual(obj1.normal.name, "")
        self.assertRaises(ValueError, lambda: obj1.normal.size)

        # Saving a file enables full functionality.
        obj1.normal.save("django_test.txt", ContentFile("content"))
        self.assertEqual(obj1.normal.name, "tests/django_test.txt")
        self.assertEqual(obj1.normal.size, 7)
        self.assertEqual(obj1.normal.read(), b"content")
        obj1.normal.close()

        # File objects can be assigned to FileField attributes, but shouldn't
        # get committed until the model it's attached to is saved.
        obj1.normal = SimpleUploadedFile("assignment.txt", b"content")
        dirs, files = temp_storage.listdir("tests")
        self.assertEqual(dirs, [])
        self.assertFalse("assignment.txt" in files)
        obj1.save()
        dirs, files = temp_storage.listdir("tests")
        self.assertEqual(sorted(files), ["assignment.txt", "django_test.txt"])

        # Save another file with the same name.
        obj2 = Storage()
        obj2.normal.save("django_test.txt", ContentFile("more content"))
        self.assertEqual(obj2.normal.name, "tests/django_test_1.txt")
        self.assertEqual(obj2.normal.size, 12)
        obj2.normal.close()

        # Deleting an object does not delete the file it uses.
        obj2.delete()
        obj2.normal.save("django_test.txt", ContentFile("more content"))
        self.assertEqual(obj2.normal.name, "tests/django_test_2.txt")
        obj2.normal.close()

    def test_filefield_read(self):
        # Files can be read in a little at a time, if necessary.
        obj = Storage.objects.create(
            normal=SimpleUploadedFile("assignment.txt", b"content"))
        obj.normal.open()
        self.assertEqual(obj.normal.read(3), b"con")
        self.assertEqual(obj.normal.read(), b"tent")
        self.assertEqual(list(obj.normal.chunks(chunk_size=2)), [b"co", b"nt", b"en", b"t"])
        obj.normal.close()

    def test_file_numbering(self):
        # Multiple files with the same name get _N appended to them.
        objs = [Storage() for i in range(3)]
        for o in objs:
            o.normal.save("multiple_files.txt", ContentFile("Same Content"))
        self.assertEqual(
            [o.normal.name for o in objs],
            ["tests/multiple_files.txt", "tests/multiple_files_1.txt", "tests/multiple_files_2.txt"]
        )
        for o in objs:
            o.delete()

    def test_filefield_default(self):
        # Default values allow an object to access a single file.
        temp_storage.save('tests/default.txt', ContentFile('default content'))
        obj = Storage.objects.create()
        self.assertEqual(obj.default.name, "tests/default.txt")
        self.assertEqual(obj.default.read(), b"default content")
        obj.default.close()

        # But it shouldn't be deleted, even if there are no more objects using
        # it.
        obj.delete()
        obj = Storage()
        self.assertEqual(obj.default.read(), b"default content")
        obj.default.close()

    def test_empty_upload_to(self):
        # upload_to can be empty, meaning it does not use subdirectory.
        obj = Storage()
        obj.empty.save('django_test.txt', ContentFile('more content'))
        self.assertEqual(obj.empty.name, "./django_test.txt")
        self.assertEqual(obj.empty.read(), b"more content")
        obj.empty.close()

    def test_random_upload_to(self):
        # Verify the fix for #5655, making sure the directory is only
        # determined once.
        obj = Storage()
        obj.random.save("random_file", ContentFile("random content"))
        self.assertTrue(obj.random.name.endswith("/random_file"))
        obj.random.close()

    def test_filefield_pickling(self):
        # Push an object into the cache to make sure it pickles properly
        obj = Storage()
        obj.normal.save("django_test.txt", ContentFile("more content"))
        obj.normal.close()
        cache.set("obj", obj)
        self.assertEqual(cache.get("obj").normal.name, "tests/django_test.txt")

    def test_file_object(self):
        # Create sample file
        temp_storage.save('tests/example.txt', ContentFile('some content'))

        # Load it as python file object
        with open(temp_storage.path('tests/example.txt')) as file_obj:
            # Save it using storage and read its content
            temp_storage.save('tests/file_obj', file_obj)
        self.assertTrue(temp_storage.exists('tests/file_obj'))
        with temp_storage.open('tests/file_obj') as f:
            self.assertEqual(f.read(), b'some content')

    def test_stringio(self):
        # Test passing StringIO instance as content argument to save
        output = six.StringIO()
        output.write('content')
        output.seek(0)

        # Save it and read written file
        temp_storage.save('tests/stringio', output)
        self.assertTrue(temp_storage.exists('tests/stringio'))
        with temp_storage.open('tests/stringio') as f:
            self.assertEqual(f.read(), b'content')
# Tests for a race condition on file saving (#4948).
# This is written in such a way that it'll always pass on platforms
# without threading.
class SlowFile(ContentFile):
    """A ContentFile whose chunks() stalls briefly, widening the race
    window exercised by FileSaveRaceConditionTest."""
    def chunks(self):
        time.sleep(1)
        # Fixed: super() was called with ContentFile as the first argument,
        # which skips ContentFile itself in the MRO; the parent of
        # *SlowFile* is what is intended here.
        return super(SlowFile, self).chunks()
class FileSaveRaceConditionTest(unittest.TestCase):
    """Regression test for a race condition on file saving (#4948): two
    concurrent saves under the same name must produce two distinct files."""

    def setUp(self):
        self.storage_dir = tempfile.mkdtemp()
        self.storage = FileSystemStorage(self.storage_dir)
        self.thread = threading.Thread(target=self.save_file, args=['conflict'])

    def tearDown(self):
        shutil.rmtree(self.storage_dir)

    def save_file(self, name):
        # SlowFile keeps the save open long enough for the two saves to
        # overlap; the (possibly de-duplicated) result name is not needed,
        # so the previously unused local assignment was dropped.
        self.storage.save(name, SlowFile(b"Data"))

    def test_race_condition(self):
        self.thread.start()
        self.save_file('conflict')
        self.thread.join()
        # Both the original and the de-duplicated name must exist.
        self.assertTrue(self.storage.exists('conflict'))
        self.assertTrue(self.storage.exists('conflict_1'))
        self.storage.delete('conflict')
        self.storage.delete('conflict_1')
@unittest.skipIf(sys.platform.startswith('win'), "Windows only partially supports umasks and chmod.")
class FileStoragePermissions(unittest.TestCase):
    # Verifies FILE_UPLOAD_PERMISSIONS / FILE_UPLOAD_DIRECTORY_PERMISSIONS
    # handling, including the umask-based fallback when they are None.

    def setUp(self):
        self.umask = 0o027
        self.old_umask = os.umask(self.umask)
        self.storage_dir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.storage_dir)
        os.umask(self.old_umask)

    @override_settings(FILE_UPLOAD_PERMISSIONS=0o654)
    def test_file_upload_permissions(self):
        # Explicit setting wins over the process umask.
        self.storage = FileSystemStorage(self.storage_dir)
        name = self.storage.save("the_file", ContentFile("data"))
        actual_mode = os.stat(self.storage.path(name))[0] & 0o777
        self.assertEqual(actual_mode, 0o654)

    @override_settings(FILE_UPLOAD_PERMISSIONS=None)
    def test_file_upload_default_permissions(self):
        # With no setting, new files get 0o666 masked by the umask.
        self.storage = FileSystemStorage(self.storage_dir)
        fname = self.storage.save("some_file", ContentFile("data"))
        mode = os.stat(self.storage.path(fname))[0] & 0o777
        self.assertEqual(mode, 0o666 & ~self.umask)

    @override_settings(FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o765)
    def test_file_upload_directory_permissions(self):
        # Explicit directory setting applies to intermediate directories.
        self.storage = FileSystemStorage(self.storage_dir)
        name = self.storage.save("the_directory/the_file", ContentFile("data"))
        dir_mode = os.stat(os.path.dirname(self.storage.path(name)))[0] & 0o777
        self.assertEqual(dir_mode, 0o765)

    @override_settings(FILE_UPLOAD_DIRECTORY_PERMISSIONS=None)
    def test_file_upload_directory_default_permissions(self):
        # With no setting, new directories get 0o777 masked by the umask.
        self.storage = FileSystemStorage(self.storage_dir)
        name = self.storage.save("the_directory/the_file", ContentFile("data"))
        dir_mode = os.stat(os.path.dirname(self.storage.path(name)))[0] & 0o777
        self.assertEqual(dir_mode, 0o777 & ~self.umask)
class FileStoragePathParsing(unittest.TestCase):
    """On name collisions, only the file portion of a path is mangled —
    never the directory portion, even when the directory contains a dot."""

    def setUp(self):
        self.storage_dir = tempfile.mkdtemp()
        self.storage = FileSystemStorage(self.storage_dir)

    def tearDown(self):
        shutil.rmtree(self.storage_dir)

    def _local_path(self, relpath):
        # Absolute filesystem path of an entry inside the temporary root.
        return os.path.join(self.storage_dir, relpath)

    def test_directory_with_dot(self):
        """Regression test for #9610.

        If the directory name contains a dot and the file name doesn't, make
        sure we still mangle the file name instead of the directory name.
        """
        for payload in ("1", "2"):
            self.storage.save('dotted.path/test', ContentFile(payload))
        self.assertFalse(os.path.exists(self._local_path('dotted_.path')))
        self.assertTrue(os.path.exists(self._local_path('dotted.path/test')))
        self.assertTrue(os.path.exists(self._local_path('dotted.path/test_1')))

    def test_first_character_dot(self):
        """A leading dot means "no extension": the underscore suffix is
        appended to the end of the name."""
        for payload in ("1", "2"):
            self.storage.save('dotted.path/.test', ContentFile(payload))
        self.assertTrue(os.path.exists(self._local_path('dotted.path/.test')))
        self.assertTrue(os.path.exists(self._local_path('dotted.path/.test_1')))
class ContentFileStorageTestCase(unittest.TestCase):
    """Saving ContentFile objects through FileSystemStorage."""

    def setUp(self):
        self.storage_dir = tempfile.mkdtemp()
        self.storage = FileSystemStorage(self.storage_dir)

    def tearDown(self):
        shutil.rmtree(self.storage_dir)

    def test_content_saving(self):
        """A ContentFile saves correctly whether it was initialized with
        bytes or with unicode text."""
        cases = (
            ('bytes.txt', ContentFile(b"content")),
            ('unicode.txt', ContentFile("español")),
        )
        for filename, content in cases:
            self.storage.save(filename, content)
@override_settings(ROOT_URLCONF='file_storage.urls')
class FileLikeObjectTestCase(LiveServerTestCase):
    """
    Test file-like objects (#15644).
    """

    available_apps = []

    def setUp(self):
        self.temp_dir = tempfile.mkdtemp()
        self.storage = FileSystemStorage(location=self.temp_dir)

    def tearDown(self):
        shutil.rmtree(self.temp_dir)

    def test_urllib2_urlopen(self):
        """
        Test the File storage API with a file like object coming from urllib2.urlopen()
        """
        file_like_object = urlopen(self.live_server_url + '/')
        f = File(file_like_object)
        stored_filename = self.storage.save("remote_file.html", f)
        # Re-fetch the page: the first response object was consumed by save().
        remote_file = urlopen(self.live_server_url + '/')
        with self.storage.open(stored_filename) as stored_file:
            self.assertEqual(stored_file.read(), remote_file.read())
| bsd-3-clause |
deepsrijit1105/edx-platform | lms/djangoapps/verified_track_content/forms.py | 28 | 1617 | """
Forms for configuring courses for verified track cohorting
"""
from django import forms
from django.utils.translation import ugettext as _
from xmodule.modulestore.django import modulestore
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from verified_track_content.models import VerifiedTrackCohortedCourse
class VerifiedTrackCourseForm(forms.ModelForm):
    """Validate course keys for the VerifiedTrackCohortedCourse model.

    The default behavior in Django admin is to:
    * Save course keys for courses that do not exist.
    * Return a 500 response if the course key format is invalid.

    Using this form ensures that we display a user-friendly
    error message instead.
    """

    class Meta(object):  # pylint:disable=missing-docstring
        model = VerifiedTrackCohortedCourse
        fields = '__all__'

    def clean_course_key(self):
        """Validate the course key.

        Checks that the key format is valid and that
        the course exists. If not, raises a ValidationError
        carrying a user-friendly message.

        Returns:
            CourseKey
        """
        cleaned_id = self.cleaned_data['course_key']
        error_msg = _('COURSE NOT FOUND. Please check that the course ID is valid.')
        try:
            course_key = CourseKey.from_string(cleaned_id)
        except InvalidKeyError:
            # Malformed key string (bad org/course/run structure).
            raise forms.ValidationError(error_msg)
        if not modulestore().has_course(course_key):
            # Well-formed key, but no such course exists.
            raise forms.ValidationError(error_msg)
        return course_key
| agpl-3.0 |
BigDataforYou/movie_recommendation_workshop_1 | big_data_4_you_demo_1/venv/lib/python2.7/site-packages/pandas/io/tests/parser/skiprows.py | 1 | 2376 | # -*- coding: utf-8 -*-
"""
Tests that skipped rows are properly handled during
parsing for all of the parsers defined in parsers.py
"""
from datetime import datetime
import numpy as np
import pandas.util.testing as tm
from pandas import DataFrame
from pandas.compat import StringIO, range, lrange
class SkipRowsTests(object):
    """Mixin exercising the ``skiprows`` argument of ``self.read_csv``.

    The concrete parser under test supplies ``read_csv``; this class is
    mixed into per-engine test cases defined elsewhere.
    """

    def test_skiprows_bug(self):
        # see gh-505
        # NOTE(review): blank lines in these fixtures may have been lost in
        # extraction — verify against upstream pandas before relying on them.
        text = """#foo,a,b,c
#foo,a,b,c
#foo,a,b,c
#foo,a,b,c
#foo,a,b,c
#foo,a,b,c
1/1/2000,1.,2.,3.
1/2/2000,4,5,6
1/3/2000,7,8,9
"""
        # skiprows given as a list of indices and as an integer count
        # must produce identical frames.
        data = self.read_csv(StringIO(text), skiprows=lrange(6), header=None,
                             index_col=0, parse_dates=True)
        data2 = self.read_csv(StringIO(text), skiprows=6, header=None,
                              index_col=0, parse_dates=True)
        expected = DataFrame(np.arange(1., 10.).reshape((3, 3)),
                             columns=[1, 2, 3],
                             index=[datetime(2000, 1, 1), datetime(2000, 1, 2),
                                    datetime(2000, 1, 3)])
        expected.index.name = 0
        tm.assert_frame_equal(data, expected)
        tm.assert_frame_equal(data, data2)

    def test_deep_skiprows(self):
        # see gh-4382
        text = "a,b,c\n" + \
            "\n".join([",".join([str(i), str(i + 1), str(i + 2)])
                       for i in range(10)])
        condensed_text = "a,b,c\n" + \
            "\n".join([",".join([str(i), str(i + 1), str(i + 2)])
                       for i in [0, 1, 2, 3, 4, 6, 8, 9]])
        # Skipping rows 6 and 8 must match the hand-condensed input.
        data = self.read_csv(StringIO(text), skiprows=[6, 8])
        condensed_data = self.read_csv(StringIO(condensed_text))
        tm.assert_frame_equal(data, condensed_data)

    def test_skiprows_blank(self):
        # see gh-9832
        # NOTE(review): upstream fixture interleaves blank lines (hence the
        # test name); they may have been stripped here — confirm.
        text = """#foo,a,b,c
#foo,a,b,c
#foo,a,b,c
#foo,a,b,c
1/1/2000,1.,2.,3.
1/2/2000,4,5,6
1/3/2000,7,8,9
"""
        data = self.read_csv(StringIO(text), skiprows=6, header=None,
                             index_col=0, parse_dates=True)
        expected = DataFrame(np.arange(1., 10.).reshape((3, 3)),
                             columns=[1, 2, 3],
                             index=[datetime(2000, 1, 1), datetime(2000, 1, 2),
                                    datetime(2000, 1, 3)])
        expected.index.name = 0
        tm.assert_frame_equal(data, expected)
| mit |
rrampage/rethinkdb | external/v8_3.30.33.16/build/gyp/test/additional-targets/gyptest-additional.py | 139 | 1530 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies simple actions when using an explicit build target of 'all'.
"""
import TestGyp
# Verifies simple actions when using an explicit build target of 'all'.
test = TestGyp.TestGyp()

test.run_gyp('all.gyp', chdir='src')
test.relocate('src', 'relocate/src')

# Build all.
test.build('all.gyp', chdir='relocate/src')

# xcode places built files next to the .gyp that declared them.
if test.format=='xcode':
  chdir = 'relocate/src/dir1'
else:
  chdir = 'relocate/src'

# Output is as expected.
file_content = 'Hello from emit.py\n'
test.built_file_must_match('out2.txt', file_content, chdir=chdir)

# Targets not in 'all' must not have been built.
test.built_file_must_not_exist('out.txt', chdir='relocate/src')
test.built_file_must_not_exist('foolib1',
                               type=test.SHARED_LIB,
                               chdir=chdir)

# TODO(mmoss) Make consistent with msvs, with 'dir1' before 'out/Default'?
if test.format in ('make', 'ninja', 'android', 'cmake'):
  chdir='relocate/src'
else:
  chdir='relocate/src/dir1'

# Build the action explicitly.
test.build('actions.gyp', 'action1_target', chdir=chdir)

# Check that things got run.
file_content = 'Hello from emit.py\n'
test.built_file_must_exist('out.txt', chdir=chdir)

# Build the shared library explicitly.
test.build('actions.gyp', 'foolib1', chdir=chdir)

test.built_file_must_exist('foolib1',
                           type=test.SHARED_LIB,
                           chdir=chdir,
                           subdir='dir1')

test.pass_test()
| agpl-3.0 |
teosz/servo | tests/wpt/css-tests/tools/sslutils/openssl.py | 253 | 13023 | import functools
import os
import shutil
import subprocess
import tempfile
from datetime import datetime
class OpenSSL(object):
    """Context manager that runs ``openssl`` subcommands against a
    temporary configuration file written on ``__enter__``."""

    def __init__(self, logger, binary, base_path, conf_path, hosts, duration,
                 base_conf_path=None):
        """Context manager for interacting with OpenSSL.

        Creates a config file for the duration of the context.

        :param logger: stdlib logger or python structured logger
        :param binary: path to openssl binary
        :param base_path: path to directory for storing certificates
        :param conf_path: path for configuration file storing configuration data
        :param hosts: list of hosts to include in configuration (or None if not
                      generating host certificates)
        :param duration: Certificate duration in days"""
        self.base_path = base_path
        self.binary = binary
        self.conf_path = conf_path
        self.base_conf_path = base_conf_path
        self.logger = logger
        self.proc = None
        self.cmd = []
        self.hosts = hosts
        self.duration = duration

    def __enter__(self):
        # The config file only exists for the lifetime of the context.
        with open(self.conf_path, "w") as f:
            f.write(get_config(self.base_path, self.hosts, self.duration))
        return self

    def __exit__(self, *args, **kwargs):
        os.unlink(self.conf_path)

    def log(self, line):
        # Mozlog structured loggers expose process_output; fall back to
        # plain debug for stdlib loggers.
        if hasattr(self.logger, "process_output"):
            self.logger.process_output(self.proc.pid if self.proc is not None else None,
                                       line.decode("utf8", "replace"),
                                       command=" ".join(self.cmd))
        else:
            self.logger.debug(line)

    def __call__(self, cmd, *args, **kwargs):
        """Run a command using OpenSSL in the current context.

        :param cmd: The openssl subcommand to run
        :param *args: Additional arguments to pass to the command
        :returns: captured stdout (stderr is merged into it)
        :raises subprocess.CalledProcessError: on a non-zero exit status
        """
        self.cmd = [self.binary, cmd]
        if cmd != "x509":
            # The x509 subcommand does not take -config; every other
            # subcommand gets our generated configuration.
            self.cmd += ["-config", self.conf_path]
        self.cmd += list(args)
        env = os.environ.copy()
        if self.base_conf_path is not None:
            env["OPENSSL_CONF"] = self.base_conf_path.encode("utf8")
        self.proc = subprocess.Popen(self.cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                                     env=env)
        stdout, stderr = self.proc.communicate()
        self.log(stdout)
        if self.proc.returncode != 0:
            raise subprocess.CalledProcessError(self.proc.returncode, self.cmd,
                                                output=stdout)
        self.cmd = []
        self.proc = None
        return stdout
def make_subject(common_name,
                 country=None,
                 state=None,
                 locality=None,
                 organization=None,
                 organization_unit=None):
    """Build an OpenSSL distinguished-name string such as ``/C=US/CN=host``.

    Fields whose value is None are omitted; CN is always last, matching the
    order OpenSSL conventionally expects.  Literal ``/`` characters in values
    are escaped so they are not parsed as field separators.

    The original implementation looked each value up via ``locals()[var]``,
    which silently breaks under renaming and defeats static analysis; the
    explicit pairing below is behaviorally identical.
    """
    fields = [("C", country),
              ("ST", state),
              ("L", locality),
              ("O", organization),
              ("OU", organization_unit),
              ("CN", common_name)]
    parts = []
    for key, value in fields:
        if value is not None:
            parts.append("/%s=%s" % (key, value.replace("/", "\\/")))
    return "".join(parts)
def make_alt_names(hosts):
    """Return a subjectAltName value listing each host as a DNS entry."""
    return ",".join("DNS:%s" % host for host in hosts)
def get_config(root_dir, hosts, duration=30):
    """Return the text of an OpenSSL configuration file.

    :param root_dir: directory the CA sections point at (index, serial, keys)
    :param hosts: hostnames for the subjectAltName extension, or None to omit it
    :param duration: certificate and CRL validity in days
    """
    if hosts is None:
        san_line = ""
    else:
        san_line = "subjectAltName = %s" % make_alt_names(hosts)

    if os.path.sep == "\\":
        # This seems to be needed for the Shining Light OpenSSL on
        # Windows, at least.
        root_dir = root_dir.replace("\\", "\\\\")

    rv = """[ ca ]
default_ca = CA_default

[ CA_default ]
dir = %(root_dir)s
certs = $dir
new_certs_dir = $certs
crl_dir = $dir%(sep)scrl
database = $dir%(sep)sindex.txt
private_key = $dir%(sep)scakey.pem
certificate = $dir%(sep)scacert.pem
serial = $dir%(sep)sserial
crldir = $dir%(sep)scrl
crlnumber = $dir%(sep)scrlnumber
crl = $crldir%(sep)scrl.pem
RANDFILE = $dir%(sep)sprivate%(sep)s.rand
x509_extensions = usr_cert
name_opt = ca_default
cert_opt = ca_default
default_days = %(duration)d
default_crl_days = %(duration)d
default_md = sha256
preserve = no
policy = policy_anything
copy_extensions = copy

[ policy_anything ]
countryName = optional
stateOrProvinceName = optional
localityName = optional
organizationName = optional
organizationalUnitName = optional
commonName = supplied
emailAddress = optional

[ req ]
default_bits = 2048
default_keyfile = privkey.pem
distinguished_name = req_distinguished_name
attributes = req_attributes
x509_extensions = v3_ca

# Passwords for private keys if not present they will be prompted for
# input_password = secret
# output_password = secret
string_mask = utf8only
req_extensions = v3_req

[ req_distinguished_name ]
countryName = Country Name (2 letter code)
countryName_default = AU
countryName_min = 2
countryName_max = 2
stateOrProvinceName = State or Province Name (full name)
stateOrProvinceName_default =
localityName = Locality Name (eg, city)
0.organizationName = Organization Name
0.organizationName_default = Web Platform Tests
organizationalUnitName = Organizational Unit Name (eg, section)
#organizationalUnitName_default =
commonName = Common Name (e.g. server FQDN or YOUR name)
commonName_max = 64
emailAddress = Email Address
emailAddress_max = 64

[ req_attributes ]

[ usr_cert ]
basicConstraints=CA:false
subjectKeyIdentifier=hash
authorityKeyIdentifier=keyid,issuer

[ v3_req ]
basicConstraints = CA:FALSE
keyUsage = nonRepudiation, digitalSignature, keyEncipherment
extendedKeyUsage = serverAuth
%(san_line)s

[ v3_ca ]
basicConstraints = CA:true
subjectKeyIdentifier=hash
authorityKeyIdentifier=keyid:always,issuer:always
keyUsage = keyCertSign
""" % {"root_dir": root_dir,
       "san_line": san_line,
       "duration": duration,
       "sep": os.path.sep.replace("\\", "\\\\")}

    return rv
class OpenSSLEnvironment(object):
    """SSL environment managing a local CA plus per-host certificates,
    backed by the ``openssl`` command-line tool."""

    ssl_enabled = True

    def __init__(self, logger, openssl_binary="openssl", base_path=None,
                 password="web-platform-tests", force_regenerate=False,
                 duration=30, base_conf_path=None):
        """SSL environment that creates a local CA and host certificate using OpenSSL.

        By default this will look in base_path for existing certificates that are still
        valid and only create new certificates if there aren't any. This behaviour can
        be adjusted using the force_regenerate option.

        :param logger: a stdlib logging compatible logger or mozlog structured logger
        :param openssl_binary: Path to the OpenSSL binary
        :param base_path: Path in which certificates will be stored. If None, a temporary
                          directory will be used and removed when the server shuts down
        :param password: Password to use
        :param force_regenerate: Always create a new certificate even if one already exists.
        """
        self.logger = logger

        self.temporary = False
        if base_path is None:
            # No caller-supplied directory: use a throwaway one and remember
            # to delete it on __exit__.
            base_path = tempfile.mkdtemp()
            self.temporary = True

        self.base_path = os.path.abspath(base_path)
        self.password = password
        self.force_regenerate = force_regenerate
        self.duration = duration
        self.base_conf_path = base_conf_path

        self.path = None
        self.binary = openssl_binary
        self.openssl = None

        self._ca_cert_path = None
        self._ca_key_path = None
        # Maps a tuple of hostnames -> (key path, cert path).
        self.host_certificates = {}

    def __enter__(self):
        if not os.path.exists(self.base_path):
            os.makedirs(self.base_path)

        # Initialise the CA database files openssl's "ca" command expects.
        path = functools.partial(os.path.join, self.base_path)

        with open(path("index.txt"), "w"):
            pass
        with open(path("serial"), "w") as f:
            f.write("01")

        self.path = path

        return self

    def __exit__(self, *args, **kwargs):
        if self.temporary:
            shutil.rmtree(self.base_path)

    def _config_openssl(self, hosts):
        conf_path = self.path("openssl.cfg")
        return OpenSSL(self.logger, self.binary, self.base_path, conf_path, hosts,
                       self.duration, self.base_conf_path)

    def ca_cert_path(self):
        """Get the path to the CA certificate file, generating a
        new one if needed"""
        if self._ca_cert_path is None and not self.force_regenerate:
            self._load_ca_cert()
        if self._ca_cert_path is None:
            self._generate_ca()
        return self._ca_cert_path

    def _load_ca_cert(self):
        key_path = self.path("cakey.pem")
        cert_path = self.path("cacert.pem")

        if self.check_key_cert(key_path, cert_path, None):
            self.logger.info("Using existing CA cert")
            self._ca_key_path, self._ca_cert_path = key_path, cert_path

    def check_key_cert(self, key_path, cert_path, hosts):
        """Check that a key and cert file exist and are valid"""
        if not os.path.exists(key_path) or not os.path.exists(cert_path):
            return False

        with self._config_openssl(hosts) as openssl:
            end_date_str = openssl("x509",
                                   "-noout",
                                   "-enddate",
                                   "-in", cert_path).split("=", 1)[1].strip()
            # Not sure if this works in other locales
            end_date = datetime.strptime(end_date_str, "%b %d %H:%M:%S %Y %Z")
            # Should have some buffer here e.g. 1 hr
            if end_date < datetime.now():
                return False

        # TODO: check the key actually signed the cert.
        return True

    def _generate_ca(self):
        path = self.path
        self.logger.info("Generating new CA in %s" % self.base_path)

        key_path = path("cakey.pem")
        req_path = path("careq.pem")
        cert_path = path("cacert.pem")

        # Two-step: create a CSR, then self-sign it with the v3_ca extensions.
        with self._config_openssl(None) as openssl:
            openssl("req",
                    "-batch",
                    "-new",
                    "-newkey", "rsa:2048",
                    "-keyout", key_path,
                    "-out", req_path,
                    "-subj", make_subject("web-platform-tests"),
                    "-passout", "pass:%s" % self.password)

            openssl("ca",
                    "-batch",
                    "-create_serial",
                    "-keyfile", key_path,
                    "-passin", "pass:%s" % self.password,
                    "-selfsign",
                    "-extensions", "v3_ca",
                    "-in", req_path,
                    "-out", cert_path)

        os.unlink(req_path)

        self._ca_key_path, self._ca_cert_path = key_path, cert_path

    def host_cert_path(self, hosts):
        """Get a tuple of (private key path, certificate path) for a host,
        generating new ones if necessary.

        hosts must be a list of all hosts to appear on the certificate, with
        the primary hostname first."""

        hosts = tuple(hosts)
        if hosts not in self.host_certificates:
            if not self.force_regenerate:
                key_cert = self._load_host_cert(hosts)
            else:
                key_cert = None
            if key_cert is None:
                key, cert = self._generate_host_cert(hosts)
            else:
                key, cert = key_cert
            self.host_certificates[hosts] = key, cert

        return self.host_certificates[hosts]

    def _load_host_cert(self, hosts):
        host = hosts[0]
        key_path = self.path("%s.key" % host)
        cert_path = self.path("%s.pem" % host)

        # TODO: check that this cert was signed by the CA cert
        if self.check_key_cert(key_path, cert_path, hosts):
            self.logger.info("Using existing host cert")
            return key_path, cert_path

    def _generate_host_cert(self, hosts):
        host = hosts[0]
        if self._ca_key_path is None:
            self._generate_ca()
        ca_key_path = self._ca_key_path

        assert os.path.exists(ca_key_path)

        path = self.path

        req_path = path("wpt.req")
        cert_path = path("%s.pem" % host)
        key_path = path("%s.key" % host)

        self.logger.info("Generating new host cert")

        with self._config_openssl(hosts) as openssl:
            openssl("req",
                    "-batch",
                    "-newkey", "rsa:2048",
                    "-keyout", key_path,
                    "-in", ca_key_path,
                    "-nodes",
                    "-out", req_path)

            openssl("ca",
                    "-batch",
                    "-in", req_path,
                    "-passin", "pass:%s" % self.password,
                    "-subj", make_subject(host),
                    "-out", cert_path)

        os.unlink(req_path)

        return key_path, cert_path
| mpl-2.0 |
mxamin/youtube-dl | youtube_dl/extractor/mixcloud.py | 16 | 11671 | from __future__ import unicode_literals
import base64
import functools
import itertools
import re
from .common import InfoExtractor
from ..compat import (
compat_chr,
compat_ord,
compat_urllib_parse_unquote,
compat_urlparse,
)
from ..utils import (
clean_html,
ExtractorError,
OnDemandPagedList,
parse_count,
str_to_int,
)
class MixcloudIE(InfoExtractor):
    """Extractor for a single Mixcloud cloudcast (track) page."""

    _VALID_URL = r'^(?:https?://)?(?:www\.)?mixcloud\.com/([^/]+)/(?!stream|uploads|favorites|listens|playlists)([^/]+)'
    IE_NAME = 'mixcloud'

    _TESTS = [{
        'url': 'http://www.mixcloud.com/dholbach/cryptkeeper/',
        'info_dict': {
            'id': 'dholbach-cryptkeeper',
            'ext': 'm4a',
            'title': 'Cryptkeeper',
            'description': 'After quite a long silence from myself, finally another Drum\'n\'Bass mix with my favourite current dance floor bangers.',
            'uploader': 'Daniel Holbach',
            'uploader_id': 'dholbach',
            'thumbnail': 're:https?://.*\.jpg',
            'view_count': int,
            'like_count': int,
        },
    }, {
        'url': 'http://www.mixcloud.com/gillespeterson/caribou-7-inch-vinyl-mix-chat/',
        'info_dict': {
            'id': 'gillespeterson-caribou-7-inch-vinyl-mix-chat',
            'ext': 'mp3',
            'title': 'Caribou 7 inch Vinyl Mix & Chat',
            'description': 'md5:2b8aec6adce69f9d41724647c65875e8',
            'uploader': 'Gilles Peterson Worldwide',
            'uploader_id': 'gillespeterson',
            'thumbnail': 're:https?://.*',
            'view_count': int,
            'like_count': int,
        },
    }]

    # See https://www.mixcloud.com/media/js2/www_js_2.9e23256562c080482435196ca3975ab5.js
    @staticmethod
    def _decrypt_play_info(play_info):
        # XOR-decrypt the base64-encoded play info with a repeating key.
        KEY = 'pleasedontdownloadourmusictheartistswontgetpaid'
        play_info = base64.b64decode(play_info.encode('ascii'))

        return ''.join([
            compat_chr(compat_ord(ch) ^ compat_ord(KEY[idx % len(KEY)]))
            for idx, ch in enumerate(play_info)])

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        uploader = mobj.group(1)
        cloudcast_name = mobj.group(2)
        track_id = compat_urllib_parse_unquote('-'.join((uploader, cloudcast_name)))

        webpage = self._download_webpage(url, track_id)

        # A disabled cloudcast carries a notice div; only raise if we also
        # fail to find a stream URL below.
        message = self._html_search_regex(
            r'(?s)<div[^>]+class="global-message cloudcast-disabled-notice-light"[^>]*>(.+?)<(?:a|/div)',
            webpage, 'error message', default=None)

        encrypted_play_info = self._search_regex(
            r'm-play-info="([^"]+)"', webpage, 'play info')
        play_info = self._parse_json(
            self._decrypt_play_info(encrypted_play_info), track_id)

        if message and 'stream_url' not in play_info:
            raise ExtractorError('%s said: %s' % (self.IE_NAME, message), expected=True)

        song_url = play_info['stream_url']

        PREFIX = (
            r'm-play-on-spacebar[^>]+'
            r'(?:\s+[a-zA-Z0-9-]+(?:="[^"]+")?)*?\s+')
        title = self._html_search_regex(
            PREFIX + r'm-title="([^"]+)"', webpage, 'title')
        thumbnail = self._proto_relative_url(self._html_search_regex(
            PREFIX + r'm-thumbnail-url="([^"]+)"', webpage, 'thumbnail',
            fatal=False))
        uploader = self._html_search_regex(
            PREFIX + r'm-owner-name="([^"]+)"',
            webpage, 'uploader', fatal=False)
        uploader_id = self._search_regex(
            r'\s+"profile": "([^"]+)",', webpage, 'uploader id', fatal=False)
        description = self._og_search_description(webpage)
        like_count = parse_count(self._search_regex(
            r'\bbutton-favorite[^>]+>.*?<span[^>]+class=["\']toggle-number[^>]+>\s*([^<]+)',
            webpage, 'like count', default=None))
        view_count = str_to_int(self._search_regex(
            [r'<meta itemprop="interactionCount" content="UserPlays:([0-9]+)"',
             r'/listeners/?">([0-9,.]+)</a>'],
            webpage, 'play count', default=None))

        return {
            'id': track_id,
            'title': title,
            'url': song_url,
            'description': description,
            'thumbnail': thumbnail,
            'uploader': uploader,
            'uploader_id': uploader_id,
            'view_count': view_count,
            'like_count': like_count,
        }
class MixcloudPlaylistBaseIE(InfoExtractor):
    """Shared paging helpers for Mixcloud list-style extractors
    (user pages, playlists, streams)."""

    _PAGE_SIZE = 24

    def _find_urls_in_page(self, page):
        # Yield url_result entries for every play button found in the page.
        for url in re.findall(r'm-play-button m-url="(?P<url>[^"]+)"', page):
            yield self.url_result(
                compat_urlparse.urljoin('https://www.mixcloud.com', clean_html(url)),
                MixcloudIE.ie_key())

    def _fetch_tracks_page(self, path, video_id, page_name, current_page, real_page_number=None):
        real_page_number = real_page_number or current_page + 1
        return self._download_webpage(
            'https://www.mixcloud.com/%s/' % path, video_id,
            note='Download %s (page %d)' % (page_name, current_page + 1),
            errnote='Unable to download %s' % page_name,
            query={'page': real_page_number, 'list': 'main', '_ajax': '1'},
            headers={'X-Requested-With': 'XMLHttpRequest'})

    def _tracks_page_func(self, page, video_id, page_name, current_page):
        # Page-fetching callback shape expected by OnDemandPagedList.
        resp = self._fetch_tracks_page(page, video_id, page_name, current_page)

        for item in self._find_urls_in_page(resp):
            yield item

    def _get_user_description(self, page_content):
        return self._html_search_regex(
            r'<div[^>]+class="description-text"[^>]*>(.+?)</div>',
            page_content, 'user description', fatal=False)
class MixcloudUserIE(MixcloudPlaylistBaseIE):
    """Extractor for a Mixcloud user's uploads, favorites, or listens."""

    _VALID_URL = r'^(?:https?://)?(?:www\.)?mixcloud\.com/(?P<user>[^/]+)/(?P<type>uploads|favorites|listens)?/?$'
    IE_NAME = 'mixcloud:user'

    _TESTS = [{
        'url': 'http://www.mixcloud.com/dholbach/',
        'info_dict': {
            'id': 'dholbach_uploads',
            'title': 'Daniel Holbach (uploads)',
            'description': 'md5:327af72d1efeb404a8216c27240d1370',
        },
        'playlist_mincount': 11,
    }, {
        'url': 'http://www.mixcloud.com/dholbach/uploads/',
        'info_dict': {
            'id': 'dholbach_uploads',
            'title': 'Daniel Holbach (uploads)',
            'description': 'md5:327af72d1efeb404a8216c27240d1370',
        },
        'playlist_mincount': 11,
    }, {
        'url': 'http://www.mixcloud.com/dholbach/favorites/',
        'info_dict': {
            'id': 'dholbach_favorites',
            'title': 'Daniel Holbach (favorites)',
            'description': 'md5:327af72d1efeb404a8216c27240d1370',
        },
        'params': {
            'playlist_items': '1-100',
        },
        'playlist_mincount': 100,
    }, {
        'url': 'http://www.mixcloud.com/dholbach/listens/',
        'info_dict': {
            'id': 'dholbach_listens',
            'title': 'Daniel Holbach (listens)',
            'description': 'md5:327af72d1efeb404a8216c27240d1370',
        },
        'params': {
            'playlist_items': '1-100',
        },
        'playlist_mincount': 100,
    }]

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        user_id = mobj.group('user')
        list_type = mobj.group('type')

        # if only a profile URL was supplied, default to download all uploads
        if list_type is None:
            list_type = 'uploads'

        video_id = '%s_%s' % (user_id, list_type)

        profile = self._download_webpage(
            'https://www.mixcloud.com/%s/' % user_id, video_id,
            note='Downloading user profile',
            errnote='Unable to download user profile')

        username = self._og_search_title(profile)
        description = self._get_user_description(profile)

        # Lazily page through the list rather than fetching everything up front.
        entries = OnDemandPagedList(
            functools.partial(
                self._tracks_page_func,
                '%s/%s' % (user_id, list_type), video_id, 'list of %s' % list_type),
            self._PAGE_SIZE, use_cache=True)

        return self.playlist_result(
            entries, video_id, '%s (%s)' % (username, list_type), description)
class MixcloudPlaylistIE(MixcloudPlaylistBaseIE):
    """Extractor for a Mixcloud playlist page."""

    _VALID_URL = r'^(?:https?://)?(?:www\.)?mixcloud\.com/(?P<user>[^/]+)/playlists/(?P<playlist>[^/]+)/?$'
    IE_NAME = 'mixcloud:playlist'

    _TESTS = [{
        'url': 'https://www.mixcloud.com/RedBullThre3style/playlists/tokyo-finalists-2015/',
        'info_dict': {
            'id': 'RedBullThre3style_tokyo-finalists-2015',
            'title': 'National Champions 2015',
            'description': 'md5:6ff5fb01ac76a31abc9b3939c16243a3',
        },
        'playlist_mincount': 16,
    }, {
        'url': 'https://www.mixcloud.com/maxvibes/playlists/jazzcat-on-ness-radio/',
        'info_dict': {
            'id': 'maxvibes_jazzcat-on-ness-radio',
            'title': 'Jazzcat on Ness Radio',
            'description': 'md5:7bbbf0d6359a0b8cda85224be0f8f263',
        },
        'playlist_mincount': 23
    }]

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        user_id = mobj.group('user')
        playlist_id = mobj.group('playlist')
        video_id = '%s_%s' % (user_id, playlist_id)

        profile = self._download_webpage(
            url, user_id,
            note='Downloading playlist page',
            errnote='Unable to download playlist page')

        description = self._get_user_description(profile)
        playlist_title = self._html_search_regex(
            r'<span[^>]+class="[^"]*list-playlist-title[^"]*"[^>]*>(.*?)</span>',
            profile, 'playlist title')

        # Lazily page through the tracklist via the shared paging helper.
        entries = OnDemandPagedList(
            functools.partial(
                self._tracks_page_func,
                '%s/playlists/%s' % (user_id, playlist_id), video_id, 'tracklist'),
            self._PAGE_SIZE)

        return self.playlist_result(entries, video_id, playlist_title, description)
class MixcloudStreamIE(MixcloudPlaylistBaseIE):
    """Extractor for a Mixcloud user's full stream (all activity)."""

    _VALID_URL = r'^(?:https?://)?(?:www\.)?mixcloud\.com/(?P<id>[^/]+)/stream/?$'
    IE_NAME = 'mixcloud:stream'

    _TEST = {
        'url': 'https://www.mixcloud.com/FirstEar/stream/',
        'info_dict': {
            'id': 'FirstEar',
            'title': 'First Ear',
            'description': 'Curators of good music\nfirstearmusic.com',
        },
        'playlist_mincount': 192,
    }

    def _real_extract(self, url):
        user_id = self._match_id(url)

        webpage = self._download_webpage(url, user_id)

        entries = []
        prev_page_url = None

        def _handle_page(page):
            # Collect entries from one page; return the next-page URL (or None).
            entries.extend(self._find_urls_in_page(page))
            return self._search_regex(
                r'm-next-page-url="([^"]+)"', page,
                'next page URL', default=None)

        next_page_url = _handle_page(webpage)

        for idx in itertools.count(0):
            # Stop when there is no next page, or the site repeats itself.
            if not next_page_url or prev_page_url == next_page_url:
                break
            prev_page_url = next_page_url
            current_page = int(self._search_regex(
                r'\?page=(\d+)', next_page_url, 'next page number'))

            next_page_url = _handle_page(self._fetch_tracks_page(
                '%s/stream' % user_id, user_id, 'stream', idx,
                real_page_number=current_page))

        username = self._og_search_title(webpage)
        description = self._get_user_description(webpage)

        return self.playlist_result(entries, user_id, username, description)
| unlicense |
jhawkesworth/ansible | lib/ansible/modules/network/fortios/fortios_vpn_ipsec_phase1_interface.py | 21 | 54728 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# the lib use python logging can get it if the following is set in your
# Ansible config.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_vpn_ipsec_phase1_interface
short_description: Configure VPN remote gateway in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS by allowing the
user to set and modify vpn_ipsec feature and phase1_interface category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.2
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate ip address.
required: true
username:
description:
- FortiOS or FortiGate username.
required: true
password:
description:
- FortiOS or FortiGate password.
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS
protocol
type: bool
default: true
vpn_ipsec_phase1_interface:
description:
- Configure VPN remote gateway.
default: null
suboptions:
state:
description:
- Indicates whether to create or remove the object
choices:
- present
- absent
acct-verify:
description:
- Enable/disable verification of RADIUS accounting record.
choices:
- enable
- disable
add-gw-route:
description:
- Enable/disable automatically add a route to the remote gateway.
choices:
- enable
- disable
add-route:
description:
- Enable/disable control addition of a route to peer destination selector.
choices:
- disable
- enable
assign-ip:
description:
- Enable/disable assignment of IP to IPsec interface via configuration method.
choices:
- disable
- enable
assign-ip-from:
description:
- Method by which the IP address will be assigned.
choices:
- range
- usrgrp
- dhcp
- name
authmethod:
description:
- Authentication method.
choices:
- psk
- signature
authmethod-remote:
description:
- Authentication method (remote side).
choices:
- psk
- signature
authpasswd:
description:
- XAuth password (max 35 characters).
authusr:
description:
- XAuth user name.
authusrgrp:
description:
- Authentication user group. Source user.group.name.
auto-discovery-forwarder:
description:
- Enable/disable forwarding auto-discovery short-cut messages.
choices:
- enable
- disable
auto-discovery-psk:
description:
- Enable/disable use of pre-shared secrets for authentication of auto-discovery tunnels.
choices:
- enable
- disable
auto-discovery-receiver:
description:
- Enable/disable accepting auto-discovery short-cut messages.
choices:
- enable
- disable
auto-discovery-sender:
description:
- Enable/disable sending auto-discovery short-cut messages.
choices:
- enable
- disable
auto-negotiate:
description:
- Enable/disable automatic initiation of IKE SA negotiation.
choices:
- enable
- disable
backup-gateway:
description:
- Instruct unity clients about the backup gateway address(es).
suboptions:
address:
description:
- Address of backup gateway.
required: true
banner:
description:
- Message that unity client should display after connecting.
cert-id-validation:
description:
- Enable/disable cross validation of peer ID and the identity in the peer's certificate as specified in RFC 4945.
choices:
- enable
- disable
certificate:
description:
- The names of up to 4 signed personal certificates.
suboptions:
name:
description:
- Certificate name. Source vpn.certificate.local.name.
required: true
childless-ike:
description:
- Enable/disable childless IKEv2 initiation (RFC 6023).
choices:
- enable
- disable
client-auto-negotiate:
description:
- Enable/disable allowing the VPN client to bring up the tunnel when there is no traffic.
choices:
- disable
- enable
client-keep-alive:
description:
- Enable/disable allowing the VPN client to keep the tunnel up when there is no traffic.
choices:
- disable
- enable
comments:
description:
- Comment.
default-gw:
description:
- IPv4 address of default route gateway to use for traffic exiting the interface.
default-gw-priority:
description:
- Priority for default gateway route. A higher priority number signifies a less preferred route.
dhgrp:
description:
- DH group.
choices:
- 1
- 2
- 5
- 14
- 15
- 16
- 17
- 18
- 19
- 20
- 21
- 27
- 28
- 29
- 30
- 31
digital-signature-auth:
description:
- Enable/disable IKEv2 Digital Signature Authentication (RFC 7427).
choices:
- enable
- disable
distance:
description:
- Distance for routes added by IKE (1 - 255).
dns-mode:
description:
- DNS server mode.
choices:
- manual
- auto
domain:
description:
- Instruct unity clients about the default DNS domain.
dpd:
description:
- Dead Peer Detection mode.
choices:
- disable
- on-idle
- on-demand
dpd-retrycount:
description:
- Number of DPD retry attempts.
dpd-retryinterval:
description:
- DPD retry interval.
eap:
description:
- Enable/disable IKEv2 EAP authentication.
choices:
- enable
- disable
eap-identity:
description:
- IKEv2 EAP peer identity type.
choices:
- use-id-payload
- send-request
encap-local-gw4:
description:
- Local IPv4 address of GRE/VXLAN tunnel.
encap-local-gw6:
description:
- Local IPv6 address of GRE/VXLAN tunnel.
encap-remote-gw4:
description:
- Remote IPv4 address of GRE/VXLAN tunnel.
encap-remote-gw6:
description:
- Remote IPv6 address of GRE/VXLAN tunnel.
encapsulation:
description:
- Enable/disable GRE/VXLAN encapsulation.
choices:
- none
- gre
- vxlan
encapsulation-address:
description:
- Source for GRE/VXLAN tunnel address.
choices:
- ike
- ipv4
- ipv6
enforce-unique-id:
description:
- Enable/disable peer ID uniqueness check.
choices:
- disable
- keep-new
- keep-old
exchange-interface-ip:
description:
- Enable/disable exchange of IPsec interface IP address.
choices:
- enable
- disable
forticlient-enforcement:
description:
- Enable/disable FortiClient enforcement.
choices:
- enable
- disable
fragmentation:
description:
- Enable/disable fragment IKE message on re-transmission.
choices:
- enable
- disable
fragmentation-mtu:
description:
- IKE fragmentation MTU (500 - 16000).
group-authentication:
description:
- Enable/disable IKEv2 IDi group authentication.
choices:
- enable
- disable
group-authentication-secret:
description:
- Password for IKEv2 IDi group authentication. (ASCII string or hexadecimal indicated by a leading 0x.)
ha-sync-esp-seqno:
description:
- Enable/disable sequence number jump ahead for IPsec HA.
choices:
- enable
- disable
idle-timeout:
description:
- Enable/disable IPsec tunnel idle timeout.
choices:
- enable
- disable
idle-timeoutinterval:
description:
- IPsec tunnel idle timeout in minutes (5 - 43200).
ike-version:
description:
- IKE protocol version.
choices:
- 1
- 2
include-local-lan:
description:
- Enable/disable allow local LAN access on unity clients.
choices:
- disable
- enable
interface:
description:
- Local physical, aggregate, or VLAN outgoing interface. Source system.interface.name.
ip-version:
description:
- IP version to use for VPN interface.
choices:
- 4
- 6
ipv4-dns-server1:
description:
- IPv4 DNS server 1.
ipv4-dns-server2:
description:
- IPv4 DNS server 2.
ipv4-dns-server3:
description:
- IPv4 DNS server 3.
ipv4-end-ip:
description:
- End of IPv4 range.
ipv4-exclude-range:
description:
- Configuration Method IPv4 exclude ranges.
suboptions:
end-ip:
description:
- End of IPv4 exclusive range.
id:
description:
- ID.
required: true
start-ip:
description:
- Start of IPv4 exclusive range.
ipv4-name:
description:
- IPv4 address name. Source firewall.address.name firewall.addrgrp.name.
ipv4-netmask:
description:
- IPv4 Netmask.
ipv4-split-exclude:
description:
- IPv4 subnets that should not be sent over the IPsec tunnel. Source firewall.address.name firewall.addrgrp.name.
ipv4-split-include:
description:
- IPv4 split-include subnets. Source firewall.address.name firewall.addrgrp.name.
ipv4-start-ip:
description:
- Start of IPv4 range.
ipv4-wins-server1:
description:
- WINS server 1.
ipv4-wins-server2:
description:
- WINS server 2.
ipv6-dns-server1:
description:
- IPv6 DNS server 1.
ipv6-dns-server2:
description:
- IPv6 DNS server 2.
ipv6-dns-server3:
description:
- IPv6 DNS server 3.
ipv6-end-ip:
description:
- End of IPv6 range.
ipv6-exclude-range:
description:
- Configuration method IPv6 exclude ranges.
suboptions:
end-ip:
description:
- End of IPv6 exclusive range.
id:
description:
- ID.
required: true
start-ip:
description:
- Start of IPv6 exclusive range.
ipv6-name:
description:
- IPv6 address name. Source firewall.address6.name firewall.addrgrp6.name.
ipv6-prefix:
description:
- IPv6 prefix.
ipv6-split-exclude:
description:
- IPv6 subnets that should not be sent over the IPsec tunnel. Source firewall.address6.name firewall.addrgrp6.name.
ipv6-split-include:
description:
- IPv6 split-include subnets. Source firewall.address6.name firewall.addrgrp6.name.
ipv6-start-ip:
description:
- Start of IPv6 range.
keepalive:
description:
- NAT-T keep alive interval.
keylife:
description:
- Time to wait in seconds before phase 1 encryption key expires.
local-gw:
description:
- IPv4 address of the local gateway's external interface.
local-gw6:
description:
- IPv6 address of the local gateway's external interface.
localid:
description:
- Local ID.
localid-type:
description:
- Local ID type.
choices:
- auto
- fqdn
- user-fqdn
- keyid
- address
- asn1dn
mesh-selector-type:
description:
- Add selectors containing subsets of the configuration depending on traffic.
choices:
- disable
- subnet
- host
mode:
description:
- The ID protection mode used to establish a secure channel.
choices:
- aggressive
- main
mode-cfg:
description:
- Enable/disable configuration method.
choices:
- disable
- enable
monitor:
description:
- IPsec interface as backup for primary interface. Source vpn.ipsec.phase1-interface.name.
monitor-hold-down-delay:
description:
- Time to wait in seconds before recovery once primary re-establishes.
monitor-hold-down-time:
description:
- Time of day at which to fail back to primary after it re-establishes.
monitor-hold-down-type:
description:
- Recovery time method when primary interface re-establishes.
choices:
- immediate
- delay
- time
monitor-hold-down-weekday:
description:
- Day of the week to recover once primary re-establishes.
choices:
- everyday
- sunday
- monday
- tuesday
- wednesday
- thursday
- friday
- saturday
name:
description:
- IPsec remote gateway name.
required: true
nattraversal:
description:
- Enable/disable NAT traversal.
choices:
- enable
- disable
- forced
negotiate-timeout:
description:
- IKE SA negotiation timeout in seconds (1 - 300).
net-device:
description:
- Enable/disable kernel device creation for dialup instances.
choices:
- enable
- disable
npu-offload:
description:
- Enable/disable offloading NPU.
choices:
- enable
- disable
passive-mode:
description:
- Enable/disable IPsec passive mode for static tunnels.
choices:
- enable
- disable
peer:
description:
- Accept this peer certificate. Source user.peer.name.
peergrp:
description:
- Accept this peer certificate group. Source user.peergrp.name.
peerid:
description:
- Accept this peer identity.
peertype:
description:
- Accept this peer type.
choices:
- any
- one
- dialup
- peer
- peergrp
ppk:
description:
- Enable/disable IKEv2 Postquantum Preshared Key (PPK).
choices:
- disable
- allow
- require
ppk-identity:
description:
- IKEv2 Postquantum Preshared Key Identity.
ppk-secret:
description:
- IKEv2 Postquantum Preshared Key (ASCII string or hexadecimal encoded with a leading 0x).
priority:
description:
- Priority for routes added by IKE (0 - 4294967295).
proposal:
description:
- Phase1 proposal.
choices:
- des-md5
- des-sha1
- des-sha256
- des-sha384
- des-sha512
psksecret:
description:
- Pre-shared secret for PSK authentication (ASCII string or hexadecimal encoded with a leading 0x).
psksecret-remote:
description:
- Pre-shared secret for remote side PSK authentication (ASCII string or hexadecimal encoded with a leading 0x).
reauth:
description:
- Enable/disable re-authentication upon IKE SA lifetime expiration.
choices:
- disable
- enable
rekey:
description:
- Enable/disable phase1 rekey.
choices:
- enable
- disable
remote-gw:
description:
- IPv4 address of the remote gateway's external interface.
remote-gw6:
description:
- IPv6 address of the remote gateway's external interface.
remotegw-ddns:
description:
- Domain name of remote gateway (eg. name.DDNS.com).
rsa-signature-format:
description:
- Digital Signature Authentication RSA signature format.
choices:
- pkcs1
- pss
save-password:
description:
- Enable/disable saving XAuth username and password on VPN clients.
choices:
- disable
- enable
send-cert-chain:
description:
- Enable/disable sending certificate chain.
choices:
- enable
- disable
signature-hash-alg:
description:
- Digital Signature Authentication hash algorithms.
choices:
- sha1
- sha2-256
- sha2-384
- sha2-512
split-include-service:
description:
- Split-include services. Source firewall.service.group.name firewall.service.custom.name.
suite-b:
description:
- Use Suite-B.
choices:
- disable
- suite-b-gcm-128
- suite-b-gcm-256
tunnel-search:
description:
- Tunnel search method for when the interface is shared.
choices:
- selectors
- nexthop
type:
description:
- Remote gateway type.
choices:
- static
- dynamic
- ddns
unity-support:
description:
- Enable/disable support for Cisco UNITY Configuration Method extensions.
choices:
- disable
- enable
usrgrp:
description:
- User group name for dialup peers. Source user.group.name.
vni:
description:
- VNI of VXLAN tunnel.
wizard-type:
description:
- GUI VPN Wizard Type.
choices:
- custom
- dialup-forticlient
- dialup-ios
- dialup-android
- dialup-windows
- dialup-cisco
- static-fortigate
- dialup-fortigate
- static-cisco
- dialup-cisco-fw
xauthtype:
description:
- XAuth type.
choices:
- disable
- client
- pap
- chap
- auto
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
tasks:
- name: Configure VPN remote gateway.
fortios_vpn_ipsec_phase1_interface:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
vpn_ipsec_phase1_interface:
state: "present"
acct-verify: "enable"
add-gw-route: "enable"
add-route: "disable"
assign-ip: "disable"
assign-ip-from: "range"
authmethod: "psk"
authmethod-remote: "psk"
authpasswd: "<your_own_value>"
authusr: "<your_own_value>"
authusrgrp: "<your_own_value> (source user.group.name)"
auto-discovery-forwarder: "enable"
auto-discovery-psk: "enable"
auto-discovery-receiver: "enable"
auto-discovery-sender: "enable"
auto-negotiate: "enable"
backup-gateway:
-
address: "<your_own_value>"
banner: "<your_own_value>"
cert-id-validation: "enable"
certificate:
-
name: "default_name_23 (source vpn.certificate.local.name)"
childless-ike: "enable"
client-auto-negotiate: "disable"
client-keep-alive: "disable"
comments: "<your_own_value>"
default-gw: "<your_own_value>"
default-gw-priority: "29"
dhgrp: "1"
digital-signature-auth: "enable"
distance: "32"
dns-mode: "manual"
domain: "<your_own_value>"
dpd: "disable"
dpd-retrycount: "36"
dpd-retryinterval: "<your_own_value>"
eap: "enable"
eap-identity: "use-id-payload"
encap-local-gw4: "<your_own_value>"
encap-local-gw6: "<your_own_value>"
encap-remote-gw4: "<your_own_value>"
encap-remote-gw6: "<your_own_value>"
encapsulation: "none"
encapsulation-address: "ike"
enforce-unique-id: "disable"
exchange-interface-ip: "enable"
forticlient-enforcement: "enable"
fragmentation: "enable"
fragmentation-mtu: "50"
group-authentication: "enable"
group-authentication-secret: "<your_own_value>"
ha-sync-esp-seqno: "enable"
idle-timeout: "enable"
idle-timeoutinterval: "55"
ike-version: "1"
include-local-lan: "disable"
interface: "<your_own_value> (source system.interface.name)"
ip-version: "4"
ipv4-dns-server1: "<your_own_value>"
ipv4-dns-server2: "<your_own_value>"
ipv4-dns-server3: "<your_own_value>"
ipv4-end-ip: "<your_own_value>"
ipv4-exclude-range:
-
end-ip: "<your_own_value>"
id: "66"
start-ip: "<your_own_value>"
ipv4-name: "<your_own_value> (source firewall.address.name firewall.addrgrp.name)"
ipv4-netmask: "<your_own_value>"
ipv4-split-exclude: "<your_own_value> (source firewall.address.name firewall.addrgrp.name)"
ipv4-split-include: "<your_own_value> (source firewall.address.name firewall.addrgrp.name)"
ipv4-start-ip: "<your_own_value>"
ipv4-wins-server1: "<your_own_value>"
ipv4-wins-server2: "<your_own_value>"
ipv6-dns-server1: "<your_own_value>"
ipv6-dns-server2: "<your_own_value>"
ipv6-dns-server3: "<your_own_value>"
ipv6-end-ip: "<your_own_value>"
ipv6-exclude-range:
-
end-ip: "<your_own_value>"
id: "81"
start-ip: "<your_own_value>"
ipv6-name: "<your_own_value> (source firewall.address6.name firewall.addrgrp6.name)"
ipv6-prefix: "84"
ipv6-split-exclude: "<your_own_value> (source firewall.address6.name firewall.addrgrp6.name)"
ipv6-split-include: "<your_own_value> (source firewall.address6.name firewall.addrgrp6.name)"
ipv6-start-ip: "<your_own_value>"
keepalive: "88"
keylife: "89"
local-gw: "<your_own_value>"
local-gw6: "<your_own_value>"
localid: "<your_own_value>"
localid-type: "auto"
mesh-selector-type: "disable"
mode: "aggressive"
mode-cfg: "disable"
monitor: "<your_own_value> (source vpn.ipsec.phase1-interface.name)"
monitor-hold-down-delay: "98"
monitor-hold-down-time: "<your_own_value>"
monitor-hold-down-type: "immediate"
monitor-hold-down-weekday: "everyday"
name: "default_name_102"
nattraversal: "enable"
negotiate-timeout: "104"
net-device: "enable"
npu-offload: "enable"
passive-mode: "enable"
peer: "<your_own_value> (source user.peer.name)"
peergrp: "<your_own_value> (source user.peergrp.name)"
peerid: "<your_own_value>"
peertype: "any"
ppk: "disable"
ppk-identity: "<your_own_value>"
ppk-secret: "<your_own_value>"
priority: "115"
proposal: "des-md5"
psksecret: "<your_own_value>"
psksecret-remote: "<your_own_value>"
reauth: "disable"
rekey: "enable"
remote-gw: "<your_own_value>"
remote-gw6: "<your_own_value>"
remotegw-ddns: "<your_own_value>"
rsa-signature-format: "pkcs1"
save-password: "disable"
send-cert-chain: "enable"
signature-hash-alg: "sha1"
split-include-service: "<your_own_value> (source firewall.service.group.name firewall.service.custom.name)"
suite-b: "disable"
tunnel-search: "selectors"
type: "static"
unity-support: "disable"
usrgrp: "<your_own_value> (source user.group.name)"
vni: "134"
wizard-type: "custom"
xauthtype: "disable"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
fos = None  # module-level FortiOSAPI connection handle; assigned in main() after the fortiosapi import succeeds
def login(data):
    """Open a session on the FortiGate unit using the module-level ``fos`` handle.

    Reads ``host``/``username``/``password`` from *data*; HTTPS is enabled
    unless the caller explicitly passed a falsy ``https`` value.
    """
    host = data['host']
    username = data['username']
    password = data['password']
    fos.debug('on')
    # Absent key defaults to HTTPS on, matching the module's default.
    fos.https('on' if data.get('https', True) else 'off')
    fos.login(host, username, password)
def filter_vpn_ipsec_phase1_interface_data(json):
    """Return a copy of *json* restricted to the options this endpoint accepts.

    Keys not in the known option list, and keys whose value is ``None``,
    are dropped so they are never sent to the FortiGate API.
    """
    option_list = ['acct-verify', 'add-gw-route', 'add-route',
                   'assign-ip', 'assign-ip-from', 'authmethod',
                   'authmethod-remote', 'authpasswd', 'authusr',
                   'authusrgrp', 'auto-discovery-forwarder', 'auto-discovery-psk',
                   'auto-discovery-receiver', 'auto-discovery-sender', 'auto-negotiate',
                   'backup-gateway', 'banner', 'cert-id-validation',
                   'certificate', 'childless-ike', 'client-auto-negotiate',
                   'client-keep-alive', 'comments', 'default-gw',
                   'default-gw-priority', 'dhgrp', 'digital-signature-auth',
                   'distance', 'dns-mode', 'domain',
                   'dpd', 'dpd-retrycount', 'dpd-retryinterval',
                   'eap', 'eap-identity', 'encap-local-gw4',
                   'encap-local-gw6', 'encap-remote-gw4', 'encap-remote-gw6',
                   'encapsulation', 'encapsulation-address', 'enforce-unique-id',
                   'exchange-interface-ip', 'forticlient-enforcement', 'fragmentation',
                   'fragmentation-mtu', 'group-authentication', 'group-authentication-secret',
                   'ha-sync-esp-seqno', 'idle-timeout', 'idle-timeoutinterval',
                   'ike-version', 'include-local-lan', 'interface',
                   'ip-version', 'ipv4-dns-server1', 'ipv4-dns-server2',
                   'ipv4-dns-server3', 'ipv4-end-ip', 'ipv4-exclude-range',
                   'ipv4-name', 'ipv4-netmask', 'ipv4-split-exclude',
                   'ipv4-split-include', 'ipv4-start-ip', 'ipv4-wins-server1',
                   'ipv4-wins-server2', 'ipv6-dns-server1', 'ipv6-dns-server2',
                   'ipv6-dns-server3', 'ipv6-end-ip', 'ipv6-exclude-range',
                   'ipv6-name', 'ipv6-prefix', 'ipv6-split-exclude',
                   'ipv6-split-include', 'ipv6-start-ip', 'keepalive',
                   'keylife', 'local-gw', 'local-gw6',
                   'localid', 'localid-type', 'mesh-selector-type',
                   'mode', 'mode-cfg', 'monitor',
                   'monitor-hold-down-delay', 'monitor-hold-down-time', 'monitor-hold-down-type',
                   'monitor-hold-down-weekday', 'name', 'nattraversal',
                   'negotiate-timeout', 'net-device', 'npu-offload',
                   'passive-mode', 'peer', 'peergrp',
                   'peerid', 'peertype', 'ppk',
                   'ppk-identity', 'ppk-secret', 'priority',
                   'proposal', 'psksecret', 'psksecret-remote',
                   'reauth', 'rekey', 'remote-gw',
                   'remote-gw6', 'remotegw-ddns', 'rsa-signature-format',
                   'save-password', 'send-cert-chain', 'signature-hash-alg',
                   'split-include-service', 'suite-b', 'tunnel-search',
                   'type', 'unity-support', 'usrgrp',
                   'vni', 'wizard-type', 'xauthtype']
    return {option: json[option] for option in option_list
            if option in json and json[option] is not None}
def flatten_multilists_attributes(data):
    """Flatten multi-value attributes into the space-separated strings FortiOS expects.

    ``multilist_attrs`` holds attribute paths (tuples of nested keys) to
    flatten; it is empty for this endpoint, so the function is currently a
    no-op that returns *data* unchanged.

    Replaces the original ``eval``/``exec`` on a string-built path with a
    plain dict traversal; behavior is identical and no dynamic code
    execution is involved.
    """
    multilist_attrs = []

    for attr in multilist_attrs:
        try:
            # Walk to the parent container of the last path element.
            parent = data
            for elem in attr[:-1]:
                parent = parent[elem]
            # Join the multi-value entry into one space-separated string.
            parent[attr[-1]] = ' '.join(elem for elem in parent[attr[-1]])
        except (KeyError, IndexError, TypeError):
            # Attribute absent or not list-shaped: leave it untouched,
            # matching the original best-effort behavior.
            pass

    return data
def vpn_ipsec_phase1_interface(data, fos):
    """Create, update or delete a vpn.ipsec/phase1-interface object.

    ``state`` == "present" pushes the filtered payload with ``set``;
    ``state`` == "absent" deletes by the object's ``name`` mkey.
    Returns the FortiOS API response (``None`` for any other state,
    as in the original).
    """
    vdom = data['vdom']
    params = data['vpn_ipsec_phase1_interface']
    filtered_data = filter_vpn_ipsec_phase1_interface_data(
        flatten_multilists_attributes(params))

    state = params['state']
    if state == "present":
        return fos.set('vpn.ipsec',
                       'phase1-interface',
                       data=filtered_data,
                       vdom=vdom)
    if state == "absent":
        return fos.delete('vpn.ipsec',
                          'phase1-interface',
                          mkey=filtered_data['name'],
                          vdom=vdom)
def fortios_vpn_ipsec(data, fos):
    """Log in, apply the requested configuration section, and log out.

    Returns the ``(is_error, has_changed, result)`` tuple that ``main``
    feeds to ``exit_json``/``fail_json``.

    Fixes a latent NameError in the original: when
    ``data['vpn_ipsec_phase1_interface']`` was missing/falsy, ``resp`` was
    referenced without ever being assigned. We now return a clean error
    tuple in that case.
    """
    login(data)

    resp = None
    if data['vpn_ipsec_phase1_interface']:
        resp = vpn_ipsec_phase1_interface(data, fos)

    fos.logout()

    if resp is None:
        return True, False, {'status': 'error',
                             'message': 'no vpn_ipsec_phase1_interface data provided'}
    return not resp['status'] == "success", resp['status'] == "success", resp
def main():
    """Module entry point.

    Builds the AnsibleModule argument spec, connects to the FortiGate unit
    via fortiosapi and applies the ``vpn_ipsec_phase1_interface`` section.

    Fixes: the original spec declared four secrets with the invalid
    AnsibleModule type ``"password-3"`` (AnsibleModule rejects unknown
    types at runtime). They are now ``"type": "str"`` with
    ``"no_log": True``; ``authpasswd`` is also marked ``no_log`` since it
    carries a password.
    """
    fields = {
        "host": {"required": True, "type": "str"},
        "username": {"required": True, "type": "str"},
        "password": {"required": False, "type": "str", "no_log": True},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "https": {"required": False, "type": "bool", "default": True},
        "vpn_ipsec_phase1_interface": {
            "required": False, "type": "dict",
            "options": {
                "state": {"required": True, "type": "str",
                          "choices": ["present", "absent"]},
                "acct-verify": {"required": False, "type": "str",
                                "choices": ["enable", "disable"]},
                "add-gw-route": {"required": False, "type": "str",
                                 "choices": ["enable", "disable"]},
                "add-route": {"required": False, "type": "str",
                              "choices": ["disable", "enable"]},
                "assign-ip": {"required": False, "type": "str",
                              "choices": ["disable", "enable"]},
                "assign-ip-from": {"required": False, "type": "str",
                                   "choices": ["range", "usrgrp", "dhcp",
                                               "name"]},
                "authmethod": {"required": False, "type": "str",
                               "choices": ["psk", "signature"]},
                "authmethod-remote": {"required": False, "type": "str",
                                      "choices": ["psk", "signature"]},
                # XAuth password: hide from logs.
                "authpasswd": {"required": False, "type": "str", "no_log": True},
                "authusr": {"required": False, "type": "str"},
                "authusrgrp": {"required": False, "type": "str"},
                "auto-discovery-forwarder": {"required": False, "type": "str",
                                             "choices": ["enable", "disable"]},
                "auto-discovery-psk": {"required": False, "type": "str",
                                       "choices": ["enable", "disable"]},
                "auto-discovery-receiver": {"required": False, "type": "str",
                                            "choices": ["enable", "disable"]},
                "auto-discovery-sender": {"required": False, "type": "str",
                                          "choices": ["enable", "disable"]},
                "auto-negotiate": {"required": False, "type": "str",
                                   "choices": ["enable", "disable"]},
                "backup-gateway": {"required": False, "type": "list",
                                   "options": {
                                       "address": {"required": True, "type": "str"}
                                   }},
                "banner": {"required": False, "type": "str"},
                "cert-id-validation": {"required": False, "type": "str",
                                       "choices": ["enable", "disable"]},
                "certificate": {"required": False, "type": "list",
                                "options": {
                                    "name": {"required": True, "type": "str"}
                                }},
                "childless-ike": {"required": False, "type": "str",
                                  "choices": ["enable", "disable"]},
                "client-auto-negotiate": {"required": False, "type": "str",
                                          "choices": ["disable", "enable"]},
                "client-keep-alive": {"required": False, "type": "str",
                                      "choices": ["disable", "enable"]},
                "comments": {"required": False, "type": "str"},
                "default-gw": {"required": False, "type": "str"},
                "default-gw-priority": {"required": False, "type": "int"},
                "dhgrp": {"required": False, "type": "str",
                          "choices": ["1", "2", "5",
                                      "14", "15", "16",
                                      "17", "18", "19",
                                      "20", "21", "27",
                                      "28", "29", "30",
                                      "31"]},
                "digital-signature-auth": {"required": False, "type": "str",
                                           "choices": ["enable", "disable"]},
                "distance": {"required": False, "type": "int"},
                "dns-mode": {"required": False, "type": "str",
                             "choices": ["manual", "auto"]},
                "domain": {"required": False, "type": "str"},
                "dpd": {"required": False, "type": "str",
                        "choices": ["disable", "on-idle", "on-demand"]},
                "dpd-retrycount": {"required": False, "type": "int"},
                "dpd-retryinterval": {"required": False, "type": "str"},
                "eap": {"required": False, "type": "str",
                        "choices": ["enable", "disable"]},
                "eap-identity": {"required": False, "type": "str",
                                 "choices": ["use-id-payload", "send-request"]},
                "encap-local-gw4": {"required": False, "type": "str"},
                "encap-local-gw6": {"required": False, "type": "str"},
                "encap-remote-gw4": {"required": False, "type": "str"},
                "encap-remote-gw6": {"required": False, "type": "str"},
                "encapsulation": {"required": False, "type": "str",
                                  "choices": ["none", "gre", "vxlan"]},
                "encapsulation-address": {"required": False, "type": "str",
                                          "choices": ["ike", "ipv4", "ipv6"]},
                "enforce-unique-id": {"required": False, "type": "str",
                                      "choices": ["disable", "keep-new", "keep-old"]},
                "exchange-interface-ip": {"required": False, "type": "str",
                                          "choices": ["enable", "disable"]},
                "forticlient-enforcement": {"required": False, "type": "str",
                                            "choices": ["enable", "disable"]},
                "fragmentation": {"required": False, "type": "str",
                                  "choices": ["enable", "disable"]},
                "fragmentation-mtu": {"required": False, "type": "int"},
                "group-authentication": {"required": False, "type": "str",
                                         "choices": ["enable", "disable"]},
                # Was invalid type "password-3"; secrets are strings hidden from logs.
                "group-authentication-secret": {"required": False, "type": "str",
                                                "no_log": True},
                "ha-sync-esp-seqno": {"required": False, "type": "str",
                                      "choices": ["enable", "disable"]},
                "idle-timeout": {"required": False, "type": "str",
                                 "choices": ["enable", "disable"]},
                "idle-timeoutinterval": {"required": False, "type": "int"},
                "ike-version": {"required": False, "type": "str",
                                "choices": ["1", "2"]},
                "include-local-lan": {"required": False, "type": "str",
                                      "choices": ["disable", "enable"]},
                "interface": {"required": False, "type": "str"},
                "ip-version": {"required": False, "type": "str",
                               "choices": ["4", "6"]},
                "ipv4-dns-server1": {"required": False, "type": "str"},
                "ipv4-dns-server2": {"required": False, "type": "str"},
                "ipv4-dns-server3": {"required": False, "type": "str"},
                "ipv4-end-ip": {"required": False, "type": "str"},
                "ipv4-exclude-range": {"required": False, "type": "list",
                                       "options": {
                                           "end-ip": {"required": False, "type": "str"},
                                           "id": {"required": True, "type": "int"},
                                           "start-ip": {"required": False, "type": "str"}
                                       }},
                "ipv4-name": {"required": False, "type": "str"},
                "ipv4-netmask": {"required": False, "type": "str"},
                "ipv4-split-exclude": {"required": False, "type": "str"},
                "ipv4-split-include": {"required": False, "type": "str"},
                "ipv4-start-ip": {"required": False, "type": "str"},
                "ipv4-wins-server1": {"required": False, "type": "str"},
                "ipv4-wins-server2": {"required": False, "type": "str"},
                "ipv6-dns-server1": {"required": False, "type": "str"},
                "ipv6-dns-server2": {"required": False, "type": "str"},
                "ipv6-dns-server3": {"required": False, "type": "str"},
                "ipv6-end-ip": {"required": False, "type": "str"},
                "ipv6-exclude-range": {"required": False, "type": "list",
                                       "options": {
                                           "end-ip": {"required": False, "type": "str"},
                                           "id": {"required": True, "type": "int"},
                                           "start-ip": {"required": False, "type": "str"}
                                       }},
                "ipv6-name": {"required": False, "type": "str"},
                "ipv6-prefix": {"required": False, "type": "int"},
                "ipv6-split-exclude": {"required": False, "type": "str"},
                "ipv6-split-include": {"required": False, "type": "str"},
                "ipv6-start-ip": {"required": False, "type": "str"},
                "keepalive": {"required": False, "type": "int"},
                "keylife": {"required": False, "type": "int"},
                "local-gw": {"required": False, "type": "str"},
                "local-gw6": {"required": False, "type": "str"},
                "localid": {"required": False, "type": "str"},
                "localid-type": {"required": False, "type": "str",
                                 "choices": ["auto", "fqdn", "user-fqdn",
                                             "keyid", "address", "asn1dn"]},
                "mesh-selector-type": {"required": False, "type": "str",
                                       "choices": ["disable", "subnet", "host"]},
                "mode": {"required": False, "type": "str",
                         "choices": ["aggressive", "main"]},
                "mode-cfg": {"required": False, "type": "str",
                             "choices": ["disable", "enable"]},
                "monitor": {"required": False, "type": "str"},
                "monitor-hold-down-delay": {"required": False, "type": "int"},
                "monitor-hold-down-time": {"required": False, "type": "str"},
                "monitor-hold-down-type": {"required": False, "type": "str",
                                           "choices": ["immediate", "delay", "time"]},
                "monitor-hold-down-weekday": {"required": False, "type": "str",
                                              "choices": ["everyday", "sunday", "monday",
                                                          "tuesday", "wednesday", "thursday",
                                                          "friday", "saturday"]},
                "name": {"required": True, "type": "str"},
                "nattraversal": {"required": False, "type": "str",
                                 "choices": ["enable", "disable", "forced"]},
                "negotiate-timeout": {"required": False, "type": "int"},
                "net-device": {"required": False, "type": "str",
                               "choices": ["enable", "disable"]},
                "npu-offload": {"required": False, "type": "str",
                                "choices": ["enable", "disable"]},
                "passive-mode": {"required": False, "type": "str",
                                 "choices": ["enable", "disable"]},
                "peer": {"required": False, "type": "str"},
                "peergrp": {"required": False, "type": "str"},
                "peerid": {"required": False, "type": "str"},
                "peertype": {"required": False, "type": "str",
                             "choices": ["any", "one", "dialup",
                                         "peer", "peergrp"]},
                "ppk": {"required": False, "type": "str",
                        "choices": ["disable", "allow", "require"]},
                "ppk-identity": {"required": False, "type": "str"},
                # Was invalid type "password-3".
                "ppk-secret": {"required": False, "type": "str", "no_log": True},
                "priority": {"required": False, "type": "int"},
                "proposal": {"required": False, "type": "str",
                             "choices": ["des-md5", "des-sha1", "des-sha256",
                                         "des-sha384", "des-sha512"]},
                # Was invalid type "password-3".
                "psksecret": {"required": False, "type": "str", "no_log": True},
                "psksecret-remote": {"required": False, "type": "str", "no_log": True},
                "reauth": {"required": False, "type": "str",
                           "choices": ["disable", "enable"]},
                "rekey": {"required": False, "type": "str",
                          "choices": ["enable", "disable"]},
                "remote-gw": {"required": False, "type": "str"},
                "remote-gw6": {"required": False, "type": "str"},
                "remotegw-ddns": {"required": False, "type": "str"},
                "rsa-signature-format": {"required": False, "type": "str",
                                         "choices": ["pkcs1", "pss"]},
                "save-password": {"required": False, "type": "str",
                                  "choices": ["disable", "enable"]},
                "send-cert-chain": {"required": False, "type": "str",
                                    "choices": ["enable", "disable"]},
                "signature-hash-alg": {"required": False, "type": "str",
                                       "choices": ["sha1", "sha2-256", "sha2-384",
                                                   "sha2-512"]},
                "split-include-service": {"required": False, "type": "str"},
                "suite-b": {"required": False, "type": "str",
                            "choices": ["disable", "suite-b-gcm-128", "suite-b-gcm-256"]},
                "tunnel-search": {"required": False, "type": "str",
                                  "choices": ["selectors", "nexthop"]},
                "type": {"required": False, "type": "str",
                         "choices": ["static", "dynamic", "ddns"]},
                "unity-support": {"required": False, "type": "str",
                                  "choices": ["disable", "enable"]},
                "usrgrp": {"required": False, "type": "str"},
                "vni": {"required": False, "type": "int"},
                "wizard-type": {"required": False, "type": "str",
                                "choices": ["custom", "dialup-forticlient", "dialup-ios",
                                            "dialup-android", "dialup-windows", "dialup-cisco",
                                            "static-fortigate", "dialup-fortigate", "static-cisco",
                                            "dialup-cisco-fw"]},
                "xauthtype": {"required": False, "type": "str",
                              "choices": ["disable", "client", "pap",
                                          "chap", "auto"]}
            }
        }
    }

    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)
    # fortiosapi is an optional third-party dependency; fail cleanly if absent.
    try:
        from fortiosapi import FortiOSAPI
    except ImportError:
        module.fail_json(msg="fortiosapi module is required")

    global fos
    fos = FortiOSAPI()

    is_error, has_changed, result = fortios_vpn_ipsec(module.params, fos)

    if not is_error:
        module.exit_json(changed=has_changed, meta=result)
    else:
        module.fail_json(msg="Error in repo", meta=result)
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
| gpl-3.0 |
tafaRU/odoo | addons/account_bank_statement_extensions/report/__init__.py | 415 | 1128 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2011 Noviat nv/sa (www.noviat.be). All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import bank_statement_balance_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
kaichogami/scikit-learn | sklearn/tests/test_learning_curve.py | 59 | 10869 | # Author: Alexander Fabisch <afabisch@informatik.uni-bremen.de>
#
# License: BSD 3 clause
import sys
from sklearn.externals.six.moves import cStringIO as StringIO
import numpy as np
import warnings
from sklearn.base import BaseEstimator
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_warns
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.datasets import make_classification
with warnings.catch_warnings():
warnings.simplefilter('ignore')
from sklearn.learning_curve import learning_curve, validation_curve
from sklearn.cross_validation import KFold
from sklearn.linear_model import PassiveAggressiveClassifier
class MockImprovingEstimator(BaseEstimator):
    """Dummy classifier whose train score degrades as its test score improves.

    As the training-set size grows towards ``n_max_train_sizes``, the
    training score moves linearly from 2 down to 1 while the test score
    moves from 0 up to 1, giving ``learning_curve`` a predictable output.
    """
    def __init__(self, n_max_train_sizes):
        self.n_max_train_sizes = n_max_train_sizes
        self.train_sizes = 0
        self.X_subset = None

    def fit(self, X_subset, y_subset=None):
        # Remember the exact training array so score() can tell train
        # data apart from test data.
        self.X_subset = X_subset
        self.train_sizes = X_subset.shape[0]
        return self

    def predict(self, X):
        raise NotImplementedError

    def score(self, X=None, Y=None):
        # Training score becomes worse (2 -> 1), test score better (0 -> 1).
        fraction = float(self.train_sizes) / self.n_max_train_sizes
        return 2. - fraction if self._is_training_data(X) else fraction

    def _is_training_data(self, X):
        # Identity comparison: only the object passed to fit() counts.
        return X is self.X_subset
class MockIncrementalImprovingEstimator(MockImprovingEstimator):
    """Dummy classifier that also provides ``partial_fit``.

    Lets the learning-curve tests exercise the incremental-learning path
    (``exploit_incremental_learning=True``).
    """
    def __init__(self, n_max_train_sizes):
        super(MockIncrementalImprovingEstimator,
              self).__init__(n_max_train_sizes)
        self.x = None

    def partial_fit(self, X, y=None, **params):
        # Accumulate the number of samples seen across batches and keep
        # the first sample of the latest batch as a training marker.
        self.train_sizes += X.shape[0]
        self.x = X[0]

    def _is_training_data(self, X):
        # Membership test: does this array contain the remembered sample?
        return self.x in X
class MockEstimatorWithParameter(BaseEstimator):
    """Dummy classifier whose score is dictated directly by ``param``.

    Scores ``param`` on training data and ``1 - param`` on test data,
    making the expected output of ``validation_curve`` trivial to state.
    """
    def __init__(self, param=0.5):
        self.X_subset = None
        self.param = param

    def fit(self, X_subset, y_subset):
        # Remember the exact training array for _is_training_data().
        self.X_subset = X_subset
        self.train_sizes = X_subset.shape[0]
        return self

    def predict(self, X):
        raise NotImplementedError

    def score(self, X=None, y=None):
        if self._is_training_data(X):
            return self.param
        return 1 - self.param

    def _is_training_data(self, X):
        return X is self.X_subset
def test_learning_curve():
    """learning_curve with the improving mock yields the expected curves."""
    X, y = make_classification(n_samples=30, n_features=1, n_informative=1,
                               n_redundant=0, n_classes=2,
                               n_clusters_per_class=1, random_state=0)
    clf = MockImprovingEstimator(20)
    sizes = np.linspace(0.1, 1.0, 10)
    with warnings.catch_warnings(record=True) as caught:
        train_sizes, train_scores, test_scores = learning_curve(
            clf, X, y, cv=3, train_sizes=sizes)
        if len(caught) > 0:
            raise RuntimeError("Unexpected warning: %r" % caught[0].message)
    # 10 requested sizes x 3 CV folds.
    assert_equal(train_scores.shape, (10, 3))
    assert_equal(test_scores.shape, (10, 3))
    assert_array_equal(train_sizes, np.linspace(2, 20, 10))
    assert_array_almost_equal(train_scores.mean(axis=1),
                              np.linspace(1.9, 1.0, 10))
    assert_array_almost_equal(test_scores.mean(axis=1),
                              np.linspace(0.1, 1.0, 10))
def test_learning_curve_unsupervised():
    """learning_curve works when no target vector is supplied."""
    X, _ = make_classification(n_samples=30, n_features=1, n_informative=1,
                               n_redundant=0, n_classes=2,
                               n_clusters_per_class=1, random_state=0)
    clf = MockImprovingEstimator(20)
    train_sizes, train_scores, test_scores = learning_curve(
        clf, X, y=None, cv=3, train_sizes=np.linspace(0.1, 1.0, 10))
    assert_array_equal(train_sizes, np.linspace(2, 20, 10))
    for scores, expected in ((train_scores, np.linspace(1.9, 1.0, 10)),
                             (test_scores, np.linspace(0.1, 1.0, 10))):
        assert_array_almost_equal(scores.mean(axis=1), expected)
def test_learning_curve_verbose():
    """verbose=1 prints progress lines tagged with [learning_curve]."""
    X, y = make_classification(n_samples=30, n_features=1, n_informative=1,
                               n_redundant=0, n_classes=2,
                               n_clusters_per_class=1, random_state=0)
    clf = MockImprovingEstimator(20)
    # Capture stdout while the curve is computed; restore it afterwards
    # even if learning_curve raises.
    old_stdout = sys.stdout
    sys.stdout = StringIO()
    try:
        learning_curve(clf, X, y, cv=3, verbose=1)
    finally:
        out = sys.stdout.getvalue()
        sys.stdout.close()
        sys.stdout = old_stdout
    assert("[learning_curve]" in out)
def test_learning_curve_incremental_learning_not_possible():
    """Requesting incremental learning without partial_fit must raise."""
    X, y = make_classification(n_samples=2, n_features=1, n_informative=1,
                               n_redundant=0, n_classes=2,
                               n_clusters_per_class=1, random_state=0)
    # MockImprovingEstimator deliberately lacks partial_fit().
    clf = MockImprovingEstimator(1)
    assert_raises(ValueError, learning_curve, clf, X, y,
                  exploit_incremental_learning=True)
def test_learning_curve_incremental_learning():
    """The partial_fit path produces the same expected curves."""
    X, y = make_classification(n_samples=30, n_features=1, n_informative=1,
                               n_redundant=0, n_classes=2,
                               n_clusters_per_class=1, random_state=0)
    clf = MockIncrementalImprovingEstimator(20)
    train_sizes, train_scores, test_scores = learning_curve(
        clf, X, y, cv=3, exploit_incremental_learning=True,
        train_sizes=np.linspace(0.1, 1.0, 10))
    assert_array_equal(train_sizes, np.linspace(2, 20, 10))
    for scores, expected in ((train_scores, np.linspace(1.9, 1.0, 10)),
                             (test_scores, np.linspace(0.1, 1.0, 10))):
        assert_array_almost_equal(scores.mean(axis=1), expected)
def test_learning_curve_incremental_learning_unsupervised():
    """Incremental learning also works without a target vector."""
    X, _ = make_classification(n_samples=30, n_features=1, n_informative=1,
                               n_redundant=0, n_classes=2,
                               n_clusters_per_class=1, random_state=0)
    clf = MockIncrementalImprovingEstimator(20)
    train_sizes, train_scores, test_scores = learning_curve(
        clf, X, y=None, cv=3, exploit_incremental_learning=True,
        train_sizes=np.linspace(0.1, 1.0, 10))
    assert_array_equal(train_sizes, np.linspace(2, 20, 10))
    for scores, expected in ((train_scores, np.linspace(1.9, 1.0, 10)),
                             (test_scores, np.linspace(0.1, 1.0, 10))):
        assert_array_almost_equal(scores.mean(axis=1), expected)
def test_learning_curve_batch_and_incremental_learning_are_equal():
    """Batch fitting and partial_fit-based fitting give the same curves."""
    X, y = make_classification(n_samples=30, n_features=1, n_informative=1,
                               n_redundant=0, n_classes=2,
                               n_clusters_per_class=1, random_state=0)
    sizes = np.linspace(0.2, 1.0, 5)
    clf = PassiveAggressiveClassifier(n_iter=1, shuffle=False)
    # Compute the curve once per mode (incremental first, as before).
    curves = {}
    for incremental in (True, False):
        curves[incremental] = learning_curve(
            clf, X, y, cv=3, train_sizes=sizes,
            exploit_incremental_learning=incremental)
    sizes_inc, train_inc, test_inc = curves[True]
    sizes_batch, train_batch, test_batch = curves[False]
    assert_array_equal(sizes_inc, sizes_batch)
    assert_array_almost_equal(train_inc.mean(axis=1),
                              train_batch.mean(axis=1))
    assert_array_almost_equal(test_inc.mean(axis=1),
                              test_batch.mean(axis=1))
def test_learning_curve_n_sample_range_out_of_bounds():
    """train_sizes outside the valid fraction or count range must raise."""
    X, y = make_classification(n_samples=30, n_features=1, n_informative=1,
                               n_redundant=0, n_classes=2,
                               n_clusters_per_class=1, random_state=0)
    clf = MockImprovingEstimator(20)
    # Both fractional (outside (0, 1]) and absolute (outside [1, n_train])
    # specifications are rejected.
    for bad_sizes in ([0, 1], [0.0, 1.0], [0.1, 1.1], [0, 20], [1, 21]):
        assert_raises(ValueError, learning_curve, clf, X, y, cv=3,
                      train_sizes=bad_sizes)
def test_learning_curve_remove_duplicate_sample_sizes():
    """Duplicate resolved training sizes are dropped with a RuntimeWarning."""
    X, y = make_classification(n_samples=3, n_features=1, n_informative=1,
                               n_redundant=0, n_classes=2,
                               n_clusters_per_class=1, random_state=0)
    clf = MockImprovingEstimator(2)
    # With only 3 samples, two of the three requested fractions resolve to
    # the same integer size, so only [1, 2] should remain.
    train_sizes, _, _ = assert_warns(
        RuntimeWarning, learning_curve, clf, X, y, cv=3,
        train_sizes=np.linspace(0.33, 1.0, 3))
    assert_array_equal(train_sizes, [1, 2])
def test_learning_curve_with_boolean_indices():
    """learning_curve accepts an explicit CV iterator (KFold)."""
    X, y = make_classification(n_samples=30, n_features=1, n_informative=1,
                               n_redundant=0, n_classes=2,
                               n_clusters_per_class=1, random_state=0)
    clf = MockImprovingEstimator(20)
    cv = KFold(n=30, n_folds=3)
    train_sizes, train_scores, test_scores = learning_curve(
        clf, X, y, cv=cv, train_sizes=np.linspace(0.1, 1.0, 10))
    assert_array_equal(train_sizes, np.linspace(2, 20, 10))
    for scores, expected in ((train_scores, np.linspace(1.9, 1.0, 10)),
                             (test_scores, np.linspace(0.1, 1.0, 10))):
        assert_array_almost_equal(scores.mean(axis=1), expected)
def test_validation_curve():
    """validation_curve scores mirror the mock's param on train/test data."""
    X, y = make_classification(n_samples=2, n_features=1, n_informative=1,
                               n_redundant=0, n_classes=2,
                               n_clusters_per_class=1, random_state=0)
    param_range = np.linspace(0, 1, 10)
    with warnings.catch_warnings(record=True) as caught:
        train_scores, test_scores = validation_curve(
            MockEstimatorWithParameter(), X, y, param_name="param",
            param_range=param_range, cv=2)
        if len(caught) > 0:
            raise RuntimeError("Unexpected warning: %r" % caught[0].message)
    # The mock scores `param` on train data and `1 - param` on test data.
    assert_array_almost_equal(train_scores.mean(axis=1), param_range)
    assert_array_almost_equal(test_scores.mean(axis=1), 1 - param_range)
| bsd-3-clause |
alistairlow/tensorflow | tensorflow/contrib/learn/python/learn/estimators/metric_key.py | 89 | 1569 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Enum for metric keys."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
class MetricKey(object):
  """Canonical string keys for metrics reported in evaluation results.

  Keys containing ``%d`` are per-class templates (filled in with a class
  index); keys containing ``%f`` are templates parameterized by a
  probability threshold.
  """
  # Loss and AUC metrics.
  LOSS = "loss"
  AUC = "auc"
  AUC_PR = "auc_precision_recall"
  CLASS_AUC = "auc/class%d"
  CLASS_AUC_PR = "auc_precision_recall/class%d"
  # Label / prediction statistics.
  PREDICTION_MEAN = "labels/prediction_mean"
  CLASS_PREDICTION_MEAN = "labels/prediction_mean/class%d"
  CLASS_LOGITS_MEAN = "labels/logits_mean/class%d"
  CLASS_PROBABILITY_MEAN = "labels/probability_mean/class%d"
  LABEL_MEAN = "labels/actual_label_mean"
  CLASS_LABEL_MEAN = "labels/actual_label_mean/class%d"
  # Accuracy, precision and recall (threshold-parameterized where noted).
  ACCURACY = "accuracy"
  ACCURACY_BASELINE = "accuracy/baseline_label_mean"
  ACCURACY_MEAN = "accuracy/threshold_%f_mean"
  PRECISION_MEAN = "precision/positive_threshold_%f_mean"
  RECALL_MEAN = "recall/positive_threshold_%f_mean"
| apache-2.0 |
mindw/pywinauto | pywinauto/unittests/test_menuwrapper.py | 3 | 2569 | # GUI Application automation and testing library
# Copyright (C) 2006 Mark Mc Mahon
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 59 Temple Place,
# Suite 330,
# Boston, MA 02111-1307 USA
"Tests for HwndWrapper"
import sys
sys.path.append(".")
from pywinauto.application import Application
from pywinauto.controls.HwndWrapper import HwndWrapper
from pywinauto import win32structures, win32defines
from pywinauto.controls import menuwrapper
import time
import pprint
import pdb
import ctypes
__revision__ = "$Revision: 234 $"
#try:
# from pywinauto.controls.pywinauto import *
#except ImportError:
# # allow it to be imported in a dev environment
# import sys
#
# pywinauto_imp = "\\".join(__file__.split('\\')[:-3])
# print "sdfdsf", pywinauto_imp
# sys.path.append(pywinauto_imp)
# from pywinauto.controls.HwndWrapper import *
import unittest
class MenuWrapperTests(unittest.TestCase):
    "Unit tests for the menu wrapper classes, run against a live Notepad"
    def setUp(self):
        """Start the application set some data and ensure the application
        is in the state we want it."""
        # start the application
        self.app = Application()
        self.app.start_("Notepad.exe")
        self.dlg = self.app.Notepad
    def tearDown(self):
        "Close the application after tests"
        # close the application
        #self.dlg.TypeKeys("%{F4}")
        self.app.kill_()
    def testInvalidHandle(self):
        "Test that an exception is raised with an invalid menu handle"
        #self.assertRaises(InvalidWindowHandle, HwndWrapper, -1)
        pass
    def testItemCount(self):
        # NOTE(review): assumes 5 top-level menu items (File, Edit, Format,
        # View, Help) — an English-locale Notepad; verify on other locales.
        self.assertEquals(5, self.dlg.Menu().ItemCount())
    def testItem(self):
        # TODO: exercise Menu().Item() once expected behaviour is decided.
        pass
    def testItems(self):
        # TODO: exercise Menu().Items().
        pass
    def testGetProperties(self):
        # TODO: exercise Menu().GetProperties().
        pass
    def testGetMenuPath(self):
        # TODO: exercise menu-path lookup.
        pass
    def test__repr__(self):
        # TODO: check the menu wrapper's __repr__ output.
        pass
# Run the test suite when this module is executed directly.
if __name__ == "__main__":
    unittest.main()
| lgpl-2.1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.