repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
kirsty-tortoise/mathsmap | mathsmap/new_map.py | 1 | 2417 | """
Contains classes controlling wizards for making new maps
"""
import tkinter as tk
import mathsmap.colours as colours
class Wizard:
    """Common behaviour shared by every wizard window in this project."""

    def clear(self):
        """Destroy every widget currently placed on the wizard's toplevel."""
        children = self.top.winfo_children()
        for widget in children:
            widget.destroy()
class NewMap(Wizard):
    """
    Wizard window that guides the user through creating a new map.

    The redundant ``clear`` override has been removed: it was identical,
    token for token, to ``Wizard.clear`` which this class already inherits.
    """
    def __init__(self, controller):
        """
        Set up the NewMap wizard.

        :param controller: application controller that owns this wizard
        """
        self.controller = controller
        self.top = tk.Toplevel()
        self.top.title("Make a new map")
        self.welcome_screen()

    def welcome_screen(self):
        """
        Build the first screen of the wizard: a title, an explanatory
        paragraph and the two mode-selection buttons.
        """
        self.clear()
        self.scheme = colours.BLUESCHEME
        self.background = self.scheme.lighter
        self.top.configure(background=self.background)
        self.title = tk.Label(self.top, text="Let's make a new map!", font=(None, 20),
                              background=self.background)
        self.title.grid(row=0, column=0, columnspan=2)
        self.text = tk.Label(self.top,
                             text=("When do you need to make your mathsmap? " +
                                   "Is it right now, possibly in a rush before exams, " +
                                   "or over time, while you attend lectures and learn?"),
                             wraplength=400, background=self.background
                             )
        self.text.grid(row=1, column=0, columnspan=2)
        buttons_needed = [("Right now!", 0),
                          ("Over time!", 1)]
        # NOTE(review): the buttons have no ``command`` callback yet --
        # presumably they should launch NewNowMap / NewFutureMap; confirm.
        for text, column in buttons_needed:
            button = tk.Button(self.top, text=text, width=15, height=3,
                               background=self.scheme.darkest, activebackground=self.scheme.darker,
                               foreground="white", font=(None, 15))
            button.grid(row=2, column=column, pady=5)
class NewFutureMap(Wizard):
    """Wizard for a new map that will be filled in gradually over time."""
class NewNowMap(Wizard):
    """Wizard for a new map to be filled in and completed right now."""
| mit |
jorik041/scikit-learn | sklearn/decomposition/pca.py | 192 | 23117 | """ Principal Component Analysis
"""
# Author: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Olivier Grisel <olivier.grisel@ensta.org>
# Mathieu Blondel <mathieu@mblondel.org>
# Denis A. Engemann <d.engemann@fz-juelich.de>
# Michael Eickenberg <michael.eickenberg@inria.fr>
#
# License: BSD 3 clause
from math import log, sqrt
import numpy as np
from scipy import linalg
from scipy.special import gammaln
from ..base import BaseEstimator, TransformerMixin
from ..utils import check_random_state, as_float_array
from ..utils import check_array
from ..utils.extmath import fast_dot, fast_logdet, randomized_svd
from ..utils.validation import check_is_fitted
def _assess_dimension_(spectrum, rank, n_samples, n_features):
    """Compute the likelihood of a rank ``rank`` dataset

    The dataset is assumed to be embedded in gaussian noise of shape(n,
    dimf) having spectrum ``spectrum``.

    Parameters
    ----------
    spectrum: array of shape (n)
        Data spectrum.
    rank: int
        Tested rank value.
    n_samples: int
        Number of samples.
    n_features: int
        Number of features.

    Returns
    -------
    ll: float,
        The log-likelihood

    Notes
    -----
    This implements the method of `Thomas P. Minka:
    Automatic Choice of Dimensionality for PCA. NIPS 2000: 598-604`
    """
    if rank > len(spectrum):
        raise ValueError("The tested rank cannot exceed the rank of the"
                         " dataset")
    # pu: log-prior term over the orientation of the retained eigenvectors
    pu = -rank * log(2.)
    for i in range(rank):
        pu += (gammaln((n_features - i) / 2.)
               - log(np.pi) * (n_features - i) / 2.)
    # pl: log-likelihood contribution of the retained eigenvalues
    pl = np.sum(np.log(spectrum[:rank]))
    pl = -pl * n_samples / 2.
    # v: ML estimate of the noise variance from the discarded eigenvalues;
    # pv: its contribution to the likelihood (zero at full rank)
    if rank == n_features:
        pv = 0
        v = 1
    else:
        v = np.sum(spectrum[rank:]) / (n_features - rank)
        pv = -np.log(v) * n_samples * (n_features - rank) / 2.
    # m: number of free parameters in the rank-``rank`` model
    m = n_features * rank - rank * (rank + 1.) / 2.
    pp = log(2. * np.pi) * (m + rank + 1.) / 2.
    # pa: log-determinant of the Hessian used in the Laplace approximation;
    # spectrum_ replaces the discarded eigenvalues by the noise estimate v
    pa = 0.
    spectrum_ = spectrum.copy()
    spectrum_[rank:n_features] = v
    for i in range(rank):
        for j in range(i + 1, len(spectrum)):
            pa += log((spectrum[i] - spectrum[j]) *
                      (1. / spectrum_[j] - 1. / spectrum_[i])) + log(n_samples)
    # total Laplace-approximated log-evidence for this rank
    ll = pu + pl + pv + pp - pa / 2. - rank * log(n_samples) / 2.
    return ll
def _infer_dimension_(spectrum, n_samples, n_features):
    """Infers the dimension of a dataset of shape (n_samples, n_features)

    The dataset is described by its spectrum `spectrum`.
    """
    # Evaluate Minka's log-evidence for every candidate rank and pick the
    # rank with the highest likelihood.
    likelihoods = np.array([
        _assess_dimension_(spectrum, rank, n_samples, n_features)
        for rank in range(len(spectrum))
    ])
    return likelihoods.argmax()
class PCA(BaseEstimator, TransformerMixin):
"""Principal component analysis (PCA)
Linear dimensionality reduction using Singular Value Decomposition of the
data and keeping only the most significant singular vectors to project the
data to a lower dimensional space.
This implementation uses the scipy.linalg implementation of the singular
value decomposition. It only works for dense arrays and is not scalable to
large dimensional data.
The time complexity of this implementation is ``O(n ** 3)`` assuming
n ~ n_samples ~ n_features.
Read more in the :ref:`User Guide <PCA>`.
Parameters
----------
n_components : int, None or string
Number of components to keep.
if n_components is not set all components are kept::
n_components == min(n_samples, n_features)
if n_components == 'mle', Minka\'s MLE is used to guess the dimension
if ``0 < n_components < 1``, select the number of components such that
the amount of variance that needs to be explained is greater than the
percentage specified by n_components
copy : bool
If False, data passed to fit are overwritten and running
fit(X).transform(X) will not yield the expected results,
use fit_transform(X) instead.
whiten : bool, optional
When True (False by default) the `components_` vectors are divided
by n_samples times singular values to ensure uncorrelated outputs
with unit component-wise variances.
Whitening will remove some information from the transformed signal
(the relative variance scales of the components) but can sometime
improve the predictive accuracy of the downstream estimators by
        making their data respect some hard-wired assumptions.
Attributes
----------
components_ : array, [n_components, n_features]
Principal axes in feature space, representing the directions of
maximum variance in the data.
explained_variance_ratio_ : array, [n_components]
Percentage of variance explained by each of the selected components.
If ``n_components`` is not set then all components are stored and the
sum of explained variances is equal to 1.0
mean_ : array, [n_features]
Per-feature empirical mean, estimated from the training set.
n_components_ : int
The estimated number of components. Relevant when n_components is set
to 'mle' or a number between 0 and 1 to select using explained
variance.
noise_variance_ : float
The estimated noise covariance following the Probabilistic PCA model
from Tipping and Bishop 1999. See "Pattern Recognition and
Machine Learning" by C. Bishop, 12.2.1 p. 574 or
        http://www.miketipping.com/papers/met-mppca.pdf. It is required to
        compute the estimated data covariance and score samples.
Notes
-----
For n_components='mle', this class uses the method of `Thomas P. Minka:
Automatic Choice of Dimensionality for PCA. NIPS 2000: 598-604`
Implements the probabilistic PCA model from:
M. Tipping and C. Bishop, Probabilistic Principal Component Analysis,
Journal of the Royal Statistical Society, Series B, 61, Part 3, pp. 611-622
via the score and score_samples methods.
See http://www.miketipping.com/papers/met-mppca.pdf
Due to implementation subtleties of the Singular Value Decomposition (SVD),
which is used in this implementation, running fit twice on the same matrix
can lead to principal components with signs flipped (change in direction).
For this reason, it is important to always use the same estimator object to
transform data in a consistent fashion.
Examples
--------
>>> import numpy as np
>>> from sklearn.decomposition import PCA
>>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
>>> pca = PCA(n_components=2)
>>> pca.fit(X)
PCA(copy=True, n_components=2, whiten=False)
>>> print(pca.explained_variance_ratio_) # doctest: +ELLIPSIS
[ 0.99244... 0.00755...]
See also
--------
RandomizedPCA
KernelPCA
SparsePCA
TruncatedSVD
"""
def __init__(self, n_components=None, copy=True, whiten=False):
self.n_components = n_components
self.copy = copy
self.whiten = whiten
def fit(self, X, y=None):
"""Fit the model with X.
Parameters
----------
X: array-like, shape (n_samples, n_features)
Training data, where n_samples in the number of samples
and n_features is the number of features.
Returns
-------
self : object
Returns the instance itself.
"""
self._fit(X)
return self
def fit_transform(self, X, y=None):
"""Fit the model with X and apply the dimensionality reduction on X.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Training data, where n_samples is the number of samples
and n_features is the number of features.
Returns
-------
X_new : array-like, shape (n_samples, n_components)
"""
U, S, V = self._fit(X)
U = U[:, :self.n_components_]
if self.whiten:
# X_new = X * V / S * sqrt(n_samples) = U * sqrt(n_samples)
U *= sqrt(X.shape[0])
else:
# X_new = X * V = U * S * V^T * V = U * S
U *= S[:self.n_components_]
return U
    def _fit(self, X):
        """Fit the model on X

        Parameters
        ----------
        X: array-like, shape (n_samples, n_features)
            Training vector, where n_samples in the number of samples and
            n_features is the number of features.

        Returns
        -------
        U, s, V : ndarrays
            The SVD of the input data, copied and centered when
            requested.
        """
        X = check_array(X)
        n_samples, n_features = X.shape
        X = as_float_array(X, copy=self.copy)
        # Center data
        self.mean_ = np.mean(X, axis=0)
        X -= self.mean_
        U, S, V = linalg.svd(X, full_matrices=False)
        # eigenvalues of the empirical covariance = squared singular
        # values divided by the number of samples
        explained_variance_ = (S ** 2) / n_samples
        explained_variance_ratio_ = (explained_variance_ /
                                     explained_variance_.sum())
        components_ = V
        n_components = self.n_components
        if n_components is None:
            # keep every component by default
            n_components = n_features
        elif n_components == 'mle':
            if n_samples < n_features:
                raise ValueError("n_components='mle' is only supported "
                                 "if n_samples >= n_features")
            # Minka's MLE: pick the rank maximising the model evidence
            n_components = _infer_dimension_(explained_variance_,
                                             n_samples, n_features)
        elif not 0 <= n_components <= n_features:
            raise ValueError("n_components=%r invalid for n_features=%d"
                             % (n_components, n_features))
        if 0 < n_components < 1.0:
            # number of components for which the cumulated explained variance
            # percentage is superior to the desired threshold
            ratio_cumsum = explained_variance_ratio_.cumsum()
            n_components = np.sum(ratio_cumsum < n_components) + 1
        # Compute noise covariance using Probabilistic PCA model
        # The sigma2 maximum likelihood (cf. eq. 12.46)
        if n_components < n_features:
            self.noise_variance_ = explained_variance_[n_components:].mean()
        else:
            self.noise_variance_ = 0.
        # store n_samples to revert whitening when getting covariance
        self.n_samples_ = n_samples
        # truncate everything to the selected number of components
        self.components_ = components_[:n_components]
        self.explained_variance_ = explained_variance_[:n_components]
        explained_variance_ratio_ = explained_variance_ratio_[:n_components]
        self.explained_variance_ratio_ = explained_variance_ratio_
        self.n_components_ = n_components
        return (U, S, V)
    def get_covariance(self):
        """Compute data covariance with the generative model.

        ``cov = components_.T * S**2 * components_ + sigma2 * eye(n_features)``
        where S**2 contains the explained variances.

        Returns
        -------
        cov : array, shape=(n_features, n_features)
            Estimated covariance of data.
        """
        components_ = self.components_
        exp_var = self.explained_variance_
        if self.whiten:
            # undo the whitening scaling so components carry the variance again
            components_ = components_ * np.sqrt(exp_var[:, np.newaxis])
        # clip at 0 in case the noise estimate exceeds a small eigenvalue
        exp_var_diff = np.maximum(exp_var - self.noise_variance_, 0.)
        cov = np.dot(components_.T * exp_var_diff, components_)
        cov.flat[::len(cov) + 1] += self.noise_variance_  # modify diag inplace
        return cov
    def get_precision(self):
        """Compute data precision matrix with the generative model.

        Equals the inverse of the covariance but computed with
        the matrix inversion lemma for efficiency.

        Returns
        -------
        precision : array, shape=(n_features, n_features)
            Estimated precision of data.
        """
        n_features = self.components_.shape[1]
        # handle corner cases first
        if self.n_components_ == 0:
            # pure isotropic noise model
            return np.eye(n_features) / self.noise_variance_
        if self.n_components_ == n_features:
            # full-rank model: no low-rank structure to exploit
            return linalg.inv(self.get_covariance())
        # Get precision using matrix inversion lemma (Woodbury identity):
        # only an n_components x n_components matrix is inverted below
        components_ = self.components_
        exp_var = self.explained_variance_
        exp_var_diff = np.maximum(exp_var - self.noise_variance_, 0.)
        precision = np.dot(components_, components_.T) / self.noise_variance_
        precision.flat[::len(precision) + 1] += 1. / exp_var_diff
        precision = np.dot(components_.T,
                           np.dot(linalg.inv(precision), components_))
        precision /= -(self.noise_variance_ ** 2)
        precision.flat[::len(precision) + 1] += 1. / self.noise_variance_
        return precision
def transform(self, X):
"""Apply the dimensionality reduction on X.
X is projected on the first principal components previous extracted
from a training set.
Parameters
----------
X : array-like, shape (n_samples, n_features)
New data, where n_samples is the number of samples
and n_features is the number of features.
Returns
-------
X_new : array-like, shape (n_samples, n_components)
"""
check_is_fitted(self, 'mean_')
X = check_array(X)
if self.mean_ is not None:
X = X - self.mean_
X_transformed = fast_dot(X, self.components_.T)
if self.whiten:
X_transformed /= np.sqrt(self.explained_variance_)
return X_transformed
def inverse_transform(self, X):
"""Transform data back to its original space, i.e.,
return an input X_original whose transform would be X
Parameters
----------
X : array-like, shape (n_samples, n_components)
New data, where n_samples is the number of samples
and n_components is the number of components.
Returns
-------
X_original array-like, shape (n_samples, n_features)
"""
check_is_fitted(self, 'mean_')
if self.whiten:
return fast_dot(
X,
np.sqrt(self.explained_variance_[:, np.newaxis]) *
self.components_) + self.mean_
else:
return fast_dot(X, self.components_) + self.mean_
def score_samples(self, X):
"""Return the log-likelihood of each sample
See. "Pattern Recognition and Machine Learning"
by C. Bishop, 12.2.1 p. 574
or http://www.miketipping.com/papers/met-mppca.pdf
Parameters
----------
X: array, shape(n_samples, n_features)
The data.
Returns
-------
ll: array, shape (n_samples,)
Log-likelihood of each sample under the current model
"""
check_is_fitted(self, 'mean_')
X = check_array(X)
Xr = X - self.mean_
n_features = X.shape[1]
log_like = np.zeros(X.shape[0])
precision = self.get_precision()
log_like = -.5 * (Xr * (np.dot(Xr, precision))).sum(axis=1)
log_like -= .5 * (n_features * log(2. * np.pi)
- fast_logdet(precision))
return log_like
def score(self, X, y=None):
"""Return the average log-likelihood of all samples
See. "Pattern Recognition and Machine Learning"
by C. Bishop, 12.2.1 p. 574
or http://www.miketipping.com/papers/met-mppca.pdf
Parameters
----------
X: array, shape(n_samples, n_features)
The data.
Returns
-------
ll: float
Average log-likelihood of the samples under the current model
"""
return np.mean(self.score_samples(X))
class RandomizedPCA(BaseEstimator, TransformerMixin):
"""Principal component analysis (PCA) using randomized SVD
Linear dimensionality reduction using approximated Singular Value
Decomposition of the data and keeping only the most significant
singular vectors to project the data to a lower dimensional space.
Read more in the :ref:`User Guide <RandomizedPCA>`.
Parameters
----------
n_components : int, optional
Maximum number of components to keep. When not given or None, this
is set to n_features (the second dimension of the training data).
copy : bool
If False, data passed to fit are overwritten and running
fit(X).transform(X) will not yield the expected results,
use fit_transform(X) instead.
iterated_power : int, optional
Number of iterations for the power method. 3 by default.
whiten : bool, optional
When True (False by default) the `components_` vectors are divided
by the singular values to ensure uncorrelated outputs with unit
component-wise variances.
Whitening will remove some information from the transformed signal
(the relative variance scales of the components) but can sometime
improve the predictive accuracy of the downstream estimators by
making their data respect some hard-wired assumptions.
random_state : int or RandomState instance or None (default)
Pseudo Random Number generator seed control. If None, use the
numpy.random singleton.
Attributes
----------
components_ : array, [n_components, n_features]
Components with maximum variance.
explained_variance_ratio_ : array, [n_components]
        Percentage of variance explained by each of the selected components.
        If k is not set then all components are stored and the sum of
        explained variances is equal to 1.0
mean_ : array, [n_features]
Per-feature empirical mean, estimated from the training set.
Examples
--------
>>> import numpy as np
>>> from sklearn.decomposition import RandomizedPCA
>>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
>>> pca = RandomizedPCA(n_components=2)
>>> pca.fit(X) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
RandomizedPCA(copy=True, iterated_power=3, n_components=2,
random_state=None, whiten=False)
>>> print(pca.explained_variance_ratio_) # doctest: +ELLIPSIS
[ 0.99244... 0.00755...]
See also
--------
PCA
TruncatedSVD
References
----------
.. [Halko2009] `Finding structure with randomness: Stochastic algorithms
for constructing approximate matrix decompositions Halko, et al., 2009
(arXiv:909)`
.. [MRT] `A randomized algorithm for the decomposition of matrices
Per-Gunnar Martinsson, Vladimir Rokhlin and Mark Tygert`
"""
def __init__(self, n_components=None, copy=True, iterated_power=3,
whiten=False, random_state=None):
self.n_components = n_components
self.copy = copy
self.iterated_power = iterated_power
self.whiten = whiten
self.random_state = random_state
def fit(self, X, y=None):
"""Fit the model with X by extracting the first principal components.
Parameters
----------
X: array-like, shape (n_samples, n_features)
Training data, where n_samples in the number of samples
and n_features is the number of features.
Returns
-------
self : object
Returns the instance itself.
"""
self._fit(check_array(X))
return self
    def _fit(self, X):
        """Fit the model to the data X.

        Parameters
        ----------
        X: array-like, shape (n_samples, n_features)
            Training vector, where n_samples in the number of samples and
            n_features is the number of features.

        Returns
        -------
        X : ndarray, shape (n_samples, n_features)
            The input data, copied, centered and whitened when requested.
        """
        random_state = check_random_state(self.random_state)
        X = np.atleast_2d(as_float_array(X, copy=self.copy))
        n_samples = X.shape[0]
        # Center data
        self.mean_ = np.mean(X, axis=0)
        X -= self.mean_
        if self.n_components is None:
            # keep every component by default
            n_components = X.shape[1]
        else:
            n_components = self.n_components
        # truncated randomized SVD: only the top n_components are computed
        U, S, V = randomized_svd(X, n_components,
                                 n_iter=self.iterated_power,
                                 random_state=random_state)
        self.explained_variance_ = exp_var = (S ** 2) / n_samples
        # ratio is taken against the total variance of the centered data,
        # not just the retained components
        full_var = np.var(X, axis=0).sum()
        self.explained_variance_ratio_ = exp_var / full_var
        if self.whiten:
            # scale components so transformed outputs have unit variance
            self.components_ = V / S[:, np.newaxis] * sqrt(n_samples)
        else:
            self.components_ = V
        return X
def transform(self, X, y=None):
"""Apply dimensionality reduction on X.
X is projected on the first principal components previous extracted
from a training set.
Parameters
----------
X : array-like, shape (n_samples, n_features)
New data, where n_samples in the number of samples
and n_features is the number of features.
Returns
-------
X_new : array-like, shape (n_samples, n_components)
"""
check_is_fitted(self, 'mean_')
X = check_array(X)
if self.mean_ is not None:
X = X - self.mean_
X = fast_dot(X, self.components_.T)
return X
def fit_transform(self, X, y=None):
"""Fit the model with X and apply the dimensionality reduction on X.
Parameters
----------
X : array-like, shape (n_samples, n_features)
New data, where n_samples in the number of samples
and n_features is the number of features.
Returns
-------
X_new : array-like, shape (n_samples, n_components)
"""
X = check_array(X)
X = self._fit(X)
return fast_dot(X, self.components_.T)
def inverse_transform(self, X, y=None):
"""Transform data back to its original space.
Returns an array X_original whose transform would be X.
Parameters
----------
X : array-like, shape (n_samples, n_components)
New data, where n_samples in the number of samples
and n_components is the number of components.
Returns
-------
X_original array-like, shape (n_samples, n_features)
Notes
-----
If whitening is enabled, inverse_transform does not compute the
exact inverse operation of transform.
"""
check_is_fitted(self, 'mean_')
X_original = fast_dot(X, self.components_)
if self.mean_ is not None:
X_original = X_original + self.mean_
return X_original
| bsd-3-clause |
yongshengwang/hue | build/env/lib/python2.7/site-packages/pysaml2-2.4.0-py2.7.egg/saml2/ecp_client.py | 32 | 11142 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
"""
Contains a class that can do SAML ECP Authentication for other python
programs.
"""
import cookielib
import logging
from saml2 import soap
from saml2 import saml
from saml2 import samlp
from saml2 import SAMLError
from saml2 import BINDING_SOAP
from saml2.client_base import MIME_PAOS
from saml2.config import Config
from saml2.entity import Entity
from saml2.httpbase import set_list2dict, dict2set_list
from saml2.profile import paos
from saml2.profile import ecp
from saml2.mdstore import MetadataStore
from saml2.s_utils import BadRequest
SERVICE = "urn:oasis:names:tc:SAML:2.0:profiles:SSO:ecp"
PAOS_HEADER_INFO = 'ver="%s";"%s"' % (paos.NAMESPACE, SERVICE)
logger = logging.getLogger(__name__)
class Client(Entity):
    def __init__(self, user, passwd, sp="", idp=None, metadata_file=None,
                 xmlsec_binary=None, verbose=0, ca_certs="",
                 disable_ssl_certificate_validation=True, key_file=None,
                 cert_file=None, config=None):
        """
        :param user: user name
        :param passwd: user password
        :param sp: The SP URL
        :param idp: The IdP PAOS endpoint
        :param metadata_file: Where the metadata file is if used
        :param xmlsec_binary: Where the xmlsec1 binary can be found (*)
        :param verbose: Chatty or not
        :param ca_certs: is the path of a file containing root CA certificates
            for SSL server certificate validation (*)
        :param disable_ssl_certificate_validation: If
            disable_ssl_certificate_validation is true, SSL cert validation
            will not be performed (*)
        :param key_file: Private key filename (*)
        :param cert_file: Certificate filename (*)
        :param config: Config() instance, overrides all the parameters marked
            with an asterisk (*) above
        """
        # only build a Config from the individual keyword arguments when the
        # caller did not supply a ready-made Config instance
        if not config:
            config = Config()
            config.disable_ssl_certificate_validation = \
                disable_ssl_certificate_validation
            config.key_file = key_file
            config.cert_file = cert_file
            config.ca_certs = ca_certs
            config.xmlsec_binary = xmlsec_binary
        # the client acts in the SP role towards the Entity machinery
        Entity.__init__(self, "sp", config)
        self._idp = idp
        self._sp = sp
        self.user = user
        self.passwd = passwd
        self._verbose = verbose
        if metadata_file:
            self._metadata = MetadataStore([saml, samlp], None, config)
            self._metadata.load("local", metadata_file)
            logger.debug("Loaded metadata from '%s'" % metadata_file)
        else:
            self._metadata = None
        self.metadata = self._metadata
        self.cookie_handler = None
        # done_ecp flips to True once a full ECP conversation has completed
        self.done_ecp = False
        self.cookie_jar = cookielib.LWPCookieJar()
    def phase2(self, authn_request, rc_url, idp_entity_id, headers=None,
               sign=False, **kwargs):
        """
        Doing the second phase of the ECP conversation, the conversation
        with the IdP happens.

        :param authn_request: The AuthenticationRequest
        :param rc_url: The assertion consumer service url of the SP
        :param idp_entity_id: The EntityID of the IdP
        :param headers: Possible extra headers
        :param sign: If the message should be signed
        :return: The response from the IdP
        """
        # locate the IdP's SOAP single-sign-on endpoint from metadata
        _, destination = self.pick_binding("single_sign_on_service",
                                           [BINDING_SOAP], "idpsso",
                                           entity_id=idp_entity_id)
        ht_args = self.apply_binding(BINDING_SOAP, authn_request, destination,
                                     sign=sign)
        if headers:
            ht_args["headers"].extend(headers)
        logger.debug("[P2] Sending request: %s" % ht_args["data"])
        # POST the request to the IdP
        response = self.send(**ht_args)
        logger.debug("[P2] Got IdP response: %s" % response)
        if response.status_code != 200:
            raise SAMLError(
                "Request to IdP failed (%s): %s" % (response.status_code,
                                                    response.error))
        # SAMLP response in a SOAP envelope body, ecp response in headers
        respdict = self.parse_soap_message(response.text)
        if respdict is None:
            raise SAMLError("Unexpected reply from the IdP")
        logger.debug("[P2] IdP response dict: %s" % respdict)
        idp_response = respdict["body"]
        assert idp_response.c_tag == "Response"
        logger.debug("[P2] IdP AUTHN response: %s" % idp_response)
        # pull the ecp:Response header block to cross-check the ACS url
        _ecp_response = None
        for item in respdict["header"]:
            if item.c_tag == "Response" and item.c_namespace == ecp.NAMESPACE:
                _ecp_response = item
        _acs_url = _ecp_response.assertion_consumer_service_url
        # the ECP profile requires these two URLs to match; a mismatch may
        # indicate a man-in-the-middle, so report it to the SP and bail out
        if rc_url != _acs_url:
            error = ("response_consumer_url '%s' does not match" % rc_url,
                     "assertion_consumer_service_url '%s" % _acs_url)
            # Send an error message to the SP
            _ = self.send(rc_url, "POST", data=soap.soap_fault(error))
            # Raise an exception so the user knows something went wrong
            raise SAMLError(error)
        return idp_response
@staticmethod
def parse_sp_ecp_response(respdict):
if respdict is None:
raise SAMLError("Unexpected reply from the SP")
logger.debug("[P1] SP response dict: %s" % respdict)
# AuthnRequest in the body or not
authn_request = respdict["body"]
assert authn_request.c_tag == "AuthnRequest"
# ecp.RelayState among headers
_relay_state = None
_paos_request = None
for item in respdict["header"]:
if item.c_tag == "RelayState" and item.c_namespace == ecp.NAMESPACE:
_relay_state = item
if item.c_tag == "Request" and item.c_namespace == paos.NAMESPACE:
_paos_request = item
if _paos_request is None:
raise BadRequest("Missing request")
_rc_url = _paos_request.response_consumer_url
return {"authn_request": authn_request, "rc_url": _rc_url,
"relay_state": _relay_state}
def ecp_conversation(self, respdict, idp_entity_id=None):
"""
:param respdict:
:param idp_entity_id:
:return:
"""
args = self.parse_sp_ecp_response(respdict)
# **********************
# Phase 2 - talk to the IdP
# **********************
idp_response = self.phase2(idp_entity_id=idp_entity_id, **args)
# **********************************
# Phase 3 - back to the SP
# **********************************
ht_args = self.use_soap(idp_response, args["rc_url"],
[args["relay_state"]])
logger.debug("[P3] Post to SP: %s" % ht_args["data"])
ht_args["headers"].append(('Content-Type', 'application/vnd.paos+xml'))
# POST the package from the IdP to the SP
response = self.send(args["rc_url"], "POST", **ht_args)
if response.status_code == 302:
# ignore where the SP is redirecting us to and go for the
# url I started off with.
pass
else:
print response.error
raise SAMLError(
"Error POSTing package to SP: %s" % response.error)
logger.debug("[P3] SP response: %s" % response.text)
self.done_ecp = True
logger.debug("Done ECP")
return None
def add_paos_headers(self, headers=None):
if headers:
headers = set_list2dict(headers)
headers["PAOS"] = PAOS_HEADER_INFO
if "Accept" in headers:
headers["Accept"] += ";%s" % MIME_PAOS
elif "accept" in headers:
headers["Accept"] = headers["accept"]
headers["Accept"] += ";%s" % MIME_PAOS
del headers["accept"]
headers = dict2set_list(headers)
else:
headers = [
('Accept', 'text/html; %s' % MIME_PAOS),
('PAOS', PAOS_HEADER_INFO)
]
return headers
def operation(self, url, idp_entity_id, op, **opargs):
"""
This is the method that should be used by someone that wants
to authenticate using SAML ECP
:param url: The page that access is sought for
:param idp_entity_id: The entity ID of the IdP that should be
used for authentication
:param op: Which HTTP operation (GET/POST/PUT/DELETE)
:param opargs: Arguments to the HTTP call
:return: The page
"""
if url not in opargs:
url = self._sp
# ********************************************
# Phase 1 - First conversation with the SP
# ********************************************
# headers needed to indicate to the SP that I'm ECP enabled
opargs["headers"] = self.add_paos_headers(opargs["headers"])
response = self.send(url, op, **opargs)
logger.debug("[Op] SP response: %s" % response)
if response.status_code != 200:
raise SAMLError(
"Request to SP failed: %s" % response.error)
# The response might be a AuthnRequest instance in a SOAP envelope
# body. If so it's the start of the ECP conversation
# Two SOAP header blocks; paos:Request and ecp:Request
# may also contain a ecp:RelayState SOAP header block
# If channel-binding was part of the PAOS header any number of
# <cb:ChannelBindings> header blocks may also be present
# if 'holder-of-key' option then one or more <ecp:SubjectConfirmation>
# header blocks may also be present
try:
respdict = self.parse_soap_message(response.text)
self.ecp_conversation(respdict, idp_entity_id)
# should by now be authenticated so this should go smoothly
response = self.send(url, op, **opargs)
except (soap.XmlParseError, AssertionError, KeyError):
pass
#print "RESP",response, self.http.response
if response.status_code != 404:
raise SAMLError("Error performing operation: %s" % (
response.error,))
return response
    # different HTTP operations: thin convenience wrappers that delegate to
    # operation(), which drives the full ECP authentication flow
    def delete(self, url=None, idp_entity_id=None):
        return self.operation(url, idp_entity_id, "DELETE")

    def get(self, url=None, idp_entity_id=None, headers=None):
        return self.operation(url, idp_entity_id, "GET", headers=headers)

    def post(self, url=None, data="", idp_entity_id=None, headers=None):
        return self.operation(url, idp_entity_id, "POST", data=data,
                              headers=headers)

    def put(self, url=None, data="", idp_entity_id=None, headers=None):
        return self.operation(url, idp_entity_id, "PUT", data=data,
                              headers=headers)
| apache-2.0 |
tiagocoutinho/bliss | bliss/controllers/emulators/keithley.py | 1 | 2009 | # -*- coding: utf-8 -*-
#
# This file is part of the bliss project
#
# Copyright (c) 2016 Beamline Control Unit, ESRF
# Distributed under the GNU LGPLv3. See LICENSE for more info.
import time
import random
import gevent
from bliss.comm.scpi import Commands
from bliss.controllers.keithley_scpi_mapping import COMMANDS, MODEL_COMMANDS
from .scpi import SCPI
# 'KEITHLEY INSTRUMENTS INC.,MODEL 6485,1008577,B03 Sep 25 2002 10:53:29/A02 /E'
class BaseKeithley(SCPI):
    """Identity data and start-up bookkeeping common to the emulated
    Keithley instruments."""

    Manufacturer = 'KEITHLEY INSTRUMENTS INC.'
    Version = '1008577'
    Firmware = 'B03 Sep 25 2002 10:53:29/A02 /E'
    IDNFieldSep = ','

    def __init__(self, *args, **kwargs):
        """Record the power-on time, used for relative TIME readings."""
        super(BaseKeithley, self).__init__(*args, **kwargs)
        self.start_time = time.time()

    def syst_ver(self):
        """Handle the SCPI SYST:VERS query."""
        return self.Version
class Keithley6485(BaseKeithley):
    """Emulation of a Keithley model 6485 picoammeter.

    (The commented-out dead ``curr_nplc`` implementation has been removed.)
    """

    Model = 'MODEL 6485'
    PLC = 50  # mains power-line cycles per second (50 Hz)
    NPLC = 5.0  # integration time expressed in power-line cycles
    FormElem = 'READ',

    def __init__(self, *args, **kwargs):
        """Register the 6485-specific SCPI command set before delegating."""
        kwargs['commands'] = Commands(COMMANDS, MODEL_COMMANDS['6485'])
        super(Keithley6485, self).__init__(*args, **kwargs)

    def curr_rang(self):
        """Handle the SCPI CURR:RANG query with a fixed dummy range."""
        return 123.456

    def form_elem(self, is_query, value=None):
        """Handle SCPI FORM:ELEM -- get or set the elements read() returns."""
        if is_query:
            return ','.join(self.FormElem)
        self.FormElem = tuple(map(str.upper, value.split(',')))

    def read(self):
        """Produce one simulated reading per configured format element."""
        # assumptions: reading from sensor and result in SCI notation
        # emulate the integration time (NPLC power-line cycles)
        gevent.sleep(self.NPLC * 1./self.PLC)
        result = []
        for i in self.FormElem:
            if i == 'READ':
                # random current in the 2 nA .. 20 mA range; trailing 'A'
                # is the ampere unit suffix appended to the %E value
                result.append('%EA' % (random.random()*(20E-3 - 2E-9) + 2E-9))
            elif i == 'TIME':
                # timestamp rolls over at 99999.99 s like the instrument
                ts = (time.time() - self.start_time) % 99999.99
                result.append('%E' % ts)
        return ','.join(result)

    def meas(self):
        """Handle SCPI MEAS -- trigger and return a fresh reading."""
        return self.read()
| lgpl-3.0 |
ppapadeas/wprevents | vendor-local/lib/python/unidecode/x0ae.py | 253 | 4875 | data = (
# Unidecode transliteration table for code points U+AE00..U+AEFF (Hangul
# syllables); the tuple index equals the low byte of the code point.
# 0x00-0x07
'geul', 'geulg', 'geulm', 'geulb', 'geuls', 'geult', 'geulp', 'geulh',
# 0x08-0x0f
'geum', 'geub', 'geubs', 'geus', 'geuss', 'geung', 'geuj', 'geuc',
# 0x10-0x17
'geuk', 'geut', 'geup', 'geuh', 'gyi', 'gyig', 'gyigg', 'gyigs',
# 0x18-0x1f
'gyin', 'gyinj', 'gyinh', 'gyid', 'gyil', 'gyilg', 'gyilm', 'gyilb',
# 0x20-0x27
'gyils', 'gyilt', 'gyilp', 'gyilh', 'gyim', 'gyib', 'gyibs', 'gyis',
# 0x28-0x2f
'gyiss', 'gying', 'gyij', 'gyic', 'gyik', 'gyit', 'gyip', 'gyih',
# 0x30-0x37
'gi', 'gig', 'gigg', 'gigs', 'gin', 'ginj', 'ginh', 'gid',
# 0x38-0x3f
'gil', 'gilg', 'gilm', 'gilb', 'gils', 'gilt', 'gilp', 'gilh',
# 0x40-0x47
'gim', 'gib', 'gibs', 'gis', 'giss', 'ging', 'gij', 'gic',
# 0x48-0x4f
'gik', 'git', 'gip', 'gih', 'gga', 'ggag', 'ggagg', 'ggags',
# 0x50-0x57
'ggan', 'gganj', 'gganh', 'ggad', 'ggal', 'ggalg', 'ggalm', 'ggalb',
# 0x58-0x5f
'ggals', 'ggalt', 'ggalp', 'ggalh', 'ggam', 'ggab', 'ggabs', 'ggas',
# 0x60-0x67
'ggass', 'ggang', 'ggaj', 'ggac', 'ggak', 'ggat', 'ggap', 'ggah',
# 0x68-0x6f
'ggae', 'ggaeg', 'ggaegg', 'ggaegs', 'ggaen', 'ggaenj', 'ggaenh', 'ggaed',
# 0x70-0x77
'ggael', 'ggaelg', 'ggaelm', 'ggaelb', 'ggaels', 'ggaelt', 'ggaelp', 'ggaelh',
# 0x78-0x7f
'ggaem', 'ggaeb', 'ggaebs', 'ggaes', 'ggaess', 'ggaeng', 'ggaej', 'ggaec',
# 0x80-0x87
'ggaek', 'ggaet', 'ggaep', 'ggaeh', 'ggya', 'ggyag', 'ggyagg', 'ggyags',
# 0x88-0x8f
'ggyan', 'ggyanj', 'ggyanh', 'ggyad', 'ggyal', 'ggyalg', 'ggyalm', 'ggyalb',
# 0x90-0x97
'ggyals', 'ggyalt', 'ggyalp', 'ggyalh', 'ggyam', 'ggyab', 'ggyabs', 'ggyas',
# 0x98-0x9f
'ggyass', 'ggyang', 'ggyaj', 'ggyac', 'ggyak', 'ggyat', 'ggyap', 'ggyah',
# 0xa0-0xa7
'ggyae', 'ggyaeg', 'ggyaegg', 'ggyaegs', 'ggyaen', 'ggyaenj', 'ggyaenh', 'ggyaed',
# 0xa8-0xaf
'ggyael', 'ggyaelg', 'ggyaelm', 'ggyaelb', 'ggyaels', 'ggyaelt', 'ggyaelp', 'ggyaelh',
# 0xb0-0xb7
'ggyaem', 'ggyaeb', 'ggyaebs', 'ggyaes', 'ggyaess', 'ggyaeng', 'ggyaej', 'ggyaec',
# 0xb8-0xbf
'ggyaek', 'ggyaet', 'ggyaep', 'ggyaeh', 'ggeo', 'ggeog', 'ggeogg', 'ggeogs',
# 0xc0-0xc7
'ggeon', 'ggeonj', 'ggeonh', 'ggeod', 'ggeol', 'ggeolg', 'ggeolm', 'ggeolb',
# 0xc8-0xcf
'ggeols', 'ggeolt', 'ggeolp', 'ggeolh', 'ggeom', 'ggeob', 'ggeobs', 'ggeos',
# 0xd0-0xd7
'ggeoss', 'ggeong', 'ggeoj', 'ggeoc', 'ggeok', 'ggeot', 'ggeop', 'ggeoh',
# 0xd8-0xdf
'gge', 'ggeg', 'ggegg', 'ggegs', 'ggen', 'ggenj', 'ggenh', 'gged',
# 0xe0-0xe7
'ggel', 'ggelg', 'ggelm', 'ggelb', 'ggels', 'ggelt', 'ggelp', 'ggelh',
# 0xe8-0xef
'ggem', 'ggeb', 'ggebs', 'gges', 'ggess', 'ggeng', 'ggej', 'ggec',
# 0xf0-0xf7
'ggek', 'gget', 'ggep', 'ggeh', 'ggyeo', 'ggyeog', 'ggyeogg', 'ggyeogs',
# 0xf8-0xff
'ggyeon', 'ggyeonj', 'ggyeonh', 'ggyeod', 'ggyeol', 'ggyeolg', 'ggyeolm', 'ggyeolb',
)
| bsd-3-clause |
r-rathi/error-control-coding | perf/plot-pegd.py | 1 | 1496 | import numpy as np
import matplotlib.pyplot as plt
from errsim import *
def label(d, pe, pb, n):
    """Build a legend label for one curve.

    :param d: code parameter shown as ``d=`` (presumably minimum
        distance -- confirm against errsim).
    :param pe: error probability shown as ``pe=``.
    :param pb: second probability shown as ``pb=``; ``None`` selects the
        BSC (memoryless channel) label form, which omits ``pb``.
    :param n: block length shown as ``n=``.
    :return: e.g. ``'d=3 pe=1e-15 n=128 BSC'`` or ``'d=6 pe=0.5 n=64 pb=0.1'``.
    """
    # Fixed: the original shadowed the function name with a local
    # variable and performed a dead ``pb = pe`` assignment on the BSC
    # branch (the BSC label never uses pb).
    if pb is None:
        return 'd={} pe={} n={} BSC'.format(d, pe, n)
    return 'd={} pe={} n={} pb={}'.format(d, pe, n, pb)
def plot(pe, fpath=None):
    """Plot P_egd against burst-correction capability r.

    Draws six curves: d in {3, 6} crossed with three channels (pb equal
    to pe, pb=0.1, pb=0.5), all at block length 128, on a log-scale y
    axis.  ``jointpmf5`` and ``r_vs_pegd`` come from ``errsim``.

    :param pe: error probability passed to ``jointpmf5`` for every curve.
    :param fpath: optional path; when given, the figure is also saved there.
    """
    fig, ax = plt.subplots(nrows=1, ncols=1, figsize=plt.figaspect(1/2))
    # x axis: burst error correction capabilities 8..64.
    r = np.arange(8, 65)
    # pb == pe: labelled as the memoryless (BSC) case.
    pWL = jointpmf5(pe, pe, 128)
    ax.plot(r, r_vs_pegd(pWL, 3, r) , 'g--', lw=2, label=label(3, pe, None, 128))
    ax.plot(r, r_vs_pegd(pWL, 6, r) , 'g-', lw=2, label=label(6, pe, None, 128))
    pWL = jointpmf5(pe, .1, 128)
    ax.plot(r, r_vs_pegd(pWL, 3, r) , 'b--', lw=2, label=label(3, pe, .1, 128))
    ax.plot(r, r_vs_pegd(pWL, 6, r) , 'b-', lw=2, label=label(6, pe, .1, 128))
    pWL = jointpmf5(pe, .5, 128)
    ax.plot(r, r_vs_pegd(pWL, 3, r) , 'r--', lw=2, label=label(3, pe, .5, 128))
    ax.plot(r, r_vs_pegd(pWL, 6, r) , 'r-', lw=2, label=label(6, pe, .5, 128))
    ax.set_yscale('log')
    ax.set_xticks(r[::8])
    ax.set_xlim(r[0], r[-1])
    #ax.set_ylim(1e-30, 1e-1)
    ax.set_xlabel('Burst error correction capability, $r$')
    ax.set_ylabel('$P_{egd}$')
    ax.set_title('Probability of Exceeding Guarenteed Error Detection Capability')
    ax.legend(loc='lower right')
    ax.grid(True)
    #plt.tight_layout()
    if fpath:
        fig.savefig(fpath)
    plt.show()
    plt.close('all')
# Script entry point: generate both figures (runs at import time).
# NOTE(review): consider wrapping in ``if __name__ == '__main__':``.
plot(1e-15, 'plots/pegd-pe=1e15.png')
plot(1e-6, 'plots/pegd-pe=1e6.png')
| mit |
stevehof/location-ninja | lib/jinja2/defaults.py | 659 | 1068 | # -*- coding: utf-8 -*-
"""
jinja2.defaults
~~~~~~~~~~~~~~~
Jinja default filters and tags.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
from jinja2._compat import range_type
from jinja2.utils import generate_lorem_ipsum, Cycler, Joiner
# defaults for the parser / lexer
# Delimiters for statement blocks: {% ... %}
BLOCK_START_STRING = '{%'
BLOCK_END_STRING = '%}'
# Delimiters for variable substitutions: {{ ... }}
VARIABLE_START_STRING = '{{'
VARIABLE_END_STRING = '}}'
# Delimiters for template comments: {# ... #}
COMMENT_START_STRING = '{#'
COMMENT_END_STRING = '#}'
# Line-based statements/comments are disabled unless a prefix is set.
LINE_STATEMENT_PREFIX = None
LINE_COMMENT_PREFIX = None
# Whitespace-control defaults.
TRIM_BLOCKS = False
LSTRIP_BLOCKS = False
NEWLINE_SEQUENCE = '\n'
KEEP_TRAILING_NEWLINE = False
# default filters, tests and namespace
from jinja2.filters import FILTERS as DEFAULT_FILTERS
from jinja2.tests import TESTS as DEFAULT_TESTS
# Names available in every template's global namespace by default.
DEFAULT_NAMESPACE = {
    'range': range_type,
    'dict': lambda **kw: kw,
    'lipsum': generate_lorem_ipsum,
    'cycler': Cycler,
    'joiner': Joiner
}
# Export every upper-cased name defined in this module as its public API.
__all__ = tuple(x for x in locals().keys() if x.isupper())
| gpl-3.0 |
nikhilraog/boto | boto/opsworks/__init__.py | 135 | 1657 | # Copyright (c) 2014 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.regioninfo import RegionInfo, get_regions
def regions():
    """
    Get all available regions for the Amazon OpsWorks service.
    :rtype: list
    :return: A list of :class:`boto.regioninfo.RegionInfo`
    """
    # Imported inside the function rather than at module level --
    # presumably to avoid a circular import with layer1; TODO confirm.
    from boto.opsworks.layer1 import OpsWorksConnection
    return get_regions('opsworks', connection_cls=OpsWorksConnection)
def connect_to_region(region_name, **kw_params):
    """Return a connection to *region_name*, or None if the name is unknown.

    Extra keyword parameters are forwarded to the region's ``connect``.
    """
    match = next((r for r in regions() if r.name == region_name), None)
    if match is None:
        return None
    return match.connect(**kw_params)
| mit |
zentner-kyle/servo | tests/wpt/css-tests/tools/html5lib/html5lib/treebuilders/__init__.py | 1730 | 3405 | """A collection of modules for building different kinds of tree from
HTML documents.
To create a treebuilder for a new type of tree, you need to do
implement several things:
1) A set of classes for various types of elements: Document, Doctype,
Comment, Element. These must implement the interface of
_base.treebuilders.Node (although comment nodes have a different
signature for their constructor, see treebuilders.etree.Comment)
Textual content may also be implemented as another node type, or not, as
your tree implementation requires.
2) A treebuilder object (called TreeBuilder by convention) that
inherits from treebuilders._base.TreeBuilder. This has 4 required attributes:
documentClass - the class to use for the bottommost node of a document
elementClass - the class to use for HTML Elements
commentClass - the class to use for comments
doctypeClass - the class to use for doctypes
It also has one required method:
getDocument - Returns the root node of the complete document tree
3) If you wish to run the unit tests, you must also create a
testSerializer method on your treebuilder which accepts a node and
returns a string containing Node and its children serialized according
to the format used in the unittests
"""
from __future__ import absolute_import, division, unicode_literals
from ..utils import default_etree
treeBuilderCache = {}
def getTreeBuilder(treeType, implementation=None, **kwargs):
    """Get a TreeBuilder class for various types of tree with built-in support

    treeType - the name of the tree type required (case-insensitive). Supported
               values are:

               "dom" - A generic builder for DOM implementations, defaulting to
                       a xml.dom.minidom based implementation.
               "etree" - A generic builder for tree implementations exposing an
                         ElementTree-like interface, defaulting to
                         xml.etree.cElementTree if available and
                         xml.etree.ElementTree if not.
               "lxml" - A etree-based builder for lxml.etree, handling
                        limitations of lxml's implementation.

    implementation - (Currently applies to the "etree" and "dom" tree types). A
                      module implementing the tree type e.g.
                      xml.etree.ElementTree or xml.etree.cElementTree."""

    treeType = treeType.lower()
    if treeType not in treeBuilderCache:
        if treeType == "dom":
            from . import dom
            # Come up with a sane default (pref. from the stdlib)
            if implementation is None:
                from xml.dom import minidom
                implementation = minidom
            # NEVER cache here, caching is done in the dom submodule
            return dom.getDomModule(implementation, **kwargs).TreeBuilder
        elif treeType == "lxml":
            # lxml takes no implementation argument, so the class itself
            # can safely be cached at this level.
            from . import etree_lxml
            treeBuilderCache[treeType] = etree_lxml.TreeBuilder
        elif treeType == "etree":
            from . import etree
            if implementation is None:
                implementation = default_etree
            # NEVER cache here, caching is done in the etree submodule
            return etree.getETreeModule(implementation, **kwargs).TreeBuilder
        else:
            raise ValueError("""Unrecognised treebuilder "%s" """ % treeType)
    return treeBuilderCache.get(treeType)
| mpl-2.0 |
grimli/life-game | life_thread.py | 1 | 3242 | #!/usr/bin/python3
import random
from tkinter import *
import threading
import queue
class Evolver( threading.Thread ):
    """Background thread that repeatedly computes the next board state.

    Each completed generation is announced to the GUI loop by putting a
    token on the shared queue.
    """
    def __init__(self, queue):
        threading.Thread.__init__(self)
        self.queue = queue

    def run(self):
        # NOTE(review): relies on the module-global ``board`` created in
        # the __main__ block, and mutates it without any locking.
        while True:
            print("time: %d\n" % board.time)
            board.evolve()
            # Signal the GUI that a new generation is ready to draw.
            self.queue.put(1)
class CellularAutoma:
    """Conway's Game of Life on a toroidal (wrap-around) square board,
    rendered on a Tk canvas.
    """

    def __init__(self, lenght=6, scale=10):
        """Create a lenght x lenght board with roughly 1/3 of cells alive.

        :param lenght: board edge length in cells (parameter name kept
            misspelled for backward compatibility with existing callers).
        :param scale: pixel edge length of one rendered cell.
        """
        self.scale = scale
        random.seed()
        self.time = 0
        # Each cell starts alive with probability 1/3 (randint(-1, 1) > 0).
        # Replaces the original "[[]] then remove([])" construction hack
        # and the dead ``line = []`` assignment; RNG call order unchanged.
        self.board = [[1 if random.randint(-1, 1) > 0 else 0
                       for _ in range(lenght)]
                      for _ in range(lenght)]
        # init GUI
        self.master = Tk()
        self.master.title('life game')
        self.w = Canvas(self.master, width=lenght * self.scale,
                        height=lenght * self.scale)
        self.w.pack()

    def evolve(self):
        """Advance the board by one generation (toroidal neighbourhood)."""
        rows = len(self.board)
        columns = len(self.board[0])
        # Fixed: the original allocated the new grid with rows/columns
        # swapped, which only worked because the board is square.
        new_board = [[0] * columns for _ in range(rows)]
        for i in range(rows):
            for j in range(columns):
                # Live cells in the 3x3 block centred on (i, j),
                # *including* the cell itself, with wrap-around indexing.
                total = sum(self.board[(i + di) % rows][(j + dj) % columns]
                            for di in (-1, 0, 1)
                            for dj in (-1, 0, 1))
                if self.board[i][j]:
                    # Survival: 2 or 3 live neighbours, i.e. a block total
                    # of 3 or 4 once the cell itself is counted.
                    new_board[i][j] = 1 if 2 < total <= 4 else 0
                else:
                    # Birth: exactly 3 live neighbours.
                    new_board[i][j] = 1 if total == 3 else 0
        self.board = new_board
        self.time = self.time + 1

    def show(self):
        """Redraw the whole board: yellow = alive, blue = dead."""
        self.w.delete(ALL)
        s = self.scale
        for i, row in enumerate(self.board):
            for j, cell in enumerate(row):
                colour = "yellow" if cell else "blue"
                self.w.create_rectangle(i * s, j * s, i * s + s, j * s + s,
                                        fill=colour)
if __name__ == '__main__':
    dim = input( "Inserisci la dimensione della board: ")
    board = CellularAutoma( lenght=int(dim), scale=5)
    # Size-1 queue: the evolver blocks after each generation until the
    # GUI loop has consumed the token.
    # NOTE(review): this rebinds the imported ``queue`` module name.
    queue = queue.Queue( maxsize=1 )
    t1 = Evolver(queue)
    t1.start()
    # Tkinter cannot be executed on a separate thread
    while True:
        flag = queue.get()    # value unused; acts purely as a "ready" signal
        board2 = board        # NOTE(review): alias, not a copy of the board
        queue.task_done()
        board2.show()
        board.master.update()
| gpl-3.0 |
mdsafwan/Deal-My-Stuff | Lib/site-packages/django/utils/inspect.py | 15 | 2502 | from __future__ import absolute_import
import inspect
import sys
# True on Python 3.3+, where inspect.signature() is available.
HAS_INSPECT_SIGNATURE = sys.version_info >= (3, 3)


def getargspec(func):
    """Return ``(args, varargs, varkw, defaults)`` for *func*.

    Emulates the legacy ``inspect.getargspec`` result using
    ``inspect.signature`` on Python 3.3+, falling back to the real
    ``getargspec`` on older versions.  Note that ``defaults`` is a list
    (or None), not a tuple, on the signature-based path.
    """
    if not HAS_INSPECT_SIGNATURE:
        return inspect.getargspec(func)

    params = inspect.signature(func).parameters.values()

    def _names_of(kind):
        return [p.name for p in params if p.kind == kind]

    args = _names_of(inspect.Parameter.POSITIONAL_OR_KEYWORD)
    star_args = _names_of(inspect.Parameter.VAR_POSITIONAL)
    varargs = star_args[0] if star_args else None
    star_kwargs = _names_of(inspect.Parameter.VAR_KEYWORD)
    varkw = star_kwargs[0] if star_kwargs else None
    defaults = [
        p.default for p in params
        if p.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD
        and p.default is not p.empty
    ] or None
    return args, varargs, varkw, defaults
def get_func_args(func):
    """Return the names of *func*'s positional-or-keyword parameters.

    On the legacy path the leading parameter (``self``) is dropped
    explicitly; the ``signature()`` path performs no such stripping
    (``inspect.signature`` already omits ``self`` for bound methods).
    """
    if not HAS_INSPECT_SIGNATURE:
        return inspect.getargspec(func).args[1:]  # ignore 'self'
    names = []
    for arg_name, param in inspect.signature(func).parameters.items():
        if param.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD:
            names.append(arg_name)
    return names
def func_accepts_kwargs(func):
    """Return True if *func* accepts arbitrary keyword arguments (**kwargs)."""
    if not HAS_INSPECT_SIGNATURE:
        # Not all callables are inspectable with getargspec, so we'll
        # try a couple different ways but in the end fall back on assuming
        # it is -- we don't want to prevent registration of valid but weird
        # callables.
        try:
            argspec = inspect.getargspec(func)
        except TypeError:
            try:
                # The callable itself may not be inspectable, but its
                # __call__ method might be.
                argspec = inspect.getargspec(func.__call__)
            except (TypeError, AttributeError):
                argspec = None
        # argspec[2] is the name of the **kwargs parameter (None if absent);
        # an uninspectable callable is assumed to accept kwargs.
        return not argspec or argspec[2] is not None
    return any(
        p for p in inspect.signature(func).parameters.values()
        if p.kind == p.VAR_KEYWORD
    )
def func_has_no_args(func):
    """Return True when *func* takes exactly one positional argument
    (typically just ``self`` on a method) -- despite the name, one
    argument, not zero, is expected.

    NOTE(review): the two branches disagree on defaulted parameters --
    the legacy branch counts them, the signature branch excludes them;
    confirm which behaviour callers rely on.
    """
    args = inspect.getargspec(func)[0] if not HAS_INSPECT_SIGNATURE else [
        p for p in inspect.signature(func).parameters.values()
        if p.kind == p.POSITIONAL_OR_KEYWORD and p.default is p.empty
    ]
    return len(args) == 1
def func_supports_parameter(func, parameter):
    """Return True if *func* has a named parameter called *parameter*."""
    if not HAS_INSPECT_SIGNATURE:
        args, varargs, varkw, defaults = inspect.getargspec(func)
        return parameter in args
    return parameter in inspect.signature(func).parameters
| apache-2.0 |
Serag8/Bachelor | google_appengine/lib/PyAMF-0.6.1/pyamf/tests/test_flex_messaging.py | 26 | 6744 | # -*- coding: utf-8 -*-
#
# Copyright (c) The PyAMF Project.
# See LICENSE.txt for details.
"""
Flex Messaging compatibility tests.
@since: 0.3.2
"""
import unittest
import datetime
import uuid
import pyamf
from pyamf.flex import messaging
class AbstractMessageTestCase(unittest.TestCase):
    """Tests for L{messaging.AbstractMessage}."""

    def test_repr(self):
        # repr() must not raise for a non-ascii unicode body.
        a = messaging.AbstractMessage()

        a.body = u'é,è'

        # NOTE(review): the try/except below just re-raises and is
        # therefore a no-op wrapper around repr().
        try:
            repr(a)
        except:
            raise
class EncodingTestCase(unittest.TestCase):
    """
    Encoding tests for L{messaging}
    """

    def test_AcknowledgeMessage(self):
        m = messaging.AcknowledgeMessage()
        m.correlationId = '1234'

        # Expected AMF3 wire representation.
        self.assertEqual(pyamf.encode(m).getvalue(),
            '\n\x81\x0bUflex.messaging.messages.AcknowledgeMessage\tbody'
            '\x11clientId\x17destination\x0fheaders\x13messageId\x13timestamp'
            '\x15timeToLive\x1bcorrelationId\x01\x01\x01\n\x0b\x01\x01\x01\x01'
            '\x01\x06\t1234\x01')

    def test_CommandMessage(self):
        m = messaging.CommandMessage(operation='foo.bar')

        self.assertEqual(pyamf.encode(m).getvalue(),
            '\n\x81\x1bMflex.messaging.messages.CommandMessage\x1bcorrelationId'
            '\tbody\x11clientId\x17destination\x0fheaders\x13messageId\x13'
            'timestamp\x15timeToLive\x13operation\x01\x01\x01\x01\n\x0b\x01\x01'
            '\x01\x01\x01\x06\x0ffoo.bar\x01')

    def test_ErrorMessage(self):
        m = messaging.ErrorMessage(faultString='ValueError')

        self.assertEqual(pyamf.encode(m).getvalue(),
            '\n\x81[Iflex.messaging.messages.ErrorMessage\x1bcorrelationId\x15'
            'timeToLive\x13timestamp\x13messageId\x0fheaders\x17destination'
            '\x11clientId\tbody\x19extendedData\x13faultCode\x17faultDetail'
            '\x17faultString\x13rootCause\x01\x01\x01\x01\n\x0b\x01\x01\x01'
            '\x01\x01\n\x05\x01\x01\x01\x06\x15ValueError\n\x05\x01\x01')

    def test_RemotingMessage(self):
        m = messaging.RemotingMessage(source='foo.bar')

        self.assertEqual(pyamf.encode(m).getvalue(),
            '\n\x81\x1bOflex.messaging.messages.RemotingMessage\x15timeToLive'
            '\x13timestamp\x13messageId\x0fheaders\x17destination\x11clientId'
            '\tbody\x13operation\rsource\x01\x01\x01\n\x0b\x01\x01\x01\x01\x01'
            '\x01\x06\x0ffoo.bar\x01')
class SmallMessageTestCase(unittest.TestCase):
    """
    Tests for L{messaging.SmallMessageMixIn}
    """

    def setUp(self):
        self.decoder = pyamf.get_decoder(pyamf.AMF3)
        self.buffer = self.decoder.stream

    def test_acknowledge(self):
        # Captured AMF3 bytes of an AcknowledgeMessageExt ('DSK') message.
        bytes = ('\n\x07\x07DSK\xa8\x03\n\x0b\x01%DSMessagingVersion\x05?\xf0'
            '\x00\x00\x00\x00\x00\x00\tDSId\x06IEE0D161D-C11D-25CB-8DBE-3B77B'
            '54B55D9\x01\x05Br3&m\x85\x10\x00\x0c!\xee\r\x16\x1d\xc1(&[\xc9'
            '\x80RK\x9bE\xc6\xc4\x0c!\xee\r\x16\x1d\xc1=\x8e\xa3\xe0\x10\xef'
            '\xad;\xe5\xc5j\x02\x0c!S\x84\x83\xdb\xa9\xc8\xcaM`\x952f\xdbQ'
            '\xc9<\x00')

        self.buffer.write(bytes)
        self.buffer.seek(0)

        msg = self.decoder.readElement()

        self.assertTrue(isinstance(msg, messaging.AcknowledgeMessageExt))
        self.assertEqual(msg.body, None)
        self.assertEqual(msg.destination, None)
        self.assertEqual(msg.timeToLive, None)
        self.assertEqual(msg.timestamp, datetime.datetime(2009, 8, 19, 11, 24, 43, 985000))
        self.assertEqual(msg.headers, {
            'DSMessagingVersion': 1.0,
            'DSId': u'EE0D161D-C11D-25CB-8DBE-3B77B54B55D9'
        })
        self.assertEqual(msg.clientId, uuid.UUID('ee0d161d-c128-265b-c980-524b9b45c6c4'))
        self.assertEqual(msg.messageId, uuid.UUID('ee0d161d-c13d-8ea3-e010-efad3be5c56a'))
        self.assertEqual(msg.correlationId, uuid.UUID('538483db-a9c8-ca4d-6095-3266db51c93c'))
        self.assertEqual(self.buffer.remaining(), 0)

        # now encode the msg to check that encoding is byte for byte the same
        buffer = pyamf.encode(msg, encoding=pyamf.AMF3).getvalue()

        self.assertEqual(buffer, bytes)

    def test_command(self):
        # Captured AMF3 bytes of a CommandMessageExt ('DSC') message.
        bytes = ('\n\x07\x07DSC\x88\x02\n\x0b\x01\tDSId\x06IEE0D161D-C11D-'
            '25CB-8DBE-3B77B54B55D9\x01\x0c!\xc0\xdf\xb7|\xd6\xee$1s\x152f'
            '\xe11\xa8f\x01\x06\x01\x01\x04\x02')

        self.buffer.write(bytes)
        self.buffer.seek(0)

        msg = self.decoder.readElement()

        self.assertTrue(isinstance(msg, messaging.CommandMessageExt))
        self.assertEqual(msg.body, None)
        self.assertEqual(msg.destination, None)
        self.assertEqual(msg.timeToLive, None)
        self.assertEqual(msg.timestamp, None)
        self.assertEqual(msg.headers, {
            'DSId': u'EE0D161D-C11D-25CB-8DBE-3B77B54B55D9'
        })
        self.assertEqual(msg.clientId, None)
        self.assertEqual(msg.messageId, uuid.UUID('c0dfb77c-d6ee-2431-7315-3266e131a866'))
        self.assertEqual(msg.correlationId, u'')
        self.assertEqual(self.buffer.remaining(), 0)

        # now encode the msg to check that encoding is byte for byte the same
        buffer = pyamf.encode(msg, encoding=pyamf.AMF3).getvalue()

        self.assertEqual(buffer, bytes)

    def test_async(self):
        pass

    def test_getmessage(self):
        """
        Tests for `getSmallMessage`
        """
        # Classes without a small-message counterpart must raise.
        for cls in ['AbstractMessage', 'ErrorMessage', 'RemotingMessage']:
            cls = getattr(messaging, cls)
            self.assertRaises(NotImplementedError, cls().getSmallMessage)

        kwargs = {
            'body': {'foo': 'bar'},
            'clientId': 'spam',
            'destination': 'eggs',
            'headers': {'blarg': 'whoop'},
            'messageId': 'baz',
            'timestamp': 1234,
            'timeToLive': 99
        }

        # test async
        a = messaging.AsyncMessage(correlationId='yay', **kwargs)
        m = a.getSmallMessage()

        k = kwargs.copy()
        k.update({'correlationId': 'yay'})

        self.assertTrue(isinstance(m, messaging.AsyncMessageExt))
        self.assertEqual(m.__dict__, k)

        # test command
        a = messaging.CommandMessage(operation='yay', **kwargs)
        m = a.getSmallMessage()

        k = kwargs.copy()
        k.update({'operation': 'yay', 'correlationId': None})

        self.assertTrue(isinstance(m, messaging.CommandMessageExt))
        self.assertEqual(m.__dict__, k)

        # test ack
        a = messaging.AcknowledgeMessage(**kwargs)
        m = a.getSmallMessage()

        k = kwargs.copy()
        k.update({'correlationId': None})

        self.assertTrue(isinstance(m, messaging.AcknowledgeMessageExt))
        self.assertEqual(m.__dict__, k)
| mit |
chajadan/dragonfly | dragonfly/engines/backend_natlink/recobs.py | 5 | 2646 | #
# This file is part of Dragonfly.
# (c) Copyright 2007, 2008 by Christo Butcher
# Licensed under the LGPL.
#
# Dragonfly is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Dragonfly is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with Dragonfly. If not, see
# <http://www.gnu.org/licenses/>.
#
"""
Recognition observer class for the Natlink engine
============================================================================
"""
from ..base import RecObsManagerBase
from ...grammar.grammar import Grammar
from ...grammar.rule_base import Rule
from ...grammar.elements import Impossible
#---------------------------------------------------------------------------
class NatlinkRecObsManager(RecObsManagerBase):
    """Recognition observer manager for the Natlink engine.

    Lazily loads a helper grammar (NatlinkRecObsGrammar) while observers
    are active and unloads it again on deactivation.
    """

    def __init__(self, engine):
        RecObsManagerBase.__init__(self, engine)
        self._grammar = None

    def _activate(self):
        # Load the observer grammar on first activation only.
        if not self._grammar:
            self._grammar = NatlinkRecObsGrammar(self)
            self._grammar.load()

    def _deactivate(self):
        if self._grammar:
            self._grammar.unload()
            self._grammar = None
#---------------------------------------------------------------------------
class NatlinkRecObsGrammar(Grammar):
    """Helper grammar whose single rule is built on Impossible() and so
    should never be recognized itself; its callbacks are used purely to
    observe recognition events from other grammars.
    """

    def __init__(self, manager):
        self._manager = manager
        name = "_recobs_grammar"
        Grammar.__init__(self, name, description=None, context=None)

        rule = Rule(element=Impossible(), exported=True)
        self.add_rule(rule)

    #-----------------------------------------------------------------------
    # Callback methods for handling utterances and recognitions.

    def process_begin(self, executable, title, handle):
        # An utterance has started somewhere.
        self._manager.notify_begin()

    def process_recognition(self, words):
        # Should be unreachable because the only rule is Impossible.
        raise RuntimeError("Recognition observer received an unexpected"
                           " recognition: %s" % (words,))

    def process_recognition_other(self, words):
        # Some other grammar recognized *words* -- forward to the manager.
        self._manager.notify_recognition(words)

    def process_recognition_failure(self):
        self._manager.notify_failure()
| lgpl-3.0 |
Shanec132006/lab3 | lib/flask/testsuite/signals.py | 554 | 4807 | # -*- coding: utf-8 -*-
"""
flask.testsuite.signals
~~~~~~~~~~~~~~~~~~~~~~~
Signalling.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import flask
import unittest
from flask.testsuite import FlaskTestCase
class SignalsTestCase(FlaskTestCase):
    """Exercises the blinker-backed Flask signals."""

    def test_template_rendered(self):
        app = flask.Flask(__name__)

        @app.route('/')
        def index():
            return flask.render_template('simple_template.html', whiskey=42)

        recorded = []
        def record(sender, template, context):
            recorded.append((template, context))

        flask.template_rendered.connect(record, app)
        try:
            app.test_client().get('/')
            self.assert_equal(len(recorded), 1)
            template, context = recorded[0]
            self.assert_equal(template.name, 'simple_template.html')
            self.assert_equal(context['whiskey'], 42)
        finally:
            flask.template_rendered.disconnect(record, app)

    def test_request_signals(self):
        app = flask.Flask(__name__)
        calls = []

        def before_request_signal(sender):
            calls.append('before-signal')

        def after_request_signal(sender, response):
            self.assert_equal(response.data, b'stuff')
            calls.append('after-signal')

        @app.before_request
        def before_request_handler():
            calls.append('before-handler')

        @app.after_request
        def after_request_handler(response):
            calls.append('after-handler')
            response.data = 'stuff'
            return response

        @app.route('/')
        def index():
            calls.append('handler')
            return 'ignored anyway'

        flask.request_started.connect(before_request_signal, app)
        flask.request_finished.connect(after_request_signal, app)

        try:
            rv = app.test_client().get('/')
            self.assert_equal(rv.data, b'stuff')

            # Signals must fire around the regular handler machinery.
            self.assert_equal(calls, ['before-signal', 'before-handler',
                                      'handler', 'after-handler',
                                      'after-signal'])
        finally:
            flask.request_started.disconnect(before_request_signal, app)
            flask.request_finished.disconnect(after_request_signal, app)

    def test_request_exception_signal(self):
        app = flask.Flask(__name__)
        recorded = []

        @app.route('/')
        def index():
            1 // 0

        def record(sender, exception):
            recorded.append(exception)

        flask.got_request_exception.connect(record, app)
        try:
            self.assert_equal(app.test_client().get('/').status_code, 500)
            self.assert_equal(len(recorded), 1)
            self.assert_true(isinstance(recorded[0], ZeroDivisionError))
        finally:
            flask.got_request_exception.disconnect(record, app)

    def test_appcontext_signals(self):
        app = flask.Flask(__name__)
        recorded = []

        def record_push(sender, **kwargs):
            recorded.append('push')

        def record_pop(sender, **kwargs):
            # Fixed: this handler previously appended 'push', which
            # contradicted the ['push', 'pop'] assertion below.
            recorded.append('pop')

        @app.route('/')
        def index():
            return 'Hello'

        flask.appcontext_pushed.connect(record_push, app)
        flask.appcontext_popped.connect(record_pop, app)
        try:
            with app.test_client() as c:
                rv = c.get('/')
                self.assert_equal(rv.data, b'Hello')
                self.assert_equal(recorded, ['push'])
            # Leaving the client context pops the app context.
            self.assert_equal(recorded, ['push', 'pop'])
        finally:
            flask.appcontext_pushed.disconnect(record_push, app)
            flask.appcontext_popped.disconnect(record_pop, app)

    def test_flash_signal(self):
        app = flask.Flask(__name__)
        app.config['SECRET_KEY'] = 'secret'

        @app.route('/')
        def index():
            flask.flash('This is a flash message', category='notice')
            return flask.redirect('/other')

        recorded = []
        def record(sender, message, category):
            recorded.append((message, category))

        flask.message_flashed.connect(record, app)
        try:
            client = app.test_client()
            with client.session_transaction():
                client.get('/')
                self.assert_equal(len(recorded), 1)
                message, category = recorded[0]
                self.assert_equal(message, 'This is a flash message')
                self.assert_equal(category, 'notice')
        finally:
            flask.message_flashed.disconnect(record, app)
def suite():
    suite = unittest.TestSuite()
    # Signal tests only run when signal support (flask.signals_available)
    # is present in this environment.
    if flask.signals_available:
        suite.addTest(unittest.makeSuite(SignalsTestCase))
    return suite
| apache-2.0 |
benoitsteiner/tensorflow-xsmm | tensorflow/contrib/tensor_forest/hybrid/python/models/hard_decisions_to_data_then_nn.py | 189 | 2852 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A model that places a hard decision tree embedding before a neural net."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib import layers
from tensorflow.contrib.tensor_forest.hybrid.python import hybrid_model
from tensorflow.contrib.tensor_forest.hybrid.python.layers import decisions_to_data
from tensorflow.contrib.tensor_forest.hybrid.python.layers import fully_connected
from tensorflow.python.ops import nn_ops
from tensorflow.python.training import adagrad
class HardDecisionsToDataThenNN(hybrid_model.HybridModel):
  """A model that treats tree inference as hard at test."""

  def __init__(self,
               params,
               device_assigner=None,
               optimizer_class=adagrad.AdagradOptimizer,
               **kwargs):
    super(HardDecisionsToDataThenNN, self).__init__(
        params,
        device_assigner=device_assigner,
        optimizer_class=optimizer_class,
        **kwargs)

    # Layer 0 embeds inputs via (hard) tree decisions; layer 1 is a
    # fully connected layer on top of that embedding.
    self.layers = [decisions_to_data.HardDecisionsToDataLayer(
        params, 0, device_assigner),
                   fully_connected.FullyConnectedLayer(
                       params, 1, device_assigner=device_assigner)]

  def _base_inference(self, data, data_spec=None, soft=False):
    # soft=True runs the differentiable version of the tree layer,
    # soft=False the hard-decision version.
    if soft:
      inference_result = self.layers[0].soft_inference_graph(data)
    else:
      inference_result = self._do_layer_inference(self.layers[0], data)

    for layer in self.layers[1:]:
      inference_result = self._do_layer_inference(layer, inference_result)

    output_size = 1 if self.is_regression else self.params.num_classes
    output = layers.fully_connected(
        inference_result, output_size, activation_fn=nn_ops.softmax)
    return output

  def inference_graph(self, data, data_spec=None):
    """Returns the op that performs inference on a batch of data."""
    # NOTE(review): _base_inference already ends in a softmax-activated
    # layer, so this applies softmax twice -- confirm this is intended.
    return nn_ops.softmax(
        self._base_inference(
            data, data_spec=data_spec, soft=True))

  # pylint: disable=unused-argument
  def training_inference_graph(self, data, data_spec=None):
    # Training uses the hard-decision path (soft=False).
    return self._base_inference(data, data_spec=data_spec, soft=False)
| apache-2.0 |
menardorama/ReadyNAS-Add-ons | headphones-1.0.0/debian/headphones/apps/headphones/lib/requests/packages/chardet/mbcssm.py | 1783 | 19590 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .constants import eStart, eError, eItsMe
# BIG5
BIG5_cls = (
1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,1, # 78 - 7f
4,4,4,4,4,4,4,4, # 80 - 87
4,4,4,4,4,4,4,4, # 88 - 8f
4,4,4,4,4,4,4,4, # 90 - 97
4,4,4,4,4,4,4,4, # 98 - 9f
4,3,3,3,3,3,3,3, # a0 - a7
3,3,3,3,3,3,3,3, # a8 - af
3,3,3,3,3,3,3,3, # b0 - b7
3,3,3,3,3,3,3,3, # b8 - bf
3,3,3,3,3,3,3,3, # c0 - c7
3,3,3,3,3,3,3,3, # c8 - cf
3,3,3,3,3,3,3,3, # d0 - d7
3,3,3,3,3,3,3,3, # d8 - df
3,3,3,3,3,3,3,3, # e0 - e7
3,3,3,3,3,3,3,3, # e8 - ef
3,3,3,3,3,3,3,3, # f0 - f7
3,3,3,3,3,3,3,0 # f8 - ff
)
BIG5_st = (
eError,eStart,eStart, 3,eError,eError,eError,eError,#00-07
eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,#08-0f
eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart#10-17
)
Big5CharLenTable = (0, 1, 1, 2, 0)
Big5SMModel = {'classTable': BIG5_cls,
'classFactor': 5,
'stateTable': BIG5_st,
'charLenTable': Big5CharLenTable,
'name': 'Big5'}
# CP949
CP949_cls = (
1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f
1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f
1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f
1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f
1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f
4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f
1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f
5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f
0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f
6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f
6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af
7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf
7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf
2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df
2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef
2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff
)
CP949_st = (
#cls= 0 1 2 3 4 5 6 7 8 9 # previous state =
eError,eStart, 3,eError,eStart,eStart, 4, 5,eError, 6, # eStart
eError,eError,eError,eError,eError,eError,eError,eError,eError,eError, # eError
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe, # eItsMe
eError,eError,eStart,eStart,eError,eError,eError,eStart,eStart,eStart, # 3
eError,eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart, # 4
eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart, # 5
eError,eStart,eStart,eStart,eStart,eError,eError,eStart,eStart,eStart, # 6
)
CP949CharLenTable = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2)
CP949SMModel = {'classTable': CP949_cls,
'classFactor': 10,
'stateTable': CP949_st,
'charLenTable': CP949CharLenTable,
'name': 'CP949'}
# EUC-JP
EUCJP_cls = (
4,4,4,4,4,4,4,4, # 00 - 07
4,4,4,4,4,4,5,5, # 08 - 0f
4,4,4,4,4,4,4,4, # 10 - 17
4,4,4,5,4,4,4,4, # 18 - 1f
4,4,4,4,4,4,4,4, # 20 - 27
4,4,4,4,4,4,4,4, # 28 - 2f
4,4,4,4,4,4,4,4, # 30 - 37
4,4,4,4,4,4,4,4, # 38 - 3f
4,4,4,4,4,4,4,4, # 40 - 47
4,4,4,4,4,4,4,4, # 48 - 4f
4,4,4,4,4,4,4,4, # 50 - 57
4,4,4,4,4,4,4,4, # 58 - 5f
4,4,4,4,4,4,4,4, # 60 - 67
4,4,4,4,4,4,4,4, # 68 - 6f
4,4,4,4,4,4,4,4, # 70 - 77
4,4,4,4,4,4,4,4, # 78 - 7f
5,5,5,5,5,5,5,5, # 80 - 87
5,5,5,5,5,5,1,3, # 88 - 8f
5,5,5,5,5,5,5,5, # 90 - 97
5,5,5,5,5,5,5,5, # 98 - 9f
5,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
0,0,0,0,0,0,0,0, # e0 - e7
0,0,0,0,0,0,0,0, # e8 - ef
0,0,0,0,0,0,0,0, # f0 - f7
0,0,0,0,0,0,0,5 # f8 - ff
)
EUCJP_st = (
3, 4, 3, 5,eStart,eError,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eStart,eError,eStart,eError,eError,eError,#10-17
eError,eError,eStart,eError,eError,eError, 3,eError,#18-1f
3,eError,eError,eError,eStart,eStart,eStart,eStart#20-27
)
EUCJPCharLenTable = (2, 2, 2, 3, 1, 0)
EUCJPSMModel = {'classTable': EUCJP_cls,
'classFactor': 6,
'stateTable': EUCJP_st,
'charLenTable': EUCJPCharLenTable,
'name': 'EUC-JP'}
# EUC-KR
EUCKR_cls = (
1,1,1,1,1,1,1,1, # 00 - 07
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
1,1,1,1,1,1,1,1, # 40 - 47
1,1,1,1,1,1,1,1, # 48 - 4f
1,1,1,1,1,1,1,1, # 50 - 57
1,1,1,1,1,1,1,1, # 58 - 5f
1,1,1,1,1,1,1,1, # 60 - 67
1,1,1,1,1,1,1,1, # 68 - 6f
1,1,1,1,1,1,1,1, # 70 - 77
1,1,1,1,1,1,1,1, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,0,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,3,3,3, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,3,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
2,2,2,2,2,2,2,2, # e0 - e7
2,2,2,2,2,2,2,2, # e8 - ef
2,2,2,2,2,2,2,2, # f0 - f7
2,2,2,2,2,2,2,0 # f8 - ff
)
EUCKR_st = (
eError,eStart, 3,eError,eError,eError,eError,eError,#00-07
eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,eStart #08-0f
)
EUCKRCharLenTable = (0, 1, 2, 0)
EUCKRSMModel = {'classTable': EUCKR_cls,
'classFactor': 4,
'stateTable': EUCKR_st,
'charLenTable': EUCKRCharLenTable,
'name': 'EUC-KR'}
# EUC-TW
EUCTW_cls = (
2,2,2,2,2,2,2,2, # 00 - 07
2,2,2,2,2,2,0,0, # 08 - 0f
2,2,2,2,2,2,2,2, # 10 - 17
2,2,2,0,2,2,2,2, # 18 - 1f
2,2,2,2,2,2,2,2, # 20 - 27
2,2,2,2,2,2,2,2, # 28 - 2f
2,2,2,2,2,2,2,2, # 30 - 37
2,2,2,2,2,2,2,2, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,2, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,6,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,3,4,4,4,4,4,4, # a0 - a7
5,5,1,1,1,1,1,1, # a8 - af
1,1,1,1,1,1,1,1, # b0 - b7
1,1,1,1,1,1,1,1, # b8 - bf
1,1,3,1,3,3,3,3, # c0 - c7
3,3,3,3,3,3,3,3, # c8 - cf
3,3,3,3,3,3,3,3, # d0 - d7
3,3,3,3,3,3,3,3, # d8 - df
3,3,3,3,3,3,3,3, # e0 - e7
3,3,3,3,3,3,3,3, # e8 - ef
3,3,3,3,3,3,3,3, # f0 - f7
3,3,3,3,3,3,3,0 # f8 - ff
)
EUCTW_st = (
eError,eError,eStart, 3, 3, 3, 4,eError,#00-07
eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eStart,eError,#10-17
eStart,eStart,eStart,eError,eError,eError,eError,eError,#18-1f
5,eError,eError,eError,eStart,eError,eStart,eStart,#20-27
eStart,eError,eStart,eStart,eStart,eStart,eStart,eStart #28-2f
)
EUCTWCharLenTable = (0, 0, 1, 2, 2, 2, 3)
EUCTWSMModel = {'classTable': EUCTW_cls,
'classFactor': 7,
'stateTable': EUCTW_st,
'charLenTable': EUCTWCharLenTable,
'name': 'x-euc-tw'}
# GB2312
GB2312_cls = (
1,1,1,1,1,1,1,1, # 00 - 07
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
3,3,3,3,3,3,3,3, # 30 - 37
3,3,1,1,1,1,1,1, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,4, # 78 - 7f
5,6,6,6,6,6,6,6, # 80 - 87
6,6,6,6,6,6,6,6, # 88 - 8f
6,6,6,6,6,6,6,6, # 90 - 97
6,6,6,6,6,6,6,6, # 98 - 9f
6,6,6,6,6,6,6,6, # a0 - a7
6,6,6,6,6,6,6,6, # a8 - af
6,6,6,6,6,6,6,6, # b0 - b7
6,6,6,6,6,6,6,6, # b8 - bf
6,6,6,6,6,6,6,6, # c0 - c7
6,6,6,6,6,6,6,6, # c8 - cf
6,6,6,6,6,6,6,6, # d0 - d7
6,6,6,6,6,6,6,6, # d8 - df
6,6,6,6,6,6,6,6, # e0 - e7
6,6,6,6,6,6,6,6, # e8 - ef
6,6,6,6,6,6,6,6, # f0 - f7
6,6,6,6,6,6,6,0 # f8 - ff
)
GB2312_st = (
eError,eStart,eStart,eStart,eStart,eStart, 3,eError,#00-07
eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,#10-17
4,eError,eStart,eStart,eError,eError,eError,eError,#18-1f
eError,eError, 5,eError,eError,eError,eItsMe,eError,#20-27
eError,eError,eStart,eStart,eStart,eStart,eStart,eStart #28-2f
)
# To be accurate, the length of class 6 can be either 2 or 4.
# But it is not necessary to discriminate between the two since
# it is used for frequency analysis only, and we are validating
# each code range there as well. So it is safe to set it to be
# 2 here.
GB2312CharLenTable = (0, 1, 1, 1, 1, 1, 2)
GB2312SMModel = {'classTable': GB2312_cls,
'classFactor': 7,
'stateTable': GB2312_st,
'charLenTable': GB2312CharLenTable,
'name': 'GB2312'}
# Shift_JIS
SJIS_cls = (
1,1,1,1,1,1,1,1, # 00 - 07
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,1, # 78 - 7f
3,3,3,3,3,2,2,3, # 80 - 87
3,3,3,3,3,3,3,3, # 88 - 8f
3,3,3,3,3,3,3,3, # 90 - 97
3,3,3,3,3,3,3,3, # 98 - 9f
    #0xa0 is illegal in sjis encoding, but some pages do
    #contain such bytes. We need to be more error-forgiving.
2,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
3,3,3,3,3,3,3,3, # e0 - e7
3,3,3,3,3,4,4,4, # e8 - ef
3,3,3,3,3,3,3,3, # f0 - f7
3,3,3,3,3,0,0,0) # f8 - ff
SJIS_st = (
eError,eStart,eStart, 3,eError,eError,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eError,eError,eStart,eStart,eStart,eStart #10-17
)
SJISCharLenTable = (0, 1, 1, 2, 0, 0)
SJISSMModel = {'classTable': SJIS_cls,
'classFactor': 6,
'stateTable': SJIS_st,
'charLenTable': SJISCharLenTable,
'name': 'Shift_JIS'}
# UCS2-BE
UCS2BE_cls = (
0,0,0,0,0,0,0,0, # 00 - 07
0,0,1,0,0,2,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,3,0,0,0,0, # 18 - 1f
0,0,0,0,0,0,0,0, # 20 - 27
0,3,3,3,3,3,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,0,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,0,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,0,0,0,0,0,0,0, # a0 - a7
0,0,0,0,0,0,0,0, # a8 - af
0,0,0,0,0,0,0,0, # b0 - b7
0,0,0,0,0,0,0,0, # b8 - bf
0,0,0,0,0,0,0,0, # c0 - c7
0,0,0,0,0,0,0,0, # c8 - cf
0,0,0,0,0,0,0,0, # d0 - d7
0,0,0,0,0,0,0,0, # d8 - df
0,0,0,0,0,0,0,0, # e0 - e7
0,0,0,0,0,0,0,0, # e8 - ef
0,0,0,0,0,0,0,0, # f0 - f7
0,0,0,0,0,0,4,5 # f8 - ff
)
UCS2BE_st = (
5, 7, 7,eError, 4, 3,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe, 6, 6, 6, 6,eError,eError,#10-17
6, 6, 6, 6, 6,eItsMe, 6, 6,#18-1f
6, 6, 6, 6, 5, 7, 7,eError,#20-27
5, 8, 6, 6,eError, 6, 6, 6,#28-2f
6, 6, 6, 6,eError,eError,eStart,eStart #30-37
)
UCS2BECharLenTable = (2, 2, 2, 0, 2, 2)
UCS2BESMModel = {'classTable': UCS2BE_cls,
'classFactor': 6,
'stateTable': UCS2BE_st,
'charLenTable': UCS2BECharLenTable,
'name': 'UTF-16BE'}
# UCS2-LE
UCS2LE_cls = (
0,0,0,0,0,0,0,0, # 00 - 07
0,0,1,0,0,2,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,3,0,0,0,0, # 18 - 1f
0,0,0,0,0,0,0,0, # 20 - 27
0,3,3,3,3,3,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,0,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,0,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,0,0,0,0,0,0,0, # a0 - a7
0,0,0,0,0,0,0,0, # a8 - af
0,0,0,0,0,0,0,0, # b0 - b7
0,0,0,0,0,0,0,0, # b8 - bf
0,0,0,0,0,0,0,0, # c0 - c7
0,0,0,0,0,0,0,0, # c8 - cf
0,0,0,0,0,0,0,0, # d0 - d7
0,0,0,0,0,0,0,0, # d8 - df
0,0,0,0,0,0,0,0, # e0 - e7
0,0,0,0,0,0,0,0, # e8 - ef
0,0,0,0,0,0,0,0, # f0 - f7
0,0,0,0,0,0,4,5 # f8 - ff
)
UCS2LE_st = (
6, 6, 7, 6, 4, 3,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe, 5, 5, 5,eError,eItsMe,eError,#10-17
5, 5, 5,eError, 5,eError, 6, 6,#18-1f
7, 6, 8, 8, 5, 5, 5,eError,#20-27
5, 5, 5,eError,eError,eError, 5, 5,#28-2f
5, 5, 5,eError, 5,eError,eStart,eStart #30-37
)
UCS2LECharLenTable = (2, 2, 2, 2, 2, 2)
UCS2LESMModel = {'classTable': UCS2LE_cls,
'classFactor': 6,
'stateTable': UCS2LE_st,
'charLenTable': UCS2LECharLenTable,
'name': 'UTF-16LE'}
# UTF-8
UTF8_cls = (
1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
1,1,1,1,1,1,1,1, # 40 - 47
1,1,1,1,1,1,1,1, # 48 - 4f
1,1,1,1,1,1,1,1, # 50 - 57
1,1,1,1,1,1,1,1, # 58 - 5f
1,1,1,1,1,1,1,1, # 60 - 67
1,1,1,1,1,1,1,1, # 68 - 6f
1,1,1,1,1,1,1,1, # 70 - 77
1,1,1,1,1,1,1,1, # 78 - 7f
2,2,2,2,3,3,3,3, # 80 - 87
4,4,4,4,4,4,4,4, # 88 - 8f
4,4,4,4,4,4,4,4, # 90 - 97
4,4,4,4,4,4,4,4, # 98 - 9f
5,5,5,5,5,5,5,5, # a0 - a7
5,5,5,5,5,5,5,5, # a8 - af
5,5,5,5,5,5,5,5, # b0 - b7
5,5,5,5,5,5,5,5, # b8 - bf
0,0,6,6,6,6,6,6, # c0 - c7
6,6,6,6,6,6,6,6, # c8 - cf
6,6,6,6,6,6,6,6, # d0 - d7
6,6,6,6,6,6,6,6, # d8 - df
7,8,8,8,8,8,8,8, # e0 - e7
8,8,8,8,8,9,8,8, # e8 - ef
10,11,11,11,11,11,11,11, # f0 - f7
12,13,13,13,14,15,0,0 # f8 - ff
)
UTF8_st = (
eError,eStart,eError,eError,eError,eError, 12, 10,#00-07
9, 11, 8, 7, 6, 5, 4, 3,#08-0f
eError,eError,eError,eError,eError,eError,eError,eError,#10-17
eError,eError,eError,eError,eError,eError,eError,eError,#18-1f
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#20-27
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#28-2f
eError,eError, 5, 5, 5, 5,eError,eError,#30-37
eError,eError,eError,eError,eError,eError,eError,eError,#38-3f
eError,eError,eError, 5, 5, 5,eError,eError,#40-47
eError,eError,eError,eError,eError,eError,eError,eError,#48-4f
eError,eError, 7, 7, 7, 7,eError,eError,#50-57
eError,eError,eError,eError,eError,eError,eError,eError,#58-5f
eError,eError,eError,eError, 7, 7,eError,eError,#60-67
eError,eError,eError,eError,eError,eError,eError,eError,#68-6f
eError,eError, 9, 9, 9, 9,eError,eError,#70-77
eError,eError,eError,eError,eError,eError,eError,eError,#78-7f
eError,eError,eError,eError,eError, 9,eError,eError,#80-87
eError,eError,eError,eError,eError,eError,eError,eError,#88-8f
eError,eError, 12, 12, 12, 12,eError,eError,#90-97
eError,eError,eError,eError,eError,eError,eError,eError,#98-9f
eError,eError,eError,eError,eError, 12,eError,eError,#a0-a7
eError,eError,eError,eError,eError,eError,eError,eError,#a8-af
eError,eError, 12, 12, 12,eError,eError,eError,#b0-b7
eError,eError,eError,eError,eError,eError,eError,eError,#b8-bf
eError,eError,eStart,eStart,eStart,eStart,eError,eError,#c0-c7
eError,eError,eError,eError,eError,eError,eError,eError #c8-cf
)
UTF8CharLenTable = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6)
UTF8SMModel = {'classTable': UTF8_cls,
'classFactor': 16,
'stateTable': UTF8_st,
'charLenTable': UTF8CharLenTable,
'name': 'UTF-8'}
| gpl-2.0 |
vmindru/ansible | lib/ansible/modules/cloud/scaleway/scaleway_security_group_rule.py | 28 | 7313 | #!/usr/bin/python
#
# Scaleway Security Group Rule management module
#
# Copyright (C) 2018 Antoine Barbare (antoinebarbare@gmail.com).
#
# GNU General Public License v3.0+ (see COPYING or
# https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: scaleway_security_group_rule
short_description: Scaleway Security Group Rule management module
version_added: "2.8"
author: Antoine Barbare (@abarbare)
description:
- This module manages Security Group Rule on Scaleway account
U(https://developer.scaleway.com)
extends_documentation_fragment: scaleway
options:
state:
description:
- Indicate desired state of the Security Group Rule.
default: present
choices:
- present
- absent
region:
description:
- Scaleway region to use (for example C(par1)).
required: true
choices:
- ams1
- EMEA-NL-EVS
- par1
- EMEA-FR-PAR1
protocol:
description:
- Network protocol to use
choices:
- TCP
- UDP
- ICMP
required: true
port:
description:
- Port related to the rule, null value for all the ports
required: true
type: int
ip_range:
description:
- IPV4 CIDR notation to apply to the rule
default: 0.0.0.0/0
direction:
description:
- Rule direction
choices:
- inbound
- outbound
required: true
action:
description:
- Rule action
choices:
- accept
- drop
required: true
security_group:
description:
- Security Group unique identifier
required: true
'''
EXAMPLES = '''
- name: Create a Security Group Rule
scaleway_security_group_rule:
state: present
region: par1
protocol: TCP
port: 80
ip_range: 0.0.0.0/0
direction: inbound
action: accept
security_group: b57210ee-1281-4820-a6db-329f78596ecb
register: security_group_rule_creation_task
'''
RETURN = '''
data:
description: This is only present when C(state=present)
returned: when C(state=present)
type: dict
sample: {
"scaleway_security_group_rule": {
"direction": "inbound",
"protocol": "TCP",
"ip_range": "0.0.0.0/0",
"dest_port_from": 80,
"action": "accept",
"position": 2,
"dest_port_to": null,
"editable": null,
"id": "10cb0b9a-80f6-4830-abd7-a31cd828b5e9"
}
}
'''
from ansible.module_utils.scaleway import SCALEWAY_LOCATION, scaleway_argument_spec, Scaleway, payload_from_object
from ansible.module_utils.compat.ipaddress import ip_network
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import AnsibleModule
def get_sgr_from_api(security_group_rules, security_group_rule):
    """Find a rule in ``security_group_rules`` matching ``security_group_rule``.

    Two rules match when their ip_range, dest_port_from, direction, action
    and protocol fields are all equal.

    Returns the matching rule dict, or None when no rule matches.
    """
    matched_fields = ('ip_range', 'dest_port_from', 'direction', 'action',
                      'protocol')
    for candidate in security_group_rules:
        if all(candidate[field] == security_group_rule[field]
               for field in matched_fields):
            return candidate
    return None
def present_strategy(api, security_group_id, security_group_rule):
    """Ensure the rule exists in the security group; create it when missing.

    Idempotent: if an identical rule already exists nothing is changed.

    :param api: Scaleway API client (also carries the AnsibleModule as api.module).
    :param security_group_id: unique ID of the target security group.
    :param security_group_rule: dict describing the desired rule.
    :return: result dict with 'changed' and (unless failing in check mode)
             'scaleway_security_group_rule'.
    """
    ret = {'changed': False}
    # Fetch the current rule set; fail_json() exits the module on API errors.
    response = api.get('security_groups/%s/rules' % security_group_id)
    if not response.ok:
        api.module.fail_json(
            msg='Error getting security group rules "%s": "%s" (%s)' %
            (response.info['msg'], response.json['message'], response.json))
    existing_rule = get_sgr_from_api(
        response.json['rules'], security_group_rule)
    if not existing_rule:
        ret['changed'] = True
        # In check mode, report the pending change without touching the API.
        if api.module.check_mode:
            return ret
        # Create Security Group Rule
        response = api.post('/security_groups/%s/rules' % security_group_id,
                            data=payload_from_object(security_group_rule))
        if not response.ok:
            api.module.fail_json(
                msg='Error during security group rule creation: "%s": "%s" (%s)' %
                (response.info['msg'], response.json['message'], response.json))
        ret['scaleway_security_group_rule'] = response.json['rule']
    else:
        # Rule already present: no change; return the live rule as-is.
        ret['scaleway_security_group_rule'] = existing_rule
    return ret
def absent_strategy(api, security_group_id, security_group_rule):
    """Ensure the rule is absent from the security group; delete it when present.

    Idempotent: if no matching rule exists nothing is changed.

    :param api: Scaleway API client (also carries the AnsibleModule as api.module).
    :param security_group_id: unique ID of the target security group.
    :param security_group_rule: dict describing the rule to remove.
    :return: result dict with 'changed'.
    """
    ret = {'changed': False}
    # Fetch the current rule set; fail_json() exits the module on API errors.
    response = api.get('security_groups/%s/rules' % security_group_id)
    if not response.ok:
        api.module.fail_json(
            msg='Error getting security group rules "%s": "%s" (%s)' %
            (response.info['msg'], response.json['message'], response.json))
    existing_rule = get_sgr_from_api(
        response.json['rules'], security_group_rule)
    # Nothing matched: nothing to delete.
    if not existing_rule:
        return ret
    ret['changed'] = True
    # In check mode, report the pending change without touching the API.
    if api.module.check_mode:
        return ret
    response = api.delete(
        '/security_groups/%s/rules/%s' %
        (security_group_id, existing_rule['id']))
    if not response.ok:
        api.module.fail_json(
            msg='Error deleting security group rule "%s": "%s" (%s)' %
            (response.info['msg'], response.json['message'], response.json))
    return ret
def core(module):
    """Build the rule payload from module params and apply the desired state.

    Exits the module via module.exit_json() with the strategy's summary.
    """
    api = Scaleway(module=module)
    # Desired rule, assembled from the validated module parameters.
    security_group_rule = {
        'protocol': module.params['protocol'],
        'dest_port_from': module.params['port'],
        'ip_range': module.params['ip_range'],
        'direction': module.params['direction'],
        'action': module.params['action'],
    }
    # Resolve the regional API endpoint before any request is made.
    region = module.params['region']
    module.params['api_url'] = SCALEWAY_LOCATION[region]['api_endpoint']
    if module.params['state'] == 'present':
        summary = present_strategy(
            api=api,
            security_group_id=module.params['security_group'],
            security_group_rule=security_group_rule)
    else:
        summary = absent_strategy(
            api=api,
            security_group_id=module.params['security_group'],
            security_group_rule=security_group_rule)
    module.exit_json(**summary)
def main():
    """Entry point: declare the argument spec and run the module logic."""
    argument_spec = scaleway_argument_spec()
    argument_spec.update(dict(
        state=dict(default='present', choices=['absent', 'present']),
        # NOTE(review): a dict_keys view is passed as choices; AnsibleModule
        # appears to accept any iterable here, but list(...) would be safer —
        # confirm against the supported ansible versions.
        region=dict(required=True, choices=SCALEWAY_LOCATION.keys()),
        protocol=dict(required=True, choices=['TCP', 'UDP', 'ICMP']),
        port=dict(required=True, type=int),
        # ip_range is normalized and validated through ipaddress.ip_network,
        # so invalid CIDR input fails during argument parsing.
        ip_range=dict(default='0.0.0.0/0', type=lambda x: to_text(ip_network(to_text(x)))),
        direction=dict(required=True, choices=['inbound', 'outbound']),
        action=dict(required=True, choices=['accept', 'drop']),
        security_group=dict(required=True),
    ))
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
    )
    core(module)
if __name__ == '__main__':
    main()
| gpl-3.0 |
tareqalayan/ansible | lib/ansible/modules/cloud/amazon/aws_s3_bucket_facts.py | 48 | 2664 | #!/usr/bin/python
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: aws_s3_bucket_facts
short_description: Lists S3 buckets in AWS
requirements:
- boto3 >= 1.4.4
- python >= 2.6
description:
- Lists S3 buckets in AWS
version_added: "2.4"
author: "Gerben Geijteman (@hyperized)"
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Note: Only AWS S3 is currently supported
# Lists all s3 buckets
- aws_s3_bucket_facts:
'''
RETURN = '''
buckets:
description: "List of buckets"
returned: always
sample:
- creation_date: 2017-07-06 15:05:12 +00:00
name: my_bucket
type: list
'''
import traceback
try:
import botocore
except ImportError:
pass # will be detected by imported HAS_BOTO3
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import (boto3_conn, ec2_argument_spec, HAS_BOTO3, camel_dict_to_snake_dict,
get_aws_connection_info)
def get_bucket_list(module, connection):
    """
    Return the account's S3 buckets as a list of snake_cased dicts.

    Calls fail_json() (which exits the module) on any API error.

    :param module: AnsibleModule instance, used for error reporting.
    :param connection: boto3 S3 client.
    :return: list of bucket dicts (keys such as 'name', 'creation_date').
    """
    try:
        buckets = camel_dict_to_snake_dict(connection.list_buckets())['buckets']
    except botocore.exceptions.ClientError as e:
        # Bug fix: `e.message` does not exist on Python 3 exceptions and would
        # raise AttributeError, masking the real API error; str(e) works on
        # both Python 2 and 3.
        module.fail_json(msg=str(e), exception=traceback.format_exc(),
                         **camel_dict_to_snake_dict(e.response))
    return buckets
def main():
    """
    Entry point: list the account's S3 buckets and exit with the facts.
    :return: never returns normally; exits via module.exit_json/fail_json.
    """
    # Accumulator for the facts returned to Ansible.
    result = {}
    # Including ec2 argument spec (region/credentials parameters).
    module = AnsibleModule(argument_spec=ec2_argument_spec(), supports_check_mode=True)
    # Verify boto3 is importable before doing any AWS work.
    if not HAS_BOTO3:
        module.fail_json(msg='boto3 required for this module')
    # Set up the boto3 S3 client from the module's connection parameters.
    region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=HAS_BOTO3)
    connection = boto3_conn(module, conn_type='client', resource='s3', region=region, endpoint=ec2_url,
                            **aws_connect_params)
    # Gather results
    result['buckets'] = get_bucket_list(module, connection)
    # Return the bucket list as ansible facts.
    module.exit_json(msg="Retrieved s3 facts.", ansible_facts=result)
if __name__ == '__main__':
    main()
| gpl-3.0 |
simcity4242/python-bitcoinlib | bitcoin/core/serialize.py | 7 | 9557 | # Copyright (C) 2012-2014 The python-bitcoinlib developers
#
# This file is part of python-bitcoinlib.
#
# It is subject to the license terms in the LICENSE file found in the top-level
# directory of this distribution.
#
# No part of python-bitcoinlib, including this file, may be copied, modified,
# propagated, or distributed except according to the terms contained in the
# LICENSE file.
"""Serialization routines
You probably don't need to use these directly.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import hashlib
import struct
# Py3 compatibility: byte/char helpers and an in-memory binary stream class
# that behave the same on Python 2 (str-based bytes) and Python 3
# (int-based bytes).
import sys
if sys.version > '3':
    _bchr = lambda x: bytes([x])  # int -> single-byte bytes object
    _bord = lambda x: x[0]        # single-byte bytes object -> int
    from io import BytesIO as _BytesIO
else:
    _bchr = chr
    _bord = ord
    from cStringIO import StringIO as _BytesIO
# Maximum number of bytes ser_read() will accept in a single read
# (0x02000000 = 32 MiB) — a sanity limit against maliciously large
# length prefixes in serialized data.
MAX_SIZE = 0x02000000
def Hash(msg):
    """SHA256(SHA256(msg)) -> bytes

    Bitcoin's standard double-SHA256 of *msg*; always returns 32 bytes.
    (Docstring fix: the original read "SHA256^2)(msg)" with a stray paren.)
    """
    return hashlib.sha256(hashlib.sha256(msg).digest()).digest()
def Hash160(msg):
    """RIPEMD160(SHA256(msg)) -> bytes

    Bitcoin's HASH160; always returns 20 bytes.
    """
    sha = hashlib.sha256(msg).digest()
    # hashlib.new() accepts the initial data directly, replacing the
    # separate update() call.
    return hashlib.new('ripemd160', sha).digest()
class SerializationError(Exception):
    """Base class for all serialization and deserialization errors raised by this module"""
class SerializationTruncationError(SerializationError):
    """Serialized data was truncated (fewer bytes available than requested)

    Thrown by deserialize() and stream_deserialize()
    """
class DeserializationExtraDataError(SerializationError):
    """Deserialized data had extra data at the end

    Thrown by deserialize() when not all data is consumed during
    deserialization. The deserialized object and extra padding not consumed are
    saved on the exception (as .obj and .padding respectively).
    """
    def __init__(self, msg, obj, padding):
        super(DeserializationExtraDataError, self).__init__(msg)
        # The successfully deserialized object.
        self.obj = obj
        # The unconsumed trailing bytes.
        self.padding = padding
def ser_read(f, n):
    """Read exactly *n* bytes from stream *f*, or raise.

    Raises SerializationError when n exceeds MAX_SIZE, and
    SerializationTruncationError when the stream runs out of data.
    Use this instead of f.read() in your classes stream_(de)serialization()
    functions.
    """
    if n > MAX_SIZE:
        # Bug fix: the '%x' placeholder was never given an argument, so the
        # message previously contained a literal '0x%x'.
        raise SerializationError('Asked to read 0x%x bytes; MAX_SIZE exceeded' % n)
    r = f.read(n)
    if len(r) < n:
        raise SerializationTruncationError('Asked to read %i bytes, but only got %i' % (n, len(r)))
    return r
class Serializable(object):
    """Base class for serializable objects.

    Subclasses implement stream_serialize()/stream_deserialize(); this base
    provides buffer-based serialize()/deserialize(), hashing, and equality
    defined in terms of the serialized bytes.
    """
    __slots__ = []

    def stream_serialize(self, f):
        """Serialize to a stream"""
        raise NotImplementedError

    @classmethod
    def stream_deserialize(cls, f):
        """Deserialize from a stream"""
        raise NotImplementedError

    def serialize(self):
        """Serialize, returning bytes"""
        stream = _BytesIO()
        self.stream_serialize(stream)
        return stream.getvalue()

    @classmethod
    def deserialize(cls, buf, allow_padding=False):
        """Deserialize bytes, returning an instance

        allow_padding - Allow buf to include extra padding. (default False)

        If allow_padding is False and not all bytes are consumed during
        deserialization DeserializationExtraDataError will be raised.
        """
        fd = _BytesIO(buf)
        obj = cls.stream_deserialize(fd)
        if allow_padding:
            return obj
        leftover = fd.read()
        if leftover:
            raise DeserializationExtraDataError(
                'Not all bytes consumed during deserialization',
                obj, leftover)
        return obj

    def GetHash(self):
        """Return the hash of the serialized object"""
        return Hash(self.serialize())

    def __eq__(self, other):
        # Comparable only within the same class family; otherwise defer to
        # the other operand via NotImplemented.
        related = (isinstance(other, self.__class__) or
                   isinstance(self, other.__class__))
        if not related:
            return NotImplemented
        return self.serialize() == other.serialize()

    def __ne__(self, other):
        return not (self == other)

    def __hash__(self):
        return hash(self.serialize())
class ImmutableSerializable(Serializable):
    """Immutable serializable object

    Attribute assignment and deletion are blocked, which makes it safe for
    GetHash() and __hash__() to compute their result once and cache it.
    """
    # Cache slots; written via object.__setattr__ to bypass the immutability
    # guard below.
    __slots__ = ['_cached_GetHash', '_cached__hash__']
    def __setattr__(self, name, value):
        raise AttributeError('Object is immutable')
    def __delattr__(self, name):
        raise AttributeError('Object is immutable')
    def GetHash(self):
        """Return the (cached) hash of the serialized object"""
        try:
            return self._cached_GetHash
        except AttributeError:
            # First call: compute once, then cache on the instance.
            _cached_GetHash = super(ImmutableSerializable, self).GetHash()
            object.__setattr__(self, '_cached_GetHash', _cached_GetHash)
            return _cached_GetHash
    def __hash__(self):
        try:
            return self._cached__hash__
        except AttributeError:
            # First call: compute once, then cache on the instance.
            _cached__hash__ = hash(self.serialize())
            object.__setattr__(self, '_cached__hash__', _cached__hash__)
            return _cached__hash__
class Serializer(object):
    """Base class for object serializers.

    Serializers are stateless namespaces of classmethods and are never
    instantiated (``__new__`` raises).
    """

    def __new__(cls):
        raise NotImplementedError

    @classmethod
    def stream_serialize(cls, obj, f):
        """Write *obj* to stream *f*; implemented by subclasses."""
        raise NotImplementedError

    @classmethod
    def stream_deserialize(cls, f):
        """Read an object from stream *f*; implemented by subclasses."""
        raise NotImplementedError

    @classmethod
    def serialize(cls, obj):
        """Serialize *obj*, returning bytes."""
        stream = _BytesIO()
        cls.stream_serialize(obj, stream)
        return stream.getvalue()

    @classmethod
    def deserialize(cls, buf):
        """Deserialize an object from the bytes in *buf*."""
        stream = _BytesIO(buf)
        return cls.stream_deserialize(stream)
class VarIntSerializer(Serializer):
    """Serialization of variable length ints (Bitcoin CompactSize format)"""

    @classmethod
    def stream_serialize(cls, i, f):
        """Write the non-negative int *i* to stream *f* as a varint."""
        if i < 0:
            raise ValueError('varint must be non-negative integer')
        if i < 0xfd:
            # Small values are encoded directly as a single byte.
            f.write(_bchr(i))
            return
        # Larger values get a one-byte marker followed by the smallest
        # sufficient little-endian fixed-width integer.
        for marker, fmt, limit in ((0xfd, b'<H', 0xffff),
                                   (0xfe, b'<I', 0xffffffff),
                                   (0xff, b'<Q', None)):
            if limit is None or i <= limit:
                f.write(_bchr(marker))
                f.write(struct.pack(fmt, i))
                return

    @classmethod
    def stream_deserialize(cls, f):
        """Read a varint from stream *f* and return it as an int."""
        prefix = _bord(ser_read(f, 1))
        if prefix < 0xfd:
            return prefix
        if prefix == 0xfd:
            return struct.unpack(b'<H', ser_read(f, 2))[0]
        if prefix == 0xfe:
            return struct.unpack(b'<I', ser_read(f, 4))[0]
        return struct.unpack(b'<Q', ser_read(f, 8))[0]
class BytesSerializer(Serializer):
    """Serialization of bytes instances"""

    @classmethod
    def stream_serialize(cls, b, f):
        # Varint length prefix followed by the raw bytes.
        VarIntSerializer.stream_serialize(len(b), f)
        f.write(b)

    @classmethod
    def stream_deserialize(cls, f):
        length = VarIntSerializer.stream_deserialize(f)
        return ser_read(f, length)
class VectorSerializer(Serializer):
    """Base class for serializers of object vectors.

    The element serializer is passed explicitly as *inner_cls*.
    """

    @classmethod
    def stream_serialize(cls, inner_cls, objs, f):
        # Varint element count followed by each element in order.
        VarIntSerializer.stream_serialize(len(objs), f)
        for obj in objs:
            inner_cls.stream_serialize(obj, f)

    @classmethod
    def stream_deserialize(cls, inner_cls, f):
        count = VarIntSerializer.stream_deserialize(f)
        return [inner_cls.stream_deserialize(f) for _ in range(count)]
class uint256VectorSerializer(Serializer):
    """Serialize vectors of uint256 (each element is exactly 32 raw bytes)"""

    @classmethod
    def stream_serialize(cls, uints, f):
        """Write a varint count followed by each 32-byte uint256."""
        VarIntSerializer.stream_serialize(len(uints), f)
        for uint in uints:
            # Previously a bare `assert`, which is stripped under `python -O`;
            # validate explicitly so malformed input always fails loudly.
            if len(uint) != 32:
                raise ValueError('uint256 must be exactly 32 bytes; got %d' % len(uint))
            f.write(uint)

    @classmethod
    def stream_deserialize(cls, f):
        """Read a varint count, then that many 32-byte uint256 values."""
        n = VarIntSerializer.stream_deserialize(f)
        r = []
        for i in range(n):
            r.append(ser_read(f, 32))
        return r
class intVectorSerialzer(Serializer):
    """Serialize vectors of 32-bit signed little-endian integers.

    (Class name typo is preserved for backward compatibility with callers.)
    """

    @classmethod
    def stream_serialize(cls, ints, f):
        """Write a varint count followed by each int as little-endian int32."""
        VarIntSerializer.stream_serialize(len(ints), f)
        for i in ints:
            f.write(struct.pack(b"<i", i))

    @classmethod
    def stream_deserialize(cls, f):
        """Read a varint count, then that many little-endian int32 values."""
        n = VarIntSerializer.stream_deserialize(f)
        ints = []
        for i in range(n):
            # Bug fix: struct.unpack() returns a 1-tuple; take element [0] so
            # the result is a list of ints, not a list of tuples.
            ints.append(struct.unpack(b"<i", ser_read(f, 4))[0])
        # Bug fix: the original built the list but never returned it.
        return ints
class VarStringSerializer(Serializer):
    """Serialize variable length strings"""

    @classmethod
    def stream_serialize(cls, s, f):
        # Varint length prefix followed by the raw contents.
        VarIntSerializer.stream_serialize(len(s), f)
        f.write(s)

    @classmethod
    def stream_deserialize(cls, f):
        length = VarIntSerializer.stream_deserialize(f)
        return ser_read(f, length)
def uint256_from_str(s):
    """Convert the first 32 bytes of *s* (little-endian) to a uint256 int."""
    # Decode as eight little-endian 32-bit words, then recombine them.
    words = struct.unpack(b"<IIIIIIII", s[:32])
    return sum(word << (32 * idx) for idx, word in enumerate(words))
def uint256_from_compact(c):
    """Convert compact encoding to uint256

    Used for the nBits compact encoding of the target in the block header:
    the top byte is a size, the low three bytes a mantissa.
    """
    exponent = (c >> 24) & 0xFF
    mantissa = c & 0xFFFFFF
    return mantissa << (8 * (exponent - 3))
def uint256_to_shortstr(u):
    """Return the first 16 hex digits of *u* rendered as a 64-digit hex string."""
    full_hex = "%064x" % (u,)
    return full_hex[:16]
# Public API of this module; names not listed here (e.g. the _bchr/_bord
# compatibility shims) are internal.
__all__ = (
        'MAX_SIZE',
        'Hash',
        'Hash160',
        'SerializationError',
        'SerializationTruncationError',
        'DeserializationExtraDataError',
        'ser_read',
        'Serializable',
        'ImmutableSerializable',
        'Serializer',
        'VarIntSerializer',
        'BytesSerializer',
        'VectorSerializer',
        'uint256VectorSerializer',
        'intVectorSerialzer',
        'VarStringSerializer',
        'uint256_from_str',
        'uint256_from_compact',
        'uint256_to_shortstr',
)
| lgpl-3.0 |
alxgu/ansible | lib/ansible/modules/monitoring/statusio_maintenance.py | 92 | 16873 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Benjamin Copeland (@bhcopeland) <ben@copeland.me.uk>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: statusio_maintenance
short_description: Create maintenance windows for your status.io dashboard
description:
- Creates a maintenance window for status.io
- Deletes a maintenance window for status.io
notes:
- You can use the apiary API url (http://docs.statusio.apiary.io/) to
capture API traffic
- Use start_date and start_time with minutes to set future maintenance window
version_added: "2.2"
author: Benjamin Copeland (@bhcopeland) <ben@copeland.me.uk>
options:
title:
description:
- A descriptive title for the maintenance window
default: "A new maintenance window"
desc:
description:
- Message describing the maintenance window
default: "Created by Ansible"
state:
description:
      - Desired state of the maintenance window.
default: "present"
choices: ["present", "absent"]
api_id:
description:
- Your unique API ID from status.io
required: true
api_key:
description:
- Your unique API Key from status.io
required: true
statuspage:
description:
- Your unique StatusPage ID from status.io
required: true
url:
description:
- Status.io API URL. A private apiary can be used instead.
default: "https://api.status.io"
components:
description:
- The given name of your component (server name)
aliases: ['component']
containers:
description:
- The given name of your container (data center)
aliases: ['container']
all_infrastructure_affected:
description:
- If it affects all components and containers
type: bool
default: 'no'
automation:
description:
- Automatically start and end the maintenance window
type: bool
default: 'no'
maintenance_notify_now:
description:
- Notify subscribers now
type: bool
default: 'no'
maintenance_notify_72_hr:
description:
- Notify subscribers 72 hours before maintenance start time
type: bool
default: 'no'
maintenance_notify_24_hr:
description:
- Notify subscribers 24 hours before maintenance start time
type: bool
default: 'no'
maintenance_notify_1_hr:
description:
- Notify subscribers 1 hour before maintenance start time
type: bool
default: 'no'
maintenance_id:
description:
- The maintenance id number when deleting a maintenance window
minutes:
description:
- The length of time in UTC that the maintenance will run \
(starting from playbook runtime)
default: 10
start_date:
description:
- Date maintenance is expected to start (Month/Day/Year) (UTC)
- End Date is worked out from start_date + minutes
start_time:
description:
- Time maintenance is expected to start (Hour:Minutes) (UTC)
- End Time is worked out from start_time + minutes
'''
EXAMPLES = '''
- name: Create a maintenance window for 10 minutes on server1, with automation to stop the maintenance
statusio_maintenance:
title: Router Upgrade from ansible
desc: Performing a Router Upgrade
components: server1.example.com
api_id: api_id
api_key: api_key
statuspage: statuspage_id
maintenance_notify_1_hr: True
automation: True
- name: Create a maintenance window for 60 minutes on server1 and server2
statusio_maintenance:
title: Routine maintenance
desc: Some security updates
components:
- server1.example.com
- server2.example.com
minutes: 60
api_id: api_id
api_key: api_key
statuspage: statuspage_id
maintenance_notify_1_hr: True
automation: True
delegate_to: localhost
- name: Create a future maintenance window for 24 hours to all hosts inside the Primary Data Center
statusio_maintenance:
title: Data center downtime
desc: Performing a Upgrade to our data center
components: Primary Data Center
api_id: api_id
api_key: api_key
statuspage: statuspage_id
start_date: 01/01/2016
start_time: 12:00
minutes: 1440
- name: Delete a maintenance window
statusio_maintenance:
title: Remove a maintenance window
maintenance_id: 561f90faf74bc94a4700087b
statuspage: statuspage_id
api_id: api_id
api_key: api_key
state: absent
'''
# TODO: Add RETURN documentation.
RETURN = ''' # '''
import datetime
import json
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
from ansible.module_utils.urls import open_url
def get_api_auth_headers(api_id, api_key, url, statuspage):
    """Build status.io auth headers and verify them against the API.

    Returns a 4-tuple (rc, headers, content, error): rc is 0 on success
    with the usable headers and the component-list response, or 1 with an
    error message on failure.
    """
    headers = {
        "x-api-id": api_id,
        "x-api-key": api_key,
        "Content-Type": "application/json"
    }
    try:
        response = open_url(
            url + "/v2/component/list/" + statuspage, headers=headers)
        data = json.loads(response.read())
        if data['status']['message'] == 'Authentication failed':
            return 1, None, None, "Authentication failed: " \
                                  "Check api_id/api_key and statuspage id."
    except Exception as e:
        return 1, None, None, to_native(e)
    return 0, headers, data, None
def get_component_ids(auth_content, components):
    """Resolve component names to their component/container id pairs.

    Matching is case-insensitive. Returns (0, list_of_id_dicts, None) when
    every requested component was found, or (1, None, leftover_names) when
    some names did not match any API result.
    """
    wanted = [name.lower() for name in components]
    found = []
    for entry in auth_content["result"]:
        entry_name = entry['name'].lower()
        if entry_name in wanted:
            found.append({
                "component_id": entry["_id"],
                "container_id": entry["containers"][0]["_id"]
            })
            wanted.remove(entry_name)
    if wanted:
        # Some requested components were not found in the API response.
        return 1, None, wanted
    return 0, found, None
def get_container_ids(auth_content, containers):
    """Resolve container (data center) names to component/container id pairs.

    Matching is case-insensitive against the first container of each API
    result. Returns (0, list_of_id_dicts, None) on full success, or
    (1, None, leftover_names) when some names were not found.
    """
    wanted = [name.lower() for name in containers]
    found = []
    for entry in auth_content["result"]:
        entry_name = entry["containers"][0]["name"].lower()
        if entry_name in wanted:
            found.append({
                "component_id": entry["_id"],
                "container_id": entry["containers"][0]["_id"]
            })
            wanted.remove(entry_name)
    if wanted:
        # Some requested containers were not found in the API response.
        return 1, None, wanted
    return 0, found, None
def get_date_time(start_date, start_time, minutes):
    """Compute the maintenance window [start_date, start_time, end_date, end_time].

    When *start_date* and *start_time* are given they are validated and the
    end is start + *minutes*; otherwise the window starts at the current UTC
    time. Returns (0, list_of_four_strings, None) or (1, None, error_msg).
    """
    if start_date and start_time:
        result = []
        try:
            datetime.datetime.strptime(start_date, '%m/%d/%Y')
        except (NameError, ValueError):
            return 1, None, "Not a valid start_date format."
        result.append(start_date)
        try:
            datetime.datetime.strptime(start_time, '%H:%M')
        except (NameError, ValueError):
            return 1, None, "Not a valid start_time format."
        result.append(start_time)
        try:
            # Derive the end date/time by adding the window length.
            begin = datetime.datetime.strptime(
                start_time + start_date, '%H:%M%m/%d/%Y')
            end = begin + datetime.timedelta(minutes=minutes)
            result.append(end.strftime("%m/%d/%Y"))
            result.append(end.strftime("%H:%M"))
        except (NameError, ValueError):
            return 1, None, "Couldn't work out a valid date"
    else:
        # No explicit start: the window begins now (UTC).
        now = datetime.datetime.utcnow()
        end = now + datetime.timedelta(minutes=minutes)
        result = [now.strftime("%m/%d/%Y"), now.strftime("%H:%M"),
                  end.strftime("%m/%d/%Y"), end.strftime("%H:%M")]
    return 0, result, None
def create_maintenance(auth_headers, url, statuspage, host_ids,
                       all_infrastructure_affected, automation, title, desc,
                       returned_date, maintenance_notify_now,
                       maintenance_notify_72_hr, maintenance_notify_24_hr,
                       maintenance_notify_1_hr):
    """Schedule a maintenance window via the status.io API.

    Returns (rc, None, error): rc is 0 on success, otherwise 1 with the API
    error message or the raised exception's text.
    """
    # Each date/time value is wrapped in a single-element list before being
    # sent -- presumably the shape the API expects; confirm against the
    # status.io API docs.
    returned_dates = [[x] for x in returned_date]
    component_id = [entry['component_id'] for entry in host_ids]
    container_id = [entry['container_id'] for entry in host_ids]
    try:
        payload = json.dumps({
            "statuspage_id": statuspage,
            "components": component_id,
            "containers": container_id,
            "all_infrastructure_affected": str(int(all_infrastructure_affected)),
            "automation": str(int(automation)),
            "maintenance_name": title,
            "maintenance_details": desc,
            "date_planned_start": returned_dates[0],
            "time_planned_start": returned_dates[1],
            "date_planned_end": returned_dates[2],
            "time_planned_end": returned_dates[3],
            "maintenance_notify_now": str(int(maintenance_notify_now)),
            "maintenance_notify_72_hr": str(int(maintenance_notify_72_hr)),
            "maintenance_notify_24_hr": str(int(maintenance_notify_24_hr)),
            "maintenance_notify_1_hr": str(int(maintenance_notify_1_hr))
        })
        response = open_url(
            url + "/v2/maintenance/schedule", data=payload,
            headers=auth_headers)
        data = json.loads(response.read())
        if data["status"]["error"] == "yes":
            return 1, None, data["status"]["message"]
    except Exception as e:
        return 1, None, to_native(e)
    return 0, None, None
def delete_maintenance(auth_headers, url, statuspage, maintenance_id):
    """Delete a scheduled maintenance window via the status.io API.

    Returns (rc, None, error): rc is 0 on success, 1 with an error message
    otherwise.
    """
    try:
        payload = json.dumps({
            "statuspage_id": statuspage,
            "maintenance_id": maintenance_id,
        })
        response = open_url(
            url=url + "/v2/maintenance/delete",
            data=payload,
            headers=auth_headers)
        data = json.loads(response.read())
        if data["status"]["error"] == "yes":
            return 1, None, "Invalid maintenance_id"
    except Exception as e:
        return 1, None, to_native(e)
    return 0, None, None
def main():
    """Ansible entry point: create or delete a status.io maintenance window."""
    module = AnsibleModule(
        argument_spec=dict(
            api_id=dict(required=True),
            api_key=dict(required=True, no_log=True),
            statuspage=dict(required=True),
            state=dict(required=False, default='present',
                       choices=['present', 'absent']),
            url=dict(default='https://api.status.io', required=False),
            components=dict(type='list', required=False, default=None,
                            aliases=['component']),
            containers=dict(type='list', required=False, default=None,
                            aliases=['container']),
            all_infrastructure_affected=dict(type='bool', default=False,
                                             required=False),
            automation=dict(type='bool', default=False, required=False),
            title=dict(required=False, default='A new maintenance window'),
            desc=dict(required=False, default='Created by Ansible'),
            minutes=dict(type='int', required=False, default=10),
            maintenance_notify_now=dict(type='bool', default=False,
                                        required=False),
            maintenance_notify_72_hr=dict(type='bool', default=False,
                                          required=False),
            maintenance_notify_24_hr=dict(type='bool', default=False,
                                          required=False),
            maintenance_notify_1_hr=dict(type='bool', default=False,
                                         required=False),
            maintenance_id=dict(required=False, default=None),
            start_date=dict(default=None, required=False),
            start_time=dict(default=None, required=False)
        ),
        supports_check_mode=True,
    )
    # Unpack all module parameters into locals.
    api_id = module.params['api_id']
    api_key = module.params['api_key']
    statuspage = module.params['statuspage']
    state = module.params['state']
    url = module.params['url']
    components = module.params['components']
    containers = module.params['containers']
    all_infrastructure_affected = module.params['all_infrastructure_affected']
    automation = module.params['automation']
    title = module.params['title']
    desc = module.params['desc']
    minutes = module.params['minutes']
    maintenance_notify_now = module.params['maintenance_notify_now']
    maintenance_notify_72_hr = module.params['maintenance_notify_72_hr']
    maintenance_notify_24_hr = module.params['maintenance_notify_24_hr']
    maintenance_notify_1_hr = module.params['maintenance_notify_1_hr']
    maintenance_id = module.params['maintenance_id']
    start_date = module.params['start_date']
    start_time = module.params['start_time']
    if state == "present":
        # Authenticate first; api_id/api_key are required args so the else
        # branch is effectively unreachable in practice.
        if api_id and api_key:
            (rc, auth_headers, auth_content, error) = \
                get_api_auth_headers(api_id, api_key, url, statuspage)
            if rc != 0:
                module.fail_json(msg="Failed to get auth keys: %s" % error)
        else:
            auth_headers = {}
            auth_content = {}
        # minutes defaults to 10, so a window is always computed; explicit
        # start_date/start_time override the "starting now" default.
        if minutes or start_time and start_date:
            (rc, returned_date, error) = get_date_time(
                start_date, start_time, minutes)
            if rc != 0:
                module.fail_json(msg="Failed to set date/time: %s" % error)
        # Exactly one of components/containers must be supplied.
        if not components and not containers:
            return module.fail_json(msg="A Component or Container must be "
                                        "defined")
        elif components and containers:
            return module.fail_json(msg="Components and containers cannot "
                                        "be used together")
        else:
            if components:
                (rc, host_ids, error) = get_component_ids(auth_content,
                                                          components)
                if rc != 0:
                    module.fail_json(msg="Failed to find component %s" % error)
            if containers:
                (rc, host_ids, error) = get_container_ids(auth_content,
                                                          containers)
                if rc != 0:
                    module.fail_json(msg="Failed to find container %s" % error)
            if module.check_mode:
                # Check mode: report that a change would be made, do nothing.
                module.exit_json(changed=True)
            else:
                (rc, _, error) = create_maintenance(
                    auth_headers, url, statuspage, host_ids,
                    all_infrastructure_affected, automation,
                    title, desc, returned_date, maintenance_notify_now,
                    maintenance_notify_72_hr, maintenance_notify_24_hr,
                    maintenance_notify_1_hr)
                if rc == 0:
                    module.exit_json(changed=True, result="Successfully created "
                                                          "maintenance")
                else:
                    module.fail_json(msg="Failed to create maintenance: %s"
                                         % error)
    if state == "absent":
        # Deleting only needs the auth headers, not the component list.
        if api_id and api_key:
            (rc, auth_headers, auth_content, error) = \
                get_api_auth_headers(api_id, api_key, url, statuspage)
            if rc != 0:
                module.fail_json(msg="Failed to get auth keys: %s" % error)
        else:
            auth_headers = {}
        if module.check_mode:
            module.exit_json(changed=True)
        else:
            (rc, _, error) = delete_maintenance(
                auth_headers, url, statuspage, maintenance_id)
            if rc == 0:
                module.exit_json(
                    changed=True,
                    result="Successfully deleted maintenance"
                )
            else:
                module.fail_json(
                    msg="Failed to delete maintenance: %s" % error)
if __name__ == '__main__':
    main()
| gpl-3.0 |
RamonGuiuGou/l10n-spain | l10n_es_account_balance_report/__openerp__.py | 1 | 1064 | # -*- coding: utf-8 -*-
# Copyright 2004-2011 Pexego Sistemas Informáticos
# Copyright 2013 Zikzakmedia
# Copyright 2014 Juanjo Algaz
# Copyright 2014 Joaquín Gutierrez <joaquing.pedrosa@gmail.com>
# Copyright 2014-2016 Tecnativa - Pedro M. Baeza
# Copyright 2016 Tecnativa - Vicent Cubells
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl-3.0).
{
    # Odoo/OpenERP addon manifest: Spanish annual-accounts reports.
    "name": "Informes de cuentas anuales españoles",
    "version": "9.0.1.0.0",
    "author": "Pexego, "
              "Tecnativa,"
              "Zikzakmedia,"
              "Odoo Community Association (OCA)",
    "license": "AGPL-3",
    "website": "http://www.pexego.es",
    "category": "Localisation/Accounting",
    "depends": [
        'l10n_es',
        'account_balance_reporting',
    ],
    "data": [
        # Balance sheet and P&L report templates for each Spanish reporting
        # model (PYMES, abbreviated, normal), plus the income/expense statement.
        'data/balance_pymes.xml',
        'data/pyg_pymes.xml',
        'data/balance_abreviado.xml',
        'data/pyg_abreviado.xml',
        'data/balance_normal.xml',
        'data/pyg_normal.xml',
        'data/estado_ingresos_gastos_normal.xml',
    ],
    'installable': True,
}
| agpl-3.0 |
veestr/sjsu-298-experiment | main.py | 1 | 9053 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import webapp2
import jinja2
import os
import random
import glob
import json
import cgi
import string
import re
import time
from datetime import datetime, tzinfo, timedelta
from google.appengine.ext import db
from google.appengine.api import memcache
# Jinja2 environment rooted at this file's directory, with autoescaping on.
JINJA_ENVIRONMENT = jinja2.Environment(
    loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
    extensions=['jinja2.ext.autoescape'],
    autoescape=True)
# Templates are loaded once at module import time and reused per request.
main_template=JINJA_ENVIRONMENT.get_template('index.html')
report_template=JINJA_ENVIRONMENT.get_template('report.html')
account_template=JINJA_ENVIRONMENT.get_template('account.html')
bad_password_template=JINJA_ENVIRONMENT.get_template('bad_password.html')
class Zone(tzinfo):
    """Fixed-offset timezone with an always-on/off DST flag."""
    def __init__(self, offset, isdst, name):
        # Base UTC offset in hours, DST flag, and display name.
        self.offset = offset
        self.isdst = isdst
        self.name = name
    def utcoffset(self, dt):
        return timedelta(hours=self.offset) + self.dst(dt)
    def dst(self, dt):
        # NOTE(review): DST is a constant per instance here, not computed
        # from *dt* -- PST below therefore always reports +1h of DST.
        if self.isdst:
            return timedelta(hours=1)
        return timedelta(0)
    def tzname(self, dt):
        return self.name
PST = Zone(-8, True, 'PST')
class Account(db.Model):
    """Defines accounts in the database."""
    date = db.StringProperty()  # creation timestamp string (PST), set in save()
    user = db.StringProperty(indexed=True)  # participant identifier
    site = db.StringProperty()  # site (image name) the account belongs to
    initial_password = db.StringProperty()  # password chosen at registration
    # Attempt number of the successful recall for each follow-up iteration:
    # -1 = not yet attempted, 0 = all attempts exhausted (see verify()).
    second_password = db.IntegerProperty()
    third_password = db.IntegerProperty()
    pass
def get_all_accounts():
    """Return every stored account as a list of rows, header row first."""
    rows = [['Date', 'User', 'Site', 'Original Password', '5 Min ', '1 Week']]
    query = Account.all()
    query.order('-date')
    for account in query:
        rows.append([account.date,
                     account.user,
                     account.site,
                     account.initial_password,
                     account.second_password,
                     account.third_password])
    return rows
def get_possible_sites(user):
    """Return the set of site image names available for *user*.

    Each site is represented by an image file in the 'images' directory;
    the username suffix selects the experimental condition.
    """
    lowered = user.lower()
    if lowered.endswith("ur"):
        # Username indicates the "unrelated" condition.
        pattern = 'images/ur_*'
    elif lowered.endswith("r"):
        # Username indicates the "related" condition.
        pattern = 'images/r_*'
    else:
        return set()
    return set(path.replace('images/', '') for path in glob.glob(pattern))
def get_registered_sites(user, iteration):
    """Return the set of sites *user* has already registered for *iteration*.

    A site counts as registered when the password slot for that iteration
    holds a value in the range [0, 3].
    """
    query = Account.all()
    query.filter('user =', user)
    if int(iteration) == 1:
        # Keep only accounts whose second (5-minute) recall was recorded.
        query.filter('second_password >=', 0).filter('second_password <=', 3)
    if int(iteration) == 2:
        # Keep only accounts whose third (1-week) recall was recorded.
        query.filter('third_password >=', 0).filter('third_password <=', 3)
    return set(account.site for account in query)
def verify_site(user, site, password):
    """Return True when *password* matches the stored one for user/site."""
    query = Account.all()
    query.filter('user =', user)
    query.filter('site =', site)
    match = query.get()
    return str(match.initial_password) == password
def get_site_for_user(iteration, user):
    """Return a randomly chosen unused site for *user*, or '' if none remain.

    Excludes sites already registered for this iteration and the site served
    most recently (cached in memcache per user) so the same site is not
    offered twice in a row.

    Fix: removed the Python-2-only debug ``print`` statements that leaked
    experiment state to the logs (and are syntax errors under Python 3).
    """
    possible_sites = get_possible_sites(user)
    registered_sites = get_registered_sites(user, iteration)
    registered_sites.add(memcache.get(user))
    allowed_sites = possible_sites.difference(registered_sites)
    if allowed_sites:
        return random.sample(allowed_sites, 1).pop()
    return ''
class MainHandler(webapp2.RequestHandler):
    """Serves the landing page."""
    def get(self):
        self.response.write(main_template.render())
        # Clear all cached per-user "last site served" markers so a new
        # session starts fresh.
        memcache.flush_all()
    pass
class ReportHandler(webapp2.RequestHandler):
    """Renders the report page listing every stored account."""
    def get(self):
        context = {'accounts': get_all_accounts()}
        self.response.write(report_template.render(context))
class AccountHandler(webapp2.RequestHandler):
    """Displays, saves and verifies per-site account credentials.

    URL scheme: /account/<iteration>/<attempt>/ where iteration is
    0 (initial registration), 1 (5-minute recall) or 2 (1-week recall),
    and attempt counts password tries within one iteration.

    Fixes: added missing ``return`` after redirects in get() (the handler
    previously kept executing after redirecting), removed a Python-2 debug
    ``print`` and an unused local, and fixed the "attemps" typo in the
    user-facing message.
    """
    def get(self, iteration, attempt):
        """Render the account form for the next (or explicitly given) site."""
        user = cgi.escape(self.request.get('user'))
        site = cgi.escape(self.request.get('site'))
        if not user:
            # Fix: return so the handler stops after redirecting.
            return self.redirect('/')
        if site:
            # ASSERT: We know for which site to display the account info
            selected_site = site
        else:
            # ASSERT: we need to figure out the site for the user and if
            # such site does not exist, we need to go back to the main screen
            selected_site = get_site_for_user(iteration, user)
            if selected_site == "":
                # Fix: return (previously fell through and rendered anyway).
                return self.redirect('/')
        if int(iteration) == 1 or int(iteration) == 2:
            # ASSERT: The user is going to verify the site's credentials
            # thus, we need a different verification procedure
            action = "/verify"
        elif int(iteration) == 0:
            # ASSERT: This is the user's first time, so we need to save the info
            action = "/save"
        template_values = {
            'selected_site': cgi.escape(selected_site),
            'user': user,
            'iteration': iteration,
            'attempt': attempt,
            'action': action,
        }
        self.response.write(account_template.render(template_values))
    def save(self):
        """Saves the account credentials and redirects to the new account page."""
        user = cgi.escape(self.request.get('user'))
        password = cgi.escape(self.request.get('pass1'))
        site = cgi.escape(self.request.get('site'))
        self.response.status = 201
        account = Account(
            user=user,
            initial_password=password,
            site=site,
            second_password=-1,
            third_password=-1,
            date=datetime.now(PST).strftime('%m/%d/%Y %H:%M:%S %Z')
        )
        account.put()
        # Remember the site just registered so it is not offered again next.
        memcache.set(key=user, value=site)
        new_path = '/account/0/1/?user=' + user
        return self.redirect(new_path)
    def verify(self):
        """Verifies the credentials for the site and records the attempt."""
        user = cgi.escape(self.request.get('user'))
        password = cgi.escape(self.request.get('pass1'))
        site = cgi.escape(self.request.get('site'))
        iteration = int(cgi.escape(self.request.get('iteration')))
        attempt = int(cgi.escape(self.request.get('attempt')))
        is_pass_valid = verify_site(user, site, password)
        existing_accounts = db.GqlQuery(
            "SELECT * from Account WHERE user=:1 AND site=:2",
            user, site).fetch(1)
        account = existing_accounts[0]
        if is_pass_valid:
            # ASSERT: The password provided by user for the site is valid
            # Mark the attempt as such and go on to the next site
            if iteration == 1:
                account.second_password = attempt
            if iteration == 2:
                account.third_password = attempt
            new_path = '/account/' + str(iteration) + '/1/?user=' + user
            memcache.set(key=user, value=site)
            account.put()
            return self.redirect(new_path)
        else:
            if attempt < 3:
                # ASSERT: The pass is not valid, redirect to the next attempt
                # for this site
                next_attempt = attempt + 1
                new_path = ('/account/' + str(iteration) + '/' +
                            str(next_attempt) + '/?user=' + user +
                            '&site=' + site)
                msg = "Your password did not match the one you've created for this site. "
            if attempt >= 3:
                # ASSERT: The pass is not valid for this site and we do not
                # have any more attempts left; redirect to the next site
                # within the same iteration
                if iteration == 1:
                    account.second_password = 0
                if iteration == 2:
                    account.third_password = 0
                new_path = '/account/' + str(iteration) + '/1/?user=' + user
                memcache.set(key=user, value=site)
                account.put()
                msg = ("You have exhausted all attempts. Re-directing to the "
                       "next site or the main menu if no sites are available. ")
            template_values = {
                'attempts_left': 3 - attempt,
                'target_url': new_path,
                'message': msg,
            }
            self.response.write(bad_password_template.render(template_values))
# URL routing table: GET pages plus POST-only /save and /verify endpoints
# that dispatch to specific AccountHandler methods.
app = webapp2.WSGIApplication([
    webapp2.Route(r'/', handler=MainHandler),
    webapp2.Route(r'/report', handler=ReportHandler),
    webapp2.Route(r'/account/<iteration>/<attempt>/', handler=AccountHandler),
    webapp2.Route(r'/save', handler=AccountHandler, methods=['POST'], handler_method='save'),
    webapp2.Route(r'/verify', handler=AccountHandler, methods=['POST'], handler_method='verify')
], debug=True)
| mit |
CiscoSystems/horizon | openstack_dashboard/dashboards/admin/metering/views.py | 51 | 6870 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import logging
from django.core.urlresolvers import reverse_lazy
from django.http import HttpResponse # noqa
from django.utils.translation import ugettext_lazy as _
import django.views
from horizon import exceptions
from horizon import forms
from horizon import tabs
from horizon.utils import csvbase
from openstack_dashboard.api import ceilometer
from openstack_dashboard.dashboards.admin.metering import forms as \
metering_forms
from openstack_dashboard.dashboards.admin.metering import tabs as \
metering_tabs
from openstack_dashboard.utils import metering as metering_utils
LOG = logging.getLogger(__name__)
class IndexView(tabs.TabbedTableView):
    # Tabbed overview page; the tabs themselves are defined in metering_tabs.
    tab_group_class = metering_tabs.CeilometerOverviewTabs
    template_name = 'admin/metering/index.html'
    page_title = _("Resources Usage Overview")
class CreateUsageReport(forms.ModalFormView):
    # Modal form for adjusting the usage-report period parameters.
    form_class = metering_forms.UsageReportForm
    template_name = 'admin/metering/daily.html'
    success_url = reverse_lazy('horizon:admin:metering:index')
    page_title = _("Modify Usage Report Parameters")
class SamplesView(django.views.generic.TemplateView):
    """Returns aggregated sample data for a single meter as JSON."""
    def get(self, request, *args, **kwargs):
        meter = request.GET.get('meter', None)
        if not meter:
            # Without a meter there is nothing to aggregate.
            return HttpResponse(json.dumps({}),
                                content_type='application/json')
        meter_name = meter.replace(".", "_")
        date_options = request.GET.get('date_options', None)
        date_from = request.GET.get('date_from', None)
        date_to = request.GET.get('date_to', None)
        stats_attr = request.GET.get('stats_attr', 'avg')
        group_by = request.GET.get('group_by', None)
        try:
            date_from, date_to = metering_utils.calc_date_args(
                date_from, date_to, date_options)
        except Exception:
            exceptions.handle(self.request, _('Dates cannot be recognized.'))
        # Aggregate per day (3600 * 24 second periods), either grouped by
        # project or as a plain meter query.
        period = 3600 * 24
        if group_by == 'project':
            query = metering_utils.ProjectAggregatesQuery(
                request, date_from, date_to, period)
        else:
            query = metering_utils.MeterQuery(
                request, date_from, date_to, period)
        resources, unit = query.query(meter)
        series = metering_utils.series_for_meter(
            request, resources, group_by, meter, meter_name, stats_attr, unit)
        series = metering_utils.normalize_series_by_unit(series)
        payload = {'series': series, 'settings': {}}
        return HttpResponse(json.dumps(payload),
                            content_type='application/json')
class CsvReportView(django.views.generic.View):
    """Serves the usage report as a downloadable CSV response."""
    def get(self, request, **response_kwargs):
        response_kwargs.setdefault("filename", "usage.csv")
        renderer = ReportCsvRenderer(request=request,
                                     template=None,
                                     context={'usage': load_report_data(request)},
                                     content_type='csv',
                                     **response_kwargs)
        return renderer
class ReportCsvRenderer(csvbase.BaseCsvResponse):
    """CSV renderer for the usage report (one row per sample)."""
    columns = [_("Project Name"), _("Meter"), _("Description"),
               _("Service"), _("Time"), _("Value (Avg)"), _("Unit")]
    def get_row_data(self):
        """Yields one row tuple per usage entry, iterating project by project."""
        for usages in self.context['usage'].values():
            for usage in usages:
                yield (usage["project"],
                       usage["meter"],
                       usage["description"],
                       usage["service"],
                       usage["time"],
                       usage["value"],
                       usage["unit"])
def load_report_data(request):
    """Collect per-project meter statistics for the usage report.

    Returns a dict mapping project id -> list of row dicts (one row per
    sample period per meter).
    """
    meters = ceilometer.Meters(request)
    # Known meters grouped by owning service, used to label each row.
    services = {
        _('Nova'): meters.list_nova(),
        _('Neutron'): meters.list_neutron(),
        _('Glance'): meters.list_glance(),
        _('Cinder'): meters.list_cinder(),
        _('Swift_meters'): meters.list_swift(),
        _('Kwapi'): meters.list_kwapi(),
        _('IPMI'): meters.list_ipmi(),
    }
    project_rows = {}
    # Reporting period: defaults to the last 7 days.
    date_options = request.GET.get('date_options', 7)
    date_from = request.GET.get('date_from')
    date_to = request.GET.get('date_to')
    try:
        date_from, date_to = metering_utils.calc_date_args(date_from,
                                                           date_to,
                                                           date_options)
    except Exception:
        exceptions.handle(request, _('Dates cannot be recognized.'))
    try:
        # Daily (3600 * 24 s) per-project aggregates over the period.
        project_aggregates = metering_utils.ProjectAggregatesQuery(request,
                                                                   date_from,
                                                                   date_to,
                                                                   3600 * 24)
    except Exception:
        exceptions.handle(request,
                          _('Unable to retrieve project list.'))
    for meter in meters._cached_meters.values():
        # Find which service this meter belongs to (None if unknown).
        service = None
        for name, m_list in services.items():
            if meter in m_list:
                service = name
                break
        res, unit = project_aggregates.query(meter.name)
        for r in res:
            values = r.get_meter(meter.name.replace(".", "_"))
            if values:
                for value in values:
                    row = {"name": 'none',
                           "project": r.id,
                           "meter": meter.name,
                           "description": meter.description,
                           "service": service,
                           "time": value._apiresource.period_end,
                           "value": value._apiresource.avg,
                           "unit": meter.unit}
                    if r.id not in project_rows:
                        project_rows[r.id] = [row]
                    else:
                        project_rows[r.id].append(row)
    return project_rows
| apache-2.0 |
xodus7/tensorflow | tensorflow/python/autograph/converters/continue_statements.py | 11 | 5281 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Canonicalizes continue statements by de-sugaring into a control boolean."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.autograph.core import converter
from tensorflow.python.autograph.pyct import anno
from tensorflow.python.autograph.pyct import templates
from tensorflow.python.autograph.pyct.static_analysis.annos import NodeAnno
# Tags for local state.
CONTROL_VAR_NAME = 'control_var_name'  # name of the generated control boolean
CONTINUE_USED = 'continue_used'  # a `continue` was seen in the current scope
GUARD_CREATED = 'guard_created'  # the guard `if` has already been emitted
CREATE_GUARD_NEXT = 'create_guard_next'  # emit the guard at the next statement
class ContinueCanonicalizationTransformer(converter.Base):
  """Canonicalizes continue statements into additional conditionals."""

  def visit_Continue(self, node):
    """Replaces `continue` by setting the loop's control boolean to True."""
    self.set_local(CONTINUE_USED, True)
    template = """
      var_name = tf.constant(True)
    """
    return templates.replace(
        template, var_name=self.get_local(CONTROL_VAR_NAME))

  def _postprocess_statement(self, node):
    """Wraps statements following a `continue` in a guard conditional."""
    # Example of how the state machine below works:
    #
    # 1| stmt # State: CONTINUE_USED = False
    # | # Action: none
    # 2| if cond:
    # 3| continue # State: CONTINUE_USED = True,
    # | # GUARD_CREATED = False,
    # | # CREATE_GUARD_NEXT = False
    # | # Action: set CREATE_GUARD_NEXT = True
    # 4| stmt # State: CONTINUE_USED = True,
    # | # GUARD_CREATED = False,
    # | # CREATE_GUARD_NEXT = True
    # | # Action: create `if not continue_used`,
    # | # set GUARD_CREATED = True
    # 5| stmt # State: CONTINUE_USED = True, GUARD_CREATED = True
    # | # Action: none (will be wrapped under previously
    # | # created if node)
    if self.get_local(CONTINUE_USED, False):
      if self.get_local(GUARD_CREATED, False):
        return node, None
      elif not self.get_local(CREATE_GUARD_NEXT, False):
        self.set_local(CREATE_GUARD_NEXT, True)
        return node, None
      else:
        self.set_local(GUARD_CREATED, True)
        template = """
          if not var_name:
            original_node
        """
        cond, = templates.replace(
            template,
            var_name=self.get_local(CONTROL_VAR_NAME),
            original_node=node)
        return cond, cond.body
    return node, None

  def _visit_loop_body(self, node, nodes):
    """Transforms a loop body, initializing the control variable if needed."""
    self.enter_local_scope()
    scope = anno.getanno(node, NodeAnno.BODY_SCOPE)
    continue_var = self.ctx.namer.new_symbol('continue_', scope.referenced)
    self.set_local(CONTROL_VAR_NAME, continue_var)

    nodes = self.visit_block(nodes, after_visit=self._postprocess_statement)

    if self.get_local(CONTINUE_USED, False):
      # Prepend the control variable's initialization to the loop body.
      template = """
        var_name = tf.constant(False)
      """
      control_var_init = templates.replace(template, var_name=continue_var)
      nodes = control_var_init + nodes

    self.exit_local_scope()
    return nodes

  def _visit_non_loop_body(self, nodes):
    """Transforms a non-loop block, propagating CONTINUE_USED to the parent."""
    self.enter_local_scope(inherit=(CONTROL_VAR_NAME,))
    nodes = self.visit_block(nodes, after_visit=self._postprocess_statement)
    continue_used = self.get_local(CONTINUE_USED, False)
    self.exit_local_scope(keep=(CONTINUE_USED,))
    return nodes, continue_used

  def visit_While(self, node):
    node.test = self.visit(node.test)
    node.body = self._visit_loop_body(node, node.body)
    # A continue in the else clause applies to the containing scope.
    node.orelse, _ = self._visit_non_loop_body(node.orelse)
    return node

  def visit_For(self, node):
    node.target = self.generic_visit(node.target)
    node.iter = self.generic_visit(node.iter)
    node.body = self._visit_loop_body(node, node.body)
    # A continue in the else clause applies to the containing scope.
    node.orelse, _ = self._visit_non_loop_body(node.orelse)
    return node

  def visit_If(self, node):
    node.test = self.generic_visit(node.test)
    node.body, continue_used_body = self._visit_non_loop_body(node.body)
    node.orelse, continue_used_orelse = self._visit_non_loop_body(node.orelse)
    # A continue in either branch marks the enclosing scope as using it.
    self.set_local(CONTINUE_USED, continue_used_body or continue_used_orelse)
    return node

  def visit_With(self, node):
    node.items = self.visit_block(node.items)
    node.body, _ = self._visit_non_loop_body(node.body)
    return node
def transform(node, ctx):
    """Entry point: canonicalize all `continue` statements in `node`."""
    return ContinueCanonicalizationTransformer(ctx).visit(node)
| apache-2.0 |
gxx/lettuce | tests/integration/lib/Django-1.2.5/tests/regressiontests/views/models.py | 144 | 1202 | """
Regression tests for Django built-in views.
"""
from django.db import models
class Author(models.Model):
    """Simple author model used by the generic-view regression tests."""
    name = models.CharField(max_length=100)

    def __unicode__(self):
        return self.name

    def get_absolute_url(self):
        # Hard-coded path matching the test URLconf for author detail views.
        return '/views/authors/%s/' % self.id
class BaseArticle(models.Model):
    """
    An abstract article Model so that we can create article models with and
    without a get_absolute_url method (for create_update generic views tests).
    """
    title = models.CharField(max_length=100)
    slug = models.SlugField()
    author = models.ForeignKey(Author)

    class Meta:
        # Abstract: only the concrete subclasses below get database tables.
        abstract = True

    def __unicode__(self):
        return self.title
class Article(BaseArticle):
    # Concrete article variant *without* get_absolute_url (contrast UrlArticle).
    date_created = models.DateTimeField()
class UrlArticle(BaseArticle):
    """
    An Article class with a get_absolute_url defined.
    """
    date_created = models.DateTimeField()

    def get_absolute_url(self):
        return '/urlarticles/%s/' % self.slug
    # NOTE(review): `purge` appears to be a flag consumed by tests outside
    # this file (usage not visible in this chunk) — confirm before changing.
    get_absolute_url.purge = True
class DateArticle(BaseArticle):
    """
    An article Model with a DateField instead of DateTimeField,
    for testing #7602
    """
    # DateField (not DateTimeField) is the whole point of this model.
    date_created = models.DateField()
| gpl-3.0 |
gnowxilef/youtube-dl | youtube_dl/extractor/varzesh3.py | 64 | 3238 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse_urlparse,
compat_parse_qs,
)
from ..utils import (
clean_html,
remove_start,
)
class Varzesh3IE(InfoExtractor):
    """Extractor for video.varzesh3.com (Iranian sports video site)."""
    _VALID_URL = r'https?://(?:www\.)?video\.varzesh3\.com/(?:[^/]+/)+(?P<id>[^/]+)/?'
    _TESTS = [{
        'url': 'http://video.varzesh3.com/germany/bundesliga/5-%D9%88%D8%A7%DA%A9%D9%86%D8%B4-%D8%A8%D8%B1%D8%AA%D8%B1-%D8%AF%D8%B1%D9%88%D8%A7%D8%B2%D9%87%E2%80%8C%D8%A8%D8%A7%D9%86%D8%A7%D9%86%D8%9B%D9%87%D9%81%D8%AA%D9%87-26-%D8%A8%D9%88%D9%86%D8%AF%D8%B3/',
        'md5': '2a933874cb7dce4366075281eb49e855',
        'info_dict': {
            'id': '76337',
            'ext': 'mp4',
            'title': '۵ واکنش برتر دروازهبانان؛هفته ۲۶ بوندسلیگا',
            'description': 'فصل ۲۰۱۵-۲۰۱۴',
            'thumbnail': r're:^https?://.*\.jpg$',
        },
        'skip': 'HTTP 404 Error',
    }, {
        'url': 'http://video.varzesh3.com/video/112785/%D8%AF%D9%84%D9%87-%D8%B9%D9%84%DB%8C%D8%9B-%D8%B3%D8%AA%D8%A7%D8%B1%D9%87-%D9%86%D9%88%D8%B8%D9%87%D9%88%D8%B1-%D9%84%DB%8C%DA%AF-%D8%A8%D8%B1%D8%AA%D8%B1-%D8%AC%D8%B2%DB%8C%D8%B1%D9%87',
        'md5': '841b7cd3afbc76e61708d94e53a4a4e7',
        'info_dict': {
            'id': '112785',
            'ext': 'mp4',
            'title': 'دله علی؛ ستاره نوظهور لیگ برتر جزیره',
            'description': 'فوتبال 120',
        },
        'expected_warnings': ['description'],
    }]

    def _real_extract(self, url):
        display_id = self._match_id(url)

        webpage = self._download_webpage(url, display_id)

        video_url = self._search_regex(
            r'<source[^>]+src="([^"]+)"', webpage, 'video url')

        # Page titles carry a site-name prefix; strip it off.
        title = remove_start(self._html_search_regex(
            r'<title>([^<]+)</title>', webpage, 'title'), 'ویدیو ورزش 3 | ')
        description = self._html_search_regex(
            r'(?s)<div class="matn">(.+?)</div>',
            webpage, 'description', default=None)
        if description is None:
            # Fall back to the <meta name="description"> tag.
            description = clean_html(self._html_search_meta('description', webpage))

        thumbnail = self._og_search_thumbnail(webpage, default=None)
        if thumbnail is None:
            # Fall back to the thumbnail embedded in the Facebook share link
            # (query parameter p[images][0]).
            fb_sharer_url = self._search_regex(
                r'<a[^>]+href="(https?://www\.facebook\.com/sharer/sharer\.php?[^"]+)"',
                webpage, 'facebook sharer URL', fatal=False)
            sharer_params = compat_parse_qs(compat_urllib_parse_urlparse(fb_sharer_url).query)
            thumbnail = sharer_params.get('p[images][0]', [None])[0]

        video_id = self._search_regex(
            r"<link[^>]+rel='(?:canonical|shortlink)'[^>]+href='/\?p=([^']+)'",
            webpage, display_id, default=None)
        if video_id is None:
            video_id = self._search_regex(
                r'var\s+VideoId\s*=\s*(\d+);', webpage, 'video id',
                default=display_id)

        return {
            'url': video_url,
            'id': video_id,
            'title': title,
            'description': description,
            'thumbnail': thumbnail,
        }
| unlicense |
xiangke/pycopia | mibs/pycopia/mibs/SNMPv2_TM.py | 1 | 2645 | # python
# This file is generated by a program (mib2py). Any edits will be lost.
from pycopia.aid import Enum
import pycopia.SMI.Basetypes
Range = pycopia.SMI.Basetypes.Range
Ranges = pycopia.SMI.Basetypes.Ranges
from pycopia.SMI.Objects import ColumnObject, MacroObject, NotificationObject, RowObject, ScalarObject, NodeObject, ModuleObject, GroupObject
# imports
from SNMPv2_SMI import MODULE_IDENTITY, OBJECT_IDENTITY, snmpModules, snmpDomains, snmpProxys
from SNMPv2_TC import TEXTUAL_CONVENTION
class SNMPv2_TM(ModuleObject):
    # Auto-generated module descriptor for the SNMPv2-TM MIB; per the file
    # header this file is mib2py output and manual edits will be lost.
    path = '/usr/share/snmp/mibs/ietf/SNMPv2-TM'
    conformance = 5
    name = 'SNMPv2-TM'
    language = 2
    description = 'The MIB module for SNMP transport mappings.\n\nCopyright (C) The Internet Society (2002). This\nversion of this MIB module is part of RFC 3417;\nsee the RFC itself for full legal notices.'
# nodes
# Transport-domain OID node registrations generated from the MIB.
class snmpUDPDomain(NodeObject):
    status = 1
    OID = pycopia.SMI.Basetypes.ObjectIdentifier([1, 3, 6, 1, 6, 1, 1])
    name = 'snmpUDPDomain'


class snmpCLNSDomain(NodeObject):
    status = 1
    OID = pycopia.SMI.Basetypes.ObjectIdentifier([1, 3, 6, 1, 6, 1, 2])
    name = 'snmpCLNSDomain'


class snmpCONSDomain(NodeObject):
    status = 1
    OID = pycopia.SMI.Basetypes.ObjectIdentifier([1, 3, 6, 1, 6, 1, 3])
    name = 'snmpCONSDomain'


class snmpDDPDomain(NodeObject):
    status = 1
    OID = pycopia.SMI.Basetypes.ObjectIdentifier([1, 3, 6, 1, 6, 1, 4])
    name = 'snmpDDPDomain'


class snmpIPXDomain(NodeObject):
    status = 1
    OID = pycopia.SMI.Basetypes.ObjectIdentifier([1, 3, 6, 1, 6, 1, 5])
    name = 'snmpIPXDomain'


class rfc1157Proxy(NodeObject):
    # No status attribute in the generated output for this node.
    OID = pycopia.SMI.Basetypes.ObjectIdentifier([1, 3, 6, 1, 6, 2, 1])
    name = 'rfc1157Proxy'


class rfc1157Domain(NodeObject):
    status = 2
    OID = pycopia.SMI.Basetypes.ObjectIdentifier([1, 3, 6, 1, 6, 2, 1, 1])
    name = 'rfc1157Domain'


class snmpv2tm(NodeObject):
    status = 1
    OID = pycopia.SMI.Basetypes.ObjectIdentifier([1, 3, 6, 1, 6, 3, 19])
    name = 'snmpv2tm'
# macros
# types
# Textual-convention address types generated from the MIB
# (OctetString subtypes with size ranges and display formats).
class SnmpUDPAddress(pycopia.SMI.Basetypes.OctetString):
    status = 1
    ranges = Ranges(Range(6, 6))
    format = '1d.1d.1d.1d/2d'


class SnmpOSIAddress(pycopia.SMI.Basetypes.OctetString):
    status = 1
    ranges = Ranges(Range(1, 1), Range(4, 85))
    format = '*1x:/1x:'


class SnmpNBPAddress(pycopia.SMI.Basetypes.OctetString):
    status = 1
    ranges = Ranges(Range(3, 99))


class SnmpIPXAddress(pycopia.SMI.Basetypes.OctetString):
    status = 1
    ranges = Ranges(Range(12, 12))
    format = '4x.1x:1x:1x:1x:1x:1x.2d'
# scalars
# columns
# rows
# notifications (traps)
# groups
# capabilities
# special additions
# Add to master OIDMAP.
from pycopia import SMI
# Register this module's OID definitions in the global OID map at import time.
SMI.update_oidmap(__name__)
| lgpl-2.1 |
Glottotopia/aagd | moin/local/moin/MoinMoin/macro/WantedPages.py | 2 | 3445 | # -*- coding: iso-8859-1 -*-
"""
MoinMoin - WantedPages Macro
@copyright: 2001 Juergen Hermann <jh@web.de>
@license: GNU GPL, see COPYING for details.
"""
from MoinMoin import wikiutil
# Macro cache dependency declaration; "pages" presumably invalidates the
# cached output when the wiki's page set changes — confirm against the
# macro framework.
Dependencies = ["pages"]
def macro_WantedPages(macro):
    """Render a numbered list of "wanted" pages: pages that are linked to but
    do not exist, each followed by links back to the pages referencing it.

    Links from pages marked deprecated only count if the target is also
    wanted by at least one non-deprecated page.
    """
    request = macro.request
    _ = request.getText

    # prevent recursion
    if request.mode_getpagelinks:
        return ''
    if request.isSpiderAgent:  # reduce bot cpu usage
        return ''

    # Get allpages switch from the form
    allpages = int(request.values.get('allpages', 0)) != 0

    # Control bar - filter the list of pages
    # TODO: we should make this a widget and use on all page listing pages
    label = (_('Include system pages'), _('Exclude system pages'))[allpages]
    page = macro.formatter.page
    controlbar = macro.formatter.div(1, css_class="controlbar") + \
                 page.link_to(request, label, querystr={'allpages': '%s' % (allpages and '0' or '1')}) + \
                 macro.formatter.div(0)

    # Get page dict readable by current user
    pages = request.rootpage.getPageDict()

    # build a dict of wanted pages
    wanted = {}
    deprecated_links = []
    for name, page in pages.items():
        # Skip system pages, because missing translations are not wanted pages,
        # unless you are a translator and clicked "Include system pages"
        if not allpages and wikiutil.isSystemPage(request, name):
            continue

        # Add links to pages which do not exist in pages dict
        links = page.getPageLinks(request)
        is_deprecated = page.parse_processing_instructions(
        ).get('deprecated', False)
        for link in links:
            if not link in pages and request.user.may.read(link):
                if is_deprecated:
                    deprecated_links.append(link)
                if link in wanted:
                    wanted[link][name] = 1
                else:
                    wanted[link] = {name: 1}

    # Drop links that are wanted *only* by deprecated pages.
    for link in deprecated_links:
        if len(wanted[link]) == 1:
            del wanted[link]

    # Check for the extreme case when there are no wanted pages
    if not wanted:
        return u"%s<p>%s</p>" % (controlbar, _("No wanted pages in this wiki."))

    # Return a list of page links
    wantednames = wanted.keys()
    wantednames.sort()
    result = []
    result.append(macro.formatter.number_list(1))
    for name in wantednames:
        if not name:
            continue
        result.append(macro.formatter.listitem(1))

        # Add link to the wanted page
        result.append(macro.formatter.pagelink(1, name, generated=1))
        result.append(macro.formatter.text(name))
        result.append(macro.formatter.pagelink(0, name))

        # Add links to pages that want this page, highliting
        # the link in those pages.
        where = wanted[name].keys()
        where.sort()
        if macro.formatter.page.page_name in where:
            # Don't link back to the page currently being rendered.
            where.remove(macro.formatter.page.page_name)
        wherelinks = [pages[pagename].link_to(request, querystr={'highlight': name}, rel='nofollow')
                      for pagename in where]
        result.append(": " + ', '.join(wherelinks))
        result.append(macro.formatter.listitem(0))
    result.append(macro.formatter.number_list(0))

    return u'%s%s' % (controlbar, u''.join(result))
| mit |
w1r0x/ansible | lib/ansible/playbook/helpers.py | 66 | 4255 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible.errors import AnsibleParserError
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleSequence
def load_list_of_blocks(ds, play, parent_block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
    '''
    Given a list of mixed task/block data (parsed from YAML),
    return a list of Block() objects, where implicit blocks
    are created for each bare Task.
    '''

    # we import here to prevent a circular dependency with imports
    from ansible.playbook.block import Block

    assert isinstance(ds, (list, type(None)))

    block_list = []
    if ds:
        for block in ds:
            b = Block.load(
                block,
                play=play,
                parent_block=parent_block,
                role=role,
                task_include=task_include,
                use_handlers=use_handlers,
                variable_manager=variable_manager,
                loader=loader
            )

            # Implicit blocks are created by bare tasks listed in a play without
            # an explicit block statement. If we have two implicit blocks in a row,
            # squash them down to a single block to save processing time later.
            if b._implicit and len(block_list) > 0 and block_list[-1]._implicit:
                for t in b.block:
                    # reparent each task onto the block we are merging into
                    t._block = block_list[-1]
                block_list[-1].block.extend(b.block)
            else:
                block_list.append(b)

    return block_list
def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
    '''
    Given a list of task datastructures (parsed from YAML),
    return a list of Task() or TaskInclude() objects.
    '''

    # we import here to prevent a circular dependency with imports
    from ansible.playbook.block import Block
    from ansible.playbook.handler import Handler
    from ansible.playbook.task import Task

    assert isinstance(ds, list)

    task_list = []
    for task in ds:
        assert isinstance(task, dict)

        if 'block' in task:
            # A nested block definition: recurse via Block.load.
            t = Block.load(
                task,
                play=play,
                parent_block=block,
                role=role,
                task_include=task_include,
                use_handlers=use_handlers,
                variable_manager=variable_manager,
                loader=loader,
            )
        else:
            # Handlers are tasks with notification semantics.
            if use_handlers:
                t = Handler.load(task, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)
            else:
                t = Task.load(task, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)

        task_list.append(t)

    return task_list
def load_list_of_roles(ds, play, current_role_path=None, variable_manager=None, loader=None):
    '''
    Loads and returns a list of RoleInclude objects from the datastructure
    list of role definitions
    '''

    # imported here to avoid a circular dependency at module load time
    from ansible.playbook.role.include import RoleInclude

    assert isinstance(ds, list)

    return [
        RoleInclude.load(
            role_def,
            play=play,
            current_role_path=current_role_path,
            variable_manager=variable_manager,
            loader=loader,
        )
        for role_def in ds
    ]
| gpl-3.0 |
webmasterraj/FogOrNot | flask/lib/python2.7/site-packages/numpy/f2py/common_rules.py | 176 | 5030 | #!/usr/bin/env python
"""
Build common block mechanism for f2py2e.
Copyright 2000 Pearu Peterson all rights reserved,
Pearu Peterson <pearu@ioc.ee>
Permission to use, modify, and distribute this software is given under the
terms of the NumPy License
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
$Date: 2005/05/06 10:57:33 $
Pearu Peterson
"""
from __future__ import division, absolute_import, print_function
__version__ = "$Revision: 1.19 $"[10:-1]

# NOTE: the relative import below *rebinds* __version__ from the CVS revision
# string above to f2py's __version__ module; f2py_version holds the real value.
from . import __version__
f2py_version = __version__.version
from .auxfuncs import (
hasbody, hascommon, hasnote, isintent_hide, outmess
)
from . import capi_maps
from . import func2subr
from .crackfortran import rmbadname
def findcommonblocks(block, top=1):
    """Recursively collect COMMON blocks from a crackfortran block tree.

    Returns a list of (name, variable-name-list, vars-dict) tuples. At the
    top level (top=1) the list is de-duplicated by common-block name, with
    the first occurrence winning.
    """
    ret = []
    if hascommon(block):
        for n in block['common'].keys():
            vars = {}
            for v in block['common'][n]:
                vars[v] = block['vars'][v]
            ret.append((n, block['common'][n], vars))
    elif hasbody(block):
        for b in block['body']:
            ret = ret + findcommonblocks(b, 0)
    if top:
        # keep only the first occurrence of each named common block
        tret = []
        names = []
        for t in ret:
            if t[0] not in names:
                names.append(t[0])
                tret.append(t)
        return tret
    return ret
def buildhooks(m):
    """Generate C hooks, a Fortran init wrapper and LaTeX docs for the
    COMMON blocks found in module data `m`.

    Returns (ret, fwrap): `ret` is a dict carrying 'commonhooks',
    'initcommonhooks', 'docs' and 'latexdoc' text fragments; `fwrap` is the
    generated Fortran initialization subroutine source.
    """
    ret = {'commonhooks': [], 'initcommonhooks': [],
           'docs': ['"COMMON blocks:\\n"']}
    fwrap = ['']

    # Small helpers that append a line to the respective accumulated text:
    # fadd -> Fortran wrapper, cadd -> C hooks, iadd -> init hooks,
    # dadd -> LaTeX documentation.
    def fadd(line, s=fwrap):
        s[0] = '%s\n %s' % (s[0], line)
    chooks = ['']

    def cadd(line, s=chooks):
        s[0] = '%s\n%s' % (s[0], line)
    ihooks = ['']

    def iadd(line, s=ihooks):
        s[0] = '%s\n%s' % (s[0], line)
    doc = ['']

    def dadd(line, s=doc):
        s[0] = '%s\n%s' % (s[0], line)
    for (name, vnames, vars) in findcommonblocks(m):
        lower_name = name.lower()
        hnames, inames = [], []
        # Partition variables into hidden (intent(hide)) and interfaced ones.
        for n in vnames:
            if isintent_hide(vars[n]):
                hnames.append(n)
            else:
                inames.append(n)
        if hnames:
            outmess('\t\tConstructing COMMON block support for "%s"...\n\t\t %s\n\t\t Hidden: %s\n' % (
                name, ','.join(inames), ','.join(hnames)))
        else:
            outmess('\t\tConstructing COMMON block support for "%s"...\n\t\t %s\n' % (
                name, ','.join(inames)))
        # Fortran subroutine that passes the common-block members to a
        # setup callback provided from the C side.
        fadd('subroutine f2pyinit%s(setupfunc)' % name)
        fadd('external setupfunc')
        for n in vnames:
            fadd(func2subr.var2fixfortran(vars, n))
        if name == '_BLNK_':
            # _BLNK_ denotes the blank (unnamed) common block.
            fadd('common %s' % (','.join(vnames)))
        else:
            fadd('common /%s/ %s' % (name, ','.join(vnames)))
        fadd('call setupfunc(%s)' % (','.join(inames)))
        fadd('end\n')
        # C-side data descriptors for each interfaced member.
        cadd('static FortranDataDef f2py_%s_def[] = {' % (name))
        idims = []
        for n in inames:
            ct = capi_maps.getctype(vars[n])
            at = capi_maps.c2capi_map[ct]
            dm = capi_maps.getarrdims(n, vars[n])
            if dm['dims']:
                idims.append('(%s)' % (dm['dims']))
            else:
                idims.append('')
            dms = dm['dims'].strip()
            if not dms:
                dms = '-1'
            cadd('\t{\"%s\",%s,{{%s}},%s},' % (n, dm['rank'], dms, at))
        cadd('\t{NULL}\n};')
        inames1 = rmbadname(inames)
        inames1_tps = ','.join(['char *' + s for s in inames1])
        cadd('static void f2py_setup_%s(%s) {' % (name, inames1_tps))
        cadd('\tint i_f2py=0;')
        for n in inames1:
            cadd('\tf2py_%s_def[i_f2py++].data = %s;' % (name, n))
        cadd('}')
        # Names containing underscores need the alternate name-mangling macro.
        if '_' in lower_name:
            F_FUNC = 'F_FUNC_US'
        else:
            F_FUNC = 'F_FUNC'
        cadd('extern void %s(f2pyinit%s,F2PYINIT%s)(void(*)(%s));'
             % (F_FUNC, lower_name, name.upper(),
                ','.join(['char*'] * len(inames1))))
        cadd('static void f2py_init_%s(void) {' % name)
        cadd('\t%s(f2pyinit%s,F2PYINIT%s)(f2py_setup_%s);'
             % (F_FUNC, lower_name, name.upper(), name))
        cadd('}\n')
        iadd('\tF2PyDict_SetItemString(d, \"%s\", PyFortranObject_New(f2py_%s_def,f2py_init_%s));' % (
            name, name, name))
        # LaTeX documentation section for this common block.
        tname = name.replace('_', '\\_')
        dadd('\\subsection{Common block \\texttt{%s}}\n' % (tname))
        dadd('\\begin{description}')
        for n in inames:
            dadd('\\item[]{{}\\verb@%s@{}}' %
                 (capi_maps.getarrdocsign(n, vars[n])))
            if hasnote(vars[n]):
                note = vars[n]['note']
                if isinstance(note, list):
                    note = '\n'.join(note)
                dadd('--- %s' % (note))
        dadd('\\end{description}')
        ret['docs'].append(
            '"\t/%s/ %s\\n"' % (name, ','.join(map(lambda v, d: v + d, inames, idims))))
    ret['commonhooks'] = chooks
    ret['initcommonhooks'] = ihooks
    ret['latexdoc'] = doc[0]
    if len(ret['docs']) <= 1:
        # Only the header line present: no common blocks were documented.
        ret['docs'] = ''
    return ret, fwrap[0]
| gpl-2.0 |
zzcclp/spark | python/pyspark/tests/test_install_spark.py | 23 | 4345 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import tempfile
import unittest
from pyspark.install import install_spark, DEFAULT_HADOOP, DEFAULT_HIVE, \
UNSUPPORTED_COMBINATIONS, checked_versions, checked_package_name
class SparkInstallationTestCase(unittest.TestCase):
    """Tests for pyspark.install: distribution download and version checking."""

    def test_install_spark(self):
        # Test only one case. Testing this is expensive because it needs to download
        # the Spark distribution.
        spark_version, hadoop_version, hive_version = checked_versions("3.0.1", "3.2", "2.3")

        with tempfile.TemporaryDirectory() as tmp_dir:
            install_spark(
                dest=tmp_dir,
                spark_version=spark_version,
                hadoop_version=hadoop_version,
                hive_version=hive_version)

            # Spot-check the layout of an unpacked Spark distribution.
            self.assertTrue(os.path.isdir("%s/jars" % tmp_dir))
            self.assertTrue(os.path.exists("%s/bin/spark-submit" % tmp_dir))
            self.assertTrue(os.path.exists("%s/RELEASE" % tmp_dir))

    def test_package_name(self):
        self.assertEqual(
            "spark-3.0.0-bin-hadoop3.2",
            checked_package_name("spark-3.0.0", "hadoop3.2", "hive2.3"))

    def test_checked_versions(self):
        test_version = "3.0.1"  # Just pick one version to test.

        # Positive test cases
        self.assertEqual(
            ("spark-3.0.0", "hadoop2.7", "hive2.3"),
            checked_versions("spark-3.0.0", "hadoop2.7", "hive2.3"))

        self.assertEqual(
            ("spark-3.0.0", "hadoop2.7", "hive2.3"),
            checked_versions("3.0.0", "2.7", "2.3"))

        self.assertEqual(
            ("spark-2.4.1", "without-hadoop", "hive2.3"),
            checked_versions("2.4.1", "without", "2.3"))

        self.assertEqual(
            ("spark-3.0.1", "without-hadoop", "hive2.3"),
            checked_versions("spark-3.0.1", "without-hadoop", "hive2.3"))

        # Negative test cases
        for (hadoop_version, hive_version) in UNSUPPORTED_COMBINATIONS:
            with self.assertRaisesRegex(RuntimeError, 'Hive.*should.*Hadoop'):
                checked_versions(
                    spark_version=test_version,
                    hadoop_version=hadoop_version,
                    hive_version=hive_version)

        with self.assertRaisesRegex(RuntimeError, "Spark version should start with 'spark-'"):
            checked_versions(
                spark_version="malformed",
                hadoop_version=DEFAULT_HADOOP,
                hive_version=DEFAULT_HIVE)

        with self.assertRaisesRegex(RuntimeError, "Spark distribution.*malformed.*"):
            checked_versions(
                spark_version=test_version,
                hadoop_version="malformed",
                hive_version=DEFAULT_HIVE)

        with self.assertRaisesRegex(RuntimeError, "Spark distribution.*malformed.*"):
            checked_versions(
                spark_version=test_version,
                hadoop_version=DEFAULT_HADOOP,
                hive_version="malformed")

        with self.assertRaisesRegex(RuntimeError, "Spark distribution of hive1.2 is not supported"):
            checked_versions(
                spark_version=test_version,
                hadoop_version="hadoop3.2",
                hive_version="hive1.2")
if __name__ == "__main__":
    from pyspark.tests.test_install_spark import *  # noqa: F401

    try:
        import xmlrunner  # type: ignore[import]
        testRunner = xmlrunner.XMLTestRunner(output='target/test-reports', verbosity=2)
    except ImportError:
        # Fall back to the default text test runner when xmlrunner is absent.
        testRunner = None
    unittest.main(testRunner=testRunner, verbosity=2)
| apache-2.0 |
alexryndin/ambari | ambari-common/src/main/python/resource_management/libraries/functions/packages_analyzer.py | 1 | 10470 | #!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import re
import sys
import logging
import subprocess
from threading import Thread
import threading
from ambari_commons import OSCheck, OSConst
from ambari_commons import shell
from resource_management.core.logger import Logger
from resource_management.core import shell as rmf_shell
from resource_management.core.exceptions import Fail
__all__ = ["installedPkgsByName", "allInstalledPackages", "allAvailablePackages", "nameMatch",
"getInstalledRepos", "getInstalledPkgsByRepo", "getInstalledPkgsByNames", "getPackageDetails"]
LIST_INSTALLED_PACKAGES_UBUNTU = "COLUMNS=9999 ; for i in $(dpkg -l |grep ^ii |awk -F' ' '{print $2}'); do apt-cache showpkg \"$i\"|head -3|grep -v '^Versions'| tr -d '()' | awk '{ print $1\" \"$2 }'|sed -e 's/^Package: //;' | paste -d ' ' - -; done"
LIST_AVAILABLE_PACKAGES_UBUNTU = "packages=`for i in $(ls -1 /var/lib/apt/lists | grep -v \"ubuntu.com\") ; do grep ^Package: /var/lib/apt/lists/$i | awk '{print $2}' ; done` ; for i in $packages; do apt-cache showpkg \"$i\"|head -3|grep -v '^Versions'| tr -d '()' | awk '{ print $1\" \"$2 }'|sed -e 's/^Package: //;' | paste -d ' ' - -; done"
# NOTE(review): this is the *root* logger; getLogger(__name__) is more
# conventional but changing it would alter which handlers receive records.
logger = logging.getLogger()

# default timeout for async invoked processes
TIMEOUT_SECONDS = 40
def _launch_subprocess(command):
    """Spawn `command` with piped stdout/stderr and return the Popen handle.

    A plain string is executed through the shell; a list/tuple is exec'd
    directly without shell interpretation.
    """
    use_shell = not isinstance(command, (list, tuple))
    return subprocess.Popen(command,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            shell=use_shell,
                            close_fds=True)
def subprocessWithTimeout(command):
    """Run `command` via _launch_subprocess with a watchdog that kills the
    whole process tree if it is still running after TIMEOUT_SECONDS.

    Returns a dict with keys 'out', 'err' and 'retCode'.
    """
    event = threading.Event()

    def watchdog_func(command):
        # Blocks until the timeout elapses or the main thread sets `event`
        # after communicate() returns.
        event.wait(TIMEOUT_SECONDS)
        if command.returncode is None:
            logger.error("Task timed out and will be killed")
            shell.kill_process_with_children(command.pid)
        pass

    osStat = _launch_subprocess(command)
    logger.debug("Launching watchdog thread")

    event.clear()

    thread = Thread(target=watchdog_func, args=(osStat, ))
    thread.start()

    out, err = osStat.communicate()
    result = {}
    result['out'] = out
    result['err'] = err
    result['retCode'] = osStat.returncode

    # Signal the watchdog that the process finished so it does not kill it.
    event.set()
    thread.join()
    return result
def installedPkgsByName(allInstalledPackages,
                        pkgName, installedPkgs):
    """
    Append to installedPkgs the names of all installed packages whose
    name starts with pkgName.

    allInstalledPackages: iterable of [name, version, repo] entries.
    pkgName: name prefix to match.
    installedPkgs: output list, mutated in place.
    """
    for item in allInstalledPackages:
        # str.startswith is the idiomatic (and cheaper) form of find(...) == 0
        if item[0].startswith(pkgName):
            installedPkgs.append(item[0])
def allInstalledPackages(allInstalledPackages):
    """
    All installed packages in system

    Dispatches to the zypper/yum/apt lister for the current OS family and
    appends [name, version, repo] entries to the passed-in list.
    """
    osType = OSCheck.get_os_family()  # NOTE(review): unused local, kept as-is

    if OSCheck.is_suse_family():
        return _lookUpZypperPackages(
            ["sudo", "zypper", "--no-gpg-checks", "search", "--installed-only", "--details"],
            allInstalledPackages)
    elif OSCheck.is_redhat_family():
        return _lookUpYumPackages(
            ["sudo", "yum", "list", "installed"],
            'Installed Packages',
            allInstalledPackages)
    elif OSCheck.is_ubuntu_family():
        return _lookUpAptPackages(
            LIST_INSTALLED_PACKAGES_UBUNTU,
            allInstalledPackages)
def allAvailablePackages(allAvailablePackages):
    """
    Append all packages available (but not installed) from the configured
    repos to the passed-in list, using the OS family's package manager.
    """
    osType = OSCheck.get_os_family()  # NOTE(review): unused local, kept as-is

    if OSCheck.is_suse_family():
        return _lookUpZypperPackages(
            ["sudo", "zypper", "--no-gpg-checks", "search", "--uninstalled-only", "--details"],
            allAvailablePackages)
    elif OSCheck.is_redhat_family():
        return _lookUpYumPackages(
            ["sudo", "yum", "list", "available"],
            'Available Packages',
            allAvailablePackages)
    elif OSCheck.is_ubuntu_family():
        return _lookUpAptPackages(
            LIST_AVAILABLE_PACKAGES_UBUNTU,
            allAvailablePackages)
# ToDo: add execution via sudo for ubuntu (currently Ubuntu is not supported)
def _lookUpAptPackages(command, allPackages):
    """
    Run an apt listing command and append its space-separated output rows
    to allPackages (mutated in place), one list per non-empty line.
    """
    try:
        result = subprocessWithTimeout(command)
        if 0 == result['retCode']:
            for x in result['out'].split('\n'):
                if x.strip():
                    allPackages.append(x.split(' '))

    except:
        # Bug fix: logger.error takes %-style lazy args; the previous call
        # passed the exception type as a stray positional argument against a
        # format string with no placeholder, producing a logging format error
        # instead of the intended message.
        logger.error("Unexpected error: %s", sys.exc_info()[0])
def _lookUpYumPackages(command, skipTill, allPackages):
    """
    Parse `yum list` output and append [name, version, repo] triples to
    allPackages (mutated in place).

    skipTill: header line (e.g. 'Installed Packages') after which data starts;
    if not found, parsing starts at line 3 as a fallback.
    """
    try:
        result = subprocessWithTimeout(command)
        if 0 == result['retCode']:
            lines = result['out'].split('\n')
            lines = [line.strip() for line in lines]
            items = []
            skipIndex = 3
            for index in range(len(lines)):
                if skipTill in lines[index]:
                    skipIndex = index + 1
                    break

            # yum wraps long rows; flattening all tokens and regrouping by 3
            # reassembles (name, version, repo) triples.
            for line in lines[skipIndex:]:
                items = items + line.strip(' \t\n\r').split()

            for i in range(0, len(items), 3):
                # strip the ".arch" suffix from the package name
                if '.' in items[i]:
                    items[i] = items[i][:items[i].rindex('.')]
                # "@repo" marks an installed-from repo; drop the marker
                if items[i + 2].find('@') == 0:
                    items[i + 2] = items[i + 2][1:]
                allPackages.append(items[i:i + 3])
    except:
        # Bug fix: pass the exception as a lazy %-format argument; previously
        # it was a stray positional arg with no placeholder in the message.
        logger.error("Unexpected error: %s", sys.exc_info()[0])
def _lookUpZypperPackages(command, allPackages):
    """
    Parse `zypper search --details` table output and append
    [name, version, repo] triples to allPackages (mutated in place).
    """
    try:
        result = subprocessWithTimeout(command)
        if 0 == result['retCode']:
            lines = [line.strip() for line in result['out'].split('\n')]
            # Data rows follow the header separator row ("--+--...").
            # Bug fix: skipIndex was previously uninitialized, so a missing
            # separator raised NameError that the bare except silently ate.
            skipIndex = None
            for index in range(len(lines)):
                if "--+--" in lines[index]:
                    skipIndex = index + 1
                    break

            if skipIndex is not None:
                for line in lines[skipIndex:]:
                    items = line.strip(' \t\n\r').split('|')
                    # table columns: index 1 = name, 3 = version, 5 = repository
                    allPackages.append([items[1].strip(), items[3].strip(), items[5].strip()])
    except:
        # Bug fix: pass the exception as a lazy %-format argument; previously
        # it was a stray positional arg with no placeholder in the message.
        logger.error("Unexpected error: %s", sys.exc_info()[0])
def nameMatch(lookupName, actualName):
    """
    Return True if any whitespace-separated token of actualName starts with
    lookupName, case-insensitively; False otherwise.
    """
    # Hoist the lower-casing of the needle out of the loop; startswith is the
    # idiomatic form of find(...) == 0.
    lookup = lookupName.lower()
    return any(token.lower().startswith(lookup)
               for token in actualName.strip().split())
def getInstalledRepos(hintPackages, allPackages, ignoreRepos, repoList):
    """
    Append to repoList the names of installed repos that provide any package
    matching one of the hintPackages regexps.

    Repos matching any entry of ignoreRepos (token-prefix match, see
    nameMatch) are skipped. hintPackages must be regexps.
    """
    discovered = []
    for pattern in hintPackages:
        for item in allPackages:
            # item is [name, version, repo]
            if re.match(pattern, item[0]) and item[2] not in discovered:
                discovered.append(item[2])

    for repo in discovered:
        if not any(nameMatch(ignored, repo) for ignored in ignoreRepos):
            repoList.append(repo)
def getInstalledPkgsByRepo(repos, ignorePackages, installedPackages):
    """
    Return the names of installed packages that came from any repo in repos,
    excluding those matching an entry of ignorePackages (see nameMatch).
    """
    from_repos = []
    for repo in repos:
        matching = [item[0] for item in installedPackages if repo == item[2]]
        # de-duplicate via set; note this does not preserve insertion order,
        # mirroring the original behavior
        from_repos = list(set(from_repos + matching))

    return [pkg for pkg in from_repos
            if not any(nameMatch(ignored, pkg) for ignored in ignorePackages)]
def getInstalledPkgsByNames(pkgNames, installedPackages):
    """
    Return all installed package names that start with any prefix listed in
    pkgNames (de-duplicated; ordering is unspecified due to set usage).
    """
    found = []
    for prefix in pkgNames:
        matches = []
        installedPkgsByName(installedPackages, prefix, matches)
        found = list(set(found + matches))
    return found
def getPackageDetails(installedPackages, foundPackages):
    """
    Return one {'name', 'version', 'repoName'} dict per entry of
    foundPackages, looked up in installedPackages ([name, version, repo]
    rows). A package with no matching row yields an empty dict, preserving
    positional correspondence with foundPackages.
    """
    details = []
    for pkg in foundPackages:
        detail = {}
        for row in installedPackages:
            if pkg == row[0]:
                detail['name'] = row[0]
                detail['version'] = row[1]
                detail['repoName'] = row[2]
        # appended unconditionally: an unmatched package contributes {}
        details.append(detail)
    return details
def getReposToRemove(repos, ignoreList):
    """
    Return the repos that do not match any entry of ignoreList
    (token-prefix match, see nameMatch).
    """
    reposToRemove = []
    for repo in repos:
        ignored = False
        for ignoreRepo in ignoreList:
            if nameMatch(ignoreRepo, repo):
                ignored = True
                # Fix: was `continue`, which pointlessly kept scanning the
                # rest of ignoreList after a match; result is unchanged.
                break
        if not ignored:
            reposToRemove.append(repo)
    return reposToRemove
def getInstalledPackageVersion(package_name):
    """
    Return the installed version string of package_name, via dpkg on Ubuntu
    or rpm elsewhere; the rpm query strips any ".elN" dist suffix with sed.
    """
    if OSCheck.is_ubuntu_family():
        code, out, err = rmf_shell.checked_call("dpkg -s {0} | grep Version | awk '{{print $2}}'".format(package_name), stderr=subprocess.PIPE)
    else:
        code, out, err = rmf_shell.checked_call("rpm -q --queryformat '%{{version}}-%{{release}}' {0} | sed -e 's/\.el[0-9]//g'".format(package_name), stderr=subprocess.PIPE)

    return out
def verifyDependencies():
    """
    Verify that we have no dependency issues in package manager. Dependency issues could appear because of aborted or terminated
    package installation process or invalid packages state after manual modification of packages list on the host
    :return True if no dependency issues found, False if dependency issue present
    :rtype bool
    """
    check_str = None
    cmd = None

    if OSCheck.is_redhat_family():
        cmd = ['/usr/bin/yum', '-d', '0', '-e', '0', 'check', 'dependencies']
        check_str = "has missing requires|Error:"
    elif OSCheck.is_suse_family():
        cmd = ['/usr/bin/zypper', '--quiet', '--non-interactive', 'verify', '--dry-run']
        # NOTE(review): \d in a non-raw string works, but a raw string would
        # be safer against future escape-sequence deprecations.
        check_str = "\d+ new package(s)? to install"
    elif OSCheck.is_ubuntu_family():
        cmd = ['/usr/bin/apt-get', '-qq', 'check']
        check_str = "has missing dependency|E:"

    if check_str is None or cmd is None:
        raise Fail("Unsupported OSFamily on the Agent Host")

    code, out = rmf_shell.checked_call(cmd, sudo=True)

    output_regex = re.compile(check_str)

    # Non-zero exit OR a problem pattern in the output counts as a failure.
    if code or (out and output_regex.search(out)):
        err_msg = Logger.filter_text("Failed to verify package dependencies. Execution of '%s' returned %s. %s" % (cmd, code, out))
        Logger.error(err_msg)
        return False

    return True
| apache-2.0 |
abhijo89/Django-facebook | docs/docs_env/Lib/encodings/hex_codec.py | 528 | 2309 | """ Python 'hex_codec' Codec - 2-digit hex content transfer encoding
Unlike most of the other codecs which target Unicode, this codec
will return Python string objects for both encode and decode.
Written by Marc-Andre Lemburg (mal@lemburg.com).
"""
import codecs, binascii
### Codec APIs
def hex_encode(input, errors='strict'):
    """ Encodes the object input and returns a tuple (output
        object, length consumed).

        errors defines the error handling to apply. It defaults to
        'strict' handling which is the only currently supported
        error handling for this codec.
    """
    assert errors == 'strict'
    # hexlify is binascii's alias for b2a_hex
    return (binascii.hexlify(input), len(input))
def hex_decode(input, errors='strict'):
    """ Decodes the object input and returns a tuple (output
        object, length consumed).

        input must be an object which provides the bf_getreadbuf
        buffer slot. Python strings, buffer objects and memory
        mapped files are examples of objects providing this slot.

        errors defines the error handling to apply. It defaults to
        'strict' handling which is the only currently supported
        error handling for this codec.
    """
    assert errors == 'strict'
    # unhexlify is binascii's alias for a2b_hex
    return (binascii.unhexlify(input), len(input))
class Codec(codecs.Codec):
    """Stateless codec interface delegating to the module-level functions."""

    def encode(self, input, errors='strict'):
        return hex_encode(input, errors)

    def decode(self, input, errors='strict'):
        return hex_decode(input, errors)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental hex encoder; the codec is stateless, so `final` is unused."""

    def encode(self, input, final=False):
        assert self.errors == 'strict'
        # hexlify is binascii's alias for b2a_hex
        return binascii.hexlify(input)
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental hex decoder; the codec is stateless, so `final` is unused."""

    def decode(self, input, final=False):
        assert self.errors == 'strict'
        # unhexlify is binascii's alias for a2b_hex
        return binascii.unhexlify(input)
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream-writing interface; inherits strict hex encoding from Codec.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Stream-reading interface; inherits strict hex decoding from Codec.
    pass
### encodings module API
def getregentry():
    # Hook called by the encodings package machinery when this codec is
    # looked up by name; bundles every codec entry point into one record.
    return codecs.CodecInfo(
        name='hex',
        encode=hex_encode,
        decode=hex_decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamwriter=StreamWriter,
        streamreader=StreamReader,
    )
| bsd-3-clause |
c0hen/django-venv | lib/python3.4/site-packages/django/core/management/commands/dumpdata.py | 48 | 9116 | import warnings
from collections import OrderedDict
from django.apps import apps
from django.core import serializers
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS, router
class ProxyModelWarning(Warning):
    # Emitted when a proxy model is skipped because its concrete base model
    # is not part of the dump (proxy models store no rows of their own).
    pass
class Command(BaseCommand):
    # Implements the `manage.py dumpdata` management command.
    help = (
        "Output the contents of the database as a fixture of the given format "
        "(using each model's default manager unless --all is specified)."
    )
    def add_arguments(self, parser):
        """Register all dumpdata command-line options on the parser."""
        parser.add_argument(
            'args', metavar='app_label[.ModelName]', nargs='*',
            help='Restricts dumped data to the specified app_label or app_label.ModelName.',
        )
        parser.add_argument(
            '--format', default='json', dest='format',
            help='Specifies the output serialization format for fixtures.',
        )
        parser.add_argument(
            '--indent', default=None, dest='indent', type=int,
            help='Specifies the indent level to use when pretty-printing output.',
        )
        parser.add_argument(
            '--database', action='store', dest='database',
            default=DEFAULT_DB_ALIAS,
            help='Nominates a specific database to dump fixtures from. '
            'Defaults to the "default" database.',
        )
        parser.add_argument(
            '-e', '--exclude', dest='exclude', action='append', default=[],
            help='An app_label or app_label.ModelName to exclude '
            '(use multiple --exclude to exclude multiple apps/models).',
        )
        parser.add_argument(
            '--natural-foreign', action='store_true', dest='use_natural_foreign_keys', default=False,
            help='Use natural foreign keys if they are available.',
        )
        parser.add_argument(
            '--natural-primary', action='store_true', dest='use_natural_primary_keys', default=False,
            help='Use natural primary keys if they are available.',
        )
        parser.add_argument(
            '-a', '--all', action='store_true', dest='use_base_manager', default=False,
            help="Use Django's base manager to dump all models stored in the database, "
            "including those that would otherwise be filtered or modified by a custom manager.",
        )
        parser.add_argument(
            '--pks', dest='primary_keys',
            help="Only dump objects with given primary keys. Accepts a comma-separated "
            "list of keys. This option only works when you specify one model.",
        )
        parser.add_argument(
            '-o', '--output', default=None, dest='output',
            help='Specifies file to which the output is written.'
        )
    def handle(self, *app_labels, **options):
        """Collect the requested models and serialize them to the output.

        Raises CommandError for unknown apps/models/excludes, for --pks used
        with anything other than exactly one model, or for an unknown
        serialization format.
        """
        format = options['format']
        indent = options['indent']
        using = options['database']
        excludes = options['exclude']
        output = options['output']
        show_traceback = options['traceback']
        use_natural_foreign_keys = options['use_natural_foreign_keys']
        use_natural_primary_keys = options['use_natural_primary_keys']
        use_base_manager = options['use_base_manager']
        pks = options['primary_keys']
        if pks:
            primary_keys = pks.split(',')
        else:
            primary_keys = []
        # Partition the --exclude values into excluded apps (bare labels)
        # and excluded models (dotted app_label.ModelName labels).
        excluded_apps = set()
        excluded_models = set()
        for exclude in excludes:
            if '.' in exclude:
                try:
                    model = apps.get_model(exclude)
                except LookupError:
                    raise CommandError('Unknown model in excludes: %s' % exclude)
                excluded_models.add(model)
            else:
                try:
                    app_config = apps.get_app_config(exclude)
                except LookupError as e:
                    raise CommandError(str(e))
                excluded_apps.add(app_config)
        if len(app_labels) == 0:
            # No positional labels given: dump every installed app that
            # actually has a models module and was not excluded.
            if primary_keys:
                raise CommandError("You can only use --pks option with one model")
            app_list = OrderedDict(
                (app_config, None) for app_config in apps.get_app_configs()
                if app_config.models_module is not None and app_config not in excluded_apps
            )
        else:
            if len(app_labels) > 1 and primary_keys:
                raise CommandError("You can only use --pks option with one model")
            # app_list maps app_config -> list of models, or None meaning
            # "all models of this app".
            app_list = OrderedDict()
            for label in app_labels:
                try:
                    app_label, model_label = label.split('.')
                    try:
                        app_config = apps.get_app_config(app_label)
                    except LookupError as e:
                        raise CommandError(str(e))
                    if app_config.models_module is None or app_config in excluded_apps:
                        continue
                    try:
                        model = app_config.get_model(model_label)
                    except LookupError:
                        raise CommandError("Unknown model: %s.%s" % (app_label, model_label))
                    app_list_value = app_list.setdefault(app_config, [])
                    # We may have previously seen a "all-models" request for
                    # this app (no model qualifier was given). In this case
                    # there is no need adding specific models to the list.
                    if app_list_value is not None:
                        if model not in app_list_value:
                            app_list_value.append(model)
                except ValueError:
                    # label.split('.') failed: the label names a whole app.
                    if primary_keys:
                        raise CommandError("You can only use --pks option with one model")
                    # This is just an app - no model qualifier
                    app_label = label
                    try:
                        app_config = apps.get_app_config(app_label)
                    except LookupError as e:
                        raise CommandError(str(e))
                    if app_config.models_module is None or app_config in excluded_apps:
                        continue
                    app_list[app_config] = None
        # Check that the serialization format exists; this is a shortcut to
        # avoid collating all the objects and _then_ failing.
        if format not in serializers.get_public_serializer_formats():
            try:
                serializers.get_serializer(format)
            except serializers.SerializerDoesNotExist:
                pass
            raise CommandError("Unknown serialization format: %s" % format)
        def get_objects(count_only=False):
            """
            Collate the objects to be serialized. If count_only is True, just
            count the number of objects to be serialized.
            """
            # sort_dependencies orders models so natural-key references are
            # serialized before the objects that point at them.
            models = serializers.sort_dependencies(app_list.items())
            for model in models:
                if model in excluded_models:
                    continue
                if model._meta.proxy and model._meta.proxy_for_model not in models:
                    warnings.warn(
                        "%s is a proxy model and won't be serialized." % model._meta.label,
                        category=ProxyModelWarning,
                    )
                if not model._meta.proxy and router.allow_migrate_model(using, model):
                    if use_base_manager:
                        objects = model._base_manager
                    else:
                        objects = model._default_manager
                    queryset = objects.using(using).order_by(model._meta.pk.name)
                    if primary_keys:
                        queryset = queryset.filter(pk__in=primary_keys)
                    if count_only:
                        yield queryset.order_by().count()
                    else:
                        for obj in queryset.iterator():
                            yield obj
        try:
            # Let the serializer control line endings on stdout.
            self.stdout.ending = None
            progress_output = None
            object_count = 0
            # If dumpdata is outputting to stdout, there is no way to display progress
            if (output and self.stdout.isatty() and options['verbosity'] > 0):
                progress_output = self.stdout
                object_count = sum(get_objects(count_only=True))
            stream = open(output, 'w') if output else None
            try:
                serializers.serialize(
                    format, get_objects(), indent=indent,
                    use_natural_foreign_keys=use_natural_foreign_keys,
                    use_natural_primary_keys=use_natural_primary_keys,
                    stream=stream or self.stdout, progress_output=progress_output,
                    object_count=object_count,
                )
            finally:
                if stream:
                    stream.close()
        except Exception as e:
            if show_traceback:
                raise
            raise CommandError("Unable to serialize database: %s" % e)
| gpl-3.0 |
GabrielFortin/ansible-module-f5bigip | library/f5bigip_sys_httpd.py | 2 | 9725 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2016-2018, Eric Jacob <erjac77@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: f5bigip_sys_httpd
short_description: BIG-IP sys httpd module
description:
- Configures the HTTP daemon for the BIG-IP system.
version_added: "2.4"
author:
- "Eric Jacob (@erjac77)"
options:
allow:
description:
- Configures IP addresses and hostnames for the HTTP clients from which the httpd daemon accepts requests.
default: all
auth_name:
description:
- Specifies the name for the authentication realm.
default: BIG-IP
auth_pam_dashboard_timeout:
description:
- Specifies whether idle timeout while viewing the dashboard is enforced or not.
default: off
choices: ['on', 'off']
auth_pam_idle_timeout:
description:
- Specifies the number of seconds of inactivity that can elapse before the GUI session is automatically
logged out.
default: 1200
auth_pam_validate_ip:
description:
- Specifies whether the check for consistent inbound IP for the entire web session is enforced or not.
default: on
choices: ['on', 'off']
description:
description:
- Specifies descriptive text that identifies the component.
    fastcgi_timeout:
description:
- Specifies, in seconds, the timeout for FastCGI.
default: 300
hostname_lookup:
description:
- Specifies whether to lookup hostname or not.
default: off
choices: ['on', 'off']
log_level:
description:
- Specifies the minimum httpd message level to include in the system log.
default: warn
choices: ['alert', 'crit', 'debug', 'emerg', 'err', 'info', 'notice', 'warning']
redirect_http_to_https:
description:
- Specifies whether the system should redirect HTTP requests targeted at the configuration utility to HTTPS.
default: disabled
choices: ['enabled', 'disabled']
request_header_max_timeout:
description:
- Specifies, in seconds, the maximum time allowed to receive all of the .request headers
default: 40
request_header_min_rate:
description:
- Specifies, in bytes per second, the minimum average rate at which the request headers must be received.
default: 500
request_header_timeout:
description:
- Specifies, in seconds, the time allowed to receive all of the request headers.
default: 20
request_body_max_timeout:
description:
- Specifies, in seconds, the maximum time allowed to receive all of the request body.
default: 0 (no limit)
request_body_min_rate:
description:
- Specifies, in bytes per second, the minimum average rate at which the request body must be received.
default: 500
request_body_timeout:
description:
- Specifies, in seconds, the time allowed for reading all of the request body.
default: 60
ssl_ca_cert_file:
description:
- Specifies the name of the file that contains the SSL Certificate Authority (CA) certificate file.
ssl_certchainfile:
description:
- Specifies the name of the file that contains the SSL certificate chain.
ssl_certfile:
description:
- Specifies the name of the file that contains the SSL certificate.
default: /etc/httpd/conf/ssl.crt/server.crt
ssl_certkeyfile:
description:
- Specifies the name of the file that contains the SSL certificate key.
default: /etc/httpd/conf/ssl.key/server.key
ssl_ciphersuite:
description:
- Specifies the ciphers that the system uses.
default: 'ALL:!ADH:!EXPORT:!eNULL:!MD5:!DES:RC4+RSA:+HIGH:+MEDIUM:+LOW:+SSLv2'
ssl_include:
description:
- TODO
ssl_ocsp_default_responder:
description:
- Specifies the default responder URI for OCSP validation.
default: http://localhost.localdomain
ssl_ocsp_enable:
description:
- Specifies OCSP validation of the client certificate chain.
default: off
choices: ['on', 'off']
ssl_ocsp_override_responder:
description:
- Specifies the force use of default responder URI for OCSP validation.
default: off
choices: ['on', 'off']
ssl_ocsp_responder_timeout:
description:
- Specifies the maximum allowable time in seconds for OCSP response.
default: 300
ssl_ocsp_response_max_age:
description:
- Specifies the maximum allowable age ("freshness") for OCSP responses.
default: -1
ssl_ocsp_response_time_skew:
description:
- Specifies the maximum allowable time skew in seconds for OCSP response validation.
default: 300
ssl_protocol:
description:
- The list of SSL protocols to accept on the management console.
default: all -SSLv2
ssl_verify_client:
description:
- Specifies if the client certificate needs to be verified for SSL session establishment.
default: no
choices: ['yes', 'no']
ssl_verify_depth:
description:
- Specifies maximum depth of CA certificates in client certificate verification.
default: 10
requirements:
- BIG-IP >= 12.0
- ansible-common-f5
- f5-sdk
'''
EXAMPLES = '''
- name: Set SYS HTTPD allow clients
f5bigip_sys_httpd:
f5_hostname: 172.16.227.35
f5_username: admin
f5_password: admin
f5_port: 443
allow:
- 172.16.227.0/24
- 10.0.0.0/8
delegate_to: localhost
- name: Reset SYS HTTPD allow clients
f5bigip_sys_httpd:
f5_hostname: 172.16.227.35
f5_username: admin
f5_password: admin
f5_port: 443
allow:
- ALL
- 127.
delegate_to: localhost
'''
RETURN = ''' # '''
from ansible.module_utils.basic import AnsibleModule
from ansible_common_f5.base import F5_ACTIVATION_CHOICES
from ansible_common_f5.base import F5_PROVIDER_ARGS
from ansible_common_f5.base import F5_SEVERITY_CHOICES
from ansible_common_f5.base import F5_SWITCH_CHOICES
from ansible_common_f5.bigip import F5BigIpUnnamedObject
class ModuleParams(object):
    """Describes the Ansible argument spec for the sys httpd module."""
    @property
    def argument_spec(self):
        # Mirrors the tmsh 'sys httpd' options; the common F5 connection
        # parameters (F5_PROVIDER_ARGS) are merged in at the end.
        argument_spec = dict(
            allow=dict(type='list'),
            auth_name=dict(type='str'),
            auth_pam_dashboard_timeout=dict(type='str', choices=F5_SWITCH_CHOICES),
            auth_pam_idle_timeout=dict(type='int'),
            auth_pam_validate_ip=dict(type='str', choices=F5_SWITCH_CHOICES),
            description=dict(type='str'),
            fastcgi_timeout=dict(type='int'),
            hostname_lookup=dict(type='str', choices=F5_SWITCH_CHOICES),
            log_level=dict(type='str', choices=F5_SEVERITY_CHOICES),
            redirect_http_to_https=dict(type='str', choices=F5_ACTIVATION_CHOICES),
            request_header_max_timeout=dict(type='int'),
            request_header_min_rate=dict(type='int'),
            request_header_timeout=dict(type='int'),
            request_body_max_timeout=dict(type='int'),
            request_body_min_rate=dict(type='int'),
            request_body_timeout=dict(type='int'),
            ssl_ca_cert_file=dict(type='str'),
            ssl_certchainfile=dict(type='str'),
            ssl_certfile=dict(type='str'),
            ssl_certkeyfile=dict(type='str'),
            ssl_ciphersuite=dict(type='str'),
            ssl_include=dict(type='str'),
            # NOTE(review): these choices look like Apache SSLVerifyClient
            # values; the DOCUMENTATION block lists 'on'/'off' for
            # ssl_ocsp_enable and 'yes'/'no' for ssl_verify_client --
            # confirm which option these choices actually belong to.
            ssl_ocsp_enable=dict(type='str', choices=['no', 'require', 'optional', 'optional-no-ca']),
            ssl_ocsp_default_responder=dict(type='str'),
            ssl_ocsp_override_responder=dict(type='str', choices=F5_SWITCH_CHOICES),
            ssl_ocsp_responder_timeout=dict(type='int'),
            ssl_ocsp_response_max_age=dict(type='int'),
            ssl_ocsp_response_time_skew=dict(type='int'),
            ssl_protocol=dict(type='str'),
            ssl_verify_client=dict(type='str'),
            ssl_verify_depth=dict(type='int')
        )
        argument_spec.update(F5_PROVIDER_ARGS)
        return argument_spec
    @property
    def supports_check_mode(self):
        # This module supports Ansible check mode.
        return True
class F5BigIpSysHttpd(F5BigIpUnnamedObject):
    """Wraps the BIG-IP iControl REST 'sys httpd' unnamed (singleton) resource."""
    def _set_crud_methods(self):
        # Only read/update are wired up: httpd is a singleton resource that
        # is never created or deleted via this module.
        self._methods = {
            'read': self._api.tm.sys.httpd.load,
            'update': self._api.tm.sys.httpd.update
        }
def main():
    """Module entry point: build the AnsibleModule and apply the httpd config."""
    module_params = ModuleParams()
    module = AnsibleModule(
        argument_spec=module_params.argument_spec,
        supports_check_mode=module_params.supports_check_mode,
    )
    try:
        httpd = F5BigIpSysHttpd(check_mode=module.check_mode, **module.params)
        module.exit_json(**httpd.flush())
    except Exception as exc:
        module.fail_json(msg=str(exc))


if __name__ == '__main__':
    main()
| apache-2.0 |
jettisonjoe/openhtf | openhtf/util/timeouts.py | 2 | 16019 | # Copyright 2014 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A simple utility to do timeout checking."""
import contextlib
import functools
import logging
import threading
import time
_LOG = logging.getLogger(__name__)
class PolledTimeout(object):
  """Tracks whether a deadline has passed.

  The deadline is measured from construction (or the most recent restart())
  and is expressed in seconds.  A timeout of None never expires.
  """

  def __init__(self, timeout_s):
    """Create a timeout expiring timeout_s seconds from now.

    Args:
      timeout_s: Number of seconds after which this object is considered
        expired, or None for a timeout that never expires.
    """
    self.start = time.time()
    self.timeout_s = timeout_s

  @classmethod
  def from_millis(cls, timeout_ms):
    """Coerce timeout_ms into a PolledTimeout.

    Args:
      timeout_ms: An existing PolledTimeout (returned unchanged), None, or
        a number of milliseconds for a newly allocated timeout.

    Returns:
      A PolledTimeout expiring in timeout_ms milliseconds (or never, for
      None); possibly timeout_ms itself.
    """
    if hasattr(timeout_ms, 'has_expired'):
      # Duck-typed check: already a PolledTimeout-like object.
      return timeout_ms
    return cls(None) if timeout_ms is None else cls(timeout_ms / 1000.0)

  @classmethod
  def from_seconds(cls, timeout_s):
    """Coerce timeout_s (PolledTimeout, None, or seconds) into a PolledTimeout."""
    return timeout_s if hasattr(timeout_s, 'has_expired') else cls(timeout_s)

  def restart(self):
    """Reset the deadline to be measured from right now."""
    self.start = time.time()

  def expire(self):
    """Force this timeout into the expired state immediately."""
    self.timeout_s = 0

  def has_expired(self):
    """Return True if the deadline has passed (never, for a None timeout)."""
    return self.timeout_s is not None and self.seconds >= self.timeout_s

  # Legacy alias kept for callers still using the older name.
  Poll = has_expired  # pylint: disable=invalid-name

  @property
  def seconds(self):
    """Seconds elapsed since construction (or the most recent restart)."""
    return time.time() - self.start

  @property
  def remaining(self):
    """Seconds left before expiry (never negative), or None if unbounded."""
    if self.timeout_s is None:
      return None
    # Clamp at 0 so callers can sleep/wait on this value without racing
    # has_expired() into a (slightly) negative sleep.
    return max(0, self.timeout_s - self.seconds)

  @property
  def remaining_ms(self):
    """Milliseconds left before expiry, or None if unbounded."""
    return None if self.timeout_s is None else self.remaining * 1000
# pylint: disable=missing-docstring
# There's now no way to tell if a timeout occurred generically
# which sort of sucks (for generic validation fn)
def loop_until_timeout_or_valid(timeout_s, function, validation_fn, sleep_s=1):  # pylint: disable=invalid-name
  """Call `function` until `validation_fn` accepts its result or time runs out.

  `function` is always invoked at least once, even if the timeout has
  already expired.  Any result that `validation_fn` maps to a truthy value
  stops the loop.

  Args:
    timeout_s: Seconds before giving up; None means wait forever.  May also
      be an existing PolledTimeout object.
    function: Zero-argument callable invoked each iteration.
    validation_fn: Predicate applied to each result to decide completion.
    sleep_s: Seconds slept between invocations.

  Returns:
    The last value returned by `function`.
  """
  deadline = timeout_s
  if deadline is None or not hasattr(deadline, 'has_expired'):
    deadline = PolledTimeout(timeout_s)
  # Guaranteed first call, regardless of the deadline.
  result = function()
  while not validation_fn(result) and not deadline.has_expired():
    time.sleep(sleep_s)
    result = function()
  return result
def loop_until_timeout_or_true(timeout_s, function, sleep_s=1):  # pylint: disable=invalid-name
  """Repeatedly call `function` until it returns a truthy value or time runs out.

  `function` is always invoked at least once regardless of the timeout.

  Args:
    timeout_s: Seconds to wait before giving up (None: never), or a
      PolledTimeout object.
    function: Zero-argument callable invoked each iteration.
    sleep_s: Seconds slept between invocations.

  Returns:
    The last value returned by `function`.
  """
  # bool() as the validator: loop while the result is falsey.
  return loop_until_timeout_or_valid(timeout_s, function, bool, sleep_s)
def loop_until_timeout_or_not_none(timeout_s, function, sleep_s=1):  # pylint: disable=invalid-name
  """Repeatedly call `function` until it returns non-None or time runs out.

  Args:
    timeout_s: Seconds to wait before giving up (None: never), or a
      PolledTimeout object.
    function: Zero-argument callable invoked each iteration.
    sleep_s: Seconds slept between invocations.

  Returns:
    The last value returned by `function`.
  """
  def _is_not_none(result):
    return result is not None
  return loop_until_timeout_or_valid(timeout_s, function, _is_not_none, sleep_s)
def loop_until_true_else_raise(timeout_s,
                               function,
                               invert=False,
                               message=None,
                               sleep_s=1):
  """Repeatedly call the given function until truthy, or raise on a timeout.

  Args:
    timeout_s: The number of seconds to wait until a timeout condition is
      reached. As a convenience, this accepts None to mean never timeout. Can
      also be passed a PolledTimeout object instead of an integer.
    function: The function to call each iteration.
    invert: If True, wait for the callable to return falsey instead of truthy.
    message: Optional custom error message to use on a timeout.
    sleep_s: Seconds to sleep between call attempts.

  Returns:
    The final return value of the function.

  Raises:
    RuntimeError: if the timeout is reached before the function returns a
      value validate() accepts.
  """
  def validate(x):
    return bool(x) != invert
  # Bug fix: the caller-supplied sleep_s was previously ignored; a
  # hard-coded 1-second sleep was always used here.
  result = loop_until_timeout_or_valid(
      timeout_s, function, validate, sleep_s=sleep_s)
  if validate(result):
    return result
  if message is not None:
    raise RuntimeError(message)
  # Build a best-effort name for the error message; functools.partial
  # objects have no __name__, so look through to the wrapped function.
  name = '(unknown)'
  if hasattr(function, '__name__'):
    name = function.__name__
  elif (isinstance(function, functools.partial)
        and hasattr(function.func, '__name__')):
    name = function.func.__name__
  raise RuntimeError(
      'Function %s failed to return %s within %d seconds.'
      % (name, 'falsey' if invert else 'truthy', timeout_s))
class Interval(object):
  """Executes a method repeatedly on an interval, in a daemon thread."""

  def __init__(self, method, stop_if_false=False):
    """Initializes the Interval.

    Args:
      method: A callable to execute, it should take no arguments.
      stop_if_false: If True, the interval will exit if the method returns
        False.
    """
    self.method = method
    self.stopped = threading.Event()
    self.thread = None
    self.stop_if_false = stop_if_false

  @property
  def running(self):
    """True while the worker thread exists and is still executing."""
    if self.thread:
      # Fix: Thread.isAlive() was removed in Python 3.9; is_alive() is the
      # long-standing spelling (already used elsewhere in this module).
      return self.thread.is_alive()
    return False

  def start(self, interval_s):
    """Starts executing the method at the specified interval.

    Args:
      interval_s: The amount of time between executions of the method.

    Returns:
      False if the interval was already running.
    """
    if self.running:
      return False
    self.stopped.clear()
    def _execute():
      # Always execute immediately once
      if not self.method() and self.stop_if_false:
        return
      # Event.wait doubles as the inter-execution sleep and the stop signal:
      # it returns True (ending the loop) as soon as stop() sets the event.
      while not self.stopped.wait(interval_s):
        if not self.method() and self.stop_if_false:
          return
    self.thread = threading.Thread(target=_execute)
    self.thread.daemon = True
    self.thread.start()
    return True

  def stop(self, timeout_s=None):
    """Stops the interval.

    If a timeout is provided and stop returns False then the thread is
    effectively abandoned in whatever state it was in (presumably dead-locked).

    Args:
      timeout_s: The time in seconds to wait on the thread to finish.  By
        default it's forever.

    Returns:
      False if a timeout was provided and we timed out.
    """
    self.stopped.set()
    if self.thread:
      self.thread.join(timeout_s)
      return not self.thread.is_alive()
    else:
      return True

  def join(self, timeout_s=None):
    """Joins blocking until the interval ends or until timeout is reached.

    Args:
      timeout_s: The time in seconds to wait, defaults to forever.

    Returns:
      True if the interval is still running and we reached the timeout.
    """
    if not self.thread:
      return False
    self.thread.join(timeout_s)
    return self.running
def execute_forever(method, interval_s):  # pylint: disable=invalid-name
  """Run `method` repeatedly, waiting interval_s seconds between runs.

  Args:
    method: The callable to execute.
    interval_s: Seconds to wait after each invocation completes.

  Returns:
    The started Interval object (use its stop() method to cancel).
  """
  runner = Interval(method)
  runner.start(interval_s)
  return runner
def execute_until_false(method, interval_s):  # pylint: disable=invalid-name
  """Run `method` repeatedly until it returns a falsey value.

  Args:
    method: The callable to execute.
    interval_s: Seconds to wait after each invocation completes.

  Returns:
    The started Interval object (its thread exits on the first falsey
    return from `method`).
  """
  runner = Interval(method, stop_if_false=True)
  runner.start(interval_s)
  return runner
# pylint: disable=invalid-name
def retry_until_true_or_limit_reached(method, limit, sleep_s=1,
                                      catch_exceptions=()):
  """Call `method` until it returns a truthy value or `limit` calls are made."""
  # bool() as the validator: retry while the result is falsey.
  return retry_until_valid_or_limit_reached(
      method, limit, bool, sleep_s, catch_exceptions)
def retry_until_not_none_or_limit_reached(method, limit, sleep_s=1,
                                          catch_exceptions=()):
  """Call `method` until it returns non-None or `limit` calls are made."""
  def _is_not_none(result):
    return result is not None
  return retry_until_valid_or_limit_reached(
      method, limit, _is_not_none, sleep_s, catch_exceptions)
def retry_until_valid_or_limit_reached(method, limit, validation_fn, sleep_s=1,
                                       catch_exceptions=()):
  """Call `method` until `validation_fn` passes or `limit` calls are made.

  The method is always invoked at least once, so the effective floor for
  `limit` is 1 even if a smaller number is passed.

  Args:
    method: Zero-argument callable to invoke.
    limit: Maximum number of invocations; must be > 0.
    validation_fn: Predicate deciding whether the latest result is
      acceptable (truthy return ends the loop).
    sleep_s: Seconds slept between invocations.
    catch_exceptions: Tuple of exception types counted as ordinary failures.

  Returns:
    The last value `method` returned; an implicitly false result means it
    never produced an acceptable value.
  """
  assert limit > 0, 'Limit must be greater than 0'
  retries = RetryHelper(limit - 1)

  def _attempt():
    # Exceptions listed in catch_exceptions count as failed attempts, but
    # the final attempt re-raises instead of masking the error.
    try:
      return method()
    except catch_exceptions:
      if not retries.remaining:
        raise
      return None

  result = _attempt()
  while not validation_fn(result) and retries.retry_if_possible():
    time.sleep(sleep_s)
    result = _attempt()
  return result
# pylint: disable=invalid-name
@contextlib.contextmanager
def take_at_least_n_seconds(time_s):
  """Context manager guaranteeing its block lasts at least time_s seconds.

  If the body finishes early, __exit__ sleeps out the remainder.

  Example:
    with take_at_least_n_seconds(5):
      do.Something()
      do.SomethingElse()
    # if Something and SomethingElse took 3 seconds, the with block sleeps
    # for 2 more seconds before exiting.

  Args:
    time_s: The minimum number of seconds the with-block should take.

  Yields:
    Control to the with-block body; sleeps out any remaining time on exit.
  """
  deadline = PolledTimeout(time_s)
  yield
  while not deadline.has_expired():
    time.sleep(deadline.remaining)
def take_at_most_n_seconds(time_s, func, *args, **kwargs):
  """Report whether func(*args, **kwargs) finished within time_s seconds.

  NOTE: The call is not killed on timeout; a hung func keeps running in its
  background thread indefinitely.

  Args:
    time_s: Maximum number of seconds to wait for the call.
    func: Function to call.
    *args: Positional arguments for func.
    **kwargs: Keyword arguments for func.

  Returns:
    True if the call completed in under time_s seconds, False otherwise.
  """
  worker = threading.Thread(target=func, args=args, kwargs=kwargs)
  worker.start()
  worker.join(time_s)
  return not worker.is_alive()
def execute_after_delay(time_s, func, *args, **kwargs):
  """Schedule func(*args, **kwargs) on a background thread after a delay.

  Returns immediately.  Exceptions raised by func are logged and swallowed.
  If time_s is a PolledTimeout that never expires, nothing is scheduled and
  this function simply returns.

  Args:
    time_s: Delay in seconds before executing func, or a PolledTimeout
      object.
    func: Function to call.
    *args: Positional arguments for func.
    **kwargs: Keyword arguments for func.
  """
  timeout = PolledTimeout.from_seconds(time_s)

  def _delayed_call():
    time.sleep(timeout.remaining)
    try:
      func(*args, **kwargs)
    except Exception:  # pylint: disable=broad-except
      _LOG.exception('Error executing %s after %s expires.', func, timeout)

  if timeout.remaining is not None:
    threading.Thread(target=_delayed_call).start()
class RetryHelper(object):
  """Tracks a retry budget to simplify retry loops.

  Attributes:
    remaining: How many retries are still available.
  """

  def __init__(self, retries):
    """Initializes this object.

    Args:
      retries: Total number of retries to allow.
    """
    self.remaining = retries

  def retry_if_possible(self):
    """Consume one retry from the budget.

    Returns:
      True if a retry was available and you should proceed; False once the
      budget is exhausted.
    """
    self.remaining -= 1
    return self.remaining >= 0
| apache-2.0 |
glorizen/nupic | external/linux32/lib/python2.6/site-packages/matplotlib/dviread.py | 69 | 29920 | """
An experimental module for reading dvi files output by TeX. Several
limitations make this not (currently) useful as a general-purpose dvi
preprocessor.
Interface::
dvi = Dvi(filename, 72)
for page in dvi: # iterate over pages
w, h, d = page.width, page.height, page.descent
for x,y,font,glyph,width in page.text:
fontname = font.texname
pointsize = font.size
...
for x,y,height,width in page.boxes:
...
"""
import errno
import matplotlib
import matplotlib.cbook as mpl_cbook
import numpy as np
import struct
import subprocess
# Parser states for the DVI state machine: preamble, between pages, inside
# a page, after the post-postamble, and fully finished.
_dvistate = mpl_cbook.Bunch(pre=0, outer=1, inpage=2, post_post=3, finale=4)
class Dvi(object):
"""
A dvi ("device-independent") file, as produced by TeX.
The current implementation only reads the first page and does not
even attempt to verify the postamble.
"""
def __init__(self, filename, dpi):
"""
Initialize the object. This takes the filename as input and
opens the file; actually reading the file happens when
iterating through the pages of the file.
"""
matplotlib.verbose.report('Dvi: ' + filename, 'debug')
self.file = open(filename, 'rb')
self.dpi = dpi
self.fonts = {}
self.state = _dvistate.pre
def __iter__(self):
"""
Iterate through the pages of the file.
Returns (text, pages) pairs, where:
text is a list of (x, y, fontnum, glyphnum, width) tuples
boxes is a list of (x, y, height, width) tuples
The coordinates are transformed into a standard Cartesian
coordinate system at the dpi value given when initializing.
The coordinates are floating point numbers, but otherwise
precision is not lost and coordinate values are not clipped to
integers.
"""
while True:
have_page = self._read()
if have_page:
yield self._output()
else:
break
def close(self):
"""
Close the underlying file if it is open.
"""
if not self.file.closed:
self.file.close()
def _output(self):
"""
Output the text and boxes belonging to the most recent page.
page = dvi._output()
"""
minx, miny, maxx, maxy = np.inf, np.inf, -np.inf, -np.inf
maxy_pure = -np.inf
for elt in self.text + self.boxes:
if len(elt) == 4: # box
x,y,h,w = elt
e = 0 # zero depth
else: # glyph
x,y,font,g,w = elt
h = _mul2012(font._scale, font._tfm.height[g])
e = _mul2012(font._scale, font._tfm.depth[g])
minx = min(minx, x)
miny = min(miny, y - h)
maxx = max(maxx, x + w)
maxy = max(maxy, y + e)
maxy_pure = max(maxy_pure, y)
if self.dpi is None:
# special case for ease of debugging: output raw dvi coordinates
return mpl_cbook.Bunch(text=self.text, boxes=self.boxes,
width=maxx-minx, height=maxy_pure-miny,
descent=maxy-maxy_pure)
d = self.dpi / (72.27 * 2**16) # from TeX's "scaled points" to dpi units
text = [ ((x-minx)*d, (maxy-y)*d, f, g, w*d)
for (x,y,f,g,w) in self.text ]
boxes = [ ((x-minx)*d, (maxy-y)*d, h*d, w*d) for (x,y,h,w) in self.boxes ]
return mpl_cbook.Bunch(text=text, boxes=boxes,
width=(maxx-minx)*d,
height=(maxy_pure-miny)*d,
descent=(maxy-maxy_pure)*d)
def _read(self):
    """
    Read one page from the file. Return True if successful,
    False if there were no more pages.
    """
    # Dispatch opcodes one byte at a time until either an end-of-page
    # opcode (140) is seen, or the postamble has put the state machine
    # into post_post, which means the end of the file.
    while True:
        byte = ord(self.file.read(1))
        self._dispatch(byte)
        # if self.state == _dvistate.inpage:
        #     matplotlib.verbose.report(
        #         'Dvi._read: after %d at %f,%f' %
        #         (byte, self.h, self.v),
        #         'debug-annoying')
        if byte == 140: # end of page
            return True
        if self.state == _dvistate.post_post: # end of file
            self.close()
            return False
def _arg(self, nbytes, signed=False):
    """
    Read and return a big-endian integer argument *nbytes* long from
    the dvi file. Signedness is determined by the *signed* keyword:
    two's complement when True, unsigned otherwise.
    """
    # Renamed the buffer from "str" so it no longer shadows the
    # builtin; slicing (rather than indexing) yields a length-one
    # bytes/str object, so ord() works under both Python 2 and 3.
    buf = self.file.read(nbytes)
    value = ord(buf[0:1])
    if signed and value >= 0x80:
        # Sign-extend the most significant byte.
        value = value - 0x100
    for i in range(1, nbytes):
        value = 0x100*value + ord(buf[i:i + 1])
    return value
def _dispatch(self, byte):
    """
    Based on the opcode *byte*, read the correct kinds of
    arguments from the dvi file and call the method implementing
    that opcode with those arguments.

    The argument counts and signedness below are fixed by the dvi
    file format specification.
    """
    # set_char / set1..set4 / set_rule
    if 0 <= byte <= 127: self._set_char(byte)
    elif byte == 128: self._set_char(self._arg(1))
    elif byte == 129: self._set_char(self._arg(2))
    elif byte == 130: self._set_char(self._arg(3))
    elif byte == 131: self._set_char(self._arg(4, True))
    elif byte == 132: self._set_rule(self._arg(4, True), self._arg(4, True))
    # put1..put4 / put_rule
    elif byte == 133: self._put_char(self._arg(1))
    elif byte == 134: self._put_char(self._arg(2))
    elif byte == 135: self._put_char(self._arg(3))
    elif byte == 136: self._put_char(self._arg(4, True))
    elif byte == 137: self._put_rule(self._arg(4, True), self._arg(4, True))
    # page and stack handling
    elif byte == 138: self._nop()
    elif byte == 139: self._bop(*[self._arg(4, True) for i in range(11)])
    elif byte == 140: self._eop()
    elif byte == 141: self._push()
    elif byte == 142: self._pop()
    # horizontal movement: right1..right4, w0..w4, x0..x4
    elif byte == 143: self._right(self._arg(1, True))
    elif byte == 144: self._right(self._arg(2, True))
    elif byte == 145: self._right(self._arg(3, True))
    elif byte == 146: self._right(self._arg(4, True))
    elif byte == 147: self._right_w(None)
    elif byte == 148: self._right_w(self._arg(1, True))
    elif byte == 149: self._right_w(self._arg(2, True))
    elif byte == 150: self._right_w(self._arg(3, True))
    elif byte == 151: self._right_w(self._arg(4, True))
    elif byte == 152: self._right_x(None)
    elif byte == 153: self._right_x(self._arg(1, True))
    elif byte == 154: self._right_x(self._arg(2, True))
    elif byte == 155: self._right_x(self._arg(3, True))
    elif byte == 156: self._right_x(self._arg(4, True))
    # vertical movement: down1..down4, y0..y4, z0..z4
    elif byte == 157: self._down(self._arg(1, True))
    elif byte == 158: self._down(self._arg(2, True))
    elif byte == 159: self._down(self._arg(3, True))
    elif byte == 160: self._down(self._arg(4, True))
    elif byte == 161: self._down_y(None)
    elif byte == 162: self._down_y(self._arg(1, True))
    elif byte == 163: self._down_y(self._arg(2, True))
    elif byte == 164: self._down_y(self._arg(3, True))
    elif byte == 165: self._down_y(self._arg(4, True))
    elif byte == 166: self._down_z(None)
    elif byte == 167: self._down_z(self._arg(1, True))
    elif byte == 168: self._down_z(self._arg(2, True))
    elif byte == 169: self._down_z(self._arg(3, True))
    elif byte == 170: self._down_z(self._arg(4, True))
    # font selection: fnt_num_0..63, fnt1..fnt4
    elif 171 <= byte <= 234: self._fnt_num(byte-171)
    elif byte == 235: self._fnt_num(self._arg(1))
    elif byte == 236: self._fnt_num(self._arg(2))
    elif byte == 237: self._fnt_num(self._arg(3))
    elif byte == 238: self._fnt_num(self._arg(4, True))
    elif 239 <= byte <= 242:
        # xxx1..xxx4: a \special command carrying a byte string.
        # (Renamed local from "len", which shadowed the builtin.)
        datalen = self._arg(byte-238)
        special = self.file.read(datalen)
        self._xxx(special)
    elif 243 <= byte <= 246:
        # fnt_def1..fnt_def4: define font k with checksum c, scale s,
        # design size d, and a name of a (directory) + l (name) bytes.
        k = self._arg(byte-242, byte==246)
        c, s, d, a, l = [ self._arg(x) for x in (4, 4, 4, 1, 1) ]
        n = self.file.read(a+l)
        self._fnt_def(k, c, s, d, a, l, n)
    elif byte == 247:
        # preamble
        i, num, den, mag, k = [ self._arg(x) for x in (1, 4, 4, 4, 1) ]
        x = self.file.read(k)
        self._pre(i, num, den, mag, x)
    elif byte == 248: self._post()
    elif byte == 249: self._post_post()
    else:
        # Call form of raise: behaves identically to the old Python 2
        # statement form, and is also valid Python 3.
        raise ValueError("unknown command: byte %d" % byte)
def _pre(self, i, num, den, mag, comment):
    # Handle the preamble: validate format id, units and
    # magnification; the comment string is ignored.
    if self.state != _dvistate.pre:
        raise ValueError, "pre command in middle of dvi file"
    if i != 2:
        raise ValueError, "Unknown dvi format %d"%i
    if num != 25400000 or den != 7227 * 2**16:
        raise ValueError, "nonstandard units in dvi file"
        # meaning: TeX always uses those exact values, so it
        # should be enough for us to support those
        # (There are 72.27 pt to an inch so 7227 pt =
        # 7227 * 2**16 sp to 100 in. The numerator is multiplied
        # by 10^5 to get units of 10**-7 meters.)
    if mag != 1000:
        raise ValueError, "nonstandard magnification in dvi file"
        # meaning: LaTeX seems to frown on setting \mag, so
        # I think we can assume this is constant
    self.state = _dvistate.outer
def _set_char(self, char):
    # Typeset *char* at the current position, then advance the
    # horizontal position by the character's width.
    if self.state != _dvistate.inpage:
        raise ValueError, "misplaced set_char in dvi file"
    self._put_char(char)
    self.h += self.fonts[self.f]._width_of(char)
def _set_rule(self, a, b):
    # Typeset a rule of height *a* and width *b*, then advance the
    # horizontal position by the rule's width.
    if self.state != _dvistate.inpage:
        raise ValueError, "misplaced set_rule in dvi file"
    self._put_rule(a, b)
    self.h += b
def _put_char(self, char):
    # Typeset *char* without moving the reference point. If the
    # current font is a virtual font, recursively expand the
    # character's packet into glyphs and boxes from the underlying
    # real fonts, offset and scaled to the current position.
    if self.state != _dvistate.inpage:
        raise ValueError, "misplaced put_char in dvi file"
    font = self.fonts[self.f]
    if font._vf is None:
        self.text.append((self.h, self.v, font, char,
                          font._width_of(char)))
        # matplotlib.verbose.report(
        #     'Dvi._put_char: %d,%d %d' %(self.h, self.v, char),
        #     'debug-annoying')
    else:
        # Virtual font: splice in the packet's glyphs, scaling
        # each nested font by this font's scale factor.
        scale = font._scale
        for x, y, f, g, w in font._vf[char].text:
            newf = DviFont(scale=_mul2012(scale, f._scale),
                           tfm=f._tfm, texname=f.texname, vf=f._vf)
            self.text.append((self.h + _mul2012(x, scale),
                              self.v + _mul2012(y, scale),
                              newf, g, newf._width_of(g)))
        self.boxes.extend([(self.h + _mul2012(x, scale),
                            self.v + _mul2012(y, scale),
                            _mul2012(a, scale), _mul2012(b, scale))
                           for x, y, a, b in font._vf[char].boxes])
def _put_rule(self, a, b):
    # Typeset a rule of height *a* and width *b* at the current
    # position without moving; rules with a non-positive dimension
    # are invisible per the dvi spec, so they are simply skipped.
    if self.state != _dvistate.inpage:
        raise ValueError, "misplaced put_rule in dvi file"
    if a > 0 and b > 0:
        self.boxes.append((self.h, self.v, a, b))
        # matplotlib.verbose.report(
        #     'Dvi._put_rule: %d,%d %d,%d' % (self.h, self.v, a, b),
        #     'debug-annoying')
def _nop(self):
    # nop opcode: do nothing.
    pass
def _bop(self, c0, c1, c2, c3, c4, c5, c6, c7, c8, c9, p):
    # Begin a page. c0..c9 are TeX's \count register values and p is
    # the byte offset of the previous bop; both are unused here.
    # Reset the position registers and start fresh per-page lists.
    if self.state != _dvistate.outer:
        raise ValueError, \
              "misplaced bop in dvi file (state %d)" % self.state
    self.state = _dvistate.inpage
    self.h, self.v, self.w, self.x, self.y, self.z = 0, 0, 0, 0, 0, 0
    self.stack = []
    self.text = []          # list of (x,y,fontnum,glyphnum)
    self.boxes = []         # list of (x,y,width,height)
def _eop(self):
    # End a page: discard positional state. self.text and self.boxes
    # are intentionally kept so _output() can read them.
    if self.state != _dvistate.inpage:
        raise ValueError, "misplaced eop in dvi file"
    self.state = _dvistate.outer
    del self.h, self.v, self.w, self.x, self.y, self.z, self.stack
def _push(self):
    # Save the current position registers on the stack.
    if self.state != _dvistate.inpage:
        raise ValueError, "misplaced push in dvi file"
    self.stack.append((self.h, self.v, self.w, self.x, self.y, self.z))
def _pop(self):
    # Restore the position registers from the stack.
    if self.state != _dvistate.inpage:
        raise ValueError, "misplaced pop in dvi file"
    self.h, self.v, self.w, self.x, self.y, self.z = self.stack.pop()
def _right(self, b):
    # right1..right4: move right by *b* (may be negative).
    if self.state != _dvistate.inpage:
        raise ValueError, "misplaced right in dvi file"
    self.h += b
def _right_w(self, new_w):
    # w0..w4: optionally update the w register (None means w0,
    # i.e. keep the current value), then move right by w.
    if self.state != _dvistate.inpage:
        raise ValueError, "misplaced w in dvi file"
    if new_w is not None:
        self.w = new_w
    self.h += self.w
def _right_x(self, new_x):
    # x0..x4: optionally update the x register (None means x0,
    # i.e. keep the current value), then move right by x.
    if self.state != _dvistate.inpage:
        raise ValueError, "misplaced x in dvi file"
    if new_x is not None:
        self.x = new_x
    self.h += self.x
def _down(self, a):
    # down1..down4: move down by *a* (may be negative).
    if self.state != _dvistate.inpage:
        raise ValueError, "misplaced down in dvi file"
    self.v += a
def _down_y(self, new_y):
    # y0..y4: optionally update the y register (None means y0,
    # i.e. keep the current value), then move down by y.
    if self.state != _dvistate.inpage:
        raise ValueError, "misplaced y in dvi file"
    if new_y is not None:
        self.y = new_y
    self.v += self.y
def _down_z(self, new_z):
    # z0..z4: optionally update the z register (None means z0,
    # i.e. keep the current value), then move down by z.
    if self.state != _dvistate.inpage:
        raise ValueError, "misplaced z in dvi file"
    if new_z is not None:
        self.z = new_z
    self.v += self.z
def _fnt_num(self, k):
    # Select font number *k* (an index into self.fonts) as current.
    if self.state != _dvistate.inpage:
        raise ValueError, "misplaced fnt_num in dvi file"
    self.f = k
def _xxx(self, special):
    # A \special{} command: not interpreted, only logged (with
    # non-printable bytes rendered as <xx> hex escapes).
    matplotlib.verbose.report(
        'Dvi._xxx: encountered special: %s'
        % ''.join([(32 <= ord(ch) < 127) and ch
                   or '<%02x>' % ord(ch)
                   for ch in special]),
        'debug')
def _fnt_def(self, k, c, s, d, a, l, n):
    # Define font number *k*: c = tfm checksum, s = scale factor,
    # d = design size, a = length of the directory part of the font
    # name, l = length of the name proper, n = directory + name.
    # n[-l:] strips the directory part before the tfm/vf lookup.
    tfm = _tfmfile(n[-l:])
    if c != 0 and tfm.checksum != 0 and c != tfm.checksum:
        raise ValueError, 'tfm checksum mismatch: %s'%n
    # It seems that the assumption behind the following check is incorrect:
    #if d != tfm.design_size:
    #    raise ValueError, 'tfm design size mismatch: %d in dvi, %d in %s'%\
    #        (d, tfm.design_size, n)
    vf = _vffile(n[-l:])
    self.fonts[k] = DviFont(scale=s, tfm=tfm, texname=n, vf=vf)
def _post(self):
    # Start of the postamble: no more pages follow. Entering the
    # post_post state makes _read() close the file and stop.
    if self.state != _dvistate.outer:
        raise ValueError, "misplaced post in dvi file"
    self.state = _dvistate.post_post
    # TODO: actually read the postamble and finale?
    # currently post_post just triggers closing the file
def _post_post(self):
    # Apparently unreachable in normal operation: _read() stops as
    # soon as the state becomes post_post, before this opcode would
    # be dispatched.
    raise NotImplementedError
class DviFont(object):
    """
    Object that holds a font's texname and size, supports comparison,
    and knows the widths of glyphs in the same units as the AFM file.
    There are also internal attributes (for use by dviread.py) that
    are _not_ used for comparison.

    The size is in Adobe points (converted from TeX points).
    """
    __slots__ = ('texname', 'size', 'widths', '_scale', '_vf', '_tfm')

    def __init__(self, scale, tfm, texname, vf):
        self._scale, self._tfm, self.texname, self._vf = \
            scale, tfm, texname, vf
        # Convert from TeX points (72.27 per inch, 2**16 scaled
        # points per point) to Adobe points (72 per inch).
        self.size = scale * (72.0 / (72.27 * 2**16))
        try:
            # + 1 so the widths table includes the highest character
            # code present in the tfm file (the previous
            # max(...)-without-+1 dropped the last character).
            # max(dict) iterates keys on both Python 2 and 3,
            # unlike the Python-2-only iterkeys().
            nchars = max(tfm.width) + 1
        except ValueError:
            # empty width table
            nchars = 0
        self.widths = [ (1000*tfm.width.get(char, 0)) >> 20
                        for char in range(nchars) ]

    def __eq__(self, other):
        return self.__class__ == other.__class__ and \
            self.texname == other.texname and self.size == other.size

    def __ne__(self, other):
        return not self.__eq__(other)

    def _width_of(self, char):
        """
        Width of char in dvi units. For internal use by dviread.py.
        """
        width = self._tfm.width.get(char, None)
        if width is not None:
            return _mul2012(width, self._scale)

        matplotlib.verbose.report(
            'No width for char %d in font %s' % (char, self.texname),
            'debug')
        return 0
class Vf(Dvi):
    """
    A virtual font (\*.vf file) containing subroutines for dvi files.

    Usage::

      vf = Vf(filename)
      glyph = vf[code]
      glyph.text, glyph.boxes, glyph.width
    """

    def __init__(self, filename):
        Dvi.__init__(self, filename, 0)
        self._first_font = None
        self._chars = {}
        self._packet_ends = None
        # Unlike a dvi file, a vf file is consumed eagerly in one go.
        self._read()
        self.close()

    def __getitem__(self, code):
        return self._chars[code]

    def _dispatch(self, byte):
        # If we are in a packet, execute the dvi instructions
        if self.state == _dvistate.inpage:
            byte_at = self.file.tell()-1
            if byte_at == self._packet_ends:
                self._finalize_packet()
                # fall through to the "outside a packet" handling below
            elif byte_at > self._packet_ends:
                raise ValueError, "Packet length mismatch in vf file"
            else:
                # bop/eop and the font-definition/postamble opcodes
                # are not allowed inside a character packet.
                if byte in (139, 140) or byte >= 243:
                    raise ValueError, "Inappropriate opcode %d in vf file" % byte
                Dvi._dispatch(self, byte)
                return

        # We are outside a packet
        if byte < 242:          # a short packet (length given by byte)
            cc, tfm = self._arg(1), self._arg(3)
            self._init_packet(byte, cc, tfm)
        elif byte == 242:       # a long packet
            pl, cc, tfm = [ self._arg(x) for x in (4, 4, 4) ]
            self._init_packet(pl, cc, tfm)
        elif 243 <= byte <= 246:
            # fnt_def1..fnt_def4: delegate to the dvi implementation
            Dvi._dispatch(self, byte)
        elif byte == 247:       # preamble
            i, k = self._arg(1), self._arg(1)
            x = self.file.read(k)
            cs, ds = self._arg(4), self._arg(4)
            self._pre(i, x, cs, ds)
        elif byte == 248:       # postamble (just some number of 248s)
            self.state = _dvistate.post_post
        else:
            raise ValueError, "unknown vf opcode %d" % byte

    def _init_packet(self, pl, cc, tfm):
        # Begin a character packet: pl = packet length in bytes,
        # cc = character code, tfm = the character's tfm width.
        if self.state != _dvistate.outer:
            raise ValueError, "Misplaced packet in vf file"
        self.state = _dvistate.inpage
        self._packet_ends = self.file.tell() + pl
        self._packet_char = cc
        self._packet_width = tfm
        self.h, self.v, self.w, self.x, self.y, self.z = 0, 0, 0, 0, 0, 0
        self.stack, self.text, self.boxes = [], [], []
        # Packets implicitly start in the first font defined.
        self.f = self._first_font

    def _finalize_packet(self):
        # Store the accumulated glyphs/boxes under the packet's
        # character code and return to the outer state.
        self._chars[self._packet_char] = mpl_cbook.Bunch(
            text=self.text, boxes=self.boxes, width = self._packet_width)
        self.state = _dvistate.outer

    def _pre(self, i, x, cs, ds):
        if self.state != _dvistate.pre:
            raise ValueError, "pre command in middle of vf file"
        if i != 202:
            raise ValueError, "Unknown vf format %d" % i
        if len(x):
            matplotlib.verbose.report('vf file comment: ' + x, 'debug')
        self.state = _dvistate.outer
        # cs = checksum, ds = design size

    def _fnt_def(self, k, *args):
        Dvi._fnt_def(self, k, *args)
        if self._first_font is None:
            # Remember the first defined font: it is the default
            # current font at the start of every packet.
            self._first_font = k
def _fix2comp(num):
"""
Convert from two's complement to negative.
"""
assert 0 <= num < 2**32
if num & 2**31:
return num - 2**32
else:
return num
def _mul2012(num1, num2):
"""
Multiply two numbers in 20.12 fixed point format.
"""
# Separated into a function because >> has surprising precedence
return (num1*num2) >> 20
class Tfm(object):
    """
    A TeX Font Metric file. This implementation covers only the bare
    minimum needed by the Dvi class.

    Attributes:

      checksum: for verifying against dvi file

      design_size: design size of the font (in what units?)

      width[i]: width of character \#i, needs to be scaled
        by the factor specified in the dvi file
        (this is a dict because indexing may not start from 0)

      height[i], depth[i]: height and depth of character \#i
    """
    __slots__ = ('checksum', 'design_size', 'width', 'height', 'depth')

    def __init__(self, filename):
        matplotlib.verbose.report('opening tfm file ' + filename, 'debug')
        # Renamed the local from "file" so it no longer shadows the
        # builtin.
        fh = open(filename, 'rb')
        try:
            header1 = fh.read(24)
            # lh = header length (words), bc/ec = first/last character
            # code, nw/nh/nd = sizes of the width/height/depth tables.
            lh, bc, ec, nw, nh, nd = \
                struct.unpack('!6H', header1[2:14])
            matplotlib.verbose.report(
                'lh=%d, bc=%d, ec=%d, nw=%d, nh=%d, nd=%d' % (
                    lh, bc, ec, nw, nh, nd), 'debug')
            header2 = fh.read(4*lh)
            self.checksum, self.design_size = \
                struct.unpack('!2I', header2[:8])
            # there is also encoding information etc.
            char_info = fh.read(4*(ec-bc+1))
            widths = fh.read(4*nw)
            heights = fh.read(4*nh)
            depths = fh.read(4*nd)
        finally:
            fh.close()

        self.width, self.height, self.depth = {}, {}, {}
        widths, heights, depths = \
            [ struct.unpack('!%dI' % (len(x) // 4), x)
              for x in (widths, heights, depths) ]
        # char_info holds one 4-byte record per character bc..ec
        # *inclusive*: byte 0 is the width-table index, the high and
        # low nibbles of byte 1 are the height and depth indices.
        # range(ec - bc + 1) includes the last character ec; the
        # previous range(ec - bc) silently dropped it (off-by-one).
        # Slicing (rather than indexing) keeps ord() working on both
        # Python 2 str and Python 3 bytes.
        for i in range(ec - bc + 1):
            self.width[bc+i] = _fix2comp(widths[ord(char_info[4*i:4*i+1])])
            self.height[bc+i] = _fix2comp(heights[ord(char_info[4*i+1:4*i+2]) >> 4])
            self.depth[bc+i] = _fix2comp(depths[ord(char_info[4*i+1:4*i+2]) & 0xf])
class PsfontsMap(object):
    """
    A psfonts.map formatted file, mapping TeX fonts to PS fonts.
    Usage: map = PsfontsMap('.../psfonts.map'); map['cmr10']

    For historical reasons, TeX knows many Type-1 fonts by different
    names than the outside world. (For one thing, the names have to
    fit in eight characters.) Also, TeX's native fonts are not Type-1
    but Metafont, which is nontrivial to convert to PostScript except
    as a bitmap. While high-quality conversions to Type-1 format exist
    and are shipped with modern TeX distributions, we need to know
    which Type-1 fonts are the counterparts of which native fonts. For
    these reasons a mapping is needed from internal font names to font
    file names.

    A texmf tree typically includes mapping files called e.g.
    psfonts.map, pdftex.map, dvipdfm.map. psfonts.map is used by
    dvips, pdftex.map by pdfTeX, and dvipdfm.map by dvipdfm.
    psfonts.map might avoid embedding the 35 PostScript fonts, while
    the pdf-related files perhaps only avoid the "Base 14" pdf fonts.
    But the user may have configured these files differently.
    """
    __slots__ = ('_font',)

    def __init__(self, filename):
        self._font = {}
        file = open(filename, 'rt')
        try:
            self._parse(file)
        finally:
            file.close()

    def __getitem__(self, texname):
        # Resolve relative font/encoding file names through kpsewhich
        # lazily, the first time the entry is looked up. Absolute
        # paths (starting with '/') are kept as-is.
        result = self._font[texname]
        fn, enc = result.filename, result.encoding
        if fn is not None and not fn.startswith('/'):
            result.filename = find_tex_file(fn)
        if enc is not None and not enc.startswith('/'):
            result.encoding = find_tex_file(result.encoding)
        return result

    def _parse(self, file):
        """Parse each line into words."""
        for line in file:
            line = line.strip()
            if line == '' or line.startswith('%'):
                continue
            words, pos = [], 0
            while pos < len(line):
                if line[pos] == '"': # double quoted word
                    pos += 1
                    end = line.index('"', pos)
                    words.append(line[pos:end])
                    pos = end + 1
                else:                # ordinary word
                    end = line.find(' ', pos+1)
                    if end == -1: end = len(line)
                    words.append(line[pos:end])
                    pos = end
                # skip any run of spaces before the next word
                while pos < len(line) and line[pos] == ' ':
                    pos += 1
            self._register(words)

    def _register(self, words):
        """Register a font described by "words".

        The format is, AFAIK: texname fontname [effects and filenames]
        Effects are PostScript snippets like ".177 SlantFont",
        filenames begin with one or two less-than signs. A filename
        ending in enc is an encoding file, other filenames are font
        files. This can be overridden with a left bracket: <[foobar
        indicates an encoding file named foobar.

        There is some difference between <foo.pfb and <<bar.pfb in
        subsetting, but I have no example of << in my TeX installation.
        """
        texname, psname = words[:2]
        effects, encoding, filename = [], None, None
        for word in words[2:]:
            if not word.startswith('<'):
                effects.append(word)
            else:
                word = word.lstrip('<')
                if word.startswith('['):
                    # <[foobar: explicitly an encoding file
                    assert encoding is None
                    encoding = word[1:]
                elif word.endswith('.enc'):
                    assert encoding is None
                    encoding = word
                else:
                    assert filename is None
                    filename = word
        self._font[texname] = mpl_cbook.Bunch(
            texname=texname, psname=psname, effects=effects,
            encoding=encoding, filename=filename)
class Encoding(object):
    """
    Parses a \*.enc file referenced from a psfonts.map style file.
    The format this class understands is a very limited subset of
    PostScript.

    Usage (subject to change)::

      for name in Encoding(filename):
          whatever(name)
    """
    __slots__ = ('encoding',)

    def __init__(self, filename):
        file = open(filename, 'rt')
        try:
            matplotlib.verbose.report('Parsing TeX encoding ' + filename, 'debug-annoying')
            self.encoding = self._parse(file)
            matplotlib.verbose.report('Result: ' + `self.encoding`, 'debug-annoying')
        finally:
            file.close()

    def __iter__(self):
        for name in self.encoding:
            yield name

    def _parse(self, file):
        # Simple line-oriented state machine:
        #   state 0: before the '[' opening the encoding vector
        #   state 1: inside the vector, collecting /names
        #   state 2: after the closing ']'; remaining lines ignored
        result = []

        state = 0
        for line in file:
            # strip PostScript comments
            comment_start = line.find('%')
            if comment_start > -1:
                line = line[:comment_start]
            line = line.strip()

            if state == 0:
                # Expecting something like /FooEncoding [
                if '[' in line:
                    state = 1
                    line = line[line.index('[')+1:].strip()

            if state == 1:
                if ']' in line: # ] def
                    line = line[:line.index(']')]
                    state = 2
                words = line.split()
                for w in words:
                    if w.startswith('/'):
                        # Allow for /abc/def/ghi
                        subwords = w.split('/')
                        result.extend(subwords[1:])
                    else:
                        raise ValueError, "Broken name in encoding file: " + w

        return result
def find_tex_file(filename, format=None):
    """
    Call kpsewhich to find a file in the texmf tree.

    If *format* is not None, it is used as the value for the --format
    option. See the kpathsea documentation for more information.

    Apparently most existing TeX distributions on Unix-like systems
    use kpathsea. I hear MikTeX (a popular distribution on Windows)
    doesn't use kpathsea, so what do we do? (TODO)
    """
    cmd = ['kpsewhich']
    if format is not None:
        cmd.append('--format=' + format)
    cmd.append(filename)

    matplotlib.verbose.report('find_tex_file(%s): %s' \
                              % (filename,cmd), 'debug')
    output = subprocess.Popen(cmd, stdout=subprocess.PIPE).communicate()[0]
    result = output.rstrip()
    matplotlib.verbose.report('find_tex_file result: %s' % result,
                              'debug')
    return result
def _read_nointr(pipe, bufsize=-1):
while True:
try:
return pipe.read(bufsize)
except OSError, e:
if e.errno == errno.EINTR:
continue
else:
raise
# With multiple text objects per figure (e.g. tick labels) we may end
# up reading the same tfm and vf files many times, so we implement a
# simple cache. TODO: is this worth making persistent?

_tfmcache = {}  # texname -> Tfm instance, or None if the file was not found
_vfcache = {}   # texname -> Vf instance, or None if the file was not found
def _fontfile(texname, class_, suffix, cache):
try:
return cache[texname]
except KeyError:
pass
filename = find_tex_file(texname + suffix)
if filename:
result = class_(filename)
else:
result = None
cache[texname] = result
return result
def _tfmfile(texname):
    """Return the (cached) Tfm object for *texname*, or None if not found."""
    return _fontfile(texname, Tfm, '.tfm', _tfmcache)
def _vffile(texname):
    """Return the (cached) Vf object for *texname*, or None if not found."""
    return _fontfile(texname, Vf, '.vf', _vfcache)
if __name__ == '__main__':
    # Debugging helper: dump the contents of the dvi file given as the
    # first command-line argument, optionally converted to the dpi
    # given as the second argument. (Python 2 print-statement syntax.)
    import sys
    matplotlib.verbose.set_level('debug-annoying')
    fname = sys.argv[1]
    try: dpi = float(sys.argv[2])
    except IndexError: dpi = None
    dvi = Dvi(fname, dpi)
    fontmap = PsfontsMap(find_tex_file('pdftex.map'))
    for page in dvi:
        print '=== new page ==='
        fPrev = None
        for x,y,f,c,w in page.text:
            # announce each font change once
            if f != fPrev:
                print 'font', f.texname, 'scaled', f._scale/pow(2.0,20)
                fPrev = f
            print x,y,c, 32 <= c < 128 and chr(c) or '.', w
        for x,y,w,h in page.boxes:
            print x,y,'BOX',w,h
| agpl-3.0 |
lihui7115/ChromiumGStreamerBackend | third_party/cython/src/Cython/Compiler/AnalysedTreeTransforms.py | 87 | 3774 | from Visitor import ScopeTrackingTransform
from Nodes import StatListNode, SingleAssignmentNode, CFuncDefNode, DefNode
from ExprNodes import DictNode, DictItemNode, NameNode, UnicodeNode
from PyrexTypes import py_object_type
from StringEncoding import EncodedString
import Symtab
class AutoTestDictTransform(ScopeTrackingTransform):
    # Handles autotestdict directive: collects the docstrings of
    # functions/methods in the module and assembles them into a
    # module-level __test__ dict so doctest can find them.

    # Special methods whose docstrings are never included in __test__.
    blacklist = ['__cinit__', '__dealloc__', '__richcmp__',
                 '__nonzero__', '__bool__',
                 '__len__', '__contains__']

    def visit_ModuleNode(self, node):
        if node.is_pxd:
            return node
        self.scope_type = 'module'
        self.scope_node = node

        if not self.current_directives['autotestdict']:
            return node
        self.all_docstrings = self.current_directives['autotestdict.all']
        self.cdef_docstrings = self.all_docstrings or self.current_directives['autotestdict.cdef']

        assert isinstance(node.body, StatListNode)

        # First see if __test__ is already created
        if u'__test__' in node.scope.entries:
            # Do nothing
            return node

        pos = node.pos

        self.tests = []
        self.testspos = node.pos

        test_dict_entry = node.scope.declare_var(EncodedString(u'__test__'),
                                                 py_object_type,
                                                 pos,
                                                 visibility='public')
        # __test__ = { ... } is appended to the module body; the dict's
        # key/value pairs are filled in by add_test() while visiting.
        create_test_dict_assignment = SingleAssignmentNode(pos,
            lhs=NameNode(pos, name=EncodedString(u'__test__'),
                         entry=test_dict_entry),
            rhs=DictNode(pos, key_value_pairs=self.tests))
        self.visitchildren(node)
        node.body.stats.append(create_test_dict_assignment)
        return node

    def add_test(self, testpos, path, doctest):
        # Key format "<path> (line <n>)" matches doctest conventions.
        pos = self.testspos
        keystr = u'%s (line %d)' % (path, testpos[1])
        key = UnicodeNode(pos, value=EncodedString(keystr))
        value = UnicodeNode(pos, value=doctest)
        self.tests.append(DictItemNode(pos, key=key, value=value))

    def visit_ExprNode(self, node):
        # expressions cannot contain functions and lambda expressions
        # do not have a docstring
        return node

    def visit_FuncDefNode(self, node):
        # Decide whether this function's docstring belongs in __test__.
        if not node.doc or (isinstance(node, DefNode) and node.fused_py_func):
            return node
        if not self.cdef_docstrings:
            if isinstance(node, CFuncDefNode) and not node.py_func:
                return node
        if not self.all_docstrings and '>>>' not in node.doc:
            return node

        # Build the dotted path used as the doctest name.
        pos = self.testspos
        if self.scope_type == 'module':
            path = node.entry.name
        elif self.scope_type in ('pyclass', 'cclass'):
            if isinstance(node, CFuncDefNode):
                if node.py_func is not None:
                    name = node.py_func.name
                else:
                    name = node.entry.name
            else:
                name = node.name
            if self.scope_type == 'cclass' and name in self.blacklist:
                return node
            if self.scope_type == 'pyclass':
                class_name = self.scope_node.name
            else:
                class_name = self.scope_node.class_name
            if isinstance(node.entry.scope, Symtab.PropertyScope):
                # NOTE(review): property_method_name is assigned but unused.
                property_method_name = node.entry.scope.name
                path = "%s.%s.%s" % (class_name, node.entry.scope.name,
                                     node.entry.name)
            else:
                path = "%s.%s" % (class_name, node.entry.name)
        else:
            assert False
        self.add_test(node.pos, path, node.doc)
        return node
| bsd-3-clause |
baylabs/grpc | src/python/grpcio_tests/tests/interop/_intraop_test_case.py | 23 | 2869 | # Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Common code for unit tests of the interoperability test code."""
from tests.interop import methods
class IntraopTestCase(object):
    """Unit test methods.

    This class must be mixed in with unittest.TestCase and a class that defines
    setUp and tearDown methods that manage a stub attribute.
    """
    # Each method delegates to the corresponding interoperability
    # TestCase, exercising it against self.stub (supplied by the
    # class this mixin is combined with). The second argument (args)
    # is unused by these cases, hence None.

    def testEmptyUnary(self):
        methods.TestCase.EMPTY_UNARY.test_interoperability(self.stub, None)

    def testLargeUnary(self):
        methods.TestCase.LARGE_UNARY.test_interoperability(self.stub, None)

    def testServerStreaming(self):
        methods.TestCase.SERVER_STREAMING.test_interoperability(self.stub, None)

    def testClientStreaming(self):
        methods.TestCase.CLIENT_STREAMING.test_interoperability(self.stub, None)

    def testPingPong(self):
        methods.TestCase.PING_PONG.test_interoperability(self.stub, None)

    def testCancelAfterBegin(self):
        methods.TestCase.CANCEL_AFTER_BEGIN.test_interoperability(self.stub,
                                                                  None)

    def testCancelAfterFirstResponse(self):
        methods.TestCase.CANCEL_AFTER_FIRST_RESPONSE.test_interoperability(
            self.stub, None)

    def testTimeoutOnSleepingServer(self):
        methods.TestCase.TIMEOUT_ON_SLEEPING_SERVER.test_interoperability(
            self.stub, None)
| bsd-3-clause |
openpli-arm/enigma2-arm | lib/python/Components/PluginComponent.py | 1 | 4036 | from os import path as os_path, listdir as os_listdir
from traceback import print_exc
from sys import stdout
from Tools.Directories import fileExists
from Tools.Import import my_import
from Plugins.Plugin import PluginDescriptor
import keymapparser
class PluginComponent:
    """
    Registry for enigma2 plugins: scans plugin directories, imports
    each plugin module, and keeps the resulting PluginDescriptors
    grouped by the "where" categories they registered for.
    """
    firstRun = True          # True until the first readPluginList() completes
    restartRequired = False  # set when a needsRestart plugin appears after first run

    def __init__(self):
        self.plugins = {}      # where-category -> list of descriptors
        self.pluginList = [ ]  # flat list of all registered descriptors
        self.setPluginPrefix("Plugins.")
        self.resetWarnings()

    def setPluginPrefix(self, prefix):
        self.prefix = prefix

    def addPlugin(self, plugin):
        # Plugins that need a restart are only accepted during the
        # initial scan; afterwards they merely flag restartRequired.
        if self.firstRun or not plugin.needsRestart:
            self.pluginList.append(plugin)
            for x in plugin.where:
                self.plugins.setdefault(x, []).append(plugin)
                if x == PluginDescriptor.WHERE_AUTOSTART:
                    plugin(reason=0)   # reason 0 = starting up
        else:
            self.restartRequired = True

    def removePlugin(self, plugin):
        self.pluginList.remove(plugin)
        for x in plugin.where:
            self.plugins[x].remove(plugin)
            if x == PluginDescriptor.WHERE_AUTOSTART:
                plugin(reason=1)       # reason 1 = shutting down

    def readPluginList(self, directory):
        """enumerates plugins"""
        categories = os_listdir(directory)
        new_plugins = [ ]
        for c in categories:
            directory_category = directory + c
            if not os_path.isdir(directory_category):
                continue
            # ensure the category directory is importable as a package
            open(directory_category + "/__init__.py", "a").close()
            for pluginname in os_listdir(directory_category):
                path = directory_category + "/" + pluginname
                if os_path.isdir(path):
                    if fileExists(path + "/plugin.pyc") or fileExists(path + "/plugin.pyo") or fileExists(path + "/plugin.py"):
                        try:
                            plugin = my_import('.'.join(["Plugins", c, pluginname, "plugin"]))

                            if not plugin.__dict__.has_key("Plugins"):
                                print "Plugin %s doesn't have 'Plugin'-call." % (pluginname)
                                continue

                            plugins = plugin.Plugins(path=path)
                        except Exception, exc:
                            # a broken plugin must not break the scan:
                            # record the failure and keep going
                            print "Plugin ", c + "/" + pluginname, "failed to load:", exc
                            print_exc(file=stdout)
                            print "skipping plugin."
                            self.warnings.append( (c + "/" + pluginname, str(exc)) )
                            continue

                        # allow single entry not to be a list
                        if not isinstance(plugins, list):
                            plugins = [ plugins ]

                        for p in plugins:
                            p.updateIcon(path)
                            new_plugins.append(p)

                        if fileExists(path + "/keymap.xml"):
                            try:
                                keymapparser.readKeymap(path + "/keymap.xml")
                            except Exception, exc:
                                print "keymap for plugin %s/%s failed to load: " % (c, pluginname), exc
                                self.warnings.append( (c + "/" + pluginname, str(exc)) )

        # build a diff between the old list of plugins and the new one
        # internally, the "fnc" argument will be compared with __eq__
        plugins_added = [p for p in new_plugins if p not in self.pluginList]
        plugins_removed = [p for p in self.pluginList if not p.internal and p not in new_plugins]

        # ignore already installed but reloaded plugins
        for p in plugins_removed:
            for pa in plugins_added:
                if pa.name == p.name and pa.where == p.where:
                    pa.needsRestart = False

        for p in plugins_removed:
            self.removePlugin(p)

        for p in plugins_added:
            self.addPlugin(p)

        if self.firstRun:
            self.firstRun = False

    def getPlugins(self, where):
        """Get list of plugins in a specific category"""
        if not isinstance(where, list):
            where = [ where ]
        res = [ ]
        for x in where:
            res.extend(self.plugins.get(x, [ ]))
        # stable ordering by the descriptor's weight
        res.sort(key=lambda x:x.weight)
        return res

    def getPluginsForMenu(self, menuid):
        # Collect the menu entries contributed by WHERE_MENU plugins
        # for the given menu id.
        res = [ ]
        for p in self.getPlugins(PluginDescriptor.WHERE_MENU):
            res += p(menuid)
        return res

    def clearPluginList(self):
        self.pluginList = []
        self.plugins = {}
        self.firstRun = True
        self.restartRequired = False

    def shutdown(self):
        # iterate over a copy: removePlugin mutates pluginList
        for p in self.pluginList[:]:
            self.removePlugin(p)

    def resetWarnings(self):
        self.warnings = [ ]

    def getNextWakeupTime(self):
        # Earliest wakeup time requested by any plugin; -1 if none.
        wakeup = -1
        for p in self.pluginList:
            current = p.getWakeupTime()
            if current > -1 and (wakeup > current or wakeup == -1):
                wakeup = current
        return int(wakeup)
plugins = PluginComponent()
| gpl-2.0 |
sundapeng/kafka | tests/kafkatest/services/verifiable_producer.py | 34 | 3924 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ducktape.services.background_thread import BackgroundThreadService
import json
class VerifiableProducer(BackgroundThreadService):
    """
    Background service wrapping kafka-verifiable-producer.sh: produces
    a sequence of integers to *topic* and records, from the tool's
    JSON output, which values were acked and which failed.
    """

    logs = {
        "producer_log": {
            "path": "/mnt/producer.log",
            "collect_default": False}
        }

    def __init__(self, context, num_nodes, kafka, topic, max_messages=-1, throughput=100000):
        super(VerifiableProducer, self).__init__(context, num_nodes)

        self.kafka = kafka
        self.topic = topic
        self.max_messages = max_messages    # -1 means unlimited
        self.throughput = throughput        # messages/sec; <= 0 disables throttling
        self.acked_values = []
        self.not_acked_values = []

    def _worker(self, idx, node):
        # Runs in a background thread: parse the producer's JSON event
        # stream line by line and record acked/not-acked values,
        # guarded by self.lock (provided by BackgroundThreadService).
        cmd = self.start_cmd
        self.logger.debug("VerifiableProducer %d command: %s" % (idx, cmd))

        for line in node.account.ssh_capture(cmd):
            line = line.strip()

            data = self.try_parse_json(line)
            if data is not None:

                with self.lock:
                    if data["name"] == "producer_send_error":
                        data["node"] = idx
                        self.not_acked_values.append(int(data["value"]))

                    elif data["name"] == "producer_send_success":
                        self.acked_values.append(int(data["value"]))

    @property
    def start_cmd(self):
        # Shell command executed on the worker node; output is both
        # captured over ssh and appended to the node's log file.
        cmd = "/opt/kafka/bin/kafka-verifiable-producer.sh" \
              " --topic %s --broker-list %s" % (self.topic, self.kafka.bootstrap_servers())
        if self.max_messages > 0:
            cmd += " --max-messages %s" % str(self.max_messages)
        if self.throughput > 0:
            cmd += " --throughput %s" % str(self.throughput)

        cmd += " 2>> /mnt/producer.log | tee -a /mnt/producer.log &"
        return cmd

    @property
    def acked(self):
        with self.lock:
            return self.acked_values

    @property
    def not_acked(self):
        with self.lock:
            return self.not_acked_values

    @property
    def num_acked(self):
        with self.lock:
            return len(self.acked_values)

    @property
    def num_not_acked(self):
        with self.lock:
            return len(self.not_acked_values)

    def stop_node(self, node):
        node.account.kill_process("VerifiableProducer", allow_fail=False)
        if self.worker_threads is None:
            return

        # block until the corresponding thread exits
        if len(self.worker_threads) >= self.idx(node):
            # Need to guard this because stop is preemptively called before the worker threads are added and started
            self.worker_threads[self.idx(node) - 1].join()

    def clean_node(self, node):
        node.account.kill_process("VerifiableProducer", clean_shutdown=False, allow_fail=False)
        node.account.ssh("rm -rf /mnt/producer.log", allow_fail=False)

    def try_parse_json(self, string):
        """Try to parse a string as json. Return None if not parseable."""
        try:
            record = json.loads(string)
            return record
        except ValueError:
            self.logger.debug("Could not parse as json: %s" % str(string))
            return None
| apache-2.0 |
pipermerriam/django | tests/managers_regress/tests.py | 264 | 8342 | from __future__ import unicode_literals
from django.apps import apps
from django.db import models
from django.template import Context, Template
from django.test import TestCase, override_settings
from django.utils.encoding import force_text
from .models import (
AbstractBase1, AbstractBase2, AbstractBase3, Child1, Child2, Child3,
Child4, Child5, Child6, Child7, RelatedModel, RelationModel,
)
class ManagersRegressionTests(TestCase):
    """Regression tests for manager resolution on concrete, abstract and
    swappable models (custom managers, ``_default_manager``, and the error
    messages raised when a manager is unavailable).
    """

    def test_managers(self):
        # Each ChildN model (see .models) exercises a different combination of
        # inherited, custom and default managers; create two rows per model so
        # filtering managers have something to exclude.
        Child1.objects.create(name='fred', data='a1')
        Child1.objects.create(name='barney', data='a2')
        Child2.objects.create(name='fred', data='b1', value=1)
        Child2.objects.create(name='barney', data='b2', value=42)
        Child3.objects.create(name='fred', data='c1', comment='yes')
        Child3.objects.create(name='barney', data='c2', comment='no')
        Child4.objects.create(name='fred', data='d1')
        Child4.objects.create(name='barney', data='d2')
        Child5.objects.create(name='fred', comment='yes')
        Child5.objects.create(name='barney', comment='no')
        Child6.objects.create(name='fred', data='f1', value=42)
        Child6.objects.create(name='barney', data='f2', value=42)
        Child7.objects.create(name='fred')
        Child7.objects.create(name='barney')

        self.assertQuerysetEqual(Child1.manager1.all(), ["<Child1: a1>"])
        self.assertQuerysetEqual(Child1.manager2.all(), ["<Child1: a2>"])
        self.assertQuerysetEqual(Child1._default_manager.all(), ["<Child1: a1>"])

        self.assertQuerysetEqual(Child2._default_manager.all(), ["<Child2: b1>"])
        self.assertQuerysetEqual(Child2.restricted.all(), ["<Child2: b2>"])

        self.assertQuerysetEqual(Child3._default_manager.all(), ["<Child3: c1>"])
        self.assertQuerysetEqual(Child3.manager1.all(), ["<Child3: c1>"])
        self.assertQuerysetEqual(Child3.manager2.all(), ["<Child3: c2>"])

        # Since Child6 inherits from Child4, the corresponding rows from f1 and
        # f2 also appear here. This is the expected result.
        self.assertQuerysetEqual(Child4._default_manager.order_by('data'), [
                "<Child4: d1>",
                "<Child4: d2>",
                "<Child4: f1>",
                "<Child4: f2>"
            ]
        )
        self.assertQuerysetEqual(Child4.manager1.all(), [
                "<Child4: d1>",
                "<Child4: f1>"
            ],
            ordered=False
        )
        self.assertQuerysetEqual(Child5._default_manager.all(), ["<Child5: fred>"])
        self.assertQuerysetEqual(Child6._default_manager.all(), ["<Child6: f1>"])
        self.assertQuerysetEqual(Child7._default_manager.order_by('name'), [
                "<Child7: barney>",
                "<Child7: fred>"
            ]
        )

    def test_abstract_manager(self):
        # Accessing the manager on an abstract model should
        # raise an attribute error with an appropriate message.
        # This error message isn't ideal, but if the model is abstract and
        # a lot of the class instantiation logic isn't invoked; if the
        # manager is implied, then we don't get a hook to install the
        # error-raising manager.
        msg = "type object 'AbstractBase3' has no attribute 'objects'"
        with self.assertRaisesMessage(AttributeError, msg):
            AbstractBase3.objects.all()

    def test_custom_abstract_manager(self):
        # Accessing the manager on an abstract model with an custom
        # manager should raise an attribute error with an appropriate
        # message.
        msg = "Manager isn't available; AbstractBase2 is abstract"
        with self.assertRaisesMessage(AttributeError, msg):
            AbstractBase2.restricted.all()

    def test_explicit_abstract_manager(self):
        # Accessing the manager on an abstract model with an explicit
        # manager should raise an attribute error with an appropriate
        # message.
        msg = "Manager isn't available; AbstractBase1 is abstract"
        with self.assertRaisesMessage(AttributeError, msg):
            AbstractBase1.objects.all()

    @override_settings(TEST_SWAPPABLE_MODEL='managers_regress.Parent')
    def test_swappable_manager(self):
        # The models need to be removed after the test in order to prevent bad
        # interactions with the flush operation in other tests.
        _old_models = apps.app_configs['managers_regress'].models.copy()

        try:
            class SwappableModel(models.Model):
                class Meta:
                    swappable = 'TEST_SWAPPABLE_MODEL'

            # Accessing the manager on a swappable model should
            # raise an attribute error with a helpful message
            msg = (
                "Manager isn't available; 'managers_regress.SwappableModel' "
                "has been swapped for 'managers_regress.Parent'"
            )
            with self.assertRaisesMessage(AttributeError, msg):
                SwappableModel.objects.all()

        finally:
            # Restore the app registry state that defining SwappableModel changed.
            apps.app_configs['managers_regress'].models = _old_models
            apps.all_models['managers_regress'] = _old_models
            apps.clear_cache()

    @override_settings(TEST_SWAPPABLE_MODEL='managers_regress.Parent')
    def test_custom_swappable_manager(self):
        # The models need to be removed after the test in order to prevent bad
        # interactions with the flush operation in other tests.
        _old_models = apps.app_configs['managers_regress'].models.copy()

        try:
            class SwappableModel(models.Model):
                stuff = models.Manager()

                class Meta:
                    swappable = 'TEST_SWAPPABLE_MODEL'

            # Accessing the manager on a swappable model with an
            # explicit manager should raise an attribute error with a
            # helpful message
            msg = (
                "Manager isn't available; 'managers_regress.SwappableModel' "
                "has been swapped for 'managers_regress.Parent'"
            )
            with self.assertRaisesMessage(AttributeError, msg):
                SwappableModel.stuff.all()

        finally:
            # Restore the app registry state that defining SwappableModel changed.
            apps.app_configs['managers_regress'].models = _old_models
            apps.all_models['managers_regress'] = _old_models
            apps.clear_cache()

    @override_settings(TEST_SWAPPABLE_MODEL='managers_regress.Parent')
    def test_explicit_swappable_manager(self):
        # The models need to be removed after the test in order to prevent bad
        # interactions with the flush operation in other tests.
        _old_models = apps.app_configs['managers_regress'].models.copy()

        try:
            class SwappableModel(models.Model):
                objects = models.Manager()

                class Meta:
                    swappable = 'TEST_SWAPPABLE_MODEL'

            # Accessing the manager on a swappable model with an
            # explicit manager should raise an attribute error with a
            # helpful message
            msg = (
                "Manager isn't available; 'managers_regress.SwappableModel' "
                "has been swapped for 'managers_regress.Parent'"
            )
            with self.assertRaisesMessage(AttributeError, msg):
                SwappableModel.objects.all()

        finally:
            # Restore the app registry state that defining SwappableModel changed.
            apps.app_configs['managers_regress'].models = _old_models
            apps.all_models['managers_regress'] = _old_models
            apps.clear_cache()

    def test_regress_3871(self):
        # Related managers (FK, generic FK, M2M) must all resolve the same
        # related object through the template layer.
        related = RelatedModel.objects.create()

        relation = RelationModel()
        relation.fk = related
        relation.gfk = related
        relation.save()
        relation.m2m.add(related)

        t = Template('{{ related.test_fk.all.0 }}{{ related.test_gfk.all.0 }}{{ related.test_m2m.all.0 }}')

        self.assertEqual(
            t.render(Context({'related': related})),
            ''.join([force_text(relation.pk)] * 3),
        )

    def test_field_can_be_called_exact(self):
        # Make sure related managers core filters don't include an
        # explicit `__exact` lookup that could be interpreted as a
        # reference to a foreign `exact` field. refs #23940.
        related = RelatedModel.objects.create(exact=False)
        relation = related.test_fk.create()
        self.assertEqual(related.test_fk.get(), relation)
| bsd-3-clause |
kabracity/Flexget | flexget/plugins/metainfo/tmdb_lookup.py | 5 | 2871 | from __future__ import unicode_literals, division, absolute_import
import logging
from flexget import plugin
from flexget.event import event
from flexget.utils import imdb
from flexget.utils.log import log_once
try:
# TODO: Fix this after api_tmdb has module level functions
from flexget.plugins.api_tmdb import ApiTmdb
lookup = ApiTmdb.lookup
except ImportError:
raise plugin.DependencyError(issued_by='tmdb_lookup', missing='api_tmdb')
log = logging.getLogger('tmdb_lookup')
class PluginTmdbLookup(object):
    """Retrieves tmdb information for entries.

    Example:
      tmdb_lookup: yes
    """

    # Maps entry field names to ApiTmdb movie attributes; values are either an
    # attribute name (string) or a callable taking the movie object.
    field_map = {
        'tmdb_name': 'name',
        'tmdb_id': 'id',
        'imdb_id': 'imdb_id',
        'tmdb_year': 'year',
        'tmdb_popularity': 'popularity',
        'tmdb_rating': 'rating',
        'tmdb_genres': lambda movie: [genre.name for genre in movie.genres],
        'tmdb_released': 'released',
        'tmdb_votes': 'votes',
        'tmdb_certification': 'certification',
        'tmdb_posters': lambda movie: [poster.url for poster in movie.posters],
        'tmdb_runtime': 'runtime',
        'tmdb_tagline': 'tagline',
        'tmdb_budget': 'budget',
        'tmdb_revenue': 'revenue',
        'tmdb_homepage': 'homepage',
        'tmdb_trailer': 'trailer',
        # Generic fields filled by all movie lookup plugins:
        'movie_name': 'name',
        'movie_year': 'year'}

    def validator(self):
        """Config schema: the plugin accepts a plain boolean."""
        from flexget import validator
        return validator.factory('boolean')

    def lazy_loader(self, entry, field):
        """Does the lookup for this entry and populates the entry fields."""
        # Prefer an explicit imdb_id; otherwise try to extract one from the
        # entry's imdb_url. eval_lazy=False avoids triggering other lazy lookups.
        imdb_id = (entry.get('imdb_id', eval_lazy=False) or
                   imdb.extract_id(entry.get('imdb_url', eval_lazy=False)))
        try:
            movie = lookup(smart_match=entry['title'],
                           tmdb_id=entry.get('tmdb_id', eval_lazy=False),
                           imdb_id=imdb_id)
            entry.update_using_map(self.field_map, movie)
        except LookupError:
            log_once('TMDB lookup failed for %s' % entry['title'], log, logging.WARN)
            # Set all of our fields to None if the lookup failed
            entry.unregister_lazy_fields(self.field_map, self.lazy_loader)
        return entry[field]

    def lookup(self, entry):
        """
        Populates all lazy fields to an Entry. May be called by other plugins
        requiring tmdb info on an Entry

        :param entry: Entry instance
        """
        entry.register_lazy_fields(self.field_map, self.lazy_loader)

    def on_task_metainfo(self, task, config):
        """Task hook: register lazy tmdb fields on every entry when enabled."""
        if not config:
            return
        for entry in task.entries:
            self.lookup(entry)
@event('plugin.register')
def register_plugin():
    """Register this plugin with FlexGet as 'tmdb_lookup' (plugin API v2)."""
    plugin.register(PluginTmdbLookup, 'tmdb_lookup', api_ver=2)
| mit |
analyseuc3m/ANALYSE-v1 | common/djangoapps/auth_exchange/tests/test_forms.py | 113 | 2792 | # pylint: disable=no-member
"""
Tests for OAuth token exchange forms
"""
import unittest
from django.conf import settings
from django.contrib.sessions.middleware import SessionMiddleware
from django.test import TestCase
from django.test.client import RequestFactory
import httpretty
from provider import scope
import social.apps.django_app.utils as social_utils
from auth_exchange.forms import AccessTokenExchangeForm
from auth_exchange.tests.utils import AccessTokenExchangeTestMixin
from third_party_auth.tests.utils import ThirdPartyOAuthTestMixinFacebook, ThirdPartyOAuthTestMixinGoogle
class AccessTokenExchangeFormTest(AccessTokenExchangeTestMixin):
    """
    Mixin that defines test cases for AccessTokenExchangeForm
    """
    def setUp(self):
        """Build a POST request wired up with a session and a social-auth backend."""
        super(AccessTokenExchangeFormTest, self).setUp()
        self.request = RequestFactory().post("dummy_url")
        redirect_uri = 'dummy_redirect_url'
        # The form needs a session and a social-auth strategy/backend on the request.
        SessionMiddleware().process_request(self.request)
        self.request.social_strategy = social_utils.load_strategy(self.request)
        # pylint: disable=no-member
        self.request.backend = social_utils.load_backend(self.request.social_strategy, self.BACKEND, redirect_uri)

    def _assert_error(self, data, expected_error, expected_error_description):
        """Assert the form fails with exactly the given OAuth error payload
        and leaves no partial third-party-auth pipeline in the session."""
        form = AccessTokenExchangeForm(request=self.request, data=data)
        self.assertEqual(
            form.errors,
            {"error": expected_error, "error_description": expected_error_description}
        )
        self.assertNotIn("partial_pipeline", self.request.session)

    def _assert_success(self, data, expected_scopes):
        """Assert the form validates and cleans to the expected user, client
        and scope names."""
        form = AccessTokenExchangeForm(request=self.request, data=data)
        self.assertTrue(form.is_valid())
        self.assertEqual(form.cleaned_data["user"], self.user)
        self.assertEqual(form.cleaned_data["client"], self.oauth_client)
        self.assertEqual(scope.to_names(form.cleaned_data["scope"]), expected_scopes)
# This is necessary because cms does not implement third party auth
@unittest.skipUnless(settings.FEATURES.get("ENABLE_THIRD_PARTY_AUTH"), "third party auth not enabled")
@httpretty.activate
class AccessTokenExchangeFormTestFacebook(
        AccessTokenExchangeFormTest,
        ThirdPartyOAuthTestMixinFacebook,
        TestCase
):
    """Run the AccessTokenExchangeForm test cases against the Facebook backend."""
# This is necessary because cms does not implement third party auth
@unittest.skipUnless(settings.FEATURES.get("ENABLE_THIRD_PARTY_AUTH"), "third party auth not enabled")
@httpretty.activate
class AccessTokenExchangeFormTestGoogle(
        AccessTokenExchangeFormTest,
        ThirdPartyOAuthTestMixinGoogle,
        TestCase
):
    """Run the AccessTokenExchangeForm test cases against the Google backend."""
| agpl-3.0 |
SydneyUniLibrary/auto-holds | patron/migrations/0007_auto_20160316_0257.py | 1 | 1269 | # Copyright 2016 Susan Bennett, David Mitchell, Jim Nicholls
#
# This file is part of AutoHolds.
#
# AutoHolds is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# AutoHolds is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with AutoHolds. If not, see <http://www.gnu.org/licenses/>.
#
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-03-16 02:57
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Alter Registration.format into a FK to patron.Format that is protected
    against cascading deletes (on_delete=PROTECT)."""

    dependencies = [
        ('patron', '0006_registration_language'),
    ]

    operations = [
        migrations.AlterField(
            model_name='registration',
            name='format',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='patron.Format'),
        ),
    ]
| gpl-3.0 |
aabbox/kbengine | kbe/src/lib/python/Lib/test/test_re.py | 60 | 68082 | from test.support import verbose, run_unittest, gc_collect, bigmemtest, _2G, \
cpython_only, captured_stdout
import io
import re
from re import Scanner
import sre_compile
import sre_constants
import sys
import string
import traceback
import unittest
from weakref import proxy
# Misc tests from Tim Peters' re.doc
# WARNING: Don't change details in these tests if you don't know
# what you're doing. Some of these tests were carefully modeled to
# cover most of the code.
class S(str):
    """A str subclass whose indexing and slicing results keep the S type
    (plain ``str.__getitem__`` would return a bare ``str``)."""

    def __getitem__(self, index):
        # Delegate to str, then re-wrap so the result stays an S.
        piece = str.__getitem__(self, index)
        return S(piece)
class B(bytes):
    """A bytes subclass whose slicing results keep the B type
    (plain ``bytes.__getitem__`` would return bare ``bytes``)."""

    def __getitem__(self, index):
        # Delegate to bytes, then re-wrap so the result stays a B.
        piece = bytes.__getitem__(self, index)
        return B(piece)
class ReTests(unittest.TestCase):
def assertTypedEqual(self, actual, expect, msg=None):
self.assertEqual(actual, expect, msg)
def recurse(actual, expect):
if isinstance(expect, (tuple, list)):
for x, y in zip(actual, expect):
recurse(x, y)
else:
self.assertIs(type(actual), type(expect), msg)
recurse(actual, expect)
def test_keep_buffer(self):
# See bug 14212
b = bytearray(b'x')
it = re.finditer(b'a', b)
with self.assertRaises(BufferError):
b.extend(b'x'*400)
list(it)
del it
gc_collect()
b.extend(b'x'*400)
def test_weakref(self):
s = 'QabbbcR'
x = re.compile('ab+c')
y = proxy(x)
self.assertEqual(x.findall('QabbbcR'), y.findall('QabbbcR'))
def test_search_star_plus(self):
self.assertEqual(re.search('x*', 'axx').span(0), (0, 0))
self.assertEqual(re.search('x*', 'axx').span(), (0, 0))
self.assertEqual(re.search('x+', 'axx').span(0), (1, 3))
self.assertEqual(re.search('x+', 'axx').span(), (1, 3))
self.assertIsNone(re.search('x', 'aaa'))
self.assertEqual(re.match('a*', 'xxx').span(0), (0, 0))
self.assertEqual(re.match('a*', 'xxx').span(), (0, 0))
self.assertEqual(re.match('x*', 'xxxa').span(0), (0, 3))
self.assertEqual(re.match('x*', 'xxxa').span(), (0, 3))
self.assertIsNone(re.match('a+', 'xxx'))
def bump_num(self, matchobj):
int_value = int(matchobj.group(0))
return str(int_value + 1)
def test_basic_re_sub(self):
self.assertTypedEqual(re.sub('y', 'a', 'xyz'), 'xaz')
self.assertTypedEqual(re.sub('y', S('a'), S('xyz')), 'xaz')
self.assertTypedEqual(re.sub(b'y', b'a', b'xyz'), b'xaz')
self.assertTypedEqual(re.sub(b'y', B(b'a'), B(b'xyz')), b'xaz')
self.assertTypedEqual(re.sub(b'y', bytearray(b'a'), bytearray(b'xyz')), b'xaz')
self.assertTypedEqual(re.sub(b'y', memoryview(b'a'), memoryview(b'xyz')), b'xaz')
for y in ("\xe0", "\u0430", "\U0001d49c"):
self.assertEqual(re.sub(y, 'a', 'x%sz' % y), 'xaz')
self.assertEqual(re.sub("(?i)b+", "x", "bbbb BBBB"), 'x x')
self.assertEqual(re.sub(r'\d+', self.bump_num, '08.2 -2 23x99y'),
'9.3 -3 24x100y')
self.assertEqual(re.sub(r'\d+', self.bump_num, '08.2 -2 23x99y', 3),
'9.3 -3 23x99y')
self.assertEqual(re.sub('.', lambda m: r"\n", 'x'), '\\n')
self.assertEqual(re.sub('.', r"\n", 'x'), '\n')
s = r"\1\1"
self.assertEqual(re.sub('(.)', s, 'x'), 'xx')
self.assertEqual(re.sub('(.)', re.escape(s), 'x'), s)
self.assertEqual(re.sub('(.)', lambda m: s, 'x'), s)
self.assertEqual(re.sub('(?P<a>x)', '\g<a>\g<a>', 'xx'), 'xxxx')
self.assertEqual(re.sub('(?P<a>x)', '\g<a>\g<1>', 'xx'), 'xxxx')
self.assertEqual(re.sub('(?P<unk>x)', '\g<unk>\g<unk>', 'xx'), 'xxxx')
self.assertEqual(re.sub('(?P<unk>x)', '\g<1>\g<1>', 'xx'), 'xxxx')
self.assertEqual(re.sub('a',r'\t\n\v\r\f\a\b\B\Z\a\A\w\W\s\S\d\D','a'),
'\t\n\v\r\f\a\b\\B\\Z\a\\A\\w\\W\\s\\S\\d\\D')
self.assertEqual(re.sub('a', '\t\n\v\r\f\a', 'a'), '\t\n\v\r\f\a')
self.assertEqual(re.sub('a', '\t\n\v\r\f\a', 'a'),
(chr(9)+chr(10)+chr(11)+chr(13)+chr(12)+chr(7)))
self.assertEqual(re.sub('^\s*', 'X', 'test'), 'Xtest')
def test_bug_449964(self):
# fails for group followed by other escape
self.assertEqual(re.sub(r'(?P<unk>x)', '\g<1>\g<1>\\b', 'xx'),
'xx\bxx\b')
def test_bug_449000(self):
# Test for sub() on escaped characters
self.assertEqual(re.sub(r'\r\n', r'\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
self.assertEqual(re.sub('\r\n', r'\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
self.assertEqual(re.sub(r'\r\n', '\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
self.assertEqual(re.sub('\r\n', '\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
def test_bug_1661(self):
# Verify that flags do not get silently ignored with compiled patterns
pattern = re.compile('.')
self.assertRaises(ValueError, re.match, pattern, 'A', re.I)
self.assertRaises(ValueError, re.search, pattern, 'A', re.I)
self.assertRaises(ValueError, re.findall, pattern, 'A', re.I)
self.assertRaises(ValueError, re.compile, pattern, re.I)
def test_bug_3629(self):
# A regex that triggered a bug in the sre-code validator
re.compile("(?P<quote>)(?(quote))")
def test_sub_template_numeric_escape(self):
# bug 776311 and friends
self.assertEqual(re.sub('x', r'\0', 'x'), '\0')
self.assertEqual(re.sub('x', r'\000', 'x'), '\000')
self.assertEqual(re.sub('x', r'\001', 'x'), '\001')
self.assertEqual(re.sub('x', r'\008', 'x'), '\0' + '8')
self.assertEqual(re.sub('x', r'\009', 'x'), '\0' + '9')
self.assertEqual(re.sub('x', r'\111', 'x'), '\111')
self.assertEqual(re.sub('x', r'\117', 'x'), '\117')
self.assertEqual(re.sub('x', r'\1111', 'x'), '\1111')
self.assertEqual(re.sub('x', r'\1111', 'x'), '\111' + '1')
self.assertEqual(re.sub('x', r'\00', 'x'), '\x00')
self.assertEqual(re.sub('x', r'\07', 'x'), '\x07')
self.assertEqual(re.sub('x', r'\08', 'x'), '\0' + '8')
self.assertEqual(re.sub('x', r'\09', 'x'), '\0' + '9')
self.assertEqual(re.sub('x', r'\0a', 'x'), '\0' + 'a')
self.assertEqual(re.sub('x', r'\400', 'x'), '\0')
self.assertEqual(re.sub('x', r'\777', 'x'), '\377')
self.assertRaises(re.error, re.sub, 'x', r'\1', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\8', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\9', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\11', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\18', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\1a', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\90', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\99', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\118', 'x') # r'\11' + '8'
self.assertRaises(re.error, re.sub, 'x', r'\11a', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\181', 'x') # r'\18' + '1'
self.assertRaises(re.error, re.sub, 'x', r'\800', 'x') # r'\80' + '0'
# in python2.3 (etc), these loop endlessly in sre_parser.py
self.assertEqual(re.sub('(((((((((((x)))))))))))', r'\11', 'x'), 'x')
self.assertEqual(re.sub('((((((((((y))))))))))(.)', r'\118', 'xyz'),
'xz8')
self.assertEqual(re.sub('((((((((((y))))))))))(.)', r'\11a', 'xyz'),
'xza')
def test_qualified_re_sub(self):
self.assertEqual(re.sub('a', 'b', 'aaaaa'), 'bbbbb')
self.assertEqual(re.sub('a', 'b', 'aaaaa', 1), 'baaaa')
def test_bug_114660(self):
self.assertEqual(re.sub(r'(\S)\s+(\S)', r'\1 \2', 'hello there'),
'hello there')
def test_bug_462270(self):
# Test for empty sub() behaviour, see SF bug #462270
self.assertEqual(re.sub('x*', '-', 'abxd'), '-a-b-d-')
self.assertEqual(re.sub('x+', '-', 'abxd'), 'ab-d')
def test_symbolic_groups(self):
re.compile('(?P<a>x)(?P=a)(?(a)y)')
re.compile('(?P<a1>x)(?P=a1)(?(a1)y)')
self.assertRaises(re.error, re.compile, '(?P<a>)(?P<a>)')
self.assertRaises(re.error, re.compile, '(?Px)')
self.assertRaises(re.error, re.compile, '(?P=)')
self.assertRaises(re.error, re.compile, '(?P=1)')
self.assertRaises(re.error, re.compile, '(?P=a)')
self.assertRaises(re.error, re.compile, '(?P=a1)')
self.assertRaises(re.error, re.compile, '(?P=a.)')
self.assertRaises(re.error, re.compile, '(?P<)')
self.assertRaises(re.error, re.compile, '(?P<>)')
self.assertRaises(re.error, re.compile, '(?P<1>)')
self.assertRaises(re.error, re.compile, '(?P<a.>)')
self.assertRaises(re.error, re.compile, '(?())')
self.assertRaises(re.error, re.compile, '(?(a))')
self.assertRaises(re.error, re.compile, '(?(1a))')
self.assertRaises(re.error, re.compile, '(?(a.))')
# New valid/invalid identifiers in Python 3
re.compile('(?P<µ>x)(?P=µ)(?(µ)y)')
re.compile('(?P<𝔘𝔫𝔦𝔠𝔬𝔡𝔢>x)(?P=𝔘𝔫𝔦𝔠𝔬𝔡𝔢)(?(𝔘𝔫𝔦𝔠𝔬𝔡𝔢)y)')
self.assertRaises(re.error, re.compile, '(?P<©>x)')
def test_symbolic_refs(self):
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<a', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<a a>', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<>', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<1a1>', 'xx')
self.assertRaises(IndexError, re.sub, '(?P<a>x)', '\g<ab>', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)|(?P<b>y)', '\g<b>', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)|(?P<b>y)', '\\2', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<-1>', 'xx')
# New valid/invalid identifiers in Python 3
self.assertEqual(re.sub('(?P<µ>x)', r'\g<µ>', 'xx'), 'xx')
self.assertEqual(re.sub('(?P<𝔘𝔫𝔦𝔠𝔬𝔡𝔢>x)', r'\g<𝔘𝔫𝔦𝔠𝔬𝔡𝔢>', 'xx'), 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', r'\g<©>', 'xx')
def test_re_subn(self):
self.assertEqual(re.subn("(?i)b+", "x", "bbbb BBBB"), ('x x', 2))
self.assertEqual(re.subn("b+", "x", "bbbb BBBB"), ('x BBBB', 1))
self.assertEqual(re.subn("b+", "x", "xyz"), ('xyz', 0))
self.assertEqual(re.subn("b*", "x", "xyz"), ('xxxyxzx', 4))
self.assertEqual(re.subn("b*", "x", "xyz", 2), ('xxxyz', 2))
def test_re_split(self):
for string in ":a:b::c", S(":a:b::c"):
self.assertTypedEqual(re.split(":", string),
['', 'a', 'b', '', 'c'])
self.assertTypedEqual(re.split(":*", string),
['', 'a', 'b', 'c'])
self.assertTypedEqual(re.split("(:*)", string),
['', ':', 'a', ':', 'b', '::', 'c'])
for string in (b":a:b::c", B(b":a:b::c"), bytearray(b":a:b::c"),
memoryview(b":a:b::c")):
self.assertTypedEqual(re.split(b":", string),
[b'', b'a', b'b', b'', b'c'])
self.assertTypedEqual(re.split(b":*", string),
[b'', b'a', b'b', b'c'])
self.assertTypedEqual(re.split(b"(:*)", string),
[b'', b':', b'a', b':', b'b', b'::', b'c'])
for a, b, c in ("\xe0\xdf\xe7", "\u0430\u0431\u0432",
"\U0001d49c\U0001d49e\U0001d4b5"):
string = ":%s:%s::%s" % (a, b, c)
self.assertEqual(re.split(":", string), ['', a, b, '', c])
self.assertEqual(re.split(":*", string), ['', a, b, c])
self.assertEqual(re.split("(:*)", string),
['', ':', a, ':', b, '::', c])
self.assertEqual(re.split("(?::*)", ":a:b::c"), ['', 'a', 'b', 'c'])
self.assertEqual(re.split("(:)*", ":a:b::c"),
['', ':', 'a', ':', 'b', ':', 'c'])
self.assertEqual(re.split("([b:]+)", ":a:b::c"),
['', ':', 'a', ':b::', 'c'])
self.assertEqual(re.split("(b)|(:+)", ":a:b::c"),
['', None, ':', 'a', None, ':', '', 'b', None, '',
None, '::', 'c'])
self.assertEqual(re.split("(?:b)|(?::+)", ":a:b::c"),
['', 'a', '', '', 'c'])
def test_qualified_re_split(self):
self.assertEqual(re.split(":", ":a:b::c", 2), ['', 'a', 'b::c'])
self.assertEqual(re.split(':', 'a:b:c:d', 2), ['a', 'b', 'c:d'])
self.assertEqual(re.split("(:)", ":a:b::c", 2),
['', ':', 'a', ':', 'b::c'])
self.assertEqual(re.split("(:*)", ":a:b::c", 2),
['', ':', 'a', ':', 'b::c'])
def test_re_findall(self):
self.assertEqual(re.findall(":+", "abc"), [])
for string in "a:b::c:::d", S("a:b::c:::d"):
self.assertTypedEqual(re.findall(":+", string),
[":", "::", ":::"])
self.assertTypedEqual(re.findall("(:+)", string),
[":", "::", ":::"])
self.assertTypedEqual(re.findall("(:)(:*)", string),
[(":", ""), (":", ":"), (":", "::")])
for string in (b"a:b::c:::d", B(b"a:b::c:::d"), bytearray(b"a:b::c:::d"),
memoryview(b"a:b::c:::d")):
self.assertTypedEqual(re.findall(b":+", string),
[b":", b"::", b":::"])
self.assertTypedEqual(re.findall(b"(:+)", string),
[b":", b"::", b":::"])
self.assertTypedEqual(re.findall(b"(:)(:*)", string),
[(b":", b""), (b":", b":"), (b":", b"::")])
for x in ("\xe0", "\u0430", "\U0001d49c"):
xx = x * 2
xxx = x * 3
string = "a%sb%sc%sd" % (x, xx, xxx)
self.assertEqual(re.findall("%s+" % x, string), [x, xx, xxx])
self.assertEqual(re.findall("(%s+)" % x, string), [x, xx, xxx])
self.assertEqual(re.findall("(%s)(%s*)" % (x, x), string),
[(x, ""), (x, x), (x, xx)])
def test_bug_117612(self):
self.assertEqual(re.findall(r"(a|(b))", "aba"),
[("a", ""),("b", "b"),("a", "")])
def test_re_match(self):
for string in 'a', S('a'):
self.assertEqual(re.match('a', string).groups(), ())
self.assertEqual(re.match('(a)', string).groups(), ('a',))
self.assertEqual(re.match('(a)', string).group(0), 'a')
self.assertEqual(re.match('(a)', string).group(1), 'a')
self.assertEqual(re.match('(a)', string).group(1, 1), ('a', 'a'))
for string in b'a', B(b'a'), bytearray(b'a'), memoryview(b'a'):
self.assertEqual(re.match(b'a', string).groups(), ())
self.assertEqual(re.match(b'(a)', string).groups(), (b'a',))
self.assertEqual(re.match(b'(a)', string).group(0), b'a')
self.assertEqual(re.match(b'(a)', string).group(1), b'a')
self.assertEqual(re.match(b'(a)', string).group(1, 1), (b'a', b'a'))
for a in ("\xe0", "\u0430", "\U0001d49c"):
self.assertEqual(re.match(a, a).groups(), ())
self.assertEqual(re.match('(%s)' % a, a).groups(), (a,))
self.assertEqual(re.match('(%s)' % a, a).group(0), a)
self.assertEqual(re.match('(%s)' % a, a).group(1), a)
self.assertEqual(re.match('(%s)' % a, a).group(1, 1), (a, a))
pat = re.compile('((a)|(b))(c)?')
self.assertEqual(pat.match('a').groups(), ('a', 'a', None, None))
self.assertEqual(pat.match('b').groups(), ('b', None, 'b', None))
self.assertEqual(pat.match('ac').groups(), ('a', 'a', None, 'c'))
self.assertEqual(pat.match('bc').groups(), ('b', None, 'b', 'c'))
self.assertEqual(pat.match('bc').groups(""), ('b', "", 'b', 'c'))
# A single group
m = re.match('(a)', 'a')
self.assertEqual(m.group(0), 'a')
self.assertEqual(m.group(0), 'a')
self.assertEqual(m.group(1), 'a')
self.assertEqual(m.group(1, 1), ('a', 'a'))
pat = re.compile('(?:(?P<a1>a)|(?P<b2>b))(?P<c3>c)?')
self.assertEqual(pat.match('a').group(1, 2, 3), ('a', None, None))
self.assertEqual(pat.match('b').group('a1', 'b2', 'c3'),
(None, 'b', None))
self.assertEqual(pat.match('ac').group(1, 'b2', 3), ('a', None, 'c'))
def test_re_fullmatch(self):
# Issue 16203: Proposal: add re.fullmatch() method.
self.assertEqual(re.fullmatch(r"a", "a").span(), (0, 1))
for string in "ab", S("ab"):
self.assertEqual(re.fullmatch(r"a|ab", string).span(), (0, 2))
for string in b"ab", B(b"ab"), bytearray(b"ab"), memoryview(b"ab"):
self.assertEqual(re.fullmatch(br"a|ab", string).span(), (0, 2))
for a, b in "\xe0\xdf", "\u0430\u0431", "\U0001d49c\U0001d49e":
r = r"%s|%s" % (a, a + b)
self.assertEqual(re.fullmatch(r, a + b).span(), (0, 2))
self.assertEqual(re.fullmatch(r".*?$", "abc").span(), (0, 3))
self.assertEqual(re.fullmatch(r".*?", "abc").span(), (0, 3))
self.assertEqual(re.fullmatch(r"a.*?b", "ab").span(), (0, 2))
self.assertEqual(re.fullmatch(r"a.*?b", "abb").span(), (0, 3))
self.assertEqual(re.fullmatch(r"a.*?b", "axxb").span(), (0, 4))
self.assertIsNone(re.fullmatch(r"a+", "ab"))
self.assertIsNone(re.fullmatch(r"abc$", "abc\n"))
self.assertIsNone(re.fullmatch(r"abc\Z", "abc\n"))
self.assertIsNone(re.fullmatch(r"(?m)abc$", "abc\n"))
self.assertEqual(re.fullmatch(r"ab(?=c)cd", "abcd").span(), (0, 4))
self.assertEqual(re.fullmatch(r"ab(?<=b)cd", "abcd").span(), (0, 4))
self.assertEqual(re.fullmatch(r"(?=a|ab)ab", "ab").span(), (0, 2))
self.assertEqual(
re.compile(r"bc").fullmatch("abcd", pos=1, endpos=3).span(), (1, 3))
self.assertEqual(
re.compile(r".*?$").fullmatch("abcd", pos=1, endpos=3).span(), (1, 3))
self.assertEqual(
re.compile(r".*?").fullmatch("abcd", pos=1, endpos=3).span(), (1, 3))
def test_re_groupref_exists(self):
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', '(a)').groups(),
('(', 'a'))
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', 'a').groups(),
(None, 'a'))
self.assertIsNone(re.match('^(\()?([^()]+)(?(1)\))$', 'a)'))
self.assertIsNone(re.match('^(\()?([^()]+)(?(1)\))$', '(a'))
self.assertEqual(re.match('^(?:(a)|c)((?(1)b|d))$', 'ab').groups(),
('a', 'b'))
self.assertEqual(re.match('^(?:(a)|c)((?(1)b|d))$', 'cd').groups(),
(None, 'd'))
self.assertEqual(re.match('^(?:(a)|c)((?(1)|d))$', 'cd').groups(),
(None, 'd'))
self.assertEqual(re.match('^(?:(a)|c)((?(1)|d))$', 'a').groups(),
('a', ''))
# Tests for bug #1177831: exercise groups other than the first group
p = re.compile('(?P<g1>a)(?P<g2>b)?((?(g2)c|d))')
self.assertEqual(p.match('abc').groups(),
('a', 'b', 'c'))
self.assertEqual(p.match('ad').groups(),
('a', None, 'd'))
self.assertIsNone(p.match('abd'))
self.assertIsNone(p.match('ac'))
def test_re_groupref(self):
self.assertEqual(re.match(r'^(\|)?([^()]+)\1$', '|a|').groups(),
('|', 'a'))
self.assertEqual(re.match(r'^(\|)?([^()]+)\1?$', 'a').groups(),
(None, 'a'))
self.assertIsNone(re.match(r'^(\|)?([^()]+)\1$', 'a|'))
self.assertIsNone(re.match(r'^(\|)?([^()]+)\1$', '|a'))
self.assertEqual(re.match(r'^(?:(a)|c)(\1)$', 'aa').groups(),
('a', 'a'))
self.assertEqual(re.match(r'^(?:(a)|c)(\1)?$', 'c').groups(),
(None, None))
def test_groupdict(self):
self.assertEqual(re.match('(?P<first>first) (?P<second>second)',
'first second').groupdict(),
{'first':'first', 'second':'second'})
def test_expand(self):
self.assertEqual(re.match("(?P<first>first) (?P<second>second)",
"first second")
.expand(r"\2 \1 \g<second> \g<first>"),
"second first second first")
    def test_repeat_minmax(self):
        """Bounded repetition {m}, {m,n} and the non-greedy variants, applied
        both to capturing groups and to plain literals."""
        self.assertIsNone(re.match("^(\w){1}$", "abc"))
        self.assertIsNone(re.match("^(\w){1}?$", "abc"))
        self.assertIsNone(re.match("^(\w){1,2}$", "abc"))
        self.assertIsNone(re.match("^(\w){1,2}?$", "abc"))
        self.assertEqual(re.match("^(\w){3}$", "abc").group(1), "c")
        self.assertEqual(re.match("^(\w){1,3}$", "abc").group(1), "c")
        self.assertEqual(re.match("^(\w){1,4}$", "abc").group(1), "c")
        self.assertEqual(re.match("^(\w){3,4}?$", "abc").group(1), "c")
        self.assertEqual(re.match("^(\w){3}?$", "abc").group(1), "c")
        self.assertEqual(re.match("^(\w){1,3}?$", "abc").group(1), "c")
        self.assertEqual(re.match("^(\w){1,4}?$", "abc").group(1), "c")
        self.assertEqual(re.match("^(\w){3,4}?$", "abc").group(1), "c")
        self.assertIsNone(re.match("^x{1}$", "xxx"))
        self.assertIsNone(re.match("^x{1}?$", "xxx"))
        self.assertIsNone(re.match("^x{1,2}$", "xxx"))
        self.assertIsNone(re.match("^x{1,2}?$", "xxx"))
        self.assertTrue(re.match("^x{3}$", "xxx"))
        self.assertTrue(re.match("^x{1,3}$", "xxx"))
        self.assertTrue(re.match("^x{1,4}$", "xxx"))
        self.assertTrue(re.match("^x{3,4}?$", "xxx"))
        self.assertTrue(re.match("^x{3}?$", "xxx"))
        self.assertTrue(re.match("^x{1,3}?$", "xxx"))
        self.assertTrue(re.match("^x{1,4}?$", "xxx"))
        self.assertTrue(re.match("^x{3,4}?$", "xxx"))
        # An empty {} is not a repetition operator; it matches literally.
        self.assertIsNone(re.match("^x{}$", "xxx"))
        self.assertTrue(re.match("^x{}$", "x{}"))
    def test_getattr(self):
        """Introspection attributes of compiled patterns and match objects."""
        self.assertEqual(re.compile("(?i)(a)(b)").pattern, "(?i)(a)(b)")
        # Inline (?i) shows up in flags; str patterns get re.U implicitly.
        self.assertEqual(re.compile("(?i)(a)(b)").flags, re.I | re.U)
        self.assertEqual(re.compile("(?i)(a)(b)").groups, 2)
        self.assertEqual(re.compile("(?i)(a)(b)").groupindex, {})
        self.assertEqual(re.compile("(?i)(?P<first>a)(?P<other>b)").groupindex,
                         {'first': 1, 'other': 2})
        self.assertEqual(re.match("(a)", "a").pos, 0)
        self.assertEqual(re.match("(a)", "a").endpos, 1)
        self.assertEqual(re.match("(a)", "a").string, "a")
        self.assertEqual(re.match("(a)", "a").regs, ((0, 1), (0, 1)))
        self.assertTrue(re.match("(a)", "a").re)
    def test_special_escapes(self):
        """Word-boundary and anchor escapes, and the character-class escapes,
        for str and bytes patterns under default, ASCII and LOCALE flags."""
        self.assertEqual(re.search(r"\b(b.)\b",
                                   "abcd abc bcd bx").group(1), "bx")
        self.assertEqual(re.search(r"\B(b.)\B",
                                   "abc bcd bc abxd").group(1), "bx")
        self.assertEqual(re.search(r"\b(b.)\b",
                                   "abcd abc bcd bx", re.ASCII).group(1), "bx")
        self.assertEqual(re.search(r"\B(b.)\B",
                                   "abc bcd bc abxd", re.ASCII).group(1), "bx")
        self.assertEqual(re.search(r"\b(b.)\b",
                                   "abcd abc bcd bx", re.LOCALE).group(1), "bx")
        self.assertEqual(re.search(r"\B(b.)\B",
                                   "abc bcd bc abxd", re.LOCALE).group(1), "bx")
        self.assertEqual(re.search(r"^abc$", "\nabc\n", re.M).group(0), "abc")
        self.assertEqual(re.search(r"^\Aabc\Z$", "abc", re.M).group(0), "abc")
        self.assertIsNone(re.search(r"^\Aabc\Z$", "\nabc\n", re.M))
        self.assertEqual(re.search(br"\b(b.)\b",
                                   b"abcd abc bcd bx").group(1), b"bx")
        self.assertEqual(re.search(br"\B(b.)\B",
                                   b"abc bcd bc abxd").group(1), b"bx")
        self.assertEqual(re.search(br"\b(b.)\b",
                                   b"abcd abc bcd bx", re.LOCALE).group(1), b"bx")
        self.assertEqual(re.search(br"\B(b.)\B",
                                   b"abc bcd bc abxd", re.LOCALE).group(1), b"bx")
        self.assertEqual(re.search(br"^abc$", b"\nabc\n", re.M).group(0), b"abc")
        self.assertEqual(re.search(br"^\Aabc\Z$", b"abc", re.M).group(0), b"abc")
        self.assertIsNone(re.search(br"^\Aabc\Z$", b"\nabc\n", re.M))
        self.assertEqual(re.search(r"\d\D\w\W\s\S",
                                   "1aa! a").group(0), "1aa! a")
        self.assertEqual(re.search(br"\d\D\w\W\s\S",
                                   b"1aa! a").group(0), b"1aa! a")
        self.assertEqual(re.search(r"\d\D\w\W\s\S",
                                   "1aa! a", re.ASCII).group(0), "1aa! a")
        self.assertEqual(re.search(r"\d\D\w\W\s\S",
                                   "1aa! a", re.LOCALE).group(0), "1aa! a")
        self.assertEqual(re.search(br"\d\D\w\W\s\S",
                                   b"1aa! a", re.LOCALE).group(0), b"1aa! a")
    def test_string_boundaries(self):
        """Word boundaries at the edges of strings (issue #10713)."""
        # See http://bugs.python.org/issue10713
        self.assertEqual(re.search(r"\b(abc)\b", "abc").group(1),
                         "abc")
        # There's a word boundary at the start of a string.
        self.assertTrue(re.match(r"\b", "abc"))
        # A non-empty string includes a non-boundary zero-length match.
        self.assertTrue(re.search(r"\B", "abc"))
        # There is no non-boundary match at the start of a string.
        self.assertFalse(re.match(r"\B", "abc"))
        # However, an empty string contains no word boundaries, and also no
        # non-boundaries.
        self.assertIsNone(re.search(r"\B", ""))
        # This one is questionable and different from the perlre behaviour,
        # but describes current behavior.
        self.assertIsNone(re.search(r"\b", ""))
        # A single word-character string has two boundaries, but no
        # non-boundary gaps.
        self.assertEqual(len(re.findall(r"\b", "a")), 2)
        self.assertEqual(len(re.findall(r"\B", "a")), 0)
        # If there are no words, there are no boundaries
        self.assertEqual(len(re.findall(r"\b", " ")), 0)
        self.assertEqual(len(re.findall(r"\b", "   ")), 0)
        # Can match around the whitespace.
        self.assertEqual(len(re.findall(r"\B", " ")), 2)
    def test_bigcharset(self):
        """Character classes containing many non-ASCII code points."""
        self.assertEqual(re.match("([\u2222\u2223])",
                                  "\u2222").group(1), "\u2222")
        r = '[%s]' % ''.join(map(chr, range(256, 2**16, 255)))
        self.assertEqual(re.match(r, "\uff01").group(), "\uff01")
    def test_big_codesize(self):
        """A pattern that compiles to a very large code object still works."""
        # Issue #1160
        r = re.compile('|'.join(('%d'%x for x in range(10000))))
        self.assertTrue(r.match('1000'))
        self.assertTrue(r.match('9999'))
def test_anyall(self):
self.assertEqual(re.match("a.b", "a\nb", re.DOTALL).group(0),
"a\nb")
self.assertEqual(re.match("a.*b", "a\n\nb", re.DOTALL).group(0),
"a\n\nb")
    def test_non_consuming(self):
        """Lookahead assertions (?=...) and (?!...) match without consuming
        input, including backreferences inside the assertion."""
        self.assertEqual(re.match("(a(?=\s[^a]))", "a b").group(1), "a")
        self.assertEqual(re.match("(a(?=\s[^a]*))", "a b").group(1), "a")
        self.assertEqual(re.match("(a(?=\s[abc]))", "a b").group(1), "a")
        self.assertEqual(re.match("(a(?=\s[abc]*))", "a bc").group(1), "a")
        self.assertEqual(re.match(r"(a)(?=\s\1)", "a a").group(1), "a")
        self.assertEqual(re.match(r"(a)(?=\s\1*)", "a aa").group(1), "a")
        self.assertEqual(re.match(r"(a)(?=\s(abc|a))", "a a").group(1), "a")
        self.assertEqual(re.match(r"(a(?!\s[^a]))", "a a").group(1), "a")
        self.assertEqual(re.match(r"(a(?!\s[abc]))", "a d").group(1), "a")
        self.assertEqual(re.match(r"(a)(?!\s\1)", "a b").group(1), "a")
        self.assertEqual(re.match(r"(a)(?!\s(abc|a))", "a b").group(1), "a")
    def test_ignore_case(self):
        """re.I matching for literals, classes and backreferences, in both
        str and bytes patterns."""
        self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC")
        self.assertEqual(re.match(b"abc", b"ABC", re.I).group(0), b"ABC")
        self.assertEqual(re.match(r"(a\s[^a])", "a b", re.I).group(1), "a b")
        self.assertEqual(re.match(r"(a\s[^a]*)", "a bb", re.I).group(1), "a bb")
        self.assertEqual(re.match(r"(a\s[abc])", "a b", re.I).group(1), "a b")
        self.assertEqual(re.match(r"(a\s[abc]*)", "a bb", re.I).group(1), "a bb")
        self.assertEqual(re.match(r"((a)\s\2)", "a a", re.I).group(1), "a a")
        self.assertEqual(re.match(r"((a)\s\2*)", "a aa", re.I).group(1), "a aa")
        self.assertEqual(re.match(r"((a)\s(abc|a))", "a a", re.I).group(1), "a a")
        self.assertEqual(re.match(r"((a)\s(abc|a)*)", "a aa", re.I).group(1), "a aa")
    def test_category(self):
        """A category escape can be captured like any other atom."""
        self.assertEqual(re.match(r"(\s)", " ").group(1), " ")
    def test_getlower(self):
        """_sre.getlower lowercases a code point under the given flags."""
        import _sre
        self.assertEqual(_sre.getlower(ord('A'), 0), ord('a'))
        self.assertEqual(_sre.getlower(ord('A'), re.LOCALE), ord('a'))
        self.assertEqual(_sre.getlower(ord('A'), re.UNICODE), ord('a'))
        self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC")
        self.assertEqual(re.match(b"abc", b"ABC", re.I).group(0), b"ABC")
def test_not_literal(self):
self.assertEqual(re.search("\s([^a])", " b").group(1), "b")
self.assertEqual(re.search("\s([^a]*)", " bb").group(1), "bb")
def test_search_coverage(self):
self.assertEqual(re.search("\s(b)", " b").group(1), "b")
self.assertEqual(re.search("a\s", "a ").group(0), "a ")
def assertMatch(self, pattern, text, match=None, span=None,
matcher=re.match):
if match is None and span is None:
# the pattern matches the whole text
match = text
span = (0, len(text))
elif match is None or span is None:
raise ValueError('If match is not None, span should be specified '
'(and vice versa).')
m = matcher(pattern, text)
self.assertTrue(m)
self.assertEqual(m.group(), match)
self.assertEqual(m.span(), span)
    def test_re_escape(self):
        """re.escape leaves word characters alone and backslash-escapes
        everything else (NUL becomes an octal escape)."""
        alnum_chars = string.ascii_letters + string.digits + '_'
        p = ''.join(chr(i) for i in range(256))
        for c in p:
            if c in alnum_chars:
                self.assertEqual(re.escape(c), c)
            elif c == '\x00':
                self.assertEqual(re.escape(c), '\\000')
            else:
                self.assertEqual(re.escape(c), '\\' + c)
            self.assertMatch(re.escape(c), c)
        self.assertMatch(re.escape(p), p)
    def test_re_escape_byte(self):
        """Same contract as test_re_escape, but for bytes input."""
        alnum_chars = (string.ascii_letters + string.digits + '_').encode('ascii')
        p = bytes(range(256))
        for i in p:
            b = bytes([i])
            if b in alnum_chars:
                self.assertEqual(re.escape(b), b)
            elif i == 0:
                self.assertEqual(re.escape(b), b'\\000')
            else:
                self.assertEqual(re.escape(b), b'\\' + b)
            self.assertMatch(re.escape(b), b)
        self.assertMatch(re.escape(p), p)
    def test_re_escape_non_ascii(self):
        """Non-ASCII characters are escaped and still match themselves."""
        s = 'xxx\u2620\u2620\u2620xxx'
        s_escaped = re.escape(s)
        self.assertEqual(s_escaped, 'xxx\\\u2620\\\u2620\\\u2620xxx')
        self.assertMatch(s_escaped, s)
        self.assertMatch('.%s+.' % re.escape('\u2620'), s,
                         'x\u2620\u2620\u2620x', (2, 7), re.search)
    def test_re_escape_non_ascii_bytes(self):
        """Escaping UTF-8 encoded bytes escapes each individual byte."""
        b = 'y\u2620y\u2620y'.encode('utf-8')
        b_escaped = re.escape(b)
        self.assertEqual(b_escaped, b'y\\\xe2\\\x98\\\xa0y\\\xe2\\\x98\\\xa0y')
        self.assertMatch(b_escaped, b)
        res = re.findall(re.escape('\u2620'.encode('utf-8')), b)
        self.assertEqual(len(res), 2)
    def test_pickling(self):
        """Compiled patterns survive a pickle round-trip at every protocol."""
        import pickle
        oldpat = re.compile('a(?:b|(c|e){1,2}?|d)+?(.)', re.UNICODE)
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            pickled = pickle.dumps(oldpat, proto)
            newpat = pickle.loads(pickled)
            self.assertEqual(newpat, oldpat)
        # current pickle expects the _compile() reconstructor in re module
        from re import _compile
def test_constants(self):
self.assertEqual(re.I, re.IGNORECASE)
self.assertEqual(re.L, re.LOCALE)
self.assertEqual(re.M, re.MULTILINE)
self.assertEqual(re.S, re.DOTALL)
self.assertEqual(re.X, re.VERBOSE)
    def test_flags(self):
        """Patterns compile successfully under each basic flag."""
        for flag in [re.I, re.M, re.X, re.S, re.L]:
            self.assertTrue(re.compile('^pattern$', flag))
    def test_sre_character_literals(self):
        """Octal, \\x, \\u and \\U escapes in str patterns match exactly one
        character; malformed or out-of-range escapes raise re.error."""
        for i in [0, 8, 16, 32, 64, 127, 128, 255, 256, 0xFFFF, 0x10000, 0x10FFFF]:
            if i < 256:
                self.assertTrue(re.match(r"\%03o" % i, chr(i)))
                self.assertTrue(re.match(r"\%03o0" % i, chr(i)+"0"))
                self.assertTrue(re.match(r"\%03o8" % i, chr(i)+"8"))
                self.assertTrue(re.match(r"\x%02x" % i, chr(i)))
                self.assertTrue(re.match(r"\x%02x0" % i, chr(i)+"0"))
                self.assertTrue(re.match(r"\x%02xz" % i, chr(i)+"z"))
            if i < 0x10000:
                self.assertTrue(re.match(r"\u%04x" % i, chr(i)))
                self.assertTrue(re.match(r"\u%04x0" % i, chr(i)+"0"))
                self.assertTrue(re.match(r"\u%04xz" % i, chr(i)+"z"))
            self.assertTrue(re.match(r"\U%08x" % i, chr(i)))
            self.assertTrue(re.match(r"\U%08x0" % i, chr(i)+"0"))
            self.assertTrue(re.match(r"\U%08xz" % i, chr(i)+"z"))
        self.assertTrue(re.match(r"\0", "\000"))
        self.assertTrue(re.match(r"\08", "\0008"))
        self.assertTrue(re.match(r"\01", "\001"))
        self.assertTrue(re.match(r"\018", "\0018"))
        self.assertTrue(re.match(r"\567", chr(0o167)))
        self.assertRaises(re.error, re.match, r"\911", "")
        self.assertRaises(re.error, re.match, r"\x1", "")
        self.assertRaises(re.error, re.match, r"\x1z", "")
        self.assertRaises(re.error, re.match, r"\u123", "")
        self.assertRaises(re.error, re.match, r"\u123z", "")
        self.assertRaises(re.error, re.match, r"\U0001234", "")
        self.assertRaises(re.error, re.match, r"\U0001234z", "")
        self.assertRaises(re.error, re.match, r"\U00110000", "")
    def test_sre_character_class_literals(self):
        """The same numeric escapes inside character classes, where short
        octal forms are also allowed."""
        for i in [0, 8, 16, 32, 64, 127, 128, 255, 256, 0xFFFF, 0x10000, 0x10FFFF]:
            if i < 256:
                self.assertTrue(re.match(r"[\%o]" % i, chr(i)))
                self.assertTrue(re.match(r"[\%o8]" % i, chr(i)))
                self.assertTrue(re.match(r"[\%03o]" % i, chr(i)))
                self.assertTrue(re.match(r"[\%03o0]" % i, chr(i)))
                self.assertTrue(re.match(r"[\%03o8]" % i, chr(i)))
                self.assertTrue(re.match(r"[\x%02x]" % i, chr(i)))
                self.assertTrue(re.match(r"[\x%02x0]" % i, chr(i)))
                self.assertTrue(re.match(r"[\x%02xz]" % i, chr(i)))
            if i < 0x10000:
                self.assertTrue(re.match(r"[\u%04x]" % i, chr(i)))
                self.assertTrue(re.match(r"[\u%04x0]" % i, chr(i)))
                self.assertTrue(re.match(r"[\u%04xz]" % i, chr(i)))
            self.assertTrue(re.match(r"[\U%08x]" % i, chr(i)))
            self.assertTrue(re.match(r"[\U%08x0]" % i, chr(i)+"0"))
            self.assertTrue(re.match(r"[\U%08xz]" % i, chr(i)+"z"))
        self.assertTrue(re.match(r"[\U0001d49c-\U0001d4b5]", "\U0001d49e"))
        self.assertRaises(re.error, re.match, r"[\911]", "")
        self.assertRaises(re.error, re.match, r"[\x1z]", "")
        self.assertRaises(re.error, re.match, r"[\u123z]", "")
        self.assertRaises(re.error, re.match, r"[\U0001234z]", "")
        self.assertRaises(re.error, re.match, r"[\U00110000]", "")
    def test_sre_byte_literals(self):
        """Numeric escapes in bytes patterns; \\u and \\U have no special
        meaning there and match the literal letter."""
        for i in [0, 8, 16, 32, 64, 127, 128, 255]:
            self.assertTrue(re.match((r"\%03o" % i).encode(), bytes([i])))
            self.assertTrue(re.match((r"\%03o0" % i).encode(), bytes([i])+b"0"))
            self.assertTrue(re.match((r"\%03o8" % i).encode(), bytes([i])+b"8"))
            self.assertTrue(re.match((r"\x%02x" % i).encode(), bytes([i])))
            self.assertTrue(re.match((r"\x%02x0" % i).encode(), bytes([i])+b"0"))
            self.assertTrue(re.match((r"\x%02xz" % i).encode(), bytes([i])+b"z"))
        self.assertTrue(re.match(br"\u", b'u'))
        self.assertTrue(re.match(br"\U", b'U'))
        self.assertTrue(re.match(br"\0", b"\000"))
        self.assertTrue(re.match(br"\08", b"\0008"))
        self.assertTrue(re.match(br"\01", b"\001"))
        self.assertTrue(re.match(br"\018", b"\0018"))
        self.assertTrue(re.match(br"\567", bytes([0o167])))
        self.assertRaises(re.error, re.match, br"\911", b"")
        self.assertRaises(re.error, re.match, br"\x1", b"")
        self.assertRaises(re.error, re.match, br"\x1z", b"")
def test_sre_byte_class_literals(self):
for i in [0, 8, 16, 32, 64, 127, 128, 255]:
self.assertTrue(re.match((r"[\%o]" % i).encode(), bytes([i])))
self.assertTrue(re.match((r"[\%o8]" % i).encode(), bytes([i])))
self.assertTrue(re.match((r"[\%03o]" % i).encode(), bytes([i])))
self.assertTrue(re.match((r"[\%03o0]" % i).encode(), bytes([i])))
self.assertTrue(re.match((r"[\%03o8]" % i).encode(), bytes([i])))
self.assertTrue(re.match((r"[\x%02x]" % i).encode(), bytes([i])))
self.assertTrue(re.match((r"[\x%02x0]" % i).encode(), bytes([i])))
self.assertTrue(re.match((r"[\x%02xz]" % i).encode(), bytes([i])))
self.assertTrue(re.match(br"[\u]", b'u'))
self.assertTrue(re.match(br"[\U]", b'U'))
self.assertRaises(re.error, re.match, br"[\911]", "")
self.assertRaises(re.error, re.match, br"[\x1z]", "")
def test_bug_113254(self):
self.assertEqual(re.match(r'(a)|(b)', 'b').start(1), -1)
self.assertEqual(re.match(r'(a)|(b)', 'b').end(1), -1)
self.assertEqual(re.match(r'(a)|(b)', 'b').span(1), (-1, -1))
    def test_bug_527371(self):
        """lastindex/lastgroup reflect the last matched group (bug 527371)."""
        # bug described in patches 527371/672491
        self.assertIsNone(re.match(r'(a)?a','a').lastindex)
        self.assertEqual(re.match(r'(a)(b)?b','ab').lastindex, 1)
        self.assertEqual(re.match(r'(?P<a>a)(?P<b>b)?b','ab').lastgroup, 'a')
        self.assertEqual(re.match("(?P<a>a(b))", "ab").lastgroup, 'a')
        self.assertEqual(re.match("((a))", "a").lastindex, 1)
    def test_bug_545855(self):
        """An unterminated character class is a compile error (bug 545855)."""
        # bug 545855 -- This pattern failed to cause a compile error as it
        # should, instead provoking a TypeError.
        self.assertRaises(re.error, re.compile, 'foo[a-')
    def test_bug_418626(self):
        """Non-greedy '*?' on very long strings must not recurse deeply."""
        # bugs 418626 at al. -- Testing Greg Chapman's addition of op code
        # SRE_OP_MIN_REPEAT_ONE for eliminating recursion on simple uses of
        # pattern '*?' on a long string.
        self.assertEqual(re.match('.*?c', 10000*'ab'+'cd').end(0), 20001)
        self.assertEqual(re.match('.*?cd', 5000*'ab'+'c'+5000*'ab'+'cde').end(0),
                         20003)
        self.assertEqual(re.match('.*?cd', 20000*'abc'+'de').end(0), 60001)
        # non-simple '*?' still used to hit the recursion limit, before the
        # non-recursive scheme was implemented.
        self.assertEqual(re.search('(a|b)*?c', 10000*'ab'+'cd').end(0), 20001)
    def test_bug_612074(self):
        """An escaped non-ASCII character inside a class compiles."""
        pat="["+re.escape("\u2039")+"]"
        self.assertEqual(re.compile(pat) and 1, 1)
    def test_stack_overflow(self):
        """Repeated groups over huge inputs must not overflow the stack."""
        # nasty cases that used to overflow the straightforward recursive
        # implementation of repeated groups.
        self.assertEqual(re.match('(x)*', 50000*'x').group(1), 'x')
        self.assertEqual(re.match('(x)*y', 50000*'x'+'y').group(1), 'x')
        self.assertEqual(re.match('(x)*?y', 50000*'x'+'y').group(1), 'x')
    def test_unlimited_zero_width_repeat(self):
        """Unbounded repetition of a zero-width match must terminate."""
        # Issue #9669
        self.assertIsNone(re.match(r'(?:a?)*y', 'z'))
        self.assertIsNone(re.match(r'(?:a?)+y', 'z'))
        self.assertIsNone(re.match(r'(?:a?){2,}y', 'z'))
        self.assertIsNone(re.match(r'(?:a?)*?y', 'z'))
        self.assertIsNone(re.match(r'(?:a?)+?y', 'z'))
        self.assertIsNone(re.match(r'(?:a?){2,}?y', 'z'))
    def test_scanner(self):
        """The (undocumented) Scanner tokenizes input via per-pattern
        callbacks; None actions skip the matched text."""
        def s_ident(scanner, token): return token
        def s_operator(scanner, token): return "op%s" % token
        def s_float(scanner, token): return float(token)
        def s_int(scanner, token): return int(token)
        scanner = Scanner([
            (r"[a-zA-Z_]\w*", s_ident),
            (r"\d+\.\d*", s_float),
            (r"\d+", s_int),
            (r"=|\+|-|\*|/", s_operator),
            (r"\s+", None),
            ])
        self.assertTrue(scanner.scanner.scanner("").pattern)
        self.assertEqual(scanner.scan("sum = 3*foo + 312.50 + bar"),
                         (['sum', 'op=', 3, 'op*', 'foo', 'op+', 312.5,
                           'op+', 'bar'], ''))
    def test_bug_448951(self):
        """Optional groups around ':' with single-char repeats (bug 448951)."""
        # bug 448951 (similar to 429357, but with single char match)
        # (Also test greedy matches.)
        for op in '','?','*':
            self.assertEqual(re.match(r'((.%s):)?z'%op, 'z').groups(),
                             (None, None))
            self.assertEqual(re.match(r'((.%s):)?z'%op, 'a:z').groups(),
                             ('a:', 'a'))
    def test_bug_725106(self):
        """Capturing groups inside alternatives inside repeats (bug 725106)."""
        # capturing groups in alternatives in repeats
        self.assertEqual(re.match('^((a)|b)*', 'abc').groups(),
                         ('b', 'a'))
        self.assertEqual(re.match('^(([ab])|c)*', 'abc').groups(),
                         ('c', 'b'))
        self.assertEqual(re.match('^((d)|[ab])*', 'abc').groups(),
                         ('b', None))
        self.assertEqual(re.match('^((a)c|[ab])*', 'abc').groups(),
                         ('b', None))
        self.assertEqual(re.match('^((a)|b)*?c', 'abc').groups(),
                         ('b', 'a'))
        self.assertEqual(re.match('^(([ab])|c)*?d', 'abcd').groups(),
                         ('c', 'b'))
        self.assertEqual(re.match('^((d)|[ab])*?c', 'abc').groups(),
                         ('b', None))
        self.assertEqual(re.match('^((a)c|[ab])*?c', 'abc').groups(),
                         ('b', None))
    def test_bug_725149(self):
        """Group marks are restored correctly after assertions (bug 725149)."""
        # mark_stack_base restoring before restoring marks
        self.assertEqual(re.match('(a)(?:(?=(b)*)c)*', 'abb').groups(),
                         ('a', None))
        self.assertEqual(re.match('(a)((?!(b)*))*', 'abb').groups(),
                         ('a', None, None))
    def test_bug_764548(self):
        """re.compile accepts str subclasses (bug 764548)."""
        # bug 764548, re.compile() barfs on str/unicode subclasses
        class my_unicode(str): pass
        pat = re.compile(my_unicode("abc"))
        self.assertIsNone(pat.match("xyz"))
    def test_finditer(self):
        """finditer yields non-overlapping matches; pos/endpos work both
        positionally and as keywords."""
        iter = re.finditer(r":+", "a:b::c:::d")
        self.assertEqual([item.group(0) for item in iter],
                         [":", "::", ":::"])
        pat = re.compile(r":+")
        iter = pat.finditer("a:b::c:::d", 1, 10)
        self.assertEqual([item.group(0) for item in iter],
                         [":", "::", ":::"])
        pat = re.compile(r":+")
        iter = pat.finditer("a:b::c:::d", pos=1, endpos=10)
        self.assertEqual([item.group(0) for item in iter],
                         [":", "::", ":::"])
        pat = re.compile(r":+")
        iter = pat.finditer("a:b::c:::d", endpos=10, pos=1)
        self.assertEqual([item.group(0) for item in iter],
                         [":", "::", ":::"])
        pat = re.compile(r":+")
        iter = pat.finditer("a:b::c:::d", pos=3, endpos=8)
        self.assertEqual([item.group(0) for item in iter],
                         ["::", "::"])
    def test_bug_926075(self):
        """str and bytes versions of the same pattern are distinct objects."""
        self.assertIsNot(re.compile('bug_926075'),
                         re.compile(b'bug_926075'))
    def test_bug_931848(self):
        """split works with a class of non-ASCII separator characters."""
        pattern = "[\u002E\u3002\uFF0E\uFF61]"
        self.assertEqual(re.compile(pattern).split("a.b.c"),
                         ['a','b','c'])
    def test_bug_581080(self):
        """finditer and scanner stop cleanly after the last match."""
        iter = re.finditer(r"\s", "a b")
        self.assertEqual(next(iter).span(), (1,2))
        self.assertRaises(StopIteration, next, iter)
        scanner = re.compile(r"\s").scanner("a b")
        self.assertEqual(scanner.search().span(), (1, 2))
        self.assertIsNone(scanner.search())
    def test_bug_817234(self):
        """finditer yields the trailing empty match of '.*' exactly once."""
        iter = re.finditer(r".*", "asdf")
        self.assertEqual(next(iter).span(), (0, 4))
        self.assertEqual(next(iter).span(), (4, 4))
        self.assertRaises(StopIteration, next, iter)
    def test_bug_6561(self):
        """Unicode digit matching is restricted to category 'Nd'."""
        # '\d' should match characters in Unicode category 'Nd'
        # (Number, Decimal Digit), but not those in 'Nl' (Number,
        # Letter) or 'No' (Number, Other).
        decimal_digits = [
            '\u0037', # '\N{DIGIT SEVEN}', category 'Nd'
            '\u0e58', # '\N{THAI DIGIT SIX}', category 'Nd'
            '\uff10', # '\N{FULLWIDTH DIGIT ZERO}', category 'Nd'
            ]
        for x in decimal_digits:
            self.assertEqual(re.match('^\d$', x).group(0), x)
        not_decimal_digits = [
            '\u2165', # '\N{ROMAN NUMERAL SIX}', category 'Nl'
            '\u3039', # '\N{HANGZHOU NUMERAL TWENTY}', category 'Nl'
            '\u2082', # '\N{SUBSCRIPT TWO}', category 'No'
            '\u32b4', # '\N{CIRCLED NUMBER THIRTY NINE}', category 'No'
            ]
        for x in not_decimal_digits:
            self.assertIsNone(re.match('^\d$', x))
    def test_empty_array(self):
        """Matching against empty array.array objects must not crash."""
        # SF buf 1647541
        import array
        for typecode in 'bBuhHiIlLfd':
            a = array.array(typecode)
            self.assertIsNone(re.compile(b"bla").match(a))
            self.assertEqual(re.compile(b"").match(a).groups(), ())
    def test_inline_flags(self):
        """Inline (?i)/(?iu) behave like the corresponding flag arguments
        for non-ASCII case folding (bug #1700)."""
        # Bug #1700
        upper_char = chr(0x1ea0) # Latin Capital Letter A with Dot Bellow
        lower_char = chr(0x1ea1) # Latin Small Letter A with Dot Bellow
        p = re.compile(upper_char, re.I | re.U)
        q = p.match(lower_char)
        self.assertTrue(q)
        p = re.compile(lower_char, re.I | re.U)
        q = p.match(upper_char)
        self.assertTrue(q)
        p = re.compile('(?i)' + upper_char, re.U)
        q = p.match(lower_char)
        self.assertTrue(q)
        p = re.compile('(?i)' + lower_char, re.U)
        q = p.match(upper_char)
        self.assertTrue(q)
        p = re.compile('(?iu)' + upper_char)
        q = p.match(lower_char)
        self.assertTrue(q)
        p = re.compile('(?iu)' + lower_char)
        q = p.match(upper_char)
        self.assertTrue(q)
    def test_dollar_matches_twice(self):
        "$ matches the end of string, and just before the terminating \n"
        pattern = re.compile('$')
        self.assertEqual(pattern.sub('#', 'a\nb\n'), 'a\nb#\n#')
        self.assertEqual(pattern.sub('#', 'a\nb\nc'), 'a\nb\nc#')
        self.assertEqual(pattern.sub('#', '\n'), '#\n#')
        # With MULTILINE, '$' additionally matches before every newline.
        pattern = re.compile('$', re.MULTILINE)
        self.assertEqual(pattern.sub('#', 'a\nb\n' ), 'a#\nb#\n#' )
        self.assertEqual(pattern.sub('#', 'a\nb\nc'), 'a#\nb#\nc#')
        self.assertEqual(pattern.sub('#', '\n'), '#\n#')
    def test_bytes_str_mixing(self):
        """Mixing str and bytes between pattern and subject is a TypeError."""
        # Mixing str and bytes is disallowed
        pat = re.compile('.')
        bpat = re.compile(b'.')
        self.assertRaises(TypeError, pat.match, b'b')
        self.assertRaises(TypeError, bpat.match, 'b')
        self.assertRaises(TypeError, pat.sub, b'b', 'c')
        self.assertRaises(TypeError, pat.sub, 'b', b'c')
        self.assertRaises(TypeError, pat.sub, b'b', b'c')
        self.assertRaises(TypeError, bpat.sub, b'b', 'c')
        self.assertRaises(TypeError, bpat.sub, 'b', b'c')
        self.assertRaises(TypeError, bpat.sub, 'b', 'c')
    def test_ascii_and_unicode_flag(self):
        """re.ASCII vs re.UNICODE semantics for str and bytes patterns, and
        the flag combinations that are rejected."""
        # String patterns
        for flags in (0, re.UNICODE):
            pat = re.compile('\xc0', flags | re.IGNORECASE)
            self.assertTrue(pat.match('\xe0'))
            pat = re.compile('\w', flags)
            self.assertTrue(pat.match('\xe0'))
        pat = re.compile('\xc0', re.ASCII | re.IGNORECASE)
        self.assertIsNone(pat.match('\xe0'))
        pat = re.compile('(?a)\xc0', re.IGNORECASE)
        self.assertIsNone(pat.match('\xe0'))
        pat = re.compile('\w', re.ASCII)
        self.assertIsNone(pat.match('\xe0'))
        pat = re.compile('(?a)\w')
        self.assertIsNone(pat.match('\xe0'))
        # Bytes patterns
        for flags in (0, re.ASCII):
            pat = re.compile(b'\xc0', flags | re.IGNORECASE)
            self.assertIsNone(pat.match(b'\xe0'))
            pat = re.compile(b'\w', flags)
            self.assertIsNone(pat.match(b'\xe0'))
        # Incompatibilities
        self.assertRaises(ValueError, re.compile, b'\w', re.UNICODE)
        self.assertRaises(ValueError, re.compile, b'(?u)\w')
        self.assertRaises(ValueError, re.compile, '\w', re.UNICODE | re.ASCII)
        self.assertRaises(ValueError, re.compile, '(?u)\w', re.ASCII)
        self.assertRaises(ValueError, re.compile, '(?a)\w', re.UNICODE)
        self.assertRaises(ValueError, re.compile, '(?au)\w')
    def test_bug_6509(self):
        """sub() accepts both template strings and callables, for str and
        bytes alike (issue 6509)."""
        # Replacement strings of both types must parse properly.
        # all strings
        pat = re.compile('a(\w)')
        self.assertEqual(pat.sub('b\\1', 'ac'), 'bc')
        pat = re.compile('a(.)')
        self.assertEqual(pat.sub('b\\1', 'a\u1234'), 'b\u1234')
        pat = re.compile('..')
        self.assertEqual(pat.sub(lambda m: 'str', 'a5'), 'str')
        # all bytes
        pat = re.compile(b'a(\w)')
        self.assertEqual(pat.sub(b'b\\1', b'ac'), b'bc')
        pat = re.compile(b'a(.)')
        self.assertEqual(pat.sub(b'b\\1', b'a\xCD'), b'b\xCD')
        pat = re.compile(b'..')
        self.assertEqual(pat.sub(lambda m: b'bytes', b'a5'), b'bytes')
    def test_dealloc(self):
        """Bad arguments to the C-level _sre.compile raise cleanly."""
        # issue 3299: check for segfault in debug build
        import _sre
        # the overflow limit is different on wide and narrow builds and it
        # depends on the definition of SRE_CODE (see sre.h).
        # 2**128 should be big enough to overflow on both. For smaller values
        # a RuntimeError is raised instead of OverflowError.
        long_overflow = 2**128
        self.assertRaises(TypeError, re.finditer, "a", {})
        self.assertRaises(OverflowError, _sre.compile, "abc", 0, [long_overflow])
        self.assertRaises(TypeError, _sre.compile, {}, 0, [])
    def test_search_dot_unicode(self):
        """'.' matches any non-newline character, including astral ones."""
        self.assertTrue(re.search("123.*-", '123abc-'))
        self.assertTrue(re.search("123.*-", '123\xe9-'))
        self.assertTrue(re.search("123.*-", '123\u20ac-'))
        self.assertTrue(re.search("123.*-", '123\U0010ffff-'))
        self.assertTrue(re.search("123.*-", '123\xe9\u20ac\U0010ffff-'))
    def test_compile(self):
        """re.compile returns pattern objects and passes compiled patterns
        through unchanged; other types are rejected."""
        # Test return value when given string and pattern as parameter
        pattern = re.compile('random pattern')
        self.assertIsInstance(pattern, re._pattern_type)
        same_pattern = re.compile(pattern)
        self.assertIsInstance(same_pattern, re._pattern_type)
        self.assertIs(same_pattern, pattern)
        # Test behaviour when not given a string or pattern as parameter
        self.assertRaises(TypeError, re.compile, 0)
    def test_bug_13899(self):
        """Anchor letters inside a character class match literally."""
        # Issue #13899: re pattern r"[\A]" should work like "A" but matches
        # nothing. Ditto B and Z.
        self.assertEqual(re.findall(r'[\A\B\b\C\Z]', 'AB\bCZ'),
                         ['A', 'B', '\b', 'C', 'Z'])
    @bigmemtest(size=_2G, memuse=1)
    def test_large_search(self, size):
        """Match positions beyond 2**31 are reported correctly."""
        # Issue #10182: indices were 32-bit-truncated.
        s = 'a' * size
        m = re.search('$', s)
        self.assertIsNotNone(m)
        self.assertEqual(m.start(), size)
        self.assertEqual(m.end(), size)
    # The huge memuse is because of re.sub() using a list and a join()
    # to create the replacement result.
    @bigmemtest(size=_2G, memuse=16 + 2)
    def test_large_subn(self, size):
        """subn() counts and indexes correctly past 2**31 (issue #10182)."""
        # Issue #10182: indices were 32-bit-truncated.
        s = 'a' * size
        r, n = re.subn('', '', s)
        self.assertEqual(r, s)
        self.assertEqual(n, size + 1)
    def test_bug_16688(self):
        """Case-insensitive backreferences on non-ASCII input (issue 16688)."""
        # Issue 16688: Backreferences make case-insensitive regex fail on
        # non-ASCII strings.
        self.assertEqual(re.findall(r"(?i)(a)\1", "aa \u0100"), ['a'])
        self.assertEqual(re.match(r"(?s).{1,3}", "\u0100\u0100").span(), (0, 2))
    def test_repeat_minmax_overflow(self):
        """Repeat counts around 65535 work; absurd counts overflow cleanly."""
        # Issue #13169
        string = "x" * 100000
        self.assertEqual(re.match(r".{65535}", string).span(), (0, 65535))
        self.assertEqual(re.match(r".{,65535}", string).span(), (0, 65535))
        self.assertEqual(re.match(r".{65535,}?", string).span(), (0, 65535))
        self.assertEqual(re.match(r".{65536}", string).span(), (0, 65536))
        self.assertEqual(re.match(r".{,65536}", string).span(), (0, 65536))
        self.assertEqual(re.match(r".{65536,}?", string).span(), (0, 65536))
        # 2**128 should be big enough to overflow both SRE_CODE and Py_ssize_t.
        self.assertRaises(OverflowError, re.compile, r".{%d}" % 2**128)
        self.assertRaises(OverflowError, re.compile, r".{,%d}" % 2**128)
        self.assertRaises(OverflowError, re.compile, r".{%d,}?" % 2**128)
        self.assertRaises(OverflowError, re.compile, r".{%d,%d}" % (2**129, 2**128))
    @cpython_only
    def test_repeat_minmax_overflow_maxrepeat(self):
        """Repeat counts at the _sre.MAXREPEAT boundary (CPython-specific)."""
        try:
            from _sre import MAXREPEAT
        except ImportError:
            self.skipTest('requires _sre.MAXREPEAT constant')
        string = "x" * 100000
        self.assertIsNone(re.match(r".{%d}" % (MAXREPEAT - 1), string))
        self.assertEqual(re.match(r".{,%d}" % (MAXREPEAT - 1), string).span(),
                         (0, 100000))
        self.assertIsNone(re.match(r".{%d,}?" % (MAXREPEAT - 1), string))
        self.assertRaises(OverflowError, re.compile, r".{%d}" % MAXREPEAT)
        self.assertRaises(OverflowError, re.compile, r".{,%d}" % MAXREPEAT)
        self.assertRaises(OverflowError, re.compile, r".{%d,}?" % MAXREPEAT)
    def test_backref_group_name_in_exception(self):
        """The offending group name appears in the error (issue 17341)."""
        # Issue 17341: Poor error message when compiling invalid regex
        with self.assertRaisesRegex(sre_constants.error, '<foo>'):
            re.compile('(?P=<foo>)')
    def test_group_name_in_exception(self):
        """A bad group definition names itself in the error (issue 17341)."""
        # Issue 17341: Poor error message when compiling invalid regex
        with self.assertRaisesRegex(sre_constants.error, '\?foo'):
            re.compile('(?P<?foo>)')
    def test_issue17998(self):
        """Repetition operators followed by a literal, str and bytes."""
        for reps in '*', '+', '?', '{1}':
            for mod in '', '?':
                pattern = '.' + reps + mod + 'yz'
                self.assertEqual(re.compile(pattern, re.S).findall('xyz'),
                                 ['xyz'], msg=pattern)
                pattern = pattern.encode()
                self.assertEqual(re.compile(pattern, re.S).findall(b'xyz'),
                                 [b'xyz'], msg=pattern)
    def test_match_repr(self):
        """repr() of match objects shows span and matched text, for str,
        bytes and buffer-like subjects."""
        for string in '[abracadabra]', S('[abracadabra]'):
            m = re.search(r'(.+)(.*?)\1', string)
            self.assertEqual(repr(m), "<%s.%s object; "
                             "span=(1, 12), match='abracadabra'>" %
                             (type(m).__module__, type(m).__qualname__))
        for string in (b'[abracadabra]', B(b'[abracadabra]'),
                       bytearray(b'[abracadabra]'),
                       memoryview(b'[abracadabra]')):
            m = re.search(rb'(.+)(.*?)\1', string)
            self.assertEqual(repr(m), "<%s.%s object; "
                             "span=(1, 12), match=b'abracadabra'>" %
                             (type(m).__module__, type(m).__qualname__))
        first, second = list(re.finditer("(aa)|(bb)", "aa bb"))
        self.assertEqual(repr(first), "<%s.%s object; "
                         "span=(0, 2), match='aa'>" %
                         (type(second).__module__, type(first).__qualname__))
        self.assertEqual(repr(second), "<%s.%s object; "
                         "span=(3, 5), match='bb'>" %
                         (type(second).__module__, type(second).__qualname__))
    def test_bug_2537(self):
        """Empty submatches inside nested repeats (issue 2537)."""
        # issue 2537: empty submatches
        for outer_op in ('{0,}', '*', '+', '{1,187}'):
            for inner_op in ('{0,}', '*', '?'):
                r = re.compile("^((x|y)%s)%s" % (inner_op, outer_op))
                m = r.match("xyyzy")
                self.assertEqual(m.group(0), "xyy")
                self.assertEqual(m.group(1), "")
                self.assertEqual(m.group(2), "y")
    def test_debug_flag(self):
        """re.DEBUG prints the parsed program, even on a cached pattern."""
        with captured_stdout() as out:
            re.compile('foo', re.DEBUG)
        self.assertEqual(out.getvalue().splitlines(),
                         ['literal 102 ', 'literal 111 ', 'literal 111 '])
        # Debug output is output again even a second time (bypassing
        # the cache -- issue #20426).
        with captured_stdout() as out:
            re.compile('foo', re.DEBUG)
        self.assertEqual(out.getvalue().splitlines(),
                         ['literal 102 ', 'literal 111 ', 'literal 111 '])
    def test_keyword_parameters(self):
        """Pattern methods accept string/pos/endpos as keywords (#20283)."""
        # Issue #20283: Accepting the string keyword parameter.
        pat = re.compile(r'(ab)')
        self.assertEqual(
            pat.match(string='abracadabra', pos=7, endpos=10).span(), (7, 9))
        self.assertEqual(
            pat.fullmatch(string='abracadabra', pos=7, endpos=9).span(), (7, 9))
        self.assertEqual(
            pat.search(string='abracadabra', pos=3, endpos=10).span(), (7, 9))
        self.assertEqual(
            pat.findall(string='abracadabra', pos=3, endpos=10), ['ab'])
        self.assertEqual(
            pat.split(string='abracadabra', maxsplit=1),
            ['', 'ab', 'racadabra'])
        self.assertEqual(
            pat.scanner(string='abracadabra', pos=3, endpos=10).search().span(),
            (7, 9))
    def test_bug_20998(self):
        """fullmatch of a repeated class works with re.I (issue #20998)."""
        # Issue #20998: Fullmatch of repeated single character pattern
        # with ignore case.
        self.assertEqual(re.fullmatch('[a-c]+', 'ABC', re.I).span(), (0, 3))
class PatternReprTests(unittest.TestCase):
    """Tests for repr() of compiled pattern objects."""
    def check(self, pattern, expected):
        # Helper: repr of the compiled pattern must equal *expected*.
        self.assertEqual(repr(re.compile(pattern)), expected)
    def check_flags(self, pattern, flags, expected):
        # Helper: like check(), but compiling with explicit flags.
        self.assertEqual(repr(re.compile(pattern, flags)), expected)
    def test_without_flags(self):
        """Plain patterns repr as re.compile(pattern)."""
        self.check('random pattern',
                   "re.compile('random pattern')")
    def test_single_flag(self):
        """A single flag is shown by its long name."""
        self.check_flags('random pattern', re.IGNORECASE,
                         "re.compile('random pattern', re.IGNORECASE)")
    def test_multiple_flags(self):
        """Multiple flags are joined with '|'."""
        self.check_flags('random pattern', re.I|re.S|re.X,
                         "re.compile('random pattern', "
                         "re.IGNORECASE|re.DOTALL|re.VERBOSE)")
    def test_unicode_flag(self):
        """re.U is implicit for str patterns and omitted from the repr."""
        self.check_flags('random pattern', re.U,
                         "re.compile('random pattern')")
        self.check_flags('random pattern', re.I|re.S|re.U,
                         "re.compile('random pattern', "
                         "re.IGNORECASE|re.DOTALL)")
    def test_inline_flags(self):
        """Inline flags are reflected in the displayed flag set."""
        self.check('(?i)pattern',
                   "re.compile('(?i)pattern', re.IGNORECASE)")
    def test_unknown_flags(self):
        """Unnamed flag bits are shown in hexadecimal."""
        self.check_flags('random pattern', 0x123000,
                         "re.compile('random pattern', 0x123000)")
        self.check_flags('random pattern', 0x123000|re.I,
                         "re.compile('random pattern', re.IGNORECASE|0x123000)")
    def test_bytes(self):
        """Bytes patterns repr with a b'' literal."""
        self.check(b'bytes pattern',
                   "re.compile(b'bytes pattern')")
        self.check_flags(b'bytes pattern', re.A,
                         "re.compile(b'bytes pattern', re.ASCII)")
    def test_quotes(self):
        """Quoting in the repr follows normal string-literal rules."""
        self.check('random "double quoted" pattern',
            '''re.compile('random "double quoted" pattern')''')
        self.check("random 'single quoted' pattern",
            '''re.compile("random 'single quoted' pattern")''')
        self.check('''both 'single' and "double" quotes''',
            '''re.compile('both \\'single\\' and "double" quotes')''')
    def test_long_pattern(self):
        """Very long patterns are truncated in the repr, keeping the flags."""
        pattern = 'Very %spattern' % ('long ' * 1000)
        r = repr(re.compile(pattern))
        self.assertLess(len(r), 300)
        self.assertEqual(r[:30], "re.compile('Very long long lon")
        r = repr(re.compile(pattern, re.I))
        self.assertLess(len(r), 300)
        self.assertEqual(r[:30], "re.compile('Very long long lon")
        self.assertEqual(r[-16:], ", re.IGNORECASE)")
class ImplementationTest(unittest.TestCase):
    """
    Test implementation details of the re module.
    """

    def test_overlap_table(self):
        # _generate_overlap_table builds a per-prefix overlap (failure) table:
        # entry i appears to be the length of the longest proper suffix of
        # s[:i+1] that is also a prefix of s -- TODO confirm against
        # sre_compile internals.
        f = sre_compile._generate_overlap_table
        self.assertEqual(f(""), [])
        self.assertEqual(f("a"), [0])
        self.assertEqual(f("abcd"), [0, 0, 0, 0])
        self.assertEqual(f("aaaa"), [0, 1, 2, 3])
        self.assertEqual(f("ababba"), [0, 0, 1, 2, 0, 1])
        self.assertEqual(f("abcabdac"), [0, 0, 0, 1, 2, 0, 1, 0])
def run_re_tests():
    """Run the legacy table-driven test suite from test.re_tests.

    Each tuple in `tests` is (pattern, string, outcome) or
    (pattern, string, outcome, repl, expected).  Failures are reported by
    printing '===' / '***' lines rather than by raising, so one bad entry
    does not stop the run.
    """
    from test.re_tests import tests, SUCCEED, FAIL, SYNTAX_ERROR
    if verbose:
        print('Running re_tests test suite')
    else:
        # To save time, only run the first and last 10 tests
        #tests = tests[:10] + tests[-10:]
        pass

    for t in tests:
        sys.stdout.flush()
        pattern = s = outcome = repl = expected = None
        if len(t) == 5:
            pattern, s, outcome, repl, expected = t
        elif len(t) == 3:
            pattern, s, outcome = t
        else:
            raise ValueError('Test tuples should have 3 or 5 fields', t)

        try:
            obj = re.compile(pattern)
        except re.error:
            if outcome == SYNTAX_ERROR: pass  # Expected a syntax error
            else:
                print('=== Syntax error:', t)
        except KeyboardInterrupt: raise KeyboardInterrupt
        except:
            # Any other compile-time failure is unexpected; report and move on.
            print('*** Unexpected error ***', t)
            if verbose:
                traceback.print_exc(file=sys.stdout)
        else:
            try:
                result = obj.search(s)
            except re.error as msg:
                print('=== Unexpected exception', t, repr(msg))
            if outcome == SYNTAX_ERROR:
                # This should have been a syntax error; forget it.
                pass
            elif outcome == FAIL:
                if result is None: pass   # No match, as expected
                else: print('=== Succeeded incorrectly', t)
            elif outcome == SUCCEED:
                if result is not None:
                    # Matched, as expected, so now we compute the
                    # result string and compare it to our expected result.
                    start, end = result.span(0)
                    vardict={'found': result.group(0),
                             'groups': result.group(),
                             'flags': result.re.flags}
                    # Expose numbered groups to the eval namespace as g1..g99;
                    # "Error" marks out-of-range groups, "None" unmatched ones.
                    for i in range(1, 100):
                        try:
                            gi = result.group(i)
                            # Special hack because else the string concat fails:
                            if gi is None:
                                gi = "None"
                        except IndexError:
                            gi = "Error"
                        vardict['g%d' % i] = gi
                    # Named groups are exposed under their own names.
                    for i in result.re.groupindex.keys():
                        try:
                            gi = result.group(i)
                            if gi is None:
                                gi = "None"
                        except IndexError:
                            gi = "Error"
                        vardict[i] = gi
                    # `repl` is a Python expression evaluated in vardict.
                    repl = eval(repl, vardict)
                    if repl != expected:
                        print('=== grouping error', t, end=' ')
                        print(repr(repl) + ' should be ' + repr(expected))
                else:
                    print('=== Failed incorrectly', t)

                # Try the match with both pattern and string converted to
                # bytes, and check that it still succeeds.
                try:
                    bpat = bytes(pattern, "ascii")
                    bs = bytes(s, "ascii")
                except UnicodeEncodeError:
                    # skip non-ascii tests
                    pass
                else:
                    try:
                        bpat = re.compile(bpat)
                    except Exception:
                        print('=== Fails on bytes pattern compile', t)
                        if verbose:
                            traceback.print_exc(file=sys.stdout)
                    else:
                        bytes_result = bpat.search(bs)
                        if bytes_result is None:
                            print('=== Fails on bytes pattern match', t)

                # Try the match with the search area limited to the extent
                # of the match and see if it still succeeds.  \B will
                # break (because it won't match at the end or start of a
                # string), so we'll ignore patterns that feature it.
                if pattern[:2] != '\\B' and pattern[-2:] != '\\B' \
                        and result is not None:
                    obj = re.compile(pattern)
                    result = obj.search(s, result.start(0), result.end(0) + 1)
                    if result is None:
                        print('=== Failed on range-limited match', t)

                # Try the match with IGNORECASE enabled, and check that it
                # still succeeds.
                obj = re.compile(pattern, re.IGNORECASE)
                result = obj.search(s)
                if result is None:
                    print('=== Fails on case-insensitive match', t)

                # Try the match with LOCALE enabled, and check that it
                # still succeeds.
                if '(?u)' not in pattern:
                    obj = re.compile(pattern, re.LOCALE)
                    result = obj.search(s)
                    if result is None:
                        print('=== Fails on locale-sensitive match', t)

                # Try the match with UNICODE locale enabled, and check
                # that it still succeeds.
                obj = re.compile(pattern, re.UNICODE)
                result = obj.search(s)
                if result is None:
                    print('=== Fails on unicode-sensitive match', t)
def test_main():
    """Test entry point: run the unittest classes in this module, then the
    legacy table-driven suite."""
    run_unittest(__name__)
    run_re_tests()

if __name__ == "__main__":
    test_main()
| lgpl-3.0 |
kingvuplus/xrd-alliance | lib/python/Plugins/SystemPlugins/Satfinder/plugin.py | 9 | 20104 | from enigma import eDVBResourceManager,\
eDVBFrontendParametersSatellite, eDVBFrontendParametersTerrestrial
from Screens.ScanSetup import ScanSetup, buildTerTransponder
from Screens.ServiceScan import ServiceScan
from Screens.MessageBox import MessageBox
from Plugins.Plugin import PluginDescriptor
from Components.Sources.FrontendStatus import FrontendStatus
from Components.ActionMap import ActionMap
from Components.NimManager import nimmanager, getConfigSatlist
from Components.config import config, ConfigSelection, getConfigListEntry
from Components.TuneTest import Tuner
from Tools.Transponder import getChannelNumber, channel2frequency
class Satfinder(ScanSetup, ServiceScan):
    """Signal-finder screen for enigma2.

    Lets the user pick a tuner, a satellite/cable/terrestrial transponder
    (either user-defined or from the predefined list), live-tunes the raw
    frontend so the signal bars update, and can launch a service scan on the
    currently tuned transponder.  Inherits the config-list machinery from
    ScanSetup and the scan launcher from ServiceScan.
    """

    def __init__(self, session):
        """Capture the currently tuned transponder (if any) as defaults,
        build the setup screen, and stop live TV so the tuner is free."""
        # Guards retune() against firing while config objects are still
        # being constructed.
        self.initcomplete = False
        service = session and session.nav.getCurrentService()
        feinfo = service and service.frontendInfo()
        # Snapshot of the live frontend parameters; used as initial values.
        self.frontendData = feinfo and feinfo.getAll(True)
        del feinfo
        del service
        # Sentinels for config-list rows that need special handling in
        # newConfig(); filled in by createSetup().
        self.typeOfTuningEntry = None
        self.systemEntry = None
        self.satfinderTunerEntry = None
        self.satEntry = None
        self.typeOfInputEntry = None
        ScanSetup.__init__(self, session)
        self.setTitle(_("Satfinder"))
        self["introduction"].setText(_("Press OK to scan"))
        self["Frontend"] = FrontendStatus(frontend_source = lambda : self.frontend, update_interval = 100)
        self["actions"] = ActionMap(["SetupActions", "ColorActions"],
        {
            "save": self.keyGoScan,
            "ok": self.keyGoScan,
            "cancel": self.keyCancel,
        }, -3)
        self.initcomplete = True
        # Remember what was playing so keyCancel/doCloseRecursive can restore it.
        self.session.postScanService = self.session.nav.getCurrentlyPlayingServiceOrGroup()
        self.session.nav.stopService()
        self.onClose.append(self.__onClose)
        self.onShow.append(self.prepareFrontend)

    def openFrontend(self):
        """Try to allocate the raw channel/frontend for self.feid.

        Returns True on success; on success self.raw_channel and
        self.frontend are set.
        """
        res_mgr = eDVBResourceManager.getInstance()
        if res_mgr:
            self.raw_channel = res_mgr.allocateRawChannel(self.feid)
            if self.raw_channel:
                self.frontend = self.raw_channel.getFrontend()
                if self.frontend:
                    return True
        return False

    def prepareFrontend(self):
        """Acquire the frontend, freeing live TV and PiP if necessary,
        then tune to the currently configured transponder."""
        self.frontend = None
        if not self.openFrontend():
            # The tuner may be busy with live TV; stop it and retry.
            self.session.nav.stopService()
            if not self.openFrontend():
                # Still busy: a PiP instance may hold it; toggle PiP off.
                if self.session.pipshown:
                    from Screens.InfoBar import InfoBar
                    InfoBar.instance and hasattr(InfoBar.instance, "showPiP") and InfoBar.instance.showPiP()
                    if not self.openFrontend():
                        self.frontend = None # in normal case this should not happen
        self.tuner = Tuner(self.frontend)
        self.retune(None)

    def __onClose(self):
        """Restore the service that was playing before the satfinder opened."""
        self.session.nav.playService(self.session.postScanService)

    def newConfig(self):
        """React to a changed config-list row: rebuild the list, switch
        tuners, or simply retune, depending on which row changed."""
        cur = self["config"].getCurrent()
        if cur in (self.typeOfTuningEntry, self.systemEntry, self.typeOfInputEntry):
            self.createSetup()
        elif cur == self.satfinderTunerEntry:
            # Tuner changed: reallocate the frontend for the new slot.
            self.feid = int(self.satfinder_scan_nims.value)
            self.createSetup()
            self.prepareFrontend()
            # NOTE(review): '== None' left as-is; 'is None' would be idiomatic.
            if self.frontend == None:
                msg = _("Tuner not available.")
                if self.session.nav.RecordTimer.isRecording():
                    msg += _("\nRecording in progress.")
                self.session.open(MessageBox, msg, MessageBox.TYPE_ERROR)
        elif cur == self.satEntry:
            self.createSetup()
        else:
            self.retune(None)

    def createSetup(self):
        """(Re)build the config list for the selected tuner type.

        The visible rows depend on the tuner's delivery system (DVB-S/C/T)
        and on whether the user tunes a hand-entered or a predefined
        transponder.  Ends by retuning and pushing the list to the screen.
        """
        self.list = []
        self.satfinderTunerEntry = getConfigListEntry(_("Tuner"), self.satfinder_scan_nims)
        self.list.append(self.satfinderTunerEntry)
        if nimmanager.nim_slots[int(self.satfinder_scan_nims.value)].isCompatible("DVB-S"):
            self.tuning_sat = self.scan_satselection[self.getSelectedSatIndex(self.feid)]
            self.satEntry = getConfigListEntry(_('Satellite'), self.tuning_sat)
            self.list.append(self.satEntry)
            self.typeOfTuningEntry = getConfigListEntry(_('Tune'), self.tuning_type)
            if len(nimmanager.getTransponders(int(self.tuning_sat.value))) < 1: # Only offer 'predefined transponder' if some transponders exist
                self.tuning_type.value = "single_transponder"
            else:
                self.list.append(self.typeOfTuningEntry)
            nim = nimmanager.nim_slots[self.feid]
            if self.tuning_type.value == "single_transponder":
                if nim.isCompatible("DVB-S2"):
                    self.systemEntry = getConfigListEntry(_('System'), self.scan_sat.system)
                    self.list.append(self.systemEntry)
                else:
                    # downgrade to dvb-s, in case a -s2 config was active
                    self.scan_sat.system.value = eDVBFrontendParametersSatellite.System_DVB_S
                self.list.append(getConfigListEntry(_('Frequency'), self.scan_sat.frequency))
                self.list.append(getConfigListEntry(_('Polarization'), self.scan_sat.polarization))
                self.list.append(getConfigListEntry(_('Symbol rate'), self.scan_sat.symbolrate))
                self.list.append(getConfigListEntry(_('Inversion'), self.scan_sat.inversion))
                if self.scan_sat.system.value == eDVBFrontendParametersSatellite.System_DVB_S:
                    self.list.append(getConfigListEntry(_("FEC"), self.scan_sat.fec))
                elif self.scan_sat.system.value == eDVBFrontendParametersSatellite.System_DVB_S2:
                    # DVB-S2 exposes extra modulation parameters.
                    self.list.append(getConfigListEntry(_("FEC"), self.scan_sat.fec_s2))
                    self.modulationEntry = getConfigListEntry(_('Modulation'), self.scan_sat.modulation)
                    self.list.append(self.modulationEntry)
                    self.list.append(getConfigListEntry(_('Roll-off'), self.scan_sat.rolloff))
                    self.list.append(getConfigListEntry(_('Pilot'), self.scan_sat.pilot))
            elif self.tuning_type.value == "predefined_transponder":
                self.updatePreDefTransponders()
                self.list.append(getConfigListEntry(_("Transponder"), self.preDefTransponders))
        elif nimmanager.nim_slots[int(self.satfinder_scan_nims.value)].isCompatible("DVB-C"):
            self.typeOfTuningEntry = getConfigListEntry(_('Tune'), self.tuning_type)
            if config.Nims[self.feid].cable.scan_type.value != "provider" or len(nimmanager.getTranspondersCable(int(self.satfinder_scan_nims.value))) < 1: # only show 'predefined transponder' if in provider mode and transponders exist
                self.tuning_type.value = "single_transponder"
            else:
                self.list.append(self.typeOfTuningEntry)
            if self.tuning_type.value == "single_transponder":
                self.list.append(getConfigListEntry(_("Frequency"), self.scan_cab.frequency))
                self.list.append(getConfigListEntry(_("Inversion"), self.scan_cab.inversion))
                self.list.append(getConfigListEntry(_("Symbol rate"), self.scan_cab.symbolrate))
                self.list.append(getConfigListEntry(_("Modulation"), self.scan_cab.modulation))
                self.list.append(getConfigListEntry(_("FEC"), self.scan_cab.fec))
            elif self.tuning_type.value == "predefined_transponder":
                self.scan_nims.value = self.satfinder_scan_nims.value
                self.predefinedCabTranspondersList()
                self.list.append(getConfigListEntry(_('Transponder'), self.CableTransponders))
        elif nimmanager.nim_slots[int(self.satfinder_scan_nims.value)].isCompatible("DVB-T"):
            self.typeOfTuningEntry = getConfigListEntry(_('Tune'), self.tuning_type)
            region = nimmanager.getTerrestrialDescription(int(self.satfinder_scan_nims.value))
            if len(nimmanager.getTranspondersTerrestrial(region)) < 1: # Only offer 'predefined transponder' if some transponders exist
                self.tuning_type.value = "single_transponder"
            else:
                self.list.append(self.typeOfTuningEntry)
            if self.tuning_type.value == "single_transponder":
                if nimmanager.nim_slots[int(self.satfinder_scan_nims.value)].isCompatible("DVB-T2"):
                    self.systemEntryTerr = getConfigListEntry(_('System'), self.scan_ter.system)
                    self.list.append(self.systemEntryTerr)
                else:
                    self.scan_ter.system.value = eDVBFrontendParametersTerrestrial.System_DVB_T
                # Terrestrial frequency may be entered directly or as a
                # channel number (when the tuner supports channel input).
                self.typeOfInputEntry = getConfigListEntry(_("Use frequency or channel"), self.scan_input_as)
                if self.ter_channel_input:
                    self.list.append(self.typeOfInputEntry)
                else:
                    self.scan_input_as.value = self.scan_input_as.choices[0]
                if self.ter_channel_input and self.scan_input_as.value == "channel":
                    channel = getChannelNumber(self.scan_ter.frequency.value*1000, self.ter_tnumber)
                    if channel:
                        self.scan_ter.channel.value = int(channel.replace("+","").replace("-",""))
                    self.list.append(getConfigListEntry(_("Channel"), self.scan_ter.channel))
                else:
                    prev_val = self.scan_ter.frequency.value
                    self.scan_ter.frequency.value = channel2frequency(self.scan_ter.channel.value, self.ter_tnumber)/1000
                    # 474000 appears to be the fallback channel2frequency
                    # returns for an unknown channel -- keep the old value then.
                    if self.scan_ter.frequency.value == 474000:
                        self.scan_ter.frequency.value = prev_val
                    self.list.append(getConfigListEntry(_("Frequency"), self.scan_ter.frequency))
                self.list.append(getConfigListEntry(_("Inversion"), self.scan_ter.inversion))
                self.list.append(getConfigListEntry(_("Bandwidth"), self.scan_ter.bandwidth))
                self.list.append(getConfigListEntry(_("Code rate HP"), self.scan_ter.fechigh))
                self.list.append(getConfigListEntry(_("Code rate LP"), self.scan_ter.feclow))
                self.list.append(getConfigListEntry(_("Modulation"), self.scan_ter.modulation))
                self.list.append(getConfigListEntry(_("Transmission mode"), self.scan_ter.transmission))
                self.list.append(getConfigListEntry(_("Guard interval"), self.scan_ter.guard))
                self.list.append(getConfigListEntry(_("Hierarchy info"), self.scan_ter.hierarchy))
                if self.scan_ter.system.value == eDVBFrontendParametersTerrestrial.System_DVB_T2:
                    self.list.append(getConfigListEntry(_('PLP ID'), self.scan_ter.plp_id))
            elif self.tuning_type.value == "predefined_transponder":
                self.scan_nims.value = self.satfinder_scan_nims.value
                self.predefinedTerrTranspondersList()
                self.list.append(getConfigListEntry(_('Transponder'), self.TerrestrialTransponders))
        self.retune(None)
        self["config"].list = self.list
        self["config"].l.setList(self.list)

    def createConfig(self, foo):
        """Build the satfinder-specific config objects on top of
        ScanSetup.createConfig and attach retune() notifiers so every
        parameter change re-tunes immediately."""
        self.tuning_type = ConfigSelection(default = "predefined_transponder", choices = [("single_transponder", _("User defined transponder")), ("predefined_transponder", _("Predefined transponder"))])
        # Default orbital position 19.2E unless the live frontend says otherwise.
        self.orbital_position = 192
        if self.frontendData and self.frontendData.has_key('orbital_position'):
            self.orbital_position = self.frontendData['orbital_position']
        ScanSetup.createConfig(self, self.frontendData)
        # Any change to a tuning parameter triggers an immediate retune.
        for x in (self.scan_sat.frequency,
            self.scan_sat.inversion, self.scan_sat.symbolrate,
            self.scan_sat.polarization, self.scan_sat.fec, self.scan_sat.pilot,
            self.scan_sat.fec_s2, self.scan_sat.fec, self.scan_sat.modulation,
            self.scan_sat.rolloff, self.scan_sat.system,
            self.scan_ter.channel, self.scan_ter.frequency, self.scan_ter.inversion,
            self.scan_ter.bandwidth, self.scan_ter.fechigh, self.scan_ter.feclow,
            self.scan_ter.modulation, self.scan_ter.transmission,
            self.scan_ter.guard, self.scan_ter.hierarchy, self.scan_ter.plp_id,
            self.scan_cab.frequency, self.scan_cab.inversion, self.scan_cab.symbolrate,
            self.scan_cab.modulation, self.scan_cab.fec):
            x.addNotifier(self.retune, initial_call = False)
        # Offer only tuners that are usable for a satfinder session.
        satfinder_nim_list = []
        for n in nimmanager.nim_slots:
            if not (n.isCompatible("DVB-S") or n.isCompatible("DVB-T") or n.isCompatible("DVB-C")):
                continue
            if n.config_mode in ("loopthrough", "satposdepends", "nothing"):
                continue
            if n.isCompatible("DVB-S") and n.config_mode == "advanced" and len(nimmanager.getSatListForNim(n.slot)) < 1:
                continue
            satfinder_nim_list.append((str(n.slot), n.friendly_full_description))
        self.satfinder_scan_nims = ConfigSelection(choices = satfinder_nim_list)
        if self.frontendData is not None and len(satfinder_nim_list) > 0: # open the plugin with the currently active NIM as default
            self.satfinder_scan_nims.setValue(str(self.frontendData.get("tuner_number", satfinder_nim_list[0][0])))
        self.feid = int(self.satfinder_scan_nims.value)
        # satList is indexed by slot number; None marks non-DVB-S slots.
        self.satList = []
        self.scan_satselection = []
        for slot in nimmanager.nim_slots:
            if slot.isCompatible("DVB-S"):
                self.satList.append(nimmanager.getSatListForNim(slot.slot))
                self.scan_satselection.append(getConfigSatlist(self.orbital_position, self.satList[slot.slot]))
            else:
                self.satList.append(None)
        if self.frontendData:
            # Fall back to user-defined tuning when the live transponder is
            # not in the predefined list but predefined ones do exist.
            ttype = self.frontendData.get("tuner_type", "UNKNOWN")
            if ttype == "DVB-S" and self.predefinedTranspondersList(self.getSelectedSatIndex(self.feid)) is None and len(nimmanager.getTransponders(self.getSelectedSatIndex(self.feid))) > 0:
                self.tuning_type.value = "single_transponder"
            elif ttype == "DVB-T" and self.predefinedTerrTranspondersList() is None and len(nimmanager.getTranspondersTerrestrial(nimmanager.getTerrestrialDescription(self.feid))) > 0:
                self.tuning_type.value = "single_transponder"
            elif ttype == "DVB-C" and self.predefinedCabTranspondersList() is None and len(nimmanager.getTranspondersCable(self.feid)) > 0:
                self.tuning_type.value = "single_transponder"

    def getSelectedSatIndex(self, v):
        """Map slot number `v` to its index within the DVB-S-only sequence
        (skipping the None placeholders in self.satList); -1 if not found."""
        index = 0
        none_cnt = 0
        for n in self.satList:
            if self.satList[index] is None:
                none_cnt += 1
            if index == int(v):
                return index-none_cnt
            index += 1
        return -1

    def updatePreDefTransponders(self):
        """Refresh self.preDefTransponders for the selected satellite."""
        ScanSetup.predefinedTranspondersList(self, self.tuning_sat.orbital_position)

    def retuneCab(self, configElement):
        """Tune the frontend to the configured DVB-C transponder (no-op for
        non-cable tuners) and remember it in self.transponder."""
        if not nimmanager.nim_slots[int(self.satfinder_scan_nims.value)].isCompatible("DVB-C"):
            return
        if self.initcomplete:
            if self.tuning_type.value == "single_transponder":
                transponder = (
                    self.scan_cab.frequency.value*1000,
                    self.scan_cab.symbolrate.value*1000,
                    self.scan_cab.modulation.value,
                    self.scan_cab.fec.value,
                    self.scan_cab.inversion.value
                )
                if self.initcomplete:
                    self.tuner.tuneCab(transponder)
                self.transponder = transponder
            elif self.tuning_type.value == "predefined_transponder":
                tps = nimmanager.getTranspondersCable(int(self.satfinder_scan_nims.value))
                if len(tps) > self.CableTransponders.index :
                    tp = tps[self.CableTransponders.index]
                    # tp = 0 transponder type, 1 freq, 2 sym, 3 mod, 4 fec, 5 inv, 6 sys
                    transponder = (tp[1], tp[2], tp[3], tp[4], tp[5])
                    if self.initcomplete:
                        self.tuner.tuneCab(transponder)
                    self.transponder = transponder

    def retuneTerr(self, configElement):
        """Tune the frontend to the configured DVB-T transponder; falls
        through to retuneCab() for non-terrestrial tuners."""
        if not nimmanager.nim_slots[int(self.satfinder_scan_nims.value)].isCompatible("DVB-T"):
            return self.retuneCab(configElement)
        if self.initcomplete:
            if self.scan_input_as.value == "channel":
                frequency = channel2frequency(self.scan_ter.channel.value, self.ter_tnumber)
            else:
                frequency = self.scan_ter.frequency.value * 1000
            if self.tuning_type.value == "single_transponder":
                transponder = [
                    2, #TERRESTRIAL
                    frequency,
                    self.scan_ter.bandwidth.value,
                    self.scan_ter.modulation.value,
                    self.scan_ter.fechigh.value,
                    self.scan_ter.feclow.value,
                    self.scan_ter.guard.value,
                    self.scan_ter.transmission.value,
                    self.scan_ter.hierarchy.value,
                    self.scan_ter.inversion.value,
                    self.scan_ter.system.value,
                    self.scan_ter.plp_id.value]
                if self.initcomplete:
                    self.tuner.tuneTerr(transponder[1], transponder[9], transponder[2], transponder[4], transponder[5], transponder[3], transponder[7], transponder[6], transponder[8], transponder[10], transponder[11])
                self.transponder = transponder
            elif self.tuning_type.value == "predefined_transponder":
                region = nimmanager.getTerrestrialDescription(int(self.satfinder_scan_nims.value))
                tps = nimmanager.getTranspondersTerrestrial(region)
                if len(tps) > self.TerrestrialTransponders.index :
                    transponder = tps[self.TerrestrialTransponders.index]
                    # frequency 1, inversion 9, bandwidth 2, fechigh 4, feclow 5, modulation 3, transmission 7, guard 6, hierarchy 8, system 10, plpid 11
                    if self.initcomplete:
                        self.tuner.tuneTerr(transponder[1], transponder[9], transponder[2], transponder[4], transponder[5], transponder[3], transponder[7], transponder[6], transponder[8], transponder[10], transponder[11])
                    self.transponder = transponder

    def retune(self, configElement): # satellite
        """Tune the frontend to the configured DVB-S transponder; falls
        through to retuneTerr() for non-satellite tuners."""
        if not nimmanager.nim_slots[int(self.satfinder_scan_nims.value)].isCompatible("DVB-S"):
            return self.retuneTerr(configElement)
        if not self.tuning_sat.value:
            return
        satpos = int(self.tuning_sat.value)
        if self.tuning_type.value == "single_transponder":
            if self.scan_sat.system.value == eDVBFrontendParametersSatellite.System_DVB_S2:
                fec = self.scan_sat.fec_s2.value
            else:
                fec = self.scan_sat.fec.value
            transponder = (
                self.scan_sat.frequency.value,
                self.scan_sat.symbolrate.value,
                self.scan_sat.polarization.value,
                fec,
                self.scan_sat.inversion.value,
                satpos,
                self.scan_sat.system.value,
                self.scan_sat.modulation.value,
                self.scan_sat.rolloff.value,
                self.scan_sat.pilot.value)
            if self.initcomplete:
                self.tuner.tune(transponder)
            self.transponder = transponder
        elif self.tuning_type.value == "predefined_transponder":
            tps = nimmanager.getTransponders(satpos)
            if len(tps) > self.preDefTransponders.index:
                tp = tps[self.preDefTransponders.index]
                # Predefined entries carry frequency/symbolrate in Hz; the
                # tuner tuple uses kHz, hence the /1000.  Inversion fixed to 2.
                transponder = (tp[1] / 1000, tp[2] / 1000,
                    tp[3], tp[4], 2, satpos, tp[5], tp[6], tp[8], tp[9])
                if self.initcomplete:
                    self.tuner.tune(transponder)
                self.transponder = transponder

    def keyGoScan(self):
        """Release the raw frontend and start a service scan on the last
        tuned transponder (self.transponder)."""
        self.frontend = None
        # The raw channel must be freed before the scan can grab the tuner.
        if self.raw_channel:
            del(self.raw_channel)
        tlist = []
        if nimmanager.nim_slots[int(self.satfinder_scan_nims.value)].isCompatible("DVB-S"):
            self.addSatTransponder(tlist,
                self.transponder[0], # frequency
                self.transponder[1], # sr
                self.transponder[2], # pol
                self.transponder[3], # fec
                self.transponder[4], # inversion
                self.tuning_sat.orbital_position,
                self.transponder[6], # system
                self.transponder[7], # modulation
                self.transponder[8], # rolloff
                self.transponder[9] # pilot
            )
        elif nimmanager.nim_slots[int(self.satfinder_scan_nims.value)].isCompatible("DVB-T"):
            parm = buildTerTransponder(
                self.transponder[1], # frequency
                self.transponder[9], # inversion
                self.transponder[2], # bandwidth
                self.transponder[4], # fechigh
                self.transponder[5], # feclow
                self.transponder[3], # modulation
                self.transponder[7], # transmission
                self.transponder[6], # guard
                self.transponder[8], # hierarchy
                self.transponder[10], # system
                self.transponder[11] # plpid
            )
            tlist.append(parm)
        else: # DVB-C
            self.addCabTransponder(tlist,
                self.transponder[0], # frequency
                self.transponder[1], # sr
                self.transponder[2], # modulation
                self.transponder[3], # fec_inner
                self.transponder[4] # inversion
            )
        self.startScan(tlist, self.feid)

    def startScan(self, tlist, feid):
        """Open the ServiceScan screen for the given transponder list."""
        flags = 0
        networkid = 0
        self.session.openWithCallback(self.startScanCallback, ServiceScan, [{"transponders": tlist, "feid": feid, "flags": flags, "networkid": networkid}])

    def startScanCallback(self, answer=None):
        # Close the satfinder chain entirely when the scan finished successfully.
        if answer:
            self.doCloseRecursive()

    def keyCancel(self):
        """Release the frontend and close, signalling 'not recursive'."""
        if self.session.postScanService and self.frontend:
            self.frontend = None
            del self.raw_channel
        self.close(False)

    def doCloseRecursive(self):
        """Release the frontend and close, signalling 'close whole chain'."""
        if self.session.postScanService and self.frontend:
            self.frontend = None
            del self.raw_channel
        self.close(True)
def SatfinderMain(session, close=None, **kwargs):
    """Open the Satfinder screen if at least one usable tuner exists,
    otherwise show an error message box.

    A tuner is usable when it supports DVB-S/T/C, is not configured as
    loopthrough/satposdepends/nothing, and (for advanced DVB-S setups)
    has at least one satellite assigned.
    """
    def _usable(nim):
        # Mirrors the tuner filter used when building the satfinder tuner list.
        if not (nim.isCompatible("DVB-S") or nim.isCompatible("DVB-T") or nim.isCompatible("DVB-C")):
            return False
        if nim.config_mode in ("loopthrough", "satposdepends", "nothing"):
            return False
        if nim.isCompatible("DVB-S") and nim.config_mode == "advanced" and len(nimmanager.getSatListForNim(nim.slot)) < 1:
            return False
        return True

    if any(_usable(n) for n in nimmanager.nim_slots):
        session.openWithCallback(close, Satfinder)
    else:
        session.open(MessageBox, _("No satellite, terrestrial or cable tuner is configured. Please check your tuner setup."), MessageBox.TYPE_ERROR)
def SatfinderStart(menuid, **kwargs):
    """Menu hook: contribute a Satfinder entry only to the 'scan' menu."""
    if menuid != "scan":
        return []
    return [(_("Satfinder"), SatfinderMain, "satfinder", 35, True)]
def Plugins(**kwargs):
    """Plugin registration: expose the Satfinder menu hook when any
    supported tuner type (DVB-S/T/C) is present; otherwise register nothing."""
    if not any(nimmanager.hasNimType(t) for t in ("DVB-S", "DVB-T", "DVB-C")):
        return []
    return PluginDescriptor(name=_("Satfinder"), description=_("Helps setting up your antenna"), where = PluginDescriptor.WHERE_MENU, needsRestart = False, fnc=SatfinderStart)
| gpl-2.0 |
devs1991/test_edx_docmode | venv/lib/python2.7/site-packages/docutils/parsers/__init__.py | 126 | 1657 | # $Id: __init__.py 7646 2013-04-17 14:17:37Z milde $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
"""
This package contains Docutils parser modules.
"""
__docformat__ = 'reStructuredText'
import sys
from docutils import Component
# Python < 2.5 lacks the `level` argument on the builtin __import__;
# docutils._compat supplies a wrapper with the newer signature so
# get_parser_class() below can pass level= unconditionally.
if sys.version_info < (2,5):
    from docutils._compat import __import__
class Parser(Component):
    """Abstract base class for docutils parser components."""

    # Component classification used by the docutils configuration machinery.
    component_type = 'parser'
    config_section = 'parsers'

    def parse(self, inputstring, document):
        """Override to parse `inputstring` into document tree `document`."""
        raise NotImplementedError('subclass must override this method')

    def setup_parse(self, inputstring, document):
        """Initial parse setup.  Call at start of `self.parse()`."""
        self.inputstring = inputstring
        self.document = document
        # Route parser messages into the document's reporting machinery.
        document.reporter.attach_observer(document.note_parse_message)

    def finish_parse(self):
        """Finalize parse details.  Call at end of `self.parse()`."""
        # Undo the observer registration made in setup_parse().
        self.document.reporter.detach_observer(
            self.document.note_parse_message)
# Alternative names accepted by get_parser_class() for the
# reStructuredText parser module ('rst').
_parser_aliases = {
    'restructuredtext': 'rst',
    'rest': 'rst',
    'restx': 'rst',
    'rtxt': 'rst',}
def get_parser_class(parser_name):
    """Return the Parser class from the `parser_name` module."""
    # Normalize case and resolve aliases ('restructuredtext' -> 'rst', ...).
    module_name = parser_name.lower()
    module_name = _parser_aliases.get(module_name, module_name)
    try:
        # Prefer a relative import from within this package.
        module = __import__(module_name, globals(), locals(), level=1)
    except ImportError:
        # Fall back to an absolute import for externally installed parsers.
        module = __import__(module_name, globals(), locals(), level=0)
    return module.Parser
| agpl-3.0 |
lokirius/python-for-android | python-build/python-libs/gdata/src/gdata/blogger/data.py | 136 | 4030 | #!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Data model classes for parsing and generating XML for the Blogger API."""
__author__ = 'j.s@google.com (Jeff Scudder)'
import re
import atom.core
import gdata.data
# Atom category scheme Blogger uses for post labels.
LABEL_SCHEME = 'http://www.blogger.com/atom/ns#'
# Element-name template for the Atom Threading Extensions namespace.
THR_TEMPLATE = '{http://purl.org/syndication/thread/1.0}%s'

# All regex literals below use raw strings so backslash sequences such as
# \w, \d and \. reach the regex engine verbatim (idiomatic; byte-identical
# to the previous non-raw literals).
# Extracts the blog's URL-name component from an alternate link
# ('http://blogName.blogspot.com/' -> group 2 == 'blogName').
BLOG_NAME_PATTERN = re.compile(r'(http://)(\w*)')
# Extracts the numeric blog id (group 2) from an Atom id of the form
# 'tag:blogger.com,1999:blog-NNN'.
BLOG_ID_PATTERN = re.compile(r'(tag:blogger.com,1999:blog-)(\w*)')
# Alternate Atom id form carrying a user id (group 1) and blog id (group 2).
BLOG_ID2_PATTERN = re.compile(r'tag:blogger.com,1999:user-(\d+)\.blog-(\d+)')
# Extracts the post id (group 4) from a post entry's Atom id.
POST_ID_PATTERN = re.compile(
    r'(tag:blogger.com,1999:blog-)(\w*)(.post-)(\w*)')
# Extracts the trailing id segment (after the last '-') from a comment's Atom id.
COMMENT_ID_PATTERN = re.compile(r'.*-(\w*)$')
class BloggerEntry(gdata.data.GDEntry):
    """Adds convenience methods inherited by all Blogger entries."""

    def get_blog_id(self):
        """Extracts the Blogger id of this blog.

        This method is useful when contructing URLs by hand. The blog id is
        often used in blogger operation URLs. This should not be confused with
        the id member of a BloggerBlog. The id element is the Atom id XML element.
        The blog id which this method returns is a part of the Atom id.

        Returns:
          The blog's unique id as a string.
        """
        if self.id.text:
            match = BLOG_ID_PATTERN.match(self.id.text)
            if match:
                return match.group(2)
            else:
                # NOTE(review): if the id matches neither pattern this raises
                # AttributeError (match() returned None); callers appear to
                # rely on Blogger ids always matching one of the two forms.
                return BLOG_ID2_PATTERN.match(self.id.text).group(2)
        return None

    GetBlogId = get_blog_id  # CamelCase alias kept for gdata API conventions.

    def get_blog_name(self):
        """Finds the name of this blog as used in the 'alternate' URL.

        An alternate URL is in the form 'http://blogName.blogspot.com/'. For an
        entry representing the above example, this method would return 'blogName'.

        Returns:
          The blog's URL name component as a string, or None if the entry has
          no 'alternate' link.
        """
        for link in self.link:
            if link.rel == 'alternate':
                return BLOG_NAME_PATTERN.match(link.href).group(2)
        return None

    GetBlogName = get_blog_name  # CamelCase alias kept for gdata API conventions.
class Blog(BloggerEntry):
    """Represents a blog which belongs to the user."""
    # No extra fields: the Atom payload is fully described by the base classes.
class BlogFeed(gdata.data.GDFeed):
    """Atom feed whose entries are the user's blogs."""
    entry = [Blog]  # parse each atom:entry element as a Blog
class BlogPost(BloggerEntry):
    """Represents a single post on a blog."""

    def add_label(self, label):
        """Adds a label to the blog post.

        The label is represented by an Atom category element, so this method
        is shorthand for appending a new atom.Category object.

        Args:
          label: str
        """
        self.category.append(atom.data.Category(scheme=LABEL_SCHEME, term=label))

    AddLabel = add_label  # CamelCase alias kept for gdata API conventions.

    def get_post_id(self):
        """Extracts the postID string from the entry's Atom id.

        Returns: A string of digits which identify this post within the blog,
        or None if the entry has no id text.
        """
        if self.id.text:
            return POST_ID_PATTERN.match(self.id.text).group(4)
        return None

    GetPostId = get_post_id  # CamelCase alias kept for gdata API conventions.
class BlogPostFeed(gdata.data.GDFeed):
    """Atom feed whose entries are posts of a single blog."""
    entry = [BlogPost]  # parse each atom:entry element as a BlogPost
class InReplyTo(atom.core.XmlElement):
    """thr:in-reply-to element linking a comment to the post it replies to."""
    _qname = THR_TEMPLATE % 'in-reply-to'
    # XML attribute names exposed as element attributes.
    href = 'href'
    ref = 'ref'
    source = 'source'
    type = 'type'
class Comment(BloggerEntry):
    """Blog post comment entry in a feed listing comments on a post or blog."""
    # Optional thr:in-reply-to child identifying the commented-on post.
    in_reply_to = InReplyTo

    def get_comment_id(self):
        """Extracts the commentID string from the entry's Atom id.

        Returns: A string of digits which identify this comment, taken from
        the final '-NNN' segment of the Atom id; None if the id has no text.
        """
        if self.id.text:
            return COMMENT_ID_PATTERN.match(self.id.text).group(1)
        return None

    GetCommentId = get_comment_id  # CamelCase alias kept for gdata API conventions.
class CommentFeed(gdata.data.GDFeed):
    """Atom feed whose entries are comments."""
    entry = [Comment]  # parse each atom:entry element as a Comment
| apache-2.0 |
anirudhvenkats/clowdflows | workflows/crossbee/library_gen.py | 6 | 7383 | # -----------------------------------------------------------------------------------------------------
# WARNING: THIS IS AUTOMATICALLY GENERATED FILE, DO NOT EDIT IT MANUALLY AS YOU MAY LOOSE YOUR CHANGES!
# -----------------------------------------------------------------------------------------------------
from import_dotnet import *
from serialization_utils import *
def crossbee_construct_standard_heurisitc(inputDict):
    # Auto-generated widget wrapper (do not hand-edit; regenerate instead):
    # converts widget inputs to .NET values, delegates to CrossBeeIntf and
    # serializes the result back into the output dictionary.
    _name = ToString(inputDict['name'])
    _heurisitcSpec = ToEnum(CrossBeeInterfaces.Heurisitcs.StandardHeurisitc.Specification, inputDict['heurisitcSpec'], CrossBeeInterfaces.Heurisitcs.StandardHeurisitc.Specification.random)
    execResult = CrossBeeIntf.ConstructStandardHeurisitc(_name, _heurisitcSpec)
    execResultPy = ToPyObj(execResult)
    outputDict = {}
    outputDict['heurisitc'] = execResultPy
    return outputDict
def crossbee_construct_all_standard_heurisitc(inputDict):
    # Auto-generated wrapper: takes no inputs; returns all standard
    # heuristics built by CrossBeeIntf, serialized to a Python object.
    execResult = CrossBeeIntf.ConstructAllStandardHeurisitc()
    execResultPy = ToPyObj(execResult)
    outputDict = {}
    outputDict['heurisitcs'] = execResultPy
    return outputDict
def crossbee_construct_outlier_heuristics(inputDict):
    # Auto-generated wrapper: builds outlier-based heuristics from the given
    # outlier document indexes; 'relative' toggles relative scoring.
    _name = ToString(inputDict['name'])
    _relative = ToBool(inputDict['relative'])
    _outlierDocumentIndexes = ToNetObj(inputDict['outlierDocumentIndexes'])
    execResult = CrossBeeIntf.ConstructOutlierHeuristics(_name, _relative, _outlierDocumentIndexes)
    execResultPy = ToPyObj(execResult)
    outputDict = {}
    outputDict['newHeurisitcs'] = execResultPy
    return outputDict
def crossbee_construct_calculated_heuristics(inputDict):
    # Auto-generated wrapper: combines existing heuristics with an arithmetic
    # calculation (default: Sum) into new calculated heuristics.
    _name = ToString(inputDict['name'])
    _calc = ToEnum(CrossBeeInterfaces.Heurisitcs.CalculatedHeustistic.Calculation, inputDict['calc'], CrossBeeInterfaces.Heurisitcs.CalculatedHeustistic.Calculation.Sum)
    _heuristics = ToNetObj(inputDict['heuristics'])
    execResult = CrossBeeIntf.ConstructCalculatedHeuristics(_name, _calc, _heuristics)
    execResultPy = ToPyObj(execResult)
    outputDict = {}
    outputDict['newHeurisitcs'] = execResultPy
    return outputDict
def crossbee_construct_ensemble_heuristics(inputDict):
    # Auto-generated wrapper: builds an ensemble heuristic from a list of
    # member heuristics.
    _name = ToString(inputDict['name'])
    _heuristics = ToNetObj(inputDict['heuristics'])
    execResult = CrossBeeIntf.ConstructEnsembleHeuristics(_name, _heuristics)
    execResultPy = ToPyObj(execResult)
    outputDict = {}
    outputDict['newHeurisitcs'] = execResultPy
    return outputDict
def crossbee_combine_heuristics(inputDict):
    # Auto-generated wrapper: merges several heuristic collections into one.
    _heuristics = ToNetObj(inputDict['heuristics'])
    execResult = CrossBeeIntf.CombineHeuristics(_heuristics)
    execResultPy = ToPyObj(execResult)
    outputDict = {}
    outputDict['newHeurisitcs'] = execResultPy
    return outputDict
def crossbee_get_heuristic_names(inputDict):
    # Auto-generated wrapper: extracts the display names of the heuristics.
    _heuristics = ToNetObj(inputDict['heuristics'])
    execResult = CrossBeeIntf.GetHeuristicNames(_heuristics)
    execResultPy = ToPyObj(execResult)
    outputDict = {}
    outputDict['names'] = execResultPy
    return outputDict
def crossbee_get_heuristic_structure(inputDict):
    """Return the structural description of the given heuristics."""
    heuristics = ToNetObj(inputDict['heuristics'])
    structure = CrossBeeIntf.GetHeuristicStructure(heuristics)
    return {'structure': ToPyObj(structure)}
def crossbee_load_outlier_heuristics(inputDict):
    """Load outlier heuristics described by a textual specification."""
    name_prefix = ToString(inputDict['namePrefix'])
    specification = ToString(inputDict['specification'])
    relative = ToBool(inputDict['relative'])
    loaded = CrossBeeIntf.LoadOutlierHeuristics(name_prefix, specification, relative)
    return {'newHeurisitcs': ToPyObj(loaded)}
def crossbee_outlier_heuristics_spec(inputDict):
    """Serialize outlier heuristics back into their textual specification."""
    heuristics = ToNetObj(inputDict['heuristics'])
    spec = CrossBeeIntf.OutlierHeuristicsSpec(heuristics)
    return {'specification': ToPyObj(spec)}
def crossbee_outlier_detection_via_cross_validation(inputDict):
    """Run repeated cross-validation based outlier detection on a dataset."""
    csf = ToNetObj(inputDict['csf'])
    ds = ToNetObj(inputDict['ds'])
    repetition_count = ToInt(inputDict['repetitionCount'])
    outlier_threshold = ToInt(inputDict['outlierThreshold'])
    num_of_sets = ToInt(inputDict['numOfSets'])
    random = ToBool(inputDict['random'])
    use_seed = ToBool(inputDict['useSeed'])
    random_seed = ToInt(inputDict['randomSeed'])
    # Falls back to RelativePercentage when the weighting is not recognised.
    outlier_weighting = ToEnum(
        CrossBeeInterfaces.CrossBeeIntf.OutlierWeighting,
        inputDict['outlierWeighting'],
        CrossBeeInterfaces.CrossBeeIntf.OutlierWeighting.RelativePercentage)
    result = CrossBeeIntf.OutlierDetectionViaCrossValidation(
        csf, ds, repetition_count, outlier_threshold, num_of_sets,
        random, use_seed, random_seed, outlier_weighting)
    return {'out': ToPyObj(result)}
def crossbee_apply_heurisitcs(inputDict):
    """Apply heuristics to a term dataset (spelling follows the .NET API)."""
    term_dataset = ToNetObj(inputDict['termDataset'])
    heuristics = ToNetObj(inputDict['heuristics'])
    applied = CrossBeeIntf.ApplyHeurisitcs(term_dataset, heuristics)
    return {'heur': ToPyObj(applied)}
def crossbee_select_heuristics(inputDict):
    """Select a subset of the given heuristics via the CrossBee interface."""
    heuristics = ToNetObj(inputDict['heuristics'])
    selected = CrossBeeIntf.SelectHeuristics(heuristics)
    return {'heuristics': ToPyObj(selected)}
def crossbee_rank_terms(inputDict):
    """Rank terms with the given heuristics; returns the result as 'table'."""
    heuristics = ToNetObj(inputDict['heuristics'])
    table = CrossBeeIntf.RankTerms(heuristics)
    return {'table': ToPyObj(table)}
def crossbee_explore_in_crossbee(inputDict):
    """Launch CrossBee exploration for a parsed document; yields no outputs."""
    parsed_doc = ToNetObj(inputDict['parsedDoc'])
    heuristics = ToNetObj(inputDict['heuristics'])
    bterms = ToNetObj(inputDict['bterms'])
    result = CrossBeeIntf.ExploreInCrossbee(parsed_doc, heuristics, bterms)
    ToPyObj(result)  # converted as in the other wrappers, result intentionally unused
    return {}
def crossbee_get_roc_curves(inputDict):
    """Compute ROC curves for heuristics against the given b-terms."""
    heuristics = ToNetObj(inputDict['heuristics'])
    bterms = ToNetObj(inputDict['bterms'])
    curves = CrossBeeIntf.GetRocCurves(heuristics, bterms)
    return {'roc': ToPyObj(curves)}
def crossbee_display_roc_curves(inputDict):
    """Display previously computed ROC curves; yields no outputs."""
    roc = ToNetObj(inputDict['roc'])
    result = CrossBeeIntf.DisplayRocCurves(roc)
    ToPyObj(result)  # converted as in the other wrappers, result intentionally unused
    return {}
def crossbee_get_performance_measures(inputDict):
    """Compute performance measures of heuristics against the given b-terms."""
    heuristics = ToNetObj(inputDict['heuristics'])
    bterms = ToNetObj(inputDict['bterms'])
    measures = CrossBeeIntf.GetPerformanceMeasures(heuristics, bterms)
    return {'perf': ToPyObj(measures)}
def crossbee_get_viper_measures(inputDict):
    """Compute VIPER measures of heuristics against the given b-terms."""
    heuristics = ToNetObj(inputDict['heuristics'])
    bterms = ToNetObj(inputDict['bterms'])
    measures = CrossBeeIntf.GetViperMeasures(heuristics, bterms)
    return {'perf': ToPyObj(measures)}
| gpl-3.0 |
davidzchen/tensorflow | tensorflow/python/training/tracking/util_test.py | 8 | 40799 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import weakref
from absl.testing import parameterized
import six
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import template
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables as variables_lib
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.saved_model import save as saved_model_save
from tensorflow.python.training import checkpoint_management
from tensorflow.python.training import saver as saver_lib
from tensorflow.python.training.saving import checkpoint_options
from tensorflow.python.training.tracking import base
from tensorflow.python.training.tracking import graph_view
from tensorflow.python.training.tracking import tracking
from tensorflow.python.training.tracking import util as trackable_utils
class NonLayerTrackable(tracking.AutoTrackable):
  """Minimal trackable (non-Keras-layer) object holding one scalar variable."""

  def __init__(self):
    super(NonLayerTrackable, self).__init__()
    # Registered through add_variable so it appears in the object graph under
    # the checkpoint key "a_variable".
    self.a_variable = trackable_utils.add_variable(
        self, name="a_variable", shape=[])
class InterfaceTests(test.TestCase):
  """Tests for the add_variable / trackable object-graph interface."""

  @test_util.run_in_graph_and_eager_modes(assert_no_eager_garbage=True)
  def testAddVariable(self):
    """add_variable: initializers, scoping, duplicates and checkpoint names."""
    obj = NonLayerTrackable()
    # Shape may come from either `shape` or the initializer, not both.
    with self.assertRaisesRegex(ValueError, "do not specify shape"):
      trackable_utils.add_variable(
          obj, name="shape_specified_twice", shape=[], initializer=1)
    constant_initializer = trackable_utils.add_variable(
        obj, name="constant_initializer", initializer=1)
    with variable_scope.variable_scope("some_variable_scope"):
      ones_initializer = trackable_utils.add_variable(
          obj,
          name="ones_initializer",
          shape=[2],
          initializer=init_ops.ones_initializer(dtype=dtypes.float32))
    bare_initializer = trackable_utils.add_variable(
        obj,
        name="bare_initializer",
        shape=[2, 2],
        dtype=dtypes.float64,
        initializer=init_ops.zeros_initializer)
    # Even in graph mode, there are no naming conflicts between objects, only
    # naming conflicts within an object.
    other_duplicate = resource_variable_ops.ResourceVariable(
        name="duplicate", initial_value=1.)
    duplicate = trackable_utils.add_variable(
        obj, name="duplicate", shape=[])
    with self.assertRaisesRegex(ValueError, "'duplicate'.*already declared"):
      trackable_utils.add_variable(obj, name="duplicate", shape=[])
    self.evaluate(trackable_utils.gather_initializers(obj))
    self.assertEqual("constant_initializer:0", constant_initializer.name)
    self.assertEqual(1, self.evaluate(constant_initializer))
    self.assertEqual("some_variable_scope/ones_initializer:0",
                     ones_initializer.name)
    self.assertAllEqual([1, 1], self.evaluate(ones_initializer))
    self.assertAllEqual([[0., 0.],
                         [0., 0.]], self.evaluate(bare_initializer))
    self.assertEqual("a_variable:0", obj.a_variable.name)
    self.assertEqual("duplicate:0", other_duplicate.name)
    if context.executing_eagerly():
      # When executing eagerly, there's no uniquification of variable names. The
      # checkpoint name will be the same.
      self.assertEqual("duplicate:0", duplicate.name)
    else:
      # The .name attribute may be globally influenced, but the checkpoint name
      # won't be (tested below).
      self.assertEqual("duplicate_1:0", duplicate.name)
    named_variables, _, _ = (
        graph_view.ObjectGraphView(obj).serialize_object_graph())
    # Checkpoint keys come from object-graph attribute names, not op names.
    expected_checkpoint_names = (
        "a_variable/.ATTRIBUTES/VARIABLE_VALUE",
        "bare_initializer/.ATTRIBUTES/VARIABLE_VALUE",
        "constant_initializer/.ATTRIBUTES/VARIABLE_VALUE",
        "duplicate/.ATTRIBUTES/VARIABLE_VALUE",
        "ones_initializer/.ATTRIBUTES/VARIABLE_VALUE",
    )
    six.assertCountEqual(
        self, expected_checkpoint_names, [v.name for v in named_variables])

  def testInitNotCalled(self):
    """add_variable works even when the subclass skips Trackable.__init__."""

    class NoInit(tracking.AutoTrackable):

      def __init__(self):
        pass

    # __init__ for Trackable will be called implicitly.
    trackable_utils.add_variable(NoInit(), "var", shape=[])

  def testShapeDtype(self):
    """The requested dtype is honored with and without an explicit shape."""
    root = tracking.AutoTrackable()
    v1 = trackable_utils.add_variable(
        root, name="v1", initializer=3., dtype=dtypes.float64)
    self.assertEqual(dtypes.float64, v1.dtype)
    v2 = trackable_utils.add_variable(
        root,
        name="v2",
        shape=[3],
        initializer=init_ops.ones_initializer,
        dtype=dtypes.float64)
    self.assertEqual(dtypes.float64, v2.dtype)
    self.assertAllEqual([1., 1., 1.], self.evaluate(v2))

  def testNotTrackable(self):
    """Saving an object mixing in NotTrackable fails, regardless of MRO."""

    class CallsFunctionalStuff(
        tracking.NotTrackable, tracking.AutoTrackable):
      pass

    test_dir = self.get_temp_dir()
    prefix = os.path.join(test_dir, "ckpt")
    checkpoint = trackable_utils.Checkpoint(x=CallsFunctionalStuff())
    with self.assertRaises(NotImplementedError):
      checkpoint.save(prefix)

    class CallsFunctionalStuffOtherMRO(
        tracking.AutoTrackable, tracking.NotTrackable):
      pass

    checkpoint_reversed = trackable_utils.Checkpoint(
        x=CallsFunctionalStuffOtherMRO())
    with self.assertRaises(NotImplementedError):
      checkpoint_reversed.save(prefix)
class _MirroringSaveable(saver_lib.BaseSaverBuilder.SaveableObject):
  """SaveableObject that saves one variable but restores it into two."""

  def __init__(self, primary_variable, mirrored_variable, name):
    self._primary_variable = primary_variable
    self._mirrored_variable = mirrored_variable
    # Only the primary variable's value is written to the checkpoint.
    tensor = self._primary_variable.read_value()
    spec = saver_lib.BaseSaverBuilder.SaveSpec(
        tensor=tensor,
        slice_spec="",
        name=name)
    super(_MirroringSaveable, self).__init__(
        tensor, [spec], name)

  def restore(self, restored_tensors, restored_shapes):
    """Restore the same value into both variables."""
    tensor, = restored_tensors
    return control_flow_ops.group(
        self._primary_variable.assign(tensor),
        self._mirrored_variable.assign(tensor))
class _OwnsMirroredVariables(base.Trackable):
  """A Trackable object which returns a more complex SaveableObject."""

  def __init__(self):
    self.non_dep_variable = variable_scope.get_variable(
        name="non_dep_variable", initializer=6., use_resource=True)
    self.mirrored = variable_scope.get_variable(
        name="mirrored", initializer=15., use_resource=True)

  def _gather_saveables_for_checkpoint(self):
    # Returns a factory so the saveable captures the current variable name.
    def _saveable_factory(name=self.non_dep_variable.name):
      return _MirroringSaveable(
          primary_variable=self.non_dep_variable,
          mirrored_variable=self.mirrored,
          name=name)
    return {base.VARIABLE_VALUE_KEY: _saveable_factory}

  # The Saver sorts by name before parsing, so we need a name property.
  @property
  def name(self):
    return self.non_dep_variable.name
class CheckpointingTests(parameterized.TestCase, test.TestCase):
@test_util.run_in_graph_and_eager_modes
def testMoreComplexSaveableReturned(self):
  """Round-trips an object whose saveable mirrors one tensor into two vars."""
  v = _OwnsMirroredVariables()
  checkpoint = trackable_utils.Checkpoint(v=v)
  test_dir = self.get_temp_dir()
  prefix = os.path.join(test_dir, "ckpt")
  self.evaluate(v.non_dep_variable.assign(42.))
  save_path = checkpoint.save(prefix)
  self.evaluate(v.non_dep_variable.assign(43.))
  self.evaluate(v.mirrored.assign(44.))
  checkpoint.restore(save_path).assert_consumed().initialize_or_restore()
  # _MirroringSaveable.restore writes the single saved tensor to both vars.
  self.assertEqual(42., self.evaluate(v.non_dep_variable))
  self.assertEqual(42., self.evaluate(v.mirrored))
  self.evaluate(v.non_dep_variable.assign(44.))
  save_path = checkpoint.save(prefix)
  self.evaluate(v.non_dep_variable.assign(45.))
  checkpoint.restore(save_path).assert_consumed().initialize_or_restore()
  self.assertEqual(44., self.evaluate(v.non_dep_variable))
  self.assertEqual(44., self.evaluate(v.mirrored))
@test_util.run_in_graph_and_eager_modes
def testMoreComplexSaveableReturnedWithGlobalName(self):
  """The mirroring object also round-trips through the name-based Saver."""
  # The same object can also be saved using the name-based saver.
  v = _OwnsMirroredVariables()
  saver = saver_lib.Saver(var_list=[v])
  test_dir = self.get_temp_dir()
  prefix = os.path.join(test_dir, "ckpt")
  with self.cached_session() as sess:
    self.evaluate(v.non_dep_variable.assign(42.))
    save_path = saver.save(sess, prefix)
    self.evaluate(v.non_dep_variable.assign(43.))
    self.evaluate(v.mirrored.assign(44.))
    saver.restore(sess, save_path)
    # Both variables receive the single saved value.
    self.assertEqual(42., self.evaluate(v.non_dep_variable))
    self.assertEqual(42., self.evaluate(v.mirrored))
@test_util.run_in_graph_and_eager_modes
def testAssertConsumedNoCheckpoint(self):
  """assert_consumed() still works after the Checkpoint object is deleted."""
  prefix = os.path.join(self.get_temp_dir(), "ckpt")
  v = variable_scope.get_variable(name="v", initializer=0.)
  self.evaluate(v.initializer)
  ckpt = trackable_utils.Checkpoint(v=v)
  self.evaluate(trackable_utils.gather_initializers(ckpt))
  save_path = ckpt.save(file_prefix=prefix)
  status = ckpt.restore(save_path=save_path)
  # The status object must not require the Checkpoint to stay alive.
  del ckpt
  status.assert_consumed()
@test_util.run_in_graph_and_eager_modes
def testPassingCheckpointOptions(self):
  """CheckpointOptions(experimental_io_device=...) pins the save/restore ops."""
  localhost = "/job:localhost/device:CPU:0"
  options = checkpoint_options.CheckpointOptions(
      experimental_io_device=localhost)
  prefix = os.path.join(self.get_temp_dir(), "ckpt")
  v = variable_scope.get_variable(name="v", initializer=0.)
  self.evaluate(v.initializer)
  ckpt = trackable_utils.Checkpoint(v=v)
  self.evaluate(trackable_utils.gather_initializers(ckpt))
  save_path = ckpt.save(file_prefix=prefix, options=options)
  status = ckpt.restore(save_path=save_path, options=options)
  del ckpt
  status.assert_consumed()
  # In graph mode, verify that the save and restore ops were set to run on
  # localhost.
  if not context.executing_eagerly():
    for op in ops.get_default_graph().get_operations():
      if op.type in ("SaveV2", "RestoreV2"):
        self.assertEqual(localhost, op.device)
@test_util.run_in_graph_and_eager_modes
def testFreezing(self):
  """Frozen savers and object-based checkpoints read each other's files."""
  with test_util.use_gpu():
    # Save an object-based checkpoint using a frozen saver
    directory = self.get_temp_dir()
    prefix = os.path.join(directory, "ckpt")
    v = resource_variable_ops.ResourceVariable(0, dtype=dtypes.int64)
    checkpoint = trackable_utils.Checkpoint(v=v)
    self.evaluate(v.assign(3))
    # Create the save counter so assert_consumed doesn't complain about it not
    # existing in the checkpoint on restore.
    self.evaluate(checkpoint.save_counter.assign(12))
    saver = trackable_utils.frozen_saver(checkpoint)
    with ops.device("cpu:0"):
      prefix_tensor = constant_op.constant(prefix)
    self.evaluate(saver.save(prefix_tensor))
    self.evaluate(v.assign(10))
    # Use the frozen saver to restore the same object graph
    self.evaluate(saver.restore(prefix_tensor))
    self.assertEqual(3, self.evaluate(v))

    # Restore using another frozen saver on an identical object graph
    del v, checkpoint, saver
    v = resource_variable_ops.ResourceVariable(0, dtype=dtypes.int64)
    checkpoint = trackable_utils.Checkpoint(v=v)
    saver = trackable_utils.frozen_saver(checkpoint)
    self.evaluate(saver.restore(prefix_tensor))
    self.assertEqual(3, self.evaluate(v))

    # Restore as an object-based checkpoint
    del v, checkpoint, saver
    checkpoint = trackable_utils.Checkpoint()
    status = checkpoint.restore(prefix)
    v = resource_variable_ops.ResourceVariable(0, dtype=dtypes.int64)
    if context.executing_eagerly():
      # Eager mode restores the save counter as soon as it is created.
      self.assertEqual(12, self.evaluate(checkpoint.save_counter))
      self.assertEqual(0, self.evaluate(v))
    checkpoint.v = v
    status.assert_consumed().run_restore_ops()
    self.assertEqual(3, self.evaluate(v))
    self.assertEqual(12, self.evaluate(checkpoint.save_counter))
@test_util.run_in_graph_and_eager_modes
def testCustomNumbering(self):
  """Checkpoint.write honors caller-supplied numbering in the file name."""
  directory = self.get_temp_dir()
  prefix = os.path.join(directory, "ckpt")
  step = resource_variable_ops.ResourceVariable(0, dtype=dtypes.int64)
  checkpoint = trackable_utils.Checkpoint(step=step)
  self.evaluate(step.initializer)
  for i in range(5):
    # step advances by 2 each iteration, so the suffix is 0, 2, 4, ...
    path = checkpoint.write("%s-%d" % (prefix, self.evaluate(step)))
    expected_suffix = "-%d" % (2 * i,)
    if not path.endswith(expected_suffix):
      self.fail("%s should have suffix %s" % (path, expected_suffix))
    self.evaluate(step.assign_add(2))
def testPartialRestoreWarningAttribute(self):
  """Deleting a partially-restored checkpoint logs which values were unused."""
  with context.eager_mode():
    original_root = trackable_utils.Checkpoint(v1=variables_lib.Variable(2.),
                                               v2=variables_lib.Variable(3.))
    prefix = os.path.join(self.get_temp_dir(), "ckpt")
    save_path = original_root.save(prefix)
    partial_root = trackable_utils.Checkpoint(v1=base.Trackable(),
                                              v2=variables_lib.Variable(0.))
    weak_partial_root = weakref.ref(partial_root)
    with test.mock.patch.object(logging, "warning") as mock_log:
      # Note: Unlike in testPartialRestoreWarningObject, the warning actually
      # prints immediately here, since all of the objects have been created
      # and there's no deferred restoration sitting around.
      partial_root.restore(save_path)
      self.assertEqual(3., partial_root.v2.numpy())
      del partial_root
      self.assertIsNone(weak_partial_root())
      messages = str(mock_log.call_args_list)
    # v1 had no variable to receive the saved value; v2 was fully restored.
    self.assertIn("(root).v1", messages)
    self.assertNotIn("(root).v2", messages)
    self.assertIn("expect_partial()", messages)
def testAttributeException(self):
  """assert_consumed() raises and names the attribute left unrestored."""
  with context.eager_mode():
    original_root = trackable_utils.Checkpoint(v1=variables_lib.Variable(2.),
                                               v2=variables_lib.Variable(3.))
    prefix = os.path.join(self.get_temp_dir(), "ckpt")
    save_path = original_root.save(prefix)
    # v1 is a bare Trackable with no variable, so its saved value is unused.
    partial_root = trackable_utils.Checkpoint(v1=base.Trackable(),
                                              v2=variables_lib.Variable(0.))
    status = partial_root.restore(save_path)
    with self.assertRaisesRegex(AssertionError,
                                r"Unused attributes(.|\n)*\(root\).v1"):
      status.assert_consumed()
def testSilencePartialWarning(self):
  """expect_partial() suppresses the partial-restore warning on deletion."""
  with context.eager_mode():
    original_root = trackable_utils.Checkpoint(v1=variables_lib.Variable(2.),
                                               v2=variables_lib.Variable(3.))
    prefix = os.path.join(self.get_temp_dir(), "ckpt")
    save_path = original_root.save(prefix)
    partial_root = trackable_utils.Checkpoint(v1=variables_lib.Variable(0.))
    weak_partial_root = weakref.ref(partial_root)
    weak_v1 = weakref.ref(partial_root.v1)
    partial_root.restore(save_path).expect_partial()
    self.assertEqual(2., partial_root.v1.numpy())
    with test.mock.patch.object(logging, "warning") as mock_log:
      del partial_root
      self.assertIsNone(weak_partial_root())
      self.assertIsNone(weak_v1())
      # No warning should have been emitted despite v2 never being restored.
      self.assertEmpty(mock_log.call_args_list)
def _get_checkpoint_name(self, name):
  """Returns the checkpoint key produced for a variable named `name`."""
  root = tracking.AutoTrackable()
  trackable_utils.add_variable(
      root, name=name, shape=[1, 2], dtype=dtypes.float64)
  (named_variable,), _, _ = graph_view.ObjectGraphView(
      root).serialize_object_graph()
  with ops.name_scope("root/" + named_variable.name):
    pass  # Make sure we can use this as an op name if we prefix it.
  return named_variable.name
@test_util.run_in_graph_and_eager_modes(assert_no_eager_garbage=True)
def testVariableNameEscaping(self):
  """Slashes and dots in variable names are escaped in checkpoint keys."""
  suffix = "/.ATTRIBUTES/VARIABLE_VALUE"
  # "/" becomes ".S" and "." becomes ".." so keys never collide with the
  # reserved ".ATTRIBUTES" path component.
  self.assertEqual(r"a.Sb.Sc" + suffix, self._get_checkpoint_name(r"a/b/c"))
  self.assertEqual(r"b" + suffix, self._get_checkpoint_name(r"b"))
  self.assertEqual(r"c.S" + suffix, self._get_checkpoint_name(r"c/"))
  self.assertEqual(r"d.S..S" + suffix, self._get_checkpoint_name(r"d/.S"))
  self.assertEqual(r"d.S..ATTRIBUTES.Sf" + suffix,
                   self._get_checkpoint_name(r"d/.ATTRIBUTES/f"))
@test_util.run_in_graph_and_eager_modes(assert_no_eager_garbage=True)
def testNumberedPath(self):
  """Checkpoint keys are built from the attribute path through the graph."""
  root = tracking.AutoTrackable()
  leaf = tracking.AutoTrackable()
  root.leaf = leaf
  trackable_utils.add_variable(leaf, name="v", shape=[])
  (named_variable,), _, _ = graph_view.ObjectGraphView(
      root).serialize_object_graph()
  self.assertEqual(r"leaf/v/.ATTRIBUTES/VARIABLE_VALUE", named_variable.name)
@test_util.run_in_graph_and_eager_modes
def testLocalNameValidation(self):
  """Dependency names that look reserved are escaped, not rejected."""
  root = tracking.AutoTrackable()
  leaf = tracking.AutoTrackable()
  # Dots are escaped, which avoids conflicts with reserved names.
  root._track_trackable(leaf, name=".ATTRIBUTES")
  trackable_utils.add_variable(trackable=leaf, name="a", shape=[])
  (named_variable,), _, _ = graph_view.ObjectGraphView(
      root).serialize_object_graph()
  self.assertEqual("..ATTRIBUTES/a/.ATTRIBUTES/VARIABLE_VALUE",
                   named_variable.name)
@test_util.run_in_graph_and_eager_modes
def testLateDependencyTracking(self):
  """Values restore correctly into dependencies created after restore()."""

  class Dependency(tracking.AutoTrackable):

    def build(self):
      self.var = trackable_utils.add_variable(
          self, "var", initializer=0.)

  class LateDependencies(trackable_utils.Checkpoint):

    def add_dep(self):
      self.dep = Dependency()
      self.dep.build()

  original = LateDependencies()
  original.add_dep()
  self.evaluate(state_ops.assign(original.dep.var, 123.))
  checkpoint_directory = self.get_temp_dir()
  checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
  save_path = original.save(checkpoint_prefix)
  load_into = LateDependencies()
  status = load_into.restore(save_path)
  # All *existing* objects match, but the saved dep is not yet consumed.
  status.assert_existing_objects_matched()
  with self.assertRaises(AssertionError):
    status.assert_consumed()
  load_into.add_dep()
  status.assert_consumed()
  status.assert_existing_objects_matched().run_restore_ops()
  self.assertEqual(123., self.evaluate(load_into.dep.var))
@test_util.run_in_graph_and_eager_modes
def testDepAfterVar(self):
  """A dependency attached after its variables exist still restores."""

  class Dependency(tracking.AutoTrackable):

    def build(self):
      self.var = trackable_utils.add_variable(
          self, "var", initializer=0.)

  class DepAfterVar(trackable_utils.Checkpoint):

    def add_dep(self):
      # The variable is created before `dep` is attached to self.
      dep = Dependency()
      dep.build()
      self.dep = dep

  dep_after_var = DepAfterVar()
  dep_after_var.add_dep()
  self.evaluate(state_ops.assign(dep_after_var.dep.var, -14.))
  checkpoint_directory = self.get_temp_dir()
  checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
  save_path = dep_after_var.save(checkpoint_prefix)

  loaded_dep_after_var = DepAfterVar()
  status = loaded_dep_after_var.restore(save_path)
  loaded_dep_after_var.add_dep()
  status.assert_consumed()
  status.run_restore_ops()
  self.assertEqual(-14., self.evaluate(loaded_dep_after_var.dep.var))
@test_util.run_in_graph_and_eager_modes
def testOverlappingRestores(self):
  """Two pending restores of one object resolve in run_restore_ops order."""
  checkpoint_directory = self.get_temp_dir()
  save_root = trackable_utils.Checkpoint()
  save_root.dep = tracking.AutoTrackable()
  save_root.dep.var = trackable_utils.add_variable(
      save_root.dep, name="var", initializer=0.)
  self.evaluate(state_ops.assign(save_root.dep.var, 12.))
  first_path = save_root.save(os.path.join(checkpoint_directory, "first"))
  self.evaluate(state_ops.assign(save_root.dep.var, 13.))
  second_path = save_root.save(os.path.join(checkpoint_directory, "second"))

  first_root = trackable_utils.Checkpoint()
  second_root = trackable_utils.Checkpoint()
  first_status = first_root.restore(first_path)
  second_status = second_root.restore(second_path)
  load_dep = tracking.AutoTrackable()
  load_dep.var = trackable_utils.add_variable(
      load_dep, name="var", shape=[])
  first_root.dep = load_dep
  first_status.assert_consumed()
  first_status.run_restore_ops()
  self.assertEqual(12., self.evaluate(load_dep.var))
  second_root.dep = load_dep
  second_status.assert_consumed()
  second_status.run_restore_ops()
  self.assertEqual(13., self.evaluate(load_dep.var))

  # Try again with the order of the restore() reversed. The last restore
  # determines the final value.
  first_root = trackable_utils.Checkpoint()
  second_root = trackable_utils.Checkpoint()
  second_status = second_root.restore(second_path)
  first_status = first_root.restore(first_path)
  load_dep = tracking.AutoTrackable()
  load_dep.var = trackable_utils.add_variable(
      load_dep, name="var", shape=[])
  first_root.dep = load_dep
  first_status.assert_consumed()
  first_status.run_restore_ops()
  self.assertEqual(12., self.evaluate(load_dep.var))
  second_root.dep = load_dep
  second_status.assert_consumed()
  second_status.run_restore_ops()
  self.assertEqual(12., self.evaluate(load_dep.var))
@test_util.run_in_graph_and_eager_modes
def testAmbiguousLoad(self):
  """Splitting one saved object into two load-time objects is an error."""
  # Not OK to split one checkpoint object into two
  checkpoint_directory = self.get_temp_dir()
  save_root = trackable_utils.Checkpoint()
  save_root.dep_one = tracking.AutoTrackable()
  save_root.dep_two = tracking.AutoTrackable()
  # dep_three is shared: reachable through both dep_one and dep_two.
  dep_three = tracking.AutoTrackable()
  save_root.dep_one.dep_three = dep_three
  save_root.dep_two.dep_three = dep_three
  trackable_utils.add_variable(dep_three, name="var", initializer=0.)
  self.evaluate(trackable_utils.gather_initializers(save_root))
  save_path = save_root.save(os.path.join(checkpoint_directory, "ckpt"))
  load_root = trackable_utils.Checkpoint()
  status = load_root.restore(save_path)
  load_root.dep_one = tracking.AutoTrackable()
  load_root.dep_two = tracking.AutoTrackable()
  # On load, the shared object is replaced by two distinct objects.
  load_root.dep_one.dep_three = tracking.AutoTrackable()
  load_root.dep_two.dep_three = tracking.AutoTrackable()
  trackable_utils.add_variable(
      load_root.dep_one.dep_three, name="var", initializer=0.)
  trackable_utils.add_variable(
      load_root.dep_two.dep_three, name="var", initializer=0.)
  with self.assertRaises(AssertionError):
    status.assert_consumed()
  with self.assertRaises(AssertionError):
    status.assert_existing_objects_matched()
@test_util.run_in_graph_and_eager_modes
def testObjectsCombined(self):
  """Two saved objects may map onto one Python object at load time."""
  # Currently fine to load two checkpoint objects into one Python object
  checkpoint_directory = self.get_temp_dir()
  save_root = trackable_utils.Checkpoint()
  save_root.dep_one = tracking.AutoTrackable()
  save_root.dep_two = tracking.AutoTrackable()
  trackable_utils.add_variable(
      save_root.dep_one, name="var1", initializer=32., dtype=dtypes.float64)
  trackable_utils.add_variable(
      save_root.dep_two, name="var2", initializer=64., dtype=dtypes.float64)
  self.evaluate(trackable_utils.gather_initializers(save_root))
  save_path = save_root.save(os.path.join(checkpoint_directory, "ckpt"))
  load_root = trackable_utils.Checkpoint()
  # Both saved dependencies alias the same loaded object.
  load_root.dep_one = tracking.AutoTrackable()
  load_root.dep_two = load_root.dep_one
  v1 = trackable_utils.add_variable(
      load_root.dep_one, name="var1", shape=[], dtype=dtypes.float64)
  v2 = trackable_utils.add_variable(
      load_root.dep_one, name="var2", shape=[], dtype=dtypes.float64)
  status = load_root.restore(
      save_path).assert_consumed().assert_existing_objects_matched()
  status.run_restore_ops()
  self.assertEqual(32., self.evaluate(v1))
  self.assertEqual(64., self.evaluate(v2))
@test_util.run_in_graph_and_eager_modes
def testEmptyContainersIgnored(self):
  """Empty tracked containers do not block any of the match assertions."""
  checkpoint_directory = self.get_temp_dir()
  save_root = trackable_utils.Checkpoint(a=[])
  path = save_root.save(checkpoint_directory)
  load_root = trackable_utils.Checkpoint(b=[])
  load_root.dep = []
  load_root.dep.append([])
  status = load_root.restore(path)
  status.assert_consumed()
  status.assert_existing_objects_matched()
  status.assert_nontrivial_match()
@test_util.run_in_graph_and_eager_modes
def testDependencyLoop(self):
  """Cyclic object graphs save and restore, deferred or eager."""
  # Note: this test creates garbage during eager execution because it
  # purposefully creates a reference cycle.
  first = trackable_utils.Checkpoint()
  second = trackable_utils.Checkpoint()
  first.second = second
  second.first = first
  first.v = trackable_utils.add_variable(
      first, "v1", initializer=[3., 1., 4.])
  second.v = trackable_utils.add_variable(
      second, "v2", initializer=[1., 1., 2., 3.])
  self.evaluate(trackable_utils.gather_initializers(first))
  checkpoint_directory = self.get_temp_dir()
  save_path = first.save(os.path.join(checkpoint_directory, "ckpt"))

  # Test deferred loading
  first_load = trackable_utils.Checkpoint()
  status = first_load.restore(save_path)
  second_load = tracking.AutoTrackable()
  first_load.second = second_load
  second_load.first = first_load
  with self.assertRaises(AssertionError):
    status.assert_consumed()
  first_load.v = trackable_utils.add_variable(
      first_load, "v1", shape=[3])
  second_load.v = trackable_utils.add_variable(
      second_load, "v2", shape=[4])
  status.assert_consumed()
  status.run_restore_ops()
  self.assertAllEqual([3., 1., 4.], self.evaluate(first_load.v))
  self.assertAllEqual([1., 1., 2., 3.], self.evaluate(second_load.v))

  # Test loading when variables have already been created
  self.evaluate(first_load.v.assign([2., 7., 1.]))
  self.assertAllEqual([2., 7., 1.], self.evaluate(first_load.v))
  self.evaluate(second_load.v.assign([2., 7., 1., 8.]))
  self.assertAllEqual([2., 7., 1., 8.], self.evaluate(second_load.v))
  status = first_load.restore(save_path).assert_consumed()
  status.run_restore_ops()
  self.assertAllEqual([3., 1., 4.], self.evaluate(first_load.v))
  self.assertAllEqual([1., 1., 2., 3.], self.evaluate(second_load.v))
@test_util.run_in_graph_and_eager_modes
def testRestoreOnAssign(self):
  """Attaching a variable after restore() queues its restoration."""
  checkpoint_directory = self.get_temp_dir()
  checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
  first = trackable_utils.Checkpoint()
  first.var1 = variables_lib.Variable(0., name="outside_var")
  first.var2 = variables_lib.Variable(0., name="blah")
  self.evaluate(first.var1.assign(4.))
  self.evaluate(first.var2.assign(8.))
  save_path = first.save(checkpoint_prefix)

  second = trackable_utils.Checkpoint()
  second.var2 = variables_lib.Variable(0., name="blah")
  status = second.restore(save_path)
  recreated_var1 = variables_lib.Variable(0., name="outside_var")
  status.run_restore_ops()
  self.assertEqual(8., self.evaluate(second.var2))
  self.evaluate(recreated_var1.assign(-2.))
  self.assertEqual(-2., self.evaluate(recreated_var1))
  # Attaching the variable makes its saved value restorable on the next run.
  second.var1 = recreated_var1
  status.run_restore_ops()
  self.assertEqual(4., self.evaluate(recreated_var1))
@test_util.run_in_graph_and_eager_modes
def testCheckpointState(self):
  """Checkpoint.save never deletes previously written checkpoint files."""
  # No checkpoints are deleted by default
  checkpoint_directory = self.get_temp_dir()
  checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
  obj = tracking.AutoTrackable()
  obj.var = variable_scope.get_variable(name="v", initializer=0.)
  self.evaluate(trackable_utils.gather_initializers(obj))
  saver = trackable_utils.Checkpoint(obj=obj)
  for _ in range(10):
    saver.save(checkpoint_prefix)
  expected_filenames = ["checkpoint"]
  for checkpoint_number in range(1, 11):
    expected_filenames.append("ckpt-%d.index" % (checkpoint_number,))
  self.assertEmpty(
      set(expected_filenames)
      - set(os.listdir(checkpoint_directory)))
@test_util.run_in_graph_and_eager_modes
def testCheckpointStateChangingVarList(self):
  """Checkpoint management stays consistent as tracked variables are added."""
  checkpoint_directory = self.get_temp_dir()
  checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
  obj = tracking.AutoTrackable()
  obj.var = variable_scope.get_variable(name="v", initializer=0.)
  self.evaluate(trackable_utils.gather_initializers(obj))
  checkpoint = trackable_utils.Checkpoint(obj=obj)
  looped_variables = []
  for iteration in range(10):
    # A new variable is tracked before every save, so each checkpoint
    # contains one more variable than the previous one.
    new_variable = resource_variable_ops.ResourceVariable(iteration)
    self.evaluate(new_variable.initializer)
    setattr(checkpoint, "var_%d" % iteration, new_variable)
    checkpoint.save(checkpoint_prefix)
    looped_variables.append(new_variable)
  expected_filenames = ["checkpoint"]
  # We've copied the saver each time, but checkpoint management should still
  # be consistent. Nothing gets deleted.
  for checkpoint_number in range(1, 11):
    expected_filenames.append("ckpt-%d.index" % (checkpoint_number,))
  self.assertEmpty(
      set(expected_filenames)
      - set(os.listdir(checkpoint_directory)))
  self.assertEqual(
      checkpoint_prefix + "-10",
      checkpoint_management.latest_checkpoint(checkpoint_directory))
  # The checkpoint list only contains the most recent checkpoint, but they're
  # all on disk. This means we won't eventually run into proto size limits.
  self.assertEqual(
      [checkpoint_prefix + "-10"],
      (checkpoint_management.get_checkpoint_state(checkpoint_directory)
       .all_model_checkpoint_paths))
  for v in looped_variables:
    self.evaluate(v.assign(314))
  # Checkpoint 6 only knows var_0..var_5; later vars keep their 314 value.
  checkpoint.restore(checkpoint_prefix + "-6").run_restore_ops()
  self.assertEqual(314, self.evaluate(checkpoint.var_9))
  self.assertEqual(314, self.evaluate(checkpoint.var_8))
  self.assertEqual(314, self.evaluate(checkpoint.var_6))
  self.assertEqual(5, self.evaluate(checkpoint.var_5))
  self.assertEqual(1, self.evaluate(checkpoint.var_1))
  self.assertEqual(0, self.evaluate(checkpoint.var_0))
  checkpoint.restore(checkpoint_prefix + "-10").run_restore_ops()
  self.assertEqual(9, self.evaluate(checkpoint.var_9))
  self.assertEqual(8, self.evaluate(checkpoint.var_8))
  self.assertEqual(1, self.evaluate(checkpoint.var_1))
  self.assertEqual(0, self.evaluate(checkpoint.var_0))
  @test_util.run_in_graph_and_eager_modes
  def test_restore_after_adding_empty_trackable_data_structure(self):
    """Restoring must tolerate tracked data structures absent from the ckpt.

    Saves a bare ``NonLayerTrackable``, then restores into a copy that has
    extra (checkpoint-less) data-structure attributes; only existing objects
    need to match.
    """
    model = NonLayerTrackable()
    checkpoint = trackable_utils.Checkpoint(model=model)
    checkpoint.restore(None).initialize_or_restore()
    checkpoint_directory = self.get_temp_dir()
    checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
    save_path = checkpoint.save(checkpoint_prefix)
    del model, checkpoint

    # Rebuild the object with additional tracked containers that were not
    # present when the checkpoint was written.
    model = NonLayerTrackable()
    model.dict = {"a": 1}
    # NOTE(review): a dict is assigned to an attribute named "list" —
    # presumably intentional (any tracked structure works), but confirm.
    model.list = {"b": 1}
    checkpoint = trackable_utils.Checkpoint(model=model)
    load_status = checkpoint.restore(save_path)
    # Only objects that existed at save time must be matched.
    load_status.assert_existing_objects_matched().run_restore_ops()
  @test_util.run_in_graph_and_eager_modes
  def test_write_checkpoint_from_function(self):
    """Checkpoint.write() must work when called inside a tf.function."""
    checkpoint_prefix = os.path.join(self.get_temp_dir(), "ckpt")
    save_checkpoint = trackable_utils.Checkpoint(v=variables_lib.Variable(1.))

    @def_function.function
    def _write_checkpoint():
      save_path = save_checkpoint.write(checkpoint_prefix)
      return save_path

    self.evaluate([save_checkpoint.v.initializer])
    self.evaluate(_write_checkpoint())
    load_checkpoint = trackable_utils.Checkpoint(v=variables_lib.Variable(0.))
    # Use read() instead of restore() which allows us to check that all
    # existing objects were loaded.
    status = load_checkpoint.read(checkpoint_prefix)
    status.assert_existing_objects_matched()
    status.assert_consumed()
    status.run_restore_ops()
    self.assertEqual(1., self.evaluate(load_checkpoint.v))
    # Overwrite the same prefix with a new value and check it round-trips.
    self.evaluate(save_checkpoint.v.assign(3.))
    self.evaluate(_write_checkpoint())
    self.evaluate(save_checkpoint.v.assign(0.))
    status = load_checkpoint.read(checkpoint_prefix)
    status.assert_existing_objects_matched()
    status.assert_consumed()
    status.run_restore_ops()
    self.assertEqual(3., self.evaluate(load_checkpoint.v))
  # NOTE(review): "inititialize" is a typo for "initialize"; kept because the
  # method name is the test's public identifier.
  def test_inititialize_with_data_structures(self):
    """Checkpoint kwargs may be plain lists/dicts of variables."""
    checkpoint = trackable_utils.Checkpoint(
        a=[variables_lib.Variable(0.), variables_lib.Variable(1.)],
        b={"a": variables_lib.Variable(2.), "b": variables_lib.Variable(3.)})
    checkpoint_directory = self.get_temp_dir()
    checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
    save_path = checkpoint.save(checkpoint_prefix)
    # Restore into structurally identical containers with different values.
    load_checkpoint = trackable_utils.Checkpoint(
        a=[variables_lib.Variable(4.), variables_lib.Variable(5.)],
        b={"a": variables_lib.Variable(6.), "b": variables_lib.Variable(7.)})
    load_checkpoint.restore(save_path)
    self.assertAllClose(self.evaluate(load_checkpoint.a), [0, 1])
    self.assertAllClose(self.evaluate(load_checkpoint.b), {"a": 2, "b": 3})
  def _create_trackable(self):
    """Return a minimal AutoTrackable with one variable and a __call__."""

    class Model(tracking.AutoTrackable):

      def __init__(self):
        # Single tracked variable; initial value 2.
        self.v = variables_lib.Variable(2.)

      def __call__(self, x):
        return self.v * x

    return Model()
  def test_initialize_with_root_object(self):
    """Checkpoint(root) saves/restores the whole object graph under root."""
    model = self._create_trackable()
    input_value = constant_op.constant([[3.]])
    expected_output = self.evaluate(model(input_value))
    model.deferred_variable = variables_lib.Variable(5.)

    checkpoint = trackable_utils.Checkpoint(model)
    checkpoint_directory = self.get_temp_dir()
    checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
    save_path = checkpoint.save(checkpoint_prefix)

    new_model = self._create_trackable()
    load_checkpoint = trackable_utils.Checkpoint(new_model)
    load_checkpoint.restore(save_path)
    self.assertAllClose(expected_output, new_model(input_value))

    # Attaching the attribute after restore triggers deferred restoration:
    # the checkpointed value (5) overrides the fresh initial value (1).
    new_model.deferred_variable = variables_lib.Variable(1.)
    self.assertEqual(self.evaluate(new_model.deferred_variable), 5)
  def test_initialize_with_root_object_and_kwargs(self):
    """Mixing a root object with extra kwargs; kwargs attach to the root.

    Also verifies that a kwarg whose name collides with an existing root
    attribute is rejected.
    """
    model = self._create_trackable()
    model.v.assign(3.)
    separate_variable = variables_lib.Variable(5.)

    # A kwarg named "v" would shadow root.v — must raise.
    with self.assertRaisesRegex(ValueError, "root.v already exists"):
      trackable_utils.Checkpoint(model, v=separate_variable)

    checkpoint = trackable_utils.Checkpoint(
        model, separate_variable=separate_variable)
    checkpoint_directory = self.get_temp_dir()
    checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
    save_path = checkpoint.save(checkpoint_prefix)

    # Case 1: Loading checkpoint with same configuration.
    new_model = self._create_trackable()
    separate_variable = variables_lib.Variable(1.)
    load_checkpoint = trackable_utils.Checkpoint(
        new_model, separate_variable=separate_variable)
    load_checkpoint.restore(save_path).assert_consumed()
    self.assertEqual(self.evaluate(new_model.v), 3)
    self.assertEqual(self.evaluate(separate_variable), 5)
    self.assertEqual(self.evaluate(load_checkpoint.save_counter), 1)

    # Case 2: Loading checkpoint where v and separate_variable are swapped:
    # v is not attached to the root, while separate variable is attached to root
    new_model = tracking.AutoTrackable()
    new_model.separate_variable = variables_lib.Variable(200.)
    v = variables_lib.Variable(100.)
    load_checkpoint = trackable_utils.Checkpoint(new_model, v=v)
    load_checkpoint.restore(save_path).assert_consumed()
    self.assertEqual(self.evaluate(v), 3)
    self.assertEqual(self.evaluate(new_model.separate_variable), 5)
    self.assertEqual(self.evaluate(load_checkpoint.save_counter), 1)

    # Case 3: Loading checkpoint where no root object is specified
    separate_variable = variables_lib.Variable(200.)
    v = variables_lib.Variable(100.)
    load_checkpoint = trackable_utils.Checkpoint(
        v=v, separate_variable=separate_variable)
    load_checkpoint.restore(save_path).assert_consumed()
    self.assertEqual(self.evaluate(v), 3)
    self.assertEqual(self.evaluate(new_model.separate_variable), 5)
    self.assertEqual(self.evaluate(load_checkpoint.save_counter), 1)
  def test_checkpoint_saved_model_compatibility(self):
    """Checkpoint.restore() can load variables out of a SavedModel dir."""
    model = self._create_trackable()
    input_value = constant_op.constant([[3.]])
    expected_output = self.evaluate(model(input_value))
    model.deferred_variable = variables_lib.Variable(5.)
    saved_model_dir = os.path.join(self.get_temp_dir(), "saved_model")
    saved_model_save.save(model, saved_model_dir)

    new_model = self._create_trackable()
    load_checkpoint = trackable_utils.Checkpoint(new_model)

    # A bogus path must raise a descriptive NotFoundError.
    with self.assertRaisesRegex(errors_impl.NotFoundError,
                                "Could not find checkpoint or SavedModel"):
      load_checkpoint.restore(saved_model_dir + "no").expect_partial()

    load_checkpoint.restore(saved_model_dir).expect_partial()
    self.assertAllClose(expected_output, new_model(input_value))

    # Deferred restoration also works from a SavedModel source.
    new_model.deferred_variable = variables_lib.Variable(1.)
    self.assertEqual(self.evaluate(new_model.deferred_variable), 5)
class TemplateTests(parameterized.TestCase, test.TestCase):
  """Checkpointing behavior of variables created through make_template."""

  @test_util.run_in_graph_and_eager_modes
  def test_trackable_save_restore_nested(self):
    """Nested templates expose their variables as checkpoint dependencies."""

    def _inner_template():
      v = variable_scope.get_variable(
          "v", shape=[1], initializer=init_ops.zeros_initializer())
      return v

    def _outer_template():
      first_inner = template.make_template("i1", _inner_template)
      second_inner = template.make_template("i2", _inner_template)
      v1 = first_inner()
      v2 = second_inner()
      # Calling the same template twice reuses the variable, so v3 is v2.
      v3 = second_inner()
      return (first_inner, second_inner), (v1, v2, v3)

    with variable_scope.variable_scope("ignored"):
      save_template = template.make_template("s1", _outer_template)
      save_root = trackable_utils.Checkpoint(my_template=save_template)
      (inner_template_one, inner_template_two), _ = save_template()
    self.evaluate(inner_template_one.variables[0].assign([20.]))
    self.evaluate(inner_template_two.variables[0].assign([25.]))
    checkpoint_directory = self.get_temp_dir()
    checkpoint_prefix = os.path.join(checkpoint_directory, "ckpt")
    save_path = save_root.save(checkpoint_prefix)

    load_template = template.make_template("s2", _outer_template)
    load_root = trackable_utils.Checkpoint(my_template=load_template)
    status = load_root.restore(save_path)
    (inner_template_one, inner_template_two), (v1, v2, v3) = load_template()
    # Inspect the dependency graph: the outer template should track both
    # inner templates by name, each of which tracks its single variable "v".
    outer_template_dependencies = load_root.my_template._checkpoint_dependencies
    self.assertLen(outer_template_dependencies, 2)
    self.assertEqual("i1", outer_template_dependencies[0].name)
    self.assertIs(inner_template_one, outer_template_dependencies[0].ref)
    self.assertEqual("i2", outer_template_dependencies[1].name)
    self.assertIs(inner_template_two, outer_template_dependencies[1].ref)
    self.assertLen(inner_template_one._checkpoint_dependencies, 1)
    self.assertEqual("v", inner_template_one._checkpoint_dependencies[0].name)
    self.assertLen(inner_template_two._checkpoint_dependencies, 1)
    self.assertEqual("v", inner_template_two._checkpoint_dependencies[0].name)
    status.assert_consumed().run_restore_ops()
    self.assertAllEqual([20.], self.evaluate(v1))
    self.assertAllEqual([25.], self.evaluate(v2))
    self.assertAllEqual([25.], self.evaluate(v3))
if __name__ == "__main__":
  # These checkpointing tests default to eager execution.
  ops.enable_eager_execution()
  test.main()
| apache-2.0 |
xen0n/gingerprawn | gingerprawn/shrimp/academic/academic_curricula_frame.py | 1 | 8581 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# JNMaster / academic / curriculum querier interface
# the original code was contributed by Chen Huayue, later almost entirely
# rewritten by Wang Xuerui.
# Copyright (C) 2011 Chen Huayue <489412949@qq.com>
# Copyright (C) 2011 Wang Xuerui <idontknw.wang@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import threading
import wx
# import wx.html
# dummy i18n
_ = lambda x: x
from gingerprawn.api import logger
logger.install()
# university academic affairs system's interface
from gingerprawn.api import univlib
jwxt = univlib.current.jwxt
# for common login behavior
from academic_login_mixin import JWXTLoginThreadMixin
#############################################################################
## SEPARATOR BETWEEN DECLARATIONS AND (MAINLY) GUI IMPLEMENTATION
#############################################################################
# statusbar class with a pulsing progress bar, very good for showing
# progress
from gingerprawn.api.ui.statusbar import ProgressStatusBar
# for an (again) very fancy curriculum page
from gingerprawn.api.ui.spangrid import RowSpanGrid
# Layout constants
KBSize=(300, 160)       # fixed (width, height) of the query window
showKBSize=(1280, 768)  # initial size of the result (curriculum grid) window
FirstLineSpace = 10     # top margin before the first widget row, in px
LineHeight = 30 # modified for better look under wxGTK (Ubuntu Linux)
LineIndent = 10         # left margin for each widget row, in px
class curricula_frame(wx.Frame, JWXTLoginThreadMixin):
# for layout use
def GetOrder(self, start=0):
cur = [start]
def incr(add, LineAdd=0):
cur[0] += add
return wx.Point(LineIndent + LineAdd,
FirstLineSpace + LineHeight * cur[0])
return incr
def _LoginThread(self, parent, userinfo, cfg_cache):
JWXTLoginThreadMixin._LoginThread(self, parent, userinfo, cfg_cache)
wx.CallAfter(parent.notify_status, _(u'获取学年与学期信息'))
self._kbList1, self._kbList2 = self._affairs.prepare4curriculum()
yr_default, tm_default = self._affairs.curriculum_defaults
yr_default = self._kbList1.index(yr_default)
tm_default = self._kbList2.index(tm_default)
# List of choice initialization postponed, because the lists
# are not yet available at the time of overall frame init
wx.CallAfter(self.kbL1.SetItems, self._kbList1)
wx.CallAfter(self.kbL2.SetItems, self._kbList2)
wx.CallAfter(self.kbL2.InvalidateBestSize)
wx.CallAfter(self.kbL1.Fit)
wx.CallAfter(self.kbL2.Fit)
wx.CallAfter(self.kbL1.Select, yr_default)
wx.CallAfter(self.kbL2.Select, tm_default)
wx.CallAfter(self.SetStatusText, _(u'请选择学年和学期'))
wx.CallAfter(parent.notify_status, _(u'准备就绪'))
wx.CallAfter(parent.toggle_working)
wx.CallAfter(self.Show)
return
def __init__(self, parent, userinfo, cfg_cache):
wx.Frame.__init__(self, parent, wx.ID_ANY, _(u'课表查询'), size=KBSize)
self.SetMaxSize(KBSize)
self.SetMinSize(KBSize)
self.__parent = parent
# bind the close handler to auto-logout before closing down
self.Bind(wx.EVT_CLOSE, self.OnClose)
# set background color to get some native feel on MSW
if wx.Platform == '__WXMSW__':
self.SetBackgroundColour(wx.SystemSettings.GetColour(
wx.SYS_COLOUR_3DFACE))
# this is preserved for the showing frame
self.__userid = userinfo['usr']
########################################################
## LAYOUT SPEC
order=self.GetOrder(0)
pnl = self.panelMain = wx.Panel(self, wx.ID_ANY, style=wx.EXPAND)
wx.StaticText(pnl, wx.ID_ANY, _(u'课表查询'), pos=order(0))
self.kbL1 = wx.Choice(pnl, pos=order(1), size=(130, -1))
self.kbL2 = wx.Choice(pnl, pos=order(0, 135), size=(60, -1))
self.kbB=wx.Button(pnl, label=_(u'查询'), pos=order(0, 204),
size=(60, -1))
wx.StaticText(pnl, label=_(u'请从下拉菜单中选择想查询的学期'),
pos=order(1))
self.Bind(wx.EVT_BUTTON, self.KB, self.kbB)
self.statusbar = ProgressStatusBar(self)
self.SetStatusBar(self.statusbar)
########################################################
## INITIALIZATION
thrd = threading.Thread(target=self._LoginThread,
args=(parent, userinfo, cfg_cache, ),
name='academic_LoginThread')
thrd.daemon = True
thrd.start()
def OnClose(self, evt):
thrd = threading.Thread(target=self._LogoutThread,
args=(self.__parent, ), # toggle=True
name='academic_LogoutThread')
thrd.daemon = True
thrd.start()
evt.Skip()
def _QueryThread(self, yr, tm):
wx.CallAfter(self.notify_status, _(u'查询中'))
wx.CallAfter(self.toggle_working)
try:
self._affairs.get_curriculum(yr, tm) #, raw=True)
except Exception, e:
logexception('unexpected exc:\n%s', `e`)
wx.CallAfter(self.notify_status,
_(u'查询出错,请重试;若仍然出错,请报告 Bug'))
wx.CallAfter(self.toggle_working)
return
_r = self._affairs.curriculum[(yr, tm)]
# gui operation must be protected
wx.CallAfter(self.do_showKB, _r)
wx.CallAfter(self.toggle_working)
return
def KB(self, evt):
# Gather and validate input.
yr = self._affairs.curriculum_years[self.kbL1.GetSelection()]
if yr == -1:
self.SetStatusText(_(u'学年不能为空'))
return
term = self._affairs.curriculum_terms[self.kbL2.GetSelection()]
# Data gathering complete, spawn worker thread.
thrd = threading.Thread(target=self._QueryThread,
args=(yr, term, ),
name='academic_QueryThread')
thrd.daemon = True
thrd.start()
def do_showKB(self, rawdata):
'''\
This GUI operation must be done in the main thread, so we have to
encapsulate it into a function.
'''
showKB(self, rawdata, self.__userid)
def notify_status(self, msg):
self.SetStatusText(msg)
def toggle_working(self):
self.statusbar.ToggleStatus()
class showKB(wx.Frame):
    """Window displaying a queried curriculum as a row-spanning grid."""

    def __init__(self, parent, content, username):
        wx.Frame.__init__(self, parent,
                          title=_(u'%s 的课表') % username,
                          size=showKBSize)
        self.__parent = parent
        self.Bind(wx.EVT_CLOSE, self.OnClose)

        try:
            self.curriculum_grid = RowSpanGrid(self, wx.ID_ANY, content)
        except Exception:
            # Grid construction failed: report the error, then dispose of
            # the half-built frame and bail out.  The original code fell
            # through here and crashed with an AttributeError on
            # self.curriculum_grid, which was never assigned.
            logexception('exc when opening grid window for result')
            parent.SetStatusText(
                _(u'无法展开课表,请重试;若仍然失败,请报告 Bug'))
            self.Destroy()
            return

        # this auto size, thanks to Robin Dunn for pointing out the method in an
        # earlier mail list post, has brought a MUCH BETTER look
        self.curriculum_grid.AutoSize()
        self.Fit()

        # we're done, show up!
        self.Show(True)
        parent.notify_status(_(u'课表已打开'))

    def OnClose(self, evt):
        """Tell the parent frame we are gone, then close normally."""
        self.__parent.notify_status(_(u'课表窗口已关闭'))
        evt.Skip()
def invoke(prnt, userinfo, cfg_obj):
    """Shrimp entry point: create the curriculum query frame.

    The frame is deliberately not shown here; it shows itself via
    wx.CallAfter once the background login completes.
    """
    frame = curricula_frame(prnt, userinfo, cfg_obj)
    # frame.Show()
# vi:ai:et:ts=4 sw=4 sts=4 fenc=utf-8
| gpl-3.0 |
PulsePod/old-www-do-not-use | lib/python2.7/site-packages/pip/commands/__init__.py | 476 | 2236 | """
Package containing all pip commands
"""
from pip.commands.bundle import BundleCommand
from pip.commands.completion import CompletionCommand
from pip.commands.freeze import FreezeCommand
from pip.commands.help import HelpCommand
from pip.commands.list import ListCommand
from pip.commands.search import SearchCommand
from pip.commands.show import ShowCommand
from pip.commands.install import InstallCommand
from pip.commands.uninstall import UninstallCommand
from pip.commands.unzip import UnzipCommand
from pip.commands.zip import ZipCommand
from pip.commands.wheel import WheelCommand
# Registry mapping each command's name to its implementing class.
commands = {
    BundleCommand.name: BundleCommand,
    CompletionCommand.name: CompletionCommand,
    FreezeCommand.name: FreezeCommand,
    HelpCommand.name: HelpCommand,
    SearchCommand.name: SearchCommand,
    ShowCommand.name: ShowCommand,
    InstallCommand.name: InstallCommand,
    UninstallCommand.name: UninstallCommand,
    UnzipCommand.name: UnzipCommand,
    ZipCommand.name: ZipCommand,
    ListCommand.name: ListCommand,
    WheelCommand.name: WheelCommand,
}


# Display order for command listings; commands absent from this list sort
# last (see _sort_commands).
commands_order = [
    InstallCommand,
    UninstallCommand,
    FreezeCommand,
    ListCommand,
    ShowCommand,
    SearchCommand,
    WheelCommand,
    ZipCommand,
    UnzipCommand,
    BundleCommand,
    HelpCommand,
]
def get_summaries(ignore_hidden=True, ordered=True):
    """Yield (command name, command summary) tuples.

    With ``ordered`` the pairs follow ``commands_order``; with
    ``ignore_hidden`` commands flagged as hidden are skipped.
    """
    if ordered:
        items = _sort_commands(commands, commands_order)
    else:
        items = commands.items()

    for name, command_class in items:
        hidden = ignore_hidden and command_class.hidden
        if not hidden:
            yield (name, command_class.summary)
def get_similar_commands(name):
    """Command name auto-correct: closest known command name, or False."""
    from difflib import get_close_matches

    matches = get_close_matches(name, commands.keys())
    return matches[0] if matches else False
def _sort_commands(cmddict, order):
def keyfn(key):
try:
return order.index(key[1])
except ValueError:
# unordered items should come last
return 0xff
return sorted(cmddict.items(), key=keyfn)
| mit |
AnotherIvan/calibre | src/calibre/ebooks/epub/cfi/parse.py | 14 | 7366 | #!/usr/bin/env python2
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2014, Kovid Goyal <kovid at kovidgoyal.net>'
import regex, sys
from future_builtins import map, zip
is_narrow_build = sys.maxunicode < 0x10ffff
class Parser(object):

    ''' See epubcfi.ebnf for the specification that this parser tries to
    follow. I have implemented it manually, since I don't want to depend on
    grako, and the grammar is pretty simple. This parser is thread-safe, i.e.
    it can be used from multiple threads simultaneously. '''

    def __init__(self):
        # All the regexes below are built once here and never mutated,
        # which is what makes Parser instances safe to share across threads.
        # All allowed unicode characters + escaped special characters
        special_char = r'[\[\](),;=^]'
        if is_narrow_build:
            unescaped_char = '[[\t\n\r -\ud7ff\ue000-\ufffd]--%s]' % special_char
        else:
            unescaped_char = '[[\t\n\r -\ud7ff\ue000-\ufffd\U00010000-\U0010ffff]--%s]' % special_char
        escaped_char = r'\^' + special_char
        chars = r'(?:%s|(?:%s))+' % (unescaped_char, escaped_char)
        chars_no_space = chars.replace('0020', '0021')
        # No leading zeros allowed for integers
        integer = r'(?:[1-9][0-9]*)|0'
        # No leading zeros, except for numbers in (0, 1) and no trailing zeros for the fractional part
        frac = r'\.[0-9]*[1-9]'
        number = r'(?:[1-9][0-9]*(?:{0})?)|(?:0{0})|(?:0)'.format(frac)
        c = lambda x:regex.compile(x, flags=regex.VERSION1)

        # A step of the form /integer
        self.step_pat = c(r'/(%s)' % integer)
        # An id assertion of the form [characters]
        self.id_assertion_pat = c(r'\[(%s)\]' % chars)

        # A text offset of the form :integer
        self.text_offset_pat = c(r':(%s)' % integer)
        # A temporal offset of the form ~number
        self.temporal_offset_pat = c(r'~(%s)' % number)
        # A spatial offset of the form @number:number
        self.spatial_offset_pat = c(r'@({0}):({0})'.format(number))
        # A spatio-temporal offset of the form ~number@number:number
        self.st_offset_pat = c(r'~({0})@({0}):({0})'.format(number))

        # Text assertion patterns
        self.ta1_pat = c(r'({0})(?:,({0})){{0,1}}'.format(chars))
        self.ta2_pat = c(r',(%s)' % chars)
        self.parameters_pat = c(r'(?:;(%s)=((?:%s,?)+))+' % (chars_no_space, chars))
        self.csv_pat = c(r'(?:(%s),?)+' % chars)

        # Unescape characters
        unescape_pat = c(r'%s(%s)' % (escaped_char[:2], escaped_char[2:]))
        self.unescape = lambda x: unescape_pat.sub(r'\1', x)

    def parse_epubcfi(self, raw):
        ' Parse a full epubcfi of the form epubcfi(path [ , path , path ]) '
        # Returns (parent_cfi, start_cfi, end_cfi, remaining_raw); the three
        # dicts are empty on a parse failure.
        null = {}, {}, {}, raw
        if not raw.startswith('epubcfi('):
            return null
        raw = raw[len('epubcfi('):]
        parent_cfi, raw = self.parse_path(raw)
        if not parent_cfi:
            return null
        start_cfi, end_cfi = {}, {}
        # An optional range: ",start-path,end-path" -- both must parse.
        if raw.startswith(','):
            start_cfi, raw = self.parse_path(raw[1:])
            if raw.startswith(','):
                end_cfi, raw = self.parse_path(raw[1:])
            if not start_cfi or not end_cfi:
                return null
        if raw.startswith(')'):
            raw = raw[1:]
        else:
            return null

        return parent_cfi, start_cfi, end_cfi, raw

    def parse_path(self, raw):
        ' Parse the path component of an epubcfi of the form /step... '
        # Returns ({'steps': [...], ...} or {} on failure, remaining_raw).
        path = {'steps':[]}
        raw = self._parse_path(raw, path)
        if not path['steps']:
            path = {}
        return path, raw

    def do_match(self, pat, raw):
        """Try *pat* at the start of *raw*; return (match_or_None, rest)."""
        m = pat.match(raw)
        if m is not None:
            raw = raw[len(m.group()):]
        return m, raw

    def _parse_path(self, raw, ans):
        """Recursively consume /step[id]... segments into ans['steps'].

        A '!' starts an indirection: the rest of the path is parsed into
        ans['redirect'].  Returns the unconsumed tail of *raw*.
        """
        m, raw = self.do_match(self.step_pat, raw)
        if m is None:
            return raw
        ans['steps'].append({'num':int(m.group(1))})
        m, raw = self.do_match(self.id_assertion_pat, raw)
        if m is not None:
            ans['steps'][-1]['id'] = self.unescape(m.group(1))
        if raw.startswith('!'):
            ans['redirect'] = r = {'steps':[]}
            return self._parse_path(raw[1:], r)
        else:
            # An offset (if any) terminates the path; otherwise keep
            # consuming further steps.
            remaining_raw = self.parse_offset(raw, ans['steps'][-1])
            return self._parse_path(raw, ans) if remaining_raw is None else remaining_raw

    def parse_offset(self, raw, ans):
        """Parse a text/temporal/spatial offset into the step dict *ans*.

        Returns the remaining string when an offset matched, else None.
        """
        m, raw = self.do_match(self.text_offset_pat, raw)
        if m is not None:
            ans['text_offset'] = int(m.group(1))
            return self.parse_text_assertion(raw, ans)
        # Note: the combined spatio-temporal form must be tried before the
        # plain temporal form, or "~t@x:y" would match only "~t".
        m, raw = self.do_match(self.st_offset_pat, raw)
        if m is not None:
            t, x, y = m.groups()
            ans['temporal_offset'] = float(t)
            ans['spatial_offset'] = tuple(map(float, (x, y)))
            return raw
        m, raw = self.do_match(self.temporal_offset_pat, raw)
        if m is not None:
            ans['temporal_offset'] = float(m.group(1))
            return raw
        m, raw = self.do_match(self.spatial_offset_pat, raw)
        if m is not None:
            ans['spatial_offset'] = tuple(map(float, m.groups()))
            return raw

    def parse_text_assertion(self, raw, ans):
        """Parse an optional "[before,after;param=value,...]" assertion.

        On any malformed assertion the original string is returned
        unconsumed and *ans* is left without a 'text_assertion' key.
        """
        oraw = raw
        if not raw.startswith('['):
            return oraw
        raw = raw[1:]
        ta = {}
        m, raw = self.do_match(self.ta1_pat, raw)
        if m is not None:
            before, after = m.groups()
            ta['before'] = self.unescape(before)
            if after is not None:
                ta['after'] = self.unescape(after)
        else:
            m, raw = self.do_match(self.ta2_pat, raw)
            if m is not None:
                ta['after'] = self.unescape(m.group(1))

        # parse parameters
        m, raw = self.do_match(self.parameters_pat, raw)
        if m is not None:
            params = {}
            for name, value in zip(m.captures(1), m.captures(2)):
                params[name] = tuple(map(self.unescape, self.csv_pat.match(value).captures(1)))
            if params:
                ta['params'] = params

        if not raw.startswith(']'):
            return oraw  # no closing ] or extra content in the assertion
        if ta:
            ans['text_assertion'] = ta
        return raw[1:]
_parser = None


def parser():
    """Return the shared Parser instance, creating it on first use."""
    global _parser
    if _parser is not None:
        return _parser
    _parser = Parser()
    return _parser
def get_steps(pcfi):
    """Flatten a parsed path's step dicts into one tuple, following the
    chain of 'redirect' sub-paths in order."""
    steps = ()
    node = pcfi
    while node is not None:
        steps += tuple(node['steps'])
        node = node.get('redirect')
    return steps
def cfi_sort_key(cfi, only_path=True):
    """Return a sortable key (step numbers, offsets) for a CFI string.

    With ``only_path`` the argument is a bare path; otherwise it is a full
    ``epubcfi(...)`` expression, whose start path (or parent path) is used.
    Returns an empty tuple when parsing fails.
    """
    p = parser()
    try:
        if only_path:
            pcfi = p.parse_path(cfi)[0]
        else:
            parent, start = p.parse_epubcfi(cfi)[:2]
            pcfi = start or parent
    except Exception:
        import traceback
        traceback.print_exc()
        return ()
    if not pcfi:
        # Report the *input*: the original printed pcfi, which is always an
        # empty dict at this point and therefore useless in the message.
        # (sys is already imported at module level.)
        print ('Failed to parse CFI: %r' % cfi, file=sys.stderr)
        return ()
    steps = get_steps(pcfi)
    step_nums = tuple(s.get('num', 0) for s in steps)
    # Sort by the last step's offsets: temporal first, then spatial (y, x),
    # then the text offset.
    step = steps[-1] if steps else {}
    offsets = (step.get('temporal_offset', 0), tuple(reversed(step.get('spatial_offset', (0, 0)))), step.get('text_offset', 0), )
    return (step_nums, offsets)
| gpl-3.0 |
ddico/account-analytic | account_analytic_plan_required/account.py | 11 | 4623 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Account analytic plan required module for OpenERP
# Copyright (C) 2014 Acsone (http://acsone.eu).
# @author Stéphane Bidoul <stephane.bidoul@acsone.eu>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm
from openerp.tools.translate import _
class account_account_type(orm.Model):
    # Extends the base account type with two analytic-plan policies.
    _inherit = "account.account.type"

    def _get_policies(self, cr, uid, context=None):
        """This is the method to be inherited for adding policies"""
        # Keep the policies contributed by upstream modules and append ours.
        policies = super(account_account_type, self).\
            _get_policies(cr, uid, context=context)
        policies.extend([('always_plan',
                          _('Always (analytic distribution)')),
                         ('always_plan_or_account',
                          _('Always (analytic account or distribution)'))])
        return policies
class account_move_line(orm.Model):
    # Enforces the analytic-plan policy of each line's account type.
    _inherit = "account.move.line"

    def _check_analytic_plan_required_msg(self, cr, uid, ids, context=None):
        """Return an error message string when a move line violates its
        account's analytic policy, or None when all lines are valid.

        Lines with zero debit and credit are exempt from the 'always*'
        policies.
        """
        for move_line in self.browse(cr, uid, ids, context=context):
            # An analytic account and an analytic distribution may never be
            # set on the same line.
            if move_line.analytic_account_id and move_line.analytics_id:
                return _('Analytic account and analytic distribution '
                         'are mutually exclusive')
            if move_line.debit == 0 and move_line.credit == 0:
                continue
            analytic_policy = self._get_analytic_policy(cr, uid,
                                                        move_line.account_id,
                                                        context=context)
            if analytic_policy == 'always_plan' \
                    and not move_line.analytics_id:
                return _("Analytic policy is set to "
                         "'Always (analytic distribution)' with account "
                         "%s '%s' but the analytic distribution is "
                         "missing in the account move line with "
                         "label '%s'.") % \
                    (move_line.account_id.code,
                     move_line.account_id.name,
                     move_line.name)
            if analytic_policy == 'always_plan_or_account' \
                    and not move_line.analytic_account_id \
                    and not move_line.analytics_id:
                return _("Analytic policy is set to "
                         "'Always (analytic account or distribution)' "
                         "with account %s '%s' but the analytic "
                         "distribution and the analytic account are "
                         "missing in the account move line "
                         "with label '%s'.") % \
                    (move_line.account_id.code,
                     move_line.account_id.name,
                     move_line.name)
            # NOTE(review): this branch triggers on analytics_id but the
            # message formats analytic_account_id's code/name -- looks
            # inconsistent; confirm which field was intended.
            elif analytic_policy == 'never' and move_line.analytics_id:
                return _("Analytic policy is set to 'Never' with account "
                         "%s '%s' but the account move line with label "
                         "'%s' has an analytic distribution %s '%s'.") % \
                    (move_line.account_id.code,
                     move_line.account_id.name,
                     move_line.name,
                     move_line.analytic_account_id.code,
                     move_line.analytic_account_id.name)

    def _check_analytic_plan_required(self, cr, uid, ids, context=None):
        # Constraint predicate: valid when no error message was produced.
        return not self._check_analytic_plan_required_msg(cr, uid, ids,
                                                          context=context)

    _constraints = [
        (_check_analytic_plan_required,
         _check_analytic_plan_required_msg,
         ['analytic_account_id', 'analytics_id', 'account_id',
          'debit', 'credit']),
    ]
| agpl-3.0 |
ychen820/microblog | y/google-cloud-sdk/platform/google_appengine/lib/django-1.4/django/utils/unittest/case.py | 103 | 42486 | """Test case implementation"""
import sys
import difflib
import pprint
import re
import unittest
import warnings
from django.utils.unittest import result
from django.utils.unittest.util import\
safe_repr, safe_str, strclass,\
unorderable_list_difference
from django.utils.unittest.compatibility import wraps
# Marks this module's frames as unittest internals so they are hidden from
# failure tracebacks.
__unittest = True

# Appended to assertion messages when a diff exceeds TestCase.maxDiff.
DIFF_OMITTED = ('\nDiff is %s characters long. '
                'Set self.maxDiff to None to see it.')
class SkipTest(Exception):
    """
    Raise this exception in a test to skip it.

    Usually you can use TestResult.skip() or one of the skipping decorators
    instead of raising this directly.
    """
class _ExpectedFailure(Exception):
    """
    Raise this when a test is expected to fail.

    This is an implementation detail.  Carries the original ``sys.exc_info()``
    tuple so the result reporter can show the expected failure.
    """
    def __init__(self, exc_info):
        # can't use super because Python 2.4 exceptions are old style
        Exception.__init__(self)
        self.exc_info = exc_info
class _UnexpectedSuccess(Exception):
    """
    The test was supposed to fail, but it didn't!
    """
def _id(obj):
    # Identity decorator, used by skipIf/skipUnless when the condition
    # does not require skipping.
    return obj
def skip(reason):
    """
    Unconditionally skip a test, recording *reason*.
    """
    def decorator(test_item):
        is_test_class = (isinstance(test_item, type)
                         and issubclass(test_item, TestCase))
        if not is_test_class:
            # Plain functions/methods are replaced by a wrapper that
            # raises SkipTest when called.
            @wraps(test_item)
            def skip_wrapper(*args, **kwargs):
                raise SkipTest(reason)
            test_item = skip_wrapper

        test_item.__unittest_skip__ = True
        test_item.__unittest_skip_why__ = reason
        return test_item
    return decorator
def skipIf(condition, reason):
    """
    Skip a test if the condition is true.
    """
    return skip(reason) if condition else _id
def skipUnless(condition, reason):
    """
    Skip a test unless the condition is true.
    """
    return _id if condition else skip(reason)
def expectedFailure(func):
    """Mark *func* as expected to fail; an unexpected pass is an error."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            func(*args, **kwargs)
        except Exception:
            # The anticipated failure happened: wrap its exc_info.
            raise _ExpectedFailure(sys.exc_info())
        else:
            raise _UnexpectedSuccess
    return wrapper
class _AssertRaisesContext(object):
    """A context manager used to implement TestCase.assertRaises* methods."""

    def __init__(self, expected, test_case, expected_regexp=None):
        # expected: the exception class (or tuple of classes) that the
        # with-block is required to raise.
        self.expected = expected
        self.failureException = test_case.failureException
        # Optional pattern (string or compiled) matched against str(exc).
        self.expected_regexp = expected_regexp

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, tb):
        if exc_type is None:
            # Nothing was raised inside the with-block: test failure.
            try:
                exc_name = self.expected.__name__
            except AttributeError:
                exc_name = str(self.expected)
            raise self.failureException(
                "%s not raised" % (exc_name,))
        if not issubclass(exc_type, self.expected):
            # let unexpected exceptions pass through
            return False
        self.exception = exc_value # store for later retrieval
        if self.expected_regexp is None:
            # Returning True suppresses the (expected) exception.
            return True

        expected_regexp = self.expected_regexp
        if isinstance(expected_regexp, basestring):
            expected_regexp = re.compile(expected_regexp)
        if not expected_regexp.search(str(exc_value)):
            raise self.failureException('"%s" does not match "%s"' %
                     (expected_regexp.pattern, str(exc_value)))
        return True
class _TypeEqualityDict(object):
    """Maps types to equality-assertion functions for a test case.

    Values stored as strings are resolved lazily: on lookup they are
    treated as attribute names on the owning test case, so subclasses can
    override the named comparison methods.
    """

    def __init__(self, testcase):
        self.testcase = testcase
        self._store = {}

    def __setitem__(self, key, value):
        self._store[key] = value

    def __getitem__(self, key):
        value = self._store[key]
        if isinstance(value, basestring):
            # Bind the method name on the test case at access time.
            return getattr(self.testcase, value)
        return value

    def get(self, key, default=None):
        if key not in self._store:
            return default
        return self[key]
class TestCase(unittest.TestCase):
"""A class whose instances are single test cases.
By default, the test code itself should be placed in a method named
'runTest'.
If the fixture may be used for many test cases, create as
many test methods as are needed. When instantiating such a TestCase
subclass, specify in the constructor arguments the name of the test method
that the instance is to execute.
Test authors should subclass TestCase for their own tests. Construction
and deconstruction of the test's environment ('fixture') can be
implemented by overriding the 'setUp' and 'tearDown' methods respectively.
If it is necessary to override the __init__ method, the base class
__init__ method must always be called. It is important that subclasses
should not change the signature of their __init__ method, since instances
of the classes are instantiated automatically by parts of the framework
in order to be run.
"""
# This attribute determines which exception will be raised when
# the instance's assertion methods fail; test methods raising this
# exception will be deemed to have 'failed' rather than 'errored'
failureException = AssertionError
# This attribute sets the maximum length of a diff in failure messages
# by assert methods using difflib. It is looked up as an instance attribute
# so can be configured by individual tests if required.
maxDiff = 80*8
# This attribute determines whether long messages (including repr of
# objects used in assert methods) will be printed on failure in *addition*
# to any explicit message passed.
longMessage = True
# Attribute used by TestSuite for classSetUp
_classSetupFailed = False
    def __init__(self, methodName='runTest'):
        """Create an instance of the class that will use the named test
        method when executed. Raises a ValueError if the instance does
        not have a method with the specified name.
        """
        self._testMethodName = methodName
        # Filled in by run(); doCleanups() reports errors against it.
        self._resultForDoCleanups = None
        try:
            testMethod = getattr(self, methodName)
        except AttributeError:
            raise ValueError("no such test method in %s: %s" % \
                  (self.__class__, methodName))
        self._testMethodDoc = testMethod.__doc__
        # LIFO list of (function, args, kwargs) registered via addCleanup().
        self._cleanups = []

        # Map types to custom assertEqual functions that will compare
        # instances of said type in more detail to generate a more useful
        # error message.
        self._type_equality_funcs = _TypeEqualityDict(self)
        self.addTypeEqualityFunc(dict, 'assertDictEqual')
        self.addTypeEqualityFunc(list, 'assertListEqual')
        self.addTypeEqualityFunc(tuple, 'assertTupleEqual')
        self.addTypeEqualityFunc(set, 'assertSetEqual')
        self.addTypeEqualityFunc(frozenset, 'assertSetEqual')
        # Python 2 only: `unicode` does not exist on Python 3.
        self.addTypeEqualityFunc(unicode, 'assertMultiLineEqual')
def addTypeEqualityFunc(self, typeobj, function):
"""Add a type specific assertEqual style function to compare a type.
This method is for use by TestCase subclasses that need to register
their own type equality functions to provide nicer error messages.
Args:
typeobj: The data type to call this function on when both values
are of the same type in assertEqual().
function: The callable taking two arguments and an optional
msg= argument that raises self.failureException with a
useful error message when the two arguments are not equal.
"""
self._type_equality_funcs[typeobj] = function
def addCleanup(self, function, *args, **kwargs):
"""Add a function, with arguments, to be called when the test is
completed. Functions added are called on a LIFO basis and are
called after tearDown on test failure or success.
Cleanup items are called even if setUp fails (unlike tearDown)."""
self._cleanups.append((function, args, kwargs))
    @classmethod
    def setUpClass(cls):
        "Hook method for setting up class fixture before running tests in the class."

    @classmethod
    def tearDownClass(cls):
        "Hook method for deconstructing the class fixture after running all tests in the class."

    def countTestCases(self):
        # Each TestCase instance represents exactly one test.
        return 1

    def defaultTestResult(self):
        # `result` is the sibling module providing TestResult.
        return result.TestResult()
def shortDescription(self):
"""Returns a one-line description of the test, or None if no
description has been provided.
The default implementation of this method returns the first line of
the specified test method's docstring.
"""
doc = self._testMethodDoc
return doc and doc.split("\n")[0].strip() or None
    def id(self):
        # Unique test identifier: "<module>.<Class>.<method>"; strclass
        # formats the class portion.
        return "%s.%s" % (strclass(self.__class__), self._testMethodName)
def __eq__(self, other):
if type(self) is not type(other):
return NotImplemented
return self._testMethodName == other._testMethodName
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((type(self), self._testMethodName))
def __str__(self):
return "%s (%s)" % (self._testMethodName, strclass(self.__class__))
def __repr__(self):
return "<%s testMethod=%s>" % \
(strclass(self.__class__), self._testMethodName)
def _addSkip(self, result, reason):
addSkip = getattr(result, 'addSkip', None)
if addSkip is not None:
addSkip(self, reason)
else:
warnings.warn("Use of a TestResult without an addSkip method is deprecated",
DeprecationWarning, 2)
result.addSuccess(self)
    def run(self, result=None):
        """Run this test, reporting the outcome into *result*.

        When *result* is omitted, a default TestResult is created and its
        startTestRun/stopTestRun hooks are invoked around the run.
        Handles skips, expected failures/unexpected successes, errors in
        setUp/tearDown, and always runs registered cleanups.
        """
        orig_result = result
        if result is None:
            result = self.defaultTestResult()
            # Only fire the run-level hooks when we own the result object.
            startTestRun = getattr(result, 'startTestRun', None)
            if startTestRun is not None:
                startTestRun()

        self._resultForDoCleanups = result
        result.startTest(self)

        testMethod = getattr(self, self._testMethodName)

        if (getattr(self.__class__, "__unittest_skip__", False) or
            getattr(testMethod, "__unittest_skip__", False)):
            # If the class or method was skipped.
            try:
                skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
                            or getattr(testMethod, '__unittest_skip_why__', ''))
                self._addSkip(result, skip_why)
            finally:
                result.stopTest(self)
            return
        try:
            success = False
            try:
                self.setUp()
            except SkipTest, e:
                self._addSkip(result, str(e))
            except Exception:
                result.addError(self, sys.exc_info())
            else:
                try:
                    testMethod()
                except self.failureException:
                    result.addFailure(self, sys.exc_info())
                except _ExpectedFailure, e:
                    # Legacy TestResult objects may lack the hook; degrade
                    # with a deprecation warning.
                    addExpectedFailure = getattr(result, 'addExpectedFailure', None)
                    if addExpectedFailure is not None:
                        addExpectedFailure(self, e.exc_info)
                    else:
                        warnings.warn("Use of a TestResult without an addExpectedFailure method is deprecated",
                                      DeprecationWarning)
                        result.addSuccess(self)
                except _UnexpectedSuccess:
                    addUnexpectedSuccess = getattr(result, 'addUnexpectedSuccess', None)
                    if addUnexpectedSuccess is not None:
                        addUnexpectedSuccess(self)
                    else:
                        warnings.warn("Use of a TestResult without an addUnexpectedSuccess method is deprecated",
                                      DeprecationWarning)
                        result.addFailure(self, sys.exc_info())
                except SkipTest, e:
                    self._addSkip(result, str(e))
                except Exception:
                    result.addError(self, sys.exc_info())
                else:
                    success = True

                # tearDown runs even when the test method failed.
                try:
                    self.tearDown()
                except Exception:
                    result.addError(self, sys.exc_info())
                    success = False

            # Cleanups run regardless of the outcome so far.
            cleanUpSuccess = self.doCleanups()
            success = success and cleanUpSuccess
            if success:
                result.addSuccess(self)
        finally:
            result.stopTest(self)
            if orig_result is None:
                stopTestRun = getattr(result, 'stopTestRun', None)
                if stopTestRun is not None:
                    stopTestRun()
def doCleanups(self):
"""Execute all cleanup functions. Normally called for you after
tearDown."""
result = self._resultForDoCleanups
ok = True
while self._cleanups:
function, args, kwargs = self._cleanups.pop(-1)
try:
function(*args, **kwargs)
except Exception:
ok = False
result.addError(self, sys.exc_info())
return ok
def __call__(self, *args, **kwds):
return self.run(*args, **kwds)
def debug(self):
"""Run the test without collecting errors in a TestResult"""
self.setUp()
getattr(self, self._testMethodName)()
self.tearDown()
while self._cleanups:
function, args, kwargs = self._cleanups.pop(-1)
function(*args, **kwargs)
def skipTest(self, reason):
"""Skip this test."""
raise SkipTest(reason)
def fail(self, msg=None):
"""Fail immediately, with the given message."""
raise self.failureException(msg)
def assertFalse(self, expr, msg=None):
"Fail the test if the expression is true."
if expr:
msg = self._formatMessage(msg, "%s is not False" % safe_repr(expr))
raise self.failureException(msg)
def assertTrue(self, expr, msg=None):
"""Fail the test unless the expression is true."""
if not expr:
msg = self._formatMessage(msg, "%s is not True" % safe_repr(expr))
raise self.failureException(msg)
def _formatMessage(self, msg, standardMsg):
"""Honour the longMessage attribute when generating failure messages.
If longMessage is False this means:
* Use only an explicit message if it is provided
* Otherwise use the standard message for the assert
If longMessage is True:
* Use the standard message
* If an explicit message is provided, plus ' : ' and the explicit message
"""
if not self.longMessage:
return msg or standardMsg
if msg is None:
return standardMsg
try:
return '%s : %s' % (standardMsg, msg)
except UnicodeDecodeError:
return '%s : %s' % (safe_str(standardMsg), safe_str(msg))
def assertRaises(self, excClass, callableObj=None, *args, **kwargs):
"""Fail unless an exception of class excClass is thrown
by callableObj when invoked with arguments args and keyword
arguments kwargs. If a different type of exception is
thrown, it will not be caught, and the test case will be
deemed to have suffered an error, exactly as for an
unexpected exception.
If called with callableObj omitted or None, will return a
context object used like this::
with self.assertRaises(SomeException):
do_something()
The context manager keeps a reference to the exception as
the 'exception' attribute. This allows you to inspect the
exception after the assertion::
with self.assertRaises(SomeException) as cm:
do_something()
the_exception = cm.exception
self.assertEqual(the_exception.error_code, 3)
"""
if callableObj is None:
return _AssertRaisesContext(excClass, self)
try:
callableObj(*args, **kwargs)
except excClass:
return
if hasattr(excClass,'__name__'):
excName = excClass.__name__
else:
excName = str(excClass)
raise self.failureException("%s not raised" % excName)
def _getAssertEqualityFunc(self, first, second):
"""Get a detailed comparison function for the types of the two args.
Returns: A callable accepting (first, second, msg=None) that will
raise a failure exception if first != second with a useful human
readable error message for those types.
"""
#
# NOTE(gregory.p.smith): I considered isinstance(first, type(second))
# and vice versa. I opted for the conservative approach in case
# subclasses are not intended to be compared in detail to their super
# class instances using a type equality func. This means testing
# subtypes won't automagically use the detailed comparison. Callers
# should use their type specific assertSpamEqual method to compare
# subclasses if the detailed comparison is desired and appropriate.
# See the discussion in http://bugs.python.org/issue2578.
#
if type(first) is type(second):
asserter = self._type_equality_funcs.get(type(first))
if asserter is not None:
return asserter
return self._baseAssertEqual
def _baseAssertEqual(self, first, second, msg=None):
"""The default assertEqual implementation, not type specific."""
if not first == second:
standardMsg = '%s != %s' % (safe_repr(first), safe_repr(second))
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
def assertEqual(self, first, second, msg=None):
"""Fail if the two objects are unequal as determined by the '=='
operator.
"""
assertion_func = self._getAssertEqualityFunc(first, second)
assertion_func(first, second, msg=msg)
def assertNotEqual(self, first, second, msg=None):
"""Fail if the two objects are equal as determined by the '=='
operator.
"""
if not first != second:
msg = self._formatMessage(msg, '%s == %s' % (safe_repr(first),
safe_repr(second)))
raise self.failureException(msg)
def assertAlmostEqual(self, first, second, places=None, msg=None, delta=None):
"""Fail if the two objects are unequal as determined by their
difference rounded to the given number of decimal places
(default 7) and comparing to zero, or by comparing that the
between the two objects is more than the given delta.
Note that decimal places (from zero) are usually not the same
as significant digits (measured from the most signficant digit).
If the two objects compare equal then they will automatically
compare almost equal.
"""
if first == second:
# shortcut
return
if delta is not None and places is not None:
raise TypeError("specify delta or places not both")
if delta is not None:
if abs(first - second) <= delta:
return
standardMsg = '%s != %s within %s delta' % (safe_repr(first),
safe_repr(second),
safe_repr(delta))
else:
if places is None:
places = 7
if round(abs(second-first), places) == 0:
return
standardMsg = '%s != %s within %r places' % (safe_repr(first),
safe_repr(second),
places)
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
def assertNotAlmostEqual(self, first, second, places=None, msg=None, delta=None):
"""Fail if the two objects are equal as determined by their
difference rounded to the given number of decimal places
(default 7) and comparing to zero, or by comparing that the
between the two objects is less than the given delta.
Note that decimal places (from zero) are usually not the same
as significant digits (measured from the most signficant digit).
Objects that are equal automatically fail.
"""
if delta is not None and places is not None:
raise TypeError("specify delta or places not both")
if delta is not None:
if not (first == second) and abs(first - second) > delta:
return
standardMsg = '%s == %s within %s delta' % (safe_repr(first),
safe_repr(second),
safe_repr(delta))
else:
if places is None:
places = 7
if not (first == second) and round(abs(second-first), places) != 0:
return
standardMsg = '%s == %s within %r places' % (safe_repr(first),
safe_repr(second),
places)
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
    # Synonyms for assertion methods

    # The plurals are undocumented. Keep them that way to discourage use.
    # Do not add more. Do not remove.
    # Going through a deprecation cycle on these would annoy many people.
    assertEquals = assertEqual
    assertNotEquals = assertNotEqual
    assertAlmostEquals = assertAlmostEqual
    assertNotAlmostEquals = assertNotAlmostEqual
    assert_ = assertTrue

    # These fail* assertion method names are pending deprecation and will
    # be a DeprecationWarning in 3.2; http://bugs.python.org/issue2578
    def _deprecate(original_func):
        # Defined in the class body and used only at class-creation time
        # to wrap the legacy fail* aliases below.
        def deprecated_func(*args, **kwargs):
            # Each call points the user at the preferred assert* name.
            warnings.warn(
                ('Please use %s instead.' % original_func.__name__),
                PendingDeprecationWarning, 2)
            return original_func(*args, **kwargs)
        return deprecated_func

    failUnlessEqual = _deprecate(assertEqual)
    failIfEqual = _deprecate(assertNotEqual)
    failUnlessAlmostEqual = _deprecate(assertAlmostEqual)
    failIfAlmostEqual = _deprecate(assertNotAlmostEqual)
    failUnless = _deprecate(assertTrue)
    failUnlessRaises = _deprecate(assertRaises)
    failIf = _deprecate(assertFalse)
    def assertSequenceEqual(self, seq1, seq2,
                            msg=None, seq_type=None, max_diff=80*8):
        """An equality assertion for ordered sequences (like lists and tuples).

        For the purposes of this function, a valid ordered sequence type is one
        which can be indexed, has a length, and has an equality operator.

        Args:
            seq1: The first sequence to compare.
            seq2: The second sequence to compare.
            seq_type: The expected datatype of the sequences, or None if no
                datatype should be enforced.
            msg: Optional message to use on failure instead of a list of
                differences.
            max_diff: Maximum size of the diff, larger diffs are not shown
        """
        if seq_type is not None:
            # Enforce the concrete sequence type when one was requested.
            seq_type_name = seq_type.__name__
            if not isinstance(seq1, seq_type):
                raise self.failureException('First sequence is not a %s: %s'
                                            % (seq_type_name, safe_repr(seq1)))
            if not isinstance(seq2, seq_type):
                raise self.failureException('Second sequence is not a %s: %s'
                                            % (seq_type_name, safe_repr(seq2)))
        else:
            seq_type_name = "sequence"

        # `differing` accumulates the human-readable explanation; None
        # means "no difference found yet".
        differing = None
        try:
            len1 = len(seq1)
        except (TypeError, NotImplementedError):
            differing = 'First %s has no length. Non-sequence?' % (
                seq_type_name)

        if differing is None:
            try:
                len2 = len(seq2)
            except (TypeError, NotImplementedError):
                differing = 'Second %s has no length. Non-sequence?' % (
                    seq_type_name)

        if differing is None:
            if seq1 == seq2:
                return

            seq1_repr = repr(seq1)
            seq2_repr = repr(seq2)
            if len(seq1_repr) > 30:
                seq1_repr = seq1_repr[:30] + '...'
            if len(seq2_repr) > 30:
                seq2_repr = seq2_repr[:30] + '...'
            elements = (seq_type_name.capitalize(), seq1_repr, seq2_repr)
            differing = '%ss differ: %s != %s\n' % elements

            # Scan for the first differing (or unindexable) element.
            # Python 2: `xrange`.
            for i in xrange(min(len1, len2)):
                try:
                    item1 = seq1[i]
                except (TypeError, IndexError, NotImplementedError):
                    differing += ('\nUnable to index element %d of first %s\n' %
                                  (i, seq_type_name))
                    break

                try:
                    item2 = seq2[i]
                except (TypeError, IndexError, NotImplementedError):
                    differing += ('\nUnable to index element %d of second %s\n' %
                                  (i, seq_type_name))
                    break

                if item1 != item2:
                    differing += ('\nFirst differing element %d:\n%s\n%s\n' %
                                  (i, item1, item2))
                    break
            else:
                # Loop finished without finding a differing element.
                if (len1 == len2 and seq_type is None and
                    type(seq1) != type(seq2)):
                    # The sequences are the same, but have differing types.
                    return

                if len1 > len2:
                    differing += ('\nFirst %s contains %d additional '
                                  'elements.\n' % (seq_type_name, len1 - len2))
                    try:
                        differing += ('First extra element %d:\n%s\n' %
                                      (len2, seq1[len2]))
                    except (TypeError, IndexError, NotImplementedError):
                        differing += ('Unable to index element %d '
                                      'of first %s\n' % (len2, seq_type_name))
                elif len1 < len2:
                    differing += ('\nSecond %s contains %d additional '
                                  'elements.\n' % (seq_type_name, len2 - len1))
                    try:
                        differing += ('First extra element %d:\n%s\n' %
                                      (len1, seq2[len1]))
                    except (TypeError, IndexError, NotImplementedError):
                        differing += ('Unable to index element %d '
                                      'of second %s\n' % (len1, seq_type_name))
        standardMsg = differing
        # Append an ndiff of the pretty-printed sequences, truncated
        # according to self.maxDiff.
        diffMsg = '\n' + '\n'.join(
            difflib.ndiff(pprint.pformat(seq1).splitlines(),
                          pprint.pformat(seq2).splitlines()))
        standardMsg = self._truncateMessage(standardMsg, diffMsg)
        msg = self._formatMessage(msg, standardMsg)
        self.fail(msg)
def _truncateMessage(self, message, diff):
max_diff = self.maxDiff
if max_diff is None or len(diff) <= max_diff:
return message + diff
return message + (DIFF_OMITTED % len(diff))
def assertListEqual(self, list1, list2, msg=None):
"""A list-specific equality assertion.
Args:
list1: The first list to compare.
list2: The second list to compare.
msg: Optional message to use on failure instead of a list of
differences.
"""
self.assertSequenceEqual(list1, list2, msg, seq_type=list)
def assertTupleEqual(self, tuple1, tuple2, msg=None):
"""A tuple-specific equality assertion.
Args:
tuple1: The first tuple to compare.
tuple2: The second tuple to compare.
msg: Optional message to use on failure instead of a list of
differences.
"""
self.assertSequenceEqual(tuple1, tuple2, msg, seq_type=tuple)
    def assertSetEqual(self, set1, set2, msg=None):
        """A set-specific equality assertion.

        Args:
            set1: The first set to compare.
            set2: The second set to compare.
            msg: Optional message to use on failure instead of a list of
                differences.

        assertSetEqual uses ducktyping to support
        different types of sets, and is optimized for sets specifically
        (parameters must support a difference method).
        """
        # Compute both asymmetric differences; surface unusable arguments
        # as test failures rather than errors.
        try:
            difference1 = set1.difference(set2)
        except TypeError, e:
            self.fail('invalid type when attempting set difference: %s' % e)
        except AttributeError, e:
            self.fail('first argument does not support set difference: %s' % e)

        try:
            difference2 = set2.difference(set1)
        except TypeError, e:
            self.fail('invalid type when attempting set difference: %s' % e)
        except AttributeError, e:
            self.fail('second argument does not support set difference: %s' % e)

        if not (difference1 or difference2):
            # Both differences empty -> the sets are equal.
            return

        lines = []
        if difference1:
            lines.append('Items in the first set but not the second:')
            for item in difference1:
                lines.append(repr(item))
        if difference2:
            lines.append('Items in the second set but not the first:')
            for item in difference2:
                lines.append(repr(item))

        standardMsg = '\n'.join(lines)
        self.fail(self._formatMessage(msg, standardMsg))
def assertIn(self, member, container, msg=None):
"""Just like self.assertTrue(a in b), but with a nicer default message."""
if member not in container:
standardMsg = '%s not found in %s' % (safe_repr(member),
safe_repr(container))
self.fail(self._formatMessage(msg, standardMsg))
def assertNotIn(self, member, container, msg=None):
"""Just like self.assertTrue(a not in b), but with a nicer default message."""
if member in container:
standardMsg = '%s unexpectedly found in %s' % (safe_repr(member),
safe_repr(container))
self.fail(self._formatMessage(msg, standardMsg))
def assertIs(self, expr1, expr2, msg=None):
"""Just like self.assertTrue(a is b), but with a nicer default message."""
if expr1 is not expr2:
standardMsg = '%s is not %s' % (safe_repr(expr1), safe_repr(expr2))
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNot(self, expr1, expr2, msg=None):
"""Just like self.assertTrue(a is not b), but with a nicer default message."""
if expr1 is expr2:
standardMsg = 'unexpectedly identical: %s' % (safe_repr(expr1),)
self.fail(self._formatMessage(msg, standardMsg))
def assertDictEqual(self, d1, d2, msg=None):
self.assertTrue(isinstance(d1, dict), 'First argument is not a dictionary')
self.assertTrue(isinstance(d2, dict), 'Second argument is not a dictionary')
if d1 != d2:
standardMsg = '%s != %s' % (safe_repr(d1, True), safe_repr(d2, True))
diff = ('\n' + '\n'.join(difflib.ndiff(
pprint.pformat(d1).splitlines(),
pprint.pformat(d2).splitlines())))
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
    def assertDictContainsSubset(self, expected, actual, msg=None):
        """Checks whether actual is a superset of expected.

        Every key in *expected* must be present in *actual* with an equal
        value; extra keys in *actual* are ignored.
        """
        missing = []
        mismatched = []
        # Python 2: iteritems() iterates without building a list.
        for key, value in expected.iteritems():
            if key not in actual:
                missing.append(key)
            elif value != actual[key]:
                mismatched.append('%s, expected: %s, actual: %s' %
                                  (safe_repr(key), safe_repr(value),
                                   safe_repr(actual[key])))

        if not (missing or mismatched):
            return

        standardMsg = ''
        if missing:
            standardMsg = 'Missing: %s' % ','.join(safe_repr(m) for m in
                                                   missing)
        if mismatched:
            if standardMsg:
                standardMsg += '; '
            standardMsg += 'Mismatched values: %s' % ','.join(mismatched)

        self.fail(self._formatMessage(msg, standardMsg))
def assertItemsEqual(self, expected_seq, actual_seq, msg=None):
"""An unordered sequence specific comparison. It asserts that
expected_seq and actual_seq contain the same elements. It is
the equivalent of::
self.assertEqual(sorted(expected_seq), sorted(actual_seq))
Raises with an error message listing which elements of expected_seq
are missing from actual_seq and vice versa if any.
Asserts that each element has the same count in both sequences.
Example:
- [0, 1, 1] and [1, 0, 1] compare equal.
- [0, 0, 1] and [0, 1] compare unequal.
"""
try:
expected = sorted(expected_seq)
actual = sorted(actual_seq)
except TypeError:
# Unsortable items (example: set(), complex(), ...)
expected = list(expected_seq)
actual = list(actual_seq)
missing, unexpected = unorderable_list_difference(
expected, actual, ignore_duplicate=False
)
else:
return self.assertSequenceEqual(expected, actual, msg=msg)
errors = []
if missing:
errors.append('Expected, but missing:\n %s' %
safe_repr(missing))
if unexpected:
errors.append('Unexpected, but present:\n %s' %
safe_repr(unexpected))
if errors:
standardMsg = '\n'.join(errors)
self.fail(self._formatMessage(msg, standardMsg))
    def assertMultiLineEqual(self, first, second, msg=None):
        """Assert that two multi-line strings are equal.

        On mismatch the failure message contains a line-by-line ndiff of
        the two strings, truncated according to maxDiff.
        """
        # Python 2: `basestring` covers both str and unicode.
        self.assertTrue(isinstance(first, basestring), (
            'First argument is not a string'))
        self.assertTrue(isinstance(second, basestring), (
            'Second argument is not a string'))

        if first != second:
            standardMsg = '%s != %s' % (safe_repr(first, True), safe_repr(second, True))
            diff = '\n' + ''.join(difflib.ndiff(first.splitlines(True),
                                                second.splitlines(True)))
            standardMsg = self._truncateMessage(standardMsg, diff)
            self.fail(self._formatMessage(msg, standardMsg))
def assertLess(self, a, b, msg=None):
"""Just like self.assertTrue(a < b), but with a nicer default message."""
if not a < b:
standardMsg = '%s not less than %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertLessEqual(self, a, b, msg=None):
"""Just like self.assertTrue(a <= b), but with a nicer default message."""
if not a <= b:
standardMsg = '%s not less than or equal to %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertGreater(self, a, b, msg=None):
"""Just like self.assertTrue(a > b), but with a nicer default message."""
if not a > b:
standardMsg = '%s not greater than %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertGreaterEqual(self, a, b, msg=None):
"""Just like self.assertTrue(a >= b), but with a nicer default message."""
if not a >= b:
standardMsg = '%s not greater than or equal to %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNone(self, obj, msg=None):
"""Same as self.assertTrue(obj is None), with a nicer default message."""
if obj is not None:
standardMsg = '%s is not None' % (safe_repr(obj),)
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNotNone(self, obj, msg=None):
"""Included for symmetry with assertIsNone."""
if obj is None:
standardMsg = 'unexpectedly None'
self.fail(self._formatMessage(msg, standardMsg))
def assertIsInstance(self, obj, cls, msg=None):
"""Same as self.assertTrue(isinstance(obj, cls)), with a nicer
default message."""
if not isinstance(obj, cls):
standardMsg = '%s is not an instance of %r' % (safe_repr(obj), cls)
self.fail(self._formatMessage(msg, standardMsg))
def assertNotIsInstance(self, obj, cls, msg=None):
"""Included for symmetry with assertIsInstance."""
if isinstance(obj, cls):
standardMsg = '%s is an instance of %r' % (safe_repr(obj), cls)
self.fail(self._formatMessage(msg, standardMsg))
    def assertRaisesRegexp(self, expected_exception, expected_regexp,
                           callable_obj=None, *args, **kwargs):
        """Asserts that the message in a raised exception matches a regexp.

        Args:
            expected_exception: Exception class expected to be raised.
            expected_regexp: Regexp (re pattern object or string) expected
                to be found in error message.
            callable_obj: Function to be called; when omitted or None a
                context manager is returned instead.
            args: Extra args.
            kwargs: Extra kwargs.
        """
        if callable_obj is None:
            # Context-manager form; the regexp is checked on __exit__.
            return _AssertRaisesContext(expected_exception, self, expected_regexp)
        try:
            callable_obj(*args, **kwargs)
        except expected_exception, exc_value:
            # Compile lazily so precompiled patterns pass through
            # (Python 2: `basestring`).
            if isinstance(expected_regexp, basestring):
                expected_regexp = re.compile(expected_regexp)
            if not expected_regexp.search(str(exc_value)):
                raise self.failureException('"%s" does not match "%s"' %
                                            (expected_regexp.pattern, str(exc_value)))
        else:
            if hasattr(expected_exception, '__name__'):
                excName = expected_exception.__name__
            else:
                excName = str(expected_exception)
            raise self.failureException("%s not raised" % excName)
    def assertRegexpMatches(self, text, expected_regexp, msg=None):
        """Fail the test unless the text matches the regular expression."""
        # Accept both pattern strings and precompiled patterns
        # (Python 2: `basestring`).
        if isinstance(expected_regexp, basestring):
            expected_regexp = re.compile(expected_regexp)
        if not expected_regexp.search(text):
            msg = msg or "Regexp didn't match"
            msg = '%s: %r not found in %r' % (msg, expected_regexp.pattern, text)
            raise self.failureException(msg)

    def assertNotRegexpMatches(self, text, unexpected_regexp, msg=None):
        """Fail the test if the text matches the regular expression."""
        if isinstance(unexpected_regexp, basestring):
            unexpected_regexp = re.compile(unexpected_regexp)
        match = unexpected_regexp.search(text)
        if match:
            # Include the offending portion of the text in the message.
            msg = msg or "Regexp matched"
            msg = '%s: %r matches %r in %r' % (msg,
                                               text[match.start():match.end()],
                                               unexpected_regexp.pattern,
                                               text)
            raise self.failureException(msg)
class FunctionTestCase(TestCase):
    """A test case that wraps a test function.

    This is useful for slipping pre-existing test functions into the
    unittest framework. Optionally, set-up and tidy-up functions can be
    supplied. As with TestCase, the tidy-up ('tearDown') function will
    always be called if the set-up ('setUp') function ran successfully.
    """

    def __init__(self, testFunc, setUp=None, tearDown=None, description=None):
        super(FunctionTestCase, self).__init__()
        self._setUpFunc = setUp
        self._tearDownFunc = tearDown
        self._testFunc = testFunc
        self._description = description

    def setUp(self):
        # Delegate to the wrapped fixture function when one was supplied.
        if self._setUpFunc is not None:
            self._setUpFunc()

    def tearDown(self):
        if self._tearDownFunc is not None:
            self._tearDownFunc()

    def runTest(self):
        self._testFunc()

    def id(self):
        return self._testFunc.__name__

    def __eq__(self, other):
        # Equal when every wrapped callable and the description match.
        if not isinstance(other, self.__class__):
            return NotImplemented

        return self._setUpFunc == other._setUpFunc and \
               self._tearDownFunc == other._tearDownFunc and \
               self._testFunc == other._testFunc and \
               self._description == other._description

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        return hash((type(self), self._setUpFunc, self._tearDownFunc,
                     self._testFunc, self._description))

    def __str__(self):
        return "%s (%s)" % (strclass(self.__class__),
                            self._testFunc.__name__)

    def __repr__(self):
        return "<%s testFunc=%s>" % (strclass(self.__class__),
                                     self._testFunc)

    def shortDescription(self):
        # Prefer the explicit description; fall back to the first line of
        # the wrapped function's docstring.
        if self._description is not None:
            return self._description
        doc = self._testFunc.__doc__
        return doc and doc.split("\n")[0].strip() or None
| bsd-3-clause |
theoryno3/scikit-learn | examples/linear_model/plot_bayesian_ridge.py | 248 | 2588 | """
=========================
Bayesian Ridge Regression
=========================
Computes a Bayesian Ridge Regression on a synthetic dataset.
See :ref:`bayesian_ridge_regression` for more information on the regressor.
Compared to the OLS (ordinary least squares) estimator, the coefficient
weights are slightly shifted toward zeros, which stabilises them.
As the prior on the weights is a Gaussian prior, the histogram of the
estimated weights is Gaussian.
The estimation of the model is done by iteratively maximizing the
marginal log-likelihood of the observations.
"""
print(__doc__)

import numpy as np
import matplotlib.pyplot as plt
from scipy import stats

from sklearn.linear_model import BayesianRidge, LinearRegression

###############################################################################
# Generating simulated data with Gaussian weights
np.random.seed(0)  # fixed seed for a reproducible figure
n_samples, n_features = 100, 100
X = np.random.randn(n_samples, n_features)  # Create Gaussian data
# Create weights with a precision lambda_ of 4.
lambda_ = 4.
w = np.zeros(n_features)
# Only keep 10 weights of interest
relevant_features = np.random.randint(0, n_features, 10)
for i in relevant_features:
    w[i] = stats.norm.rvs(loc=0, scale=1. / np.sqrt(lambda_))
# Create noise with a precision alpha of 50.
alpha_ = 50.
noise = stats.norm.rvs(loc=0, scale=1. / np.sqrt(alpha_), size=n_samples)
# Create the target
y = np.dot(X, w) + noise

###############################################################################
# Fit the Bayesian Ridge Regression and an OLS for comparison
clf = BayesianRidge(compute_score=True)
clf.fit(X, y)

ols = LinearRegression()
ols.fit(X, y)

###############################################################################
# Plot true weights, estimated weights and histogram of the weights
plt.figure(figsize=(6, 5))
plt.title("Weights of the model")
plt.plot(clf.coef_, 'b-', label="Bayesian Ridge estimate")
plt.plot(w, 'g-', label="Ground truth")
plt.plot(ols.coef_, 'r--', label="OLS estimate")
plt.xlabel("Features")
plt.ylabel("Values of the weights")
plt.legend(loc="best", prop=dict(size=12))

plt.figure(figsize=(6, 5))
plt.title("Histogram of the weights")
plt.hist(clf.coef_, bins=n_features, log=True)
plt.plot(clf.coef_[relevant_features], 5 * np.ones(len(relevant_features)),
         'ro', label="Relevant features")
plt.ylabel("Features")
plt.xlabel("Values of the weights")
plt.legend(loc="lower left")

# Marginal log-likelihood maximized at each iteration of the fit.
plt.figure(figsize=(6, 5))
plt.title("Marginal log-likelihood")
plt.plot(clf.scores_)
plt.ylabel("Score")
plt.xlabel("Iterations")
plt.show()
| bsd-3-clause |
mikepea/fullerite | src/diamond/collectors/mysqlstat/mysql55.py | 24 | 8260 | # coding=utf-8
"""
Diamond collector that monitors relevant MySQL performance_schema values
For now only monitors replication load
[Blog](http://bit.ly/PbSkbN) announcement.
[Snippet](http://bit.ly/SHwYhT) to build example graph.
#### Dependencies
* MySQLdb
* MySQL 5.5.3+
"""
from __future__ import division
try:
import MySQLdb
from MySQLdb import MySQLError
except ImportError:
MySQLdb = None
import diamond
import time
import re
class MySQLPerfCollector(diamond.collector.Collector):
    def process_config(self):
        """Initialise per-run state and the wait-event -> metric-name maps.

        `monitors` maps a replication thread kind ('slave_sql'/'slave_io')
        to a dict of {performance_schema event_name prefix: short metric
        name}. Also normalises the `hosts` setting to a list and folds
        the legacy single-host config keys into it.
        """
        super(MySQLPerfCollector, self).process_config()
        self.db = None
        # Last observed cumulative counters, keyed by thread name; used to
        # compute deltas between successive collections.
        self.last_wait_count = {}
        self.last_wait_sum = {}
        self.last_timestamp = {}
        self.last_data = {}
        self.monitors = {
            'slave_sql': {
                'wait/synch/cond/sql/MYSQL_RELAY_LOG::update_cond':
                    'wait_for_update',
                'wait/io/file/innodb/innodb_data_file':
                    'innodb_data_file',
                'wait/io/file/innodb/innodb_log_file':
                    'innodb_log_file',
                'wait/io/file/myisam/dfile':
                    'myisam_dfile',
                'wait/io/file/myisam/kfile':
                    'myisam_kfile',
                'wait/io/file/sql/binlog':
                    'binlog',
                'wait/io/file/sql/relay_log_info':
                    'relaylog_info',
                'wait/io/file/sql/relaylog':
                    'relaylog',
                'wait/synch/mutex/innodb':
                    'innodb_mutex',
                'wait/synch/mutex':
                    'other_mutex',
                'wait/synch/rwlock':
                    'rwlocks',
                'wait/io':
                    'other_io',
            },
            'slave_io': {
                'wait/io/file/sql/relaylog_index':
                    'relaylog_index',
                'wait/synch/mutex/sql/MYSQL_RELAY_LOG::LOCK_index':
                    'relaylog_index_lock',
                'wait/synch/mutex/sql/Master_info::data_lock':
                    'master_info_lock',
                'wait/synch/mutex/mysys/IO_CACHE::append_buffer_lock':
                    'append_buffer_lock',
                'wait/synch/mutex/sql/LOG::LOCK_log':
                    'log_lock',
                'wait/io/file/sql/master_info':
                    'master_info',
                'wait/io/file/sql/relaylog':
                    'relaylog',
                'wait/synch/mutex':
                    'other_mutex',
                'wait/synch/rwlock':
                    'rwlocks',
                'wait/io':
                    'other_io',
            }
        }

        # Accept a single host string as well as a list.
        if self.config['hosts'].__class__.__name__ != 'list':
            self.config['hosts'] = [self.config['hosts']]

        # Move legacy config format to new format
        if 'host' in self.config:
            hoststr = "%s:%s@%s:%s/%s" % (
                self.config['user'],
                self.config['passwd'],
                self.config['host'],
                self.config['port'],
                self.config['db'],
            )
            self.config['hosts'].append(hoststr)
def get_default_config_help(self):
config_help = super(MySQLPerfCollector, self).get_default_config_help()
config_help.update({
'hosts': 'List of hosts to collect from. Format is '
+ 'yourusername:yourpassword@host:'
+ 'port/performance_schema[/nickname]',
'slave': 'Collect Slave Replication Metrics',
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(MySQLPerfCollector, self).get_default_config()
config.update({
'path': 'mysql',
# Connection settings
'hosts': [],
'slave': 'False',
})
return config
def connect(self, params):
if MySQLdb is None:
self.log.error('Unable to import MySQLdb')
return
try:
self.db = MySQLdb.connect(**params)
except MySQLError, e:
self.log.error('MySQLPerfCollector couldnt connect to database %s',
e)
return {}
self.log.debug('MySQLPerfCollector: Connected to database.')
def query_list(self, query, params):
cursor = self.db.cursor()
cursor.execute(query, params)
return list(cursor.fetchall())
def slave_load(self, nickname, thread):
data = self.query_list("""
SELECT
his.event_name,
his.sum_timer_wait,
his.count_star,
cur.event_name,
UNIX_TIMESTAMP(SYSDATE())
FROM
events_waits_summary_by_thread_by_event_name his
JOIN threads thr USING (thread_id)
JOIN events_waits_current cur USING (thread_id)
WHERE
name = %s
ORDER BY
his.event_name
""", (thread,))
wait_sum = sum([x[1] for x in data])
wait_count = sum([x[2] for x in data])
timestamp = int(time.time())
if 0 in data and len(data[0]) > 5:
cur_event_name, timestamp = data[0][3:]
if thread not in self.last_wait_sum:
# Avoid bogus data
self.last_wait_sum[thread] = wait_sum
self.last_wait_count[thread] = wait_count
self.last_timestamp[thread] = timestamp
self.last_data[thread] = data
return
wait_delta = wait_sum - self.last_wait_sum[thread]
time_delta = (timestamp - self.last_timestamp[thread]) * 1000000000000
if time_delta == 0:
return
# Summarize a few things
thread_name = thread[thread.rfind('/') + 1:]
data.append(['wait/synch/mutex/innodb',
sum([x[1] for x in data if x[0].startswith(
'wait/synch/mutex/innodb')])])
data.append(['wait/synch/mutex',
sum([x[1] for x in data if x[0].startswith(
'wait/synch/mutex')
and x[0] not in self.monitors[thread_name]])
- data[-1][1]])
data.append(['wait/synch/rwlock',
sum([x[1] for x in data if x[0].startswith(
'wait/synch/rwlock')])])
data.append(['wait/io',
sum([x[1] for x in data if x[0].startswith(
'wait/io')
and x[0] not in self.monitors[thread_name]])])
for d in zip(self.last_data[thread], data):
if d[0][0] in self.monitors[thread_name]:
self.publish(nickname + thread_name + '.'
+ self.monitors[thread_name][d[0][0]],
(d[1][1] - d[0][1]) / time_delta * 100)
# Also log what's unaccounted for. This is where Actual Work gets done
self.publish(nickname + thread_name + '.other_work',
float(time_delta - wait_delta) / time_delta * 100)
self.last_wait_sum[thread] = wait_sum
self.last_wait_count[thread] = wait_count
self.last_timestamp[thread] = timestamp
self.last_data[thread] = data
def collect(self):
for host in self.config['hosts']:
matches = re.search(
'^([^:]*):([^@]*)@([^:]*):?([^/]*)/([^/]*)/?(.*)$', host)
if not matches:
continue
params = {}
params['host'] = matches.group(3)
try:
params['port'] = int(matches.group(4))
except ValueError:
params['port'] = 3306
params['db'] = matches.group(5)
params['user'] = matches.group(1)
params['passwd'] = matches.group(2)
nickname = matches.group(6)
if len(nickname):
nickname += '.'
self.connect(params=params)
if self.config['slave']:
self.slave_load(nickname, 'thread/sql/slave_io')
self.slave_load(nickname, 'thread/sql/slave_sql')
self.db.close()
| apache-2.0 |
openatv/enigma2 | lib/python/Plugins/SystemPlugins/ConfigurationBackup/plugin.py | 42 | 8494 | from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Screens.Console import Console
from Components.ActionMap import ActionMap, NumberActionMap
from Components.Pixmap import Pixmap
from Components.Label import Label
from Components.MenuList import MenuList
from Components.config import ConfigSelection, ConfigSubsection, KEY_LEFT, KEY_RIGHT, KEY_0, getConfigListEntry
from Components.ConfigList import ConfigList
from Plugins.Plugin import PluginDescriptor
from Tools.Directories import *
from os import path, makedirs, listdir
from time import localtime
from datetime import date
plugin_path = ""
# FIXME: harddiskmanager has a better overview about available mountpoints!
# Directory the backup archives are written to, per target medium.
BackupPath = {
    "mtd" : "/media/backup",
    "hdd" : "/media/hdd/backup",
    "usb" : "/media/usb/backup",
    "cf" : "/media/cf/backup"
}

# Mount point checked (via path.ismount) before writing to the medium.
MountPoints = {
    "mtd" : "/media/backup",
    "hdd" : "/media/hdd",
    "usb" : "/media/usb",
    "cf" : "/media/cf"
}
class BackupSetup(Screen):
    """Setup screen: pick what to back up (settings / var / skin) and
    the target medium; colour buttons trigger backup (blue) and
    restore (yellow)."""
    skin = """
        <screen position="135,144" size="450,300" title="Backup and Restore" >
            <widget name="config" position="10,10" size="430,240" />
            <widget name="cancel" position="10,255" size="100,40" pixmap="~/red.png" transparent="1" alphatest="on" />
            <widget name="canceltext" position="0,0" size="0,0" valign="center" halign="center" zPosition="2" font="Regular;20" transparent="1" foregroundColor="black" />
            <widget name="ok" position="120,255" size="100,40" pixmap="~/green.png" transparent="1" alphatest="on" />
            <widget name="oktext" position="0,0" size="0,0" valign="center" halign="center" zPosition="2" font="Regular;20" transparent="1" foregroundColor="black" />
            <widget name="restore" position="230,255" size="100,40" pixmap="~/yellow.png" transparent="1" alphatest="on" />
            <widget name="restoretext" position="0,0" size="0,0" valign="center" halign="center" zPosition="2" font="Regular;20" transparent="1" foregroundColor="black" />
            <widget name="backup" position="340,255" size="100,40" pixmap="~/blue.png" transparent="1" alphatest="on" />
            <widget name="backuptext" position="0,0" size="0,0" valign="center" halign="center" zPosition="2" font="Regular;20" transparent="1" foregroundColor="black" />
        </screen>"""

    def keyLeft(self):
        """Forward LEFT to the focused config entry."""
        self["config"].handleKey(KEY_LEFT)

    def keyRight(self):
        """Forward RIGHT to the focused config entry."""
        self["config"].handleKey(KEY_RIGHT)

    def keyNumberGlobal(self, number):
        """Forward a digit key to the focused entry if it is enabled."""
        print "You pressed number", number
        if (self["config"].getCurrent()[1].parent.enabled == True):
            self["config"].handleKey(KEY_0+number)

    def keyCancel(self):
        """Discard pending config changes and close the screen."""
        for x in self["config"].list:
            x[1].cancel()
        self.close()

    def keySave(self):
        """Persist config changes and close the screen."""
        for x in self["config"].list:
            x[1].save()
        self.close()

    def __init__(self, session, args = None):
        """Build labels, pixmaps, the config list and the key maps."""
        Screen.__init__(self, session)
        self.skin_path = plugin_path
        self["oktext"] = Label(_("OK"))
        self["canceltext"] = Label(_("Cancel"))
        self["backuptext"] = Label(_("Backup"))
        self["restoretext"] = Label(_("Restore"))
        self["restore"] = Pixmap()
        self["backup"] = Pixmap()
        self["ok"] = Pixmap()
        self["cancel"] = Pixmap()
        self.path = ""
        self.list = []
        self["config"] = ConfigList(self.list)
        self.createSetup()
        self["actions"] = NumberActionMap(["SetupActions"],
        {
            "ok": self.keySave,
            "cancel": self.keyCancel,
            "left": self.keyLeft,
            "right": self.keyRight
        }, -1)
        self["shortcuts"] = ActionMap(["ShortcutActions"],
        {
            "red": self.keyCancel,
            "green": self.keySave,
            "blue": self.Backup,
            "yellow": self.Restore,
        })

    def createSetup(self):
        """Build the two config entries: backup mode and backup location."""
        print "Creating BackupSetup"
        self.list = [ ]
        self["config"] = ConfigList(self.list)
        self.backup = ConfigSubsection()
        self.backup.type = ConfigSelection(choices = [("settings", _("enigma2 and network")), ("var", _("/var directory")), ("skin", _("/usr/share/enigma2 directory"))], default="settings")
        self.backup.location = ConfigSelection(choices = [("mtd", _("Backup")), ("hdd", _("Harddisk")), ("usb", _("USB Stick")), ("cf", _("CF Drive"))])
        self.list.append(getConfigListEntry(_("Backup Mode"), self.backup.type))
        self.list.append(getConfigListEntry(_("Backup Location"), self.backup.location))

    def createBackupfolders(self):
        """Create the backup directory on the chosen medium if missing."""
        self.path = BackupPath[self.backup.location.value]
        print "Creating Backup Folder if not already there..."
        if (path.exists(self.path) == False):
            makedirs(self.path)

    def Backup(self):
        """Ask for confirmation, then run the backup via runBackup."""
        print "this will start the backup now!"
        self.session.openWithCallback(self.runBackup, MessageBox, _("Do you want to backup now?\nAfter pressing OK, please wait!"))

    def Restore(self):
        """Open the restore menu for the selected medium."""
        print "this will start the restore now!"
        self.session.open(RestoreMenu, self.backup)

    def runBackup(self, result):
        """MessageBox callback: tar the selected content onto the medium.

        The archive is named YYYY-MM-DD_<mode>_backup.tar.gz; the tar
        command runs in an on-screen Console.
        """
        if result:
            if path.ismount(MountPoints[self.backup.location.value]):
                self.createBackupfolders()
                d = localtime()
                dt = date(d.tm_year, d.tm_mon, d.tm_mday)
                self.path = BackupPath[self.backup.location.value]
                if self.backup.type.value == "settings":
                    print "Backup Mode: Settings"
                    self.session.open(Console, title = "Backup running", cmdlist = ["tar -czvf " + self.path + "/" + str(dt) + "_settings_backup.tar.gz /etc/enigma2/ /etc/network/interfaces /etc/wpa_supplicant.conf"])
                elif self.backup.type.value == "var":
                    print "Backup Mode: var"
                    self.session.open(Console, title = "Backup running", cmdlist = [ "tar -czvf " + self.path + "/" + str(dt) + "_var_backup.tar.gz /var/"])
                elif self.backup.type.value == "skin":
                    print "Backup Mode: skin"
                    self.session.open(Console, title ="Backup running", cmdlist = [ "tar -czvf " + self.path + "/" + str(dt) + "_skin_backup.tar.gz /usr/share/enigma2/"])
            else:
                self.session.open(MessageBox, _("Sorry your Backup destination does not exist\n\nPlease choose an other one."), MessageBox.TYPE_INFO)
class RestoreMenu(Screen):
    """List the *.tar.gz backups found on the chosen medium and restore
    the selected one (enigma2 is killed afterwards so it restarts)."""
    skin = """
        <screen position="135,144" size="450,300" title="Restore Backups" >
            <widget name="filelist" position="10,10" size="430,240" scrollbarMode="showOnDemand" />
            <widget name="cancel" position="120,255" size="100,40" pixmap="~/red.png" transparent="1" alphatest="on" />
            <widget name="canceltext" position="0,0" size="0,0" valign="center" halign="center" zPosition="2" font="Regular;20" transparent="1" foregroundColor="black" />
            <widget name="restore" position="230,255" size="100,40" pixmap="~/yellow.png" transparent="1" alphatest="on" />
            <widget name="restoretext" position="0,0" size="0,0" valign="center" halign="center" zPosition="2" font="Regular;20" transparent="1" foregroundColor="black" />
        </screen>"""

    def __init__(self, session, backup):
        """`backup` is the ConfigSubsection built by BackupSetup; only
        its .location value is used here."""
        Screen.__init__(self, session)
        self.skin_path = plugin_path
        self.backup = backup
        self["canceltext"] = Label(_("Cancel"))
        self["restoretext"] = Label(_("Restore"))
        self["restore"] = Pixmap()
        self["cancel"] = Pixmap()
        self.sel = []
        self.val = []
        self.entry = False      # becomes True once at least one archive is listed
        self.exe = False        # guards against starting a second restore
        self.path = ""
        self["actions"] = NumberActionMap(["SetupActions"],
        {
            "ok": self.KeyOk,
            "cancel": self.keyCancel
        }, -1)
        self["shortcuts"] = ActionMap(["ShortcutActions"],
        {
            "red": self.keyCancel,
            "yellow": self.KeyOk,
        })
        self.flist = []
        self["filelist"] = MenuList(self.flist)
        self.fill_list()

    def fill_list(self):
        """Scan the backup directory and list every *.tar.gz archive."""
        self.flist = []
        self.path = BackupPath[self.backup.location.value]
        if (path.exists(self.path) == False):
            makedirs(self.path)
        for file in listdir(self.path):
            if (file.endswith(".tar.gz")):
                self.flist.append((file))
                self.entry = True
                self["filelist"].l.setList(self.flist)

    def KeyOk(self):
        """Confirm and start the restore of the highlighted archive."""
        if (self.exe == False) and (self.entry == True):
            self.sel = self["filelist"].getCurrent()
            # NOTE(review): self.val lacks the "/" separator that
            # startRestore inserts, and appears otherwise unused — verify.
            self.val = self.path + self.sel
            self.session.openWithCallback(self.startRestore, MessageBox, _("are you sure you want to restore\nfollowing backup:\n" + self.sel + "\nEnigma2 will restart after the restore"))

    def keyCancel(self):
        """Close the restore menu."""
        self.close()

    def startRestore(self, ret = False):
        """MessageBox callback: untar over / and kill enigma2 to restart."""
        if (ret == True):
            self.exe = True
            self.session.open(Console, title = "Restore running", cmdlist = ["tar -xzvf " + self.path + "/" + self.sel + " -C /", "killall -9 enigma2"])

    def Exit(self):
        """Close the restore menu."""
        self.close()
def BackupMain(session, **kwargs):
    """Plugin entry point: open the backup/restore setup screen."""
    session.open(BackupSetup)
def Plugins(path, **kwargs):
    """enigma2 plugin hook: remember the install path (used by the
    screens to resolve their skin pixmaps) and describe the plugin."""
    global plugin_path
    plugin_path = path
    return PluginDescriptor(name="Backup/Restore", description="Backup and Restore your Settings", icon="backup.png", where = PluginDescriptor.WHERE_PLUGINMENU, fnc=BackupMain)
| gpl-2.0 |
timqian/sms-tools | lectures/8-Sound-transformations/plots-code/sineModelFreqScale-orchestra.py | 21 | 2666 | import numpy as np
import matplotlib.pyplot as plt
from scipy.signal import hamming, hanning, triang, blackmanharris, resample
import math
import sys, os, functools, time
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../../software/models/'))
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../../software/transformations/'))
import sineModel as SM
import stft as STFT
import utilFunctions as UF
import sineTransformations as SMT
# --- analysis ---------------------------------------------------------
(fs, x) = UF.wavread('../../../sounds/orchestra.wav')
w = np.hamming(801)          # analysis window
N = 2048                     # FFT size
t = -90                      # peak-detection threshold in dB
minSineDur = .005            # minimum sine-track duration (s)
maxnSines = 150              # maximum simultaneous sine tracks
freqDevOffset = 20           # allowed frequency deviation at 0 Hz
freqDevSlope = 0.02          # deviation growth with frequency
Ns = 512                     # synthesis FFT size
H = Ns/4                     # hop size; integer division under Python 2 — confirm if ported
mX, pX = STFT.stftAnal(x, fs, w, N, H)
tfreq, tmag, tphase = SM.sineModelAnal(x, fs, w, N, H, t, maxnSines, minSineDur, freqDevOffset, freqDevSlope)

# --- transformation: scale frequencies (x0.8 at t=0 to x1.2 at the end)
freqScaling = np.array([0, .8, 1, 1.2])
ytfreq = SMT.sineFreqScaling(tfreq, freqScaling)

# --- synthesis of the transformed tracks (phases regenerated) ---------
y = SM.sineModelSynth(ytfreq, tmag, np.array([]), Ns, H, fs)
mY, pY = STFT.stftAnal(y, fs, w, N, H)
UF.wavwrite(y,fs, 'sineModelFreqScale-orchestra.wav')

# --- plotting ---------------------------------------------------------
maxplotfreq = 4000.0
plt.figure(1, figsize=(9.5, 7))

# input waveform
plt.subplot(4,1,1)
plt.plot(np.arange(x.size)/float(fs), x, 'b')
plt.axis([0,x.size/float(fs),min(x),max(x)])
plt.title('x (orchestra.wav)')

# original sine tracks over the input spectrogram
plt.subplot(4,1,2)
numFrames = int(tfreq[:,0].size)
frmTime = H*np.arange(numFrames)/float(fs)
tracks = tfreq*np.less(tfreq, maxplotfreq)
tracks[tracks<=0] = np.nan           # hide silent/out-of-range bins
plt.plot(frmTime, tracks, color='k', lw=1)
plt.autoscale(tight=True)
plt.title('sine frequencies')
maxplotbin = int(N*maxplotfreq/fs)
numFrames = int(mX[:,0].size)
frmTime = H*np.arange(numFrames)/float(fs)
binFreq = np.arange(maxplotbin+1)*float(fs)/N
plt.pcolormesh(frmTime, binFreq, np.transpose(mX[:,:maxplotbin+1]))
plt.autoscale(tight=True)

# frequency-scaled tracks over the output spectrogram
plt.subplot(4,1,3)
numFrames = int(ytfreq[:,0].size)
frmTime = H*np.arange(numFrames)/float(fs)
tracks = ytfreq*np.less(ytfreq, maxplotfreq)
tracks[tracks<=0] = np.nan
plt.plot(frmTime, tracks, color='k', lw=1)
plt.autoscale(tight=True)
plt.title('freq-scaled sine frequencies')
maxplotbin = int(N*maxplotfreq/fs)
numFrames = int(mY[:,0].size)
frmTime = H*np.arange(numFrames)/float(fs)
binFreq = np.arange(maxplotbin+1)*float(fs)/N
plt.pcolormesh(frmTime, binFreq, np.transpose(mY[:,:maxplotbin+1]))
plt.autoscale(tight=True)

# output waveform
plt.subplot(4,1,4)
plt.plot(np.arange(y.size)/float(fs), y, 'b')
plt.axis([0,y.size/float(fs),min(y),max(y)])
plt.title('y')

plt.tight_layout()
plt.savefig('sineModelFreqScale-orchestra.png')
plt.show()
| agpl-3.0 |
MediffRobotics/DeepRobotics | DeepLearnMaterials/lstm_tfl.py | 1 | 1796 | # -*- coding: utf-8 -*-
"""
Simple example using LSTM recurrent neural network to classify IMDB
sentiment dataset.
References:
- Long Short Term Memory, Sepp Hochreiter & Jurgen Schmidhuber, Neural
Computation 9(8): 1735-1780, 1997.
- Andrew L. Maas, Raymond E. Daly, Peter T. Pham, Dan Huang, Andrew Y. Ng,
and Christopher Potts. (2011). Learning Word Vectors for Sentiment
Analysis. The 49th Annual Meeting of the Association for Computational
Linguistics (ACL 2011).
Links:
- http://deeplearning.cs.cmu.edu/pdfs/Hochreiter97_lstm.pdf
- http://ai.stanford.edu/~amaas/data/sentiment/
"""
from __future__ import division, print_function, absolute_import
import tflearn
from tflearn.data_utils import to_categorical, pad_sequences
from tflearn.datasets import imdb
# IMDB Dataset loading
train, test, _ = imdb.load_data(path='imdb.pkl', n_words=10000,
                                valid_portion=0.99)
trainX, trainY = train
#testX, testY = test
# NOTE(review): evaluation deliberately reuses the training split (the
# real test split is commented out above) — confirm this is intended.
testX, testY = train

# Data preprocessing
# Sequence padding: truncate/zero-pad every review to 100 tokens
trainX = pad_sequences(trainX, maxlen=100, value=0.)
testX = pad_sequences(testX, maxlen=100, value=0.)
# Converting labels to binary vectors (one-hot, 2 classes)
trainY = to_categorical(trainY, nb_classes=2)
testY = to_categorical(testY, nb_classes=2)

# Network building: embedding -> LSTM -> softmax classifier
net = tflearn.input_data([None, 100])
net = tflearn.embedding(net, input_dim=10000, output_dim=128)
net = tflearn.lstm(net, 128, dropout=0.8)
net = tflearn.fully_connected(net, 2, activation='softmax')
net = tflearn.regression(net, optimizer='adam', learning_rate=0.001,
                         loss='categorical_crossentropy')

# Training (single epoch, minibatches of 32)
model = tflearn.DNN(net, tensorboard_verbose=0)
model.fit(trainX, trainY, n_epoch=1,validation_set=(testX, testY), show_metric=True,
          batch_size=32)
| gpl-3.0 |
shubhdev/edxOnBaadal | openedx/core/djangoapps/profile_images/views.py | 23 | 6206 | """
This module implements the upload and remove endpoints of the profile image api.
"""
from contextlib import closing
import datetime
import logging
from django.utils.translation import ugettext as _
from django.utils.timezone import utc
from rest_framework import permissions, status
from rest_framework.parsers import MultiPartParser, FormParser
from rest_framework.response import Response
from rest_framework.views import APIView
from openedx.core.djangoapps.user_api.errors import UserNotFound
from openedx.core.lib.api.authentication import (
OAuth2AuthenticationAllowInactiveUser,
SessionAuthenticationAllowInactiveUser,
)
from openedx.core.lib.api.permissions import IsUserInUrl, IsUserInUrlOrStaff
from openedx.core.djangoapps.user_api.accounts.image_helpers import get_profile_image_names, set_has_profile_image
from .images import validate_uploaded_image, create_profile_images, remove_profile_images, ImageValidationError
log = logging.getLogger(__name__)
LOG_MESSAGE_CREATE = 'Generated and uploaded images %(image_names)s for user %(user_id)s'
LOG_MESSAGE_DELETE = 'Deleted images %(image_names)s for user %(user_id)s'
def _make_upload_dt():
    """
    Generate a server-side timestamp for the upload. This is in a separate
    function so its behavior can be overridden in tests.
    """
    naive_now = datetime.datetime.utcnow()
    return naive_now.replace(tzinfo=utc)
class ProfileImageUploadView(APIView):
    """
    **Use Cases**

        Upload an image to be used for the user's profile.

        The requesting user must be signed in. The signed in user can only
        upload his or her own profile image.

    **Example Requests**

        POST /api/profile_images/v1/{username}/upload

    **Response for POST**

        If the requesting user tries to upload the image for a different user:

        * If the requesting user has staff access, the request returns a 403
          error.

        * If the requesting user does not have staff access, the request
          returns a 404 error.

        If no user matches the "username" parameter, the request returns a 404
        error.

        If the upload could not be performed, the request returns a 400 error
        with details.

        If the upload is successful, the request returns a 204 status with no
        additional content.
    """
    parser_classes = (MultiPartParser, FormParser,)
    authentication_classes = (OAuth2AuthenticationAllowInactiveUser, SessionAuthenticationAllowInactiveUser)
    permission_classes = (permissions.IsAuthenticated, IsUserInUrl)

    def post(self, request, username):
        """
        POST /api/profile_images/v1/{username}/upload

        Validate the uploaded file, generate all profile image sizes,
        store them, and flag the account as having a profile image.
        """
        # Validate the request: ensure a file was actually sent.
        if 'file' not in request.FILES:
            return Response(
                {
                    "developer_message": u"No file provided for profile image",
                    "user_message": _(u"No file provided for profile image"),
                },
                status=status.HTTP_400_BAD_REQUEST
            )

        # process the upload.
        uploaded_file = request.FILES['file']

        # no matter what happens, delete the temporary file when we're done
        with closing(uploaded_file):

            # image file validation.
            try:
                validate_uploaded_image(uploaded_file)
            except ImageValidationError as error:
                return Response(
                    {"developer_message": error.message, "user_message": error.user_message},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            # generate profile pic and thumbnails and store them
            profile_image_names = get_profile_image_names(username)
            create_profile_images(uploaded_file, profile_image_names)

            # update the user account to reflect that a profile image is available.
            set_has_profile_image(username, True, _make_upload_dt())

            log.info(
                LOG_MESSAGE_CREATE,
                {'image_names': profile_image_names.values(), 'user_id': request.user.id}
            )

        # send client response.
        return Response(status=status.HTTP_204_NO_CONTENT)
class ProfileImageRemoveView(APIView):
    """
    **Use Cases**

        Remove all of the profile images associated with the user's account.

        The requesting user must be signed in.

        Users with staff access can remove profile images for other user
        accounts.

        Users without staff access can only remove their own profile images.

    **Example Requests**

        POST /api/profile_images/v1/{username}/remove

    **Response for POST**

        Requesting users who do not have staff access and try to remove
        another user's profile image receive a 404 error.

        If no user matches the "username" parameter, the request returns a 404
        error.

        If the request could not remove the image, the request returns a 400
        error with details.

        If the request successfully removes the image, the request returns a
        204 status with no additional content.
    """
    authentication_classes = (OAuth2AuthenticationAllowInactiveUser, SessionAuthenticationAllowInactiveUser)
    permission_classes = (permissions.IsAuthenticated, IsUserInUrlOrStaff)

    def post(self, request, username):  # pylint: disable=unused-argument
        """
        POST /api/profile_images/v1/{username}/remove

        Clear the has-profile-image flag first, then delete the stored
        files; a missing user yields 404.
        """
        try:
            # update the user account to reflect that the images were removed.
            set_has_profile_image(username, False)

            # remove physical files from storage.
            profile_image_names = get_profile_image_names(username)
            remove_profile_images(profile_image_names)

            log.info(
                LOG_MESSAGE_DELETE,
                {'image_names': profile_image_names.values(), 'user_id': request.user.id}
            )
        except UserNotFound:
            return Response(status=status.HTTP_404_NOT_FOUND)

        # send client response.
        return Response(status=status.HTTP_204_NO_CONTENT)
| agpl-3.0 |
AOKP/kernel_samsung_jf | tools/perf/scripts/python/netdev-times.py | 11271 | 15048 | # Display a process of packets and processed time.
# It helps us to investigate networking or network device.
#
# options
# tx: show only tx chart
# rx: show only rx chart
# dev=: show only thing related to specified device
# debug: work with debug mode. It shows buffer status.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
all_event_list = []; # insert all tracepoint event related with this script
irq_dic = {}; # key is cpu and value is a list which stacks irqs
              # which raise NET_RX softirq
net_rx_dic = {}; # key is cpu and value include time of NET_RX softirq-entry
                 # and a list which stacks receive
receive_hunk_list = []; # a list which include a sequence of receive events
rx_skb_list = []; # received packet list for matching
                  # skb_copy_datagram_iovec

buffer_budget = 65536; # the budget of rx_skb_list, tx_queue_list and
                       # tx_xmit_list
of_count_rx_skb_list = 0; # overflow count

tx_queue_list = []; # list of packets which pass through dev_queue_xmit
of_count_tx_queue_list = 0; # overflow count

tx_xmit_list = []; # list of packets which pass through dev_hard_start_xmit
of_count_tx_xmit_list = 0; # overflow count

tx_free_list = []; # list of packets which is freed

# options (set from sys.argv in trace_begin)
show_tx = 0;
show_rx = 0;
dev = 0; # store a name of device specified by option "dev="
debug = 0;

# indices of event_info tuple
EINFO_IDX_NAME= 0
EINFO_IDX_CONTEXT=1
EINFO_IDX_CPU= 2
EINFO_IDX_TIME= 3
EINFO_IDX_PID= 4
EINFO_IDX_COMM= 5
# Calculate a time interval(msec) from src(nsec) to dst(nsec)
def diff_msec(src, dst):
    """Return the interval from *src* to *dst*, converted from
    nanoseconds to milliseconds (may be negative if dst precedes src)."""
    delta_nsec = dst - src
    return delta_nsec / 1000000.0
# Display a process of transmitting a packet
def print_transmit(hunk):
    """Print one tx record: device, length, enqueue time, time spent in
    the qdisc (queue->xmit) and in the driver (xmit->free).
    Honours the global 'dev' substring filter."""
    if dev != 0 and hunk['dev'].find(dev) < 0:
        return
    print "%7s %5d %6d.%06dsec %12.3fmsec      %12.3fmsec" % \
        (hunk['dev'], hunk['len'],
        nsecs_secs(hunk['queue_t']),
        nsecs_nsecs(hunk['queue_t'])/1000,
        diff_msec(hunk['queue_t'], hunk['xmit_t']),
        diff_msec(hunk['xmit_t'], hunk['free_t']))
# Format for displaying rx packet processing
PF_IRQ_ENTRY= " irq_entry(+%.3fmsec irq=%d:%s)"
PF_SOFT_ENTRY=" softirq_entry(+%.3fmsec)"
PF_NAPI_POLL= " napi_poll_exit(+%.3fmsec %s)"
PF_JOINT= " |"
PF_WJOINT= " | |"
PF_NET_RECV= " |---netif_receive_skb(+%.3fmsec skb=%x len=%d)"
PF_NET_RX= " |---netif_rx(+%.3fmsec skb=%x)"
PF_CPY_DGRAM= " | skb_copy_datagram_iovec(+%.3fmsec %d:%s)"
PF_KFREE_SKB= " | kfree_skb(+%.3fmsec location=%x)"
PF_CONS_SKB= " | consume_skb(+%.3fmsec)"
# Display a process of received packets and interrupts associated with
# a NET_RX softirq
def print_receive(hunk):
    """Pretty-print one receive hunk: the raising hard irqs, the NET_RX
    softirq entry, and every event (napi_poll / netif_receive_skb with
    its eventual copy-to-user or free). All offsets are relative to the
    first irq entry; honours the global 'dev' substring filter."""
    show_hunk = 0
    irq_list = hunk['irq_list']
    cpu = irq_list[0]['cpu']
    base_t = irq_list[0]['irq_ent_t']
    # check if this hunk should be showed
    if dev != 0:
        for i in range(len(irq_list)):
            if irq_list[i]['name'].find(dev) >= 0:
                show_hunk = 1
                break
    else:
        show_hunk = 1
    if show_hunk == 0:
        return
    print "%d.%06dsec cpu=%d" % \
        (nsecs_secs(base_t), nsecs_nsecs(base_t)/1000, cpu)
    # hard-irq section of the tree
    for i in range(len(irq_list)):
        print PF_IRQ_ENTRY % \
            (diff_msec(base_t, irq_list[i]['irq_ent_t']),
            irq_list[i]['irq'], irq_list[i]['name'])
        print PF_JOINT
        irq_event_list = irq_list[i]['event_list']
        for j in range(len(irq_event_list)):
            irq_event = irq_event_list[j]
            if irq_event['event'] == 'netif_rx':
                print PF_NET_RX % \
                    (diff_msec(base_t, irq_event['time']),
                    irq_event['skbaddr'])
                print PF_JOINT
    print PF_SOFT_ENTRY % \
        diff_msec(base_t, hunk['sirq_ent_t'])
    print PF_JOINT
    # softirq section: napi polls and delivered skbs
    event_list = hunk['event_list']
    for i in range(len(event_list)):
        event = event_list[i]
        if event['event_name'] == 'napi_poll':
            print PF_NAPI_POLL % \
                (diff_msec(base_t, event['event_t']), event['dev'])
            if i == len(event_list) - 1:
                print ""
            else:
                print PF_JOINT
        else:
            print PF_NET_RECV % \
                (diff_msec(base_t, event['event_t']), event['skbaddr'],
                event['len'])
            # 'comm' is set when the skb was copied to a process,
            # 'handle' when it was freed/consumed instead.
            if 'comm' in event.keys():
                print PF_WJOINT
                print PF_CPY_DGRAM % \
                    (diff_msec(base_t, event['comm_t']),
                    event['pid'], event['comm'])
            elif 'handle' in event.keys():
                print PF_WJOINT
                if event['handle'] == "kfree_skb":
                    print PF_KFREE_SKB % \
                        (diff_msec(base_t,
                        event['comm_t']),
                        event['location'])
                elif event['handle'] == "consume_skb":
                    print PF_CONS_SKB % \
                        diff_msec(base_t,
                        event['comm_t'])
            print PF_JOINT
def trace_begin():
    """perf hook run before any event: parse script options from
    sys.argv (tx / rx / dev=<name> / debug); with neither tx nor rx
    given, both charts are enabled."""
    global show_tx
    global show_rx
    global dev
    global debug

    for i in range(len(sys.argv)):
        if i == 0:
            continue
        arg = sys.argv[i]
        if arg == 'tx':
            show_tx = 1
        elif arg =='rx':
            show_rx = 1
        elif arg.find('dev=',0, 4) >= 0:
            dev = arg[4:]
        elif arg == 'debug':
            debug = 1

    # if no option, default is to show both tx and rx charts
    if show_tx == 0 and show_rx == 0:
        show_tx = 1
        show_rx = 1
def trace_end():
    """perf hook run after the last event: sort the collected events by
    timestamp, dispatch each to its handle_* routine, then print the
    rx/tx charts and (with 'debug') the buffer statistics."""
    # order all events in time (py2-only cmp-style sort)
    all_event_list.sort(lambda a,b :cmp(a[EINFO_IDX_TIME],
                        b[EINFO_IDX_TIME]))
    # process all events
    for i in range(len(all_event_list)):
        event_info = all_event_list[i]
        name = event_info[EINFO_IDX_NAME]
        if name == 'irq__softirq_exit':
            handle_irq_softirq_exit(event_info)
        elif name == 'irq__softirq_entry':
            handle_irq_softirq_entry(event_info)
        elif name == 'irq__softirq_raise':
            handle_irq_softirq_raise(event_info)
        elif name == 'irq__irq_handler_entry':
            handle_irq_handler_entry(event_info)
        elif name == 'irq__irq_handler_exit':
            handle_irq_handler_exit(event_info)
        elif name == 'napi__napi_poll':
            handle_napi_poll(event_info)
        elif name == 'net__netif_receive_skb':
            handle_netif_receive_skb(event_info)
        elif name == 'net__netif_rx':
            handle_netif_rx(event_info)
        elif name == 'skb__skb_copy_datagram_iovec':
            handle_skb_copy_datagram_iovec(event_info)
        elif name == 'net__net_dev_queue':
            handle_net_dev_queue(event_info)
        elif name == 'net__net_dev_xmit':
            handle_net_dev_xmit(event_info)
        elif name == 'skb__kfree_skb':
            handle_kfree_skb(event_info)
        elif name == 'skb__consume_skb':
            handle_consume_skb(event_info)

    # display receive hunks
    if show_rx:
        for i in range(len(receive_hunk_list)):
            print_receive(receive_hunk_list[i])

    # display transmit hunks
    if show_tx:
        print "   dev    len      Qdisc        " \
            "       netdevice             free"
        for i in range(len(tx_free_list)):
            print_transmit(tx_free_list[i])

    if debug:
        print "debug buffer status"
        print "----------------------------"
        print "xmit Qdisc:remain:%d overflow:%d" % \
            (len(tx_queue_list), of_count_tx_queue_list)
        print "xmit netdevice:remain:%d overflow:%d" % \
            (len(tx_xmit_list), of_count_tx_xmit_list)
        print "receive:remain:%d overflow:%d" % \
            (len(rx_skb_list), of_count_rx_skb_list)
# called from perf, when it finds a corresponding event.
# Each callback just normalises its arguments into an event_info tuple
# (name, context, cpu, time_ns, pid, comm, ...) and appends it to
# all_event_list; the softirq callbacks additionally drop everything
# but NET_RX. Real processing happens later, in trace_end().
def irq__softirq_entry(name, context, cpu, sec, nsec, pid, comm, vec):
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)

def irq__softirq_exit(name, context, cpu, sec, nsec, pid, comm, vec):
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)

def irq__softirq_raise(name, context, cpu, sec, nsec, pid, comm, vec):
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)

def irq__irq_handler_entry(name, context, cpu, sec, nsec, pid, comm,
                           irq, irq_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  irq, irq_name)
    all_event_list.append(event_info)

def irq__irq_handler_exit(name, context, cpu, sec, nsec, pid, comm, irq, ret):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, ret)
    all_event_list.append(event_info)

def napi__napi_poll(name, context, cpu, sec, nsec, pid, comm, napi, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  napi, dev_name)
    all_event_list.append(event_info)

def net__netif_receive_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr,
                           skblen, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, dev_name)
    all_event_list.append(event_info)

def net__netif_rx(name, context, cpu, sec, nsec, pid, comm, skbaddr,
                  skblen, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, dev_name)
    all_event_list.append(event_info)

def net__net_dev_queue(name, context, cpu, sec, nsec, pid, comm,
                       skbaddr, skblen, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, dev_name)
    all_event_list.append(event_info)

def net__net_dev_xmit(name, context, cpu, sec, nsec, pid, comm,
                      skbaddr, skblen, rc, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, rc ,dev_name)
    all_event_list.append(event_info)

def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
                   skbaddr, protocol, location):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, protocol, location)
    all_event_list.append(event_info)

def skb__consume_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr)
    all_event_list.append(event_info)

def skb__skb_copy_datagram_iovec(name, context, cpu, sec, nsec, pid, comm,
                                 skbaddr, skblen):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen)
    all_event_list.append(event_info)
def handle_irq_handler_entry(event_info):
    """Push a record for a newly-entered hardware irq onto its CPU's
    stack in irq_dic (the stack is created on first use)."""
    name, context, cpu, time, pid, comm, irq, irq_name = event_info
    record = {'irq': irq, 'name': irq_name, 'cpu': cpu, 'irq_ent_t': time}
    irq_dic.setdefault(cpu, []).append(record)
def handle_irq_handler_exit(event_info):
(name, context, cpu, time, pid, comm, irq, ret) = event_info
if cpu not in irq_dic.keys():
return
irq_record = irq_dic[cpu].pop()
if irq != irq_record['irq']:
return
irq_record.update({'irq_ext_t':time})
# if an irq doesn't include NET_RX softirq, drop.
if 'event_list' in irq_record.keys():
irq_dic[cpu].append(irq_record)
def handle_irq_softirq_raise(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
if cpu not in irq_dic.keys() \
or len(irq_dic[cpu]) == 0:
return
irq_record = irq_dic[cpu].pop()
if 'event_list' in irq_record.keys():
irq_event_list = irq_record['event_list']
else:
irq_event_list = []
irq_event_list.append({'time':time, 'event':'sirq_raise'})
irq_record.update({'event_list':irq_event_list})
irq_dic[cpu].append(irq_record)
def handle_irq_softirq_entry(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
net_rx_dic[cpu] = {'sirq_ent_t':time, 'event_list':[]}
def handle_irq_softirq_exit(event_info):
    """NET_RX softirq exit: merge the per-CPU hard-IRQ and softirq data.

    Only emits a record if both an irq list and a softirq event list were
    collected for this CPU; otherwise the partial data is discarded.
    """
    (name, context, cpu, time, pid, comm, vec) = event_info
    irq_list = []
    event_list = 0
    if cpu in irq_dic.keys():
        irq_list = irq_dic[cpu]
        del irq_dic[cpu]
    if cpu in net_rx_dic.keys():
        sirq_ent_t = net_rx_dic[cpu]['sirq_ent_t']
        event_list = net_rx_dic[cpu]['event_list']
        del net_rx_dic[cpu]
    if irq_list == [] or event_list == 0:
        return
    rec_data = {'sirq_ent_t':sirq_ent_t, 'sirq_ext_t':time,
                'irq_list':irq_list, 'event_list':event_list}
    # merge information related to a NET_RX softirq
    receive_hunk_list.append(rec_data)
def handle_napi_poll(event_info):
    """Attach a napi_poll event to the CPU's in-progress NET_RX record, if any."""
    (name, context, cpu, time, pid, comm, napi, dev_name) = event_info
    record = net_rx_dic.get(cpu)
    if record is not None:
        record['event_list'].append(
            {'event_name': 'napi_poll', 'dev': dev_name, 'event_t': time})
def handle_netif_rx(event_info):
    """Attach a netif_rx event to the pending hard-IRQ record of this CPU.

    Uses the same pop / extend event_list / push-back pattern as
    handle_irq_softirq_raise so the irq record order is preserved.
    """
    (name, context, cpu, time, pid, comm,
            skbaddr, skblen, dev_name) = event_info
    if cpu not in irq_dic.keys() \
    or len(irq_dic[cpu]) == 0:
        return
    irq_record = irq_dic[cpu].pop()
    if 'event_list' in irq_record.keys():
        irq_event_list = irq_record['event_list']
    else:
        irq_event_list = []
    irq_event_list.append({'time':time, 'event':'netif_rx',
        'skbaddr':skbaddr, 'skblen':skblen, 'dev_name':dev_name})
    irq_record.update({'event_list':irq_event_list})
    irq_dic[cpu].append(irq_record)
def handle_netif_receive_skb(event_info):
    """Record a netif_receive_skb event inside the current NET_RX softirq.

    The record is shared between the softirq's event list and the bounded
    rx_skb_list (newest first); overflow beyond buffer_budget is counted in
    of_count_rx_skb_list.
    """
    global of_count_rx_skb_list
    (name, context, cpu, time, pid, comm,
            skbaddr, skblen, dev_name) = event_info
    if cpu in net_rx_dic.keys():
        rec_data = {'event_name':'netif_receive_skb',
                    'event_t':time, 'skbaddr':skbaddr, 'len':skblen}
        event_list = net_rx_dic[cpu]['event_list']
        event_list.append(rec_data)
        rx_skb_list.insert(0, rec_data)
        if len(rx_skb_list) > buffer_budget:
            rx_skb_list.pop()
            of_count_rx_skb_list += 1
def handle_net_dev_queue(event_info):
    """Record an skb entering a device TX queue (bounded, newest first)."""
    global of_count_tx_queue_list
    (name, context, cpu, time, pid, comm,
     skbaddr, skblen, dev_name) = event_info
    tx_queue_list.insert(0, {'dev': dev_name, 'skbaddr': skbaddr,
                             'len': skblen, 'queue_t': time})
    # Keep the list bounded; count how many records fell off the end.
    if len(tx_queue_list) > buffer_budget:
        tx_queue_list.pop()
        of_count_tx_queue_list += 1
def handle_net_dev_xmit(event_info):
    """On successful transmit, move the skb from the queue list to the xmit list.

    Only rc == 0 (NETDEV_TX_OK) is handled; the xmit list is kept bounded by
    buffer_budget and overflow is counted in of_count_tx_xmit_list.
    """
    global of_count_tx_xmit_list
    (name, context, cpu, time, pid, comm,
            skbaddr, skblen, rc, dev_name) = event_info
    if rc == 0: # NETDEV_TX_OK
        for i in range(len(tx_queue_list)):
            skb = tx_queue_list[i]
            if skb['skbaddr'] == skbaddr:
                skb['xmit_t'] = time
                tx_xmit_list.insert(0, skb)
                del tx_queue_list[i]
                if len(tx_xmit_list) > buffer_budget:
                    tx_xmit_list.pop()
                    of_count_tx_xmit_list += 1
                return
def handle_kfree_skb(event_info):
    """Handle an skb being freed via kfree_skb.

    Checks, in order: the TX queue list (skb dropped before transmit -- just
    remove it), the TX xmit list (transmit complete -- move to the free list
    with the free timestamp), and the RX list (receive processing finished --
    annotate with the consuming task and remove).
    """
    (name, context, cpu, time, pid, comm,
            skbaddr, protocol, location) = event_info
    for i in range(len(tx_queue_list)):
        skb = tx_queue_list[i]
        if skb['skbaddr'] == skbaddr:
            del tx_queue_list[i]
            return
    for i in range(len(tx_xmit_list)):
        skb = tx_xmit_list[i]
        if skb['skbaddr'] == skbaddr:
            skb['free_t'] = time
            tx_free_list.append(skb)
            del tx_xmit_list[i]
            return
    for i in range(len(rx_skb_list)):
        rec_data = rx_skb_list[i]
        if rec_data['skbaddr'] == skbaddr:
            rec_data.update({'handle':"kfree_skb",
                            'comm':comm, 'pid':pid, 'comm_t':time})
            del rx_skb_list[i]
            return
def handle_consume_skb(event_info):
    """Mark a transmitted skb as freed (consume path) and move it to the free list."""
    (name, context, cpu, time, pid, comm, skbaddr) = event_info
    for idx, skb in enumerate(tx_xmit_list):
        if skb['skbaddr'] == skbaddr:
            skb['free_t'] = time
            tx_free_list.append(skb)
            del tx_xmit_list[idx]
            return
def handle_skb_copy_datagram_iovec(event_info):
    """Mark a received skb as delivered to user space and drop it from the RX list."""
    (name, context, cpu, time, pid, comm, skbaddr, skblen) = event_info
    for idx, rec_data in enumerate(rx_skb_list):
        if rec_data['skbaddr'] == skbaddr:
            rec_data.update({'handle':"skb_copy_datagram_iovec",
                             'comm':comm, 'pid':pid, 'comm_t':time})
            del rx_skb_list[idx]
            return
| gpl-2.0 |
MrLoick/python-for-android | python3-alpha/python3-src/Lib/wsgiref/headers.py | 145 | 6682 | """Manage HTTP Response Headers
Much of this module is red-handedly pilfered from email.message in the stdlib,
so portions are Copyright (C) 2001,2002 Python Software Foundation, and were
written by Barry Warsaw.
"""
# Regular expression that matches `special' characters in parameters, the
# existence of which force quoting of the parameter value.
import re
tspecials = re.compile(r'[ \(\)<>@,;:\\"/\[\]\?=]')
def _formatparam(param, value=None, quote=1):
    """Convenience function to format and return a key=value pair.

    This will quote the value if needed or if quote is true.
    """
    if value is None or len(value) == 0:
        return param
    if quote or tspecials.search(value):
        escaped = value.replace('\\', '\\\\').replace('"', r'\"')
        return '%s="%s"' % (param, escaped)
    return '%s=%s' % (param, value)
class Headers:
    """Manage a collection of HTTP response headers.

    Headers are kept as a list of (name, value) tuples, preserving insertion
    order and allowing duplicate field names, as HTTP permits.
    """

    def __init__(self, headers):
        if type(headers) is not list:
            raise TypeError("Headers must be a list of name/value tuples")
        self._headers = headers
        if __debug__:
            # Validate eagerly in debug builds so bad types fail fast.
            for k, v in headers:
                self._convert_string_type(k)
                self._convert_string_type(v)

    def _convert_string_type(self, value):
        """Convert/check value type; only plain str is accepted."""
        if type(value) is str:
            return value
        raise AssertionError("Header names/values must be"
            " of type str (got {0})".format(repr(value)))

    def __len__(self):
        """Return the total number of headers, including duplicates."""
        return len(self._headers)

    def __setitem__(self, name, val):
        """Set the value of a header, replacing any existing occurrences."""
        del self[name]
        self._headers.append(
            (self._convert_string_type(name), self._convert_string_type(val)))

    def __delitem__(self, name):
        """Delete all occurrences of a header, if present.

        Does *not* raise an exception if the header is missing.
        """
        name = self._convert_string_type(name.lower())
        self._headers[:] = [kv for kv in self._headers if kv[0].lower() != name]

    def __getitem__(self, name):
        """Get the first header value for 'name'.

        Return None if the header is missing instead of raising an exception.

        Note that if the header appeared multiple times, exactly which
        occurrence gets returned is undefined.  Use get_all() to get all
        the values matching a header field name.
        """
        return self.get(name)

    def __contains__(self, name):
        """Return true if the message contains the header."""
        return self.get(name) is not None

    def get_all(self, name):
        """Return a list of all the values for the named field.

        These will be sorted in the order they appeared in the original header
        list or were added to this instance, and may contain duplicates.  Any
        fields deleted and re-inserted are always appended to the header list.
        If no fields exist with the given name, returns an empty list.
        """
        name = self._convert_string_type(name.lower())
        return [kv[1] for kv in self._headers if kv[0].lower()==name]

    def get(self, name, default=None):
        """Get the first header value for 'name', or return 'default'."""
        name = self._convert_string_type(name.lower())
        for k,v in self._headers:
            if k.lower()==name:
                return v
        return default

    def keys(self):
        """Return a list of all the header field names.

        These will be sorted in the order they appeared in the original header
        list, or were added to this instance, and may contain duplicates.
        Any fields deleted and re-inserted are always appended to the header
        list.
        """
        return [k for k, v in self._headers]

    def values(self):
        """Return a list of all header values.

        These will be sorted in the order they appeared in the original header
        list, or were added to this instance, and may contain duplicates.
        Any fields deleted and re-inserted are always appended to the header
        list.
        """
        return [v for k, v in self._headers]

    def items(self):
        """Get all the header fields and values as a list of tuples.

        These will be sorted in the order they were in the original header
        list, or were added to this instance, and may contain duplicates.
        Any fields deleted and re-inserted are always appended to the header
        list.
        """
        return self._headers[:]

    def __repr__(self):
        return "Headers(%r)" % self._headers

    def __str__(self):
        """str() returns the formatted headers, complete with end line,
        suitable for direct HTTP transmission."""
        return '\r\n'.join(["%s: %s" % kv for kv in self._headers]+['',''])

    def __bytes__(self):
        return str(self).encode('iso-8859-1')

    def setdefault(self, name, value):
        """Return first matching header value for 'name', or 'value'.

        If there is no header named 'name', add a new header with name 'name'
        and value 'value'."""
        result = self.get(name)
        if result is None:
            self._headers.append((self._convert_string_type(name),
                self._convert_string_type(value)))
            return value
        else:
            return result

    def add_header(self, _name, _value, **_params):
        """Extended header setting.

        _name is the header field to add.  keyword arguments can be used to set
        additional parameters for the header field, with underscores converted
        to dashes.  Normally the parameter will be added as key="value" unless
        value is None, in which case only the key will be added.

        Example:

        h.add_header('content-disposition', 'attachment', filename='bud.gif')

        Note that unlike the corresponding 'email.message' method, this does
        *not* handle '(charset, language, value)' tuples: all values must be
        strings or None.
        """
        parts = []
        if _value is not None:
            _value = self._convert_string_type(_value)
            parts.append(_value)
        for k, v in _params.items():
            k = self._convert_string_type(k)
            if v is None:
                parts.append(k.replace('_', '-'))
            else:
                v = self._convert_string_type(v)
                parts.append(_formatparam(k.replace('_', '-'), v))
        self._headers.append((self._convert_string_type(_name), "; ".join(parts)))
| apache-2.0 |
franek/weboob | modules/bnporc/perso/messages.py | 8 | 3353 | # -*- coding: utf-8 -*-
# Copyright(C) 2012 Laurent Bachelier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from weboob.tools.browser import BasePage, BrokenPageError
from weboob.capabilities.messages import Message, Thread
from weboob.capabilities.base import NotLoaded
from weboob.tools.capabilities.messages.genericArticle import try_drop_tree
import re
from datetime import datetime
from lxml.html import make_links_absolute
__all__ = ['MessagesPage', 'MessagePage']
class MessagesPage(BasePage):
    """Parsed view of the bank's message-list page."""

    def iter_threads(self):
        """Yield a Thread (with a root Message) for each message row.

        Message content is not available on the list page; it is filled in
        later from MessagePage (thread.root.content stays NotLoaded here).
        """
        table = self.parser.select(self.document.getroot(), 'table#listeMessages', 1)
        for tr in table.xpath('./tr'):
            # Only rows styled as read/unread messages are actual messages.
            if tr.attrib.get('class', '') not in ('msgLu', 'msgNonLu'):
                continue
            author = unicode(self.parser.select(tr, 'td.colEmetteur', 1).text)
            link = self.parser.select(tr, 'td.colObjet a', 1)
            date_raw = self.parser.select(tr, 'td.colDate1', 1).attrib['data']
            # The onclick handler holds "(page_id, message_id, unread)".
            jsparams = re.search('\((.+)\)', link.attrib['onclick']).groups()[0]
            jsparams = [i.strip('\'" ') for i in jsparams.split(',')]
            page_id, _id, unread = jsparams
            # this means unread on the website
            unread = False if unread == "false" else True
            # 2012/02/29:01h30min45sec
            dt_match = re.match('(\d+)/(\d+)/(\d+):(\d+)h(\d+)min(\d+)sec', date_raw).groups()
            dt_match = [int(d) for d in dt_match]
            thread = Thread(_id)
            thread._link_id = (page_id, unread)
            thread.date = datetime(*dt_match)
            thread.title = unicode(link.text)
            message = Message(thread, 0)
            message.set_empty_fields(None)
            message.flags = message.IS_HTML
            message.title = thread.title
            message.date = thread.date
            message.sender = author
            message.content = NotLoaded # This is the only thing we are missing
            thread.root = message
            yield thread
class MessagePage(BasePage):
    """Parsed view of a single message page; used only for the body text."""

    def get_content(self):
        """
        Get the message content.

        This page has a date, but it is less precise than the main list page,
        so we only use it for the message content.
        """
        try:
            content = self.parser.select(self.document.getroot(),
                                         'div.txtMessage div.contenu', 1)
        except BrokenPageError:
            # This happens with some old messages (2007)
            content = self.parser.select(self.document.getroot(), 'div.txtMessage', 1)
        # Rewrite relative links so the HTML is usable outside the site,
        # and strip scripts before returning the markup.
        content = make_links_absolute(content, self.url)
        try_drop_tree(self.parser, content, 'script')
        return self.parser.tostring(content)
| agpl-3.0 |
sclabs/sitestatus-nonrel | django/contrib/comments/admin.py | 361 | 3299 | from django.contrib import admin
from django.contrib.comments.models import Comment
from django.utils.translation import ugettext_lazy as _, ungettext
from django.contrib.comments import get_model
from django.contrib.comments.views.moderation import perform_flag, perform_approve, perform_delete
class CommentsAdmin(admin.ModelAdmin):
    """Admin configuration for the built-in Comment model, including bulk
    moderation actions (flag / approve / remove)."""

    fieldsets = (
        (None,
           {'fields': ('content_type', 'object_pk', 'site')}
        ),
        (_('Content'),
           {'fields': ('user', 'user_name', 'user_email', 'user_url', 'comment')}
        ),
        (_('Metadata'),
           {'fields': ('submit_date', 'ip_address', 'is_public', 'is_removed')}
        ),
     )

    list_display = ('name', 'content_type', 'object_pk', 'ip_address', 'submit_date', 'is_public', 'is_removed')
    list_filter = ('submit_date', 'site', 'is_public', 'is_removed')
    date_hierarchy = 'submit_date'
    ordering = ('-submit_date',)
    raw_id_fields = ('user',)
    search_fields = ('comment', 'user__username', 'user_name', 'user_email', 'user_url', 'ip_address')
    actions = ["flag_comments", "approve_comments", "remove_comments"]

    def get_actions(self, request):
        """Filter the available bulk actions based on the user's permissions."""
        actions = super(CommentsAdmin, self).get_actions(request)
        # Only superusers should be able to delete the comments from the DB.
        if not request.user.is_superuser and 'delete_selected' in actions:
            actions.pop('delete_selected')
        # Moderation actions require the can_moderate permission.
        if not request.user.has_perm('comments.can_moderate'):
            if 'approve_comments' in actions:
                actions.pop('approve_comments')
            if 'remove_comments' in actions:
                actions.pop('remove_comments')
        return actions

    def flag_comments(self, request, queryset):
        self._bulk_flag(request, queryset, perform_flag,
                        lambda n: ungettext('flagged', 'flagged', n))
    flag_comments.short_description = _("Flag selected comments")

    def approve_comments(self, request, queryset):
        self._bulk_flag(request, queryset, perform_approve,
                        lambda n: ungettext('approved', 'approved', n))
    approve_comments.short_description = _("Approve selected comments")

    def remove_comments(self, request, queryset):
        self._bulk_flag(request, queryset, perform_delete,
                        lambda n: ungettext('removed', 'removed', n))
    remove_comments.short_description = _("Remove selected comments")

    def _bulk_flag(self, request, queryset, action, done_message):
        """
        Flag, approve, or remove some comments from an admin action. Actually
        calls the `action` argument to perform the heavy lifting.
        """
        n_comments = 0
        for comment in queryset:
            action(request, comment)
            n_comments += 1
        msg = ungettext(u'1 comment was successfully %(action)s.',
                        u'%(count)s comments were successfully %(action)s.',
                        n_comments)
        self.message_user(request, msg % {'count': n_comments, 'action': done_message(n_comments)})
# Only register the default admin if the model is the built-in comment model
# (this won't be true if there's a custom comment app, in which case the
# custom app is expected to register its own admin).
if get_model() is Comment:
    admin.site.register(Comment, CommentsAdmin)
| bsd-3-clause |
SeattleTestbed/repy_v2 | namespace.py | 1 | 39911 | """
<Program>
namespace.py
<Started>
September 2009
<Author>
Justin Samuel
<Purpose>
This is the namespace layer that ensures separation of the namespaces of
untrusted code and our code. It provides a single public function to be
used to setup the context in which untrusted code is exec'd (that is, the
context that is seen as the __builtins__ by the untrusted code).
The general idea is that any function or object that is available between
trusted and untrusted code gets wrapped in a function or object that does
validation when the function or object is used. In general, if user code
is not calling any functions improperly, neither the user code nor our
trusted code should ever notice that the objects and functions they are
dealing with have been wrapped by this namespace layer.
All of our own api functions are wrapped in NamespaceAPIFunctionWrapper
objects whose wrapped_function() method is mapped in to the untrusted
code's context. When called, the wrapped_function() method performs
argument, return value, and exception validation as well as additional
wrapping and unwrapping, as needed, that is specific to the function
that was ultimately being called. If the return value or raised exceptions
are not considered acceptable, a NamespaceViolationError is raised. If the
arguments are not acceptable, a TypeError is raised.
Note that callback functions that are passed from untrusted user code
to trusted code are also wrapped (these are arguments to wrapped API
functions, so we get to wrap them before calling the underlying function).
The reason we wrap these is so that we can intercept calls to the callback
functions and wrap arguments passed to them, making sure that handles
passed as arguments to the callbacks get wrapped before user code sees them.
The function and object wrappers have been defined based on the API as
documented at https://seattle.cs.washington.edu/wiki/RepyLibrary
Example of using this module (this is really the only way to use the module):
import namespace
usercontext = {}
namespace.wrap_and_insert_api_functions(usercontext)
safe.safe_exec(usercode, usercontext)
The above code will result in the dict usercontext being populated with keys
that are the names of the functions available to the untrusted code (such as
'open') and the values are the wrapped versions of the actual functions to be
called (such as 'emulfile.emulated_open').
Note that some functions wrapped by this module lose some python argument
flexibility. Wrapped functions can generally only have keyword args in
situations where the arguments are optional. Using keyword arguments for
required args may not be supported, depending on the implementation of the
specific argument check/wrapping/unwrapping helper functions for that
particular wrapped function. If this becomes a problem, it can be dealt with
by complicating some of the argument checking/wrapping/unwrapping code in
this module to make the checking functions more flexible in how they take
their arguments.
Implementation details:
The majority of the code in this module is made up of helper functions to do
argument checking, etc. for specific wrapped functions.
The most important parts to look at in this module for maintenance and
auditing are the following:
USERCONTEXT_WRAPPER_INFO
The USERCONTEXT_WRAPPER_INFO is a dictionary that defines the API
functions that are wrapped and inserted into the user context when
wrap_and_insert_api_functions() is called.
FILE_OBJECT_WRAPPER_INFO
LOCK_OBJECT_WRAPPER_INFO
TCP_SOCKET_OBJECT_WRAPPER_INFO
TCP_SERVER_SOCKET_OBJECT_WRAPPER_INFO
UDP_SERVER_SOCKET_OBJECT_WRAPPER_INFO
VIRTUAL_NAMESPACE_OBJECT_WRAPPER_INFO
The above four dictionaries define the methods available on the wrapped
objects that are returned by wrapped functions. Additionally, timerhandle
and commhandle objects are wrapped but instances of these do not have any
public methods and so no *_WRAPPER_INFO dictionaries are defined for them.
NamespaceObjectWrapper
NamespaceAPIFunctionWrapper
The above two classes are the only two types of objects that will be
allowed in untrusted code. In fact, instances of NamespaceAPIFunctionWrapper
are never actually allowed in untrusted code. Rather, each function that
is wrapped has a single NamespaceAPIFunctionWrapper instance created
when wrap_and_insert_api_functions() is called and what is actually made
available to the untrusted code is the wrapped_function() method of each
of the corresponding NamespaceAPIFunctionWrapper instances.
NamespaceInternalError
If this error is raised anywhere (along with any other unexpected exceptions),
it should result in termination of the running program (see the except blocks
in NamespaceAPIFunctionWrapper.wrapped_function).
"""
import types
# To check if objects are thread.LockType objects.
import thread
import emulcomm
import emulfile
import emulmisc
import emultimer
import nonportable
import safe # Used to get SafeDict
import tracebackrepy
import virtual_namespace
from exception_hierarchy import *
# Save a copy of a few functions not available at runtime.
_saved_getattr = getattr
_saved_callable = callable
_saved_hash = hash
_saved_id = id
##############################################################################
# Public functions of this module to be called from the outside.
##############################################################################
def wrap_and_insert_api_functions(usercontext):
  """
  This is the main public function in this module at the current time. For
  every entry in USERCONTEXT_WRAPPER_INFO, create a wrapper with that entry's
  restrictions and expose the wrapped function in the given user context dict
  under the API function's name.
  """
  _init_namespace()
  for function_name, function_info in USERCONTEXT_WRAPPER_INFO.items():
    usercontext[function_name] = \
        NamespaceAPIFunctionWrapper(function_info).wrapped_function
##############################################################################
# Helper functions for the above public function.
##############################################################################
# Whether _init_namespace() has already been called.
initialized = False

def _init_namespace():
  """
  Performs one-time initialization of the namespace module.
  """
  global initialized
  # Guard so repeated calls (one per wrap_and_insert_api_functions call)
  # only prepare the object-method wrappers once.
  if not initialized:
    initialized = True
    _prepare_wrapped_functions_for_object_wrappers()
# These dictionaries will ultimately contain keys whose names are allowed
# methods that can be called on the objects and values which are the wrapped
# versions of the functions which are exposed to users. If a dictionary
# is empty, it means no methods can be called on a wrapped object of that type.
# They are populated by _prepare_wrapped_functions_for_object_wrappers().
file_object_wrapped_functions_dict = {}
lock_object_wrapped_functions_dict = {}
tcp_socket_object_wrapped_functions_dict = {}
tcp_server_socket_object_wrapped_functions_dict = {}
udp_server_socket_object_wrapped_functions_dict = {}
virtual_namespace_object_wrapped_functions_dict = {}
def _prepare_wrapped_functions_for_object_wrappers():
  """
  Wraps functions that will be used whenever a wrapped object is created.
  After this has been called, the dictionaries such as
  file_object_wrapped_functions_dict have been populated and therefore can be
  used by functions such as wrap_socket_obj().
  """
  objects_tuples = [
      (FILE_OBJECT_WRAPPER_INFO, file_object_wrapped_functions_dict),
      (LOCK_OBJECT_WRAPPER_INFO, lock_object_wrapped_functions_dict),
      (TCP_SOCKET_OBJECT_WRAPPER_INFO, tcp_socket_object_wrapped_functions_dict),
      (TCP_SERVER_SOCKET_OBJECT_WRAPPER_INFO, tcp_server_socket_object_wrapped_functions_dict),
      (UDP_SERVER_SOCKET_OBJECT_WRAPPER_INFO, udp_server_socket_object_wrapped_functions_dict),
      (VIRTUAL_NAMESPACE_OBJECT_WRAPPER_INFO, virtual_namespace_object_wrapped_functions_dict)]

  for description_dict, wrapped_func_dict in objects_tuples:
    # Each method description becomes a wrapper whose wrapped_function is
    # what gets attached to wrapped objects of that type.
    for function_name, function_info in description_dict.items():
      wrapperobj = NamespaceAPIFunctionWrapper(function_info, is_method=True)
      wrapped_func_dict[function_name] = wrapperobj.wrapped_function
##############################################################################
# Helper functions.
##############################################################################
def _handle_internalerror(message, exitcode):
  """
  Terminate the running program with the given message and exit code. This is
  used rather than tracebackrepy.handle_internalerror directly in order to
  make testing easier (tests can stub out this single indirection point)."""
  tracebackrepy.handle_internalerror(message, exitcode)
def _is_in(obj, sequence):
  """
  A helper function to do identity ("is") checks instead of equality ("==")
  when using X in [A, B, C] type constructs. So you would write:
    if _is_in(type(foo), [int, long]):
  instead of:
    if type(foo) in [int, long]:
  """
  return any(obj is item for item in sequence)
##############################################################################
# Constants that define which functions should be wrapped and how. These are
# used by the functions wrap_and_insert_api_functions() and
# wrap_builtin_functions().
##############################################################################
class BaseProcessor(object):
  """Base type for ValueProcessor and ObjectProcessor."""
class ValueProcessor(BaseProcessor):
  """
  This is for simple/builtin types and combinations of them. Basically,
  anything that needs to be copied when used as an argument or return
  value and doesn't need to be wrapped or unwrapped as it passes through
  the namespace layer.
  """

  def check(self):
    # Subclasses must implement their own type/value validation.
    raise NotImplementedError

  def copy(self, val):
    # NOTE(review): _copy is not defined in this part of the file; presumably
    # a deep-copy helper defined elsewhere in this module -- confirm.
    return _copy(val)
class ObjectProcessor(BaseProcessor):
  """
  This is for anything that needs to be wrapped or unwrapped (not copied)
  as it passes through the namespace layer.
  """

  def check(self):
    # Subclasses must implement their own type validation.
    raise NotImplementedError

  def wrap(self, val):
    # Subclasses must wrap the value in a NamespaceObjectWrapper.
    raise NotImplementedError

  def unwrap(self, val):
    # NOTE(review): assumes the wrapper stores the underlying object in
    # '_wrapped__object' (set by NamespaceObjectWrapper, defined elsewhere
    # in this module) -- confirm.
    return val._wrapped__object
class Str(ValueProcessor):
  """Allows str or unicode, optionally bounded in length."""

  def __init__(self, maxlen=None, minlen=None):
    self.maxlen = maxlen
    self.minlen = minlen

  def check(self, val):
    if not _is_in(type(val), [str, unicode]):
      raise RepyArgumentError("Invalid type %s" % type(val))
    if self.maxlen is not None and len(val) > self.maxlen:
      raise RepyArgumentError("Max string length is %s" % self.maxlen)
    if self.minlen is not None and len(val) < self.minlen:
      raise RepyArgumentError("Min string length is %s" % self.minlen)
class Int(ValueProcessor):
  """Allows int or long values no smaller than 'min'."""

  def __init__(self, min=0):
    self.min = min

  def check(self, val):
    if _is_in(type(val), [int, long]):
      if val >= self.min:
        return
      raise RepyArgumentError("Min value is %s." % self.min)
    raise RepyArgumentError("Invalid type %s" % type(val))
class NoneOrInt(ValueProcessor):
  """Allows a NoneType or an int. This doesn't enforce min limit on the
  ints."""

  def check(self, val):
    if val is None or _is_in(type(val), [int, long]):
      return
    raise RepyArgumentError("Invalid type %s" % type(val))
class StrOrInt(ValueProcessor):
  """Allows a string or int. This doesn't enforce max/min/length limits on the
  strings and ints."""

  def check(self, val):
    if _is_in(type(val), [int, long, str, unicode]):
      return
    raise RepyArgumentError("Invalid type %s" % type(val))
class StrOrNone(ValueProcessor):
  """Allows str, unicode, or None."""

  def check(self, val):
    if val is None:
      return
    Str().check(val)
class Float(ValueProcessor):
  """Allows float, int, or long; negative values only if allow_neg is set."""

  def __init__(self, allow_neg=False):
    self.allow_neg = allow_neg

  def check(self, val):
    if not _is_in(type(val), [int, long, float]):
      raise RepyArgumentError("Invalid type %s" % type(val))
    if val < 0 and not self.allow_neg:
      raise RepyArgumentError("Must be non-negative.")
class Bool(ValueProcessor):
  """Allows bool."""

  def check(self, val):
    if type(val) is bool:
      return
    raise RepyArgumentError("Invalid type %s" % type(val))
class ListOfStr(ValueProcessor):
  """Allows lists of strings. This doesn't enforce max/min/length limits on the
  strings and ints."""

  def check(self, val):
    if type(val) is not list:
      raise RepyArgumentError("Invalid type %s" % type(val))
    checker = Str()
    for item in val:
      checker.check(item)
class List(ValueProcessor):
  """Allows lists. The list may contain anything."""

  def check(self, val):
    if type(val) is list:
      return
    raise RepyArgumentError("Invalid type %s" % type(val))
class Dict(ValueProcessor):
  """Allows dictionaries. The dictionaries may contain anything."""

  def check(self, val):
    if type(val) is dict:
      return
    raise RepyArgumentError("Invalid type %s" % type(val))
class DictOfStrOrInt(ValueProcessor):
  """
  Allows dictionaries that only contain string keys and str or int values.
  This doesn't enforce max/min/length limits on the strings and ints.
  """

  def check(self, val):
    if type(val) is not dict:
      raise RepyArgumentError("Invalid type %s" % type(val))
    key_checker = Str()
    value_checker = StrOrInt()
    for key, value in val.items():
      key_checker.check(key)
      value_checker.check(value)
class Func(ValueProcessor):
  """Allows a user-defined function object."""

  def check(self, val):
    if _is_in(type(val), [types.FunctionType, types.LambdaType, types.MethodType]):
      return
    raise RepyArgumentError("Invalid type %s" % type(val))
class NonCopiedVarArgs(ValueProcessor):
  """Allows any number of arguments. This must be the last arg listed. """

  def check(self, val):
    # Accept anything; varargs are passed through unchecked.
    pass

  def copy(self, val):
    # Intentionally not copied: the value is passed through by reference.
    return val
class File(ObjectProcessor):
  """Allows emulated file objects and wraps them for user code."""

  def check(self, val):
    if isinstance(val, emulfile.emulated_file):
      return
    raise RepyArgumentError("Invalid type %s" % type(val))

  def wrap(self, val):
    return NamespaceObjectWrapper("file", val,
                                  file_object_wrapped_functions_dict)
class Lock(ObjectProcessor):
  """Allows emulated lock objects and wraps them for user code."""

  def check(self, val):
    if isinstance(val, emulmisc.emulated_lock):
      return
    raise RepyArgumentError("Invalid type %s" % type(val))

  def wrap(self, val):
    return NamespaceObjectWrapper("lock", val,
                                  lock_object_wrapped_functions_dict)
class UDPServerSocket(ObjectProcessor):
  """Allows UDPServerSocket objects and wraps them for user code."""

  def check(self, val):
    if isinstance(val, emulcomm.UDPServerSocket):
      return
    raise RepyArgumentError("Invalid type %s" % type(val))

  def wrap(self, val):
    return NamespaceObjectWrapper("socket", val,
                                  udp_server_socket_object_wrapped_functions_dict)
class TCPServerSocket(ObjectProcessor):
  """Allows TCPServerSocket objects and wraps them for user code."""

  def check(self, val):
    if isinstance(val, emulcomm.TCPServerSocket):
      return
    raise RepyArgumentError("Invalid type %s" % type(val))

  def wrap(self, val):
    return NamespaceObjectWrapper("socket", val,
                                  tcp_server_socket_object_wrapped_functions_dict)
class TCPSocket(ObjectProcessor):
  """Allows TCPSocket (EmulatedSocket) objects and wraps them for user code."""

  def check(self, val):
    if isinstance(val, emulcomm.EmulatedSocket):
      return
    raise RepyArgumentError("Invalid type %s" % type(val))

  def wrap(self, val):
    return NamespaceObjectWrapper("socket", val,
                                  tcp_socket_object_wrapped_functions_dict)
class VirtualNamespace(ObjectProcessor):
  """Allows VirtualNamespace objects and wraps them for user code."""

  def check(self, val):
    if isinstance(val, virtual_namespace.VirtualNamespace):
      return
    raise RepyArgumentError("Invalid type %s" % type(val))

  def wrap(self, val):
    return NamespaceObjectWrapper("VirtualNamespace", val,
                                  virtual_namespace_object_wrapped_functions_dict)
class SafeDict(ValueProcessor):
  """Allows SafeDict objects."""

  # TODO: provide a copy function that won't actually copy so that
  # references are maintained.

  def check(self, val):
    if isinstance(val, safe.SafeDict):
      return
    raise RepyArgumentError("Invalid type %s" % type(val))
class DictOrSafeDict(ValueProcessor):
  """Allows SafeDict objects or regular dict objects."""

  # TODO: provide a copy function that won't actually copy so that
  # references are maintained.

  def check(self, val):
    if type(val) is dict:
      return
    # Not a plain dict: it must then be a SafeDict.
    SafeDict().check(val)
# These are the functions in the user's name space excluding the builtins we
# allow. Each function is a key in the dictionary. Each value is a dictionary
# that defines the functions to be used by the wrapper when a call is
# performed. It is the same dictionary that is passed as a constructor to
# the NamespaceAPIFunctionWrapper class to create the actual wrappers.
# The public function wrap_and_insert_api_functions() uses this dictionary as
# the basis for what is populated in the user context. Anything function
# defined here will be wrapped and made available to untrusted user code.
#
# Each entry's 'args' list gives one processor per positional argument and
# 'return' gives the processor (or tuple of processors) for the return value.
USERCONTEXT_WRAPPER_INFO = {
    'gethostbyname' :
        {'func' : emulcomm.gethostbyname,
         'args' : [Str()],
         'return' : Str()},
    'getmyip' :
        {'func' : emulcomm.getmyip,
         'args' : [],
         'return' : Str()},
    'sendmessage' :
        {'func' : emulcomm.sendmessage,
         'args' : [Str(), Int(), Str(), Str(), Int()],
         'return' : Int()},
    'listenformessage' :
        {'func' : emulcomm.listenformessage,
         'args' : [Str(), Int()],
         'return' : UDPServerSocket()},
    'openconnection' :
        {'func' : emulcomm.openconnection,
         'args' : [Str(), Int(), Str(), Int(), Float()],
         # 'raise' : [AddressBindingError, PortRestrictedError, PortInUseError,
         #           ConnectionRefusedError, TimeoutError, RepyArgumentError],
         'return' : TCPSocket()},
    'listenforconnection' :
        {'func' : emulcomm.listenforconnection,
         'args' : [Str(), Int()],
         'return' : TCPServerSocket()},
    'openfile' :
        # Filenames are capped at 120 characters.
        {'func' : emulfile.emulated_open,
         'args' : [Str(maxlen=120), Bool()],
         'return' : File()},
    'listfiles' :
        {'func' : emulfile.listfiles,
         'args' : [],
         'return' : ListOfStr()},
    'removefile' :
        {'func' : emulfile.removefile,
         'args' : [Str(maxlen=120)],
         'return' : None},
    'exitall' :
        {'func' : emulmisc.exitall,
         'args' : [],
         'return' : None},
    'createlock' :
        {'func' : emulmisc.createlock,
         'args' : [],
         'return' : Lock()},
    'getruntime' :
        {'func' : emulmisc.getruntime,
         'args' : [],
         'return' : Float()},
    'randombytes' :
        # Always returns exactly 1024 bytes.
        {'func' : emulmisc.randombytes,
         'args' : [],
         'return' : Str(maxlen=1024, minlen=1024)},
    'createthread' :
        {'func' : emultimer.createthread,
         'args' : [Func()],
         'return' : None},
    'sleep' :
        {'func' : emultimer.sleep,
         'args' : [Float()],
         'return' : None},
    'log' :
        # Accepts any number of arguments, passed through uncopied.
        {'func' : emulmisc.log,
         'args' : [NonCopiedVarArgs()],
         'return' : None},
    'getthreadname' :
        {'func' : emulmisc.getthreadname,
         'args' : [],
         'return' : Str()},
    'createvirtualnamespace' :
        {'func' : virtual_namespace.createvirtualnamespace,
         'args' : [Str(), Str()],
         'return' : VirtualNamespace()},
    'getresources' :
        {'func' : nonportable.get_resources,
         'args' : [],
         'return' : (Dict(), Dict(), List())},
    'getlasterror' :
        {'func' : emulmisc.getlasterror,
         'args' : [],
         'return' : StrOrNone()},
}

# Methods made available on wrapped file objects.
FILE_OBJECT_WRAPPER_INFO = {
    'close' :
        {'func' : emulfile.emulated_file.close,
         'args' : [],
         'return' : None},
    'readat' :
        # First arg is the byte count (or None for "read all"), second is
        # the non-negative file offset.
        {'func' : emulfile.emulated_file.readat,
         'args' : [NoneOrInt(), Int(min=0)],
         'return' : Str()},
    'writeat' :
        {'func' : emulfile.emulated_file.writeat,
         'args' : [Str(), Int(min=0)],
         'return' : None},
}

# Methods made available on wrapped connected TCP sockets.
TCP_SOCKET_OBJECT_WRAPPER_INFO = {
    'close' :
        {'func' : emulcomm.EmulatedSocket.close,
         'args' : [],
         'return' : Bool()},
    'recv' :
        {'func' : emulcomm.EmulatedSocket.recv,
         'args' : [Int(min=1)],
         'return' : Str()},
    'send' :
        {'func' : emulcomm.EmulatedSocket.send,
         'args' : [Str()],
         'return' : Int(min=0)},
}

# TODO: Figure out which real object should be wrapped. It doesn't appear
# to be implemented yet as there is no "getconnection" in the repy_v2 source.
TCP_SERVER_SOCKET_OBJECT_WRAPPER_INFO = {
    'close' :
        {'func' : emulcomm.TCPServerSocket.close,
         'args' : [],
         'return' : Bool()},
    'getconnection' :
        # Returns (remote ip, remote port, connected socket).
        {'func' : emulcomm.TCPServerSocket.getconnection,
         'args' : [],
         'return' : (Str(), Int(), TCPSocket())},
}

# Methods made available on wrapped UDP server sockets.
UDP_SERVER_SOCKET_OBJECT_WRAPPER_INFO = {
    'close' :
        {'func' : emulcomm.UDPServerSocket.close,
         'args' : [],
         'return' : Bool()},
    'getmessage' :
        # Returns (sender ip, sender port, message).
        {'func' : emulcomm.UDPServerSocket.getmessage,
         'args' : [],
         'return' : (Str(), Int(), Str())},
}

# Methods made available on wrapped lock objects.
LOCK_OBJECT_WRAPPER_INFO = {
    'acquire' :
        # A string for the target_func indicates a function by this name on the
        # instance rather is what should be wrapped.
        {'func' : 'acquire',
         'args' : [Bool()],
         'return' : Bool()},
    'release' :
        # A string for the target_func indicates a function by this name on the
        # instance rather is what should be wrapped.
        {'func' : 'release',
         'args' : [],
         'return' : None},
}

# Methods made available on wrapped VirtualNamespace objects.
VIRTUAL_NAMESPACE_OBJECT_WRAPPER_INFO = {
    # Evaluate must take a dict or SafeDict, and can
    # only return a SafeDict. We must _not_ copy the
    # dict since that will screw up the references in the dict.
    'evaluate' :
        {'func' : 'evaluate',
         'args' : [DictOrSafeDict()],
         'return' : SafeDict()},
}

##############################################################################
# The classes we define from which actual wrappers are instantiated.
##############################################################################
def _copy(obj, objectmap=None):
    """
    <Purpose>
      Create a deep copy of an object without using the python 'copy' module.
      Using copy.deepcopy() doesn't work because builtins like id and hasattr
      aren't available when this is called.
    <Arguments>
      obj
        The object to make a deep copy of.
      objectmap
        A mapping between original objects and the corresponding copy. This is
        used to handle circular references.
    <Exceptions>
      TypeError
        If an object is encountered that we don't know how to make a copy of.
      NamespaceViolationError
        If an unexpected error occurs while copying. This isn't the greatest
        solution, but in general the idea is we just need to abort the wrapped
        function call.
        (NOTE(review): the code below actually raises NamespaceInternalError,
        not NamespaceViolationError -- confirm which name is intended.)
    <Side Effects>
      A new reference is created to every non-simple type of object. That is,
      everything except objects of type str, unicode, int, etc.
    <Returns>
      The deep copy of obj with circular/recursive references preserved.
    """
    try:
        # If this is a top-level call to _copy, create a new objectmap for use
        # by recursive calls to _copy.
        if objectmap is None:
            objectmap = {}
        # If this is a circular reference, use the copy we already made.
        elif _saved_id(obj) in objectmap:
            return objectmap[_saved_id(obj)]
        # Immutable (or intentionally shared) types are returned as-is.
        # types.InstanceType is included because the user can provide an instance
        # of a class of their own in the list of callback args to settimer.
        if _is_in(type(obj), [str, unicode, int, long, float, complex, bool, frozenset,
                              types.NoneType, types.FunctionType, types.LambdaType,
                              types.MethodType, types.InstanceType]):
            return obj
        elif type(obj) is list:
            temp_list = []
            # Need to save this in the objectmap before recursing because lists
            # might have circular references.
            objectmap[_saved_id(obj)] = temp_list
            for item in obj:
                temp_list.append(_copy(item, objectmap))
            return temp_list
        elif type(obj) is tuple:
            temp_list = []
            for item in obj:
                temp_list.append(_copy(item, objectmap))
            # I'm not 100% confident on my reasoning here, so feel free to point
            # out where I'm wrong: There's no way for a tuple to directly contain
            # a circular reference to itself. Instead, it has to contain, for
            # example, a dict which has the same tuple as a value. In that
            # situation, we can avoid infinite recursion and properly maintain
            # circular references in our copies by checking the objectmap right
            # after we do the copy of each item in the tuple. The existence of the
            # dictionary would keep the recursion from being infinite because those
            # are properly handled. That just leaves making sure we end up with
            # only one copy of the tuple. We do that here by checking to see if we
            # just made a copy as a result of copying the items above. If so, we
            # return the one that's already been made.
            if _saved_id(obj) in objectmap:
                return objectmap[_saved_id(obj)]
            retval = tuple(temp_list)
            objectmap[_saved_id(obj)] = retval
            return retval
        elif type(obj) is set:
            temp_list = []
            # We can't just store this list object in the objectmap because it isn't
            # a set yet. If it's possible to have a set contain a reference to
            # itself, this could result in infinite recursion. However, sets can
            # only contain hashable items so I believe this can't happen.
            for item in obj:
                temp_list.append(_copy(item, objectmap))
            retval = set(temp_list)
            objectmap[_saved_id(obj)] = retval
            return retval
        elif type(obj) is dict:
            temp_dict = {}
            # Need to save this in the objectmap before recursing because dicts
            # might have circular references.
            objectmap[_saved_id(obj)] = temp_dict
            for key, value in obj.items():
                temp_key = _copy(key, objectmap)
                temp_dict[temp_key] = _copy(value, objectmap)
            return temp_dict
        # We don't copy certain objects. This is because copying an emulated file
        # object, for example, will cause the destructor of the original one to
        # be invoked, which will close the actual underlying file. As the object
        # is wrapped and the client does not have access to it, it's safe to not
        # wrap it.
        elif isinstance(obj, (NamespaceObjectWrapper, emulfile.emulated_file,
                              emulcomm.EmulatedSocket, emulcomm.TCPServerSocket,
                              emulcomm.UDPServerSocket, thread.LockType,
                              virtual_namespace.VirtualNamespace)):
            return obj
        else:
            raise TypeError("_copy is not implemented for objects of type " + str(type(obj)))
    except Exception, e:
        # Any unexpected failure here means we must abort the wrapped call.
        raise NamespaceInternalError("_copy failed on " + str(obj) + " with message " + str(e))
class NamespaceInternalError(Exception):
    """Something went wrong and we should terminate."""
    # Raised by the namespace layer itself (e.g. _copy and wrapped_function)
    # when it detects an internal inconsistency, as opposed to errors raised
    # by the wrapped user-facing API calls.
class NamespaceObjectWrapper(object):
    """
    Instances of this class are used to wrap handles and objects returned by
    api functions to the user code.
    The methods that can be called on these instances are mostly limited to
    what is in the allowed_functions_dict passed to the constructor. The
    exception is that a simple __repr__() is defined as well as an __iter__()
    and next(). However, instances won't really be iterable unless a next()
    method is defined in the allowed_functions_dict.
    """

    def __init__(self, wrapped_type_name, wrapped_object, allowed_functions_dict):
        """
        <Purpose>
          Constructor
        <Arguments>
          self
          wrapped_type_name
            The name (a string) of what type of wrapped object. For example,
            this could be "timerhandle".
          wrapped_object
            The actual object to be wrapped.
          allowed_functions_dict
            A dictionary of the allowed methods that can be called on the object.
            The keys should be the names of the methods, the values are the
            wrapped functions that will be called.
        """
        # Only one underscore at the front so python doesn't do its own mangling
        # of the name. We're not trying to keep this private in the private class
        # variable sense of python where nothing is really private, instead we just
        # want a double-underscore in there as extra protection against untrusted
        # code being able to access the values.
        self._wrapped__type_name = wrapped_type_name
        self._wrapped__object = wrapped_object
        self._wrapped__allowed_functions_dict = allowed_functions_dict

    def __getattr__(self, name):
        """
        When a method is called on an instance, we look for the method in the
        allowed_functions_dict that was provided to the constructor. If there
        is such a method in there, we return a function that will properly
        invoke the method with the correct 'self' as the first argument.
        """
        if name in self._wrapped__allowed_functions_dict:
            wrapped_func = self._wrapped__allowed_functions_dict[name]

            # Closure binding the wrapped object as the implicit first argument.
            def __do_func_call(*args, **kwargs):
                return wrapped_func(self._wrapped__object, *args, **kwargs)

            return __do_func_call
        else:
            # This is the standard way of handling "it doesn't exist as far as we
            # are concerned" in __getattr__() methods.
            raise AttributeError, name

    def __iter__(self):
        """
        We provide __iter__() as part of the class rather than through __getattr__
        because python won't look for the attribute in the object to determine if
        the object is iterable, instead it will look directly at the class the
        object is an instance of. See the docstring for next() for more info.
        """
        return self

    def next(self):
        """
        We provide next() as part of the class rather than through __getattr__
        because python won't look for the attribute in the object to determine if
        the object is iterable, instead it will look directly at the class the
        object is an instance of. We don't want everything that is wrapped to
        be considered iterable, though, so we return a TypeError if this gets
        called but there isn't a wrapped next() method.
        """
        if "next" in self._wrapped__allowed_functions_dict:
            return self._wrapped__allowed_functions_dict["next"](self._wrapped__object)
        raise TypeError("You tried to iterate a non-iterator of type " + str(type(self._wrapped__object)))

    def __repr__(self):
        return "<Namespace wrapped " + self._wrapped__type_name + ": " + repr(self._wrapped__object) + ">"

    def __hash__(self):
        # Delegate hashing to the wrapped object so wrapped and unwrapped
        # views of the same object hash identically.
        return _saved_hash(self._wrapped__object)

    def __eq__(self, other):
        """In addition to __hash__, this is necessary for use as dictionary keys."""
        # We could either assume "other" is a wrapped object and try to compare
        # its wrapped object against this wrapped object, or we could just compare
        # the hashes of each. If we try to unwrap the other object, it means you
        # couldn't compare a wrapped object to an unwrapped one.
        return _saved_hash(self) == _saved_hash(other)

    def __ne__(self, other):
        """
        It's good for consistency to define __ne__ if one is defining __eq__,
        though this is not needed for using objects as dictionary keys.
        """
        return _saved_hash(self) != _saved_hash(other)
class NamespaceAPIFunctionWrapper(object):
    """
    Instances of this class exist solely to provide function wrapping. This is
    done by creating an instance of the class and then making available the
    instance's wrapped_function() method to any code that should only be allowed
    to call the wrapped version of the function.
    """

    def __init__(self, func_dict, is_method=False):
        """
        <Purpose>
          Constructor.
        <Arguments>
          self
          func_dict
            A dictionary with the following keys whose values are the
            corresponding function:
              func (required) -- a function or a string of the name
                of the method on the underlying object.
              args (required)
              return (required)
          is_method -- if this is an object's method being wrapped
            rather than a regular function.
        <Exceptions>
          None
        <Side Effects>
          None
        <Returns>
          None
        """
        # Required in func_dict.
        self.__func = func_dict["func"]
        self.__args = func_dict["args"]
        self.__return = func_dict["return"]
        self.__is_method = is_method
        # Make sure that the __target_func really is a function or a string
        # indicating a function by that name on the underlying object should
        # be called.
        if not _saved_callable(self.__func) and type(self.__func) is not str:
            raise TypeError("The func was neither callable nor a string when " +
                            "constructing a namespace-wrapped function. The object " +
                            "used for target_func was: " + repr(self.__func))
        # Record a printable name for error messages.
        if type(self.__func) is str:
            self.__func_name = self.__func
        else:
            self.__func_name = self.__func.__name__

    def _process_args(self, args):
        """Copy/unwrap and type-check each positional argument."""
        args_to_return = []
        for index in range(len(args)):
            # Armon: If there are more arguments than there are type specifications
            # and we are using NonCopiedVarArgs, then check against that.
            if index >= len(self.__args) and isinstance(self.__args[-1], NonCopiedVarArgs):
                arg_type = self.__args[-1]
            else:
                arg_type = self.__args[index]
            # We only copy simple types, which means we only copy ValueProcessor not
            # ObjectProcessor arguments.
            if isinstance(arg_type, ValueProcessor):
                temparg = arg_type.copy(args[index])
            elif isinstance(arg_type, ObjectProcessor):
                temparg = arg_type.unwrap(args[index])
            else:
                raise NamespaceInternalError("Unknown argument expectation.")
            arg_type.check(temparg)
            args_to_return.append(temparg)
        return args_to_return

    def _process_retval_helper(self, processor, retval):
        """Copy or wrap a single return value according to its processor."""
        try:
            if isinstance(processor, ValueProcessor):
                tempretval = processor.copy(retval)
                processor.check(tempretval)
            elif isinstance(processor, ObjectProcessor):
                processor.check(retval)
                tempretval = processor.wrap(retval)
            elif processor is None:
                if retval is not None:
                    raise InternalRepyError("Expected None but wasn't.")
                tempretval = None
            else:
                raise InternalRepyError("Unknown retval expectation.")
            return tempretval
        except RepyArgumentError, err:
            # A check failure on a *return* value is our bug, not the user's.
            raise InternalRepyError("Invalid retval type: %s" % err)

    def _process_retval(self, retval):
        """Process a return value, handling tuple returns element-wise."""
        try:
            # Allow the return value to be a tuple of processors.
            if type(retval) is tuple:
                if len(retval) != len(self.__return):
                    raise InternalRepyError("Returned tuple of wrong size: %s" % str(retval))
                tempretval = []
                for index in range(len(retval)):
                    tempitem = self._process_retval_helper(self.__return[index], retval[index])
                    tempretval.append(tempitem)
                tempretval = tuple(tempretval)
            else:
                tempretval = self._process_retval_helper(self.__return, retval)
        except Exception, e:
            raise InternalRepyError(
                "Function '" + self.__func_name + "' returned with unallowed return type " +
                str(type(retval)) + " : " + str(e))
        return tempretval

    def wrapped_function(self, *args, **kwargs):
        """
        <Purpose>
          Act as the function that is wrapped but perform all required sanitization
          and checking of data that goes into and comes out of the underlying
          function.
        <Arguments>
          self
          *args
          **kwargs
            The arguments to the underlying function.
        <Exceptions>
          NamespaceViolationError
            If some aspect of the arguments or function call is not allowed.
          Anything else that the underlying function may raise.
        <Side Effects>
          Anything that the underyling function may do.
        <Returns>
          Anything that the underlying function may return.
        """
        try:
            # We don't allow keyword args.
            if kwargs:
                raise RepyArgumentError("Keyword arguments not allowed when calling %s." %
                                        self.__func_name)
            if self.__is_method:
                # This is a method of an object instance rather than a standalone function.
                # The "self" argument will be passed implicitly by python in some cases, so
                # we remove it from the args we check. For the others, we'll add it back in
                # after the check.
                args_to_check = args[1:]
            else:
                args_to_check = args
            if len(args_to_check) != len(self.__args):
                # A trailing NonCopiedVarArgs spec accepts any surplus arguments.
                if not self.__args or not isinstance(self.__args[-1:][0], NonCopiedVarArgs):
                    raise RepyArgumentError("Function '" + self.__func_name +
                                            "' takes " + str(len(self.__args)) + " arguments, not " +
                                            str(len(args_to_check)) + " as you provided.")
            args_copy = self._process_args(args_to_check)
            args_to_use = None
            # If it's a string rather than a function, then this is our convention
            # for indicating that we want to wrap the function of this particular
            # object. We use this if the function to wrap isn't available without
            # having the object around, such as with real lock objects.
            if type(self.__func) is str:
                func_to_call = _saved_getattr(args[0], self.__func)
                args_to_use = args_copy
            else:
                func_to_call = self.__func
                if self.__is_method:
                    # Sanity check the object we're adding back in as the "self" argument.
                    if not isinstance(args[0], (NamespaceObjectWrapper, emulfile.emulated_file,
                                                emulcomm.EmulatedSocket, emulcomm.TCPServerSocket,
                                                emulcomm.UDPServerSocket, thread.LockType,
                                                virtual_namespace.VirtualNamespace)):
                        raise NamespaceInternalError("Wrong type for 'self' argument.")
                    # If it's a method but the function was not provided as a string, we
                    # actually do have to add the first argument back in. Yes, this whole
                    # area of code is ugly.
                    args_to_use = [args[0]] + args_copy
                else:
                    args_to_use = args_copy
            retval = func_to_call(*args_to_use)
            return self._process_retval(retval)
        except RepyException:
            # TODO: this should be changed to RepyError along with all references to
            # RepyException in the rest of the repy code.
            # We allow any RepyError to continue up to the client code.
            raise
        except:
            # Code evaluated inside a `VirtualNamespace` may raise arbitrary
            # errors, including plain Python exceptions. Reraise these errors
            # so that the calling user code sees them.
            # (Otherwise, things like `NameError`s in a virtual namespace
            # crash the sandbox despite being wrapped in `try`/`except`,
            # see SeattleTestbed/repy_v2#132.)
            if type(args[0]) == virtual_namespace.VirtualNamespace:
                raise
            # Non-`RepyException`s outside of `VirtualNamespace` methods
            # are unexpected and indicative of a programming error on
            # our side, so we terminate.
            _handle_internalerror("Unexpected exception from within Repy API", 843)
| mit |
tsg-/pyeclib | pyeclib/utils.py | 1 | 3062 | # Copyright (c) 2013, 2014, Kevin Greenan (kmgreen2@gmail.com)
# Copyright (c) 2014, Tushar Gohad (tusharsg@gmail.com)
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution. THIS SOFTWARE IS
# PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
# NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import traceback
def positive_int_value(param):
    """Return *param* coerced to ``int``, requiring the result to be > 0.

    :param param: any value acceptable to ``int()``
    :returns: the positive integer value of *param*
    :raises ValueError: if *param* cannot be converted or is <= 0
    """
    try:
        value = int(param)
    except (TypeError, ValueError):
        # TypeError for None and other non-numeric objects, ValueError for
        # non-integer strings.
        raise ValueError('Must be an integer > 0, not "%s".' % param)
    # Use an explicit check rather than `assert`: assertions are stripped
    # when Python runs with -O, which would silently accept values <= 0.
    if value <= 0:
        raise ValueError('Must be an integer > 0, not "%s".' % param)
    return value
def import_class(import_str):
    """
    Returns a class from a string that specifies a module and/or class

    :param import_str: import path, e.g. 'httplib.HTTPConnection'
    :returns: imported object
    :raises ImportError: if the class does not exist or the path is invalid
    """
    (mod_str, separator, class_str) = import_str.rpartition('.')
    try:
        __import__(mod_str)
        return getattr(sys.modules[mod_str], class_str)
    except (ValueError, AttributeError):
        # Bug fix: the format string previously read '(%)' -- an invalid
        # conversion specifier -- so formatting itself raised
        # "ValueError: unsupported format character" instead of producing
        # the intended ImportError message.
        raise ImportError('Class %s cannot be found (%s)' %
                          (class_str,
                           traceback.format_exception(*sys.exc_info())))
def create_instance(import_str, *args, **kwargs):
    """
    Returns instance of class which imported by import path.

    :param import_str: import path of class
    :param \*args: indexed arguments for new instance
    :param \*\*kwargs: keyword arguments for new instance
    :returns: instance of imported class which instantiated with
              arguments *args and **kwargs
    :raises ImportError: propagated from :func:`import_class` when the
                         path cannot be resolved
    """
    # The previous `try: ... except Exception: raise` wrapper was a no-op;
    # let import_class's exceptions propagate directly.
    object_class = import_class(import_str)
    return object_class(*args, **kwargs)
| bsd-2-clause |
ssh0/growing-string | triangular_lattice/diecutting/result_count_on_edge.py | 1 | 9360 | #!/usr/bin/env python
# -*- coding:utf-8 -*-
#
# written by Shotaro Fujimoto
# 2016-12-16
import matplotlib.pyplot as plt
# from mpl_toolkits.mplot3d.axes3d import Axes3D
import matplotlib.cm as cm
import numpy as np
import set_data_path
class Visualizer(object):
    """Plot statistics collected from die-cutting experiment result files.

    Each ``result_*`` method loads every path in ``set_data_path.data_path``
    and draws one matplotlib figure. The constructor dispatches to the
    ``result_*`` methods named in *subjects*.
    """

    def __init__(self, subjects):
        """Run ``result_<name>()`` for every name in *subjects*."""
        self.data_path_list = set_data_path.data_path
        if len(subjects) != 0:
            for subject in subjects:
                getattr(self, 'result_' + subject)()

    def load_data(self, _path):
        """Load one ``.npz`` result file and derive the per-L statistics.

        Falls back to :meth:`load_data_averaged` when the file contains a
        non-empty ``size_dist_ave`` array. All derived quantities are stored
        as attributes on ``self``.
        """
        data = np.load(_path)
        beta = data['beta']
        try:
            size_dist_ave = data['size_dist_ave']
            if len(size_dist_ave) == 0:
                raise KeyError
            return self.load_data_averaged(_path)
        except KeyError:
            pass
        num_of_strings = data['num_of_strings']
        frames = data['frames']
        # Fix: `np.float` (a deprecated alias of the builtin `float`) was
        # removed in NumPy 1.24; use `float` directly.
        Ls = data['Ls'].astype(float)
        # Ls = (3 * Ls * (Ls + 1) + 1)
        size_dist = data['size_dist']
        N0 = np.array([l[1] for l in size_dist], dtype=float) / num_of_strings
        n0 = N0[1:]
        S = np.array([np.sum(l) for l in size_dist], dtype=float) / num_of_strings
        n1 = (S[1:] - n0) * 2.
        N = []
        for l in size_dist:
            dot = np.dot(np.arange(len(l)), np.array(l).T)
            N.append(dot)
        # N = np.array([np.dot(np.arange(len(l)), np.array(l).T) for l in size_dist])
        N_all = 3. * Ls * (Ls + 1.) + 1
        N = np.array(N, dtype=float) / num_of_strings
        N_minus = N_all - N
        N_minus_rate = N_minus / N_all
        n_minus = N_minus[1:] - N_minus[:-1]
        n1_ave = n1 / np.sum(n1)
        n2 = (6 * Ls[1:]) - (n0 + n1 + n_minus)
        self.beta = beta
        self.num_of_strings = num_of_strings
        self.frames = frames
        self.Ls = Ls
        self.N = N
        self.N_minus = N_minus
        self.N_minus_rate = N_minus_rate
        self.S = S
        self.n0 = n0
        self.n1 = n1
        self.n2 = n2
        self.n_minus = n_minus
        self.n1_ave = n1_ave

    def load_data_averaged(self, _path):
        """Like :meth:`load_data` but uses the pre-averaged ``size_dist_ave``.

        Note: unlike :meth:`load_data`, the counts here are NOT divided by
        ``num_of_strings`` (they are already averages), and the extra
        attributes ``N_all`` / ``n_all`` are set.
        """
        data = np.load(_path)
        beta = data['beta']
        num_of_strings = data['num_of_strings']
        frames = data['frames']
        # Fix: `np.float` removed in NumPy 1.24; use builtin `float`.
        Ls = data['Ls'].astype(float)
        # Ls = (3 * Ls * (Ls + 1) + 1)
        # size_dist = data['size_dist']
        size_dist_ave = data['size_dist_ave']
        N0 = np.array([l[1] for l in size_dist_ave], dtype=float)
        n0 = N0[1:]
        S = np.array([np.sum(l) for l in size_dist_ave], dtype=float)
        n1 = (S[1:] - n0) * 2.
        N = []
        for l in size_dist_ave:
            dot = np.dot(np.arange(len(l)), np.array(l).T)
            N.append(dot)
        # N = np.array([np.dot(np.arange(len(l)), np.array(l).T) for l in size_dist_ave])
        N_all = 3. * Ls * (Ls + 1.) + 1
        N = np.array(N, dtype=float)
        N_minus = N_all - N
        N_minus_rate = N_minus / N_all
        n_minus = N_minus[1:] - N_minus[:-1]
        n1_ave = n1 / np.sum(n1)
        n2 = (6 * Ls[1:]) - (n0 + n1 + n_minus)
        self.beta = beta
        self.num_of_strings = num_of_strings
        self.frames = frames
        self.Ls = Ls
        self.N = N
        self.N_all = N_all
        self.N_minus = N_minus
        self.N_minus_rate = N_minus_rate
        self.S = S
        self.n_all = 6 * Ls[1:]
        self.n0 = n0
        self.n1 = n1
        self.n2 = n2
        self.n_minus = n_minus
        self.n1_ave = n1_ave

    def result_N(self):
        """Plot N (occupied points) against cutting size L for every beta."""
        fig, ax = plt.subplots()
        for i, result_data_path in enumerate(self.data_path_list):
            self.load_data(result_data_path)
            ax.plot(self.Ls[1:], self.N[1:], '.',
                    label=r'$\beta = %2.2f$' % self.beta,
                    color=cm.viridis(float(i) / len(self.data_path_list)))
        ax.legend(loc='best')
        ax.set_title('Occupied points in the cutting region' +
                     ' (sample: {})'.format(self.num_of_strings))
        ax.set_xlabel(r'Cutting size $L$')
        ax.set_ylabel(r'$N$')
        plt.show()

    def result_N_minus_rate(self):
        """Plot the fraction of unoccupied sites against cutting size L."""
        fig, ax = plt.subplots()
        for i, result_data_path in enumerate(self.data_path_list):
            self.load_data(result_data_path)
            ax.plot(self.Ls[1:], self.N_minus_rate[1:], '.',
                    label=r'$\beta = %2.2f$' % self.beta,
                    color=cm.viridis(float(i) / len(self.data_path_list)))
        ax.legend(loc='best')
        ax.set_title('The rate of not occupied site in all N' +
                     ' (sample: {})'.format(self.num_of_strings))
        ax.set_xlabel(r'Cutting size $L$')
        ax.set_ylabel(r'$N_{-1} / N_{\mathrm{all}}$')
        plt.show()

    def result_n0(self):
        """Plot n0 (singleton-subcluster sites on the edge) against L."""
        fig, ax = plt.subplots()
        for i, result_data_path in enumerate(self.data_path_list):
            self.load_data(result_data_path)
            ax.plot(self.Ls[1:], self.n0, '.',
                    label=r'$\beta = %2.2f$' % self.beta,
                    color=cm.viridis(float(i) / len(self.data_path_list)))
        ax.legend(loc='best')
        ax.set_title('Averaged number of the sites which is the only member of \
a subcluster on the cutting edges.' +
                     ' (sample: {})'.format(self.num_of_strings))
        ax.set_xlabel(r'Cutting size $L$')
        ax.set_ylabel(r'$n_{0}$')
        plt.show()

    def result_n1(self):
        """Plot n1 (edge sites attached to an existing subcluster) against L."""
        fig, ax = plt.subplots()
        for i, result_data_path in enumerate(self.data_path_list):
            self.load_data(result_data_path)
            ax.plot(self.Ls[1:], self.n1, '.',
                    label=r'$\beta = %2.2f$' % self.beta,
                    color=cm.viridis(float(i) / len(self.data_path_list)))
        ax.legend(loc='best')
        ax.set_title('Averaged number of the sites which is connected to a \
existing subcluster on the cutting edges.' +
                     ' (sample: {})'.format(self.num_of_strings))
        ax.set_xlabel(r'Cutting size $L$')
        ax.set_ylabel(r'$n_{1}$')
        plt.show()

    def result_n2(self):
        """Plot n2 (edge sites with two neighbors) against L."""
        fig, ax = plt.subplots()
        for i, result_data_path in enumerate(self.data_path_list):
            self.load_data(result_data_path)
            ax.plot(self.Ls[1:], self.n2, '.',
                    label=r'$\beta = %2.2f$' % self.beta,
                    color=cm.viridis(float(i) / len(self.data_path_list)))
        ax.legend(loc='best')
        ax.set_title('Averaged number of the sites on the cutting edges which \
is connected to two neighbors.' +
                     ' (sample: {})'.format(self.num_of_strings))
        ax.set_xlabel(r'Cutting size $L$')
        ax.set_ylabel(r'$n_{2}$')
        plt.show()

    def result_n_minus(self):
        """Plot n_minus (unoccupied edge sites) against L."""
        fig, ax = plt.subplots()
        for i, result_data_path in enumerate(self.data_path_list):
            self.load_data(result_data_path)
            ax.plot(self.Ls[1:], self.n_minus, '.',
                    label=r'$\beta = %2.2f$' % self.beta,
                    color=cm.viridis(float(i) / len(self.data_path_list)))
        ax.legend(loc='best')
        ax.set_title('Averaged number of the sites which is not occupied on \
the cutting edges.' +
                     ' (sample: {})'.format(self.num_of_strings))
        ax.set_xlabel(r'Cutting size $L$')
        ax.set_ylabel(r'$n_{-1}$')
        plt.show()

    def result_S(self):
        """Plot the normalized subcluster count S against L."""
        fig, ax = plt.subplots()
        for i, result_data_path in enumerate(self.data_path_list):
            self.load_data(result_data_path)
            ax.plot(self.Ls[1:], self.S[1:] / np.sum(self.S[1:]), '.',
                    label=r'$\beta = %2.2f$' % self.beta,
                    color=cm.viridis(float(i) / len(self.data_path_list)))
        ax.legend(loc='best')
        ax.set_ylim([0, ax.get_ylim()[1]])
        ax.set_title('Averaged number of the subclusters in the cutted region.'
                     + ' (sample: {})'.format(self.num_of_strings))
        ax.set_xlabel(r'Cutting size $L$')
        ax.set_ylabel(r'$S$')
        plt.show()

    def result_S_rate(self):
        """Plot S normalized by N against L."""
        fig, ax = plt.subplots()
        for i, result_data_path in enumerate(self.data_path_list):
            self.load_data(result_data_path)
            # ax.plot(self.Ls[1:], self.S[1:] / np.sum(self.S[1:]), '.',
            # ax.plot(self.Ls[1:], self.S[1:] / self.n_all, '.',
            ax.plot(self.Ls[1:], self.S[1:] / self.N[1:], '.',
                    label=r'$\beta = %2.2f$' % self.beta,
                    color=cm.viridis(float(i) / len(self.data_path_list)))
        ax.legend(loc='best')
        ax.set_ylim([0, ax.get_ylim()[1]])
        ax.set_title('Averaged number of the subclusters in the cutted region'
                     + ' (normalized)'
                     + ' (sample: {})'.format(self.num_of_strings))
        ax.set_xlabel(r'Cutting size $L$')
        ax.set_ylabel(r'$S$')
        plt.show()
if __name__ == '__main__':
    # subject: 'N', 'N_minus_rate', 'n0', 'n1', 'n2', 'n_minus', 'S'
    # Constructing the Visualizer immediately runs (and shows) one plot per
    # uncommented subject below.
    main = Visualizer(
        [
            # 'N',
            # 'N_minus_rate',
            # 'n0',
            # 'n1',
            # 'n2',
            # 'n_minus',
            'S',
            # 'S_rate'
        ]
    )
| mit |
grantcolasurdo/geras | fifth_edition/alignment.py | 1 | 3115 | """Alignment object class"""
__author__ = "Grant Colasurdo"
class Alignment:
    """A two-axis character alignment (law/chaos and good/evil).

    Each axis is stored as a one-letter code: ``L``/``N``/``C`` on the
    law-chaos axis, ``G``/``N``/``E`` on the good-evil axis, or ``""`` when
    that axis is unspecified.
    """

    # Maps one-letter axis codes to their long names.
    SHORT_TO_LONG = {
        'N': 'Neutral',
        'G': 'Good',
        'E': 'Evil',
        'C': 'Chaotic',
        'L': 'Lawful',
        '': ''
    }

    def __init__(self, short_string: str=""):
        """Parse a short alignment code such as "LG", "CN", "NE" or "N"."""
        # Bug fix: default both axes to "unspecified" so partial codes like
        # "G" or "C" no longer leave an attribute undefined (which made
        # short_string/long_string raise AttributeError).
        self._LawChaos = ""
        self._GoodEvil = ""
        if "L" in short_string:
            self._LawChaos = "L"
        if "C" in short_string:
            self._LawChaos = "C"
        if "G" in short_string:
            self._GoodEvil = "G"
        if "E" in short_string:
            self._GoodEvil = "E"
        if "N" in short_string:
            # "N" pins whichever axis the other letters leave open.
            if "NG" in short_string or "NE" in short_string:
                self._LawChaos = "N"
            if "LN" in short_string or "CN" in short_string:
                self._GoodEvil = "N"
            if short_string == "N":
                self._LawChaos = "N"
                self._GoodEvil = "N"

    @property
    def short_string(self):
        """Short code for the alignment; "NN" collapses to "N"."""
        string = self._LawChaos + self._GoodEvil
        if string == "NN":
            string = "N"
        return string

    @property
    def long_string(self):
        """Human-readable name, e.g. "Lawful Good" or "True Neutral"."""
        # Bug fix: this previously read `self.short_to_long`, which does not
        # exist (the class attribute is SHORT_TO_LONG), so every call raised
        # AttributeError.
        law_chaos = self.SHORT_TO_LONG[self._LawChaos]
        good_evil = self.SHORT_TO_LONG[self._GoodEvil]
        string = (law_chaos + " " + good_evil).strip()
        if string == "Neutral Neutral":
            string = "True Neutral"
        return string

    @property
    def is_lawful(self):
        return self._LawChaos == "L"

    @is_lawful.setter
    def is_lawful(self, value: bool):
        # Setting True fixes the axis; setting False clears it only if it
        # was lawful (other values are left untouched).
        if value:
            self._LawChaos = "L"
        elif self.is_lawful:
            self._LawChaos = ""
        else:
            pass

    @property
    def is_good(self):
        return self._GoodEvil == "G"

    @is_good.setter
    def is_good(self, value: bool):
        if value:
            self._GoodEvil = "G"
        elif self.is_good:
            self._GoodEvil = ""
        else:
            pass

    @property
    def is_chaotic(self):
        return self._LawChaos == "C"

    @is_chaotic.setter
    def is_chaotic(self, value: bool):
        if value:
            self._LawChaos = "C"
        elif self.is_chaotic:
            self._LawChaos = ""
        else:
            pass

    @property
    def is_evil(self):
        return self._GoodEvil == "E"

    @is_evil.setter
    def is_evil(self, value: bool):
        if value:
            self._GoodEvil = "E"
        elif self.is_evil:
            self._GoodEvil = ""
        else:
            pass

    @property
    def is_neutral_law_chaos(self):
        return self._LawChaos == "N"

    @is_neutral_law_chaos.setter
    def is_neutral_law_chaos(self, value: bool):
        if value:
            self._LawChaos = "N"
        elif self.is_neutral_law_chaos:
            self._LawChaos = ""
        else:
            pass

    @property
    def is_neutral_good_evil(self):
        return self._GoodEvil == "N"

    @is_neutral_good_evil.setter
    def is_neutral_good_evil(self, value: bool):
        if value:
            self._GoodEvil = "N"
        elif self.is_neutral_good_evil:
            self._GoodEvil = ""
        else:
            pass
| gpl-2.0 |
geekboxzone/lollipop_external_chromium_org_third_party_WebKit | Source/devtools/scripts/jsdoc-validator/build_jsdoc_validator_jar.py | 66 | 5099 | #!/usr/bin/python
import hashlib
import operator
import os
import shutil
import stat
import subprocess
import sys
import tempfile
def rel_to_abs(rel_path):
    """Resolve *rel_path* against the directory containing this script."""
    absolute = os.path.join(script_path, rel_path)
    return absolute
# Location of the JDK's bin/ directory, derived from JAVA_HOME. Left as ''
# when JAVA_HOME is unset, in which case javac/jar must be on PATH.
java_bin_path = os.getenv('JAVA_HOME', '')
if java_bin_path:
    java_bin_path = os.path.join(java_bin_path, 'bin')
# Entry-point class recorded in the jar manifest.
main_class = 'org.chromium.devtools.jsdoc.JsDocValidator'
# Build artifact and hash-bookkeeping file names (relative to this script).
jar_name = 'jsdoc-validator.jar'
hashes_name = 'hashes'
src_dir = 'src'
script_path = os.path.dirname(os.path.abspath(__file__))
# Closure compiler jar shipped alongside; referenced from the jar Class-Path.
closure_jar_relpath = os.path.join('..', 'closure', 'compiler.jar')
src_path = rel_to_abs(src_dir)
hashes_path = rel_to_abs(hashes_name)
def get_file_hash(file, blocksize=65536):
    """Return the SHA-256 hex digest of the contents of an open file object."""
    digest = hashlib.sha256()
    chunk = file.read(blocksize)
    while chunk:
        digest.update(chunk)
        chunk = file.read(blocksize)
    return digest.hexdigest()
def traverse(hasher, path):
    """Recursively fold directory layout and .java file digests into *hasher*.

    Directories contribute a "d <name>" record; regular .java files
    contribute "r <name> <size> <sha256>". Hidden directories are skipped.
    """
    abs_path = rel_to_abs(path)
    info = os.lstat(abs_path)
    # Normalize separators so hashes match across platforms.
    quoted_name = repr(path.replace('\\', '/'))
    if stat.S_ISDIR(info.st_mode) and not os.path.basename(path).startswith('.'):
        hasher.update('d ' + quoted_name + '\n')
        for child in sorted(os.listdir(abs_path)):
            traverse(hasher, os.path.join(path, child))
    elif stat.S_ISREG(info.st_mode) and path.endswith('.java'):
        hasher.update('r ' + quoted_name + ' ' + str(info.st_size) + ' ')
        with open(abs_path, 'Ur') as java_file:
            hasher.update(get_file_hash(java_file) + '\n')
def get_src_dir_hash(dir):
    """Return a SHA-256 hex digest summarizing the source tree at *dir*."""
    tree_hash = hashlib.sha256()
    traverse(tree_hash, dir)
    return tree_hash.hexdigest()
def get_actual_hashes():
    """Compute the current hashes of the jar artifact and the source tree.

    :returns: dict mapping file/dir name to its hash; a missing file is
        recorded as '0' so it always compares as "changed".
    """
    hashed_files = [(jar_name, True)]
    hashes = {}
    for (file_name, binary) in hashed_files:
        try:
            # Use a context manager: the original opened the file without
            # ever closing it, leaking the file handle.
            with open(file_name, 'rb' if binary else 'r') as hashed_file:
                hashes[file_name] = get_file_hash(hashed_file)
        except IOError:
            # File does not exist yet (e.g. first build).
            hashes[file_name] = '0'
    hashes[src_dir] = get_src_dir_hash(src_dir)
    return hashes
def get_expected_hashes():
    """Parse the hashes bookkeeping file into a {name: hash} dict.

    Each line has the form "<hash> <name>". :returns: the dict, or None
    when the file is missing or malformed (callers treat None as
    "everything changed").
    """
    try:
        expected = {}
        with open(hashes_path, 'r') as hashes_file:
            for line in hashes_file:
                # Split on the first space only: names may contain spaces.
                file_hash, name = line.split(' ', 1)
                expected[name.strip()] = file_hash.strip()
        return expected
    except (EnvironmentError, ValueError):
        # Narrowed from a bare `except:` which also swallowed
        # KeyboardInterrupt/SystemExit. EnvironmentError covers a missing
        # file; ValueError covers a malformed line.
        return None
def run_and_communicate(command, error_template):
    """Run *command* through the shell; on failure, report and exit.

    *error_template* must contain a %d placeholder for the return code.
    """
    proc = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True)
    proc.communicate()
    if proc.returncode:
        sys.stderr.write(error_template % proc.returncode + '\n')
        sys.exit(proc.returncode)
def build_artifacts():
    """Compile all .java sources under src/ and package jsdoc-validator.jar."""
    print 'Compiling...'
    java_files = []
    for root, dirs, files in sorted(os.walk(src_path)):
        for file_name in files:
            if file_name.endswith('.java'):
                java_files.append(os.path.join(root, file_name))
    # Compile into a throwaway directory; removed in the finally block.
    bin_path = tempfile.mkdtemp()
    # delete=False: the manifest must survive close() so the jar tool can
    # read it; it is removed manually below.
    manifest_file = tempfile.NamedTemporaryFile(mode='wt', delete=False)
    try:
        manifest_file.write('Class-Path: %s\n' % closure_jar_relpath)
        manifest_file.close()
        javac_path = os.path.join(java_bin_path, 'javac')
        javac_command = '%s -d %s -cp %s %s' % (javac_path, bin_path, rel_to_abs(closure_jar_relpath), ' '.join(java_files))
        run_and_communicate(javac_command, 'Error: javac returned %d')
        print 'Building jar...'
        artifact_path = rel_to_abs(jar_name)
        jar_path = os.path.join(java_bin_path, 'jar')
        # cvfme: create, verbose, output file, manifest file, entry class.
        jar_command = '%s cvfme %s %s %s -C %s .' % (jar_path, artifact_path, manifest_file.name, main_class, bin_path)
        run_and_communicate(jar_command, 'Error: jar returned %d')
    finally:
        os.remove(manifest_file.name)
        shutil.rmtree(bin_path, True)
def update_hashes():
print 'Updating hashes...'
with open(hashes_path, 'w') as file:
file.writelines(['%s %s\n' % (hash, name) for (name, hash) in get_actual_hashes().iteritems()])
def hashes_modified():
    """Return a list of (name, expected, actual) tuples for changed inputs.

    An empty list means nothing changed; a non-empty list forces a rebuild.
    """
    expected_hashes = get_expected_hashes()
    if not expected_hashes:
        # No (or unreadable) bookkeeping file: report a synthetic mismatch.
        return [('<no expected hashes>', 1, 0)]
    actual_hashes = get_actual_hashes()
    return [(name, expected, actual_hashes.get(name))
            for name, expected in expected_hashes.iteritems()
            if expected != actual_hashes.get(name)]
def help():
print 'usage: %s [option]' % os.path.basename(__file__)
print 'Options:'
print '--force-rebuild: Rebuild classes and jar even if there are no source file changes'
print '--no-rebuild: Do not rebuild jar, just update hashes'
def main():
no_rebuild = False
force_rebuild = False
if len(sys.argv) > 1:
if sys.argv[1] == '--help':
help()
return
no_rebuild = sys.argv[1] == '--no-rebuild'
force_rebuild = sys.argv[1] == '--force-rebuild'
if not hashes_modified() and not force_rebuild:
print 'No modifications found, rebuild not required.'
return
if not no_rebuild:
build_artifacts()
update_hashes()
print 'Done.'
if __name__ == '__main__':
main()
| bsd-3-clause |
jonnary/keystone | keystone/token/provider.py | 5 | 22802 | # Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Token provider interface."""
import abc
import base64
import datetime
import sys
import uuid
from oslo_config import cfg
from oslo_log import log
from oslo_utils import timeutils
import six
from keystone.common import cache
from keystone.common import dependency
from keystone.common import manager
from keystone import exception
from keystone.i18n import _, _LE
from keystone.models import token_model
from keystone import notifications
from keystone.token import persistence
from keystone.token import utils
CONF = cfg.CONF
LOG = log.getLogger(__name__)
# Memoization decorator scoped to the [token] cache configuration section.
MEMOIZE = cache.get_memoization_decorator(section='token')
# NOTE(morganfainberg): This is for compatibility in case someone was relying
# on the old location of the UnsupportedTokenVersionException for their code.
UnsupportedTokenVersionException = exception.UnsupportedTokenVersionException
# supported token versions
V2 = token_model.V2
V3 = token_model.V3
VERSIONS = token_model.VERSIONS
def base64_encode(s):
    """Encode *s* as unpadded URL-safe base64.

    Accepts text or bytes; text is encoded as UTF-8 first. This also makes
    the helper work on Python 3, where ``urlsafe_b64encode`` requires bytes
    and ``bytes.rstrip`` requires a bytes argument (the previous
    ``rstrip('=')`` raised TypeError there).
    """
    if not isinstance(s, bytes):
        s = s.encode('utf-8')
    return base64.urlsafe_b64encode(s).rstrip(b'=')
def random_urlsafe_str():
    """Generate a random URL-safe string (22 characters, no padding)."""
    # A uuid4 is 16 random bytes; its base64 form always ends in '==',
    # which is chopped off to save space.
    encoded = base64.urlsafe_b64encode(uuid.uuid4().bytes)
    return encoded[:-2]
def random_urlsafe_str_to_bytes(s):
    """Convert a string generated by ``random_urlsafe_str()`` to bytes."""
    # random_urlsafe_str() drops the trailing '==' padding; restore it
    # before decoding.
    padded = s + '=='
    return base64.urlsafe_b64decode(padded)
def default_expire_time():
    """Determine when a fresh token should expire.

    Expiration time varies based on configuration (see ``[token] expiration``).

    :returns: a naive UTC datetime.datetime object
    """
    return timeutils.utcnow() + datetime.timedelta(
        seconds=CONF.token.expiration)
def audit_info(parent_audit_id):
    """Build the audit data for a token.

    A fresh audit_id is always generated. When ``parent_audit_id`` is
    supplied it is appended, so it is always element index 1 of the
    resulting list; otherwise the list holds only the new audit_id.

    :param parent_audit_id: the audit of the original token in the chain
    :type parent_audit_id: str
    :returns: Keystone token audit data
    """
    audit_id = random_urlsafe_str()
    if parent_audit_id is None:
        return [audit_id]
    return [audit_id, parent_audit_id]
@dependency.provider('token_provider_api')
@dependency.requires('assignment_api', 'revoke_api')
class Manager(manager.Manager):
    """Default pivot point for the token provider backend.
    See :mod:`keystone.common.manager.Manager` for more details on how this
    dynamically calls the backend.
    """
    driver_namespace = 'keystone.token.provider'
    V2 = V2
    V3 = V3
    VERSIONS = VERSIONS
    INVALIDATE_PROJECT_TOKEN_PERSISTENCE = 'invalidate_project_tokens'
    INVALIDATE_USER_TOKEN_PERSISTENCE = 'invalidate_user_tokens'
    # Lazily created by the _persistence property; see that property's note.
    _persistence_manager = None
    def __init__(self):
        """Load the configured token provider driver and wire callbacks."""
        super(Manager, self).__init__(CONF.token.provider)
        self._register_callback_listeners()
    def _register_callback_listeners(self):
        """Subscribe token-invalidation handlers to notification events."""
        # This is used by the @dependency.provider decorator to register the
        # provider (token_provider_api) manager to listen for trust deletions.
        callbacks = {
            notifications.ACTIONS.deleted: [
                ['OS-TRUST:trust', self._trust_deleted_event_callback],
                ['user', self._delete_user_tokens_callback],
                ['domain', self._delete_domain_tokens_callback],
            ],
            notifications.ACTIONS.disabled: [
                ['user', self._delete_user_tokens_callback],
                ['domain', self._delete_domain_tokens_callback],
                ['project', self._delete_project_tokens_callback],
            ],
            notifications.ACTIONS.internal: [
                [notifications.INVALIDATE_USER_TOKEN_PERSISTENCE,
                 self._delete_user_tokens_callback],
                [notifications.INVALIDATE_USER_PROJECT_TOKEN_PERSISTENCE,
                 self._delete_user_project_tokens_callback],
                [notifications.INVALIDATE_USER_OAUTH_CONSUMER_TOKENS,
                 self._delete_user_oauth_consumer_tokens_callback],
            ]
        }
        for event, cb_info in callbacks.items():
            for resource_type, callback_fns in cb_info:
                notifications.register_event_callback(event, resource_type,
                                                      callback_fns)
    @property
    def _needs_persistence(self):
        # True when the configured driver (e.g. PKI/UUID, not Fernet)
        # requires tokens to be stored in a backend.
        return self.driver.needs_persistence()
    @property
    def _persistence(self):
        # NOTE(morganfainberg): This should not be handled via __init__ to
        # avoid dependency injection oddities circular dependencies (where
        # the provider manager requires the token persistence manager, which
        # requires the token provider manager).
        if self._persistence_manager is None:
            self._persistence_manager = persistence.PersistenceManager()
        return self._persistence_manager
    def _create_token(self, token_id, token_data):
        """Persist token data, tolerating a concurrent identical create."""
        try:
            if isinstance(token_data['expires'], six.string_types):
                token_data['expires'] = timeutils.normalize_time(
                    timeutils.parse_isotime(token_data['expires']))
            self._persistence.create_token(token_id, token_data)
        except Exception:
            exc_info = sys.exc_info()
            # an identical token may have been created already.
            # if so, return the token_data as it is also identical
            try:
                self._persistence.get_token(token_id)
            except exception.TokenNotFound:
                six.reraise(*exc_info)
    def validate_token(self, token_id, belongs_to=None):
        """Validate a token of either version and return its data.

        :raises: keystone.exception.TokenNotFound,
            keystone.exception.Unauthorized
        """
        unique_id = utils.generate_unique_id(token_id)
        # NOTE(morganfainberg): Ensure we never use the long-form token_id
        # (PKI) as part of the cache_key.
        token = self._validate_token(unique_id)
        self._token_belongs_to(token, belongs_to)
        self._is_valid_token(token)
        return token
    def check_revocation_v2(self, token):
        """Raise TokenNotFound if the given v2 token has been revoked."""
        try:
            token_data = token['access']
        except KeyError:
            raise exception.TokenNotFound(_('Failed to validate token'))
        token_values = self.revoke_api.model.build_token_values_v2(
            token_data, CONF.identity.default_domain_id)
        self.revoke_api.check_token(token_values)
    def validate_v2_token(self, token_id, belongs_to=None):
        """Validate a v2 token and return its data (id restored to long form)."""
        unique_id = utils.generate_unique_id(token_id)
        if self._needs_persistence:
            # NOTE(morganfainberg): Ensure we never use the long-form token_id
            # (PKI) as part of the cache_key.
            token_ref = self._persistence.get_token(unique_id)
        else:
            token_ref = token_id
        token = self._validate_v2_token(token_ref)
        token['access']['token']['id'] = token_id
        self._token_belongs_to(token, belongs_to)
        self._is_valid_token(token)
        return token
    def check_revocation_v3(self, token):
        """Raise TokenNotFound if the given v3 token has been revoked."""
        try:
            token_data = token['token']
        except KeyError:
            raise exception.TokenNotFound(_('Failed to validate token'))
        token_values = self.revoke_api.model.build_token_values(token_data)
        self.revoke_api.check_token(token_values)
    def check_revocation(self, token):
        """Dispatch revocation checking based on the token's version."""
        version = self.driver.get_token_version(token)
        if version == V2:
            return self.check_revocation_v2(token)
        else:
            return self.check_revocation_v3(token)
    def validate_v3_token(self, token_id):
        """Validate a v3 token and return its data."""
        unique_id = utils.generate_unique_id(token_id)
        # NOTE(lbragstad): Only go to persistent storage if we have a token to
        # fetch from the backend. If the Fernet token provider is being used
        # this step isn't necessary. The Fernet token reference is persisted in
        # the token_id, so in this case set the token_ref as the identifier of
        # the token.
        if not self._needs_persistence:
            token_ref = token_id
        else:
            # NOTE(morganfainberg): Ensure we never use the long-form token_id
            # (PKI) as part of the cache_key.
            token_ref = self._persistence.get_token(unique_id)
        token = self._validate_v3_token(token_ref)
        self._is_valid_token(token)
        return token
    @MEMOIZE
    def _validate_token(self, token_id):
        # Version-agnostic validation; memoized on the short-form id.
        if not self._needs_persistence:
            return self.driver.validate_v3_token(token_id)
        token_ref = self._persistence.get_token(token_id)
        version = self.driver.get_token_version(token_ref)
        if version == self.V3:
            return self.driver.validate_v3_token(token_ref)
        elif version == self.V2:
            return self.driver.validate_v2_token(token_ref)
        raise exception.UnsupportedTokenVersionException()
    @MEMOIZE
    def _validate_v2_token(self, token_id):
        return self.driver.validate_v2_token(token_id)
    @MEMOIZE
    def _validate_v3_token(self, token_id):
        return self.driver.validate_v3_token(token_id)
    def _is_valid_token(self, token):
        """Verify the token is valid format and has not expired."""
        current_time = timeutils.normalize_time(timeutils.utcnow())
        try:
            # Get the data we need from the correct location (V2 and V3 tokens
            # differ in structure, Try V3 first, fall back to V2 second)
            token_data = token.get('token', token.get('access'))
            expires_at = token_data.get('expires_at',
                                        token_data.get('expires'))
            if not expires_at:
                expires_at = token_data['token']['expires']
            expiry = timeutils.normalize_time(
                timeutils.parse_isotime(expires_at))
        except Exception:
            LOG.exception(_LE('Unexpected error or malformed token '
                              'determining token expiry: %s'), token)
            raise exception.TokenNotFound(_('Failed to validate token'))
        if current_time < expiry:
            self.check_revocation(token)
            # Token has not expired and has not been revoked.
            return None
        else:
            raise exception.TokenNotFound(_('Failed to validate token'))
    def _token_belongs_to(self, token, belongs_to):
        """Check if the token belongs to the right tenant.
        This is only used on v2 tokens. The structural validity of the token
        will have already been checked before this method is called.
        """
        if belongs_to:
            token_data = token['access']['token']
            if ('tenant' not in token_data or
                    token_data['tenant']['id'] != belongs_to):
                raise exception.Unauthorized()
    def issue_v2_token(self, token_ref, roles_ref=None, catalog_ref=None):
        """Issue a v2 token via the driver and persist it if required."""
        token_id, token_data = self.driver.issue_v2_token(
            token_ref, roles_ref, catalog_ref)
        if self._needs_persistence:
            data = dict(key=token_id,
                        id=token_id,
                        expires=token_data['access']['token']['expires'],
                        user=token_ref['user'],
                        tenant=token_ref['tenant'],
                        metadata=token_ref['metadata'],
                        token_data=token_data,
                        bind=token_ref.get('bind'),
                        trust_id=token_ref['metadata'].get('trust_id'),
                        token_version=self.V2)
            self._create_token(token_id, data)
        return token_id, token_data
    def issue_v3_token(self, user_id, method_names, expires_at=None,
                       project_id=None, domain_id=None, auth_context=None,
                       trust=None, metadata_ref=None, include_catalog=True,
                       parent_audit_id=None):
        """Issue a v3 token via the driver and persist it if required."""
        token_id, token_data = self.driver.issue_v3_token(
            user_id, method_names, expires_at, project_id, domain_id,
            auth_context, trust, metadata_ref, include_catalog,
            parent_audit_id)
        if metadata_ref is None:
            metadata_ref = {}
        if 'project' in token_data['token']:
            # project-scoped token, fill in the v2 token data
            # all we care are the role IDs
            # FIXME(gyee): is there really a need to store roles in metadata?
            role_ids = [r['id'] for r in token_data['token']['roles']]
            metadata_ref = {'roles': role_ids}
        if trust:
            metadata_ref.setdefault('trust_id', trust['id'])
            metadata_ref.setdefault('trustee_user_id',
                                    trust['trustee_user_id'])
        data = dict(key=token_id,
                    id=token_id,
                    expires=token_data['token']['expires_at'],
                    user=token_data['token']['user'],
                    tenant=token_data['token'].get('project'),
                    metadata=metadata_ref,
                    token_data=token_data,
                    trust_id=trust['id'] if trust else None,
                    token_version=self.V3)
        if self._needs_persistence:
            self._create_token(token_id, data)
        return token_id, token_data
    def invalidate_individual_token_cache(self, token_id):
        """Drop the cached validation results for a single token."""
        # NOTE(morganfainberg): invalidate takes the exact same arguments as
        # the normal method, this means we need to pass "self" in (which gets
        # stripped off).
        # FIXME(morganfainberg): Does this cache actually need to be
        # invalidated? We maintain a cached revocation list, which should be
        # consulted before accepting a token as valid. For now we will
        # do the explicit individual token invalidation.
        self._validate_token.invalidate(self, token_id)
        self._validate_v2_token.invalidate(self, token_id)
        self._validate_v3_token.invalidate(self, token_id)
    def revoke_token(self, token_id, revoke_chain=False):
        """Revoke a token (or its whole audit chain) and purge persistence."""
        revoke_by_expires = False
        project_id = None
        domain_id = None
        token_ref = token_model.KeystoneToken(
            token_id=token_id,
            token_data=self.validate_token(token_id))
        user_id = token_ref.user_id
        expires_at = token_ref.expires
        audit_id = token_ref.audit_id
        audit_chain_id = token_ref.audit_chain_id
        if token_ref.project_scoped:
            project_id = token_ref.project_id
        if token_ref.domain_scoped:
            domain_id = token_ref.domain_id
        if audit_id is None and not revoke_chain:
            LOG.debug('Received token with no audit_id.')
            revoke_by_expires = True
        if audit_chain_id is None and revoke_chain:
            LOG.debug('Received token with no audit_chain_id.')
            revoke_by_expires = True
        if revoke_by_expires:
            self.revoke_api.revoke_by_expiration(user_id, expires_at,
                                                 project_id=project_id,
                                                 domain_id=domain_id)
        elif revoke_chain:
            self.revoke_api.revoke_by_audit_chain_id(audit_chain_id,
                                                     project_id=project_id,
                                                     domain_id=domain_id)
        else:
            self.revoke_api.revoke_by_audit_id(audit_id)
        if CONF.token.revoke_by_id and self._needs_persistence:
            self._persistence.delete_token(token_id=token_id)
    def list_revoked_tokens(self):
        """Return the persisted revocation list."""
        return self._persistence.list_revoked_tokens()
    def _trust_deleted_event_callback(self, service, resource_type, operation,
                                      payload):
        # Purge tokens issued through a trust when that trust is deleted.
        if CONF.token.revoke_by_id:
            trust_id = payload['resource_info']
            trust = self.trust_api.get_trust(trust_id, deleted=True)
            self._persistence.delete_tokens(user_id=trust['trustor_user_id'],
                                            trust_id=trust_id)
    def _delete_user_tokens_callback(self, service, resource_type, operation,
                                     payload):
        if CONF.token.revoke_by_id:
            user_id = payload['resource_info']
            self._persistence.delete_tokens_for_user(user_id)
    def _delete_domain_tokens_callback(self, service, resource_type,
                                       operation, payload):
        if CONF.token.revoke_by_id:
            domain_id = payload['resource_info']
            self._persistence.delete_tokens_for_domain(domain_id=domain_id)
    def _delete_user_project_tokens_callback(self, service, resource_type,
                                             operation, payload):
        if CONF.token.revoke_by_id:
            user_id = payload['resource_info']['user_id']
            project_id = payload['resource_info']['project_id']
            self._persistence.delete_tokens_for_user(user_id=user_id,
                                                     project_id=project_id)
    def _delete_project_tokens_callback(self, service, resource_type,
                                        operation, payload):
        if CONF.token.revoke_by_id:
            project_id = payload['resource_info']
            self._persistence.delete_tokens_for_users(
                self.assignment_api.list_user_ids_for_project(project_id),
                project_id=project_id)
    def _delete_user_oauth_consumer_tokens_callback(self, service,
                                                    resource_type, operation,
                                                    payload):
        if CONF.token.revoke_by_id:
            user_id = payload['resource_info']['user_id']
            consumer_id = payload['resource_info']['consumer_id']
            self._persistence.delete_tokens(user_id=user_id,
                                            consumer_id=consumer_id)
@six.add_metaclass(abc.ABCMeta)
class Provider(object):
    """Interface description for a Token provider."""
    @abc.abstractmethod
    def needs_persistence(self):
        """Determine if the token should be persisted.
        If the token provider requires that the token be persisted to a
        backend this should return True, otherwise return False.
        """
        raise exception.NotImplemented()  # pragma: no cover
    @abc.abstractmethod
    def get_token_version(self, token_data):
        """Return the version of the given token data.
        If the given token data is unrecognizable,
        UnsupportedTokenVersionException is raised.
        :param token_data: token_data
        :type token_data: dict
        :returns: token version string
        :raises: keystone.token.provider.UnsupportedTokenVersionException
        """
        raise exception.NotImplemented()  # pragma: no cover
    @abc.abstractmethod
    def issue_v2_token(self, token_ref, roles_ref=None, catalog_ref=None):
        """Issue a V2 token.
        :param token_ref: token data to generate token from
        :type token_ref: dict
        :param roles_ref: optional roles list
        :type roles_ref: dict
        :param catalog_ref: optional catalog information
        :type catalog_ref: dict
        :returns: (token_id, token_data)
        """
        raise exception.NotImplemented()  # pragma: no cover
    @abc.abstractmethod
    def issue_v3_token(self, user_id, method_names, expires_at=None,
                       project_id=None, domain_id=None, auth_context=None,
                       trust=None, metadata_ref=None, include_catalog=True,
                       parent_audit_id=None):
        """Issue a V3 Token.
        :param user_id: identity of the user
        :type user_id: string
        :param method_names: names of authentication methods
        :type method_names: list
        :param expires_at: optional time the token will expire
        :type expires_at: string
        :param project_id: optional project identity
        :type project_id: string
        :param domain_id: optional domain identity
        :type domain_id: string
        :param auth_context: optional context from the authorization plugins
        :type auth_context: dict
        :param trust: optional trust reference
        :type trust: dict
        :param metadata_ref: optional metadata reference
        :type metadata_ref: dict
        :param include_catalog: optional, include the catalog in token data
        :type include_catalog: boolean
        :param parent_audit_id: optional, the audit id of the parent token
        :type parent_audit_id: string
        :returns: (token_id, token_data)
        """
        raise exception.NotImplemented()  # pragma: no cover
    @abc.abstractmethod
    def validate_v2_token(self, token_ref):
        """Validate the given V2 token and return the token data.
        Must raise Unauthorized exception if unable to validate token.
        :param token_ref: the token reference
        :type token_ref: dict
        :returns: token data
        :raises: keystone.exception.TokenNotFound
        """
        raise exception.NotImplemented()  # pragma: no cover
    @abc.abstractmethod
    def validate_v3_token(self, token_ref):
        """Validate the given V3 token and return the token_data.
        :param token_ref: the token reference
        :type token_ref: dict
        :returns: token data
        :raises: keystone.exception.TokenNotFound
        """
        raise exception.NotImplemented()  # pragma: no cover
    @abc.abstractmethod
    def _get_token_id(self, token_data):
        """Generate the token_id based upon the data in token_data.
        :param token_data: token information
        :type token_data: dict
        :returns: token identifier
        """
        raise exception.NotImplemented()  # pragma: no cover
| apache-2.0 |
anryko/ansible | test/units/module_utils/facts/hardware/test_linux.py | 93 | 7748 | # This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from units.compat import unittest
from units.compat.mock import Mock, patch
from ansible.module_utils.facts import timeout
from ansible.module_utils.facts.hardware import linux
from . linux_data import LSBLK_OUTPUT, LSBLK_OUTPUT_2, LSBLK_UUIDS, MTAB, MTAB_ENTRIES, BIND_MOUNTS, STATVFS_INFO, UDEVADM_UUID, UDEVADM_OUTPUT
# Load the canned `findmnt` output fixture used by the bind-mount tests.
with open(os.path.join(os.path.dirname(__file__), '../fixtures/findmount_output.txt')) as f:
    FINDMNT_OUTPUT = f.read()
# NOTE(review): GET_MOUNT_SIZE appears unused in this module — candidate for
# removal; mock_get_mount_size below serves from STATVFS_INFO instead.
GET_MOUNT_SIZE = {}
def mock_get_mount_size(mountpoint):
    """Test double for get_mount_size(); serves canned statvfs fixtures."""
    fallback = {}
    return STATVFS_INFO.get(mountpoint, fallback)
class TestFactsLinuxHardwareGetMountFacts(unittest.TestCase):
    """Unit tests for LinuxHardware mount-fact collection helpers."""
    # FIXME: mock.patch instead
    def setUp(self):
        timeout.GATHER_TIMEOUT = 10
    def tearDown(self):
        timeout.GATHER_TIMEOUT = None
    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._mtab_entries', return_value=MTAB_ENTRIES)
    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._find_bind_mounts', return_value=BIND_MOUNTS)
    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._lsblk_uuid', return_value=LSBLK_UUIDS)
    @patch('ansible.module_utils.facts.hardware.linux.get_mount_size', side_effect=mock_get_mount_size)
    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._udevadm_uuid', return_value=UDEVADM_UUID)
    # NOTE(review): decorator mocks are injected bottom-up, so the parameter
    # names below are in the reverse of the actual injection order (the first
    # argument is really the _udevadm_uuid mock). Harmless here because the
    # mocks are never used individually, but the names are misleading.
    def test_get_mount_facts(self,
                             mock_get_mount_size,
                             mock_lsblk_uuid,
                             mock_find_bind_mounts,
                             mock_mtab_entries,
                             mock_udevadm_uuid):
        module = Mock()
        # Returns a LinuxHardware-ish
        lh = linux.LinuxHardware(module=module, load_on_init=False)
        # Nothing returned, just self.facts modified as a side effect
        mount_facts = lh.get_mount_facts()
        self.assertIsInstance(mount_facts, dict)
        self.assertIn('mounts', mount_facts)
        self.assertIsInstance(mount_facts['mounts'], list)
        self.assertIsInstance(mount_facts['mounts'][0], dict)
        home_expected = {'block_available': 1001578731,
                         'block_size': 4096,
                         'block_total': 105871006,
                         'block_used': 5713133,
                         'device': '/dev/mapper/fedora_dhcp129--186-home',
                         'fstype': 'ext4',
                         'inode_available': 26860880,
                         'inode_total': 26902528,
                         'inode_used': 41648,
                         'mount': '/home',
                         'options': 'rw,seclabel,relatime,data=ordered',
                         'size_available': 410246647808,
                         'size_total': 433647640576,
                         'uuid': 'N/A'}
        home_info = [x for x in mount_facts['mounts'] if x['mount'] == '/home'][0]
        self.maxDiff = 4096
        self.assertDictEqual(home_info, home_expected)
    @patch('ansible.module_utils.facts.hardware.linux.get_file_content', return_value=MTAB)
    def test_get_mtab_entries(self, mock_get_file_content):
        module = Mock()
        lh = linux.LinuxHardware(module=module, load_on_init=False)
        mtab_entries = lh._mtab_entries()
        self.assertIsInstance(mtab_entries, list)
        self.assertIsInstance(mtab_entries[0], list)
        self.assertEqual(len(mtab_entries), 38)
    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._run_findmnt', return_value=(0, FINDMNT_OUTPUT, ''))
    def test_find_bind_mounts(self, mock_run_findmnt):
        module = Mock()
        lh = linux.LinuxHardware(module=module, load_on_init=False)
        bind_mounts = lh._find_bind_mounts()
        # If bind_mounts becomes another seq type, feel free to change
        self.assertIsInstance(bind_mounts, set)
        self.assertEqual(len(bind_mounts), 1)
        self.assertIn('/not/a/real/bind_mount', bind_mounts)
    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._run_findmnt', return_value=(37, '', ''))
    def test_find_bind_mounts_non_zero(self, mock_run_findmnt):
        # A failing findmnt run should yield no bind mounts, not an error.
        module = Mock()
        lh = linux.LinuxHardware(module=module, load_on_init=False)
        bind_mounts = lh._find_bind_mounts()
        self.assertIsInstance(bind_mounts, set)
        self.assertEqual(len(bind_mounts), 0)
    def test_find_bind_mounts_no_findmnts(self):
        # findmnt binary missing entirely: also no bind mounts.
        module = Mock()
        module.get_bin_path = Mock(return_value=None)
        lh = linux.LinuxHardware(module=module, load_on_init=False)
        bind_mounts = lh._find_bind_mounts()
        self.assertIsInstance(bind_mounts, set)
        self.assertEqual(len(bind_mounts), 0)
    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._run_lsblk', return_value=(0, LSBLK_OUTPUT, ''))
    def test_lsblk_uuid(self, mock_run_lsblk):
        module = Mock()
        lh = linux.LinuxHardware(module=module, load_on_init=False)
        lsblk_uuids = lh._lsblk_uuid()
        self.assertIsInstance(lsblk_uuids, dict)
        self.assertIn(b'/dev/loop9', lsblk_uuids)
        self.assertIn(b'/dev/sda1', lsblk_uuids)
        self.assertEqual(lsblk_uuids[b'/dev/sda1'], b'32caaec3-ef40-4691-a3b6-438c3f9bc1c0')
    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._run_lsblk', return_value=(37, LSBLK_OUTPUT, ''))
    def test_lsblk_uuid_non_zero(self, mock_run_lsblk):
        module = Mock()
        lh = linux.LinuxHardware(module=module, load_on_init=False)
        lsblk_uuids = lh._lsblk_uuid()
        self.assertIsInstance(lsblk_uuids, dict)
        self.assertEqual(len(lsblk_uuids), 0)
    def test_lsblk_uuid_no_lsblk(self):
        module = Mock()
        module.get_bin_path = Mock(return_value=None)
        lh = linux.LinuxHardware(module=module, load_on_init=False)
        lsblk_uuids = lh._lsblk_uuid()
        self.assertIsInstance(lsblk_uuids, dict)
        self.assertEqual(len(lsblk_uuids), 0)
    @patch('ansible.module_utils.facts.hardware.linux.LinuxHardware._run_lsblk', return_value=(0, LSBLK_OUTPUT_2, ''))
    def test_lsblk_uuid_dev_with_space_in_name(self, mock_run_lsblk):
        module = Mock()
        lh = linux.LinuxHardware(module=module, load_on_init=False)
        lsblk_uuids = lh._lsblk_uuid()
        self.assertIsInstance(lsblk_uuids, dict)
        self.assertIn(b'/dev/loop0', lsblk_uuids)
        self.assertIn(b'/dev/sda1', lsblk_uuids)
        self.assertEqual(lsblk_uuids[b'/dev/mapper/an-example-mapper with a space in the name'], b'84639acb-013f-4d2f-9392-526a572b4373')
        self.assertEqual(lsblk_uuids[b'/dev/sda1'], b'32caaec3-ef40-4691-a3b6-438c3f9bc1c0')
    def test_udevadm_uuid(self):
        module = Mock()
        module.run_command = Mock(return_value=(0, UDEVADM_OUTPUT, ''))  # (rc, out, err)
        lh = linux.LinuxHardware(module=module, load_on_init=False)
        udevadm_uuid = lh._udevadm_uuid('mock_device')
        self.assertEqual(udevadm_uuid, '57b1a3e7-9019-4747-9809-7ec52bba9179')
| gpl-3.0 |
PJB3005/MoMMI | MoMMI/Modules/chance.py | 1 | 3714 | import asyncio
import random
from typing import Match
from discord import Message
from MoMMI import command, MChannel
@command("pick", r"(?:pick|choose)\s*\((.*?)\)")
async def pick_command(channel: MChannel, match: Match, message: Message) -> None:
    """Pick one option at random from a comma-separated list."""
    options = [option.strip() for option in match.group(1).split(",")]
    if len(options) < 2:
        await channel.send("You gotta provide at least 2 options.")
        return
    winner = random.choice(options)
    await channel.send(f"**{winner}**")
@command("roll", r"(\d+)d(\d+)(?:\+(\d+))?")
async def roll_command(channel: MChannel, match: Match, message: Message) -> None:
    """Roll XdY(+Z) dice and report each individual die plus the total."""
    count = int(match.group(1))
    if count > 100:
        await channel.send("Ok look dude. A minute or two after this dice command got implemented bobda ran a god damn 10000000000000000000000000000d10. Now because it has to ITERATE those dice and 10000000000000000000000000000 is a giant fucking number, that locked up MoMMI completely because no amount of asyncio is gonna save this madness. Thank god for SIGKILL. THEN I got pinged by Intigracy telling me MoMMI locked up. *sigh*")
        return
    sides = int(match.group(2))
    if sides < 1:
        # BUG FIX: the regex accepts e.g. "1d0", and random.randint(1, 0)
        # raises ValueError, crashing the handler. Reject it politely instead.
        await channel.send("I can't roll a die with less than 1 side.")
        return
    rolls = [random.randint(1, sides) for _ in range(count)]
    total = sum(rolls)
    # Build with join instead of repeated += (quadratic string concat).
    result = "Results: " + ", ".join(str(roll) for roll in rolls)
    mod = match.group(3)
    if mod is not None:
        result += f" + {mod}"
        total += int(mod)
    result += f" = {total}"
    await channel.send(result)
@command("rand", r"rand\s*(-?\d+)\s*(-?\d+)")
async def rand_command(channel: MChannel, match: Match, message: Message) -> None:
    """Send a random integer in the inclusive range given by the two numbers."""
    low = int(match.group(1))
    high = int(match.group(2))
    if low > high:
        # BUG FIX: random.randint raises ValueError for a reversed range
        # (e.g. "rand 10 1"), crashing the handler. Treat it as "rand 1 10".
        low, high = high, low
    await channel.send(str(random.randint(low, high)))
@command("magic8ball", r"(?:magic|magic8ball)")
async def magic8ball_command(channel: MChannel, match: Match, message: Message) -> None:
    """Answer with a classic Magic 8-Ball response, chosen at random."""
    answers = (
        "It is certain",
        "It is decidedly so",
        "Without a doubt",
        "Yes, definitely",
        "You may rely on it",
        "As I see it, yes",
        "Most likely",
        "Outlook: Positive",
        "Yes",
        "Signs point to: Yes",
        "Reply hazy, try again",
        "Ask again later",
        "Better to not tell you right now",
        "Cannot predict now",
        "Concentrate, then ask again",
        "Do not count on it",
        "My reply is: no",
        "My sources say: no",
        "Outlook: Negative",
        "Very doubtful",
    )
    answer = random.choice(answers)
    await channel.send(answer)
async def load(loop: asyncio.AbstractEventLoop) -> None:
    """Register the help topics for the dice, magic8ball and pick commands."""
    from MoMMI.Modules.help import register_help

    # Topic -> help text; dict order (insertion order) preserves the original
    # registration order.
    help_topics = {
        "dice": """The party enters the AI upload.
The room's power systems are completely off. At the back of the room is a hole into the core, molten out of the reinforced wall.
*I walk up to the room's APC and see if the APC still works.*
Everybody roll a dexterity saving throw.
*@MoMMI 1d20+0*
*Results: 1 = 1*""",
        "magic8ball": """Unable to make important project decisions responsibly?
Need some reliable help from our lord and saviour RNGesus?
Simple, just run @MoMMI magic 'Do I delete the Discord server?' and let NT's latest proven MoMMI Random Number Generator Technology™ decide for you.
*Nanotrasen is not liable for any damages caused - material, bodily or psychologically - as a result of poor decision making as a result of the responses from this feature.*""",
        "pick": """Man can you believe this? People actually want to do *fair* 50/50 picks between things? Kids these days.
Fine, just run @MoMMI pick(a,b,c) with as many comma separated values as you need. Normies.""",
    }
    for topic, text in help_topics.items():
        register_help(__name__, topic, text)
| mit |
kevinthesun/mxnet | example/speech_recognition/stt_layer_fc.py | 52 | 6097 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import mxnet as mx
from stt_layer_batchnorm import batchnorm
def fc(net,
       num_hidden,
       act_type,
       weight=None,
       bias=None,
       no_bias=False,
       name=None
       ):
    """Append a fully connected layer, optionally followed by an activation.

    net: input symbol. num_hidden: layer width. act_type: activation name for
    mx.sym.Activation, or None to skip the activation. weight/bias: optional
    explicit parameter symbols (omitted arguments let MXNet create its own).
    no_bias: when True the layer has no bias term, so any ``bias`` symbol is
    ignored. name: symbol name; the activation is named "<name>_activation".
    Returns the resulting symbol.
    """
    kwargs = dict(data=net, num_hidden=num_hidden, no_bias=no_bias, name=name)
    if weight is not None:
        kwargs['weight'] = weight
    # A bias symbol is only meaningful when the layer actually has a bias term.
    if bias is not None and not no_bias:
        kwargs['bias'] = bias
    net = mx.sym.FullyConnected(**kwargs)
    # activation
    if act_type is not None:
        net = mx.sym.Activation(data=net, act_type=act_type, name="%s_activation" % name)
    return net
def sequence_fc(net,
                seq_len,
                num_layer,
                prefix,
                num_hidden_list=[],
                act_type_list=[],
                is_batchnorm=False,
                dropout_rate=0,
                ):
    """Apply a stack of fully connected layers to every time step of a sequence.

    net: either a single symbol of shape (batch, seq_len, feature) — which is
    sliced along axis 1 into per-step symbols — or a list of ``seq_len``
    symbols, one per time step. The same weight (and bias / batchnorm
    parameter) symbols are shared across all time steps, so parameters are
    tied over time. num_hidden_list / act_type_list give the width and
    activation of each of the ``num_layer`` layers and must both have length
    ``num_layer``. When is_batchnorm is True each fc layer is created without
    bias and followed by batchnorm then activation; dropout_rate > 0 inserts
    dropout before every fc layer. Returns a list of per-time-step output
    symbols (or the input unchanged when num_layer == 0).

    NOTE(review): the mutable default arguments ([]) are shared across calls;
    safe only because they are never mutated here — confirm before changing.
    """
    if num_layer == len(num_hidden_list) == len(act_type_list):
        if num_layer > 0:
            # Shared (time-tied) parameter symbols, one set per layer.
            weight_list = []
            bias_list = []
            for layer_index in range(num_layer):
                weight_list.append(mx.sym.Variable(name='%s_sequence_fc%d_weight' % (prefix, layer_index)))
                # if you use batchnorm bias do not have any effect
                if not is_batchnorm:
                    bias_list.append(mx.sym.Variable(name='%s_sequence_fc%d_bias' % (prefix, layer_index)))
            # batch normalization parameters
            gamma_list = []
            beta_list = []
            if is_batchnorm:
                for layer_index in range(num_layer):
                    gamma_list.append(mx.sym.Variable(name='%s_sequence_fc%d_gamma' % (prefix, layer_index)))
                    beta_list.append(mx.sym.Variable(name='%s_sequence_fc%d_beta' % (prefix, layer_index)))
            # batch normalization parameters ends
            # Normalize the input into a list of per-time-step symbols.
            if type(net) is mx.symbol.Symbol:
                net = mx.sym.SliceChannel(data=net, num_outputs=seq_len, axis=1, squeeze_axis=1)
            elif type(net) is list:
                for net_index, one_net in enumerate(net):
                    if type(one_net) is not mx.symbol.Symbol:
                        raise Exception('%d th elements of the net should be mx.symbol.Symbol' % net_index)
            else:
                raise Exception('type of net should be whether mx.symbol.Symbol or list of mx.symbol.Symbol')
            hidden_all = []
            for seq_index in range(seq_len):
                hidden = net[seq_index]
                for layer_index in range(num_layer):
                    if dropout_rate > 0:
                        hidden = mx.sym.Dropout(data=hidden, p=dropout_rate)
                    if is_batchnorm:
                        # fc without bias, then batchnorm, then activation.
                        hidden = fc(net=hidden,
                                    num_hidden=num_hidden_list[layer_index],
                                    act_type=None,
                                    weight=weight_list[layer_index],
                                    no_bias=is_batchnorm,
                                    name="%s_t%d_l%d_fc" % (prefix, seq_index, layer_index)
                                    )
                        # NOTE(review): the original comment here said "last
                        # layer doesn't have batchnorm", but batchnorm is in
                        # fact applied unconditionally in this branch —
                        # confirm which behavior is intended.
                        hidden = batchnorm(net=hidden,
                                           gamma=gamma_list[layer_index],
                                           beta=beta_list[layer_index],
                                           name="%s_t%d_l%d_batchnorm" % (prefix, seq_index, layer_index))
                        hidden = mx.sym.Activation(data=hidden, act_type=act_type_list[layer_index],
                                                   name="%s_t%d_l%d_activation" % (prefix, seq_index, layer_index))
                    else:
                        hidden = fc(net=hidden,
                                    num_hidden=num_hidden_list[layer_index],
                                    act_type=act_type_list[layer_index],
                                    weight=weight_list[layer_index],
                                    bias=bias_list[layer_index]
                                    )
                hidden_all.append(hidden)
            net = hidden_all
        return net
    else:
        raise Exception("length doesn't met - num_layer:",
                        num_layer, ",len(num_hidden_list):",
                        len(num_hidden_list),
                        ",len(act_type_list):",
                        len(act_type_list)
                        )
| apache-2.0 |
ntuecon/server | pyenv/Lib/site-packages/django/templatetags/cache.py | 471 | 3389 | from __future__ import unicode_literals
from django.core.cache import InvalidCacheBackendError, caches
from django.core.cache.utils import make_template_fragment_key
from django.template import (
Library, Node, TemplateSyntaxError, VariableDoesNotExist,
)
register = Library()
class CacheNode(Node):
    """Template node that caches the rendered output of its nodelist.

    Created by the ``{% cache %}`` tag. The cache key is derived from the
    fragment name plus the resolved vary-on values, so each unique argument
    combination gets its own cache entry.
    """
    def __init__(self, nodelist, expire_time_var, fragment_name, vary_on, cache_name):
        self.nodelist = nodelist            # nodes between {% cache %} and {% endcache %}
        self.expire_time_var = expire_time_var  # FilterExpression for the timeout
        self.fragment_name = fragment_name  # literal fragment name (not a variable)
        self.vary_on = vary_on              # FilterExpressions whose values vary the key
        self.cache_name = cache_name        # optional FilterExpression naming the backend

    def render(self, context):
        """Return the cached fragment, rendering and storing it on a miss.

        Raises TemplateSyntaxError for an unresolvable/non-integer timeout or
        an invalid explicit cache name.
        """
        try:
            expire_time = self.expire_time_var.resolve(context)
        except VariableDoesNotExist:
            raise TemplateSyntaxError('"cache" tag got an unknown variable: %r' % self.expire_time_var.var)
        try:
            expire_time = int(expire_time)
        except (ValueError, TypeError):
            raise TemplateSyntaxError('"cache" tag got a non-integer timeout value: %r' % expire_time)
        if self.cache_name:
            # An explicit backend was requested; a bad name is a hard error.
            try:
                cache_name = self.cache_name.resolve(context)
            except VariableDoesNotExist:
                raise TemplateSyntaxError('"cache" tag got an unknown variable: %r' % self.cache_name.var)
            try:
                fragment_cache = caches[cache_name]
            except InvalidCacheBackendError:
                raise TemplateSyntaxError('Invalid cache name specified for cache tag: %r' % cache_name)
        else:
            # No explicit backend: prefer 'template_fragments', silently fall
            # back to 'default' if it is not configured.
            try:
                fragment_cache = caches['template_fragments']
            except InvalidCacheBackendError:
                fragment_cache = caches['default']
        vary_on = [var.resolve(context) for var in self.vary_on]
        cache_key = make_template_fragment_key(self.fragment_name, vary_on)
        value = fragment_cache.get(cache_key)
        if value is None:
            # Cache miss: render the fragment once and store it.
            value = self.nodelist.render(context)
            fragment_cache.set(cache_key, value, expire_time)
        return value
@register.tag('cache')
def do_cache(parser, token):
    """Implement the ``{% cache %}`` template tag.

    Caches the rendered contents of the enclosed fragment for the given
    number of seconds::

        {% load cache %}
        {% cache [expire_time] [fragment_name] %}
            .. some expensive processing ..
        {% endcache %}

    Extra arguments after the fragment name vary the cache entry, so each
    unique set of arguments results in a unique entry::

        {% cache [expire_time] [fragment_name] [var1] [var2] .. %}

    A specific cache backend may be selected with a trailing
    ``using="cachename"`` argument.
    """
    # Consume everything up to {% endcache %} and drop the closing token.
    nodelist = parser.parse(('endcache',))
    parser.delete_first_token()
    bits = token.split_contents()
    if len(bits) < 3:
        raise TemplateSyntaxError("'%r' tag requires at least 2 arguments." % bits[0])
    # A trailing using="..." picks the backend; strip it off before treating
    # the remaining arguments as vary-on values.
    if len(bits) > 3 and bits[-1].startswith('using='):
        cache_name = parser.compile_filter(bits[-1][len('using='):])
        bits = bits[:-1]
    else:
        cache_name = None
    return CacheNode(
        nodelist,
        parser.compile_filter(bits[1]),
        bits[2],  # fragment_name can't be a variable.
        [parser.compile_filter(bit) for bit in bits[3:]],
        cache_name,
    )
| bsd-3-clause |
accraze/bitcoin | qa/rpc-tests/test_framework/authproxy.py | 46 | 6096 |
"""
Copyright 2011 Jeff Garzik
AuthServiceProxy has the following improvements over python-jsonrpc's
ServiceProxy class:
- HTTP connections persist for the life of the AuthServiceProxy object
(if server supports HTTP/1.1)
- sends protocol 'version', per JSON-RPC 1.1
- sends proper, incrementing 'id'
- sends Basic HTTP authentication headers
- parses all JSON numbers that look like floats as Decimal
- uses standard Python json lib
Previous copyright, from python-jsonrpc/jsonrpc/proxy.py:
Copyright (c) 2007 Jan-Klaas Kollhof
This file is part of jsonrpc.
jsonrpc is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this software; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
try:
import http.client as httplib
except ImportError:
import httplib
import base64
import decimal
import json
import logging
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
USER_AGENT = "AuthServiceProxy/0.1"
HTTP_TIMEOUT = 30
log = logging.getLogger("BitcoinRPC")
class JSONRPCException(Exception):
    """Raised when the RPC server returns a JSON-RPC error object.

    The raw error dict (typically with 'code' and 'message' keys) is kept on
    ``self.error`` for callers that inspect it programmatically.
    """
    def __init__(self, rpc_error):
        # The original passed no message to Exception, so str(e) was always
        # empty and failures were opaque in logs/tracebacks. Build a readable
        # message when the error dict has the conventional keys.
        try:
            errmsg = '%(message)s (%(code)i)' % rpc_error
        except (KeyError, TypeError):
            errmsg = ''
        Exception.__init__(self, errmsg)
        self.error = rpc_error
def EncodeDecimal(o):
    """``json.dumps`` default hook: serialize Decimal amounts rounded to 8 places.

    Raises TypeError for any other type, as the json protocol requires.
    """
    if isinstance(o, decimal.Decimal):
        # On Python 3, round(Decimal, 8) returns a Decimal, which json would
        # feed straight back into this hook and recurse until RecursionError.
        # Convert to float so the result is natively serializable (this also
        # matches Python 2, where round() already returned a float).
        return float(round(o, 8))
    raise TypeError(repr(o) + " is not JSON serializable")
class AuthServiceProxy(object):
    """JSON-RPC client proxy with HTTP Basic authentication.

    Attribute access returns a new proxy bound to that method name (dotted
    names compose, e.g. ``proxy.foo.bar``); calling the proxy performs the
    RPC. Child proxies share the parent's HTTP connection so it persists for
    the life of the object.
    """
    # Class-wide counter used as the JSON-RPC request 'id'.
    __id_count = 0

    def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT, connection=None):
        """Build a proxy for ``service_url`` (scheme://user:pass@host:port/path).

        service_name is the RPC method this proxy invokes (None for the root
        proxy); connection, when given, is an existing HTTP(S) connection to
        reuse instead of opening a new one.
        """
        self.__service_url = service_url
        self._service_name = service_name
        self.__url = urlparse.urlparse(service_url)
        if self.__url.port is None:
            port = 80
        else:
            port = self.__url.port
        (user, passwd) = (self.__url.username, self.__url.password)
        # Credentials may be str (need encoding) or already bytes/None;
        # AttributeError means there is nothing to encode.
        try:
            user = user.encode('utf8')
        except AttributeError:
            pass
        try:
            passwd = passwd.encode('utf8')
        except AttributeError:
            pass
        authpair = user + b':' + passwd
        self.__auth_header = b'Basic ' + base64.b64encode(authpair)
        if connection:
            # Callables re-use the connection of the original proxy
            self.__conn = connection
        elif self.__url.scheme == 'https':
            self.__conn = httplib.HTTPSConnection(self.__url.hostname, port,
                                                  None, None, False,
                                                  timeout)
        else:
            self.__conn = httplib.HTTPConnection(self.__url.hostname, port,
                                                 False, timeout)

    def __getattr__(self, name):
        """Return a child proxy for RPC method ``name`` (supports dotted names)."""
        if name.startswith('__') and name.endswith('__'):
            # Python internal stuff
            raise AttributeError
        if self._service_name is not None:
            name = "%s.%s" % (self._service_name, name)
        return AuthServiceProxy(self.__service_url, name, connection=self.__conn)

    def _request(self, method, path, postdata):
        '''
        Do a HTTP request, with retry if we get disconnected (e.g. due to a timeout).
        This is a workaround for https://bugs.python.org/issue3566 which is fixed in Python 3.5.
        '''
        headers = {'Host': self.__url.hostname,
                   'User-Agent': USER_AGENT,
                   'Authorization': self.__auth_header,
                   'Content-type': 'application/json'}
        try:
            self.__conn.request(method, path, postdata, headers)
            return self._get_response()
        except httplib.BadStatusLine as e:
            # httplib stores repr(line), so a closed connection yields the
            # two-character string "''" — retry exactly once in that case.
            if e.line == "''": # if connection was closed, try again
                self.__conn.close()
                self.__conn.request(method, path, postdata, headers)
                return self._get_response()
            else:
                raise

    def __call__(self, *args):
        """Invoke the bound RPC method with ``args``; return its 'result'.

        Raises JSONRPCException when the server reports an error or the
        response is missing a result.
        """
        AuthServiceProxy.__id_count += 1
        log.debug("-%s-> %s %s"%(AuthServiceProxy.__id_count, self._service_name,
                                 json.dumps(args, default=EncodeDecimal)))
        # 'version': '1.1' per JSON-RPC 1.1 (see module docstring).
        postdata = json.dumps({'version': '1.1',
                               'method': self._service_name,
                               'params': args,
                               'id': AuthServiceProxy.__id_count}, default=EncodeDecimal)
        response = self._request('POST', self.__url.path, postdata)
        if response['error'] is not None:
            raise JSONRPCException(response['error'])
        elif 'result' not in response:
            raise JSONRPCException({
                'code': -343, 'message': 'missing JSON-RPC result'})
        else:
            return response['result']

    def _batch(self, rpc_call_list):
        """POST a pre-built list of JSON-RPC call dicts and return the raw response."""
        postdata = json.dumps(list(rpc_call_list), default=EncodeDecimal)
        log.debug("--> "+postdata)
        return self._request('POST', self.__url.path, postdata)

    def _get_response(self):
        """Read and JSON-decode one HTTP response; floats become Decimal."""
        http_response = self.__conn.getresponse()
        if http_response is None:
            raise JSONRPCException({
                'code': -342, 'message': 'missing HTTP response from server'})
        responsedata = http_response.read().decode('utf8')
        response = json.loads(responsedata, parse_float=decimal.Decimal)
        # Successful single calls log just the result; everything else (errors,
        # batch responses) logs the raw payload.
        if "error" in response and response["error"] is None:
            log.debug("<-%s- %s"%(response["id"], json.dumps(response["result"], default=EncodeDecimal)))
        else:
            log.debug("<-- "+responsedata)
        return response
| mit |
openqt/algorithms | leetcode/python/lc919-complete-binary-tree-inserter.py | 1 | 2138 | # coding=utf-8
import unittest
"""919. Complete Binary Tree Inserter
https://leetcode.com/problems/complete-binary-tree-inserter/description/
A _complete_ binary tree is a binary tree in which every level, except
possibly the last, is completely filled, and all nodes are as far left as
possible.
Write a data structure `CBTInserter` that is initialized with a complete
binary tree and supports the following operations:
* `CBTInserter(TreeNode root)` initializes the data structure on a given tree with head node `root`;
* `CBTInserter.insert(int v)` will insert a `TreeNode` into the tree with value `node.val = v` so that the tree remains complete, **and returns the value of the parent of the inserted`TreeNode`** ;
* `CBTInserter.get_root()` will return the head node of the tree.
**Example 1:**
**Input:** inputs = ["CBTInserter","insert","get_root"], inputs = [[[1]],[2],[]]
**Output:** [null,1,[1,2]]
**Example 2:**
**Input:** inputs = ["CBTInserter","insert","insert","get_root"], inputs = [[[1,2,3,4,5,6]],[7],[8],[]]
**Output:** [null,3,4,[1,2,3,4,5,6,7,8]]
**Note:**
1. The initial given tree is complete and contains between `1` and `1000` nodes.
2. `CBTInserter.insert` is called at most `10000` times per test case.
3. Every value of a given or inserted node is between `0` and `5000`.
Similar Questions:
"""
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class CBTInserter(object):
    """Maintain a complete binary tree and support O(1) amortized insertion.

    On construction the tree is scanned breadth-first and every node that
    still has a free child slot is remembered, in level order. Each insert
    attaches the new node to the left-most free slot, which keeps the tree
    complete.
    """

    def __init__(self, root):
        """
        :type root: TreeNode  (any object with .val/.left/.right)
        """
        from collections import deque  # local import keeps module deps unchanged
        self._root = root
        # New nodes are created via the same class as the given root, so this
        # works with any LeetCode-style TreeNode(x) constructor.
        self._node_cls = type(root)
        # Level-order queue of nodes that still have a free child slot; the
        # front is always the parent of the next insertion.
        self._open = deque()
        queue = deque([root])
        while queue:
            node = queue.popleft()
            if node.left is None or node.right is None:
                self._open.append(node)
            if node.left is not None:
                queue.append(node.left)
            if node.right is not None:
                queue.append(node.right)

    def insert(self, v):
        """Insert value ``v`` keeping the tree complete.

        :type v: int
        :rtype: int  -- the value of the new node's parent
        """
        parent = self._open[0]
        child = self._node_cls(v)
        if parent.left is None:
            parent.left = child
        else:
            parent.right = child
            # Both slots are now taken; this node can accept no more children.
            self._open.popleft()
        # The fresh leaf has two free slots itself.
        self._open.append(child)
        return parent.val

    def get_root(self):
        """
        :rtype: TreeNode  -- the (possibly grown) tree's root
        """
        return self._root
# Your CBTInserter object will be instantiated and called as such:
# obj = CBTInserter(root)
# param_1 = obj.insert(v)
# param_2 = obj.get_root()
# NOTE(review): leftover template stub — it takes ``self`` yet sits at module
# level, so unittest never collects it; presumably it was meant to live inside
# a TestCase. Confirm intent before removing.
def test(self):
    """Placeholder test hook; intentionally does nothing."""
    pass


if __name__ == "__main__":
    unittest.main()
| gpl-3.0 |
ReganBell/QReview | build/lib/networkx/algorithms/flow/edmondskarp.py | 8 | 8222 | # -*- coding: utf-8 -*-
"""
Edmonds-Karp algorithm for maximum flow problems.
"""
__author__ = """ysitu <ysitu@users.noreply.github.com>"""
# Copyright (C) 2014 ysitu <ysitu@users.noreply.github.com>
# All rights reserved.
# BSD license.
import networkx as nx
from networkx.algorithms.flow.utils import *
__all__ = ['edmonds_karp']
def edmonds_karp_core(R, s, t, cutoff):
    """Implementation of the Edmonds-Karp algorithm.

    Repeatedly finds a shortest augmenting path from s to t in the residual
    network R (via bidirectional BFS) and pushes the bottleneck capacity along
    it, until no augmenting path remains or the accumulated flow reaches
    ``cutoff``. Returns the total flow value; edge 'flow' attributes in R are
    updated in place.
    """
    # NOTE(review): R_node is never used below — candidate for removal.
    R_node = R.node
    R_pred = R.pred
    R_succ = R.succ
    # Sentinel "infinite" capacity recorded on the residual network.
    inf = R.graph['inf']
    def augment(path):
        """Augment flow along a path from s to t.

        Returns the bottleneck (minimum residual capacity) pushed; raises
        NetworkXUnbounded when the path consists solely of infinite-capacity
        edges.
        """
        # Determine the path residual capacity.
        flow = inf
        it = iter(path)
        u = next(it)
        for v in it:
            attr = R_succ[u][v]
            flow = min(flow, attr['capacity'] - attr['flow'])
            u = v
        # Bottleneck > inf/2 can only happen if every edge had the sentinel
        # "infinite" capacity, i.e. the flow is unbounded above.
        if flow * 2 > inf:
            raise nx.NetworkXUnbounded(
                'Infinite capacity path, flow unbounded above.')
        # Augment flow along the path.
        it = iter(path)
        u = next(it)
        for v in it:
            # Forward flow increases; the reverse edge mirrors it negatively.
            R_succ[u][v]['flow'] += flow
            R_succ[v][u]['flow'] -= flow
            u = v
        return flow
    def bidirectional_bfs():
        """Bidirectional breadth-first search for an augmenting path.

        Grows one BFS frontier from s (following residual forward edges) and
        one from t (following residual edges backwards), always expanding the
        smaller frontier. Returns (meeting_node, pred, succ), or
        (None, None, None) when s and t are disconnected in the residual net.
        """
        pred = {s: None}
        q_s = [s]
        succ = {t: None}
        q_t = [t]
        while True:
            q = []
            if len(q_s) <= len(q_t):
                for u in q_s:
                    for v, attr in R_succ[u].items():
                        # Only traverse edges with remaining residual capacity.
                        if v not in pred and attr['flow'] < attr['capacity']:
                            pred[v] = u
                            if v in succ:
                                # The two frontiers met at v.
                                return v, pred, succ
                            q.append(v)
                if not q:
                    return None, None, None
                q_s = q
            else:
                for u in q_t:
                    for v, attr in R_pred[u].items():
                        if v not in succ and attr['flow'] < attr['capacity']:
                            succ[v] = u
                            if v in pred:
                                return v, pred, succ
                            q.append(v)
                if not q:
                    return None, None, None
                q_t = q
    # Look for shortest augmenting paths using breadth-first search.
    flow_value = 0
    while flow_value < cutoff:
        v, pred, succ = bidirectional_bfs()
        if pred is None:
            # No augmenting path left: current flow is maximum.
            break
        path = [v]
        # Trace a path from s to v.
        u = v
        while u != s:
            u = pred[u]
            path.append(u)
        path.reverse()
        # Trace a path from v to t.
        u = v
        while u != t:
            u = succ[u]
            path.append(u)
        flow_value += augment(path)
    return flow_value
def edmonds_karp_impl(G, s, t, capacity, residual, cutoff):
    """Validate arguments, prepare the residual network, and run the
    Edmonds-Karp core, recording the flow value on the result graph."""
    # Both terminals must exist and be distinct.
    for terminal in (s, t):
        if terminal not in G:
            raise nx.NetworkXError('node %s not in graph' % str(terminal))
    if s == t:
        raise nx.NetworkXError('source and sink are the same node')
    # Reuse a caller-supplied residual network when given; otherwise build one.
    R = build_residual_network(G, capacity) if residual is None else residual
    # Reset any flow left over from a previous run on the same residual net.
    for u in R:
        for edge_attr in R[u].values():
            edge_attr['flow'] = 0
    limit = float('inf') if cutoff is None else cutoff
    R.graph['flow_value'] = edmonds_karp_core(R, s, t, limit)
    return R
def edmonds_karp(G, s, t, capacity='capacity', residual=None, value_only=False,
                 cutoff=None):
    """Find a maximum single-commodity flow using the Edmonds-Karp algorithm.

    Computes a maximum s-t flow on ``G`` and returns the residual network
    that results, in `O(n m^2)` time for `n` nodes and `m` edges.

    Parameters
    ----------
    G : NetworkX graph
        Edges are expected to have a ``capacity`` attribute; edges without
        one are treated as having infinite capacity. MultiGraph and
        MultiDiGraph are not supported.
    s, t : node
        Source and sink nodes for the flow.
    capacity : string
        Name of the edge attribute holding capacities.
        Default value: 'capacity'.
    residual : NetworkX graph
        Residual network on which to execute the algorithm. If None, a new
        residual network is created. Default value: None.
    value_only : bool
        Ignored by this algorithm (it always produces the full residual
        network, not just the flow value).
    cutoff : integer, float
        If specified, terminate once the flow value reaches or exceeds this
        bound; a minimum cut may then not be immediately determinable.
        Default value: None.

    Returns
    -------
    R : NetworkX DiGraph
        Residual network after computing the maximum flow. ``R`` has the
        same nodes as ``G`` and a pair of edges ``(u, v)``/``(v, u)`` for
        every non-self-loop edge of ``G``; ``R[u][v]['capacity']`` is the
        capacity from ``G`` (infinite capacities are represented by the
        large finite value ``R.graph['inf']``), ``R[u][v]['flow']`` is the
        flow function with ``R[u][v]['flow'] == -R[v][u]['flow']``, and the
        total flow into ``t`` is stored in ``R.graph['flow_value']``. When
        ``cutoff`` is not given, reachability from ``s`` over edges with
        ``flow < capacity`` induces a minimum s-t cut.

    Raises
    ------
    NetworkXError
        If the input graph is a MultiGraph or MultiDiGraph.
    NetworkXUnbounded
        If the graph has an s-t path of infinite capacity, so the flow
        value is unbounded above.

    See also
    --------
    :meth:`maximum_flow`
    :meth:`minimum_cut`
    :meth:`ford_fulkerson`
    :meth:`preflow_push`
    :meth:`shortest_augmenting_path`

    Examples
    --------
    >>> import networkx as nx
    >>> from networkx.algorithms.flow import edmonds_karp
    >>> G = nx.DiGraph()
    >>> G.add_edge('x','a', capacity=3.0)
    >>> G.add_edge('x','b', capacity=1.0)
    >>> G.add_edge('a','c', capacity=3.0)
    >>> G.add_edge('b','c', capacity=5.0)
    >>> G.add_edge('b','d', capacity=4.0)
    >>> G.add_edge('d','e', capacity=2.0)
    >>> G.add_edge('c','y', capacity=2.0)
    >>> G.add_edge('e','y', capacity=3.0)
    >>> R = edmonds_karp(G, 'x', 'y')
    >>> nx.maximum_flow_value(G, 'x', 'y') == R.graph['flow_value']
    True
    """
    residual_net = edmonds_karp_impl(G, s, t, capacity, residual, cutoff)
    # Record which algorithm produced this residual network.
    residual_net.graph['algorithm'] = 'edmonds_karp'
    return residual_net
| bsd-3-clause |
icomms/rapidsms | apps/schools/tests.py | 3 | 14361 | from __future__ import absolute_import
import os, random, re
from datetime import datetime, timedelta
from xml.etree import ElementTree
from django.core.management.commands.dumpdata import Command
from rapidsms.tests.scripted import TestScript
from schools.app import App
from schools.models import *
from blaster.models import *
from locations.models import *
# Got these from http://en.wikipedia.org/wiki/Districts_and_Regions_of_Uganda
# and http://babynamesworld.parentsconnect.com/category-ugandan-names.html
# Pool of surnames, also reused as school-name stems by _new_school().
SCHOOL_AND_LAST_NAMES = \
    ["Abbo","Adroa","Akiiki","Akiki","Balondemu","Bitek","Gwandoya","Kadokechi","Kigongo",
     "Kissa","Kizza","Kyoga","Lutalo","Luzige","Madongo","Magomu","Mangeni","Masani",
     "Mulogo","Munanire","Munyiga","Musoke","Nabirye","Nabulungi","Najja","Nakisisa","Namono",
     "Nasiche","Ogwambi","Ojore","Okello","Taban","Wemusa","Wesesa","Zesiro","Zilabamuzale",
     "Kalangala","Kampala","Kayunga","Kiboga","Luwero","Lyantonde","Masaka","Mityana","Mpigi",
     "Mubende","Mukono","Nakaseke","Nakasongola","Rakai","Sembabule","Wakiso","Amuria","Budaka",
     "Bududa","Bugiri","Bukedea","Bukwa","Busia","Butaleja","Iganga","Jinja","Kaberamaido",
     "Kaliro","Kamuli","Kapchorwa","Katakwi","Kumi","Manafwa","Mayuge","Mbale","Namutumba",
     "Pallisa","Sironko","Soroti","Tororo","Abim","Adjumani","Amolatar","Amuru","Apac","Arua",
     "Dokolo","Gulu","Kaabong","Kitgum","Koboko","Kotido","Lira","Moroto","Moyo","Nakapiripirit",
     "Nebbi","Nyadri","Oyam","Pader","Yumbe","Bulisa","Bundibugyo","Bushenyi","Hoima","Ibanda",
     "Isingiro","Kabale","Kabarole","Kamwenge","Kanungu","Kasese","Kibale","Kiruhura","Kisoro",
     "Kyenjojo","Masindi","Mbarara","Ntungamo","Rukungirie"]
# Top-level administrative regions; each is a key of DISTRICTS below.
REGIONS = ["Central", "Eastern", "Northern", "Western"]
# map regions to districts
DISTRICTS = \
    {"Central": ["Kalangala","Kampala","Kayunga","Kiboga","Luwero","Lyantonde","Masaka","Mityana","Mpigi",
                 "Mubende","Mukono","Nakaseke","Nakasongola","Rakai","Sembabule","Wakiso"],
     "Eastern": ["Amuria","Budaka","Bududa","Bugiri","Bukedea","Bukwa","Busia","Butaleja","Iganga","Jinja",
                 "Kaberamaido","Kaliro","Kamuli","Kapchorwa","Katakwi","Kumi","Manafwa","Mayuge","Mbale",
                 "Namutumba","Pallisa","Sironko","Soroti","Tororo"],
     "Northern": ["Abim","Adjumani","Amolatar","Amuru","Apac","Arua","Dokolo","Gulu","Kaabong","Kitgum",
                  "Koboko","Kotido","Lira","Moroto","Moyo","Nakapiripirit","Nebbi","Nyadri","Oyam","Pader",
                  "Yumbe"],
     "Western": ["Bulisa","Bundibugyo","Bushenyi","Hoima","Ibanda","Isingiro","Kabale","Kabarole",
                 "Kamwenge","Kanungu","Kasese","Kibale","Kiruhura","Kisoro","Kyenjojo","Masindi","Mbarara",
                 "Ntungamo","Rukungirie"]
     }
# Got these from http://www.studentsoftheworld.info/penpals/stats.php3?Pays=UGA
# Pool of given names for generated reporters (see _new_reporter()).
FIRST_NAMES = \
    ["Sharon","Joseph","Martha","John","Maureen","Alex","Sarah","James","Faith","Moses","Grace",
     "Henry","Esther","Patrick","Brenda","Julius","Gloria","David","Joan","Charles","Mercy",
     "Peter","Mary","Okello","Ruth","Ronald","Juliet","Micheal","Vicky","Jude","Ritah","Paul",
     "Florence","Isaac","Hellen","Brian","Diana","Emma","Racheal","Mark","Pearl","Solomon",
     "Prossy","Lawrence","Rachel","George","Annet","Richard","Doreen","Jack","Winnie","Denis",
     "Sylvia","Fred","Angel","Michael","Jackie","Robert","Cathy","Pius","Hope","Stephen",
     "Lydia","Andrew","Pauline","Eric","Lilian","Kenneth","Nabukenya","Williams","Linda",
     "Francis","Julie","Evans","Natasha","Joshua","Claire","Arthur","Annie","Ronnie",
     "Christine","Ivan","Stella","Tonny","Betty","Daniel","Viola","Tom","Patience","Kaggwa",
     "Keisha","Edward","Ciara","Kalungi","Sandra","Frank","Patricia","Jimmy","Lisa","Ben",
     "Carol","Eddy","Freda","Ambrose","Remmy","Christopher","Becky","Edgar","Anna","Hakim",
     "Marion","Derrick","Peace","Alfred","Clare","Marvin","Debbie","Matovu","Namutebi",
     "Nicholas","Joy","Abdul","Miriam","Allan","Kevin","Mukasa","Rebecca","Okot","Barbara",
     "Kyagaba","Dina","Joel","Bridget","Samuel","Karen","Mwesigwa","Pamella","Jonathan",
     "Joanne","Ssali","Sweetie","Bukenya","Jasmine","Martin","Beyonce","Phillip","Evelyne",
     "Nelly","Vivian","Benjamin","Dorah","Victor","Desire","Kimera","Jojo","Ssebulime",
     "Flavia","Lutaaya","Nicole","Mbabaali","Immaculate","Kato","Jennifer","Angwella","Olivia",
     "Ntambi","Barbie","Walter","Judith","Vincent","Iryn","Amos","Shannie","Timothy","Juliana",
     "Semi","Jovia","Sunday","Ann","Trevor","Paula","Wasswa","Nakirya","Innocent","Irene",
     "Arnold","Anita","Sammy","Mimi","Mathias","Pretty","Bob","Clara","Gerald","Angella","Otia",
     "Mbabazi","Mayanja","Leah","Tadeo"]
# School categories appended to a surname to form a school name.
SCHOOL_TYPES = ["Primary", "Secondary"]
class TestApp (TestScript):
apps = (App,)
def setUp(self):
self._create_locations()
def testSchoolToElement(self):
school = self._new_school()
elem = school.to_element()
expected = '<School latitude="%s" longitude="%s"><Name>%s</Name><Teachers>%s</Teachers></School>' %\
(school.latitude, school.longitude, school.name, school.teachers)
# unfortunately we added a bunch of random crap to the xml and I'm not fixing these now
#self.assertEqual(expected, ElementTree.tostring(elem))
#self.assertEqual(expected, school.to_xml())
def testGenerateData(self):
self._create_message_blasts()
blasts = MessageBlast.objects.all()
for i in range(100):
school = self._new_school()
headmaster = self._new_reporter(school, "headmaster")
self._create_groups(school)
# these go to everyone
for blast in blasts:
BlastedMessage.objects.create(blast=blast,reporter=headmaster)
to_populate = random.random()
if to_populate < .95:
self._populate_data(school, headmaster)
self._dumpdata()
def _dumpdata(self):
dumpdata = Command()
filename = os.path.abspath(os.path.join(os.path.dirname(__file__),"test_schools.json"))
options = { "indent" : 2 }
datadump = dumpdata.handle("locations", "reporters", "schools","blaster", **options)
file = open(filename, "w")
file.write(datadump)
file.close()
print "=== Successfully wrote fixtures to %s ===" % filename
def _new_school(self):
# estimate the rough boundaries of Uganda
lat, lon = _ugandan_coordinate()
min_students = 5
max_students = 35
name = "%s %s" % (random.choice(SCHOOL_AND_LAST_NAMES), random.choice(SCHOOL_TYPES))
parent = random.choice(Location.objects.filter(type__name="District"))
teachers = random.randint(3,20)
count = School.objects.filter(parent=parent).count()
school_code = "%(district)s%(school)03d" % \
{"district": parent.code, "school":count + 1}
school_type = LocationType.objects.get(name="School")
school = School.objects.create(latitude=str(lat), longitude=str(lon),
code=school_code, type=school_type,
teachers=teachers, name=name, parent=parent)
for year in range(1,3):
# only make 3 grades to keep things simple
girls = random.uniform(min_students, max_students)
boys = random.uniform(min_students, max_students)
Grade.objects.create(school=school,year=year,
girls=girls,boys=boys)
return school
def _create_groups(self, school):
for type in SCHOOL_GROUP_TYPES:
# headmasters are created separately so we guarantee exactly 1
# per school
if not type=="headmaster":
members = random.randint(0,5)
for i in range(members):
self._new_reporter(school, type)
def _new_reporter(self, school, type):
# create a reporter, add them to a school and the group type
firstname = random.choice(FIRST_NAMES)
lastname = random.choice(SCHOOL_AND_LAST_NAMES)
alias = Reporter.parse_name("%s %s" % (firstname, lastname))[0]
reporter = Reporter.objects.create(first_name=firstname,
last_name = lastname,
alias=alias, location=school)
reporter.save()
group = SchoolGroup.get_or_create(type, school)
reporter.groups.add(group)
reporter.save()
return reporter
def _populate_data(self, school, headmaster):
"""Poplate a single report of each type"""
now = datetime.now()
# report time anywhere from 2 weeks ago to 2 days from now
offset = random.randint(-2, 14)
date = now + timedelta(days=offset)
# each thing has a 90% chance of having a response
if random.random() < .90:
# say the water is 95% working
water = False if random.random() > .95 else True
SchoolWaterReport.objects.create(reporter=headmaster,
date=date,
school=school,
water_working=water)
message = BlastedMessage.objects.get(reporter=headmaster,
blast=self.WATER_BLAST)
text = "yes" if water else "no"
BlastResponse.objects.create(date=now,text=text,
message=message,success=True)
if random.random() < .90:
teachers = self._get_scaled_int(school.teachers,.95)
SchoolTeacherReport.objects.create(reporter=headmaster,
date=date,
school=school,
expected=school.teachers,
actual=teachers)
message = BlastedMessage.objects.get(reporter=headmaster,
blast=self.TEACHER_BLAST)
BlastResponse.objects.create(date=now,text=str(teachers),
message=message,success=True)
for grade in school.classes.all():
if random.random() < .90:
girls = self._get_scaled_int(grade.girls,.92)
GirlsAttendenceReport.objects.create(reporter=headmaster,
date=date,
grade=grade,
expected=grade.girls,
actual=girls)
if random.random() < .90:
boys = self._get_scaled_int(grade.boys,.92)
BoysAttendenceReport.objects.create(reporter=headmaster,
date=date,
grade=grade,
expected=grade.boys,
actual=boys)
def _get_scaled_int(self, value, likelihood):
"""Treats value as a set of unique occurrences, each of which has
likelihood percent chance of being true. Returns a random number
probabilistically = to one iteration of the values happening. If
likelihood = 0, returns 0, if likelihood = 1 returns value"""
count = 0
for i in range(value):
if random.random() < likelihood:
count += 1
return count
def _create_locations(self):
    """Seed the Region -> District location hierarchy and the School type.

    Entirely skipped when a "School" LocationType already exists, which is
    taken to mean the fixtures were loaded and everything is in place.
    Region codes count from 1; district codes are "<region><district:02d>".
    """
    try:
        LocationType.objects.get(name="School")
    except LocationType.DoesNotExist:
        pass  # nothing seeded yet -- fall through and build everything
    else:
        # assume if this is there that everything else is set, we probably
        # loaded the fixtures.
        return
    region_type = LocationType.objects.create(name="Region")
    district_type = LocationType.objects.create(name="District")
    LocationType.objects.create(name="School")
    for region_code, region_name in enumerate(REGIONS, start=1):
        region = Location.objects.create(type=region_type,
                                         code=str(region_code),
                                         name=region_name)
        for district_code, district_name in enumerate(DISTRICTS[region_name],
                                                      start=1):
            full_district_code = "%(region)s%(district)02d" % \
                {"region": region_code, "district": district_code}
            Location.objects.create(type=district_type,
                                    code=full_district_code,
                                    name=district_name,
                                    parent=region)
def _create_message_blasts(self):
    """Create one MessageBlast (dated now) per known blastable question.

    Question 1 is the water question and question 2 the teacher question;
    the resulting blasts are remembered on self for later lookups.
    """
    now = datetime.now()
    blasts = [
        MessageBlast.objects.create(
            message=BlastableMessage.objects.get(id=question_id),
            date=now)
        for question_id in [1, 2]
    ]
    self.WATER_BLAST, self.TEACHER_BLAST = blasts
def _ugandan_coordinate():
    """Return a random (lat, lon) pair inside Uganda's bounding box.

    Uses rejection sampling: points that fall in Lake Victoria (or the
    clipped north-west corner, see _in_lake) are redrawn.
    """
    min_lat, max_lat = -0.964005, 3.518904
    min_lon, max_lon = 29.992676, 34.727783
    while True:
        lat = random.uniform(min_lat, max_lat)
        lon = random.uniform(min_lon, max_lon)
        if not _in_lake(lat, lon):
            return (lat, lon)
def _in_lake(lat, lon):
# it's a big lake...
# we also use this to snip off the upper left area where the country
# cuts in.
return (lat < 0.400998 and lon > 31.761475) or \
(lat > 1.10955 and lon < 31.135254)
| lgpl-3.0 |
joshal/behave | behave4cmd0/command_util.py | 13 | 3803 | # -*- coding -*-
"""
Provides some command utility functions.
TODO:
matcher that ignores empty lines and whitespace and has contains comparison
"""
from __future__ import absolute_import
from behave4cmd0 import pathutil
from behave4cmd0.__setup import TOP, TOPA
import os.path
import shutil
from fnmatch import fnmatch
# -----------------------------------------------------------------------------
# CONSTANTS:
# -----------------------------------------------------------------------------
# HERE = os.path.dirname(__file__)
# TOP = os.path.join(HERE, "..")
# TOPA = os.path.abspath(TOP)
WORKDIR = os.path.join(TOP, "__WORKDIR__")
# -----------------------------------------------------------------------------
# UTILITY FUNCTIONS:
# -----------------------------------------------------------------------------
def workdir_save_coverage_files(workdir, destdir=None):
    """Move any ".coverage.*" data files out of *workdir* into *destdir*.

    *destdir* defaults to the project top directory (TOPA).  Nothing is
    moved when source and destination resolve to the same directory.
    """
    assert os.path.isdir(workdir)
    destdir = destdir or TOPA
    if os.path.abspath(workdir) == os.path.abspath(destdir):
        return  # -- SKIP: Source directory is destination directory (SAME).
    coverage_files = [name for name in os.listdir(workdir)
                      if fnmatch(name, ".coverage.*")]
    for name in coverage_files:
        # -- MOVE COVERAGE FILES:
        shutil.move(os.path.join(workdir, name), destdir)
# def ensure_directory_exists(dirname):
# """
# Ensures that a directory exits.
# If it does not exist, it is automatically created.
# """
# if not os.path.exists(dirname):
# os.makedirs(dirname)
# assert os.path.exists(dirname)
# assert os.path.isdir(dirname)
def ensure_context_attribute_exists(context, name, default_value=None):
    """Guarantee that *context* has an attribute called *name*.

    An already-present attribute is left untouched; otherwise it is
    created with *default_value*.
    """
    if hasattr(context, name):
        return
    setattr(context, name, default_value)
def ensure_workdir_exists(context):
    """Make sure the work directory exists on disk.

    Also stores its absolute path as ``context.workdir`` (created and set
    to the module-level WORKDIR default when not already present).
    """
    ensure_context_attribute_exists(context, "workdir", None)
    context.workdir = context.workdir or os.path.abspath(WORKDIR)
    pathutil.ensure_directory_exists(context.workdir)
# def create_textfile_with_contents(filename, contents):
# """
# Creates a textual file with the provided contents in the workdir.
# Overwrites an existing file.
# """
# ensure_directory_exists(os.path.dirname(filename))
# if os.path.exists(filename):
# os.remove(filename)
# outstream = open(filename, "w")
# outstream.write(contents)
# if not contents.endswith("\n"):
# outstream.write("\n")
# outstream.flush()
# outstream.close()
# assert os.path.exists(filename)
# def text_remove_empty_lines(text):
# """
# Whitespace normalization:
# - Strip empty lines
# - Strip trailing whitespace
# """
# lines = [ line.rstrip() for line in text.splitlines() if line.strip() ]
# return "\n".join(lines)
#
# def text_normalize(text):
# """
# Whitespace normalization:
# - Strip empty lines
# - Strip leading whitespace in a line
# - Strip trailing whitespace in a line
# - Normalize line endings
# """
# lines = [ line.strip() for line in text.splitlines() if line.strip() ]
# return "\n".join(lines)
# def posixpath_normpath(pathname):
# """
# Convert path into POSIX path:
# - Normalize path
# - Replace backslash with slash
# """
# backslash = '\\'
# pathname = os.path.normpath(pathname)
# if backslash in pathname:
# pathname = pathname.replace(backslash, '/')
# return pathname
| bsd-2-clause |
Fidge123/league-unlock-challenge | src/UpdateHallOfFame.py | 1 | 1936 | #!/usr/bin/env python3
"""Take match and player data, get champ and save it to the database"""
from os import environ
import psycopg2
# with open("db_config") as file:
# HOST = file.readline().strip()
# DBNAME = file.readline().strip()
# USER = file.readline().strip()
# PASS = file.readline().strip()
#
# CONN_STRING = "host=" + HOST + " dbname=" + DBNAME + " user=" + USER + " password=" + PASS
CONN_STRING = environ["DATABASE_URL"]
# local way
# "host=" + environ['HOST'] + " dbname=" + environ['DBNAME'] + " user=" + environ['USER'] + " password=" + environ['PW']
HOF_UPDATE = "UPDATE player_halloffame SET (playerid, region) = (%s,%s) WHERE hofid = %s AND place = %s;"
def update():
    """Update Hall of Fame data.

    For each hall-of-fame category (hofid 1-8) run its ranking query,
    take the top three players, and write them into player_halloffame
    via HOF_UPDATE.  Categories 4 and 6 have no stat yet and are skipped.

    Commits the transaction and closes the connection before returning.
    """
    # hofid -> ranking query; 4 and 6 are intentionally absent (no stat yet).
    queries = {
        1: "SELECT id, region, ((kills + assists) / GREATEST(deaths, 1) * 1.0) AS kda FROM player ORDER BY kda DESC LIMIT 3;",
        2: "SELECT id, region FROM player ORDER BY assists DESC LIMIT 3;",
        3: "SELECT id, region FROM player ORDER BY kills DESC LIMIT 3;",
        5: "SELECT id, region FROM player ORDER BY minion DESC LIMIT 3;",
        7: "SELECT id, region FROM player ORDER BY highestcrit DESC LIMIT 3;",
        8: "SELECT id, region FROM player ORDER BY ccduration DESC LIMIT 3;",
    }
    conn = psycopg2.connect(CONN_STRING)
    try:
        cursor = conn.cursor()
        for hofid in sorted(queries):
            cursor.execute(queries[hofid])
            # Rows arrive best-first; rank them 1..3.  Category 1 returns an
            # extra kda column, so index rather than unpack.
            for place, row in enumerate(cursor.fetchall(), start=1):
                cursor.execute(HOF_UPDATE, (row[0], row[1], hofid, place))
        # BUG FIX: psycopg2 does not autocommit -- without an explicit
        # commit the updates were silently rolled back when the connection
        # was garbage-collected.
        conn.commit()
    finally:
        conn.close()
| gpl-3.0 |
mspark93/VTK | ThirdParty/Twisted/twisted/application/strports.py | 48 | 3139 | # -*- test-case-name: twisted.test.test_strports -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Construct listening port services from a simple string description.
@see: L{twisted.internet.endpoints.serverFromString}
@see: L{twisted.internet.endpoints.clientFromString}
"""
import warnings
from twisted.internet import endpoints
from twisted.python.deprecate import deprecatedModuleAttribute
from twisted.python.versions import Version
from twisted.application.internet import StreamServerEndpointService
def parse(description, factory, default='tcp'):
    """
    This function is deprecated as of Twisted 10.2.

    Delegates to the endpoints parser; ``listen`` below unpacks the
    result as a (name, args, kwargs) tuple.

    @see: L{twisted.internet.endpoints.server}
    """
    return endpoints._parseServer(description, factory, default)
deprecatedModuleAttribute(
Version("Twisted", 10, 2, 0),
"in favor of twisted.internet.endpoints.serverFromString",
__name__, "parse")
_DEFAULT = object()
def service(description, factory, default=_DEFAULT, reactor=None):
    """
    Return the service corresponding to a description.

    @param description: The description of the listening port, in the syntax
        described by L{twisted.internet.endpoints.server}.

    @type description: C{str}

    @param factory: The protocol factory which will build protocols for
        connections to this service.

    @type factory: L{twisted.internet.interfaces.IProtocolFactory}

    @type default: C{str} or C{None}

    @param default: Do not use this parameter. It has been deprecated since
        Twisted 10.2.0.

    @param reactor: The reactor used to build the endpoint; the global
        reactor is imported and used when C{None}.

    @rtype: C{twisted.application.service.IService}

    @return: the service corresponding to a description of a reliable
        stream server.

    @see: L{twisted.internet.endpoints.serverFromString}
    """
    if reactor is None:
        from twisted.internet import reactor
    # _DEFAULT is a sentinel distinguishing "caller did not pass default"
    # (no warning) from an explicit value (deprecated usage).
    if default is _DEFAULT:
        default = None
    else:
        message = "The 'default' parameter was deprecated in Twisted 10.2.0."
        # An explicit non-None default gets an extra hint about qualified
        # endpoint descriptions in the warning text.
        if default is not None:
            message += (
                " Use qualified endpoint descriptions; for example, "
                "'tcp:%s'." % (description,))
        warnings.warn(
            message=message, category=DeprecationWarning, stacklevel=2)
    svc = StreamServerEndpointService(
        endpoints._serverFromStringLegacy(reactor, description, default),
        factory)
    # NOTE(review): presumably makes endpoint listen failures raise at
    # startup (matching the old pre-endpoint behavior) -- confirm against
    # StreamServerEndpointService.
    svc._raiseSynchronously = True
    return svc
def listen(description, factory, default=None):
    """Listen on a port corresponding to a description

    @type description: C{str}
    @type factory: L{twisted.internet.interfaces.IProtocolFactory}
    @type default: C{str} or C{None}
    @rtype: C{twisted.internet.interfaces.IListeningPort}
    @return: the port corresponding to a description of a reliable
    virtual circuit server.

    See the documentation of the C{parse} function for description
    of the semantics of the arguments.
    """
    from twisted.internet import reactor
    # parse yields (name, args, kwargs); dispatch to the matching
    # reactor.listen<Name> method, e.g. 'TCP' -> reactor.listenTCP.
    name, args, kw = parse(description, factory, default)
    return getattr(reactor, 'listen'+name)(*args, **kw)
__all__ = ['parse', 'service', 'listen']
| bsd-3-clause |
master-g/vogenerator | google/protobuf/map_unittest_pb2.py | 30 | 122481 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/map_unittest.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import unittest_pb2 as google_dot_protobuf_dot_unittest__pb2
from google.protobuf import unittest_no_arena_pb2 as google_dot_protobuf_dot_unittest__no__arena__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/map_unittest.proto',
package='protobuf_unittest',
syntax='proto3',
serialized_pb=_b('\n\"google/protobuf/map_unittest.proto\x12\x11protobuf_unittest\x1a\x1egoogle/protobuf/unittest.proto\x1a\'google/protobuf/unittest_no_arena.proto\"\xd6\x13\n\x07TestMap\x12\x46\n\x0fmap_int32_int32\x18\x01 \x03(\x0b\x32-.protobuf_unittest.TestMap.MapInt32Int32Entry\x12\x46\n\x0fmap_int64_int64\x18\x02 \x03(\x0b\x32-.protobuf_unittest.TestMap.MapInt64Int64Entry\x12J\n\x11map_uint32_uint32\x18\x03 \x03(\x0b\x32/.protobuf_unittest.TestMap.MapUint32Uint32Entry\x12J\n\x11map_uint64_uint64\x18\x04 \x03(\x0b\x32/.protobuf_unittest.TestMap.MapUint64Uint64Entry\x12J\n\x11map_sint32_sint32\x18\x05 \x03(\x0b\x32/.protobuf_unittest.TestMap.MapSint32Sint32Entry\x12J\n\x11map_sint64_sint64\x18\x06 \x03(\x0b\x32/.protobuf_unittest.TestMap.MapSint64Sint64Entry\x12N\n\x13map_fixed32_fixed32\x18\x07 \x03(\x0b\x32\x31.protobuf_unittest.TestMap.MapFixed32Fixed32Entry\x12N\n\x13map_fixed64_fixed64\x18\x08 \x03(\x0b\x32\x31.protobuf_unittest.TestMap.MapFixed64Fixed64Entry\x12R\n\x15map_sfixed32_sfixed32\x18\t \x03(\x0b\x32\x33.protobuf_unittest.TestMap.MapSfixed32Sfixed32Entry\x12R\n\x15map_sfixed64_sfixed64\x18\n \x03(\x0b\x32\x33.protobuf_unittest.TestMap.MapSfixed64Sfixed64Entry\x12\x46\n\x0fmap_int32_float\x18\x0b \x03(\x0b\x32-.protobuf_unittest.TestMap.MapInt32FloatEntry\x12H\n\x10map_int32_double\x18\x0c \x03(\x0b\x32..protobuf_unittest.TestMap.MapInt32DoubleEntry\x12\x42\n\rmap_bool_bool\x18\r \x03(\x0b\x32+.protobuf_unittest.TestMap.MapBoolBoolEntry\x12J\n\x11map_string_string\x18\x0e \x03(\x0b\x32/.protobuf_unittest.TestMap.MapStringStringEntry\x12\x46\n\x0fmap_int32_bytes\x18\x0f \x03(\x0b\x32-.protobuf_unittest.TestMap.MapInt32BytesEntry\x12\x44\n\x0emap_int32_enum\x18\x10 \x03(\x0b\x32,.protobuf_unittest.TestMap.MapInt32EnumEntry\x12Y\n\x19map_int32_foreign_message\x18\x11 \x03(\x0b\x32\x36.protobuf_unittest.TestMap.MapInt32ForeignMessageEntry\x12[\n\x1amap_string_foreign_message\x18\x12 
\x03(\x0b\x32\x37.protobuf_unittest.TestMap.MapStringForeignMessageEntry\x1a\x34\n\x12MapInt32Int32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x34\n\x12MapInt64Int64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x36\n\x14MapUint32Uint32Entry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\r:\x02\x38\x01\x1a\x36\n\x14MapUint64Uint64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x04:\x02\x38\x01\x1a\x36\n\x14MapSint32Sint32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x11\x12\r\n\x05value\x18\x02 \x01(\x11:\x02\x38\x01\x1a\x36\n\x14MapSint64Sint64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x12\x12\r\n\x05value\x18\x02 \x01(\x12:\x02\x38\x01\x1a\x38\n\x16MapFixed32Fixed32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x07\x12\r\n\x05value\x18\x02 \x01(\x07:\x02\x38\x01\x1a\x38\n\x16MapFixed64Fixed64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x06\x12\r\n\x05value\x18\x02 \x01(\x06:\x02\x38\x01\x1a:\n\x18MapSfixed32Sfixed32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x0f\x12\r\n\x05value\x18\x02 \x01(\x0f:\x02\x38\x01\x1a:\n\x18MapSfixed64Sfixed64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x10\x12\r\n\x05value\x18\x02 \x01(\x10:\x02\x38\x01\x1a\x34\n\x12MapInt32FloatEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x02:\x02\x38\x01\x1a\x35\n\x13MapInt32DoubleEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x01:\x02\x38\x01\x1a\x32\n\x10MapBoolBoolEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x08:\x02\x38\x01\x1a\x36\n\x14MapStringStringEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x34\n\x12MapInt32BytesEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x0c:\x02\x38\x01\x1aO\n\x11MapInt32EnumEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12)\n\x05value\x18\x02 
\x01(\x0e\x32\x1a.protobuf_unittest.MapEnum:\x02\x38\x01\x1a`\n\x1bMapInt32ForeignMessageEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\x30\n\x05value\x18\x02 \x01(\x0b\x32!.protobuf_unittest.ForeignMessage:\x02\x38\x01\x1a\x61\n\x1cMapStringForeignMessageEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x30\n\x05value\x18\x02 \x01(\x0b\x32!.protobuf_unittest.ForeignMessage:\x02\x38\x01\"A\n\x11TestMapSubmessage\x12,\n\x08test_map\x18\x01 \x01(\x0b\x32\x1a.protobuf_unittest.TestMap\"\xbc\x01\n\x0eTestMessageMap\x12Q\n\x11map_int32_message\x18\x01 \x03(\x0b\x32\x36.protobuf_unittest.TestMessageMap.MapInt32MessageEntry\x1aW\n\x14MapInt32MessageEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes:\x02\x38\x01\"\xe3\x01\n\x0fTestSameTypeMap\x12:\n\x04map1\x18\x01 \x03(\x0b\x32,.protobuf_unittest.TestSameTypeMap.Map1Entry\x12:\n\x04map2\x18\x02 \x03(\x0b\x32,.protobuf_unittest.TestSameTypeMap.Map2Entry\x1a+\n\tMap1Entry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a+\n\tMap2Entry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"\xb6\x01\n\x16TestRequiredMessageMap\x12J\n\tmap_field\x18\x01 \x03(\x0b\x32\x37.protobuf_unittest.TestRequiredMessageMap.MapFieldEntry\x1aP\n\rMapFieldEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.protobuf_unittest.TestRequired:\x02\x38\x01\"\xd2\x14\n\x0cTestArenaMap\x12K\n\x0fmap_int32_int32\x18\x01 \x03(\x0b\x32\x32.protobuf_unittest.TestArenaMap.MapInt32Int32Entry\x12K\n\x0fmap_int64_int64\x18\x02 \x03(\x0b\x32\x32.protobuf_unittest.TestArenaMap.MapInt64Int64Entry\x12O\n\x11map_uint32_uint32\x18\x03 \x03(\x0b\x32\x34.protobuf_unittest.TestArenaMap.MapUint32Uint32Entry\x12O\n\x11map_uint64_uint64\x18\x04 \x03(\x0b\x32\x34.protobuf_unittest.TestArenaMap.MapUint64Uint64Entry\x12O\n\x11map_sint32_sint32\x18\x05 
\x03(\x0b\x32\x34.protobuf_unittest.TestArenaMap.MapSint32Sint32Entry\x12O\n\x11map_sint64_sint64\x18\x06 \x03(\x0b\x32\x34.protobuf_unittest.TestArenaMap.MapSint64Sint64Entry\x12S\n\x13map_fixed32_fixed32\x18\x07 \x03(\x0b\x32\x36.protobuf_unittest.TestArenaMap.MapFixed32Fixed32Entry\x12S\n\x13map_fixed64_fixed64\x18\x08 \x03(\x0b\x32\x36.protobuf_unittest.TestArenaMap.MapFixed64Fixed64Entry\x12W\n\x15map_sfixed32_sfixed32\x18\t \x03(\x0b\x32\x38.protobuf_unittest.TestArenaMap.MapSfixed32Sfixed32Entry\x12W\n\x15map_sfixed64_sfixed64\x18\n \x03(\x0b\x32\x38.protobuf_unittest.TestArenaMap.MapSfixed64Sfixed64Entry\x12K\n\x0fmap_int32_float\x18\x0b \x03(\x0b\x32\x32.protobuf_unittest.TestArenaMap.MapInt32FloatEntry\x12M\n\x10map_int32_double\x18\x0c \x03(\x0b\x32\x33.protobuf_unittest.TestArenaMap.MapInt32DoubleEntry\x12G\n\rmap_bool_bool\x18\r \x03(\x0b\x32\x30.protobuf_unittest.TestArenaMap.MapBoolBoolEntry\x12O\n\x11map_string_string\x18\x0e \x03(\x0b\x32\x34.protobuf_unittest.TestArenaMap.MapStringStringEntry\x12K\n\x0fmap_int32_bytes\x18\x0f \x03(\x0b\x32\x32.protobuf_unittest.TestArenaMap.MapInt32BytesEntry\x12I\n\x0emap_int32_enum\x18\x10 \x03(\x0b\x32\x31.protobuf_unittest.TestArenaMap.MapInt32EnumEntry\x12^\n\x19map_int32_foreign_message\x18\x11 \x03(\x0b\x32;.protobuf_unittest.TestArenaMap.MapInt32ForeignMessageEntry\x12n\n\"map_int32_foreign_message_no_arena\x18\x12 \x03(\x0b\x32\x42.protobuf_unittest.TestArenaMap.MapInt32ForeignMessageNoArenaEntry\x1a\x34\n\x12MapInt32Int32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x34\n\x12MapInt64Int64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x36\n\x14MapUint32Uint32Entry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\r:\x02\x38\x01\x1a\x36\n\x14MapUint64Uint64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x04:\x02\x38\x01\x1a\x36\n\x14MapSint32Sint32Entry\x12\x0b\n\x03key\x18\x01 
\x01(\x11\x12\r\n\x05value\x18\x02 \x01(\x11:\x02\x38\x01\x1a\x36\n\x14MapSint64Sint64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x12\x12\r\n\x05value\x18\x02 \x01(\x12:\x02\x38\x01\x1a\x38\n\x16MapFixed32Fixed32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x07\x12\r\n\x05value\x18\x02 \x01(\x07:\x02\x38\x01\x1a\x38\n\x16MapFixed64Fixed64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x06\x12\r\n\x05value\x18\x02 \x01(\x06:\x02\x38\x01\x1a:\n\x18MapSfixed32Sfixed32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x0f\x12\r\n\x05value\x18\x02 \x01(\x0f:\x02\x38\x01\x1a:\n\x18MapSfixed64Sfixed64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x10\x12\r\n\x05value\x18\x02 \x01(\x10:\x02\x38\x01\x1a\x34\n\x12MapInt32FloatEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x02:\x02\x38\x01\x1a\x35\n\x13MapInt32DoubleEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x01:\x02\x38\x01\x1a\x32\n\x10MapBoolBoolEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x08:\x02\x38\x01\x1a\x36\n\x14MapStringStringEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x34\n\x12MapInt32BytesEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x0c:\x02\x38\x01\x1aO\n\x11MapInt32EnumEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12)\n\x05value\x18\x02 \x01(\x0e\x32\x1a.protobuf_unittest.MapEnum:\x02\x38\x01\x1a`\n\x1bMapInt32ForeignMessageEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\x30\n\x05value\x18\x02 \x01(\x0b\x32!.protobuf_unittest.ForeignMessage:\x02\x38\x01\x1ap\n\"MapInt32ForeignMessageNoArenaEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\x39\n\x05value\x18\x02 \x01(\x0b\x32*.protobuf_unittest_no_arena.ForeignMessage:\x02\x38\x01\"\xe4\x01\n\x1fMessageContainingEnumCalledType\x12J\n\x04type\x18\x01 \x03(\x0b\x32<.protobuf_unittest.MessageContainingEnumCalledType.TypeEntry\x1a_\n\tTypeEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x41\n\x05value\x18\x02 
\x01(\x0b\x32\x32.protobuf_unittest.MessageContainingEnumCalledType:\x02\x38\x01\"\x14\n\x04Type\x12\x0c\n\x08TYPE_FOO\x10\x00\"\x9d\x01\n\x1fMessageContainingMapCalledEntry\x12L\n\x05\x65ntry\x18\x01 \x03(\x0b\x32=.protobuf_unittest.MessageContainingMapCalledEntry.EntryEntry\x1a,\n\nEntryEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"\xad\x01\n\x17TestRecursiveMapMessage\x12<\n\x01\x61\x18\x01 \x03(\x0b\x32\x31.protobuf_unittest.TestRecursiveMapMessage.AEntry\x1aT\n\x06\x41\x45ntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x39\n\x05value\x18\x02 \x01(\x0b\x32*.protobuf_unittest.TestRecursiveMapMessage:\x02\x38\x01*?\n\x07MapEnum\x12\x10\n\x0cMAP_ENUM_FOO\x10\x00\x12\x10\n\x0cMAP_ENUM_BAR\x10\x01\x12\x10\n\x0cMAP_ENUM_BAZ\x10\x02\x42\x03\xf8\x01\x01\x62\x06proto3')
,
dependencies=[google_dot_protobuf_dot_unittest__pb2.DESCRIPTOR,google_dot_protobuf_dot_unittest__no__arena__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Machine-generated descriptor data (protoc output) for the file-level
# MapEnum enum.  Do not hand-edit: regenerate from
# google/protobuf/map_unittest.proto instead.
_MAPENUM = _descriptor.EnumDescriptor(
  name='MapEnum',
  full_name='protobuf_unittest.MapEnum',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='MAP_ENUM_FOO', index=0, number=0,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='MAP_ENUM_BAR', index=1, number=1,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='MAP_ENUM_BAZ', index=2, number=2,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  # byte offsets of this enum within DESCRIPTOR's serialized_pb blob
  serialized_start=6536,
  serialized_end=6599,
)
_sym_db.RegisterEnumDescriptor(_MAPENUM)

# Public enum wrapper plus flat module-level aliases for each value.
MapEnum = enum_type_wrapper.EnumTypeWrapper(_MAPENUM)
MAP_ENUM_FOO = 0
MAP_ENUM_BAR = 1
MAP_ENUM_BAZ = 2
_MESSAGECONTAININGENUMCALLEDTYPE_TYPE = _descriptor.EnumDescriptor(
name='Type',
full_name='protobuf_unittest.MessageContainingEnumCalledType.Type',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='TYPE_FOO', index=0, number=0,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=6178,
serialized_end=6198,
)
_sym_db.RegisterEnumDescriptor(_MESSAGECONTAININGENUMCALLEDTYPE_TYPE)
_TESTMAP_MAPINT32INT32ENTRY = _descriptor.Descriptor(
name='MapInt32Int32Entry',
full_name='protobuf_unittest.TestMap.MapInt32Int32Entry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_unittest.TestMap.MapInt32Int32Entry.key', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_unittest.TestMap.MapInt32Int32Entry.value', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1534,
serialized_end=1586,
)
_TESTMAP_MAPINT64INT64ENTRY = _descriptor.Descriptor(
name='MapInt64Int64Entry',
full_name='protobuf_unittest.TestMap.MapInt64Int64Entry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_unittest.TestMap.MapInt64Int64Entry.key', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_unittest.TestMap.MapInt64Int64Entry.value', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1588,
serialized_end=1640,
)
_TESTMAP_MAPUINT32UINT32ENTRY = _descriptor.Descriptor(
name='MapUint32Uint32Entry',
full_name='protobuf_unittest.TestMap.MapUint32Uint32Entry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_unittest.TestMap.MapUint32Uint32Entry.key', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_unittest.TestMap.MapUint32Uint32Entry.value', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1642,
serialized_end=1696,
)
_TESTMAP_MAPUINT64UINT64ENTRY = _descriptor.Descriptor(
name='MapUint64Uint64Entry',
full_name='protobuf_unittest.TestMap.MapUint64Uint64Entry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_unittest.TestMap.MapUint64Uint64Entry.key', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_unittest.TestMap.MapUint64Uint64Entry.value', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1698,
serialized_end=1752,
)
_TESTMAP_MAPSINT32SINT32ENTRY = _descriptor.Descriptor(
name='MapSint32Sint32Entry',
full_name='protobuf_unittest.TestMap.MapSint32Sint32Entry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_unittest.TestMap.MapSint32Sint32Entry.key', index=0,
number=1, type=17, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_unittest.TestMap.MapSint32Sint32Entry.value', index=1,
number=2, type=17, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1754,
serialized_end=1808,
)
_TESTMAP_MAPSINT64SINT64ENTRY = _descriptor.Descriptor(
name='MapSint64Sint64Entry',
full_name='protobuf_unittest.TestMap.MapSint64Sint64Entry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_unittest.TestMap.MapSint64Sint64Entry.key', index=0,
number=1, type=18, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_unittest.TestMap.MapSint64Sint64Entry.value', index=1,
number=2, type=18, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1810,
serialized_end=1864,
)
_TESTMAP_MAPFIXED32FIXED32ENTRY = _descriptor.Descriptor(
name='MapFixed32Fixed32Entry',
full_name='protobuf_unittest.TestMap.MapFixed32Fixed32Entry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_unittest.TestMap.MapFixed32Fixed32Entry.key', index=0,
number=1, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_unittest.TestMap.MapFixed32Fixed32Entry.value', index=1,
number=2, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1866,
serialized_end=1922,
)
# ---------------------------------------------------------------------------
# Synthesized map-entry descriptors for protobuf_unittest.TestMap.
#
# NOTE(review): this file is protoc-generated output — do not hand-edit the
# tables; regenerate from the .proto instead.  Each map<K, V> field in the
# .proto is compiled into a nested *Entry message with exactly two fields:
# 'key' (field number 1) and 'value' (field number 2).  The numeric `type` /
# `cpp_type` codes mirror the FieldDescriptorProto.Type / CppType enums in
# descriptor.proto.  The options blob _b('8\001') is a serialized
# MessageOptions — presumably map_entry=true (field 7, varint 1); the
# serialized_start/serialized_end offsets index into the file's serialized
# FileDescriptorProto and must stay in sync with it.
# ---------------------------------------------------------------------------

# Entry for TestMap.map_fixed64_fixed64 (fixed64 -> fixed64).
_TESTMAP_MAPFIXED64FIXED64ENTRY = _descriptor.Descriptor(
  name='MapFixed64Fixed64Entry',
  full_name='protobuf_unittest.TestMap.MapFixed64Fixed64Entry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_unittest.TestMap.MapFixed64Fixed64Entry.key', index=0,
      number=1, type=6, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_unittest.TestMap.MapFixed64Fixed64Entry.value', index=1,
      number=2, type=6, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1924,
  serialized_end=1980,
)

# Entry for TestMap.map_sfixed32_sfixed32 (sfixed32 -> sfixed32).
_TESTMAP_MAPSFIXED32SFIXED32ENTRY = _descriptor.Descriptor(
  name='MapSfixed32Sfixed32Entry',
  full_name='protobuf_unittest.TestMap.MapSfixed32Sfixed32Entry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_unittest.TestMap.MapSfixed32Sfixed32Entry.key', index=0,
      number=1, type=15, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_unittest.TestMap.MapSfixed32Sfixed32Entry.value', index=1,
      number=2, type=15, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1982,
  serialized_end=2040,
)

# Entry for TestMap.map_sfixed64_sfixed64 (sfixed64 -> sfixed64).
_TESTMAP_MAPSFIXED64SFIXED64ENTRY = _descriptor.Descriptor(
  name='MapSfixed64Sfixed64Entry',
  full_name='protobuf_unittest.TestMap.MapSfixed64Sfixed64Entry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_unittest.TestMap.MapSfixed64Sfixed64Entry.key', index=0,
      number=1, type=16, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_unittest.TestMap.MapSfixed64Sfixed64Entry.value', index=1,
      number=2, type=16, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2042,
  serialized_end=2100,
)

# Entry for TestMap.map_int32_float (int32 -> float).
_TESTMAP_MAPINT32FLOATENTRY = _descriptor.Descriptor(
  name='MapInt32FloatEntry',
  full_name='protobuf_unittest.TestMap.MapInt32FloatEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_unittest.TestMap.MapInt32FloatEntry.key', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_unittest.TestMap.MapInt32FloatEntry.value', index=1,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2102,
  serialized_end=2154,
)

# Entry for TestMap.map_int32_double (int32 -> double).
_TESTMAP_MAPINT32DOUBLEENTRY = _descriptor.Descriptor(
  name='MapInt32DoubleEntry',
  full_name='protobuf_unittest.TestMap.MapInt32DoubleEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_unittest.TestMap.MapInt32DoubleEntry.key', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_unittest.TestMap.MapInt32DoubleEntry.value', index=1,
      number=2, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2156,
  serialized_end=2209,
)

# Entry for TestMap.map_bool_bool (bool -> bool).
_TESTMAP_MAPBOOLBOOLENTRY = _descriptor.Descriptor(
  name='MapBoolBoolEntry',
  full_name='protobuf_unittest.TestMap.MapBoolBoolEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_unittest.TestMap.MapBoolBoolEntry.key', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_unittest.TestMap.MapBoolBoolEntry.value', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2211,
  serialized_end=2261,
)

# Entry for TestMap.map_string_string (string -> string).
_TESTMAP_MAPSTRINGSTRINGENTRY = _descriptor.Descriptor(
  name='MapStringStringEntry',
  full_name='protobuf_unittest.TestMap.MapStringStringEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_unittest.TestMap.MapStringStringEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_unittest.TestMap.MapStringStringEntry.value', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2263,
  serialized_end=2317,
)

# Entry for TestMap.map_int32_bytes (int32 -> bytes).
_TESTMAP_MAPINT32BYTESENTRY = _descriptor.Descriptor(
  name='MapInt32BytesEntry',
  full_name='protobuf_unittest.TestMap.MapInt32BytesEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_unittest.TestMap.MapInt32BytesEntry.key', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_unittest.TestMap.MapInt32BytesEntry.value', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2319,
  serialized_end=2371,
)

# Entry for TestMap.map_int32_enum (int32 -> enum).  The value's enum_type is
# patched in later by the generated cross-reference section (not visible here).
_TESTMAP_MAPINT32ENUMENTRY = _descriptor.Descriptor(
  name='MapInt32EnumEntry',
  full_name='protobuf_unittest.TestMap.MapInt32EnumEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_unittest.TestMap.MapInt32EnumEntry.key', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_unittest.TestMap.MapInt32EnumEntry.value', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2373,
  serialized_end=2452,
)

# Entry for TestMap.map_int32_foreign_message (int32 -> message).
_TESTMAP_MAPINT32FOREIGNMESSAGEENTRY = _descriptor.Descriptor(
  name='MapInt32ForeignMessageEntry',
  full_name='protobuf_unittest.TestMap.MapInt32ForeignMessageEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_unittest.TestMap.MapInt32ForeignMessageEntry.key', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_unittest.TestMap.MapInt32ForeignMessageEntry.value', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2454,
  serialized_end=2550,
)

# Entry for TestMap.map_string_foreign_message (string -> message).
_TESTMAP_MAPSTRINGFOREIGNMESSAGEENTRY = _descriptor.Descriptor(
  name='MapStringForeignMessageEntry',
  full_name='protobuf_unittest.TestMap.MapStringForeignMessageEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_unittest.TestMap.MapStringForeignMessageEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_unittest.TestMap.MapStringForeignMessageEntry.value', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2552,
  serialized_end=2649,
)
# Descriptor for protobuf_unittest.TestMap (protoc-generated — do not edit).
# Every map<K, V> field is represented at this level as a repeated (label=3)
# message field (type=11) of the corresponding nested *Entry type listed in
# nested_types below; message_type=None here is patched by the generated
# cross-reference section later in the file.
_TESTMAP = _descriptor.Descriptor(
  name='TestMap',
  full_name='protobuf_unittest.TestMap',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='map_int32_int32', full_name='protobuf_unittest.TestMap.map_int32_int32', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='map_int64_int64', full_name='protobuf_unittest.TestMap.map_int64_int64', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='map_uint32_uint32', full_name='protobuf_unittest.TestMap.map_uint32_uint32', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='map_uint64_uint64', full_name='protobuf_unittest.TestMap.map_uint64_uint64', index=3,
      number=4, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='map_sint32_sint32', full_name='protobuf_unittest.TestMap.map_sint32_sint32', index=4,
      number=5, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='map_sint64_sint64', full_name='protobuf_unittest.TestMap.map_sint64_sint64', index=5,
      number=6, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='map_fixed32_fixed32', full_name='protobuf_unittest.TestMap.map_fixed32_fixed32', index=6,
      number=7, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='map_fixed64_fixed64', full_name='protobuf_unittest.TestMap.map_fixed64_fixed64', index=7,
      number=8, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='map_sfixed32_sfixed32', full_name='protobuf_unittest.TestMap.map_sfixed32_sfixed32', index=8,
      number=9, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='map_sfixed64_sfixed64', full_name='protobuf_unittest.TestMap.map_sfixed64_sfixed64', index=9,
      number=10, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='map_int32_float', full_name='protobuf_unittest.TestMap.map_int32_float', index=10,
      number=11, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='map_int32_double', full_name='protobuf_unittest.TestMap.map_int32_double', index=11,
      number=12, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='map_bool_bool', full_name='protobuf_unittest.TestMap.map_bool_bool', index=12,
      number=13, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='map_string_string', full_name='protobuf_unittest.TestMap.map_string_string', index=13,
      number=14, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='map_int32_bytes', full_name='protobuf_unittest.TestMap.map_int32_bytes', index=14,
      number=15, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='map_int32_enum', full_name='protobuf_unittest.TestMap.map_int32_enum', index=15,
      number=16, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='map_int32_foreign_message', full_name='protobuf_unittest.TestMap.map_int32_foreign_message', index=16,
      number=17, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='map_string_foreign_message', full_name='protobuf_unittest.TestMap.map_string_foreign_message', index=17,
      number=18, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[_TESTMAP_MAPINT32INT32ENTRY, _TESTMAP_MAPINT64INT64ENTRY, _TESTMAP_MAPUINT32UINT32ENTRY, _TESTMAP_MAPUINT64UINT64ENTRY, _TESTMAP_MAPSINT32SINT32ENTRY, _TESTMAP_MAPSINT64SINT64ENTRY, _TESTMAP_MAPFIXED32FIXED32ENTRY, _TESTMAP_MAPFIXED64FIXED64ENTRY, _TESTMAP_MAPSFIXED32SFIXED32ENTRY, _TESTMAP_MAPSFIXED64SFIXED64ENTRY, _TESTMAP_MAPINT32FLOATENTRY, _TESTMAP_MAPINT32DOUBLEENTRY, _TESTMAP_MAPBOOLBOOLENTRY, _TESTMAP_MAPSTRINGSTRINGENTRY, _TESTMAP_MAPINT32BYTESENTRY, _TESTMAP_MAPINT32ENUMENTRY, _TESTMAP_MAPINT32FOREIGNMESSAGEENTRY, _TESTMAP_MAPSTRINGFOREIGNMESSAGEENTRY, ],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=131,
  serialized_end=2649,
)
# Descriptor for protobuf_unittest.TestMapSubmessage: a single optional
# (label=1) message-typed field 'test_map' (the TestMap message above;
# its message_type is linked in later by the generated cross-reference code).
_TESTMAPSUBMESSAGE = _descriptor.Descriptor(
  name='TestMapSubmessage',
  full_name='protobuf_unittest.TestMapSubmessage',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='test_map', full_name='protobuf_unittest.TestMapSubmessage.test_map', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2651,
  serialized_end=2716,
)
# Synthesized entry for TestMessageMap.map_int32_message (int32 -> message).
# The options blob _b('8\001') is a serialized MessageOptions — presumably
# map_entry=true; serialized_start/end offsets index the file's serialized
# FileDescriptorProto.
_TESTMESSAGEMAP_MAPINT32MESSAGEENTRY = _descriptor.Descriptor(
  name='MapInt32MessageEntry',
  full_name='protobuf_unittest.TestMessageMap.MapInt32MessageEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_unittest.TestMessageMap.MapInt32MessageEntry.key', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_unittest.TestMessageMap.MapInt32MessageEntry.value', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2820,
  serialized_end=2907,
)

# Descriptor for protobuf_unittest.TestMessageMap: one map field represented
# as a repeated (label=3) field of the nested entry message above.
_TESTMESSAGEMAP = _descriptor.Descriptor(
  name='TestMessageMap',
  full_name='protobuf_unittest.TestMessageMap',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='map_int32_message', full_name='protobuf_unittest.TestMessageMap.map_int32_message', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[_TESTMESSAGEMAP_MAPINT32MESSAGEENTRY, ],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2719,
  serialized_end=2907,
)
# Synthesized entry for TestSameTypeMap.map1 (int32 -> int32).  Two maps with
# identical key/value types get distinct synthesized entry messages.
_TESTSAMETYPEMAP_MAP1ENTRY = _descriptor.Descriptor(
  name='Map1Entry',
  full_name='protobuf_unittest.TestSameTypeMap.Map1Entry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_unittest.TestSameTypeMap.Map1Entry.key', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_unittest.TestSameTypeMap.Map1Entry.value', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3049,
  serialized_end=3092,
)

# Synthesized entry for TestSameTypeMap.map2 (int32 -> int32).
_TESTSAMETYPEMAP_MAP2ENTRY = _descriptor.Descriptor(
  name='Map2Entry',
  full_name='protobuf_unittest.TestSameTypeMap.Map2Entry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_unittest.TestSameTypeMap.Map2Entry.key', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_unittest.TestSameTypeMap.Map2Entry.value', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3094,
  serialized_end=3137,
)

# Descriptor for protobuf_unittest.TestSameTypeMap: two int32->int32 maps,
# each a repeated field of its own nested entry message.
_TESTSAMETYPEMAP = _descriptor.Descriptor(
  name='TestSameTypeMap',
  full_name='protobuf_unittest.TestSameTypeMap',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='map1', full_name='protobuf_unittest.TestSameTypeMap.map1', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='map2', full_name='protobuf_unittest.TestSameTypeMap.map2', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[_TESTSAMETYPEMAP_MAP1ENTRY, _TESTSAMETYPEMAP_MAP2ENTRY, ],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2910,
  serialized_end=3137,
)
# Synthesized entry for TestRequiredMessageMap.map_field (int32 -> message).
# The value's message_type is patched in by the generated cross-reference
# section later in the file.
_TESTREQUIREDMESSAGEMAP_MAPFIELDENTRY = _descriptor.Descriptor(
  name='MapFieldEntry',
  full_name='protobuf_unittest.TestRequiredMessageMap.MapFieldEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_unittest.TestRequiredMessageMap.MapFieldEntry.key', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_unittest.TestRequiredMessageMap.MapFieldEntry.value', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3242,
  serialized_end=3322,
)

# Descriptor for protobuf_unittest.TestRequiredMessageMap: a single map field
# represented as a repeated field of the nested entry message above.
_TESTREQUIREDMESSAGEMAP = _descriptor.Descriptor(
  name='TestRequiredMessageMap',
  full_name='protobuf_unittest.TestRequiredMessageMap',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='map_field', full_name='protobuf_unittest.TestRequiredMessageMap.map_field', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[_TESTREQUIREDMESSAGEMAP_MAPFIELDENTRY, ],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3140,
  serialized_end=3322,
)
# ---------------------------------------------------------------------------
# Synthesized map-entry descriptors for protobuf_unittest.TestArenaMap.
# These mirror the TestMap entries above (same scalar key/value type pairs
# and, notably, the same serialized_start/end offsets — presumably both
# messages share identical entry definitions in the serialized descriptor;
# verify against the .proto before relying on that).
# ---------------------------------------------------------------------------

# Entry for TestArenaMap.map_int32_int32 (int32 -> int32).
_TESTARENAMAP_MAPINT32INT32ENTRY = _descriptor.Descriptor(
  name='MapInt32Int32Entry',
  full_name='protobuf_unittest.TestArenaMap.MapInt32Int32Entry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_unittest.TestArenaMap.MapInt32Int32Entry.key', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_unittest.TestArenaMap.MapInt32Int32Entry.value', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1534,
  serialized_end=1586,
)

# Entry for TestArenaMap.map_int64_int64 (int64 -> int64).
_TESTARENAMAP_MAPINT64INT64ENTRY = _descriptor.Descriptor(
  name='MapInt64Int64Entry',
  full_name='protobuf_unittest.TestArenaMap.MapInt64Int64Entry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_unittest.TestArenaMap.MapInt64Int64Entry.key', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_unittest.TestArenaMap.MapInt64Int64Entry.value', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1588,
  serialized_end=1640,
)

# Entry for TestArenaMap.map_uint32_uint32 (uint32 -> uint32).
_TESTARENAMAP_MAPUINT32UINT32ENTRY = _descriptor.Descriptor(
  name='MapUint32Uint32Entry',
  full_name='protobuf_unittest.TestArenaMap.MapUint32Uint32Entry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_unittest.TestArenaMap.MapUint32Uint32Entry.key', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_unittest.TestArenaMap.MapUint32Uint32Entry.value', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1642,
  serialized_end=1696,
)

# Entry for TestArenaMap.map_uint64_uint64 (uint64 -> uint64).
_TESTARENAMAP_MAPUINT64UINT64ENTRY = _descriptor.Descriptor(
  name='MapUint64Uint64Entry',
  full_name='protobuf_unittest.TestArenaMap.MapUint64Uint64Entry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_unittest.TestArenaMap.MapUint64Uint64Entry.key', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_unittest.TestArenaMap.MapUint64Uint64Entry.value', index=1,
      number=2, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1698,
  serialized_end=1752,
)

# Entry for TestArenaMap.map_sint32_sint32 (sint32 -> sint32).
_TESTARENAMAP_MAPSINT32SINT32ENTRY = _descriptor.Descriptor(
  name='MapSint32Sint32Entry',
  full_name='protobuf_unittest.TestArenaMap.MapSint32Sint32Entry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_unittest.TestArenaMap.MapSint32Sint32Entry.key', index=0,
      number=1, type=17, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_unittest.TestArenaMap.MapSint32Sint32Entry.value', index=1,
      number=2, type=17, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1754,
  serialized_end=1808,
)

# Entry for TestArenaMap.map_sint64_sint64 (sint64 -> sint64).
_TESTARENAMAP_MAPSINT64SINT64ENTRY = _descriptor.Descriptor(
  name='MapSint64Sint64Entry',
  full_name='protobuf_unittest.TestArenaMap.MapSint64Sint64Entry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_unittest.TestArenaMap.MapSint64Sint64Entry.key', index=0,
      number=1, type=18, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_unittest.TestArenaMap.MapSint64Sint64Entry.value', index=1,
      number=2, type=18, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1810,
  serialized_end=1864,
)

# Entry for TestArenaMap.map_fixed32_fixed32 (fixed32 -> fixed32).
_TESTARENAMAP_MAPFIXED32FIXED32ENTRY = _descriptor.Descriptor(
  name='MapFixed32Fixed32Entry',
  full_name='protobuf_unittest.TestArenaMap.MapFixed32Fixed32Entry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_unittest.TestArenaMap.MapFixed32Fixed32Entry.key', index=0,
      number=1, type=7, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_unittest.TestArenaMap.MapFixed32Fixed32Entry.value', index=1,
      number=2, type=7, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1866,
  serialized_end=1922,
)

# Entry for TestArenaMap.map_fixed64_fixed64 (fixed64 -> fixed64).
_TESTARENAMAP_MAPFIXED64FIXED64ENTRY = _descriptor.Descriptor(
  name='MapFixed64Fixed64Entry',
  full_name='protobuf_unittest.TestArenaMap.MapFixed64Fixed64Entry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='protobuf_unittest.TestArenaMap.MapFixed64Fixed64Entry.key', index=0,
      number=1, type=6, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='protobuf_unittest.TestArenaMap.MapFixed64Fixed64Entry.value', index=1,
      number=2, type=6, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1924,
  serialized_end=1980,
)
_TESTARENAMAP_MAPSFIXED32SFIXED32ENTRY = _descriptor.Descriptor(
name='MapSfixed32Sfixed32Entry',
full_name='protobuf_unittest.TestArenaMap.MapSfixed32Sfixed32Entry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_unittest.TestArenaMap.MapSfixed32Sfixed32Entry.key', index=0,
number=1, type=15, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_unittest.TestArenaMap.MapSfixed32Sfixed32Entry.value', index=1,
number=2, type=15, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1982,
serialized_end=2040,
)
_TESTARENAMAP_MAPSFIXED64SFIXED64ENTRY = _descriptor.Descriptor(
name='MapSfixed64Sfixed64Entry',
full_name='protobuf_unittest.TestArenaMap.MapSfixed64Sfixed64Entry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_unittest.TestArenaMap.MapSfixed64Sfixed64Entry.key', index=0,
number=1, type=16, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_unittest.TestArenaMap.MapSfixed64Sfixed64Entry.value', index=1,
number=2, type=16, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2042,
serialized_end=2100,
)
_TESTARENAMAP_MAPINT32FLOATENTRY = _descriptor.Descriptor(
name='MapInt32FloatEntry',
full_name='protobuf_unittest.TestArenaMap.MapInt32FloatEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_unittest.TestArenaMap.MapInt32FloatEntry.key', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_unittest.TestArenaMap.MapInt32FloatEntry.value', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2102,
serialized_end=2154,
)
_TESTARENAMAP_MAPINT32DOUBLEENTRY = _descriptor.Descriptor(
name='MapInt32DoubleEntry',
full_name='protobuf_unittest.TestArenaMap.MapInt32DoubleEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_unittest.TestArenaMap.MapInt32DoubleEntry.key', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_unittest.TestArenaMap.MapInt32DoubleEntry.value', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2156,
serialized_end=2209,
)
_TESTARENAMAP_MAPBOOLBOOLENTRY = _descriptor.Descriptor(
name='MapBoolBoolEntry',
full_name='protobuf_unittest.TestArenaMap.MapBoolBoolEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_unittest.TestArenaMap.MapBoolBoolEntry.key', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_unittest.TestArenaMap.MapBoolBoolEntry.value', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2211,
serialized_end=2261,
)
_TESTARENAMAP_MAPSTRINGSTRINGENTRY = _descriptor.Descriptor(
name='MapStringStringEntry',
full_name='protobuf_unittest.TestArenaMap.MapStringStringEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_unittest.TestArenaMap.MapStringStringEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_unittest.TestArenaMap.MapStringStringEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2263,
serialized_end=2317,
)
_TESTARENAMAP_MAPINT32BYTESENTRY = _descriptor.Descriptor(
name='MapInt32BytesEntry',
full_name='protobuf_unittest.TestArenaMap.MapInt32BytesEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_unittest.TestArenaMap.MapInt32BytesEntry.key', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_unittest.TestArenaMap.MapInt32BytesEntry.value', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2319,
serialized_end=2371,
)
_TESTARENAMAP_MAPINT32ENUMENTRY = _descriptor.Descriptor(
name='MapInt32EnumEntry',
full_name='protobuf_unittest.TestArenaMap.MapInt32EnumEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_unittest.TestArenaMap.MapInt32EnumEntry.key', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_unittest.TestArenaMap.MapInt32EnumEntry.value', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2373,
serialized_end=2452,
)
_TESTARENAMAP_MAPINT32FOREIGNMESSAGEENTRY = _descriptor.Descriptor(
name='MapInt32ForeignMessageEntry',
full_name='protobuf_unittest.TestArenaMap.MapInt32ForeignMessageEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_unittest.TestArenaMap.MapInt32ForeignMessageEntry.key', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_unittest.TestArenaMap.MapInt32ForeignMessageEntry.value', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2454,
serialized_end=2550,
)
_TESTARENAMAP_MAPINT32FOREIGNMESSAGENOARENAENTRY = _descriptor.Descriptor(
name='MapInt32ForeignMessageNoArenaEntry',
full_name='protobuf_unittest.TestArenaMap.MapInt32ForeignMessageNoArenaEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_unittest.TestArenaMap.MapInt32ForeignMessageNoArenaEntry.key', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_unittest.TestArenaMap.MapInt32ForeignMessageNoArenaEntry.value', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5855,
serialized_end=5967,
)
_TESTARENAMAP = _descriptor.Descriptor(
name='TestArenaMap',
full_name='protobuf_unittest.TestArenaMap',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='map_int32_int32', full_name='protobuf_unittest.TestArenaMap.map_int32_int32', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='map_int64_int64', full_name='protobuf_unittest.TestArenaMap.map_int64_int64', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='map_uint32_uint32', full_name='protobuf_unittest.TestArenaMap.map_uint32_uint32', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='map_uint64_uint64', full_name='protobuf_unittest.TestArenaMap.map_uint64_uint64', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='map_sint32_sint32', full_name='protobuf_unittest.TestArenaMap.map_sint32_sint32', index=4,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='map_sint64_sint64', full_name='protobuf_unittest.TestArenaMap.map_sint64_sint64', index=5,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='map_fixed32_fixed32', full_name='protobuf_unittest.TestArenaMap.map_fixed32_fixed32', index=6,
number=7, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='map_fixed64_fixed64', full_name='protobuf_unittest.TestArenaMap.map_fixed64_fixed64', index=7,
number=8, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='map_sfixed32_sfixed32', full_name='protobuf_unittest.TestArenaMap.map_sfixed32_sfixed32', index=8,
number=9, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='map_sfixed64_sfixed64', full_name='protobuf_unittest.TestArenaMap.map_sfixed64_sfixed64', index=9,
number=10, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='map_int32_float', full_name='protobuf_unittest.TestArenaMap.map_int32_float', index=10,
number=11, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='map_int32_double', full_name='protobuf_unittest.TestArenaMap.map_int32_double', index=11,
number=12, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='map_bool_bool', full_name='protobuf_unittest.TestArenaMap.map_bool_bool', index=12,
number=13, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='map_string_string', full_name='protobuf_unittest.TestArenaMap.map_string_string', index=13,
number=14, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='map_int32_bytes', full_name='protobuf_unittest.TestArenaMap.map_int32_bytes', index=14,
number=15, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='map_int32_enum', full_name='protobuf_unittest.TestArenaMap.map_int32_enum', index=15,
number=16, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='map_int32_foreign_message', full_name='protobuf_unittest.TestArenaMap.map_int32_foreign_message', index=16,
number=17, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='map_int32_foreign_message_no_arena', full_name='protobuf_unittest.TestArenaMap.map_int32_foreign_message_no_arena', index=17,
number=18, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[_TESTARENAMAP_MAPINT32INT32ENTRY, _TESTARENAMAP_MAPINT64INT64ENTRY, _TESTARENAMAP_MAPUINT32UINT32ENTRY, _TESTARENAMAP_MAPUINT64UINT64ENTRY, _TESTARENAMAP_MAPSINT32SINT32ENTRY, _TESTARENAMAP_MAPSINT64SINT64ENTRY, _TESTARENAMAP_MAPFIXED32FIXED32ENTRY, _TESTARENAMAP_MAPFIXED64FIXED64ENTRY, _TESTARENAMAP_MAPSFIXED32SFIXED32ENTRY, _TESTARENAMAP_MAPSFIXED64SFIXED64ENTRY, _TESTARENAMAP_MAPINT32FLOATENTRY, _TESTARENAMAP_MAPINT32DOUBLEENTRY, _TESTARENAMAP_MAPBOOLBOOLENTRY, _TESTARENAMAP_MAPSTRINGSTRINGENTRY, _TESTARENAMAP_MAPINT32BYTESENTRY, _TESTARENAMAP_MAPINT32ENUMENTRY, _TESTARENAMAP_MAPINT32FOREIGNMESSAGEENTRY, _TESTARENAMAP_MAPINT32FOREIGNMESSAGENOARENAENTRY, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3325,
serialized_end=5967,
)
_MESSAGECONTAININGENUMCALLEDTYPE_TYPEENTRY = _descriptor.Descriptor(
name='TypeEntry',
full_name='protobuf_unittest.MessageContainingEnumCalledType.TypeEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_unittest.MessageContainingEnumCalledType.TypeEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_unittest.MessageContainingEnumCalledType.TypeEntry.value', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6081,
serialized_end=6176,
)
_MESSAGECONTAININGENUMCALLEDTYPE = _descriptor.Descriptor(
name='MessageContainingEnumCalledType',
full_name='protobuf_unittest.MessageContainingEnumCalledType',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='protobuf_unittest.MessageContainingEnumCalledType.type', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[_MESSAGECONTAININGENUMCALLEDTYPE_TYPEENTRY, ],
enum_types=[
_MESSAGECONTAININGENUMCALLEDTYPE_TYPE,
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5970,
serialized_end=6198,
)
_MESSAGECONTAININGMAPCALLEDENTRY_ENTRYENTRY = _descriptor.Descriptor(
name='EntryEntry',
full_name='protobuf_unittest.MessageContainingMapCalledEntry.EntryEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_unittest.MessageContainingMapCalledEntry.EntryEntry.key', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_unittest.MessageContainingMapCalledEntry.EntryEntry.value', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6314,
serialized_end=6358,
)
_MESSAGECONTAININGMAPCALLEDENTRY = _descriptor.Descriptor(
name='MessageContainingMapCalledEntry',
full_name='protobuf_unittest.MessageContainingMapCalledEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='entry', full_name='protobuf_unittest.MessageContainingMapCalledEntry.entry', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[_MESSAGECONTAININGMAPCALLEDENTRY_ENTRYENTRY, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6201,
serialized_end=6358,
)
_TESTRECURSIVEMAPMESSAGE_AENTRY = _descriptor.Descriptor(
name='AEntry',
full_name='protobuf_unittest.TestRecursiveMapMessage.AEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='protobuf_unittest.TestRecursiveMapMessage.AEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='protobuf_unittest.TestRecursiveMapMessage.AEntry.value', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6450,
serialized_end=6534,
)
_TESTRECURSIVEMAPMESSAGE = _descriptor.Descriptor(
name='TestRecursiveMapMessage',
full_name='protobuf_unittest.TestRecursiveMapMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='a', full_name='protobuf_unittest.TestRecursiveMapMessage.a', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[_TESTRECURSIVEMAPMESSAGE_AENTRY, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6361,
serialized_end=6534,
)
_TESTMAP_MAPINT32INT32ENTRY.containing_type = _TESTMAP
_TESTMAP_MAPINT64INT64ENTRY.containing_type = _TESTMAP
_TESTMAP_MAPUINT32UINT32ENTRY.containing_type = _TESTMAP
_TESTMAP_MAPUINT64UINT64ENTRY.containing_type = _TESTMAP
_TESTMAP_MAPSINT32SINT32ENTRY.containing_type = _TESTMAP
_TESTMAP_MAPSINT64SINT64ENTRY.containing_type = _TESTMAP
_TESTMAP_MAPFIXED32FIXED32ENTRY.containing_type = _TESTMAP
_TESTMAP_MAPFIXED64FIXED64ENTRY.containing_type = _TESTMAP
_TESTMAP_MAPSFIXED32SFIXED32ENTRY.containing_type = _TESTMAP
_TESTMAP_MAPSFIXED64SFIXED64ENTRY.containing_type = _TESTMAP
_TESTMAP_MAPINT32FLOATENTRY.containing_type = _TESTMAP
_TESTMAP_MAPINT32DOUBLEENTRY.containing_type = _TESTMAP
_TESTMAP_MAPBOOLBOOLENTRY.containing_type = _TESTMAP
_TESTMAP_MAPSTRINGSTRINGENTRY.containing_type = _TESTMAP
_TESTMAP_MAPINT32BYTESENTRY.containing_type = _TESTMAP
_TESTMAP_MAPINT32ENUMENTRY.fields_by_name['value'].enum_type = _MAPENUM
_TESTMAP_MAPINT32ENUMENTRY.containing_type = _TESTMAP
_TESTMAP_MAPINT32FOREIGNMESSAGEENTRY.fields_by_name['value'].message_type = google_dot_protobuf_dot_unittest__pb2._FOREIGNMESSAGE
_TESTMAP_MAPINT32FOREIGNMESSAGEENTRY.containing_type = _TESTMAP
_TESTMAP_MAPSTRINGFOREIGNMESSAGEENTRY.fields_by_name['value'].message_type = google_dot_protobuf_dot_unittest__pb2._FOREIGNMESSAGE
_TESTMAP_MAPSTRINGFOREIGNMESSAGEENTRY.containing_type = _TESTMAP
_TESTMAP.fields_by_name['map_int32_int32'].message_type = _TESTMAP_MAPINT32INT32ENTRY
_TESTMAP.fields_by_name['map_int64_int64'].message_type = _TESTMAP_MAPINT64INT64ENTRY
_TESTMAP.fields_by_name['map_uint32_uint32'].message_type = _TESTMAP_MAPUINT32UINT32ENTRY
_TESTMAP.fields_by_name['map_uint64_uint64'].message_type = _TESTMAP_MAPUINT64UINT64ENTRY
_TESTMAP.fields_by_name['map_sint32_sint32'].message_type = _TESTMAP_MAPSINT32SINT32ENTRY
_TESTMAP.fields_by_name['map_sint64_sint64'].message_type = _TESTMAP_MAPSINT64SINT64ENTRY
_TESTMAP.fields_by_name['map_fixed32_fixed32'].message_type = _TESTMAP_MAPFIXED32FIXED32ENTRY
_TESTMAP.fields_by_name['map_fixed64_fixed64'].message_type = _TESTMAP_MAPFIXED64FIXED64ENTRY
_TESTMAP.fields_by_name['map_sfixed32_sfixed32'].message_type = _TESTMAP_MAPSFIXED32SFIXED32ENTRY
_TESTMAP.fields_by_name['map_sfixed64_sfixed64'].message_type = _TESTMAP_MAPSFIXED64SFIXED64ENTRY
_TESTMAP.fields_by_name['map_int32_float'].message_type = _TESTMAP_MAPINT32FLOATENTRY
_TESTMAP.fields_by_name['map_int32_double'].message_type = _TESTMAP_MAPINT32DOUBLEENTRY
_TESTMAP.fields_by_name['map_bool_bool'].message_type = _TESTMAP_MAPBOOLBOOLENTRY
_TESTMAP.fields_by_name['map_string_string'].message_type = _TESTMAP_MAPSTRINGSTRINGENTRY
_TESTMAP.fields_by_name['map_int32_bytes'].message_type = _TESTMAP_MAPINT32BYTESENTRY
_TESTMAP.fields_by_name['map_int32_enum'].message_type = _TESTMAP_MAPINT32ENUMENTRY
_TESTMAP.fields_by_name['map_int32_foreign_message'].message_type = _TESTMAP_MAPINT32FOREIGNMESSAGEENTRY
_TESTMAP.fields_by_name['map_string_foreign_message'].message_type = _TESTMAP_MAPSTRINGFOREIGNMESSAGEENTRY
_TESTMAPSUBMESSAGE.fields_by_name['test_map'].message_type = _TESTMAP
_TESTMESSAGEMAP_MAPINT32MESSAGEENTRY.fields_by_name['value'].message_type = google_dot_protobuf_dot_unittest__pb2._TESTALLTYPES
_TESTMESSAGEMAP_MAPINT32MESSAGEENTRY.containing_type = _TESTMESSAGEMAP
_TESTMESSAGEMAP.fields_by_name['map_int32_message'].message_type = _TESTMESSAGEMAP_MAPINT32MESSAGEENTRY
_TESTSAMETYPEMAP_MAP1ENTRY.containing_type = _TESTSAMETYPEMAP
_TESTSAMETYPEMAP_MAP2ENTRY.containing_type = _TESTSAMETYPEMAP
_TESTSAMETYPEMAP.fields_by_name['map1'].message_type = _TESTSAMETYPEMAP_MAP1ENTRY
_TESTSAMETYPEMAP.fields_by_name['map2'].message_type = _TESTSAMETYPEMAP_MAP2ENTRY
_TESTREQUIREDMESSAGEMAP_MAPFIELDENTRY.fields_by_name['value'].message_type = google_dot_protobuf_dot_unittest__pb2._TESTREQUIRED
_TESTREQUIREDMESSAGEMAP_MAPFIELDENTRY.containing_type = _TESTREQUIREDMESSAGEMAP
_TESTREQUIREDMESSAGEMAP.fields_by_name['map_field'].message_type = _TESTREQUIREDMESSAGEMAP_MAPFIELDENTRY
_TESTARENAMAP_MAPINT32INT32ENTRY.containing_type = _TESTARENAMAP
_TESTARENAMAP_MAPINT64INT64ENTRY.containing_type = _TESTARENAMAP
_TESTARENAMAP_MAPUINT32UINT32ENTRY.containing_type = _TESTARENAMAP
_TESTARENAMAP_MAPUINT64UINT64ENTRY.containing_type = _TESTARENAMAP
_TESTARENAMAP_MAPSINT32SINT32ENTRY.containing_type = _TESTARENAMAP
_TESTARENAMAP_MAPSINT64SINT64ENTRY.containing_type = _TESTARENAMAP
_TESTARENAMAP_MAPFIXED32FIXED32ENTRY.containing_type = _TESTARENAMAP
_TESTARENAMAP_MAPFIXED64FIXED64ENTRY.containing_type = _TESTARENAMAP
_TESTARENAMAP_MAPSFIXED32SFIXED32ENTRY.containing_type = _TESTARENAMAP
_TESTARENAMAP_MAPSFIXED64SFIXED64ENTRY.containing_type = _TESTARENAMAP
_TESTARENAMAP_MAPINT32FLOATENTRY.containing_type = _TESTARENAMAP
_TESTARENAMAP_MAPINT32DOUBLEENTRY.containing_type = _TESTARENAMAP
_TESTARENAMAP_MAPBOOLBOOLENTRY.containing_type = _TESTARENAMAP
_TESTARENAMAP_MAPSTRINGSTRINGENTRY.containing_type = _TESTARENAMAP
_TESTARENAMAP_MAPINT32BYTESENTRY.containing_type = _TESTARENAMAP
_TESTARENAMAP_MAPINT32ENUMENTRY.fields_by_name['value'].enum_type = _MAPENUM
_TESTARENAMAP_MAPINT32ENUMENTRY.containing_type = _TESTARENAMAP
_TESTARENAMAP_MAPINT32FOREIGNMESSAGEENTRY.fields_by_name['value'].message_type = google_dot_protobuf_dot_unittest__pb2._FOREIGNMESSAGE
_TESTARENAMAP_MAPINT32FOREIGNMESSAGEENTRY.containing_type = _TESTARENAMAP
_TESTARENAMAP_MAPINT32FOREIGNMESSAGENOARENAENTRY.fields_by_name['value'].message_type = google_dot_protobuf_dot_unittest__no__arena__pb2._FOREIGNMESSAGE
_TESTARENAMAP_MAPINT32FOREIGNMESSAGENOARENAENTRY.containing_type = _TESTARENAMAP
_TESTARENAMAP.fields_by_name['map_int32_int32'].message_type = _TESTARENAMAP_MAPINT32INT32ENTRY
_TESTARENAMAP.fields_by_name['map_int64_int64'].message_type = _TESTARENAMAP_MAPINT64INT64ENTRY
_TESTARENAMAP.fields_by_name['map_uint32_uint32'].message_type = _TESTARENAMAP_MAPUINT32UINT32ENTRY
_TESTARENAMAP.fields_by_name['map_uint64_uint64'].message_type = _TESTARENAMAP_MAPUINT64UINT64ENTRY
_TESTARENAMAP.fields_by_name['map_sint32_sint32'].message_type = _TESTARENAMAP_MAPSINT32SINT32ENTRY
_TESTARENAMAP.fields_by_name['map_sint64_sint64'].message_type = _TESTARENAMAP_MAPSINT64SINT64ENTRY
_TESTARENAMAP.fields_by_name['map_fixed32_fixed32'].message_type = _TESTARENAMAP_MAPFIXED32FIXED32ENTRY
_TESTARENAMAP.fields_by_name['map_fixed64_fixed64'].message_type = _TESTARENAMAP_MAPFIXED64FIXED64ENTRY
_TESTARENAMAP.fields_by_name['map_sfixed32_sfixed32'].message_type = _TESTARENAMAP_MAPSFIXED32SFIXED32ENTRY
_TESTARENAMAP.fields_by_name['map_sfixed64_sfixed64'].message_type = _TESTARENAMAP_MAPSFIXED64SFIXED64ENTRY
_TESTARENAMAP.fields_by_name['map_int32_float'].message_type = _TESTARENAMAP_MAPINT32FLOATENTRY
_TESTARENAMAP.fields_by_name['map_int32_double'].message_type = _TESTARENAMAP_MAPINT32DOUBLEENTRY
_TESTARENAMAP.fields_by_name['map_bool_bool'].message_type = _TESTARENAMAP_MAPBOOLBOOLENTRY
_TESTARENAMAP.fields_by_name['map_string_string'].message_type = _TESTARENAMAP_MAPSTRINGSTRINGENTRY
_TESTARENAMAP.fields_by_name['map_int32_bytes'].message_type = _TESTARENAMAP_MAPINT32BYTESENTRY
_TESTARENAMAP.fields_by_name['map_int32_enum'].message_type = _TESTARENAMAP_MAPINT32ENUMENTRY
_TESTARENAMAP.fields_by_name['map_int32_foreign_message'].message_type = _TESTARENAMAP_MAPINT32FOREIGNMESSAGEENTRY
_TESTARENAMAP.fields_by_name['map_int32_foreign_message_no_arena'].message_type = _TESTARENAMAP_MAPINT32FOREIGNMESSAGENOARENAENTRY
_MESSAGECONTAININGENUMCALLEDTYPE_TYPEENTRY.fields_by_name['value'].message_type = _MESSAGECONTAININGENUMCALLEDTYPE
_MESSAGECONTAININGENUMCALLEDTYPE_TYPEENTRY.containing_type = _MESSAGECONTAININGENUMCALLEDTYPE
_MESSAGECONTAININGENUMCALLEDTYPE.fields_by_name['type'].message_type = _MESSAGECONTAININGENUMCALLEDTYPE_TYPEENTRY
_MESSAGECONTAININGENUMCALLEDTYPE_TYPE.containing_type = _MESSAGECONTAININGENUMCALLEDTYPE
_MESSAGECONTAININGMAPCALLEDENTRY_ENTRYENTRY.containing_type = _MESSAGECONTAININGMAPCALLEDENTRY
_MESSAGECONTAININGMAPCALLEDENTRY.fields_by_name['entry'].message_type = _MESSAGECONTAININGMAPCALLEDENTRY_ENTRYENTRY
_TESTRECURSIVEMAPMESSAGE_AENTRY.fields_by_name['value'].message_type = _TESTRECURSIVEMAPMESSAGE
_TESTRECURSIVEMAPMESSAGE_AENTRY.containing_type = _TESTRECURSIVEMAPMESSAGE
_TESTRECURSIVEMAPMESSAGE.fields_by_name['a'].message_type = _TESTRECURSIVEMAPMESSAGE_AENTRY
DESCRIPTOR.message_types_by_name['TestMap'] = _TESTMAP
DESCRIPTOR.message_types_by_name['TestMapSubmessage'] = _TESTMAPSUBMESSAGE
DESCRIPTOR.message_types_by_name['TestMessageMap'] = _TESTMESSAGEMAP
DESCRIPTOR.message_types_by_name['TestSameTypeMap'] = _TESTSAMETYPEMAP
DESCRIPTOR.message_types_by_name['TestRequiredMessageMap'] = _TESTREQUIREDMESSAGEMAP
DESCRIPTOR.message_types_by_name['TestArenaMap'] = _TESTARENAMAP
DESCRIPTOR.message_types_by_name['MessageContainingEnumCalledType'] = _MESSAGECONTAININGENUMCALLEDTYPE
DESCRIPTOR.message_types_by_name['MessageContainingMapCalledEntry'] = _MESSAGECONTAININGMAPCALLEDENTRY
DESCRIPTOR.message_types_by_name['TestRecursiveMapMessage'] = _TESTRECURSIVEMAPMESSAGE
DESCRIPTOR.enum_types_by_name['MapEnum'] = _MAPENUM
TestMap = _reflection.GeneratedProtocolMessageType('TestMap', (_message.Message,), dict(
MapInt32Int32Entry = _reflection.GeneratedProtocolMessageType('MapInt32Int32Entry', (_message.Message,), dict(
DESCRIPTOR = _TESTMAP_MAPINT32INT32ENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapInt32Int32Entry)
))
,
MapInt64Int64Entry = _reflection.GeneratedProtocolMessageType('MapInt64Int64Entry', (_message.Message,), dict(
DESCRIPTOR = _TESTMAP_MAPINT64INT64ENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapInt64Int64Entry)
))
,
MapUint32Uint32Entry = _reflection.GeneratedProtocolMessageType('MapUint32Uint32Entry', (_message.Message,), dict(
DESCRIPTOR = _TESTMAP_MAPUINT32UINT32ENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapUint32Uint32Entry)
))
,
MapUint64Uint64Entry = _reflection.GeneratedProtocolMessageType('MapUint64Uint64Entry', (_message.Message,), dict(
DESCRIPTOR = _TESTMAP_MAPUINT64UINT64ENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapUint64Uint64Entry)
))
,
MapSint32Sint32Entry = _reflection.GeneratedProtocolMessageType('MapSint32Sint32Entry', (_message.Message,), dict(
DESCRIPTOR = _TESTMAP_MAPSINT32SINT32ENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapSint32Sint32Entry)
))
,
MapSint64Sint64Entry = _reflection.GeneratedProtocolMessageType('MapSint64Sint64Entry', (_message.Message,), dict(
DESCRIPTOR = _TESTMAP_MAPSINT64SINT64ENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapSint64Sint64Entry)
))
,
MapFixed32Fixed32Entry = _reflection.GeneratedProtocolMessageType('MapFixed32Fixed32Entry', (_message.Message,), dict(
DESCRIPTOR = _TESTMAP_MAPFIXED32FIXED32ENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapFixed32Fixed32Entry)
))
,
MapFixed64Fixed64Entry = _reflection.GeneratedProtocolMessageType('MapFixed64Fixed64Entry', (_message.Message,), dict(
DESCRIPTOR = _TESTMAP_MAPFIXED64FIXED64ENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapFixed64Fixed64Entry)
))
,
MapSfixed32Sfixed32Entry = _reflection.GeneratedProtocolMessageType('MapSfixed32Sfixed32Entry', (_message.Message,), dict(
DESCRIPTOR = _TESTMAP_MAPSFIXED32SFIXED32ENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapSfixed32Sfixed32Entry)
))
,
MapSfixed64Sfixed64Entry = _reflection.GeneratedProtocolMessageType('MapSfixed64Sfixed64Entry', (_message.Message,), dict(
DESCRIPTOR = _TESTMAP_MAPSFIXED64SFIXED64ENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapSfixed64Sfixed64Entry)
))
,
MapInt32FloatEntry = _reflection.GeneratedProtocolMessageType('MapInt32FloatEntry', (_message.Message,), dict(
DESCRIPTOR = _TESTMAP_MAPINT32FLOATENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapInt32FloatEntry)
))
,
MapInt32DoubleEntry = _reflection.GeneratedProtocolMessageType('MapInt32DoubleEntry', (_message.Message,), dict(
DESCRIPTOR = _TESTMAP_MAPINT32DOUBLEENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapInt32DoubleEntry)
))
,
MapBoolBoolEntry = _reflection.GeneratedProtocolMessageType('MapBoolBoolEntry', (_message.Message,), dict(
DESCRIPTOR = _TESTMAP_MAPBOOLBOOLENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapBoolBoolEntry)
))
,
MapStringStringEntry = _reflection.GeneratedProtocolMessageType('MapStringStringEntry', (_message.Message,), dict(
DESCRIPTOR = _TESTMAP_MAPSTRINGSTRINGENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapStringStringEntry)
))
,
MapInt32BytesEntry = _reflection.GeneratedProtocolMessageType('MapInt32BytesEntry', (_message.Message,), dict(
DESCRIPTOR = _TESTMAP_MAPINT32BYTESENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapInt32BytesEntry)
))
,
MapInt32EnumEntry = _reflection.GeneratedProtocolMessageType('MapInt32EnumEntry', (_message.Message,), dict(
DESCRIPTOR = _TESTMAP_MAPINT32ENUMENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapInt32EnumEntry)
))
,
MapInt32ForeignMessageEntry = _reflection.GeneratedProtocolMessageType('MapInt32ForeignMessageEntry', (_message.Message,), dict(
DESCRIPTOR = _TESTMAP_MAPINT32FOREIGNMESSAGEENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapInt32ForeignMessageEntry)
))
,
MapStringForeignMessageEntry = _reflection.GeneratedProtocolMessageType('MapStringForeignMessageEntry', (_message.Message,), dict(
DESCRIPTOR = _TESTMAP_MAPSTRINGFOREIGNMESSAGEENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapStringForeignMessageEntry)
))
,
DESCRIPTOR = _TESTMAP,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap)
))
_sym_db.RegisterMessage(TestMap)
_sym_db.RegisterMessage(TestMap.MapInt32Int32Entry)
_sym_db.RegisterMessage(TestMap.MapInt64Int64Entry)
_sym_db.RegisterMessage(TestMap.MapUint32Uint32Entry)
_sym_db.RegisterMessage(TestMap.MapUint64Uint64Entry)
_sym_db.RegisterMessage(TestMap.MapSint32Sint32Entry)
_sym_db.RegisterMessage(TestMap.MapSint64Sint64Entry)
_sym_db.RegisterMessage(TestMap.MapFixed32Fixed32Entry)
_sym_db.RegisterMessage(TestMap.MapFixed64Fixed64Entry)
_sym_db.RegisterMessage(TestMap.MapSfixed32Sfixed32Entry)
_sym_db.RegisterMessage(TestMap.MapSfixed64Sfixed64Entry)
_sym_db.RegisterMessage(TestMap.MapInt32FloatEntry)
_sym_db.RegisterMessage(TestMap.MapInt32DoubleEntry)
_sym_db.RegisterMessage(TestMap.MapBoolBoolEntry)
_sym_db.RegisterMessage(TestMap.MapStringStringEntry)
_sym_db.RegisterMessage(TestMap.MapInt32BytesEntry)
_sym_db.RegisterMessage(TestMap.MapInt32EnumEntry)
_sym_db.RegisterMessage(TestMap.MapInt32ForeignMessageEntry)
_sym_db.RegisterMessage(TestMap.MapStringForeignMessageEntry)
TestMapSubmessage = _reflection.GeneratedProtocolMessageType('TestMapSubmessage', (_message.Message,), dict(
DESCRIPTOR = _TESTMAPSUBMESSAGE,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestMapSubmessage)
))
_sym_db.RegisterMessage(TestMapSubmessage)
TestMessageMap = _reflection.GeneratedProtocolMessageType('TestMessageMap', (_message.Message,), dict(
MapInt32MessageEntry = _reflection.GeneratedProtocolMessageType('MapInt32MessageEntry', (_message.Message,), dict(
DESCRIPTOR = _TESTMESSAGEMAP_MAPINT32MESSAGEENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestMessageMap.MapInt32MessageEntry)
))
,
DESCRIPTOR = _TESTMESSAGEMAP,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestMessageMap)
))
_sym_db.RegisterMessage(TestMessageMap)
_sym_db.RegisterMessage(TestMessageMap.MapInt32MessageEntry)
TestSameTypeMap = _reflection.GeneratedProtocolMessageType('TestSameTypeMap', (_message.Message,), dict(
Map1Entry = _reflection.GeneratedProtocolMessageType('Map1Entry', (_message.Message,), dict(
DESCRIPTOR = _TESTSAMETYPEMAP_MAP1ENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestSameTypeMap.Map1Entry)
))
,
Map2Entry = _reflection.GeneratedProtocolMessageType('Map2Entry', (_message.Message,), dict(
DESCRIPTOR = _TESTSAMETYPEMAP_MAP2ENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestSameTypeMap.Map2Entry)
))
,
DESCRIPTOR = _TESTSAMETYPEMAP,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestSameTypeMap)
))
_sym_db.RegisterMessage(TestSameTypeMap)
_sym_db.RegisterMessage(TestSameTypeMap.Map1Entry)
_sym_db.RegisterMessage(TestSameTypeMap.Map2Entry)
TestRequiredMessageMap = _reflection.GeneratedProtocolMessageType('TestRequiredMessageMap', (_message.Message,), dict(
MapFieldEntry = _reflection.GeneratedProtocolMessageType('MapFieldEntry', (_message.Message,), dict(
DESCRIPTOR = _TESTREQUIREDMESSAGEMAP_MAPFIELDENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestRequiredMessageMap.MapFieldEntry)
))
,
DESCRIPTOR = _TESTREQUIREDMESSAGEMAP,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestRequiredMessageMap)
))
_sym_db.RegisterMessage(TestRequiredMessageMap)
_sym_db.RegisterMessage(TestRequiredMessageMap.MapFieldEntry)
TestArenaMap = _reflection.GeneratedProtocolMessageType('TestArenaMap', (_message.Message,), dict(
MapInt32Int32Entry = _reflection.GeneratedProtocolMessageType('MapInt32Int32Entry', (_message.Message,), dict(
DESCRIPTOR = _TESTARENAMAP_MAPINT32INT32ENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapInt32Int32Entry)
))
,
MapInt64Int64Entry = _reflection.GeneratedProtocolMessageType('MapInt64Int64Entry', (_message.Message,), dict(
DESCRIPTOR = _TESTARENAMAP_MAPINT64INT64ENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapInt64Int64Entry)
))
,
MapUint32Uint32Entry = _reflection.GeneratedProtocolMessageType('MapUint32Uint32Entry', (_message.Message,), dict(
DESCRIPTOR = _TESTARENAMAP_MAPUINT32UINT32ENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapUint32Uint32Entry)
))
,
MapUint64Uint64Entry = _reflection.GeneratedProtocolMessageType('MapUint64Uint64Entry', (_message.Message,), dict(
DESCRIPTOR = _TESTARENAMAP_MAPUINT64UINT64ENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapUint64Uint64Entry)
))
,
MapSint32Sint32Entry = _reflection.GeneratedProtocolMessageType('MapSint32Sint32Entry', (_message.Message,), dict(
DESCRIPTOR = _TESTARENAMAP_MAPSINT32SINT32ENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapSint32Sint32Entry)
))
,
MapSint64Sint64Entry = _reflection.GeneratedProtocolMessageType('MapSint64Sint64Entry', (_message.Message,), dict(
DESCRIPTOR = _TESTARENAMAP_MAPSINT64SINT64ENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapSint64Sint64Entry)
))
,
MapFixed32Fixed32Entry = _reflection.GeneratedProtocolMessageType('MapFixed32Fixed32Entry', (_message.Message,), dict(
DESCRIPTOR = _TESTARENAMAP_MAPFIXED32FIXED32ENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapFixed32Fixed32Entry)
))
,
MapFixed64Fixed64Entry = _reflection.GeneratedProtocolMessageType('MapFixed64Fixed64Entry', (_message.Message,), dict(
DESCRIPTOR = _TESTARENAMAP_MAPFIXED64FIXED64ENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapFixed64Fixed64Entry)
))
,
MapSfixed32Sfixed32Entry = _reflection.GeneratedProtocolMessageType('MapSfixed32Sfixed32Entry', (_message.Message,), dict(
DESCRIPTOR = _TESTARENAMAP_MAPSFIXED32SFIXED32ENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapSfixed32Sfixed32Entry)
))
,
MapSfixed64Sfixed64Entry = _reflection.GeneratedProtocolMessageType('MapSfixed64Sfixed64Entry', (_message.Message,), dict(
DESCRIPTOR = _TESTARENAMAP_MAPSFIXED64SFIXED64ENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapSfixed64Sfixed64Entry)
))
,
MapInt32FloatEntry = _reflection.GeneratedProtocolMessageType('MapInt32FloatEntry', (_message.Message,), dict(
DESCRIPTOR = _TESTARENAMAP_MAPINT32FLOATENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapInt32FloatEntry)
))
,
MapInt32DoubleEntry = _reflection.GeneratedProtocolMessageType('MapInt32DoubleEntry', (_message.Message,), dict(
DESCRIPTOR = _TESTARENAMAP_MAPINT32DOUBLEENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapInt32DoubleEntry)
))
,
MapBoolBoolEntry = _reflection.GeneratedProtocolMessageType('MapBoolBoolEntry', (_message.Message,), dict(
DESCRIPTOR = _TESTARENAMAP_MAPBOOLBOOLENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapBoolBoolEntry)
))
,
MapStringStringEntry = _reflection.GeneratedProtocolMessageType('MapStringStringEntry', (_message.Message,), dict(
DESCRIPTOR = _TESTARENAMAP_MAPSTRINGSTRINGENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapStringStringEntry)
))
,
MapInt32BytesEntry = _reflection.GeneratedProtocolMessageType('MapInt32BytesEntry', (_message.Message,), dict(
DESCRIPTOR = _TESTARENAMAP_MAPINT32BYTESENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapInt32BytesEntry)
))
,
MapInt32EnumEntry = _reflection.GeneratedProtocolMessageType('MapInt32EnumEntry', (_message.Message,), dict(
DESCRIPTOR = _TESTARENAMAP_MAPINT32ENUMENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapInt32EnumEntry)
))
,
MapInt32ForeignMessageEntry = _reflection.GeneratedProtocolMessageType('MapInt32ForeignMessageEntry', (_message.Message,), dict(
DESCRIPTOR = _TESTARENAMAP_MAPINT32FOREIGNMESSAGEENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapInt32ForeignMessageEntry)
))
,
MapInt32ForeignMessageNoArenaEntry = _reflection.GeneratedProtocolMessageType('MapInt32ForeignMessageNoArenaEntry', (_message.Message,), dict(
DESCRIPTOR = _TESTARENAMAP_MAPINT32FOREIGNMESSAGENOARENAENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapInt32ForeignMessageNoArenaEntry)
))
,
DESCRIPTOR = _TESTARENAMAP,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap)
))
_sym_db.RegisterMessage(TestArenaMap)
_sym_db.RegisterMessage(TestArenaMap.MapInt32Int32Entry)
_sym_db.RegisterMessage(TestArenaMap.MapInt64Int64Entry)
_sym_db.RegisterMessage(TestArenaMap.MapUint32Uint32Entry)
_sym_db.RegisterMessage(TestArenaMap.MapUint64Uint64Entry)
_sym_db.RegisterMessage(TestArenaMap.MapSint32Sint32Entry)
_sym_db.RegisterMessage(TestArenaMap.MapSint64Sint64Entry)
_sym_db.RegisterMessage(TestArenaMap.MapFixed32Fixed32Entry)
_sym_db.RegisterMessage(TestArenaMap.MapFixed64Fixed64Entry)
_sym_db.RegisterMessage(TestArenaMap.MapSfixed32Sfixed32Entry)
_sym_db.RegisterMessage(TestArenaMap.MapSfixed64Sfixed64Entry)
_sym_db.RegisterMessage(TestArenaMap.MapInt32FloatEntry)
_sym_db.RegisterMessage(TestArenaMap.MapInt32DoubleEntry)
_sym_db.RegisterMessage(TestArenaMap.MapBoolBoolEntry)
_sym_db.RegisterMessage(TestArenaMap.MapStringStringEntry)
_sym_db.RegisterMessage(TestArenaMap.MapInt32BytesEntry)
_sym_db.RegisterMessage(TestArenaMap.MapInt32EnumEntry)
_sym_db.RegisterMessage(TestArenaMap.MapInt32ForeignMessageEntry)
_sym_db.RegisterMessage(TestArenaMap.MapInt32ForeignMessageNoArenaEntry)
MessageContainingEnumCalledType = _reflection.GeneratedProtocolMessageType('MessageContainingEnumCalledType', (_message.Message,), dict(
TypeEntry = _reflection.GeneratedProtocolMessageType('TypeEntry', (_message.Message,), dict(
DESCRIPTOR = _MESSAGECONTAININGENUMCALLEDTYPE_TYPEENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.MessageContainingEnumCalledType.TypeEntry)
))
,
DESCRIPTOR = _MESSAGECONTAININGENUMCALLEDTYPE,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.MessageContainingEnumCalledType)
))
_sym_db.RegisterMessage(MessageContainingEnumCalledType)
_sym_db.RegisterMessage(MessageContainingEnumCalledType.TypeEntry)
MessageContainingMapCalledEntry = _reflection.GeneratedProtocolMessageType('MessageContainingMapCalledEntry', (_message.Message,), dict(
EntryEntry = _reflection.GeneratedProtocolMessageType('EntryEntry', (_message.Message,), dict(
DESCRIPTOR = _MESSAGECONTAININGMAPCALLEDENTRY_ENTRYENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.MessageContainingMapCalledEntry.EntryEntry)
))
,
DESCRIPTOR = _MESSAGECONTAININGMAPCALLEDENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.MessageContainingMapCalledEntry)
))
_sym_db.RegisterMessage(MessageContainingMapCalledEntry)
_sym_db.RegisterMessage(MessageContainingMapCalledEntry.EntryEntry)
TestRecursiveMapMessage = _reflection.GeneratedProtocolMessageType('TestRecursiveMapMessage', (_message.Message,), dict(
AEntry = _reflection.GeneratedProtocolMessageType('AEntry', (_message.Message,), dict(
DESCRIPTOR = _TESTRECURSIVEMAPMESSAGE_AENTRY,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestRecursiveMapMessage.AEntry)
))
,
DESCRIPTOR = _TESTRECURSIVEMAPMESSAGE,
__module__ = 'google.protobuf.map_unittest_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestRecursiveMapMessage)
))
_sym_db.RegisterMessage(TestRecursiveMapMessage)
_sym_db.RegisterMessage(TestRecursiveMapMessage.AEntry)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\370\001\001'))
_TESTMAP_MAPINT32INT32ENTRY.has_options = True
_TESTMAP_MAPINT32INT32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTMAP_MAPINT64INT64ENTRY.has_options = True
_TESTMAP_MAPINT64INT64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTMAP_MAPUINT32UINT32ENTRY.has_options = True
_TESTMAP_MAPUINT32UINT32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTMAP_MAPUINT64UINT64ENTRY.has_options = True
_TESTMAP_MAPUINT64UINT64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTMAP_MAPSINT32SINT32ENTRY.has_options = True
_TESTMAP_MAPSINT32SINT32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTMAP_MAPSINT64SINT64ENTRY.has_options = True
_TESTMAP_MAPSINT64SINT64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTMAP_MAPFIXED32FIXED32ENTRY.has_options = True
_TESTMAP_MAPFIXED32FIXED32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTMAP_MAPFIXED64FIXED64ENTRY.has_options = True
_TESTMAP_MAPFIXED64FIXED64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTMAP_MAPSFIXED32SFIXED32ENTRY.has_options = True
_TESTMAP_MAPSFIXED32SFIXED32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTMAP_MAPSFIXED64SFIXED64ENTRY.has_options = True
_TESTMAP_MAPSFIXED64SFIXED64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTMAP_MAPINT32FLOATENTRY.has_options = True
_TESTMAP_MAPINT32FLOATENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTMAP_MAPINT32DOUBLEENTRY.has_options = True
_TESTMAP_MAPINT32DOUBLEENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTMAP_MAPBOOLBOOLENTRY.has_options = True
_TESTMAP_MAPBOOLBOOLENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTMAP_MAPSTRINGSTRINGENTRY.has_options = True
_TESTMAP_MAPSTRINGSTRINGENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTMAP_MAPINT32BYTESENTRY.has_options = True
_TESTMAP_MAPINT32BYTESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTMAP_MAPINT32ENUMENTRY.has_options = True
_TESTMAP_MAPINT32ENUMENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTMAP_MAPINT32FOREIGNMESSAGEENTRY.has_options = True
_TESTMAP_MAPINT32FOREIGNMESSAGEENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTMAP_MAPSTRINGFOREIGNMESSAGEENTRY.has_options = True
_TESTMAP_MAPSTRINGFOREIGNMESSAGEENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTMESSAGEMAP_MAPINT32MESSAGEENTRY.has_options = True
_TESTMESSAGEMAP_MAPINT32MESSAGEENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTSAMETYPEMAP_MAP1ENTRY.has_options = True
_TESTSAMETYPEMAP_MAP1ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTSAMETYPEMAP_MAP2ENTRY.has_options = True
_TESTSAMETYPEMAP_MAP2ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTREQUIREDMESSAGEMAP_MAPFIELDENTRY.has_options = True
_TESTREQUIREDMESSAGEMAP_MAPFIELDENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTARENAMAP_MAPINT32INT32ENTRY.has_options = True
_TESTARENAMAP_MAPINT32INT32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTARENAMAP_MAPINT64INT64ENTRY.has_options = True
_TESTARENAMAP_MAPINT64INT64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTARENAMAP_MAPUINT32UINT32ENTRY.has_options = True
_TESTARENAMAP_MAPUINT32UINT32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTARENAMAP_MAPUINT64UINT64ENTRY.has_options = True
_TESTARENAMAP_MAPUINT64UINT64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTARENAMAP_MAPSINT32SINT32ENTRY.has_options = True
_TESTARENAMAP_MAPSINT32SINT32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTARENAMAP_MAPSINT64SINT64ENTRY.has_options = True
_TESTARENAMAP_MAPSINT64SINT64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTARENAMAP_MAPFIXED32FIXED32ENTRY.has_options = True
_TESTARENAMAP_MAPFIXED32FIXED32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTARENAMAP_MAPFIXED64FIXED64ENTRY.has_options = True
_TESTARENAMAP_MAPFIXED64FIXED64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTARENAMAP_MAPSFIXED32SFIXED32ENTRY.has_options = True
_TESTARENAMAP_MAPSFIXED32SFIXED32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTARENAMAP_MAPSFIXED64SFIXED64ENTRY.has_options = True
_TESTARENAMAP_MAPSFIXED64SFIXED64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTARENAMAP_MAPINT32FLOATENTRY.has_options = True
_TESTARENAMAP_MAPINT32FLOATENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTARENAMAP_MAPINT32DOUBLEENTRY.has_options = True
_TESTARENAMAP_MAPINT32DOUBLEENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTARENAMAP_MAPBOOLBOOLENTRY.has_options = True
_TESTARENAMAP_MAPBOOLBOOLENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTARENAMAP_MAPSTRINGSTRINGENTRY.has_options = True
_TESTARENAMAP_MAPSTRINGSTRINGENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTARENAMAP_MAPINT32BYTESENTRY.has_options = True
_TESTARENAMAP_MAPINT32BYTESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTARENAMAP_MAPINT32ENUMENTRY.has_options = True
_TESTARENAMAP_MAPINT32ENUMENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTARENAMAP_MAPINT32FOREIGNMESSAGEENTRY.has_options = True
_TESTARENAMAP_MAPINT32FOREIGNMESSAGEENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTARENAMAP_MAPINT32FOREIGNMESSAGENOARENAENTRY.has_options = True
_TESTARENAMAP_MAPINT32FOREIGNMESSAGENOARENAENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_MESSAGECONTAININGENUMCALLEDTYPE_TYPEENTRY.has_options = True
_MESSAGECONTAININGENUMCALLEDTYPE_TYPEENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_MESSAGECONTAININGMAPCALLEDENTRY_ENTRYENTRY.has_options = True
_MESSAGECONTAININGMAPCALLEDENTRY_ENTRYENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_TESTRECURSIVEMAPMESSAGE_AENTRY.has_options = True
_TESTRECURSIVEMAPMESSAGE_AENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
# @@protoc_insertion_point(module_scope)
| mit |
johnwlockwood/appengine-mapreduce | python/src/mapreduce/lib/pipeline/status_ui.py | 4 | 5903 | #!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Status UI for Google App Engine Pipeline API."""
import logging
import os
import traceback
from google.appengine.api import users
from google.appengine.ext import webapp
# Relative imports
from mapreduce.lib import simplejson
import util
class _StatusUiHandler(webapp.RequestHandler):
  """Serves the static resources that make up the Pipeline status UI.

  Maps known URL suffixes to bundled HTML/CSS/JS/image files and streams
  them back with the proper content type. When authentication is
  enforced, only logged-in admin users may access the UI.
  """

  # Maps each URL suffix to (path relative to this module, MIME type).
  _RESOURCE_MAP = {
    '/status': ('ui/status.html', 'text/html'),
    '/status.css': ('ui/status.css', 'text/css'),
    '/status.js': ('ui/status.js', 'text/javascript'),
    '/list': ('ui/root_list.html', 'text/html'),
    '/list.css': ('ui/root_list.css', 'text/css'),
    '/list.js': ('ui/root_list.js', 'text/javascript'),
    '/common.js': ('ui/common.js', 'text/javascript'),
    '/common.css': ('ui/common.css', 'text/css'),
    '/jquery-1.4.2.min.js': ('ui/jquery-1.4.2.min.js', 'text/javascript'),
    '/jquery.treeview.min.js': ('ui/jquery.treeview.min.js', 'text/javascript'),
    '/jquery.cookie.js': ('ui/jquery.cookie.js', 'text/javascript'),
    '/jquery.timeago.js': ('ui/jquery.timeago.js', 'text/javascript'),
    '/jquery.ba-hashchange.min.js': (
        'ui/jquery.ba-hashchange.min.js', 'text/javascript'),
    '/jquery.json.min.js': ('ui/jquery.json.min.js', 'text/javascript'),
    '/jquery.treeview.css': ('ui/jquery.treeview.css', 'text/css'),
    '/treeview-default.gif': ('ui/images/treeview-default.gif', 'image/gif'),
    '/treeview-default-line.gif': (
        'ui/images/treeview-default-line.gif', 'image/gif'),
    '/treeview-black.gif': ('ui/images/treeview-black.gif', 'image/gif'),
    '/treeview-black-line.gif': (
        'ui/images/treeview-black-line.gif', 'image/gif'),
    '/images/treeview-default.gif': (
        'ui/images/treeview-default.gif', 'image/gif'),
    '/images/treeview-default-line.gif': (
        'ui/images/treeview-default-line.gif', 'image/gif'),
    '/images/treeview-black.gif': (
        'ui/images/treeview-black.gif', 'image/gif'),
    '/images/treeview-black-line.gif': (
        'ui/images/treeview-black-line.gif', 'image/gif'),
  }

  def get(self, resource=''):
    """Renders a single status UI resource.

    Args:
      resource: URL suffix identifying which bundled file to serve;
        must be a key of _RESOURCE_MAP, otherwise a 404 is returned.
    """
    import pipeline  # Break circular dependency

    if pipeline._ENFORCE_AUTH:
      if users.get_current_user() is None:
        self.redirect(users.create_login_url(self.request.url))
        return

      if not users.is_current_user_admin():
        self.response.out.write('Forbidden')
        self.response.set_status(403)
        return

    if resource not in self._RESOURCE_MAP:
      logging.info('Could not find: %s', resource)
      self.response.set_status(404)
      self.response.out.write("Resource not found.")
      self.response.headers['Content-Type'] = 'text/plain'
      return

    relative_path, content_type = self._RESOURCE_MAP[resource]
    path = os.path.join(os.path.dirname(__file__), relative_path)

    # Allow short-lived public caching in production so static assets are
    # not re-fetched on every page load.
    if not pipeline._DEBUG:
      self.response.headers["Cache-Control"] = "public, max-age=300"

    self.response.headers["Content-Type"] = content_type
    # Use a context manager so the file handle is closed deterministically;
    # the original open(...).read() leaked the handle until GC.
    with open(path, 'rb') as resource_file:
      self.response.out.write(resource_file.read())
class _BaseRpcHandler(webapp.RequestHandler):
  """Base handler for JSON-RPC responses.

  Sub-classes should override handle() and fill in the 'json_response'
  dictionary. Any exception raised by handle() is caught and returned to
  the caller as a JSON error payload containing 'error_class',
  'error_message', and 'error_traceback' keys, instead of propagating.
  """

  def get(self):
    """Serializes the sub-class's json_response as a JSON HTTP response."""
    import pipeline  # Break circular dependency

    if pipeline._ENFORCE_AUTH:
      if not users.is_current_user_admin():
        self.response.out.write('Forbidden')
        self.response.set_status(403)
        return

    # XSRF protection: browsers do not attach this header to cross-site
    # form or link navigations, so requiring it rejects simple XSRF
    # requests while still allowing the UI's XMLHttpRequest calls.
    if (not pipeline._DEBUG and
        self.request.headers.get('X-Requested-With') != 'XMLHttpRequest'):
      self.response.out.write('Request missing X-Requested-With header')
      self.response.set_status(403)
      return

    self.json_response = {}
    try:
      self.handle()
      output = simplejson.dumps(self.json_response, cls=util.JsonEncoder)
    except Exception as e:
      # Report the failure to the caller as structured JSON rather than
      # letting the exception escape as an opaque HTTP 500.
      self.json_response.clear()
      self.json_response['error_class'] = e.__class__.__name__
      self.json_response['error_message'] = str(e)
      self.json_response['error_traceback'] = traceback.format_exc()
      output = simplejson.dumps(self.json_response, cls=util.JsonEncoder)

    self.response.set_status(200)
    self.response.headers['Content-Type'] = 'text/javascript'
    self.response.headers['Cache-Control'] = 'no-cache'
    self.response.out.write(output)

  def handle(self):
    """Populates self.json_response; must be implemented by sub-classes."""
    raise NotImplementedError('To be implemented by sub-classes.')
class _TreeStatusHandler(_BaseRpcHandler):
  """RPC handler for getting the status of all children of root pipeline."""

  def handle(self):
    import pipeline  # Break circular dependency
    root_id = self.request.get('root_pipeline_id')
    status_tree = pipeline.get_status_tree(root_id)
    self.json_response.update(status_tree)
class _ClassPathListHandler(_BaseRpcHandler):
  """RPC handler for getting the list of all Pipeline classes defined."""

  def handle(self):
    import pipeline  # Break circular dependency
    names = pipeline.get_pipeline_names()
    self.json_response['classPaths'] = names
class _RootListHandler(_BaseRpcHandler):
  """RPC handler for getting the status of all root pipelines."""

  def handle(self):
    import pipeline  # Break circular dependency
    listing = pipeline.get_root_list(
        class_path=self.request.get('class_path'),
        cursor=self.request.get('cursor'))
    self.json_response.update(listing)
| apache-2.0 |
smarterclayton/origin | cmd/service-catalog/go/src/github.com/kubernetes-incubator/service-catalog/vendor/k8s.io/kubernetes/hack/lookup_pull.py | 246 | 1299 | #!/usr/bin/env python
# Copyright 2015 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Script to print out PR info in release note format.
import json
import sys
import urllib2
# GitHub API endpoint for a single pull request in the (pre-rename)
# GoogleCloudPlatform/kubernetes repository.
PULLQUERY=("https://api.github.com/repos/"
           "GoogleCloudPlatform/kubernetes/pulls/{pull}")
# JSON field names in the GitHub pulls API response.
LOGIN="login"
TITLE="title"
USER="user"
def print_pulls(pulls):
  """Print one markdown release-note bullet per pull request number."""
  for pull in pulls:
    raw = urllib2.urlopen(PULLQUERY.format(pull=pull)).read()
    info = json.loads(raw)
    author = info[USER][LOGIN]
    print("* {title} #{pull} ({author})".format(
        title=info[TITLE], pull=pull, author=author))
if __name__ == "__main__":
  if len(sys.argv) < 2:
    # Bug fix: the original never called .format() on the usage string, so
    # the literal "{cmd}" placeholder was printed to the user.
    print(("Usage: {cmd} <pulls>...: Prints out short " +
           "markdown description for PRs appropriate for release notes.")
          .format(cmd=sys.argv[0]))
    sys.exit(1)
  print_pulls(sys.argv[1:])
| apache-2.0 |
urandu/rethinkdb | external/v8_3.30.33.16/tools/run-deopt-fuzzer.py | 36 | 16911 | #!/usr/bin/env python
#
# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import json
import math
import multiprocessing
import optparse
import os
from os.path import join
import random
import shlex
import subprocess
import sys
import time
from testrunner.local import execution
from testrunner.local import progress
from testrunner.local import testsuite
from testrunner.local import utils
from testrunner.local import verbose
from testrunner.objects import context
# Host architecture, used when --arch is "auto" or "native".
ARCH_GUESS = utils.DefaultArch()
# Suites fuzzed when no suite is named on the command line.
DEFAULT_TESTS = ["mjsunit", "webkit"]
# Base per-test timeout in seconds; scaled per mode/arch in Execute().
TIMEOUT_DEFAULT = 60
TIMEOUT_SCALEFACTOR = {"debug" : 4,
                       "release" : 1 }
# d8 flags always passed in each build mode (deterministic, no concurrent
# recompilation, so deopt counters are reproducible).
MODE_FLAGS = {
  "debug" : ["--nohard-abort", "--nodead-code-elimination",
             "--nofold-constants", "--enable-slow-asserts",
             "--debug-code", "--verify-heap",
             "--noconcurrent-recompilation"],
  "release" : ["--nohard-abort", "--nodead-code-elimination",
               "--nofold-constants", "--noconcurrent-recompilation"]}
SUPPORTED_ARCHS = ["android_arm",
                   "android_ia32",
                   "arm",
                   "ia32",
                   "mipsel",
                   "nacl_ia32",
                   "nacl_x64",
                   "x64"]
# Double the timeout for these:
SLOW_ARCHS = ["android_arm",
              "android_ia32",
              "arm",
              "mipsel",
              "nacl_ia32",
              "nacl_x64"]
# Initial value for --deopt-every-n-times during the collection run; large
# enough that the counter never fires, so the value left over after the run
# reveals how many deopt points a test passed through.
MAX_DEOPT = 1000000000
DISTRIBUTION_MODES = ["smooth", "random"]
class RandomDistribution:
  """Selects deopt points uniformly at random.

  Distribute(n, m) returns n distinct integers sampled from [1, m].
  """
  def __init__(self, seed=None):
    # Python 2 constructs here: print statement and sys.maxint.
    # A seed of 0/None gets replaced so the run is reproducible from the log.
    seed = seed or random.randint(1, sys.maxint)
    print "Using random distribution with seed %d" % seed
    self._random = random.Random(seed)
  def Distribute(self, n, m):
    # Cannot pick more distinct points than the interval holds.
    if n > m:
      n = m
    # sample() draws without replacement from [1, m].
    return self._random.sample(xrange(1, m + 1), n)
class SmoothDistribution:
  """Distribute n numbers into the interval [1:m].

  F1: Factor of the first derivation of the distribution function.
  F2: Factor of the second derivation of the distribution function.
  With F1 and F2 set to 0, the distribution will be equal.
  """
  def __init__(self, factor1=2.0, factor2=0.2):
    self._factor1 = factor1
    self._factor2 = factor2

  def Distribute(self, n, m):
    """Return n integer deopt points spread over [1:m]."""
    if n > m:
      n = m
    if n <= 1:
      return [ 1 ]

    # Sample an accelerating curve: each step grows by 'accel', which in
    # turn grows by 'jerk' (the two configured factors).
    samples = []
    value = 0.0
    slope = 1.0
    accel = self._factor1
    jerk = self._factor2
    for _ in range(n):
      samples.append(value)
      value += slope
      slope += accel
      accel += jerk

    # Rescale so the last sample lands exactly on m.
    scaled = [s * m / samples[-1] for s in samples]

    # Pull every point towards the equal distribution; the pull gets
    # stronger the closer n is to m.
    for index, point in enumerate(scaled):
      flat = index / float(n - 1) * float(m - 1) + 1
      correction = 1 - (point / flat)
      scaled[index] = int(point + (index + 1) * correction)
    return scaled
def Distribution(options):
  """Builds the deopt-point distribution selected on the command line."""
  mode = options.distribution_mode
  if mode == "random":
    return RandomDistribution(options.seed)
  if mode == "smooth":
    return SmoothDistribution(options.distribution_factor1,
                              options.distribution_factor2)
  # Unknown modes are rejected earlier by ProcessOptions; like the original,
  # anything else falls through to an implicit None.
def BuildOptions():
  """Builds the optparse parser for the deopt fuzzer.

  Flags fall into four rough groups: target selection (--arch/--mode),
  deopt distribution tuning (--coverage*, --distribution-*), sharding
  (--shard-*), and output/diagnostics.
  """
  result = optparse.OptionParser()
  result.add_option("--arch",
                    help=("The architecture to run tests for, "
                          "'auto' or 'native' for auto-detect"),
                    default="ia32,x64,arm")
  result.add_option("--arch-and-mode",
                    help="Architecture and mode in the format 'arch.mode'",
                    default=None)
  result.add_option("--asan",
                    help="Regard test expectations for ASAN",
                    default=False, action="store_true")
  result.add_option("--buildbot",
                    help="Adapt to path structure used on buildbots",
                    default=False, action="store_true")
  result.add_option("--command-prefix",
                    help="Prepended to each shell command used to run a test",
                    default="")
  result.add_option("--coverage", help=("Exponential test coverage "
                    "(range 0.0, 1.0) -- 0.0: one test, 1.0 all tests (slow)"),
                    default=0.4, type="float")
  result.add_option("--coverage-lift", help=("Lifts test coverage for tests "
                    "with a small number of deopt points (range 0, inf)"),
                    default=20, type="int")
  result.add_option("--download-data", help="Download missing test suite data",
                    default=False, action="store_true")
  result.add_option("--distribution-factor1", help=("Factor of the first "
                    "derivation of the distribution function"), default=2.0,
                    type="float")
  result.add_option("--distribution-factor2", help=("Factor of the second "
                    "derivation of the distribution function"), default=0.7,
                    type="float")
  result.add_option("--distribution-mode", help=("How to select deopt points "
                    "for a given test (smooth|random)"),
                    default="smooth")
  result.add_option("--dump-results-file", help=("Dump maximum number of "
                    "deopt points per test to a file"))
  result.add_option("--extra-flags",
                    help="Additional flags to pass to each test command",
                    default="")
  result.add_option("--isolates", help="Whether to test isolates",
                    default=False, action="store_true")
  result.add_option("-j", help="The number of parallel tasks to run",
                    default=0, type="int")
  result.add_option("-m", "--mode",
                    help="The test modes in which to run (comma-separated)",
                    default="release,debug")
  result.add_option("--outdir", help="Base directory with compile output",
                    default="out")
  result.add_option("-p", "--progress",
                    help=("The style of progress indicator"
                          " (verbose, dots, color, mono)"),
                    choices=progress.PROGRESS_INDICATORS.keys(),
                    default="mono")
  result.add_option("--shard-count",
                    help="Split testsuites into this number of shards",
                    default=1, type="int")
  result.add_option("--shard-run",
                    help="Run this shard from the split up tests.",
                    default=1, type="int")
  result.add_option("--shell-dir", help="Directory containing executables",
                    default="")
  result.add_option("--seed", help="The seed for the random distribution",
                    type="int")
  result.add_option("-t", "--timeout", help="Timeout in seconds",
                    default= -1, type="int")
  result.add_option("-v", "--verbose", help="Verbose output",
                    default=False, action="store_true")
  result.add_option("--random-seed", default=0, dest="random_seed",
                    help="Default seed for initializing random generator")
  return result
def ProcessOptions(options):
  """Validates and canonicalizes parsed options in place.

  Splits comma-separated --arch/--mode into lists, clamps out-of-range
  distribution/coverage values, and fills defaults (-j, --random-seed).
  Returns True when the options are usable, False to trigger usage output.
  """
  global VARIANT_FLAGS
  # NOTE(review): VARIANT_FLAGS is declared global but never read or
  # assigned in this function -- looks like dead code; confirm upstream.
  # Architecture and mode related stuff.
  if options.arch_and_mode:
    tokens = options.arch_and_mode.split(".")
    options.arch = tokens[0]
    options.mode = tokens[1]
  options.mode = options.mode.split(",")
  for mode in options.mode:
    if not mode.lower() in ["debug", "release"]:
      print "Unknown mode %s" % mode
      return False
  if options.arch in ["auto", "native"]:
    options.arch = ARCH_GUESS
  options.arch = options.arch.split(",")
  for arch in options.arch:
    if not arch in SUPPORTED_ARCHS:
      print "Unknown architecture %s" % arch
      return False
  # Special processing of other options, sorted alphabetically.
  options.command_prefix = shlex.split(options.command_prefix)
  options.extra_flags = shlex.split(options.extra_flags)
  if options.j == 0:
    # Default to one worker per CPU.
    options.j = multiprocessing.cpu_count()
  while options.random_seed == 0:
    # 0 means "pick one": keep drawing until a non-zero 32-bit seed shows up.
    options.random_seed = random.SystemRandom().randint(-2147483648, 2147483647)
  if not options.distribution_mode in DISTRIBUTION_MODES:
    print "Unknown distribution mode %s" % options.distribution_mode
    return False
  # Out-of-range tuning values are clamped with a warning, not rejected.
  if options.distribution_factor1 < 0.0:
    print ("Distribution factor1 %s is out of range. Defaulting to 0.0"
        % options.distribution_factor1)
    options.distribution_factor1 = 0.0
  if options.distribution_factor2 < 0.0:
    print ("Distribution factor2 %s is out of range. Defaulting to 0.0"
        % options.distribution_factor2)
    options.distribution_factor2 = 0.0
  if options.coverage < 0.0 or options.coverage > 1.0:
    print ("Coverage %s is out of range. Defaulting to 0.4"
        % options.coverage)
    options.coverage = 0.4
  if options.coverage_lift < 0:
    print ("Coverage lift %s is out of range. Defaulting to 0"
        % options.coverage_lift)
    options.coverage_lift = 0
  return True
def ShardTests(tests, shard_count, shard_run):
  """Returns the 1-based shard_run-th of shard_count slices of tests."""
  if shard_count < 2:
    return tests
  if shard_run < 1 or shard_run > shard_count:
    print("shard-run not a valid number, should be in [1:shard-count]")
    print("defaulting back to running all tests")
    return tests
  # Every shard_count-th test, offset by the (1-based) shard number.
  return [test for position, test in enumerate(tests)
          if position % shard_count == shard_run - 1]
def Main():
  """Entry point: parse flags, load test suites, run each arch/mode pair."""
  parser = BuildOptions()
  options, args = parser.parse_args()
  if not ProcessOptions(options):
    parser.print_help()
    return 1

  workspace = os.path.abspath(join(os.path.dirname(sys.argv[0]), ".."))
  all_paths = utils.GetSuitePaths(join(workspace, "test"))
  # Positional args name suites (optionally "suite/test"); with none given,
  # fall back to the default suite set.
  if args:
    wanted = set(arg.split(os.path.sep)[0] for arg in args)
  else:
    wanted = set(DEFAULT_TESTS)
  suite_paths = [path for path in all_paths if path in wanted]

  suites = []
  for root in suite_paths:
    loaded = testsuite.TestSuite.LoadTestSuite(
        os.path.join(workspace, "test", root))
    if loaded:
      suites.append(loaded)
  if options.download_data:
    for suite in suites:
      suite.DownloadData()

  exit_code = 0
  for mode in options.mode:
    for arch in options.arch:
      try:
        code = Execute(arch, mode, args, options, suites, workspace)
        exit_code = exit_code or code
      except KeyboardInterrupt:
        return 2
  return exit_code
def CalculateNTests(m, options):
  """Calculates the number of tests from m deopt points with exponential
  coverage.

  The coverage is expected to be between 0.0 and 1.0.
  The 'coverage lift' lifts the coverage for tests with smaller m values.
  """
  coverage = float(options.coverage)
  lift = float(options.coverage_lift)
  exponent = (m * coverage + lift) / (m + lift)
  return int(m ** exponent)
def Execute(arch, mode, args, options, suites, workspace):
  """Runs the three fuzzing phases for one architecture/mode pair.

  Phase 1 (collection) runs every test once with a huge deopt counter to
  measure how many deopt points it has; phase 2 (analysis) turns those
  counts into a per-test distribution of counter values; phase 3 (fuzzing)
  re-runs each test once per selected counter value.

  Returns a non-zero exit code if either runner phase reported failures.
  """
  print(">>> Running tests for %s.%s" % (arch, mode))

  dist = Distribution(options)

  shell_dir = options.shell_dir
  if not shell_dir:
    if options.buildbot:
      shell_dir = os.path.join(workspace, options.outdir, mode)
      mode = mode.lower()
    else:
      shell_dir = os.path.join(workspace, options.outdir,
                               "%s.%s" % (arch, mode))
  shell_dir = os.path.relpath(shell_dir)

  # Populate context object.
  mode_flags = MODE_FLAGS[mode]
  timeout = options.timeout
  if timeout == -1:
    # Simulators are slow, therefore allow a longer default timeout.
    if arch in SLOW_ARCHS:
      timeout = 2 * TIMEOUT_DEFAULT
    else:
      timeout = TIMEOUT_DEFAULT
  timeout *= TIMEOUT_SCALEFACTOR[mode]
  ctx = context.Context(arch, mode, shell_dir,
                        mode_flags, options.verbose,
                        timeout, options.isolates,
                        options.command_prefix,
                        options.extra_flags,
                        False,  # Keep i18n on by default.
                        options.random_seed,
                        True,   # No sorting of test cases.
                        0,      # Don't rerun failing tests.
                        0,      # No use of a rerun-failing-tests maximum.
                        False)  # No predictable mode.

  # Find available test suites and read test cases from them.
  variables = {
    "arch": arch,
    "asan": options.asan,
    "deopt_fuzzer": True,
    "gc_stress": False,
    "isolates": options.isolates,
    "mode": mode,
    "no_i18n": False,
    "no_snap": False,
    "simulator": utils.UseSimulator(arch),
    "system": utils.GuessOS(),
    "tsan": False,
    "msan": False,
  }
  all_tests = []
  num_tests = 0
  test_id = 0

  # Remember test case prototypes for the fuzzing phase.
  test_backup = dict((s, []) for s in suites)

  for s in suites:
    s.ReadStatusFile(variables)
    s.ReadTestCases(ctx)
    if len(args) > 0:
      s.FilterTestCasesByArgs(args)
    all_tests += s.tests
    s.FilterTestCasesByStatus(False)
    test_backup[s] = s.tests
    # A never-firing counter plus --print-deopt-stress makes each test
    # report its total number of deopt points on stdout.
    analysis_flags = ["--deopt-every-n-times", "%d" % MAX_DEOPT,
                      "--print-deopt-stress"]
    s.tests = [t.CopyAddingFlags(analysis_flags) for t in s.tests]
    num_tests += len(s.tests)
    for t in s.tests:
      t.id = test_id
      test_id += 1

  if num_tests == 0:
    print("No tests to run.")
    return 0

  print(">>> Collection phase")
  progress_indicator = progress.PROGRESS_INDICATORS[options.progress]()
  runner = execution.Runner(suites, progress_indicator, ctx)
  exit_code = runner.Run(options.j)

  print(">>> Analysis phase")
  num_tests = 0
  test_id = 0
  for s in suites:
    test_results = {}
    for t in s.tests:
      for line in t.output.stdout.splitlines():
        if line.startswith("=== Stress deopt counter: "):
          # Remaining counter value -> number of deopt points passed.
          test_results[t.path] = MAX_DEOPT - int(line.split(" ")[-1])
    for t in s.tests:
      if t.path not in test_results:
        print("Missing results for %s" % t.path)
    if options.dump_results_file:
      results_dict = dict((t.path, n) for (t, n) in test_results.items())
      # Bug fix: the original referenced the undefined bare name
      # "dump_results_file" (NameError) and used the Python-2-only "file"
      # builtin without closing the handle.
      with open("%s.%d.txt" % (options.dump_results_file, time.time()),
                "w") as f:
        f.write(json.dumps(results_dict))

    # Reset tests and redistribute the prototypes from the collection phase.
    s.tests = []
    if options.verbose:
      print("Test distributions:")
    for t in test_backup[s]:
      max_deopt = test_results.get(t.path, 0)
      if max_deopt == 0:
        continue
      n_deopt = CalculateNTests(max_deopt, options)
      distribution = dist.Distribute(n_deopt, max_deopt)
      if options.verbose:
        print("%s %s" % (t.path, distribution))
      for i in distribution:
        fuzzing_flags = ["--deopt-every-n-times", "%d" % i]
        s.tests.append(t.CopyAddingFlags(fuzzing_flags))
    num_tests += len(s.tests)
    for t in s.tests:
      t.id = test_id
      test_id += 1

  if num_tests == 0:
    print("No tests to run.")
    return 0

  print(">>> Deopt fuzzing phase (%d test cases)" % num_tests)
  progress_indicator = progress.PROGRESS_INDICATORS[options.progress]()
  runner = execution.Runner(suites, progress_indicator, ctx)
  code = runner.Run(options.j)

  return exit_code or code
# Script entry point; the exit status propagates test failures to callers.
if __name__ == "__main__":
  sys.exit(Main())
| agpl-3.0 |
alikins/ansible | lib/ansible/modules/network/a10/a10_service_group.py | 16 | 13386 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Mischa Peters <mpeters@a10networks.com>,
# Eric Chou <ericc@a10networks.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: a10_service_group
version_added: 1.8
short_description: Manage A10 Networks AX/SoftAX/Thunder/vThunder devices' service groups.
description:
- Manage SLB (Server Load Balancing) service-group objects on A10 Networks devices via aXAPIv2.
author: "Eric Chou (@ericchou) 2016, Mischa Peters (@mischapeters) 2014"
notes:
- Requires A10 Networks aXAPI 2.1.
- When a server doesn't exist and is added to the service-group the server will be created.
extends_documentation_fragment:
- a10
- url
options:
state:
description:
- If the specified service group should exists.
default: present
choices: ['present', 'absent']
partition:
version_added: "2.3"
description:
- set active-partition
required: false
default: null
service_group:
description:
- The SLB (Server Load Balancing) service-group name
required: true
default: null
aliases: ['service', 'pool', 'group']
service_group_protocol:
description:
- The SLB service-group protocol of TCP or UDP.
required: false
default: tcp
aliases: ['proto', 'protocol']
choices: ['tcp', 'udp']
service_group_method:
description:
- The SLB service-group load balancing method, such as round-robin or weighted-rr.
required: false
default: round-robin
aliases: ['method']
choices:
- 'round-robin'
- 'weighted-rr'
- 'least-connection'
- 'weighted-least-connection'
- 'service-least-connection'
- 'service-weighted-least-connection'
- 'fastest-response'
- 'least-request'
- 'round-robin-strict'
- 'src-ip-only-hash'
- 'src-ip-hash'
servers:
description:
- A list of servers to add to the service group. Each list item should be a
dictionary which specifies the C(server:) and C(port:), but can also optionally
specify the C(status:). See the examples below for details.
required: false
default: null
aliases: ['server', 'member']
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled devices using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
'''
EXAMPLES = '''
# Create a new service-group
- a10_service_group:
host: a10.mydomain.com
username: myadmin
password: mypassword
partition: mypartition
service_group: sg-80-tcp
servers:
- server: foo1.mydomain.com
port: 8080
- server: foo2.mydomain.com
port: 8080
- server: foo3.mydomain.com
port: 8080
- server: foo4.mydomain.com
port: 8080
status: disabled
'''
RETURN = '''
content:
description: the full info regarding the slb_service_group
returned: success
type: string
sample: "mynewservicegroup"
'''
import json
from ansible.module_utils.network.a10.a10 import (axapi_call, a10_argument_spec, axapi_authenticate, axapi_failure, axapi_enabled_disabled)
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.urls import url_argument_spec
# Service-group attributes compared when deciding whether an update is needed.
VALID_SERVICE_GROUP_FIELDS = ['name', 'protocol', 'lb_method']
# Keys accepted in each entry of the user-supplied "servers" list.
VALID_SERVER_FIELDS = ['server', 'port', 'status']


def validate_servers(module, servers):
    """Validate and normalize the user-supplied server list in place.

    Each entry must be a dict containing a 'server' name and an
    integer-convertible 'port'. An optional 'status' is translated to the
    aXAPI integer form; a missing status defaults to 1 (enabled). Any
    violation aborts the module via module.fail_json().
    """
    for item in servers:
        for key in item:
            if key not in VALID_SERVER_FIELDS:
                module.fail_json(msg="invalid server field (%s), must be one of: %s" % (key, ','.join(VALID_SERVER_FIELDS)))

        # validate the server name is present
        if 'server' not in item:
            module.fail_json(msg="server definitions must define the server field")

        # validate the port number is present and an integer
        if 'port' in item:
            try:
                item['port'] = int(item['port'])
            # Bug fix: the original bare "except:" also swallowed SystemExit
            # and KeyboardInterrupt; only conversion failures belong here.
            except (TypeError, ValueError):
                module.fail_json(msg="server port definitions must be integers")
        else:
            module.fail_json(msg="server definitions must define the port field")

        # convert the status to the internal API integer value
        if 'status' in item:
            item['status'] = axapi_enabled_disabled(item['status'])
        else:
            item['status'] = 1
def main():
    """Ansible entry point: create/update/delete an A10 SLB service group.

    Opens one aXAPI v2.1 session, selects the active partition, then
    reconciles the service group and its member list against the desired
    state, saving the device config when anything changed.
    """
    argument_spec = a10_argument_spec()
    argument_spec.update(url_argument_spec())
    argument_spec.update(
        dict(
            state=dict(type='str', default='present', choices=['present', 'absent']),
            service_group=dict(type='str', aliases=['service', 'pool', 'group'], required=True),
            service_group_protocol=dict(type='str', default='tcp', aliases=['proto', 'protocol'], choices=['tcp', 'udp']),
            service_group_method=dict(type='str', default='round-robin',
                                      aliases=['method'],
                                      choices=['round-robin',
                                               'weighted-rr',
                                               'least-connection',
                                               'weighted-least-connection',
                                               'service-least-connection',
                                               'service-weighted-least-connection',
                                               'fastest-response',
                                               'least-request',
                                               'round-robin-strict',
                                               'src-ip-only-hash',
                                               'src-ip-hash']),
            servers=dict(type='list', aliases=['server', 'member'], default=[]),
            # NOTE(review): default=[] for a type='str' option looks wrong
            # (presumably meant default=None); the value is sent verbatim to
            # the device below -- confirm before changing.
            partition=dict(type='str', default=[]),
        )
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=False
    )

    host = module.params['host']
    username = module.params['username']
    password = module.params['password']
    partition = module.params['partition']
    state = module.params['state']
    # 'write_config' and the connection params come from a10_argument_spec().
    write_config = module.params['write_config']
    slb_service_group = module.params['service_group']
    slb_service_group_proto = module.params['service_group_protocol']
    slb_service_group_method = module.params['service_group_method']
    slb_servers = module.params['servers']

    if slb_service_group is None:
        module.fail_json(msg='service_group is required')

    axapi_base_url = 'https://' + host + '/services/rest/V2.1/?format=json'
    # aXAPI numeric codes for each load-balancing method name.
    load_balancing_methods = {'round-robin': 0,
                              'weighted-rr': 1,
                              'least-connection': 2,
                              'weighted-least-connection': 3,
                              'service-least-connection': 4,
                              'service-weighted-least-connection': 5,
                              'fastest-response': 6,
                              'least-request': 7,
                              'round-robin-strict': 8,
                              'src-ip-only-hash': 14,
                              'src-ip-hash': 15}

    # aXAPI protocol codes: 2 == TCP, 3 == UDP.
    if not slb_service_group_proto or slb_service_group_proto.lower() == 'tcp':
        protocol = 2
    else:
        protocol = 3

    # validate the server data list structure
    validate_servers(module, slb_servers)

    json_post = {
        'service_group': {
            'name': slb_service_group,
            'protocol': protocol,
            'lb_method': load_balancing_methods[slb_service_group_method],
        }
    }

    # first we authenticate to get a session id
    session_url = axapi_authenticate(module, axapi_base_url, username, password)

    # then we select the active-partition
    axapi_call(module, session_url + '&method=system.partition.active', json.dumps({'name': partition}))

    # then we check to see if the specified group exists
    slb_result = axapi_call(module, session_url + '&method=slb.service_group.search', json.dumps({'name': slb_service_group}))
    slb_service_group_exist = not axapi_failure(slb_result)

    changed = False
    if state == 'present':
        # before creating/updating we need to validate that servers
        # defined in the servers list exist to prevent errors
        checked_servers = []
        for server in slb_servers:
            result = axapi_call(module, session_url + '&method=slb.server.search', json.dumps({'name': server['server']}))
            if axapi_failure(result):
                module.fail_json(msg="the server %s specified in the servers list does not exist" % server['server'])
            checked_servers.append(server['server'])

        if not slb_service_group_exist:
            result = axapi_call(module, session_url + '&method=slb.service_group.create', json.dumps(json_post))
            if axapi_failure(result):
                module.fail_json(msg=result['response']['err']['msg'])
            changed = True
        else:
            # check to see if the service group definition without the
            # server members is different, and update that individually
            # if it needs it
            do_update = False
            for field in VALID_SERVICE_GROUP_FIELDS:
                if json_post['service_group'][field] != slb_result['service_group'][field]:
                    do_update = True
                    break

            if do_update:
                result = axapi_call(module, session_url + '&method=slb.service_group.update', json.dumps(json_post))
                if axapi_failure(result):
                    module.fail_json(msg=result['response']['err']['msg'])
                changed = True

        # next we pull the defined list of servers out of the returned
        # results to make it a bit easier to iterate over
        defined_servers = slb_result.get('service_group', {}).get('member_list', [])

        # next we add/update new member servers from the user-specified
        # list if they're different or not on the target device
        for server in slb_servers:
            found = False
            different = False
            for def_server in defined_servers:
                if server['server'] == def_server['server']:
                    found = True
                    for valid_field in VALID_SERVER_FIELDS:
                        if server[valid_field] != def_server[valid_field]:
                            different = True
                            break
                if found or different:
                    break

            # add or update as required
            server_data = {
                "name": slb_service_group,
                "member": server,
            }
            if not found:
                result = axapi_call(module, session_url + '&method=slb.service_group.member.create', json.dumps(server_data))
                changed = True
            elif different:
                result = axapi_call(module, session_url + '&method=slb.service_group.member.update', json.dumps(server_data))
                changed = True

        # finally, remove any servers that are on the target
        # device but were not specified in the list given
        for server in defined_servers:
            found = False
            for slb_server in slb_servers:
                if server['server'] == slb_server['server']:
                    found = True
                    break
            # remove if not found
            server_data = {
                "name": slb_service_group,
                "member": server,
            }
            if not found:
                result = axapi_call(module, session_url + '&method=slb.service_group.member.delete', json.dumps(server_data))
                changed = True

        # if we changed things, get the full info regarding
        # the service group for the return data below
        if changed:
            result = axapi_call(module, session_url + '&method=slb.service_group.search', json.dumps({'name': slb_service_group}))
        else:
            result = slb_result

    elif state == 'absent':
        if slb_service_group_exist:
            result = axapi_call(module, session_url + '&method=slb.service_group.delete', json.dumps({'name': slb_service_group}))
            changed = True
        else:
            result = dict(msg="the service group was not present")

    # if the config has changed, save the config unless otherwise requested
    if changed and write_config:
        write_result = axapi_call(module, session_url + '&method=system.action.write_memory')
        if axapi_failure(write_result):
            module.fail_json(msg="failed to save the configuration: %s" % write_result['response']['err']['msg'])

    # log out of the session nicely and exit
    axapi_call(module, session_url + '&method=session.close')
    module.exit_json(changed=changed, content=result)


if __name__ == '__main__':
    main()
| gpl-3.0 |
hfp/tensorflow-xsmm | tensorflow/contrib/gan/python/estimator/python/tpu_gan_estimator.py | 11 | 1229 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""`tf.Learn` components for `TPUGANEstimator`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.gan.python.estimator.python import tpu_gan_estimator_impl
# pylint: disable=wildcard-import
from tensorflow.contrib.gan.python.estimator.python.tpu_gan_estimator_impl import *
# pylint: enable=wildcard-import
from tensorflow.python.util.all_util import remove_undocumented
# Re-export exactly the implementation module's public API and strip any
# other (undocumented) names from this namespace.
__all__ = tpu_gan_estimator_impl.__all__
remove_undocumented(__name__, __all__)
| apache-2.0 |
zooba/PTVS | Python/Product/Miniconda/Miniconda3-x64/Lib/site-packages/setuptools/msvc.py | 40 | 40838 | """
Improved support for Microsoft Visual C++ compilers.
Known supported compilers:
--------------------------
Microsoft Visual C++ 9.0:
Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64)
Microsoft Windows SDK 6.1 (x86, x64, ia64)
Microsoft Windows SDK 7.0 (x86, x64, ia64)
Microsoft Visual C++ 10.0:
Microsoft Windows SDK 7.1 (x86, x64, ia64)
Microsoft Visual C++ 14.0:
Microsoft Visual C++ Build Tools 2015 (x86, x64, arm)
Microsoft Visual Studio 2017 (x86, x64, arm, arm64)
Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64)
"""
import os
import sys
import platform
import itertools
import distutils.errors
from setuptools.extern.packaging.version import LegacyVersion
from setuptools.extern.six.moves import filterfalse
from .monkey import get_unpatched
# Registry and live-environment access only exist on Windows; elsewhere
# install inert stand-ins so this module can still be imported.
if platform.system() == 'Windows':
    from setuptools.extern.six.moves import winreg
    safe_env = os.environ
else:
    """
    Mock winreg and environ so the module can be imported
    on this platform.
    """
    class winreg:
        # Placeholder registry hives; any real lookup would fail fast.
        HKEY_USERS = None
        HKEY_CURRENT_USER = None
        HKEY_LOCAL_MACHINE = None
        HKEY_CLASSES_ROOT = None

    safe_env = dict()

_msvc9_suppress_errors = (
    # msvc9compiler isn't available on some platforms
    ImportError,

    # msvc9compiler raises DistutilsPlatformError in some
    # environments. See #1118.
    distutils.errors.DistutilsPlatformError,
)

try:
    from distutils.msvc9compiler import Reg
except _msvc9_suppress_errors:
    pass
def msvc9_find_vcvarsall(version):
    """
    Patched "distutils.msvc9compiler.find_vcvarsall" to use the standalone
    compiler build for Python (VCForPython). Fall back to original behavior
    when the standalone compiler is not available.

    Redirect the path of "vcvarsall.bat".

    Known supported compilers
    -------------------------
    Microsoft Visual C++ 9.0:
        Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64)

    Parameters
    ----------
    version: float
        Required Microsoft Visual C++ version.

    Return
    ------
    vcvarsall.bat path: str
    """
    vc_base = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f'
    # Per-user installs register under the plain key; all-user installs on a
    # 64-bit system register under the 32-bit "Wow6432Node" view.
    productdir = None
    for prefix in ('', 'Wow6432Node\\'):
        try:
            productdir = Reg.get_value(vc_base % (prefix, version),
                                       "installdir")
            break
        except KeyError:
            continue

    if productdir:
        # Minor fix: the original spelled this "os.path.os.path.join"
        # (harmless but confusing, since os.path.os is just os).
        vcvarsall = os.path.join(productdir, "vcvarsall.bat")
        if os.path.isfile(vcvarsall):
            return vcvarsall

    return get_unpatched(msvc9_find_vcvarsall)(version)
def msvc9_query_vcvarsall(ver, arch='x86', *args, **kwargs):
    """
    Patched "distutils.msvc9compiler.query_vcvarsall" for support extra
    compilers.

    Set environment without use of "vcvarsall.bat".

    Known supported compilers
    -------------------------
    Microsoft Visual C++ 9.0:
        Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64)
        Microsoft Windows SDK 6.1 (x86, x64, ia64)
        Microsoft Windows SDK 7.0 (x86, x64, ia64)

    Microsoft Visual C++ 10.0:
        Microsoft Windows SDK 7.1 (x86, x64, ia64)

    Parameters
    ----------
    ver: float
        Required Microsoft Visual C++ version.
    arch: str
        Target architecture.

    Return
    ------
    environment: dict
    """
    # Classical route first: a missing vcvarsall.bat raises
    # DistutilsPlatformError; a script that runs but leaves the environment
    # unset raises ValueError. Either way, fall back to a direct lookup.
    try:
        legacy_query = get_unpatched(msvc9_query_vcvarsall)
        return legacy_query(ver, arch, *args, **kwargs)
    except (distutils.errors.DistutilsPlatformError, ValueError):
        pass

    # Fall back: derive the environment from registry information.
    try:
        return EnvironmentInfo(arch, ver).return_env()
    except distutils.errors.DistutilsPlatformError as exc:
        _augment_exception(exc, ver, arch)
        raise
def msvc14_get_vc_env(plat_spec):
    """
    Patched "distutils._msvccompiler._get_vc_env" adding support for
    extra compilers.

    Sets up the build environment without relying on "vcvarsall.bat".

    Known supported compilers
    -------------------------
    Microsoft Visual C++ 14.0:
        Microsoft Visual C++ Build Tools 2015 (x86, x64, arm)
        Microsoft Visual Studio 2017 (x86, x64, arm, arm64)
        Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64)

    Parameters
    ----------
    plat_spec: str
        Target architecture.

    Return
    ------
    environment: dict
    """
    # Classical way first: run vcvarsall.bat and scrape the environment.
    try:
        unpatched = get_unpatched(msvc14_get_vc_env)
        return unpatched(plat_spec)
    except distutils.errors.DistutilsPlatformError:
        # vcvarsall.bat is missing; try the direct route below.
        pass

    # Fall back: build the environment directly from the registry and
    # known install locations.
    try:
        return EnvironmentInfo(plat_spec, vc_min_ver=14.0).return_env()
    except distutils.errors.DistutilsPlatformError as error:
        _augment_exception(error, 14.0)
        raise
def msvc14_gen_lib_options(*args, **kwargs):
    """
    Patched "distutils._msvccompiler.gen_lib_options" fixing
    compatibility between "numpy.distutils" and "distutils._msvccompiler"
    (for Numpy < 1.11.2).
    """
    if "numpy.distutils" in sys.modules:
        # numpy.distutils is already loaded, so this import is cheap and
        # "np.distutils" is guaranteed to be populated.
        import numpy as np
        numpy_needs_shim = (
            LegacyVersion(np.__version__) < LegacyVersion('1.11.2'))
        if numpy_needs_shim:
            return np.distutils.ccompiler.gen_lib_options(*args, **kwargs)
    original = get_unpatched(msvc14_gen_lib_options)
    return original(*args, **kwargs)
def _augment_exception(exc, version, arch=''):
"""
Add details to the exception message to help guide the user
as to what action will resolve it.
"""
# Error if MSVC++ directory not found or environment not set
message = exc.args[0]
if "vcvarsall" in message.lower() or "visual c" in message.lower():
# Special error message if MSVC++ not installed
tmpl = 'Microsoft Visual C++ {version:0.1f} is required.'
message = tmpl.format(**locals())
msdownload = 'www.microsoft.com/download/details.aspx?id=%d'
if version == 9.0:
if arch.lower().find('ia64') > -1:
# For VC++ 9.0, if IA64 support is needed, redirect user
# to Windows SDK 7.0
message += ' Get it with "Microsoft Windows SDK 7.0": '
message += msdownload % 3138
else:
# For VC++ 9.0 redirect user to Vc++ for Python 2.7 :
# This redirection link is maintained by Microsoft.
# Contact vspython@microsoft.com if it needs updating.
message += ' Get it from http://aka.ms/vcpython27'
elif version == 10.0:
# For VC++ 10.0 Redirect user to Windows SDK 7.1
message += ' Get it with "Microsoft Windows SDK 7.1": '
message += msdownload % 8279
elif version >= 14.0:
# For VC++ 14.0 Redirect user to Visual C++ Build Tools
message += (' Get it with "Microsoft Visual C++ Build Tools": '
r'https://visualstudio.microsoft.com/downloads/')
exc.args = (message, )
class PlatformInfo:
    """
    Current and Target Architectures information.

    Parameters
    ----------
    arch: str
        Target architecture.
    """
    current_cpu = safe_env.get('processor_architecture', '').lower()

    def __init__(self, arch):
        self.arch = arch.lower().replace('x64', 'amd64')

    @property
    def target_cpu(self):
        # "arch" may be a cross-compile spec like "x86_amd64"; the target
        # is the part after the underscore (whole string if no underscore,
        # since str.find then returns -1 and -1 + 1 == 0).
        return self.arch[self.arch.find('_') + 1:]

    def target_is_x86(self):
        return self.target_cpu == 'x86'

    def current_is_x86(self):
        return self.current_cpu == 'x86'

    def current_dir(self, hidex86=False, x64=False):
        r"""
        Current platform specific subfolder.

        NOTE: this docstring is a raw string (like the sibling
        "target_dir"/"cross_dir" docstrings); without the "r" prefix the
        "\x86"/"\x64"/"\a..." sequences are interpreted as escape codes
        and the docstring text is silently garbled.

        Parameters
        ----------
        hidex86: bool
            return '' and not '\x86' if architecture is x86.
        x64: bool
            return '\x64' and not '\amd64' if architecture is amd64.

        Return
        ------
        subfolder: str
            '\target', or '' (see hidex86 parameter)
        """
        return (
            '' if (self.current_cpu == 'x86' and hidex86) else
            r'\x64' if (self.current_cpu == 'amd64' and x64) else
            r'\%s' % self.current_cpu
        )

    def target_dir(self, hidex86=False, x64=False):
        r"""
        Target platform specific subfolder.

        Parameters
        ----------
        hidex86: bool
            return '' and not '\x86' if architecture is x86.
        x64: bool
            return '\x64' and not '\amd64' if architecture is amd64.

        Return
        ------
        subfolder: str
            '\current', or '' (see hidex86 parameter)
        """
        return (
            '' if (self.target_cpu == 'x86' and hidex86) else
            r'\x64' if (self.target_cpu == 'amd64' and x64) else
            r'\%s' % self.target_cpu
        )

    def cross_dir(self, forcex86=False):
        r"""
        Cross platform specific subfolder.

        Parameters
        ----------
        forcex86: bool
            Use 'x86' as current architecture even if current architecture is
            not x86.

        Return
        ------
        subfolder: str
            '' if target architecture is current architecture,
            '\current_target' if not.
        """
        current = 'x86' if forcex86 else self.current_cpu
        return (
            '' if self.target_cpu == current else
            self.target_dir().replace('\\', '\\%s_' % current)
        )
class RegistryInfo:
    """
    Microsoft Visual Studio related registry information.

    Parameters
    ----------
    platform_info: PlatformInfo
        "PlatformInfo" instance.
    """
    HKEYS = (winreg.HKEY_USERS,
             winreg.HKEY_CURRENT_USER,
             winreg.HKEY_LOCAL_MACHINE,
             winreg.HKEY_CLASSES_ROOT)

    def __init__(self, platform_info):
        self.pi = platform_info

    @property
    def visualstudio(self):
        """
        Microsoft Visual Studio root registry key.
        """
        return 'VisualStudio'

    @property
    def sxs(self):
        """
        Microsoft Visual Studio SxS registry key.
        """
        return os.path.join(self.visualstudio, 'SxS')

    @property
    def vc(self):
        """
        Microsoft Visual C++ VC7 registry key.
        """
        return os.path.join(self.sxs, 'VC7')

    @property
    def vs(self):
        """
        Microsoft Visual Studio VS7 registry key.
        """
        return os.path.join(self.sxs, 'VS7')

    @property
    def vc_for_python(self):
        """
        Microsoft Visual C++ for Python registry key.
        """
        return r'DevDiv\VCForPython'

    @property
    def microsoft_sdk(self):
        """
        Microsoft SDK registry key.
        """
        return 'Microsoft SDKs'

    @property
    def windows_sdk(self):
        """
        Microsoft Windows/Platform SDK registry key.
        """
        return os.path.join(self.microsoft_sdk, 'Windows')

    @property
    def netfx_sdk(self):
        """
        Microsoft .NET Framework SDK registry key.
        """
        return os.path.join(self.microsoft_sdk, 'NETFXSDK')

    @property
    def windows_kits_roots(self):
        """
        Microsoft Windows Kits Roots registry key.
        """
        return r'Windows Kits\Installed Roots'

    def microsoft(self, key, x86=False):
        """
        Return key in Microsoft software registry.

        Parameters
        ----------
        key: str
            Registry key path where look.
        x86: str
            Force x86 software registry.

        Return
        ------
        str: value
        """
        # 32-bit views on a 64-bit OS live under "Wow6432Node".
        node64 = '' if self.pi.current_is_x86() or x86 else 'Wow6432Node'
        return os.path.join('Software', node64, 'Microsoft', key)

    def lookup(self, key, name):
        """
        Look for values in registry in Microsoft software registry.

        Parameters
        ----------
        key: str
            Registry key path where look.
        name: str
            Value name to find.

        Return
        ------
        str: value (or None when the key/value is not found in any hive)
        """
        KEY_READ = winreg.KEY_READ
        openkey = winreg.OpenKey
        ms = self.microsoft
        for hkey in self.HKEYS:
            try:
                bkey = openkey(hkey, ms(key), 0, KEY_READ)
            except (OSError, IOError):
                if not self.pi.current_is_x86():
                    try:
                        bkey = openkey(hkey, ms(key, True), 0, KEY_READ)
                    except (OSError, IOError):
                        continue
                else:
                    continue
            try:
                return winreg.QueryValueEx(bkey, name)[0]
            except (OSError, IOError):
                pass
            finally:
                # Close the handle so repeated lookups don't leak registry
                # key handles for the lifetime of the process.
                winreg.CloseKey(bkey)
class SystemInfo:
    """
    Microsoft Windows and Visual Studio related system information.

    Parameters
    ----------
    registry_info: RegistryInfo
        "RegistryInfo" instance.
    vc_ver: float
        Required Microsoft Visual C++ version. If None, the latest version
        found in the registry is used.
    """

    # Variables and properties in this class use originals CamelCase variables
    # names from Microsoft source files for more easy comparison.
    WinDir = safe_env.get('WinDir', '')
    ProgramFiles = safe_env.get('ProgramFiles', '')
    ProgramFilesx86 = safe_env.get('ProgramFiles(x86)', ProgramFiles)

    def __init__(self, registry_info, vc_ver=None):
        self.ri = registry_info
        self.pi = self.ri.pi
        self.vc_ver = vc_ver or self._find_latest_available_vc_ver()

    def _find_latest_available_vc_ver(self):
        # "find_available_vc_vers" returns a sorted list; the last entry is
        # the newest installed version.
        try:
            return self.find_available_vc_vers()[-1]
        except IndexError:
            err = 'No Microsoft Visual C++ version found'
            raise distutils.errors.DistutilsPlatformError(err)

    def find_available_vc_vers(self):
        """
        Find all available Microsoft Visual C++ versions.
        """
        ms = self.ri.microsoft
        vckeys = (self.ri.vc, self.ri.vc_for_python, self.ri.vs)
        vc_vers = []
        for hkey in self.ri.HKEYS:
            for key in vckeys:
                try:
                    bkey = winreg.OpenKey(hkey, ms(key), 0, winreg.KEY_READ)
                except (OSError, IOError):
                    continue
                subkeys, values, _ = winreg.QueryInfoKey(bkey)
                # Version numbers may appear either as value names or as
                # subkey names depending on the product; scan both.
                for i in range(values):
                    try:
                        ver = float(winreg.EnumValue(bkey, i)[0])
                        if ver not in vc_vers:
                            vc_vers.append(ver)
                    except ValueError:
                        pass
                for i in range(subkeys):
                    try:
                        ver = float(winreg.EnumKey(bkey, i))
                        if ver not in vc_vers:
                            vc_vers.append(ver)
                    except ValueError:
                        pass
        return sorted(vc_vers)

    @property
    def VSInstallDir(self):
        """
        Microsoft Visual Studio directory.
        """
        # Default path
        name = 'Microsoft Visual Studio %0.1f' % self.vc_ver
        default = os.path.join(self.ProgramFilesx86, name)

        # Try to get path from registry, if fail use default path
        return self.ri.lookup(self.ri.vs, '%0.1f' % self.vc_ver) or default

    @property
    def VCInstallDir(self):
        """
        Microsoft Visual C++ directory.
        """
        self.VSInstallDir  # evaluated first; kept for parity with original

        guess_vc = self._guess_vc() or self._guess_vc_legacy()

        # Try to get "VC++ for Python" path from registry as default path
        reg_path = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vc_ver)
        python_vc = self.ri.lookup(reg_path, 'installdir')
        default_vc = os.path.join(python_vc, 'VC') if python_vc else guess_vc

        # Try to get path from registry, if fail use default path
        path = self.ri.lookup(self.ri.vc, '%0.1f' % self.vc_ver) or default_vc

        if not os.path.isdir(path):
            msg = 'Microsoft Visual C++ directory not found'
            raise distutils.errors.DistutilsPlatformError(msg)

        return path

    def _guess_vc(self):
        """
        Locate Visual C for 2017
        """
        if self.vc_ver <= 14.0:
            return

        default = r'VC\Tools\MSVC'
        guess_vc = os.path.join(self.VSInstallDir, default)

        # Subdir with VC exact version as name
        try:
            vc_exact_ver = os.listdir(guess_vc)[-1]
            return os.path.join(guess_vc, vc_exact_ver)
        except (OSError, IOError, IndexError):
            pass

    def _guess_vc_legacy(self):
        """
        Locate Visual C for versions prior to 2017
        """
        default = r'Microsoft Visual Studio %0.1f\VC' % self.vc_ver
        return os.path.join(self.ProgramFilesx86, default)

    @property
    def WindowsSdkVersion(self):
        """
        Microsoft Windows SDK versions for specified MSVC++ version.
        """
        if self.vc_ver <= 9.0:
            return ('7.0', '6.1', '6.0a')
        elif self.vc_ver == 10.0:
            return ('7.1', '7.0a')
        elif self.vc_ver == 11.0:
            return ('8.0', '8.0a')
        elif self.vc_ver == 12.0:
            return ('8.1', '8.1a')
        elif self.vc_ver >= 14.0:
            return ('10.0', '8.1')

    @property
    def WindowsSdkLastVersion(self):
        """
        Microsoft Windows SDK last version
        """
        return self._use_last_dir_name(os.path.join(
            self.WindowsSdkDir, 'lib'))

    @property
    def WindowsSdkDir(self):
        """
        Microsoft Windows SDK directory.
        """
        sdkdir = ''
        for ver in self.WindowsSdkVersion:
            # Try to get it from registry
            loc = os.path.join(self.ri.windows_sdk, 'v%s' % ver)
            sdkdir = self.ri.lookup(loc, 'installationfolder')
            if sdkdir:
                break
        if not sdkdir or not os.path.isdir(sdkdir):
            # Try to get "VC++ for Python" version from registry
            path = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vc_ver)
            install_base = self.ri.lookup(path, 'installdir')
            if install_base:
                sdkdir = os.path.join(install_base, 'WinSDK')
        if not sdkdir or not os.path.isdir(sdkdir):
            # If fail, use default new path
            for ver in self.WindowsSdkVersion:
                intver = ver[:ver.rfind('.')]
                path = r'Microsoft SDKs\Windows Kits\%s' % (intver)
                d = os.path.join(self.ProgramFiles, path)
                if os.path.isdir(d):
                    sdkdir = d
        if not sdkdir or not os.path.isdir(sdkdir):
            # If fail, use default old path
            for ver in self.WindowsSdkVersion:
                path = r'Microsoft SDKs\Windows\v%s' % ver
                d = os.path.join(self.ProgramFiles, path)
                if os.path.isdir(d):
                    sdkdir = d
        if not sdkdir:
            # If fail, use Platform SDK
            sdkdir = os.path.join(self.VCInstallDir, 'PlatformSDK')
        return sdkdir

    @property
    def WindowsSDKExecutablePath(self):
        """
        Microsoft Windows SDK executable directory.
        """
        # Find WinSDK NetFx Tools registry dir name
        if self.vc_ver <= 11.0:
            netfxver = 35
            arch = ''
        else:
            netfxver = 40
            hidex86 = True if self.vc_ver <= 12.0 else False
            arch = self.pi.current_dir(x64=True, hidex86=hidex86)
        fx = 'WinSDK-NetFx%dTools%s' % (netfxver, arch.replace('\\', '-'))

        # list all possible registry paths
        regpaths = []
        if self.vc_ver >= 14.0:
            for ver in self.NetFxSdkVersion:
                regpaths += [os.path.join(self.ri.netfx_sdk, ver, fx)]

        for ver in self.WindowsSdkVersion:
            regpaths += [os.path.join(self.ri.windows_sdk, 'v%sA' % ver, fx)]

        # Return installation folder from the more recent path
        execpath = None  # guarantee a binding even if regpaths is empty
        for path in regpaths:
            execpath = self.ri.lookup(path, 'installationfolder')
            if execpath:
                break
        return execpath

    @property
    def FSharpInstallDir(self):
        """
        Microsoft Visual F# directory.
        """
        path = r'%0.1f\Setup\F#' % self.vc_ver
        path = os.path.join(self.ri.visualstudio, path)
        return self.ri.lookup(path, 'productdir') or ''

    @property
    def UniversalCRTSdkDir(self):
        """
        Microsoft Universal CRT SDK directory.
        """
        # Set Kit Roots versions for specified MSVC++ version
        if self.vc_ver >= 14.0:
            vers = ('10', '81')
        else:
            vers = ()

        # Find path of the more recent Kit
        sdkdir = ''  # guarantee a binding when "vers" is empty (vc_ver < 14)
        for ver in vers:
            sdkdir = self.ri.lookup(self.ri.windows_kits_roots,
                                    'kitsroot%s' % ver)
            if sdkdir:
                break
        return sdkdir or ''

    @property
    def UniversalCRTSdkLastVersion(self):
        """
        Microsoft Universal C Runtime SDK last version
        """
        return self._use_last_dir_name(os.path.join(
            self.UniversalCRTSdkDir, 'lib'))

    @property
    def NetFxSdkVersion(self):
        """
        Microsoft .NET Framework SDK versions.
        """
        # Set FxSdk versions for specified MSVC++ version
        if self.vc_ver >= 14.0:
            return ('4.6.1', '4.6')
        else:
            return ()

    @property
    def NetFxSdkDir(self):
        """
        Microsoft .NET Framework SDK directory.
        """
        sdkdir = ''  # guarantee a binding when no versions apply
        for ver in self.NetFxSdkVersion:
            loc = os.path.join(self.ri.netfx_sdk, ver)
            sdkdir = self.ri.lookup(loc, 'kitsinstallationfolder')
            if sdkdir:
                break
        return sdkdir or ''

    @property
    def FrameworkDir32(self):
        """
        Microsoft .NET Framework 32bit directory.
        """
        # Default path
        guess_fw = os.path.join(self.WinDir, r'Microsoft.NET\Framework')

        # Try to get path from registry, if fail use default path
        return self.ri.lookup(self.ri.vc, 'frameworkdir32') or guess_fw

    @property
    def FrameworkDir64(self):
        """
        Microsoft .NET Framework 64bit directory.
        """
        # Default path
        guess_fw = os.path.join(self.WinDir, r'Microsoft.NET\Framework64')

        # Try to get path from registry, if fail use default path
        return self.ri.lookup(self.ri.vc, 'frameworkdir64') or guess_fw

    @property
    def FrameworkVersion32(self):
        """
        Microsoft .NET Framework 32bit versions.
        """
        return self._find_dot_net_versions(32)

    @property
    def FrameworkVersion64(self):
        """
        Microsoft .NET Framework 64bit versions.
        """
        return self._find_dot_net_versions(64)

    def _find_dot_net_versions(self, bits):
        """
        Find Microsoft .NET Framework versions.

        Parameters
        ----------
        bits: int
            Platform number of bits: 32 or 64.
        """
        # Find actual .NET version in registry
        reg_ver = self.ri.lookup(self.ri.vc, 'frameworkver%d' % bits)
        dot_net_dir = getattr(self, 'FrameworkDir%d' % bits)
        ver = reg_ver or self._use_last_dir_name(dot_net_dir, 'v') or ''

        # Set .NET versions for specified MSVC++ version
        if self.vc_ver >= 12.0:
            frameworkver = (ver, 'v4.0')
        elif self.vc_ver >= 10.0:
            frameworkver = ('v4.0.30319' if ver.lower()[:2] != 'v4' else ver,
                            'v3.5')
        elif self.vc_ver == 9.0:
            frameworkver = ('v3.5', 'v2.0.50727')
        elif self.vc_ver == 8.0:
            # Was a bare "if" after the elif chain, which together with the
            # missing final "else" left "frameworkver" unbound
            # (UnboundLocalError) for versions below 8.0.
            frameworkver = ('v3.0', 'v2.0.50727')
        else:
            frameworkver = ()
        return frameworkver

    def _use_last_dir_name(self, path, prefix=''):
        """
        Return name of the last dir in path or '' if no dir found.

        Parameters
        ----------
        path: str
            Use dirs in this path
        prefix: str
            Use only dirs starting by this prefix
        """
        matching_dirs = (
            dir_name
            for dir_name in reversed(os.listdir(path))
            if os.path.isdir(os.path.join(path, dir_name)) and
            dir_name.startswith(prefix)
        )
        return next(matching_dirs, None) or ''
class EnvironmentInfo:
    """
    Return environment variables for specified Microsoft Visual C++ version
    and platform : Lib, Include, Path and libpath.

    This function is compatible with Microsoft Visual C++ 9.0 to 14.0.

    Script created by analysing Microsoft environment configuration files like
    "vcvars[...].bat", "SetEnv.Cmd", "vcbuildtools.bat", ...

    Parameters
    ----------
    arch: str
        Target architecture.
    vc_ver: float
        Required Microsoft Visual C++ version. If not set, autodetect the last
        version.
    vc_min_ver: float
        Minimum Microsoft Visual C++ version.
    """

    # Variables and properties in this class use originals CamelCase variables
    # names from Microsoft source files for more easy comparison.

    def __init__(self, arch, vc_ver=None, vc_min_ver=0):
        self.pi = PlatformInfo(arch)
        self.ri = RegistryInfo(self.pi)
        self.si = SystemInfo(self.ri, vc_ver)

        if self.vc_ver < vc_min_ver:
            err = 'No suitable Microsoft Visual C++ version found'
            raise distutils.errors.DistutilsPlatformError(err)

    @property
    def vc_ver(self):
        """
        Microsoft Visual C++ version.
        """
        return self.si.vc_ver

    @property
    def VSTools(self):
        """
        Microsoft Visual Studio Tools
        """
        paths = [r'Common7\IDE', r'Common7\Tools']

        if self.vc_ver >= 14.0:
            arch_subdir = self.pi.current_dir(hidex86=True, x64=True)
            paths += [r'Common7\IDE\CommonExtensions\Microsoft\TestWindow']
            paths += [r'Team Tools\Performance Tools']
            paths += [r'Team Tools\Performance Tools%s' % arch_subdir]

        return [os.path.join(self.si.VSInstallDir, path) for path in paths]

    @property
    def VCIncludes(self):
        """
        Microsoft Visual C++ & Microsoft Foundation Class Includes
        """
        return [os.path.join(self.si.VCInstallDir, 'Include'),
                os.path.join(self.si.VCInstallDir, r'ATLMFC\Include')]

    @property
    def VCLibraries(self):
        """
        Microsoft Visual C++ & Microsoft Foundation Class Libraries
        """
        if self.vc_ver >= 15.0:
            arch_subdir = self.pi.target_dir(x64=True)
        else:
            arch_subdir = self.pi.target_dir(hidex86=True)
        paths = ['Lib%s' % arch_subdir, r'ATLMFC\Lib%s' % arch_subdir]

        if self.vc_ver >= 14.0:
            paths += [r'Lib\store%s' % arch_subdir]

        return [os.path.join(self.si.VCInstallDir, path) for path in paths]

    @property
    def VCStoreRefs(self):
        """
        Microsoft Visual C++ store references Libraries
        """
        if self.vc_ver < 14.0:
            return []
        return [os.path.join(self.si.VCInstallDir, r'Lib\store\references')]

    @property
    def VCTools(self):
        """
        Microsoft Visual C++ Tools
        """
        si = self.si
        tools = [os.path.join(si.VCInstallDir, 'VCPackages')]

        forcex86 = True if self.vc_ver <= 10.0 else False
        arch_subdir = self.pi.cross_dir(forcex86)
        if arch_subdir:
            tools += [os.path.join(si.VCInstallDir, 'Bin%s' % arch_subdir)]

        if self.vc_ver == 14.0:
            path = 'Bin%s' % self.pi.current_dir(hidex86=True)
            tools += [os.path.join(si.VCInstallDir, path)]

        elif self.vc_ver >= 15.0:
            host_dir = (r'bin\HostX86%s' if self.pi.current_is_x86() else
                        r'bin\HostX64%s')
            tools += [os.path.join(
                si.VCInstallDir, host_dir % self.pi.target_dir(x64=True))]

            if self.pi.current_cpu != self.pi.target_cpu:
                tools += [os.path.join(
                    si.VCInstallDir, host_dir % self.pi.current_dir(x64=True))]

        else:
            tools += [os.path.join(si.VCInstallDir, 'Bin')]

        return tools

    @property
    def OSLibraries(self):
        """
        Microsoft Windows SDK Libraries
        """
        if self.vc_ver <= 10.0:
            arch_subdir = self.pi.target_dir(hidex86=True, x64=True)
            return [os.path.join(self.si.WindowsSdkDir, 'Lib%s' % arch_subdir)]

        else:
            arch_subdir = self.pi.target_dir(x64=True)
            lib = os.path.join(self.si.WindowsSdkDir, 'lib')
            libver = self._sdk_subdir
            return [os.path.join(lib, '%sum%s' % (libver, arch_subdir))]

    @property
    def OSIncludes(self):
        """
        Microsoft Windows SDK Include
        """
        include = os.path.join(self.si.WindowsSdkDir, 'include')

        if self.vc_ver <= 10.0:
            return [include, os.path.join(include, 'gl')]

        else:
            if self.vc_ver >= 14.0:
                sdkver = self._sdk_subdir
            else:
                sdkver = ''
            return [os.path.join(include, '%sshared' % sdkver),
                    os.path.join(include, '%sum' % sdkver),
                    os.path.join(include, '%swinrt' % sdkver)]

    @property
    def OSLibpath(self):
        """
        Microsoft Windows SDK Libraries Paths
        """
        ref = os.path.join(self.si.WindowsSdkDir, 'References')
        libpath = []

        if self.vc_ver <= 9.0:
            libpath += self.OSLibraries

        if self.vc_ver >= 11.0:
            libpath += [os.path.join(ref, r'CommonConfiguration\Neutral')]

        if self.vc_ver >= 14.0:
            libpath += [
                ref,
                os.path.join(self.si.WindowsSdkDir, 'UnionMetadata'),
                os.path.join(
                    ref,
                    'Windows.Foundation.UniversalApiContract',
                    '1.0.0.0',
                ),
                os.path.join(
                    ref,
                    'Windows.Foundation.FoundationContract',
                    '1.0.0.0',
                ),
                os.path.join(
                    ref,
                    'Windows.Networking.Connectivity.WwanContract',
                    '1.0.0.0',
                ),
                os.path.join(
                    self.si.WindowsSdkDir,
                    'ExtensionSDKs',
                    'Microsoft.VCLibs',
                    '%0.1f' % self.vc_ver,
                    'References',
                    'CommonConfiguration',
                    'neutral',
                ),
            ]
        return libpath

    @property
    def SdkTools(self):
        """
        Microsoft Windows SDK Tools
        """
        return list(self._sdk_tools())

    def _sdk_tools(self):
        """
        Microsoft Windows SDK Tools paths generator
        """
        if self.vc_ver < 15.0:
            bin_dir = 'Bin' if self.vc_ver <= 11.0 else r'Bin\x86'
            yield os.path.join(self.si.WindowsSdkDir, bin_dir)

        if not self.pi.current_is_x86():
            arch_subdir = self.pi.current_dir(x64=True)
            path = 'Bin%s' % arch_subdir
            yield os.path.join(self.si.WindowsSdkDir, path)

        if self.vc_ver == 10.0 or self.vc_ver == 11.0:
            if self.pi.target_is_x86():
                arch_subdir = ''
            else:
                arch_subdir = self.pi.current_dir(hidex86=True, x64=True)
            path = r'Bin\NETFX 4.0 Tools%s' % arch_subdir
            yield os.path.join(self.si.WindowsSdkDir, path)

        elif self.vc_ver >= 15.0:
            path = os.path.join(self.si.WindowsSdkDir, 'Bin')
            arch_subdir = self.pi.current_dir(x64=True)
            sdkver = self.si.WindowsSdkLastVersion
            yield os.path.join(path, '%s%s' % (sdkver, arch_subdir))

        if self.si.WindowsSDKExecutablePath:
            yield self.si.WindowsSDKExecutablePath

    @property
    def _sdk_subdir(self):
        """
        Microsoft Windows SDK version subdir
        """
        ucrtver = self.si.WindowsSdkLastVersion
        return ('%s\\' % ucrtver) if ucrtver else ''

    @property
    def SdkSetup(self):
        """
        Microsoft Windows SDK Setup
        """
        if self.vc_ver > 9.0:
            return []

        return [os.path.join(self.si.WindowsSdkDir, 'Setup')]

    @property
    def FxTools(self):
        """
        Microsoft .NET Framework Tools
        """
        pi = self.pi
        si = self.si

        if self.vc_ver <= 10.0:
            include32 = True
            include64 = not pi.target_is_x86() and not pi.current_is_x86()
        else:
            include32 = pi.target_is_x86() or pi.current_is_x86()
            include64 = pi.current_cpu == 'amd64' or pi.target_cpu == 'amd64'

        tools = []
        if include32:
            tools += [os.path.join(si.FrameworkDir32, ver)
                      for ver in si.FrameworkVersion32]
        if include64:
            tools += [os.path.join(si.FrameworkDir64, ver)
                      for ver in si.FrameworkVersion64]
        return tools

    @property
    def NetFxSDKLibraries(self):
        """
        Microsoft .Net Framework SDK Libraries
        """
        if self.vc_ver < 14.0 or not self.si.NetFxSdkDir:
            return []

        arch_subdir = self.pi.target_dir(x64=True)
        return [os.path.join(self.si.NetFxSdkDir, r'lib\um%s' % arch_subdir)]

    @property
    def NetFxSDKIncludes(self):
        """
        Microsoft .Net Framework SDK Includes
        """
        if self.vc_ver < 14.0 or not self.si.NetFxSdkDir:
            return []

        return [os.path.join(self.si.NetFxSdkDir, r'include\um')]

    @property
    def VsTDb(self):
        """
        Microsoft Visual Studio Team System Database
        """
        return [os.path.join(self.si.VSInstallDir, r'VSTSDB\Deploy')]

    @property
    def MSBuild(self):
        """
        Microsoft Build Engine
        """
        if self.vc_ver < 12.0:
            return []
        elif self.vc_ver < 15.0:
            base_path = self.si.ProgramFilesx86
            arch_subdir = self.pi.current_dir(hidex86=True)
        else:
            base_path = self.si.VSInstallDir
            arch_subdir = ''

        path = r'MSBuild\%0.1f\bin%s' % (self.vc_ver, arch_subdir)
        build = [os.path.join(base_path, path)]

        if self.vc_ver >= 15.0:
            # Add Roslyn C# & Visual Basic Compiler
            build += [os.path.join(base_path, path, 'Roslyn')]

        return build

    @property
    def HTMLHelpWorkshop(self):
        """
        Microsoft HTML Help Workshop
        """
        if self.vc_ver < 11.0:
            return []

        return [os.path.join(self.si.ProgramFilesx86, 'HTML Help Workshop')]

    @property
    def UCRTLibraries(self):
        """
        Microsoft Universal C Runtime SDK Libraries
        """
        if self.vc_ver < 14.0:
            return []

        arch_subdir = self.pi.target_dir(x64=True)
        lib = os.path.join(self.si.UniversalCRTSdkDir, 'lib')
        ucrtver = self._ucrt_subdir
        return [os.path.join(lib, '%sucrt%s' % (ucrtver, arch_subdir))]

    @property
    def UCRTIncludes(self):
        """
        Microsoft Universal C Runtime SDK Include
        """
        if self.vc_ver < 14.0:
            return []

        include = os.path.join(self.si.UniversalCRTSdkDir, 'include')
        return [os.path.join(include, '%sucrt' % self._ucrt_subdir)]

    @property
    def _ucrt_subdir(self):
        """
        Microsoft Universal C Runtime SDK version subdir
        """
        ucrtver = self.si.UniversalCRTSdkLastVersion
        return ('%s\\' % ucrtver) if ucrtver else ''

    @property
    def FSharp(self):
        """
        Microsoft Visual F# paths.

        F# tools only ship with Visual Studio 11.0 and 12.0; every other
        version gets no F# path.
        """
        # The original guard used "and" ("< 11.0 and > 12.0"), which can
        # never be true, so the raw install-dir *string* was returned for
        # every version and then iterated character-by-character by
        # "_build_paths".
        if self.vc_ver < 11.0 or self.vc_ver > 12.0:
            return []

        # Wrap in a list: "_build_paths" flattens its inputs, so a bare
        # string would be split into single characters.
        return [self.si.FSharpInstallDir]

    @property
    def VCRuntimeRedist(self):
        """
        Microsoft Visual C++ runtime redistributable dll
        """
        arch_subdir = self.pi.target_dir(x64=True)
        if self.vc_ver < 15:
            redist_path = self.si.VCInstallDir
            vcruntime = 'redist%s\\Microsoft.VC%d0.CRT\\vcruntime%d0.dll'
        else:
            redist_path = self.si.VCInstallDir.replace('\\Tools', '\\Redist')
            vcruntime = 'onecore%s\\Microsoft.VC%d0.CRT\\vcruntime%d0.dll'

        # Visual Studio 2017 is still Visual C++ 14.0
        dll_ver = 14.0 if self.vc_ver == 15 else self.vc_ver

        vcruntime = vcruntime % (arch_subdir, self.vc_ver, dll_ver)
        return os.path.join(redist_path, vcruntime)

    def return_env(self, exists=True):
        """
        Return environment dict.

        Parameters
        ----------
        exists: bool
            If True, only return existing paths.
        """
        env = dict(
            include=self._build_paths('include',
                                      [self.VCIncludes,
                                       self.OSIncludes,
                                       self.UCRTIncludes,
                                       self.NetFxSDKIncludes],
                                      exists),
            lib=self._build_paths('lib',
                                  [self.VCLibraries,
                                   self.OSLibraries,
                                   self.FxTools,
                                   self.UCRTLibraries,
                                   self.NetFxSDKLibraries],
                                  exists),
            libpath=self._build_paths('libpath',
                                      [self.VCLibraries,
                                       self.FxTools,
                                       self.VCStoreRefs,
                                       self.OSLibpath],
                                      exists),
            path=self._build_paths('path',
                                   [self.VCTools,
                                    self.VSTools,
                                    self.VsTDb,
                                    self.SdkTools,
                                    self.SdkSetup,
                                    self.FxTools,
                                    self.MSBuild,
                                    self.HTMLHelpWorkshop,
                                    self.FSharp],
                                   exists),
        )
        if self.vc_ver >= 14 and os.path.isfile(self.VCRuntimeRedist):
            env['py_vcruntime_redist'] = self.VCRuntimeRedist
        return env

    def _build_paths(self, name, spec_path_lists, exists):
        """
        Given an environment variable name and specified paths,
        return a pathsep-separated string of paths containing
        unique, extant, directories from those paths and from
        the environment variable. Raise an error if no paths
        are resolved.
        """
        # flatten spec_path_lists
        spec_paths = itertools.chain.from_iterable(spec_path_lists)
        env_paths = safe_env.get(name, '').split(os.pathsep)
        paths = itertools.chain(spec_paths, env_paths)
        extant_paths = list(filter(os.path.isdir, paths)) if exists else paths
        if not extant_paths:
            msg = "%s environment variable is empty" % name.upper()
            raise distutils.errors.DistutilsPlatformError(msg)
        unique_paths = self._unique_everseen(extant_paths)
        return os.pathsep.join(unique_paths)

    # from Python docs
    def _unique_everseen(self, iterable, key=None):
        """
        List unique elements, preserving order.
        Remember all elements ever seen.

        _unique_everseen('AAAABBBCCDAABBB') --> A B C D

        _unique_everseen('ABBCcAD', str.lower) --> A B C D
        """
        seen = set()
        seen_add = seen.add
        if key is None:
            for element in filterfalse(seen.__contains__, iterable):
                seen_add(element)
                yield element
        else:
            for element in iterable:
                k = key(element)
                if k not in seen:
                    seen_add(k)
                    yield element
| apache-2.0 |
grantdelozier/TextGWR | GWRMain.py | 1 | 14380 | import sys
if len(sys.argv) >= 3:
#try:
print sys.argv
args = sys.argv
mode_arg = args[args.index("-mode")+1]
print mode_arg
#############Build Reference File Mode###############
if mode_arg.lower() == "build_ref_files":
import BuildRef
print "Building Reference Files"
tf = args[args.index("-tf")+1]
print tf
rf_std_out = args[args.index("-rf_std_out")+1]
print rf_std_out
rf_obs_out = args[args.index("-rf_obs_out")+1]
print rf_obs_out
if '-wordlist' in args:
wordlist = args[args.index("-wordlist")+1]
if '-listuse' in args:
listuse = args[args.index("-listuse")+1]
else: listuse = 'NA'
else: wordlist = 'any'
BuildRef.Build_ref_files(tf, rf_std_out, rf_obs_out, wordlist, listuse)
print "~~~~~~~~~Building Complete~~~~~~~~"
print "Check: ", rf_std_out, " AND ", rf_obs_out
#############Create Weighted(u) matrix and Y(u) vector files################
if mode_arg.lower() == "create_wu_y":
import CreateWu_Y
print "Creating Weight and Y vector files"
try:
if '-kern' in args:
fullarg = args[args.index("-kern")+1]
kerntype = fullarg[:fullarg.rfind('_')]
print kerntype
dist = float(fullarg[fullarg.rfind('_')+1:])
print dist
else:
kerntype = 'quartic'
dist = 900000.0
except:
print "Kernel Argument is not formmated correctly"
print "it should be something like quartic_900000 or epanech_800000 (units must be meters)"
print "run with -help for more options"
sys.exit("Error")
try:
ulist = (args[args.index("-ulist")+1]).split(',')
except:
print "Your ulist is not formatted correctly"
print "it should be something like 400,8,3000 with no spaces between the numbers"
sys.exit("Error")
try:
ptbl = args[args.index("-ptbl")+1]
except:
print "ERROR ON -ptbl argument"
print "This argument should contain the name of the table which was created using DB_Load"
sys.exit("Error")
if '-pointgrid' in args:
pointgrid = args[args.index("-pointgrid")+1]
else: pointgrid = 'pointgrid_5_clip'
try:
conn = args[args.index('-conn')+1]
except:
print "Problem parsing the connection information provided"
if '-zeroed' in args:
zval = args[args.index('-zeroed')+1]
if zval.lower() == 'f':
zeroed = False
else: zeroed = True
else: zeroed = True
rf_obs_in = args[args.index("-rf_obs_in")+1]
w_y_direct = args[args.index("-wu_y_dir_out")+1]
CreateWu_Y.create(w_y_direct, ulist, kerntype, dist, conn, ptbl, pointgrid, zeroed, rf_obs_in)
#################Create and Load Database With People/Documents####################
if mode_arg.lower() == "db_load":
import DB_Load
print "Beginning DB Loading Process"
if '-tf' in args:
f = args[args.index("-tf")+1]
elif '-df' in args:
f = args[args.index("-df")+1]
elif '-tstf' in args:
f = args[args.index("-tstf")+1]
tbl_name = args[args.index("-ptbl")+1]
try:
conn = args[args.index('-conn')+1]
except:
print "Problem parsing the connection information provided"
DB_Load.Load(f, tbl_name, conn)
#################Train the prediction model on given file using GWR####################
if mode_arg.lower() == "train":
import Train
print "Beginning GWR Train Process"
if '-tf' in args:
f = args[args.index("-tf")+1]
elif '-df' in args:
f = args[args.index("-df")+1]
elif '-tstf' in args:
f = args[args.index("-tstf")+1]
rf_obs_in = args[args.index("-rf_obs_in")+1]
rf_std_in = args[args.index("-rf_std_in")+1]
wu_y_direct = args[args.index("-wu_y_dir_in")+1]
b_direct = args[args.index("-b_dir_out")+1]
if '-lam' in args:
lam = float(args[args.index("-lam")+1])
else: lam = 0
try:
ulist = (args[args.index("-ulist")+1]).split(',')
except:
print "Your ulist is not formatted correctly"
print "it should be something like 400,8,3000 with no spaces between the numbers"
sys.exit("Error")
try:
if '-kern' in args:
fullarg = args[args.index("-kern")+1]
kerntype = fullarg[:fullarg.rfind('_')]
print kerntype
dist = float(fullarg[fullarg.rfind('_')+1:])
print dist
else:
kerntype = 'quartic'
dist = 900000.0
except:
print "Kernel Argument is not formmated correctly"
print "it should be something like quartic_900000 or epanech_800000 (units must be meters)"
print "run with -help for more options"
sys.exit("Error")
Train.train(f, rf_obs_in, rf_std_in, wu_y_direct, ulist, kerntype, lam, b_direct)
if mode_arg.lower() == "morans_calc":
import MCV1
print "Beginning Morans Calc Process"
if '-tf' in args:
f = args[args.index("-tf")+1]
try:
if '-kern' in args:
fullarg = args[args.index("-kern")+1]
kerntype = fullarg[:fullarg.rfind('_')]
print kerntype
dist = float(fullarg[fullarg.rfind('_')+1:])
print dist
else:
kerntype = 'quartic'
dist = 100000.0
except:
print "Kernel Argument is not formmated correctly"
print "it should be something like quartic_900000 or uniform_50000 (units must be meters)"
print "run with -help for more options"
sys.exit("Error")
if "-pointgrid" in args:
pointgrid = args[args.index("-pointgrid")+1]
else: pointgrid = 'pointgrid_5_clip'
ptbl = args[args.index("-ptbl")+1]
outmorans = args[args.index("-outmoranfile")+1]
MCV1.calc(f, ptbl, pointgrid, kerntype, dist, outmorans)
if mode_arg.lower() == "gi_calc":
import GiStatV1
print "Beginning Getis Ord Gi* Statistic Calculation"
if '-tf' in args:
f = args[args.index("-tf")+1]
elif '-df' in args:
f = args[args.index("-df")+1]
elif '-tstf' in args:
f = args[args.index("-tstf")+1]
if '-wordlist' in args:
wordlist = args[args.index("-wordlist")+1]
if '-listuse' in args:
listuse = args[args.index("-listuse")+1]
else: listuse = 'NA'
else: wordlist = 'any'
try:
conn = args[args.index('-conn')+1]
except:
print "Problem parsing the connection information provided"
try:
ptbl = args[args.index("-ptbl")+1]
except:
print "ERROR ON -ptbl argument"
print "This argument should contain the name of the table which was created using DB_Load"
sys.exit("Error")
try:
if '-kern' in args:
fullarg = args[args.index("-kern")+1]
kerntype = fullarg[:fullarg.rfind('_')].lower()
print kerntype
dist = float(fullarg[fullarg.rfind('_')+1:])
print dist
else:
kerntype = 'quartic'
dist = 900000.0
except:
print "Kernel Argument is not formmated correctly"
print "it should be something like quartic_900000 or epanech_800000 (units must be meters)"
print "run with -help for more options"
sys.exit("Error")
if "-pointgrid" in args:
pointgrid = args[args.index("-pointgrid")+1]
else: pointgrid = "None"
outf = args[args.index("-gi_out")+1]
GiStatV1.calc(f, ptbl, kerntype, dist, conn, outf, wordlist, listuse, pointgrid)
# Evaluation mode: apply previously-trained score files to a held-out file.
if mode_arg.lower() == "test":
    import Test
    # Input corpus: train (-tf), devel (-df) or test (-tstf) file.
    if '-tf' in args:
        f = args[args.index("-tf")+1]
    elif '-df' in args:
        f = args[args.index("-df")+1]
    elif '-tstf' in args:
        f = args[args.index("-tstf")+1]
    rf_std_in = args[args.index("-rf_std_in")+1]  # std-dev reference file (in)
    b_direct = args[args.index("-b_dir_in")+1]    # directory of score files (in)
    # Comma-separated grid point ids, e.g. 400,8,3000 (no spaces).
    try:
        ulist = (args[args.index("-ulist")+1]).split(',')
    except:
        print "Your ulist is not formatted correctly"
        print "it should be something like 400,8,3000 with no spaces between the numbers"
        sys.exit("Error")
    # Kernel spec "<type>_<meters>"; note 'dist' is parsed here but not
    # forwarded to Test.test() below.
    try:
        if '-kern' in args:
            fullarg = args[args.index("-kern")+1]
            kerntype = fullarg[:fullarg.rfind('_')]
            print kerntype
            dist = float(fullarg[fullarg.rfind('_')+1:])
            print dist
        else:
            kerntype = 'quartic'
            dist = 900000.0
    except:
        print "Kernel Argument is not formmated correctly"
        print "it should be something like quartic_900000 or epanech_800000 (units must be meters)"
        print "run with -help for more options"
        sys.exit("Error")
    Test.test(f, rf_std_in, b_direct, ulist, kerntype)
#except:
# print "ERROR: THERE WAS A PROBLEM INTERPRETING THE ARGUMENTS"
# print "Must Specify -mode"
# print "Execute GWRMain.py -help for information"
elif "-help" in sys.argv:
print "---------------------"
print "MODE ARGUMENTS"
print "-mode"
print "db_load ((-tf OR -df OR -tstf), -conn, -tbl)"
print "Build_ref_files (-tf, -rf_std_out, -rf_obs_out, -wordlist(OPTIONAL))"
print "NOT FUNCTIONAL: Create_Wu ((-tf OR -df OR -tstf), -kern, -ulist, -wu_dir_out)"
print "NOT FUNCTIONAL: Create_Y ((-tf OR -df OR -tstf), -ulist, -y_dir_out)"
print "Create_Wu_Y (-ptbl, -conn, -pointgrid(OPTIONAL), -kern(OPTIONAL), -zeroed(OPTOINAL), -ulist, -wu_y_dir_out, -rf_obs_in)"
print "Morans_Calc (-ptbl, -pointgrid, -tf, -outmoranfile, -kern, -rf_std_in, -wordlist(OPTIONAL))"
print "Train (-tf, (-wu_y_dir_in OR (-y_dir_in AND -wu_dir_in), -rf_std_in, -rf_obs_in, -ulist, -b_dir_out, -lambda))"
print "Gi_Calc (-tf, -ptbl, -conn, -kern, -gi_out, -pointgrid(OPTIONAL))"
print "NOT FUNCTIONAL: Test (-tstf, -rf_std_in, -b_dir_in, -pred_out)"
print "NOT FUNCTIONAL: Train_Test (-tf, -tstf, (-wu_y_dir_in OR (-y_dir_in AND -wu_dir_in), -rf_std_in, -rf_obs_in, -ulist, -b_dir_out, -pred_out, -lambda))"
print "---------------------"
print "Train File"
print "-tf"
print "absolute path of train file"
print "---------------------"
print "Devel File"
print "-df"
print "absolute path of devel file"
print "---------------------"
print "Test File"
print "-tstf"
print "absolute path of test file"
print "---------------------"
print "Standard Deviation Reference File (in)"
print "-rf_std_in"
print "absolute path of std_dev reference"
print "---------------------"
print "Standard Deviation Reference File (out)"
print "-rf_std_out"
print "absolute path of std_dev reference"
print "---------------------"
print "Observation (aka people, users) Reference File (in)"
print "-rf_obs_in"
print "absolute path of std_dev reference"
print "---------------------"
print "Observation (aka people, users) Reference File (out)"
print "-rf_obs_out"
print "absolute path of std_dev reference"
print "---------------------"
print "Weight Matrix Directory (out)"
print "-wu_dir_out"
print "---------------------"
print "Weight Matrix Directory (in)"
print "-wu_dir_in"
print "---------------------"
print "Y(u) vector Directory (out)"
print "-y_dir_out"
print "---------------------"
print "Y(u) vector Directory (in)"
print "-y_dir_in"
print "---------------------"
print "Weight Matrix and Y(u) vector Directory (out)"
print "-wu_y_dir_out"
print "---------------------"
print "Weight Matrix and Y(u) vector Directory (in)"
print "-wu_y_dir_in"
print "---------------------"
print "Ulist: a list of grid point id's; a different regression is trained for each one"
print "-ulist"
print "e.g. -ulist 900,2000,2100,4000,5000"
print "---------------------"
print "Score Files (out)"
print "-b_dir_out"
print "directory where score files will be written to"
print "---------------------"
print "Score Files (in)"
print "-b_dir_in"
print "directory where score files will be read from"
print "---------------------"
print "lambda (cost value), default set to 1"
print "-lambda"
print "---------------------"
print "Predictions Out"
print "-pred_out"
print "Absolute path of a file predictions written to"
print "---------------------"
print "Kernel Function (OPTIONAL)(defaults to quartic_900000) (<method>_<number_of_meters>)"
print "-kern"
print "e.g. quartic, epanech"
print "---------------------"
print "-Zeroed Kernel (OPTIONAL)"
print "-zeroed"
print "e.g. -zeroed F"
print "---------------------"
print "-Person Table: name of person table that you are creating/reading from in postgres"
print "-ptbl"
print "i.e. do not begin with symbols/numbers and avoid upper case"
print "---------------------"
print "-Word List File: name of a file that contains words (one per line) that you want to include in the model"
print "-wordlist"
print "OPTIONAL: if left unspecified will default to all possible words in the train file"
print "Should be an absolute path"
else:
print "###ERRROR####: You did not specify enough arguments"
print "Try -help"
| apache-2.0 |
klenks/jobsportal | env/lib/python2.7/site-packages/pip/vcs/__init__.py | 344 | 12374 | """Handles all VCS (version control) support"""
from __future__ import absolute_import
import errno
import logging
import os
import shutil
import sys
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip.exceptions import BadCommand
from pip.utils import (display_path, backup_dir, call_subprocess,
rmtree, ask_path_exists)
__all__ = ['vcs', 'get_src_requirement']
logger = logging.getLogger(__name__)
class VcsSupport(object):
    """Registry of the version-control backends pip knows about."""

    _registry = {}
    schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']

    def __init__(self):
        # Teach urlparse about the VCS schemes so URL splitting keeps
        # netloc (and, where supported, fragment) information for them.
        urllib_parse.uses_netloc.extend(self.schemes)
        # Python >= 2.7.4, 3.3 doesn't have uses_fragment
        if getattr(urllib_parse, 'uses_fragment', None):
            urllib_parse.uses_fragment.extend(self.schemes)
        super(VcsSupport, self).__init__()

    def __iter__(self):
        return iter(self._registry)

    @property
    def backends(self):
        return list(self._registry.values())

    @property
    def dirnames(self):
        return [backend_cls.dirname for backend_cls in self.backends]

    @property
    def all_schemes(self):
        collected = []
        for backend_cls in self.backends:
            collected.extend(backend_cls.schemes)
        return collected

    def register(self, cls):
        """Add a backend class to the registry, keyed by its ``name``."""
        if not hasattr(cls, 'name'):
            logger.warning('Cannot register VCS %s', cls.__name__)
            return
        if cls.name in self._registry:
            return
        self._registry[cls.name] = cls
        logger.debug('Registered VCS backend: %s', cls.name)

    def unregister(self, cls=None, name=None):
        """Remove a backend, addressed either by name or by class."""
        if name in self._registry:
            self._registry.pop(name)
            return
        if cls in self._registry.values():
            self._registry.pop(cls.name)
            return
        logger.warning('Cannot unregister because no class or name given')

    def get_backend_name(self, location):
        """
        Return the name of the version control backend if found at given
        location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
        """
        for candidate in self._registry.values():
            if not candidate.controls_location(location):
                continue
            logger.debug('Determine that %s uses VCS: %s',
                         location, candidate.name)
            return candidate.name
        return None

    def get_backend(self, name):
        return self._registry.get(name.lower())

    def get_backend_from_location(self, location):
        backend_name = self.get_backend_name(location)
        return self.get_backend(backend_name) if backend_name else None
vcs = VcsSupport()
class VersionControl(object):
    """Base class for a single version-control backend (git, hg, svn, bzr).

    Subclasses set ``name``/``dirname``/``schemes`` and implement the
    ``NotImplementedError`` stubs; the shared logic here covers URL/rev
    parsing, destination checking and subprocess invocation.
    """

    name = ''      # backend identifier, e.g. 'git'
    dirname = ''   # metadata directory marking a checkout, e.g. '.git'
    # List of supported schemes for this Version Control
    schemes = ()

    def __init__(self, url=None, *args, **kwargs):
        self.url = url
        super(VersionControl, self).__init__(*args, **kwargs)

    def _is_local_repository(self, repo):
        """
        posix absolute paths start with os.path.sep,
        win32 ones start with drive (like c:\\folder)
        """
        drive, tail = os.path.splitdrive(repo)
        return repo.startswith(os.path.sep) or drive

    # See issue #1083 for why this method was introduced:
    # https://github.com/pypa/pip/issues/1083
    def translate_egg_surname(self, surname):
        # For example, Django has branches of the form "stable/1.7.x".
        return surname.replace('/', '_')

    def export(self, location):
        """
        Export the repository at the url to the destination location
        i.e. only download the files, without vcs informations
        """
        raise NotImplementedError

    def get_url_rev(self):
        """
        Returns the correct repository URL and revision by parsing the given
        repository URL
        """
        error_message = (
            "Sorry, '%s' is a malformed VCS url. "
            "The format is <vcs>+<protocol>://<url>, "
            "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp"
        )
        assert '+' in self.url, error_message % self.url
        # Strip the '<vcs>+' prefix before splitting the real URL.
        url = self.url.split('+', 1)[1]
        scheme, netloc, path, query, frag = urllib_parse.urlsplit(url)
        rev = None
        if '@' in path:
            # A trailing '@<rev>' selects a specific revision.
            path, rev = path.rsplit('@', 1)
        url = urllib_parse.urlunsplit((scheme, netloc, path, query, ''))
        return url, rev

    def get_info(self, location):
        """
        Returns (url, revision), where both are strings
        """
        assert not location.rstrip('/').endswith(self.dirname), \
            'Bad directory: %s' % location
        return self.get_url(location), self.get_revision(location)

    def normalize_url(self, url):
        """
        Normalize a URL for comparison by unquoting it and removing any
        trailing slash.
        """
        return urllib_parse.unquote(url).rstrip('/')

    def compare_urls(self, url1, url2):
        """
        Compare two repo URLs for identity, ignoring incidental differences.
        """
        return (self.normalize_url(url1) == self.normalize_url(url2))

    def obtain(self, dest):
        """
        Called when installing or updating an editable package, takes the
        source path of the checkout.
        """
        raise NotImplementedError

    def switch(self, dest, url, rev_options):
        """
        Switch the repo at ``dest`` to point to ``URL``.
        """
        raise NotImplementedError

    def update(self, dest, rev_options):
        """
        Update an already-existing repo to the given ``rev_options``.
        """
        raise NotImplementedError

    def check_version(self, dest, rev_options):
        """
        Return True if the version is identical to what exists and
        doesn't need to be updated.
        """
        raise NotImplementedError

    def check_destination(self, dest, url, rev_options, rev_display):
        """
        Prepare a location to receive a checkout/clone.

        Return True if the location is ready for (and requires) a
        checkout/clone, False otherwise.
        """
        checkout = True
        prompt = False
        if os.path.exists(dest):
            checkout = False
            if os.path.exists(os.path.join(dest, self.dirname)):
                # dest already looks like a checkout of this VCS.
                existing_url = self.get_url(dest)
                if self.compare_urls(existing_url, url):
                    # Same URL: update in place if the revision differs.
                    logger.debug(
                        '%s in %s exists, and has correct URL (%s)',
                        self.repo_name.title(),
                        display_path(dest),
                        url,
                    )
                    if not self.check_version(dest, rev_options):
                        logger.info(
                            'Updating %s %s%s',
                            display_path(dest),
                            self.repo_name,
                            rev_display,
                        )
                        self.update(dest, rev_options)
                    else:
                        logger.info(
                            'Skipping because already up-to-date.')
                else:
                    # Same VCS, different URL: ask the user what to do.
                    logger.warning(
                        '%s %s in %s exists with URL %s',
                        self.name,
                        self.repo_name,
                        display_path(dest),
                        existing_url,
                    )
                    prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
                              ('s', 'i', 'w', 'b'))
            else:
                # dest exists but is not a checkout of this VCS at all.
                logger.warning(
                    'Directory %s already exists, and is not a %s %s.',
                    dest,
                    self.name,
                    self.repo_name,
                )
                prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
        if prompt:
            logger.warning(
                'The plan is to install the %s repository %s',
                self.name,
                url,
            )
            response = ask_path_exists('What to do? %s' % prompt[0],
                                       prompt[1])

            if response == 's':
                logger.info(
                    'Switching %s %s to %s%s',
                    self.repo_name,
                    display_path(dest),
                    url,
                    rev_display,
                )
                self.switch(dest, url, rev_options)
            elif response == 'i':
                # do nothing
                pass
            elif response == 'w':
                logger.warning('Deleting %s', display_path(dest))
                rmtree(dest)
                checkout = True
            elif response == 'b':
                dest_dir = backup_dir(dest)
                logger.warning(
                    'Backing up %s to %s', display_path(dest), dest_dir,
                )
                shutil.move(dest, dest_dir)
                checkout = True
            elif response == 'a':
                # NOTE(review): 'a' is not offered in the prompt strings
                # above — presumably ask_path_exists always accepts an
                # abort option; confirm before relying on it.
                sys.exit(-1)
        return checkout

    def unpack(self, location):
        """
        Clean up current location and download the url repository
        (and vcs infos) into location
        """
        if os.path.exists(location):
            rmtree(location)
        self.obtain(location)

    def get_src_requirement(self, dist, location):
        """
        Return a string representing the requirement needed to
        redownload the files currently present in location, something
        like:
          {repository_url}@{revision}#egg={project_name}-{version_identifier}
        """
        raise NotImplementedError

    def get_url(self, location):
        """
        Return the url used at location
        Used in get_info or check_destination
        """
        raise NotImplementedError

    def get_revision(self, location):
        """
        Return the current revision of the files at location
        Used in get_info
        """
        raise NotImplementedError

    def run_command(self, cmd, show_stdout=True, cwd=None,
                    on_returncode='raise',
                    command_desc=None,
                    extra_environ=None, spinner=None):
        """
        Run a VCS subcommand
        This is simply a wrapper around call_subprocess that adds the VCS
        command name, and checks that the VCS is available
        """
        cmd = [self.name] + cmd
        try:
            return call_subprocess(cmd, show_stdout, cwd,
                                   on_returncode,
                                   command_desc, extra_environ,
                                   spinner)
        except OSError as e:
            # errno.ENOENT = no such file or directory
            # In other words, the VCS executable isn't available
            if e.errno == errno.ENOENT:
                raise BadCommand('Cannot find command %r' % self.name)
            else:
                raise  # re-raise exception if a different error occurred

    @classmethod
    def controls_location(cls, location):
        """
        Check if a location is controlled by the vcs.
        It is meant to be overridden to implement smarter detection
        mechanisms for specific vcs.
        """
        logger.debug('Checking in %s for %s (%s)...',
                     location, cls.dirname, cls.name)
        path = os.path.join(location, cls.dirname)
        return os.path.exists(path)
def get_src_requirement(dist, location):
    """Return a requirement string for the editable checkout at *location*.

    Falls back to ``dist.as_requirement()`` when no VCS backend matches the
    directory or the backend's command-line tool is missing.
    """
    backend = vcs.get_backend_from_location(location)
    if not backend:
        logger.warning(
            'cannot determine version of editable source in %s (is not SVN '
            'checkout, Git clone, Mercurial clone or Bazaar branch)',
            location,
        )
        return dist.as_requirement()
    try:
        return backend().get_src_requirement(dist, location)
    except BadCommand:
        logger.warning(
            'cannot determine version of editable source in %s '
            '(%s command not found in path)',
            location,
            backend.name,
        )
        return dist.as_requirement()
| mit |
shivaenigma/electrum | plugins/email_requests.py | 5 | 7044 | #!/usr/bin/env python
#
# Electrum - Lightweight Bitcoin Client
# Copyright (C) 2015 Thomas Voegtlin
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
import socket
import time
import threading
import base64
from decimal import Decimal
from Queue import Queue
import smtplib
import imaplib
import email
from email.MIMEMultipart import MIMEMultipart
from email.MIMEBase import MIMEBase
from email import Encoders
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import PyQt4.QtCore as QtCore
import PyQt4.QtGui as QtGui
from electrum.paymentrequest import PR_UNPAID, PR_PAID, PR_EXPIRED
from electrum.plugins import BasePlugin, hook
from electrum import util
from electrum.paymentrequest import PaymentRequest
from electrum.i18n import _
from electrum_gui.qt.util import text_dialog, EnterButton
class Processor(threading.Thread):
    """Daemon thread that polls an IMAP mailbox for BIP70 payment requests
    and can also email payment requests out over SMTP.

    Every MIME part of content type ``application/bitcoin-paymentrequest``
    is base64-decoded and handed to the ``callback`` supplied at
    construction time.
    """

    # Seconds to sleep between mailbox scans.
    polling_interval = 5*60

    def __init__(self, imap_server, username, password, callback):
        """Store credentials and the receive callback; the IMAP connection
        itself is opened lazily in run()."""
        threading.Thread.__init__(self)
        self.daemon = True
        self.username = username
        self.password = password
        self.imap_server = imap_server
        self.on_receive = callback

    def poll(self):
        """Scan all messages in the selected mailbox and invoke the
        callback for every payment-request attachment found."""
        try:
            self.M.select()
        except:
            # Connection problem; skip this cycle and retry later.
            return
        typ, data = self.M.search(None, 'ALL')
        for num in data[0].split():
            typ, msg_data = self.M.fetch(num, '(RFC822)')
            msg = email.message_from_string(msg_data[0][1])
            # BUGFIX: non-multipart messages used to be skipped entirely —
            # the old code wrapped the raw payload string in a list and then
            # `continue`d past it (and the wrapped string would have lacked
            # get_content_type() anyway). Treating the Message object itself
            # as the single "part" handles both layouts uniformly.
            if msg.is_multipart():
                parts = msg.get_payload()
            else:
                parts = [msg]
            for item in parts:
                if item.get_content_type() == "application/bitcoin-paymentrequest":
                    pr_str = item.get_payload()
                    pr_str = base64.b64decode(pr_str)
                    self.on_receive(pr_str)

    def run(self):
        """Thread body: log in, then poll forever at polling_interval."""
        self.M = imaplib.IMAP4_SSL(self.imap_server)
        self.M.login(self.username, self.password)
        while True:
            self.poll()
            time.sleep(self.polling_interval)
        # NOTE(review): unreachable — the loop above never exits, so the
        # connection is never closed/logged out cleanly.
        self.M.close()
        self.M.logout()

    def send(self, recipient, message, payment_request):
        """Email *payment_request* to *recipient* as a BIP70 attachment,
        reusing the IMAP server/credentials for SMTP-over-SSL."""
        msg = MIMEMultipart()
        msg['Subject'] = message
        msg['To'] = recipient
        msg['From'] = self.username
        part = MIMEBase('application', "bitcoin-paymentrequest")
        part.set_payload(payment_request)
        Encoders.encode_base64(part)
        part.add_header('Content-Disposition', 'attachment; filename="payreq.btc"')
        msg.attach(part)
        s = smtplib.SMTP_SSL(self.imap_server, timeout=2)
        s.login(self.username, self.password)
        s.sendmail(self.username, [recipient], msg.as_string())
        s.quit()
class Plugin(BasePlugin):
    """Electrum plugin: exchange BIP70 payment requests over email.

    Starts a Processor (IMAP polling) thread when credentials are
    configured, adds a "Send via e-mail" action to the receive list, and
    provides a settings dialog for server/username/password.
    """

    def fullname(self):
        return 'Email'

    def description(self):
        return _("Send and receive payment requests via email")

    def is_available(self):
        return True

    def __init__(self, parent, config, name):
        BasePlugin.__init__(self, parent, config, name)
        self.imap_server = self.config.get('email_server', '')
        self.username = self.config.get('email_username', '')
        self.password = self.config.get('email_password', '')
        # Only start the background poller once all credentials are set.
        if self.imap_server and self.username and self.password:
            self.processor = Processor(self.imap_server, self.username, self.password, self.on_receive)
            self.processor.start()
        # QObject used to marshal "new invoice" notifications from the
        # poller thread back onto the Qt GUI thread via a signal.
        self.obj = QObject()
        self.obj.connect(self.obj, SIGNAL('email:new_invoice'), self.new_invoice)

    def on_receive(self, pr_str):
        # Runs on the Processor thread; hand off to the GUI thread.
        self.print_error('received payment request')
        self.pr = PaymentRequest(pr_str)
        self.obj.emit(SIGNAL('email:new_invoice'))

    def new_invoice(self):
        # Runs on the GUI thread: record and display the received invoice.
        if self.parent.windows:
            window = self.parent.windows[0]
            window.invoices.add(self.pr)
            window.update_invoices_list()

    @hook
    def receive_list_menu(self, menu, addr):
        window = menu.parentWidget()
        menu.addAction(_("Send via e-mail"), lambda: self.send(window, addr))

    def send(self, window, addr):
        """Serialize the receive request for *addr* and email it."""
        from electrum import paymentrequest
        r = window.wallet.receive_requests.get(addr)
        message = r.get('memo', '')
        # Prefer an already-signed request; otherwise build a fresh one.
        if r.get('signature'):
            pr = paymentrequest.serialize_request(r)
        else:
            pr = paymentrequest.make_request(self.config, r)
        if not pr:
            return
        recipient, ok = QtGui.QInputDialog.getText(window, 'Send request', 'Email invoice to:')
        if not ok:
            return
        recipient = str(recipient)
        payload = pr.SerializeToString()
        self.print_error('sending mail to', recipient)
        try:
            self.processor.send(recipient, message, payload)
        except BaseException as e:
            window.show_message(str(e))
            return
        window.show_message(_('Request sent.'))

    def requires_settings(self):
        return True

    def settings_widget(self, window):
        self.settings_window = window
        return EnterButton(_('Settings'), self.settings_dialog)

    def settings_dialog(self, x):
        """Modal dialog editing the IMAP server and credentials; persists
        the values into the Electrum config on OK."""
        from electrum_gui.qt.util import Buttons, CloseButton, OkButton
        d = QDialog(self.settings_window)
        d.setWindowTitle("Email settings")
        d.setMinimumSize(500, 200)
        vbox = QVBoxLayout(d)
        vbox.addWidget(QLabel(_('Server hosting your email acount')))
        grid = QGridLayout()
        vbox.addLayout(grid)
        grid.addWidget(QLabel('Server (IMAP)'), 0, 0)
        server_e = QLineEdit()
        server_e.setText(self.imap_server)
        grid.addWidget(server_e, 0, 1)
        grid.addWidget(QLabel('Username'), 1, 0)
        username_e = QLineEdit()
        username_e.setText(self.username)
        grid.addWidget(username_e, 1, 1)
        grid.addWidget(QLabel('Password'), 2, 0)
        password_e = QLineEdit()
        password_e.setText(self.password)
        grid.addWidget(password_e, 2, 1)
        vbox.addStretch()
        vbox.addLayout(Buttons(CloseButton(d), OkButton(d)))
        if not d.exec_():
            return
        server = str(server_e.text())
        self.config.set_key('email_server', server)
        username = str(username_e.text())
        self.config.set_key('email_username', username)
        password = str(password_e.text())
        self.config.set_key('email_password', password)
| gpl-3.0 |
thaim/ansible | lib/ansible/modules/notification/twilio.py | 75 | 5583 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Matt Makai <matthew.makai@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
version_added: "1.6"
module: twilio
short_description: Sends a text message to a mobile phone through Twilio.
description:
- Sends a text message to a phone number through the Twilio messaging API.
notes:
- This module is non-idempotent because it sends an email through the
external API. It is idempotent only in the case that the module fails.
- Like the other notification modules, this one requires an external
dependency to work. In this case, you'll need a Twilio account with
a purchased or verified phone number to send the text message.
options:
account_sid:
description:
user's Twilio account token found on the account page
required: true
auth_token:
description: user's Twilio authentication token
required: true
msg:
description:
the body of the text message
required: true
to_numbers:
description:
one or more phone numbers to send the text message to,
format +15551112222
required: true
aliases: [ to_number ]
from_number:
description:
the Twilio number to send the text message from, format +15551112222
required: true
media_url:
description:
a URL with a picture, video or sound clip to send with an MMS
(multimedia message) instead of a plain SMS
required: false
author: "Matt Makai (@makaimc)"
'''
EXAMPLES = '''
# send an SMS about the build status to (555) 303 5681
# note: replace account_sid and auth_token values with your credentials
# and you have to have the 'from_number' on your Twilio account
- twilio:
msg: All servers with webserver role are now configured.
account_sid: ACXXXXXXXXXXXXXXXXX
auth_token: ACXXXXXXXXXXXXXXXXX
from_number: +15552014545
to_number: +15553035681
delegate_to: localhost
# send an SMS to multiple phone numbers about the deployment
# note: replace account_sid and auth_token values with your credentials
# and you have to have the 'from_number' on your Twilio account
- twilio:
msg: This server configuration is now complete.
account_sid: ACXXXXXXXXXXXXXXXXX
auth_token: ACXXXXXXXXXXXXXXXXX
from_number: +15553258899
to_numbers:
- +15551113232
- +12025551235
- +19735559010
delegate_to: localhost
# send an MMS to a single recipient with an update on the deployment
# and an image of the results
# note: replace account_sid and auth_token values with your credentials
# and you have to have the 'from_number' on your Twilio account
- twilio:
msg: Deployment complete!
account_sid: ACXXXXXXXXXXXXXXXXX
auth_token: ACXXXXXXXXXXXXXXXXX
from_number: +15552014545
to_number: +15553035681
media_url: https://demo.twilio.com/logo.png
delegate_to: localhost
'''
# =======================================
# twilio module support methods
#
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.parse import urlencode
from ansible.module_utils.urls import fetch_url
def post_twilio_api(module, account_sid, auth_token, msg, from_number,
                    to_number, media_url=None):
    """POST one SMS/MMS to the Twilio REST API; return fetch_url's
    ``(response, info)`` pair."""
    endpoint = ("https://api.twilio.com/2010-04-01/Accounts/%s/Messages.json"
                % (account_sid,))
    payload = {'From': from_number, 'To': to_number, 'Body': msg}
    if media_url:
        payload['MediaUrl'] = media_url
    request_headers = {
        'User-Agent': "Ansible",
        'Content-type': 'application/x-www-form-urlencoded',
        'Accept': 'application/json',
    }
    # fetch_url() reads HTTP basic-auth credentials out of module.params,
    # so smuggle the SID/token in there (stripping any stray newlines).
    module.params['url_username'] = account_sid.replace('\n', '')
    module.params['url_password'] = auth_token.replace('\n', '')
    return fetch_url(module, endpoint, data=urlencode(payload),
                     headers=request_headers)
# =======================================
# Main
#
def main():
    """Ansible module entry point: send the text message to every recipient.

    Fails the task on the first Twilio API error; otherwise exits with
    ``changed=False`` (sending a message is treated as non-mutating).
    """
    module = AnsibleModule(
        argument_spec=dict(
            account_sid=dict(required=True),
            auth_token=dict(required=True, no_log=True),
            msg=dict(required=True),
            from_number=dict(required=True),
            to_numbers=dict(required=True, aliases=['to_number'], type='list'),
            media_url=dict(default=None, required=False),
        ),
        supports_check_mode=True
    )

    account_sid = module.params['account_sid']
    auth_token = module.params['auth_token']
    msg = module.params['msg']
    from_number = module.params['from_number']
    to_numbers = module.params['to_numbers']
    media_url = module.params['media_url']

    # One API call per destination number; abort on the first failure.
    for number in to_numbers:
        r, info = post_twilio_api(module, account_sid, auth_token, msg,
                                  from_number, number, media_url)
        if info['status'] not in [200, 201]:
            # Surface Twilio's error message when the response body has one.
            body_message = "unknown error"
            if 'body' in info:
                body = module.from_json(info['body'])
                body_message = body['message']
            module.fail_json(msg="unable to send message to %s: %s" % (number, body_message))

    module.exit_json(msg=msg, changed=False)


if __name__ == '__main__':
    main()
| mit |
razvanphp/arangodb | 3rdParty/V8-3.31.74.1/third_party/python_26/Lib/distutils/tests/test_sysconfig.py | 17 | 1593 | """Tests for distutils.dist."""
from distutils import sysconfig
import os
import unittest
from test.test_support import TESTFN
class SysconfigTestCase(unittest.TestCase):
    """Smoke tests for distutils.sysconfig path and config-var helpers."""

    def test_get_config_h_filename(self):
        # pyconfig.h must exist at the reported location.
        config_h = sysconfig.get_config_h_filename()
        self.assert_(os.path.isfile(config_h), config_h)

    def test_get_python_lib(self):
        lib_dir = sysconfig.get_python_lib()
        # XXX doesn't work on Linux when Python was never installed before
        #self.assert_(os.path.isdir(lib_dir), lib_dir)
        # test for pythonxx.lib?
        # A custom prefix must change the computed library directory.
        self.assertNotEqual(sysconfig.get_python_lib(),
                            sysconfig.get_python_lib(prefix=TESTFN))

    def test_get_python_inc(self):
        # NOTE: See bugs 4151, 4070 of the py tracker. The patch
        # proposed in 4151 does lots more changes to setup.py,
        # sysconfig.py and other tests. I cannot say how much these
        # would influence the build at large, so I ignored these
        # parts. What is now left of this function is identical to
        # that patch, functionality-wise.
        inc_dir = sysconfig.get_python_inc()
        self.assert_(os.path.isdir(inc_dir), inc_dir)
        python_h = os.path.join(inc_dir, "Python.h")
        self.assert_(os.path.isfile(python_h), python_h)

    def test_get_config_vars(self):
        # Must be a non-empty dict.
        cvars = sysconfig.get_config_vars()
        self.assert_(isinstance(cvars, dict))
        self.assert_(cvars)
def test_suite():
    """Return a suite containing every test in SysconfigTestCase."""
    return unittest.TestSuite([unittest.makeSuite(SysconfigTestCase)])
| apache-2.0 |
sanjayankur31/nest-simulator | pynest/nest/tests/test_threads.py | 15 | 3654 | # -*- coding: utf-8 -*-
#
# test_threads.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
UnitTests for multithreaded pynest
"""
import unittest
import nest
@nest.ll_api.check_stack
class ThreadTestCase(unittest.TestCase):
    """Tests for multi-threading"""

    def nest_multithreaded(self):
        """Return True, if we have a thread-enabled NEST, False otherwise"""
        return nest.ll_api.sli_func("statusdict/threading :: (no) eq not")

    def test_Threads(self):
        """Multiple threads"""
        if not self.nest_multithreaded():
            self.skipTest("NEST was compiled without multi-threading")

        nest.ResetKernel()
        # Kernel resets to a single thread by default.
        self.assertEqual(nest.GetKernelStatus()['local_num_threads'], 1)

        nest.SetKernelStatus({'local_num_threads': 8})
        n = nest.Create('iaf_psc_alpha', 8)
        st = list(nest.GetStatus(n, 'vp'))
        st.sort()
        # With 8 threads, the 8 neurons spread across virtual processes 0-7.
        self.assertEqual(st, [0, 1, 2, 3, 4, 5, 6, 7])

    def test_ThreadsGetConnections(self):
        """GetConnections with threads"""
        if not self.nest_multithreaded():
            self.skipTest("NEST was compiled without multi-threading")

        nest.ResetKernel()
        nest.SetKernelStatus({'local_num_threads': 8})
        pre = nest.Create("iaf_psc_alpha")
        post = nest.Create("iaf_psc_alpha", 6)

        nest.Connect(pre, post)

        conn = nest.GetConnections(pre)
        # Because of threading, targets may be in a different order than
        # in post, so we sort the vector.
        targets = list(conn.get("target"))
        targets.sort()

        self.assertEqual(targets, post.tolist())

    def test_ThreadsGetEvents(self):
        """ Gathering events across threads """
        if not self.nest_multithreaded():
            self.skipTest("NEST was compiled without multi-threading")

        threads = (1, 2, 4, 8)
        n_events_sr = []
        n_events_vm = []

        N = 128
        Simtime = 1000.

        for t in threads:
            nest.ResetKernel()
            nest.SetKernelStatus({'local_num_threads': t})

            # force a lot of spike events
            n = nest.Create('iaf_psc_alpha', N, {'I_e': 2000.})
            sr = nest.Create('spike_recorder')
            vm = nest.Create('voltmeter')

            nest.Connect(n, sr)
            nest.Connect(vm, n)

            nest.Simulate(Simtime)

            n_events_sr.append(nest.GetStatus(sr, 'n_events')[0])
            n_events_vm.append(nest.GetStatus(vm, 'n_events')[0])

        # Voltmeter records one sample per neuron per ms except the first.
        ref_vm = N * (Simtime - 1)
        # Spike counts must agree across all thread configurations.
        ref_sr = n_events_sr[0]

        # could be done more elegantly with any(), ravel(),
        # but we dont want to be dependent on numpy et al
        [self.assertEqual(x, ref_vm) for x in n_events_vm]
        [self.assertEqual(x, ref_sr) for x in n_events_sr]
def suite():
    """Assemble every 'test*' method of ThreadTestCase into a suite."""
    return unittest.makeSuite(ThreadTestCase, 'test')
def run():
    """Execute the thread test suite with a verbose console runner."""
    unittest.TextTestRunner(verbosity=2).run(suite())
if __name__ == "__main__":
run()
| gpl-2.0 |
cloudera/Impala | tests/util/filesystem_base.py | 2 | 2221 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Filsystem access abstraction
from abc import ABCMeta, abstractmethod
class BaseFilesystem(object):
    """Abstract interface over a storage backend.

    Concrete subclasses implement every operation below so that callers can
    stay agnostic of the underlying filesystem.
    """
    __metaclass__ = ABCMeta

    @abstractmethod
    def create_file(self, path, file_data, overwrite):
        """Create the file at 'path' containing the string 'file_data'.
        Overwrites an existing file only when 'overwrite' is True. Returns
        True on success, False when the file already exists, and raises an
        exception otherwise."""
        pass

    @abstractmethod
    def make_dir(self, path, permission):
        """Create the directory 'path' with octal umask 'permission'.
        Returns True on success and raises an exception otherwise."""
        pass

    @abstractmethod
    def copy(self, src, dst):
        """Copy the file at 'src' to 'dst'; raises on failure."""
        pass

    @abstractmethod
    def ls(self, path):
        """List every file/dir/key under 'path'; raises when 'path' is
        invalid."""
        pass

    @abstractmethod
    def exists(self, path):
        """Return True when 'path' exists, False otherwise."""
        pass

    @abstractmethod
    def delete_file_dir(self, path, recursive):
        """Delete everything under 'path'. Returns True on success or when
        the path does not exist; raises an exception otherwise."""
        pass

    @abstractmethod
    def get_all_file_sizes(self, path):
        """Return the sizes (as a list of integers) of all files found
        under 'path'."""
        pass
| apache-2.0 |
HKUST-SING/tensorflow | tensorflow/python/kernel_tests/summary_audio_op_test.py | 134 | 2594 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for summary sound op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.core.framework import summary_pb2
from tensorflow.python.framework import ops
from tensorflow.python.platform import test
from tensorflow.python.summary import summary
class SummaryAudioOpTest(test.TestCase):
  """Tests for summary.audio: proto shape metadata across channel counts."""

  def _AsSummary(self, s):
    # Deserialize the raw bytes produced by running the summary op into a
    # summary_pb2.Summary proto so its fields can be inspected.
    summ = summary_pb2.Summary()
    summ.ParseFromString(s)
    return summ

  def _CheckProto(self, audio_summ, sample_rate, num_channels, length_frames):
    """Verify that the non-audio parts of the audio_summ proto match shape."""
    # Only the first 3 sounds are returned.
    for v in audio_summ.value:
      # Strip the (random) encoded audio so only metadata is compared.
      v.audio.ClearField("encoded_audio_string")
    expected = "\n".join("""
value {
tag: "snd/audio/%d"
audio { content_type: "audio/wav" sample_rate: %d
num_channels: %d length_frames: %d }
}""" % (i, sample_rate, num_channels, length_frames) for i in xrange(3))
    self.assertProtoEquals(expected, audio_summ)

  def testAudioSummary(self):
    # Fixed seed keeps the generated audio deterministic across runs.
    np.random.seed(7)
    for channels in (1, 2, 5, 8):
      with self.test_session(graph=ops.Graph()) as sess:
        num_frames = 7
        # (batch, frames, channels); batch of 4 but max_outputs=3 below.
        shape = (4, num_frames, channels)
        # Generate random audio in the range [-1.0, 1.0).
        const = 2.0 * np.random.random(shape) - 1.0
        # Summarize
        sample_rate = 8000
        summ = summary.audio(
            "snd", const, max_outputs=3, sample_rate=sample_rate)
        value = sess.run(summ)
        # The summary op yields a scalar (serialized proto string).
        self.assertEqual([], summ.get_shape())
        audio_summ = self._AsSummary(value)
        # Check the rest of the proto
        self._CheckProto(audio_summ, sample_rate, channels, num_frames)
# Allow running this test file directly from the command line.
if __name__ == "__main__":
  test.main()
| apache-2.0 |
willthames/ansible | lib/ansible/module_utils/mysql.py | 114 | 3293 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Jonathan Mainguy <jon@soh.re>, 2015
# Most of this was originally added by Sven Schliesing @muffl0n in the mysql_user.py module
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
# Record whether the MySQL-python (MySQLdb) bindings are importable; the
# flag lets importers fail gracefully instead of crashing at import time.
try:
    import MySQLdb
    mysqldb_found = True
except ImportError:
    mysqldb_found = False
def mysql_connect(module, login_user=None, login_password=None, config_file='', ssl_cert=None, ssl_key=None, ssl_ca=None, db=None, cursor_class=None,
                  connect_timeout=30):
    """Open a MySQL connection and return a cursor on it.

    Connection parameters are assembled from, in increasing priority:
    ``module.params`` (``login_unix_socket`` or ``login_host``/``login_port``),
    the MySQL config file at ``config_file`` (if it exists), and the explicit
    ``login_user``/``login_password`` arguments.

    :param module: AnsibleModule whose params supply host/port/socket.
    :param config_file: path to a my.cnf-style file read via
        ``read_default_file``.
    :param ssl_cert, ssl_key, ssl_ca: optional TLS material.
    :param db: optional database name to select.
    :param cursor_class: ``None`` for a plain cursor; a cursor class object
        to use that class; any other truthy value (historically the string
        ``'DictCursor'``) selects ``MySQLdb.cursors.DictCursor``.
    :param connect_timeout: connection timeout in seconds.
    :returns: a MySQLdb cursor.
    """
    config = {}

    # The ssl sub-dict is only created when at least one TLS option is set,
    # so a bare connect does not send an empty ssl config.
    if ssl_ca is not None or ssl_key is not None or ssl_cert is not None:
        config['ssl'] = {}

    if module.params['login_unix_socket']:
        config['unix_socket'] = module.params['login_unix_socket']
    else:
        config['host'] = module.params['login_host']
        config['port'] = module.params['login_port']

    if os.path.exists(config_file):
        config['read_default_file'] = config_file

    # If login_user or login_password are given, they should override the
    # config file
    if login_user is not None:
        config['user'] = login_user
    if login_password is not None:
        config['passwd'] = login_password
    if ssl_cert is not None:
        config['ssl']['cert'] = ssl_cert
    if ssl_key is not None:
        config['ssl']['key'] = ssl_key
    if ssl_ca is not None:
        config['ssl']['ca'] = ssl_ca
    if db is not None:
        config['db'] = db
    if connect_timeout is not None:
        config['connect_timeout'] = connect_timeout

    db_connection = MySQLdb.connect(**config)
    if cursor_class is None:
        return db_connection.cursor()
    # BUG FIX: the previous code ignored the value of cursor_class and always
    # used DictCursor.  Honor an actual cursor class when one is passed,
    # while keeping backward compatibility with callers that pass a marker
    # string such as 'DictCursor' (any non-class truthy value still selects
    # DictCursor, exactly as before).
    if isinstance(cursor_class, type):
        return db_connection.cursor(cursorclass=cursor_class)
    return db_connection.cursor(cursorclass=MySQLdb.cursors.DictCursor)
| gpl-3.0 |
libscie/liberator | liberator/lib/python3.6/site-packages/chardet/mbcharsetprober.py | 289 | 3413 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
# Proofpoint, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetprober import CharSetProber
from .enums import ProbingState, MachineState
class MultiByteCharSetProber(CharSetProber):
    """
    MultiByteCharSetProber

    Base class for probers of multi-byte encodings.  Subclasses supply the
    ``coding_sm`` state machine and ``distribution_analyzer`` (both left as
    None here) plus the ``charset_name``/``language`` properties.
    """

    def __init__(self, lang_filter=None):
        super(MultiByteCharSetProber, self).__init__(lang_filter=lang_filter)
        # Set by subclasses for the specific encoding being probed.
        self.distribution_analyzer = None
        self.coding_sm = None
        # Rolling two-byte window so a character split across feed() calls
        # can still be handed to the distribution analyzer.
        self._last_char = [0, 0]

    def reset(self):
        # Reset the prober, its state machine and its analyzer for reuse.
        super(MultiByteCharSetProber, self).reset()
        if self.coding_sm:
            self.coding_sm.reset()
        if self.distribution_analyzer:
            self.distribution_analyzer.reset()
        self._last_char = [0, 0]

    @property
    def charset_name(self):
        # Subclasses must report the charset they probe for.
        raise NotImplementedError

    @property
    def language(self):
        # Subclasses must report the (human) language they detect.
        raise NotImplementedError

    def feed(self, byte_str):
        """Run byte_str through the coding state machine, feeding each
        completed character to the distribution analyzer, and return the
        resulting probing state."""
        for i in range(len(byte_str)):
            coding_state = self.coding_sm.next_state(byte_str[i])
            if coding_state == MachineState.ERROR:
                # Illegal byte sequence for this encoding: rule it out.
                self.logger.debug('%s %s prober hit error at byte %s',
                                  self.charset_name, self.language, i)
                self._state = ProbingState.NOT_ME
                break
            elif coding_state == MachineState.ITS_ME:
                self._state = ProbingState.FOUND_IT
                break
            elif coding_state == MachineState.START:
                # A full character just completed; analyze its final pair of
                # bytes.  At i == 0 the previous byte comes from the last
                # call's _last_char window.
                char_len = self.coding_sm.get_current_charlen()
                if i == 0:
                    self._last_char[1] = byte_str[0]
                    self.distribution_analyzer.feed(self._last_char, char_len)
                else:
                    self.distribution_analyzer.feed(byte_str[i - 1:i + 1],
                                                    char_len)

        # Remember the final byte for the next feed() call.
        self._last_char[0] = byte_str[-1]

        if self.state == ProbingState.DETECTING:
            # Shortcut: declare success early once confidence is high enough.
            if (self.distribution_analyzer.got_enough_data() and
                    (self.get_confidence() > self.SHORTCUT_THRESHOLD)):
                self._state = ProbingState.FOUND_IT

        return self.state

    def get_confidence(self):
        # Confidence is delegated entirely to the distribution analyzer.
        return self.distribution_analyzer.get_confidence()
| cc0-1.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.